diff --git a/changelogs/drizzle-kit/0.26.0.md b/changelogs/drizzle-kit/0.26.0.md new file mode 100644 index 000000000..8e054812b --- /dev/null +++ b/changelogs/drizzle-kit/0.26.0.md @@ -0,0 +1,122 @@ +# New Features + +## Checks support in `drizzle-kit` + +You can use drizzle-kit to manage your `check` constraint defined in drizzle-orm schema definition + +For example current drizzle table: + +```ts +import { sql } from "drizzle-orm"; +import { check, pgTable } from "drizzle-orm/pg-core"; + +export const users = pgTable( + "users", + (c) => ({ + id: c.uuid().defaultRandom().primaryKey(), + username: c.text().notNull(), + age: c.integer(), + }), + (table) => ({ + checkConstraint: check("age_check", sql`${table.age} > 21`), + }) +); +``` + +will be generated into + +```sql +CREATE TABLE IF NOT EXISTS "users" ( + "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL, + "username" text NOT NULL, + "age" integer, + CONSTRAINT "age_check" CHECK ("users"."age" > 21) +); +``` + +The same is supported in all dialects + +### Limitations + +- `generate` will work as expected for all check constraint changes. +- `push` will detect only check renames and will recreate the constraint. All other changes to SQL won't be detected and will be ignored. + +So, if you want to change the constraint's SQL definition using only `push`, you would need to manually comment out the constraint, `push`, then put it back with the new SQL definition and `push` one more time. + +## Views support in `drizzle-kit` + +You can use drizzle-kit to manage your `views` defined in drizzle-orm schema definition. 
It will work with all existing dialects and view options + +### PostgreSQL + +For example current drizzle table: + +```ts +import { sql } from "drizzle-orm"; +import { + check, + pgMaterializedView, + pgTable, + pgView, +} from "drizzle-orm/pg-core"; + +export const users = pgTable( + "users", + (c) => ({ + id: c.uuid().defaultRandom().primaryKey(), + username: c.text().notNull(), + age: c.integer(), + }), + (table) => ({ + checkConstraint: check("age_check", sql`${table.age} > 21`), + }) +); + +export const simpleView = pgView("simple_users_view").as((qb) => + qb.select().from(users) +); + +export const materializedView = pgMaterializedView( + "materialized_users_view" +).as((qb) => qb.select().from(users)); +``` + +will be generated into + +```sql +CREATE TABLE IF NOT EXISTS "users" ( + "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL, + "username" text NOT NULL, + "age" integer, + CONSTRAINT "age_check" CHECK ("users"."age" > 21) +); + +CREATE VIEW "public"."simple_users_view" AS (select "id", "username", "age" from "users"); + +CREATE MATERIALIZED VIEW "public"."materialized_users_view" AS (select "id", "username", "age" from "users"); +``` + +Views supported in all dialects, but materialized views are supported only in PostgreSQL + +#### Limitations + +- `generate` will work as expected for all view changes +- `push` limitations: + +1. If you want to change the view's SQL definition using only `push`, you would need to manually comment out the view, `push`, then put it back with the new SQL definition and `push` one more time. + +## Updates for PostgreSQL enums behavior + +We've updated enum behavior in Drizzle with PostgreSQL: + +- Add value after or before in enum: With this change, Drizzle will now respect the order of values in the enum and allow adding new values after or before a specific one. 
+ +- Support for dropping a value from an enum: In this case, Drizzle will attempt to alter all columns using the enum to text, then drop the existing enum and create a new one with the updated set of values. After that, all columns previously using the enum will be altered back to the new enum. + +> If the deleted enum value was used by a column, this process will result in a database error. + +- Support for dropping an enum + +- Support for moving enums between schemas + +- Support for renaming enums diff --git a/changelogs/drizzle-orm/0.35.0.md b/changelogs/drizzle-orm/0.35.0.md new file mode 100644 index 000000000..92bafdff1 --- /dev/null +++ b/changelogs/drizzle-orm/0.35.0.md @@ -0,0 +1,85 @@ +# Important change after 0.34.0 release + +## Updated the init Drizzle database API + +The API from version 0.34.0 turned out to be unusable and needs to be changed. You can read more about our decisions in [this discussion](https://github.com/drizzle-team/drizzle-orm/discussions/3097) + +If you still want to use the new API introduced in 0.34.0, which can create driver clients for you under the hood, you can now do so +```ts +import { drizzle } from "drizzle-orm/node-postgres"; + +const db = drizzle(process.env.DATABASE_URL); +// or +const db = drizzle({ + connection: process.env.DATABASE_URL +}); +const db = drizzle({ + connection: { + user: "...", + password: "...", + host: "...", + port: 4321, + db: "...", + }, +}); + +// if you need to pass logger or schema +const db = drizzle({ + connection: process.env.DATABASE_URL, + logger: true, + schema: schema, +}); +``` + +in order to not introduce breaking change - we will still leave support for deprecated API until V1 release. 
+It will degrade autocomplete performance in connection params due to `DatabaseDriver` | `ConnectionParams` types collision, +but that's a decent compromise against breaking changes + +```ts +import { drizzle } from "drizzle-orm/node-postgres"; +import { Pool } from "pg"; + +const client = new Pool({ connectionString: process.env.DATABASE_URL }); +const db = drizzle(client); // deprecated but available + +// new version +const db = drizzle({ + client: client, +}); +``` + +# New Features + +## New .orderBy() and .limit() functions in update and delete statements SQLite and MySQL + +You now have more options for the `update` and `delete` query builders in MySQL and SQLite + +**Example** + +```ts +await db.update(usersTable).set({ verified: true }).limit(2).orderBy(asc(usersTable.name)); + +await db.delete(usersTable).where(eq(usersTable.verified, false)).limit(1).orderBy(asc(usersTable.name)); +``` + +## New `drizzle.mock()` function + +There were cases where you didn't need to provide a driver to the Drizzle object, and this served as a workaround +```ts +const db = drizzle({} as any) +``` + +Now you can do this using a mock function +```ts +const db = drizzle.mock() +``` + +There is no valid production use case for this, but we used it in situations where we needed to check types, etc., without making actual database calls or dealing with driver creation. 
If anyone was using it, please switch to using mocks now + +# Internal updates + +- Upgraded TS in codebase to the version 5.6.3 + +# Bug fixes + +- [[BUG]: New $count API error with @neondatabase/serverless](https://github.com/drizzle-team/drizzle-orm/issues/3081) \ No newline at end of file diff --git a/drizzle-kit/package.json b/drizzle-kit/package.json index cf771296a..ce2de1468 100644 --- a/drizzle-kit/package.json +++ b/drizzle-kit/package.json @@ -1,6 +1,6 @@ { "name": "drizzle-kit", - "version": "0.25.0", + "version": "0.26.0", "homepage": "https://orm.drizzle.team", "keywords": [ "drizzle", diff --git a/drizzle-kit/src/@types/utils.ts b/drizzle-kit/src/@types/utils.ts index 3f14151a4..04e7e125f 100644 --- a/drizzle-kit/src/@types/utils.ts +++ b/drizzle-kit/src/@types/utils.ts @@ -4,6 +4,8 @@ declare global { squashSpaces(): string; capitalise(): string; camelCase(): string; + snake_case(): string; + concatIf(it: string, condition: boolean): string; } @@ -44,6 +46,10 @@ String.prototype.concatIf = function(it: string, condition: boolean) { return condition ? `${this}${it}` : String(this); }; +String.prototype.snake_case = function() { + return this && this.length > 0 ? 
`${this.replace(/[A-Z]/g, (letter) => `_${letter.toLowerCase()}`)}` : String(this); +}; + Array.prototype.random = function() { return this[~~(Math.random() * this.length)]; }; diff --git a/drizzle-kit/src/api.ts b/drizzle-kit/src/api.ts index 51e5dfcfc..a5b8bfe69 100644 --- a/drizzle-kit/src/api.ts +++ b/drizzle-kit/src/api.ts @@ -1,14 +1,16 @@ import { randomUUID } from 'crypto'; -import type { BetterSQLite3Database } from 'drizzle-orm/better-sqlite3'; import { LibSQLDatabase } from 'drizzle-orm/libsql'; import type { MySql2Database } from 'drizzle-orm/mysql2'; import { PgDatabase } from 'drizzle-orm/pg-core'; import { columnsResolver, enumsResolver, + mySqlViewsResolver, schemasResolver, sequencesResolver, + sqliteViewsResolver, tablesResolver, + viewsResolver, } from './cli/commands/migrate'; import { pgPushIntrospect } from './cli/commands/pgIntrospect'; import { pgSuggestions } from './cli/commands/pgPushUtils'; @@ -45,6 +47,8 @@ export const generateDrizzleJson = ( prepared.enums, prepared.schemas, prepared.sequences, + prepared.views, + prepared.matViews, casing, schemaFilters, ); @@ -76,6 +80,7 @@ export const generateMigration = async ( sequencesResolver, tablesResolver, columnsResolver, + viewsResolver, validatedPrev, validatedCur, ); @@ -119,6 +124,7 @@ export const pushSchema = async ( sequencesResolver, tablesResolver, columnsResolver, + viewsResolver, validatedPrev, validatedCur, 'push', @@ -151,7 +157,7 @@ export const generateSQLiteDrizzleJson = async ( const id = randomUUID(); - const snapshot = generateSqliteSnapshot(prepared.tables, casing); + const snapshot = generateSqliteSnapshot(prepared.tables, prepared.views, casing); return { ...snapshot, @@ -177,6 +183,7 @@ export const generateSQLiteMigration = async ( squashedCur, tablesResolver, columnsResolver, + sqliteViewsResolver, validatedPrev, validatedCur, ); @@ -217,6 +224,7 @@ export const pushSQLiteSchema = async ( squashedCur, tablesResolver, columnsResolver, + sqliteViewsResolver, 
validatedPrev, validatedCur, 'push', @@ -255,7 +263,7 @@ export const generateMySQLDrizzleJson = async ( const id = randomUUID(); - const snapshot = generateMySqlSnapshot(prepared.tables, casing); + const snapshot = generateMySqlSnapshot(prepared.tables, prepared.views, casing); return { ...snapshot, @@ -281,6 +289,7 @@ export const generateMySQLMigration = async ( squashedCur, tablesResolver, columnsResolver, + mySqlViewsResolver, validatedPrev, validatedCur, ); @@ -322,6 +331,7 @@ export const pushMySQLSchema = async ( squashedCur, tablesResolver, columnsResolver, + mySqlViewsResolver, validatedPrev, validatedCur, 'push', diff --git a/drizzle-kit/src/cli/commands/introspect.ts b/drizzle-kit/src/cli/commands/introspect.ts index acd569dea..257150dc0 100644 --- a/drizzle-kit/src/cli/commands/introspect.ts +++ b/drizzle-kit/src/cli/commands/introspect.ts @@ -14,7 +14,12 @@ import { dryPg, type PgSchema, squashPgScheme } from '../../serializer/pgSchema' import { fromDatabase as fromPostgresDatabase } from '../../serializer/pgSerializer'; import { drySQLite, type SQLiteSchema, squashSqliteScheme } from '../../serializer/sqliteSchema'; import { fromDatabase as fromSqliteDatabase } from '../../serializer/sqliteSerializer'; -import { applyMysqlSnapshotsDiff, applyPgSnapshotsDiff, applySqliteSnapshotsDiff } from '../../snapshotsDiffer'; +import { + applyLibSQLSnapshotsDiff, + applyMysqlSnapshotsDiff, + applyPgSnapshotsDiff, + applySqliteSnapshotsDiff, +} from '../../snapshotsDiffer'; import { prepareOutFolder } from '../../utils'; import type { Casing, Prefix } from '../validations/common'; import { LibSQLCredentials } from '../validations/libsql'; @@ -25,9 +30,12 @@ import { IntrospectProgress } from '../views'; import { columnsResolver, enumsResolver, + mySqlViewsResolver, schemasResolver, sequencesResolver, + sqliteViewsResolver, tablesResolver, + viewsResolver, writeResult, } from './migrate'; @@ -100,6 +108,7 @@ export const introspectPostgres = async ( 
sequencesResolver, tablesResolver, columnsResolver, + viewsResolver, dryPg, schema, ); @@ -210,6 +219,7 @@ export const introspectMysql = async ( squashMysqlScheme(schema), tablesResolver, columnsResolver, + mySqlViewsResolver, dryMySql, schema, ); @@ -321,6 +331,7 @@ export const introspectSqlite = async ( squashSqliteScheme(schema), tablesResolver, columnsResolver, + sqliteViewsResolver, drySQLite, schema, ); @@ -427,11 +438,12 @@ export const introspectLibSQL = async ( const { snapshots, journal } = prepareOutFolder(out, 'sqlite'); if (snapshots.length === 0) { - const { sqlStatements, _meta } = await applySqliteSnapshotsDiff( + const { sqlStatements, _meta } = await applyLibSQLSnapshotsDiff( squashSqliteScheme(drySQLite), squashSqliteScheme(schema), tablesResolver, columnsResolver, + sqliteViewsResolver, drySQLite, schema, ); diff --git a/drizzle-kit/src/cli/commands/libSqlPushUtils.ts b/drizzle-kit/src/cli/commands/libSqlPushUtils.ts index 01bb61334..31e90c872 100644 --- a/drizzle-kit/src/cli/commands/libSqlPushUtils.ts +++ b/drizzle-kit/src/cli/commands/libSqlPushUtils.ts @@ -40,9 +40,17 @@ export const _moveDataStatements = ( const compositePKs = Object.values( json.tables[tableName].compositePrimaryKeys, ).map((it) => SQLiteSquasher.unsquashPK(it)); + const checkConstraints = Object.values(json.tables[tableName].checkConstraints); const fks = referenceData.map((it) => SQLiteSquasher.unsquashPushFK(it)); + const mappedCheckConstraints: string[] = checkConstraints.map((it) => + it.replaceAll(`"${tableName}".`, `"${newTableName}".`) + .replaceAll(`\`${tableName}\`.`, `\`${newTableName}\`.`) + .replaceAll(`${tableName}.`, `${newTableName}.`) + .replaceAll(`'${tableName}'.`, `\`${newTableName}\`.`) + ); + // create new table statements.push( new SQLiteCreateTableConvertor().convert({ @@ -51,6 +59,7 @@ export const _moveDataStatements = ( columns: tableColumns, referenceData: fks, compositePKs, + checkConstraints: mappedCheckConstraints, }), ); diff --git 
a/drizzle-kit/src/cli/commands/migrate.ts b/drizzle-kit/src/cli/commands/migrate.ts index b24fa77bc..c4f1e65d1 100644 --- a/drizzle-kit/src/cli/commands/migrate.ts +++ b/drizzle-kit/src/cli/commands/migrate.ts @@ -13,9 +13,9 @@ import { render } from 'hanji'; import path, { join } from 'path'; import { TypeOf } from 'zod'; import type { CommonSchema } from '../../schemaValidator'; -import { MySqlSchema, mysqlSchema, squashMysqlScheme } from '../../serializer/mysqlSchema'; -import { PgSchema, pgSchema, squashPgScheme } from '../../serializer/pgSchema'; -import { SQLiteSchema, sqliteSchema, squashSqliteScheme } from '../../serializer/sqliteSchema'; +import { MySqlSchema, mysqlSchema, squashMysqlScheme, ViewSquashed } from '../../serializer/mysqlSchema'; +import { PgSchema, pgSchema, squashPgScheme, View } from '../../serializer/pgSchema'; +import { SQLiteSchema, sqliteSchema, squashSqliteScheme, View as SQLiteView } from '../../serializer/sqliteSchema'; import { applyLibSQLSnapshotsDiff, applyMysqlSnapshotsDiff, @@ -92,6 +92,72 @@ export const tablesResolver = async ( } }; +export const viewsResolver = async ( + input: ResolverInput, +): Promise> => { + try { + const { created, deleted, moved, renamed } = await promptNamedWithSchemasConflict( + input.created, + input.deleted, + 'view', + ); + + return { + created: created, + deleted: deleted, + moved: moved, + renamed: renamed, + }; + } catch (e) { + console.error(e); + throw e; + } +}; + +export const mySqlViewsResolver = async ( + input: ResolverInput, +): Promise> => { + try { + const { created, deleted, moved, renamed } = await promptNamedWithSchemasConflict( + input.created, + input.deleted, + 'view', + ); + + return { + created: created, + deleted: deleted, + moved: moved, + renamed: renamed, + }; + } catch (e) { + console.error(e); + throw e; + } +}; + +export const sqliteViewsResolver = async ( + input: ResolverInput, +): Promise> => { + try { + const { created, deleted, moved, renamed } = await 
promptNamedWithSchemasConflict( + input.created, + input.deleted, + 'view', + ); + + return { + created: created, + deleted: deleted, + moved: moved, + renamed: renamed, + }; + } catch (e) { + console.error(e); + throw e; + } +}; + export const sequencesResolver = async ( input: ResolverInput, ): Promise> => { @@ -200,6 +266,7 @@ export const prepareAndMigratePg = async (config: GenerateConfig) => { sequencesResolver, tablesResolver, columnsResolver, + viewsResolver, validatedPrev, validatedCur, ); @@ -245,6 +312,7 @@ export const preparePgPush = async ( sequencesResolver, tablesResolver, columnsResolver, + viewsResolver, validatedPrev, validatedCur, 'push', @@ -328,6 +396,7 @@ export const prepareMySQLPush = async ( squashedCur, tablesResolver, columnsResolver, + mySqlViewsResolver, validatedPrev, validatedCur, 'push', @@ -381,6 +450,7 @@ export const prepareAndMigrateMysql = async (config: GenerateConfig) => { squashedCur, tablesResolver, columnsResolver, + mySqlViewsResolver, validatedPrev, validatedCur, ); @@ -441,6 +511,7 @@ export const prepareAndMigrateSqlite = async (config: GenerateConfig) => { squashedCur, tablesResolver, columnsResolver, + sqliteViewsResolver, validatedPrev, validatedCur, ); @@ -502,6 +573,7 @@ export const prepareAndMigrateLibSQL = async (config: GenerateConfig) => { squashedCur, tablesResolver, columnsResolver, + sqliteViewsResolver, validatedPrev, validatedCur, ); @@ -540,6 +612,7 @@ export const prepareSQLitePush = async ( squashedCur, tablesResolver, columnsResolver, + sqliteViewsResolver, validatedPrev, validatedCur, 'push', @@ -572,6 +645,7 @@ export const prepareLibSQLPush = async ( squashedCur, tablesResolver, columnsResolver, + sqliteViewsResolver, validatedPrev, validatedCur, 'push', @@ -665,7 +739,7 @@ export const promptColumnsConflicts = async ( export const promptNamedWithSchemasConflict = async ( newItems: T[], missingItems: T[], - entity: 'table' | 'enum' | 'sequence', + entity: 'table' | 'enum' | 'sequence' | 'view', ): 
Promise<{ created: T[]; renamed: { from: T; to: T }[]; diff --git a/drizzle-kit/src/cli/commands/pgPushUtils.ts b/drizzle-kit/src/cli/commands/pgPushUtils.ts index eee0dc954..a8e2570df 100644 --- a/drizzle-kit/src/cli/commands/pgPushUtils.ts +++ b/drizzle-kit/src/cli/commands/pgPushUtils.ts @@ -47,15 +47,9 @@ function tableNameWithSchemaFrom( renamedSchemas: Record, renamedTables: Record, ) { - const newSchemaName = schema - ? renamedSchemas[schema] - ? renamedSchemas[schema] - : schema - : undefined; + const newSchemaName = schema ? (renamedSchemas[schema] ? renamedSchemas[schema] : schema) : undefined; - const newTableName = renamedTables[ - concatSchemaAndTableName(newSchemaName, tableName) - ] + const newTableName = renamedTables[concatSchemaAndTableName(newSchemaName, tableName)] ? renamedTables[concatSchemaAndTableName(newSchemaName, tableName)] : tableName; @@ -71,6 +65,7 @@ export const pgSuggestions = async (db: DB, statements: JsonStatement[]) => { const columnsToRemove: string[] = []; const schemasToRemove: string[] = []; const tablesToTruncate: string[] = []; + const matViewsToRemove: string[] = []; let renamedSchemas: Record = {}; let renamedTables: Record = {}; @@ -79,53 +74,44 @@ export const pgSuggestions = async (db: DB, statements: JsonStatement[]) => { if (statement.type === 'rename_schema') { renamedSchemas[statement.to] = statement.from; } else if (statement.type === 'rename_table') { - renamedTables[ - concatSchemaAndTableName(statement.toSchema, statement.tableNameTo) - ] = statement.tableNameFrom; + renamedTables[concatSchemaAndTableName(statement.toSchema, statement.tableNameTo)] = statement.tableNameFrom; } else if (statement.type === 'drop_table') { const res = await db.query( `select count(*) as count from ${ - tableNameWithSchemaFrom( - statement.schema, - statement.tableName, - renamedSchemas, - renamedTables, - ) + tableNameWithSchemaFrom(statement.schema, statement.tableName, renamedSchemas, renamedTables) }`, ); const count = 
Number(res[0].count); if (count > 0) { - infoToPrint.push( - `· You're about to delete ${ - chalk.underline( - statement.tableName, - ) - } table with ${count} items`, - ); + infoToPrint.push(`· You're about to delete ${chalk.underline(statement.tableName)} table with ${count} items`); // statementsToExecute.push( // `truncate table ${tableNameWithSchemaFrom(statement)} cascade;` // ); tablesToRemove.push(statement.tableName); shouldAskForApprove = true; } + } else if (statement.type === 'drop_view' && statement.materialized) { + const res = await db.query(`select count(*) as count from "${statement.schema ?? 'public'}"."${statement.name}"`); + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to delete "${chalk.underline(statement.name)}" materialized view with ${count} items`, + ); + + matViewsToRemove.push(statement.name); + shouldAskForApprove = true; + } } else if (statement.type === 'alter_table_drop_column') { const res = await db.query( `select count(*) as count from ${ - tableNameWithSchemaFrom( - statement.schema, - statement.tableName, - renamedSchemas, - renamedTables, - ) + tableNameWithSchemaFrom(statement.schema, statement.tableName, renamedSchemas, renamedTables) }`, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to delete ${ - chalk.underline( - statement.columnName, - ) + chalk.underline(statement.columnName) } column in ${statement.tableName} table with ${count} items`, ); columnsToRemove.push(`${statement.tableName}_${statement.columnName}`); @@ -137,48 +123,30 @@ export const pgSuggestions = async (db: DB, statements: JsonStatement[]) => { ); const count = Number(res[0].count); if (count > 0) { - infoToPrint.push( - `· You're about to delete ${ - chalk.underline( - statement.name, - ) - } schema with ${count} tables`, - ); + infoToPrint.push(`· You're about to delete ${chalk.underline(statement.name)} schema with ${count} tables`); 
schemasToRemove.push(statement.name); shouldAskForApprove = true; } } else if (statement.type === 'alter_table_alter_column_set_type') { const res = await db.query( `select count(*) as count from ${ - tableNameWithSchemaFrom( - statement.schema, - statement.tableName, - renamedSchemas, - renamedTables, - ) + tableNameWithSchemaFrom(statement.schema, statement.tableName, renamedSchemas, renamedTables) }`, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( - `· You're about to change ${ - chalk.underline( - statement.columnName, - ) - } column type from ${ + `· You're about to change ${chalk.underline(statement.columnName)} column type from ${ + chalk.underline(statement.oldDataType) + } to ${ chalk.underline( - statement.oldDataType, + statement.newDataType, ) - } to ${chalk.underline(statement.newDataType)} with ${count} items`, + } with ${count} items`, ); statementsToExecute.push( `truncate table ${ - tableNameWithSchemaFrom( - statement.schema, - statement.tableName, - renamedSchemas, - renamedTables, - ) + tableNameWithSchemaFrom(statement.schema, statement.tableName, renamedSchemas, renamedTables) } cascade;`, ); tablesToTruncate.push(statement.tableName); @@ -187,21 +155,14 @@ export const pgSuggestions = async (db: DB, statements: JsonStatement[]) => { } else if (statement.type === 'alter_table_alter_column_drop_pk') { const res = await db.query( `select count(*) as count from ${ - tableNameWithSchemaFrom( - statement.schema, - statement.tableName, - renamedSchemas, - renamedTables, - ) + tableNameWithSchemaFrom(statement.schema, statement.tableName, renamedSchemas, renamedTables) }`, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to change ${ - chalk.underline( - statement.tableName, - ) + chalk.underline(statement.tableName) } primary key. 
This statements may fail and you table may left without primary key`, ); @@ -219,9 +180,7 @@ export const pgSuggestions = async (db: DB, statements: JsonStatement[]) => { const pkNameResponse = await db.query( `SELECT constraint_name FROM information_schema.table_constraints WHERE table_schema = '${ - typeof statement.schema === 'undefined' || statement.schema === '' - ? 'public' - : statement.schema + typeof statement.schema === 'undefined' || statement.schema === '' ? 'public' : statement.schema }' AND table_name = '${statement.tableName}' AND constraint_type = 'PRIMARY KEY';`, @@ -233,39 +192,24 @@ export const pgSuggestions = async (db: DB, statements: JsonStatement[]) => { // we will generate statement for drop pk here and not after all if-else statements continue; } else if (statement.type === 'alter_table_add_column') { - if ( - statement.column.notNull - && typeof statement.column.default === 'undefined' - ) { + if (statement.column.notNull && typeof statement.column.default === 'undefined') { const res = await db.query( `select count(*) as count from ${ - tableNameWithSchemaFrom( - statement.schema, - statement.tableName, - renamedSchemas, - renamedTables, - ) + tableNameWithSchemaFrom(statement.schema, statement.tableName, renamedSchemas, renamedTables) }`, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to add not-null ${ - chalk.underline( - statement.column.name, - ) + chalk.underline(statement.column.name) } column without default value, which contains ${count} items`, ); tablesToTruncate.push(statement.tableName); statementsToExecute.push( `truncate table ${ - tableNameWithSchemaFrom( - statement.schema, - statement.tableName, - renamedSchemas, - renamedTables, - ) + tableNameWithSchemaFrom(statement.schema, statement.tableName, renamedSchemas, renamedTables) } cascade;`, ); @@ -275,12 +219,7 @@ export const pgSuggestions = async (db: DB, statements: JsonStatement[]) => { } else if (statement.type === 
'create_unique_constraint') { const res = await db.query( `select count(*) as count from ${ - tableNameWithSchemaFrom( - statement.schema, - statement.tableName, - renamedSchemas, - renamedTables, - ) + tableNameWithSchemaFrom(statement.schema, statement.tableName, renamedSchemas, renamedTables) }`, ); const count = Number(res[0].count); @@ -298,21 +237,13 @@ export const pgSuggestions = async (db: DB, statements: JsonStatement[]) => { } table?\n`, ); const { status, data } = await render( - new Select([ - 'No, add the constraint without truncating the table', - `Yes, truncate the table`, - ]), + new Select(['No, add the constraint without truncating the table', `Yes, truncate the table`]), ); if (data?.index === 1) { tablesToTruncate.push(statement.tableName); statementsToExecute.push( `truncate table ${ - tableNameWithSchemaFrom( - statement.schema, - statement.tableName, - renamedSchemas, - renamedTables, - ) + tableNameWithSchemaFrom(statement.schema, statement.tableName, renamedSchemas, renamedTables) } cascade;`, ); shouldAskForApprove = true; @@ -323,12 +254,7 @@ export const pgSuggestions = async (db: DB, statements: JsonStatement[]) => { if (typeof stmnt !== 'undefined') { if (statement.type === 'drop_table') { statementsToExecute.push( - `DROP TABLE ${ - concatSchemaAndTableName( - statement.schema, - statement.tableName, - ) - } CASCADE;`, + `DROP TABLE ${concatSchemaAndTableName(statement.schema, statement.tableName)} CASCADE;`, ); } else { statementsToExecute.push(...stmnt); @@ -340,6 +266,7 @@ export const pgSuggestions = async (db: DB, statements: JsonStatement[]) => { statementsToExecute, shouldAskForApprove, infoToPrint, + matViewsToRemove: [...new Set(matViewsToRemove)], columnsToRemove: [...new Set(columnsToRemove)], schemasToRemove: [...new Set(schemasToRemove)], tablesToTruncate: [...new Set(tablesToTruncate)], diff --git a/drizzle-kit/src/cli/commands/push.ts b/drizzle-kit/src/cli/commands/push.ts index d4bd70d08..f84f84d9c 100644 --- 
a/drizzle-kit/src/cli/commands/push.ts +++ b/drizzle-kit/src/cli/commands/push.ts @@ -183,6 +183,7 @@ export const pgPush = async ( statementsToExecute, columnsToRemove, tablesToRemove, + matViewsToRemove, tablesToTruncate, infoToPrint, schemasToRemove, @@ -238,6 +239,12 @@ export const pgPush = async ( tablesToTruncate.length > 0 ? ` truncate ${tablesToTruncate.length} ${tablesToTruncate.length > 1 ? 'tables' : 'table'}` : '' + }${ + matViewsToRemove.length > 0 + ? ` remove ${matViewsToRemove.length} ${ + matViewsToRemove.length > 1 ? 'materialized views' : 'materialize view' + },` + : ' ' }` .replace(/(^,)|(,$)/g, '') .replace(/ +(?= )/g, ''), diff --git a/drizzle-kit/src/cli/commands/sqlitePushUtils.ts b/drizzle-kit/src/cli/commands/sqlitePushUtils.ts index bcc2d19db..a18b36945 100644 --- a/drizzle-kit/src/cli/commands/sqlitePushUtils.ts +++ b/drizzle-kit/src/cli/commands/sqlitePushUtils.ts @@ -27,6 +27,14 @@ export const _moveDataStatements = ( const compositePKs = Object.values( json.tables[tableName].compositePrimaryKeys, ).map((it) => SQLiteSquasher.unsquashPK(it)); + const checkConstraints = Object.values(json.tables[tableName].checkConstraints); + + const mappedCheckConstraints: string[] = checkConstraints.map((it) => + it.replaceAll(`"${tableName}".`, `"${newTableName}".`) + .replaceAll(`\`${tableName}\`.`, `\`${newTableName}\`.`) + .replaceAll(`${tableName}.`, `${newTableName}.`) + .replaceAll(`'${tableName}'.`, `\`${newTableName}\`.`) + ); const fks = referenceData.map((it) => SQLiteSquasher.unsquashPushFK(it)); @@ -38,6 +46,7 @@ export const _moveDataStatements = ( columns: tableColumns, referenceData: fks, compositePKs, + checkConstraints: mappedCheckConstraints, }), ); diff --git a/drizzle-kit/src/cli/views.ts b/drizzle-kit/src/cli/views.ts index 56e0331df..fd4a68d71 100644 --- a/drizzle-kit/src/cli/views.ts +++ b/drizzle-kit/src/cli/views.ts @@ -166,7 +166,7 @@ export class ResolveSelect extends Prompt< constructor( private readonly base: T, data: 
(RenamePropmtItem | T)[], - private readonly entityType: 'table' | 'enum' | 'sequence', + private readonly entityType: 'table' | 'enum' | 'sequence' | 'view', ) { super(); this.on('attach', (terminal) => terminal.toggleCursor('hide')); @@ -330,7 +330,9 @@ export type IntrospectStage = | 'columns' | 'enums' | 'indexes' - | 'fks'; + | 'checks' + | 'fks' + | 'views'; type IntrospectState = { [key in IntrospectStage]: { count: number; @@ -369,6 +371,16 @@ export class IntrospectProgress extends TaskView { name: 'foreign keys', status: 'fetching', }, + checks: { + count: 0, + name: 'check constraints', + status: 'fetching', + }, + views: { + count: 0, + name: 'views', + status: 'fetching', + }, }; constructor(private readonly hasEnums: boolean = false) { @@ -422,6 +434,9 @@ export class IntrospectProgress extends TaskView { info += this.hasEnums ? this.statusText(spin, this.state.enums) : ''; info += this.statusText(spin, this.state.indexes); info += this.statusText(spin, this.state.fks); + info += this.statusText(spin, this.state.checks); + info += this.statusText(spin, this.state.views); + return info; } } diff --git a/drizzle-kit/src/introspect-mysql.ts b/drizzle-kit/src/introspect-mysql.ts index 8c1aa3a76..ea287713f 100644 --- a/drizzle-kit/src/introspect-mysql.ts +++ b/drizzle-kit/src/introspect-mysql.ts @@ -4,6 +4,7 @@ import './@types/utils'; import type { Casing } from './cli/validations/common'; import { assertUnreachable } from './global'; import { + CheckConstraint, Column, ForeignKey, Index, @@ -155,11 +156,15 @@ export const schemaToTypeScript = ( const uniqueImports = Object.values(it.uniqueConstraints).map( (it) => 'unique', ); + const checkImports = Object.values(it.checkConstraint).map( + (it) => 'check', + ); res.mysql.push(...idxImports); res.mysql.push(...fkImpots); res.mysql.push(...pkImports); res.mysql.push(...uniqueImports); + res.mysql.push(...checkImports); const columnImports = Object.values(it.columns) .map((col) => { @@ -186,6 +191,31 @@ 
export const schemaToTypeScript = ( { mysql: [] as string[] }, ); + Object.values(schema.views).forEach((it) => { + imports.mysql.push('mysqlView'); + + const columnImports = Object.values(it.columns) + .map((col) => { + let patched = importsPatch[col.type] ?? col.type; + patched = patched.startsWith('varchar(') ? 'varchar' : patched; + patched = patched.startsWith('char(') ? 'char' : patched; + patched = patched.startsWith('binary(') ? 'binary' : patched; + patched = patched.startsWith('decimal(') ? 'decimal' : patched; + patched = patched.startsWith('smallint(') ? 'smallint' : patched; + patched = patched.startsWith('enum(') ? 'mysqlEnum' : patched; + patched = patched.startsWith('datetime(') ? 'datetime' : patched; + patched = patched.startsWith('varbinary(') ? 'varbinary' : patched; + patched = patched.startsWith('int(') ? 'int' : patched; + patched = patched.startsWith('double(') ? 'double' : patched; + return patched; + }) + .filter((type) => { + return mysqlImportsList.has(type); + }); + + imports.mysql.push(...columnImports); + }); + const tableStatements = Object.values(schema.tables).map((table) => { const func = 'mysqlTable'; let statement = ''; @@ -217,6 +247,7 @@ export const schemaToTypeScript = ( || filteredFKs.length > 0 || Object.keys(table.compositePrimaryKeys).length > 0 || Object.keys(table.uniqueConstraints).length > 0 + || Object.keys(table.checkConstraint).length > 0 ) { statement += ',\n'; statement += '(table) => {\n'; @@ -235,6 +266,10 @@ export const schemaToTypeScript = ( Object.values(table.uniqueConstraints), withCasing, ); + statement += createTableChecks( + Object.values(table.checkConstraint), + withCasing, + ); statement += '\t}\n'; statement += '}'; } @@ -243,6 +278,37 @@ export const schemaToTypeScript = ( return statement; }); + const viewsStatements = Object.values(schema.views).map((view) => { + const { columns, name, algorithm, definition, sqlSecurity, withCheckOption } = view; + const func = 'mysqlView'; + let statement = 
''; + + if (imports.mysql.includes(withCasing(name))) { + statement = `// Table name is in conflict with ${ + withCasing( + view.name, + ) + } import.\n// Please change to any other name, that is not in imports list\n`; + } + statement += `export const ${withCasing(name)} = ${func}("${name}", {\n`; + statement += createTableColumns( + Object.values(columns), + [], + withCasing, + casing, + name, + schema, + ); + statement += '})'; + + statement += algorithm ? `.algorithm("${algorithm}")` : ''; + statement += sqlSecurity ? `.sqlSecurity("${sqlSecurity}")` : ''; + statement += withCheckOption ? `.withCheckOption("${withCheckOption}")` : ''; + statement += `.as(sql\`${definition?.replaceAll('`', '\\`')}\`);`; + + return statement; + }); + const uniqueMySqlImports = [ 'mysqlTable', 'mysqlSchema', @@ -257,6 +323,8 @@ export const schemaToTypeScript = ( let decalrations = ''; decalrations += tableStatements.join('\n\n'); + decalrations += '\n'; + decalrations += viewsStatements.join('\n\n'); const file = importsTs + decalrations; @@ -855,6 +923,25 @@ const createTableUniques = ( return statement; }; +const createTableChecks = ( + checks: CheckConstraint[], + casing: (value: string) => string, +): string => { + let statement = ''; + + checks.forEach((it) => { + const checkKey = casing(it.name); + + statement += `\t\t${checkKey}: `; + statement += 'check('; + statement += `"${it.name}", `; + statement += `sql\`${it.value.replace(/`/g, '\\`')}\`)`; + statement += `,\n`; + }); + + return statement; +}; + const createTablePKs = ( pks: PrimaryKey[], casing: (value: string) => string, diff --git a/drizzle-kit/src/introspect-pg.ts b/drizzle-kit/src/introspect-pg.ts index 8eed3d35f..1b57f0115 100644 --- a/drizzle-kit/src/introspect-pg.ts +++ b/drizzle-kit/src/introspect-pg.ts @@ -14,6 +14,7 @@ import { Casing } from './cli/validations/common'; import { vectorOps } from './extensions/vector'; import { assertUnreachable } from './global'; import { + CheckConstraint, Column, 
ForeignKey, Index, @@ -133,9 +134,7 @@ const intervalConfig = (str: string) => { if (keys.length === 0) return; let statement = '{ '; - statement += keys - .map((it: keyof typeof json) => `${it}: ${json[it]}`) - .join(', '); + statement += keys.map((it: keyof typeof json) => `${it}: ${json[it]}`).join(', '); statement += ' }'; return statement; }; @@ -207,10 +206,7 @@ export const relationsToTypeScriptForStudio = ( ...relations, }; - const relationsConfig = extractTablesRelationalConfig( - relationalSchema, - createTableRelationsHelpers, - ); + const relationsConfig = extractTablesRelationalConfig(relationalSchema, createTableRelationsHelpers); let result = ''; @@ -239,45 +235,29 @@ export const relationsToTypeScriptForStudio = ( if (is(relation, Many)) { hasMany = true; relationsObjAsStr += `\t\t${relation.fieldName}: many(${ - relationsConfig.tableNamesMap[relation.referencedTableName].split( - '.', - )[1] - }${ - typeof relation.relationName !== 'undefined' - ? `, { relationName: "${relation.relationName}"}` - : '' - }),`; + relationsConfig.tableNamesMap[relation.referencedTableName].split('.')[1] + }${typeof relation.relationName !== 'undefined' ? 
`, { relationName: "${relation.relationName}"}` : ''}),`; } if (is(relation, One)) { hasOne = true; relationsObjAsStr += `\t\t${relation.fieldName}: one(${ - relationsConfig.tableNamesMap[relation.referencedTableName].split( - '.', - )[1] + relationsConfig.tableNamesMap[relation.referencedTableName].split('.')[1] }, { fields: [${ relation.config?.fields.map( (c) => - `${ - relationsConfig.tableNamesMap[ - getTableName(relation.sourceTable) - ].split('.')[1] - }.${findColumnKey(relation.sourceTable, c.name)}`, + `${relationsConfig.tableNamesMap[getTableName(relation.sourceTable)].split('.')[1]}.${ + findColumnKey(relation.sourceTable, c.name) + }`, ) }], references: [${ relation.config?.references.map( (c) => - `${ - relationsConfig.tableNamesMap[ - getTableName(relation.referencedTable) - ].split('.')[1] - }.${findColumnKey(relation.referencedTable, c.name)}`, + `${relationsConfig.tableNamesMap[getTableName(relation.referencedTable)].split('.')[1]}.${ + findColumnKey(relation.referencedTable, c.name) + }`, ) - }]${ - typeof relation.relationName !== 'undefined' - ? `, relationName: "${relation.relationName}"` - : '' - }}),`; + }]${typeof relation.relationName !== 'undefined' ? 
`, relationName: "${relation.relationName}"` : ''}}),`; } }); @@ -325,10 +305,7 @@ export const paramNameFor = (name: string, schema?: string) => { return `${name}${schemaSuffix}`; }; -export const schemaToTypeScript = ( - schema: PgSchemaInternal, - casing: Casing, -) => { +export const schemaToTypeScript = (schema: PgSchemaInternal, casing: Casing) => { // collectFKs Object.values(schema.tables).forEach((table) => { Object.values(table.foreignKeys).forEach((fk) => { @@ -343,28 +320,23 @@ export const schemaToTypeScript = ( }), ); - const enumTypes = Object.values(schema.enums).reduce( - (acc, cur) => { - acc.add(`${cur.schema}.${cur.name}`); - return acc; - }, - new Set(), - ); + const enumTypes = Object.values(schema.enums).reduce((acc, cur) => { + acc.add(`${cur.schema}.${cur.name}`); + return acc; + }, new Set()); const imports = Object.values(schema.tables).reduce( (res, it) => { - const idxImports = Object.values(it.indexes).map((idx) => idx.isUnique ? 'uniqueIndex' : 'index'); + const idxImports = Object.values(it.indexes).map((idx) => (idx.isUnique ? 
'uniqueIndex' : 'index')); const fkImpots = Object.values(it.foreignKeys).map((it) => 'foreignKey'); - if ( - Object.values(it.foreignKeys).some((it) => isCyclic(it) && !isSelf(it)) - ) { + if (Object.values(it.foreignKeys).some((it) => isCyclic(it) && !isSelf(it))) { res.pg.push('type AnyPgColumn'); } - const pkImports = Object.values(it.compositePrimaryKeys).map( - (it) => 'primaryKey', - ); - const uniqueImports = Object.values(it.uniqueConstraints).map( - (it) => 'unique', + const pkImports = Object.values(it.compositePrimaryKeys).map((it) => 'primaryKey'); + const uniqueImports = Object.values(it.uniqueConstraints).map((it) => 'unique'); + + const checkImports = Object.values(it.checkConstraints).map( + (it) => 'check', ); if (it.schema && it.schema !== 'public' && it.schema !== '') { @@ -375,6 +347,7 @@ export const schemaToTypeScript = ( res.pg.push(...fkImpots); res.pg.push(...pkImports); res.pg.push(...uniqueImports); + res.pg.push(...checkImports); const columnImports = Object.values(it.columns) .map((col) => { @@ -399,6 +372,35 @@ export const schemaToTypeScript = ( { pg: [] as string[] }, ); + Object.values(schema.views).forEach((it) => { + if (it.schema && it.schema !== 'public' && it.schema !== '') { + imports.pg.push('pgSchema'); + } else if (it.schema === 'public') { + it.materialized ? imports.pg.push('pgMaterializedView') : imports.pg.push('pgView'); + } + + Object.values(it.columns).forEach(() => { + const columnImports = Object.values(it.columns) + .map((col) => { + let patched: string = (importsPatch[col.type] || col.type).replace('[]', ''); + patched = patched === 'double precision' ? 'doublePrecision' : patched; + patched = patched.startsWith('varchar(') ? 'varchar' : patched; + patched = patched.startsWith('char(') ? 'char' : patched; + patched = patched.startsWith('numeric(') ? 'numeric' : patched; + patched = patched.startsWith('time(') ? 'time' : patched; + patched = patched.startsWith('timestamp(') ? 
'timestamp' : patched; + patched = patched.startsWith('vector(') ? 'vector' : patched; + patched = patched.startsWith('geometry(') ? 'geometry' : patched; + return patched; + }) + .filter((type) => { + return pgImportsList.has(type); + }); + + imports.pg.push(...columnImports); + }); + }); + Object.values(schema.sequences).forEach((it) => { if (it.schema && it.schema !== 'public' && it.schema !== '') { imports.pg.push('pgSchema'); @@ -503,15 +505,12 @@ export const schemaToTypeScript = ( || Object.values(table.foreignKeys).length > 0 || Object.keys(table.compositePrimaryKeys).length > 0 || Object.keys(table.uniqueConstraints).length > 0 + || Object.keys(table.checkConstraints).length > 0 ) { statement += ',\n'; statement += '(table) => {\n'; statement += '\treturn {\n'; - statement += createTableIndexes( - table.name, - Object.values(table.indexes), - casing, - ); + statement += createTableIndexes(table.name, Object.values(table.indexes), casing); statement += createTableFKs(Object.values(table.foreignKeys), schemas, casing); statement += createTablePKs( Object.values(table.compositePrimaryKeys), @@ -521,6 +520,10 @@ export const schemaToTypeScript = ( Object.values(table.uniqueConstraints), casing, ); + statement += createTableChecks( + Object.values(table.checkConstraints), + casing, + ); statement += '\t}\n'; statement += '}'; } @@ -529,13 +532,46 @@ export const schemaToTypeScript = ( return statement; }); + const viewsStatements = Object.values(schema.views) + .map((it) => { + const viewSchema = schemas[it.schema]; + + const paramName = paramNameFor(it.name, viewSchema); + + const func = viewSchema + ? (it.materialized ? `${viewSchema}.materializedView` : `${viewSchema}.view`) + : it.materialized + ? 'pgMaterializedView' + : 'pgView'; + + const withOption = it.with ?? ''; + + const as = `sql\`${it.definition}\``; + + const tablespace = it.tablespace ?? 
''; + + const columns = createTableColumns( + '', + Object.values(it.columns), + [], + enumTypes, + schemas, + casing, + schema.internal, + ); + + let statement = `export const ${withCasing(paramName, casing)} = ${func}("${it.name}", {${columns}})`; + statement += tablespace ? `.tablespace("${tablespace}")` : ''; + statement += withOption ? `.with(${JSON.stringify(withOption)})` : ''; + statement += `.as(${as});`; + + return statement; + }) + .join('\n\n'); + const uniquePgImports = ['pgTable', ...new Set(imports.pg)]; - const importsTs = `import { ${ - uniquePgImports.join( - ', ', - ) - } } from "drizzle-orm/pg-core" + const importsTs = `import { ${uniquePgImports.join(', ')} } from "drizzle-orm/pg-core" import { sql } from "drizzle-orm"\n\n`; let decalrations = schemaStatements; @@ -543,6 +579,8 @@ export const schemaToTypeScript = ( decalrations += sequencesStatements; decalrations += '\n'; decalrations += tableStatements.join('\n\n'); + decalrations += '\n'; + decalrations += viewsStatements; const file = importsTs + decalrations; @@ -586,9 +624,7 @@ const buildArrayDefault = (defaultValue: string, typeName: string): string => { // } else if (typeName === 'boolean') { // return value === 't' ? 'true' : 'false'; if (typeName === 'json' || typeName === 'jsonb') { - return value - .substring(1, value.length - 1) - .replaceAll('\\', ''); + return value.substring(1, value.length - 1).replaceAll('\\', ''); } return value; // } @@ -652,9 +688,9 @@ const mapDefault = ( if (lowered.startsWith('numeric')) { defaultValue = defaultValue - ? defaultValue.startsWith(`'`) && defaultValue.endsWith(`'`) + ? (defaultValue.startsWith(`'`) && defaultValue.endsWith(`'`) ? defaultValue.substring(1, defaultValue.length - 1) - : defaultValue + : defaultValue) : undefined; return defaultValue ? `.default('${mapColumnDefault(defaultValue, isExpression)}')` : ''; } @@ -663,7 +699,7 @@ const mapDefault = ( return defaultValue === 'now()' ? 
'.defaultNow()' : defaultValue === 'CURRENT_TIMESTAMP' - ? '.default(sql\`CURRENT_TIMESTAMP\`)' + ? '.default(sql`CURRENT_TIMESTAMP`)' : defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; @@ -766,12 +802,9 @@ const column = ( const lowered = type.toLowerCase().replace('[]', ''); if (enumTypes.has(`${typeSchema}.${type.replace('[]', '')}`)) { - let out = `${withCasing(name, casing)}: ${ - withCasing( - paramNameFor(type.replace('[]', ''), typeSchema), - casing, - ) - }(${dbColumnName({ name, casing })})`; + let out = `${withCasing(name, casing)}: ${withCasing(paramNameFor(type.replace('[]', ''), typeSchema), casing)}(${ + dbColumnName({ name, casing }) + })`; return out; } @@ -784,12 +817,9 @@ const column = ( } if (lowered.startsWith('bigserial')) { - return `${ - withCasing( - name, - casing, - ) - }: bigserial(${dbColumnName({ name, casing, withMode: true })}{ mode: "bigint" })`; + return `${withCasing(name, casing)}: bigserial(${ + dbColumnName({ name, casing, withMode: true }) + }{ mode: "bigint" })`; } if (lowered.startsWith('integer')) { @@ -830,14 +860,10 @@ const column = ( } if (lowered.startsWith('numeric')) { - let params: - | { precision: string | undefined; scale: string | undefined } - | undefined; + let params: { precision: string | undefined; scale: string | undefined } | undefined; if (lowered.length > 7) { - const [precision, scale] = lowered - .slice(8, lowered.length - 1) - .split(','); + const [precision, scale] = lowered.slice(8, lowered.length - 1).split(','); params = { precision, scale }; } @@ -852,11 +878,7 @@ const column = ( const withTimezone = lowered.includes('with time zone'); // const split = lowered.split(" "); let precision = lowered.startsWith('timestamp(') - ? Number( - lowered - .split(' ')[0] - .substring('timestamp('.length, lowered.split(' ')[0].length - 1), - ) + ? Number(lowered.split(' ')[0].substring('timestamp('.length, lowered.split(' ')[0].length - 1)) : null; precision = precision ? 
precision : null; @@ -877,11 +899,7 @@ const column = ( const withTimezone = lowered.includes('with time zone'); let precision = lowered.startsWith('time(') - ? Number( - lowered - .split(' ')[0] - .substring('time('.length, lowered.split(' ')[0].length - 1), - ) + ? Number(lowered.split(' ')[0].substring('time('.length, lowered.split(' ')[0].length - 1)) : null; precision = precision ? precision : null; @@ -953,16 +971,8 @@ const column = ( if (lowered.startsWith('varchar')) { let out: string; if (lowered.length !== 7) { - out = `${ - withCasing( - name, - casing, - ) - }: varchar(${dbColumnName({ name, casing, withMode: true })}{ length: ${ - lowered.substring( - 8, - lowered.length - 1, - ) + out = `${withCasing(name, casing)}: varchar(${dbColumnName({ name, casing, withMode: true })}{ length: ${ + lowered.substring(8, lowered.length - 1) } })`; } else { out = `${withCasing(name, casing)}: varchar(${dbColumnName({ name, casing })})`; @@ -1015,16 +1025,8 @@ const column = ( if (lowered.startsWith('vector')) { let out: string; if (lowered.length !== 6) { - out = `${ - withCasing( - name, - casing, - ) - }: vector(${dbColumnName({ name, casing, withMode: true })}{ dimensions: ${ - lowered.substring( - 7, - lowered.length - 1, - ) + out = `${withCasing(name, casing)}: vector(${dbColumnName({ name, casing, withMode: true })}{ dimensions: ${ + lowered.substring(7, lowered.length - 1) } })`; } else { out = `${withCasing(name, casing)}: vector(${dbColumnName({ name, casing })})`; @@ -1036,16 +1038,8 @@ const column = ( if (lowered.startsWith('char')) { let out: string; if (lowered.length !== 4) { - out = `${ - withCasing( - name, - casing, - ) - }: char(${dbColumnName({ name, casing, withMode: true })}{ length: ${ - lowered.substring( - 5, - lowered.length - 1, - ) + out = `${withCasing(name, casing)}: char(${dbColumnName({ name, casing, withMode: true })}{ length: ${ + lowered.substring(5, lowered.length - 1) } })`; } else { out = `${withCasing(name, casing)}: 
char(${dbColumnName({ name, casing })})`; @@ -1108,27 +1102,15 @@ const createTableColumns = ( statement += columnStatement; // Provide just this in column function if (internals?.tables[tableName]?.columns[it.name]?.isArray) { - statement += dimensionsInArray( - internals?.tables[tableName]?.columns[it.name]?.dimensions, - ); + statement += dimensionsInArray(internals?.tables[tableName]?.columns[it.name]?.dimensions); } - statement += mapDefault( - tableName, - it.type, - it.name, - enumTypes, - it.typeSchema ?? 'public', - it.default, - internals, - ); + statement += mapDefault(tableName, it.type, it.name, enumTypes, it.typeSchema ?? 'public', it.default, internals); statement += it.primaryKey ? '.primaryKey()' : ''; statement += it.notNull && !it.identity ? '.notNull()' : ''; statement += it.identity ? generateIdentityParams(it.identity) : ''; - statement += it.generated - ? `.generatedAlwaysAs(sql\`${it.generated.as}\`)` - : ''; + statement += it.generated ? `.generatedAlwaysAs(sql\`${it.generated.as}\`)` : ''; // const fks = fkByColumnName[it.name]; // Andrii: I switched it off until we will get a custom naem setting in references @@ -1169,21 +1151,13 @@ const createTableColumns = ( return statement; }; -const createTableIndexes = ( - tableName: string, - idxs: Index[], - casing: Casing, -): string => { +const createTableIndexes = (tableName: string, idxs: Index[], casing: Casing): string => { let statement = ''; idxs.forEach((it) => { // we have issue when index is called as table called - let idxKey = it.name.startsWith(tableName) && it.name !== tableName - ? it.name.slice(tableName.length + 1) - : it.name; - idxKey = idxKey.endsWith('_index') - ? idxKey.slice(0, -'_index'.length) + '_idx' - : idxKey; + let idxKey = it.name.startsWith(tableName) && it.name !== tableName ? it.name.slice(tableName.length + 1) : it.name; + idxKey = idxKey.endsWith('_index') ? 
idxKey.slice(0, -'_index'.length) + '_idx' : idxKey; idxKey = withCasing(idxKey, casing); @@ -1206,11 +1180,7 @@ const createTableIndexes = ( } else { return `table.${withCasing(it.expression, casing)}${it.asc ? '.asc()' : '.desc()'}${ it.nulls === 'first' ? '.nullsFirst()' : '.nullsLast()' - }${ - it.opclass && vectorOps.includes(it.opclass) - ? `.op("${it.opclass}")` - : '' - }`; + }${it.opclass && vectorOps.includes(it.opclass) ? `.op("${it.opclass}")` : ''}`; } }) .join(', ') @@ -1224,15 +1194,11 @@ const createTableIndexes = ( reversedString += `${key}: "${mappedWith[key]}",`; } } - reversedString = reversedString.length > 1 - ? reversedString.slice(0, reversedString.length - 1) - : reversedString; + reversedString = reversedString.length > 1 ? reversedString.slice(0, reversedString.length - 1) : reversedString; return `${reversedString}}`; } - statement += it.with && Object.keys(it.with).length > 0 - ? `.with(${reverseLogic(it.with)})` - : ''; + statement += it.with && Object.keys(it.with).length > 0 ? `.with(${reverseLogic(it.with)})` : ''; statement += `,\n`; }); @@ -1261,10 +1227,7 @@ const createTablePKs = (pks: PrimaryKey[], casing: Casing): string => { return statement; }; -const createTableUniques = ( - unqs: UniqueConstraint[], - casing: Casing, -): string => { +const createTableUniques = (unqs: UniqueConstraint[], casing: Casing): string => { let statement = ''; unqs.forEach((it) => { @@ -1273,11 +1236,7 @@ const createTableUniques = ( statement += `\t\t${idxKey}: `; statement += 'unique('; statement += `"${it.name}")`; - statement += `.on(${ - it.columns - .map((it) => `table.${withCasing(it, casing)}`) - .join(', ') - })`; + statement += `.on(${it.columns.map((it) => `table.${withCasing(it, casing)}`).join(', ')})`; statement += it.nullsNotDistinct ? 
`.nullsNotDistinct()` : ''; statement += `,\n`; }); @@ -1285,11 +1244,25 @@ const createTableUniques = ( return statement; }; -const createTableFKs = ( - fks: ForeignKey[], - schemas: Record, +const createTableChecks = ( + checkConstraints: CheckConstraint[], casing: Casing, -): string => { +) => { + let statement = ''; + + checkConstraints.forEach((it) => { + const checkKey = withCasing(it.name, casing); + statement += `\t\t${checkKey}: `; + statement += 'check('; + statement += `"${it.name}", `; + statement += `sql\`${it.value}\`)`; + statement += `,\n`; + }); + + return statement; +}; + +const createTableFKs = (fks: ForeignKey[], schemas: Record, casing: Casing): string => { let statement = ''; fks.forEach((it) => { @@ -1299,26 +1272,16 @@ const createTableFKs = ( const isSelf = it.tableTo === it.tableFrom; const tableTo = isSelf ? 'table' : `${withCasing(paramName, casing)}`; statement += `\t\t${withCasing(it.name, casing)}: foreignKey({\n`; - statement += `\t\t\tcolumns: [${ - it.columnsFrom - .map((i) => `table.${withCasing(i, casing)}`) - .join(', ') - }],\n`; + statement += `\t\t\tcolumns: [${it.columnsFrom.map((i) => `table.${withCasing(i, casing)}`).join(', ')}],\n`; statement += `\t\t\tforeignColumns: [${ - it.columnsTo - .map((i) => `${tableTo}.${withCasing(i, casing)}`) - .join(', ') + it.columnsTo.map((i) => `${tableTo}.${withCasing(i, casing)}`).join(', ') }],\n`; statement += `\t\t\tname: "${it.name}"\n`; statement += `\t\t})`; - statement += it.onUpdate && it.onUpdate !== 'no action' - ? `.onUpdate("${it.onUpdate}")` - : ''; + statement += it.onUpdate && it.onUpdate !== 'no action' ? `.onUpdate("${it.onUpdate}")` : ''; - statement += it.onDelete && it.onDelete !== 'no action' - ? `.onDelete("${it.onDelete}")` - : ''; + statement += it.onDelete && it.onDelete !== 'no action' ? 
`.onDelete("${it.onDelete}")` : ''; statement += `,\n`; }); diff --git a/drizzle-kit/src/introspect-sqlite.ts b/drizzle-kit/src/introspect-sqlite.ts index 422e58f86..e21f2a5c4 100644 --- a/drizzle-kit/src/introspect-sqlite.ts +++ b/drizzle-kit/src/introspect-sqlite.ts @@ -3,6 +3,7 @@ import { toCamelCase } from 'drizzle-orm/casing'; import './@types/utils'; import type { Casing } from './cli/validations/common'; import { assertUnreachable } from './global'; +import { CheckConstraint } from './serializer/mysqlSchema'; import type { Column, ForeignKey, @@ -91,11 +92,15 @@ export const schemaToTypeScript = ( const uniqueImports = Object.values(it.uniqueConstraints).map( (it) => 'unique', ); + const checkImports = Object.values(it.checkConstraints).map( + (it) => 'check', + ); res.sqlite.push(...idxImports); res.sqlite.push(...fkImpots); res.sqlite.push(...pkImports); res.sqlite.push(...uniqueImports); + res.sqlite.push(...checkImports); const columnImports = Object.values(it.columns) .map((col) => { @@ -111,6 +116,20 @@ export const schemaToTypeScript = ( { sqlite: [] as string[] }, ); + Object.values(schema.views).forEach((it) => { + imports.sqlite.push('sqliteView'); + + const columnImports = Object.values(it.columns) + .map((col) => { + return col.type; + }) + .filter((type) => { + return sqliteImportsList.has(type); + }); + + imports.sqlite.push(...columnImports); + }); + const tableStatements = Object.values(schema.tables).map((table) => { const func = 'sqliteTable'; let statement = ''; @@ -140,6 +159,7 @@ export const schemaToTypeScript = ( || filteredFKs.length > 0 || Object.keys(table.compositePrimaryKeys).length > 0 || Object.keys(table.uniqueConstraints).length > 0 + || Object.keys(table.checkConstraints).length > 0 ) { statement += ',\n'; statement += '(table) => {\n'; @@ -158,6 +178,10 @@ export const schemaToTypeScript = ( Object.values(table.uniqueConstraints), casing, ); + statement += createTableChecks( + Object.values(table.checkConstraints), + 
casing, + ); statement += '\t}\n'; statement += '}'; } @@ -166,6 +190,30 @@ export const schemaToTypeScript = ( return statement; }); + const viewsStatements = Object.values(schema.views).map((view) => { + const func = 'sqliteView'; + + let statement = ''; + if (imports.sqlite.includes(withCasing(view.name, casing))) { + statement = `// Table name is in conflict with ${ + withCasing( + view.name, + casing, + ) + } import.\n// Please change to any other name, that is not in imports list\n`; + } + statement += `export const ${withCasing(view.name, casing)} = ${func}("${view.name}", {\n`; + statement += createTableColumns( + Object.values(view.columns), + [], + casing, + ); + statement += '})'; + statement += `.as(sql\`${view.definition?.replaceAll('`', '\\`')}\`);`; + + return statement; + }); + const uniqueSqliteImports = [ 'sqliteTable', 'AnySQLiteColumn', @@ -179,7 +227,9 @@ export const schemaToTypeScript = ( } } from "drizzle-orm/sqlite-core" import { sql } from "drizzle-orm"\n\n`; - const decalrations = tableStatements.join('\n\n'); + let decalrations = tableStatements.join('\n\n'); + decalrations += '\n\n'; + decalrations += viewsStatements.join('\n\n'); const file = importsTs + decalrations; @@ -417,6 +467,24 @@ const createTableUniques = ( return statement; }; +const createTableChecks = ( + checks: CheckConstraint[], + casing: Casing, +): string => { + let statement = ''; + + checks.forEach((it) => { + const checkKey = withCasing(it.name, casing); + + statement += `\t\t${checkKey}: `; + statement += 'check('; + statement += `"${it.name}", `; + statement += `sql\`${it.value}\`)`; + statement += `,\n`; + }); + + return statement; +}; const createTablePKs = (pks: PrimaryKey[], casing: Casing): string => { let statement = ''; diff --git a/drizzle-kit/src/jsonDiffer.js b/drizzle-kit/src/jsonDiffer.js index 113d7e0a4..b5a0e3652 100644 --- a/drizzle-kit/src/jsonDiffer.js +++ b/drizzle-kit/src/jsonDiffer.js @@ -158,6 +158,7 @@ export function applyJsonDiff(json1, 
json2) { difference.tables = difference.tables || {}; difference.enums = difference.enums || {}; difference.sequences = difference.sequences || {}; + difference.views = difference.views || {}; // remove added/deleted schemas const schemaKeys = Object.keys(difference.schemas); @@ -239,6 +240,85 @@ export function applyJsonDiff(json1, json2) { return json2.sequences[it[0]]; }); + const viewsEntries = Object.entries(difference.views); + + const alteredViews = viewsEntries.filter((it) => !(it[0].includes('__added') || it[0].includes('__deleted'))).map( + ([nameWithSchema, view]) => { + const deletedWithOption = view.with__deleted; + + const addedWithOption = view.with__added; + + const deletedWith = Object.fromEntries( + Object.entries(view.with || {}).filter((it) => it[0].endsWith('__deleted')).map(([key, value]) => { + return [key.replace('__deleted', ''), value]; + }), + ); + + const addedWith = Object.fromEntries( + Object.entries(view.with || {}).filter((it) => it[0].endsWith('__added')).map(([key, value]) => { + return [key.replace('__added', ''), value]; + }), + ); + + const alterWith = Object.fromEntries( + Object.entries(view.with || {}).filter((it) => + typeof it[1].__old !== 'undefined' && typeof it[1].__new !== 'undefined' + ).map( + (it) => { + return [it[0], it[1].__new]; + }, + ), + ); + + const alteredSchema = view.schema; + + const alteredDefinition = view.definition; + + const alteredExisting = view.isExisting; + + const addedTablespace = view.tablespace__added; + const droppedTablespace = view.tablespace__deleted; + const alterTablespaceTo = view.tablespace; + + let alteredTablespace; + if (addedTablespace) alteredTablespace = { __new: addedTablespace, __old: 'pg_default' }; + if (droppedTablespace) alteredTablespace = { __new: 'pg_default', __old: droppedTablespace }; + if (alterTablespaceTo) alteredTablespace = alterTablespaceTo; + + const addedUsing = view.using__added; + const droppedUsing = view.using__deleted; + const alterUsingTo = view.using; 
+ + let alteredUsing; + if (addedUsing) alteredUsing = { __new: addedUsing, __old: 'heap' }; + if (droppedUsing) alteredUsing = { __new: 'heap', __old: droppedUsing }; + if (alterUsingTo) alteredUsing = alterUsingTo; + + const alteredMeta = view.meta; + + return Object.fromEntries( + Object.entries({ + name: json2.views[nameWithSchema].name, + schema: json2.views[nameWithSchema].schema, + // pg + deletedWithOption: deletedWithOption, + addedWithOption: addedWithOption, + deletedWith: Object.keys(deletedWith).length ? deletedWith : undefined, + addedWith: Object.keys(addedWith).length ? addedWith : undefined, + alteredWith: Object.keys(alterWith).length ? alterWith : undefined, + alteredSchema, + alteredTablespace, + alteredUsing, + // mysql + alteredMeta, + // common + alteredDefinition, + alteredExisting, + }).filter(([_, value]) => value !== undefined), + ); + }, + ); + const alteredTablesWithColumns = Object.values(difference.tables).map( (table) => { return findAlternationsInTable(table); @@ -249,6 +329,7 @@ export function applyJsonDiff(json1, json2) { alteredTablesWithColumns, alteredEnums, alteredSequences, + alteredViews, }; } @@ -346,6 +427,24 @@ const findAlternationsInTable = (table) => { }), ); + const addedCheckConstraints = Object.fromEntries( + Object.entries(table.checkConstraints || {}).filter((it) => { + return it[0].endsWith('__added'); + }), + ); + + const deletedCheckConstraints = Object.fromEntries( + Object.entries(table.checkConstraints || {}).filter((it) => { + return it[0].endsWith('__deleted'); + }), + ); + + const alteredCheckConstraints = Object.fromEntries( + Object.entries(table.checkConstraints || {}).filter((it) => { + return !it[0].endsWith('__deleted') && !it[0].endsWith('__added'); + }), + ); + const mappedAltered = altered.map((it) => alternationsInColumn(it)).filter(Boolean); return { @@ -364,11 +463,15 @@ const findAlternationsInTable = (table) => { addedUniqueConstraints, deletedUniqueConstraints, alteredUniqueConstraints, + 
addedCheckConstraints, + deletedCheckConstraints, + alteredCheckConstraints, }; }; const alternationsInColumn = (column) => { const altered = [column]; + const result = altered .filter((it) => { if ('type' in it && it.type.__old.replace(' (', '(') === it.type.__new.replace(' (', '(')) { @@ -632,6 +735,33 @@ const alternationsInColumn = (column) => { } return it; }) + .map((it) => { + if ('autoincrement' in it) { + return { + ...it, + autoincrement: { + type: 'changed', + old: it.autoincrement.__old, + new: it.autoincrement.__new, + }, + }; + } + if ('autoincrement__added' in it) { + const { autoincrement__added, ...others } = it; + return { + ...others, + autoincrement: { type: 'added', value: it.autoincrement__added }, + }; + } + if ('autoincrement__deleted' in it) { + const { autoincrement__deleted, ...others } = it; + return { + ...others, + autoincrement: { type: 'deleted', value: it.autoincrement__deleted }, + }; + } + return it; + }) + .filter(Boolean); return result[0]; diff --git a/drizzle-kit/src/jsonStatements.ts b/drizzle-kit/src/jsonStatements.ts index 47cb08908..4285c4687 100644 --- a/drizzle-kit/src/jsonStatements.ts +++ b/drizzle-kit/src/jsonStatements.ts @@ -1,14 +1,15 @@ import chalk from 'chalk'; -import { getNewTableName, getOldTableName } from './cli/commands/sqlitePushUtils'; +import { getNewTableName } from './cli/commands/sqlitePushUtils'; import { warning } from './cli/views'; -import { CommonSquashedSchema, Dialect } from './schemaValidator'; -import { MySqlKitInternals, MySqlSchema, MySqlSquasher } from './serializer/mysqlSchema'; -import { Index, PgSchema, PgSquasher } from './serializer/pgSchema'; +import { CommonSquashedSchema } from './schemaValidator'; +import { MySqlKitInternals, MySqlSchema, MySqlSquasher, View as MySqlView } from './serializer/mysqlSchema'; +import { Index, MatViewWithOption, PgSchema, PgSquasher, View as PgView, ViewWithOption } from './serializer/pgSchema'; import { SQLiteKitInternals, SQLiteSchemaInternal, 
SQLiteSchemaSquashed, SQLiteSquasher, + View as SqliteView, } from './serializer/sqliteSchema'; import { AlteredColumn, Column, Sequence, Table } from './snapshotsDiffer'; @@ -27,6 +28,7 @@ export interface JsonSqliteCreateTableStatement { }[]; compositePKs: string[][]; uniqueConstraints?: string[]; + checkConstraints?: string[]; } export interface JsonCreateTableStatement { @@ -37,6 +39,7 @@ export interface JsonCreateTableStatement { compositePKs: string[]; compositePkName?: string; uniqueConstraints?: string[]; + checkConstraints?: string[]; internals?: MySqlKitInternals; } @@ -55,6 +58,7 @@ export interface JsonRecreateTableStatement { }[]; compositePKs: string[][]; uniqueConstraints?: string[]; + checkConstraints: string[]; } export interface JsonDropTableStatement { @@ -106,6 +110,15 @@ export interface JsonAddValueToEnumStatement { before: string; } +export interface JsonDropValueFromEnumStatement { + type: 'alter_type_drop_value'; + name: string; + schema: string; + deletedValues: string[]; + newValues: string[]; + columnsWithEnum: { schema: string; table: string; column: string }[]; +} + export interface JsonCreateSequenceStatement { type: 'create_sequence'; name: string; @@ -234,6 +247,20 @@ export interface JsonAlterUniqueConstraint { newConstraintName?: string; } +export interface JsonCreateCheckConstraint { + type: 'create_check_constraint'; + tableName: string; + data: string; + schema?: string; +} + +export interface JsonDeleteCheckConstraint { + type: 'delete_check_constraint'; + tableName: string; + constraintName: string; + schema?: string; +} + export interface JsonCreateCompositePK { type: 'create_composite_pk'; tableName: string; @@ -524,6 +551,105 @@ export interface JsonRenameSchema { to: string; } +export type JsonCreatePgViewStatement = { + type: 'create_view'; +} & Omit; + +export type JsonCreateMySqlViewStatement = { + type: 'mysql_create_view'; + replace: boolean; +} & Omit; + +export type JsonCreateSqliteViewStatement = { + type: 
'sqlite_create_view'; +} & Omit; + +export interface JsonDropViewStatement { + type: 'drop_view'; + name: string; + schema?: string; + materialized?: boolean; +} + +export interface JsonRenameViewStatement { + type: 'rename_view'; + nameTo: string; + nameFrom: string; + schema: string; + materialized?: boolean; +} + +export interface JsonRenameMySqlViewStatement { + type: 'rename_view'; + nameTo: string; + nameFrom: string; + schema: string; + materialized?: boolean; +} + +export interface JsonAlterViewAlterSchemaStatement { + type: 'alter_view_alter_schema'; + fromSchema: string; + toSchema: string; + name: string; + materialized?: boolean; +} + +export type JsonAlterViewAddWithOptionStatement = + & { + type: 'alter_view_add_with_option'; + schema: string; + name: string; + } + & ({ + materialized: true; + with: MatViewWithOption; + } | { + materialized: false; + with: ViewWithOption; + }); + +export type JsonAlterViewDropWithOptionStatement = + & { + type: 'alter_view_drop_with_option'; + schema: string; + name: string; + } + & ({ + materialized: true; + with: MatViewWithOption; + } | { + materialized: false; + with: ViewWithOption; + }); + +export interface JsonAlterViewAlterTablespaceStatement { + type: 'alter_view_alter_tablespace'; + toTablespace: string; + name: string; + schema: string; + materialized: true; +} + +export interface JsonAlterViewAlterUsingStatement { + type: 'alter_view_alter_using'; + toUsing: string; + name: string; + schema: string; + materialized: true; +} + +export type JsonAlterMySqlViewStatement = { + type: 'alter_mysql_view'; +} & Omit; + +export type JsonAlterViewStatement = + | JsonAlterViewAlterSchemaStatement + | JsonAlterViewAddWithOptionStatement + | JsonAlterViewDropWithOptionStatement + | JsonAlterViewAlterTablespaceStatement + | JsonAlterViewAlterUsingStatement; + export type JsonAlterColumnStatement = | JsonRenameColumnStatement | JsonAlterColumnTypeStatement @@ -582,14 +708,24 @@ export type JsonStatement = | 
JsonDropSequenceStatement | JsonCreateSequenceStatement | JsonMoveSequenceStatement - | JsonRenameSequenceStatement; + | JsonRenameSequenceStatement + | JsonCreatePgViewStatement + | JsonDropViewStatement + | JsonRenameViewStatement + | JsonAlterViewStatement + | JsonCreateMySqlViewStatement + | JsonAlterMySqlViewStatement + | JsonCreateSqliteViewStatement + | JsonCreateCheckConstraint + | JsonDeleteCheckConstraint + | JsonDropValueFromEnumStatement; export const preparePgCreateTableJson = ( table: Table, // TODO: remove? json2: PgSchema, ): JsonCreateTableStatement => { - const { name, schema, columns, compositePrimaryKeys, uniqueConstraints } = table; + const { name, schema, columns, compositePrimaryKeys, uniqueConstraints, checkConstraints } = table; const tableKey = `${schema || 'public'}.${name}`; // TODO: @AndriiSherman. We need this, will add test cases @@ -607,6 +743,7 @@ export const preparePgCreateTableJson = ( compositePKs: Object.values(compositePrimaryKeys), compositePkName: compositePkName, uniqueConstraints: Object.values(uniqueConstraints), + checkConstraints: Object.values(checkConstraints), }; }; @@ -619,7 +756,7 @@ export const prepareMySqlCreateTableJson = ( // if previously it was an expression or column internals: MySqlKitInternals, ): JsonCreateTableStatement => { - const { name, schema, columns, compositePrimaryKeys, uniqueConstraints } = table; + const { name, schema, columns, compositePrimaryKeys, uniqueConstraints, checkConstraints } = table; return { type: 'create_table', @@ -635,6 +772,7 @@ export const prepareMySqlCreateTableJson = ( : '', uniqueConstraints: Object.values(uniqueConstraints), internals, + checkConstraints: Object.values(checkConstraints), }; }; @@ -642,7 +780,7 @@ export const prepareSQLiteCreateTable = ( table: Table, action?: 'push' | undefined, ): JsonSqliteCreateTableStatement => { - const { name, columns, uniqueConstraints } = table; + const { name, columns, uniqueConstraints, checkConstraints } = table; const 
references: string[] = Object.values(table.foreignKeys); @@ -663,6 +801,7 @@ export const prepareSQLiteCreateTable = ( referenceData: fks, compositePKs: composites, uniqueConstraints: Object.values(uniqueConstraints), + checkConstraints: Object.values(checkConstraints), }; }; @@ -717,6 +856,36 @@ export const prepareAddValuesToEnumJson = ( }); }; +export const prepareDropEnumValues = ( + name: string, + schema: string, + removedValues: string[], + json2: PgSchema, +): JsonDropValueFromEnumStatement[] => { + if (!removedValues.length) return []; + + const affectedColumns: { schema: string; table: string; column: string }[] = []; + + for (const tableKey in json2.tables) { + const table = json2.tables[tableKey]; + for (const columnKey in table.columns) { + const column = table.columns[columnKey]; + if (column.type === name && column.typeSchema === schema) { + affectedColumns.push({ schema: table.schema || 'public', table: table.name, column: column.name }); + } + } + } + + return [{ + type: 'alter_type_drop_value', + name: name, + schema: schema, + deletedValues: removedValues, + newValues: json2.enums[`${schema}.${name}`].values, + columnsWithEnum: affectedColumns, + }]; +}; + export const prepareDropEnumJson = ( name: string, schema: string, @@ -2331,6 +2500,36 @@ export const prepareDeleteUniqueConstraintPg = ( }); }; +export const prepareAddCheckConstraint = ( + tableName: string, + schema: string, + check: Record, +): JsonCreateCheckConstraint[] => { + return Object.values(check).map((it) => { + return { + type: 'create_check_constraint', + tableName, + data: it, + schema, + } as JsonCreateCheckConstraint; + }); +}; + +export const prepareDeleteCheckConstraint = ( + tableName: string, + schema: string, + check: Record, +): JsonDeleteCheckConstraint[] => { + return Object.values(check).map((it) => { + return { + type: 'delete_check_constraint', + tableName, + constraintName: PgSquasher.unsquashCheck(it).name, + schema, + } as JsonDeleteCheckConstraint; + }); +}; + 
// add create table changes // add handler to make drop and add and not alter(looking at __old and __new) // add serializer for mysql and sqlite + types @@ -2425,3 +2624,170 @@ export const prepareAlterCompositePrimaryKeyMySql = ( } as JsonAlterCompositePK; }); }; + +export const preparePgCreateViewJson = ( + name: string, + schema: string, + definition: string, + materialized: boolean, + withNoData: boolean = false, + withOption?: any, + using?: string, + tablespace?: string, +): JsonCreatePgViewStatement => { + return { + type: 'create_view', + name: name, + schema: schema, + definition: definition, + with: withOption, + materialized: materialized, + withNoData, + using, + tablespace, + }; +}; + +export const prepareMySqlCreateViewJson = ( + name: string, + definition: string, + meta: string, + replace: boolean = false, +): JsonCreateMySqlViewStatement => { + const { algorithm, sqlSecurity, withCheckOption } = MySqlSquasher.unsquashView(meta); + return { + type: 'mysql_create_view', + name: name, + definition: definition, + algorithm, + sqlSecurity, + withCheckOption, + replace, + }; +}; + +export const prepareSqliteCreateViewJson = ( + name: string, + definition: string, +): JsonCreateSqliteViewStatement => { + return { + type: 'sqlite_create_view', + name: name, + definition: definition, + }; +}; + +export const prepareDropViewJson = ( + name: string, + schema?: string, + materialized?: boolean, +): JsonDropViewStatement => { + const resObject: JsonDropViewStatement = { name, type: 'drop_view' }; + + if (schema) resObject['schema'] = schema; + + if (materialized) resObject['materialized'] = materialized; + + return resObject; +}; + +export const prepareRenameViewJson = ( + to: string, + from: string, + schema?: string, + materialized?: boolean, +): JsonRenameViewStatement => { + const resObject: JsonRenameViewStatement = { + type: 'rename_view', + nameTo: to, + nameFrom: from, + }; + + if (schema) resObject['schema'] = schema; + if (materialized) 
resObject['materialized'] = materialized; + + return resObject; +}; + +export const preparePgAlterViewAlterSchemaJson = ( + to: string, + from: string, + name: string, + materialized?: boolean, +): JsonAlterViewAlterSchemaStatement => { + const returnObject: JsonAlterViewAlterSchemaStatement = { + type: 'alter_view_alter_schema', + fromSchema: from, + toSchema: to, + name, + }; + + if (materialized) returnObject['materialized'] = materialized; + return returnObject; +}; + +export const preparePgAlterViewAddWithOptionJson = ( + name: string, + schema: string, + materialized: boolean, + withOption: MatViewWithOption | ViewWithOption, +): JsonAlterViewAddWithOptionStatement => { + return { + type: 'alter_view_add_with_option', + name, + schema, + materialized: materialized, + with: withOption, + } as JsonAlterViewAddWithOptionStatement; +}; + +export const preparePgAlterViewDropWithOptionJson = ( + name: string, + schema: string, + materialized: boolean, + withOption: MatViewWithOption | ViewWithOption, +): JsonAlterViewDropWithOptionStatement => { + return { + type: 'alter_view_drop_with_option', + name, + schema, + materialized: materialized, + with: withOption, + } as JsonAlterViewDropWithOptionStatement; +}; + +export const preparePgAlterViewAlterTablespaceJson = ( + name: string, + schema: string, + materialized: boolean, + to: string, +): JsonAlterViewAlterTablespaceStatement => { + return { + type: 'alter_view_alter_tablespace', + name, + schema, + materialized: materialized, + toTablespace: to, + } as JsonAlterViewAlterTablespaceStatement; +}; + +export const preparePgAlterViewAlterUsingJson = ( + name: string, + schema: string, + materialized: boolean, + to: string, +): JsonAlterViewAlterUsingStatement => { + return { + type: 'alter_view_alter_using', + name, + schema, + materialized: materialized, + toUsing: to, + } as JsonAlterViewAlterUsingStatement; +}; + +export const prepareMySqlAlterView = ( + view: Omit, +): JsonAlterMySqlViewStatement => { + return { 
type: 'alter_mysql_view', ...view }; +}; diff --git a/drizzle-kit/src/serializer/index.ts b/drizzle-kit/src/serializer/index.ts index 8f2543d61..05e4a6f37 100644 --- a/drizzle-kit/src/serializer/index.ts +++ b/drizzle-kit/src/serializer/index.ts @@ -51,9 +51,9 @@ export const serializeMySql = async ( const { prepareFromMySqlImports } = await import('./mysqlImports'); const { generateMySqlSnapshot } = await import('./mysqlSerializer'); - const { tables } = await prepareFromMySqlImports(filenames); + const { tables, views } = await prepareFromMySqlImports(filenames); - return generateMySqlSnapshot(tables, casing); + return generateMySqlSnapshot(tables, views, casing); }; export const serializePg = async ( @@ -66,11 +66,11 @@ export const serializePg = async ( const { prepareFromPgImports } = await import('./pgImports'); const { generatePgSnapshot } = await import('./pgSerializer'); - const { tables, enums, schemas, sequences } = await prepareFromPgImports( + const { tables, enums, schemas, sequences, views, matViews } = await prepareFromPgImports( filenames, ); - return generatePgSnapshot(tables, enums, schemas, sequences, casing, schemaFilter); + return generatePgSnapshot(tables, enums, schemas, sequences, views, matViews, casing, schemaFilter); }; export const serializeSQLite = async ( @@ -81,8 +81,8 @@ export const serializeSQLite = async ( const { prepareFromSqliteImports } = await import('./sqliteImports'); const { generateSqliteSnapshot } = await import('./sqliteSerializer'); - const { tables } = await prepareFromSqliteImports(filenames); - return generateSqliteSnapshot(tables, casing); + const { tables, views } = await prepareFromSqliteImports(filenames); + return generateSqliteSnapshot(tables, views, casing); }; export const prepareFilenames = (path: string | string[]) => { diff --git a/drizzle-kit/src/serializer/mysqlImports.ts b/drizzle-kit/src/serializer/mysqlImports.ts index d9899026b..a8e8ead39 100644 --- a/drizzle-kit/src/serializer/mysqlImports.ts +++ 
b/drizzle-kit/src/serializer/mysqlImports.ts @@ -1,22 +1,28 @@ import { is } from 'drizzle-orm'; -import { AnyMySqlTable, MySqlTable } from 'drizzle-orm/mysql-core'; +import { AnyMySqlTable, MySqlTable, MySqlView } from 'drizzle-orm/mysql-core'; import { safeRegister } from '../cli/commands/utils'; export const prepareFromExports = (exports: Record) => { const tables: AnyMySqlTable[] = []; + const views: MySqlView[] = []; const i0values = Object.values(exports); i0values.forEach((t) => { if (is(t, MySqlTable)) { tables.push(t); } + + if (is(t, MySqlView)) { + views.push(t); + } }); - return { tables }; + return { tables, views }; }; export const prepareFromMySqlImports = async (imports: string[]) => { const tables: AnyMySqlTable[] = []; + const views: MySqlView[] = []; const { unregister } = await safeRegister(); for (let i = 0; i < imports.length; i++) { @@ -25,7 +31,8 @@ export const prepareFromMySqlImports = async (imports: string[]) => { const prepared = prepareFromExports(i0); tables.push(...prepared.tables); + views.push(...prepared.views); } unregister(); - return { tables: Array.from(new Set(tables)) }; + return { tables: Array.from(new Set(tables)), views }; }; diff --git a/drizzle-kit/src/serializer/mysqlSchema.ts b/drizzle-kit/src/serializer/mysqlSchema.ts index 5bc62ab2f..0255afc10 100644 --- a/drizzle-kit/src/serializer/mysqlSchema.ts +++ b/drizzle-kit/src/serializer/mysqlSchema.ts @@ -1,5 +1,5 @@ import { any, boolean, enum as enumType, literal, object, record, string, TypeOf, union } from 'zod'; -import { mapValues, originUUID, snapshotVersion } from '../global'; +import { mapValues, originUUID } from '../global'; // ------- V3 -------- const index = object({ @@ -52,6 +52,11 @@ const uniqueConstraint = object({ columns: string().array(), }).strict(); +const checkConstraint = object({ + name: string(), + value: string(), +}).strict(); + const tableV4 = object({ name: string(), schema: string().optional(), @@ -67,8 +72,23 @@ const table = object({ 
foreignKeys: record(string(), fk), compositePrimaryKeys: record(string(), compositePK), uniqueConstraints: record(string(), uniqueConstraint).default({}), + checkConstraint: record(string(), checkConstraint).default({}), }).strict(); +const viewMeta = object({ + algorithm: enumType(['undefined', 'merge', 'temptable']), + sqlSecurity: enumType(['definer', 'invoker']), + withCheckOption: enumType(['local', 'cascaded']).optional(), +}).strict(); + +export const view = object({ + name: string(), + columns: record(string(), column), + definition: string().optional(), + isExisting: boolean(), +}).strict().merge(viewMeta); +type SquasherViewMeta = Omit, 'definer'>; + export const kitInternals = object({ tables: record( string(), @@ -128,6 +148,7 @@ export const schemaInternal = object({ version: literal('5'), dialect: dialect, tables: record(string(), table), + views: record(string(), view), _meta: object({ tables: record(string(), string()), columns: record(string(), string()), @@ -155,12 +176,20 @@ const tableSquashed = object({ foreignKeys: record(string(), string()), compositePrimaryKeys: record(string(), string()), uniqueConstraints: record(string(), string()).default({}), + checkConstraints: record(string(), string()).default({}), }).strict(); +const viewSquashed = view.omit({ + algorithm: true, + sqlSecurity: true, + withCheckOption: true, +}).extend({ meta: string() }); + export const schemaSquashed = object({ version: literal('5'), dialect: dialect, tables: record(string(), tableSquashed), + views: record(string(), viewSquashed), }).strict(); export const schemaSquashedV4 = object({ @@ -186,6 +215,9 @@ export type Index = TypeOf; export type ForeignKey = TypeOf; export type PrimaryKey = TypeOf; export type UniqueConstraint = TypeOf; +export type CheckConstraint = TypeOf; +export type View = TypeOf; +export type ViewSquashed = TypeOf; export const MySqlSquasher = { squashIdx: (idx: Index) => { @@ -247,6 +279,27 @@ export const MySqlSquasher = { }); return result; 
}, + squashCheck: (input: CheckConstraint): string => { + return `${input.name};${input.value}`; + }, + unsquashCheck: (input: string): CheckConstraint => { + const [name, value] = input.split(';'); + + return { name, value }; + }, + squashView: (view: View): string => { + return `${view.algorithm};${view.sqlSecurity};${view.withCheckOption}`; + }, + unsquashView: (meta: string): SquasherViewMeta => { + const [algorithm, sqlSecurity, withCheckOption] = meta.split(';'); + const toReturn = { + algorithm: algorithm, + sqlSecurity: sqlSecurity, + withCheckOption: withCheckOption !== 'undefined' ? withCheckOption : undefined, + }; + + return viewMeta.parse(toReturn); + }, }; export const squashMysqlSchemeV4 = ( @@ -304,6 +357,10 @@ export const squashMysqlScheme = (json: MySqlSchema): MySqlSchemaSquashed => { }, ); + const squashedCheckConstraints = mapValues(it[1].checkConstraint, (check) => { + return MySqlSquasher.squashCheck(check); + }); + return [ it[0], { @@ -313,14 +370,31 @@ export const squashMysqlScheme = (json: MySqlSchema): MySqlSchemaSquashed => { foreignKeys: squashedFKs, compositePrimaryKeys: squashedPKs, uniqueConstraints: squashedUniqueConstraints, + checkConstraints: squashedCheckConstraints, }, ]; }), ); + + const mappedViews = Object.fromEntries( + Object.entries(json.views).map(([key, value]) => { + const meta = MySqlSquasher.squashView(value); + + return [key, { + name: value.name, + isExisting: value.isExisting, + columns: value.columns, + definition: value.definition, + meta, + }]; + }), + ); + return { version: '5', dialect: json.dialect, tables: mappedTables, + views: mappedViews, }; }; @@ -340,6 +414,7 @@ export const dryMySql = mysqlSchema.parse({ prevId: '', tables: {}, schemas: {}, + views: {}, _meta: { schemas: {}, tables: {}, diff --git a/drizzle-kit/src/serializer/mysqlSerializer.ts b/drizzle-kit/src/serializer/mysqlSerializer.ts index da52ac2fb..5ac717525 100644 --- a/drizzle-kit/src/serializer/mysqlSerializer.ts +++ 
b/drizzle-kit/src/serializer/mysqlSerializer.ts @@ -1,7 +1,7 @@ import chalk from 'chalk'; import { getTableName, is } from 'drizzle-orm'; import { SQL } from 'drizzle-orm'; -import { toCamelCase, toSnakeCase } from 'drizzle-orm/casing'; +import { getViewConfig, MySqlColumn, MySqlView } from 'drizzle-orm/mysql-core'; import { AnyMySqlTable, MySqlDialect, type PrimaryKey as PrimaryKeyORM, uniqueKeyName } from 'drizzle-orm/mysql-core'; import { getTableConfig } from 'drizzle-orm/mysql-core'; import { RowDataPacket } from 'mysql2/promise'; @@ -9,6 +9,7 @@ import { CasingType } from 'src/cli/validations/common'; import { withStyle } from '../cli/validations/outputs'; import { IntrospectStage, IntrospectStatus } from '../cli/views'; import { + CheckConstraint, Column, ForeignKey, Index, @@ -17,11 +18,10 @@ import { PrimaryKey, Table, UniqueConstraint, + View, } from '../serializer/mysqlSchema'; import { type DB, getColumnCasing } from '../utils'; import { sqlToStr } from '.'; -// import { MySqlColumnWithAutoIncrement } from "drizzle-orm/mysql-core"; -// import { MySqlDateBaseColumn } from "drizzle-orm/mysql-core"; export const indexName = (tableName: string, columns: string[]) => { return `${tableName}_${columns.join('_')}_index`; @@ -29,11 +29,14 @@ export const indexName = (tableName: string, columns: string[]) => { export const generateMySqlSnapshot = ( tables: AnyMySqlTable[], + views: MySqlView[], casing: CasingType | undefined, ): MySqlSchemaInternal => { const dialect = new MySqlDialect({ casing }); const result: Record = {}; + const resultViews: Record = {}; const internal: MySqlKitInternals = { tables: {}, indexes: {} }; + for (const table of tables) { const { name: tableName, @@ -41,14 +44,20 @@ export const generateMySqlSnapshot = ( indexes, foreignKeys, schema, + checks, primaryKeys, uniqueConstraints, } = getTableConfig(table); + const columnsObject: Record = {}; const indexesObject: Record = {}; const foreignKeysObject: Record = {}; const 
primaryKeysObject: Record = {}; const uniqueConstraintObject: Record = {}; + const checkConstraintObject: Record = {}; + + // this object will help to identify same check names + let checksInTable: Record = {}; columns.forEach((column) => { const name = getColumnCasing(column, casing); @@ -347,6 +356,39 @@ export const generateMySqlSnapshot = ( }; }); + checks.forEach((check) => { + check; + const checkName = check.name; + if (typeof checksInTable[tableName] !== 'undefined') { + if (checksInTable[tableName].includes(check.name)) { + console.log( + `\n${ + withStyle.errorWarning( + `We\'ve found duplicated check constraint name in ${ + chalk.underline.blue( + tableName, + ) + }. Please rename your check constraint in the ${ + chalk.underline.blue( + tableName, + ) + } table`, + ) + }`, + ); + process.exit(1); + } + checksInTable[tableName].push(checkName); + } else { + checksInTable[tableName] = [check.name]; + } + + checkConstraintObject[checkName] = { + name: checkName, + value: dialect.sqlToQuery(check.value).sql, + }; + }); + // only handle tables without schemas if (!schema) { result[tableName] = { @@ -356,14 +398,126 @@ export const generateMySqlSnapshot = ( foreignKeys: foreignKeysObject, compositePrimaryKeys: primaryKeysObject, uniqueConstraints: uniqueConstraintObject, + checkConstraint: checkConstraintObject, }; } } + for (const view of views) { + const { + isExisting, + name, + query, + schema, + selectedFields, + algorithm, + sqlSecurity, + withCheckOption, + } = getViewConfig(view); + + const columnsObject: Record = {}; + + const existingView = resultViews[name]; + if (typeof existingView !== 'undefined') { + console.log( + `\n${ + withStyle.errorWarning( + `We\'ve found duplicated view name across ${ + chalk.underline.blue( + schema ?? 'public', + ) + } schema. 
Please rename your view`, + ) + }`, + ); + process.exit(1); + } + + for (const key in selectedFields) { + if (is(selectedFields[key], MySqlColumn)) { + const column = selectedFields[key]; + + const notNull: boolean = column.notNull; + const sqlTypeLowered = column.getSQLType().toLowerCase(); + const autoIncrement = typeof (column as any).autoIncrement === 'undefined' + ? false + : (column as any).autoIncrement; + + const generated = column.generated; + + const columnToSet: Column = { + name: column.name, + type: column.getSQLType(), + primaryKey: false, + // If field is autoincrement it's notNull by default + // notNull: autoIncrement ? true : notNull, + notNull, + autoincrement: autoIncrement, + onUpdate: (column as any).hasOnUpdateNow, + generated: generated + ? { + as: is(generated.as, SQL) + ? dialect.sqlToQuery(generated.as as SQL).sql + : typeof generated.as === 'function' + ? dialect.sqlToQuery(generated.as() as SQL).sql + : (generated.as as any), + type: generated.mode ?? 'stored', + } + : undefined, + }; + + if (column.default !== undefined) { + if (is(column.default, SQL)) { + columnToSet.default = sqlToStr(column.default, casing); + } else { + if (typeof column.default === 'string') { + columnToSet.default = `'${column.default}'`; + } else { + if (sqlTypeLowered === 'json') { + columnToSet.default = `'${JSON.stringify(column.default)}'`; + } else if (column.default instanceof Date) { + if (sqlTypeLowered === 'date') { + columnToSet.default = `'${column.default.toISOString().split('T')[0]}'`; + } else if ( + sqlTypeLowered.startsWith('datetime') + || sqlTypeLowered.startsWith('timestamp') + ) { + columnToSet.default = `'${ + column.default + .toISOString() + .replace('T', ' ') + .slice(0, 23) + }'`; + } + } else { + columnToSet.default = column.default; + } + } + if (['blob', 'text', 'json'].includes(column.getSQLType())) { + columnToSet.default = `(${columnToSet.default})`; + } + } + } + columnsObject[column.name] = columnToSet; + } + } + + 
resultViews[name] = { + columns: columnsObject, + name, + isExisting, + definition: isExisting ? undefined : dialect.sqlToQuery(query!).sql, + withCheckOption, + algorithm: algorithm ?? 'undefined', // set default values + sqlSecurity: sqlSecurity ?? 'definer', // set default values + }; + } + return { version: '5', dialect: 'mysql', tables: result, + views: resultViews, _meta: { tables: {}, columns: {}, @@ -418,6 +572,8 @@ export const fromDatabase = async ( let tablesCount = new Set(); let indexesCount = 0; let foreignKeysCount = 0; + let checksCount = 0; + let viewsCount = 0; const idxs = await db.query( `select * from INFORMATION_SCHEMA.STATISTICS @@ -561,6 +717,7 @@ export const fromDatabase = async ( indexes: {}, foreignKeys: {}, uniqueConstraints: {}, + checkConstraint: {}, }; } else { result[tableName]!.columns[columnName] = newColumn; @@ -734,16 +891,91 @@ export const fromDatabase = async ( } } + const views = await db.query( + `select * from INFORMATION_SCHEMA.VIEWS WHERE table_schema = '${inputSchema}';`, + ); + + const resultViews: Record = {}; + + viewsCount = views.length; + if (progressCallback) { + progressCallback('views', viewsCount, 'fetching'); + } + for await (const view of views) { + const viewName = view['TABLE_NAME']; + const definition = view['VIEW_DEFINITION']; + + const withCheckOption = view['CHECK_OPTION'] === 'NONE' ? undefined : view['CHECK_OPTION'].toLowerCase(); + const sqlSecurity = view['SECURITY_TYPE'].toLowerCase(); + + const [createSqlStatement] = await db.query(`SHOW CREATE VIEW \`${viewName}\`;`); + const algorithmMatch = createSqlStatement['Create View'].match(/ALGORITHM=([^ ]+)/); + const algorithm = algorithmMatch ? 
algorithmMatch[1].toLowerCase() : undefined; + + const columns = result[viewName].columns; + delete result[viewName]; + + resultViews[viewName] = { + columns: columns, + isExisting: false, + name: viewName, + algorithm, + definition, + sqlSecurity, + withCheckOption, + }; + } + if (progressCallback) { progressCallback('indexes', indexesCount, 'done'); // progressCallback("enums", 0, "fetching"); progressCallback('enums', 0, 'done'); + progressCallback('views', viewsCount, 'done'); + } + + const checkConstraints = await db.query( + `SELECT + tc.table_name, + tc.constraint_name, + cc.check_clause +FROM + information_schema.table_constraints tc +JOIN + information_schema.check_constraints cc + ON tc.constraint_name = cc.constraint_name +WHERE + tc.constraint_schema = '${inputSchema}' +AND + tc.constraint_type = 'CHECK';`, + ); + + checksCount += checkConstraints.length; + if (progressCallback) { + progressCallback('checks', checksCount, 'fetching'); + } + for (const checkConstraintRow of checkConstraints) { + const constraintName = checkConstraintRow['CONSTRAINT_NAME']; + const constraintValue = checkConstraintRow['CHECK_CLAUSE']; + const tableName = checkConstraintRow['TABLE_NAME']; + + const tableInResult = result[tableName]; + // if (typeof tableInResult === 'undefined') continue; + + tableInResult.checkConstraint[constraintName] = { + name: constraintName, + value: constraintValue, + }; + } + + if (progressCallback) { + progressCallback('checks', checksCount, 'done'); } return { version: '5', dialect: 'mysql', tables: result, + views: resultViews, _meta: { tables: {}, columns: {}, diff --git a/drizzle-kit/src/serializer/pgImports.ts b/drizzle-kit/src/serializer/pgImports.ts index ffedd084c..e0b3fb743 100644 --- a/drizzle-kit/src/serializer/pgImports.ts +++ b/drizzle-kit/src/serializer/pgImports.ts @@ -1,5 +1,17 @@ import { is } from 'drizzle-orm'; -import { AnyPgTable, isPgEnum, isPgSequence, PgEnum, PgSchema, PgSequence, PgTable } from 'drizzle-orm/pg-core'; 
+import { + AnyPgTable, + isPgEnum, + isPgMaterializedView, + isPgSequence, + isPgView, + PgEnum, + PgMaterializedView, + PgSchema, + PgSequence, + PgTable, + PgView, +} from 'drizzle-orm/pg-core'; import { safeRegister } from '../cli/commands/utils'; export const prepareFromExports = (exports: Record) => { @@ -7,6 +19,8 @@ export const prepareFromExports = (exports: Record) => { const enums: PgEnum[] = []; const schemas: PgSchema[] = []; const sequences: PgSequence[] = []; + const views: PgView[] = []; + const matViews: PgMaterializedView[] = []; const i0values = Object.values(exports); i0values.forEach((t) => { @@ -22,19 +36,29 @@ export const prepareFromExports = (exports: Record) => { schemas.push(t); } + if (isPgView(t)) { + views.push(t); + } + + if (isPgMaterializedView(t)) { + matViews.push(t); + } + if (isPgSequence(t)) { sequences.push(t); } }); - return { tables, enums, schemas, sequences }; + return { tables, enums, schemas, sequences, views, matViews }; }; export const prepareFromPgImports = async (imports: string[]) => { - let tables: AnyPgTable[] = []; - let enums: PgEnum[] = []; - let schemas: PgSchema[] = []; - let sequences: PgSequence[] = []; + const tables: AnyPgTable[] = []; + const enums: PgEnum[] = []; + const schemas: PgSchema[] = []; + const sequences: PgSequence[] = []; + const views: PgView[] = []; + const matViews: PgMaterializedView[] = []; const { unregister } = await safeRegister(); for (let i = 0; i < imports.length; i++) { @@ -47,8 +71,10 @@ export const prepareFromPgImports = async (imports: string[]) => { enums.push(...prepared.enums); schemas.push(...prepared.schemas); sequences.push(...prepared.sequences); + views.push(...prepared.views); + matViews.push(...prepared.matViews); } unregister(); - return { tables: Array.from(new Set(tables)), enums, schemas, sequences }; + return { tables: Array.from(new Set(tables)), enums, schemas, sequences, views, matViews }; }; diff --git a/drizzle-kit/src/serializer/pgSchema.ts 
b/drizzle-kit/src/serializer/pgSchema.ts index 5860a6fef..d4d27cb86 100644 --- a/drizzle-kit/src/serializer/pgSchema.ts +++ b/drizzle-kit/src/serializer/pgSchema.ts @@ -185,6 +185,11 @@ const column = object({ .optional(), }).strict(); +const checkConstraint = object({ + name: string(), + value: string(), +}).strict(); + const columnSquashed = object({ name: string(), type: string(), @@ -220,6 +225,48 @@ const uniqueConstraint = object({ nullsNotDistinct: boolean(), }).strict(); +const viewWithOption = object({ + checkOption: enumType(['local', 'cascaded']).optional(), + securityBarrier: boolean().optional(), + securityInvoker: boolean().optional(), +}).strict(); + +const matViewWithOption = object({ + fillfactor: number().optional(), + toastTupleTarget: number().optional(), + parallelWorkers: number().optional(), + autovacuumEnabled: boolean().optional(), + vacuumIndexCleanup: enumType(['auto', 'off', 'on']).optional(), + vacuumTruncate: boolean().optional(), + autovacuumVacuumThreshold: number().optional(), + autovacuumVacuumScaleFactor: number().optional(), + autovacuumVacuumCostDelay: number().optional(), + autovacuumVacuumCostLimit: number().optional(), + autovacuumFreezeMinAge: number().optional(), + autovacuumFreezeMaxAge: number().optional(), + autovacuumFreezeTableAge: number().optional(), + autovacuumMultixactFreezeMinAge: number().optional(), + autovacuumMultixactFreezeMaxAge: number().optional(), + autovacuumMultixactFreezeTableAge: number().optional(), + logAutovacuumMinDuration: number().optional(), + userCatalogTable: boolean().optional(), +}).strict(); + +export const mergedViewWithOption = viewWithOption.merge(matViewWithOption).strict(); + +export const view = object({ + name: string(), + schema: string(), + columns: record(string(), column), + definition: string().optional(), + materialized: boolean(), + with: mergedViewWithOption.optional(), + isExisting: boolean(), + withNoData: boolean().optional(), + using: string().optional(), + tablespace: 
string().optional(), +}).strict(); + const tableV4 = object({ name: string(), schema: string(), @@ -266,6 +313,7 @@ const table = object({ foreignKeys: record(string(), fk), compositePrimaryKeys: record(string(), compositePK), uniqueConstraints: record(string(), uniqueConstraint).default({}), + checkConstraints: record(string(), checkConstraint).default({}), }).strict(); const schemaHash = object({ @@ -368,6 +416,7 @@ export const pgSchemaInternal = object({ tables: record(string(), table), enums: record(string(), enumSchema), schemas: record(string(), string()), + views: record(string(), view).default({}), sequences: record(string(), sequenceSchema).default({}), _meta: object({ schemas: record(string(), string()), @@ -385,6 +434,7 @@ const tableSquashed = object({ foreignKeys: record(string(), string()), compositePrimaryKeys: record(string(), string()), uniqueConstraints: record(string(), string()), + checkConstraints: record(string(), string()), }).strict(); const tableSquashedV4 = object({ @@ -417,6 +467,7 @@ export const pgSchemaSquashed = object({ tables: record(string(), tableSquashed), enums: record(string(), enumSchema), schemas: record(string(), string()), + views: record(string(), view), sequences: record(string(), sequenceSquashed), }).strict(); @@ -445,7 +496,12 @@ export type Index = TypeOf; export type ForeignKey = TypeOf; export type PrimaryKey = TypeOf; export type UniqueConstraint = TypeOf; +export type View = TypeOf; +export type MatViewWithOption = TypeOf; +export type ViewWithOption = TypeOf; + export type PgKitInternals = TypeOf; +export type CheckConstraint = TypeOf; export type PgSchemaV1 = TypeOf; export type PgSchemaV2 = TypeOf; @@ -627,6 +683,17 @@ export const PgSquasher = { cycle: splitted[7] === 'true', }; }, + squashCheck: (check: CheckConstraint) => { + return `${check.name};${check.value}`; + }, + unsquashCheck: (input: string): CheckConstraint => { + const [ + name, + value, + ] = input.split(';'); + + return { name, value }; + }, 
}; export const squashPgScheme = ( @@ -671,6 +738,13 @@ export const squashPgScheme = ( }, ); + const squashedChecksContraints = mapValues( + it[1].checkConstraints, + (check) => { + return PgSquasher.squashCheck(check); + }, + ); + return [ it[0], { @@ -681,6 +755,7 @@ export const squashPgScheme = ( foreignKeys: squashedFKs, compositePrimaryKeys: squashedPKs, uniqueConstraints: squashedUniqueConstraints, + checkConstraints: squashedChecksContraints, }, ]; }), @@ -705,6 +780,7 @@ export const squashPgScheme = ( tables: mappedTables, enums: json.enums, schemas: json.schemas, + views: json.views, sequences: mappedSequences, }; }; diff --git a/drizzle-kit/src/serializer/pgSerializer.ts b/drizzle-kit/src/serializer/pgSerializer.ts index cc7b18725..3c54b01f4 100644 --- a/drizzle-kit/src/serializer/pgSerializer.ts +++ b/drizzle-kit/src/serializer/pgSerializer.ts @@ -4,14 +4,17 @@ import { toCamelCase, toSnakeCase } from 'drizzle-orm/casing'; import { AnyPgTable, ExtraConfigColumn, + getMaterializedViewConfig, + getViewConfig, IndexedColumn, PgColumn, PgDialect, PgEnum, PgEnumColumn, - PgInteger, + PgMaterializedView, PgSchema, PgSequence, + PgView, uniqueKeyName, } from 'drizzle-orm/pg-core'; import { getTableConfig } from 'drizzle-orm/pg-core'; @@ -20,6 +23,7 @@ import { vectorOps } from 'src/extensions/vector'; import { withStyle } from '../cli/validations/outputs'; import type { IntrospectStage, IntrospectStatus } from '../cli/views'; import type { + CheckConstraint, Column as Column, Enum, ForeignKey, @@ -31,6 +35,7 @@ import type { Sequence, Table, UniqueConstraint, + View, } from '../serializer/pgSchema'; import { type DB, getColumnCasing, isPgArrayType } from '../utils'; import { sqlToStr } from '.'; @@ -39,30 +44,16 @@ export const indexName = (tableName: string, columns: string[]) => { return `${tableName}_${columns.join('_')}_index`; }; -function stringFromIdentityProperty( - field: string | number | undefined, -): string | undefined { - return typeof field 
=== 'string' - ? (field as string) - : typeof field === 'undefined' - ? undefined - : String(field); +function stringFromIdentityProperty(field: string | number | undefined): string | undefined { + return typeof field === 'string' ? (field as string) : typeof field === 'undefined' ? undefined : String(field); } function maxRangeForIdentityBasedOn(columnType: string) { - return columnType === 'integer' - ? '2147483647' - : columnType === 'bigint' - ? '9223372036854775807' - : '32767'; + return columnType === 'integer' ? '2147483647' : columnType === 'bigint' ? '9223372036854775807' : '32767'; } function minRangeForIdentityBasedOn(columnType: string) { - return columnType === 'integer' - ? '-2147483648' - : columnType === 'bitint' - ? '-9223372036854775808' - : '-32768'; + return columnType === 'integer' ? '-2147483648' : columnType === 'bigint' ? '-9223372036854775808' : '-32768'; } function stringFromDatabaseIdentityProperty(field: any): string | undefined { @@ -89,20 +80,12 @@ function buildArrayString(array: any[], sqlType: string): string { if (sqlType === 'date') { return `"${value.toISOString().split('T')[0]}"`; } else if (sqlType === 'timestamp') { - return `"${ - value.toISOString() - .replace('T', ' ') - .slice(0, 23) - }"`; + return `"${value.toISOString().replace('T', ' ').slice(0, 23)}"`; } else { return `"${value.toISOString()}"`; } } else if (typeof value === 'object') { - return `"${ - JSON - .stringify(value) - .replaceAll('"', '\\"') - }"`; + return `"${JSON.stringify(value).replaceAll('"', '\\"')}"`; } return `"${value}"`; @@ -117,11 +100,14 @@ export const generatePgSnapshot = ( enums: PgEnum[], schemas: PgSchema[], sequences: PgSequence[], + views: PgView[], + matViews: PgMaterializedView[], casing: CasingType | undefined, schemaFilter?: string[], ): PgSchemaInternal => { const dialect = new PgDialect({ casing }); const result: Record = {}; + const resultViews: Record = {}; const sequencesToReturn: Record = {}; // This object stores unique names 
for indexes and will be used to detect if you have the same names for indexes @@ -129,16 +115,12 @@ export const generatePgSnapshot = ( const indexesInSchema: Record = {}; for (const table of tables) { - const { - name: tableName, - columns, - indexes, - foreignKeys, - checks, - schema, - primaryKeys, - uniqueConstraints, - } = getTableConfig(table); + // This object stores unique names for checks and will be used to detect if you have the same names for checks + // within the same PostgreSQL table + const checksInTable: Record = {}; + + const { name: tableName, columns, indexes, foreignKeys, checks, schema, primaryKeys, uniqueConstraints } = + getTableConfig(table); if (schemaFilter && !schemaFilter.includes(schema ?? 'public')) { continue; @@ -146,6 +128,7 @@ export const generatePgSnapshot = ( const columnsObject: Record = {}; const indexesObject: Record = {}; + const checksObject: Record = {}; const foreignKeysObject: Record = {}; const primaryKeysObject: Record = {}; const uniqueConstraintObject: Record = {}; @@ -156,21 +139,15 @@ export const generatePgSnapshot = ( const primaryKey: boolean = column.primary; const sqlTypeLowered = column.getSQLType().toLowerCase(); - const typeSchema = is(column, PgEnumColumn) - ? column.enum.schema || 'public' - : undefined; + const typeSchema = is(column, PgEnumColumn) ? column.enum.schema || 'public' : undefined; const generated = column.generated; const identity = column.generatedIdentity; const increment = stringFromIdentityProperty(identity?.sequenceOptions?.increment) ?? '1'; const minValue = stringFromIdentityProperty(identity?.sequenceOptions?.minValue) - ?? (parseFloat(increment) < 0 - ? minRangeForIdentityBasedOn(column.columnType) - : '1'); + ?? (parseFloat(increment) < 0 ? minRangeForIdentityBasedOn(column.columnType) : '1'); const maxValue = stringFromIdentityProperty(identity?.sequenceOptions?.maxValue) - ?? (parseFloat(increment) < 0 - ? '-1' - : maxRangeForIdentityBasedOn(column.getSQLType())); + ?? 
(parseFloat(increment) < 0 ? '-1' : maxRangeForIdentityBasedOn(column.getSQLType())); const startWith = stringFromIdentityProperty(identity?.sequenceOptions?.startWith) ?? (parseFloat(increment) < 0 ? maxValue : minValue); const cache = stringFromIdentityProperty(identity?.sequenceOptions?.cache) ?? '1'; @@ -224,7 +201,7 @@ export const generatePgSnapshot = ( chalk.underline.blue( name, ) - } column is confilcting with a unique constraint name already defined for ${ + } column is conflicting with a unique constraint name already defined for ${ chalk.underline.blue( existingUnique.columns.join(','), ) @@ -248,31 +225,17 @@ export const generatePgSnapshot = ( columnToSet.default = `'${column.default}'`; } else { if (sqlTypeLowered === 'jsonb' || sqlTypeLowered === 'json') { - columnToSet.default = `'${ - JSON.stringify( - column.default, - ) - }'::${sqlTypeLowered}`; + columnToSet.default = `'${JSON.stringify(column.default)}'::${sqlTypeLowered}`; } else if (column.default instanceof Date) { if (sqlTypeLowered === 'date') { columnToSet.default = `'${column.default.toISOString().split('T')[0]}'`; } else if (sqlTypeLowered === 'timestamp') { - columnToSet.default = `'${ - column.default - .toISOString() - .replace('T', ' ') - .slice(0, 23) - }'`; + columnToSet.default = `'${column.default.toISOString().replace('T', ' ').slice(0, 23)}'`; } else { columnToSet.default = `'${column.default.toISOString()}'`; } } else if (isPgArrayType(sqlTypeLowered) && Array.isArray(column.default)) { - columnToSet.default = `'${ - buildArrayString( - column.default, - sqlTypeLowered, - ) - }'`; + columnToSet.default = `'${buildArrayString(column.default, sqlTypeLowered)}'`; } else { // Should do for all types // columnToSet.default = `'${column.default}'::${sqlTypeLowered}`; @@ -310,24 +273,16 @@ export const generatePgSnapshot = ( if (typeof existingUnique !== 'undefined') { console.log( `\n${ - withStyle.errorWarning(`We\'ve found duplicated unique constraint names in ${ - 
chalk.underline.blue( - tableName, - ) - } table. - The unique constraint ${ - chalk.underline.blue( - name, - ) - } on the ${ - chalk.underline.blue( - columnNames.join(','), - ) - } columns is confilcting with a unique constraint name already defined for ${ - chalk.underline.blue( - existingUnique.columns.join(','), - ) - } columns\n`) + withStyle.errorWarning( + `We\'ve found duplicated unique constraint names in ${chalk.underline.blue(tableName)} table. + The unique constraint ${chalk.underline.blue(name)} on the ${ + chalk.underline.blue( + columnNames.join(','), + ) + } columns is conflicting with a unique constraint name already defined for ${ + chalk.underline.blue(existingUnique.columns.join(',')) + } columns\n`, + ) }`, ); process.exit(1); @@ -392,11 +347,7 @@ export const generatePgSnapshot = ( console.log( `\n${ withStyle.errorWarning( - `Please specify an index name in ${ - getTableName( - value.config.table, - ) - } table that has "${ + `Please specify an index name in ${getTableName(value.config.table)} table that has "${ dialect.sqlToQuery(it).sql }" expression. We can generate index names for indexes on columns only; for expressions in indexes, you need to specify the name yourself.`, ) @@ -430,9 +381,7 @@ export const generatePgSnapshot = ( } type without specifying an operator class. Vector extension doesn't have a default operator class, so you need to specify one of the available options. Here is a list of available op classes for the vector extension: [${ vectorOps .map((it) => `${chalk.underline(`${it}`)}`) - .join( - ', ', - ) + .join(', ') }].\n\nYou can specify it using current syntax: ${ chalk.underline( `index("${value.config.name}").using("${value.config.method}", table.${name}.op("${ @@ -448,9 +397,7 @@ export const generatePgSnapshot = ( indexColumnNames.push(name); }); - const name = value.config.name - ? value.config.name - : indexName(tableName, indexColumnNames); + const name = value.config.name ? 
value.config.name : indexName(tableName, indexColumnNames); let indexColumns: IndexColumnType[] = columns.map( (it): IndexColumnType => { @@ -485,9 +432,7 @@ export const generatePgSnapshot = ( `\n${ withStyle.errorWarning( `We\'ve found duplicated index name across ${ - chalk.underline.blue( - schema ?? 'public', - ) + chalk.underline.blue(schema ?? 'public') } schema. Please rename your index in either the ${ chalk.underline.blue( tableName, @@ -507,15 +452,50 @@ export const generatePgSnapshot = ( name, columns: indexColumns, isUnique: value.config.unique ?? false, - where: value.config.where - ? dialect.sqlToQuery(value.config.where).sql - : undefined, + where: value.config.where ? dialect.sqlToQuery(value.config.where).sql : undefined, concurrently: value.config.concurrently ?? false, method: value.config.method ?? 'btree', with: value.config.with ?? {}, }; }); + checks.forEach((check) => { + const checkName = check.name; + + if (typeof checksInTable[`"${schema ?? 'public'}"."${tableName}"`] !== 'undefined') { + if (checksInTable[`"${schema ?? 'public'}"."${tableName}"`].includes(check.name)) { + console.log( + `\n${ + withStyle.errorWarning( + `We\'ve found duplicated check constraint name across ${ + chalk.underline.blue( + schema ?? 'public', + ) + } schema in ${ + chalk.underline.blue( + tableName, + ) + }. Please rename your check constraint in either the ${ + chalk.underline.blue( + tableName, + ) + } table or the table with the duplicated check constraint name`, + ) + }`, + ); + process.exit(1); + } + checksInTable[`"${schema ?? 'public'}"."${tableName}"`].push(checkName); + } else { + checksInTable[`"${schema ?? 'public'}"."${tableName}"`] = [check.name]; + } + + checksObject[checkName] = { + name: checkName, + value: dialect.sqlToQuery(check.value).sql, + }; + }); + const tableKey = `${schema ?? 
'public'}.${tableName}`; result[tableKey] = { @@ -526,15 +506,13 @@ export const generatePgSnapshot = ( foreignKeys: foreignKeysObject, compositePrimaryKeys: primaryKeysObject, uniqueConstraints: uniqueConstraintObject, + checkConstraints: checksObject, }; } for (const sequence of sequences) { const name = sequence.seqName!; - if ( - typeof sequencesToReturn[`${sequence.schema ?? 'public'}.${name}`] - === 'undefined' - ) { + if (typeof sequencesToReturn[`${sequence.schema ?? 'public'}.${name}`] === 'undefined') { const increment = stringFromIdentityProperty(sequence?.seqOptions?.increment) ?? '1'; const minValue = stringFromIdentityProperty(sequence?.seqOptions?.minValue) ?? (parseFloat(increment) < 0 ? '-9223372036854775808' : '1'); @@ -559,6 +537,172 @@ export const generatePgSnapshot = ( } } + const combinedViews = [...views, ...matViews]; + for (const view of combinedViews) { + let viewName; + let schema; + let query; + let selectedFields; + let isExisting; + let withOption; + let tablespace; + let using; + let withNoData; + let materialized: boolean = false; + + if (is(view, PgView)) { + ({ name: viewName, schema, query, selectedFields, isExisting, with: withOption } = getViewConfig(view)); + } else { + ({ name: viewName, schema, query, selectedFields, isExisting, with: withOption, tablespace, using, withNoData } = + getMaterializedViewConfig(view)); + + materialized = true; + } + + const viewSchema = schema ?? 'public'; + + const viewKey = `${viewSchema}.${viewName}`; + + const columnsObject: Record = {}; + const uniqueConstraintObject: Record = {}; + + const existingView = resultViews[viewKey]; + if (typeof existingView !== 'undefined') { + console.log( + `\n${ + withStyle.errorWarning( + `We\'ve found duplicated view name across ${ + chalk.underline.blue(schema ?? 'public') + } schema. 
Please rename your view`, + ) + }`, + ); + process.exit(1); + } + + for (const key in selectedFields) { + if (is(selectedFields[key], PgColumn)) { + const column = selectedFields[key]; + + const notNull: boolean = column.notNull; + const primaryKey: boolean = column.primary; + const sqlTypeLowered = column.getSQLType().toLowerCase(); + + const typeSchema = is(column, PgEnumColumn) ? column.enum.schema || 'public' : undefined; + const generated = column.generated; + const identity = column.generatedIdentity; + + const increment = stringFromIdentityProperty(identity?.sequenceOptions?.increment) ?? '1'; + const minValue = stringFromIdentityProperty(identity?.sequenceOptions?.minValue) + ?? (parseFloat(increment) < 0 ? minRangeForIdentityBasedOn(column.columnType) : '1'); + const maxValue = stringFromIdentityProperty(identity?.sequenceOptions?.maxValue) + ?? (parseFloat(increment) < 0 ? '-1' : maxRangeForIdentityBasedOn(column.getSQLType())); + const startWith = stringFromIdentityProperty(identity?.sequenceOptions?.startWith) + ?? (parseFloat(increment) < 0 ? maxValue : minValue); + const cache = stringFromIdentityProperty(identity?.sequenceOptions?.cache) ?? '1'; + + const columnToSet: Column = { + name: column.name, + type: column.getSQLType(), + typeSchema: typeSchema, + primaryKey, + notNull, + generated: generated + ? { + as: is(generated.as, SQL) + ? dialect.sqlToQuery(generated.as as SQL).sql + : typeof generated.as === 'function' + ? dialect.sqlToQuery(generated.as() as SQL).sql + : (generated.as as any), + type: 'stored', + } + : undefined, + identity: identity + ? { + type: identity.type, + name: identity.sequenceName ?? `${viewName}_${column.name}_seq`, + schema: schema ?? 'public', + increment, + startWith, + minValue, + maxValue, + cache, + cycle: identity?.sequenceOptions?.cycle ?? 
false, + } + : undefined, + }; + + if (column.isUnique) { + const existingUnique = uniqueConstraintObject[column.uniqueName!]; + if (typeof existingUnique !== 'undefined') { + console.log( + `\n${ + withStyle.errorWarning( + `We\'ve found duplicated unique constraint names in ${chalk.underline.blue(viewName)} table. + The unique constraint ${chalk.underline.blue(column.uniqueName)} on the ${ + chalk.underline.blue( + column.name, + ) + } column is conflicting with a unique constraint name already defined for ${ + chalk.underline.blue(existingUnique.columns.join(',')) + } columns\n`, + ) + }`, + ); + process.exit(1); + } + uniqueConstraintObject[column.uniqueName!] = { + name: column.uniqueName!, + nullsNotDistinct: column.uniqueType === 'not distinct', + columns: [columnToSet.name], + }; + } + + if (column.default !== undefined) { + if (is(column.default, SQL)) { + columnToSet.default = sqlToStr(column.default, casing); + } else { + if (typeof column.default === 'string') { + columnToSet.default = `'${column.default}'`; + } else { + if (sqlTypeLowered === 'jsonb' || sqlTypeLowered === 'json') { + columnToSet.default = `'${JSON.stringify(column.default)}'::${sqlTypeLowered}`; + } else if (column.default instanceof Date) { + if (sqlTypeLowered === 'date') { + columnToSet.default = `'${column.default.toISOString().split('T')[0]}'`; + } else if (sqlTypeLowered === 'timestamp') { + columnToSet.default = `'${column.default.toISOString().replace('T', ' ').slice(0, 23)}'`; + } else { + columnToSet.default = `'${column.default.toISOString()}'`; + } + } else if (isPgArrayType(sqlTypeLowered) && Array.isArray(column.default)) { + columnToSet.default = `'${buildArrayString(column.default, sqlTypeLowered)}'`; + } else { + // Should do for all types + // columnToSet.default = `'${column.default}'::${sqlTypeLowered}`; + columnToSet.default = column.default; + } + } + } + } + columnsObject[column.name] = columnToSet; + } + } + + resultViews[viewKey] = { + columns: columnsObject, + 
definition: isExisting ? undefined : dialect.sqlToQuery(query!).sql, + name: viewName, + schema: viewSchema, + isExisting, + with: withOption, + withNoData, + materialized, + tablespace, + using, + }; + } + const enumsToReturn: Record = enums.reduce<{ [key: string]: Enum; }>((map, obj) => { @@ -576,9 +720,7 @@ export const generatePgSnapshot = ( schemas .filter((it) => { if (schemaFilter) { - return ( - schemaFilter.includes(it.schemaName) && it.schemaName !== 'public' - ); + return schemaFilter.includes(it.schemaName) && it.schemaName !== 'public'; } else { return it.schemaName !== 'public'; } @@ -593,6 +735,7 @@ export const generatePgSnapshot = ( enums: enumsToReturn, schemas: schemasObject, sequences: sequencesToReturn, + views: resultViews, _meta: { schemas: {}, tables: {}, @@ -609,28 +752,37 @@ const trimChar = (str: string, char: string) => { while (end > start && str[end - 1] === char) --end; // this.toString() due to ava deep equal issue with String { "value" } - return start > 0 || end < str.length - ? str.substring(start, end) - : str.toString(); + return start > 0 || end < str.length ? str.substring(start, end) : str.toString(); }; export const fromDatabase = async ( db: DB, tablesFilter: (table: string) => boolean = () => true, schemaFilters: string[], - progressCallback?: ( - stage: IntrospectStage, - count: number, - status: IntrospectStatus, - ) => void, + progressCallback?: (stage: IntrospectStage, count: number, status: IntrospectStatus) => void, ): Promise => { const result: Record = {}; + const views: Record = {}; const internals: PgKitInternals = { tables: {} }; - const where = schemaFilters.map((t) => `table_schema = '${t}'`).join(' or '); - - const allTables = await db.query( - `SELECT table_schema, table_name FROM information_schema.tables${where === '' ? 
'' : ` WHERE ${where}`};`, + const where = schemaFilters.map((t) => `n.nspname = '${t}'`).join(' or '); + + const allTables = await db.query<{ table_schema: string; table_name: string; type: string }>( + `SELECT + n.nspname AS table_schema, + c.relname AS table_name, + CASE + WHEN c.relkind = 'r' THEN 'table' + WHEN c.relkind = 'v' THEN 'view' + WHEN c.relkind = 'm' THEN 'materialized_view' + END AS type +FROM + pg_catalog.pg_class c +JOIN + pg_catalog.pg_namespace n ON n.oid = c.relnamespace +WHERE + c.relkind IN ('r', 'v', 'm') + ${where === '' ? '' : ` AND ${where}`};`, ); const schemas = new Set(allTables.map((it) => it.table_schema)); @@ -656,6 +808,8 @@ export const fromDatabase = async ( let indexesCount = 0; let foreignKeysCount = 0; let tableCount = 0; + let checksCount = 0; + let viewsCount = 0; const sequencesToReturn: Record = {}; @@ -690,9 +844,7 @@ export const fromDatabase = async ( }; } - const whereEnums = schemaFilters - .map((t) => `n.nspname = '${t}'`) - .join(' or '); + const whereEnums = schemaFilters.map((t) => `n.nspname = '${t}'`).join(' or '); const allEnums = await db.query( `select n.nspname as enum_schema, @@ -730,73 +882,64 @@ export const fromDatabase = async ( const sequencesInColumns: string[] = []; - const all = allTables.map((row) => { - return new Promise(async (res, rej) => { - const tableName = row.table_name as string; - if (!tablesFilter(tableName)) return res(''); - tableCount += 1; - const tableSchema = row.table_schema; - - try { - const columnToReturn: Record = {}; - const indexToReturn: Record = {}; - const foreignKeysToReturn: Record = {}; - const primaryKeys: Record = {}; - const uniqueConstrains: Record = {}; - - const tableResponse = await db.query( - `SELECT a.attrelid::regclass::text, a.attname, is_nullable, a.attndims as array_dimensions - , CASE WHEN a.atttypid = ANY ('{int,int8,int2}'::regtype[]) - AND EXISTS ( - SELECT FROM pg_attrdef ad - WHERE ad.adrelid = a.attrelid - AND ad.adnum = a.attnum - AND 
pg_get_expr(ad.adbin, ad.adrelid) - = 'nextval(''' - || (pg_get_serial_sequence (a.attrelid::regclass::text - , a.attname))::regclass - || '''::regclass)' - ) - THEN CASE a.atttypid - WHEN 'int'::regtype THEN 'serial' - WHEN 'int8'::regtype THEN 'bigserial' - WHEN 'int2'::regtype THEN 'smallserial' - END - ELSE format_type(a.atttypid, a.atttypmod) - END AS data_type, INFORMATION_SCHEMA.COLUMNS.table_name, ns.nspname as type_schema, - pg_get_serial_sequence('"${tableSchema}"."${tableName}"', a.attname)::regclass as seq_name, INFORMATION_SCHEMA.COLUMNS.column_name, - INFORMATION_SCHEMA.COLUMNS.column_default, INFORMATION_SCHEMA.COLUMNS.data_type as additional_dt, - INFORMATION_SCHEMA.COLUMNS.udt_name as enum_name, - INFORMATION_SCHEMA.COLUMNS.is_generated, generation_expression, - INFORMATION_SCHEMA.COLUMNS.is_identity,INFORMATION_SCHEMA.COLUMNS.identity_generation, - INFORMATION_SCHEMA.COLUMNS.identity_start, INFORMATION_SCHEMA.COLUMNS.identity_increment, - INFORMATION_SCHEMA.COLUMNS.identity_maximum, INFORMATION_SCHEMA.COLUMNS.identity_minimum, - INFORMATION_SCHEMA.COLUMNS.identity_cycle - FROM pg_attribute a - JOIN INFORMATION_SCHEMA.COLUMNS ON INFORMATION_SCHEMA.COLUMNS.column_name = a.attname - JOIN pg_type t ON t.oid = a.atttypid LEFT JOIN pg_namespace ns ON ns.oid = t.typnamespace - WHERE a.attrelid = '"${tableSchema}"."${tableName}"'::regclass and INFORMATION_SCHEMA.COLUMNS.table_name = '${tableName}' and INFORMATION_SCHEMA.COLUMNS.table_schema = '${tableSchema}' - AND a.attnum > 0 - AND NOT a.attisdropped - ORDER BY a.attnum;`, - ); - - const tableConstraints = await db.query( - `SELECT c.column_name, c.data_type, constraint_type, constraint_name, constraint_schema + const all = allTables + .filter((it) => it.type === 'table') + .map((row) => { + return new Promise(async (res, rej) => { + const tableName = row.table_name as string; + if (!tablesFilter(tableName)) return res(''); + tableCount += 1; + const tableSchema = row.table_schema; + + try { + const 
columnToReturn: Record = {}; + const indexToReturn: Record = {}; + const foreignKeysToReturn: Record = {}; + const primaryKeys: Record = {}; + const uniqueConstrains: Record = {}; + const checkConstraints: Record = {}; + + const tableResponse = await getColumnsInfoQuery({ schema: tableSchema, table: tableName, db }); + + const tableConstraints = await db.query( + `SELECT c.column_name, c.data_type, constraint_type, constraint_name, constraint_schema FROM information_schema.table_constraints tc JOIN information_schema.constraint_column_usage AS ccu USING (constraint_schema, constraint_name) JOIN information_schema.columns AS c ON c.table_schema = tc.constraint_schema AND tc.table_name = c.table_name AND ccu.column_name = c.column_name WHERE tc.table_name = '${tableName}' and constraint_schema = '${tableSchema}';`, - ); + ); - columnsCount += tableResponse.length; - if (progressCallback) { - progressCallback('columns', columnsCount, 'fetching'); - } + const tableChecks = await db.query(`SELECT + tc.constraint_name, + tc.constraint_type, + pg_get_constraintdef(con.oid) AS constraint_definition + FROM + information_schema.table_constraints AS tc + JOIN pg_constraint AS con + ON tc.constraint_name = con.conname + AND con.conrelid = ( + SELECT oid + FROM pg_class + WHERE relname = tc.table_name + AND relnamespace = ( + SELECT oid + FROM pg_namespace + WHERE nspname = tc.constraint_schema + ) + ) + WHERE + tc.table_name = '${tableName}' + AND tc.constraint_schema = '${tableSchema}' + AND tc.constraint_type = 'CHECK';`); + + columnsCount += tableResponse.length; + if (progressCallback) { + progressCallback('columns', columnsCount, 'fetching'); + } - const tableForeignKeys = await db.query( - `SELECT + const tableForeignKeys = await db.query( + `SELECT con.contype AS constraint_type, nsp.nspname AS constraint_schema, con.conname AS constraint_name, @@ -833,254 +976,244 @@ export const fromDatabase = async ( nsp.nspname = '${tableSchema}' AND rel.relname = '${tableName}' AND 
con.contype IN ('f');`, - ); + ); - foreignKeysCount += tableForeignKeys.length; - if (progressCallback) { - progressCallback('fks', foreignKeysCount, 'fetching'); - } - for (const fk of tableForeignKeys) { - // const tableFrom = fk.table_name; - const columnFrom: string = fk.column_name; - const tableTo = fk.foreign_table_name; - const columnTo: string = fk.foreign_column_name; - const schemaTo: string = fk.foreign_table_schema; - const foreignKeyName = fk.constraint_name; - const onUpdate = fk.update_rule?.toLowerCase(); - const onDelete = fk.delete_rule?.toLowerCase(); - - if (typeof foreignKeysToReturn[foreignKeyName] !== 'undefined') { - foreignKeysToReturn[foreignKeyName].columnsFrom.push(columnFrom); - foreignKeysToReturn[foreignKeyName].columnsTo.push(columnTo); - } else { - foreignKeysToReturn[foreignKeyName] = { - name: foreignKeyName, - tableFrom: tableName, - tableTo, - schemaTo, - columnsFrom: [columnFrom], - columnsTo: [columnTo], - onDelete, - onUpdate, - }; + foreignKeysCount += tableForeignKeys.length; + if (progressCallback) { + progressCallback('fks', foreignKeysCount, 'fetching'); } + for (const fk of tableForeignKeys) { + // const tableFrom = fk.table_name; + const columnFrom: string = fk.column_name; + const tableTo = fk.foreign_table_name; + const columnTo: string = fk.foreign_column_name; + const schemaTo: string = fk.foreign_table_schema; + const foreignKeyName = fk.constraint_name; + const onUpdate = fk.update_rule?.toLowerCase(); + const onDelete = fk.delete_rule?.toLowerCase(); + + if (typeof foreignKeysToReturn[foreignKeyName] !== 'undefined') { + foreignKeysToReturn[foreignKeyName].columnsFrom.push(columnFrom); + foreignKeysToReturn[foreignKeyName].columnsTo.push(columnTo); + } else { + foreignKeysToReturn[foreignKeyName] = { + name: foreignKeyName, + tableFrom: tableName, + tableTo, + schemaTo, + columnsFrom: [columnFrom], + columnsTo: [columnTo], + onDelete, + onUpdate, + }; + } - foreignKeysToReturn[foreignKeyName].columnsFrom = [ - 
...new Set(foreignKeysToReturn[foreignKeyName].columnsFrom), - ]; + foreignKeysToReturn[foreignKeyName].columnsFrom = [ + ...new Set(foreignKeysToReturn[foreignKeyName].columnsFrom), + ]; - foreignKeysToReturn[foreignKeyName].columnsTo = [ - ...new Set(foreignKeysToReturn[foreignKeyName].columnsTo), - ]; - } + foreignKeysToReturn[foreignKeyName].columnsTo = [...new Set(foreignKeysToReturn[foreignKeyName].columnsTo)]; + } - const uniqueConstrainsRows = tableConstraints.filter( - (mapRow) => mapRow.constraint_type === 'UNIQUE', - ); + const uniqueConstrainsRows = tableConstraints.filter((mapRow) => mapRow.constraint_type === 'UNIQUE'); - for (const unqs of uniqueConstrainsRows) { - // const tableFrom = fk.table_name; - const columnName: string = unqs.column_name; - const constraintName: string = unqs.constraint_name; + for (const unqs of uniqueConstrainsRows) { + // const tableFrom = fk.table_name; + const columnName: string = unqs.column_name; + const constraintName: string = unqs.constraint_name; - if (typeof uniqueConstrains[constraintName] !== 'undefined') { - uniqueConstrains[constraintName].columns.push(columnName); - } else { - uniqueConstrains[constraintName] = { - columns: [columnName], - nullsNotDistinct: false, + if (typeof uniqueConstrains[constraintName] !== 'undefined') { + uniqueConstrains[constraintName].columns.push(columnName); + } else { + uniqueConstrains[constraintName] = { + columns: [columnName], + nullsNotDistinct: false, + name: constraintName, + }; + } + } + + checksCount += tableChecks.length; + if (progressCallback) { + progressCallback('checks', checksCount, 'fetching'); + } + for (const checks of tableChecks) { + // CHECK (((email)::text <> 'test@gmail.com'::text)) + // Where (email) is column in table + let checkValue: string = checks.constraint_definition; + const constraintName: string = checks.constraint_name; + + checkValue = checkValue.replace(/^CHECK\s*\(\(/, '').replace(/\)\)\s*$/, ''); + + checkConstraints[constraintName] = { 
name: constraintName, + value: checkValue, }; } - } - for (const columnResponse of tableResponse) { - const columnName = columnResponse.attname; - const columnAdditionalDT = columnResponse.additional_dt; - const columnDimensions = columnResponse.array_dimensions; - const enumType: string = columnResponse.enum_name; - let columnType: string = columnResponse.data_type; - const typeSchema = columnResponse.type_schema; - const defaultValueRes: string = columnResponse.column_default; - - const isGenerated = columnResponse.is_generated === 'ALWAYS'; - const generationExpression = columnResponse.generation_expression; - const isIdentity = columnResponse.is_identity === 'YES'; - const identityGeneration = columnResponse.identity_generation === 'ALWAYS' - ? 'always' - : 'byDefault'; - const identityStart = columnResponse.identity_start; - const identityIncrement = columnResponse.identity_increment; - const identityMaximum = columnResponse.identity_maximum; - const identityMinimum = columnResponse.identity_minimum; - const identityCycle = columnResponse.identity_cycle === 'YES'; - const identityName = columnResponse.seq_name; - - const primaryKey = tableConstraints.filter( - (mapRow) => - columnName === mapRow.column_name - && mapRow.constraint_type === 'PRIMARY KEY', - ); + for (const columnResponse of tableResponse) { + const columnName = columnResponse.column_name; + const columnAdditionalDT = columnResponse.additional_dt; + const columnDimensions = columnResponse.array_dimensions; + const enumType: string = columnResponse.enum_name; + let columnType: string = columnResponse.data_type; + const typeSchema = columnResponse.type_schema; + const defaultValueRes: string = columnResponse.column_default; + + const isGenerated = columnResponse.is_generated === 'ALWAYS'; + const generationExpression = columnResponse.generation_expression; + const isIdentity = columnResponse.is_identity === 'YES'; + const identityGeneration = columnResponse.identity_generation === 'ALWAYS' ? 
'always' : 'byDefault'; + const identityStart = columnResponse.identity_start; + const identityIncrement = columnResponse.identity_increment; + const identityMaximum = columnResponse.identity_maximum; + const identityMinimum = columnResponse.identity_minimum; + const identityCycle = columnResponse.identity_cycle === 'YES'; + const identityName = columnResponse.seq_name; + + const primaryKey = tableConstraints.filter((mapRow) => + columnName === mapRow.column_name && mapRow.constraint_type === 'PRIMARY KEY' + ); - const cprimaryKey = tableConstraints.filter( - (mapRow) => mapRow.constraint_type === 'PRIMARY KEY', - ); + const cprimaryKey = tableConstraints.filter((mapRow) => mapRow.constraint_type === 'PRIMARY KEY'); - if (cprimaryKey.length > 1) { - const tableCompositePkName = await db.query( - `SELECT conname AS primary_key + if (cprimaryKey.length > 1) { + const tableCompositePkName = await db.query( + `SELECT conname AS primary_key FROM pg_constraint join pg_class on (pg_class.oid = conrelid) WHERE contype = 'p' AND connamespace = $1::regnamespace AND pg_class.relname = $2;`, - [tableSchema, tableName], - ); - primaryKeys[tableCompositePkName[0].primary_key] = { - name: tableCompositePkName[0].primary_key, - columns: cprimaryKey.map((c: any) => c.column_name), - }; - } - - let columnTypeMapped = columnType; + [tableSchema, tableName], + ); + primaryKeys[tableCompositePkName[0].primary_key] = { + name: tableCompositePkName[0].primary_key, + columns: cprimaryKey.map((c: any) => c.column_name), + }; + } - // Set default to internal object - if (columnAdditionalDT === 'ARRAY') { - if (typeof internals.tables[tableName] === 'undefined') { - internals.tables[tableName] = { - columns: { - [columnName]: { - isArray: true, - dimensions: columnDimensions, - rawType: columnTypeMapped.substring( - 0, - columnTypeMapped.length - 2, - ), + let columnTypeMapped = columnType; + + // Set default to internal object + if (columnAdditionalDT === 'ARRAY') { + if (typeof 
internals.tables[tableName] === 'undefined') { + internals.tables[tableName] = { + columns: { + [columnName]: { + isArray: true, + dimensions: columnDimensions, + rawType: columnTypeMapped.substring(0, columnTypeMapped.length - 2), + }, }, - }, - }; - } else { - if ( - typeof internals.tables[tableName]!.columns[columnName] - === 'undefined' - ) { - internals.tables[tableName]!.columns[columnName] = { - isArray: true, - dimensions: columnDimensions, - rawType: columnTypeMapped.substring( - 0, - columnTypeMapped.length - 2, - ), }; + } else { + if (typeof internals.tables[tableName]!.columns[columnName] === 'undefined') { + internals.tables[tableName]!.columns[columnName] = { + isArray: true, + dimensions: columnDimensions, + rawType: columnTypeMapped.substring(0, columnTypeMapped.length - 2), + }; + } } } - } - const defaultValue = defaultForColumn( - columnResponse, - internals, - tableName, - ); - if ( - defaultValue === 'NULL' - || (defaultValueRes && defaultValueRes.startsWith('(') && defaultValueRes.endsWith(')')) - ) { - if (typeof internals!.tables![tableName] === 'undefined') { - internals!.tables![tableName] = { - columns: { - [columnName]: { - isDefaultAnExpression: true, + const defaultValue = defaultForColumn(columnResponse, internals, tableName); + if ( + defaultValue === 'NULL' + || (defaultValueRes && defaultValueRes.startsWith('(') && defaultValueRes.endsWith(')')) + ) { + if (typeof internals!.tables![tableName] === 'undefined') { + internals!.tables![tableName] = { + columns: { + [columnName]: { + isDefaultAnExpression: true, + }, }, - }, - }; - } else { - if ( - typeof internals!.tables![tableName]!.columns[columnName] - === 'undefined' - ) { - internals!.tables![tableName]!.columns[columnName] = { - isDefaultAnExpression: true, }; } else { - internals!.tables![tableName]!.columns[ - columnName - ]!.isDefaultAnExpression = true; + if (typeof internals!.tables![tableName]!.columns[columnName] === 'undefined') { + 
internals!.tables![tableName]!.columns[columnName] = { + isDefaultAnExpression: true, + }; + } else { + internals!.tables![tableName]!.columns[columnName]!.isDefaultAnExpression = true; + } } } - } - const isSerial = columnType === 'serial'; - - if (columnTypeMapped.startsWith('numeric(')) { - columnTypeMapped = columnTypeMapped.replace(',', ', '); - } + const isSerial = columnType === 'serial'; - if (columnAdditionalDT === 'ARRAY') { - for (let i = 1; i < Number(columnDimensions); i++) { - columnTypeMapped += '[]'; + if (columnTypeMapped.startsWith('numeric(')) { + columnTypeMapped = columnTypeMapped.replace(',', ', '); } - } - columnTypeMapped = columnTypeMapped - .replace('character varying', 'varchar') - .replace(' without time zone', '') - // .replace("timestamp without time zone", "timestamp") - .replace('character', 'char'); - - columnTypeMapped = trimChar(columnTypeMapped, '"'); - - columnToReturn[columnName] = { - name: columnName, - type: - // filter vectors, but in future we should filter any extension that was installed by user - columnAdditionalDT === 'USER-DEFINED' - && !['vector', 'geometry'].includes(enumType) - ? enumType - : columnTypeMapped, - typeSchema: enumsToReturn[`${typeSchema}.${enumType}`] !== undefined - ? enumsToReturn[`${typeSchema}.${enumType}`].schema - : undefined, - primaryKey: primaryKey.length === 1 && cprimaryKey.length < 2, - // default: isSerial ? undefined : defaultValue, - notNull: columnResponse.is_nullable === 'NO', - generated: isGenerated - ? { as: generationExpression, type: 'stored' } - : undefined, - identity: isIdentity - ? { - type: identityGeneration, - name: identityName, - increment: stringFromDatabaseIdentityProperty(identityIncrement), - minValue: stringFromDatabaseIdentityProperty(identityMinimum), - maxValue: stringFromDatabaseIdentityProperty(identityMaximum), - startWith: stringFromDatabaseIdentityProperty(identityStart), - cache: sequencesToReturn[identityName]?.cache - ? 
sequencesToReturn[identityName]?.cache - : sequencesToReturn[`${tableSchema}.${identityName}`]?.cache - ? sequencesToReturn[`${tableSchema}.${identityName}`]?.cache - : undefined, - cycle: identityCycle, - schema: tableSchema, + if (columnAdditionalDT === 'ARRAY') { + for (let i = 1; i < Number(columnDimensions); i++) { + columnTypeMapped += '[]'; } - : undefined, - }; + } - if (identityName && typeof identityName === 'string') { - // remove "" from sequence name - delete sequencesToReturn[ - `${tableSchema}.${ - identityName.startsWith('"') && identityName.endsWith('"') ? identityName.slice(1, -1) : identityName - }` - ]; - delete sequencesToReturn[identityName]; - } + columnTypeMapped = columnTypeMapped + .replace('character varying', 'varchar') + .replace(' without time zone', '') + // .replace("timestamp without time zone", "timestamp") + .replace('character', 'char'); + + columnTypeMapped = trimChar(columnTypeMapped, '"'); + + columnToReturn[columnName] = { + name: columnName, + type: + // filter vectors, but in future we should filter any extension that was installed by user + columnAdditionalDT === 'USER-DEFINED' + && !['vector', 'geometry'].includes(enumType) + ? enumType + : columnTypeMapped, + typeSchema: enumsToReturn[`${typeSchema}.${enumType}`] !== undefined + ? enumsToReturn[`${typeSchema}.${enumType}`].schema + : undefined, + primaryKey: primaryKey.length === 1 && cprimaryKey.length < 2, + // default: isSerial ? undefined : defaultValue, + notNull: columnResponse.is_nullable === 'NO', + generated: isGenerated + ? { as: generationExpression, type: 'stored' } + : undefined, + identity: isIdentity + ? 
{ + type: identityGeneration, + name: identityName, + increment: stringFromDatabaseIdentityProperty(identityIncrement), + minValue: stringFromDatabaseIdentityProperty(identityMinimum), + maxValue: stringFromDatabaseIdentityProperty(identityMaximum), + startWith: stringFromDatabaseIdentityProperty(identityStart), + cache: sequencesToReturn[identityName]?.cache + ? sequencesToReturn[identityName]?.cache + : sequencesToReturn[`${tableSchema}.${identityName}`]?.cache + ? sequencesToReturn[`${tableSchema}.${identityName}`]?.cache + : undefined, + cycle: identityCycle, + schema: tableSchema, + } + : undefined, + }; - if (!isSerial && typeof defaultValue !== 'undefined') { - columnToReturn[columnName].default = defaultValue; + if (identityName && typeof identityName === 'string') { + // remove "" from sequence name + delete sequencesToReturn[ + `${tableSchema}.${ + identityName.startsWith('"') && identityName.endsWith('"') ? identityName.slice(1, -1) : identityName + }` + ]; + delete sequencesToReturn[identityName]; + } + + if (!isSerial && typeof defaultValue !== 'undefined') { + columnToReturn[columnName].default = defaultValue; + } } - } - const dbIndexes = await db.query( - `SELECT DISTINCT ON (t.relname, ic.relname, k.i) t.relname as table_name, ic.relname AS indexname, + const dbIndexes = await db.query( + `SELECT DISTINCT ON (t.relname, ic.relname, k.i) t.relname as table_name, ic.relname AS indexname, k.i AS index_order, i.indisunique as is_unique, am.amname as method, @@ -1116,10 +1249,10 @@ export const fromDatabase = async ( WHERE c.nspname = '${tableSchema}' AND t.relname = '${tableName}';`, - ); + ); - const dbIndexFromConstraint = await db.query( - `SELECT + const dbIndexFromConstraint = await db.query( + `SELECT idx.indexrelname AS index_name, idx.relname AS table_name, schemaname, @@ -1130,89 +1263,90 @@ export const fromDatabase = async ( pg_constraint con ON con.conindid = idx.indexrelid WHERE idx.relname = '${tableName}' and schemaname = 
'${tableSchema}' group by index_name, table_name,schemaname, generated_by_constraint;`, - ); + ); - const idxsInConsteraint = dbIndexFromConstraint - .filter((it) => it.generated_by_constraint === 1) - .map((it) => it.index_name); - - for (const dbIndex of dbIndexes) { - const indexName: string = dbIndex.indexname; - const indexColumnName: string = dbIndex.column_name; - const indexIsUnique = dbIndex.is_unique; - const indexMethod = dbIndex.method; - const indexWith: string[] = dbIndex.with; - const indexWhere: string = dbIndex.where; - const opclass: string = dbIndex.opcname; - const isExpression = dbIndex.is_expression === 1; - - const desc: boolean = dbIndex.descending; - const nullsFirst: boolean = dbIndex.nulls_first; - - const mappedWith: Record = {}; - - if (indexWith !== null) { - indexWith - // .slice(1, indexWith.length - 1) - // .split(",") - .forEach((it) => { - const splitted = it.split('='); - mappedWith[splitted[0]] = splitted[1]; - }); - } + const idxsInConsteraint = dbIndexFromConstraint.filter((it) => it.generated_by_constraint === 1).map((it) => + it.index_name + ); - if (idxsInConsteraint.includes(indexName)) continue; + for (const dbIndex of dbIndexes) { + const indexName: string = dbIndex.indexname; + const indexColumnName: string = dbIndex.column_name; + const indexIsUnique = dbIndex.is_unique; + const indexMethod = dbIndex.method; + const indexWith: string[] = dbIndex.with; + const indexWhere: string = dbIndex.where; + const opclass: string = dbIndex.opcname; + const isExpression = dbIndex.is_expression === 1; + + const desc: boolean = dbIndex.descending; + const nullsFirst: boolean = dbIndex.nulls_first; + + const mappedWith: Record = {}; + + if (indexWith !== null) { + indexWith + // .slice(1, indexWith.length - 1) + // .split(",") + .forEach((it) => { + const splitted = it.split('='); + mappedWith[splitted[0]] = splitted[1]; + }); + } - if (typeof indexToReturn[indexName] !== 'undefined') { - indexToReturn[indexName].columns.push({ - 
expression: indexColumnName, - asc: !desc, - nulls: nullsFirst ? 'first' : 'last', - opclass, - isExpression, - }); - } else { - indexToReturn[indexName] = { - name: indexName, - columns: [ - { - expression: indexColumnName, - asc: !desc, - nulls: nullsFirst ? 'first' : 'last', - opclass, - isExpression, - }, - ], - isUnique: indexIsUnique, - // should not be a part of diff detecs - concurrently: false, - method: indexMethod, - where: indexWhere === null ? undefined : indexWhere, - with: mappedWith, - }; + if (idxsInConsteraint.includes(indexName)) continue; + + if (typeof indexToReturn[indexName] !== 'undefined') { + indexToReturn[indexName].columns.push({ + expression: indexColumnName, + asc: !desc, + nulls: nullsFirst ? 'first' : 'last', + opclass, + isExpression, + }); + } else { + indexToReturn[indexName] = { + name: indexName, + columns: [ + { + expression: indexColumnName, + asc: !desc, + nulls: nullsFirst ? 'first' : 'last', + opclass, + isExpression, + }, + ], + isUnique: indexIsUnique, + // should not be a part of diff detecs + concurrently: false, + method: indexMethod, + where: indexWhere === null ? undefined : indexWhere, + with: mappedWith, + }; + } } - } - indexesCount += Object.keys(indexToReturn).length; - if (progressCallback) { - progressCallback('indexes', indexesCount, 'fetching'); + indexesCount += Object.keys(indexToReturn).length; + if (progressCallback) { + progressCallback('indexes', indexesCount, 'fetching'); + } + result[`${tableSchema}.${tableName}`] = { + name: tableName, + schema: tableSchema !== 'public' ? tableSchema : '', + columns: columnToReturn, + indexes: indexToReturn, + foreignKeys: foreignKeysToReturn, + compositePrimaryKeys: primaryKeys, + uniqueConstraints: uniqueConstrains, + checkConstraints: checkConstraints, + }; + } catch (e) { + rej(e); + return; } - result[`${tableSchema}.${tableName}`] = { - name: tableName, - schema: tableSchema !== 'public' ? 
tableSchema : '', - columns: columnToReturn, - indexes: indexToReturn, - foreignKeys: foreignKeysToReturn, - compositePrimaryKeys: primaryKeys, - uniqueConstraints: uniqueConstrains, - }; - } catch (e) { - rej(e); - return; - } - res(''); + res(''); + }); }); - }); if (progressCallback) { progressCallback('tables', tableCount, 'done'); @@ -1221,10 +1355,241 @@ export const fromDatabase = async ( for await (const _ of all) { } + const allViews = allTables + .filter((it) => it.type === 'view' || it.type === 'materialized_view') + .map((row) => { + return new Promise(async (res, rej) => { + const viewName = row.table_name as string; + if (!tablesFilter(viewName)) return res(''); + tableCount += 1; + const viewSchema = row.table_schema; + + try { + const columnToReturn: Record = {}; + + const viewResponses = await getColumnsInfoQuery({ schema: viewSchema, table: viewName, db }); + + for (const viewResponse of viewResponses) { + const columnName = viewResponse.column_name; + const columnAdditionalDT = viewResponse.additional_dt; + const columnDimensions = viewResponse.array_dimensions; + const enumType: string = viewResponse.enum_name; + let columnType: string = viewResponse.data_type; + const typeSchema = viewResponse.type_schema; + // const defaultValueRes: string = viewResponse.column_default; + + const isGenerated = viewResponse.is_generated === 'ALWAYS'; + const generationExpression = viewResponse.generation_expression; + const isIdentity = viewResponse.is_identity === 'YES'; + const identityGeneration = viewResponse.identity_generation === 'ALWAYS' ? 
'always' : 'byDefault'; + const identityStart = viewResponse.identity_start; + const identityIncrement = viewResponse.identity_increment; + const identityMaximum = viewResponse.identity_maximum; + const identityMinimum = viewResponse.identity_minimum; + const identityCycle = viewResponse.identity_cycle === 'YES'; + const identityName = viewResponse.seq_name; + const defaultValueRes = viewResponse.column_default; + + const primaryKey = viewResponse.constraint_type === 'PRIMARY KEY'; + + let columnTypeMapped = columnType; + + // Set default to internal object + if (columnAdditionalDT === 'ARRAY') { + if (typeof internals.tables[viewName] === 'undefined') { + internals.tables[viewName] = { + columns: { + [columnName]: { + isArray: true, + dimensions: columnDimensions, + rawType: columnTypeMapped.substring(0, columnTypeMapped.length - 2), + }, + }, + }; + } else { + if (typeof internals.tables[viewName]!.columns[columnName] === 'undefined') { + internals.tables[viewName]!.columns[columnName] = { + isArray: true, + dimensions: columnDimensions, + rawType: columnTypeMapped.substring(0, columnTypeMapped.length - 2), + }; + } + } + } + + const defaultValue = defaultForColumn(viewResponse, internals, viewName); + if ( + defaultValue === 'NULL' + || (defaultValueRes && defaultValueRes.startsWith('(') && defaultValueRes.endsWith(')')) + ) { + if (typeof internals!.tables![viewName] === 'undefined') { + internals!.tables![viewName] = { + columns: { + [columnName]: { + isDefaultAnExpression: true, + }, + }, + }; + } else { + if (typeof internals!.tables![viewName]!.columns[columnName] === 'undefined') { + internals!.tables![viewName]!.columns[columnName] = { + isDefaultAnExpression: true, + }; + } else { + internals!.tables![viewName]!.columns[columnName]!.isDefaultAnExpression = true; + } + } + } + + const isSerial = columnType === 'serial'; + + if (columnTypeMapped.startsWith('numeric(')) { + columnTypeMapped = columnTypeMapped.replace(',', ', '); + } + + if 
(columnAdditionalDT === 'ARRAY') { + for (let i = 1; i < Number(columnDimensions); i++) { + columnTypeMapped += '[]'; + } + } + + columnTypeMapped = columnTypeMapped + .replace('character varying', 'varchar') + .replace(' without time zone', '') + // .replace("timestamp without time zone", "timestamp") + .replace('character', 'char'); + + columnTypeMapped = trimChar(columnTypeMapped, '"'); + + columnToReturn[columnName] = { + name: columnName, + type: + // filter vectors, but in future we should filter any extension that was installed by user + columnAdditionalDT === 'USER-DEFINED' && !['vector', 'geometry'].includes(enumType) + ? enumType + : columnTypeMapped, + typeSchema: enumsToReturn[`${typeSchema}.${enumType}`] !== undefined + ? enumsToReturn[`${typeSchema}.${enumType}`].schema + : undefined, + primaryKey: primaryKey, + notNull: viewResponse.is_nullable === 'NO', + generated: isGenerated ? { as: generationExpression, type: 'stored' } : undefined, + identity: isIdentity + ? { + type: identityGeneration, + name: identityName, + increment: stringFromDatabaseIdentityProperty(identityIncrement), + minValue: stringFromDatabaseIdentityProperty(identityMinimum), + maxValue: stringFromDatabaseIdentityProperty(identityMaximum), + startWith: stringFromDatabaseIdentityProperty(identityStart), + cache: sequencesToReturn[identityName]?.cache + ? sequencesToReturn[identityName]?.cache + : sequencesToReturn[`${viewSchema}.${identityName}`]?.cache + ? sequencesToReturn[`${viewSchema}.${identityName}`]?.cache + : undefined, + cycle: identityCycle, + schema: viewSchema, + } + : undefined, + }; + + if (identityName) { + // remove "" from sequence name + delete sequencesToReturn[ + `${viewSchema}.${ + identityName.startsWith('"') && identityName.endsWith('"') ? 
identityName.slice(1, -1) : identityName + }` + ]; + delete sequencesToReturn[identityName]; + } + + if (!isSerial && typeof defaultValue !== 'undefined') { + columnToReturn[columnName].default = defaultValue; + } + } + + const [viewInfo] = await db.query<{ + view_name: string; + schema_name: string; + definition: string; + tablespace_name: string | null; + options: string[] | null; + location: string | null; + }>(` + SELECT + c.relname AS view_name, + n.nspname AS schema_name, + pg_get_viewdef(c.oid, true) AS definition, + ts.spcname AS tablespace_name, + c.reloptions AS options, + pg_tablespace_location(ts.oid) AS location +FROM + pg_class c +JOIN + pg_namespace n ON c.relnamespace = n.oid +LEFT JOIN + pg_tablespace ts ON c.reltablespace = ts.oid +WHERE + (c.relkind = 'm' OR c.relkind = 'v') + AND n.nspname = '${viewSchema}' + AND c.relname = '${viewName}';`); + + const resultWith: { [key: string]: string | boolean | number } = {}; + if (viewInfo.options) { + viewInfo.options.forEach((pair) => { + const splitted = pair.split('='); + const key = splitted[0]; + const value = splitted[1]; + + if (value === 'true') { + resultWith[key] = true; + } else if (value === 'false') { + resultWith[key] = false; + } else if (!isNaN(Number(value))) { + resultWith[key] = Number(value); + } else { + resultWith[key] = value; + } + }); + } + + const definition = viewInfo.definition.replace(/\s+/g, ' ').replace(';', '').trim(); + // { "check_option":"cascaded","security_barrier":true} -> // { "checkOption":"cascaded","securityBarrier":true} + const withOption = Object.values(resultWith).length + ? 
Object.fromEntries(Object.entries(resultWith).map(([key, value]) => [key.camelCase(), value])) + : undefined; + + const materialized = row.type === 'materialized_view'; + + views[`${viewSchema}.${viewName}`] = { + name: viewName, + schema: viewSchema, + columns: columnToReturn, + isExisting: false, + definition: definition, + materialized: materialized, + with: withOption, + tablespace: viewInfo.tablespace_name ?? undefined, + }; + } catch (e) { + rej(e); + return; + } + res(''); + }); + }); + + viewsCount = allViews.length; + + for await (const _ of allViews) { + } + if (progressCallback) { progressCallback('columns', columnsCount, 'done'); progressCallback('indexes', indexesCount, 'done'); progressCallback('fks', foreignKeysCount, 'done'); + progressCallback('checks', checksCount, 'done'); + progressCallback('views', viewsCount, 'done'); } const schemasObject = Object.fromEntries([...schemas].map((it) => [it, it])); @@ -1236,6 +1601,7 @@ export const fromDatabase = async ( enums: enumsToReturn, schemas: schemasObject, sequences: sequencesToReturn, + views: views, _meta: { schemas: {}, tables: {}, @@ -1246,18 +1612,14 @@ export const fromDatabase = async ( }; const defaultForColumn = (column: any, internals: PgKitInternals, tableName: string) => { - const columnName = column.attname; + const columnName = column.column_name; const isArray = internals?.tables[tableName]?.columns[columnName]?.isArray ?? 
false; if (column.column_default === null) { return undefined; } - if ( - column.data_type === 'serial' - || column.data_type === 'smallserial' - || column.data_type === 'bigserial' - ) { + if (column.data_type === 'serial' || column.data_type === 'smallserial' || column.data_type === 'bigserial') { return undefined; } @@ -1275,7 +1637,8 @@ const defaultForColumn = (column: any, internals: PgKitInternals, tableName: str if (isArray) { return `'{${ - columnDefaultAsString.slice(2, -2) + columnDefaultAsString + .slice(2, -2) .split(/\s*,\s*/g) .map((value) => { if (['integer', 'smallint', 'bigint', 'double precision', 'real'].includes(column.data_type.slice(0, -2))) { @@ -1296,9 +1659,7 @@ const defaultForColumn = (column: any, internals: PgKitInternals, tableName: str }}'`; } - if ( - ['integer', 'smallint', 'bigint', 'double precision', 'real'].includes(column.data_type) - ) { + if (['integer', 'smallint', 'bigint', 'double precision', 'real'].includes(column.data_type)) { if (/^-?[\d.]+(?:e-?\d+)?$/.test(columnDefaultAsString)) { return Number(columnDefaultAsString); } else { @@ -1311,21 +1672,19 @@ const defaultForColumn = (column: any, internals: PgKitInternals, tableName: str }, }; } else { - if ( - typeof internals!.tables![tableName]!.columns[columnName] - === 'undefined' - ) { + if (typeof internals!.tables![tableName]!.columns[columnName] === 'undefined') { internals!.tables![tableName]!.columns[columnName] = { isDefaultAnExpression: true, }; } else { - internals!.tables![tableName]!.columns[ - columnName - ]!.isDefaultAnExpression = true; + internals!.tables![tableName]!.columns[columnName]!.isDefaultAnExpression = true; } } return columnDefaultAsString; } + } else if (column.data_type.includes('numeric')) { + // if numeric(1,1) and used '99' -> psql stores like '99'::numeric + return columnDefaultAsString.includes("'") ? 
columnDefaultAsString : `'${columnDefaultAsString}'`; } else if (column.data_type === 'json' || column.data_type === 'jsonb') { const jsonWithoutSpaces = JSON.stringify(JSON.parse(columnDefaultAsString.slice(1, -1))); return `'${jsonWithoutSpaces}'::${column.data_type}`; @@ -1336,6 +1695,76 @@ const defaultForColumn = (column: any, internals: PgKitInternals, tableName: str } else if (columnDefaultAsString.startsWith("'") && columnDefaultAsString.endsWith("'")) { return columnDefaultAsString; } else { - return `${columnDefaultAsString.replace(/\\/g, '\`\\')}`; + return `${columnDefaultAsString.replace(/\\/g, '`\\')}`; } }; + +const getColumnsInfoQuery = ({ schema, table, db }: { schema: string; table: string; db: DB }) => { + return db.query( + `SELECT + a.attrelid::regclass::text AS table_name, -- Table, view, or materialized view name + a.attname AS column_name, -- Column name + CASE + WHEN NOT a.attisdropped THEN + CASE + WHEN a.attnotnull THEN 'NO' + ELSE 'YES' + END + ELSE NULL + END AS is_nullable, -- NULL or NOT NULL constraint + a.attndims AS array_dimensions, -- Array dimensions + CASE + WHEN a.atttypid = ANY ('{int,int8,int2}'::regtype[]) + AND EXISTS ( + SELECT FROM pg_attrdef ad + WHERE ad.adrelid = a.attrelid + AND ad.adnum = a.attnum + AND pg_get_expr(ad.adbin, ad.adrelid) = 'nextval(''' + || pg_get_serial_sequence(a.attrelid::regclass::text, a.attname)::regclass || '''::regclass)' + ) + THEN CASE a.atttypid + WHEN 'int'::regtype THEN 'serial' + WHEN 'int8'::regtype THEN 'bigserial' + WHEN 'int2'::regtype THEN 'smallserial' + END + ELSE format_type(a.atttypid, a.atttypmod) + END AS data_type, -- Column data type +-- ns.nspname AS type_schema, -- Schema name + pg_get_serial_sequence('"${schema}"."${table}"', a.attname)::regclass AS seq_name, -- Serial sequence (if any) + c.column_default, -- Column default value + c.data_type AS additional_dt, -- Data type from information_schema + c.udt_name AS enum_name, -- Enum type (if applicable) + c.is_generated, 
-- Is it a generated column? + c.generation_expression, -- Generation expression (if generated) + c.is_identity, -- Is it an identity column? + c.identity_generation, -- Identity generation strategy (ALWAYS or BY DEFAULT) + c.identity_start, -- Start value of identity column + c.identity_increment, -- Increment for identity column + c.identity_maximum, -- Maximum value for identity column + c.identity_minimum, -- Minimum value for identity column + c.identity_cycle, -- Does the identity column cycle? + enum_ns.nspname AS type_schema -- Schema of the enum type +FROM + pg_attribute a +JOIN + pg_class cls ON cls.oid = a.attrelid -- Join pg_class to get table/view/materialized view info +JOIN + pg_namespace ns ON ns.oid = cls.relnamespace -- Join namespace to get schema info +LEFT JOIN + information_schema.columns c ON c.column_name = a.attname + AND c.table_schema = ns.nspname + AND c.table_name = cls.relname -- Match schema and table/view name +LEFT JOIN + pg_type enum_t ON enum_t.oid = a.atttypid -- Join to get the type info +LEFT JOIN + pg_namespace enum_ns ON enum_ns.oid = enum_t.typnamespace -- Join to get the enum schema +WHERE + a.attnum > 0 -- Valid column numbers only + AND NOT a.attisdropped -- Skip dropped columns + AND cls.relkind IN ('r', 'v', 'm') -- Include regular tables ('r'), views ('v'), and materialized views ('m') + AND ns.nspname = '${schema}' -- Filter by schema + AND cls.relname = '${table}' -- Filter by table name +ORDER BY + a.attnum; -- Order by column number`, + ); +}; diff --git a/drizzle-kit/src/serializer/sqliteImports.ts b/drizzle-kit/src/serializer/sqliteImports.ts index 534427e47..0164604d1 100644 --- a/drizzle-kit/src/serializer/sqliteImports.ts +++ b/drizzle-kit/src/serializer/sqliteImports.ts @@ -1,21 +1,28 @@ import { is } from 'drizzle-orm'; -import { AnySQLiteTable, SQLiteTable } from 'drizzle-orm/sqlite-core'; +import { AnySQLiteTable, SQLiteTable, SQLiteView } from 'drizzle-orm/sqlite-core'; import { safeRegister } from 
'../cli/commands/utils'; export const prepareFromExports = (exports: Record) => { const tables: AnySQLiteTable[] = []; + const views: SQLiteView[] = []; + const i0values = Object.values(exports); i0values.forEach((t) => { if (is(t, SQLiteTable)) { tables.push(t); } + + if (is(t, SQLiteView)) { + views.push(t); + } }); - return { tables }; + return { tables, views }; }; export const prepareFromSqliteImports = async (imports: string[]) => { const tables: AnySQLiteTable[] = []; + const views: SQLiteView[] = []; const { unregister } = await safeRegister(); for (let i = 0; i < imports.length; i++) { @@ -25,9 +32,10 @@ export const prepareFromSqliteImports = async (imports: string[]) => { const prepared = prepareFromExports(i0); tables.push(...prepared.tables); + views.push(...prepared.views); } unregister(); - return { tables: Array.from(new Set(tables)) }; + return { tables: Array.from(new Set(tables)), views }; }; diff --git a/drizzle-kit/src/serializer/sqliteSchema.ts b/drizzle-kit/src/serializer/sqliteSchema.ts index a8114e3a8..54587c3e0 100644 --- a/drizzle-kit/src/serializer/sqliteSchema.ts +++ b/drizzle-kit/src/serializer/sqliteSchema.ts @@ -1,5 +1,5 @@ import { any, boolean, enum as enumType, literal, object, record, string, TypeOf, union } from 'zod'; -import { customMapEntries, mapEntries, mapValues, originUUID } from '../global'; +import { customMapEntries, mapValues, originUUID } from '../global'; // ------- V3 -------- const index = object({ @@ -49,6 +49,11 @@ const uniqueConstraint = object({ columns: string().array(), }).strict(); +const checkConstraint = object({ + name: string(), + value: string(), +}).strict(); + const table = object({ name: string(), columns: record(string(), column), @@ -56,6 +61,14 @@ const table = object({ foreignKeys: record(string(), fk), compositePrimaryKeys: record(string(), compositePK), uniqueConstraints: record(string(), uniqueConstraint).default({}), + checkConstraints: record(string(), checkConstraint).default({}), 
+}).strict(); + +export const view = object({ + name: string(), + columns: record(string(), column), + definition: string().optional(), + isExisting: boolean(), }).strict(); // use main dialect @@ -77,6 +90,7 @@ export const schemaInternalV4 = object({ version: literal('4'), dialect: dialect, tables: record(string(), table), + views: record(string(), view), enums: object({}), }).strict(); @@ -108,6 +122,7 @@ export const schemaInternal = object({ version: latestVersion, dialect: dialect, tables: record(string(), table), + views: record(string(), view), enums: object({}), _meta: object({ tables: record(string(), string()), @@ -128,12 +143,14 @@ const tableSquashed = object({ foreignKeys: record(string(), string()), compositePrimaryKeys: record(string(), string()), uniqueConstraints: record(string(), string()).default({}), + checkConstraints: record(string(), string()).default({}), }).strict(); export const schemaSquashed = object({ version: latestVersion, dialect: dialect, tables: record(string(), tableSquashed), + views: record(string(), view), enums: any(), }).strict(); @@ -150,6 +167,8 @@ export type Index = TypeOf; export type ForeignKey = TypeOf; export type PrimaryKey = TypeOf; export type UniqueConstraint = TypeOf; +export type CheckConstraint = TypeOf; +export type View = TypeOf; export const SQLiteSquasher = { squashIdx: (idx: Index) => { @@ -233,6 +252,17 @@ export const SQLiteSquasher = { unsquashPK: (pk: string) => { return pk.split(','); }, + squashCheck: (check: CheckConstraint) => { + return `${check.name};${check.value}`; + }, + unsquashCheck: (input: string): CheckConstraint => { + const [ + name, + value, + ] = input.split(';'); + + return { name, value }; + }, }; export const squashSqliteScheme = ( @@ -268,6 +298,13 @@ export const squashSqliteScheme = ( }, ); + const squashedCheckConstraints = mapValues( + it[1].checkConstraints, + (check) => { + return SQLiteSquasher.squashCheck(check); + }, + ); + return [ it[0], { @@ -277,6 +314,7 @@ export 
const squashSqliteScheme = ( foreignKeys: squashedFKs, compositePrimaryKeys: squashedPKs, uniqueConstraints: squashedUniqueConstraints, + checkConstraints: squashedCheckConstraints, }, ]; }), @@ -286,6 +324,7 @@ export const squashSqliteScheme = ( version: '6', dialect: json.dialect, tables: mappedTables, + views: json.views, enums: json.enums, }; }; @@ -296,6 +335,7 @@ export const drySQLite = schema.parse({ id: originUUID, prevId: '', tables: {}, + views: {}, enums: {}, _meta: { tables: {}, diff --git a/drizzle-kit/src/serializer/sqliteSerializer.ts b/drizzle-kit/src/serializer/sqliteSerializer.ts index f1d28f759..3977705a6 100644 --- a/drizzle-kit/src/serializer/sqliteSerializer.ts +++ b/drizzle-kit/src/serializer/sqliteSerializer.ts @@ -5,14 +5,18 @@ import { // AnySQLiteColumnBuilder, AnySQLiteTable, getTableConfig, + getViewConfig, SQLiteBaseInteger, + SQLiteColumn, SQLiteSyncDialect, + SQLiteView, uniqueKeyName, } from 'drizzle-orm/sqlite-core'; import { CasingType } from 'src/cli/validations/common'; import { withStyle } from '../cli/validations/outputs'; import type { IntrospectStage, IntrospectStatus } from '../cli/views'; import type { + CheckConstraint, Column, ForeignKey, Index, @@ -21,16 +25,20 @@ import type { SQLiteSchemaInternal, Table, UniqueConstraint, + View, } from '../serializer/sqliteSchema'; import { getColumnCasing, type SQLiteDB } from '../utils'; import { sqlToStr } from '.'; export const generateSqliteSnapshot = ( tables: AnySQLiteTable[], + views: SQLiteView[], casing: CasingType | undefined, ): SQLiteSchemaInternal => { const dialect = new SQLiteSyncDialect({ casing }); const result: Record = {}; + const resultViews: Record = {}; + const internal: SQLiteKitInternals = { indexes: {} }; for (const table of tables) { // const tableName = getTableName(table); @@ -39,11 +47,15 @@ export const generateSqliteSnapshot = ( const foreignKeysObject: Record = {}; const primaryKeysObject: Record = {}; const uniqueConstraintObject: Record = {}; + 
const checkConstraintObject: Record = {}; + + const checksInTable: Record = {}; const { name: tableName, columns, indexes, + checks, foreignKeys: tableForeignKeys, primaryKeys, uniqueConstraints, @@ -271,6 +283,38 @@ export const generateSqliteSnapshot = ( } }); + checks.forEach((check) => { + const checkName = check.name; + if (typeof checksInTable[tableName] !== 'undefined') { + if (checksInTable[tableName].includes(check.name)) { + console.log( + `\n${ + withStyle.errorWarning( + `We\'ve found duplicated check constraint name in ${ + chalk.underline.blue( + tableName, + ) + }. Please rename your check constraint in the ${ + chalk.underline.blue( + tableName, + ) + } table`, + ) + }`, + ); + process.exit(1); + } + checksInTable[tableName].push(checkName); + } else { + checksInTable[tableName] = [check.name]; + } + + checkConstraintObject[checkName] = { + name: checkName, + value: dialect.sqlToQuery(check.value).sql, + }; + }); + result[tableName] = { name: tableName, columns: columnsObject, @@ -278,6 +322,79 @@ export const generateSqliteSnapshot = ( foreignKeys: foreignKeysObject, compositePrimaryKeys: primaryKeysObject, uniqueConstraints: uniqueConstraintObject, + checkConstraints: checkConstraintObject, + }; + } + + for (const view of views) { + const { name, isExisting, selectedFields, query, schema } = getViewConfig(view); + + const columnsObject: Record = {}; + + const existingView = resultViews[name]; + if (typeof existingView !== 'undefined') { + console.log( + `\n${ + withStyle.errorWarning( + `We\'ve found duplicated view name across ${ + chalk.underline.blue( + schema ?? 'public', + ) + } schema. 
Please rename your view`, + ) + }`, + ); + process.exit(1); + } + + for (const key in selectedFields) { + if (is(selectedFields[key], SQLiteColumn)) { + const column = selectedFields[key]; + const notNull: boolean = column.notNull; + const primaryKey: boolean = column.primary; + const generated = column.generated; + + const columnToSet: Column = { + name: column.name, + type: column.getSQLType(), + primaryKey, + notNull, + autoincrement: is(column, SQLiteBaseInteger) + ? column.autoIncrement + : false, + generated: generated + ? { + as: is(generated.as, SQL) + ? `(${dialect.sqlToQuery(generated.as as SQL, 'indexes').sql})` + : typeof generated.as === 'function' + ? `(${dialect.sqlToQuery(generated.as() as SQL, 'indexes').sql})` + : `(${generated.as as any})`, + type: generated.mode ?? 'virtual', + } + : undefined, + }; + + if (column.default !== undefined) { + if (is(column.default, SQL)) { + columnToSet.default = sqlToStr(column.default, casing); + } else { + columnToSet.default = typeof column.default === 'string' + ? `'${column.default}'` + : typeof column.default === 'object' + || Array.isArray(column.default) + ? `'${JSON.stringify(column.default)}'` + : column.default; + } + } + columnsObject[column.name] = columnToSet; + } + } + + resultViews[name] = { + columns: columnsObject, + name, + isExisting, + definition: isExisting ? 
undefined : dialect.sqlToQuery(query!).sql, }; } @@ -285,6 +402,7 @@ export const generateSqliteSnapshot = ( version: '6', dialect: 'sqlite', tables: result, + views: resultViews, enums: {}, _meta: { tables: {}, @@ -389,6 +507,8 @@ export const fromDatabase = async ( ) => void, ): Promise => { const result: Record = {}; + const resultViews: Record = {}; + const columns = await db.query<{ tableName: string; columnName: string; @@ -399,11 +519,12 @@ export const fromDatabase = async ( seq: number; hidden: number; sql: string; + type: 'view' | 'table'; }>( `SELECT - m.name as "tableName", p.name as "columnName", p.type as "columnType", p."notnull" as "notNull", p.dflt_value as "defaultValue", p.pk as pk, p.hidden as hidden, m.sql + m.name as "tableName", p.name as "columnName", p.type as "columnType", p."notnull" as "notNull", p.dflt_value as "defaultValue", p.pk as pk, p.hidden as hidden, m.sql, m.type as type FROM sqlite_master AS m JOIN pragma_table_xinfo(m.name) AS p - WHERE m.type = 'table' + WHERE (m.type = 'table' OR m.type = 'view') and m.tbl_name != 'sqlite_sequence' and m.tbl_name != 'sqlite_stat1' and m.tbl_name != '_litestream_seq' @@ -435,6 +556,8 @@ export const fromDatabase = async ( let tablesCount = new Set(); let indexesCount = 0; let foreignKeysCount = 0; + let checksCount = 0; + let viewsCount = 0; // append primaryKeys by table const tableToPk: { [tname: string]: string[] } = {}; @@ -447,7 +570,10 @@ export const fromDatabase = async ( for (const column of columns) { if (!tablesFilter(column.tableName)) continue; - columnsCount += 1; + // TODO + if (column.type !== 'view') { + columnsCount += 1; + } if (progressCallback) { progressCallback('columns', columnsCount, 'fetching'); } @@ -526,6 +652,7 @@ export const fromDatabase = async ( indexes: {}, foreignKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }; } else { result[tableName]!.columns[columnName] = newColumn; @@ -696,10 +823,107 @@ WHERE progressCallback('enums', 0, 'done'); } + const 
views = await db.query( + `SELECT name AS view_name, sql AS sql FROM sqlite_master WHERE type = 'view';`, + ); + + viewsCount = views.length; + + if (progressCallback) { + progressCallback('views', viewsCount, 'fetching'); + } + for (const view of views) { + const viewName = view['view_name']; + const sql = view['sql']; + + const regex = new RegExp(`\\bAS\\b\\s+(SELECT.+)$`, 'i'); + const match = sql.match(regex); + + if (!match) { + console.log('Could not process view'); + process.exit(1); + } + + const viewDefinition = match[1] as string; + + const columns = result[viewName].columns; + delete result[viewName]; + + resultViews[viewName] = { + columns: columns, + isExisting: false, + name: viewName, + definition: viewDefinition, + }; + } + if (progressCallback) { + progressCallback('views', viewsCount, 'done'); + } + + const namedCheckPattern = /CONSTRAINT\s*["']?(\w+)["']?\s*CHECK\s*\((.*?)\)/gi; + const unnamedCheckPattern = /CHECK\s*\((.*?)\)/gi; + let checkCounter = 0; + const checkConstraints: Record = {}; + const checks = await db.query<{ tableName: string; sql: string }>(`SELECT name as "tableName", sql as "sql" + FROM sqlite_master + WHERE type = 'table' AND name != 'sqlite_sequence';`); + for (const check of checks) { + if (!tablesFilter(check.tableName)) continue; + + const { tableName, sql } = check; + + // Find named CHECK constraints + let namedChecks = [...sql.matchAll(namedCheckPattern)]; + if (namedChecks.length > 0) { + namedChecks.forEach(([_, checkName, checkValue]) => { + checkConstraints[checkName] = { + name: checkName, + value: checkValue.trim(), + }; + }); + } else { + // If no named constraints, find unnamed CHECK constraints and assign names + let unnamedChecks = [...sql.matchAll(unnamedCheckPattern)]; + unnamedChecks.forEach(([_, checkValue]) => { + let checkName = `${tableName}_check_${++checkCounter}`; + checkConstraints[checkName] = { + name: checkName, + value: checkValue.trim(), + }; + }); + } + + checksCount += 
Object.values(checkConstraints).length; + if (progressCallback) { + progressCallback('checks', checksCount, 'fetching'); + } + + const table = result[tableName]; + + if (!table) { + result[tableName] = { + name: tableName, + columns: {}, + compositePrimaryKeys: {}, + indexes: {}, + foreignKeys: {}, + uniqueConstraints: {}, + checkConstraints: checkConstraints, + }; + } else { + result[tableName]!.checkConstraints = checkConstraints; + } + } + + if (progressCallback) { + progressCallback('checks', checksCount, 'done'); + } + return { version: '6', dialect: 'sqlite', tables: result, + views: resultViews, enums: {}, _meta: { tables: {}, diff --git a/drizzle-kit/src/snapshotsDiffer.ts b/drizzle-kit/src/snapshotsDiffer.ts index 64ea8e465..bae4c100a 100644 --- a/drizzle-kit/src/snapshotsDiffer.ts +++ b/drizzle-kit/src/snapshotsDiffer.ts @@ -5,7 +5,6 @@ import { enum as enumType, literal, never, - number, object, record, string, @@ -22,18 +21,28 @@ import { _prepareSqliteAddColumns, JsonAddColumnStatement, JsonAlterCompositePK, + JsonAlterMySqlViewStatement, JsonAlterTableSetSchema, JsonAlterUniqueConstraint, + JsonAlterViewStatement, + JsonCreateCheckConstraint, JsonCreateCompositePK, + JsonCreateMySqlViewStatement, + JsonCreatePgViewStatement, JsonCreateReferenceStatement, + JsonCreateSqliteViewStatement, JsonCreateUniqueConstraint, + JsonDeleteCheckConstraint, JsonDeleteCompositePK, JsonDeleteUniqueConstraint, JsonDropColumnStatement, + JsonDropViewStatement, JsonReferenceStatement, JsonRenameColumnStatement, + JsonRenameViewStatement, JsonSqliteAddColumnStatement, JsonStatement, + prepareAddCheckConstraint, prepareAddCompositePrimaryKeyMySql, prepareAddCompositePrimaryKeyPg, prepareAddCompositePrimaryKeySqlite, @@ -50,38 +59,51 @@ import { prepareCreateReferencesJson, prepareCreateSchemasJson, prepareCreateSequenceJson, + prepareDeleteCheckConstraint, prepareDeleteCompositePrimaryKeyMySql, prepareDeleteCompositePrimaryKeyPg, prepareDeleteCompositePrimaryKeySqlite, 
prepareDeleteSchemasJson as prepareDropSchemasJson, prepareDeleteUniqueConstraintPg as prepareDeleteUniqueConstraint, prepareDropEnumJson, + prepareDropEnumValues, prepareDropIndexesJson, prepareDropReferencesJson, prepareDropSequenceJson, prepareDropTableJson, + prepareDropViewJson, prepareLibSQLCreateReferencesJson, prepareLibSQLDropReferencesJson, prepareMoveEnumJson, prepareMoveSequenceJson, + prepareMySqlAlterView, prepareMySqlCreateTableJson, + prepareMySqlCreateViewJson, preparePgAlterColumns, + preparePgAlterViewAddWithOptionJson, + preparePgAlterViewAlterSchemaJson, + preparePgAlterViewAlterTablespaceJson, + preparePgAlterViewAlterUsingJson, + preparePgAlterViewDropWithOptionJson, preparePgCreateIndexesJson, preparePgCreateTableJson, + preparePgCreateViewJson, prepareRenameColumns, prepareRenameEnumJson, prepareRenameSchemasJson, prepareRenameSequenceJson, prepareRenameTableJson, + prepareRenameViewJson, prepareSqliteAlterColumns, prepareSQLiteCreateTable, + prepareSqliteCreateViewJson, } from './jsonStatements'; import { Named, NamedWithSchema } from './cli/commands/migrate'; import { mapEntries, mapKeys, mapValues } from './global'; -import { MySqlSchema, MySqlSchemaSquashed, MySqlSquasher } from './serializer/mysqlSchema'; -import { PgSchema, PgSchemaSquashed, sequenceSquashed } from './serializer/pgSchema'; -import { SQLiteSchema, SQLiteSchemaSquashed, SQLiteSquasher } from './serializer/sqliteSchema'; +import { MySqlSchema, MySqlSchemaSquashed, MySqlSquasher, ViewSquashed } from './serializer/mysqlSchema'; +import { mergedViewWithOption, PgSchema, PgSchemaSquashed, sequenceSquashed, View } from './serializer/pgSchema'; +import { SQLiteSchema, SQLiteSchemaSquashed, SQLiteSquasher, View as SqliteView } from './serializer/sqliteSchema'; import { libSQLCombineStatements, sqliteCombineStatements } from './statementCombiner'; import { copy, prepareMigrationMeta } from './utils'; @@ -207,6 +229,7 @@ const tableScheme = object({ foreignKeys: record(string(), 
string()), compositePrimaryKeys: record(string(), string()).default({}), uniqueConstraints: record(string(), string()).default({}), + checkConstraints: record(string(), string()).default({}), }).strict(); export const alteredTableScheme = object({ @@ -249,22 +272,84 @@ export const alteredTableScheme = object({ __old: string(), }), ), + addedCheckConstraints: record( + string(), + string(), + ), + deletedCheckConstraints: record( + string(), + string(), + ), + alteredCheckConstraints: record( + string(), + object({ + __new: string(), + __old: string(), + }), + ), }).strict(); +const alteredViewCommon = object({ + name: string(), + alteredDefinition: object({ + __old: string(), + __new: string(), + }).strict().optional(), + alteredExisting: object({ + __old: boolean(), + __new: boolean(), + }).strict().optional(), +}); + +export const alteredPgViewSchema = alteredViewCommon.merge( + object({ + schema: string(), + deletedWithOption: mergedViewWithOption.optional(), + addedWithOption: mergedViewWithOption.optional(), + addedWith: mergedViewWithOption.optional(), + deletedWith: mergedViewWithOption.optional(), + alteredWith: mergedViewWithOption.optional(), + alteredSchema: object({ + __old: string(), + __new: string(), + }).strict().optional(), + alteredTablespace: object({ + __old: string(), + __new: string(), + }).strict().optional(), + alteredUsing: object({ + __old: string(), + __new: string(), + }).strict().optional(), + }).strict(), +); + +const alteredMySqlViewSchema = alteredViewCommon.merge( + object({ + alteredMeta: object({ + __old: string(), + __new: string(), + }).strict().optional(), + }).strict(), +); + export const diffResultScheme = object({ alteredTablesWithColumns: alteredTableScheme.array(), alteredEnums: changedEnumSchema.array(), alteredSequences: sequenceSquashed.array(), + alteredViews: alteredPgViewSchema.array(), }).strict(); export const diffResultSchemeMysql = object({ alteredTablesWithColumns: alteredTableScheme.array(), alteredEnums: 
never().array(), + alteredViews: alteredMySqlViewSchema.array(), }); export const diffResultSchemeSQLite = object({ alteredTablesWithColumns: alteredTableScheme.array(), alteredEnums: never().array(), + alteredViews: alteredViewCommon.array(), }); export type Column = TypeOf; @@ -390,6 +475,9 @@ export const applyPgSnapshotsDiff = async ( columnsResolver: ( input: ColumnsResolverInput, ) => Promise>, + viewsResolver: ( + input: ResolverInput, + ) => Promise>, prevFull: PgSchema, curFull: PgSchema, action?: 'push' | undefined, @@ -720,11 +808,49 @@ export const applyPgSnapshotsDiff = async ( }, ); - const diffResult = applyJsonDiff(columnsPatchedSnap1, json2); + const viewsDiff = diffSchemasOrTables(json1.views, json2.views); + + const { + created: createdViews, + deleted: deletedViews, + renamed: renamedViews, + moved: movedViews, + } = await viewsResolver({ + created: viewsDiff.added, + deleted: viewsDiff.deleted, + }); + + const renamesViewDic: Record = {}; + renamedViews.forEach((it) => { + renamesViewDic[`${it.from.schema}.${it.from.name}`] = { to: it.to.name, from: it.from.name }; + }); + + const movedViewDic: Record = {}; + movedViews.forEach((it) => { + movedViewDic[`${it.schemaFrom}.${it.name}`] = { to: it.schemaTo, from: it.schemaFrom }; + }); + + const viewsPatchedSnap1 = copy(columnsPatchedSnap1); + viewsPatchedSnap1.views = mapEntries( + viewsPatchedSnap1.views, + (viewKey, viewValue) => { + const rename = renamesViewDic[`${viewValue.schema}.${viewValue.name}`]; + const moved = movedViewDic[`${viewValue.schema}.${viewValue.name}`]; + + if (rename) { + viewValue.name = rename.to; + viewKey = `${viewValue.schema}.${viewValue.name}`; + } + + if (moved) viewKey = `${moved.to}.${viewValue.name}`; + + return [viewKey, viewValue]; + }, + ); + + const diffResult = applyJsonDiff(viewsPatchedSnap1, json2); - // no diffs const typedResult: DiffResult = diffResultScheme.parse(diffResult); - // const typedResult: DiffResult = {}; const jsonStatements: 
JsonStatement[] = []; @@ -791,6 +917,9 @@ export const applyPgSnapshotsDiff = async ( }); } + const jsonDeletedCheckConstraints: JsonDeleteCheckConstraint[] = []; + const jsonCreatedCheckConstraints: JsonCreateCheckConstraint[] = []; + for (let it of alteredTables) { // This part is needed to make sure that same columns in a table are not triggered for change // there is a case where orm and kit are responsible for pk name generation and one of them is not sorting name @@ -841,6 +970,8 @@ export const applyPgSnapshotsDiff = async ( let addedUniqueConstraints: JsonCreateUniqueConstraint[] = []; let deletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; let alteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; + let createCheckConstraints: JsonCreateCheckConstraint[] = []; + let deleteCheckConstraints: JsonDeleteCheckConstraint[] = []; addedUniqueConstraints = prepareAddUniqueConstraint( it.name, @@ -867,6 +998,28 @@ export const applyPgSnapshotsDiff = async ( ); } + createCheckConstraints = prepareAddCheckConstraint(it.name, it.schema, it.addedCheckConstraints); + deleteCheckConstraints = prepareDeleteCheckConstraint( + it.name, + it.schema, + it.deletedCheckConstraints, + ); + + if (it.alteredCheckConstraints && action !== 'push') { + const added: Record = {}; + const deleted: Record = {}; + + for (const k of Object.keys(it.alteredCheckConstraints)) { + added[k] = it.alteredCheckConstraints[k].__new; + deleted[k] = it.alteredCheckConstraints[k].__old; + } + createCheckConstraints.push(...prepareAddCheckConstraint(it.name, it.schema, added)); + deleteCheckConstraints.push(...prepareDeleteCheckConstraint(it.name, it.schema, deleted)); + } + + jsonCreatedCheckConstraints.push(...createCheckConstraints); + jsonDeletedCheckConstraints.push(...deleteCheckConstraints); + jsonAddedCompositePKs.push(...addedCompositePKs); jsonDeletedCompositePKs.push(...deletedCompositePKs); jsonAlteredCompositePKs.push(...alteredCompositePKs); @@ -1002,30 +1155,6 @@ export 
const applyPgSnapshotsDiff = async ( // - create table with generated // - alter - should be not triggered, but should get warning - // TODO: - // let hasEnumValuesDeletions = false; - // let enumValuesDeletions: { name: string; schema: string; values: string[] }[] = - // []; - // for (let alteredEnum of typedResult.alteredEnums) { - // if (alteredEnum.deletedValues.length > 0) { - // hasEnumValuesDeletions = true; - // enumValuesDeletions.push({ - // name: alteredEnum.name, - // schema: alteredEnum.schema, - // values: alteredEnum.deletedValues, - // }); - // } - // } - // if (hasEnumValuesDeletions) { - // console.log(error("Deletion of enum values is prohibited in Postgres - see here")); - // for(let entry of enumValuesDeletions){ - // console.log(error(`You're trying to delete ${chalk.blue(`[${entry.values.join(", ")}]`)} values from ${chalk.blue(`${entry.schema}.${entry.name}`)}`)) - // } - // } - // if (hasEnumValuesDeletions && action === "push") { - // process.exit(1); - // } - const createEnums = createdEnums.map((it) => { return prepareCreateEnumJson(it.name, it.schema, it.values); }) ?? []; @@ -1042,14 +1171,17 @@ export const applyPgSnapshotsDiff = async ( return prepareRenameEnumJson(it.from.name, it.to.name, it.to.schema); }); - // todo: block enum rename, enum value rename and enun deletion for now const jsonAlterEnumsWithAddedValues = typedResult.alteredEnums .map((it) => { return prepareAddValuesToEnumJson(it.name, it.schema, it.addedValues); }) .flat() ?? []; - /////////// + const jsonAlterEnumsWithDroppedValues = typedResult.alteredEnums + .map((it) => { + return prepareDropEnumValues(it.name, it.schema, it.deletedValues, curFull); + }) + .flat() ?? 
[]; const createSequences = createdSequences.map((it) => { return prepareCreateSequenceJson(it); @@ -1091,6 +1223,156 @@ export const applyPgSnapshotsDiff = async ( return preparePgCreateTableJson(it, curFull); }); + const createViews: JsonCreatePgViewStatement[] = []; + const dropViews: JsonDropViewStatement[] = []; + const renameViews: JsonRenameViewStatement[] = []; + const alterViews: JsonAlterViewStatement[] = []; + + createViews.push( + ...createdViews.filter((it) => !it.isExisting).map((it) => { + return preparePgCreateViewJson( + it.name, + it.schema, + it.definition!, + it.materialized, + it.withNoData, + it.with, + it.using, + it.tablespace, + ); + }), + ); + + dropViews.push( + ...deletedViews.filter((it) => !it.isExisting).map((it) => { + return prepareDropViewJson(it.name, it.schema, it.materialized); + }), + ); + + renameViews.push( + ...renamedViews.filter((it) => !it.to.isExisting && !json1.views[`${it.from.schema}.${it.from.name}`].isExisting) + .map((it) => { + return prepareRenameViewJson(it.to.name, it.from.name, it.to.schema, it.to.materialized); + }), + ); + + alterViews.push( + ...movedViews.filter((it) => + !json2.views[`${it.schemaTo}.${it.name}`].isExisting && !json1.views[`${it.schemaFrom}.${it.name}`].isExisting + ).map((it) => { + return preparePgAlterViewAlterSchemaJson( + it.schemaTo, + it.schemaFrom, + it.name, + json2.views[`${it.schemaTo}.${it.name}`].materialized, + ); + }), + ); + + const alteredViews = typedResult.alteredViews.filter((it) => !json2.views[`${it.schema}.${it.name}`].isExisting); + + for (const alteredView of alteredViews) { + const viewKey = `${alteredView.schema}.${alteredView.name}`; + + const { materialized, with: withOption, definition, withNoData, using, tablespace } = json2.views[viewKey]; + + if (alteredView.alteredExisting || (alteredView.alteredDefinition && action !== 'push')) { + dropViews.push(prepareDropViewJson(alteredView.name, alteredView.schema, materialized)); + + createViews.push( + 
preparePgCreateViewJson( + alteredView.name, + alteredView.schema, + definition!, + materialized, + withNoData, + withOption, + using, + tablespace, + ), + ); + + continue; + } + + if (alteredView.addedWithOption) { + alterViews.push( + preparePgAlterViewAddWithOptionJson( + alteredView.name, + alteredView.schema, + materialized, + alteredView.addedWithOption, + ), + ); + } + + if (alteredView.deletedWithOption) { + alterViews.push( + preparePgAlterViewDropWithOptionJson( + alteredView.name, + alteredView.schema, + materialized, + alteredView.deletedWithOption, + ), + ); + } + + if (alteredView.addedWith) { + alterViews.push( + preparePgAlterViewAddWithOptionJson( + alteredView.name, + alteredView.schema, + materialized, + alteredView.addedWith, + ), + ); + } + + if (alteredView.deletedWith) { + alterViews.push( + preparePgAlterViewDropWithOptionJson( + alteredView.name, + alteredView.schema, + materialized, + alteredView.deletedWith, + ), + ); + } + + if (alteredView.alteredWith) { + alterViews.push( + preparePgAlterViewAddWithOptionJson( + alteredView.name, + alteredView.schema, + materialized, + alteredView.alteredWith, + ), + ); + } + + if (alteredView.alteredTablespace) { + alterViews.push( + preparePgAlterViewAlterTablespaceJson( + alteredView.name, + alteredView.schema, + materialized, + alteredView.alteredTablespace.__new, + ), + ); + } + + if (alteredView.alteredUsing) { + alterViews.push( + preparePgAlterViewAlterUsingJson( + alteredView.name, + alteredView.schema, + materialized, + alteredView.alteredUsing.__new, + ), + ); + } + } + jsonStatements.push(...createSchemas); jsonStatements.push(...renameSchemas); jsonStatements.push(...createEnums); @@ -1105,12 +1387,17 @@ export const applyPgSnapshotsDiff = async ( jsonStatements.push(...createTables); + jsonStatements.push(...dropViews); + jsonStatements.push(...renameViews); + jsonStatements.push(...alterViews); + jsonStatements.push(...jsonDropTables); jsonStatements.push(...jsonSetTableSchemas); 
jsonStatements.push(...jsonRenameTables); jsonStatements.push(...jsonRenameColumnsStatements); jsonStatements.push(...jsonDeletedUniqueConstraints); + jsonStatements.push(...jsonDeletedCheckConstraints); jsonStatements.push(...jsonDroppedReferencesForAlteredTables); @@ -1133,8 +1420,12 @@ export const applyPgSnapshotsDiff = async ( jsonStatements.push(...jsonAlteredCompositePKs); jsonStatements.push(...jsonAddedUniqueConstraints); + jsonStatements.push(...jsonCreatedCheckConstraints); jsonStatements.push(...jsonAlteredUniqueConstraints); + jsonStatements.push(...jsonAlterEnumsWithDroppedValues); + + jsonStatements.push(...createViews); jsonStatements.push(...dropEnums); jsonStatements.push(...dropSequences); @@ -1169,7 +1460,25 @@ export const applyPgSnapshotsDiff = async ( return true; }); - const sqlStatements = fromJson(filteredJsonStatements, 'postgresql'); + // enum filters + // Need to find add and drop enum values in same enum and remove add values + const filteredEnumsJsonStatements = filteredJsonStatements.filter((st) => { + if (st.type === 'alter_type_add_value') { + if ( + jsonStatements.find( + (it) => + it.type === 'alter_type_drop_value' + && it.name === st.name + && it.schema === st.schema, + ) + ) { + return false; + } + } + return true; + }); + + const sqlStatements = fromJson(filteredEnumsJsonStatements, 'postgresql'); const uniqueSqlStatements: string[] = []; sqlStatements.forEach((ss) => { @@ -1190,7 +1499,7 @@ export const applyPgSnapshotsDiff = async ( const _meta = prepareMigrationMeta(rSchemas, rTables, rColumns); return { - statements: filteredJsonStatements, + statements: filteredEnumsJsonStatements, sqlStatements: uniqueSqlStatements, _meta, }; @@ -1205,6 +1514,9 @@ export const applyMysqlSnapshotsDiff = async ( columnsResolver: ( input: ColumnsResolverInput, ) => Promise>, + viewsResolver: ( + input: ResolverInput, + ) => Promise>, prevFull: MySqlSchema, curFull: MySqlSchema, action?: 'push' | undefined, @@ -1352,7 +1664,38 @@ export 
const applyMysqlSnapshotsDiff = async ( }, ); - const diffResult = applyJsonDiff(columnsPatchedSnap1, json2); + const viewsDiff = diffSchemasOrTables(json1.views, json2.views); + + const { + created: createdViews, + deleted: deletedViews, + renamed: renamedViews, // renamed or moved + } = await viewsResolver({ + created: viewsDiff.added, + deleted: viewsDiff.deleted, + }); + + const renamesViewDic: Record = {}; + renamedViews.forEach((it) => { + renamesViewDic[it.from.name] = { to: it.to.name, from: it.from.name }; + }); + + const viewsPatchedSnap1 = copy(columnsPatchedSnap1); + viewsPatchedSnap1.views = mapEntries( + viewsPatchedSnap1.views, + (viewKey, viewValue) => { + const rename = renamesViewDic[viewValue.name]; + + if (rename) { + viewValue.name = rename.to; + viewKey = rename.to; + } + + return [viewKey, viewValue]; + }, + ); + + const diffResult = applyJsonDiff(viewsPatchedSnap1, json2); const typedResult: DiffResultMysql = diffResultSchemeMysql.parse(diffResult); @@ -1387,6 +1730,9 @@ export const applyMysqlSnapshotsDiff = async ( const jsonDeletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; const jsonAlteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; + const jsonCreatedCheckConstraints: JsonCreateCheckConstraint[] = []; + const jsonDeletedCheckConstraints: JsonDeleteCheckConstraint[] = []; + const jsonRenameColumnsStatements: JsonRenameColumnStatement[] = columnRenames .map((it) => prepareRenameColumns(it.table, '', it.renames)) .flat(); @@ -1448,6 +1794,9 @@ export const applyMysqlSnapshotsDiff = async ( let deletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; let alteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; + let createdCheckConstraints: JsonCreateCheckConstraint[] = []; + let deletedCheckConstraints: JsonDeleteCheckConstraint[] = []; + addedUniqueConstraints = prepareAddUniqueConstraint( it.name, it.schema, @@ -1473,6 +1822,26 @@ export const applyMysqlSnapshotsDiff = async ( ); } + createdCheckConstraints = 
prepareAddCheckConstraint(it.name, it.schema, it.addedCheckConstraints); + deletedCheckConstraints = prepareDeleteCheckConstraint( + it.name, + it.schema, + it.deletedCheckConstraints, + ); + + // skip for push + if (it.alteredCheckConstraints && action !== 'push') { + const added: Record = {}; + const deleted: Record = {}; + + for (const k of Object.keys(it.alteredCheckConstraints)) { + added[k] = it.alteredCheckConstraints[k].__new; + deleted[k] = it.alteredCheckConstraints[k].__old; + } + createdCheckConstraints.push(...prepareAddCheckConstraint(it.name, it.schema, added)); + deletedCheckConstraints.push(...prepareDeleteCheckConstraint(it.name, it.schema, deleted)); + } + jsonAddedCompositePKs.push(...addedCompositePKs); jsonDeletedCompositePKs.push(...deletedCompositePKs); jsonAlteredCompositePKs.push(...alteredCompositePKs); @@ -1480,6 +1849,9 @@ export const applyMysqlSnapshotsDiff = async ( jsonAddedUniqueConstraints.push(...addedUniqueConstraints); jsonDeletedUniqueConstraints.push(...deletedUniqueConstraints); jsonAlteredUniqueConstraints.push(...alteredUniqueConstraints); + + jsonCreatedCheckConstraints.push(...createdCheckConstraints); + jsonDeletedCheckConstraints.push(...deletedCheckConstraints); }); const rColumns = jsonRenameColumnsStatements.map((it) => { @@ -1593,13 +1965,85 @@ export const applyMysqlSnapshotsDiff = async ( curFull.internal, ); }); + + const createViews: JsonCreateMySqlViewStatement[] = []; + const dropViews: JsonDropViewStatement[] = []; + const renameViews: JsonRenameViewStatement[] = []; + const alterViews: JsonAlterMySqlViewStatement[] = []; + + createViews.push( + ...createdViews.filter((it) => !it.isExisting).map((it) => { + return prepareMySqlCreateViewJson( + it.name, + it.definition!, + it.meta, + ); + }), + ); + + dropViews.push( + ...deletedViews.filter((it) => !it.isExisting).map((it) => { + return prepareDropViewJson(it.name); + }), + ); + + renameViews.push( + ...renamedViews.filter((it) => !it.to.isExisting && 
!json1.views[it.from.name].isExisting).map((it) => { + return prepareRenameViewJson(it.to.name, it.from.name); + }), + ); + + const alteredViews = typedResult.alteredViews.filter((it) => !json2.views[it.name].isExisting); + + for (const alteredView of alteredViews) { + const { definition, meta } = json2.views[alteredView.name]; + + if (alteredView.alteredExisting) { + dropViews.push(prepareDropViewJson(alteredView.name)); + + createViews.push( + prepareMySqlCreateViewJson( + alteredView.name, + definition!, + meta, + ), + ); + + continue; + } + + if (alteredView.alteredDefinition && action !== 'push') { + createViews.push( + prepareMySqlCreateViewJson( + alteredView.name, + definition!, + meta, + true, + ), + ); + continue; + } + + if (alteredView.alteredMeta) { + const view = curFull['views'][alteredView.name]; + alterViews.push( + prepareMySqlAlterView(view), + ); + } + } + jsonStatements.push(...jsonMySqlCreateTables); jsonStatements.push(...jsonDropTables); jsonStatements.push(...jsonRenameTables); jsonStatements.push(...jsonRenameColumnsStatements); + jsonStatements.push(...dropViews); + jsonStatements.push(...renameViews); + jsonStatements.push(...alterViews); + jsonStatements.push(...jsonDeletedUniqueConstraints); + jsonStatements.push(...jsonDeletedCheckConstraints); jsonStatements.push(...jsonDroppedReferencesForAlteredTables); @@ -1618,6 +2062,7 @@ export const applyMysqlSnapshotsDiff = async ( jsonStatements.push(...jsonCreateReferencesForCreatedTables); jsonStatements.push(...jsonCreateIndexesForCreatedTables); + jsonStatements.push(...jsonCreatedCheckConstraints); jsonStatements.push(...jsonCreatedReferencesForAlteredTables); jsonStatements.push(...jsonCreateIndexesForAllAlteredTables); @@ -1628,7 +2073,7 @@ export const applyMysqlSnapshotsDiff = async ( // jsonStatements.push(...jsonAddedCompositePKs); jsonStatements.push(...jsonAlteredCompositePKs); - jsonStatements.push(...jsonAddedUniqueConstraints); + jsonStatements.push(...createViews); 
jsonStatements.push(...jsonAlteredUniqueConstraints); @@ -1663,6 +2108,9 @@ export const applySqliteSnapshotsDiff = async ( columnsResolver: ( input: ColumnsResolverInput, ) => Promise>, + viewsResolver: ( + input: ResolverInput, + ) => Promise>, prevFull: SQLiteSchema, curFull: SQLiteSchema, action?: 'push' | undefined, @@ -1775,7 +2223,37 @@ export const applySqliteSnapshotsDiff = async ( }, ); - const diffResult = applyJsonDiff(columnsPatchedSnap1, json2); + const viewsDiff = diffSchemasOrTables(json1.views, json2.views); + + const { + created: createdViews, + deleted: deletedViews, + renamed: renamedViews, // renamed or moved + } = await viewsResolver({ + created: viewsDiff.added, + deleted: viewsDiff.deleted, + }); + + const renamesViewDic: Record = {}; + renamedViews.forEach((it) => { + renamesViewDic[it.from.name] = { to: it.to.name, from: it.from.name }; + }); + + const viewsPatchedSnap1 = copy(columnsPatchedSnap1); + viewsPatchedSnap1.views = mapEntries( + viewsPatchedSnap1.views, + (viewKey, viewValue) => { + const rename = renamesViewDic[viewValue.name]; + + if (rename) { + viewValue.name = rename.to; + } + + return [viewKey, viewValue]; + }, + ); + + const diffResult = applyJsonDiff(viewsPatchedSnap1, json2); const typedResult = diffResultSchemeSQLite.parse(diffResult); @@ -1841,6 +2319,9 @@ export const applySqliteSnapshotsDiff = async ( const jsonDeletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; const jsonAlteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; + const jsonDeletedCheckConstraints: JsonDeleteCheckConstraint[] = []; + const jsonCreatedCheckConstraints: JsonCreateCheckConstraint[] = []; + allAltered.forEach((it) => { // This part is needed to make sure that same columns in a table are not triggered for change // there is a case where orm and kit are responsible for pk name generation and one of them is not sorting name @@ -1911,6 +2392,54 @@ export const applySqliteSnapshotsDiff = async ( ); } + let createdCheckConstraints: 
JsonCreateCheckConstraint[] = []; + let deletedCheckConstraints: JsonDeleteCheckConstraint[] = []; + + addedUniqueConstraints = prepareAddUniqueConstraint( + it.name, + it.schema, + it.addedUniqueConstraints, + ); + deletedUniqueConstraints = prepareDeleteUniqueConstraint( + it.name, + it.schema, + it.deletedUniqueConstraints, + ); + if (it.alteredUniqueConstraints) { + const added: Record = {}; + const deleted: Record = {}; + for (const k of Object.keys(it.alteredUniqueConstraints)) { + added[k] = it.alteredUniqueConstraints[k].__new; + deleted[k] = it.alteredUniqueConstraints[k].__old; + } + addedUniqueConstraints.push( + ...prepareAddUniqueConstraint(it.name, it.schema, added), + ); + deletedUniqueConstraints.push( + ...prepareDeleteUniqueConstraint(it.name, it.schema, deleted), + ); + } + + createdCheckConstraints = prepareAddCheckConstraint(it.name, it.schema, it.addedCheckConstraints); + deletedCheckConstraints = prepareDeleteCheckConstraint( + it.name, + it.schema, + it.deletedCheckConstraints, + ); + + // skip for push + if (it.alteredCheckConstraints && action !== 'push') { + const added: Record = {}; + const deleted: Record = {}; + + for (const k of Object.keys(it.alteredCheckConstraints)) { + added[k] = it.alteredCheckConstraints[k].__new; + deleted[k] = it.alteredCheckConstraints[k].__old; + } + createdCheckConstraints.push(...prepareAddCheckConstraint(it.name, it.schema, added)); + deletedCheckConstraints.push(...prepareDeleteCheckConstraint(it.name, it.schema, deleted)); + } + jsonAddedCompositePKs.push(...addedCompositePKs); jsonDeletedCompositePKs.push(...deletedCompositePKs); jsonAlteredCompositePKs.push(...alteredCompositePKs); @@ -1918,6 +2447,9 @@ export const applySqliteSnapshotsDiff = async ( jsonAddedUniqueConstraints.push(...addedUniqueConstraints); jsonDeletedUniqueConstraints.push(...deletedUniqueConstraints); jsonAlteredUniqueConstraints.push(...alteredUniqueConstraints); + + jsonCreatedCheckConstraints.push(...createdCheckConstraints); + 
jsonDeletedCheckConstraints.push(...deletedCheckConstraints); }); const rColumns = jsonRenameColumnsStatements.map((it) => { @@ -2016,6 +2548,52 @@ export const applySqliteSnapshotsDiff = async ( (t) => t.type === 'delete_reference', ); + const createViews: JsonCreateSqliteViewStatement[] = []; + const dropViews: JsonDropViewStatement[] = []; + + createViews.push( + ...createdViews.filter((it) => !it.isExisting).map((it) => { + return prepareSqliteCreateViewJson( + it.name, + it.definition!, + ); + }), + ); + + dropViews.push( + ...deletedViews.filter((it) => !it.isExisting).map((it) => { + return prepareDropViewJson(it.name); + }), + ); + + dropViews.push( + ...renamedViews.filter((it) => !it.to.isExisting).map((it) => { + return prepareDropViewJson(it.from.name); + }), + ); + createViews.push( + ...renamedViews.filter((it) => !it.to.isExisting).map((it) => { + return prepareSqliteCreateViewJson(it.to.name, it.to.definition!); + }), + ); + + const alteredViews = typedResult.alteredViews.filter((it) => !json2.views[it.name].isExisting); + + for (const alteredView of alteredViews) { + const { definition } = json2.views[alteredView.name]; + + if (alteredView.alteredExisting || (alteredView.alteredDefinition && action !== 'push')) { + dropViews.push(prepareDropViewJson(alteredView.name)); + + createViews.push( + prepareSqliteCreateViewJson( + alteredView.name, + definition!, + ), + ); + } + } + const jsonStatements: JsonStatement[] = []; jsonStatements.push(...jsonCreateTables); @@ -2024,6 +2602,7 @@ export const applySqliteSnapshotsDiff = async ( jsonStatements.push(...jsonRenameColumnsStatements); jsonStatements.push(...jsonDroppedReferencesForAlteredTables); + jsonStatements.push(...jsonDeletedCheckConstraints); // Will need to drop indexes before changing any columns in table // Then should go column alternations and then index creation @@ -2037,6 +2616,8 @@ export const applySqliteSnapshotsDiff = async ( jsonStatements.push(...jsonCreateIndexesForCreatedTables); 
jsonStatements.push(...jsonCreateIndexesForAllAlteredTables); + jsonStatements.push(...jsonCreatedCheckConstraints); + jsonStatements.push(...jsonCreatedReferencesForAlteredTables); jsonStatements.push(...jsonDropColumnsStatemets); @@ -2047,6 +2628,9 @@ export const applySqliteSnapshotsDiff = async ( jsonStatements.push(...jsonAlteredUniqueConstraints); + jsonStatements.push(...dropViews); + jsonStatements.push(...createViews); + const combinedJsonStatements = sqliteCombineStatements(jsonStatements, json2, action); const sqlStatements = fromJson(combinedJsonStatements, 'sqlite'); @@ -2079,6 +2663,9 @@ export const applyLibSQLSnapshotsDiff = async ( columnsResolver: ( input: ColumnsResolverInput, ) => Promise>, + viewsResolver: ( + input: ResolverInput, + ) => Promise>, prevFull: SQLiteSchema, curFull: SQLiteSchema, action?: 'push', @@ -2190,7 +2777,37 @@ export const applyLibSQLSnapshotsDiff = async ( }, ); - const diffResult = applyJsonDiff(columnsPatchedSnap1, json2); + const viewsDiff = diffSchemasOrTables(json1.views, json2.views); + + const { + created: createdViews, + deleted: deletedViews, + renamed: renamedViews, // renamed or moved + } = await viewsResolver({ + created: viewsDiff.added, + deleted: viewsDiff.deleted, + }); + + const renamesViewDic: Record = {}; + renamedViews.forEach((it) => { + renamesViewDic[it.from.name] = { to: it.to.name, from: it.from.name }; + }); + + const viewsPatchedSnap1 = copy(columnsPatchedSnap1); + viewsPatchedSnap1.views = mapEntries( + viewsPatchedSnap1.views, + (viewKey, viewValue) => { + const rename = renamesViewDic[viewValue.name]; + + if (rename) { + viewValue.name = rename.to; + } + + return [viewKey, viewValue]; + }, + ); + + const diffResult = applyJsonDiff(viewsPatchedSnap1, json2); const typedResult = diffResultSchemeSQLite.parse(diffResult); @@ -2271,6 +2888,9 @@ export const applyLibSQLSnapshotsDiff = async ( const jsonDeletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; const 
jsonAlteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; + const jsonDeletedCheckConstraints: JsonDeleteCheckConstraint[] = []; + const jsonCreatedCheckConstraints: JsonCreateCheckConstraint[] = []; + allAltered.forEach((it) => { // This part is needed to make sure that same columns in a table are not triggered for change // there is a case where orm and kit are responsible for pk name generation and one of them is not sorting name @@ -2316,6 +2936,9 @@ export const applyLibSQLSnapshotsDiff = async ( let deletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; let alteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; + let createdCheckConstraints: JsonCreateCheckConstraint[] = []; + let deletedCheckConstraints: JsonDeleteCheckConstraint[] = []; + addedUniqueConstraints = prepareAddUniqueConstraint( it.name, it.schema, @@ -2342,6 +2965,26 @@ export const applyLibSQLSnapshotsDiff = async ( ); } + createdCheckConstraints = prepareAddCheckConstraint(it.name, it.schema, it.addedCheckConstraints); + deletedCheckConstraints = prepareDeleteCheckConstraint( + it.name, + it.schema, + it.deletedCheckConstraints, + ); + + // skip for push + if (it.alteredCheckConstraints && action !== 'push') { + const added: Record = {}; + const deleted: Record = {}; + + for (const k of Object.keys(it.alteredCheckConstraints)) { + added[k] = it.alteredCheckConstraints[k].__new; + deleted[k] = it.alteredCheckConstraints[k].__old; + } + createdCheckConstraints.push(...prepareAddCheckConstraint(it.name, it.schema, added)); + deletedCheckConstraints.push(...prepareDeleteCheckConstraint(it.name, it.schema, deleted)); + } + jsonAddedCompositePKs.push(...addedCompositePKs); jsonDeletedCompositePKs.push(...deletedCompositePKs); jsonAlteredCompositePKs.push(...alteredCompositePKs); @@ -2349,6 +2992,9 @@ export const applyLibSQLSnapshotsDiff = async ( jsonAddedUniqueConstraints.push(...addedUniqueConstraints); jsonDeletedUniqueConstraints.push(...deletedUniqueConstraints); 
jsonAlteredUniqueConstraints.push(...alteredUniqueConstraints); + + jsonCreatedCheckConstraints.push(...createdCheckConstraints); + jsonDeletedCheckConstraints.push(...deletedCheckConstraints); }); const jsonTableAlternations = allAltered @@ -2439,6 +3085,53 @@ export const applyLibSQLSnapshotsDiff = async ( (t) => t.type === 'delete_reference', ); + const createViews: JsonCreateSqliteViewStatement[] = []; + const dropViews: JsonDropViewStatement[] = []; + + createViews.push( + ...createdViews.filter((it) => !it.isExisting).map((it) => { + return prepareSqliteCreateViewJson( + it.name, + it.definition!, + ); + }), + ); + + dropViews.push( + ...deletedViews.filter((it) => !it.isExisting).map((it) => { + return prepareDropViewJson(it.name); + }), + ); + + // renames + dropViews.push( + ...renamedViews.filter((it) => !it.to.isExisting).map((it) => { + return prepareDropViewJson(it.from.name); + }), + ); + createViews.push( + ...renamedViews.filter((it) => !it.to.isExisting).map((it) => { + return prepareSqliteCreateViewJson(it.to.name, it.to.definition!); + }), + ); + + const alteredViews = typedResult.alteredViews.filter((it) => !json2.views[it.name].isExisting); + + for (const alteredView of alteredViews) { + const { definition } = json2.views[alteredView.name]; + + if (alteredView.alteredExisting || (alteredView.alteredDefinition && action !== 'push')) { + dropViews.push(prepareDropViewJson(alteredView.name)); + + createViews.push( + prepareSqliteCreateViewJson( + alteredView.name, + definition!, + ), + ); + } + } + const jsonStatements: JsonStatement[] = []; jsonStatements.push(...jsonCreateTables); @@ -2448,6 +3141,8 @@ export const applyLibSQLSnapshotsDiff = async ( jsonStatements.push(...jsonDroppedReferencesForAlteredTables); + jsonStatements.push(...jsonDeletedCheckConstraints); + // Will need to drop indexes before changing any columns in table // Then should go column alternations and then index creation 
jsonStatements.push(...jsonDropIndexesForAllAlteredTables); @@ -2459,6 +3154,10 @@ export const applyLibSQLSnapshotsDiff = async ( jsonStatements.push(...jsonCreateIndexesForCreatedTables); jsonStatements.push(...jsonCreateIndexesForAllAlteredTables); + jsonStatements.push(...jsonCreatedCheckConstraints); + + jsonStatements.push(...dropViews); + jsonStatements.push(...createViews); jsonStatements.push(...jsonCreatedReferencesForAlteredTables); diff --git a/drizzle-kit/src/sqlgenerator.ts b/drizzle-kit/src/sqlgenerator.ts index 374b30581..586175e28 100644 --- a/drizzle-kit/src/sqlgenerator.ts +++ b/drizzle-kit/src/sqlgenerator.ts @@ -1,5 +1,4 @@ import { BREAKPOINT } from './cli/commands/migrate'; -import { Driver } from './cli/validations/common'; import { JsonAddColumnStatement, JsonAddValueToEnumStatement, @@ -21,33 +20,50 @@ import { JsonAlterColumnSetPrimaryKeyStatement, JsonAlterColumnTypeStatement, JsonAlterCompositePK, + JsonAlterMySqlViewStatement, JsonAlterReferenceStatement, JsonAlterSequenceStatement, JsonAlterTableRemoveFromSchema, JsonAlterTableSetNewSchema, JsonAlterTableSetSchema, + JsonAlterViewAddWithOptionStatement, + JsonAlterViewAlterSchemaStatement, + JsonAlterViewAlterTablespaceStatement, + JsonAlterViewAlterUsingStatement, + JsonAlterViewDropWithOptionStatement, + JsonCreateCheckConstraint, JsonCreateCompositePK, JsonCreateEnumStatement, JsonCreateIndexStatement, + JsonCreateMySqlViewStatement, + JsonCreatePgViewStatement, JsonCreateReferenceStatement, JsonCreateSchema, JsonCreateSequenceStatement, + JsonCreateSqliteViewStatement, JsonCreateTableStatement, JsonCreateUniqueConstraint, + JsonDeleteCheckConstraint, JsonDeleteCompositePK, JsonDeleteReferenceStatement, JsonDeleteUniqueConstraint, JsonDropColumnStatement, + JsonDropEnumStatement, JsonDropIndexStatement, JsonDropSequenceStatement, JsonDropTableStatement, + JsonDropValueFromEnumStatement, + JsonDropViewStatement, + JsonMoveEnumStatement, JsonMoveSequenceStatement, 
JsonPgCreateIndexStatement, JsonRecreateTableStatement, JsonRenameColumnStatement, + JsonRenameEnumStatement, JsonRenameSchema, JsonRenameSequenceStatement, JsonRenameTableStatement, + JsonRenameViewStatement, JsonSqliteAddColumnStatement, JsonSqliteCreateTableStatement, JsonStatement, @@ -145,7 +161,7 @@ class PgCreateTableConvertor extends Convertor { } convert(st: JsonCreateTableStatement) { - const { tableName, schema, columns, compositePKs, uniqueConstraints } = st; + const { tableName, schema, columns, compositePKs, uniqueConstraints, checkConstraints } = st; let statement = ''; const name = schema ? `"${schema}"."${tableName}"` : `"${tableName}"`; @@ -230,6 +246,15 @@ class PgCreateTableConvertor extends Convertor { // statement += `\n`; } } + + if (typeof checkConstraints !== 'undefined' && checkConstraints.length > 0) { + for (const checkConstraint of checkConstraints) { + statement += ',\n'; + const unsquashedCheck = PgSquasher.unsquashCheck(checkConstraint); + statement += `\tCONSTRAINT "${unsquashedCheck.name}" CHECK (${unsquashedCheck.value})`; + } + } + statement += `\n);`; statement += `\n`; @@ -247,6 +272,7 @@ class MySqlCreateTableConvertor extends Convertor { tableName, columns, schema, + checkConstraints, compositePKs, uniqueConstraints, internals, @@ -307,6 +333,15 @@ class MySqlCreateTableConvertor extends Convertor { } } + if (typeof checkConstraints !== 'undefined' && checkConstraints.length > 0) { + for (const checkConstraint of checkConstraints) { + statement += ',\n'; + const unsquashedCheck = MySqlSquasher.unsquashCheck(checkConstraint); + + statement += `\tCONSTRAINT \`${unsquashedCheck.name}\` CHECK(${unsquashedCheck.value})`; + } + } + statement += `\n);`; statement += `\n`; return statement; @@ -325,6 +360,7 @@ export class SQLiteCreateTableConvertor extends Convertor { referenceData, compositePKs, uniqueConstraints, + checkConstraints, } = st; let statement = ''; @@ -384,11 +420,22 @@ export class SQLiteCreateTableConvertor extends 
Convertor { ) { for (const uniqueConstraint of uniqueConstraints) { statement += ',\n'; - const unsquashedUnique = MySqlSquasher.unsquashUnique(uniqueConstraint); + const unsquashedUnique = SQLiteSquasher.unsquashUnique(uniqueConstraint); statement += `\tCONSTRAINT ${unsquashedUnique.name} UNIQUE(\`${unsquashedUnique.columns.join(`\`,\``)}\`)`; } } + if ( + typeof checkConstraints !== 'undefined' + && checkConstraints.length > 0 + ) { + for (const check of checkConstraints) { + statement += ',\n'; + const { value, name } = SQLiteSquasher.unsquashCheck(check); + statement += `\tCONSTRAINT "${name}" CHECK(${value})`; + } + } + statement += `\n`; statement += `);`; statement += `\n`; @@ -396,6 +443,256 @@ export class SQLiteCreateTableConvertor extends Convertor { } } +class PgCreateViewConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'create_view' && dialect === 'postgresql'; + } + + convert(st: JsonCreatePgViewStatement) { + const { definition, name: viewName, schema, with: withOption, materialized, withNoData, tablespace, using } = st; + + const name = schema ? `"${schema}"."${viewName}"` : `"${viewName}"`; + + let statement = materialized ? 
`CREATE MATERIALIZED VIEW ${name}` : `CREATE VIEW ${name}`; + + if (using) statement += ` USING "${using}"`; + + const options: string[] = []; + if (withOption) { + statement += ` WITH (`; + + Object.entries(withOption).forEach(([key, value]) => { + if (typeof value === 'undefined') return; + + options.push(`${key.snake_case()} = ${value}`); + }); + + statement += options.join(', '); + + statement += `)`; + } + + if (tablespace) statement += ` TABLESPACE ${tablespace}`; + + statement += ` AS (${definition})`; + + if (withNoData) statement += ` WITH NO DATA`; + + statement += `;`; + + return statement; + } +} + +class MySqlCreateViewConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'mysql_create_view' && dialect === 'mysql'; + } + + convert(st: JsonCreateMySqlViewStatement) { + const { definition, name, algorithm, sqlSecurity, withCheckOption, replace } = st; + + let statement = `CREATE `; + statement += replace ? `OR REPLACE ` : ''; + statement += algorithm ? `ALGORITHM = ${algorithm}\n` : ''; + statement += sqlSecurity ? `SQL SECURITY ${sqlSecurity}\n` : ''; + statement += `VIEW \`${name}\` AS (${definition})`; + statement += withCheckOption ? 
`\nWITH ${withCheckOption} CHECK OPTION` : ''; + + statement += ';'; + + return statement; + } +} + +class SqliteCreateViewConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'sqlite_create_view' && (dialect === 'sqlite' || dialect === 'turso'); + } + + convert(st: JsonCreateSqliteViewStatement) { + const { definition, name } = st; + + return `CREATE VIEW \`${name}\` AS ${definition};`; + } +} + +class PgDropViewConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'drop_view' && dialect === 'postgresql'; + } + + convert(st: JsonDropViewStatement) { + const { name: viewName, schema, materialized } = st; + + const name = schema ? `"${schema}"."${viewName}"` : `"${viewName}"`; + + return `DROP${materialized ? ' MATERIALIZED' : ''} VIEW ${name};`; + } +} + +class MySqlDropViewConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'drop_view' && dialect === 'mysql'; + } + + convert(st: JsonDropViewStatement) { + const { name } = st; + + return `DROP VIEW \`${name}\`;`; + } +} + +class SqliteDropViewConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'drop_view' && (dialect === 'sqlite' || dialect === 'turso'); + } + + convert(st: JsonDropViewStatement) { + const { name } = st; + + return `DROP VIEW \`${name}\`;`; + } +} + +class MySqlAlterViewConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_mysql_view' && dialect === 'mysql'; + } + + convert(st: JsonAlterMySqlViewStatement) { + const { name, algorithm, definition, sqlSecurity, withCheckOption } = st; + + let statement = `ALTER `; + statement += algorithm ? `ALGORITHM = ${algorithm}\n` : ''; + statement += sqlSecurity ? 
`SQL SECURITY ${sqlSecurity}\n` : ''; + statement += `VIEW \`${name}\` AS ${definition}`; + statement += withCheckOption ? `\nWITH ${withCheckOption} CHECK OPTION` : ''; + + statement += ';'; + + return statement; + } +} + +class PgRenameViewConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'rename_view' && dialect === 'postgresql'; + } + + convert(st: JsonRenameViewStatement) { + const { nameFrom: from, nameTo: to, schema, materialized } = st; + + const nameFrom = `"${schema}"."${from}"`; + + return `ALTER${materialized ? ' MATERIALIZED' : ''} VIEW ${nameFrom} RENAME TO "${to}";`; + } +} + +class MySqlRenameViewConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'rename_view' && dialect === 'mysql'; + } + + convert(st: JsonRenameViewStatement) { + const { nameFrom: from, nameTo: to } = st; + + return `RENAME TABLE \`${from}\` TO \`${to}\`;`; + } +} + +class PgAlterViewSchemaConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_view_alter_schema' && dialect === 'postgresql'; + } + + convert(st: JsonAlterViewAlterSchemaStatement) { + const { fromSchema, toSchema, name, materialized } = st; + + const statement = `ALTER${ + materialized ? ' MATERIALIZED' : '' + } VIEW "${fromSchema}"."${name}" SET SCHEMA "${toSchema}";`; + + return statement; + } +} + +class PgAlterViewAddWithOptionConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_view_add_with_option' && dialect === 'postgresql'; + } + + convert(st: JsonAlterViewAddWithOptionStatement) { + const { schema, with: withOption, name, materialized } = st; + + let statement = `ALTER${materialized ? 
' MATERIALIZED' : ''} VIEW "${schema}"."${name}" SET (`; + + const options: string[] = []; + + Object.entries(withOption).forEach(([key, value]) => { + options.push(`${key.snake_case()} = ${value}`); + }); + + statement += options.join(', '); + + statement += `);`; + + return statement; + } +} + +class PgAlterViewDropWithOptionConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_view_drop_with_option' && dialect === 'postgresql'; + } + + convert(st: JsonAlterViewDropWithOptionStatement) { + const { schema, name, materialized, with: withOptions } = st; + + let statement = `ALTER${materialized ? ' MATERIALIZED' : ''} VIEW "${schema}"."${name}" RESET (`; + + const options: string[] = []; + + Object.entries(withOptions).forEach(([key, value]) => { + options.push(`${key.snake_case()}`); + }); + + statement += options.join(', '); + + statement += ');'; + + return statement; + } +} + +class PgAlterViewAlterTablespaceConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_view_alter_tablespace' && dialect === 'postgresql'; + } + + convert(st: JsonAlterViewAlterTablespaceStatement) { + const { schema, name, toTablespace } = st; + + const statement = `ALTER MATERIALIZED VIEW "${schema}"."${name}" SET TABLESPACE ${toTablespace};`; + + return statement; + } +} + +class PgAlterViewAlterUsingConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_view_alter_using' && dialect === 'postgresql'; + } + + convert(st: JsonAlterViewAlterUsingStatement) { + const { schema, name, toUsing } = st; + + const statement = `ALTER MATERIALIZED VIEW "${schema}"."${name}" SET ACCESS METHOD "${toUsing}";`; + + return statement; + } +} + class PgAlterTableAlterColumnSetGenerated extends Convertor { override can(statement: JsonStatement, dialect: Dialect): boolean { return ( @@ -573,6 
+870,38 @@ class PgAlterTableDropUniqueConstraintConvertor extends Convertor { } } +class PgAlterTableAddCheckConstraintConvertor extends Convertor { + can(statement: JsonCreateCheckConstraint, dialect: Dialect): boolean { + return ( + statement.type === 'create_check_constraint' && dialect === 'postgresql' + ); + } + convert(statement: JsonCreateCheckConstraint): string { + const unsquashed = PgSquasher.unsquashCheck(statement.data); + + const tableNameWithSchema = statement.schema + ? `"${statement.schema}"."${statement.tableName}"` + : `"${statement.tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT "${unsquashed.name}" CHECK (${unsquashed.value});`; + } +} + +class PgAlterTableDeleteCheckConstraintConvertor extends Convertor { + can(statement: JsonDeleteCheckConstraint, dialect: Dialect): boolean { + return ( + statement.type === 'delete_check_constraint' && dialect === 'postgresql' + ); + } + convert(statement: JsonDeleteCheckConstraint): string { + const tableNameWithSchema = statement.schema + ? 
`"${statement.schema}"."${statement.tableName}"` + : `"${statement.tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT "${statement.constraintName}";`; + } +} + class MySQLAlterTableAddUniqueConstraintConvertor extends Convertor { can(statement: JsonCreateUniqueConstraint, dialect: Dialect): boolean { return statement.type === 'create_unique_constraint' && dialect === 'mysql'; @@ -597,6 +926,33 @@ class MySQLAlterTableDropUniqueConstraintConvertor extends Convertor { } } +class MySqlAlterTableAddCheckConstraintConvertor extends Convertor { + can(statement: JsonCreateCheckConstraint, dialect: Dialect): boolean { + return ( + statement.type === 'create_check_constraint' && dialect === 'mysql' + ); + } + convert(statement: JsonCreateCheckConstraint): string { + const unsquashed = MySqlSquasher.unsquashCheck(statement.data); + const { tableName } = statement; + + return `ALTER TABLE \`${tableName}\` ADD CONSTRAINT \`${unsquashed.name}\` CHECK (${unsquashed.value});`; + } +} + +class MySqlAlterTableDeleteCheckConstraintConvertor extends Convertor { + can(statement: JsonDeleteCheckConstraint, dialect: Dialect): boolean { + return ( + statement.type === 'delete_check_constraint' && dialect === 'mysql' + ); + } + convert(statement: JsonDeleteCheckConstraint): string { + const { tableName } = statement; + + return `ALTER TABLE \`${tableName}\` DROP CONSTRAINT \`${statement.constraintName}\`;`; + } +} + class CreatePgSequenceConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'create_sequence' && dialect === 'postgresql'; @@ -694,22 +1050,39 @@ class CreateTypeEnumConvertor extends Convertor { convert(st: JsonCreateEnumStatement) { const { name, values, schema } = st; - const tableNameWithSchema = schema ? `"${schema}"."${name}"` : `"${name}"`; + const enumNameWithSchema = schema ? 
`"${schema}"."${name}"` : `"${name}"`; let valuesStatement = '('; valuesStatement += values.map((it) => `'${it}'`).join(', '); valuesStatement += ')'; - let statement = 'DO $$ BEGIN'; - statement += '\n'; - statement += ` CREATE TYPE ${tableNameWithSchema} AS ENUM${valuesStatement};`; - statement += '\n'; - statement += 'EXCEPTION'; - statement += '\n'; - statement += ' WHEN duplicate_object THEN null;'; - statement += '\n'; - statement += 'END $$;'; - statement += '\n'; + // TODO do we need this? + // let statement = 'DO $$ BEGIN'; + // statement += '\n'; + let statement = `CREATE TYPE ${enumNameWithSchema} AS ENUM${valuesStatement};`; + // statement += '\n'; + // statement += 'EXCEPTION'; + // statement += '\n'; + // statement += ' WHEN duplicate_object THEN null;'; + // statement += '\n'; + // statement += 'END $$;'; + // statement += '\n'; + return statement; + } +} + +class DropTypeEnumConvertor extends Convertor { + can(statement: JsonStatement): boolean { + return statement.type === 'drop_type_enum'; + } + + convert(st: JsonDropEnumStatement) { + const { name, schema } = st; + + const enumNameWithSchema = schema ? `"${schema}"."${name}"` : `"${name}"`; + + let statement = `DROP TYPE ${enumNameWithSchema};`; + return statement; } } @@ -720,9 +1093,74 @@ class AlterTypeAddValueConvertor extends Convertor { } convert(st: JsonAddValueToEnumStatement) { - const { name, schema, value } = st; - const schemaPrefix = schema && schema !== 'public' ? `"${schema}".` : ''; - return `ALTER TYPE ${schemaPrefix}"${name}" ADD VALUE '${value}';`; + const { name, schema, value, before } = st; + + const enumNameWithSchema = schema ? `"${schema}"."${name}"` : `"${name}"`; + + return `ALTER TYPE ${enumNameWithSchema} ADD VALUE '${value}'${before.length ? 
` BEFORE '${before}'` : ''};`; + } +} + +class AlterTypeSetSchemaConvertor extends Convertor { + can(statement: JsonStatement): boolean { + return statement.type === 'move_type_enum'; + } + + convert(st: JsonMoveEnumStatement) { + const { name, schemaFrom, schemaTo } = st; + + const enumNameWithSchema = schemaFrom ? `"${schemaFrom}"."${name}"` : `"${name}"`; + + return `ALTER TYPE ${enumNameWithSchema} SET SCHEMA "${schemaTo}";`; + } +} + +class AlterRenameTypeConvertor extends Convertor { + can(statement: JsonStatement): boolean { + return statement.type === 'rename_type_enum'; + } + + convert(st: JsonRenameEnumStatement) { + const { nameTo, nameFrom, schema } = st; + + const enumNameWithSchema = schema ? `"${schema}"."${nameFrom}"` : `"${nameFrom}"`; + + return `ALTER TYPE ${enumNameWithSchema} RENAME TO "${nameTo}";`; + } +} + +class AlterTypeDropValueConvertor extends Convertor { + can(statement: JsonStatement): boolean { + return statement.type === 'alter_type_drop_value'; + } + + convert(st: JsonDropValueFromEnumStatement) { + const { columnsWithEnum, name, newValues, schema } = st; + + const statements: string[] = []; + + for (const withEnum of columnsWithEnum) { + statements.push( + `ALTER TABLE "${withEnum.schema}"."${withEnum.table}" ALTER COLUMN "${withEnum.column}" SET DATA TYPE text;`, + ); + } + + statements.push(new DropTypeEnumConvertor().convert({ name: name, schema, type: 'drop_type_enum' })); + + statements.push(new CreateTypeEnumConvertor().convert({ + name: name, + schema: schema, + values: newValues, + type: 'create_type_enum', + })); + + for (const withEnum of columnsWithEnum) { + statements.push( + `ALTER TABLE "${withEnum.schema}"."${withEnum.table}" ALTER COLUMN "${withEnum.column}" SET DATA TYPE "${schema}"."${name}" USING "${withEnum.column}"::"${schema}"."${name}";`, + ); + } + + return statements; } } @@ -1450,7 +1888,9 @@ export class LibSQLModifyColumn extends Convertor { || statement.type === 'alter_table_alter_column_drop_notnull' 
|| statement.type === 'alter_table_alter_column_set_notnull' || statement.type === 'alter_table_alter_column_set_default' - || statement.type === 'alter_table_alter_column_drop_default') + || statement.type === 'alter_table_alter_column_drop_default' + || statement.type === 'create_check_constraint' + || statement.type === 'delete_check_constraint') && dialect === 'turso' ); } @@ -2368,7 +2808,7 @@ class SQLiteRecreateTableConvertor extends Convertor { } convert(statement: JsonRecreateTableStatement): string | string[] { - const { tableName, columns, compositePKs, referenceData } = statement; + const { tableName, columns, compositePKs, referenceData, checkConstraints } = statement; const columnNames = columns.map((it) => `"${it.name}"`).join(', '); const newTableName = `__new_${tableName}`; @@ -2377,6 +2817,12 @@ class SQLiteRecreateTableConvertor extends Convertor { sqlStatements.push(`PRAGMA foreign_keys=OFF;`); + // map all possible variants + const mappedCheckConstraints: string[] = checkConstraints.map((it) => + it.replaceAll(`"${tableName}".`, `"${newTableName}".`).replaceAll(`\`${tableName}\`.`, `\`${newTableName}\`.`) + .replaceAll(`${tableName}.`, `${newTableName}.`).replaceAll(`'${tableName}'.`, `'${newTableName}'.`) + ); + // create new table sqlStatements.push( new SQLiteCreateTableConvertor().convert({ @@ -2385,6 +2831,7 @@ class SQLiteRecreateTableConvertor extends Convertor { columns, referenceData, compositePKs, + checkConstraints: mappedCheckConstraints, }), ); @@ -2428,13 +2875,18 @@ class LibSQLRecreateTableConvertor extends Convertor { } convert(statement: JsonRecreateTableStatement): string[] { - const { tableName, columns, compositePKs, referenceData } = statement; + const { tableName, columns, compositePKs, referenceData, checkConstraints } = statement; const columnNames = columns.map((it) => `"${it.name}"`).join(', '); const newTableName = `__new_${tableName}`; const sqlStatements: string[] = []; + const mappedCheckConstraints: string[] = 
checkConstraints.map((it) => + it.replaceAll(`"${tableName}".`, `"${newTableName}".`).replaceAll(`\`${tableName}\`.`, `\`${newTableName}\`.`) + .replaceAll(`${tableName}.`, `${newTableName}.`).replaceAll(`'${tableName}'.`, `\`${newTableName}\`.`) + ); + sqlStatements.push(`PRAGMA foreign_keys=OFF;`); // create new table @@ -2445,6 +2897,7 @@ class LibSQLRecreateTableConvertor extends Convertor { columns, referenceData, compositePKs, + checkConstraints: mappedCheckConstraints, }), ); @@ -2486,7 +2939,29 @@ convertors.push(new SQLiteCreateTableConvertor()); convertors.push(new SQLiteRecreateTableConvertor()); convertors.push(new LibSQLRecreateTableConvertor()); +convertors.push(new PgCreateViewConvertor()); +convertors.push(new PgDropViewConvertor()); +convertors.push(new PgRenameViewConvertor()); +convertors.push(new PgAlterViewSchemaConvertor()); +convertors.push(new PgAlterViewAddWithOptionConvertor()); +convertors.push(new PgAlterViewDropWithOptionConvertor()); +convertors.push(new PgAlterViewAlterTablespaceConvertor()); +convertors.push(new PgAlterViewAlterUsingConvertor()); + +convertors.push(new MySqlCreateViewConvertor()); +convertors.push(new MySqlDropViewConvertor()); +convertors.push(new MySqlRenameViewConvertor()); +convertors.push(new MySqlAlterViewConvertor()); + +convertors.push(new SqliteCreateViewConvertor()); +convertors.push(new SqliteDropViewConvertor()); + convertors.push(new CreateTypeEnumConvertor()); +convertors.push(new DropTypeEnumConvertor()); +convertors.push(new AlterTypeAddValueConvertor()); +convertors.push(new AlterTypeSetSchemaConvertor()); +convertors.push(new AlterRenameTypeConvertor()); +convertors.push(new AlterTypeDropValueConvertor()); convertors.push(new CreatePgSequenceConvertor()); convertors.push(new DropPgSequenceConvertor()); @@ -2519,6 +2994,11 @@ convertors.push(new PgAlterTableAlterColumnSetTypeConvertor()); convertors.push(new PgAlterTableAddUniqueConstraintConvertor()); convertors.push(new 
PgAlterTableDropUniqueConstraintConvertor()); +convertors.push(new PgAlterTableAddCheckConstraintConvertor()); +convertors.push(new PgAlterTableDeleteCheckConstraintConvertor()); +convertors.push(new MySqlAlterTableAddCheckConstraintConvertor()); +convertors.push(new MySqlAlterTableDeleteCheckConstraintConvertor()); + convertors.push(new MySQLAlterTableAddUniqueConstraintConvertor()); convertors.push(new MySQLAlterTableDropUniqueConstraintConvertor()); @@ -2530,8 +3010,6 @@ convertors.push(new PgDropIndexConvertor()); convertors.push(new SqliteDropIndexConvertor()); convertors.push(new MySqlDropIndexConvertor()); -convertors.push(new AlterTypeAddValueConvertor()); - convertors.push(new PgAlterTableAlterColumnSetPrimaryKeyConvertor()); convertors.push(new PgAlterTableAlterColumnDropPrimaryKeyConvertor()); convertors.push(new PgAlterTableAlterColumnSetNotNullConvertor()); diff --git a/drizzle-kit/src/statementCombiner.ts b/drizzle-kit/src/statementCombiner.ts index 2f7b6ddbe..f3ca9789c 100644 --- a/drizzle-kit/src/statementCombiner.ts +++ b/drizzle-kit/src/statementCombiner.ts @@ -10,7 +10,7 @@ export const prepareLibSQLRecreateTable = ( table: SQLiteSchemaSquashed['tables'][keyof SQLiteSchemaSquashed['tables']], action?: 'push', ): (JsonRecreateTableStatement | JsonCreateIndexStatement)[] => { - const { name, columns, uniqueConstraints, indexes } = table; + const { name, columns, uniqueConstraints, indexes, checkConstraints } = table; const composites: string[][] = Object.values(table.compositePrimaryKeys).map( (it) => SQLiteSquasher.unsquashPK(it), @@ -29,6 +29,7 @@ export const prepareLibSQLRecreateTable = ( compositePKs: composites, referenceData: fks, uniqueConstraints: Object.values(uniqueConstraints), + checkConstraints: Object.values(checkConstraints), }, ]; @@ -42,7 +43,7 @@ export const prepareSQLiteRecreateTable = ( table: SQLiteSchemaSquashed['tables'][keyof SQLiteSchemaSquashed['tables']], action?: 'push', ): JsonStatement[] => { - const { name, columns, 
uniqueConstraints, indexes } = table; + const { name, columns, uniqueConstraints, indexes, checkConstraints } = table; const composites: string[][] = Object.values(table.compositePrimaryKeys).map( (it) => SQLiteSquasher.unsquashPK(it), @@ -61,6 +62,7 @@ export const prepareSQLiteRecreateTable = ( compositePKs: composites, referenceData: fks, uniqueConstraints: Object.values(uniqueConstraints), + checkConstraints: Object.values(checkConstraints), }, ]; @@ -86,6 +88,8 @@ export const libSQLCombineStatements = ( || statement.type === 'create_composite_pk' || statement.type === 'alter_composite_pk' || statement.type === 'delete_composite_pk' + || statement.type === 'create_check_constraint' + || statement.type === 'delete_check_constraint' ) { const tableName = statement.tableName; @@ -122,16 +126,6 @@ export const libSQLCombineStatements = ( ) { const { tableName, columnName, columnPk } = statement; - // const columnIsPartOfUniqueIndex = Object.values( - // json2.tables[tableName].indexes, - // ).some((it) => { - // const unsquashIndex = SQLiteSquasher.unsquashIdx(it); - - // return ( - // unsquashIndex.columns.includes(columnName) && unsquashIndex.isUnique - // ); - // }); - const columnIsPartOfForeignKey = Object.values( json2.tables[tableName].foreignKeys, ).some((it) => { @@ -332,19 +326,21 @@ export const sqliteCombineStatements = ( || statement.type === 'delete_composite_pk' || statement.type === 'create_unique_constraint' || statement.type === 'delete_unique_constraint' + || statement.type === 'create_check_constraint' + || statement.type === 'delete_check_constraint' ) { const tableName = statement.tableName; const statementsForTable = newStatements[tableName]; if (!statementsForTable) { - newStatements[tableName] = prepareLibSQLRecreateTable(json2.tables[tableName], action); + newStatements[tableName] = prepareSQLiteRecreateTable(json2.tables[tableName], action); continue; } if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { const 
wasRename = statementsForTable.some(({ type }) => type === 'rename_table'); - const preparedStatements = prepareLibSQLRecreateTable(json2.tables[tableName], action); + const preparedStatements = prepareSQLiteRecreateTable(json2.tables[tableName], action); if (wasRename) { newStatements[tableName].push(...preparedStatements); @@ -364,13 +360,13 @@ export const sqliteCombineStatements = ( const statementsForTable = newStatements[tableName]; if (!statementsForTable) { - newStatements[tableName] = prepareLibSQLRecreateTable(json2.tables[tableName], action); + newStatements[tableName] = prepareSQLiteRecreateTable(json2.tables[tableName], action); continue; } if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { const wasRename = statementsForTable.some(({ type }) => type === 'rename_table'); - const preparedStatements = prepareLibSQLRecreateTable(json2.tables[tableName], action); + const preparedStatements = prepareSQLiteRecreateTable(json2.tables[tableName], action); if (wasRename) { newStatements[tableName].push(...preparedStatements); @@ -409,7 +405,7 @@ export const sqliteCombineStatements = ( if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { const wasRename = statementsForTable.some(({ type }) => type === 'rename_table'); - const preparedStatements = prepareLibSQLRecreateTable(json2.tables[tableName], action); + const preparedStatements = prepareSQLiteRecreateTable(json2.tables[tableName], action); if (wasRename) { newStatements[tableName].push(...preparedStatements); diff --git a/drizzle-kit/tests/introspect/libsql.test.ts b/drizzle-kit/tests/introspect/libsql.test.ts new file mode 100644 index 000000000..9211989ca --- /dev/null +++ b/drizzle-kit/tests/introspect/libsql.test.ts @@ -0,0 +1,35 @@ +import { createClient } from '@libsql/client'; +import { sql } from 'drizzle-orm'; +import { int, sqliteTable, sqliteView } from 'drizzle-orm/sqlite-core'; +import fs from 'fs'; +import { introspectLibSQLToFile, 
introspectMySQLToFile, introspectSQLiteToFile } from 'tests/schemaDiffer'; +import { expect, test } from 'vitest'; + +if (!fs.existsSync('tests/introspect/libsql')) { + fs.mkdirSync('tests/introspect/libsql'); +} + +test('view #1', async () => { + const turso = createClient({ + url: ':memory:', + }); + + const users = sqliteTable('users', { id: int('id') }); + const testView = sqliteView('some_view', { id: int('id') }).as( + sql`SELECT * FROM ${users}`, + ); + + const schema = { + users: users, + testView, + }; + + const { statements, sqlStatements } = await introspectLibSQLToFile( + turso, + schema, + 'view-1', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); diff --git a/drizzle-kit/tests/introspect/mysql.test.ts b/drizzle-kit/tests/introspect/mysql.test.ts index e35b34f40..024300bea 100644 --- a/drizzle-kit/tests/introspect/mysql.test.ts +++ b/drizzle-kit/tests/introspect/mysql.test.ts @@ -1,6 +1,6 @@ import Docker from 'dockerode'; import { SQL, sql } from 'drizzle-orm'; -import { char, int, mysqlTable, text, varchar } from 'drizzle-orm/mysql-core'; +import { char, check, int, mysqlTable, mysqlView, serial, text, varchar } from 'drizzle-orm/mysql-core'; import * as fs from 'fs'; import getPort from 'get-port'; import { Connection, createConnection } from 'mysql2/promise'; @@ -165,3 +165,78 @@ test('Default value of character type column: varchar', async () => { await client.query(`drop table users;`); }); + +test('introspect checks', async () => { + const schema = { + users: mysqlTable('users', { + id: serial('id'), + name: varchar('name', { length: 255 }), + age: int('age'), + }, (table) => ({ + someCheck: check('some_check', sql`${table.age} > 21`), + })), + }; + + const { statements, sqlStatements } = await introspectMySQLToFile( + client, + schema, + 'introspect-checks', + 'drizzle', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); + + await client.query(`drop table users;`); +}); + 
+test('view #1', async () => { + const users = mysqlTable('users', { id: int('id') }); + const testView = mysqlView('some_view', { id: int('id') }).as( + sql`select \`drizzle\`.\`users\`.\`id\` AS \`id\` from \`drizzle\`.\`users\``, + ); + + const schema = { + users: users, + testView, + }; + + const { statements, sqlStatements } = await introspectMySQLToFile( + client, + schema, + 'view-1', + 'drizzle', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); + + await client.query(`drop view some_view;`); + await client.query(`drop table users;`); +}); + +test('view #2', async () => { + // await client.query(`drop view some_view;`); + + const users = mysqlTable('some_users', { id: int('id') }); + const testView = mysqlView('some_view', { id: int('id') }).algorithm('temptable').sqlSecurity('definer').as( + sql`SELECT * FROM ${users}`, + ); + + const schema = { + users: users, + testView, + }; + + const { statements, sqlStatements } = await introspectMySQLToFile( + client, + schema, + 'view-2', + 'drizzle', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); + + await client.query(`drop table some_users;`); +}); diff --git a/drizzle-kit/tests/introspect/pg.test.ts b/drizzle-kit/tests/introspect/pg.test.ts index e65c0f904..bd8b15ab9 100644 --- a/drizzle-kit/tests/introspect/pg.test.ts +++ b/drizzle-kit/tests/introspect/pg.test.ts @@ -5,6 +5,7 @@ import { bigserial, boolean, char, + check, cidr, date, doublePrecision, @@ -17,8 +18,10 @@ import { macaddr8, numeric, pgEnum, + pgMaterializedView, pgSchema, pgTable, + pgView, real, serial, smallint, @@ -29,9 +32,14 @@ import { uuid, varchar, } from 'drizzle-orm/pg-core'; +import fs from 'fs'; import { introspectPgToFile } from 'tests/schemaDiffer'; import { expect, test } from 'vitest'; +if (!fs.existsSync('tests/introspect/postgres')) { + fs.mkdirSync('tests/introspect/postgres'); +} + test('basic introspect test', async () => { const client = new PGlite(); @@ 
-227,6 +235,8 @@ test('instrospect all column types', async () => { smallint: smallint('smallint').default(10), integer: integer('integer').default(10), numeric: numeric('numeric', { precision: 3, scale: 1 }).default('99.9'), + numeric2: numeric('numeric2', { precision: 1, scale: 1 }).default('99.9'), + numeric3: numeric('numeric3').default('99.9'), bigint: bigint('bigint', { mode: 'number' }).default(100), boolean: boolean('boolean').default(true), text: text('test').default('abc'), @@ -405,3 +415,215 @@ test('introspect enum with similar name to native type', async () => { expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); + +test('introspect checks', async () => { + const client = new PGlite(); + + const schema = { + users: pgTable('users', { + id: serial('id'), + name: varchar('name'), + age: integer('age'), + }, (table) => ({ + someCheck: check('some_check', sql`${table.age} > 21`), + })), + }; + + const { statements, sqlStatements } = await introspectPgToFile( + client, + schema, + 'introspect-checks', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('introspect checks from different schemas with same names', async () => { + const client = new PGlite(); + + const mySchema = pgSchema('schema2'); + const schema = { + mySchema, + users: pgTable('users', { + id: serial('id'), + age: integer('age'), + }, (table) => ({ + someCheck: check('some_check', sql`${table.age} > 21`), + })), + usersInMySchema: mySchema.table('users', { + id: serial('id'), + age: integer('age'), + }, (table) => ({ + someCheck: check('some_check', sql`${table.age} < 1`), + })), + }; + + const { statements, sqlStatements } = await introspectPgToFile( + client, + schema, + 'introspect-checks-diff-schema-same-names', + ['public', 'schema2'], + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('introspect view #1', async () => { + const client = new PGlite(); + + const users = 
pgTable('users', { + id: serial('id').primaryKey().notNull(), + name: varchar('users'), + }); + + const view = pgView('some_view').as((qb) => qb.select().from(users)); + const schema = { + view, + users, + }; + + const { statements, sqlStatements } = await introspectPgToFile( + client, + schema, + 'introspect-view', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('introspect view #2', async () => { + const client = new PGlite(); + + const users = pgTable('users', { + id: serial('id').primaryKey().notNull(), + name: varchar('users'), + }); + + const view = pgView('some_view', { id: integer('asd') }).with({ checkOption: 'cascaded' }).as( + sql`SELECT * FROM ${users}`, + ); + const schema = { + view, + users, + }; + + const { statements, sqlStatements } = await introspectPgToFile( + client, + schema, + 'introspect-view-2', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('introspect view in other schema', async () => { + const client = new PGlite(); + + const newSchema = pgSchema('new_schema'); + const users = pgTable('users', { + id: serial('id').primaryKey().notNull(), + name: varchar('users'), + }); + + const view = newSchema.view('some_view', { id: integer('asd') }).with({ checkOption: 'cascaded' }).as( + sql`SELECT * FROM ${users}`, + ); + const schema = { + view, + users, + newSchema, + }; + + const { statements, sqlStatements } = await introspectPgToFile( + client, + schema, + 'introspect-view-in-other-schema', + ['new_schema'], + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('introspect materialized view in other schema', async () => { + const client = new PGlite(); + + const newSchema = pgSchema('new_schema'); + const users = pgTable('users', { + id: serial('id').primaryKey().notNull(), + name: varchar('users'), + }); + + const view = newSchema.materializedView('some_view', { id: integer('asd') }).with({ 
autovacuumEnabled: true }).as( + sql`SELECT * FROM ${users}`, + ); + const schema = { + view, + users, + newSchema, + }; + + const { statements, sqlStatements } = await introspectPgToFile( + client, + schema, + 'introspect-mat-view-in-other-schema', + ['new_schema'], + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('introspect materialized view #1', async () => { + const client = new PGlite(); + + const users = pgTable('users', { + id: serial('id').primaryKey().notNull(), + name: varchar('users'), + }); + + const view = pgMaterializedView('some_view').using('heap').withNoData().as((qb) => qb.select().from(users)); + const schema = { + view, + users, + }; + + const { statements, sqlStatements } = await introspectPgToFile( + client, + schema, + 'introspect-materialized-view', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('introspect materialized view #2', async () => { + const client = new PGlite(); + + const users = pgTable('users', { + id: serial('id').primaryKey().notNull(), + name: varchar('users'), + }); + + const view = pgMaterializedView('some_view', { id: integer('asd') }).with({ autovacuumFreezeMinAge: 1 }).as( + sql`SELECT * FROM ${users}`, + ); + const schema = { + view, + users, + }; + + const { statements, sqlStatements } = await introspectPgToFile( + client, + schema, + 'introspect-materialized-view-2', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); diff --git a/drizzle-kit/tests/introspect/sqlite.test.ts b/drizzle-kit/tests/introspect/sqlite.test.ts index 18473e87b..89cdf590e 100644 --- a/drizzle-kit/tests/introspect/sqlite.test.ts +++ b/drizzle-kit/tests/introspect/sqlite.test.ts @@ -1,6 +1,6 @@ import Database from 'better-sqlite3'; import { SQL, sql } from 'drizzle-orm'; -import { int, sqliteTable, text } from 'drizzle-orm/sqlite-core'; +import { check, int, sqliteTable, sqliteView, text } from 
'drizzle-orm/sqlite-core'; import * as fs from 'fs'; import { introspectSQLiteToFile } from 'tests/schemaDiffer'; import { expect, test } from 'vitest'; @@ -55,3 +55,49 @@ test('generated always column virtual: link to another column', async () => { expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); + +test('introspect checks', async () => { + const sqlite = new Database(':memory:'); + + const schema = { + users: sqliteTable('users', { + id: int('id'), + name: text('name'), + age: int('age'), + }, (table) => ({ + someCheck: check('some_check', sql`${table.age} > 21`), + })), + }; + + const { statements, sqlStatements } = await introspectSQLiteToFile( + sqlite, + schema, + 'introspect-checks', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('view #1', async () => { + const sqlite = new Database(':memory:'); + + const users = sqliteTable('users', { id: int('id') }); + const testView = sqliteView('some_view', { id: int('id') }).as( + sql`SELECT * FROM ${users}`, + ); + + const schema = { + users: users, + testView, + }; + + const { statements, sqlStatements } = await introspectSQLiteToFile( + sqlite, + schema, + 'view-1', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); diff --git a/drizzle-kit/tests/libsql-checks.test.ts b/drizzle-kit/tests/libsql-checks.test.ts new file mode 100644 index 000000000..2a3abf2dc --- /dev/null +++ b/drizzle-kit/tests/libsql-checks.test.ts @@ -0,0 +1,308 @@ +import { sql } from 'drizzle-orm'; +import { check, int, sqliteTable, text } from 'drizzle-orm/sqlite-core'; +import { expect, test } from 'vitest'; +import { diffTestSchemasLibSQL } from './schemaDiffer'; + +test('create table with check', async (t) => { + const to = { + users: sqliteTable('users', { + id: int('id').primaryKey(), + age: int('age'), + }, (table) => ({ + checkConstraint: check('some_check_name', sql`${table.age} > 21`), + })), + }; + + const { 
sqlStatements, statements } = await diffTestSchemasLibSQL({}, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'sqlite_create_table', + tableName: 'users', + columns: [ + { + name: 'id', + type: 'integer', + notNull: true, + primaryKey: true, + autoincrement: false, + }, + { + name: 'age', + type: 'integer', + notNull: false, + primaryKey: false, + autoincrement: false, + }, + ], + compositePKs: [], + checkConstraints: ['some_check_name;"users"."age" > 21'], + referenceData: [], + uniqueConstraints: [], + }); + + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`CREATE TABLE \`users\` ( +\t\`id\` integer PRIMARY KEY NOT NULL, +\t\`age\` integer, +\tCONSTRAINT "some_check_name" CHECK("users"."age" > 21) +);\n`); +}); + +test('add check contraint to existing table', async (t) => { + const to = { + users: sqliteTable('users', { + id: int('id').primaryKey(), + age: int('age'), + }, (table) => ({ + checkConstraint: check('some_check_name', sql`${table.age} > 21`), + })), + }; + + const from = { + users: sqliteTable('users', { + id: int('id').primaryKey(), + age: int('age'), + }), + }; + + const { sqlStatements, statements } = await diffTestSchemasLibSQL(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + columns: [ + { + autoincrement: false, + generated: undefined, + name: 'id', + notNull: true, + primaryKey: true, + type: 'integer', + }, + { + autoincrement: false, + generated: undefined, + name: 'age', + notNull: false, + primaryKey: false, + type: 'integer', + }, + ], + compositePKs: [], + referenceData: [], + tableName: 'users', + type: 'recreate_table', + uniqueConstraints: [], + checkConstraints: ['some_check_name;"users"."age" > 21'], + }); + + expect(sqlStatements.length).toBe(6); + expect(sqlStatements[0]).toBe('PRAGMA foreign_keys=OFF;'); + expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` ( +\t\`id\` integer PRIMARY KEY NOT NULL, 
+\t\`age\` integer, +\tCONSTRAINT "some_check_name" CHECK("__new_users"."age" > 21) +);\n`); + expect(sqlStatements[2]).toBe(`INSERT INTO \`__new_users\`("id", "age") SELECT "id", "age" FROM \`users\`;`); + expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`); + expect(sqlStatements[4]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); + expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); +}); + +test('drop check contraint to existing table', async (t) => { + const from = { + users: sqliteTable('users', { + id: int('id').primaryKey(), + age: int('age'), + }, (table) => ({ + checkConstraint: check('some_check_name', sql`${table.age} > 21`), + })), + }; + + const to = { + users: sqliteTable('users', { + id: int('id').primaryKey(), + age: int('age'), + }), + }; + + const { sqlStatements, statements } = await diffTestSchemasLibSQL(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + columns: [ + { + autoincrement: false, + generated: undefined, + name: 'id', + notNull: true, + primaryKey: true, + type: 'integer', + }, + { + autoincrement: false, + generated: undefined, + name: 'age', + notNull: false, + primaryKey: false, + type: 'integer', + }, + ], + compositePKs: [], + referenceData: [], + tableName: 'users', + type: 'recreate_table', + uniqueConstraints: [], + checkConstraints: [], + }); + + expect(sqlStatements.length).toBe(6); + expect(sqlStatements[0]).toBe('PRAGMA foreign_keys=OFF;'); + expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` ( +\t\`id\` integer PRIMARY KEY NOT NULL, +\t\`age\` integer +);\n`); + expect(sqlStatements[2]).toBe(`INSERT INTO \`__new_users\`("id", "age") SELECT "id", "age" FROM \`users\`;`); + expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`); + expect(sqlStatements[4]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); + expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); +}); + +test('rename check constraint', async (t) => { + const from = { + users: 
sqliteTable('users', {
+			id: int('id').primaryKey(),
+			age: int('age'),
+		}, (table) => ({
+			checkConstraint: check('some_check_name', sql`${table.age} > 21`),
+		})),
+	};
+
+	const to = {
+		users: sqliteTable('users', {
+			id: int('id').primaryKey(),
+			age: int('age'),
+		}, (table) => ({
+			checkConstraint: check('new_some_check_name', sql`${table.age} > 21`),
+		})),
+	};
+
+	const { sqlStatements, statements } = await diffTestSchemasLibSQL(from, to, []);
+
+	expect(statements.length).toBe(1);
+	expect(statements[0]).toStrictEqual({
+		columns: [
+			{
+				autoincrement: false,
+				generated: undefined,
+				name: 'id',
+				notNull: true,
+				primaryKey: true,
+				type: 'integer',
+			},
+			{
+				autoincrement: false,
+				generated: undefined,
+				name: 'age',
+				notNull: false,
+				primaryKey: false,
+				type: 'integer',
+			},
+		],
+		compositePKs: [],
+		referenceData: [],
+		tableName: 'users',
+		type: 'recreate_table',
+		uniqueConstraints: [],
+		checkConstraints: [`new_some_check_name;"users"."age" > 21`],
+	});
+
+	expect(sqlStatements.length).toBe(6);
+	expect(sqlStatements[0]).toBe('PRAGMA foreign_keys=OFF;');
+	expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` (
+\t\`id\` integer PRIMARY KEY NOT NULL,
+\t\`age\` integer,
+\tCONSTRAINT "new_some_check_name" CHECK("__new_users"."age" > 21)
+);\n`);
+	expect(sqlStatements[2]).toBe(`INSERT INTO \`__new_users\`("id", "age") SELECT "id", "age" FROM \`users\`;`);
+	expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`);
+	expect(sqlStatements[4]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`);
+	expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`);
+});
+
+test('change check constraint value', async (t) => {
+	const from = {
+		users: sqliteTable('users', {
+			id: int('id').primaryKey(),
+			age: int('age'),
+		}, (table) => ({
+			checkConstraint: check('some_check_name', sql`${table.age} > 21`),
+		})),
+	};
+
+	const to = {
+		users: sqliteTable('users', {
+			id: int('id').primaryKey(),
+			age: int('age'),
+		}, (table) => ({
+			
checkConstraint: check('some_check_name', sql`${table.age} > 10`), + })), + }; + + const { sqlStatements, statements } = await diffTestSchemasLibSQL(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + columns: [ + { + autoincrement: false, + generated: undefined, + name: 'id', + notNull: true, + primaryKey: true, + type: 'integer', + }, + { + autoincrement: false, + generated: undefined, + name: 'age', + notNull: false, + primaryKey: false, + type: 'integer', + }, + ], + compositePKs: [], + referenceData: [], + tableName: 'users', + type: 'recreate_table', + uniqueConstraints: [], + checkConstraints: [`some_check_name;"users"."age" > 10`], + }); + + expect(sqlStatements.length).toBe(6); + expect(sqlStatements[0]).toBe('PRAGMA foreign_keys=OFF;'); + expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` ( +\t\`id\` integer PRIMARY KEY NOT NULL, +\t\`age\` integer, +\tCONSTRAINT "some_check_name" CHECK("__new_users"."age" > 10) +);\n`); + expect(sqlStatements[2]).toBe(`INSERT INTO \`__new_users\`("id", "age") SELECT "id", "age" FROM \`users\`;`); + expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`); + expect(sqlStatements[4]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); + expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); +}); + +test('create checks with same names', async (t) => { + const to = { + users: sqliteTable('users', { + id: int('id').primaryKey(), + age: int('age'), + name: text('name'), + }, (table) => ({ + checkConstraint1: check('some_check_name', sql`${table.age} > 21`), + checkConstraint2: check('some_check_name', sql`${table.name} != 'Alex'`), + })), + }; + + await expect(diffTestSchemasLibSQL({}, to, [])).rejects.toThrowError(); +}); diff --git a/drizzle-kit/tests/libsql-statements.test.ts b/drizzle-kit/tests/libsql-statements.test.ts index 8221e52e0..a7cbc0602 100644 --- a/drizzle-kit/tests/libsql-statements.test.ts +++ b/drizzle-kit/tests/libsql-statements.test.ts @@ -33,6 
+33,7 @@ test('drop autoincrement', async (t) => { tableName: 'users', type: 'recreate_table', uniqueConstraints: [], + checkConstraints: [], }); }); @@ -66,6 +67,7 @@ test('set autoincrement', async (t) => { tableName: 'users', type: 'recreate_table', uniqueConstraints: [], + checkConstraints: [], }); }); @@ -427,6 +429,7 @@ test('drop foriegn key', async (t) => { tableName: 'users', type: 'recreate_table', uniqueConstraints: [], + checkConstraints: [], }); expect(sqlStatements.length).toBe(6); @@ -512,6 +515,7 @@ test('alter foriegn key', async (t) => { tableName: 'users', type: 'recreate_table', uniqueConstraints: [], + checkConstraints: [], }); expect(sqlStatements.length).toBe(6); @@ -615,6 +619,7 @@ test('add foriegn key for multiple columns', async (t) => { tableName: 'users', type: 'recreate_table', uniqueConstraints: [], + checkConstraints: [], } as JsonRecreateTableStatement); expect(sqlStatements.length).toBe(6); @@ -709,6 +714,7 @@ test('drop foriegn key for multiple columns', async (t) => { tableName: 'users', type: 'recreate_table', uniqueConstraints: [], + checkConstraints: [], }); expect(sqlStatements.length).toBe(6); @@ -850,6 +856,7 @@ test('recreate table with nested references', async (t) => { tableName: 'users', type: 'recreate_table', uniqueConstraints: [], + checkConstraints: [], }); expect(sqlStatements.length).toBe(6); diff --git a/drizzle-kit/tests/libsql-views.test.ts b/drizzle-kit/tests/libsql-views.test.ts new file mode 100644 index 000000000..bf5cdb04e --- /dev/null +++ b/drizzle-kit/tests/libsql-views.test.ts @@ -0,0 +1,218 @@ +import { sql } from 'drizzle-orm'; +import { int, sqliteTable, sqliteView } from 'drizzle-orm/sqlite-core'; +import { expect, test } from 'vitest'; +import { diffTestSchemasLibSQL } from './schemaDiffer'; + +test('create view', async () => { + const users = sqliteTable('users', { id: int('id').default(1) }); + const view = sqliteView('view').as((qb) => qb.select().from(users)); + const to = { + users: users, + 
testView: view, + }; + + const { statements, sqlStatements } = await diffTestSchemasLibSQL({}, to, []); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'sqlite_create_table', + tableName: 'users', + columns: [{ + autoincrement: false, + default: 1, + name: 'id', + type: 'integer', + primaryKey: false, + notNull: false, + }], + compositePKs: [], + uniqueConstraints: [], + referenceData: [], + checkConstraints: [], + }); + expect(statements[1]).toStrictEqual({ + type: 'sqlite_create_view', + name: 'view', + definition: 'select "id" from "users"', + }); + + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe(`CREATE TABLE \`users\` ( +\t\`id\` integer DEFAULT 1 +);\n`); + expect(sqlStatements[1]).toBe(`CREATE VIEW \`view\` AS select "id" from "users";`); +}); + +test('drop view', async () => { + const users = sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + }); + + const from = { + users: users, + testView: sqliteView('view', { id: int('id') }).as(sql`SELECT * FROM users`), + }; + const to = { + users, + }; + + const { statements, sqlStatements } = await diffTestSchemasLibSQL(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + name: 'view', + type: 'drop_view', + }); + + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + `DROP VIEW \`view\`;`, + ); +}); + +test('alter view', async () => { + const users = sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + }); + + const from = { + users: users, + testView: sqliteView('view', { id: int('id') }).as(sql`SELECT * FROM users`), + }; + const to = { + users, + testView: sqliteView('view', { id: int('id') }).as(sql`SELECT * FROM users WHERE users.id = 1`), + }; + const { statements, sqlStatements } = await diffTestSchemasLibSQL(from, to, []); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + name: 'view', + type: 
'drop_view', + }); + expect(statements[1]).toStrictEqual({ + name: 'view', + type: 'sqlite_create_view', + definition: 'SELECT * FROM users WHERE users.id = 1', + }); + + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe( + `DROP VIEW \`view\`;`, + ); + expect(sqlStatements[1]).toBe( + `CREATE VIEW \`view\` AS SELECT * FROM users WHERE users.id = 1;`, + ); +}); + +test('create view with existing flag', async () => { + const view = sqliteView('view', {}).existing(); + const to = { + testView: view, + }; + + const { statements, sqlStatements } = await diffTestSchemasLibSQL({}, to, []); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('drop view with existing flag', async () => { + const users = sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + }); + + const from = { + users: users, + testView: sqliteView('view', { id: int('id') }).existing(), + }; + const to = { + users, + }; + + const { statements, sqlStatements } = await diffTestSchemasLibSQL(from, to, []); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('rename view with existing flag', async () => { + const users = sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + }); + + const from = { + users: users, + testView: sqliteView('view', { id: int('id') }).existing(), + }; + const to = { + users, + testView: sqliteView('new_view', { id: int('id') }).existing(), + }; + const { statements, sqlStatements } = await diffTestSchemasLibSQL(from, to, ['view->new_view']); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('rename view and drop existing flag', async () => { + const users = sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + }); + + const from = { + users: users, + testView: sqliteView('view', { id: int('id') }).existing(), + }; + const to = { + users, + testView: sqliteView('new_view', 
{ id: int('id') }).as(sql`SELECT * FROM users`), + }; + const { statements, sqlStatements } = await diffTestSchemasLibSQL(from, to, ['view->new_view']); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + name: 'view', + type: 'drop_view', + }); + expect(statements[1]).toStrictEqual({ + type: 'sqlite_create_view', + name: 'new_view', + definition: 'SELECT * FROM users', + }); + + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe('DROP VIEW `view`;'); + expect(sqlStatements[1]).toBe(`CREATE VIEW \`new_view\` AS SELECT * FROM users;`); +}); + +test('rename view and alter ".as"', async () => { + const users = sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + }); + + const from = { + users: users, + testView: sqliteView('view', { id: int('id') }).as(sql`SELECT * FROM users`), + }; + const to = { + users, + testView: sqliteView('new_view', { id: int('id') }).as(sql`SELECT * FROM users WHERE 1=1`), + }; + const { statements, sqlStatements } = await diffTestSchemasLibSQL(from, to, ['view->new_view']); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + name: 'view', + type: 'drop_view', + }); + expect(statements[1]).toStrictEqual({ + type: 'sqlite_create_view', + name: 'new_view', + definition: 'SELECT * FROM users WHERE 1=1', + }); + + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe('DROP VIEW `view`;'); + expect(sqlStatements[1]).toBe(`CREATE VIEW \`new_view\` AS SELECT * FROM users WHERE 1=1;`); +}); diff --git a/drizzle-kit/tests/mysql-checks.test.ts b/drizzle-kit/tests/mysql-checks.test.ts new file mode 100644 index 000000000..82e7a5104 --- /dev/null +++ b/drizzle-kit/tests/mysql-checks.test.ts @@ -0,0 +1,291 @@ +import { sql } from 'drizzle-orm'; +import { check, int, mysqlTable, serial, varchar } from 'drizzle-orm/mysql-core'; +import { expect, test } from 'vitest'; +import { diffTestSchemasMysql } from './schemaDiffer'; + +test('create 
table with check', async (t) => { + const to = { + users: mysqlTable('users', { + id: serial('id').primaryKey(), + age: int('age'), + }, (table) => ({ + checkConstraint: check('some_check_name', sql`${table.age} > 21`), + })), + }; + + const { sqlStatements, statements } = await diffTestSchemasMysql({}, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + columns: [ + { + name: 'id', + type: 'serial', + notNull: true, + primaryKey: false, + autoincrement: true, + }, + { + name: 'age', + type: 'int', + notNull: false, + primaryKey: false, + autoincrement: false, + }, + ], + compositePKs: [ + 'users_id;id', + ], + checkConstraints: ['some_check_name;\`users\`.\`age\` > 21'], + compositePkName: 'users_id', + uniqueConstraints: [], + schema: undefined, + internals: { + tables: {}, + indexes: {}, + }, + }); + + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`CREATE TABLE \`users\` ( +\t\`id\` serial AUTO_INCREMENT NOT NULL, +\t\`age\` int, +\tCONSTRAINT \`users_id\` PRIMARY KEY(\`id\`), +\tCONSTRAINT \`some_check_name\` CHECK(\`users\`.\`age\` > 21) +);\n`); +}); + +test('add check contraint to existing table', async (t) => { + const from = { + users: mysqlTable('users', { + id: serial('id').primaryKey(), + age: int('age'), + }), + }; + + const to = { + users: mysqlTable('users', { + id: serial('id').primaryKey(), + age: int('age'), + }, (table) => ({ + checkConstraint: check('some_check_name', sql`${table.age} > 21`), + })), + }; + + const { sqlStatements, statements } = await diffTestSchemasMysql(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_check_constraint', + tableName: 'users', + data: 'some_check_name;\`users\`.\`age\` > 21', + schema: '', + }); + + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + `ALTER TABLE \`users\` ADD CONSTRAINT \`some_check_name\` CHECK 
(\`users\`.\`age\` > 21);`, + ); +}); + +test('drop check contraint in existing table', async (t) => { + const to = { + users: mysqlTable('users', { + id: serial('id').primaryKey(), + age: int('age'), + }), + }; + + const from = { + users: mysqlTable('users', { + id: serial('id').primaryKey(), + age: int('age'), + }, (table) => ({ + checkConstraint: check('some_check_name', sql`${table.age} > 21`), + })), + }; + + const { sqlStatements, statements } = await diffTestSchemasMysql(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'delete_check_constraint', + tableName: 'users', + schema: '', + constraintName: 'some_check_name', + }); + + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + `ALTER TABLE \`users\` DROP CONSTRAINT \`some_check_name\`;`, + ); +}); + +test('rename check constraint', async (t) => { + const from = { + users: mysqlTable('users', { + id: serial('id').primaryKey(), + age: int('age'), + }, (table) => ({ + checkConstraint: check('some_check_name', sql`${table.age} > 21`), + })), + }; + + const to = { + users: mysqlTable('users', { + id: serial('id').primaryKey(), + age: int('age'), + }, (table) => ({ + checkConstraint: check('new_check_name', sql`${table.age} > 21`), + })), + }; + + const { sqlStatements, statements } = await diffTestSchemasMysql(from, to, []); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + constraintName: 'some_check_name', + schema: '', + tableName: 'users', + type: 'delete_check_constraint', + }); + expect(statements[1]).toStrictEqual({ + data: 'new_check_name;\`users\`.\`age\` > 21', + schema: '', + tableName: 'users', + type: 'create_check_constraint', + }); + + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe( + `ALTER TABLE \`users\` DROP CONSTRAINT \`some_check_name\`;`, + ); + expect(sqlStatements[1]).toBe( + `ALTER TABLE \`users\` ADD CONSTRAINT \`new_check_name\` CHECK (\`users\`.\`age\` > 
21);`, + ); +}); + +test('alter check constraint', async (t) => { + const from = { + users: mysqlTable('users', { + id: serial('id').primaryKey(), + age: int('age'), + }, (table) => ({ + checkConstraint: check('some_check_name', sql`${table.age} > 21`), + })), + }; + + const to = { + users: mysqlTable('users', { + id: serial('id').primaryKey(), + age: int('age'), + }, (table) => ({ + checkConstraint: check('new_check_name', sql`${table.age} > 10`), + })), + }; + + const { sqlStatements, statements } = await diffTestSchemasMysql(from, to, []); + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + constraintName: 'some_check_name', + schema: '', + tableName: 'users', + type: 'delete_check_constraint', + }); + expect(statements[1]).toStrictEqual({ + data: 'new_check_name;\`users\`.\`age\` > 10', + schema: '', + tableName: 'users', + type: 'create_check_constraint', + }); + + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe( + `ALTER TABLE \`users\` DROP CONSTRAINT \`some_check_name\`;`, + ); + expect(sqlStatements[1]).toBe( + `ALTER TABLE \`users\` ADD CONSTRAINT \`new_check_name\` CHECK (\`users\`.\`age\` > 10);`, + ); +}); + +test('alter multiple check constraints', async (t) => { + const from = { + users: mysqlTable('users', { + id: serial('id').primaryKey(), + age: int('age'), + name: varchar('name', { length: 255 }), + }, (table) => ({ + checkConstraint1: check('some_check_name_1', sql`${table.age} > 21`), + checkConstraint2: check('some_check_name_2', sql`${table.name} != 'Alex'`), + })), + }; + + const to = { + users: mysqlTable('users', { + id: serial('id').primaryKey(), + age: int('age'), + name: varchar('name', { length: 255 }), + }, (table) => ({ + checkConstraint1: check('some_check_name_3', sql`${table.age} > 21`), + checkConstraint2: check('some_check_name_4', sql`${table.name} != 'Alex'`), + })), + }; + + const { sqlStatements, statements } = await diffTestSchemasMysql(from, to, []); + 
expect(statements.length).toBe(4); + expect(statements[0]).toStrictEqual({ + constraintName: 'some_check_name_1', + schema: '', + tableName: 'users', + type: 'delete_check_constraint', + }); + expect(statements[1]).toStrictEqual({ + constraintName: 'some_check_name_2', + schema: '', + tableName: 'users', + type: 'delete_check_constraint', + }); + expect(statements[2]).toStrictEqual({ + data: 'some_check_name_3;\`users\`.\`age\` > 21', + schema: '', + tableName: 'users', + type: 'create_check_constraint', + }); + expect(statements[3]).toStrictEqual({ + data: "some_check_name_4;\`users\`.\`name\` != 'Alex'", + schema: '', + tableName: 'users', + type: 'create_check_constraint', + }); + + expect(sqlStatements.length).toBe(4); + expect(sqlStatements[0]).toBe( + `ALTER TABLE \`users\` DROP CONSTRAINT \`some_check_name_1\`;`, + ); + expect(sqlStatements[1]).toBe( + `ALTER TABLE \`users\` DROP CONSTRAINT \`some_check_name_2\`;`, + ); + expect(sqlStatements[2]).toBe( + `ALTER TABLE \`users\` ADD CONSTRAINT \`some_check_name_3\` CHECK (\`users\`.\`age\` > 21);`, + ); + expect(sqlStatements[3]).toBe( + `ALTER TABLE \`users\` ADD CONSTRAINT \`some_check_name_4\` CHECK (\`users\`.\`name\` != \'Alex\');`, + ); +}); + +test('create checks with same names', async (t) => { + const to = { + users: mysqlTable('users', { + id: serial('id').primaryKey(), + age: int('age'), + name: varchar('name', { length: 255 }), + }, (table) => ({ + checkConstraint1: check('some_check_name', sql`${table.age} > 21`), + checkConstraint2: check('some_check_name', sql`${table.name} != 'Alex'`), + })), + }; + + await expect(diffTestSchemasMysql({}, to, [])).rejects.toThrowError(); +}); diff --git a/drizzle-kit/tests/mysql-schemas.test.ts b/drizzle-kit/tests/mysql-schemas.test.ts index 826585d86..6776700e3 100644 --- a/drizzle-kit/tests/mysql-schemas.test.ts +++ b/drizzle-kit/tests/mysql-schemas.test.ts @@ -129,6 +129,7 @@ test('add table to schema #3', async () => { }, compositePkName: '', compositePKs: 
[], + checkConstraints: [], }); }); diff --git a/drizzle-kit/tests/mysql-views.test.ts b/drizzle-kit/tests/mysql-views.test.ts new file mode 100644 index 000000000..39cd6c09e --- /dev/null +++ b/drizzle-kit/tests/mysql-views.test.ts @@ -0,0 +1,553 @@ +import { sql } from 'drizzle-orm'; +import { int, mysqlTable, mysqlView } from 'drizzle-orm/mysql-core'; +import { expect, test } from 'vitest'; +import { diffTestSchemasMysql } from './schemaDiffer'; + +test('create view #1', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + }; + const to = { + users: users, + view: mysqlView('some_view').as((qb) => qb.select().from(users)), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'mysql_create_view', + name: 'some_view', + algorithm: 'undefined', + replace: false, + definition: 'select `id` from `users`', + withCheckOption: undefined, + sqlSecurity: 'definer', + }); + + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`CREATE ALGORITHM = undefined +SQL SECURITY definer +VIEW \`some_view\` AS (select \`id\` from \`users\`);`); +}); + +test('create view #2', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + }; + const to = { + users: users, + view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'mysql_create_view', + name: 'some_view', + algorithm: 'merge', + replace: false, + definition: 'SELECT * FROM \`users\`', + withCheckOption: 'cascaded', + sqlSecurity: 'definer', + }); + + 
expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`CREATE ALGORITHM = merge +SQL SECURITY definer +VIEW \`some_view\` AS (SELECT * FROM \`users\`) +WITH cascaded CHECK OPTION;`); +}); + +test('create view with existing flag', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + }; + const to = { + users: users, + view: mysqlView('some_view', {}).existing(), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('drop view', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), + }; + const to = { + users: users, + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'drop_view', + name: 'some_view', + }); + + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`DROP VIEW \`some_view\`;`); +}); + +test('drop view with existing flag', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') + .withCheckOption('cascaded').existing(), + }; + const to = { + users: users, + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('rename view', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + view: mysqlView('some_view', 
{}).algorithm('merge').sqlSecurity('definer') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), + }; + const to = { + users: users, + view: mysqlView('new_some_view', {}).algorithm('merge').sqlSecurity('definer') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, [ + 'public.some_view->public.new_some_view', + ]); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'rename_view', + nameFrom: 'some_view', + nameTo: 'new_some_view', + }); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`RENAME TABLE \`some_view\` TO \`new_some_view\`;`); +}); + +test('rename view and alter meta options', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), + }; + const to = { + users: users, + view: mysqlView('new_some_view', {}).sqlSecurity('definer') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, [ + 'public.some_view->public.new_some_view', + ]); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'rename_view', + nameFrom: 'some_view', + nameTo: 'new_some_view', + }); + expect(statements[1]).toStrictEqual({ + algorithm: 'undefined', + columns: {}, + definition: 'SELECT * FROM `users`', + isExisting: false, + name: 'new_some_view', + sqlSecurity: 'definer', + type: 'alter_mysql_view', + withCheckOption: 'cascaded', + }); + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe(`RENAME TABLE \`some_view\` TO \`new_some_view\`;`); + expect(sqlStatements[1]).toBe(`ALTER ALGORITHM = undefined +SQL SECURITY definer +VIEW \`new_some_view\` AS SELECT * 
FROM \`users\` +WITH cascaded CHECK OPTION;`); +}); + +test('rename view with existing flag', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') + .withCheckOption('cascaded').existing(), + }; + const to = { + users: users, + view: mysqlView('new_some_view', {}).algorithm('merge').sqlSecurity('definer') + .withCheckOption('cascaded').existing(), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, [ + 'public.some_view->public.new_some_view', + ]); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('add meta to view', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + view: mysqlView('some_view', {}).as(sql`SELECT * FROM ${users}`), + }; + const to = { + users: users, + view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + algorithm: 'merge', + columns: {}, + definition: 'SELECT * FROM `users`', + isExisting: false, + name: 'some_view', + sqlSecurity: 'definer', + type: 'alter_mysql_view', + withCheckOption: 'cascaded', + }); + + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`ALTER ALGORITHM = merge +SQL SECURITY definer +VIEW \`some_view\` AS SELECT * FROM \`users\` +WITH cascaded CHECK OPTION;`); +}); + +test('add meta to view with existing flag', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + view: mysqlView('some_view', {}).existing(), + }; + const to = { + users: users, + view: 
mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') + .withCheckOption('cascaded').existing(), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('alter meta to view', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + view: mysqlView('some_view', {}).algorithm('temptable').sqlSecurity('invoker') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), + }; + const to = { + users: users, + view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + algorithm: 'merge', + columns: {}, + definition: 'SELECT * FROM `users`', + isExisting: false, + name: 'some_view', + sqlSecurity: 'definer', + type: 'alter_mysql_view', + withCheckOption: 'cascaded', + }); + + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`ALTER ALGORITHM = merge +SQL SECURITY definer +VIEW \`some_view\` AS SELECT * FROM \`users\` +WITH cascaded CHECK OPTION;`); +}); + +test('alter meta to view with existing flag', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + view: mysqlView('some_view', {}).algorithm('temptable').sqlSecurity('invoker') + .withCheckOption('cascaded').existing(), + }; + const to = { + users: users, + view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') + .withCheckOption('cascaded').existing(), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + 
+test('drop meta from view', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), + }; + const to = { + users: users, + view: mysqlView('some_view', {}).as(sql`SELECT * FROM ${users}`), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + algorithm: 'undefined', + columns: {}, + definition: 'SELECT * FROM `users`', + isExisting: false, + name: 'some_view', + sqlSecurity: 'definer', + type: 'alter_mysql_view', + withCheckOption: undefined, + }); + + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`ALTER ALGORITHM = undefined +SQL SECURITY definer +VIEW \`some_view\` AS SELECT * FROM \`users\`;`); +}); + +test('drop meta from view existing flag', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + + view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') + .withCheckOption('cascaded').existing(), + }; + const to = { + users: users, + view: mysqlView('some_view', {}).existing(), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('alter view ".as" value', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + view: mysqlView('some_view', {}).algorithm('temptable').sqlSecurity('invoker') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), + }; + const to = { + users: users, + view: mysqlView('some_view', {}).algorithm('temptable').sqlSecurity('invoker') + .withCheckOption('cascaded').as(sql`SELECT * 
FROM ${users} WHERE ${users.id} = 1`), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + algorithm: 'temptable', + definition: 'SELECT * FROM `users` WHERE `users`.`id` = 1', + name: 'some_view', + sqlSecurity: 'invoker', + type: 'mysql_create_view', + withCheckOption: 'cascaded', + replace: true, + }); + + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`CREATE OR REPLACE ALGORITHM = temptable +SQL SECURITY invoker +VIEW \`some_view\` AS (SELECT * FROM \`users\` WHERE \`users\`.\`id\` = 1) +WITH cascaded CHECK OPTION;`); +}); + +test('rename and alter view ".as" value', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + view: mysqlView('some_view', {}).algorithm('temptable').sqlSecurity('invoker') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), + }; + const to = { + users: users, + view: mysqlView('new_some_view', {}).algorithm('temptable').sqlSecurity('invoker') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users} WHERE ${users.id} = 1`), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, [ + 'public.some_view->public.new_some_view', + ]); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + nameFrom: 'some_view', + nameTo: 'new_some_view', + type: 'rename_view', + }); + expect(statements[1]).toStrictEqual({ + algorithm: 'temptable', + definition: 'SELECT * FROM `users` WHERE `users`.`id` = 1', + name: 'new_some_view', + sqlSecurity: 'invoker', + type: 'mysql_create_view', + withCheckOption: 'cascaded', + replace: true, + }); + + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe(`RENAME TABLE \`some_view\` TO \`new_some_view\`;`); + expect(sqlStatements[1]).toBe(`CREATE OR REPLACE ALGORITHM = temptable +SQL SECURITY invoker +VIEW 
\`new_some_view\` AS (SELECT * FROM \`users\` WHERE \`users\`.\`id\` = 1) +WITH cascaded CHECK OPTION;`); +}); + +test('set existing', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + view: mysqlView('some_view', {}).algorithm('temptable').sqlSecurity('invoker') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), + }; + const to = { + users: users, + view: mysqlView('new_some_view', {}).algorithm('temptable').sqlSecurity('invoker') + .withCheckOption('cascaded').existing(), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, [ + 'public.some_view->public.new_some_view', + ]); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('drop existing', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + view: mysqlView('some_view', {}).algorithm('temptable').sqlSecurity('invoker') + .withCheckOption('cascaded').existing(), + }; + const to = { + users: users, + view: mysqlView('new_some_view', {}).algorithm('temptable').sqlSecurity('invoker') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users} WHERE ${users.id} = 1`), + }; + + const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, [ + 'public.some_view->public.new_some_view', + ]); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + name: 'new_some_view', + type: 'drop_view', + }); + expect(statements[1]).toStrictEqual({ + algorithm: 'temptable', + definition: 'SELECT * FROM `users` WHERE `users`.`id` = 1', + name: 'new_some_view', + sqlSecurity: 'invoker', + type: 'mysql_create_view', + withCheckOption: 'cascaded', + replace: false, + }); + + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe(`DROP VIEW \`new_some_view\`;`); + expect(sqlStatements[1]).toBe(`CREATE ALGORITHM = temptable +SQL SECURITY 
invoker +VIEW \`new_some_view\` AS (SELECT * FROM \`users\` WHERE \`users\`.\`id\` = 1) +WITH cascaded CHECK OPTION;`); +}); diff --git a/drizzle-kit/tests/mysql.test.ts b/drizzle-kit/tests/mysql.test.ts index b7e8cc1cf..29f2e869a 100644 --- a/drizzle-kit/tests/mysql.test.ts +++ b/drizzle-kit/tests/mysql.test.ts @@ -35,6 +35,7 @@ test('add table #1', async () => { }, uniqueConstraints: [], compositePkName: '', + checkConstraints: [], }); }); @@ -64,6 +65,7 @@ test('add table #2', async () => { compositePKs: ['users_id;id'], compositePkName: 'users_id', uniqueConstraints: [], + checkConstraints: [], internals: { tables: {}, indexes: {}, @@ -108,6 +110,7 @@ test('add table #3', async () => { compositePKs: ['users_pk;id'], uniqueConstraints: [], compositePkName: 'users_pk', + checkConstraints: [], internals: { tables: {}, indexes: {}, @@ -136,6 +139,7 @@ test('add table #4', async () => { compositePKs: [], uniqueConstraints: [], compositePkName: '', + checkConstraints: [], }); expect(statements[1]).toStrictEqual({ type: 'create_table', @@ -149,6 +153,7 @@ test('add table #4', async () => { }, uniqueConstraints: [], compositePkName: '', + checkConstraints: [], }); }); @@ -192,6 +197,7 @@ test('add table #6', async () => { compositePKs: [], uniqueConstraints: [], compositePkName: '', + checkConstraints: [], }); expect(statements[1]).toStrictEqual({ type: 'drop_table', @@ -227,6 +233,7 @@ test('add table #7', async () => { indexes: {}, }, compositePkName: '', + checkConstraints: [], }); expect(statements[1]).toStrictEqual({ type: 'rename_table', @@ -316,6 +323,7 @@ test('change table schema #2', async () => { uniqueConstraints: [], compositePkName: '', compositePKs: [], + checkConstraints: [], internals: { tables: {}, indexes: {}, diff --git a/drizzle-kit/tests/pg-checks.test.ts b/drizzle-kit/tests/pg-checks.test.ts new file mode 100644 index 000000000..1f5e5e1c5 --- /dev/null +++ b/drizzle-kit/tests/pg-checks.test.ts @@ -0,0 +1,280 @@ +import { sql } from 'drizzle-orm'; 
+import { check, integer, pgTable, serial, varchar } from 'drizzle-orm/pg-core'; +import { JsonCreateTableStatement } from 'src/jsonStatements'; +import { expect, test } from 'vitest'; +import { diffTestSchemas } from './schemaDiffer'; + +test('create table with check', async (t) => { + const to = { + users: pgTable('users', { + id: serial('id').primaryKey(), + age: integer('age'), + }, (table) => ({ + checkConstraint: check('some_check_name', sql`${table.age} > 21`), + })), + }; + + const { sqlStatements, statements } = await diffTestSchemas({}, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: '', + columns: [ + { + name: 'id', + type: 'serial', + notNull: true, + primaryKey: true, + }, + { + name: 'age', + type: 'integer', + notNull: false, + primaryKey: false, + }, + ], + compositePKs: [], + checkConstraints: ['some_check_name;"users"."age" > 21'], + compositePkName: '', + uniqueConstraints: [], + } as JsonCreateTableStatement); + + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`CREATE TABLE IF NOT EXISTS "users" ( +\t"id" serial PRIMARY KEY NOT NULL, +\t"age" integer, +\tCONSTRAINT "some_check_name" CHECK ("users"."age" > 21) +);\n`); +}); + +test('add check contraint to existing table', async (t) => { + const from = { + users: pgTable('users', { + id: serial('id').primaryKey(), + age: integer('age'), + }), + }; + + const to = { + users: pgTable('users', { + id: serial('id').primaryKey(), + age: integer('age'), + }, (table) => ({ + checkConstraint: check('some_check_name', sql`${table.age} > 21`), + })), + }; + + const { sqlStatements, statements } = await diffTestSchemas(from, to, []); + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_check_constraint', + tableName: 'users', + schema: '', + data: 'some_check_name;"users"."age" > 21', + }); + + expect(sqlStatements.length).toBe(1); + 
expect(sqlStatements[0]).toBe( + `ALTER TABLE "users" ADD CONSTRAINT "some_check_name" CHECK ("users"."age" > 21);`, + ); +}); + +test('drop check contraint in existing table', async (t) => { + const from = { + users: pgTable('users', { + id: serial('id').primaryKey(), + age: integer('age'), + }, (table) => ({ + checkConstraint: check('some_check_name', sql`${table.age} > 21`), + })), + }; + + const to = { + users: pgTable('users', { + id: serial('id').primaryKey(), + age: integer('age'), + }), + }; + + const { sqlStatements, statements } = await diffTestSchemas(from, to, []); + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'delete_check_constraint', + tableName: 'users', + schema: '', + constraintName: 'some_check_name', + }); + + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + `ALTER TABLE "users" DROP CONSTRAINT "some_check_name";`, + ); +}); + +test('rename check constraint', async (t) => { + const from = { + users: pgTable('users', { + id: serial('id').primaryKey(), + age: integer('age'), + }, (table) => ({ + checkConstraint: check('some_check_name', sql`${table.age} > 21`), + })), + }; + + const to = { + users: pgTable('users', { + id: serial('id').primaryKey(), + age: integer('age'), + }, (table) => ({ + checkConstraint: check('new_check_name', sql`${table.age} > 21`), + })), + }; + + const { sqlStatements, statements } = await diffTestSchemas(from, to, []); + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + constraintName: 'some_check_name', + schema: '', + tableName: 'users', + type: 'delete_check_constraint', + }); + expect(statements[1]).toStrictEqual({ + data: 'new_check_name;"users"."age" > 21', + schema: '', + tableName: 'users', + type: 'create_check_constraint', + }); + + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe( + `ALTER TABLE "users" DROP CONSTRAINT "some_check_name";`, + ); + expect(sqlStatements[1]).toBe( + `ALTER TABLE "users" 
ADD CONSTRAINT "new_check_name" CHECK ("users"."age" > 21);`, + ); +}); + +test('alter check constraint', async (t) => { + const from = { + users: pgTable('users', { + id: serial('id').primaryKey(), + age: integer('age'), + }, (table) => ({ + checkConstraint: check('some_check_name', sql`${table.age} > 21`), + })), + }; + + const to = { + users: pgTable('users', { + id: serial('id').primaryKey(), + age: integer('age'), + }, (table) => ({ + checkConstraint: check('new_check_name', sql`${table.age} > 10`), + })), + }; + + const { sqlStatements, statements } = await diffTestSchemas(from, to, []); + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + constraintName: 'some_check_name', + schema: '', + tableName: 'users', + type: 'delete_check_constraint', + }); + expect(statements[1]).toStrictEqual({ + data: 'new_check_name;"users"."age" > 10', + schema: '', + tableName: 'users', + type: 'create_check_constraint', + }); + + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe( + `ALTER TABLE "users" DROP CONSTRAINT "some_check_name";`, + ); + expect(sqlStatements[1]).toBe( + `ALTER TABLE "users" ADD CONSTRAINT "new_check_name" CHECK ("users"."age" > 10);`, + ); +}); + +test('alter multiple check constraints', async (t) => { + const from = { + users: pgTable('users', { + id: serial('id').primaryKey(), + age: integer('age'), + name: varchar('name'), + }, (table) => ({ + checkConstraint1: check('some_check_name_1', sql`${table.age} > 21`), + checkConstraint2: check('some_check_name_2', sql`${table.name} != 'Alex'`), + })), + }; + + const to = { + users: pgTable('users', { + id: serial('id').primaryKey(), + age: integer('age'), + name: varchar('name'), + }, (table) => ({ + checkConstraint1: check('some_check_name_3', sql`${table.age} > 21`), + checkConstraint2: check('some_check_name_4', sql`${table.name} != 'Alex'`), + })), + }; + + const { sqlStatements, statements } = await diffTestSchemas(from, to, []); + 
expect(statements.length).toBe(4); + expect(statements[0]).toStrictEqual({ + constraintName: 'some_check_name_1', + schema: '', + tableName: 'users', + type: 'delete_check_constraint', + }); + expect(statements[1]).toStrictEqual({ + constraintName: 'some_check_name_2', + schema: '', + tableName: 'users', + type: 'delete_check_constraint', + }); + expect(statements[2]).toStrictEqual({ + data: 'some_check_name_3;"users"."age" > 21', + schema: '', + tableName: 'users', + type: 'create_check_constraint', + }); + expect(statements[3]).toStrictEqual({ + data: 'some_check_name_4;"users"."name" != \'Alex\'', + schema: '', + tableName: 'users', + type: 'create_check_constraint', + }); + + expect(sqlStatements.length).toBe(4); + expect(sqlStatements[0]).toBe( + `ALTER TABLE "users" DROP CONSTRAINT "some_check_name_1";`, + ); + expect(sqlStatements[1]).toBe( + `ALTER TABLE "users" DROP CONSTRAINT "some_check_name_2";`, + ); + expect(sqlStatements[2]).toBe( + `ALTER TABLE "users" ADD CONSTRAINT "some_check_name_3" CHECK ("users"."age" > 21);`, + ); + expect(sqlStatements[3]).toBe( + `ALTER TABLE "users" ADD CONSTRAINT "some_check_name_4" CHECK ("users"."name" != \'Alex\');`, + ); +}); + +test('create checks with same names', async (t) => { + const to = { + users: pgTable('users', { + id: serial('id').primaryKey(), + age: integer('age'), + name: varchar('name'), + }, (table) => ({ + checkConstraint1: check('some_check_name', sql`${table.age} > 21`), + checkConstraint2: check('some_check_name', sql`${table.name} != 'Alex'`), + })), + }; + + await expect(diffTestSchemas({}, to, [])).rejects.toThrowError(); +}); diff --git a/drizzle-kit/tests/pg-enums.test.ts b/drizzle-kit/tests/pg-enums.test.ts index cd8877a43..99a3dca7e 100644 --- a/drizzle-kit/tests/pg-enums.test.ts +++ b/drizzle-kit/tests/pg-enums.test.ts @@ -7,8 +7,10 @@ test('enums #1', async () => { enum: pgEnum('enum', ['value']), }; - const { statements } = await diffTestSchemas({}, to, []); + const { statements, 
sqlStatements } = await diffTestSchemas({}, to, []); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`CREATE TYPE "public"."enum" AS ENUM('value');`); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ name: 'enum', @@ -24,8 +26,10 @@ test('enums #2', async () => { enum: folder.enum('enum', ['value']), }; - const { statements } = await diffTestSchemas({}, to, []); + const { statements, sqlStatements } = await diffTestSchemas({}, to, []); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`CREATE TYPE "folder"."enum" AS ENUM('value');`); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ name: 'enum', @@ -40,8 +44,10 @@ test('enums #3', async () => { enum: pgEnum('enum', ['value']), }; - const { statements } = await diffTestSchemas(from, {}, []); + const { statements, sqlStatements } = await diffTestSchemas(from, {}, []); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`DROP TYPE "public"."enum";`); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'drop_type_enum', @@ -57,8 +63,10 @@ test('enums #4', async () => { enum: folder.enum('enum', ['value']), }; - const { statements } = await diffTestSchemas(from, {}, []); + const { statements, sqlStatements } = await diffTestSchemas(from, {}, []); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`DROP TYPE "folder"."enum";`); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'drop_type_enum', @@ -81,8 +89,10 @@ test('enums #5', async () => { enum: folder2.enum('enum', ['value']), }; - const { statements } = await diffTestSchemas(from, to, ['folder1->folder2']); + const { statements, sqlStatements } = await diffTestSchemas(from, to, ['folder1->folder2']); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`ALTER SCHEMA "folder1" RENAME TO "folder2";\n`); expect(statements.length).toBe(1); 
expect(statements[0]).toStrictEqual({ type: 'rename_schema', @@ -107,10 +117,12 @@ test('enums #6', async () => { enum: folder2.enum('enum', ['value']), }; - const { statements } = await diffTestSchemas(from, to, [ + const { statements, sqlStatements } = await diffTestSchemas(from, to, [ 'folder1.enum->folder2.enum', ]); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`ALTER TYPE "folder1"."enum" SET SCHEMA "folder2";`); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'move_type_enum', @@ -129,8 +141,10 @@ test('enums #7', async () => { enum: pgEnum('enum', ['value1', 'value2']), }; - const { statements } = await diffTestSchemas(from, to, []); + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`ALTER TYPE "public"."enum" ADD VALUE 'value2';`); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_type_add_value', @@ -150,8 +164,11 @@ test('enums #8', async () => { enum: pgEnum('enum', ['value1', 'value2', 'value3']), }; - const { statements } = await diffTestSchemas(from, to, []); + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe(`ALTER TYPE "public"."enum" ADD VALUE 'value2';`); + expect(sqlStatements[1]).toBe(`ALTER TYPE "public"."enum" ADD VALUE 'value3';`); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'alter_type_add_value', @@ -179,8 +196,10 @@ test('enums #9', async () => { enum: pgEnum('enum', ['value1', 'value2', 'value3']), }; - const { statements } = await diffTestSchemas(from, to, []); + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`ALTER TYPE "public"."enum" ADD VALUE 'value2' BEFORE 'value3';`); expect(statements.length).toBe(1); 
expect(statements[0]).toStrictEqual({ type: 'alter_type_add_value', @@ -201,8 +220,10 @@ test('enums #10', async () => { enum: schema.enum('enum', ['value1', 'value2']), }; - const { statements } = await diffTestSchemas(from, to, []); + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`ALTER TYPE "folder"."enum" ADD VALUE 'value2';`); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_type_add_value', @@ -223,10 +244,12 @@ test('enums #11', async () => { enum: pgEnum('enum', ['value1']), }; - const { statements } = await diffTestSchemas(from, to, [ + const { statements, sqlStatements } = await diffTestSchemas(from, to, [ 'folder1.enum->public.enum', ]); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`ALTER TYPE "folder1"."enum" SET SCHEMA "public";`); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'move_type_enum', @@ -246,10 +269,12 @@ test('enums #12', async () => { enum: schema1.enum('enum', ['value1']), }; - const { statements } = await diffTestSchemas(from, to, [ + const { statements, sqlStatements } = await diffTestSchemas(from, to, [ 'public.enum->folder1.enum', ]); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`ALTER TYPE "public"."enum" SET SCHEMA "folder1";`); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'move_type_enum', @@ -268,10 +293,12 @@ test('enums #13', async () => { enum: pgEnum('enum2', ['value1']), }; - const { statements } = await diffTestSchemas(from, to, [ + const { statements, sqlStatements } = await diffTestSchemas(from, to, [ 'public.enum1->public.enum2', ]); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`ALTER TYPE "public"."enum1" RENAME TO "enum2";`); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'rename_type_enum', @@ -292,10 
+319,13 @@ test('enums #14', async () => { enum: folder2.enum('enum2', ['value1']), }; - const { statements } = await diffTestSchemas(from, to, [ + const { statements, sqlStatements } = await diffTestSchemas(from, to, [ 'folder1.enum1->folder2.enum2', ]); + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe(`ALTER TYPE "folder1"."enum1" SET SCHEMA "folder2";`); + expect(sqlStatements[1]).toBe(`ALTER TYPE "folder2"."enum1" RENAME TO "enum2";`); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'move_type_enum', @@ -322,10 +352,16 @@ test('enums #15', async () => { enum: folder2.enum('enum2', ['value1', 'value2', 'value3', 'value4']), }; - const { statements } = await diffTestSchemas(from, to, [ + const { statements, sqlStatements } = await diffTestSchemas(from, to, [ 'folder1.enum1->folder2.enum2', ]); + expect(sqlStatements.length).toBe(4); + expect(sqlStatements[0]).toBe(`ALTER TYPE "folder1"."enum1" SET SCHEMA "folder2";`); + expect(sqlStatements[1]).toBe(`ALTER TYPE "folder2"."enum1" RENAME TO "enum2";`); + expect(sqlStatements[2]).toBe(`ALTER TYPE "folder2"."enum2" ADD VALUE 'value2' BEFORE 'value4';`); + expect(sqlStatements[3]).toBe(`ALTER TYPE "folder2"."enum2" ADD VALUE 'value3' BEFORE 'value4';`); + expect(statements.length).toBe(4); expect(statements[0]).toStrictEqual({ type: 'move_type_enum', @@ -373,10 +409,13 @@ test('enums #16', async () => { }), }; - const { statements } = await diffTestSchemas(from, to, [ + const { statements, sqlStatements } = await diffTestSchemas(from, to, [ 'public.enum1->public.enum2', ]); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`ALTER TYPE "public"."enum1" RENAME TO "enum2";`); + expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'rename_type_enum', @@ -405,10 +444,14 @@ test('enums #17', async () => { }), }; - const { statements } = await diffTestSchemas(from, to, [ + const { statements, sqlStatements } = await 
diffTestSchemas(from, to, [ 'public.enum1->schema.enum1', ]); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`ALTER TYPE "public"."enum1" SET SCHEMA "schema";`); + + expect(sqlStatements.length).toBe(1); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'move_type_enum', @@ -440,10 +483,14 @@ test('enums #18', async () => { }; // change name and schema of the enum, no table changes - const { statements } = await diffTestSchemas(from, to, [ + const { statements, sqlStatements } = await diffTestSchemas(from, to, [ 'schema1.enum1->schema2.enum2', ]); + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe(`ALTER TYPE "schema1"."enum1" SET SCHEMA "schema2";`); + expect(sqlStatements[1]).toBe(`ALTER TYPE "schema2"."enum1" RENAME TO "enum2";`); + expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'move_type_enum', @@ -458,3 +505,176 @@ test('enums #18', async () => { schema: 'schema2', }); }); + +test('drop enum value', async () => { + const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); + + const from = { + enum1, + }; + + const enum2 = pgEnum('enum', ['value1', 'value3']); + const to = { + enum2, + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe(`DROP TYPE "public"."enum";`); + expect(sqlStatements[1]).toBe(`CREATE TYPE "public"."enum" AS ENUM('value1', 'value3');`); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + columnsWithEnum: [], + deletedValues: [ + 'value2', + ], + name: 'enum', + newValues: [ + 'value1', + 'value3', + ], + schema: 'public', + type: 'alter_type_drop_value', + }); +}); + +test('drop enum value. 
enum is column's data type', async () => { + const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); + + const schema = pgSchema('new_schema'); + + const from = { + schema, + enum1, + table: pgTable('table', { + column: enum1('column'), + }), + table2: schema.table('table', { + column: enum1('column'), + }), + }; + + const enum2 = pgEnum('enum', ['value1', 'value3']); + const to = { + schema, + enum2, + table: pgTable('table', { + column: enum1('column'), + }), + table2: schema.table('table', { + column: enum1('column'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(sqlStatements.length).toBe(6); + expect(sqlStatements[0]).toBe(`ALTER TABLE "public"."table" ALTER COLUMN "column" SET DATA TYPE text;`); + expect(sqlStatements[1]).toBe(`ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE text;`); + expect(sqlStatements[2]).toBe(`DROP TYPE "public"."enum";`); + expect(sqlStatements[3]).toBe(`CREATE TYPE "public"."enum" AS ENUM('value1', 'value3');`); + expect(sqlStatements[4]).toBe( + `ALTER TABLE "public"."table" ALTER COLUMN "column" SET DATA TYPE "public"."enum" USING "column"::"public"."enum";`, + ); + expect(sqlStatements[5]).toBe( + `ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE "public"."enum" USING "column"::"public"."enum";`, + ); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + columnsWithEnum: [ + { + column: 'column', + schema: 'public', + table: 'table', + }, + { + column: 'column', + schema: 'new_schema', + table: 'table', + }, + ], + deletedValues: [ + 'value2', + ], + name: 'enum', + newValues: [ + 'value1', + 'value3', + ], + schema: 'public', + type: 'alter_type_drop_value', + }); +}); + +test('shuffle enum values', async () => { + const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); + + const schema = pgSchema('new_schema'); + + const from = { + schema, + enum1, + table: pgTable('table', { + column: 
enum1('column'), + }), + table2: schema.table('table', { + column: enum1('column'), + }), + }; + + const enum2 = pgEnum('enum', ['value1', 'value3', 'value2']); + const to = { + schema, + enum2, + table: pgTable('table', { + column: enum1('column'), + }), + table2: schema.table('table', { + column: enum1('column'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(sqlStatements.length).toBe(6); + expect(sqlStatements[0]).toBe(`ALTER TABLE "public"."table" ALTER COLUMN "column" SET DATA TYPE text;`); + expect(sqlStatements[1]).toBe(`ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE text;`); + expect(sqlStatements[2]).toBe(`DROP TYPE "public"."enum";`); + expect(sqlStatements[3]).toBe(`CREATE TYPE "public"."enum" AS ENUM('value1', 'value3', 'value2');`); + expect(sqlStatements[4]).toBe( + `ALTER TABLE "public"."table" ALTER COLUMN "column" SET DATA TYPE "public"."enum" USING "column"::"public"."enum";`, + ); + expect(sqlStatements[5]).toBe( + `ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE "public"."enum" USING "column"::"public"."enum";`, + ); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + columnsWithEnum: [ + { + column: 'column', + schema: 'public', + table: 'table', + }, + { + column: 'column', + schema: 'new_schema', + table: 'table', + }, + ], + deletedValues: [ + 'value3', + ], + name: 'enum', + newValues: [ + 'value1', + 'value3', + 'value2', + ], + schema: 'public', + type: 'alter_type_drop_value', + }); +}); diff --git a/drizzle-kit/tests/pg-identity.test.ts b/drizzle-kit/tests/pg-identity.test.ts index 906d812d4..7e0854b67 100644 --- a/drizzle-kit/tests/pg-identity.test.ts +++ b/drizzle-kit/tests/pg-identity.test.ts @@ -48,6 +48,7 @@ test('create table: identity always/by default - no params', async () => { tableName: 'users', type: 'create_table', uniqueConstraints: [], + checkConstraints: [], }, ]); 
expect(sqlStatements).toStrictEqual([ @@ -86,6 +87,7 @@ test('create table: identity always/by default - few params', async () => { tableName: 'users', type: 'create_table', uniqueConstraints: [], + checkConstraints: [], }, ]); expect(sqlStatements).toStrictEqual([ @@ -128,6 +130,7 @@ test('create table: identity always/by default - all params', async () => { tableName: 'users', type: 'create_table', uniqueConstraints: [], + checkConstraints: [], }, ]); expect(sqlStatements).toStrictEqual([ diff --git a/drizzle-kit/tests/pg-tables.test.ts b/drizzle-kit/tests/pg-tables.test.ts index 79a21a695..0648459b4 100644 --- a/drizzle-kit/tests/pg-tables.test.ts +++ b/drizzle-kit/tests/pg-tables.test.ts @@ -35,6 +35,7 @@ test('add table #1', async () => { columns: [], compositePKs: [], uniqueConstraints: [], + checkConstraints: [], compositePkName: '', }); }); @@ -63,6 +64,7 @@ test('add table #2', async () => { ], compositePKs: [], uniqueConstraints: [], + checkConstraints: [], compositePkName: '', }); }); @@ -102,6 +104,7 @@ test('add table #3', async () => { ], compositePKs: ['id;users_pk'], uniqueConstraints: [], + checkConstraints: [], compositePkName: 'users_pk', }); }); @@ -122,6 +125,7 @@ test('add table #4', async () => { columns: [], compositePKs: [], uniqueConstraints: [], + checkConstraints: [], compositePkName: '', }); expect(statements[1]).toStrictEqual({ @@ -131,6 +135,7 @@ test('add table #4', async () => { columns: [], compositePKs: [], uniqueConstraints: [], + checkConstraints: [], compositePkName: '', }); }); @@ -157,6 +162,7 @@ test('add table #5', async () => { compositePKs: [], uniqueConstraints: [], compositePkName: '', + checkConstraints: [], }); }); @@ -180,6 +186,7 @@ test('add table #6', async () => { compositePKs: [], uniqueConstraints: [], compositePkName: '', + checkConstraints: [], }); expect(statements[1]).toStrictEqual({ type: 'drop_table', @@ -211,6 +218,7 @@ test('add table #7', async () => { compositePKs: [], uniqueConstraints: [], 
compositePkName: '', + checkConstraints: [], }); expect(statements[1]).toStrictEqual({ type: 'rename_table', @@ -267,6 +275,7 @@ test('multiproject schema add table #1', async () => { compositePKs: [], compositePkName: '', uniqueConstraints: [], + checkConstraints: [], }); }); @@ -360,6 +369,7 @@ test('add schema + table #1', async () => { compositePKs: [], uniqueConstraints: [], compositePkName: '', + checkConstraints: [], }); }); diff --git a/drizzle-kit/tests/pg-views.test.ts b/drizzle-kit/tests/pg-views.test.ts new file mode 100644 index 000000000..2874caf5b --- /dev/null +++ b/drizzle-kit/tests/pg-views.test.ts @@ -0,0 +1,1911 @@ +import { sql } from 'drizzle-orm'; +import { integer, pgMaterializedView, pgSchema, pgTable, pgView } from 'drizzle-orm/pg-core'; +import { expect, test } from 'vitest'; +import { diffTestSchemas } from './schemaDiffer'; + +test('create table and view #1', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + const to = { + users: users, + view: pgView('some_view').as((qb) => qb.select().from(users)), + }; + + const { statements, sqlStatements } = await diffTestSchemas({}, to, []); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: '', + columns: [{ + name: 'id', + notNull: true, + primaryKey: true, + type: 'integer', + }], + compositePKs: [], + uniqueConstraints: [], + compositePkName: '', + checkConstraints: [], + }); + expect(statements[1]).toStrictEqual({ + type: 'create_view', + name: 'some_view', + definition: `select "id" from "users"`, + schema: 'public', + with: undefined, + materialized: false, + tablespace: undefined, + using: undefined, + withNoData: false, + }); + + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe(`CREATE TABLE IF NOT EXISTS "users" ( +\t"id" integer PRIMARY KEY NOT NULL +);\n`); + expect(sqlStatements[1]).toBe(`CREATE VIEW "public"."some_view" AS (select 
"id" from "users");`); +}); + +test('create table and view #2', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + const to = { + users: users, + view: pgView('some_view', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), + }; + + const { statements, sqlStatements } = await diffTestSchemas({}, to, []); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: '', + columns: [{ + name: 'id', + notNull: true, + primaryKey: true, + type: 'integer', + }], + compositePKs: [], + uniqueConstraints: [], + compositePkName: '', + checkConstraints: [], + }); + expect(statements[1]).toStrictEqual({ + type: 'create_view', + name: 'some_view', + definition: `SELECT * FROM "users"`, + schema: 'public', + with: undefined, + materialized: false, + tablespace: undefined, + using: undefined, + withNoData: false, + }); + + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe(`CREATE TABLE IF NOT EXISTS "users" ( +\t"id" integer PRIMARY KEY NOT NULL +);\n`); + expect(sqlStatements[1]).toBe(`CREATE VIEW "public"."some_view" AS (SELECT * FROM "users");`); +}); + +test('create table and view #3', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + const to = { + users: users, + view1: pgView('some_view1', { id: integer('id') }).with({ + checkOption: 'local', + securityBarrier: false, + securityInvoker: true, + }).as(sql`SELECT * FROM ${users}`), + view2: pgView('some_view2').with({ + checkOption: 'cascaded', + securityBarrier: true, + securityInvoker: false, + }).as((qb) => qb.select().from(users)), + }; + + const { statements, sqlStatements } = await diffTestSchemas({}, to, []); + + expect(statements.length).toBe(3); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: '', + columns: [{ + name: 'id', + notNull: true, + primaryKey: true, + type: 
'integer', + }], + compositePKs: [], + uniqueConstraints: [], + compositePkName: '', + checkConstraints: [], + }); + expect(statements[1]).toStrictEqual({ + type: 'create_view', + name: 'some_view1', + definition: `SELECT * FROM "users"`, + schema: 'public', + with: { + checkOption: 'local', + securityBarrier: false, + securityInvoker: true, + }, + materialized: false, + tablespace: undefined, + using: undefined, + withNoData: false, + }); + expect(statements[2]).toStrictEqual({ + type: 'create_view', + name: 'some_view2', + definition: `select "id" from "users"`, + schema: 'public', + with: { + checkOption: 'cascaded', + securityBarrier: true, + securityInvoker: false, + }, + materialized: false, + tablespace: undefined, + using: undefined, + withNoData: false, + }); + + expect(sqlStatements.length).toBe(3); + expect(sqlStatements[0]).toBe(`CREATE TABLE IF NOT EXISTS "users" ( +\t"id" integer PRIMARY KEY NOT NULL +);\n`); + expect(sqlStatements[1]).toBe( + `CREATE VIEW "public"."some_view1" WITH (check_option = local, security_barrier = false, security_invoker = true) AS (SELECT * FROM "users");`, + ); + expect(sqlStatements[2]).toBe( + `CREATE VIEW "public"."some_view2" WITH (check_option = cascaded, security_barrier = true, security_invoker = false) AS (select "id" from "users");`, + ); +}); + +test('create table and view #4', async () => { + const schema = pgSchema('new_schema'); + + const users = schema.table('users', { + id: integer('id').primaryKey().notNull(), + }); + const to = { + schema, + users: users, + view1: schema.view('some_view1', { id: integer('id') }).with({ + checkOption: 'local', + securityBarrier: false, + securityInvoker: true, + }).as(sql`SELECT * FROM ${users}`), + view2: schema.view('some_view2').with({ + checkOption: 'cascaded', + securityBarrier: true, + securityInvoker: false, + }).as((qb) => qb.select().from(users)), + }; + + const { statements, sqlStatements } = await diffTestSchemas({}, to, []); + + 
expect(statements.length).toBe(4); + expect(statements[0]).toStrictEqual({ + type: 'create_schema', + name: 'new_schema', + }); + expect(statements[1]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: 'new_schema', + columns: [{ + name: 'id', + notNull: true, + primaryKey: true, + type: 'integer', + }], + compositePKs: [], + uniqueConstraints: [], + compositePkName: '', + checkConstraints: [], + }); + expect(statements[2]).toStrictEqual({ + type: 'create_view', + name: 'some_view1', + definition: `SELECT * FROM "new_schema"."users"`, + schema: 'new_schema', + with: { + checkOption: 'local', + securityBarrier: false, + securityInvoker: true, + }, + materialized: false, + tablespace: undefined, + using: undefined, + withNoData: false, + }); + expect(statements[3]).toStrictEqual({ + type: 'create_view', + name: 'some_view2', + definition: `select "id" from "new_schema"."users"`, + schema: 'new_schema', + with: { + checkOption: 'cascaded', + securityBarrier: true, + securityInvoker: false, + }, + materialized: false, + tablespace: undefined, + using: undefined, + withNoData: false, + }); + + expect(sqlStatements.length).toBe(4); + expect(sqlStatements[0]).toBe(`CREATE SCHEMA "new_schema";\n`); + expect(sqlStatements[1]).toBe(`CREATE TABLE IF NOT EXISTS "new_schema"."users" ( +\t"id" integer PRIMARY KEY NOT NULL +);\n`); + expect(sqlStatements[2]).toBe( + `CREATE VIEW "new_schema"."some_view1" WITH (check_option = local, security_barrier = false, security_invoker = true) AS (SELECT * FROM "new_schema"."users");`, + ); + expect(sqlStatements[3]).toBe( + `CREATE VIEW "new_schema"."some_view2" WITH (check_option = cascaded, security_barrier = true, security_invoker = false) AS (select "id" from "new_schema"."users");`, + ); +}); + +test('create table and view #5', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + const to = { + users: users, + view1: pgView('some_view', { id: integer('id') 
}).as(sql`SELECT * FROM ${users}`), + view2: pgView('some_view', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), + }; + + await expect(diffTestSchemas({}, to, [])).rejects.toThrowError(); +}); + +test('create table and view #6', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + const to = { + users: users, + view1: pgView('some_view', { id: integer('id') }).with({ checkOption: 'cascaded' }).as(sql`SELECT * FROM ${users}`), + }; + + const { statements, sqlStatements } = await diffTestSchemas({}, to, []); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + columns: [ + { + name: 'id', + notNull: true, + primaryKey: true, + type: 'integer', + }, + ], + compositePKs: [], + compositePkName: '', + schema: '', + tableName: 'users', + type: 'create_table', + uniqueConstraints: [], + checkConstraints: [], + }); + expect(statements[1]).toStrictEqual({ + definition: 'SELECT * FROM "users"', + name: 'some_view', + schema: 'public', + type: 'create_view', + with: { + checkOption: 'cascaded', + }, + materialized: false, + tablespace: undefined, + using: undefined, + withNoData: false, + }); + + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe(`CREATE TABLE IF NOT EXISTS "users" ( +\t"id" integer PRIMARY KEY NOT NULL +);\n`); + expect(sqlStatements[1]).toBe( + `CREATE VIEW "public"."some_view" WITH (check_option = cascaded) AS (SELECT * FROM "users");`, + ); +}); + +test('create view with existing flag', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + }; + + const to = { + users: users, + view1: pgView('some_view', { id: integer('id') }).with({ checkOption: 'cascaded' }).existing(), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('create table and materialized view 
#1', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + const to = { + users: users, + view: pgMaterializedView('some_view').as((qb) => qb.select().from(users)), + }; + + const { statements, sqlStatements } = await diffTestSchemas({}, to, []); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: '', + columns: [{ + name: 'id', + notNull: true, + primaryKey: true, + type: 'integer', + }], + compositePKs: [], + uniqueConstraints: [], + compositePkName: '', + checkConstraints: [], + }); + expect(statements[1]).toStrictEqual({ + type: 'create_view', + name: 'some_view', + definition: `select "id" from "users"`, + schema: 'public', + with: undefined, + materialized: true, + tablespace: undefined, + using: undefined, + withNoData: false, + }); + + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe(`CREATE TABLE IF NOT EXISTS "users" ( +\t"id" integer PRIMARY KEY NOT NULL +);\n`); + expect(sqlStatements[1]).toBe(`CREATE MATERIALIZED VIEW "public"."some_view" AS (select "id" from "users");`); +}); + +test('create table and materialized view #2', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + const to = { + users: users, + view: pgMaterializedView('some_view', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), + }; + + const { statements, sqlStatements } = await diffTestSchemas({}, to, []); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: '', + columns: [{ + name: 'id', + notNull: true, + primaryKey: true, + type: 'integer', + }], + compositePKs: [], + uniqueConstraints: [], + compositePkName: '', + checkConstraints: [], + }); + expect(statements[1]).toStrictEqual({ + type: 'create_view', + name: 'some_view', + definition: `SELECT * FROM "users"`, + schema: 'public', + with: 
undefined, + materialized: true, + tablespace: undefined, + using: undefined, + withNoData: false, + }); + + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe(`CREATE TABLE IF NOT EXISTS "users" ( +\t"id" integer PRIMARY KEY NOT NULL +);\n`); + expect(sqlStatements[1]).toBe(`CREATE MATERIALIZED VIEW "public"."some_view" AS (SELECT * FROM "users");`); +}); + +test('create table and materialized view #3', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + const to = { + users: users, + view1: pgMaterializedView('some_view1', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), + view2: pgMaterializedView('some_view2').tablespace('some_tablespace').using('heap').withNoData().with({ + autovacuumEnabled: true, + autovacuumFreezeMaxAge: 1, + autovacuumFreezeMinAge: 1, + autovacuumFreezeTableAge: 1, + autovacuumMultixactFreezeMaxAge: 1, + autovacuumMultixactFreezeMinAge: 1, + autovacuumMultixactFreezeTableAge: 1, + autovacuumVacuumCostDelay: 1, + autovacuumVacuumCostLimit: 1, + autovacuumVacuumScaleFactor: 1, + autovacuumVacuumThreshold: 1, + fillfactor: 1, + logAutovacuumMinDuration: 1, + parallelWorkers: 1, + toastTupleTarget: 1, + userCatalogTable: true, + vacuumIndexCleanup: 'off', + vacuumTruncate: false, + }).as((qb) => qb.select().from(users)), + }; + + const { statements, sqlStatements } = await diffTestSchemas({}, to, []); + + expect(statements.length).toBe(3); + expect(statements[0]).toStrictEqual({ + type: 'create_table', + tableName: 'users', + schema: '', + columns: [{ + name: 'id', + notNull: true, + primaryKey: true, + type: 'integer', + }], + compositePKs: [], + uniqueConstraints: [], + compositePkName: '', + checkConstraints: [], + }); + expect(statements[1]).toStrictEqual({ + type: 'create_view', + name: 'some_view1', + definition: `SELECT * FROM "users"`, + schema: 'public', + with: undefined, + materialized: true, + withNoData: false, + using: undefined, + tablespace: undefined, + 
}); + expect(statements[2]).toStrictEqual({ + type: 'create_view', + name: 'some_view2', + definition: `select "id" from "users"`, + schema: 'public', + with: { + autovacuumEnabled: true, + autovacuumFreezeMaxAge: 1, + autovacuumFreezeMinAge: 1, + autovacuumFreezeTableAge: 1, + autovacuumMultixactFreezeMaxAge: 1, + autovacuumMultixactFreezeMinAge: 1, + autovacuumMultixactFreezeTableAge: 1, + autovacuumVacuumCostDelay: 1, + autovacuumVacuumCostLimit: 1, + autovacuumVacuumScaleFactor: 1, + autovacuumVacuumThreshold: 1, + fillfactor: 1, + logAutovacuumMinDuration: 1, + parallelWorkers: 1, + toastTupleTarget: 1, + userCatalogTable: true, + vacuumIndexCleanup: 'off', + vacuumTruncate: false, + }, + materialized: true, + tablespace: 'some_tablespace', + using: 'heap', + withNoData: true, + }); + + expect(sqlStatements.length).toBe(3); + expect(sqlStatements[0]).toBe(`CREATE TABLE IF NOT EXISTS "users" ( +\t"id" integer PRIMARY KEY NOT NULL +);\n`); + expect(sqlStatements[1]).toBe( + `CREATE MATERIALIZED VIEW "public"."some_view1" AS (SELECT * FROM "users");`, + ); + expect(sqlStatements[2]).toBe( + `CREATE MATERIALIZED VIEW "public"."some_view2" USING "heap" WITH (autovacuum_enabled = true, autovacuum_freeze_max_age = 1, autovacuum_freeze_min_age = 1, autovacuum_freeze_table_age = 1, autovacuum_multixact_freeze_max_age = 1, autovacuum_multixact_freeze_min_age = 1, autovacuum_multixact_freeze_table_age = 1, autovacuum_vacuum_cost_delay = 1, autovacuum_vacuum_cost_limit = 1, autovacuum_vacuum_scale_factor = 1, autovacuum_vacuum_threshold = 1, fillfactor = 1, log_autovacuum_min_duration = 1, parallel_workers = 1, toast_tuple_target = 1, user_catalog_table = true, vacuum_index_cleanup = off, vacuum_truncate = false) TABLESPACE some_tablespace AS (select "id" from "users") WITH NO DATA;`, + ); +}); + +test('create table and materialized view #4', async () => { + // same names + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + const to = { + 
users: users, + view1: pgMaterializedView('some_view', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), + view2: pgMaterializedView('some_view', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), + }; + + await expect(diffTestSchemas({}, to, [])).rejects.toThrowError(); +}); + +test('create table and materialized view #5', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + const to = { + users: users, + view1: pgMaterializedView('some_view', { id: integer('id') }).with({ autovacuumFreezeMinAge: 14 }).as( + sql`SELECT * FROM ${users}`, + ), + }; + + const { statements, sqlStatements } = await diffTestSchemas({}, to, []); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + columns: [ + { + name: 'id', + notNull: true, + primaryKey: true, + type: 'integer', + }, + ], + compositePKs: [], + compositePkName: '', + schema: '', + tableName: 'users', + type: 'create_table', + uniqueConstraints: [], + checkConstraints: [], + }); + expect(statements[1]).toEqual({ + definition: 'SELECT * FROM "users"', + name: 'some_view', + schema: 'public', + type: 'create_view', + with: { + autovacuumFreezeMinAge: 14, + }, + materialized: true, + tablespace: undefined, + using: undefined, + withNoData: false, + }); + + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe(`CREATE TABLE IF NOT EXISTS "users" ( +\t"id" integer PRIMARY KEY NOT NULL +);\n`); + expect(sqlStatements[1]).toBe( + `CREATE MATERIALIZED VIEW "public"."some_view" WITH (autovacuum_freeze_min_age = 14) AS (SELECT * FROM "users");`, + ); +}); + +test('create materialized view with existing flag', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + }; + + const to = { + users: users, + view1: pgMaterializedView('some_view', { id: integer('id') }).with({ autovacuumEnabled: true }).existing(), + }; + + const { statements, sqlStatements } = 
await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('drop view #1', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgView('some_view', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), + }; + + const to = { + users: users, + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'drop_view', + name: 'some_view', + schema: 'public', + }); + + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`DROP VIEW "public"."some_view";`); +}); + +test('drop view with existing flag', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgView('some_view', { id: integer('id') }).existing(), + }; + + const to = { + users: users, + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('drop materialized view #1', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), + }; + + const to = { + users: users, + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'drop_view', + name: 'some_view', + schema: 'public', + materialized: true, + }); + + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`DROP MATERIALIZED VIEW "public"."some_view";`); +}); + +test('drop materialized view with existing flag', async () => { + const users = pgTable('users', { + id: 
integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).existing(), + }; + + const to = { + users: users, + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('rename view #1', async () => { + const from = { + view: pgView('some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), + }; + + const to = { + view: pgView('new_some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->public.new_some_view']); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'rename_view', + nameFrom: 'some_view', + nameTo: 'new_some_view', + schema: 'public', + }); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`ALTER VIEW "public"."some_view" RENAME TO "new_some_view";`); +}); + +test('rename view with existing flag', async () => { + const from = { + view: pgView('some_view', { id: integer('id') }).existing(), + }; + + const to = { + view: pgView('new_some_view', { id: integer('id') }).existing(), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->public.new_some_view']); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('rename materialized view #1', async () => { + const from = { + view: pgMaterializedView('some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), + }; + + const to = { + view: pgMaterializedView('new_some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->public.new_some_view']); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'rename_view', + 
nameFrom: 'some_view', + nameTo: 'new_some_view', + schema: 'public', + materialized: true, + }); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`ALTER MATERIALIZED VIEW "public"."some_view" RENAME TO "new_some_view";`); +}); + +test('rename materialized view with existing flag', async () => { + const from = { + view: pgMaterializedView('some_view', { id: integer('id') }).existing(), + }; + + const to = { + view: pgMaterializedView('new_some_view', { id: integer('id') }).existing(), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->public.new_some_view']); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('view alter schema', async () => { + const schema = pgSchema('new_schema'); + + const from = { + view: pgView('some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), + }; + + const to = { + schema, + view: schema.view('some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->new_schema.some_view']); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'create_schema', + name: 'new_schema', + }); + expect(statements[1]).toStrictEqual({ + type: 'alter_view_alter_schema', + toSchema: 'new_schema', + fromSchema: 'public', + name: 'some_view', + }); + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe(`CREATE SCHEMA "new_schema";\n`); + expect(sqlStatements[1]).toBe(`ALTER VIEW "public"."some_view" SET SCHEMA "new_schema";`); +}); + +test('view alter schema with existing flag', async () => { + const schema = pgSchema('new_schema'); + + const from = { + view: pgView('some_view', { id: integer('id') }).existing(), + }; + + const to = { + schema, + view: schema.view('some_view', { id: integer('id') }).existing(), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, 
to, ['public.some_view->new_schema.some_view']); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_schema', + name: 'new_schema', + }); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`CREATE SCHEMA "new_schema";\n`); +}); + +test('view alter schema for materialized', async () => { + const schema = pgSchema('new_schema'); + + const from = { + view: pgMaterializedView('some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), + }; + + const to = { + schema, + view: schema.materializedView('some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->new_schema.some_view']); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'create_schema', + name: 'new_schema', + }); + expect(statements[1]).toStrictEqual({ + type: 'alter_view_alter_schema', + toSchema: 'new_schema', + fromSchema: 'public', + name: 'some_view', + materialized: true, + }); + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe(`CREATE SCHEMA "new_schema";\n`); + expect(sqlStatements[1]).toBe(`ALTER MATERIALIZED VIEW "public"."some_view" SET SCHEMA "new_schema";`); +}); + +test('view alter schema for materialized with existing flag', async () => { + const schema = pgSchema('new_schema'); + + const from = { + view: pgMaterializedView('some_view', { id: integer('id') }).existing(), + }; + + const to = { + schema, + view: schema.materializedView('some_view', { id: integer('id') }).existing(), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->new_schema.some_view']); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'create_schema', + name: 'new_schema', + }); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`CREATE SCHEMA "new_schema";\n`); +}); + +test('add with 
option to view #1', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgView('some_view').as((qb) => qb.select().from(users)), + }; + + const to = { + users, + view: pgView('some_view').with({ checkOption: 'cascaded', securityBarrier: true }).as((qb) => + qb.select().from(users) + ), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + name: 'some_view', + schema: 'public', + type: 'alter_view_add_with_option', + with: { + checkOption: 'cascaded', + securityBarrier: true, + }, + materialized: false, + }); + + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + `ALTER VIEW "public"."some_view" SET (check_option = cascaded, security_barrier = true);`, + ); +}); + +test('add with option to view with existing flag', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgView('some_view', {}).existing(), + }; + + const to = { + users, + view: pgView('some_view', {}).with({ checkOption: 'cascaded', securityBarrier: true }).existing(), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('add with option to materialized view #1', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view').as((qb) => qb.select().from(users)), + }; + + const to = { + users, + view: pgMaterializedView('some_view').with({ autovacuumMultixactFreezeMaxAge: 3 }).as((qb) => + qb.select().from(users) + ), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + name: 
'some_view', + schema: 'public', + type: 'alter_view_add_with_option', + with: { + autovacuumMultixactFreezeMaxAge: 3, + }, + materialized: true, + }); + + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + `ALTER MATERIALIZED VIEW "public"."some_view" SET (autovacuum_multixact_freeze_max_age = 3);`, + ); +}); + +test('add with option to materialized view with existing flag', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view', {}).existing(), + }; + + const to = { + users, + view: pgMaterializedView('some_view', {}).with({ autovacuumMultixactFreezeMaxAge: 3 }).existing(), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('drop with option from view #1', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgView('some_view').with({ checkOption: 'cascaded', securityBarrier: true, securityInvoker: true }).as((qb) => + qb.select().from(users) + ), + }; + + const to = { + users, + view: pgView('some_view').as((qb) => qb.select().from(users)), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + name: 'some_view', + schema: 'public', + type: 'alter_view_drop_with_option', + materialized: false, + with: { + checkOption: 'cascaded', + securityBarrier: true, + securityInvoker: true, + }, + }); + + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + `ALTER VIEW "public"."some_view" RESET (check_option, security_barrier, security_invoker);`, + ); +}); + +test('drop with option from view with existing flag', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + 
+ const from = { + users, + view: pgView('some_view', {}).with({ checkOption: 'cascaded', securityBarrier: true, securityInvoker: true }) + .existing(), + }; + + const to = { + users, + view: pgView('some_view', {}).existing(), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('drop with option from materialized view #1', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view').with({ autovacuumEnabled: true, autovacuumFreezeMaxAge: 10 }).as((qb) => + qb.select().from(users) + ), + }; + + const to = { + users, + view: pgMaterializedView('some_view').as((qb) => qb.select().from(users)), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + name: 'some_view', + schema: 'public', + type: 'alter_view_drop_with_option', + materialized: true, + with: { + autovacuumEnabled: true, + autovacuumFreezeMaxAge: 10, + }, + }); + + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + `ALTER MATERIALIZED VIEW "public"."some_view" RESET (autovacuum_enabled, autovacuum_freeze_max_age);`, + ); +}); + +test('drop with option from materialized view with existing flag', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view', {}).with({ autovacuumEnabled: true, autovacuumFreezeMaxAge: 10 }).existing(), + }; + + const to = { + users, + view: pgMaterializedView('some_view', {}).existing(), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('alter with option in view #1', async () => { + const users 
= pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgView('some_view').with({ securityBarrier: true, securityInvoker: true }).as((qb) => + qb.select().from(users) + ), + }; + + const to = { + users, + view: pgView('some_view').with({ securityBarrier: true }).as((qb) => qb.select().from(users)), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + name: 'some_view', + schema: 'public', + type: 'alter_view_drop_with_option', + with: { + securityInvoker: true, + }, + materialized: false, + }); + + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + `ALTER VIEW "public"."some_view" RESET (security_invoker);`, + ); +}); + +test('alter with option in view with existing flag', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgView('some_view', {}).with({ securityBarrier: true, securityInvoker: true }).existing(), + }; + + const to = { + users, + view: pgView('some_view', {}).with({ securityBarrier: true }).existing(), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('alter with option in materialized view #1', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view').with({ autovacuumEnabled: true, autovacuumVacuumScaleFactor: 1 }).as((qb) => + qb.select().from(users) + ), + }; + + const to = { + users, + view: pgMaterializedView('some_view').with({ autovacuumEnabled: true }).as((qb) => qb.select().from(users)), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + 
expect(statements[0]).toStrictEqual({ + name: 'some_view', + schema: 'public', + type: 'alter_view_drop_with_option', + with: { + autovacuumVacuumScaleFactor: 1, + }, + materialized: true, + }); + + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + `ALTER MATERIALIZED VIEW "public"."some_view" RESET (autovacuum_vacuum_scale_factor);`, + ); +}); + +test('alter with option in materialized view with existing flag', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view', {}).with({ autovacuumEnabled: true, autovacuumVacuumScaleFactor: 1 }) + .existing(), + }; + + const to = { + users, + view: pgMaterializedView('some_view', {}).with({ autovacuumEnabled: true }).existing(), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('alter with option in view #2', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgView('some_view').with({ checkOption: 'local', securityBarrier: true, securityInvoker: true }).as((qb) => + qb.selectDistinct().from(users) + ), + }; + + const to = { + users, + view: pgView('some_view').with({ checkOption: 'cascaded', securityBarrier: true, securityInvoker: true }).as((qb) => + qb.selectDistinct().from(users) + ), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'alter_view_add_with_option', + name: 'some_view', + schema: 'public', + with: { + checkOption: 'cascaded', + }, + materialized: false, + }); + + expect(sqlStatements.length).toBe(1); + + expect(sqlStatements[0]).toBe( + `ALTER VIEW "public"."some_view" SET (check_option = cascaded);`, + ); +}); + +test('alter with option in 
materialized view #2', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view').with({ autovacuumEnabled: true, fillfactor: 1 }).as((qb) => + qb.select().from(users) + ), + }; + + const to = { + users, + view: pgMaterializedView('some_view').with({ autovacuumEnabled: false, fillfactor: 1 }).as((qb) => + qb.select().from(users) + ), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'alter_view_add_with_option', + name: 'some_view', + schema: 'public', + with: { + autovacuumEnabled: false, + }, + materialized: true, + }); + + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + `ALTER MATERIALIZED VIEW "public"."some_view" SET (autovacuum_enabled = false);`, + ); +}); + +test('alter view ".as" value', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgView('some_view', { id: integer('id') }).with({ + checkOption: 'local', + securityBarrier: true, + securityInvoker: true, + }).as(sql`SELECT '123'`), + }; + + const to = { + users, + view: pgView('some_view', { id: integer('id') }).with({ + checkOption: 'local', + securityBarrier: true, + securityInvoker: true, + }).as(sql`SELECT '1234'`), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual( + { + name: 'some_view', + schema: 'public', + type: 'drop_view', + }, + ); + expect(statements[1]).toStrictEqual( + { + definition: "SELECT '1234'", + name: 'some_view', + schema: 'public', + type: 'create_view', + materialized: false, + with: { + checkOption: 'local', + securityBarrier: true, + securityInvoker: true, + }, + withNoData: false, + tablespace: undefined, + using: undefined, + }, + 
); + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe('DROP VIEW "public"."some_view";'); + expect(sqlStatements[1]).toBe( + `CREATE VIEW "public"."some_view" WITH (check_option = local, security_barrier = true, security_invoker = true) AS (SELECT '1234');`, + ); +}); + +test('alter view ".as" value with existing flag', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgView('some_view', { id: integer('id') }).with({ + checkOption: 'local', + securityBarrier: true, + securityInvoker: true, + }).existing(), + }; + + const to = { + users, + view: pgView('some_view', { id: integer('id') }).with({ + checkOption: 'local', + securityBarrier: true, + securityInvoker: true, + }).existing(), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('alter materialized view ".as" value', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).with({ + autovacuumVacuumCostLimit: 1, + }).as(sql`SELECT '123'`), + }; + + const to = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).with({ + autovacuumVacuumCostLimit: 1, + }).as(sql`SELECT '1234'`), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual( + { + name: 'some_view', + schema: 'public', + type: 'drop_view', + materialized: true, + }, + ); + expect(statements[1]).toStrictEqual( + { + definition: "SELECT '1234'", + name: 'some_view', + schema: 'public', + type: 'create_view', + with: { + autovacuumVacuumCostLimit: 1, + }, + materialized: true, + withNoData: false, + tablespace: undefined, + using: undefined, + }, + ); + 
expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe('DROP MATERIALIZED VIEW "public"."some_view";'); + expect(sqlStatements[1]).toBe( + `CREATE MATERIALIZED VIEW "public"."some_view" WITH (autovacuum_vacuum_cost_limit = 1) AS (SELECT '1234');`, + ); +}); + +test('alter materialized view ".as" value with existing flag', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).with({ + autovacuumVacuumCostLimit: 1, + }).existing(), + }; + + const to = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).with({ + autovacuumVacuumCostLimit: 1, + }).existing(), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('drop existing flag', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).with({ + autovacuumVacuumCostLimit: 1, + }).existing(), + }; + + const to = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).with({ + autovacuumVacuumCostLimit: 1, + }).as(sql`SELECT 'asd'`), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(2); + expect(statements[0]).toEqual({ + type: 'drop_view', + name: 'some_view', + schema: 'public', + materialized: true, + }); + expect(statements[1]).toEqual({ + definition: "SELECT 'asd'", + materialized: true, + name: 'some_view', + schema: 'public', + tablespace: undefined, + type: 'create_view', + using: undefined, + with: { + autovacuumVacuumCostLimit: 1, + }, + withNoData: false, + }); + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe(`DROP MATERIALIZED VIEW "public"."some_view";`); + 
expect(sqlStatements[1]).toBe( + `CREATE MATERIALIZED VIEW "public"."some_view" WITH (autovacuum_vacuum_cost_limit = 1) AS (SELECT 'asd');`, + ); +}); + +test('alter tablespace - materialize', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).tablespace('some_tablespace').with({ + autovacuumVacuumCostLimit: 1, + }).as(sql`SELECT 'asd'`), + }; + + const to = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).tablespace('new_tablespace').with({ + autovacuumVacuumCostLimit: 1, + }).as(sql`SELECT 'asd'`), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toEqual({ + type: 'alter_view_alter_tablespace', + name: 'some_view', + schema: 'public', + materialized: true, + toTablespace: 'new_tablespace', + }); + + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + `ALTER MATERIALIZED VIEW "public"."some_view" SET TABLESPACE new_tablespace;`, + ); +}); + +test('set tablespace - materialize', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).with({ + autovacuumVacuumCostLimit: 1, + }).as(sql`SELECT 'asd'`), + }; + + const to = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).tablespace('new_tablespace').with({ + autovacuumVacuumCostLimit: 1, + }).as(sql`SELECT 'asd'`), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toEqual({ + type: 'alter_view_alter_tablespace', + name: 'some_view', + schema: 'public', + materialized: true, + toTablespace: 'new_tablespace', + }); + + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + 
`ALTER MATERIALIZED VIEW "public"."some_view" SET TABLESPACE new_tablespace;`, + ); +}); + +test('drop tablespace - materialize', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).tablespace('new_tablespace').with({ + autovacuumVacuumCostLimit: 1, + }).as(sql`SELECT 'asd'`), + }; + + const to = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).with({ + autovacuumVacuumCostLimit: 1, + }).as(sql`SELECT 'asd'`), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toEqual({ + type: 'alter_view_alter_tablespace', + name: 'some_view', + schema: 'public', + materialized: true, + toTablespace: 'pg_default', + }); + + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + `ALTER MATERIALIZED VIEW "public"."some_view" SET TABLESPACE pg_default;`, + ); +}); + +test('set existing - materialized', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).tablespace('new_tablespace').with({ + autovacuumVacuumCostLimit: 1, + }).as(sql`SELECT 'asd'`), + }; + + const to = { + users, + view: pgMaterializedView('new_some_view', { id: integer('id') }).with({ + autovacuumVacuumCostLimit: 1, + autovacuumFreezeMinAge: 1, + }).withNoData().existing(), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->public.new_some_view']); + + expect(statements.length).toBe(0); + + expect(sqlStatements.length).toBe(0); +}); + +test('drop existing - materialized', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view', { id: integer('id') 
}).tablespace('new_tablespace').with({ + autovacuumVacuumCostLimit: 1, + }).existing(), + }; + + const to = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).with({ + autovacuumVacuumCostLimit: 1, + autovacuumFreezeMinAge: 1, + }).withNoData().as(sql`SELECT 'asd'`), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(2); + + expect(sqlStatements.length).toBe(2); +}); + +test('set existing', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgView('some_view', { id: integer('id') }).with({ + checkOption: 'cascaded', + }).as(sql`SELECT 'asd'`), + }; + + const to = { + users, + view: pgView('new_some_view', { id: integer('id') }).with({ + checkOption: 'cascaded', + securityBarrier: true, + }).existing(), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->public.new_some_view']); + + expect(statements.length).toBe(0); + + expect(sqlStatements.length).toBe(0); +}); + +test('alter using - materialize', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).tablespace('some_tablespace').using('some_using').with( + { + autovacuumVacuumCostLimit: 1, + }, + ).as(sql`SELECT 'asd'`), + }; + + const to = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).tablespace('some_tablespace').using('new_using').with({ + autovacuumVacuumCostLimit: 1, + }).as(sql`SELECT 'asd'`), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toEqual({ + type: 'alter_view_alter_using', + name: 'some_view', + schema: 'public', + materialized: true, + toUsing: 'new_using', + }); + + expect(sqlStatements.length).toBe(1); + 
expect(sqlStatements[0]).toBe( + `ALTER MATERIALIZED VIEW "public"."some_view" SET ACCESS METHOD "new_using";`, + ); +}); + +test('set using - materialize', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).with({ + autovacuumVacuumCostLimit: 1, + }).as(sql`SELECT 'asd'`), + }; + + const to = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).using('new_using').with({ + autovacuumVacuumCostLimit: 1, + }).as(sql`SELECT 'asd'`), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toEqual({ + type: 'alter_view_alter_using', + name: 'some_view', + schema: 'public', + materialized: true, + toUsing: 'new_using', + }); + + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + `ALTER MATERIALIZED VIEW "public"."some_view" SET ACCESS METHOD "new_using";`, + ); +}); + +test('drop using - materialize', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).using('new_using').with({ + autovacuumVacuumCostLimit: 1, + }).as(sql`SELECT 'asd'`), + }; + + const to = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).with({ + autovacuumVacuumCostLimit: 1, + }).as(sql`SELECT 'asd'`), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toEqual({ + type: 'alter_view_alter_using', + name: 'some_view', + schema: 'public', + materialized: true, + toUsing: 'heap', + }); + + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + `ALTER MATERIALIZED VIEW "public"."some_view" SET ACCESS METHOD "heap";`, + ); +}); + +test('rename view and alter view', 
async () => { + const from = { + view: pgView('some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), + }; + + const to = { + view: pgView('new_some_view', { id: integer('id') }).with({ checkOption: 'cascaded' }).as( + sql`SELECT * FROM "users"`, + ), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->public.new_some_view']); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'rename_view', + nameFrom: 'some_view', + nameTo: 'new_some_view', + schema: 'public', + }); + expect(statements[1]).toStrictEqual({ + materialized: false, + name: 'new_some_view', + schema: 'public', + type: 'alter_view_add_with_option', + with: { + checkOption: 'cascaded', + }, + }); + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe(`ALTER VIEW "public"."some_view" RENAME TO "new_some_view";`); + expect(sqlStatements[1]).toBe(`ALTER VIEW "public"."new_some_view" SET (check_option = cascaded);`); +}); + +test('moved schema and alter view', async () => { + const schema = pgSchema('my_schema'); + const from = { + schema, + view: pgView('some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), + }; + + const to = { + schema, + view: schema.view('some_view', { id: integer('id') }).with({ checkOption: 'cascaded' }).as( + sql`SELECT * FROM "users"`, + ), + }; + + const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->my_schema.some_view']); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + fromSchema: 'public', + name: 'some_view', + toSchema: 'my_schema', + type: 'alter_view_alter_schema', + }); + expect(statements[1]).toStrictEqual({ + name: 'some_view', + schema: 'my_schema', + type: 'alter_view_add_with_option', + materialized: false, + with: { + checkOption: 'cascaded', + }, + }); + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe(`ALTER VIEW "public"."some_view" SET SCHEMA 
"my_schema";`); + expect(sqlStatements[1]).toBe(`ALTER VIEW "my_schema"."some_view" SET (check_option = cascaded);`); +}); diff --git a/drizzle-kit/tests/push/libsql.test.ts b/drizzle-kit/tests/push/libsql.test.ts index 89ec008ca..1877f34e1 100644 --- a/drizzle-kit/tests/push/libsql.test.ts +++ b/drizzle-kit/tests/push/libsql.test.ts @@ -3,6 +3,7 @@ import chalk from 'chalk'; import { sql } from 'drizzle-orm'; import { blob, + check, foreignKey, getTableConfig, index, @@ -11,6 +12,7 @@ import { numeric, real, sqliteTable, + sqliteView, text, uniqueIndex, } from 'drizzle-orm/sqlite-core'; @@ -389,6 +391,7 @@ test('drop autoincrement. drop column with data', async (t) => { compositePKs: [], referenceData: [], uniqueConstraints: [], + checkConstraints: [], }); expect(sqlStatements.length).toBe(4); @@ -491,6 +494,7 @@ test('change autoincrement. table is part of foreign key', async (t) => { compositePKs: [], referenceData: [], uniqueConstraints: [], + checkConstraints: [], }); expect(sqlStatements.length).toBe(4); @@ -752,6 +756,7 @@ test('recreate table with nested references', async (t) => { tableName: 'users', type: 'recreate_table', uniqueConstraints: [], + checkConstraints: [], }); expect(sqlStatements!.length).toBe(4); @@ -860,6 +865,7 @@ test('recreate table with added column not null and without default', async (t) tableName: 'users', type: 'recreate_table', uniqueConstraints: [], + checkConstraints: [], }); expect(sqlStatements!.length).toBe(4); @@ -1047,3 +1053,347 @@ test('drop not null with two indexes', async (t) => { expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(0); }); + +test('add check constraint to table', async (t) => { + const turso = createClient({ + url: ':memory:', + }); + + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: false }), + name: text('name'), + age: integer('age'), + }), + }; + + const schema2 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ 
autoIncrement: false }), + name: text('name'), + age: integer('age'), + }, (table) => ({ + someCheck: check('some_check', sql`${table.age} > 21`), + })), + }; + + const { + statements, + sqlStatements, + columnsToRemove, + infoToPrint, + shouldAskForApprove, + tablesToRemove, + tablesToTruncate, + } = await diffTestSchemasPushLibSQL( + turso, + schema1, + schema2, + [], + ); + + expect(statements!.length).toBe(1); + expect(statements![0]).toStrictEqual({ + columns: [ + { + autoincrement: false, + name: 'id', + notNull: true, + generated: undefined, + primaryKey: true, + type: 'integer', + }, + { + autoincrement: false, + name: 'name', + notNull: false, + generated: undefined, + primaryKey: false, + type: 'text', + }, + { + autoincrement: false, + name: 'age', + notNull: false, + generated: undefined, + primaryKey: false, + type: 'integer', + }, + ], + compositePKs: [], + referenceData: [], + tableName: 'users', + type: 'recreate_table', + uniqueConstraints: [], + checkConstraints: ['some_check;"users"."age" > 21'], + }); + + expect(sqlStatements!.length).toBe(4); + expect(sqlStatements![0]).toBe(`CREATE TABLE \`__new_users\` ( +\t\`id\` integer PRIMARY KEY NOT NULL, +\t\`name\` text, +\t\`age\` integer, +\tCONSTRAINT "some_check" CHECK("__new_users"."age" > 21) +);\n`); + expect(sqlStatements[1]).toBe( + 'INSERT INTO `__new_users`("id", "name", "age") SELECT "id", "name", "age" FROM `users`;', + ); + expect(sqlStatements![2]).toBe(`DROP TABLE \`users\`;`); + expect(sqlStatements![3]).toBe( + `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, + ); + + expect(columnsToRemove!.length).toBe(0); + expect(infoToPrint!.length).toBe(0); + expect(shouldAskForApprove).toBe(false); + expect(tablesToRemove!.length).toBe(0); + expect(tablesToTruncate!.length).toBe(0); +}); + +test('drop check constraint', async (t) => { + const turso = createClient({ + url: ':memory:', + }); + + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: false 
}), + name: text('name'), + age: integer('age'), + }, (table) => ({ + someCheck: check('some_check', sql`${table.age} > 21`), + })), + }; + + const schema2 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: false }), + name: text('name'), + age: integer('age'), + }), + }; + + const { + statements, + sqlStatements, + columnsToRemove, + infoToPrint, + shouldAskForApprove, + tablesToRemove, + tablesToTruncate, + } = await diffTestSchemasPushLibSQL( + turso, + schema1, + schema2, + [], + ); + + expect(statements!.length).toBe(1); + expect(statements![0]).toStrictEqual({ + columns: [ + { + autoincrement: false, + name: 'id', + notNull: true, + generated: undefined, + primaryKey: true, + type: 'integer', + }, + { + autoincrement: false, + name: 'name', + notNull: false, + generated: undefined, + primaryKey: false, + type: 'text', + }, + { + autoincrement: false, + name: 'age', + notNull: false, + generated: undefined, + primaryKey: false, + type: 'integer', + }, + ], + compositePKs: [], + referenceData: [], + tableName: 'users', + type: 'recreate_table', + uniqueConstraints: [], + checkConstraints: [], + }); + + expect(sqlStatements!.length).toBe(4); + expect(sqlStatements![0]).toBe(`CREATE TABLE \`__new_users\` ( +\t\`id\` integer PRIMARY KEY NOT NULL, +\t\`name\` text, +\t\`age\` integer +);\n`); + expect(sqlStatements[1]).toBe( + 'INSERT INTO `__new_users`("id", "name", "age") SELECT "id", "name", "age" FROM `users`;', + ); + expect(sqlStatements![2]).toBe(`DROP TABLE \`users\`;`); + expect(sqlStatements![3]).toBe( + `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, + ); + + expect(columnsToRemove!.length).toBe(0); + expect(infoToPrint!.length).toBe(0); + expect(shouldAskForApprove).toBe(false); + expect(tablesToRemove!.length).toBe(0); + expect(tablesToTruncate!.length).toBe(0); +}); + +test('db has checks. 
Push with same names', async () => { + const turso = createClient({ + url: ':memory:', + }); + + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: false }), + name: text('name'), + age: integer('age'), + }, (table) => ({ + someCheck: check('some_check', sql`${table.age} > 21`), + })), + }; + + const schema2 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: false }), + name: text('name'), + age: integer('age'), + }, (table) => ({ + someCheck: check('some_check', sql`some new value`), + })), + }; + + const { + statements, + sqlStatements, + columnsToRemove, + infoToPrint, + shouldAskForApprove, + tablesToRemove, + tablesToTruncate, + } = await diffTestSchemasPushLibSQL( + turso, + schema1, + schema2, + [], + false, + [], + ); + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); + expect(columnsToRemove!.length).toBe(0); + expect(infoToPrint!.length).toBe(0); + expect(shouldAskForApprove).toBe(false); + expect(tablesToRemove!.length).toBe(0); + expect(tablesToTruncate!.length).toBe(0); +}); + +test('create view', async () => { + const turso = createClient({ + url: ':memory:', + }); + + const table = sqliteTable('test', { + id: int('id').primaryKey(), + }); + + const schema1 = { + test: table, + }; + + const schema2 = { + test: table, + view: sqliteView('view').as((qb) => qb.select().from(table)), + }; + + const { statements, sqlStatements } = await diffTestSchemasPushLibSQL( + turso, + schema1, + schema2, + [], + ); + + expect(statements).toStrictEqual([ + { + definition: 'select "id" from "test"', + name: 'view', + type: 'sqlite_create_view', + }, + ]); + expect(sqlStatements).toStrictEqual([ + `CREATE VIEW \`view\` AS select "id" from "test";`, + ]); +}); + +test('drop view', async () => { + const turso = createClient({ + url: ':memory:', + }); + + const table = sqliteTable('test', { + id: int('id').primaryKey(), + }); + + const schema1 = { + test: table, + view: 
sqliteView('view').as((qb) => qb.select().from(table)), + }; + + const schema2 = { + test: table, + }; + + const { statements, sqlStatements } = await diffTestSchemasPushLibSQL( + turso, + schema1, + schema2, + [], + ); + + expect(statements).toStrictEqual([ + { + name: 'view', + type: 'drop_view', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'DROP VIEW \`view\`;', + ]); +}); + +test('alter view ".as"', async () => { + const turso = createClient({ + url: ':memory:', + }); + + const table = sqliteTable('test', { + id: int('id').primaryKey(), + }); + + const schema1 = { + test: table, + view: sqliteView('view').as((qb) => qb.select().from(table).where(sql`${table.id} = 1`)), + }; + + const schema2 = { + test: table, + view: sqliteView('view').as((qb) => qb.select().from(table)), + }; + + const { statements, sqlStatements } = await diffTestSchemasPushLibSQL( + turso, + schema1, + schema2, + [], + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); diff --git a/drizzle-kit/tests/push/mysql-push.test.ts b/drizzle-kit/tests/push/mysql-push.test.ts new file mode 100644 index 000000000..ba64ccddb --- /dev/null +++ b/drizzle-kit/tests/push/mysql-push.test.ts @@ -0,0 +1,345 @@ +import Docker from 'dockerode'; +import { sql } from 'drizzle-orm'; +import { check, int, mysqlTable, mysqlView } from 'drizzle-orm/mysql-core'; +import fs from 'fs'; +import getPort from 'get-port'; +import { Connection, createConnection } from 'mysql2/promise'; +import { diffTestSchemasPushMysql } from 'tests/schemaDiffer'; +import { v4 as uuid } from 'uuid'; +import { afterAll, beforeAll, expect, test } from 'vitest'; + +let client: Connection; +let mysqlContainer: Docker.Container; + +async function createDockerDB(): Promise { + const docker = new Docker(); + const port = await getPort({ port: 3306 }); + const image = 'mysql:8'; + + const pullStream = await docker.pull(image); + await new Promise((resolve, reject) => + // eslint-disable-next-line 
@typescript-eslint/no-unsafe-argument + docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) + ); + + mysqlContainer = await docker.createContainer({ + Image: image, + Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], + name: `drizzle-integration-tests-${uuid()}`, + HostConfig: { + AutoRemove: true, + PortBindings: { + '3306/tcp': [{ HostPort: `${port}` }], + }, + }, + }); + + await mysqlContainer.start(); + + return `mysql://root:mysql@127.0.0.1:${port}/drizzle`; +} + +beforeAll(async () => { + const connectionString = process.env.MYSQL_CONNECTION_STRING ?? await createDockerDB(); + + const sleep = 1000; + let timeLeft = 20000; + let connected = false; + let lastError: unknown | undefined; + do { + try { + client = await createConnection(connectionString); + await client.connect(); + connected = true; + break; + } catch (e) { + lastError = e; + await new Promise((resolve) => setTimeout(resolve, sleep)); + timeLeft -= sleep; + } + } while (timeLeft > 0); + if (!connected) { + console.error('Cannot connect to MySQL'); + await client?.end().catch(console.error); + await mysqlContainer?.stop().catch(console.error); + throw lastError; + } +}); + +afterAll(async () => { + await client?.end().catch(console.error); + await mysqlContainer?.stop().catch(console.error); +}); + +if (!fs.existsSync('tests/push/mysql')) { + fs.mkdirSync('tests/push/mysql'); +} + +test('add check constraint to table', async () => { + const schema1 = { + test: mysqlTable('test', { + id: int('id').primaryKey(), + values: int('values'), + }), + }; + const schema2 = { + test: mysqlTable('test', { + id: int('id').primaryKey(), + values: int('values'), + }, (table) => ({ + checkConstraint1: check('some_check1', sql`${table.values} < 100`), + checkConstraint2: check('some_check2', sql`'test' < 100`), + })), + }; + + const { statements, sqlStatements } = await diffTestSchemasPushMysql( + client, + schema1, + schema2, + [], + 'drizzle', + false, + ); + + 
expect(statements).toStrictEqual([ + { + type: 'create_check_constraint', + tableName: 'test', + schema: '', + data: 'some_check1;\`test\`.\`values\` < 100', + }, + { + data: "some_check2;'test' < 100", + schema: '', + tableName: 'test', + type: 'create_check_constraint', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE \`test\` ADD CONSTRAINT \`some_check1\` CHECK (\`test\`.\`values\` < 100);', + `ALTER TABLE \`test\` ADD CONSTRAINT \`some_check2\` CHECK ('test' < 100);`, + ]); + + await client.query(`DROP TABLE \`test\`;`); +}); + +test('drop check constraint to table', async () => { + const schema1 = { + test: mysqlTable('test', { + id: int('id').primaryKey(), + values: int('values'), + }, (table) => ({ + checkConstraint1: check('some_check1', sql`${table.values} < 100`), + checkConstraint2: check('some_check2', sql`'test' < 100`), + })), + }; + const schema2 = { + test: mysqlTable('test', { + id: int('id').primaryKey(), + values: int('values'), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPushMysql( + client, + schema1, + schema2, + [], + 'drizzle', + false, + ); + + expect(statements).toStrictEqual([ + { + type: 'delete_check_constraint', + tableName: 'test', + schema: '', + constraintName: 'some_check1', + }, + { + constraintName: 'some_check2', + schema: '', + tableName: 'test', + type: 'delete_check_constraint', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE \`test\` DROP CONSTRAINT \`some_check1\`;', + `ALTER TABLE \`test\` DROP CONSTRAINT \`some_check2\`;`, + ]); + + await client.query(`DROP TABLE \`test\`;`); +}); + +test('db has checks. 
Push with same names', async () => { + const schema1 = { + test: mysqlTable('test', { + id: int('id').primaryKey(), + values: int('values').default(1), + }, (table) => ({ + checkConstraint: check('some_check', sql`${table.values} < 100`), + })), + }; + const schema2 = { + test: mysqlTable('test', { + id: int('id').primaryKey(), + values: int('values').default(1), + }, (table) => ({ + checkConstraint: check('some_check', sql`some new value`), + })), + }; + + const { statements, sqlStatements } = await diffTestSchemasPushMysql( + client, + schema1, + schema2, + [], + 'drizzle', + ); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); + + await client.query(`DROP TABLE \`test\`;`); +}); + +test('create view', async () => { + const table = mysqlTable('test', { + id: int('id').primaryKey(), + }); + + const schema1 = { + test: table, + }; + + const schema2 = { + test: table, + view: mysqlView('view').as((qb) => qb.select().from(table)), + }; + + const { statements, sqlStatements } = await diffTestSchemasPushMysql( + client, + schema1, + schema2, + [], + 'drizzle', + false, + ); + + expect(statements).toStrictEqual([ + { + definition: 'select \`id\` from \`test\`', + name: 'view', + type: 'mysql_create_view', + replace: false, + sqlSecurity: 'definer', + withCheckOption: undefined, + algorithm: 'undefined', + }, + ]); + expect(sqlStatements).toStrictEqual([ + `CREATE ALGORITHM = undefined +SQL SECURITY definer +VIEW \`view\` AS (select \`id\` from \`test\`);`, + ]); + + await client.query(`DROP TABLE \`test\`;`); +}); + +test('drop view', async () => { + const table = mysqlTable('test', { + id: int('id').primaryKey(), + }); + + const schema1 = { + test: table, + view: mysqlView('view').as((qb) => qb.select().from(table)), + }; + + const schema2 = { + test: table, + }; + + const { statements, sqlStatements } = await diffTestSchemasPushMysql( + client, + schema1, + schema2, + [], + 'drizzle', + false, + ); + + 
expect(statements).toStrictEqual([ + { + name: 'view', + type: 'drop_view', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'DROP VIEW \`view\`;', + ]); + await client.query(`DROP TABLE \`test\`;`); + await client.query(`DROP VIEW \`view\`;`); +}); + +test('alter view ".as"', async () => { + const table = mysqlTable('test', { + id: int('id').primaryKey(), + }); + + const schema1 = { + test: table, + view: mysqlView('view').as((qb) => qb.select().from(table).where(sql`${table.id} = 1`)), + }; + + const schema2 = { + test: table, + view: mysqlView('view').as((qb) => qb.select().from(table)), + }; + + const { statements, sqlStatements } = await diffTestSchemasPushMysql( + client, + schema1, + schema2, + [], + 'drizzle', + false, + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); + + await client.query(`DROP TABLE \`test\`;`); + await client.query(`DROP VIEW \`view\`;`); +}); + +test('alter meta options with distinct in definition', async () => { + const table = mysqlTable('test', { + id: int('id').primaryKey(), + }); + + const schema1 = { + test: table, + view: mysqlView('view').withCheckOption('cascaded').sqlSecurity('definer').algorithm('merge').as(( + qb, + ) => qb.selectDistinct().from(table).where(sql`${table.id} = 1`)), + }; + + const schema2 = { + test: table, + view: mysqlView('view').withCheckOption('cascaded').sqlSecurity('definer').algorithm('undefined').as((qb) => + qb.selectDistinct().from(table) + ), + }; + + await expect(diffTestSchemasPushMysql( + client, + schema1, + schema2, + [], + 'drizzle', + false, + )).rejects.toThrowError(); + + await client.query(`DROP TABLE \`test\`;`); +}); diff --git a/drizzle-kit/tests/push/pg.test.ts b/drizzle-kit/tests/push/pg.test.ts index cb1a97122..fb2ffdc8d 100644 --- a/drizzle-kit/tests/push/pg.test.ts +++ b/drizzle-kit/tests/push/pg.test.ts @@ -1,9 +1,11 @@ import { PGlite } from '@electric-sql/pglite'; +import chalk from 'chalk'; import { bigint, bigserial, boolean, char, + 
check, date, doublePrecision, index, @@ -13,9 +15,11 @@ import { jsonb, numeric, pgEnum, + pgMaterializedView, pgSchema, pgSequence, pgTable, + pgView, real, serial, smallint, @@ -25,13 +29,12 @@ import { uniqueIndex, uuid, varchar, - vector, } from 'drizzle-orm/pg-core'; import { drizzle } from 'drizzle-orm/pglite'; -import { SQL, sql } from 'drizzle-orm/sql'; +import { eq, SQL, sql } from 'drizzle-orm/sql'; import { pgSuggestions } from 'src/cli/commands/pgPushUtils'; -import { diffTestSchemasPush } from 'tests/schemaDiffer'; -import { afterEach, expect, test } from 'vitest'; +import { diffTestSchemas, diffTestSchemasPush } from 'tests/schemaDiffer'; +import { expect, test } from 'vitest'; import { DialectSuite, run } from './common'; const pgSuite: DialectSuite = { @@ -40,10 +43,7 @@ const pgSuite: DialectSuite = { const customSchema = pgSchema('schemass'); - const transactionStatusEnum = customSchema.enum( - 'TransactionStatusEnum', - ['PENDING', 'FAILED', 'SUCCESS'], - ); + const transactionStatusEnum = customSchema.enum('TransactionStatusEnum', ['PENDING', 'FAILED', 'SUCCESS']); const enumname = pgEnum('enumname', ['three', 'two', 'one']); @@ -53,11 +53,7 @@ const pgSuite: DialectSuite = { enumname: pgEnum('enumname', ['three', 'two', 'one']), customSchema: customSchema, - transactionStatusEnum: customSchema.enum('TransactionStatusEnum', [ - 'PENDING', - 'FAILED', - 'SUCCESS', - ]), + transactionStatusEnum: customSchema.enum('TransactionStatusEnum', ['PENDING', 'FAILED', 'SUCCESS']), allSmallSerials: pgTable('schema_test', { columnAll: uuid('column_all').defaultRandom(), @@ -95,15 +91,9 @@ const pgSuite: DialectSuite = { withTimezone: true, mode: 'string', }).defaultNow(), - columnAll: timestamp('column_all', { mode: 'string' }).default( - '2023-03-01 12:47:29.792', - ), - column: timestamp('column', { mode: 'string' }).default( - sql`'2023-02-28 16:18:31.18'`, - ), - column2: timestamp('column2', { mode: 'string', precision: 3 }).default( - sql`'2023-02-28 
16:18:31.18'`, - ), + columnAll: timestamp('column_all', { mode: 'string' }).default('2023-03-01 12:47:29.792'), + column: timestamp('column', { mode: 'string' }).default(sql`'2023-02-28 16:18:31.18'`), + column2: timestamp('column2', { mode: 'string', precision: 3 }).default(sql`'2023-02-28 16:18:31.18'`), }), allUuids: customSchema.table('all_uuids', { @@ -113,9 +103,7 @@ const pgSuite: DialectSuite = { allDates: customSchema.table('all_dates', { column_date_now: date('column_date_now').defaultNow(), - column_all: date('column_all', { mode: 'date' }) - .default(new Date()) - .notNull(), + column_all: date('column_all', { mode: 'date' }).default(new Date()).notNull(), column: date('column'), }), @@ -126,9 +114,7 @@ const pgSuite: DialectSuite = { }), allBigints: pgTable('all_bigints', { - columnAll: bigint('column_all', { mode: 'number' }) - .default(124) - .notNull(), + columnAll: bigint('column_all', { mode: 'number' }).default(124).notNull(), column: bigint('column', { mode: 'number' }), }), @@ -146,9 +132,7 @@ const pgSuite: DialectSuite = { columnMinToSec: interval('column_min_to_sec', { fields: 'minute to second', }), - columnWithoutFields: interval('column_without_fields') - .default('00:00:01') - .notNull(), + columnWithoutFields: interval('column_without_fields').default('00:00:01').notNull(), column: interval('column'), column5: interval('column5', { fields: 'minute to second', @@ -200,9 +184,7 @@ const pgSuite: DialectSuite = { }), allJsonb: customSchema.table('all_jsonb', { - columnDefaultObject: jsonb('column_default_object') - .default({ hello: 'world world' }) - .notNull(), + columnDefaultObject: jsonb('column_default_object').default({ hello: 'world world' }).notNull(), columnDefaultArray: jsonb('column_default_array').default({ hello: { 'world world': ['foo', 'bar'] }, }), @@ -210,9 +192,7 @@ const pgSuite: DialectSuite = { }), allJson: customSchema.table('all_json', { - columnDefaultObject: json('column_default_object') - .default({ hello: 'world 
world' }) - .notNull(), + columnDefaultObject: json('column_default_object').default({ hello: 'world world' }).notNull(), columnDefaultArray: json('column_default_array').default({ hello: { 'world world': ['foo', 'bar'] }, foo: 'bar', @@ -228,22 +208,16 @@ const pgSuite: DialectSuite = { }), allNumerics: customSchema.table('all_numerics', { - columnAll: numeric('column_all', { precision: 1, scale: 1 }) - .default('32') - .notNull(), + columnAll: numeric('column_all', { precision: 1, scale: 1 }).default('32').notNull(), column: numeric('column'), columnPrimary: numeric('column_primary').primaryKey().notNull(), }), }; - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema1, - [], - false, - ['public', 'schemass'], - ); + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema1, [], false, [ + 'public', + 'schemass', + ]); expect(statements.length).toBe(0); }, @@ -276,14 +250,7 @@ const pgSuite: DialectSuite = { ), }; - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ schema: '', @@ -370,20 +337,11 @@ const pgSuite: DialectSuite = { id: integer('id'), id2: integer('id2'), name: text('name'), - generatedName: text('gen_name').generatedAlwaysAs( - (): SQL => sql`${schema2.users.name}`, - ), + generatedName: text('gen_name').generatedAlwaysAs((): SQL => sql`${schema2.users.name}`), }), }; - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { @@ -427,20 +385,11 @@ const pgSuite: DialectSuite = { id: 
integer('id'), id2: integer('id2'), name: text('name'), - generatedName: text('gen_name').generatedAlwaysAs( - (): SQL => sql`${schema2.users.name}`, - ), + generatedName: text('gen_name').generatedAlwaysAs((): SQL => sql`${schema2.users.name}`), }), }; - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { @@ -478,9 +427,7 @@ const pgSuite: DialectSuite = { id: integer('id'), id2: integer('id2'), name: text('name'), - generatedName: text('gen_name').generatedAlwaysAs( - (): SQL => sql`${schema1.users.name}`, - ), + generatedName: text('gen_name').generatedAlwaysAs((): SQL => sql`${schema1.users.name}`), }), }; const schema2 = { @@ -492,14 +439,7 @@ const pgSuite: DialectSuite = { }), }; - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { @@ -516,9 +456,7 @@ const pgSuite: DialectSuite = { type: 'alter_table_alter_column_drop_generated', }, ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" ALTER COLUMN "gen_name" DROP EXPRESSION;', - ]); + expect(sqlStatements).toStrictEqual(['ALTER TABLE "users" ALTER COLUMN "gen_name" DROP EXPRESSION;']); }, async alterGeneratedConstraint() { @@ -529,9 +467,7 @@ const pgSuite: DialectSuite = { id: integer('id'), id2: integer('id2'), name: text('name'), - generatedName: text('gen_name').generatedAlwaysAs( - (): SQL => sql`${schema1.users.name}`, - ), + generatedName: text('gen_name').generatedAlwaysAs((): SQL => sql`${schema1.users.name}`), }), }; const schema2 = { @@ -539,20 +475,11 @@ const pgSuite: DialectSuite = { id: integer('id'), id2: integer('id2'), 
name: text('name'), - generatedName: text('gen_name').generatedAlwaysAs( - (): SQL => sql`${schema2.users.name} || 'hello'`, - ), + generatedName: text('gen_name').generatedAlwaysAs((): SQL => sql`${schema2.users.name} || 'hello'`), }), }; - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([]); expect(sqlStatements).toStrictEqual([]); @@ -567,20 +494,11 @@ const pgSuite: DialectSuite = { id: integer('id'), id2: integer('id2'), name: text('name'), - generatedName: text('gen_name').generatedAlwaysAs( - (): SQL => sql`${schema2.users.name} || 'hello'`, - ), + generatedName: text('gen_name').generatedAlwaysAs((): SQL => sql`${schema2.users.name} || 'hello'`), }), }; - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { @@ -620,6 +538,7 @@ const pgSuite: DialectSuite = { tableName: 'users', type: 'create_table', uniqueConstraints: [], + checkConstraints: [], }, ]); expect(sqlStatements).toStrictEqual([ @@ -638,14 +557,7 @@ const pgSuite: DialectSuite = { seq: pgSequence('my_seq', { startWith: 100 }), }; - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements.length).toBe(0); }, @@ -661,20 +573,13 @@ const pgSuite: DialectSuite = { }, (t) => ({ removeColumn: index('removeColumn').on(t.name, t.id), - addColumn: index('addColumn') - .on(t.name.desc()) - .with({ fillfactor: 70 }), + addColumn: 
index('addColumn').on(t.name.desc()).with({ fillfactor: 70 }), removeExpression: index('removeExpression') .on(t.name.desc(), sql`name`) .concurrently(), addExpression: index('addExpression').on(t.id.desc()), - changeExpression: index('changeExpression').on( - t.id.desc(), - sql`name`, - ), - changeName: index('changeName') - .on(t.name.desc(), t.id.asc().nullsLast()) - .with({ fillfactor: 70 }), + changeExpression: index('changeExpression').on(t.id.desc(), sql`name`), + changeName: index('changeName').on(t.name.desc(), t.id.asc().nullsLast()).with({ fillfactor: 70 }), changeWith: index('changeWith').on(t.name).with({ fillfactor: 70 }), changeUsing: index('changeUsing').on(t.name), }), @@ -690,17 +595,10 @@ const pgSuite: DialectSuite = { }, (t) => ({ removeColumn: index('removeColumn').on(t.name), - addColumn: index('addColumn') - .on(t.name.desc(), t.id.nullsLast()) - .with({ fillfactor: 70 }), - removeExpression: index('removeExpression') - .on(t.name.desc()) - .concurrently(), + addColumn: index('addColumn').on(t.name.desc(), t.id.nullsLast()).with({ fillfactor: 70 }), + removeExpression: index('removeExpression').on(t.name.desc()).concurrently(), addExpression: index('addExpression').on(t.id.desc()), - changeExpression: index('changeExpression').on( - t.id.desc(), - sql`name desc`, - ), + changeExpression: index('changeExpression').on(t.id.desc(), sql`name desc`), changeName: index('newName') .on(t.name.desc(), sql`name`) .with({ fillfactor: 70 }), @@ -710,14 +608,7 @@ const pgSuite: DialectSuite = { ), }; - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(sqlStatements).toStrictEqual([ 'DROP INDEX IF EXISTS "changeName";', @@ -748,9 +639,7 @@ const pgSuite: DialectSuite = { name: text('name'), }, (t) => ({ - indx: index() - .on(t.name.desc(), 
t.id.asc().nullsLast()) - .with({ fillfactor: 70 }), + indx: index().on(t.name.desc(), t.id.asc().nullsLast()).with({ fillfactor: 70 }), }), ), }; @@ -762,14 +651,7 @@ const pgSuite: DialectSuite = { }), }; - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ @@ -780,9 +662,7 @@ const pgSuite: DialectSuite = { }); expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `DROP INDEX IF EXISTS "users_name_id_index";`, - ); + expect(sqlStatements[0]).toBe(`DROP INDEX IF EXISTS "users_name_id_index";`); }, async indexesToBeNotTriggered() { @@ -832,14 +712,7 @@ const pgSuite: DialectSuite = { ), }; - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements.length).toBe(0); }, @@ -883,14 +756,7 @@ const pgSuite: DialectSuite = { ), }; - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements.length).toBe(0); }, @@ -956,14 +822,7 @@ const pgSuite: DialectSuite = { ), }; - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); const query = async (sql: string, params?: any[]) => { const result = await client.query(sql, params ?? 
[]); return result.rows as any[]; @@ -971,9 +830,7 @@ const pgSuite: DialectSuite = { const { statementsToExecute } = await pgSuggestions({ query }, statements); - expect(statementsToExecute).toStrictEqual([ - 'ALTER TABLE "User" ALTER COLUMN "email" SET NOT NULL;', - ]); + expect(statementsToExecute).toStrictEqual(['ALTER TABLE "User" ALTER COLUMN "email" SET NOT NULL;']); }, async addNotNullWithDataNoRollback() { @@ -1038,14 +895,7 @@ const pgSuite: DialectSuite = { ), }; - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); const query = async (sql: string, params?: any[]) => { const result = await client.query(sql, params ?? []); return result.rows as any[]; @@ -1053,14 +903,9 @@ const pgSuite: DialectSuite = { await db.insert(schema1.users).values({ id: 'str', email: 'email@gmail' }); - const { statementsToExecute, shouldAskForApprove } = await pgSuggestions( - { query }, - statements, - ); + const { statementsToExecute, shouldAskForApprove } = await pgSuggestions({ query }, statements); - expect(statementsToExecute).toStrictEqual([ - 'ALTER TABLE "User" ALTER COLUMN "email" SET NOT NULL;', - ]); + expect(statementsToExecute).toStrictEqual(['ALTER TABLE "User" ALTER COLUMN "email" SET NOT NULL;']); expect(shouldAskForApprove).toBeFalsy(); }, @@ -1143,14 +988,7 @@ test('full sequence: no changes', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); @@ -1185,14 +1023,7 @@ test('basic sequence: change fields', async () => { }), }; - const { statements, sqlStatements } = await 
diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { @@ -1260,9 +1091,7 @@ test('basic sequence: change name', async () => { type: 'rename_sequence', }, ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER SEQUENCE "public"."my_seq" RENAME TO "my_seq2";', - ]); + expect(sqlStatements).toStrictEqual(['ALTER SEQUENCE "public"."my_seq" RENAME TO "my_seq2";']); for (const st of sqlStatements) { await client.query(st); @@ -1348,14 +1177,7 @@ test('create table: identity always/by default - no params', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { @@ -1388,6 +1210,7 @@ test('create table: identity always/by default - no params', async () => { tableName: 'users', type: 'create_table', uniqueConstraints: [], + checkConstraints: [], }, ]); expect(sqlStatements).toStrictEqual([ @@ -1415,14 +1238,7 @@ test('create table: identity always/by default - few params', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { @@ -1455,6 +1271,7 @@ test('create table: identity always/by default - few params', async () => { tableName: 'users', type: 'create_table', uniqueConstraints: [], + checkConstraints: [], }, ]); expect(sqlStatements).toStrictEqual([ @@ -1488,14 +1305,7 @@ test('create table: identity always/by default - all params', async () => { }), }; - const { statements, sqlStatements } = 
await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { @@ -1528,6 +1338,7 @@ test('create table: identity always/by default - all params', async () => { tableName: 'users', type: 'create_table', uniqueConstraints: [], + checkConstraints: [], }, ]); expect(sqlStatements).toStrictEqual([ @@ -1556,14 +1367,7 @@ test('no diff: identity always/by default - no params', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([]); expect(sqlStatements).toStrictEqual([]); @@ -1596,14 +1400,7 @@ test('no diff: identity always/by default - few params', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([]); expect(sqlStatements).toStrictEqual([]); @@ -1656,14 +1453,7 @@ test('no diff: identity always/by default - all params', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([]); expect(sqlStatements).toStrictEqual([]); @@ -1684,14 +1474,7 @@ test('drop identity from a column - no params', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); + const { 
statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { @@ -1701,9 +1484,7 @@ test('drop identity from a column - no params', async () => { type: 'alter_table_alter_column_drop_identity', }, ]); - expect(sqlStatements).toStrictEqual([ - `ALTER TABLE \"users\" ALTER COLUMN \"id\" DROP IDENTITY;`, - ]); + expect(sqlStatements).toStrictEqual([`ALTER TABLE \"users\" ALTER COLUMN \"id\" DROP IDENTITY;`]); for (const st of sqlStatements) { await client.query(st); @@ -1735,14 +1516,7 @@ test('drop identity from a column - few params', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { @@ -1810,14 +1584,7 @@ test('drop identity from a column - all params', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { @@ -1865,14 +1632,7 @@ test('alter identity from a column - no params', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { @@ -1884,9 +1644,7 @@ test('alter identity from a column - no params', async () => { type: 'alter_table_alter_column_change_identity', }, ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" ALTER COLUMN "id" SET START WITH 100;', - ]); + expect(sqlStatements).toStrictEqual(['ALTER TABLE "users" 
ALTER COLUMN "id" SET START WITH 100;']); for (const st of sqlStatements) { await client.query(st); @@ -1912,14 +1670,7 @@ test('alter identity from a column - few params', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { @@ -1960,14 +1711,7 @@ test('alter identity from a column - by default to always', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { @@ -2011,14 +1755,7 @@ test('alter identity from a column - always to by default', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { @@ -2063,14 +1800,7 @@ test('add column with identity - few params', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { @@ -2128,14 +1858,7 @@ test('add identity to column - few params', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); 
expect(statements).toStrictEqual([ { @@ -2178,6 +1901,177 @@ test('add array column - empty array default', async () => { }), }; + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); + + expect(statements).toStrictEqual([ + { + type: 'alter_table_add_column', + tableName: 'test', + schema: '', + column: { name: 'values', type: 'integer[]', primaryKey: false, notNull: false, default: "'{}'" }, + }, + ]); + expect(sqlStatements).toStrictEqual(['ALTER TABLE "test" ADD COLUMN "values" integer[] DEFAULT \'{}\';']); +}); + +test('add array column - default', async () => { + const client = new PGlite(); + + const schema1 = { + test: pgTable('test', { + id: serial('id').primaryKey(), + }), + }; + const schema2 = { + test: pgTable('test', { + id: serial('id').primaryKey(), + values: integer('values').array().default([1, 2, 3]), + }), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); + + expect(statements).toStrictEqual([ + { + type: 'alter_table_add_column', + tableName: 'test', + schema: '', + column: { name: 'values', type: 'integer[]', primaryKey: false, notNull: false, default: "'{1,2,3}'" }, + }, + ]); + expect(sqlStatements).toStrictEqual(['ALTER TABLE "test" ADD COLUMN "values" integer[] DEFAULT \'{1,2,3}\';']); +}); + +test('create view', async () => { + const client = new PGlite(); + + const table = pgTable('test', { + id: serial('id').primaryKey(), + }); + const schema1 = { + test: table, + }; + + const schema2 = { + test: table, + view: pgView('view').as((qb) => qb.selectDistinct().from(table)), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); + + expect(statements).toStrictEqual([ + { + definition: 'select distinct "id" from "test"', + name: 'view', + schema: 'public', + type: 'create_view', + with: undefined, + materialized: false, + tablespace: undefined, 
+ using: undefined, + withNoData: false, + }, + ]); + expect(sqlStatements).toStrictEqual(['CREATE VIEW "public"."view" AS (select distinct "id" from "test");']); +}); + +test('add check constraint to table', async () => { + const client = new PGlite(); + + const schema1 = { + test: pgTable('test', { + id: serial('id').primaryKey(), + values: integer('values').array().default([1, 2, 3]), + }), + }; + const schema2 = { + test: pgTable('test', { + id: serial('id').primaryKey(), + values: integer('values').array().default([1, 2, 3]), + }, (table) => ({ + checkConstraint1: check('some_check1', sql`${table.values} < 100`), + checkConstraint2: check('some_check2', sql`'test' < 100`), + })), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); + + expect(statements).toStrictEqual([ + { + type: 'create_check_constraint', + tableName: 'test', + schema: '', + data: 'some_check1;"test"."values" < 100', + }, + { + data: "some_check2;'test' < 100", + schema: '', + tableName: 'test', + type: 'create_check_constraint', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE "test" ADD CONSTRAINT "some_check1" CHECK ("test"."values" < 100);', + `ALTER TABLE "test" ADD CONSTRAINT "some_check2" CHECK ('test' < 100);`, + ]); +}); + +test('create materialized view', async () => { + const client = new PGlite(); + + const table = pgTable('test', { + id: serial('id').primaryKey(), + }); + const schema1 = { + test: table, + }; + + const schema2 = { + test: table, + view: pgMaterializedView('view') + .withNoData() + .using('heap') + .as((qb) => qb.selectDistinct().from(table)), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); + + expect(statements).toStrictEqual([ + { + definition: 'select distinct "id" from "test"', + name: 'view', + schema: 'public', + type: 'create_view', + with: undefined, + materialized: true, + tablespace: 
undefined, + using: 'heap', + withNoData: true, + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'CREATE MATERIALIZED VIEW "public"."view" USING "heap" AS (select distinct "id" from "test") WITH NO DATA;', + ]); +}); + +test('drop check constraint', async () => { + const client = new PGlite(); + + const schema1 = { + test: pgTable('test', { + id: serial('id').primaryKey(), + values: integer('values').default(1), + }, (table) => ({ + checkConstraint: check('some_check', sql`${table.values} < 100`), + })), + }; + const schema2 = { + test: pgTable('test', { + id: serial('id').primaryKey(), + values: integer('values').default(1), + }), + }; + const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, @@ -2189,30 +2083,35 @@ test('add array column - empty array default', async () => { expect(statements).toStrictEqual([ { - type: 'alter_table_add_column', + type: 'delete_check_constraint', tableName: 'test', schema: '', - column: { name: 'values', type: 'integer[]', primaryKey: false, notNull: false, default: "'{}'" }, + constraintName: 'some_check', }, ]); expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "test" ADD COLUMN "values" integer[] DEFAULT \'{}\';', + 'ALTER TABLE "test" DROP CONSTRAINT "some_check";', ]); }); -test('add array column - default', async () => { +test('db has checks. 
Push with same names', async () => { const client = new PGlite(); const schema1 = { test: pgTable('test', { id: serial('id').primaryKey(), - }), + values: integer('values').default(1), + }, (table) => ({ + checkConstraint: check('some_check', sql`${table.values} < 100`), + })), }; const schema2 = { test: pgTable('test', { id: serial('id').primaryKey(), - values: integer('values').array().default([1, 2, 3]), - }), + values: integer('values').default(1), + }, (table) => ({ + checkConstraint: check('some_check', sql`some new value`), + })), }; const { statements, sqlStatements } = await diffTestSchemasPush( @@ -2224,15 +2123,548 @@ test('add array column - default', async () => { ['public'], ); + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +test('drop view', async () => { + const client = new PGlite(); + + const table = pgTable('test', { + id: serial('id').primaryKey(), + }); + const schema1 = { + test: table, + view: pgView('view').as((qb) => qb.selectDistinct().from(table)), + }; + + const schema2 = { + test: table, + }; + + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); + expect(statements).toStrictEqual([ { - type: 'alter_table_add_column', - tableName: 'test', - schema: '', - column: { name: 'values', type: 'integer[]', primaryKey: false, notNull: false, default: "'{1,2,3}'" }, + name: 'view', + schema: 'public', + type: 'drop_view', }, ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "test" ADD COLUMN "values" integer[] DEFAULT \'{1,2,3}\';', + expect(sqlStatements).toStrictEqual(['DROP VIEW "public"."view";']); +}); + +test('drop materialized view', async () => { + const client = new PGlite(); + + const table = pgTable('test', { + id: serial('id').primaryKey(), + }); + const schema1 = { + test: table, + view: pgMaterializedView('view').as((qb) => qb.selectDistinct().from(table)), + }; + + const schema2 = { + test: table, + }; + + const { 
statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); + + expect(statements).toStrictEqual([ + { + name: 'view', + schema: 'public', + type: 'drop_view', + materialized: true, + }, ]); + expect(sqlStatements).toStrictEqual(['DROP MATERIALIZED VIEW "public"."view";']); +}); + +test('push view with same name', async () => { + const client = new PGlite(); + + const table = pgTable('test', { + id: serial('id').primaryKey(), + }); + const schema1 = { + test: table, + view: pgView('view').as((qb) => qb.selectDistinct().from(table)), + }; + + const schema2 = { + test: table, + view: pgView('view').as((qb) => qb.selectDistinct().from(table).where(eq(table.id, 1))), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +test('push materialized view with same name', async () => { + const client = new PGlite(); + + const table = pgTable('test', { + id: serial('id').primaryKey(), + }); + const schema1 = { + test: table, + view: pgMaterializedView('view').as((qb) => qb.selectDistinct().from(table)), + }; + + const schema2 = { + test: table, + view: pgMaterializedView('view').as((qb) => qb.selectDistinct().from(table).where(eq(table.id, 1))), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +test('add with options for materialized view', async () => { + const client = new PGlite(); + + const table = pgTable('test', { + id: serial('id').primaryKey(), + }); + const schema1 = { + test: table, + view: pgMaterializedView('view').as((qb) => qb.selectDistinct().from(table)), + }; + + const schema2 = { + test: table, + view: pgMaterializedView('view') + .with({ autovacuumFreezeTableAge: 1, autovacuumEnabled: false 
}) + .as((qb) => qb.selectDistinct().from(table)), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + name: 'view', + schema: 'public', + type: 'alter_view_add_with_option', + with: { + autovacuumFreezeTableAge: 1, + autovacuumEnabled: false, + }, + materialized: true, + }); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + `ALTER MATERIALIZED VIEW "public"."view" SET (autovacuum_enabled = false, autovacuum_freeze_table_age = 1);`, + ); +}); + +test('add with options to materialized', async () => { + const client = new PGlite(); + + const table = pgTable('test', { + id: serial('id').primaryKey(), + }); + const schema1 = { + test: table, + view: pgMaterializedView('view').as((qb) => qb.selectDistinct().from(table)), + }; + + const schema2 = { + test: table, + view: pgMaterializedView('view') + .with({ autovacuumVacuumCostDelay: 100, vacuumTruncate: false }) + .as((qb) => qb.selectDistinct().from(table)), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + name: 'view', + schema: 'public', + type: 'alter_view_add_with_option', + with: { + autovacuumVacuumCostDelay: 100, + vacuumTruncate: false, + }, + materialized: true, + }); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + `ALTER MATERIALIZED VIEW "public"."view" SET (vacuum_truncate = false, autovacuum_vacuum_cost_delay = 100);`, + ); +}); + +test('add with options to materialized with existing flag', async () => { + const client = new PGlite(); + + const table = pgTable('test', { + id: serial('id').primaryKey(), + }); + const schema1 = { + test: table, + view: pgMaterializedView('view', {}).as(sql`SELECT id FROM "test"`), + }; + + const schema2 = { + test: table, 
+ view: pgMaterializedView('view', {}).with({ autovacuumVacuumCostDelay: 100, vacuumTruncate: false }).existing(), + }; + + const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('drop mat view with data', async () => { + const client = new PGlite(); + + const table = pgTable('table', { + id: serial('id').primaryKey(), + }); + const schema1 = { + test: table, + view: pgMaterializedView('view', {}).as(sql`SELECT * FROM ${table}`), + }; + + const schema2 = { + test: table, + }; + + const seedStatements = [`INSERT INTO "public"."table" ("id") VALUES (1), (2), (3)`]; + + const { + statements, + sqlStatements, + columnsToRemove, + infoToPrint, + schemasToRemove, + shouldAskForApprove, + tablesToRemove, + tablesToTruncate, + matViewsToRemove, + } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + undefined, + { after: seedStatements }, + ); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + materialized: true, + name: 'view', + schema: 'public', + type: 'drop_view', + }); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`DROP MATERIALIZED VIEW "public"."view";`); + expect(infoToPrint!.length).toBe(1); + expect(infoToPrint![0]).toBe(`· You're about to delete "${chalk.underline('view')}" materialized view with 3 items`); + expect(columnsToRemove!.length).toBe(0); + expect(schemasToRemove!.length).toBe(0); + expect(shouldAskForApprove).toBe(true); + expect(tablesToRemove!.length).toBe(0); + expect(matViewsToRemove!.length).toBe(1); +}); + +test('drop mat view without data', async () => { + const client = new PGlite(); + + const table = pgTable('table', { + id: serial('id').primaryKey(), + }); + const schema1 = { + test: table, + view: pgMaterializedView('view', {}).as(sql`SELECT * FROM ${table}`), + }; + + const schema2 = { + test: 
table, + }; + + const { + statements, + sqlStatements, + columnsToRemove, + infoToPrint, + schemasToRemove, + shouldAskForApprove, + tablesToRemove, + tablesToTruncate, + matViewsToRemove, + } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + ); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + materialized: true, + name: 'view', + schema: 'public', + type: 'drop_view', + }); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`DROP MATERIALIZED VIEW "public"."view";`); + expect(infoToPrint!.length).toBe(0); + expect(columnsToRemove!.length).toBe(0); + expect(schemasToRemove!.length).toBe(0); + expect(shouldAskForApprove).toBe(false); + expect(tablesToRemove!.length).toBe(0); + expect(matViewsToRemove!.length).toBe(0); +}); + +test('drop view with data', async () => { + const client = new PGlite(); + + const table = pgTable('table', { + id: serial('id').primaryKey(), + }); + const schema1 = { + test: table, + view: pgView('view', {}).as(sql`SELECT * FROM ${table}`), + }; + + const schema2 = { + test: table, + }; + + const seedStatements = [`INSERT INTO "public"."table" ("id") VALUES (1), (2), (3)`]; + + const { + statements, + sqlStatements, + columnsToRemove, + infoToPrint, + schemasToRemove, + shouldAskForApprove, + tablesToRemove, + tablesToTruncate, + matViewsToRemove, + } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public'], + undefined, + { after: seedStatements }, + ); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + name: 'view', + schema: 'public', + type: 'drop_view', + }); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`DROP VIEW "public"."view";`); + expect(infoToPrint!.length).toBe(0); + expect(columnsToRemove!.length).toBe(0); + expect(schemasToRemove!.length).toBe(0); + expect(shouldAskForApprove).toBe(false); + expect(tablesToRemove!.length).toBe(0); + 
expect(matViewsToRemove!.length).toBe(0); +}); + +test('enums ordering', async () => { + const enum1 = pgEnum('enum_users_customer_and_ship_to_settings_roles', [ + 'custAll', + 'custAdmin', + 'custClerk', + 'custInvoiceManager', + 'custMgf', + 'custApprover', + 'custOrderWriter', + 'custBuyer', + ]); + const schema1 = {}; + + const schema2 = { + enum1, + }; + + const { sqlStatements: createEnum } = await diffTestSchemas(schema1, schema2, []); + + const enum2 = pgEnum('enum_users_customer_and_ship_to_settings_roles', [ + 'addedToTop', + 'custAll', + 'custAdmin', + 'custClerk', + 'custInvoiceManager', + 'custMgf', + 'custApprover', + 'custOrderWriter', + 'custBuyer', + ]); + const schema3 = { + enum2, + }; + + const { sqlStatements: addedValueSql } = await diffTestSchemas(schema2, schema3, []); + + const enum3 = pgEnum('enum_users_customer_and_ship_to_settings_roles', [ + 'addedToTop', + 'custAll', + 'custAdmin', + 'custClerk', + 'custInvoiceManager', + 'addedToMiddle', + 'custMgf', + 'custApprover', + 'custOrderWriter', + 'custBuyer', + ]); + const schema4 = { + enum3, + }; + + const client = new PGlite(); + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema3, + schema4, + [], + false, + ['public'], + undefined, + { before: [...createEnum, ...addedValueSql], runApply: false }, + ); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + before: 'custMgf', + name: 'enum_users_customer_and_ship_to_settings_roles', + schema: 'public', + type: 'alter_type_add_value', + value: 'addedToMiddle', + }); + + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + `ALTER TYPE "public"."enum_users_customer_and_ship_to_settings_roles" ADD VALUE 'addedToMiddle' BEFORE 'custMgf';`, + ); +}); + +test('drop enum values', async () => { + const newSchema = pgSchema('mySchema'); + const enum3 = pgEnum('enum_users_customer_and_ship_to_settings_roles', [ + 'addedToTop', + 'custAll', + 'custAdmin', + 'custClerk', 
+ 'custInvoiceManager', + 'addedToMiddle', + 'custMgf', + 'custApprover', + 'custOrderWriter', + 'custBuyer', + ]); + const schema1 = { + enum3, + table: pgTable('enum_table', { + id: enum3(), + }), + newSchema, + table1: newSchema.table('enum_table', { + id: enum3(), + }), + }; + + const enum4 = pgEnum('enum_users_customer_and_ship_to_settings_roles', [ + 'addedToTop', + 'custAll', + 'custAdmin', + 'custClerk', + 'custInvoiceManager', + 'custApprover', + 'custOrderWriter', + 'custBuyer', + ]); + const schema2 = { + enum4, + table: pgTable('enum_table', { + id: enum4(), + }), + newSchema, + table1: newSchema.table('enum_table', { + id: enum4(), + }), + }; + + const client = new PGlite(); + + const { statements, sqlStatements } = await diffTestSchemasPush( + client, + schema1, + schema2, + [], + false, + ['public', 'mySchema'], + undefined, + ); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + name: 'enum_users_customer_and_ship_to_settings_roles', + schema: 'public', + type: 'alter_type_drop_value', + newValues: [ + 'addedToTop', + 'custAll', + 'custAdmin', + 'custClerk', + 'custInvoiceManager', + 'custApprover', + 'custOrderWriter', + 'custBuyer', + ], + deletedValues: ['addedToMiddle', 'custMgf'], + columnsWithEnum: [{ + column: 'id', + schema: 'public', + table: 'enum_table', + }, { + column: 'id', + schema: 'mySchema', + table: 'enum_table', + }], + }); + + expect(sqlStatements.length).toBe(6); + expect(sqlStatements[0]).toBe( + `ALTER TABLE "public"."enum_table" ALTER COLUMN "id" SET DATA TYPE text;`, + ); + expect(sqlStatements[1]).toBe( + `ALTER TABLE "mySchema"."enum_table" ALTER COLUMN "id" SET DATA TYPE text;`, + ); + expect(sqlStatements[2]).toBe( + `DROP TYPE "public"."enum_users_customer_and_ship_to_settings_roles";`, + ); + expect(sqlStatements[3]).toBe( + `CREATE TYPE "public"."enum_users_customer_and_ship_to_settings_roles" AS ENUM('addedToTop', 'custAll', 'custAdmin', 'custClerk', 'custInvoiceManager', 
'custApprover', 'custOrderWriter', 'custBuyer');`, + ); + expect(sqlStatements[4]).toBe( + `ALTER TABLE "public"."enum_table" ALTER COLUMN "id" SET DATA TYPE "public"."enum_users_customer_and_ship_to_settings_roles" USING "id"::"public"."enum_users_customer_and_ship_to_settings_roles";`, + ); + expect(sqlStatements[5]).toBe( + `ALTER TABLE "mySchema"."enum_table" ALTER COLUMN "id" SET DATA TYPE "public"."enum_users_customer_and_ship_to_settings_roles" USING "id"::"public"."enum_users_customer_and_ship_to_settings_roles";`, + ); }); diff --git a/drizzle-kit/tests/push/sqlite.test.ts b/drizzle-kit/tests/push/sqlite.test.ts index aea5cd379..5ac6f996c 100644 --- a/drizzle-kit/tests/push/sqlite.test.ts +++ b/drizzle-kit/tests/push/sqlite.test.ts @@ -1,7 +1,9 @@ import Database from 'better-sqlite3'; import chalk from 'chalk'; +import { sql } from 'drizzle-orm'; import { blob, + check, foreignKey, getTableConfig, int, @@ -9,10 +11,11 @@ import { numeric, real, sqliteTable, + sqliteView, text, uniqueIndex, } from 'drizzle-orm/sqlite-core'; -import { diffTestSchemasPushSqlite } from 'tests/schemaDiffer'; +import { diffTestSchemasPushSqlite, introspectSQLiteToFile } from 'tests/schemaDiffer'; import { expect, test } from 'vitest'; test('nothing changed in schema', async (t) => { @@ -67,6 +70,7 @@ test('nothing changed in schema', async (t) => { tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushSqlite(client, schema1, schema1, [], false); + expect(sqlStatements.length).toBe(0); expect(statements.length).toBe(0); expect(columnsToRemove!.length).toBe(0); @@ -378,6 +382,7 @@ test('drop autoincrement. drop column with data', async (t) => { compositePKs: [], referenceData: [], uniqueConstraints: [], + checkConstraints: [], }); expect(sqlStatements.length).toBe(4); @@ -494,6 +499,7 @@ test('drop autoincrement. 
drop column with data with pragma off', async (t) => { }, ], uniqueConstraints: [], + checkConstraints: [], }); expect(sqlStatements.length).toBe(4); @@ -598,6 +604,7 @@ test('change autoincrement. other table references current', async (t) => { compositePKs: [], referenceData: [], uniqueConstraints: [], + checkConstraints: [], }); expect(sqlStatements.length).toBe(6); @@ -717,6 +724,7 @@ test('drop not null, add not null', async (t) => { expect(statements!.length).toBe(2); expect(statements![0]).toStrictEqual({ + checkConstraints: [], columns: [ { autoincrement: true, @@ -742,6 +750,7 @@ test('drop not null, add not null', async (t) => { uniqueConstraints: [], }); expect(statements![1]).toStrictEqual({ + checkConstraints: [], columns: [ { autoincrement: true, @@ -868,6 +877,7 @@ test('rename table and change data type', async (t) => { tableName: 'new_users', type: 'recreate_table', uniqueConstraints: [], + checkConstraints: [], }); expect(sqlStatements!.length).toBe(5); @@ -946,6 +956,7 @@ test('rename column and change data type', async (t) => { tableName: 'users', type: 'recreate_table', uniqueConstraints: [], + checkConstraints: [], }); expect(sqlStatements!.length).toBe(4); @@ -1053,6 +1064,7 @@ test('recreate table with nested references', async (t) => { tableName: 'users', type: 'recreate_table', uniqueConstraints: [], + checkConstraints: [], }); expect(sqlStatements!.length).toBe(6); @@ -1161,6 +1173,7 @@ test('recreate table with added column not null and without default with data', tableName: 'users', type: 'recreate_table', uniqueConstraints: [], + checkConstraints: [], }); expect(sqlStatements!.length).toBe(4); @@ -1189,12 +1202,12 @@ test('recreate table with added column not null and without default with data', expect(tablesToTruncate![0]).toBe('users'); }); -test('recreate table with added column not null and without default with data', async (t) => { +test('add check constraint to table', async (t) => { const client = new Database(':memory:'); const 
schema1 = { users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), + id: int('id').primaryKey({ autoIncrement: false }), name: text('name'), age: integer('age'), }), @@ -1205,8 +1218,9 @@ test('recreate table with added column not null and without default with data', id: int('id').primaryKey({ autoIncrement: false }), name: text('name'), age: integer('age'), - newColumn: text('new_column').notNull(), - }), + }, (table) => ({ + someCheck: check('some_check', sql`${table.age} > 21`), + })), }; const { @@ -1251,31 +1265,117 @@ test('recreate table with added column not null and without default with data', primaryKey: false, type: 'integer', }, + ], + compositePKs: [], + referenceData: [], + tableName: 'users', + type: 'recreate_table', + uniqueConstraints: [], + checkConstraints: ['some_check;"users"."age" > 21'], + }); + + expect(sqlStatements!.length).toBe(4); + expect(sqlStatements![0]).toBe(`CREATE TABLE \`__new_users\` ( +\t\`id\` integer PRIMARY KEY NOT NULL, +\t\`name\` text, +\t\`age\` integer, +\tCONSTRAINT "some_check" CHECK("__new_users"."age" > 21) +);\n`); + expect(sqlStatements[1]).toBe( + 'INSERT INTO `__new_users`("id", "name", "age") SELECT "id", "name", "age" FROM `users`;', + ); + expect(sqlStatements![2]).toBe(`DROP TABLE \`users\`;`); + expect(sqlStatements![3]).toBe( + `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, + ); + + expect(columnsToRemove!.length).toBe(0); + expect(infoToPrint!.length).toBe(0); + expect(shouldAskForApprove).toBe(false); + expect(tablesToRemove!.length).toBe(0); + expect(tablesToTruncate!.length).toBe(0); +}); + +test('drop check constraint', async (t) => { + const client = new Database(':memory:'); + + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: false }), + name: text('name'), + age: integer('age'), + }, (table) => ({ + someCheck: check('some_check', sql`${table.age} > 21`), + })), + }; + + const schema2 = { + users: sqliteTable('users', { 
+ id: int('id').primaryKey({ autoIncrement: false }), + name: text('name'), + age: integer('age'), + }), + }; + + const { + statements, + sqlStatements, + columnsToRemove, + infoToPrint, + shouldAskForApprove, + tablesToRemove, + tablesToTruncate, + } = await diffTestSchemasPushSqlite( + client, + schema1, + schema2, + [], + ); + + expect(statements!.length).toBe(1); + expect(statements![0]).toStrictEqual({ + columns: [ { autoincrement: false, - name: 'new_column', + name: 'id', notNull: true, generated: undefined, + primaryKey: true, + type: 'integer', + }, + { + autoincrement: false, + name: 'name', + notNull: false, + generated: undefined, primaryKey: false, type: 'text', }, + { + autoincrement: false, + name: 'age', + notNull: false, + generated: undefined, + primaryKey: false, + type: 'integer', + }, ], compositePKs: [], referenceData: [], tableName: 'users', type: 'recreate_table', uniqueConstraints: [], + checkConstraints: [], }); expect(sqlStatements!.length).toBe(4); expect(sqlStatements![0]).toBe(`CREATE TABLE \`__new_users\` ( \t\`id\` integer PRIMARY KEY NOT NULL, \t\`name\` text, -\t\`age\` integer, -\t\`new_column\` text NOT NULL +\t\`age\` integer );\n`); expect(sqlStatements[1]).toBe( - 'INSERT INTO `__new_users`("id", "name", "age", "new_column") SELECT "id", "name", "age", "new_column" FROM `users`;', + 'INSERT INTO `__new_users`("id", "name", "age") SELECT "id", "name", "age" FROM `users`;', ); expect(sqlStatements![2]).toBe(`DROP TABLE \`users\`;`); expect(sqlStatements![3]).toBe( @@ -1288,3 +1388,149 @@ test('recreate table with added column not null and without default with data', expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(0); }); + +test('db has checks. 
Push with same names', async () => { + const client = new Database(':memory:'); + + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: false }), + name: text('name'), + age: integer('age'), + }, (table) => ({ + someCheck: check('some_check', sql`${table.age} > 21`), + })), + }; + + const schema2 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: false }), + name: text('name'), + age: integer('age'), + }, (table) => ({ + someCheck: check('some_check', sql`some new value`), + })), + }; + + const { + statements, + sqlStatements, + columnsToRemove, + infoToPrint, + schemasToRemove, + shouldAskForApprove, + tablesToRemove, + tablesToTruncate, + } = await diffTestSchemasPushSqlite( + client, + schema1, + schema2, + [], + false, + [], + ); + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); + expect(columnsToRemove!.length).toBe(0); + expect(infoToPrint!.length).toBe(0); + expect(shouldAskForApprove).toBe(false); + expect(tablesToRemove!.length).toBe(0); + expect(tablesToTruncate!.length).toBe(0); +}); + +test('create view', async () => { + const client = new Database(':memory:'); + + const table = sqliteTable('test', { + id: int('id').primaryKey(), + }); + + const schema1 = { + test: table, + }; + + const schema2 = { + test: table, + view: sqliteView('view').as((qb) => qb.select().from(table)), + }; + + const { statements, sqlStatements } = await diffTestSchemasPushSqlite( + client, + schema1, + schema2, + [], + ); + + expect(statements).toStrictEqual([ + { + definition: 'select "id" from "test"', + name: 'view', + type: 'sqlite_create_view', + }, + ]); + expect(sqlStatements).toStrictEqual([ + `CREATE VIEW \`view\` AS select "id" from "test";`, + ]); +}); + +test('drop view', async () => { + const client = new Database(':memory:'); + + const table = sqliteTable('test', { + id: int('id').primaryKey(), + }); + + const schema1 = { + test: table, + view: 
sqliteView('view').as((qb) => qb.select().from(table)), + }; + + const schema2 = { + test: table, + }; + + const { statements, sqlStatements } = await diffTestSchemasPushSqlite( + client, + schema1, + schema2, + [], + ); + + expect(statements).toStrictEqual([ + { + name: 'view', + type: 'drop_view', + }, + ]); + expect(sqlStatements).toStrictEqual([ + 'DROP VIEW \`view\`;', + ]); +}); + +test('alter view ".as"', async () => { + const client = new Database(':memory:'); + + const table = sqliteTable('test', { + id: int('id').primaryKey(), + }); + + const schema1 = { + test: table, + view: sqliteView('view').as((qb) => qb.select().from(table).where(sql`${table.id} = 1`)), + }; + + const schema2 = { + test: table, + view: sqliteView('view').as((qb) => qb.select().from(table)), + }; + + const { statements, sqlStatements } = await diffTestSchemasPushSqlite( + client, + schema1, + schema2, + [], + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); diff --git a/drizzle-kit/tests/schemaDiffer.ts b/drizzle-kit/tests/schemaDiffer.ts index 22a79ef72..3001887e1 100644 --- a/drizzle-kit/tests/schemaDiffer.ts +++ b/drizzle-kit/tests/schemaDiffer.ts @@ -1,35 +1,52 @@ import { PGlite } from '@electric-sql/pglite'; import { Client } from '@libsql/client/.'; import { Database } from 'better-sqlite3'; +import { randomUUID } from 'crypto'; import { is } from 'drizzle-orm'; -import { MySqlSchema, MySqlTable } from 'drizzle-orm/mysql-core'; -import { isPgEnum, isPgSequence, PgEnum, PgSchema, PgSequence, PgTable } from 'drizzle-orm/pg-core'; -import { SQLiteTable } from 'drizzle-orm/sqlite-core'; +import { MySqlSchema, MySqlTable, MySqlView } from 'drizzle-orm/mysql-core'; +import { + getMaterializedViewConfig, + isPgEnum, + isPgMaterializedView, + isPgSequence, + isPgView, + PgEnum, + PgMaterializedView, + PgSchema, + PgSequence, + PgTable, + PgView, +} from 'drizzle-orm/pg-core'; +import { SQLiteTable, SQLiteView } from 'drizzle-orm/sqlite-core'; 
import * as fs from 'fs'; import { Connection } from 'mysql2/promise'; import { libSqlLogSuggestionsAndReturn } from 'src/cli/commands/libSqlPushUtils'; import { columnsResolver, enumsResolver, + mySqlViewsResolver, Named, schemasResolver, sequencesResolver, + sqliteViewsResolver, tablesResolver, + viewsResolver, } from 'src/cli/commands/migrate'; +import { pgSuggestions } from 'src/cli/commands/pgPushUtils'; import { logSuggestionsAndReturn } from 'src/cli/commands/sqlitePushUtils'; import { CasingType } from 'src/cli/validations/common'; import { schemaToTypeScript as schemaToTypeScriptMySQL } from 'src/introspect-mysql'; import { schemaToTypeScript } from 'src/introspect-pg'; import { schemaToTypeScript as schemaToTypeScriptSQLite } from 'src/introspect-sqlite'; import { prepareFromMySqlImports } from 'src/serializer/mysqlImports'; -import { mysqlSchema, squashMysqlScheme } from 'src/serializer/mysqlSchema'; +import { mysqlSchema, squashMysqlScheme, ViewSquashed } from 'src/serializer/mysqlSchema'; import { generateMySqlSnapshot } from 'src/serializer/mysqlSerializer'; import { fromDatabase as fromMySqlDatabase } from 'src/serializer/mysqlSerializer'; import { prepareFromPgImports } from 'src/serializer/pgImports'; -import { pgSchema, squashPgScheme } from 'src/serializer/pgSchema'; +import { pgSchema, squashPgScheme, View } from 'src/serializer/pgSchema'; import { fromDatabase, generatePgSnapshot } from 'src/serializer/pgSerializer'; import { prepareFromSqliteImports } from 'src/serializer/sqliteImports'; -import { sqliteSchema, squashSqliteScheme } from 'src/serializer/sqliteSchema'; +import { sqliteSchema, squashSqliteScheme, View as SqliteView } from 'src/serializer/sqliteSchema'; import { fromDatabase as fromSqliteDatabase } from 'src/serializer/sqliteSerializer'; import { generateSqliteSnapshot } from 'src/serializer/sqliteSerializer'; import { @@ -50,19 +67,15 @@ import { export type PostgresSchema = Record< string, - PgTable | PgEnum | PgSchema | 
PgSequence + PgTable | PgEnum | PgSchema | PgSequence | PgView | PgMaterializedView >; -export type MysqlSchema = Record | MySqlSchema>; -export type SqliteSchema = Record>; +export type MysqlSchema = Record | MySqlSchema | MySqlView>; +export type SqliteSchema = Record | SQLiteView>; export const testSchemasResolver = (renames: Set) => async (input: ResolverInput): Promise> => { try { - if ( - input.created.length === 0 - || input.deleted.length === 0 - || renames.size === 0 - ) { + if (input.created.length === 0 || input.deleted.length === 0 || renames.size === 0) { return { created: input.created, renamed: [], @@ -114,297 +127,476 @@ export const testSchemasResolver = } }; -export const testSequencesResolver = (renames: Set) => -async ( - input: ResolverInput, -): Promise> => { - try { - if ( - input.created.length === 0 - || input.deleted.length === 0 - || renames.size === 0 - ) { - return { - created: input.created, - moved: [], - renamed: [], - deleted: input.deleted, - }; - } - - let createdSequences = [...input.created]; - let deletedSequences = [...input.deleted]; +export const testSequencesResolver = + (renames: Set) => async (input: ResolverInput): Promise> => { + try { + if (input.created.length === 0 || input.deleted.length === 0 || renames.size === 0) { + return { + created: input.created, + moved: [], + renamed: [], + deleted: input.deleted, + }; + } - const result: { - created: Sequence[]; - moved: { name: string; schemaFrom: string; schemaTo: string }[]; - renamed: { from: Sequence; to: Sequence }[]; - deleted: Sequence[]; - } = { created: [], renamed: [], deleted: [], moved: [] }; + let createdSequences = [...input.created]; + let deletedSequences = [...input.deleted]; - for (let rename of renames) { - const [from, to] = rename.split('->'); + const result: { + created: Sequence[]; + moved: { name: string; schemaFrom: string; schemaTo: string }[]; + renamed: { from: Sequence; to: Sequence }[]; + deleted: Sequence[]; + } = { created: [], renamed: 
[], deleted: [], moved: [] }; - const idxFrom = deletedSequences.findIndex((it) => { - return `${it.schema || 'public'}.${it.name}` === from; - }); + for (let rename of renames) { + const [from, to] = rename.split('->'); - if (idxFrom >= 0) { - const idxTo = createdSequences.findIndex((it) => { - return `${it.schema || 'public'}.${it.name}` === to; + const idxFrom = deletedSequences.findIndex((it) => { + return `${it.schema || 'public'}.${it.name}` === from; }); - const tableFrom = deletedSequences[idxFrom]; - const tableTo = createdSequences[idxFrom]; - - if (tableFrom.schema !== tableTo.schema) { - result.moved.push({ - name: tableFrom.name, - schemaFrom: tableFrom.schema, - schemaTo: tableTo.schema, + if (idxFrom >= 0) { + const idxTo = createdSequences.findIndex((it) => { + return `${it.schema || 'public'}.${it.name}` === to; }); - } - if (tableFrom.name !== tableTo.name) { - result.renamed.push({ - from: deletedSequences[idxFrom], - to: createdSequences[idxTo], - }); + const tableFrom = deletedSequences[idxFrom]; + const tableTo = createdSequences[idxFrom]; + + if (tableFrom.schema !== tableTo.schema) { + result.moved.push({ + name: tableFrom.name, + schemaFrom: tableFrom.schema, + schemaTo: tableTo.schema, + }); + } + + if (tableFrom.name !== tableTo.name) { + result.renamed.push({ + from: deletedSequences[idxFrom], + to: createdSequences[idxTo], + }); + } + + delete createdSequences[idxTo]; + delete deletedSequences[idxFrom]; + + createdSequences = createdSequences.filter(Boolean); + deletedSequences = deletedSequences.filter(Boolean); } + } - delete createdSequences[idxTo]; - delete deletedSequences[idxFrom]; + result.created = createdSequences; + result.deleted = deletedSequences; - createdSequences = createdSequences.filter(Boolean); - deletedSequences = deletedSequences.filter(Boolean); - } + return result; + } catch (e) { + console.error(e); + throw e; } + }; - result.created = createdSequences; - result.deleted = deletedSequences; +export const 
testEnumsResolver = + (renames: Set) => async (input: ResolverInput): Promise> => { + try { + if (input.created.length === 0 || input.deleted.length === 0 || renames.size === 0) { + return { + created: input.created, + moved: [], + renamed: [], + deleted: input.deleted, + }; + } - return result; - } catch (e) { - console.error(e); - throw e; - } -}; + let createdEnums = [...input.created]; + let deletedEnums = [...input.deleted]; -export const testEnumsResolver = (renames: Set) => -async ( - input: ResolverInput, -): Promise> => { - try { - if ( - input.created.length === 0 - || input.deleted.length === 0 - || renames.size === 0 - ) { - return { - created: input.created, - moved: [], - renamed: [], - deleted: input.deleted, - }; + const result: { + created: Enum[]; + moved: { name: string; schemaFrom: string; schemaTo: string }[]; + renamed: { from: Enum; to: Enum }[]; + deleted: Enum[]; + } = { created: [], renamed: [], deleted: [], moved: [] }; + + for (let rename of renames) { + const [from, to] = rename.split('->'); + + const idxFrom = deletedEnums.findIndex((it) => { + return `${it.schema || 'public'}.${it.name}` === from; + }); + + if (idxFrom >= 0) { + const idxTo = createdEnums.findIndex((it) => { + return `${it.schema || 'public'}.${it.name}` === to; + }); + + const tableFrom = deletedEnums[idxFrom]; + const tableTo = createdEnums[idxFrom]; + + if (tableFrom.schema !== tableTo.schema) { + result.moved.push({ + name: tableFrom.name, + schemaFrom: tableFrom.schema, + schemaTo: tableTo.schema, + }); + } + + if (tableFrom.name !== tableTo.name) { + result.renamed.push({ + from: deletedEnums[idxFrom], + to: createdEnums[idxTo], + }); + } + + delete createdEnums[idxTo]; + delete deletedEnums[idxFrom]; + + createdEnums = createdEnums.filter(Boolean); + deletedEnums = deletedEnums.filter(Boolean); + } + } + + result.created = createdEnums; + result.deleted = deletedEnums; + + return result; + } catch (e) { + console.error(e); + throw e; } + }; - let createdEnums = 
[...input.created]; - let deletedEnums = [...input.deleted]; +export const testTablesResolver = + (renames: Set) => async (input: ResolverInput): Promise> => { + try { + if (input.created.length === 0 || input.deleted.length === 0 || renames.size === 0) { + return { + created: input.created, + moved: [], + renamed: [], + deleted: input.deleted, + }; + } - const result: { - created: Enum[]; - moved: { name: string; schemaFrom: string; schemaTo: string }[]; - renamed: { from: Enum; to: Enum }[]; - deleted: Enum[]; - } = { created: [], renamed: [], deleted: [], moved: [] }; + let createdTables = [...input.created]; + let deletedTables = [...input.deleted]; - for (let rename of renames) { - const [from, to] = rename.split('->'); + const result: { + created: Table[]; + moved: { name: string; schemaFrom: string; schemaTo: string }[]; + renamed: { from: Table; to: Table }[]; + deleted: Table[]; + } = { created: [], renamed: [], deleted: [], moved: [] }; - const idxFrom = deletedEnums.findIndex((it) => { - return `${it.schema || 'public'}.${it.name}` === from; - }); + for (let rename of renames) { + const [from, to] = rename.split('->'); - if (idxFrom >= 0) { - const idxTo = createdEnums.findIndex((it) => { - return `${it.schema || 'public'}.${it.name}` === to; + const idxFrom = deletedTables.findIndex((it) => { + return `${it.schema || 'public'}.${it.name}` === from; }); - const tableFrom = deletedEnums[idxFrom]; - const tableTo = createdEnums[idxFrom]; - - if (tableFrom.schema !== tableTo.schema) { - result.moved.push({ - name: tableFrom.name, - schemaFrom: tableFrom.schema, - schemaTo: tableTo.schema, + if (idxFrom >= 0) { + const idxTo = createdTables.findIndex((it) => { + return `${it.schema || 'public'}.${it.name}` === to; }); - } - if (tableFrom.name !== tableTo.name) { - result.renamed.push({ - from: deletedEnums[idxFrom], - to: createdEnums[idxTo], - }); + const tableFrom = deletedTables[idxFrom]; + const tableTo = createdTables[idxFrom]; + + if (tableFrom.schema 
!== tableTo.schema) { + result.moved.push({ + name: tableFrom.name, + schemaFrom: tableFrom.schema, + schemaTo: tableTo.schema, + }); + } + + if (tableFrom.name !== tableTo.name) { + result.renamed.push({ + from: deletedTables[idxFrom], + to: createdTables[idxTo], + }); + } + + delete createdTables[idxTo]; + delete deletedTables[idxFrom]; + + createdTables = createdTables.filter(Boolean); + deletedTables = deletedTables.filter(Boolean); } + } - delete createdEnums[idxTo]; - delete deletedEnums[idxFrom]; + result.created = createdTables; + result.deleted = deletedTables; - createdEnums = createdEnums.filter(Boolean); - deletedEnums = deletedEnums.filter(Boolean); - } + return result; + } catch (e) { + console.error(e); + throw e; } + }; - result.created = createdEnums; - result.deleted = deletedEnums; +export const testColumnsResolver = + (renames: Set) => async (input: ColumnsResolverInput): Promise> => { + try { + if (input.created.length === 0 || input.deleted.length === 0 || renames.size === 0) { + return { + tableName: input.tableName, + schema: input.schema, + created: input.created, + renamed: [], + deleted: input.deleted, + }; + } - return result; - } catch (e) { - console.error(e); - throw e; - } -}; + let createdColumns = [...input.created]; + let deletedColumns = [...input.deleted]; + + const renamed: { from: Column; to: Column }[] = []; + + const schema = input.schema || 'public'; + + for (let rename of renames) { + const [from, to] = rename.split('->'); + + const idxFrom = deletedColumns.findIndex((it) => { + return `${schema}.${input.tableName}.${it.name}` === from; + }); + + if (idxFrom >= 0) { + const idxTo = createdColumns.findIndex((it) => { + return `${schema}.${input.tableName}.${it.name}` === to; + }); + + renamed.push({ + from: deletedColumns[idxFrom], + to: createdColumns[idxTo], + }); + + delete createdColumns[idxTo]; + delete deletedColumns[idxFrom]; + + createdColumns = createdColumns.filter(Boolean); + deletedColumns = 
deletedColumns.filter(Boolean); + } + } -export const testTablesResolver = (renames: Set) => -async ( - input: ResolverInput
, -): Promise> => { - try { - if ( - input.created.length === 0 - || input.deleted.length === 0 - || renames.size === 0 - ) { return { - created: input.created, - moved: [], - renamed: [], - deleted: input.deleted, + tableName: input.tableName, + schema: input.schema, + created: createdColumns, + deleted: deletedColumns, + renamed, }; + } catch (e) { + console.error(e); + throw e; } + }; - let createdTables = [...input.created]; - let deletedTables = [...input.deleted]; +export const testViewsResolver = + (renames: Set) => async (input: ResolverInput): Promise> => { + try { + if (input.created.length === 0 || input.deleted.length === 0 || renames.size === 0) { + return { + created: input.created, + moved: [], + renamed: [], + deleted: input.deleted, + }; + } - const result: { - created: Table[]; - moved: { name: string; schemaFrom: string; schemaTo: string }[]; - renamed: { from: Table; to: Table }[]; - deleted: Table[]; - } = { created: [], renamed: [], deleted: [], moved: [] }; + let createdViews = [...input.created]; + let deletedViews = [...input.deleted]; - for (let rename of renames) { - const [from, to] = rename.split('->'); + const result: { + created: View[]; + moved: { name: string; schemaFrom: string; schemaTo: string }[]; + renamed: { from: View; to: View }[]; + deleted: View[]; + } = { created: [], renamed: [], deleted: [], moved: [] }; - const idxFrom = deletedTables.findIndex((it) => { - return `${it.schema || 'public'}.${it.name}` === from; - }); + for (let rename of renames) { + const [from, to] = rename.split('->'); - if (idxFrom >= 0) { - const idxTo = createdTables.findIndex((it) => { - return `${it.schema || 'public'}.${it.name}` === to; + const idxFrom = deletedViews.findIndex((it) => { + return `${it.schema || 'public'}.${it.name}` === from; }); - const tableFrom = deletedTables[idxFrom]; - const tableTo = createdTables[idxFrom]; - - if (tableFrom.schema !== tableTo.schema) { - result.moved.push({ - name: tableFrom.name, - schemaFrom: 
tableFrom.schema, - schemaTo: tableTo.schema, + if (idxFrom >= 0) { + const idxTo = createdViews.findIndex((it) => { + return `${it.schema || 'public'}.${it.name}` === to; }); - } - if (tableFrom.name !== tableTo.name) { - result.renamed.push({ - from: deletedTables[idxFrom], - to: createdTables[idxTo], - }); + const viewFrom = deletedViews[idxFrom]; + const viewTo = createdViews[idxFrom]; + + if (viewFrom.schema !== viewTo.schema) { + result.moved.push({ + name: viewFrom.name, + schemaFrom: viewFrom.schema, + schemaTo: viewTo.schema, + }); + } + + if (viewFrom.name !== viewTo.name) { + result.renamed.push({ + from: deletedViews[idxFrom], + to: createdViews[idxTo], + }); + } + + delete createdViews[idxTo]; + delete deletedViews[idxFrom]; + + createdViews = createdViews.filter(Boolean); + deletedViews = deletedViews.filter(Boolean); } + } - delete createdTables[idxTo]; - delete deletedTables[idxFrom]; + result.created = createdViews; + result.deleted = deletedViews; - createdTables = createdTables.filter(Boolean); - deletedTables = deletedTables.filter(Boolean); - } + return result; + } catch (e) { + console.error(e); + throw e; } + }; - result.created = createdTables; - result.deleted = deletedTables; +export const testViewsResolverMySql = + (renames: Set) => + async (input: ResolverInput): Promise> => { + try { + if (input.created.length === 0 || input.deleted.length === 0 || renames.size === 0) { + return { + created: input.created, + moved: [], + renamed: [], + deleted: input.deleted, + }; + } - return result; - } catch (e) { - console.error(e); - throw e; - } -}; + let createdViews = [...input.created]; + let deletedViews = [...input.deleted]; -export const testColumnsResolver = (renames: Set) => -async ( - input: ColumnsResolverInput, -): Promise> => { - try { - if ( - input.created.length === 0 - || input.deleted.length === 0 - || renames.size === 0 - ) { - return { - tableName: input.tableName, - schema: input.schema, - created: input.created, - renamed: [], 
- deleted: input.deleted, - }; - } + const result: { + created: ViewSquashed[]; + moved: { name: string; schemaFrom: string; schemaTo: string }[]; + renamed: { from: ViewSquashed; to: ViewSquashed }[]; + deleted: ViewSquashed[]; + } = { created: [], renamed: [], deleted: [], moved: [] }; - let createdColumns = [...input.created]; - let deletedColumns = [...input.deleted]; + for (let rename of renames) { + const [from, to] = rename.split('->'); - const renamed: { from: Column; to: Column }[] = []; + const idxFrom = deletedViews.findIndex((it) => { + return `${it.schema || 'public'}.${it.name}` === from; + }); - const schema = input.schema || 'public'; + if (idxFrom >= 0) { + const idxTo = createdViews.findIndex((it) => { + return `${it.schema || 'public'}.${it.name}` === to; + }); - for (let rename of renames) { - const [from, to] = rename.split('->'); + const viewFrom = deletedViews[idxFrom]; + const viewTo = createdViews[idxFrom]; + + if (viewFrom.schema !== viewTo.schema) { + result.moved.push({ + name: viewFrom.name, + schemaFrom: viewFrom.schema, + schemaTo: viewTo.schema, + }); + } + + if (viewFrom.name !== viewTo.name) { + result.renamed.push({ + from: deletedViews[idxFrom], + to: createdViews[idxTo], + }); + } + + delete createdViews[idxTo]; + delete deletedViews[idxFrom]; + + createdViews = createdViews.filter(Boolean); + deletedViews = deletedViews.filter(Boolean); + } + } - const idxFrom = deletedColumns.findIndex((it) => { - return `${schema}.${input.tableName}.${it.name}` === from; - }); + result.created = createdViews; + result.deleted = deletedViews; - if (idxFrom >= 0) { - const idxTo = createdColumns.findIndex((it) => { - return `${schema}.${input.tableName}.${it.name}` === to; - }); + return result; + } catch (e) { + console.error(e); + throw e; + } + }; + +export const testViewsResolverSqlite = + (renames: Set) => async (input: ResolverInput): Promise> => { + try { + if (input.created.length === 0 || input.deleted.length === 0 || renames.size === 
0) { + return { + created: input.created, + moved: [], + renamed: [], + deleted: input.deleted, + }; + } + + let createdViews = [...input.created]; + let deletedViews = [...input.deleted]; + + const result: { + created: SqliteView[]; + moved: { name: string; schemaFrom: string; schemaTo: string }[]; + renamed: { from: SqliteView; to: SqliteView }[]; + deleted: SqliteView[]; + } = { created: [], renamed: [], deleted: [], moved: [] }; + + for (let rename of renames) { + const [from, to] = rename.split('->'); - renamed.push({ - from: deletedColumns[idxFrom], - to: createdColumns[idxTo], + const idxFrom = deletedViews.findIndex((it) => { + return it.name === from; }); - delete createdColumns[idxTo]; - delete deletedColumns[idxFrom]; + if (idxFrom >= 0) { + const idxTo = createdViews.findIndex((it) => { + return it.name === to; + }); + + const viewFrom = deletedViews[idxFrom]; + const viewTo = createdViews[idxFrom]; + + if (viewFrom.name !== viewTo.name) { + result.renamed.push({ + from: deletedViews[idxFrom], + to: createdViews[idxTo], + }); + } - createdColumns = createdColumns.filter(Boolean); - deletedColumns = deletedColumns.filter(Boolean); + delete createdViews[idxTo]; + delete deletedViews[idxFrom]; + + createdViews = createdViews.filter(Boolean); + deletedViews = deletedViews.filter(Boolean); + } } - } - return { - tableName: input.tableName, - schema: input.schema, - created: createdColumns, - deleted: deletedColumns, - renamed, - }; - } catch (e) { - console.error(e); - throw e; - } -}; + result.created = createdViews; + result.deleted = deletedViews; + + return result; + } catch (e) { + console.error(e); + throw e; + } + }; export const diffTestSchemasPush = async ( client: PGlite, @@ -414,12 +606,45 @@ export const diffTestSchemasPush = async ( cli: boolean = false, schemas: string[] = ['public'], casing?: CasingType | undefined, + sqlStatementsToRun: { before?: string[]; after?: string[]; runApply?: boolean } = { + before: [], + after: [], + runApply: 
true, + }, ) => { - const { sqlStatements } = await applyPgDiffs(left, casing); - for (const st of sqlStatements) { + const shouldRunApply = sqlStatementsToRun.runApply === undefined ? true : sqlStatementsToRun.runApply; + + for (const st of sqlStatementsToRun.before ?? []) { + await client.query(st); + } + + if (shouldRunApply) { + const res = await applyPgDiffs(left, casing); + for (const st of res.sqlStatements) { + await client.query(st); + } + } + + for (const st of sqlStatementsToRun.after ?? []) { await client.query(st); } + const materializedViewsForRefresh = Object.values(left).filter((it) => + isPgMaterializedView(it) + ) as PgMaterializedView[]; + + // refresh all mat views + for (const view of materializedViewsForRefresh) { + const viewConf = getMaterializedViewConfig(view); + if (viewConf.isExisting) continue; + + await client.exec( + `REFRESH MATERIALIZED VIEW "${viewConf.schema ?? 'public'}"."${viewConf.name}"${ + viewConf.withNoData ? ' WITH NO DATA;' : ';' + }`, + ); + } + // do introspect into PgSchemaInternal const introspectedSchema = await fromDatabase( { @@ -440,11 +665,17 @@ export const diffTestSchemasPush = async ( const leftSequences = Object.values(right).filter((it) => isPgSequence(it)) as PgSequence[]; + const leftViews = Object.values(right).filter((it) => isPgView(it)) as PgView[]; + + const leftMaterializedViews = Object.values(right).filter((it) => isPgMaterializedView(it)) as PgMaterializedView[]; + const serialized2 = generatePgSnapshot( leftTables, leftEnums, leftSchemas, leftSequences, + leftViews, + leftMaterializedViews, casing, ); @@ -484,11 +715,41 @@ export const diffTestSchemasPush = async ( testSequencesResolver(renames), testTablesResolver(renames), testColumnsResolver(renames), + testViewsResolver(renames), validatedPrev, validatedCur, 'push', ); - return { sqlStatements, statements }; + + const { + shouldAskForApprove, + statementsToExecute, + columnsToRemove, + tablesToRemove, + tablesToTruncate, + infoToPrint, + 
schemasToRemove, + matViewsToRemove, + } = await pgSuggestions( + { + query: async (sql: string, params: any[] = []) => { + return (await client.query(sql, params)).rows as T[]; + }, + }, + statements, + ); + + return { + sqlStatements: statementsToExecute, + statements, + shouldAskForApprove, + columnsToRemove, + tablesToRemove, + tablesToTruncate, + infoToPrint, + schemasToRemove, + matViewsToRemove, + }; } else { const { sqlStatements, statements } = await applyPgSnapshotsDiff( sn1, @@ -498,6 +759,7 @@ export const diffTestSchemasPush = async ( sequencesResolver, tablesResolver, columnsResolver, + viewsResolver, validatedPrev, validatedCur, 'push', @@ -514,6 +776,7 @@ export const applyPgDiffs = async (sn: PostgresSchema, casing: CasingType | unde prevId: '0', tables: {}, enums: {}, + views: {}, schemas: {}, sequences: {}, _meta: { @@ -531,7 +794,11 @@ export const applyPgDiffs = async (sn: PostgresSchema, casing: CasingType | unde const sequences = Object.values(sn).filter((it) => isPgSequence(it)) as PgSequence[]; - const serialized1 = generatePgSnapshot(tables, enums, schemas, sequences, casing); + const views = Object.values(sn).filter((it) => isPgView(it)) as PgView[]; + + const materializedViews = Object.values(sn).filter((it) => isPgMaterializedView(it)) as PgMaterializedView[]; + + const serialized1 = generatePgSnapshot(tables, enums, schemas, sequences, views, materializedViews, casing); const { version: v1, dialect: d1, ...rest1 } = serialized1; @@ -556,6 +823,7 @@ export const applyPgDiffs = async (sn: PostgresSchema, casing: CasingType | unde testSequencesResolver(new Set()), testTablesResolver(new Set()), testColumnsResolver(new Set()), + testViewsResolver(new Set()), validatedPrev, validatedCur, ); @@ -585,11 +853,21 @@ export const diffTestSchemas = async ( const rightSequences = Object.values(right).filter((it) => isPgSequence(it)) as PgSequence[]; + const leftViews = Object.values(left).filter((it) => isPgView(it)) as PgView[]; + + const 
rightViews = Object.values(right).filter((it) => isPgView(it)) as PgView[]; + + const leftMaterializedViews = Object.values(left).filter((it) => isPgMaterializedView(it)) as PgMaterializedView[]; + + const rightMaterializedViews = Object.values(right).filter((it) => isPgMaterializedView(it)) as PgMaterializedView[]; + const serialized1 = generatePgSnapshot( leftTables, leftEnums, leftSchemas, leftSequences, + leftViews, + leftMaterializedViews, casing, ); const serialized2 = generatePgSnapshot( @@ -597,6 +875,8 @@ export const diffTestSchemas = async ( rightEnums, rightSchemas, rightSequences, + rightViews, + rightMaterializedViews, casing, ); @@ -636,6 +916,7 @@ export const diffTestSchemas = async ( testSequencesResolver(renames), testTablesResolver(renames), testColumnsResolver(renames), + testViewsResolver(renames), validatedPrev, validatedCur, ); @@ -649,6 +930,7 @@ export const diffTestSchemas = async ( sequencesResolver, tablesResolver, columnsResolver, + viewsResolver, validatedPrev, validatedCur, ); @@ -682,7 +964,9 @@ export const diffTestSchemasPushMysql = async ( const leftTables = Object.values(right).filter((it) => is(it, MySqlTable)) as MySqlTable[]; - const serialized2 = generateMySqlSnapshot(leftTables, casing); + const leftViews = Object.values(right).filter((it) => is(it, MySqlView)) as MySqlView[]; + + const serialized2 = generateMySqlSnapshot(leftTables, leftViews, casing); const { version: v1, dialect: d1, ...rest1 } = introspectedSchema; const { version: v2, dialect: d2, ...rest2 } = serialized2; @@ -717,6 +1001,7 @@ export const diffTestSchemasPushMysql = async ( sn2, testTablesResolver(renames), testColumnsResolver(renames), + testViewsResolverMySql(renames), validatedPrev, validatedCur, 'push', @@ -728,6 +1013,7 @@ export const diffTestSchemasPushMysql = async ( sn2, tablesResolver, columnsResolver, + mySqlViewsResolver, validatedPrev, validatedCur, 'push', @@ -742,6 +1028,7 @@ export const applyMySqlDiffs = async (sn: MysqlSchema, casing: 
CasingType | unde dialect: 'mysql', id: '0', prevId: '0', + views: {}, tables: {}, enums: {}, schemas: {}, @@ -754,7 +1041,9 @@ export const applyMySqlDiffs = async (sn: MysqlSchema, casing: CasingType | unde const tables = Object.values(sn).filter((it) => is(it, MySqlTable)) as MySqlTable[]; - const serialized1 = generateMySqlSnapshot(tables, casing); + const views = Object.values(sn).filter((it) => is(it, MySqlView)) as MySqlView[]; + + const serialized1 = generateMySqlSnapshot(tables, views, casing); const { version: v1, dialect: d1, ...rest1 } = serialized1; @@ -776,6 +1065,7 @@ export const applyMySqlDiffs = async (sn: MysqlSchema, casing: CasingType | unde sn1, testTablesResolver(new Set()), testColumnsResolver(new Set()), + testViewsResolverMySql(new Set()), validatedPrev, validatedCur, ); @@ -791,10 +1081,14 @@ export const diffTestSchemasMysql = async ( ) => { const leftTables = Object.values(left).filter((it) => is(it, MySqlTable)) as MySqlTable[]; + const leftViews = Object.values(left).filter((it) => is(it, MySqlView)) as MySqlView[]; + const rightTables = Object.values(right).filter((it) => is(it, MySqlTable)) as MySqlTable[]; - const serialized1 = generateMySqlSnapshot(leftTables, casing); - const serialized2 = generateMySqlSnapshot(rightTables, casing); + const rightViews = Object.values(right).filter((it) => is(it, MySqlView)) as MySqlView[]; + + const serialized1 = generateMySqlSnapshot(leftTables, leftViews, casing); + const serialized2 = generateMySqlSnapshot(rightTables, rightViews, casing); const { version: v1, dialect: d1, ...rest1 } = serialized1; const { version: v2, dialect: d2, ...rest2 } = serialized2; @@ -829,6 +1123,7 @@ export const diffTestSchemasMysql = async ( sn2, testTablesResolver(renames), testColumnsResolver(renames), + testViewsResolverMySql(renames), validatedPrev, validatedCur, ); @@ -840,6 +1135,7 @@ export const diffTestSchemasMysql = async ( sn2, tablesResolver, columnsResolver, + mySqlViewsResolver, validatedPrev, 
validatedCur, ); @@ -880,7 +1176,9 @@ export const diffTestSchemasPushSqlite = async ( const rightTables = Object.values(right).filter((it) => is(it, SQLiteTable)) as SQLiteTable[]; - const serialized2 = generateSqliteSnapshot(rightTables, casing); + const rightViews = Object.values(right).filter((it) => is(it, SQLiteView)) as SQLiteView[]; + + const serialized2 = generateSqliteSnapshot(rightTables, rightViews, casing); const { version: v1, dialect: d1, ...rest1 } = introspectedSchema; const { version: v2, dialect: d2, ...rest2 } = serialized2; @@ -912,6 +1210,7 @@ export const diffTestSchemasPushSqlite = async ( sn2, testTablesResolver(renames), testColumnsResolver(renames), + testViewsResolverSqlite(renames), sch1, sch2, 'push', @@ -956,6 +1255,7 @@ export const diffTestSchemasPushSqlite = async ( sn2, tablesResolver, columnsResolver, + sqliteViewsResolver, sch1, sch2, 'push', @@ -992,20 +1292,15 @@ export async function diffTestSchemasPushLibSQL( run: async (query: string) => { await client.execute(query); }, - batch: async ( - queries: { query: string; values?: any[] | undefined }[], - ) => { - await client.batch( - queries.map((it) => ({ sql: it.query, args: it.values ?? 
[] })), - ); - }, }, undefined, ); const leftTables = Object.values(right).filter((it) => is(it, SQLiteTable)) as SQLiteTable[]; - const serialized2 = generateSqliteSnapshot(leftTables, casing); + const leftViews = Object.values(right).filter((it) => is(it, SQLiteView)) as SQLiteView[]; + + const serialized2 = generateSqliteSnapshot(leftTables, leftViews, casing); const { version: v1, dialect: d1, ...rest1 } = introspectedSchema; const { version: v2, dialect: d2, ...rest2 } = serialized2; @@ -1037,40 +1332,28 @@ export async function diffTestSchemasPushLibSQL( sn2, testTablesResolver(renames), testColumnsResolver(renames), + testViewsResolverSqlite(renames), sch1, sch2, 'push', ); - const { - statementsToExecute, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await libSqlLogSuggestionsAndReturn( - { - query: async (sql: string, params?: any[]) => { - const res = await client.execute({ sql, args: params || [] }); - return res.rows as T[]; + const { statementsToExecute, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate } = + await libSqlLogSuggestionsAndReturn( + { + query: async (sql: string, params?: any[]) => { + const res = await client.execute({ sql, args: params || [] }); + return res.rows as T[]; + }, + run: async (query: string) => { + await client.execute(query); + }, }, - run: async (query: string) => { - await client.execute(query); - }, - batch: async ( - queries: { query: string; values?: any[] | undefined }[], - ) => { - await client.batch( - queries.map((it) => ({ sql: it.query, args: it.values ?? 
[] })), - ); - }, - }, - statements, - sn1, - sn2, - _meta!, - ); + statements, + sn1, + sn2, + _meta!, + ); return { sqlStatements: statementsToExecute, @@ -1087,6 +1370,7 @@ export async function diffTestSchemasPushLibSQL( sn2, tablesResolver, columnsResolver, + sqliteViewsResolver, sch1, sch2, 'push', @@ -1107,6 +1391,7 @@ export const applySqliteDiffs = async ( prevId: '0', tables: {}, enums: {}, + views: {}, schemas: {}, _meta: { schemas: {}, @@ -1117,7 +1402,9 @@ export const applySqliteDiffs = async ( const tables = Object.values(sn).filter((it) => is(it, SQLiteTable)) as SQLiteTable[]; - const serialized1 = generateSqliteSnapshot(tables, casing); + const views = Object.values(sn).filter((it) => is(it, SQLiteView)) as SQLiteView[]; + + const serialized1 = generateSqliteSnapshot(tables, views, casing); const { version: v1, dialect: d1, ...rest1 } = serialized1; @@ -1136,6 +1423,7 @@ export const applySqliteDiffs = async ( sn1, testTablesResolver(new Set()), testColumnsResolver(new Set()), + testViewsResolverSqlite(new Set()), dryRun, sch1, action, @@ -1155,6 +1443,7 @@ export const applyLibSQLDiffs = async ( id: '0', prevId: '0', tables: {}, + views: {}, enums: {}, schemas: {}, _meta: { @@ -1166,7 +1455,9 @@ export const applyLibSQLDiffs = async ( const tables = Object.values(sn).filter((it) => is(it, SQLiteTable)) as SQLiteTable[]; - const serialized1 = generateSqliteSnapshot(tables, casing); + const views = Object.values(sn).filter((it) => is(it, SQLiteView)) as SQLiteView[]; + + const serialized1 = generateSqliteSnapshot(tables, views, casing); const { version: v1, dialect: d1, ...rest1 } = serialized1; @@ -1185,6 +1476,7 @@ export const applyLibSQLDiffs = async ( sn1, testTablesResolver(new Set()), testColumnsResolver(new Set()), + testViewsResolverSqlite(new Set()), dryRun, sch1, action, @@ -1202,10 +1494,14 @@ export const diffTestSchemasSqlite = async ( ) => { const leftTables = Object.values(left).filter((it) => is(it, SQLiteTable)) as SQLiteTable[]; 
+ const leftViews = Object.values(left).filter((it) => is(it, SQLiteView)) as SQLiteView[]; + const rightTables = Object.values(right).filter((it) => is(it, SQLiteTable)) as SQLiteTable[]; - const serialized1 = generateSqliteSnapshot(leftTables, casing); - const serialized2 = generateSqliteSnapshot(rightTables, casing); + const rightViews = Object.values(right).filter((it) => is(it, SQLiteView)) as SQLiteView[]; + + const serialized1 = generateSqliteSnapshot(leftTables, leftViews, casing); + const serialized2 = generateSqliteSnapshot(rightTables, rightViews, casing); const { version: v1, dialect: d1, ...rest1 } = serialized1; const { version: v2, dialect: d2, ...rest2 } = serialized2; @@ -1237,6 +1533,7 @@ export const diffTestSchemasSqlite = async ( sn2, testTablesResolver(renames), testColumnsResolver(renames), + testViewsResolverSqlite(renames), sch1, sch2, ); @@ -1248,6 +1545,7 @@ export const diffTestSchemasSqlite = async ( sn2, tablesResolver, columnsResolver, + sqliteViewsResolver, sch1, sch2, ); @@ -1263,10 +1561,14 @@ export const diffTestSchemasLibSQL = async ( ) => { const leftTables = Object.values(left).filter((it) => is(it, SQLiteTable)) as SQLiteTable[]; + const leftViews = Object.values(left).filter((it) => is(it, SQLiteView)) as SQLiteView[]; + const rightTables = Object.values(right).filter((it) => is(it, SQLiteTable)) as SQLiteTable[]; - const serialized1 = generateSqliteSnapshot(leftTables, casing); - const serialized2 = generateSqliteSnapshot(rightTables, casing); + const rightViews = Object.values(right).filter((it) => is(it, SQLiteView)) as SQLiteView[]; + + const serialized1 = generateSqliteSnapshot(leftTables, leftViews, casing); + const serialized2 = generateSqliteSnapshot(rightTables, rightViews, casing); const { version: v1, dialect: d1, ...rest1 } = serialized1; const { version: v2, dialect: d2, ...rest2 } = serialized2; @@ -1298,6 +1600,7 @@ export const diffTestSchemasLibSQL = async ( sn2, testTablesResolver(renames), 
testColumnsResolver(renames), + testViewsResolverSqlite(renames), sch1, sch2, ); @@ -1309,6 +1612,7 @@ export const diffTestSchemasLibSQL = async ( sn2, tablesResolver, columnsResolver, + sqliteViewsResolver, sch1, sch2, ); @@ -1342,19 +1646,34 @@ export const introspectPgToFile = async ( schemas, ); + const { version: initV, dialect: initD, ...initRest } = introspectedSchema; + + const initSch = { + version: '7', + dialect: 'postgresql', + id: '0', + prevId: '0', + ...initRest, + } as const; + + const initSn = squashPgScheme(initSch); + const validatedCur = pgSchema.parse(initSch); + + // write to ts file const file = schemaToTypeScript(introspectedSchema, 'camel'); - fs.writeFileSync(`tests/introspect/${testName}.ts`, file.file); + fs.writeFileSync(`tests/introspect/postgres/${testName}.ts`, file.file); - const response = await prepareFromPgImports([ - `tests/introspect/${testName}.ts`, - ]); + // generate snapshot from ts file + const response = await prepareFromPgImports([`tests/introspect/postgres/${testName}.ts`]); const afterFileImports = generatePgSnapshot( response.tables, response.enums, response.schemas, response.sequences, + response.views, + response.matViews, casing, ); @@ -1371,51 +1690,20 @@ export const introspectPgToFile = async ( const sn2AfterIm = squashPgScheme(sch2); const validatedCurAfterImport = pgSchema.parse(sch2); - const leftTables = Object.values(initSchema).filter((it) => is(it, PgTable)) as PgTable[]; - - const leftSchemas = Object.values(initSchema).filter((it) => is(it, PgSchema)) as PgSchema[]; - - const leftEnums = Object.values(initSchema).filter((it) => isPgEnum(it)) as PgEnum[]; - - const leftSequences = Object.values(initSchema).filter((it) => isPgSequence(it)) as PgSequence[]; - - const initSnapshot = generatePgSnapshot( - leftTables, - leftEnums, - leftSchemas, - leftSequences, - casing, - ); - - const { version: initV, dialect: initD, ...initRest } = initSnapshot; - - const initSch = { - version: '7', - dialect: 
'postgresql', - id: '0', - prevId: '0', - ...initRest, - } as const; - - const initSn = squashPgScheme(initSch); - const validatedCur = pgSchema.parse(initSch); - - const { - sqlStatements: afterFileSqlStatements, - statements: afterFileStatements, - } = await applyPgSnapshotsDiff( - sn2AfterIm, + const { sqlStatements: afterFileSqlStatements, statements: afterFileStatements } = await applyPgSnapshotsDiff( initSn, + sn2AfterIm, testSchemasResolver(new Set()), testEnumsResolver(new Set()), testSequencesResolver(new Set()), testTablesResolver(new Set()), testColumnsResolver(new Set()), - validatedCurAfterImport, + testViewsResolver(new Set()), validatedCur, + validatedCurAfterImport, ); - fs.rmSync(`tests/introspect/${testName}.ts`); + fs.rmSync(`tests/introspect/postgres/${testName}.ts`); return { sqlStatements: afterFileSqlStatements, @@ -1447,15 +1735,26 @@ export const introspectMySQLToFile = async ( schema, ); + const { version: initV, dialect: initD, ...initRest } = introspectedSchema; + + const initSch = { + version: '5', + dialect: 'mysql', + id: '0', + prevId: '0', + ...initRest, + } as const; + + const initSn = squashMysqlScheme(initSch); + const validatedCur = mysqlSchema.parse(initSch); + const file = schemaToTypeScriptMySQL(introspectedSchema, 'camel'); fs.writeFileSync(`tests/introspect/mysql/${testName}.ts`, file.file); - const response = await prepareFromMySqlImports([ - `tests/introspect/mysql/${testName}.ts`, - ]); + const response = await prepareFromMySqlImports([`tests/introspect/mysql/${testName}.ts`]); - const afterFileImports = generateMySqlSnapshot(response.tables, casing); + const afterFileImports = generateMySqlSnapshot(response.tables, response.views, casing); const { version: v2, dialect: d2, ...rest2 } = afterFileImports; @@ -1470,31 +1769,12 @@ export const introspectMySQLToFile = async ( const sn2AfterIm = squashMysqlScheme(sch2); const validatedCurAfterImport = mysqlSchema.parse(sch2); - const leftTables = 
Object.values(initSchema).filter((it) => is(it, MySqlTable)) as MySqlTable[]; - - const initSnapshot = generateMySqlSnapshot(leftTables, casing); - - const { version: initV, dialect: initD, ...initRest } = initSnapshot; - - const initSch = { - version: '5', - dialect: 'mysql', - id: '0', - prevId: '0', - ...initRest, - } as const; - - const initSn = squashMysqlScheme(initSch); - const validatedCur = mysqlSchema.parse(initSch); - - const { - sqlStatements: afterFileSqlStatements, - statements: afterFileStatements, - } = await applyMysqlSnapshotsDiff( + const { sqlStatements: afterFileSqlStatements, statements: afterFileStatements } = await applyMysqlSnapshotsDiff( sn2AfterIm, initSn, testTablesResolver(new Set()), testColumnsResolver(new Set()), + testViewsResolverMySql(new Set()), validatedCurAfterImport, validatedCur, ); @@ -1532,15 +1812,27 @@ export const introspectSQLiteToFile = async ( undefined, ); + const { version: initV, dialect: initD, ...initRest } = introspectedSchema; + + const initSch = { + version: '6', + dialect: 'sqlite', + id: '0', + prevId: '0', + ...initRest, + } as const; + + const initSn = squashSqliteScheme(initSch); + + const validatedCur = sqliteSchema.parse(initSch); + const file = schemaToTypeScriptSQLite(introspectedSchema, 'camel'); fs.writeFileSync(`tests/introspect/sqlite/${testName}.ts`, file.file); - const response = await prepareFromSqliteImports([ - `tests/introspect/sqlite/${testName}.ts`, - ]); + const response = await prepareFromSqliteImports([`tests/introspect/sqlite/${testName}.ts`]); - const afterFileImports = generateSqliteSnapshot(response.tables, casing); + const afterFileImports = generateSqliteSnapshot(response.tables, response.views, casing); const { version: v2, dialect: d2, ...rest2 } = afterFileImports; @@ -1555,11 +1847,50 @@ export const introspectSQLiteToFile = async ( const sn2AfterIm = squashSqliteScheme(sch2); const validatedCurAfterImport = sqliteSchema.parse(sch2); - const leftTables = 
Object.values(initSchema).filter((it) => is(it, SQLiteTable)) as SQLiteTable[]; + const { sqlStatements: afterFileSqlStatements, statements: afterFileStatements } = await applySqliteSnapshotsDiff( + sn2AfterIm, + initSn, + testTablesResolver(new Set()), + testColumnsResolver(new Set()), + testViewsResolverSqlite(new Set()), + validatedCurAfterImport, + validatedCur, + ); + + fs.rmSync(`tests/introspect/sqlite/${testName}.ts`); - const initSnapshot = generateSqliteSnapshot(leftTables, casing); + return { + sqlStatements: afterFileSqlStatements, + statements: afterFileStatements, + }; +}; - const { version: initV, dialect: initD, ...initRest } = initSnapshot; +export const introspectLibSQLToFile = async ( + client: Client, + initSchema: SqliteSchema, + testName: string, + casing?: CasingType | undefined, +) => { + // put in db + const { sqlStatements } = await applyLibSQLDiffs(initSchema); + for (const st of sqlStatements) { + client.execute(st); + } + + // introspect to schema + const introspectedSchema = await fromSqliteDatabase( + { + query: async (sql: string, params: any[] = []) => { + return (await client.execute({ sql, args: params })).rows as T[]; + }, + run: async (query: string) => { + client.execute(query); + }, + }, + undefined, + ); + + const { version: initV, dialect: initD, ...initRest } = introspectedSchema; const initSch = { version: '6', @@ -1570,21 +1901,41 @@ export const introspectSQLiteToFile = async ( } as const; const initSn = squashSqliteScheme(initSch); + const validatedCur = sqliteSchema.parse(initSch); - const { - sqlStatements: afterFileSqlStatements, - statements: afterFileStatements, - } = await applySqliteSnapshotsDiff( + const file = schemaToTypeScriptSQLite(introspectedSchema, 'camel'); + + fs.writeFileSync(`tests/introspect/libsql/${testName}.ts`, file.file); + + const response = await prepareFromSqliteImports([`tests/introspect/libsql/${testName}.ts`]); + + const afterFileImports = generateSqliteSnapshot(response.tables, 
response.views, casing); + + const { version: v2, dialect: d2, ...rest2 } = afterFileImports; + + const sch2 = { + version: '6', + dialect: 'sqlite', + id: '0', + prevId: '0', + ...rest2, + } as const; + + const sn2AfterIm = squashSqliteScheme(sch2); + const validatedCurAfterImport = sqliteSchema.parse(sch2); + + const { sqlStatements: afterFileSqlStatements, statements: afterFileStatements } = await applyLibSQLSnapshotsDiff( sn2AfterIm, initSn, testTablesResolver(new Set()), testColumnsResolver(new Set()), + testViewsResolverSqlite(new Set()), validatedCurAfterImport, validatedCur, ); - fs.rmSync(`tests/introspect/sqlite/${testName}.ts`); + fs.rmSync(`tests/introspect/libsql/${testName}.ts`); return { sqlStatements: afterFileSqlStatements, diff --git a/drizzle-kit/tests/sqlite-checks.test.ts b/drizzle-kit/tests/sqlite-checks.test.ts new file mode 100644 index 000000000..d1824e441 --- /dev/null +++ b/drizzle-kit/tests/sqlite-checks.test.ts @@ -0,0 +1,308 @@ +import { sql } from 'drizzle-orm'; +import { check, int, sqliteTable, text } from 'drizzle-orm/sqlite-core'; +import { expect, test } from 'vitest'; +import { diffTestSchemasSqlite } from './schemaDiffer'; + +test('create table with check', async (t) => { + const to = { + users: sqliteTable('users', { + id: int('id').primaryKey(), + age: int('age'), + }, (table) => ({ + checkConstraint: check('some_check_name', sql`${table.age} > 21`), + })), + }; + + const { sqlStatements, statements } = await diffTestSchemasSqlite({}, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'sqlite_create_table', + tableName: 'users', + columns: [ + { + name: 'id', + type: 'integer', + notNull: true, + primaryKey: true, + autoincrement: false, + }, + { + name: 'age', + type: 'integer', + notNull: false, + primaryKey: false, + autoincrement: false, + }, + ], + compositePKs: [], + checkConstraints: ['some_check_name;"users"."age" > 21'], + referenceData: [], + uniqueConstraints: [], + }); 
+ + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe(`CREATE TABLE \`users\` ( +\t\`id\` integer PRIMARY KEY NOT NULL, +\t\`age\` integer, +\tCONSTRAINT "some_check_name" CHECK("users"."age" > 21) +);\n`); +}); + +test('add check contraint to existing table', async (t) => { + const to = { + users: sqliteTable('users', { + id: int('id').primaryKey(), + age: int('age'), + }, (table) => ({ + checkConstraint: check('some_check_name', sql`${table.age} > 21`), + })), + }; + + const from = { + users: sqliteTable('users', { + id: int('id').primaryKey(), + age: int('age'), + }), + }; + + const { sqlStatements, statements } = await diffTestSchemasSqlite(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + columns: [ + { + autoincrement: false, + generated: undefined, + name: 'id', + notNull: true, + primaryKey: true, + type: 'integer', + }, + { + autoincrement: false, + generated: undefined, + name: 'age', + notNull: false, + primaryKey: false, + type: 'integer', + }, + ], + compositePKs: [], + referenceData: [], + tableName: 'users', + type: 'recreate_table', + uniqueConstraints: [], + checkConstraints: ['some_check_name;"users"."age" > 21'], + }); + + expect(sqlStatements.length).toBe(6); + expect(sqlStatements[0]).toBe('PRAGMA foreign_keys=OFF;'); + expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` ( +\t\`id\` integer PRIMARY KEY NOT NULL, +\t\`age\` integer, +\tCONSTRAINT "some_check_name" CHECK("__new_users"."age" > 21) +);\n`); + expect(sqlStatements[2]).toBe(`INSERT INTO \`__new_users\`("id", "age") SELECT "id", "age" FROM \`users\`;`); + expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`); + expect(sqlStatements[4]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); + expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); +}); + +test('drop check contraint to existing table', async (t) => { + const from = { + users: sqliteTable('users', { + id: int('id').primaryKey(), + age: 
int('age'), + }, (table) => ({ + checkConstraint: check('some_check_name', sql`${table.age} > 21`), + })), + }; + + const to = { + users: sqliteTable('users', { + id: int('id').primaryKey(), + age: int('age'), + }), + }; + + const { sqlStatements, statements } = await diffTestSchemasSqlite(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + columns: [ + { + autoincrement: false, + generated: undefined, + name: 'id', + notNull: true, + primaryKey: true, + type: 'integer', + }, + { + autoincrement: false, + generated: undefined, + name: 'age', + notNull: false, + primaryKey: false, + type: 'integer', + }, + ], + compositePKs: [], + referenceData: [], + tableName: 'users', + type: 'recreate_table', + uniqueConstraints: [], + checkConstraints: [], + }); + + expect(sqlStatements.length).toBe(6); + expect(sqlStatements[0]).toBe('PRAGMA foreign_keys=OFF;'); + expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` ( +\t\`id\` integer PRIMARY KEY NOT NULL, +\t\`age\` integer +);\n`); + expect(sqlStatements[2]).toBe(`INSERT INTO \`__new_users\`("id", "age") SELECT "id", "age" FROM \`users\`;`); + expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`); + expect(sqlStatements[4]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); + expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); +}); + +test('rename check constraint', async (t) => { + const from = { + users: sqliteTable('users', { + id: int('id').primaryKey(), + age: int('age'), + }, (table) => ({ + checkConstraint: check('some_check_name', sql`${table.age} > 21`), + })), + }; + + const to = { + users: sqliteTable('users', { + id: int('id').primaryKey(), + age: int('age'), + }, (table) => ({ + checkConstraint: check('new_some_check_name', sql`${table.age} > 21`), + })), + }; + + const { sqlStatements, statements } = await diffTestSchemasSqlite(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + columns: [ + { + 
autoincrement: false, + generated: undefined, + name: 'id', + notNull: true, + primaryKey: true, + type: 'integer', + }, + { + autoincrement: false, + generated: undefined, + name: 'age', + notNull: false, + primaryKey: false, + type: 'integer', + }, + ], + compositePKs: [], + referenceData: [], + tableName: 'users', + type: 'recreate_table', + uniqueConstraints: [], + checkConstraints: [`new_some_check_name;"users"."age" > 21`], + }); + + expect(sqlStatements.length).toBe(6); + expect(sqlStatements[0]).toBe('PRAGMA foreign_keys=OFF;'); + expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` ( +\t\`id\` integer PRIMARY KEY NOT NULL, +\t\`age\` integer, +\tCONSTRAINT "new_some_check_name" CHECK("__new_users"."age" > 21) +);\n`); + expect(sqlStatements[2]).toBe(`INSERT INTO \`__new_users\`("id", "age") SELECT "id", "age" FROM \`users\`;`); + expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`); + expect(sqlStatements[4]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); + expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); +}); + +test('rename check constraint', async (t) => { + const from = { + users: sqliteTable('users', { + id: int('id').primaryKey(), + age: int('age'), + }, (table) => ({ + checkConstraint: check('some_check_name', sql`${table.age} > 21`), + })), + }; + + const to = { + users: sqliteTable('users', { + id: int('id').primaryKey(), + age: int('age'), + }, (table) => ({ + checkConstraint: check('some_check_name', sql`${table.age} > 10`), + })), + }; + + const { sqlStatements, statements } = await diffTestSchemasSqlite(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + columns: [ + { + autoincrement: false, + generated: undefined, + name: 'id', + notNull: true, + primaryKey: true, + type: 'integer', + }, + { + autoincrement: false, + generated: undefined, + name: 'age', + notNull: false, + primaryKey: false, + type: 'integer', + }, + ], + compositePKs: [], + referenceData: [], + 
tableName: 'users', + type: 'recreate_table', + uniqueConstraints: [], + checkConstraints: [`some_check_name;"users"."age" > 10`], + }); + + expect(sqlStatements.length).toBe(6); + expect(sqlStatements[0]).toBe('PRAGMA foreign_keys=OFF;'); + expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` ( +\t\`id\` integer PRIMARY KEY NOT NULL, +\t\`age\` integer, +\tCONSTRAINT "some_check_name" CHECK("__new_users"."age" > 10) +);\n`); + expect(sqlStatements[2]).toBe(`INSERT INTO \`__new_users\`("id", "age") SELECT "id", "age" FROM \`users\`;`); + expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`); + expect(sqlStatements[4]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); + expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); +}); + +test('create checks with same names', async (t) => { + const to = { + users: sqliteTable('users', { + id: int('id').primaryKey(), + age: int('age'), + name: text('name'), + }, (table) => ({ + checkConstraint1: check('some_check_name', sql`${table.age} > 21`), + checkConstraint2: check('some_check_name', sql`${table.name} != 'Alex'`), + })), + }; + + await expect(diffTestSchemasSqlite({}, to, [])).rejects.toThrowError(); +}); diff --git a/drizzle-kit/tests/sqlite-columns.test.ts b/drizzle-kit/tests/sqlite-columns.test.ts index 04dbb940c..b7b4c7f6b 100644 --- a/drizzle-kit/tests/sqlite-columns.test.ts +++ b/drizzle-kit/tests/sqlite-columns.test.ts @@ -37,6 +37,7 @@ test('create table with id', async (t) => { uniqueConstraints: [], referenceData: [], compositePKs: [], + checkConstraints: [], }); }); @@ -363,6 +364,7 @@ test('add foreign key #1', async (t) => { }], tableName: 'users', uniqueConstraints: [], + checkConstraints: [], } as JsonRecreateTableStatement, ); }); @@ -426,6 +428,7 @@ test('add foreign key #2', async (t) => { }], tableName: 'users', uniqueConstraints: [], + checkConstraints: [], } as JsonRecreateTableStatement); }); @@ -584,6 +587,7 @@ test('alter table add composite pk', async (t) => { referenceData: 
[], tableName: 'table', uniqueConstraints: [], + checkConstraints: [], }); }); @@ -621,6 +625,7 @@ test('alter column drop not null', async (t) => { referenceData: [], tableName: 'table', uniqueConstraints: [], + checkConstraints: [], }); }); @@ -658,6 +663,7 @@ test('alter column add not null', async (t) => { referenceData: [], tableName: 'table', uniqueConstraints: [], + checkConstraints: [], }); }); @@ -696,6 +702,7 @@ test('alter column add default', async (t) => { referenceData: [], tableName: 'table', uniqueConstraints: [], + checkConstraints: [], }); }); @@ -733,6 +740,7 @@ test('alter column drop default', async (t) => { referenceData: [], tableName: 'table', uniqueConstraints: [], + checkConstraints: [], }); }); @@ -771,6 +779,7 @@ test('alter column add default not null', async (t) => { referenceData: [], tableName: 'table', uniqueConstraints: [], + checkConstraints: [], }); }); @@ -813,6 +822,7 @@ test('alter column add default not null with indexes', async (t) => { referenceData: [], tableName: 'table', uniqueConstraints: [], + checkConstraints: [], }); expect(statements[1]).toStrictEqual({ data: 'index_name;name;false;', @@ -869,6 +879,7 @@ test('alter column drop default not null', async (t) => { referenceData: [], tableName: 'table', uniqueConstraints: [], + checkConstraints: [], }); expect(sqlStatements.length).toBe(6); expect(sqlStatements[0]).toBe(`PRAGMA foreign_keys=OFF;`); @@ -997,6 +1008,7 @@ test('recreate table with nested references', async (t) => { tableName: 'users', type: 'recreate_table', uniqueConstraints: [], + checkConstraints: [], }); expect(sqlStatements.length).toBe(6); diff --git a/drizzle-kit/tests/sqlite-generated.test.ts b/drizzle-kit/tests/sqlite-generated.test.ts index 749dde825..2d3ceed97 100644 --- a/drizzle-kit/tests/sqlite-generated.test.ts +++ b/drizzle-kit/tests/sqlite-generated.test.ts @@ -508,6 +508,7 @@ test('generated as callback: add table with column with stored generated constra tableName: 'users', type: 
'sqlite_create_table', uniqueConstraints: [], + checkConstraints: [], }, ]); expect(sqlStatements).toStrictEqual([ @@ -576,6 +577,7 @@ test('generated as callback: add table with column with virtual generated constr tableName: 'users', type: 'sqlite_create_table', uniqueConstraints: [], + checkConstraints: [], }, ]); expect(sqlStatements).toStrictEqual([ @@ -1076,6 +1078,7 @@ test('generated as sql: add table with column with stored generated constraint', tableName: 'users', type: 'sqlite_create_table', uniqueConstraints: [], + checkConstraints: [], }, ]); expect(sqlStatements).toStrictEqual([ @@ -1144,6 +1147,7 @@ test('generated as sql: add table with column with virtual generated constraint' tableName: 'users', type: 'sqlite_create_table', uniqueConstraints: [], + checkConstraints: [], }, ]); expect(sqlStatements).toStrictEqual([ @@ -1644,6 +1648,7 @@ test('generated as string: add table with column with stored generated constrain tableName: 'users', type: 'sqlite_create_table', uniqueConstraints: [], + checkConstraints: [], }, ]); expect(sqlStatements).toStrictEqual([ @@ -1712,6 +1717,7 @@ test('generated as string: add table with column with virtual generated constrai tableName: 'users', type: 'sqlite_create_table', uniqueConstraints: [], + checkConstraints: [], }, ]); expect(sqlStatements).toStrictEqual([ diff --git a/drizzle-kit/tests/sqlite-tables.test.ts b/drizzle-kit/tests/sqlite-tables.test.ts index 81ac7f100..0390ff28e 100644 --- a/drizzle-kit/tests/sqlite-tables.test.ts +++ b/drizzle-kit/tests/sqlite-tables.test.ts @@ -28,6 +28,7 @@ test('add table #1', async () => { compositePKs: [], uniqueConstraints: [], referenceData: [], + checkConstraints: [], }); }); @@ -56,6 +57,7 @@ test('add table #2', async () => { compositePKs: [], referenceData: [], uniqueConstraints: [], + checkConstraints: [], }); }); @@ -95,6 +97,7 @@ test('add table #3', async () => { compositePKs: [], uniqueConstraints: [], referenceData: [], + checkConstraints: [], }); }); @@ -114,6 
+117,7 @@ test('add table #4', async () => { compositePKs: [], uniqueConstraints: [], referenceData: [], + checkConstraints: [], }); expect(statements[1]).toStrictEqual({ type: 'sqlite_create_table', @@ -122,6 +126,7 @@ test('add table #4', async () => { compositePKs: [], uniqueConstraints: [], referenceData: [], + checkConstraints: [], }); }); @@ -148,6 +153,7 @@ test('add table #6', async () => { compositePKs: [], uniqueConstraints: [], referenceData: [], + checkConstraints: [], }); expect(statements[1]).toStrictEqual({ type: 'drop_table', @@ -185,6 +191,7 @@ test('add table #7', async () => { compositePKs: [], uniqueConstraints: [], referenceData: [], + checkConstraints: [], }); }); @@ -222,6 +229,7 @@ test('add table #8', async () => { ], compositePKs: [], uniqueConstraints: [], + checkConstraints: [], referenceData: [ { columnsFrom: ['reportee_id'], @@ -277,6 +285,7 @@ test('add table #9', async () => { compositePKs: [], uniqueConstraints: [], referenceData: [], + checkConstraints: [], }); expect(statements[1]).toStrictEqual({ diff --git a/drizzle-kit/tests/sqlite-views.test.ts b/drizzle-kit/tests/sqlite-views.test.ts new file mode 100644 index 000000000..8021ba37e --- /dev/null +++ b/drizzle-kit/tests/sqlite-views.test.ts @@ -0,0 +1,218 @@ +import { sql } from 'drizzle-orm'; +import { int, sqliteTable, sqliteView } from 'drizzle-orm/sqlite-core'; +import { expect, test } from 'vitest'; +import { diffTestSchemasSqlite } from './schemaDiffer'; + +test('create view', async () => { + const users = sqliteTable('users', { id: int('id').default(1) }); + const view = sqliteView('view').as((qb) => qb.select().from(users)); + const to = { + users: users, + testView: view, + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite({}, to, []); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + type: 'sqlite_create_table', + tableName: 'users', + columns: [{ + autoincrement: false, + default: 1, + name: 'id', + type: 
'integer', + primaryKey: false, + notNull: false, + }], + compositePKs: [], + uniqueConstraints: [], + referenceData: [], + checkConstraints: [], + }); + expect(statements[1]).toStrictEqual({ + type: 'sqlite_create_view', + name: 'view', + definition: 'select "id" from "users"', + }); + + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe(`CREATE TABLE \`users\` ( +\t\`id\` integer DEFAULT 1 +);\n`); + expect(sqlStatements[1]).toBe(`CREATE VIEW \`view\` AS select "id" from "users";`); +}); + +test('drop view', async () => { + const users = sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + }); + + const from = { + users: users, + testView: sqliteView('view', { id: int('id') }).as(sql`SELECT * FROM users`), + }; + const to = { + users, + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite(from, to, []); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + name: 'view', + type: 'drop_view', + }); + + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + `DROP VIEW \`view\`;`, + ); +}); + +test('alter view', async () => { + const users = sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + }); + + const from = { + users: users, + testView: sqliteView('view', { id: int('id') }).as(sql`SELECT * FROM users`), + }; + const to = { + users, + testView: sqliteView('view', { id: int('id') }).as(sql`SELECT * FROM users WHERE users.id = 1`), + }; + const { statements, sqlStatements } = await diffTestSchemasSqlite(from, to, []); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + name: 'view', + type: 'drop_view', + }); + expect(statements[1]).toStrictEqual({ + name: 'view', + type: 'sqlite_create_view', + definition: 'SELECT * FROM users WHERE users.id = 1', + }); + + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe( + `DROP VIEW \`view\`;`, + ); + expect(sqlStatements[1]).toBe( + `CREATE 
VIEW \`view\` AS SELECT * FROM users WHERE users.id = 1;`, + ); +}); + +test('create view with existing flag', async () => { + const view = sqliteView('view', {}).existing(); + const to = { + testView: view, + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite({}, to, []); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('drop view with existing flag', async () => { + const users = sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + }); + + const from = { + users: users, + testView: sqliteView('view', { id: int('id') }).existing(), + }; + const to = { + users, + }; + + const { statements, sqlStatements } = await diffTestSchemasSqlite(from, to, []); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('rename view with existing flag', async () => { + const users = sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + }); + + const from = { + users: users, + testView: sqliteView('view', { id: int('id') }).existing(), + }; + const to = { + users, + testView: sqliteView('new_view', { id: int('id') }).existing(), + }; + const { statements, sqlStatements } = await diffTestSchemasSqlite(from, to, ['view->new_view']); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('rename view and drop existing flag', async () => { + const users = sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + }); + + const from = { + users: users, + testView: sqliteView('view', { id: int('id') }).existing(), + }; + const to = { + users, + testView: sqliteView('new_view', { id: int('id') }).as(sql`SELECT * FROM users`), + }; + const { statements, sqlStatements } = await diffTestSchemasSqlite(from, to, ['view->new_view']); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + name: 'view', + type: 'drop_view', + }); + expect(statements[1]).toStrictEqual({ 
+ type: 'sqlite_create_view', + name: 'new_view', + definition: 'SELECT * FROM users', + }); + + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe(`DROP VIEW \`view\`;`); + expect(sqlStatements[1]).toBe(`CREATE VIEW \`new_view\` AS SELECT * FROM users;`); +}); + +test('rename view and alter ".as"', async () => { + const users = sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + }); + + const from = { + users: users, + testView: sqliteView('view', { id: int('id') }).as(sql`SELECT * FROM users`), + }; + const to = { + users, + testView: sqliteView('new_view', { id: int('id') }).as(sql`SELECT * FROM users WHERE 1=1`), + }; + const { statements, sqlStatements } = await diffTestSchemasSqlite(from, to, ['view->new_view']); + + expect(statements.length).toBe(2); + expect(statements[0]).toStrictEqual({ + name: 'view', + type: 'drop_view', + }); + expect(statements[1]).toStrictEqual({ + type: 'sqlite_create_view', + name: 'new_view', + definition: 'SELECT * FROM users WHERE 1=1', + }); + + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe('DROP VIEW `view`;'); + expect(sqlStatements[1]).toBe(`CREATE VIEW \`new_view\` AS SELECT * FROM users WHERE 1=1;`); +}); diff --git a/drizzle-kit/tests/statements-combiner/libsql-statements-combiner.test.ts b/drizzle-kit/tests/statements-combiner/libsql-statements-combiner.test.ts index 47447decd..449b61c6c 100644 --- a/drizzle-kit/tests/statements-combiner/libsql-statements-combiner.test.ts +++ b/drizzle-kit/tests/statements-combiner/libsql-statements-combiner.test.ts @@ -83,9 +83,11 @@ test(`rename table and drop index`, async (t) => { user_id_iq_pk: 'id,iq', }, uniqueConstraints: {}, + checkConstraints: {}, }, }, enums: {}, + views: {}, }; const json2: SQLiteSchemaSquashed = { version: '6', @@ -122,9 +124,11 @@ test(`rename table and drop index`, async (t) => { new_user_id_iq_pk: 'id,iq', }, uniqueConstraints: {}, + checkConstraints: {}, }, }, enums: {}, + views: {}, }; 
const newJsonStatements = [ @@ -242,6 +246,7 @@ test(`drop, set autoincrement. drop not null`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, autoincrement2: { name: 'autoincrement2', @@ -258,6 +263,7 @@ test(`drop, set autoincrement. drop not null`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, dropNotNull: { name: 'dropNotNull', @@ -274,9 +280,11 @@ test(`drop, set autoincrement. drop not null`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, }, enums: {}, + views: {}, }; const json2: SQLiteSchemaSquashed = { version: '6', @@ -297,6 +305,7 @@ test(`drop, set autoincrement. drop not null`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, autoincrement2: { name: 'autoincrement2', @@ -313,6 +322,7 @@ test(`drop, set autoincrement. drop not null`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, dropNotNull: { name: 'dropNotNull', @@ -329,9 +339,11 @@ test(`drop, set autoincrement. drop not null`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, }, enums: {}, + views: {}, }; const newJsonStatements = [ @@ -350,6 +362,7 @@ test(`drop, set autoincrement. drop not null`, async (t) => { compositePKs: [], referenceData: [], uniqueConstraints: [], + checkConstraints: [], }, { type: 'recreate_table', @@ -366,6 +379,7 @@ test(`drop, set autoincrement. drop not null`, async (t) => { compositePKs: [], referenceData: [], uniqueConstraints: [], + checkConstraints: [], }, { type: 'alter_table_alter_column_drop_notnull', @@ -498,6 +512,7 @@ test(`drop and set primary key. 
create reference`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, pk1: { name: 'pk1', @@ -514,6 +529,7 @@ test(`drop and set primary key. create reference`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, pk2: { name: 'pk2', @@ -530,6 +546,7 @@ test(`drop and set primary key. create reference`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, ref_table: { name: 'ref_table', @@ -546,9 +563,11 @@ test(`drop and set primary key. create reference`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, }, enums: {}, + views: {}, }; const json2: SQLiteSchemaSquashed = { version: '6', @@ -572,6 +591,7 @@ test(`drop and set primary key. create reference`, async (t) => { }, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, pk1: { name: 'pk1', @@ -588,6 +608,7 @@ test(`drop and set primary key. create reference`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, pk2: { name: 'pk2', @@ -604,6 +625,7 @@ test(`drop and set primary key. create reference`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, ref_table: { name: 'ref_table', @@ -620,9 +642,11 @@ test(`drop and set primary key. create reference`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, }, enums: {}, + views: {}, }; const newJsonStatements = [ @@ -641,6 +665,7 @@ test(`drop and set primary key. create reference`, async (t) => { compositePKs: [], referenceData: [], uniqueConstraints: [], + checkConstraints: [], }, { type: 'recreate_table', @@ -657,6 +682,7 @@ test(`drop and set primary key. 
create reference`, async (t) => { compositePKs: [], referenceData: [], uniqueConstraints: [], + checkConstraints: [], }, { type: 'create_reference', @@ -761,6 +787,7 @@ test(`set and drop multiple columns reference`, async (t) => { }, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, fk2: { name: 'fk2', @@ -784,6 +811,7 @@ test(`set and drop multiple columns reference`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, ref_table: { name: 'ref_table', @@ -807,9 +835,11 @@ test(`set and drop multiple columns reference`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, }, enums: {}, + views: {}, }; const json2: SQLiteSchemaSquashed = { version: '6', @@ -837,6 +867,7 @@ test(`set and drop multiple columns reference`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, fk2: { name: 'fk2', @@ -863,6 +894,7 @@ test(`set and drop multiple columns reference`, async (t) => { }, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, ref_table: { name: 'ref_table', @@ -886,9 +918,11 @@ test(`set and drop multiple columns reference`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, }, enums: {}, + views: {}, }; const newJsonStatements = [ @@ -914,6 +948,7 @@ test(`set and drop multiple columns reference`, async (t) => { compositePKs: [], referenceData: [], uniqueConstraints: [], + checkConstraints: [], }, { type: 'recreate_table', @@ -947,6 +982,7 @@ test(`set and drop multiple columns reference`, async (t) => { }, ], uniqueConstraints: [], + checkConstraints: [], }, ]; expect(libSQLCombineStatements(statements, json2)).toStrictEqual( @@ -1054,6 +1090,7 @@ test(`set new type for primary key, unique and normal column`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + 
checkConstraints: {}, }, simple: { name: 'simple', @@ -1070,6 +1107,7 @@ test(`set new type for primary key, unique and normal column`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, unique: { name: 'unique', @@ -1088,9 +1126,11 @@ test(`set new type for primary key, unique and normal column`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, }, enums: {}, + views: {}, }; const json2: SQLiteSchemaSquashed = { version: '6', @@ -1111,6 +1151,7 @@ test(`set new type for primary key, unique and normal column`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, simple: { name: 'simple', @@ -1127,6 +1168,7 @@ test(`set new type for primary key, unique and normal column`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, unique: { name: 'unique', @@ -1145,9 +1187,11 @@ test(`set new type for primary key, unique and normal column`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, }, enums: {}, + views: {}, }; const newJsonStatements = [ @@ -1166,6 +1210,7 @@ test(`set new type for primary key, unique and normal column`, async (t) => { compositePKs: [], referenceData: [], uniqueConstraints: [], + checkConstraints: [], }, { type: 'alter_table_alter_column_set_type', @@ -1261,6 +1306,7 @@ test(`add columns. set fk`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, user: { name: 'user', @@ -1284,9 +1330,11 @@ test(`add columns. set fk`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, }, enums: {}, + views: {}, }; const json2: SQLiteSchemaSquashed = { version: '6', @@ -1330,6 +1378,7 @@ test(`add columns. 
set fk`, async (t) => { }, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, user: { name: 'user', @@ -1353,9 +1402,11 @@ test(`add columns. set fk`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, }, enums: {}, + views: {}, }; const newJsonStatements = [ @@ -1457,6 +1508,7 @@ test(`add column and fk`, async (t) => { }, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, user: { name: 'user', @@ -1480,9 +1532,11 @@ test(`add column and fk`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, }, enums: {}, + views: {}, }; const json2: SQLiteSchemaSquashed = { version: '6', @@ -1526,6 +1580,7 @@ test(`add column and fk`, async (t) => { }, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, user: { name: 'user', @@ -1549,9 +1604,11 @@ test(`add column and fk`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, }, enums: {}, + views: {}, }; const newJsonStatements = [ @@ -1632,6 +1689,7 @@ test(`add column and fk`, async (t) => { }, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, user: { name: 'user', @@ -1655,9 +1713,11 @@ test(`add column and fk`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, }, enums: {}, + views: {}, }; const json2: SQLiteSchemaSquashed = { version: '6', @@ -1701,6 +1761,7 @@ test(`add column and fk`, async (t) => { }, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, user: { name: 'user', @@ -1724,9 +1785,11 @@ test(`add column and fk`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, }, enums: {}, + views: {}, }; const newJsonStatements = [ diff --git a/drizzle-kit/tests/statements-combiner/sqlite-statements-combiner.test.ts 
b/drizzle-kit/tests/statements-combiner/sqlite-statements-combiner.test.ts index 2fcaf6436..20f953da6 100644 --- a/drizzle-kit/tests/statements-combiner/sqlite-statements-combiner.test.ts +++ b/drizzle-kit/tests/statements-combiner/sqlite-statements-combiner.test.ts @@ -60,9 +60,11 @@ test(`renamed column and altered this column type`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, }, enums: {}, + views: {}, }; const json2: SQLiteSchemaSquashed = { version: '6', @@ -97,9 +99,11 @@ test(`renamed column and altered this column type`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, }, enums: {}, + views: {}, }; const newJsonStatements = [ @@ -132,6 +136,7 @@ test(`renamed column and altered this column type`, async (t) => { compositePKs: [], referenceData: [], uniqueConstraints: [], + checkConstraints: [], }, ]; expect(sqliteCombineStatements(statements, json2)).toStrictEqual( @@ -188,9 +193,11 @@ test(`renamed column and droped column "test"`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, }, enums: {}, + views: {}, }; const json2: SQLiteSchemaSquashed = { version: '6', @@ -225,9 +232,11 @@ test(`renamed column and droped column "test"`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, }, enums: {}, + views: {}, }; const newJsonStatements: JsonStatement[] = [ @@ -301,9 +310,11 @@ test(`droped column that is part of composite pk`, async (t) => { user_id_iq_pk: 'id,iq', }, uniqueConstraints: {}, + checkConstraints: {}, }, }, enums: {}, + views: {}, }; const json2: SQLiteSchemaSquashed = { version: '6', @@ -331,9 +342,11 @@ test(`droped column that is part of composite pk`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, }, enums: {}, + views: {}, }; const newJsonStatements: 
JsonStatement[] = [ @@ -359,6 +372,7 @@ test(`droped column that is part of composite pk`, async (t) => { compositePKs: [], referenceData: [], uniqueConstraints: [], + checkConstraints: [], }, ]; expect(sqliteCombineStatements(statements, json2)).toStrictEqual( @@ -466,6 +480,7 @@ test(`drop column "ref"."name", rename column "ref"."age". dropped primary key " }, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, user: { name: 'user', @@ -496,9 +511,11 @@ test(`drop column "ref"."name", rename column "ref"."age". dropped primary key " foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, }, enums: {}, + views: {}, }; const json2: SQLiteSchemaSquashed = { version: '6', @@ -535,6 +552,7 @@ test(`drop column "ref"."name", rename column "ref"."age". dropped primary key " }, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, user: { name: 'user', @@ -565,9 +583,11 @@ test(`drop column "ref"."name", rename column "ref"."age". dropped primary key " foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, }, enums: {}, + views: {}, }; const newJsonStatements: JsonStatement[] = [ @@ -613,6 +633,7 @@ test(`drop column "ref"."name", rename column "ref"."age". dropped primary key " compositePKs: [], referenceData: [], uniqueConstraints: [], + checkConstraints: [], }, ]; @@ -649,6 +670,7 @@ test(`create reference on exising column (table includes unique index). expect t foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, unique: { name: 'unique', @@ -674,9 +696,11 @@ test(`create reference on exising column (table includes unique index). expect t foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, }, enums: {}, + views: {}, }; const json2: SQLiteSchemaSquashed = { version: '6', @@ -697,6 +721,7 @@ test(`create reference on exising column (table includes unique index). 
expect t foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, unique: { name: 'unique', @@ -724,9 +749,11 @@ test(`create reference on exising column (table includes unique index). expect t }, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, }, enums: {}, + views: {}, }; const newJsonStatements: JsonStatement[] = [ @@ -762,6 +789,7 @@ test(`create reference on exising column (table includes unique index). expect t }, ], uniqueConstraints: [], + checkConstraints: [], }, { data: 'unique_unique_unique;unique;true;', @@ -839,6 +867,7 @@ test(`add columns. set fk`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, user: { name: 'user', @@ -862,9 +891,11 @@ test(`add columns. set fk`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, }, enums: {}, + views: {}, }; const json2: SQLiteSchemaSquashed = { version: '6', @@ -908,6 +939,7 @@ test(`add columns. set fk`, async (t) => { }, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, user: { name: 'user', @@ -931,9 +963,11 @@ test(`add columns. set fk`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, }, enums: {}, + views: {}, }; const newJsonStatements = [ @@ -987,6 +1021,7 @@ test(`add columns. 
set fk`, async (t) => { tableName: 'ref', type: 'recreate_table', uniqueConstraints: [], + checkConstraints: [], }, ]; expect(sqliteCombineStatements(statements, json2)).toStrictEqual( @@ -1053,6 +1088,7 @@ test(`add column and fk`, async (t) => { }, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, user: { name: 'user', @@ -1076,9 +1112,11 @@ test(`add column and fk`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, }, enums: {}, + views: {}, }; const json2: SQLiteSchemaSquashed = { version: '6', @@ -1122,6 +1160,7 @@ test(`add column and fk`, async (t) => { }, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, user: { name: 'user', @@ -1145,9 +1184,11 @@ test(`add column and fk`, async (t) => { foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, + checkConstraints: {}, }, }, enums: {}, + views: {}, }; const newJsonStatements = [ diff --git a/drizzle-orm/package.json b/drizzle-orm/package.json index 829441886..bd3221754 100644 --- a/drizzle-orm/package.json +++ b/drizzle-orm/package.json @@ -1,6 +1,6 @@ { "name": "drizzle-orm", - "version": "0.34.1", + "version": "0.35.0", "description": "Drizzle ORM package for SQL databases", "type": "module", "scripts": { @@ -162,7 +162,7 @@ "@cloudflare/workers-types": "^4.20230904.0", "@electric-sql/pglite": "^0.1.1", "@libsql/client": "^0.10.0", - "@miniflare/d1": "^2.14.2", + "@miniflare/d1": "^2.14.4", "@neondatabase/serverless": "^0.9.0", "@op-engineering/op-sqlite": "^2.0.16", "@opentelemetry/api": "^1.4.1", diff --git a/drizzle-orm/src/aws-data-api/pg/driver.ts b/drizzle-orm/src/aws-data-api/pg/driver.ts index 479cc32fe..1d59bea62 100644 --- a/drizzle-orm/src/aws-data-api/pg/driver.ts +++ b/drizzle-orm/src/aws-data-api/pg/driver.ts @@ -1,3 +1,4 @@ +import { RDSDataClient, type RDSDataClientConfig } from '@aws-sdk/client-rds-data'; import { entityKind, is } from '~/entity.ts'; import type { Logger } 
from '~/logger.ts'; import { DefaultLogger } from '~/logger.ts'; @@ -14,7 +15,7 @@ import { } from '~/relations.ts'; import { Param, type SQL, sql, type SQLWrapper } from '~/sql/sql.ts'; import { Table } from '~/table.ts'; -import type { DrizzleConfig, UpdateSet } from '~/utils.ts'; +import type { DrizzleConfig, IfNotImported, ImportTypeError, UpdateSet } from '~/utils.ts'; import type { AwsDataApiClient, AwsDataApiPgQueryResult, AwsDataApiPgQueryResultHKT } from './session.ts'; import { AwsDataApiSession } from './session.ts'; @@ -36,7 +37,7 @@ export interface DrizzleAwsDataApiPgConfig< export class AwsDataApiPgDatabase< TSchema extends Record = Record, > extends PgDatabase { - static readonly [entityKind]: string = 'AwsDataApiPgDatabase'; + static override readonly [entityKind]: string = 'AwsDataApiPgDatabase'; override execute< TRow extends Record = Record, @@ -46,7 +47,7 @@ export class AwsDataApiPgDatabase< } export class AwsPgDialect extends PgDialect { - static readonly [entityKind]: string = 'AwsPgDialect'; + static override readonly [entityKind]: string = 'AwsPgDialect'; override escapeParam(num: number): string { return `:${num + 1}`; @@ -87,7 +88,7 @@ export class AwsPgDialect extends PgDialect { } } -export function drizzle = Record>( +function construct = Record>( client: AwsDataApiClient, config: DrizzleAwsDataApiPgConfig, ): AwsDataApiPgDatabase & { @@ -120,3 +121,65 @@ export function drizzle = Record = Record, + TClient extends AwsDataApiClient = RDSDataClient, +>( + ...params: IfNotImported< + RDSDataClientConfig, + [ImportTypeError<'@aws-sdk/client-rds-data'>], + [ + TClient, + DrizzleAwsDataApiPgConfig, + ] | [ + ( + | ( + & DrizzleConfig + & { + connection: RDSDataClientConfig & Omit; + } + ) + | ( + & DrizzleAwsDataApiPgConfig + & { + client: TClient; + } + ) + ), + ] + > +): AwsDataApiPgDatabase & { + $client: TClient; +} { + // eslint-disable-next-line no-instanceof/no-instanceof + if (params[0] instanceof RDSDataClient) { + return 
construct(params[0] as TClient, params[1] as DrizzleAwsDataApiPgConfig) as any; + } + + if ((params[0] as { client?: TClient }).client) { + const { client, ...drizzleConfig } = params[0] as { + client: TClient; + } & DrizzleAwsDataApiPgConfig; + + return construct(client, drizzleConfig) as any; + } + + const { connection, ...drizzleConfig } = params[0] as { + connection: RDSDataClientConfig & Omit; + } & DrizzleConfig; + const { resourceArn, database, secretArn, ...rdsConfig } = connection; + + const instance = new RDSDataClient(rdsConfig); + return construct(instance, { resourceArn, database, secretArn, ...drizzleConfig }) as any; +} + +export namespace drizzle { + export function mock = Record>( + config: DrizzleAwsDataApiPgConfig, + ): AwsDataApiPgDatabase & { + $client: '$client is not available on drizzle.mock()'; + } { + return construct({} as any, config) as any; + } +} diff --git a/drizzle-orm/src/aws-data-api/pg/session.ts b/drizzle-orm/src/aws-data-api/pg/session.ts index 4fc43ddf6..974f6d3ff 100644 --- a/drizzle-orm/src/aws-data-api/pg/session.ts +++ b/drizzle-orm/src/aws-data-api/pg/session.ts @@ -27,7 +27,7 @@ export type AwsDataApiClient = RDSDataClient; export class AwsDataApiPreparedQuery< T extends PreparedQueryConfig & { values: AwsDataApiPgQueryResult }, > extends PgPreparedQuery { - static readonly [entityKind]: string = 'AwsDataApiPreparedQuery'; + static override readonly [entityKind]: string = 'AwsDataApiPreparedQuery'; private rawQuery: ExecuteStatementCommand; @@ -154,7 +154,7 @@ export class AwsDataApiSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgSession { - static readonly [entityKind]: string = 'AwsDataApiSession'; + static override readonly [entityKind]: string = 'AwsDataApiSession'; /** @internal */ readonly rawQuery: AwsDataApiQueryBase; @@ -239,7 +239,7 @@ export class AwsDataApiTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgTransaction { - 
static readonly [entityKind]: string = 'AwsDataApiTransaction'; + static override readonly [entityKind]: string = 'AwsDataApiTransaction'; override async transaction( transaction: (tx: AwsDataApiTransaction) => Promise, diff --git a/drizzle-orm/src/better-sqlite3/driver.ts b/drizzle-orm/src/better-sqlite3/driver.ts index 50660e4d6..14e6644bc 100644 --- a/drizzle-orm/src/better-sqlite3/driver.ts +++ b/drizzle-orm/src/better-sqlite3/driver.ts @@ -1,4 +1,4 @@ -import type { Database, RunResult } from 'better-sqlite3'; +import Client, { type Database, type Options, type RunResult } from 'better-sqlite3'; import { entityKind } from '~/entity.ts'; import { DefaultLogger } from '~/logger.ts'; import { @@ -9,16 +9,25 @@ import { } from '~/relations.ts'; import { BaseSQLiteDatabase } from '~/sqlite-core/db.ts'; import { SQLiteSyncDialect } from '~/sqlite-core/dialect.ts'; -import type { DrizzleConfig } from '~/utils.ts'; +import type { DrizzleConfig, IfNotImported, ImportTypeError } from '~/utils.ts'; import { BetterSQLiteSession } from './session.ts'; +export type DrizzleBetterSQLite3DatabaseConfig = + | ({ + source?: + | string + | Buffer; + } & Options) + | string + | undefined; + export class BetterSQLite3Database = Record> extends BaseSQLiteDatabase<'sync', RunResult, TSchema> { - static readonly [entityKind]: string = 'BetterSQLite3Database'; + static override readonly [entityKind]: string = 'BetterSQLite3Database'; } -export function drizzle = Record>( +function construct = Record>( client: Database, config: DrizzleConfig = {}, ): BetterSQLite3Database & { @@ -51,3 +60,74 @@ export function drizzle = Record = Record, +>( + ...params: IfNotImported< + Database, + [ImportTypeError<'better-sqlite3'>], + | [] + | [ + Database | string, + ] + | [ + Database | string, + DrizzleConfig, + ] + | [ + ( + & DrizzleConfig + & ({ + connection?: DrizzleBetterSQLite3DatabaseConfig; + } | { + client: Database; + }) + ), + ] + > +): BetterSQLite3Database & { + $client: Database; +} { 
+ // eslint-disable-next-line no-instanceof/no-instanceof + if (params[0] instanceof Client) { + return construct(params[0] as Database, params[1] as DrizzleConfig | undefined) as any; + } + + if (typeof params[0] === 'object') { + const { connection, client, ...drizzleConfig } = params[0] as + & { + connection?: DrizzleBetterSQLite3DatabaseConfig; + client?: Database; + } + & DrizzleConfig; + + if (client) return construct(client, drizzleConfig) as any; + + if (typeof connection === 'object') { + const { source, ...options } = connection; + + const instance = new Client(source, options); + + return construct(instance, drizzleConfig) as any; + } + + const instance = new Client(connection); + + return construct(instance, drizzleConfig) as any; + } + + const instance = new Client(params[0]); + + return construct(instance, params[1]) as any; +} + +export namespace drizzle { + export function mock = Record>( + config?: DrizzleConfig, + ): BetterSQLite3Database & { + $client: '$client is not available on drizzle.mock()'; + } { + return construct({} as any, config) as any; + } +} diff --git a/drizzle-orm/src/better-sqlite3/session.ts b/drizzle-orm/src/better-sqlite3/session.ts index 5b8d29b81..8a02eb37e 100644 --- a/drizzle-orm/src/better-sqlite3/session.ts +++ b/drizzle-orm/src/better-sqlite3/session.ts @@ -26,7 +26,7 @@ export class BetterSQLiteSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SQLiteSession<'sync', RunResult, TFullSchema, TSchema> { - static readonly [entityKind]: string = 'BetterSQLiteSession'; + static override readonly [entityKind]: string = 'BetterSQLiteSession'; private logger: Logger; @@ -73,7 +73,7 @@ export class BetterSQLiteTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SQLiteTransaction<'sync', RunResult, TFullSchema, TSchema> { - static readonly [entityKind]: string = 'BetterSQLiteTransaction'; + static override readonly [entityKind]: string = 
'BetterSQLiteTransaction'; override transaction(transaction: (tx: BetterSQLiteTransaction) => T): T { const savepointName = `sp${this.nestedIndex}`; @@ -93,7 +93,7 @@ export class BetterSQLiteTransaction< export class PreparedQuery extends PreparedQueryBase< { type: 'sync'; run: RunResult; all: T['all']; get: T['get']; values: T['values']; execute: T['execute'] } > { - static readonly [entityKind]: string = 'BetterSQLitePreparedQuery'; + static override readonly [entityKind]: string = 'BetterSQLitePreparedQuery'; constructor( private stmt: Statement, diff --git a/drizzle-orm/src/bun-sqlite/driver.ts b/drizzle-orm/src/bun-sqlite/driver.ts index abcc09224..91a2e370b 100644 --- a/drizzle-orm/src/bun-sqlite/driver.ts +++ b/drizzle-orm/src/bun-sqlite/driver.ts @@ -1,6 +1,6 @@ /// -import type { Database } from 'bun:sqlite'; +import { Database } from 'bun:sqlite'; import { entityKind } from '~/entity.ts'; import { DefaultLogger } from '~/logger.ts'; import { @@ -11,16 +11,44 @@ import { } from '~/relations.ts'; import { BaseSQLiteDatabase } from '~/sqlite-core/db.ts'; import { SQLiteSyncDialect } from '~/sqlite-core/dialect.ts'; -import type { DrizzleConfig } from '~/utils.ts'; +import type { DrizzleConfig, IfNotImported, ImportTypeError } from '~/utils.ts'; import { SQLiteBunSession } from './session.ts'; export class BunSQLiteDatabase< TSchema extends Record = Record, > extends BaseSQLiteDatabase<'sync', void, TSchema> { - static readonly [entityKind]: string = 'BunSQLiteDatabase'; + static override readonly [entityKind]: string = 'BunSQLiteDatabase'; } -export function drizzle = Record>( +type DrizzleBunSqliteDatabaseOptions = { + /** + * Open the database as read-only (no write operations, no create). 
+ * + * Equivalent to {@link constants.SQLITE_OPEN_READONLY} + */ + readonly?: boolean; + /** + * Allow creating a new database + * + * Equivalent to {@link constants.SQLITE_OPEN_CREATE} + */ + create?: boolean; + /** + * Open the database as read-write + * + * Equivalent to {@link constants.SQLITE_OPEN_READWRITE} + */ + readwrite?: boolean; +}; + +export type DrizzleBunSqliteDatabaseConfig = + | ({ + source?: string; + } & DrizzleBunSqliteDatabaseOptions) + | string + | undefined; + +function construct = Record>( client: Database, config: DrizzleConfig = {}, ): BunSQLiteDatabase & { @@ -53,3 +81,77 @@ export function drizzle = Record = Record, + TClient extends Database = Database, +>( + ...params: IfNotImported< + Database, + [ImportTypeError<'bun-types'>], + | [] + | [ + TClient | string, + ] + | [ + TClient | string, + DrizzleConfig, + ] + | [ + ( + & DrizzleConfig + & ({ + connection?: DrizzleBunSqliteDatabaseConfig; + } | { + client: TClient; + }) + ), + ] + > +): BunSQLiteDatabase & { + $client: TClient; +} { + // eslint-disable-next-line no-instanceof/no-instanceof + if (params[0] instanceof Database) { + return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; + } + + if (typeof params[0] === 'object') { + const { connection, client, ...drizzleConfig } = params[0] as + & ({ + connection?: DrizzleBunSqliteDatabaseConfig | string; + client?: TClient; + }) + & DrizzleConfig; + + if (client) return construct(client, drizzleConfig) as any; + + if (typeof connection === 'object') { + const { source, ...opts } = connection; + + const options = Object.values(opts).filter((v) => v !== undefined).length ? 
opts : undefined; + + const instance = new Database(source, options); + + return construct(instance, drizzleConfig) as any; + } + + const instance = new Database(connection); + + return construct(instance, drizzleConfig) as any; + } + + const instance = new Database(params[0]); + + return construct(instance, params[1]) as any; +} + +export namespace drizzle { + export function mock = Record>( + config?: DrizzleConfig, + ): BunSQLiteDatabase & { + $client: '$client is not available on drizzle.mock()'; + } { + return construct({} as any, config) as any; + } +} diff --git a/drizzle-orm/src/bun-sqlite/session.ts b/drizzle-orm/src/bun-sqlite/session.ts index ff4da3e6e..fd02e4f00 100644 --- a/drizzle-orm/src/bun-sqlite/session.ts +++ b/drizzle-orm/src/bun-sqlite/session.ts @@ -28,7 +28,7 @@ export class SQLiteBunSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SQLiteSession<'sync', void, TFullSchema, TSchema> { - static readonly [entityKind]: string = 'SQLiteBunSession'; + static override readonly [entityKind]: string = 'SQLiteBunSession'; private logger: Logger; @@ -83,7 +83,7 @@ export class SQLiteBunTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SQLiteTransaction<'sync', void, TFullSchema, TSchema> { - static readonly [entityKind]: string = 'SQLiteBunTransaction'; + static override readonly [entityKind]: string = 'SQLiteBunTransaction'; override transaction(transaction: (tx: SQLiteBunTransaction) => T): T { const savepointName = `sp${this.nestedIndex}`; @@ -103,7 +103,7 @@ export class SQLiteBunTransaction< export class PreparedQuery extends PreparedQueryBase< { type: 'sync'; run: void; all: T['all']; get: T['get']; values: T['values']; execute: T['execute'] } > { - static readonly [entityKind]: string = 'SQLiteBunPreparedQuery'; + static override readonly [entityKind]: string = 'SQLiteBunPreparedQuery'; constructor( private stmt: Statement, diff --git a/drizzle-orm/src/connect.ts 
b/drizzle-orm/src/connect.ts deleted file mode 100644 index 6e26b2922..000000000 --- a/drizzle-orm/src/connect.ts +++ /dev/null @@ -1,2 +0,0 @@ -export * from './monodriver.ts'; -export * from './monomigrator.ts'; diff --git a/drizzle-orm/src/d1/driver.ts b/drizzle-orm/src/d1/driver.ts index 6ec8a5294..7b4bbdfb6 100644 --- a/drizzle-orm/src/d1/driver.ts +++ b/drizzle-orm/src/d1/driver.ts @@ -24,7 +24,7 @@ export type AnyD1Database = IfNotImported< export class DrizzleD1Database< TSchema extends Record = Record, > extends BaseSQLiteDatabase<'async', D1Result, TSchema> { - static readonly [entityKind]: string = 'D1Database'; + static override readonly [entityKind]: string = 'D1Database'; /** @internal */ declare readonly session: SQLiteD1Session>; diff --git a/drizzle-orm/src/d1/session.ts b/drizzle-orm/src/d1/session.ts index 0f2989c12..61ef49315 100644 --- a/drizzle-orm/src/d1/session.ts +++ b/drizzle-orm/src/d1/session.ts @@ -28,7 +28,7 @@ export class SQLiteD1Session< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SQLiteSession<'async', D1Result, TFullSchema, TSchema> { - static readonly [entityKind]: string = 'SQLiteD1Session'; + static override readonly [entityKind]: string = 'SQLiteD1Session'; private logger: Logger; @@ -116,7 +116,7 @@ export class D1Transaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SQLiteTransaction<'async', D1Result, TFullSchema, TSchema> { - static readonly [entityKind]: string = 'D1Transaction'; + static override readonly [entityKind]: string = 'D1Transaction'; override async transaction(transaction: (tx: D1Transaction) => Promise): Promise { const savepointName = `sp${this.nestedIndex}`; @@ -151,7 +151,7 @@ function d1ToRawMapping(results: any) { export class D1PreparedQuery extends SQLitePreparedQuery< { type: 'async'; run: D1Response; all: T['all']; get: T['get']; values: T['values']; execute: T['execute'] } > { - static readonly [entityKind]: string = 
'D1PreparedQuery'; + static override readonly [entityKind]: string = 'D1PreparedQuery'; /** @internal */ customResultMapper?: (rows: unknown[][], mapColumnValue?: (value: unknown) => unknown) => unknown; diff --git a/drizzle-orm/src/entity.ts b/drizzle-orm/src/entity.ts index d9ab3d36a..2b6dfb4de 100644 --- a/drizzle-orm/src/entity.ts +++ b/drizzle-orm/src/entity.ts @@ -26,7 +26,7 @@ export function is>(value: any, type: T): valu ); } - let cls = value.constructor; + let cls = Object.getPrototypeOf(value).constructor; if (cls) { // Traverse the prototype chain to find the entityKind while (cls) { diff --git a/drizzle-orm/src/errors.ts b/drizzle-orm/src/errors.ts index ede6e0a59..a72615c9b 100644 --- a/drizzle-orm/src/errors.ts +++ b/drizzle-orm/src/errors.ts @@ -11,7 +11,7 @@ export class DrizzleError extends Error { } export class TransactionRollbackError extends DrizzleError { - static readonly [entityKind]: string = 'TransactionRollbackError'; + static override readonly [entityKind]: string = 'TransactionRollbackError'; constructor() { super({ message: 'Rollback' }); diff --git a/drizzle-orm/src/expo-sqlite/driver.ts b/drizzle-orm/src/expo-sqlite/driver.ts index d9cf47b01..6d9ebe375 100644 --- a/drizzle-orm/src/expo-sqlite/driver.ts +++ b/drizzle-orm/src/expo-sqlite/driver.ts @@ -15,7 +15,7 @@ import { ExpoSQLiteSession } from './session.ts'; export class ExpoSQLiteDatabase = Record> extends BaseSQLiteDatabase<'sync', SQLiteRunResult, TSchema> { - static readonly [entityKind]: string = 'ExpoSQLiteDatabase'; + static override readonly [entityKind]: string = 'ExpoSQLiteDatabase'; } export function drizzle = Record>( diff --git a/drizzle-orm/src/expo-sqlite/session.ts b/drizzle-orm/src/expo-sqlite/session.ts index d87236bc2..9fcc4b93c 100644 --- a/drizzle-orm/src/expo-sqlite/session.ts +++ b/drizzle-orm/src/expo-sqlite/session.ts @@ -26,7 +26,7 @@ export class ExpoSQLiteSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends 
SQLiteSession<'sync', SQLiteRunResult, TFullSchema, TSchema> { - static readonly [entityKind]: string = 'ExpoSQLiteSession'; + static override readonly [entityKind]: string = 'ExpoSQLiteSession'; private logger: Logger; @@ -80,7 +80,7 @@ export class ExpoSQLiteTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SQLiteTransaction<'sync', SQLiteRunResult, TFullSchema, TSchema> { - static readonly [entityKind]: string = 'ExpoSQLiteTransaction'; + static override readonly [entityKind]: string = 'ExpoSQLiteTransaction'; override transaction(transaction: (tx: ExpoSQLiteTransaction) => T): T { const savepointName = `sp${this.nestedIndex}`; @@ -100,7 +100,7 @@ export class ExpoSQLiteTransaction< export class ExpoSQLitePreparedQuery extends SQLitePreparedQuery< { type: 'sync'; run: SQLiteRunResult; all: T['all']; get: T['get']; values: T['values']; execute: T['execute'] } > { - static readonly [entityKind]: string = 'ExpoSQLitePreparedQuery'; + static override readonly [entityKind]: string = 'ExpoSQLitePreparedQuery'; constructor( private stmt: SQLiteStatement, diff --git a/drizzle-orm/src/libsql/driver.ts b/drizzle-orm/src/libsql/driver.ts index 1e87e7555..c5e3957d2 100644 --- a/drizzle-orm/src/libsql/driver.ts +++ b/drizzle-orm/src/libsql/driver.ts @@ -1,4 +1,7 @@ -import type { Client, ResultSet } from '@libsql/client'; +import { type Client, type Config, createClient, type ResultSet } from '@libsql/client'; +import { HttpClient } from '@libsql/client/http'; +import { Sqlite3Client } from '@libsql/client/sqlite3'; +import { WsClient } from '@libsql/client/ws'; import type { BatchItem, BatchResponse } from '~/batch.ts'; import { entityKind } from '~/entity.ts'; import { DefaultLogger } from '~/logger.ts'; @@ -11,13 +14,13 @@ import { } from '~/relations.ts'; import { BaseSQLiteDatabase } from '~/sqlite-core/db.ts'; import { SQLiteAsyncDialect } from '~/sqlite-core/dialect.ts'; -import type { DrizzleConfig } from '~/utils.ts'; +import 
type { DrizzleConfig, IfNotImported, ImportTypeError } from '~/utils.ts'; import { LibSQLSession } from './session.ts'; export class LibSQLDatabase< TSchema extends Record = Record, > extends BaseSQLiteDatabase<'async', ResultSet, TSchema> { - static readonly [entityKind]: string = 'LibSQLDatabase'; + static override readonly [entityKind]: string = 'LibSQLDatabase'; /** @internal */ declare readonly session: LibSQLSession>; @@ -29,7 +32,7 @@ export class LibSQLDatabase< } } -export function drizzle< +function construct< TSchema extends Record = Record, >(client: Client, config: DrizzleConfig = {}): LibSQLDatabase & { $client: Client; @@ -61,3 +64,63 @@ export function drizzle< return db as any; } + +export function drizzle< + TSchema extends Record = Record, + TClient extends Client = Client, +>( + ...params: IfNotImported< + Client, + [ImportTypeError<'@libsql/client'>], + [ + TClient | string, + ] | [ + TClient | string, + DrizzleConfig, + ] | [ + ( + & DrizzleConfig + & ({ + connection: string | Config; + } | { + client: TClient; + }) + ), + ] + > +): LibSQLDatabase & { + $client: TClient; +} { + // eslint-disable-next-line no-instanceof/no-instanceof + if (params[0] instanceof WsClient || params[0] instanceof HttpClient || params[0] instanceof Sqlite3Client) { + return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; + } + + if (typeof params[0] === 'string') { + const instance = createClient({ + url: params[0], + }); + + return construct(instance, params[1]) as any; + } + + const { connection, client, ...drizzleConfig } = params[0] as + & { connection?: Config; client?: TClient } + & DrizzleConfig; + + if (client) return construct(client, drizzleConfig) as any; + + const instance = typeof connection === 'string' ? 
createClient({ url: connection }) : createClient(connection!); + + return construct(instance, drizzleConfig) as any; +} + +export namespace drizzle { + export function mock = Record>( + config?: DrizzleConfig, + ): LibSQLDatabase & { + $client: '$client is not available on drizzle.mock()'; + } { + return construct({} as any, config) as any; + } +} diff --git a/drizzle-orm/src/libsql/session.ts b/drizzle-orm/src/libsql/session.ts index 640977734..617ebe342 100644 --- a/drizzle-orm/src/libsql/session.ts +++ b/drizzle-orm/src/libsql/session.ts @@ -27,7 +27,7 @@ export class LibSQLSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SQLiteSession<'async', ResultSet, TFullSchema, TSchema> { - static readonly [entityKind]: string = 'LibSQLSession'; + static override readonly [entityKind]: string = 'LibSQLSession'; private logger: Logger; @@ -132,7 +132,7 @@ export class LibSQLTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SQLiteTransaction<'async', ResultSet, TFullSchema, TSchema> { - static readonly [entityKind]: string = 'LibSQLTransaction'; + static override readonly [entityKind]: string = 'LibSQLTransaction'; override async transaction(transaction: (tx: LibSQLTransaction) => Promise): Promise { const savepointName = `sp${this.nestedIndex}`; @@ -152,7 +152,7 @@ export class LibSQLTransaction< export class LibSQLPreparedQuery extends SQLitePreparedQuery< { type: 'async'; run: ResultSet; all: T['all']; get: T['get']; values: T['values']; execute: T['execute'] } > { - static readonly [entityKind]: string = 'LibSQLPreparedQuery'; + static override readonly [entityKind]: string = 'LibSQLPreparedQuery'; constructor( private client: Client, diff --git a/drizzle-orm/src/monodriver.ts b/drizzle-orm/src/monodriver.ts deleted file mode 100644 index 9af80db06..000000000 --- a/drizzle-orm/src/monodriver.ts +++ /dev/null @@ -1,659 +0,0 @@ -/* eslint-disable import/extensions */ -import type { 
RDSDataClient, RDSDataClientConfig as RDSConfig } from '@aws-sdk/client-rds-data'; -import type { PGlite, PGliteOptions } from '@electric-sql/pglite'; -import type { Client as LibsqlClient, Config as LibsqlConfig } from '@libsql/client'; -import type { - HTTPTransactionOptions as NeonHttpConfig, - NeonQueryFunction, - Pool as NeonServerlessPool, - PoolConfig as NeonServerlessConfig, - QueryResult, - QueryResultRow, -} from '@neondatabase/serverless'; -import type { Client as PlanetscaleClient, Config as PlanetscaleConfig } from '@planetscale/database'; -import type { Config as TiDBServerlessConfig, Connection as TiDBConnection } from '@tidbcloud/serverless'; -import type { VercelPool } from '@vercel/postgres'; -import type { Database as BetterSQLite3Database, Options as BetterSQLite3Options } from 'better-sqlite3'; -import type { Database as BunDatabase } from 'bun:sqlite'; -import type { Pool as Mysql2Pool, PoolOptions as Mysql2Config } from 'mysql2'; -import type { Pool as NodePgPool, PoolConfig as NodePgPoolConfig } from 'pg'; -import type { - Options as PostgresJSOptions, - PostgresType as PostgresJSPostgresType, - Sql as PostgresJsClient, -} from 'postgres'; -import type { AwsDataApiPgDatabase, DrizzleAwsDataApiPgConfig } from './aws-data-api/pg/index.ts'; -import type { BetterSQLite3Database as DrizzleBetterSQLite3Database } from './better-sqlite3/index.ts'; -import type { BunSQLiteDatabase } from './bun-sqlite/index.ts'; -import type { AnyD1Database, DrizzleD1Database } from './d1/index.ts'; -import type { LibSQLDatabase } from './libsql/index.ts'; -import type { MySql2Database, MySql2DrizzleConfig } from './mysql2/index.ts'; -import type { NeonHttpDatabase } from './neon-http/index.ts'; -import type { NeonDatabase } from './neon-serverless/index.ts'; -import type { NodePgDatabase } from './node-postgres/driver.ts'; -import type { PgliteDatabase } from './pglite/driver.ts'; -import type { PlanetScaleDatabase } from './planetscale-serverless/index.ts'; 
-import type { PostgresJsDatabase } from './postgres-js/index.ts'; -import type { TiDBServerlessDatabase } from './tidb-serverless/index.ts'; -import type { DrizzleConfig, IfNotImported } from './utils.ts'; -import type { VercelPgDatabase } from './vercel-postgres/index.ts'; - -type BunSqliteDatabaseOptions = { - /** - * Open the database as read-only (no write operations, no create). - * - * Equivalent to {@link constants.SQLITE_OPEN_READONLY} - */ - readonly?: boolean; - /** - * Allow creating a new database - * - * Equivalent to {@link constants.SQLITE_OPEN_CREATE} - */ - create?: boolean; - /** - * Open the database as read-write - * - * Equivalent to {@link constants.SQLITE_OPEN_READWRITE} - */ - readwrite?: boolean; -}; - -type BunSqliteDatabaseConfig = - | ({ - source?: string; - } & BunSqliteDatabaseOptions) - | string - | undefined; - -type BetterSQLite3DatabaseConfig = - | ({ - source?: - | string - | Buffer; - } & BetterSQLite3Options) - | string - | undefined; - -type MonodriverNeonHttpConfig = - | ({ - connectionString: string; - } & NeonHttpConfig) - | string; - -type AwsDataApiConnectionConfig = RDSConfig & Omit; - -type DatabaseClient = - | 'node-postgres' - | 'postgres-js' - | 'neon-websocket' - | 'neon-http' - | 'vercel-postgres' - | 'aws-data-api-pg' - | 'planetscale' - | 'mysql2' - | 'tidb-serverless' - | 'libsql' - | 'turso' - | 'd1' - | 'bun:sqlite' - | 'better-sqlite3' - | 'pglite'; - -type ClientDrizzleInstanceMap> = { - 'node-postgres': NodePgDatabase; - 'postgres-js': PostgresJsDatabase; - 'neon-websocket': NeonDatabase; - 'neon-http': NeonHttpDatabase; - 'vercel-postgres': VercelPgDatabase; - 'aws-data-api-pg': AwsDataApiPgDatabase; - planetscale: PlanetScaleDatabase; - mysql2: MySql2Database; - 'tidb-serverless': TiDBServerlessDatabase; - libsql: LibSQLDatabase; - turso: LibSQLDatabase; - d1: DrizzleD1Database; - 'bun:sqlite': BunSQLiteDatabase; - 'better-sqlite3': DrizzleBetterSQLite3Database; - pglite: PgliteDatabase; -}; - -type 
Primitive = string | number | boolean | undefined | null; - -type ClientInstanceMap = { - 'node-postgres': NodePgPool; - 'postgres-js': PostgresJsClient; - 'neon-websocket': NeonServerlessPool; - 'neon-http': NeonQueryFunction; - 'vercel-postgres': - & VercelPool - & ((strings: TemplateStringsArray, ...values: Primitive[]) => Promise>); - 'aws-data-api-pg': RDSDataClient; - planetscale: PlanetscaleClient; - mysql2: Mysql2Pool; - 'tidb-serverless': TiDBConnection; - libsql: LibsqlClient; - turso: LibsqlClient; - d1: AnyD1Database; - 'bun:sqlite': BunDatabase; - 'better-sqlite3': BetterSQLite3Database; - pglite: PGlite; -}; - -type ClientTypeImportErrorMap = { - 'node-postgres': 'pg`, `@types/pg'; - 'postgres-js': 'postgres'; - 'neon-websocket': '@neondatabase/serverless'; - 'neon-http': '@neondatabase/serverless'; - 'vercel-postgres': '@vercel/postgres'; - 'aws-data-api-pg': '@aws-sdk/client-rds-data'; - planetscale: '@planetscale/database'; - mysql2: 'mysql2'; - 'tidb-serverless': '@tidbcloud/serverless'; - libsql: '@libsql/client'; - turso: '@libsql/client'; - d1: '@cloudflare/workers-types` or `@miniflare/d1'; - 'bun:sqlite': 'bun-types'; - 'better-sqlite3': 'better-sqlite3'; - pglite: '@electric-sql/pglite'; -}; - -type ImportTypeError = - `Please install \`${ClientTypeImportErrorMap[TClient]}\`to allow Drizzle ORM to connect to the database`; - -type InitializerParams = { - 'node-postgres': { - connection: string | NodePgPoolConfig; - }; - 'postgres-js': { - connection: string | ({ url?: string } & PostgresJSOptions>); - }; - 'neon-websocket': { - connection: string | NeonServerlessConfig; - }; - 'neon-http': { - connection: MonodriverNeonHttpConfig; - }; - 'vercel-postgres': {}; - 'aws-data-api-pg': { - connection: AwsDataApiConnectionConfig; - }; - planetscale: { - connection: PlanetscaleConfig | string; - }; - mysql2: { - connection: Mysql2Config | string; - }; - 'tidb-serverless': { - connection: TiDBServerlessConfig | string; - }; - libsql: { - connection: 
LibsqlConfig | string; - }; - turso: { - connection: LibsqlConfig | string; - }; - d1: { - connection: AnyD1Database; - }; - 'bun:sqlite': { - connection?: BunSqliteDatabaseConfig; - }; - 'better-sqlite3': { - connection?: BetterSQLite3DatabaseConfig; - }; - pglite: { - connection?: (PGliteOptions & { dataDir?: string }) | string; - }; -}; - -type DetermineClient< - TClient extends DatabaseClient, - TSchema extends Record, -> = - & ClientDrizzleInstanceMap< - TSchema - >[TClient] - & { - $client: ClientInstanceMap[TClient]; - }; - -const importError = (libName: string) => { - throw new Error( - `Please install '${libName}' to allow Drizzle ORM to connect to the database`, - ); -}; - -function assertUnreachable(_: never | undefined): never { - throw new Error("Didn't expect to get here"); -} - -export async function drizzle< - TClient extends DatabaseClient, - TSchema extends Record = Record, ->( - client: TClient, - ...params: TClient extends 'bun:sqlite' | 'better-sqlite3' | 'pglite' ? ( - [] | [ - ( - & IfNotImported< - ClientInstanceMap[TClient], - { connection: ImportTypeError }, - InitializerParams[TClient] - > - & DrizzleConfig - ), - ] | [string] - ) - : TClient extends 'vercel-postgres' ? ([] | [ - ( - & IfNotImported< - ClientInstanceMap[TClient], - { connection: ImportTypeError }, - InitializerParams[TClient] - > - & DrizzleConfig - ), - ]) - : TClient extends - 'postgres-js' | 'tidb-serverless' | 'libsql' | 'turso' | 'planetscale' | 'neon-http' | 'node-postgres' ? ( - [ - ( - & IfNotImported< - ClientInstanceMap[TClient], - { connection: ImportTypeError }, - InitializerParams[TClient] - > - & DrizzleConfig - ), - ] | [string] - ) - : TClient extends 'mysql2' ? ( - [ - ( - & IfNotImported< - ClientInstanceMap[TClient], - { connection: ImportTypeError }, - InitializerParams[TClient] - > - & MySql2DrizzleConfig - ), - ] | [string] - ) - : TClient extends 'neon-websocket' ? 
( - | [ - & IfNotImported< - ClientInstanceMap[TClient], - { connection: ImportTypeError }, - InitializerParams[TClient] - > - & DrizzleConfig - & { - ws?: any; - }, - ] - | [string] - ) - : [ - ( - & IfNotImported< - ClientInstanceMap[TClient], - { connection: ImportTypeError }, - InitializerParams[TClient] - > - & DrizzleConfig - ), - ] -): Promise> { - switch (client) { - case 'node-postgres': { - const defpg = await import('pg').catch(() => importError('pg')); - const { drizzle } = await import('./node-postgres/index.ts'); - - if (typeof params[0] === 'object') { - const { connection, ...drizzleConfig } = params[0] as - & { connection: NodePgPoolConfig | string } - & DrizzleConfig; - - const instance = typeof connection === 'string' - ? new defpg.default.Pool({ - connectionString: connection, - }) - : new defpg.default.Pool(connection); - const db = drizzle(instance, drizzleConfig); - - return db as any; - } - - const instance = typeof params[0] === 'string' - ? new defpg.default.Pool({ - connectionString: params[0], - }) - : new defpg.default.Pool(params[0]); - const db = drizzle(instance); - - return db as any; - } - case 'aws-data-api-pg': { - const { connection, ...drizzleConfig } = params[0] as { - connection: AwsDataApiConnectionConfig; - } & DrizzleConfig; - const { resourceArn, database, secretArn, ...rdsConfig } = connection; - - const { RDSDataClient } = await import('@aws-sdk/client-rds-data').catch(() => - importError('@aws-sdk/client-rds-data') - ); - const { drizzle } = await import('./aws-data-api/pg/index.ts'); - - const instance = new RDSDataClient(rdsConfig); - const db = drizzle(instance, { resourceArn, database, secretArn, ...drizzleConfig }); - - return db as any; - } - case 'better-sqlite3': { - const { default: Client } = await import('better-sqlite3').catch(() => importError('better-sqlite3')); - const { drizzle } = await import('./better-sqlite3/index.ts'); - - if (typeof params[0] === 'object') { - const { connection, ...drizzleConfig 
} = params[0] as { - connection: BetterSQLite3DatabaseConfig; - } & DrizzleConfig; - - if (typeof connection === 'object') { - const { source, ...options } = connection; - - const instance = new Client(source, options); - const db = drizzle(instance, drizzleConfig); - - return db as any; - } - - const instance = new Client(connection); - const db = drizzle(instance, drizzleConfig); - - return db as any; - } - - const instance = new Client(params[0]); - const db = drizzle(instance); - - return db as any; - } - case 'bun:sqlite': { - const { Database: Client } = await import('bun:sqlite').catch(() => { - throw new Error(`Please use bun to use 'bun:sqlite' for Drizzle ORM to connect to database`); - }); - const { drizzle } = await import('./bun-sqlite/index.ts'); - - if (typeof params[0] === 'object') { - const { connection, ...drizzleConfig } = params[0] as { - connection: BunSqliteDatabaseConfig | string | undefined; - } & DrizzleConfig; - - if (typeof connection === 'object') { - const { source, ...opts } = connection; - - const options = Object.values(opts).filter((v) => v !== undefined).length ? 
opts : undefined; - - const instance = new Client(source, options); - const db = drizzle(instance, drizzleConfig); - - return db as any; - } - - const instance = new Client(connection); - const db = drizzle(instance, drizzleConfig); - - return db as any; - } - - const instance = new Client(params[0]); - const db = drizzle(instance); - - return db as any; - } - case 'd1': { - const { connection, ...drizzleConfig } = params[0] as { connection: AnyD1Database } & DrizzleConfig; - - const { drizzle } = await import('./d1/index.ts'); - - const db = drizzle(connection, drizzleConfig); - - return db as any; - } - case 'libsql': - case 'turso': { - const { createClient } = await import('@libsql/client').catch(() => importError('@libsql/client')); - const { drizzle } = await import('./libsql/index.ts'); - - if (typeof params[0] === 'string') { - const instance = createClient({ - url: params[0], - }); - const db = drizzle(instance); - - return db as any; - } - - const { connection, ...drizzleConfig } = params[0] as any as { connection: LibsqlConfig } & DrizzleConfig; - - const instance = typeof connection === 'string' ? 
createClient({ url: connection }) : createClient(connection); - const db = drizzle(instance, drizzleConfig); - - return db as any; - } - case 'mysql2': { - const { createPool } = await import('mysql2/promise').catch(() => importError('mysql2')); - const { drizzle } = await import('./mysql2/index.ts'); - - if (typeof params[0] === 'object') { - const { connection, ...drizzleConfig } = params[0] as - & { connection: Mysql2Config | string } - & MySql2DrizzleConfig; - - const instance = createPool(connection as Mysql2Config); - const db = drizzle(instance, drizzleConfig); - - return db as any; - } - - const connectionString = params[0]!; - const instance = createPool(connectionString); - - const db = drizzle(instance); - - return db as any; - } - case 'neon-http': { - const { neon } = await import('@neondatabase/serverless').catch(() => importError('@neondatabase/serverless')); - const { drizzle } = await import('./neon-http/index.ts'); - - if (typeof params[0] === 'object') { - const { connection, ...drizzleConfig } = params[0] as { connection: MonodriverNeonHttpConfig } & DrizzleConfig; - - if (typeof connection === 'object') { - const { connectionString, ...options } = connection; - - const instance = neon(connectionString, options); - const db = drizzle(instance, drizzleConfig); - - return db as any; - } - - const instance = neon(connection); - const db = drizzle(instance, drizzleConfig); - - return db as any; - } - - const instance = neon(params[0]!); - const db = drizzle(instance); - - return db as any; - } - case 'neon-websocket': { - const { Pool, neonConfig } = await import('@neondatabase/serverless').catch(() => - importError('@neondatabase/serverless') - ); - const { drizzle } = await import('./neon-serverless/index.ts'); - if (typeof params[0] === 'string') { - const instance = new Pool({ - connectionString: params[0], - }); - - const db = drizzle(instance); - - return db as any; - } - - if (typeof params[0] === 'object') { - const { connection, ws, 
...drizzleConfig } = params[0] as { - connection?: NeonServerlessConfig | string; - ws?: any; - } & DrizzleConfig; - - if (ws) { - neonConfig.webSocketConstructor = ws; - } - - const instance = typeof connection === 'string' - ? new Pool({ - connectionString: connection, - }) - : new Pool(connection); - - const db = drizzle(instance, drizzleConfig); - - return db as any; - } - - const instance = new Pool(); - const db = drizzle(instance); - - return db as any; - } - case 'planetscale': { - const { Client } = await import('@planetscale/database').catch(() => importError('@planetscale/database')); - const { drizzle } = await import('./planetscale-serverless/index.ts'); - - if (typeof params[0] === 'object') { - const { connection, ...drizzleConfig } = params[0] as - & { connection: PlanetscaleConfig | string } - & DrizzleConfig; - - const instance = typeof connection === 'string' - ? new Client({ - url: connection, - }) - : new Client( - connection, - ); - const db = drizzle(instance, drizzleConfig); - return db as any; - } - - const instance = new Client({ - url: params[0], - }); - const db = drizzle(instance); - - return db as any; - } - case 'postgres-js': { - const { default: client } = await import('postgres').catch(() => importError('postgres')); - const { drizzle } = await import('./postgres-js/index.ts'); - - if (typeof params[0] === 'object') { - const { connection, ...drizzleConfig } = params[0] as { - connection: { url?: string } & PostgresJSOptions>; - } & DrizzleConfig; - - if (typeof connection === 'object' && connection.url !== undefined) { - const { url, ...config } = connection; - - const instance = client(url, config); - const db = drizzle(instance, drizzleConfig); - - return db as any; - } - - const instance = client(connection); - const db = drizzle(instance, drizzleConfig); - - return db as any; - } - - const instance = client(params[0]!); - const db = drizzle(instance); - - return db as any; - } - case 'tidb-serverless': { - const { connect } = 
await import('@tidbcloud/serverless').catch(() => importError('@tidbcloud/serverless')); - const { drizzle } = await import('./tidb-serverless/index.ts'); - - if (typeof params[0] === 'string') { - const instance = connect({ - url: params[0], - }); - const db = drizzle(instance); - - return db as any; - } - - const { connection, ...drizzleConfig } = params[0] as - & { connection: TiDBServerlessConfig | string } - & DrizzleConfig; - - const instance = typeof connection === 'string' - ? connect({ - url: connection, - }) - : connect(connection); - const db = drizzle(instance, drizzleConfig); - - return db as any; - } - case 'vercel-postgres': { - const drizzleConfig = params[0] as DrizzleConfig | undefined; - const { sql } = await import('@vercel/postgres').catch(() => importError('@vercel/postgres')); - const { drizzle } = await import('./vercel-postgres/index.ts'); - - const db = drizzle(sql, drizzleConfig); - - return db as any; - } - - case 'pglite': { - const { PGlite } = await import('@electric-sql/pglite').catch(() => importError('@electric-sql/pglite')); - const { drizzle } = await import('./pglite/index.ts'); - - if (typeof params[0] === 'object') { - const { connection, ...drizzleConfig } = params[0] as { - connection: PGliteOptions & { dataDir: string }; - } & DrizzleConfig; - - if (typeof connection === 'object') { - const { dataDir, ...options } = connection; - - const instance = new PGlite(dataDir, options); - const db = drizzle(instance, drizzleConfig); - - return db as any; - } - - const instance = new PGlite(connection); - const db = drizzle(instance, drizzleConfig); - - return db as any; - } - - const instance = new PGlite(params[0]); - const db = drizzle(instance); - - return db as any; - } - } - - assertUnreachable(client); -} diff --git a/drizzle-orm/src/monomigrator.ts b/drizzle-orm/src/monomigrator.ts deleted file mode 100644 index 9f4a748e0..000000000 --- a/drizzle-orm/src/monomigrator.ts +++ /dev/null @@ -1,109 +0,0 @@ -/* eslint-disable 
import/extensions */ -import type { AwsDataApiPgDatabase } from './aws-data-api/pg/index.ts'; -import type { BetterSQLite3Database } from './better-sqlite3/index.ts'; -import type { BunSQLiteDatabase } from './bun-sqlite/index.ts'; -import type { DrizzleD1Database } from './d1/index.ts'; -import { entityKind } from './entity.ts'; -import type { LibSQLDatabase } from './libsql/index.ts'; -import type { MigrationConfig } from './migrator.ts'; -import type { MySql2Database } from './mysql2/index.ts'; -import type { NeonHttpDatabase } from './neon-http/index.ts'; -import type { NeonDatabase } from './neon-serverless/index.ts'; -import type { NodePgDatabase } from './node-postgres/index.ts'; -import type { PgliteDatabase } from './pglite/driver.ts'; -import type { PlanetScaleDatabase } from './planetscale-serverless/index.ts'; -import type { PostgresJsDatabase } from './postgres-js/index.ts'; -import type { TiDBServerlessDatabase } from './tidb-serverless/index.ts'; -import type { VercelPgDatabase } from './vercel-postgres/index.ts'; - -export async function migrate( - db: - | AwsDataApiPgDatabase - | BetterSQLite3Database - | BunSQLiteDatabase - | DrizzleD1Database - | LibSQLDatabase - | MySql2Database - | NeonHttpDatabase - | NeonDatabase - | NodePgDatabase - | PlanetScaleDatabase - | PostgresJsDatabase - | VercelPgDatabase - | TiDBServerlessDatabase - | PgliteDatabase, - config: MigrationConfig, -) { - switch (( db).constructor[entityKind]) { - case 'AwsDataApiPgDatabase': { - const { migrate } = await import('./aws-data-api/pg/migrator.ts'); - - return migrate(db as AwsDataApiPgDatabase, config as MigrationConfig); - } - case 'BetterSQLite3Database': { - const { migrate } = await import('./better-sqlite3/migrator.ts'); - - return migrate(db as BetterSQLite3Database, config as MigrationConfig); - } - case 'BunSQLiteDatabase': { - const { migrate } = await import('./bun-sqlite/migrator.ts'); - - return migrate(db as BunSQLiteDatabase, config as MigrationConfig); - } - 
case 'D1Database': { - const { migrate } = await import('./d1/migrator.ts'); - - return migrate(db as DrizzleD1Database, config as MigrationConfig); - } - case 'LibSQLDatabase': { - const { migrate } = await import('./libsql/migrator.ts'); - - return migrate(db as LibSQLDatabase, config as MigrationConfig); - } - case 'MySql2Database': { - const { migrate } = await import('./mysql2/migrator.ts'); - - return migrate(db as MySql2Database, config as MigrationConfig); - } - case 'NeonHttpDatabase': { - const { migrate } = await import('./neon-http/migrator.ts'); - - return migrate(db as NeonHttpDatabase, config as MigrationConfig); - } - case 'NeonServerlessDatabase': { - const { migrate } = await import('./neon-serverless/migrator.ts'); - - return migrate(db as NeonDatabase, config as MigrationConfig); - } - case 'NodePgDatabase': { - const { migrate } = await import('./node-postgres/migrator.ts'); - - return migrate(db as NodePgDatabase, config as MigrationConfig); - } - case 'PlanetScaleDatabase': { - const { migrate } = await import('./planetscale-serverless/migrator.ts'); - - return migrate(db as PlanetScaleDatabase, config as MigrationConfig); - } - case 'PostgresJsDatabase': { - const { migrate } = await import('./postgres-js/migrator.ts'); - - return migrate(db as PostgresJsDatabase, config as MigrationConfig); - } - case 'TiDBServerlessDatabase': { - const { migrate } = await import('./tidb-serverless/migrator.ts'); - - return migrate(db as TiDBServerlessDatabase, config as MigrationConfig); - } - case 'VercelPgDatabase': { - const { migrate } = await import('./vercel-postgres/migrator.ts'); - - return migrate(db as VercelPgDatabase, config as MigrationConfig); - } - case 'PgliteDatabase': { - const { migrate } = await import('./pglite/migrator.ts'); - - return migrate(db as PgliteDatabase, config as MigrationConfig); - } - } -} diff --git a/drizzle-orm/src/mysql-core/columns/bigint.ts b/drizzle-orm/src/mysql-core/columns/bigint.ts index 5882b1025..7411c07ce 
100644 --- a/drizzle-orm/src/mysql-core/columns/bigint.ts +++ b/drizzle-orm/src/mysql-core/columns/bigint.ts @@ -18,7 +18,7 @@ export type MySqlBigInt53BuilderInitial = MySqlBigInt53Bui export class MySqlBigInt53Builder> extends MySqlColumnBuilderWithAutoIncrement { - static readonly [entityKind]: string = 'MySqlBigInt53Builder'; + static override readonly [entityKind]: string = 'MySqlBigInt53Builder'; constructor(name: T['name'], unsigned: boolean = false) { super(name, 'number', 'MySqlBigInt53'); @@ -39,7 +39,7 @@ export class MySqlBigInt53Builder> extends MySqlColumnWithAutoIncrement { - static readonly [entityKind]: string = 'MySqlBigInt53'; + static override readonly [entityKind]: string = 'MySqlBigInt53'; getSQLType(): string { return `bigint${this.config.unsigned ? ' unsigned' : ''}`; @@ -66,7 +66,7 @@ export type MySqlBigInt64BuilderInitial = MySqlBigInt64Bui export class MySqlBigInt64Builder> extends MySqlColumnBuilderWithAutoIncrement { - static readonly [entityKind]: string = 'MySqlBigInt64Builder'; + static override readonly [entityKind]: string = 'MySqlBigInt64Builder'; constructor(name: T['name'], unsigned: boolean = false) { super(name, 'bigint', 'MySqlBigInt64'); @@ -87,7 +87,7 @@ export class MySqlBigInt64Builder> extends MySqlColumnWithAutoIncrement { - static readonly [entityKind]: string = 'MySqlBigInt64'; + static override readonly [entityKind]: string = 'MySqlBigInt64'; getSQLType(): string { return `bigint${this.config.unsigned ? 
' unsigned' : ''}`; diff --git a/drizzle-orm/src/mysql-core/columns/binary.ts b/drizzle-orm/src/mysql-core/columns/binary.ts index 7297d7b0a..7031b565c 100644 --- a/drizzle-orm/src/mysql-core/columns/binary.ts +++ b/drizzle-orm/src/mysql-core/columns/binary.ts @@ -19,7 +19,7 @@ export class MySqlBinaryBuilder { - static readonly [entityKind]: string = 'MySqlBinaryBuilder'; + static override readonly [entityKind]: string = 'MySqlBinaryBuilder'; constructor(name: T['name'], length: number | undefined) { super(name, 'string', 'MySqlBinary'); @@ -38,7 +38,7 @@ export class MySqlBinary> ex T, MySqlBinaryConfig > { - static readonly [entityKind]: string = 'MySqlBinary'; + static override readonly [entityKind]: string = 'MySqlBinary'; length: number | undefined = this.config.length; diff --git a/drizzle-orm/src/mysql-core/columns/boolean.ts b/drizzle-orm/src/mysql-core/columns/boolean.ts index d1df78570..9e786b6f9 100644 --- a/drizzle-orm/src/mysql-core/columns/boolean.ts +++ b/drizzle-orm/src/mysql-core/columns/boolean.ts @@ -17,7 +17,7 @@ export type MySqlBooleanBuilderInitial = MySqlBooleanBuild export class MySqlBooleanBuilder> extends MySqlColumnBuilder { - static readonly [entityKind]: string = 'MySqlBooleanBuilder'; + static override readonly [entityKind]: string = 'MySqlBooleanBuilder'; constructor(name: T['name']) { super(name, 'boolean', 'MySqlBoolean'); @@ -35,7 +35,7 @@ export class MySqlBooleanBuilder> extends MySqlColumn { - static readonly [entityKind]: string = 'MySqlBoolean'; + static override readonly [entityKind]: string = 'MySqlBoolean'; getSQLType(): string { return 'boolean'; diff --git a/drizzle-orm/src/mysql-core/columns/char.ts b/drizzle-orm/src/mysql-core/columns/char.ts index 019c035ba..efcb7e65a 100644 --- a/drizzle-orm/src/mysql-core/columns/char.ts +++ b/drizzle-orm/src/mysql-core/columns/char.ts @@ -19,7 +19,7 @@ export class MySqlCharBuilder > { - static readonly [entityKind]: string = 'MySqlCharBuilder'; + static override readonly 
[entityKind]: string = 'MySqlCharBuilder'; constructor(name: T['name'], config: MySqlCharConfig) { super(name, 'string', 'MySqlChar'); @@ -41,7 +41,7 @@ export class MySqlCharBuilder> extends MySqlColumn> { - static readonly [entityKind]: string = 'MySqlChar'; + static override readonly [entityKind]: string = 'MySqlChar'; readonly length: number | undefined = this.config.length; override readonly enumValues = this.config.enum; diff --git a/drizzle-orm/src/mysql-core/columns/common.ts b/drizzle-orm/src/mysql-core/columns/common.ts index a0a192477..9babc31da 100644 --- a/drizzle-orm/src/mysql-core/columns/common.ts +++ b/drizzle-orm/src/mysql-core/columns/common.ts @@ -47,7 +47,7 @@ export abstract class MySqlColumnBuilder< > extends ColumnBuilder implements MySqlColumnBuilderBase { - static readonly [entityKind]: string = 'MySqlColumnBuilder'; + static override readonly [entityKind]: string = 'MySqlColumnBuilder'; private foreignKeyConfigs: ReferenceConfig[] = []; @@ -101,7 +101,7 @@ export abstract class MySqlColumn< T extends ColumnBaseConfig = ColumnBaseConfig, TRuntimeConfig extends object = object, > extends Column { - static readonly [entityKind]: string = 'MySqlColumn'; + static override readonly [entityKind]: string = 'MySqlColumn'; constructor( override readonly table: MySqlTable, @@ -127,7 +127,7 @@ export abstract class MySqlColumnBuilderWithAutoIncrement< TRuntimeConfig extends object = object, TExtraConfig extends ColumnBuilderExtraConfig = ColumnBuilderExtraConfig, > extends MySqlColumnBuilder { - static readonly [entityKind]: string = 'MySqlColumnBuilderWithAutoIncrement'; + static override readonly [entityKind]: string = 'MySqlColumnBuilderWithAutoIncrement'; constructor(name: NonNullable, dataType: T['dataType'], columnType: T['columnType']) { super(name, dataType, columnType); @@ -145,7 +145,7 @@ export abstract class MySqlColumnWithAutoIncrement< T extends ColumnBaseConfig = ColumnBaseConfig, TRuntimeConfig extends object = object, > extends 
MySqlColumn { - static readonly [entityKind]: string = 'MySqlColumnWithAutoIncrement'; + static override readonly [entityKind]: string = 'MySqlColumnWithAutoIncrement'; readonly autoIncrement: boolean = this.config.autoIncrement; } diff --git a/drizzle-orm/src/mysql-core/columns/custom.ts b/drizzle-orm/src/mysql-core/columns/custom.ts index 35ca19d3d..711b27813 100644 --- a/drizzle-orm/src/mysql-core/columns/custom.ts +++ b/drizzle-orm/src/mysql-core/columns/custom.ts @@ -35,7 +35,7 @@ export class MySqlCustomColumnBuilder { - static readonly [entityKind]: string = 'MySqlCustomColumnBuilder'; + static override readonly [entityKind]: string = 'MySqlCustomColumnBuilder'; constructor( name: T['name'], @@ -59,7 +59,7 @@ export class MySqlCustomColumnBuilder> extends MySqlColumn { - static readonly [entityKind]: string = 'MySqlCustomColumn'; + static override readonly [entityKind]: string = 'MySqlCustomColumn'; private sqlName: string; private mapTo?: (value: T['data']) => T['driverParam']; diff --git a/drizzle-orm/src/mysql-core/columns/date.common.ts b/drizzle-orm/src/mysql-core/columns/date.common.ts index 3fd8aa612..75faad5b8 100644 --- a/drizzle-orm/src/mysql-core/columns/date.common.ts +++ b/drizzle-orm/src/mysql-core/columns/date.common.ts @@ -18,7 +18,7 @@ export abstract class MySqlDateColumnBaseBuilder< TRuntimeConfig extends object = object, TExtraConfig extends ColumnBuilderExtraConfig = ColumnBuilderExtraConfig, > extends MySqlColumnBuilder { - static readonly [entityKind]: string = 'MySqlDateColumnBuilder'; + static override readonly [entityKind]: string = 'MySqlDateColumnBuilder'; defaultNow() { return this.default(sql`(now())`); @@ -36,7 +36,7 @@ export abstract class MySqlDateBaseColumn< T extends ColumnBaseConfig, TRuntimeConfig extends object = object, > extends MySqlColumn { - static readonly [entityKind]: string = 'MySqlDateColumn'; + static override readonly [entityKind]: string = 'MySqlDateColumn'; readonly hasOnUpdateNow: boolean = 
this.config.hasOnUpdateNow; } diff --git a/drizzle-orm/src/mysql-core/columns/date.ts b/drizzle-orm/src/mysql-core/columns/date.ts index 17ad8d8be..318fac65d 100644 --- a/drizzle-orm/src/mysql-core/columns/date.ts +++ b/drizzle-orm/src/mysql-core/columns/date.ts @@ -16,7 +16,7 @@ export type MySqlDateBuilderInitial = MySqlDateBuilder<{ }>; export class MySqlDateBuilder> extends MySqlColumnBuilder { - static readonly [entityKind]: string = 'MySqlDateBuilder'; + static override readonly [entityKind]: string = 'MySqlDateBuilder'; constructor(name: T['name']) { super(name, 'date', 'MySqlDate'); @@ -31,7 +31,7 @@ export class MySqlDateBuilder> extends MySqlColumn { - static readonly [entityKind]: string = 'MySqlDate'; + static override readonly [entityKind]: string = 'MySqlDate'; constructor( table: AnyMySqlTable<{ name: T['tableName'] }>, @@ -62,7 +62,7 @@ export type MySqlDateStringBuilderInitial = MySqlDateStrin export class MySqlDateStringBuilder> extends MySqlColumnBuilder { - static readonly [entityKind]: string = 'MySqlDateStringBuilder'; + static override readonly [entityKind]: string = 'MySqlDateStringBuilder'; constructor(name: T['name']) { super(name, 'string', 'MySqlDateString'); @@ -80,7 +80,7 @@ export class MySqlDateStringBuilder> extends MySqlColumn { - static readonly [entityKind]: string = 'MySqlDateString'; + static override readonly [entityKind]: string = 'MySqlDateString'; constructor( table: AnyMySqlTable<{ name: T['tableName'] }>, diff --git a/drizzle-orm/src/mysql-core/columns/datetime.ts b/drizzle-orm/src/mysql-core/columns/datetime.ts index 39b0bae32..61b062e8f 100644 --- a/drizzle-orm/src/mysql-core/columns/datetime.ts +++ b/drizzle-orm/src/mysql-core/columns/datetime.ts @@ -18,7 +18,7 @@ export type MySqlDateTimeBuilderInitial = MySqlDateTimeBui export class MySqlDateTimeBuilder> extends MySqlColumnBuilder { - static readonly [entityKind]: string = 'MySqlDateTimeBuilder'; + static override readonly [entityKind]: string = 
'MySqlDateTimeBuilder'; constructor(name: T['name'], config: MySqlDatetimeConfig | undefined) { super(name, 'date', 'MySqlDateTime'); @@ -37,7 +37,7 @@ export class MySqlDateTimeBuilder> extends MySqlColumn { - static readonly [entityKind]: string = 'MySqlDateTime'; + static override readonly [entityKind]: string = 'MySqlDateTime'; readonly fsp: number | undefined; @@ -76,7 +76,7 @@ export type MySqlDateTimeStringBuilderInitial = MySqlDateT export class MySqlDateTimeStringBuilder> extends MySqlColumnBuilder { - static readonly [entityKind]: string = 'MySqlDateTimeStringBuilder'; + static override readonly [entityKind]: string = 'MySqlDateTimeStringBuilder'; constructor(name: T['name'], config: MySqlDatetimeConfig | undefined) { super(name, 'string', 'MySqlDateTimeString'); @@ -95,7 +95,7 @@ export class MySqlDateTimeStringBuilder> extends MySqlColumn { - static readonly [entityKind]: string = 'MySqlDateTimeString'; + static override readonly [entityKind]: string = 'MySqlDateTimeString'; readonly fsp: number | undefined; diff --git a/drizzle-orm/src/mysql-core/columns/decimal.ts b/drizzle-orm/src/mysql-core/columns/decimal.ts index 3b01923e4..1e5f78679 100644 --- a/drizzle-orm/src/mysql-core/columns/decimal.ts +++ b/drizzle-orm/src/mysql-core/columns/decimal.ts @@ -18,7 +18,7 @@ export type MySqlDecimalBuilderInitial = MySqlDecimalBuild export class MySqlDecimalBuilder< T extends ColumnBuilderBaseConfig<'string', 'MySqlDecimal'>, > extends MySqlColumnBuilderWithAutoIncrement { - static readonly [entityKind]: string = 'MySqlDecimalBuilder'; + static override readonly [entityKind]: string = 'MySqlDecimalBuilder'; constructor(name: T['name'], precision?: number, scale?: number) { super(name, 'string', 'MySqlDecimal'); @@ -40,7 +40,7 @@ export class MySqlDecimalBuilder< export class MySqlDecimal> extends MySqlColumnWithAutoIncrement { - static readonly [entityKind]: string = 'MySqlDecimal'; + static override readonly [entityKind]: string = 'MySqlDecimal'; readonly 
precision: number | undefined = this.config.precision; readonly scale: number | undefined = this.config.scale; diff --git a/drizzle-orm/src/mysql-core/columns/double.ts b/drizzle-orm/src/mysql-core/columns/double.ts index 0324025b8..c9f95fd04 100644 --- a/drizzle-orm/src/mysql-core/columns/double.ts +++ b/drizzle-orm/src/mysql-core/columns/double.ts @@ -18,7 +18,7 @@ export type MySqlDoubleBuilderInitial = MySqlDoubleBuilder export class MySqlDoubleBuilder> extends MySqlColumnBuilderWithAutoIncrement { - static readonly [entityKind]: string = 'MySqlDoubleBuilder'; + static override readonly [entityKind]: string = 'MySqlDoubleBuilder'; constructor(name: T['name'], config: MySqlDoubleConfig | undefined) { super(name, 'number', 'MySqlDouble'); @@ -37,7 +37,7 @@ export class MySqlDoubleBuilder> extends MySqlColumnWithAutoIncrement { - static readonly [entityKind]: string = 'MySqlDouble'; + static override readonly [entityKind]: string = 'MySqlDouble'; precision: number | undefined = this.config.precision; scale: number | undefined = this.config.scale; diff --git a/drizzle-orm/src/mysql-core/columns/enum.ts b/drizzle-orm/src/mysql-core/columns/enum.ts index de1d41cdd..6a586ca7c 100644 --- a/drizzle-orm/src/mysql-core/columns/enum.ts +++ b/drizzle-orm/src/mysql-core/columns/enum.ts @@ -19,7 +19,7 @@ export type MySqlEnumColumnBuilderInitial> extends MySqlColumnBuilder { - static readonly [entityKind]: string = 'MySqlEnumColumnBuilder'; + static override readonly [entityKind]: string = 'MySqlEnumColumnBuilder'; constructor(name: T['name'], values: T['enumValues']) { super(name, 'string', 'MySqlEnumColumn'); @@ -40,7 +40,7 @@ export class MySqlEnumColumnBuilder> extends MySqlColumn { - static readonly [entityKind]: string = 'MySqlEnumColumn'; + static override readonly [entityKind]: string = 'MySqlEnumColumn'; override readonly enumValues = this.config.enumValues; diff --git a/drizzle-orm/src/mysql-core/columns/float.ts b/drizzle-orm/src/mysql-core/columns/float.ts index 
88b989077..d7c3e586b 100644 --- a/drizzle-orm/src/mysql-core/columns/float.ts +++ b/drizzle-orm/src/mysql-core/columns/float.ts @@ -17,7 +17,7 @@ export type MySqlFloatBuilderInitial = MySqlFloatBuilder<{ export class MySqlFloatBuilder> extends MySqlColumnBuilderWithAutoIncrement { - static readonly [entityKind]: string = 'MySqlFloatBuilder'; + static override readonly [entityKind]: string = 'MySqlFloatBuilder'; constructor(name: T['name']) { super(name, 'number', 'MySqlFloat'); @@ -32,7 +32,7 @@ export class MySqlFloatBuilder> extends MySqlColumnWithAutoIncrement { - static readonly [entityKind]: string = 'MySqlFloat'; + static override readonly [entityKind]: string = 'MySqlFloat'; getSQLType(): string { return 'float'; diff --git a/drizzle-orm/src/mysql-core/columns/int.ts b/drizzle-orm/src/mysql-core/columns/int.ts index 4902bc593..aca0ea61e 100644 --- a/drizzle-orm/src/mysql-core/columns/int.ts +++ b/drizzle-orm/src/mysql-core/columns/int.ts @@ -18,7 +18,7 @@ export type MySqlIntBuilderInitial = MySqlIntBuilder<{ export class MySqlIntBuilder> extends MySqlColumnBuilderWithAutoIncrement { - static readonly [entityKind]: string = 'MySqlIntBuilder'; + static override readonly [entityKind]: string = 'MySqlIntBuilder'; constructor(name: T['name'], config?: MySqlIntConfig) { super(name, 'number', 'MySqlInt'); @@ -36,7 +36,7 @@ export class MySqlIntBuilder> extends MySqlColumnWithAutoIncrement { - static readonly [entityKind]: string = 'MySqlInt'; + static override readonly [entityKind]: string = 'MySqlInt'; getSQLType(): string { return `int${this.config.unsigned ? 
' unsigned' : ''}`; diff --git a/drizzle-orm/src/mysql-core/columns/json.ts b/drizzle-orm/src/mysql-core/columns/json.ts index d57cf963c..ecb73ed82 100644 --- a/drizzle-orm/src/mysql-core/columns/json.ts +++ b/drizzle-orm/src/mysql-core/columns/json.ts @@ -15,7 +15,7 @@ export type MySqlJsonBuilderInitial = MySqlJsonBuilder<{ }>; export class MySqlJsonBuilder> extends MySqlColumnBuilder { - static readonly [entityKind]: string = 'MySqlJsonBuilder'; + static override readonly [entityKind]: string = 'MySqlJsonBuilder'; constructor(name: T['name']) { super(name, 'json', 'MySqlJson'); @@ -30,7 +30,7 @@ export class MySqlJsonBuilder> extends MySqlColumn { - static readonly [entityKind]: string = 'MySqlJson'; + static override readonly [entityKind]: string = 'MySqlJson'; getSQLType(): string { return 'json'; diff --git a/drizzle-orm/src/mysql-core/columns/mediumint.ts b/drizzle-orm/src/mysql-core/columns/mediumint.ts index 237090179..764969d31 100644 --- a/drizzle-orm/src/mysql-core/columns/mediumint.ts +++ b/drizzle-orm/src/mysql-core/columns/mediumint.ts @@ -19,7 +19,7 @@ export type MySqlMediumIntBuilderInitial = MySqlMediumIntB export class MySqlMediumIntBuilder> extends MySqlColumnBuilderWithAutoIncrement { - static readonly [entityKind]: string = 'MySqlMediumIntBuilder'; + static override readonly [entityKind]: string = 'MySqlMediumIntBuilder'; constructor(name: T['name'], config?: MySqlIntConfig) { super(name, 'number', 'MySqlMediumInt'); @@ -40,7 +40,7 @@ export class MySqlMediumIntBuilder> extends MySqlColumnWithAutoIncrement { - static readonly [entityKind]: string = 'MySqlMediumInt'; + static override readonly [entityKind]: string = 'MySqlMediumInt'; getSQLType(): string { return `mediumint${this.config.unsigned ? 
' unsigned' : ''}`; diff --git a/drizzle-orm/src/mysql-core/columns/real.ts b/drizzle-orm/src/mysql-core/columns/real.ts index 2a921f1aa..8b9eca794 100644 --- a/drizzle-orm/src/mysql-core/columns/real.ts +++ b/drizzle-orm/src/mysql-core/columns/real.ts @@ -21,7 +21,7 @@ export class MySqlRealBuilder { - static readonly [entityKind]: string = 'MySqlRealBuilder'; + static override readonly [entityKind]: string = 'MySqlRealBuilder'; constructor(name: T['name'], config: MySqlRealConfig | undefined) { super(name, 'number', 'MySqlReal'); @@ -41,7 +41,7 @@ export class MySqlReal> extend T, MySqlRealConfig > { - static readonly [entityKind]: string = 'MySqlReal'; + static override readonly [entityKind]: string = 'MySqlReal'; precision: number | undefined = this.config.precision; scale: number | undefined = this.config.scale; diff --git a/drizzle-orm/src/mysql-core/columns/serial.ts b/drizzle-orm/src/mysql-core/columns/serial.ts index 43af900a1..88485d6b2 100644 --- a/drizzle-orm/src/mysql-core/columns/serial.ts +++ b/drizzle-orm/src/mysql-core/columns/serial.ts @@ -33,7 +33,7 @@ export type MySqlSerialBuilderInitial = IsAutoincrement< export class MySqlSerialBuilder> extends MySqlColumnBuilderWithAutoIncrement { - static readonly [entityKind]: string = 'MySqlSerialBuilder'; + static override readonly [entityKind]: string = 'MySqlSerialBuilder'; constructor(name: T['name']) { super(name, 'number', 'MySqlSerial'); @@ -52,7 +52,7 @@ export class MySqlSerialBuilder, > extends MySqlColumnWithAutoIncrement { - static readonly [entityKind]: string = 'MySqlSerial'; + static override readonly [entityKind]: string = 'MySqlSerial'; getSQLType(): string { return 'serial'; diff --git a/drizzle-orm/src/mysql-core/columns/smallint.ts b/drizzle-orm/src/mysql-core/columns/smallint.ts index e6801e214..482ff89ea 100644 --- a/drizzle-orm/src/mysql-core/columns/smallint.ts +++ b/drizzle-orm/src/mysql-core/columns/smallint.ts @@ -19,7 +19,7 @@ export type MySqlSmallIntBuilderInitial = 
MySqlSmallIntBui export class MySqlSmallIntBuilder> extends MySqlColumnBuilderWithAutoIncrement { - static readonly [entityKind]: string = 'MySqlSmallIntBuilder'; + static override readonly [entityKind]: string = 'MySqlSmallIntBuilder'; constructor(name: T['name'], config?: MySqlIntConfig) { super(name, 'number', 'MySqlSmallInt'); @@ -40,7 +40,7 @@ export class MySqlSmallIntBuilder> extends MySqlColumnWithAutoIncrement { - static readonly [entityKind]: string = 'MySqlSmallInt'; + static override readonly [entityKind]: string = 'MySqlSmallInt'; getSQLType(): string { return `smallint${this.config.unsigned ? ' unsigned' : ''}`; diff --git a/drizzle-orm/src/mysql-core/columns/text.ts b/drizzle-orm/src/mysql-core/columns/text.ts index c90362dc5..18434a532 100644 --- a/drizzle-orm/src/mysql-core/columns/text.ts +++ b/drizzle-orm/src/mysql-core/columns/text.ts @@ -21,7 +21,7 @@ export class MySqlTextBuilder { - static readonly [entityKind]: string = 'MySqlTextBuilder'; + static override readonly [entityKind]: string = 'MySqlTextBuilder'; constructor(name: T['name'], textType: MySqlTextColumnType, config: MySqlTextConfig) { super(name, 'string', 'MySqlText'); @@ -40,7 +40,7 @@ export class MySqlTextBuilder> extends MySqlColumn { - static readonly [entityKind]: string = 'MySqlText'; + static override readonly [entityKind]: string = 'MySqlText'; private textType: MySqlTextColumnType = this.config.textType; diff --git a/drizzle-orm/src/mysql-core/columns/time.ts b/drizzle-orm/src/mysql-core/columns/time.ts index e862d9fa3..408453947 100644 --- a/drizzle-orm/src/mysql-core/columns/time.ts +++ b/drizzle-orm/src/mysql-core/columns/time.ts @@ -19,7 +19,7 @@ export class MySqlTimeBuilder { - static readonly [entityKind]: string = 'MySqlTimeBuilder'; + static override readonly [entityKind]: string = 'MySqlTimeBuilder'; constructor( name: T['name'], @@ -40,7 +40,7 @@ export class MySqlTimeBuilder, > extends MySqlColumn { - static readonly [entityKind]: string = 'MySqlTime'; + 
static override readonly [entityKind]: string = 'MySqlTime'; readonly fsp: number | undefined = this.config.fsp; diff --git a/drizzle-orm/src/mysql-core/columns/timestamp.ts b/drizzle-orm/src/mysql-core/columns/timestamp.ts index 07649ba94..892f8e603 100644 --- a/drizzle-orm/src/mysql-core/columns/timestamp.ts +++ b/drizzle-orm/src/mysql-core/columns/timestamp.ts @@ -18,7 +18,7 @@ export type MySqlTimestampBuilderInitial = MySqlTimestampB export class MySqlTimestampBuilder> extends MySqlDateColumnBaseBuilder { - static readonly [entityKind]: string = 'MySqlTimestampBuilder'; + static override readonly [entityKind]: string = 'MySqlTimestampBuilder'; constructor(name: T['name'], config: MySqlTimestampConfig | undefined) { super(name, 'date', 'MySqlTimestamp'); @@ -39,7 +39,7 @@ export class MySqlTimestampBuilder> extends MySqlDateBaseColumn { - static readonly [entityKind]: string = 'MySqlTimestamp'; + static override readonly [entityKind]: string = 'MySqlTimestamp'; readonly fsp: number | undefined = this.config.fsp; @@ -70,7 +70,7 @@ export type MySqlTimestampStringBuilderInitial = MySqlTime export class MySqlTimestampStringBuilder> extends MySqlDateColumnBaseBuilder { - static readonly [entityKind]: string = 'MySqlTimestampStringBuilder'; + static override readonly [entityKind]: string = 'MySqlTimestampStringBuilder'; constructor(name: T['name'], config: MySqlTimestampConfig | undefined) { super(name, 'string', 'MySqlTimestampString'); @@ -91,7 +91,7 @@ export class MySqlTimestampStringBuilder> extends MySqlDateBaseColumn { - static readonly [entityKind]: string = 'MySqlTimestampString'; + static override readonly [entityKind]: string = 'MySqlTimestampString'; readonly fsp: number | undefined = this.config.fsp; diff --git a/drizzle-orm/src/mysql-core/columns/tinyint.ts b/drizzle-orm/src/mysql-core/columns/tinyint.ts index a9d7e967b..ee4ccdaa7 100644 --- a/drizzle-orm/src/mysql-core/columns/tinyint.ts +++ b/drizzle-orm/src/mysql-core/columns/tinyint.ts @@ -19,7 
+19,7 @@ export type MySqlTinyIntBuilderInitial = MySqlTinyIntBuild export class MySqlTinyIntBuilder> extends MySqlColumnBuilderWithAutoIncrement { - static readonly [entityKind]: string = 'MySqlTinyIntBuilder'; + static override readonly [entityKind]: string = 'MySqlTinyIntBuilder'; constructor(name: T['name'], config?: MySqlIntConfig) { super(name, 'number', 'MySqlTinyInt'); @@ -40,7 +40,7 @@ export class MySqlTinyIntBuilder> extends MySqlColumnWithAutoIncrement { - static readonly [entityKind]: string = 'MySqlTinyInt'; + static override readonly [entityKind]: string = 'MySqlTinyInt'; getSQLType(): string { return `tinyint${this.config.unsigned ? ' unsigned' : ''}`; diff --git a/drizzle-orm/src/mysql-core/columns/varbinary.ts b/drizzle-orm/src/mysql-core/columns/varbinary.ts index ed6b90b7a..bc0dde635 100644 --- a/drizzle-orm/src/mysql-core/columns/varbinary.ts +++ b/drizzle-orm/src/mysql-core/columns/varbinary.ts @@ -18,7 +18,7 @@ export type MySqlVarBinaryBuilderInitial = MySqlVarBinaryB export class MySqlVarBinaryBuilder> extends MySqlColumnBuilder { - static readonly [entityKind]: string = 'MySqlVarBinaryBuilder'; + static override readonly [entityKind]: string = 'MySqlVarBinaryBuilder'; /** @internal */ constructor(name: T['name'], config: MySqlVarbinaryOptions) { @@ -40,7 +40,7 @@ export class MySqlVarBinaryBuilder, > extends MySqlColumn { - static readonly [entityKind]: string = 'MySqlVarBinary'; + static override readonly [entityKind]: string = 'MySqlVarBinary'; length: number | undefined = this.config.length; diff --git a/drizzle-orm/src/mysql-core/columns/varchar.ts b/drizzle-orm/src/mysql-core/columns/varchar.ts index b05cf7523..32cfda7e9 100644 --- a/drizzle-orm/src/mysql-core/columns/varchar.ts +++ b/drizzle-orm/src/mysql-core/columns/varchar.ts @@ -20,7 +20,7 @@ export type MySqlVarCharBuilderInitial> extends MySqlColumnBuilder> { - static readonly [entityKind]: string = 'MySqlVarCharBuilder'; + static override readonly [entityKind]: string = 
'MySqlVarCharBuilder'; /** @internal */ constructor(name: T['name'], config: MySqlVarCharConfig) { @@ -43,7 +43,7 @@ export class MySqlVarCharBuilder> extends MySqlColumn> { - static readonly [entityKind]: string = 'MySqlVarChar'; + static override readonly [entityKind]: string = 'MySqlVarChar'; readonly length: number | undefined = this.config.length; diff --git a/drizzle-orm/src/mysql-core/columns/year.ts b/drizzle-orm/src/mysql-core/columns/year.ts index 27a81f887..8a7a44410 100644 --- a/drizzle-orm/src/mysql-core/columns/year.ts +++ b/drizzle-orm/src/mysql-core/columns/year.ts @@ -15,7 +15,7 @@ export type MySqlYearBuilderInitial = MySqlYearBuilder<{ }>; export class MySqlYearBuilder> extends MySqlColumnBuilder { - static readonly [entityKind]: string = 'MySqlYearBuilder'; + static override readonly [entityKind]: string = 'MySqlYearBuilder'; constructor(name: T['name']) { super(name, 'number', 'MySqlYear'); @@ -32,7 +32,7 @@ export class MySqlYearBuilder, > extends MySqlColumn { - static readonly [entityKind]: string = 'MySqlYear'; + static override readonly [entityKind]: string = 'MySqlYear'; getSQLType(): string { return `year`; diff --git a/drizzle-orm/src/mysql-core/dialect.ts b/drizzle-orm/src/mysql-core/dialect.ts index ee28af04d..af4f11905 100644 --- a/drizzle-orm/src/mysql-core/dialect.ts +++ b/drizzle-orm/src/mysql-core/dialect.ts @@ -18,7 +18,7 @@ import { type TablesRelationalConfig, } from '~/relations.ts'; import { Param, SQL, sql, View } from '~/sql/sql.ts'; -import type { Name, QueryWithTypings, SQLChunk } from '~/sql/sql.ts'; +import type { Name, Placeholder, QueryWithTypings, SQLChunk } from '~/sql/sql.ts'; import { Subquery } from '~/subquery.ts'; import { getTableName, getTableUniqueName, Table } from '~/table.ts'; import { type Casing, orderSelectedFields, type UpdateSet } from '~/utils.ts'; @@ -112,7 +112,7 @@ export class MySqlDialect { return sql.join(withSqlChunks); } - buildDeleteQuery({ table, where, returning, withList }: 
MySqlDeleteConfig): SQL { + buildDeleteQuery({ table, where, returning, withList, limit, orderBy }: MySqlDeleteConfig): SQL { const withSql = this.buildWithCTE(withList); const returningSql = returning @@ -121,7 +121,11 @@ export class MySqlDialect { const whereSql = where ? sql` where ${where}` : undefined; - return sql`${withSql}delete from ${table}${whereSql}${returningSql}`; + const orderBySql = this.buildOrderBy(orderBy); + + const limitSql = this.buildLimit(limit); + + return sql`${withSql}delete from ${table}${whereSql}${orderBySql}${limitSql}${returningSql}`; } buildUpdateSet(table: MySqlTable, set: UpdateSet): SQL { @@ -145,7 +149,7 @@ export class MySqlDialect { })); } - buildUpdateQuery({ table, set, where, returning, withList }: MySqlUpdateConfig): SQL { + buildUpdateQuery({ table, set, where, returning, withList, limit, orderBy }: MySqlUpdateConfig): SQL { const withSql = this.buildWithCTE(withList); const setSql = this.buildUpdateSet(table, set); @@ -156,7 +160,11 @@ export class MySqlDialect { const whereSql = where ? sql` where ${where}` : undefined; - return sql`${withSql}update ${table} set ${setSql}${whereSql}${returningSql}`; + const orderBySql = this.buildOrderBy(orderBy); + + const limitSql = this.buildLimit(limit); + + return sql`${withSql}update ${table} set ${setSql}${whereSql}${orderBySql}${limitSql}${returningSql}`; } /** @@ -221,6 +229,16 @@ export class MySqlDialect { return sql.join(chunks); } + private buildLimit(limit: number | Placeholder | undefined): SQL | undefined { + return typeof limit === 'object' || (typeof limit === 'number' && limit >= 0) + ? sql` limit ${limit}` + : undefined; + } + + private buildOrderBy(orderBy: (MySqlColumn | SQL | SQL.Aliased)[] | undefined): SQL | undefined { + return orderBy && orderBy.length > 0 ? sql` order by ${sql.join(orderBy, sql`, `)}` : undefined; + } + buildSelectQuery( { withList, @@ -328,19 +346,11 @@ export class MySqlDialect { const havingSql = having ? 
sql` having ${having}` : undefined; - let orderBySql; - if (orderBy && orderBy.length > 0) { - orderBySql = sql` order by ${sql.join(orderBy, sql`, `)}`; - } + const orderBySql = this.buildOrderBy(orderBy); - let groupBySql; - if (groupBy && groupBy.length > 0) { - groupBySql = sql` group by ${sql.join(groupBy, sql`, `)}`; - } + const groupBySql = groupBy && groupBy.length > 0 ? sql` group by ${sql.join(groupBy, sql`, `)}` : undefined; - const limitSql = typeof limit === 'object' || (typeof limit === 'number' && limit >= 0) - ? sql` limit ${limit}` - : undefined; + const limitSql = this.buildLimit(limit); const offsetSql = offset ? sql` offset ${offset}` : undefined; diff --git a/drizzle-orm/src/mysql-core/query-builders/count.ts b/drizzle-orm/src/mysql-core/query-builders/count.ts index e61b27011..9a0241c70 100644 --- a/drizzle-orm/src/mysql-core/query-builders/count.ts +++ b/drizzle-orm/src/mysql-core/query-builders/count.ts @@ -9,7 +9,7 @@ export class MySqlCountBuilder< > extends SQL implements Promise, SQLWrapper { private sql: SQL; - static readonly [entityKind] = 'MySqlCountBuilder'; + static override readonly [entityKind] = 'MySqlCountBuilder'; [Symbol.toStringTag] = 'MySqlCountBuilder'; private session: TSession; diff --git a/drizzle-orm/src/mysql-core/query-builders/delete.ts b/drizzle-orm/src/mysql-core/query-builders/delete.ts index e9a48da8e..22a3e1be3 100644 --- a/drizzle-orm/src/mysql-core/query-builders/delete.ts +++ b/drizzle-orm/src/mysql-core/query-builders/delete.ts @@ -11,8 +11,12 @@ import type { } from '~/mysql-core/session.ts'; import type { MySqlTable } from '~/mysql-core/table.ts'; import { QueryPromise } from '~/query-promise.ts'; -import type { Query, SQL, SQLWrapper } from '~/sql/sql.ts'; +import { SelectionProxyHandler } from '~/selection-proxy.ts'; +import type { Placeholder, Query, SQL, SQLWrapper } from '~/sql/sql.ts'; import type { Subquery } from '~/subquery.ts'; +import { Table } from '~/table.ts'; +import type { ValueOrArray } 
from '~/utils.ts'; +import type { MySqlColumn } from '../columns/common.ts'; import type { SelectedFieldsOrdered } from './select.types.ts'; export type MySqlDeleteWithout< @@ -39,6 +43,8 @@ export type MySqlDelete< export interface MySqlDeleteConfig { where?: SQL | undefined; + limit?: number | Placeholder; + orderBy?: (MySqlColumn | SQL | SQL.Aliased)[]; table: MySqlTable; returning?: SelectedFieldsOrdered; withList?: Subquery[]; @@ -86,7 +92,7 @@ export class MySqlDeleteBase< // eslint-disable-next-line @typescript-eslint/no-unused-vars TExcludedMethods extends string = never, > extends QueryPromise> implements SQLWrapper { - static readonly [entityKind]: string = 'MySqlDelete'; + static override readonly [entityKind]: string = 'MySqlDelete'; private config: MySqlDeleteConfig; @@ -134,6 +140,37 @@ export class MySqlDeleteBase< return this as any; } + orderBy( + builder: (deleteTable: TTable) => ValueOrArray, + ): MySqlDeleteWithout; + orderBy(...columns: (MySqlColumn | SQL | SQL.Aliased)[]): MySqlDeleteWithout; + orderBy( + ...columns: + | [(deleteTable: TTable) => ValueOrArray] + | (MySqlColumn | SQL | SQL.Aliased)[] + ): MySqlDeleteWithout { + if (typeof columns[0] === 'function') { + const orderBy = columns[0]( + new Proxy( + this.config.table[Table.Symbol.Columns], + new SelectionProxyHandler({ sqlAliasedBehavior: 'alias', sqlBehavior: 'sql' }), + ) as any, + ); + + const orderByArray = Array.isArray(orderBy) ? 
orderBy : [orderBy]; + this.config.orderBy = orderByArray; + } else { + const orderByArray = columns as (MySqlColumn | SQL | SQL.Aliased)[]; + this.config.orderBy = orderByArray; + } + return this as any; + } + + limit(limit: number | Placeholder): MySqlDeleteWithout { + this.config.limit = limit; + return this as any; + } + /** @internal */ getSQL(): SQL { return this.dialect.buildDeleteQuery(this.config); diff --git a/drizzle-orm/src/mysql-core/query-builders/insert.ts b/drizzle-orm/src/mysql-core/query-builders/insert.ts index 97e61de74..fe9f7d7ba 100644 --- a/drizzle-orm/src/mysql-core/query-builders/insert.ts +++ b/drizzle-orm/src/mysql-core/query-builders/insert.ts @@ -190,7 +190,7 @@ export class MySqlInsertBase< RunnableQuery : TReturning[], 'mysql'>, SQLWrapper { - static readonly [entityKind]: string = 'MySqlInsert'; + static override readonly [entityKind]: string = 'MySqlInsert'; declare protected $table: TTable; diff --git a/drizzle-orm/src/mysql-core/query-builders/query.ts b/drizzle-orm/src/mysql-core/query-builders/query.ts index 955f73428..16d294598 100644 --- a/drizzle-orm/src/mysql-core/query-builders/query.ts +++ b/drizzle-orm/src/mysql-core/query-builders/query.ts @@ -77,7 +77,7 @@ export class MySqlRelationalQuery< TPreparedQueryHKT extends PreparedQueryHKTBase, TResult, > extends QueryPromise { - static readonly [entityKind]: string = 'MySqlRelationalQuery'; + static override readonly [entityKind]: string = 'MySqlRelationalQuery'; declare protected $brand: 'MySqlRelationalQuery'; diff --git a/drizzle-orm/src/mysql-core/query-builders/select.ts b/drizzle-orm/src/mysql-core/query-builders/select.ts index a5a0ca69a..95f67827b 100644 --- a/drizzle-orm/src/mysql-core/query-builders/select.ts +++ b/drizzle-orm/src/mysql-core/query-builders/select.ts @@ -132,7 +132,7 @@ export abstract class MySqlSelectQueryBuilderBase< TResult extends any[] = SelectResult[], TSelectedFields extends ColumnsSelection = BuildSubquerySelection, > extends 
TypedQueryBuilder { - static readonly [entityKind]: string = 'MySqlSelectQueryBuilder'; + static override readonly [entityKind]: string = 'MySqlSelectQueryBuilder'; override readonly _: { readonly hkt: THKT; @@ -942,7 +942,7 @@ export class MySqlSelectBase< TResult, TSelectedFields > { - static readonly [entityKind]: string = 'MySqlSelect'; + static override readonly [entityKind]: string = 'MySqlSelect'; prepare(): MySqlSelectPrepare { if (!this.session) { diff --git a/drizzle-orm/src/mysql-core/query-builders/update.ts b/drizzle-orm/src/mysql-core/query-builders/update.ts index 7884599cf..9efc4e325 100644 --- a/drizzle-orm/src/mysql-core/query-builders/update.ts +++ b/drizzle-orm/src/mysql-core/query-builders/update.ts @@ -12,13 +12,18 @@ import type { } from '~/mysql-core/session.ts'; import type { MySqlTable } from '~/mysql-core/table.ts'; import { QueryPromise } from '~/query-promise.ts'; -import type { Query, SQL, SQLWrapper } from '~/sql/sql.ts'; +import { SelectionProxyHandler } from '~/selection-proxy.ts'; +import type { Placeholder, Query, SQL, SQLWrapper } from '~/sql/sql.ts'; import type { Subquery } from '~/subquery.ts'; -import { mapUpdateSet, type UpdateSet } from '~/utils.ts'; +import { Table } from '~/table.ts'; +import { mapUpdateSet, type UpdateSet, type ValueOrArray } from '~/utils.ts'; +import type { MySqlColumn } from '../columns/common.ts'; import type { SelectedFieldsOrdered } from './select.types.ts'; export interface MySqlUpdateConfig { where?: SQL | undefined; + limit?: number | Placeholder; + orderBy?: (MySqlColumn | SQL | SQL.Aliased)[]; set: UpdateSet; table: MySqlTable; returning?: SelectedFieldsOrdered; @@ -120,7 +125,7 @@ export class MySqlUpdateBase< // eslint-disable-next-line @typescript-eslint/no-unused-vars TExcludedMethods extends string = never, > extends QueryPromise> implements SQLWrapper { - static readonly [entityKind]: string = 'MySqlUpdate'; + static override readonly [entityKind]: string = 'MySqlUpdate'; private config: 
MySqlUpdateConfig; @@ -173,6 +178,37 @@ export class MySqlUpdateBase< return this as any; } + orderBy( + builder: (updateTable: TTable) => ValueOrArray, + ): MySqlUpdateWithout; + orderBy(...columns: (MySqlColumn | SQL | SQL.Aliased)[]): MySqlUpdateWithout; + orderBy( + ...columns: + | [(updateTable: TTable) => ValueOrArray] + | (MySqlColumn | SQL | SQL.Aliased)[] + ): MySqlUpdateWithout { + if (typeof columns[0] === 'function') { + const orderBy = columns[0]( + new Proxy( + this.config.table[Table.Symbol.Columns], + new SelectionProxyHandler({ sqlAliasedBehavior: 'alias', sqlBehavior: 'sql' }), + ) as any, + ); + + const orderByArray = Array.isArray(orderBy) ? orderBy : [orderBy]; + this.config.orderBy = orderByArray; + } else { + const orderByArray = columns as (MySqlColumn | SQL | SQL.Aliased)[]; + this.config.orderBy = orderByArray; + } + return this as any; + } + + limit(limit: number | Placeholder): MySqlUpdateWithout { + this.config.limit = limit; + return this as any; + } + /** @internal */ getSQL(): SQL { return this.dialect.buildUpdateQuery(this.config); diff --git a/drizzle-orm/src/mysql-core/session.ts b/drizzle-orm/src/mysql-core/session.ts index 021d4276d..326b0ad61 100644 --- a/drizzle-orm/src/mysql-core/session.ts +++ b/drizzle-orm/src/mysql-core/session.ts @@ -130,7 +130,7 @@ export abstract class MySqlTransaction< TFullSchema extends Record = Record, TSchema extends TablesRelationalConfig = Record, > extends MySqlDatabase { - static readonly [entityKind]: string = 'MySqlTransaction'; + static override readonly [entityKind]: string = 'MySqlTransaction'; constructor( dialect: MySqlDialect, diff --git a/drizzle-orm/src/mysql-core/table.ts b/drizzle-orm/src/mysql-core/table.ts index 36c366d61..e09278dc5 100644 --- a/drizzle-orm/src/mysql-core/table.ts +++ b/drizzle-orm/src/mysql-core/table.ts @@ -24,7 +24,7 @@ export type TableConfig = TableConfigBase; export const InlineForeignKeys = Symbol.for('drizzle:MySqlInlineForeignKeys'); export class 
MySqlTable extends Table { - static readonly [entityKind]: string = 'MySqlTable'; + static override readonly [entityKind]: string = 'MySqlTable'; declare protected $columns: T['columns']; diff --git a/drizzle-orm/src/mysql-core/view-base.ts b/drizzle-orm/src/mysql-core/view-base.ts index 46b1527d9..fa8a25cfa 100644 --- a/drizzle-orm/src/mysql-core/view-base.ts +++ b/drizzle-orm/src/mysql-core/view-base.ts @@ -7,7 +7,7 @@ export abstract class MySqlViewBase< TExisting extends boolean = boolean, TSelectedFields extends ColumnsSelection = ColumnsSelection, > extends View { - static readonly [entityKind]: string = 'MySqlViewBase'; + static override readonly [entityKind]: string = 'MySqlViewBase'; declare readonly _: View['_'] & { readonly viewBrand: 'MySqlViewBase'; diff --git a/drizzle-orm/src/mysql-core/view.ts b/drizzle-orm/src/mysql-core/view.ts index 4cc7d416c..6054e022c 100644 --- a/drizzle-orm/src/mysql-core/view.ts +++ b/drizzle-orm/src/mysql-core/view.ts @@ -14,7 +14,6 @@ import { MySqlViewConfig } from './view-common.ts'; export interface ViewBuilderConfig { algorithm?: 'undefined' | 'merge' | 'temptable'; - definer?: string; sqlSecurity?: 'definer' | 'invoker'; withCheckOption?: 'cascaded' | 'local'; } @@ -41,13 +40,6 @@ export class ViewBuilderCore, - ): this { - this.config.definer = definer; - return this; - } - sqlSecurity( sqlSecurity: Exclude, ): this { @@ -64,7 +56,7 @@ export class ViewBuilderCore extends ViewBuilderCore<{ name: TName }> { - static readonly [entityKind]: string = 'MySqlViewBuilder'; + static override readonly [entityKind]: string = 'MySqlViewBuilder'; as( qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), @@ -98,7 +90,7 @@ export class ManualViewBuilder< TName extends string = string, TColumns extends Record = Record, > extends ViewBuilderCore<{ name: TName; columns: TColumns }> { - static readonly [entityKind]: string = 'MySqlManualViewBuilder'; + static override readonly [entityKind]: string = 
'MySqlManualViewBuilder'; private columns: Record; @@ -157,7 +149,7 @@ export class MySqlView< TExisting extends boolean = boolean, TSelectedFields extends ColumnsSelection = ColumnsSelection, > extends MySqlViewBase { - static readonly [entityKind]: string = 'MySqlView'; + static override readonly [entityKind]: string = 'MySqlView'; declare protected $MySqlViewBrand: 'MySqlView'; diff --git a/drizzle-orm/src/mysql-proxy/driver.ts b/drizzle-orm/src/mysql-proxy/driver.ts index badefb02e..bb0c21134 100644 --- a/drizzle-orm/src/mysql-proxy/driver.ts +++ b/drizzle-orm/src/mysql-proxy/driver.ts @@ -14,7 +14,7 @@ import { type MySqlRemotePreparedQueryHKT, type MySqlRemoteQueryResultHKT, MySql export class MySqlRemoteDatabase< TSchema extends Record = Record, > extends MySqlDatabase { - static readonly [entityKind]: string = 'MySqlRemoteDatabase'; + static override readonly [entityKind]: string = 'MySqlRemoteDatabase'; } export type RemoteCallback = ( diff --git a/drizzle-orm/src/mysql-proxy/session.ts b/drizzle-orm/src/mysql-proxy/session.ts index 03039cfb2..e72875e79 100644 --- a/drizzle-orm/src/mysql-proxy/session.ts +++ b/drizzle-orm/src/mysql-proxy/session.ts @@ -30,7 +30,7 @@ export class MySqlRemoteSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends MySqlSession { - static readonly [entityKind]: string = 'MySqlRemoteSession'; + static override readonly [entityKind]: string = 'MySqlRemoteSession'; private logger: Logger; @@ -81,7 +81,7 @@ export class MySqlProxyTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends MySqlTransaction { - static readonly [entityKind]: string = 'MySqlProxyTransaction'; + static override readonly [entityKind]: string = 'MySqlProxyTransaction'; override async transaction( _transaction: (tx: MySqlProxyTransaction) => Promise, @@ -91,7 +91,7 @@ export class MySqlProxyTransaction< } export class PreparedQuery extends PreparedQueryBase { - static readonly [entityKind]: 
string = 'MySqlProxyPreparedQuery'; + static override readonly [entityKind]: string = 'MySqlProxyPreparedQuery'; constructor( private client: RemoteCallback, diff --git a/drizzle-orm/src/mysql2/driver.ts b/drizzle-orm/src/mysql2/driver.ts index 13e296ab9..ef34604e3 100644 --- a/drizzle-orm/src/mysql2/driver.ts +++ b/drizzle-orm/src/mysql2/driver.ts @@ -1,4 +1,6 @@ -import type { Connection as CallbackConnection, Pool as CallbackPool } from 'mysql2'; +import { EventEmitter } from 'events'; +import { type Connection as CallbackConnection, createPool, type Pool as CallbackPool, type PoolOptions } from 'mysql2'; +import type { Connection, Pool } from 'mysql2/promise'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { DefaultLogger } from '~/logger.ts'; @@ -11,7 +13,7 @@ import { type RelationalSchemaConfig, type TablesRelationalConfig, } from '~/relations.ts'; -import type { DrizzleConfig } from '~/utils.ts'; +import type { DrizzleConfig, IfNotImported, ImportTypeError } from '~/utils.ts'; import { DrizzleError } from '../errors.ts'; import type { MySql2Client, MySql2PreparedQueryHKT, MySql2QueryResultHKT } from './session.ts'; import { MySql2Session } from './session.ts'; @@ -43,16 +45,16 @@ export { MySqlDatabase } from '~/mysql-core/db.ts'; export class MySql2Database< TSchema extends Record = Record, > extends MySqlDatabase { - static readonly [entityKind]: string = 'MySql2Database'; + static override readonly [entityKind]: string = 'MySql2Database'; } export type MySql2DrizzleConfig = Record> = & Omit, 'schema'> & ({ schema: TSchema; mode: Mode } | { schema?: undefined; mode?: Mode }); -export function drizzle< +function construct< TSchema extends Record = Record, - TClient extends MySql2Client | CallbackConnection | CallbackPool = MySql2Client | CallbackConnection | CallbackPool, + TClient extends Pool | Connection | CallbackPool | CallbackConnection = CallbackPool, >( client: TClient, config: MySql2DrizzleConfig = {}, 
@@ -106,3 +108,71 @@ interface CallbackClient { function isCallbackClient(client: any): client is CallbackClient { return typeof client.promise === 'function'; } + +export type AnyMySql2Connection = Pool | Connection | CallbackPool | CallbackConnection; + +export function drizzle< + TSchema extends Record = Record, + TClient extends AnyMySql2Connection = CallbackPool, +>( + ...params: IfNotImported< + CallbackPool, + [ImportTypeError<'mysql2'>], + [ + TClient | string, + ] | [ + TClient | string, + MySql2DrizzleConfig, + ] | [ + ( + & MySql2DrizzleConfig + & ({ + connection: string | PoolOptions; + } | { + client: TClient; + }) + ), + ] + > +): MySql2Database & { + $client: TClient; +} { + // eslint-disable-next-line no-instanceof/no-instanceof + if (params[0] instanceof EventEmitter) { + return construct(params[0] as TClient, params[1] as MySql2DrizzleConfig | undefined) as any; + } + + if (typeof params[0] === 'object') { + const { connection, client, ...drizzleConfig } = params[0] as + & { connection?: PoolOptions | string; client?: TClient } + & MySql2DrizzleConfig; + + if (client) return construct(client, drizzleConfig) as any; + + const instance = typeof connection === 'string' + ? 
createPool({ + uri: connection, + }) + : createPool(connection!); + const db = construct(instance, drizzleConfig); + + return db as any; + } + + const connectionString = params[0]!; + const instance = createPool({ + uri: connectionString, + }); + + return construct(instance, params[1]) as any; +} + +export namespace drizzle { + export function mock = Record>( + config?: MySql2DrizzleConfig, + ): MySql2Database & { + $client: '$client is not available on drizzle.mock()'; + } { + return construct({} as any, config) as any; + } +} diff --git a/drizzle-orm/src/mysql2/session.ts b/drizzle-orm/src/mysql2/session.ts index ab11d1f17..7ca21c4a6 100644 --- a/drizzle-orm/src/mysql2/session.ts +++ b/drizzle-orm/src/mysql2/session.ts @@ -41,7 +41,7 @@ export type MySqlQueryResult< > = [T extends ResultSetHeader ? T : T[], FieldPacket[]]; export class MySql2PreparedQuery extends MySqlPreparedQuery { - static readonly [entityKind]: string = 'MySql2PreparedQuery'; + static override readonly [entityKind]: string = 'MySql2PreparedQuery'; private rawQuery: QueryOptions; private query: QueryOptions; @@ -190,7 +190,7 @@ export class MySql2Session< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends MySqlSession { - static readonly [entityKind]: string = 'MySql2Session'; + static override readonly [entityKind]: string = 'MySql2Session'; private logger: Logger; private mode: Mode; @@ -301,7 +301,7 @@ export class MySql2Transaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends MySqlTransaction { - static readonly [entityKind]: string = 'MySql2Transaction'; + static override readonly [entityKind]: string = 'MySql2Transaction'; override async transaction(transaction: (tx: MySql2Transaction) => Promise): Promise { const savepointName = `sp${this.nestedIndex + 1}`; diff --git a/drizzle-orm/src/neon-http/driver.ts b/drizzle-orm/src/neon-http/driver.ts index e85204a62..f79fd9de3 100644 --- a/drizzle-orm/src/neon-http/driver.ts +++ 
b/drizzle-orm/src/neon-http/driver.ts @@ -1,5 +1,5 @@ -import type { NeonQueryFunction } from '@neondatabase/serverless'; -import { types } from '@neondatabase/serverless'; +import type { HTTPTransactionOptions, NeonQueryFunction } from '@neondatabase/serverless'; +import { neon, types } from '@neondatabase/serverless'; import type { BatchItem, BatchResponse } from '~/batch.ts'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; @@ -8,7 +8,7 @@ import { PgDatabase } from '~/pg-core/db.ts'; import { PgDialect } from '~/pg-core/dialect.ts'; import { createTableRelationsHelpers, extractTablesRelationalConfig } from '~/relations.ts'; import type { ExtractTablesWithRelations, RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; -import type { DrizzleConfig } from '~/utils.ts'; +import type { DrizzleConfig, IfNotImported, ImportTypeError } from '~/utils.ts'; import { type NeonHttpClient, type NeonHttpQueryResultHKT, NeonHttpSession } from './session.ts'; export interface NeonDriverOptions { @@ -43,7 +43,7 @@ export class NeonHttpDriver { export class NeonHttpDatabase< TSchema extends Record = Record, > extends PgDatabase { - static readonly [entityKind]: string = 'NeonHttpDatabase'; + static override readonly [entityKind]: string = 'NeonHttpDatabase'; /** @internal */ declare readonly session: NeonHttpSession>; @@ -55,7 +55,7 @@ export class NeonHttpDatabase< } } -export function drizzle< +function construct< TSchema extends Record = Record, TClient extends NeonQueryFunction = NeonQueryFunction, >( @@ -97,3 +97,75 @@ export function drizzle< return db as any; } + +export function drizzle< + TSchema extends Record = Record, + TClient extends NeonQueryFunction = NeonQueryFunction, +>( + ...params: IfNotImported< + HTTPTransactionOptions, + [ImportTypeError<'@neondatabase/serverless'>], + [ + TClient | string, + ] | [ + TClient | string, + DrizzleConfig, + ] | [ + ( + & DrizzleConfig + & ({ + connection: string | ({ 
connectionString: string } & HTTPTransactionOptions); + } | { + client: TClient; + }) + ), + ] + > +): NeonHttpDatabase & { + $client: TClient; +} { + // eslint-disable-next-line no-instanceof/no-instanceof + if (typeof params[0] === 'function') { + return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; + } + + if (typeof params[0] === 'object') { + const { connection, client, ...drizzleConfig } = params[0] as + & { + connection?: + | ({ + connectionString: string; + } & HTTPTransactionOptions) + | string; + client?: TClient; + } + & DrizzleConfig; + + if (client) return construct(client, drizzleConfig); + + if (typeof connection === 'object') { + const { connectionString, ...options } = connection; + + const instance = neon(connectionString, options); + + return construct(instance, drizzleConfig) as any; + } + + const instance = neon(connection!); + + return construct(instance, drizzleConfig) as any; + } + + const instance = neon(params[0] as string); + return construct(instance, params[1]) as any; +} + +export namespace drizzle { + export function mock = Record>( + config?: DrizzleConfig, + ): NeonHttpDatabase & { + $client: '$client is not available on drizzle.mock()'; + } { + return construct({} as any, config) as any; + } +} diff --git a/drizzle-orm/src/neon-http/session.ts b/drizzle-orm/src/neon-http/session.ts index 4dd768d3e..cd4a855e7 100644 --- a/drizzle-orm/src/neon-http/session.ts +++ b/drizzle-orm/src/neon-http/session.ts @@ -25,7 +25,7 @@ const queryConfig = { } as const; export class NeonHttpPreparedQuery extends PgPreparedQuery { - static readonly [entityKind]: string = 'NeonHttpPreparedQuery'; + static override readonly [entityKind]: string = 'NeonHttpPreparedQuery'; constructor( private client: NeonHttpClient, @@ -94,7 +94,7 @@ export class NeonHttpSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgSession { - static readonly [entityKind]: string = 'NeonHttpSession'; + static 
override readonly [entityKind]: string = 'NeonHttpSession'; private logger: Logger; @@ -182,7 +182,7 @@ export class NeonTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgTransaction { - static readonly [entityKind]: string = 'NeonHttpTransaction'; + static override readonly [entityKind]: string = 'NeonHttpTransaction'; override async transaction(_transaction: (tx: NeonTransaction) => Promise): Promise { throw new Error('No transactions support in neon-http driver'); diff --git a/drizzle-orm/src/neon-serverless/driver.ts b/drizzle-orm/src/neon-serverless/driver.ts index 7ee72814b..c0f962e96 100644 --- a/drizzle-orm/src/neon-serverless/driver.ts +++ b/drizzle-orm/src/neon-serverless/driver.ts @@ -1,4 +1,4 @@ -import { types } from '@neondatabase/serverless'; +import { neonConfig, Pool, type PoolConfig } from '@neondatabase/serverless'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { DefaultLogger } from '~/logger.ts'; @@ -10,7 +10,7 @@ import { type RelationalSchemaConfig, type TablesRelationalConfig, } from '~/relations.ts'; -import type { DrizzleConfig } from '~/utils.ts'; +import type { DrizzleConfig, IfNotImported, ImportTypeError } from '~/utils.ts'; import type { NeonClient, NeonQueryResultHKT } from './session.ts'; import { NeonSession } from './session.ts'; @@ -26,7 +26,6 @@ export class NeonDriver { private dialect: PgDialect, private options: NeonDriverOptions = {}, ) { - this.initMappers(); } createSession( @@ -34,22 +33,15 @@ export class NeonDriver { ): NeonSession, TablesRelationalConfig> { return new NeonSession(this.client, this.dialect, schema, { logger: this.options.logger }); } - - initMappers() { - types.setTypeParser(types.builtins.TIMESTAMPTZ, (val) => val); - types.setTypeParser(types.builtins.TIMESTAMP, (val) => val); - types.setTypeParser(types.builtins.DATE, (val) => val); - types.setTypeParser(types.builtins.INTERVAL, (val) => val); - } } export class 
NeonDatabase< TSchema extends Record = Record, > extends PgDatabase { - static readonly [entityKind]: string = 'NeonServerlessDatabase'; + static override readonly [entityKind]: string = 'NeonServerlessDatabase'; } -export function drizzle< +function construct< TSchema extends Record = Record, TClient extends NeonClient = NeonClient, >( @@ -86,3 +78,76 @@ export function drizzle< return db as any; } + +export function drizzle< + TSchema extends Record = Record, + TClient extends NeonClient = Pool, +>( + ...params: IfNotImported< + Pool, + [ImportTypeError<'@neondatabase/serverless'>], + [ + TClient | string, + ] | [ + TClient | string, + DrizzleConfig, + ] | [ + ( + & DrizzleConfig + & ({ + connection: string | PoolConfig; + } | { + client: TClient; + }) + & { + ws?: any; + } + ), + ] + > +): NeonDatabase & { + $client: TClient; +} { + // eslint-disable-next-line no-instanceof/no-instanceof + if (params[0] instanceof Pool) { + return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; + } + + if (typeof params[0] === 'object') { + const { connection, client, ws, ...drizzleConfig } = params[0] as { + connection?: PoolConfig | string; + ws?: any; + client?: TClient; + } & DrizzleConfig; + + if (ws) { + neonConfig.webSocketConstructor = ws; + } + + if (client) return construct(client, drizzleConfig); + + const instance = typeof connection === 'string' + ? 
new Pool({ + connectionString: connection, + }) + : new Pool(connection); + + return construct(instance, drizzleConfig) as any; + } + + const instance = new Pool({ + connectionString: params[0], + }); + + return construct(instance, params[1]) as any; +} + +export namespace drizzle { + export function mock = Record>( + config?: DrizzleConfig, + ): NeonDatabase & { + $client: '$client is not available on drizzle.mock()'; + } { + return construct({} as any, config) as any; + } +} diff --git a/drizzle-orm/src/neon-serverless/session.ts b/drizzle-orm/src/neon-serverless/session.ts index 82c405333..6f144e3fb 100644 --- a/drizzle-orm/src/neon-serverless/session.ts +++ b/drizzle-orm/src/neon-serverless/session.ts @@ -6,6 +6,7 @@ import { type QueryConfig, type QueryResult, type QueryResultRow, + types, } from '@neondatabase/serverless'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; @@ -16,13 +17,13 @@ import type { SelectedFieldsOrdered } from '~/pg-core/query-builders/select.type import type { PgQueryResultHKT, PgTransactionConfig, PreparedQueryConfig } from '~/pg-core/session.ts'; import { PgPreparedQuery, PgSession } from '~/pg-core/session.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; -import { fillPlaceholders, type Query, sql } from '~/sql/sql.ts'; +import { fillPlaceholders, type Query, type SQL, sql } from '~/sql/sql.ts'; import { type Assume, mapResultRow } from '~/utils.ts'; export type NeonClient = Pool | PoolClient | Client; export class NeonPreparedQuery extends PgPreparedQuery { - static readonly [entityKind]: string = 'NeonPreparedQuery'; + static override readonly [entityKind]: string = 'NeonPreparedQuery'; private rawQueryConfig: QueryConfig; private queryConfig: QueryArrayConfig; @@ -41,11 +42,49 @@ export class NeonPreparedQuery extends PgPrepared this.rawQueryConfig = { name, text: queryString, + types: { + // @ts-ignore + getTypeParser: (typeId, format) => { + if 
(typeId === types.builtins.TIMESTAMPTZ) { + return (val: any) => val; + } + if (typeId === types.builtins.TIMESTAMP) { + return (val: any) => val; + } + if (typeId === types.builtins.DATE) { + return (val: any) => val; + } + if (typeId === types.builtins.INTERVAL) { + return (val: any) => val; + } + // @ts-ignore + return types.getTypeParser(typeId, format); + }, + }, }; this.queryConfig = { name, text: queryString, rowMode: 'array', + types: { + // @ts-ignore + getTypeParser: (typeId, format) => { + if (typeId === types.builtins.TIMESTAMPTZ) { + return (val: any) => val; + } + if (typeId === types.builtins.TIMESTAMP) { + return (val: any) => val; + } + if (typeId === types.builtins.DATE) { + return (val: any) => val; + } + if (typeId === types.builtins.INTERVAL) { + return (val: any) => val; + } + // @ts-ignore + return types.getTypeParser(typeId, format); + }, + }, }; } @@ -93,7 +132,7 @@ export class NeonSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgSession { - static readonly [entityKind]: string = 'NeonSession'; + static override readonly [entityKind]: string = 'NeonSession'; private logger: Logger; @@ -143,6 +182,14 @@ export class NeonSession< return this.client.query(query, params); } + override async count(sql: SQL): Promise { + const res = await this.execute<{ rows: [{ count: string }] }>(sql); + + return Number( + res['rows'][0]['count'], + ); + } + override async transaction( transaction: (tx: NeonTransaction) => Promise, config: PgTransactionConfig = {}, @@ -171,7 +218,7 @@ export class NeonTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgTransaction { - static readonly [entityKind]: string = 'NeonTransaction'; + static override readonly [entityKind]: string = 'NeonTransaction'; override async transaction(transaction: (tx: NeonTransaction) => Promise): Promise { const savepointName = `sp${this.nestedIndex + 1}`; diff --git a/drizzle-orm/src/node-postgres/driver.ts 
b/drizzle-orm/src/node-postgres/driver.ts index 79a99a3fa..b9bb063d8 100644 --- a/drizzle-orm/src/node-postgres/driver.ts +++ b/drizzle-orm/src/node-postgres/driver.ts @@ -1,4 +1,5 @@ -import pg from 'pg'; +import { EventEmitter } from 'events'; +import pg, { type Pool, type PoolConfig } from 'pg'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { DefaultLogger } from '~/logger.ts'; @@ -10,12 +11,10 @@ import { type RelationalSchemaConfig, type TablesRelationalConfig, } from '~/relations.ts'; -import type { DrizzleConfig } from '~/utils.ts'; +import type { DrizzleConfig, IfNotImported, ImportTypeError } from '~/utils.ts'; import type { NodePgClient, NodePgQueryResultHKT } from './session.ts'; import { NodePgSession } from './session.ts'; -const { types } = pg; - export interface PgDriverOptions { logger?: Logger; } @@ -28,7 +27,6 @@ export class NodePgDriver { private dialect: PgDialect, private options: PgDriverOptions = {}, ) { - this.initMappers(); } createSession( @@ -36,22 +34,15 @@ export class NodePgDriver { ): NodePgSession, TablesRelationalConfig> { return new NodePgSession(this.client, this.dialect, schema, { logger: this.options.logger }); } - - initMappers() { - types.setTypeParser(types.builtins.TIMESTAMPTZ, (val) => val); - types.setTypeParser(types.builtins.TIMESTAMP, (val) => val); - types.setTypeParser(types.builtins.DATE, (val) => val); - types.setTypeParser(types.builtins.INTERVAL, (val) => val); - } } export class NodePgDatabase< TSchema extends Record = Record, > extends PgDatabase { - static readonly [entityKind]: string = 'NodePgDatabase'; + static override readonly [entityKind]: string = 'NodePgDatabase'; } -export function drizzle< +function construct< TSchema extends Record = Record, TClient extends NodePgClient = NodePgClient, >( @@ -88,3 +79,70 @@ export function drizzle< return db as any; } + +export function drizzle< + TSchema extends Record = Record, + TClient extends NodePgClient = Pool, 
+>( + ...params: IfNotImported< + Pool, + [ImportTypeError<'pg'>], + | [ + TClient | string, + ] + | [ + TClient | string, + DrizzleConfig, + ] + | [ + ( + & DrizzleConfig + & ({ + connection: string | PoolConfig; + } | { + client: TClient; + }) + ), + ] + > +): NodePgDatabase & { + $client: TClient; +} { + // eslint-disable-next-line no-instanceof/no-instanceof + if (params[0] instanceof EventEmitter) { + return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; + } + + if (typeof params[0] === 'object') { + const { connection, client, ...drizzleConfig } = params[0] as ( + & ({ connection?: PoolConfig | string; client?: TClient }) + & DrizzleConfig + ); + + if (client) return construct(client, drizzleConfig); + + const instance = typeof connection === 'string' + ? new pg.Pool({ + connectionString: connection, + }) + : new pg.Pool(connection!); + + return construct(instance, drizzleConfig) as any; + } + + const instance = new pg.Pool({ + connectionString: params[0], + }); + + return construct(instance, params[1] as DrizzleConfig | undefined) as any; +} + +export namespace drizzle { + export function mock = Record>( + config?: DrizzleConfig, + ): NodePgDatabase & { + $client: '$client is not available on drizzle.mock()'; + } { + return construct({} as any, config) as any; + } +} diff --git a/drizzle-orm/src/node-postgres/session.ts b/drizzle-orm/src/node-postgres/session.ts index ef6779354..a925d7018 100644 --- a/drizzle-orm/src/node-postgres/session.ts +++ b/drizzle-orm/src/node-postgres/session.ts @@ -12,12 +12,12 @@ import { fillPlaceholders, type Query, type SQL, sql } from '~/sql/sql.ts'; import { tracer } from '~/tracing.ts'; import { type Assume, mapResultRow } from '~/utils.ts'; -const { Pool } = pg; +const { Pool, types } = pg; export type NodePgClient = pg.Pool | PoolClient | Client; export class NodePgPreparedQuery extends PgPreparedQuery { - static readonly [entityKind]: string = 'NodePgPreparedQuery'; + static override 
readonly [entityKind]: string = 'NodePgPreparedQuery'; private rawQueryConfig: QueryConfig; private queryConfig: QueryArrayConfig; @@ -36,11 +36,49 @@ export class NodePgPreparedQuery extends PgPrepar this.rawQueryConfig = { name, text: queryString, + types: { + // @ts-ignore + getTypeParser: (typeId, format) => { + if (typeId === types.builtins.TIMESTAMPTZ) { + return (val) => val; + } + if (typeId === types.builtins.TIMESTAMP) { + return (val) => val; + } + if (typeId === types.builtins.DATE) { + return (val) => val; + } + if (typeId === types.builtins.INTERVAL) { + return (val) => val; + } + // @ts-ignore + return types.getTypeParser(typeId, format); + }, + }, }; this.queryConfig = { name, text: queryString, rowMode: 'array', + types: { + // @ts-ignore + getTypeParser: (typeId, format) => { + if (typeId === types.builtins.TIMESTAMPTZ) { + return (val) => val; + } + if (typeId === types.builtins.TIMESTAMP) { + return (val) => val; + } + if (typeId === types.builtins.DATE) { + return (val) => val; + } + if (typeId === types.builtins.INTERVAL) { + return (val) => val; + } + // @ts-ignore + return types.getTypeParser(typeId, format); + }, + }, }; } @@ -109,7 +147,7 @@ export class NodePgSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgSession { - static readonly [entityKind]: string = 'NodePgSession'; + static override readonly [entityKind]: string = 'NodePgSession'; private logger: Logger; @@ -177,7 +215,7 @@ export class NodePgTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgTransaction { - static readonly [entityKind]: string = 'NodePgTransaction'; + static override readonly [entityKind]: string = 'NodePgTransaction'; override async transaction(transaction: (tx: NodePgTransaction) => Promise): Promise { const savepointName = `sp${this.nestedIndex + 1}`; diff --git a/drizzle-orm/src/op-sqlite/driver.ts b/drizzle-orm/src/op-sqlite/driver.ts index ac8b1f310..06b9d57f4 100644 --- 
a/drizzle-orm/src/op-sqlite/driver.ts +++ b/drizzle-orm/src/op-sqlite/driver.ts @@ -15,7 +15,7 @@ import { OPSQLiteSession } from './session.ts'; export class OPSQLiteDatabase< TSchema extends Record = Record, > extends BaseSQLiteDatabase<'async', QueryResult, TSchema> { - static readonly [entityKind]: string = 'OPSQLiteDatabase'; + static override readonly [entityKind]: string = 'OPSQLiteDatabase'; } export function drizzle = Record>( diff --git a/drizzle-orm/src/op-sqlite/session.ts b/drizzle-orm/src/op-sqlite/session.ts index f7a08a56b..c1ac63071 100644 --- a/drizzle-orm/src/op-sqlite/session.ts +++ b/drizzle-orm/src/op-sqlite/session.ts @@ -26,7 +26,7 @@ export class OPSQLiteSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SQLiteSession<'async', QueryResult, TFullSchema, TSchema> { - static readonly [entityKind]: string = 'OPSQLiteSession'; + static override readonly [entityKind]: string = 'OPSQLiteSession'; private logger: Logger; @@ -79,7 +79,7 @@ export class OPSQLiteTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SQLiteTransaction<'async', QueryResult, TFullSchema, TSchema> { - static readonly [entityKind]: string = 'OPSQLiteTransaction'; + static override readonly [entityKind]: string = 'OPSQLiteTransaction'; override transaction(transaction: (tx: OPSQLiteTransaction) => T): T { const savepointName = `sp${this.nestedIndex}`; @@ -99,7 +99,7 @@ export class OPSQLiteTransaction< export class OPSQLitePreparedQuery extends SQLitePreparedQuery< { type: 'async'; run: QueryResult; all: T['all']; get: T['get']; values: T['values']; execute: T['execute'] } > { - static readonly [entityKind]: string = 'OPSQLitePreparedQuery'; + static override readonly [entityKind]: string = 'OPSQLitePreparedQuery'; constructor( private client: OPSQLiteConnection, diff --git a/drizzle-orm/src/pg-core/columns/bigint.ts b/drizzle-orm/src/pg-core/columns/bigint.ts index ef6be9eff..23e1e7f15 100644 --- 
a/drizzle-orm/src/pg-core/columns/bigint.ts +++ b/drizzle-orm/src/pg-core/columns/bigint.ts @@ -20,7 +20,7 @@ export type PgBigInt53BuilderInitial = PgBigInt53Builder<{ export class PgBigInt53Builder> extends PgIntColumnBaseBuilder { - static readonly [entityKind]: string = 'PgBigInt53Builder'; + static override readonly [entityKind]: string = 'PgBigInt53Builder'; constructor(name: T['name']) { super(name, 'number', 'PgBigInt53'); @@ -35,7 +35,7 @@ export class PgBigInt53Builder> extends PgColumn { - static readonly [entityKind]: string = 'PgBigInt53'; + static override readonly [entityKind]: string = 'PgBigInt53'; getSQLType(): string { return 'bigint'; @@ -62,7 +62,7 @@ export type PgBigInt64BuilderInitial = PgBigInt64Builder<{ export class PgBigInt64Builder> extends PgIntColumnBaseBuilder { - static readonly [entityKind]: string = 'PgBigInt64Builder'; + static override readonly [entityKind]: string = 'PgBigInt64Builder'; constructor(name: T['name']) { super(name, 'bigint', 'PgBigInt64'); @@ -80,7 +80,7 @@ export class PgBigInt64Builder> extends PgColumn { - static readonly [entityKind]: string = 'PgBigInt64'; + static override readonly [entityKind]: string = 'PgBigInt64'; getSQLType(): string { return 'bigint'; diff --git a/drizzle-orm/src/pg-core/columns/bigserial.ts b/drizzle-orm/src/pg-core/columns/bigserial.ts index 775234cb2..ed4224354 100644 --- a/drizzle-orm/src/pg-core/columns/bigserial.ts +++ b/drizzle-orm/src/pg-core/columns/bigserial.ts @@ -28,7 +28,7 @@ export type PgBigSerial53BuilderInitial = NotNull< export class PgBigSerial53Builder> extends PgColumnBuilder { - static readonly [entityKind]: string = 'PgBigSerial53Builder'; + static override readonly [entityKind]: string = 'PgBigSerial53Builder'; constructor(name: string) { super(name, 'number', 'PgBigSerial53'); @@ -48,7 +48,7 @@ export class PgBigSerial53Builder> extends PgColumn { - static readonly [entityKind]: string = 'PgBigSerial53'; + static override readonly [entityKind]: string = 
'PgBigSerial53'; getSQLType(): string { return 'bigserial'; @@ -79,7 +79,7 @@ export type PgBigSerial64BuilderInitial = NotNull< export class PgBigSerial64Builder> extends PgColumnBuilder { - static readonly [entityKind]: string = 'PgBigSerial64Builder'; + static override readonly [entityKind]: string = 'PgBigSerial64Builder'; constructor(name: string) { super(name, 'bigint', 'PgBigSerial64'); @@ -98,7 +98,7 @@ export class PgBigSerial64Builder> extends PgColumn { - static readonly [entityKind]: string = 'PgBigSerial64'; + static override readonly [entityKind]: string = 'PgBigSerial64'; getSQLType(): string { return 'bigserial'; diff --git a/drizzle-orm/src/pg-core/columns/boolean.ts b/drizzle-orm/src/pg-core/columns/boolean.ts index 19139243a..cd30895c7 100644 --- a/drizzle-orm/src/pg-core/columns/boolean.ts +++ b/drizzle-orm/src/pg-core/columns/boolean.ts @@ -15,7 +15,7 @@ export type PgBooleanBuilderInitial = PgBooleanBuilder<{ }>; export class PgBooleanBuilder> extends PgColumnBuilder { - static readonly [entityKind]: string = 'PgBooleanBuilder'; + static override readonly [entityKind]: string = 'PgBooleanBuilder'; constructor(name: T['name']) { super(name, 'boolean', 'PgBoolean'); @@ -30,7 +30,7 @@ export class PgBooleanBuilder> extends PgColumn { - static readonly [entityKind]: string = 'PgBoolean'; + static override readonly [entityKind]: string = 'PgBoolean'; getSQLType(): string { return 'boolean'; diff --git a/drizzle-orm/src/pg-core/columns/char.ts b/drizzle-orm/src/pg-core/columns/char.ts index a3b8853be..6629f08cc 100644 --- a/drizzle-orm/src/pg-core/columns/char.ts +++ b/drizzle-orm/src/pg-core/columns/char.ts @@ -19,7 +19,7 @@ export class PgCharBuilder T, { length: number | undefined; enumValues: T['enumValues'] } > { - static readonly [entityKind]: string = 'PgCharBuilder'; + static override readonly [entityKind]: string = 'PgCharBuilder'; constructor(name: T['name'], config: PgCharConfig) { super(name, 'string', 'PgChar'); @@ -38,7 +38,7 @@ export 
class PgCharBuilder export class PgChar> extends PgColumn { - static readonly [entityKind]: string = 'PgChar'; + static override readonly [entityKind]: string = 'PgChar'; readonly length = this.config.length; override readonly enumValues = this.config.enumValues; diff --git a/drizzle-orm/src/pg-core/columns/cidr.ts b/drizzle-orm/src/pg-core/columns/cidr.ts index 8ab375ba1..6caa3dc25 100644 --- a/drizzle-orm/src/pg-core/columns/cidr.ts +++ b/drizzle-orm/src/pg-core/columns/cidr.ts @@ -15,7 +15,7 @@ export type PgCidrBuilderInitial = PgCidrBuilder<{ }>; export class PgCidrBuilder> extends PgColumnBuilder { - static readonly [entityKind]: string = 'PgCidrBuilder'; + static override readonly [entityKind]: string = 'PgCidrBuilder'; constructor(name: T['name']) { super(name, 'string', 'PgCidr'); @@ -30,7 +30,7 @@ export class PgCidrBuilder } export class PgCidr> extends PgColumn { - static readonly [entityKind]: string = 'PgCidr'; + static override readonly [entityKind]: string = 'PgCidr'; getSQLType(): string { return 'cidr'; diff --git a/drizzle-orm/src/pg-core/columns/common.ts b/drizzle-orm/src/pg-core/columns/common.ts index 84118ef20..c2fbe8cb9 100644 --- a/drizzle-orm/src/pg-core/columns/common.ts +++ b/drizzle-orm/src/pg-core/columns/common.ts @@ -46,7 +46,7 @@ export abstract class PgColumnBuilder< { private foreignKeyConfigs: ReferenceConfig[] = []; - static readonly [entityKind]: string = 'PgColumnBuilder'; + static override readonly [entityKind]: string = 'PgColumnBuilder'; array(size?: number): PgArrayBuilder< & { @@ -134,7 +134,7 @@ export abstract class PgColumn< TRuntimeConfig extends object = {}, TTypeConfig extends object = {}, > extends Column { - static readonly [entityKind]: string = 'PgColumn'; + static override readonly [entityKind]: string = 'PgColumn'; constructor( override readonly table: PgTable, @@ -152,7 +152,7 @@ export type IndexedExtraConfigType = { order?: 'asc' | 'desc'; nulls?: 'first' | export class ExtraConfigColumn< T extends 
ColumnBaseConfig = ColumnBaseConfig, > extends PgColumn { - static readonly [entityKind]: string = 'ExtraConfigColumn'; + static override readonly [entityKind]: string = 'ExtraConfigColumn'; override getSQLType(): string { return this.getSQLType(); @@ -292,7 +292,7 @@ export class PgArray< > extends PgColumn { readonly size: number | undefined; - static readonly [entityKind]: string = 'PgArray'; + static override readonly [entityKind]: string = 'PgArray'; constructor( table: AnyPgTable<{ name: T['tableName'] }>, diff --git a/drizzle-orm/src/pg-core/columns/custom.ts b/drizzle-orm/src/pg-core/columns/custom.ts index 44fdec1b2..b59169ed6 100644 --- a/drizzle-orm/src/pg-core/columns/custom.ts +++ b/drizzle-orm/src/pg-core/columns/custom.ts @@ -35,7 +35,7 @@ export class PgCustomColumnBuilder { - static readonly [entityKind]: string = 'PgCustomColumnBuilder'; + static override readonly [entityKind]: string = 'PgCustomColumnBuilder'; constructor( name: T['name'], @@ -59,7 +59,7 @@ export class PgCustomColumnBuilder> extends PgColumn { - static readonly [entityKind]: string = 'PgCustomColumn'; + static override readonly [entityKind]: string = 'PgCustomColumn'; private sqlName: string; private mapTo?: (value: T['data']) => T['driverParam']; diff --git a/drizzle-orm/src/pg-core/columns/date.common.ts b/drizzle-orm/src/pg-core/columns/date.common.ts index c2a46d1ce..e8bfbf2b1 100644 --- a/drizzle-orm/src/pg-core/columns/date.common.ts +++ b/drizzle-orm/src/pg-core/columns/date.common.ts @@ -7,7 +7,7 @@ export abstract class PgDateColumnBaseBuilder< T extends ColumnBuilderBaseConfig, TRuntimeConfig extends object = object, > extends PgColumnBuilder { - static readonly [entityKind]: string = 'PgDateColumnBaseBuilder'; + static override readonly [entityKind]: string = 'PgDateColumnBaseBuilder'; defaultNow() { return this.default(sql`now()`); diff --git a/drizzle-orm/src/pg-core/columns/date.ts b/drizzle-orm/src/pg-core/columns/date.ts index 812ec4e92..1d295b97a 100644 --- 
a/drizzle-orm/src/pg-core/columns/date.ts +++ b/drizzle-orm/src/pg-core/columns/date.ts @@ -17,7 +17,7 @@ export type PgDateBuilderInitial = PgDateBuilder<{ }>; export class PgDateBuilder> extends PgDateColumnBaseBuilder { - static readonly [entityKind]: string = 'PgDateBuilder'; + static override readonly [entityKind]: string = 'PgDateBuilder'; constructor(name: T['name']) { super(name, 'date', 'PgDate'); @@ -32,7 +32,7 @@ export class PgDateBuilder> } export class PgDate> extends PgColumn { - static readonly [entityKind]: string = 'PgDate'; + static override readonly [entityKind]: string = 'PgDate'; getSQLType(): string { return 'date'; @@ -60,7 +60,7 @@ export type PgDateStringBuilderInitial = PgDateStringBuild export class PgDateStringBuilder> extends PgDateColumnBaseBuilder { - static readonly [entityKind]: string = 'PgDateStringBuilder'; + static override readonly [entityKind]: string = 'PgDateStringBuilder'; constructor(name: T['name']) { super(name, 'string', 'PgDateString'); @@ -78,7 +78,7 @@ export class PgDateStringBuilder> extends PgColumn { - static readonly [entityKind]: string = 'PgDateString'; + static override readonly [entityKind]: string = 'PgDateString'; getSQLType(): string { return 'date'; diff --git a/drizzle-orm/src/pg-core/columns/double-precision.ts b/drizzle-orm/src/pg-core/columns/double-precision.ts index 8e454169f..77245ea45 100644 --- a/drizzle-orm/src/pg-core/columns/double-precision.ts +++ b/drizzle-orm/src/pg-core/columns/double-precision.ts @@ -17,7 +17,7 @@ export type PgDoublePrecisionBuilderInitial = PgDoublePrec export class PgDoublePrecisionBuilder> extends PgColumnBuilder { - static readonly [entityKind]: string = 'PgDoublePrecisionBuilder'; + static override readonly [entityKind]: string = 'PgDoublePrecisionBuilder'; constructor(name: T['name']) { super(name, 'number', 'PgDoublePrecision'); @@ -35,7 +35,7 @@ export class PgDoublePrecisionBuilder> extends PgColumn { - static readonly [entityKind]: string = 
'PgDoublePrecision'; + static override readonly [entityKind]: string = 'PgDoublePrecision'; getSQLType(): string { return 'double precision'; diff --git a/drizzle-orm/src/pg-core/columns/enum.ts b/drizzle-orm/src/pg-core/columns/enum.ts index d35ec89d9..7712ca606 100644 --- a/drizzle-orm/src/pg-core/columns/enum.ts +++ b/drizzle-orm/src/pg-core/columns/enum.ts @@ -36,7 +36,7 @@ export function isPgEnum(obj: unknown): obj is PgEnum<[string, ...string[]]> { export class PgEnumColumnBuilder< T extends ColumnBuilderBaseConfig<'string', 'PgEnumColumn'> & { enumValues: [string, ...string[]] }, > extends PgColumnBuilder }> { - static readonly [entityKind]: string = 'PgEnumColumnBuilder'; + static override readonly [entityKind]: string = 'PgEnumColumnBuilder'; constructor(name: T['name'], enumInstance: PgEnum) { super(name, 'string', 'PgEnumColumn'); @@ -57,7 +57,7 @@ export class PgEnumColumnBuilder< export class PgEnumColumn & { enumValues: [string, ...string[]] }> extends PgColumn }> { - static readonly [entityKind]: string = 'PgEnumColumn'; + static override readonly [entityKind]: string = 'PgEnumColumn'; readonly enum = this.config.enum; override readonly enumValues = this.config.enum.enumValues; diff --git a/drizzle-orm/src/pg-core/columns/inet.ts b/drizzle-orm/src/pg-core/columns/inet.ts index a675359b3..6b6210fcf 100644 --- a/drizzle-orm/src/pg-core/columns/inet.ts +++ b/drizzle-orm/src/pg-core/columns/inet.ts @@ -15,7 +15,7 @@ export type PgInetBuilderInitial = PgInetBuilder<{ }>; export class PgInetBuilder> extends PgColumnBuilder { - static readonly [entityKind]: string = 'PgInetBuilder'; + static override readonly [entityKind]: string = 'PgInetBuilder'; constructor(name: T['name']) { super(name, 'string', 'PgInet'); @@ -30,7 +30,7 @@ export class PgInetBuilder } export class PgInet> extends PgColumn { - static readonly [entityKind]: string = 'PgInet'; + static override readonly [entityKind]: string = 'PgInet'; getSQLType(): string { return 'inet'; diff --git 
a/drizzle-orm/src/pg-core/columns/int.common.ts b/drizzle-orm/src/pg-core/columns/int.common.ts index 07c26ba9e..c473b8d04 100644 --- a/drizzle-orm/src/pg-core/columns/int.common.ts +++ b/drizzle-orm/src/pg-core/columns/int.common.ts @@ -14,7 +14,7 @@ export abstract class PgIntColumnBaseBuilder< T, { generatedIdentity: GeneratedIdentityConfig } > { - static readonly [entityKind]: string = 'PgIntColumnBaseBuilder'; + static override readonly [entityKind]: string = 'PgIntColumnBaseBuilder'; generatedAlwaysAsIdentity( sequence?: PgSequenceOptions & { name?: string }, diff --git a/drizzle-orm/src/pg-core/columns/integer.ts b/drizzle-orm/src/pg-core/columns/integer.ts index 0feb388f3..bb70f9b41 100644 --- a/drizzle-orm/src/pg-core/columns/integer.ts +++ b/drizzle-orm/src/pg-core/columns/integer.ts @@ -18,7 +18,7 @@ type PgIntegerBuilderInitial = PgIntegerBuilder<{ export class PgIntegerBuilder> extends PgIntColumnBaseBuilder { - static readonly [entityKind]: string = 'PgIntegerBuilder'; + static override readonly [entityKind]: string = 'PgIntegerBuilder'; constructor(name: T['name']) { super(name, 'number', 'PgInteger'); @@ -33,7 +33,7 @@ export class PgIntegerBuilder> extends PgColumn { - static readonly [entityKind]: string = 'PgInteger'; + static override readonly [entityKind]: string = 'PgInteger'; getSQLType(): string { return 'integer'; diff --git a/drizzle-orm/src/pg-core/columns/interval.ts b/drizzle-orm/src/pg-core/columns/interval.ts index 3ae6557a3..4d3ed4588 100644 --- a/drizzle-orm/src/pg-core/columns/interval.ts +++ b/drizzle-orm/src/pg-core/columns/interval.ts @@ -19,7 +19,7 @@ export type PgIntervalBuilderInitial = PgIntervalBuilder<{ export class PgIntervalBuilder> extends PgColumnBuilder { - static readonly [entityKind]: string = 'PgIntervalBuilder'; + static override readonly [entityKind]: string = 'PgIntervalBuilder'; constructor( name: T['name'], @@ -40,7 +40,7 @@ export class PgIntervalBuilder> extends PgColumn { - static readonly [entityKind]: 
string = 'PgInterval'; + static override readonly [entityKind]: string = 'PgInterval'; readonly fields: IntervalConfig['fields'] = this.config.intervalConfig.fields; readonly precision: IntervalConfig['precision'] = this.config.intervalConfig.precision; diff --git a/drizzle-orm/src/pg-core/columns/json.ts b/drizzle-orm/src/pg-core/columns/json.ts index 7e232db5e..3c440c7d2 100644 --- a/drizzle-orm/src/pg-core/columns/json.ts +++ b/drizzle-orm/src/pg-core/columns/json.ts @@ -17,7 +17,7 @@ export type PgJsonBuilderInitial = PgJsonBuilder<{ export class PgJsonBuilder> extends PgColumnBuilder< T > { - static readonly [entityKind]: string = 'PgJsonBuilder'; + static override readonly [entityKind]: string = 'PgJsonBuilder'; constructor(name: T['name']) { super(name, 'json', 'PgJson'); @@ -32,7 +32,7 @@ export class PgJsonBuilder> } export class PgJson> extends PgColumn { - static readonly [entityKind]: string = 'PgJson'; + static override readonly [entityKind]: string = 'PgJson'; constructor(table: AnyPgTable<{ name: T['tableName'] }>, config: PgJsonBuilder['config']) { super(table, config); diff --git a/drizzle-orm/src/pg-core/columns/jsonb.ts b/drizzle-orm/src/pg-core/columns/jsonb.ts index 89d1be86a..3407730db 100644 --- a/drizzle-orm/src/pg-core/columns/jsonb.ts +++ b/drizzle-orm/src/pg-core/columns/jsonb.ts @@ -15,7 +15,7 @@ export type PgJsonbBuilderInitial = PgJsonbBuilder<{ }>; export class PgJsonbBuilder> extends PgColumnBuilder { - static readonly [entityKind]: string = 'PgJsonbBuilder'; + static override readonly [entityKind]: string = 'PgJsonbBuilder'; constructor(name: T['name']) { super(name, 'json', 'PgJsonb'); @@ -30,7 +30,7 @@ export class PgJsonbBuilder } export class PgJsonb> extends PgColumn { - static readonly [entityKind]: string = 'PgJsonb'; + static override readonly [entityKind]: string = 'PgJsonb'; constructor(table: AnyPgTable<{ name: T['tableName'] }>, config: PgJsonbBuilder['config']) { super(table, config); diff --git 
a/drizzle-orm/src/pg-core/columns/line.ts b/drizzle-orm/src/pg-core/columns/line.ts index cd4b70a66..014140797 100644 --- a/drizzle-orm/src/pg-core/columns/line.ts +++ b/drizzle-orm/src/pg-core/columns/line.ts @@ -17,7 +17,7 @@ export type PgLineBuilderInitial = PgLineBuilder<{ }>; export class PgLineBuilder> extends PgColumnBuilder { - static readonly [entityKind]: string = 'PgLineBuilder'; + static override readonly [entityKind]: string = 'PgLineBuilder'; constructor(name: T['name']) { super(name, 'array', 'PgLine'); @@ -35,7 +35,7 @@ export class PgLineBuilder> } export class PgLineTuple> extends PgColumn { - static readonly [entityKind]: string = 'PgLine'; + static override readonly [entityKind]: string = 'PgLine'; getSQLType(): string { return 'line'; @@ -62,7 +62,7 @@ export type PgLineABCBuilderInitial = PgLineABCBuilder<{ }>; export class PgLineABCBuilder> extends PgColumnBuilder { - static readonly [entityKind]: string = 'PgLineABCBuilder'; + static override readonly [entityKind]: string = 'PgLineABCBuilder'; constructor(name: T['name']) { super(name, 'json', 'PgLineABC'); @@ -80,7 +80,7 @@ export class PgLineABCBuilder> extends PgColumn { - static readonly [entityKind]: string = 'PgLineABC'; + static override readonly [entityKind]: string = 'PgLineABC'; getSQLType(): string { return 'line'; diff --git a/drizzle-orm/src/pg-core/columns/macaddr.ts b/drizzle-orm/src/pg-core/columns/macaddr.ts index b43d4ab40..bfc4511f4 100644 --- a/drizzle-orm/src/pg-core/columns/macaddr.ts +++ b/drizzle-orm/src/pg-core/columns/macaddr.ts @@ -15,7 +15,7 @@ export type PgMacaddrBuilderInitial = PgMacaddrBuilder<{ }>; export class PgMacaddrBuilder> extends PgColumnBuilder { - static readonly [entityKind]: string = 'PgMacaddrBuilder'; + static override readonly [entityKind]: string = 'PgMacaddrBuilder'; constructor(name: T['name']) { super(name, 'string', 'PgMacaddr'); @@ -30,7 +30,7 @@ export class PgMacaddrBuilder> extends PgColumn { - static readonly [entityKind]: string = 
'PgMacaddr'; + static override readonly [entityKind]: string = 'PgMacaddr'; getSQLType(): string { return 'macaddr'; diff --git a/drizzle-orm/src/pg-core/columns/macaddr8.ts b/drizzle-orm/src/pg-core/columns/macaddr8.ts index 00af8c031..6c4218de0 100644 --- a/drizzle-orm/src/pg-core/columns/macaddr8.ts +++ b/drizzle-orm/src/pg-core/columns/macaddr8.ts @@ -15,7 +15,7 @@ export type PgMacaddr8BuilderInitial = PgMacaddr8Builder<{ }>; export class PgMacaddr8Builder> extends PgColumnBuilder { - static readonly [entityKind]: string = 'PgMacaddr8Builder'; + static override readonly [entityKind]: string = 'PgMacaddr8Builder'; constructor(name: T['name']) { super(name, 'string', 'PgMacaddr8'); @@ -30,7 +30,7 @@ export class PgMacaddr8Builder> extends PgColumn { - static readonly [entityKind]: string = 'PgMacaddr8'; + static override readonly [entityKind]: string = 'PgMacaddr8'; getSQLType(): string { return 'macaddr8'; diff --git a/drizzle-orm/src/pg-core/columns/numeric.ts b/drizzle-orm/src/pg-core/columns/numeric.ts index a661df21e..efeb4ab97 100644 --- a/drizzle-orm/src/pg-core/columns/numeric.ts +++ b/drizzle-orm/src/pg-core/columns/numeric.ts @@ -22,7 +22,7 @@ export class PgNumericBuilder { - static readonly [entityKind]: string = 'PgNumericBuilder'; + static override readonly [entityKind]: string = 'PgNumericBuilder'; constructor(name: T['name'], precision?: number, scale?: number) { super(name, 'string', 'PgNumeric'); @@ -39,7 +39,7 @@ export class PgNumericBuilder> extends PgColumn { - static readonly [entityKind]: string = 'PgNumeric'; + static override readonly [entityKind]: string = 'PgNumeric'; readonly precision: number | undefined; readonly scale: number | undefined; diff --git a/drizzle-orm/src/pg-core/columns/point.ts b/drizzle-orm/src/pg-core/columns/point.ts index 584e395f9..827579ad8 100644 --- a/drizzle-orm/src/pg-core/columns/point.ts +++ b/drizzle-orm/src/pg-core/columns/point.ts @@ -19,7 +19,7 @@ export type PgPointTupleBuilderInitial = 
PgPointTupleBuild export class PgPointTupleBuilder> extends PgColumnBuilder { - static readonly [entityKind]: string = 'PgPointTupleBuilder'; + static override readonly [entityKind]: string = 'PgPointTupleBuilder'; constructor(name: string) { super(name, 'array', 'PgPointTuple'); @@ -37,7 +37,7 @@ export class PgPointTupleBuilder> extends PgColumn { - static readonly [entityKind]: string = 'PgPointTuple'; + static override readonly [entityKind]: string = 'PgPointTuple'; getSQLType(): string { return 'point'; @@ -69,7 +69,7 @@ export type PgPointObjectBuilderInitial = PgPointObjectBui export class PgPointObjectBuilder> extends PgColumnBuilder { - static readonly [entityKind]: string = 'PgPointObjectBuilder'; + static override readonly [entityKind]: string = 'PgPointObjectBuilder'; constructor(name: string) { super(name, 'json', 'PgPointObject'); @@ -87,7 +87,7 @@ export class PgPointObjectBuilder> extends PgColumn { - static readonly [entityKind]: string = 'PgPointObject'; + static override readonly [entityKind]: string = 'PgPointObject'; getSQLType(): string { return 'point'; diff --git a/drizzle-orm/src/pg-core/columns/postgis_extension/geometry.ts b/drizzle-orm/src/pg-core/columns/postgis_extension/geometry.ts index 18e6c946d..853c3dff9 100644 --- a/drizzle-orm/src/pg-core/columns/postgis_extension/geometry.ts +++ b/drizzle-orm/src/pg-core/columns/postgis_extension/geometry.ts @@ -18,7 +18,7 @@ export type PgGeometryBuilderInitial = PgGeometryBuilder<{ }>; export class PgGeometryBuilder> extends PgColumnBuilder { - static readonly [entityKind]: string = 'PgGeometryBuilder'; + static override readonly [entityKind]: string = 'PgGeometryBuilder'; constructor(name: T['name']) { super(name, 'array', 'PgGeometry'); @@ -36,7 +36,7 @@ export class PgGeometryBuilder> extends PgColumn { - static readonly [entityKind]: string = 'PgGeometry'; + static override readonly [entityKind]: string = 'PgGeometry'; getSQLType(): string { return 'geometry(point)'; @@ -64,7 +64,7 @@ 
export type PgGeometryObjectBuilderInitial = PgGeometryObj export class PgGeometryObjectBuilder> extends PgColumnBuilder { - static readonly [entityKind]: string = 'PgGeometryObjectBuilder'; + static override readonly [entityKind]: string = 'PgGeometryObjectBuilder'; constructor(name: T['name']) { super(name, 'json', 'PgGeometryObject'); @@ -82,7 +82,7 @@ export class PgGeometryObjectBuilder> extends PgColumn { - static readonly [entityKind]: string = 'PgGeometryObject'; + static override readonly [entityKind]: string = 'PgGeometryObject'; getSQLType(): string { return 'geometry(point)'; diff --git a/drizzle-orm/src/pg-core/columns/real.ts b/drizzle-orm/src/pg-core/columns/real.ts index f39527a45..0e3de4b2e 100644 --- a/drizzle-orm/src/pg-core/columns/real.ts +++ b/drizzle-orm/src/pg-core/columns/real.ts @@ -18,7 +18,7 @@ export class PgRealBuilder T, { length: number | undefined } > { - static readonly [entityKind]: string = 'PgRealBuilder'; + static override readonly [entityKind]: string = 'PgRealBuilder'; constructor(name: T['name'], length?: number) { super(name, 'number', 'PgReal'); @@ -34,7 +34,7 @@ export class PgRealBuilder } export class PgReal> extends PgColumn { - static readonly [entityKind]: string = 'PgReal'; + static override readonly [entityKind]: string = 'PgReal'; constructor(table: AnyPgTable<{ name: T['tableName'] }>, config: PgRealBuilder['config']) { super(table, config); diff --git a/drizzle-orm/src/pg-core/columns/serial.ts b/drizzle-orm/src/pg-core/columns/serial.ts index a4d7f8e30..6a0196c38 100644 --- a/drizzle-orm/src/pg-core/columns/serial.ts +++ b/drizzle-orm/src/pg-core/columns/serial.ts @@ -25,7 +25,7 @@ export type PgSerialBuilderInitial = NotNull< >; export class PgSerialBuilder> extends PgColumnBuilder { - static readonly [entityKind]: string = 'PgSerialBuilder'; + static override readonly [entityKind]: string = 'PgSerialBuilder'; constructor(name: T['name']) { super(name, 'number', 'PgSerial'); @@ -42,7 +42,7 @@ export class 
PgSerialBuilder> extends PgColumn { - static readonly [entityKind]: string = 'PgSerial'; + static override readonly [entityKind]: string = 'PgSerial'; getSQLType(): string { return 'serial'; diff --git a/drizzle-orm/src/pg-core/columns/smallint.ts b/drizzle-orm/src/pg-core/columns/smallint.ts index 20204cd3c..1cdfe141f 100644 --- a/drizzle-orm/src/pg-core/columns/smallint.ts +++ b/drizzle-orm/src/pg-core/columns/smallint.ts @@ -18,7 +18,7 @@ export type PgSmallIntBuilderInitial = PgSmallIntBuilder<{ export class PgSmallIntBuilder> extends PgIntColumnBaseBuilder { - static readonly [entityKind]: string = 'PgSmallIntBuilder'; + static override readonly [entityKind]: string = 'PgSmallIntBuilder'; constructor(name: T['name']) { super(name, 'number', 'PgSmallInt'); @@ -33,7 +33,7 @@ export class PgSmallIntBuilder> extends PgColumn { - static readonly [entityKind]: string = 'PgSmallInt'; + static override readonly [entityKind]: string = 'PgSmallInt'; getSQLType(): string { return 'smallint'; diff --git a/drizzle-orm/src/pg-core/columns/smallserial.ts b/drizzle-orm/src/pg-core/columns/smallserial.ts index ec2204034..456dc47f7 100644 --- a/drizzle-orm/src/pg-core/columns/smallserial.ts +++ b/drizzle-orm/src/pg-core/columns/smallserial.ts @@ -27,7 +27,7 @@ export type PgSmallSerialBuilderInitial = NotNull< export class PgSmallSerialBuilder> extends PgColumnBuilder { - static readonly [entityKind]: string = 'PgSmallSerialBuilder'; + static override readonly [entityKind]: string = 'PgSmallSerialBuilder'; constructor(name: T['name']) { super(name, 'number', 'PgSmallSerial'); @@ -47,7 +47,7 @@ export class PgSmallSerialBuilder> extends PgColumn { - static readonly [entityKind]: string = 'PgSmallSerial'; + static override readonly [entityKind]: string = 'PgSmallSerial'; getSQLType(): string { return 'smallserial'; diff --git a/drizzle-orm/src/pg-core/columns/text.ts b/drizzle-orm/src/pg-core/columns/text.ts index 522135e5c..6845f0e74 100644 --- 
a/drizzle-orm/src/pg-core/columns/text.ts +++ b/drizzle-orm/src/pg-core/columns/text.ts @@ -18,7 +18,7 @@ type PgTextBuilderInitial, > extends PgColumnBuilder { - static readonly [entityKind]: string = 'PgTextBuilder'; + static override readonly [entityKind]: string = 'PgTextBuilder'; constructor( name: T['name'], @@ -39,7 +39,7 @@ export class PgTextBuilder< export class PgText> extends PgColumn { - static readonly [entityKind]: string = 'PgText'; + static override readonly [entityKind]: string = 'PgText'; override readonly enumValues = this.config.enumValues; diff --git a/drizzle-orm/src/pg-core/columns/time.ts b/drizzle-orm/src/pg-core/columns/time.ts index e2b29f1f7..9b3ff51e0 100644 --- a/drizzle-orm/src/pg-core/columns/time.ts +++ b/drizzle-orm/src/pg-core/columns/time.ts @@ -21,7 +21,7 @@ export class PgTimeBuilder T, { withTimezone: boolean; precision: number | undefined } > { - static readonly [entityKind]: string = 'PgTimeBuilder'; + static override readonly [entityKind]: string = 'PgTimeBuilder'; constructor( name: T['name'], @@ -42,7 +42,7 @@ export class PgTimeBuilder } export class PgTime> extends PgColumn { - static readonly [entityKind]: string = 'PgTime'; + static override readonly [entityKind]: string = 'PgTime'; readonly withTimezone: boolean; readonly precision: number | undefined; diff --git a/drizzle-orm/src/pg-core/columns/timestamp.ts b/drizzle-orm/src/pg-core/columns/timestamp.ts index 08474bf68..6879106e0 100644 --- a/drizzle-orm/src/pg-core/columns/timestamp.ts +++ b/drizzle-orm/src/pg-core/columns/timestamp.ts @@ -22,7 +22,7 @@ export class PgTimestampBuilder { - static readonly [entityKind]: string = 'PgTimestampBuilder'; + static override readonly [entityKind]: string = 'PgTimestampBuilder'; constructor( name: T['name'], @@ -43,7 +43,7 @@ export class PgTimestampBuilder> extends PgColumn { - static readonly [entityKind]: string = 'PgTimestamp'; + static override readonly [entityKind]: string = 'PgTimestamp'; readonly withTimezone: 
boolean; readonly precision: number | undefined; @@ -84,7 +84,7 @@ export class PgTimestampStringBuilder { - static readonly [entityKind]: string = 'PgTimestampStringBuilder'; + static override readonly [entityKind]: string = 'PgTimestampStringBuilder'; constructor( name: T['name'], @@ -108,7 +108,7 @@ export class PgTimestampStringBuilder> extends PgColumn { - static readonly [entityKind]: string = 'PgTimestampString'; + static override readonly [entityKind]: string = 'PgTimestampString'; readonly withTimezone: boolean; readonly precision: number | undefined; diff --git a/drizzle-orm/src/pg-core/columns/uuid.ts b/drizzle-orm/src/pg-core/columns/uuid.ts index 851036d8d..d0e5a6830 100644 --- a/drizzle-orm/src/pg-core/columns/uuid.ts +++ b/drizzle-orm/src/pg-core/columns/uuid.ts @@ -16,7 +16,7 @@ export type PgUUIDBuilderInitial = PgUUIDBuilder<{ }>; export class PgUUIDBuilder> extends PgColumnBuilder { - static readonly [entityKind]: string = 'PgUUIDBuilder'; + static override readonly [entityKind]: string = 'PgUUIDBuilder'; constructor(name: T['name']) { super(name, 'string', 'PgUUID'); @@ -38,7 +38,7 @@ export class PgUUIDBuilder } export class PgUUID> extends PgColumn { - static readonly [entityKind]: string = 'PgUUID'; + static override readonly [entityKind]: string = 'PgUUID'; getSQLType(): string { return 'uuid'; diff --git a/drizzle-orm/src/pg-core/columns/varchar.ts b/drizzle-orm/src/pg-core/columns/varchar.ts index bc9d1b160..78ee0db96 100644 --- a/drizzle-orm/src/pg-core/columns/varchar.ts +++ b/drizzle-orm/src/pg-core/columns/varchar.ts @@ -19,7 +19,7 @@ export class PgVarcharBuilder { - static readonly [entityKind]: string = 'PgVarcharBuilder'; + static override readonly [entityKind]: string = 'PgVarcharBuilder'; constructor(name: T['name'], config: PgVarcharConfig) { super(name, 'string', 'PgVarchar'); @@ -38,7 +38,7 @@ export class PgVarcharBuilder> extends PgColumn { - static readonly [entityKind]: string = 'PgVarchar'; + static override readonly 
[entityKind]: string = 'PgVarchar'; readonly length = this.config.length; override readonly enumValues = this.config.enumValues; diff --git a/drizzle-orm/src/pg-core/columns/vector_extension/bit.ts b/drizzle-orm/src/pg-core/columns/vector_extension/bit.ts index a841c28e5..81eea6b2f 100644 --- a/drizzle-orm/src/pg-core/columns/vector_extension/bit.ts +++ b/drizzle-orm/src/pg-core/columns/vector_extension/bit.ts @@ -21,7 +21,7 @@ export class PgBinaryVectorBuilder { - static readonly [entityKind]: string = 'PgBinaryVectorBuilder'; + static override readonly [entityKind]: string = 'PgBinaryVectorBuilder'; constructor(name: string, config: PgBinaryVectorConfig) { super(name, 'string', 'PgBinaryVector'); @@ -42,7 +42,7 @@ export class PgBinaryVectorBuilder> extends PgColumn { - static readonly [entityKind]: string = 'PgBinaryVector'; + static override readonly [entityKind]: string = 'PgBinaryVector'; readonly dimensions = this.config.dimensions; diff --git a/drizzle-orm/src/pg-core/columns/vector_extension/halfvec.ts b/drizzle-orm/src/pg-core/columns/vector_extension/halfvec.ts index 7218f8114..e12d0d22f 100644 --- a/drizzle-orm/src/pg-core/columns/vector_extension/halfvec.ts +++ b/drizzle-orm/src/pg-core/columns/vector_extension/halfvec.ts @@ -19,7 +19,7 @@ export class PgHalfVectorBuilder { - static readonly [entityKind]: string = 'PgHalfVectorBuilder'; + static override readonly [entityKind]: string = 'PgHalfVectorBuilder'; constructor(name: string, config: PgHalfVectorConfig) { super(name, 'array', 'PgHalfVector'); @@ -40,7 +40,7 @@ export class PgHalfVectorBuilder> extends PgColumn { - static readonly [entityKind]: string = 'PgHalfVector'; + static override readonly [entityKind]: string = 'PgHalfVector'; readonly dimensions = this.config.dimensions; diff --git a/drizzle-orm/src/pg-core/columns/vector_extension/sparsevec.ts b/drizzle-orm/src/pg-core/columns/vector_extension/sparsevec.ts index 2bdbf1ac3..3881b338f 100644 --- 
a/drizzle-orm/src/pg-core/columns/vector_extension/sparsevec.ts +++ b/drizzle-orm/src/pg-core/columns/vector_extension/sparsevec.ts @@ -21,7 +21,7 @@ export class PgSparseVectorBuilder { - static readonly [entityKind]: string = 'PgSparseVectorBuilder'; + static override readonly [entityKind]: string = 'PgSparseVectorBuilder'; constructor(name: string, config: PgSparseVectorConfig) { super(name, 'string', 'PgSparseVector'); @@ -42,7 +42,7 @@ export class PgSparseVectorBuilder> extends PgColumn { - static readonly [entityKind]: string = 'PgSparseVector'; + static override readonly [entityKind]: string = 'PgSparseVector'; readonly dimensions = this.config.dimensions; diff --git a/drizzle-orm/src/pg-core/columns/vector_extension/vector.ts b/drizzle-orm/src/pg-core/columns/vector_extension/vector.ts index b6ad9caff..eaac075dc 100644 --- a/drizzle-orm/src/pg-core/columns/vector_extension/vector.ts +++ b/drizzle-orm/src/pg-core/columns/vector_extension/vector.ts @@ -19,7 +19,7 @@ export class PgVectorBuilder { - static readonly [entityKind]: string = 'PgVectorBuilder'; + static override readonly [entityKind]: string = 'PgVectorBuilder'; constructor(name: string, config: PgVectorConfig) { super(name, 'array', 'PgVector'); @@ -37,7 +37,7 @@ export class PgVectorBuilder> extends PgColumn { - static readonly [entityKind]: string = 'PgVector'; + static override readonly [entityKind]: string = 'PgVector'; readonly dimensions = this.config.dimensions; diff --git a/drizzle-orm/src/pg-core/query-builders/count.ts b/drizzle-orm/src/pg-core/query-builders/count.ts index c823f7c6f..af16fda01 100644 --- a/drizzle-orm/src/pg-core/query-builders/count.ts +++ b/drizzle-orm/src/pg-core/query-builders/count.ts @@ -8,7 +8,7 @@ export class PgCountBuilder< > extends SQL implements Promise, SQLWrapper { private sql: SQL; - static readonly [entityKind] = 'PgCountBuilder'; + static override readonly [entityKind] = 'PgCountBuilder'; [Symbol.toStringTag] = 'PgCountBuilder'; private session: 
TSession; diff --git a/drizzle-orm/src/pg-core/query-builders/delete.ts b/drizzle-orm/src/pg-core/query-builders/delete.ts index dc127f167..b42d46711 100644 --- a/drizzle-orm/src/pg-core/query-builders/delete.ts +++ b/drizzle-orm/src/pg-core/query-builders/delete.ts @@ -128,7 +128,7 @@ export class PgDeleteBase< RunnableQuery : TReturning[], 'pg'>, SQLWrapper { - static readonly [entityKind]: string = 'PgDelete'; + static override readonly [entityKind]: string = 'PgDelete'; private config: PgDeleteConfig; diff --git a/drizzle-orm/src/pg-core/query-builders/insert.ts b/drizzle-orm/src/pg-core/query-builders/insert.ts index 02bcb972c..9f494ab50 100644 --- a/drizzle-orm/src/pg-core/query-builders/insert.ts +++ b/drizzle-orm/src/pg-core/query-builders/insert.ts @@ -166,7 +166,7 @@ export class PgInsertBase< RunnableQuery : TReturning[], 'pg'>, SQLWrapper { - static readonly [entityKind]: string = 'PgInsert'; + static override readonly [entityKind]: string = 'PgInsert'; private config: PgInsertConfig; diff --git a/drizzle-orm/src/pg-core/query-builders/query.ts b/drizzle-orm/src/pg-core/query-builders/query.ts index 17cea6cff..07f485669 100644 --- a/drizzle-orm/src/pg-core/query-builders/query.ts +++ b/drizzle-orm/src/pg-core/query-builders/query.ts @@ -65,7 +65,7 @@ export class RelationalQueryBuilder extends QueryPromise implements RunnableQuery, SQLWrapper { - static readonly [entityKind]: string = 'PgRelationalQuery'; + static override readonly [entityKind]: string = 'PgRelationalQuery'; declare readonly _: { readonly dialect: 'pg'; diff --git a/drizzle-orm/src/pg-core/query-builders/raw.ts b/drizzle-orm/src/pg-core/query-builders/raw.ts index 871581dfb..d08c1ee14 100644 --- a/drizzle-orm/src/pg-core/query-builders/raw.ts +++ b/drizzle-orm/src/pg-core/query-builders/raw.ts @@ -9,7 +9,7 @@ export interface PgRaw extends QueryPromise, RunnableQuery extends QueryPromise implements RunnableQuery, SQLWrapper, PreparedQuery { - static readonly [entityKind]: string = 
'PgRaw'; + static override readonly [entityKind]: string = 'PgRaw'; declare readonly _: { readonly dialect: 'pg'; diff --git a/drizzle-orm/src/pg-core/query-builders/refresh-materialized-view.ts b/drizzle-orm/src/pg-core/query-builders/refresh-materialized-view.ts index d2bedac68..62ade9139 100644 --- a/drizzle-orm/src/pg-core/query-builders/refresh-materialized-view.ts +++ b/drizzle-orm/src/pg-core/query-builders/refresh-materialized-view.ts @@ -30,7 +30,7 @@ export class PgRefreshMaterializedView extends QueryPromise> implements RunnableQuery, 'pg'>, SQLWrapper { - static readonly [entityKind]: string = 'PgRefreshMaterializedView'; + static override readonly [entityKind]: string = 'PgRefreshMaterializedView'; private config: { view: PgMaterializedView; diff --git a/drizzle-orm/src/pg-core/query-builders/select.ts b/drizzle-orm/src/pg-core/query-builders/select.ts index d2406995b..6e9a961c0 100644 --- a/drizzle-orm/src/pg-core/query-builders/select.ts +++ b/drizzle-orm/src/pg-core/query-builders/select.ts @@ -139,7 +139,7 @@ export abstract class PgSelectQueryBuilderBase< TResult extends any[] = SelectResult[], TSelectedFields extends ColumnsSelection = BuildSubquerySelection, > extends TypedQueryBuilder { - static readonly [entityKind]: string = 'PgSelectQueryBuilder'; + static override readonly [entityKind]: string = 'PgSelectQueryBuilder'; override readonly _: { readonly dialect: 'pg'; @@ -947,7 +947,7 @@ export class PgSelectBase< TResult, TSelectedFields > implements RunnableQuery, SQLWrapper { - static readonly [entityKind]: string = 'PgSelect'; + static override readonly [entityKind]: string = 'PgSelect'; /** @internal */ _prepare(name?: string): PgSelectPrepare { diff --git a/drizzle-orm/src/pg-core/query-builders/update.ts b/drizzle-orm/src/pg-core/query-builders/update.ts index ec404ac22..2c63dacc0 100644 --- a/drizzle-orm/src/pg-core/query-builders/update.ts +++ b/drizzle-orm/src/pg-core/query-builders/update.ts @@ -159,7 +159,7 @@ export class 
PgUpdateBase< RunnableQuery : TReturning[], 'pg'>, SQLWrapper { - static readonly [entityKind]: string = 'PgUpdate'; + static override readonly [entityKind]: string = 'PgUpdate'; private config: PgUpdateConfig; diff --git a/drizzle-orm/src/pg-core/session.ts b/drizzle-orm/src/pg-core/session.ts index ea820f2d8..d909e82db 100644 --- a/drizzle-orm/src/pg-core/session.ts +++ b/drizzle-orm/src/pg-core/session.ts @@ -105,7 +105,7 @@ export abstract class PgTransaction< TFullSchema extends Record = Record, TSchema extends TablesRelationalConfig = Record, > extends PgDatabase { - static readonly [entityKind]: string = 'PgTransaction'; + static override readonly [entityKind]: string = 'PgTransaction'; constructor( dialect: PgDialect, diff --git a/drizzle-orm/src/pg-core/table.ts b/drizzle-orm/src/pg-core/table.ts index c09a56233..5bf9a9895 100644 --- a/drizzle-orm/src/pg-core/table.ts +++ b/drizzle-orm/src/pg-core/table.ts @@ -24,7 +24,7 @@ export type TableConfig = TableConfigBase; export const InlineForeignKeys = Symbol.for('drizzle:PgInlineForeignKeys'); export class PgTable extends Table { - static readonly [entityKind]: string = 'PgTable'; + static override readonly [entityKind]: string = 'PgTable'; /** @internal */ static override readonly Symbol = Object.assign({}, Table.Symbol, { diff --git a/drizzle-orm/src/pg-core/view-base.ts b/drizzle-orm/src/pg-core/view-base.ts index 87f76ac24..d3f52a501 100644 --- a/drizzle-orm/src/pg-core/view-base.ts +++ b/drizzle-orm/src/pg-core/view-base.ts @@ -6,7 +6,7 @@ export abstract class PgViewBase< TExisting extends boolean = boolean, TSelectedFields extends ColumnsSelection = ColumnsSelection, > extends View { - static readonly [entityKind]: string = 'PgViewBase'; + static override readonly [entityKind]: string = 'PgViewBase'; declare readonly _: View['_'] & { readonly viewBrand: 'PgViewBase'; diff --git a/drizzle-orm/src/pg-core/view.ts b/drizzle-orm/src/pg-core/view.ts index 22c510dce..2f88d7e17 100644 --- 
a/drizzle-orm/src/pg-core/view.ts +++ b/drizzle-orm/src/pg-core/view.ts @@ -1,10 +1,11 @@ import type { BuildColumns } from '~/column-builder.ts'; -import { entityKind } from '~/entity.ts'; +import { entityKind, is } from '~/entity.ts'; import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { AddAliasToSelection } from '~/query-builders/select.types.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; import type { ColumnsSelection, SQL } from '~/sql/sql.ts'; import { getTableColumns } from '~/utils.ts'; +import type { RequireAtLeastOne } from '~/utils.ts'; import type { PgColumn, PgColumnBuilderBase } from './columns/common.ts'; import { QueryBuilder } from './query-builders/query-builder.ts'; import type { SelectedFields } from './query-builders/select.types.ts'; @@ -12,11 +13,11 @@ import { pgTable } from './table.ts'; import { PgViewBase } from './view-base.ts'; import { PgViewConfig } from './view-common.ts'; -export interface ViewWithConfig { +export type ViewWithConfig = RequireAtLeastOne<{ checkOption: 'local' | 'cascaded'; securityBarrier: boolean; securityInvoker: boolean; -} +}>; export class DefaultViewBuilderCore { static readonly [entityKind]: string = 'PgDefaultViewBuilderCore'; @@ -42,7 +43,7 @@ export class DefaultViewBuilderCore extends DefaultViewBuilderCore<{ name: TName }> { - static readonly [entityKind]: string = 'PgViewBuilder'; + static override readonly [entityKind]: string = 'PgViewBuilder'; as( qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), @@ -76,7 +77,7 @@ export class ManualViewBuilder< TName extends string = string, TColumns extends Record = Record, > extends DefaultViewBuilderCore<{ name: TName; columns: TColumns }> { - static readonly [entityKind]: string = 'PgManualViewBuilder'; + static override readonly [entityKind]: string = 'PgManualViewBuilder'; private columns: Record; @@ -130,9 +131,26 @@ export class ManualViewBuilder< } } -export interface 
PgMaterializedViewWithConfig { - [Key: string]: string | number | boolean | SQL; -} +export type PgMaterializedViewWithConfig = RequireAtLeastOne<{ + fillfactor: number; + toastTupleTarget: number; + parallelWorkers: number; + autovacuumEnabled: boolean; + vacuumIndexCleanup: 'auto' | 'off' | 'on'; + vacuumTruncate: boolean; + autovacuumVacuumThreshold: number; + autovacuumVacuumScaleFactor: number; + autovacuumVacuumCostDelay: number; + autovacuumVacuumCostLimit: number; + autovacuumFreezeMinAge: number; + autovacuumFreezeMaxAge: number; + autovacuumFreezeTableAge: number; + autovacuumMultixactFreezeMinAge: number; + autovacuumMultixactFreezeMaxAge: number; + autovacuumMultixactFreezeTableAge: number; + logAutovacuumMinDuration: number; + userCatalogTable: boolean; +}>; export class MaterializedViewBuilderCore { static readonly [entityKind]: string = 'PgMaterializedViewBuilderCore'; @@ -178,7 +196,7 @@ export class MaterializedViewBuilderCore extends MaterializedViewBuilderCore<{ name: TName }> { - static readonly [entityKind]: string = 'PgMaterializedViewBuilder'; + static override readonly [entityKind]: string = 'PgMaterializedViewBuilder'; as( qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), @@ -217,7 +235,7 @@ export class ManualMaterializedViewBuilder< TName extends string = string, TColumns extends Record = Record, > extends MaterializedViewBuilderCore<{ name: TName; columns: TColumns }> { - static readonly [entityKind]: string = 'PgManualMaterializedViewBuilder'; + static override readonly [entityKind]: string = 'PgManualMaterializedViewBuilder'; private columns: Record; @@ -233,7 +251,12 @@ export class ManualMaterializedViewBuilder< existing(): PgMaterializedViewWithSelection> { return new Proxy( new PgMaterializedView({ - pgConfig: undefined, + pgConfig: { + tablespace: this.config.tablespace, + using: this.config.using, + with: this.config.with, + withNoData: this.config.withNoData, + }, config: { name: this.name, schema: this.schema, 
@@ -253,7 +276,12 @@ export class ManualMaterializedViewBuilder< as(query: SQL): PgMaterializedViewWithSelection> { return new Proxy( new PgMaterializedView({ - pgConfig: undefined, + pgConfig: { + tablespace: this.config.tablespace, + using: this.config.using, + with: this.config.with, + withNoData: this.config.withNoData, + }, config: { name: this.name, schema: this.schema, @@ -276,7 +304,7 @@ export class PgView< TExisting extends boolean = boolean, TSelectedFields extends ColumnsSelection = ColumnsSelection, > extends PgViewBase { - static readonly [entityKind]: string = 'PgView'; + static override readonly [entityKind]: string = 'PgView'; [PgViewConfig]: { with?: ViewWithConfig; @@ -315,7 +343,7 @@ export class PgMaterializedView< TExisting extends boolean = boolean, TSelectedFields extends ColumnsSelection = ColumnsSelection, > extends PgViewBase { - static readonly [entityKind]: string = 'PgMaterializedView'; + static override readonly [entityKind]: string = 'PgMaterializedView'; readonly [PgMaterializedViewConfig]: { readonly with?: PgMaterializedViewWithConfig; @@ -398,3 +426,11 @@ export function pgMaterializedView( ): MaterializedViewBuilder | ManualMaterializedViewBuilder { return pgMaterializedViewWithSchema(name, columns, undefined); } + +export function isPgView(obj: unknown): obj is PgView { + return is(obj, PgView); +} + +export function isPgMaterializedView(obj: unknown): obj is PgMaterializedView { + return is(obj, PgMaterializedView); +} diff --git a/drizzle-orm/src/pg-proxy/driver.ts b/drizzle-orm/src/pg-proxy/driver.ts index 8ccd9ba02..955dc2bb4 100644 --- a/drizzle-orm/src/pg-proxy/driver.ts +++ b/drizzle-orm/src/pg-proxy/driver.ts @@ -14,7 +14,7 @@ import { type PgRemoteQueryResultHKT, PgRemoteSession } from './session.ts'; export class PgRemoteDatabase< TSchema extends Record = Record, > extends PgDatabase { - static readonly [entityKind]: string = 'PgRemoteDatabase'; + static override readonly [entityKind]: string = 'PgRemoteDatabase'; } 
export type RemoteCallback = ( diff --git a/drizzle-orm/src/pg-proxy/session.ts b/drizzle-orm/src/pg-proxy/session.ts index 1a30c0a3c..9d433502c 100644 --- a/drizzle-orm/src/pg-proxy/session.ts +++ b/drizzle-orm/src/pg-proxy/session.ts @@ -21,7 +21,7 @@ export class PgRemoteSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgSession { - static readonly [entityKind]: string = 'PgRemoteSession'; + static override readonly [entityKind]: string = 'PgRemoteSession'; private logger: Logger; @@ -66,7 +66,7 @@ export class PgProxyTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgTransaction { - static readonly [entityKind]: string = 'PgProxyTransaction'; + static override readonly [entityKind]: string = 'PgProxyTransaction'; override async transaction( _transaction: (tx: PgProxyTransaction) => Promise, @@ -76,7 +76,7 @@ export class PgProxyTransaction< } export class PreparedQuery extends PreparedQueryBase { - static readonly [entityKind]: string = 'PgProxyPreparedQuery'; + static override readonly [entityKind]: string = 'PgProxyPreparedQuery'; constructor( private client: RemoteCallback, diff --git a/drizzle-orm/src/pglite/driver.ts b/drizzle-orm/src/pglite/driver.ts index b6cb8bd7e..89d37d1f9 100644 --- a/drizzle-orm/src/pglite/driver.ts +++ b/drizzle-orm/src/pglite/driver.ts @@ -1,3 +1,4 @@ +import { PGlite, type PGliteOptions } from '@electric-sql/pglite'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { DefaultLogger } from '~/logger.ts'; @@ -9,7 +10,7 @@ import { type RelationalSchemaConfig, type TablesRelationalConfig, } from '~/relations.ts'; -import type { DrizzleConfig } from '~/utils.ts'; +import type { DrizzleConfig, IfNotImported, ImportTypeError } from '~/utils.ts'; import type { PgliteClient, PgliteQueryResultHKT } from './session.ts'; import { PgliteSession } from './session.ts'; @@ -37,10 +38,10 @@ export class PgliteDriver { export 
class PgliteDatabase< TSchema extends Record = Record, > extends PgDatabase { - static readonly [entityKind]: string = 'PgliteDatabase'; + static override readonly [entityKind]: string = 'PgliteDatabase'; } -export function drizzle = Record>( +function construct = Record>( client: PgliteClient, config: DrizzleConfig = {}, ): PgliteDatabase & { @@ -74,3 +75,72 @@ export function drizzle = Record = Record, + TClient extends PGlite = PGlite, +>( + ...params: IfNotImported< + PGlite, + [ImportTypeError<'@electric-sql/pglite'>], + | [] + | [ + TClient | string, + ] + | [ + TClient | string, + DrizzleConfig, + ] + | [ + ( + & DrizzleConfig + & ({ + connection?: (PGliteOptions & { dataDir?: string }) | string; + } | { + client: TClient; + }) + ), + ] + > +): PgliteDatabase & { + $client: TClient; +} { + // eslint-disable-next-line no-instanceof/no-instanceof + if (params[0] instanceof PGlite) { + return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; + } + + if (typeof params[0] === 'object') { + const { connection, client, ...drizzleConfig } = params[0] as { + connection?: PGliteOptions & { dataDir: string }; + client?: TClient; + } & DrizzleConfig; + + if (client) return construct(client, drizzleConfig) as any; + + if (typeof connection === 'object') { + const { dataDir, ...options } = connection; + + const instance = new PGlite(dataDir, options); + + return construct(instance, drizzleConfig) as any; + } + + const instance = new PGlite(connection); + + return construct(instance, drizzleConfig) as any; + } + + const instance = new PGlite(params[0]); + return construct(instance, params[1]) as any; +} + +export namespace drizzle { + export function mock = Record>( + config?: DrizzleConfig, + ): PgliteDatabase & { + $client: '$client is not available on drizzle.mock()'; + } { + return construct({} as any, config) as any; + } +} diff --git a/drizzle-orm/src/pglite/session.ts b/drizzle-orm/src/pglite/session.ts index ebf7701a6..18c341fc6 100644 
--- a/drizzle-orm/src/pglite/session.ts +++ b/drizzle-orm/src/pglite/session.ts @@ -15,7 +15,7 @@ import { types } from '@electric-sql/pglite'; export type PgliteClient = PGlite; export class PglitePreparedQuery extends PgPreparedQuery { - static readonly [entityKind]: string = 'PglitePreparedQuery'; + static override readonly [entityKind]: string = 'PglitePreparedQuery'; private rawQueryConfig: QueryOptions; private queryConfig: QueryOptions; @@ -89,7 +89,7 @@ export class PgliteSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgSession { - static readonly [entityKind]: string = 'PgliteSession'; + static override readonly [entityKind]: string = 'PgliteSession'; private logger: Logger; @@ -153,7 +153,7 @@ export class PgliteTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgTransaction { - static readonly [entityKind]: string = 'PgliteTransaction'; + static override readonly [entityKind]: string = 'PgliteTransaction'; override async transaction(transaction: (tx: PgliteTransaction) => Promise): Promise { const savepointName = `sp${this.nestedIndex + 1}`; diff --git a/drizzle-orm/src/planetscale-serverless/driver.ts b/drizzle-orm/src/planetscale-serverless/driver.ts index 2b851f75b..1865673bf 100644 --- a/drizzle-orm/src/planetscale-serverless/driver.ts +++ b/drizzle-orm/src/planetscale-serverless/driver.ts @@ -1,4 +1,4 @@ -import type { Connection } from '@planetscale/database'; +import type { Config, Connection } from '@planetscale/database'; import { Client } from '@planetscale/database'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; @@ -11,7 +11,7 @@ import { type RelationalSchemaConfig, type TablesRelationalConfig, } from '~/relations.ts'; -import type { DrizzleConfig } from '~/utils.ts'; +import type { DrizzleConfig, IfNotImported, ImportTypeError } from '~/utils.ts'; import type { PlanetScalePreparedQueryHKT, PlanetscaleQueryResultHKT } from 
'./session.ts'; import { PlanetscaleSession } from './session.ts'; @@ -22,10 +22,10 @@ export interface PlanetscaleSDriverOptions { export class PlanetScaleDatabase< TSchema extends Record = Record, > extends MySqlDatabase { - static readonly [entityKind]: string = 'PlanetScaleDatabase'; + static override readonly [entityKind]: string = 'PlanetScaleDatabase'; } -export function drizzle< +function construct< TSchema extends Record = Record, TClient extends Client | Connection = Client | Connection, >( @@ -95,3 +95,69 @@ Starting from version 0.30.0, you will encounter an error if you attempt to use return db as any; } + +export function drizzle< + TSchema extends Record = Record, + TClient extends Client = Client, +>( + ...params: IfNotImported< + Config, + [ImportTypeError<'@planetscale/database'>], + [ + TClient | string, + ] | [ + TClient | string, + DrizzleConfig, + ] | [ + ( + & DrizzleConfig + & ({ + connection: string | Config; + } | { + client: TClient; + }) + ), + ] + > +): PlanetScaleDatabase & { + $client: TClient; +} { + // eslint-disable-next-line no-instanceof/no-instanceof + if (params[0] instanceof Client) { + return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; + } + + if (typeof params[0] === 'object') { + const { connection, client, ...drizzleConfig } = params[0] as + & { connection?: Config | string; client?: TClient } + & DrizzleConfig; + + if (client) return construct(client, drizzleConfig) as any; + + const instance = typeof connection === 'string' + ? 
new Client({ + url: connection, + }) + : new Client( + connection!, + ); + + return construct(instance, drizzleConfig) as any; + } + + const instance = new Client({ + url: params[0], + }); + + return construct(instance, params[1]) as any; +} + +export namespace drizzle { + export function mock = Record>( + config?: DrizzleConfig, + ): PlanetScaleDatabase & { + $client: '$client is not available on drizzle.mock()'; + } { + return construct({} as any, config) as any; + } +} diff --git a/drizzle-orm/src/planetscale-serverless/session.ts b/drizzle-orm/src/planetscale-serverless/session.ts index 987529d7c..4e6a0f432 100644 --- a/drizzle-orm/src/planetscale-serverless/session.ts +++ b/drizzle-orm/src/planetscale-serverless/session.ts @@ -18,7 +18,7 @@ import { fillPlaceholders, type Query, type SQL, sql } from '~/sql/sql.ts'; import { type Assume, mapResultRow } from '~/utils.ts'; export class PlanetScalePreparedQuery extends MySqlPreparedQuery { - static readonly [entityKind]: string = 'PlanetScalePreparedQuery'; + static override readonly [entityKind]: string = 'PlanetScalePreparedQuery'; private rawQuery = { as: 'object' } as const; private query = { as: 'array' } as const; @@ -106,7 +106,7 @@ export class PlanetscaleSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends MySqlSession { - static readonly [entityKind]: string = 'PlanetscaleSession'; + static override readonly [entityKind]: string = 'PlanetscaleSession'; private logger: Logger; private client: Client | Transaction | Connection; @@ -191,7 +191,7 @@ export class PlanetScaleTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends MySqlTransaction { - static readonly [entityKind]: string = 'PlanetScaleTransaction'; + static override readonly [entityKind]: string = 'PlanetScaleTransaction'; constructor( dialect: MySqlDialect, diff --git a/drizzle-orm/src/postgres-js/driver.ts b/drizzle-orm/src/postgres-js/driver.ts index 641e413d0..5c2979c84 
100644 --- a/drizzle-orm/src/postgres-js/driver.ts +++ b/drizzle-orm/src/postgres-js/driver.ts @@ -1,4 +1,4 @@ -import type { Sql } from 'postgres'; +import pgClient, { type Options, type PostgresType, type Sql } from 'postgres'; import { entityKind } from '~/entity.ts'; import { DefaultLogger } from '~/logger.ts'; import { PgDatabase } from '~/pg-core/db.ts'; @@ -9,17 +9,17 @@ import { type RelationalSchemaConfig, type TablesRelationalConfig, } from '~/relations.ts'; -import type { DrizzleConfig } from '~/utils.ts'; +import type { DrizzleConfig, IfNotImported, ImportTypeError } from '~/utils.ts'; import type { PostgresJsQueryResultHKT } from './session.ts'; import { PostgresJsSession } from './session.ts'; export class PostgresJsDatabase< TSchema extends Record = Record, > extends PgDatabase { - static readonly [entityKind]: string = 'PostgresJsDatabase'; + static override readonly [entityKind]: string = 'PostgresJsDatabase'; } -export function drizzle = Record>( +function construct = Record>( client: Sql, config: DrizzleConfig = {}, ): PostgresJsDatabase & { @@ -62,3 +62,67 @@ export function drizzle = Record = Record, + TClient extends Sql = Sql, +>( + ...params: IfNotImported< + Options, + [ImportTypeError<'postgres'>], + [ + TClient | string, + ] | [ + TClient | string, + DrizzleConfig, + ] | [ + ( + & DrizzleConfig + & ({ + connection: string | ({ url?: string } & Options>); + } | { + client: TClient; + }) + ), + ] + > +): PostgresJsDatabase & { + $client: TClient; +} { + if (typeof params[0] === 'function') { + return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; + } + + if (typeof params[0] === 'object') { + const { connection, client, ...drizzleConfig } = params[0] as { + connection?: { url?: string } & Options>; + client?: TClient; + } & DrizzleConfig; + + if (client) return construct(client, drizzleConfig) as any; + + if (typeof connection === 'object' && connection.url !== undefined) { + const { url, ...config } = 
connection; + + const instance = pgClient(url, config); + return construct(instance, drizzleConfig) as any; + } + + const instance = pgClient(connection); + return construct(instance, drizzleConfig) as any; + } + + const instance = pgClient(params[0] as string); + + return construct(instance, params[1]) as any; +} + +export namespace drizzle { + export function mock = Record>( + config?: DrizzleConfig, + ): PostgresJsDatabase & { + $client: '$client is not available on drizzle.mock()'; + } { + return construct({} as any, config) as any; + } +} diff --git a/drizzle-orm/src/postgres-js/session.ts b/drizzle-orm/src/postgres-js/session.ts index 05179ebdb..7509e2a00 100644 --- a/drizzle-orm/src/postgres-js/session.ts +++ b/drizzle-orm/src/postgres-js/session.ts @@ -13,7 +13,7 @@ import { tracer } from '~/tracing.ts'; import { type Assume, mapResultRow } from '~/utils.ts'; export class PostgresJsPreparedQuery extends PgPreparedQuery { - static readonly [entityKind]: string = 'PostgresJsPreparedQuery'; + static override readonly [entityKind]: string = 'PostgresJsPreparedQuery'; constructor( private client: Sql, @@ -95,7 +95,7 @@ export class PostgresJsSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgSession { - static readonly [entityKind]: string = 'PostgresJsSession'; + static override readonly [entityKind]: string = 'PostgresJsSession'; logger: Logger; @@ -164,7 +164,7 @@ export class PostgresJsTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgTransaction { - static readonly [entityKind]: string = 'PostgresJsTransaction'; + static override readonly [entityKind]: string = 'PostgresJsTransaction'; constructor( dialect: PgDialect, diff --git a/drizzle-orm/src/prisma/mysql/driver.ts b/drizzle-orm/src/prisma/mysql/driver.ts index 586832948..c6ca143df 100644 --- a/drizzle-orm/src/prisma/mysql/driver.ts +++ b/drizzle-orm/src/prisma/mysql/driver.ts @@ -13,7 +13,7 @@ import { PrismaMySqlSession } 
from './session.ts'; export class PrismaMySqlDatabase extends MySqlDatabase> { - static readonly [entityKind]: string = 'PrismaMySqlDatabase'; + static override readonly [entityKind]: string = 'PrismaMySqlDatabase'; constructor(client: PrismaClient, logger: Logger | undefined) { const dialect = new MySqlDialect(); diff --git a/drizzle-orm/src/prisma/mysql/session.ts b/drizzle-orm/src/prisma/mysql/session.ts index a6b12a0c3..fc3807bc5 100644 --- a/drizzle-orm/src/prisma/mysql/session.ts +++ b/drizzle-orm/src/prisma/mysql/session.ts @@ -19,7 +19,7 @@ export class PrismaMySqlPreparedQuery extends MySqlPreparedQuery | undefined): AsyncGenerator { throw new Error('Method not implemented.'); } - static readonly [entityKind]: string = 'PrismaMySqlPreparedQuery'; + static override readonly [entityKind]: string = 'PrismaMySqlPreparedQuery'; constructor( private readonly prisma: PrismaClient, @@ -41,7 +41,7 @@ export interface PrismaMySqlSessionOptions { } export class PrismaMySqlSession extends MySqlSession { - static readonly [entityKind]: string = 'PrismaMySqlSession'; + static override readonly [entityKind]: string = 'PrismaMySqlSession'; private readonly logger: Logger; diff --git a/drizzle-orm/src/prisma/pg/driver.ts b/drizzle-orm/src/prisma/pg/driver.ts index 23678f09f..f9038d8a1 100644 --- a/drizzle-orm/src/prisma/pg/driver.ts +++ b/drizzle-orm/src/prisma/pg/driver.ts @@ -11,7 +11,7 @@ import type { PrismaPgQueryResultHKT } from './session.ts'; import { PrismaPgSession } from './session.ts'; export class PrismaPgDatabase extends PgDatabase> { - static readonly [entityKind]: string = 'PrismaPgDatabase'; + static override readonly [entityKind]: string = 'PrismaPgDatabase'; constructor(client: PrismaClient, logger: Logger | undefined) { const dialect = new PgDialect(); diff --git a/drizzle-orm/src/prisma/pg/session.ts b/drizzle-orm/src/prisma/pg/session.ts index 077326ef3..b93f6f14b 100644 --- a/drizzle-orm/src/prisma/pg/session.ts +++ 
b/drizzle-orm/src/prisma/pg/session.ts @@ -14,7 +14,7 @@ import type { Query, SQL } from '~/sql/sql.ts'; import { fillPlaceholders } from '~/sql/sql.ts'; export class PrismaPgPreparedQuery extends PgPreparedQuery { - static readonly [entityKind]: string = 'PrismaPgPreparedQuery'; + static override readonly [entityKind]: string = 'PrismaPgPreparedQuery'; constructor( private readonly prisma: PrismaClient, @@ -44,7 +44,7 @@ export interface PrismaPgSessionOptions { } export class PrismaPgSession extends PgSession { - static readonly [entityKind]: string = 'PrismaPgSession'; + static override readonly [entityKind]: string = 'PrismaPgSession'; private readonly logger: Logger; diff --git a/drizzle-orm/src/prisma/sqlite/session.ts b/drizzle-orm/src/prisma/sqlite/session.ts index 3dbdc6f1a..3a10fddbd 100644 --- a/drizzle-orm/src/prisma/sqlite/session.ts +++ b/drizzle-orm/src/prisma/sqlite/session.ts @@ -19,7 +19,7 @@ type PreparedQueryConfig = Omit; export class PrismaSQLitePreparedQuery extends SQLitePreparedQuery< { type: 'async'; run: []; all: T['all']; get: T['get']; values: never; execute: T['execute'] } > { - static readonly [entityKind]: string = 'PrismaSQLitePreparedQuery'; + static override readonly [entityKind]: string = 'PrismaSQLitePreparedQuery'; constructor( private readonly prisma: PrismaClient, @@ -60,7 +60,7 @@ export interface PrismaSQLiteSessionOptions { } export class PrismaSQLiteSession extends SQLiteSession<'async', unknown, Record, Record> { - static readonly [entityKind]: string = 'PrismaSQLiteSession'; + static override readonly [entityKind]: string = 'PrismaSQLiteSession'; private readonly logger: Logger; diff --git a/drizzle-orm/src/relations.ts b/drizzle-orm/src/relations.ts index 99780897e..ed49c138f 100644 --- a/drizzle-orm/src/relations.ts +++ b/drizzle-orm/src/relations.ts @@ -66,7 +66,7 @@ export class One< TTableName extends string = string, TIsNullable extends boolean = boolean, > extends Relation { - static readonly [entityKind]: string 
= 'One'; + static override readonly [entityKind]: string = 'One'; declare protected $relationBrand: 'One'; @@ -98,7 +98,7 @@ export class One< } export class Many extends Relation { - static readonly [entityKind]: string = 'Many'; + static override readonly [entityKind]: string = 'Many'; declare protected $relationBrand: 'Many'; diff --git a/drizzle-orm/src/sql-js/session.ts b/drizzle-orm/src/sql-js/session.ts index 4325cd13f..0a09babbd 100644 --- a/drizzle-orm/src/sql-js/session.ts +++ b/drizzle-orm/src/sql-js/session.ts @@ -25,7 +25,7 @@ export class SQLJsSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SQLiteSession<'sync', void, TFullSchema, TSchema> { - static readonly [entityKind]: string = 'SQLJsSession'; + static override readonly [entityKind]: string = 'SQLJsSession'; private logger: Logger; @@ -90,7 +90,7 @@ export class SQLJsTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SQLiteTransaction<'sync', void, TFullSchema, TSchema> { - static readonly [entityKind]: string = 'SQLJsTransaction'; + static override readonly [entityKind]: string = 'SQLJsTransaction'; override transaction(transaction: (tx: SQLJsTransaction) => T): T { const savepointName = `sp${this.nestedIndex + 1}`; @@ -110,7 +110,7 @@ export class SQLJsTransaction< export class PreparedQuery extends PreparedQueryBase< { type: 'sync'; run: void; all: T['all']; get: T['get']; values: T['values']; execute: T['execute'] } > { - static readonly [entityKind]: string = 'SQLJsPreparedQuery'; + static override readonly [entityKind]: string = 'SQLJsPreparedQuery'; constructor( private stmt: Statement, diff --git a/drizzle-orm/src/sqlite-core/columns/blob.ts b/drizzle-orm/src/sqlite-core/columns/blob.ts index 96e8d7f69..b7cd90be1 100644 --- a/drizzle-orm/src/sqlite-core/columns/blob.ts +++ b/drizzle-orm/src/sqlite-core/columns/blob.ts @@ -20,7 +20,7 @@ export type SQLiteBigIntBuilderInitial = SQLiteBigIntBuild export class 
SQLiteBigIntBuilder> extends SQLiteColumnBuilder { - static readonly [entityKind]: string = 'SQLiteBigIntBuilder'; + static override readonly [entityKind]: string = 'SQLiteBigIntBuilder'; constructor(name: T['name']) { super(name, 'bigint', 'SQLiteBigInt'); @@ -35,7 +35,7 @@ export class SQLiteBigIntBuilder> extends SQLiteColumn { - static readonly [entityKind]: string = 'SQLiteBigInt'; + static override readonly [entityKind]: string = 'SQLiteBigInt'; getSQLType(): string { return 'blob'; @@ -63,7 +63,7 @@ export type SQLiteBlobJsonBuilderInitial = SQLiteBlobJsonB export class SQLiteBlobJsonBuilder> extends SQLiteColumnBuilder { - static readonly [entityKind]: string = 'SQLiteBlobJsonBuilder'; + static override readonly [entityKind]: string = 'SQLiteBlobJsonBuilder'; constructor(name: T['name']) { super(name, 'json', 'SQLiteBlobJson'); @@ -81,7 +81,7 @@ export class SQLiteBlobJsonBuilder> extends SQLiteColumn { - static readonly [entityKind]: string = 'SQLiteBlobJson'; + static override readonly [entityKind]: string = 'SQLiteBlobJson'; getSQLType(): string { return 'blob'; @@ -109,7 +109,7 @@ export type SQLiteBlobBufferBuilderInitial = SQLiteBlobBuf export class SQLiteBlobBufferBuilder> extends SQLiteColumnBuilder { - static readonly [entityKind]: string = 'SQLiteBlobBufferBuilder'; + static override readonly [entityKind]: string = 'SQLiteBlobBufferBuilder'; constructor(name: T['name']) { super(name, 'buffer', 'SQLiteBlobBuffer'); @@ -124,7 +124,7 @@ export class SQLiteBlobBufferBuilder> extends SQLiteColumn { - static readonly [entityKind]: string = 'SQLiteBlobBuffer'; + static override readonly [entityKind]: string = 'SQLiteBlobBuffer'; getSQLType(): string { return 'blob'; diff --git a/drizzle-orm/src/sqlite-core/columns/common.ts b/drizzle-orm/src/sqlite-core/columns/common.ts index a0cdd755d..0fd985537 100644 --- a/drizzle-orm/src/sqlite-core/columns/common.ts +++ b/drizzle-orm/src/sqlite-core/columns/common.ts @@ -43,7 +43,7 @@ export abstract class 
SQLiteColumnBuilder< > extends ColumnBuilder implements SQLiteColumnBuilderBase { - static readonly [entityKind]: string = 'SQLiteColumnBuilder'; + static override readonly [entityKind]: string = 'SQLiteColumnBuilder'; private foreignKeyConfigs: ReferenceConfig[] = []; @@ -102,7 +102,7 @@ export abstract class SQLiteColumn< T extends ColumnBaseConfig = ColumnBaseConfig, TRuntimeConfig extends object = object, > extends Column { - static readonly [entityKind]: string = 'SQLiteColumn'; + static override readonly [entityKind]: string = 'SQLiteColumn'; constructor( override readonly table: SQLiteTable, diff --git a/drizzle-orm/src/sqlite-core/columns/custom.ts b/drizzle-orm/src/sqlite-core/columns/custom.ts index 293dd09ad..6ece801c5 100644 --- a/drizzle-orm/src/sqlite-core/columns/custom.ts +++ b/drizzle-orm/src/sqlite-core/columns/custom.ts @@ -35,7 +35,7 @@ export class SQLiteCustomColumnBuilder { - static readonly [entityKind]: string = 'SQLiteCustomColumnBuilder'; + static override readonly [entityKind]: string = 'SQLiteCustomColumnBuilder'; constructor( name: T['name'], @@ -59,7 +59,7 @@ export class SQLiteCustomColumnBuilder> extends SQLiteColumn { - static readonly [entityKind]: string = 'SQLiteCustomColumn'; + static override readonly [entityKind]: string = 'SQLiteCustomColumn'; private sqlName: string; private mapTo?: (value: T['data']) => T['driverParam']; diff --git a/drizzle-orm/src/sqlite-core/columns/integer.ts b/drizzle-orm/src/sqlite-core/columns/integer.ts index 449c6357d..10595b9a5 100644 --- a/drizzle-orm/src/sqlite-core/columns/integer.ts +++ b/drizzle-orm/src/sqlite-core/columns/integer.ts @@ -29,7 +29,7 @@ export abstract class SQLiteBaseIntegerBuilder< {}, { primaryKeyHasDefault: true } > { - static readonly [entityKind]: string = 'SQLiteBaseIntegerBuilder'; + static override readonly [entityKind]: string = 'SQLiteBaseIntegerBuilder'; constructor(name: T['name'], dataType: T['dataType'], columnType: T['columnType']) { super(name, dataType, 
columnType); @@ -54,7 +54,7 @@ export abstract class SQLiteBaseInteger< T extends ColumnBaseConfig, TRuntimeConfig extends object = object, > extends SQLiteColumn { - static readonly [entityKind]: string = 'SQLiteBaseInteger'; + static override readonly [entityKind]: string = 'SQLiteBaseInteger'; readonly autoIncrement: boolean = this.config.autoIncrement; @@ -76,7 +76,7 @@ export type SQLiteIntegerBuilderInitial = SQLiteIntegerBui export class SQLiteIntegerBuilder> extends SQLiteBaseIntegerBuilder { - static readonly [entityKind]: string = 'SQLiteIntegerBuilder'; + static override readonly [entityKind]: string = 'SQLiteIntegerBuilder'; constructor(name: T['name']) { super(name, 'number', 'SQLiteInteger'); @@ -93,7 +93,7 @@ export class SQLiteIntegerBuilder> extends SQLiteBaseInteger { - static readonly [entityKind]: string = 'SQLiteInteger'; + static override readonly [entityKind]: string = 'SQLiteInteger'; } export type SQLiteTimestampBuilderInitial = SQLiteTimestampBuilder<{ @@ -109,7 +109,7 @@ export type SQLiteTimestampBuilderInitial = SQLiteTimestam export class SQLiteTimestampBuilder> extends SQLiteBaseIntegerBuilder { - static readonly [entityKind]: string = 'SQLiteTimestampBuilder'; + static override readonly [entityKind]: string = 'SQLiteTimestampBuilder'; constructor(name: T['name'], mode: 'timestamp' | 'timestamp_ms') { super(name, 'date', 'SQLiteTimestamp'); @@ -138,7 +138,7 @@ export class SQLiteTimestampBuilder> extends SQLiteBaseInteger { - static readonly [entityKind]: string = 'SQLiteTimestamp'; + static override readonly [entityKind]: string = 'SQLiteTimestamp'; readonly mode: 'timestamp' | 'timestamp_ms' = this.config.mode; @@ -171,7 +171,7 @@ export type SQLiteBooleanBuilderInitial = SQLiteBooleanBui export class SQLiteBooleanBuilder> extends SQLiteBaseIntegerBuilder { - static readonly [entityKind]: string = 'SQLiteBooleanBuilder'; + static override readonly [entityKind]: string = 'SQLiteBooleanBuilder'; constructor(name: T['name'], mode: 
'boolean') { super(name, 'boolean', 'SQLiteBoolean'); @@ -191,7 +191,7 @@ export class SQLiteBooleanBuilder> extends SQLiteBaseInteger { - static readonly [entityKind]: string = 'SQLiteBoolean'; + static override readonly [entityKind]: string = 'SQLiteBoolean'; readonly mode: 'boolean' = this.config.mode; diff --git a/drizzle-orm/src/sqlite-core/columns/numeric.ts b/drizzle-orm/src/sqlite-core/columns/numeric.ts index 331547736..93dfc4c3d 100644 --- a/drizzle-orm/src/sqlite-core/columns/numeric.ts +++ b/drizzle-orm/src/sqlite-core/columns/numeric.ts @@ -17,7 +17,7 @@ export type SQLiteNumericBuilderInitial = SQLiteNumericBui export class SQLiteNumericBuilder> extends SQLiteColumnBuilder { - static readonly [entityKind]: string = 'SQLiteNumericBuilder'; + static override readonly [entityKind]: string = 'SQLiteNumericBuilder'; constructor(name: T['name']) { super(name, 'string', 'SQLiteNumeric'); @@ -35,7 +35,7 @@ export class SQLiteNumericBuilder> extends SQLiteColumn { - static readonly [entityKind]: string = 'SQLiteNumeric'; + static override readonly [entityKind]: string = 'SQLiteNumeric'; getSQLType(): string { return 'numeric'; diff --git a/drizzle-orm/src/sqlite-core/columns/real.ts b/drizzle-orm/src/sqlite-core/columns/real.ts index 693780e9d..cd7cf5d01 100644 --- a/drizzle-orm/src/sqlite-core/columns/real.ts +++ b/drizzle-orm/src/sqlite-core/columns/real.ts @@ -17,7 +17,7 @@ export type SQLiteRealBuilderInitial = SQLiteRealBuilder<{ export class SQLiteRealBuilder> extends SQLiteColumnBuilder { - static readonly [entityKind]: string = 'SQLiteRealBuilder'; + static override readonly [entityKind]: string = 'SQLiteRealBuilder'; constructor(name: T['name']) { super(name, 'number', 'SQLiteReal'); @@ -32,7 +32,7 @@ export class SQLiteRealBuilder> extends SQLiteColumn { - static readonly [entityKind]: string = 'SQLiteReal'; + static override readonly [entityKind]: string = 'SQLiteReal'; getSQLType(): string { return 'real'; diff --git 
a/drizzle-orm/src/sqlite-core/columns/text.ts b/drizzle-orm/src/sqlite-core/columns/text.ts index 033c2cb99..84c71fb20 100644 --- a/drizzle-orm/src/sqlite-core/columns/text.ts +++ b/drizzle-orm/src/sqlite-core/columns/text.ts @@ -19,7 +19,7 @@ export class SQLiteTextBuilder { - static readonly [entityKind]: string = 'SQLiteTextBuilder'; + static override readonly [entityKind]: string = 'SQLiteTextBuilder'; constructor(name: T['name'], config: SQLiteTextConfig<'text', T['enumValues']>) { super(name, 'string', 'SQLiteText'); @@ -38,7 +38,7 @@ export class SQLiteTextBuilder> extends SQLiteColumn { - static readonly [entityKind]: string = 'SQLiteText'; + static override readonly [entityKind]: string = 'SQLiteText'; override readonly enumValues = this.config.enumValues; @@ -69,7 +69,7 @@ export type SQLiteTextJsonBuilderInitial = SQLiteTextJsonB export class SQLiteTextJsonBuilder> extends SQLiteColumnBuilder { - static readonly [entityKind]: string = 'SQLiteTextJsonBuilder'; + static override readonly [entityKind]: string = 'SQLiteTextJsonBuilder'; constructor(name: T['name']) { super(name, 'json', 'SQLiteTextJson'); @@ -89,7 +89,7 @@ export class SQLiteTextJsonBuilder> extends SQLiteColumn { - static readonly [entityKind]: string = 'SQLiteTextJson'; + static override readonly [entityKind]: string = 'SQLiteTextJson'; getSQLType(): string { return 'text'; diff --git a/drizzle-orm/src/sqlite-core/dialect.ts b/drizzle-orm/src/sqlite-core/dialect.ts index d3822be5e..8995148c1 100644 --- a/drizzle-orm/src/sqlite-core/dialect.ts +++ b/drizzle-orm/src/sqlite-core/dialect.ts @@ -17,7 +17,7 @@ import { type TableRelationalConfig, type TablesRelationalConfig, } from '~/relations.ts'; -import type { Name } from '~/sql/index.ts'; +import type { Name, Placeholder } from '~/sql/index.ts'; import { and, eq } from '~/sql/index.ts'; import { Param, type QueryWithTypings, SQL, sql, type SQLChunk } from '~/sql/sql.ts'; import { SQLiteColumn } from '~/sqlite-core/columns/index.ts'; @@ 
-75,7 +75,7 @@ export abstract class SQLiteDialect { return sql.join(withSqlChunks); } - buildDeleteQuery({ table, where, returning, withList }: SQLiteDeleteConfig): SQL { + buildDeleteQuery({ table, where, returning, withList, limit, orderBy }: SQLiteDeleteConfig): SQL { const withSql = this.buildWithCTE(withList); const returningSql = returning @@ -84,7 +84,11 @@ export abstract class SQLiteDialect { const whereSql = where ? sql` where ${where}` : undefined; - return sql`${withSql}delete from ${table}${whereSql}${returningSql}`; + const orderBySql = this.buildOrderBy(orderBy); + + const limitSql = this.buildLimit(limit); + + return sql`${withSql}delete from ${table}${whereSql}${returningSql}${orderBySql}${limitSql}`; } buildUpdateSet(table: SQLiteTable, set: UpdateSet): SQL { @@ -108,7 +112,7 @@ export abstract class SQLiteDialect { })); } - buildUpdateQuery({ table, set, where, returning, withList }: SQLiteUpdateConfig): SQL { + buildUpdateQuery({ table, set, where, returning, withList, limit, orderBy }: SQLiteUpdateConfig): SQL { const withSql = this.buildWithCTE(withList); const setSql = this.buildUpdateSet(table, set); @@ -119,7 +123,11 @@ export abstract class SQLiteDialect { const whereSql = where ? sql` where ${where}` : undefined; - return sql`${withSql}update ${table} set ${setSql}${whereSql}${returningSql}`; + const orderBySql = this.buildOrderBy(orderBy); + + const limitSql = this.buildLimit(limit); + + return sql`${withSql}update ${table} set ${setSql}${whereSql}${returningSql}${orderBySql}${limitSql}`; } /** @@ -185,6 +193,28 @@ export abstract class SQLiteDialect { return sql.join(chunks); } + private buildLimit(limit: number | Placeholder | undefined): SQL | undefined { + return typeof limit === 'object' || (typeof limit === 'number' && limit >= 0) + ? 
sql` limit ${limit}` + : undefined; + } + + private buildOrderBy(orderBy: (SQLiteColumn | SQL | SQL.Aliased)[] | undefined): SQL | undefined { + const orderByList: (SQLiteColumn | SQL | SQL.Aliased)[] = []; + + if (orderBy) { + for (const [index, orderByValue] of orderBy.entries()) { + orderByList.push(orderByValue); + + if (index < orderBy.length - 1) { + orderByList.push(sql`, `); + } + } + } + + return orderByList.length > 0 ? sql` order by ${sql.join(orderByList)}` : undefined; + } + buildSelectQuery( { withList, @@ -280,17 +310,6 @@ export abstract class SQLiteDialect { const havingSql = having ? sql` having ${having}` : undefined; - const orderByList: (SQLiteColumn | SQL | SQL.Aliased)[] = []; - if (orderBy) { - for (const [index, orderByValue] of orderBy.entries()) { - orderByList.push(orderByValue); - - if (index < orderBy.length - 1) { - orderByList.push(sql`, `); - } - } - } - const groupByList: (SQL | AnyColumn | SQL.Aliased)[] = []; if (groupBy) { for (const [index, groupByValue] of groupBy.entries()) { @@ -304,11 +323,9 @@ export abstract class SQLiteDialect { const groupBySql = groupByList.length > 0 ? sql` group by ${sql.join(groupByList)}` : undefined; - const orderBySql = orderByList.length > 0 ? sql` order by ${sql.join(orderByList)}` : undefined; + const orderBySql = this.buildOrderBy(orderBy); - const limitSql = typeof limit === 'object' || (typeof limit === 'number' && limit >= 0) - ? sql` limit ${limit}` - : undefined; + const limitSql = this.buildLimit(limit); const offsetSql = offset ? 
sql` offset ${offset}` : undefined; @@ -745,7 +762,7 @@ export abstract class SQLiteDialect { } export class SQLiteSyncDialect extends SQLiteDialect { - static readonly [entityKind]: string = 'SQLiteSyncDialect'; + static override readonly [entityKind]: string = 'SQLiteSyncDialect'; migrate( migrations: MigrationMeta[], @@ -797,7 +814,7 @@ export class SQLiteSyncDialect extends SQLiteDialect { } export class SQLiteAsyncDialect extends SQLiteDialect { - static readonly [entityKind]: string = 'SQLiteAsyncDialect'; + static override readonly [entityKind]: string = 'SQLiteAsyncDialect'; async migrate( migrations: MigrationMeta[], diff --git a/drizzle-orm/src/sqlite-core/query-builders/count.ts b/drizzle-orm/src/sqlite-core/query-builders/count.ts index 424276825..1c1234034 100644 --- a/drizzle-orm/src/sqlite-core/query-builders/count.ts +++ b/drizzle-orm/src/sqlite-core/query-builders/count.ts @@ -9,7 +9,7 @@ export class SQLiteCountBuilder< > extends SQL implements Promise, SQLWrapper { private sql: SQL; - static readonly [entityKind] = 'SQLiteCountBuilderAsync'; + static override readonly [entityKind] = 'SQLiteCountBuilderAsync'; [Symbol.toStringTag] = 'SQLiteCountBuilderAsync'; private session: TSession; diff --git a/drizzle-orm/src/sqlite-core/query-builders/delete.ts b/drizzle-orm/src/sqlite-core/query-builders/delete.ts index 1a028c09a..53e8d6227 100644 --- a/drizzle-orm/src/sqlite-core/query-builders/delete.ts +++ b/drizzle-orm/src/sqlite-core/query-builders/delete.ts @@ -2,12 +2,14 @@ import { entityKind } from '~/entity.ts'; import type { SelectResultFields } from '~/query-builders/select.types.ts'; import { QueryPromise } from '~/query-promise.ts'; import type { RunnableQuery } from '~/runnable-query.ts'; -import type { Query, SQL, SQLWrapper } from '~/sql/sql.ts'; +import { SelectionProxyHandler } from '~/selection-proxy.ts'; +import type { Placeholder, Query, SQL, SQLWrapper } from '~/sql/sql.ts'; import type { SQLiteDialect } from 
'~/sqlite-core/dialect.ts'; import type { SQLitePreparedQuery, SQLiteSession } from '~/sqlite-core/session.ts'; import { SQLiteTable } from '~/sqlite-core/table.ts'; import type { Subquery } from '~/subquery.ts'; -import { type DrizzleTypeError, orderSelectedFields } from '~/utils.ts'; +import { Table } from '~/table.ts'; +import { type DrizzleTypeError, orderSelectedFields, type ValueOrArray } from '~/utils.ts'; import type { SQLiteColumn } from '../columns/common.ts'; import type { SelectedFieldsFlat, SelectedFieldsOrdered } from './select.types.ts'; @@ -37,6 +39,8 @@ export type SQLiteDelete< export interface SQLiteDeleteConfig { where?: SQL | undefined; + limit?: number | Placeholder; + orderBy?: (SQLiteColumn | SQL | SQL.Aliased)[]; table: SQLiteTable; returning?: SelectedFieldsOrdered; withList?: Subquery[]; @@ -136,7 +140,7 @@ export class SQLiteDeleteBase< > extends QueryPromise implements RunnableQuery, SQLWrapper { - static readonly [entityKind]: string = 'SQLiteDelete'; + static override readonly [entityKind]: string = 'SQLiteDelete'; /** @internal */ config: SQLiteDeleteConfig; @@ -185,6 +189,37 @@ export class SQLiteDeleteBase< return this as any; } + orderBy( + builder: (deleteTable: TTable) => ValueOrArray, + ): SQLiteDeleteWithout; + orderBy(...columns: (SQLiteColumn | SQL | SQL.Aliased)[]): SQLiteDeleteWithout; + orderBy( + ...columns: + | [(deleteTable: TTable) => ValueOrArray] + | (SQLiteColumn | SQL | SQL.Aliased)[] + ): SQLiteDeleteWithout { + if (typeof columns[0] === 'function') { + const orderBy = columns[0]( + new Proxy( + this.config.table[Table.Symbol.Columns], + new SelectionProxyHandler({ sqlAliasedBehavior: 'alias', sqlBehavior: 'sql' }), + ) as any, + ); + + const orderByArray = Array.isArray(orderBy) ? 
orderBy : [orderBy]; + this.config.orderBy = orderByArray; + } else { + const orderByArray = columns as (SQLiteColumn | SQL | SQL.Aliased)[]; + this.config.orderBy = orderByArray; + } + return this as any; + } + + limit(limit: number | Placeholder): SQLiteDeleteWithout { + this.config.limit = limit; + return this as any; + } + /** * Adds a `returning` clause to the query. * diff --git a/drizzle-orm/src/sqlite-core/query-builders/insert.ts b/drizzle-orm/src/sqlite-core/query-builders/insert.ts index b0861fade..4f20666c4 100644 --- a/drizzle-orm/src/sqlite-core/query-builders/insert.ts +++ b/drizzle-orm/src/sqlite-core/query-builders/insert.ts @@ -199,7 +199,7 @@ export class SQLiteInsertBase< > extends QueryPromise implements RunnableQuery, SQLWrapper { - static readonly [entityKind]: string = 'SQLiteInsert'; + static override readonly [entityKind]: string = 'SQLiteInsert'; /** @internal */ config: SQLiteInsertConfig; diff --git a/drizzle-orm/src/sqlite-core/query-builders/query.ts b/drizzle-orm/src/sqlite-core/query-builders/query.ts index 9ae47f9ce..ef93e992a 100644 --- a/drizzle-orm/src/sqlite-core/query-builders/query.ts +++ b/drizzle-orm/src/sqlite-core/query-builders/query.ts @@ -98,7 +98,7 @@ export class RelationalQueryBuilder< export class SQLiteRelationalQuery extends QueryPromise implements RunnableQuery, SQLWrapper { - static readonly [entityKind]: string = 'SQLiteAsyncRelationalQuery'; + static override readonly [entityKind]: string = 'SQLiteAsyncRelationalQuery'; declare readonly _: { readonly dialect: 'sqlite'; @@ -199,7 +199,7 @@ export class SQLiteRelationalQuery exte } export class SQLiteSyncRelationalQuery extends SQLiteRelationalQuery<'sync', TResult> { - static readonly [entityKind]: string = 'SQLiteSyncRelationalQuery'; + static override readonly [entityKind]: string = 'SQLiteSyncRelationalQuery'; sync(): TResult { return this.executeRaw(); diff --git a/drizzle-orm/src/sqlite-core/query-builders/raw.ts 
b/drizzle-orm/src/sqlite-core/query-builders/raw.ts index 10ddb38bd..488f45afe 100644 --- a/drizzle-orm/src/sqlite-core/query-builders/raw.ts +++ b/drizzle-orm/src/sqlite-core/query-builders/raw.ts @@ -15,7 +15,7 @@ export interface SQLiteRaw extends QueryPromise, RunnableQuery export class SQLiteRaw extends QueryPromise implements RunnableQuery, SQLWrapper, PreparedQuery { - static readonly [entityKind]: string = 'SQLiteRaw'; + static override readonly [entityKind]: string = 'SQLiteRaw'; declare readonly _: { readonly dialect: 'sqlite'; diff --git a/drizzle-orm/src/sqlite-core/query-builders/select.ts b/drizzle-orm/src/sqlite-core/query-builders/select.ts index b7f4b0465..d9fce748a 100644 --- a/drizzle-orm/src/sqlite-core/query-builders/select.ts +++ b/drizzle-orm/src/sqlite-core/query-builders/select.ts @@ -137,7 +137,7 @@ export abstract class SQLiteSelectQueryBuilderBase< TResult extends any[] = SelectResult[], TSelectedFields extends ColumnsSelection = BuildSubquerySelection, > extends TypedQueryBuilder { - static readonly [entityKind]: string = 'SQLiteSelectQueryBuilder'; + static override readonly [entityKind]: string = 'SQLiteSelectQueryBuilder'; override readonly _: { readonly dialect: 'sqlite'; @@ -854,7 +854,7 @@ export class SQLiteSelectBase< TResult, TSelectedFields > implements RunnableQuery, SQLWrapper { - static readonly [entityKind]: string = 'SQLiteSelect'; + static override readonly [entityKind]: string = 'SQLiteSelect'; /** @internal */ _prepare(isOneTimeQuery = true): SQLiteSelectPrepare { diff --git a/drizzle-orm/src/sqlite-core/query-builders/update.ts b/drizzle-orm/src/sqlite-core/query-builders/update.ts index 0238b748f..f49337107 100644 --- a/drizzle-orm/src/sqlite-core/query-builders/update.ts +++ b/drizzle-orm/src/sqlite-core/query-builders/update.ts @@ -3,17 +3,27 @@ import { entityKind } from '~/entity.ts'; import type { SelectResultFields } from '~/query-builders/select.types.ts'; import { QueryPromise } from '~/query-promise.ts'; 
import type { RunnableQuery } from '~/runnable-query.ts'; -import type { Query, SQL, SQLWrapper } from '~/sql/sql.ts'; +import { SelectionProxyHandler } from '~/selection-proxy.ts'; +import type { Placeholder, Query, SQL, SQLWrapper } from '~/sql/sql.ts'; import type { SQLiteDialect } from '~/sqlite-core/dialect.ts'; import type { SQLitePreparedQuery, SQLiteSession } from '~/sqlite-core/session.ts'; import { SQLiteTable } from '~/sqlite-core/table.ts'; import type { Subquery } from '~/subquery.ts'; -import { type DrizzleTypeError, mapUpdateSet, orderSelectedFields, type UpdateSet } from '~/utils.ts'; +import { Table } from '~/table.ts'; +import { + type DrizzleTypeError, + mapUpdateSet, + orderSelectedFields, + type UpdateSet, + type ValueOrArray, +} from '~/utils.ts'; import type { SQLiteColumn } from '../columns/common.ts'; import type { SelectedFields, SelectedFieldsOrdered } from './select.types.ts'; export interface SQLiteUpdateConfig { where?: SQL | undefined; + limit?: number | Placeholder; + orderBy?: (SQLiteColumn | SQL | SQL.Aliased)[]; set: UpdateSet; table: SQLiteTable; returning?: SelectedFieldsOrdered; @@ -169,7 +179,7 @@ export class SQLiteUpdateBase< > extends QueryPromise implements RunnableQuery, SQLWrapper { - static readonly [entityKind]: string = 'SQLiteUpdate'; + static override readonly [entityKind]: string = 'SQLiteUpdate'; /** @internal */ config: SQLiteUpdateConfig; @@ -223,6 +233,37 @@ export class SQLiteUpdateBase< return this as any; } + orderBy( + builder: (updateTable: TTable) => ValueOrArray, + ): SQLiteUpdateWithout; + orderBy(...columns: (SQLiteColumn | SQL | SQL.Aliased)[]): SQLiteUpdateWithout; + orderBy( + ...columns: + | [(updateTable: TTable) => ValueOrArray] + | (SQLiteColumn | SQL | SQL.Aliased)[] + ): SQLiteUpdateWithout { + if (typeof columns[0] === 'function') { + const orderBy = columns[0]( + new Proxy( + this.config.table[Table.Symbol.Columns], + new SelectionProxyHandler({ sqlAliasedBehavior: 'alias', sqlBehavior: 
'sql' }), + ) as any, + ); + + const orderByArray = Array.isArray(orderBy) ? orderBy : [orderBy]; + this.config.orderBy = orderByArray; + } else { + const orderByArray = columns as (SQLiteColumn | SQL | SQL.Aliased)[]; + this.config.orderBy = orderByArray; + } + return this as any; + } + + limit(limit: number | Placeholder): SQLiteUpdateWithout { + this.config.limit = limit; + return this as any; + } + /** * Adds a `returning` clause to the query. * diff --git a/drizzle-orm/src/sqlite-core/session.ts b/drizzle-orm/src/sqlite-core/session.ts index d291b6fdf..9e6924ca0 100644 --- a/drizzle-orm/src/sqlite-core/session.ts +++ b/drizzle-orm/src/sqlite-core/session.ts @@ -20,7 +20,7 @@ export interface PreparedQueryConfig { } export class ExecuteResultSync extends QueryPromise { - static readonly [entityKind]: string = 'ExecuteResultSync'; + static override readonly [entityKind]: string = 'ExecuteResultSync'; constructor(private resultCb: () => T) { super(); @@ -209,7 +209,7 @@ export abstract class SQLiteTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends BaseSQLiteDatabase { - static readonly [entityKind]: string = 'SQLiteTransaction'; + static override readonly [entityKind]: string = 'SQLiteTransaction'; constructor( resultType: TResultType, diff --git a/drizzle-orm/src/sqlite-core/table.ts b/drizzle-orm/src/sqlite-core/table.ts index c223e2d6f..d7c5a060b 100644 --- a/drizzle-orm/src/sqlite-core/table.ts +++ b/drizzle-orm/src/sqlite-core/table.ts @@ -24,7 +24,7 @@ export type TableConfig = TableConfigBase>; export const InlineForeignKeys = Symbol.for('drizzle:SQLiteInlineForeignKeys'); export class SQLiteTable extends Table { - static readonly [entityKind]: string = 'SQLiteTable'; + static override readonly [entityKind]: string = 'SQLiteTable'; /** @internal */ static override readonly Symbol = Object.assign({}, Table.Symbol, { diff --git a/drizzle-orm/src/sqlite-core/utils.ts b/drizzle-orm/src/sqlite-core/utils.ts index 
2312466c4..33ae2c248 100644 --- a/drizzle-orm/src/sqlite-core/utils.ts +++ b/drizzle-orm/src/sqlite-core/utils.ts @@ -11,7 +11,6 @@ import type { PrimaryKey } from './primary-keys.ts'; import { PrimaryKeyBuilder } from './primary-keys.ts'; import { SQLiteTable } from './table.ts'; import { type UniqueConstraint, UniqueConstraintBuilder } from './unique-constraint.ts'; -import { SQLiteViewConfig } from './view-common.ts'; import type { SQLiteView } from './view.ts'; export function getTableConfig(table: TTable) { @@ -61,6 +60,6 @@ export function getViewConfig< >(view: SQLiteView) { return { ...view[ViewBaseConfig], - ...view[SQLiteViewConfig], + // ...view[SQLiteViewConfig], }; } diff --git a/drizzle-orm/src/sqlite-core/view-base.ts b/drizzle-orm/src/sqlite-core/view-base.ts index ac3328905..dd2f306a9 100644 --- a/drizzle-orm/src/sqlite-core/view-base.ts +++ b/drizzle-orm/src/sqlite-core/view-base.ts @@ -7,7 +7,7 @@ export abstract class SQLiteViewBase< TExisting extends boolean = boolean, TSelection extends ColumnsSelection = ColumnsSelection, > extends View { - static readonly [entityKind]: string = 'SQLiteViewBase'; + static override readonly [entityKind]: string = 'SQLiteViewBase'; declare _: View['_'] & { viewBrand: 'SQLiteView'; diff --git a/drizzle-orm/src/sqlite-core/view.ts b/drizzle-orm/src/sqlite-core/view.ts index d1f11969e..03ef08025 100644 --- a/drizzle-orm/src/sqlite-core/view.ts +++ b/drizzle-orm/src/sqlite-core/view.ts @@ -10,7 +10,6 @@ import { QueryBuilder } from './query-builders/query-builder.ts'; import type { SelectedFields } from './query-builders/select.types.ts'; import { sqliteTable } from './table.ts'; import { SQLiteViewBase } from './view-base.ts'; -import { SQLiteViewConfig } from './view-common.ts'; export interface ViewBuilderConfig { algorithm?: 'undefined' | 'merge' | 'temptable'; @@ -37,7 +36,7 @@ export class ViewBuilderCore< } export class ViewBuilder extends ViewBuilderCore<{ name: TName }> { - static readonly [entityKind]: 
string = 'SQLiteViewBuilder'; + static override readonly [entityKind]: string = 'SQLiteViewBuilder'; as( qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), @@ -55,7 +54,7 @@ export class ViewBuilder extends ViewBuilderCore< const aliasedSelectedFields = qb.getSelectedFields(); return new Proxy( new SQLiteView({ - sqliteConfig: this.config, + // sqliteConfig: this.config, config: { name: this.name, schema: undefined, @@ -74,7 +73,7 @@ export class ManualViewBuilder< > extends ViewBuilderCore< { name: TName; columns: TColumns } > { - static readonly [entityKind]: string = 'SQLiteManualViewBuilder'; + static override readonly [entityKind]: string = 'SQLiteManualViewBuilder'; private columns: Record; @@ -89,7 +88,6 @@ export class ManualViewBuilder< existing(): SQLiteViewWithSelection> { return new Proxy( new SQLiteView({ - sqliteConfig: undefined, config: { name: this.name, schema: undefined, @@ -109,7 +107,6 @@ export class ManualViewBuilder< as(query: SQL): SQLiteViewWithSelection> { return new Proxy( new SQLiteView({ - sqliteConfig: this.config, config: { name: this.name, schema: undefined, @@ -132,13 +129,9 @@ export class SQLiteView< TExisting extends boolean = boolean, TSelection extends ColumnsSelection = ColumnsSelection, > extends SQLiteViewBase { - static readonly [entityKind]: string = 'SQLiteView'; + static override readonly [entityKind]: string = 'SQLiteView'; - /** @internal */ - [SQLiteViewConfig]: ViewBuilderConfig | undefined; - - constructor({ sqliteConfig, config }: { - sqliteConfig: ViewBuilderConfig | undefined; + constructor({ config }: { config: { name: TName; schema: string | undefined; @@ -147,7 +140,6 @@ export class SQLiteView< }; }) { super(config); - this[SQLiteViewConfig] = sqliteConfig; } } diff --git a/drizzle-orm/src/sqlite-proxy/driver.ts b/drizzle-orm/src/sqlite-proxy/driver.ts index e3f2b2af7..e11e977c1 100644 --- a/drizzle-orm/src/sqlite-proxy/driver.ts +++ b/drizzle-orm/src/sqlite-proxy/driver.ts @@ -15,7 +15,7 @@ 
export interface SqliteRemoteResult { export class SqliteRemoteDatabase< TSchema extends Record = Record, > extends BaseSQLiteDatabase<'async', SqliteRemoteResult, TSchema> { - static readonly [entityKind]: string = 'SqliteRemoteDatabase'; + static override readonly [entityKind]: string = 'SqliteRemoteDatabase'; /** @internal */ declare readonly session: SQLiteRemoteSession>; diff --git a/drizzle-orm/src/sqlite-proxy/session.ts b/drizzle-orm/src/sqlite-proxy/session.ts index 398913568..93d277d69 100644 --- a/drizzle-orm/src/sqlite-proxy/session.ts +++ b/drizzle-orm/src/sqlite-proxy/session.ts @@ -27,7 +27,7 @@ export class SQLiteRemoteSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SQLiteSession<'async', SqliteRemoteResult, TFullSchema, TSchema> { - static readonly [entityKind]: string = 'SQLiteRemoteSession'; + static override readonly [entityKind]: string = 'SQLiteRemoteSession'; private logger: Logger; @@ -108,7 +108,7 @@ export class SQLiteProxyTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SQLiteTransaction<'async', SqliteRemoteResult, TFullSchema, TSchema> { - static readonly [entityKind]: string = 'SQLiteProxyTransaction'; + static override readonly [entityKind]: string = 'SQLiteProxyTransaction'; override async transaction( transaction: (tx: SQLiteProxyTransaction) => Promise, @@ -130,7 +130,7 @@ export class SQLiteProxyTransaction< export class RemotePreparedQuery extends SQLitePreparedQuery< { type: 'async'; run: SqliteRemoteResult; all: T['all']; get: T['get']; values: T['values']; execute: T['execute'] } > { - static readonly [entityKind]: string = 'SQLiteProxyPreparedQuery'; + static override readonly [entityKind]: string = 'SQLiteProxyPreparedQuery'; private method: SQLiteExecuteMethod; diff --git a/drizzle-orm/src/subquery.ts b/drizzle-orm/src/subquery.ts index 320ec46e6..37fe48d86 100644 --- a/drizzle-orm/src/subquery.ts +++ b/drizzle-orm/src/subquery.ts @@ -42,5 
+42,5 @@ export class WithSubquery< TAlias extends string = string, TSelection extends Record = Record, > extends Subquery { - static readonly [entityKind]: string = 'WithSubquery'; + static override readonly [entityKind]: string = 'WithSubquery'; } diff --git a/drizzle-orm/src/tidb-serverless/driver.ts b/drizzle-orm/src/tidb-serverless/driver.ts index ec82e61ff..01f54af6e 100644 --- a/drizzle-orm/src/tidb-serverless/driver.ts +++ b/drizzle-orm/src/tidb-serverless/driver.ts @@ -1,4 +1,4 @@ -import type { Connection } from '@tidbcloud/serverless'; +import { type Config, connect, Connection } from '@tidbcloud/serverless'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { DefaultLogger } from '~/logger.ts'; @@ -10,7 +10,7 @@ import { type RelationalSchemaConfig, type TablesRelationalConfig, } from '~/relations.ts'; -import type { DrizzleConfig } from '~/utils.ts'; +import type { DrizzleConfig, IfNotImported, ImportTypeError } from '~/utils.ts'; import type { TiDBServerlessPreparedQueryHKT, TiDBServerlessQueryResultHKT } from './session.ts'; import { TiDBServerlessSession } from './session.ts'; @@ -21,10 +21,10 @@ export interface TiDBServerlessSDriverOptions { export class TiDBServerlessDatabase< TSchema extends Record = Record, > extends MySqlDatabase { - static readonly [entityKind]: string = 'TiDBServerlessDatabase'; + static override readonly [entityKind]: string = 'TiDBServerlessDatabase'; } -export function drizzle = Record>( +function construct = Record>( client: Connection, config: DrizzleConfig = {}, ): TiDBServerlessDatabase & { @@ -57,3 +57,65 @@ export function drizzle = Record = Record, + TClient extends Connection = Connection, +>( + ...params: IfNotImported< + Config, + [ImportTypeError<'@tidbcloud/serverless'>], + [ + TClient | string, + ] | [ + TClient | string, + DrizzleConfig, + ] | [ + & ({ + connection: string | Config; + } | { + client: TClient; + }) + & DrizzleConfig, + ] + > +): TiDBServerlessDatabase 
& { + $client: TClient; +} { + // eslint-disable-next-line no-instanceof/no-instanceof + if (params[0] instanceof Connection) { + return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; + } + + if (typeof params[0] === 'string') { + const instance = connect({ + url: params[0], + }); + + return construct(instance, params[1]) as any; + } + + const { connection, client, ...drizzleConfig } = params[0] as + & { connection?: Config | string; client?: TClient } + & DrizzleConfig; + + if (client) return construct(client, drizzleConfig) as any; + + const instance = typeof connection === 'string' + ? connect({ + url: connection, + }) + : connect(connection!); + + return construct(instance, drizzleConfig) as any; +} + +export namespace drizzle { + export function mock = Record>( + config?: DrizzleConfig, + ): TiDBServerlessDatabase & { + $client: '$client is not available on drizzle.mock()'; + } { + return construct({} as any, config) as any; + } +} diff --git a/drizzle-orm/src/tidb-serverless/session.ts b/drizzle-orm/src/tidb-serverless/session.ts index b01b9f948..279c60f3b 100644 --- a/drizzle-orm/src/tidb-serverless/session.ts +++ b/drizzle-orm/src/tidb-serverless/session.ts @@ -22,7 +22,7 @@ const executeRawConfig = { fullResult: true } satisfies ExecuteOptions; const queryConfig = { arrayMode: true } satisfies ExecuteOptions; export class TiDBServerlessPreparedQuery extends MySqlPreparedQuery { - static readonly [entityKind]: string = 'TiDBPreparedQuery'; + static override readonly [entityKind]: string = 'TiDBPreparedQuery'; constructor( private client: Tx | Connection, @@ -97,7 +97,7 @@ export class TiDBServerlessSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends MySqlSession { - static readonly [entityKind]: string = 'TiDBServerlessSession'; + static override readonly [entityKind]: string = 'TiDBServerlessSession'; private logger: Logger; private client: Tx | Connection; @@ -172,7 +172,7 @@ export class 
TiDBServerlessTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends MySqlTransaction { - static readonly [entityKind]: string = 'TiDBServerlessTransaction'; + static override readonly [entityKind]: string = 'TiDBServerlessTransaction'; constructor( dialect: MySqlDialect, diff --git a/drizzle-orm/src/utils.ts b/drizzle-orm/src/utils.ts index c073448e3..20abd0a5a 100644 --- a/drizzle-orm/src/utils.ts +++ b/drizzle-orm/src/utils.ts @@ -236,4 +236,12 @@ export function getColumnNameAndConfig< config: typeof a === 'object' ? a : b as TConfig, }; } + export type IfNotImported = unknown extends T ? Y : N; + +export type ImportTypeError = + `Please install \`${TPackageName}\` to allow Drizzle ORM to connect to the database`; + +export type RequireAtLeastOne = Keys extends any + ? Required> & Partial> + : never; diff --git a/drizzle-orm/src/vercel-postgres/driver.ts b/drizzle-orm/src/vercel-postgres/driver.ts index 52a55db2c..5e6c44c27 100644 --- a/drizzle-orm/src/vercel-postgres/driver.ts +++ b/drizzle-orm/src/vercel-postgres/driver.ts @@ -1,4 +1,4 @@ -import { types } from '@vercel/postgres'; +import { type QueryResult, type QueryResultRow, sql, type VercelPool } from '@vercel/postgres'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { DefaultLogger } from '~/logger.ts'; @@ -10,7 +10,7 @@ import { type RelationalSchemaConfig, type TablesRelationalConfig, } from '~/relations.ts'; -import type { DrizzleConfig } from '~/utils.ts'; +import type { DrizzleConfig, IfNotImported, ImportTypeError } from '~/utils.ts'; import { type VercelPgClient, type VercelPgQueryResultHKT, VercelPgSession } from './session.ts'; export interface VercelPgDriverOptions { @@ -25,7 +25,6 @@ export class VercelPgDriver { private dialect: PgDialect, private options: VercelPgDriverOptions = {}, ) { - this.initMappers(); } createSession( @@ -33,22 +32,15 @@ export class VercelPgDriver { ): VercelPgSession, 
TablesRelationalConfig> { return new VercelPgSession(this.client, this.dialect, schema, { logger: this.options.logger }); } - - initMappers() { - types.setTypeParser(types.builtins.TIMESTAMPTZ, (val) => val); - types.setTypeParser(types.builtins.TIMESTAMP, (val) => val); - types.setTypeParser(types.builtins.DATE, (val) => val); - types.setTypeParser(types.builtins.INTERVAL, (val) => val); - } } export class VercelPgDatabase< TSchema extends Record = Record, > extends PgDatabase { - static readonly [entityKind]: string = 'VercelPgDatabase'; + static override readonly [entityKind]: string = 'VercelPgDatabase'; } -export function drizzle = Record>( +function construct = Record>( client: VercelPgClient, config: DrizzleConfig = {}, ): VercelPgDatabase & { @@ -82,3 +74,54 @@ export function drizzle = Record = Record, + TClient extends VercelPgClient = + & VercelPool + & ((strings: TemplateStringsArray, ...values: Primitive[]) => Promise>), +>( + ...params: IfNotImported< + VercelPool, + [ImportTypeError<'@vercel/postgres'>], + [] | [ + TClient, + ] | [ + TClient, + DrizzleConfig, + ] | [ + ( + & DrizzleConfig + & ({ + client?: TClient; + }) + ), + ] + > +): VercelPgDatabase & { + $client: TClient; +} { + // eslint-disable-next-line no-instanceof/no-instanceof + if (typeof params[0] === 'function') { + return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; + } + + if (!params[0] || !(params[0] as { client?: TClient }).client) { + return construct(sql, params[0] as DrizzleConfig | undefined) as any; + } + + const { client, ...drizzleConfig } = params[0] as ({ client?: TClient } & DrizzleConfig); + return construct(client ?? 
sql, drizzleConfig) as any; +} + +export namespace drizzle { + export function mock = Record>( + config?: DrizzleConfig, + ): VercelPgDatabase & { + $client: '$client is not available on drizzle.mock()'; + } { + return construct({} as any, config) as any; + } +} diff --git a/drizzle-orm/src/vercel-postgres/session.ts b/drizzle-orm/src/vercel-postgres/session.ts index 51a987905..a901f24c8 100644 --- a/drizzle-orm/src/vercel-postgres/session.ts +++ b/drizzle-orm/src/vercel-postgres/session.ts @@ -3,6 +3,7 @@ import { type QueryConfig, type QueryResult, type QueryResultRow, + types, type VercelClient, VercelPool, type VercelPoolClient, @@ -20,7 +21,7 @@ import { type Assume, mapResultRow } from '~/utils.ts'; export type VercelPgClient = VercelPool | VercelClient | VercelPoolClient; export class VercelPgPreparedQuery extends PgPreparedQuery { - static readonly [entityKind]: string = 'VercelPgPreparedQuery'; + static override readonly [entityKind]: string = 'VercelPgPreparedQuery'; private rawQuery: QueryConfig; private queryConfig: QueryArrayConfig; @@ -39,11 +40,49 @@ export class VercelPgPreparedQuery extends PgPrep this.rawQuery = { name, text: queryString, + types: { + // @ts-ignore + getTypeParser: (typeId, format) => { + if (typeId === types.builtins.TIMESTAMPTZ) { + return (val: any) => val; + } + if (typeId === types.builtins.TIMESTAMP) { + return (val: any) => val; + } + if (typeId === types.builtins.DATE) { + return (val: any) => val; + } + if (typeId === types.builtins.INTERVAL) { + return (val: any) => val; + } + // @ts-ignore + return types.getTypeParser(typeId, format); + }, + }, }; this.queryConfig = { name, text: queryString, rowMode: 'array', + types: { + // @ts-ignore + getTypeParser: (typeId, format) => { + if (typeId === types.builtins.TIMESTAMPTZ) { + return (val: any) => val; + } + if (typeId === types.builtins.TIMESTAMP) { + return (val: any) => val; + } + if (typeId === types.builtins.DATE) { + return (val: any) => val; + } + if (typeId === 
types.builtins.INTERVAL) { + return (val: any) => val; + } + // @ts-ignore + return types.getTypeParser(typeId, format); + }, + }, }; } @@ -92,7 +131,7 @@ export class VercelPgSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgSession { - static readonly [entityKind]: string = 'VercelPgSession'; + static override readonly [entityKind]: string = 'VercelPgSession'; private logger: Logger; @@ -170,7 +209,7 @@ export class VercelPgTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgTransaction { - static readonly [entityKind]: string = 'VercelPgTransaction'; + static override readonly [entityKind]: string = 'VercelPgTransaction'; override async transaction( transaction: (tx: VercelPgTransaction) => Promise, diff --git a/drizzle-orm/src/xata-http/driver.ts b/drizzle-orm/src/xata-http/driver.ts index 82986f298..ce275a88d 100644 --- a/drizzle-orm/src/xata-http/driver.ts +++ b/drizzle-orm/src/xata-http/driver.ts @@ -40,7 +40,7 @@ export class XataHttpDriver { export class XataHttpDatabase = Record> extends PgDatabase { - static readonly [entityKind]: string = 'XataHttpDatabase'; + static override readonly [entityKind]: string = 'XataHttpDatabase'; /** @internal */ declare readonly session: XataHttpSession>; diff --git a/drizzle-orm/src/xata-http/session.ts b/drizzle-orm/src/xata-http/session.ts index c666ba09d..df4cc1003 100644 --- a/drizzle-orm/src/xata-http/session.ts +++ b/drizzle-orm/src/xata-http/session.ts @@ -22,7 +22,7 @@ export interface QueryResults { } export class XataHttpPreparedQuery extends PgPreparedQuery { - static readonly [entityKind]: string = 'XataHttpPreparedQuery'; + static override readonly [entityKind]: string = 'XataHttpPreparedQuery'; constructor( private client: XataHttpClient, @@ -84,7 +84,7 @@ export class XataHttpSession, TSchem TSchema > { - static readonly [entityKind]: string = 'XataHttpSession'; + static override readonly [entityKind]: string = 
'XataHttpSession'; private logger: Logger; @@ -152,7 +152,7 @@ export class XataTransaction, TSchem TSchema > { - static readonly [entityKind]: string = 'XataHttpTransaction'; + static override readonly [entityKind]: string = 'XataHttpTransaction'; override async transaction(_transaction: (tx: XataTransaction) => Promise): Promise { throw new Error('No transactions support in Xata Http driver'); diff --git a/drizzle-orm/type-tests/mysql/delete.ts b/drizzle-orm/type-tests/mysql/delete.ts index c3e5afbb2..84c827ba8 100644 --- a/drizzle-orm/type-tests/mysql/delete.ts +++ b/drizzle-orm/type-tests/mysql/delete.ts @@ -59,3 +59,7 @@ Expect>; .where(sql``) .where(sql``); } + +{ + db.delete(users).where(sql``).limit(1).orderBy(sql``); +} diff --git a/drizzle-orm/type-tests/mysql/tables.ts b/drizzle-orm/type-tests/mysql/tables.ts index 5c9f7029f..aca5c63d7 100644 --- a/drizzle-orm/type-tests/mysql/tables.ts +++ b/drizzle-orm/type-tests/mysql/tables.ts @@ -186,7 +186,6 @@ export const classes = mysqlTable('classes_table', ({ serial, text }) => ({ export const newYorkers = mysqlView('new_yorkers') .algorithm('merge') - .definer('root@localhost') .sqlSecurity('definer') .as((qb) => { const sq = qb @@ -243,7 +242,6 @@ Expect< { const newYorkers = customSchema.view('new_yorkers') .algorithm('merge') - .definer('root@localhost') .sqlSecurity('definer') .as((qb) => { const sq = qb @@ -304,7 +302,6 @@ Expect< cityId: int('city_id'), }) .algorithm('merge') - .definer('root@localhost') .sqlSecurity('definer') .as( sql`select ${users.id} as user_id, ${cities.id} as city_id from ${users} left join ${cities} on ${ @@ -359,7 +356,6 @@ Expect< cityId: int('city_id'), }) .algorithm('merge') - .definer('root@localhost') .sqlSecurity('definer') .as( sql`select ${users.id} as user_id, ${cities.id} as city_id from ${users} left join ${cities} on ${ diff --git a/drizzle-orm/type-tests/mysql/update.ts b/drizzle-orm/type-tests/mysql/update.ts index dc6967f44..abb127b5d 100644 --- 
a/drizzle-orm/type-tests/mysql/update.ts +++ b/drizzle-orm/type-tests/mysql/update.ts @@ -24,3 +24,7 @@ import { users } from './tables.ts'; // @ts-expect-error method was already called .where(sql``); } + +{ + db.update(users).set({}).where(sql``).limit(1).orderBy(sql``); +} diff --git a/drizzle-orm/type-tests/pg/tables.ts b/drizzle-orm/type-tests/pg/tables.ts index 068f8fcf6..0ae1d8488 100644 --- a/drizzle-orm/type-tests/pg/tables.ts +++ b/drizzle-orm/type-tests/pg/tables.ts @@ -506,8 +506,8 @@ export const newYorkers2 = pgMaterializedView('new_yorkers') .using('btree') .with({ fillfactor: 90, - toast_tuple_target: 0.5, - autovacuum_enabled: true, + toastTupleTarget: 0.5, + autovacuumEnabled: true, }) .tablespace('custom_tablespace') .withNoData() @@ -568,8 +568,8 @@ Expect< .using('btree') .with({ fillfactor: 90, - toast_tuple_target: 0.5, - autovacuum_enabled: true, + toastTupleTarget: 0.5, + autovacuumEnabled: true, }) .tablespace('custom_tablespace') .withNoData() @@ -634,8 +634,8 @@ Expect< .using('btree') .with({ fillfactor: 90, - toast_tuple_target: 0.5, - autovacuum_enabled: true, + toastTupleTarget: 0.5, + autovacuumEnabled: true, }) .tablespace('custom_tablespace') .withNoData() @@ -694,8 +694,8 @@ Expect< .using('btree') .with({ fillfactor: 90, - toast_tuple_target: 0.5, - autovacuum_enabled: true, + toastTupleTarget: 0.5, + autovacuumEnabled: true, }) .tablespace('custom_tablespace') .withNoData() diff --git a/drizzle-orm/type-tests/sqlite/delete.ts b/drizzle-orm/type-tests/sqlite/delete.ts index fcc754740..d943077c8 100644 --- a/drizzle-orm/type-tests/sqlite/delete.ts +++ b/drizzle-orm/type-tests/sqlite/delete.ts @@ -152,3 +152,7 @@ Expect>; // @ts-expect-error method was already called .returning(); } + +{ + db.delete(users).where(sql``).limit(1).orderBy(sql``); +} diff --git a/drizzle-orm/type-tests/sqlite/update.ts b/drizzle-orm/type-tests/sqlite/update.ts index aa1f8051f..cea386b98 100644 --- a/drizzle-orm/type-tests/sqlite/update.ts +++ 
b/drizzle-orm/type-tests/sqlite/update.ts @@ -133,3 +133,7 @@ Expect>; // @ts-expect-error method was already called .where(sql``); } + +{ + db.update(users).set({}).where(sql``).limit(1).orderBy(sql``); +} diff --git a/integration-tests/package.json b/integration-tests/package.json index 78f36fe30..2b26ec374 100644 --- a/integration-tests/package.json +++ b/integration-tests/package.json @@ -15,6 +15,7 @@ "license": "Apache-2.0", "private": true, "devDependencies": { + "@cloudflare/workers-types": "^4.20241004.0", "@libsql/client": "^0.10.0", "@neondatabase/serverless": "0.9.0", "@originjs/vite-plugin-commonjs": "^1.0.3", @@ -28,6 +29,7 @@ "@types/pg": "^8.10.1", "@types/sql.js": "^1.4.4", "@types/uuid": "^9.0.1", + "@types/ws": "^8.5.10", "@vitest/ui": "^1.6.0", "ava": "^5.3.0", "axios": "^1.4.0", @@ -42,8 +44,8 @@ "@aws-sdk/client-rds-data": "^3.549.0", "@aws-sdk/credential-providers": "^3.549.0", "@electric-sql/pglite": "^0.1.1", - "@miniflare/d1": "^2.14.2", - "@miniflare/shared": "^2.14.2", + "@miniflare/d1": "^2.14.4", + "@miniflare/shared": "^2.14.4", "@planetscale/database": "^1.16.0", "@prisma/client": "5.14.0", "@tidbcloud/serverless": "^0.1.1", @@ -70,7 +72,8 @@ "sst": "^3.0.4", "uuid": "^9.0.0", "uvu": "^0.5.6", - "vitest": "^1.6.0", + "vitest": "^2.1.2", + "ws": "^8.16.0", "zod": "^3.20.2" } } diff --git a/integration-tests/tests/bun/sqlite-nw.test.ts b/integration-tests/tests/bun/sqlite-nw.test.ts index f6c11a698..d61d1ab4e 100644 --- a/integration-tests/tests/bun/sqlite-nw.test.ts +++ b/integration-tests/tests/bun/sqlite-nw.test.ts @@ -1,3 +1,4 @@ +/// import { Database } from 'bun:sqlite'; import { DefaultLogger, sql } from 'drizzle-orm'; import type { BunSQLiteDatabase } from 'drizzle-orm/bun-sqlite'; diff --git a/integration-tests/tests/bun/sqlite.test.ts b/integration-tests/tests/bun/sqlite.test.ts index faa3f8eb1..0065b1928 100644 --- a/integration-tests/tests/bun/sqlite.test.ts +++ b/integration-tests/tests/bun/sqlite.test.ts @@ -1,3 +1,4 @@ 
+/// import { Database } from 'bun:sqlite'; import { DefaultLogger, sql } from 'drizzle-orm'; import type { BunSQLiteDatabase } from 'drizzle-orm/bun-sqlite'; diff --git a/integration-tests/tests/common.ts b/integration-tests/tests/common.ts index 55daa43ce..0a4a61e94 100644 --- a/integration-tests/tests/common.ts +++ b/integration-tests/tests/common.ts @@ -2,7 +2,7 @@ import { beforeEach } from 'vitest'; export function skipTests(names: string[]) { beforeEach((ctx) => { - if (ctx.task.suite.name === 'common' && names.includes(ctx.task.name)) { + if (ctx.task.suite?.name === 'common' && names.includes(ctx.task.name)) { ctx.skip(); } }); diff --git a/integration-tests/tests/imports/index.test.ts b/integration-tests/tests/imports/index.test.ts index dee96e84f..875d1d24c 100644 --- a/integration-tests/tests/imports/index.test.ts +++ b/integration-tests/tests/imports/index.test.ts @@ -18,7 +18,10 @@ it('dynamic imports check for CommonJS', async () => { const promises: ProcessPromise[] = []; for (const [i, key] of Object.keys(pj['exports']).entries()) { const o1 = path.join('drizzle-orm', key); - if (o1.startsWith('drizzle-orm/pglite') || o1.startsWith('drizzle-orm/expo-sqlite')) { + if ( + o1.startsWith('drizzle-orm/bun-sqlite') || o1.startsWith('drizzle-orm/pglite') + || o1.startsWith('drizzle-orm/expo-sqlite') + ) { continue; } fs.writeFileSync(`${IMPORTS_FOLDER}/imports_${i}.cjs`, 'requ'); @@ -43,7 +46,7 @@ it('dynamic imports check for ESM', async () => { const promises: ProcessPromise[] = []; for (const [i, key] of Object.keys(pj['exports']).entries()) { const o1 = path.join('drizzle-orm', key); - if (o1.startsWith('drizzle-orm/expo-sqlite')) { + if (o1.startsWith('drizzle-orm/bun-sqlite') || o1.startsWith('drizzle-orm/expo-sqlite')) { continue; } fs.writeFileSync(`${IMPORTS_FOLDER}/imports_${i}.mjs`, 'imp'); diff --git a/integration-tests/tests/mysql/mysql-common.ts b/integration-tests/tests/mysql/mysql-common.ts index eaee85bf6..45b96f391 100644 --- 
a/integration-tests/tests/mysql/mysql-common.ts +++ b/integration-tests/tests/mysql/mysql-common.ts @@ -3809,54 +3809,95 @@ export function tests(driver?: string) { expect(users.length).toBeGreaterThan(0); }); - }); - test('Object keys as column names', async (ctx) => { - const { db } = ctx.mysql; - - // Tests the following: - // Column with required config - // Column with optional config without providing a value - // Column with optional config providing a value - // Column without config - const users = mysqlTable('users', { - id: bigint({ mode: 'number' }).autoincrement().primaryKey(), - createdAt: timestamp(), - updatedAt: timestamp({ fsp: 3 }), - admin: boolean(), - }); - - await db.execute(sql`drop table if exists users`); - await db.execute( - sql` - create table users ( - \`id\` bigint auto_increment primary key, - \`createdAt\` timestamp, - \`updatedAt\` timestamp(3), - \`admin\` boolean - ) - `, - ); - - await db.insert(users).values([ - { createdAt: sql`now() - interval 30 day`, updatedAt: sql`now() - interval 1 day`, admin: true }, - { createdAt: sql`now() - interval 1 day`, updatedAt: sql`now() - interval 30 day`, admin: true }, - { createdAt: sql`now() - interval 1 day`, updatedAt: sql`now() - interval 1 day`, admin: false }, - ]); - const result = await db - .select({ id: users.id, admin: users.admin }) - .from(users) - .where( - and( - gt(users.createdAt, sql`now() - interval 7 day`), - gt(users.updatedAt, sql`now() - interval 7 day`), - ), + test('update with limit and order by', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(usersTable).values([ + { name: 'Barry', verified: false }, + { name: 'Alan', verified: false }, + { name: 'Carl', verified: false }, + ]); + + await db.update(usersTable).set({ verified: true }).limit(2).orderBy(asc(usersTable.name)); + + const result = await db.select({ name: usersTable.name, verified: usersTable.verified }).from(usersTable).orderBy( + asc(usersTable.name), + ); + 
expect(result).toStrictEqual([ + { name: 'Alan', verified: true }, + { name: 'Barry', verified: true }, + { name: 'Carl', verified: false }, + ]); + }); + + test('delete with limit and order by', async (ctx) => { + const { db } = ctx.mysql; + + await db.insert(usersTable).values([ + { name: 'Barry', verified: false }, + { name: 'Alan', verified: false }, + { name: 'Carl', verified: false }, + ]); + + await db.delete(usersTable).where(eq(usersTable.verified, false)).limit(1).orderBy(asc(usersTable.name)); + + const result = await db.select({ name: usersTable.name, verified: usersTable.verified }).from(usersTable).orderBy( + asc(usersTable.name), + ); + expect(result).toStrictEqual([ + { name: 'Barry', verified: false }, + { name: 'Carl', verified: false }, + ]); + }); + + test('Object keys as column names', async (ctx) => { + const { db } = ctx.mysql; + + // Tests the following: + // Column with required config + // Column with optional config without providing a value + // Column with optional config providing a value + // Column without config + const users = mysqlTable('users', { + id: bigint({ mode: 'number' }).autoincrement().primaryKey(), + createdAt: timestamp(), + updatedAt: timestamp({ fsp: 3 }), + admin: boolean(), + }); + + await db.execute(sql`drop table if exists users`); + await db.execute( + sql` + create table users ( + \`id\` bigint auto_increment primary key, + \`createdAt\` timestamp, + \`updatedAt\` timestamp(3), + \`admin\` boolean + ) + `, ); - expect(result).toEqual([ - { id: 3, admin: false }, - ]); + await db.insert(users).values([ + { createdAt: sql`now() - interval 30 day`, updatedAt: sql`now() - interval 1 day`, admin: true }, + { createdAt: sql`now() - interval 1 day`, updatedAt: sql`now() - interval 30 day`, admin: true }, + { createdAt: sql`now() - interval 1 day`, updatedAt: sql`now() - interval 1 day`, admin: false }, + ]); + const result = await db + .select({ id: users.id, admin: users.admin }) + .from(users) + .where( + and( + 
gt(users.createdAt, sql`now() - interval 7 day`), + gt(users.updatedAt, sql`now() - interval 7 day`), + ), + ); - await db.execute(sql`drop table users`); + expect(result).toEqual([ + { id: 3, admin: false }, + ]); + + await db.execute(sql`drop table users`); + }); }); } diff --git a/integration-tests/tests/pg/neon-serverless.test.ts b/integration-tests/tests/pg/neon-serverless.test.ts new file mode 100644 index 000000000..5a77809fa --- /dev/null +++ b/integration-tests/tests/pg/neon-serverless.test.ts @@ -0,0 +1,582 @@ +import { neonConfig, Pool } from '@neondatabase/serverless'; +import retry from 'async-retry'; +import { eq, sql } from 'drizzle-orm'; +import { drizzle, type NeonDatabase } from 'drizzle-orm/neon-serverless'; +import { migrate } from 'drizzle-orm/neon-serverless/migrator'; +import { pgTable, serial, timestamp } from 'drizzle-orm/pg-core'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import ws from 'ws'; +import { skipTests } from '~/common'; +import { randomString } from '~/utils'; +import { mySchema, tests, usersMigratorTable, usersMySchemaTable, usersTable } from './pg-common'; + +const ENABLE_LOGGING = false; + +let db: NeonDatabase; +let client: Pool; + +beforeAll(async () => { + const connectionString = process.env['NEON_CONNECTION_STRING']; + if (!connectionString) { + throw new Error('NEON_CONNECTION_STRING is not defined'); + } + + neonConfig.webSocketConstructor = ws; + + client = await retry(async () => { + client = new Pool({ connectionString }); + + const cnt = await client.connect(); + cnt.release(); + + return client; + }, { + retries: 20, + factor: 1, + minTimeout: 250, + maxTimeout: 250, + randomize: false, + onRetry() { + client?.end(); + }, + }); + db = drizzle(client, { logger: ENABLE_LOGGING }); +}); + +afterAll(async () => { + await client?.end(); +}); + +beforeEach((ctx) => { + ctx.pg = { + db, + }; +}); + +test('migrator : default migration strategy', async () => { + await db.execute(sql`drop 
table if exists all_columns`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + + await migrate(db, { migrationsFolder: './drizzle2/pg' }); + + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + + const result = await db.select().from(usersMigratorTable); + + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table all_columns`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); +}); + +test('migrator : migrate with custom schema', async () => { + await db.execute(sql`drop table if exists all_columns`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + + await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsSchema: 'custom_migrations' }); + + // test if the custom migrations table was created + const { rowCount } = await db.execute(sql`select * from custom_migrations."__drizzle_migrations";`); + expect(rowCount && rowCount > 0).toBeTruthy(); + + // test if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table all_columns`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table custom_migrations."__drizzle_migrations"`); +}); + +test('migrator : migrate with custom table', async () => { + const customTable = randomString(); + await db.execute(sql`drop table if exists all_columns`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + + await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: 
customTable }); + + // test if the custom migrations table was created + const { rowCount } = await db.execute(sql`select * from "drizzle".${sql.identifier(customTable)};`); + expect(rowCount && rowCount > 0).toBeTruthy(); + + // test if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table all_columns`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table "drizzle".${sql.identifier(customTable)}`); +}); + +test('migrator : migrate with custom table and custom schema', async () => { + const customTable = randomString(); + await db.execute(sql`drop table if exists all_columns`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + + await migrate(db, { + migrationsFolder: './drizzle2/pg', + migrationsTable: customTable, + migrationsSchema: 'custom_migrations', + }); + + // test if the custom migrations table was created + const { rowCount } = await db.execute( + sql`select * from custom_migrations.${sql.identifier(customTable)};`, + ); + expect(rowCount && rowCount > 0).toBeTruthy(); + + // test if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table all_columns`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table custom_migrations.${sql.identifier(customTable)}`); +}); + +test('all date and time columns without timezone first case mode string', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: 
timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) not null + ) + `); + + // 1. Insert date in string format without timezone in it + await db.insert(table).values([ + { timestamp: '2022-01-01 02:00:00.123456' }, + ]); + + // 2, Select in string format and check that values are the same + const result = await db.select().from(table); + + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456' }]); + + // 3. Select as raw query and check that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('all date and time columns without timezone second case mode string', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) not null + ) + `); + + // 1. 
Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: '2022-01-01T02:00:00.123456-02' }, + ]); + + // 2, Select as raw query and check that values are the same + const result = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + expect(result.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('all date and time columns without timezone third case mode date', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'date', precision: 3 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(3) not null + ) + `); + + const insertedDate = new Date('2022-01-01 20:00:00.123+04'); + + // 1. Insert date as new date + await db.insert(table).values([ + { timestamp: insertedDate }, + ]); + + // 2, Select as raw query as string + const result = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + // 3. 
Compare both dates using orm mapping - Need to add 'Z' to tell JS that it is UTC + expect(new Date(result.rows[0]!.timestamp_string + 'Z').getTime()).toBe(insertedDate.getTime()); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('test mode string for timestamp with timezone', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) with time zone not null + ) + `); + + const timestampString = '2022-01-01 00:00:00.123456-0200'; + + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); + + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); + + // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); + + // 3. 
Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('test mode date for timestamp with timezone', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(3) with time zone not null + ) + `); + + const timestampString = new Date('2022-01-01 00:00:00.456-0200'); + + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); + + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); + + // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result).toEqual([{ id: 1, timestamp: timestampString }]); + + // 3. 
Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.456+00' }]); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('test mode string for timestamp with timezone in UTC timezone', async () => { + // get current timezone from db + const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); + + // set timezone to UTC + await db.execute(sql`set time zone 'UTC'`); + + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) with time zone not null + ) + `); + + const timestampString = '2022-01-01 00:00:00.123456-0200'; + + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); + + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); + + // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); + + // 3. 
Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); + + await db.execute(sql`set time zone '${sql.raw(timezone.rows[0]!.TimeZone)}'`); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test.skip('test mode string for timestamp with timezone in different timezone', async () => { + // get current timezone from db + const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); + + // set timezone to HST (UTC - 10) + await db.execute(sql`set time zone 'HST'`); + + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(6) with time zone not null + ) + `); + + const timestampString = '2022-01-01 00:00:00.123456-1000'; + + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); + + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); + + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 00:00:00.123456-10' }]); + + // 3. 
Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); + + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 00:00:00.123456+00' }]); + + await db.execute(sql`set time zone '${sql.raw(timezone.rows[0]!.TimeZone)}'`); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('select all fields', async (ctx) => { + const { db } = ctx.pg; + + const now = Date.now(); + + await db.insert(usersTable).values({ name: 'John' }); + const result = await db.select().from(usersTable); + + expect(result[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(result[0]!.createdAt.getTime() - now)).toBeLessThan(3000); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); +}); + +test('update with returning all fields', async (ctx) => { + const { db } = ctx.pg; + + const now = Date.now(); + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db + .update(usersTable) + .set({ name: 'Jane' }) + .where(eq(usersTable.name, 'John')) + .returning(); + + expect(users[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(3000); + expect(users).toEqual([ + { id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }, + ]); +}); + +test('delete with returning all fields', async (ctx) => { + const { db } = ctx.pg; + + const now = Date.now(); + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning(); + + expect(users[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(3000); + expect(users).toEqual([ + { id: 1, name: 'John', verified: false, jsonb: null, createdAt: users[0]!.createdAt }, + ]); +}); + +test('mySchema :: select all fields', async 
(ctx) => { + const { db } = ctx.pg; + + const now = Date.now(); + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const result = await db.select().from(usersMySchemaTable); + + expect(result[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(result[0]!.createdAt.getTime() - now)).toBeLessThan(3000); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); +}); + +test('mySchema :: delete with returning all fields', async (ctx) => { + const { db } = ctx.pg; + + const now = Date.now(); + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const users = await db.delete(usersMySchemaTable).where(eq(usersMySchemaTable.name, 'John')).returning(); + + expect(users[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(3000); + expect(users).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); +}); + +skipTests([ + 'migrator : default migration strategy', + 'migrator : migrate with custom schema', + 'migrator : migrate with custom table', + 'migrator : migrate with custom table and custom schema', + 'insert via db.execute + select via db.execute', + 'insert via db.execute + returning', + 'insert via db.execute w/ query builder', + 'all date and time columns without timezone first case mode string', + 'all date and time columns without timezone third case mode date', + 'test mode string for timestamp with timezone', + 'test mode date for timestamp with timezone', + 'test mode string for timestamp with timezone in UTC timezone', + 'nested transaction rollback', + 'transaction rollback', + 'nested transaction', + 'transaction', + 'timestamp timezone', + 'test $onUpdateFn and $onUpdate works as $default', + 'select all fields', + 'update with returning all fields', + 'delete with returning all fields', + 'mySchema :: select all fields', + 'mySchema :: delete with returning all fields', 
+]); +tests(); + +beforeEach(async () => { + await db.execute(sql`drop schema if exists public cascade`); + await db.execute(sql`drop schema if exists ${mySchema} cascade`); + + await db.execute(sql`create schema public`); + await db.execute(sql`create schema ${mySchema}`); + + await db.execute( + sql` + create table users ( + id serial primary key, + name text not null, + verified boolean not null default false, + jsonb jsonb, + created_at timestamptz not null default now() + ) + `, + ); + + await db.execute( + sql` + create table ${usersMySchemaTable} ( + id serial primary key, + name text not null, + verified boolean not null default false, + jsonb jsonb, + created_at timestamptz not null default now() + ) + `, + ); +}); + +test('insert via db.execute + select via db.execute', async () => { + await db.execute( + sql`insert into ${usersTable} (${sql.identifier(usersTable.name.name)}) values (${'John'})`, + ); + + const result = await db.execute<{ id: number; name: string }>( + sql`select id, name from "users"`, + ); + expect(result.rows).toEqual([{ id: 1, name: 'John' }]); +}); + +test('insert via db.execute + returning', async () => { + const inserted = await db.execute<{ id: number; name: string }>( + sql`insert into ${usersTable} (${ + sql.identifier( + usersTable.name.name, + ) + }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, + ); + expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); +}); + +test('insert via db.execute w/ query builder', async () => { + const inserted = await db.execute>( + db + .insert(usersTable) + .values({ name: 'John' }) + .returning({ id: usersTable.id, name: usersTable.name }), + ); + expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); +}); diff --git a/integration-tests/tests/pg/pg-common.ts b/integration-tests/tests/pg/pg-common.ts index 0ea8f250f..78eecf328 100644 --- a/integration-tests/tests/pg/pg-common.ts +++ b/integration-tests/tests/pg/pg-common.ts @@ -179,9 +179,9 @@ const aggregateTable = 
pgTable('aggregate_table', { }); // To test another schema and multischema -const mySchema = pgSchema('mySchema'); +export const mySchema = pgSchema('mySchema'); -const usersMySchemaTable = mySchema.table('users', { +export const usersMySchemaTable = mySchema.table('users', { id: serial('id').primaryKey(), name: text('name').notNull(), verified: boolean('verified').notNull().default(false), diff --git a/integration-tests/tests/replicas/mysql.test.ts b/integration-tests/tests/replicas/mysql.test.ts index a7de02411..673a8da65 100644 --- a/integration-tests/tests/replicas/mysql.test.ts +++ b/integration-tests/tests/replicas/mysql.test.ts @@ -15,9 +15,9 @@ const users = mysqlTable('users', { describe('[select] read replicas mysql', () => { it('primary select', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); @@ -35,9 +35,9 @@ describe('[select] read replicas mysql', () => { }); it('random replica select', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); @@ -64,8 +64,8 @@ describe('[select] read replicas mysql', () => { }); it('single read replica select', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); @@ -84,8 +84,8 @@ describe('[select] read replicas mysql', () => { }); it('single read replica select + primary select', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const 
read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); @@ -105,9 +105,9 @@ describe('[select] read replicas mysql', () => { }); it('always first read select', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; @@ -134,9 +134,9 @@ describe('[select] read replicas mysql', () => { describe('[selectDistinct] read replicas mysql', () => { it('primary selectDistinct', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); @@ -153,9 +153,9 @@ describe('[selectDistinct] read replicas mysql', () => { }); it('random replica selectDistinct', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); @@ -181,8 +181,8 @@ describe('[selectDistinct] read replicas mysql', () => { }); it('single read replica selectDistinct', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); @@ -201,8 +201,8 @@ describe('[selectDistinct] read replicas mysql', () => { }); it('single read replica selectDistinct + primary selectDistinct', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); @@ 
-222,9 +222,9 @@ describe('[selectDistinct] read replicas mysql', () => { }); it('always first read selectDistinct', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; @@ -250,9 +250,9 @@ describe('[selectDistinct] read replicas mysql', () => { describe('[with] read replicas mysql', () => { it('primary with', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); @@ -273,9 +273,9 @@ describe('[with] read replicas mysql', () => { }); it('random replica with', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); @@ -299,8 +299,8 @@ describe('[with] read replicas mysql', () => { }); it('single read replica with', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); @@ -317,8 +317,8 @@ describe('[with] read replicas mysql', () => { }); it('single read replica with + primary with', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); @@ -336,9 +336,9 @@ describe('[with] read replicas mysql', () => { }); it('always first read with', () => { - const primaryDb = drizzle({} 
as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; @@ -367,9 +367,9 @@ describe('[with] read replicas mysql', () => { describe('[update] replicas mysql', () => { it('primary update', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); @@ -402,9 +402,9 @@ describe('[update] replicas mysql', () => { describe('[delete] replicas mysql', () => { it('primary delete', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); @@ -438,9 +438,9 @@ describe('[delete] replicas mysql', () => { describe('[insert] replicas mysql', () => { it('primary insert', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); @@ -473,9 +473,9 @@ describe('[insert] replicas mysql', () => { describe('[execute] replicas mysql', () => { it('primary execute', async () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); @@ -515,9 +515,9 @@ describe('[execute] replicas mysql', () => { describe('[transaction] replicas mysql', () => { 
it('primary transaction', async () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); @@ -558,9 +558,9 @@ describe('[transaction] replicas mysql', () => { describe('[findFirst] read replicas mysql', () => { it('primary findFirst', () => { - const primaryDb = drizzle({} as any, { schema: { usersTable }, mode: 'default' }); - const read1 = drizzle({} as any, { schema: { usersTable }, mode: 'default' }); - const read2 = drizzle({} as any, { schema: { usersTable }, mode: 'default' }); + const primaryDb = drizzle.mock({ schema: { usersTable }, mode: 'default' }); + const read1 = drizzle.mock({ schema: { usersTable }, mode: 'default' }); + const read2 = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const db = withReplicas(primaryDb, [read1, read2]); @@ -578,9 +578,9 @@ describe('[findFirst] read replicas mysql', () => { }); it('random replica findFirst', () => { - const primaryDb = drizzle({} as any, { schema: { usersTable }, mode: 'default' }); - const read1 = drizzle({} as any, { schema: { usersTable }, mode: 'default' }); - const read2 = drizzle({} as any, { schema: { usersTable }, mode: 'default' }); + const primaryDb = drizzle.mock({ schema: { usersTable }, mode: 'default' }); + const read1 = drizzle.mock({ schema: { usersTable }, mode: 'default' }); + const read2 = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); @@ -607,8 +607,8 @@ describe('[findFirst] read replicas mysql', () => { }); it('single read replica findFirst', () => { - const primaryDb = drizzle({} as any, { schema: { usersTable }, mode: 'default' }); - const read1 = drizzle({} as any, { schema: { usersTable }, mode: 'default' }); + const primaryDb = drizzle.mock({ schema: { 
usersTable }, mode: 'default' }); + const read1 = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const db = withReplicas(primaryDb, [read1]); @@ -625,8 +625,8 @@ describe('[findFirst] read replicas mysql', () => { }); it('single read replica findFirst + primary findFirst', () => { - const primaryDb = drizzle({} as any, { schema: { usersTable }, mode: 'default' }); - const read1 = drizzle({} as any, { schema: { usersTable }, mode: 'default' }); + const primaryDb = drizzle.mock({ schema: { usersTable }, mode: 'default' }); + const read1 = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const db = withReplicas(primaryDb, [read1]); @@ -644,9 +644,9 @@ describe('[findFirst] read replicas mysql', () => { }); it('always first read findFirst', () => { - const primaryDb = drizzle({} as any, { schema: { usersTable }, mode: 'default' }); - const read1 = drizzle({} as any, { schema: { usersTable }, mode: 'default' }); - const read2 = drizzle({} as any, { schema: { usersTable }, mode: 'default' }); + const primaryDb = drizzle.mock({ schema: { usersTable }, mode: 'default' }); + const read1 = drizzle.mock({ schema: { usersTable }, mode: 'default' }); + const read2 = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; @@ -670,9 +670,9 @@ describe('[findFirst] read replicas mysql', () => { describe('[findMany] read replicas mysql', () => { it('primary findMany', () => { - const primaryDb = drizzle({} as any, { schema: { usersTable }, mode: 'default' }); - const read1 = drizzle({} as any, { schema: { usersTable }, mode: 'default' }); - const read2 = drizzle({} as any, { schema: { usersTable }, mode: 'default' }); + const primaryDb = drizzle.mock({ schema: { usersTable }, mode: 'default' }); + const read1 = drizzle.mock({ schema: { usersTable }, mode: 'default' }); + const read2 = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const db = 
withReplicas(primaryDb, [read1, read2]); @@ -691,9 +691,9 @@ describe('[findMany] read replicas mysql', () => { }); it('random replica findMany', () => { - const primaryDb = drizzle({} as any, { schema: { usersTable }, mode: 'default' }); - const read1 = drizzle({} as any, { schema: { usersTable }, mode: 'default' }); - const read2 = drizzle({} as any, { schema: { usersTable }, mode: 'default' }); + const primaryDb = drizzle.mock({ schema: { usersTable }, mode: 'default' }); + const read1 = drizzle.mock({ schema: { usersTable }, mode: 'default' }); + const read2 = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); @@ -724,8 +724,8 @@ describe('[findMany] read replicas mysql', () => { }); it('single read replica findMany', () => { - const primaryDb = drizzle({} as any, { schema: { usersTable }, mode: 'default' }); - const read1 = drizzle({} as any, { schema: { usersTable }, mode: 'default' }); + const primaryDb = drizzle.mock({ schema: { usersTable }, mode: 'default' }); + const read1 = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const db = withReplicas(primaryDb, [read1]); @@ -748,8 +748,8 @@ describe('[findMany] read replicas mysql', () => { }); it('single read replica findMany + primary findMany', () => { - const primaryDb = drizzle({} as any, { schema: { usersTable }, mode: 'default' }); - const read1 = drizzle({} as any, { schema: { usersTable }, mode: 'default' }); + const primaryDb = drizzle.mock({ schema: { usersTable }, mode: 'default' }); + const read1 = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const db = withReplicas(primaryDb, [read1]); @@ -774,9 +774,9 @@ describe('[findMany] read replicas mysql', () => { }); it('always first read findMany', () => { - const primaryDb = drizzle({} as any, { schema: { usersTable }, mode: 'default' }); - const read1 = drizzle({} as any, { schema: { usersTable }, mode: 'default' }); - const read2 
= drizzle({} as any, { schema: { usersTable }, mode: 'default' }); + const primaryDb = drizzle.mock({ schema: { usersTable }, mode: 'default' }); + const read1 = drizzle.mock({ schema: { usersTable }, mode: 'default' }); + const read2 = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; diff --git a/integration-tests/tests/replicas/postgres.test.ts b/integration-tests/tests/replicas/postgres.test.ts index 6165ae413..0860aac6a 100644 --- a/integration-tests/tests/replicas/postgres.test.ts +++ b/integration-tests/tests/replicas/postgres.test.ts @@ -17,9 +17,9 @@ const users = pgTable('users', { describe('[select] read replicas postgres', () => { it('primary select', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); @@ -37,9 +37,9 @@ describe('[select] read replicas postgres', () => { }); it('random replica select', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); @@ -66,8 +66,8 @@ describe('[select] read replicas postgres', () => { }); it('single read replica select', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); @@ -86,8 +86,8 @@ describe('[select] read replicas postgres', () => { }); it('single read replica select + primary select', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); + const primaryDb = drizzle.mock(); 
+ const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); @@ -107,9 +107,9 @@ describe('[select] read replicas postgres', () => { }); it('always first read select', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; @@ -136,9 +136,9 @@ describe('[select] read replicas postgres', () => { describe('[selectDistinct] read replicas postgres', () => { it('primary selectDistinct', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); @@ -155,9 +155,9 @@ describe('[selectDistinct] read replicas postgres', () => { }); it('random replica selectDistinct', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); @@ -183,8 +183,8 @@ describe('[selectDistinct] read replicas postgres', () => { }); it('single read replica selectDistinct', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); @@ -203,8 +203,8 @@ describe('[selectDistinct] read replicas postgres', () => { }); it('single read replica selectDistinct + primary selectDistinct', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); const db = 
withReplicas(primaryDb, [read1]); @@ -224,9 +224,9 @@ describe('[selectDistinct] read replicas postgres', () => { }); it('always first read selectDistinct', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; @@ -252,9 +252,9 @@ describe('[selectDistinct] read replicas postgres', () => { describe('[with] read replicas postgres', () => { it('primary with', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); @@ -275,9 +275,9 @@ describe('[with] read replicas postgres', () => { }); it('random replica with', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); @@ -301,8 +301,8 @@ describe('[with] read replicas postgres', () => { }); it('single read replica with', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); @@ -319,8 +319,8 @@ describe('[with] read replicas postgres', () => { }); it('single read replica with + primary with', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); @@ -338,9 +338,9 @@ describe('[with] read replicas postgres', () => { }); it('always 
first read with', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; @@ -369,9 +369,9 @@ describe('[with] read replicas postgres', () => { describe('[update] replicas postgres', () => { it('primary update', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); @@ -404,9 +404,9 @@ describe('[update] replicas postgres', () => { describe('[delete] replicas postgres', () => { it('primary delete', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); @@ -440,9 +440,9 @@ describe('[delete] replicas postgres', () => { describe('[insert] replicas postgres', () => { it('primary insert', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); @@ -475,9 +475,9 @@ describe('[insert] replicas postgres', () => { describe('[execute] replicas postgres', () => { it('primary execute', async () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); @@ -517,9 +517,9 @@ 
describe('[execute] replicas postgres', () => { describe('[transaction] replicas postgres', () => { it('primary transaction', async () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); @@ -560,9 +560,9 @@ describe('[transaction] replicas postgres', () => { describe('[findFirst] read replicas postgres', () => { it('primary findFirst', () => { - const primaryDb = drizzle({} as any, { schema: { usersTable } }); - const read1 = drizzle({} as any, { schema: { usersTable } }); - const read2 = drizzle({} as any, { schema: { usersTable } }); + const primaryDb = drizzle.mock({ schema: { usersTable } }); + const read1 = drizzle.mock({ schema: { usersTable } }); + const read2 = drizzle.mock({ schema: { usersTable } }); const db = withReplicas(primaryDb, [read1, read2]); @@ -580,9 +580,9 @@ describe('[findFirst] read replicas postgres', () => { }); it('random replica findFirst', () => { - const primaryDb = drizzle({} as any, { schema: { usersTable } }); - const read1 = drizzle({} as any, { schema: { usersTable } }); - const read2 = drizzle({} as any, { schema: { usersTable } }); + const primaryDb = drizzle.mock({ schema: { usersTable } }); + const read1 = drizzle.mock({ schema: { usersTable } }); + const read2 = drizzle.mock({ schema: { usersTable } }); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); @@ -611,8 +611,8 @@ describe('[findFirst] read replicas postgres', () => { }); it('single read replica findFirst', () => { - const primaryDb = drizzle({} as any, { schema: { usersTable } }); - const read1 = drizzle({} as any, { schema: { usersTable } }); + const primaryDb = drizzle.mock({ schema: { usersTable } }); + const read1 = drizzle.mock({ schema: { usersTable } }); const db = withReplicas(primaryDb, [read1]); @@ -629,8 
+629,8 @@ describe('[findFirst] read replicas postgres', () => { }); it('single read replica findFirst + primary findFirst', () => { - const primaryDb = drizzle({} as any, { schema: { usersTable } }); - const read1 = drizzle({} as any, { schema: { usersTable } }); + const primaryDb = drizzle.mock({ schema: { usersTable } }); + const read1 = drizzle.mock({ schema: { usersTable } }); const db = withReplicas(primaryDb, [read1]); @@ -648,9 +648,9 @@ describe('[findFirst] read replicas postgres', () => { }); it('always first read findFirst', () => { - const primaryDb = drizzle({} as any, { schema: { usersTable } }); - const read1 = drizzle({} as any, { schema: { usersTable } }); - const read2 = drizzle({} as any, { schema: { usersTable } }); + const primaryDb = drizzle.mock({ schema: { usersTable } }); + const read1 = drizzle.mock({ schema: { usersTable } }); + const read2 = drizzle.mock({ schema: { usersTable } }); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; @@ -674,9 +674,9 @@ describe('[findFirst] read replicas postgres', () => { describe('[findMany] read replicas postgres', () => { it('primary findMany', () => { - const primaryDb = drizzle({} as any, { schema: { usersTable } }); - const read1 = drizzle({} as any, { schema: { usersTable } }); - const read2 = drizzle({} as any, { schema: { usersTable } }); + const primaryDb = drizzle.mock({ schema: { usersTable } }); + const read1 = drizzle.mock({ schema: { usersTable } }); + const read2 = drizzle.mock({ schema: { usersTable } }); const db = withReplicas(primaryDb, [read1, read2]); @@ -697,9 +697,9 @@ describe('[findMany] read replicas postgres', () => { }); it('random replica findMany', () => { - const primaryDb = drizzle({} as any, { schema: { usersTable } }); - const read1 = drizzle({} as any, { schema: { usersTable } }); - const read2 = drizzle({} as any, { schema: { usersTable } }); + const primaryDb = drizzle.mock({ schema: { usersTable } }); + const read1 = 
drizzle.mock({ schema: { usersTable } }); + const read2 = drizzle.mock({ schema: { usersTable } }); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); @@ -734,8 +734,8 @@ describe('[findMany] read replicas postgres', () => { }); it('single read replica findMany', () => { - const primaryDb = drizzle({} as any, { schema: { usersTable } }); - const read1 = drizzle({} as any, { schema: { usersTable } }); + const primaryDb = drizzle.mock({ schema: { usersTable } }); + const read1 = drizzle.mock({ schema: { usersTable } }); const db = withReplicas(primaryDb, [read1]); @@ -762,8 +762,8 @@ describe('[findMany] read replicas postgres', () => { }); it('single read replica findMany + primary findMany', () => { - const primaryDb = drizzle({} as any, { schema: { usersTable } }); - const read1 = drizzle({} as any, { schema: { usersTable } }); + const primaryDb = drizzle.mock({ schema: { usersTable } }); + const read1 = drizzle.mock({ schema: { usersTable } }); const db = withReplicas(primaryDb, [read1]); @@ -792,9 +792,9 @@ describe('[findMany] read replicas postgres', () => { }); it('always first read findMany', () => { - const primaryDb = drizzle({} as any, { schema: { usersTable } }); - const read1 = drizzle({} as any, { schema: { usersTable } }); - const read2 = drizzle({} as any, { schema: { usersTable } }); + const primaryDb = drizzle.mock({ schema: { usersTable } }); + const read1 = drizzle.mock({ schema: { usersTable } }); + const read2 = drizzle.mock({ schema: { usersTable } }); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; diff --git a/integration-tests/tests/replicas/sqlite.test.ts b/integration-tests/tests/replicas/sqlite.test.ts index 4093a6298..aab55bbfd 100644 --- a/integration-tests/tests/replicas/sqlite.test.ts +++ b/integration-tests/tests/replicas/sqlite.test.ts @@ -15,9 +15,9 @@ const users = sqliteTable('users', { describe('[select] read replicas sqlite', () => { it('primary 
select', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); @@ -35,9 +35,9 @@ describe('[select] read replicas sqlite', () => { }); it('random replica select', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); @@ -64,8 +64,8 @@ describe('[select] read replicas sqlite', () => { }); it('single read replica select', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); @@ -84,8 +84,8 @@ describe('[select] read replicas sqlite', () => { }); it('single read replica select + primary select', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); @@ -105,9 +105,9 @@ describe('[select] read replicas sqlite', () => { }); it('always first read select', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; @@ -134,9 +134,9 @@ describe('[select] read replicas sqlite', () => { describe('[selectDistinct] read replicas sqlite', () => { it('primary selectDistinct', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as 
any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); @@ -153,9 +153,9 @@ describe('[selectDistinct] read replicas sqlite', () => { }); it('random replica selectDistinct', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); @@ -181,8 +181,8 @@ describe('[selectDistinct] read replicas sqlite', () => { }); it('single read replica selectDistinct', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); @@ -201,8 +201,8 @@ describe('[selectDistinct] read replicas sqlite', () => { }); it('single read replica selectDistinct + primary selectDistinct', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); @@ -222,9 +222,9 @@ describe('[selectDistinct] read replicas sqlite', () => { }); it('always first read selectDistinct', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; @@ -250,9 +250,9 @@ describe('[selectDistinct] read replicas sqlite', () => { describe('[with] read replicas sqlite', () => { it('primary with', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = 
drizzle.mock(); + const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); @@ -273,9 +273,9 @@ describe('[with] read replicas sqlite', () => { }); it('random replica with', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); @@ -299,8 +299,8 @@ describe('[with] read replicas sqlite', () => { }); it('single read replica with', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); @@ -317,8 +317,8 @@ describe('[with] read replicas sqlite', () => { }); it('single read replica with + primary with', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); @@ -336,9 +336,9 @@ describe('[with] read replicas sqlite', () => { }); it('always first read with', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; @@ -367,9 +367,9 @@ describe('[with] read replicas sqlite', () => { describe('[update] replicas sqlite', () => { it('primary update', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); @@ -402,9 +402,9 @@ describe('[update] replicas sqlite', () => 
{ describe('[delete] replicas sqlite', () => { it('primary delete', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); @@ -438,9 +438,9 @@ describe('[delete] replicas sqlite', () => { describe('[insert] replicas sqlite', () => { it('primary insert', () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); @@ -473,9 +473,9 @@ describe('[insert] replicas sqlite', () => { describe('[execute] replicas sqlite', () => { it('primary execute', async () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); @@ -511,9 +511,9 @@ describe('[execute] replicas sqlite', () => { describe('[transaction] replicas sqlite', () => { it('primary transaction', async () => { - const primaryDb = drizzle({} as any); - const read1 = drizzle({} as any); - const read2 = drizzle({} as any); + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); @@ -554,9 +554,9 @@ describe('[transaction] replicas sqlite', () => { describe('[findFirst] read replicas sqlite', () => { it('primary findFirst', () => { - const primaryDb = drizzle({} as any, { schema: { usersTable } }); - const read1 = drizzle({} as any, { schema: { usersTable } }); - const read2 = drizzle({} as any, { schema: { usersTable } }); + const primaryDb = drizzle.mock({ schema: { usersTable } }); + const 
read1 = drizzle.mock({ schema: { usersTable } }); + const read2 = drizzle.mock({ schema: { usersTable } }); const db = withReplicas(primaryDb, [read1, read2]); @@ -574,9 +574,9 @@ describe('[findFirst] read replicas sqlite', () => { }); it('random replica findFirst', () => { - const primaryDb = drizzle({} as any, { schema: { usersTable } }); - const read1 = drizzle({} as any, { schema: { usersTable } }); - const read2 = drizzle({} as any, { schema: { usersTable } }); + const primaryDb = drizzle.mock({ schema: { usersTable } }); + const read1 = drizzle.mock({ schema: { usersTable } }); + const read2 = drizzle.mock({ schema: { usersTable } }); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); @@ -603,8 +603,8 @@ describe('[findFirst] read replicas sqlite', () => { }); it('single read replica findFirst', () => { - const primaryDb = drizzle({} as any, { schema: { usersTable } }); - const read1 = drizzle({} as any, { schema: { usersTable } }); + const primaryDb = drizzle.mock({ schema: { usersTable } }); + const read1 = drizzle.mock({ schema: { usersTable } }); const db = withReplicas(primaryDb, [read1]); @@ -621,8 +621,8 @@ describe('[findFirst] read replicas sqlite', () => { }); it('single read replica findFirst + primary findFirst', () => { - const primaryDb = drizzle({} as any, { schema: { usersTable } }); - const read1 = drizzle({} as any, { schema: { usersTable } }); + const primaryDb = drizzle.mock({ schema: { usersTable } }); + const read1 = drizzle.mock({ schema: { usersTable } }); const db = withReplicas(primaryDb, [read1]); @@ -640,9 +640,9 @@ describe('[findFirst] read replicas sqlite', () => { }); it('always first read findFirst', () => { - const primaryDb = drizzle({} as any, { schema: { usersTable } }); - const read1 = drizzle({} as any, { schema: { usersTable } }); - const read2 = drizzle({} as any, { schema: { usersTable } }); + const primaryDb = drizzle.mock({ schema: { usersTable } }); + const read1 = 
drizzle.mock({ schema: { usersTable } }); + const read2 = drizzle.mock({ schema: { usersTable } }); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; @@ -666,9 +666,9 @@ describe('[findFirst] read replicas sqlite', () => { describe('[findMany] read replicas sqlite', () => { it('primary findMany', () => { - const primaryDb = drizzle({} as any, { schema: { usersTable } }); - const read1 = drizzle({} as any, { schema: { usersTable } }); - const read2 = drizzle({} as any, { schema: { usersTable } }); + const primaryDb = drizzle.mock({ schema: { usersTable } }); + const read1 = drizzle.mock({ schema: { usersTable } }); + const read2 = drizzle.mock({ schema: { usersTable } }); const db = withReplicas(primaryDb, [read1, read2]); @@ -687,9 +687,9 @@ describe('[findMany] read replicas sqlite', () => { }); it('random replica findMany', () => { - const primaryDb = drizzle({} as any, { schema: { usersTable } }); - const read1 = drizzle({} as any, { schema: { usersTable } }); - const read2 = drizzle({} as any, { schema: { usersTable } }); + const primaryDb = drizzle.mock({ schema: { usersTable } }); + const read1 = drizzle.mock({ schema: { usersTable } }); + const read2 = drizzle.mock({ schema: { usersTable } }); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); @@ -720,8 +720,8 @@ describe('[findMany] read replicas sqlite', () => { }); it('single read replica findMany', () => { - const primaryDb = drizzle({} as any, { schema: { usersTable } }); - const read1 = drizzle({} as any, { schema: { usersTable } }); + const primaryDb = drizzle.mock({ schema: { usersTable } }); + const read1 = drizzle.mock({ schema: { usersTable } }); const db = withReplicas(primaryDb, [read1]); @@ -744,8 +744,8 @@ describe('[findMany] read replicas sqlite', () => { }); it('single read replica findMany + primary findMany', () => { - const primaryDb = drizzle({} as any, { schema: { usersTable } }); - const read1 = drizzle({} as 
any, { schema: { usersTable } }); + const primaryDb = drizzle.mock({ schema: { usersTable } }); + const read1 = drizzle.mock({ schema: { usersTable } }); const db = withReplicas(primaryDb, [read1]); @@ -770,9 +770,9 @@ describe('[findMany] read replicas sqlite', () => { }); it('always first read findMany', () => { - const primaryDb = drizzle({} as any, { schema: { usersTable } }); - const read1 = drizzle({} as any, { schema: { usersTable } }); - const read2 = drizzle({} as any, { schema: { usersTable } }); + const primaryDb = drizzle.mock({ schema: { usersTable } }); + const read1 = drizzle.mock({ schema: { usersTable } }); + const read2 = drizzle.mock({ schema: { usersTable } }); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; diff --git a/integration-tests/tests/sqlite/d1-batch.test.ts b/integration-tests/tests/sqlite/d1-batch.test.ts index 2c46a6fe4..dba22cd4d 100644 --- a/integration-tests/tests/sqlite/d1-batch.test.ts +++ b/integration-tests/tests/sqlite/d1-batch.test.ts @@ -1,3 +1,4 @@ +/// import 'dotenv/config'; import { D1Database, D1DatabaseAPI } from '@miniflare/d1'; import { createSQLiteDB } from '@miniflare/shared'; diff --git a/integration-tests/tests/sqlite/libsql.test.ts b/integration-tests/tests/sqlite/libsql.test.ts index 71d3b289e..b99d7e9bf 100644 --- a/integration-tests/tests/sqlite/libsql.test.ts +++ b/integration-tests/tests/sqlite/libsql.test.ts @@ -4,6 +4,7 @@ import { sql } from 'drizzle-orm'; import { drizzle, type LibSQLDatabase } from 'drizzle-orm/libsql'; import { migrate } from 'drizzle-orm/libsql/migrator'; import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { skipTests } from '~/common'; import { randomString } from '~/utils'; import { anotherUsersMigratorTable, tests, usersMigratorTable } from './sqlite-common'; @@ -87,4 +88,9 @@ test('migrator : migrate with custom table', async () => { await db.run(sql`drop table ${sql.identifier(customTable)}`); }); +skipTests([ 
+ 'delete with limit and order by', + 'update with limit and order by', +]); + tests(); diff --git a/integration-tests/tests/sqlite/sql-js.test.ts b/integration-tests/tests/sqlite/sql-js.test.ts index ec3d7b583..4c733835f 100644 --- a/integration-tests/tests/sqlite/sql-js.test.ts +++ b/integration-tests/tests/sqlite/sql-js.test.ts @@ -58,5 +58,7 @@ skipTests([ */ 'transaction rollback', 'nested transaction rollback', + 'delete with limit and order by', + 'update with limit and order by', ]); tests(); diff --git a/integration-tests/tests/sqlite/sqlite-common.ts b/integration-tests/tests/sqlite/sqlite-common.ts index 9d2489184..f31bdbbd2 100644 --- a/integration-tests/tests/sqlite/sqlite-common.ts +++ b/integration-tests/tests/sqlite/sqlite-common.ts @@ -2887,6 +2887,48 @@ export function tests() { { count: 3 }, ]); }); + + test('update with limit and order by', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(usersTable).values([ + { name: 'Barry', verified: false }, + { name: 'Alan', verified: false }, + { name: 'Carl', verified: false }, + ]); + + await db.update(usersTable).set({ verified: true }).limit(2).orderBy(asc(usersTable.name)); + + const result = await db.select({ name: usersTable.name, verified: usersTable.verified }).from(usersTable).orderBy( + asc(usersTable.name), + ); + + expect(result).toStrictEqual([ + { name: 'Alan', verified: true }, + { name: 'Barry', verified: true }, + { name: 'Carl', verified: false }, + ]); + }); + + test('delete with limit and order by', async (ctx) => { + const { db } = ctx.sqlite; + + await db.insert(usersTable).values([ + { name: 'Barry', verified: false }, + { name: 'Alan', verified: false }, + { name: 'Carl', verified: false }, + ]); + + await db.delete(usersTable).where(eq(usersTable.verified, false)).limit(1).orderBy(asc(usersTable.name)); + + const result = await db.select({ name: usersTable.name, verified: usersTable.verified }).from(usersTable).orderBy( + asc(usersTable.name), + ); + 
expect(result).toStrictEqual([ + { name: 'Barry', verified: false }, + { name: 'Carl', verified: false }, + ]); + }); }); test('table configs: unique third param', () => { diff --git a/integration-tests/vitest.config.ts b/integration-tests/vitest.config.ts index 5187d2cfc..3952eca49 100644 --- a/integration-tests/vitest.config.ts +++ b/integration-tests/vitest.config.ts @@ -14,18 +14,18 @@ export default defineConfig({ 'tests/imports/**/*', 'tests/extensions/vectors/**/*', 'tests/version.test.ts', + 'tests/pg/node-postgres.test.ts', ], exclude: [ ...(process.env.SKIP_EXTERNAL_DB_TESTS ? [ 'tests/relational/mysql.planetscale.test.ts', - 'tests/neon-http-batch.test.ts', + 'tests/pg/neon-serverless.test.ts', 'tests/mysql/tidb-serverless.test.ts', 'tests/mysql/mysql-planetscale.test.ts', 'tests/sqlite/libsql.test.ts', 'tests/mysql/tidb-serverless.test.ts', 'tests/sqlite/libsql-batch.test.ts', - 'tests/pg/neon-http.test.ts', 'tests/pg/neon-http-batch.test.ts', ] diff --git a/package.json b/package.json index 4e7bd4e91..b0fda61c8 100755 --- a/package.json +++ b/package.json @@ -35,12 +35,7 @@ "tsup": "^7.2.0", "tsx": "^4.10.5", "turbo": "^1.10.14", - "typescript": "5.4.5" - }, - "pnpm": { - "patchedDependencies": { - "typescript@5.4.5": "patches/typescript@5.4.5.patch" - } + "typescript": "5.6.3" }, "packageManager": "pnpm@9.7.0" } diff --git a/patches/typescript@5.4.5.patch b/patches/typescript@5.6.3.patch similarity index 100% rename from patches/typescript@5.4.5.patch rename to patches/typescript@5.6.3.patch diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index f5d886131..d2400e16d 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -4,11 +4,6 @@ settings: autoInstallPeers: true excludeLinksFromLockfile: false -patchedDependencies: - typescript@5.4.5: - hash: q3iy4fwdhi5sis3wty7d4nbsme - path: patches/typescript@5.4.5.patch - importers: .: @@ -21,13 +16,13 @@ importers: version: 4.2.0(prettier@3.0.3) '@typescript-eslint/eslint-plugin': specifier: ^6.7.3 - version: 
6.7.3(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)))(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + version: 6.7.3(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.6.3))(eslint@8.50.0)(typescript@5.6.3) '@typescript-eslint/experimental-utils': specifier: ^5.62.0 - version: 5.62.0(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + version: 5.62.0(eslint@8.50.0)(typescript@5.6.3) '@typescript-eslint/parser': specifier: ^6.7.3 - version: 6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + version: 6.7.3(eslint@8.50.0)(typescript@5.6.3) bun-types: specifier: ^1.0.3 version: 1.0.3 @@ -45,7 +40,7 @@ importers: version: link:drizzle-orm/dist drizzle-orm-old: specifier: npm:drizzle-orm@^0.27.2 - version: drizzle-orm@0.27.2(@aws-sdk/client-rds-data@3.583.0)(@cloudflare/workers-types@4.20240524.0)(@libsql/client@0.10.0)(@neondatabase/serverless@0.9.3)(@opentelemetry/api@1.8.0)(@planetscale/database@1.18.0)(@types/better-sqlite3@7.6.10)(@types/pg@8.11.6)(@types/sql.js@1.4.9)(@vercel/postgres@0.8.0)(better-sqlite3@9.6.0)(bun-types@1.0.3)(knex@2.5.1(better-sqlite3@9.6.0)(mysql2@3.11.0)(pg@8.11.5)(sqlite3@5.1.7))(kysely@0.25.0)(mysql2@3.11.0)(pg@8.11.5)(postgres@3.4.4)(sql.js@1.10.3)(sqlite3@5.1.7) + version: drizzle-orm@0.27.2(@aws-sdk/client-rds-data@3.583.0)(@cloudflare/workers-types@4.20241004.0)(@libsql/client@0.10.0)(@neondatabase/serverless@0.9.3)(@opentelemetry/api@1.8.0)(@planetscale/database@1.18.0)(@types/better-sqlite3@7.6.10)(@types/pg@8.11.6)(@types/sql.js@1.4.9)(@vercel/postgres@0.8.0)(better-sqlite3@9.6.0)(bun-types@1.0.3)(knex@2.5.1(better-sqlite3@9.6.0)(mysql2@3.11.0)(pg@8.11.5)(sqlite3@5.1.7))(kysely@0.25.0)(mysql2@3.11.0)(pg@8.11.5)(postgres@3.4.4)(sql.js@1.10.3)(sqlite3@5.1.7) eslint: specifier: ^8.50.0 version: 8.50.0 @@ -54,7 +49,7 @@ importers: version: link:eslint/eslint-plugin-drizzle-internal 
eslint-plugin-import: specifier: ^2.28.1 - version: 2.28.1(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)))(eslint@8.50.0) + version: 2.28.1(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.6.3))(eslint@8.50.0) eslint-plugin-no-instanceof: specifier: ^1.0.1 version: 1.0.1 @@ -63,7 +58,7 @@ importers: version: 48.0.1(eslint@8.50.0) eslint-plugin-unused-imports: specifier: ^3.0.0 - version: 3.0.0(@typescript-eslint/eslint-plugin@6.7.3(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)))(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)))(eslint@8.50.0) + version: 3.0.0(@typescript-eslint/eslint-plugin@6.7.3(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.6.3))(eslint@8.50.0)(typescript@5.6.3))(eslint@8.50.0) glob: specifier: ^10.3.10 version: 10.3.10 @@ -75,10 +70,10 @@ importers: version: 0.23.9 resolve-tspaths: specifier: ^0.8.16 - version: 0.8.16(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + version: 0.8.16(typescript@5.6.3) tsup: specifier: ^7.2.0 - version: 7.2.0(postcss@8.4.39)(ts-node@10.9.2(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)))(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + version: 7.2.0(postcss@8.4.39)(ts-node@10.9.2(typescript@5.6.3))(typescript@5.6.3) tsx: specifier: ^4.10.5 version: 4.10.5 @@ -86,8 +81,8 @@ importers: specifier: ^1.10.14 version: 1.10.14 typescript: - specifier: 5.4.5 - version: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) + specifier: 5.6.3 + version: 5.6.3 drizzle-kit: dependencies: @@ -169,10 +164,10 @@ importers: version: 8.5.11 '@typescript-eslint/eslint-plugin': specifier: ^7.2.0 - version: 7.16.1(@typescript-eslint/parser@7.16.1(eslint@8.57.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)))(eslint@8.57.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + version: 
7.16.1(@typescript-eslint/parser@7.16.1(eslint@8.57.0)(typescript@5.4.5))(eslint@8.57.0)(typescript@5.4.5) '@typescript-eslint/parser': specifier: ^7.2.0 - version: 7.16.1(eslint@8.57.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + version: 7.16.1(eslint@8.57.0)(typescript@5.4.5) '@vercel/postgres': specifier: ^0.8.0 version: 0.8.0 @@ -265,19 +260,19 @@ importers: version: 2.2.1 tsup: specifier: ^8.0.2 - version: 8.1.2(postcss@8.4.39)(tsx@3.14.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))(yaml@2.4.2) + version: 8.1.2(postcss@8.4.39)(tsx@3.14.0)(typescript@5.4.5)(yaml@2.4.2) tsx: specifier: ^3.12.1 version: 3.14.0 typescript: specifier: ^5.4.3 - version: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) + version: 5.4.5 uuid: specifier: ^9.0.1 version: 9.0.1 vite-tsconfig-paths: specifier: ^4.3.2 - version: 4.3.2(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))(vite@5.3.3(@types/node@18.19.33)(lightningcss@1.25.1)(terser@5.31.0)) + version: 4.3.2(typescript@5.4.5)(vite@5.3.3(@types/node@18.19.33)(lightningcss@1.25.1)(terser@5.31.0)) vitest: specifier: ^1.4.0 version: 1.6.0(@types/node@18.19.33)(@vitest/ui@1.6.0)(lightningcss@1.25.1)(terser@5.31.0) @@ -309,8 +304,8 @@ importers: specifier: ^0.10.0 version: 0.10.0(bufferutil@4.0.8)(utf-8-validate@6.0.3) '@miniflare/d1': - specifier: ^2.14.2 - version: 2.14.2 + specifier: ^2.14.4 + version: 2.14.4 '@neondatabase/serverless': specifier: ^0.9.0 version: 0.9.0 @@ -352,7 +347,7 @@ importers: version: 0.8.0 '@xata.io/client': specifier: ^0.29.3 - version: 0.29.4(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + version: 0.29.4(typescript@5.6.3) better-sqlite3: specifier: ^8.4.0 version: 8.7.0 @@ -400,7 +395,7 @@ importers: version: 3.14.0 vite-tsconfig-paths: specifier: ^4.3.2 - version: 4.3.2(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))(vite@5.3.3(@types/node@20.12.12)(lightningcss@1.25.1)(terser@5.31.0)) + version: 
4.3.2(typescript@5.6.3)(vite@5.3.3(@types/node@20.12.12)(lightningcss@1.25.1)(terser@5.31.0)) vitest: specifier: ^1.6.0 version: 1.6.0(@types/node@20.12.12)(@vitest/ui@1.6.0)(lightningcss@1.25.1)(terser@5.31.0) @@ -418,7 +413,7 @@ importers: version: 0.4.1(rollup@3.27.2) '@rollup/plugin-typescript': specifier: ^11.1.0 - version: 11.1.1(rollup@3.27.2)(tslib@2.6.2)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + version: 11.1.1(rollup@3.27.2)(tslib@2.6.2)(typescript@5.6.3) '@sinclair/typebox': specifier: ^0.29.6 version: 0.29.6 @@ -439,7 +434,7 @@ importers: version: 3.27.2 vite-tsconfig-paths: specifier: ^4.3.2 - version: 4.3.2(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))(vite@5.3.3(@types/node@18.15.10)(lightningcss@1.25.1)(terser@5.31.0)) + version: 4.3.2(typescript@5.6.3)(vite@5.3.3(@types/node@18.15.10)(lightningcss@1.25.1)(terser@5.31.0)) vitest: specifier: ^1.6.0 version: 1.6.0(@types/node@18.15.10)(@vitest/ui@1.6.0)(lightningcss@1.25.1)(terser@5.31.0) @@ -454,7 +449,7 @@ importers: version: 0.4.1(rollup@3.27.2) '@rollup/plugin-typescript': specifier: ^11.1.0 - version: 11.1.1(rollup@3.27.2)(tslib@2.6.2)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + version: 11.1.1(rollup@3.27.2)(tslib@2.6.2)(typescript@5.6.3) '@types/node': specifier: ^18.15.10 version: 18.15.10 @@ -475,7 +470,7 @@ importers: version: 0.30.0 vite-tsconfig-paths: specifier: ^4.3.2 - version: 4.3.2(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))(vite@5.3.3(@types/node@18.15.10)(lightningcss@1.25.1)(terser@5.31.0)) + version: 4.3.2(typescript@5.6.3)(vite@5.3.3(@types/node@18.15.10)(lightningcss@1.25.1)(terser@5.31.0)) vitest: specifier: ^1.6.0 version: 1.6.0(@types/node@18.15.10)(@vitest/ui@1.6.0)(lightningcss@1.25.1)(terser@5.31.0) @@ -490,7 +485,7 @@ importers: version: 0.4.1(rollup@3.20.7) '@rollup/plugin-typescript': specifier: ^11.1.0 - version: 11.1.0(rollup@3.20.7)(tslib@2.6.2)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + version: 
11.1.0(rollup@3.20.7)(tslib@2.6.2)(typescript@5.6.3) '@types/node': specifier: ^18.15.10 version: 18.15.10 @@ -508,7 +503,7 @@ importers: version: 3.20.7 vite-tsconfig-paths: specifier: ^4.3.2 - version: 4.3.2(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))(vite@5.3.3(@types/node@18.15.10)(lightningcss@1.25.1)(terser@5.31.0)) + version: 4.3.2(typescript@5.6.3)(vite@5.3.3(@types/node@18.15.10)(lightningcss@1.25.1)(terser@5.31.0)) vitest: specifier: ^1.6.0 version: 1.6.0(@types/node@18.15.10)(@vitest/ui@1.6.0)(lightningcss@1.25.1)(terser@5.31.0) @@ -553,16 +548,16 @@ importers: version: 3.583.0 '@aws-sdk/credential-providers': specifier: ^3.549.0 - version: 3.569.0(@aws-sdk/client-sso-oidc@3.583.0) + version: 3.569.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0)) '@electric-sql/pglite': specifier: ^0.1.1 version: 0.1.5 '@miniflare/d1': - specifier: ^2.14.2 - version: 2.14.2 + specifier: ^2.14.4 + version: 2.14.4 '@miniflare/shared': - specifier: ^2.14.2 - version: 2.14.2 + specifier: ^2.14.4 + version: 2.14.4 '@planetscale/database': specifier: ^1.16.0 version: 1.18.0 @@ -580,7 +575,7 @@ importers: version: 0.8.0 '@xata.io/client': specifier: ^0.29.3 - version: 0.29.4(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + version: 0.29.4(typescript@5.6.3) async-retry: specifier: ^1.3.3 version: 1.3.3 @@ -642,12 +637,18 @@ importers: specifier: ^0.5.6 version: 0.5.6 vitest: - specifier: ^1.6.0 - version: 1.6.0(@types/node@20.12.12)(@vitest/ui@1.6.0)(lightningcss@1.25.1)(terser@5.31.0) + specifier: ^2.1.2 + version: 2.1.2(@types/node@20.12.12)(@vitest/ui@1.6.0)(lightningcss@1.25.1)(terser@5.31.0) + ws: + specifier: ^8.16.0 + version: 8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.3) zod: specifier: ^3.20.2 version: 3.23.7 devDependencies: + '@cloudflare/workers-types': + specifier: ^4.20241004.0 + version: 4.20241004.0 '@libsql/client': specifier: ^0.10.0 version: 0.10.0(bufferutil@4.0.8)(utf-8-validate@6.0.3) @@ -687,9 +688,12 @@ importers: 
'@types/uuid': specifier: ^9.0.1 version: 9.0.8 + '@types/ws': + specifier: ^8.5.10 + version: 8.5.11 '@vitest/ui': specifier: ^1.6.0 - version: 1.6.0(vitest@1.6.0) + version: 1.6.0(vitest@2.1.2) ava: specifier: ^5.3.0 version: 5.3.0(@ava/typescript@5.0.0) @@ -701,7 +705,7 @@ importers: version: 7.0.3 ts-node: specifier: ^10.9.2 - version: 10.9.2(@types/node@20.12.12)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + version: 10.9.2(@types/node@20.12.12)(typescript@5.6.3) tsx: specifier: ^4.14.0 version: 4.16.2 @@ -710,7 +714,7 @@ importers: version: 5.3.3(@types/node@20.12.12)(lightningcss@1.25.1)(terser@5.31.0) vite-tsconfig-paths: specifier: ^4.3.2 - version: 4.3.2(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))(vite@5.3.3(@types/node@20.12.12)(lightningcss@1.25.1)(terser@5.31.0)) + version: 4.3.2(typescript@5.6.3)(vite@5.3.3(@types/node@20.12.12)(lightningcss@1.25.1)(terser@5.31.0)) zx: specifier: ^7.2.2 version: 7.2.2 @@ -1937,6 +1941,9 @@ packages: '@cloudflare/workers-types@4.20240524.0': resolution: {integrity: sha512-GpSr4uE7y39DU9f0+wmrL76xd03wn0jy1ClITaa3ZZltKjirAV8TW1GzHrvvKyVGx6u3lekrFnB1HzVHsCYHDQ==} + '@cloudflare/workers-types@4.20241004.0': + resolution: {integrity: sha512-3LrPvtecs4umknOF1bTPNLHUG/ZjeSE6PYBQ/tbO7lwaVhjZTaTugiaCny2byrZupBlVNuubQVktcAgMfw0C1A==} + '@colors/colors@1.5.0': resolution: {integrity: sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==} engines: {node: '>=0.1.90'} @@ -3075,6 +3082,9 @@ packages: '@jridgewell/sourcemap-codec@1.4.15': resolution: {integrity: sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==} + '@jridgewell/sourcemap-codec@1.5.0': + resolution: {integrity: sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==} + '@jridgewell/trace-mapping@0.3.18': resolution: {integrity: 
sha512-w+niJYzMHdd7USdiH2U6869nqhD2nbfZXND5Yp93qIbEmnDNk7PD48o+YchRVpzMU7M6jVCbenTR7PA1FLQ9pA==} @@ -3170,24 +3180,24 @@ packages: cpu: [x64] os: [win32] - '@miniflare/core@2.14.2': - resolution: {integrity: sha512-n/smm5ZTg7ilGM4fxO7Gxhbe573oc8Za06M3b2fO+lPWqF6NJcEKdCC+sJntVFbn3Cbbd2G1ChISmugPfmlCkQ==} + '@miniflare/core@2.14.4': + resolution: {integrity: sha512-FMmZcC1f54YpF4pDWPtdQPIO8NXfgUxCoR9uyrhxKJdZu7M6n8QKopPVNuaxR40jcsdxb7yKoQoFWnHfzJD9GQ==} engines: {node: '>=16.13'} - '@miniflare/d1@2.14.2': - resolution: {integrity: sha512-3NPJyBLbFfzz9VAAdIZrDRdRpyslVCJoZHQk0/0CX3z2mJIfcQzjZhox2cYCFNH8NMJ7pRg6AeSMPYAnDKECDg==} + '@miniflare/d1@2.14.4': + resolution: {integrity: sha512-pMBVq9XWxTDdm+RRCkfXZP+bREjPg1JC8s8C0JTovA9OGmLQXqGTnFxIaS9vf1d8k3uSUGhDzPTzHr0/AUW1gA==} engines: {node: '>=16.7'} - '@miniflare/queues@2.14.2': - resolution: {integrity: sha512-OylkRs4lOWKvGnX+Azab3nx+1qwC87M36/hkgAU1RRvVDCOxOrYLvNLUczFfgmgMBwpYsmmW8YOIASlI3p4Qgw==} + '@miniflare/queues@2.14.4': + resolution: {integrity: sha512-aXQ5Ik8Iq1KGMBzGenmd6Js/jJgqyYvjom95/N9GptCGpiVWE5F0XqC1SL5rCwURbHN+aWY191o8XOFyY2nCUA==} engines: {node: '>=16.7'} - '@miniflare/shared@2.14.2': - resolution: {integrity: sha512-dDnYIztz10zDQjaFJ8Gy9UaaBWZkw3NyhFdpX6tAeyPA/2lGvkftc42MYmNi8s5ljqkZAtKgWAJnSf2K75NCJw==} + '@miniflare/shared@2.14.4': + resolution: {integrity: sha512-upl4RSB3hyCnITOFmRZjJj4A72GmkVrtfZTilkdq5Qe5TTlzsjVeDJp7AuNUM9bM8vswRo+N5jOiot6O4PVwwQ==} engines: {node: '>=16.13'} - '@miniflare/watcher@2.14.2': - resolution: {integrity: sha512-/TL0np4uYDl+6MdseDApZmDdlJ6Y7AY5iDY0TvUQJG9nyBoCjX6w0Zn4SiKDwO6660rPtSqZ5c7HzbPhGb5vsA==} + '@miniflare/watcher@2.14.4': + resolution: {integrity: sha512-PYn05ET2USfBAeXF6NZfWl0O32KVyE8ncQ/ngysrh3hoIV7l3qGGH7ubeFx+D8VWQ682qYhwGygUzQv2j1tGGg==} engines: {node: '>=16.13'} '@neon-rs/load@0.0.4': @@ -4355,15 +4365,42 @@ packages: '@vitest/expect@1.6.0': resolution: {integrity: 
sha512-ixEvFVQjycy/oNgHjqsL6AZCDduC+tflRluaHIzKIsdbzkLn2U/iBnVeJwB6HsIjQBdfMR8Z0tRxKUsvFJEeWQ==} + '@vitest/expect@2.1.2': + resolution: {integrity: sha512-FEgtlN8mIUSEAAnlvn7mP8vzaWhEaAEvhSXCqrsijM7K6QqjB11qoRZYEd4AKSCDz8p0/+yH5LzhZ47qt+EyPg==} + + '@vitest/mocker@2.1.2': + resolution: {integrity: sha512-ExElkCGMS13JAJy+812fw1aCv2QO/LBK6CyO4WOPAzLTmve50gydOlWhgdBJPx2ztbADUq3JVI0C5U+bShaeEA==} + peerDependencies: + '@vitest/spy': 2.1.2 + msw: ^2.3.5 + vite: ^5.0.0 + peerDependenciesMeta: + msw: + optional: true + vite: + optional: true + + '@vitest/pretty-format@2.1.2': + resolution: {integrity: sha512-FIoglbHrSUlOJPDGIrh2bjX1sNars5HbxlcsFKCtKzu4+5lpsRhOCVcuzp0fEhAGHkPZRIXVNzPcpSlkoZ3LuA==} + '@vitest/runner@1.6.0': resolution: {integrity: sha512-P4xgwPjwesuBiHisAVz/LSSZtDjOTPYZVmNAnpHHSR6ONrf8eCJOFRvUwdHn30F5M1fxhqtl7QZQUk2dprIXAg==} + '@vitest/runner@2.1.2': + resolution: {integrity: sha512-UCsPtvluHO3u7jdoONGjOSil+uON5SSvU9buQh3lP7GgUXHp78guN1wRmZDX4wGK6J10f9NUtP6pO+SFquoMlw==} + '@vitest/snapshot@1.6.0': resolution: {integrity: sha512-+Hx43f8Chus+DCmygqqfetcAZrDJwvTj0ymqjQq4CvmpKFSTVteEOBzCusu1x2tt4OJcvBflyHUE0DZSLgEMtQ==} + '@vitest/snapshot@2.1.2': + resolution: {integrity: sha512-xtAeNsZ++aRIYIUsek7VHzry/9AcxeULlegBvsdLncLmNCR6tR8SRjn8BbDP4naxtccvzTqZ+L1ltZlRCfBZFA==} + '@vitest/spy@1.6.0': resolution: {integrity: sha512-leUTap6B/cqi/bQkXUu6bQV5TZPx7pmMBKBQiI0rJA8c3pB56ZsaTbREnF7CJfmvAS4V2cXIBAh/3rVwrrCYgw==} + '@vitest/spy@2.1.2': + resolution: {integrity: sha512-GSUi5zoy+abNRJwmFhBDC0yRuVUn8WMlQscvnbbXdKLXX9dE59YbfwXxuJ/mth6eeqIzofU8BB5XDo/Ns/qK2A==} + '@vitest/ui@1.6.0': resolution: {integrity: sha512-k3Lyo+ONLOgylctiGovRKy7V4+dIN2yxstX3eY5cWFXH6WP+ooVX79YSyi0GagdTQzLmT43BF27T0s6dOIPBXA==} peerDependencies: @@ -4372,6 +4409,9 @@ packages: '@vitest/utils@1.6.0': resolution: {integrity: sha512-21cPiuGMoMZwiOHa2i4LXkMkMkCGzA+MVFV70jRwHo95dL4x/ts5GZhML1QWuy7yfp3WzK3lRvZi3JnXTYqrBw==} + '@vitest/utils@2.1.2': + resolution: {integrity: 
sha512-zMO2KdYy6mx56btx9JvAqAZ6EyS3g49krMPPrgOp1yxGZiA93HumGk+bZ5jIZtOg5/VBYl5eBmGRQHqq4FG6uQ==} + '@xata.io/client@0.29.4': resolution: {integrity: sha512-dRff4E/wINr0SYIlOHwApo0h8jzpAHVf2RcbGMkK9Xrddbe90KmCEx/gue9hLhBOoCCp6qUht2h9BsuVPruymw==} peerDependencies: @@ -4585,6 +4625,10 @@ packages: assertion-error@1.1.0: resolution: {integrity: sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==} + assertion-error@2.0.1: + resolution: {integrity: sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==} + engines: {node: '>=12'} + ast-types@0.15.2: resolution: {integrity: sha512-c27loCv9QkZinsa5ProX751khO9DJl/AcB5c2KNtA6NRvHKS0PgLfcftz72KVq504vB0Gku5s2kUZzDBvQWvHg==} engines: {node: '>=4'} @@ -4875,6 +4919,10 @@ packages: resolution: {integrity: sha512-13sOfMv2+DWduEU+/xbun3LScLoqN17nBeTLUsmDfKdoiC1fr0n9PU4guu4AhRcOVFk/sW8LyZWHuhWtQZiF+g==} engines: {node: '>=4'} + chai@5.1.1: + resolution: {integrity: sha512-pT1ZgP8rPNqUgieVaEY+ryQr6Q4HXNg8Ei9UnLUrjN4IA7dvQC5JB+/kxVcPNDHyBcc/26CXPkbNzq3qwrOEKA==} + engines: {node: '>=12'} + chalk@2.4.2: resolution: {integrity: sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==} engines: {node: '>=4'} @@ -4897,6 +4945,10 @@ packages: check-error@1.0.3: resolution: {integrity: sha512-iKEoDYaRmd1mxM90a2OEfWhjsjPpYPuQ+lMYsoxB126+t8fw7ySEO48nmDg5COTjxDI65/Y2OWpeEHk3ZOe8zg==} + check-error@2.1.1: + resolution: {integrity: sha512-OAlb+T7V4Op9OwdkjmguYRqncdlx5JiofwOAUkmTF+jNdHwzTaTs4sRAGpzLF3oOz5xAyDGrPgeIDFQmDOTiJw==} + engines: {node: '>= 16'} + chokidar@3.5.3: resolution: {integrity: sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==} engines: {node: '>= 8.10.0'} @@ -5271,6 +5323,15 @@ packages: supports-color: optional: true + debug@4.3.7: + resolution: {integrity: sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==} + 
engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + decamelize@1.2.0: resolution: {integrity: sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==} engines: {node: '>=0.10.0'} @@ -5283,6 +5344,10 @@ packages: resolution: {integrity: sha512-WaEtAOpRA1MQ0eohqZjpGD8zdI0Ovsm8mmFhaDN8dvDZzyoUMcYDnf5Y6iu7HTXxf8JDS23qWa4a+hKCDyOPzw==} engines: {node: '>=6'} + deep-eql@5.0.2: + resolution: {integrity: sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==} + engines: {node: '>=6'} + deep-extend@0.6.0: resolution: {integrity: sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==} engines: {node: '>=4.0.0'} @@ -7287,6 +7352,9 @@ packages: loupe@2.3.7: resolution: {integrity: sha512-zSMINGVYkdpYSOBmLi0D1Uo7JU9nVdQKrHxC8eYlV+9YKK9WePqAlL7lSlorG/U2Fw1w0hTBmaa/jrQ3UbPHtA==} + loupe@3.1.2: + resolution: {integrity: sha512-23I4pFZHmAemUnz8WZXbYRSKYj801VDaNv9ETuMh7IrMc7VuVVSo+Z9iLE3ni30+U48iDWfi30d3twAXBYmnCg==} + lru-cache@10.2.2: resolution: {integrity: sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ==} engines: {node: 14 || >=16.14} @@ -7319,6 +7387,9 @@ packages: magic-string@0.30.10: resolution: {integrity: sha512-iIRwTIf0QKV3UAnYK4PU8uiEc4SRh5jX0mwpIwETPpHdhVM4f53RSwS/vXvN1JhGX+Cs7B8qIq3d6AH49O5fAQ==} + magic-string@0.30.11: + resolution: {integrity: sha512-+Wri9p0QHMy+545hKww7YAu5NyzF8iomPL/RQazugQ9+Ez4Ic3mERMd8ZTX5rfK944j+560ZJi8iAwgak1Ac7A==} + make-dir@2.1.0: resolution: {integrity: sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==} engines: {node: '>=6'} @@ -8056,6 +8127,10 @@ packages: pathval@1.1.1: resolution: {integrity: sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==} + pathval@2.0.0: + resolution: {integrity: 
sha512-vE7JKRyES09KiunauX7nd2Q9/L7lhok4smP9RZTDeD4MVs72Dp2qNFVz39Nz5a0FVEW0BJR6C0DYrq6unoziZA==} + engines: {node: '>= 14.16'} + pause-stream@0.0.11: resolution: {integrity: sha512-e3FBlXLmN/D1S+zHzanP4E/4Z60oFAa3O051qt1pxa7DEJWKAyil6upYVXCWadEnuoqa4Pkc9oUx9zsxYeRv8A==} @@ -9205,14 +9280,32 @@ packages: tinybench@2.8.0: resolution: {integrity: sha512-1/eK7zUnIklz4JUUlL+658n58XO2hHLQfSk1Zf2LKieUjxidN16eKFEoDEfjHc3ohofSSqK3X5yO6VGb6iW8Lw==} + tinybench@2.9.0: + resolution: {integrity: sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==} + + tinyexec@0.3.0: + resolution: {integrity: sha512-tVGE0mVJPGb0chKhqmsoosjsS+qUnJVGJpZgsHYQcGoPlG3B51R3PouqTgEGH2Dc9jjFyOqOpix6ZHNMXp1FZg==} + tinypool@0.8.4: resolution: {integrity: sha512-i11VH5gS6IFeLY3gMBQ00/MmLncVP7JLXOw1vlgkytLmJK7QnEr7NXf0LBdxfmNPAeyetukOk0bOYrJrFGjYJQ==} engines: {node: '>=14.0.0'} + tinypool@1.0.1: + resolution: {integrity: sha512-URZYihUbRPcGv95En+sz6MfghfIc2OJ1sv/RmhWZLouPY0/8Vo80viwPvg3dlaS9fuq7fQMEfgRRK7BBZThBEA==} + engines: {node: ^18.0.0 || >=20.0.0} + + tinyrainbow@1.2.0: + resolution: {integrity: sha512-weEDEq7Z5eTHPDh4xjX789+fHfF+P8boiFB+0vbWzpbnbsEr/GRaohi/uMKxg8RZMXnl1ItAi/IUHWMsjDV7kQ==} + engines: {node: '>=14.0.0'} + tinyspy@2.2.1: resolution: {integrity: sha512-KYad6Vy5VDWV4GH3fjpseMQ/XU2BhIYP7Vzd0LG44qRWm/Yt2WCOTicFdvmgo6gWaqooMQCawTtILVQJupKu7A==} engines: {node: '>=14.0.0'} + tinyspy@3.0.2: + resolution: {integrity: sha512-n1cw8k1k0x4pgA2+9XrOkFydTerNcJ1zWCO5Nn9scWHTD+5tp8dghT2x1uduQePZTZgd3Tupf+x9BxJjeJi77Q==} + engines: {node: '>=14.0.0'} + tmp@0.0.33: resolution: {integrity: sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==} engines: {node: '>=0.6.0'} @@ -9504,6 +9597,11 @@ packages: engines: {node: '>=14.17'} hasBin: true + typescript@5.6.3: + resolution: {integrity: sha512-hjcS1mhfuyi4WW8IWtjP7brDrG2cuDZukyrYrSauoXGNgx0S7zceP07adYkJycEr56BOUTNPzbInooiN3fn1qw==} + engines: {node: '>=14.17'} + 
hasBin: true + ua-parser-js@1.0.38: resolution: {integrity: sha512-Aq5ppTOfvrCMgAPneW1HfWj66Xi7XL+/mIy996R1/CLS/rcyJQm6QZdsKrUeivDFQ+Oc9Wyuwor8Ze8peEoUoQ==} @@ -9516,10 +9614,6 @@ packages: undici-types@5.26.5: resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} - undici@5.28.2: - resolution: {integrity: sha512-wh1pHJHnUeQV5Xa8/kyQhO7WFa8M34l026L5P/+2TYiakvGy5Rdc8jWZVyG7ieht/0WgJLEd3kcU5gKx+6GC8w==} - engines: {node: '>=14.0'} - undici@5.28.4: resolution: {integrity: sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==} engines: {node: '>=14.0'} @@ -9664,6 +9758,11 @@ packages: engines: {node: ^18.0.0 || >=20.0.0} hasBin: true + vite-node@2.1.2: + resolution: {integrity: sha512-HPcGNN5g/7I2OtPjLqgOtCRu/qhVvBxTUD3qzitmL0SrG1cWFzxzhMDWussxSbrRYWqnKf8P2jiNhPMSN+ymsQ==} + engines: {node: ^18.0.0 || >=20.0.0} + hasBin: true + vite-tsconfig-paths@4.3.2: resolution: {integrity: sha512-0Vd/a6po6Q+86rPlntHye7F31zA2URZMbH8M3saAZ/xR9QoGN/L21bxEGfXdWmFdNkqPpRdxFT7nmNe12e9/uA==} peerDependencies: @@ -9753,6 +9852,31 @@ packages: jsdom: optional: true + vitest@2.1.2: + resolution: {integrity: sha512-veNjLizOMkRrJ6xxb+pvxN6/QAWg95mzcRjtmkepXdN87FNfxAss9RKe2far/G9cQpipfgP2taqg0KiWsquj8A==} + engines: {node: ^18.0.0 || >=20.0.0} + hasBin: true + peerDependencies: + '@edge-runtime/vm': '*' + '@types/node': ^18.0.0 || >=20.0.0 + '@vitest/browser': 2.1.2 + '@vitest/ui': 2.1.2 + happy-dom: '*' + jsdom: '*' + peerDependenciesMeta: + '@edge-runtime/vm': + optional: true + '@types/node': + optional: true + '@vitest/browser': + optional: true + '@vitest/ui': + optional: true + happy-dom: + optional: true + jsdom: + optional: true + vlq@1.0.1: resolution: {integrity: sha512-gQpnTgkubC6hQgdIcRdYGDSDc+SaujOdyesZQMv6JlfQee/9Mp0Qhnys6WxDWvQnL5WZdT7o2Ul187aSt0Rq+w==} @@ -9830,6 +9954,11 @@ packages: engines: {node: '>=8'} hasBin: true + why-is-node-running@2.3.0: + resolution: 
{integrity: sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==} + engines: {node: '>=8'} + hasBin: true + wide-align@1.1.5: resolution: {integrity: sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==} @@ -10782,12 +10911,12 @@ snapshots: - '@aws-sdk/client-sso-oidc' - aws-crt - '@aws-sdk/credential-provider-ini@3.568.0(@aws-sdk/client-sso-oidc@3.583.0)(@aws-sdk/client-sts@3.569.0(@aws-sdk/client-sso-oidc@3.569.0))': + '@aws-sdk/credential-provider-ini@3.568.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))(@aws-sdk/client-sts@3.569.0(@aws-sdk/client-sso-oidc@3.569.0))': dependencies: '@aws-sdk/client-sts': 3.569.0(@aws-sdk/client-sso-oidc@3.569.0) '@aws-sdk/credential-provider-env': 3.568.0 '@aws-sdk/credential-provider-process': 3.568.0 - '@aws-sdk/credential-provider-sso': 3.568.0(@aws-sdk/client-sso-oidc@3.583.0) + '@aws-sdk/credential-provider-sso': 3.568.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0)) '@aws-sdk/credential-provider-web-identity': 3.568.0(@aws-sdk/client-sts@3.569.0(@aws-sdk/client-sso-oidc@3.569.0)) '@aws-sdk/types': 3.567.0 '@smithy/credential-provider-imds': 2.3.0 @@ -10870,13 +10999,13 @@ snapshots: - '@aws-sdk/client-sts' - aws-crt - '@aws-sdk/credential-provider-node@3.569.0(@aws-sdk/client-sso-oidc@3.583.0)(@aws-sdk/client-sts@3.569.0(@aws-sdk/client-sso-oidc@3.569.0))': + '@aws-sdk/credential-provider-node@3.569.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))(@aws-sdk/client-sts@3.569.0(@aws-sdk/client-sso-oidc@3.569.0))': dependencies: '@aws-sdk/credential-provider-env': 3.568.0 '@aws-sdk/credential-provider-http': 3.568.0 - '@aws-sdk/credential-provider-ini': 3.568.0(@aws-sdk/client-sso-oidc@3.583.0)(@aws-sdk/client-sts@3.569.0(@aws-sdk/client-sso-oidc@3.569.0)) + '@aws-sdk/credential-provider-ini': 
3.568.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))(@aws-sdk/client-sts@3.569.0(@aws-sdk/client-sso-oidc@3.569.0)) '@aws-sdk/credential-provider-process': 3.568.0 - '@aws-sdk/credential-provider-sso': 3.568.0(@aws-sdk/client-sso-oidc@3.583.0) + '@aws-sdk/credential-provider-sso': 3.568.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0)) '@aws-sdk/credential-provider-web-identity': 3.568.0(@aws-sdk/client-sts@3.569.0(@aws-sdk/client-sso-oidc@3.569.0)) '@aws-sdk/types': 3.567.0 '@smithy/credential-provider-imds': 2.3.0 @@ -10957,10 +11086,10 @@ snapshots: - '@aws-sdk/client-sso-oidc' - aws-crt - '@aws-sdk/credential-provider-sso@3.568.0(@aws-sdk/client-sso-oidc@3.583.0)': + '@aws-sdk/credential-provider-sso@3.568.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))': dependencies: '@aws-sdk/client-sso': 3.568.0 - '@aws-sdk/token-providers': 3.568.0(@aws-sdk/client-sso-oidc@3.583.0) + '@aws-sdk/token-providers': 3.568.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0)) '@aws-sdk/types': 3.567.0 '@smithy/property-provider': 2.2.0 '@smithy/shared-ini-file-loader': 2.4.0 @@ -11014,7 +11143,7 @@ snapshots: '@smithy/types': 3.0.0 tslib: 2.6.2 - '@aws-sdk/credential-providers@3.569.0(@aws-sdk/client-sso-oidc@3.583.0)': + '@aws-sdk/credential-providers@3.569.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))': dependencies: '@aws-sdk/client-cognito-identity': 3.569.0 '@aws-sdk/client-sso': 3.568.0 @@ -11022,10 +11151,10 @@ snapshots: '@aws-sdk/credential-provider-cognito-identity': 3.569.0 '@aws-sdk/credential-provider-env': 3.568.0 '@aws-sdk/credential-provider-http': 3.568.0 - '@aws-sdk/credential-provider-ini': 3.568.0(@aws-sdk/client-sso-oidc@3.583.0)(@aws-sdk/client-sts@3.569.0(@aws-sdk/client-sso-oidc@3.569.0)) - '@aws-sdk/credential-provider-node': 3.569.0(@aws-sdk/client-sso-oidc@3.583.0)(@aws-sdk/client-sts@3.569.0(@aws-sdk/client-sso-oidc@3.569.0)) + '@aws-sdk/credential-provider-ini': 
3.568.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))(@aws-sdk/client-sts@3.569.0(@aws-sdk/client-sso-oidc@3.569.0)) + '@aws-sdk/credential-provider-node': 3.569.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))(@aws-sdk/client-sts@3.569.0(@aws-sdk/client-sso-oidc@3.569.0)) '@aws-sdk/credential-provider-process': 3.568.0 - '@aws-sdk/credential-provider-sso': 3.568.0(@aws-sdk/client-sso-oidc@3.583.0) + '@aws-sdk/credential-provider-sso': 3.568.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0)) '@aws-sdk/credential-provider-web-identity': 3.568.0(@aws-sdk/client-sts@3.569.0(@aws-sdk/client-sso-oidc@3.569.0)) '@aws-sdk/types': 3.567.0 '@smithy/credential-provider-imds': 2.3.0 @@ -11207,7 +11336,7 @@ snapshots: '@smithy/types': 2.12.0 tslib: 2.6.2 - '@aws-sdk/token-providers@3.568.0(@aws-sdk/client-sso-oidc@3.583.0)': + '@aws-sdk/token-providers@3.568.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))': dependencies: '@aws-sdk/client-sso-oidc': 3.583.0(@aws-sdk/client-sts@3.583.0) '@aws-sdk/types': 3.567.0 @@ -11344,7 +11473,7 @@ snapshots: '@babel/traverse': 7.24.6 '@babel/types': 7.24.6 convert-source-map: 2.0.0 - debug: 4.3.5 + debug: 4.3.7 gensync: 1.0.0-beta.2 json5: 2.2.3 semver: 6.3.1 @@ -11405,7 +11534,7 @@ snapshots: '@babel/core': 7.24.6 '@babel/helper-compilation-targets': 7.24.6 '@babel/helper-plugin-utils': 7.24.6 - debug: 4.3.5 + debug: 4.3.7 lodash.debounce: 4.0.8 resolve: 1.22.8 transitivePeerDependencies: @@ -12277,7 +12406,7 @@ snapshots: '@babel/helper-split-export-declaration': 7.24.6 '@babel/parser': 7.24.6 '@babel/types': 7.24.6 - debug: 4.3.5 + debug: 4.3.7 globals: 11.12.0 transitivePeerDependencies: - supports-color @@ -12330,6 +12459,8 @@ snapshots: '@cloudflare/workers-types@4.20240524.0': {} + '@cloudflare/workers-types@4.20241004.0': {} + '@colors/colors@1.5.0': optional: true @@ -12862,7 +12993,7 @@ snapshots: '@eslint/eslintrc@3.1.0': dependencies: ajv: 6.12.6 - debug: 4.3.5 + 
debug: 4.3.7 espree: 10.0.1 globals: 14.0.0 ignore: 5.3.1 @@ -12919,7 +13050,7 @@ snapshots: chalk: 4.1.2 ci-info: 3.9.0 connect: 3.7.0 - debug: 4.3.5 + debug: 4.3.7 env-editor: 0.4.2 fast-glob: 3.3.2 find-yarn-workspace-root: 2.0.0 @@ -12987,7 +13118,7 @@ snapshots: '@expo/plist': 0.1.3 '@expo/sdk-runtime-versions': 1.0.0 chalk: 4.1.2 - debug: 4.3.5 + debug: 4.3.7 find-up: 5.0.0 getenv: 1.0.0 glob: 7.1.6 @@ -13039,7 +13170,7 @@ snapshots: '@expo/env@0.3.0': dependencies: chalk: 4.1.2 - debug: 4.3.5 + debug: 4.3.7 dotenv: 16.4.5 dotenv-expand: 11.0.6 getenv: 1.0.0 @@ -13078,7 +13209,7 @@ snapshots: '@expo/json-file': 8.3.3 '@expo/spawn-async': 1.7.2 chalk: 4.1.2 - debug: 4.3.5 + debug: 4.3.7 find-yarn-workspace-root: 2.0.0 fs-extra: 9.1.0 getenv: 1.0.0 @@ -13124,7 +13255,7 @@ snapshots: '@expo/image-utils': 0.5.1(encoding@0.1.13) '@expo/json-file': 8.3.3 '@react-native/normalize-colors': 0.74.83 - debug: 4.3.5 + debug: 4.3.7 expo-modules-autolinking: 1.11.1 fs-extra: 9.1.0 resolve-from: 5.0.0 @@ -13313,6 +13444,8 @@ snapshots: '@jridgewell/sourcemap-codec@1.4.15': {} + '@jridgewell/sourcemap-codec@1.5.0': {} + '@jridgewell/trace-mapping@0.3.18': dependencies: '@jridgewell/resolve-uri': 3.1.0 @@ -13405,38 +13538,38 @@ snapshots: '@libsql/win32-x64-msvc@0.4.1': optional: true - '@miniflare/core@2.14.2': + '@miniflare/core@2.14.4': dependencies: '@iarna/toml': 2.2.5 - '@miniflare/queues': 2.14.2 - '@miniflare/shared': 2.14.2 - '@miniflare/watcher': 2.14.2 + '@miniflare/queues': 2.14.4 + '@miniflare/shared': 2.14.4 + '@miniflare/watcher': 2.14.4 busboy: 1.6.0 dotenv: 10.0.0 kleur: 4.1.5 set-cookie-parser: 2.6.0 - undici: 5.28.2 + undici: 5.28.4 urlpattern-polyfill: 4.0.3 - '@miniflare/d1@2.14.2': + '@miniflare/d1@2.14.4': dependencies: - '@miniflare/core': 2.14.2 - '@miniflare/shared': 2.14.2 + '@miniflare/core': 2.14.4 + '@miniflare/shared': 2.14.4 - '@miniflare/queues@2.14.2': + '@miniflare/queues@2.14.4': dependencies: - '@miniflare/shared': 2.14.2 + 
'@miniflare/shared': 2.14.4 - '@miniflare/shared@2.14.2': + '@miniflare/shared@2.14.4': dependencies: '@types/better-sqlite3': 7.6.10 kleur: 4.1.5 npx-import: 1.1.4 picomatch: 2.3.1 - '@miniflare/watcher@2.14.2': + '@miniflare/watcher@2.14.4': dependencies: - '@miniflare/shared': 2.14.2 + '@miniflare/shared': 2.14.4 '@neon-rs/load@0.0.4': {} @@ -13852,20 +13985,20 @@ snapshots: optionalDependencies: rollup: 3.27.2 - '@rollup/plugin-typescript@11.1.0(rollup@3.20.7)(tslib@2.6.2)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': + '@rollup/plugin-typescript@11.1.0(rollup@3.20.7)(tslib@2.6.2)(typescript@5.6.3)': dependencies: '@rollup/pluginutils': 5.0.2(rollup@3.20.7) resolve: 1.22.1 - typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) + typescript: 5.6.3 optionalDependencies: rollup: 3.20.7 tslib: 2.6.2 - '@rollup/plugin-typescript@11.1.1(rollup@3.27.2)(tslib@2.6.2)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': + '@rollup/plugin-typescript@11.1.1(rollup@3.27.2)(tslib@2.6.2)(typescript@5.6.3)': dependencies: '@rollup/pluginutils': 5.0.2(rollup@3.27.2) resolve: 1.22.2 - typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) + typescript: 5.6.3 optionalDependencies: rollup: 3.27.2 tslib: 2.6.2 @@ -14753,13 +14886,13 @@ snapshots: dependencies: '@types/yargs-parser': 21.0.3 - '@typescript-eslint/eslint-plugin@6.7.3(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)))(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': + '@typescript-eslint/eslint-plugin@6.7.3(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.6.3))(eslint@8.50.0)(typescript@5.6.3)': dependencies: '@eslint-community/regexpp': 4.9.0 - '@typescript-eslint/parser': 6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + '@typescript-eslint/parser': 6.7.3(eslint@8.50.0)(typescript@5.6.3) '@typescript-eslint/scope-manager': 6.7.3 - '@typescript-eslint/type-utils': 
6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) - '@typescript-eslint/utils': 6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + '@typescript-eslint/type-utils': 6.7.3(eslint@8.50.0)(typescript@5.6.3) + '@typescript-eslint/utils': 6.7.3(eslint@8.50.0)(typescript@5.6.3) '@typescript-eslint/visitor-keys': 6.7.3 debug: 4.3.4 eslint: 8.50.0 @@ -14767,33 +14900,33 @@ snapshots: ignore: 5.2.4 natural-compare: 1.4.0 semver: 7.6.2 - ts-api-utils: 1.0.3(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + ts-api-utils: 1.0.3(typescript@5.6.3) optionalDependencies: - typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) + typescript: 5.6.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/eslint-plugin@7.16.1(@typescript-eslint/parser@7.16.1(eslint@8.57.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)))(eslint@8.57.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': + '@typescript-eslint/eslint-plugin@7.16.1(@typescript-eslint/parser@7.16.1(eslint@8.57.0)(typescript@5.4.5))(eslint@8.57.0)(typescript@5.4.5)': dependencies: '@eslint-community/regexpp': 4.11.0 - '@typescript-eslint/parser': 7.16.1(eslint@8.57.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + '@typescript-eslint/parser': 7.16.1(eslint@8.57.0)(typescript@5.4.5) '@typescript-eslint/scope-manager': 7.16.1 - '@typescript-eslint/type-utils': 7.16.1(eslint@8.57.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) - '@typescript-eslint/utils': 7.16.1(eslint@8.57.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + '@typescript-eslint/type-utils': 7.16.1(eslint@8.57.0)(typescript@5.4.5) + '@typescript-eslint/utils': 7.16.1(eslint@8.57.0)(typescript@5.4.5) '@typescript-eslint/visitor-keys': 7.16.1 eslint: 8.57.0 graphemer: 1.4.0 ignore: 5.3.1 natural-compare: 1.4.0 - ts-api-utils: 1.3.0(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + ts-api-utils: 1.3.0(typescript@5.4.5) 
optionalDependencies: - typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) + typescript: 5.4.5 transitivePeerDependencies: - supports-color - '@typescript-eslint/experimental-utils@5.62.0(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': + '@typescript-eslint/experimental-utils@5.62.0(eslint@8.50.0)(typescript@5.6.3)': dependencies: - '@typescript-eslint/utils': 5.62.0(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + '@typescript-eslint/utils': 5.62.0(eslint@8.50.0)(typescript@5.6.3) eslint: 8.50.0 transitivePeerDependencies: - supports-color @@ -14812,29 +14945,29 @@ snapshots: transitivePeerDependencies: - supports-color - '@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': + '@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.6.3)': dependencies: '@typescript-eslint/scope-manager': 6.7.3 '@typescript-eslint/types': 6.7.3 - '@typescript-eslint/typescript-estree': 6.7.3(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + '@typescript-eslint/typescript-estree': 6.7.3(typescript@5.6.3) '@typescript-eslint/visitor-keys': 6.7.3 debug: 4.3.4 eslint: 8.50.0 optionalDependencies: - typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) + typescript: 5.6.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/parser@7.16.1(eslint@8.57.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': + '@typescript-eslint/parser@7.16.1(eslint@8.57.0)(typescript@5.4.5)': dependencies: '@typescript-eslint/scope-manager': 7.16.1 '@typescript-eslint/types': 7.16.1 - '@typescript-eslint/typescript-estree': 7.16.1(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + '@typescript-eslint/typescript-estree': 7.16.1(typescript@5.4.5) '@typescript-eslint/visitor-keys': 7.16.1 debug: 4.3.4 eslint: 8.57.0 optionalDependencies: - typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) + typescript: 5.4.5 transitivePeerDependencies: - 
supports-color @@ -14871,27 +15004,27 @@ snapshots: '@typescript-eslint/types': 7.16.1 '@typescript-eslint/visitor-keys': 7.16.1 - '@typescript-eslint/type-utils@6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': + '@typescript-eslint/type-utils@6.7.3(eslint@8.50.0)(typescript@5.6.3)': dependencies: - '@typescript-eslint/typescript-estree': 6.7.3(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) - '@typescript-eslint/utils': 6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + '@typescript-eslint/typescript-estree': 6.7.3(typescript@5.6.3) + '@typescript-eslint/utils': 6.7.3(eslint@8.50.0)(typescript@5.6.3) debug: 4.3.4 eslint: 8.50.0 - ts-api-utils: 1.0.3(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + ts-api-utils: 1.0.3(typescript@5.6.3) optionalDependencies: - typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) + typescript: 5.6.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/type-utils@7.16.1(eslint@8.57.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': + '@typescript-eslint/type-utils@7.16.1(eslint@8.57.0)(typescript@5.4.5)': dependencies: - '@typescript-eslint/typescript-estree': 7.16.1(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) - '@typescript-eslint/utils': 7.16.1(eslint@8.57.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + '@typescript-eslint/typescript-estree': 7.16.1(typescript@5.4.5) + '@typescript-eslint/utils': 7.16.1(eslint@8.57.0)(typescript@5.4.5) debug: 4.3.4 eslint: 8.57.0 - ts-api-utils: 1.3.0(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + ts-api-utils: 1.3.0(typescript@5.4.5) optionalDependencies: - typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) + typescript: 5.4.5 transitivePeerDependencies: - supports-color @@ -14903,7 +15036,7 @@ snapshots: '@typescript-eslint/types@7.16.1': {} - '@typescript-eslint/typescript-estree@5.62.0(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': + 
'@typescript-eslint/typescript-estree@5.62.0(typescript@5.6.3)': dependencies: '@typescript-eslint/types': 5.62.0 '@typescript-eslint/visitor-keys': 5.62.0 @@ -14911,9 +15044,9 @@ snapshots: globby: 11.1.0 is-glob: 4.0.3 semver: 7.6.2 - tsutils: 3.21.0(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + tsutils: 3.21.0(typescript@5.6.3) optionalDependencies: - typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) + typescript: 5.6.3 transitivePeerDependencies: - supports-color @@ -14931,7 +15064,7 @@ snapshots: transitivePeerDependencies: - supports-color - '@typescript-eslint/typescript-estree@6.7.3(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': + '@typescript-eslint/typescript-estree@6.7.3(typescript@5.6.3)': dependencies: '@typescript-eslint/types': 6.7.3 '@typescript-eslint/visitor-keys': 6.7.3 @@ -14939,13 +15072,13 @@ snapshots: globby: 11.1.0 is-glob: 4.0.3 semver: 7.6.2 - ts-api-utils: 1.0.3(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + ts-api-utils: 1.0.3(typescript@5.6.3) optionalDependencies: - typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) + typescript: 5.6.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/typescript-estree@7.16.1(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': + '@typescript-eslint/typescript-estree@7.16.1(typescript@5.4.5)': dependencies: '@typescript-eslint/types': 7.16.1 '@typescript-eslint/visitor-keys': 7.16.1 @@ -14954,20 +15087,20 @@ snapshots: is-glob: 4.0.3 minimatch: 9.0.4 semver: 7.6.2 - ts-api-utils: 1.3.0(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + ts-api-utils: 1.3.0(typescript@5.4.5) optionalDependencies: - typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) + typescript: 5.4.5 transitivePeerDependencies: - supports-color - '@typescript-eslint/utils@5.62.0(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': + '@typescript-eslint/utils@5.62.0(eslint@8.50.0)(typescript@5.6.3)': dependencies: 
'@eslint-community/eslint-utils': 4.4.0(eslint@8.50.0) '@types/json-schema': 7.0.13 '@types/semver': 7.5.8 '@typescript-eslint/scope-manager': 5.62.0 '@typescript-eslint/types': 5.62.0 - '@typescript-eslint/typescript-estree': 5.62.0(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + '@typescript-eslint/typescript-estree': 5.62.0(typescript@5.6.3) eslint: 8.50.0 eslint-scope: 5.1.1 semver: 7.6.2 @@ -14989,26 +15122,26 @@ snapshots: - supports-color - typescript - '@typescript-eslint/utils@6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': + '@typescript-eslint/utils@6.7.3(eslint@8.50.0)(typescript@5.6.3)': dependencies: '@eslint-community/eslint-utils': 4.4.0(eslint@8.50.0) '@types/json-schema': 7.0.13 '@types/semver': 7.5.8 '@typescript-eslint/scope-manager': 6.7.3 '@typescript-eslint/types': 6.7.3 - '@typescript-eslint/typescript-estree': 6.7.3(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + '@typescript-eslint/typescript-estree': 6.7.3(typescript@5.6.3) eslint: 8.50.0 semver: 7.6.2 transitivePeerDependencies: - supports-color - typescript - '@typescript-eslint/utils@7.16.1(eslint@8.57.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': + '@typescript-eslint/utils@7.16.1(eslint@8.57.0)(typescript@5.4.5)': dependencies: '@eslint-community/eslint-utils': 4.4.0(eslint@8.57.0) '@typescript-eslint/scope-manager': 7.16.1 '@typescript-eslint/types': 7.16.1 - '@typescript-eslint/typescript-estree': 7.16.1(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + '@typescript-eslint/typescript-estree': 7.16.1(typescript@5.4.5) eslint: 8.57.0 transitivePeerDependencies: - supports-color @@ -15072,22 +15205,56 @@ snapshots: '@vitest/utils': 1.6.0 chai: 4.4.1 + '@vitest/expect@2.1.2': + dependencies: + '@vitest/spy': 2.1.2 + '@vitest/utils': 2.1.2 + chai: 5.1.1 + tinyrainbow: 1.2.0 + + '@vitest/mocker@2.1.2(@vitest/spy@2.1.2)(vite@5.3.3(@types/node@20.12.12)(lightningcss@1.25.1)(terser@5.31.0))': + dependencies: + 
'@vitest/spy': 2.1.2 + estree-walker: 3.0.3 + magic-string: 0.30.11 + optionalDependencies: + vite: 5.3.3(@types/node@20.12.12)(lightningcss@1.25.1)(terser@5.31.0) + + '@vitest/pretty-format@2.1.2': + dependencies: + tinyrainbow: 1.2.0 + '@vitest/runner@1.6.0': dependencies: '@vitest/utils': 1.6.0 p-limit: 5.0.0 pathe: 1.1.2 + '@vitest/runner@2.1.2': + dependencies: + '@vitest/utils': 2.1.2 + pathe: 1.1.2 + '@vitest/snapshot@1.6.0': dependencies: magic-string: 0.30.10 pathe: 1.1.2 pretty-format: 29.7.0 + '@vitest/snapshot@2.1.2': + dependencies: + '@vitest/pretty-format': 2.1.2 + magic-string: 0.30.11 + pathe: 1.1.2 + '@vitest/spy@1.6.0': dependencies: tinyspy: 2.2.1 + '@vitest/spy@2.1.2': + dependencies: + tinyspy: 3.0.2 + '@vitest/ui@1.6.0(vitest@1.6.0)': dependencies: '@vitest/utils': 1.6.0 @@ -15098,6 +15265,18 @@ snapshots: picocolors: 1.0.1 sirv: 2.0.4 vitest: 1.6.0(@types/node@18.19.33)(@vitest/ui@1.6.0)(lightningcss@1.25.1)(terser@5.31.0) + optional: true + + '@vitest/ui@1.6.0(vitest@2.1.2)': + dependencies: + '@vitest/utils': 1.6.0 + fast-glob: 3.3.2 + fflate: 0.8.2 + flatted: 3.3.1 + pathe: 1.1.2 + picocolors: 1.0.1 + sirv: 2.0.4 + vitest: 2.1.2(@types/node@20.12.12)(@vitest/ui@1.6.0)(lightningcss@1.25.1)(terser@5.31.0) '@vitest/utils@1.6.0': dependencies: @@ -15106,9 +15285,15 @@ snapshots: loupe: 2.3.7 pretty-format: 29.7.0 - '@xata.io/client@0.29.4(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))': + '@vitest/utils@2.1.2': dependencies: - typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) + '@vitest/pretty-format': 2.1.2 + loupe: 3.1.2 + tinyrainbow: 1.2.0 + + '@xata.io/client@0.29.4(typescript@5.6.3)': + dependencies: + typescript: 5.6.3 '@xmldom/xmldom@0.7.13': {} @@ -15322,6 +15507,8 @@ snapshots: assertion-error@1.1.0: {} + assertion-error@2.0.1: {} + ast-types@0.15.2: dependencies: tslib: 2.6.2 @@ -15715,6 +15902,14 @@ snapshots: pathval: 1.1.1 type-detect: 4.0.8 + chai@5.1.1: + dependencies: + assertion-error: 2.0.1 + check-error: 
2.1.1 + deep-eql: 5.0.2 + loupe: 3.1.2 + pathval: 2.0.0 + chalk@2.4.2: dependencies: ansi-styles: 3.2.1 @@ -15736,6 +15931,8 @@ snapshots: dependencies: get-func-name: 2.0.2 + check-error@2.1.1: {} + chokidar@3.5.3: dependencies: anymatch: 3.1.3 @@ -16116,6 +16313,10 @@ snapshots: dependencies: ms: 2.1.2 + debug@4.3.7: + dependencies: + ms: 2.1.3 + decamelize@1.2.0: {} decompress-response@6.0.0: @@ -16126,6 +16327,8 @@ snapshots: dependencies: type-detect: 4.0.8 + deep-eql@5.0.2: {} + deep-extend@0.6.0: {} deep-is@0.1.4: {} @@ -16281,10 +16484,10 @@ snapshots: transitivePeerDependencies: - supports-color - drizzle-orm@0.27.2(@aws-sdk/client-rds-data@3.583.0)(@cloudflare/workers-types@4.20240524.0)(@libsql/client@0.10.0)(@neondatabase/serverless@0.9.3)(@opentelemetry/api@1.8.0)(@planetscale/database@1.18.0)(@types/better-sqlite3@7.6.10)(@types/pg@8.11.6)(@types/sql.js@1.4.9)(@vercel/postgres@0.8.0)(better-sqlite3@9.6.0)(bun-types@1.0.3)(knex@2.5.1(better-sqlite3@9.6.0)(mysql2@3.11.0)(pg@8.11.5)(sqlite3@5.1.7))(kysely@0.25.0)(mysql2@3.11.0)(pg@8.11.5)(postgres@3.4.4)(sql.js@1.10.3)(sqlite3@5.1.7): + drizzle-orm@0.27.2(@aws-sdk/client-rds-data@3.583.0)(@cloudflare/workers-types@4.20241004.0)(@libsql/client@0.10.0)(@neondatabase/serverless@0.9.3)(@opentelemetry/api@1.8.0)(@planetscale/database@1.18.0)(@types/better-sqlite3@7.6.10)(@types/pg@8.11.6)(@types/sql.js@1.4.9)(@vercel/postgres@0.8.0)(better-sqlite3@9.6.0)(bun-types@1.0.3)(knex@2.5.1(better-sqlite3@9.6.0)(mysql2@3.11.0)(pg@8.11.5)(sqlite3@5.1.7))(kysely@0.25.0)(mysql2@3.11.0)(pg@8.11.5)(postgres@3.4.4)(sql.js@1.10.3)(sqlite3@5.1.7): optionalDependencies: '@aws-sdk/client-rds-data': 3.583.0 - '@cloudflare/workers-types': 4.20240524.0 + '@cloudflare/workers-types': 4.20241004.0 '@libsql/client': 0.10.0(bufferutil@4.0.8)(utf-8-validate@6.0.3) '@neondatabase/serverless': 0.9.3 '@opentelemetry/api': 1.8.0 @@ -16793,17 +16996,17 @@ snapshots: transitivePeerDependencies: - supports-color - 
eslint-module-utils@2.8.0(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)))(eslint-import-resolver-node@0.3.9)(eslint@8.50.0): + eslint-module-utils@2.8.0(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.6.3))(eslint-import-resolver-node@0.3.9)(eslint@8.50.0): dependencies: debug: 3.2.7 optionalDependencies: - '@typescript-eslint/parser': 6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + '@typescript-eslint/parser': 6.7.3(eslint@8.50.0)(typescript@5.6.3) eslint: 8.50.0 eslint-import-resolver-node: 0.3.9 transitivePeerDependencies: - supports-color - eslint-plugin-import@2.28.1(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)))(eslint@8.50.0): + eslint-plugin-import@2.28.1(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.6.3))(eslint@8.50.0): dependencies: array-includes: 3.1.6 array.prototype.findlastindex: 1.2.2 @@ -16813,7 +17016,7 @@ snapshots: doctrine: 2.1.0 eslint: 8.50.0 eslint-import-resolver-node: 0.3.9 - eslint-module-utils: 2.8.0(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)))(eslint-import-resolver-node@0.3.9)(eslint@8.50.0) + eslint-module-utils: 2.8.0(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.6.3))(eslint-import-resolver-node@0.3.9)(eslint@8.50.0) has: 1.0.3 is-core-module: 2.13.0 is-glob: 4.0.3 @@ -16824,7 +17027,7 @@ snapshots: semver: 6.3.1 tsconfig-paths: 3.14.2 optionalDependencies: - '@typescript-eslint/parser': 6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + '@typescript-eslint/parser': 6.7.3(eslint@8.50.0)(typescript@5.6.3) transitivePeerDependencies: - eslint-import-resolver-typescript - eslint-import-resolver-webpack @@ -16860,12 +17063,12 @@ snapshots: semver: 7.6.2 strip-indent: 3.0.0 - 
eslint-plugin-unused-imports@3.0.0(@typescript-eslint/eslint-plugin@6.7.3(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)))(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)))(eslint@8.50.0): + eslint-plugin-unused-imports@3.0.0(@typescript-eslint/eslint-plugin@6.7.3(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.6.3))(eslint@8.50.0)(typescript@5.6.3))(eslint@8.50.0): dependencies: eslint: 8.50.0 eslint-rule-composer: 0.3.0 optionalDependencies: - '@typescript-eslint/eslint-plugin': 6.7.3(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)))(eslint@8.50.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + '@typescript-eslint/eslint-plugin': 6.7.3(@typescript-eslint/parser@6.7.3(eslint@8.50.0)(typescript@5.6.3))(eslint@8.50.0)(typescript@5.6.3) eslint-rule-composer@0.3.0: {} @@ -18502,6 +18705,8 @@ snapshots: dependencies: get-func-name: 2.0.2 + loupe@3.1.2: {} + lru-cache@10.2.2: {} lru-cache@5.1.1: @@ -18530,6 +18735,10 @@ snapshots: dependencies: '@jridgewell/sourcemap-codec': 1.4.15 + magic-string@0.30.11: + dependencies: + '@jridgewell/sourcemap-codec': 1.5.0 + make-dir@2.1.0: dependencies: pify: 4.0.1 @@ -19399,6 +19608,8 @@ snapshots: pathval@1.1.1: {} + pathval@2.0.0: {} + pause-stream@0.0.11: dependencies: through: 2.3.8 @@ -19495,13 +19706,13 @@ snapshots: possible-typed-array-names@1.0.0: {} - postcss-load-config@4.0.1(postcss@8.4.39)(ts-node@10.9.2(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))): + postcss-load-config@4.0.1(postcss@8.4.39)(ts-node@10.9.2(typescript@5.6.3)): dependencies: lilconfig: 2.1.0 yaml: 2.3.1 optionalDependencies: postcss: 8.4.39 - ts-node: 10.9.2(@types/node@20.12.12)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + ts-node: 10.9.2(@types/node@20.12.12)(typescript@5.6.3) postcss-load-config@6.0.1(postcss@8.4.39)(tsx@3.14.0)(yaml@2.4.2): dependencies: @@ -19897,12 
+20108,12 @@ snapshots: resolve-pkg-maps@1.0.0: {} - resolve-tspaths@0.8.16(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)): + resolve-tspaths@0.8.16(typescript@5.6.3): dependencies: ansi-colors: 4.1.3 commander: 11.0.0 fast-glob: 3.3.1 - typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) + typescript: 5.6.3 resolve.exports@2.0.2: {} @@ -20671,10 +20882,20 @@ snapshots: tinybench@2.8.0: {} + tinybench@2.9.0: {} + + tinyexec@0.3.0: {} + tinypool@0.8.4: {} + tinypool@1.0.1: {} + + tinyrainbow@1.2.0: {} + tinyspy@2.2.1: {} + tinyspy@3.0.2: {} + tmp@0.0.33: dependencies: os-tmpdir: 1.0.2 @@ -20711,19 +20932,19 @@ snapshots: dependencies: typescript: 5.2.2 - ts-api-utils@1.0.3(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)): + ts-api-utils@1.0.3(typescript@5.6.3): dependencies: - typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) + typescript: 5.6.3 - ts-api-utils@1.3.0(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)): + ts-api-utils@1.3.0(typescript@5.4.5): dependencies: - typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) + typescript: 5.4.5 ts-expose-internals-conditionally@1.0.0-empty.0: {} ts-interface-checker@0.1.13: {} - ts-node@10.9.2(@types/node@20.12.12)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)): + ts-node@10.9.2(@types/node@20.12.12)(typescript@5.6.3): dependencies: '@cspotcode/source-map-support': 0.8.1 '@tsconfig/node10': 1.0.11 @@ -20737,13 +20958,17 @@ snapshots: create-require: 1.1.1 diff: 4.0.2 make-error: 1.3.6 - typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) + typescript: 5.6.3 v8-compile-cache-lib: 3.0.1 yn: 3.1.1 - tsconfck@3.0.3(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)): + tsconfck@3.0.3(typescript@5.4.5): optionalDependencies: - typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) + typescript: 5.4.5 + + tsconfck@3.0.3(typescript@5.6.3): + optionalDependencies: + typescript: 5.6.3 tsconfig-paths@3.14.2: dependencies: @@ -20756,7 +20981,7 @@ snapshots: 
tslib@2.6.2: {} - tsup@7.2.0(postcss@8.4.39)(ts-node@10.9.2(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)))(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)): + tsup@7.2.0(postcss@8.4.39)(ts-node@10.9.2(typescript@5.6.3))(typescript@5.6.3): dependencies: bundle-require: 4.0.2(esbuild@0.18.20) cac: 6.7.14 @@ -20766,7 +20991,7 @@ snapshots: execa: 5.1.1 globby: 11.1.0 joycon: 3.1.1 - postcss-load-config: 4.0.1(postcss@8.4.39)(ts-node@10.9.2(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))) + postcss-load-config: 4.0.1(postcss@8.4.39)(ts-node@10.9.2(typescript@5.6.3)) resolve-from: 5.0.0 rollup: 3.27.2 source-map: 0.8.0-beta.0 @@ -20774,12 +20999,12 @@ snapshots: tree-kill: 1.2.2 optionalDependencies: postcss: 8.4.39 - typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) + typescript: 5.6.3 transitivePeerDependencies: - supports-color - ts-node - tsup@8.1.2(postcss@8.4.39)(tsx@3.14.0)(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))(yaml@2.4.2): + tsup@8.1.2(postcss@8.4.39)(tsx@3.14.0)(typescript@5.4.5)(yaml@2.4.2): dependencies: bundle-require: 5.0.0(esbuild@0.23.0) cac: 6.7.14 @@ -20798,17 +21023,17 @@ snapshots: tree-kill: 1.2.2 optionalDependencies: postcss: 8.4.39 - typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) + typescript: 5.4.5 transitivePeerDependencies: - jiti - supports-color - tsx - yaml - tsutils@3.21.0(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)): + tsutils@3.21.0(typescript@5.6.3): dependencies: tslib: 1.14.1 - typescript: 5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme) + typescript: 5.6.3 tsx@3.14.0: dependencies: @@ -20970,7 +21195,9 @@ snapshots: typescript@5.3.3: {} - typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme): {} + typescript@5.4.5: {} + + typescript@5.6.3: {} ua-parser-js@1.0.38: {} @@ -20985,10 +21212,6 @@ snapshots: undici-types@5.26.5: {} - undici@5.28.2: - dependencies: - '@fastify/busboy': 2.1.1 - undici@5.28.4: dependencies: '@fastify/busboy': 2.1.1 @@ -21179,33 +21402,49 
@@ snapshots: - supports-color - terser - vite-tsconfig-paths@4.3.2(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))(vite@5.3.3(@types/node@18.15.10)(lightningcss@1.25.1)(terser@5.31.0)): + vite-node@2.1.2(@types/node@20.12.12)(lightningcss@1.25.1)(terser@5.31.0): + dependencies: + cac: 6.7.14 + debug: 4.3.7 + pathe: 1.1.2 + vite: 5.3.3(@types/node@20.12.12)(lightningcss@1.25.1)(terser@5.31.0) + transitivePeerDependencies: + - '@types/node' + - less + - lightningcss + - sass + - stylus + - sugarss + - supports-color + - terser + + vite-tsconfig-paths@4.3.2(typescript@5.4.5)(vite@5.3.3(@types/node@18.19.33)(lightningcss@1.25.1)(terser@5.31.0)): dependencies: debug: 4.3.4 globrex: 0.1.2 - tsconfck: 3.0.3(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + tsconfck: 3.0.3(typescript@5.4.5) optionalDependencies: - vite: 5.3.3(@types/node@18.15.10)(lightningcss@1.25.1)(terser@5.31.0) + vite: 5.3.3(@types/node@18.19.33)(lightningcss@1.25.1)(terser@5.31.0) transitivePeerDependencies: - supports-color - typescript - vite-tsconfig-paths@4.3.2(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))(vite@5.3.3(@types/node@18.19.33)(lightningcss@1.25.1)(terser@5.31.0)): + vite-tsconfig-paths@4.3.2(typescript@5.6.3)(vite@5.3.3(@types/node@18.15.10)(lightningcss@1.25.1)(terser@5.31.0)): dependencies: debug: 4.3.4 globrex: 0.1.2 - tsconfck: 3.0.3(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + tsconfck: 3.0.3(typescript@5.6.3) optionalDependencies: - vite: 5.3.3(@types/node@18.19.33)(lightningcss@1.25.1)(terser@5.31.0) + vite: 5.3.3(@types/node@18.15.10)(lightningcss@1.25.1)(terser@5.31.0) transitivePeerDependencies: - supports-color - typescript - vite-tsconfig-paths@4.3.2(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme))(vite@5.3.3(@types/node@20.12.12)(lightningcss@1.25.1)(terser@5.31.0)): + vite-tsconfig-paths@4.3.2(typescript@5.6.3)(vite@5.3.3(@types/node@20.12.12)(lightningcss@1.25.1)(terser@5.31.0)): dependencies: debug: 4.3.4 globrex: 
0.1.2 - tsconfck: 3.0.3(typescript@5.4.5(patch_hash=q3iy4fwdhi5sis3wty7d4nbsme)) + tsconfck: 3.0.3(typescript@5.6.3) optionalDependencies: vite: 5.3.3(@types/node@20.12.12)(lightningcss@1.25.1)(terser@5.31.0) transitivePeerDependencies: @@ -21436,6 +21675,40 @@ snapshots: - supports-color - terser + vitest@2.1.2(@types/node@20.12.12)(@vitest/ui@1.6.0)(lightningcss@1.25.1)(terser@5.31.0): + dependencies: + '@vitest/expect': 2.1.2 + '@vitest/mocker': 2.1.2(@vitest/spy@2.1.2)(vite@5.3.3(@types/node@20.12.12)(lightningcss@1.25.1)(terser@5.31.0)) + '@vitest/pretty-format': 2.1.2 + '@vitest/runner': 2.1.2 + '@vitest/snapshot': 2.1.2 + '@vitest/spy': 2.1.2 + '@vitest/utils': 2.1.2 + chai: 5.1.1 + debug: 4.3.7 + magic-string: 0.30.11 + pathe: 1.1.2 + std-env: 3.7.0 + tinybench: 2.9.0 + tinyexec: 0.3.0 + tinypool: 1.0.1 + tinyrainbow: 1.2.0 + vite: 5.3.3(@types/node@20.12.12)(lightningcss@1.25.1)(terser@5.31.0) + vite-node: 2.1.2(@types/node@20.12.12)(lightningcss@1.25.1)(terser@5.31.0) + why-is-node-running: 2.3.0 + optionalDependencies: + '@types/node': 20.12.12 + '@vitest/ui': 1.6.0(vitest@2.1.2) + transitivePeerDependencies: + - less + - lightningcss + - msw + - sass + - stylus + - sugarss + - supports-color + - terser + vlq@1.0.1: {} walker@1.0.8: @@ -21520,6 +21793,11 @@ snapshots: siginfo: 2.0.0 stackback: 0.0.2 + why-is-node-running@2.3.0: + dependencies: + siginfo: 2.0.0 + stackback: 0.0.2 + wide-align@1.1.5: dependencies: string-width: 4.2.3