From 5830618eb2fafdecdbfa106ca8ffcb74fb88d835 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 24 Sep 2025 00:05:00 +0200 Subject: [PATCH 01/68] feat: initial db pull implementation --- packages/cli/src/actions/action-utils.ts | 14 + packages/cli/src/actions/db.ts | 70 ++++- packages/cli/src/actions/pull/index.ts | 238 +++++++++++++++++ .../cli/src/actions/pull/provider/index.ts | 9 + .../src/actions/pull/provider/postgresql.ts | 242 ++++++++++++++++++ .../cli/src/actions/pull/provider/provider.ts | 44 ++++ .../cli/src/actions/pull/provider/sqlite.ts | 191 ++++++++++++++ packages/cli/src/actions/pull/utils.ts | 67 +++++ packages/language/src/document.ts | 3 +- pnpm-lock.yaml | 24 ++ 10 files changed, 897 insertions(+), 5 deletions(-) create mode 100644 packages/cli/src/actions/pull/index.ts create mode 100644 packages/cli/src/actions/pull/provider/index.ts create mode 100644 packages/cli/src/actions/pull/provider/postgresql.ts create mode 100644 packages/cli/src/actions/pull/provider/provider.ts create mode 100644 packages/cli/src/actions/pull/provider/sqlite.ts create mode 100644 packages/cli/src/actions/pull/utils.ts diff --git a/packages/cli/src/actions/action-utils.ts b/packages/cli/src/actions/action-utils.ts index d2e0ca2e9..78e4cb38b 100644 --- a/packages/cli/src/actions/action-utils.ts +++ b/packages/cli/src/actions/action-utils.ts @@ -55,6 +55,20 @@ export async function loadSchemaDocument(schemaFile: string) { return loadResult.model; } +export async function loadSchemaDocumentWithServices(schemaFile: string) { + const loadResult = await loadDocument(schemaFile); + if (!loadResult.success) { + loadResult.errors.forEach((err) => { + console.error(colors.red(err)); + }); + throw new CliError('Schema contains errors. 
See above for details.'); + } + loadResult.warnings.forEach((warn) => { + console.warn(colors.yellow(warn)); + }); + return { services: loadResult.services, model: loadResult.model }; +} + export function handleSubProcessError(err: unknown) { if (err instanceof Error && 'status' in err && typeof err.status === 'number') { process.exit(err.status); diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 3d0108374..6c39a3529 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -1,25 +1,36 @@ +import { ZModelCodeGenerator } from '@zenstackhq/sdk'; import fs from 'node:fs'; import { execPrisma } from '../utils/exec-utils'; -import { generateTempPrismaSchema, getSchemaFile, handleSubProcessError, requireDataSourceUrl } from './action-utils'; +import { generateTempPrismaSchema, getSchemaFile, handleSubProcessError, requireDataSourceUrl, loadSchemaDocumentWithServices } from './action-utils'; +import { syncEnums, syncRelation, syncTable, type Relation } from './pull'; +import { providers } from './pull/provider'; +import { getDatasource, getDbName } from './pull/utils'; -type Options = { +type PushOptions = { schema?: string; acceptDataLoss?: boolean; forceReset?: boolean; }; +type PullOptions = { + schema?: string; +}; + /** * CLI action for db related commands */ -export async function run(command: string, options: Options) { +export async function run(command: string, options: PushOptions) { switch (command) { case 'push': await runPush(options); break; + case 'pull': + await runPull(options); + break; } } -async function runPush(options: Options) { +async function runPush(options: PushOptions) { const schemaFile = getSchemaFile(options.schema); // validate datasource url exists @@ -49,3 +60,54 @@ async function runPush(options: Options) { } } } + +async function runPull(options: PullOptions) { + const schemaFile = getSchemaFile(options.schema); + const { model, services } = await loadSchemaDocumentWithServices(schemaFile); + + const SUPPORTED_PROVIDERS = ['sqlite', 'postgresql'] + const datasource = getDatasource(model) + + if (!datasource) { + throw new Error('No datasource found in the schema.') + } + + if (!SUPPORTED_PROVIDERS.includes(datasource.provider)) { + throw new Error(`Unsupported datasource provider: ${datasource.provider}`) + } + + const provider = providers[datasource.provider]; + + if (!provider) { + throw new Error( + `No introspection provider found for: ${datasource.provider}` + ) + } + + const { enums, tables } = await provider.introspect(datasource.url) + + syncEnums(enums, model) + + const resolveRelations: Relation[] = [] + for (const table of tables) { + const relations = syncTable({ table, model, provider }) + resolveRelations.push(...relations) + } + + for (const rel of resolveRelations) { + syncRelation(model, rel, services); + } + + for (const d of model.declarations) { + if (d.$type !== 'DataModel') continue + const found = tables.find((t) => getDbName(d) === t.name) + if (!found) { + delete (d.$container as any)[d.$containerProperty!][d.$containerIndex!] 
+ } + } + + model.declarations = model.declarations.filter((d) => d !== undefined) + + const zmpdelSchema = await new ZModelCodeGenerator().generate(model) + fs.writeFileSync(schemaFile, zmpdelSchema) +} diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts new file mode 100644 index 000000000..4651225e4 --- /dev/null +++ b/packages/cli/src/actions/pull/index.ts @@ -0,0 +1,238 @@ +import type { ZModelServices } from '@zenstackhq/language' +import type { + Attribute, + AttributeArg, + DataField, + DataFieldAttribute, + DataFieldType, + DataModel, + Enum, + EnumField, + Model, + UnsupportedFieldType +} from '@zenstackhq/language/ast' +import type { IntrospectedEnum, IntrospectedTable, IntrospectionProvider } from './provider' +import { getAttributeRef, getDbName } from './utils' + +export function syncEnums(dbEnums: IntrospectedEnum[], model: Model) { + for (const dbEnum of dbEnums) { + let schemaEnum = model.declarations.find( + (d) => d.$type === 'Enum' && getDbName(d) === dbEnum.enum_type + ) as Enum | undefined + + if (!schemaEnum) { + schemaEnum = { + $type: 'Enum' as const, + $container: model, + name: dbEnum.enum_type, + attributes: [], + comments: [], + fields: [], + } + model.declarations.push(schemaEnum) + } + schemaEnum.fields = dbEnum.values.map((v) => { + const existingValue = schemaEnum.fields.find((f) => getDbName(f) === v) + if (!existingValue) { + const enumField: EnumField = { + $type: 'EnumField' as const, + $container: schemaEnum, + name: v, + attributes: [], + comments: [], + } + return enumField + } + return existingValue + }) + } +} + +export type Relation = { + schema: string + table: string + column: string + type: 'one' | 'many' + fk_name: string + nullable: boolean + references: { + schema: string | null + table: string | null + column: string | null + } +} + +export function syncTable({ + model, + provider, + table, +}: { + table: IntrospectedTable + model: Model + provider: IntrospectionProvider +}) { + const relations: Relation[] = [] + let modelTable = model.declarations.find( + (d) => d.$type === 'DataModel' && getDbName(d) === table.name + ) as DataModel | undefined + + if (!modelTable) { + modelTable = { + $type: 'DataModel' as const, + $container: model, + name: table.name, + fields: [], + attributes: [], + comments: [], + isView: false, + mixins: [], + } + model.declarations.push(modelTable) + } + + modelTable.fields = table.columns.map((col) => { + if (col.foreign_key_table) { + relations.push({ + schema: table.schema, + table: table.name, + column: col.name, + type: col.unique ? 'one' : 'many', + fk_name: col.foreign_key_name!, + nullable: col.nullable, + references: { + schema: col.foreign_key_schema, + table: col.foreign_key_table, + column: col.foreign_key_column, + }, + }) + } + + const fieldPrefix = /[0-9]/g.test(col.name.charAt(0)) ? '_' : '' + const fieldName = `${fieldPrefix}${col.name}` + + const existingField = modelTable!.fields.find( + (f) => getDbName(f) === fieldName + ) + if (!existingField) { + const builtinType = provider.getBuiltinType(col.datatype) + const unsupported: UnsupportedFieldType = { + get $container() { + return type + }, + $type: 'UnsupportedFieldType' as const, + value: { + get $container() { + return unsupported + }, + $type: 'StringLiteral', + value: col.datatype, + }, + } + + const type: DataFieldType = { + get $container() { + return field + }, + $type: 'DataFieldType' as const, + type: builtinType.type === 'Unsupported' ? 
undefined : builtinType.type, + array: builtinType.isArray, + unsupported: + builtinType.type === 'Unsupported' ? unsupported : undefined, + optional: col.nullable, + reference: col.options.length + ? { + $refText: col.datatype, + ref: model.declarations.find( + (d) => d.$type === 'Enum' && getDbName(d) === col.datatype + ) as Enum | undefined, + } + : undefined, + } + + const field: DataField = { + $type: 'DataField' as const, + type, + $container: modelTable!, + name: fieldName, + get attributes() { + if (fieldPrefix !== '') return [] + + const attr: DataFieldAttribute = { + $type: 'DataFieldAttribute' as const, + get $container() { + return field + }, + decl: { + $refText: '@map', + ref: model.$document?.references.find( + (r) => + //@ts-ignore + r.ref.$type === 'Attribute' && r.ref.name === '@map' + )?.ref as Attribute, + }, + get args() { + const arg: AttributeArg = { + $type: 'AttributeArg' as const, + get $container() { + return attr + }, + name: 'name', + $resolvedParam: { + name: 'name', + }, + get value() { + return { + $type: 'StringLiteral' as const, + $container: arg, + value: col.name, + } + }, + } + + return [arg] + }, + } + + return [attr] + }, + comments: [], + } + return field + } + return existingField + }) + + return relations +} + +export function syncRelation(model: Model, relation: Relation, services: ZModelServices) { + const idAttribute = getAttributeRef('@id', services) + const uniqueAttribute = getAttributeRef('@unique', services) + const relationAttribute = getAttributeRef('@relation', services) + + if (!idAttribute || !uniqueAttribute || !relationAttribute) { + throw new Error('Cannot find required attributes in the model.') + } + + const sourceModel = model.declarations.find( + (d) => d.$type === 'DataModel' && getDbName(d) === relation.table + ) as DataModel | undefined + if (!sourceModel) return + + const sourceField = sourceModel.fields.find( + (f) => getDbName(f) === relation.column + ) as DataField | undefined + if (!sourceField) return + + const targetModel = model.declarations.find( + (d) => d.$type === 'DataModel' && getDbName(d) === relation.references.table + ) as DataModel | undefined + if (!targetModel) return + + const targetField = targetModel.fields.find( + (f) => getDbName(f) === relation.references.column + ) + if (!targetField) return + + //TODO: Finish relation sync +} \ No newline at end of file diff --git a/packages/cli/src/actions/pull/provider/index.ts b/packages/cli/src/actions/pull/provider/index.ts new file mode 100644 index 000000000..82ee2ac38 --- /dev/null +++ b/packages/cli/src/actions/pull/provider/index.ts @@ -0,0 +1,9 @@ +export * from './provider' + +import { postgresql } from "./postgresql"; +import { sqlite } from "./sqlite"; + +export const providers = { + postgresql, + sqlite +}; \ No newline at end of file diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts new file mode 100644 index 000000000..10a9642a3 --- /dev/null +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -0,0 +1,242 @@ +import { Client } from 'pg' +import type { + IntrospectedEnum, + IntrospectedSchema, + IntrospectedTable, + IntrospectionProvider, +} from './provider' + +export const postgresql: IntrospectionProvider = { + getBuiltinType(type) { + const t = (type || '').toLowerCase() + + const isArray = t.startsWith('_') + + switch (t.replace(/^_/, '')) { + // integers + case 'int2': + case 'smallint': + case 'int4': + case 'integer': + return { type: 'Int', isArray } + case 
'int8': + case 'bigint': + return { type: 'BigInt', isArray } + + // decimals and floats + case 'numeric': + case 'decimal': + return { type: 'Decimal', isArray } + case 'float4': + case 'real': + case 'float8': + case 'double precision': + return { type: 'Float', isArray } + + // boolean + case 'bool': + case 'boolean': + return { type: 'Boolean', isArray } + + // strings + case 'text': + case 'varchar': + case 'bpchar': + case 'character varying': + case 'character': + return { type: 'String', isArray } + + // uuid + case 'uuid': + return { type: 'String', isArray } + + // dates/times + case 'date': + case 'timestamp': + case 'timestamptz': + return { type: 'DateTime', isArray } + + // binary + case 'bytea': + return { type: 'Bytes', isArray } + + // json + case 'json': + case 'jsonb': + return { type: 'Json', isArray } + + // unsupported or postgres-specific + case 'time': + case 'timetz': + case 'interval': + case 'money': + case 'xml': + case 'bit': + case 'varbit': + case 'cidr': + case 'inet': + case 'macaddr': + case 'macaddr8': + case 'point': + case 'line': + case 'lseg': + case 'box': + case 'path': + case 'polygon': + case 'circle': + case 'tsvector': + case 'tsquery': + case 'jsonpath': + case 'hstore': + case 'oid': + case 'name': + case 'regclass': + case 'regproc': + case 'regprocedure': + case 'regoper': + case 'regoperator': + case 'regtype': + case 'regconfig': + case 'regdictionary': + case 'pg_lsn': + case 'txid_snapshot': + case 'int4range': + case 'int8range': + case 'numrange': + case 'tsrange': + case 'tstzrange': + case 'daterange': + default: + return { type: 'Unsupported' as const, isArray } + } + }, + async introspect(connectionString: string): Promise { + const client = new Client({ connectionString }) + await client.connect() + + const { rows: tables } = await client.query( + tableIntrospectionQuery + ) + const { rows: enums } = await client.query( + enumIntrospectionQuery + ) + + return { + enums, + tables, + } + }, +} + +const enumIntrospectionQuery = ` +SELECT + n.nspname AS schema_name, + t.typname AS enum_type, + coalesce(json_agg(e.enumlabel ORDER BY e.enumsortorder), '[]') AS values +FROM pg_type t +JOIN pg_enum e ON t.oid = e.enumtypid +JOIN pg_namespace n ON n.oid = t.typnamespace +GROUP BY schema_name, enum_type +ORDER BY schema_name, enum_type;` + +const tableIntrospectionQuery = ` +SELECT +"ns"."nspname" AS "schema", +"cls"."relname" AS "name", +CASE "cls"."relkind" + WHEN 'r' THEN 'table' + WHEN 'v' THEN 'view' + ELSE NULL +END AS "type", +( +SELECT +coalesce(json_agg(agg), '[]') +FROM +( + SELECT + "att"."attname" AS "name", + "typ"."typname" AS "datatype", + "tns"."nspname" AS "datatype_schema", + "fk_ns"."nspname" AS "foreign_key_schema", + "fk_cls"."relname" AS "foreign_key_table", + "fk_att"."attname" AS "foreign_key_column", + "fk_con"."conname" AS "foreign_key_name", + CASE "fk_con"."confupdtype" + WHEN 'a' THEN 'NO ACTION' + WHEN 'r' THEN 'RESTRICT' + WHEN 'c' THEN 'CASCADE' + WHEN 'n' THEN 'SET NULL' + WHEN 'd' THEN 'SET DEFAULT' + ELSE NULL + END AS "foreign_key_on_update", + CASE "fk_con"."confdeltype" + WHEN 'a' THEN 'NO ACTION' + WHEN 'r' THEN 'RESTRICT' + WHEN 'c' THEN 'CASCADE' + WHEN 'n' THEN 'SET NULL' + WHEN 'd' THEN 'SET DEFAULT' + ELSE NULL + END AS "foreign_key_on_delete", + "pk_con"."conkey" IS NOT NULL AS "pk", + ( + EXISTS ( + SELECT 1 + FROM "pg_catalog"."pg_constraint" AS "u_con" + WHERE "u_con"."contype" = 'u' + AND "u_con"."conrelid" = "cls"."oid" + AND array_length("u_con"."conkey", 1) = 1 + AND "att"."attnum" = ANY 
("u_con"."conkey") + ) + OR EXISTS ( + SELECT 1 + FROM "pg_catalog"."pg_index" AS "u_idx" + WHERE "u_idx"."indrelid" = "cls"."oid" + AND "u_idx"."indisunique" = TRUE + AND "u_idx"."indnkeyatts" = 1 + AND "att"."attnum" = ANY ("u_idx"."indkey"::int2[]) + ) + ) AS "unique", + "att"."attgenerated" != '' AS "computed", + "att"."attnotnull" != TRUE AS "nullable", + coalesce( + ( + SELECT + json_agg("enm"."enumlabel") AS "o" + FROM + "pg_catalog"."pg_enum" AS "enm" + WHERE + "enm"."enumtypid" = "typ"."oid" + ), + '[]' + ) AS "options" + FROM + "pg_catalog"."pg_attribute" AS "att" + INNER JOIN "pg_catalog"."pg_type" AS "typ" ON "typ"."oid" = "att"."atttypid" + INNER JOIN "pg_catalog"."pg_namespace" AS "tns" ON "tns"."oid" = "typ"."typnamespace" + LEFT JOIN "pg_catalog"."pg_constraint" AS "pk_con" ON "pk_con"."contype" = 'p' + AND "pk_con"."conrelid" = "cls"."oid" + AND "att"."attnum" = ANY ("pk_con"."conkey") + LEFT JOIN "pg_catalog"."pg_constraint" AS "fk_con" ON "fk_con"."contype" = 'f' + AND "fk_con"."conrelid" = "cls"."oid" + AND "att"."attnum" = ANY ("fk_con"."conkey") + LEFT JOIN "pg_catalog"."pg_class" AS "fk_cls" ON "fk_cls"."oid" = "fk_con"."confrelid" + LEFT JOIN "pg_catalog"."pg_namespace" AS "fk_ns" ON "fk_ns"."oid" = "fk_cls"."relnamespace" + LEFT JOIN "pg_catalog"."pg_attribute" AS "fk_att" ON "fk_att"."attrelid" = "fk_cls"."oid" + AND "fk_att"."attnum" = ANY ("fk_con"."confkey") + WHERE + "att"."attrelid" = "cls"."oid" + AND "att"."attnum" >= 0 + AND "att"."attisdropped" != TRUE + ORDER BY "att"."attnum" +) AS agg +) AS "columns" +FROM +"pg_catalog"."pg_class" AS "cls" +INNER JOIN "pg_catalog"."pg_namespace" AS "ns" ON "cls"."relnamespace" = "ns"."oid" +WHERE +"ns"."nspname" !~ '^pg_' +AND "ns"."nspname" != 'information_schema' +AND "cls"."relkind" IN ('r', 'v') +AND "cls"."relname" !~ '^pg_' +AND "cls"."relname" !~ '_prisma_migrations' +` diff --git a/packages/cli/src/actions/pull/provider/provider.ts b/packages/cli/src/actions/pull/provider/provider.ts new file mode 100644 index 000000000..d8bd09288 --- /dev/null +++ b/packages/cli/src/actions/pull/provider/provider.ts @@ -0,0 +1,44 @@ +import type { BuiltinType } from '@zenstackhq/language/ast' + +export type Cascade = "NO ACTION" | "RESTRICT"| "CASCADE" | "SET NULL" | "SET DEFAULT" | null; + +export interface IntrospectedTable { + schema: string + name: string + type: 'table' | 'view' + columns: { + name: string + datatype: string + datatype_schema: string + foreign_key_schema: string | null + foreign_key_table: string | null + foreign_key_column: string | null + foreign_key_name: string | null + foreign_key_on_update: Cascade + foreign_key_on_delete: Cascade + pk: boolean + computed: boolean + nullable: boolean + options: string[] + unique: boolean + }[] +} + +export type IntrospectedEnum = { + schema_name: string + enum_type: string + values: string[] +} + +export type IntrospectedSchema = { + tables: IntrospectedTable[] + enums: IntrospectedEnum[] +} + +export interface IntrospectionProvider { + introspect(connectionString: string): Promise + getBuiltinType(type: string): { + type: BuiltinType | 'Unsupported' + isArray: boolean + } +} diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts new file mode 100644 index 000000000..61883ef90 --- /dev/null +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -0,0 +1,191 @@ +import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider' + +// Note: We dynamically import 
better-sqlite3 inside the async function to avoid +// requiring it at module load time for environments that don't use SQLite. + +export const sqlite: IntrospectionProvider = { + getBuiltinType(type) { + const t = (type || '').toLowerCase().trim() + + // SQLite has no array types + const isArray = false + + switch (t) { + // integers + case 'int': + case 'integer': + case 'tinyint': + case 'smallint': + case 'mediumint': + return { type: 'Int', isArray } + case 'bigint': + return { type: 'BigInt', isArray } + + // decimals and floats + case 'numeric': + case 'decimal': + return { type: 'Decimal', isArray } + case 'real': + case 'double': + case 'double precision': + case 'float': + return { type: 'Float', isArray } + + // boolean (SQLite stores as integer 0/1, but commonly typed as BOOLEAN) + case 'bool': + case 'boolean': + return { type: 'Boolean', isArray } + + // strings + case 'text': + case 'varchar': + case 'character varying': + case 'char': + case 'character': + case 'clob': + case 'uuid': // often stored as TEXT + return { type: 'String', isArray } + + // dates/times (stored as TEXT/REAL/INTEGER, but commonly typed as DATE/DATETIME) + case 'date': + case 'datetime': + return { type: 'DateTime', isArray } + + // binary + case 'blob': + return { type: 'Bytes', isArray } + + // json (not a native type, but commonly used) + case 'json': + return { type: 'Json', isArray } + + default: { + // Fallbacks based on SQLite type affinity rules + if (t.includes('int')) return { type: 'Int', isArray } + if (t.includes('char') || t.includes('clob') || t.includes('text')) return { type: 'String', isArray } + if (t.includes('blob')) return { type: 'Bytes', isArray } + if (t.includes('real') || t.includes('floa') || t.includes('doub')) return { type: 'Float', isArray } + if (t.includes('dec') || t.includes('num')) return { type: 'Decimal', isArray } + return { type: 'Unsupported' as const, isArray } + } + } + }, + + async introspect(connectionString: string): Promise { + const SQLite = (await import('better-sqlite3')).default + const db = new SQLite(connectionString, { readonly: true }) + + try { + const all = (sql: string): T[] => { + const stmt: any = db.prepare(sql) + return stmt.all() as T[] + } + + // List user tables and views (exclude internal sqlite_*) + const tablesRaw = all<{ name: string; type: 'table' | 'view' }>( + "SELECT name, type FROM sqlite_schema WHERE type IN ('table','view') AND name NOT LIKE 'sqlite_%' ORDER BY name" + ) + + const tables: IntrospectedTable[] = [] + + for (const t of tablesRaw) { + const tableName = t.name + const schema = 'main' + + // Columns with extended info; filter out hidden=1 (internal/rowid), mark computed if hidden=2 (generated) + const columnsInfo = all<{ + cid: number + name: string + type: string + notnull: number + dflt_value: string | null + pk: number + hidden?: number + }>(`PRAGMA table_xinfo('${tableName.replace(/'/g, "''")}')`) + + // Unique columns detection via unique indexes with single column + const uniqueIndexRows = all<{ name: string; unique: number }>( + `PRAGMA index_list('${tableName.replace(/'/g, "''")}')` + ).filter((r) => r.unique === 1) + + const uniqueSingleColumn = new Set() + for (const idx of uniqueIndexRows) { + const idxCols = all<{ name: string }>( + `PRAGMA index_info('${idx.name.replace(/'/g, "''")}')` + ) + if (idxCols.length === 1 && idxCols[0]?.name) { + uniqueSingleColumn.add(idxCols[0].name) + } + } + + // Foreign keys mapping by column name + const fkRows = all<{ + id: number + seq: number + table: string + from: 
string + to: string | null + on_update: any + on_delete: any + }>(`PRAGMA foreign_key_list('${tableName.replace(/'/g, "''")}')`) + + const fkByColumn = new Map< + string, + { + foreign_key_schema: string | null + foreign_key_table: string | null + foreign_key_column: string | null + foreign_key_name: string | null + foreign_key_on_update: IntrospectedTable['columns'][number]['foreign_key_on_update'] + foreign_key_on_delete: IntrospectedTable['columns'][number]['foreign_key_on_delete'] + } + >() + + for (const fk of fkRows) { + fkByColumn.set(fk.from, { + foreign_key_schema: 'main', + foreign_key_table: fk.table || null, + foreign_key_column: fk.to || null, + foreign_key_name: null, + foreign_key_on_update: (fk.on_update as any) ?? null, + foreign_key_on_delete: (fk.on_delete as any) ?? null, + }) + } + + const columns: IntrospectedTable['columns'] = [] + for (const c of columnsInfo) { + // hidden: 1 (hidden/internal) -> skip; 2 (generated) -> mark computed + const hidden = c.hidden ?? 0 + if (hidden === 1) continue + + const fk = fkByColumn.get(c.name) + + columns.push({ + name: c.name, + datatype: c.type || '', + datatype_schema: schema, + foreign_key_schema: fk?.foreign_key_schema ?? null, + foreign_key_table: fk?.foreign_key_table ?? null, + foreign_key_column: fk?.foreign_key_column ?? null, + foreign_key_name: fk?.foreign_key_name ?? null, + foreign_key_on_update: fk?.foreign_key_on_update ?? null, + foreign_key_on_delete: fk?.foreign_key_on_delete ?? null, + pk: !!c.pk, + computed: hidden === 2, + nullable: c.notnull !== 1, + options: [], + unique: uniqueSingleColumn.has(c.name), + }) + } + + tables.push({ schema, name: tableName, columns, type: t.type }) + } + + const enums: IntrospectedEnum[] = [] // SQLite doesn't support enums + + return { tables, enums } + } finally { + db.close() + } + }, +} diff --git a/packages/cli/src/actions/pull/utils.ts b/packages/cli/src/actions/pull/utils.ts new file mode 100644 index 000000000..b611fbca6 --- /dev/null +++ b/packages/cli/src/actions/pull/utils.ts @@ -0,0 +1,67 @@ +import type { ZModelServices } from '@zenstackhq/language' +import { + DataField, + EnumField, + isInvocationExpr, + type AbstractDeclaration, + type Attribute, + type Model, +} from '@zenstackhq/language/ast' +import { getStringLiteral } from '@zenstackhq/language/utils' +import type { + DataSourceProviderType +} from '@zenstackhq/sdk/schema' +import type { Reference } from 'langium' + +export function getAttribute(model: Model, attrName: string) { + const references = model.$document! + .references as Reference[] + return references.find( + (a) => a.ref!.$type === 'Attribute' && a.ref!.name === attrName + )?.ref as Attribute | undefined +} + +export function getDatasource(model: Model) { + const datasource = model.declarations.find((d) => d.$type === 'DataSource') + if (!datasource) { + throw new Error('No datasource declaration found in the schema.') + } + + const urlField = datasource.fields.find((f) => f.name === 'url')! + let url = getStringLiteral(urlField.value) + + if (!url && isInvocationExpr(urlField.value)) { + url = process.env[getStringLiteral(urlField.value.args[0]) as string]! 
+ } + + if (!url) { + throw new Error('The url field must be a string literal or an env().') + } + + return { + name: datasource.name, + provider: getStringLiteral( + datasource.fields.find((f) => f.name === 'provider')?.value + ) as DataSourceProviderType, + url, + } +} + +export function getDbName( + decl: AbstractDeclaration | DataField | EnumField +): string { + if (!('attributes' in decl)) return decl.name + const nameAttr = decl.attributes.find( + (a) => a.decl.ref?.name === '@@map' || a.decl.ref?.name === '@map' + ) + if (!nameAttr) return decl.name + const attrValue = nameAttr.args[0]?.value + + if (attrValue?.$type !== 'StringLiteral') return decl.name + + return attrValue.value +} + +export function getAttributeRef(name: string, services: ZModelServices) { + return services.shared.workspace.IndexManager.allElements("Attribute").find(a => a.name === name) as Attribute | undefined +} \ No newline at end of file diff --git a/packages/language/src/document.ts b/packages/language/src/document.ts index 9642e61d5..569e04202 100644 --- a/packages/language/src/document.ts +++ b/packages/language/src/document.ts @@ -33,7 +33,7 @@ export async function loadDocument( fileName: string, additionalModelFiles: string[] = [], ): Promise< - { success: true; model: Model; warnings: string[] } | { success: false; errors: string[]; warnings: string[] } + { success: true; model: Model; warnings: string[], services: ZModelServices } | { success: false; errors: string[]; warnings: string[] } > { const { ZModelLanguage: services } = createZModelServices(false); const extensions = services.LanguageMetaData.fileExtensions; @@ -143,6 +143,7 @@ export async function loadDocument( return { success: true, model: document.parseResult.value as Model, + services, warnings, }; } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 3f519e5c3..52c28cb6d 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -6915,6 +6915,9 @@ packages: pg-connection-string@2.9.1: resolution: {integrity: sha512-nkc6NpDcvPVpZXxrreI/FOtX3XemeLl8E0qFr6F2Lrm/I8WOnaWNhIPK2Z7OHpw7gh5XJThi6j6ppgNoaT1w4w==} + pg-connection-string@2.9.1: + resolution: {integrity: sha512-nkc6NpDcvPVpZXxrreI/FOtX3XemeLl8E0qFr6F2Lrm/I8WOnaWNhIPK2Z7OHpw7gh5XJThi6j6ppgNoaT1w4w==} + pg-int8@1.0.1: resolution: {integrity: sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==} engines: {node: '>=4.0.0'} @@ -6948,6 +6951,15 @@ packages: pg-native: optional: true + pg@8.16.3: + resolution: {integrity: sha512-enxc1h0jA/aq5oSDMvqyW3q89ra6XIIDZgCX9vkMrnz5DFTw/Ny3Li2lFQ+pt3L6MCgm/5o2o8HW9hiJji+xvw==} + engines: {node: '>= 16.0.0'} + peerDependencies: + pg-native: '>=3.0.1' + peerDependenciesMeta: + pg-native: + optional: true + pgpass@1.0.5: resolution: {integrity: sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==} @@ -15058,6 +15070,8 @@ snapshots: pg-connection-string@2.9.1: {} + pg-connection-string@2.9.1: {} + pg-int8@1.0.1: {} pg-numeric@1.0.2: {} @@ -15096,6 +15110,16 @@ snapshots: optionalDependencies: pg-cloudflare: 1.2.7 + pg@8.16.3: + dependencies: + pg-connection-string: 2.9.1 + pg-pool: 3.10.1(pg@8.16.3) + pg-protocol: 1.10.3 + pg-types: 2.2.0 + pgpass: 1.0.5 + optionalDependencies: + pg-cloudflare: 1.2.7 + pgpass@1.0.5: dependencies: split2: 4.2.0 From df3ac3c63645d218ef6df3e5cccdd4ea0f9290a5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Thu, 25 Sep 2025 00:49:11 +0200 Subject: [PATCH 02/68] fix: generate imports and attributes for 
zmodel-code-generator --- packages/language/src/zmodel-code-generator.ts | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/language/src/zmodel-code-generator.ts b/packages/language/src/zmodel-code-generator.ts index 1e0366ede..21bb5cad1 100644 --- a/packages/language/src/zmodel-code-generator.ts +++ b/packages/language/src/zmodel-code-generator.ts @@ -103,10 +103,18 @@ ${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')} }`; } + @gen(ModelImport) + private _generateModelImport(ast: ModelImport) { + return `import '${ast.path}'`; + } + @gen(Enum) private _generateEnum(ast: Enum) { return `enum ${ast.name} { -${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')} +${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')}${ast.attributes.length > 0 + ? '\n\n' + ast.attributes.map((x) => this.indent + this.generate(x)).join('\n') + : '' + } }`; } From a22b07ab4b3a11c39c5f83174f654601737451a4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Thu, 25 Sep 2025 00:50:08 +0200 Subject: [PATCH 03/68] fix: add option to not exclude imports in loadDocument --- packages/language/src/document.ts | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/packages/language/src/document.ts b/packages/language/src/document.ts index 569e04202..2fdce233d 100644 --- a/packages/language/src/document.ts +++ b/packages/language/src/document.ts @@ -32,8 +32,10 @@ import type { ZModelFormatter } from './zmodel-formatter'; export async function loadDocument( fileName: string, additionalModelFiles: string[] = [], + keepImports: boolean = false, ): Promise< - { success: true; model: Model; warnings: string[], services: ZModelServices } | { success: false; errors: string[]; warnings: string[] } + | { success: true; model: Model; warnings: string[]; services: ZModelServices } + | { success: false; errors: string[]; warnings: string[] } > { const { ZModelLanguage: services } = createZModelServices(false); const extensions = services.LanguageMetaData.fileExtensions; @@ -121,14 +123,16 @@ export async function loadDocument( const model = document.parseResult.value as Model; - // merge all declarations into the main document - const imported = mergeImportsDeclarations(langiumDocuments, model); + if (keepImports === false) { + // merge all declarations into the main document + const imported = mergeImportsDeclarations(langiumDocuments, model); - // remove imported documents - imported.forEach((model) => { - langiumDocuments.deleteDocument(model.$document!.uri); - services.shared.workspace.IndexManager.remove(model.$document!.uri); - }); + // remove imported documents + imported.forEach((model) => { + langiumDocuments.deleteDocument(model.$document!.uri); + services.shared.workspace.IndexManager.remove(model.$document!.uri); + }); + } // extra validation after merging imported declarations const additionalErrors = validationAfterImportMerge(model); From 6cc49fe48b195df712d773ae82eb5585af519685 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Thu, 25 Sep 2025 00:53:50 +0200 Subject: [PATCH 04/68] fix: continue work on db pull --- packages/cli/package.json | 1 + packages/cli/src/actions/action-utils.ts | 2 +- packages/cli/src/actions/db.ts | 24 ++-- packages/cli/src/actions/pull/index.ts | 132 +++++++++++---------- packages/cli/src/actions/pull/utils.ts | 33 +++++- packages/cli/src/index.ts | 8 ++ pnpm-lock.yaml | 141 +++++++++++++++++++---- 7 files changed, 241 
insertions(+), 100 deletions(-) diff --git a/packages/cli/package.json b/packages/cli/package.json index 2196fa74d..ece5d544d 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -36,6 +36,7 @@ "./package.json": "./package.json" }, "dependencies": { + "@dotenvx/dotenvx": "^1.51.0", "@zenstackhq/common-helpers": "workspace:*", "@zenstackhq/language": "workspace:*", "@zenstackhq/orm": "workspace:*", diff --git a/packages/cli/src/actions/action-utils.ts b/packages/cli/src/actions/action-utils.ts index 78e4cb38b..033cbdd48 100644 --- a/packages/cli/src/actions/action-utils.ts +++ b/packages/cli/src/actions/action-utils.ts @@ -56,7 +56,7 @@ export async function loadSchemaDocument(schemaFile: string) { } export async function loadSchemaDocumentWithServices(schemaFile: string) { - const loadResult = await loadDocument(schemaFile); + const loadResult = await loadDocument(schemaFile, [], true); if (!loadResult.success) { loadResult.errors.forEach((err) => { console.error(colors.red(err)); diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 6c39a3529..e79073e33 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -1,5 +1,6 @@ import { ZModelCodeGenerator } from '@zenstackhq/sdk'; import fs from 'node:fs'; +import path from 'node:path'; import { execPrisma } from '../utils/exec-utils'; import { generateTempPrismaSchema, getSchemaFile, handleSubProcessError, requireDataSourceUrl, loadSchemaDocumentWithServices } from './action-utils'; import { syncEnums, syncRelation, syncTable, type Relation } from './pull'; @@ -14,6 +15,7 @@ type PushOptions = { type PullOptions = { schema?: string; + out?: string; }; /** @@ -64,7 +66,7 @@ async function runPush(options: PushOptions) { async function runPull(options: PullOptions) { const schemaFile = getSchemaFile(options.schema); const { model, services } = await loadSchemaDocumentWithServices(schemaFile); - + await import("@dotenvx/dotenvx/config") const SUPPORTED_PROVIDERS = ['sqlite', 'postgresql'] const datasource = getDatasource(model) @@ -86,16 +88,16 @@ async function runPull(options: PullOptions) { const { enums, tables } = await provider.introspect(datasource.url) - syncEnums(enums, model) + syncEnums({ dbEnums: enums, model, services }) const resolveRelations: Relation[] = [] for (const table of tables) { - const relations = syncTable({ table, model, provider }) + const relations = syncTable({ table, model, provider, services }) resolveRelations.push(...relations) } - for (const rel of resolveRelations) { - syncRelation(model, rel, services); + for (const relation of resolveRelations) { + syncRelation({ model, relation, services }); } for (const d of model.declarations) { @@ -108,6 +110,14 @@ async function runPull(options: PullOptions) { model.declarations = model.declarations.filter((d) => d !== undefined) - const zmpdelSchema = await new ZModelCodeGenerator().generate(model) - fs.writeFileSync(schemaFile, zmpdelSchema) + const generator = await new ZModelCodeGenerator(); + + const zmodelSchema = await generator.generate(model) + + console.log(options.out ? `Writing to ${options.out}` : schemaFile); + + const outPath = options.out ? 
path.resolve(options.out) : schemaFile; + console.log(outPath); + + fs.writeFileSync(outPath, zmodelSchema) } diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 4651225e4..6a7e2ba23 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -1,5 +1,6 @@ import type { ZModelServices } from '@zenstackhq/language' import type { + ArrayExpr, Attribute, AttributeArg, DataField, @@ -9,18 +10,20 @@ import type { Enum, EnumField, Model, + ReferenceExpr, + StringLiteral, UnsupportedFieldType } from '@zenstackhq/language/ast' +import { getStringLiteral } from '@zenstackhq/language/utils' import type { IntrospectedEnum, IntrospectedTable, IntrospectionProvider } from './provider' -import { getAttributeRef, getDbName } from './utils' +import { getAttributeRef, getDbName, getEnumRef, getModelRef } from './utils' -export function syncEnums(dbEnums: IntrospectedEnum[], model: Model) { +export function syncEnums({ dbEnums, model, services }: { dbEnums: IntrospectedEnum[], model: Model, services: ZModelServices }) { for (const dbEnum of dbEnums) { - let schemaEnum = model.declarations.find( - (d) => d.$type === 'Enum' && getDbName(d) === dbEnum.enum_type - ) as Enum | undefined + let schemaEnum = getEnumRef(dbEnum.enum_type, services); if (!schemaEnum) { + console.log(`Adding enum for type ${dbEnum.enum_type}`); schemaEnum = { $type: 'Enum' as const, $container: model, @@ -66,17 +69,29 @@ export function syncTable({ model, provider, table, + services }: { table: IntrospectedTable model: Model provider: IntrospectionProvider + services: ZModelServices }) { + const idAttribute = getAttributeRef('@id', services) + const uniqueAttribute = getAttributeRef('@unique', services) + const relationAttribute = getAttributeRef('@relation', services) + const fieldMapAttribute = getAttributeRef('@map', services) + const tableMapAttribute = getAttributeRef('@@map', services) + + if (!idAttribute || !uniqueAttribute || !relationAttribute || !fieldMapAttribute || !tableMapAttribute) { + throw new Error('Cannot find required attributes in the model.') + } + const relations: Relation[] = [] - let modelTable = model.declarations.find( - (d) => d.$type === 'DataModel' && getDbName(d) === table.name - ) as DataModel | undefined + let modelTable = getModelRef(table.name, services) if (!modelTable) { + console.log(`Adding model for table ${table.name}`); + modelTable = { $type: 'DataModel' as const, $container: model, @@ -96,7 +111,7 @@ export function syncTable({ schema: table.schema, table: table.name, column: col.name, - type: col.unique ? 'one' : 'many', + type: 'one', fk_name: col.foreign_key_name!, nullable: col.nullable, references: { @@ -115,67 +130,54 @@ export function syncTable({ ) if (!existingField) { const builtinType = provider.getBuiltinType(col.datatype) - const unsupported: UnsupportedFieldType = { - get $container() { - return type - }, - $type: 'UnsupportedFieldType' as const, - value: { - get $container() { - return unsupported - }, - $type: 'StringLiteral', - value: col.datatype, - }, - } - - const type: DataFieldType = { - get $container() { - return field - }, - $type: 'DataFieldType' as const, - type: builtinType.type === 'Unsupported' ? undefined : builtinType.type, - array: builtinType.isArray, - unsupported: - builtinType.type === 'Unsupported' ? unsupported : undefined, - optional: col.nullable, - reference: col.options.length - ? 
{ + const field: DataField = { + $type: 'DataField' as const, + get type() { + return { + $container: this, + $type: 'DataFieldType' as const, + type: builtinType.type === 'Unsupported' ? undefined : builtinType.type, + array: builtinType.isArray, + get unsupported() { + return builtinType.type === 'Unsupported' ? { + $container: this, + $type: 'UnsupportedFieldType' as const, + get value() { + return { + $container: this, + $type: 'StringLiteral', + value: col.datatype, + } satisfies StringLiteral + }, + } satisfies UnsupportedFieldType : undefined + }, + optional: col.nullable, + reference: col.options.length + ? { $refText: col.datatype, ref: model.declarations.find( (d) => d.$type === 'Enum' && getDbName(d) === col.datatype - ) as Enum | undefined, - } - : undefined, - } - - const field: DataField = { - $type: 'DataField' as const, - type, + ) as Enum | undefined, + } + : undefined, + } satisfies DataFieldType + }, $container: modelTable!, name: fieldName, get attributes() { if (fieldPrefix !== '') return [] - const attr: DataFieldAttribute = { + return [{ $type: 'DataFieldAttribute' as const, - get $container() { - return field - }, + $container: this, decl: { $refText: '@map', - ref: model.$document?.references.find( - (r) => - //@ts-ignore - r.ref.$type === 'Attribute' && r.ref.name === '@map' - )?.ref as Attribute, + ref: fieldMapAttribute, }, get args() { - const arg: AttributeArg = { + return [{ $type: 'AttributeArg' as const, - get $container() { - return attr - }, + $container: this, name: 'name', $resolvedParam: { name: 'name', @@ -183,17 +185,13 @@ export function syncTable({ get value() { return { $type: 'StringLiteral' as const, - $container: arg, + $container: this, value: col.name, } }, - } - - return [arg] + }] satisfies AttributeArg[] }, - } - - return [attr] + }] satisfies DataFieldAttribute[] }, comments: [], } @@ -205,10 +203,16 @@ export function syncTable({ return relations } -export function syncRelation(model: Model, relation: Relation, services: ZModelServices) { +export function syncRelation({ model, relation, services }: { model: Model, relation: Relation, services: ZModelServices }) { const idAttribute = getAttributeRef('@id', services) const uniqueAttribute = getAttributeRef('@unique', services) const relationAttribute = getAttributeRef('@relation', services) + const fieldMapAttribute = getAttributeRef('@map', services) + const tableMapAttribute = getAttributeRef('@@map', services) + + if (!idAttribute || !uniqueAttribute || !relationAttribute || !fieldMapAttribute || !tableMapAttribute) { + throw new Error('Cannot find required attributes in the model.') + } if (!idAttribute || !uniqueAttribute || !relationAttribute) { throw new Error('Cannot find required attributes in the model.') diff --git a/packages/cli/src/actions/pull/utils.ts b/packages/cli/src/actions/pull/utils.ts index b611fbca6..defd0f307 100644 --- a/packages/cli/src/actions/pull/utils.ts +++ b/packages/cli/src/actions/pull/utils.ts @@ -1,11 +1,13 @@ import type { ZModelServices } from '@zenstackhq/language' import { + AbstractDeclaration, DataField, + DataModel, + Enum, EnumField, isInvocationExpr, - type AbstractDeclaration, type Attribute, - type Model, + type Model } from '@zenstackhq/language/ast' import { getStringLiteral } from '@zenstackhq/language/utils' import type { @@ -28,10 +30,20 @@ export function getDatasource(model: Model) { } const urlField = datasource.fields.find((f) => f.name === 'url')! 
+ let url = getStringLiteral(urlField.value) if (!url && isInvocationExpr(urlField.value)) { - url = process.env[getStringLiteral(urlField.value.args[0]) as string]! + const envName = getStringLiteral(urlField.value.args[0]?.value) + if (!envName) { + throw new Error('The url field must be a string literal or an env().') + } + if (!process.env[envName]) { + throw new Error( + `Environment variable ${envName} is not set, please set it to the database connection string.` + ) + } + url = process.env[envName] } if (!url) { @@ -62,6 +74,19 @@ export function getDbName( return attrValue.value } + +export function getDeclarationRef(type: T["$type"], name: string, services: ZModelServices) { + return services.shared.workspace.IndexManager.allElements(type).find((m) => m.node && getDbName(m.node as T) === name)?.node as T | undefined +} + +export function getEnumRef(name: string, services: ZModelServices) { + return getDeclarationRef('Enum', name, services); +} + +export function getModelRef(name: string, services: ZModelServices) { + return getDeclarationRef('DataModel', name, services); +} + export function getAttributeRef(name: string, services: ZModelServices) { - return services.shared.workspace.IndexManager.allElements("Attribute").find(a => a.name === name) as Attribute | undefined + return getDeclarationRef('Attribute', name, services); } \ No newline at end of file diff --git a/packages/cli/src/index.ts b/packages/cli/src/index.ts index 4efc86fd9..2444b10b5 100644 --- a/packages/cli/src/index.ts +++ b/packages/cli/src/index.ts @@ -143,6 +143,14 @@ function createProgram() { .addOption(new Option('--force-reset', 'force a reset of the database before push')) .action((options) => dbAction('push', options)); + dbCommand + .command('pull') + .description('Introspect your database.') + .addOption(schemaOption) + .addOption(noVersionCheckOption) + .addOption(new Option('--out ', 'add custom output path for the introspected schema')) + .action((options) => dbAction('pull', options)); + dbCommand .command('seed') .description('Seed the database') diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 52c28cb6d..8abc943c3 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -192,6 +192,9 @@ importers: packages/cli: dependencies: + '@dotenvx/dotenvx': + specifier: ^1.51.0 + version: 1.51.4 '@zenstackhq/common-helpers': specifier: workspace:* version: link:../common-helpers @@ -1558,12 +1561,22 @@ packages: resolution: {integrity: sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw==} engines: {node: '>=18'} + '@dotenvx/dotenvx@1.51.4': + resolution: {integrity: sha512-AoziS8lRQ3ew/lY5J4JSlzYSN9Fo0oiyMBY37L3Bwq4mOQJT5GSrdZYLFPt6pH1LApDI3ZJceNyx+rHRACZSeQ==} + hasBin: true + '@dxup/nuxt@0.2.2': resolution: {integrity: sha512-RNpJjDZs9+JcT9N87AnOuHsNM75DEd58itADNd/s1LIF6BZbTLZV0xxilJZb55lntn4TYvscTaXLCBX2fq9CXg==} '@dxup/unimport@0.1.2': resolution: {integrity: sha512-/B8YJGPzaYq1NbsQmwgP8EZqg40NpTw4ZB3suuI0TplbxKHeK94jeaawLmVhCv+YwUnOpiWEz9U6SeThku/8JQ==} + '@ecies/ciphers@0.2.5': + resolution: {integrity: sha512-GalEZH4JgOMHYYcYmVqnFirFsjZHeoGMDt9IxEnM9F7GRUUyUksJ7Ou53L83WHJq3RWKD3AcBpo0iQh0oMpf8A==} + engines: {bun: '>=1', deno: '>=2', node: '>=16'} + peerDependencies: + '@noble/ciphers': ^1.0.0 + '@edge-runtime/primitives@6.0.0': resolution: {integrity: sha512-FqoxaBT+prPBHBwE1WXS1ocnu/VLTQyZ6NMUBAdbP7N2hsFTTxMC/jMu2D/8GAlMQfxeuppcPuCUk/HO3fpIvA==} engines: {node: '>=18'} @@ -2383,14 +2396,26 @@ packages: cpu: [x64] os: [win32] + '@noble/ciphers@1.3.0': + 
resolution: {integrity: sha512-2I0gnIVPtfnMw9ee9h1dJG7tp81+8Ob3OJb3Mv37rx5L40/b0i7djjCVvGOVqc9AEIQyvyu1i6ypKdFw8R8gQw==} + engines: {node: ^14.21.3 || >=16} + '@noble/ciphers@2.0.1': resolution: {integrity: sha512-xHK3XHPUW8DTAobU+G0XT+/w+JLM7/8k1UFdB5xg/zTFPnFCobhftzw8wl4Lw2aq/Rvir5pxfZV5fEazmeCJ2g==} engines: {node: '>= 20.19.0'} + '@noble/curves@1.9.7': + resolution: {integrity: sha512-gbKGcRUYIjA3/zCCNaWDciTMFI0dCkvou3TL8Zmy5Nc7sJ47a0jtOeZoTaMxkuqRo9cRhjOdZJXegxYE5FN/xw==} + engines: {node: ^14.21.3 || >=16} + '@noble/hashes@1.7.1': resolution: {integrity: sha512-B8XBPsn4vT/KJAGqDzbwztd+6Yte3P4V7iafm24bxgDe/mlRuK6xmWPuCNrKt2vDafZ8MfJLlchDG/vYafQEjQ==} engines: {node: ^14.21.3 || >=16} + '@noble/hashes@1.8.0': + resolution: {integrity: sha512-jCs9ldd7NwzpgXDIf6P3+NrHh9/sD6CQdxHyjQI+h/6rDNo88ypBxxz45UDuZHz9r3tNz7N/VInSVoVdtXEI4A==} + engines: {node: ^14.21.3 || >=16} + '@noble/hashes@2.0.1': resolution: {integrity: sha512-XlOlEbQcE9fmuXxrVTXCTlG2nlRXa9Rj3rr5Ue/+tX+nmkgbX720YHh0VR3hBF9xDvwnb8D2shVGOwNx+ulArw==} engines: {node: '>= 20.19.0'} @@ -5087,6 +5112,10 @@ packages: eastasianwidth@0.2.0: resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} + eciesjs@0.4.16: + resolution: {integrity: sha512-dS5cbA9rA2VR4Ybuvhg6jvdmp46ubLn3E+px8cG/35aEDNclrqoCjg6mt0HYZ/M+OoESS3jSkCrqk1kWAEhWAw==} + engines: {bun: '>=1', deno: '>=2', node: '>=16'} + ee-first@1.1.1: resolution: {integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==} @@ -5376,6 +5405,10 @@ packages: '@sinclair/typebox': optional: true + execa@5.1.1: + resolution: {integrity: sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==} + engines: {node: '>=10'} + execa@8.0.1: resolution: {integrity: sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==} engines: {node: '>=16.17'} @@ -5593,6 +5626,10 @@ packages: resolution: {integrity: sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==} engines: {node: '>= 0.4'} + get-stream@6.0.1: + resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==} + engines: {node: '>=10'} + get-stream@8.0.1: resolution: {integrity: sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==} engines: {node: '>=16'} @@ -5756,6 +5793,10 @@ packages: httpxy@0.1.7: resolution: {integrity: sha512-pXNx8gnANKAndgga5ahefxc++tJvNL87CXoRwxn1cJE2ZkWEojF3tNfQIEhZX/vfpt+wzeAzpUI4qkediX1MLQ==} + human-signals@2.1.0: + resolution: {integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==} + engines: {node: '>=10.17.0'} + human-signals@5.0.0: resolution: {integrity: sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==} engines: {node: '>=16.17.0'} @@ -6677,6 +6718,10 @@ packages: engines: {node: '>= 4'} hasBin: true + npm-run-path@4.0.1: + resolution: {integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==} + engines: {node: '>=8'} + npm-run-path@5.3.0: resolution: {integrity: sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} @@ -6718,6 +6763,10 @@ packages: resolution: {integrity: 
sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==} engines: {node: '>= 0.4'} + object-treeify@1.1.33: + resolution: {integrity: sha512-EFVjAYfzWqWsBMRHPMAXLCDIJnpMhdWAqR7xG6M6a2cs6PMFpl/+Z20w9zDW4vkxOFfddegBKq9Rehd0bxWE7A==} + engines: {node: '>= 10'} + object.assign@4.1.7: resolution: {integrity: sha512-nK28WOo+QIjBkDduTINE4JkF/UJJKyf2EJxvJKfblDpyg0Q+pkOHNTL0Qwy6NP6FhE/EnzV73BxxqcJaXY9anw==} engines: {node: '>= 0.4'} @@ -6915,9 +6964,6 @@ packages: pg-connection-string@2.9.1: resolution: {integrity: sha512-nkc6NpDcvPVpZXxrreI/FOtX3XemeLl8E0qFr6F2Lrm/I8WOnaWNhIPK2Z7OHpw7gh5XJThi6j6ppgNoaT1w4w==} - pg-connection-string@2.9.1: - resolution: {integrity: sha512-nkc6NpDcvPVpZXxrreI/FOtX3XemeLl8E0qFr6F2Lrm/I8WOnaWNhIPK2Z7OHpw7gh5XJThi6j6ppgNoaT1w4w==} - pg-int8@1.0.1: resolution: {integrity: sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==} engines: {node: '>=4.0.0'} @@ -6951,15 +6997,6 @@ packages: pg-native: optional: true - pg@8.16.3: - resolution: {integrity: sha512-enxc1h0jA/aq5oSDMvqyW3q89ra6XIIDZgCX9vkMrnz5DFTw/Ny3Li2lFQ+pt3L6MCgm/5o2o8HW9hiJji+xvw==} - engines: {node: '>= 16.0.0'} - peerDependencies: - pg-native: '>=3.0.1' - peerDependenciesMeta: - pg-native: - optional: true - pgpass@1.0.5: resolution: {integrity: sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==} @@ -7886,6 +7923,10 @@ packages: resolution: {integrity: sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==} engines: {node: '>=4'} + strip-final-newline@2.0.0: + resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==} + engines: {node: '>=6'} + strip-final-newline@3.0.0: resolution: {integrity: sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==} engines: {node: '>=12'} @@ -8725,6 +8766,11 @@ packages: engines: {node: '>= 8'} hasBin: true + which@4.0.0: + resolution: {integrity: sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==} + engines: {node: ^16.13.0 || >=18.0.0} + hasBin: true + which@5.0.0: resolution: {integrity: sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ==} engines: {node: ^18.17.0 || >=20.5.0} @@ -9352,6 +9398,18 @@ snapshots: '@csstools/css-tokenizer@3.0.4': optional: true + '@dotenvx/dotenvx@1.51.4': + dependencies: + commander: 11.1.0 + dotenv: 17.2.3 + eciesjs: 0.4.16 + execa: 5.1.1 + fdir: 6.5.0(picomatch@4.0.3) + ignore: 5.3.2 + object-treeify: 1.1.33 + picomatch: 4.0.3 + which: 4.0.0 + '@dxup/nuxt@0.2.2(magicast@0.5.1)': dependencies: '@dxup/unimport': 0.1.2 @@ -9364,6 +9422,10 @@ snapshots: '@dxup/unimport@0.1.2': {} + '@ecies/ciphers@0.2.5(@noble/ciphers@1.3.0)': + dependencies: + '@noble/ciphers': 1.3.0 + '@edge-runtime/primitives@6.0.0': {} '@edge-runtime/vm@5.0.0': @@ -9935,10 +9997,18 @@ snapshots: '@next/swc-win32-x64-msvc@16.0.10': optional: true + '@noble/ciphers@1.3.0': {} + '@noble/ciphers@2.0.1': {} + '@noble/curves@1.9.7': + dependencies: + '@noble/hashes': 1.8.0 + '@noble/hashes@1.7.1': {} + '@noble/hashes@1.8.0': {} + '@noble/hashes@2.0.1': {} '@nodelib/fs.scandir@2.1.5': @@ -12732,6 +12802,13 @@ snapshots: eastasianwidth@0.2.0: {} + eciesjs@0.4.16: + dependencies: + '@ecies/ciphers': 0.2.5(@noble/ciphers@1.3.0) + '@noble/ciphers': 1.3.0 + '@noble/curves': 1.9.7 + '@noble/hashes': 1.8.0 + ee-first@1.1.1: {} 
effect@3.18.4: @@ -13218,6 +13295,18 @@ snapshots: optionalDependencies: '@sinclair/typebox': 0.34.41 + execa@5.1.1: + dependencies: + cross-spawn: 7.0.6 + get-stream: 6.0.1 + human-signals: 2.1.0 + is-stream: 2.0.1 + merge-stream: 2.0.0 + npm-run-path: 4.0.1 + onetime: 5.1.2 + signal-exit: 3.0.7 + strip-final-newline: 2.0.0 + execa@8.0.1: dependencies: cross-spawn: 7.0.6 @@ -13511,6 +13600,8 @@ snapshots: dunder-proto: 1.0.1 es-object-atoms: 1.1.1 + get-stream@6.0.1: {} + get-stream@8.0.1: {} get-stream@9.0.1: @@ -13701,6 +13792,8 @@ snapshots: httpxy@0.1.7: {} + human-signals@2.1.0: {} + human-signals@5.0.0: {} human-signals@8.0.1: {} @@ -14665,6 +14758,10 @@ snapshots: shell-quote: 1.8.3 string.prototype.padend: 3.1.6 + npm-run-path@4.0.1: + dependencies: + path-key: 3.1.1 + npm-run-path@5.3.0: dependencies: path-key: 4.0.0 @@ -14815,6 +14912,8 @@ snapshots: object-keys@1.1.1: {} + object-treeify@1.1.33: {} + object.assign@4.1.7: dependencies: call-bind: 1.0.8 @@ -15070,8 +15169,6 @@ snapshots: pg-connection-string@2.9.1: {} - pg-connection-string@2.9.1: {} - pg-int8@1.0.1: {} pg-numeric@1.0.2: {} @@ -15110,16 +15207,6 @@ snapshots: optionalDependencies: pg-cloudflare: 1.2.7 - pg@8.16.3: - dependencies: - pg-connection-string: 2.9.1 - pg-pool: 3.10.1(pg@8.16.3) - pg-protocol: 1.10.3 - pg-types: 2.2.0 - pgpass: 1.0.5 - optionalDependencies: - pg-cloudflare: 1.2.7 - pgpass@1.0.5: dependencies: split2: 4.2.0 @@ -16087,6 +16174,8 @@ snapshots: strip-bom@3.0.0: {} + strip-final-newline@2.0.0: {} + strip-final-newline@3.0.0: {} strip-final-newline@4.0.0: {} @@ -17070,6 +17159,10 @@ snapshots: dependencies: isexe: 2.0.0 + which@4.0.0: + dependencies: + isexe: 3.1.1 + which@5.0.0: dependencies: isexe: 3.1.1 From 1766e6e570698230d5323c9ae44a16ec95717f40 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Thu, 25 Sep 2025 00:59:00 +0200 Subject: [PATCH 05/68] fix: missing import --- packages/language/src/zmodel-code-generator.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/language/src/zmodel-code-generator.ts b/packages/language/src/zmodel-code-generator.ts index 21bb5cad1..5730fc5b7 100644 --- a/packages/language/src/zmodel-code-generator.ts +++ b/packages/language/src/zmodel-code-generator.ts @@ -28,6 +28,7 @@ import { LiteralExpr, MemberAccessExpr, Model, + ModelImport, NullExpr, NumberLiteral, ObjectExpr, From 649cc86b24808cc1e9ea2fd0463892c6f0f07e58 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 26 Sep 2025 02:57:44 +0200 Subject: [PATCH 06/68] fix: rewrite model generation generate model from ground up and diff later --- packages/cli/src/actions/db.ts | 39 +- packages/cli/src/actions/pull/index.ts | 423 ++++++++++++++---- .../src/actions/pull/provider/postgresql.ts | 295 ++++++++---- .../cli/src/actions/pull/provider/provider.ts | 9 +- .../cli/src/actions/pull/provider/sqlite.ts | 11 +- 5 files changed, 570 insertions(+), 207 deletions(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index e79073e33..61e05956d 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -1,3 +1,4 @@ +import type { Model } from '@zenstackhq/language/ast'; import { ZModelCodeGenerator } from '@zenstackhq/sdk'; import fs from 'node:fs'; import path from 'node:path'; @@ -5,7 +6,7 @@ import { execPrisma } from '../utils/exec-utils'; import { generateTempPrismaSchema, getSchemaFile, handleSubProcessError, requireDataSourceUrl, loadSchemaDocumentWithServices } from './action-utils'; import 
{ syncEnums, syncRelation, syncTable, type Relation } from './pull'; import { providers } from './pull/provider'; -import { getDatasource, getDbName } from './pull/utils'; +import { getDatasource } from './pull/utils'; type PushOptions = { schema?: string; @@ -88,31 +89,35 @@ async function runPull(options: PullOptions) { const { enums, tables } = await provider.introspect(datasource.url) - syncEnums({ dbEnums: enums, model, services }) + const newModel: Model = { + $type: 'Model', + $container: undefined, + $containerProperty: undefined, + $containerIndex: undefined, + declarations: [...model.declarations.filter(d => ["DataSource"].includes(d.$type))], + imports: [], + }; + + + syncEnums({ dbEnums: enums, model: newModel, services }) - const resolveRelations: Relation[] = [] - for (const table of tables) { - const relations = syncTable({ table, model, provider, services }) - resolveRelations.push(...relations) - } - for (const relation of resolveRelations) { - syncRelation({ model, relation, services }); + + const resolvedRelations: Relation[] = [] + for (const table of tables) { + const relations = syncTable({ table, model: newModel, provider, services }) + resolvedRelations.push(...relations) } - for (const d of model.declarations) { - if (d.$type !== 'DataModel') continue - const found = tables.find((t) => getDbName(d) === t.name) - if (!found) { - delete (d.$container as any)[d.$containerProperty!][d.$containerIndex!] - } + for (const relation of resolvedRelations) { + syncRelation({ model: newModel, relation, services }); } - model.declarations = model.declarations.filter((d) => d !== undefined) + //TODO: diff models and apply changes only const generator = await new ZModelCodeGenerator(); - const zmodelSchema = await generator.generate(model) + const zmodelSchema = await generator.generate(newModel) console.log(options.out ? 
`Writing to ${options.out}` : schemaFile); diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 6a7e2ba23..75225c956 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -1,7 +1,6 @@ import type { ZModelServices } from '@zenstackhq/language' import type { ArrayExpr, - Attribute, AttributeArg, DataField, DataFieldAttribute, @@ -14,40 +13,28 @@ import type { StringLiteral, UnsupportedFieldType } from '@zenstackhq/language/ast' -import { getStringLiteral } from '@zenstackhq/language/utils' import type { IntrospectedEnum, IntrospectedTable, IntrospectionProvider } from './provider' -import { getAttributeRef, getDbName, getEnumRef, getModelRef } from './utils' +import { getAttributeRef, getDbName } from './utils' -export function syncEnums({ dbEnums, model, services }: { dbEnums: IntrospectedEnum[], model: Model, services: ZModelServices }) { +export function syncEnums({ dbEnums, model }: { dbEnums: IntrospectedEnum[], model: Model, services: ZModelServices }) { for (const dbEnum of dbEnums) { - let schemaEnum = getEnumRef(dbEnum.enum_type, services); - - if (!schemaEnum) { - console.log(`Adding enum for type ${dbEnum.enum_type}`); - schemaEnum = { - $type: 'Enum' as const, - $container: model, - name: dbEnum.enum_type, - attributes: [], - comments: [], - fields: [], - } - model.declarations.push(schemaEnum) - } - schemaEnum.fields = dbEnum.values.map((v) => { - const existingValue = schemaEnum.fields.find((f) => getDbName(f) === v) - if (!existingValue) { - const enumField: EnumField = { + const schemaEnum = { + $type: 'Enum' as const, + $container: model, + name: dbEnum.enum_type, + attributes: [], + comments: [], + get fields() { + return dbEnum.values.map((v): EnumField => ({ $type: 'EnumField' as const, $container: schemaEnum, name: v, attributes: [], comments: [], - } - return enumField + })); } - return existingValue - }) + } + model.declarations.push(schemaEnum) } } @@ -62,6 +49,7 @@ export type Relation = { schema: string | null table: string | null column: string | null + type: 'one' | 'many' } } @@ -78,6 +66,7 @@ export function syncTable({ }) { const idAttribute = getAttributeRef('@id', services) const uniqueAttribute = getAttributeRef('@unique', services) + const modelUniqueAttribute = getAttributeRef('@@unique', services) const relationAttribute = getAttributeRef('@relation', services) const fieldMapAttribute = getAttributeRef('@map', services) const tableMapAttribute = getAttributeRef('@@map', services) @@ -87,25 +76,21 @@ export function syncTable({ } const relations: Relation[] = [] - let modelTable = getModelRef(table.name, services) - - if (!modelTable) { - console.log(`Adding model for table ${table.name}`); - - modelTable = { - $type: 'DataModel' as const, - $container: model, - name: table.name, - fields: [], - attributes: [], - comments: [], - isView: false, - mixins: [], - } - model.declarations.push(modelTable) + const modelTable: DataModel = { + $type: 'DataModel' as const, + $container: model, + name: table.name, + fields: [], + attributes: [], + comments: [], + isView: false, + mixins: [], } + model.declarations.push(modelTable) modelTable.fields = table.columns.map((col) => { + if (col.default) console.log(`${table.name}.${col.name} -> ${col.default}`); + if (col.foreign_key_table) { relations.push({ schema: table.schema, @@ -118,6 +103,7 @@ export function syncTable({ schema: col.foreign_key_schema, table: col.foreign_key_table, column: col.foreign_key_column, + type: 
col.unique ? 'one' : 'many', }, }) } @@ -125,49 +111,100 @@ export function syncTable({ const fieldPrefix = /[0-9]/g.test(col.name.charAt(0)) ? '_' : '' const fieldName = `${fieldPrefix}${col.name}` - const existingField = modelTable!.fields.find( - (f) => getDbName(f) === fieldName - ) - if (!existingField) { - const builtinType = provider.getBuiltinType(col.datatype) - const field: DataField = { - $type: 'DataField' as const, - get type() { - return { + const builtinType = provider.getBuiltinType(col.datatype) + const field: DataField = { + $type: 'DataField' as const, + get type() { + return { + $container: this, + $type: 'DataFieldType' as const, + type: builtinType.type === 'Unsupported' ? undefined : builtinType.type, + array: builtinType.isArray, + get unsupported() { + return builtinType.type === 'Unsupported' ? { + $container: this, + $type: 'UnsupportedFieldType' as const, + get value() { + return { + $container: this, + $type: 'StringLiteral', + value: col.datatype, + } satisfies StringLiteral + }, + } satisfies UnsupportedFieldType : undefined + }, + optional: col.nullable, + reference: col.options.length + ? { + $refText: col.datatype, + ref: model.declarations.find( + (d) => d.$type === 'Enum' && getDbName(d) === col.datatype + ) as Enum | undefined, + } + : undefined, + } satisfies DataFieldType + }, + $container: modelTable!, + name: fieldName, + get attributes() { + if (fieldPrefix !== '') return [] + + const getDefaultAttrs = () => { + if (!col.default) return []; + + const defaultValue = col.default && provider.getDefaultValue({ + fieldName: col.name, + defaultValue: col.default, + container: this, + services, + enums: model.declarations.filter((d) => d.$type === 'Enum') as Enum[], + }) + + if (!defaultValue) return []; + + if (Array.isArray(defaultValue)) { + return defaultValue; + } + + if (defaultValue?.$type === 'DataFieldAttribute') { + return [defaultValue]; + } + + return [{ + $type: 'DataFieldAttribute' as const, $container: this, - $type: 'DataFieldType' as const, - type: builtinType.type === 'Unsupported' ? undefined : builtinType.type, - array: builtinType.isArray, - get unsupported() { - return builtinType.type === 'Unsupported' ? { + decl: { + $refText: 'default', + ref: getAttributeRef('@default', services) + }, + get args() { + return [{ + $type: 'AttributeArg' as const, $container: this, - $type: 'UnsupportedFieldType' as const, + name: '', + $resolvedParam: { + name: '', + }, get value() { - return { - $container: this, - $type: 'StringLiteral', - value: col.datatype, - } satisfies StringLiteral + return { ...defaultValue, $container: this } }, - } satisfies UnsupportedFieldType : undefined + }] satisfies AttributeArg[] }, - optional: col.nullable, - reference: col.options.length - ? { - $refText: col.datatype, - ref: model.declarations.find( - (d) => d.$type === 'Enum' && getDbName(d) === col.datatype - ) as Enum | undefined, - } - : undefined, - } satisfies DataFieldType - }, - $container: modelTable!, - name: fieldName, - get attributes() { - if (fieldPrefix !== '') return [] + } satisfies DataFieldAttribute]; + } - return [{ + return [ + ...(col.pk ? 
[{ + $type: 'DataFieldAttribute' as const, + $container: this, + args: [], + decl: { + $refText: '@id', + ref: idAttribute, + }, + }] : []) satisfies DataFieldAttribute[], + ...getDefaultAttrs(), + { $type: 'DataFieldAttribute' as const, $container: this, decl: { @@ -178,9 +215,9 @@ export function syncTable({ return [{ $type: 'AttributeArg' as const, $container: this, - name: 'name', + name: '', $resolvedParam: { - name: 'name', + name: '', }, get value() { return { @@ -189,17 +226,58 @@ export function syncTable({ value: col.name, } }, - }] satisfies AttributeArg[] + } + ] satisfies AttributeArg[] }, - }] satisfies DataFieldAttribute[] - }, - comments: [], - } - return field + } + ] satisfies DataFieldAttribute[] + }, + comments: [], } - return existingField + return field }) + const uniqieColumns = table.columns.filter((c) => c.unique && !c.pk).map((c) => c.name) + if (uniqieColumns.length > 0) { + modelTable.attributes.push({ + $type: 'DataModelAttribute' as const, + $container: modelTable, + decl: { + $refText: '@unique', + ref: modelUniqueAttribute, + }, + get args() { + return uniqieColumns.map((c) => ({ + $type: 'AttributeArg' as const, + $container: this, + name: '', + $resolvedParam: { + name: '', + }, + get value() { + return { + $type: 'ArrayExpr' as const, + $container: this, + get items() { + return [{ + $container: this, + $type: 'ReferenceExpr' as const, + target: { + $refText: c, + ref: modelTable.fields.find((f) => f.name === c), + }, + args: [], + }] satisfies ReferenceExpr[] + } + } as ArrayExpr + }, + })) satisfies AttributeArg[] + }, + }) + + return relations + } + return relations } @@ -214,10 +292,6 @@ export function syncRelation({ model, relation, services }: { model: Model, rela throw new Error('Cannot find required attributes in the model.') } - if (!idAttribute || !uniqueAttribute || !relationAttribute) { - throw new Error('Cannot find required attributes in the model.') - } - const sourceModel = model.declarations.find( (d) => d.$type === 'DataModel' && getDbName(d) === relation.table ) as DataModel | undefined @@ -239,4 +313,169 @@ export function syncRelation({ model, relation, services }: { model: Model, rela if (!targetField) return //TODO: Finish relation sync + + const fieldPrefix = /[0-9]/g.test(sourceModel.name.charAt(0)) ? 
'_' : '' + + sourceModel.fields.push({ + $type: 'DataField' as const, + $container: sourceModel, + name: `${fieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}`, + comments: [], + get type() { + return { + $container: this, + $type: 'DataFieldType' as const, + reference: { + ref: targetModel, + $refText: targetModel.name, + }, + optional: relation.nullable, + //TODO + array: relation.type === 'many', + } satisfies DataFieldType + }, + get attributes() { + return [{ + $type: 'DataFieldAttribute' as const, + $container: this, + decl: { + $refText: '@relation', + ref: relationAttribute, + }, + get args() { + return [{ + $type: 'AttributeArg' as const, + $container: this, + name: '', + $resolvedParam: { + name: '', + }, + get value() { + return { + $type: 'StringLiteral' as const, + $container: this, + value: relation.fk_name, + } satisfies StringLiteral + }, + }, + { + $type: 'AttributeArg' as const, + $container: this, + name: 'fields', + $resolvedParam: { + name: 'fields', + }, + get value() { + return { + $type: 'ArrayExpr' as const, + $container: this, + get items() { + return [{ + $container: this, + $type: 'ReferenceExpr' as const, + target: { + ref: sourceField, + $refText: sourceField.name, + }, + args: [], + }] satisfies ReferenceExpr[] + }, + } satisfies ArrayExpr + }, + }, { + $type: 'AttributeArg' as const, + $container: this, + name: 'references', + $resolvedParam: { + name: 'references', + }, + get value() { + return { + $type: 'ArrayExpr' as const, + $container: this, + get items() { + return [{ + $container: this, + $type: 'ReferenceExpr' as const, + target: { + ref: targetField, + $refText: targetField.name, + }, + args: [], + }] satisfies ReferenceExpr[] + }, + } satisfies ArrayExpr + }, + }, { + $type: 'AttributeArg' as const, + $container: this, + name: 'map', + $resolvedParam: { + name: 'map', + }, + get value() { + return { + $type: 'StringLiteral' as const, + $container: this, + value: relation.fk_name, + } satisfies StringLiteral + }, + }] satisfies AttributeArg[] + }, + }] satisfies DataFieldAttribute[] + }, + }) + + const oppositeFieldPrefix = /[0-9]/g.test(targetModel.name.charAt(0)) ? '_' : '' + const oppositeFieldName = relation.type === 'one' + ? 
`${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}s` + : `${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}` + + targetModel.fields.push({ + $type: 'DataField' as const, + $container: targetModel, + name: oppositeFieldName, + get type() { + return { + $container: this, + $type: 'DataFieldType' as const, + reference: { + ref: sourceModel, + $refText: sourceModel.name, + }, + optional: relation.references.type === 'one' && relation.nullable, + array: relation.references.type === 'many', + } satisfies DataFieldType + }, + get attributes() { + return [ + { + $type: 'DataFieldAttribute' as const, + $container: this, + decl: { + $refText: '@relation', + ref: relationAttribute, + }, + get args() { + return [{ + $type: 'AttributeArg' as const, + $container: this, + name: '', + $resolvedParam: { + name: '', + }, + get value() { + return { + $type: 'StringLiteral' as const, + $container: this, + value: relation.fk_name, + } satisfies StringLiteral + }, + }] satisfies AttributeArg[] + } + } + ] satisfies DataFieldAttribute[] + }, + comments: [], + }) } \ No newline at end of file diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index 10a9642a3..be882be6e 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -1,4 +1,6 @@ +import { AttributeArg, DataFieldAttribute, Expression, FunctionDecl, InvocationExpr } from '@zenstackhq/language/ast' import { Client } from 'pg' +import { getAttributeRef, getDbName } from '../utils' import type { IntrospectedEnum, IntrospectedSchema, @@ -126,6 +128,114 @@ export const postgresql: IntrospectionProvider = { tables, } }, + getDefaultValue({ defaultValue, container: $container, fieldName, services, enums }) { + // Handle common cases + console.log(defaultValue); + + const val = defaultValue.trim() + + if (val === 'CURRENT_TIMESTAMP' || val === 'now()') { + const attrs: DataFieldAttribute[] = []; + + attrs.push({ + $type: "DataFieldAttribute" as const, + $container: $container as any, + decl: { + $refText: '@default', + ref: getAttributeRef('@default', services) + }, + get args(): AttributeArg[] { + return [{ + $type: 'AttributeArg' as const, + $container: this as any, + get value(): Expression { + return { + $type: 'InvocationExpr' as const, + $container: this, + function: { + $refText: 'now', + ref: services.shared.workspace.IndexManager.allElements(FunctionDecl).find((f) => (f.node as FunctionDecl)?.name === 'now')?.node as FunctionDecl + }, + args: [], + } satisfies InvocationExpr + } + }] + } + }); + + if (fieldName.toLowerCase() === 'updatedat' || fieldName.toLowerCase() === 'updated_at') { + // for updatedAt, use @updatedAt attribute + attrs.push({ + $type: "DataFieldAttribute" as const, + $container: $container as any, + decl: { + $refText: 'updatedAt', + ref: getAttributeRef('@updatedAt', services) + }, + args: [], + }); + } + + return attrs.length === 1 ? 
attrs[0] : attrs; + } + + if (val.includes('::')) { + const [enumValue, enumName] = val.replace(/'|"/g, '').split('::').map((s) => s.trim()) as [string, string] + const enumDef = enums.find((e) => getDbName(e) === enumName) + if (!enumDef) { + throw new Error(`Enum type ${enumName} not found for default value ${defaultValue}`) + } + const enumField = enumDef.fields.find((v) => getDbName(v) === enumValue) + if (!enumField) { + throw new Error(`Enum value ${enumValue} not found in enum ${enumName} for default value ${defaultValue}`) + } + + return { + $type: 'ReferenceExpr' as const, + $container: $container as any, + target: { + $refText: enumField!.name, + ref: enumField, + }, + args: [], + } + } + + if (val === 'true' || val === 'false') { + return { + $type: 'BooleanLiteral' as const, + $container: $container as any, + value: val === 'true', + } + } + + if (/^\d+$/.test(val)) { + return { + $container: $container as any, + $type: 'NumberLiteral' as const, + value: val, + } + } + + if (/^-?\d+(\.\d+)?$/.test(val)) { + // float + return { + $container: $container as any, + $type: 'NumberLiteral' as const, + value: val, + } + } + + if (val.startsWith("'") && val.endsWith("'")) { + // string + return { + $container: $container as any, + $type: 'StringLiteral' as const, + value: val.slice(1, -1).replace(/''/g, "'"), + } + } + return undefined + }, } const enumIntrospectionQuery = ` @@ -141,102 +251,101 @@ ORDER BY schema_name, enum_type;` const tableIntrospectionQuery = ` SELECT -"ns"."nspname" AS "schema", -"cls"."relname" AS "name", -CASE "cls"."relkind" - WHEN 'r' THEN 'table' - WHEN 'v' THEN 'view' - ELSE NULL -END AS "type", -( -SELECT -coalesce(json_agg(agg), '[]') -FROM -( - SELECT - "att"."attname" AS "name", - "typ"."typname" AS "datatype", - "tns"."nspname" AS "datatype_schema", - "fk_ns"."nspname" AS "foreign_key_schema", - "fk_cls"."relname" AS "foreign_key_table", - "fk_att"."attname" AS "foreign_key_column", - "fk_con"."conname" AS "foreign_key_name", - CASE "fk_con"."confupdtype" - WHEN 'a' THEN 'NO ACTION' - WHEN 'r' THEN 'RESTRICT' - WHEN 'c' THEN 'CASCADE' - WHEN 'n' THEN 'SET NULL' - WHEN 'd' THEN 'SET DEFAULT' + "ns"."nspname" AS "schema", + "cls"."relname" AS "name", + CASE "cls"."relkind" + WHEN 'r' THEN 'table' + WHEN 'v' THEN 'view' ELSE NULL - END AS "foreign_key_on_update", - CASE "fk_con"."confdeltype" - WHEN 'a' THEN 'NO ACTION' - WHEN 'r' THEN 'RESTRICT' - WHEN 'c' THEN 'CASCADE' - WHEN 'n' THEN 'SET NULL' - WHEN 'd' THEN 'SET DEFAULT' + END AS "type", + CASE + WHEN "cls"."relkind" = 'v' THEN pg_get_viewdef("cls"."oid", true) ELSE NULL - END AS "foreign_key_on_delete", - "pk_con"."conkey" IS NOT NULL AS "pk", - ( - EXISTS ( - SELECT 1 - FROM "pg_catalog"."pg_constraint" AS "u_con" - WHERE "u_con"."contype" = 'u' - AND "u_con"."conrelid" = "cls"."oid" - AND array_length("u_con"."conkey", 1) = 1 - AND "att"."attnum" = ANY ("u_con"."conkey") - ) - OR EXISTS ( - SELECT 1 - FROM "pg_catalog"."pg_index" AS "u_idx" - WHERE "u_idx"."indrelid" = "cls"."oid" - AND "u_idx"."indisunique" = TRUE - AND "u_idx"."indnkeyatts" = 1 - AND "att"."attnum" = ANY ("u_idx"."indkey"::int2[]) - ) - ) AS "unique", - "att"."attgenerated" != '' AS "computed", - "att"."attnotnull" != TRUE AS "nullable", - coalesce( + END AS "definition", ( - SELECT - json_agg("enm"."enumlabel") AS "o" - FROM - "pg_catalog"."pg_enum" AS "enm" - WHERE - "enm"."enumtypid" = "typ"."oid" - ), - '[]' - ) AS "options" - FROM - "pg_catalog"."pg_attribute" AS "att" - INNER JOIN "pg_catalog"."pg_type" AS "typ" ON 
"typ"."oid" = "att"."atttypid" - INNER JOIN "pg_catalog"."pg_namespace" AS "tns" ON "tns"."oid" = "typ"."typnamespace" - LEFT JOIN "pg_catalog"."pg_constraint" AS "pk_con" ON "pk_con"."contype" = 'p' - AND "pk_con"."conrelid" = "cls"."oid" - AND "att"."attnum" = ANY ("pk_con"."conkey") - LEFT JOIN "pg_catalog"."pg_constraint" AS "fk_con" ON "fk_con"."contype" = 'f' - AND "fk_con"."conrelid" = "cls"."oid" - AND "att"."attnum" = ANY ("fk_con"."conkey") - LEFT JOIN "pg_catalog"."pg_class" AS "fk_cls" ON "fk_cls"."oid" = "fk_con"."confrelid" - LEFT JOIN "pg_catalog"."pg_namespace" AS "fk_ns" ON "fk_ns"."oid" = "fk_cls"."relnamespace" - LEFT JOIN "pg_catalog"."pg_attribute" AS "fk_att" ON "fk_att"."attrelid" = "fk_cls"."oid" - AND "fk_att"."attnum" = ANY ("fk_con"."confkey") - WHERE - "att"."attrelid" = "cls"."oid" - AND "att"."attnum" >= 0 - AND "att"."attisdropped" != TRUE - ORDER BY "att"."attnum" -) AS agg -) AS "columns" -FROM -"pg_catalog"."pg_class" AS "cls" + SELECT coalesce(json_agg(agg), '[]') + FROM ( + SELECT + "att"."attname" AS "name", + "typ"."typname" AS "datatype", + "tns"."nspname" AS "datatype_schema", + "fk_ns"."nspname" AS "foreign_key_schema", + "fk_cls"."relname" AS "foreign_key_table", + "fk_att"."attname" AS "foreign_key_column", + "fk_con"."conname" AS "foreign_key_name", + CASE "fk_con"."confupdtype" + WHEN 'a' THEN 'NO ACTION' + WHEN 'r' THEN 'RESTRICT' + WHEN 'c' THEN 'CASCADE' + WHEN 'n' THEN 'SET NULL' + WHEN 'd' THEN 'SET DEFAULT' + ELSE NULL + END AS "foreign_key_on_update", + CASE "fk_con"."confdeltype" + WHEN 'a' THEN 'NO ACTION' + WHEN 'r' THEN 'RESTRICT' + WHEN 'c' THEN 'CASCADE' + WHEN 'n' THEN 'SET NULL' + WHEN 'd' THEN 'SET DEFAULT' + ELSE NULL + END AS "foreign_key_on_delete", + "pk_con"."conkey" IS NOT NULL AS "pk", + ( + EXISTS ( + SELECT 1 + FROM "pg_catalog"."pg_constraint" AS "u_con" + WHERE "u_con"."contype" = 'u' + AND "u_con"."conrelid" = "cls"."oid" + AND array_length("u_con"."conkey", 1) = 1 + AND "att"."attnum" = ANY ("u_con"."conkey") + ) + OR EXISTS ( + SELECT 1 + FROM "pg_catalog"."pg_index" AS "u_idx" + WHERE "u_idx"."indrelid" = "cls"."oid" + AND "u_idx"."indisunique" = TRUE + AND "u_idx"."indnkeyatts" = 1 + AND "att"."attnum" = ANY ("u_idx"."indkey"::int2[]) + ) + ) AS "unique", + "att"."attgenerated" != '' AS "computed", + pg_get_expr("def"."adbin", "def"."adrelid") AS "default", + "att"."attnotnull" != TRUE AS "nullable", + coalesce( + ( + SELECT json_agg("enm"."enumlabel") AS "o" + FROM "pg_catalog"."pg_enum" AS "enm" + WHERE "enm"."enumtypid" = "typ"."oid" + ), + '[]' + ) AS "options" + FROM "pg_catalog"."pg_attribute" AS "att" + INNER JOIN "pg_catalog"."pg_type" AS "typ" ON "typ"."oid" = "att"."atttypid" + INNER JOIN "pg_catalog"."pg_namespace" AS "tns" ON "tns"."oid" = "typ"."typnamespace" + LEFT JOIN "pg_catalog"."pg_constraint" AS "pk_con" ON "pk_con"."contype" = 'p' + AND "pk_con"."conrelid" = "cls"."oid" + AND "att"."attnum" = ANY ("pk_con"."conkey") + LEFT JOIN "pg_catalog"."pg_constraint" AS "fk_con" ON "fk_con"."contype" = 'f' + AND "fk_con"."conrelid" = "cls"."oid" + AND "att"."attnum" = ANY ("fk_con"."conkey") + LEFT JOIN "pg_catalog"."pg_class" AS "fk_cls" ON "fk_cls"."oid" = "fk_con"."confrelid" + LEFT JOIN "pg_catalog"."pg_namespace" AS "fk_ns" ON "fk_ns"."oid" = "fk_cls"."relnamespace" + LEFT JOIN "pg_catalog"."pg_attribute" AS "fk_att" ON "fk_att"."attrelid" = "fk_cls"."oid" + AND "fk_att"."attnum" = ANY ("fk_con"."confkey") + LEFT JOIN "pg_catalog"."pg_attrdef" AS "def" ON "def"."adrelid" = "cls"."oid" AND 
"def"."adnum" = "att"."attnum" + WHERE + "att"."attrelid" = "cls"."oid" + AND "att"."attnum" >= 0 + AND "att"."attisdropped" != TRUE + ORDER BY "att"."attnum" + ) AS agg + ) AS "columns" +FROM "pg_catalog"."pg_class" AS "cls" INNER JOIN "pg_catalog"."pg_namespace" AS "ns" ON "cls"."relnamespace" = "ns"."oid" WHERE -"ns"."nspname" !~ '^pg_' -AND "ns"."nspname" != 'information_schema' -AND "cls"."relkind" IN ('r', 'v') -AND "cls"."relname" !~ '^pg_' -AND "cls"."relname" !~ '_prisma_migrations' + "ns"."nspname" !~ '^pg_' + AND "ns"."nspname" != 'information_schema' + AND "cls"."relkind" IN ('r', 'v') + AND "cls"."relname" !~ '^pg_' + AND "cls"."relname" !~ '_prisma_migrations' ` diff --git a/packages/cli/src/actions/pull/provider/provider.ts b/packages/cli/src/actions/pull/provider/provider.ts index d8bd09288..b6f76b98e 100644 --- a/packages/cli/src/actions/pull/provider/provider.ts +++ b/packages/cli/src/actions/pull/provider/provider.ts @@ -1,11 +1,14 @@ -import type { BuiltinType } from '@zenstackhq/language/ast' +import type { BuiltinType, DataFieldAttribute, Enum, InvocationExpr, LiteralExpr, ReferenceExpr } from '@zenstackhq/language/ast' +import type { AstNode } from '../../../../../language/dist/ast.cjs'; +import type { ZModelServices } from '@zenstackhq/language'; -export type Cascade = "NO ACTION" | "RESTRICT"| "CASCADE" | "SET NULL" | "SET DEFAULT" | null; +export type Cascade = "NO ACTION" | "RESTRICT" | "CASCADE" | "SET NULL" | "SET DEFAULT" | null; export interface IntrospectedTable { schema: string name: string type: 'table' | 'view' + definition: string | null columns: { name: string datatype: string @@ -21,6 +24,7 @@ export interface IntrospectedTable { nullable: boolean options: string[] unique: boolean + default: string | null }[] } @@ -41,4 +45,5 @@ export interface IntrospectionProvider { type: BuiltinType | 'Unsupported' isArray: boolean } + getDefaultValue(args: { fieldName: string, defaultValue: string, container: T, services: ZModelServices, enums: Enum[] }): LiteralExpr | InvocationExpr | DataFieldAttribute | DataFieldAttribute[] | ReferenceExpr | undefined } diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index 61883ef90..3feaa5abc 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -82,8 +82,8 @@ export const sqlite: IntrospectionProvider = { } // List user tables and views (exclude internal sqlite_*) - const tablesRaw = all<{ name: string; type: 'table' | 'view' }>( - "SELECT name, type FROM sqlite_schema WHERE type IN ('table','view') AND name NOT LIKE 'sqlite_%' ORDER BY name" + const tablesRaw = all<{ name: string; type: 'table' | 'view'; definition: string | null }>( + "SELECT name, type, sql AS definition FROM sqlite_schema WHERE type IN ('table','view') AND name NOT LIKE 'sqlite_%' ORDER BY name" ) const tables: IntrospectedTable[] = [] @@ -173,12 +173,13 @@ export const sqlite: IntrospectionProvider = { pk: !!c.pk, computed: hidden === 2, nullable: c.notnull !== 1, + default: c.dflt_value, options: [], unique: uniqueSingleColumn.has(c.name), }) } - tables.push({ schema, name: tableName, columns, type: t.type }) + tables.push({ schema, name: tableName, columns, type: t.type, definition: t.definition }) } const enums: IntrospectedEnum[] = [] // SQLite doesn't support enums @@ -188,4 +189,8 @@ export const sqlite: IntrospectionProvider = { db.close() } }, + + getDefaultValue(_args) { + throw new Error('Not implemented yet for 
SQLite') + } } From 0f9b24f9b6f8bacb4ba6fee5ef9c48e3c0ee9175 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Mon, 6 Oct 2025 00:56:27 +0200 Subject: [PATCH 07/68] feat: add ast factory --- packages/cli/src/actions/db.ts | 42 +- packages/cli/src/actions/pull/index.ts | 754 ++++++++---------- .../cli/src/actions/pull/provider/index.ts | 10 +- .../src/actions/pull/provider/postgresql.ts | 428 +++++----- .../cli/src/actions/pull/provider/provider.ts | 96 ++- .../cli/src/actions/pull/provider/sqlite.ts | 173 ++-- packages/cli/src/actions/pull/utils.ts | 133 +-- packages/language/package.json | 10 + packages/language/src/factory/attribute.ts | 275 +++++++ packages/language/src/factory/declaration.ts | 363 +++++++++ packages/language/src/factory/expression.ts | 303 +++++++ packages/language/src/factory/index.ts | 61 ++ packages/language/src/factory/primitives.ts | 61 ++ packages/language/tsup.config.ts | 1 + 14 files changed, 1858 insertions(+), 852 deletions(-) create mode 100644 packages/language/src/factory/attribute.ts create mode 100644 packages/language/src/factory/declaration.ts create mode 100644 packages/language/src/factory/expression.ts create mode 100644 packages/language/src/factory/index.ts create mode 100644 packages/language/src/factory/primitives.ts diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 61e05956d..8dea5cd90 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -7,6 +7,7 @@ import { generateTempPrismaSchema, getSchemaFile, handleSubProcessError, require import { syncEnums, syncRelation, syncTable, type Relation } from './pull'; import { providers } from './pull/provider'; import { getDatasource } from './pull/utils'; +import { config } from '@dotenvx/dotenvx'; type PushOptions = { schema?: string; @@ -14,9 +15,11 @@ type PushOptions = { forceReset?: boolean; }; -type PullOptions = { +export type PullOptions = { schema?: string; out?: string; + naming?: 'pascal' | 'camel' | 'snake' | 'kebab' | 'none'; + alwaysMap?: boolean; }; /** @@ -67,62 +70,57 @@ async function runPush(options: PushOptions) { async function runPull(options: PullOptions) { const schemaFile = getSchemaFile(options.schema); const { model, services } = await loadSchemaDocumentWithServices(schemaFile); - await import("@dotenvx/dotenvx/config") - const SUPPORTED_PROVIDERS = ['sqlite', 'postgresql'] - const datasource = getDatasource(model) + config(); + const SUPPORTED_PROVIDERS = ['sqlite', 'postgresql']; + const datasource = getDatasource(model); if (!datasource) { - throw new Error('No datasource found in the schema.') + throw new Error('No datasource found in the schema.'); } if (!SUPPORTED_PROVIDERS.includes(datasource.provider)) { - throw new Error(`Unsupported datasource provider: ${datasource.provider}`) + throw new Error(`Unsupported datasource provider: ${datasource.provider}`); } const provider = providers[datasource.provider]; if (!provider) { - throw new Error( - `No introspection provider found for: ${datasource.provider}` - ) + throw new Error(`No introspection provider found for: ${datasource.provider}`); } - const { enums, tables } = await provider.introspect(datasource.url) + const { enums, tables } = await provider.introspect(datasource.url); const newModel: Model = { $type: 'Model', $container: undefined, $containerProperty: undefined, $containerIndex: undefined, - declarations: [...model.declarations.filter(d => ["DataSource"].includes(d.$type))], + declarations: 
[...model.declarations.filter((d) => ['DataSource'].includes(d.$type))], imports: [], }; + syncEnums({ dbEnums: enums, model: newModel, services, options }); - syncEnums({ dbEnums: enums, model: newModel, services }) - - - - const resolvedRelations: Relation[] = [] + const resolvedRelations: Relation[] = []; for (const table of tables) { - const relations = syncTable({ table, model: newModel, provider, services }) - resolvedRelations.push(...relations) + const relations = syncTable({ table, model: newModel, provider, services, options }); + resolvedRelations.push(...relations); } for (const relation of resolvedRelations) { - syncRelation({ model: newModel, relation, services }); + syncRelation({ model: newModel, relation, services, options }); } //TODO: diff models and apply changes only - const generator = await new ZModelCodeGenerator(); + const generator = new ZModelCodeGenerator(); - const zmodelSchema = await generator.generate(newModel) + const zmodelSchema = generator.generate(newModel); console.log(options.out ? `Writing to ${options.out}` : schemaFile); const outPath = options.out ? path.resolve(options.out) : schemaFile; console.log(outPath); - fs.writeFileSync(outPath, zmodelSchema) + fs.writeFileSync(outPath, zmodelSchema); } diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 75225c956..708244a35 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -1,481 +1,371 @@ -import type { ZModelServices } from '@zenstackhq/language' -import type { - ArrayExpr, - AttributeArg, - DataField, - DataFieldAttribute, - DataFieldType, - DataModel, - Enum, - EnumField, - Model, - ReferenceExpr, - StringLiteral, - UnsupportedFieldType -} from '@zenstackhq/language/ast' -import type { IntrospectedEnum, IntrospectedTable, IntrospectionProvider } from './provider' -import { getAttributeRef, getDbName } from './utils' - -export function syncEnums({ dbEnums, model }: { dbEnums: IntrospectedEnum[], model: Model, services: ZModelServices }) { +import type { ZModelServices } from '@zenstackhq/language'; +import { isEnum, type DataField, type DataModel, type Enum, type Model } from '@zenstackhq/language/ast'; +import { DataFieldFactory, DataModelFactory, EnumFactory } from '@zenstackhq/language/factory'; +import type { PullOptions } from '../db'; +import type { IntrospectedEnum, IntrospectedTable, IntrospectionProvider } from './provider'; +import { getAttributeRef, getDbName } from './utils'; + +export function syncEnums({ + dbEnums, + model, + options: options, + services, +}: { + dbEnums: IntrospectedEnum[]; + model: Model; + services: ZModelServices; + options: PullOptions; +}) { for (const dbEnum of dbEnums) { - const schemaEnum = { - $type: 'Enum' as const, - $container: model, - name: dbEnum.enum_type, - attributes: [], - comments: [], - get fields() { - return dbEnum.values.map((v): EnumField => ({ - $type: 'EnumField' as const, - $container: schemaEnum, - name: v, - attributes: [], - comments: [], - })); - } - } - model.declarations.push(schemaEnum) + const { modified, name } = resolveNameCasing(options, dbEnum.enum_type); + if (modified) console.log(`Mapping enum ${dbEnum.enum_type} to ${name}`); + const factory = new EnumFactory().setName(name); + if (modified) + factory.addAttribute((builder) => + builder + .setDecl(getAttributeRef('@@map', services)!) 
+ .addArg((argBuilder) => argBuilder.StringLiteral.setValue(dbEnum.enum_type)), + ); + + dbEnum.values.map((v) => { + const { name, modified } = resolveNameCasing(options, v); + factory.addField((builder) => { + builder.setName(name); + if (modified) + builder.addAttribute((builder) => + builder + .setDecl(getAttributeRef('@map', services)!) + .addArg((argBuilder) => argBuilder.StringLiteral.setValue(v)), + ); + + return builder; + }); + }); + model.declarations.push(factory.get({ $container: model })); } } -export type Relation = { - schema: string - table: string - column: string - type: 'one' | 'many' - fk_name: string - nullable: boolean - references: { - schema: string | null - table: string | null - column: string | null - type: 'one' | 'many' +function resolveNameCasing(options: PullOptions, originalName: string) { + let name: string; + + switch (options.naming) { + case 'pascal': + name = toPascalCase(originalName); + break; + case 'camel': + name = toCamelCase(originalName); + break; + case 'snake': + name = toSnakeCase(originalName); + break; + case 'kebab': + name = toKebabCase(originalName); + break; + case 'none': + default: + name = originalName; + break; } + + return { + modified: options.alwaysMap ? true : name !== originalName, + name, + }; } +function toPascalCase(str: string): string { + return str.replace(/[_\- ]+(\w)/g, (_, c) => c.toUpperCase()).replace(/^\w/, (c) => c.toUpperCase()); +} + +function toCamelCase(str: string): string { + return str.replace(/[_\- ]+(\w)/g, (_, c) => c.toUpperCase()).replace(/^\w/, (c) => c.toLowerCase()); +} + +function toSnakeCase(str: string): string { + return str + .replace(/[- ]+/g, '_') + .replace(/([a-z0-9])([A-Z])/g, '$1_$2') + .toLowerCase(); +} + +function toKebabCase(str: string): string { + return str + .replace(/[_ ]+/g, '-') + .replace(/([a-z0-9])([A-Z])/g, '$1-$2') + .toLowerCase(); +} + +export type Relation = { + schema: string; + table: string; + column: string; + type: 'one' | 'many'; + fk_name: string; + nullable: boolean; + references: { + schema: string | null; + table: string | null; + column: string | null; + type: 'one' | 'many'; + }; +}; + export function syncTable({ model, provider, table, - services + services, + options, }: { - table: IntrospectedTable - model: Model - provider: IntrospectionProvider - services: ZModelServices + table: IntrospectedTable; + model: Model; + provider: IntrospectionProvider; + services: ZModelServices; + options: PullOptions; }) { - const idAttribute = getAttributeRef('@id', services) - const uniqueAttribute = getAttributeRef('@unique', services) - const modelUniqueAttribute = getAttributeRef('@@unique', services) - const relationAttribute = getAttributeRef('@relation', services) - const fieldMapAttribute = getAttributeRef('@map', services) - const tableMapAttribute = getAttributeRef('@@map', services) + const idAttribute = getAttributeRef('@id', services); + const modelIdAttribute = getAttributeRef('@@id', services); + const uniqueAttribute = getAttributeRef('@unique', services); + const modelUniqueAttribute = getAttributeRef('@@unique', services); + const relationAttribute = getAttributeRef('@relation', services); + const fieldMapAttribute = getAttributeRef('@map', services); + const tableMapAttribute = getAttributeRef('@@map', services); + const modelindexAttribute = getAttributeRef('@@index', services); - if (!idAttribute || !uniqueAttribute || !relationAttribute || !fieldMapAttribute || !tableMapAttribute) { - throw new Error('Cannot find required attributes in the model.') 
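// A minimal worked example of how the naming option maps names (the table and column
// names below are hypothetical, not taken from this patch): model names are always
// pascal-cased, while field and enum names follow `options.naming`; whenever the
// resolved name differs from the database name (or `alwaysMap` is set), the physical
// name is preserved with @map / @@map. So with `--naming camel`, a table
// "user_accounts" with a column "created_at" would come out roughly as:
//
//   model UserAccounts {
//       createdAt DateTime @map("created_at")
//       @@map("user_accounts")
//   }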
+ if ( + !idAttribute || + !uniqueAttribute || + !relationAttribute || + !fieldMapAttribute || + !tableMapAttribute || + !modelIdAttribute || + !modelUniqueAttribute || + !modelindexAttribute + ) { + throw new Error('Cannot find required attributes in the model.'); } - const relations: Relation[] = [] - const modelTable: DataModel = { - $type: 'DataModel' as const, - $container: model, - name: table.name, - fields: [], - attributes: [], - comments: [], - isView: false, - mixins: [], + const relations: Relation[] = []; + const { name, modified } = resolveNameCasing({ ...options, naming: 'pascal' }, table.name); + const multiPk = table.columns.filter((c) => c.pk).length > 1; + + const modelFactory = new DataModelFactory().setName(name).setIsView(table.type === 'view'); + modelFactory.setContainer(model); + if (modified) { + modelFactory.addAttribute((builder) => + builder.setDecl(tableMapAttribute).addArg((argBuilder) => argBuilder.StringLiteral.setValue(table.name)), + ); } - model.declarations.push(modelTable) - modelTable.fields = table.columns.map((col) => { - if (col.default) console.log(`${table.name}.${col.name} -> ${col.default}`); + if (multiPk) { + const pkColumns = table.columns.filter((c) => c.pk).map((c) => c.name); + modelFactory.addAttribute((builder) => + builder.setDecl(modelIdAttribute).addArg((argBuilder) => { + const arrayExpr = argBuilder.ArrayExpr; + pkColumns.map((c) => { + const ref = modelFactory.node.fields.find((f) => getDbName(f) === c)!; + arrayExpr.addItem((itemBuilder) => itemBuilder.ReferenceExpr.setTarget(ref)); + }); + return arrayExpr; + }), + ); + } - if (col.foreign_key_table) { + table.columns.forEach((column) => { + if (column.foreign_key_table) { relations.push({ schema: table.schema, table: table.name, - column: col.name, + column: column.name, type: 'one', - fk_name: col.foreign_key_name!, - nullable: col.nullable, + fk_name: column.foreign_key_name!, + nullable: column.nullable, references: { - schema: col.foreign_key_schema, - table: col.foreign_key_table, - column: col.foreign_key_column, - type: col.unique ? 'one' : 'many', + schema: column.foreign_key_schema, + table: column.foreign_key_table, + column: column.foreign_key_column, + type: column.unique ? 'one' : 'many', }, - }) + }); } - const fieldPrefix = /[0-9]/g.test(col.name.charAt(0)) ? '_' : '' - const fieldName = `${fieldPrefix}${col.name}` - - const builtinType = provider.getBuiltinType(col.datatype) - const field: DataField = { - $type: 'DataField' as const, - get type() { - return { - $container: this, - $type: 'DataFieldType' as const, - type: builtinType.type === 'Unsupported' ? undefined : builtinType.type, - array: builtinType.isArray, - get unsupported() { - return builtinType.type === 'Unsupported' ? { - $container: this, - $type: 'UnsupportedFieldType' as const, - get value() { - return { - $container: this, - $type: 'StringLiteral', - value: col.datatype, - } satisfies StringLiteral - }, - } satisfies UnsupportedFieldType : undefined - }, - optional: col.nullable, - reference: col.options.length - ? 
{ - $refText: col.datatype, - ref: model.declarations.find( - (d) => d.$type === 'Enum' && getDbName(d) === col.datatype - ) as Enum | undefined, - } - : undefined, - } satisfies DataFieldType - }, - $container: modelTable!, - name: fieldName, - get attributes() { - if (fieldPrefix !== '') return [] - - const getDefaultAttrs = () => { - if (!col.default) return []; - - const defaultValue = col.default && provider.getDefaultValue({ - fieldName: col.name, - defaultValue: col.default, - container: this, - services, - enums: model.declarations.filter((d) => d.$type === 'Enum') as Enum[], - }) - - if (!defaultValue) return []; - - if (Array.isArray(defaultValue)) { - return defaultValue; - } + const fieldPrefix = /[0-9]/g.test(column.name.charAt(0)) ? '_' : ''; + const { name: _name, modified } = resolveNameCasing(options, column.name); + const name = `${fieldPrefix}${_name}`; - if (defaultValue?.$type === 'DataFieldAttribute') { - return [defaultValue]; - } + const builtinType = provider.getBuiltinType(column.datatype); + + modelFactory.addField((builder) => { + builder.setName(name); + builder.setType((typeBuilder) => { + typeBuilder.setArray(builtinType.isArray); + typeBuilder.setOptional(column.nullable); - return [{ - $type: 'DataFieldAttribute' as const, - $container: this, - decl: { - $refText: 'default', - ref: getAttributeRef('@default', services) - }, - get args() { - return [{ - $type: 'AttributeArg' as const, - $container: this, - name: '', - $resolvedParam: { - name: '', - }, - get value() { - return { ...defaultValue, $container: this } - }, - }] satisfies AttributeArg[] - }, - } satisfies DataFieldAttribute]; + if (builtinType.type != 'Unsupported') { + typeBuilder.setType(builtinType.type); + } else { + typeBuilder.setUnsupported((unsupportedBuilder) => + unsupportedBuilder.setValue((lt) => lt.StringLiteral.setValue(column.datatype)), + ); } - return [ - ...(col.pk ? [{ - $type: 'DataFieldAttribute' as const, - $container: this, - args: [], - decl: { - $refText: '@id', - ref: idAttribute, - }, - }] : []) satisfies DataFieldAttribute[], - ...getDefaultAttrs(), - { - $type: 'DataFieldAttribute' as const, - $container: this, - decl: { - $refText: '@map', - ref: fieldMapAttribute, - }, - get args() { - return [{ - $type: 'AttributeArg' as const, - $container: this, - name: '', - $resolvedParam: { - name: '', - }, - get value() { - return { - $type: 'StringLiteral' as const, - $container: this, - value: col.name, - } - }, - } - ] satisfies AttributeArg[] - }, + if (column.options.length > 0) { + const ref = model.declarations.find((d) => isEnum(d) && getDbName(d) === column.datatype) as + | Enum + | undefined; + + if (ref) { + typeBuilder.setReference(ref); } - ] satisfies DataFieldAttribute[] - }, - comments: [], - } - return field - }) + } + + return typeBuilder; + }); + + if (column.default) { + const defaultValuesAttrs = column.default + ? 
provider.getDefaultValue({ + fieldName: column.name, + defaultValue: column.default, + services, + enums: model.declarations.filter((d) => d.$type === 'Enum') as Enum[], + }) + : []; + defaultValuesAttrs.forEach(builder.addAttribute); + } + + if (column.pk && !multiPk) { + builder.addAttribute((b) => b.setDecl(idAttribute)); + } - const uniqieColumns = table.columns.filter((c) => c.unique && !c.pk).map((c) => c.name) + if (column.unique) + builder.addAttribute((b) => { + b.setDecl(uniqueAttribute); + if (column.unique_name) b.addArg((ab) => ab.StringLiteral.setValue(column.unique_name!), 'map'); + + return b; + }); + if (modified) + builder.addAttribute((ab) => + ab.setDecl(fieldMapAttribute).addArg((ab) => ab.StringLiteral.setValue(column.name), 'name'), + ); + + return builder; + }); + }); + + const uniqieColumns = table.columns.filter((c) => c.unique && !c.pk).map((c) => c.name); if (uniqieColumns.length > 0) { - modelTable.attributes.push({ - $type: 'DataModelAttribute' as const, - $container: modelTable, - decl: { - $refText: '@unique', - ref: modelUniqueAttribute, - }, - get args() { - return uniqieColumns.map((c) => ({ - $type: 'AttributeArg' as const, - $container: this, - name: '', - $resolvedParam: { - name: '', - }, - get value() { - return { - $type: 'ArrayExpr' as const, - $container: this, - get items() { - return [{ - $container: this, - $type: 'ReferenceExpr' as const, - target: { - $refText: c, - ref: modelTable.fields.find((f) => f.name === c), - }, - args: [], - }] satisfies ReferenceExpr[] - } - } as ArrayExpr - }, - })) satisfies AttributeArg[] - }, - }) - - return relations + modelFactory.addAttribute((builder) => + builder.setDecl(modelUniqueAttribute).addArg((argBuilder) => { + const arrayExpr = argBuilder.ArrayExpr; + uniqieColumns.map((c) => { + const ref = modelFactory.node.fields.find((f) => getDbName(f) === c)!; + arrayExpr.addItem((itemBuilder) => itemBuilder.ReferenceExpr.setTarget(ref)); + }); + return arrayExpr; + }), + ); } - return relations + model.declarations.push(modelFactory.node); + + table.indexes.forEach((index) => { + modelFactory.addAttribute((builder) => + builder.setDecl(modelindexAttribute).addArg((argBuilder) => { + const arrayExpr = argBuilder.ArrayExpr; + index.columns.map((c) => { + const ref = modelFactory.node.fields.find((f) => getDbName(f) === c.name)!; + arrayExpr.addItem((itemBuilder) => itemBuilder.ReferenceExpr.setTarget(ref)); + }); + return arrayExpr; + }), + ); + }); + + return relations; } -export function syncRelation({ model, relation, services }: { model: Model, relation: Relation, services: ZModelServices }) { - const idAttribute = getAttributeRef('@id', services) - const uniqueAttribute = getAttributeRef('@unique', services) - const relationAttribute = getAttributeRef('@relation', services) - const fieldMapAttribute = getAttributeRef('@map', services) - const tableMapAttribute = getAttributeRef('@@map', services) +export function syncRelation({ + model, + relation, + services, +}: { + model: Model; + relation: Relation; + services: ZModelServices; + options: PullOptions; +}) { + const idAttribute = getAttributeRef('@id', services); + const uniqueAttribute = getAttributeRef('@unique', services); + const relationAttribute = getAttributeRef('@relation', services); + const fieldMapAttribute = getAttributeRef('@map', services); + const tableMapAttribute = getAttributeRef('@@map', services); if (!idAttribute || !uniqueAttribute || !relationAttribute || !fieldMapAttribute || !tableMapAttribute) { - throw new Error('Cannot find 
required attributes in the model.') + throw new Error('Cannot find required attributes in the model.'); } - const sourceModel = model.declarations.find( - (d) => d.$type === 'DataModel' && getDbName(d) === relation.table - ) as DataModel | undefined - if (!sourceModel) return + const sourceModel = model.declarations.find((d) => d.$type === 'DataModel' && getDbName(d) === relation.table) as + | DataModel + | undefined; + if (!sourceModel) return; - const sourceField = sourceModel.fields.find( - (f) => getDbName(f) === relation.column - ) as DataField | undefined - if (!sourceField) return + const sourceField = sourceModel.fields.find((f) => getDbName(f) === relation.column) as DataField | undefined; + if (!sourceField) return; const targetModel = model.declarations.find( - (d) => d.$type === 'DataModel' && getDbName(d) === relation.references.table - ) as DataModel | undefined - if (!targetModel) return + (d) => d.$type === 'DataModel' && getDbName(d) === relation.references.table, + ) as DataModel | undefined; + if (!targetModel) return; - const targetField = targetModel.fields.find( - (f) => getDbName(f) === relation.references.column - ) - if (!targetField) return + const targetField = targetModel.fields.find((f) => getDbName(f) === relation.references.column); + if (!targetField) return; //TODO: Finish relation sync - const fieldPrefix = /[0-9]/g.test(sourceModel.name.charAt(0)) ? '_' : '' - - sourceModel.fields.push({ - $type: 'DataField' as const, - $container: sourceModel, - name: `${fieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}`, - comments: [], - get type() { - return { - $container: this, - $type: 'DataFieldType' as const, - reference: { - ref: targetModel, - $refText: targetModel.name, - }, - optional: relation.nullable, - //TODO - array: relation.type === 'many', - } satisfies DataFieldType - }, - get attributes() { - return [{ - $type: 'DataFieldAttribute' as const, - $container: this, - decl: { - $refText: '@relation', - ref: relationAttribute, - }, - get args() { - return [{ - $type: 'AttributeArg' as const, - $container: this, - name: '', - $resolvedParam: { - name: '', - }, - get value() { - return { - $type: 'StringLiteral' as const, - $container: this, - value: relation.fk_name, - } satisfies StringLiteral - }, - }, - { - $type: 'AttributeArg' as const, - $container: this, - name: 'fields', - $resolvedParam: { - name: 'fields', - }, - get value() { - return { - $type: 'ArrayExpr' as const, - $container: this, - get items() { - return [{ - $container: this, - $type: 'ReferenceExpr' as const, - target: { - ref: sourceField, - $refText: sourceField.name, - }, - args: [], - }] satisfies ReferenceExpr[] - }, - } satisfies ArrayExpr - }, - }, { - $type: 'AttributeArg' as const, - $container: this, - name: 'references', - $resolvedParam: { - name: 'references', - }, - get value() { - return { - $type: 'ArrayExpr' as const, - $container: this, - get items() { - return [{ - $container: this, - $type: 'ReferenceExpr' as const, - target: { - ref: targetField, - $refText: targetField.name, - }, - args: [], - }] satisfies ReferenceExpr[] - }, - } satisfies ArrayExpr - }, - }, { - $type: 'AttributeArg' as const, - $container: this, - name: 'map', - $resolvedParam: { - name: 'map', - }, - get value() { - return { - $type: 'StringLiteral' as const, - $container: this, - value: relation.fk_name, - } satisfies StringLiteral - }, - }] satisfies AttributeArg[] - }, - }] satisfies DataFieldAttribute[] - }, - }) - - const 
oppositeFieldPrefix = /[0-9]/g.test(targetModel.name.charAt(0)) ? '_' : '' - const oppositeFieldName = relation.type === 'one' - ? `${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}s` - : `${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}` - - targetModel.fields.push({ - $type: 'DataField' as const, - $container: targetModel, - name: oppositeFieldName, - get type() { - return { - $container: this, - $type: 'DataFieldType' as const, - reference: { - ref: sourceModel, - $refText: sourceModel.name, - }, - optional: relation.references.type === 'one' && relation.nullable, - array: relation.references.type === 'many', - } satisfies DataFieldType - }, - get attributes() { - return [ - { - $type: 'DataFieldAttribute' as const, - $container: this, - decl: { - $refText: '@relation', - ref: relationAttribute, - }, - get args() { - return [{ - $type: 'AttributeArg' as const, - $container: this, - name: '', - $resolvedParam: { - name: '', - }, - get value() { - return { - $type: 'StringLiteral' as const, - $container: this, - value: relation.fk_name, - } satisfies StringLiteral - }, - }] satisfies AttributeArg[] - } - } - ] satisfies DataFieldAttribute[] - }, - comments: [], - }) -} \ No newline at end of file + const fieldPrefix = /[0-9]/g.test(sourceModel.name.charAt(0)) ? '_' : ''; + + const relationName = `${sourceModel.name}_${relation.column}To${targetModel.name}_${relation.references.column}`; + + const sourceFieldFactory = new DataFieldFactory() + .setContainer(sourceModel) + .setName( + `${fieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}`, + ) + .setType((tb) => + tb + .setOptional(relation.nullable) + .setArray(relation.type === 'many') + .setReference(targetModel), + ) + .addAttribute((ab) => + ab + .setDecl(relationAttribute) + .addArg((ab) => ab.StringLiteral.setValue(relationName)) + .addArg((ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(sourceField)), 'fields') + .addArg((ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(targetField)), 'references') + .addArg((ab) => ab.ArrayExpr.addItem((aeb) => aeb.StringLiteral.setValue(relation.fk_name)), 'map'), + ); + + sourceModel.fields.push(sourceFieldFactory.node); + + const oppositeFieldPrefix = /[0-9]/g.test(targetModel.name.charAt(0)) ? '_' : ''; + const oppositeFieldName = + relation.type === 'one' + ? 
`${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}s` + : `${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}`; + + const targetFieldFactory = new DataFieldFactory() + .setContainer(targetModel) + .setName(oppositeFieldName) + .setType((tb) => + tb + .setOptional(relation.references.type === 'one') + .setArray(relation.references.type === 'many') + .setReference(sourceModel), + ) + .addAttribute((ab) => ab.setDecl(relationAttribute).addArg((ab) => ab.StringLiteral.setValue(relationName))); + + targetModel.fields.push(targetFieldFactory.node); +} diff --git a/packages/cli/src/actions/pull/provider/index.ts b/packages/cli/src/actions/pull/provider/index.ts index 82ee2ac38..4c9a0fe8d 100644 --- a/packages/cli/src/actions/pull/provider/index.ts +++ b/packages/cli/src/actions/pull/provider/index.ts @@ -1,9 +1,9 @@ -export * from './provider' +export * from './provider'; -import { postgresql } from "./postgresql"; -import { sqlite } from "./sqlite"; +import { postgresql } from './postgresql'; +import { sqlite } from './sqlite'; export const providers = { postgresql, - sqlite -}; \ No newline at end of file + sqlite, +}; diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index be882be6e..07dcee913 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -1,242 +1,176 @@ -import { AttributeArg, DataFieldAttribute, Expression, FunctionDecl, InvocationExpr } from '@zenstackhq/language/ast' -import { Client } from 'pg' -import { getAttributeRef, getDbName } from '../utils' -import type { - IntrospectedEnum, - IntrospectedSchema, - IntrospectedTable, - IntrospectionProvider, -} from './provider' +import { Client } from 'pg'; +import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; +import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; +import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; export const postgresql: IntrospectionProvider = { - getBuiltinType(type) { - const t = (type || '').toLowerCase() + getBuiltinType(type) { + const t = (type || '').toLowerCase(); - const isArray = t.startsWith('_') + const isArray = t.startsWith('_'); - switch (t.replace(/^_/, '')) { - // integers - case 'int2': - case 'smallint': - case 'int4': - case 'integer': - return { type: 'Int', isArray } - case 'int8': - case 'bigint': - return { type: 'BigInt', isArray } + switch (t.replace(/^_/, '')) { + // integers + case 'int2': + case 'smallint': + case 'int4': + case 'integer': + return { type: 'Int', isArray }; + case 'int8': + case 'bigint': + return { type: 'BigInt', isArray }; - // decimals and floats - case 'numeric': - case 'decimal': - return { type: 'Decimal', isArray } - case 'float4': - case 'real': - case 'float8': - case 'double precision': - return { type: 'Float', isArray } + // decimals and floats + case 'numeric': + case 'decimal': + return { type: 'Decimal', isArray }; + case 'float4': + case 'real': + case 'float8': + case 'double precision': + return { type: 'Float', isArray }; - // boolean - case 'bool': - case 'boolean': - return { type: 'Boolean', isArray } + // boolean + case 'bool': + case 'boolean': + return { type: 'Boolean', isArray }; - // strings - case 'text': - case 'varchar': - case 'bpchar': - case 'character varying': - case 
'character': - return { type: 'String', isArray } + // strings + case 'text': + case 'varchar': + case 'bpchar': + case 'character varying': + case 'character': + return { type: 'String', isArray }; - // uuid - case 'uuid': - return { type: 'String', isArray } + // uuid + case 'uuid': + return { type: 'String', isArray }; - // dates/times - case 'date': - case 'timestamp': - case 'timestamptz': - return { type: 'DateTime', isArray } + // dates/times + case 'date': + case 'timestamp': + case 'timestamptz': + return { type: 'DateTime', isArray }; - // binary - case 'bytea': - return { type: 'Bytes', isArray } + // binary + case 'bytea': + return { type: 'Bytes', isArray }; - // json - case 'json': - case 'jsonb': - return { type: 'Json', isArray } + // json + case 'json': + case 'jsonb': + return { type: 'Json', isArray }; - // unsupported or postgres-specific - case 'time': - case 'timetz': - case 'interval': - case 'money': - case 'xml': - case 'bit': - case 'varbit': - case 'cidr': - case 'inet': - case 'macaddr': - case 'macaddr8': - case 'point': - case 'line': - case 'lseg': - case 'box': - case 'path': - case 'polygon': - case 'circle': - case 'tsvector': - case 'tsquery': - case 'jsonpath': - case 'hstore': - case 'oid': - case 'name': - case 'regclass': - case 'regproc': - case 'regprocedure': - case 'regoper': - case 'regoperator': - case 'regtype': - case 'regconfig': - case 'regdictionary': - case 'pg_lsn': - case 'txid_snapshot': - case 'int4range': - case 'int8range': - case 'numrange': - case 'tsrange': - case 'tstzrange': - case 'daterange': - default: - return { type: 'Unsupported' as const, isArray } - } - }, - async introspect(connectionString: string): Promise { - const client = new Client({ connectionString }) - await client.connect() + // unsupported or postgres-specific + case 'time': + case 'timetz': + case 'interval': + case 'money': + case 'xml': + case 'bit': + case 'varbit': + case 'cidr': + case 'inet': + case 'macaddr': + case 'macaddr8': + case 'point': + case 'line': + case 'lseg': + case 'box': + case 'path': + case 'polygon': + case 'circle': + case 'tsvector': + case 'tsquery': + case 'jsonpath': + case 'hstore': + case 'oid': + case 'name': + case 'regclass': + case 'regproc': + case 'regprocedure': + case 'regoper': + case 'regoperator': + case 'regtype': + case 'regconfig': + case 'regdictionary': + case 'pg_lsn': + case 'txid_snapshot': + case 'int4range': + case 'int8range': + case 'numrange': + case 'tsrange': + case 'tstzrange': + case 'daterange': + default: + return { type: 'Unsupported' as const, isArray }; + } + }, + async introspect(connectionString: string): Promise { + const client = new Client({ connectionString }); + await client.connect(); - const { rows: tables } = await client.query( - tableIntrospectionQuery - ) - const { rows: enums } = await client.query( - enumIntrospectionQuery - ) + const { rows: tables } = await client.query(tableIntrospectionQuery); + const { rows: enums } = await client.query(enumIntrospectionQuery); - return { - enums, - tables, - } - }, - getDefaultValue({ defaultValue, container: $container, fieldName, services, enums }) { - // Handle common cases - console.log(defaultValue); + return { + enums, + tables, + }; + }, + getDefaultValue({ defaultValue, fieldName, services, enums }) { + const val = defaultValue.trim(); + const factories: DataFieldAttributeFactory[] = []; - const val = defaultValue.trim() + const defaultAttr = new DataFieldAttributeFactory().setDecl(getAttributeRef('@default', services)!); - if (val 
=== 'CURRENT_TIMESTAMP' || val === 'now()') { - const attrs: DataFieldAttribute[] = []; + if (val === 'CURRENT_TIMESTAMP' || val === 'now()') { + factories.push(defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)!))); - attrs.push({ - $type: "DataFieldAttribute" as const, - $container: $container as any, - decl: { - $refText: '@default', - ref: getAttributeRef('@default', services) - }, - get args(): AttributeArg[] { - return [{ - $type: 'AttributeArg' as const, - $container: this as any, - get value(): Expression { - return { - $type: 'InvocationExpr' as const, - $container: this, - function: { - $refText: 'now', - ref: services.shared.workspace.IndexManager.allElements(FunctionDecl).find((f) => (f.node as FunctionDecl)?.name === 'now')?.node as FunctionDecl - }, - args: [], - } satisfies InvocationExpr + if (fieldName.toLowerCase() === 'updatedat' || fieldName.toLowerCase() === 'updated_at') { + factories.push(new DataFieldAttributeFactory().setDecl(getAttributeRef('@updatedAt', services)!)); } - }] + return factories; } - }); - - if (fieldName.toLowerCase() === 'updatedat' || fieldName.toLowerCase() === 'updated_at') { - // for updatedAt, use @updatedAt attribute - attrs.push({ - $type: "DataFieldAttribute" as const, - $container: $container as any, - decl: { - $refText: 'updatedAt', - ref: getAttributeRef('@updatedAt', services) - }, - args: [], - }); - } - - return attrs.length === 1 ? attrs[0] : attrs; - } - if (val.includes('::')) { - const [enumValue, enumName] = val.replace(/'|"/g, '').split('::').map((s) => s.trim()) as [string, string] - const enumDef = enums.find((e) => getDbName(e) === enumName) - if (!enumDef) { - throw new Error(`Enum type ${enumName} not found for default value ${defaultValue}`) - } - const enumField = enumDef.fields.find((v) => getDbName(v) === enumValue) - if (!enumField) { - throw new Error(`Enum value ${enumValue} not found in enum ${enumName} for default value ${defaultValue}`) - } - - return { - $type: 'ReferenceExpr' as const, - $container: $container as any, - target: { - $refText: enumField!.name, - ref: enumField, - }, - args: [], - } - } + if (val.includes('::')) { + const [enumValue, enumName] = val + .replace(/'|"/g, '') + .split('::') + .map((s) => s.trim()) as [string, string]; + const enumDef = enums.find((e) => getDbName(e) === enumName); + if (!enumDef) { + return []; + } + const enumField = enumDef.fields.find((v) => getDbName(v) === enumValue); + if (!enumField) { + throw new Error( + `Enum value ${enumValue} not found in enum ${enumName} for default value ${defaultValue}`, + ); + } - if (val === 'true' || val === 'false') { - return { - $type: 'BooleanLiteral' as const, - $container: $container as any, - value: val === 'true', - } - } + factories.push(defaultAttr.addArg((ab) => ab.ReferenceExpr.setTarget(enumField))); + return factories; + } - if (/^\d+$/.test(val)) { - return { - $container: $container as any, - $type: 'NumberLiteral' as const, - value: val, - } - } + if (val === 'true' || val === 'false') { + factories.push(defaultAttr.addArg((ab) => ab.BooleanLiteral.setValue(val === 'true'))); + return factories; + } - if (/^-?\d+(\.\d+)?$/.test(val)) { - // float - return { - $container: $container as any, - $type: 'NumberLiteral' as const, - value: val, - } - } + if (/^\d+$/.test(val) || /^-?\d+(\.\d+)?$/.test(val)) { + factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(val))); + return factories; + } - if (val.startsWith("'") && val.endsWith("'")) { - // string - return { - 
$container: $container as any, - $type: 'StringLiteral' as const, - value: val.slice(1, -1).replace(/''/g, "'"), - } - } - return undefined - }, -} + if (val.startsWith("'") && val.endsWith("'")) { + factories.push(defaultAttr.addArg((ab) => ab.StringLiteral.setValue(val.slice(1, -1).replace(/''/g, "'")))); + return factories; + } + return []; + }, +}; const enumIntrospectionQuery = ` SELECT @@ -247,7 +181,7 @@ FROM pg_type t JOIN pg_enum e ON t.oid = e.enumtypid JOIN pg_namespace n ON n.oid = t.typnamespace GROUP BY schema_name, enum_type -ORDER BY schema_name, enum_type;` +ORDER BY schema_name, enum_type;`; const tableIntrospectionQuery = ` SELECT @@ -308,6 +242,29 @@ SELECT AND "att"."attnum" = ANY ("u_idx"."indkey"::int2[]) ) ) AS "unique", + ( + SELECT COALESCE( + ( + SELECT "u_con"."conname" + FROM "pg_catalog"."pg_constraint" AS "u_con" + WHERE "u_con"."contype" = 'u' + AND "u_con"."conrelid" = "cls"."oid" + AND array_length("u_con"."conkey", 1) = 1 + AND "att"."attnum" = ANY ("u_con"."conkey") + LIMIT 1 + ), + ( + SELECT "u_idx_cls"."relname" + FROM "pg_catalog"."pg_index" AS "u_idx" + JOIN "pg_catalog"."pg_class" AS "u_idx_cls" ON "u_idx"."indexrelid" = "u_idx_cls"."oid" + WHERE "u_idx"."indrelid" = "cls"."oid" + AND "u_idx"."indisunique" = TRUE + AND "u_idx"."indnkeyatts" = 1 + AND "att"."attnum" = ANY ("u_idx"."indkey"::int2[]) + LIMIT 1 + ) + ) + ) AS "unique_name", "att"."attgenerated" != '' AS "computed", pg_get_expr("def"."adbin", "def"."adrelid") AS "default", "att"."attnotnull" != TRUE AS "nullable", @@ -339,7 +296,41 @@ SELECT AND "att"."attisdropped" != TRUE ORDER BY "att"."attnum" ) AS agg - ) AS "columns" + ) AS "columns", + ( + SELECT coalesce(json_agg(agg), '[]') + FROM ( + SELECT + "idx_cls"."relname" AS "name", + "am"."amname" AS "method", + "idx"."indisunique" AS "unique", + "idx"."indisprimary" AS "primary", + "idx"."indisvalid" AS "valid", + "idx"."indisready" AS "ready", + ("idx"."indpred" IS NOT NULL) AS "partial", + pg_get_expr("idx"."indpred", "idx"."indrelid") AS "predicate", + ( + SELECT json_agg( + json_build_object( + 'name', COALESCE("att"."attname", pg_get_indexdef("idx"."indexrelid", "s"."i", true)), + 'expression', CASE WHEN "att"."attname" IS NULL THEN pg_get_indexdef("idx"."indexrelid", "s"."i", true) ELSE NULL END, + 'order', CASE ((( "idx"."indoption"::int2[] )["s"."i"] & 1)) WHEN 1 THEN 'DESC' ELSE 'ASC' END, + 'nulls', CASE (((( "idx"."indoption"::int2[] )["s"."i"] >> 1) & 1)) WHEN 1 THEN 'NULLS FIRST' ELSE 'NULLS LAST' END + ) + ORDER BY "s"."i" + ) + FROM generate_subscripts("idx"."indkey"::int2[], 1) AS "s"("i") + LEFT JOIN "pg_catalog"."pg_attribute" AS "att" + ON "att"."attrelid" = "cls"."oid" + AND "att"."attnum" = ("idx"."indkey"::int2[])["s"."i"] + ) AS "columns" + FROM "pg_catalog"."pg_index" AS "idx" + JOIN "pg_catalog"."pg_class" AS "idx_cls" ON "idx"."indexrelid" = "idx_cls"."oid" + JOIN "pg_catalog"."pg_am" AS "am" ON "idx_cls"."relam" = "am"."oid" + WHERE "idx"."indrelid" = "cls"."oid" + ORDER BY "idx_cls"."relname" + ) AS agg + ) AS "indexes" FROM "pg_catalog"."pg_class" AS "cls" INNER JOIN "pg_catalog"."pg_namespace" AS "ns" ON "cls"."relnamespace" = "ns"."oid" WHERE @@ -348,4 +339,5 @@ WHERE AND "cls"."relkind" IN ('r', 'v') AND "cls"."relname" !~ '^pg_' AND "cls"."relname" !~ '_prisma_migrations' -` + ORDER BY "ns"."nspname", "cls"."relname" ASC; +`; diff --git a/packages/cli/src/actions/pull/provider/provider.ts b/packages/cli/src/actions/pull/provider/provider.ts index b6f76b98e..c03c39fcd 100644 --- 
a/packages/cli/src/actions/pull/provider/provider.ts
+++ b/packages/cli/src/actions/pull/provider/provider.ts
@@ -1,49 +1,71 @@
-import type { BuiltinType, DataFieldAttribute, Enum, InvocationExpr, LiteralExpr, ReferenceExpr } from '@zenstackhq/language/ast'
-import type { AstNode } from '../../../../../language/dist/ast.cjs';
 import type { ZModelServices } from '@zenstackhq/language';
+import type { BuiltinType, Enum } from '@zenstackhq/language/ast';
+import type { DataFieldAttributeFactory } from '@zenstackhq/language/factory';
 
-export type Cascade = "NO ACTION" | "RESTRICT" | "CASCADE" | "SET NULL" | "SET DEFAULT" | null;
+export type Cascade = 'NO ACTION' | 'RESTRICT' | 'CASCADE' | 'SET NULL' | 'SET DEFAULT' | null;
 
 export interface IntrospectedTable {
-    schema: string
-    name: string
-    type: 'table' | 'view'
-    definition: string | null
-    columns: {
-        name: string
-        datatype: string
-        datatype_schema: string
-        foreign_key_schema: string | null
-        foreign_key_table: string | null
-        foreign_key_column: string | null
-        foreign_key_name: string | null
-        foreign_key_on_update: Cascade
-        foreign_key_on_delete: Cascade
-        pk: boolean
-        computed: boolean
-        nullable: boolean
-        options: string[]
-        unique: boolean
-        default: string | null
-    }[]
+    schema: string;
+    name: string;
+    type: 'table' | 'view';
+    definition: string | null;
+    columns: {
+        name: string;
+        datatype: string;
+        datatype_schema: string;
+        foreign_key_schema: string | null;
+        foreign_key_table: string | null;
+        foreign_key_column: string | null;
+        foreign_key_name: string | null;
+        foreign_key_on_update: Cascade;
+        foreign_key_on_delete: Cascade;
+        pk: boolean;
+        computed: boolean;
+        nullable: boolean;
+        options: string[];
+        unique: boolean;
+        unique_name: string | null;
+        default: string | null;
+    }[];
+    indexes: {
+        name: string;
+        method: string | null;
+        unique: boolean;
+        primary: boolean;
+        valid: boolean;
+        ready: boolean;
+        partial: boolean;
+        predicate: string | null;
+        columns: {
+            name: string;
+            expression: string | null;
+            order: 'ASC' | 'DESC' | null;
+            nulls: string | null;
+        }[];
+    }[];
 }
 
 export type IntrospectedEnum = {
-    schema_name: string
-    enum_type: string
-    values: string[]
-}
+    schema_name: string;
+    enum_type: string;
+    values: string[];
+};
 
 export type IntrospectedSchema = {
-    tables: IntrospectedTable[]
-    enums: IntrospectedEnum[]
-}
+    tables: IntrospectedTable[];
+    enums: IntrospectedEnum[];
+};
 
 export interface IntrospectionProvider {
-    introspect(connectionString: string): Promise<IntrospectedSchema>
-    getBuiltinType(type: string): {
-        type: BuiltinType | 'Unsupported'
-        isArray: boolean
-    }
-    getDefaultValue<T extends AstNode>(args: { fieldName: string, defaultValue: string, container: T, services: ZModelServices, enums: Enum[] }): LiteralExpr | InvocationExpr | DataFieldAttribute | DataFieldAttribute[] | ReferenceExpr | undefined
+    introspect(connectionString: string): Promise<IntrospectedSchema>;
+    getBuiltinType(type: string): {
+        type: BuiltinType | 'Unsupported';
+        isArray: boolean;
+    };
+    getDefaultValue(args: {
+        fieldName: string;
+        defaultValue: string;
+        services: ZModelServices;
+        enums: Enum[];
+    }): DataFieldAttributeFactory[];
 }
diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts
index 3feaa5abc..160a3096e 100644
--- a/packages/cli/src/actions/pull/provider/sqlite.ts
+++ b/packages/cli/src/actions/pull/provider/sqlite.ts
@@ -1,14 +1,14 @@
-import type { IntrospectedEnum, 
IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; // Note: We dynamically import better-sqlite3 inside the async function to avoid // requiring it at module load time for environments that don't use SQLite. export const sqlite: IntrospectionProvider = { getBuiltinType(type) { - const t = (type || '').toLowerCase().trim() + const t = (type || '').toLowerCase().trim(); // SQLite has no array types - const isArray = false + const isArray = false; switch (t) { // integers @@ -17,24 +17,24 @@ export const sqlite: IntrospectionProvider = { case 'tinyint': case 'smallint': case 'mediumint': - return { type: 'Int', isArray } + return { type: 'Int', isArray }; case 'bigint': - return { type: 'BigInt', isArray } + return { type: 'BigInt', isArray }; // decimals and floats case 'numeric': case 'decimal': - return { type: 'Decimal', isArray } + return { type: 'Decimal', isArray }; case 'real': case 'double': case 'double precision': case 'float': - return { type: 'Float', isArray } + return { type: 'Float', isArray }; // boolean (SQLite stores as integer 0/1, but commonly typed as BOOLEAN) case 'bool': case 'boolean': - return { type: 'Boolean', isArray } + return { type: 'Boolean', isArray }; // strings case 'text': @@ -44,102 +44,128 @@ export const sqlite: IntrospectionProvider = { case 'character': case 'clob': case 'uuid': // often stored as TEXT - return { type: 'String', isArray } + return { type: 'String', isArray }; // dates/times (stored as TEXT/REAL/INTEGER, but commonly typed as DATE/DATETIME) case 'date': case 'datetime': - return { type: 'DateTime', isArray } + return { type: 'DateTime', isArray }; // binary case 'blob': - return { type: 'Bytes', isArray } + return { type: 'Bytes', isArray }; // json (not a native type, but commonly used) case 'json': - return { type: 'Json', isArray } + return { type: 'Json', isArray }; default: { // Fallbacks based on SQLite type affinity rules - if (t.includes('int')) return { type: 'Int', isArray } - if (t.includes('char') || t.includes('clob') || t.includes('text')) return { type: 'String', isArray } - if (t.includes('blob')) return { type: 'Bytes', isArray } - if (t.includes('real') || t.includes('floa') || t.includes('doub')) return { type: 'Float', isArray } - if (t.includes('dec') || t.includes('num')) return { type: 'Decimal', isArray } - return { type: 'Unsupported' as const, isArray } + if (t.includes('int')) return { type: 'Int', isArray }; + if (t.includes('char') || t.includes('clob') || t.includes('text')) return { type: 'String', isArray }; + if (t.includes('blob')) return { type: 'Bytes', isArray }; + if (t.includes('real') || t.includes('floa') || t.includes('doub')) return { type: 'Float', isArray }; + if (t.includes('dec') || t.includes('num')) return { type: 'Decimal', isArray }; + return { type: 'Unsupported' as const, isArray }; } } }, async introspect(connectionString: string): Promise { - const SQLite = (await import('better-sqlite3')).default - const db = new SQLite(connectionString, { readonly: true }) + const SQLite = (await import('better-sqlite3')).default; + const db = new SQLite(connectionString, { readonly: true }); try { const all = (sql: string): T[] => { - const stmt: any = db.prepare(sql) - return stmt.all() as T[] - } + const stmt: any = db.prepare(sql); + return stmt.all() as T[]; + }; // List user tables and views (exclude internal sqlite_*) const tablesRaw = all<{ name: string; type: 'table' | 'view'; definition: string | null }>( - "SELECT name, type, sql AS definition FROM 
sqlite_schema WHERE type IN ('table','view') AND name NOT LIKE 'sqlite_%' ORDER BY name" - ) + "SELECT name, type, sql AS definition FROM sqlite_schema WHERE type IN ('table','view') AND name NOT LIKE 'sqlite_%' ORDER BY name", + ); - const tables: IntrospectedTable[] = [] + const tables: IntrospectedTable[] = []; for (const t of tablesRaw) { - const tableName = t.name - const schema = 'main' + const tableName = t.name; + const schema = 'main'; // Columns with extended info; filter out hidden=1 (internal/rowid), mark computed if hidden=2 (generated) const columnsInfo = all<{ - cid: number - name: string - type: string - notnull: number - dflt_value: string | null - pk: number - hidden?: number - }>(`PRAGMA table_xinfo('${tableName.replace(/'/g, "''")}')`) + cid: number; + name: string; + type: string; + notnull: number; + dflt_value: string | null; + pk: number; + hidden?: number; + }>(`PRAGMA table_xinfo('${tableName.replace(/'/g, "''")}')`); + + // Index list (used for both unique inference and index collection) + const tableNameEsc = tableName.replace(/'/g, "''"); + const idxList = all<{ + seq: number; + name: string; + unique: number; + origin: string; + partial: number; + }>(`PRAGMA index_list('${tableNameEsc}')`); // Unique columns detection via unique indexes with single column - const uniqueIndexRows = all<{ name: string; unique: number }>( - `PRAGMA index_list('${tableName.replace(/'/g, "''")}')` - ).filter((r) => r.unique === 1) - - const uniqueSingleColumn = new Set() + const uniqueSingleColumn = new Set(); + const uniqueIndexRows = idxList.filter((r) => r.unique === 1); for (const idx of uniqueIndexRows) { - const idxCols = all<{ name: string }>( - `PRAGMA index_info('${idx.name.replace(/'/g, "''")}')` - ) + const idxCols = all<{ name: string }>(`PRAGMA index_info('${idx.name.replace(/'/g, "''")}')`); if (idxCols.length === 1 && idxCols[0]?.name) { - uniqueSingleColumn.add(idxCols[0].name) + uniqueSingleColumn.add(idxCols[0].name); } } + // Indexes details + const indexes: IntrospectedTable['indexes'] = idxList.map((idx) => { + const idxCols = all<{ name: string }>(`PRAGMA index_info('${idx.name.replace(/'/g, "''")}')`); + return { + name: idx.name, + method: null, // SQLite does not expose index method + unique: idx.unique === 1, + primary: false, // SQLite does not expose this directly; handled via pk in columns + valid: true, // SQLite does not expose index validity + ready: true, // SQLite does not expose index readiness + partial: idx.partial === 1, + predicate: null, // SQLite does not expose index predicate + columns: idxCols.map((col) => ({ + name: col.name, + expression: null, + order: null, + nulls: null, + })), + }; + }); + // Foreign keys mapping by column name const fkRows = all<{ - id: number - seq: number - table: string - from: string - to: string | null - on_update: any - on_delete: any - }>(`PRAGMA foreign_key_list('${tableName.replace(/'/g, "''")}')`) + id: number; + seq: number; + table: string; + from: string; + to: string | null; + on_update: any; + on_delete: any; + }>(`PRAGMA foreign_key_list('${tableName.replace(/'/g, "''")}')`); const fkByColumn = new Map< string, { - foreign_key_schema: string | null - foreign_key_table: string | null - foreign_key_column: string | null - foreign_key_name: string | null - foreign_key_on_update: IntrospectedTable['columns'][number]['foreign_key_on_update'] - foreign_key_on_delete: IntrospectedTable['columns'][number]['foreign_key_on_delete'] + foreign_key_schema: string | null; + foreign_key_table: string | null; + 
foreign_key_column: string | null; + foreign_key_name: string | null; + foreign_key_on_update: IntrospectedTable['columns'][number]['foreign_key_on_update']; + foreign_key_on_delete: IntrospectedTable['columns'][number]['foreign_key_on_delete']; } - >() + >(); for (const fk of fkRows) { fkByColumn.set(fk.from, { @@ -149,16 +175,16 @@ export const sqlite: IntrospectionProvider = { foreign_key_name: null, foreign_key_on_update: (fk.on_update as any) ?? null, foreign_key_on_delete: (fk.on_delete as any) ?? null, - }) + }); } - const columns: IntrospectedTable['columns'] = [] + const columns: IntrospectedTable['columns'] = []; for (const c of columnsInfo) { // hidden: 1 (hidden/internal) -> skip; 2 (generated) -> mark computed - const hidden = c.hidden ?? 0 - if (hidden === 1) continue + const hidden = c.hidden ?? 0; + if (hidden === 1) continue; - const fk = fkByColumn.get(c.name) + const fk = fkByColumn.get(c.name); columns.push({ name: c.name, @@ -176,21 +202,22 @@ export const sqlite: IntrospectionProvider = { default: c.dflt_value, options: [], unique: uniqueSingleColumn.has(c.name), - }) + unique_name: uniqueSingleColumn.has(c.name) ? `${tableName}_${c.name}_unique` : null, + }); } - tables.push({ schema, name: tableName, columns, type: t.type, definition: t.definition }) + tables.push({ schema, name: tableName, columns, type: t.type, definition: t.definition, indexes }); } - const enums: IntrospectedEnum[] = [] // SQLite doesn't support enums + const enums: IntrospectedEnum[] = []; // SQLite doesn't support enums - return { tables, enums } + return { tables, enums }; } finally { - db.close() + db.close(); } }, getDefaultValue(_args) { - throw new Error('Not implemented yet for SQLite') - } -} + throw new Error('Not implemented yet for SQLite'); + }, +}; diff --git a/packages/cli/src/actions/pull/utils.ts b/packages/cli/src/actions/pull/utils.ts index defd0f307..234629740 100644 --- a/packages/cli/src/actions/pull/utils.ts +++ b/packages/cli/src/actions/pull/utils.ts @@ -1,92 +1,95 @@ -import type { ZModelServices } from '@zenstackhq/language' +import type { ZModelServices } from '@zenstackhq/language'; import { - AbstractDeclaration, - DataField, - DataModel, - Enum, - EnumField, - isInvocationExpr, - type Attribute, - type Model -} from '@zenstackhq/language/ast' -import { getStringLiteral } from '@zenstackhq/language/utils' -import type { - DataSourceProviderType -} from '@zenstackhq/sdk/schema' -import type { Reference } from 'langium' + AbstractDeclaration, + DataField, + DataModel, + Enum, + EnumField, + FunctionDecl, + isInvocationExpr, + type Attribute, + type Model, +} from '@zenstackhq/language/ast'; +import { getStringLiteral } from '@zenstackhq/language/utils'; +import type { DataSourceProviderType } from '@zenstackhq/sdk/schema'; +import type { Reference } from 'langium'; export function getAttribute(model: Model, attrName: string) { - const references = model.$document! 
- .references as Reference[] - return references.find( - (a) => a.ref!.$type === 'Attribute' && a.ref!.name === attrName - )?.ref as Attribute | undefined + const references = model.$document!.references as Reference[]; + return references.find((a) => a.ref!.$type === 'Attribute' && a.ref!.name === attrName)?.ref as + | Attribute + | undefined; } export function getDatasource(model: Model) { - const datasource = model.declarations.find((d) => d.$type === 'DataSource') - if (!datasource) { - throw new Error('No datasource declaration found in the schema.') - } + const datasource = model.declarations.find((d) => d.$type === 'DataSource'); + if (!datasource) { + throw new Error('No datasource declaration found in the schema.'); + } - const urlField = datasource.fields.find((f) => f.name === 'url')! + const urlField = datasource.fields.find((f) => f.name === 'url')!; - let url = getStringLiteral(urlField.value) + let url = getStringLiteral(urlField.value); - if (!url && isInvocationExpr(urlField.value)) { - const envName = getStringLiteral(urlField.value.args[0]?.value) - if (!envName) { - throw new Error('The url field must be a string literal or an env().') - } - if (!process.env[envName]) { - throw new Error( - `Environment variable ${envName} is not set, please set it to the database connection string.` - ) + if (!url && isInvocationExpr(urlField.value)) { + const envName = getStringLiteral(urlField.value.args[0]?.value); + if (!envName) { + throw new Error('The url field must be a string literal or an env().'); + } + if (!process.env[envName]) { + throw new Error( + `Environment variable ${envName} is not set, please set it to the database connection string.`, + ); + } + url = process.env[envName]; } - url = process.env[envName] - } - if (!url) { - throw new Error('The url field must be a string literal or an env().') - } + if (!url) { + throw new Error('The url field must be a string literal or an env().'); + } - return { - name: datasource.name, - provider: getStringLiteral( - datasource.fields.find((f) => f.name === 'provider')?.value - ) as DataSourceProviderType, - url, - } + return { + name: datasource.name, + provider: getStringLiteral( + datasource.fields.find((f) => f.name === 'provider')?.value, + ) as DataSourceProviderType, + url, + }; } -export function getDbName( - decl: AbstractDeclaration | DataField | EnumField -): string { - if (!('attributes' in decl)) return decl.name - const nameAttr = decl.attributes.find( - (a) => a.decl.ref?.name === '@@map' || a.decl.ref?.name === '@map' - ) - if (!nameAttr) return decl.name - const attrValue = nameAttr.args[0]?.value +export function getDbName(decl: AbstractDeclaration | DataField | EnumField): string { + if (!('attributes' in decl)) return decl.name; + const nameAttr = decl.attributes.find((a) => a.decl.ref?.name === '@@map' || a.decl.ref?.name === '@map'); + if (!nameAttr) return decl.name; + const attrValue = nameAttr.args[0]?.value; - if (attrValue?.$type !== 'StringLiteral') return decl.name + if (attrValue?.$type !== 'StringLiteral') return decl.name; - return attrValue.value + return attrValue.value; } - -export function getDeclarationRef(type: T["$type"], name: string, services: ZModelServices) { - return services.shared.workspace.IndexManager.allElements(type).find((m) => m.node && getDbName(m.node as T) === name)?.node as T | undefined +export function getDeclarationRef( + type: T['$type'], + name: string, + services: ZModelServices, +) { + return services.shared.workspace.IndexManager.allElements(type).find( + (m) => 
m.node && getDbName(m.node as T) === name, + )?.node as T | undefined; } export function getEnumRef(name: string, services: ZModelServices) { - return getDeclarationRef('Enum', name, services); + return getDeclarationRef('Enum', name, services); } export function getModelRef(name: string, services: ZModelServices) { - return getDeclarationRef('DataModel', name, services); + return getDeclarationRef('DataModel', name, services); } export function getAttributeRef(name: string, services: ZModelServices) { - return getDeclarationRef('Attribute', name, services); -} \ No newline at end of file + return getDeclarationRef('Attribute', name, services); +} + +export function getFunctionRef(name: string, services: ZModelServices) { + return getDeclarationRef('FunctionDecl', name, services); +} diff --git a/packages/language/package.json b/packages/language/package.json index c5f60b107..ca3dc9a9e 100644 --- a/packages/language/package.json +++ b/packages/language/package.json @@ -49,6 +49,16 @@ "default": "./dist/utils.cjs" } }, + "./factory": { + "import": { + "types": "./dist/factory.d.ts", + "default": "./dist/factory.js" + }, + "require": { + "types": "./dist/factory.d.cts", + "default": "./dist/factory.cjs" + } + }, "./package.json": { "import": "./package.json", "require": "./package.json" diff --git a/packages/language/src/factory/attribute.ts b/packages/language/src/factory/attribute.ts new file mode 100644 index 000000000..a42c5e50e --- /dev/null +++ b/packages/language/src/factory/attribute.ts @@ -0,0 +1,275 @@ +import { AstFactory } from '.'; +import { + Attribute, + AttributeArg, + AttributeParam, + AttributeParamType, + DataFieldAttribute, + DataModelAttribute, + Expression, + InternalAttribute, + TypeDeclaration, + type Reference, + type RegularID, +} from '../ast'; +import { ExpressionBuilder } from './expression'; + +export class DataFieldAttributeFactory extends AstFactory { + args: AttributeArgFactory[] = []; + decl?: Reference; + constructor() { + super({ type: DataFieldAttribute }); + } + setDecl(decl: Attribute) { + this.decl = { + $refText: decl?.name ?? '', + ref: decl!, + }; + this.update({ + decl: this.decl, + }); + return this; + } + addArg(builder: (b: ExpressionBuilder) => AstFactory, name?: string) { + const factory = new AttributeArgFactory().setValue(builder); + if (name) { + factory.setName(name); + } + this.args.push(factory); + this.update({ + args: this.args, + }); + return this; + } +} + +export class DataModelAttributeFactory extends AstFactory { + args: AttributeArgFactory[] = []; + decl?: Reference; + constructor() { + super({ type: DataModelAttribute }); + } + setDecl(decl: Attribute) { + this.decl = { + $refText: decl?.name ?? 
'', + ref: decl!, + }; + this.update({ + decl: this.decl, + }); + return this; + } + addArg(builder: (b: ExpressionBuilder) => AstFactory, name?: string) { + const factory = new AttributeArgFactory().setValue(builder); + if (name) { + factory.setName(name); + } + this.args.push(factory); + this.update({ + args: this.args, + }); + return this; + } +} + +export class AttributeArgFactory extends AstFactory { + name?: RegularID = ''; + value?: AstFactory; + + constructor() { + super({ type: AttributeArg }); + } + + setName(name: RegularID) { + this.name = name; + this.update({ + name: this.name, + }); + return this; + } + + setValue(builder: (b: ExpressionBuilder) => AstFactory) { + this.value = builder(ExpressionBuilder()); + this.update({ + value: this.value, + }); + return this; + } +} + +export class InternalAttributeFactory extends AstFactory { + decl?: Reference; + args: AttributeArgFactory[] = []; + + constructor() { + super({ type: InternalAttribute }); + } + + setDecl(decl: Attribute) { + this.decl = { + $refText: decl.name, + ref: decl, + }; + this.update({ + decl: this.decl, + }); + return this; + } + + addArg(builder: (b: ExpressionBuilder) => AstFactory, name?: string) { + const factory = new AttributeArgFactory().setValue(builder); + if (name) { + factory.setName(name); + } + this.args.push(factory); + this.update({ + args: this.args, + }); + return this; + } +} + +export class AttributeParamFactory extends AstFactory { + attributes: InternalAttributeFactory[] = []; + comments: string[] = []; + default?: boolean; + name?: RegularID; + type?: AttributeParamTypeFactory; + + constructor() { + super({ + type: AttributeParam, + node: { + comments: [], + attributes: [], + }, + }); + } + + addAttribute(builder: (b: InternalAttributeFactory) => InternalAttributeFactory) { + this.attributes.push(builder(new InternalAttributeFactory())); + this.update({ + attributes: this.attributes, + }); + return this; + } + + setComments(comments: string[]) { + this.comments = comments; + this.update({ + comments: this.comments, + }); + return this; + } + + setDefault(defaultValue: boolean) { + this.default = defaultValue; + this.update({ + default: this.default, + }); + return this; + } + + setName(name: string) { + this.name = name; + this.update({ + name: this.name, + }); + return this; + } + + setType(builder: (b: AttributeParamTypeFactory) => AttributeParamTypeFactory) { + this.type = builder(new AttributeParamTypeFactory()); + this.update({ + type: this.type, + }); + return this; + } +} + +export class AttributeParamTypeFactory extends AstFactory { + array?: boolean; + optional?: boolean; + reference?: Reference; + type?: AttributeParamType['type']; + constructor() { + super({ type: AttributeParamType }); + } + setArray(array: boolean) { + this.array = array; + this.update({ + array: this.array, + }); + return this; + } + + setOptional(optional: boolean) { + this.optional = optional; + this.update({ + optional: this.optional, + }); + return this; + } + + setReference(reference: TypeDeclaration) { + this.reference = { + $refText: reference.name, + ref: reference, + }; + this.update({ + reference: this.reference, + }); + return this; + } + + setType(type: AttributeParamType['type']) { + this.type = type; + this.update({ + type: this.type, + }); + return this; + } +} + +export class AttributeFactory extends AstFactory { + name?: string; + comments: string[] = []; + attributes: InternalAttributeFactory[] = []; + params: AttributeParamFactory[] = []; + + constructor() { + super({ type: Attribute, node: 
{ comments: [], attributes: [], params: [] } }); + } + + setName(name: string) { + this.name = name; + this.update({ + name: this.name, + }); + return this; + } + + setComments(comments: string[]) { + this.comments = comments; + this.update({ + comments: this.comments, + }); + return this; + } + + addAttribute(builder: (b: InternalAttributeFactory) => InternalAttributeFactory) { + this.attributes.push(builder(new InternalAttributeFactory())); + this.update({ + attributes: this.attributes, + }); + return this; + } + + addParam(builder: (b: AttributeParamFactory) => AttributeParamFactory) { + this.params.push(builder(new AttributeParamFactory())); + this.update({ + params: this.params, + }); + return this; + } +} diff --git a/packages/language/src/factory/declaration.ts b/packages/language/src/factory/declaration.ts new file mode 100644 index 000000000..1f514982b --- /dev/null +++ b/packages/language/src/factory/declaration.ts @@ -0,0 +1,363 @@ +import { AstFactory } from '.'; +import { AbstractDeclaration, type Reference } from '../ast'; +import { + type BuiltinType, + DataField, + DataFieldType, + DataModel, + Enum, + EnumField, + LiteralExpr, + Model, + ModelImport, + type RegularID, + type RegularIDWithTypeNames, + TypeDeclaration, + type TypeDef, + UnsupportedFieldType, +} from '../generated/ast'; +import { AttributeFactory, DataFieldAttributeFactory, DataModelAttributeFactory } from './attribute'; +import { ExpressionBuilder } from './expression'; +export const DeclarationBuilder = () => + ({ + get Attribute() { + return new AttributeFactory(); + }, + get DataModel() { + return new DataModelFactory(); + }, + get DataSource(): any { + throw new Error('DataSource is not implemented'); + }, + get Enum() { + return new EnumFactory(); + }, + get FunctionDecl(): any { + throw new Error('FunctionDecl is not implemented'); + }, + get GeneratorDecl(): any { + throw new Error('GeneratorDecl is not implemented'); + }, + get Plugin(): any { + throw new Error('Plugin is not implemented'); + }, + get Procedure(): any { + throw new Error('Procedure is not implemented'); + }, + get TypeDef(): any { + throw new Error('TypeDef is not implemented'); + }, + }) satisfies DeclarationBuilderType; +type DeclarationBuilderType = { + [K in T['$type']]: AstFactory>; +}; +type DeclarationBuilderMap = ReturnType; + +export type DeclarationBuilder = Pick< + DeclarationBuilderMap, + Extract +>; + +export class DataModelFactory extends AstFactory { + attributes: DataModelAttributeFactory[] = []; + baseModel?: Reference; + comments: string[] = []; + fields: DataFieldFactory[] = []; + isView?: boolean; + mixins: Reference[] = []; + name?: RegularID; + + constructor() { + super({ + type: DataModel, + node: { + attributes: [], + comments: [], + fields: [], + mixins: [], + }, + }); + } + + addAttribute(builder: (attr: DataModelAttributeFactory) => DataModelAttributeFactory) { + this.attributes.push(builder(new DataModelAttributeFactory())); + this.update({ + attributes: this.attributes, + }); + return this; + } + + setBaseModel(model: Reference) { + this.baseModel = model; + this.update({ + baseModel: this.baseModel, + }); + return this; + } + + setComments(comments: string[]) { + this.comments = comments; + this.update({ + comments: this.comments, + }); + return this; + } + + addField(builder: (field: DataFieldFactory) => DataFieldFactory) { + this.fields.push(builder(new DataFieldFactory())); + this.update({ + fields: this.fields, + }); + return this; + } + + setIsView(isView: boolean) { + this.isView = isView; + 
this.update({ + isView: this.isView, + }); + return this; + } + + addMixin(mixin: Reference) { + this.mixins.push(mixin); + this.update({ + mixins: this.mixins, + }); + return this; + } + + setName(name: string) { + this.name = name; + this.update({ + name: this.name, + }); + return this; + } +} + +export class DataFieldFactory extends AstFactory { + attributes: DataFieldAttributeFactory[] = []; + comments: string[] = []; + name?: string; + type?: DataFieldTypeFactory; + + constructor() { + super({ type: DataField, node: { attributes: [], comments: [] } }); + } + + addAttribute( + builder: ((attr: DataFieldAttributeFactory) => DataFieldAttributeFactory) | DataFieldAttributeFactory, + ) { + if (builder instanceof DataFieldAttributeFactory) { + builder.setContainer(this.node); + this.attributes.push(builder); + } else { + this.attributes.push(builder(new DataFieldAttributeFactory())); + } + this.update({ + attributes: this.attributes, + }); + return this; + } + + setComments(comments: string[]) { + this.comments = comments; + this.update({ + comments: this.comments, + }); + return this; + } + + setName(name: string) { + this.name = name; + this.update({ + name: this.name, + }); + return this; + } + + setType(builder: (type: DataFieldTypeFactory) => DataFieldTypeFactory) { + this.type = builder(new DataFieldTypeFactory()); + this.update({ + type: this.type, + }); + return this; + } +} + +export class DataFieldTypeFactory extends AstFactory { + array?: boolean; + optional?: boolean; + reference?: Reference; + type?: BuiltinType; + unsupported?: UnsupportedFieldTypeFactory; + + constructor() { + super({ type: DataFieldType }); + } + + setArray(array: boolean) { + this.array = array; + this.update({ + array: this.array, + }); + return this; + } + + setOptional(optional: boolean) { + this.optional = optional; + this.update({ + optional: this.optional, + }); + return this; + } + + setReference(reference: TypeDeclaration) { + this.reference = { + $refText: reference.name, + ref: reference, + }; + this.update({ + reference: this.reference, + }); + return this; + } + + setType(type: BuiltinType) { + this.type = type; + this.update({ + type: this.type, + }); + return this; + } + + setUnsupported(builder: (a: UnsupportedFieldTypeFactory) => UnsupportedFieldTypeFactory) { + this.unsupported = builder(new UnsupportedFieldTypeFactory()); + this.update({ + unsupported: this.unsupported, + }); + return this; + } +} + +export class UnsupportedFieldTypeFactory extends AstFactory { + value?: AstFactory; + constructor() { + super({ type: UnsupportedFieldType }); + } + setValue(builder: (value: ExpressionBuilder) => AstFactory) { + this.value = builder(ExpressionBuilder()); + this.update({ + value: this.value!, + }); + return this; + } +} + +export class ModelFactory extends AstFactory { + declarations: AstFactory[] = []; + imports: ModelImportFactory[] = []; + constructor() { + super({ type: Model, node: { declarations: [], imports: [] } }); + } + addImport(builder: (b: ModelImportFactory) => ModelImportFactory) { + this.imports.push(builder(new ModelImportFactory())); + this.update({ + imports: this.imports, + }); + return this; + } + addDeclaration(builder: (b: DeclarationBuilder) => AstFactory) { + this.declarations.push(builder(DeclarationBuilder())); + this.update({ + declarations: this.declarations, + }); + return this; + } +} + +export class ModelImportFactory extends AstFactory { + path?: string | undefined; + + constructor() { + super({ type: ModelImport }); + } + + setPath(path: string) { + this.path 
= path; + this.update({ + path: this.path, + }); + return this; + } +} + +export class EnumFactory extends AstFactory { + name?: string; + comments: string[] = []; + fields: EnumFieldFactory[] = []; + attributes: DataModelAttributeFactory[] = []; + + constructor() { + super({ type: Enum, node: { comments: [], fields: [], attributes: [] } }); + } + + addField(builder: (b: EnumFieldFactory) => EnumFieldFactory) { + this.fields.push(builder(new EnumFieldFactory())); + this.update({ + fields: this.fields, + }); + return this; + } + + addAttribute(builder: (b: DataModelAttributeFactory) => DataModelAttributeFactory) { + this.attributes.push(builder(new DataModelAttributeFactory())); + this.update({ + attributes: this.attributes, + }); + return this; + } + + setName(name: string) { + this.name = name; + this.update({ + name: this.name, + }); + return this; + } +} + +export class EnumFieldFactory extends AstFactory { + name?: RegularIDWithTypeNames; + comments: string[] = []; + attributes: DataFieldAttributeFactory[] = []; + + constructor() { + super({ type: EnumField, node: { comments: [], attributes: [] } }); + } + + setName(name: RegularIDWithTypeNames) { + this.name = name; + this.update({ + name: this.name, + }); + return this; + } + + addAttribute(builder: (b: DataFieldAttributeFactory) => DataFieldAttributeFactory) { + this.attributes.push(builder(new DataFieldAttributeFactory())); + this.update({ + attributes: this.attributes, + }); + return this; + } + + addComment(comment: string) { + this.comments.push(comment); + this.update({ + comments: this.comments, + }); + return this; + } +} diff --git a/packages/language/src/factory/expression.ts b/packages/language/src/factory/expression.ts new file mode 100644 index 000000000..ea8e984d8 --- /dev/null +++ b/packages/language/src/factory/expression.ts @@ -0,0 +1,303 @@ +import type { Reference } from 'langium'; +import { AstFactory } from '.'; +import { + Argument, + ArrayExpr, + BinaryExpr, + FieldInitializer, + FunctionDecl, + InvocationExpr, + MemberAccessExpr, + MemberAccessTarget, + ObjectExpr, + ReferenceArg, + ReferenceExpr, + ReferenceTarget, + UnaryExpr, + type Expression, + type RegularID, +} from '../ast'; +import { + BooleanLiteralFactory, + NullExprFactory, + NumberLiteralFactory, + StringLiteralFactory, + ThisExprFactory, +} from './primitives'; + +export const ExpressionBuilder = () => + ({ + get ArrayExpr() { + return new ArrayExprFactory(); + }, + get BinaryExpr() { + return new BinaryExprFactory(); + }, + get BooleanLiteral() { + return new BooleanLiteralFactory(); + }, + get InvocationExpr() { + return new InvocationExprFactory(); + }, + get MemberAccessExpr() { + return new MemberAccessExprFactory(); + }, + get NullExpr() { + return new NullExprFactory(); + }, + get NumberLiteral() { + return new NumberLiteralFactory(); + }, + get ObjectExpr() { + return new ObjectExprFactory(); + }, + get ReferenceExpr() { + return new ReferenceExprFactory(); + }, + get StringLiteral() { + return new StringLiteralFactory(); + }, + get ThisExpr() { + return new ThisExprFactory(); + }, + get UnaryExpr() { + return new UnaryExprFactory(); + }, + }) satisfies ExpressionBuilderType; +type ExpressionBuilderType = { + [K in T['$type']]: AstFactory>; +}; + +type ExpressionFactoryMap = ReturnType; + +export type ExpressionBuilder = Pick< + ExpressionFactoryMap, + Extract +>; + +export class UnaryExprFactory extends AstFactory { + operand?: AstFactory; + + constructor() { + super({ type: UnaryExpr, node: { operator: '!' 
} }); + } + + setOperand(builder: (a: ExpressionBuilder) => AstFactory) { + this.operand = builder(ExpressionBuilder()); + this.update({ + operand: this.operand, + }); + return this; + } +} + +export class ReferenceExprFactory extends AstFactory { + target?: Reference; + args: ReferenceArgFactory[] = []; + + constructor() { + super({ type: ReferenceExpr, node: { args: [] } }); + } + + setTarget(target: ReferenceTarget) { + this.target = { + $refText: target.name, + ref: target, + }; + this.update({ + target: this.target, + }); + return this; + } + + addArg(builder: (a: ReferenceArgFactory) => ReferenceArgFactory) { + this.args.push(builder(new ReferenceArgFactory())); + this.update({ + args: this.args, + }); + return this; + } +} + +export class ReferenceArgFactory extends AstFactory { + name?: string; + value?: AstFactory; + + constructor() { + super({ type: ReferenceArg }); + } + + setName(name: string) { + this.name = name; + this.update({ + name: this.name, + }); + return this; + } + + setValue(builder: (a: ExpressionBuilder) => AstFactory) { + this.value = builder(ExpressionBuilder()); + this.update({ + value: this.value, + }); + return this; + } +} + +export class MemberAccessExprFactory extends AstFactory { + member?: Reference; + operand?: AstFactory; + + constructor() { + super({ type: MemberAccessExpr }); + } + + setMember(target: Reference) { + this.member = target; + this.update({ + member: this.member, + }); + return this; + } + + setOperand(builder: (b: ExpressionBuilder) => AstFactory) { + this.operand = builder(ExpressionBuilder()); + this.update({ + operand: this.operand, + }); + return this; + } +} + +export class ObjectExprFactory extends AstFactory { + fields: FieldInitializerFactory[] = []; + + constructor() { + super({ type: ObjectExpr, node: { fields: [] } }); + } + + addField(builder: (b: FieldInitializerFactory) => FieldInitializerFactory) { + this.fields.push(builder(new FieldInitializerFactory())); + this.update({ + fields: this.fields, + }); + return this; + } +} + +export class FieldInitializerFactory extends AstFactory { + name?: RegularID; + value?: AstFactory; + + constructor() { + super({ type: FieldInitializer }); + } + + setName(name: RegularID) { + this.name = name; + this.update({ + name: this.name!, + }); + return this; + } + + setValue(builder: (a: ExpressionBuilder) => AstFactory) { + this.value = builder(ExpressionBuilder()); + this.update({ + value: this.value!, + }); + return this; + } +} + +export class InvocationExprFactory extends AstFactory { + args: ArgumentFactory[] = []; + function?: Reference; + + constructor() { + super({ type: InvocationExpr, node: { args: [] } }); + } + + addArg(builder: (arg: ArgumentFactory) => ArgumentFactory) { + this.args.push(builder(new ArgumentFactory())); + this.update({ + args: this.args, + }); + return this; + } + + setFunction(value: FunctionDecl) { + this.function = { + $refText: value.name, + ref: value, + }; + this.update({ + function: this.function!, + }); + return this; + } +} + +export class ArgumentFactory extends AstFactory { + value?: AstFactory; + + constructor() { + super({ type: Argument }); + } + + setValue(builder: (a: ExpressionBuilder) => AstFactory) { + this.value = builder(ExpressionBuilder()); + this.update({ + value: this.value!, + }); + return this; + } +} + +export class ArrayExprFactory extends AstFactory { + items: AstFactory[] = []; + + constructor() { + super({ type: ArrayExpr, node: { items: [] } }); + } + + addItem(builder: (a: ExpressionBuilder) => AstFactory) { + 
this.items.push(builder(ExpressionBuilder())); + this.update({ + items: this.items, + }); + return this; + } +} + +export class BinaryExprFactory extends AstFactory { + operator?: BinaryExpr['operator']; + right?: AstFactory; + left?: AstFactory; + + constructor() { + super({ type: BinaryExpr }); + } + + setOperator(operator: BinaryExpr['operator']) { + this.operator = operator; + this.update({ + operator: this.operator!, + }); + return this; + } + setRight(builder: (arg: ExpressionBuilder) => AstFactory) { + this.right = builder(ExpressionBuilder()); + this.update({ + right: this.right!, + }); + return this; + } + setLeft(builder: (arg: ExpressionBuilder) => AstFactory) { + this.left = builder(ExpressionBuilder()); + this.update({ + left: this.left!, + }); + return this; + } +} diff --git a/packages/language/src/factory/index.ts b/packages/language/src/factory/index.ts new file mode 100644 index 000000000..e05891ab6 --- /dev/null +++ b/packages/language/src/factory/index.ts @@ -0,0 +1,61 @@ +import { type AstNode } from '../ast'; + +export type ContainerProps = { + $container: T; + $containerProperty?: string; + $containerIndex?: number; +}; + +type NodeFactoriesFor = { + [K in keyof N as {} extends Pick ? never : K]: N[K] extends (infer U)[] + ? (AstFactory | U)[] + : AstFactory | N[K]; +} & { + [K in keyof N as {} extends Pick ? K : never]?: N[K] extends (infer U)[] + ? (AstFactory | U)[] + : AstFactory | N[K]; +}; + +export abstract class AstFactory { + node = {} as T; + constructor({ type, node }: { type: T['$type']; node?: Partial }) { + (this.node as any).$type = type; + if (node) { + this.update(node); + } + } + setContainer(container: T['$container']) { + (this.node as any).$container = container; + return this; + } + + get(params?: ContainerProps): T { + if (params) this.update(params as any); + return this.node; + } + update(nodeArg: Partial>): T { + const keys = Object.keys(nodeArg as object); + keys.forEach((key) => { + const child = (nodeArg as any)[key]; + if (child instanceof AstFactory) { + (this.node as any)[key] = child.get({ $container: this.node as any }); + } else if (Array.isArray(child)) { + (this.node as any)[key] = child.map((item: any) => + item instanceof AstFactory ? 
item.get({ $container: this.node as any }) : item, + ); + } else { + (this.node as any)[key] = child; + } + }); + return this.node; + } + + resolveChilds(nodeArg: T | NodeFactoriesFor): T { + return this.update(nodeArg); + } +} + +export * from './primitives'; +export * from './expression'; +export * from './declaration'; +export * from './attribute'; diff --git a/packages/language/src/factory/primitives.ts b/packages/language/src/factory/primitives.ts new file mode 100644 index 000000000..1db7e0515 --- /dev/null +++ b/packages/language/src/factory/primitives.ts @@ -0,0 +1,61 @@ +import { AstFactory } from '.'; +import { BooleanLiteral, NullExpr, NumberLiteral, StringLiteral, ThisExpr } from '../ast'; + +export class ThisExprFactory extends AstFactory { + constructor() { + super({ type: ThisExpr, node: { value: 'this' } }); + } +} + +export class NullExprFactory extends AstFactory { + constructor() { + super({ type: NullExpr, node: { value: 'null' } }); + } +} + +export class NumberLiteralFactory extends AstFactory { + value?: number | string; + + constructor() { + super({ type: NumberLiteral }); + } + + setValue(value: number | string) { + this.value = value; + this.update({ + value: this.value.toString(), + }); + return this; + } +} + +export class StringLiteralFactory extends AstFactory { + value?: string; + + constructor() { + super({ type: StringLiteral }); + } + + setValue(value: string) { + this.value = value; + this.update({ + value: this.value, + }); + return this; + } +} +export class BooleanLiteralFactory extends AstFactory { + value?: boolean; + + constructor() { + super({ type: BooleanLiteral }); + } + + setValue(value: boolean) { + this.value = value; + this.update({ + value: this.value, + }); + return this; + } +} diff --git a/packages/language/tsup.config.ts b/packages/language/tsup.config.ts index 0d5d2b6c4..48282a08c 100644 --- a/packages/language/tsup.config.ts +++ b/packages/language/tsup.config.ts @@ -5,6 +5,7 @@ export default defineConfig({ index: 'src/index.ts', ast: 'src/ast.ts', utils: 'src/utils.ts', + factory: 'src/factory/index.ts', }, outDir: 'dist', splitting: false, From bd0fc90dbc72b7bbd1e5b1c618ea7bccbbfeb510 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Mon, 6 Oct 2025 01:33:28 +0200 Subject: [PATCH 08/68] fix: ast factory import order --- packages/language/src/factory/ast-factory.ts | 56 +++++++++++++++++++ packages/language/src/factory/index.ts | 58 +------------------- 2 files changed, 57 insertions(+), 57 deletions(-) create mode 100644 packages/language/src/factory/ast-factory.ts diff --git a/packages/language/src/factory/ast-factory.ts b/packages/language/src/factory/ast-factory.ts new file mode 100644 index 000000000..e01dd7ced --- /dev/null +++ b/packages/language/src/factory/ast-factory.ts @@ -0,0 +1,56 @@ +import { type AstNode } from '../ast'; + +export type ContainerProps = { + $container: T; + $containerProperty?: string; + $containerIndex?: number; +}; + +type NodeFactoriesFor = { + [K in keyof N as {} extends Pick ? never : K]: N[K] extends (infer U)[] + ? (AstFactory | U)[] + : AstFactory | N[K]; +} & { + [K in keyof N as {} extends Pick ? K : never]?: N[K] extends (infer U)[] + ? 
(AstFactory<U> | U)[]
+        : AstFactory<N[K]> | N[K];
+};
+
+export abstract class AstFactory<T extends AstNode> {
+    node = {} as T;
+    constructor({ type, node }: { type: T['$type']; node?: Partial<T> }) {
+        (this.node as any).$type = type;
+        if (node) {
+            this.update(node);
+        }
+    }
+    setContainer(container: T['$container']) {
+        (this.node as any).$container = container;
+        return this;
+    }
+
+    get(params?: ContainerProps): T {
+        if (params) this.update(params as any);
+        return this.node;
+    }
+    update(nodeArg: Partial<NodeFactoriesFor<T>>): T {
+        const keys = Object.keys(nodeArg as object);
+        keys.forEach((key) => {
+            const child = (nodeArg as any)[key];
+            if (child instanceof AstFactory) {
+                (this.node as any)[key] = child.get({ $container: this.node as any });
+            } else if (Array.isArray(child)) {
+                (this.node as any)[key] = child.map((item: any) =>
+                    item instanceof AstFactory ? item.get({ $container: this.node as any }) : item,
+                );
+            } else {
+                (this.node as any)[key] = child;
+            }
+        });
+        return this.node;
+    }
+
+    resolveChilds(nodeArg: T | NodeFactoriesFor<T>): T {
+        return this.update(nodeArg);
+    }
+}
diff --git a/packages/language/src/factory/index.ts b/packages/language/src/factory/index.ts
index e05891ab6..1ea2a286b 100644
--- a/packages/language/src/factory/index.ts
+++ b/packages/language/src/factory/index.ts
@@ -1,60 +1,4 @@
-import { type AstNode } from '../ast';
-
-export type ContainerProps = {
-    $container: T;
-    $containerProperty?: string;
-    $containerIndex?: number;
-};
-
-type NodeFactoriesFor = {
-    [K in keyof N as {} extends Pick ? never : K]: N[K] extends (infer U)[]
-        ? (AstFactory | U)[]
-        : AstFactory | N[K];
-} & {
-    [K in keyof N as {} extends Pick ? K : never]?: N[K] extends (infer U)[]
-        ? (AstFactory | U)[]
-        : AstFactory | N[K];
-};
-
-export abstract class AstFactory {
-    node = {} as T;
-    constructor({ type, node }: { type: T['$type']; node?: Partial }) {
-        (this.node as any).$type = type;
-        if (node) {
-            this.update(node);
-        }
-    }
-    setContainer(container: T['$container']) {
-        (this.node as any).$container = container;
-        return this;
-    }
-
-    get(params?: ContainerProps): T {
-        if (params) this.update(params as any);
-        return this.node;
-    }
-    update(nodeArg: Partial>): T {
-        const keys = Object.keys(nodeArg as object);
-        keys.forEach((key) => {
-            const child = (nodeArg as any)[key];
-            if (child instanceof AstFactory) {
-                (this.node as any)[key] = child.get({ $container: this.node as any });
-            } else if (Array.isArray(child)) {
-                (this.node as any)[key] = child.map((item: any) =>
-                    item instanceof AstFactory ? 
item.get({ $container: this.node as any }) : item, - ); - } else { - (this.node as any)[key] = child; - } - }); - return this.node; - } - - resolveChilds(nodeArg: T | NodeFactoriesFor): T { - return this.update(nodeArg); - } -} - +export * from './ast-factory'; export * from './primitives'; export * from './expression'; export * from './declaration'; From fcea74891e17d341592047415887ed42857f9b2a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Mon, 6 Oct 2025 03:05:23 +0200 Subject: [PATCH 09/68] fix: some runtime bugs --- packages/cli/src/actions/db.ts | 87 ++++++++++--------- packages/cli/src/actions/pull/index.ts | 92 ++++++++++++++------- packages/language/src/factory/attribute.ts | 6 +- packages/language/src/factory/expression.ts | 8 +- 4 files changed, 118 insertions(+), 75 deletions(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 8dea5cd90..c9a3ef4cf 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -68,59 +68,64 @@ async function runPush(options: PushOptions) { } async function runPull(options: PullOptions) { - const schemaFile = getSchemaFile(options.schema); - const { model, services } = await loadSchemaDocumentWithServices(schemaFile); - config(); - const SUPPORTED_PROVIDERS = ['sqlite', 'postgresql']; - const datasource = getDatasource(model); - - if (!datasource) { - throw new Error('No datasource found in the schema.'); - } + try { + const schemaFile = getSchemaFile(options.schema); + const { model, services } = await loadSchemaDocumentWithServices(schemaFile); + config(); + const SUPPORTED_PROVIDERS = ['sqlite', 'postgresql']; + const datasource = getDatasource(model); + + if (!datasource) { + throw new Error('No datasource found in the schema.'); + } - if (!SUPPORTED_PROVIDERS.includes(datasource.provider)) { - throw new Error(`Unsupported datasource provider: ${datasource.provider}`); - } + if (!SUPPORTED_PROVIDERS.includes(datasource.provider)) { + throw new Error(`Unsupported datasource provider: ${datasource.provider}`); + } - const provider = providers[datasource.provider]; + const provider = providers[datasource.provider]; - if (!provider) { - throw new Error(`No introspection provider found for: ${datasource.provider}`); - } + if (!provider) { + throw new Error(`No introspection provider found for: ${datasource.provider}`); + } - const { enums, tables } = await provider.introspect(datasource.url); + const { enums, tables } = await provider.introspect(datasource.url); - const newModel: Model = { - $type: 'Model', - $container: undefined, - $containerProperty: undefined, - $containerIndex: undefined, - declarations: [...model.declarations.filter((d) => ['DataSource'].includes(d.$type))], - imports: [], - }; + const newModel: Model = { + $type: 'Model', + $container: undefined, + $containerProperty: undefined, + $containerIndex: undefined, + declarations: [...model.declarations.filter((d) => ['DataSource'].includes(d.$type))], + imports: [], + }; - syncEnums({ dbEnums: enums, model: newModel, services, options }); + syncEnums({ dbEnums: enums, model: newModel, services, options }); - const resolvedRelations: Relation[] = []; - for (const table of tables) { - const relations = syncTable({ table, model: newModel, provider, services, options }); - resolvedRelations.push(...relations); - } + const resolvedRelations: Relation[] = []; + for (const table of tables) { + const relations = syncTable({ table, model: newModel, provider, services, options }); + 
resolvedRelations.push(...relations); + } - for (const relation of resolvedRelations) { - syncRelation({ model: newModel, relation, services, options }); - } + for (const relation of resolvedRelations) { + syncRelation({ model: newModel, relation, services, options }); + } - //TODO: diff models and apply changes only + //TODO: diff models and apply changes only - const generator = new ZModelCodeGenerator(); + const generator = new ZModelCodeGenerator(); - const zmodelSchema = generator.generate(newModel); + const zmodelSchema = generator.generate(newModel); - console.log(options.out ? `Writing to ${options.out}` : schemaFile); + console.log(options.out ? `Writing to ${options.out}` : schemaFile); - const outPath = options.out ? path.resolve(options.out) : schemaFile; - console.log(outPath); + const outPath = options.out ? path.resolve(options.out) : schemaFile; + console.log(outPath); - fs.writeFileSync(outPath, zmodelSchema); + fs.writeFileSync(outPath, zmodelSchema); + } catch (error) { + console.log(error); + throw error; + } } diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 708244a35..de06ca60b 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -151,26 +151,13 @@ export function syncTable({ const modelFactory = new DataModelFactory().setName(name).setIsView(table.type === 'view'); modelFactory.setContainer(model); + if (modified) { modelFactory.addAttribute((builder) => builder.setDecl(tableMapAttribute).addArg((argBuilder) => argBuilder.StringLiteral.setValue(table.name)), ); } - if (multiPk) { - const pkColumns = table.columns.filter((c) => c.pk).map((c) => c.name); - modelFactory.addAttribute((builder) => - builder.setDecl(modelIdAttribute).addArg((argBuilder) => { - const arrayExpr = argBuilder.ArrayExpr; - pkColumns.map((c) => { - const ref = modelFactory.node.fields.find((f) => getDbName(f) === c)!; - arrayExpr.addItem((itemBuilder) => itemBuilder.ReferenceExpr.setTarget(ref)); - }); - return arrayExpr; - }), - ); - } - table.columns.forEach((column) => { if (column.foreign_key_table) { relations.push({ @@ -231,7 +218,7 @@ export function syncTable({ enums: model.declarations.filter((d) => d.$type === 'Enum') as Enum[], }) : []; - defaultValuesAttrs.forEach(builder.addAttribute); + defaultValuesAttrs.forEach(builder.addAttribute.bind(builder)); } if (column.pk && !multiPk) { @@ -254,12 +241,12 @@ export function syncTable({ }); }); - const uniqieColumns = table.columns.filter((c) => c.unique && !c.pk).map((c) => c.name); - if (uniqieColumns.length > 0) { + const pkColumns = table.columns.filter((c) => c.pk).map((c) => c.name); + if (multiPk) { modelFactory.addAttribute((builder) => - builder.setDecl(modelUniqueAttribute).addArg((argBuilder) => { + builder.setDecl(modelIdAttribute).addArg((argBuilder) => { const arrayExpr = argBuilder.ArrayExpr; - uniqieColumns.map((c) => { + pkColumns.map((c) => { const ref = modelFactory.node.fields.find((f) => getDbName(f) === c)!; arrayExpr.addItem((itemBuilder) => itemBuilder.ReferenceExpr.setTarget(ref)); }); @@ -268,21 +255,65 @@ export function syncTable({ ); } - model.declarations.push(modelFactory.node); - - table.indexes.forEach((index) => { + const uniqieColumns = table.columns.filter((c) => c.unique && !c.pk).map((c) => c.name); + if (uniqieColumns.length > 0) { modelFactory.addAttribute((builder) => - builder.setDecl(modelindexAttribute).addArg((argBuilder) => { + builder.setDecl(modelUniqueAttribute).addArg((argBuilder) => { const 
arrayExpr = argBuilder.ArrayExpr; - index.columns.map((c) => { - const ref = modelFactory.node.fields.find((f) => getDbName(f) === c.name)!; + uniqieColumns.map((c) => { + const ref = modelFactory.node.fields.find((f) => getDbName(f) === c)!; arrayExpr.addItem((itemBuilder) => itemBuilder.ReferenceExpr.setTarget(ref)); }); return arrayExpr; }), ); + } + + table.indexes.forEach((index) => { + if (index.predicate) { + //These constraints are not supported by Zenstack, because Zenstack currently does not fully support check constraints. Read more: https://pris.ly/d/check-constraints + console.log( + 'These constraints are not supported by Zenstack. Read more: https://pris.ly/d/check-constraints', + `- Model: "${table.name}", constraint: "${index.name}"`, + ); + return; + } + if (index.columns.find((c) => c.expression)) { + console.log( + 'These constraints are not supported by Zenstack. Read more: https://pris.ly/d/check-constraints', + `- Model: "${table.name}", constraint: "${index.name}"`, + ); + return; + } + + if (index.columns.length === 1 && index.columns.find((c) => pkColumns.includes(c.name))) { + //skip primary key + return; + } + + modelFactory.addAttribute((builder) => + builder + .setDecl(index.unique ? modelUniqueAttribute : modelindexAttribute) + .addArg((argBuilder) => { + const arrayExpr = argBuilder.ArrayExpr; + index.columns.map((c) => { + const ref = modelFactory.node.fields.find((f) => getDbName(f) === c.name)!; + if (!ref) console.log(c, table.name); + arrayExpr.addItem((itemBuilder) => { + const refExpr = itemBuilder.ReferenceExpr.setTarget(ref); + if (c.order !== 'ASC') refExpr.addArg((ab) => ab.StringLiteral.setValue('DESC'), 'sort'); + + return refExpr; + }); + }); + return arrayExpr; + }) + .addArg((argBuilder) => argBuilder.StringLiteral.setValue(index.name), 'map'), + ); }); + model.declarations.push(modelFactory.node); + return relations; } @@ -327,12 +358,15 @@ export function syncRelation({ const fieldPrefix = /[0-9]/g.test(sourceModel.name.charAt(0)) ? 
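// [Editor's illustrative sketch] The attribute building above maps composite primary keys,
// unique column sets, and secondary indexes to @@id([...]), @@unique([...]) and
// @@index([...], map: "...") respectively, attaching a 'sort' argument for descending columns.
// The string-level helper below only shows the intended output shape; IndexSketch and
// renderIndexAttribute are made-up names, not part of the implementation.
interface IndexSketch {
    name: string;
    unique: boolean;
    columns: { name: string; order: 'ASC' | 'DESC' }[];
}

function renderIndexAttribute(index: IndexSketch): string {
    const fields = index.columns
        .map((c) => (c.order === 'DESC' ? `${c.name}(sort: "DESC")` : c.name))
        .join(', ');
    return `${index.unique ? '@@unique' : '@@index'}([${fields}], map: "${index.name}")`;
}
// renderIndexAttribute({ name: 'post_author_idx', unique: false,
//     columns: [{ name: 'author_id', order: 'ASC' }, { name: 'created_at', order: 'DESC' }] })
// -> @@index([author_id, created_at(sort: "DESC")], map: "post_author_idx")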
'_' : ''; const relationName = `${sourceModel.name}_${relation.column}To${targetModel.name}_${relation.references.column}`; + let sourceFieldName = `${fieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}`; + + if (sourceModel.fields.find((f) => f.name === sourceFieldName)) { + sourceFieldName = `${sourceFieldName}To${targetModel.name.charAt(0).toLowerCase()}${targetModel.name.slice(1)}_${relation.references.column}`; + } const sourceFieldFactory = new DataFieldFactory() .setContainer(sourceModel) - .setName( - `${fieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}`, - ) + .setName(sourceFieldName) .setType((tb) => tb .setOptional(relation.nullable) @@ -345,7 +379,7 @@ export function syncRelation({ .addArg((ab) => ab.StringLiteral.setValue(relationName)) .addArg((ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(sourceField)), 'fields') .addArg((ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(targetField)), 'references') - .addArg((ab) => ab.ArrayExpr.addItem((aeb) => aeb.StringLiteral.setValue(relation.fk_name)), 'map'), + .addArg((ab) => ab.StringLiteral.setValue(relation.fk_name), 'map'), ); sourceModel.fields.push(sourceFieldFactory.node); diff --git a/packages/language/src/factory/attribute.ts b/packages/language/src/factory/attribute.ts index a42c5e50e..52aeebc7c 100644 --- a/packages/language/src/factory/attribute.ts +++ b/packages/language/src/factory/attribute.ts @@ -18,7 +18,7 @@ export class DataFieldAttributeFactory extends AstFactory { args: AttributeArgFactory[] = []; decl?: Reference; constructor() { - super({ type: DataFieldAttribute }); + super({ type: DataFieldAttribute, node: { args: [] } }); } setDecl(decl: Attribute) { this.decl = { @@ -47,7 +47,7 @@ export class DataModelAttributeFactory extends AstFactory { args: AttributeArgFactory[] = []; decl?: Reference; constructor() { - super({ type: DataModelAttribute }); + super({ type: DataModelAttribute, node: { args: [] } }); } setDecl(decl: Attribute) { this.decl = { @@ -102,7 +102,7 @@ export class InternalAttributeFactory extends AstFactory { args: AttributeArgFactory[] = []; constructor() { - super({ type: InternalAttribute }); + super({ type: InternalAttribute, node: { args: [] } }); } setDecl(decl: Attribute) { diff --git a/packages/language/src/factory/expression.ts b/packages/language/src/factory/expression.ts index ea8e984d8..a0ba84001 100644 --- a/packages/language/src/factory/expression.ts +++ b/packages/language/src/factory/expression.ts @@ -110,8 +110,12 @@ export class ReferenceExprFactory extends AstFactory { return this; } - addArg(builder: (a: ReferenceArgFactory) => ReferenceArgFactory) { - this.args.push(builder(new ReferenceArgFactory())); + addArg(builder: (a: ExpressionBuilder) => AstFactory, name?: string) { + const arg = new ReferenceArgFactory().setValue(builder); + if (name) { + arg.setName(name); + } + this.args.push(arg); this.update({ args: this.args, }); From c518fecfa40060ee9fb03863e0ee025a852b6319 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Mon, 20 Oct 2025 21:58:40 +0200 Subject: [PATCH 10/68] fix: lint fix --- packages/cli/src/actions/pull/index.ts | 38 ++++++++------- .../src/actions/pull/provider/postgresql.ts | 48 ++----------------- packages/cli/src/actions/pull/utils.ts | 28 ++++++----- 3 files changed, 41 insertions(+), 73 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts 
b/packages/cli/src/actions/pull/index.ts index de06ca60b..14c9b477e 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -8,7 +8,7 @@ import { getAttributeRef, getDbName } from './utils'; export function syncEnums({ dbEnums, model, - options: options, + options, services, }: { dbEnums: IntrospectedEnum[]; @@ -23,18 +23,18 @@ export function syncEnums({ if (modified) factory.addAttribute((builder) => builder - .setDecl(getAttributeRef('@@map', services)!) + .setDecl(getAttributeRef('@@map', services)) .addArg((argBuilder) => argBuilder.StringLiteral.setValue(dbEnum.enum_type)), ); - dbEnum.values.map((v) => { + dbEnum.values.forEach((v) => { const { name, modified } = resolveNameCasing(options, v); factory.addField((builder) => { builder.setName(name); if (modified) builder.addAttribute((builder) => builder - .setDecl(getAttributeRef('@map', services)!) + .setDecl(getAttributeRef('@map', services)) .addArg((argBuilder) => argBuilder.StringLiteral.setValue(v)), ); @@ -46,7 +46,7 @@ export function syncEnums({ } function resolveNameCasing(options: PullOptions, originalName: string) { - let name: string; + let name = originalName; switch (options.naming) { case 'pascal': @@ -61,10 +61,6 @@ function resolveNameCasing(options: PullOptions, originalName: string) { case 'kebab': name = toKebabCase(originalName); break; - case 'none': - default: - name = originalName; - break; } return { @@ -188,7 +184,7 @@ export function syncTable({ typeBuilder.setArray(builtinType.isArray); typeBuilder.setOptional(column.nullable); - if (builtinType.type != 'Unsupported') { + if (builtinType.type !== 'Unsupported') { typeBuilder.setType(builtinType.type); } else { typeBuilder.setUnsupported((unsupportedBuilder) => @@ -246,8 +242,11 @@ export function syncTable({ modelFactory.addAttribute((builder) => builder.setDecl(modelIdAttribute).addArg((argBuilder) => { const arrayExpr = argBuilder.ArrayExpr; - pkColumns.map((c) => { - const ref = modelFactory.node.fields.find((f) => getDbName(f) === c)!; + pkColumns.forEach((c) => { + const ref = modelFactory.node.fields.find((f) => getDbName(f) === c); + if (!ref) { + throw new Error(`Field ${c} not found`); + } arrayExpr.addItem((itemBuilder) => itemBuilder.ReferenceExpr.setTarget(ref)); }); return arrayExpr; @@ -260,8 +259,11 @@ export function syncTable({ modelFactory.addAttribute((builder) => builder.setDecl(modelUniqueAttribute).addArg((argBuilder) => { const arrayExpr = argBuilder.ArrayExpr; - uniqieColumns.map((c) => { - const ref = modelFactory.node.fields.find((f) => getDbName(f) === c)!; + uniqieColumns.forEach((c) => { + const ref = modelFactory.node.fields.find((f) => getDbName(f) === c); + if (!ref) { + throw new Error(`Field ${c} not found`); + } arrayExpr.addItem((itemBuilder) => itemBuilder.ReferenceExpr.setTarget(ref)); }); return arrayExpr; @@ -296,9 +298,11 @@ export function syncTable({ .setDecl(index.unique ? 
modelUniqueAttribute : modelindexAttribute) .addArg((argBuilder) => { const arrayExpr = argBuilder.ArrayExpr; - index.columns.map((c) => { - const ref = modelFactory.node.fields.find((f) => getDbName(f) === c.name)!; - if (!ref) console.log(c, table.name); + index.columns.forEach((c) => { + const ref = modelFactory.node.fields.find((f) => getDbName(f) === c.name); + if (!ref) { + throw new Error(`Column ${c.name} not found in model ${table.name}`); + } arrayExpr.addItem((itemBuilder) => { const refExpr = itemBuilder.ReferenceExpr.setTarget(ref); if (c.order !== 'ASC') refExpr.addArg((ab) => ab.StringLiteral.setValue('DESC'), 'sort'); diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index 07dcee913..73428d37e 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -61,48 +61,6 @@ export const postgresql: IntrospectionProvider = { case 'json': case 'jsonb': return { type: 'Json', isArray }; - - // unsupported or postgres-specific - case 'time': - case 'timetz': - case 'interval': - case 'money': - case 'xml': - case 'bit': - case 'varbit': - case 'cidr': - case 'inet': - case 'macaddr': - case 'macaddr8': - case 'point': - case 'line': - case 'lseg': - case 'box': - case 'path': - case 'polygon': - case 'circle': - case 'tsvector': - case 'tsquery': - case 'jsonpath': - case 'hstore': - case 'oid': - case 'name': - case 'regclass': - case 'regproc': - case 'regprocedure': - case 'regoper': - case 'regoperator': - case 'regtype': - case 'regconfig': - case 'regdictionary': - case 'pg_lsn': - case 'txid_snapshot': - case 'int4range': - case 'int8range': - case 'numrange': - case 'tsrange': - case 'tstzrange': - case 'daterange': default: return { type: 'Unsupported' as const, isArray }; } @@ -123,13 +81,13 @@ export const postgresql: IntrospectionProvider = { const val = defaultValue.trim(); const factories: DataFieldAttributeFactory[] = []; - const defaultAttr = new DataFieldAttributeFactory().setDecl(getAttributeRef('@default', services)!); + const defaultAttr = new DataFieldAttributeFactory().setDecl(getAttributeRef('@default', services)); if (val === 'CURRENT_TIMESTAMP' || val === 'now()') { - factories.push(defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)!))); + factories.push(defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)))); if (fieldName.toLowerCase() === 'updatedat' || fieldName.toLowerCase() === 'updated_at') { - factories.push(new DataFieldAttributeFactory().setDecl(getAttributeRef('@updatedAt', services)!)); + factories.push(new DataFieldAttributeFactory().setDecl(getAttributeRef('@updatedAt', services))); } return factories; } diff --git a/packages/cli/src/actions/pull/utils.ts b/packages/cli/src/actions/pull/utils.ts index 234629740..33a6ace30 100644 --- a/packages/cli/src/actions/pull/utils.ts +++ b/packages/cli/src/actions/pull/utils.ts @@ -1,11 +1,11 @@ import type { ZModelServices } from '@zenstackhq/language'; import { - AbstractDeclaration, - DataField, - DataModel, - Enum, - EnumField, - FunctionDecl, + type AbstractDeclaration, + type DataField, + type DataModel, + type Enum, + type EnumField, + type FunctionDecl, isInvocationExpr, type Attribute, type Model, @@ -15,8 +15,10 @@ import type { DataSourceProviderType } from '@zenstackhq/sdk/schema'; import type { Reference } from 'langium'; export function getAttribute(model: Model, attrName: string) { 
- const references = model.$document!.references as Reference[]; - return references.find((a) => a.ref!.$type === 'Attribute' && a.ref!.name === attrName)?.ref as + if (!model.$document) throw new Error('Model is not associated with a document.'); + + const references = model.$document.references as Reference[]; + return references.find((a) => a.ref?.$type === 'Attribute' && a.ref?.name === attrName)?.ref as | Attribute | undefined; } @@ -27,7 +29,9 @@ export function getDatasource(model: Model) { throw new Error('No datasource declaration found in the schema.'); } - const urlField = datasource.fields.find((f) => f.name === 'url')!; + const urlField = datasource.fields.find((f) => f.name === 'url'); + + if (!urlField) throw new Error(`No url field found in the datasource declaration.`); let url = getStringLiteral(urlField.value); @@ -73,9 +77,11 @@ export function getDeclarationRef( name: string, services: ZModelServices, ) { - return services.shared.workspace.IndexManager.allElements(type).find( + const node = services.shared.workspace.IndexManager.allElements(type).find( (m) => m.node && getDbName(m.node as T) === name, - )?.node as T | undefined; + )?.node; + if (!node) throw new Error(`Declaration not found: ${name}`); + return node as T; } export function getEnumRef(name: string, services: ZModelServices) { From e18ae1dcd8f5c66d458c25a8fdf6a50dcc9d8253 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Mon, 20 Oct 2025 21:59:50 +0200 Subject: [PATCH 11/68] fix: update zmodel code generator - include imports in output - fix indentaions - include comments in output --- .../language/src/zmodel-code-generator.ts | 46 ++++++++++++++----- 1 file changed, 34 insertions(+), 12 deletions(-) diff --git a/packages/language/src/zmodel-code-generator.ts b/packages/language/src/zmodel-code-generator.ts index 5730fc5b7..bdcad0150 100644 --- a/packages/language/src/zmodel-code-generator.ts +++ b/packages/language/src/zmodel-code-generator.ts @@ -71,7 +71,7 @@ function gen(name: string) { */ export class ZModelCodeGenerator { private readonly options: ZModelCodeOptions; - + private readonly quote: string; constructor(options?: Partial) { this.options = { binaryExprNumberOfSpaces: options?.binaryExprNumberOfSpaces ?? 1, @@ -79,6 +79,7 @@ export class ZModelCodeGenerator { indent: options?.indent ?? 4, quote: options?.quote ?? 'single', }; + this.quote = this.options.quote === 'double' ? '"' : "'"; } /** @@ -92,9 +93,14 @@ export class ZModelCodeGenerator { return handler.value.call(this, ast); } + private quotedStr(val: string): string { + const trimmedVal = val.replace(new RegExp(`${this.quote}`, 'g'), `\\${this.quote}`); + return `${this.quote}${trimmedVal}${this.quote}`; + } + @gen(Model) private _generateModel(ast: Model) { - return ast.declarations.map((d) => this.generate(d)).join('\n\n'); + return `${ast.imports.map((d) => this.generate(d)).join('\n')}\n\n${ast.declarations.map((d) => this.generate(d)).join('\n\n')}`; } @gen(DataSource) @@ -106,16 +112,17 @@ ${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')} @gen(ModelImport) private _generateModelImport(ast: ModelImport) { - return `import '${ast.path}'`; + return `import ${this.quotedStr(ast.path)}`; } @gen(Enum) private _generateEnum(ast: Enum) { return `enum ${ast.name} { -${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')}${ast.attributes.length > 0 +${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')}${ + ast.attributes.length > 0 ? 
'\n\n' + ast.attributes.map((x) => this.indent + this.generate(x)).join('\n') : '' - } + } }`; } @@ -135,7 +142,9 @@ ${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')} @gen(ConfigField) private _generateConfigField(ast: ConfigField) { - return `${ast.name} = ${this.generate(ast.value)}`; + const longestName = Math.max(...ast.$container.fields.map((x) => x.name.length)); + const padding = ' '.repeat(longestName - ast.name.length + 1); + return `${ast.name}${padding}= ${this.generate(ast.value)}`; } @gen(ConfigArrayExpr) @@ -163,15 +172,24 @@ ${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')} @gen(PluginField) private _generatePluginField(ast: PluginField) { - return `${ast.name} = ${this.generate(ast.value)}`; + const longestName = Math.max(...ast.$container.fields.map((x) => x.name.length)); + const padding = ' '.repeat(longestName - ast.name.length + 1); + return `${ast.name}${padding}= ${this.generate(ast.value)}`; } @gen(DataModel) private _generateDataModel(ast: DataModel) { - return `${ast.isView ? 'view' : 'model'} ${ast.name}${ + const comments = `${ast.comments.join('\n')}\n`; + + return `${ast.comments.length > 0 ? comments : ''}${ast.isView ? 'view' : 'model'} ${ast.name}${ ast.mixins.length > 0 ? ' mixes ' + ast.mixins.map((x) => x.$refText).join(', ') : '' } { -${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')}${ +${ast.fields + .map((x) => { + const comments = x.comments.map((c) => `${this.indent}${c}`).join('\n'); + return (x.comments.length ? `${comments}\n` : '') + this.indent + this.generate(x); + }) + .join('\n')}${ ast.attributes.length > 0 ? '\n\n' + ast.attributes.map((x) => this.indent + this.generate(x)).join('\n') : '' @@ -181,7 +199,11 @@ ${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')}${ @gen(DataField) private _generateDataField(ast: DataField) { - return `${ast.name} ${this.fieldType(ast.type)}${ + const longestFieldName = Math.max(...ast.$container.fields.map((f) => f.name.length)); + const longestType = Math.max(...ast.$container.fields.map((f) => this.fieldType(f.type).length)); + const paddingLeft = longestFieldName - ast.name.length; + const paddingRight = ast.attributes.length > 0 ? longestType - this.fieldType(ast.type).length : 0; + return `${ast.name}${' '.repeat(paddingLeft)} ${this.fieldType(ast.type)}${' '.repeat(paddingRight)}${ ast.attributes.length > 0 ? ' ' + ast.attributes.map((x) => this.generate(x)).join(' ') : '' }`; } @@ -235,7 +257,7 @@ ${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')}${ @gen(StringLiteral) private _generateLiteralExpr(ast: LiteralExpr) { - return this.options.quote === 'single' ? 
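// [Editor's illustrative note] quotedStr above escapes the configured quote character inside the
// value before wrapping it. A standalone equivalent, assuming a single-character quote:
function quoteString(value: string, quote: "'" | '"' = "'"): string {
    const escaped = value.split(quote).join(`\\${quote}`);
    return `${quote}${escaped}${quote}`;
}
// quoteString("it's fine")      -> 'it\'s fine'
// quoteString('say "hi"', '"')  -> "say \"hi\""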
`'${ast.value}'` : `"${ast.value}"`; + return this.quotedStr(ast.value as string); } @gen(NumberLiteral) @@ -280,7 +302,7 @@ ${ast.fields.map((x) => this.indent + this.generate(x)).join('\n')}${ @gen(ReferenceArg) private _generateReferenceArg(ast: ReferenceArg) { - return `${ast.name}:${this.generate(ast.value)}`; + return `${ast.name}: ${this.generate(ast.value)}`; } @gen(MemberAccessExpr) From 63f5015717b9392f10234900983a9fbf5cda4f14 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Tue, 21 Oct 2025 00:31:16 +0200 Subject: [PATCH 12/68] feat: add exclude schemas option --- packages/cli/src/actions/db.ts | 7 +++++-- packages/cli/src/actions/pull/index.ts | 15 +++++++++++++++ packages/cli/src/index.ts | 3 ++- 3 files changed, 22 insertions(+), 3 deletions(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index c9a3ef4cf..6fe4351c4 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -20,12 +20,13 @@ export type PullOptions = { out?: string; naming?: 'pascal' | 'camel' | 'snake' | 'kebab' | 'none'; alwaysMap?: boolean; + excludeSchemas: string[]; }; /** * CLI action for db related commands */ -export async function run(command: string, options: PushOptions) { +export async function run(command: string, options: any) { switch (command) { case 'push': await runPush(options); @@ -89,7 +90,9 @@ async function runPull(options: PullOptions) { throw new Error(`No introspection provider found for: ${datasource.provider}`); } - const { enums, tables } = await provider.introspect(datasource.url); + const { enums: allEnums, tables: allTables } = await provider.introspect(datasource.url); + const enums = allEnums.filter((e) => !options.excludeSchemas.includes(e.schema_name)); + const tables = allTables.filter((t) => !options.excludeSchemas.includes(t.schema)); const newModel: Model = { $type: 'Model', diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 14c9b477e..4b070a47c 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -41,6 +41,15 @@ export function syncEnums({ return builder; }); }); + + if (dbEnum.schema_name && dbEnum.schema_name != '' && dbEnum.schema_name !== 'public') { + factory.addAttribute((b) => + b + .setDecl(getAttributeRef('@@schema', services)) + .addArg((a) => a.StringLiteral.setValue(dbEnum.schema_name)), + ); + } + model.declarations.push(factory.get({ $container: model })); } } @@ -316,6 +325,12 @@ export function syncTable({ ); }); + if (table.schema && table.schema != '' && table.schema !== 'public') { + modelFactory.addAttribute((b) => + b.setDecl(getAttributeRef('@@schema', services)).addArg((a) => a.StringLiteral.setValue(table.schema)), + ); + } + model.declarations.push(modelFactory.node); return relations; diff --git a/packages/cli/src/index.ts b/packages/cli/src/index.ts index 2444b10b5..7905676f9 100644 --- a/packages/cli/src/index.ts +++ b/packages/cli/src/index.ts @@ -148,7 +148,8 @@ function createProgram() { .description('Introspect your database.') .addOption(schemaOption) .addOption(noVersionCheckOption) - .addOption(new Option('--out ', 'add custom output path for the introspected schema')) + .addOption(new Option('-e, --exclude-schemas ', 'exclude specific schemas from introspection')) + .addOption(new Option('-o, --out ', 'add custom output path for the introspected schema')) .action((options) => dbAction('pull', options)); dbCommand From 29a3f0b8339e12ddaef0addb3f813f3ac00d3f10 
Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Tue, 21 Oct 2025 00:40:40 +0200 Subject: [PATCH 13/68] feat: implement initial diff update --- packages/cli/src/actions/db.ts | 93 +++++++++++++++++++++++--- packages/cli/src/actions/pull/index.ts | 30 ++++++--- 2 files changed, 104 insertions(+), 19 deletions(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 6fe4351c4..682cfc534 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -1,4 +1,4 @@ -import type { Model } from '@zenstackhq/language/ast'; +import { Model, Enum, DataModel } from '@zenstackhq/language/ast'; import { ZModelCodeGenerator } from '@zenstackhq/sdk'; import fs from 'node:fs'; import path from 'node:path'; @@ -6,7 +6,7 @@ import { execPrisma } from '../utils/exec-utils'; import { generateTempPrismaSchema, getSchemaFile, handleSubProcessError, requireDataSourceUrl, loadSchemaDocumentWithServices } from './action-utils'; import { syncEnums, syncRelation, syncTable, type Relation } from './pull'; import { providers } from './pull/provider'; -import { getDatasource } from './pull/utils'; +import { getDatasource, getDbName } from './pull/utils'; import { config } from '@dotenvx/dotenvx'; type PushOptions = { @@ -115,18 +115,93 @@ async function runPull(options: PullOptions) { syncRelation({ model: newModel, relation, services, options }); } - //TODO: diff models and apply changes only + const cwd = new URL(`file://${process.cwd()}`).pathname; + const docs = services.shared.workspace.LangiumDocuments.all + .filter(({ uri }) => uri.path.toLowerCase().startsWith(cwd.toLowerCase())) + .toArray(); + const docsSet = new Set(docs.map((d) => d.uri.toString())); + console.log(docsSet); + newModel.declarations + .filter((d) => [DataModel, Enum].includes(d.$type)) + .forEach((_declaration) => { + const declaration = _declaration as DataModel | Enum; + const declarations = services.shared.workspace.IndexManager.allElements(declaration.$type, docsSet); + const originalModel = declarations.find((d) => getDbName(d.node as any) === getDbName(declaration)) + ?.node as DataModel | Enum | undefined; + if (!originalModel) { + model.declarations.push(declaration); + (declaration as any).$container = model; + return; + } + + declaration.fields.forEach((f) => { + const originalField = originalModel.fields.find((d) => getDbName(d) === getDbName(f)); + + if (!originalField) { + console.log(`Added field ${f.name} to ${originalModel.name}`); + (f as any).$container = originalModel; + originalModel.fields.push(f as any); + return; + } + //TODO: update field + }); + originalModel.fields + .filter((f) => !declaration.fields.find((d) => getDbName(d) === getDbName(f))) + .forEach((f) => { + const model = f.$container; + const index = model.fields.findIndex((d) => d === f); + model.fields.splice(index, 1); + console.log(`Delete field ${f.name}`); + }); + }); + + services.shared.workspace.IndexManager.allElements('DataModel', docsSet) + .filter( + (declaration) => + !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node as any)), + ) + .forEach((decl) => { + const model = decl.node!.$container as Model; + const index = model.declarations.findIndex((d) => d === decl.node); + model.declarations.splice(index, 1); + console.log(`Delete model ${decl.name}`); + }); + services.shared.workspace.IndexManager.allElements('Enum', docsSet) + .filter( + (declaration) => + !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node as 
any)), + ) + .forEach((decl) => { + const model = decl.node!.$container as Model; + const index = model.declarations.findIndex((d) => d === decl.node); + model.declarations.splice(index, 1); + console.log(`Delete enum ${decl.name}`); + }); + + if (options.out && !fs.lstatSync(options.out).isFile()) { + throw new Error(`Output path ${options.out} is not a file`); + } - const generator = new ZModelCodeGenerator(); + const generator = new ZModelCodeGenerator({ + //TODO: make configurable + quote: 'double', + }); - const zmodelSchema = generator.generate(newModel); + if (options.out) { + const zmodelSchema = generator.generate(newModel); - console.log(options.out ? `Writing to ${options.out}` : schemaFile); + console.log(`Writing to ${options.out}`); - const outPath = options.out ? path.resolve(options.out) : schemaFile; - console.log(outPath); + const outPath = options.out ? path.resolve(options.out) : schemaFile; - fs.writeFileSync(outPath, zmodelSchema); + fs.writeFileSync(outPath, zmodelSchema); + } else { + docs.forEach(({ uri, parseResult: { value: model } }) => { + const zmodelSchema = generator.generate(model); + console.log(`Writing to ${uri.path}`); + fs.writeFileSync(uri.fsPath, zmodelSchema); + }); + } } catch (error) { console.log(error); throw error; diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 4b070a47c..2a6a18d88 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -42,12 +42,17 @@ export function syncEnums({ }); }); - if (dbEnum.schema_name && dbEnum.schema_name != '' && dbEnum.schema_name !== 'public') { - factory.addAttribute((b) => - b - .setDecl(getAttributeRef('@@schema', services)) - .addArg((a) => a.StringLiteral.setValue(dbEnum.schema_name)), - ); + try { + if (dbEnum.schema_name && dbEnum.schema_name !== '' && dbEnum.schema_name !== 'public') { + factory.addAttribute((b) => + b + .setDecl(getAttributeRef('@@schema', services)) + .addArg((a) => a.StringLiteral.setValue(dbEnum.schema_name)), + ); + } + } catch (_error: unknown) { + //Waiting to support multi-schema + //TODO: remove catch after multi-schema support is implemented } model.declarations.push(factory.get({ $container: model })); @@ -325,10 +330,15 @@ export function syncTable({ ); }); - if (table.schema && table.schema != '' && table.schema !== 'public') { - modelFactory.addAttribute((b) => - b.setDecl(getAttributeRef('@@schema', services)).addArg((a) => a.StringLiteral.setValue(table.schema)), - ); + try { + if (table.schema && table.schema !== '' && table.schema !== 'public') { + modelFactory.addAttribute((b) => + b.setDecl(getAttributeRef('@@schema', services)).addArg((a) => a.StringLiteral.setValue(table.schema)), + ); + } + } catch (_error: unknown) { + //Waiting to support multi-schema + //TODO: remove catch after multi-schema support is implemented } model.declarations.push(modelFactory.node); From 1a6f840ad827fe8bb3a884a4dd254101b1157084 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Tue, 21 Oct 2025 00:40:56 +0200 Subject: [PATCH 14/68] fix: update format in zmodel code generator --- packages/language/src/zmodel-code-generator.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/language/src/zmodel-code-generator.ts b/packages/language/src/zmodel-code-generator.ts index bdcad0150..273a4b00e 100644 --- a/packages/language/src/zmodel-code-generator.ts +++ b/packages/language/src/zmodel-code-generator.ts @@ -100,7 +100,7 @@ export class 
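// [Editor's illustrative sketch] The diff-update logic above reconciles the freshly introspected
// declarations with the ones already in the workspace, keyed by database name: unknown ones are
// added, matching ones are merged field by field, and the rest are deleted. A generic version of
// that three-way split (reconcile is a made-up helper, not the actual implementation):
function reconcile<T>(current: T[], incoming: T[], key: (item: T) => string) {
    const currentKeys = new Set(current.map(key));
    const incomingKeys = new Set(incoming.map(key));
    return {
        added: incoming.filter((i) => !currentKeys.has(key(i))),
        merged: current.filter((c) => incomingKeys.has(key(c))),
        removed: current.filter((c) => !incomingKeys.has(key(c))),
    };
}
// e.g. reconcile(existingModels, introspectedModels, getDbName) would drive the add/merge/delete passes.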
ZModelCodeGenerator { @gen(Model) private _generateModel(ast: Model) { - return `${ast.imports.map((d) => this.generate(d)).join('\n')}\n\n${ast.declarations.map((d) => this.generate(d)).join('\n\n')}`; + return `${ast.imports.map((d) => this.generate(d)).join('\n')}${ast.imports.length > 0 ? '\n\n' : ''}${ast.declarations.map((d) => this.generate(d)).join('\n\n')}`; } @gen(DataSource) From a052553e15e61b07942d64af85afe1c16266659e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Tue, 21 Oct 2025 00:41:55 +0200 Subject: [PATCH 15/68] fix: typo --- packages/cli/src/actions/pull/index.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 2a6a18d88..56fc85452 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -268,12 +268,12 @@ export function syncTable({ ); } - const uniqieColumns = table.columns.filter((c) => c.unique && !c.pk).map((c) => c.name); - if (uniqieColumns.length > 0) { + const uniqueColumns = table.columns.filter((c) => c.unique && !c.pk).map((c) => c.name); + if (uniqueColumns.length > 0) { modelFactory.addAttribute((builder) => builder.setDecl(modelUniqueAttribute).addArg((argBuilder) => { const arrayExpr = argBuilder.ArrayExpr; - uniqieColumns.forEach((c) => { + uniqueColumns.forEach((c) => { const ref = modelFactory.node.fields.find((f) => getDbName(f) === c); if (!ref) { throw new Error(`Field ${c} not found`); From debb9acc343264254978cd38d0c51fe1c5233acc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 22 Oct 2025 00:59:38 +0200 Subject: [PATCH 16/68] feat: progress on database introspection and syncing --- packages/cli/src/actions/db.ts | 171 ++++++++++++++---- packages/cli/src/actions/pull/index.ts | 143 ++++++++++----- .../src/actions/pull/provider/postgresql.ts | 84 +++++++-- .../cli/src/actions/pull/provider/provider.ts | 2 + .../cli/src/actions/pull/provider/sqlite.ts | 2 + packages/cli/src/actions/pull/utils.ts | 33 +++- packages/language/res/stdlib.zmodel | 6 +- .../language/src/zmodel-code-generator.ts | 5 +- 8 files changed, 345 insertions(+), 101 deletions(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 682cfc534..8da323196 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -1,4 +1,4 @@ -import { Model, Enum, DataModel } from '@zenstackhq/language/ast'; +import { Model, Enum, DataModel, DataField } from '@zenstackhq/language/ast'; import { ZModelCodeGenerator } from '@zenstackhq/sdk'; import fs from 'node:fs'; import path from 'node:path'; @@ -6,7 +6,7 @@ import { execPrisma } from '../utils/exec-utils'; import { generateTempPrismaSchema, getSchemaFile, handleSubProcessError, requireDataSourceUrl, loadSchemaDocumentWithServices } from './action-utils'; import { syncEnums, syncRelation, syncTable, type Relation } from './pull'; import { providers } from './pull/provider'; -import { getDatasource, getDbName } from './pull/utils'; +import { getDatasource, getDbName, getRelationFkName } from './pull/utils'; import { config } from '@dotenvx/dotenvx'; type PushOptions = { @@ -20,7 +20,7 @@ export type PullOptions = { out?: string; naming?: 'pascal' | 'camel' | 'snake' | 'kebab' | 'none'; alwaysMap?: boolean; - excludeSchemas: string[]; + excludeSchemas?: string[]; }; /** @@ -91,8 +91,8 @@ async function runPull(options: PullOptions) { } const { enums: allEnums, tables: allTables } = 
await provider.introspect(datasource.url); - const enums = allEnums.filter((e) => !options.excludeSchemas.includes(e.schema_name)); - const tables = allTables.filter((t) => !options.excludeSchemas.includes(t.schema)); + const enums = allEnums.filter((e) => !options.excludeSchemas?.includes(e.schema_name)); + const tables = allTables.filter((t) => !options.excludeSchemas?.includes(t.schema)); const newModel: Model = { $type: 'Model', @@ -112,7 +112,28 @@ async function runPull(options: PullOptions) { } for (const relation of resolvedRelations) { - syncRelation({ model: newModel, relation, services, options }); + const simmilarRelations = resolvedRelations.filter((rr) => { + return ( + (rr.schema === relation.schema && + rr.table === relation.table && + rr.references.schema === relation.references.schema && + rr.references.table === relation.references.table) || + (rr.schema === relation.references.schema && + rr.column === relation.references.column && + rr.references.schema === relation.schema && + rr.references.table === relation.table) + ); + }).length; + const selfRelation = + relation.references.schema === relation.schema && relation.references.table === relation.table; + syncRelation({ + model: newModel, + relation, + services, + options, + selfRelation, + simmilarRelations, + }); } const cwd = new URL(`file://${process.cwd()}`).pathname; @@ -120,64 +141,141 @@ async function runPull(options: PullOptions) { .filter(({ uri }) => uri.path.toLowerCase().startsWith(cwd.toLowerCase())) .toArray(); const docsSet = new Set(docs.map((d) => d.uri.toString())); - console.log(docsSet); + + services.shared.workspace.IndexManager.allElements('DataModel', docsSet) + .filter( + (declaration) => + !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node as any)), + ) + .forEach((decl) => { + const model = decl.node!.$container as Model; + const index = model.declarations.findIndex((d) => d === decl.node); + model.declarations.splice(index, 1); + console.log(`Delete model ${decl.name}`); + }); + services.shared.workspace.IndexManager.allElements('Enum', docsSet) + .filter( + (declaration) => + !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node as any)), + ) + .forEach((decl) => { + const model = decl.node!.$container as Model; + const index = model.declarations.findIndex((d) => d === decl.node); + model.declarations.splice(index, 1); + console.log(`Delete enum ${decl.name}`); + }); + newModel.declarations .filter((d) => [DataModel, Enum].includes(d.$type)) .forEach((_declaration) => { const declaration = _declaration as DataModel | Enum; - const declarations = services.shared.workspace.IndexManager.allElements(declaration.$type, docsSet); + const declarations = services.shared.workspace.IndexManager.allElements( + declaration.$type, + docsSet, + ).toArray(); const originalModel = declarations.find((d) => getDbName(d.node as any) === getDbName(declaration)) ?.node as DataModel | Enum | undefined; if (!originalModel) { model.declarations.push(declaration); (declaration as any).$container = model; + declaration.fields.forEach((f) => { + if (f.$type === 'DataField' && f.type.reference?.ref) { + const ref = declarations.find( + (d) => getDbName(d.node as any) === getDbName(f.type.reference!.ref as any), + )?.node; + if (ref) (f.type.reference.ref as any) = ref; + } + }); return; } declaration.fields.forEach((f) => { - const originalField = originalModel.fields.find((d) => getDbName(d) === getDbName(f)); + const originalField = originalModel.fields.find( + 
(d) => + getDbName(d) === getDbName(f) || + (getRelationFkName(d as any) === getRelationFkName(f as any) && + !!getRelationFkName(d as any) && + !!getRelationFkName(f as any)), + ); if (!originalField) { - console.log(`Added field ${f.name} to ${originalModel.name}`); + //console.log(`Added field ${f.name} to ${originalModel.name}`); (f as any).$container = originalModel; originalModel.fields.push(f as any); + if (f.$type === 'DataField' && f.type.reference?.ref) { + const ref = declarations.find( + (d) => getDbName(d.node as any) === getDbName(f.type.reference!.ref as any), + )?.node as DataModel | undefined; + if (ref) { + (f.type.reference.$refText as any) = ref.name; + (f.type.reference.ref as any) = ref; + } + } return; } - //TODO: update field + + if (originalField.$type === 'DataField') { + const field = f as DataField; + originalField.type = field.type; + if (field.type.reference) { + const ref = declarations.find( + (d) => getDbName(d.node as any) === getDbName(field.type.reference!.ref as any), + )?.node as DataModel | undefined; + if (ref) { + (field.type.reference.$refText as any) = ref.name; + (field.type.reference.ref as any) = ref; + } + } + + (originalField.type.$container as any) = originalField; + } + + f.attributes.forEach((attr) => { + const originalAttribute = originalField.attributes.find( + (d) => d.decl.$refText === attr.decl.$refText, + ); + + if (!originalAttribute) { + //console.log(`Added Attribute ${attr.decl.$refText} to ${f.name}`); + (f as any).$container = originalField; + originalField.attributes.push(attr as any); + return; + } + + originalAttribute.args = attr.args; + attr.args.forEach((a) => { + (a.$container as any) = originalAttribute; + }); + }); + + originalField.attributes + .filter((attr) => !f.attributes.find((d) => d.decl.$refText === attr.decl.$refText)) + .forEach((attr) => { + const field = attr.$container; + const index = field.attributes.findIndex((d) => d === attr); + field.attributes.splice(index, 1); + //console.log(`Delete attribute from field:${field.name} ${attr.decl.$refText}`); + }); }); originalModel.fields - .filter((f) => !declaration.fields.find((d) => getDbName(d) === getDbName(f))) + .filter( + (f) => + !declaration.fields.find( + (d) => + getDbName(d) === getDbName(f) || + (getRelationFkName(d as any) === getRelationFkName(f as any) && + !!getRelationFkName(d as any) && + !!getRelationFkName(f as any)), + ), + ) .forEach((f) => { const model = f.$container; const index = model.fields.findIndex((d) => d === f); model.fields.splice(index, 1); - console.log(`Delete field ${f.name}`); + //console.log(`Delete field ${f.name}`); }); }); - services.shared.workspace.IndexManager.allElements('DataModel', docsSet) - .filter( - (declaration) => - !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node as any)), - ) - .forEach((decl) => { - const model = decl.node!.$container as Model; - const index = model.declarations.findIndex((d) => d === decl.node); - model.declarations.splice(index, 1); - console.log(`Delete model ${decl.name}`); - }); - services.shared.workspace.IndexManager.allElements('Enum', docsSet) - .filter( - (declaration) => - !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node as any)), - ) - .forEach((decl) => { - const model = decl.node!.$container as Model; - const index = model.declarations.findIndex((d) => d === decl.node); - model.declarations.splice(index, 1); - console.log(`Delete enum ${decl.name}`); - }); - if (options.out && !fs.lstatSync(options.out).isFile()) 
{ throw new Error(`Output path ${options.out} is not a file`); } @@ -185,6 +283,7 @@ async function runPull(options: PullOptions) { const generator = new ZModelCodeGenerator({ //TODO: make configurable quote: 'double', + indent: 2, }); if (options.out) { diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 56fc85452..c857fe16e 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -1,9 +1,15 @@ import type { ZModelServices } from '@zenstackhq/language'; -import { isEnum, type DataField, type DataModel, type Enum, type Model } from '@zenstackhq/language/ast'; -import { DataFieldFactory, DataModelFactory, EnumFactory } from '@zenstackhq/language/factory'; +import { isEnum, type DataField, type DataModel, type Enum, type Model, Attribute } from '@zenstackhq/language/ast'; +import { + DataFieldFactory, + DataModelFactory, + EnumFactory, + ModelFactory, + DataFieldAttributeFactory, +} from '@zenstackhq/language/factory'; import type { PullOptions } from '../db'; -import type { IntrospectedEnum, IntrospectedTable, IntrospectionProvider } from './provider'; -import { getAttributeRef, getDbName } from './utils'; +import type { IntrospectedEnum, IntrospectedTable, IntrospectionProvider, Cascade } from './provider'; +import { getAttributeRef, getDbName, getEnumRef } from './utils'; export function syncEnums({ dbEnums, @@ -111,6 +117,8 @@ export type Relation = { column: string; type: 'one' | 'many'; fk_name: string; + foreign_key_on_update: Cascade; + foreign_key_on_delete: Cascade; nullable: boolean; references: { schema: string | null; @@ -176,6 +184,8 @@ export function syncTable({ column: column.name, type: 'one', fk_name: column.foreign_key_name!, + foreign_key_on_delete: column.foreign_key_on_delete, + foreign_key_on_update: column.foreign_key_on_update, nullable: column.nullable, references: { schema: column.foreign_key_schema, @@ -187,8 +197,9 @@ export function syncTable({ } const fieldPrefix = /[0-9]/g.test(column.name.charAt(0)) ? '_' : ''; - const { name: _name, modified } = resolveNameCasing(options, column.name); + const { name: _name, modified: _modified } = resolveNameCasing(options, column.name); const name = `${fieldPrefix}${_name}`; + const modified = fieldPrefix !== '' || _modified; const builtinType = provider.getBuiltinType(column.datatype); @@ -198,21 +209,22 @@ export function syncTable({ typeBuilder.setArray(builtinType.isArray); typeBuilder.setOptional(column.nullable); - if (builtinType.type !== 'Unsupported') { - typeBuilder.setType(builtinType.type); - } else { - typeBuilder.setUnsupported((unsupportedBuilder) => - unsupportedBuilder.setValue((lt) => lt.StringLiteral.setValue(column.datatype)), - ); - } - if (column.options.length > 0) { const ref = model.declarations.find((d) => isEnum(d) && getDbName(d) === column.datatype) as | Enum | undefined; - if (ref) { - typeBuilder.setReference(ref); + if (!ref) { + throw new Error(`Enum ${column.datatype} not found`); + } + typeBuilder.setReference(ref); + } else { + if (builtinType.type !== 'Unsupported') { + typeBuilder.setType(builtinType.type); + } else { + typeBuilder.setUnsupported((unsupportedBuilder) => + unsupportedBuilder.setValue((lt) => lt.StringLiteral.setValue(column.datatype)), + ); } } @@ -220,14 +232,12 @@ export function syncTable({ }); if (column.default) { - const defaultValuesAttrs = column.default - ? 
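// [Editor's illustrative sketch] The type resolution above prefers, in order: an enum reference
// (when the introspected column reports options), a built-in ZModel type, and finally
// Unsupported("<native type>"). Flattened to plain strings for readability; EnumSketch and
// resolveFieldType are illustrative names only.
type EnumSketch = { name: string };

function resolveFieldType(
    datatype: string,
    hasOptions: boolean,
    builtinType: string,
    enums: EnumSketch[],
): string {
    if (hasOptions) {
        const match = enums.find((e) => e.name === datatype);
        if (!match) throw new Error(`Enum ${datatype} not found`);
        return match.name;
    }
    return builtinType !== 'Unsupported' ? builtinType : `Unsupported("${datatype}")`;
}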
provider.getDefaultValue({ - fieldName: column.name, - defaultValue: column.default, - services, - enums: model.declarations.filter((d) => d.$type === 'Enum') as Enum[], - }) - : []; + const defaultValuesAttrs = provider.getDefaultValue({ + fieldName: column.name, + defaultValue: column.default, + services, + enums: model.declarations.filter((d) => d.$type === 'Enum') as Enum[], + }); defaultValuesAttrs.forEach(builder.addAttribute.bind(builder)); } @@ -235,17 +245,31 @@ export function syncTable({ builder.addAttribute((b) => b.setDecl(idAttribute)); } - if (column.unique) + if (column.unique && !column.pk) { builder.addAttribute((b) => { b.setDecl(uniqueAttribute); if (column.unique_name) b.addArg((ab) => ab.StringLiteral.setValue(column.unique_name!), 'map'); return b; }); - if (modified) + } + if (modified) { builder.addAttribute((ab) => - ab.setDecl(fieldMapAttribute).addArg((ab) => ab.StringLiteral.setValue(column.name), 'name'), + ab.setDecl(fieldMapAttribute).addArg((ab) => ab.StringLiteral.setValue(column.name)), ); + } + + const dbAttr = services.shared.workspace.IndexManager.allElements('Attribute').find( + (d) => d.name.toLowerCase() === `@db.${column.datatype.toLowerCase()}`, + )?.node as Attribute | undefined; + //TODO: exclude default types like text in postgres + //because Zenstack string = text in postgres so unnecessary to map to default types + if (dbAttr && !['text'].includes(column.datatype)) { + const dbAttrFactory = new DataFieldAttributeFactory().setDecl(dbAttr); + if (column.length || column.precision) + dbAttrFactory.addArg((a) => a.NumberLiteral.setValue(column.length! || column.precision!)); + builder.addAttribute(dbAttrFactory); + } return builder; }); @@ -283,6 +307,11 @@ export function syncTable({ return arrayExpr; }), ); + } else { + modelFactory.addAttribute((a) => a.setDecl(getAttributeRef('@@ignore', services))); + modelFactory.comments.push( + '/// The underlying table does not contain a valid unique identifier and can therefore currently not be handled by Zenstack Client.', + ); } table.indexes.forEach((index) => { @@ -337,8 +366,8 @@ export function syncTable({ ); } } catch (_error: unknown) { - //Waiting to support multi-schema - //TODO: remove catch after multi-schema support is implemented + //Waiting to support multi-schema + //TODO: remove catch after multi-schema support is implemented } model.declarations.push(modelFactory.node); @@ -350,11 +379,16 @@ export function syncRelation({ model, relation, services, + selfRelation, + simmilarRelations, }: { model: Model; relation: Relation; services: ZModelServices; options: PullOptions; + //self included + simmilarRelations: number; + selfRelation: boolean; }) { const idAttribute = getAttributeRef('@id', services); const uniqueAttribute = getAttributeRef('@unique', services); @@ -362,6 +396,8 @@ export function syncRelation({ const fieldMapAttribute = getAttributeRef('@map', services); const tableMapAttribute = getAttributeRef('@@map', services); + const includeRelationName = selfRelation || simmilarRelations > 1; + if (!idAttribute || !uniqueAttribute || !relationAttribute || !fieldMapAttribute || !tableMapAttribute) { throw new Error('Cannot find required attributes in the model.'); } @@ -382,11 +418,9 @@ export function syncRelation({ const targetField = targetModel.fields.find((f) => getDbName(f) === relation.references.column); if (!targetField) return; - //TODO: Finish relation sync - const fieldPrefix = /[0-9]/g.test(sourceModel.name.charAt(0)) ? 
'_' : ''; - const relationName = `${sourceModel.name}_${relation.column}To${targetModel.name}_${relation.references.column}`; + const relationName = `${relation.table}${simmilarRelations > 1 ? `_${relation.column}` : ''}To${relation.references.table}`; let sourceFieldName = `${fieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}`; if (sourceModel.fields.find((f) => f.name === sourceFieldName)) { @@ -402,14 +436,38 @@ export function syncRelation({ .setArray(relation.type === 'many') .setReference(targetModel), ) - .addAttribute((ab) => - ab - .setDecl(relationAttribute) - .addArg((ab) => ab.StringLiteral.setValue(relationName)) - .addArg((ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(sourceField)), 'fields') - .addArg((ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(targetField)), 'references') - .addArg((ab) => ab.StringLiteral.setValue(relation.fk_name), 'map'), - ); + .addAttribute((ab) => { + ab.setDecl(relationAttribute); + if (includeRelationName) ab.addArg((ab) => ab.StringLiteral.setValue(relationName)); + ab.addArg((ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(sourceField)), 'fields').addArg( + (ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(targetField)), + 'references', + ); + + if (relation.foreign_key_on_delete && relation.foreign_key_on_delete !== 'SET NULL') { + const enumRef = getEnumRef('ReferentialAction', services); + if (!enumRef) throw new Error('ReferentialAction enum not found'); + const enumFieldRef = enumRef.fields.find( + (f) => f.name.toLowerCase() === relation.foreign_key_on_delete!.replace(/ /g, '').toLowerCase(), + ); + if (!enumFieldRef) throw new Error(`ReferentialAction ${relation.foreign_key_on_delete} not found`); + ab.addArg((a) => a.ReferenceExpr.setTarget(enumFieldRef), 'onDelete'); + } + + if (relation.foreign_key_on_update && relation.foreign_key_on_update !== 'SET NULL') { + const enumRef = getEnumRef('ReferentialAction', services); + if (!enumRef) throw new Error('ReferentialAction enum not found'); + const enumFieldRef = enumRef.fields.find( + (f) => f.name.toLowerCase() === relation.foreign_key_on_update!.replace(/ /g, '').toLowerCase(), + ); + if (!enumFieldRef) throw new Error(`ReferentialAction ${relation.foreign_key_on_update} not found`); + ab.addArg((a) => a.ReferenceExpr.setTarget(enumFieldRef), 'onUpdate'); + } + + ab.addArg((ab) => ab.StringLiteral.setValue(relation.fk_name), 'map'); + + return ab; + }); sourceModel.fields.push(sourceFieldFactory.node); @@ -427,8 +485,11 @@ export function syncRelation({ .setOptional(relation.references.type === 'one') .setArray(relation.references.type === 'many') .setReference(sourceModel), - ) - .addAttribute((ab) => ab.setDecl(relationAttribute).addArg((ab) => ab.StringLiteral.setValue(relationName))); + ); + if (includeRelationName) + targetFieldFactory.addAttribute((ab) => + ab.setDecl(relationAttribute).addArg((ab) => ab.StringLiteral.setValue(relationName)), + ); targetModel.fields.push(targetFieldFactory.node); } diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index 73428d37e..30bb2602e 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -1,7 +1,7 @@ +import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; import { Client } from 'pg'; import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; 
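// [Editor's illustrative sketch] The onDelete/onUpdate handling above matches the database's
// foreign-key action against the ReferentialAction enum by stripping spaces and comparing
// case-insensitively. Standalone version with an assumed action list:
const REFERENTIAL_ACTIONS = ['Cascade', 'Restrict', 'NoAction', 'SetNull', 'SetDefault'] as const;

function matchReferentialAction(dbAction: string): string | undefined {
    const normalized = dbAction.replace(/ /g, '').toLowerCase();
    return REFERENTIAL_ACTIONS.find((a) => a.toLowerCase() === normalized);
}
// matchReferentialAction('SET DEFAULT') -> 'SetDefault'
// matchReferentialAction('CASCADE')     -> 'Cascade'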
import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; -import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; export const postgresql: IntrospectionProvider = { getBuiltinType(type) { @@ -49,6 +49,7 @@ export const postgresql: IntrospectionProvider = { // dates/times case 'date': + case 'time': case 'timestamp': case 'timestamptz': return { type: 'DateTime', isArray }; @@ -91,24 +92,63 @@ export const postgresql: IntrospectionProvider = { } return factories; } + if (val.startsWith('nextval(')) { + factories.push( + defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services))), + ); + return factories; + } + if (val.includes('(') && val.includes(')')) { + factories.push( + defaultAttr.addArg((a) => + a.InvocationExpr.setFunction(getFunctionRef('dbgenerated', services)).addArg((a) => + a.setValue((v) => v.StringLiteral.setValue(val)), + ), + ), + ); + return factories; + } if (val.includes('::')) { - const [enumValue, enumName] = val - .replace(/'|"/g, '') + const [value, type] = val + .replace(/'/g, '') .split('::') .map((s) => s.trim()) as [string, string]; - const enumDef = enums.find((e) => getDbName(e) === enumName); - if (!enumDef) { - return []; - } - const enumField = enumDef.fields.find((v) => getDbName(v) === enumValue); - if (!enumField) { - throw new Error( - `Enum value ${enumValue} not found in enum ${enumName} for default value ${defaultValue}`, - ); + switch (type) { + case 'character varying': + case 'uuid': + case 'json': + case 'jsonb': + if (value === 'NULL') return []; + factories.push(defaultAttr.addArg((a) => a.StringLiteral.setValue(value))); + break; + case 'real': + factories.push(defaultAttr.addArg((a) => a.NumberLiteral.setValue(value))); + break; + default: { + const enumDef = enums.find((e) => getDbName(e, true) === type); + if (!enumDef) { + factories.push( + defaultAttr.addArg((a) => + a.InvocationExpr.setFunction(getFunctionRef('dbgenerated', services)).addArg((a) => + a.setValue((v) => v.StringLiteral.setValue(val)), + ), + ), + ); + break; + } + const enumField = enumDef.fields.find((v) => getDbName(v) === value); + if (!enumField) { + throw new Error( + `Enum value ${value} not found in enum ${type} for default value ${defaultValue}`, + ); + } + + factories.push(defaultAttr.addArg((ab) => ab.ReferenceExpr.setTarget(enumField))); + break; + } } - factories.push(defaultAttr.addArg((ab) => ab.ReferenceExpr.setTarget(enumField))); return factories; } @@ -161,6 +201,8 @@ SELECT "att"."attname" AS "name", "typ"."typname" AS "datatype", "tns"."nspname" AS "datatype_schema", + "c"."character_maximum_length" AS "length", + COALESCE("c"."numeric_precision", "c"."datetime_precision") AS "precision", "fk_ns"."nspname" AS "foreign_key_schema", "fk_cls"."relname" AS "foreign_key_table", "fk_att"."attname" AS "foreign_key_column", @@ -234,10 +276,18 @@ SELECT ), '[]' ) AS "options" - FROM "pg_catalog"."pg_attribute" AS "att" - INNER JOIN "pg_catalog"."pg_type" AS "typ" ON "typ"."oid" = "att"."atttypid" - INNER JOIN "pg_catalog"."pg_namespace" AS "tns" ON "tns"."oid" = "typ"."typnamespace" - LEFT JOIN "pg_catalog"."pg_constraint" AS "pk_con" ON "pk_con"."contype" = 'p' + + FROM "pg_catalog"."pg_attribute" AS "att" + + INNER JOIN "pg_catalog"."pg_type" AS "typ" ON "typ"."oid" = "att"."atttypid" + + INNER JOIN "pg_catalog"."pg_namespace" AS "tns" ON "tns"."oid" = "typ"."typnamespace" + + LEFT JOIN "information_schema"."columns" AS "c" ON "c"."table_schema" = 
"ns"."nspname" + AND "c"."table_name" = "cls"."relname" + AND "c"."column_name" = "att"."attname" + LEFT JOIN "pg_catalog"."pg_constraint" AS "pk_con" ON "pk_con"."contype" = 'p' + AND "pk_con"."conrelid" = "cls"."oid" AND "att"."attnum" = ANY ("pk_con"."conkey") LEFT JOIN "pg_catalog"."pg_constraint" AS "fk_con" ON "fk_con"."contype" = 'f' diff --git a/packages/cli/src/actions/pull/provider/provider.ts b/packages/cli/src/actions/pull/provider/provider.ts index c03c39fcd..c04255d12 100644 --- a/packages/cli/src/actions/pull/provider/provider.ts +++ b/packages/cli/src/actions/pull/provider/provider.ts @@ -12,6 +12,8 @@ export interface IntrospectedTable { columns: { name: string; datatype: string; + length: number | null; + precision: number | null; datatype_schema: string; foreign_key_schema: string | null; foreign_key_table: string | null; diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index 160a3096e..87d6e0588 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -189,6 +189,8 @@ export const sqlite: IntrospectionProvider = { columns.push({ name: c.name, datatype: c.type || '', + length: null, + precision: null, datatype_schema: schema, foreign_key_schema: fk?.foreign_key_schema ?? null, foreign_key_table: fk?.foreign_key_table ?? null, diff --git a/packages/cli/src/actions/pull/utils.ts b/packages/cli/src/actions/pull/utils.ts index 33a6ace30..9f05219c0 100644 --- a/packages/cli/src/actions/pull/utils.ts +++ b/packages/cli/src/actions/pull/utils.ts @@ -9,6 +9,7 @@ import { isInvocationExpr, type Attribute, type Model, + StringLiteral, } from '@zenstackhq/language/ast'; import { getStringLiteral } from '@zenstackhq/language/utils'; import type { DataSourceProviderType } from '@zenstackhq/sdk/schema'; @@ -61,13 +62,39 @@ export function getDatasource(model: Model) { }; } -export function getDbName(decl: AbstractDeclaration | DataField | EnumField): string { +export function getDbName(decl: AbstractDeclaration | DataField | EnumField, includeSchema: boolean = false): string { if (!('attributes' in decl)) return decl.name; + + const schemaAttr = decl.attributes.find((a) => a.decl.ref?.name === '@@schema'); + const schemaAttrValue = schemaAttr?.args[0]?.value; + let schema: string; + if (schemaAttrValue?.$type !== 'StringLiteral') schema = 'public'; + if (!schemaAttr) schema = 'public'; + else schema = (schemaAttr.args[0]?.value as any)?.value as string; + + const formatName = (name: string) => `${schema && includeSchema ? 
`${schema}.` : ''}${name}`; + const nameAttr = decl.attributes.find((a) => a.decl.ref?.name === '@@map' || a.decl.ref?.name === '@map'); - if (!nameAttr) return decl.name; + if (!nameAttr) return formatName(decl.name); const attrValue = nameAttr.args[0]?.value; - if (attrValue?.$type !== 'StringLiteral') return decl.name; + if (attrValue?.$type !== 'StringLiteral') return formatName(decl.name); + + return formatName(attrValue.value); +} + +export function getRelationFkName(decl: DataField): string | undefined { + const relationAttr = decl?.attributes.find((a) => a.decl.ref?.name === '@relation'); + const schemaAttrValue = relationAttr?.args.find((a) => a.name === 'map')?.value as StringLiteral; + return schemaAttrValue?.value; +} + +export function getDbSchemaName(decl: DataModel | Enum): string { + const schemaAttr = decl.attributes.find((a) => a.decl.ref?.name === '@@schema'); + if (!schemaAttr) return 'public'; + const attrValue = schemaAttr.args[0]?.value; + + if (attrValue?.$type !== 'StringLiteral') return 'public'; return attrValue.value; } diff --git a/packages/language/res/stdlib.zmodel b/packages/language/res/stdlib.zmodel index d0c3c0003..82cd78362 100644 --- a/packages/language/res/stdlib.zmodel +++ b/packages/language/res/stdlib.zmodel @@ -120,7 +120,7 @@ function dbgenerated(expr: String?): Any { /** * Checks if the field value contains the search string. By default, the search is case-sensitive, and * "LIKE" operator is used to match. If `caseInSensitive` is true, "ILIKE" operator is used if - * supported, otherwise it still falls back to "LIKE" and delivers whatever the database's + * supported, otherwise it still falls back to "LIKE" and delivers whatever the database's * behavior is. */ function contains(field: String, search: String, caseInSensitive: Boolean?): Boolean { @@ -135,7 +135,7 @@ function contains(field: String, search: String, caseInSensitive: Boolean?): Boo /** * Checks the field value starts with the search string. By default, the search is case-sensitive, and * "LIKE" operator is used to match. If `caseInSensitive` is true, "ILIKE" operator is used if - * supported, otherwise it still falls back to "LIKE" and delivers whatever the database's + * supported, otherwise it still falls back to "LIKE" and delivers whatever the database's * behavior is. */ function startsWith(field: String, search: String, caseInSensitive: Boolean?): Boolean { @@ -144,7 +144,7 @@ function startsWith(field: String, search: String, caseInSensitive: Boolean?): B /** * Checks if the field value ends with the search string. By default, the search is case-sensitive, and * "LIKE" operator is used to match. If `caseInSensitive` is true, "ILIKE" operator is used if - * supported, otherwise it still falls back to "LIKE" and delivers whatever the database's + * supported, otherwise it still falls back to "LIKE" and delivers whatever the database's * behavior is. */ function endsWith(field: String, search: String, caseInSensitive: Boolean?): Boolean { diff --git a/packages/language/src/zmodel-code-generator.ts b/packages/language/src/zmodel-code-generator.ts index 273a4b00e..c6059ebe6 100644 --- a/packages/language/src/zmodel-code-generator.ts +++ b/packages/language/src/zmodel-code-generator.ts @@ -100,7 +100,10 @@ export class ZModelCodeGenerator { @gen(Model) private _generateModel(ast: Model) { - return `${ast.imports.map((d) => this.generate(d)).join('\n')}${ast.imports.length > 0 ? 
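// [Editor's illustrative sketch] getDbName above resolves the physical name of a declaration:
// an explicit @@map/@map value wins over the declared name, and the @@schema value (defaulting
// to "public") is prepended when includeSchema is requested. The same precedence over plain data;
// NamedSketch and dbNameOf are illustrative names only.
interface NamedSketch {
    name: string;
    map?: string;
    schema?: string;
}

function dbNameOf(decl: NamedSketch, includeSchema = false): string {
    const schema = decl.schema ?? 'public';
    const physical = decl.map ?? decl.name;
    return includeSchema ? `${schema}.${physical}` : physical;
}
// dbNameOf({ name: 'UserProfile', map: 'user_profiles', schema: 'auth' }, true) -> 'auth.user_profiles'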
'\n\n' : ''}${ast.declarations.map((d) => this.generate(d)).join('\n\n')}`;
+        return `${ast.imports.map((d) => this.generate(d)).join('\n')}${ast.imports.length > 0 ? '\n\n' : ''}${ast.declarations
+            .sort((d) => (d.$type === 'Enum' ? 1 : 0))
+            .map((d) => this.generate(d))
+            .join('\n\n')}`;
     }
 
     @gen(DataSource)

From 716dee599a117d31d173c2d7a4b2315932ee3996 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?=
Date: Wed, 22 Oct 2025 01:00:50 +0200
Subject: [PATCH 17/68] fix: make @@ignore behave as it does in Prisma for models without id or unique fields

---
 packages/language/src/validators/datamodel-validator.ts | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/packages/language/src/validators/datamodel-validator.ts b/packages/language/src/validators/datamodel-validator.ts
index 6c5d18ffd..d2fcd155d 100644
--- a/packages/language/src/validators/datamodel-validator.ts
+++ b/packages/language/src/validators/datamodel-validator.ts
@@ -44,13 +44,15 @@ export default class DataModelValidator implements AstValidator {
         const uniqueFields = allFields.filter((f) => f.attributes.find((attr) => attr.decl.ref?.name === '@unique'));
         const modelLevelIds = getModelIdFields(dm);
         const modelUniqueFields = getModelUniqueFields(dm);
+        const ignore = hasAttribute(dm, '@@ignore');
 
         if (
             !dm.isView &&
             idFields.length === 0 &&
             modelLevelIds.length === 0 &&
             uniqueFields.length === 0 &&
-            modelUniqueFields.length === 0
+            modelUniqueFields.length === 0 &&
+            !ignore
         ) {
             accept(
                 'error',

From 420fe25ecf376749a27b478b0c00f455c1b8ff53 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?=
Date: Wed, 22 Oct 2025 01:21:31 +0200
Subject: [PATCH 18/68] fix: lint fix

---
 packages/cli/src/actions/pull/index.ts | 13 ++++++-------
 1 file changed, 6 insertions(+), 7 deletions(-)

diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts
index c857fe16e..1939b9fca 100644
--- a/packages/cli/src/actions/pull/index.ts
+++ b/packages/cli/src/actions/pull/index.ts
@@ -1,14 +1,13 @@
 import type { ZModelServices } from '@zenstackhq/language';
-import { isEnum, type DataField, type DataModel, type Enum, type Model, Attribute } from '@zenstackhq/language/ast';
+import { Attribute, isEnum, type DataField, type DataModel, type Enum, type Model } from '@zenstackhq/language/ast';
 import {
-    DataFieldFactory,
-    DataModelFactory,
-    EnumFactory,
-    ModelFactory,
-    DataFieldAttributeFactory,
+    DataFieldAttributeFactory,
+    DataFieldFactory,
+    DataModelFactory,
+    EnumFactory
 } from '@zenstackhq/language/factory';
 import type { PullOptions } from '../db';
-import type { IntrospectedEnum, IntrospectedTable, IntrospectionProvider, Cascade } from './provider';
+import type { Cascade, IntrospectedEnum, IntrospectedTable, IntrospectionProvider } from './provider';
 import { getAttributeRef, getDbName, getEnumRef } from './utils';
 
 export function syncEnums({

From 26e14c0cf957bb988a55bba9551ea2cb37b60691 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?=
Date: Wed, 22 Oct 2025 01:47:06 +0200
Subject: [PATCH 19/68] feat: make all format options configurable

---
 packages/cli/src/actions/db.ts         | 14 +++++-----
 packages/cli/src/actions/pull/index.ts | 36 ++++++++++++--------------
 packages/cli/src/actions/pull/utils.ts |  2 +-
 packages/cli/src/index.ts              | 17 ++++++++++++
 4 files changed, 43 insertions(+), 26 deletions(-)

diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts
index 8da323196..e48beb23f 100644
--- a/packages/cli/src/actions/db.ts
+++ 
b/packages/cli/src/actions/db.ts @@ -17,10 +17,13 @@ type PushOptions = { export type PullOptions = { schema?: string; - out?: string; - naming?: 'pascal' | 'camel' | 'snake' | 'kebab' | 'none'; - alwaysMap?: boolean; excludeSchemas?: string[]; + out?: string; + modelCasing: 'pascal' | 'camel' | 'snake' | 'kebab' | 'none'; + fieldCasing: 'pascal' | 'camel' | 'snake' | 'kebab' | 'none'; + alwaysMap: boolean; + quote: 'single' | 'double'; + indent: number; }; /** @@ -281,9 +284,8 @@ async function runPull(options: PullOptions) { } const generator = new ZModelCodeGenerator({ - //TODO: make configurable - quote: 'double', - indent: 2, + quote: options.quote, + indent: options.indent, }); if (options.out) { diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 1939b9fca..4bc7dd14d 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -1,10 +1,10 @@ import type { ZModelServices } from '@zenstackhq/language'; import { Attribute, isEnum, type DataField, type DataModel, type Enum, type Model } from '@zenstackhq/language/ast'; import { - DataFieldAttributeFactory, - DataFieldFactory, - DataModelFactory, - EnumFactory + DataFieldAttributeFactory, + DataFieldFactory, + DataModelFactory, + EnumFactory, } from '@zenstackhq/language/factory'; import type { PullOptions } from '../db'; import type { Cascade, IntrospectedEnum, IntrospectedTable, IntrospectionProvider } from './provider'; @@ -22,10 +22,10 @@ export function syncEnums({ options: PullOptions; }) { for (const dbEnum of dbEnums) { - const { modified, name } = resolveNameCasing(options, dbEnum.enum_type); + const { modified, name } = resolveNameCasing(options.modelCasing, dbEnum.enum_type); if (modified) console.log(`Mapping enum ${dbEnum.enum_type} to ${name}`); const factory = new EnumFactory().setName(name); - if (modified) + if (modified || options.alwaysMap) factory.addAttribute((builder) => builder .setDecl(getAttributeRef('@@map', services)) @@ -33,10 +33,10 @@ export function syncEnums({ ); dbEnum.values.forEach((v) => { - const { name, modified } = resolveNameCasing(options, v); + const { name, modified } = resolveNameCasing(options.fieldCasing, v); factory.addField((builder) => { builder.setName(name); - if (modified) + if (modified || options.alwaysMap) builder.addAttribute((builder) => builder .setDecl(getAttributeRef('@map', services)) @@ -64,10 +64,11 @@ export function syncEnums({ } } -function resolveNameCasing(options: PullOptions, originalName: string) { +function resolveNameCasing(casing: 'pascal' | 'camel' | 'snake' | 'kebab' | 'none', originalName: string) { let name = originalName; + const fieldPrefix = /[0-9]/g.test(name.charAt(0)) ? '_' : ''; - switch (options.naming) { + switch (casing) { case 'pascal': name = toPascalCase(originalName); break; @@ -83,8 +84,8 @@ function resolveNameCasing(options: PullOptions, originalName: string) { } return { - modified: options.alwaysMap ? 
true : name !== originalName, - name, + modified: name !== originalName || fieldPrefix !== '', + name: `${fieldPrefix}${name}`, }; } @@ -163,13 +164,13 @@ export function syncTable({ } const relations: Relation[] = []; - const { name, modified } = resolveNameCasing({ ...options, naming: 'pascal' }, table.name); + const { name, modified } = resolveNameCasing(options.modelCasing, table.name); const multiPk = table.columns.filter((c) => c.pk).length > 1; const modelFactory = new DataModelFactory().setName(name).setIsView(table.type === 'view'); modelFactory.setContainer(model); - if (modified) { + if (modified || options.alwaysMap) { modelFactory.addAttribute((builder) => builder.setDecl(tableMapAttribute).addArg((argBuilder) => argBuilder.StringLiteral.setValue(table.name)), ); @@ -195,10 +196,7 @@ export function syncTable({ }); } - const fieldPrefix = /[0-9]/g.test(column.name.charAt(0)) ? '_' : ''; - const { name: _name, modified: _modified } = resolveNameCasing(options, column.name); - const name = `${fieldPrefix}${_name}`; - const modified = fieldPrefix !== '' || _modified; + const { name, modified } = resolveNameCasing(options.fieldCasing, column.name); const builtinType = provider.getBuiltinType(column.datatype); @@ -252,7 +250,7 @@ export function syncTable({ return b; }); } - if (modified) { + if (modified || options.alwaysMap) { builder.addAttribute((ab) => ab.setDecl(fieldMapAttribute).addArg((ab) => ab.StringLiteral.setValue(column.name)), ); diff --git a/packages/cli/src/actions/pull/utils.ts b/packages/cli/src/actions/pull/utils.ts index 9f05219c0..2b3b9b403 100644 --- a/packages/cli/src/actions/pull/utils.ts +++ b/packages/cli/src/actions/pull/utils.ts @@ -9,7 +9,7 @@ import { isInvocationExpr, type Attribute, type Model, - StringLiteral, + type StringLiteral, } from '@zenstackhq/language/ast'; import { getStringLiteral } from '@zenstackhq/language/utils'; import type { DataSourceProviderType } from '@zenstackhq/sdk/schema'; diff --git a/packages/cli/src/index.ts b/packages/cli/src/index.ts index 7905676f9..81d7b0d30 100644 --- a/packages/cli/src/index.ts +++ b/packages/cli/src/index.ts @@ -150,6 +150,23 @@ function createProgram() { .addOption(noVersionCheckOption) .addOption(new Option('-e, --exclude-schemas ', 'exclude specific schemas from introspection')) .addOption(new Option('-o, --out ', 'add custom output path for the introspected schema')) + .addOption( + new Option('--model-casing ', 'set the casing of generated models').default( + 'none', + ), + ) + .addOption( + new Option('--field-casing ', 'set the casing of generated fields').default( + 'none', + ), + ) + .addOption( + new Option('--always-map', 'always add @map and @@map attributes to models and fields').default(false), + ) + .addOption( + new Option('--quote ', 'set the quote style of generated schema files').default('double'), + ) + .addOption(new Option('--indent ', 'set the indentation of the generated schema files').default(2)) .action((options) => dbAction('pull', options)); dbCommand From afa2142e4df155f9f9c866137475986670f8fec9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 22 Oct 2025 01:55:41 +0200 Subject: [PATCH 20/68] fix: lint fix --- packages/cli/src/actions/pull/index.ts | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 4bc7dd14d..81a7e56ea 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -55,7 +55,8 @@ 
export function syncEnums({ .addArg((a) => a.StringLiteral.setValue(dbEnum.schema_name)), ); } - } catch (_error: unknown) { + } catch (error: any) { + if (error?.message !== `Declaration not found: @@schema`) throw error; //Waiting to support multi-schema //TODO: remove catch after multi-schema support is implemented } @@ -362,7 +363,8 @@ export function syncTable({ b.setDecl(getAttributeRef('@@schema', services)).addArg((a) => a.StringLiteral.setValue(table.schema)), ); } - } catch (_error: unknown) { + } catch (error: any) { + if (error?.message !== `Declaration not found: @@schema`) throw error; //Waiting to support multi-schema //TODO: remove catch after multi-schema support is implemented } From 3b40b40af886f692b7ffbd526351949e90fbf03d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 22 Oct 2025 02:53:37 +0200 Subject: [PATCH 21/68] feat: Handle the database type mapping --- packages/cli/src/actions/pull/index.ts | 23 ++++++++++++++--- .../src/actions/pull/provider/postgresql.ts | 25 ++++++++++++++++++- .../cli/src/actions/pull/provider/provider.ts | 1 + .../cli/src/actions/pull/provider/sqlite.ts | 24 ++++++++++++++++++ 4 files changed, 68 insertions(+), 5 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 81a7e56ea..75f54b449 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -1,5 +1,13 @@ import type { ZModelServices } from '@zenstackhq/language'; -import { Attribute, isEnum, type DataField, type DataModel, type Enum, type Model } from '@zenstackhq/language/ast'; +import { + Attribute, + isEnum, + type DataField, + type DataModel, + type Enum, + type Model, + type BuiltinType, +} from '@zenstackhq/language/ast'; import { DataFieldAttributeFactory, DataFieldFactory, @@ -260,9 +268,16 @@ export function syncTable({ const dbAttr = services.shared.workspace.IndexManager.allElements('Attribute').find( (d) => d.name.toLowerCase() === `@db.${column.datatype.toLowerCase()}`, )?.node as Attribute | undefined; - //TODO: exclude default types like text in postgres - //because Zenstack string = text in postgres so unnecessary to map to default types - if (dbAttr && !['text'].includes(column.datatype)) { + + const defaultDatabaseType = provider.getDefaultDatabaseType(builtinType.type as BuiltinType); + + if ( + dbAttr && + defaultDatabaseType && + (defaultDatabaseType.type !== column.datatype || + (defaultDatabaseType.precisition && + defaultDatabaseType.precisition !== (column.length || column.precision))) + ) { const dbAttrFactory = new DataFieldAttributeFactory().setDecl(dbAttr); if (column.length || column.precision) dbAttrFactory.addArg((a) => a.NumberLiteral.setValue(column.length! 
|| column.precision!)); diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index 30bb2602e..8e1457a3b 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -2,6 +2,7 @@ import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; import { Client } from 'pg'; import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; +import type { BuiltinType } from '@zenstackhq/language/ast'; export const postgresql: IntrospectionProvider = { getBuiltinType(type) { @@ -78,6 +79,28 @@ export const postgresql: IntrospectionProvider = { tables, }; }, + getDefaultDatabaseType(type: BuiltinType) { + switch (type) { + case 'String': + return { type: 'text' }; + case 'Boolean': + return { type: 'boolean' }; + case 'Int': + return { type: 'integer' }; + case 'BigInt': + return { type: 'bigint' }; + case 'Float': + return { type: 'double precision' }; + case 'Decimal': + return { type: 'decimal' }; + case 'DateTime': + return { type: 'timestamp', precisition: 3 }; + case 'Json': + return { type: 'jsonb' }; + case 'Bytes': + return { type: 'bytea' }; + } + }, getDefaultValue({ defaultValue, fieldName, services, enums }) { const val = defaultValue.trim(); const factories: DataFieldAttributeFactory[] = []; @@ -276,7 +299,7 @@ SELECT ), '[]' ) AS "options" - + FROM "pg_catalog"."pg_attribute" AS "att" INNER JOIN "pg_catalog"."pg_type" AS "typ" ON "typ"."oid" = "att"."atttypid" diff --git a/packages/cli/src/actions/pull/provider/provider.ts b/packages/cli/src/actions/pull/provider/provider.ts index c04255d12..0bdc2d671 100644 --- a/packages/cli/src/actions/pull/provider/provider.ts +++ b/packages/cli/src/actions/pull/provider/provider.ts @@ -64,6 +64,7 @@ export interface IntrospectionProvider { type: BuiltinType | 'Unsupported'; isArray: boolean; }; + getDefaultDatabaseType(type: BuiltinType): { precisition?: number; type: string } | undefined; getDefaultValue(args: { fieldName: string; defaultValue: string; diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index 87d6e0588..5c3397600 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -1,3 +1,4 @@ +import type { BuiltinType } from '@zenstackhq/language/ast'; import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; // Note: We dynamically import better-sqlite3 inside the async function to avoid @@ -71,6 +72,29 @@ export const sqlite: IntrospectionProvider = { } }, + getDefaultDatabaseType(type: BuiltinType) { + switch (type) { + case 'String': + return { type: 'TEXT' }; + case 'Boolean': + return { type: 'INTEGER' }; + case 'Int': + return { type: 'INTEGER' }; + case 'BigInt': + return { type: 'INTEGER' }; + case 'Float': + return { type: 'REAL' }; + case 'Decimal': + return { type: 'DECIMAL' }; + case 'DateTime': + return { type: 'NUMERIC' }; + case 'Json': + return { type: 'JSONB' }; + case 'Bytes': + return { type: 'BLOB' }; + } + }, + async introspect(connectionString: string): Promise { const SQLite = (await import('better-sqlite3')).default; const db = new SQLite(connectionString, { readonly: true }); From c2b84dd0457655ead7ecb4e929d82e201902c0e3 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 12 Nov 2025 21:43:46 +0100 Subject: [PATCH 22/68] fix: catch up with feature updates - improve code styling - enable schema support for db pull --- packages/cli/package.json | 1 + packages/cli/src/actions/action-utils.ts | 39 ++++++++++--------- packages/cli/src/actions/db.ts | 24 +++++++----- packages/cli/src/actions/pull/index.ts | 38 +++++++----------- .../cli/src/actions/pull/provider/index.ts | 4 +- packages/cli/src/actions/pull/utils.ts | 18 ++++++++- packages/cli/src/index.ts | 1 - 7 files changed, 71 insertions(+), 54 deletions(-) diff --git a/packages/cli/package.json b/packages/cli/package.json index ece5d544d..7ab08561f 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -38,6 +38,7 @@ "dependencies": { "@dotenvx/dotenvx": "^1.51.0", "@zenstackhq/common-helpers": "workspace:*", + "@zenstackhq/schema": "workspace:*", "@zenstackhq/language": "workspace:*", "@zenstackhq/orm": "workspace:*", "@zenstackhq/sdk": "workspace:*", diff --git a/packages/cli/src/actions/action-utils.ts b/packages/cli/src/actions/action-utils.ts index 033cbdd48..86d55baa6 100644 --- a/packages/cli/src/actions/action-utils.ts +++ b/packages/cli/src/actions/action-utils.ts @@ -1,5 +1,5 @@ -import { loadDocument } from '@zenstackhq/language'; -import { isDataSource } from '@zenstackhq/language/ast'; +import { type ZModelServices, loadDocument } from '@zenstackhq/language'; +import { type Model, isDataSource } from '@zenstackhq/language/ast'; import { PrismaSchemaGenerator } from '@zenstackhq/sdk'; import colors from 'colors'; import fs from 'node:fs'; @@ -41,8 +41,22 @@ export function getSchemaFile(file?: string) { } } -export async function loadSchemaDocument(schemaFile: string) { - const loadResult = await loadDocument(schemaFile); +export async function loadSchemaDocument( + schemaFile: string, + opts?: { keepImports?: boolean; returnServices?: false }, +): Promise; +export async function loadSchemaDocument( + schemaFile: string, + opts: { returnServices: true; keepImports?: boolean }, +): Promise<{ model: Model; services: ZModelServices }>; +export async function loadSchemaDocument( + schemaFile: string, + opts: { returnServices?: boolean; keepImports?: boolean } = {}, +) { + const returnServices = opts.returnServices || false; + const keepImports = opts.keepImports || false; + + const loadResult = await loadDocument(schemaFile, [], keepImports); if (!loadResult.success) { loadResult.errors.forEach((err) => { console.error(colors.red(err)); @@ -52,21 +66,10 @@ export async function loadSchemaDocument(schemaFile: string) { loadResult.warnings.forEach((warn) => { console.warn(colors.yellow(warn)); }); - return loadResult.model; -} -export async function loadSchemaDocumentWithServices(schemaFile: string) { - const loadResult = await loadDocument(schemaFile, [], true); - if (!loadResult.success) { - loadResult.errors.forEach((err) => { - console.error(colors.red(err)); - }); - throw new CliError('Schema contains errors. 
See above for details.'); - } - loadResult.warnings.forEach((warn) => { - console.warn(colors.yellow(warn)); - }); - return { services: loadResult.services, model: loadResult.model }; + if (returnServices) return { model: loadResult.model, services: loadResult.services }; + + return loadResult.model; } export function handleSubProcessError(err: unknown) { diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index e48beb23f..77f758ea0 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -1,5 +1,6 @@ -import { Model, Enum, DataModel, DataField } from '@zenstackhq/language/ast'; -import { ZModelCodeGenerator } from '@zenstackhq/sdk'; +import { config } from '@dotenvx/dotenvx'; +import { ZModelCodeGenerator } from '@zenstackhq/language'; +import { type DataField, DataModel, Enum, type Model } from '@zenstackhq/language/ast'; import fs from 'node:fs'; import path from 'node:path'; import { execPrisma } from '../utils/exec-utils'; @@ -7,7 +8,6 @@ import { generateTempPrismaSchema, getSchemaFile, handleSubProcessError, require import { syncEnums, syncRelation, syncTable, type Relation } from './pull'; import { providers } from './pull/provider'; import { getDatasource, getDbName, getRelationFkName } from './pull/utils'; -import { config } from '@dotenvx/dotenvx'; type PushOptions = { schema?: string; @@ -17,7 +17,6 @@ type PushOptions = { export type PullOptions = { schema?: string; - excludeSchemas?: string[]; out?: string; modelCasing: 'pascal' | 'camel' | 'snake' | 'kebab' | 'none'; fieldCasing: 'pascal' | 'camel' | 'snake' | 'kebab' | 'none'; @@ -74,7 +73,7 @@ async function runPush(options: PushOptions) { async function runPull(options: PullOptions) { try { const schemaFile = getSchemaFile(options.schema); - const { model, services } = await loadSchemaDocumentWithServices(schemaFile); + const { model, services } = await loadSchemaDocument(schemaFile, { returnServices: true }); config(); const SUPPORTED_PROVIDERS = ['sqlite', 'postgresql']; const datasource = getDatasource(model); @@ -94,8 +93,8 @@ async function runPull(options: PullOptions) { } const { enums: allEnums, tables: allTables } = await provider.introspect(datasource.url); - const enums = allEnums.filter((e) => !options.excludeSchemas?.includes(e.schema_name)); - const tables = allTables.filter((t) => !options.excludeSchemas?.includes(t.schema)); + const enums = allEnums.filter((e) => datasource.schemas.includes(e.schema_name)); + const tables = allTables.filter((t) => datasource.schemas.includes(t.schema)); const newModel: Model = { $type: 'Model', @@ -106,11 +105,18 @@ async function runPull(options: PullOptions) { imports: [], }; - syncEnums({ dbEnums: enums, model: newModel, services, options }); + syncEnums({ dbEnums: enums, model: newModel, services, options, defaultSchema: datasource.defaultSchema }); const resolvedRelations: Relation[] = []; for (const table of tables) { - const relations = syncTable({ table, model: newModel, provider, services, options }); + const relations = syncTable({ + table, + model: newModel, + provider, + services, + options, + defaultSchema: datasource.defaultSchema, + }); resolvedRelations.push(...relations); } diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 75f54b449..19d86a22b 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -1,7 +1,7 @@ import type { ZModelServices } from '@zenstackhq/language'; import { - Attribute, isEnum, + type 
Attribute, type DataField, type DataModel, type Enum, @@ -23,11 +23,13 @@ export function syncEnums({ model, options, services, + defaultSchema, }: { dbEnums: IntrospectedEnum[]; model: Model; services: ZModelServices; options: PullOptions; + defaultSchema: string; }) { for (const dbEnum of dbEnums) { const { modified, name } = resolveNameCasing(options.modelCasing, dbEnum.enum_type); @@ -55,18 +57,12 @@ export function syncEnums({ }); }); - try { - if (dbEnum.schema_name && dbEnum.schema_name !== '' && dbEnum.schema_name !== 'public') { - factory.addAttribute((b) => - b - .setDecl(getAttributeRef('@@schema', services)) - .addArg((a) => a.StringLiteral.setValue(dbEnum.schema_name)), - ); - } - } catch (error: any) { - if (error?.message !== `Declaration not found: @@schema`) throw error; - //Waiting to support multi-schema - //TODO: remove catch after multi-schema support is implemented + if (dbEnum.schema_name && dbEnum.schema_name !== '' && dbEnum.schema_name !== defaultSchema) { + factory.addAttribute((b) => + b + .setDecl(getAttributeRef('@@schema', services)) + .addArg((a) => a.StringLiteral.setValue(dbEnum.schema_name)), + ); } model.declarations.push(factory.get({ $container: model })); @@ -143,12 +139,14 @@ export function syncTable({ table, services, options, + defaultSchema, }: { table: IntrospectedTable; model: Model; provider: IntrospectionProvider; services: ZModelServices; options: PullOptions; + defaultSchema: string; }) { const idAttribute = getAttributeRef('@id', services); const modelIdAttribute = getAttributeRef('@@id', services); @@ -372,16 +370,10 @@ export function syncTable({ ); }); - try { - if (table.schema && table.schema !== '' && table.schema !== 'public') { - modelFactory.addAttribute((b) => - b.setDecl(getAttributeRef('@@schema', services)).addArg((a) => a.StringLiteral.setValue(table.schema)), - ); - } - } catch (error: any) { - if (error?.message !== `Declaration not found: @@schema`) throw error; - //Waiting to support multi-schema - //TODO: remove catch after multi-schema support is implemented + if (table.schema && table.schema !== '' && table.schema !== defaultSchema) { + modelFactory.addAttribute((b) => + b.setDecl(getAttributeRef('@@schema', services)).addArg((a) => a.StringLiteral.setValue(table.schema)), + ); } model.declarations.push(modelFactory.node); diff --git a/packages/cli/src/actions/pull/provider/index.ts b/packages/cli/src/actions/pull/provider/index.ts index 4c9a0fe8d..e712ac983 100644 --- a/packages/cli/src/actions/pull/provider/index.ts +++ b/packages/cli/src/actions/pull/provider/index.ts @@ -1,9 +1,11 @@ +import type { DataSourceProviderType } from '@zenstackhq/schema'; export * from './provider'; import { postgresql } from './postgresql'; +import type { IntrospectionProvider } from './provider'; import { sqlite } from './sqlite'; -export const providers = { +export const providers: Record = { postgresql, sqlite, }; diff --git a/packages/cli/src/actions/pull/utils.ts b/packages/cli/src/actions/pull/utils.ts index 2b3b9b403..05aa31acc 100644 --- a/packages/cli/src/actions/pull/utils.ts +++ b/packages/cli/src/actions/pull/utils.ts @@ -11,8 +11,8 @@ import { type Model, type StringLiteral, } from '@zenstackhq/language/ast'; -import { getStringLiteral } from '@zenstackhq/language/utils'; -import type { DataSourceProviderType } from '@zenstackhq/sdk/schema'; +import { getLiteralArray, getStringLiteral } from '@zenstackhq/language/utils'; +import type { DataSourceProviderType } from '@zenstackhq/schema'; import type { Reference } from 
'langium'; export function getAttribute(model: Model, attrName: string) { @@ -53,12 +53,26 @@ export function getDatasource(model: Model) { throw new Error('The url field must be a string literal or an env().'); } + const defaultSchemaField = datasource.fields.find((f) => f.name === 'defaultSchema'); + const defaultSchema = (defaultSchemaField && getStringLiteral(defaultSchemaField.value)) || 'public'; + + const schemasField = datasource.fields.find((f) => f.name === 'schemas'); + const schemas = + (schemasField && + getLiteralArray(schemasField.value) + ?.map(getStringLiteral) + .filter((s) => s !== undefined)) || + []; + return { name: datasource.name, provider: getStringLiteral( datasource.fields.find((f) => f.name === 'provider')?.value, ) as DataSourceProviderType, url, + defaultSchema, + schemas, + allSchemas: [defaultSchema, ...schemas], }; } diff --git a/packages/cli/src/index.ts b/packages/cli/src/index.ts index 81d7b0d30..e454e2a2a 100644 --- a/packages/cli/src/index.ts +++ b/packages/cli/src/index.ts @@ -148,7 +148,6 @@ function createProgram() { .description('Introspect your database.') .addOption(schemaOption) .addOption(noVersionCheckOption) - .addOption(new Option('-e, --exclude-schemas ', 'exclude specific schemas from introspection')) .addOption(new Option('-o, --out ', 'add custom output path for the introspected schema')) .addOption( new Option('--model-casing ', 'set the casing of generated models').default( From d18102e92fae491d8569ebf2c7791967d60e933a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 21 Nov 2025 01:11:08 +0100 Subject: [PATCH 23/68] fix: add sqlite e2e test and fix some bugs --- packages/cli/package.json | 1 + packages/cli/src/actions/db.ts | 178 +++++++++--------- packages/cli/src/actions/pull/index.ts | 159 +++++++++------- .../src/actions/pull/provider/postgresql.ts | 10 +- .../cli/src/actions/pull/provider/provider.ts | 3 + .../cli/src/actions/pull/provider/sqlite.ts | 137 +++++++------- packages/cli/src/actions/pull/utils.ts | 5 + packages/cli/src/index.ts | 2 +- packages/cli/src/test.ts | 9 + packages/cli/test/db/pull.test.ts | 96 ++++++++++ packages/cli/test/db/push.test.ts | 18 ++ pnpm-lock.yaml | 123 ++++++------ 12 files changed, 454 insertions(+), 287 deletions(-) create mode 100644 packages/cli/src/test.ts create mode 100644 packages/cli/test/db/pull.test.ts create mode 100644 packages/cli/test/db/push.test.ts diff --git a/packages/cli/package.json b/packages/cli/package.json index 7ab08561f..4d8411162 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -38,6 +38,7 @@ "dependencies": { "@dotenvx/dotenvx": "^1.51.0", "@zenstackhq/common-helpers": "workspace:*", + "@zenstackhq/language": "workspace:*", "@zenstackhq/schema": "workspace:*", "@zenstackhq/language": "workspace:*", "@zenstackhq/orm": "workspace:*", diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 77f758ea0..6ea1bc08f 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -74,10 +74,11 @@ async function runPull(options: PullOptions) { try { const schemaFile = getSchemaFile(options.schema); const { model, services } = await loadSchemaDocument(schemaFile, { returnServices: true }); - config(); + config({ + ignore: ['MISSING_ENV_FILE'], + }); const SUPPORTED_PROVIDERS = ['sqlite', 'postgresql']; const datasource = getDatasource(model); - if (!datasource) { throw new Error('No datasource found in the schema.'); } @@ -91,10 +92,14 @@ async function runPull(options: 
PullOptions) {
     try {
         const schemaFile = getSchemaFile(options.schema);
         const { model, services } = await loadSchemaDocument(schemaFile, { returnServices: true });
-        config();
+        config({
+            ignore: ['MISSING_ENV_FILE'],
+        });
         const SUPPORTED_PROVIDERS = ['sqlite', 'postgresql'];
         const datasource = getDatasource(model);
-
         if (!datasource) {
             throw new Error('No datasource found in the schema.');
         }
@@ -91,10 +92,14 @@ async function runPull(options: PullOptions) {
         if (!provider) {
             throw new Error(`No introspection provider found for: ${datasource.provider}`);
         }
-
+        console.log('Starting to introspect the database...');
         const { enums: allEnums, tables: allTables } = await provider.introspect(datasource.url);
-        const enums = allEnums.filter((e) => datasource.schemas.includes(e.schema_name));
-        const tables = allTables.filter((t) => datasource.schemas.includes(t.schema));
+        const enums = provider.isSupportedFeature('Schema')
+            ? allEnums.filter((e) => datasource.schemas.includes(e.schema_name))
+            : allEnums;
+        const tables = provider.isSupportedFeature('Schema')
+            ? allTables.filter((t) => datasource.schemas.includes(t.schema))
+            : allTables;
 
         const newModel: Model = {
             $type: 'Model',
@@ -104,8 +109,15 @@ async function runPull(options: PullOptions) {
             declarations: [...model.declarations.filter((d) => ['DataSource'].includes(d.$type))],
             imports: [],
         };
-
-        syncEnums({ dbEnums: enums, model: newModel, services, options, defaultSchema: datasource.defaultSchema });
+        syncEnums({
+            dbEnums: enums,
+            model: newModel,
+            services,
+            options,
+            defaultSchema: datasource.defaultSchema,
+            oldModel: model,
+            provider,
+        });
 
         const resolvedRelations: Relation[] = [];
         for (const table of tables) {
@@ -116,21 +128,23 @@ async function runPull(options: PullOptions) {
                 services,
                 options,
                 defaultSchema: datasource.defaultSchema,
+                oldModel: model,
             });
             resolvedRelations.push(...relations);
         }
-
+        // sync relation fields
         for (const relation of resolvedRelations) {
             const simmilarRelations = resolvedRelations.filter((rr) => {
                 return (
-                    (rr.schema === relation.schema &&
+                    rr !== relation &&
+                    ((rr.schema === relation.schema &&
                         rr.table === relation.table &&
                         rr.references.schema === relation.references.schema &&
                         rr.references.table === relation.references.table) ||
-                    (rr.schema === relation.references.schema &&
-                        rr.column === relation.references.column &&
-                        rr.references.schema === relation.schema &&
-                        rr.references.table === relation.table)
+                        (rr.schema === relation.references.schema &&
+                            rr.column === relation.references.column &&
+                            rr.references.schema === relation.schema &&
+                            rr.references.table === relation.table))
                 );
             }).length;
             const selfRelation =
@@ -151,6 +165,7 @@ async function runPull(options: PullOptions) {
             .toArray();
         const docsSet = new Set(docs.map((d) => d.uri.toString()));
 
+        //Delete models
         services.shared.workspace.IndexManager.allElements('DataModel', docsSet)
             .filter(
                 (declaration) =>
@@ -162,32 +177,35 @@ async function runPull(options: PullOptions) {
             model.declarations.splice(index, 1);
             console.log(`Delete model ${decl.name}`);
         });
-        services.shared.workspace.IndexManager.allElements('Enum', docsSet)
-            .filter(
-                (declaration) =>
-                    !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node as any)),
-            )
-            .forEach((decl) => {
-                const model = decl.node!.$container as Model;
-                const index = model.declarations.findIndex((d) => d === decl.node);
-                model.declarations.splice(index, 1);
-                console.log(`Delete enum ${decl.name}`);
-            });
+        // Delete Enums
+        if (provider.isSupportedFeature('NativeEnum'))
+            services.shared.workspace.IndexManager.allElements('Enum', docsSet)
+                .filter(
+                    (declaration) =>
+                        !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node as any)),
+                )
+                .forEach((decl) => {
+                    const model = decl.node!.$container as Model;
+                    const index = model.declarations.findIndex((d) => d === decl.node);
+                    model.declarations.splice(index, 1);
+                    console.log(`Delete enum ${decl.name}`);
+                });
+        // 
newModel.declarations
             .filter((d) => [DataModel, Enum].includes(d.$type))
             .forEach((_declaration) => {
-                const declaration = _declaration as DataModel | Enum;
+                const newDataModel = _declaration as DataModel | Enum;
                 const declarations = services.shared.workspace.IndexManager.allElements(
-                    declaration.$type,
+                    newDataModel.$type,
                     docsSet,
                 ).toArray();
-                const originalModel = declarations.find((d) => getDbName(d.node as any) === getDbName(declaration))
+                const originalDataModel = declarations.find((d) => getDbName(d.node as any) === getDbName(newDataModel))
                     ?.node as DataModel | Enum | undefined;
-                if (!originalModel) {
-                    model.declarations.push(declaration);
-                    (declaration as any).$container = model;
-                    declaration.fields.forEach((f) => {
+                if (!originalDataModel) {
+                    model.declarations.push(newDataModel);
+                    (newDataModel as any).$container = model;
+                    newDataModel.fields.forEach((f) => {
                         if (f.$type === 'DataField' && f.type.reference?.ref) {
                             const ref = declarations.find(
                                 (d) => getDbName(d.node as any) === getDbName(f.type.reference!.ref as any),
@@ -198,19 +216,33 @@ async function runPull(options: PullOptions) {
                     return;
                 }
 
-                declaration.fields.forEach((f) => {
-                    const originalField = originalModel.fields.find(
-                        (d) =>
+                newDataModel.fields.forEach((f) => {
+                    const originalFields = originalDataModel.fields.filter((d) => {
+                        return (
                             getDbName(d) === getDbName(f) ||
                             (getRelationFkName(d as any) === getRelationFkName(f as any) &&
                                 !!getRelationFkName(d as any) &&
-                                !!getRelationFkName(f as any)),
-                    );
+                                !!getRelationFkName(f as any)) ||
+                            (f.$type === 'DataField' &&
+                                d.$type === 'DataField' &&
+                                f.type.reference?.ref &&
+                                d.type.reference?.ref &&
+                                getDbName(f.type.reference.ref) === getDbName(d.type.reference.ref))
+                        );
+                    });
+                    if (originalFields.length > 1) {
+                        console.warn(
+                            `Found more than one matching original field, need to tweak the search algorithm. 
${originalDataModel.name}->[${originalFields.map((of) => of.name).join(', ')}](${f.name})`, + ); + return; + } + const originalField = originalFields.at(0); + Object.freeze(originalField); if (!originalField) { - //console.log(`Added field ${f.name} to ${originalModel.name}`); - (f as any).$container = originalModel; - originalModel.fields.push(f as any); + console.log(`Added field ${f.name} to ${originalDataModel.name}`); + (f as any).$container = originalDataModel; + originalDataModel.fields.push(f as any); if (f.$type === 'DataField' && f.type.reference?.ref) { const ref = declarations.find( (d) => getDbName(d.node as any) === getDbName(f.type.reference!.ref as any), @@ -222,66 +254,42 @@ async function runPull(options: PullOptions) { } return; } - - if (originalField.$type === 'DataField') { - const field = f as DataField; - originalField.type = field.type; - if (field.type.reference) { - const ref = declarations.find( - (d) => getDbName(d.node as any) === getDbName(field.type.reference!.ref as any), - )?.node as DataModel | undefined; - if (ref) { - (field.type.reference.$refText as any) = ref.name; - (field.type.reference.ref as any) = ref; - } - } - - (originalField.type.$container as any) = originalField; - } - - f.attributes.forEach((attr) => { - const originalAttribute = originalField.attributes.find( - (d) => d.decl.$refText === attr.decl.$refText, - ); - - if (!originalAttribute) { - //console.log(`Added Attribute ${attr.decl.$refText} to ${f.name}`); - (f as any).$container = originalField; - originalField.attributes.push(attr as any); - return; - } - - originalAttribute.args = attr.args; - attr.args.forEach((a) => { - (a.$container as any) = originalAttribute; - }); - }); - + if (f.name === 'profiles') console.log(f.attributes.length); originalField.attributes - .filter((attr) => !f.attributes.find((d) => d.decl.$refText === attr.decl.$refText)) + .filter( + (attr) => + !f.attributes.find((d) => d.decl.$refText === attr.decl.$refText) && + !['@map', '@@map', '@default', '@updatedAt'].includes(attr.decl.$refText), + ) .forEach((attr) => { const field = attr.$container; const index = field.attributes.findIndex((d) => d === attr); field.attributes.splice(index, 1); - //console.log(`Delete attribute from field:${field.name} ${attr.decl.$refText}`); + console.log(`Delete attribute from field:${field.name} ${attr.decl.$refText}`); }); }); - originalModel.fields + originalDataModel.fields .filter( (f) => - !declaration.fields.find( - (d) => + !newDataModel.fields.find((d) => { + return ( getDbName(d) === getDbName(f) || (getRelationFkName(d as any) === getRelationFkName(f as any) && !!getRelationFkName(d as any) && - !!getRelationFkName(f as any)), - ), + !!getRelationFkName(f as any)) || + (f.$type === 'DataField' && + d.$type === 'DataField' && + f.type.reference?.ref && + d.type.reference?.ref && + getDbName(f.type.reference.ref) === getDbName(d.type.reference.ref)) + ); + }), ) .forEach((f) => { - const model = f.$container; - const index = model.fields.findIndex((d) => d === f); - model.fields.splice(index, 1); - //console.log(`Delete field ${f.name}`); + const _model = f.$container; + const index = _model.fields.findIndex((d) => d === f); + _model.fields.splice(index, 1); + console.log(`Delete field ${f.name}`); }); }); diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 19d86a22b..60c192045 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -15,57 +15,73 @@ import { EnumFactory, } 
from '@zenstackhq/language/factory'; import type { PullOptions } from '../db'; -import type { Cascade, IntrospectedEnum, IntrospectedTable, IntrospectionProvider } from './provider'; +import { type Cascade, type IntrospectedEnum, type IntrospectedTable, type IntrospectionProvider } from './provider'; import { getAttributeRef, getDbName, getEnumRef } from './utils'; export function syncEnums({ dbEnums, model, + oldModel, + provider, options, services, defaultSchema, }: { dbEnums: IntrospectedEnum[]; model: Model; + oldModel: Model; + provider: IntrospectionProvider; services: ZModelServices; options: PullOptions; defaultSchema: string; }) { - for (const dbEnum of dbEnums) { - const { modified, name } = resolveNameCasing(options.modelCasing, dbEnum.enum_type); - if (modified) console.log(`Mapping enum ${dbEnum.enum_type} to ${name}`); - const factory = new EnumFactory().setName(name); - if (modified || options.alwaysMap) - factory.addAttribute((builder) => - builder - .setDecl(getAttributeRef('@@map', services)) - .addArg((argBuilder) => argBuilder.StringLiteral.setValue(dbEnum.enum_type)), - ); + if (provider.isSupportedFeature('NativeEnum')) { + for (const dbEnum of dbEnums) { + const { modified, name } = resolveNameCasing(options.modelCasing, dbEnum.enum_type); + if (modified) console.log(`Mapping enum ${dbEnum.enum_type} to ${name}`); + const factory = new EnumFactory().setName(name); + if (modified || options.alwaysMap) + factory.addAttribute((builder) => + builder + .setDecl(getAttributeRef('@@map', services)) + .addArg((argBuilder) => argBuilder.StringLiteral.setValue(dbEnum.enum_type)), + ); - dbEnum.values.forEach((v) => { - const { name, modified } = resolveNameCasing(options.fieldCasing, v); - factory.addField((builder) => { - builder.setName(name); - if (modified || options.alwaysMap) - builder.addAttribute((builder) => - builder - .setDecl(getAttributeRef('@map', services)) - .addArg((argBuilder) => argBuilder.StringLiteral.setValue(v)), - ); - - return builder; + dbEnum.values.forEach((v) => { + const { name, modified } = resolveNameCasing(options.fieldCasing, v); + factory.addField((builder) => { + builder.setName(name); + if (modified || options.alwaysMap) + builder.addAttribute((builder) => + builder + .setDecl(getAttributeRef('@map', services)) + .addArg((argBuilder) => argBuilder.StringLiteral.setValue(v)), + ); + + return builder; + }); }); - }); - if (dbEnum.schema_name && dbEnum.schema_name !== '' && dbEnum.schema_name !== defaultSchema) { - factory.addAttribute((b) => - b - .setDecl(getAttributeRef('@@schema', services)) - .addArg((a) => a.StringLiteral.setValue(dbEnum.schema_name)), - ); - } + if (dbEnum.schema_name && dbEnum.schema_name !== '' && dbEnum.schema_name !== defaultSchema) { + factory.addAttribute((b) => + b + .setDecl(getAttributeRef('@@schema', services)) + .addArg((a) => a.StringLiteral.setValue(dbEnum.schema_name)), + ); + } - model.declarations.push(factory.get({ $container: model })); + model.declarations.push(factory.get({ $container: model })); + } + } else { + oldModel.declarations + .filter((d) => isEnum(d)) + .forEach((d) => { + const factory = new EnumFactory().setName(d.name); + d.fields.forEach((v) => { + factory.addField((builder) => builder.setName(v.name)); + }); + model.declarations.push(factory.get({ $container: model })); + }); } } @@ -140,9 +156,11 @@ export function syncTable({ services, options, defaultSchema, + oldModel, }: { table: IntrospectedTable; model: Model; + oldModel: Model; provider: IntrospectionProvider; services: 
ZModelServices; options: PullOptions; @@ -182,7 +200,6 @@ export function syncTable({ builder.setDecl(tableMapAttribute).addArg((argBuilder) => argBuilder.StringLiteral.setValue(table.name)), ); } - table.columns.forEach((column) => { if (column.foreign_key_table) { relations.push({ @@ -369,7 +386,6 @@ export function syncTable({ .addArg((argBuilder) => argBuilder.StringLiteral.setValue(index.name), 'map'), ); }); - if (table.schema && table.schema !== '' && table.schema !== defaultSchema) { modelFactory.addAttribute((b) => b.setDecl(getAttributeRef('@@schema', services)).addArg((a) => a.StringLiteral.setValue(table.schema)), @@ -377,7 +393,6 @@ export function syncTable({ } model.declarations.push(modelFactory.node); - return relations; } @@ -427,7 +442,10 @@ export function syncRelation({ const fieldPrefix = /[0-9]/g.test(sourceModel.name.charAt(0)) ? '_' : ''; const relationName = `${relation.table}${simmilarRelations > 1 ? `_${relation.column}` : ''}To${relation.references.table}`; - let sourceFieldName = `${fieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}`; + let sourceFieldName = + simmilarRelations > 0 + ? `${fieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}` + : targetModel.name; if (sourceModel.fields.find((f) => f.name === sourceFieldName)) { sourceFieldName = `${sourceFieldName}To${targetModel.name.charAt(0).toLowerCase()}${targetModel.name.slice(1)}_${relation.references.column}`; @@ -441,47 +459,47 @@ export function syncRelation({ .setOptional(relation.nullable) .setArray(relation.type === 'many') .setReference(targetModel), - ) - .addAttribute((ab) => { - ab.setDecl(relationAttribute); - if (includeRelationName) ab.addArg((ab) => ab.StringLiteral.setValue(relationName)); - ab.addArg((ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(sourceField)), 'fields').addArg( - (ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(targetField)), - 'references', - ); + ); + sourceFieldFactory.addAttribute((ab) => { + ab.setDecl(relationAttribute); + if (includeRelationName) ab.addArg((ab) => ab.StringLiteral.setValue(relationName)); + ab.addArg((ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(sourceField)), 'fields').addArg( + (ab) => ab.ArrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(targetField)), + 'references', + ); - if (relation.foreign_key_on_delete && relation.foreign_key_on_delete !== 'SET NULL') { - const enumRef = getEnumRef('ReferentialAction', services); - if (!enumRef) throw new Error('ReferentialAction enum not found'); - const enumFieldRef = enumRef.fields.find( - (f) => f.name.toLowerCase() === relation.foreign_key_on_delete!.replace(/ /g, '').toLowerCase(), - ); - if (!enumFieldRef) throw new Error(`ReferentialAction ${relation.foreign_key_on_delete} not found`); - ab.addArg((a) => a.ReferenceExpr.setTarget(enumFieldRef), 'onDelete'); - } + if (relation.foreign_key_on_delete && relation.foreign_key_on_delete !== 'SET NULL') { + const enumRef = getEnumRef('ReferentialAction', services); + if (!enumRef) throw new Error('ReferentialAction enum not found'); + const enumFieldRef = enumRef.fields.find( + (f) => f.name.toLowerCase() === relation.foreign_key_on_delete!.replace(/ /g, '').toLowerCase(), + ); + if (!enumFieldRef) throw new Error(`ReferentialAction ${relation.foreign_key_on_delete} not found`); + ab.addArg((a) => a.ReferenceExpr.setTarget(enumFieldRef), 'onDelete'); + } - if (relation.foreign_key_on_update && 
relation.foreign_key_on_update !== 'SET NULL') { - const enumRef = getEnumRef('ReferentialAction', services); - if (!enumRef) throw new Error('ReferentialAction enum not found'); - const enumFieldRef = enumRef.fields.find( - (f) => f.name.toLowerCase() === relation.foreign_key_on_update!.replace(/ /g, '').toLowerCase(), - ); - if (!enumFieldRef) throw new Error(`ReferentialAction ${relation.foreign_key_on_update} not found`); - ab.addArg((a) => a.ReferenceExpr.setTarget(enumFieldRef), 'onUpdate'); - } + if (relation.foreign_key_on_update && relation.foreign_key_on_update !== 'SET NULL') { + const enumRef = getEnumRef('ReferentialAction', services); + if (!enumRef) throw new Error('ReferentialAction enum not found'); + const enumFieldRef = enumRef.fields.find( + (f) => f.name.toLowerCase() === relation.foreign_key_on_update!.replace(/ /g, '').toLowerCase(), + ); + if (!enumFieldRef) throw new Error(`ReferentialAction ${relation.foreign_key_on_update} not found`); + ab.addArg((a) => a.ReferenceExpr.setTarget(enumFieldRef), 'onUpdate'); + } - ab.addArg((ab) => ab.StringLiteral.setValue(relation.fk_name), 'map'); + if (relation.fk_name) ab.addArg((ab) => ab.StringLiteral.setValue(relation.fk_name), 'map'); - return ab; - }); + return ab; + }); sourceModel.fields.push(sourceFieldFactory.node); const oppositeFieldPrefix = /[0-9]/g.test(targetModel.name.charAt(0)) ? '_' : ''; const oppositeFieldName = - relation.type === 'one' - ? `${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}s` - : `${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}`; + simmilarRelations > 0 + ? `${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}` + : sourceModel.name; const targetFieldFactory = new DataFieldFactory() .setContainer(targetModel) @@ -498,4 +516,9 @@ export function syncRelation({ ); targetModel.fields.push(targetFieldFactory.node); + + targetModel.fields.sort((a, b) => { + if (a.type.reference && b.type.reference) return 0; + return a.name.localeCompare(b.name); + }); } diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index 8e1457a3b..958b0930f 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -1,10 +1,18 @@ +import type { BuiltinType } from '@zenstackhq/language/ast'; import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; import { Client } from 'pg'; import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; -import type { BuiltinType } from '@zenstackhq/language/ast'; export const postgresql: IntrospectionProvider = { + isSupportedFeature(feature) { + switch (feature) { + case 'Schema': + return true; + default: + return false; + } + }, getBuiltinType(type) { const t = (type || '').toLowerCase(); diff --git a/packages/cli/src/actions/pull/provider/provider.ts b/packages/cli/src/actions/pull/provider/provider.ts index 0bdc2d671..252a8a300 100644 --- a/packages/cli/src/actions/pull/provider/provider.ts +++ b/packages/cli/src/actions/pull/provider/provider.ts @@ -58,6 +58,8 @@ export type IntrospectedSchema = { enums: IntrospectedEnum[]; }; +export type DatabaseFeature = 'Schema' | 'NativeEnum'; + export interface IntrospectionProvider { 
introspect(connectionString: string): Promise; getBuiltinType(type: string): { @@ -71,4 +73,5 @@ export interface IntrospectionProvider { services: ZModelServices; enums: Enum[]; }): DataFieldAttributeFactory[]; + isSupportedFeature(feature: DatabaseFeature): boolean; } diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index 5c3397600..e940b3595 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -1,98 +1,52 @@ import type { BuiltinType } from '@zenstackhq/language/ast'; import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; +import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; +import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; // Note: We dynamically import better-sqlite3 inside the async function to avoid // requiring it at module load time for environments that don't use SQLite. export const sqlite: IntrospectionProvider = { + isSupportedFeature(feature) { + switch (feature) { + case 'Schema': + case 'NativeEnum': + default: + return false; + } + }, getBuiltinType(type) { const t = (type || '').toLowerCase().trim(); - // SQLite has no array types const isArray = false; - switch (t) { - // integers - case 'int': case 'integer': - case 'tinyint': - case 'smallint': - case 'mediumint': return { type: 'Int', isArray }; + case 'text': + return { type: 'String', isArray }; case 'bigint': return { type: 'BigInt', isArray }; - - // decimals and floats + case 'blob': + return { type: 'Bytes', isArray }; + case 'real': + return { type: 'Float', isArray }; case 'numeric': case 'decimal': return { type: 'Decimal', isArray }; - case 'real': - case 'double': - case 'double precision': - case 'float': - return { type: 'Float', isArray }; - - // boolean (SQLite stores as integer 0/1, but commonly typed as BOOLEAN) - case 'bool': - case 'boolean': - return { type: 'Boolean', isArray }; - - // strings - case 'text': - case 'varchar': - case 'character varying': - case 'char': - case 'character': - case 'clob': - case 'uuid': // often stored as TEXT - return { type: 'String', isArray }; - - // dates/times (stored as TEXT/REAL/INTEGER, but commonly typed as DATE/DATETIME) - case 'date': case 'datetime': return { type: 'DateTime', isArray }; - - // binary - case 'blob': - return { type: 'Bytes', isArray }; - - // json (not a native type, but commonly used) - case 'json': + case 'jsonb': return { type: 'Json', isArray }; - + case 'boolean': + return { type: 'Boolean', isArray }; default: { - // Fallbacks based on SQLite type affinity rules - if (t.includes('int')) return { type: 'Int', isArray }; - if (t.includes('char') || t.includes('clob') || t.includes('text')) return { type: 'String', isArray }; - if (t.includes('blob')) return { type: 'Bytes', isArray }; - if (t.includes('real') || t.includes('floa') || t.includes('doub')) return { type: 'Float', isArray }; - if (t.includes('dec') || t.includes('num')) return { type: 'Decimal', isArray }; return { type: 'Unsupported' as const, isArray }; } } }, - getDefaultDatabaseType(type: BuiltinType) { - switch (type) { - case 'String': - return { type: 'TEXT' }; - case 'Boolean': - return { type: 'INTEGER' }; - case 'Int': - return { type: 'INTEGER' }; - case 'BigInt': - return { type: 'INTEGER' }; - case 'Float': - return { type: 'REAL' }; - case 'Decimal': - return { type: 'DECIMAL' }; - case 'DateTime': - return { type: 'NUMERIC' }; 
-            case 'Json':
-                return { type: 'JSONB' };
-            case 'Bytes':
-                return { type: 'BLOB' };
-        }
+    getDefaultDatabaseType() {
+        return undefined;
     },
 
     async introspect(connectionString: string): Promise<IntrospectedSchema> {
@@ -114,7 +68,7 @@ export const sqlite: IntrospectionProvider = {
 
         for (const t of tablesRaw) {
             const tableName = t.name;
-            const schema = 'main';
+            const schema = '';
 
             // Columns with extended info; filter out hidden=1 (internal/rowid), mark computed if hidden=2 (generated)
             const columnsInfo = all<{
@@ -193,7 +147,7 @@ export const sqlite: IntrospectionProvider = {
 
             for (const fk of fkRows) {
                 fkByColumn.set(fk.from, {
-                    foreign_key_schema: 'main',
+                    foreign_key_schema: '',
                     foreign_key_table: fk.table || null,
                     foreign_key_column: fk.to || null,
                     foreign_key_name: null,
@@ -228,7 +182,7 @@ export const sqlite: IntrospectionProvider = {
                     default: c.dflt_value,
                     options: [],
                     unique: uniqueSingleColumn.has(c.name),
-                    unique_name: uniqueSingleColumn.has(c.name) ? `${tableName}_${c.name}_unique` : null,
+                    unique_name: null,
                 });
             }
@@ -243,7 +197,46 @@ export const sqlite: IntrospectionProvider = {
         }
     },
 
-    getDefaultValue(_args) {
-        throw new Error('Not implemented yet for SQLite');
+    getDefaultValue({ defaultValue, fieldName, services, enums }) {
+        const val = defaultValue.trim();
+        const factories: DataFieldAttributeFactory[] = [];
+
+        const defaultAttr = new DataFieldAttributeFactory().setDecl(getAttributeRef('@default', services));
+
+        if (val === 'CURRENT_TIMESTAMP' || val === 'now()') {
+            factories.push(defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services))));
+
+            if (fieldName.toLowerCase() === 'updatedat' || fieldName.toLowerCase() === 'updated_at') {
+                factories.push(new DataFieldAttributeFactory().setDecl(getAttributeRef('@updatedAt', services)));
+            }
+            return factories;
+        }
+
+        if (val === 'true' || val === 'false') {
+            factories.push(defaultAttr.addArg((a) => a.BooleanLiteral.setValue(val === 'true')));
+            return factories;
+        }
+
+        if (!Number.isNaN(parseFloat(val)) || !Number.isNaN(parseInt(val))) {
+            factories.push(defaultAttr.addArg((a) => a.NumberLiteral.setValue(val)));
+            return factories;
+        }
+
+        if (val.startsWith("'") && val.endsWith("'")) {
+            const strippedName = val.slice(1, -1);
+            const enumDef = enums.find((e) => e.fields.find((v) => getDbName(v) === strippedName));
+            if (enumDef) {
+                const enumField = enumDef.fields.find((v) => getDbName(v) === strippedName);
+                if (enumField) factories.push(defaultAttr.addArg((ab) => ab.ReferenceExpr.setTarget(enumField)));
+            } else {
+                factories.push(defaultAttr.addArg((a) => a.StringLiteral.setValue(strippedName)));
+            }
+            return factories;
+        }
+
+        //TODO: add more default value factories as needed
+        throw new Error(
+            `This default value type is currently not supported. Please open an issue on GitHub. 
Values: "${defaultValue}"`, + ); }, }; diff --git a/packages/cli/src/actions/pull/utils.ts b/packages/cli/src/actions/pull/utils.ts index 05aa31acc..e017bb9b4 100644 --- a/packages/cli/src/actions/pull/utils.ts +++ b/packages/cli/src/actions/pull/utils.ts @@ -53,6 +53,11 @@ export function getDatasource(model: Model) { throw new Error('The url field must be a string literal or an env().'); } + if (url.startsWith('file:')) { + url = new URL(url, `file:${model.$document!.uri.path}`).pathname; + if (process.platform === 'win32' && url[0] === '/') url = url.slice(1); + } + const defaultSchemaField = datasource.fields.find((f) => f.name === 'defaultSchema'); const defaultSchema = (defaultSchemaField && getStringLiteral(defaultSchemaField.value)) || 'public'; diff --git a/packages/cli/src/index.ts b/packages/cli/src/index.ts index e454e2a2a..a7bb403e0 100644 --- a/packages/cli/src/index.ts +++ b/packages/cli/src/index.ts @@ -165,7 +165,7 @@ function createProgram() { .addOption( new Option('--quote ', 'set the quote style of generated schema files').default('double'), ) - .addOption(new Option('--indent ', 'set the indentation of the generated schema files').default(2)) + .addOption(new Option('--indent ', 'set the indentation of the generated schema files').default(4)) .action((options) => dbAction('pull', options)); dbCommand diff --git a/packages/cli/src/test.ts b/packages/cli/src/test.ts new file mode 100644 index 000000000..b83716dfa --- /dev/null +++ b/packages/cli/src/test.ts @@ -0,0 +1,9 @@ +import { URI, Utils } from 'vscode-uri'; + +const base = URI.parse('file:/d/zenstack/'); +const relative = URI.parse('file:./c/asdasd.db'); +console.log(base); +console.log(relative); +console.log(Utils.resolvePath(base, relative.path)); +// console.log(URI.parse('file:/c/asdasd.db')); +// console.log(URI.parse('file:./c/asdasd.db')); diff --git a/packages/cli/test/db/pull.test.ts b/packages/cli/test/db/pull.test.ts new file mode 100644 index 000000000..45cdde442 --- /dev/null +++ b/packages/cli/test/db/pull.test.ts @@ -0,0 +1,96 @@ +import fs from 'node:fs'; +import path from 'node:path'; +import { describe, expect, it } from 'vitest'; +import { createProject, runCli } from '../utils'; + +const getSchema = (workDir: string) => fs.readFileSync(path.join(workDir, 'zenstack/schema.zmodel')).toString(); + +describe('DB pull', () => { + it('sqlite schema', () => { + const workDir = createProject(` +model User { + id String @id @default(cuid()) + email String @unique @map("email_address") + name String? @default("Anonymous") + role Role @default(USER) + profile Profile? + shared_profile Profile? @relation("shared") + posts Post[] + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + jsonData Json? + balance Decimal @default(0.00) + isActive Boolean @default(true) + bigCounter BigInt @default(0) + bytes Bytes? + + @@index([role]) + @@map("users") +} + +model Profile { + id Int @id @default(autoincrement()) + user User @relation(fields: [userId], references: [id], onDelete: Cascade) + userId String @unique + user_shared User @relation("shared", fields: [shared_userId], references: [id], onDelete: Cascade) + shared_userId String @unique + bio String? + avatarUrl String? + + @@map("profiles") +} + +model Post { + id Int @id @default(autoincrement()) + author User @relation(fields: [authorId], references: [id], onDelete: Cascade) + authorId String + title String + content String? 
+ published Boolean @default(false) + tags PostTag[] + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + slug String + score Float @default(0.0) + metadata Json? + + @@unique([authorId, slug]) + @@index([authorId, published]) + @@map("posts") +} + +model Tag { + id Int @id @default(autoincrement()) + name String @unique + posts PostTag[] + createdAt DateTime @default(now()) + + @@index([name], name: "tag_name_idx") + @@map("tags") +} + +model PostTag { + post Post @relation(fields: [postId], references: [id], onDelete: Cascade) + postId Int + tag Tag @relation(fields: [tagId], references: [id], onDelete: Cascade) + tagId Int + assignedAt DateTime @default(now()) + note String? @default("initial") + + @@id([postId, tagId]) + @@map("post_tags") +} + +enum Role { + USER + ADMIN + MODERATOR +}`); + runCli('format', workDir); + runCli('db push', workDir); + + const originalSchema = getSchema(workDir); + runCli('db pull --indent 4', workDir); + expect(getSchema(workDir)).toEqual(originalSchema); + }); +}); diff --git a/packages/cli/test/db/push.test.ts b/packages/cli/test/db/push.test.ts new file mode 100644 index 000000000..78164aaea --- /dev/null +++ b/packages/cli/test/db/push.test.ts @@ -0,0 +1,18 @@ +import fs from 'node:fs'; +import path from 'node:path'; +import { describe, expect, it } from 'vitest'; +import { createProject, runCli } from '../utils'; + +const model = ` +model User { + id String @id @default(cuid()) +} +`; + +describe('CLI db commands test', () => { + it('should generate a database with db push', () => { + const workDir = createProject(model); + runCli('db push', workDir); + expect(fs.existsSync(path.join(workDir, 'zenstack/dev.db'))).toBe(true); + }); +}); diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 8abc943c3..8c635fc5e 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -109,7 +109,7 @@ importers: version: 20.19.24 '@vitest/coverage-v8': specifier: ^4.0.16 - version: 4.0.16(vitest@4.0.14(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0)) + version: 4.0.16(vitest@4.0.15(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0)) eslint: specifier: ~9.29.0 version: 9.29.0(jiti@2.6.1) @@ -142,7 +142,7 @@ importers: version: 8.34.1(eslint@9.29.0(jiti@2.6.1))(typescript@5.9.3) vitest: specifier: ^4.0.14 - version: 4.0.14(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0) + version: 4.0.15(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0) yaml: specifier: ^2.8.0 version: 2.8.0 @@ -264,6 +264,9 @@ importers: ts-pattern: specifier: 'catalog:' version: 5.7.1 + vscode-uri: + specifier: ^3.1.0 + version: 3.1.0 devDependencies: '@types/better-sqlite3': specifier: 'catalog:' @@ -1171,13 +1174,13 @@ importers: devDependencies: '@types/pg': specifier: ^8.15.6 - version: 8.15.6 + version: 8.16.0 '@zenstackhq/typescript-config': specifier: workspace:* version: link:../../../packages/config/typescript-config bun-types: specifier: ^1.3.3 - version: 1.3.3 + version: 1.3.4 tests/runtimes/edge-runtime: dependencies: @@ -1205,7 +1208,7 @@ importers: devDependencies: '@types/pg': specifier: ^8.15.6 - version: 8.15.6 + version: 8.16.0 '@zenstackhq/typescript-config': specifier: 
workspace:* version: link:../../../packages/config/typescript-config @@ -3663,8 +3666,8 @@ packages: '@types/pg@8.11.11': resolution: {integrity: sha512-kGT1qKM8wJQ5qlawUrEkXgvMSXoV213KfMGXcwfDwUIfUHXqXYXOfS1nE1LINRJVVVx5wCm70XnFlMHaIcQAfw==} - '@types/pg@8.15.6': - resolution: {integrity: sha512-NoaMtzhxOrubeL/7UZuNTrejB4MPAJ0RpxZqXQf2qXuVlTPuG6Y8p4u9dKRaue4yjmC7ZhzVO2/Yyyn25znrPQ==} + '@types/pg@8.16.0': + resolution: {integrity: sha512-RmhMd/wD+CF8Dfo+cVIy3RR5cl8CyfXQ0tGgW6XBL8L4LM/UTEbNXYRbLwU6w+CgrKBNbrQWt4FUtTfaU5jSYQ==} '@types/pg@8.16.0': resolution: {integrity: sha512-RmhMd/wD+CF8Dfo+cVIy3RR5cl8CyfXQ0tGgW6XBL8L4LM/UTEbNXYRbLwU6w+CgrKBNbrQWt4FUtTfaU5jSYQ==} @@ -3968,11 +3971,11 @@ packages: '@vitest/browser': optional: true - '@vitest/expect@4.0.14': - resolution: {integrity: sha512-RHk63V3zvRiYOWAV0rGEBRO820ce17hz7cI2kDmEdfQsBjT2luEKB5tCOc91u1oSQoUOZkSv3ZyzkdkSLD7lKw==} + '@vitest/expect@4.0.15': + resolution: {integrity: sha512-Gfyva9/GxPAWXIWjyGDli9O+waHDC0Q0jaLdFP1qPAUUfo1FEXPXUfUkp3eZA0sSq340vPycSyOlYUeM15Ft1w==} - '@vitest/mocker@4.0.14': - resolution: {integrity: sha512-RzS5NujlCzeRPF1MK7MXLiEFpkIXeMdQ+rN3Kk3tDI9j0mtbr7Nmuq67tpkOJQpgyClbOltCXMjLZicJHsH5Cg==} + '@vitest/mocker@4.0.15': + resolution: {integrity: sha512-CZ28GLfOEIFkvCFngN8Sfx5h+Se0zN+h4B7yOsPVCcgtiO7t5jt9xQh2E1UkFep+eb9fjyMfuC5gBypwb07fvQ==} peerDependencies: msw: ^2.4.9 vite: ^6.0.0 || ^7.0.0-0 @@ -3982,23 +3985,23 @@ packages: vite: optional: true - '@vitest/pretty-format@4.0.14': - resolution: {integrity: sha512-SOYPgujB6TITcJxgd3wmsLl+wZv+fy3av2PpiPpsWPZ6J1ySUYfScfpIt2Yv56ShJXR2MOA6q2KjKHN4EpdyRQ==} + '@vitest/pretty-format@4.0.15': + resolution: {integrity: sha512-SWdqR8vEv83WtZcrfLNqlqeQXlQLh2iilO1Wk1gv4eiHKjEzvgHb2OVc3mIPyhZE6F+CtfYjNlDJwP5MN6Km7A==} '@vitest/pretty-format@4.0.16': resolution: {integrity: sha512-eNCYNsSty9xJKi/UdVD8Ou16alu7AYiS2fCPRs0b1OdhJiV89buAXQLpTbe+X8V9L6qrs9CqyvU7OaAopJYPsA==} - '@vitest/runner@4.0.14': - resolution: {integrity: sha512-BsAIk3FAqxICqREbX8SetIteT8PiaUL/tgJjmhxJhCsigmzzH8xeadtp7LRnTpCVzvf0ib9BgAfKJHuhNllKLw==} + '@vitest/runner@4.0.15': + resolution: {integrity: sha512-+A+yMY8dGixUhHmNdPUxOh0la6uVzun86vAbuMT3hIDxMrAOmn5ILBHm8ajrqHE0t8R9T1dGnde1A5DTnmi3qw==} - '@vitest/snapshot@4.0.14': - resolution: {integrity: sha512-aQVBfT1PMzDSA16Y3Fp45a0q8nKexx6N5Amw3MX55BeTeZpoC08fGqEZqVmPcqN0ueZsuUQ9rriPMhZ3Mu19Ag==} + '@vitest/snapshot@4.0.15': + resolution: {integrity: sha512-A7Ob8EdFZJIBjLjeO0DZF4lqR6U7Ydi5/5LIZ0xcI+23lYlsYJAfGn8PrIWTYdZQRNnSRlzhg0zyGu37mVdy5g==} - '@vitest/spy@4.0.14': - resolution: {integrity: sha512-JmAZT1UtZooO0tpY3GRyiC/8W7dCs05UOq9rfsUUgEZEdq+DuHLmWhPsrTt0TiW7WYeL/hXpaE07AZ2RCk44hg==} + '@vitest/spy@4.0.15': + resolution: {integrity: sha512-+EIjOJmnY6mIfdXtE/bnozKEvTC4Uczg19yeZ2vtCz5Yyb0QQ31QWVQ8hswJ3Ysx/K2EqaNsVanjr//2+P3FHw==} - '@vitest/utils@4.0.14': - resolution: {integrity: sha512-hLqXZKAWNg8pI+SQXyXxWCTOpA3MvsqcbVeNgSi8x/CSN2wi26dSzn1wrOhmCmFjEvN9p8/kLFRHa6PI8jHazw==} + '@vitest/utils@4.0.15': + resolution: {integrity: sha512-HXjPW2w5dxhTD0dLwtYHDnelK3j8sR8cWIaLxr22evTyY6q8pRCjZSmhRWVjBaOVXChQd6AwMzi9pucorXCPZA==} '@vitest/utils@4.0.16': resolution: {integrity: sha512-h8z9yYhV3e1LEfaQ3zdypIrnAg/9hguReGZoS7Gl0aBG5xgA410zBqECqmaF/+RkTggRsfnzc1XaAHA6bmUufA==} @@ -4468,8 +4471,8 @@ packages: buffer@6.0.3: resolution: {integrity: sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==} - bun-types@1.3.3: - resolution: {integrity: 
sha512-z3Xwlg7j2l9JY27x5Qn3Wlyos8YAp0kKRlrePAOjgjMGS5IG6E7Jnlx736vH9UVI4wUICwwhC9anYL++XeOgTQ==} + bun-types@1.3.4: + resolution: {integrity: sha512-5ua817+BZPZOlNaRgGBpZJOSAQ9RQ17pkwPD0yR7CfJg+r8DgIILByFifDTa+IPDDxzf5VNhtNlcKqFzDgJvlQ==} bundle-name@4.1.0: resolution: {integrity: sha512-tjwM5exMg6BGRI+kNmTntNsvdZS1X8BFYS6tnJ2hdH0kVxM6/eVZ2xy+FqStSWvYmtfFMDLIxurorHwDKfDz5Q==} @@ -5421,8 +5424,8 @@ packages: resolution: {integrity: sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==} engines: {node: '>=6'} - expect-type@1.2.2: - resolution: {integrity: sha512-JhFGDVJ7tmDJItKhYgJCGLOWjuK9vPxiXoUFLwLDc99NlmklilbiQJwoctZtt13+xMw91MCk/REan6MWHqDjyA==} + expect-type@1.3.0: + resolution: {integrity: sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==} engines: {node: '>=12.0.0'} express@5.1.0: @@ -8599,18 +8602,18 @@ packages: vite: optional: true - vitest@4.0.14: - resolution: {integrity: sha512-d9B2J9Cm9dN9+6nxMnnNJKJCtcyKfnHj15N6YNJfaFHRLua/d3sRKU9RuKmO9mB0XdFtUizlxfz/VPbd3OxGhw==} + vitest@4.0.15: + resolution: {integrity: sha512-n1RxDp8UJm6N0IbJLQo+yzLZ2sQCDyl1o0LeugbPWf8+8Fttp29GghsQBjYJVmWq3gBFfe9Hs1spR44vovn2wA==} engines: {node: ^20.0.0 || ^22.0.0 || >=24.0.0} hasBin: true peerDependencies: '@edge-runtime/vm': '*' '@opentelemetry/api': ^1.9.0 '@types/node': ^20.0.0 || ^22.0.0 || >=24.0.0 - '@vitest/browser-playwright': 4.0.14 - '@vitest/browser-preview': 4.0.14 - '@vitest/browser-webdriverio': 4.0.14 - '@vitest/ui': 4.0.14 + '@vitest/browser-playwright': 4.0.15 + '@vitest/browser-preview': 4.0.15 + '@vitest/browser-webdriverio': 4.0.15 + '@vitest/ui': 4.0.15 happy-dom: '*' jsdom: '*' peerDependenciesMeta: @@ -11263,7 +11266,7 @@ snapshots: pg-protocol: 1.10.3 pg-types: 4.0.2 - '@types/pg@8.15.6': + '@types/pg@8.16.0': dependencies: '@types/node': 20.19.24 pg-protocol: 1.10.3 @@ -11464,7 +11467,7 @@ snapshots: fast-glob: 3.3.3 is-glob: 4.0.3 minimatch: 9.0.5 - semver: 7.7.3 + semver: 7.7.2 ts-api-utils: 2.1.0(typescript@5.9.3) typescript: 5.9.3 transitivePeerDependencies: @@ -11620,7 +11623,7 @@ snapshots: vite: 7.3.0(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.2) vue: 3.5.26(typescript@5.9.3) - '@vitest/coverage-v8@4.0.16(vitest@4.0.14(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0))': + '@vitest/coverage-v8@4.0.16(vitest@4.0.15(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0))': dependencies: '@bcoe/v8-coverage': 1.0.2 '@vitest/utils': 4.0.16 @@ -11633,22 +11636,22 @@ snapshots: obug: 2.1.1 std-env: 3.10.0 tinyrainbow: 3.0.3 - vitest: 4.0.14(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0) + vitest: 4.0.15(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0) transitivePeerDependencies: - supports-color - '@vitest/expect@4.0.14': + '@vitest/expect@4.0.15': dependencies: '@standard-schema/spec': 1.0.0 '@types/chai': 5.2.2 - '@vitest/spy': 4.0.14 - '@vitest/utils': 4.0.14 + '@vitest/spy': 4.0.15 + '@vitest/utils': 4.0.15 chai: 6.2.1 tinyrainbow: 3.0.3 - 
'@vitest/mocker@4.0.14(vite@7.3.0(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0))': + '@vitest/mocker@4.0.15(vite@7.3.0(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0))': dependencies: - '@vitest/spy': 4.0.14 + '@vitest/spy': 4.0.15 estree-walker: 3.0.3 magic-string: 0.30.21 optionalDependencies: @@ -11671,22 +11674,22 @@ snapshots: dependencies: tinyrainbow: 3.0.3 - '@vitest/runner@4.0.14': + '@vitest/runner@4.0.15': dependencies: - '@vitest/utils': 4.0.14 + '@vitest/utils': 4.0.15 pathe: 2.0.3 - '@vitest/snapshot@4.0.14': + '@vitest/snapshot@4.0.15': dependencies: - '@vitest/pretty-format': 4.0.14 + '@vitest/pretty-format': 4.0.15 magic-string: 0.30.21 pathe: 2.0.3 - '@vitest/spy@4.0.14': {} + '@vitest/spy@4.0.15': {} - '@vitest/utils@4.0.14': + '@vitest/utils@4.0.15': dependencies: - '@vitest/pretty-format': 4.0.14 + '@vitest/pretty-format': 4.0.15 tinyrainbow: 3.0.3 '@vitest/utils@4.0.16': @@ -12248,7 +12251,7 @@ snapshots: base64-js: 1.5.1 ieee754: 1.2.1 - bun-types@1.3.3: + bun-types@1.3.4: dependencies: '@types/node': 20.19.24 @@ -13336,7 +13339,7 @@ snapshots: expand-template@2.0.3: {} - expect-type@1.2.2: {} + expect-type@1.3.0: {} express@5.1.0: dependencies: @@ -14200,7 +14203,7 @@ snapshots: kysely-bun-sqlite@0.4.0(kysely@0.28.8): dependencies: - bun-types: 1.3.3 + bun-types: 1.3.4 kysely: 0.28.8 kysely@0.28.8: {} @@ -16919,24 +16922,24 @@ snapshots: optionalDependencies: vite: 7.3.0(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.2) - vitest@4.0.14(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0): + vitest@4.0.15(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0): dependencies: - '@vitest/expect': 4.0.14 - '@vitest/mocker': 4.0.14(vite@7.3.0(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0)) - '@vitest/pretty-format': 4.0.14 - '@vitest/runner': 4.0.14 - '@vitest/snapshot': 4.0.14 - '@vitest/spy': 4.0.14 - '@vitest/utils': 4.0.14 + '@vitest/expect': 4.0.15 + '@vitest/mocker': 4.0.15(vite@7.3.0(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0)) + '@vitest/pretty-format': 4.0.15 + '@vitest/runner': 4.0.15 + '@vitest/snapshot': 4.0.15 + '@vitest/spy': 4.0.15 + '@vitest/utils': 4.0.15 es-module-lexer: 1.7.0 - expect-type: 1.2.2 + expect-type: 1.3.0 magic-string: 0.30.21 obug: 2.1.1 pathe: 2.0.3 picomatch: 4.0.3 std-env: 3.10.0 tinybench: 2.9.0 - tinyexec: 0.3.2 + tinyexec: 1.0.2 tinyglobby: 0.2.15 tinyrainbow: 3.0.3 vite: 7.3.0(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0) From 099d576a03d1b0f34a9cda41436ab5de77572f62 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 21 Nov 2025 01:25:23 +0100 Subject: [PATCH 24/68] fix: lint fix --- packages/cli/src/actions/db.ts | 10 ++++++++-- packages/cli/src/actions/pull/index.ts | 5 ++--- packages/cli/src/actions/pull/provider/sqlite.ts | 3 +-- 3 files changed, 11 insertions(+), 7 deletions(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 6ea1bc08f..16fe6f196 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -1,10 +1,16 @@ import { config } from '@dotenvx/dotenvx'; import { 
ZModelCodeGenerator } from '@zenstackhq/language'; -import { type DataField, DataModel, Enum, type Model } from '@zenstackhq/language/ast'; +import { DataModel, Enum, type Model } from '@zenstackhq/language/ast'; import fs from 'node:fs'; import path from 'node:path'; import { execPrisma } from '../utils/exec-utils'; -import { generateTempPrismaSchema, getSchemaFile, handleSubProcessError, requireDataSourceUrl, loadSchemaDocumentWithServices } from './action-utils'; +import { + generateTempPrismaSchema, + getSchemaFile, + handleSubProcessError, + requireDataSourceUrl, + loadSchemaDocument, +} from './action-utils'; import { syncEnums, syncRelation, syncTable, type Relation } from './pull'; import { providers } from './pull/provider'; import { getDatasource, getDbName, getRelationFkName } from './pull/utils'; diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 60c192045..4a661afb9 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -2,11 +2,11 @@ import type { ZModelServices } from '@zenstackhq/language'; import { isEnum, type Attribute, + type BuiltinType, type DataField, type DataModel, type Enum, type Model, - type BuiltinType, } from '@zenstackhq/language/ast'; import { DataFieldAttributeFactory, @@ -15,7 +15,7 @@ import { EnumFactory, } from '@zenstackhq/language/factory'; import type { PullOptions } from '../db'; -import { type Cascade, type IntrospectedEnum, type IntrospectedTable, type IntrospectionProvider } from './provider'; +import type { Cascade, IntrospectedEnum, IntrospectedTable, IntrospectionProvider } from './provider'; import { getAttributeRef, getDbName, getEnumRef } from './utils'; export function syncEnums({ @@ -156,7 +156,6 @@ export function syncTable({ services, options, defaultSchema, - oldModel, }: { table: IntrospectedTable; model: Model; diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index e940b3595..5825becde 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -1,7 +1,6 @@ -import type { BuiltinType } from '@zenstackhq/language/ast'; -import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; +import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; // Note: We dynamically import better-sqlite3 inside the async function to avoid // requiring it at module load time for environments that don't use SQLite. 
From 3fb6cccbd3fccc640af9488351bdaa41428b8765 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 21 Nov 2025 21:19:35 +0100 Subject: [PATCH 25/68] fix: formatting for e2e test schemas --- packages/cli/test/utils.ts | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/packages/cli/test/utils.ts b/packages/cli/test/utils.ts index 2fafb2074..011eb57ec 100644 --- a/packages/cli/test/utils.ts +++ b/packages/cli/test/utils.ts @@ -5,15 +5,14 @@ import path from 'node:path'; const ZMODEL_PRELUDE = `datasource db { provider = "sqlite" - url = "file:./dev.db" -} -`; + url = "file:./dev.db" +}`; export function createProject(zmodel: string, addPrelude = true) { const workDir = createTestProject(); fs.mkdirSync(path.join(workDir, 'zenstack'), { recursive: true }); const schemaPath = path.join(workDir, 'zenstack/schema.zmodel'); - fs.writeFileSync(schemaPath, addPrelude ? `${ZMODEL_PRELUDE}\n\n${zmodel}` : zmodel); + fs.writeFileSync(schemaPath, addPrelude ? `${ZMODEL_PRELUDE}\n${zmodel}` : zmodel); return workDir; } From 7283cc7ac147d6afc436224840c2046390adb70d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 21 Nov 2025 21:40:41 +0100 Subject: [PATCH 26/68] test: run db pull e2e test also for postgres --- packages/cli/test/db/pull.test.ts | 6 ++-- packages/cli/test/utils.ts | 48 +++++++++++++++++++++++++++---- 2 files changed, 46 insertions(+), 8 deletions(-) diff --git a/packages/cli/test/db/pull.test.ts b/packages/cli/test/db/pull.test.ts index 45cdde442..35378c2ea 100644 --- a/packages/cli/test/db/pull.test.ts +++ b/packages/cli/test/db/pull.test.ts @@ -6,9 +6,9 @@ import { createProject, runCli } from '../utils'; const getSchema = (workDir: string) => fs.readFileSync(path.join(workDir, 'zenstack/schema.zmodel')).toString(); describe('DB pull', () => { - it('sqlite schema', () => { - const workDir = createProject(` -model User { + it('simple schema', () => { + const workDir = createProject( +`model User { id String @id @default(cuid()) email String @unique @map("email_address") name String? @default("Anonymous") diff --git a/packages/cli/test/utils.ts b/packages/cli/test/utils.ts index 011eb57ec..5a93100eb 100644 --- a/packages/cli/test/utils.ts +++ b/packages/cli/test/utils.ts @@ -1,14 +1,52 @@ -import { createTestProject } from '@zenstackhq/testtools'; +import { createTestProject, getTestDbProvider } from '@zenstackhq/testtools'; +import { createHash } from 'node:crypto'; import { execSync } from 'node:child_process'; import fs from 'node:fs'; import path from 'node:path'; +import { expect } from 'vitest'; -const ZMODEL_PRELUDE = `datasource db { - provider = "sqlite" - url = "file:./dev.db" -}`; +const TEST_PG_CONFIG = { + host: process.env['TEST_PG_HOST'] ?? 'localhost', + port: process.env['TEST_PG_PORT'] ? parseInt(process.env['TEST_PG_PORT']) : 5432, + user: process.env['TEST_PG_USER'] ?? 'postgres', + password: process.env['TEST_PG_PASSWORD'] ?? 'postgres', +}; + +function getTestDbName(provider: string) { + if (provider === 'sqlite') { + return './test.db'; + } + const testName = expect.getState().currentTestName ?? 'unnamed'; + const testPath = expect.getState().testPath ?? 
'';
+    // digest test name
+    const digest = createHash('md5')
+        .update(testName + testPath)
+        .digest('hex');
+    // compute a database name based on test name
+    return (
+        'test_' +
+        testName
+            .toLowerCase()
+            .replace(/[^a-z0-9_]/g, '_')
+            .replace(/_+/g, '_')
+            .substring(0, 30) +
+        digest.slice(0, 6)
+    );
+}
 
 export function createProject(zmodel: string, addPrelude = true) {
+    const provider = getTestDbProvider() ?? 'sqlite';
+    const dbName = getTestDbName(provider);
+    const dbUrl =
+        provider === 'sqlite'
+            ? `file:${dbName}`
+            : `postgres://${TEST_PG_CONFIG.user}:${TEST_PG_CONFIG.password}@${TEST_PG_CONFIG.host}:${TEST_PG_CONFIG.port}/${dbName}`;
+
+    const ZMODEL_PRELUDE = `datasource db {
+    provider = "${provider}"
+    url = "${dbUrl}"
+}
+`;
     const workDir = createTestProject();
     fs.mkdirSync(path.join(workDir, 'zenstack'), { recursive: true });
     const schemaPath = path.join(workDir, 'zenstack/schema.zmodel');
     fs.writeFileSync(schemaPath, addPrelude ? `${ZMODEL_PRELUDE}\n${zmodel}` : zmodel);
     return workDir;
 }

From 0625bcc21c58ce68c7bc22a228af7687cc3441b8 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?=
Date: Sun, 23 Nov 2025 01:30:26 +0100
Subject: [PATCH 27/68] fix: postgres introspection schema filter

---
 packages/cli/src/actions/db.ts | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts
index 16fe6f196..879353130 100644
--- a/packages/cli/src/actions/db.ts
+++ b/packages/cli/src/actions/db.ts
@@ -101,10 +101,10 @@ async function runPull(options: PullOptions) {
     console.log('Starging introspect the database...');
     const { enums: allEnums, tables: allTables } = await provider.introspect(datasource.url);
     const enums = provider.isSupportedFeature('Schema')
-        ? allEnums.filter((e) => datasource.schemas.includes(e.schema_name))
+        ? allEnums.filter((e) => datasource.allSchemas.includes(e.schema_name))
         : allEnums;
     const tables = provider.isSupportedFeature('Schema')
-        ? 
allTables.filter((t) => datasource.allSchemas.includes(t.schema)) : allTables; const newModel: Model = { From 164b193f64665b6c1acd46d3b0a6506fb6b7d747 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Sun, 23 Nov 2025 01:32:49 +0100 Subject: [PATCH 28/68] test: update cli tests --- packages/cli/test/check.test.ts | 7 +------ packages/cli/test/db.test.ts | 10 +++++----- packages/cli/test/db/push.test.ts | 4 ++-- packages/cli/test/migrate.test.ts | 24 ++++++++++++------------ packages/cli/test/utils.ts | 9 ++++++--- 5 files changed, 26 insertions(+), 28 deletions(-) diff --git a/packages/cli/test/check.test.ts b/packages/cli/test/check.test.ts index 287bb6b80..60f80903e 100644 --- a/packages/cli/test/check.test.ts +++ b/packages/cli/test/check.test.ts @@ -83,17 +83,12 @@ describe('CLI validate command test', () => { it('should validate schema with syntax errors', () => { const modelWithSyntaxError = ` -datasource db { - provider = "sqlite" - url = "file:./dev.db" -} - model User { id String @id @default(cuid()) email String @unique // Missing closing brace - syntax error `; - const workDir = createProject(modelWithSyntaxError, false); + const workDir = createProject(modelWithSyntaxError); // Should throw an error due to syntax error expect(() => runCli('check', workDir)).toThrow(); diff --git a/packages/cli/test/db.test.ts b/packages/cli/test/db.test.ts index 636dcff8f..b17f92e5e 100644 --- a/packages/cli/test/db.test.ts +++ b/packages/cli/test/db.test.ts @@ -11,13 +11,13 @@ model User { describe('CLI db commands test', () => { it('should generate a database with db push', () => { - const workDir = createProject(model); + const workDir = createProject(model, { provider: 'sqlite' }); runCli('db push', workDir); - expect(fs.existsSync(path.join(workDir, 'zenstack/dev.db'))).toBe(true); + expect(fs.existsSync(path.join(workDir, 'zenstack/test.db'))).toBe(true); }); it('should seed the database with db seed with seed script', () => { - const workDir = createProject(model); + const workDir = createProject(model, { provider: 'sqlite' }); const pkgJson = JSON.parse(fs.readFileSync(path.join(workDir, 'package.json'), 'utf8')); pkgJson.zenstack = { seed: 'node seed.js', @@ -36,7 +36,7 @@ fs.writeFileSync('seed.txt', 'success'); }); it('should seed the database after migrate reset', () => { - const workDir = createProject(model); + const workDir = createProject(model, { provider: 'sqlite' }); const pkgJson = JSON.parse(fs.readFileSync(path.join(workDir, 'package.json'), 'utf8')); pkgJson.zenstack = { seed: 'node seed.js', @@ -55,7 +55,7 @@ fs.writeFileSync('seed.txt', 'success'); }); it('should skip seeding the database without seed script', () => { - const workDir = createProject(model); + const workDir = createProject(model, { provider: 'sqlite' }); runCli('db seed', workDir); }); }); diff --git a/packages/cli/test/db/push.test.ts b/packages/cli/test/db/push.test.ts index 78164aaea..9c688df4d 100644 --- a/packages/cli/test/db/push.test.ts +++ b/packages/cli/test/db/push.test.ts @@ -11,8 +11,8 @@ model User { describe('CLI db commands test', () => { it('should generate a database with db push', () => { - const workDir = createProject(model); + const workDir = createProject(model, { provider: 'sqlite' }); runCli('db push', workDir); - expect(fs.existsSync(path.join(workDir, 'zenstack/dev.db'))).toBe(true); + expect(fs.existsSync(path.join(workDir, 'zenstack/test.db'))).toBe(true); }); }); diff --git a/packages/cli/test/migrate.test.ts b/packages/cli/test/migrate.test.ts index 
56a0fec83..86abc3576 100644 --- a/packages/cli/test/migrate.test.ts +++ b/packages/cli/test/migrate.test.ts @@ -11,36 +11,36 @@ model User { describe('CLI migrate commands test', () => { it('should generate a database with migrate dev', () => { - const workDir = createProject(model); + const workDir = createProject(model, { provider: 'sqlite' }); runCli('migrate dev --name init', workDir); - expect(fs.existsSync(path.join(workDir, 'zenstack/dev.db'))).toBe(true); + expect(fs.existsSync(path.join(workDir, 'zenstack/test.db'))).toBe(true); expect(fs.existsSync(path.join(workDir, 'zenstack/migrations'))).toBe(true); }); it('should reset the database with migrate reset', () => { - const workDir = createProject(model); + const workDir = createProject(model, { provider: 'sqlite' }); runCli('db push', workDir); - expect(fs.existsSync(path.join(workDir, 'zenstack/dev.db'))).toBe(true); + expect(fs.existsSync(path.join(workDir, 'zenstack/test.db'))).toBe(true); runCli('migrate reset --force', workDir); - expect(fs.existsSync(path.join(workDir, 'zenstack/dev.db'))).toBe(true); + expect(fs.existsSync(path.join(workDir, 'zenstack/test.db'))).toBe(true); }); it('should reset the database with migrate deploy', () => { - const workDir = createProject(model); + const workDir = createProject(model, { provider: 'sqlite' }); runCli('migrate dev --name init', workDir); - fs.rmSync(path.join(workDir, 'zenstack/dev.db')); + fs.rmSync(path.join(workDir, 'zenstack/test.db')); runCli('migrate deploy', workDir); - expect(fs.existsSync(path.join(workDir, 'zenstack/dev.db'))).toBe(true); + expect(fs.existsSync(path.join(workDir, 'zenstack/test.db'))).toBe(true); }); it('supports migrate status', () => { - const workDir = createProject(model); + const workDir = createProject(model, { provider: 'sqlite' }); runCli('migrate dev --name init', workDir); runCli('migrate status', workDir); }); it('supports migrate resolve', () => { - const workDir = createProject(model); + const workDir = createProject(model, { provider: 'sqlite' }); runCli('migrate dev --name init', workDir); // find the migration record "timestamp_init" @@ -51,7 +51,7 @@ describe('CLI migrate commands test', () => { fs.writeFileSync(path.join(workDir, 'zenstack/migrations', migration!, 'migration.sql'), 'invalid content'); // redeploy the migration, which will fail - fs.rmSync(path.join(workDir, 'zenstack/dev.db'), { force: true }); + fs.rmSync(path.join(workDir, 'zenstack/test.db'), { force: true }); try { runCli('migrate deploy', workDir); } catch { @@ -66,7 +66,7 @@ describe('CLI migrate commands test', () => { }); it('should throw error when neither applied nor rolled-back is provided', () => { - const workDir = createProject(model); + const workDir = createProject(model, { provider: 'sqlite' }); expect(() => runCli('migrate resolve', workDir)).toThrow(); }); }); diff --git a/packages/cli/test/utils.ts b/packages/cli/test/utils.ts index 5a93100eb..7017b622f 100644 --- a/packages/cli/test/utils.ts +++ b/packages/cli/test/utils.ts @@ -34,8 +34,11 @@ function getTestDbName(provider: string) { ); } -export function createProject(zmodel: string, addPrelude = true) { - const provider = getTestDbProvider() ?? 'sqlite'; +export function createProject( + zmodel: string, + options?: { customPrelude?: boolean; provider?: 'sqlite' | 'postgresql' }, +) { + const provider = (options?.provider || getTestDbProvider()) ?? 
'sqlite'; const dbName = getTestDbName(provider); const dbUrl = provider === 'sqlite' @@ -50,7 +53,7 @@ export function createProject(zmodel: string, addPrelude = true) { const workDir = createTestProject(); fs.mkdirSync(path.join(workDir, 'zenstack'), { recursive: true }); const schemaPath = path.join(workDir, 'zenstack/schema.zmodel'); - fs.writeFileSync(schemaPath, addPrelude ? `${ZMODEL_PRELUDE}\n${zmodel}` : zmodel); + fs.writeFileSync(schemaPath, !options?.customPrelude ? `${ZMODEL_PRELUDE}\n${zmodel}` : zmodel); return workDir; } From 371a122406f964c77cb4a6e4fbae13f870ac95c8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Mon, 15 Dec 2025 22:22:22 +0100 Subject: [PATCH 29/68] feat(cli): Improves database introspection and syncing Enhances the `db pull` command with a spinner for better UX. Adds color-coded logging to highlight important steps. Provides more detailed output on schema changes, including deleted models, enums, added fields, and deleted attributes. Also includes minor improvements to enum mapping and constraint handling. --- packages/cli/src/actions/db.ts | 76 +++++++++++++++++++++----- packages/cli/src/actions/pull/index.ts | 17 +++--- 2 files changed, 73 insertions(+), 20 deletions(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 879353130..36e354b95 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -1,15 +1,17 @@ import { config } from '@dotenvx/dotenvx'; import { ZModelCodeGenerator } from '@zenstackhq/language'; import { DataModel, Enum, type Model } from '@zenstackhq/language/ast'; +import colors from 'colors'; import fs from 'node:fs'; import path from 'node:path'; +import ora from 'ora'; import { execPrisma } from '../utils/exec-utils'; import { generateTempPrismaSchema, getSchemaFile, handleSubProcessError, - requireDataSourceUrl, loadSchemaDocument, + requireDataSourceUrl, } from './action-utils'; import { syncEnums, syncRelation, syncTable, type Relation } from './pull'; import { providers } from './pull/provider'; @@ -77,6 +79,7 @@ async function runPush(options: PushOptions) { } async function runPull(options: PullOptions) { + const spinner = ora(); try { const schemaFile = getSchemaFile(options.schema); const { model, services } = await loadSchemaDocument(schemaFile, { returnServices: true }); @@ -98,8 +101,11 @@ async function runPull(options: PullOptions) { if (!provider) { throw new Error(`No introspection provider found for: ${datasource.provider}`); } - console.log('Starging introspect the database...'); + + spinner.start('Introspecting database...'); const { enums: allEnums, tables: allTables } = await provider.introspect(datasource.url); + spinner.succeed('Database introspected'); + const enums = provider.isSupportedFeature('Schema') ? allEnums.filter((e) => datasource.allSchemas.includes(e.schema_name)) : allEnums; @@ -107,6 +113,8 @@ async function runPull(options: PullOptions) { ? 
allTables.filter((t) => datasource.allSchemas.includes(t.schema)) : allTables; + console.log(colors.blue('Syncing schema...')); + const newModel: Model = { $type: 'Model', $container: undefined, @@ -165,12 +173,22 @@ async function runPull(options: PullOptions) { }); } + console.log(colors.blue('Schema synced')); + const cwd = new URL(`file://${process.cwd()}`).pathname; const docs = services.shared.workspace.LangiumDocuments.all .filter(({ uri }) => uri.path.toLowerCase().startsWith(cwd.toLowerCase())) .toArray(); const docsSet = new Set(docs.map((d) => d.uri.toString())); + console.log(colors.bold('\nApplying changes to ZModel...')); + + const deletedModels: string[] = []; + const deletedEnums: string[] = []; + const addedFields: string[] = []; + const deletedAttributes: string[] = []; + const deletedFields: string[] = []; + //Delete models services.shared.workspace.IndexManager.allElements('DataModel', docsSet) .filter( @@ -181,7 +199,7 @@ async function runPull(options: PullOptions) { const model = decl.node!.$container as Model; const index = model.declarations.findIndex((d) => d === decl.node); model.declarations.splice(index, 1); - console.log(`Delete model ${decl.name}`); + deletedModels.push(colors.red(`- Model ${decl.name} deleted`)); }); // Delete Enums @@ -195,7 +213,7 @@ async function runPull(options: PullOptions) { const model = decl.node!.$container as Model; const index = model.declarations.findIndex((d) => d === decl.node); model.declarations.splice(index, 1); - console.log(`Delete enum ${decl.name}`); + deletedEnums.push(colors.red(`- Enum ${decl.name} deleted`)); }); // newModel.declarations @@ -239,14 +257,16 @@ async function runPull(options: PullOptions) { if (originalFields.length > 1) { console.warn( - `Found more original fields, need to tweak the search algorith. ${originalDataModel.name}->[${originalFields.map((of) => of.name).join(', ')}](${f.name})`, + colors.yellow( + `Found more original fields, need to tweak the search algorith. 
${originalDataModel.name}->[${originalFields.map((of) => of.name).join(', ')}](${f.name})`, + ), ); return; } const originalField = originalFields.at(0); Object.freeze(originalField); if (!originalField) { - console.log(`Added field ${f.name} to ${originalDataModel.name}`); + addedFields.push(colors.green(`+ Field ${f.name} added to ${originalDataModel.name}`)); (f as any).$container = originalDataModel; originalDataModel.fields.push(f as any); if (f.$type === 'DataField' && f.type.reference?.ref) { @@ -260,7 +280,7 @@ async function runPull(options: PullOptions) { } return; } - if (f.name === 'profiles') console.log(f.attributes.length); + originalField.attributes .filter( (attr) => @@ -271,7 +291,9 @@ async function runPull(options: PullOptions) { const field = attr.$container; const index = field.attributes.findIndex((d) => d === attr); field.attributes.splice(index, 1); - console.log(`Delete attribute from field:${field.name} ${attr.decl.$refText}`); + deletedAttributes.push( + colors.yellow(`- Attribute ${attr.decl.$refText} deleted from field: ${field.name}`), + ); }); }); originalDataModel.fields @@ -295,10 +317,35 @@ async function runPull(options: PullOptions) { const _model = f.$container; const index = _model.fields.findIndex((d) => d === f); _model.fields.splice(index, 1); - console.log(`Delete field ${f.name}`); + deletedFields.push(colors.red(`- Field ${f.name} deleted from ${_model.name}`)); }); }); + if (deletedModels.length > 0) { + console.log(colors.bold('\nDeleted Models:')); + deletedModels.forEach((msg) => console.log(msg)); + } + + if (deletedEnums.length > 0) { + console.log(colors.bold('\nDeleted Enums:')); + deletedEnums.forEach((msg) => console.log(msg)); + } + + if (addedFields.length > 0) { + console.log(colors.bold('\nAdded Fields:')); + addedFields.forEach((msg) => console.log(msg)); + } + + if (deletedAttributes.length > 0) { + console.log(colors.bold('\nDeleted Attributes:')); + deletedAttributes.forEach((msg) => console.log(msg)); + } + + if (deletedFields.length > 0) { + console.log(colors.bold('\nDeleted Fields:')); + deletedFields.forEach((msg) => console.log(msg)); + } + if (options.out && !fs.lstatSync(options.out).isFile()) { throw new Error(`Output path ${options.out} is not a file`); } @@ -311,7 +358,7 @@ async function runPull(options: PullOptions) { if (options.out) { const zmodelSchema = generator.generate(newModel); - console.log(`Writing to ${options.out}`); + console.log(colors.blue(`Writing to ${options.out}`)); const outPath = options.out ? 
path.resolve(options.out) : schemaFile; @@ -319,12 +366,15 @@ async function runPull(options: PullOptions) { } else { docs.forEach(({ uri, parseResult: { value: model } }) => { const zmodelSchema = generator.generate(model); - console.log(`Writing to ${uri.path}`); + console.log(colors.blue(`Writing to ${uri.path}`)); fs.writeFileSync(uri.fsPath, zmodelSchema); }); } + + console.log(colors.green.bold('\nPull completed successfully!')); } catch (error) { - console.log(error); + spinner.fail('Pull failed'); + console.error(error); throw error; } -} +} \ No newline at end of file diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 4a661afb9..685038298 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -1,4 +1,5 @@ import type { ZModelServices } from '@zenstackhq/language'; +import colors from 'colors'; import { isEnum, type Attribute, @@ -38,7 +39,7 @@ export function syncEnums({ if (provider.isSupportedFeature('NativeEnum')) { for (const dbEnum of dbEnums) { const { modified, name } = resolveNameCasing(options.modelCasing, dbEnum.enum_type); - if (modified) console.log(`Mapping enum ${dbEnum.enum_type} to ${name}`); + if (modified) console.log(colors.gray(`Mapping enum ${dbEnum.enum_type} to ${name}`)); const factory = new EnumFactory().setName(name); if (modified || options.alwaysMap) factory.addAttribute((builder) => @@ -344,16 +345,18 @@ export function syncTable({ table.indexes.forEach((index) => { if (index.predicate) { //These constraints are not supported by Zenstack, because Zenstack currently does not fully support check constraints. Read more: https://pris.ly/d/check-constraints - console.log( - 'These constraints are not supported by Zenstack. Read more: https://pris.ly/d/check-constraints', - `- Model: "${table.name}", constraint: "${index.name}"`, + console.warn( + colors.yellow( + `These constraints are not supported by Zenstack. Read more: https://pris.ly/d/check-constraints\n- Model: "${table.name}", constraint: "${index.name}"`, + ), ); return; } if (index.columns.find((c) => c.expression)) { - console.log( - 'These constraints are not supported by Zenstack. Read more: https://pris.ly/d/check-constraints', - `- Model: "${table.name}", constraint: "${index.name}"`, + console.warn( + colors.yellow( + `These constraints are not supported by Zenstack. 
Read more: https://pris.ly/d/check-constraints\n- Model: "${table.name}", constraint: "${index.name}"`, + ), ); return; } From 575f0c8a15a06e60902c59756f6cfc8a41e3cb91 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 9 Jan 2026 16:04:44 +0100 Subject: [PATCH 30/68] fix(cli): fixes field casing and sort issues --- packages/cli/src/actions/pull/index.ts | 24 +++--- packages/cli/test/db/pull.test.ts | 100 ++++++++++++++++++++++++- packages/cli/test/utils.ts | 20 +++-- 3 files changed, 123 insertions(+), 21 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 685038298..05a54e035 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -378,7 +378,8 @@ export function syncTable({ } arrayExpr.addItem((itemBuilder) => { const refExpr = itemBuilder.ReferenceExpr.setTarget(ref); - if (c.order !== 'ASC') refExpr.addArg((ab) => ab.StringLiteral.setValue('DESC'), 'sort'); + if (c.order && c.order !== 'ASC') + refExpr.addArg((ab) => ab.StringLiteral.setValue('DESC'), 'sort'); return refExpr; }); @@ -402,6 +403,7 @@ export function syncRelation({ model, relation, services, + options, selfRelation, simmilarRelations, }: { @@ -444,10 +446,12 @@ export function syncRelation({ const fieldPrefix = /[0-9]/g.test(sourceModel.name.charAt(0)) ? '_' : ''; const relationName = `${relation.table}${simmilarRelations > 1 ? `_${relation.column}` : ''}To${relation.references.table}`; - let sourceFieldName = + let { name: sourceFieldName } = resolveNameCasing( + options.fieldCasing, simmilarRelations > 0 ? `${fieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}` - : targetModel.name; + : targetModel.name, + ); if (sourceModel.fields.find((f) => f.name === sourceFieldName)) { sourceFieldName = `${sourceFieldName}To${targetModel.name.charAt(0).toLowerCase()}${targetModel.name.slice(1)}_${relation.references.column}`; @@ -498,10 +502,12 @@ export function syncRelation({ sourceModel.fields.push(sourceFieldFactory.node); const oppositeFieldPrefix = /[0-9]/g.test(targetModel.name.charAt(0)) ? '_' : ''; - const oppositeFieldName = + const { name: oppositeFieldName } = resolveNameCasing( + options.fieldCasing, simmilarRelations > 0 ? 
`${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}` - : sourceModel.name; + : sourceModel.name, + ); const targetFieldFactory = new DataFieldFactory() .setContainer(targetModel) @@ -519,8 +525,8 @@ export function syncRelation({ targetModel.fields.push(targetFieldFactory.node); - targetModel.fields.sort((a, b) => { - if (a.type.reference && b.type.reference) return 0; - return a.name.localeCompare(b.name); - }); + // targetModel.fields.sort((a, b) => { + // if (a.type.reference || b.type.reference) return a.name.localeCompare(b.name); + // return 0; + // }); } diff --git a/packages/cli/test/db/pull.test.ts b/packages/cli/test/db/pull.test.ts index 35378c2ea..34d79036a 100644 --- a/packages/cli/test/db/pull.test.ts +++ b/packages/cli/test/db/pull.test.ts @@ -1,14 +1,20 @@ import fs from 'node:fs'; import path from 'node:path'; import { describe, expect, it } from 'vitest'; -import { createProject, runCli } from '../utils'; +import { createProject, getDefaultPrelude, runCli } from '../utils'; +import { loadSchemaDocument } from '../../src/actions/action-utils'; +import { ZModelCodeGenerator } from '@zenstackhq/language'; const getSchema = (workDir: string) => fs.readFileSync(path.join(workDir, 'zenstack/schema.zmodel')).toString(); +const generator = new ZModelCodeGenerator({ + quote: 'double', + indent: 4, +}); describe('DB pull', () => { - it('simple schema', () => { + it("simple schema - pull shouldn't modify the schema", () => { const workDir = createProject( -`model User { + `model User { id String @id @default(cuid()) email String @unique @map("email_address") name String? @default("Anonymous") @@ -85,7 +91,8 @@ enum Role { USER ADMIN MODERATOR -}`); +}`, + ); runCli('format', workDir); runCli('db push', workDir); @@ -93,4 +100,89 @@ enum Role { runCli('db pull --indent 4', workDir); expect(getSchema(workDir)).toEqual(originalSchema); }); + + it('simple schema - pull shouldn recreate the schema.zmodel', async () => { + const workDir = createProject( + `model Post { + id Int @id @default(autoincrement()) + authorId String + title String + content String? + published Boolean @default(false) + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + slug String + score Float @default(0.0) + metadata Json? + author User @relation(fields: [authorId], references: [id], onDelete: Cascade) + PostTag PostTag[] + + @@unique([authorId, slug]) + @@index([authorId, published]) +} +model PostTag { + post Post @relation(fields: [postId], references: [id], onDelete: Cascade) + postId Int + tag Tag @relation(fields: [tagId], references: [id], onDelete: Cascade) + tagId Int + assignedAt DateTime @default(now()) + note String? @default("initial") + + @@id([postId, tagId]) +} +model User { + id String @id @default(cuid()) + email String @unique + name String? @default("Anonymous") + role Role @default(USER) + profile Profile? + shared_profile Profile? @relation("shared") + posts Post[] + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + jsonData Json? + balance Decimal @default(0.00) + isActive Boolean @default(true) + bigCounter BigInt @default(0) + bytes Bytes? + + @@index([role]) +} + +model Profile { + id Int @id @default(autoincrement()) + user User @relation(fields: [userId], references: [id], onDelete: Cascade) + userId String @unique + user_shared User @relation("shared", fields: [shared_userId], references: [id], onDelete: Cascade) + shared_userId String @unique + bio String? + avatarUrl String? 
+} + +model Tag { + id Int @id @default(autoincrement()) + name String @unique + posts PostTag[] + createdAt DateTime @default(now()) + + @@index([name], name: "tag_name_idx") +} + +enum Role { + USER + ADMIN + MODERATOR +}`, + ); + console.log(workDir) + runCli('format', workDir); + runCli('db push', workDir); + const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); + const { model } = await loadSchemaDocument(schemaFile, { returnServices: true }); + const originalSchema = generator.generate(model); + fs.writeFileSync(path.join(workDir, 'zenstack/schema.zmodel'), getDefaultPrelude()); + + runCli('db pull --indent 4 --field-casing=camel', workDir); + expect(getSchema(workDir)).toEqual(originalSchema); + }); }); diff --git a/packages/cli/test/utils.ts b/packages/cli/test/utils.ts index 7017b622f..310fea122 100644 --- a/packages/cli/test/utils.ts +++ b/packages/cli/test/utils.ts @@ -34,10 +34,7 @@ function getTestDbName(provider: string) { ); } -export function createProject( - zmodel: string, - options?: { customPrelude?: boolean; provider?: 'sqlite' | 'postgresql' }, -) { +export function getDefaultPrelude(options?: { provider?: 'sqlite' | 'postgresql' }) { const provider = (options?.provider || getTestDbProvider()) ?? 'sqlite'; const dbName = getTestDbName(provider); const dbUrl = @@ -46,18 +43,25 @@ export function createProject( : `postgres://${TEST_PG_CONFIG.user}:${TEST_PG_CONFIG.password}@${TEST_PG_CONFIG.host}:${TEST_PG_CONFIG.port}/${dbName}`; const ZMODEL_PRELUDE = `datasource db { - provider = "${provider}" - url = "${dbUrl}" + provider = "${provider}" + url = "${dbUrl}" } `; + return ZMODEL_PRELUDE; +} + +export function createProject( + zmodel: string, + options?: { customPrelude?: boolean; provider?: 'sqlite' | 'postgresql' }, +) { const workDir = createTestProject(); fs.mkdirSync(path.join(workDir, 'zenstack'), { recursive: true }); const schemaPath = path.join(workDir, 'zenstack/schema.zmodel'); - fs.writeFileSync(schemaPath, !options?.customPrelude ? `${ZMODEL_PRELUDE}\n${zmodel}` : zmodel); + fs.writeFileSync(schemaPath, !options?.customPrelude ? `${getDefaultPrelude()}\n${zmodel}` : zmodel); return workDir; } export function runCli(command: string, cwd: string) { const cli = path.join(__dirname, '../dist/index.js'); - execSync(`node ${cli} ${command}`, { cwd }); + execSync(`node ${cli} ${command}`, { cwd, stdio: 'inherit' }); } From 219337d4a66de863d60292a56ffddc0eddb68d5e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Tue, 27 Jan 2026 20:55:40 +0100 Subject: [PATCH 31/68] chore(cli): remove temporary test script Deletes an unused script used for experimenting with URI path resolution. Cleans up the codebase by removing development-only artifacts. 
--- packages/cli/src/test.ts | 9 --------- 1 file changed, 9 deletions(-) delete mode 100644 packages/cli/src/test.ts diff --git a/packages/cli/src/test.ts b/packages/cli/src/test.ts deleted file mode 100644 index b83716dfa..000000000 --- a/packages/cli/src/test.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { URI, Utils } from 'vscode-uri'; - -const base = URI.parse('file:/d/zenstack/'); -const relative = URI.parse('file:./c/asdasd.db'); -console.log(base); -console.log(relative); -console.log(Utils.resolvePath(base, relative.path)); -// console.log(URI.parse('file:/c/asdasd.db')); -// console.log(URI.parse('file:./c/asdasd.db')); From 098873e47c06344916627ddd078a11ce04ec9e24 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Tue, 27 Jan 2026 21:18:44 +0100 Subject: [PATCH 32/68] chore: update pnpm-lock.yaml --- pnpm-lock.yaml | 142 ++++++++++++++++++++++++------------------------- 1 file changed, 71 insertions(+), 71 deletions(-) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 8c635fc5e..3412d6554 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -109,7 +109,7 @@ importers: version: 20.19.24 '@vitest/coverage-v8': specifier: ^4.0.16 - version: 4.0.16(vitest@4.0.15(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0)) + version: 4.0.16(vitest@4.0.14(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0)) eslint: specifier: ~9.29.0 version: 9.29.0(jiti@2.6.1) @@ -142,7 +142,7 @@ importers: version: 8.34.1(eslint@9.29.0(jiti@2.6.1))(typescript@5.9.3) vitest: specifier: ^4.0.14 - version: 4.0.15(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0) + version: 4.0.14(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0) yaml: specifier: ^2.8.0 version: 2.8.0 @@ -194,7 +194,7 @@ importers: dependencies: '@dotenvx/dotenvx': specifier: ^1.51.0 - version: 1.51.4 + version: 1.52.0 '@zenstackhq/common-helpers': specifier: workspace:* version: link:../common-helpers @@ -204,6 +204,9 @@ importers: '@zenstackhq/orm': specifier: workspace:* version: link:../orm + '@zenstackhq/schema': + specifier: workspace:* + version: link:../schema '@zenstackhq/sdk': specifier: workspace:* version: link:../sdk @@ -264,9 +267,6 @@ importers: ts-pattern: specifier: 'catalog:' version: 5.7.1 - vscode-uri: - specifier: ^3.1.0 - version: 3.1.0 devDependencies: '@types/better-sqlite3': specifier: 'catalog:' @@ -1174,13 +1174,13 @@ importers: devDependencies: '@types/pg': specifier: ^8.15.6 - version: 8.16.0 + version: 8.15.6 '@zenstackhq/typescript-config': specifier: workspace:* version: link:../../../packages/config/typescript-config bun-types: specifier: ^1.3.3 - version: 1.3.4 + version: 1.3.3 tests/runtimes/edge-runtime: dependencies: @@ -1208,7 +1208,7 @@ importers: devDependencies: '@types/pg': specifier: ^8.15.6 - version: 8.16.0 + version: 8.15.6 '@zenstackhq/typescript-config': specifier: workspace:* version: link:../../../packages/config/typescript-config @@ -1564,8 +1564,8 @@ packages: resolution: {integrity: sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw==} engines: {node: '>=18'} - '@dotenvx/dotenvx@1.51.4': - resolution: {integrity: 
sha512-AoziS8lRQ3ew/lY5J4JSlzYSN9Fo0oiyMBY37L3Bwq4mOQJT5GSrdZYLFPt6pH1LApDI3ZJceNyx+rHRACZSeQ==} + '@dotenvx/dotenvx@1.52.0': + resolution: {integrity: sha512-CaQcc8JvtzQhUSm9877b6V4Tb7HCotkcyud9X2YwdqtQKwgljkMRwU96fVYKnzN3V0Hj74oP7Es+vZ0mS+Aa1w==} hasBin: true '@dxup/nuxt@0.2.2': @@ -3666,8 +3666,8 @@ packages: '@types/pg@8.11.11': resolution: {integrity: sha512-kGT1qKM8wJQ5qlawUrEkXgvMSXoV213KfMGXcwfDwUIfUHXqXYXOfS1nE1LINRJVVVx5wCm70XnFlMHaIcQAfw==} - '@types/pg@8.16.0': - resolution: {integrity: sha512-RmhMd/wD+CF8Dfo+cVIy3RR5cl8CyfXQ0tGgW6XBL8L4LM/UTEbNXYRbLwU6w+CgrKBNbrQWt4FUtTfaU5jSYQ==} + '@types/pg@8.15.6': + resolution: {integrity: sha512-NoaMtzhxOrubeL/7UZuNTrejB4MPAJ0RpxZqXQf2qXuVlTPuG6Y8p4u9dKRaue4yjmC7ZhzVO2/Yyyn25znrPQ==} '@types/pg@8.16.0': resolution: {integrity: sha512-RmhMd/wD+CF8Dfo+cVIy3RR5cl8CyfXQ0tGgW6XBL8L4LM/UTEbNXYRbLwU6w+CgrKBNbrQWt4FUtTfaU5jSYQ==} @@ -3971,11 +3971,11 @@ packages: '@vitest/browser': optional: true - '@vitest/expect@4.0.15': - resolution: {integrity: sha512-Gfyva9/GxPAWXIWjyGDli9O+waHDC0Q0jaLdFP1qPAUUfo1FEXPXUfUkp3eZA0sSq340vPycSyOlYUeM15Ft1w==} + '@vitest/expect@4.0.14': + resolution: {integrity: sha512-RHk63V3zvRiYOWAV0rGEBRO820ce17hz7cI2kDmEdfQsBjT2luEKB5tCOc91u1oSQoUOZkSv3ZyzkdkSLD7lKw==} - '@vitest/mocker@4.0.15': - resolution: {integrity: sha512-CZ28GLfOEIFkvCFngN8Sfx5h+Se0zN+h4B7yOsPVCcgtiO7t5jt9xQh2E1UkFep+eb9fjyMfuC5gBypwb07fvQ==} + '@vitest/mocker@4.0.14': + resolution: {integrity: sha512-RzS5NujlCzeRPF1MK7MXLiEFpkIXeMdQ+rN3Kk3tDI9j0mtbr7Nmuq67tpkOJQpgyClbOltCXMjLZicJHsH5Cg==} peerDependencies: msw: ^2.4.9 vite: ^6.0.0 || ^7.0.0-0 @@ -3985,23 +3985,23 @@ packages: vite: optional: true - '@vitest/pretty-format@4.0.15': - resolution: {integrity: sha512-SWdqR8vEv83WtZcrfLNqlqeQXlQLh2iilO1Wk1gv4eiHKjEzvgHb2OVc3mIPyhZE6F+CtfYjNlDJwP5MN6Km7A==} + '@vitest/pretty-format@4.0.14': + resolution: {integrity: sha512-SOYPgujB6TITcJxgd3wmsLl+wZv+fy3av2PpiPpsWPZ6J1ySUYfScfpIt2Yv56ShJXR2MOA6q2KjKHN4EpdyRQ==} '@vitest/pretty-format@4.0.16': resolution: {integrity: sha512-eNCYNsSty9xJKi/UdVD8Ou16alu7AYiS2fCPRs0b1OdhJiV89buAXQLpTbe+X8V9L6qrs9CqyvU7OaAopJYPsA==} - '@vitest/runner@4.0.15': - resolution: {integrity: sha512-+A+yMY8dGixUhHmNdPUxOh0la6uVzun86vAbuMT3hIDxMrAOmn5ILBHm8ajrqHE0t8R9T1dGnde1A5DTnmi3qw==} + '@vitest/runner@4.0.14': + resolution: {integrity: sha512-BsAIk3FAqxICqREbX8SetIteT8PiaUL/tgJjmhxJhCsigmzzH8xeadtp7LRnTpCVzvf0ib9BgAfKJHuhNllKLw==} - '@vitest/snapshot@4.0.15': - resolution: {integrity: sha512-A7Ob8EdFZJIBjLjeO0DZF4lqR6U7Ydi5/5LIZ0xcI+23lYlsYJAfGn8PrIWTYdZQRNnSRlzhg0zyGu37mVdy5g==} + '@vitest/snapshot@4.0.14': + resolution: {integrity: sha512-aQVBfT1PMzDSA16Y3Fp45a0q8nKexx6N5Amw3MX55BeTeZpoC08fGqEZqVmPcqN0ueZsuUQ9rriPMhZ3Mu19Ag==} - '@vitest/spy@4.0.15': - resolution: {integrity: sha512-+EIjOJmnY6mIfdXtE/bnozKEvTC4Uczg19yeZ2vtCz5Yyb0QQ31QWVQ8hswJ3Ysx/K2EqaNsVanjr//2+P3FHw==} + '@vitest/spy@4.0.14': + resolution: {integrity: sha512-JmAZT1UtZooO0tpY3GRyiC/8W7dCs05UOq9rfsUUgEZEdq+DuHLmWhPsrTt0TiW7WYeL/hXpaE07AZ2RCk44hg==} - '@vitest/utils@4.0.15': - resolution: {integrity: sha512-HXjPW2w5dxhTD0dLwtYHDnelK3j8sR8cWIaLxr22evTyY6q8pRCjZSmhRWVjBaOVXChQd6AwMzi9pucorXCPZA==} + '@vitest/utils@4.0.14': + resolution: {integrity: sha512-hLqXZKAWNg8pI+SQXyXxWCTOpA3MvsqcbVeNgSi8x/CSN2wi26dSzn1wrOhmCmFjEvN9p8/kLFRHa6PI8jHazw==} '@vitest/utils@4.0.16': resolution: {integrity: sha512-h8z9yYhV3e1LEfaQ3zdypIrnAg/9hguReGZoS7Gl0aBG5xgA410zBqECqmaF/+RkTggRsfnzc1XaAHA6bmUufA==} @@ -4471,8 +4471,8 @@ packages: buffer@6.0.3: resolution: {integrity: 
sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==} - bun-types@1.3.4: - resolution: {integrity: sha512-5ua817+BZPZOlNaRgGBpZJOSAQ9RQ17pkwPD0yR7CfJg+r8DgIILByFifDTa+IPDDxzf5VNhtNlcKqFzDgJvlQ==} + bun-types@1.3.3: + resolution: {integrity: sha512-z3Xwlg7j2l9JY27x5Qn3Wlyos8YAp0kKRlrePAOjgjMGS5IG6E7Jnlx736vH9UVI4wUICwwhC9anYL++XeOgTQ==} bundle-name@4.1.0: resolution: {integrity: sha512-tjwM5exMg6BGRI+kNmTntNsvdZS1X8BFYS6tnJ2hdH0kVxM6/eVZ2xy+FqStSWvYmtfFMDLIxurorHwDKfDz5Q==} @@ -5115,8 +5115,8 @@ packages: eastasianwidth@0.2.0: resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} - eciesjs@0.4.16: - resolution: {integrity: sha512-dS5cbA9rA2VR4Ybuvhg6jvdmp46ubLn3E+px8cG/35aEDNclrqoCjg6mt0HYZ/M+OoESS3jSkCrqk1kWAEhWAw==} + eciesjs@0.4.17: + resolution: {integrity: sha512-TOOURki4G7sD1wDCjj7NfLaXZZ49dFOeEb5y39IXpb8p0hRzVvfvzZHOi5JcT+PpyAbi/Y+lxPb8eTag2WYH8w==} engines: {bun: '>=1', deno: '>=2', node: '>=16'} ee-first@1.1.1: @@ -5424,8 +5424,8 @@ packages: resolution: {integrity: sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==} engines: {node: '>=6'} - expect-type@1.3.0: - resolution: {integrity: sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==} + expect-type@1.2.2: + resolution: {integrity: sha512-JhFGDVJ7tmDJItKhYgJCGLOWjuK9vPxiXoUFLwLDc99NlmklilbiQJwoctZtt13+xMw91MCk/REan6MWHqDjyA==} engines: {node: '>=12.0.0'} express@5.1.0: @@ -8602,18 +8602,18 @@ packages: vite: optional: true - vitest@4.0.15: - resolution: {integrity: sha512-n1RxDp8UJm6N0IbJLQo+yzLZ2sQCDyl1o0LeugbPWf8+8Fttp29GghsQBjYJVmWq3gBFfe9Hs1spR44vovn2wA==} + vitest@4.0.14: + resolution: {integrity: sha512-d9B2J9Cm9dN9+6nxMnnNJKJCtcyKfnHj15N6YNJfaFHRLua/d3sRKU9RuKmO9mB0XdFtUizlxfz/VPbd3OxGhw==} engines: {node: ^20.0.0 || ^22.0.0 || >=24.0.0} hasBin: true peerDependencies: '@edge-runtime/vm': '*' '@opentelemetry/api': ^1.9.0 '@types/node': ^20.0.0 || ^22.0.0 || >=24.0.0 - '@vitest/browser-playwright': 4.0.15 - '@vitest/browser-preview': 4.0.15 - '@vitest/browser-webdriverio': 4.0.15 - '@vitest/ui': 4.0.15 + '@vitest/browser-playwright': 4.0.14 + '@vitest/browser-preview': 4.0.14 + '@vitest/browser-webdriverio': 4.0.14 + '@vitest/ui': 4.0.14 happy-dom: '*' jsdom: '*' peerDependenciesMeta: @@ -9401,11 +9401,11 @@ snapshots: '@csstools/css-tokenizer@3.0.4': optional: true - '@dotenvx/dotenvx@1.51.4': + '@dotenvx/dotenvx@1.52.0': dependencies: commander: 11.1.0 dotenv: 17.2.3 - eciesjs: 0.4.16 + eciesjs: 0.4.17 execa: 5.1.1 fdir: 6.5.0(picomatch@4.0.3) ignore: 5.3.2 @@ -11266,7 +11266,7 @@ snapshots: pg-protocol: 1.10.3 pg-types: 4.0.2 - '@types/pg@8.16.0': + '@types/pg@8.15.6': dependencies: '@types/node': 20.19.24 pg-protocol: 1.10.3 @@ -11467,7 +11467,7 @@ snapshots: fast-glob: 3.3.3 is-glob: 4.0.3 minimatch: 9.0.5 - semver: 7.7.2 + semver: 7.7.3 ts-api-utils: 2.1.0(typescript@5.9.3) typescript: 5.9.3 transitivePeerDependencies: @@ -11623,7 +11623,7 @@ snapshots: vite: 7.3.0(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.2) vue: 3.5.26(typescript@5.9.3) - '@vitest/coverage-v8@4.0.16(vitest@4.0.15(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0))': + 
'@vitest/coverage-v8@4.0.16(vitest@4.0.14(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0))': dependencies: '@bcoe/v8-coverage': 1.0.2 '@vitest/utils': 4.0.16 @@ -11636,22 +11636,22 @@ snapshots: obug: 2.1.1 std-env: 3.10.0 tinyrainbow: 3.0.3 - vitest: 4.0.15(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0) + vitest: 4.0.14(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0) transitivePeerDependencies: - supports-color - '@vitest/expect@4.0.15': + '@vitest/expect@4.0.14': dependencies: '@standard-schema/spec': 1.0.0 '@types/chai': 5.2.2 - '@vitest/spy': 4.0.15 - '@vitest/utils': 4.0.15 + '@vitest/spy': 4.0.14 + '@vitest/utils': 4.0.14 chai: 6.2.1 tinyrainbow: 3.0.3 - '@vitest/mocker@4.0.15(vite@7.3.0(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0))': + '@vitest/mocker@4.0.14(vite@7.3.0(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0))': dependencies: - '@vitest/spy': 4.0.15 + '@vitest/spy': 4.0.14 estree-walker: 3.0.3 magic-string: 0.30.21 optionalDependencies: @@ -11674,22 +11674,22 @@ snapshots: dependencies: tinyrainbow: 3.0.3 - '@vitest/runner@4.0.15': + '@vitest/runner@4.0.14': dependencies: - '@vitest/utils': 4.0.15 + '@vitest/utils': 4.0.14 pathe: 2.0.3 - '@vitest/snapshot@4.0.15': + '@vitest/snapshot@4.0.14': dependencies: - '@vitest/pretty-format': 4.0.15 + '@vitest/pretty-format': 4.0.14 magic-string: 0.30.21 pathe: 2.0.3 - '@vitest/spy@4.0.15': {} + '@vitest/spy@4.0.14': {} - '@vitest/utils@4.0.15': + '@vitest/utils@4.0.14': dependencies: - '@vitest/pretty-format': 4.0.15 + '@vitest/pretty-format': 4.0.14 tinyrainbow: 3.0.3 '@vitest/utils@4.0.16': @@ -12251,7 +12251,7 @@ snapshots: base64-js: 1.5.1 ieee754: 1.2.1 - bun-types@1.3.4: + bun-types@1.3.3: dependencies: '@types/node': 20.19.24 @@ -12805,7 +12805,7 @@ snapshots: eastasianwidth@0.2.0: {} - eciesjs@0.4.16: + eciesjs@0.4.17: dependencies: '@ecies/ciphers': 0.2.5(@noble/ciphers@1.3.0) '@noble/ciphers': 1.3.0 @@ -13339,7 +13339,7 @@ snapshots: expand-template@2.0.3: {} - expect-type@1.3.0: {} + expect-type@1.2.2: {} express@5.1.0: dependencies: @@ -14203,7 +14203,7 @@ snapshots: kysely-bun-sqlite@0.4.0(kysely@0.28.8): dependencies: - bun-types: 1.3.4 + bun-types: 1.3.3 kysely: 0.28.8 kysely@0.28.8: {} @@ -16922,24 +16922,24 @@ snapshots: optionalDependencies: vite: 7.3.0(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.2) - vitest@4.0.15(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0): + vitest@4.0.14(@edge-runtime/vm@5.0.0)(@types/node@20.19.24)(happy-dom@20.0.10)(jiti@2.6.1)(jsdom@27.1.0)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0): dependencies: - '@vitest/expect': 4.0.15 - '@vitest/mocker': 4.0.15(vite@7.3.0(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0)) - '@vitest/pretty-format': 4.0.15 - '@vitest/runner': 4.0.15 - '@vitest/snapshot': 4.0.15 - '@vitest/spy': 4.0.15 - '@vitest/utils': 4.0.15 + '@vitest/expect': 4.0.14 + '@vitest/mocker': 
4.0.14(vite@7.3.0(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0)) + '@vitest/pretty-format': 4.0.14 + '@vitest/runner': 4.0.14 + '@vitest/snapshot': 4.0.14 + '@vitest/spy': 4.0.14 + '@vitest/utils': 4.0.14 es-module-lexer: 1.7.0 - expect-type: 1.3.0 + expect-type: 1.2.2 magic-string: 0.30.21 obug: 2.1.1 pathe: 2.0.3 picomatch: 4.0.3 std-env: 3.10.0 tinybench: 2.9.0 - tinyexec: 1.0.2 + tinyexec: 0.3.2 tinyglobby: 0.2.15 tinyrainbow: 3.0.3 vite: 7.3.0(@types/node@20.19.24)(jiti@2.6.1)(lightningcss@1.30.2)(terser@5.44.0)(tsx@4.20.3)(yaml@2.8.0) From 99f180ed988d72b926e84fac51d3ed24ba81b507 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Tue, 27 Jan 2026 22:22:31 +0100 Subject: [PATCH 33/68] feat(cli): add MySQL support for schema introspection Introduces a MySQL-specific introspection provider to support pulling existing database schemas into ZenStack. The implementation includes logic for mapping MySQL data types to ZenStack types, handling auto-incrementing fields, and parsing MySQL-specific enum definitions. It utilizes dynamic imports for database drivers to minimize the CLI footprint for users not targeting MySQL. --- .../cli/src/actions/pull/provider/index.ts | 2 + .../cli/src/actions/pull/provider/mysql.ts | 424 ++++++++++++++++++ 2 files changed, 426 insertions(+) create mode 100644 packages/cli/src/actions/pull/provider/mysql.ts diff --git a/packages/cli/src/actions/pull/provider/index.ts b/packages/cli/src/actions/pull/provider/index.ts index e712ac983..7c93746d4 100644 --- a/packages/cli/src/actions/pull/provider/index.ts +++ b/packages/cli/src/actions/pull/provider/index.ts @@ -1,11 +1,13 @@ import type { DataSourceProviderType } from '@zenstackhq/schema'; export * from './provider'; +import { mysql } from './mysql'; import { postgresql } from './postgresql'; import type { IntrospectionProvider } from './provider'; import { sqlite } from './sqlite'; export const providers: Record = { + mysql, postgresql, sqlite, }; diff --git a/packages/cli/src/actions/pull/provider/mysql.ts b/packages/cli/src/actions/pull/provider/mysql.ts new file mode 100644 index 000000000..123463725 --- /dev/null +++ b/packages/cli/src/actions/pull/provider/mysql.ts @@ -0,0 +1,424 @@ +import type { BuiltinType } from '@zenstackhq/language/ast'; +import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; +import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; +import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; + +// Note: We dynamically import mysql2 inside the async function to avoid +// requiring it at module load time for environments that don't use MySQL. 
+ +export const mysql: IntrospectionProvider = { + isSupportedFeature(feature) { + switch (feature) { + case 'NativeEnum': + return true; + case 'Schema': + default: + return false; + } + }, + getBuiltinType(type) { + const t = (type || '').toLowerCase().trim(); + + // MySQL doesn't have native array types + const isArray = false; + + switch (t) { + // integers + case 'tinyint': + case 'smallint': + case 'mediumint': + case 'int': + case 'integer': + return { type: 'Int', isArray }; + case 'bigint': + return { type: 'BigInt', isArray }; + + // decimals and floats + case 'decimal': + case 'numeric': + return { type: 'Decimal', isArray }; + case 'float': + case 'double': + case 'real': + return { type: 'Float', isArray }; + + // boolean (MySQL uses TINYINT(1) for boolean) + case 'boolean': + case 'bool': + return { type: 'Boolean', isArray }; + + // strings + case 'char': + case 'varchar': + case 'tinytext': + case 'text': + case 'mediumtext': + case 'longtext': + return { type: 'String', isArray }; + + // dates/times + case 'date': + case 'time': + case 'datetime': + case 'timestamp': + case 'year': + return { type: 'DateTime', isArray }; + + // binary + case 'binary': + case 'varbinary': + case 'tinyblob': + case 'blob': + case 'mediumblob': + case 'longblob': + return { type: 'Bytes', isArray }; + + // json + case 'json': + return { type: 'Json', isArray }; + + default: + // Handle ENUM type - MySQL returns enum values like "enum('val1','val2')" + if (t.startsWith('enum(')) { + return { type: 'String', isArray }; + } + // Handle SET type + if (t.startsWith('set(')) { + return { type: 'String', isArray }; + } + return { type: 'Unsupported' as const, isArray }; + } + }, + getDefaultDatabaseType(type: BuiltinType) { + switch (type) { + case 'String': + return { type: 'varchar', precisition: 191 }; + case 'Boolean': + return { type: 'tinyint', precisition: 1 }; + case 'Int': + return { type: 'int' }; + case 'BigInt': + return { type: 'bigint' }; + case 'Float': + return { type: 'double' }; + case 'Decimal': + return { type: 'decimal', precisition: 65 }; + case 'DateTime': + return { type: 'datetime', precisition: 3 }; + case 'Json': + return { type: 'json' }; + case 'Bytes': + return { type: 'longblob' }; + } + }, + async introspect(connectionString: string): Promise { + const mysql = await import('mysql2/promise'); + const connection = await mysql.createConnection(connectionString); + + try { + // Extract database name from connection string + const url = new URL(connectionString); + const databaseName = url.pathname.replace('/', ''); + + if (!databaseName) { + throw new Error('Database name not found in connection string'); + } + + // Introspect tables + const [tableRows] = (await connection.execute(getTableIntrospectionQuery(databaseName))) as [ + IntrospectedTable[], + unknown, + ]; + const tables: IntrospectedTable[] = []; + + for (const row of tableRows) { + const columns = typeof row.columns === 'string' ? JSON.parse(row.columns) : row.columns; + const indexes = typeof row.indexes === 'string' ? 
JSON.parse(row.indexes) : row.indexes; + + tables.push({ + schema: row.schema || '', + name: row.name, + type: row.type as 'table' | 'view', + definition: row.definition, + columns: columns || [], + indexes: indexes || [], + }); + } + + // Introspect enums (MySQL stores enum values in column definitions) + const [enumRows] = (await connection.execute(getEnumIntrospectionQuery(databaseName))) as [ + { table_name: string; column_name: string; column_type: string }[], + unknown, + ]; + + const enums: IntrospectedEnum[] = enumRows.map((row) => { + // Parse enum values from column_type like "enum('val1','val2','val3')" + const values = parseEnumValues(row.column_type); + return { + schema_name: databaseName, + // Create a unique enum type name based on table and column + enum_type: `${row.table_name}_${row.column_name}`, + values, + }; + }); + + return { tables, enums }; + } finally { + await connection.end(); + } + }, + getDefaultValue({ defaultValue, fieldName, services, enums }) { + const val = defaultValue.trim(); + const factories: DataFieldAttributeFactory[] = []; + + const defaultAttr = new DataFieldAttributeFactory().setDecl(getAttributeRef('@default', services)); + + // Handle CURRENT_TIMESTAMP + if (val === 'CURRENT_TIMESTAMP' || val === 'current_timestamp()' || val === 'now()') { + factories.push(defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)))); + + if (fieldName.toLowerCase() === 'updatedat' || fieldName.toLowerCase() === 'updated_at') { + factories.push(new DataFieldAttributeFactory().setDecl(getAttributeRef('@updatedAt', services))); + } + return factories; + } + + // Handle auto_increment + if (val === 'auto_increment') { + factories.push( + defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services))), + ); + return factories; + } + + // Handle NULL + if (val.toUpperCase() === 'NULL') { + return []; + } + + // Handle boolean values + if (val === 'true' || val === '1' || val === "b'1'") { + factories.push(defaultAttr.addArg((ab) => ab.BooleanLiteral.setValue(true))); + return factories; + } + if (val === 'false' || val === '0' || val === "b'0'") { + factories.push(defaultAttr.addArg((ab) => ab.BooleanLiteral.setValue(false))); + return factories; + } + + // Handle numeric values + if (/^-?\d+$/.test(val) || /^-?\d+(\.\d+)?$/.test(val)) { + factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(val))); + return factories; + } + + // Handle string values (quoted with single quotes) + if (val.startsWith("'") && val.endsWith("'")) { + const strippedValue = val.slice(1, -1).replace(/''/g, "'"); + + // Check if it's an enum value + const enumDef = enums.find((e) => e.fields.find((v) => getDbName(v) === strippedValue)); + if (enumDef) { + const enumField = enumDef.fields.find((v) => getDbName(v) === strippedValue); + if (enumField) { + factories.push(defaultAttr.addArg((ab) => ab.ReferenceExpr.setTarget(enumField))); + return factories; + } + } + + factories.push(defaultAttr.addArg((ab) => ab.StringLiteral.setValue(strippedValue))); + return factories; + } + + // Handle function calls (e.g., uuid(), now()) + if (val.includes('(') && val.includes(')')) { + // Check for known functions + if (val.toLowerCase() === 'uuid()') { + factories.push( + defaultAttr.addArg((a) => a.InvocationExpr.setFunction(getFunctionRef('uuid', services))), + ); + return factories; + } + + // For other functions, use dbgenerated + factories.push( + defaultAttr.addArg((a) => + 
a.InvocationExpr.setFunction(getFunctionRef('dbgenerated', services)).addArg((a) => + a.setValue((v) => v.StringLiteral.setValue(val)), + ), + ), + ); + return factories; + } + + // For any other unhandled cases, use dbgenerated + factories.push( + defaultAttr.addArg((a) => + a.InvocationExpr.setFunction(getFunctionRef('dbgenerated', services)).addArg((a) => + a.setValue((v) => v.StringLiteral.setValue(val)), + ), + ), + ); + return factories; + }, +}; + +function getTableIntrospectionQuery(databaseName: string) { + return ` +SELECT + t.TABLE_SCHEMA AS \`schema\`, + t.TABLE_NAME AS \`name\`, + CASE t.TABLE_TYPE + WHEN 'BASE TABLE' THEN 'table' + WHEN 'VIEW' THEN 'view' + ELSE NULL + END AS \`type\`, + CASE + WHEN t.TABLE_TYPE = 'VIEW' THEN v.VIEW_DEFINITION + ELSE NULL + END AS \`definition\`, + ( + SELECT JSON_ARRAYAGG( + JSON_OBJECT( + 'name', c.COLUMN_NAME, + 'datatype', c.DATA_TYPE, + 'datatype_schema', c.TABLE_SCHEMA, + 'length', c.CHARACTER_MAXIMUM_LENGTH, + 'precision', COALESCE(c.NUMERIC_PRECISION, c.DATETIME_PRECISION), + 'nullable', c.IS_NULLABLE = 'YES', + 'default', c.COLUMN_DEFAULT, + 'pk', c.COLUMN_KEY = 'PRI', + 'unique', c.COLUMN_KEY = 'UNI', + 'unique_name', CASE WHEN c.COLUMN_KEY = 'UNI' THEN c.COLUMN_NAME ELSE NULL END, + 'computed', c.GENERATION_EXPRESSION IS NOT NULL AND c.GENERATION_EXPRESSION != '', + 'options', JSON_ARRAY(), + 'foreign_key_schema', kcu_fk.REFERENCED_TABLE_SCHEMA, + 'foreign_key_table', kcu_fk.REFERENCED_TABLE_NAME, + 'foreign_key_column', kcu_fk.REFERENCED_COLUMN_NAME, + 'foreign_key_name', kcu_fk.CONSTRAINT_NAME, + 'foreign_key_on_update', rc.UPDATE_RULE, + 'foreign_key_on_delete', rc.DELETE_RULE + ) + ) + FROM INFORMATION_SCHEMA.COLUMNS c + LEFT JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE kcu_fk + ON c.TABLE_SCHEMA = kcu_fk.TABLE_SCHEMA + AND c.TABLE_NAME = kcu_fk.TABLE_NAME + AND c.COLUMN_NAME = kcu_fk.COLUMN_NAME + AND kcu_fk.REFERENCED_TABLE_NAME IS NOT NULL + LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS rc + ON kcu_fk.CONSTRAINT_SCHEMA = rc.CONSTRAINT_SCHEMA + AND kcu_fk.CONSTRAINT_NAME = rc.CONSTRAINT_NAME + WHERE c.TABLE_SCHEMA = t.TABLE_SCHEMA + AND c.TABLE_NAME = t.TABLE_NAME + ORDER BY c.ORDINAL_POSITION + ) AS \`columns\`, + ( + SELECT JSON_ARRAYAGG( + JSON_OBJECT( + 'name', s.INDEX_NAME, + 'method', s.INDEX_TYPE, + 'unique', s.NON_UNIQUE = 0, + 'primary', s.INDEX_NAME = 'PRIMARY', + 'valid', TRUE, + 'ready', TRUE, + 'partial', FALSE, + 'predicate', NULL, + 'columns', ( + SELECT JSON_ARRAYAGG( + JSON_OBJECT( + 'name', s2.COLUMN_NAME, + 'expression', NULL, + 'order', CASE s2.COLLATION WHEN 'A' THEN 'ASC' WHEN 'D' THEN 'DESC' ELSE NULL END, + 'nulls', NULL + ) + ORDER BY s2.SEQ_IN_INDEX + ) + FROM INFORMATION_SCHEMA.STATISTICS s2 + WHERE s2.TABLE_SCHEMA = s.TABLE_SCHEMA + AND s2.TABLE_NAME = s.TABLE_NAME + AND s2.INDEX_NAME = s.INDEX_NAME + ) + ) + ) + FROM ( + SELECT DISTINCT INDEX_NAME, INDEX_TYPE, NON_UNIQUE, TABLE_SCHEMA, TABLE_NAME + FROM INFORMATION_SCHEMA.STATISTICS + WHERE TABLE_SCHEMA = t.TABLE_SCHEMA AND TABLE_NAME = t.TABLE_NAME + ) s + ) AS \`indexes\` +FROM INFORMATION_SCHEMA.TABLES t +LEFT JOIN INFORMATION_SCHEMA.VIEWS v + ON t.TABLE_SCHEMA = v.TABLE_SCHEMA AND t.TABLE_NAME = v.TABLE_NAME +WHERE t.TABLE_SCHEMA = '${databaseName}' + AND t.TABLE_TYPE IN ('BASE TABLE', 'VIEW') + AND t.TABLE_NAME NOT LIKE '_prisma_migrations' +ORDER BY t.TABLE_SCHEMA, t.TABLE_NAME; +`; +} + +function getEnumIntrospectionQuery(databaseName: string) { + return ` +SELECT + c.TABLE_NAME AS table_name, + c.COLUMN_NAME AS column_name, + 
c.COLUMN_TYPE AS column_type +FROM INFORMATION_SCHEMA.COLUMNS c +WHERE c.TABLE_SCHEMA = '${databaseName}' + AND c.DATA_TYPE = 'enum' +ORDER BY c.TABLE_NAME, c.COLUMN_NAME; +`; +} + +/** + * Parse enum values from MySQL COLUMN_TYPE string like "enum('val1','val2','val3')" + */ +function parseEnumValues(columnType: string): string[] { + // Match the content inside enum(...) + const match = columnType.match(/^enum\((.+)\)$/i); + if (!match || !match[1]) return []; + + const valuesString = match[1]; + const values: string[] = []; + + // Parse quoted values, handling escaped quotes + let current = ''; + let inQuote = false; + let i = 0; + + while (i < valuesString.length) { + const char = valuesString[i]; + + if (char === "'" && !inQuote) { + inQuote = true; + i++; + continue; + } + + if (char === "'" && inQuote) { + // Check for escaped quote ('') + if (valuesString[i + 1] === "'") { + current += "'"; + i += 2; + continue; + } + // End of value + values.push(current); + current = ''; + inQuote = false; + i++; + // Skip comma and any whitespace + while (i < valuesString.length && (valuesString[i] === ',' || valuesString[i] === ' ')) { + i++; + } + continue; + } + + if (inQuote) { + current += char; + } + i++; + } + + return values; +} From 4da745d124b28d46c178ca5a55e64d797b6961f2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 28 Jan 2026 00:50:41 +0100 Subject: [PATCH 34/68] fix(cli): improve field matching logic during db pull --- packages/cli/src/actions/db.ts | 66 +++++++++++++++++++++------------- 1 file changed, 42 insertions(+), 24 deletions(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 36e354b95..4bf9ca47e 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -241,19 +241,30 @@ async function runPull(options: PullOptions) { } newDataModel.fields.forEach((f) => { - const originalFields = originalDataModel.fields.filter((d) => { - return ( - getDbName(d) === getDbName(f) || - (getRelationFkName(d as any) === getRelationFkName(f as any) && + // Prioritized matching: exact db name > relation FK name > type reference + let originalFields = originalDataModel.fields.filter((d) => getDbName(d) === getDbName(f)); + + if (originalFields.length === 0) { + // Try matching by relation FK name + originalFields = originalDataModel.fields.filter( + (d) => + getRelationFkName(d as any) === getRelationFkName(f as any) && !!getRelationFkName(d as any) && - !!getRelationFkName(f as any)) || - (f.$type === 'DataField' && + !!getRelationFkName(f as any), + ); + } + + if (originalFields.length === 0) { + // Try matching by type reference + originalFields = originalDataModel.fields.filter( + (d) => + f.$type === 'DataField' && d.$type === 'DataField' && f.type.reference?.ref && d.type.reference?.ref && - getDbName(f.type.reference.ref) === getDbName(d.type.reference.ref)) + getDbName(f.type.reference.ref) === getDbName(d.type.reference.ref), ); - }); + } if (originalFields.length > 1) { console.warn( @@ -297,22 +308,29 @@ async function runPull(options: PullOptions) { }); }); originalDataModel.fields - .filter( - (f) => - !newDataModel.fields.find((d) => { - return ( - getDbName(d) === getDbName(f) || - (getRelationFkName(d as any) === getRelationFkName(f as any) && - !!getRelationFkName(d as any) && - !!getRelationFkName(f as any)) || - (f.$type === 'DataField' && - d.$type === 'DataField' && - f.type.reference?.ref && - d.type.reference?.ref && - getDbName(f.type.reference.ref) === 
getDbName(d.type.reference.ref)) - ); - }), - ) + .filter((f) => { + // Prioritized matching: exact db name > relation FK name > type reference + const matchByDbName = newDataModel.fields.find((d) => getDbName(d) === getDbName(f)); + if (matchByDbName) return false; + + const matchByFkName = newDataModel.fields.find( + (d) => + getRelationFkName(d as any) === getRelationFkName(f as any) && + !!getRelationFkName(d as any) && + !!getRelationFkName(f as any), + ); + if (matchByFkName) return false; + + const matchByTypeRef = newDataModel.fields.find( + (d) => + f.$type === 'DataField' && + d.$type === 'DataField' && + f.type.reference?.ref && + d.type.reference?.ref && + getDbName(f.type.reference.ref) === getDbName(d.type.reference.ref), + ); + return !matchByTypeRef; + }) .forEach((f) => { const _model = f.$container; const index = _model.fields.findIndex((d) => d === f); From aed3efefba33830a70fa0236ce81dc5aa087617b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 28 Jan 2026 00:50:42 +0100 Subject: [PATCH 35/68] feat(cli): enhance SQLite introspection with autoincrement support --- .../cli/src/actions/pull/provider/sqlite.ts | 36 +++++++++++++++++-- 1 file changed, 33 insertions(+), 3 deletions(-) diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index 5825becde..f891e2994 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -63,12 +63,27 @@ export const sqlite: IntrospectionProvider = { "SELECT name, type, sql AS definition FROM sqlite_schema WHERE type IN ('table','view') AND name NOT LIKE 'sqlite_%' ORDER BY name", ); + // SQLite maintains sqlite_sequence table for tables with AUTOINCREMENT columns + // If a table has an entry here, its INTEGER PRIMARY KEY column is autoincrement + const autoIncrementTables = new Set(); + try { + const seqRows = all<{ name: string }>("SELECT name FROM sqlite_sequence"); + for (const row of seqRows) { + autoIncrementTables.add(row.name); + } + } catch { + // sqlite_sequence table doesn't exist if no AUTOINCREMENT was ever used + } + const tables: IntrospectedTable[] = []; for (const t of tablesRaw) { const tableName = t.name; const schema = ''; + // Check if this table has autoincrement (via sqlite_sequence) + const hasAutoIncrement = autoIncrementTables.has(tableName); + // Columns with extended info; filter out hidden=1 (internal/rowid), mark computed if hidden=2 (generated) const columnsInfo = all<{ cid: number; @@ -88,7 +103,7 @@ export const sqlite: IntrospectionProvider = { unique: number; origin: string; partial: number; - }>(`PRAGMA index_list('${tableNameEsc}')`); + }>(`PRAGMA index_list('${tableNameEsc}')`).filter((r) => !r.name.startsWith('sqlite_autoindex_')); // Unique columns detection via unique indexes with single column const uniqueSingleColumn = new Set(); @@ -163,6 +178,13 @@ export const sqlite: IntrospectionProvider = { const fk = fkByColumn.get(c.name); + // Determine default value - check for autoincrement + // AUTOINCREMENT in SQLite can only be on INTEGER PRIMARY KEY column + let defaultValue = c.dflt_value; + if (hasAutoIncrement && c.pk) { + defaultValue = 'autoincrement'; + } + columns.push({ name: c.name, datatype: c.type || '', @@ -178,7 +200,7 @@ export const sqlite: IntrospectionProvider = { pk: !!c.pk, computed: hidden === 2, nullable: c.notnull !== 1, - default: c.dflt_value, + default: defaultValue, options: [], unique: uniqueSingleColumn.has(c.name), 
unique_name: null, @@ -189,7 +211,7 @@ export const sqlite: IntrospectionProvider = { } const enums: IntrospectedEnum[] = []; // SQLite doesn't support enums - + return { tables, enums }; } finally { db.close(); @@ -211,6 +233,14 @@ export const sqlite: IntrospectionProvider = { return factories; } + // Handle autoincrement + if (val === 'autoincrement') { + factories.push( + defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services))), + ); + return factories; + } + if (val === 'true' || val === 'false') { factories.push(defaultAttr.addArg((a) => a.BooleanLiteral.setValue(val === 'true'))); return factories; From 2cf701e7ed6616fee407028b3156f09165022938 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 28 Jan 2026 00:50:43 +0100 Subject: [PATCH 36/68] fix(cli): refine attribute generation in db pull --- packages/cli/src/actions/pull/index.ts | 47 ++++++++++++-------------- 1 file changed, 22 insertions(+), 25 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 05a54e035..9a704b1f9 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -252,6 +252,10 @@ export function syncTable({ return typeBuilder; }); + if (column.pk && !multiPk) { + builder.addAttribute((b) => b.setDecl(idAttribute)); + } + if (column.default) { const defaultValuesAttrs = provider.getDefaultValue({ fieldName: column.name, @@ -262,10 +266,6 @@ export function syncTable({ defaultValuesAttrs.forEach(builder.addAttribute.bind(builder)); } - if (column.pk && !multiPk) { - builder.addAttribute((b) => b.setDecl(idAttribute)); - } - if (column.unique && !column.pk) { builder.addAttribute((b) => { b.setDecl(uniqueAttribute); @@ -320,22 +320,8 @@ export function syncTable({ ); } - const uniqueColumns = table.columns.filter((c) => c.unique && !c.pk).map((c) => c.name); - if (uniqueColumns.length > 0) { - modelFactory.addAttribute((builder) => - builder.setDecl(modelUniqueAttribute).addArg((argBuilder) => { - const arrayExpr = argBuilder.ArrayExpr; - uniqueColumns.forEach((c) => { - const ref = modelFactory.node.fields.find((f) => getDbName(f) === c); - if (!ref) { - throw new Error(`Field ${c} not found`); - } - arrayExpr.addItem((itemBuilder) => itemBuilder.ReferenceExpr.setTarget(ref)); - }); - return arrayExpr; - }), - ); - } else { + const uniqueColumns = table.columns.filter((c) => c.unique); + if(uniqueColumns.length === 0) { modelFactory.addAttribute((a) => a.setDecl(getAttributeRef('@@ignore', services))); modelFactory.comments.push( '/// The underlying table does not contain a valid unique identifier and can therefore currently not be handled by Zenstack Client.', @@ -361,13 +347,15 @@ export function syncTable({ return; } - if (index.columns.length === 1 && index.columns.find((c) => pkColumns.includes(c.name))) { - //skip primary key + if (index.columns.length === 1 && index.columns.find((c) => pkColumns.includes(c.name)) + || index.columns.length === 1 && index.unique) { + //skip primary key or unique constraints as they are already handled return; } modelFactory.addAttribute((builder) => - builder + { + const attr = builder .setDecl(index.unique ? 
modelUniqueAttribute : modelindexAttribute) .addArg((argBuilder) => { const arrayExpr = argBuilder.ArrayExpr; @@ -385,8 +373,17 @@ export function syncTable({ }); }); return arrayExpr; - }) - .addArg((argBuilder) => argBuilder.StringLiteral.setValue(index.name), 'map'), + }); + + const suffix = index.unique ? '_key' : '_idx'; + + if(index.name !== `${table.name}_${index.columns.map(c => c.name).join('_')}${suffix}`){ + attr.addArg((argBuilder) => argBuilder.StringLiteral.setValue(index.name), 'map'); + } + + return attr + } + ); }); if (table.schema && table.schema !== '' && table.schema !== defaultSchema) { From ba1aff18ab8c65eb8445e10e5a1ac47638db639f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 28 Jan 2026 00:50:44 +0100 Subject: [PATCH 37/68] test(cli): update db pull tests for SQLite specific behavior --- packages/cli/test/db/pull.test.ts | 161 ++++++++++++++---------------- 1 file changed, 74 insertions(+), 87 deletions(-) diff --git a/packages/cli/test/db/pull.test.ts b/packages/cli/test/db/pull.test.ts index 34d79036a..84f34024a 100644 --- a/packages/cli/test/db/pull.test.ts +++ b/packages/cli/test/db/pull.test.ts @@ -11,43 +11,11 @@ const generator = new ZModelCodeGenerator({ indent: 4, }); -describe('DB pull', () => { +describe('DB pull - Sqlite specific', () => { it("simple schema - pull shouldn't modify the schema", () => { - const workDir = createProject( - `model User { - id String @id @default(cuid()) - email String @unique @map("email_address") - name String? @default("Anonymous") - role Role @default(USER) - profile Profile? - shared_profile Profile? @relation("shared") - posts Post[] - createdAt DateTime @default(now()) - updatedAt DateTime @updatedAt - jsonData Json? - balance Decimal @default(0.00) - isActive Boolean @default(true) - bigCounter BigInt @default(0) - bytes Bytes? - - @@index([role]) - @@map("users") -} - -model Profile { - id Int @id @default(autoincrement()) - user User @relation(fields: [userId], references: [id], onDelete: Cascade) - userId String @unique - user_shared User @relation("shared", fields: [shared_userId], references: [id], onDelete: Cascade) - shared_userId String @unique - bio String? - avatarUrl String? - - @@map("profiles") -} - + const workDir = createProject(` model Post { - id Int @id @default(autoincrement()) + id Int @id @default(1) author User @relation(fields: [authorId], references: [id], onDelete: Cascade) authorId String title String @@ -55,7 +23,6 @@ model Post { published Boolean @default(false) tags PostTag[] createdAt DateTime @default(now()) - updatedAt DateTime @updatedAt slug String score Float @default(0.0) metadata Json? @@ -65,16 +32,6 @@ model Post { @@map("posts") } -model Tag { - id Int @id @default(autoincrement()) - name String @unique - posts PostTag[] - createdAt DateTime @default(now()) - - @@index([name], name: "tag_name_idx") - @@map("tags") -} - model PostTag { post Post @relation(fields: [postId], references: [id], onDelete: Cascade) postId Int @@ -87,10 +44,45 @@ model PostTag { @@map("post_tags") } -enum Role { - USER - ADMIN - MODERATOR +model Profile { + id Int @id @default(1) + user User @relation(fields: [userId], references: [id], onDelete: Cascade) + userId String @unique + user_shared User @relation("shared", fields: [shared_userId], references: [id], onDelete: Cascade) + shared_userId String @unique + bio String? + avatarUrl String? 
+ + @@map("profiles") +} + +model Tag { + id Int @id @default(1) + name String @unique + posts PostTag[] + createdAt DateTime @default(now()) + + @@index([name], name: "tag_name_idx") + @@map("tags") +} + +model User { + id String @id @default(cuid()) + email String @unique @map("email_address") + name String? @default("Anonymous") + role String @default("USER") + profile Profile? + shared_profile Profile? @relation("shared") + posts Post[] + createdAt DateTime @default(now()) + jsonData Json? + balance Decimal @default(0.00) + isActive Boolean @default(true) + bigCounter BigInt @default(0) + bytes Bytes? + + @@index([role]) + @@map("users") }`, ); runCli('format', workDir); @@ -104,74 +96,69 @@ enum Role { it('simple schema - pull shouldn recreate the schema.zmodel', async () => { const workDir = createProject( `model Post { - id Int @id @default(autoincrement()) + id Int @id @default(1) authorId String title String content String? published Boolean @default(false) createdAt DateTime @default(now()) - updatedAt DateTime @updatedAt slug String score Float @default(0.0) metadata Json? - author User @relation(fields: [authorId], references: [id], onDelete: Cascade) - PostTag PostTag[] + user User @relation(fields: [authorId], references: [id], onDelete: Cascade, onUpdate: Cascade) + postTag PostTag[] @@unique([authorId, slug]) @@index([authorId, published]) } model PostTag { - post Post @relation(fields: [postId], references: [id], onDelete: Cascade) postId Int - tag Tag @relation(fields: [tagId], references: [id], onDelete: Cascade) tagId Int assignedAt DateTime @default(now()) note String? @default("initial") + post Post @relation(fields: [postId], references: [id], onDelete: Cascade, onUpdate: Cascade) + tag Tag @relation(fields: [tagId], references: [id], onDelete: Cascade, onUpdate: Cascade) @@id([postId, tagId]) } + +model Profile { + id Int @id @default(1) + userId String @unique + sharedUserId String @unique @map("shared_userId") + bio String? + avatarUrl String? + + profileUserId User @relation(fields: [userId], references: [id], onDelete: Cascade, onUpdate: Cascade) + profileSharedUserId User @relation("shared", fields: [sharedUserId], references: [id], onDelete: Cascade, onUpdate: Cascade) +} + +model Tag { + id Int @id @default(1) + name String @unique + createdAt DateTime @default(now()) + postTag PostTag[] + + @@index([name], map: "tag_name_idx") +} + model User { - id String @id @default(cuid()) + id String @id email String @unique name String? @default("Anonymous") - role Role @default(USER) - profile Profile? - shared_profile Profile? @relation("shared") - posts Post[] + role String @default("USER") createdAt DateTime @default(now()) - updatedAt DateTime @updatedAt jsonData Json? + balance Decimal @default(0.00) isActive Boolean @default(true) bigCounter BigInt @default(0) bytes Bytes? + post Post[] + profileUserId Profile? + profileSharedUserId Profile? @relation("shared") @@index([role]) -} - -model Profile { - id Int @id @default(autoincrement()) - user User @relation(fields: [userId], references: [id], onDelete: Cascade) - userId String @unique - user_shared User @relation("shared", fields: [shared_userId], references: [id], onDelete: Cascade) - shared_userId String @unique - bio String? - avatarUrl String? 
-} - -model Tag { - id Int @id @default(autoincrement()) - name String @unique - posts PostTag[] - createdAt DateTime @default(now()) - - @@index([name], name: "tag_name_idx") -} - -enum Role { - USER - ADMIN - MODERATOR }`, ); console.log(workDir) From 96c5dc636af8cd5a4abfb4736ca51dbd2ddd1a77 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 28 Jan 2026 00:50:45 +0100 Subject: [PATCH 38/68] refactor(language): export ZModelServices type --- packages/language/src/document.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/language/src/document.ts b/packages/language/src/document.ts index 2fdce233d..026d3d23e 100644 --- a/packages/language/src/document.ts +++ b/packages/language/src/document.ts @@ -13,7 +13,7 @@ import path from 'node:path'; import { fileURLToPath } from 'node:url'; import { isDataModel, isDataSource, type Model } from './ast'; import { DB_PROVIDERS_SUPPORTING_LIST_TYPE, STD_LIB_MODULE_NAME } from './constants'; -import { createZModelServices } from './module'; +import { createZModelServices, type ZModelServices } from './module'; import { getAllFields, getDataModelAndTypeDefs, From c9f8013511ecbb94599a23c32bf797e2fe9d0eba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 28 Jan 2026 23:19:18 +0100 Subject: [PATCH 39/68] fix(cli): improve sqlite introspection for autoincrement and fk names --- .../cli/src/actions/pull/provider/sqlite.ts | 40 ++++++++++++++----- 1 file changed, 31 insertions(+), 9 deletions(-) diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index f891e2994..be4e6d6cf 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -1,6 +1,7 @@ import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; +import { writeFileSync } from 'node:fs'; // Note: We dynamically import better-sqlite3 inside the async function to avoid // requiring it at module load time for environments that don't use SQLite. 
@@ -63,16 +64,18 @@ export const sqlite: IntrospectionProvider = { "SELECT name, type, sql AS definition FROM sqlite_schema WHERE type IN ('table','view') AND name NOT LIKE 'sqlite_%' ORDER BY name", ); - // SQLite maintains sqlite_sequence table for tables with AUTOINCREMENT columns - // If a table has an entry here, its INTEGER PRIMARY KEY column is autoincrement + // Detect AUTOINCREMENT by parsing the CREATE TABLE statement + // The sqlite_sequence table only has entries after rows are inserted, + // so we need to check the actual table definition instead const autoIncrementTables = new Set(); - try { - const seqRows = all<{ name: string }>("SELECT name FROM sqlite_sequence"); - for (const row of seqRows) { - autoIncrementTables.add(row.name); + for (const t of tablesRaw) { + if (t.type === 'table' && t.definition) { + // AUTOINCREMENT keyword appears in PRIMARY KEY definition + // e.g., PRIMARY KEY("id" AUTOINCREMENT) or PRIMARY KEY(id AUTOINCREMENT) + if (/\bAUTOINCREMENT\b/i.test(t.definition)) { + autoIncrementTables.add(t.name); + } } - } catch { - // sqlite_sequence table doesn't exist if no AUTOINCREMENT was ever used } const tables: IntrospectedTable[] = []; @@ -147,6 +150,25 @@ export const sqlite: IntrospectionProvider = { on_delete: any; }>(`PRAGMA foreign_key_list('${tableName.replace(/'/g, "''")}')`); + // Extract FK constraint names from CREATE TABLE statement + // Pattern: CONSTRAINT "name" FOREIGN KEY("column") or CONSTRAINT name FOREIGN KEY(column) + const fkConstraintNames = new Map(); + if (t.definition) { + // Match: CONSTRAINT "name" FOREIGN KEY("col") or CONSTRAINT name FOREIGN KEY(col) + // Use [^"'`]+ for quoted names to capture full identifier including underscores and other chars + const fkRegex = /CONSTRAINT\s+(?:["'`]([^"'`]+)["'`]|(\w+))\s+FOREIGN\s+KEY\s*\(\s*(?:["'`]([^"'`]+)["'`]|(\w+))\s*\)/gi; + let match; + while ((match = fkRegex.exec(t.definition)) !== null) { + // match[1] = quoted constraint name, match[2] = unquoted constraint name + // match[3] = quoted column name, match[4] = unquoted column name + const constraintName = match[1] || match[2]; + const columnName = match[3] || match[4]; + if (constraintName && columnName) { + fkConstraintNames.set(columnName, constraintName); + } + } + } + const fkByColumn = new Map< string, { @@ -164,7 +186,7 @@ export const sqlite: IntrospectionProvider = { foreign_key_schema: '', foreign_key_table: fk.table || null, foreign_key_column: fk.to || null, - foreign_key_name: null, + foreign_key_name: fkConstraintNames.get(fk.from) ?? null, foreign_key_on_update: (fk.on_update as any) ?? null, foreign_key_on_delete: (fk.on_delete as any) ?? 
null, }); From 5884eff50e84b0a2f4bd650d945b3f603df0e0b5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 28 Jan 2026 23:19:18 +0100 Subject: [PATCH 40/68] feat(cli): enhance field matching logic during pull by using relation fields --- packages/cli/src/actions/db.ts | 43 ++++++++++++++++++++------ packages/cli/src/actions/pull/utils.ts | 23 ++++++++++++++ 2 files changed, 57 insertions(+), 9 deletions(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 4bf9ca47e..e97f76a4a 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -15,7 +15,7 @@ import { } from './action-utils'; import { syncEnums, syncRelation, syncTable, type Relation } from './pull'; import { providers } from './pull/provider'; -import { getDatasource, getDbName, getRelationFkName } from './pull/utils'; +import { getDatasource, getDbName, getRelationFieldsKey, getRelationFkName } from './pull/utils'; type PushOptions = { schema?: string; @@ -241,11 +241,22 @@ async function runPull(options: PullOptions) { } newDataModel.fields.forEach((f) => { - // Prioritized matching: exact db name > relation FK name > type reference + // Prioritized matching: exact db name > relation fields key > relation FK name > type reference let originalFields = originalDataModel.fields.filter((d) => getDbName(d) === getDbName(f)); if (originalFields.length === 0) { - // Try matching by relation FK name + // Try matching by relation fields key (the `fields` attribute in @relation) + // This matches relation fields by their FK field references + const newFieldsKey = getRelationFieldsKey(f as any); + if (newFieldsKey) { + originalFields = originalDataModel.fields.filter( + (d) => getRelationFieldsKey(d as any) === newFieldsKey, + ); + } + } + + if (originalFields.length === 0) { + // Try matching by relation FK name (the `map` attribute in @relation) originalFields = originalDataModel.fields.filter( (d) => getRelationFkName(d as any) === getRelationFkName(f as any) && @@ -267,11 +278,16 @@ async function runPull(options: PullOptions) { } if (originalFields.length > 1) { - console.warn( - colors.yellow( - `Found more original fields, need to tweak the search algorith. ${originalDataModel.name}->[${originalFields.map((of) => of.name).join(', ')}](${f.name})`, - ), - ); + // If this is a back-reference relation field (no `fields` attribute), + // silently skip when there are multiple potential matches + const isBackReferenceField = !getRelationFieldsKey(f as any); + if (!isBackReferenceField) { + console.warn( + colors.yellow( + `Found more original fields, need to tweak the search algorithm. 
${originalDataModel.name}->[${originalFields.map((of) => of.name).join(', ')}](${f.name})`, + ), + ); + } return; } const originalField = originalFields.at(0); @@ -309,10 +325,19 @@ async function runPull(options: PullOptions) { }); originalDataModel.fields .filter((f) => { - // Prioritized matching: exact db name > relation FK name > type reference + // Prioritized matching: exact db name > relation fields key > relation FK name > type reference const matchByDbName = newDataModel.fields.find((d) => getDbName(d) === getDbName(f)); if (matchByDbName) return false; + // Try matching by relation fields key (the `fields` attribute in @relation) + const originalFieldsKey = getRelationFieldsKey(f as any); + if (originalFieldsKey) { + const matchByFieldsKey = newDataModel.fields.find( + (d) => getRelationFieldsKey(d as any) === originalFieldsKey, + ); + if (matchByFieldsKey) return false; + } + const matchByFkName = newDataModel.fields.find( (d) => getRelationFkName(d as any) === getRelationFkName(f as any) && diff --git a/packages/cli/src/actions/pull/utils.ts b/packages/cli/src/actions/pull/utils.ts index e017bb9b4..38a5f0e9c 100644 --- a/packages/cli/src/actions/pull/utils.ts +++ b/packages/cli/src/actions/pull/utils.ts @@ -9,6 +9,7 @@ import { isInvocationExpr, type Attribute, type Model, + type ReferenceExpr, type StringLiteral, } from '@zenstackhq/language/ast'; import { getLiteralArray, getStringLiteral } from '@zenstackhq/language/utils'; @@ -108,6 +109,28 @@ export function getRelationFkName(decl: DataField): string | undefined { return schemaAttrValue?.value; } +/** + * Gets the FK field names from the @relation attribute's `fields` argument. + * Returns a sorted, comma-separated string of field names for comparison. + * e.g., @relation(fields: [userId], references: [id]) -> "userId" + * e.g., @relation(fields: [postId, tagId], references: [id, id]) -> "postId,tagId" + */ +export function getRelationFieldsKey(decl: DataField): string | undefined { + const relationAttr = decl?.attributes.find((a) => a.decl.ref?.name === '@relation'); + if (!relationAttr) return undefined; + + const fieldsArg = relationAttr.args.find((a) => a.name === 'fields')?.value; + if (!fieldsArg || fieldsArg.$type !== 'ArrayExpr') return undefined; + + const fieldNames = fieldsArg.items + .filter((item): item is ReferenceExpr => item.$type === 'ReferenceExpr') + .map((item) => item.target?.$refText || item.target?.ref?.name) + .filter((name): name is string => !!name) + .sort(); + + return fieldNames.length > 0 ? 
fieldNames.join(',') : undefined; +} + export function getDbSchemaName(decl: DataModel | Enum): string { const schemaAttr = decl.attributes.find((a) => a.decl.ref?.name === '@@schema'); if (!schemaAttr) return 'public'; From 2c9ccb74d87912225a03bcc9b3cc0ab72d373757 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 28 Jan 2026 23:19:19 +0100 Subject: [PATCH 41/68] refactor(cli): refine relation name generation and table syncing --- packages/cli/src/actions/pull/index.ts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 9a704b1f9..bd64289fb 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -320,7 +320,7 @@ export function syncTable({ ); } - const uniqueColumns = table.columns.filter((c) => c.unique); + const uniqueColumns = table.columns.filter((c) => c.unique || c.pk); if(uniqueColumns.length === 0) { modelFactory.addAttribute((a) => a.setDecl(getAttributeRef('@@ignore', services))); modelFactory.comments.push( @@ -418,7 +418,7 @@ export function syncRelation({ const fieldMapAttribute = getAttributeRef('@map', services); const tableMapAttribute = getAttributeRef('@@map', services); - const includeRelationName = selfRelation || simmilarRelations > 1; + const includeRelationName = selfRelation || simmilarRelations > 0; if (!idAttribute || !uniqueAttribute || !relationAttribute || !fieldMapAttribute || !tableMapAttribute) { throw new Error('Cannot find required attributes in the model.'); @@ -442,7 +442,7 @@ export function syncRelation({ const fieldPrefix = /[0-9]/g.test(sourceModel.name.charAt(0)) ? '_' : ''; - const relationName = `${relation.table}${simmilarRelations > 1 ? `_${relation.column}` : ''}To${relation.references.table}`; + const relationName = `${relation.table}${simmilarRelations > 0 ? 
`_${relation.column}` : ''}To${relation.references.table}`; let { name: sourceFieldName } = resolveNameCasing( options.fieldCasing, simmilarRelations > 0 @@ -491,7 +491,7 @@ export function syncRelation({ ab.addArg((a) => a.ReferenceExpr.setTarget(enumFieldRef), 'onUpdate'); } - if (relation.fk_name) ab.addArg((ab) => ab.StringLiteral.setValue(relation.fk_name), 'map'); + if (relation.fk_name && relation.fk_name !== `${relation.table}_${relation.column}_fkey`) ab.addArg((ab) => ab.StringLiteral.setValue(relation.fk_name), 'map'); return ab; }); From 89a15b543ad4cf1c4b5108d12c5d404cf1bc3604 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 28 Jan 2026 23:19:19 +0100 Subject: [PATCH 42/68] test(cli): update pull tests to reflect improved schema generation --- .../cli/src/actions/pull/provider/sqlite.ts | 5 + packages/cli/test/db/pull.test.ts | 146 +++++++++--------- 2 files changed, 81 insertions(+), 70 deletions(-) diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index be4e6d6cf..ee562f145 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -234,6 +234,11 @@ export const sqlite: IntrospectionProvider = { const enums: IntrospectedEnum[] = []; // SQLite doesn't support enums + writeFileSync( + 'D:/Projects/GitHub/zenstack-v3/packages/cli/sqlite-introspected.json', + JSON.stringify({ tables, enums }, null, 4), + ); + return { tables, enums }; } finally { db.close(); diff --git a/packages/cli/test/db/pull.test.ts b/packages/cli/test/db/pull.test.ts index 84f34024a..1754b7b4e 100644 --- a/packages/cli/test/db/pull.test.ts +++ b/packages/cli/test/db/pull.test.ts @@ -13,9 +13,40 @@ const generator = new ZModelCodeGenerator({ describe('DB pull - Sqlite specific', () => { it("simple schema - pull shouldn't modify the schema", () => { - const workDir = createProject(` + const workDir = createProject(`model User { + id String @id @default(cuid()) + email String @unique @map("email_address") + name String? @default("Anonymous") + role Role @default(USER) + profile Profile? + shared_profile Profile? @relation("shared") + posts Post[] + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + jsonData Json? + balance Decimal @default(0.00) + isActive Boolean @default(true) + bigCounter BigInt @default(0) + bytes Bytes? + + @@index([role]) + @@map("users") +} + +model Profile { + id Int @id @default(autoincrement()) + user User @relation(fields: [userId], references: [id], onDelete: Cascade) + userId String @unique + user_shared User @relation("shared", fields: [shared_userId], references: [id], onDelete: Cascade) + shared_userId String @unique + bio String? + avatarUrl String? + + @@map("profiles") +} + model Post { - id Int @id @default(1) + id Int @id @default(autoincrement()) author User @relation(fields: [authorId], references: [id], onDelete: Cascade) authorId String title String @@ -23,6 +54,7 @@ model Post { published Boolean @default(false) tags PostTag[] createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt slug String score Float @default(0.0) metadata Json? 
@@ -32,6 +64,16 @@ model Post { @@map("posts") } +model Tag { + id Int @id @default(autoincrement()) + name String @unique + posts PostTag[] + createdAt DateTime @default(now()) + + @@index([name], name: "tag_name_idx") + @@map("tags") +} + model PostTag { post Post @relation(fields: [postId], references: [id], onDelete: Cascade) postId Int @@ -44,45 +86,10 @@ model PostTag { @@map("post_tags") } -model Profile { - id Int @id @default(1) - user User @relation(fields: [userId], references: [id], onDelete: Cascade) - userId String @unique - user_shared User @relation("shared", fields: [shared_userId], references: [id], onDelete: Cascade) - shared_userId String @unique - bio String? - avatarUrl String? - - @@map("profiles") -} - -model Tag { - id Int @id @default(1) - name String @unique - posts PostTag[] - createdAt DateTime @default(now()) - - @@index([name], name: "tag_name_idx") - @@map("tags") -} - -model User { - id String @id @default(cuid()) - email String @unique @map("email_address") - name String? @default("Anonymous") - role String @default("USER") - profile Profile? - shared_profile Profile? @relation("shared") - posts Post[] - createdAt DateTime @default(now()) - jsonData Json? - balance Decimal @default(0.00) - isActive Boolean @default(true) - bigCounter BigInt @default(0) - bytes Bytes? - - @@index([role]) - @@map("users") +enum Role { + USER + ADMIN + MODERATOR }`, ); runCli('format', workDir); @@ -96,7 +103,7 @@ model User { it('simple schema - pull shouldn recreate the schema.zmodel', async () => { const workDir = createProject( `model Post { - id Int @id @default(1) + id Int @id @default(autoincrement()) authorId String title String content String? @@ -111,6 +118,7 @@ model User { @@unique([authorId, slug]) @@index([authorId, published]) } + model PostTag { postId Int tagId Int @@ -123,40 +131,38 @@ model PostTag { } model Profile { - id Int @id @default(1) - userId String @unique - sharedUserId String @unique @map("shared_userId") - bio String? - avatarUrl String? - - profileUserId User @relation(fields: [userId], references: [id], onDelete: Cascade, onUpdate: Cascade) - profileSharedUserId User @relation("shared", fields: [sharedUserId], references: [id], onDelete: Cascade, onUpdate: Cascade) + id Int @id @default(autoincrement()) + userId String @unique + sharedUserId String @unique @map("shared_userId") + bio String? + avatarUrl String? + profileUserId User @relation("Profile_userIdToUser", fields: [userId], references: [id], onDelete: Cascade, onUpdate: Cascade) + profileSharedUserId User @relation("Profile_shared_userIdToUser", fields: [sharedUserId], references: [id], onDelete: Cascade, onUpdate: Cascade) } model Tag { - id Int @id @default(1) - name String @unique - createdAt DateTime @default(now()) - postTag PostTag[] - + id Int @id @default(autoincrement()) + name String @unique + createdAt DateTime @default(now()) + postTag PostTag[] + @@index([name], map: "tag_name_idx") } - -model User { - id String @id - email String @unique - name String? @default("Anonymous") - role String @default("USER") - createdAt DateTime @default(now()) - jsonData Json? - balance Decimal @default(0.00) - isActive Boolean @default(true) - bigCounter BigInt @default(0) - bytes Bytes? - post Post[] - profileUserId Profile? - profileSharedUserId Profile? @relation("shared") +model User { + id String @id + email String @unique + name String? @default("Anonymous") + role String @default("USER") + createdAt DateTime @default(now()) + jsonData Json? 
+ balance Decimal @default(0.00) + isActive Boolean @default(true) + bigCounter BigInt @default(0) + bytes Bytes? + post Post[] + profileUserId Profile? @relation("Profile_userIdToUser") + profileSharedUserId Profile? @relation("Profile_shared_userIdToUser") @@index([role]) }`, From f433883f61463a5f3f9ae452a36a22e4ab16dc71 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Wed, 28 Jan 2026 23:44:31 +0100 Subject: [PATCH 43/68] test(cli): add MySQL support to test utility helpers Extends the testing infrastructure to support MySQL databases. Adds MySQL configuration defaults and environment variable overrides. Updates the prelude generation logic to handle MySQL connection strings and provider types, enabling broader database integration testing across the CLI. --- packages/cli/test/utils.ts | 30 ++++++++++++++++++++++++------ 1 file changed, 24 insertions(+), 6 deletions(-) diff --git a/packages/cli/test/utils.ts b/packages/cli/test/utils.ts index 310fea122..29777d186 100644 --- a/packages/cli/test/utils.ts +++ b/packages/cli/test/utils.ts @@ -12,6 +12,13 @@ const TEST_PG_CONFIG = { password: process.env['TEST_PG_PASSWORD'] ?? 'postgres', }; +const TEST_MYSQL_CONFIG = { + host: process.env['TEST_MYSQL_HOST'] ?? 'localhost', + port: process.env['TEST_MYSQL_PORT'] ? parseInt(process.env['TEST_MYSQL_PORT']) : 3306, + user: process.env['TEST_MYSQL_USER'] ?? 'root', + password: process.env['TEST_MYSQL_PASSWORD'] ?? 'mysql', +}; + function getTestDbName(provider: string) { if (provider === 'sqlite') { return './test.db'; @@ -34,13 +41,24 @@ function getTestDbName(provider: string) { ); } -export function getDefaultPrelude(options?: { provider?: 'sqlite' | 'postgresql' }) { +export function getDefaultPrelude(options?: { provider?: 'sqlite' | 'postgresql' | 'mysql' }) { const provider = (options?.provider || getTestDbProvider()) ?? 'sqlite'; const dbName = getTestDbName(provider); - const dbUrl = - provider === 'sqlite' - ? `file:${dbName}` - : `postgres://${TEST_PG_CONFIG.user}:${TEST_PG_CONFIG.password}@${TEST_PG_CONFIG.host}:${TEST_PG_CONFIG.port}/${dbName}`; + let dbUrl: string; + + switch (provider) { + case 'sqlite': + dbUrl = `file:${dbName}`; + break; + case 'postgresql': + dbUrl = `postgres://${TEST_PG_CONFIG.user}:${TEST_PG_CONFIG.password}@${TEST_PG_CONFIG.host}:${TEST_PG_CONFIG.port}/${dbName}`; + break; + case 'mysql': + dbUrl = `mysql://${TEST_MYSQL_CONFIG.user}:${TEST_MYSQL_CONFIG.password}@${TEST_MYSQL_CONFIG.host}:${TEST_MYSQL_CONFIG.port}/${dbName}`; + break; + default: + throw new Error(`Unsupported provider: ${provider}`); + } const ZMODEL_PRELUDE = `datasource db { provider = "${provider}" @@ -52,7 +70,7 @@ export function getDefaultPrelude(options?: { provider?: 'sqlite' | 'postgresql' export function createProject( zmodel: string, - options?: { customPrelude?: boolean; provider?: 'sqlite' | 'postgresql' }, + options?: { customPrelude?: boolean; provider?: 'sqlite' | 'postgresql' | 'mysql' }, ) { const workDir = createTestProject(); fs.mkdirSync(path.join(workDir, 'zenstack'), { recursive: true }); From 5c08cbd901fea638b2bbf9656d00d286c6b4cb62 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Thu, 29 Jan 2026 00:06:11 +0100 Subject: [PATCH 44/68] fix(cli): omit default constraint names in table sync Avoids explicitly declaring unique constraint names when they match the default database naming convention. This results in cleaner generated schema code by removing redundant mapping arguments. 
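
An illustrative sketch of the intended effect (the table, field, and index names below are made up for the example and do not come from the test suite): a unique column whose backing index follows the default `<table>_<column>_key` convention now renders without a redundant map argument, while a custom-named index keeps the explicit mapping.

    // index named "users_email_key" (default convention) -> map argument omitted
    email String @unique

    // index named "uq_users_email" (custom name) -> mapping preserved
    email String @unique(map: "uq_users_email")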
--- packages/cli/src/actions/pull/index.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index bd64289fb..8a16d2788 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -269,7 +269,7 @@ export function syncTable({ if (column.unique && !column.pk) { builder.addAttribute((b) => { b.setDecl(uniqueAttribute); - if (column.unique_name) b.addArg((ab) => ab.StringLiteral.setValue(column.unique_name!), 'map'); + if (column.unique_name && column.unique_name != `${table.name}_${column.name}_key`) b.addArg((ab) => ab.StringLiteral.setValue(column.unique_name!), 'map'); return b; }); From 1df69265057c9a051df09fe26e436e4d05e93f64 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Thu, 29 Jan 2026 00:24:13 +0100 Subject: [PATCH 45/68] fix: correctly handle default values for 'text' type in PostgreSQL --- packages/cli/src/actions/pull/provider/postgresql.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index 958b0930f..ebe64ef3d 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -150,6 +150,7 @@ export const postgresql: IntrospectionProvider = { case 'uuid': case 'json': case 'jsonb': + case 'text': if (value === 'NULL') return []; factories.push(defaultAttr.addArg((a) => a.StringLiteral.setValue(value))); break; From d5b839722de14ed8f5c5d0fdc911bf6e71bda91a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Thu, 29 Jan 2026 00:24:14 +0100 Subject: [PATCH 46/68] fix: sort table indexes to ensure stable schema generation --- packages/cli/src/actions/pull/index.ts | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 8a16d2788..332b6bd5f 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -328,7 +328,14 @@ export function syncTable({ ); } - table.indexes.forEach((index) => { + // Sort indexes: unique indexes first, then other indexes + const sortedIndexes = table.indexes.sort((a, b) => { + if (a.unique && !b.unique) return -1; + if (!a.unique && b.unique) return 1; + return 0; + }); + + sortedIndexes.forEach((index) => { if (index.predicate) { //These constraints are not supported by Zenstack, because Zenstack currently does not fully support check constraints. 
Read more: https://pris.ly/d/check-constraints console.warn( From 05ad8e28b3f540e652e7f0073d2b1c8056d837fb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Thu, 29 Jan 2026 00:24:14 +0100 Subject: [PATCH 47/68] refactor: dynamically determine supported db providers in CLI --- packages/cli/src/actions/db.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index e97f76a4a..6a8135650 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -16,6 +16,7 @@ import { import { syncEnums, syncRelation, syncTable, type Relation } from './pull'; import { providers } from './pull/provider'; import { getDatasource, getDbName, getRelationFieldsKey, getRelationFkName } from './pull/utils'; +import type { DataSourceProviderType } from '@zenstackhq/schema'; type PushOptions = { schema?: string; @@ -86,7 +87,7 @@ async function runPull(options: PullOptions) { config({ ignore: ['MISSING_ENV_FILE'], }); - const SUPPORTED_PROVIDERS = ['sqlite', 'postgresql']; + const SUPPORTED_PROVIDERS = Object.keys(providers) as DataSourceProviderType[]; const datasource = getDatasource(model); if (!datasource) { throw new Error('No datasource found in the schema.'); From d0e1242f9946c0d5d7604669e5f3f17e6a569c8c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Thu, 29 Jan 2026 00:24:14 +0100 Subject: [PATCH 48/68] test: fix typo in pull test description --- packages/cli/test/db/pull.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/cli/test/db/pull.test.ts b/packages/cli/test/db/pull.test.ts index 1754b7b4e..444431c1f 100644 --- a/packages/cli/test/db/pull.test.ts +++ b/packages/cli/test/db/pull.test.ts @@ -100,7 +100,7 @@ enum Role { expect(getSchema(workDir)).toEqual(originalSchema); }); - it('simple schema - pull shouldn recreate the schema.zmodel', async () => { + it('simple schema - pull should recreate the schema.zmodel', async () => { const workDir = createProject( `model Post { id Int @id @default(autoincrement()) From c3ae802fc2c2a553c448206fda2cd7d884af8d6f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Thu, 29 Jan 2026 00:35:19 +0100 Subject: [PATCH 49/68] chore(cli): remove debug artifacts and silence test logs Removes hardcoded file system path debugging and unnecessary console logging from the introspector and test suites. Silences CLI command output during tests to provide a cleaner test execution environment. --- packages/cli/src/actions/pull/provider/sqlite.ts | 6 ------ packages/cli/test/db/pull.test.ts | 1 - packages/cli/test/utils.ts | 2 +- 3 files changed, 1 insertion(+), 8 deletions(-) diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index ee562f145..1704cb8f0 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -1,7 +1,6 @@ import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; -import { writeFileSync } from 'node:fs'; // Note: We dynamically import better-sqlite3 inside the async function to avoid // requiring it at module load time for environments that don't use SQLite. 
@@ -233,11 +232,6 @@ export const sqlite: IntrospectionProvider = { } const enums: IntrospectedEnum[] = []; // SQLite doesn't support enums - - writeFileSync( - 'D:/Projects/GitHub/zenstack-v3/packages/cli/sqlite-introspected.json', - JSON.stringify({ tables, enums }, null, 4), - ); return { tables, enums }; } finally { diff --git a/packages/cli/test/db/pull.test.ts b/packages/cli/test/db/pull.test.ts index 444431c1f..b04a46ffc 100644 --- a/packages/cli/test/db/pull.test.ts +++ b/packages/cli/test/db/pull.test.ts @@ -167,7 +167,6 @@ model User { @@index([role]) }`, ); - console.log(workDir) runCli('format', workDir); runCli('db push', workDir); const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); diff --git a/packages/cli/test/utils.ts b/packages/cli/test/utils.ts index 29777d186..cb9e76add 100644 --- a/packages/cli/test/utils.ts +++ b/packages/cli/test/utils.ts @@ -81,5 +81,5 @@ export function createProject( export function runCli(command: string, cwd: string) { const cli = path.join(__dirname, '../dist/index.js'); - execSync(`node ${cli} ${command}`, { cwd, stdio: 'inherit' }); + execSync(`node ${cli} ${command}`, { cwd }); } From caee3f7bfadea4e837606c3f75720d90d3a4bef7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Thu, 29 Jan 2026 01:40:55 +0100 Subject: [PATCH 50/68] fix(cli): ensure MySQL column and index ordering Wraps JSON_ARRAYAGG calls in subqueries with explicit ORDER BY clauses to maintain correct metadata ordering. This addresses a limitation in MySQL versions prior to 8.0.21, where ORDER BY is not supported directly within the JSON_ARRAYAGG function, ensuring consistent introspection results across different database versions. --- .../cli/src/actions/pull/provider/mysql.ts | 71 ++++++++++--------- 1 file changed, 38 insertions(+), 33 deletions(-) diff --git a/packages/cli/src/actions/pull/provider/mysql.ts b/packages/cli/src/actions/pull/provider/mysql.ts index 123463725..4f5f98fe5 100644 --- a/packages/cli/src/actions/pull/provider/mysql.ts +++ b/packages/cli/src/actions/pull/provider/mysql.ts @@ -264,6 +264,8 @@ export const mysql: IntrospectionProvider = { }; function getTableIntrospectionQuery(databaseName: string) { + // Note: We use subqueries with ORDER BY before JSON_ARRAYAGG to ensure ordering + // since MySQL < 8.0.21 doesn't support ORDER BY inside JSON_ARRAYAGG return ` SELECT t.TABLE_SCHEMA AS \`schema\`, @@ -278,8 +280,9 @@ SELECT ELSE NULL END AS \`definition\`, ( - SELECT JSON_ARRAYAGG( - JSON_OBJECT( + SELECT JSON_ARRAYAGG(col_json) + FROM ( + SELECT JSON_OBJECT( 'name', c.COLUMN_NAME, 'datatype', c.DATA_TYPE, 'datatype_schema', c.TABLE_SCHEMA, @@ -298,24 +301,25 @@ SELECT 'foreign_key_name', kcu_fk.CONSTRAINT_NAME, 'foreign_key_on_update', rc.UPDATE_RULE, 'foreign_key_on_delete', rc.DELETE_RULE - ) - ) - FROM INFORMATION_SCHEMA.COLUMNS c - LEFT JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE kcu_fk - ON c.TABLE_SCHEMA = kcu_fk.TABLE_SCHEMA - AND c.TABLE_NAME = kcu_fk.TABLE_NAME - AND c.COLUMN_NAME = kcu_fk.COLUMN_NAME - AND kcu_fk.REFERENCED_TABLE_NAME IS NOT NULL - LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS rc - ON kcu_fk.CONSTRAINT_SCHEMA = rc.CONSTRAINT_SCHEMA - AND kcu_fk.CONSTRAINT_NAME = rc.CONSTRAINT_NAME - WHERE c.TABLE_SCHEMA = t.TABLE_SCHEMA - AND c.TABLE_NAME = t.TABLE_NAME - ORDER BY c.ORDINAL_POSITION + ) AS col_json + FROM INFORMATION_SCHEMA.COLUMNS c + LEFT JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE kcu_fk + ON c.TABLE_SCHEMA = kcu_fk.TABLE_SCHEMA + AND c.TABLE_NAME = kcu_fk.TABLE_NAME + AND 
c.COLUMN_NAME = kcu_fk.COLUMN_NAME + AND kcu_fk.REFERENCED_TABLE_NAME IS NOT NULL + LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS rc + ON kcu_fk.CONSTRAINT_SCHEMA = rc.CONSTRAINT_SCHEMA + AND kcu_fk.CONSTRAINT_NAME = rc.CONSTRAINT_NAME + WHERE c.TABLE_SCHEMA = t.TABLE_SCHEMA + AND c.TABLE_NAME = t.TABLE_NAME + ORDER BY c.ORDINAL_POSITION + ) AS cols_ordered ) AS \`columns\`, ( - SELECT JSON_ARRAYAGG( - JSON_OBJECT( + SELECT JSON_ARRAYAGG(idx_json) + FROM ( + SELECT JSON_OBJECT( 'name', s.INDEX_NAME, 'method', s.INDEX_TYPE, 'unique', s.NON_UNIQUE = 0, @@ -325,27 +329,28 @@ SELECT 'partial', FALSE, 'predicate', NULL, 'columns', ( - SELECT JSON_ARRAYAGG( - JSON_OBJECT( + SELECT JSON_ARRAYAGG(idx_col_json) + FROM ( + SELECT JSON_OBJECT( 'name', s2.COLUMN_NAME, 'expression', NULL, 'order', CASE s2.COLLATION WHEN 'A' THEN 'ASC' WHEN 'D' THEN 'DESC' ELSE NULL END, 'nulls', NULL - ) + ) AS idx_col_json + FROM INFORMATION_SCHEMA.STATISTICS s2 + WHERE s2.TABLE_SCHEMA = s.TABLE_SCHEMA + AND s2.TABLE_NAME = s.TABLE_NAME + AND s2.INDEX_NAME = s.INDEX_NAME ORDER BY s2.SEQ_IN_INDEX - ) - FROM INFORMATION_SCHEMA.STATISTICS s2 - WHERE s2.TABLE_SCHEMA = s.TABLE_SCHEMA - AND s2.TABLE_NAME = s.TABLE_NAME - AND s2.INDEX_NAME = s.INDEX_NAME + ) AS idx_cols_ordered ) - ) - ) - FROM ( - SELECT DISTINCT INDEX_NAME, INDEX_TYPE, NON_UNIQUE, TABLE_SCHEMA, TABLE_NAME - FROM INFORMATION_SCHEMA.STATISTICS - WHERE TABLE_SCHEMA = t.TABLE_SCHEMA AND TABLE_NAME = t.TABLE_NAME - ) s + ) AS idx_json + FROM ( + SELECT DISTINCT INDEX_NAME, INDEX_TYPE, NON_UNIQUE, TABLE_SCHEMA, TABLE_NAME + FROM INFORMATION_SCHEMA.STATISTICS + WHERE TABLE_SCHEMA = t.TABLE_SCHEMA AND TABLE_NAME = t.TABLE_NAME + ) s + ) AS idxs_ordered ) AS \`indexes\` FROM INFORMATION_SCHEMA.TABLES t LEFT JOIN INFORMATION_SCHEMA.VIEWS v From 5c3265a672292592dcf41441015015128b7a9de8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 01:00:25 +0100 Subject: [PATCH 51/68] fix(cli): preserve column order during MySQL pull Ensures database columns are sorted by their ordinal position during the introspection process. This maintains the original schema structure and provides a consistent output that matches the physical database layout. --- packages/cli/src/actions/pull/provider/mysql.ts | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/cli/src/actions/pull/provider/mysql.ts b/packages/cli/src/actions/pull/provider/mysql.ts index 4f5f98fe5..329d01942 100644 --- a/packages/cli/src/actions/pull/provider/mysql.ts +++ b/packages/cli/src/actions/pull/provider/mysql.ts @@ -135,12 +135,18 @@ export const mysql: IntrospectionProvider = { const columns = typeof row.columns === 'string' ? JSON.parse(row.columns) : row.columns; const indexes = typeof row.indexes === 'string' ? JSON.parse(row.indexes) : row.indexes; + // Sort columns by ordinal_position to preserve database column order + const sortedColumns = (columns || []).sort( + (a: { ordinal_position?: number }, b: { ordinal_position?: number }) => + (a.ordinal_position ?? 0) - (b.ordinal_position ?? 
0) + ); + tables.push({ schema: row.schema || '', name: row.name, type: row.type as 'table' | 'view', definition: row.definition, - columns: columns || [], + columns: sortedColumns, indexes: indexes || [], }); } @@ -283,6 +289,7 @@ SELECT SELECT JSON_ARRAYAGG(col_json) FROM ( SELECT JSON_OBJECT( + 'ordinal_position', c.ORDINAL_POSITION, 'name', c.COLUMN_NAME, 'datatype', c.DATA_TYPE, 'datatype_schema', c.TABLE_SCHEMA, From f6eda1ddaaefda5f568a39336859b51ee5c9897d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 01:10:23 +0100 Subject: [PATCH 52/68] refactor(cli): remove schema fields from MySQL queries Eliminates redundant schema and database name fields from the MySQL introspection query. Since MySQL does not support multi-schema architectures internal to a single connection in this context, removing these fields simplifies the data structure and avoids unnecessary metadata overhead. --- packages/cli/src/actions/pull/provider/mysql.ts | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/packages/cli/src/actions/pull/provider/mysql.ts b/packages/cli/src/actions/pull/provider/mysql.ts index 329d01942..ca73a2194 100644 --- a/packages/cli/src/actions/pull/provider/mysql.ts +++ b/packages/cli/src/actions/pull/provider/mysql.ts @@ -142,7 +142,7 @@ export const mysql: IntrospectionProvider = { ); tables.push({ - schema: row.schema || '', + schema: '', // MySQL doesn't support multi-schema name: row.name, type: row.type as 'table' | 'view', definition: row.definition, @@ -161,7 +161,7 @@ export const mysql: IntrospectionProvider = { // Parse enum values from column_type like "enum('val1','val2','val3')" const values = parseEnumValues(row.column_type); return { - schema_name: databaseName, + schema_name: '', // MySQL doesn't support multi-schema // Create a unique enum type name based on table and column enum_type: `${row.table_name}_${row.column_name}`, values, @@ -272,9 +272,9 @@ export const mysql: IntrospectionProvider = { function getTableIntrospectionQuery(databaseName: string) { // Note: We use subqueries with ORDER BY before JSON_ARRAYAGG to ensure ordering // since MySQL < 8.0.21 doesn't support ORDER BY inside JSON_ARRAYAGG + // MySQL doesn't support multi-schema, so we don't include schema in the result return ` SELECT - t.TABLE_SCHEMA AS \`schema\`, t.TABLE_NAME AS \`name\`, CASE t.TABLE_TYPE WHEN 'BASE TABLE' THEN 'table' @@ -292,7 +292,6 @@ SELECT 'ordinal_position', c.ORDINAL_POSITION, 'name', c.COLUMN_NAME, 'datatype', c.DATA_TYPE, - 'datatype_schema', c.TABLE_SCHEMA, 'length', c.CHARACTER_MAXIMUM_LENGTH, 'precision', COALESCE(c.NUMERIC_PRECISION, c.DATETIME_PRECISION), 'nullable', c.IS_NULLABLE = 'YES', @@ -302,7 +301,6 @@ SELECT 'unique_name', CASE WHEN c.COLUMN_KEY = 'UNI' THEN c.COLUMN_NAME ELSE NULL END, 'computed', c.GENERATION_EXPRESSION IS NOT NULL AND c.GENERATION_EXPRESSION != '', 'options', JSON_ARRAY(), - 'foreign_key_schema', kcu_fk.REFERENCED_TABLE_SCHEMA, 'foreign_key_table', kcu_fk.REFERENCED_TABLE_NAME, 'foreign_key_column', kcu_fk.REFERENCED_COLUMN_NAME, 'foreign_key_name', kcu_fk.CONSTRAINT_NAME, @@ -365,7 +363,7 @@ LEFT JOIN INFORMATION_SCHEMA.VIEWS v WHERE t.TABLE_SCHEMA = '${databaseName}' AND t.TABLE_TYPE IN ('BASE TABLE', 'VIEW') AND t.TABLE_NAME NOT LIKE '_prisma_migrations' -ORDER BY t.TABLE_SCHEMA, t.TABLE_NAME; +ORDER BY t.TABLE_NAME; `; } From 1086be61c63448b58b1e3a4ef993d5dac7176353 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 01:22:05 
+0100 Subject: [PATCH 53/68] fix(cli): improve MySQL introspection and index mapping Refines the database pull process to better handle MySQL-specific patterns. Improves unique constraint detection to prevent redundant mapping attributes when default naming conventions are used. Updates the MySQL introspection logic to correctly identify boolean types, handle timestamp precision in default values, and normalize numeric defaults. Also ensures auto-incrementing columns and primary key indexes are correctly mapped to prevent schema duplication. --- packages/cli/src/actions/pull/index.ts | 19 ++++++++++--- .../cli/src/actions/pull/provider/mysql.ts | 28 ++++++++++++++----- 2 files changed, 36 insertions(+), 11 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 332b6bd5f..e15b77fa4 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -269,7 +269,14 @@ export function syncTable({ if (column.unique && !column.pk) { builder.addAttribute((b) => { b.setDecl(uniqueAttribute); - if (column.unique_name && column.unique_name != `${table.name}_${column.name}_key`) b.addArg((ab) => ab.StringLiteral.setValue(column.unique_name!), 'map'); + // Only add map if the unique constraint name differs from default patterns + // Default patterns: TableName_columnName_key (Prisma) or just columnName (MySQL) + const isDefaultName = !column.unique_name + || column.unique_name === `${table.name}_${column.name}_key` + || column.unique_name === column.name; + if (!isDefaultName) { + b.addArg((ab) => ab.StringLiteral.setValue(column.unique_name!), 'map'); + } return b; }); @@ -354,9 +361,13 @@ export function syncTable({ return; } - if (index.columns.length === 1 && index.columns.find((c) => pkColumns.includes(c.name)) - || index.columns.length === 1 && index.unique) { - //skip primary key or unique constraints as they are already handled + // Skip PRIMARY key index (handled via @id or @@id) + if (index.primary) { + return; + } + + // Skip single-column indexes that are already handled by @id or @unique on the field + if (index.columns.length === 1 && (index.columns.find((c) => pkColumns.includes(c.name)) || index.unique)) { return; } diff --git a/packages/cli/src/actions/pull/provider/mysql.ts b/packages/cli/src/actions/pull/provider/mysql.ts index ca73a2194..5a03efd64 100644 --- a/packages/cli/src/actions/pull/provider/mysql.ts +++ b/packages/cli/src/actions/pull/provider/mysql.ts @@ -179,8 +179,8 @@ export const mysql: IntrospectionProvider = { const defaultAttr = new DataFieldAttributeFactory().setDecl(getAttributeRef('@default', services)); - // Handle CURRENT_TIMESTAMP - if (val === 'CURRENT_TIMESTAMP' || val === 'current_timestamp()' || val === 'now()') { + // Handle CURRENT_TIMESTAMP with optional precision (e.g., CURRENT_TIMESTAMP(3)) + if (/^CURRENT_TIMESTAMP(\(\d*\))?$/i.test(val) || val.toLowerCase() === 'current_timestamp()' || val.toLowerCase() === 'now()') { factories.push(defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)))); if (fieldName.toLowerCase() === 'updatedat' || fieldName.toLowerCase() === 'updated_at') { @@ -190,7 +190,7 @@ export const mysql: IntrospectionProvider = { } // Handle auto_increment - if (val === 'auto_increment') { + if (val.toLowerCase() === 'auto_increment') { factories.push( defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services))), ); @@ -212,11 +212,17 @@ export const mysql: 
IntrospectionProvider = { return factories; } - // Handle numeric values - if (/^-?\d+$/.test(val) || /^-?\d+(\.\d+)?$/.test(val)) { + // Handle numeric values (integers and decimals) + if (/^-?\d+$/.test(val)) { factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(val))); return factories; } + if (/^-?\d+\.\d+$/.test(val)) { + // For decimal values, normalize to remove trailing zeros but keep reasonable precision + const numVal = parseFloat(val); + factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(String(numVal)))); + return factories; + } // Handle string values (quoted with single quotes) if (val.startsWith("'") && val.endsWith("'")) { @@ -291,16 +297,24 @@ SELECT SELECT JSON_OBJECT( 'ordinal_position', c.ORDINAL_POSITION, 'name', c.COLUMN_NAME, - 'datatype', c.DATA_TYPE, + 'datatype', CASE + WHEN c.DATA_TYPE = 'tinyint' AND c.COLUMN_TYPE = 'tinyint(1)' THEN 'boolean' + ELSE c.DATA_TYPE + END, + 'datatype_schema', '', 'length', c.CHARACTER_MAXIMUM_LENGTH, 'precision', COALESCE(c.NUMERIC_PRECISION, c.DATETIME_PRECISION), 'nullable', c.IS_NULLABLE = 'YES', - 'default', c.COLUMN_DEFAULT, + 'default', CASE + WHEN c.EXTRA LIKE '%auto_increment%' THEN 'auto_increment' + ELSE c.COLUMN_DEFAULT + END, 'pk', c.COLUMN_KEY = 'PRI', 'unique', c.COLUMN_KEY = 'UNI', 'unique_name', CASE WHEN c.COLUMN_KEY = 'UNI' THEN c.COLUMN_NAME ELSE NULL END, 'computed', c.GENERATION_EXPRESSION IS NOT NULL AND c.GENERATION_EXPRESSION != '', 'options', JSON_ARRAY(), + 'foreign_key_schema', NULL, 'foreign_key_table', kcu_fk.REFERENCED_TABLE_NAME, 'foreign_key_column', kcu_fk.REFERENCED_COLUMN_NAME, 'foreign_key_name', kcu_fk.CONSTRAINT_NAME, From dbd8d7ab086dfbb4abce1fb7d022a616158ef412 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 01:30:46 +0100 Subject: [PATCH 54/68] test(cli): pass provider to default prelude in tests Ensures that the default schema prelude correctly reflects the database provider specified in test options. This prevents inconsistencies when generating test projects with non-default providers. --- packages/cli/test/utils.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/cli/test/utils.ts b/packages/cli/test/utils.ts index cb9e76add..8b5d79d10 100644 --- a/packages/cli/test/utils.ts +++ b/packages/cli/test/utils.ts @@ -75,7 +75,7 @@ export function createProject( const workDir = createTestProject(); fs.mkdirSync(path.join(workDir, 'zenstack'), { recursive: true }); const schemaPath = path.join(workDir, 'zenstack/schema.zmodel'); - fs.writeFileSync(schemaPath, !options?.customPrelude ? `${getDefaultPrelude()}\n${zmodel}` : zmodel); + fs.writeFileSync(schemaPath, !options?.customPrelude ? `${getDefaultPrelude({ provider: options?.provider })}\n${zmodel}` : zmodel); return workDir; } From 50b0dd2e0fcb5be0be5c76d284f5fc63ba1fb6fa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 01:44:07 +0100 Subject: [PATCH 55/68] fix(cli): improve MySQL introspection for types and defaults Disables NativeEnum support for MySQL to prevent loss of schema-level enums since MySQL enums are column-specific. Refines boolean and numeric type mapping to better handle synthetic boolean types and preserve decimal precision in default values. Updates default value parsing logic to correctly identify unquoted strings and avoid misinterpreting numeric literals as booleans. 
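For illustration, the literal classification described above can be sketched as a standalone helper (hypothetical names; function defaults such as CURRENT_TIMESTAMP are assumed to be handled before this point):

    type DefaultKind = 'boolean' | 'number' | 'string' | 'dbgenerated';

    function classifyMysqlDefault(raw: string): DefaultKind {
        const val = raw.trim();
        // Only genuine boolean literals count as booleans; bare 0/1 stay numeric.
        if (val === 'true' || val === 'false' || val === "b'1'" || val === "b'0'") return 'boolean';
        // Keep the textual form so decimal precision such as 0.00 is preserved.
        if (/^-?\d+(\.\d+)?$/.test(val)) return 'number';
        // Quoted string defaults.
        if (val.startsWith("'") && val.endsWith("'")) return 'string';
        // MySQL sometimes reports string defaults without quotes.
        if (/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(val)) return 'string';
        // Anything else falls back to dbgenerated(...).
        return 'dbgenerated';
    }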
--- .../cli/src/actions/pull/provider/mysql.ts | 32 +++++++++++++------ 1 file changed, 22 insertions(+), 10 deletions(-) diff --git a/packages/cli/src/actions/pull/provider/mysql.ts b/packages/cli/src/actions/pull/provider/mysql.ts index 5a03efd64..a4979acef 100644 --- a/packages/cli/src/actions/pull/provider/mysql.ts +++ b/packages/cli/src/actions/pull/provider/mysql.ts @@ -10,7 +10,10 @@ export const mysql: IntrospectionProvider = { isSupportedFeature(feature) { switch (feature) { case 'NativeEnum': - return true; + // MySQL enums are defined inline in column definitions, not as separate types. + // They can't be shared across tables like PostgreSQL enums. + // Return false to preserve existing enums from the schema. + return false; case 'Schema': default: return false; @@ -94,7 +97,9 @@ export const mysql: IntrospectionProvider = { case 'String': return { type: 'varchar', precisition: 191 }; case 'Boolean': - return { type: 'tinyint', precisition: 1 }; + // Boolean maps to 'boolean' (our synthetic type from tinyint(1)) + // No precision needed since we handle the mapping in the query + return { type: 'boolean' }; case 'Int': return { type: 'int' }; case 'BigInt': @@ -202,25 +207,25 @@ export const mysql: IntrospectionProvider = { return []; } - // Handle boolean values - if (val === 'true' || val === '1' || val === "b'1'") { + // Handle boolean literal values (not numeric 0/1 which should be handled as numbers) + if (val === 'true' || val === "b'1'") { factories.push(defaultAttr.addArg((ab) => ab.BooleanLiteral.setValue(true))); return factories; } - if (val === 'false' || val === '0' || val === "b'0'") { + if (val === 'false' || val === "b'0'") { factories.push(defaultAttr.addArg((ab) => ab.BooleanLiteral.setValue(false))); return factories; } // Handle numeric values (integers and decimals) - if (/^-?\d+$/.test(val)) { + // Check decimals first to preserve format like 0.00 + if (/^-?\d+\.\d+$/.test(val)) { + // Preserve the original decimal format factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(val))); return factories; } - if (/^-?\d+\.\d+$/.test(val)) { - // For decimal values, normalize to remove trailing zeros but keep reasonable precision - const numVal = parseFloat(val); - factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(String(numVal)))); + if (/^-?\d+$/.test(val)) { + factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(val))); return factories; } @@ -263,6 +268,13 @@ export const mysql: IntrospectionProvider = { return factories; } + // Handle unquoted string values (MySQL sometimes returns defaults without quotes) + // If it's not a number, boolean, or function, treat it as a string + if (/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(val)) { + factories.push(defaultAttr.addArg((ab) => ab.StringLiteral.setValue(val))); + return factories; + } + // For any other unhandled cases, use dbgenerated factories.push( defaultAttr.addArg((a) => From 2dcc361271d2a1c87b25424a5d3375118e169e2b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 13:09:33 +0100 Subject: [PATCH 56/68] fix(cli): improve MySQL default value introspection Refines how default values are handled during database introspection for MySQL by considering the specific field type. This ensures that boolean variants and numeric literals for Float and Decimal types are correctly formatted and preserved. Also clarifies unsupported features in the SQLite provider to improve codebase maintainability. 
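As a minimal sketch of the type-aware formatting (hypothetical helper, assuming the field type has already been resolved to a ZModel builtin):

    function formatNumericDefault(fieldType: 'Int' | 'Float' | 'Decimal', raw: string): string {
        // Values that already carry a fractional part are kept as-is here.
        if (!/^-?\d+$/.test(raw)) return raw;
        // Integer literals are widened to match the declared field type.
        if (fieldType === 'Float') return `${raw}.0`;
        if (fieldType === 'Decimal') return `${raw}.00`;
        return raw;
    }

    // formatNumericDefault('Decimal', '0') -> '0.00'  (round-trips @default(0.00))
    // formatNumericDefault('Float', '0')   -> '0.0'   (round-trips @default(0.0))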
--- packages/cli/src/actions/pull/index.ts | 1 + .../cli/src/actions/pull/provider/mysql.ts | 46 ++++++++++++++++--- .../src/actions/pull/provider/postgresql.ts | 2 +- .../cli/src/actions/pull/provider/provider.ts | 1 + .../cli/src/actions/pull/provider/sqlite.ts | 7 ++- 5 files changed, 48 insertions(+), 9 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index e15b77fa4..78f5ca7ac 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -259,6 +259,7 @@ export function syncTable({ if (column.default) { const defaultValuesAttrs = provider.getDefaultValue({ fieldName: column.name, + fieldType: builtinType.type, defaultValue: column.default, services, enums: model.declarations.filter((d) => d.$type === 'Enum') as Enum[], diff --git a/packages/cli/src/actions/pull/provider/mysql.ts b/packages/cli/src/actions/pull/provider/mysql.ts index a4979acef..afd468049 100644 --- a/packages/cli/src/actions/pull/provider/mysql.ts +++ b/packages/cli/src/actions/pull/provider/mysql.ts @@ -178,7 +178,7 @@ export const mysql: IntrospectionProvider = { await connection.end(); } }, - getDefaultValue({ defaultValue, fieldName, services, enums }) { + getDefaultValue({ defaultValue, fieldName, fieldType, services, enums }) { const val = defaultValue.trim(); const factories: DataFieldAttributeFactory[] = []; @@ -207,7 +207,19 @@ export const mysql: IntrospectionProvider = { return []; } - // Handle boolean literal values (not numeric 0/1 which should be handled as numbers) + // Handle boolean values based on field type + if (fieldType === 'Boolean') { + if (val === 'true' || val === '1' || val === "b'1'") { + factories.push(defaultAttr.addArg((ab) => ab.BooleanLiteral.setValue(true))); + return factories; + } + if (val === 'false' || val === '0' || val === "b'0'") { + factories.push(defaultAttr.addArg((ab) => ab.BooleanLiteral.setValue(false))); + return factories; + } + } + + // Handle boolean literal values for non-boolean fields if (val === 'true' || val === "b'1'") { factories.push(defaultAttr.addArg((ab) => ab.BooleanLiteral.setValue(true))); return factories; @@ -217,15 +229,35 @@ export const mysql: IntrospectionProvider = { return factories; } - // Handle numeric values (integers and decimals) - // Check decimals first to preserve format like 0.00 + // Handle numeric values based on field type if (/^-?\d+\.\d+$/.test(val)) { - // Preserve the original decimal format - factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(val))); + if (fieldType === 'Decimal') { + // For Decimal, normalize to 2 decimal places if it's all zeros after decimal + const numVal = parseFloat(val); + if (numVal === Math.floor(numVal)) { + factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(numVal.toFixed(2)))); + } else { + factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(String(numVal)))); + } + } else if (fieldType === 'Float') { + // For Float, preserve decimal point + const numVal = parseFloat(val); + factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(numVal === Math.floor(numVal) ? 
numVal.toFixed(1) : String(numVal)))); + } else { + factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(val))); + } return factories; } if (/^-?\d+$/.test(val)) { - factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(val))); + if (fieldType === 'Float') { + // For Float fields, add .0 to integer values + factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(val + '.0'))); + } else if (fieldType === 'Decimal') { + // For Decimal fields, add .00 to integer values + factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(val + '.00'))); + } else { + factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(val))); + } return factories; } diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index ebe64ef3d..26bc0ed92 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -109,7 +109,7 @@ export const postgresql: IntrospectionProvider = { return { type: 'bytea' }; } }, - getDefaultValue({ defaultValue, fieldName, services, enums }) { + getDefaultValue({ defaultValue, fieldName, fieldType: _fieldType, services, enums }) { const val = defaultValue.trim(); const factories: DataFieldAttributeFactory[] = []; diff --git a/packages/cli/src/actions/pull/provider/provider.ts b/packages/cli/src/actions/pull/provider/provider.ts index 252a8a300..fefb2e950 100644 --- a/packages/cli/src/actions/pull/provider/provider.ts +++ b/packages/cli/src/actions/pull/provider/provider.ts @@ -69,6 +69,7 @@ export interface IntrospectionProvider { getDefaultDatabaseType(type: BuiltinType): { precisition?: number; type: string } | undefined; getDefaultValue(args: { fieldName: string; + fieldType: BuiltinType | 'Unsupported'; defaultValue: string; services: ZModelServices; enums: Enum[]; diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index 1704cb8f0..fc9991fef 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -9,7 +9,12 @@ export const sqlite: IntrospectionProvider = { isSupportedFeature(feature) { switch (feature) { case 'Schema': + // Multi-schema feature is not available for SQLite because it doesn't have + // the same concept of schemas as namespaces (unlike PostgreSQL, CockroachDB, SQL Server). + return false; case 'NativeEnum': + // SQLite doesn't support native enum types + return false; default: return false; } @@ -239,7 +244,7 @@ export const sqlite: IntrospectionProvider = { } }, - getDefaultValue({ defaultValue, fieldName, services, enums }) { + getDefaultValue({ defaultValue, fieldName, fieldType: _fieldType, services, enums }) { const val = defaultValue.trim(); const factories: DataFieldAttributeFactory[] = []; From 347ed340aef14d533bb85ac5d20ebc7582607d73 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 15:19:18 +0100 Subject: [PATCH 57/68] test(cli): expand and reorganize db pull tests Enhances the test suite for the database pull command by adding comprehensive coverage for common schema features and PostgreSQL-specific functionality. 
Includes new test cases for: - Restoring complex schemas from scratch, including relations and indexes - Preserving existing imports in multi-file schema setups - Handling PostgreSQL-specific features like multi-schema support and native enums - Verifying schema preservation for field and table mappings The tests are restructured for better clarity across different database providers. --- packages/cli/test/db/pull.test.ts | 421 +++++++++++++++++++++++++----- 1 file changed, 357 insertions(+), 64 deletions(-) diff --git a/packages/cli/test/db/pull.test.ts b/packages/cli/test/db/pull.test.ts index b04a46ffc..ec152d190 100644 --- a/packages/cli/test/db/pull.test.ts +++ b/packages/cli/test/db/pull.test.ts @@ -4,6 +4,7 @@ import { describe, expect, it } from 'vitest'; import { createProject, getDefaultPrelude, runCli } from '../utils'; import { loadSchemaDocument } from '../../src/actions/action-utils'; import { ZModelCodeGenerator } from '@zenstackhq/language'; +import { getTestDbProvider } from '@zenstackhq/testtools'; const getSchema = (workDir: string) => fs.readFileSync(path.join(workDir, 'zenstack/schema.zmodel')).toString(); const generator = new ZModelCodeGenerator({ @@ -11,9 +12,193 @@ const generator = new ZModelCodeGenerator({ indent: 4, }); -describe('DB pull - Sqlite specific', () => { - it("simple schema - pull shouldn't modify the schema", () => { - const workDir = createProject(`model User { +describe('DB pull - Common features (all providers)', () => { + describe('Pull from zero - restore complete schema from database', () => { + it('should restore basic schema with all supported types', async () => { + const workDir = createProject( + `model User { + id String @id @default(cuid()) + email String @unique + name String? + age Int @default(0) + balance Decimal @default(0.00) + isActive Boolean @default(true) + bigCounter BigInt @default(0) + score Float @default(0.0) + bio String? + avatar Bytes? + metadata Json? 
+ createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt +}`, + ); + runCli('format', workDir); + runCli('db push', workDir); + + // Store the schema after db push (this is what provider names will be) + const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); + const { model } = await loadSchemaDocument(schemaFile, { returnServices: true }); + const expectedSchema = generator.generate(model); + + // Remove schema content to simulate restoration from zero + fs.writeFileSync(schemaFile, getDefaultPrelude()); + + // Pull should fully restore the schema + runCli('db pull --indent 4', workDir); + + const restoredSchema = getSchema(workDir); + expect(restoredSchema).toEqual(expectedSchema); + expect(restoredSchema).toContain('model User'); + }); + + it('should restore schema with relations', async () => { + const workDir = createProject( + `model User { + id String @id @default(cuid()) + email String @unique + posts Post[] +} + +model Post { + id Int @id @default(autoincrement()) + title String + author User @relation(fields: [authorId], references: [id], onDelete: Cascade) + authorId String +}`, + ); + runCli('format', workDir); + runCli('db push', workDir); + + const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); + const { model } = await loadSchemaDocument(schemaFile, { returnServices: true }); + const expectedSchema = generator.generate(model); + + fs.writeFileSync(schemaFile, getDefaultPrelude()); + runCli('db pull --indent 4', workDir); + + const restoredSchema = getSchema(workDir); + expect(restoredSchema).toEqual(expectedSchema); + }); + + it('should restore schema with many-to-many relations', async () => { + const workDir = createProject( + `model Post { + id Int @id @default(autoincrement()) + title String + tags PostTag[] +} + +model Tag { + id Int @id @default(autoincrement()) + name String @unique + posts PostTag[] +} + +model PostTag { + post Post @relation(fields: [postId], references: [id], onDelete: Cascade) + postId Int + tag Tag @relation(fields: [tagId], references: [id], onDelete: Cascade) + tagId Int + + @@id([postId, tagId]) +}`, + ); + runCli('format', workDir); + runCli('db push', workDir); + + const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); + const { model } = await loadSchemaDocument(schemaFile, { returnServices: true }); + const expectedSchema = generator.generate(model); + + fs.writeFileSync(schemaFile, getDefaultPrelude()); + runCli('db pull --indent 4', workDir); + + const restoredSchema = getSchema(workDir); + expect(restoredSchema).toEqual(expectedSchema); + }); + + it('should restore schema with indexes and unique constraints', async () => { + const workDir = createProject( + `model User { + id String @id @default(cuid()) + email String @unique + username String + firstName String + lastName String + role String + + @@unique([username, email]) + @@index([role]) + @@index([firstName, lastName]) +}`, + ); + runCli('format', workDir); + runCli('db push', workDir); + + const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); + const { model } = await loadSchemaDocument(schemaFile, { returnServices: true }); + const expectedSchema = generator.generate(model); + + fs.writeFileSync(schemaFile, getDefaultPrelude()); + runCli('db pull --indent 4', workDir); + + const restoredSchema = getSchema(workDir); + expect(restoredSchema).toEqual(expectedSchema); + }); + + it('should restore schema with composite primary keys', async () => { + const workDir = createProject( + `model UserRole { + userId String + role String 
+ grantedAt DateTime @default(now()) + + @@id([userId, role]) +}`, + ); + runCli('format', workDir); + runCli('db push', workDir); + + const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); + const { model } = await loadSchemaDocument(schemaFile, { returnServices: true }); + const expectedSchema = generator.generate(model); + + fs.writeFileSync(schemaFile, getDefaultPrelude()); + runCli('db pull --indent 4', workDir); + + const restoredSchema = getSchema(workDir); + expect(restoredSchema).toEqual(expectedSchema); + }); + + it('should restore schema with field and table mappings', async () => { + const workDir = createProject( + `model User { + id String @id @default(cuid()) + email String @unique @map("email_address") + firstName String @map("first_name") + lastName String @map("last_name") + + @@map("users") +}`, + ); + runCli('format', workDir); + runCli('db push', workDir); + + const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); + const { model } = await loadSchemaDocument(schemaFile, { returnServices: true }); + const expectedSchema = generator.generate(model); + + fs.writeFileSync(schemaFile, getDefaultPrelude()); + runCli('db pull --indent 4', workDir); + + const restoredSchema = getSchema(workDir); + expect(restoredSchema).toEqual(expectedSchema); + }); + }); + + describe('Pull with existing schema - preserve schema features', () => { + it('should not modify a comprehensive schema with all features', () => { + const workDir = createProject(`model User { id String @id @default(cuid()) email String @unique @map("email_address") name String? @default("Anonymous") @@ -91,90 +276,198 @@ enum Role { ADMIN MODERATOR }`, + ); + runCli('format', workDir); + runCli('db push', workDir); + + const originalSchema = getSchema(workDir); + runCli('db pull --indent 4', workDir); + expect(getSchema(workDir)).toEqual(originalSchema); + }); + + it('should preserve imports when pulling with multi-file schema', () => { + const workDir = createProject(''); + const schemaPath = path.join(workDir, 'zenstack/schema.zmodel'); + const modelsDir = path.join(workDir, 'zenstack/models'); + fs.mkdirSync(modelsDir, { recursive: true }); + + // Create main schema with imports + const mainSchema = `${getDefaultPrelude()} + +import './models/user' +import './models/post'`; + fs.writeFileSync(schemaPath, mainSchema); + + // Create user model + const userModel = `model User { + id String @id @default(cuid()) + email String @unique + name String? + posts Post[] + createdAt DateTime @default(now()) +}`; + fs.writeFileSync(path.join(modelsDir, 'user.zmodel'), userModel); + + // Create post model + const postModel = `model Post { + id Int @id @default(autoincrement()) + title String + content String? 
+ author User @relation(fields: [authorId], references: [id], onDelete: Cascade) + authorId String + createdAt DateTime @default(now()) +}`; + fs.writeFileSync(path.join(modelsDir, 'post.zmodel'), postModel); + + runCli('format', workDir); + runCli('db push', workDir); + + // Store original schemas + const originalMainSchema = fs.readFileSync(schemaPath).toString(); + const originalUserSchema = fs.readFileSync(path.join(modelsDir, 'user.zmodel')).toString(); + const originalPostSchema = fs.readFileSync(path.join(modelsDir, 'post.zmodel')).toString(); + + // Pull and verify imports are preserved + runCli('db pull --indent 4', workDir); + + const pulledMainSchema = fs.readFileSync(schemaPath).toString(); + const pulledUserSchema = fs.readFileSync(path.join(modelsDir, 'user.zmodel')).toString(); + const pulledPostSchema = fs.readFileSync(path.join(modelsDir, 'post.zmodel')).toString(); + + expect(pulledMainSchema).toEqual(originalMainSchema); + expect(pulledUserSchema).toEqual(originalUserSchema); + expect(pulledPostSchema).toEqual(originalPostSchema); + + // Verify imports are still present in main schema + expect(pulledMainSchema).toContain("import './models/user'"); + expect(pulledMainSchema).toContain("import './models/post'"); + }); + }); +}); + +describe('DB pull - PostgreSQL specific features', () => { + it('should restore schema with multiple database schemas', async ({ skip }) => { + const provider = getTestDbProvider(); + if (provider !== 'postgresql') { + skip(); + return; + } + const workDir = createProject( + `model User { + id String @id @default(cuid()) + email String @unique + posts Post[] + + @@schema("auth") +} + +model Post { + id Int @id @default(autoincrement()) + title String + author User @relation(fields: [authorId], references: [id], onDelete: Cascade) + authorId String + + @@schema("content") +}`, + { provider: 'postgresql' }, ); runCli('format', workDir); runCli('db push', workDir); - const originalSchema = getSchema(workDir); + const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); + const { model } = await loadSchemaDocument(schemaFile, { returnServices: true }); + const expectedSchema = generator.generate(model); + + fs.writeFileSync(schemaFile, getDefaultPrelude({ provider: 'postgresql' })); runCli('db pull --indent 4', workDir); - expect(getSchema(workDir)).toEqual(originalSchema); + + const restoredSchema = getSchema(workDir); + expect(restoredSchema).toEqual(expectedSchema); + expect(restoredSchema).toContain('@@schema("auth")'); + expect(restoredSchema).toContain('@@schema("content")'); }); - it('simple schema - pull should recreate the schema.zmodel', async () => { + it('should preserve native PostgreSQL enums when schema exists', ({ skip }) => { + const provider = getTestDbProvider(); + if (provider !== 'postgresql') { + skip(); + return; + } const workDir = createProject( - `model Post { - id Int @id @default(autoincrement()) - authorId String - title String - content String? - published Boolean @default(false) - createdAt DateTime @default(now()) - slug String - score Float @default(0.0) - metadata Json? 
- user User @relation(fields: [authorId], references: [id], onDelete: Cascade, onUpdate: Cascade) - postTag PostTag[] + `model User { + id String @id @default(cuid()) + email String @unique + status UserStatus @default(ACTIVE) + role UserRole @default(USER) +} - @@unique([authorId, slug]) - @@index([authorId, published]) +enum UserStatus { + ACTIVE + INACTIVE + SUSPENDED } -model PostTag { - postId Int - tagId Int - assignedAt DateTime @default(now()) - note String? @default("initial") - post Post @relation(fields: [postId], references: [id], onDelete: Cascade, onUpdate: Cascade) - tag Tag @relation(fields: [tagId], references: [id], onDelete: Cascade, onUpdate: Cascade) +enum UserRole { + USER + ADMIN + MODERATOR +}`, + { provider: 'postgresql' }, + ); + runCli('format', workDir); + runCli('db push', workDir); - @@id([postId, tagId]) -} + const originalSchema = getSchema(workDir); + runCli('db pull --indent 4', workDir); + const pulledSchema = getSchema(workDir); -model Profile { - id Int @id @default(autoincrement()) - userId String @unique - sharedUserId String @unique @map("shared_userId") - bio String? - avatarUrl String? - profileUserId User @relation("Profile_userIdToUser", fields: [userId], references: [id], onDelete: Cascade, onUpdate: Cascade) - profileSharedUserId User @relation("Profile_shared_userIdToUser", fields: [sharedUserId], references: [id], onDelete: Cascade, onUpdate: Cascade) -} + expect(pulledSchema).toEqual(originalSchema); + expect(pulledSchema).toContain('enum UserStatus'); + expect(pulledSchema).toContain('enum UserRole'); + }); -model Tag { - id Int @id @default(autoincrement()) - name String @unique - createdAt DateTime @default(now()) - postTag PostTag[] + it('should not modify schema with PostgreSQL-specific features', ({ skip }) => { + const provider = getTestDbProvider(); + if (provider !== 'postgresql') { + skip(); + return; + } + const workDir = createProject( + `model User { + id String @id @default(cuid()) + email String @unique + status UserStatus @default(ACTIVE) + posts Post[] + metadata Json? - @@index([name], map: "tag_name_idx") + @@schema("auth") + @@index([status]) } -model User { - id String @id - email String @unique - name String? @default("Anonymous") - role String @default("USER") - createdAt DateTime @default(now()) - jsonData Json? - balance Decimal @default(0.00) - isActive Boolean @default(true) - bigCounter BigInt @default(0) - bytes Bytes? - post Post[] - profileUserId Profile? @relation("Profile_userIdToUser") - profileSharedUserId Profile? 
@relation("Profile_shared_userIdToUser") +model Post { + id Int @id @default(autoincrement()) + title String + author User @relation(fields: [authorId], references: [id], onDelete: Cascade) + authorId String + tags String[] - @@index([role]) + @@schema("content") + @@index([authorId]) +} + +enum UserStatus { + ACTIVE + INACTIVE + SUSPENDED }`, + { provider: 'postgresql' }, ); runCli('format', workDir); runCli('db push', workDir); - const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); - const { model } = await loadSchemaDocument(schemaFile, { returnServices: true }); - const originalSchema = generator.generate(model); - fs.writeFileSync(path.join(workDir, 'zenstack/schema.zmodel'), getDefaultPrelude()); - runCli('db pull --indent 4 --field-casing=camel', workDir); + const originalSchema = getSchema(workDir); + runCli('db pull --indent 4', workDir); + expect(getSchema(workDir)).toEqual(originalSchema); }); }); From 5887798f7923cb894d8d14632e0bef473d90bd16 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 20:03:15 +0100 Subject: [PATCH 58/68] refactor: restructure introspection provider interface and attribute generation --- packages/cli/src/actions/pull/index.ts | 42 +++++++++---------- .../cli/src/actions/pull/provider/provider.ts | 22 ++++++++-- 2 files changed, 38 insertions(+), 26 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 78f5ca7ac..896334473 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -256,15 +256,30 @@ export function syncTable({ builder.addAttribute((b) => b.setDecl(idAttribute)); } + // Add field-type-based attributes (e.g., @updatedAt for DateTime fields, @db.* attributes) + const fieldAttrs = provider.getFieldAttributes({ + fieldName: column.name, + fieldType: builtinType.type, + datatype: column.datatype, + length: column.length, + precision: column.precision, + services, + }); + fieldAttrs.forEach(builder.addAttribute.bind(builder)); + if (column.default) { - const defaultValuesAttrs = provider.getDefaultValue({ - fieldName: column.name, + const defaultExprBuilder = provider.getDefaultValue({ fieldType: builtinType.type, defaultValue: column.default, services, enums: model.declarations.filter((d) => d.$type === 'Enum') as Enum[], }); - defaultValuesAttrs.forEach(builder.addAttribute.bind(builder)); + if (defaultExprBuilder) { + const defaultAttr = new DataFieldAttributeFactory() + .setDecl(getAttributeRef('@default', services)) + .addArg(defaultExprBuilder); + builder.addAttribute(defaultAttr); + } } if (column.unique && !column.pk) { @@ -272,7 +287,7 @@ export function syncTable({ b.setDecl(uniqueAttribute); // Only add map if the unique constraint name differs from default patterns // Default patterns: TableName_columnName_key (Prisma) or just columnName (MySQL) - const isDefaultName = !column.unique_name + const isDefaultName = !column.unique_name || column.unique_name === `${table.name}_${column.name}_key` || column.unique_name === column.name; if (!isDefaultName) { @@ -288,25 +303,6 @@ export function syncTable({ ); } - const dbAttr = services.shared.workspace.IndexManager.allElements('Attribute').find( - (d) => d.name.toLowerCase() === `@db.${column.datatype.toLowerCase()}`, - )?.node as Attribute | undefined; - - const defaultDatabaseType = provider.getDefaultDatabaseType(builtinType.type as BuiltinType); - - if ( - dbAttr && - defaultDatabaseType && - (defaultDatabaseType.type !== 
column.datatype || - (defaultDatabaseType.precisition && - defaultDatabaseType.precisition !== (column.length || column.precision))) - ) { - const dbAttrFactory = new DataFieldAttributeFactory().setDecl(dbAttr); - if (column.length || column.precision) - dbAttrFactory.addArg((a) => a.NumberLiteral.setValue(column.length! || column.precision!)); - builder.addAttribute(dbAttrFactory); - } - return builder; }); }); diff --git a/packages/cli/src/actions/pull/provider/provider.ts b/packages/cli/src/actions/pull/provider/provider.ts index fefb2e950..6edee0663 100644 --- a/packages/cli/src/actions/pull/provider/provider.ts +++ b/packages/cli/src/actions/pull/provider/provider.ts @@ -1,6 +1,6 @@ import type { ZModelServices } from '@zenstackhq/language'; -import type { BuiltinType, Enum } from '@zenstackhq/language/ast'; -import type { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; +import type { BuiltinType, Enum, Expression } from '@zenstackhq/language/ast'; +import type { AstFactory, DataFieldAttributeFactory, ExpressionBuilder } from '@zenstackhq/language/factory'; export type Cascade = 'NO ACTION' | 'RESTRICT' | 'CASCADE' | 'SET NULL' | 'SET DEFAULT' | null; @@ -67,12 +67,28 @@ export interface IntrospectionProvider { isArray: boolean; }; getDefaultDatabaseType(type: BuiltinType): { precisition?: number; type: string } | undefined; + /** + * Get the expression builder callback for a field's @default attribute value. + * Returns null if no @default attribute should be added. + * The callback will be passed to DataFieldAttributeFactory.addArg(). + */ getDefaultValue(args: { - fieldName: string; fieldType: BuiltinType | 'Unsupported'; defaultValue: string; services: ZModelServices; enums: Enum[]; + }): ((builder: ExpressionBuilder) => AstFactory) | null; + /** + * Get additional field attributes based on field type and name (e.g., @updatedAt for DateTime fields, @db.* attributes). + * This is separate from getDefaultValue to keep concerns separated. 
+ */ + getFieldAttributes(args: { + fieldName: string; + fieldType: BuiltinType | 'Unsupported'; + datatype: string; + length: number | null; + precision: number | null; + services: ZModelServices; }): DataFieldAttributeFactory[]; isSupportedFeature(feature: DatabaseFeature): boolean; } From 95051e5222dd4dd97dc6673ec8782fab0b4287a0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 20:03:15 +0100 Subject: [PATCH 59/68] feat: modernize MySQL introspection provider --- .../cli/src/actions/pull/provider/mysql.ts | 226 ++++++++++-------- 1 file changed, 126 insertions(+), 100 deletions(-) diff --git a/packages/cli/src/actions/pull/provider/mysql.ts b/packages/cli/src/actions/pull/provider/mysql.ts index afd468049..66e103c21 100644 --- a/packages/cli/src/actions/pull/provider/mysql.ts +++ b/packages/cli/src/actions/pull/provider/mysql.ts @@ -1,4 +1,4 @@ -import type { BuiltinType } from '@zenstackhq/language/ast'; +import type { Attribute, BuiltinType } from '@zenstackhq/language/ast'; import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; @@ -178,143 +178,169 @@ export const mysql: IntrospectionProvider = { await connection.end(); } }, - getDefaultValue({ defaultValue, fieldName, fieldType, services, enums }) { + getDefaultValue({ defaultValue, fieldType, services, enums }) { const val = defaultValue.trim(); - const factories: DataFieldAttributeFactory[] = []; - const defaultAttr = new DataFieldAttributeFactory().setDecl(getAttributeRef('@default', services)); + // Handle NULL early + if (val.toUpperCase() === 'NULL') { + return null; + } - // Handle CURRENT_TIMESTAMP with optional precision (e.g., CURRENT_TIMESTAMP(3)) - if (/^CURRENT_TIMESTAMP(\(\d*\))?$/i.test(val) || val.toLowerCase() === 'current_timestamp()' || val.toLowerCase() === 'now()') { - factories.push(defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)))); + switch (fieldType) { + case 'DateTime': + if (/^CURRENT_TIMESTAMP(\(\d*\))?$/i.test(val) || val.toLowerCase() === 'current_timestamp()' || val.toLowerCase() === 'now()') { + return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)); + } + // Fallback to string literal for other DateTime defaults + return (ab) => ab.StringLiteral.setValue(val); - if (fieldName.toLowerCase() === 'updatedat' || fieldName.toLowerCase() === 'updated_at') { - factories.push(new DataFieldAttributeFactory().setDecl(getAttributeRef('@updatedAt', services))); - } - return factories; - } + case 'Int': + case 'BigInt': + if (val.toLowerCase() === 'auto_increment') { + return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services)); + } + if (/^-?\d+$/.test(val)) { + return (ab) => ab.NumberLiteral.setValue(val); + } + break; - // Handle auto_increment - if (val.toLowerCase() === 'auto_increment') { - factories.push( - defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services))), - ); - return factories; + case 'Float': + if (/^-?\d+\.\d+$/.test(val)) { + const numVal = parseFloat(val); + return (ab) => ab.NumberLiteral.setValue(numVal === Math.floor(numVal) ? 
numVal.toFixed(1) : String(numVal)); + } + if (/^-?\d+$/.test(val)) { + return (ab) => ab.NumberLiteral.setValue(val + '.0'); + } + break; + + case 'Decimal': + if (/^-?\d+\.\d+$/.test(val)) { + const numVal = parseFloat(val); + if (numVal === Math.floor(numVal)) { + return (ab) => ab.NumberLiteral.setValue(numVal.toFixed(2)); + } + return (ab) => ab.NumberLiteral.setValue(String(numVal)); + } + if (/^-?\d+$/.test(val)) { + return (ab) => ab.NumberLiteral.setValue(val + '.00'); + } + break; + + case 'Boolean': + if (val === 'true' || val === '1' || val === "b'1'") { + return (ab) => ab.BooleanLiteral.setValue(true); + } + if (val === 'false' || val === '0' || val === "b'0'") { + return (ab) => ab.BooleanLiteral.setValue(false); + } + break; + + case 'String': + if (val.startsWith("'") && val.endsWith("'")) { + const strippedValue = val.slice(1, -1).replace(/''/g, "'"); + const enumDef = enums.find((e) => e.fields.find((v) => getDbName(v) === strippedValue)); + if (enumDef) { + const enumField = enumDef.fields.find((v) => getDbName(v) === strippedValue); + if (enumField) { + return (ab) => ab.ReferenceExpr.setTarget(enumField); + } + } + return (ab) => ab.StringLiteral.setValue(strippedValue); + } + if (val.toLowerCase() === 'uuid()') { + return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('uuid', services)); + } + if (/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(val)) { + return (ab) => ab.StringLiteral.setValue(val); + } + break; } - // Handle NULL - if (val.toUpperCase() === 'NULL') { - return []; + // Fallback handlers for values that don't match field type-specific patterns + if (/^CURRENT_TIMESTAMP(\(\d*\))?$/i.test(val) || val.toLowerCase() === 'current_timestamp()' || val.toLowerCase() === 'now()') { + return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)); } - // Handle boolean values based on field type - if (fieldType === 'Boolean') { - if (val === 'true' || val === '1' || val === "b'1'") { - factories.push(defaultAttr.addArg((ab) => ab.BooleanLiteral.setValue(true))); - return factories; - } - if (val === 'false' || val === '0' || val === "b'0'") { - factories.push(defaultAttr.addArg((ab) => ab.BooleanLiteral.setValue(false))); - return factories; - } + if (val.toLowerCase() === 'auto_increment') { + return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services)); } - // Handle boolean literal values for non-boolean fields if (val === 'true' || val === "b'1'") { - factories.push(defaultAttr.addArg((ab) => ab.BooleanLiteral.setValue(true))); - return factories; + return (ab) => ab.BooleanLiteral.setValue(true); } if (val === 'false' || val === "b'0'") { - factories.push(defaultAttr.addArg((ab) => ab.BooleanLiteral.setValue(false))); - return factories; + return (ab) => ab.BooleanLiteral.setValue(false); } - // Handle numeric values based on field type - if (/^-?\d+\.\d+$/.test(val)) { - if (fieldType === 'Decimal') { - // For Decimal, normalize to 2 decimal places if it's all zeros after decimal - const numVal = parseFloat(val); - if (numVal === Math.floor(numVal)) { - factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(numVal.toFixed(2)))); - } else { - factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(String(numVal)))); - } - } else if (fieldType === 'Float') { - // For Float, preserve decimal point - const numVal = parseFloat(val); - factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(numVal === Math.floor(numVal) ? 
numVal.toFixed(1) : String(numVal)))); - } else { - factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(val))); - } - return factories; - } - if (/^-?\d+$/.test(val)) { - if (fieldType === 'Float') { - // For Float fields, add .0 to integer values - factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(val + '.0'))); - } else if (fieldType === 'Decimal') { - // For Decimal fields, add .00 to integer values - factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(val + '.00'))); - } else { - factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(val))); - } - return factories; + if (/^-?\d+\.\d+$/.test(val) || /^-?\d+$/.test(val)) { + return (ab) => ab.NumberLiteral.setValue(val); } - // Handle string values (quoted with single quotes) if (val.startsWith("'") && val.endsWith("'")) { const strippedValue = val.slice(1, -1).replace(/''/g, "'"); - - // Check if it's an enum value const enumDef = enums.find((e) => e.fields.find((v) => getDbName(v) === strippedValue)); if (enumDef) { const enumField = enumDef.fields.find((v) => getDbName(v) === strippedValue); if (enumField) { - factories.push(defaultAttr.addArg((ab) => ab.ReferenceExpr.setTarget(enumField))); - return factories; + return (ab) => ab.ReferenceExpr.setTarget(enumField); } } - - factories.push(defaultAttr.addArg((ab) => ab.StringLiteral.setValue(strippedValue))); - return factories; + return (ab) => ab.StringLiteral.setValue(strippedValue); } // Handle function calls (e.g., uuid(), now()) if (val.includes('(') && val.includes(')')) { - // Check for known functions if (val.toLowerCase() === 'uuid()') { - factories.push( - defaultAttr.addArg((a) => a.InvocationExpr.setFunction(getFunctionRef('uuid', services))), - ); - return factories; + return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('uuid', services)); } - - // For other functions, use dbgenerated - factories.push( - defaultAttr.addArg((a) => - a.InvocationExpr.setFunction(getFunctionRef('dbgenerated', services)).addArg((a) => - a.setValue((v) => v.StringLiteral.setValue(val)), - ), - ), - ); - return factories; + return (ab) => + ab.InvocationExpr.setFunction(getFunctionRef('dbgenerated', services)).addArg((a) => + a.setValue((v) => v.StringLiteral.setValue(val)), + ); } - // Handle unquoted string values (MySQL sometimes returns defaults without quotes) - // If it's not a number, boolean, or function, treat it as a string + // Handle unquoted string values if (/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(val)) { - factories.push(defaultAttr.addArg((ab) => ab.StringLiteral.setValue(val))); - return factories; + return (ab) => ab.StringLiteral.setValue(val); } // For any other unhandled cases, use dbgenerated - factories.push( - defaultAttr.addArg((a) => - a.InvocationExpr.setFunction(getFunctionRef('dbgenerated', services)).addArg((a) => - a.setValue((v) => v.StringLiteral.setValue(val)), - ), - ), - ); + return (ab) => + ab.InvocationExpr.setFunction(getFunctionRef('dbgenerated', services)).addArg((a) => + a.setValue((v) => v.StringLiteral.setValue(val)), + ); + }, + + getFieldAttributes({ fieldName, fieldType, datatype, length, precision, services }) { + const factories: DataFieldAttributeFactory[] = []; + + // Add @updatedAt for DateTime fields named updatedAt or updated_at + if (fieldType === 'DateTime' && (fieldName.toLowerCase() === 'updatedat' || fieldName.toLowerCase() === 'updated_at')) { + factories.push(new DataFieldAttributeFactory().setDecl(getAttributeRef('@updatedAt', services))); + } + + // Add @db.* attribute 
if the datatype differs from the default + const dbAttr = services.shared.workspace.IndexManager.allElements('Attribute').find( + (d) => d.name.toLowerCase() === `@db.${datatype.toLowerCase()}`, + )?.node as Attribute | undefined; + + const defaultDatabaseType = this.getDefaultDatabaseType(fieldType as BuiltinType); + + if ( + dbAttr && + defaultDatabaseType && + (defaultDatabaseType.type !== datatype || + (defaultDatabaseType.precisition && + defaultDatabaseType.precisition !== (length || precision))) + ) { + const dbAttrFactory = new DataFieldAttributeFactory().setDecl(dbAttr); + if (length || precision) { + dbAttrFactory.addArg((a) => a.NumberLiteral.setValue(length! || precision!)); + } + factories.push(dbAttrFactory); + } + return factories; }, }; From ce4850aab10bab2c0ea371b07f05b04c670dc73a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 20:03:15 +0100 Subject: [PATCH 60/68] feat: modernize PostgreSQL introspection provider --- .../src/actions/pull/provider/postgresql.ts | 176 ++++++++++++------ 1 file changed, 121 insertions(+), 55 deletions(-) diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index 26bc0ed92..ca70065ff 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -1,4 +1,4 @@ -import type { BuiltinType } from '@zenstackhq/language/ast'; +import type { Attribute, BuiltinType } from '@zenstackhq/language/ast'; import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; import { Client } from 'pg'; import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; @@ -109,37 +109,10 @@ export const postgresql: IntrospectionProvider = { return { type: 'bytea' }; } }, - getDefaultValue({ defaultValue, fieldName, fieldType: _fieldType, services, enums }) { + getDefaultValue({ defaultValue, fieldType, services, enums }) { const val = defaultValue.trim(); - const factories: DataFieldAttributeFactory[] = []; - - const defaultAttr = new DataFieldAttributeFactory().setDecl(getAttributeRef('@default', services)); - - if (val === 'CURRENT_TIMESTAMP' || val === 'now()') { - factories.push(defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)))); - - if (fieldName.toLowerCase() === 'updatedat' || fieldName.toLowerCase() === 'updated_at') { - factories.push(new DataFieldAttributeFactory().setDecl(getAttributeRef('@updatedAt', services))); - } - return factories; - } - if (val.startsWith('nextval(')) { - factories.push( - defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services))), - ); - return factories; - } - if (val.includes('(') && val.includes(')')) { - factories.push( - defaultAttr.addArg((a) => - a.InvocationExpr.setFunction(getFunctionRef('dbgenerated', services)).addArg((a) => - a.setValue((v) => v.StringLiteral.setValue(val)), - ), - ), - ); - return factories; - } + // Handle type casts early (PostgreSQL-specific pattern like 'value'::type) if (val.includes('::')) { const [value, type] = val .replace(/'/g, '') @@ -151,23 +124,17 @@ export const postgresql: IntrospectionProvider = { case 'json': case 'jsonb': case 'text': - if (value === 'NULL') return []; - factories.push(defaultAttr.addArg((a) => a.StringLiteral.setValue(value))); - break; + if (value === 'NULL') return null; + return (ab) => ab.StringLiteral.setValue(value); case 'real': - factories.push(defaultAttr.addArg((a) => 
a.NumberLiteral.setValue(value))); - break; + return (ab) => ab.NumberLiteral.setValue(value); default: { const enumDef = enums.find((e) => getDbName(e, true) === type); if (!enumDef) { - factories.push( - defaultAttr.addArg((a) => - a.InvocationExpr.setFunction(getFunctionRef('dbgenerated', services)).addArg((a) => - a.setValue((v) => v.StringLiteral.setValue(val)), - ), - ), - ); - break; + return (ab) => + ab.InvocationExpr.setFunction(getFunctionRef('dbgenerated', services)).addArg((a) => + a.setValue((v) => v.StringLiteral.setValue(val)), + ); } const enumField = enumDef.fields.find((v) => getDbName(v) === value); if (!enumField) { @@ -175,30 +142,129 @@ export const postgresql: IntrospectionProvider = { `Enum value ${value} not found in enum ${type} for default value ${defaultValue}`, ); } - - factories.push(defaultAttr.addArg((ab) => ab.ReferenceExpr.setTarget(enumField))); - break; + return (ab) => ab.ReferenceExpr.setTarget(enumField); } } + } + + switch (fieldType) { + case 'DateTime': + if (val === 'CURRENT_TIMESTAMP' || val === 'now()') { + return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)); + } + // Fallback to string literal for other DateTime defaults + return (ab) => ab.StringLiteral.setValue(val); + + case 'Int': + case 'BigInt': + if (val.startsWith('nextval(')) { + return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services)); + } + if (/^-?\d+$/.test(val)) { + return (ab) => ab.NumberLiteral.setValue(val); + } + break; + + case 'Float': + if (/^-?\d+\.\d+$/.test(val)) { + const numVal = parseFloat(val); + return (ab) => ab.NumberLiteral.setValue(numVal === Math.floor(numVal) ? numVal.toFixed(1) : String(numVal)); + } + if (/^-?\d+$/.test(val)) { + return (ab) => ab.NumberLiteral.setValue(val + '.0'); + } + break; + + case 'Decimal': + if (/^-?\d+\.\d+$/.test(val)) { + const numVal = parseFloat(val); + if (numVal === Math.floor(numVal)) { + return (ab) => ab.NumberLiteral.setValue(numVal.toFixed(2)); + } + return (ab) => ab.NumberLiteral.setValue(String(numVal)); + } + if (/^-?\d+$/.test(val)) { + return (ab) => ab.NumberLiteral.setValue(val + '.00'); + } + break; + + case 'Boolean': + if (val === 'true') { + return (ab) => ab.BooleanLiteral.setValue(true); + } + if (val === 'false') { + return (ab) => ab.BooleanLiteral.setValue(false); + } + break; - return factories; + case 'String': + if (val.startsWith("'") && val.endsWith("'")) { + return (ab) => ab.StringLiteral.setValue(val.slice(1, -1).replace(/''/g, "'")); + } + break; + } + + // Fallback handlers for values that don't match field type-specific patterns + if (val === 'CURRENT_TIMESTAMP' || val === 'now()') { + return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)); + } + + if (val.startsWith('nextval(')) { + return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services)); + } + + if (val.includes('(') && val.includes(')')) { + return (ab) => + ab.InvocationExpr.setFunction(getFunctionRef('dbgenerated', services)).addArg((a) => + a.setValue((v) => v.StringLiteral.setValue(val)), + ); } if (val === 'true' || val === 'false') { - factories.push(defaultAttr.addArg((ab) => ab.BooleanLiteral.setValue(val === 'true'))); - return factories; + return (ab) => ab.BooleanLiteral.setValue(val === 'true'); } - if (/^\d+$/.test(val) || /^-?\d+(\.\d+)?$/.test(val)) { - factories.push(defaultAttr.addArg((ab) => ab.NumberLiteral.setValue(val))); - return factories; + if (/^-?\d+\.\d+$/.test(val) || /^-?\d+$/.test(val)) { + return 
(ab) => ab.NumberLiteral.setValue(val); } if (val.startsWith("'") && val.endsWith("'")) { - factories.push(defaultAttr.addArg((ab) => ab.StringLiteral.setValue(val.slice(1, -1).replace(/''/g, "'")))); - return factories; + return (ab) => ab.StringLiteral.setValue(val.slice(1, -1).replace(/''/g, "'")); + } + + return null; + }, + + getFieldAttributes({ fieldName, fieldType, datatype, length, precision, services }) { + const factories: DataFieldAttributeFactory[] = []; + + // Add @updatedAt for DateTime fields named updatedAt or updated_at + if (fieldType === 'DateTime' && (fieldName.toLowerCase() === 'updatedat' || fieldName.toLowerCase() === 'updated_at')) { + factories.push(new DataFieldAttributeFactory().setDecl(getAttributeRef('@updatedAt', services))); } - return []; + + // Add @db.* attribute if the datatype differs from the default + const dbAttr = services.shared.workspace.IndexManager.allElements('Attribute').find( + (d) => d.name.toLowerCase() === `@db.${datatype.toLowerCase()}`, + )?.node as Attribute | undefined; + + const defaultDatabaseType = this.getDefaultDatabaseType(fieldType as BuiltinType); + + if ( + dbAttr && + defaultDatabaseType && + (defaultDatabaseType.type !== datatype || + (defaultDatabaseType.precisition && + defaultDatabaseType.precisition !== (length || precision))) + ) { + const dbAttrFactory = new DataFieldAttributeFactory().setDecl(dbAttr); + if (length || precision) { + dbAttrFactory.addArg((a) => a.NumberLiteral.setValue(length! || precision!)); + } + factories.push(dbAttrFactory); + } + + return factories; }, }; From 721abd7695700b8aeb80c7e450b61f66de5a03ac Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 20:03:16 +0100 Subject: [PATCH 61/68] feat: modernize SQLite introspection provider --- .../cli/src/actions/pull/provider/sqlite.ts | 131 ++++++++++++++---- 1 file changed, 106 insertions(+), 25 deletions(-) diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index fc9991fef..d90c2cef3 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -1,3 +1,4 @@ +import type { Attribute, BuiltinType } from '@zenstackhq/language/ast'; import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; @@ -141,7 +142,7 @@ export const sqlite: IntrospectionProvider = { nulls: null, })), }; - }); + }).reverse(); // Reverse to maintain creation order // Foreign keys mapping by column name const fkRows = all<{ @@ -244,37 +245,87 @@ export const sqlite: IntrospectionProvider = { } }, - getDefaultValue({ defaultValue, fieldName, fieldType: _fieldType, services, enums }) { + getDefaultValue({ defaultValue, fieldType, services, enums }) { const val = defaultValue.trim(); - const factories: DataFieldAttributeFactory[] = []; - const defaultAttr = new DataFieldAttributeFactory().setDecl(getAttributeRef('@default', services)); + switch (fieldType) { + case 'DateTime': + if (val === 'CURRENT_TIMESTAMP' || val === 'now()') { + return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)); + } + // Fallback to string literal for other DateTime defaults + return (ab) => ab.StringLiteral.setValue(val); - if (val === 'CURRENT_TIMESTAMP' || val === 'now()') { - factories.push(defaultAttr.addArg((ab) => 
ab.InvocationExpr.setFunction(getFunctionRef('now', services)))); + case 'Int': + case 'BigInt': + if (val === 'autoincrement') { + return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services)); + } + if (/^-?\d+$/.test(val)) { + return (ab) => ab.NumberLiteral.setValue(val); + } + break; - if (fieldName.toLowerCase() === 'updatedat' || fieldName.toLowerCase() === 'updated_at') { - factories.push(new DataFieldAttributeFactory().setDecl(getAttributeRef('@updatedAt', services))); - } - return factories; + case 'Float': + if (/^-?\d+\.\d+$/.test(val)) { + const numVal = parseFloat(val); + return (ab) => ab.NumberLiteral.setValue(numVal === Math.floor(numVal) ? numVal.toFixed(1) : String(numVal)); + } + if (/^-?\d+$/.test(val)) { + return (ab) => ab.NumberLiteral.setValue(val + '.0'); + } + break; + + case 'Decimal': + if (/^-?\d+\.\d+$/.test(val)) { + const numVal = parseFloat(val); + if (numVal === Math.floor(numVal)) { + return (ab) => ab.NumberLiteral.setValue(numVal.toFixed(2)); + } + return (ab) => ab.NumberLiteral.setValue(String(numVal)); + } + if (/^-?\d+$/.test(val)) { + return (ab) => ab.NumberLiteral.setValue(val + '.00'); + } + break; + + case 'Boolean': + if (val === 'true' || val === '1') { + return (ab) => ab.BooleanLiteral.setValue(true); + } + if (val === 'false' || val === '0') { + return (ab) => ab.BooleanLiteral.setValue(false); + } + break; + + case 'String': + if (val.startsWith("'") && val.endsWith("'")) { + const strippedName = val.slice(1, -1); + const enumDef = enums.find((e) => e.fields.find((v) => getDbName(v) === strippedName)); + if (enumDef) { + const enumField = enumDef.fields.find((v) => getDbName(v) === strippedName); + if (enumField) return (ab) => ab.ReferenceExpr.setTarget(enumField); + } + return (ab) => ab.StringLiteral.setValue(strippedName); + } + break; + } + + // Fallback handlers for values that don't match field type-specific patterns + if (val === 'CURRENT_TIMESTAMP' || val === 'now()') { + return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)); } - // Handle autoincrement if (val === 'autoincrement') { - factories.push( - defaultAttr.addArg((ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services))), - ); - return factories; + return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services)); } if (val === 'true' || val === 'false') { - factories.push(defaultAttr.addArg((a) => a.BooleanLiteral.setValue(val === 'true'))); - return factories; + return (ab) => ab.BooleanLiteral.setValue(val === 'true'); } - if (!Number.isNaN(parseFloat(val)) || !Number.isNaN(parseInt(val))) { - factories.push(defaultAttr.addArg((a) => a.NumberLiteral.setValue(val))); - return factories; + if (/^-?\d+\.\d+$/.test(val) || /^-?\d+$/.test(val)) { + return (ab) => ab.NumberLiteral.setValue(val); } if (val.startsWith("'") && val.endsWith("'")) { @@ -282,16 +333,46 @@ export const sqlite: IntrospectionProvider = { const enumDef = enums.find((e) => e.fields.find((v) => getDbName(v) === strippedName)); if (enumDef) { const enumField = enumDef.fields.find((v) => getDbName(v) === strippedName); - if (enumField) factories.push(defaultAttr.addArg((ab) => ab.ReferenceExpr.setTarget(enumField))); - } else { - factories.push(defaultAttr.addArg((a) => a.StringLiteral.setValue(strippedName))); + if (enumField) return (ab) => ab.ReferenceExpr.setTarget(enumField); } - return factories; + return (ab) => ab.StringLiteral.setValue(strippedName); } //TODO: add more default value factories if 
exists throw new Error( - `This default value type currently is not supported. Plesase open an issue on github. Values: "${defaultValue}"`, + `This default value type currently is not supported. Please open an issue on github. Values: "${defaultValue}"`, ); }, + + getFieldAttributes({ fieldName, fieldType, datatype, length, precision, services }) { + const factories: DataFieldAttributeFactory[] = []; + + // Add @updatedAt for DateTime fields named updatedAt or updated_at + if (fieldType === 'DateTime' && (fieldName.toLowerCase() === 'updatedat' || fieldName.toLowerCase() === 'updated_at')) { + factories.push(new DataFieldAttributeFactory().setDecl(getAttributeRef('@updatedAt', services))); + } + + // Add @db.* attribute if the datatype differs from the default + const dbAttr = services.shared.workspace.IndexManager.allElements('Attribute').find( + (d) => d.name.toLowerCase() === `@db.${datatype.toLowerCase()}`, + )?.node as Attribute | undefined; + + const defaultDatabaseType = this.getDefaultDatabaseType(fieldType as BuiltinType); + + if ( + dbAttr && + defaultDatabaseType && + (defaultDatabaseType.type !== datatype || + (defaultDatabaseType.precisition && + defaultDatabaseType.precisition !== (length || precision))) + ) { + const dbAttrFactory = new DataFieldAttributeFactory().setDecl(dbAttr); + if (length || precision) { + dbAttrFactory.addArg((a) => a.NumberLiteral.setValue(length! || precision!)); + } + factories.push(dbAttrFactory); + } + + return factories; + }, }; From ca7167cc53ce24e299b39b1ee44ac71cece3ebbf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 20:03:16 +0100 Subject: [PATCH 62/68] fix: improve relation field naming and default action handling --- packages/cli/src/actions/pull/index.ts | 30 +++++++++++++++----------- 1 file changed, 17 insertions(+), 13 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index 896334473..d61873c64 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -1,9 +1,9 @@ import type { ZModelServices } from '@zenstackhq/language'; import colors from 'colors'; import { + isArrayExpr, isEnum, - type Attribute, - type BuiltinType, + isReferenceExpr, type DataField, type DataModel, type Enum, @@ -444,7 +444,8 @@ export function syncRelation({ | undefined; if (!sourceModel) return; - const sourceField = sourceModel.fields.find((f) => getDbName(f) === relation.column) as DataField | undefined; + const sourceFieldId = sourceModel.fields.findIndex((f) => getDbName(f) === relation.column); + const sourceField = sourceModel.fields[sourceFieldId] as DataField | undefined; if (!sourceField) return; const targetModel = model.declarations.find( @@ -458,11 +459,16 @@ export function syncRelation({ const fieldPrefix = /[0-9]/g.test(sourceModel.name.charAt(0)) ? '_' : ''; const relationName = `${relation.table}${simmilarRelations > 0 ? `_${relation.column}` : ''}To${relation.references.table}`; + + const sourceNameFromReference = sourceField.name.toLowerCase().endsWith('id') ? `${resolveNameCasing("camel", sourceField.name.slice(0, -2)).name}${relation.type === 'many'? 's' : ''}` : undefined; + + const sourceFieldFromReference = sourceModel.fields.find((f) => f.name === sourceNameFromReference); + let { name: sourceFieldName } = resolveNameCasing( options.fieldCasing, simmilarRelations > 0 ? 
`${fieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}` - : targetModel.name, + : `${(!sourceFieldFromReference? sourceNameFromReference : undefined) || resolveNameCasing("camel", targetModel.name).name}${relation.type === 'many'? 's' : ''}`, ); if (sourceModel.fields.find((f) => f.name === sourceFieldName)) { @@ -486,7 +492,9 @@ export function syncRelation({ 'references', ); - if (relation.foreign_key_on_delete && relation.foreign_key_on_delete !== 'SET NULL') { + // Prisma defaults: onDelete is SetNull for optional, Restrict for mandatory + const onDeleteDefault = relation.nullable ? 'SET NULL' : 'RESTRICT'; + if (relation.foreign_key_on_delete && relation.foreign_key_on_delete !== onDeleteDefault) { const enumRef = getEnumRef('ReferentialAction', services); if (!enumRef) throw new Error('ReferentialAction enum not found'); const enumFieldRef = enumRef.fields.find( @@ -496,7 +504,8 @@ export function syncRelation({ ab.addArg((a) => a.ReferenceExpr.setTarget(enumFieldRef), 'onDelete'); } - if (relation.foreign_key_on_update && relation.foreign_key_on_update !== 'SET NULL') { + // Prisma default: onUpdate is Cascade + if (relation.foreign_key_on_update && relation.foreign_key_on_update !== 'CASCADE') { const enumRef = getEnumRef('ReferentialAction', services); if (!enumRef) throw new Error('ReferentialAction enum not found'); const enumFieldRef = enumRef.fields.find( @@ -511,14 +520,14 @@ export function syncRelation({ return ab; }); - sourceModel.fields.push(sourceFieldFactory.node); + sourceModel.fields.splice(sourceFieldId, 0, sourceFieldFactory.node); // Remove the original scalar foreign key field const oppositeFieldPrefix = /[0-9]/g.test(targetModel.name.charAt(0)) ? '_' : ''; const { name: oppositeFieldName } = resolveNameCasing( options.fieldCasing, simmilarRelations > 0 ? `${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}` - : sourceModel.name, + : `${resolveNameCasing("camel", sourceModel.name).name}${relation.references.type === 'many'? 
's' : ''}`, ); const targetFieldFactory = new DataFieldFactory() @@ -536,9 +545,4 @@ export function syncRelation({ ); targetModel.fields.push(targetFieldFactory.node); - - // targetModel.fields.sort((a, b) => { - // if (a.type.reference || b.type.reference) return a.name.localeCompare(b.name); - // return 0; - // }); } From 58a238eaa558193f8dc603146577aee06baa1db8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 20:03:16 +0100 Subject: [PATCH 63/68] feat: track imports and auto-format during db pull --- packages/cli/src/actions/db.ts | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 6a8135650..28c60051f 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -1,5 +1,5 @@ import { config } from '@dotenvx/dotenvx'; -import { ZModelCodeGenerator } from '@zenstackhq/language'; +import { formatDocument, ZModelCodeGenerator } from '@zenstackhq/language'; import { DataModel, Enum, type Model } from '@zenstackhq/language/ast'; import colors from 'colors'; import fs from 'node:fs'; @@ -83,7 +83,7 @@ async function runPull(options: PullOptions) { const spinner = ora(); try { const schemaFile = getSchemaFile(options.schema); - const { model, services } = await loadSchemaDocument(schemaFile, { returnServices: true }); + const { model, services } = await loadSchemaDocument(schemaFile, { returnServices: true, keepImports: true }); config({ ignore: ['MISSING_ENV_FILE'], }); @@ -400,7 +400,7 @@ async function runPull(options: PullOptions) { }); if (options.out) { - const zmodelSchema = generator.generate(newModel); + const zmodelSchema = await formatDocument(generator.generate(newModel)); console.log(colors.blue(`Writing to ${options.out}`)); @@ -408,11 +408,11 @@ async function runPull(options: PullOptions) { fs.writeFileSync(outPath, zmodelSchema); } else { - docs.forEach(({ uri, parseResult: { value: model } }) => { - const zmodelSchema = generator.generate(model); + for (const { uri, parseResult: { value: model } } of docs) { + const zmodelSchema = await formatDocument(generator.generate(model)); console.log(colors.blue(`Writing to ${uri.path}`)); fs.writeFileSync(uri.fsPath, zmodelSchema); - }); + } } console.log(colors.green.bold('\nPull completed successfully!')); From 5720c918adb3a5fb70a689beac9e0b07bba6ab71 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 20:03:17 +0100 Subject: [PATCH 64/68] test: update pull tests to reflect naming and formatting improvements --- packages/cli/src/actions/pull/index.ts | 2 - packages/cli/test/db/pull.test.ts | 155 +++++++++++-------------- packages/cli/test/utils.ts | 15 ++- 3 files changed, 80 insertions(+), 92 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index d61873c64..b94eb69d1 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -1,9 +1,7 @@ import type { ZModelServices } from '@zenstackhq/language'; import colors from 'colors'; import { - isArrayExpr, isEnum, - isReferenceExpr, type DataField, type DataModel, type Enum, diff --git a/packages/cli/test/db/pull.test.ts b/packages/cli/test/db/pull.test.ts index ec152d190..9c0230141 100644 --- a/packages/cli/test/db/pull.test.ts +++ b/packages/cli/test/db/pull.test.ts @@ -1,9 +1,9 @@ import fs from 'node:fs'; import path from 'node:path'; import { describe, expect, it } from 'vitest'; -import 
{ createProject, getDefaultPrelude, runCli } from '../utils'; +import { createFormattedProject, createProject, getDefaultPrelude, runCli } from '../utils'; import { loadSchemaDocument } from '../../src/actions/action-utils'; -import { ZModelCodeGenerator } from '@zenstackhq/language'; +import { ZModelCodeGenerator, formatDocument } from '@zenstackhq/language'; import { getTestDbProvider } from '@zenstackhq/testtools'; const getSchema = (workDir: string) => fs.readFileSync(path.join(workDir, 'zenstack/schema.zmodel')).toString(); @@ -15,9 +15,9 @@ const generator = new ZModelCodeGenerator({ describe('DB pull - Common features (all providers)', () => { describe('Pull from zero - restore complete schema from database', () => { it('should restore basic schema with all supported types', async () => { - const workDir = createProject( + const workDir = await createFormattedProject( `model User { - id String @id @default(cuid()) + id Int @id @default(autoincrement()) email String @unique name String? age Int @default(0) @@ -32,7 +32,6 @@ describe('DB pull - Common features (all providers)', () => { updatedAt DateTime @updatedAt }`, ); - runCli('format', workDir); runCli('db push', workDir); // Store the schema after db push (this is what provider names will be) @@ -48,25 +47,23 @@ describe('DB pull - Common features (all providers)', () => { const restoredSchema = getSchema(workDir); expect(restoredSchema).toEqual(expectedSchema); - expect(restoredSchema).toContain('model User'); }); it('should restore schema with relations', async () => { - const workDir = createProject( - `model User { - id String @id @default(cuid()) - email String @unique - posts Post[] -} - -model Post { + const workDir = await createFormattedProject( + `model Post { id Int @id @default(autoincrement()) title String author User @relation(fields: [authorId], references: [id], onDelete: Cascade) - authorId String + authorId Int +} + +model User { + id Int @id @default(autoincrement()) + email String @unique + posts Post[] }`, ); - runCli('format', workDir); runCli('db push', workDir); const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); @@ -81,17 +78,11 @@ model Post { }); it('should restore schema with many-to-many relations', async () => { - const workDir = createProject( + const workDir = await createFormattedProject( `model Post { id Int @id @default(autoincrement()) title String - tags PostTag[] -} - -model Tag { - id Int @id @default(autoincrement()) - name String @unique - posts PostTag[] + postTags PostTag[] } model PostTag { @@ -101,9 +92,14 @@ model PostTag { tagId Int @@id([postId, tagId]) +} + +model Tag { + id Int @id @default(autoincrement()) + name String @unique + postTags PostTag[] }`, ); - runCli('format', workDir); runCli('db push', workDir); const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); @@ -118,9 +114,9 @@ model PostTag { }); it('should restore schema with indexes and unique constraints', async () => { - const workDir = createProject( + const workDir = await createFormattedProject( `model User { - id String @id @default(cuid()) + id Int @id @default(autoincrement()) email String @unique username String firstName String @@ -130,9 +126,9 @@ model PostTag { @@unique([username, email]) @@index([role]) @@index([firstName, lastName]) + @@index([email, username, role]) }`, ); - runCli('format', workDir); runCli('db push', workDir); const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); @@ -147,7 +143,7 @@ model PostTag { }); it('should restore schema with composite primary 
keys', async () => { - const workDir = createProject( + const workDir = await createFormattedProject( `model UserRole { userId String role String @@ -156,7 +152,6 @@ model PostTag { @@id([userId, role]) }`, ); - runCli('format', workDir); runCli('db push', workDir); const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); @@ -170,10 +165,13 @@ model PostTag { expect(restoredSchema).toEqual(expectedSchema); }); - it('should restore schema with field and table mappings', async () => { - const workDir = createProject( + }); + + describe('Pull with existing schema - preserve schema features', () => { + it('should preserve field and table mappings', async () => { + const workDir = await createFormattedProject( `model User { - id String @id @default(cuid()) + id Int @id @default(autoincrement()) email String @unique @map("email_address") firstName String @map("first_name") lastName String @map("last_name") @@ -181,25 +179,17 @@ model PostTag { @@map("users") }`, ); - runCli('format', workDir); runCli('db push', workDir); - const schemaFile = path.join(workDir, 'zenstack/schema.zmodel'); - const { model } = await loadSchemaDocument(schemaFile, { returnServices: true }); - const expectedSchema = generator.generate(model); - - fs.writeFileSync(schemaFile, getDefaultPrelude()); + const originalSchema = getSchema(workDir); runCli('db pull --indent 4', workDir); - const restoredSchema = getSchema(workDir); - expect(restoredSchema).toEqual(expectedSchema); + expect(getSchema(workDir)).toEqual(originalSchema); }); - }); - describe('Pull with existing schema - preserve schema features', () => { - it('should not modify a comprehensive schema with all features', () => { - const workDir = createProject(`model User { - id String @id @default(cuid()) + it('should not modify a comprehensive schema with all features', async () => { + const workDir = await createFormattedProject(`model User { + id Int @id @default(autoincrement()) email String @unique @map("email_address") name String? @default("Anonymous") role Role @default(USER) @@ -221,9 +211,9 @@ model PostTag { model Profile { id Int @id @default(autoincrement()) user User @relation(fields: [userId], references: [id], onDelete: Cascade) - userId String @unique + userId Int @unique user_shared User @relation("shared", fields: [shared_userId], references: [id], onDelete: Cascade) - shared_userId String @unique + shared_userId Int @unique bio String? avatarUrl String? @@ -233,7 +223,7 @@ model Profile { model Post { id Int @id @default(autoincrement()) author User @relation(fields: [authorId], references: [id], onDelete: Cascade) - authorId String + authorId Int title String content String? 
published Boolean @default(false) @@ -277,7 +267,6 @@ enum Role { MODERATOR }`, ); - runCli('format', workDir); runCli('db push', workDir); const originalSchema = getSchema(workDir); @@ -285,48 +274,47 @@ enum Role { expect(getSchema(workDir)).toEqual(originalSchema); }); - it('should preserve imports when pulling with multi-file schema', () => { - const workDir = createProject(''); + it('should preserve imports when pulling with multi-file schema', async () => { + const workDir = createProject('', { customPrelude: true }); const schemaPath = path.join(workDir, 'zenstack/schema.zmodel'); const modelsDir = path.join(workDir, 'zenstack/models'); + fs.mkdirSync(modelsDir, { recursive: true }); // Create main schema with imports - const mainSchema = `${getDefaultPrelude()} + const mainSchema = await formatDocument(`import "./models/user" +import "./models/post" -import './models/user' -import './models/post'`; +${getDefaultPrelude()}`); fs.writeFileSync(schemaPath, mainSchema); // Create user model - const userModel = `model User { - id String @id @default(cuid()) + const userModel = await formatDocument(`import "./post" + +model User { + id Int @id @default(autoincrement()) email String @unique name String? posts Post[] createdAt DateTime @default(now()) -}`; +}`); fs.writeFileSync(path.join(modelsDir, 'user.zmodel'), userModel); // Create post model - const postModel = `model Post { + const postModel = await formatDocument(`import "./user" + +model Post { id Int @id @default(autoincrement()) title String content String? author User @relation(fields: [authorId], references: [id], onDelete: Cascade) - authorId String + authorId Int createdAt DateTime @default(now()) -}`; +}`); fs.writeFileSync(path.join(modelsDir, 'post.zmodel'), postModel); - runCli('format', workDir); runCli('db push', workDir); - // Store original schemas - const originalMainSchema = fs.readFileSync(schemaPath).toString(); - const originalUserSchema = fs.readFileSync(path.join(modelsDir, 'user.zmodel')).toString(); - const originalPostSchema = fs.readFileSync(path.join(modelsDir, 'post.zmodel')).toString(); - // Pull and verify imports are preserved runCli('db pull --indent 4', workDir); @@ -334,13 +322,9 @@ import './models/post'`; const pulledUserSchema = fs.readFileSync(path.join(modelsDir, 'user.zmodel')).toString(); const pulledPostSchema = fs.readFileSync(path.join(modelsDir, 'post.zmodel')).toString(); - expect(pulledMainSchema).toEqual(originalMainSchema); - expect(pulledUserSchema).toEqual(originalUserSchema); - expect(pulledPostSchema).toEqual(originalPostSchema); - - // Verify imports are still present in main schema - expect(pulledMainSchema).toContain("import './models/user'"); - expect(pulledMainSchema).toContain("import './models/post'"); + expect(pulledMainSchema).toEqual(mainSchema); + expect(pulledUserSchema).toEqual(userModel); + expect(pulledPostSchema).toEqual(postModel); }); }); }); @@ -352,9 +336,9 @@ describe('DB pull - PostgreSQL specific features', () => { skip(); return; } - const workDir = createProject( + const workDir = await createFormattedProject( `model User { - id String @id @default(cuid()) + id Int @id @default(autoincrement()) email String @unique posts Post[] @@ -365,13 +349,12 @@ model Post { id Int @id @default(autoincrement()) title String author User @relation(fields: [authorId], references: [id], onDelete: Cascade) - authorId String + authorId Int @@schema("content") }`, { provider: 'postgresql' }, ); - runCli('format', workDir); runCli('db push', workDir); const schemaFile = 
path.join(workDir, 'zenstack/schema.zmodel'); @@ -387,15 +370,15 @@ model Post { expect(restoredSchema).toContain('@@schema("content")'); }); - it('should preserve native PostgreSQL enums when schema exists', ({ skip }) => { + it('should preserve native PostgreSQL enums when schema exists', async ({ skip }) => { const provider = getTestDbProvider(); if (provider !== 'postgresql') { skip(); return; } - const workDir = createProject( + const workDir = await createFormattedProject( `model User { - id String @id @default(cuid()) + id Int @id @default(autoincrement()) email String @unique status UserStatus @default(ACTIVE) role UserRole @default(USER) @@ -414,7 +397,6 @@ enum UserRole { }`, { provider: 'postgresql' }, ); - runCli('format', workDir); runCli('db push', workDir); const originalSchema = getSchema(workDir); @@ -426,15 +408,15 @@ enum UserRole { expect(pulledSchema).toContain('enum UserRole'); }); - it('should not modify schema with PostgreSQL-specific features', ({ skip }) => { + it('should not modify schema with PostgreSQL-specific features', async ({ skip }) => { const provider = getTestDbProvider(); if (provider !== 'postgresql') { skip(); return; } - const workDir = createProject( + const workDir = await createFormattedProject( `model User { - id String @id @default(cuid()) + id Int @id @default(autoincrement()) email String @unique status UserStatus @default(ACTIVE) posts Post[] @@ -448,7 +430,7 @@ model Post { id Int @id @default(autoincrement()) title String author User @relation(fields: [authorId], references: [id], onDelete: Cascade) - authorId String + authorId Int tags String[] @@schema("content") @@ -462,7 +444,6 @@ enum UserStatus { }`, { provider: 'postgresql' }, ); - runCli('format', workDir); runCli('db push', workDir); const originalSchema = getSchema(workDir); diff --git a/packages/cli/test/utils.ts b/packages/cli/test/utils.ts index 8b5d79d10..7820a0b5a 100644 --- a/packages/cli/test/utils.ts +++ b/packages/cli/test/utils.ts @@ -4,6 +4,7 @@ import { execSync } from 'node:child_process'; import fs from 'node:fs'; import path from 'node:path'; import { expect } from 'vitest'; +import { formatDocument } from '@zenstackhq/language'; const TEST_PG_CONFIG = { host: process.env['TEST_PG_HOST'] ?? 'localhost', @@ -63,8 +64,7 @@ export function getDefaultPrelude(options?: { provider?: 'sqlite' | 'postgresql' const ZMODEL_PRELUDE = `datasource db { provider = "${provider}" url = "${dbUrl}" -} -`; +}`; return ZMODEL_PRELUDE; } @@ -75,10 +75,19 @@ export function createProject( const workDir = createTestProject(); fs.mkdirSync(path.join(workDir, 'zenstack'), { recursive: true }); const schemaPath = path.join(workDir, 'zenstack/schema.zmodel'); - fs.writeFileSync(schemaPath, !options?.customPrelude ? `${getDefaultPrelude({ provider: options?.provider })}\n${zmodel}` : zmodel); + fs.writeFileSync(schemaPath, !options?.customPrelude ? 
`${getDefaultPrelude({ provider: options?.provider })}\n\n${zmodel}` : zmodel); return workDir; } +export async function createFormattedProject( + zmodel: string, + options?: { provider?: 'sqlite' | 'postgresql' | 'mysql' }, +) { + const fullContent = `${getDefaultPrelude({ provider: options?.provider })}\n\n${zmodel}`; + const formatted = await formatDocument(fullContent); + return createProject(formatted, { customPrelude: true, provider: options?.provider }); +} + export function runCli(command: string, cwd: string) { const cli = path.join(__dirname, '../dist/index.js'); execSync(`node ${cli} ${command}`, { cwd }); From b4a6651a9aafe3d0541a4be9e98af3782246d5c9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 20:49:55 +0100 Subject: [PATCH 65/68] fix(cli): refactor PostgreSQL type casting and fix index order Extracts PostgreSQL type casting logic into a reusable helper function to improve maintainability and ensure consistent attribute handling across all field types. Adjusts the table index sorting logic to better preserve the original database creation order while maintaining the priority of unique indexes. --- packages/cli/src/actions/pull/index.ts | 2 +- .../src/actions/pull/provider/postgresql.ts | 100 +++++++++++------- .../cli/src/actions/pull/provider/sqlite.ts | 2 +- 3 files changed, 65 insertions(+), 39 deletions(-) diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts index b94eb69d1..e9cb68f6d 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -331,7 +331,7 @@ export function syncTable({ } // Sort indexes: unique indexes first, then other indexes - const sortedIndexes = table.indexes.sort((a, b) => { + const sortedIndexes = table.indexes.reverse().sort((a, b) => { if (a.unique && !b.unique) return -1; if (!a.unique && b.unique) return 1; return 0; diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index ca70065ff..1e9dfcac3 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -1,8 +1,9 @@ -import type { Attribute, BuiltinType } from '@zenstackhq/language/ast'; -import { DataFieldAttributeFactory } from '@zenstackhq/language/factory'; +import type { Attribute, BuiltinType, Enum, Expression } from '@zenstackhq/language/ast'; +import { AstFactory, DataFieldAttributeFactory, ExpressionBuilder } from '@zenstackhq/language/factory'; import { Client } from 'pg'; import { getAttributeRef, getDbName, getFunctionRef } from '../utils'; import type { IntrospectedEnum, IntrospectedSchema, IntrospectedTable, IntrospectionProvider } from './provider'; +import type { ZModelServices } from '@zenstackhq/language'; export const postgresql: IntrospectionProvider = { isSupportedFeature(feature) { @@ -112,46 +113,16 @@ export const postgresql: IntrospectionProvider = { getDefaultValue({ defaultValue, fieldType, services, enums }) { const val = defaultValue.trim(); - // Handle type casts early (PostgreSQL-specific pattern like 'value'::type) - if (val.includes('::')) { - const [value, type] = val - .replace(/'/g, '') - .split('::') - .map((s) => s.trim()) as [string, string]; - switch (type) { - case 'character varying': - case 'uuid': - case 'json': - case 'jsonb': - case 'text': - if (value === 'NULL') return null; - return (ab) => ab.StringLiteral.setValue(value); - case 'real': - return (ab) => 
ab.NumberLiteral.setValue(value); - default: { - const enumDef = enums.find((e) => getDbName(e, true) === type); - if (!enumDef) { - return (ab) => - ab.InvocationExpr.setFunction(getFunctionRef('dbgenerated', services)).addArg((a) => - a.setValue((v) => v.StringLiteral.setValue(val)), - ); - } - const enumField = enumDef.fields.find((v) => getDbName(v) === value); - if (!enumField) { - throw new Error( - `Enum value ${value} not found in enum ${type} for default value ${defaultValue}`, - ); - } - return (ab) => ab.ReferenceExpr.setTarget(enumField); - } - } - } - switch (fieldType) { case 'DateTime': if (val === 'CURRENT_TIMESTAMP' || val === 'now()') { return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)); } + + if (val.includes('::')) { + return typeCastingConvert({defaultValue,enums,val,services}); + } + // Fallback to string literal for other DateTime defaults return (ab) => ab.StringLiteral.setValue(val); @@ -160,12 +131,21 @@ export const postgresql: IntrospectionProvider = { if (val.startsWith('nextval(')) { return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('autoincrement', services)); } + + if (val.includes('::')) { + return typeCastingConvert({defaultValue,enums,val,services}); + } + if (/^-?\d+$/.test(val)) { return (ab) => ab.NumberLiteral.setValue(val); } break; case 'Float': + if (val.includes('::')) { + return typeCastingConvert({defaultValue,enums,val,services}); + } + if (/^-?\d+\.\d+$/.test(val)) { const numVal = parseFloat(val); return (ab) => ab.NumberLiteral.setValue(numVal === Math.floor(numVal) ? numVal.toFixed(1) : String(numVal)); @@ -176,6 +156,10 @@ export const postgresql: IntrospectionProvider = { break; case 'Decimal': + if (val.includes('::')) { + return typeCastingConvert({defaultValue,enums,val,services}); + } + if (/^-?\d+\.\d+$/.test(val)) { const numVal = parseFloat(val); if (numVal === Math.floor(numVal)) { @@ -198,12 +182,20 @@ export const postgresql: IntrospectionProvider = { break; case 'String': + if (val.includes('::')) { + return typeCastingConvert({defaultValue,enums,val,services}); + } + if (val.startsWith("'") && val.endsWith("'")) { return (ab) => ab.StringLiteral.setValue(val.slice(1, -1).replace(/''/g, "'")); } break; } + if (val.includes('::')) { + return typeCastingConvert({defaultValue,enums,val,services}); + } + // Fallback handlers for values that don't match field type-specific patterns if (val === 'CURRENT_TIMESTAMP' || val === 'now()') { return (ab) => ab.InvocationExpr.setFunction(getFunctionRef('now', services)); @@ -447,3 +439,37 @@ WHERE AND "cls"."relname" !~ '_prisma_migrations' ORDER BY "ns"."nspname", "cls"."relname" ASC; `; + +function typeCastingConvert({defaultValue, enums, val, services}:{val: string, enums: Enum[], defaultValue:string, services:ZModelServices}): ((builder: ExpressionBuilder) => AstFactory) | null { + const [value, type] = val + .replace(/'/g, '') + .split('::') + .map((s) => s.trim()) as [string, string]; + switch (type) { + case 'character varying': + case 'uuid': + case 'json': + case 'jsonb': + case 'text': + if (value === 'NULL') return null; + return (ab) => ab.StringLiteral.setValue(value); + case 'real': + return (ab) => ab.NumberLiteral.setValue(value); + default: { + const enumDef = enums.find((e) => getDbName(e, true) === type); + if (!enumDef) { + return (ab) => + ab.InvocationExpr.setFunction(getFunctionRef('dbgenerated', services)).addArg((a) => + a.setValue((v) => v.StringLiteral.setValue(val)), + ); + } + const enumField = enumDef.fields.find((v) => 
getDbName(v) === value); + if (!enumField) { + throw new Error( + `Enum value ${value} not found in enum ${type} for default value ${defaultValue}`, + ); + } + return (ab) => ab.ReferenceExpr.setTarget(enumField); + } + } +} \ No newline at end of file diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index d90c2cef3..a6961b009 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -142,7 +142,7 @@ export const sqlite: IntrospectionProvider = { nulls: null, })), }; - }).reverse(); // Reverse to maintain creation order + }); // Foreign keys mapping by column name const fkRows = all<{ From 84d5afc7c3b516845c3c2127bb99bda3f22fbba4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 21:11:14 +0100 Subject: [PATCH 66/68] fix(cli): filter out auto-generated MySQL indexes Prevents foreign key indexes created automatically by MySQL from appearing in the introspected schema. This ensures the output reflects manually defined indexes and avoids redundancy in schema definitions. --- packages/cli/src/actions/pull/provider/mysql.ts | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/cli/src/actions/pull/provider/mysql.ts b/packages/cli/src/actions/pull/provider/mysql.ts index 66e103c21..702cc06b5 100644 --- a/packages/cli/src/actions/pull/provider/mysql.ts +++ b/packages/cli/src/actions/pull/provider/mysql.ts @@ -146,13 +146,20 @@ export const mysql: IntrospectionProvider = { (a.ordinal_position ?? 0) - (b.ordinal_position ?? 0) ); + // Filter out auto-generated FK indexes (MySQL creates these automatically) + // Pattern: {Table}_{column}_fkey for single-column FK indexes + const filteredIndexes = (indexes || []).filter( + (idx: { name: string; columns: { name: string }[] }) => + !(idx.columns.length === 1 && idx.name === `${row.name}_${idx.columns[0]?.name}_fkey`) + ); + tables.push({ schema: '', // MySQL doesn't support multi-schema name: row.name, type: row.type as 'table' | 'view', definition: row.definition, columns: sortedColumns, - indexes: indexes || [], + indexes: filteredIndexes, }); } From 3937a62aa231d3c54193c2fc6ef33bca273f8480 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Fri, 30 Jan 2026 21:29:06 +0100 Subject: [PATCH 67/68] test(cli): support datasource extras in test utils Enhances the test utility helpers to allow passing extra datasource properties, such as multi-schema configurations for PostgreSQL. Refactors existing database pull tests to use these extra properties, ensuring the generated ZModel schema correctly reflects multi-schema environments while simplifying assertions. 
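
For illustration only (not part of the diff below), a minimal TypeScript sketch of how a pull test can pass the new datasource extras through the updated helper; the import path and schemas value mirror this patch's test changes, while the database URL is a placeholder and the snippet assumes the async test context the real tests run in:

    import { createFormattedProject } from '../utils';

    // `extra` entries are appended to the generated `datasource db { ... }` block,
    // aligned the same way the provider/url fields are aligned.
    const workDir = await createFormattedProject(
        `model User {
    id Int @id @default(autoincrement())
}`,
        { provider: 'postgresql', extra: { schemas: ['public', 'content', 'auth'] } },
    );
    // Expected prelude at the top of zenstack/schema.zmodel (URL elided):
    //   datasource db {
    //       provider = "postgresql"
    //       url      = "postgres://..."
    //       schemas  = ["public", "content", "auth"]
    //   }
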
--- packages/cli/src/actions/pull/utils.ts | 9 ++++----- packages/cli/test/db/pull.test.ts | 10 +++------- packages/cli/test/utils.ts | 27 +++++++++++++++++++------- 3 files changed, 27 insertions(+), 19 deletions(-) diff --git a/packages/cli/src/actions/pull/utils.ts b/packages/cli/src/actions/pull/utils.ts index 38a5f0e9c..b46693afe 100644 --- a/packages/cli/src/actions/pull/utils.ts +++ b/packages/cli/src/actions/pull/utils.ts @@ -64,12 +64,11 @@ export function getDatasource(model: Model) { const schemasField = datasource.fields.find((f) => f.name === 'schemas'); const schemas = - (schemasField && - getLiteralArray(schemasField.value) - ?.map(getStringLiteral) - .filter((s) => s !== undefined)) || + (schemasField && + getLiteralArray(schemasField.value) + ?.filter((s) => s !== undefined)) as string[] || []; - + return { name: datasource.name, provider: getStringLiteral( diff --git a/packages/cli/test/db/pull.test.ts b/packages/cli/test/db/pull.test.ts index 9c0230141..d8d677258 100644 --- a/packages/cli/test/db/pull.test.ts +++ b/packages/cli/test/db/pull.test.ts @@ -353,7 +353,7 @@ model Post { @@schema("content") }`, - { provider: 'postgresql' }, + { provider: 'postgresql', extra:{ schemas: ["public", "content", "auth"] } }, ); runCli('db push', workDir); @@ -361,13 +361,11 @@ model Post { const { model } = await loadSchemaDocument(schemaFile, { returnServices: true }); const expectedSchema = generator.generate(model); - fs.writeFileSync(schemaFile, getDefaultPrelude({ provider: 'postgresql' })); + fs.writeFileSync(schemaFile, getDefaultPrelude({ provider: 'postgresql', extra:{ schemas: ["public", "content", "auth"]} })); runCli('db pull --indent 4', workDir); const restoredSchema = getSchema(workDir); expect(restoredSchema).toEqual(expectedSchema); - expect(restoredSchema).toContain('@@schema("auth")'); - expect(restoredSchema).toContain('@@schema("content")'); }); it('should preserve native PostgreSQL enums when schema exists', async ({ skip }) => { @@ -404,8 +402,6 @@ enum UserRole { const pulledSchema = getSchema(workDir); expect(pulledSchema).toEqual(originalSchema); - expect(pulledSchema).toContain('enum UserStatus'); - expect(pulledSchema).toContain('enum UserRole'); }); it('should not modify schema with PostgreSQL-specific features', async ({ skip }) => { @@ -442,7 +438,7 @@ enum UserStatus { INACTIVE SUSPENDED }`, - { provider: 'postgresql' }, + { provider: 'postgresql', extra:{ schemas: ["public", "content", "auth"] } }, ); runCli('db push', workDir); diff --git a/packages/cli/test/utils.ts b/packages/cli/test/utils.ts index 7820a0b5a..4a58598c2 100644 --- a/packages/cli/test/utils.ts +++ b/packages/cli/test/utils.ts @@ -42,7 +42,7 @@ function getTestDbName(provider: string) { ); } -export function getDefaultPrelude(options?: { provider?: 'sqlite' | 'postgresql' | 'mysql' }) { +export function getDefaultPrelude(options?: { provider?: 'sqlite' | 'postgresql' | 'mysql', extra?: Record }) { const provider = (options?.provider || getTestDbProvider()) ?? 'sqlite'; const dbName = getTestDbName(provider); let dbUrl: string; @@ -60,11 +60,24 @@ export function getDefaultPrelude(options?: { provider?: 'sqlite' | 'postgresql' default: throw new Error(`Unsupported provider: ${provider}`); } + // Build fields array for proper alignment (matching ZModelCodeGenerator) + const fields: [string, string][] = [ + ['provider', `"${provider}"`], + ['url', `"${dbUrl}"`], + ...Object.entries(options?.extra || {}).map(([k, v]) => { + const value = Array.isArray(v) ? 
`[${v.map(item => `"${item}"`).join(', ')}]` : `"${v}"`; + return [k, value] as [string, string]; + }), + ]; - const ZMODEL_PRELUDE = `datasource db { - provider = "${provider}" - url = "${dbUrl}" -}`; + // Calculate alignment padding based on longest field name + const longestName = Math.max(...fields.map(([name]) => name.length)); + const formattedFields = fields.map(([name, value]) => { + const padding = ' '.repeat(longestName - name.length + 1); + return ` ${name}${padding}= ${value}`; + }).join('\n'); + + const ZMODEL_PRELUDE = `datasource db {\n${formattedFields}\n}`; return ZMODEL_PRELUDE; } @@ -81,9 +94,9 @@ export function createProject( export async function createFormattedProject( zmodel: string, - options?: { provider?: 'sqlite' | 'postgresql' | 'mysql' }, + options?: { provider?: 'sqlite' | 'postgresql' | 'mysql', extra?: Record }, ) { - const fullContent = `${getDefaultPrelude({ provider: options?.provider })}\n\n${zmodel}`; + const fullContent = `${getDefaultPrelude({ provider: options?.provider, extra: options?.extra })}\n\n${zmodel}`; const formatted = await formatDocument(fullContent); return createProject(formatted, { customPrelude: true, provider: options?.provider }); } From b52aa5b1b97dce0ffec0b761f2479153e9eccf42 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rom=C3=A1n=20Benj=C3=A1min?= Date: Sat, 31 Jan 2026 00:09:53 +0100 Subject: [PATCH 68/68] fix: address PR comments --- packages/cli/package.json | 1 - packages/cli/src/actions/db.ts | 8 ++-- packages/cli/src/actions/pull/index.ts | 40 ++++++++++++++----- .../cli/src/actions/pull/provider/mysql.ts | 10 ++--- .../src/actions/pull/provider/postgresql.ts | 6 +-- .../cli/src/actions/pull/provider/provider.ts | 2 +- .../cli/src/actions/pull/provider/sqlite.ts | 8 ++-- packages/cli/src/index.ts | 4 +- packages/language/src/factory/attribute.ts | 14 +++++-- 9 files changed, 60 insertions(+), 33 deletions(-) diff --git a/packages/cli/package.json b/packages/cli/package.json index 4d8411162..7ab08561f 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -38,7 +38,6 @@ "dependencies": { "@dotenvx/dotenvx": "^1.51.0", "@zenstackhq/common-helpers": "workspace:*", - "@zenstackhq/language": "workspace:*", "@zenstackhq/schema": "workspace:*", "@zenstackhq/language": "workspace:*", "@zenstackhq/orm": "workspace:*", diff --git a/packages/cli/src/actions/db.ts b/packages/cli/src/actions/db.ts index 28c60051f..702268f0b 100644 --- a/packages/cli/src/actions/db.ts +++ b/packages/cli/src/actions/db.ts @@ -149,7 +149,7 @@ async function runPull(options: PullOptions) { } // sync relation fields for (const relation of resolvedRelations) { - const simmilarRelations = resolvedRelations.filter((rr) => { + const similarRelations = resolvedRelations.filter((rr) => { return ( rr !== relation && ((rr.schema === relation.schema && @@ -170,7 +170,7 @@ async function runPull(options: PullOptions) { services, options, selfRelation, - simmilarRelations, + similarRelations: similarRelations, }); } @@ -390,8 +390,8 @@ async function runPull(options: PullOptions) { deletedFields.forEach((msg) => console.log(msg)); } - if (options.out && !fs.lstatSync(options.out).isFile()) { - throw new Error(`Output path ${options.out} is not a file`); + if (options.out && fs.existsSync(options.out) && !fs.lstatSync(options.out).isFile()) { + throw new Error(`Output path ${options.out} exists but is not a file`); } const generator = new ZModelCodeGenerator({ diff --git a/packages/cli/src/actions/pull/index.ts b/packages/cli/src/actions/pull/index.ts 
index e9cb68f6d..e68513961 100644 --- a/packages/cli/src/actions/pull/index.ts +++ b/packages/cli/src/actions/pull/index.ts @@ -76,8 +76,28 @@ export function syncEnums({ .filter((d) => isEnum(d)) .forEach((d) => { const factory = new EnumFactory().setName(d.name); + // Copy enum-level comments + if (d.comments?.length) { + factory.update({ comments: [...d.comments] }); + } + // Copy enum-level attributes (@@map, @@schema, etc.) + if (d.attributes?.length) { + factory.update({ attributes: [...d.attributes] }); + } + // Copy fields with their attributes and comments d.fields.forEach((v) => { - factory.addField((builder) => builder.setName(v.name)); + factory.addField((builder) => { + builder.setName(v.name); + // Copy field-level comments + if (v.comments?.length) { + v.comments.forEach((c) => builder.addComment(c)); + } + // Copy field-level attributes (@map, etc.) + if (v.attributes?.length) { + builder.update({ attributes: [...v.attributes] }); + } + return builder; + }); }); model.declarations.push(factory.get({ $container: model })); }); @@ -322,8 +342,10 @@ export function syncTable({ ); } - const uniqueColumns = table.columns.filter((c) => c.unique || c.pk); - if(uniqueColumns.length === 0) { + const hasUniqueConstraint = + table.columns.some((c) => c.unique || c.pk) || + table.indexes.some((i) => i.unique); + if (!hasUniqueConstraint) { modelFactory.addAttribute((a) => a.setDecl(getAttributeRef('@@ignore', services))); modelFactory.comments.push( '/// The underlying table does not contain a valid unique identifier and can therefore currently not be handled by Zenstack Client.', @@ -415,14 +437,14 @@ export function syncRelation({ services, options, selfRelation, - simmilarRelations, + similarRelations, }: { model: Model; relation: Relation; services: ZModelServices; options: PullOptions; //self included - simmilarRelations: number; + similarRelations: number; selfRelation: boolean; }) { const idAttribute = getAttributeRef('@id', services); @@ -431,7 +453,7 @@ export function syncRelation({ const fieldMapAttribute = getAttributeRef('@map', services); const tableMapAttribute = getAttributeRef('@@map', services); - const includeRelationName = selfRelation || simmilarRelations > 0; + const includeRelationName = selfRelation || similarRelations > 0; if (!idAttribute || !uniqueAttribute || !relationAttribute || !fieldMapAttribute || !tableMapAttribute) { throw new Error('Cannot find required attributes in the model.'); @@ -456,7 +478,7 @@ export function syncRelation({ const fieldPrefix = /[0-9]/g.test(sourceModel.name.charAt(0)) ? '_' : ''; - const relationName = `${relation.table}${simmilarRelations > 0 ? `_${relation.column}` : ''}To${relation.references.table}`; + const relationName = `${relation.table}${similarRelations > 0 ? `_${relation.column}` : ''}To${relation.references.table}`; const sourceNameFromReference = sourceField.name.toLowerCase().endsWith('id') ? `${resolveNameCasing("camel", sourceField.name.slice(0, -2)).name}${relation.type === 'many'? 's' : ''}` : undefined; @@ -464,7 +486,7 @@ export function syncRelation({ let { name: sourceFieldName } = resolveNameCasing( options.fieldCasing, - simmilarRelations > 0 + similarRelations > 0 ? `${fieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}` : `${(!sourceFieldFromReference? sourceNameFromReference : undefined) || resolveNameCasing("camel", targetModel.name).name}${relation.type === 'many'? 
's' : ''}`, ); @@ -523,7 +545,7 @@ export function syncRelation({ const oppositeFieldPrefix = /[0-9]/g.test(targetModel.name.charAt(0)) ? '_' : ''; const { name: oppositeFieldName } = resolveNameCasing( options.fieldCasing, - simmilarRelations > 0 + similarRelations > 0 ? `${oppositeFieldPrefix}${sourceModel.name.charAt(0).toLowerCase()}${sourceModel.name.slice(1)}_${relation.column}` : `${resolveNameCasing("camel", sourceModel.name).name}${relation.references.type === 'many'? 's' : ''}`, ); diff --git a/packages/cli/src/actions/pull/provider/mysql.ts b/packages/cli/src/actions/pull/provider/mysql.ts index 702cc06b5..cb104eb1e 100644 --- a/packages/cli/src/actions/pull/provider/mysql.ts +++ b/packages/cli/src/actions/pull/provider/mysql.ts @@ -95,7 +95,7 @@ export const mysql: IntrospectionProvider = { getDefaultDatabaseType(type: BuiltinType) { switch (type) { case 'String': - return { type: 'varchar', precisition: 191 }; + return { type: 'varchar', precision: 191 }; case 'Boolean': // Boolean maps to 'boolean' (our synthetic type from tinyint(1)) // No precision needed since we handle the mapping in the query @@ -107,9 +107,9 @@ export const mysql: IntrospectionProvider = { case 'Float': return { type: 'double' }; case 'Decimal': - return { type: 'decimal', precisition: 65 }; + return { type: 'decimal', precision: 65 }; case 'DateTime': - return { type: 'datetime', precisition: 3 }; + return { type: 'datetime', precision: 3 }; case 'Json': return { type: 'json' }; case 'Bytes': @@ -338,8 +338,8 @@ export const mysql: IntrospectionProvider = { dbAttr && defaultDatabaseType && (defaultDatabaseType.type !== datatype || - (defaultDatabaseType.precisition && - defaultDatabaseType.precisition !== (length || precision))) + (defaultDatabaseType.precision && + defaultDatabaseType.precision !== (length || precision))) ) { const dbAttrFactory = new DataFieldAttributeFactory().setDecl(dbAttr); if (length || precision) { diff --git a/packages/cli/src/actions/pull/provider/postgresql.ts b/packages/cli/src/actions/pull/provider/postgresql.ts index 1e9dfcac3..08a041b56 100644 --- a/packages/cli/src/actions/pull/provider/postgresql.ts +++ b/packages/cli/src/actions/pull/provider/postgresql.ts @@ -103,7 +103,7 @@ export const postgresql: IntrospectionProvider = { case 'Decimal': return { type: 'decimal' }; case 'DateTime': - return { type: 'timestamp', precisition: 3 }; + return { type: 'timestamp', precision: 3 }; case 'Json': return { type: 'jsonb' }; case 'Bytes': @@ -246,8 +246,8 @@ export const postgresql: IntrospectionProvider = { dbAttr && defaultDatabaseType && (defaultDatabaseType.type !== datatype || - (defaultDatabaseType.precisition && - defaultDatabaseType.precisition !== (length || precision))) + (defaultDatabaseType.precision && + defaultDatabaseType.precision !== (length || precision))) ) { const dbAttrFactory = new DataFieldAttributeFactory().setDecl(dbAttr); if (length || precision) { diff --git a/packages/cli/src/actions/pull/provider/provider.ts b/packages/cli/src/actions/pull/provider/provider.ts index 6edee0663..a3922b7a7 100644 --- a/packages/cli/src/actions/pull/provider/provider.ts +++ b/packages/cli/src/actions/pull/provider/provider.ts @@ -66,7 +66,7 @@ export interface IntrospectionProvider { type: BuiltinType | 'Unsupported'; isArray: boolean; }; - getDefaultDatabaseType(type: BuiltinType): { precisition?: number; type: string } | undefined; + getDefaultDatabaseType(type: BuiltinType): { precision?: number; type: string } | undefined; /** * Get the expression builder callback 
for a field's @default attribute value. * Returns null if no @default attribute should be added. diff --git a/packages/cli/src/actions/pull/provider/sqlite.ts b/packages/cli/src/actions/pull/provider/sqlite.ts index a6961b009..fcdbfbad7 100644 --- a/packages/cli/src/actions/pull/provider/sqlite.ts +++ b/packages/cli/src/actions/pull/provider/sqlite.ts @@ -115,7 +115,7 @@ export const sqlite: IntrospectionProvider = { // Unique columns detection via unique indexes with single column const uniqueSingleColumn = new Set(); - const uniqueIndexRows = idxList.filter((r) => r.unique === 1); + const uniqueIndexRows = idxList.filter((r) => r.unique === 1 && r.partial !== 1); for (const idx of uniqueIndexRows) { const idxCols = all<{ name: string }>(`PRAGMA index_info('${idx.name.replace(/'/g, "''")}')`); if (idxCols.length === 1 && idxCols[0]?.name) { @@ -134,7 +134,7 @@ export const sqlite: IntrospectionProvider = { valid: true, // SQLite does not expose index validity ready: true, // SQLite does not expose index readiness partial: idx.partial === 1, - predicate: null, // SQLite does not expose index predicate + predicate: idx.partial === 1 ? '[partial]' : null, // SQLite does not expose index predicate columns: idxCols.map((col) => ({ name: col.name, expression: null, @@ -363,8 +363,8 @@ export const sqlite: IntrospectionProvider = { dbAttr && defaultDatabaseType && (defaultDatabaseType.type !== datatype || - (defaultDatabaseType.precisition && - defaultDatabaseType.precisition !== (length || precision))) + (defaultDatabaseType.precision && + defaultDatabaseType.precision !== (length || precision))) ) { const dbAttrFactory = new DataFieldAttributeFactory().setDecl(dbAttr); if (length || precision) { diff --git a/packages/cli/src/index.ts b/packages/cli/src/index.ts index a7bb403e0..8d253cc3e 100644 --- a/packages/cli/src/index.ts +++ b/packages/cli/src/index.ts @@ -150,12 +150,12 @@ function createProgram() { .addOption(noVersionCheckOption) .addOption(new Option('-o, --out ', 'add custom output path for the introspected schema')) .addOption( - new Option('--model-casing ', 'set the casing of generated models').default( + new Option('--model-casing ', 'set the casing of generated models').default( 'none', ), ) .addOption( - new Option('--field-casing ', 'set the casing of generated fields').default( + new Option('--field-casing ', 'set the casing of generated fields').default( 'none', ), ) diff --git a/packages/language/src/factory/attribute.ts b/packages/language/src/factory/attribute.ts index 52aeebc7c..138d41c8f 100644 --- a/packages/language/src/factory/attribute.ts +++ b/packages/language/src/factory/attribute.ts @@ -21,9 +21,12 @@ export class DataFieldAttributeFactory extends AstFactory { super({ type: DataFieldAttribute, node: { args: [] } }); } setDecl(decl: Attribute) { + if (!decl) { + throw new Error('Attribute declaration is required'); + } this.decl = { - $refText: decl?.name ?? '', - ref: decl!, + $refText: decl.name, + ref: decl, }; this.update({ decl: this.decl, @@ -50,9 +53,12 @@ export class DataModelAttributeFactory extends AstFactory { super({ type: DataModelAttribute, node: { args: [] } }); } setDecl(decl: Attribute) { + if (!decl) { + throw new Error('Attribute declaration is required'); + } this.decl = { - $refText: decl?.name ?? '', - ref: decl!, + $refText: decl.name, + ref: decl, }; this.update({ decl: this.decl,