Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
28 commits
Select commit Hold shift + click to select a range
e37359d
feat: initial db pull implementation
svetch Sep 23, 2025
fe31243
fix: generate imports and attributes for zmodel-code-generator
svetch Sep 24, 2025
cdf4d38
fix: add option to not exclude imports in loadDocument
svetch Sep 24, 2025
d04fb3d
fix: continue work on db pull
svetch Sep 24, 2025
5b5e0be
fix: missing import
svetch Sep 24, 2025
45b9afa
fix: rewrite model generation
svetch Sep 26, 2025
defcaab
feat: add ast factory
svetch Oct 5, 2025
d146335
fix: ast factory import order
svetch Oct 5, 2025
1fee4fc
fix: some runtime bugs
svetch Oct 6, 2025
76fecae
fix: lint fix
svetch Oct 20, 2025
a7af852
fix: update zmodel code generator
svetch Oct 20, 2025
c81fe44
feat: add exclude schemas option
svetch Oct 20, 2025
d52f862
feat: implement initial diff update
svetch Oct 20, 2025
61991d3
fix: update format in zmodel code generator
svetch Oct 20, 2025
55ea05f
fix: typo
svetch Oct 20, 2025
b6db32f
feat: progress on database introspection and syncing
svetch Oct 21, 2025
7ea747c
fix: make ignore behave as it does in prisma with no index models
svetch Oct 21, 2025
fb1dd26
fix: lint fix
svetch Oct 21, 2025
af3043d
feat: make all format options configurable
svetch Oct 21, 2025
373a46e
fix: lint fix
svetch Oct 21, 2025
018b616
feat: Handle the database type mapping
svetch Oct 22, 2025
df8371d
fix: catch up with feature updates
svetch Nov 12, 2025
68ee9d2
fix: add sqlite e2e test and fix some bugs
svetch Nov 21, 2025
7eea598
fix: lint fix
svetch Nov 21, 2025
93a400a
fix: formatting for e2e test schemas
svetch Nov 21, 2025
2c3ad13
test: run db pull e2e test also for postgres
svetch Nov 21, 2025
8e89c88
fix: postgres introspection schema filter
svetch Nov 23, 2025
0bae20d
test: update cli tests
svetch Nov 23, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 6 additions & 1 deletion packages/cli/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -29,8 +29,10 @@
"pack": "pnpm pack"
},
"dependencies": {
"@dotenvx/dotenvx": "^1.51.0",
"@zenstackhq/common-helpers": "workspace:*",
"@zenstackhq/language": "workspace:*",
"@zenstackhq/schema": "workspace:*",
"@zenstackhq/sdk": "workspace:*",
"prisma": "catalog:",
"colors": "1.4.0",
Expand All @@ -41,10 +43,12 @@
"ora": "^5.4.1",
"package-manager-detector": "^1.3.0",
"semver": "^7.7.2",
"ts-pattern": "catalog:"
"ts-pattern": "catalog:",
"vscode-uri": "^3.1.0"
},
"devDependencies": {
"@types/better-sqlite3": "catalog:",
"@types/pg": "^8.11.11",
"@types/semver": "^7.7.0",
"@types/tmp": "catalog:",
"@zenstackhq/eslint-config": "workspace:*",
Expand All @@ -53,6 +57,7 @@
"@zenstackhq/typescript-config": "workspace:*",
"@zenstackhq/vitest-config": "workspace:*",
"better-sqlite3": "catalog:",
"pg": "^8.16.3",
"tmp": "catalog:"
}
}
25 changes: 21 additions & 4 deletions packages/cli/src/actions/action-utils.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { loadDocument } from '@zenstackhq/language';
import { isDataSource } from '@zenstackhq/language/ast';
import { type ZModelServices, loadDocument } from '@zenstackhq/language';
import { type Model, isDataSource } from '@zenstackhq/language/ast';
import { PrismaSchemaGenerator } from '@zenstackhq/sdk';
import colors from 'colors';
import fs from 'node:fs';
Expand Down Expand Up @@ -41,8 +41,22 @@ export function getSchemaFile(file?: string) {
}
}

export async function loadSchemaDocument(schemaFile: string) {
const loadResult = await loadDocument(schemaFile);
export async function loadSchemaDocument(
schemaFile: string,
opts?: { keepImports?: boolean; returnServices?: false },
): Promise<Model>;
export async function loadSchemaDocument(
schemaFile: string,
opts: { returnServices: true; keepImports?: boolean },
): Promise<{ model: Model; services: ZModelServices }>;
export async function loadSchemaDocument(
schemaFile: string,
opts: { returnServices?: boolean; keepImports?: boolean } = {},
) {
const returnServices = opts.returnServices || false;
const keepImports = opts.keepImports || false;

const loadResult = await loadDocument(schemaFile, [], keepImports);
if (!loadResult.success) {
loadResult.errors.forEach((err) => {
console.error(colors.red(err));
Expand All @@ -52,6 +66,9 @@ export async function loadSchemaDocument(schemaFile: string) {
loadResult.warnings.forEach((warn) => {
console.warn(colors.yellow(warn));
});

if (returnServices) return { model: loadResult.model, services: loadResult.services };

return loadResult.model;
}

Expand Down
287 changes: 283 additions & 4 deletions packages/cli/src/actions/db.ts
Original file line number Diff line number Diff line change
@@ -1,25 +1,51 @@
import { config } from '@dotenvx/dotenvx';
import { ZModelCodeGenerator } from '@zenstackhq/language';
import { DataModel, Enum, type Model } from '@zenstackhq/language/ast';
import fs from 'node:fs';
import path from 'node:path';
import { execPrisma } from '../utils/exec-utils';
import { generateTempPrismaSchema, getSchemaFile, handleSubProcessError, requireDataSourceUrl } from './action-utils';
import {
generateTempPrismaSchema,
getSchemaFile,
handleSubProcessError,
requireDataSourceUrl,
loadSchemaDocument,
} from './action-utils';
import { syncEnums, syncRelation, syncTable, type Relation } from './pull';
import { providers } from './pull/provider';
import { getDatasource, getDbName, getRelationFkName } from './pull/utils';

type Options = {
/** Options for the `db push` command. */
type PushOptions = {
    // path to the ZModel schema file; resolved by getSchemaFile when omitted
    schema?: string;
    // presumably forwarded to `prisma db push --accept-data-loss` — TODO confirm against runPush body
    acceptDataLoss?: boolean;
    // presumably forwarded to `prisma db push --force-reset` — TODO confirm against runPush body
    forceReset?: boolean;
};

/** Identifier casing styles applied to names generated from the database. */
type NameCasing = 'pascal' | 'camel' | 'snake' | 'kebab' | 'none';

/** Options for the `db pull` command. */
export type PullOptions = {
    /** Path to the ZModel schema file; resolved by getSchemaFile when omitted. */
    schema?: string;
    /** Output file path; when omitted, updated documents are written back in place. */
    out?: string;
    /** Casing applied to generated model names. */
    modelCasing: NameCasing;
    /** Casing applied to generated field names. */
    fieldCasing: NameCasing;
    /** Presumably forces emitting `@map`/`@@map` even when names already match — TODO confirm in sync helpers. */
    alwaysMap: boolean;
    /** Quote style used by the ZModel code generator. */
    quote: 'single' | 'double';
    /** Indentation width used by the ZModel code generator. */
    indent: number;
};

/**
* CLI action for db related commands
*/
export async function run(command: string, options: Options) {
export async function run(command: string, options: any) {
switch (command) {
case 'push':
await runPush(options);
break;
case 'pull':
await runPull(options);
break;
}
}

async function runPush(options: Options) {
async function runPush(options: PushOptions) {
const schemaFile = getSchemaFile(options.schema);

// validate datasource url exists
Expand Down Expand Up @@ -49,3 +75,256 @@ async function runPush(options: Options) {
}
}
}

/**
 * CLI action for `db pull`.
 *
 * Introspects the live database referenced by the schema's datasource,
 * rebuilds models/enums/relations from the introspection result, merges the
 * result into the existing ZModel documents (adding, updating and deleting
 * declarations and fields), and finally writes the schema either to
 * `options.out` or back to the original document files.
 *
 * @param options pull options (schema location, output path, casing and
 *   formatting settings)
 * @throws when no/unsupported datasource is declared, when the output path
 *   exists but is not a regular file, or when introspection fails
 */
async function runPull(options: PullOptions) {
    try {
        const schemaFile = getSchemaFile(options.schema);
        const { model, services } = await loadSchemaDocument(schemaFile, { returnServices: true });
        // load environment variables; a missing .env file is not an error here
        config({
            ignore: ['MISSING_ENV_FILE'],
        });
        const SUPPORTED_PROVIDERS = ['sqlite', 'postgresql'];
        const datasource = getDatasource(model);
        if (!datasource) {
            throw new Error('No datasource found in the schema.');
        }

        if (!SUPPORTED_PROVIDERS.includes(datasource.provider)) {
            throw new Error(`Unsupported datasource provider: ${datasource.provider}`);
        }

        const provider = providers[datasource.provider];

        if (!provider) {
            throw new Error(`No introspection provider found for: ${datasource.provider}`);
        }
        console.log('Starting to introspect the database...');
        const { enums: allEnums, tables: allTables } = await provider.introspect(datasource.url);
        // when the provider supports multiple schemas, keep only entities from
        // the schemas declared on the datasource
        const enums = provider.isSupportedFeature('Schema')
            ? allEnums.filter((e) => datasource.allSchemas.includes(e.schema_name))
            : allEnums;
        const tables = provider.isSupportedFeature('Schema')
            ? allTables.filter((t) => datasource.allSchemas.includes(t.schema))
            : allTables;

        // fresh model receiving the declarations generated from introspection;
        // the original datasource declaration is carried over
        const newModel: Model = {
            $type: 'Model',
            $container: undefined,
            $containerProperty: undefined,
            $containerIndex: undefined,
            declarations: [...model.declarations.filter((d) => ['DataSource'].includes(d.$type))],
            imports: [],
        };
        syncEnums({
            dbEnums: enums,
            model: newModel,
            services,
            options,
            defaultSchema: datasource.defaultSchema,
            oldModel: model,
            provider,
        });

        const resolvedRelations: Relation[] = [];
        for (const table of tables) {
            const relations = syncTable({
                table,
                model: newModel,
                provider,
                services,
                options,
                defaultSchema: datasource.defaultSchema,
                oldModel: model,
            });
            resolvedRelations.push(...relations);
        }
        // sync relation fields
        for (const relation of resolvedRelations) {
            // count other relations joining the same pair of tables (in either
            // direction) so relation names can be disambiguated downstream
            // NOTE(review): the 'simmilarRelations' misspelling matches the key
            // expected by syncRelation — rename both together if desired
            const simmilarRelations = resolvedRelations.filter((rr) => {
                return (
                    rr !== relation &&
                    ((rr.schema === relation.schema &&
                        rr.table === relation.table &&
                        rr.references.schema === relation.references.schema &&
                        rr.references.table === relation.references.table) ||
                        (rr.schema === relation.references.schema &&
                            rr.column === relation.references.column &&
                            rr.references.schema === relation.schema &&
                            rr.references.table === relation.table))
                );
            }).length;
            const selfRelation =
                relation.references.schema === relation.schema && relation.references.table === relation.table;
            syncRelation({
                model: newModel,
                relation,
                services,
                options,
                selfRelation,
                simmilarRelations,
            });
        }

        // restrict updates to workspace documents under the current working dir
        // NOTE(review): URL.pathname-based comparison likely misbehaves on
        // Windows (drive letters, backslashes) — confirm
        const cwd = new URL(`file://${process.cwd()}`).pathname;
        const docs = services.shared.workspace.LangiumDocuments.all
            .filter(({ uri }) => uri.path.toLowerCase().startsWith(cwd.toLowerCase()))
            .toArray();
        const docsSet = new Set(docs.map((d) => d.uri.toString()));

        // delete models that no longer exist in the database
        services.shared.workspace.IndexManager.allElements('DataModel', docsSet)
            .filter(
                (declaration) =>
                    !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node as any)),
            )
            .forEach((decl) => {
                const model = decl.node!.$container as Model;
                const index = model.declarations.findIndex((d) => d === decl.node);
                model.declarations.splice(index, 1);
                console.log(`Delete model ${decl.name}`);
            });

        // delete enums that no longer exist (only for providers with native enums)
        if (provider.isSupportedFeature('NativeEnum'))
            services.shared.workspace.IndexManager.allElements('Enum', docsSet)
                .filter(
                    (declaration) =>
                        !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node as any)),
                )
                .forEach((decl) => {
                    const model = decl.node!.$container as Model;
                    const index = model.declarations.findIndex((d) => d === decl.node);
                    model.declarations.splice(index, 1);
                    console.log(`Delete enum ${decl.name}`);
                });

        // merge generated declarations into the existing documents
        newModel.declarations
            .filter((d) => [DataModel, Enum].includes(d.$type))
            .forEach((_declaration) => {
                const newDataModel = _declaration as DataModel | Enum;
                const declarations = services.shared.workspace.IndexManager.allElements(
                    newDataModel.$type,
                    docsSet,
                ).toArray();
                const originalDataModel = declarations.find((d) => getDbName(d.node as any) === getDbName(newDataModel))
                    ?.node as DataModel | Enum | undefined;
                if (!originalDataModel) {
                    // brand-new declaration: attach it to the root document and
                    // re-point its field type references at existing nodes
                    model.declarations.push(newDataModel);
                    (newDataModel as any).$container = model;
                    newDataModel.fields.forEach((f) => {
                        if (f.$type === 'DataField' && f.type.reference?.ref) {
                            const ref = declarations.find(
                                (d) => getDbName(d.node as any) === getDbName(f.type.reference!.ref as any),
                            )?.node;
                            if (ref) (f.type.reference.ref as any) = ref;
                        }
                    });
                    return;
                }

                newDataModel.fields.forEach((f) => {
                    // match existing fields by db name, FK name, or referenced type name
                    const originalFields = originalDataModel.fields.filter((d) => {
                        return (
                            getDbName(d) === getDbName(f) ||
                            (getRelationFkName(d as any) === getRelationFkName(f as any) &&
                                !!getRelationFkName(d as any) &&
                                !!getRelationFkName(f as any)) ||
                            (f.$type === 'DataField' &&
                                d.$type === 'DataField' &&
                                f.type.reference?.ref &&
                                d.type.reference?.ref &&
                                getDbName(f.type.reference.ref) === getDbName(d.type.reference.ref))
                        );
                    });

                    if (originalFields.length > 1) {
                        console.warn(
                            `Found more original fields, need to tweak the search algorithm. ${originalDataModel.name}->[${originalFields.map((of) => of.name).join(', ')}](${f.name})`,
                        );
                        return;
                    }
                    const originalField = originalFields.at(0);
                    if (!originalField) {
                        console.log(`Added field ${f.name} to ${originalDataModel.name}`);
                        (f as any).$container = originalDataModel;
                        originalDataModel.fields.push(f as any);
                        if (f.$type === 'DataField' && f.type.reference?.ref) {
                            const ref = declarations.find(
                                (d) => getDbName(d.node as any) === getDbName(f.type.reference!.ref as any),
                            )?.node as DataModel | undefined;
                            if (ref) {
                                (f.type.reference.$refText as any) = ref.name;
                                (f.type.reference.ref as any) = ref;
                            }
                        }
                        return;
                    }
                    // drop attributes that disappeared from the database, but
                    // keep user-managed ones (@map/@@map/@default/@updatedAt)
                    originalField.attributes
                        .filter(
                            (attr) =>
                                !f.attributes.find((d) => d.decl.$refText === attr.decl.$refText) &&
                                !['@map', '@@map', '@default', '@updatedAt'].includes(attr.decl.$refText),
                        )
                        .forEach((attr) => {
                            const field = attr.$container;
                            const index = field.attributes.findIndex((d) => d === attr);
                            field.attributes.splice(index, 1);
                            console.log(`Delete attribute from field:${field.name} ${attr.decl.$refText}`);
                        });
                });
                // delete fields that no longer exist in the database
                originalDataModel.fields
                    .filter(
                        (f) =>
                            !newDataModel.fields.find((d) => {
                                return (
                                    getDbName(d) === getDbName(f) ||
                                    (getRelationFkName(d as any) === getRelationFkName(f as any) &&
                                        !!getRelationFkName(d as any) &&
                                        !!getRelationFkName(f as any)) ||
                                    (f.$type === 'DataField' &&
                                        d.$type === 'DataField' &&
                                        f.type.reference?.ref &&
                                        d.type.reference?.ref &&
                                        getDbName(f.type.reference.ref) === getDbName(d.type.reference.ref))
                                );
                            }),
                    )
                    .forEach((f) => {
                        const _model = f.$container;
                        const index = _model.fields.findIndex((d) => d === f);
                        _model.fields.splice(index, 1);
                        console.log(`Delete field ${f.name}`);
                    });
            });

        // refuse to overwrite a path that exists but is not a regular file;
        // a not-yet-existing output file is allowed (existsSync guard prevents
        // lstatSync from throwing ENOENT for new files)
        if (options.out && fs.existsSync(options.out) && !fs.lstatSync(options.out).isFile()) {
            throw new Error(`Output path ${options.out} is not a file`);
        }

        const generator = new ZModelCodeGenerator({
            quote: options.quote,
            indent: options.indent,
        });

        if (options.out) {
            // write the freshly generated schema to the single output file
            const zmodelSchema = generator.generate(newModel);

            console.log(`Writing to ${options.out}`);

            const outPath = path.resolve(options.out);

            fs.writeFileSync(outPath, zmodelSchema);
        } else {
            // write each updated document back to its original location
            docs.forEach(({ uri, parseResult: { value: model } }) => {
                const zmodelSchema = generator.generate(model);
                console.log(`Writing to ${uri.path}`);
                fs.writeFileSync(uri.fsPath, zmodelSchema);
            });
        }
    } catch (error) {
        // log to stderr with context, then rethrow so the CLI exits non-zero
        console.error(error);
        throw error;
    }
}
Loading
Loading