diff --git a/README.md b/README.md index f98a9f1..44afebb 100644 --- a/README.md +++ b/README.md @@ -1,15 +1,15 @@ # JSONormalize -[![GitHub stars](https://img.shields.io/github/stars/MrCheater/jsonormalize?style=for-the-badge&logo=github)](https://github.com/MrCheater/jsonormalize/stargazers) -[![GitHub forks](https://img.shields.io/github/forks/MrCheater/jsonormalize?style=for-the-badge&logo=github)](https://github.com/MrCheater/jsonormalize/network) -[![GitHub issues](https://img.shields.io/github/issues/MrCheater/jsonormalize?style=for-the-badge&logo=github)](https://github.com/MrCheater/jsonormalize/issues) -[![GitHub license](https://img.shields.io/github/license/MrCheater/jsonormalize?style=for-the-badge)](https://github.com/MrCheater/jsonormalize/blob/main/LICENSE) -[![Test Coverage](https://raw.githubusercontent.com/MrCheater/jsonormalize/gh-pages/badge.svg)](https://mrcheater.github.io/jsonormalize/) +[![GitHub stars](https://img.shields.io/github/stars/analtools/jsonormalize?style=for-the-badge&logo=github)](https://github.com/analtools/jsonormalize/stargazers) +[![GitHub forks](https://img.shields.io/github/forks/analtools/jsonormalize?style=for-the-badge&logo=github)](https://github.com/analtools/jsonormalize/network) +[![GitHub issues](https://img.shields.io/github/issues/analtools/jsonormalize?style=for-the-badge&logo=github)](https://github.com/analtools/jsonormalize/issues) +[![GitHub license](https://img.shields.io/github/license/analtools/jsonormalize?style=for-the-badge)](https://github.com/analtools/jsonormalize/blob/main/LICENSE) +[![Test Coverage](https://raw.githubusercontent.com/analtools/jsonormalize/gh-pages/badge.svg)](https://analtools.github.io/jsonormalize/) [![TypeScript](https://img.shields.io/badge/TypeScript-007ACC?style=for-the-badge&logo=typescript&logoColor=white)](https://www.typescriptlang.org/) [![Drizzle](https://img.shields.io/badge/Drizzle-1E88E5?style=for-the-badge&logo=drizzle&logoColor=white)](https://orm.drizzle.team/) [![Node.js](https://img.shields.io/badge/Node.js-339933?style=for-the-badge&logo=nodedotjs&logoColor=white)](https://nodejs.org/) -[![GitHub last commit](https://img.shields.io/github/last-commit/MrCheater/jsonormalize?style=for-the-badge&logo=git)](https://github.com/MrCheater/jsonormalize/commits/main) -[![GitHub repo size](https://img.shields.io/github/repo-size/MrCheater/jsonormalize?style=for-the-badge&logo=github)](https://github.com/MrCheater/jsonormalize) +[![GitHub last commit](https://img.shields.io/github/last-commit/analtools/jsonormalize?style=for-the-badge&logo=git)](https://github.com/analtools/jsonormalize/commits/main) +[![GitHub repo size](https://img.shields.io/github/repo-size/analtools/jsonormalize?style=for-the-badge&logo=github)](https://github.com/analtools/jsonormalize) ## 🚀 Description @@ -41,11 +41,11 @@ Currently supports **SQLite** with plans to expand to PostgreSQL, MySQL, and oth Check out the examples/ directory for ready-to-use JSON samples: -- [`examples/simple/`](https://github.com/MrCheater/jsonormalize/tree/main/examples/simple/) - Basic data structures +- [`examples/simple/`](https://github.com/analtools/jsonormalize/tree/main/examples/simple/) - Basic data structures -- [`examples/complex/`](https://github.com/MrCheater/jsonormalize/tree/main/examples/complex/) - Real-world scenarios +- [`examples/complex/`](https://github.com/analtools/jsonormalize/tree/main/examples/complex/) - Real-world scenarios -- 
[`examples/edge-cases/`](https://github.com/MrCheater/jsonormalize/tree/main/examples/edge-cases/) - Special data patterns +- [`examples/edge-cases/`](https://github.com/analtools/jsonormalize/tree/main/examples/edge-cases/) - Special data patterns ## 📦 Installation @@ -63,7 +63,7 @@ pnpm add jsonormalize ````bash ```sh -curl -o example.json https://raw.githubusercontent.com/MrCheater/jsonormalize/main/examples/simple/users.json +curl -o example.json https://raw.githubusercontent.com/analtools/jsonormalize/main/examples/simple/users.json ```` ### Generate SQLite migration @@ -75,7 +75,7 @@ npx jsonormalize sqlite:setup ./example.json ./demo.sqlite3 ### Or directly from URL (requires fetch support in your CLI) ```sh -npx jsonormalize sqlite:setup https://raw.githubusercontent.com/MrCheater/jsonormalize/main/examples/simple/users.json ./demo.db +npx jsonormalize sqlite:setup https://raw.githubusercontent.com/analtools/jsonormalize/main/examples/simple/users.json ./demo.db ``` ### Using local JSON file @@ -86,6 +86,8 @@ npx jsonormalize sqlite:setup ./data.json ./app.sqlite3 ## ❓ Help +### CLI + ``` Usage: jsonormalize [options] [command] @@ -93,10 +95,48 @@ JSONormalize — Transform any JSON into a relational database schema. Automatic migrations. Perfect for rapid prototyping, data migrations, and structured data workflows. Options: - -h, --help display help for command + -h, --help display help for command Commands: - sqlite:setup 🗄️ Setup tables, indexes and seed with data from JSON - sqlite:sql 🛠️ Generate SQL for create tables, indexes and seed with data from JSON - help [command] display help for command + postgres:setup [options] [db-path] 🗄️ Setup tables, indexes and seed with data from JSON + postgres:sql 🛠️ Generate SQL for create tables, indexes and seed with data from JSON + sqlite:setup [db-path] 🗄️ Setup tables, indexes and seed with data from JSON + sqlite:sql 🛠️ Generate SQL for create tables, indexes and seed with data from JSON + help [command] display help for command +``` + +### Command "jsonormalize postgres:setup" + +``` +Usage: jsonormalize postgres:setup [options] [db-path] + +🗄️ Setup tables, indexes and seed with data from JSON + +Arguments: + json-path Path to JSON file with any data (table structure will be inferred) + db-path Path to the database file or ':memory:' (no file, RAM only) + +Options: + --user default process.env.PGUSER || process.env.USER + --password default process.env.PGPASSWORD + --host default process.env.PGHOST + --port default process.env.PGPORT + --database default process.env.PGDATABASE || user + --connection-string e.g. 
postgres://user:password@host:5432/database + --ssl passed directly to node.TLSSocket, supports all tls.connect options + --statement-timeout number of milliseconds before a statement in query will time out, default is no + timeout + --query-timeout number of milliseconds before a query call will timeout, default is no timeout + --lock-timeout number of milliseconds a query is allowed to be en lock state before it's cancelled + due to lock timeout + --application-name The name of the application that created this Client instance + --connection-timeout-millis number of milliseconds to wait for connection, default is no timeout + --keep-alive-initial-delay-millis set the initial delay before the first keepalive probe is sent on an idle socket + --idle-in-transaction-session-timeout number of milliseconds before terminating any session with an open idle transaction, + default is no timeout + --client-encoding specifies the character set encoding that the database uses for sending data to the + client + --fallback-application-name provide an application name to use if application_name is not set + --options command-line options to be sent to the server + -h, --help display help for command ``` diff --git a/package-lock.json b/package-lock.json index 9a89d34..deec57e 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,26 +1,29 @@ { "name": "@analtools/jsonormalize", - "version": "0.0.4", + "version": "0.0.6", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@analtools/jsonormalize", - "version": "0.0.4", + "version": "0.0.6", "license": "MIT", "dependencies": { + "@electric-sql/pglite": "^0.3.14", "better-sqlite3": "^12.5.0", "commander": "^14.0.2", - "node-fetch": "2.7.0" + "node-fetch": "2.7.0", + "pg": "^8.16.3" }, "bin": { - "json-to-drizzle": "bin/run.js" + "jsonormalize": "bin/run.js" }, "devDependencies": { "@eslint/js": "^9.39.2", "@types/better-sqlite3": "^7.6.13", "@types/node": "^24.1.0", "@types/node-fetch": "^2.6.13", + "@types/pg": "^8.16.0", "@vitest/coverage-v8": "^4.0.16", "coverage-badges-cli": "^2.2.0", "eslint": "^9.39.2", @@ -95,6 +98,12 @@ "node": ">=18" } }, + "node_modules/@electric-sql/pglite": { + "version": "0.3.14", + "resolved": "https://registry.npmjs.org/@electric-sql/pglite/-/pglite-0.3.14.tgz", + "integrity": "sha512-3DB258dhqdsArOI1fIt7cb9RpUOgcDg5hXWVgVHAeqVQ/qxtFy605QKs4gx6mFq3jWsSPqDN8TgSEsqC3OfV9Q==", + "license": "Apache-2.0" + }, "node_modules/@esbuild/aix-ppc64": { "version": "0.27.2", "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.2.tgz", @@ -1216,6 +1225,18 @@ "form-data": "^4.0.4" } }, + "node_modules/@types/pg": { + "version": "8.16.0", + "resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.16.0.tgz", + "integrity": "sha512-RmhMd/wD+CF8Dfo+cVIy3RR5cl8CyfXQ0tGgW6XBL8L4LM/UTEbNXYRbLwU6w+CgrKBNbrQWt4FUtTfaU5jSYQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "pg-protocol": "*", + "pg-types": "^2.2.0" + } + }, "node_modules/@typescript-eslint/eslint-plugin": { "version": "8.52.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.52.0.tgz", @@ -3596,6 +3617,95 @@ "dev": true, "license": "MIT" }, + "node_modules/pg": { + "version": "8.16.3", + "resolved": "https://registry.npmjs.org/pg/-/pg-8.16.3.tgz", + "integrity": "sha512-enxc1h0jA/aq5oSDMvqyW3q89ra6XIIDZgCX9vkMrnz5DFTw/Ny3Li2lFQ+pt3L6MCgm/5o2o8HW9hiJji+xvw==", + "license": "MIT", + "dependencies": { + "pg-connection-string": "^2.9.1", + "pg-pool": "^3.10.1", + 
"pg-protocol": "^1.10.3", + "pg-types": "2.2.0", + "pgpass": "1.0.5" + }, + "engines": { + "node": ">= 16.0.0" + }, + "optionalDependencies": { + "pg-cloudflare": "^1.2.7" + }, + "peerDependencies": { + "pg-native": ">=3.0.1" + }, + "peerDependenciesMeta": { + "pg-native": { + "optional": true + } + } + }, + "node_modules/pg-cloudflare": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.2.7.tgz", + "integrity": "sha512-YgCtzMH0ptvZJslLM1ffsY4EuGaU0cx4XSdXLRFae8bPP4dS5xL1tNB3k2o/N64cHJpwU7dxKli/nZ2lUa5fLg==", + "license": "MIT", + "optional": true + }, + "node_modules/pg-connection-string": { + "version": "2.9.1", + "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.9.1.tgz", + "integrity": "sha512-nkc6NpDcvPVpZXxrreI/FOtX3XemeLl8E0qFr6F2Lrm/I8WOnaWNhIPK2Z7OHpw7gh5XJThi6j6ppgNoaT1w4w==", + "license": "MIT" + }, + "node_modules/pg-int8": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", + "license": "ISC", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/pg-pool": { + "version": "3.10.1", + "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.10.1.tgz", + "integrity": "sha512-Tu8jMlcX+9d8+QVzKIvM/uJtp07PKr82IUOYEphaWcoBhIYkoHpLXN3qO59nAI11ripznDsEzEv8nUxBVWajGg==", + "license": "MIT", + "peerDependencies": { + "pg": ">=8.0" + } + }, + "node_modules/pg-protocol": { + "version": "1.10.3", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.10.3.tgz", + "integrity": "sha512-6DIBgBQaTKDJyxnXaLiLR8wBpQQcGWuAESkRBX/t6OwA8YsqP+iVSiond2EDy6Y/dsGk8rh/jtax3js5NeV7JQ==", + "license": "MIT" + }, + "node_modules/pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "license": "MIT", + "dependencies": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/pgpass": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz", + "integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==", + "license": "MIT", + "dependencies": { + "split2": "^4.1.0" + } + }, "node_modules/picocolors": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", @@ -3658,6 +3768,45 @@ "node": "^10 || ^12 || >=14" } }, + "node_modules/postgres-array": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/postgres-bytea": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.1.tgz", + "integrity": "sha512-5+5HqXnsZPE65IJZSMkZtURARZelel2oXUEO8rH83VS/hxH5vv1uHquPg5wZs8yMAfdv971IU+kcPUczi7NVBQ==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-date": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", + "integrity": 
"sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-interval": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "license": "MIT", + "dependencies": { + "xtend": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/prebuild-install": { "version": "7.1.3", "resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-7.1.3.tgz", @@ -4034,6 +4183,15 @@ "node": ">=0.10.0" } }, + "node_modules/split2": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz", + "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==", + "license": "ISC", + "engines": { + "node": ">= 10.x" + } + }, "node_modules/stackback": { "version": "0.0.2", "resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz", @@ -4688,6 +4846,15 @@ "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", "license": "ISC" }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "license": "MIT", + "engines": { + "node": ">=0.4" + } + }, "node_modules/yaml": { "version": "2.8.2", "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.2.tgz", diff --git a/package.json b/package.json index 4a1c795..e1690f9 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@analtools/jsonormalize", - "version": "0.0.4", + "version": "0.0.6", "description": "JSONormalize — Transform any JSON into a relational database schema. Automatically normalizes nested structures, detects relationships, and generates SQLite migrations. 
Perfect for rapid prototyping, data migrations, and structured data workflows.", "keywords": [ "json-normalize", @@ -44,7 +44,7 @@ }, "homepage": "https://github.com/analtools/jsonormalize#readme", "bin": { - "json-to-drizzle": "./bin/run.js" + "jsonormalize": "./bin/run.js" }, "main": "dist/src/index.js", "type": "commonjs", @@ -55,6 +55,7 @@ "@types/better-sqlite3": "^7.6.13", "@types/node": "^24.1.0", "@types/node-fetch": "^2.6.13", + "@types/pg": "^8.16.0", "@vitest/coverage-v8": "^4.0.16", "coverage-badges-cli": "^2.2.0", "eslint": "^9.39.2", @@ -70,6 +71,8 @@ }, "dependencies": { "better-sqlite3": "^12.5.0", + "@electric-sql/pglite": "^0.3.14", + "pg": "^8.16.3", "commander": "^14.0.2", "node-fetch": "2.7.0" }, diff --git a/src/commands/constants.ts b/src/commands/constants.ts new file mode 100644 index 0000000..7b52848 --- /dev/null +++ b/src/commands/constants.ts @@ -0,0 +1,38 @@ +export const requiredArgs = { + jsonPath: { + name: "", + description: + "Path to JSON file with any data (table structure will be inferred)", + }, + sqlPath: { name: "", description: "Path to .sql file" }, + dbPath: { + name: "", + description: "Path to the database file or ':memory:' (no file, RAM only)", + }, +} as const satisfies Record; + +export const optionalArgs = Object.fromEntries( + Object.entries(requiredArgs).map(([key, { name, description }]) => [ + key, + { name: name.replace(/^<(.*)>$/, `[$1]`), description }, + ]), +) as { + [K in keyof typeof requiredArgs]: { + name: (typeof requiredArgs)[K]["name"] extends `<${infer TName}>` + ? `[${TName}]` + : (typeof requiredArgs)[K]["name"]; + description: (typeof requiredArgs)[K]["description"]; + }; +}; + +export const commands = { + sql: { + name: (prefix: string) => `${prefix}:sql`, + description: + "🛠️ Generate SQL for create tables, indexes and seed with data from JSON", + }, + setup: { + name: (prefix: string) => `${prefix}:setup`, + description: "🗄️ Setup tables, indexes and seed with data from JSON", + }, +} as const; diff --git a/src/commands/index.ts b/src/commands/index.ts index a53243a..297a812 100644 --- a/src/commands/index.ts +++ b/src/commands/index.ts @@ -1,5 +1,6 @@ +import * as Postgres from "./postgres"; import * as SQLite from "./sqlite"; export { cli } from "./cli"; -export { SQLite }; +export { Postgres, SQLite }; diff --git a/src/commands/postgres/index.ts b/src/commands/postgres/index.ts new file mode 100644 index 0000000..c18830e --- /dev/null +++ b/src/commands/postgres/index.ts @@ -0,0 +1,2 @@ +export { setup } from "./setup"; +export { sql } from "./sql"; diff --git a/src/commands/postgres/setup.ts b/src/commands/postgres/setup.ts new file mode 100644 index 0000000..08c6144 --- /dev/null +++ b/src/commands/postgres/setup.ts @@ -0,0 +1,140 @@ +import type { ClientConfig } from "pg"; + +import { setupTables } from "../../postgres"; +import { commands, optionalArgs, requiredArgs } from "../constants"; +import { prepare } from "../prepare"; +import { program } from "../program"; + +program + .command(commands.setup.name("postgres")) + .description(commands.setup.description) + .argument(requiredArgs.jsonPath.name, requiredArgs.jsonPath.description) + .argument(optionalArgs.dbPath.name, optionalArgs.dbPath.description) + .option("--user ", "default process.env.PGUSER || process.env.USER") + .option("--password ", "default process.env.PGPASSWORD") + .option("--host ", "default process.env.PGHOST") + .option("--port ", "default process.env.PGPORT") + .option("--database ", "default process.env.PGDATABASE || user") + .option( + 
"--connection-string ", + "e.g. postgres://user:password@host:5432/database", + ) + .option( + "--ssl ", + "passed directly to node.TLSSocket, supports all tls.connect options", + ) + .option( + "--statement-timeout ", + "number of milliseconds before a statement in query will time out, default is no timeout", + ) + .option( + "--query-timeout ", + "number of milliseconds before a query call will timeout, default is no timeout", + ) + .option( + "--lock-timeout ", + "number of milliseconds a query is allowed to be en lock state before it's cancelled due to lock timeout", + ) + .option( + "--application-name ", + "The name of the application that created this Client instance", + ) + .option( + "--connection-timeout-millis ", + "number of milliseconds to wait for connection, default is no timeout", + ) + .option( + "--keep-alive-initial-delay-millis ", + "set the initial delay before the first keepalive probe is sent on an idle socket", + ) + .option( + "--idle-in-transaction-session-timeout ", + "number of milliseconds before terminating any session with an open idle transaction, default is no timeout", + ) + .option( + "--client-encoding ", + "specifies the character set encoding that the database uses for sending data to the client", + ) + .option( + "--fallback-application-name ", + "provide an application name to use if application_name is not set", + ) + .option( + "--options ", + "command-line options to be sent to the server", + ) + .action(setup); + +function getNumberOption(value: string | undefined): number | undefined { + return value === undefined ? undefined : Number(value); +} + +function getSslOption(value: any): ClientConfig["ssl"] { + try { + return JSON.parse(value); + } catch { + return value == "true" ? true : undefined; + } +} + +export async function setup( + jsonPath: string, + dbPath: string | undefined, + options: { + user?: string; + password?: string; + host?: string; + port?: string; + database?: string; + connectionString?: string; + ssl?: string; + statementTimeout?: string; + queryTimeout?: string; + lockTimeout?: string; + applicationName?: string; + connectionTimeoutMillis?: string; + keepAliveInitialDelayMillis?: string; + idleInTransactionSessionTimeout?: string; + clientEncoding?: string; + fallbackApplicationName?: string; + options?: string; + }, +) { + const config: ClientConfig | undefined = + dbPath === undefined && Object.keys(options).length + ? 
{ + user: options.user, + password: options.password, + host: options.host, + port: getNumberOption(options.port), + database: options.database, + connectionString: options.connectionString, + statement_timeout: getNumberOption(options.statementTimeout), + ssl: getSslOption(options.ssl), + query_timeout: getNumberOption(options.queryTimeout), + lock_timeout: getNumberOption(options.lockTimeout), + application_name: options.applicationName, + connectionTimeoutMillis: getNumberOption( + options.connectionTimeoutMillis, + ), + keepAliveInitialDelayMillis: getNumberOption( + options.keepAliveInitialDelayMillis, + ), + idle_in_transaction_session_timeout: getNumberOption( + options.idleInTransactionSessionTimeout, + ), + client_encoding: options.clientEncoding, + fallback_application_name: options.fallbackApplicationName, + options: options.options, + } + : undefined; + + const { data, prefix } = await prepare(jsonPath); + + await setupTables({ + config, + path: dbPath, + data, + prefix, + }); +} diff --git a/src/commands/postgres/sql.ts b/src/commands/postgres/sql.ts new file mode 100644 index 0000000..1844457 --- /dev/null +++ b/src/commands/postgres/sql.ts @@ -0,0 +1,25 @@ +import * as fs from "node:fs"; +import { EOL } from "node:os"; + +import { createMigrations } from "../../postgres"; +import { commands, requiredArgs } from "../constants"; +import { prepare } from "../prepare"; +import { program } from "../program"; + +program + .command(commands.sql.name("postgres")) + .description(commands.sql.description) + .argument(requiredArgs.jsonPath.name, requiredArgs.jsonPath.description) + .argument(requiredArgs.sqlPath.name, requiredArgs.sqlPath.description) + .action(sql); + +export async function sql(jsonPath: string, sqlPath: string) { + const { data, prefix } = await prepare(jsonPath); + + const { initialMigration, dataMigration } = createMigrations({ + prefix, + data, + }); + + fs.writeFileSync(sqlPath, `${initialMigration}${EOL}${EOL}${dataMigration}`); +} diff --git a/src/commands/sqlite/setup.ts b/src/commands/sqlite/setup.ts index ec5904a..f990be4 100644 --- a/src/commands/sqlite/setup.ts +++ b/src/commands/sqlite/setup.ts @@ -1,18 +1,13 @@ import { setupTables } from "../../sqlite"; +import { commands, optionalArgs, requiredArgs } from "../constants"; import { prepare } from "../prepare"; import { program } from "../program"; program - .command("sqlite:setup") - .description("🗄️ Setup tables, indexes and seed with data from JSON") - .argument( - "", - "Path to JSON file with any data (table structure will be inferred)", - ) - .argument( - "", - "Path to SQLite database file or ':memory:' (no file, RAM only)", - ) + .command(commands.setup.name("sqlite")) + .description(commands.setup.description) + .argument(requiredArgs.jsonPath.name, requiredArgs.jsonPath.description) + .argument(optionalArgs.dbPath.name, optionalArgs.dbPath.description) .action(setup); export async function setup(jsonPath: string, dbPath: string) { diff --git a/src/commands/sqlite/sql.ts b/src/commands/sqlite/sql.ts index b2913cd..3ce6bdf 100644 --- a/src/commands/sqlite/sql.ts +++ b/src/commands/sqlite/sql.ts @@ -2,19 +2,15 @@ import * as fs from "node:fs"; import { EOL } from "node:os"; import { createMigrations } from "../../sqlite"; +import { commands, requiredArgs } from "../constants"; import { prepare } from "../prepare"; import { program } from "../program"; program - .command("sqlite:sql") - .description( - "🛠️ Generate SQL for create tables, indexes and seed with data from JSON", - ) - .argument( - "", 
- "Path to JSON file with any data (table structure will be inferred)", - ) - .argument("", "Path to .sql file") + .command(commands.sql.name("sqlite")) + .description(commands.sql.description) + .argument(requiredArgs.jsonPath.name, requiredArgs.jsonPath.description) + .argument(requiredArgs.sqlPath.name, requiredArgs.sqlPath.description) .action(sql); export async function sql(jsonPath: string, sqlPath: string) { diff --git a/src/postgres/create-data-migration.ts b/src/postgres/create-data-migration.ts new file mode 100644 index 0000000..2f004d2 --- /dev/null +++ b/src/postgres/create-data-migration.ts @@ -0,0 +1,22 @@ +import { EOL } from "node:os"; + +import type { RelationalTable } from "../create-relational-structure"; +import { escapeValue } from "./escape-value"; + +export function createDataMigration(tables: RelationalTable[]) { + return tables + .filter((table) => table.data.length > 0) + .map((table) => { + const keys = table.fields.map(({ key }) => key); + return [ + `INSERT INTO ${table.name} (${keys.join(", ")}) VALUES`, + `${table.data + .map((row) => { + const values = keys.map((key) => row[key]); + return ` (${values.map((value) => escapeValue(value)).join(", ")})`; + }) + .join(`,${EOL}`)};`, + ].join(EOL); + }) + .join(`${EOL}${EOL}`); +} diff --git a/src/postgres/create-initial-migration.ts b/src/postgres/create-initial-migration.ts new file mode 100644 index 0000000..d062cf5 --- /dev/null +++ b/src/postgres/create-initial-migration.ts @@ -0,0 +1,34 @@ +import { EOL } from "node:os"; + +import type { RelationalTable } from "../create-relational-structure"; +import { getForeignKeys, getPrimaryKeys } from "../create-relational-structure"; +import { getFieldType } from "./get-field-type"; + +export function createInitialMigration(tables: RelationalTable[]): string { + return tables + .map((table) => { + const foreignKeys = getForeignKeys(table); + return [ + `CREATE TABLE ${table.name} (`, + [ + ...table.fields.map( + (field) => + ` ${field.key} ${getFieldType(field)}${field.isNullable ? "" : " NOT NULL"}`, + ), + ` PRIMARY KEY (${getPrimaryKeys(table).join(", ")})`, + ...(foreignKeys.length + ? 
foreignKeys.map( + ({ key, reference }) => + ` FOREIGN KEY (${key}) REFERENCES ${reference.table} (${reference.key})`, + ) + : []), + ].join(`,${EOL}`), + `);`, + ...foreignKeys.map( + ({ key }) => + `CREATE INDEX idx_${table.name}_${key} ON ${table.name} (${key});`, + ), + ].join(EOL); + }) + .join(`${EOL}${EOL}`); +} diff --git a/src/postgres/create-migrations.ts b/src/postgres/create-migrations.ts new file mode 100644 index 0000000..ea2ce48 --- /dev/null +++ b/src/postgres/create-migrations.ts @@ -0,0 +1,19 @@ +import { createRelationalStructure } from "../create-relational-structure"; +import { normalize } from "../normalize"; +import { createDataMigration } from "./create-data-migration"; +import { createInitialMigration } from "./create-initial-migration"; + +export function createMigrations({ + prefix, + data, +}: { + prefix: string; + data: unknown[]; +}) { + const tables = createRelationalStructure(prefix, normalize(data)); + + return { + initialMigration: createInitialMigration(tables), + dataMigration: createDataMigration(tables), + }; +} diff --git a/src/postgres/escape-value.ts b/src/postgres/escape-value.ts new file mode 100644 index 0000000..b9f6255 --- /dev/null +++ b/src/postgres/escape-value.ts @@ -0,0 +1,18 @@ +import type { NormalizedValue } from "../normalize"; + +export function escapeValue(value: NormalizedValue | undefined): string { + if (value === null || value === undefined) { + return "NULL"; + } + switch (typeof value) { + case "boolean": { + return value ? "TRUE" : "FALSE"; + } + case "string": { + return `'${String(value).replace(/(')/gi, "$1$1")}'`; + } + case "number": { + return `${value}`; + } + } +} diff --git a/src/postgres/get-field-type.ts b/src/postgres/get-field-type.ts new file mode 100644 index 0000000..82f9b54 --- /dev/null +++ b/src/postgres/get-field-type.ts @@ -0,0 +1,18 @@ +import type { Field } from "../create-relational-structure"; + +export function getFieldType(field: Field) { + switch (field.type) { + case "boolean": { + return "BOOLEAN"; + } + case "integer": { + return "INTEGER"; + } + case "real": { + return "REAL"; + } + case "text": { + return "TEXT"; + } + } +} diff --git a/src/postgres/index.ts b/src/postgres/index.ts new file mode 100644 index 0000000..9adf778 --- /dev/null +++ b/src/postgres/index.ts @@ -0,0 +1,5 @@ +export { snakeCase } from "../utils"; +export { createDataMigration } from "./create-data-migration"; +export { createInitialMigration } from "./create-initial-migration"; +export { createMigrations } from "./create-migrations"; +export { setupTables } from "./setup-tables"; diff --git a/src/postgres/setup-tables.ts b/src/postgres/setup-tables.ts new file mode 100644 index 0000000..ac9aba1 --- /dev/null +++ b/src/postgres/setup-tables.ts @@ -0,0 +1,39 @@ +import { PGlite } from "@electric-sql/pglite"; +import { Client, type ClientConfig } from "pg"; + +import { createMigrations } from "./create-migrations"; + +export async function setupTables({ + config, + path, + prefix, + data, +}: { + config?: ClientConfig; + path?: string; + prefix: string; + data: unknown[]; +}) { + const { initialMigration, dataMigration } = createMigrations({ + prefix, + data, + }); + + if (config) { + const db = new Client(config); + + await db.connect(); + + await db.query(initialMigration); + await db.query(dataMigration); + + await db.end(); + } else { + const db = new PGlite(path); + + await db.exec(initialMigration); + await db.exec(dataMigration); + + await db.close(); + } +}
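
---

For reference, the new `postgres:setup` and `postgres:sql` commands documented in the README hunks above can be exercised much like the existing SQLite flow. A minimal sketch follows, assuming the `example.json` from the quickstart; the connection string and the `./pgdata` directory are placeholders, not values taken from this diff:

```sh
# Generate the CREATE TABLE / INSERT SQL without touching a database
npx jsonormalize postgres:sql ./example.json ./migration.sql

# Seed a running PostgreSQL server; options map onto node-postgres ClientConfig
npx jsonormalize postgres:setup ./example.json \
  --connection-string postgres://user:password@localhost:5432/mydb

# Seed an embedded PGlite database instead (no server); [db-path] is the data directory
npx jsonormalize postgres:setup ./example.json ./pgdata
```

As `src/postgres/setup-tables.ts` shows, supplying a `[db-path]` routes the migrations through the embedded `@electric-sql/pglite` engine rather than a `pg` Client connection, which preserves the zero-config behaviour of the SQLite commands.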