diff --git a/app/__test__/App.spec.ts b/app/__test__/App.spec.ts index 395aec0..79fdc51 100644 --- a/app/__test__/App.spec.ts +++ b/app/__test__/App.spec.ts @@ -1,9 +1,9 @@ -import { afterAll, describe, expect, test } from "bun:test"; +import { afterAll, afterEach, describe, expect, test } from "bun:test"; import { App } from "../src"; import { getDummyConnection } from "./helper"; const { dummyConnection, afterAllCleanup } = getDummyConnection(); -afterAll(afterAllCleanup); +afterEach(afterAllCleanup); describe("App tests", async () => { test("boots and pongs", async () => { @@ -12,4 +12,16 @@ describe("App tests", async () => { //expect(await app.data?.em.ping()).toBeTrue(); }); + + /*test.only("what", async () => { + const app = new App(dummyConnection, { + auth: { + enabled: true, + }, + }); + await app.module.auth.build(); + await app.module.data.build(); + console.log(app.em.entities.map((e) => e.name)); + console.log(await app.em.schema().getDiff()); + });*/ }); diff --git a/app/__test__/data/relations.test.ts b/app/__test__/data/relations.test.ts index b4ff708..07ecd19 100644 --- a/app/__test__/data/relations.test.ts +++ b/app/__test__/data/relations.test.ts @@ -27,7 +27,7 @@ describe("Relations", async () => { const sql1 = schema .createTable("posts") - .addColumn(...r1.schema()!) + .addColumn(...em.connection.getFieldSchema(r1.schema())!) .compile().sql; expect(sql1).toBe( @@ -43,7 +43,7 @@ describe("Relations", async () => { const sql2 = schema .createTable("posts") - .addColumn(...r2.schema()!) + .addColumn(...em.connection.getFieldSchema(r2.schema())!) 
.compile().sql; expect(sql2).toBe( diff --git a/app/__test__/data/specs/SchemaManager.spec.ts b/app/__test__/data/specs/SchemaManager.spec.ts index 9e9fe90..7c2322b 100644 --- a/app/__test__/data/specs/SchemaManager.spec.ts +++ b/app/__test__/data/specs/SchemaManager.spec.ts @@ -15,7 +15,7 @@ describe("SchemaManager tests", async () => { const em = new EntityManager([entity], dummyConnection, [], [index]); const schema = new SchemaManager(em); - const introspection = schema.getIntrospectionFromEntity(em.entities[0]); + const introspection = schema.getIntrospectionFromEntity(em.entities[0]!); expect(introspection).toEqual({ name: "test", isView: false, @@ -109,7 +109,7 @@ describe("SchemaManager tests", async () => { await schema.sync({ force: true, drop: true }); const diffAfter = await schema.getDiff(); - console.log("diffAfter", diffAfter); + //console.log("diffAfter", diffAfter); expect(diffAfter.length).toBe(0); await kysely.schema.dropTable(table).execute(); diff --git a/app/__test__/data/specs/connection/SqliteIntrospector.spec.ts b/app/__test__/data/specs/connection/SqliteIntrospector.spec.ts new file mode 100644 index 0000000..ee46b7b --- /dev/null +++ b/app/__test__/data/specs/connection/SqliteIntrospector.spec.ts @@ -0,0 +1,107 @@ +import { describe, expect, test } from "bun:test"; +import { SqliteIntrospector } from "data/connection"; +import { getDummyDatabase } from "../../helper"; +import { Kysely, SqliteDialect } from "kysely"; + +function create() { + const database = getDummyDatabase().dummyDb; + return new Kysely({ + dialect: new SqliteDialect({ database }), + }); +} + +function createLibsql() { + const database = getDummyDatabase().dummyDb; + return new Kysely({ + dialect: new SqliteDialect({ database }), + }); +} + +describe("SqliteIntrospector", () => { + test("asdf", async () => { + const kysely = create(); + + await kysely.schema + .createTable("test") + .addColumn("id", "integer", (col) => col.primaryKey().autoIncrement().notNull()) + 
.addColumn("string", "text", (col) => col.notNull()) + .addColumn("number", "integer") + .execute(); + + await kysely.schema + .createIndex("idx_test_string") + .on("test") + .columns(["string"]) + .unique() + .execute(); + + await kysely.schema + .createTable("test2") + .addColumn("id", "integer", (col) => col.primaryKey().autoIncrement().notNull()) + .addColumn("number", "integer") + .execute(); + + await kysely.schema.createIndex("idx_test2_number").on("test2").columns(["number"]).execute(); + + const introspector = new SqliteIntrospector(kysely, {}); + + const result = await introspector.getTables(); + + //console.log(_jsonp(result)); + + expect(result).toEqual([ + { + name: "test", + isView: false, + columns: [ + { + name: "id", + dataType: "INTEGER", + isNullable: false, + isAutoIncrementing: true, + hasDefaultValue: false, + comment: undefined, + }, + { + name: "string", + dataType: "TEXT", + isNullable: false, + isAutoIncrementing: false, + hasDefaultValue: false, + comment: undefined, + }, + { + comment: undefined, + dataType: "INTEGER", + hasDefaultValue: false, + isAutoIncrementing: false, + isNullable: true, + name: "number", + }, + ], + }, + { + name: "test2", + isView: false, + columns: [ + { + name: "id", + dataType: "INTEGER", + isNullable: false, + isAutoIncrementing: true, + hasDefaultValue: false, + comment: undefined, + }, + { + name: "number", + dataType: "INTEGER", + isNullable: true, + isAutoIncrementing: false, + hasDefaultValue: false, + comment: undefined, + }, + ], + }, + ]); + }); +}); diff --git a/app/__test__/data/specs/fields/Field.spec.ts b/app/__test__/data/specs/fields/Field.spec.ts index 45e4351..82ba9de 100644 --- a/app/__test__/data/specs/fields/Field.spec.ts +++ b/app/__test__/data/specs/fields/Field.spec.ts @@ -1,23 +1,29 @@ import { describe, expect, test } from "bun:test"; -import { Default, parse, stripMark } from "../../../../src/core/utils"; -import { Field, type SchemaResponse, TextField, baseFieldConfigSchema } from 
"../../../../src/data"; -import { runBaseFieldTests, transformPersist } from "./inc"; +import { Default, stripMark } from "../../../../src/core/utils"; +import { baseFieldConfigSchema, Field } from "../../../../src/data/fields/Field"; +import { runBaseFieldTests } from "./inc"; describe("[data] Field", async () => { class FieldSpec extends Field { - schema(): SchemaResponse { - return this.useSchemaHelper("text"); - } getSchema() { return baseFieldConfigSchema; } } + test("fieldSpec", () => { + expect(new FieldSpec("test").schema()).toEqual({ + name: "test", + type: "text", + nullable: true, // always true + dflt: undefined, // never using default value + }); + }); + runBaseFieldTests(FieldSpec, { defaultValue: "test", schemaType: "text" }); test("default config", async () => { const config = Default(baseFieldConfigSchema, {}); - expect(stripMark(new FieldSpec("test").config)).toEqual(config); + expect(stripMark(new FieldSpec("test").config)).toEqual(config as any); }); test("transformPersist (specific)", async () => { diff --git a/app/__test__/data/specs/fields/PrimaryField.spec.ts b/app/__test__/data/specs/fields/PrimaryField.spec.ts index 5d6dd54..6be0166 100644 --- a/app/__test__/data/specs/fields/PrimaryField.spec.ts +++ b/app/__test__/data/specs/fields/PrimaryField.spec.ts @@ -10,7 +10,12 @@ describe("[data] PrimaryField", async () => { test("schema", () => { expect(field.name).toBe("primary"); - expect(field.schema()).toEqual(["primary", "integer", expect.any(Function)]); + expect(field.schema()).toEqual({ + name: "primary", + type: "integer" as const, + nullable: false, + primary: true, + }); }); test("hasDefault", async () => { diff --git a/app/__test__/data/specs/fields/inc.ts b/app/__test__/data/specs/fields/inc.ts index 1754c20..ff2d00e 100644 --- a/app/__test__/data/specs/fields/inc.ts +++ b/app/__test__/data/specs/fields/inc.ts @@ -34,11 +34,14 @@ export function runBaseFieldTests( test("schema", () => { expect(noConfigField.name).toBe("no_config"); - 
expect(noConfigField.schema(null as any)).toEqual([ - "no_config", - config.schemaType, - expect.any(Function), - ]); + + const { type, name, nullable, dflt } = noConfigField.schema()!; + expect({ type, name, nullable, dflt }).toEqual({ + type: config.schemaType as any, + name: "no_config", + nullable: true, // always true + dflt: undefined, // never using default value + }); }); test("hasDefault", async () => { diff --git a/app/__test__/integration/auth.integration.test.ts b/app/__test__/integration/auth.integration.test.ts index 9b2bb51..298ad31 100644 --- a/app/__test__/integration/auth.integration.test.ts +++ b/app/__test__/integration/auth.integration.test.ts @@ -1,9 +1,12 @@ -import { afterAll, beforeAll, describe, expect, it } from "bun:test"; +import { afterAll, afterEach, beforeAll, describe, expect, it } from "bun:test"; import { App, createApp } from "../../src"; import type { AuthResponse } from "../../src/auth"; import { auth } from "../../src/auth/middlewares"; import { randomString, secureRandomString, withDisabledConsole } from "../../src/core/utils"; -import { disableConsoleLog, enableConsoleLog } from "../helper"; +import { disableConsoleLog, enableConsoleLog, getDummyConnection } from "../helper"; + +const { dummyConnection, afterAllCleanup } = getDummyConnection(); +afterEach(afterAllCleanup); beforeAll(disableConsoleLog); afterAll(enableConsoleLog); @@ -64,6 +67,7 @@ const configs = { function createAuthApp() { const app = createApp({ + connection: dummyConnection, initialConfig: { auth: configs.auth, }, diff --git a/app/build.ts b/app/build.ts index 0022a80..836a816 100644 --- a/app/build.ts +++ b/app/build.ts @@ -53,6 +53,9 @@ function banner(title: string) { console.log("-".repeat(40)); } +// collection of always-external packages +const external = ["bun:test", "@libsql/client"] as const; + /** * Building backend and general API */ @@ -64,7 +67,7 @@ async function buildApi() { watch, entry: ["src/index.ts", "src/data/index.ts", 
"src/core/index.ts", "src/core/utils/index.ts"], outDir: "dist", - external: ["bun:test", "@libsql/client"], + external: [...external], metafile: true, platform: "browser", format: ["esm"], @@ -93,7 +96,7 @@ async function buildUi() { sourcemap, watch, external: [ - "bun:test", + ...external, "react", "react-dom", "react/jsx-runtime", diff --git a/app/src/adapter/cloudflare/D1Connection.ts b/app/src/adapter/cloudflare/D1Connection.ts index 768ca44..5b4b059 100644 --- a/app/src/adapter/cloudflare/D1Connection.ts +++ b/app/src/adapter/cloudflare/D1Connection.ts @@ -18,6 +18,10 @@ class CustomD1Dialect extends D1Dialect { } export class D1Connection extends SqliteConnection { + protected override readonly supported = { + batching: true, + }; + constructor(private config: D1ConnectionConfig) { const plugins = [new ParseJSONResultsPlugin()]; @@ -28,14 +32,6 @@ export class D1Connection extends SqliteConnection { super(kysely, {}, plugins); } - override supportsBatching(): boolean { - return true; - } - - override supportsIndices(): boolean { - return true; - } - protected override async batch( queries: [...Queries], ): Promise<{ diff --git a/app/src/core/utils/numbers.ts b/app/src/core/utils/numbers.ts index 1435f68..33394f6 100644 --- a/app/src/core/utils/numbers.ts +++ b/app/src/core/utils/numbers.ts @@ -3,3 +3,11 @@ export function clampNumber(value: number, min: number, max: number): number { const upper = Math.max(min, max); return Math.max(lower, Math.min(value, upper)); } + +export function ensureInt(value?: string | number | null | undefined): number { + if (value === undefined || value === null) { + return 0; + } + + return typeof value === "number" ? 
value : Number.parseInt(value, 10); +} diff --git a/app/src/data/connection/BaseIntrospector.ts b/app/src/data/connection/BaseIntrospector.ts new file mode 100644 index 0000000..e96a44d --- /dev/null +++ b/app/src/data/connection/BaseIntrospector.ts @@ -0,0 +1,75 @@ +import { + type DatabaseMetadata, + type DatabaseMetadataOptions, + type Kysely, + type KyselyPlugin, + type RawBuilder, + type TableMetadata, + type DatabaseIntrospector, + type SchemaMetadata, + ParseJSONResultsPlugin, + DEFAULT_MIGRATION_TABLE, + DEFAULT_MIGRATION_LOCK_TABLE, +} from "kysely"; +import { KyselyPluginRunner } from "data/plugins/KyselyPluginRunner"; +import type { IndexMetadata } from "data/connection/Connection"; + +export type TableSpec = TableMetadata & { + indices: IndexMetadata[]; +}; +export type SchemaSpec = TableSpec[]; + +export type BaseIntrospectorConfig = { + excludeTables?: string[]; + plugins?: KyselyPlugin[]; +}; + +export abstract class BaseIntrospector implements DatabaseIntrospector { + readonly _excludeTables: string[] = []; + readonly _plugins: KyselyPlugin[]; + + constructor( + protected readonly db: Kysely, + config: BaseIntrospectorConfig = {}, + ) { + this._excludeTables = config.excludeTables ?? []; + this._plugins = config.plugins ?? [new ParseJSONResultsPlugin()]; + } + + abstract getSchemaSpec(): Promise; + abstract getSchemas(): Promise; + + protected getExcludedTableNames(): string[] { + return [...this._excludeTables, DEFAULT_MIGRATION_TABLE, DEFAULT_MIGRATION_LOCK_TABLE]; + } + + protected async executeWithPlugins(query: RawBuilder): Promise { + const result = await query.execute(this.db); + const runner = new KyselyPluginRunner(this._plugins ?? 
[]); + return (await runner.transformResultRows(result.rows)) as unknown as T; + } + + async getMetadata(options?: DatabaseMetadataOptions): Promise { + return { + tables: await this.getTables(options), + }; + } + + async getIndices(tbl_name?: string): Promise { + const schema = await this.getSchemaSpec(); + return schema + .flatMap((table) => table.indices) + .filter((index) => !tbl_name || index.table === tbl_name); + } + + async getTables( + options: DatabaseMetadataOptions = { withInternalKyselyTables: false }, + ): Promise { + const schema = await this.getSchemaSpec(); + return schema.map((table) => ({ + name: table.name, + isView: table.isView, + columns: table.columns, + })); + } +} diff --git a/app/src/data/connection/Connection.ts b/app/src/data/connection/Connection.ts index 2a3933b..7eae588 100644 --- a/app/src/data/connection/Connection.ts +++ b/app/src/data/connection/Connection.ts @@ -1,15 +1,18 @@ import { type AliasableExpression, - type DatabaseIntrospector, + type ColumnBuilderCallback, + type ColumnDataType, type Expression, type Kysely, type KyselyPlugin, + type OnModifyForeignAction, type RawBuilder, type SelectQueryBuilder, type SelectQueryNode, type Simplify, sql, } from "kysely"; +import type { BaseIntrospector } from "./BaseIntrospector"; export type QB = SelectQueryBuilder; @@ -20,15 +23,43 @@ export type IndexMetadata = { columns: { name: string; order: number }[]; }; -export interface ConnectionIntrospector extends DatabaseIntrospector { - getIndices(tbl_name?: string): Promise; -} - export interface SelectQueryBuilderExpression extends AliasableExpression { get isSelectQueryBuilder(): true; toOperationNode(): SelectQueryNode; } +export type SchemaResponse = [string, ColumnDataType, ColumnBuilderCallback] | undefined; + +const FieldSpecTypes = [ + "text", + "integer", + "real", + "blob", + "date", + "datetime", + "timestamp", + "boolean", + "json", +] as const; + +export type FieldSpec = { + type: (typeof FieldSpecTypes)[number]; + name: 
string; + nullable?: boolean; + dflt?: any; + unique?: boolean; + primary?: boolean; + references?: string; + onDelete?: OnModifyForeignAction; + onUpdate?: OnModifyForeignAction; +}; + +export type IndexSpec = { + name: string; + columns: string[]; + unique?: boolean; +}; + export type DbFunctions = { jsonObjectFrom(expr: SelectQueryBuilderExpression): RawBuilder | null>; jsonArrayFrom(expr: SelectQueryBuilderExpression): RawBuilder[]>; @@ -45,6 +76,9 @@ const CONN_SYMBOL = Symbol.for("bknd:connection"); export abstract class Connection { kysely: Kysely; + protected readonly supported = { + batching: false, + }; constructor( kysely: Kysely, @@ -65,17 +99,12 @@ export abstract class Connection { return conn[CONN_SYMBOL] === true; } - getIntrospector(): ConnectionIntrospector { - return this.kysely.introspection as ConnectionIntrospector; + getIntrospector(): BaseIntrospector { + return this.kysely.introspection as any; } - supportsBatching(): boolean { - return false; - } - - // @todo: add if only first field is used in index - supportsIndices(): boolean { - return false; + supports(feature: keyof typeof this.supported): boolean { + return this.supported[feature] ?? false; } async ping(): Promise { @@ -97,7 +126,7 @@ export abstract class Connection { [K in keyof Queries]: Awaited>; }> { // bypass if no client support - if (!this.supportsBatching()) { + if (!this.supports("batching")) { const data: any = []; for (const q of queries) { const result = await q.execute(); @@ -108,4 +137,19 @@ export abstract class Connection { return await this.batch(queries); } + + protected validateFieldSpecType(type: string): type is FieldSpec["type"] { + if (!FieldSpecTypes.includes(type as any)) { + throw new Error( + `Invalid field type "${type}". 
Allowed types are: ${FieldSpecTypes.join(", ")}`, + ); + } + return true; + } + + abstract getFieldSchema(spec: FieldSpec, strict?: boolean): SchemaResponse; + + async close(): Promise { + // no-op by default + } } diff --git a/app/src/data/connection/DummyConnection.ts b/app/src/data/connection/DummyConnection.ts index 451575d..d04d0af 100644 --- a/app/src/data/connection/DummyConnection.ts +++ b/app/src/data/connection/DummyConnection.ts @@ -1,7 +1,15 @@ -import { Connection } from "./Connection"; +import { Connection, type FieldSpec, type SchemaResponse } from "./Connection"; export class DummyConnection extends Connection { + protected override readonly supported = { + batching: true, + }; + constructor() { super(undefined as any); } + + override getFieldSchema(spec: FieldSpec, strict?: boolean): SchemaResponse { + throw new Error("Method not implemented."); + } } diff --git a/app/src/data/connection/SqliteConnection.ts b/app/src/data/connection/SqliteConnection.ts deleted file mode 100644 index 2572667..0000000 --- a/app/src/data/connection/SqliteConnection.ts +++ /dev/null @@ -1,22 +0,0 @@ -import type { Kysely, KyselyPlugin } from "kysely"; -import { jsonArrayFrom, jsonBuildObject, jsonObjectFrom } from "kysely/helpers/sqlite"; -import { Connection, type DbFunctions } from "./Connection"; - -export class SqliteConnection extends Connection { - constructor(kysely: Kysely, fn: Partial = {}, plugins: KyselyPlugin[] = []) { - super( - kysely, - { - ...fn, - jsonArrayFrom, - jsonObjectFrom, - jsonBuildObject, - }, - plugins, - ); - } - - override supportsIndices(): boolean { - return true; - } -} diff --git a/app/src/data/connection/SqliteIntrospector.ts b/app/src/data/connection/SqliteIntrospector.ts deleted file mode 100644 index cf68816..0000000 --- a/app/src/data/connection/SqliteIntrospector.ts +++ /dev/null @@ -1,164 +0,0 @@ -import type { - DatabaseIntrospector, - DatabaseMetadata, - DatabaseMetadataOptions, - ExpressionBuilder, - Kysely, - SchemaMetadata, 
- TableMetadata, -} from "kysely"; -import { DEFAULT_MIGRATION_LOCK_TABLE, DEFAULT_MIGRATION_TABLE, sql } from "kysely"; -import type { ConnectionIntrospector, IndexMetadata } from "./Connection"; - -export type SqliteIntrospectorConfig = { - excludeTables?: string[]; -}; - -export class SqliteIntrospector implements DatabaseIntrospector, ConnectionIntrospector { - readonly #db: Kysely; - readonly _excludeTables: string[] = []; - - constructor(db: Kysely, config: SqliteIntrospectorConfig = {}) { - this.#db = db; - this._excludeTables = config.excludeTables ?? []; - } - - async getSchemas(): Promise { - // Sqlite doesn't support schemas. - return []; - } - - async getIndices(tbl_name?: string): Promise { - const indices = await this.#db - .selectFrom("sqlite_master") - .where("type", "=", "index") - .$if(!!tbl_name, (eb) => eb.where("tbl_name", "=", tbl_name)) - .select("name") - .$castTo<{ name: string }>() - .execute(); - - return Promise.all(indices.map(({ name }) => this.#getIndexMetadata(name))); - } - - async #getIndexMetadata(index: string): Promise { - const db = this.#db; - - // Get the SQL that was used to create the index. 
- const indexDefinition = await db - .selectFrom("sqlite_master") - .where("name", "=", index) - .select(["sql", "tbl_name", "type"]) - .$castTo<{ sql: string | undefined; tbl_name: string; type: string }>() - .executeTakeFirstOrThrow(); - - //console.log("--indexDefinition--", indexDefinition, index); - - // check unique by looking for the word "unique" in the sql - const isUnique = indexDefinition.sql?.match(/unique/i) != null; - - const columns = await db - .selectFrom( - sql<{ - seqno: number; - cid: number; - name: string; - }>`pragma_index_info(${index})`.as("index_info"), - ) - .select(["seqno", "cid", "name"]) - .orderBy("cid") - .execute(); - - return { - name: index, - table: indexDefinition.tbl_name, - isUnique: isUnique, - columns: columns.map((col) => ({ - name: col.name, - order: col.seqno, - })), - }; - } - - private excludeTables(tables: string[] = []) { - return (eb: ExpressionBuilder) => { - const and = tables.map((t) => eb("name", "!=", t)); - return eb.and(and); - }; - } - - async getTables( - options: DatabaseMetadataOptions = { withInternalKyselyTables: false }, - ): Promise { - let query = this.#db - .selectFrom("sqlite_master") - .where("type", "in", ["table", "view"]) - .where("name", "not like", "sqlite_%") - .select("name") - .orderBy("name") - .$castTo<{ name: string }>(); - - if (!options.withInternalKyselyTables) { - query = query.where( - this.excludeTables([DEFAULT_MIGRATION_TABLE, DEFAULT_MIGRATION_LOCK_TABLE]), - ); - } - if (this._excludeTables.length > 0) { - query = query.where(this.excludeTables(this._excludeTables)); - } - - const tables = await query.execute(); - return Promise.all(tables.map(({ name }) => this.#getTableMetadata(name))); - } - - async getMetadata(options?: DatabaseMetadataOptions): Promise { - return { - tables: await this.getTables(options), - }; - } - - async #getTableMetadata(table: string): Promise { - const db = this.#db; - - // Get the SQL that was used to create the table. 
- const tableDefinition = await db - .selectFrom("sqlite_master") - .where("name", "=", table) - .select(["sql", "type"]) - .$castTo<{ sql: string | undefined; type: string }>() - .executeTakeFirstOrThrow(); - - // Try to find the name of the column that has `autoincrement` 🤦 - const autoIncrementCol = tableDefinition.sql - ?.split(/[\(\),]/) - ?.find((it) => it.toLowerCase().includes("autoincrement")) - ?.trimStart() - ?.split(/\s+/)?.[0] - ?.replace(/["`]/g, ""); - - const columns = await db - .selectFrom( - sql<{ - name: string; - type: string; - notnull: 0 | 1; - dflt_value: any; - }>`pragma_table_info(${table})`.as("table_info"), - ) - .select(["name", "type", "notnull", "dflt_value"]) - .orderBy("cid") - .execute(); - - return { - name: table, - isView: tableDefinition.type === "view", - columns: columns.map((col) => ({ - name: col.name, - dataType: col.type, - isNullable: !col.notnull, - isAutoIncrementing: col.name === autoIncrementCol, - hasDefaultValue: col.dflt_value != null, - comment: undefined, - })), - }; - } -} diff --git a/app/src/data/connection/index.ts b/app/src/data/connection/index.ts new file mode 100644 index 0000000..2e745e0 --- /dev/null +++ b/app/src/data/connection/index.ts @@ -0,0 +1,14 @@ +export { BaseIntrospector } from "./BaseIntrospector"; +export { + Connection, + type FieldSpec, + type IndexSpec, + type DbFunctions, + type SchemaResponse, +} from "./Connection"; + +// sqlite +export { LibsqlConnection, type LibSqlCredentials } from "./sqlite/LibsqlConnection"; +export { SqliteConnection } from "./sqlite/SqliteConnection"; +export { SqliteIntrospector } from "./sqlite/SqliteIntrospector"; +export { SqliteLocalConnection } from "./sqlite/SqliteLocalConnection"; diff --git a/app/src/data/connection/LibsqlConnection.ts b/app/src/data/connection/sqlite/LibsqlConnection.ts similarity index 86% rename from app/src/data/connection/LibsqlConnection.ts rename to app/src/data/connection/sqlite/LibsqlConnection.ts index d341adc..895b6b0 
100644 --- a/app/src/data/connection/LibsqlConnection.ts +++ b/app/src/data/connection/sqlite/LibsqlConnection.ts @@ -1,9 +1,9 @@ import { type Client, type Config, type InStatement, createClient } from "@libsql/client"; import { LibsqlDialect } from "@libsql/kysely-libsql"; +import { FilterNumericKeysPlugin } from "data/plugins/FilterNumericKeysPlugin"; +import { KyselyPluginRunner } from "data/plugins/KyselyPluginRunner"; import { type DatabaseIntrospector, Kysely, ParseJSONResultsPlugin } from "kysely"; -import { FilterNumericKeysPlugin } from "../plugins/FilterNumericKeysPlugin"; -import { KyselyPluginRunner } from "../plugins/KyselyPluginRunner"; -import type { QB } from "./Connection"; +import type { QB } from "../Connection"; import { SqliteConnection } from "./SqliteConnection"; import { SqliteIntrospector } from "./SqliteIntrospector"; @@ -12,21 +12,26 @@ export type LibSqlCredentials = Config & { protocol?: (typeof LIBSQL_PROTOCOLS)[number]; }; +const plugins = [new FilterNumericKeysPlugin(), new ParseJSONResultsPlugin()]; + class CustomLibsqlDialect extends LibsqlDialect { override createIntrospector(db: Kysely): DatabaseIntrospector { return new SqliteIntrospector(db, { excludeTables: ["libsql_wasm_func_table"], + plugins, }); } } export class LibsqlConnection extends SqliteConnection { private client: Client; + protected override readonly supported = { + batching: true, + }; constructor(client: Client); constructor(credentials: LibSqlCredentials); constructor(clientOrCredentials: Client | LibSqlCredentials) { - const plugins = [new FilterNumericKeysPlugin(), new ParseJSONResultsPlugin()]; let client: Client; if (clientOrCredentials && "url" in clientOrCredentials) { let { url, authToken, protocol } = clientOrCredentials; @@ -51,14 +56,6 @@ export class LibsqlConnection extends SqliteConnection { this.client = client; } - override supportsBatching(): boolean { - return true; - } - - override supportsIndices(): boolean { - return true; - } - getClient(): 
Client { return this.client; } diff --git a/app/src/data/connection/sqlite/SqliteConnection.ts b/app/src/data/connection/sqlite/SqliteConnection.ts new file mode 100644 index 0000000..a63d49b --- /dev/null +++ b/app/src/data/connection/sqlite/SqliteConnection.ts @@ -0,0 +1,46 @@ +import type { ColumnDataType, ColumnDefinitionBuilder, Kysely, KyselyPlugin } from "kysely"; +import { jsonArrayFrom, jsonBuildObject, jsonObjectFrom } from "kysely/helpers/sqlite"; +import { Connection, type DbFunctions, type FieldSpec, type SchemaResponse } from "../Connection"; + +export class SqliteConnection extends Connection { + constructor(kysely: Kysely, fn: Partial = {}, plugins: KyselyPlugin[] = []) { + super( + kysely, + { + ...fn, + jsonArrayFrom, + jsonObjectFrom, + jsonBuildObject, + }, + plugins, + ); + } + + override getFieldSchema(spec: FieldSpec): SchemaResponse { + this.validateFieldSpecType(spec.type); + let type: ColumnDataType = spec.type; + + switch (spec.type) { + case "json": + type = "text"; + break; + } + + return [ + spec.name, + type, + (col: ColumnDefinitionBuilder) => { + if (spec.primary) { + return col.primaryKey().notNull().autoIncrement(); + } + if (spec.references) { + let relCol = col.references(spec.references); + if (spec.onDelete) relCol = relCol.onDelete(spec.onDelete); + if (spec.onUpdate) relCol = relCol.onUpdate(spec.onUpdate); + return relCol; + } + return spec.nullable ? 
col : col.notNull(); + }, + ] as const; + } +} diff --git a/app/src/data/connection/sqlite/SqliteIntrospector.ts b/app/src/data/connection/sqlite/SqliteIntrospector.ts new file mode 100644 index 0000000..f584050 --- /dev/null +++ b/app/src/data/connection/sqlite/SqliteIntrospector.ts @@ -0,0 +1,95 @@ +import { type SchemaMetadata, sql } from "kysely"; +import { BaseIntrospector } from "../BaseIntrospector"; + +export type SqliteSchemaSpec = { + name: string; + type: "table" | "view"; + sql: string; + columns: { + name: string; + type: string; + notnull: number; + dflt_value: any; + pk: number; + }[]; + indices: { + name: string; + origin: string; + partial: number; + sql: string; + columns: { name: string; seqno: number }[]; + }[]; +}; + +export class SqliteIntrospector extends BaseIntrospector { + async getSchemas(): Promise { + // Sqlite doesn't support schemas. + return []; + } + + async getSchemaSpec() { + const query = sql` + SELECT m.name, m.type, m.sql, + (SELECT json_group_array( + json_object( + 'name', p.name, + 'type', p.type, + 'notnull', p."notnull", + 'default', p.dflt_value, + 'primary_key', p.pk + )) FROM pragma_table_info(m.name) p) AS columns, + (SELECT json_group_array( + json_object( + 'name', i.name, + 'origin', i.origin, + 'partial', i.partial, + 'sql', im.sql, + 'columns', (SELECT json_group_array( + json_object( + 'name', ii.name, + 'seqno', ii.seqno + )) FROM pragma_index_info(i.name) ii) + )) FROM pragma_index_list(m.name) i + LEFT JOIN sqlite_master im ON im.name = i.name + AND im.type = 'index' + ) AS indices + FROM sqlite_master m + WHERE m.type IN ('table', 'view') + and m.name not like 'sqlite_%' + and m.name not in (${this.getExcludedTableNames().join(", ")}) + `; + + const tables = await this.executeWithPlugins(query); + + return tables.map((table) => ({ + name: table.name, + isView: table.type === "view", + columns: table.columns.map((col) => { + const autoIncrementCol = table.sql + ?.split(/[\(\),]/) + ?.find((it) => 
it.toLowerCase().includes("autoincrement")) + ?.trimStart() + ?.split(/\s+/)?.[0] + ?.replace(/["`]/g, ""); + + return { + name: col.name, + dataType: col.type, + isNullable: !col.notnull, + isAutoIncrementing: col.name === autoIncrementCol, + hasDefaultValue: col.dflt_value != null, + comment: undefined, + }; + }), + indices: table.indices.map((index) => ({ + name: index.name, + table: table.name, + isUnique: index.sql?.match(/unique/i) != null, + columns: index.columns.map((col) => ({ + name: col.name, + order: col.seqno, + })), + })), + })); + } +} diff --git a/app/src/data/connection/SqliteLocalConnection.ts b/app/src/data/connection/sqlite/SqliteLocalConnection.ts similarity index 62% rename from app/src/data/connection/SqliteLocalConnection.ts rename to app/src/data/connection/sqlite/SqliteLocalConnection.ts index 7c26428..a92577b 100644 --- a/app/src/data/connection/SqliteLocalConnection.ts +++ b/app/src/data/connection/sqlite/SqliteLocalConnection.ts @@ -1,30 +1,31 @@ -import { type DatabaseIntrospector, ParseJSONResultsPlugin, type SqliteDatabase } from "kysely"; -import { Kysely, SqliteDialect } from "kysely"; +import { + type DatabaseIntrospector, + Kysely, + ParseJSONResultsPlugin, + type SqliteDatabase, + SqliteDialect, +} from "kysely"; import { SqliteConnection } from "./SqliteConnection"; import { SqliteIntrospector } from "./SqliteIntrospector"; +const plugins = [new ParseJSONResultsPlugin()]; + class CustomSqliteDialect extends SqliteDialect { override createIntrospector(db: Kysely): DatabaseIntrospector { return new SqliteIntrospector(db, { excludeTables: ["test_table"], + plugins, }); } } export class SqliteLocalConnection extends SqliteConnection { constructor(private database: SqliteDatabase) { - const plugins = [new ParseJSONResultsPlugin()]; const kysely = new Kysely({ dialect: new CustomSqliteDialect({ database }), plugins, - //log: ["query"], }); - super(kysely); - this.plugins = plugins; - } - - override supportsIndices(): boolean { - 
return true; + super(kysely, {}, plugins); } } diff --git a/app/src/data/entities/query/Repository.ts b/app/src/data/entities/query/Repository.ts index 4734ddf..47657f8 100644 --- a/app/src/data/entities/query/Repository.ts +++ b/app/src/data/entities/query/Repository.ts @@ -14,6 +14,7 @@ import { WithBuilder, } from "../index"; import { JoinBuilder } from "./JoinBuilder"; +import { ensureInt } from "core/utils"; export type RepositoryQB = SelectQueryBuilder; @@ -225,8 +226,9 @@ export class Repository extends Field< } } - schema() { - // @todo: potentially use "integer" instead - return this.useSchemaHelper("boolean"); + override schema() { + return Object.freeze({ + ...super.schema()!, + type: "boolean", + }); } override getHtmlConfig() { diff --git a/app/src/data/fields/DateField.ts b/app/src/data/fields/DateField.ts index 5020376..c7ba901 100644 --- a/app/src/data/fields/DateField.ts +++ b/app/src/data/fields/DateField.ts @@ -32,8 +32,10 @@ export class DateField extends Field< } override schema() { - const type = this.config.type === "datetime" ? "datetime" : "date"; - return this.useSchemaHelper(type); + return Object.freeze({ + ...super.schema()!, + type: this.config.type === "datetime" ? 
"datetime" : "date", + }); } override getHtmlConfig() { diff --git a/app/src/data/fields/EnumField.ts b/app/src/data/fields/EnumField.ts index 79eee93..e8e8772 100644 --- a/app/src/data/fields/EnumField.ts +++ b/app/src/data/fields/EnumField.ts @@ -66,10 +66,6 @@ export class EnumField; -export type SchemaResponse = [string, ColumnDataType, ColumnBuilderCallback] | undefined; - export abstract class Field< Config extends BaseFieldConfig = BaseFieldConfig, Type = any, @@ -106,25 +104,18 @@ export abstract class Field< protected abstract getSchema(): TSchema; - protected useSchemaHelper( - type: ColumnDataType, - builder?: (col: ColumnDefinitionBuilder) => ColumnDefinitionBuilder, - ): SchemaResponse { - return [ - this.name, - type, - (col: ColumnDefinitionBuilder) => { - if (builder) return builder(col); - return col; - }, - ]; - } - /** * Used in SchemaManager.ts * @param em */ - abstract schema(em: EntityManager): SchemaResponse; + schema(): FieldSpec | undefined { + return Object.freeze({ + name: this.name, + type: "text", + nullable: true, + dflt: this.getDefault(), + }); + } hasDefault() { return this.config.default_value !== undefined; diff --git a/app/src/data/fields/JsonField.ts b/app/src/data/fields/JsonField.ts index 62a7677..b25df60 100644 --- a/app/src/data/fields/JsonField.ts +++ b/app/src/data/fields/JsonField.ts @@ -18,10 +18,6 @@ export class JsonField extends Field< }; } - schema() { - return this.useSchemaHelper("integer"); + override schema() { + return Object.freeze({ + ...super.schema()!, + type: "integer", + }); } override getValue(value: any, context?: TRenderContext): any { diff --git a/app/src/data/fields/PrimaryField.ts b/app/src/data/fields/PrimaryField.ts index 6245944..dd3463f 100644 --- a/app/src/data/fields/PrimaryField.ts +++ b/app/src/data/fields/PrimaryField.ts @@ -30,9 +30,12 @@ export class PrimaryField extends Field< return baseFieldConfigSchema; } - schema() { - return this.useSchemaHelper("integer", (col) => { - return 
col.primaryKey().notNull().autoIncrement(); + override schema() { + return Object.freeze({ + type: "integer", + name: this.name, + primary: true, + nullable: false, }); } diff --git a/app/src/data/fields/TextField.ts b/app/src/data/fields/TextField.ts index 7ebcea5..8c318ec 100644 --- a/app/src/data/fields/TextField.ts +++ b/app/src/data/fields/TextField.ts @@ -47,10 +47,6 @@ export class TextField extends Field< return textFieldConfigSchema; } - override schema() { - return this.useSchemaHelper("text"); - } - override getHtmlConfig() { if (this.config.html_config) { return this.config.html_config as any; diff --git a/app/src/data/fields/VirtualField.ts b/app/src/data/fields/VirtualField.ts index d02869b..c03db19 100644 --- a/app/src/data/fields/VirtualField.ts +++ b/app/src/data/fields/VirtualField.ts @@ -17,7 +17,7 @@ export class VirtualField extends Field { return virtualFieldConfigSchema; } - schema() { + override schema() { return undefined; } diff --git a/app/src/data/index.ts b/app/src/data/index.ts index 165436f..eb3e893 100644 --- a/app/src/data/index.ts +++ b/app/src/data/index.ts @@ -5,6 +5,7 @@ export * from "./entities"; export * from "./relations"; export * from "./schema/SchemaManager"; export * from "./prototype"; +export * from "./connection"; export { type RepoQuery, @@ -14,11 +15,6 @@ export { whereSchema, } from "./server/data-query-impl"; -export { Connection } from "./connection/Connection"; -export { LibsqlConnection, type LibSqlCredentials } from "./connection/LibsqlConnection"; -export { SqliteConnection } from "./connection/SqliteConnection"; -export { SqliteLocalConnection } from "./connection/SqliteLocalConnection"; -export { SqliteIntrospector } from "./connection/SqliteIntrospector"; export { KyselyPluginRunner } from "./plugins/KyselyPluginRunner"; export { constructEntity, constructRelation } from "./schema/constructor"; diff --git a/app/src/data/relations/RelationField.ts b/app/src/data/relations/RelationField.ts index 
b38cc1c..6868797 100644 --- a/app/src/data/relations/RelationField.ts +++ b/app/src/data/relations/RelationField.ts @@ -1,6 +1,6 @@ import { type Static, StringEnum, Type } from "core/utils"; import type { EntityManager } from "../entities"; -import { Field, type SchemaResponse, baseFieldConfigSchema } from "../fields"; +import { Field, baseFieldConfigSchema } from "../fields"; import type { EntityRelation } from "./EntityRelation"; import type { EntityRelationAnchor } from "./EntityRelationAnchor"; @@ -72,14 +72,12 @@ export class RelationField extends Field { return this.config.target_field!; } - override schema(): SchemaResponse { - return this.useSchemaHelper("integer", (col) => { - //col.references('person.id').onDelete('cascade').notNull() - // @todo: implement cascading? - - return col - .references(`${this.config.target}.${this.config.target_field}`) - .onDelete(this.config.on_delete ?? "set null"); + override schema() { + return Object.freeze({ + ...super.schema()!, + type: "integer", + references: `${this.config.target}.${this.config.target_field}`, + onDelete: this.config.on_delete ?? 
"set null", }); } diff --git a/app/src/data/schema/SchemaManager.ts b/app/src/data/schema/SchemaManager.ts index 58a3127..7ad1ba1 100644 --- a/app/src/data/schema/SchemaManager.ts +++ b/app/src/data/schema/SchemaManager.ts @@ -1,7 +1,7 @@ -import type { AlterTableColumnAlteringBuilder, CompiledQuery, TableMetadata } from "kysely"; -import type { IndexMetadata } from "../connection/Connection"; +import type { CompiledQuery, TableMetadata } from "kysely"; +import type { IndexMetadata, SchemaResponse } from "../connection/Connection"; import type { Entity, EntityManager } from "../entities"; -import { PrimaryField, type SchemaResponse } from "../fields"; +import { PrimaryField } from "../fields"; type IntrospectedTable = TableMetadata & { indices: IndexMetadata[]; @@ -49,10 +49,6 @@ export class SchemaManager { constructor(private readonly em: EntityManager) {} private getIntrospector() { - if (!this.em.connection.supportsIndices()) { - throw new Error("Indices are not supported by the current connection"); - } - return this.em.connection.getIntrospector(); } @@ -239,10 +235,9 @@ export class SchemaManager { for (const column of columns) { const field = this.em.entity(table).getField(column)!; - const fieldSchema = field.schema(this.em); - if (Array.isArray(fieldSchema) && fieldSchema.length === 3) { - schemas.push(fieldSchema); - //throw new Error(`Field "${field.name}" on entity "${table}" has no schema`); + const fieldSchema = field.schema(); + if (fieldSchema) { + schemas.push(this.em.connection.getFieldSchema(fieldSchema)); } } @@ -330,6 +325,7 @@ export class SchemaManager { if (local_updates === 0) continue; // iterate through built qbs + // @todo: run in batches for (const qb of qbs) { const { sql, parameters } = qb.compile(); statements.push({ sql, parameters }); diff --git a/app/src/media/MediaField.ts b/app/src/media/MediaField.ts index d865f59..66fd0b6 100644 --- a/app/src/media/MediaField.ts +++ b/app/src/media/MediaField.ts @@ -47,7 +47,7 @@ export class 
MediaField< return this.config.min_items; } - schema() { + override schema() { return undefined; } diff --git a/bun.lock b/bun.lock index 6cb5793..6af2967 100644 --- a/bun.lock +++ b/bun.lock @@ -138,6 +138,22 @@ "react-dom": ">=18", }, }, + "packages/postgres": { + "name": "@bknd/postgres", + "version": "0.0.1", + "dependencies": { + "kysely": "^0.27.6", + "pg": "^8.12.0", + }, + "devDependencies": { + "@types/bun": "^1.2.5", + "@types/node": "^22.13.10", + "@types/pg": "^8.11.11", + "bknd": "workspace:*", + "tsup": "^8.4.0", + "typescript": "^5.6.3", + }, + }, }, "packages": { "@alloc/quick-lru": ["@alloc/quick-lru@5.2.0", "", {}, "sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw=="], @@ -446,6 +462,8 @@ "@bknd/plasmic": ["@bknd/plasmic@workspace:packages/plasmic"], + "@bknd/postgres": ["@bknd/postgres@workspace:packages/postgres"], + "@bluwy/giget-core": ["@bluwy/giget-core@0.1.2", "", { "dependencies": { "tar": "^6.2.1" } }, "sha512-v9f+ueUOKkZCDKiCm0yxKtYgYNLD9zlKarNux0NSXOvNm94QEYL3RlMpGKgD2hq44pbF2qWqEmHnCvmk56kPJw=="], "@cfworker/json-schema": ["@cfworker/json-schema@4.1.1", "", {}, "sha512-gAmrUZSGtKc3AiBL71iNWxDsyUC5uMaKKGdvzYsBoTW/xi42JQHl7eKV2OYzCUqvc+D2RCcf7EXY2iCyFIk6og=="], @@ -1170,6 +1188,8 @@ "@types/parse-json": ["@types/parse-json@4.0.2", "", {}, "sha512-dISoDXWWQwUquiKsyZ4Ng+HX2KsPL7LyHKHQwgGFEA3IaKac4Obd+h2a/a6waisAoepJlBcx9paWqjA8/HVjCw=="], + "@types/pg": ["@types/pg@8.11.11", "", { "dependencies": { "@types/node": "*", "pg-protocol": "*", "pg-types": "^4.0.1" } }, "sha512-kGT1qKM8wJQ5qlawUrEkXgvMSXoV213KfMGXcwfDwUIfUHXqXYXOfS1nE1LINRJVVVx5wCm70XnFlMHaIcQAfw=="], + "@types/prettier": ["@types/prettier@1.19.1", "", {}, "sha512-5qOlnZscTn4xxM5MeGXAMOsIOIKIbh9e85zJWfBRVPlRMEVawzoPhINYbRGkBZCI8LxvBe7tJCdWiarA99OZfQ=="], "@types/react": ["@types/react@19.0.10", "", { "dependencies": { "csstype": "^3.0.2" } }, 
"sha512-JuRQ9KXLEjaUNjTWpzuR231Z2WpIwczOkBEIvbHNCzQefFIT0L8IqE6NV6ULLyC1SI/i234JnDoMkfg+RjQj2g=="], @@ -2526,6 +2546,8 @@ "object.values": ["object.values@1.2.1", "", { "dependencies": { "call-bind": "^1.0.8", "call-bound": "^1.0.3", "define-properties": "^1.2.1", "es-object-atoms": "^1.0.0" } }, "sha512-gXah6aZrcUxjWg2zR2MwouP2eHlCBzdV4pygudehaKXSGW4v2AsRQUK+lwwXhii6KFZcunEnmSUoYp5CXibxtA=="], + "obuf": ["obuf@1.1.2", "", {}, "sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg=="], + "ohash": ["ohash@1.1.6", "", {}, "sha512-TBu7PtV8YkAZn0tSxobKY2n2aAQva936lhRrj6957aDaCf9IEtqsKbgMzXE/F/sjqYOwmrukeORHNLe5glk7Cg=="], "on-exit-leak-free": ["on-exit-leak-free@0.2.0", "", {}, "sha512-dqaz3u44QbRXQooZLTUKU41ZrzYrcvLISVgbrzbyCMxpmSLJvZ3ZamIJIZ29P6OhZIkNIQKosdeM6t1LYbA9hg=="], @@ -2596,6 +2618,24 @@ "performance-now": ["performance-now@2.1.0", "", {}, "sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow=="], + "pg": ["pg@8.14.0", "", { "dependencies": { "pg-connection-string": "^2.7.0", "pg-pool": "^3.8.0", "pg-protocol": "^1.8.0", "pg-types": "^2.1.0", "pgpass": "1.x" }, "optionalDependencies": { "pg-cloudflare": "^1.1.1" }, "peerDependencies": { "pg-native": ">=3.0.1" }, "optionalPeers": ["pg-native"] }, "sha512-nXbVpyoaXVmdqlKEzToFf37qzyeeh7mbiXsnoWvstSqohj88yaa/I/Rq/HEVn2QPSZEuLIJa/jSpRDyzjEx4FQ=="], + + "pg-cloudflare": ["pg-cloudflare@1.1.1", "", {}, "sha512-xWPagP/4B6BgFO+EKz3JONXv3YDgvkbVrGw2mTo3D6tVDQRh1e7cqVGvyR3BE+eQgAvx1XhW/iEASj4/jCWl3Q=="], + + "pg-connection-string": ["pg-connection-string@2.7.0", "", {}, "sha512-PI2W9mv53rXJQEOb8xNR8lH7Hr+EKa6oJa38zsK0S/ky2er16ios1wLKhZyxzD7jUReiWokc9WK5nxSnC7W1TA=="], + + "pg-int8": ["pg-int8@1.0.1", "", {}, "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw=="], + + "pg-numeric": ["pg-numeric@1.0.2", "", {}, 
"sha512-BM/Thnrw5jm2kKLE5uJkXqqExRUY/toLHda65XgFTBTFYZyopbKjBe29Ii3RbkvlsMoFwD+tHeGaCjjv0gHlyw=="], + + "pg-pool": ["pg-pool@3.8.0", "", { "peerDependencies": { "pg": ">=8.0" } }, "sha512-VBw3jiVm6ZOdLBTIcXLNdSotb6Iy3uOCwDGFAksZCXmi10nyRvnP2v3jl4d+IsLYRyXf6o9hIm/ZtUzlByNUdw=="], + + "pg-protocol": ["pg-protocol@1.8.0", "", {}, "sha512-jvuYlEkL03NRvOoyoRktBK7+qU5kOvlAwvmrH8sr3wbLrOdVWsRxQfz8mMy9sZFsqJ1hEWNfdWKI4SAmoL+j7g=="], + + "pg-types": ["pg-types@4.0.2", "", { "dependencies": { "pg-int8": "1.0.1", "pg-numeric": "1.0.2", "postgres-array": "~3.0.1", "postgres-bytea": "~3.0.0", "postgres-date": "~2.1.0", "postgres-interval": "^3.0.0", "postgres-range": "^1.1.1" } }, "sha512-cRL3JpS3lKMGsKaWndugWQoLOCoP+Cic8oseVcbr0qhPzYD5DWXK+RZ9LY9wxRf7RQia4SCwQlXk0q6FCPrVng=="], + + "pgpass": ["pgpass@1.0.5", "", { "dependencies": { "split2": "^4.1.0" } }, "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug=="], + "picocolors": ["picocolors@1.1.1", "", {}, "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="], "picomatch": ["picomatch@4.0.2", "", {}, "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg=="], @@ -2644,6 +2684,16 @@ "postcss-value-parser": ["postcss-value-parser@4.2.0", "", {}, "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ=="], + "postgres-array": ["postgres-array@3.0.4", "", {}, "sha512-nAUSGfSDGOaOAEGwqsRY27GPOea7CNipJPOA7lPbdEpx5Kg3qzdP0AaWC5MlhTWV9s4hFX39nomVZ+C4tnGOJQ=="], + + "postgres-bytea": ["postgres-bytea@3.0.0", "", { "dependencies": { "obuf": "~1.1.2" } }, "sha512-CNd4jim9RFPkObHSjVHlVrxoVQXz7quwNFpz7RY1okNNme49+sVyiTvTRobiLV548Hx/hb1BG+iE7h9493WzFw=="], + + "postgres-date": ["postgres-date@2.1.0", "", {}, "sha512-K7Juri8gtgXVcDfZttFKVmhglp7epKb1K4pgrkLxehjqkrgPhfG6OO8LHLkfaqkbpjNRnra018XwAr1yQFWGcA=="], + + "postgres-interval": ["postgres-interval@3.0.0", "", {}, 
"sha512-BSNDnbyZCXSxgA+1f5UU2GmwhoI0aU5yMxRGO8CdFEcY2BQF9xm/7MqKnYoM1nJDk8nONNWDk9WeSmePFhQdlw=="], + + "postgres-range": ["postgres-range@1.1.4", "", {}, "sha512-i/hbxIE9803Alj/6ytL7UHQxRvZkI9O4Sy+J3HGc4F4oo/2eQAjTSNJ0bfxyse3bH0nuVesCk+3IRLaMtG3H6w=="], + "prelude-ls": ["prelude-ls@1.1.2", "", {}, "sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w=="], "prettier": ["prettier@1.19.1", "", { "bin": { "prettier": "./bin-prettier.js" } }, "sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew=="], @@ -3956,6 +4006,8 @@ "peek-stream/duplexify": ["duplexify@3.7.1", "", { "dependencies": { "end-of-stream": "^1.0.0", "inherits": "^2.0.1", "readable-stream": "^2.0.0", "stream-shift": "^1.0.0" } }, "sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g=="], + "pg/pg-types": ["pg-types@2.2.0", "", { "dependencies": { "pg-int8": "1.0.1", "postgres-array": "~2.0.0", "postgres-bytea": "~1.0.0", "postgres-date": "~1.0.4", "postgres-interval": "^1.1.0" } }, "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA=="], + "pkg-types/pathe": ["pathe@2.0.3", "", {}, "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w=="], "pretty-format/ansi-styles": ["ansi-styles@5.2.0", "", {}, "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA=="], @@ -4326,6 +4378,14 @@ "ora/log-symbols/chalk": ["chalk@2.4.2", "", { "dependencies": { "ansi-styles": "^3.2.1", "escape-string-regexp": "^1.0.5", "supports-color": "^5.3.0" } }, "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ=="], + "pg/pg-types/postgres-array": ["postgres-array@2.0.0", "", {}, "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA=="], + + "pg/pg-types/postgres-bytea": ["postgres-bytea@1.0.0", "", {}, 
"sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w=="], + + "pg/pg-types/postgres-date": ["postgres-date@1.0.7", "", {}, "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q=="], + + "pg/pg-types/postgres-interval": ["postgres-interval@1.2.0", "", { "dependencies": { "xtend": "^4.0.0" } }, "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ=="], + "progress-estimator/chalk/ansi-styles": ["ansi-styles@3.2.1", "", { "dependencies": { "color-convert": "^1.9.0" } }, "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA=="], "progress-estimator/chalk/escape-string-regexp": ["escape-string-regexp@1.0.5", "", {}, "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg=="], diff --git a/packages/postgres/README.md b/packages/postgres/README.md new file mode 100644 index 0000000..cb22856 --- /dev/null +++ b/packages/postgres/README.md @@ -0,0 +1,50 @@ +# Postgres adapter for `bknd` (experimental) +This packages adds an adapter to use a Postgres database with `bknd`. It is based on `pg` and the driver included in `kysely`. + +## Installation +Install the adapter with: +```bash +npm install @bknd/postgres +``` + +## Usage +Create a connection: + +```ts +import { PostgresConnection } from "@bknd/postgres"; + +const connection = new PostgresConnection({ + host: "localhost", + port: 5432, + user: "postgres", + password: "postgres", + database: "bknd", +}); +``` + +Use the connection depending on which framework or runtime you are using. 
E.g., when using `createApp`, you can use the connection as follows: + +```ts +import { createApp } from "bknd"; +import { PostgresConnection } from "@bknd/postgres"; + +const connection = new PostgresConnection(); +const app = createApp({ connection }); +``` + +Or if you're using it with a framework, say Next.js, you can add the connection object to where you're initializing the app: + +```ts +// e.g. in src/app/api/[[...bknd]]/route.ts +import { serve } from "bknd/adapter/nextjs"; +import { PostgresConnection } from "@bknd/postgres"; + +const connection = new PostgresConnection(); +const handler = serve({ + connection +}) + +// ... +``` + +For more information about how to integrate Next.js in general, check out the [Next.js documentation](https://docs.bknd.io/integration/nextjs). \ No newline at end of file diff --git a/packages/postgres/package.json b/packages/postgres/package.json new file mode 100644 index 0000000..51d0e90 --- /dev/null +++ b/packages/postgres/package.json @@ -0,0 +1,37 @@ +{ + "name": "@bknd/postgres", + "version": "0.0.1", + "type": "module", + "main": "dist/index.js", + "module": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "build": "tsup", + "test": "bun test", + "docker:start": "docker run --rm --name bknd-test-postgres -d -e POSTGRES_PASSWORD=postgres -e POSTGRES_USER=postgres -e POSTGRES_DB=bknd -p 5430:5432 postgres:17", + "docker:stop": "docker stop bknd-test-postgres" + }, + "dependencies": { + "pg": "^8.12.0", + "kysely": "^0.27.6" + }, + "devDependencies": { + "@types/bun": "^1.2.5", + "@types/node": "^22.13.10", + "@types/pg": "^8.11.11", + "bknd": "workspace:*", + "tsup": "^8.4.0", + "typescript": "^5.6.3" + }, + "tsup": { + "entry": ["src/index.ts"], + "format": ["esm"], + "target": "es2022", + "clean": true, + "minify": true, + "dts": true, + "metafile": true, + "external": ["bknd", "pg", "kysely"] + }, + "files": ["dist", "README.md", "!*.map", "!metafile*.json"] +} diff --git
a/packages/postgres/src/PostgresConnection.ts b/packages/postgres/src/PostgresConnection.ts new file mode 100644 index 0000000..c1495d5 --- /dev/null +++ b/packages/postgres/src/PostgresConnection.ts @@ -0,0 +1,106 @@ +import { Connection, type FieldSpec, type SchemaResponse } from "bknd/data"; +import { + type ColumnDataType, + type ColumnDefinitionBuilder, + type DatabaseIntrospector, + Kysely, + ParseJSONResultsPlugin, + PostgresDialect, + type SelectQueryBuilder, +} from "kysely"; +import { jsonArrayFrom, jsonBuildObject, jsonObjectFrom } from "kysely/helpers/postgres"; +import pg from "pg"; +import { PostgresIntrospector } from "./PostgresIntrospector"; + +export type PostgresConnectionConfig = pg.PoolConfig; +export type QB = SelectQueryBuilder; + +const plugins = [new ParseJSONResultsPlugin()]; + +class CustomPostgresDialect extends PostgresDialect { + override createIntrospector(db: Kysely): DatabaseIntrospector { + return new PostgresIntrospector(db, { + excludeTables: [], + }); + } +} + +export class PostgresConnection extends Connection { + protected override readonly supported = { + batching: true, + }; + private pool: pg.Pool; + + constructor(config: PostgresConnectionConfig) { + const pool = new pg.Pool(config); + const kysely = new Kysely({ + dialect: new CustomPostgresDialect({ + pool, + }), + plugins, + //log: ["query", "error"], + }); + + super( + kysely, + { + jsonArrayFrom, + jsonBuildObject, + jsonObjectFrom, + }, + plugins, + ); + this.pool = pool; + } + + override getFieldSchema(spec: FieldSpec): SchemaResponse { + this.validateFieldSpecType(spec.type); + let type: ColumnDataType = spec.primary ? 
"serial" : spec.type; + + switch (spec.type) { + case "blob": + type = "bytea"; + break; + case "date": + case "datetime": + // https://www.postgresql.org/docs/17/datatype-datetime.html + type = "timestamp"; + break; + case "text": + // https://www.postgresql.org/docs/17/datatype-character.html + type = "varchar"; + break; + } + + return [ + spec.name, + type, + (col: ColumnDefinitionBuilder) => { + if (spec.primary) { + return col.primaryKey(); + } + if (spec.references) { + return col + .references(spec.references) + .onDelete(spec.onDelete ?? "set null") + .onUpdate(spec.onUpdate ?? "no action"); + } + return spec.nullable ? col : col.notNull(); + }, + ]; + } + + override async close(): Promise { + await this.pool.end(); + } + + protected override async batch( + queries: [...Queries], + ): Promise<{ + [K in keyof Queries]: Awaited>; + }> { + return this.kysely.transaction().execute(async (trx) => { + return Promise.all(queries.map((q) => trx.executeQuery(q).then((r) => r.rows))); + }) as any; + } +} diff --git a/packages/postgres/src/PostgresIntrospector.ts b/packages/postgres/src/PostgresIntrospector.ts new file mode 100644 index 0000000..82b75ba --- /dev/null +++ b/packages/postgres/src/PostgresIntrospector.ts @@ -0,0 +1,127 @@ +import { type SchemaMetadata, sql } from "kysely"; +import { BaseIntrospector } from "bknd/data"; + +type PostgresSchemaSpec = { + name: string; + type: "VIEW" | "BASE TABLE"; + columns: { + name: string; + type: string; + notnull: number; + dflt: string; + pk: boolean; + }[]; + indices: { + name: string; + origin: string; + partial: number; + sql: string; + columns: { name: string; seqno: number }[]; + }[]; +}; + +export class PostgresIntrospector extends BaseIntrospector { + async getSchemas(): Promise { + const rawSchemas = await this.db + .selectFrom("pg_catalog.pg_namespace") + .select("nspname") + .$castTo<{ nspname: string }>() + .execute(); + + return rawSchemas.map((it) => ({ name: it.nspname })); + } + + async getSchemaSpec() 
{ + const query = sql` + WITH tables_and_views AS ( + SELECT table_name AS name, + table_type AS type + FROM information_schema.tables + WHERE table_schema = 'public' + AND table_type IN ('BASE TABLE', 'VIEW') + AND table_name NOT LIKE 'pg_%' + AND table_name NOT IN (${this.getExcludedTableNames().join(", ")}) + ), + + columns_info AS ( + SELECT table_name AS name, + json_agg(json_build_object( + 'name', column_name, + 'type', data_type, + 'notnull', (CASE WHEN is_nullable = 'NO' THEN true ELSE false END), + 'dflt', column_default, + 'pk', (SELECT COUNT(*) > 0 + FROM information_schema.table_constraints tc + INNER JOIN information_schema.key_column_usage kcu + ON tc.constraint_name = kcu.constraint_name + WHERE tc.table_name = c.table_name + AND tc.constraint_type = 'PRIMARY KEY' + AND kcu.column_name = c.column_name) + )) AS columns + FROM information_schema.columns c + WHERE table_schema = 'public' + GROUP BY table_name + ), + + indices_info AS ( + SELECT + t.relname AS table_name, + json_agg(json_build_object( + 'name', i.relname, + 'origin', pg_get_indexdef(i.oid), + 'partial', (CASE WHEN ix.indisvalid THEN false ELSE true END), + 'sql', pg_get_indexdef(i.oid), + 'columns', ( + SELECT json_agg(json_build_object( + 'name', a.attname, + 'seqno', x.ordinal_position + )) + FROM unnest(ix.indkey) WITH ORDINALITY AS x(attnum, ordinal_position) + JOIN pg_attribute a ON a.attnum = x.attnum AND a.attrelid = t.oid + ))) AS indices + FROM pg_class t + LEFT JOIN pg_index ix ON t.oid = ix.indrelid + LEFT JOIN pg_class i ON i.oid = ix.indexrelid + WHERE t.relkind IN ('r', 'v') -- r = table, v = view + AND t.relname NOT LIKE 'pg_%' + GROUP BY t.relname + ) + + SELECT + tv.name, + tv.type, + ci.columns, + ii.indices + FROM tables_and_views tv + LEFT JOIN columns_info ci ON tv.name = ci.name + LEFT JOIN indices_info ii ON tv.name = ii.table_name; + `; + + const tables = await this.executeWithPlugins(query); + + return tables.map((table) => ({ + name: table.name, + isView: 
table.type === "VIEW", + columns: table.columns.map((col) => { + return { + name: col.name, + dataType: col.type, + isNullable: !col.notnull, + // @todo: check default value on 'nextval' see https://www.postgresql.org/docs/17/datatype-numeric.html#DATATYPE-SERIAL + isAutoIncrementing: true, // just for now + hasDefaultValue: col.dflt != null, + comment: undefined, + }; + }), + indices: table.indices.map((index) => ({ + name: index.name, + table: table.name, + isUnique: index.sql?.match(/unique/i) != null, + columns: index.columns.map((col) => ({ + name: col.name, + order: col.seqno, + })), + })), + })); + } +} diff --git a/packages/postgres/src/index.ts b/packages/postgres/src/index.ts new file mode 100644 index 0000000..ef7c56f --- /dev/null +++ b/packages/postgres/src/index.ts @@ -0,0 +1,2 @@ +export { PostgresConnection, type PostgresConnectionConfig } from "./PostgresConnection"; +export { PostgresIntrospector } from "./PostgresIntrospector"; diff --git a/packages/postgres/test/base.test.ts b/packages/postgres/test/base.test.ts new file mode 100644 index 0000000..c5adbda --- /dev/null +++ b/packages/postgres/test/base.test.ts @@ -0,0 +1,19 @@ +import { describe, it, expect } from "bun:test"; + +import { PostgresConnection } from "../src"; +import { createConnection, cleanDatabase } from "./setup"; + +describe(PostgresConnection, () => { + it("should connect to the database", async () => { + const connection = createConnection(); + expect(await connection.ping()).toBe(true); + }); + + it("should clean the database", async () => { + const connection = createConnection(); + await cleanDatabase(connection); + + const tables = await connection.getIntrospector().getTables(); + expect(tables).toEqual([]); + }); +}); diff --git a/packages/postgres/test/integration.test.ts b/packages/postgres/test/integration.test.ts new file mode 100644 index 0000000..90b8746 --- /dev/null +++ b/packages/postgres/test/integration.test.ts @@ -0,0 +1,113 @@ +import { describe, it, 
expect, beforeAll, afterAll, afterEach } from "bun:test"; + +import { createApp } from "bknd"; +import * as proto from "bknd/data"; + +import { createConnection, cleanDatabase } from "./setup"; +import type { PostgresConnection } from "../src"; + +let connection: PostgresConnection; +beforeAll(async () => { + connection = createConnection(); + await cleanDatabase(connection); +}); + +afterEach(async () => { + await cleanDatabase(connection); +}); + +afterAll(async () => { + await connection.close(); +}); + +describe("integration", () => { + it("should create app and ping", async () => { + const app = createApp({ + connection, + }); + await app.build(); + + expect(app.version()).toBeDefined(); + expect(await app.em.ping()).toBe(true); + }); + + it("should create a basic schema", async () => { + const schema = proto.em( + { + posts: proto.entity("posts", { + title: proto.text().required(), + content: proto.text(), + }), + comments: proto.entity("comments", { + content: proto.text(), + }), + }, + (fns, s) => { + fns.relation(s.comments).manyToOne(s.posts); + fns.index(s.posts).on(["title"], true); + }, + ); + + const app = createApp({ + connection, + initialConfig: { + data: schema.toJSON(), + }, + }); + + await app.build(); + + expect(app.em.entities.length).toBe(2); + expect(app.em.entities.map((e) => e.name)).toEqual(["posts", "comments"]); + + const api = app.getApi(); + + expect( + ( + await api.data.createMany("posts", [ + { + title: "Hello", + content: "World", + }, + { + title: "Hello 2", + content: "World 2", + }, + ]) + ).data, + ).toEqual([ + { + id: 1, + title: "Hello", + content: "World", + }, + { + id: 2, + title: "Hello 2", + content: "World 2", + }, + ] as any); + + // try to create an existing + expect( + ( + await api.data.createOne("posts", { + title: "Hello", + }) + ).ok, + ).toBe(false); + + // add a comment to a post + await api.data.createOne("comments", { + content: "Hello", + posts_id: 1, + }); + + // and then query using a `with` property + 
const result = await api.data.readMany("posts", { with: ["comments"] }); + expect(result.length).toBe(2); + expect(result[0].comments.length).toBe(1); + expect(result[0].comments[0].content).toBe("Hello"); + expect(result[1].comments.length).toBe(0); + }); +}); diff --git a/packages/postgres/test/setup.ts b/packages/postgres/test/setup.ts new file mode 100644 index 0000000..d82427d --- /dev/null +++ b/packages/postgres/test/setup.ts @@ -0,0 +1,25 @@ +import type { Kysely } from "kysely"; +import { PostgresConnection, PostgresIntrospector, type PostgresConnectionConfig } from "../src"; + +export const info = { + host: "localhost", + port: 5430, + user: "postgres", + password: "postgres", + database: "bknd", +}; + +export function createConnection(config: PostgresConnectionConfig = {}) { + return new PostgresConnection({ + ...info, + ...config, + }); +} + +export async function cleanDatabase(connection: PostgresConnection) { + const kysely = connection.kysely; + + // drop all tables & create new schema + await kysely.schema.dropSchema("public").ifExists().cascade().execute(); + await kysely.schema.createSchema("public").execute(); +} diff --git a/packages/postgres/tsconfig.json b/packages/postgres/tsconfig.json new file mode 100644 index 0000000..d2359e0 --- /dev/null +++ b/packages/postgres/tsconfig.json @@ -0,0 +1,29 @@ +{ + "compilerOptions": { + "composite": false, + "module": "ESNext", + "moduleResolution": "bundler", + "allowImportingTsExtensions": false, + "target": "ES2022", + "noImplicitAny": false, + "allowJs": true, + "verbatimModuleSyntax": true, + "declaration": true, + "strict": true, + "allowUnusedLabels": false, + "allowUnreachableCode": false, + "exactOptionalPropertyTypes": false, + "noFallthroughCasesInSwitch": true, + "noImplicitOverride": true, + "noImplicitReturns": true, + "noPropertyAccessFromIndexSignature": false, + "noUncheckedIndexedAccess": true, + "noUnusedLocals": false, + "noUnusedParameters": false, + "isolatedModules": true, + 
"esModuleInterop": true, + "skipLibCheck": true + }, + "include": ["./src/**/*.ts"], + "exclude": ["node_modules"] +}