Merge pull request #105 from bknd-io/feat/add-postgres-and-prepare-others

support for postgres and others
This commit is contained in:
dswbx
2025-03-14 09:57:44 +01:00
committed by GitHub
46 changed files with 1113 additions and 330 deletions

View File

@@ -1,9 +1,9 @@
import { afterAll, describe, expect, test } from "bun:test"; import { afterAll, afterEach, describe, expect, test } from "bun:test";
import { App } from "../src"; import { App } from "../src";
import { getDummyConnection } from "./helper"; import { getDummyConnection } from "./helper";
const { dummyConnection, afterAllCleanup } = getDummyConnection(); const { dummyConnection, afterAllCleanup } = getDummyConnection();
afterAll(afterAllCleanup); afterEach(afterAllCleanup);
describe("App tests", async () => { describe("App tests", async () => {
test("boots and pongs", async () => { test("boots and pongs", async () => {
@@ -12,4 +12,16 @@ describe("App tests", async () => {
//expect(await app.data?.em.ping()).toBeTrue(); //expect(await app.data?.em.ping()).toBeTrue();
}); });
/*test.only("what", async () => {
const app = new App(dummyConnection, {
auth: {
enabled: true,
},
});
await app.module.auth.build();
await app.module.data.build();
console.log(app.em.entities.map((e) => e.name));
console.log(await app.em.schema().getDiff());
});*/
}); });

View File

@@ -27,7 +27,7 @@ describe("Relations", async () => {
const sql1 = schema const sql1 = schema
.createTable("posts") .createTable("posts")
.addColumn(...r1.schema()!) .addColumn(...em.connection.getFieldSchema(r1.schema())!)
.compile().sql; .compile().sql;
expect(sql1).toBe( expect(sql1).toBe(
@@ -43,7 +43,7 @@ describe("Relations", async () => {
const sql2 = schema const sql2 = schema
.createTable("posts") .createTable("posts")
.addColumn(...r2.schema()!) .addColumn(...em.connection.getFieldSchema(r2.schema())!)
.compile().sql; .compile().sql;
expect(sql2).toBe( expect(sql2).toBe(

View File

@@ -15,7 +15,7 @@ describe("SchemaManager tests", async () => {
const em = new EntityManager([entity], dummyConnection, [], [index]); const em = new EntityManager([entity], dummyConnection, [], [index]);
const schema = new SchemaManager(em); const schema = new SchemaManager(em);
const introspection = schema.getIntrospectionFromEntity(em.entities[0]); const introspection = schema.getIntrospectionFromEntity(em.entities[0]!);
expect(introspection).toEqual({ expect(introspection).toEqual({
name: "test", name: "test",
isView: false, isView: false,
@@ -109,7 +109,7 @@ describe("SchemaManager tests", async () => {
await schema.sync({ force: true, drop: true }); await schema.sync({ force: true, drop: true });
const diffAfter = await schema.getDiff(); const diffAfter = await schema.getDiff();
console.log("diffAfter", diffAfter); //console.log("diffAfter", diffAfter);
expect(diffAfter.length).toBe(0); expect(diffAfter.length).toBe(0);
await kysely.schema.dropTable(table).execute(); await kysely.schema.dropTable(table).execute();

View File

@@ -0,0 +1,107 @@
import { describe, expect, test } from "bun:test";
import { SqliteIntrospector } from "data/connection";
import { getDummyDatabase } from "../../helper";
import { Kysely, SqliteDialect } from "kysely";
function create() {
const database = getDummyDatabase().dummyDb;
return new Kysely({
dialect: new SqliteDialect({ database }),
});
}
function createLibsql() {
const database = getDummyDatabase().dummyDb;
return new Kysely({
dialect: new SqliteDialect({ database }),
});
}
describe("SqliteIntrospector", () => {
test("asdf", async () => {
const kysely = create();
await kysely.schema
.createTable("test")
.addColumn("id", "integer", (col) => col.primaryKey().autoIncrement().notNull())
.addColumn("string", "text", (col) => col.notNull())
.addColumn("number", "integer")
.execute();
await kysely.schema
.createIndex("idx_test_string")
.on("test")
.columns(["string"])
.unique()
.execute();
await kysely.schema
.createTable("test2")
.addColumn("id", "integer", (col) => col.primaryKey().autoIncrement().notNull())
.addColumn("number", "integer")
.execute();
await kysely.schema.createIndex("idx_test2_number").on("test2").columns(["number"]).execute();
const introspector = new SqliteIntrospector(kysely, {});
const result = await introspector.getTables();
//console.log(_jsonp(result));
expect(result).toEqual([
{
name: "test",
isView: false,
columns: [
{
name: "id",
dataType: "INTEGER",
isNullable: false,
isAutoIncrementing: true,
hasDefaultValue: false,
comment: undefined,
},
{
name: "string",
dataType: "TEXT",
isNullable: false,
isAutoIncrementing: false,
hasDefaultValue: false,
comment: undefined,
},
{
comment: undefined,
dataType: "INTEGER",
hasDefaultValue: false,
isAutoIncrementing: false,
isNullable: true,
name: "number",
},
],
},
{
name: "test2",
isView: false,
columns: [
{
name: "id",
dataType: "INTEGER",
isNullable: false,
isAutoIncrementing: true,
hasDefaultValue: false,
comment: undefined,
},
{
name: "number",
dataType: "INTEGER",
isNullable: true,
isAutoIncrementing: false,
hasDefaultValue: false,
comment: undefined,
},
],
},
]);
});
});

View File

@@ -1,23 +1,29 @@
import { describe, expect, test } from "bun:test"; import { describe, expect, test } from "bun:test";
import { Default, parse, stripMark } from "../../../../src/core/utils"; import { Default, stripMark } from "../../../../src/core/utils";
import { Field, type SchemaResponse, TextField, baseFieldConfigSchema } from "../../../../src/data"; import { baseFieldConfigSchema, Field } from "../../../../src/data/fields/Field";
import { runBaseFieldTests, transformPersist } from "./inc"; import { runBaseFieldTests } from "./inc";
describe("[data] Field", async () => { describe("[data] Field", async () => {
class FieldSpec extends Field { class FieldSpec extends Field {
schema(): SchemaResponse {
return this.useSchemaHelper("text");
}
getSchema() { getSchema() {
return baseFieldConfigSchema; return baseFieldConfigSchema;
} }
} }
test("fieldSpec", () => {
expect(new FieldSpec("test").schema()).toEqual({
name: "test",
type: "text",
nullable: true, // always true
dflt: undefined, // never using default value
});
});
runBaseFieldTests(FieldSpec, { defaultValue: "test", schemaType: "text" }); runBaseFieldTests(FieldSpec, { defaultValue: "test", schemaType: "text" });
test("default config", async () => { test("default config", async () => {
const config = Default(baseFieldConfigSchema, {}); const config = Default(baseFieldConfigSchema, {});
expect(stripMark(new FieldSpec("test").config)).toEqual(config); expect(stripMark(new FieldSpec("test").config)).toEqual(config as any);
}); });
test("transformPersist (specific)", async () => { test("transformPersist (specific)", async () => {

View File

@@ -10,7 +10,12 @@ describe("[data] PrimaryField", async () => {
test("schema", () => { test("schema", () => {
expect(field.name).toBe("primary"); expect(field.name).toBe("primary");
expect(field.schema()).toEqual(["primary", "integer", expect.any(Function)]); expect(field.schema()).toEqual({
name: "primary",
type: "integer" as const,
nullable: false,
primary: true,
});
}); });
test("hasDefault", async () => { test("hasDefault", async () => {

View File

@@ -34,11 +34,14 @@ export function runBaseFieldTests(
test("schema", () => { test("schema", () => {
expect(noConfigField.name).toBe("no_config"); expect(noConfigField.name).toBe("no_config");
expect(noConfigField.schema(null as any)).toEqual([
"no_config", const { type, name, nullable, dflt } = noConfigField.schema()!;
config.schemaType, expect({ type, name, nullable, dflt }).toEqual({
expect.any(Function), type: config.schemaType as any,
]); name: "no_config",
nullable: true, // always true
dflt: undefined, // never using default value
});
}); });
test("hasDefault", async () => { test("hasDefault", async () => {

View File

@@ -1,9 +1,12 @@
import { afterAll, beforeAll, describe, expect, it } from "bun:test"; import { afterAll, afterEach, beforeAll, describe, expect, it } from "bun:test";
import { App, createApp } from "../../src"; import { App, createApp } from "../../src";
import type { AuthResponse } from "../../src/auth"; import type { AuthResponse } from "../../src/auth";
import { auth } from "../../src/auth/middlewares"; import { auth } from "../../src/auth/middlewares";
import { randomString, secureRandomString, withDisabledConsole } from "../../src/core/utils"; import { randomString, secureRandomString, withDisabledConsole } from "../../src/core/utils";
import { disableConsoleLog, enableConsoleLog } from "../helper"; import { disableConsoleLog, enableConsoleLog, getDummyConnection } from "../helper";
const { dummyConnection, afterAllCleanup } = getDummyConnection();
afterEach(afterAllCleanup);
beforeAll(disableConsoleLog); beforeAll(disableConsoleLog);
afterAll(enableConsoleLog); afterAll(enableConsoleLog);
@@ -64,6 +67,7 @@ const configs = {
function createAuthApp() { function createAuthApp() {
const app = createApp({ const app = createApp({
connection: dummyConnection,
initialConfig: { initialConfig: {
auth: configs.auth, auth: configs.auth,
}, },

View File

@@ -53,6 +53,9 @@ function banner(title: string) {
console.log("-".repeat(40)); console.log("-".repeat(40));
} }
// collection of always-external packages
const external = ["bun:test", "@libsql/client"] as const;
/** /**
* Building backend and general API * Building backend and general API
*/ */
@@ -64,7 +67,7 @@ async function buildApi() {
watch, watch,
entry: ["src/index.ts", "src/data/index.ts", "src/core/index.ts", "src/core/utils/index.ts"], entry: ["src/index.ts", "src/data/index.ts", "src/core/index.ts", "src/core/utils/index.ts"],
outDir: "dist", outDir: "dist",
external: ["bun:test", "@libsql/client"], external: [...external],
metafile: true, metafile: true,
platform: "browser", platform: "browser",
format: ["esm"], format: ["esm"],
@@ -93,7 +96,7 @@ async function buildUi() {
sourcemap, sourcemap,
watch, watch,
external: [ external: [
"bun:test", ...external,
"react", "react",
"react-dom", "react-dom",
"react/jsx-runtime", "react/jsx-runtime",

View File

@@ -18,6 +18,10 @@ class CustomD1Dialect extends D1Dialect {
} }
export class D1Connection extends SqliteConnection { export class D1Connection extends SqliteConnection {
protected override readonly supported = {
batching: true,
};
constructor(private config: D1ConnectionConfig) { constructor(private config: D1ConnectionConfig) {
const plugins = [new ParseJSONResultsPlugin()]; const plugins = [new ParseJSONResultsPlugin()];
@@ -28,14 +32,6 @@ export class D1Connection extends SqliteConnection {
super(kysely, {}, plugins); super(kysely, {}, plugins);
} }
override supportsBatching(): boolean {
return true;
}
override supportsIndices(): boolean {
return true;
}
protected override async batch<Queries extends QB[]>( protected override async batch<Queries extends QB[]>(
queries: [...Queries], queries: [...Queries],
): Promise<{ ): Promise<{

View File

@@ -3,3 +3,11 @@ export function clampNumber(value: number, min: number, max: number): number {
const upper = Math.max(min, max); const upper = Math.max(min, max);
return Math.max(lower, Math.min(value, upper)); return Math.max(lower, Math.min(value, upper));
} }
export function ensureInt(value?: string | number | null | undefined): number {
if (value === undefined || value === null) {
return 0;
}
return typeof value === "number" ? value : Number.parseInt(value, 10);
}

View File

@@ -0,0 +1,75 @@
import {
type DatabaseMetadata,
type DatabaseMetadataOptions,
type Kysely,
type KyselyPlugin,
type RawBuilder,
type TableMetadata,
type DatabaseIntrospector,
type SchemaMetadata,
ParseJSONResultsPlugin,
DEFAULT_MIGRATION_TABLE,
DEFAULT_MIGRATION_LOCK_TABLE,
} from "kysely";
import { KyselyPluginRunner } from "data/plugins/KyselyPluginRunner";
import type { IndexMetadata } from "data/connection/Connection";
export type TableSpec = TableMetadata & {
indices: IndexMetadata[];
};
export type SchemaSpec = TableSpec[];
export type BaseIntrospectorConfig = {
excludeTables?: string[];
plugins?: KyselyPlugin[];
};
export abstract class BaseIntrospector implements DatabaseIntrospector {
readonly _excludeTables: string[] = [];
readonly _plugins: KyselyPlugin[];
constructor(
protected readonly db: Kysely<any>,
config: BaseIntrospectorConfig = {},
) {
this._excludeTables = config.excludeTables ?? [];
this._plugins = config.plugins ?? [new ParseJSONResultsPlugin()];
}
abstract getSchemaSpec(): Promise<SchemaSpec>;
abstract getSchemas(): Promise<SchemaMetadata[]>;
protected getExcludedTableNames(): string[] {
return [...this._excludeTables, DEFAULT_MIGRATION_TABLE, DEFAULT_MIGRATION_LOCK_TABLE];
}
protected async executeWithPlugins<T>(query: RawBuilder<any>): Promise<T> {
const result = await query.execute(this.db);
const runner = new KyselyPluginRunner(this._plugins ?? []);
return (await runner.transformResultRows(result.rows)) as unknown as T;
}
async getMetadata(options?: DatabaseMetadataOptions): Promise<DatabaseMetadata> {
return {
tables: await this.getTables(options),
};
}
async getIndices(tbl_name?: string): Promise<IndexMetadata[]> {
const schema = await this.getSchemaSpec();
return schema
.flatMap((table) => table.indices)
.filter((index) => !tbl_name || index.table === tbl_name);
}
async getTables(
options: DatabaseMetadataOptions = { withInternalKyselyTables: false },
): Promise<TableMetadata[]> {
const schema = await this.getSchemaSpec();
return schema.map((table) => ({
name: table.name,
isView: table.isView,
columns: table.columns,
}));
}
}

View File

@@ -1,15 +1,18 @@
import { import {
type AliasableExpression, type AliasableExpression,
type DatabaseIntrospector, type ColumnBuilderCallback,
type ColumnDataType,
type Expression, type Expression,
type Kysely, type Kysely,
type KyselyPlugin, type KyselyPlugin,
type OnModifyForeignAction,
type RawBuilder, type RawBuilder,
type SelectQueryBuilder, type SelectQueryBuilder,
type SelectQueryNode, type SelectQueryNode,
type Simplify, type Simplify,
sql, sql,
} from "kysely"; } from "kysely";
import type { BaseIntrospector } from "./BaseIntrospector";
export type QB = SelectQueryBuilder<any, any, any>; export type QB = SelectQueryBuilder<any, any, any>;
@@ -20,15 +23,43 @@ export type IndexMetadata = {
columns: { name: string; order: number }[]; columns: { name: string; order: number }[];
}; };
export interface ConnectionIntrospector extends DatabaseIntrospector {
getIndices(tbl_name?: string): Promise<IndexMetadata[]>;
}
export interface SelectQueryBuilderExpression<O> extends AliasableExpression<O> { export interface SelectQueryBuilderExpression<O> extends AliasableExpression<O> {
get isSelectQueryBuilder(): true; get isSelectQueryBuilder(): true;
toOperationNode(): SelectQueryNode; toOperationNode(): SelectQueryNode;
} }
export type SchemaResponse = [string, ColumnDataType, ColumnBuilderCallback] | undefined;
const FieldSpecTypes = [
"text",
"integer",
"real",
"blob",
"date",
"datetime",
"timestamp",
"boolean",
"json",
] as const;
export type FieldSpec = {
type: (typeof FieldSpecTypes)[number];
name: string;
nullable?: boolean;
dflt?: any;
unique?: boolean;
primary?: boolean;
references?: string;
onDelete?: OnModifyForeignAction;
onUpdate?: OnModifyForeignAction;
};
export type IndexSpec = {
name: string;
columns: string[];
unique?: boolean;
};
export type DbFunctions = { export type DbFunctions = {
jsonObjectFrom<O>(expr: SelectQueryBuilderExpression<O>): RawBuilder<Simplify<O> | null>; jsonObjectFrom<O>(expr: SelectQueryBuilderExpression<O>): RawBuilder<Simplify<O> | null>;
jsonArrayFrom<O>(expr: SelectQueryBuilderExpression<O>): RawBuilder<Simplify<O>[]>; jsonArrayFrom<O>(expr: SelectQueryBuilderExpression<O>): RawBuilder<Simplify<O>[]>;
@@ -45,6 +76,9 @@ const CONN_SYMBOL = Symbol.for("bknd:connection");
export abstract class Connection<DB = any> { export abstract class Connection<DB = any> {
kysely: Kysely<DB>; kysely: Kysely<DB>;
protected readonly supported = {
batching: false,
};
constructor( constructor(
kysely: Kysely<DB>, kysely: Kysely<DB>,
@@ -65,17 +99,12 @@ export abstract class Connection<DB = any> {
return conn[CONN_SYMBOL] === true; return conn[CONN_SYMBOL] === true;
} }
getIntrospector(): ConnectionIntrospector { getIntrospector(): BaseIntrospector {
return this.kysely.introspection as ConnectionIntrospector; return this.kysely.introspection as any;
} }
supportsBatching(): boolean { supports(feature: keyof typeof this.supported): boolean {
return false; return this.supported[feature] ?? false;
}
// @todo: add if only first field is used in index
supportsIndices(): boolean {
return false;
} }
async ping(): Promise<boolean> { async ping(): Promise<boolean> {
@@ -97,7 +126,7 @@ export abstract class Connection<DB = any> {
[K in keyof Queries]: Awaited<ReturnType<Queries[K]["execute"]>>; [K in keyof Queries]: Awaited<ReturnType<Queries[K]["execute"]>>;
}> { }> {
// bypass if no client support // bypass if no client support
if (!this.supportsBatching()) { if (!this.supports("batching")) {
const data: any = []; const data: any = [];
for (const q of queries) { for (const q of queries) {
const result = await q.execute(); const result = await q.execute();
@@ -108,4 +137,19 @@ export abstract class Connection<DB = any> {
return await this.batch(queries); return await this.batch(queries);
} }
protected validateFieldSpecType(type: string): type is FieldSpec["type"] {
if (!FieldSpecTypes.includes(type as any)) {
throw new Error(
`Invalid field type "${type}". Allowed types are: ${FieldSpecTypes.join(", ")}`,
);
}
return true;
}
abstract getFieldSchema(spec: FieldSpec, strict?: boolean): SchemaResponse;
async close(): Promise<void> {
// no-op by default
}
} }

View File

@@ -1,7 +1,15 @@
import { Connection } from "./Connection"; import { Connection, type FieldSpec, type SchemaResponse } from "./Connection";
export class DummyConnection extends Connection { export class DummyConnection extends Connection {
protected override readonly supported = {
batching: true,
};
constructor() { constructor() {
super(undefined as any); super(undefined as any);
} }
override getFieldSchema(spec: FieldSpec, strict?: boolean): SchemaResponse {
throw new Error("Method not implemented.");
}
} }

View File

@@ -1,22 +0,0 @@
import type { Kysely, KyselyPlugin } from "kysely";
import { jsonArrayFrom, jsonBuildObject, jsonObjectFrom } from "kysely/helpers/sqlite";
import { Connection, type DbFunctions } from "./Connection";
export class SqliteConnection extends Connection {
constructor(kysely: Kysely<any>, fn: Partial<DbFunctions> = {}, plugins: KyselyPlugin[] = []) {
super(
kysely,
{
...fn,
jsonArrayFrom,
jsonObjectFrom,
jsonBuildObject,
},
plugins,
);
}
override supportsIndices(): boolean {
return true;
}
}

View File

@@ -1,164 +0,0 @@
import type {
DatabaseIntrospector,
DatabaseMetadata,
DatabaseMetadataOptions,
ExpressionBuilder,
Kysely,
SchemaMetadata,
TableMetadata,
} from "kysely";
import { DEFAULT_MIGRATION_LOCK_TABLE, DEFAULT_MIGRATION_TABLE, sql } from "kysely";
import type { ConnectionIntrospector, IndexMetadata } from "./Connection";
export type SqliteIntrospectorConfig = {
excludeTables?: string[];
};
export class SqliteIntrospector implements DatabaseIntrospector, ConnectionIntrospector {
readonly #db: Kysely<any>;
readonly _excludeTables: string[] = [];
constructor(db: Kysely<any>, config: SqliteIntrospectorConfig = {}) {
this.#db = db;
this._excludeTables = config.excludeTables ?? [];
}
async getSchemas(): Promise<SchemaMetadata[]> {
// Sqlite doesn't support schemas.
return [];
}
async getIndices(tbl_name?: string): Promise<IndexMetadata[]> {
const indices = await this.#db
.selectFrom("sqlite_master")
.where("type", "=", "index")
.$if(!!tbl_name, (eb) => eb.where("tbl_name", "=", tbl_name))
.select("name")
.$castTo<{ name: string }>()
.execute();
return Promise.all(indices.map(({ name }) => this.#getIndexMetadata(name)));
}
async #getIndexMetadata(index: string): Promise<IndexMetadata> {
const db = this.#db;
// Get the SQL that was used to create the index.
const indexDefinition = await db
.selectFrom("sqlite_master")
.where("name", "=", index)
.select(["sql", "tbl_name", "type"])
.$castTo<{ sql: string | undefined; tbl_name: string; type: string }>()
.executeTakeFirstOrThrow();
//console.log("--indexDefinition--", indexDefinition, index);
// check unique by looking for the word "unique" in the sql
const isUnique = indexDefinition.sql?.match(/unique/i) != null;
const columns = await db
.selectFrom(
sql<{
seqno: number;
cid: number;
name: string;
}>`pragma_index_info(${index})`.as("index_info"),
)
.select(["seqno", "cid", "name"])
.orderBy("cid")
.execute();
return {
name: index,
table: indexDefinition.tbl_name,
isUnique: isUnique,
columns: columns.map((col) => ({
name: col.name,
order: col.seqno,
})),
};
}
private excludeTables(tables: string[] = []) {
return (eb: ExpressionBuilder<any, any>) => {
const and = tables.map((t) => eb("name", "!=", t));
return eb.and(and);
};
}
async getTables(
options: DatabaseMetadataOptions = { withInternalKyselyTables: false },
): Promise<TableMetadata[]> {
let query = this.#db
.selectFrom("sqlite_master")
.where("type", "in", ["table", "view"])
.where("name", "not like", "sqlite_%")
.select("name")
.orderBy("name")
.$castTo<{ name: string }>();
if (!options.withInternalKyselyTables) {
query = query.where(
this.excludeTables([DEFAULT_MIGRATION_TABLE, DEFAULT_MIGRATION_LOCK_TABLE]),
);
}
if (this._excludeTables.length > 0) {
query = query.where(this.excludeTables(this._excludeTables));
}
const tables = await query.execute();
return Promise.all(tables.map(({ name }) => this.#getTableMetadata(name)));
}
async getMetadata(options?: DatabaseMetadataOptions): Promise<DatabaseMetadata> {
return {
tables: await this.getTables(options),
};
}
async #getTableMetadata(table: string): Promise<TableMetadata> {
const db = this.#db;
// Get the SQL that was used to create the table.
const tableDefinition = await db
.selectFrom("sqlite_master")
.where("name", "=", table)
.select(["sql", "type"])
.$castTo<{ sql: string | undefined; type: string }>()
.executeTakeFirstOrThrow();
// Try to find the name of the column that has `autoincrement` 🤦
const autoIncrementCol = tableDefinition.sql
?.split(/[\(\),]/)
?.find((it) => it.toLowerCase().includes("autoincrement"))
?.trimStart()
?.split(/\s+/)?.[0]
?.replace(/["`]/g, "");
const columns = await db
.selectFrom(
sql<{
name: string;
type: string;
notnull: 0 | 1;
dflt_value: any;
}>`pragma_table_info(${table})`.as("table_info"),
)
.select(["name", "type", "notnull", "dflt_value"])
.orderBy("cid")
.execute();
return {
name: table,
isView: tableDefinition.type === "view",
columns: columns.map((col) => ({
name: col.name,
dataType: col.type,
isNullable: !col.notnull,
isAutoIncrementing: col.name === autoIncrementCol,
hasDefaultValue: col.dflt_value != null,
comment: undefined,
})),
};
}
}

View File

@@ -0,0 +1,14 @@
export { BaseIntrospector } from "./BaseIntrospector";
export {
Connection,
type FieldSpec,
type IndexSpec,
type DbFunctions,
type SchemaResponse,
} from "./Connection";
// sqlite
export { LibsqlConnection, type LibSqlCredentials } from "./sqlite/LibsqlConnection";
export { SqliteConnection } from "./sqlite/SqliteConnection";
export { SqliteIntrospector } from "./sqlite/SqliteIntrospector";
export { SqliteLocalConnection } from "./sqlite/SqliteLocalConnection";

View File

@@ -1,9 +1,9 @@
import { type Client, type Config, type InStatement, createClient } from "@libsql/client"; import { type Client, type Config, type InStatement, createClient } from "@libsql/client";
import { LibsqlDialect } from "@libsql/kysely-libsql"; import { LibsqlDialect } from "@libsql/kysely-libsql";
import { FilterNumericKeysPlugin } from "data/plugins/FilterNumericKeysPlugin";
import { KyselyPluginRunner } from "data/plugins/KyselyPluginRunner";
import { type DatabaseIntrospector, Kysely, ParseJSONResultsPlugin } from "kysely"; import { type DatabaseIntrospector, Kysely, ParseJSONResultsPlugin } from "kysely";
import { FilterNumericKeysPlugin } from "../plugins/FilterNumericKeysPlugin"; import type { QB } from "../Connection";
import { KyselyPluginRunner } from "../plugins/KyselyPluginRunner";
import type { QB } from "./Connection";
import { SqliteConnection } from "./SqliteConnection"; import { SqliteConnection } from "./SqliteConnection";
import { SqliteIntrospector } from "./SqliteIntrospector"; import { SqliteIntrospector } from "./SqliteIntrospector";
@@ -12,21 +12,26 @@ export type LibSqlCredentials = Config & {
protocol?: (typeof LIBSQL_PROTOCOLS)[number]; protocol?: (typeof LIBSQL_PROTOCOLS)[number];
}; };
const plugins = [new FilterNumericKeysPlugin(), new ParseJSONResultsPlugin()];
class CustomLibsqlDialect extends LibsqlDialect { class CustomLibsqlDialect extends LibsqlDialect {
override createIntrospector(db: Kysely<any>): DatabaseIntrospector { override createIntrospector(db: Kysely<any>): DatabaseIntrospector {
return new SqliteIntrospector(db, { return new SqliteIntrospector(db, {
excludeTables: ["libsql_wasm_func_table"], excludeTables: ["libsql_wasm_func_table"],
plugins,
}); });
} }
} }
export class LibsqlConnection extends SqliteConnection { export class LibsqlConnection extends SqliteConnection {
private client: Client; private client: Client;
protected override readonly supported = {
batching: true,
};
constructor(client: Client); constructor(client: Client);
constructor(credentials: LibSqlCredentials); constructor(credentials: LibSqlCredentials);
constructor(clientOrCredentials: Client | LibSqlCredentials) { constructor(clientOrCredentials: Client | LibSqlCredentials) {
const plugins = [new FilterNumericKeysPlugin(), new ParseJSONResultsPlugin()];
let client: Client; let client: Client;
if (clientOrCredentials && "url" in clientOrCredentials) { if (clientOrCredentials && "url" in clientOrCredentials) {
let { url, authToken, protocol } = clientOrCredentials; let { url, authToken, protocol } = clientOrCredentials;
@@ -51,14 +56,6 @@ export class LibsqlConnection extends SqliteConnection {
this.client = client; this.client = client;
} }
override supportsBatching(): boolean {
return true;
}
override supportsIndices(): boolean {
return true;
}
getClient(): Client { getClient(): Client {
return this.client; return this.client;
} }

View File

@@ -0,0 +1,46 @@
import type { ColumnDataType, ColumnDefinitionBuilder, Kysely, KyselyPlugin } from "kysely";
import { jsonArrayFrom, jsonBuildObject, jsonObjectFrom } from "kysely/helpers/sqlite";
import { Connection, type DbFunctions, type FieldSpec, type SchemaResponse } from "../Connection";
export class SqliteConnection extends Connection {
constructor(kysely: Kysely<any>, fn: Partial<DbFunctions> = {}, plugins: KyselyPlugin[] = []) {
super(
kysely,
{
...fn,
jsonArrayFrom,
jsonObjectFrom,
jsonBuildObject,
},
plugins,
);
}
override getFieldSchema(spec: FieldSpec): SchemaResponse {
this.validateFieldSpecType(spec.type);
let type: ColumnDataType = spec.type;
switch (spec.type) {
case "json":
type = "text";
break;
}
return [
spec.name,
type,
(col: ColumnDefinitionBuilder) => {
if (spec.primary) {
return col.primaryKey().notNull().autoIncrement();
}
if (spec.references) {
let relCol = col.references(spec.references);
if (spec.onDelete) relCol = relCol.onDelete(spec.onDelete);
if (spec.onUpdate) relCol = relCol.onUpdate(spec.onUpdate);
return relCol;
}
return spec.nullable ? col : col.notNull();
},
] as const;
}
}

View File

@@ -0,0 +1,95 @@
import { type SchemaMetadata, sql } from "kysely";
import { BaseIntrospector } from "../BaseIntrospector";
export type SqliteSchemaSpec = {
name: string;
type: "table" | "view";
sql: string;
columns: {
name: string;
type: string;
notnull: number;
dflt_value: any;
pk: number;
}[];
indices: {
name: string;
origin: string;
partial: number;
sql: string;
columns: { name: string; seqno: number }[];
}[];
};
export class SqliteIntrospector extends BaseIntrospector {
async getSchemas(): Promise<SchemaMetadata[]> {
// Sqlite doesn't support schemas.
return [];
}
async getSchemaSpec() {
const query = sql`
SELECT m.name, m.type, m.sql,
(SELECT json_group_array(
json_object(
'name', p.name,
'type', p.type,
'notnull', p."notnull",
'default', p.dflt_value,
'primary_key', p.pk
)) FROM pragma_table_info(m.name) p) AS columns,
(SELECT json_group_array(
json_object(
'name', i.name,
'origin', i.origin,
'partial', i.partial,
'sql', im.sql,
'columns', (SELECT json_group_array(
json_object(
'name', ii.name,
'seqno', ii.seqno
)) FROM pragma_index_info(i.name) ii)
)) FROM pragma_index_list(m.name) i
LEFT JOIN sqlite_master im ON im.name = i.name
AND im.type = 'index'
) AS indices
FROM sqlite_master m
WHERE m.type IN ('table', 'view')
and m.name not like 'sqlite_%'
and m.name not in (${this.getExcludedTableNames().join(", ")})
`;
const tables = await this.executeWithPlugins<SqliteSchemaSpec[]>(query);
return tables.map((table) => ({
name: table.name,
isView: table.type === "view",
columns: table.columns.map((col) => {
const autoIncrementCol = table.sql
?.split(/[\(\),]/)
?.find((it) => it.toLowerCase().includes("autoincrement"))
?.trimStart()
?.split(/\s+/)?.[0]
?.replace(/["`]/g, "");
return {
name: col.name,
dataType: col.type,
isNullable: !col.notnull,
isAutoIncrementing: col.name === autoIncrementCol,
hasDefaultValue: col.dflt_value != null,
comment: undefined,
};
}),
indices: table.indices.map((index) => ({
name: index.name,
table: table.name,
isUnique: index.sql?.match(/unique/i) != null,
columns: index.columns.map((col) => ({
name: col.name,
order: col.seqno,
})),
})),
}));
}
}

View File

@@ -1,30 +1,31 @@
import { type DatabaseIntrospector, ParseJSONResultsPlugin, type SqliteDatabase } from "kysely"; import {
import { Kysely, SqliteDialect } from "kysely"; type DatabaseIntrospector,
Kysely,
ParseJSONResultsPlugin,
type SqliteDatabase,
SqliteDialect,
} from "kysely";
import { SqliteConnection } from "./SqliteConnection"; import { SqliteConnection } from "./SqliteConnection";
import { SqliteIntrospector } from "./SqliteIntrospector"; import { SqliteIntrospector } from "./SqliteIntrospector";
const plugins = [new ParseJSONResultsPlugin()];
class CustomSqliteDialect extends SqliteDialect { class CustomSqliteDialect extends SqliteDialect {
override createIntrospector(db: Kysely<any>): DatabaseIntrospector { override createIntrospector(db: Kysely<any>): DatabaseIntrospector {
return new SqliteIntrospector(db, { return new SqliteIntrospector(db, {
excludeTables: ["test_table"], excludeTables: ["test_table"],
plugins,
}); });
} }
} }
export class SqliteLocalConnection extends SqliteConnection { export class SqliteLocalConnection extends SqliteConnection {
constructor(private database: SqliteDatabase) { constructor(private database: SqliteDatabase) {
const plugins = [new ParseJSONResultsPlugin()];
const kysely = new Kysely({ const kysely = new Kysely({
dialect: new CustomSqliteDialect({ database }), dialect: new CustomSqliteDialect({ database }),
plugins, plugins,
//log: ["query"],
}); });
super(kysely); super(kysely, {}, plugins);
this.plugins = plugins;
}
override supportsIndices(): boolean {
return true;
} }
} }

View File

@@ -14,6 +14,7 @@ import {
WithBuilder, WithBuilder,
} from "../index"; } from "../index";
import { JoinBuilder } from "./JoinBuilder"; import { JoinBuilder } from "./JoinBuilder";
import { ensureInt } from "core/utils";
export type RepositoryQB = SelectQueryBuilder<any, any, any>; export type RepositoryQB = SelectQueryBuilder<any, any, any>;
@@ -225,8 +226,9 @@ export class Repository<TBD extends object = DefaultDB, TB extends keyof TBD = a
data, data,
meta: { meta: {
...payload.meta, ...payload.meta,
total: _total[0]?.total ?? 0, // parsing is important since pg returns string
count: _count[0]?.count ?? 0, // @todo: better graceful method total: ensureInt(_total[0]?.total),
count: ensureInt(_count[0]?.count),
items: result.length, items: result.length,
time, time,
}, },

View File

@@ -32,9 +32,11 @@ export class BooleanField<Required extends true | false = false> extends Field<
} }
} }
schema() { override schema() {
// @todo: potentially use "integer" instead return Object.freeze({
return this.useSchemaHelper("boolean"); ...super.schema()!,
type: "boolean",
});
} }
override getHtmlConfig() { override getHtmlConfig() {

View File

@@ -32,8 +32,10 @@ export class DateField<Required extends true | false = false> extends Field<
} }
override schema() { override schema() {
const type = this.config.type === "datetime" ? "datetime" : "date"; return Object.freeze({
return this.useSchemaHelper(type); ...super.schema()!,
type: this.config.type === "datetime" ? "datetime" : "date",
});
} }
override getHtmlConfig() { override getHtmlConfig() {

View File

@@ -66,10 +66,6 @@ export class EnumField<Required extends true | false = false, TypeOverride = str
return enumFieldConfigSchema; return enumFieldConfigSchema;
} }
override schema() {
return this.useSchemaHelper("text");
}
getOptions(): { label: string; value: string }[] { getOptions(): { label: string; value: string }[] {
const options = this.config?.options ?? { type: "strings", values: [] }; const options = this.config?.options ?? { type: "strings", values: [] };

View File

@@ -1,16 +1,16 @@
import { import {
parse,
snakeToPascalWithSpaces,
type Static, type Static,
StringEnum, StringEnum,
type TSchema, type TSchema,
Type, Type,
TypeInvalidError, TypeInvalidError,
parse,
snakeToPascalWithSpaces,
} from "core/utils"; } from "core/utils";
import type { ColumnBuilderCallback, ColumnDataType, ColumnDefinitionBuilder } from "kysely";
import type { HTMLInputTypeAttribute, InputHTMLAttributes } from "react"; import type { HTMLInputTypeAttribute, InputHTMLAttributes } from "react";
import type { EntityManager } from "../entities"; import type { EntityManager } from "../entities";
import { InvalidFieldConfigException, TransformPersistFailedException } from "../errors"; import { InvalidFieldConfigException, TransformPersistFailedException } from "../errors";
import type { FieldSpec } from "data/connection/Connection";
// @todo: contexts need to be reworked // @todo: contexts need to be reworked
// e.g. "table" is irrelevant, because if read is not given, it fails // e.g. "table" is irrelevant, because if read is not given, it fails
@@ -67,8 +67,6 @@ export const baseFieldConfigSchema = Type.Object(
); );
export type BaseFieldConfig = Static<typeof baseFieldConfigSchema>; export type BaseFieldConfig = Static<typeof baseFieldConfigSchema>;
export type SchemaResponse = [string, ColumnDataType, ColumnBuilderCallback] | undefined;
export abstract class Field< export abstract class Field<
Config extends BaseFieldConfig = BaseFieldConfig, Config extends BaseFieldConfig = BaseFieldConfig,
Type = any, Type = any,
@@ -106,25 +104,18 @@ export abstract class Field<
protected abstract getSchema(): TSchema; protected abstract getSchema(): TSchema;
protected useSchemaHelper(
type: ColumnDataType,
builder?: (col: ColumnDefinitionBuilder) => ColumnDefinitionBuilder,
): SchemaResponse {
return [
this.name,
type,
(col: ColumnDefinitionBuilder) => {
if (builder) return builder(col);
return col;
},
];
}
/** /**
* Used in SchemaManager.ts * Used in SchemaManager.ts
* @param em * @param em
*/ */
abstract schema(em: EntityManager<any>): SchemaResponse; schema(): FieldSpec | undefined {
return Object.freeze({
name: this.name,
type: "text",
nullable: true,
dflt: this.getDefault(),
});
}
hasDefault() { hasDefault() {
return this.config.default_value !== undefined; return this.config.default_value !== undefined;

View File

@@ -18,10 +18,6 @@ export class JsonField<Required extends true | false = false, TypeOverride = obj
return jsonFieldConfigSchema; return jsonFieldConfigSchema;
} }
override schema() {
return this.useSchemaHelper("text");
}
/** /**
* Transform value after retrieving from database * Transform value after retrieving from database
* @param value * @param value

View File

@@ -36,10 +36,6 @@ export class JsonSchemaField<
return jsonSchemaFieldConfigSchema; return jsonSchemaFieldConfigSchema;
} }
override schema() {
return this.useSchemaHelper("text");
}
getJsonSchema(): JsonSchema { getJsonSchema(): JsonSchema {
return this.config?.schema as JsonSchema; return this.config?.schema as JsonSchema;
} }

View File

@@ -44,8 +44,11 @@ export class NumberField<Required extends true | false = false> extends Field<
}; };
} }
schema() { override schema() {
return this.useSchemaHelper("integer"); return Object.freeze({
...super.schema()!,
type: "integer",
});
} }
override getValue(value: any, context?: TRenderContext): any { override getValue(value: any, context?: TRenderContext): any {

View File

@@ -30,9 +30,12 @@ export class PrimaryField<Required extends true | false = false> extends Field<
return baseFieldConfigSchema; return baseFieldConfigSchema;
} }
schema() { override schema() {
return this.useSchemaHelper("integer", (col) => { return Object.freeze({
return col.primaryKey().notNull().autoIncrement(); type: "integer",
name: this.name,
primary: true,
nullable: false,
}); });
} }

View File

@@ -47,10 +47,6 @@ export class TextField<Required extends true | false = false> extends Field<
return textFieldConfigSchema; return textFieldConfigSchema;
} }
override schema() {
return this.useSchemaHelper("text");
}
override getHtmlConfig() { override getHtmlConfig() {
if (this.config.html_config) { if (this.config.html_config) {
return this.config.html_config as any; return this.config.html_config as any;

View File

@@ -17,7 +17,7 @@ export class VirtualField extends Field<VirtualFieldConfig> {
return virtualFieldConfigSchema; return virtualFieldConfigSchema;
} }
schema() { override schema() {
return undefined; return undefined;
} }

View File

@@ -5,6 +5,7 @@ export * from "./entities";
export * from "./relations"; export * from "./relations";
export * from "./schema/SchemaManager"; export * from "./schema/SchemaManager";
export * from "./prototype"; export * from "./prototype";
export * from "./connection";
export { export {
type RepoQuery, type RepoQuery,
@@ -14,11 +15,6 @@ export {
whereSchema, whereSchema,
} from "./server/data-query-impl"; } from "./server/data-query-impl";
export { Connection } from "./connection/Connection";
export { LibsqlConnection, type LibSqlCredentials } from "./connection/LibsqlConnection";
export { SqliteConnection } from "./connection/SqliteConnection";
export { SqliteLocalConnection } from "./connection/SqliteLocalConnection";
export { SqliteIntrospector } from "./connection/SqliteIntrospector";
export { KyselyPluginRunner } from "./plugins/KyselyPluginRunner"; export { KyselyPluginRunner } from "./plugins/KyselyPluginRunner";
export { constructEntity, constructRelation } from "./schema/constructor"; export { constructEntity, constructRelation } from "./schema/constructor";

View File

@@ -1,6 +1,6 @@
import { type Static, StringEnum, Type } from "core/utils"; import { type Static, StringEnum, Type } from "core/utils";
import type { EntityManager } from "../entities"; import type { EntityManager } from "../entities";
import { Field, type SchemaResponse, baseFieldConfigSchema } from "../fields"; import { Field, baseFieldConfigSchema } from "../fields";
import type { EntityRelation } from "./EntityRelation"; import type { EntityRelation } from "./EntityRelation";
import type { EntityRelationAnchor } from "./EntityRelationAnchor"; import type { EntityRelationAnchor } from "./EntityRelationAnchor";
@@ -72,14 +72,12 @@ export class RelationField extends Field<RelationFieldConfig> {
return this.config.target_field!; return this.config.target_field!;
} }
override schema(): SchemaResponse { override schema() {
return this.useSchemaHelper("integer", (col) => { return Object.freeze({
//col.references('person.id').onDelete('cascade').notNull() ...super.schema()!,
// @todo: implement cascading? type: "integer",
references: `${this.config.target}.${this.config.target_field}`,
return col onDelete: this.config.on_delete ?? "set null",
.references(`${this.config.target}.${this.config.target_field}`)
.onDelete(this.config.on_delete ?? "set null");
}); });
} }

View File

@@ -1,7 +1,7 @@
import type { AlterTableColumnAlteringBuilder, CompiledQuery, TableMetadata } from "kysely"; import type { CompiledQuery, TableMetadata } from "kysely";
import type { IndexMetadata } from "../connection/Connection"; import type { IndexMetadata, SchemaResponse } from "../connection/Connection";
import type { Entity, EntityManager } from "../entities"; import type { Entity, EntityManager } from "../entities";
import { PrimaryField, type SchemaResponse } from "../fields"; import { PrimaryField } from "../fields";
type IntrospectedTable = TableMetadata & { type IntrospectedTable = TableMetadata & {
indices: IndexMetadata[]; indices: IndexMetadata[];
@@ -49,10 +49,6 @@ export class SchemaManager {
constructor(private readonly em: EntityManager<any>) {} constructor(private readonly em: EntityManager<any>) {}
private getIntrospector() { private getIntrospector() {
if (!this.em.connection.supportsIndices()) {
throw new Error("Indices are not supported by the current connection");
}
return this.em.connection.getIntrospector(); return this.em.connection.getIntrospector();
} }
@@ -239,10 +235,9 @@ export class SchemaManager {
for (const column of columns) { for (const column of columns) {
const field = this.em.entity(table).getField(column)!; const field = this.em.entity(table).getField(column)!;
const fieldSchema = field.schema(this.em); const fieldSchema = field.schema();
if (Array.isArray(fieldSchema) && fieldSchema.length === 3) { if (fieldSchema) {
schemas.push(fieldSchema); schemas.push(this.em.connection.getFieldSchema(fieldSchema));
//throw new Error(`Field "${field.name}" on entity "${table}" has no schema`);
} }
} }
@@ -330,6 +325,7 @@ export class SchemaManager {
if (local_updates === 0) continue; if (local_updates === 0) continue;
// iterate through built qbs // iterate through built qbs
// @todo: run in batches
for (const qb of qbs) { for (const qb of qbs) {
const { sql, parameters } = qb.compile(); const { sql, parameters } = qb.compile();
statements.push({ sql, parameters }); statements.push({ sql, parameters });

View File

@@ -47,7 +47,7 @@ export class MediaField<
return this.config.min_items; return this.config.min_items;
} }
schema() { override schema() {
return undefined; return undefined;
} }

View File

@@ -138,6 +138,22 @@
"react-dom": ">=18", "react-dom": ">=18",
}, },
}, },
"packages/postgres": {
"name": "@bknd/postgres",
"version": "0.0.1",
"dependencies": {
"kysely": "^0.27.6",
"pg": "^8.12.0",
},
"devDependencies": {
"@types/bun": "^1.2.5",
"@types/node": "^22.13.10",
"@types/pg": "^8.11.11",
"bknd": "workspace:*",
"tsup": "^8.4.0",
"typescript": "^5.6.3",
},
},
}, },
"packages": { "packages": {
"@alloc/quick-lru": ["@alloc/quick-lru@5.2.0", "", {}, "sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw=="], "@alloc/quick-lru": ["@alloc/quick-lru@5.2.0", "", {}, "sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw=="],
@@ -446,6 +462,8 @@
"@bknd/plasmic": ["@bknd/plasmic@workspace:packages/plasmic"], "@bknd/plasmic": ["@bknd/plasmic@workspace:packages/plasmic"],
"@bknd/postgres": ["@bknd/postgres@workspace:packages/postgres"],
"@bluwy/giget-core": ["@bluwy/giget-core@0.1.2", "", { "dependencies": { "tar": "^6.2.1" } }, "sha512-v9f+ueUOKkZCDKiCm0yxKtYgYNLD9zlKarNux0NSXOvNm94QEYL3RlMpGKgD2hq44pbF2qWqEmHnCvmk56kPJw=="], "@bluwy/giget-core": ["@bluwy/giget-core@0.1.2", "", { "dependencies": { "tar": "^6.2.1" } }, "sha512-v9f+ueUOKkZCDKiCm0yxKtYgYNLD9zlKarNux0NSXOvNm94QEYL3RlMpGKgD2hq44pbF2qWqEmHnCvmk56kPJw=="],
"@cfworker/json-schema": ["@cfworker/json-schema@4.1.1", "", {}, "sha512-gAmrUZSGtKc3AiBL71iNWxDsyUC5uMaKKGdvzYsBoTW/xi42JQHl7eKV2OYzCUqvc+D2RCcf7EXY2iCyFIk6og=="], "@cfworker/json-schema": ["@cfworker/json-schema@4.1.1", "", {}, "sha512-gAmrUZSGtKc3AiBL71iNWxDsyUC5uMaKKGdvzYsBoTW/xi42JQHl7eKV2OYzCUqvc+D2RCcf7EXY2iCyFIk6og=="],
@@ -1170,6 +1188,8 @@
"@types/parse-json": ["@types/parse-json@4.0.2", "", {}, "sha512-dISoDXWWQwUquiKsyZ4Ng+HX2KsPL7LyHKHQwgGFEA3IaKac4Obd+h2a/a6waisAoepJlBcx9paWqjA8/HVjCw=="], "@types/parse-json": ["@types/parse-json@4.0.2", "", {}, "sha512-dISoDXWWQwUquiKsyZ4Ng+HX2KsPL7LyHKHQwgGFEA3IaKac4Obd+h2a/a6waisAoepJlBcx9paWqjA8/HVjCw=="],
"@types/pg": ["@types/pg@8.11.11", "", { "dependencies": { "@types/node": "*", "pg-protocol": "*", "pg-types": "^4.0.1" } }, "sha512-kGT1qKM8wJQ5qlawUrEkXgvMSXoV213KfMGXcwfDwUIfUHXqXYXOfS1nE1LINRJVVVx5wCm70XnFlMHaIcQAfw=="],
"@types/prettier": ["@types/prettier@1.19.1", "", {}, "sha512-5qOlnZscTn4xxM5MeGXAMOsIOIKIbh9e85zJWfBRVPlRMEVawzoPhINYbRGkBZCI8LxvBe7tJCdWiarA99OZfQ=="], "@types/prettier": ["@types/prettier@1.19.1", "", {}, "sha512-5qOlnZscTn4xxM5MeGXAMOsIOIKIbh9e85zJWfBRVPlRMEVawzoPhINYbRGkBZCI8LxvBe7tJCdWiarA99OZfQ=="],
"@types/react": ["@types/react@19.0.10", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-JuRQ9KXLEjaUNjTWpzuR231Z2WpIwczOkBEIvbHNCzQefFIT0L8IqE6NV6ULLyC1SI/i234JnDoMkfg+RjQj2g=="], "@types/react": ["@types/react@19.0.10", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-JuRQ9KXLEjaUNjTWpzuR231Z2WpIwczOkBEIvbHNCzQefFIT0L8IqE6NV6ULLyC1SI/i234JnDoMkfg+RjQj2g=="],
@@ -2526,6 +2546,8 @@
"object.values": ["object.values@1.2.1", "", { "dependencies": { "call-bind": "^1.0.8", "call-bound": "^1.0.3", "define-properties": "^1.2.1", "es-object-atoms": "^1.0.0" } }, "sha512-gXah6aZrcUxjWg2zR2MwouP2eHlCBzdV4pygudehaKXSGW4v2AsRQUK+lwwXhii6KFZcunEnmSUoYp5CXibxtA=="], "object.values": ["object.values@1.2.1", "", { "dependencies": { "call-bind": "^1.0.8", "call-bound": "^1.0.3", "define-properties": "^1.2.1", "es-object-atoms": "^1.0.0" } }, "sha512-gXah6aZrcUxjWg2zR2MwouP2eHlCBzdV4pygudehaKXSGW4v2AsRQUK+lwwXhii6KFZcunEnmSUoYp5CXibxtA=="],
"obuf": ["obuf@1.1.2", "", {}, "sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg=="],
"ohash": ["ohash@1.1.6", "", {}, "sha512-TBu7PtV8YkAZn0tSxobKY2n2aAQva936lhRrj6957aDaCf9IEtqsKbgMzXE/F/sjqYOwmrukeORHNLe5glk7Cg=="], "ohash": ["ohash@1.1.6", "", {}, "sha512-TBu7PtV8YkAZn0tSxobKY2n2aAQva936lhRrj6957aDaCf9IEtqsKbgMzXE/F/sjqYOwmrukeORHNLe5glk7Cg=="],
"on-exit-leak-free": ["on-exit-leak-free@0.2.0", "", {}, "sha512-dqaz3u44QbRXQooZLTUKU41ZrzYrcvLISVgbrzbyCMxpmSLJvZ3ZamIJIZ29P6OhZIkNIQKosdeM6t1LYbA9hg=="], "on-exit-leak-free": ["on-exit-leak-free@0.2.0", "", {}, "sha512-dqaz3u44QbRXQooZLTUKU41ZrzYrcvLISVgbrzbyCMxpmSLJvZ3ZamIJIZ29P6OhZIkNIQKosdeM6t1LYbA9hg=="],
@@ -2596,6 +2618,24 @@
"performance-now": ["performance-now@2.1.0", "", {}, "sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow=="], "performance-now": ["performance-now@2.1.0", "", {}, "sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow=="],
"pg": ["pg@8.14.0", "", { "dependencies": { "pg-connection-string": "^2.7.0", "pg-pool": "^3.8.0", "pg-protocol": "^1.8.0", "pg-types": "^2.1.0", "pgpass": "1.x" }, "optionalDependencies": { "pg-cloudflare": "^1.1.1" }, "peerDependencies": { "pg-native": ">=3.0.1" }, "optionalPeers": ["pg-native"] }, "sha512-nXbVpyoaXVmdqlKEzToFf37qzyeeh7mbiXsnoWvstSqohj88yaa/I/Rq/HEVn2QPSZEuLIJa/jSpRDyzjEx4FQ=="],
"pg-cloudflare": ["pg-cloudflare@1.1.1", "", {}, "sha512-xWPagP/4B6BgFO+EKz3JONXv3YDgvkbVrGw2mTo3D6tVDQRh1e7cqVGvyR3BE+eQgAvx1XhW/iEASj4/jCWl3Q=="],
"pg-connection-string": ["pg-connection-string@2.7.0", "", {}, "sha512-PI2W9mv53rXJQEOb8xNR8lH7Hr+EKa6oJa38zsK0S/ky2er16ios1wLKhZyxzD7jUReiWokc9WK5nxSnC7W1TA=="],
"pg-int8": ["pg-int8@1.0.1", "", {}, "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw=="],
"pg-numeric": ["pg-numeric@1.0.2", "", {}, "sha512-BM/Thnrw5jm2kKLE5uJkXqqExRUY/toLHda65XgFTBTFYZyopbKjBe29Ii3RbkvlsMoFwD+tHeGaCjjv0gHlyw=="],
"pg-pool": ["pg-pool@3.8.0", "", { "peerDependencies": { "pg": ">=8.0" } }, "sha512-VBw3jiVm6ZOdLBTIcXLNdSotb6Iy3uOCwDGFAksZCXmi10nyRvnP2v3jl4d+IsLYRyXf6o9hIm/ZtUzlByNUdw=="],
"pg-protocol": ["pg-protocol@1.8.0", "", {}, "sha512-jvuYlEkL03NRvOoyoRktBK7+qU5kOvlAwvmrH8sr3wbLrOdVWsRxQfz8mMy9sZFsqJ1hEWNfdWKI4SAmoL+j7g=="],
"pg-types": ["pg-types@4.0.2", "", { "dependencies": { "pg-int8": "1.0.1", "pg-numeric": "1.0.2", "postgres-array": "~3.0.1", "postgres-bytea": "~3.0.0", "postgres-date": "~2.1.0", "postgres-interval": "^3.0.0", "postgres-range": "^1.1.1" } }, "sha512-cRL3JpS3lKMGsKaWndugWQoLOCoP+Cic8oseVcbr0qhPzYD5DWXK+RZ9LY9wxRf7RQia4SCwQlXk0q6FCPrVng=="],
"pgpass": ["pgpass@1.0.5", "", { "dependencies": { "split2": "^4.1.0" } }, "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug=="],
"picocolors": ["picocolors@1.1.1", "", {}, "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="], "picocolors": ["picocolors@1.1.1", "", {}, "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="],
"picomatch": ["picomatch@4.0.2", "", {}, "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg=="], "picomatch": ["picomatch@4.0.2", "", {}, "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg=="],
@@ -2644,6 +2684,16 @@
"postcss-value-parser": ["postcss-value-parser@4.2.0", "", {}, "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ=="], "postcss-value-parser": ["postcss-value-parser@4.2.0", "", {}, "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ=="],
"postgres-array": ["postgres-array@3.0.4", "", {}, "sha512-nAUSGfSDGOaOAEGwqsRY27GPOea7CNipJPOA7lPbdEpx5Kg3qzdP0AaWC5MlhTWV9s4hFX39nomVZ+C4tnGOJQ=="],
"postgres-bytea": ["postgres-bytea@3.0.0", "", { "dependencies": { "obuf": "~1.1.2" } }, "sha512-CNd4jim9RFPkObHSjVHlVrxoVQXz7quwNFpz7RY1okNNme49+sVyiTvTRobiLV548Hx/hb1BG+iE7h9493WzFw=="],
"postgres-date": ["postgres-date@2.1.0", "", {}, "sha512-K7Juri8gtgXVcDfZttFKVmhglp7epKb1K4pgrkLxehjqkrgPhfG6OO8LHLkfaqkbpjNRnra018XwAr1yQFWGcA=="],
"postgres-interval": ["postgres-interval@3.0.0", "", {}, "sha512-BSNDnbyZCXSxgA+1f5UU2GmwhoI0aU5yMxRGO8CdFEcY2BQF9xm/7MqKnYoM1nJDk8nONNWDk9WeSmePFhQdlw=="],
"postgres-range": ["postgres-range@1.1.4", "", {}, "sha512-i/hbxIE9803Alj/6ytL7UHQxRvZkI9O4Sy+J3HGc4F4oo/2eQAjTSNJ0bfxyse3bH0nuVesCk+3IRLaMtG3H6w=="],
"prelude-ls": ["prelude-ls@1.1.2", "", {}, "sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w=="], "prelude-ls": ["prelude-ls@1.1.2", "", {}, "sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w=="],
"prettier": ["prettier@1.19.1", "", { "bin": { "prettier": "./bin-prettier.js" } }, "sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew=="], "prettier": ["prettier@1.19.1", "", { "bin": { "prettier": "./bin-prettier.js" } }, "sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew=="],
@@ -3956,6 +4006,8 @@
"peek-stream/duplexify": ["duplexify@3.7.1", "", { "dependencies": { "end-of-stream": "^1.0.0", "inherits": "^2.0.1", "readable-stream": "^2.0.0", "stream-shift": "^1.0.0" } }, "sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g=="], "peek-stream/duplexify": ["duplexify@3.7.1", "", { "dependencies": { "end-of-stream": "^1.0.0", "inherits": "^2.0.1", "readable-stream": "^2.0.0", "stream-shift": "^1.0.0" } }, "sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g=="],
"pg/pg-types": ["pg-types@2.2.0", "", { "dependencies": { "pg-int8": "1.0.1", "postgres-array": "~2.0.0", "postgres-bytea": "~1.0.0", "postgres-date": "~1.0.4", "postgres-interval": "^1.1.0" } }, "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA=="],
"pkg-types/pathe": ["pathe@2.0.3", "", {}, "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w=="], "pkg-types/pathe": ["pathe@2.0.3", "", {}, "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w=="],
"pretty-format/ansi-styles": ["ansi-styles@5.2.0", "", {}, "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA=="], "pretty-format/ansi-styles": ["ansi-styles@5.2.0", "", {}, "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA=="],
@@ -4326,6 +4378,14 @@
"ora/log-symbols/chalk": ["chalk@2.4.2", "", { "dependencies": { "ansi-styles": "^3.2.1", "escape-string-regexp": "^1.0.5", "supports-color": "^5.3.0" } }, "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ=="], "ora/log-symbols/chalk": ["chalk@2.4.2", "", { "dependencies": { "ansi-styles": "^3.2.1", "escape-string-regexp": "^1.0.5", "supports-color": "^5.3.0" } }, "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ=="],
"pg/pg-types/postgres-array": ["postgres-array@2.0.0", "", {}, "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA=="],
"pg/pg-types/postgres-bytea": ["postgres-bytea@1.0.0", "", {}, "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w=="],
"pg/pg-types/postgres-date": ["postgres-date@1.0.7", "", {}, "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q=="],
"pg/pg-types/postgres-interval": ["postgres-interval@1.2.0", "", { "dependencies": { "xtend": "^4.0.0" } }, "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ=="],
"progress-estimator/chalk/ansi-styles": ["ansi-styles@3.2.1", "", { "dependencies": { "color-convert": "^1.9.0" } }, "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA=="], "progress-estimator/chalk/ansi-styles": ["ansi-styles@3.2.1", "", { "dependencies": { "color-convert": "^1.9.0" } }, "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA=="],
"progress-estimator/chalk/escape-string-regexp": ["escape-string-regexp@1.0.5", "", {}, "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg=="], "progress-estimator/chalk/escape-string-regexp": ["escape-string-regexp@1.0.5", "", {}, "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg=="],

View File

@@ -0,0 +1,50 @@
# Postgres adapter for `bknd` (experimental)
This package adds an adapter to use a Postgres database with `bknd`. It is based on `pg` and the driver included in `kysely`.
## Installation
Install the adapter with:
```bash
npm install @bknd/postgres
```
## Usage
Create a connection:
```ts
import { PostgresConnection } from "@bknd/postgres";
const connection = new PostgresConnection({
host: "localhost",
port: 5432,
user: "postgres",
password: "postgres",
database: "bknd",
});
```
Use the connection depending on which framework or runtime you are using. E.g., when using `createApp`, you can use the connection as follows:
```ts
import { createApp } from "bknd";
import { PostgresConnection } from "@bknd/postgres";
const connection = new PostgresConnection();
const app = createApp({ connection });
```
Or if you're using it with a framework, say Next.js, you can add the connection object where you're initializing the app:
```ts
// e.g. in src/app/api/[[...bknd]]/route.ts
import { serve } from "bknd/adapter/nextjs";
import { PostgresConnection } from "@bknd/postgres";
const connection = new PostgresConnection();
const handler = serve({
connection
})
// ...
```
For more information about how to integrate Next.js in general, check out the [Next.js documentation](https://docs.bknd.io/integration/nextjs).

View File

@@ -0,0 +1,37 @@
{
"name": "@bknd/postgres",
"version": "0.0.1",
"type": "module",
"main": "dist/index.js",
"module": "dist/index.js",
"types": "dist/index.d.ts",
"scripts": {
"build": "tsup",
"test": "bun test",
"docker:start": "docker run --rm --name bknd-test-postgres -d -e POSTGRES_PASSWORD=postgres -e POSTGRES_USER=postgres -e POSTGRES_DB=bknd -p 5430:5432 postgres:17",
"docker:stop": "docker stop bknd-test-postgres"
},
"dependencies": {
"pg": "^8.12.0",
"kysely": "^0.27.6"
},
"devDependencies": {
"@types/bun": "^1.2.5",
"@types/node": "^22.13.10",
"@types/pg": "^8.11.11",
"bknd": "workspace:*",
"tsup": "^8.4.0",
"typescript": "^5.6.3"
},
"tsup": {
"entry": ["src/index.ts"],
"format": ["esm"],
"target": "es2022",
"clean": true,
"minify": true,
"dts": true,
"metafile": true,
"external": ["bknd", "pg", "kysely"]
},
"files": ["dist", "README.md", "!*.map", "!metafile*.json"]
}

View File

@@ -0,0 +1,106 @@
import { Connection, type FieldSpec, type SchemaResponse } from "bknd/data";
import {
type ColumnDataType,
type ColumnDefinitionBuilder,
type DatabaseIntrospector,
Kysely,
ParseJSONResultsPlugin,
PostgresDialect,
type SelectQueryBuilder,
} from "kysely";
import { jsonArrayFrom, jsonBuildObject, jsonObjectFrom } from "kysely/helpers/postgres";
import pg from "pg";
import { PostgresIntrospector } from "./PostgresIntrospector";
// Connection options are forwarded verbatim to `pg.Pool`.
export type PostgresConnectionConfig = pg.PoolConfig;
// Shorthand for an untyped kysely select builder, used by `batch()` below.
export type QB = SelectQueryBuilder<any, any, any>;
// Kysely plugins shared by the connection and the introspector queries.
const plugins = [new ParseJSONResultsPlugin()];
// Dialect that swaps kysely's default Postgres introspector for our own,
// which returns table/column/index metadata in the shape bknd expects.
class CustomPostgresDialect extends PostgresDialect {
   override createIntrospector(db: Kysely<any>): DatabaseIntrospector {
      return new PostgresIntrospector(db, {
         excludeTables: [],
      });
   }
}
/**
 * Postgres adapter for bknd, backed by `pg.Pool` and kysely's PostgresDialect.
 * Supports query batching via transactions (see `batch()`).
 */
export class PostgresConnection extends Connection {
   protected override readonly supported = {
      batching: true,
   };
   private pool: pg.Pool;

   constructor(config: PostgresConnectionConfig) {
      const pool = new pg.Pool(config);
      const kysely = new Kysely({
         dialect: new CustomPostgresDialect({
            pool,
         }),
         plugins,
         //log: ["query", "error"],
      });
      super(
         kysely,
         {
            jsonArrayFrom,
            jsonBuildObject,
            jsonObjectFrom,
         },
         plugins,
      );
      this.pool = pool;
   }

   /**
    * Translate a generic bknd FieldSpec into a kysely column definition tuple
    * of [name, postgres column type, definition callback].
    */
   override getFieldSchema(spec: FieldSpec): SchemaResponse {
      this.validateFieldSpecType(spec.type);

      // Map bknd's generic field types to Postgres column types first ...
      let type: ColumnDataType = spec.type;
      switch (spec.type) {
         case "blob":
            type = "bytea";
            break;
         case "date":
         case "datetime":
            // https://www.postgresql.org/docs/17/datatype-datetime.html
            type = "timestamp";
            break;
         case "text":
            // https://www.postgresql.org/docs/17/datatype-character.html
            type = "varchar";
            break;
      }
      // ... then let primary keys take precedence. Previously "serial" was
      // assigned before the switch, so a primary field with a mapped type
      // (e.g. "text") would silently lose its auto-increment.
      if (spec.primary) {
         type = "serial";
      }

      return [
         spec.name,
         type,
         (col: ColumnDefinitionBuilder) => {
            if (spec.primary) {
               // "serial" is implicitly NOT NULL; only the PK constraint is added.
               return col.primaryKey();
            }
            if (spec.references) {
               // NOTE(review): referenced columns return early and never get
               // notNull() even when spec.nullable is false — confirm intended.
               return col
                  .references(spec.references)
                  .onDelete(spec.onDelete ?? "set null")
                  .onUpdate(spec.onUpdate ?? "no action");
            }
            return spec.nullable ? col : col.notNull();
         },
      ];
   }

   /** Dispose the underlying pg pool; the connection is unusable afterwards. */
   override async close(): Promise<void> {
      await this.pool.end();
   }

   /**
    * Execute multiple queries inside a single transaction and return their
    * row arrays in input order.
    */
   protected override async batch<Queries extends QB[]>(
      queries: [...Queries],
   ): Promise<{
      [K in keyof Queries]: Awaited<ReturnType<Queries[K]["execute"]>>;
   }> {
      return this.kysely.transaction().execute(async (trx) => {
         return Promise.all(queries.map((q) => trx.executeQuery(q).then((r) => r.rows)));
      }) as any;
   }
}

View File

@@ -0,0 +1,127 @@
import { type SchemaMetadata, sql } from "kysely";
import { BaseIntrospector } from "bknd/data";
// Shape of one row produced by the raw metadata query in
// PostgresIntrospector.getSchemaSpec() (keys match its json_build_object calls).
type PostgresSchemaSpec = {
   name: string;
   type: "VIEW" | "BASE TABLE";
   columns: {
      name: string;
      type: string;
      // NOTE(review): the query emits a boolean (CASE WHEN is_nullable = 'NO'
      // THEN true ELSE false END) although this is annotated `number` — confirm
      // and align the type.
      notnull: number;
      dflt: string;
      pk: boolean;
   }[];
   indices: {
      name: string;
      origin: string;
      partial: number;
      sql: string;
      columns: { name: string; seqno: number }[];
   }[];
};
/**
 * Introspector reading table, column and index metadata from the connected
 * database's `information_schema` / `pg_catalog` views. Wired into kysely via
 * CustomPostgresDialect in PostgresConnection.
 */
export class PostgresIntrospector extends BaseIntrospector {
   // List all schemas (namespaces) of the database.
   async getSchemas(): Promise<SchemaMetadata[]> {
      const rawSchemas = await this.db
         .selectFrom("pg_catalog.pg_namespace")
         .select("nspname")
         .$castTo<{ nspname: string }>()
         .execute();
      return rawSchemas.map((it) => ({ name: it.nspname }));
   }

   // Collect tables/views of the "public" schema together with their columns
   // and indices in a single round trip.
   //
   // NOTE(review): `${this.getExcludedTableNames().join(", ")}` is interpolated
   // through kysely's `sql` tag, which binds it as ONE string parameter — so
   // `NOT IN ($1)` compares each table name against the joined string rather
   // than against each excluded name. Verify exclusion actually works, and
   // what SQL results when the excluded list is empty.
   async getSchemaSpec() {
      const query = sql`
         WITH tables_and_views AS (
            SELECT table_name AS name,
                   table_type AS type
            FROM information_schema.tables
            WHERE table_schema = 'public'
              AND table_type IN ('BASE TABLE', 'VIEW')
              AND table_name NOT LIKE 'pg_%'
              AND table_name NOT IN (${this.getExcludedTableNames().join(", ")})
         ),
         columns_info AS (
            SELECT table_name AS name,
                   json_agg(json_build_object(
                      'name', column_name,
                      'type', data_type,
                      'notnull', (CASE WHEN is_nullable = 'NO' THEN true ELSE false END),
                      'dflt', column_default,
                      'pk', (SELECT COUNT(*) > 0
                             FROM information_schema.table_constraints tc
                             INNER JOIN information_schema.key_column_usage kcu
                                ON tc.constraint_name = kcu.constraint_name
                             WHERE tc.table_name = c.table_name
                               AND tc.constraint_type = 'PRIMARY KEY'
                               AND kcu.column_name = c.column_name)
                   )) AS columns
            FROM information_schema.columns c
            WHERE table_schema = 'public'
            GROUP BY table_name
         ),
         indices_info AS (
            SELECT
               t.relname AS table_name,
               json_agg(json_build_object(
                  'name', i.relname,
                  'origin', pg_get_indexdef(i.oid),
                  'partial', (CASE WHEN ix.indisvalid THEN false ELSE true END),
                  'sql', pg_get_indexdef(i.oid),
                  'columns', (
                     SELECT json_agg(json_build_object(
                        'name', a.attname,
                        'seqno', x.ordinal_position
                     ))
                     FROM unnest(ix.indkey) WITH ORDINALITY AS x(attnum, ordinal_position)
                     JOIN pg_attribute a ON a.attnum = x.attnum AND a.attrelid = t.oid
                  ))) AS indices
            FROM pg_class t
            LEFT JOIN pg_index ix ON t.oid = ix.indrelid
            LEFT JOIN pg_class i ON i.oid = ix.indexrelid
            WHERE t.relkind IN ('r', 'v') -- r = table, v = view
              AND t.relname NOT LIKE 'pg_%'
            GROUP BY t.relname
         )
         SELECT
            tv.name,
            tv.type,
            ci.columns,
            ii.indices
         FROM tables_and_views tv
         LEFT JOIN columns_info ci ON tv.name = ci.name
         LEFT JOIN indices_info ii ON tv.name = ii.table_name;
      `;
      const tables = await this.executeWithPlugins<PostgresSchemaSpec[]>(query);
      // NOTE(review): `columns`/`indices` come from LEFT JOINs and can be NULL
      // for a table without matching CTE rows; `.map` would then throw — confirm.
      return tables.map((table) => ({
         name: table.name,
         isView: table.type === "VIEW",
         columns: table.columns.map((col) => {
            return {
               name: col.name,
               dataType: col.type,
               isNullable: !col.notnull,
               // @todo: check default value on 'nextval' see https://www.postgresql.org/docs/17/datatype-numeric.html#DATATYPE-SERIAL
               isAutoIncrementing: true, // just for now
               hasDefaultValue: col.dflt != null,
               comment: undefined,
            };
         }),
         indices: table.indices.map((index) => ({
            name: index.name,
            table: table.name,
            isUnique: index.sql?.match(/unique/i) != null,
            columns: index.columns.map((col) => ({
               name: col.name,
               order: col.seqno,
            })),
         })),
      }));
   }
}

View File

@@ -0,0 +1,2 @@
export { PostgresConnection, type PostgresConnectionConfig } from "./PostgresConnection";
export { PostgresIntrospector } from "./PostgresIntrospector";

View File

@@ -0,0 +1,19 @@
import { describe, it, expect } from "bun:test";
import { PostgresConnection } from "../src";
import { createConnection, cleanDatabase } from "./setup";
describe(PostgresConnection, () => {
   // Each test owns its connection and must close it: an open pg.Pool keeps
   // sockets alive and previously leaked from both tests.
   it("should connect to the database", async () => {
      const connection = createConnection();
      try {
         expect(await connection.ping()).toBe(true);
      } finally {
         await connection.close();
      }
   });

   it("should clean the database", async () => {
      const connection = createConnection();
      try {
         await cleanDatabase(connection);
         const tables = await connection.getIntrospector().getTables();
         expect(tables).toEqual([]);
      } finally {
         await connection.close();
      }
   });
});

View File

@@ -0,0 +1,113 @@
import { describe, it, expect, beforeAll, afterAll, afterEach } from "bun:test";
import { createApp } from "bknd";
import * as proto from "bknd/data";
import { createConnection, cleanDatabase } from "./setup";
import type { PostgresConnection } from "../src";
// Single shared connection for the whole file; created once, closed at the end.
let connection: PostgresConnection;

beforeAll(async () => {
   connection = createConnection();
   await cleanDatabase(connection);
});

// Reset the "public" schema between tests so each case starts empty.
afterEach(async () => {
   await cleanDatabase(connection);
});

// Dispose the pg pool so the test process can exit cleanly.
afterAll(async () => {
   await connection.close();
});
describe("integration", () => {
   // Smoke test: the app builds against Postgres and the entity manager pings.
   it("should create app and ping", async () => {
      const app = createApp({
         connection,
      });
      await app.build();

      expect(app.version()).toBeDefined();
      expect(await app.em.ping()).toBe(true);
   });

   // End-to-end schema test: define two entities with a relation and a unique
   // index, sync them to Postgres, then exercise the data API.
   it("should create a basic schema", async () => {
      const schema = proto.em(
         {
            posts: proto.entity("posts", {
               title: proto.text().required(),
               content: proto.text(),
            }),
            comments: proto.entity("comments", {
               content: proto.text(),
            }),
         },
         (fns, s) => {
            fns.relation(s.comments).manyToOne(s.posts);
            // unique index on title — exercised by the duplicate insert below
            fns.index(s.posts).on(["title"], true);
         },
      );

      const app = createApp({
         connection,
         initialConfig: {
            data: schema.toJSON(),
         },
      });
      await app.build();

      expect(app.em.entities.length).toBe(2);
      expect(app.em.entities.map((e) => e.name)).toEqual(["posts", "comments"]);

      const api = app.getApi();
      // ids 1 and 2 are expected because afterEach recreates the schema,
      // resetting the serial sequence.
      expect(
         (
            await api.data.createMany("posts", [
               {
                  title: "Hello",
                  content: "World",
               },
               {
                  title: "Hello 2",
                  content: "World 2",
               },
            ])
         ).data,
      ).toEqual([
         {
            id: 1,
            title: "Hello",
            content: "World",
         },
         {
            id: 2,
            title: "Hello 2",
            content: "World 2",
         },
      ] as any);

      // try to create an existing — must fail against the unique title index
      expect(
         (
            await api.data.createOne("posts", {
               title: "Hello",
            })
         ).ok,
      ).toBe(false);

      // add a comment to a post
      await api.data.createOne("comments", {
         content: "Hello",
         posts_id: 1,
      });

      // and then query using a `with` property
      // NOTE(review): readMany's result is used directly as an array here —
      // confirm it is not wrapped in a `{ data }` envelope like createMany.
      const result = await api.data.readMany("posts", { with: ["comments"] });
      expect(result.length).toBe(2);
      expect(result[0].comments.length).toBe(1);
      expect(result[0].comments[0].content).toBe("Hello");
      expect(result[1].comments.length).toBe(0);
   });
});

View File

@@ -0,0 +1,25 @@
import type { Kysely } from "kysely";
import { PostgresConnection, PostgresIntrospector, type PostgresConnectionConfig } from "../src";
// Connection details for the local test database.
// NOTE(review): port 5430 is non-default — presumably matches the
// docker-compose mapping for the test Postgres; confirm against the compose file.
export const info = {
   host: "localhost",
   port: 5430,
   user: "postgres",
   password: "postgres",
   database: "bknd",
};
/**
 * Create a connection to the local test database.
 * Any fields given in `config` override the defaults from `info`.
 */
export function createConnection(config: PostgresConnectionConfig = {}) {
   const merged = { ...info, ...config };
   return new PostgresConnection(merged);
}
/**
 * Wipe the database by recreating the "public" schema.
 * Dropping with CASCADE removes every table, view, etc. it contains.
 */
export async function cleanDatabase(connection: PostgresConnection) {
   const { schema } = connection.kysely;
   await schema.dropSchema("public").ifExists().cascade().execute();
   await schema.createSchema("public").execute();
}

View File

@@ -0,0 +1,29 @@
{
"compilerOptions": {
"composite": false,
"module": "ESNext",
"moduleResolution": "bundler",
"allowImportingTsExtensions": false,
"target": "ES2022",
"noImplicitAny": false,
"allowJs": true,
"verbatimModuleSyntax": true,
"declaration": true,
"strict": true,
"allowUnusedLabels": false,
"allowUnreachableCode": false,
"exactOptionalPropertyTypes": false,
"noFallthroughCasesInSwitch": true,
"noImplicitOverride": true,
"noImplicitReturns": true,
"noPropertyAccessFromIndexSignature": false,
"noUncheckedIndexedAccess": true,
"noUnusedLocals": false,
"noUnusedParameters": false,
"isolatedModules": true,
"esModuleInterop": true,
"skipLibCheck": true
},
"include": ["./src/**/*.ts"],
"exclude": ["node_modules"]
}