feat: move postgres as part of the main repo

This commit is contained in:
dswbx
2025-10-31 17:13:23 +01:00
parent 5417aa174e
commit 2c7054c317
30 changed files with 310 additions and 403 deletions

View File

@@ -9,6 +9,21 @@ jobs:
test: test:
runs-on: ubuntu-latest runs-on: ubuntu-latest
services:
postgres:
image: postgres:17
env:
POSTGRES_PASSWORD: postgres
POSTGRES_USER: postgres
POSTGRES_DB: bknd
ports:
- 5430:5432
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
@@ -20,7 +35,7 @@ jobs:
- name: Setup Bun - name: Setup Bun
uses: oven-sh/setup-bun@v1 uses: oven-sh/setup-bun@v1
with: with:
bun-version: "1.2.22" bun-version: "1.3.1"
- name: Install dependencies - name: Install dependencies
working-directory: ./app working-directory: ./app

View File

@@ -15,7 +15,7 @@ const mockedBackend = new Hono()
.get("/file/:name", async (c) => { .get("/file/:name", async (c) => {
const { name } = c.req.param(); const { name } = c.req.param();
const file = Bun.file(`${assetsPath}/${name}`); const file = Bun.file(`${assetsPath}/${name}`);
return new Response(file, { return new Response(new File([await file.bytes()], name, { type: file.type }), {
headers: { headers: {
"Content-Type": file.type, "Content-Type": file.type,
"Content-Length": file.size.toString(), "Content-Length": file.size.toString(),
@@ -67,7 +67,7 @@ describe("MediaApi", () => {
const res = await mockedBackend.request("/api/media/file/" + name); const res = await mockedBackend.request("/api/media/file/" + name);
await Bun.write(path, res); await Bun.write(path, res);
const file = await Bun.file(path); const file = Bun.file(path);
expect(file.size).toBeGreaterThan(0); expect(file.size).toBeGreaterThan(0);
expect(file.type).toBe("image/png"); expect(file.type).toBe("image/png");
await file.delete(); await file.delete();
@@ -154,15 +154,12 @@ describe("MediaApi", () => {
} }
// upload via readable from bun // upload via readable from bun
await matches(await api.upload(file.stream(), { filename: "readable.png" }), "readable.png"); await matches(api.upload(file.stream(), { filename: "readable.png" }), "readable.png");
// upload via readable from response // upload via readable from response
{ {
const response = (await mockedBackend.request(url)) as Response; const response = (await mockedBackend.request(url)) as Response;
await matches( await matches(api.upload(response.body!, { filename: "readable.png" }), "readable.png");
await api.upload(response.body!, { filename: "readable.png" }),
"readable.png",
);
} }
}); });
}); });

View File

@@ -1,8 +1,12 @@
import { describe, expect, mock, test } from "bun:test"; import { describe, expect, mock, test, beforeAll, afterAll } from "bun:test";
import { createApp as internalCreateApp, type CreateAppConfig } from "bknd"; import { createApp as internalCreateApp, type CreateAppConfig } from "bknd";
import { getDummyConnection } from "../../__test__/helper"; import { getDummyConnection } from "../../__test__/helper";
import { ModuleManager } from "modules/ModuleManager"; import { ModuleManager } from "modules/ModuleManager";
import { em, entity, text } from "data/prototype"; import { em, entity, text } from "data/prototype";
import { disableConsoleLog, enableConsoleLog } from "core/utils/test";
beforeAll(disableConsoleLog);
afterAll(enableConsoleLog);
async function createApp(config: CreateAppConfig = {}) { async function createApp(config: CreateAppConfig = {}) {
const app = internalCreateApp({ const app = internalCreateApp({

View File

@@ -1,7 +1,11 @@
import { AppEvents } from "App"; import { AppEvents } from "App";
import { describe, test, expect, beforeAll, mock } from "bun:test"; import { describe, test, expect, beforeAll, mock, afterAll } from "bun:test";
import { type App, createApp, createMcpToolCaller } from "core/test/utils"; import { type App, createApp, createMcpToolCaller } from "core/test/utils";
import type { McpServer } from "bknd/utils"; import type { McpServer } from "bknd/utils";
import { disableConsoleLog, enableConsoleLog } from "core/utils/test";
beforeAll(disableConsoleLog);
afterAll(enableConsoleLog);
/** /**
* - [x] system_config * - [x] system_config

View File

@@ -1,8 +1,12 @@
import { describe, expect, test } from "bun:test"; import { describe, expect, test, beforeAll, afterAll } from "bun:test";
import { Guard, type GuardConfig } from "auth/authorize/Guard"; import { Guard, type GuardConfig } from "auth/authorize/Guard";
import { Permission } from "auth/authorize/Permission"; import { Permission } from "auth/authorize/Permission";
import { Role, type RoleSchema } from "auth/authorize/Role"; import { Role, type RoleSchema } from "auth/authorize/Role";
import { objectTransform, s } from "bknd/utils"; import { objectTransform, s } from "bknd/utils";
import { disableConsoleLog, enableConsoleLog } from "core/utils/test";
beforeAll(disableConsoleLog);
afterAll(enableConsoleLog);
function createGuard( function createGuard(
permissionNames: string[], permissionNames: string[],

View File

@@ -7,8 +7,8 @@ import type { App, DB } from "bknd";
import type { CreateUserPayload } from "auth/AppAuth"; import type { CreateUserPayload } from "auth/AppAuth";
import { disableConsoleLog, enableConsoleLog } from "core/utils/test"; import { disableConsoleLog, enableConsoleLog } from "core/utils/test";
beforeAll(() => disableConsoleLog()); beforeAll(disableConsoleLog);
afterAll(() => enableConsoleLog()); afterAll(enableConsoleLog);
async function makeApp(config: Partial<CreateAppConfig["config"]> = {}) { async function makeApp(config: Partial<CreateAppConfig["config"]> = {}) {
const app = createApp({ const app = createApp({

View File

@@ -0,0 +1,78 @@
import { describe, beforeAll, afterAll, expect, test, it, afterEach } from "bun:test";
import type { PostgresConnection } from "data/connection/postgres";
import { createApp, em, entity, text, pg, postgresJs } from "bknd";
import { disableConsoleLog, enableConsoleLog, $waitUntil } from "bknd/utils";
import { $ } from "bun";
import { connectionTestSuite } from "data/connection/connection-test-suite";
import { bunTestRunner } from "adapter/bun/test";
const credentials = {
host: "localhost",
port: 5430,
user: "postgres",
password: "postgres",
database: "bknd",
};
async function cleanDatabase(connection: InstanceType<typeof PostgresConnection>) {
const kysely = connection.kysely;
// drop all tables+indexes & create new schema
await kysely.schema.dropSchema("public").ifExists().cascade().execute();
await kysely.schema.dropIndex("public").ifExists().cascade().execute();
await kysely.schema.createSchema("public").execute();
}
async function isPostgresRunning() {
try {
await $`docker exec bknd-test-postgres pg_isready -U ${credentials.user}`;
return true;
} catch (e) {
return false;
}
}
describe("postgres", () => {
beforeAll(async () => {
if (!(await isPostgresRunning())) {
await $`docker run --rm --name bknd-test-postgres -d -e POSTGRES_PASSWORD=${credentials.password} -e POSTGRES_USER=${credentials.user} -e POSTGRES_DB=${credentials.database} -p ${credentials.port}:5432 postgres:17`;
await $waitUntil("Postgres is running", isPostgresRunning);
await new Promise((resolve) => setTimeout(resolve, 500));
}
disableConsoleLog();
});
afterAll(async () => {
if (await isPostgresRunning()) {
await $`docker stop bknd-test-postgres`;
}
enableConsoleLog();
});
describe.serial.each([
["pg", () => pg(credentials)],
["postgresjs", () => postgresJs(credentials)],
])("%s", (name, createConnection) => {
connectionTestSuite(
{
...bunTestRunner,
test: test.serial,
},
{
makeConnection: () => {
const connection = createConnection();
return {
connection,
dispose: async () => {
await cleanDatabase(connection);
await connection.close();
},
};
},
rawDialectDetails: [],
disableConsoleLog: false,
},
);
});
});

View File

@@ -59,7 +59,7 @@ describe("SqliteIntrospector", () => {
dataType: "INTEGER", dataType: "INTEGER",
isNullable: false, isNullable: false,
isAutoIncrementing: true, isAutoIncrementing: true,
hasDefaultValue: false, hasDefaultValue: true,
comment: undefined, comment: undefined,
}, },
{ {
@@ -89,7 +89,7 @@ describe("SqliteIntrospector", () => {
dataType: "INTEGER", dataType: "INTEGER",
isNullable: false, isNullable: false,
isAutoIncrementing: true, isAutoIncrementing: true,
hasDefaultValue: false, hasDefaultValue: true,
comment: undefined, comment: undefined,
}, },
{ {

View File

@@ -10,7 +10,7 @@ import { assetsPath, assetsTmpPath } from "../helper";
import { disableConsoleLog, enableConsoleLog } from "core/utils/test"; import { disableConsoleLog, enableConsoleLog } from "core/utils/test";
beforeAll(() => { beforeAll(() => {
//disableConsoleLog(); disableConsoleLog();
registries.media.register("local", StorageLocalAdapter); registries.media.register("local", StorageLocalAdapter);
}); });
afterAll(enableConsoleLog); afterAll(enableConsoleLog);

View File

@@ -10,12 +10,6 @@ beforeAll(disableConsoleLog);
afterAll(enableConsoleLog); afterAll(enableConsoleLog);
describe("AppAuth", () => { describe("AppAuth", () => {
test.skip("...", () => {
const auth = new AppAuth({});
console.log(auth.toJSON());
console.log(auth.config);
});
moduleTestSuite(AppAuth); moduleTestSuite(AppAuth);
let ctx: ModuleBuildContext; let ctx: ModuleBuildContext;
@@ -39,11 +33,9 @@ describe("AppAuth", () => {
await auth.build(); await auth.build();
const oldConfig = auth.toJSON(true); const oldConfig = auth.toJSON(true);
//console.log(oldConfig);
await auth.schema().patch("enabled", true); await auth.schema().patch("enabled", true);
await auth.build(); await auth.build();
const newConfig = auth.toJSON(true); const newConfig = auth.toJSON(true);
//console.log(newConfig);
expect(newConfig.jwt.secret).not.toBe(oldConfig.jwt.secret); expect(newConfig.jwt.secret).not.toBe(oldConfig.jwt.secret);
}); });
@@ -69,7 +61,6 @@ describe("AppAuth", () => {
const app = new AuthController(auth).getController(); const app = new AuthController(auth).getController();
{ {
disableConsoleLog();
const res = await app.request("/password/register", { const res = await app.request("/password/register", {
method: "POST", method: "POST",
headers: { headers: {
@@ -80,7 +71,6 @@ describe("AppAuth", () => {
password: "12345678", password: "12345678",
}), }),
}); });
enableConsoleLog();
expect(res.status).toBe(200); expect(res.status).toBe(200);
const { data: users } = await ctx.em.repository("users").findMany(); const { data: users } = await ctx.em.repository("users").findMany();
@@ -119,7 +109,6 @@ describe("AppAuth", () => {
const app = new AuthController(auth).getController(); const app = new AuthController(auth).getController();
{ {
disableConsoleLog();
const res = await app.request("/password/register", { const res = await app.request("/password/register", {
method: "POST", method: "POST",
headers: { headers: {
@@ -130,7 +119,6 @@ describe("AppAuth", () => {
password: "12345678", password: "12345678",
}), }),
}); });
enableConsoleLog();
expect(res.status).toBe(200); expect(res.status).toBe(200);
const { data: users } = await ctx.em.repository("users").findMany(); const { data: users } = await ctx.em.repository("users").findMany();

View File

@@ -1,10 +1,14 @@
import { describe, expect, test } from "bun:test"; import { describe, expect, test, beforeAll, afterAll } from "bun:test";
import { createApp } from "core/test/utils"; import { createApp } from "core/test/utils";
import { em, entity, text } from "data/prototype"; import { em, entity, text } from "data/prototype";
import { registries } from "modules/registries"; import { registries } from "modules/registries";
import { StorageLocalAdapter } from "adapter/node/storage/StorageLocalAdapter"; import { StorageLocalAdapter } from "adapter/node/storage/StorageLocalAdapter";
import { AppMedia } from "../../src/media/AppMedia"; import { AppMedia } from "../../src/media/AppMedia";
import { moduleTestSuite } from "./module-test-suite"; import { moduleTestSuite } from "./module-test-suite";
import { disableConsoleLog, enableConsoleLog } from "core/utils/test";
beforeAll(disableConsoleLog);
afterAll(enableConsoleLog);
describe("AppMedia", () => { describe("AppMedia", () => {
test.skip("...", () => { test.skip("...", () => {

View File

@@ -1,7 +1,11 @@
import { it, expect, describe } from "bun:test"; import { it, expect, describe, beforeAll, afterAll } from "bun:test";
import { DbModuleManager } from "modules/db/DbModuleManager"; import { DbModuleManager } from "modules/db/DbModuleManager";
import { getDummyConnection } from "../helper"; import { getDummyConnection } from "../helper";
import { TABLE_NAME } from "modules/db/migrations"; import { TABLE_NAME } from "modules/db/migrations";
import { disableConsoleLog, enableConsoleLog } from "core/utils/test";
beforeAll(disableConsoleLog);
afterAll(enableConsoleLog);
describe("DbModuleManager", () => { describe("DbModuleManager", () => {
it("should extract secrets", async () => { it("should extract secrets", async () => {

View File

@@ -11,7 +11,7 @@ import { s, stripMark } from "core/utils/schema";
import { Connection } from "data/connection/Connection"; import { Connection } from "data/connection/Connection";
import { entity, text } from "data/prototype"; import { entity, text } from "data/prototype";
beforeAll(disableConsoleLog); beforeAll(() => disableConsoleLog());
afterAll(enableConsoleLog); afterAll(enableConsoleLog);
describe("ModuleManager", async () => { describe("ModuleManager", async () => {
@@ -82,7 +82,6 @@ describe("ModuleManager", async () => {
}, },
}, },
} as any; } as any;
//const { version, ...json } = mm.toJSON() as any;
const { dummyConnection } = getDummyConnection(); const { dummyConnection } = getDummyConnection();
const db = dummyConnection.kysely; const db = dummyConnection.kysely;
@@ -97,10 +96,6 @@ describe("ModuleManager", async () => {
await mm2.build(); await mm2.build();
/* console.log({
json,
configs: mm2.configs(),
}); */
//expect(stripMark(json)).toEqual(stripMark(mm2.configs())); //expect(stripMark(json)).toEqual(stripMark(mm2.configs()));
expect(mm2.configs().data.entities?.test).toBeDefined(); expect(mm2.configs().data.entities?.test).toBeDefined();
expect(mm2.configs().data.entities?.test?.fields?.content).toBeDefined(); expect(mm2.configs().data.entities?.test?.fields?.content).toBeDefined();
@@ -228,8 +223,6 @@ describe("ModuleManager", async () => {
const c = getDummyConnection(); const c = getDummyConnection();
const mm = new ModuleManager(c.dummyConnection); const mm = new ModuleManager(c.dummyConnection);
await mm.build(); await mm.build();
console.log("==".repeat(30));
console.log("");
const json = mm.configs(); const json = mm.configs();
const c2 = getDummyConnection(); const c2 = getDummyConnection();
@@ -275,7 +268,6 @@ describe("ModuleManager", async () => {
} }
override async build() { override async build() {
//console.log("building FailingModule", this.config);
if (this.config.value && this.config.value < 0) { if (this.config.value && this.config.value < 0) {
throw new Error("value must be positive, given: " + this.config.value); throw new Error("value must be positive, given: " + this.config.value);
} }
@@ -296,9 +288,6 @@ describe("ModuleManager", async () => {
} }
} }
beforeEach(() => disableConsoleLog(["log", "warn", "error"]));
afterEach(enableConsoleLog);
test("it builds", async () => { test("it builds", async () => {
const { dummyConnection } = getDummyConnection(); const { dummyConnection } = getDummyConnection();
const mm = new TestModuleManager(dummyConnection); const mm = new TestModuleManager(dummyConnection);

View File

@@ -13,7 +13,7 @@
"bugs": { "bugs": {
"url": "https://github.com/bknd-io/bknd/issues" "url": "https://github.com/bknd-io/bknd/issues"
}, },
"packageManager": "bun@1.2.22", "packageManager": "bun@1.3.1",
"engines": { "engines": {
"node": ">=22.13" "node": ">=22.13"
}, },
@@ -104,8 +104,8 @@
"dotenv": "^16.4.7", "dotenv": "^16.4.7",
"jotai": "^2.12.2", "jotai": "^2.12.2",
"jsdom": "^26.0.0", "jsdom": "^26.0.0",
"kysely-d1": "^0.3.0",
"kysely-generic-sqlite": "^1.2.1", "kysely-generic-sqlite": "^1.2.1",
"kysely-postgres-js": "^2.0.0",
"libsql-stateless-easy": "^1.8.0", "libsql-stateless-easy": "^1.8.0",
"open": "^10.1.0", "open": "^10.1.0",
"openapi-types": "^12.1.3", "openapi-types": "^12.1.3",
@@ -139,7 +139,17 @@
}, },
"peerDependencies": { "peerDependencies": {
"react": ">=19", "react": ">=19",
"react-dom": ">=19" "react-dom": ">=19",
"pg": "*",
"postgres": "*"
},
"peerDependenciesMeta": {
"pg": {
"optional": true
},
"postgres": {
"optional": true
}
}, },
"main": "./dist/index.js", "main": "./dist/index.js",
"module": "./dist/index.js", "module": "./dist/index.js",

View File

@@ -1,3 +1,4 @@
import type { MaybePromise } from "core/types";
import { getRuntimeKey as honoGetRuntimeKey } from "hono/adapter"; import { getRuntimeKey as honoGetRuntimeKey } from "hono/adapter";
/** /**
@@ -77,3 +78,21 @@ export function threw(fn: () => any, instance?: new (...args: any[]) => Error) {
return true; return true;
} }
} }
export async function $waitUntil(
message: string,
condition: () => MaybePromise<boolean>,
delay = 100,
maxAttempts = 10,
) {
let attempts = 0;
while (attempts < maxAttempts) {
if (await condition()) {
return;
}
await new Promise((resolve) => setTimeout(resolve, delay));
attempts++;
}
throw new Error(`$waitUntil: "${message}" failed after ${maxAttempts} attempts`);
}

View File

@@ -14,27 +14,31 @@ export function connectionTestSuite(
{ {
makeConnection, makeConnection,
rawDialectDetails, rawDialectDetails,
disableConsoleLog: _disableConsoleLog = true,
}: { }: {
makeConnection: () => MaybePromise<{ makeConnection: () => MaybePromise<{
connection: Connection; connection: Connection;
dispose: () => MaybePromise<void>; dispose: () => MaybePromise<void>;
}>; }>;
rawDialectDetails: string[]; rawDialectDetails: string[];
disableConsoleLog?: boolean;
}, },
) { ) {
const { test, expect, describe, beforeEach, afterEach, afterAll, beforeAll } = testRunner; const { test, expect, describe, beforeEach, afterEach, afterAll, beforeAll } = testRunner;
beforeAll(() => disableConsoleLog()); if (_disableConsoleLog) {
afterAll(() => enableConsoleLog()); beforeAll(() => disableConsoleLog());
afterAll(() => enableConsoleLog());
}
describe("base", () => { let ctx: Awaited<ReturnType<typeof makeConnection>>;
let ctx: Awaited<ReturnType<typeof makeConnection>>; beforeEach(async () => {
beforeEach(async () => { ctx = await makeConnection();
ctx = await makeConnection(); });
}); afterEach(async () => {
afterEach(async () => { await ctx.dispose();
await ctx.dispose(); });
});
describe("base", async () => {
test("pings", async () => { test("pings", async () => {
const res = await ctx.connection.ping(); const res = await ctx.connection.ping();
expect(res).toBe(true); expect(res).toBe(true);
@@ -98,52 +102,54 @@ export function connectionTestSuite(
}); });
describe("schema", async () => { describe("schema", async () => {
const { connection, dispose } = await makeConnection(); const makeSchema = async () => {
afterAll(async () => { const fields = [
await dispose(); {
}); type: "integer",
name: "id",
primary: true,
},
{
type: "text",
name: "text",
},
{
type: "json",
name: "json",
},
] as const satisfies FieldSpec[];
const fields = [ let b = ctx.connection.kysely.schema.createTable("test");
{ for (const field of fields) {
type: "integer", // @ts-expect-error
name: "id", b = b.addColumn(...ctx.connection.getFieldSchema(field));
primary: true, }
}, await b.execute();
{
type: "text",
name: "text",
},
{
type: "json",
name: "json",
},
] as const satisfies FieldSpec[];
let b = connection.kysely.schema.createTable("test"); // add index
for (const field of fields) { await ctx.connection.kysely.schema
// @ts-expect-error .createIndex("test_index")
b = b.addColumn(...connection.getFieldSchema(field)); .on("test")
} .columns(["id"])
await b.execute(); .execute();
};
// add index
await connection.kysely.schema.createIndex("test_index").on("test").columns(["id"]).execute();
test("executes query", async () => { test("executes query", async () => {
await connection.kysely await makeSchema();
await ctx.connection.kysely
.insertInto("test") .insertInto("test")
.values({ id: 1, text: "test", json: JSON.stringify({ a: 1 }) }) .values({ id: 1, text: "test", json: JSON.stringify({ a: 1 }) })
.execute(); .execute();
const expected = { id: 1, text: "test", json: { a: 1 } }; const expected = { id: 1, text: "test", json: { a: 1 } };
const qb = connection.kysely.selectFrom("test").selectAll(); const qb = ctx.connection.kysely.selectFrom("test").selectAll();
const res = await connection.executeQuery(qb); const res = await ctx.connection.executeQuery(qb);
expect(res.rows).toEqual([expected]); expect(res.rows).toEqual([expected]);
expect(rawDialectDetails.every((detail) => getPath(res, detail) !== undefined)).toBe(true); expect(rawDialectDetails.every((detail) => getPath(res, detail) !== undefined)).toBe(true);
{ {
const res = await connection.executeQueries(qb, qb); const res = await ctx.connection.executeQueries(qb, qb);
expect(res.length).toBe(2); expect(res.length).toBe(2);
res.map((r) => { res.map((r) => {
expect(r.rows).toEqual([expected]); expect(r.rows).toEqual([expected]);
@@ -155,15 +161,21 @@ export function connectionTestSuite(
}); });
test("introspects", async () => { test("introspects", async () => {
const tables = await connection.getIntrospector().getTables({ await makeSchema();
const tables = await ctx.connection.getIntrospector().getTables({
withInternalKyselyTables: false, withInternalKyselyTables: false,
}); });
const clean = tables.map((t) => ({ const clean = tables.map((t) => ({
...t, ...t,
columns: t.columns.map((c) => ({ columns: t.columns
...c, .map((c) => ({
dataType: undefined, ...c,
})), // ignore data type
dataType: undefined,
// ignore default value if "id"
hasDefaultValue: c.name !== "id" ? c.hasDefaultValue : undefined,
}))
.sort((a, b) => a.name.localeCompare(b.name)),
})); }));
expect(clean).toEqual([ expect(clean).toEqual([
@@ -176,14 +188,8 @@ export function connectionTestSuite(
dataType: undefined, dataType: undefined,
isNullable: false, isNullable: false,
isAutoIncrementing: true, isAutoIncrementing: true,
hasDefaultValue: false, hasDefaultValue: undefined,
}, comment: undefined,
{
name: "text",
dataType: undefined,
isNullable: true,
isAutoIncrementing: false,
hasDefaultValue: false,
}, },
{ {
name: "json", name: "json",
@@ -191,25 +197,34 @@ export function connectionTestSuite(
isNullable: true, isNullable: true,
isAutoIncrementing: false, isAutoIncrementing: false,
hasDefaultValue: false, hasDefaultValue: false,
comment: undefined,
},
{
name: "text",
dataType: undefined,
isNullable: true,
isAutoIncrementing: false,
hasDefaultValue: false,
comment: undefined,
},
],
},
]);
expect(await ctx.connection.getIntrospector().getIndices()).toEqual([
{
name: "test_index",
table: "test",
isUnique: false,
columns: [
{
name: "id",
order: 0,
}, },
], ],
}, },
]); ]);
}); });
expect(await connection.getIntrospector().getIndices()).toEqual([
{
name: "test_index",
table: "test",
isUnique: false,
columns: [
{
name: "id",
order: 0,
},
],
},
]);
}); });
describe("integration", async () => { describe("integration", async () => {

View File

@@ -6,9 +6,8 @@ import $pg from "pg";
export type PgPostgresConnectionConfig = $pg.PoolConfig; export type PgPostgresConnectionConfig = $pg.PoolConfig;
export class PgPostgresConnection extends PostgresConnection { export class PgPostgresConnection extends PostgresConnection<$pg.Pool> {
override name = "pg"; override name = "pg";
private pool: $pg.Pool;
constructor(config: PgPostgresConnectionConfig) { constructor(config: PgPostgresConnectionConfig) {
const pool = new $pg.Pool(config); const pool = new $pg.Pool(config);
@@ -20,11 +19,11 @@ export class PgPostgresConnection extends PostgresConnection {
}); });
super(kysely); super(kysely);
this.pool = pool; this.client = pool;
} }
override async close(): Promise<void> { override async close(): Promise<void> {
await this.pool.end(); await this.client.end();
} }
} }

View File

@@ -20,7 +20,7 @@ export type QB = SelectQueryBuilder<any, any, any>;
export const plugins = [new ParseJSONResultsPlugin()]; export const plugins = [new ParseJSONResultsPlugin()];
export abstract class PostgresConnection extends Connection { export abstract class PostgresConnection<Client = unknown> extends Connection<Client> {
protected override readonly supported = { protected override readonly supported = {
batching: true, batching: true,
softscans: true, softscans: true,
@@ -68,7 +68,7 @@ export abstract class PostgresConnection extends Connection {
type, type,
(col: ColumnDefinitionBuilder) => { (col: ColumnDefinitionBuilder) => {
if (spec.primary) { if (spec.primary) {
return col.primaryKey(); return col.primaryKey().notNull();
} }
if (spec.references) { if (spec.references) {
return col return col
@@ -76,7 +76,7 @@ export abstract class PostgresConnection extends Connection {
.onDelete(spec.onDelete ?? "set null") .onDelete(spec.onDelete ?? "set null")
.onUpdate(spec.onUpdate ?? "no action"); .onUpdate(spec.onUpdate ?? "no action");
} }
return spec.nullable ? col : col.notNull(); return col;
}, },
]; ];
} }

View File

@@ -102,26 +102,27 @@ export class PostgresIntrospector extends BaseIntrospector {
return tables.map((table) => ({ return tables.map((table) => ({
name: table.name, name: table.name,
isView: table.type === "VIEW", isView: table.type === "VIEW",
columns: table.columns.map((col) => { columns: table.columns.map((col) => ({
return { name: col.name,
name: col.name, dataType: col.type,
dataType: col.type, isNullable: !col.notnull,
isNullable: !col.notnull, isAutoIncrementing: col.dflt?.toLowerCase().includes("nextval") ?? false,
// @todo: check default value on 'nextval' see https://www.postgresql.org/docs/17/datatype-numeric.html#DATATYPE-SERIAL hasDefaultValue: col.dflt != null,
isAutoIncrementing: true, // just for now comment: undefined,
hasDefaultValue: col.dflt != null,
comment: undefined,
};
}),
indices: table.indices.map((index) => ({
name: index.name,
table: table.name,
isUnique: index.sql?.match(/unique/i) != null,
columns: index.columns.map((col) => ({
name: col.name,
order: col.seqno,
})),
})), })),
indices: table.indices
// filter out db-managed primary key index
.filter((index) => index.name !== `${table.name}_pkey`)
.map((index) => ({
name: index.name,
table: table.name,
isUnique: index.sql?.match(/unique/i) != null,
columns: index.columns.map((col) => ({
name: col.name,
// seqno starts at 1
order: col.seqno - 1,
})),
})),
})); }));
} }
} }

View File

@@ -7,12 +7,10 @@ import $postgresJs, { type Sql, type Options, type PostgresType } from "postgres
export type PostgresJsConfig = Options<Record<string, PostgresType>>; export type PostgresJsConfig = Options<Record<string, PostgresType>>;
export class PostgresJsConnection extends PostgresConnection { export class PostgresJsConnection extends PostgresConnection<$postgresJs.Sql> {
override name = "postgres-js"; override name = "postgres-js";
private postgres: Sql; constructor(opts: { postgres: $postgresJs.Sql }) {
constructor(opts: { postgres: Sql }) {
const kysely = new Kysely({ const kysely = new Kysely({
dialect: customIntrospector(PostgresJSDialect, PostgresIntrospector, { dialect: customIntrospector(PostgresJSDialect, PostgresIntrospector, {
excludeTables: [], excludeTables: [],
@@ -21,11 +19,11 @@ export class PostgresJsConnection extends PostgresConnection {
}); });
super(kysely); super(kysely);
this.postgres = opts.postgres; this.client = opts.postgres;
} }
override async close(): Promise<void> { override async close(): Promise<void> {
await this.postgres.end(); await this.client.end();
} }
} }

View File

@@ -13,7 +13,6 @@ import { customIntrospector } from "../Connection";
import { SqliteIntrospector } from "./SqliteIntrospector"; import { SqliteIntrospector } from "./SqliteIntrospector";
import type { Field } from "data/fields/Field"; import type { Field } from "data/fields/Field";
// @todo: add pragmas
export type SqliteConnectionConfig< export type SqliteConnectionConfig<
CustomDialect extends Constructor<Dialect> = Constructor<Dialect>, CustomDialect extends Constructor<Dialect> = Constructor<Dialect>,
> = { > = {

View File

@@ -83,7 +83,7 @@ export class SqliteIntrospector extends BaseIntrospector {
dataType: col.type, dataType: col.type,
isNullable: !col.notnull, isNullable: !col.notnull,
isAutoIncrementing: col.name === autoIncrementCol, isAutoIncrementing: col.name === autoIncrementCol,
hasDefaultValue: col.dflt_value != null, hasDefaultValue: col.name === autoIncrementCol ? true : col.dflt_value != null,
comment: undefined, comment: undefined,
}; };
}) ?? [], }) ?? [],

View File

@@ -1,8 +1,9 @@
import { test, describe, expect } from "bun:test"; import { test, describe, expect, beforeAll, afterAll } from "bun:test";
import * as q from "./query"; import * as q from "./query";
import { parse as $parse, type ParseOptions } from "bknd/utils"; import { parse as $parse, type ParseOptions } from "bknd/utils";
import type { PrimaryFieldType } from "modules"; import type { PrimaryFieldType } from "modules";
import type { Generated } from "kysely"; import type { Generated } from "kysely";
import { disableConsoleLog, enableConsoleLog } from "core/utils/test";
const parse = (v: unknown, o: ParseOptions = {}) => const parse = (v: unknown, o: ParseOptions = {}) =>
$parse(q.repoQuery, v, { $parse(q.repoQuery, v, {
@@ -15,6 +16,9 @@ const decode = (input: any, output: any) => {
expect(parse(input)).toEqual(output); expect(parse(input)).toEqual(output);
}; };
beforeAll(() => disableConsoleLog());
afterAll(() => enableConsoleLog());
describe("server/query", () => { describe("server/query", () => {
test("limit & offset", () => { test("limit & offset", () => {
//expect(() => parse({ limit: false })).toThrow(); //expect(() => parse({ limit: false })).toThrow();

View File

@@ -132,6 +132,8 @@ export type * from "data/entities/Entity";
export type { EntityManager } from "data/entities/EntityManager"; export type { EntityManager } from "data/entities/EntityManager";
export type { SchemaManager } from "data/schema/SchemaManager"; export type { SchemaManager } from "data/schema/SchemaManager";
export type * from "data/entities"; export type * from "data/entities";
// data connection
export { export {
BaseIntrospector, BaseIntrospector,
Connection, Connection,
@@ -144,9 +146,28 @@ export {
type ConnQuery, type ConnQuery,
type ConnQueryResults, type ConnQueryResults,
} from "data/connection"; } from "data/connection";
// data sqlite
export { SqliteConnection } from "data/connection/sqlite/SqliteConnection"; export { SqliteConnection } from "data/connection/sqlite/SqliteConnection";
export { SqliteIntrospector } from "data/connection/sqlite/SqliteIntrospector"; export { SqliteIntrospector } from "data/connection/sqlite/SqliteIntrospector";
export { SqliteLocalConnection } from "data/connection/sqlite/SqliteLocalConnection"; export { SqliteLocalConnection } from "data/connection/sqlite/SqliteLocalConnection";
// data postgres
export {
pg,
PgPostgresConnection,
type PgPostgresConnectionConfig,
} from "data/connection/postgres/PgPostgresConnection";
export { PostgresIntrospector } from "data/connection/postgres/PostgresIntrospector";
export { PostgresConnection } from "data/connection/postgres/PostgresConnection";
export {
postgresJs,
PostgresJsConnection,
type PostgresJsConfig,
} from "data/connection/postgres/PostgresJsConnection";
export { createCustomPostgresConnection } from "data/connection/postgres/custom";
// data prototype
export { export {
text, text,
number, number,

View File

@@ -15,7 +15,7 @@
}, },
"app": { "app": {
"name": "bknd", "name": "bknd",
"version": "0.18.1", "version": "0.19.0",
"bin": "./dist/cli/index.js", "bin": "./dist/cli/index.js",
"dependencies": { "dependencies": {
"@cfworker/json-schema": "^4.1.1", "@cfworker/json-schema": "^4.1.1",
@@ -74,8 +74,8 @@
"dotenv": "^16.4.7", "dotenv": "^16.4.7",
"jotai": "^2.12.2", "jotai": "^2.12.2",
"jsdom": "^26.0.0", "jsdom": "^26.0.0",
"kysely-d1": "^0.3.0",
"kysely-generic-sqlite": "^1.2.1", "kysely-generic-sqlite": "^1.2.1",
"kysely-postgres-js": "^2.0.0",
"libsql-stateless-easy": "^1.8.0", "libsql-stateless-easy": "^1.8.0",
"miniflare": "^4.20250913.0", "miniflare": "^4.20250913.0",
"open": "^10.1.0", "open": "^10.1.0",
@@ -108,9 +108,15 @@
"@hono/node-server": "^1.14.3", "@hono/node-server": "^1.14.3",
}, },
"peerDependencies": { "peerDependencies": {
"pg": "*",
"postgres": "*",
"react": ">=19", "react": ">=19",
"react-dom": ">=19", "react-dom": ">=19",
}, },
"optionalPeers": [
"pg",
"postgres",
],
}, },
"packages/cli": { "packages/cli": {
"name": "bknd-cli", "name": "bknd-cli",
@@ -2549,8 +2555,6 @@
"kysely": ["kysely@0.27.6", "", {}, "sha512-FIyV/64EkKhJmjgC0g2hygpBv5RNWVPyNCqSAD7eTCv6eFWNIi4PN1UvdSJGicN/o35bnevgis4Y0UDC0qi8jQ=="], "kysely": ["kysely@0.27.6", "", {}, "sha512-FIyV/64EkKhJmjgC0g2hygpBv5RNWVPyNCqSAD7eTCv6eFWNIi4PN1UvdSJGicN/o35bnevgis4Y0UDC0qi8jQ=="],
"kysely-d1": ["kysely-d1@0.3.0", "", { "peerDependencies": { "kysely": "*" } }, "sha512-9wTbE6ooLiYtBa4wPg9e4fjfcmvRtgE/2j9pAjYrIq+iz+EsH/Hj9YbtxpEXA6JoRgfulVQ1EtGj6aycGGRpYw=="],
"kysely-generic-sqlite": ["kysely-generic-sqlite@1.2.1", "", { "peerDependencies": { "kysely": ">=0.26" } }, "sha512-/Bs3/Uktn04nQ9g/4oSphLMEtSHkS5+j5hbKjK5gMqXQfqr/v3V3FKtoN4pLTmo2W35hNdrIpQnBukGL1zZc6g=="], "kysely-generic-sqlite": ["kysely-generic-sqlite@1.2.1", "", { "peerDependencies": { "kysely": ">=0.26" } }, "sha512-/Bs3/Uktn04nQ9g/4oSphLMEtSHkS5+j5hbKjK5gMqXQfqr/v3V3FKtoN4pLTmo2W35hNdrIpQnBukGL1zZc6g=="],
"kysely-neon": ["kysely-neon@1.3.0", "", { "peerDependencies": { "@neondatabase/serverless": "^0.4.3", "kysely": "0.x.x", "ws": "^8.13.0" }, "optionalPeers": ["ws"] }, "sha512-CIIlbmqpIXVJDdBEYtEOwbmALag0jmqYrGfBeM4cHKb9AgBGs+X1SvXUZ8TqkDacQEqEZN2XtsDoUkcMIISjHw=="], "kysely-neon": ["kysely-neon@1.3.0", "", { "peerDependencies": { "@neondatabase/serverless": "^0.4.3", "kysely": "0.x.x", "ws": "^8.13.0" }, "optionalPeers": ["ws"] }, "sha512-CIIlbmqpIXVJDdBEYtEOwbmALag0jmqYrGfBeM4cHKb9AgBGs+X1SvXUZ8TqkDacQEqEZN2XtsDoUkcMIISjHw=="],

View File

@@ -1,16 +0,0 @@
import { describe } from "bun:test";
import { pg } from "../src/PgPostgresConnection";
import { testSuite } from "./suite";
// Runs the shared Postgres test suite against the `pg`-driver connection.
// Targets the docker/CI Postgres instance exposed on local port 5430.
describe("pg", () => {
   const credentials = {
      host: "localhost",
      port: 5430,
      user: "postgres",
      password: "postgres",
      database: "bknd",
   };

   testSuite({
      createConnection: () => pg(credentials),
   });
});

View File

@@ -1,16 +0,0 @@
import { describe } from "bun:test";
import { postgresJs } from "../src/PostgresJsConnection";
import { testSuite } from "./suite";
// Runs the shared Postgres test suite against the `postgres.js`-driver
// connection. Uses the same docker/CI instance on local port 5430.
describe("postgresjs", () => {
   const credentials = {
      host: "localhost",
      port: 5430,
      user: "postgres",
      password: "postgres",
      database: "bknd",
   };

   testSuite({
      createConnection: () => postgresJs(credentials),
   });
});

View File

@@ -1,218 +0,0 @@
import { describe, beforeAll, afterAll, expect, it, afterEach } from "bun:test";
import type { PostgresConnection } from "../src";
import { createApp, em, entity, text } from "bknd";
import { disableConsoleLog, enableConsoleLog } from "bknd/utils";
// @ts-ignore
import { connectionTestSuite } from "$bknd/data/connection/connection-test-suite";
// @ts-ignore
import { bunTestRunner } from "$bknd/adapter/bun/test";
// Configuration accepted by `testSuite`.
export type TestSuiteConfig = {
   // Factory that produces a fresh Postgres connection for the suite.
   createConnection: () => InstanceType<typeof PostgresConnection>;
   // Optional override for wiping the database between tests;
   // when omitted, `defaultCleanDatabase` is used.
   cleanDatabase?: (connection: InstanceType<typeof PostgresConnection>) => Promise<void>;
};
/**
 * Resets the database to a pristine state by dropping the entire `public`
 * schema — CASCADE removes every table, index, sequence, etc. inside it —
 * and then recreating it empty.
 */
export async function defaultCleanDatabase(connection: InstanceType<typeof PostgresConnection>) {
   const kysely = connection.kysely;
   // DROP SCHEMA ... CASCADE already removes all contained objects; the
   // previous extra `dropIndex("public")` call was a no-op (there is no
   // index named "public") and has been removed.
   await kysely.schema.dropSchema("public").ifExists().cascade().execute();
   await kysely.schema.createSchema("public").execute();
}
// Wipe the database using the suite's custom cleaner when provided,
// falling back to `defaultCleanDatabase` otherwise.
async function cleanDatabase(
   connection: InstanceType<typeof PostgresConnection>,
   config: TestSuiteConfig,
) {
   const clean = config.cleanDatabase ?? defaultCleanDatabase;
   await clean(connection);
}
/**
 * Shared Postgres connection test suite. Runs against the connection
 * produced by `config.createConnection` and covers: basic connectivity,
 * database cleaning, app bootstrapping, schema creation with a
 * many-to-one relation and a unique index, and uuid primary keys.
 */
export function testSuite(config: TestSuiteConfig) {
   // silence app logging for the duration of the suite
   beforeAll(() => disableConsoleLog(["log", "warn", "error"]));
   afterAll(() => enableConsoleLog());

   // @todo: postgres seems to add multiple indexes, thus failing the test suite
   /* describe("test suite", () => {
      connectionTestSuite(bunTestRunner, {
         makeConnection: () => {
            const connection = config.createConnection();
            return {
               connection,
               dispose: async () => {
                  await cleanDatabase(connection, config);
                  await connection.close();
               },
            };
         },
         rawDialectDetails: [],
      });
   }); */

   describe("base", () => {
      it("should connect to the database", async () => {
         const connection = config.createConnection();
         expect(await connection.ping()).toBe(true);
      });

      it("should clean the database", async () => {
         const connection = config.createConnection();
         await cleanDatabase(connection, config);
         const tables = await connection.getIntrospector().getTables();
         expect(tables).toEqual([]);
      });
   });

   describe("integration", () => {
      let connection: PostgresConnection;

      beforeAll(async () => {
         connection = config.createConnection();
         await cleanDatabase(connection, config);
      });

      // each test starts from an empty database
      afterEach(async () => {
         await cleanDatabase(connection, config);
      });

      afterAll(async () => {
         await connection.close();
      });

      it("should create app and ping", async () => {
         const app = createApp({
            connection,
         });
         await app.build();
         expect(app.version()).toBeDefined();
         expect(await app.em.ping()).toBe(true);
      });

      it("should create a basic schema", async () => {
         // posts 1:n comments, with a unique index on posts.title
         const schema = em(
            {
               posts: entity("posts", {
                  title: text().required(),
                  content: text(),
               }),
               comments: entity("comments", {
                  content: text(),
               }),
            },
            (fns, s) => {
               fns.relation(s.comments).manyToOne(s.posts);
               fns.index(s.posts).on(["title"], true);
            },
         );

         const app = createApp({
            connection,
            config: {
               data: schema.toJSON(),
            },
         });
         await app.build();
         expect(app.em.entities.length).toBe(2);
         expect(app.em.entities.map((e) => e.name)).toEqual(["posts", "comments"]);

         const api = app.getApi();
         expect(
            (
               await api.data.createMany("posts", [
                  {
                     title: "Hello",
                     content: "World",
                  },
                  {
                     title: "Hello 2",
                     content: "World 2",
                  },
               ])
            ).data,
         ).toEqual([
            {
               id: 1,
               title: "Hello",
               content: "World",
            },
            {
               id: 2,
               title: "Hello 2",
               content: "World 2",
            },
         ] as any);

         // try to create an existing — must fail due to the unique index on title
         expect(
            (
               await api.data.createOne("posts", {
                  title: "Hello",
               })
            ).ok,
         ).toBe(false);

         // add a comment to a post
         await api.data.createOne("comments", {
            content: "Hello",
            posts_id: 1,
         });

         // and then query using a `with` property
         const result = await api.data.readMany("posts", { with: ["comments"] });
         expect(result.length).toBe(2);
         expect(result[0].comments.length).toBe(1);
         expect(result[0].comments[0].content).toBe("Hello");
         expect(result[1].comments.length).toBe(0);
      });

      it("should support uuid", async () => {
         const schema = em(
            {
               posts: entity(
                  "posts",
                  {
                     title: text().required(),
                     content: text(),
                  },
                  {
                     primary_format: "uuid",
                  },
               ),
               comments: entity("comments", {
                  content: text(),
               }),
            },
            (fns, s) => {
               fns.relation(s.comments).manyToOne(s.posts);
               fns.index(s.posts).on(["title"], true);
            },
         );

         const app = createApp({
            connection,
            config: {
               data: schema.toJSON(),
            },
         });
         await app.build();

         // renamed from `config` to avoid shadowing the TestSuiteConfig parameter
         const appConfig = app.toJSON();
         // @ts-expect-error
         expect(appConfig.data.entities?.posts.fields?.id.config?.format).toBe("uuid");

         const $em = app.em;
         const mutator = $em.mutator($em.entity("posts"));
         const data = await mutator.insertOne({ title: "Hello", content: "World" });
         expect(data.data.id).toBeString();
         // canonical uuid strings are 36 characters (32 hex digits + 4 hyphens)
         expect(String(data.data.id).length).toBe(36);
      });
   });
}