Merge pull request #189 from bknd-io/release/0.15

Release 0.15
This commit is contained in:
dswbx
2025-07-05 10:24:03 +02:00
committed by GitHub
170 changed files with 4139 additions and 2440 deletions

View File

@@ -12,6 +12,11 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: "22.x"
- name: Setup Bun
uses: oven-sh/setup-bun@v1
with:

View File

@@ -9,10 +9,18 @@
</p>
bknd simplifies app development by providing a fully functional backend for database management, authentication, media and workflows. Being lightweight and built on Web Standards, it can be deployed nearly anywhere, including running inside your framework of choice. No more deploying multiple separate services!
* **Runtimes**: Node.js 22+, Bun 1.0+, Deno, Browser, Cloudflare Workers/Pages, Vercel, Netlify, AWS Lambda, etc.
* **Databases**:
* SQLite: LibSQL, Node SQLite, Bun SQLite, Cloudflare D1, Cloudflare Durable Objects SQLite, SQLocal
* Postgres: Vanilla Postgres, Supabase, Neon, Xata
* **Frameworks**: React, Next.js, React Router, Astro, Vite, Waku
* **Storage**: AWS S3, S3-compatible (Tigris, R2, Minio, etc.), Cloudflare R2 (binding), Cloudinary, Filesystem
**For documentation and examples, please visit https://docs.bknd.io.**
> [!WARNING]
> This project requires Node.js 22 or higher (because of `node:sqlite`).
>
> Please keep in mind that **bknd** is still under active development
> and therefore full backward compatibility is not guaranteed before reaching v1.0.0.

View File

@@ -1,6 +1,9 @@
import { afterAll, afterEach, describe, expect, test } from "bun:test";
import { App } from "../src";
import { afterEach, describe, test, expect } from "bun:test";
import { App, createApp } from "core/test/utils";
import { getDummyConnection } from "./helper";
import { Hono } from "hono";
import * as proto from "../src/data/prototype";
import { pick } from "lodash-es";
const { dummyConnection, afterAllCleanup } = getDummyConnection();
afterEach(afterAllCleanup);
@@ -10,18 +13,133 @@ describe("App tests", async () => {
const app = new App(dummyConnection);
await app.build();
//expect(await app.data?.em.ping()).toBeTrue();
expect(await app.em.ping()).toBeTrue();
});
/*test.only("what", async () => {
const app = new App(dummyConnection, {
auth: {
enabled: true,
test("plugins", async () => {
const called: string[] = [];
const app = createApp({
initialConfig: {
auth: {
enabled: true,
},
},
options: {
plugins: [
(app) => {
expect(app).toBeDefined();
expect(app).toBeInstanceOf(App);
return {
name: "test",
schema: () => {
called.push("schema");
return proto.em(
{
posts: proto.entity("posts", {
title: proto.text(),
}),
comments: proto.entity("comments", {
content: proto.text(),
}),
users: proto.entity("users", {
email_verified: proto.boolean(),
}),
},
(fn, s) => {
fn.relation(s.comments).manyToOne(s.posts);
fn.index(s.posts).on(["title"]);
},
);
},
onBoot: async () => {
called.push("onBoot");
},
beforeBuild: async () => {
called.push("beforeBuild");
},
onBuilt: async () => {
called.push("onBuilt");
},
onServerInit: async (server) => {
called.push("onServerInit");
expect(server).toBeDefined();
expect(server).toBeInstanceOf(Hono);
},
onFirstBoot: async () => {
called.push("onFirstBoot");
},
};
},
],
},
});
await app.module.auth.build();
await app.module.data.build();
console.log(app.em.entities.map((e) => e.name));
console.log(await app.em.schema().getDiff());
});*/
await app.build();
expect(app.em.entities.map((e) => e.name)).toEqual(["users", "posts", "comments"]);
expect(app.em.indices.map((i) => i.name)).toEqual([
"idx_unique_users_email",
"idx_users_strategy",
"idx_users_strategy_value",
"idx_posts_title",
]);
expect(
app.em.relations.all.map((r) => pick(r.toJSON(), ["type", "source", "target"])),
).toEqual([
{
type: "n:1",
source: "comments",
target: "posts",
},
]);
expect(called).toEqual([
"onBoot",
"onServerInit",
"beforeBuild",
"onServerInit",
"schema",
"onFirstBoot",
"onBuilt",
]);
expect(app.plugins.size).toBe(1);
expect(Array.from(app.plugins.keys())).toEqual(["test"]);
});
test.only("drivers", async () => {
const called: string[] = [];
const app = new App(dummyConnection, undefined, {
drivers: {
email: {
send: async (to, subject, body) => {
called.push("email.send");
return {
id: "",
};
},
},
cache: {
get: async (key) => {
called.push("cache.get");
return "";
},
set: async (key, value, ttl) => {
called.push("cache.set");
},
del: async (key) => {
called.push("cache.del");
},
},
},
});
await app.build();
expect(app.drivers.cache).toBeDefined();
expect(app.drivers.email).toBeDefined();
await app.drivers.email.send("", "", "");
await app.drivers.cache.get("");
await app.drivers.cache.set("", "", 0);
await app.drivers.cache.del("");
expect(called).toEqual(["email.send", "cache.get", "cache.set", "cache.del"]);
});
});

View File

@@ -3,21 +3,36 @@ import * as adapter from "adapter";
import { disableConsoleLog, enableConsoleLog } from "core/utils";
import { adapterTestSuite } from "adapter/adapter-test-suite";
import { bunTestRunner } from "adapter/bun/test";
import { omitKeys } from "core/utils";
beforeAll(disableConsoleLog);
afterAll(enableConsoleLog);
describe("adapter", () => {
it("makes config", () => {
expect(adapter.makeConfig({})).toEqual({});
expect(adapter.makeConfig({}, { env: { TEST: "test" } })).toEqual({});
expect(omitKeys(adapter.makeConfig({}), ["connection"])).toEqual({});
expect(omitKeys(adapter.makeConfig({}, { env: { TEST: "test" } }), ["connection"])).toEqual(
{},
);
// merges everything returned from `app` with the config
expect(adapter.makeConfig({ app: (a) => a as any }, { env: { TEST: "test" } })).toEqual({
env: { TEST: "test" },
} as any);
expect(
omitKeys(
adapter.makeConfig(
{ app: (a) => ({ initialConfig: { server: { cors: { origin: a.env.TEST } } } }) },
{ env: { TEST: "test" } },
),
["connection"],
),
).toEqual({
initialConfig: { server: { cors: { origin: "test" } } },
});
});
/* it.only("...", async () => {
const app = await adapter.createAdapterApp();
}); */
it("reuses apps correctly", async () => {
const id = crypto.randomUUID();

View File

@@ -153,7 +153,7 @@ describe("DataApi", () => {
const oneBy = api.readOneBy("posts", { where: { title: "baz" }, select: ["title"] });
const oneByRes = await oneBy;
expect(oneByRes.data).toEqual({ title: "baz" } as any);
expect(oneByRes.body.meta.count).toEqual(1);
expect(oneByRes.body.meta.items).toEqual(1);
});
it("exists/count", async () => {

View File

@@ -1,6 +1,6 @@
import { describe, expect, mock, test } from "bun:test";
import type { ModuleBuildContext } from "../../src";
import { App, createApp } from "../../src/App";
import { App, createApp } from "core/test/utils";
import * as proto from "../../src/data/prototype";
describe("App", () => {
@@ -20,6 +20,7 @@ describe("App", () => {
"guard",
"flags",
"logger",
"helper",
]);
},
},

View File

@@ -1,5 +1,6 @@
import { describe, expect, test } from "bun:test";
import { createApp, registries } from "../../src";
import { registries } from "../../src";
import { createApp } from "core/test/utils";
import * as proto from "../../src/data/prototype";
import { StorageLocalAdapter } from "adapter/node/storage/StorageLocalAdapter";

View File

@@ -7,13 +7,13 @@ import {
type EntityData,
EntityManager,
ManyToOneRelation,
type MutatorResponse,
type RepositoryResponse,
TextField,
} from "../../src/data";
import { DataController } from "../../src/data/api/DataController";
import { dataConfigSchema } from "../../src/data/data-schema";
import { disableConsoleLog, enableConsoleLog, getDummyConnection } from "../helper";
import type { RepositoryResultJSON } from "data/entities/query/RepositoryResult";
import type { MutatorResultJSON } from "data/entities/mutation/MutatorResult";
const { dummyConnection, afterAllCleanup } = getDummyConnection();
beforeAll(() => disableConsoleLog(["log", "warn"]));
@@ -21,52 +21,6 @@ afterAll(async () => (await afterAllCleanup()) && enableConsoleLog());
const dataConfig = parse(dataConfigSchema, {});
describe("[data] DataController", async () => {
test("repoResult", async () => {
const em = new EntityManager<any>([], dummyConnection);
const ctx: any = { em, guard: new Guard() };
const controller = new DataController(ctx, dataConfig);
const res = controller.repoResult({
entity: null as any,
data: [] as any,
sql: "",
parameters: [] as any,
result: [] as any,
meta: {
total: 0,
count: 0,
items: 0,
},
});
expect(res).toEqual({
meta: {
total: 0,
count: 0,
items: 0,
},
data: [],
});
});
test("mutatorResult", async () => {
const em = new EntityManager([], dummyConnection);
const ctx: any = { em, guard: new Guard() };
const controller = new DataController(ctx, dataConfig);
const res = controller.mutatorResult({
entity: null as any,
data: [] as any,
sql: "",
parameters: [] as any,
result: [] as any,
});
expect(res).toEqual({
data: [],
});
});
describe("getController", async () => {
const users = new Entity("users", [
new TextField("name", { required: true }),
@@ -120,8 +74,7 @@ describe("[data] DataController", async () => {
method: "POST",
body: JSON.stringify(_user),
});
//console.log("res", { _user }, res);
const result = (await res.json()) as MutatorResponse;
const result = (await res.json()) as MutatorResultJSON;
const { id, ...data } = result.data as any;
expect(res.status).toBe(201);
@@ -135,7 +88,7 @@ describe("[data] DataController", async () => {
method: "POST",
body: JSON.stringify(_post),
});
const result = (await res.json()) as MutatorResponse;
const result = (await res.json()) as MutatorResultJSON;
const { id, ...data } = result.data as any;
expect(res.status).toBe(201);
@@ -146,13 +99,13 @@ describe("[data] DataController", async () => {
test("/:entity (read many)", async () => {
const res = await app.request("/entity/users");
const data = (await res.json()) as RepositoryResponse;
const data = (await res.json()) as RepositoryResultJSON;
expect(data.meta.total).toBe(3);
expect(data.meta.count).toBe(3);
//expect(data.meta.total).toBe(3);
//expect(data.meta.count).toBe(3);
expect(data.meta.items).toBe(3);
expect(data.data.length).toBe(3);
expect(data.data[0].name).toBe("foo");
expect(data.data[0]?.name).toBe("foo");
});
test("/:entity/query (func query)", async () => {
@@ -165,33 +118,32 @@ describe("[data] DataController", async () => {
where: { bio: { $isnull: 1 } },
}),
});
const data = (await res.json()) as RepositoryResponse;
const data = (await res.json()) as RepositoryResultJSON;
expect(data.meta.total).toBe(3);
expect(data.meta.count).toBe(1);
//expect(data.meta.total).toBe(3);
//expect(data.meta.count).toBe(1);
expect(data.meta.items).toBe(1);
expect(data.data.length).toBe(1);
expect(data.data[0].name).toBe("bar");
expect(data.data[0]?.name).toBe("bar");
});
test("/:entity (read many, paginated)", async () => {
const res = await app.request("/entity/users?limit=1&offset=2");
const data = (await res.json()) as RepositoryResponse;
const data = (await res.json()) as RepositoryResultJSON;
expect(data.meta.total).toBe(3);
expect(data.meta.count).toBe(3);
//expect(data.meta.total).toBe(3);
//expect(data.meta.count).toBe(3);
expect(data.meta.items).toBe(1);
expect(data.data.length).toBe(1);
expect(data.data[0].name).toBe("baz");
expect(data.data[0]?.name).toBe("baz");
});
test("/:entity/:id (read one)", async () => {
const res = await app.request("/entity/users/3");
const data = (await res.json()) as RepositoryResponse<EntityData>;
console.log("data", data);
const data = (await res.json()) as RepositoryResultJSON<EntityData>;
expect(data.meta.total).toBe(3);
expect(data.meta.count).toBe(1);
//expect(data.meta.total).toBe(3);
//expect(data.meta.count).toBe(1);
expect(data.meta.items).toBe(1);
expect(data.data).toEqual({ id: 3, ...fixtures.users[2] });
});
@@ -201,7 +153,7 @@ describe("[data] DataController", async () => {
method: "PATCH",
body: JSON.stringify({ name: "new name" }),
});
const { data } = (await res.json()) as MutatorResponse;
const { data } = (await res.json()) as MutatorResultJSON;
expect(res.ok).toBe(true);
expect(data as any).toEqual({ id: 3, ...fixtures.users[2], name: "new name" });
@@ -209,27 +161,26 @@ describe("[data] DataController", async () => {
test("/:entity/:id/:reference (read references)", async () => {
const res = await app.request("/entity/users/1/posts");
const data = (await res.json()) as RepositoryResponse;
console.log("data", data);
const data = (await res.json()) as RepositoryResultJSON;
expect(data.meta.total).toBe(2);
expect(data.meta.count).toBe(1);
//expect(data.meta.total).toBe(2);
//expect(data.meta.count).toBe(1);
expect(data.meta.items).toBe(1);
expect(data.data.length).toBe(1);
expect(data.data[0].content).toBe("post 1");
expect(data.data[0]?.content).toBe("post 1");
});
test("/:entity/:id (delete one)", async () => {
const res = await app.request("/entity/posts/2", {
method: "DELETE",
});
const { data } = (await res.json()) as RepositoryResponse<EntityData>;
const { data } = (await res.json()) as RepositoryResultJSON<EntityData>;
expect(data).toEqual({ id: 2, ...fixtures.posts[1] });
// verify
const res2 = await app.request("/entity/posts");
const data2 = (await res2.json()) as RepositoryResponse;
expect(data2.meta.total).toBe(1);
const data2 = (await res2.json()) as RepositoryResultJSON;
//expect(data2.meta.total).toBe(1);
});
});
});

View File

@@ -34,19 +34,12 @@ describe("some tests", async () => {
test("findId", async () => {
const query = await em.repository(users).findId(1);
/*const { result, total, count, time } = query;
console.log("query", query.result, {
result,
total,
count,
time,
});*/
expect(query.sql).toBe(
'select "users"."id" as "id", "users"."username" as "username", "users"."email" as "email" from "users" where "id" = ? limit ?',
);
expect(query.parameters).toEqual([1, 1]);
expect(query.result).toEqual([]);
expect(query.data).toBeUndefined();
});
test("findMany", async () => {
@@ -56,7 +49,7 @@ describe("some tests", async () => {
'select "users"."id" as "id", "users"."username" as "username", "users"."email" as "email" from "users" order by "users"."id" asc limit ? offset ?',
);
expect(query.parameters).toEqual([10, 0]);
expect(query.result).toEqual([]);
expect(query.data).toEqual([]);
});
test("findMany with number", async () => {
@@ -66,7 +59,7 @@ describe("some tests", async () => {
'select "posts"."id" as "id", "posts"."title" as "title", "posts"."content" as "content", "posts"."created_at" as "created_at", "posts"."likes" as "likes" from "posts" order by "posts"."id" asc limit ? offset ?',
);
expect(query.parameters).toEqual([10, 0]);
expect(query.result).toEqual([]);
expect(query.data).toEqual([]);
});
test("try adding an existing field name", async () => {

View File

@@ -45,7 +45,7 @@ describe("Mutator simple", async () => {
},
});
expect(query.result).toEqual([{ id: 1, label: "test", count: 1 }]);
expect(query.data).toEqual([{ id: 1, label: "test", count: 1 }]);
});
test("update inserted row", async () => {
@@ -87,7 +87,7 @@ describe("Mutator simple", async () => {
expect(mutation.data).toEqual({ id, label: "new label", count: 100 });
const query2 = await em.repository(items).findId(id);
expect(query2.result.length).toBe(0);
expect(query2.data).toBeUndefined();
});
test("validation: insert incomplete row", async () => {
@@ -177,13 +177,13 @@ describe("Mutator simple", async () => {
});
test("insertMany", async () => {
const oldCount = (await em.repo(items).count()).count;
const oldCount = (await em.repo(items).count()).data.count;
const inserts = [{ label: "insert 1" }, { label: "insert 2" }];
const { data } = await em.mutator(items).insertMany(inserts);
expect(data.length).toBe(2);
expect(data.map((d) => ({ label: d.label }))).toEqual(inserts);
const newCount = (await em.repo(items).count()).count;
const newCount = (await em.repo(items).count()).data.count;
expect(newCount).toBe(oldCount + inserts.length);
const { data: data2 } = await em.repo(items).findMany({ offset: oldCount });

View File

@@ -1,4 +1,4 @@
import { afterAll, describe, expect, test } from "bun:test";
import { afterAll, beforeAll, describe, expect, test } from "bun:test";
import type { EventManager } from "../../../src/core/events";
import {
Entity,
@@ -12,11 +12,14 @@ import {
TextField,
} from "../../../src/data";
import * as proto from "../../../src/data/prototype";
import { getDummyConnection } from "../helper";
import { getDummyConnection, disableConsoleLog, enableConsoleLog } from "../../helper";
const { dummyConnection, afterAllCleanup } = getDummyConnection();
afterAll(afterAllCleanup);
beforeAll(() => disableConsoleLog(["log", "warn"]));
afterAll(async () => (await afterAllCleanup()) && enableConsoleLog());
describe("[data] Mutator (base)", async () => {
const entity = new Entity("items", [
new TextField("label", { required: true }),

View File

@@ -26,120 +26,6 @@ async function sleep(ms: number) {
}
describe("[Repository]", async () => {
test.skip("bulk", async () => {
//const connection = dummyConnection;
//const connection = getLocalLibsqlConnection();
const credentials = null as any; // @todo: determine what to do here
const connection = new LibsqlConnection(credentials);
const em = new EntityManager([], connection);
/*const emLibsql = new EntityManager([], {
url: connection.url.replace("https", "libsql"),
authToken: connection.authToken,
});*/
const table = "posts";
const client = connection.getClient();
if (!client) {
console.log("Cannot perform test without libsql connection");
return;
}
const conn = em.connection.kysely;
const selectQ = (e: E) => e.selectFrom(table).selectAll().limit(2);
const countQ = (e: E) => e.selectFrom(table).select(e.fn.count("*").as("count"));
async function executeTransaction(em: EntityManager<any>) {
return await em.connection.kysely.transaction().execute(async (e) => {
const res = await selectQ(e).execute();
const count = await countQ(e).execute();
return [res, count];
});
}
async function executeBatch(em: EntityManager<any>) {
const queries = [selectQ(conn), countQ(conn)];
return await em.connection.batchQuery(queries);
}
async function executeSingleKysely(em: EntityManager<any>) {
const res = await selectQ(conn).execute();
const count = await countQ(conn).execute();
return [res, count];
}
async function executeSingleClient(em: EntityManager<any>) {
const q1 = selectQ(conn).compile();
const res = await client.execute({
sql: q1.sql,
args: q1.parameters as any,
});
const q2 = countQ(conn).compile();
const count = await client.execute({
sql: q2.sql,
args: q2.parameters as any,
});
return [res, count];
}
const transaction = await executeTransaction(em);
const batch = await executeBatch(em);
expect(batch).toEqual(transaction as any);
const testperf = false;
if (testperf) {
const times = 5;
const exec = async (
name: string,
fn: (em: EntityManager<any>) => Promise<any>,
em: EntityManager<any>,
) => {
const res = await Perf.execute(() => fn(em), times);
await sleep(1000);
const info = {
name,
total: res.total.toFixed(2),
avg: (res.total / times).toFixed(2),
first: res.marks[0].time.toFixed(2),
last: res.marks[res.marks.length - 1].time.toFixed(2),
};
console.log(info.name, info, res.marks);
return info;
};
const data: any[] = [];
data.push(await exec("transaction.http", executeTransaction, em));
data.push(await exec("bulk.http", executeBatch, em));
data.push(await exec("singleKy.http", executeSingleKysely, em));
data.push(await exec("singleCl.http", executeSingleClient, em));
/*data.push(await exec("transaction.libsql", executeTransaction, emLibsql));
data.push(await exec("bulk.libsql", executeBatch, emLibsql));
data.push(await exec("singleKy.libsql", executeSingleKysely, emLibsql));
data.push(await exec("singleCl.libsql", executeSingleClient, emLibsql));*/
console.table(data);
/**
* ┌───┬────────────────────┬────────┬────────┬────────┬────────┐
* │ │ name │ total │ avg │ first │ last │
* ├───┼────────────────────┼────────┼────────┼────────┼────────┤
* │ 0 │ transaction.http │ 681.29 │ 136.26 │ 136.46 │ 396.09 │
* │ 1 │ bulk.http │ 164.82 │ 32.96 │ 32.95 │ 99.91 │
* │ 2 │ singleKy.http │ 330.01 │ 66.00 │ 65.86 │ 195.41 │
* │ 3 │ singleCl.http │ 326.17 │ 65.23 │ 61.32 │ 198.08 │
* │ 4 │ transaction.libsql │ 856.79 │ 171.36 │ 132.31 │ 595.24 │
* │ 5 │ bulk.libsql │ 180.63 │ 36.13 │ 35.39 │ 107.71 │
* │ 6 │ singleKy.libsql │ 347.11 │ 69.42 │ 65.00 │ 207.14 │
* │ 7 │ singleCl.libsql │ 328.60 │ 65.72 │ 62.19 │ 195.04 │
* └───┴────────────────────┴────────┴────────┴────────┴────────┘
*/
}
});
test("count & exists", async () => {
const items = new Entity("items", [new TextField("label")]);
const em = new EntityManager([items], dummyConnection);
@@ -160,25 +46,44 @@ describe("[Repository]", async () => {
// count all
const res = await em.repository(items).count();
expect(res.sql).toBe('select count(*) as "count" from "items"');
expect(res.count).toBe(3);
expect(res.data.count).toBe(3);
//
{
const res = await em.repository(items).findMany();
expect(res.count).toBe(3);
}
{
const res = await em
.repository(items, {
includeCounts: true,
})
.findMany();
expect(res.count).toBe(3);
}
// count filtered
const res2 = await em.repository(items).count({ label: { $in: ["a", "b"] } });
const res2 = await em
.repository(items, {
includeCounts: true,
})
.count({ label: { $in: ["a", "b"] } });
expect(res2.sql).toBe('select count(*) as "count" from "items" where "label" in (?, ?)');
expect(res2.parameters).toEqual(["a", "b"]);
expect(res2.count).toBe(2);
expect(res2.data.count).toBe(2);
// check exists
const res3 = await em.repository(items).exists({ label: "a" });
expect(res3.exists).toBe(true);
expect(res3.data.exists).toBe(true);
const res4 = await em.repository(items).exists({ label: "d" });
expect(res4.exists).toBe(false);
expect(res4.data.exists).toBe(false);
// for now, allow empty filter
const res5 = await em.repository(items).exists({});
expect(res5.exists).toBe(true);
expect(res5.data.exists).toBe(true);
});
test("option: silent", async () => {
@@ -191,6 +96,9 @@ describe("[Repository]", async () => {
// should throw because table doesn't exist
expect(em.repo("items").findMany({})).rejects.toThrow(/no such table/);
// should silently return empty result
em.repo("items", { silent: true })
.findMany({})
.then((r) => r.data);
expect(
em
.repo("items", { silent: true })
@@ -209,16 +117,16 @@ describe("[Repository]", async () => {
expect(
em
.repo("items")
.repo("items", { includeCounts: true })
.findMany({})
.then((r) => [r.meta.count, r.meta.total]),
.then((r) => [r.count, r.total]),
).resolves.toEqual([0, 0]);
expect(
em
.repo("items", { includeCounts: false })
.findMany({})
.then((r) => [r.meta.count, r.meta.total]),
.then((r) => [r.count, r.total]),
).resolves.toEqual([undefined, undefined]);
});
});

View File

@@ -89,9 +89,9 @@ describe("[data] WithBuilder", async () => {
const res2 = qb2.compile();
expect(res2.sql).toBe(
'select (select json_object(\'id\', "obj"."id", \'username\', "obj"."username") from (select "users"."id" as "id", "users"."username" as "username" from "users" as "author" where "author"."id" = "posts"."author_id" order by "users"."id" asc limit ? offset ?) as obj) as "author" from "posts"',
'select (select json_object(\'id\', "obj"."id", \'username\', "obj"."username") from (select "users"."id" as "id", "users"."username" as "username" from "users" as "author" where "author"."id" = "posts"."author_id" order by "users"."id" asc limit ?) as obj) as "author" from "posts"',
);
expect(res2.parameters).toEqual([1, 0]);
expect(res2.parameters).toEqual([1]);
});
test("test with empty join", async () => {
@@ -194,9 +194,9 @@ describe("[data] WithBuilder", async () => {
);
const res = qb.compile();
expect(res.sql).toBe(
'select (select json_object(\'id\', "obj"."id", \'path\', "obj"."path") from (select "media"."id" as "id", "media"."path" as "path" from "media" where "media"."reference" = ? and "categories"."id" = "media"."entity_id" order by "media"."id" asc limit ? offset ?) as obj) as "single" from "categories"',
'select (select json_object(\'id\', "obj"."id", \'path\', "obj"."path") from (select "media"."id" as "id", "media"."path" as "path" from "media" where "media"."reference" = ? and "categories"."id" = "media"."entity_id" order by "media"."id" asc limit ?) as obj) as "single" from "categories"',
);
expect(res.parameters).toEqual(["categories.single", 1, 0]);
expect(res.parameters).toEqual(["categories.single", 1]);
const qb2 = WithBuilder.addClause(
em,
@@ -273,9 +273,9 @@ describe("[data] WithBuilder", async () => {
//prettyPrintQb(qb);
expect(qb.compile().sql).toBe(
'select (select json_object(\'id\', "obj"."id", \'username\', "obj"."username", \'avatar\', "obj"."avatar") from (select "users"."id" as "id", "users"."username" as "username", (select json_object(\'id\', "obj"."id", \'path\', "obj"."path") from (select "media"."id" as "id", "media"."path" as "path" from "media" where "media"."reference" = ? and "users"."id" = "media"."entity_id" order by "media"."id" asc limit ? offset ?) as obj) as "avatar" from "users" as "users" where "users"."id" = "posts"."users_id" order by "users"."username" asc limit ? offset ?) as obj) as "users" from "posts"',
'select (select json_object(\'id\', "obj"."id", \'username\', "obj"."username", \'avatar\', "obj"."avatar") from (select "users"."id" as "id", "users"."username" as "username", (select json_object(\'id\', "obj"."id", \'path\', "obj"."path") from (select "media"."id" as "id", "media"."path" as "path" from "media" where "media"."reference" = ? and "users"."id" = "media"."entity_id" order by "media"."id" asc limit ?) as obj) as "avatar" from "users" as "users" where "users"."id" = "posts"."users_id" order by "users"."username" asc limit ?) as obj) as "users" from "posts"',
);
expect(qb.compile().parameters).toEqual(["users.avatar", 1, 0, 1, 0]);
expect(qb.compile().parameters).toEqual(["users.avatar", 1, 1]);
});
test("compiles with many", async () => {
@@ -315,9 +315,9 @@ describe("[data] WithBuilder", async () => {
);
expect(qb.compile().sql).toBe(
'select (select coalesce(json_group_array(json_object(\'id\', "agg"."id", \'posts_id\', "agg"."posts_id", \'users_id\', "agg"."users_id", \'users\', "agg"."users")), \'[]\') from (select "comments"."id" as "id", "comments"."posts_id" as "posts_id", "comments"."users_id" as "users_id", (select json_object(\'username\', "obj"."username") from (select "users"."username" as "username" from "users" as "users" where "users"."id" = "comments"."users_id" order by "users"."id" asc limit ? offset ?) as obj) as "users" from "comments" as "comments" where "comments"."posts_id" = "posts"."id" order by "comments"."id" asc limit ? offset ?) as agg) as "comments" from "posts"',
'select (select coalesce(json_group_array(json_object(\'id\', "agg"."id", \'posts_id\', "agg"."posts_id", \'users_id\', "agg"."users_id", \'users\', "agg"."users")), \'[]\') from (select "comments"."id" as "id", "comments"."posts_id" as "posts_id", "comments"."users_id" as "users_id", (select json_object(\'username\', "obj"."username") from (select "users"."username" as "username" from "users" as "users" where "users"."id" = "comments"."users_id" order by "users"."id" asc limit ?) as obj) as "users" from "comments" as "comments" where "comments"."posts_id" = "posts"."id" order by "comments"."id" asc limit ? offset ?) as agg) as "comments" from "posts"',
);
expect(qb.compile().parameters).toEqual([1, 0, 12, 0]);
expect(qb.compile().parameters).toEqual([1, 12, 0]);
});
test("returns correct result", async () => {

View File

@@ -38,14 +38,15 @@ export function getLocalLibsqlConnection() {
return { url: "http://127.0.0.1:8080" };
}
type ConsoleSeverity = "log" | "warn" | "error";
type ConsoleSeverity = "debug" | "log" | "warn" | "error";
const _oldConsoles = {
debug: console.debug,
log: console.log,
warn: console.warn,
error: console.error,
};
export function disableConsoleLog(severities: ConsoleSeverity[] = ["log", "warn"]) {
export function disableConsoleLog(severities: ConsoleSeverity[] = ["debug", "log", "warn"]) {
severities.forEach((severity) => {
console[severity] = () => null;
});

View File

@@ -1,5 +1,5 @@
import { describe, expect, it } from "bun:test";
import { createApp } from "../../src";
import { createApp } from "core/test/utils";
import { Api } from "../../src/Api";
describe("integration config", () => {

View File

@@ -1,7 +1,8 @@
/// <reference types="@types/bun" />
import { afterAll, beforeAll, describe, expect, test } from "bun:test";
import { createApp, registries } from "../../src";
import { registries } from "../../src";
import { createApp } from "core/test/utils";
import { mergeObject, randomString } from "../../src/core/utils";
import type { TAppMediaConfig } from "../../src/media/media-schema";
import { StorageLocalAdapter } from "adapter/node/storage/StorageLocalAdapter";

View File

@@ -1,5 +1,5 @@
import { afterAll, beforeAll, beforeEach, describe, expect, spyOn, test } from "bun:test";
import { createApp } from "../../src";
import { createApp } from "core/test/utils";
import { AuthController } from "../../src/auth/api/AuthController";
import { em, entity, make, text } from "../../src/data";
import { AppAuth, type ModuleBuildContext } from "../../src/modules";

View File

@@ -1,5 +1,6 @@
import { describe, expect, test } from "bun:test";
import { createApp, registries } from "../../src";
import { registries } from "../../src";
import { createApp } from "core/test/utils";
import { em, entity, text } from "../../src/data";
import { StorageLocalAdapter } from "adapter/node/storage/StorageLocalAdapter";
import { AppMedia } from "../../src/modules";

View File

@@ -4,6 +4,7 @@ import { type TSchema, Type } from "@sinclair/typebox";
import { EntityManager, em, entity, index, text } from "../../src/data";
import { DummyConnection } from "../../src/data/connection/DummyConnection";
import { Module } from "../../src/modules/Module";
import { ModuleHelper } from "modules/ModuleHelper";
function createModule<Schema extends TSchema>(schema: Schema) {
class TestModule extends Module<typeof schema> {
@@ -46,9 +47,9 @@ describe("Module", async () => {
}
prt = {
ensureEntity: this.ensureEntity.bind(this),
ensureIndex: this.ensureIndex.bind(this),
ensureSchema: this.ensureSchema.bind(this),
ensureEntity: this.ctx.helper.ensureEntity.bind(this.ctx.helper),
ensureIndex: this.ctx.helper.ensureIndex.bind(this.ctx.helper),
ensureSchema: this.ctx.helper.ensureSchema.bind(this.ctx.helper),
};
get em() {
@@ -63,7 +64,11 @@ describe("Module", async () => {
_em.relations,
_em.indices,
);
return new M({} as any, { em, flags: Module.ctx_flags } as any);
const ctx = {
em,
flags: Module.ctx_flags,
};
return new M({} as any, { ...ctx, helper: new ModuleHelper(ctx as any) } as any);
}
function flat(_em: EntityManager) {
return {
@@ -143,14 +148,9 @@ describe("Module", async () => {
// this should only add the field "important"
m.prt.ensureEntity(
entity(
"u",
{
important: text(),
},
undefined,
"system",
),
entity("u", {
important: text(),
}),
);
expect(m.ctx.flags.sync_required).toBe(true);
@@ -159,8 +159,7 @@ describe("Module", async () => {
{
name: "u",
fields: ["id", "name", "important"],
// ensured type must be present
type: "system",
type: "regular",
},
{
name: "p",

View File

@@ -8,10 +8,11 @@ import { Default, stripMark } from "../../src/core/utils";
import { EntityManager } from "../../src/data";
import { Module, type ModuleBuildContext } from "../../src/modules/Module";
import { getDummyConnection } from "../helper";
import { ModuleHelper } from "modules/ModuleHelper";
export function makeCtx(overrides?: Partial<ModuleBuildContext>): ModuleBuildContext {
const { dummyConnection } = getDummyConnection();
return {
const ctx = {
connection: dummyConnection,
server: new Hono(),
em: new EntityManager([], dummyConnection),
@@ -21,6 +22,10 @@ export function makeCtx(overrides?: Partial<ModuleBuildContext>): ModuleBuildCon
logger: new DebugLogger(false),
...overrides,
};
return {
...ctx,
helper: new ModuleHelper(ctx as any),
} as any;
}
export function moduleTestSuite(module: { new (): Module }) {

View File

@@ -60,7 +60,14 @@ function banner(title: string) {
}
// collection of always-external packages
const external = ["bun:test", "node:test", "node:assert/strict", "@libsql/client"] as const;
const external = [
"bun:test",
"node:test",
"node:assert/strict",
"@libsql/client",
"bknd",
/^bknd\/.*/,
] as const;
/**
* Building backend and general API
@@ -78,6 +85,7 @@ async function buildApi() {
"src/core/utils/index.ts",
"src/data/index.ts",
"src/media/index.ts",
"src/plugins/index.ts",
],
outDir: "dist",
external: [...external],
@@ -225,9 +233,10 @@ function baseConfig(adapter: string, overrides: Partial<tsup.Options> = {}): tsu
},
external: [
/^cloudflare*/,
/^@?(hono|libsql).*?/,
/^@?(hono).*?/,
/^(bknd|react|next|node).*?/,
/.*\.(html)$/,
...external,
...(Array.isArray(overrides.external) ? overrides.external : []),
],
};
@@ -244,14 +253,14 @@ async function buildAdapters() {
// specific adapters
await tsup.build(baseConfig("react-router"));
await tsup.build(baseConfig("bun"));
await tsup.build(baseConfig("astro"));
await tsup.build(baseConfig("aws"));
await tsup.build(
baseConfig("cloudflare", {
external: [/^kysely/],
baseConfig("bun", {
external: [/^bun\:.*/],
}),
);
await tsup.build(baseConfig("astro"));
await tsup.build(baseConfig("aws"));
await tsup.build(baseConfig("cloudflare"));
await tsup.build({
...baseConfig("vite"),
@@ -267,6 +276,29 @@ async function buildAdapters() {
...baseConfig("node"),
platform: "node",
});
await tsup.build({
...baseConfig("sqlite/edge"),
entry: ["src/adapter/sqlite/edge.ts"],
outDir: "dist/adapter/sqlite",
metafile: false,
});
await tsup.build({
...baseConfig("sqlite/node"),
entry: ["src/adapter/sqlite/node.ts"],
outDir: "dist/adapter/sqlite",
platform: "node",
metafile: false,
});
await tsup.build({
...baseConfig("sqlite/bun"),
entry: ["src/adapter/sqlite/bun.ts"],
outDir: "dist/adapter/sqlite",
metafile: false,
external: [/^bun\:.*/],
});
}
await buildApi();

View File

@@ -3,7 +3,7 @@
"type": "module",
"sideEffects": false,
"bin": "./dist/cli/index.js",
"version": "0.14.0",
"version": "0.15.0-rc.10",
"description": "Lightweight Firebase/Supabase alternative built to run anywhere — incl. Next.js, React Router, Astro, Cloudflare, Bun, Node, AWS Lambda & more.",
"homepage": "https://bknd.io",
"repository": {
@@ -13,6 +13,9 @@
"bugs": {
"url": "https://github.com/bknd-io/bknd/issues"
},
"engines": {
"node": ">=22"
},
"scripts": {
"dev": "BKND_CLI_LOG_LEVEL=debug vite",
"build": "NODE_ENV=production bun run build.ts --minify --types",
@@ -31,11 +34,9 @@
"test": "ALL_TESTS=1 bun test --bail",
"test:all": "bun run test && bun run test:node",
"test:bun": "ALL_TESTS=1 bun test --bail",
"test:node": "tsx --test $(find . -type f -name '*.native-spec.ts')",
"test:node": "vitest run",
"test:adapters": "bun test src/adapter/**/*.adapter.spec.ts --bail",
"test:coverage": "ALL_TESTS=1 bun test --bail --coverage",
"test:vitest": "vitest run",
"test:vitest:watch": "vitest",
"test:vitest:coverage": "vitest run --coverage",
"test:e2e": "playwright test",
"test:e2e:adapters": "bun run e2e/adapters.ts",
@@ -50,7 +51,6 @@
"@codemirror/lang-json": "^6.0.1",
"@hello-pangea/dnd": "^18.0.1",
"@hono/swagger-ui": "^0.5.1",
"@libsql/client": "^0.15.2",
"@mantine/core": "^7.17.1",
"@mantine/hooks": "^7.17.1",
"@sinclair/typebox": "0.34.30",
@@ -61,11 +61,12 @@
"bcryptjs": "^3.0.2",
"dayjs": "^1.11.13",
"fast-xml-parser": "^5.0.8",
"hono": "^4.7.11",
"json-schema-form-react": "^0.0.2",
"json-schema-library": "10.0.0-rc7",
"json-schema-to-ts": "^3.1.1",
"jsonv-ts": "^0.1.0",
"kysely": "^0.27.6",
"hono": "^4.7.11",
"lodash-es": "^4.17.21",
"oauth4webapi": "^2.11.1",
"object-path-immutable": "^4.1.2",
@@ -75,11 +76,13 @@
"devDependencies": {
"@aws-sdk/client-s3": "^3.758.0",
"@bluwy/giget-core": "^0.1.2",
"@cloudflare/vitest-pool-workers": "^0.8.38",
"@cloudflare/workers-types": "^4.20250606.0",
"@dagrejs/dagre": "^1.1.4",
"@hono/typebox-validator": "^0.3.3",
"@hono/vite-dev-server": "^0.19.1",
"@hookform/resolvers": "^4.1.3",
"@libsql/kysely-libsql": "^0.4.1",
"@libsql/client": "^0.15.9",
"@mantine/modals": "^7.17.1",
"@mantine/notifications": "^7.17.1",
"@playwright/test": "^1.51.1",
@@ -99,8 +102,9 @@
"dotenv": "^16.4.7",
"jotai": "^2.12.2",
"jsdom": "^26.0.0",
"jsonv-ts": "^0.1.0",
"kysely-d1": "^0.3.0",
"kysely-generic-sqlite": "^1.2.1",
"libsql-stateless-easy": "^1.8.0",
"open": "^10.1.0",
"openapi-types": "^12.1.3",
"picocolors": "^1.1.1",
@@ -124,8 +128,7 @@
"vite": "^6.3.5",
"vite-tsconfig-paths": "^5.1.4",
"vitest": "^3.0.9",
"wouter": "^3.6.0",
"@cloudflare/workers-types": "^4.20250606.0"
"wouter": "^3.6.0"
},
"optionalDependencies": {
"@hono/node-server": "^1.14.3"
@@ -183,6 +186,25 @@
"import": "./dist/media/index.js",
"require": "./dist/media/index.js"
},
"./plugins": {
"types": "./dist/types/plugins/index.d.ts",
"import": "./dist/plugins/index.js",
"require": "./dist/plugins/index.js"
},
"./adapter/sqlite": {
"types": "./dist/types/adapter/sqlite/edge.d.ts",
"import": {
"workerd": "./dist/adapter/sqlite/edge.js",
"edge-light": "./dist/adapter/sqlite/edge.js",
"netlify": "./dist/adapter/sqlite/edge.js",
"vercel": "./dist/adapter/sqlite/edge.js",
"browser": "./dist/adapter/sqlite/edge.js",
"bun": "./dist/adapter/sqlite/bun.js",
"node": "./dist/adapter/sqlite/node.js",
"default": "./dist/adapter/sqlite/node.js"
},
"require": "./dist/adapter/sqlite/node.js"
},
"./adapter/cloudflare": {
"types": "./dist/types/adapter/cloudflare/index.d.ts",
"import": "./dist/adapter/cloudflare/index.js",
@@ -231,6 +253,24 @@
"./dist/styles.css": "./dist/ui/styles.css",
"./dist/manifest.json": "./dist/static/.vite/manifest.json"
},
"typesVersions": {
"*": {
"data": ["./dist/types/data/index.d.ts"],
"core": ["./dist/types/core/index.d.ts"],
"utils": ["./dist/types/core/utils/index.d.ts"],
"cli": ["./dist/types/cli/index.d.ts"],
"media": ["./dist/types/media/index.d.ts"],
"plugins": ["./dist/types/plugins/index.d.ts"],
"adapter": ["./dist/types/adapter/index.d.ts"],
"adapter/cloudflare": ["./dist/types/adapter/cloudflare/index.d.ts"],
"adapter/vite": ["./dist/types/adapter/vite/index.d.ts"],
"adapter/nextjs": ["./dist/types/adapter/nextjs/index.d.ts"],
"adapter/react-router": ["./dist/types/adapter/react-router/index.d.ts"],
"adapter/bun": ["./dist/types/adapter/bun/index.d.ts"],
"adapter/node": ["./dist/types/adapter/node/index.d.ts"],
"adapter/sqlite": ["./dist/types/adapter/sqlite/edge.d.ts"]
}
},
"publishConfig": {
"access": "public"
},

View File

@@ -1,7 +1,8 @@
import type { CreateUserPayload } from "auth/AppAuth";
import { $console } from "core";
import { $console } from "core/utils";
import { Event } from "core/events";
import { Connection, type LibSqlCredentials, LibsqlConnection } from "data";
import type { em as prototypeEm } from "data/prototype";
import { Connection } from "data/connection/Connection";
import type { Hono } from "hono";
import {
ModuleManager,
@@ -14,15 +15,29 @@ import {
import * as SystemPermissions from "modules/permissions";
import { AdminController, type AdminControllerOptions } from "modules/server/AdminController";
import { SystemController } from "modules/server/SystemController";
import type { MaybePromise } from "core/types";
import type { ServerEnv } from "modules/Controller";
import type { IEmailDriver, ICacheDriver } from "core/drivers";
// biome-ignore format: must be here
import { Api, type ApiOptions } from "Api";
import type { ServerEnv } from "modules/Controller";
export type AppPlugin = (app: App) => Promise<void> | void;
export type AppPluginConfig = {
name: string;
schema?: () => MaybePromise<ReturnType<typeof prototypeEm> | void>;
beforeBuild?: () => MaybePromise<void>;
onBuilt?: () => MaybePromise<void>;
onServerInit?: (server: Hono<ServerEnv>) => MaybePromise<void>;
onFirstBoot?: () => MaybePromise<void>;
onBoot?: () => MaybePromise<void>;
};
export type AppPlugin = (app: App) => AppPluginConfig;
abstract class AppEvent<A = {}> extends Event<{ app: App } & A> {}
export class AppConfigUpdatedEvent extends AppEvent {
export class AppConfigUpdatedEvent extends AppEvent<{
module: string;
config: ModuleConfigs[keyof ModuleConfigs];
}> {
static override slug = "app-config-updated";
}
export class AppBuiltEvent extends AppEvent {
@@ -50,16 +65,13 @@ export type AppOptions = {
seed?: (ctx: ModuleBuildContext & { app: App }) => Promise<void>;
manager?: Omit<ModuleManagerOptions, "initial" | "onUpdated" | "seed">;
asyncEventsMode?: "sync" | "async" | "none";
drivers?: {
email?: IEmailDriver;
cache?: ICacheDriver;
};
};
export type CreateAppConfig = {
connection?:
| Connection
| {
// @deprecated
type: "libsql";
config: LibSqlCredentials;
}
| LibSqlCredentials;
connection?: Connection | { url: string };
initialConfig?: InitialModuleConfigs;
options?: AppOptions;
};
@@ -67,29 +79,40 @@ export type CreateAppConfig = {
export type AppConfig = InitialModuleConfigs;
export type LocalApiOptions = Request | ApiOptions;
export class App {
export class App<C extends Connection = Connection, Options extends AppOptions = AppOptions> {
static readonly Events = AppEvents;
modules: ModuleManager;
adminController?: AdminController;
_id: string = crypto.randomUUID();
plugins: Map<string, AppPluginConfig> = new Map();
drivers: Options["drivers"] = {};
private trigger_first_boot = false;
private plugins: AppPlugin[];
private _building: boolean = false;
constructor(
private connection: Connection,
public connection: C,
_initialConfig?: InitialModuleConfigs,
private options?: AppOptions,
private options?: Options,
) {
this.plugins = options?.plugins ?? [];
this.drivers = options?.drivers ?? {};
for (const plugin of options?.plugins ?? []) {
const config = plugin(this);
if (this.plugins.has(config.name)) {
throw new Error(`Plugin ${config.name} already registered`);
}
this.plugins.set(config.name, config);
}
this.runPlugins("onBoot");
this.modules = new ModuleManager(connection, {
...(options?.manager ?? {}),
initial: _initialConfig,
onUpdated: this.onUpdated.bind(this),
onFirstBoot: this.onFirstBoot.bind(this),
onServerInit: this.onServerInit.bind(this),
onModulesBuilt: this.onModulesBuilt.bind(this),
});
this.modules.ctx().emgr.registerEvents(AppEvents);
}
@@ -98,6 +121,32 @@ export class App {
return this.modules.ctx().emgr;
}
/**
 * Invokes the lifecycle hook `key` on every registered plugin, in registration
 * order, forwarding `args` to each hook.
 *
 * A plugin that throws is logged as a warning and skipped — one faulty plugin
 * must not break the app lifecycle — and simply contributes no result entry.
 *
 * @param key  name of the AppPluginConfig hook to run (e.g. "onBuilt", "schema")
 * @param args arguments passed through to each plugin's hook function
 * @returns one `{ name, result }` entry per plugin that defined the hook
 */
protected async runPlugins<Key extends keyof AppPluginConfig>(
   key: Key,
   ...args: any[]
): Promise<{ name: string; result: any }[]> {
   const results: { name: string; result: any }[] = [];
   for (const [name, config] of this.plugins) {
      try {
         // hooks are optional: only invoke when the plugin actually defined one
         if (key in config && config[key]) {
            const fn = config[key];
            if (fn && typeof fn === "function") {
               $console.debug(`[Plugin:${name}] ${key}`);
               // @ts-expect-error
               const result = await fn(...args);
               results.push({
                  name,
                  result,
               });
            }
         }
      } catch (e) {
         // deliberately swallow plugin errors: surface as a warning and continue
         $console.warn(`[Plugin:${name}] error running "${key}"`, String(e));
      }
   }
   return results as any;
}
async build(options?: { sync?: boolean; fetch?: boolean; forceBuild?: boolean }) {
// prevent multiple concurrent builds
if (this._building) {
@@ -106,6 +155,8 @@ export class App {
}
if (!options?.forceBuild) return;
}
await this.runPlugins("beforeBuild");
this._building = true;
if (options?.sync) this.modules.ctx().flags.sync_required = true;
@@ -117,13 +168,10 @@ export class App {
guard.registerPermissions(Object.values(SystemPermissions));
server.route("/api/system", new SystemController(this).getController());
// load plugins
if (this.plugins.length > 0) {
await Promise.all(this.plugins.map((plugin) => plugin(this)));
}
// emit built event
$console.log("App built");
await this.emgr.emit(new AppBuiltEvent({ app: this }));
await this.runPlugins("onBuilt");
// first boot is set from ModuleManager when there wasn't a config table
if (this.trigger_first_boot) {
@@ -220,15 +268,16 @@ export class App {
$console.log("App config updated", module);
// @todo: potentially double syncing
await this.build({ sync: true });
await this.emgr.emit(new AppConfigUpdatedEvent({ app: this }));
await this.emgr.emit(new AppConfigUpdatedEvent({ app: this, module, config }));
}
async onFirstBoot() {
protected async onFirstBoot() {
$console.log("App first boot");
this.trigger_first_boot = true;
await this.runPlugins("onFirstBoot");
}
async onServerInit(server: Hono<ServerEnv>) {
protected async onServerInit(server: Hono<ServerEnv>) {
server.use(async (c, next) => {
c.set("app", this);
await this.emgr.emit(new AppRequest({ app: this, request: c.req.raw }));
@@ -258,35 +307,30 @@ export class App {
if (this.options?.manager?.onServerInit) {
this.options.manager.onServerInit(server);
}
await this.runPlugins("onServerInit", server);
}
protected async onModulesBuilt(ctx: ModuleBuildContext) {
const results = (await this.runPlugins("schema")) as {
name: string;
result: ReturnType<typeof prototypeEm>;
}[];
if (results.length > 0) {
for (const { name, result } of results) {
if (result) {
$console.log(`[Plugin:${name}] schema`);
ctx.helper.ensureSchema(result);
}
}
}
}
}
export function createApp(config: CreateAppConfig = {}) {
let connection: Connection | undefined = undefined;
try {
if (Connection.isConnection(config.connection)) {
connection = config.connection;
} else if (typeof config.connection === "object") {
if ("type" in config.connection) {
$console.warn(
"Using deprecated connection type 'libsql', use the 'config' object directly.",
);
connection = new LibsqlConnection(config.connection.config);
} else {
connection = new LibsqlConnection(config.connection);
}
} else {
connection = new LibsqlConnection({ url: ":memory:" });
$console.warn("No connection provided, using in-memory database");
}
} catch (e) {
$console.error("Could not create connection", e);
}
if (!connection) {
if (!config.connection || !Connection.isConnection(config.connection)) {
throw new Error("Invalid connection");
}
return new App(connection, config.initialConfig, config.options);
return new App(config.connection, config.initialConfig, config.options);
}

View File

@@ -2,10 +2,11 @@
import path from "node:path";
import { type RuntimeBkndConfig, createRuntimeApp, type RuntimeOptions } from "bknd/adapter";
import { registerLocalMediaAdapter } from "bknd/adapter/node";
import { registerLocalMediaAdapter } from ".";
import { config } from "bknd/core";
import type { ServeOptions } from "bun";
import { serveStatic } from "hono/bun";
import type { App } from "App";
type BunEnv = Bun.Env;
export type BunBkndConfig<Env = BunEnv> = RuntimeBkndConfig<Env> & Omit<ServeOptions, "fetch">;
@@ -33,8 +34,11 @@ export function createHandler<Env = BunEnv>(
args: Env = {} as Env,
opts?: RuntimeOptions,
) {
let app: App | undefined;
return async (req: Request) => {
const app = await createApp(config, args ?? (process.env as Env), opts);
if (!app) {
app = await createApp(config, args ?? (process.env as Env), opts);
}
return app.fetch(req);
};
}
@@ -72,5 +76,5 @@ export function serve<Env = BunEnv>(
),
});
console.log(`Server is running on http://localhost:${port}`);
console.info(`Server is running on http://localhost:${port}`);
}

View File

@@ -0,0 +1,15 @@
import { connectionTestSuite } from "data/connection/connection-test-suite";
import { bunSqlite } from "./BunSqliteConnection";
import { bunTestRunner } from "adapter/bun/test";
import { describe } from "bun:test";
import { Database } from "bun:sqlite";
describe("BunSqliteConnection", () => {
connectionTestSuite(bunTestRunner, {
makeConnection: () => ({
connection: bunSqlite({ database: new Database(":memory:") }),
dispose: async () => {},
}),
rawDialectDetails: [],
});
});

View File

@@ -0,0 +1,40 @@
import { Database } from "bun:sqlite";
import { genericSqlite, type GenericSqliteConnection } from "bknd/data";
export type BunSqliteConnection = GenericSqliteConnection<Database>;

export type BunSqliteConnectionConfig = {
   database: Database;
};

/**
 * Creates a bknd connection backed by Bun's built-in SQLite driver.
 *
 * Accepts an existing `bun:sqlite` Database instance, a `{ url }` pointing at
 * a database file, or nothing at all — in which case an in-memory database
 * is used.
 */
export function bunSqlite(config?: BunSqliteConnectionConfig | { url: string }) {
   let db: Database;
   if (!config) {
      db = new Database(":memory:");
   } else if ("database" in config) {
      db = config.database;
   } else {
      db = new Database(config.url);
   }

   return genericSqlite("bun-sqlite", db, (utils) => ({
      db,
      query: utils.buildQueryFn({
         all: (sql, parameters) => db.prepare(sql).all(...(parameters || [])),
         run: (sql, parameters) => {
            const result = db.prepare(sql).run(...(parameters || []));
            return {
               insertId: utils.parseBigInt(result.lastInsertRowid),
               numAffectedRows: utils.parseBigInt(result.changes),
            };
         },
      }),
      close: () => db.close(),
   }));
}

View File

@@ -1 +1,3 @@
export * from "./bun.adapter";
export * from "../node/storage";
export * from "./connection/BunSqliteConnection";

View File

@@ -1,7 +1,11 @@
import { expect, test, mock } from "bun:test";
import { expect, test, mock, describe, beforeEach, afterEach, afterAll } from "bun:test";
// Test-runner facade exposing the subset of "bun:test" primitives that the
// shared suites (e.g. connectionTestSuite / adapterTestSuite) consume. Suites
// take a runner object like this so the same suite code can execute under
// different harnesses (a vitest-backed counterpart exists for node).
export const bunTestRunner = {
   describe,
   expect,
   test,
   mock,
   beforeEach,
   afterEach,
   afterAll,
};

View File

@@ -13,30 +13,32 @@ describe("cf adapter", () => {
const DB_URL = ":memory:";
const $ctx = (env?: any, request?: Request, ctx?: ExecutionContext) => ({
request: request ?? (null as any),
env: env ?? { DB_URL },
env: env ?? { url: DB_URL },
ctx: ctx ?? (null as any),
});
it("makes config", async () => {
expect(
makeConfig(
{
connection: { url: DB_URL },
},
$ctx({ DB_URL }),
),
).toEqual({ connection: { url: DB_URL } });
const staticConfig = makeConfig(
{
connection: { url: DB_URL },
initialConfig: { data: { basepath: DB_URL } },
},
$ctx({ DB_URL }),
);
expect(staticConfig.initialConfig).toEqual({ data: { basepath: DB_URL } });
expect(staticConfig.connection).toBeDefined();
expect(
makeConfig(
{
app: (env) => ({
connection: { url: env.DB_URL },
}),
},
$ctx({ DB_URL }),
),
).toEqual({ connection: { url: DB_URL } });
const dynamicConfig = makeConfig(
{
app: (env) => ({
initialConfig: { data: { basepath: env.DB_URL } },
connection: { url: env.DB_URL },
}),
},
$ctx({ DB_URL }),
);
expect(dynamicConfig.initialConfig).toEqual({ data: { basepath: DB_URL } });
expect(dynamicConfig.connection).toBeDefined();
});
adapterTestSuite<CloudflareBkndConfig, CfMakeConfigArgs<any>>(bunTestRunner, {

View File

@@ -7,7 +7,7 @@ import { getFresh } from "./modes/fresh";
import { getCached } from "./modes/cached";
import { getDurable } from "./modes/durable";
import type { App } from "bknd";
import { $console } from "core";
import { $console } from "core/utils";
declare global {
namespace Cloudflare {
@@ -33,6 +33,7 @@ export type CloudflareBkndConfig<Env = CloudflareEnv> = RuntimeBkndConfig<Env> &
keepAliveSeconds?: number;
forceHttps?: boolean;
manifest?: string;
registerMedia?: boolean | ((env: Env) => void);
};
export type Context<Env = CloudflareEnv> = {

View File

@@ -2,13 +2,15 @@
import { registerMedia } from "./storage/StorageR2Adapter";
import { getBinding } from "./bindings";
import { D1Connection } from "./connection/D1Connection";
import { d1Sqlite } from "./connection/D1Connection";
import { Connection } from "bknd/data";
import type { CloudflareBkndConfig, CloudflareEnv } from ".";
import { App } from "bknd";
import { makeConfig as makeAdapterConfig } from "bknd/adapter";
import type { Context, ExecutionContext } from "hono";
import { $console } from "core";
import { $console } from "core/utils";
import { setCookie } from "hono/cookie";
import { sqlite } from "bknd/adapter/sqlite";
export const constants = {
exec_async_event_id: "cf_register_waituntil",
@@ -91,61 +93,84 @@ export function makeConfig<Env extends CloudflareEnv = CloudflareEnv>(
config: CloudflareBkndConfig<Env>,
args?: CfMakeConfigArgs<Env>,
) {
if (!media_registered) {
registerMedia(args?.env as any);
if (!media_registered && config.registerMedia !== false) {
if (typeof config.registerMedia === "function") {
config.registerMedia(args?.env as any);
} else {
registerMedia(args?.env as any);
}
media_registered = true;
}
const appConfig = makeAdapterConfig(config, args?.env);
if (args?.env) {
const bindings = config.bindings?.(args?.env);
// if connection instance is given, don't do anything
// other than checking if D1 session is defined
if (Connection.isConnection(appConfig.connection)) {
if (config.d1?.session) {
// we cannot guarantee that db was opened with session
throw new Error(
"D1 session don't work when D1 is directly given as connection. Define it in bindings instead.",
);
}
// if connection is given, try to open with unified sqlite adapter
} else if (appConfig.connection) {
appConfig.connection = sqlite(appConfig.connection);
// if connection is not given, but env is set
// try to make D1 from bindings
} else if (args?.env) {
const bindings = config.bindings?.(args?.env);
const sessionHelper = d1SessionHelper(config);
const sessionId = sessionHelper.get(args.request);
let session: D1DatabaseSession | undefined;
let db: D1Database | undefined;
if (!appConfig.connection) {
let db: D1Database | undefined;
if (bindings?.db) {
$console.log("Using database from bindings");
db = bindings.db;
} else if (Object.keys(args).length > 0) {
const binding = getBinding(args.env, "D1Database");
if (binding) {
$console.log(`Using database from env "${binding.key}"`);
db = binding.value;
}
}
// if db is given in bindings, use it
if (bindings?.db) {
$console.debug("Using database from bindings");
db = bindings.db;
if (db) {
if (config.d1?.session) {
session = db.withSession(sessionId ?? config.d1?.first);
appConfig.connection = new D1Connection({ binding: session });
} else {
appConfig.connection = new D1Connection({ binding: db });
}
} else {
throw new Error("No database connection given");
// scan for D1Database in args
} else {
const binding = getBinding(args.env, "D1Database");
if (binding) {
$console.debug(`Using database from env "${binding.key}"`);
db = binding.value;
}
}
if (config.d1?.session) {
appConfig.options = {
...appConfig.options,
manager: {
...appConfig.options?.manager,
onServerInit: (server) => {
server.use(async (c, next) => {
sessionHelper.set(c, session);
await next();
});
// if db is found, check if session is requested
if (db) {
if (config.d1?.session) {
session = db.withSession(sessionId ?? config.d1?.first);
if (!session) {
throw new Error("Couldn't create session");
}
appConfig.connection = d1Sqlite({ binding: session });
appConfig.options = {
...appConfig.options,
manager: {
...appConfig.options?.manager,
onServerInit: (server) => {
server.use(async (c, next) => {
sessionHelper.set(c, session);
await next();
});
},
},
},
};
};
} else {
appConfig.connection = d1Sqlite({ binding: db });
}
}
}
if (!Connection.isConnection(appConfig.connection)) {
throw new Error("Couldn't find database connection");
}
return appConfig;
}

View File

@@ -1,65 +1,75 @@
/// <reference types="@cloudflare/workers-types" />
import { KyselyPluginRunner, SqliteConnection, SqliteIntrospector } from "bknd/data";
import type { QB } from "data/connection/Connection";
import { type DatabaseIntrospector, Kysely, ParseJSONResultsPlugin } from "kysely";
import { D1Dialect } from "kysely-d1";
import { genericSqlite, type GenericSqliteConnection } from "bknd/data";
import type { QueryResult } from "kysely";
export type D1SqliteConnection = GenericSqliteConnection<D1Database>;
export type D1ConnectionConfig<DB extends D1Database | D1DatabaseSession = D1Database> = {
binding: DB;
};
class CustomD1Dialect extends D1Dialect {
override createIntrospector(db: Kysely<any>): DatabaseIntrospector {
return new SqliteIntrospector(db, {
export function d1Sqlite<DB extends D1Database | D1DatabaseSession = D1Database>(
config: D1ConnectionConfig<DB>,
) {
const db = config.binding;
return genericSqlite(
"d1-sqlite",
db,
(utils) => {
const getStmt = (sql: string, parameters?: any[] | readonly any[]) =>
db.prepare(sql).bind(...(parameters || []));
const mapResult = (res: D1Result<any>): QueryResult<any> => {
if (res.error) {
throw new Error(res.error);
}
const numAffectedRows =
res.meta.changes > 0 ? utils.parseBigInt(res.meta.changes) : undefined;
const insertId = res.meta.last_row_id
? utils.parseBigInt(res.meta.last_row_id)
: undefined;
return {
insertId,
numAffectedRows,
rows: res.results,
// @ts-ignore
meta: res.meta,
};
};
return {
db,
batch: async (stmts) => {
const res = await db.batch(
stmts.map(({ sql, parameters }) => {
return getStmt(sql, parameters);
}),
);
return res.map(mapResult);
},
query: utils.buildQueryFn({
all: async (sql, parameters) => {
const prep = getStmt(sql, parameters);
return mapResult(await prep.all()).rows;
},
run: async (sql, parameters) => {
const prep = getStmt(sql, parameters);
return mapResult(await prep.run());
},
}),
close: () => {},
};
},
{
supports: {
batching: true,
softscans: false,
},
excludeTables: ["_cf_KV", "_cf_METADATA"],
});
}
}
export class D1Connection<
DB extends D1Database | D1DatabaseSession = D1Database,
> extends SqliteConnection {
protected override readonly supported = {
batching: true,
};
constructor(private config: D1ConnectionConfig<DB>) {
const plugins = [new ParseJSONResultsPlugin()];
const kysely = new Kysely({
dialect: new CustomD1Dialect({ database: config.binding as D1Database }),
plugins,
});
super(kysely, {}, plugins);
}
get client(): DB {
return this.config.binding;
}
protected override async batch<Queries extends QB[]>(
queries: [...Queries],
): Promise<{
[K in keyof Queries]: Awaited<ReturnType<Queries[K]["execute"]>>;
}> {
const db = this.config.binding;
const res = await db.batch(
queries.map((q) => {
const { sql, parameters } = q.compile();
return db.prepare(sql).bind(...parameters);
}),
);
// let it run through plugins
const kyselyPlugins = new KyselyPluginRunner(this.plugins);
const data: any = [];
for (const r of res) {
const rows = await kyselyPlugins.transformResultRows(r.results);
data.push(rows);
}
return data;
}
},
);
}

View File

@@ -0,0 +1,33 @@
import { describe, test, expect } from "vitest";
import { viTestRunner } from "adapter/node/vitest";
import { connectionTestSuite } from "data/connection/connection-test-suite";
import { Miniflare } from "miniflare";
import { d1Sqlite } from "./D1Connection";
describe("d1Sqlite", async () => {
connectionTestSuite(viTestRunner, {
makeConnection: async () => {
const mf = new Miniflare({
modules: true,
script: "export default { async fetch() { return new Response(null); } }",
d1Databases: ["DB"],
});
const binding = (await mf.getD1Database("DB")) as D1Database;
return {
connection: d1Sqlite({ binding }),
dispose: () => mf.dispose(),
};
},
rawDialectDetails: [
"meta.served_by",
"meta.duration",
"meta.changes",
"meta.changed_db",
"meta.size_after",
"meta.rows_read",
"meta.rows_written",
],
});
});

View File

@@ -0,0 +1,80 @@
/// <reference types="@cloudflare/workers-types" />
import { genericSqlite, type GenericSqliteConnection } from "bknd/data";
import type { QueryResult } from "kysely";
export type D1SqliteConnection = GenericSqliteConnection<D1Database>;
export type DurableObjecSql = DurableObjectState["storage"]["sql"];
export type D1ConnectionConfig<DB extends DurableObjecSql> =
| DurableObjectState
| {
sql: DB;
};
export function doSqlite<DB extends DurableObjecSql>(config: D1ConnectionConfig<DB>) {
const db = "sql" in config ? config.sql : config.storage.sql;
return genericSqlite(
"do-sqlite",
db,
(utils) => {
// must be async to work with the miniflare mock
const getStmt = async (sql: string, parameters?: any[] | readonly any[]) =>
await db.exec(sql, ...(parameters || []));
const mapResult = (
cursor: SqlStorageCursor<Record<string, SqlStorageValue>>,
): QueryResult<any> => {
const numAffectedRows =
cursor.rowsWritten > 0 ? utils.parseBigInt(cursor.rowsWritten) : undefined;
const insertId = undefined;
const obj = {
insertId,
numAffectedRows,
rows: cursor.toArray() || [],
// @ts-ignore
meta: {
rowsWritten: cursor.rowsWritten,
rowsRead: cursor.rowsRead,
databaseSize: db.databaseSize,
},
};
//console.info("mapResult", obj);
return obj;
};
return {
db,
batch: async (stmts) => {
// @todo: maybe wrap in a transaction?
// because d1 implicitly does a transaction on batch
return Promise.all(
stmts.map(async (stmt) => {
return mapResult(await getStmt(stmt.sql, stmt.parameters));
}),
);
},
query: utils.buildQueryFn({
all: async (sql, parameters) => {
const prep = getStmt(sql, parameters);
return mapResult(await prep).rows;
},
run: async (sql, parameters) => {
const prep = getStmt(sql, parameters);
return mapResult(await prep);
},
}),
close: () => {},
};
},
{
supports: {
batching: true,
softscans: false,
},
excludeTables: ["_cf_KV", "_cf_METADATA"],
},
);
}

View File

@@ -0,0 +1,92 @@
/// <reference types="@cloudflare/workers-types" />
import { describe, test, expect } from "vitest";
import { viTestRunner } from "adapter/node/vitest";
import { connectionTestSuite } from "data/connection/connection-test-suite";
import { Miniflare } from "miniflare";
import { doSqlite } from "./DoConnection";
const script = `
import { DurableObject } from "cloudflare:workers";
export class TestObject extends DurableObject {
constructor(ctx, env) {
super(ctx, env);
this.storage = ctx.storage;
}
async exec(sql, ...parameters) {
//return { sql, parameters }
const cursor = this.storage.sql.exec(sql, ...parameters);
return {
rows: cursor.toArray() || [],
rowsWritten: cursor.rowsWritten,
rowsRead: cursor.rowsRead,
databaseSize: this.storage.sql.databaseSize,
}
}
async databaseSize() {
return this.storage.sql.databaseSize;
}
}
export default {
async fetch(request, env) {
const stub = env.TEST_OBJECT.get(env.TEST_OBJECT.idFromName("test"));
return stub.fetch(request);
}
}
`;
describe("doSqlite", async () => {
connectionTestSuite(viTestRunner, {
makeConnection: async () => {
const mf = new Miniflare({
modules: true,
durableObjects: { TEST_OBJECT: { className: "TestObject", useSQLite: true } },
script,
});
const ns = await mf.getDurableObjectNamespace("TEST_OBJECT");
const id = ns.idFromName("test");
const stub = ns.get(id) as unknown as DurableObjectStub<
Rpc.DurableObjectBranded & {
exec: (sql: string, ...parameters: any[]) => Promise<any>;
}
>;
const stubs: any[] = [];
const mock = {
databaseSize: 0,
exec: async function (sql: string, ...parameters: any[]) {
// @ts-ignore
const result = (await stub.exec(sql, ...parameters)) as any;
this.databaseSize = result.databaseSize;
stubs.push(result);
return {
toArray: () => result.rows,
rowsWritten: result.rowsWritten,
rowsRead: result.rowsRead,
};
},
};
return {
connection: doSqlite({ sql: mock as any }),
dispose: async () => {
await Promise.all(
stubs.map((stub) => {
try {
return stub[Symbol.dispose]();
} catch (e) {}
}),
);
await mf.dispose();
},
};
},
rawDialectDetails: ["meta.rowsWritten", "meta.rowsRead", "meta.databaseSize"],
});
});

View File

@@ -0,0 +1,45 @@
import type { ICacheDriver } from "core/drivers";
interface WorkersKVCacheOptions {
   /** default time-to-live in seconds */
   defaultTTL?: number;
   /** prefix applied to every cache key */
   cachePrefix?: string;
}

/**
 * ICacheDriver implementation on top of a Cloudflare Workers KV namespace.
 * Keys are namespaced with an optional prefix; when a TTL is set it is
 * clamped to a 60-second minimum before being passed to KV.
 */
export class WorkersKVCacheDriver implements ICacheDriver {
   protected readonly kv: KVNamespace;
   protected readonly defaultTTL?: number;
   protected readonly cachePrefix: string;

   constructor(kv: KVNamespace, options: WorkersKVCacheOptions = {}) {
      this.kv = kv;
      this.defaultTTL = options.defaultTTL;
      this.cachePrefix = options.cachePrefix ?? "";
   }

   // builds the namespaced KV key
   protected getKey(key: string): string {
      return `${this.cachePrefix}${key}`;
   }

   async get(key: string): Promise<string | undefined> {
      // KV returns null for missing keys; normalize to undefined
      const stored = await this.kv.get(this.getKey(key));
      return stored ?? undefined;
   }

   async set(key: string, value: string, ttl?: number): Promise<void> {
      const requested = ttl ?? this.defaultTTL;
      // clamp to 60s only when a TTL was actually provided/configured
      const expirationTtl = requested ? Math.max(requested, 60) : requested;
      await this.kv.put(this.getKey(key), value, { expirationTtl });
   }

   async del(key: string): Promise<void> {
      await this.kv.delete(this.getKey(key));
   }
}

export const cacheWorkersKV = (kv: KVNamespace, options?: WorkersKVCacheOptions) =>
   new WorkersKVCacheDriver(kv, options);

View File

@@ -0,0 +1,34 @@
import { describe, vi, afterAll, beforeAll } from "vitest";
import { cacheWorkersKV } from "./cache";
import { viTestRunner } from "adapter/node/vitest";
import { cacheDriverTestSuite } from "core/drivers/cache/cache-driver-test-suite";
import { Miniflare } from "miniflare";
// Runs the shared cache-driver test suite against the Workers KV driver,
// backed by Miniflare's in-process KV simulator instead of a real namespace.
describe("cacheWorkersKV", async () => {
// fake timers let the suite advance the clock deterministically for TTL tests
beforeAll(() => {
vi.useFakeTimers();
});
afterAll(() => {
vi.restoreAllMocks();
});
// minimal no-op worker script; only the KV binding is actually exercised
const mf = new Miniflare({
modules: true,
script: "export default { async fetch() { return new Response(null); } }",
kvNamespaces: ["KV"],
});
const kv = (await mf.getKVNamespace("KV")) as unknown as KVNamespace;
cacheDriverTestSuite(viTestRunner, {
makeCache: () => cacheWorkersKV(kv),
setTime: (ms: number) => {
vi.advanceTimersByTime(ms);
},
options: {
// Workers KV enforces a 60-second minimum expirationTtl
minTTL: 60,
// doesn't work with miniflare
skipTTL: true,
},
});
});

View File

@@ -1,10 +1,10 @@
import { D1Connection, type D1ConnectionConfig } from "./connection/D1Connection";
import { d1Sqlite, type D1ConnectionConfig } from "./connection/D1Connection";
export * from "./cloudflare-workers.adapter";
export { makeApp, getFresh } from "./modes/fresh";
export { getCached } from "./modes/cached";
export { DurableBkndApp, getDurable } from "./modes/durable";
export { D1Connection, type D1ConnectionConfig };
export { d1Sqlite, type D1ConnectionConfig };
export {
getBinding,
getBindings,
@@ -13,7 +13,12 @@ export {
type BindingMap,
} from "./bindings";
export { constants } from "./config";
export { StorageR2Adapter } from "./storage/StorageR2Adapter";
export { registries } from "bknd";
export function d1(config: D1ConnectionConfig) {
return new D1Connection(config);
// for compatibility with old code
export function d1<DB extends D1Database | D1DatabaseSession = D1Database>(
config: D1ConnectionConfig<DB>,
) {
return d1Sqlite<DB>(config);
}

View File

@@ -3,7 +3,7 @@ import type { App, CreateAppConfig } from "bknd";
import { createRuntimeApp, makeConfig } from "bknd/adapter";
import type { CloudflareBkndConfig, Context, CloudflareEnv } from "../index";
import { constants, registerAsyncsExecutionContext } from "../config";
import { $console } from "core";
import { $console } from "core/utils";
export async function getDurable<Env extends CloudflareEnv = CloudflareEnv>(
config: CloudflareBkndConfig<Env>,
@@ -64,7 +64,7 @@ export class DurableBkndApp extends DurableObject {
"type" in config.connection &&
config.connection.type === "libsql"
) {
config.connection.config.protocol = "wss";
//config.connection.config.protocol = "wss";
}
this.app = await createRuntimeApp({

View File

@@ -1,32 +0,0 @@
import { createWriteStream, readFileSync } from "node:fs";
import { test } from "node:test";
import { Miniflare } from "miniflare";
import { StorageR2Adapter } from "./StorageR2Adapter";
import { adapterTestSuite } from "media";
import { nodeTestRunner } from "adapter/node/test";
import path from "node:path";
// Workaround: node:test can swallow console output, so route console.log
// straight to the terminal device instead.
// https://github.com/nodejs/node/issues/44372#issuecomment-1736530480
console.log = async (message: any) => {
const tty = createWriteStream("/dev/tty");
const msg = typeof message === "string" ? message : JSON.stringify(message, null, 2);
return tty.write(`${msg}\n`);
};
// Exercises StorageR2Adapter via the shared media adapter test suite,
// using Miniflare's in-process R2 simulator and a real PNG fixture.
test("StorageR2Adapter", async () => {
const mf = new Miniflare({
modules: true,
script: "export default { async fetch() { return new Response(null); } }",
r2Buckets: ["BUCKET"],
});
const bucket = (await mf.getR2Bucket("BUCKET")) as unknown as R2Bucket;
const adapter = new StorageR2Adapter(bucket);
const basePath = path.resolve(import.meta.dirname, "../../../../__test__/_assets");
const buffer = readFileSync(path.join(basePath, "image.png"));
const file = new File([buffer], "image.png", { type: "image/png" });
await adapterTestSuite(nodeTestRunner, adapter, file);
await mf.dispose();
});

View File

@@ -1,5 +1,6 @@
import { registries } from "bknd";
import { isDebug } from "bknd/core";
// @ts-ignore
import { StringEnum } from "bknd/utils";
import { guessMimeType as guess, StorageAdapter, type FileBody } from "bknd/media";
import { getBindings } from "../bindings";
@@ -63,46 +64,49 @@ export class StorageR2Adapter extends StorageAdapter {
async putObject(key: string, body: FileBody) {
try {
const res = await this.bucket.put(key, body);
const res = await this.bucket.put(this.getKey(key), body);
return res?.etag;
} catch (e) {
return undefined;
}
}
async listObjects(
prefix?: string,
): Promise<{ key: string; last_modified: Date; size: number }[]> {
const list = await this.bucket.list({ limit: 50 });
async listObjects(prefix = ""): Promise<{ key: string; last_modified: Date; size: number }[]> {
const list = await this.bucket.list({ limit: 50, prefix: this.getKey(prefix) });
return list.objects.map((item) => ({
key: item.key,
key: item.key.replace(this.getKey(""), ""),
size: item.size,
last_modified: item.uploaded,
}));
}
private async headObject(key: string): Promise<R2Object | null> {
return await this.bucket.head(key);
return await this.bucket.head(this.getKey(key));
}
async objectExists(key: string): Promise<boolean> {
return (await this.headObject(key)) !== null;
}
async getObject(key: string, headers: Headers): Promise<Response> {
async getObject(_key: string, headers: Headers): Promise<Response> {
let object: R2ObjectBody | null;
const key = this.getKey(_key);
const responseHeaders = new Headers({
"Accept-Ranges": "bytes",
"Content-Type": guess(key),
});
const range = headers.has("range");
//console.log("getObject:headers", headersToObject(headers));
if (headers.has("range")) {
if (range) {
const options = isDebug()
? {} // miniflare doesn't support range requests
: {
range: headers,
onlyIf: headers,
};
object = (await this.bucket.get(key, options)) as R2ObjectBody;
if (!object) {
@@ -130,13 +134,14 @@ export class StorageR2Adapter extends StorageAdapter {
responseHeaders.set("Last-Modified", object.uploaded.toUTCString());
return new Response(object.body, {
status: object.range ? 206 : 200,
status: range ? 206 : 200,
headers: responseHeaders,
});
}
private writeHttpMetadata(headers: Headers, object: R2Object | R2ObjectBody): void {
let metadata = object.httpMetadata;
if (!metadata || Object.keys(metadata).length === 0) {
// guessing is especially required for dev environment (miniflare)
metadata = {
@@ -163,13 +168,17 @@ export class StorageR2Adapter extends StorageAdapter {
}
async deleteObject(key: string): Promise<void> {
await this.bucket.delete(key);
await this.bucket.delete(this.getKey(key));
}
getObjectUrl(key: string): string {
throw new Error("Method getObjectUrl not implemented.");
}
protected getKey(key: string) {
return key;
}
toJSON(secrets?: boolean) {
return {
type: this.getName(),

View File

@@ -0,0 +1,32 @@
import { readFileSync } from "node:fs";
import { Miniflare } from "miniflare";
import { StorageR2Adapter } from "./StorageR2Adapter";
import { adapterTestSuite } from "media/storage/adapters/adapter-test-suite";
import path from "node:path";
import { describe, afterAll, test, expect } from "vitest";
import { viTestRunner } from "adapter/node/vitest";
// module-scoped so the afterAll hook below can dispose it after the suite runs
let mf: Miniflare | undefined;
// Runs the shared media adapter test suite against StorageR2Adapter,
// backed by Miniflare's in-process R2 simulator.
describe("StorageR2Adapter", async () => {
mf = new Miniflare({
modules: true,
script: "export default { async fetch() { return new Response(null); } }",
r2Buckets: ["BUCKET"],
});
const bucket = (await mf?.getR2Bucket("BUCKET")) as unknown as R2Bucket;
// sanity check that the simulated bucket binding resolved
test("test", () => {
expect(bucket).toBeDefined();
});
const adapter = new StorageR2Adapter(bucket);
const basePath = path.resolve(import.meta.dirname, "../../../../__test__/_assets");
const buffer = readFileSync(path.join(basePath, "image.png"));
const file = new File([buffer], "image.png", { type: "image/png" });
await adapterTestSuite(viTestRunner, adapter, file);
});
afterAll(async () => {
await mf?.dispose();
});

View File

@@ -0,0 +1,14 @@
import { defineWorkersConfig } from "@cloudflare/vitest-pool-workers/config";
// Vitest config for tests that must run inside the Cloudflare Workers
// runtime (workerd) via @cloudflare/vitest-pool-workers, not plain Node.
export default defineWorkersConfig({
test: {
poolOptions: {
workers: {
miniflare: {
compatibilityDate: "2025-06-04",
},
},
},
// only files with these suffixes are picked up by the workers pool
include: ["**/*.vi-test.ts", "**/*.vitest.ts"],
},
});

View File

@@ -1,7 +1,11 @@
import { App, type CreateAppConfig } from "bknd";
import { config as $config } from "bknd/core";
import { $console } from "bknd/utils";
import type { MiddlewareHandler } from "hono";
import type { AdminControllerOptions } from "modules/server/AdminController";
import { Connection } from "bknd/data";
export { Connection } from "bknd/data";
export type BkndConfig<Args = any> = CreateAppConfig & {
app?: CreateAppConfig | ((args: Args) => CreateAppConfig);
@@ -59,7 +63,21 @@ export async function createAdapterApp<Config extends BkndConfig = BkndConfig, A
const id = opts?.id ?? "app";
let app = apps.get(id);
if (!app || opts?.force) {
app = App.create(makeConfig(config, args));
const appConfig = makeConfig(config, args);
if (!appConfig.connection || !Connection.isConnection(appConfig.connection)) {
let connection: Connection | undefined;
if (Connection.isConnection(config.connection)) {
connection = config.connection;
} else {
const sqlite = (await import("bknd/adapter/sqlite")).sqlite;
const conf = config.connection ?? { url: ":memory:" };
connection = sqlite(conf);
$console.info(`Using ${connection.name} connection`, conf.url);
}
appConfig.connection = connection;
}
app = App.create(appConfig);
apps.set(id, app);
}
return app;

View File

@@ -0,0 +1,57 @@
import { genericSqlite } from "bknd/data";
import { DatabaseSync } from "node:sqlite";
export type NodeSqliteConnectionConfig = {
database: DatabaseSync;
};
/**
 * Connection factory for the built-in `node:sqlite` driver (Node.js 22+).
 * Accepts an existing DatabaseSync, a `{ url }` pointing at a file/URI,
 * or nothing at all (falls back to an in-memory database).
 */
export function nodeSqlite(config?: NodeSqliteConnectionConfig | { url: string }) {
let db: DatabaseSync;
if (!config) {
db = new DatabaseSync(":memory:");
} else if ("database" in config) {
db = config.database;
} else {
db = new DatabaseSync(config.url);
}

return genericSqlite(
"node-sqlite",
db,
(utils) => {
// prepare a fresh statement per call
const prepare = (sql: string) => db.prepare(sql);

return {
db,
query: utils.buildQueryFn({
all: (sql, parameters = []) => prepare(sql).all(...parameters),
run: (sql, parameters = []) => {
const result = prepare(sql).run(...parameters);
return {
insertId: utils.parseBigInt(result.lastInsertRowid),
numAffectedRows: utils.parseBigInt(result.changes),
};
},
}),
close: () => db.close(),
iterator: (isSelect, sql, parameters = []) => {
// streaming is read-only; mutations must go through query()
if (!isSelect) {
throw new Error("Only support select in stream()");
}
return prepare(sql).iterate(...parameters) as any;
},
};
},
{
supports: {
// node:sqlite exposes no batch execution API
batching: false,
},
},
);
}

View File

@@ -0,0 +1,15 @@
import { nodeSqlite } from "./NodeSqliteConnection";
import { DatabaseSync } from "node:sqlite";
import { connectionTestSuite } from "data/connection/connection-test-suite";
import { describe } from "vitest";
import { viTestRunner } from "../vitest";
// Runs the shared connection test suite against the node:sqlite-backed
// connection, using a fresh in-memory database per makeConnection() call.
describe("NodeSqliteConnection", () => {
connectionTestSuite(viTestRunner, {
makeConnection: () => ({
connection: nodeSqlite({ database: new DatabaseSync(":memory:") }),
// nothing to clean up: the in-memory DB dies with the process
dispose: async () => {},
}),
// no driver-specific raw dialect fields to verify
rawDialectDetails: [],
});
});

View File

@@ -1,18 +1,3 @@
import { registries } from "bknd";
import { type LocalAdapterConfig, StorageLocalAdapter } from "./storage/StorageLocalAdapter";
export * from "./node.adapter";
export { StorageLocalAdapter, type LocalAdapterConfig };
let registered = false;
export function registerLocalMediaAdapter() {
if (!registered) {
registries.media.register("local", StorageLocalAdapter);
registered = true;
}
return (config: Partial<LocalAdapterConfig> = {}) => {
const adapter = new StorageLocalAdapter(config);
return adapter.toJSON(true);
};
}
export * from "./storage";
export * from "./connection/NodeSqliteConnection";

View File

@@ -1,5 +1,5 @@
import { afterAll, beforeAll, describe } from "bun:test";
import * as node from "./node.adapter";
import { createApp, createHandler } from "./node.adapter";
import { adapterTestSuite } from "adapter/adapter-test-suite";
import { bunTestRunner } from "adapter/bun/test";
import { disableConsoleLog, enableConsoleLog } from "core/utils";
@@ -9,7 +9,7 @@ afterAll(enableConsoleLog);
describe("node adapter (bun)", () => {
adapterTestSuite(bunTestRunner, {
makeApp: node.createApp,
makeHandler: node.createHandler,
makeApp: createApp,
makeHandler: createHandler,
});
});

View File

@@ -1,10 +1,11 @@
import path from "node:path";
import { serve as honoServe } from "@hono/node-server";
import { serveStatic } from "@hono/node-server/serve-static";
import { registerLocalMediaAdapter } from "adapter/node/index";
import { registerLocalMediaAdapter } from "adapter/node/storage";
import { type RuntimeBkndConfig, createRuntimeApp, type RuntimeOptions } from "bknd/adapter";
import { config as $config } from "bknd/core";
import { $console } from "core";
import { $console } from "core/utils";
import type { App } from "App";
type NodeEnv = NodeJS.ProcessEnv;
export type NodeBkndConfig<Env = NodeEnv> = RuntimeBkndConfig<Env> & {
@@ -45,8 +46,11 @@ export function createHandler<Env = NodeEnv>(
args: Env = {} as Env,
opts?: RuntimeOptions,
) {
let app: App | undefined;
return async (req: Request) => {
const app = await createApp(config, args ?? (process.env as Env), opts);
if (!app) {
app = await createApp(config, args ?? (process.env as Env), opts);
}
return app.fetch(req);
};
}

View File

@@ -1,14 +1,14 @@
import { describe, before, after } from "node:test";
import { describe, beforeAll, afterAll } from "vitest";
import * as node from "./node.adapter";
import { adapterTestSuite } from "adapter/adapter-test-suite";
import { nodeTestRunner } from "adapter/node/test";
import { viTestRunner } from "adapter/node/vitest";
import { disableConsoleLog, enableConsoleLog } from "core/utils";
before(() => disableConsoleLog());
after(enableConsoleLog);
beforeAll(() => disableConsoleLog());
afterAll(enableConsoleLog);
describe("node adapter", () => {
adapterTestSuite(nodeTestRunner, {
adapterTestSuite(viTestRunner, {
makeApp: node.createApp,
makeHandler: node.createHandler,
});

View File

@@ -1,5 +1,5 @@
import { describe } from "node:test";
import { nodeTestRunner } from "adapter/node/test";
import { describe } from "vitest";
import { viTestRunner } from "adapter/node/vitest";
import { StorageLocalAdapter } from "adapter/node";
import { adapterTestSuite } from "media/storage/adapters/adapter-test-suite";
import { readFileSync } from "node:fs";
@@ -14,5 +14,5 @@ describe("StorageLocalAdapter (node)", async () => {
path: path.join(basePath, "tmp"),
});
await adapterTestSuite(nodeTestRunner, adapter, file);
await adapterTestSuite(viTestRunner, adapter, file);
});

View File

@@ -0,0 +1,17 @@
import { registries } from "bknd";
import { type LocalAdapterConfig, StorageLocalAdapter } from "./StorageLocalAdapter";
export * from "./StorageLocalAdapter";
// module-level guard so the adapter is registered at most once per process
let registered = false;
/**
 * Registers the local filesystem media adapter under the "local" key
 * (idempotent) and returns a factory producing serialized adapter configs.
 */
export function registerLocalMediaAdapter() {
if (registered === false) {
registries.media.register("local", StorageLocalAdapter);
registered = true;
}
// factory: build an adapter from a partial config and serialize it (with secrets)
return (config: Partial<LocalAdapterConfig> = {}) =>
new StorageLocalAdapter(config).toJSON(true);
}

View File

@@ -1,5 +1,5 @@
import nodeAssert from "node:assert/strict";
import { test } from "node:test";
import { test, describe, beforeEach, afterEach } from "node:test";
import type { Matcher, Test, TestFn, TestRunner } from "core/test";
// Track mock function calls
@@ -85,6 +85,7 @@ nodeTest.skipIf = (condition: boolean): Test => {
};
export const nodeTestRunner: TestRunner = {
describe,
test: nodeTest,
mock: createMockFunction,
expect: <T = unknown>(actual?: T, failMsg?: string) => ({
@@ -96,4 +97,7 @@ export const nodeTestRunner: TestRunner = {
reject: (r) => nodeTestMatcher(r, failMsg),
}),
}),
beforeEach: beforeEach,
afterEach: afterEach,
afterAll: () => {},
};

View File

@@ -0,0 +1,53 @@
import type { TestFn, TestRunner, Test } from "core/test";
import { describe, test, expect, vi, beforeEach, afterEach, afterAll } from "vitest";
// Wraps vitest's `test` so it conforms to the core/test `Test` interface.
// `options` is accepted for signature compatibility but intentionally unused.
function vitestTest(label: string, fn: TestFn, options?: any) {
return test(label, fn as any);
}
// no-op stand-in returned when a conditional test should not run
const noopTest = (() => {}) as any;
vitestTest.if = (condition: boolean): Test => (condition ? vitestTest : noopTest);
vitestTest.skip = (label: string, fn: TestFn) => {
return test.skip(label, fn as any);
};
vitestTest.skipIf = (condition: boolean): Test => (condition ? noopTest : vitestTest);
// Adapts vitest's `expect` to the minimal matcher surface used by core/test.
// `parentFailMsg` is the default failure message; matchers may override it.
const vitestExpect = <T = unknown>(actual: T, parentFailMsg?: string) => {
const check = (failMsg = parentFailMsg) => expect(actual, failMsg);
return {
toEqual: (expected: T, failMsg = parentFailMsg) => {
check(failMsg).toEqual(expected);
},
toBe: (expected: T, failMsg = parentFailMsg) => {
check(failMsg).toBe(expected);
},
toBeString: () => expect(typeof actual, parentFailMsg).toBe("string"),
toBeUndefined: () => check().toBeUndefined(),
toBeDefined: () => check().toBeDefined(),
toBeOneOf: (expected: T | Array<T> | Iterable<T>, failMsg = parentFailMsg) => {
// non-array iterables are wrapped as a single candidate, matching core/test
const candidates = Array.isArray(expected) ? expected : [expected];
check(failMsg).toBeOneOf(candidates);
},
toHaveBeenCalled: () => check().toHaveBeenCalled(),
toHaveBeenCalledTimes: (expected: number, failMsg = parentFailMsg) => {
check(failMsg).toHaveBeenCalledTimes(expected);
},
};
};
export const viTestRunner: TestRunner = {
describe,
test: vitestTest,
expect: vitestExpect as any,
mock: (fn) => vi.fn(fn),
beforeEach: beforeEach,
afterEach: afterEach,
afterAll: afterAll,
};

View File

@@ -0,0 +1,6 @@
import type { Connection } from "bknd/data";
import { bunSqlite } from "../bun/connection/BunSqliteConnection";
// SQLite connection factory for the Bun runtime (delegates to bun:sqlite).
// Omitting config presumably falls back to an in-memory database — matches
// the optional signature of the node variant; confirm in bunSqlite.
export function sqlite(config?: { url: string }): Connection {
return bunSqlite(config);
}

View File

@@ -0,0 +1,5 @@
import { type Connection, libsql } from "bknd/data";
// SQLite connection factory backed by libsql. Unlike the bun/node variants,
// `config.url` is required here — there is no in-memory default.
export function sqlite(config: { url: string }): Connection {
return libsql(config);
}

View File

@@ -0,0 +1,6 @@
import type { Connection } from "bknd/data";
import { nodeSqlite } from "../node/connection/NodeSqliteConnection";
// SQLite connection factory for Node.js (delegates to node:sqlite, Node 22+).
// Omitting config falls back to an in-memory database.
export function sqlite(config?: { url: string }): Connection {
return nodeSqlite(config);
}

View File

@@ -1,14 +1,15 @@
import { Authenticator, AuthPermissions, Role, type Strategy } from "auth";
import type { PasswordStrategy } from "auth/authenticate/strategies";
import { $console, type DB } from "core";
import { secureRandomString, transformObject } from "core/utils";
import type { DB } from "core";
import { $console, secureRandomString, transformObject } from "core/utils";
import type { Entity, EntityManager } from "data";
import { em, entity, enumm, type FieldSchema, text } from "data/prototype";
import { em, entity, enumm, type FieldSchema } from "data/prototype";
import { Module } from "modules/Module";
import { AuthController } from "./api/AuthController";
import { type AppAuthSchema, authConfigSchema, STRATEGIES } from "./auth-schema";
import { AppUserPool } from "auth/AppUserPool";
import type { AppEntity } from "core/config";
import { usersFields } from "./auth-entities";
export type UserFieldSchema = FieldSchema<typeof AppAuth.usersFields>;
declare module "core" {
@@ -125,22 +126,11 @@ export class AppAuth extends Module<typeof authConfigSchema> {
return this.em.entity(entity_name) as any;
}
static usersFields = {
email: text().required(),
strategy: text({
fillable: ["create"],
hidden: ["update", "form"],
}).required(),
strategy_value: text({
fillable: ["create"],
hidden: ["read", "table", "update", "form"],
}).required(),
role: text(),
};
static usersFields = usersFields;
registerEntities() {
const users = this.getUsersEntity(true);
this.ensureSchema(
this.ctx.helper.ensureSchema(
em(
{
[users.name as "users"]: users,
@@ -153,13 +143,13 @@ export class AppAuth extends Module<typeof authConfigSchema> {
try {
const roles = Object.keys(this.config.roles ?? {});
this.replaceEntityField(users, "role", enumm({ enum: roles }));
this.ctx.helper.replaceEntityField(users, "role", enumm({ enum: roles }));
} catch (e) {}
try {
// also keep disabled strategies as a choice
const strategies = Object.keys(this.config.strategies ?? {});
this.replaceEntityField(users, "strategy", enumm({ enum: strategies }));
this.ctx.helper.replaceEntityField(users, "strategy", enumm({ enum: strategies }));
} catch (e) {}
}

View File

@@ -1,6 +1,6 @@
import { AppAuth } from "auth/AppAuth";
import type { CreateUser, SafeUser, User, UserPool } from "auth/authenticate/Authenticator";
import { $console } from "core";
import { $console } from "core/utils";
import { pick } from "lodash-es";
import {
InvalidConditionsException,

View File

@@ -184,6 +184,6 @@ export class AuthController extends Controller {
this.registerStrategyActions(strategy, hono);
}
return hono.all("*", (c) => c.notFound());
return hono;
}
}

View File

@@ -0,0 +1,14 @@
import { text } from "data/prototype";
// Field definitions for the built-in "users" entity, extracted so other
// modules can reference them without importing AppAuth.
export const usersFields = {
// primary identifier used to look up the user
email: text().required(),
// auth strategy chosen at registration; settable on create only,
// hidden from update/form views
strategy: text({
fillable: ["create"],
hidden: ["update", "form"],
}).required(),
// strategy-specific value (presumably a credential such as a password
// hash — confirm against the strategy implementations); never exposed
// on reads, tables, updates, or forms
strategy_value: text({
fillable: ["create"],
hidden: ["read", "table", "update", "form"],
}).required(),
// role name; AppAuth later replaces this with an enum of configured roles
role: text(),
};

View File

@@ -1,6 +1,7 @@
import { $console, type DB, Exception } from "core";
import { type DB, Exception } from "core";
import { addFlashMessage } from "core/server/flash";
import {
$console,
type Static,
StringEnum,
type TObject,
@@ -341,9 +342,9 @@ export class Authenticator<Strategies extends Record<string, Strategy> = Record<
await setSignedCookie(c, "auth", token, secret, this.cookieOptions);
}
private async deleteAuthCookie(c: Context) {
private deleteAuthCookie(c: Context) {
$console.debug("deleting auth cookie");
await deleteCookie(c, "auth", this.cookieOptions);
deleteCookie(c, "auth", this.cookieOptions);
}
async logout(c: Context<ServerEnv>) {
@@ -352,9 +353,13 @@ export class Authenticator<Strategies extends Record<string, Strategy> = Record<
const cookie = await this.getAuthCookie(c);
if (cookie) {
await this.deleteAuthCookie(c);
await addFlashMessage(c, "Signed out", "info");
addFlashMessage(c, "Signed out", "info");
}
// on waku, only one cookie setting is performed
// therefore adding deleting cookie at the end
// as the flash isn't that important
this.deleteAuthCookie(c);
}
// @todo: move this to a server helper

View File

@@ -1,6 +1,6 @@
import { type Authenticator, InvalidCredentialsException, type User } from "auth";
import { $console, tbValidator as tb } from "core";
import { hash, parse, type Static, StrictObject, StringEnum } from "core/utils";
import { tbValidator as tb } from "core";
import { $console, hash, parse, type Static, StrictObject, StringEnum } from "core/utils";
import { Hono } from "hono";
import { compare as bcryptCompare, genSalt as bcryptGenSalt, hash as bcryptHash } from "bcryptjs";
import * as tbbox from "@sinclair/typebox";

View File

@@ -1,5 +1,5 @@
import { $console, Exception, Permission } from "core";
import { objectTransform } from "core/utils";
import { Exception, Permission } from "core";
import { $console, objectTransform } from "core/utils";
import type { Context } from "hono";
import type { ServerEnv } from "modules/Controller";
import { Role } from "./Role";

View File

@@ -1,5 +1,5 @@
import { $console, type Permission } from "core";
import { patternMatch } from "core/utils";
import type { Permission } from "core";
import { $console, patternMatch } from "core/utils";
import type { Context } from "hono";
import { createMiddleware } from "hono/factory";
import type { ServerEnv } from "modules/Controller";

View File

@@ -29,30 +29,8 @@ export const cloudflare = {
{ dir: ctx.dir },
);
const db = ctx.skip
? "d1"
: await $p.select({
message: "What database do you want to use?",
options: [
{ label: "Cloudflare D1", value: "d1" },
{ label: "LibSQL", value: "libsql" },
],
});
if ($p.isCancel(db)) {
process.exit(1);
}
try {
switch (db) {
case "d1":
await createD1(ctx);
break;
case "libsql":
await createLibsql(ctx);
break;
default:
throw new Error("Invalid database");
}
await createD1(ctx);
} catch (e) {
const message = (e as any).message || "An error occurred";
$p.log.warn(
@@ -60,7 +38,14 @@ export const cloudflare = {
);
}
await createR2(ctx);
try {
await createR2(ctx);
} catch (e) {
const message = (e as any).message || "An error occurred";
$p.log.warn(
"Couldn't add R2 bucket. You can add it manually later. Error: " + c.red(message),
);
}
},
} as const satisfies Template;
@@ -89,6 +74,21 @@ async function createD1(ctx: TemplateSetupCtx) {
})(),
);
await overrideJson(
WRANGLER_FILE,
(json) => ({
...json,
d1_databases: [
{
binding: "DB",
database_name: name,
database_id: "00000000-0000-0000-0000-000000000000",
},
],
}),
{ dir: ctx.dir },
);
if (!ctx.skip) {
exec(`npx wrangler d1 create ${name}`);
@@ -98,62 +98,6 @@ async function createD1(ctx: TemplateSetupCtx) {
})(),
);
}
await overrideJson(
WRANGLER_FILE,
(json) => ({
...json,
d1_databases: [
{
binding: "DB",
database_name: name,
database_id: uuid(),
},
],
}),
{ dir: ctx.dir },
);
}
async function createLibsql(ctx: TemplateSetupCtx) {
await overrideJson(
WRANGLER_FILE,
(json) => ({
...json,
vars: {
DB_URL: "http://127.0.0.1:8080",
},
}),
{ dir: ctx.dir },
);
await overridePackageJson(
(pkg) => ({
...pkg,
scripts: {
...pkg.scripts,
db: "turso dev",
dev: "npm run db && wrangler dev",
},
}),
{ dir: ctx.dir },
);
await $p.stream.info(
(async function* () {
yield* typewriter("Database set to LibSQL");
await wait();
yield* typewriter(
`\nYou can now run ${c.cyan("npm run db")} to start the database and ${c.cyan("npm run dev")} to start the worker.`,
c.dim,
);
await wait();
yield* typewriter(
`\nAlso make sure you have Turso's CLI installed. Check their docs on how to install at ${c.cyan("https://docs.turso.tech/cli/introduction")}`,
c.dim,
);
})(),
);
}
async function createR2(ctx: TemplateSetupCtx) {
@@ -197,9 +141,11 @@ async function createR2(ctx: TemplateSetupCtx) {
process.exit(1);
}
if (!ctx.skip) {
exec(`npx wrangler r2 bucket create ${name}`);
}
await $p.stream.info(
(async function* () {
yield* typewriter("Now running wrangler to create a R2 bucket...");
})(),
);
await overrideJson(
WRANGLER_FILE,
@@ -214,4 +160,8 @@ async function createR2(ctx: TemplateSetupCtx) {
}),
{ dir: ctx.dir },
);
if (!ctx.skip) {
exec(`npx wrangler r2 bucket create ${name}`);
}
}

View File

@@ -1,6 +1,5 @@
import path from "node:path";
import type { Config } from "@libsql/client/node";
import { $console, config } from "core";
import { $console } from "core/utils";
import type { MiddlewareHandler } from "hono";
import open from "open";
import { fileExists, getRelativeDistPath } from "../../utils/sys";
@@ -27,10 +26,6 @@ export async function serveStatic(server: Platform): Promise<MiddlewareHandler>
}
}
export async function attachServeStatic(app: any, platform: Platform) {
app.module.server.client.get(config.server.assets_path + "*", await serveStatic(platform));
}
export async function startServer(
server: Platform,
app: App,

View File

@@ -1,9 +1,9 @@
import type { Config } from "@libsql/client/node";
import { App, type CreateAppConfig } from "App";
import { StorageLocalAdapter } from "adapter/node";
import type { App, CreateAppConfig } from "App";
import { StorageLocalAdapter } from "adapter/node/storage";
import type { CliBkndConfig, CliCommand } from "cli/types";
import { Option } from "commander";
import { colorizeConsole, config } from "core";
import { config } from "core";
import dotenv from "dotenv";
import { registries } from "modules/registries";
import c from "picocolors";
@@ -11,19 +11,19 @@ import path from "node:path";
import {
PLATFORMS,
type Platform,
attachServeStatic,
getConfigPath,
getConnectionCredentialsFromEnv,
serveStatic,
startServer,
} from "./platform";
import { makeConfig } from "adapter";
import { isBun as $isBun } from "cli/utils/sys";
import { createRuntimeApp, makeConfig } from "adapter";
import { colorizeConsole, isBun } from "core/utils";
const env_files = [".env", ".dev.vars"];
dotenv.config({
path: env_files.map((file) => path.resolve(process.cwd(), file)),
});
const isBun = $isBun();
const is_bun = isBun();
export const run: CliCommand = (program) => {
program
@@ -44,15 +44,14 @@ export const run: CliCommand = (program) => {
)
.addOption(new Option("-c, --config <config>", "config file"))
.addOption(
new Option("--db-url <db>", "database url, can be any valid libsql url").conflicts(
new Option("--db-url <db>", "database url, can be any valid sqlite url").conflicts(
"config",
),
)
.addOption(new Option("--db-token <db>", "database token").conflicts("config"))
.addOption(
new Option("--server <server>", "server type")
.choices(PLATFORMS)
.default(isBun ? "bun" : "node"),
.default(is_bun ? "bun" : "node"),
)
.addOption(new Option("--no-open", "don't open browser window on start"))
.action(action);
@@ -72,23 +71,10 @@ type MakeAppConfig = {
};
async function makeApp(config: MakeAppConfig) {
const app = App.create({ connection: config.connection });
app.emgr.onEvent(
App.Events.AppBuiltEvent,
async () => {
if (config.onBuilt) {
await config.onBuilt(app);
}
await attachServeStatic(app, config.server?.platform ?? "node");
app.registerAdminController();
},
"sync",
);
await app.build();
return app;
return await createRuntimeApp({
serveStatic: await serveStatic(config.server?.platform ?? "node"),
...config,
});
}
export async function makeConfigApp(_config: CliBkndConfig, platform?: Platform) {
@@ -104,7 +90,6 @@ type RunOptions = {
memory?: boolean;
config?: string;
dbUrl?: string;
dbToken?: string;
server: Platform;
open?: boolean;
};
@@ -115,10 +100,8 @@ export async function makeAppFromEnv(options: Partial<RunOptions> = {}) {
let app: App | undefined = undefined;
// first start from arguments if given
if (options.dbUrl) {
console.info("Using connection from", c.cyan("--db-url"));
const connection = options.dbUrl
? { url: options.dbUrl, authToken: options.dbToken }
: undefined;
console.info("Using connection from", c.cyan("--db-url"), c.cyan(options.dbUrl));
const connection = options.dbUrl ? { url: options.dbUrl } : undefined;
app = await makeApp({ connection, server: { platform: options.server } });
// check configuration file to be present

View File

@@ -9,9 +9,9 @@ import type { PasswordStrategy } from "auth/authenticate/strategies";
import { makeAppFromEnv } from "cli/commands/run";
import type { CliCommand } from "cli/types";
import { Argument } from "commander";
import { $console } from "core";
import { $console } from "core/utils";
import c from "picocolors";
import { isBun } from "cli/utils/sys";
import { isBun } from "core/utils";
export const user: CliCommand = (program) => {
program

View File

@@ -1,17 +1,9 @@
import { $console } from "core";
import { $console } from "core/utils";
import { execSync, exec as nodeExec } from "node:child_process";
import { readFile, writeFile as nodeWriteFile } from "node:fs/promises";
import path from "node:path";
import url from "node:url";
export function isBun(): boolean {
try {
return typeof Bun !== "undefined";
} catch (e) {
return false;
}
}
export function getRootPath() {
const _path = path.dirname(url.fileURLToPath(import.meta.url));
// because of "src", local needs one more level up

View File

@@ -1,6 +1,7 @@
import { PostHog } from "posthog-js-lite";
import { getVersion } from "cli/utils/sys";
import { $console, env, isDebug } from "core";
import { env, isDebug } from "core";
import { $console } from "core/utils";
type Properties = { [p: string]: any };

View File

@@ -0,0 +1,72 @@
import type { TestRunner } from "core/test";
import type { ICacheDriver } from "./index";
/**
 * Shared, runner-agnostic test suite for ICacheDriver implementations.
 * Drivers with timing restrictions (e.g. Workers KV's 60s minimum TTL)
 * configure them via `options`.
 */
export function cacheDriverTestSuite(
testRunner: TestRunner,
{
makeCache,
setTime,
options,
}: {
// factory producing the driver under test
makeCache: () => ICacheDriver;
// advance the (mocked) clock by `ms` milliseconds
setTime: (ms: number) => void;
options?: {
// smallest TTL in seconds the driver accepts (defaults to 1)
minTTL?: number;
// set when the driver's TTL cannot be observed under test;
// expiry is then emulated by deleting the key manually
skipTTL?: boolean;
};
},
) {
const { test, expect } = testRunner;
const minTTL = options?.minTTL ?? 1;
test("get within ttl", async () => {
const cache = makeCache();
await cache.set("ttl", "bar", minTTL + 2); // TTL of (minTTL + 2) seconds
setTime(minTTL * 1000 + 1000); // advance to one second before expiry
expect(await cache.get("ttl")).toBe("bar");
});
test("set and get returns value", async () => {
const cache = makeCache();
await cache.set("value", "bar");
expect(await cache.get("value")).toBe("bar");
});
test("get returns undefined for missing key", async () => {
const cache = makeCache();
expect(await cache.get("missing" + Math.random())).toBeUndefined();
});
test("delete removes value", async () => {
const cache = makeCache();
await cache.set("delete", "bar");
await cache.del("delete");
expect(await cache.get("delete")).toBeUndefined();
});
test("set overwrites value", async () => {
const cache = makeCache();
await cache.set("overwrite", "bar");
await cache.set("overwrite", "baz");
expect(await cache.get("overwrite")).toBe("baz");
});
test("set with ttl expires", async () => {
const cache = makeCache();
await cache.set("expire", "bar", minTTL + 1); // TTL of (minTTL + 1) seconds
expect(await cache.get("expire")).toBe("bar");
// advance far past expiry
// NOTE(review): minTTL * 1000 * 2000 jumps minTTL*2000 seconds ahead —
// possibly intended `minTTL * 1000 + 2000`; either passes, confirm intent
setTime(minTTL * 1000 * 2000);
if (options?.skipTTL) {
// driver TTL not observable; emulate expiry manually
await cache.del("expire");
}
expect(await cache.get("expire")).toBeUndefined();
});
test("set without ttl does not expire", async () => {
const cache = makeCache();
await cache.set("ttl0", "bar");
expect(await cache.get("ttl0")).toBe("bar");
setTime(1000);
expect(await cache.get("ttl0")).toBe("bar");
});
}

View File

@@ -0,0 +1,52 @@
import { cacheDriverTestSuite } from "./cache-driver-test-suite";
import { memoryCache } from "./in-memory";
import { bunTestRunner } from "adapter/bun/test";
import { setSystemTime, afterAll, beforeAll, test, expect, describe } from "bun:test";
let baseTime = Date.now();

beforeAll(() => {
   // freeze the clock so TTL-based assertions are deterministic
   baseTime = Date.now();
   setSystemTime(new Date(baseTime));
});

afterAll(() => {
   setSystemTime(); // Reset to real time
});

describe("InMemoryCacheDriver", () => {
   // shared driver suite; `setTime` advances the mocked clock relative to baseTime
   cacheDriverTestSuite(bunTestRunner, {
      makeCache: () => memoryCache(),
      setTime: (ms: number) => {
         setSystemTime(new Date(baseTime + ms));
      },
   });

   test("evicts least recently used entries by byte size", async () => {
      // maxSize = 20 bytes for this test
      const cache = memoryCache({ maxSize: 20 });
      // each key and value is 1 char = 1 byte (ASCII)
      // totals to 2 bytes each
      await cache.set("a", "1");
      await cache.set("b", "2");
      await cache.set("c", "3");
      await cache.set("d", "4");
      await cache.set("e", "5");
      // total: 10 bytes
      // now add a large value to force eviction
      await cache.set("big", "1234567890");
      // should evict least recently used entries until it fits
      // only "big" and possibly one other small entry should remain
      expect(await cache.get("big")).toBe("1234567890");
      // the oldest keys should be evicted
      expect(await cache.get("a")).toBeUndefined();
      expect(await cache.get("b")).toBeUndefined();
      // the most recent small keys may or may not remain depending on eviction order
   });

   test("throws if entry is too large to ever fit", async () => {
      const cache = memoryCache({ maxSize: 5 });
      // key: 3, value: 10 = 13 bytes
      // fix: await the async matcher — without `await` the rejection is not
      // evaluated within this test and a failure would go unreported
      await expect(cache.set("big", "1234567890")).rejects.toThrow();
   });
});

123
app/src/core/drivers/cache/in-memory.ts vendored Normal file
View File

@@ -0,0 +1,123 @@
import type { ICacheDriver } from "./index";
interface InMemoryCacheOptions {
   // maximum total size in bytes for all keys and values
   maxSize?: number;
   // default time-to-live in seconds (values <= 0 mean "never expire")
   defaultTTL?: number;
}

interface CacheEntry {
   value: string;
   // timestamp in ms, or null for no expiry
   expiresAt: number | null;
   // size in bytes of this entry (key + value)
   size: number;
}

// UTF-8 byte length (String.length counts UTF-16 code units, not bytes)
function byteLength(str: string): number {
   return new TextEncoder().encode(str).length;
}

/**
 * In-memory cache with byte-size bounded capacity, per-entry TTL and LRU eviction.
 *
 * LRU order relies on `Map` insertion order: reads and writes re-insert the key,
 * so the first key in iteration order is always the least recently used.
 */
export class InMemoryCacheDriver implements ICacheDriver {
   protected cache: Map<string, CacheEntry>;
   protected maxSize: number;
   protected defaultTTL: number;
   // running total of bytes currently stored (keys + values)
   protected currentSize: number;

   constructor(options: InMemoryCacheOptions = {}) {
      this.maxSize = options.maxSize ?? 1024 * 1024 * 10; // 10MB default
      this.defaultTTL = options.defaultTTL ?? 60 * 60; // 1 hour default
      this.cache = new Map();
      this.currentSize = 0;
   }

   protected now(): number {
      return Date.now();
   }

   protected isExpired(entry: CacheEntry): boolean {
      return entry.expiresAt !== null && entry.expiresAt <= this.now();
   }

   /**
    * Inserts `entry` under `key`, evicting least-recently-used entries until it fits.
    * @throws if the entry alone exceeds `maxSize`, or still does not fit after eviction
    */
   protected setEntry(key: string, entry: CacheEntry) {
      // if the entry itself is too large, fail before touching any state
      if (entry.size > this.maxSize) {
         throw new Error(
            `InMemoryCacheDriver: entry too large (entry: ${entry.size}, max: ${this.maxSize})`,
         );
      }

      // fix: remove any previous entry for this key FIRST. Previously the LRU loop
      // could evict the very key being overwritten and its size was then subtracted
      // a second time below, corrupting `currentSize` and allowing the cache to
      // silently exceed `maxSize`.
      const oldEntry = this.cache.get(key);
      if (oldEntry) {
         this.currentSize -= oldEntry.size;
         this.cache.delete(key);
      }

      // evict LRU (first key in insertion order) until the new entry fits
      while (this.currentSize + entry.size > this.maxSize && this.cache.size > 0) {
         const lruKey = this.cache.keys().next().value;
         if (typeof lruKey !== "string") break;
         const lruEntry = this.cache.get(lruKey);
         if (lruEntry) {
            this.currentSize -= lruEntry.size;
         }
         this.cache.delete(lruKey);
      }

      // unreachable when entry.size <= maxSize, kept as a safety net
      if (this.currentSize + entry.size > this.maxSize) {
         throw new Error(
            `InMemoryCacheDriver: maxSize exceeded after eviction (attempted: ${this.currentSize + entry.size}, max: ${this.maxSize})`,
         );
      }

      // insert last so the key becomes the most recently used
      this.cache.set(key, entry);
      this.currentSize += entry.size;
   }

   async get(key: string): Promise<string | undefined> {
      const entry = this.cache.get(key);
      if (!entry) return;
      if (this.isExpired(entry)) {
         // lazy expiry: drop the stale entry on read
         this.cache.delete(key);
         this.currentSize -= entry.size;
         return;
      }
      // re-insert to mark as most recently used
      this.cache.delete(key);
      this.cache.set(key, entry);
      return entry.value;
   }

   async set(key: string, value: string, ttl?: number): Promise<void> {
      // ttl in seconds; undefined falls back to defaultTTL, <= 0 means "never expire"
      const effectiveTTL = ttl === undefined ? this.defaultTTL : ttl;
      const expiresAt = effectiveTTL > 0 ? this.now() + effectiveTTL * 1000 : null;
      const size = byteLength(key) + byteLength(value);
      this.setEntry(key, { value, expiresAt, size });
   }

   async del(key: string): Promise<void> {
      const entry = this.cache.get(key);
      if (entry) {
         this.currentSize -= entry.size;
         this.cache.delete(key);
      }
   }
}

export const memoryCache = (options?: InMemoryCacheOptions) => {
   return new InMemoryCacheDriver(options);
};

32
app/src/core/drivers/cache/index.ts vendored Normal file
View File

@@ -0,0 +1,32 @@
/**
 * Interface for cache driver implementations
 * Defines standard methods for interacting with a cache storage system
 *
 * Implementations in this package store string values only; callers are
 * responsible for serializing structured data.
 */
export interface ICacheDriver {
   /**
    * Retrieves a value from the cache by its key
    *
    * @param key unique identifier for the cached value
    * @returns resolves to the cached string value or undefined if not found (or expired)
    */
   get(key: string): Promise<string | undefined>;
   /**
    * Stores a value in the cache with an optional time-to-live
    *
    * @param key unique identifier for storing the value
    * @param value string value to cache
    * @param ttl optional time-to-live in seconds before the value expires;
    *            when omitted, behavior is implementation-defined (e.g. the
    *            in-memory driver falls back to its configured defaultTTL)
    * @throws if the value cannot be stored
    */
   set(key: string, value: string, ttl?: number): Promise<void>;
   /**
    * Removes a value from the cache
    *
    * @param key unique identifier of the value to delete
    */
   del(key: string): Promise<void>;
}

export { cacheDriverTestSuite } from "./cache-driver-test-suite";

View File

@@ -0,0 +1,28 @@
/**
 * Interface for email driver implementations.
 *
 * @typeParam Data driver-specific response returned on a successful send
 * @typeParam Options driver-specific per-send options
 */
export interface IEmailDriver<Data = unknown, Options = object> {
   /**
    * Sends a single email.
    *
    * @param to recipient address
    * @param subject subject line
    * @param body a plain string (drivers in this package send it as HTML)
    *             or an explicit text/html pair
    * @param options driver-specific send options (cc, bcc, headers, ...)
    * @returns driver-specific response data
    * @throws if the underlying provider reports a failure
    */
   send(
      to: string,
      subject: string,
      body: string | { text: string; html: string },
      options?: Options,
   ): Promise<Data>;
}
import type { BkndConfig } from "bknd";
import { resendEmail, memoryCache } from "bknd/core";

// Example config: wires the resend email driver and the in-memory cache driver,
// and exposes a demo route that exercises the email driver.
export default {
   // runs once the app has been built; safe place to register extra routes
   onBuilt: async (app) => {
      app.server.get("/send-email", async (c) => {
         // drivers are optional, hence the optional chaining
         if (await app.drivers?.email?.send("test@test.com", "Test", "Test")) {
            return c.text("success");
         }
         return c.text("failed");
      });
   },
   options: {
      drivers: {
         email: resendEmail({ apiKey: "..." }),
         cache: memoryCache(),
      },
   },
} as const satisfies BkndConfig;

View File

@@ -0,0 +1,20 @@
import { describe, it, expect } from "bun:test";
import { mailchannelsEmail } from "./mailchannels";
const ALL_TESTS = !!process.env.ALL_TESTS;

// NOTE(review): these tests hit the live MailChannels API and are skipped when
// ALL_TESTS is set — confirm the intended polarity (skipIf(!ALL_TESTS) would
// instead skip them by default).
describe.skipIf(ALL_TESTS)("mailchannels", () => {
   it("should throw on failed", async () => {
      const driver = mailchannelsEmail({ apiKey: "invalid" } as any);
      // fix: await the async matcher — without `await` the rejection is not
      // evaluated within this test and a failure would go unreported
      await expect(driver.send("foo@bar.com", "Test", "Test")).rejects.toThrow();
   });
   it("should send an email", async () => {
      const driver = mailchannelsEmail({
         apiKey: process.env.MAILCHANNELS_API_KEY!,
         from: { email: "accounts@bknd.io", name: "Dennis Senn" },
      });
      const response = await driver.send("ds@bknd.io", "Test", "Test");
      expect(response).toBeDefined();
   });
});

View File

@@ -0,0 +1,117 @@
import { mergeObject, type RecursivePartial } from "core/utils";
import type { IEmailDriver } from "./index";
// Driver construction options.
export type MailchannelsEmailOptions = {
   // MailChannels API key (sent as X-Api-Key header)
   apiKey: string;
   // override for the send endpoint; defaults to the public MailChannels API
   host?: string;
   // default sender applied when the per-send payload does not override it
   from?: { email: string; name: string };
};

export type Recipient = {
   email: string;
   name?: string;
};

// Per-send options; mirrors the MailChannels send request body
// (all fields optional — merged over the defaults the driver builds).
// NOTE(review): field set presumably tracks the MailChannels API schema;
// verify against current API docs before relying on individual fields.
export type MailchannelsSendOptions = RecursivePartial<{
   attachments: Array<{
      // base64-encoded attachment content
      content: string;
      filename: string;
      type: string;
   }>;
   campaign_id: string;
   // MIME parts of the message (e.g. text/plain, text/html)
   content: Array<{
      template_type?: string;
      type: string;
      value: string;
   }>;
   dkim_domain: string;
   dkim_private_key: string;
   dkim_selector: string;
   from: Recipient;
   headers: {};
   // one personalization per recipient group
   personalizations: Array<{
      bcc: Array<Recipient>;
      cc: Array<Recipient>;
      dkim_domain: string;
      dkim_private_key: string;
      dkim_selector: string;
      dynamic_template_data: {};
      from: Recipient;
      headers: {};
      reply_to: Recipient;
      subject: string;
      to: Array<Recipient>;
   }>;
   reply_to: Recipient;
   subject: string;
   tracking_settings: {
      click_tracking: {
         enable: boolean;
      };
      open_tracking: {
         enable: boolean;
      };
   };
   transactional: boolean;
}>;

// Shape of the MailChannels send response; `results[0].status` is checked
// against "sent" by the driver.
export type MailchannelsEmailResponse = {
   request_id: string;
   results: Array<{
      index: number;
      message_id: string;
      reason: string;
      status: string;
   }>;
};
/**
 * Email driver for the MailChannels transactional send API.
 *
 * @param config API key, optional endpoint override and default sender
 * @returns an IEmailDriver that POSTs to the MailChannels send endpoint
 */
export const mailchannelsEmail = (
   config: MailchannelsEmailOptions,
): IEmailDriver<MailchannelsEmailResponse, MailchannelsSendOptions> => {
   const host = config.host ?? "https://api.mailchannels.net/tx/v1/send";
   const from = config.from ?? { email: "onboarding@mailchannels.net", name: "Mailchannels" };
   return {
      /**
       * Sends an email via MailChannels.
       * @throws when the API does not report the first result as "sent"
       */
      send: async (
         to: string,
         subject: string,
         body: string | { text: string; html: string },
         options?: MailchannelsSendOptions,
      ) => {
         // string bodies are sent as HTML; objects carry explicit text + html parts.
         // caller-provided options are deep-merged over the defaults built here.
         const payload: MailchannelsSendOptions = mergeObject(
            {
               from,
               subject,
               content:
                  typeof body === "string"
                     ? [{ type: "text/html", value: body }]
                     : [
                          { type: "text/plain", value: body.text },
                          { type: "text/html", value: body.html },
                       ],
               personalizations: [
                  {
                     to: [{ email: to }],
                  },
               ],
            },
            options ?? {},
         );
         const res = await fetch(host, {
            method: "POST",
            headers: {
               "Content-Type": "application/json",
               "X-Api-Key": config.apiKey,
            },
            body: JSON.stringify(payload),
         });
         // read the body exactly once — a Response body cannot be consumed twice
         const data = (await res.json()) as MailchannelsEmailResponse;
         if (!data?.results?.length || data.results[0]?.status !== "sent") {
            throw new Error(data?.results?.[0]?.reason ?? "Unknown error");
         }
         // fix: previously `res.json()` was called a second time here, which throws
         // ("body already consumed") on every successful send
         return data;
      },
   };
};

View File

@@ -0,0 +1,21 @@
import { describe, it, expect } from "bun:test";
import { resendEmail } from "./resend";
const ALL_TESTS = !!process.env.ALL_TESTS;

// NOTE(review): these tests hit the live Resend API and are skipped when
// ALL_TESTS is set — confirm the intended polarity.
describe.skipIf(ALL_TESTS)("resend", () => {
   // fix: removed `.only`, which silently disabled every other test in this file
   it("should throw on failed", async () => {
      const driver = resendEmail({ apiKey: "invalid" } as any);
      // fix: await the async matcher so the rejection is evaluated within this test
      await expect(driver.send("foo@bar.com", "Test", "Test")).rejects.toThrow();
   });
   it("should send an email", async () => {
      const driver = resendEmail({
         apiKey: process.env.RESEND_API_KEY!,
         from: "BKND <help@bknd.io>",
      });
      const response = await driver.send("help@bknd.io", "Test", "Test");
      expect(response).toBeDefined();
      expect(response.id).toBeDefined();
   });
});

View File

@@ -0,0 +1,72 @@
import type { IEmailDriver } from "./index";
// Driver construction options.
export type ResendEmailOptions = {
   // Resend API key (sent as Bearer token)
   apiKey: string;
   // override for the send endpoint; defaults to the public Resend API
   host?: string;
   // default sender, e.g. `Acme <onboarding@resend.dev>`
   from?: string;
};

// Per-send options; spread into the request body after the defaults,
// so they can override from/to/subject/html/text as well.
export type ResendEmailSendOptions = {
   bcc?: string | string[];
   cc?: string | string[];
   reply_to?: string | string[];
   // ISO 8601 timestamp for delayed delivery
   scheduled_at?: string;
   headers?: Record<string, string>;
   attachments?: {
      content: Buffer | string;
      filename: string;
      path: string;
      content_type: string;
   }[];
   tags?: {
      name: string;
      value: string;
   }[];
};

// Successful send response: the created email's id.
export type ResendEmailResponse = {
   id: string;
};
/**
 * Email driver for the Resend API (POST /emails).
 *
 * @param config API key, optional endpoint override and default sender
 * @returns an IEmailDriver resolving to the created email's id
 */
export const resendEmail = (
   config: ResendEmailOptions,
): IEmailDriver<ResendEmailResponse, ResendEmailSendOptions> => {
   const endpoint = config.host ?? "https://api.resend.com/emails";
   const sender = config.from ?? "Acme <onboarding@resend.dev>";

   return {
      send: async (
         to: string,
         subject: string,
         body: string | { text: string; html: string },
         options?: ResendEmailSendOptions,
      ) => {
         // plain string bodies are sent as HTML; objects carry both parts
         const content =
            typeof body === "string" ? { html: body } : { html: body.html, text: body.text };

         const response = await fetch(endpoint, {
            method: "POST",
            headers: {
               "Content-Type": "application/json",
               Authorization: `Bearer ${config.apiKey}`,
            },
            // options are spread last so callers can override any default field
            body: JSON.stringify({ from: sender, to, subject, ...content, ...options }),
         });

         if (!response.ok) {
            // surface the raw error body from the API
            throw new Error(await response.text());
         }
         return (await response.json()) as ResendEmailResponse;
      },
   };
};

View File

@@ -0,0 +1,93 @@
import type { IEmailDriver } from "./index";
import { AwsClient } from "aws4fetch";
// Driver construction options; credentials are used to SigV4-sign requests.
export type SesEmailOptions = {
   // AWS region, used in both the endpoint host and the request signature
   region: string;
   accessKeyId: string;
   secretAccessKey: string;
   // sender address (SES requires a verified identity)
   from: string;
};

// Per-send options mapped onto the SES v2 SendEmail request.
export type SesSendOptions = {
   cc?: string[];
   bcc?: string[];
   replyTo?: string[];
};

// Driver response: SES MessageId plus raw HTTP status/body for diagnostics.
export type SesEmailResponse = {
   MessageId: string;
   status: number;
   body: string;
};
/**
 * Email driver for AWS SES v2 (SendEmail), using aws4fetch for SigV4 signing.
 *
 * @param config AWS region, credentials and verified sender address
 * @returns an IEmailDriver resolving to the SES MessageId plus raw status/body
 */
export const sesEmail = (
   config: SesEmailOptions,
): IEmailDriver<SesEmailResponse, SesSendOptions> => {
   // SES v2 "outbound-emails" REST endpoint for the configured region
   const endpoint = `https://email.${config.region}.amazonaws.com/v2/email/outbound-emails`;
   const from = config.from;
   // signs each request with SigV4 for the "ses" service
   const aws = new AwsClient({
      accessKeyId: config.accessKeyId,
      secretAccessKey: config.secretAccessKey,
      service: "ses",
      region: config.region,
   });
   return {
      /**
       * Sends an email via SES SendEmail.
       * @throws when SES responds with a non-2xx status (message extracted from the JSON error body)
       */
      send: async (
         to: string,
         subject: string,
         body: string | { text: string; html: string },
         options?: SesSendOptions,
      ) => {
         // SES v2 SendEmail JSON payload
         const payload: any = {
            FromEmailAddress: from,
            Destination: {
               ToAddresses: [to],
            },
            Content: {
               Simple: {
                  Subject: { Data: subject, Charset: "UTF-8" },
                  Body: {},
               },
            },
         };
         // plain string bodies are sent as HTML; objects may carry either/both parts
         if (typeof body === "string") {
            payload.Content.Simple.Body.Html = { Data: body, Charset: "UTF-8" };
         } else {
            if (body.html) payload.Content.Simple.Body.Html = { Data: body.html, Charset: "UTF-8" };
            if (body.text) payload.Content.Simple.Body.Text = { Data: body.text, Charset: "UTF-8" };
         }
         // optional recipients/reply-to are only included when non-empty
         if (options?.cc && options.cc.length > 0) {
            payload.Destination.CcAddresses = options.cc;
         }
         if (options?.bcc && options.bcc.length > 0) {
            payload.Destination.BccAddresses = options.bcc;
         }
         if (options?.replyTo && options.replyTo.length > 0) {
            payload.ReplyToAddresses = options.replyTo;
         }
         const res = await aws.fetch(endpoint, {
            method: "POST",
            headers: { "content-type": "application/json" },
            body: JSON.stringify(payload),
         });
         // read the body once as text so it can be reported verbatim on errors
         const text = await res.text();
         if (!res.ok) {
            // SES v2 returns JSON error body
            let errorMsg = text;
            try {
               const err = JSON.parse(text);
               // error key casing varies ("message" vs "Message"); fall back to raw text
               errorMsg = err.message || err.Message || text;
            } catch {}
            throw new Error(`SES SendEmail failed: ${errorMsg}`);
         }
         // parse MessageId from JSON response
         let MessageId: string = "";
         try {
            const data = JSON.parse(text);
            MessageId = data.MessageId;
         } catch {}
         return { MessageId, status: res.status, body: text };
      },
   };
};

View File

@@ -0,0 +1,7 @@
// Public entry point for the built-in drivers (cache + email).
export type { ICacheDriver } from "./cache";
export { memoryCache } from "./cache/in-memory";
export type { IEmailDriver } from "./email";
export { resendEmail } from "./email/resend";
export { sesEmail } from "./email/ses";
export { mailchannelsEmail } from "./email/mailchannels";

View File

@@ -1,6 +1,6 @@
import { type Event, type EventClass, InvalidEventReturn } from "./Event";
import { EventListener, type ListenerHandler, type ListenerMode } from "./EventListener";
import { $console } from "core";
import { $console } from "core/utils";
export type RegisterListenerConfig =
| ListenerMode

View File

@@ -26,9 +26,18 @@ export {
} from "./object/query/query";
export { Registry, type Constructor } from "./registry/Registry";
export { getFlashMessage } from "./server/flash";
export { s, jsc, describeRoute } from "./object/schema";
export {
s,
parse,
jsc,
describeRoute,
schemaToSpec,
openAPISpecs,
type ParseOptions,
InvalidSchemaError,
} from "./object/schema";
export * from "./console";
export * from "./drivers";
export * from "./events";
// compatibility

View File

@@ -1,3 +1,5 @@
import type { MaybePromise } from "core/types";
export type Matcher<T = unknown> = {
toEqual: (expected: T, failMsg?: string) => void;
toBe: (expected: T, failMsg?: string) => void;
@@ -16,6 +18,7 @@ export interface Test {
skipIf: (condition: boolean) => (label: string, fn: TestFn) => void;
}
export type TestRunner = {
describe: (label: string, asyncFn: () => MaybePromise<void>) => void;
test: Test;
mock: <T extends (...args: any[]) => any>(fn: T) => T | any;
expect: <T = unknown>(
@@ -25,6 +28,9 @@ export type TestRunner = {
resolves: Matcher<Awaited<T>>;
rejects: Matcher<Awaited<T>>;
};
beforeEach: (fn: () => MaybePromise<void>) => void;
afterEach: (fn: () => MaybePromise<void>) => void;
afterAll: (fn: () => MaybePromise<void>) => void;
};
export async function retry<T>(

View File

@@ -0,0 +1,12 @@
import { createApp as createAppInternal, type CreateAppConfig } from "App";
import { bunSqlite } from "adapter/bun/connection/BunSqliteConnection";
import { Connection } from "data/connection/Connection";

export { App } from "App";

// Test helper: builds an app that defaults to Bun's SQLite connection when no
// Connection instance is provided.
// NOTE(review): non-Connection values are passed straight to bunSqlite via an
// `as any` cast — confirm the accepted config shape against bunSqlite's signature.
export function createApp({ connection, ...config }: CreateAppConfig = {}) {
   return createAppInternal({
      ...config,
      connection: Connection.isConnection(connection) ? connection : bunSqlite(connection as any),
   });
}

View File

@@ -2,3 +2,5 @@ export interface Serializable<Class, Json extends object = object> {
toJSON(): Json;
fromJSON(json: Json): Class;
}
export type MaybePromise<T> = T | Promise<T>;

View File

@@ -2,7 +2,7 @@ import { extension, guess, isMimeType } from "media/storage/mime-types-tiny";
import { randomString } from "core/utils/strings";
import type { Context } from "hono";
import { invariant } from "core/utils/runtime";
import { $console } from "../console";
import { $console } from "./console";
export function getContentName(request: Request): string | undefined;
export function getContentName(contentDisposition: string): string | undefined;

View File

@@ -1,3 +1,4 @@
export * from "./console";
export * from "./browser";
export * from "./objects";
export * from "./strings";

View File

@@ -48,6 +48,14 @@ export function isNode() {
}
}
// Returns true when running under the Bun runtime (global `Bun` is defined).
// NOTE: `typeof` on an undeclared identifier does not throw; the try/catch is
// purely defensive.
export function isBun() {
   try {
      return typeof Bun !== "undefined";
   } catch (e) {
      return false;
   }
}
export function invariant(condition: boolean | any, message: string) {
if (!condition) {
throw new Error(message);

View File

@@ -1,4 +1,4 @@
import { $console } from "core";
import { $console } from "./console";
type ConsoleSeverity = "log" | "warn" | "error";
const _oldConsoles = {
@@ -36,14 +36,14 @@ export function disableConsoleLog(severities: ConsoleSeverity[] = ["log", "warn"
severities.forEach((severity) => {
console[severity] = () => null;
});
$console.setLevel("critical");
$console?.setLevel("critical");
}
export function enableConsoleLog() {
Object.entries(_oldConsoles).forEach(([severity, fn]) => {
console[severity as ConsoleSeverity] = fn;
});
$console.resetLevel();
$console?.resetLevel();
}
export function formatMemoryUsage() {

View File

@@ -1,5 +1,5 @@
import type { DB } from "core";
import type { EntityData, RepoQueryIn, RepositoryResponse } from "data";
import type { EntityData, RepoQueryIn, RepositoryResultJSON } from "data";
import type { Insertable, Selectable, Updateable } from "kysely";
import { type BaseModuleApiOptions, ModuleApi, type PrimaryFieldType } from "modules";
import type { FetchPromise, ResponseObject } from "modules/ModuleApi";
@@ -32,10 +32,7 @@ export class DataApi extends ModuleApi<DataApiOptions> {
query: Omit<RepoQueryIn, "where" | "limit" | "offset"> = {},
) {
type Data = E extends keyof DB ? Selectable<DB[E]> : EntityData;
return this.get<Pick<RepositoryResponse<Data>, "meta" | "data">>(
["entity", entity as any, id],
query,
);
return this.get<RepositoryResultJSON<Data>>(["entity", entity as any, id], query);
}
readOneBy<E extends keyof DB | string>(
@@ -43,7 +40,7 @@ export class DataApi extends ModuleApi<DataApiOptions> {
query: Omit<RepoQueryIn, "limit" | "offset" | "sort"> = {},
) {
type Data = E extends keyof DB ? Selectable<DB[E]> : EntityData;
type T = Pick<RepositoryResponse<Data>, "meta" | "data">;
type T = RepositoryResultJSON<Data>;
return this.readMany(entity, {
...query,
limit: 1,
@@ -53,7 +50,7 @@ export class DataApi extends ModuleApi<DataApiOptions> {
readMany<E extends keyof DB | string>(entity: E, query: RepoQueryIn = {}) {
type Data = E extends keyof DB ? Selectable<DB[E]> : EntityData;
type T = Pick<RepositoryResponse<Data[]>, "meta" | "data">;
type T = RepositoryResultJSON<Data[]>;
const input = query ?? this.options.defaultQuery;
const req = this.get<T>(["entity", entity as any], input);
@@ -72,7 +69,7 @@ export class DataApi extends ModuleApi<DataApiOptions> {
query: RepoQueryIn = {},
) {
type Data = R extends keyof DB ? Selectable<DB[R]> : EntityData;
return this.get<Pick<RepositoryResponse<Data[]>, "meta" | "data">>(
return this.get<RepositoryResultJSON<Data[]>>(
["entity", entity as any, id, reference],
query ?? this.options.defaultQuery,
);
@@ -83,7 +80,7 @@ export class DataApi extends ModuleApi<DataApiOptions> {
input: Insertable<Input>,
) {
type Data = E extends keyof DB ? Selectable<DB[E]> : EntityData;
return this.post<RepositoryResponse<Data>>(["entity", entity as any], input);
return this.post<RepositoryResultJSON<Data>>(["entity", entity as any], input);
}
createMany<E extends keyof DB | string, Input = E extends keyof DB ? DB[E] : EntityData>(
@@ -94,7 +91,7 @@ export class DataApi extends ModuleApi<DataApiOptions> {
throw new Error("input is required");
}
type Data = E extends keyof DB ? Selectable<DB[E]> : EntityData;
return this.post<RepositoryResponse<Data[]>>(["entity", entity as any], input);
return this.post<RepositoryResultJSON<Data[]>>(["entity", entity as any], input);
}
updateOne<E extends keyof DB | string, Input = E extends keyof DB ? DB[E] : EntityData>(
@@ -104,7 +101,7 @@ export class DataApi extends ModuleApi<DataApiOptions> {
) {
if (!id) throw new Error("ID is required");
type Data = E extends keyof DB ? Selectable<DB[E]> : EntityData;
return this.patch<RepositoryResponse<Data>>(["entity", entity as any, id], input);
return this.patch<RepositoryResultJSON<Data>>(["entity", entity as any, id], input);
}
updateMany<E extends keyof DB | string, Input = E extends keyof DB ? DB[E] : EntityData>(
@@ -114,7 +111,7 @@ export class DataApi extends ModuleApi<DataApiOptions> {
) {
this.requireObjectSet(where);
type Data = E extends keyof DB ? Selectable<DB[E]> : EntityData;
return this.patch<RepositoryResponse<Data[]>>(["entity", entity as any], {
return this.patch<RepositoryResultJSON<Data[]>>(["entity", entity as any], {
update,
where,
});
@@ -123,24 +120,24 @@ export class DataApi extends ModuleApi<DataApiOptions> {
deleteOne<E extends keyof DB | string>(entity: E, id: PrimaryFieldType) {
if (!id) throw new Error("ID is required");
type Data = E extends keyof DB ? Selectable<DB[E]> : EntityData;
return this.delete<RepositoryResponse<Data>>(["entity", entity as any, id]);
return this.delete<RepositoryResultJSON<Data>>(["entity", entity as any, id]);
}
deleteMany<E extends keyof DB | string>(entity: E, where: RepoQueryIn["where"]) {
this.requireObjectSet(where);
type Data = E extends keyof DB ? Selectable<DB[E]> : EntityData;
return this.delete<RepositoryResponse<Data>>(["entity", entity as any], where);
return this.delete<RepositoryResultJSON<Data>>(["entity", entity as any], where);
}
count<E extends keyof DB | string>(entity: E, where: RepoQueryIn["where"] = {}) {
return this.post<RepositoryResponse<{ entity: E; count: number }>>(
return this.post<RepositoryResultJSON<{ entity: E; count: number }>>(
["entity", entity as any, "fn", "count"],
where,
);
}
exists<E extends keyof DB | string>(entity: E, where: RepoQueryIn["where"] = {}) {
return this.post<RepositoryResponse<{ entity: E; exists: boolean }>>(
return this.post<RepositoryResultJSON<{ entity: E; exists: boolean }>>(
["entity", entity as any, "fn", "exists"],
where,
);

View File

@@ -1,11 +1,8 @@
import { $console, isDebug } from "core";
import {
DataPermissions,
type EntityData,
type EntityManager,
type MutatorResponse,
type RepoQuery,
type RepositoryResponse,
repoQuery,
} from "data";
import type { Handler } from "hono/types";
@@ -32,33 +29,6 @@ export class DataController extends Controller {
return this.ctx.guard;
}
repoResult<T extends RepositoryResponse<any> = RepositoryResponse>(
res: T,
): Pick<T, "meta" | "data"> {
let meta: Partial<RepositoryResponse["meta"]> = {};
if ("meta" in res) {
const { query, ...rest } = res.meta;
meta = rest;
if (isDebug()) meta.query = query;
}
const template = { data: res.data, meta };
// @todo: this works but it breaks in FE (need to improve DataTable)
// filter empty
return Object.fromEntries(
Object.entries(template).filter(([_, v]) => typeof v !== "undefined" && v !== null),
) as any;
}
mutatorResult(res: MutatorResponse | MutatorResponse<EntityData>) {
const template = { data: res.data };
// filter empty
return Object.fromEntries(Object.entries(template).filter(([_, v]) => v !== undefined));
}
entityExists(entity: string) {
try {
return !!this.em.entity(entity);
@@ -225,7 +195,7 @@ export class DataController extends Controller {
},
);
return hono.all("*", (c) => c.notFound());
return hono;
}
private getEntityRoutes() {
@@ -257,7 +227,7 @@ export class DataController extends Controller {
const where = c.req.valid("json") as any;
const result = await this.em.repository(entity).count(where);
return c.json({ entity, count: result.count });
return c.json({ entity, ...result.data });
},
);
@@ -279,7 +249,7 @@ export class DataController extends Controller {
const where = c.req.valid("json") as any;
const result = await this.em.repository(entity).exists(where);
return c.json({ entity, exists: result.exists });
return c.json({ entity, ...result.data });
},
);
@@ -318,7 +288,7 @@ export class DataController extends Controller {
const options = c.req.valid("query") as RepoQuery;
const result = await this.em.repository(entity).findMany(options);
return c.json(this.repoResult(result), { status: result.data ? 200 : 404 });
return c.json(result, { status: result.data ? 200 : 404 });
},
);
@@ -347,7 +317,7 @@ export class DataController extends Controller {
const options = c.req.valid("query") as RepoQuery;
const result = await this.em.repository(entity).findId(id, options);
return c.json(this.repoResult(result), { status: result.data ? 200 : 404 });
return c.json(result, { status: result.data ? 200 : 404 });
},
);
@@ -380,7 +350,7 @@ export class DataController extends Controller {
.repository(entity)
.findManyByReference(id, reference, options);
return c.json(this.repoResult(result), { status: result.data ? 200 : 404 });
return c.json(result, { status: result.data ? 200 : 404 });
},
);
@@ -414,7 +384,7 @@ export class DataController extends Controller {
const options = (await c.req.json()) as RepoQuery;
const result = await this.em.repository(entity).findMany(options);
return c.json(this.repoResult(result), { status: result.data ? 200 : 404 });
return c.json(result, { status: result.data ? 200 : 404 });
},
);
@@ -440,11 +410,11 @@ export class DataController extends Controller {
if (Array.isArray(body)) {
const result = await this.em.mutator(entity).insertMany(body);
return c.json(this.mutatorResult(result), 201);
return c.json(result, 201);
}
const result = await this.em.mutator(entity).insertOne(body);
return c.json(this.mutatorResult(result), 201);
return c.json(result, 201);
},
);
@@ -475,7 +445,7 @@ export class DataController extends Controller {
};
const result = await this.em.mutator(entity).updateWhere(update, where);
return c.json(this.mutatorResult(result));
return c.json(result);
},
);
@@ -497,7 +467,7 @@ export class DataController extends Controller {
const body = (await c.req.json()) as EntityData;
const result = await this.em.mutator(entity).updateOne(id, body);
return c.json(this.mutatorResult(result));
return c.json(result);
},
);
@@ -517,7 +487,7 @@ export class DataController extends Controller {
}
const result = await this.em.mutator(entity).deleteOne(id);
return c.json(this.mutatorResult(result));
return c.json(result);
},
);
@@ -539,7 +509,7 @@ export class DataController extends Controller {
const where = (await c.req.json()) as RepoQuery["where"];
const result = await this.em.mutator(entity).deleteWhere(where);
return c.json(this.mutatorResult(result));
return c.json(result);
},
);

View File

@@ -2,12 +2,15 @@ import {
type AliasableExpression,
type ColumnBuilderCallback,
type ColumnDataType,
type Compilable,
type CompiledQuery,
type DatabaseIntrospector,
type Dialect,
type Expression,
type Kysely,
type KyselyPlugin,
type OnModifyForeignAction,
type QueryResult,
type RawBuilder,
type SelectQueryBuilder,
type SelectQueryNode,
@@ -15,7 +18,9 @@ import {
sql,
} from "kysely";
import type { BaseIntrospector, BaseIntrospectorConfig } from "./BaseIntrospector";
import type { Constructor } from "core";
import type { Constructor, DB } from "core";
import { KyselyPluginRunner } from "data/plugins/KyselyPluginRunner";
import type { Field } from "data/fields/Field";
export type QB = SelectQueryBuilder<any, any, any>;
@@ -75,22 +80,44 @@ export type DbFunctions = {
>;
};
export type ConnQuery = CompiledQuery | Compilable;
export type ConnQueryResult<T extends ConnQuery> = T extends CompiledQuery<infer R>
? QueryResult<R>
: T extends Compilable<infer R>
? QueryResult<R>
: never;
export type ConnQueryResults<T extends ConnQuery[]> = {
[K in keyof T]: ConnQueryResult<T[K]>;
};
const CONN_SYMBOL = Symbol.for("bknd:connection");
export abstract class Connection<DB = any> {
export type Features = {
batching: boolean;
softscans: boolean;
};
export abstract class Connection<Client = unknown> {
abstract name: string;
protected initialized = false;
kysely: Kysely<DB>;
protected readonly supported = {
protected pluginRunner: KyselyPluginRunner;
protected readonly supported: Partial<Features> = {
batching: false,
softscans: true,
};
kysely: Kysely<DB>;
client!: Client;
constructor(
kysely: Kysely<DB>,
kysely: Kysely<any>,
public fn: Partial<DbFunctions> = {},
protected plugins: KyselyPlugin[] = [],
) {
this.kysely = kysely;
this[CONN_SYMBOL] = true;
this.pluginRunner = new KyselyPluginRunner(plugins);
}
// @todo: consider moving constructor logic here, required by sqlocal
@@ -121,30 +148,46 @@ export abstract class Connection<DB = any> {
return res.rows.length > 0;
}
protected async batch<Queries extends QB[]>(
queries: [...Queries],
): Promise<{
[K in keyof Queries]: Awaited<ReturnType<Queries[K]["execute"]>>;
}> {
throw new Error("Batching not supported");
protected async transformResultRows(result: any[]): Promise<any[]> {
return await this.pluginRunner.transformResultRows(result);
}
async batchQuery<Queries extends QB[]>(
queries: [...Queries],
): Promise<{
[K in keyof Queries]: Awaited<ReturnType<Queries[K]["execute"]>>;
}> {
// bypass if no client support
if (!this.supports("batching")) {
const data: any = [];
for (const q of queries) {
const result = await q.execute();
data.push(result);
}
return data;
}
/**
* Execute a query and return the result including all metadata
* returned from the dialect.
*/
async executeQueries<O extends ConnQuery[]>(...qbs: O): Promise<ConnQueryResults<O>> {
return Promise.all(qbs.map(async (qb) => await this.kysely.executeQuery(qb))) as any;
}
return await this.batch(queries);
async executeQuery<O extends ConnQuery>(qb: O): Promise<ConnQueryResult<O>> {
const res = await this.executeQueries(qb);
return res[0] as any;
}
protected getCompiled(...qbs: ConnQuery[]): CompiledQuery[] {
return qbs.map((qb) => {
if ("compile" in qb) {
return qb.compile();
}
return qb;
});
}
protected async withTransformedRows<
Key extends string = "rows",
O extends { [K in Key]: any[] }[] = [],
>(result: O, _key?: Key): Promise<O> {
return (await Promise.all(
result.map(async (row) => {
const key = _key ?? "rows";
const { [key]: rows, ...r } = row;
return {
...r,
rows: await this.transformResultRows(rows),
};
}),
)) as any;
}
protected validateFieldSpecType(type: string): type is FieldSpec["type"] {
@@ -158,6 +201,14 @@ export abstract class Connection<DB = any> {
abstract getFieldSchema(spec: FieldSpec, strict?: boolean): SchemaResponse;
toDriver(value: unknown, field: Field): unknown {
return value;
}
fromDriver(value: any, field: Field): unknown {
return value;
}
async close(): Promise<void> {
// no-op by default
}

View File

@@ -1,6 +1,8 @@
import { Connection, type FieldSpec, type SchemaResponse } from "./Connection";
export class DummyConnection extends Connection {
override name: string = "dummy";
protected override readonly supported = {
batching: true,
};

View File

@@ -0,0 +1,353 @@
import type { TestRunner } from "core/test";
import { Connection, type FieldSpec } from "./Connection";
import { getPath } from "core/utils";
import * as proto from "data/prototype";
import { createApp } from "App";
import type { MaybePromise } from "core/types";
// @todo: add various datatypes: string, number, boolean, object, array, null, undefined, date, etc.
// @todo: add toDriver/fromDriver tests on all types and fields
/**
 * Shared test suite exercising a `Connection` implementation end-to-end:
 * base behavior (ping / init / field-schema generation), raw query execution
 * and introspection, and a full app integration round-trip.
 *
 * @param testRunner test primitives of the host runner (bun:test, vitest, …)
 * @param makeConnection factory producing a fresh connection plus a dispose callback
 * @param rawDialectDetails dot-notation paths expected on this dialect's raw query results
 */
export function connectionTestSuite(
   testRunner: TestRunner,
   {
      makeConnection,
      rawDialectDetails,
   }: {
      makeConnection: () => MaybePromise<{
         connection: Connection;
         dispose: () => MaybePromise<void>;
      }>;
      rawDialectDetails: string[];
   },
) {
   const { test, expect, describe, beforeEach, afterEach, afterAll } = testRunner;

   describe("base", () => {
      let ctx: Awaited<ReturnType<typeof makeConnection>>;
      beforeEach(async () => {
         ctx = await makeConnection();
      });
      afterEach(async () => {
         await ctx.dispose();
      });

      test("pings", async () => {
         const res = await ctx.connection.ping();
         expect(res).toBe(true);
      });

      test("initializes", async () => {
         await ctx.connection.init();
         // @ts-expect-error
         expect(ctx.connection.initialized).toBe(true);
         expect(ctx.connection.client).toBeDefined();
      });

      test("isConnection", async () => {
         expect(Connection.isConnection(ctx.connection)).toBe(true);
      });

      test("getFieldSchema", async () => {
         // build a single-column CREATE TABLE node to inspect the generated column flags
         const specToNode = (spec: FieldSpec) => {
            const schema = ctx.connection.kysely.schema
               .createTable("test")
               // @ts-expect-error
               .addColumn(...ctx.connection.getFieldSchema(spec));
            return schema.toOperationNode();
         };
         {
            // primary
            const node = specToNode({
               type: "integer",
               name: "id",
               primary: true,
            });
            const col = node.columns[0]!;
            expect(col.primaryKey).toBe(true);
            expect(col.notNull).toBe(true);
         }
         {
            // normal
            const node = specToNode({
               type: "text",
               name: "text",
            });
            const col = node.columns[0]!;
            expect(!col.primaryKey).toBe(true);
            expect(!col.notNull).toBe(true);
         }
         {
            // nullable (expect to be same as normal)
            const node = specToNode({
               type: "text",
               name: "text",
               nullable: true,
            });
            const col = node.columns[0]!;
            expect(!col.primaryKey).toBe(true);
            expect(!col.notNull).toBe(true);
         }
      });
   });

   describe("schema", async () => {
      // one shared connection for this block; table + index are created once
      const { connection, dispose } = await makeConnection();
      afterAll(async () => {
         await dispose();
      });

      const fields = [
         {
            type: "integer",
            name: "id",
            primary: true,
         },
         {
            type: "text",
            name: "text",
         },
         {
            type: "json",
            name: "json",
         },
      ] as const satisfies FieldSpec[];

      let b = connection.kysely.schema.createTable("test");
      for (const field of fields) {
         // @ts-expect-error
         b = b.addColumn(...connection.getFieldSchema(field));
      }
      await b.execute();

      // add index
      await connection.kysely.schema.createIndex("test_index").on("test").columns(["id"]).execute();

      test("executes query", async () => {
         await connection.kysely
            .insertInto("test")
            .values({ id: 1, text: "test", json: JSON.stringify({ a: 1 }) })
            .execute();
         const expected = { id: 1, text: "test", json: { a: 1 } };

         const qb = connection.kysely.selectFrom("test").selectAll();
         const res = await connection.executeQuery(qb);
         expect(res.rows).toEqual([expected]);
         // dialect-specific raw metadata must be present on the result
         expect(rawDialectDetails.every((detail) => getPath(res, detail) !== undefined)).toBe(true);

         {
            const res = await connection.executeQueries(qb, qb);
            expect(res.length).toBe(2);
            res.map((r) => {
               expect(r.rows).toEqual([expected]);
               expect(rawDialectDetails.every((detail) => getPath(r, detail) !== undefined)).toBe(
                  true,
               );
            });
         }
      });

      test("introspects", async () => {
         const tables = await connection.getIntrospector().getTables({
            withInternalKyselyTables: false,
         });
         // dataType differs per dialect — blank it out before comparing
         const clean = tables.map((t) => ({
            ...t,
            columns: t.columns.map((c) => ({
               ...c,
               dataType: undefined,
            })),
         }));
         expect(clean).toEqual([
            {
               name: "test",
               isView: false,
               columns: [
                  {
                     name: "id",
                     dataType: undefined,
                     isNullable: false,
                     isAutoIncrementing: true,
                     hasDefaultValue: false,
                  },
                  {
                     name: "text",
                     dataType: undefined,
                     isNullable: true,
                     isAutoIncrementing: false,
                     hasDefaultValue: false,
                  },
                  {
                     name: "json",
                     dataType: undefined,
                     isNullable: true,
                     isAutoIncrementing: false,
                     hasDefaultValue: false,
                  },
               ],
            },
         ]);
      });

      // fix: this assertion previously executed at describe() collection time,
      // outside of any test(), so a failure was not attributed to a test case
      test("introspects indices", async () => {
         expect(await connection.getIntrospector().getIndices()).toEqual([
            {
               name: "test_index",
               table: "test",
               isUnique: false,
               columns: [
                  {
                     name: "id",
                     order: 0,
                  },
               ],
            },
         ]);
      });
   });

   describe("integration", async () => {
      let ctx: Awaited<ReturnType<typeof makeConnection>>;
      beforeEach(async () => {
         ctx = await makeConnection();
      });
      afterEach(async () => {
         await ctx.dispose();
      });

      test("should create app and ping", async () => {
         const app = createApp({
            connection: ctx.connection,
         });
         await app.build();
         expect(app.version()).toBeDefined();
         expect(await app.em.ping()).toBe(true);
      });

      test("should create a basic schema", async () => {
         const schema = proto.em(
            {
               posts: proto.entity("posts", {
                  title: proto.text().required(),
                  content: proto.text(),
               }),
               comments: proto.entity("comments", {
                  content: proto.text(),
               }),
            },
            (fns, s) => {
               fns.relation(s.comments).manyToOne(s.posts);
               fns.index(s.posts).on(["title"], true);
            },
         );

         const app = createApp({
            connection: ctx.connection,
            initialConfig: {
               data: schema.toJSON(),
            },
         });
         await app.build();

         expect(app.em.entities.length).toBe(2);
         expect(app.em.entities.map((e) => e.name)).toEqual(["posts", "comments"]);

         const api = app.getApi();
         expect(
            (
               await api.data.createMany("posts", [
                  {
                     title: "Hello",
                     content: "World",
                  },
                  {
                     title: "Hello 2",
                     content: "World 2",
                  },
               ])
            ).data,
         ).toEqual([
            {
               id: 1,
               title: "Hello",
               content: "World",
            },
            {
               id: 2,
               title: "Hello 2",
               content: "World 2",
            },
         ] as any);

         // try to create an existing
         expect(
            (
               await api.data.createOne("posts", {
                  title: "Hello",
               })
            ).ok,
         ).toBe(false);

         // add a comment to a post
         await api.data.createOne("comments", {
            content: "Hello",
            posts_id: 1,
         });

         // and then query using a `with` property
         const result = await api.data.readMany("posts", { with: ["comments"] });
         expect(result.length).toBe(2);
         expect(result[0]?.comments?.length).toBe(1);
         expect(result[0]?.comments?.[0]?.content).toBe("Hello");
         expect(result[1]?.comments?.length).toBe(0);
      });

      test("should support uuid", async () => {
         const schema = proto.em(
            {
               posts: proto.entity(
                  "posts",
                  {
                     title: proto.text().required(),
                     content: proto.text(),
                  },
                  {
                     primary_format: "uuid",
                  },
               ),
               comments: proto.entity("comments", {
                  content: proto.text(),
               }),
            },
            (fns, s) => {
               fns.relation(s.comments).manyToOne(s.posts);
               fns.index(s.posts).on(["title"], true);
            },
         );

         const app = createApp({
            connection: ctx.connection,
            initialConfig: {
               data: schema.toJSON(),
            },
         });
         await app.build();

         const config = app.toJSON();
         // @ts-expect-error
         expect(config.data.entities?.posts.fields?.id.config?.format).toBe("uuid");

         const em = app.em;
         const mutator = em.mutator(em.entity("posts"));
         const data = await mutator.insertOne({ title: "Hello", content: "World" });
         // uuid primary: string id of canonical length 36
         expect(data.data.id).toBeString();
         expect(String(data.data.id).length).toBe(36);
      });
   });
}

View File

@@ -5,11 +5,12 @@ export {
type IndexSpec,
type DbFunctions,
type SchemaResponse,
type ConnQuery,
type ConnQueryResults,
customIntrospector,
} from "./Connection";
// sqlite
export { LibsqlConnection, type LibSqlCredentials } from "./sqlite/LibsqlConnection";
export { SqliteConnection } from "./sqlite/SqliteConnection";
export { SqliteIntrospector } from "./sqlite/SqliteIntrospector";
export { SqliteLocalConnection } from "./sqlite/SqliteLocalConnection";

View File

@@ -0,0 +1,96 @@
import type { KyselyPlugin, QueryResult } from "kysely";
import {
type IGenericSqlite,
type OnCreateConnection,
type Promisable,
parseBigInt,
buildQueryFn,
GenericSqliteDialect,
} from "kysely-generic-sqlite";
import { SqliteConnection } from "./SqliteConnection";
import type { ConnQuery, ConnQueryResults, Features } from "../Connection";
export type { IGenericSqlite };
/** A compiled SQL statement plus its bound parameters. */
export type TStatement = { sql: string; parameters?: any[] | readonly any[] };

/** Generic sqlite executor, optionally extended with batched statement execution. */
export interface IGenericCustomSqlite<DB = unknown> extends IGenericSqlite<DB> {
   // execute multiple statements at once; when absent, queries fall back to the base path
   batch?: (stmts: TStatement[]) => Promisable<QueryResult<any>[]>;
}

export type GenericSqliteConnectionConfig = {
   /** connection name override (defaults to "generic-sqlite") */
   name?: string;
   /** additional kysely plugins to register */
   additionalPlugins?: KyselyPlugin[];
   /** table names to hide from introspection */
   excludeTables?: string[];
   /** hook invoked when the underlying driver connection is created */
   onCreateConnection?: OnCreateConnection;
   /** feature-flag overrides merged into the connection's supported features */
   supports?: Partial<Features>;
};
/**
 * Connection backed by any SQLite driver exposed through the
 * `kysely-generic-sqlite` executor interface. Uses the executor's optional
 * `batch` capability for multi-query execution when available.
 */
export class GenericSqliteConnection<DB = unknown> extends SqliteConnection<DB> {
   override name = "generic-sqlite";
   // lazily-resolved, cached executor instance (see getExecutor)
   #executor: IGenericCustomSqlite<DB> | undefined;

   /**
    * @param db underlying driver/database handle; also exposed as `client`
    * @param executor factory returning the generic-sqlite executor (may be async)
    * @param config optional name, extra plugins, excluded tables and feature flags
    */
   constructor(
      public db: DB,
      private executor: () => Promisable<IGenericCustomSqlite<DB>>,
      config?: GenericSqliteConnectionConfig,
   ) {
      super({
         dialect: GenericSqliteDialect,
         dialectArgs: [executor, config?.onCreateConnection],
         additionalPlugins: config?.additionalPlugins,
         excludeTables: config?.excludeTables,
      });
      this.client = db;
      if (config?.name) {
         this.name = config.name;
      }
      if (config?.supports) {
         // merge feature overrides; `undefined` entries keep the defaults
         for (const [key, value] of Object.entries(config.supports)) {
            if (value !== undefined) {
               this.supported[key] = value;
            }
         }
      }
   }

   // resolve (and memoize) the executor produced by the factory
   private async getExecutor() {
      if (!this.#executor) {
         this.#executor = await this.executor();
      }
      return this.#executor;
   }

   /**
    * Execute all queries via the executor's `batch` implementation when it
    * exists; otherwise delegate to the base implementation.
    */
   override async executeQueries<O extends ConnQuery[]>(...qbs: O): Promise<ConnQueryResults<O>> {
      const executor = await this.getExecutor();
      if (!executor.batch) {
         //$console.debug("Batching is not supported by this database")
         return super.executeQueries(...qbs);
      }

      const compiled = this.getCompiled(...qbs);
      const stms: TStatement[] = compiled.map((q) => {
         return {
            sql: q.sql,
            parameters: q.parameters as any[],
         };
      });

      const results = await executor.batch(stms);
      // run result rows through the registered kysely plugins before returning
      return this.withTransformedRows(results) as any;
   }
}
/**
 * Convenience factory for a `GenericSqliteConnection`.
 * The executor callback receives the shared `genericSqliteUtils` helpers.
 * Note: a `name` present in `config` takes precedence over the `name` argument.
 */
export function genericSqlite<DB>(
   name: string,
   db: DB,
   executor: (utils: typeof genericSqliteUtils) => Promisable<IGenericCustomSqlite<DB>>,
   config?: GenericSqliteConnectionConfig,
) {
   const resolveExecutor = () => executor(genericSqliteUtils);
   const options: GenericSqliteConnectionConfig = { name, ...config };
   return new GenericSqliteConnection(db, resolveExecutor, options);
}
// helpers handed to `genericSqlite` executor callbacks, re-exported from kysely-generic-sqlite
export const genericSqliteUtils = {
   parseBigInt,
   buildQueryFn,
};

View File

@@ -1,92 +0,0 @@
import { type Client, type Config, type InStatement, createClient } from "@libsql/client";
import { LibsqlDialect } from "@libsql/kysely-libsql";
import { FilterNumericKeysPlugin } from "data/plugins/FilterNumericKeysPlugin";
import { KyselyPluginRunner } from "data/plugins/KyselyPluginRunner";
import { type DatabaseIntrospector, Kysely, ParseJSONResultsPlugin } from "kysely";
import type { QB } from "../Connection";
import { SqliteConnection } from "./SqliteConnection";
import { SqliteIntrospector } from "./SqliteIntrospector";
import { $console } from "core";
// url schemes that may be forced via the `protocol` credential option
export const LIBSQL_PROTOCOLS = ["wss", "https", "libsql"] as const;

// libsql client config, optionally overriding the protocol part of `url`
export type LibSqlCredentials = Config & {
   protocol?: (typeof LIBSQL_PROTOCOLS)[number];
};

// kysely result plugins shared by the dialect and the connection
const plugins = [new FilterNumericKeysPlugin(), new ParseJSONResultsPlugin()];
// LibsqlDialect with a custom introspector that hides libsql's internal wasm table
class CustomLibsqlDialect extends LibsqlDialect {
   override createIntrospector(db: Kysely<any>): DatabaseIntrospector {
      return new SqliteIntrospector(db, {
         excludeTables: ["libsql_wasm_func_table"],
         plugins,
      });
   }
}
/**
 * Connection backed by a libsql client (`@libsql/client`).
 * Accepts either an existing `Client` or credentials to create one,
 * and implements multi-query execution via libsql's native `batch` API.
 */
export class LibsqlConnection extends SqliteConnection {
   private client: Client;
   protected override readonly supported = {
      batching: true,
   };

   constructor(client: Client);
   constructor(credentials: LibSqlCredentials);
   constructor(clientOrCredentials: Client | LibSqlCredentials) {
      let client: Client;
      if (clientOrCredentials && "url" in clientOrCredentials) {
         let { url, authToken, protocol } = clientOrCredentials;
         // allow forcing a specific scheme (e.g. "https" where websockets are unavailable)
         if (protocol && LIBSQL_PROTOCOLS.includes(protocol)) {
            $console.log("changing protocol to", protocol);
            const [, rest] = url.split("://");
            url = `${protocol}://${rest}`;
         }
         client = createClient({ url, authToken });
      } else {
         client = clientOrCredentials;
      }

      const kysely = new Kysely({
         // @ts-expect-error libsql has type issues
         dialect: new CustomLibsqlDialect({ client }),
         plugins,
      });

      super(kysely, {}, plugins);
      this.client = client;
      // note: removed dead `batching_enabled` local — it was always `true` and
      // `supported.batching` is already initialized to `true` above
   }

   getClient(): Client {
      return this.client;
   }

   /**
    * Execute all queries through libsql's `client.batch`, then run the
    * resulting rows through the registered kysely plugins.
    */
   protected override async batch<Queries extends QB[]>(
      queries: [...Queries],
   ): Promise<{
      [K in keyof Queries]: Awaited<ReturnType<Queries[K]["execute"]>>;
   }> {
      const stms: InStatement[] = queries.map((q) => {
         const compiled = q.compile();
         return {
            sql: compiled.sql,
            args: compiled.parameters as any[],
         };
      });
      const res = await this.client.batch(stms);

      // let it run through plugins
      const kyselyPlugins = new KyselyPluginRunner(this.plugins);
      const data: any = [];
      for (const r of res) {
         const rows = await kyselyPlugins.transformResultRows(r.rows);
         data.push(rows);
      }
      return data;
   }
}

Some files were not shown because too many files have changed in this diff Show More