Merge branch 'main' into cp/216-fix-users-link

This commit is contained in:
Cameron Pak
2026-02-28 11:03:54 -06:00
260 changed files with 8084 additions and 3850 deletions

View File

@@ -9,6 +9,21 @@ jobs:
test:
runs-on: ubuntu-latest
services:
postgres:
image: postgres:17
env:
POSTGRES_PASSWORD: postgres
POSTGRES_USER: postgres
POSTGRES_DB: bknd
ports:
- 5430:5432
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
steps:
- uses: actions/checkout@v4
@@ -20,11 +35,11 @@ jobs:
- name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: "1.2.22"
bun-version: "1.3.3"
- name: Install dependencies
working-directory: ./app
run: bun install
run: bun install #--linker=hoisted
- name: Build
working-directory: ./app

3
.gitignore vendored
View File

@@ -27,7 +27,8 @@ packages/media/.env
.npmrc
/.verdaccio
.idea
.vscode
.vscode/*
!.vscode/settings.json
.git_old
docker/tmp
.debug

19
.vscode/settings.json vendored Normal file
View File

@@ -0,0 +1,19 @@
{
"typescript.tsdk": "node_modules/typescript/lib",
"biome.enabled": true,
"editor.defaultFormatter": "biomejs.biome",
"editor.codeActionsOnSave": {
//"source.organizeImports.biome": "explicit",
"source.fixAll.biome": "explicit"
},
"typescript.preferences.importModuleSpecifier": "non-relative",
"typescript.preferences.autoImportFileExcludePatterns": [
"**/dist/**",
"**/node_modules/**/dist/**",
"**/node_modules/**/!(src|lib|esm)/**" // optional, stricter
],
"typescript.preferences.includePackageJsonAutoImports": "on",
"typescript.tsserver.watchOptions": {
"excludeDirectories": ["**/dist", "**/node_modules/**/dist"]
}
}

View File

@@ -1,110 +1,202 @@
# Functional Source License, Version 1.1, MIT Future License
## Abbreviation
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
FSL-1.1-MIT
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
## Notice
1. Definitions.
Copyright 2025 Dennis Senn
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
## Terms and Conditions
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
### Licensor ("We")
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
The party offering the Software under these Terms and Conditions.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
### The Software
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
The "Software" is each version of the software that we make available under
these Terms and Conditions, as indicated by our inclusion of these Terms and
Conditions with the Software.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
### License Grant
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
Subject to your compliance with this License Grant and the Patents,
Redistribution and Trademark clauses below, we hereby grant you the right to
use, copy, modify, create derivative works, publicly perform, publicly display
and redistribute the Software for any Permitted Purpose identified below.
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
### Permitted Purpose
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
A Permitted Purpose is any purpose other than a Competing Use. A Competing Use
means making the Software available to others in a commercial product or
service that:
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
1. substitutes for the Software;
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
2. substitutes for any other product or service we offer using the Software
that exists as of the date we make the Software available; or
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
3. offers the same or substantially similar functionality as the Software.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
Permitted Purposes specifically include using the Software:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
1. for your internal use and access;
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
2. for non-commercial education;
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
3. for non-commercial research; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
4. in connection with professional services that you provide to a licensee
using the Software in accordance with these Terms and Conditions.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
### Patents
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
To the extent your use for a Permitted Purpose would necessarily infringe our
patents, the license grant above includes a license under our patents. If you
make a claim against any party that the Software infringes or contributes to
the infringement of any patent, then your patent license to the Software ends
immediately.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
### Redistribution
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
The Terms and Conditions apply to all copies, modifications and derivatives of
the Software.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
If you redistribute any copies, modifications or derivatives of the Software,
you must include a copy of or a link to these Terms and Conditions and not
remove any copyright notices provided in or with the Software.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
### Disclaimer
END OF TERMS AND CONDITIONS
THE SOFTWARE IS PROVIDED "AS IS" AND WITHOUT WARRANTIES OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING WITHOUT LIMITATION WARRANTIES OF FITNESS FOR A PARTICULAR
PURPOSE, MERCHANTABILITY, TITLE OR NON-INFRINGEMENT.
APPENDIX: How to apply the Apache License to your work.
IN NO EVENT WILL WE HAVE ANY LIABILITY TO YOU ARISING OUT OF OR RELATED TO THE
SOFTWARE, INCLUDING INDIRECT, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES,
EVEN IF WE HAVE BEEN INFORMED OF THEIR POSSIBILITY IN ADVANCE.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
### Trademarks
Copyright 2026 Dennis Senn
Except for displaying the License Details and identifying us as the origin of
the Software, you have no right under these Terms and Conditions to use our
trademarks, trade names, service marks or product names.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
## Grant of Future License
http://www.apache.org/licenses/LICENSE-2.0
We hereby irrevocably grant you an additional license to use the Software under
the MIT license that is effective on the second anniversary of the date we make
the Software available. On or after that date, you may use the Software under
the MIT license, in which case the following will apply:
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@@ -17,7 +17,7 @@ It's designed to avoid vendor lock-in and architectural limitations. Built exclu
* SQLite: LibSQL, Node SQLite, Bun SQLite, Cloudflare D1, Cloudflare Durable Objects SQLite, SQLocal
* Postgres: Vanilla Postgres, Supabase, Neon, Xata
* **Frameworks**: React, Next.js, React Router, Astro, Vite, Waku
* **Storage**: AWS S3, S3-compatible (Tigris, R2, Minio, etc.), Cloudflare R2 (binding), Cloudinary, Filesystem
* **Storage**: AWS S3, S3-compatible (Tigris, R2, Minio, etc.), Cloudflare R2 (binding), Cloudinary, Filesystem, Origin Private File System (OPFS)
* **Deployment**: Standalone, Docker, Cloudflare Workers, Vercel, Netlify, Deno Deploy, AWS Lambda, Valtown etc.
**For documentation and examples, please visit https://docs.bknd.io.**

View File

@@ -20,6 +20,7 @@ VITE_SHOW_ROUTES=
# ===== Test Credentials =====
RESEND_API_KEY=
PLUNK_API_KEY=
R2_TOKEN=
R2_ACCESS_KEY=

12
app/.vscode/settings.json vendored Normal file
View File

@@ -0,0 +1,12 @@
{
"typescript.preferences.includePackageJsonAutoImports": "off",
"typescript.suggest.autoImports": true,
"typescript.preferences.importModuleSpecifier": "relative",
"search.exclude": {
"**/dist/**": true,
"**/node_modules/**": true
},
"files.exclude": {
"**/dist/**": true
}
}

View File

@@ -108,7 +108,7 @@ describe("App tests", async () => {
expect(Array.from(app.plugins.keys())).toEqual(["test"]);
});
test.only("drivers", async () => {
test("drivers", async () => {
const called: string[] = [];
const app = new App(dummyConnection, undefined, {
drivers: {

View File

@@ -1,66 +1,76 @@
import { expect, describe, it, beforeAll, afterAll, mock } from "bun:test";
import * as adapter from "adapter";
import { disableConsoleLog, enableConsoleLog } from "core/utils";
import { disableConsoleLog, enableConsoleLog, omitKeys } from "core/utils";
import { adapterTestSuite } from "adapter/adapter-test-suite";
import { bunTestRunner } from "adapter/bun/test";
import { omitKeys } from "core/utils";
const stripConnection = <T extends Record<string, any>>(cfg: T) =>
omitKeys(cfg, ["connection"]);
beforeAll(disableConsoleLog);
afterAll(enableConsoleLog);
describe("adapter", () => {
it("makes config", async () => {
expect(omitKeys(await adapter.makeConfig({}), ["connection"])).toEqual({});
expect(
omitKeys(await adapter.makeConfig({}, { env: { TEST: "test" } }), ["connection"]),
).toEqual({});
describe("makeConfig", () => {
it("returns empty config for empty inputs", async () => {
const cases: Array<Parameters<typeof adapter.makeConfig>> = [
[{}],
[{}, { env: { TEST: "test" } }],
];
// merges everything returned from `app` with the config
expect(
omitKeys(
await adapter.makeConfig(
{ app: (a) => ({ config: { server: { cors: { origin: a.env.TEST } } } }) },
{ env: { TEST: "test" } },
),
["connection"],
),
).toEqual({
config: { server: { cors: { origin: "test" } } },
});
});
for (const args of cases) {
const cfg = await adapter.makeConfig(...(args as any));
expect(stripConnection(cfg)).toEqual({});
}
});
it("allows all properties in app function", async () => {
const called = mock(() => null);
const config = await adapter.makeConfig(
{
app: (env) => ({
connection: { url: "test" },
config: { server: { cors: { origin: "test" } } },
options: {
mode: "db",
},
onBuilt: () => {
called();
expect(env).toEqual({ foo: "bar" });
},
}),
},
{ foo: "bar" },
it("merges app output into config", async () => {
const cfg = await adapter.makeConfig(
{ app: (a) => ({ config: { server: { cors: { origin: a.env.TEST } } } }) },
{ env: { TEST: "test" } },
);
expect(config.connection).toEqual({ url: "test" });
expect(config.config).toEqual({ server: { cors: { origin: "test" } } });
expect(config.options).toEqual({ mode: "db" });
await config.onBuilt?.(null as any);
expect(called).toHaveBeenCalled();
});
adapterTestSuite(bunTestRunner, {
expect(stripConnection(cfg)).toEqual({
config: { server: { cors: { origin: "test" } } },
});
});
it("allows all properties in app() result", async () => {
const called = mock(() => null);
const cfg = await adapter.makeConfig(
{
app: (env) => ({
connection: { url: "test" },
config: { server: { cors: { origin: "test" } } },
options: { mode: "db" as const },
onBuilt: () => {
called();
expect(env).toEqual({ foo: "bar" });
},
}),
},
{ foo: "bar" },
);
expect(cfg.connection).toEqual({ url: "test" });
expect(cfg.config).toEqual({ server: { cors: { origin: "test" } } });
expect(cfg.options).toEqual({ mode: "db" });
await cfg.onBuilt?.({} as any);
expect(called).toHaveBeenCalledTimes(1);
});
});
describe("adapter test suites", () => {
adapterTestSuite(bunTestRunner, {
makeApp: adapter.createFrameworkApp,
label: "framework app",
});
});
adapterTestSuite(bunTestRunner, {
adapterTestSuite(bunTestRunner, {
makeApp: adapter.createRuntimeApp,
label: "runtime app",
});
});
});
});

View File

@@ -1,7 +1,7 @@
/// <reference types="@types/bun" />
import { describe, expect, it } from "bun:test";
import { describe, expect, it, mock } from "bun:test";
import { Hono } from "hono";
import { getFileFromContext, isFile, isReadableStream } from "core/utils";
import { getFileFromContext, isFile, isReadableStream, s, jsc } from "core/utils";
import { MediaApi } from "media/api/MediaApi";
import { assetsPath, assetsTmpPath } from "../helper";
@@ -15,7 +15,7 @@ const mockedBackend = new Hono()
.get("/file/:name", async (c) => {
const { name } = c.req.param();
const file = Bun.file(`${assetsPath}/${name}`);
return new Response(file, {
return new Response(new File([await file.bytes()], name, { type: file.type }), {
headers: {
"Content-Type": file.type,
"Content-Length": file.size.toString(),
@@ -67,7 +67,7 @@ describe("MediaApi", () => {
const res = await mockedBackend.request("/api/media/file/" + name);
await Bun.write(path, res);
const file = await Bun.file(path);
const file = Bun.file(path);
expect(file.size).toBeGreaterThan(0);
expect(file.type).toBe("image/png");
await file.delete();
@@ -98,14 +98,11 @@ describe("MediaApi", () => {
expect(isReadableStream(res.body)).toBe(true);
expect(isReadableStream(res.res.body)).toBe(true);
const blob = await res.res.blob();
// Response.blob() always returns Blob, not File - File metadata (name, lastModified) is lost
// Client code must manually construct File from Blob (see MediaApi.download() for reference)
const file = new File([blob], name, { type: blob.type });
expect(isFile(file)).toBe(true);
expect(file.size).toBeGreaterThan(0);
expect(file.type).toBe("image/png");
expect(file.name).toContain(name);
const blob = (await res.res.blob()) as File;
expect(isFile(blob)).toBe(true);
expect(blob.size).toBeGreaterThan(0);
expect(blob.type).toBe("image/png");
expect(blob.name).toContain(name);
});
it("getFileStream", async () => {
@@ -117,15 +114,11 @@ describe("MediaApi", () => {
const stream = await api.getFileStream(name);
expect(isReadableStream(stream)).toBe(true);
const blob = await new Response(stream).blob();
// Response.blob() always returns Blob, not File - File metadata (name, lastModified) is lost
// Client code must manually construct File from Blob (see MediaApi.download() for reference)
// Use originalRes.headers.get("Content-Type") to preserve MIME type from response
const file = new File([blob], name, { type: originalRes.headers.get("Content-Type") || blob.type });
expect(isFile(file)).toBe(true);
expect(file.size).toBeGreaterThan(0);
expect(file.type).toBe("image/png");
expect(file.name).toContain(name);
const blob = (await new Response(res).blob()) as File;
expect(isFile(blob)).toBe(true);
expect(blob.size).toBeGreaterThan(0);
expect(blob.type).toBe("image/png");
expect(blob.name).toContain(name);
});
it("should upload file in various ways", async () => {
@@ -162,15 +155,38 @@ describe("MediaApi", () => {
}
// upload via readable from bun
await matches(await api.upload(file.stream(), { filename: "readable.png" }), "readable.png");
await matches(api.upload(file.stream(), { filename: "readable.png" }), "readable.png");
// upload via readable from response
{
const response = (await mockedBackend.request(url)) as Response;
await matches(
await api.upload(response.body!, { filename: "readable.png" }),
"readable.png",
);
await matches(api.upload(response.body!, { filename: "readable.png" }), "readable.png");
}
});
it("should add overwrite query for entity upload", async (c) => {
const call = mock(() => null);
const hono = new Hono().post(
"/api/media/entity/:entity/:id/:field",
jsc("query", s.object({ overwrite: s.boolean().optional() })),
async (c) => {
const { overwrite } = c.req.valid("query");
expect(overwrite).toBe(true);
call();
return c.json({ ok: true });
},
);
const api = new MediaApi(
{
upload_fetcher: hono.request,
},
hono.request,
);
const file = Bun.file(`${assetsPath}/image.png`);
const res = await api.uploadToEntity("posts", 1, "cover", file as any, {
overwrite: true,
});
expect(res.ok).toBe(true);
expect(call).toHaveBeenCalled();
});
});

View File

@@ -1,8 +1,12 @@
import { describe, expect, mock, test } from "bun:test";
import { describe, expect, mock, test, beforeAll, afterAll } from "bun:test";
import { createApp as internalCreateApp, type CreateAppConfig } from "bknd";
import { getDummyConnection } from "../../__test__/helper";
import { ModuleManager } from "modules/ModuleManager";
import { em, entity, text } from "data/prototype";
import { disableConsoleLog, enableConsoleLog } from "core/utils/test";
beforeAll(disableConsoleLog);
afterAll(enableConsoleLog);
async function createApp(config: CreateAppConfig = {}) {
const app = internalCreateApp({

View File

@@ -1,7 +1,11 @@
import { AppEvents } from "App";
import { describe, test, expect, beforeAll, mock } from "bun:test";
import { describe, test, expect, beforeAll, mock, afterAll } from "bun:test";
import { type App, createApp, createMcpToolCaller } from "core/test/utils";
import type { McpServer } from "bknd/utils";
import { disableConsoleLog, enableConsoleLog } from "core/utils/test";
beforeAll(disableConsoleLog);
afterAll(enableConsoleLog);
/**
* - [x] system_config

View File

@@ -0,0 +1,42 @@
import { describe, expect, test } from "bun:test";
import { code, hybrid } from "modes";
describe("modes", () => {
describe("code", () => {
test("verify base configuration", async () => {
const c = code({}) as any;
const config = await c.app?.({} as any);
expect(Object.keys(config)).toEqual(["options"]);
expect(config.options.mode).toEqual("code");
expect(config.options.plugins).toEqual([]);
expect(config.options.manager.skipValidation).toEqual(false);
expect(config.options.manager.onModulesBuilt).toBeDefined();
});
test("keeps overrides", async () => {
const c = code({
connection: {
url: ":memory:",
},
}) as any;
const config = await c.app?.({} as any);
expect(config.connection.url).toEqual(":memory:");
});
});
describe("hybrid", () => {
test("fails if no reader is provided", () => {
// @ts-ignore
expect(hybrid({} as any).app?.({} as any)).rejects.toThrow(/reader/);
});
test("verify base configuration", async () => {
const c = hybrid({ reader: async () => ({}) }) as any;
const config = await c.app?.({} as any);
expect(Object.keys(config)).toEqual(["reader", "beforeBuild", "config", "options"]);
expect(config.options.mode).toEqual("db");
expect(config.options.plugins).toEqual([]);
expect(config.options.manager.skipValidation).toEqual(false);
expect(config.options.manager.onModulesBuilt).toBeDefined();
});
});
});

View File

@@ -76,7 +76,7 @@ describe("repros", async () => {
expect(app.em.entities.map((e) => e.name)).toEqual(["media", "test"]);
});
test.only("verify inversedBy", async () => {
test("verify inversedBy", async () => {
const schema = proto.em(
{
products: proto.entity("products", {

View File

@@ -1,8 +1,12 @@
import { describe, expect, test } from "bun:test";
import { describe, expect, test, beforeAll, afterAll } from "bun:test";
import { Guard, type GuardConfig } from "auth/authorize/Guard";
import { Permission } from "auth/authorize/Permission";
import { Role, type RoleSchema } from "auth/authorize/Role";
import { objectTransform, s } from "bknd/utils";
import { disableConsoleLog, enableConsoleLog } from "core/utils/test";
beforeAll(disableConsoleLog);
afterAll(enableConsoleLog);
function createGuard(
permissionNames: string[],

View File

@@ -7,8 +7,8 @@ import type { App, DB } from "bknd";
import type { CreateUserPayload } from "auth/AppAuth";
import { disableConsoleLog, enableConsoleLog } from "core/utils/test";
beforeAll(() => disableConsoleLog());
afterAll(() => enableConsoleLog());
beforeAll(disableConsoleLog);
afterAll(enableConsoleLog);
async function makeApp(config: Partial<CreateAppConfig["config"]> = {}) {
const app = createApp({

View File

@@ -0,0 +1,40 @@
import { describe, test, expect, beforeAll, afterAll } from "bun:test";
import { createAuthTestApp } from "./shared";
import { disableConsoleLog, enableConsoleLog } from "core/utils/test";
import { em, entity, text } from "data/prototype";
beforeAll(disableConsoleLog);
afterAll(enableConsoleLog);
const schema = em(
{
posts: entity("posts", {
title: text(),
content: text(),
}),
comments: entity("comments", {
content: text(),
}),
},
({ relation }, { posts, comments }) => {
relation(posts).manyToOne(comments);
},
);
describe("DataController (auth)", () => {
test("reading schema.json", async () => {
const { request } = await createAuthTestApp(
{
permission: ["system.access.api", "data.entity.read", "system.schema.read"],
request: new Request("http://localhost/api/data/schema.json"),
},
{
config: { data: schema.toJSON() },
},
);
expect((await request.guest()).status).toBe(403);
expect((await request.member()).status).toBe(403);
expect((await request.authorized()).status).toBe(200);
expect((await request.admin()).status).toBe(200);
});
});

View File

@@ -1,20 +0,0 @@
import { describe, it, expect } from "bun:test";
import { SystemController } from "modules/server/SystemController";
import { createApp } from "core/test/utils";
import type { CreateAppConfig } from "App";
import { getPermissionRoutes } from "auth/middlewares/permission.middleware";
async function makeApp(config: Partial<CreateAppConfig> = {}) {
const app = createApp(config);
await app.build();
return app;
}
describe.skip("SystemController", () => {
it("...", async () => {
const app = await makeApp();
const controller = new SystemController(app);
const hono = controller.getController();
console.log(getPermissionRoutes(hono));
});
});

View File

@@ -0,0 +1,41 @@
import { describe, test, expect, beforeAll, afterAll } from "bun:test";
import { createAuthTestApp } from "./shared";
import { disableConsoleLog, enableConsoleLog } from "core/utils/test";
beforeAll(disableConsoleLog);
afterAll(enableConsoleLog);
describe("SystemController (auth)", () => {
test("reading info", async () => {
const { request } = await createAuthTestApp({
permission: ["system.access.api", "system.info"],
request: new Request("http://localhost/api/system/info"),
});
expect((await request.guest()).status).toBe(403);
expect((await request.member()).status).toBe(403);
expect((await request.authorized()).status).toBe(200);
expect((await request.admin()).status).toBe(200);
});
test("reading permissions", async () => {
const { request } = await createAuthTestApp({
permission: ["system.access.api", "system.schema.read"],
request: new Request("http://localhost/api/system/permissions"),
});
expect((await request.guest()).status).toBe(403);
expect((await request.member()).status).toBe(403);
expect((await request.authorized()).status).toBe(200);
expect((await request.admin()).status).toBe(200);
});
test("access openapi", async () => {
const { request } = await createAuthTestApp({
permission: ["system.access.api", "system.openapi"],
request: new Request("http://localhost/api/system/openapi.json"),
});
expect((await request.guest()).status).toBe(403);
expect((await request.member()).status).toBe(403);
expect((await request.authorized()).status).toBe(200);
expect((await request.admin()).status).toBe(200);
});
});

View File

@@ -0,0 +1,171 @@
import { createApp } from "core/test/utils";
import type { CreateAppConfig } from "App";
import type { RoleSchema } from "auth/authorize/Role";
import { isPlainObject } from "core/utils";
// Explicit per-role schemas for createAuthTestApp. Any role left undefined
// keeps the app's defaults; the guest schema is always forced to be the
// default role (`is_default: true`) by the helper.
export type AuthTestConfig = {
   guest?: RoleSchema;
   member?: RoleSchema;
   authorized?: RoleSchema;
};
/**
 * Builds a fully configured test app with auth + guard enabled and four
 * canonical actors (guest, member, authorized, admin), each backed by a JWT.
 *
 * `testConfig.permission` is either:
 *  - a string or string[] — granted to the "authorized" role only (guest and
 *    member get empty permission sets, admin is implicit-allow), or
 *  - an AuthTestConfig object with explicit role schemas.
 *
 * Returns:
 *  - `request`: proxy to replay `testConfig.request` as each actor
 *  - `requestFn`: proxy to issue arbitrary requests as each actor
 * Both attach the actor's Bearer token (guest sends no Authorization header).
 */
export async function createAuthTestApp(
   testConfig: {
      permission: AuthTestConfig | string | string[];
      request: Request;
   },
   config: Partial<CreateAppConfig> = {},
) {
   let member: RoleSchema | undefined;
   let authorized: RoleSchema | undefined;
   let guest: RoleSchema | undefined;

   if (isPlainObject(testConfig.permission)) {
      // explicit role schemas; guest is always forced to be the default role
      if (testConfig.permission.guest)
         guest = {
            ...testConfig.permission.guest,
            is_default: true,
         };
      if (testConfig.permission.member) member = testConfig.permission.member;
      if (testConfig.permission.authorized) authorized = testConfig.permission.authorized;
   } else {
      // shorthand: the given permission(s) go to "authorized" only
      member = {
         permissions: [],
      };
      authorized = {
         permissions: Array.isArray(testConfig.permission)
            ? testConfig.permission
            : [testConfig.permission],
      };
      guest = {
         permissions: [],
         is_default: true,
      };
   }

   const app = createApp({
      ...config,
      config: {
         ...config.config,
         auth: {
            ...config.config?.auth,
            enabled: true,
            guard: {
               enabled: true,
               ...config.config?.auth?.guard,
            },
            jwt: {
               ...config.config?.auth?.jwt,
               secret: "secret",
            },
            roles: {
               ...config.config?.auth?.roles,
               guest,
               member,
               authorized,
               admin: {
                  implicit_allow: true,
               },
            },
         },
      },
   });
   await app.build();

   // one user per role; guest stays unauthenticated (no user / no token)
   const users = {
      guest: null,
      member: await app.createUser({
         email: "member@test.com",
         password: "12345678",
         role: "member",
      }),
      authorized: await app.createUser({
         email: "authorized@test.com",
         password: "12345678",
         role: "authorized",
      }),
      admin: await app.createUser({
         email: "admin@test.com",
         password: "12345678",
         role: "admin",
      }),
   } as const;

   const tokens = {} as Record<keyof typeof users, string>;
   for (const [key, user] of Object.entries(users)) {
      if (user) {
         tokens[key as keyof typeof users] = await app.module.auth.authenticator.jwt(user);
      }
   }

   // issues `input` against the app as `user`, attaching its Bearer token
   // (guest has no token and therefore sends no Authorization header)
   async function makeRequest(user: keyof typeof users, input: string, init: RequestInit = {}) {
      const headers = new Headers(init.headers ?? {});
      if (user in tokens) {
         headers.set("Authorization", `Bearer ${tokens[user as keyof typeof tokens]}`);
      }
      const res = await app.server.request(input, {
         ...init,
         headers,
      });
      let data: any;
      if (res.headers.get("Content-Type")?.startsWith("application/json")) {
         data = await res.json();
      } else if (res.headers.get("Content-Type")?.startsWith("text/")) {
         data = await res.text();
      }
      return {
         status: res.status,
         ok: res.ok,
         headers: Object.fromEntries(res.headers.entries()),
         data,
      };
   }

   // `requestFn.<actor>(input, init)` — arbitrary request as the actor
   const requestFn = new Proxy(
      {},
      {
         get(_, prop: keyof typeof users) {
            return async (input: string, init: RequestInit = {}) => {
               return makeRequest(prop, input, init);
            };
         },
      },
   ) as {
      [K in keyof typeof users]: (
         input: string,
         init?: RequestInit,
      ) => Promise<{
         status: number;
         ok: boolean;
         headers: Record<string, string>;
         data: any;
      }>;
   };

   // `request.<actor>()` — replays testConfig.request as the actor
   const request = new Proxy(
      {},
      {
         get(_, prop: keyof typeof users) {
            return async () => {
               return makeRequest(prop, testConfig.request.url, {
                  headers: testConfig.request.headers,
                  method: testConfig.request.method,
                  body: testConfig.request.body,
               });
            };
         },
      },
   ) as {
      [K in keyof typeof users]: () => Promise<{
         status: number;
         ok: boolean;
         headers: Record<string, string>;
         data: any;
      }>;
   };

   return { app, users, request, requestFn };
}

View File

@@ -0,0 +1,13 @@
import { PasswordStrategy } from "auth/authenticate/strategies/PasswordStrategy";
import { describe, expect, it } from "bun:test";
describe("PasswordStrategy", () => {
   it("should enforce provided minimum length", async () => {
      const strategy = new PasswordStrategy({ minLength: 8, hashing: "plain" });
      // NOTE: `.rejects`/`.resolves` matchers return promises — they must be
      // awaited, otherwise a failing assertion is silently dropped.
      await expect(strategy.verify("password")({} as any)).rejects.toThrow();
      // matching stored strategy value: verification resolves without a value
      await expect(
         strategy.verify("password1234")({ strategy_value: "password1234" } as any),
      ).resolves.toBeUndefined();
   });
});

View File

@@ -0,0 +1,85 @@
import { describe, beforeAll, afterAll, test } from "bun:test";
import type { PostgresConnection } from "data/connection/postgres/PostgresConnection";
import { pg, postgresJs } from "bknd";
import { Pool } from "pg";
import postgres from "postgres";
import { disableConsoleLog, enableConsoleLog, $waitUntil } from "bknd/utils";
import { $ } from "bun";
import { connectionTestSuite } from "data/connection/connection-test-suite";
import { bunTestRunner } from "adapter/bun/test";
// Test database credentials matching the CI service container (host port
// 5430 → container 5432) and the local docker fallback started in beforeAll.
// TODO confirm these stay in sync with the CI workflow definition.
const credentials = {
   host: "localhost",
   port: 5430,
   user: "postgres",
   password: "postgres",
   database: "bknd",
};
// Resets the database to an empty "public" schema between suite runs by
// dropping and recreating it.
async function cleanDatabase(connection: InstanceType<typeof PostgresConnection>) {
   const kysely = connection.kysely;
   // drop all tables+indexes & create new schema
   await kysely.schema.dropSchema("public").ifExists().cascade().execute();
   // NOTE(review): this drops an *index* literally named "public" — dropping
   // the schema with cascade above should already remove all indexes; verify
   // whether this line is intentional or leftover.
   await kysely.schema.dropIndex("public").ifExists().cascade().execute();
   await kysely.schema.createSchema("public").execute();
}
/**
 * Probes whether a PostgreSQL server is reachable with the test credentials.
 * @returns true when connecting and pinging succeeds, false otherwise.
 */
async function isPostgresRunning() {
   // Try to actually connect to PostgreSQL
   const conn = pg({ pool: new Pool(credentials) });
   try {
      await conn.ping();
      return true;
   } catch (e) {
      return false;
   } finally {
      // always release the pool — the previous version leaked it when the
      // ping failed, keeping sockets/handles open for every failed probe
      try {
         await conn.close();
      } catch (e) {}
   }
}
describe("postgres", () => {
   beforeAll(async () => {
      // Start a throwaway container only when no server is already reachable
      // (CI provides one as a service container on the same port).
      if (!(await isPostgresRunning())) {
         await $`docker run --rm --name bknd-test-postgres -d -e POSTGRES_PASSWORD=${credentials.password} -e POSTGRES_USER=${credentials.user} -e POSTGRES_DB=${credentials.database} -p ${credentials.port}:5432 postgres:17`;
         await $waitUntil("Postgres is running", isPostgresRunning);
         // grace period after the first successful ping — presumably to let
         // the server finish init; TODO confirm still needed
         await new Promise((resolve) => setTimeout(resolve, 500));
      }
      disableConsoleLog();
   });
   afterAll(async () => {
      // Stop the container we may have started; `--rm` removes it on stop.
      // NOTE(review): this also runs when CI's service container is up — the
      // `docker stop` then fails and is swallowed by the empty catch.
      if (await isPostgresRunning()) {
         try {
            await $`docker stop bknd-test-postgres`;
         } catch (e) {}
      }
      enableConsoleLog();
   });
   // Run the shared connection test-suite against both drivers, serially
   // (they share one database, so parallel runs would interfere).
   describe.serial.each([
      ["pg", () => pg({ pool: new Pool(credentials) })],
      ["postgresjs", () => postgresJs({ postgres: postgres(credentials) })],
   ])("%s", (name, createConnection) => {
      connectionTestSuite(
         {
            ...bunTestRunner,
            test: test.serial,
         },
         {
            makeConnection: () => {
               const connection = createConnection();
               return {
                  connection,
                  // reset the public schema and release the driver afterwards
                  dispose: async () => {
                     await cleanDatabase(connection);
                     await connection.close();
                  },
               };
            },
            rawDialectDetails: [],
            disableConsoleLog: false,
         },
      );
   });
});

View File

@@ -124,6 +124,81 @@ describe("[Repository]", async () => {
.then((r) => [r.count, r.total]),
).resolves.toEqual([undefined, undefined]);
});
test("auto join", async () => {
   // Schema: comments -> posts (many-to-one); "another" exists but is
   // deliberately unrelated to "comments".
   const schema = $em(
      {
         posts: $entity("posts", {
            title: $text(),
            content: $text(),
         }),
         comments: $entity("comments", {
            content: $text(),
         }),
         another: $entity("another", {
            title: $text(),
         }),
      },
      ({ relation }, { posts, comments }) => {
         relation(comments).manyToOne(posts);
      },
   );
   const em = schema.proto.withConnection(getDummyConnection().dummyConnection);
   await em.schema().sync({ force: true });
   await em.mutator("posts").insertOne({ title: "post1", content: "content1" });
   await em
      .mutator("comments")
      .insertMany([{ content: "comment1", posts_id: 1 }, { content: "comment2" }] as any);

   // filtering on "posts.title" without an explicit join should auto-join
   const res = await em.repo("comments").findMany({
      where: {
         "posts.title": "post1",
      },
   });
   expect(res.data as any).toEqual([
      {
         id: 1,
         content: "comment1",
         posts_id: 1,
      },
   ]);
   {
      // manual join should still work
      const res = await em.repo("comments").findMany({
         join: ["posts"],
         where: {
            "posts.title": "post1",
         },
      });
      expect(res.data as any).toEqual([
         {
            id: 1,
            content: "comment1",
            posts_id: 1,
         },
      ]);
   }

   // inexistent should be detected and thrown
   // (fix: `.rejects` matchers must be awaited or failures are dropped)
   await expect(
      em.repo("comments").findMany({
         where: {
            "random.title": "post1",
         },
      }),
   ).rejects.toThrow(/Invalid where field/);

   // existing alias, but not a relation should throw
   await expect(
      em.repo("comments").findMany({
         where: {
            "another.title": "post1",
         },
      }),
   ).rejects.toThrow(/Invalid where field/);
});
});
describe("[data] Repository (Events)", async () => {

View File

@@ -59,7 +59,7 @@ describe("SqliteIntrospector", () => {
dataType: "INTEGER",
isNullable: false,
isAutoIncrementing: true,
hasDefaultValue: false,
hasDefaultValue: true,
comment: undefined,
},
{
@@ -89,7 +89,7 @@ describe("SqliteIntrospector", () => {
dataType: "INTEGER",
isNullable: false,
isAutoIncrementing: true,
hasDefaultValue: false,
hasDefaultValue: true,
comment: undefined,
},
{

View File

@@ -1,9 +1,15 @@
import { afterAll, beforeAll, describe, expect, it } from "bun:test";
import { App, createApp, type AuthResponse } from "../../src";
import { auth } from "../../src/modules/middlewares";
import { randomString, secureRandomString, withDisabledConsole } from "../../src/core/utils";
import {
mergeObject,
randomString,
secureRandomString,
withDisabledConsole,
} from "../../src/core/utils";
import { disableConsoleLog, enableConsoleLog } from "core/utils/test";
import { getDummyConnection } from "../helper";
import type { AppAuthSchema } from "auth/auth-schema";
beforeAll(disableConsoleLog);
afterAll(enableConsoleLog);
@@ -62,12 +68,12 @@ const configs = {
},
};
function createAuthApp() {
function createAuthApp(config?: Partial<AppAuthSchema>) {
const { dummyConnection } = getDummyConnection();
const app = createApp({
connection: dummyConnection,
config: {
auth: configs.auth,
auth: mergeObject(configs.auth, config ?? {}),
},
});
@@ -132,6 +138,16 @@ const fns = <Mode extends "cookie" | "token" = "token">(app: App, mode?: Mode) =
return { res, data };
},
register: async (user: any): Promise<{ res: Response; data: AuthResponse }> => {
const res = (await app.server.request("/api/auth/password/register", {
method: "POST",
headers: headers(),
body: body(user),
})) as Response;
const data = mode === "cookie" ? getCookie(res, "auth") : await res.json();
return { res, data };
},
me: async (token?: string): Promise<Pick<AuthResponse, "user">> => {
const res = (await app.server.request("/api/auth/me", {
method: "GET",
@@ -245,4 +261,61 @@ describe("integration auth", () => {
expect(await $fns.me()).toEqual({ user: null as any });
}
});
it("should register users with default role", async () => {
   const app = createAuthApp({ default_role_register: "guest" });
   await app.build();
   const $fns = fns(app);

   // createUser without an explicit role falls back to the configured default
   expect(
      await app
         .createUser({
            email: "test@bknd.io",
            password: "12345678",
         })
         .then((r) => r.role),
   ).toBe("guest");

   // unknown role must be rejected
   // (fix: `.rejects` matchers must be awaited or failures are dropped)
   await expect(
      app.createUser({
         email: "test@bknd.io",
         password: "12345678",
         role: "doesnt exist",
      }),
   ).rejects.toThrow();

   // an explicitly provided valid role wins over the default
   expect(
      await app
         .createUser({
            email: "test2@bknd.io",
            password: "12345678",
            role: "admin",
         })
         .then((r) => r.role),
   ).toBe("admin");

   // public registration must not accept a role from the payload
   expect(
      await $fns
         .register({
            email: "test3@bknd.io",
            password: "12345678",
            role: "admin",
         })
         .then((r) => r.res.ok),
   ).toBe(false);

   // public registration falls back to the default role
   expect(
      await $fns
         .register({
            email: "test3@bknd.io",
            password: "12345678",
         })
         .then((r) => r.data.user.role),
   ).toBe("guest");
});
});

View File

@@ -8,9 +8,10 @@ import type { TAppMediaConfig } from "../../src/media/media-schema";
import { StorageLocalAdapter } from "adapter/node/storage/StorageLocalAdapter";
import { assetsPath, assetsTmpPath } from "../helper";
import { disableConsoleLog, enableConsoleLog } from "core/utils/test";
import * as proto from "data/prototype";
beforeAll(() => {
//disableConsoleLog();
disableConsoleLog();
registries.media.register("local", StorageLocalAdapter);
});
afterAll(enableConsoleLog);
@@ -128,4 +129,87 @@ describe("MediaController", () => {
expect(destFile.exists()).resolves.toBe(true);
await destFile.delete();
});
test("entity upload with max_items and overwrite", async () => {
   // App with local media storage and a "posts" entity with a single medium
   // field "cover" — presumably limited to one item by default; TODO confirm.
   const app = createApp({
      config: {
         media: mergeObject(
            {
               enabled: true,
               adapter: {
                  type: "local",
                  config: {
                     path: assetsTmpPath,
                  },
               },
            },
            {},
         ),
         data: {
            entities: {
               posts: proto
                  .entity("posts", {
                     title: proto.text(),
                     cover: proto.medium(),
                  })
                  .toJSON(),
            },
         },
      },
   });
   await app.build();
   // create a post first
   const createRes = await app.server.request("/api/data/entity/posts", {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ title: "Test Post" }),
   });
   expect(createRes.status).toBe(201);
   const { data: post } = (await createRes.json()) as any;
   const file = Bun.file(path);
   // collects the server-assigned names so we can check storage afterwards
   const uploadedFiles: string[] = [];
   // upload first file to entity (should succeed)
   const res1 = await app.server.request(`/api/media/entity/posts/${post.id}/cover`, {
      method: "POST",
      body: file,
   });
   expect(res1.status).toBe(201);
   const result1 = (await res1.json()) as any;
   uploadedFiles.push(result1.name);
   // upload second file without overwrite (should fail - max_items reached)
   const res2 = await app.server.request(`/api/media/entity/posts/${post.id}/cover`, {
      method: "POST",
      body: file,
   });
   expect(res2.status).toBe(400);
   const result2 = (await res2.json()) as any;
   expect(result2.error).toContain("Max items");
   // upload third file with overwrite=true (should succeed and delete old file)
   const res3 = await app.server.request(
      `/api/media/entity/posts/${post.id}/cover?overwrite=true`,
      {
         method: "POST",
         body: file,
      },
   );
   expect(res3.status).toBe(201);
   const result3 = (await res3.json()) as any;
   uploadedFiles.push(result3.name);
   // verify old file was deleted from storage
   const oldFile = Bun.file(assetsTmpPath + "/" + uploadedFiles[0]);
   expect(await oldFile.exists()).toBe(false);
   // verify new file exists
   const newFile = Bun.file(assetsTmpPath + "/" + uploadedFiles[1]);
   expect(await newFile.exists()).toBe(true);
   // cleanup
   await newFile.delete();
});
});

View File

@@ -10,12 +10,6 @@ beforeAll(disableConsoleLog);
afterAll(enableConsoleLog);
describe("AppAuth", () => {
test.skip("...", () => {
const auth = new AppAuth({});
console.log(auth.toJSON());
console.log(auth.config);
});
moduleTestSuite(AppAuth);
let ctx: ModuleBuildContext;
@@ -39,11 +33,9 @@ describe("AppAuth", () => {
await auth.build();
const oldConfig = auth.toJSON(true);
//console.log(oldConfig);
await auth.schema().patch("enabled", true);
await auth.build();
const newConfig = auth.toJSON(true);
//console.log(newConfig);
expect(newConfig.jwt.secret).not.toBe(oldConfig.jwt.secret);
});
@@ -69,7 +61,6 @@ describe("AppAuth", () => {
const app = new AuthController(auth).getController();
{
disableConsoleLog();
const res = await app.request("/password/register", {
method: "POST",
headers: {
@@ -80,7 +71,6 @@ describe("AppAuth", () => {
password: "12345678",
}),
});
enableConsoleLog();
expect(res.status).toBe(200);
const { data: users } = await ctx.em.repository("users").findMany();
@@ -119,7 +109,6 @@ describe("AppAuth", () => {
const app = new AuthController(auth).getController();
{
disableConsoleLog();
const res = await app.request("/password/register", {
method: "POST",
headers: {
@@ -130,7 +119,6 @@ describe("AppAuth", () => {
password: "12345678",
}),
});
enableConsoleLog();
expect(res.status).toBe(200);
const { data: users } = await ctx.em.repository("users").findMany();
@@ -235,4 +223,32 @@ describe("AppAuth", () => {
}
}
});
test("default role for registration must be a valid role", async () => {
   // only "guest" is configured as a role on this app
   const app = createApp({
      config: {
         auth: {
            enabled: true,
            jwt: {
               secret: "123456",
            },
            allow_register: true,
            roles: {
               guest: {
                  is_default: true,
               },
            },
         },
      },
   });
   await app.build();
   const auth = app.module.auth;

   // patching to a role that isn't configured must be rejected
   // (fix: `.rejects` matchers must be awaited or failures are dropped)
   await expect(auth.schema().patch("default_role_register", "admin")).rejects.toThrow();

   // a configured role is accepted and persisted
   await auth.schema().patch("default_role_register", "guest");
   expect(auth.toJSON().default_role_register).toBe("guest");
});
});

View File

@@ -1,10 +1,14 @@
import { describe, expect, test } from "bun:test";
import { describe, expect, test, beforeAll, afterAll } from "bun:test";
import { createApp } from "core/test/utils";
import { em, entity, text } from "data/prototype";
import { registries } from "modules/registries";
import { StorageLocalAdapter } from "adapter/node/storage/StorageLocalAdapter";
import { AppMedia } from "../../src/media/AppMedia";
import { moduleTestSuite } from "./module-test-suite";
import { disableConsoleLog, enableConsoleLog } from "core/utils/test";
beforeAll(disableConsoleLog);
afterAll(enableConsoleLog);
describe("AppMedia", () => {
test.skip("...", () => {

View File

@@ -1,7 +1,11 @@
import { it, expect, describe } from "bun:test";
import { it, expect, describe, beforeAll, afterAll } from "bun:test";
import { DbModuleManager } from "modules/db/DbModuleManager";
import { getDummyConnection } from "../helper";
import { TABLE_NAME } from "modules/db/migrations";
import { disableConsoleLog, enableConsoleLog } from "core/utils/test";
beforeAll(disableConsoleLog);
afterAll(enableConsoleLog);
describe("DbModuleManager", () => {
it("should extract secrets", async () => {

View File

@@ -11,7 +11,7 @@ import { s, stripMark } from "core/utils/schema";
import { Connection } from "data/connection/Connection";
import { entity, text } from "data/prototype";
beforeAll(disableConsoleLog);
beforeAll(() => disableConsoleLog());
afterAll(enableConsoleLog);
describe("ModuleManager", async () => {
@@ -82,7 +82,6 @@ describe("ModuleManager", async () => {
},
},
} as any;
//const { version, ...json } = mm.toJSON() as any;
const { dummyConnection } = getDummyConnection();
const db = dummyConnection.kysely;
@@ -97,10 +96,6 @@ describe("ModuleManager", async () => {
await mm2.build();
/* console.log({
json,
configs: mm2.configs(),
}); */
//expect(stripMark(json)).toEqual(stripMark(mm2.configs()));
expect(mm2.configs().data.entities?.test).toBeDefined();
expect(mm2.configs().data.entities?.test?.fields?.content).toBeDefined();
@@ -228,8 +223,6 @@ describe("ModuleManager", async () => {
const c = getDummyConnection();
const mm = new ModuleManager(c.dummyConnection);
await mm.build();
console.log("==".repeat(30));
console.log("");
const json = mm.configs();
const c2 = getDummyConnection();
@@ -275,7 +268,6 @@ describe("ModuleManager", async () => {
}
override async build() {
//console.log("building FailingModule", this.config);
if (this.config.value && this.config.value < 0) {
throw new Error("value must be positive, given: " + this.config.value);
}
@@ -296,9 +288,6 @@ describe("ModuleManager", async () => {
}
}
beforeEach(() => disableConsoleLog(["log", "warn", "error"]));
afterEach(enableConsoleLog);
test("it builds", async () => {
const { dummyConnection } = getDummyConnection();
const mm = new TestModuleManager(dummyConnection);

View File

@@ -1,30 +1,10 @@
import pkg from "./package.json" with { type: "json" };
import c from "picocolors";
import { formatNumber } from "bknd/utils";
import * as esbuild from "esbuild";
const deps = Object.keys(pkg.dependencies);
const external = ["jsonv-ts/*", "wrangler", "bknd", "bknd/*", ...deps];
if (process.env.DEBUG) {
const result = await esbuild.build({
entryPoints: ["./src/cli/index.ts"],
outdir: "./dist/cli",
platform: "node",
minify: true,
format: "esm",
metafile: true,
bundle: true,
external,
define: {
__isDev: "0",
__version: JSON.stringify(pkg.version),
},
});
await Bun.write("./dist/cli/metafile-esm.json", JSON.stringify(result.metafile, null, 2));
process.exit(0);
}
const result = await Bun.build({
entrypoints: ["./src/cli/index.ts"],
target: "node",

View File

@@ -2,6 +2,8 @@ import { $ } from "bun";
import * as tsup from "tsup";
import pkg from "./package.json" with { type: "json" };
import c from "picocolors";
import { watch as fsWatch, readdirSync, rmSync } from "node:fs";
import { join } from "node:path";
const args = process.argv.slice(2);
const watch = args.includes("--watch");
@@ -25,7 +27,18 @@ const define = {
if (clean) {
console.info("Cleaning dist (w/o static)");
await $`find dist -mindepth 1 ! -path "dist/static/*" ! -path "dist/static" -exec rm -rf {} +`;
// Cross-platform clean: remove all files/folders in dist except static
const distPath = join(import.meta.dir, "dist");
try {
const entries = readdirSync(distPath);
for (const entry of entries) {
if (entry === "static") continue;
const entryPath = join(distPath, entry);
rmSync(entryPath, { recursive: true, force: true });
}
} catch (e) {
// dist may not exist yet, ignore
}
}
let types_running = false;
@@ -83,7 +96,8 @@ async function buildApi() {
await tsup.build({
minify,
sourcemap,
watch,
// don't use tsup's broken watch, we'll handle it ourselves
watch: false,
define,
entry: [
"src/index.ts",
@@ -96,6 +110,7 @@ async function buildApi() {
metafile: true,
target: "esnext",
platform: "browser",
removeNodeProtocol: false,
format: ["esm"],
splitting: false,
loader: {
@@ -120,7 +135,7 @@ async function buildUi() {
const base = {
minify,
sourcemap,
watch,
watch: false,
define,
external: [
...external,
@@ -179,12 +194,15 @@ async function buildUiElements() {
await tsup.build({
minify,
sourcemap,
watch,
watch: false,
define,
entry: ["src/ui/elements/index.ts"],
outDir: "dist/ui/elements",
external: [
"ui/client",
"bknd",
/^bknd\/.*/,
"wouter",
"react",
"react-dom",
"react/jsx-runtime",
@@ -221,13 +239,14 @@ function baseConfig(adapter: string, overrides: Partial<tsup.Options> = {}): tsu
return {
minify,
sourcemap,
watch,
watch: false,
entry: [`src/adapter/${adapter}/index.ts`],
format: ["esm"],
platform: "neutral",
outDir: `dist/adapter/${adapter}`,
metafile: true,
splitting: false,
removeNodeProtocol: false,
onSuccess: async () => {
delayTypes();
oldConsole.log(c.cyan("[Adapter]"), adapter || "base", c.green("built"));
@@ -263,6 +282,11 @@ async function buildAdapters() {
// specific adapters
tsup.build(baseConfig("react-router")),
tsup.build(
baseConfig("browser", {
external: [/^sqlocal\/?.*?/, "wouter"],
}),
),
tsup.build(
baseConfig("bun", {
external: [/^bun\:.*/],
@@ -295,6 +319,11 @@ async function buildAdapters() {
platform: "node",
}),
tsup.build({
...baseConfig("sveltekit"),
platform: "node",
}),
tsup.build({
...baseConfig("node"),
platform: "node",
@@ -325,4 +354,48 @@ async function buildAdapters() {
]);
}
await Promise.all([buildApi(), buildUi(), buildUiElements(), buildAdapters()]);
/** Runs every build target (api, ui, ui elements, adapters) in parallel. */
async function buildAll() {
   await Promise.all([buildApi(), buildUi(), buildUiElements(), buildAdapters()]);
}

// initial build
await buildAll();

// custom watcher since tsup's watch is broken in 8.3.5+
if (watch) {
   oldConsole.log(c.cyan("[Watch]"), "watching for changes in src/...");

   let debounceTimer: ReturnType<typeof setTimeout> | null = null;
   let isBuilding = false;

   // serialized rebuild: triggers arriving while a build runs are dropped
   const rebuild = async () => {
      if (isBuilding) return;
      isBuilding = true;
      oldConsole.log(c.cyan("[Watch]"), "rebuilding...");
      try {
         await buildAll();
         oldConsole.log(c.cyan("[Watch]"), c.green("done"));
      } catch (e) {
         oldConsole.warn(c.cyan("[Watch]"), c.red("build failed"), e);
      } finally {
         // always clear the flag so a failure can't wedge the watcher
         isBuilding = false;
      }
   };

   const debouncedRebuild = () => {
      if (debounceTimer) clearTimeout(debounceTimer);
      debounceTimer = setTimeout(rebuild, 100);
   };

   // watch src directory recursively
   fsWatch(join(import.meta.dir, "src"), { recursive: true }, (event, filename) => {
      if (!filename) return;
      // ignore non-source files
      if (!filename.endsWith(".ts") && !filename.endsWith(".tsx") && !filename.endsWith(".css"))
         return;
      // fix: previously logged the literal "$(unknown)" instead of the filename
      oldConsole.log(c.cyan("[Watch]"), c.dim(`${event}: ${filename}`));
      debouncedRebuild();
   });

   // keep process alive
   await new Promise(() => {});
}

View File

@@ -1,6 +0,0 @@
[install]
#registry = "http://localhost:4873"
[test]
coverageSkipTestFiles = true
console.depth = 10

View File

@@ -3,7 +3,7 @@
"type": "module",
"sideEffects": false,
"bin": "./dist/cli/index.js",
"version": "0.19.0",
"version": "0.20.0",
"description": "Lightweight Firebase/Supabase alternative built to run anywhere — incl. Next.js, React Router, Astro, Cloudflare, Bun, Node, AWS Lambda & more.",
"homepage": "https://bknd.io",
"repository": {
@@ -13,7 +13,7 @@
"bugs": {
"url": "https://github.com/bknd-io/bknd/issues"
},
"packageManager": "bun@1.2.22",
"packageManager": "bun@1.3.3",
"engines": {
"node": ">=22.13"
},
@@ -46,97 +46,106 @@
"test:e2e:report": "VITE_DB_URL=:memory: playwright show-report",
"docs:build-assets": "bun internal/docs.build-assets.ts"
},
"license": "FSL-1.1-MIT",
"license": "Apache-2.0",
"dependencies": {
"@cfworker/json-schema": "^4.1.1",
"@clack/prompts": "^0.11.0",
"@codemirror/lang-html": "^6.4.9",
"@codemirror/lang-json": "^6.0.1",
"@codemirror/lang-html": "^6.4.11",
"@codemirror/lang-json": "^6.0.2",
"@hello-pangea/dnd": "^18.0.1",
"@hono/swagger-ui": "^0.5.1",
"@hono/swagger-ui": "^0.5.2",
"@mantine/core": "^7.17.1",
"@mantine/hooks": "^7.17.1",
"@tanstack/react-form": "^1.0.5",
"@uiw/react-codemirror": "^4.23.10",
"@xyflow/react": "^12.4.4",
"@uiw/react-codemirror": "^4.25.2",
"@xyflow/react": "^12.9.2",
"aws4fetch": "^1.0.20",
"bcryptjs": "^3.0.2",
"dayjs": "^1.11.13",
"fast-xml-parser": "^5.0.8",
"hono": "4.8.3",
"bcryptjs": "^3.0.3",
"dayjs": "^1.11.19",
"fast-xml-parser": "^5.3.1",
"hono": "4.10.4",
"json-schema-library": "10.0.0-rc7",
"json-schema-to-ts": "^3.1.1",
"jsonv-ts": "0.9.1",
"kysely": "0.27.6",
"jsonv-ts": "^0.10.1",
"kysely": "0.28.8",
"lodash-es": "^4.17.21",
"oauth4webapi": "^2.11.1",
"object-path-immutable": "^4.1.2",
"radix-ui": "^1.1.3",
"picocolors": "^1.1.1",
"swr": "^2.3.3"
"radix-ui": "^1.1.3",
"swr": "^2.3.6",
"use-sync-external-store": "^1.6.0",
"zustand": "^4"
},
"devDependencies": {
"@aws-sdk/client-s3": "^3.758.0",
"@bluwy/giget-core": "^0.1.2",
"@aws-sdk/client-s3": "^3.922.0",
"@bluwy/giget-core": "^0.1.6",
"@clack/prompts": "^0.11.0",
"@cloudflare/vitest-pool-workers": "^0.9.3",
"@cloudflare/workers-types": "^4.20250606.0",
"@cloudflare/vitest-pool-workers": "^0.10.4",
"@cloudflare/workers-types": "^4.20251014.0",
"@dagrejs/dagre": "^1.1.4",
"@hono/vite-dev-server": "^0.21.0",
"@hookform/resolvers": "^4.1.3",
"@libsql/client": "^0.15.9",
"@hono/vite-dev-server": "^0.23.0",
"@hookform/resolvers": "^5.2.2",
"@libsql/client": "^0.15.15",
"@mantine/modals": "^7.17.1",
"@mantine/notifications": "^7.17.1",
"@playwright/test": "^1.51.1",
"@playwright/test": "^1.56.1",
"@rjsf/core": "5.22.2",
"@rjsf/utils": "5.22.0",
"@standard-schema/spec": "^1.0.0",
"@tabler/icons-react": "3.18.0",
"@tailwindcss/postcss": "^4.0.12",
"@tailwindcss/vite": "^4.0.12",
"@tabler/icons-react": "3.35.0",
"@tailwindcss/postcss": "^4.1.16",
"@tailwindcss/vite": "^4.1.16",
"@tanstack/react-store": "^0.8.0",
"@testing-library/jest-dom": "^6.6.3",
"@testing-library/react": "^16.2.0",
"@types/node": "^22.13.10",
"@types/node": "^24.10.0",
"@types/pg": "^8.15.6",
"@types/react": "^19.0.10",
"@types/react-dom": "^19.0.4",
"@vitejs/plugin-react": "^4.3.4",
"@vitest/coverage-v8": "^3.0.9",
"@vitejs/plugin-react": "^5.1.0",
"@vitest/coverage-v8": "3.0.9",
"autoprefixer": "^10.4.21",
"clsx": "^2.1.1",
"dotenv": "^16.4.7",
"commander": "^14.0.2",
"dotenv": "^17.2.3",
"jotai": "^2.12.2",
"jsdom": "^26.0.0",
"kysely-d1": "^0.3.0",
"jsdom": "^26.1.0",
"kysely-generic-sqlite": "^1.2.1",
"kysely-postgres-js": "^2.0.0",
"libsql": "^0.5.22",
"libsql-stateless-easy": "^1.8.0",
"open": "^10.1.0",
"miniflare": "^4.20251011.2",
"open": "^10.2.0",
"openapi-types": "^12.1.3",
"pg": "^8.16.3",
"postcss": "^8.5.3",
"postcss-preset-mantine": "^1.17.0",
"postcss-preset-mantine": "^1.18.0",
"postcss-simple-vars": "^7.0.1",
"posthog-js-lite": "^3.4.2",
"postgres": "^3.4.7",
"posthog-js-lite": "^3.6.0",
"react": "^19.0.0",
"react-dom": "^19.0.0",
"react-hook-form": "^7.54.2",
"react-icons": "5.2.1",
"react-json-view-lite": "^2.4.1",
"sql-formatter": "^15.4.11",
"react-hook-form": "^7.66.0",
"react-icons": "5.5.0",
"react-json-view-lite": "^2.5.0",
"sql-formatter": "^15.6.10",
"sqlocal": "^0.16.0",
"tailwind-merge": "^3.0.2",
"tailwindcss": "^4.0.12",
"tailwindcss": "^4.1.16",
"tailwindcss-animate": "^1.0.7",
"tsc-alias": "^1.8.11",
"tsup": "^8.4.0",
"tsx": "^4.19.3",
"uuid": "^11.1.0",
"vite": "^6.3.5",
"tsc-alias": "^1.8.16",
"tsup": "^8.5.0",
"tsx": "^4.20.6",
"uuid": "^13.0.0",
"vite": "^7.1.12",
"vite-plugin-circular-dependency": "^0.5.0",
"vite-tsconfig-paths": "^5.1.4",
"vitest": "^3.0.9",
"wouter": "^3.6.0",
"wrangler": "^4.37.1",
"miniflare": "^4.20250913.0"
"vitest": "3.0.9",
"wouter": "^3.7.1",
"wrangler": "^4.45.4"
},
"optionalDependencies": {
"@hono/node-server": "^1.14.3"
"@hono/node-server": "^1.19.6"
},
"peerDependencies": {
"react": ">=19",
@@ -244,11 +253,21 @@
"import": "./dist/adapter/astro/index.js",
"require": "./dist/adapter/astro/index.js"
},
"./adapter/sveltekit": {
"types": "./dist/types/adapter/sveltekit/index.d.ts",
"import": "./dist/adapter/sveltekit/index.js",
"require": "./dist/adapter/sveltekit/index.js"
},
"./adapter/aws": {
"types": "./dist/types/adapter/aws/index.d.ts",
"import": "./dist/adapter/aws/index.js",
"require": "./dist/adapter/aws/index.js"
},
"./adapter/browser": {
"types": "./dist/types/adapter/browser/index.d.ts",
"import": "./dist/adapter/browser/index.js",
"require": "./dist/adapter/browser/index.js"
},
"./dist/main.css": "./dist/ui/main.css",
"./dist/styles.css": "./dist/ui/styles.css",
"./dist/manifest.json": "./dist/static/.vite/manifest.json",
@@ -266,6 +285,7 @@
"adapter/react-router": ["./dist/types/adapter/react-router/index.d.ts"],
"adapter/bun": ["./dist/types/adapter/bun/index.d.ts"],
"adapter/node": ["./dist/types/adapter/node/index.d.ts"],
"adapter/sveltekit": ["./dist/types/adapter/sveltekit/index.d.ts"],
"adapter/sqlite": ["./dist/types/adapter/sqlite/edge.d.ts"]
}
},
@@ -295,6 +315,8 @@
"remix",
"react-router",
"astro",
"sveltekit",
"svelte",
"bun",
"node"
]

View File

@@ -61,7 +61,7 @@ export class Api {
private token?: string;
private user?: TApiUser;
private verified = false;
private token_transport: "header" | "cookie" | "none" = "header";
public token_transport: "header" | "cookie" | "none" = "header";
public system!: SystemApi;
public data!: DataApi;

View File

@@ -5,7 +5,6 @@ import type { em as prototypeEm } from "data/prototype";
import { Connection } from "data/connection/Connection";
import type { Hono } from "hono";
import {
type InitialModuleConfigs,
type ModuleConfigs,
type Modules,
ModuleManager,
@@ -381,8 +380,10 @@ export class App<
if (results.length > 0) {
for (const { name, result } of results) {
if (result) {
$console.log(`[Plugin:${name}] schema`);
ctx.helper.ensureSchema(result);
if (ctx.flags.sync_required) {
$console.log(`[Plugin:${name}] schema, sync required`);
}
}
}
}

View File

@@ -0,0 +1,153 @@
import {
createContext,
lazy,
Suspense,
useContext,
useEffect,
useState,
type ReactNode,
} from "react";
import { checksum } from "bknd/utils";
import { App, registries, sqlocal, type BkndConfig } from "bknd";
import { Route, Router, Switch } from "wouter";
import { ClientProvider } from "bknd/client";
import { SQLocalKysely } from "sqlocal/kysely";
import type { ClientConfig, DatabasePath } from "sqlocal";
import { OpfsStorageAdapter } from "bknd/adapter/browser";
import type { BkndAdminConfig } from "bknd/ui";
// Lazily load the Admin UI (and its stylesheet) so it stays out of the
// initial bundle until the admin route is actually rendered.
const Admin = lazy(() =>
   Promise.all([
      import("bknd/ui"),
      // @ts-ignore (css import has no type declarations)
      import("bknd/dist/styles.css"),
   ]).then(([mod]) => ({
      default: mod.Admin,
   })),
);
/**
 * Runs `fn` inside a View Transition when the browser supports
 * `document.startViewTransition`, otherwise invokes it directly.
 */
function safeViewTransition(fn: () => void) {
   const start = document.startViewTransition?.bind(document);
   if (start) {
      start(fn);
   } else {
      fn();
   }
}
// BkndConfig without `connection`/`app`: the browser adapter supplies its own
// SQLocal-backed connection; `connection` here is the SQLocal client config or
// a database path instead.
export type BrowserBkndConfig<Args = ImportMetaEnv> = Omit<
   BkndConfig<Args>,
   "connection" | "app"
> & {
   adminConfig?: BkndAdminConfig;
   connection?: ClientConfig | DatabasePath;
};

// Props for BkndBrowserApp: app routes as children plus optional UI slots.
export type BkndBrowserAppProps = {
   children: ReactNode;
   header?: ReactNode;
   loading?: ReactNode;
   notFound?: ReactNode;
} & BrowserBkndConfig;

// Holds the built app and a checksum of its config (used to key the Router).
// Initialized with `undefined!` — only valid when rendered under BkndBrowserApp.
const BkndBrowserAppContext = createContext<{
   app: App;
   hash: string;
}>(undefined!);
/**
 * Browser-only wrapper that boots a bknd App (via `setup`) and then renders
 * the given routes plus the lazily-loaded Admin UI under
 * `adminConfig.basepath`. Renders `loading` until the app has been built.
 */
export function BkndBrowserApp({
   children,
   adminConfig,
   header,
   loading,
   notFound,
   ...config
}: BkndBrowserAppProps) {
   const [app, setApp] = useState<App | undefined>(undefined);
   const [hash, setHash] = useState<string>("");
   const adminRoutePath = (adminConfig?.basepath ?? "") + "/*?";

   async function onBuilt(app: App) {
      // swap state inside a view transition when the browser supports it
      safeViewTransition(async () => {
         setApp(app);
         // the config checksum keys the Router so routes remount on changes
         setHash(await checksum(app.toJSON()));
      });
   }

   useEffect(() => {
      setup({ ...config, adminConfig })
         .then((app) => {
            // fix: setup() may resolve without an app on re-entry (e.g. React
            // StrictMode double-mount) — guard so onBuilt never sees undefined
            if (app) onBuilt(app as any);
         })
         .catch(console.error);
   }, []);

   if (!app) {
      return (
         loading ?? (
            <Center>
               <span style={{ opacity: 0.2 }}>Loading...</span>
            </Center>
         )
      );
   }

   return (
      <BkndBrowserAppContext.Provider value={{ app, hash }}>
         <ClientProvider storage={window.localStorage} fetcher={app.server.request}>
            {header}
            <Router key={hash}>
               <Switch>
                  {children}
                  <Route path={adminRoutePath}>
                     <Suspense>
                        <Admin config={adminConfig} />
                     </Suspense>
                  </Route>
                  <Route path="*">
                     {notFound ?? (
                        <Center style={{ fontSize: "48px", fontFamily: "monospace" }}>404</Center>
                     )}
                  </Route>
               </Switch>
            </Router>
         </ClientProvider>
      </BkndBrowserAppContext.Provider>
   );
}
/** Access the built App instance and its config checksum from context. */
export function useApp() {
   const ctx = useContext(BkndBrowserAppContext);
   return ctx;
}
// full-viewport flexbox wrapper that centers its children; caller-provided
// styles are merged on top of the defaults
const Center = (props: React.HTMLAttributes<HTMLDivElement>) => {
   const mergedStyle: React.CSSProperties = {
      width: "100%",
      minHeight: "100vh",
      display: "flex",
      justifyContent: "center",
      alignItems: "center",
      ...(props.style ?? {}),
   };
   return <div {...props} style={mergedStyle} />;
};
let initialized = false;
async function setup(config: BrowserBkndConfig = {}) {
if (initialized) return;
initialized = true;
registries.media.register("opfs", OpfsStorageAdapter);
const app = App.create({
...config,
// @ts-ignore
connection: sqlocal(new SQLocalKysely(config.connection ?? ":localStorage:")),
});
await config.beforeBuild?.(app);
await app.build({ sync: true });
await config.onBuilt?.(app);
return app;
}

View File

@@ -0,0 +1,34 @@
import { describe, beforeAll, vi, afterAll, spyOn } from "bun:test";
import { OpfsStorageAdapter } from "./OpfsStorageAdapter";
// @ts-ignore
import { assetsPath } from "../../../__test__/helper";
import { adapterTestSuite } from "media/storage/adapters/adapter-test-suite";
import { bunTestRunner } from "adapter/bun/test";
import { MockFileSystemDirectoryHandle } from "adapter/browser/mock";
describe("OpfsStorageAdapter", async () => {
   let mockRoot: MockFileSystemDirectoryHandle;
   let testSuiteAdapter: OpfsStorageAdapter;
   // spy created at describe scope so afterAll can restore the real navigator
   const _mock = spyOn(global, "navigator");

   beforeAll(() => {
      // mock navigator.storage.getDirectory()
      mockRoot = new MockFileSystemDirectoryHandle("opfs-root");
      const mockNavigator = {
         storage: {
            getDirectory: vi.fn().mockResolvedValue(mockRoot),
         },
      };
      // @ts-ignore
      _mock.mockReturnValue(mockNavigator);
      // construct only after the navigator mock is in place — the adapter
      // resolves its OPFS root in the constructor
      testSuiteAdapter = new OpfsStorageAdapter();
   });

   afterAll(() => {
      _mock.mockRestore();
   });

   // run the shared storage-adapter conformance suite against a real image file
   const file = Bun.file(`${assetsPath}/image.png`);
   await adapterTestSuite(bunTestRunner, () => testSuiteAdapter, file);
});

View File

@@ -0,0 +1,265 @@
import type { FileBody, FileListObject, FileMeta, FileUploadPayload } from "bknd";
import { StorageAdapter, guessMimeType } from "bknd";
import { parse, s, isFile, isBlob } from "bknd/utils";
// schema for the OPFS adapter options; `root` is an optional slash-separated
// subdirectory inside the origin-private file system (created on demand)
export const opfsAdapterConfig = s.object(
   {
      root: s.string({ default: "" }).optional(),
   },
   {
      title: "OPFS",
      description: "Origin Private File System storage",
      additionalProperties: false,
   },
);

export type OpfsAdapterConfig = s.Static<typeof opfsAdapterConfig>;
/**
 * Storage adapter for OPFS (Origin Private File System)
 * Provides browser-based file storage using the File System Access API
 *
 * Keys are used verbatim as file names inside the configured root directory —
 * they are not split on "/" into subdirectories. ETags are quoted SHA-256 hex
 * digests of the stored bytes.
 */
export class OpfsStorageAdapter extends StorageAdapter {
   private config: OpfsAdapterConfig;
   // resolved once in the constructor; every operation awaits this shared handle
   private rootPromise: Promise<FileSystemDirectoryHandle>;

   constructor(config: Partial<OpfsAdapterConfig> = {}) {
      super();
      this.config = parse(opfsAdapterConfig, config);
      // requires navigator.storage (browser, or a test mock) to be available
      this.rootPromise = this.initializeRoot();
   }

   // resolve the OPFS root and descend into (creating if missing) the
   // configured slash-separated `root` subdirectory
   private async initializeRoot(): Promise<FileSystemDirectoryHandle> {
      const opfsRoot = await navigator.storage.getDirectory();
      if (!this.config.root) {
         return opfsRoot;
      }
      // navigate to or create nested directory structure
      const parts = this.config.root.split("/").filter(Boolean);
      let current = opfsRoot;
      for (const part of parts) {
         current = await current.getDirectoryHandle(part, { create: true });
      }
      return current;
   }

   getSchema() {
      return opfsAdapterConfig;
   }

   getName(): string {
      return "opfs";
   }

   /**
    * List files directly inside the root directory, optionally filtered by key
    * prefix. Directory entries are skipped; there is no recursion.
    */
   async listObjects(prefix?: string): Promise<FileListObject[]> {
      const root = await this.rootPromise;
      const files: FileListObject[] = [];
      for await (const [name, handle] of root.entries()) {
         if (handle.kind === "file") {
            if (!prefix || name.startsWith(prefix)) {
               const file = await (handle as FileSystemFileHandle).getFile();
               files.push({
                  key: name,
                  last_modified: new Date(file.lastModified),
                  size: file.size,
               });
            }
         }
      }
      return files;
   }

   // SHA-256 of the content, hex-encoded and quoted to match HTTP ETag format
   private async computeEtagFromArrayBuffer(buffer: ArrayBuffer): Promise<string> {
      const hashBuffer = await crypto.subtle.digest("SHA-256", buffer);
      const hashArray = Array.from(new Uint8Array(hashBuffer));
      const hashHex = hashArray.map((byte) => byte.toString(16).padStart(2, "0")).join("");
      // wrap the hex string in quotes for ETag format
      return `"${hashHex}"`;
   }

   /**
    * Write an object and return its ETag.
    *
    * Accepts Files, Blobs, ReadableStreams, ArrayBuffers and views. Streams
    * are written chunk-by-chunk but also buffered fully in memory so the ETag
    * can be computed over the complete content. On any failure the writable is
    * aborted so no partial write is committed.
    */
   async putObject(key: string, body: FileBody): Promise<string | FileUploadPayload> {
      if (body === null) {
         throw new Error("Body is empty");
      }

      const root = await this.rootPromise;
      const fileHandle = await root.getFileHandle(key, { create: true });
      const writable = await fileHandle.createWritable();

      try {
         let contentBuffer: ArrayBuffer;

         if (isFile(body)) {
            contentBuffer = await body.arrayBuffer();
            await writable.write(contentBuffer);
         } else if (body instanceof ReadableStream) {
            // write each chunk as it arrives, keeping a copy for the ETag
            const chunks: Uint8Array[] = [];
            const reader = body.getReader();
            try {
               while (true) {
                  const { done, value } = await reader.read();
                  if (done) break;
                  chunks.push(value);
                  await writable.write(value);
               }
            } finally {
               reader.releaseLock();
            }
            // compute total size and combine chunks for etag
            const totalSize = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
            const combined = new Uint8Array(totalSize);
            let offset = 0;
            for (const chunk of chunks) {
               combined.set(chunk, offset);
               offset += chunk.length;
            }
            contentBuffer = combined.buffer;
         } else if (isBlob(body)) {
            contentBuffer = await (body as Blob).arrayBuffer();
            await writable.write(contentBuffer);
         } else {
            // body is ArrayBuffer or ArrayBufferView
            if (ArrayBuffer.isView(body)) {
               // slice out exactly the viewed region for hashing
               const view = body as ArrayBufferView;
               contentBuffer = view.buffer.slice(
                  view.byteOffset,
                  view.byteOffset + view.byteLength,
               ) as ArrayBuffer;
            } else {
               contentBuffer = body as ArrayBuffer;
            }
            await writable.write(body);
         }

         await writable.close();
         return await this.computeEtagFromArrayBuffer(contentBuffer);
      } catch (error) {
         await writable.abort();
         throw error;
      }
   }

   /** Delete an object; deleting a missing key is a silent no-op (idempotent). */
   async deleteObject(key: string): Promise<void> {
      try {
         const root = await this.rootPromise;
         await root.removeEntry(key);
      } catch {
         // file doesn't exist, which is fine
      }
   }

   /** True if a file handle for the key can be obtained. */
   async objectExists(key: string): Promise<boolean> {
      try {
         const root = await this.rootPromise;
         await root.getFileHandle(key);
         return true;
      } catch {
         return false;
      }
   }

   /**
    * Parse a single "bytes=start-end" Range header (including the
    * suffix form "bytes=-N") into inclusive offsets, or null when the
    * header is malformed or out of bounds for the given file size.
    */
   private parseRangeHeader(
      rangeHeader: string,
      fileSize: number,
   ): { start: number; end: number } | null {
      // parse "bytes=start-end" format
      const match = rangeHeader.match(/^bytes=(\d*)-(\d*)$/);
      if (!match) return null;

      const [, startStr, endStr] = match;
      let start = startStr ? Number.parseInt(startStr, 10) : 0;
      let end = endStr ? Number.parseInt(endStr, 10) : fileSize - 1;

      // handle suffix-byte-range-spec (e.g., "bytes=-500")
      if (!startStr && endStr) {
         start = Math.max(0, fileSize - Number.parseInt(endStr, 10));
         end = fileSize - 1;
      }

      // validate range
      if (start < 0 || end >= fileSize || start > end) {
         return null;
      }

      return { start, end };
   }

   /**
    * Serve an object as an HTTP Response with byte-range support:
    * 206 for a satisfiable Range, 416 for an invalid one, 200 otherwise,
    * and 404 when the key cannot be read. The whole file is loaded into
    * memory even for range requests before slicing.
    */
   async getObject(key: string, headers: Headers): Promise<Response> {
      try {
         const root = await this.rootPromise;
         const fileHandle = await root.getFileHandle(key);
         const file = await fileHandle.getFile();
         const fileSize = file.size;
         const mimeType = guessMimeType(key);

         const responseHeaders = new Headers({
            "Accept-Ranges": "bytes",
            "Content-Type": mimeType || "application/octet-stream",
         });

         const rangeHeader = headers.get("range");
         if (rangeHeader) {
            const range = this.parseRangeHeader(rangeHeader, fileSize);
            if (!range) {
               // invalid range - return 416 Range Not Satisfiable
               responseHeaders.set("Content-Range", `bytes */${fileSize}`);
               return new Response("", {
                  status: 416,
                  headers: responseHeaders,
               });
            }

            const { start, end } = range;
            const arrayBuffer = await file.arrayBuffer();
            // end is inclusive, ArrayBuffer.slice's end is exclusive
            const chunk = arrayBuffer.slice(start, end + 1);

            responseHeaders.set("Content-Range", `bytes ${start}-${end}/${fileSize}`);
            responseHeaders.set("Content-Length", chunk.byteLength.toString());

            return new Response(chunk, {
               status: 206, // Partial Content
               headers: responseHeaders,
            });
         } else {
            // normal request - return entire file
            const content = await file.arrayBuffer();
            responseHeaders.set("Content-Length", content.byteLength.toString());

            return new Response(content, {
               status: 200,
               headers: responseHeaders,
            });
         }
      } catch {
         // handle file reading errors
         return new Response("", { status: 404 });
      }
   }

   // OPFS files have no externally addressable URL, so this is unsupported
   getObjectUrl(_key: string): string {
      throw new Error("Method not implemented.");
   }

   /** Return mime type (guessed from the key) and size of a stored object. */
   async getObjectMeta(key: string): Promise<FileMeta> {
      const root = await this.rootPromise;
      const fileHandle = await root.getFileHandle(key);
      const file = await fileHandle.getFile();

      return {
         type: guessMimeType(key) || "application/octet-stream",
         size: file.size,
      };
   }

   // `_secrets` is unused: the config carries no sensitive values
   toJSON(_secrets?: boolean) {
      return {
         type: this.getName(),
         config: this.config,
      };
   }
}

View File

@@ -0,0 +1,2 @@
export * from "./OpfsStorageAdapter";
export * from "./BkndBrowserApp";

View File

@@ -0,0 +1,136 @@
// mock OPFS API for testing
class MockFileSystemFileHandle {
   kind: "file" = "file";
   name: string;
   // committed bytes of the file; replaced atomically when a writable closes
   private content: ArrayBuffer;
   private lastModified: number;

   constructor(name: string, content: ArrayBuffer = new ArrayBuffer(0)) {
      this.name = name;
      this.content = content;
      this.lastModified = Date.now();
   }

   /** Materialize the committed bytes as a File (mime type guessed from extension). */
   async getFile(): Promise<File> {
      return new File([this.content], this.name, {
         lastModified: this.lastModified,
         type: this.guessMimeType(),
      });
   }

   /**
    * Mimic FileSystemFileHandle.createWritable(): sequential write() calls
    * append at the advancing cursor and the result is committed on close()
    * (abort() discards it). The previous implementation overwrote the whole
    * content on every write(), which silently dropped all but the last chunk
    * of streamed uploads (OpfsStorageAdapter writes streams chunk-by-chunk).
    */
   async createWritable(): Promise<FileSystemWritableFileStream> {
      const handle = this;
      // pending chunks; combined and committed on close(), discarded on abort()
      const chunks: Uint8Array[] = [];
      return {
         async write(data: any) {
            if (data instanceof ArrayBuffer) {
               chunks.push(new Uint8Array(data.slice(0)));
            } else if (ArrayBuffer.isView(data)) {
               // copy only the viewed region
               chunks.push(
                  new Uint8Array(
                     data.buffer.slice(data.byteOffset, data.byteOffset + data.byteLength),
                  ),
               );
            } else if (data instanceof Blob) {
               chunks.push(new Uint8Array(await data.arrayBuffer()));
            } else if (typeof data === "string") {
               // real writables accept strings (written as UTF-8)
               chunks.push(new TextEncoder().encode(data));
            }
         },
         async close() {
            // commit: concatenate all pending chunks into the file content
            const total = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
            const combined = new Uint8Array(total);
            let offset = 0;
            for (const chunk of chunks) {
               combined.set(chunk, offset);
               offset += chunk.length;
            }
            handle.content = combined.buffer;
            handle.lastModified = Date.now();
         },
         async abort() {
            chunks.length = 0;
         },
         async seek(_position: number) {},
         async truncate(_size: number) {},
      } as FileSystemWritableFileStream;
   }

   // minimal extension→mime map, sufficient for the adapter tests
   private guessMimeType(): string {
      const ext = this.name.split(".").pop()?.toLowerCase();
      const mimeTypes: Record<string, string> = {
         png: "image/png",
         jpg: "image/jpeg",
         jpeg: "image/jpeg",
         gif: "image/gif",
         webp: "image/webp",
         svg: "image/svg+xml",
         txt: "text/plain",
         json: "application/json",
         pdf: "application/pdf",
      };
      return mimeTypes[ext || ""] || "application/octet-stream";
   }
}
/**
 * In-memory stand-in for FileSystemDirectoryHandle, holding files and child
 * directories in maps. Iteration yields files first, then directories.
 */
export class MockFileSystemDirectoryHandle {
   kind: "directory" = "directory";
   name: string;
   private files: Map<string, MockFileSystemFileHandle> = new Map();
   private directories: Map<string, MockFileSystemDirectoryHandle> = new Map();

   constructor(name: string = "root") {
      this.name = name;
   }

   /** Look up a file handle, creating it when `options.create` is set. */
   async getFileHandle(
      name: string,
      options?: FileSystemGetFileOptions,
   ): Promise<FileSystemFileHandle> {
      const existing = this.files.get(name);
      if (existing) {
         return existing as any;
      }
      if (!options?.create) {
         throw new Error(`File not found: ${name}`);
      }
      const created = new MockFileSystemFileHandle(name);
      this.files.set(name, created);
      return created as any;
   }

   /** Look up a child directory handle, creating it when `options.create` is set. */
   async getDirectoryHandle(
      name: string,
      options?: FileSystemGetDirectoryOptions,
   ): Promise<FileSystemDirectoryHandle> {
      const existing = this.directories.get(name);
      if (existing) {
         return existing as any;
      }
      if (!options?.create) {
         throw new Error(`Directory not found: ${name}`);
      }
      const created = new MockFileSystemDirectoryHandle(name);
      this.directories.set(name, created);
      return created as any;
   }

   /** Remove a file or directory by name; missing entries are a no-op. */
   async removeEntry(name: string, _options?: FileSystemRemoveOptions): Promise<void> {
      this.files.delete(name);
      this.directories.delete(name);
   }

   async *entries(): AsyncIterableIterator<[string, FileSystemHandle]> {
      for (const pair of [...this.files, ...this.directories]) {
         yield pair as unknown as [string, FileSystemHandle];
      }
   }

   async *keys(): AsyncIterableIterator<string> {
      for (const name of [...this.files.keys(), ...this.directories.keys()]) {
         yield name;
      }
   }

   async *values(): AsyncIterableIterator<FileSystemHandle> {
      for (const handle of [...this.files.values(), ...this.directories.values()]) {
         yield handle as any;
      }
   }

   // default async iteration behaves like entries(), matching the real API
   [Symbol.asyncIterator](): AsyncIterableIterator<[string, FileSystemHandle]> {
      return this.entries();
   }
}

View File

@@ -1,14 +1,12 @@
/// <reference types="bun-types" />
import path from "node:path";
import { type RuntimeBkndConfig, createRuntimeApp } from "bknd/adapter";
import { registerLocalMediaAdapter } from ".";
import { config, type App } from "bknd";
import type { ServeOptions } from "bun";
import { serveStatic } from "hono/bun";
type BunEnv = Bun.Env;
export type BunBkndConfig<Env = BunEnv> = RuntimeBkndConfig<Env> & Omit<ServeOptions, "fetch">;
export type BunBkndConfig<Env = BunEnv> = RuntimeBkndConfig<Env> &
Omit<Bun.Serve.Options<undefined, string>, "fetch">;
export async function createApp<Env = BunEnv>(
{ distPath, serveStatic: _serveStatic, ...config }: BunBkndConfig<Env> = {},
@@ -45,6 +43,7 @@ export function createHandler<Env = BunEnv>(
export function serve<Env = BunEnv>(
{
app,
distPath,
connection,
config: _config,
@@ -60,10 +59,11 @@ export function serve<Env = BunEnv>(
args: Env = Bun.env as Env,
) {
Bun.serve({
...serveOptions,
...(serveOptions as any),
port,
fetch: createHandler(
{
app,
connection,
config: _config,
options,

View File

@@ -3,7 +3,7 @@
import type { RuntimeBkndConfig } from "bknd/adapter";
import { Hono } from "hono";
import { serveStatic } from "hono/cloudflare-workers";
import type { MaybePromise } from "bknd";
import type { App, MaybePromise } from "bknd";
import { $console } from "bknd/utils";
import { createRuntimeApp } from "bknd/adapter";
import { registerAsyncsExecutionContext, makeConfig, type CloudflareContext } from "./config";
@@ -55,8 +55,12 @@ export async function createApp<Env extends CloudflareEnv = CloudflareEnv>(
// compatibility
export const getFresh = createApp;
let app: App | undefined;
export function serve<Env extends CloudflareEnv = CloudflareEnv>(
config: CloudflareBkndConfig<Env> = {},
serveOptions?: (args: Env) => {
warm?: boolean;
},
) {
return {
async fetch(request: Request, env: Env, ctx: ExecutionContext) {
@@ -92,8 +96,11 @@ export function serve<Env extends CloudflareEnv = CloudflareEnv>(
}
}
const context = { request, env, ctx } as CloudflareContext<Env>;
const app = await createApp(config, context);
const { warm } = serveOptions?.(env) ?? {};
if (!app || warm !== true) {
const context = { request, env, ctx } as CloudflareContext<Env>;
app = await createApp(config, context);
}
return app.fetch(request, env, ctx);
},

View File

@@ -65,37 +65,31 @@ export function withPlatformProxy<Env extends CloudflareEnv>(
}
return {
...config,
beforeBuild: async (app, registries) => {
if (!use_proxy) return;
const env = await getEnv();
registerMedia(env, registries as any);
await config?.beforeBuild?.(app, registries);
},
bindings: async (env) => {
return (await config?.bindings?.(await getEnv(env))) || {};
},
// @ts-ignore
app: async (_env) => {
const env = await getEnv(_env);
const binding = use_proxy ? getBinding(env, "D1Database") : undefined;
const appConfig = typeof config.app === "function" ? await config.app(env) : config;
const connection =
use_proxy && binding
? d1Sqlite({
binding: binding.value as any,
})
: appConfig.connection;
if (config?.app === undefined && use_proxy && binding) {
return {
connection: d1Sqlite({
binding: binding.value,
}),
};
} else if (typeof config?.app === "function") {
const appConfig = await config?.app(env);
if (binding) {
appConfig.connection = d1Sqlite({
binding: binding.value,
}) as any;
}
return appConfig;
}
return config?.app || {};
return {
...appConfig,
beforeBuild: async (app, registries) => {
if (!use_proxy) return;
const env = await getEnv();
registerMedia(env, registries as any);
await config?.beforeBuild?.(app, registries);
},
bindings: async (env) => {
return (await config?.bindings?.(await getEnv(env))) || {};
},
connection,
};
},
} satisfies CloudflareBkndConfig<Env>;
}

View File

@@ -14,14 +14,15 @@ import type { AdminControllerOptions } from "modules/server/AdminController";
import type { Manifest } from "vite";
export type BkndConfig<Args = any, Additional = {}> = Merge<
CreateAppConfig & {
app?:
| Merge<Omit<BkndConfig, "app"> & Additional>
| ((args: Args) => MaybePromise<Merge<Omit<BkndConfig<Args>, "app"> & Additional>>);
onBuilt?: (app: App) => MaybePromise<void>;
beforeBuild?: (app?: App, registries?: typeof $registries) => MaybePromise<void>;
buildConfig?: Parameters<App["build"]>[0];
} & Additional
CreateAppConfig &
Omit<Additional, "app"> & {
app?:
| Omit<BkndConfig<Args, Additional>, "app">
| ((args: Args) => MaybePromise<Omit<BkndConfig<Args, Additional>, "app">>);
onBuilt?: (app: App) => MaybePromise<void>;
beforeBuild?: (app?: App, registries?: typeof $registries) => MaybePromise<void>;
buildConfig?: Parameters<App["build"]>[0];
}
>;
export type FrameworkBkndConfig<Args = any> = BkndConfig<Args>;

View File

@@ -1,5 +1,6 @@
import { createFrameworkApp, type FrameworkBkndConfig } from "bknd/adapter";
import { isNode } from "bknd/utils";
// @ts-expect-error next is not installed
import type { NextApiRequest } from "next";
type NextjsEnv = NextApiRequest["env"];
@@ -18,7 +19,9 @@ function getCleanRequest(req: Request, cleanRequest: NextjsBkndConfig["cleanRequ
if (!cleanRequest) return req;
const url = new URL(req.url);
cleanRequest?.searchParams?.forEach((k) => url.searchParams.delete(k));
cleanRequest?.searchParams?.forEach((k) => {
url.searchParams.delete(k);
});
if (isNode()) {
return new Request(url.toString(), {

View File

@@ -24,7 +24,7 @@ export async function createApp<Env = NodeEnv>(
path.resolve(distPath ?? relativeDistPath ?? "./node_modules/bknd/dist", "static"),
);
if (relativeDistPath) {
console.warn("relativeDistPath is deprecated, please use distPath instead");
$console.warn("relativeDistPath is deprecated, please use distPath instead");
}
registerLocalMediaAdapter();

View File

@@ -1,4 +1,4 @@
import { describe, beforeAll, afterAll } from "vitest";
import { describe } from "vitest";
import * as node from "./node.adapter";
import { adapterTestSuite } from "adapter/adapter-test-suite";
import { viTestRunner } from "adapter/node/vitest";

View File

@@ -0,0 +1 @@
export * from "./sveltekit.adapter";

View File

@@ -0,0 +1,16 @@
import { afterAll, beforeAll, describe } from "bun:test";
import * as sveltekit from "./sveltekit.adapter";
import { disableConsoleLog, enableConsoleLog } from "core/utils";
import { adapterTestSuite } from "adapter/adapter-test-suite";
import { bunTestRunner } from "adapter/bun/test";
// silence bknd console output for the duration of this suite
beforeAll(disableConsoleLog);
afterAll(enableConsoleLog);

describe("sveltekit adapter", () => {
   // run the shared adapter conformance suite against the sveltekit entry points
   adapterTestSuite(bunTestRunner, {
      makeApp: (c, a) => sveltekit.getApp(c as any, a ?? ({} as any)),
      // serve() returns a handler taking { request }, matching SvelteKit hooks
      makeHandler: (c, a) => (request: Request) =>
         sveltekit.serve(c as any, a ?? ({} as any))({ request }),
   });
});

View File

@@ -0,0 +1,33 @@
import { createRuntimeApp, type RuntimeBkndConfig } from "bknd/adapter";
// shape of the argument SvelteKit passes to a handle hook; only the request is used
type TSvelteKit = {
   request: Request;
};

// only adminOptions is configurable here; everything else comes from runtime args
export type SvelteKitBkndConfig<Env> = Pick<RuntimeBkndConfig<Env>, "adminOptions">;
/**
 * Get bknd app instance
 * @param config - bknd configuration
 * @param args - environment variables (use $env/dynamic/private for universal runtime support)
 */
export async function getApp<Env>(
   config: SvelteKitBkndConfig<Env> = {} as SvelteKitBkndConfig<Env>,
   args: Env,
) {
   const app = await createRuntimeApp(config, args);
   return app;
}
/**
 * Create request handler for hooks.server.ts
 * @param config - bknd configuration
 * @param args - environment variables (use $env/dynamic/private for universal runtime support)
 */
export function serve<Env>(
   config: SvelteKitBkndConfig<Env> = {} as SvelteKitBkndConfig<Env>,
   args: Env,
) {
   return async ({ request }: TSvelteKit) => {
      const app = await getApp(config, args);
      return app.fetch(request);
   };
}

View File

@@ -46,6 +46,22 @@ export class AppAuth extends Module<AppAuthSchema> {
to.strategies!.password!.enabled = true;
}
if (to.default_role_register && to.default_role_register?.length > 0) {
const valid_to_role = Object.keys(to.roles ?? {}).includes(to.default_role_register);
if (!valid_to_role) {
const msg = `Default role for registration not found: ${to.default_role_register}`;
// if changing to a new value
if (from.default_role_register !== to.default_role_register) {
throw new Error(msg);
}
// resetting gracefully, since role doesn't exist anymore
$console.warn(`${msg}, resetting to undefined`);
to.default_role_register = undefined;
}
}
return to;
}
@@ -82,6 +98,7 @@ export class AppAuth extends Module<AppAuthSchema> {
this._authenticator = new Authenticator(strategies, new AppUserPool(this), {
jwt: this.config.jwt,
cookie: this.config.cookie,
default_role_register: this.config.default_role_register,
});
this.registerEntities();
@@ -171,10 +188,20 @@ export class AppAuth extends Module<AppAuthSchema> {
} catch (e) {}
}
async createUser({ email, password, ...additional }: CreateUserPayload): Promise<DB["users"]> {
async createUser({
email,
password,
role,
...additional
}: CreateUserPayload): Promise<DB["users"]> {
if (!this.enabled) {
throw new Error("Cannot create user, auth not enabled");
}
if (role) {
if (!Object.keys(this.config.roles ?? {}).includes(role)) {
throw new Error(`Role "${role}" not found`);
}
}
const strategy = "password" as const;
const pw = this.authenticator.strategy(strategy) as PasswordStrategy;
@@ -183,6 +210,7 @@ export class AppAuth extends Module<AppAuthSchema> {
mutator.__unstable_toggleSystemEntityCreation(false);
const { data: created } = await mutator.insertOne({
...(additional as any),
role: role || this.config.default_role_register || undefined,
email,
strategy,
strategy_value,

View File

@@ -13,6 +13,7 @@ import {
InvalidSchemaError,
transformObject,
mcpTool,
$console,
} from "bknd/utils";
import type { PasswordStrategy } from "auth/authenticate/strategies";
@@ -210,7 +211,7 @@ export class AuthController extends Controller {
const idType = s.anyOf([s.number({ title: "Integer" }), s.string({ title: "UUID" })]);
const getUser = async (params: { id?: string | number; email?: string }) => {
let user: DB["users"] | undefined = undefined;
let user: DB["users"] | undefined;
if (params.id) {
const { data } = await this.userRepo.findId(params.id);
user = data;
@@ -225,26 +226,33 @@ export class AuthController extends Controller {
};
const roles = Object.keys(this.auth.config.roles ?? {});
mcp.tool(
"auth_user_create",
{
description: "Create a new user",
inputSchema: s.object({
email: s.string({ format: "email" }),
password: s.string({ minLength: 8 }),
role: s
.string({
enum: roles.length > 0 ? roles : undefined,
})
.optional(),
}),
},
async (params, c) => {
await c.context.ctx().helper.granted(c, AuthPermissions.createUser);
try {
const actions = this.auth.authenticator.strategy("password").getActions();
if (actions.create) {
const schema = actions.create.schema;
mcp.tool(
"auth_user_create",
{
description: "Create a new user",
inputSchema: s.object({
...schema.properties,
role: s
.string({
enum: roles.length > 0 ? roles : undefined,
})
.optional(),
}),
},
async (params, c) => {
await c.context.ctx().helper.granted(c, AuthPermissions.createUser);
return c.json(await this.auth.createUser(params));
},
);
return c.json(await this.auth.createUser(params));
},
);
}
} catch (e) {
$console.warn("error creating auth_user_create tool", e);
}
mcp.tool(
"auth_user_token",

View File

@@ -51,6 +51,7 @@ export const authConfigSchema = $object(
basepath: s.string({ default: "/api/auth" }),
entity_name: s.string({ default: "users" }),
allow_register: s.boolean({ default: true }).optional(),
default_role_register: s.string().optional(),
jwt: jwtConfig,
cookie: cookieConfig,
strategies: $record(

View File

@@ -74,6 +74,7 @@ export const jwtConfig = s.strictObject(
export const authenticatorConfig = s.object({
jwt: jwtConfig,
cookie: cookieConfig,
default_role_register: s.string().optional(),
});
type AuthConfig = s.Static<typeof authenticatorConfig>;
@@ -164,9 +165,13 @@ export class Authenticator<
if (!("strategy_value" in profile)) {
throw new InvalidConditionsException("Profile must have a strategy value");
}
if ("role" in profile) {
throw new InvalidConditionsException("Role cannot be provided during registration");
}
const user = await this.userPool.create(strategy.getName(), {
...profile,
role: this.config.default_role_register,
strategy_value: profile.strategy_value,
});

View File

@@ -10,6 +10,7 @@ const schema = s
.object({
hashing: s.string({ enum: ["plain", "sha256", "bcrypt"], default: "sha256" }),
rounds: s.number({ minimum: 1, maximum: 10 }).optional(),
minLength: s.number({ default: 8, minimum: 1 }).optional(),
})
.strict();
@@ -37,7 +38,7 @@ export class PasswordStrategy extends AuthStrategy<typeof schema> {
format: "email",
}),
password: s.string({
minLength: 8, // @todo: this should be configurable
minLength: this.config.minLength,
}),
});
}
@@ -65,12 +66,21 @@ export class PasswordStrategy extends AuthStrategy<typeof schema> {
return await bcryptCompare(compare, actual);
}
return false;
return actual === compare;
}
verify(password: string) {
return async (user: User) => {
const compare = await this.compare(user?.strategy_value!, password);
if (!user || !user.strategy_value) {
throw new InvalidCredentialsException();
}
if (!this.getPayloadSchema().properties.password.validate(password).valid) {
$console.debug("PasswordStrategy: Invalid password", password);
throw new InvalidCredentialsException();
}
const compare = await this.compare(user.strategy_value, password);
if (compare !== true) {
throw new InvalidCredentialsException();
}

View File

@@ -67,7 +67,10 @@ export async function startServer(
$console.info("Server listening on", url);
if (options.open) {
await open(url);
const p = await open(url, { wait: false });
p.on("error", () => {
$console.warn("Couldn't open url in browser");
});
}
}

View File

@@ -0,0 +1,55 @@
import { describe, it, expect } from "bun:test";
import { plunkEmail } from "./plunk";
const ALL_TESTS = !!process.env.ALL_TESTS;

// NOTE(review): skipIf(ALL_TESTS) disables this suite when ALL_TESTS is set —
// confirm the polarity; these tests hit the live Plunk API and need PLUNK_API_KEY
describe.skipIf(ALL_TESTS)("plunk", () => {
   it("should throw on failed", async () => {
      const driver = plunkEmail({ apiKey: "invalid" });
      // the rejection assertion must be awaited, otherwise the test resolves
      // before the promise settles and can never fail
      await expect(driver.send("foo@bar.com", "Test", "Test")).rejects.toThrow();
   });

   it("should send an email", async () => {
      const driver = plunkEmail({
         apiKey: process.env.PLUNK_API_KEY!,
         from: undefined, // Default to what Plunk sets
      });
      const response = await driver.send(
         "help@bknd.io",
         "Test Email from Plunk",
         "This is a test email",
      );
      expect(response).toBeDefined();
      expect(response.success).toBe(true);
      expect(response.emails).toBeDefined();
      expect(response.timestamp).toBeDefined();
   });

   it("should send HTML email", async () => {
      const driver = plunkEmail({
         apiKey: process.env.PLUNK_API_KEY!,
         from: undefined,
      });
      const htmlBody = "<h1>Test Email</h1><p>This is a test email</p>";
      const response = await driver.send(
         "help@bknd.io",
         "HTML Test",
         htmlBody,
      );
      expect(response).toBeDefined();
      expect(response.success).toBe(true);
   });

   it("should send with text and html", async () => {
      const driver = plunkEmail({
         apiKey: process.env.PLUNK_API_KEY!,
         from: undefined,
      });
      const response = await driver.send("test@example.com", "Test Email", {
         text: "help@bknd.io",
         html: "<p>This is HTML</p>",
      });
      expect(response).toBeDefined();
      expect(response.success).toBe(true);
   });
});

View File

@@ -0,0 +1,70 @@
import type { IEmailDriver } from "./index";
/** Constructor options for the Plunk email driver. */
export type PlunkEmailOptions = {
   apiKey: string;
   host?: string;
   from?: string;
};

/** Per-send options, forwarded verbatim to the Plunk API payload. */
export type PlunkEmailSendOptions = {
   subscribed?: boolean;
   name?: string;
   from?: string;
   reply?: string;
   headers?: Record<string, string>;
};

/** Shape of a successful Plunk /v1/send response. */
export type PlunkEmailResponse = {
   success: boolean;
   emails: Array<{
      contact: {
         id: string;
         email: string;
      };
      email: string;
   }>;
   timestamp: string;
};

/**
 * Email driver backed by Plunk's transactional send endpoint.
 * Throws on any non-2xx API response.
 */
export const plunkEmail = (
   config: PlunkEmailOptions,
): IEmailDriver<PlunkEmailResponse, PlunkEmailSendOptions> => {
   const endpoint = config.host ?? "https://api.useplunk.com/v1/send";

   return {
      send: async (
         to: string,
         subject: string,
         body: string | { text: string; html: string },
         options?: PlunkEmailSendOptions,
      ) => {
         // Plunk's API takes a single `body` field; use the html variant when
         // a text/html pair is provided
         const content = typeof body === "string" ? body : body.html;

         const res = await fetch(endpoint, {
            method: "POST",
            headers: {
               "Content-Type": "application/json",
               Authorization: `Bearer ${config.apiKey}`,
            },
            // options are spread last so they may override the base fields
            body: JSON.stringify({
               from: config.from,
               to,
               subject,
               body: content,
               ...options,
            }),
         });

         if (!res.ok) {
            throw new Error(`Plunk API error: ${await res.text()}`);
         }

         return (await res.json()) as PlunkEmailResponse;
      },
   };
};

View File

@@ -4,7 +4,7 @@ import { resendEmail } from "./resend";
const ALL_TESTS = !!process.env.ALL_TESTS;
describe.skipIf(ALL_TESTS)("resend", () => {
it.only("should throw on failed", async () => {
it("should throw on failed", async () => {
const driver = resendEmail({ apiKey: "invalid" } as any);
expect(driver.send("foo@bar.com", "Test", "Test")).rejects.toThrow();
});

View File

@@ -5,3 +5,4 @@ export type { IEmailDriver } from "./email";
export { resendEmail } from "./email/resend";
export { sesEmail } from "./email/ses";
export { mailchannelsEmail } from "./email/mailchannels";
export { plunkEmail } from "./email/plunk";

View File

@@ -8,7 +8,7 @@ export function isDebug(): boolean {
try {
// @ts-expect-error - this is a global variable in dev
return is_toggled(__isDev);
} catch (e) {
} catch (_e) {
return false;
}
}

View File

@@ -1,3 +1,4 @@
import type { MaybePromise } from "bknd";
import type { Event } from "./Event";
import type { EventClass } from "./EventManager";
@@ -7,7 +8,7 @@ export type ListenerMode = (typeof ListenerModes)[number];
export type ListenerHandler<E extends Event<any, any>> = (
event: E,
slug: string,
) => E extends Event<any, infer R> ? R | Promise<R | void> : never;
) => E extends Event<any, infer R> ? MaybePromise<R | void> : never;
export class EventListener<E extends Event = Event> {
mode: ListenerMode = "async";

View File

@@ -32,6 +32,7 @@ export function getFlashMessage(
): { type: FlashMessageType; message: string } | undefined {
const flash = getCookieValue(flash_key);
if (flash && clear) {
// biome-ignore lint/suspicious/noDocumentCookie: .
document.cookie = `${flash_key}=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/;`;
}
return flash ? JSON.parse(flash) : undefined;

View File

@@ -14,9 +14,9 @@ export function isObject(value: unknown): value is Record<string, unknown> {
export function omitKeys<T extends object, K extends keyof T>(
obj: T,
keys_: readonly K[],
keys_: readonly K[] | K[] | string[],
): Omit<T, Extract<K, keyof T>> {
const keys = new Set(keys_);
const keys = new Set(keys_ as readonly K[]);
const result = {} as Omit<T, Extract<K, keyof T>>;
for (const [key, value] of Object.entries(obj) as [keyof T, T[keyof T]][]) {
if (!keys.has(key as K)) {

View File

@@ -1,3 +1,4 @@
import type { MaybePromise } from "core/types";
import { getRuntimeKey as honoGetRuntimeKey } from "hono/adapter";
/**
@@ -77,3 +78,37 @@ export function threw(fn: () => any, instance?: new (...args: any[]) => Error) {
return true;
}
}
export async function threwAsync(fn: Promise<any>, instance?: new (...args: any[]) => Error) {
try {
await fn;
return false;
} catch (e) {
if (instance) {
if (e instanceof instance) {
return true;
}
// if instance given but not what expected, throw
throw e;
}
return true;
}
}
export async function $waitUntil(
message: string,
condition: () => MaybePromise<boolean>,
delay = 100,
maxAttempts = 10,
) {
let attempts = 0;
while (attempts < maxAttempts) {
if (await condition()) {
return;
}
await new Promise((resolve) => setTimeout(resolve, delay));
attempts++;
}
throw new Error(`$waitUntil: "${message}" failed after ${maxAttempts} attempts`);
}

View File

@@ -120,17 +120,14 @@ export function patternMatch(target: string, pattern: RegExp | string): boolean
}
export function slugify(str: string): string {
return (
String(str)
.normalize("NFKD") // split accented characters into their base characters and diacritical marks
// biome-ignore lint/suspicious/noMisleadingCharacterClass: <explanation>
.replace(/[\u0300-\u036f]/g, "") // remove all the accents, which happen to be all in the \u03xx UNICODE block.
.trim() // trim leading or trailing whitespace
.toLowerCase() // convert to lowercase
.replace(/[^a-z0-9 -]/g, "") // remove non-alphanumeric characters
.replace(/\s+/g, "-") // replace spaces with hyphens
.replace(/-+/g, "-") // remove consecutive hyphens
);
return String(str)
.normalize("NFKD") // split accented characters into their base characters and diacritical marks
.replace(/[\u0300-\u036f]/g, "") // remove all the accents, which happen to be all in the \u03xx UNICODE block.
.trim() // trim leading or trailing whitespace
.toLowerCase() // convert to lowercase
.replace(/[^a-z0-9 -]/g, "") // remove non-alphanumeric characters
.replace(/\s+/g, "-") // replace spaces with hyphens
.replace(/-+/g, "-"); // remove consecutive hyphens
}
export function truncate(str: string, length = 50, end = "..."): string {

View File

@@ -96,6 +96,9 @@ export class DataController extends Controller {
// read entity schema
hono.get(
"/schema.json",
permission(SystemPermissions.schemaRead, {
context: (_c) => ({ module: "data" }),
}),
permission(DataPermissions.entityRead, {
context: (c) => ({ entity: c.req.param("entity") }),
}),
@@ -124,6 +127,9 @@ export class DataController extends Controller {
// read schema
hono.get(
"/schemas/:entity/:context?",
permission(SystemPermissions.schemaRead, {
context: (_c) => ({ module: "data" }),
}),
permission(DataPermissions.entityRead, {
context: (c) => ({ entity: c.req.param("entity") }),
}),
@@ -161,7 +167,7 @@ export class DataController extends Controller {
hono.get(
"/types",
permission(SystemPermissions.schemaRead, {
context: (c) => ({ module: "data" }),
context: (_c) => ({ module: "data" }),
}),
describeRoute({
summary: "Retrieve data typescript definitions",
@@ -182,6 +188,9 @@ export class DataController extends Controller {
*/
hono.get(
"/info/:entity",
permission(SystemPermissions.schemaRead, {
context: (_c) => ({ module: "data" }),
}),
permission(DataPermissions.entityRead, {
context: (c) => ({ entity: c.req.param("entity") }),
}),

View File

@@ -6,17 +6,15 @@ import {
type CompiledQuery,
type DatabaseIntrospector,
type Dialect,
type Expression,
type Kysely,
type KyselyPlugin,
type OnModifyForeignAction,
type QueryResult,
type RawBuilder,
type SelectQueryBuilder,
type SelectQueryNode,
type Simplify,
sql,
} from "kysely";
import type { jsonArrayFrom, jsonBuildObject, jsonObjectFrom } from "kysely/helpers/sqlite";
import type { BaseIntrospector, BaseIntrospectorConfig } from "./BaseIntrospector";
import type { DB } from "bknd";
import type { Constructor } from "core/registry/Registry";
@@ -70,15 +68,9 @@ export type IndexSpec = {
};
export type DbFunctions = {
jsonObjectFrom<O>(expr: SelectQueryBuilderExpression<O>): RawBuilder<Simplify<O> | null>;
jsonArrayFrom<O>(expr: SelectQueryBuilderExpression<O>): RawBuilder<Simplify<O>[]>;
jsonBuildObject<O extends Record<string, Expression<unknown>>>(
obj: O,
): RawBuilder<
Simplify<{
[K in keyof O]: O[K] extends Expression<infer V> ? V : never;
}>
>;
jsonObjectFrom: typeof jsonObjectFrom;
jsonArrayFrom: typeof jsonArrayFrom;
jsonBuildObject: typeof jsonBuildObject;
};
export type ConnQuery = CompiledQuery | Compilable;

View File

@@ -14,27 +14,31 @@ export function connectionTestSuite(
{
makeConnection,
rawDialectDetails,
disableConsoleLog: _disableConsoleLog = true,
}: {
makeConnection: () => MaybePromise<{
connection: Connection;
dispose: () => MaybePromise<void>;
}>;
rawDialectDetails: string[];
disableConsoleLog?: boolean;
},
) {
const { test, expect, describe, beforeEach, afterEach, afterAll, beforeAll } = testRunner;
beforeAll(() => disableConsoleLog());
afterAll(() => enableConsoleLog());
if (_disableConsoleLog) {
beforeAll(() => disableConsoleLog());
afterAll(() => enableConsoleLog());
}
describe("base", () => {
let ctx: Awaited<ReturnType<typeof makeConnection>>;
beforeEach(async () => {
ctx = await makeConnection();
});
afterEach(async () => {
await ctx.dispose();
});
let ctx: Awaited<ReturnType<typeof makeConnection>>;
beforeEach(async () => {
ctx = await makeConnection();
});
afterEach(async () => {
await ctx.dispose();
});
describe("base", async () => {
test("pings", async () => {
const res = await ctx.connection.ping();
expect(res).toBe(true);
@@ -98,52 +102,54 @@ export function connectionTestSuite(
});
describe("schema", async () => {
const { connection, dispose } = await makeConnection();
afterAll(async () => {
await dispose();
});
const makeSchema = async () => {
const fields = [
{
type: "integer",
name: "id",
primary: true,
},
{
type: "text",
name: "text",
},
{
type: "json",
name: "json",
},
] as const satisfies FieldSpec[];
const fields = [
{
type: "integer",
name: "id",
primary: true,
},
{
type: "text",
name: "text",
},
{
type: "json",
name: "json",
},
] as const satisfies FieldSpec[];
let b = ctx.connection.kysely.schema.createTable("test");
for (const field of fields) {
// @ts-expect-error
b = b.addColumn(...ctx.connection.getFieldSchema(field));
}
await b.execute();
let b = connection.kysely.schema.createTable("test");
for (const field of fields) {
// @ts-expect-error
b = b.addColumn(...connection.getFieldSchema(field));
}
await b.execute();
// add index
await connection.kysely.schema.createIndex("test_index").on("test").columns(["id"]).execute();
// add index
await ctx.connection.kysely.schema
.createIndex("test_index")
.on("test")
.columns(["id"])
.execute();
};
test("executes query", async () => {
await connection.kysely
await makeSchema();
await ctx.connection.kysely
.insertInto("test")
.values({ id: 1, text: "test", json: JSON.stringify({ a: 1 }) })
.execute();
const expected = { id: 1, text: "test", json: { a: 1 } };
const qb = connection.kysely.selectFrom("test").selectAll();
const res = await connection.executeQuery(qb);
const qb = ctx.connection.kysely.selectFrom("test").selectAll();
const res = await ctx.connection.executeQuery(qb);
expect(res.rows).toEqual([expected]);
expect(rawDialectDetails.every((detail) => getPath(res, detail) !== undefined)).toBe(true);
{
const res = await connection.executeQueries(qb, qb);
const res = await ctx.connection.executeQueries(qb, qb);
expect(res.length).toBe(2);
res.map((r) => {
expect(r.rows).toEqual([expected]);
@@ -155,15 +161,21 @@ export function connectionTestSuite(
});
test("introspects", async () => {
const tables = await connection.getIntrospector().getTables({
await makeSchema();
const tables = await ctx.connection.getIntrospector().getTables({
withInternalKyselyTables: false,
});
const clean = tables.map((t) => ({
...t,
columns: t.columns.map((c) => ({
...c,
dataType: undefined,
})),
columns: t.columns
.map((c) => ({
...c,
// ignore data type
dataType: undefined,
// ignore default value if "id"
hasDefaultValue: c.name !== "id" ? c.hasDefaultValue : undefined,
}))
.sort((a, b) => a.name.localeCompare(b.name)),
}));
expect(clean).toEqual([
@@ -176,14 +188,8 @@ export function connectionTestSuite(
dataType: undefined,
isNullable: false,
isAutoIncrementing: true,
hasDefaultValue: false,
},
{
name: "text",
dataType: undefined,
isNullable: true,
isAutoIncrementing: false,
hasDefaultValue: false,
hasDefaultValue: undefined,
comment: undefined,
},
{
name: "json",
@@ -191,25 +197,34 @@ export function connectionTestSuite(
isNullable: true,
isAutoIncrementing: false,
hasDefaultValue: false,
comment: undefined,
},
{
name: "text",
dataType: undefined,
isNullable: true,
isAutoIncrementing: false,
hasDefaultValue: false,
comment: undefined,
},
],
},
]);
expect(await ctx.connection.getIntrospector().getIndices()).toEqual([
{
name: "test_index",
table: "test",
isUnique: false,
columns: [
{
name: "id",
order: 0,
},
],
},
]);
});
expect(await connection.getIntrospector().getIndices()).toEqual([
{
name: "test_index",
table: "test",
isUnique: false,
columns: [
{
name: "id",
order: 0,
},
],
},
]);
});
describe("integration", async () => {

View File

@@ -0,0 +1,33 @@
import { Kysely, PostgresDialect, type PostgresDialectConfig as KyselyPostgresDialectConfig } from "kysely";
import { PostgresIntrospector } from "./PostgresIntrospector";
import { PostgresConnection, plugins } from "./PostgresConnection";
import { customIntrospector } from "../Connection";
import type { Pool } from "pg";
// Kysely's Postgres dialect config with `pool` narrowed from optional/lazy to a
// required concrete Pool — the connection keeps it to call `pool.end()` on close.
export type PostgresDialectConfig = Omit<KyselyPostgresDialectConfig, "pool"> & {
pool: Pool;
};
export class PgPostgresConnection extends PostgresConnection<Pool> {
override name = "pg";
constructor(config: PostgresDialectConfig) {
const kysely = new Kysely({
dialect: customIntrospector(PostgresDialect, PostgresIntrospector, {
excludeTables: [],
}).create(config),
plugins,
});
super(kysely);
this.client = config.pool;
}
override async close(): Promise<void> {
await this.client.end();
}
}
/**
 * Factory for a `pg`-driver backed Postgres connection.
 * @param config - Kysely Postgres dialect config with a required `pool`
 */
export function pg(config: PostgresDialectConfig): PgPostgresConnection {
return new PgPostgresConnection(config);
}

View File

@@ -5,7 +5,7 @@ import {
type SchemaResponse,
type ConnQuery,
type ConnQueryResults,
} from "bknd";
} from "../Connection";
import {
ParseJSONResultsPlugin,
type ColumnDataType,
@@ -20,7 +20,7 @@ export type QB = SelectQueryBuilder<any, any, any>;
export const plugins = [new ParseJSONResultsPlugin()];
export abstract class PostgresConnection extends Connection {
export abstract class PostgresConnection<Client = unknown> extends Connection<Client> {
protected override readonly supported = {
batching: true,
softscans: true,
@@ -68,7 +68,7 @@ export abstract class PostgresConnection extends Connection {
type,
(col: ColumnDefinitionBuilder) => {
if (spec.primary) {
return col.primaryKey();
return col.primaryKey().notNull();
}
if (spec.references) {
return col
@@ -76,7 +76,7 @@ export abstract class PostgresConnection extends Connection {
.onDelete(spec.onDelete ?? "set null")
.onUpdate(spec.onUpdate ?? "no action");
}
return spec.nullable ? col : col.notNull();
return col;
},
];
}

View File

@@ -1,5 +1,5 @@
import { type SchemaMetadata, sql } from "kysely";
import { BaseIntrospector } from "bknd";
import { BaseIntrospector } from "../BaseIntrospector";
type PostgresSchemaSpec = {
name: string;
@@ -102,26 +102,27 @@ export class PostgresIntrospector extends BaseIntrospector {
return tables.map((table) => ({
name: table.name,
isView: table.type === "VIEW",
columns: table.columns.map((col) => {
return {
name: col.name,
dataType: col.type,
isNullable: !col.notnull,
// @todo: check default value on 'nextval' see https://www.postgresql.org/docs/17/datatype-numeric.html#DATATYPE-SERIAL
isAutoIncrementing: true, // just for now
hasDefaultValue: col.dflt != null,
comment: undefined,
};
}),
indices: table.indices.map((index) => ({
name: index.name,
table: table.name,
isUnique: index.sql?.match(/unique/i) != null,
columns: index.columns.map((col) => ({
name: col.name,
order: col.seqno,
})),
columns: table.columns.map((col) => ({
name: col.name,
dataType: col.type,
isNullable: !col.notnull,
isAutoIncrementing: col.dflt?.toLowerCase().includes("nextval") ?? false,
hasDefaultValue: col.dflt != null,
comment: undefined,
})),
indices: table.indices
// filter out db-managed primary key index
.filter((index) => index.name !== `${table.name}_pkey`)
.map((index) => ({
name: index.name,
table: table.name,
isUnique: index.sql?.match(/unique/i) != null,
columns: index.columns.map((col) => ({
name: col.name,
// seqno starts at 1
order: col.seqno - 1,
})),
})),
}));
}
}

View File

@@ -0,0 +1,31 @@
import { Kysely } from "kysely";
import { PostgresIntrospector } from "./PostgresIntrospector";
import { PostgresConnection, plugins } from "./PostgresConnection";
import { customIntrospector } from "../Connection";
import { PostgresJSDialect, type PostgresJSDialectConfig } from "kysely-postgres-js";
export class PostgresJsConnection extends PostgresConnection<PostgresJSDialectConfig["postgres"]> {
override name = "postgres-js";
constructor(config: PostgresJSDialectConfig) {
const kysely = new Kysely({
dialect: customIntrospector(PostgresJSDialect, PostgresIntrospector, {
excludeTables: [],
}).create(config),
plugins,
});
super(kysely);
this.client = config.postgres;
}
override async close(): Promise<void> {
await this.client.end();
}
}
/**
 * Factory for a postgres.js-driver backed Postgres connection.
 * @param config - kysely-postgres-js dialect config (must include `postgres` client)
 */
export function postgresJs(
config: PostgresJSDialectConfig,
): PostgresJsConnection {
return new PostgresJsConnection(config);
}

View File

@@ -1,4 +1,4 @@
import { customIntrospector, type DbFunctions } from "bknd";
import { customIntrospector, type DbFunctions } from "../Connection";
import { Kysely, type Dialect, type KyselyPlugin } from "kysely";
import { plugins, PostgresConnection } from "./PostgresConnection";
import { PostgresIntrospector } from "./PostgresIntrospector";
@@ -6,7 +6,7 @@ import { PostgresIntrospector } from "./PostgresIntrospector";
export type Constructor<T> = new (...args: any[]) => T;
export type CustomPostgresConnection = {
supports?: PostgresConnection["supported"];
supports?: Partial<PostgresConnection["supported"]>;
fn?: Partial<DbFunctions>;
plugins?: KyselyPlugin[];
excludeTables?: string[];

View File

@@ -13,31 +13,43 @@ import { customIntrospector } from "../Connection";
import { SqliteIntrospector } from "./SqliteIntrospector";
import type { Field } from "data/fields/Field";
// @todo: add pragmas
export type SqliteConnectionConfig<
CustomDialect extends Constructor<Dialect> = Constructor<Dialect>,
> = {
excludeTables?: string[];
dialect: CustomDialect;
dialectArgs?: ConstructorParameters<CustomDialect>;
additionalPlugins?: KyselyPlugin[];
customFn?: Partial<DbFunctions>;
};
} & (
| {
dialect: CustomDialect;
dialectArgs?: ConstructorParameters<CustomDialect>;
}
| {
kysely: Kysely<any>;
}
);
export abstract class SqliteConnection<Client = unknown> extends Connection<Client> {
override name = "sqlite";
constructor(config: SqliteConnectionConfig) {
const { excludeTables, dialect, dialectArgs = [], additionalPlugins } = config;
const { excludeTables, additionalPlugins } = config;
const plugins = [new ParseJSONResultsPlugin(), ...(additionalPlugins ?? [])];
const kysely = new Kysely({
dialect: customIntrospector(dialect, SqliteIntrospector, {
excludeTables,
let kysely: Kysely<any>;
if ("dialect" in config) {
kysely = new Kysely({
dialect: customIntrospector(config.dialect, SqliteIntrospector, {
excludeTables,
plugins,
}).create(...(config.dialectArgs ?? [])),
plugins,
}).create(...dialectArgs),
plugins,
});
});
} else if ("kysely" in config) {
kysely = config.kysely;
} else {
throw new Error("Either dialect or kysely must be provided");
}
super(
kysely,

View File

@@ -83,7 +83,7 @@ export class SqliteIntrospector extends BaseIntrospector {
dataType: col.type,
isNullable: !col.notnull,
isAutoIncrementing: col.name === autoIncrementCol,
hasDefaultValue: col.dflt_value != null,
hasDefaultValue: col.name === autoIncrementCol ? true : col.dflt_value != null,
comment: undefined,
};
}) ?? [],

View File

@@ -18,7 +18,7 @@ export type LibsqlClientFns = {
function getClient(clientOrCredentials: Client | LibSqlCredentials | LibsqlClientFns): Client {
if (clientOrCredentials && "url" in clientOrCredentials) {
const { url, authToken } = clientOrCredentials;
return createClient({ url, authToken });
return createClient({ url, authToken }) as unknown as Client;
}
return clientOrCredentials as Client;

View File

@@ -0,0 +1,15 @@
import { describe } from "bun:test";
import { SQLocalConnection } from "./SQLocalConnection";
import { connectionTestSuite } from "data/connection/connection-test-suite";
import { bunTestRunner } from "adapter/bun/test";
import { SQLocalKysely } from "sqlocal/kysely";
// Runs the shared connection test suite against an in-memory SQLocal database.
describe("SQLocalConnection", () => {
connectionTestSuite(bunTestRunner, {
makeConnection: () => ({
connection: new SQLocalConnection(new SQLocalKysely({ databasePath: ":memory:" })),
// nothing to tear down for an in-memory database — presumably SQLocal
// owns its worker lifecycle; TODO confirm no explicit destroy is needed
dispose: async () => {},
}),
// no driver-specific raw-result fields to assert on for this adapter
rawDialectDetails: [],
});
});

View File

@@ -0,0 +1,50 @@
import { Kysely, ParseJSONResultsPlugin } from "kysely";
import { SqliteConnection } from "../SqliteConnection";
import { SqliteIntrospector } from "../SqliteIntrospector";
import type { DB } from "bknd";
import type { SQLocalKysely } from "sqlocal/kysely";
// shared Kysely plugin set: parse JSON-typed columns in query results
const plugins = [new ParseJSONResultsPlugin()];
/**
 * SQLite connection backed by SQLocal's Kysely adapter.
 * SQLocal connects asynchronously; this class hooks the client's `onConnect`
 * callback and has `init()` poll until that hook has fired before the
 * connection is considered ready.
 */
export class SQLocalConnection extends SqliteConnection<SQLocalKysely> {
// flipped to true by the client's onConnect hook; awaited in init()
private connected: boolean = false;
constructor(client: SQLocalKysely) {
// @ts-expect-error - config is protected
client.config.onConnect = () => {
// we need to listen for the connection, it will be awaited in init()
this.connected = true;
};
super({
kysely: new Kysely<any>({
dialect: {
// reuse SQLocal's dialect, but substitute the project introspector
...client.dialect,
createIntrospector: (db: Kysely<DB>) => {
return new SqliteIntrospector(db as any, {
plugins,
});
},
},
plugins,
}) as any,
});
this.client = client;
}
// Waits for the onConnect signal: up to 100 polls at 5ms each (~500ms max),
// then fails hard if the database never reported a connection.
override async init() {
if (this.initialized) return;
let tries = 0;
while (!this.connected && tries < 100) {
tries++;
await new Promise((resolve) => setTimeout(resolve, 5));
}
if (!this.connected) {
throw new Error("Failed to connect to SQLite database");
}
this.initialized = true;
}
}
/**
 * Factory wrapping an existing SQLocalKysely instance in a bknd Connection.
 */
export function sqlocal(instance: InstanceType<typeof SQLocalKysely>): SQLocalConnection {
return new SQLocalConnection(instance);
}

View File

@@ -103,6 +103,7 @@ export class Repository<TBD extends object = DefaultDB, TB extends keyof TBD = a
validated.with = options.with;
}
// add explicit joins. Implicit joins are added in `where` builder
if (options.join && options.join.length > 0) {
for (const entry of options.join) {
const related = this.em.relationOf(entity.name, entry);
@@ -127,12 +128,28 @@ export class Repository<TBD extends object = DefaultDB, TB extends keyof TBD = a
const invalid = WhereBuilder.getPropertyNames(options.where).filter((field) => {
if (field.includes(".")) {
const [alias, prop] = field.split(".") as [string, string];
if (!aliases.includes(alias)) {
// check aliases first (added joins)
if (aliases.includes(alias)) {
this.checkIndex(alias, prop, "where");
return !this.em.entity(alias).getField(prop);
}
// check if alias (entity) exists
if (!this.em.hasEntity(alias)) {
return true;
}
// check related fields for auto join
const related = this.em.relationOf(entity.name, alias);
if (related) {
const other = related.other(entity);
if (other.entity.getField(prop)) {
// if related field is found, add join to validated options
validated.join?.push(alias);
this.checkIndex(alias, prop, "where");
return false;
}
}
this.checkIndex(alias, prop, "where");
return !this.em.entity(alias).getField(prop);
return true;
}
this.checkIndex(entity.name, field, "where");

View File

@@ -4,6 +4,7 @@ import type { KyselyJsonFrom } from "data/relations/EntityRelation";
import type { RepoQuery } from "data/server/query";
import { InvalidSearchParamsException } from "data/errors";
import type { Entity, EntityManager, RepositoryQB } from "data/entities";
import { $console } from "bknd/utils";
export class WithBuilder {
static addClause(
@@ -13,7 +14,7 @@ export class WithBuilder {
withs: RepoQuery["with"],
) {
if (!withs || !isObject(withs)) {
console.warn(`'withs' undefined or invalid, given: ${JSON.stringify(withs)}`);
$console.warn(`'withs' undefined or invalid, given: ${JSON.stringify(withs)}`);
return qb;
}
@@ -37,9 +38,7 @@ export class WithBuilder {
let subQuery = relation.buildWith(entity, ref)(eb);
if (query) {
subQuery = em.repo(other.entity).addOptionsToQueryBuilder(subQuery, query as any, {
ignore: ["with", "join", cardinality === 1 ? "limit" : undefined].filter(
Boolean,
) as any,
ignore: ["with", cardinality === 1 ? "limit" : undefined].filter(Boolean) as any,
});
}
@@ -57,7 +56,7 @@ export class WithBuilder {
static validateWiths(em: EntityManager<any>, entity: string, withs: RepoQuery["with"]) {
let depth = 0;
if (!withs || !isObject(withs)) {
withs && console.warn(`'withs' invalid, given: ${JSON.stringify(withs)}`);
withs && $console.warn(`'withs' invalid, given: ${JSON.stringify(withs)}`);
return depth;
}

View File

@@ -26,7 +26,12 @@ export class JsonSchemaField<
constructor(name: string, config: Partial<JsonSchemaFieldConfig>) {
super(name, config);
this.validator = new Validator({ ...this.getJsonSchema() });
// make sure to hand over clean json
const schema = this.getJsonSchema();
this.validator = new Validator(
typeof schema === "object" ? JSON.parse(JSON.stringify(schema)) : {},
);
}
protected getSchema() {

View File

@@ -52,7 +52,7 @@ export class NumberField<Required extends true | false = false> extends Field<
switch (context) {
case "submit":
return Number.parseInt(value);
return Number.parseInt(value, 10);
}
return value;

View File

@@ -28,7 +28,7 @@ export function getChangeSet(
const value = _value === "" ? null : _value;
// normalize to null if undefined
const newValue = field.getValue(value, "submit") || null;
const newValue = field.getValue(value, "submit") ?? null;
// @todo: add typing for "action"
if (action === "create" || newValue !== data[key]) {
acc[key] = newValue;

View File

@@ -289,7 +289,7 @@ class EntityManagerPrototype<Entities extends Record<string, Entity>> extends En
super(Object.values(__entities), new DummyConnection(), relations, indices);
}
withConnection(connection: Connection): EntityManager<Schema<Entities>> {
withConnection(connection: Connection): EntityManager<Schemas<Entities>> {
return new EntityManager(this.entities, connection, this.relations.all, this.indices);
}
}

View File

@@ -1,8 +1,9 @@
import { test, describe, expect } from "bun:test";
import { test, describe, expect, beforeAll, afterAll } from "bun:test";
import * as q from "./query";
import { parse as $parse, type ParseOptions } from "bknd/utils";
import type { PrimaryFieldType } from "modules";
import type { Generated } from "kysely";
import { disableConsoleLog, enableConsoleLog } from "core/utils/test";
const parse = (v: unknown, o: ParseOptions = {}) =>
$parse(q.repoQuery, v, {
@@ -15,6 +16,9 @@ const decode = (input: any, output: any) => {
expect(parse(input)).toEqual(output);
};
beforeAll(() => disableConsoleLog());
afterAll(() => enableConsoleLog());
describe("server/query", () => {
test("limit & offset", () => {
//expect(() => parse({ limit: false })).toThrow();

View File

@@ -132,6 +132,8 @@ export type * from "data/entities/Entity";
export type { EntityManager } from "data/entities/EntityManager";
export type { SchemaManager } from "data/schema/SchemaManager";
export type * from "data/entities";
// data connection
export {
BaseIntrospector,
Connection,
@@ -144,9 +146,32 @@ export {
type ConnQuery,
type ConnQueryResults,
} from "data/connection";
// data sqlite
export { SqliteConnection } from "data/connection/sqlite/SqliteConnection";
export { SqliteIntrospector } from "data/connection/sqlite/SqliteIntrospector";
export { SqliteLocalConnection } from "data/connection/sqlite/SqliteLocalConnection";
// data sqlocal
export { SQLocalConnection, sqlocal } from "data/connection/sqlite/sqlocal/SQLocalConnection";
// data postgres
export {
pg,
PgPostgresConnection,
} from "data/connection/postgres/PgPostgresConnection";
export { PostgresIntrospector } from "data/connection/postgres/PostgresIntrospector";
export { PostgresConnection } from "data/connection/postgres/PostgresConnection";
export {
postgresJs,
PostgresJsConnection,
} from "data/connection/postgres/PostgresJsConnection";
export {
createCustomPostgresConnection,
type CustomPostgresConnection,
} from "data/connection/postgres/custom";
// data prototype
export {
text,
number,

View File

@@ -71,11 +71,12 @@ export class MediaApi extends ModuleApi<MediaApiOptions> {
}
protected uploadFile<T extends FileUploadedEventData>(
body: File | Blob | ReadableStream | Buffer<ArrayBufferLike>,
body: BodyInit,
opts?: {
filename?: string;
path?: TInput;
_init?: Omit<RequestInit, "body">;
query?: Record<string, any>;
},
): FetchPromise<ResponseObject<T>> {
const headers = {
@@ -102,14 +103,22 @@ export class MediaApi extends ModuleApi<MediaApiOptions> {
headers,
};
if (opts?.path) {
return this.post(opts.path, body, init);
return this.request<T>(opts.path, opts?.query, {
...init,
body,
method: "POST",
});
}
if (!name || name.length === 0) {
throw new Error("Invalid filename");
}
return this.post<T>(opts?.path ?? ["upload", name], body, init);
return this.request<T>(opts?.path ?? ["upload", name], opts?.query, {
...init,
body,
method: "POST",
});
}
async upload<T extends FileUploadedEventData>(
@@ -119,6 +128,7 @@ export class MediaApi extends ModuleApi<MediaApiOptions> {
_init?: Omit<RequestInit, "body">;
path?: TInput;
fetcher?: ApiFetcher;
query?: Record<string, any>;
} = {},
) {
if (item instanceof Request || typeof item === "string") {
@@ -144,7 +154,7 @@ export class MediaApi extends ModuleApi<MediaApiOptions> {
});
}
return this.uploadFile<T>(item, opts);
return this.uploadFile<T>(item as BodyInit, opts);
}
async uploadToEntity(
@@ -155,11 +165,14 @@ export class MediaApi extends ModuleApi<MediaApiOptions> {
opts?: {
_init?: Omit<RequestInit, "body">;
fetcher?: typeof fetch;
overwrite?: boolean;
},
): Promise<ResponseObject<FileUploadedEventData & { result: DB["media"] }>> {
const query = opts?.overwrite !== undefined ? { overwrite: opts.overwrite } : undefined;
return this.upload(item, {
...opts,
path: ["entity", entity, id, field],
query,
});
}

View File

@@ -5,7 +5,7 @@ import type { BunFile } from "bun";
export async function adapterTestSuite(
testRunner: TestRunner,
adapter: StorageAdapter,
_adapter: StorageAdapter | (() => StorageAdapter),
file: File | BunFile,
opts?: {
retries?: number;
@@ -25,7 +25,12 @@ export async function adapterTestSuite(
const _filename = randomString(10);
const filename = `${_filename}.png`;
const getAdapter = (
typeof _adapter === "function" ? _adapter : () => _adapter
) as () => StorageAdapter;
await test("puts an object", async () => {
const adapter = getAdapter();
objects = (await adapter.listObjects()).length;
const result = await adapter.putObject(filename, file as unknown as File);
expect(result).toBeDefined();
@@ -38,6 +43,7 @@ export async function adapterTestSuite(
});
await test("lists objects", async () => {
const adapter = getAdapter();
const length = await retry(
() => adapter.listObjects().then((res) => res.length),
(length) => length > objects,
@@ -49,10 +55,12 @@ export async function adapterTestSuite(
});
await test("file exists", async () => {
const adapter = getAdapter();
expect(await adapter.objectExists(filename)).toBe(true);
});
await test("gets an object", async () => {
const adapter = getAdapter();
const res = await adapter.getObject(filename, new Headers());
expect(res.ok).toBe(true);
expect(res.headers.get("Accept-Ranges")).toBe("bytes");
@@ -62,6 +70,7 @@ export async function adapterTestSuite(
if (options.testRange) {
await test("handles range request - partial content", async () => {
const headers = new Headers({ Range: "bytes=0-99" });
const adapter = getAdapter();
const res = await adapter.getObject(filename, headers);
expect(res.status).toBe(206); // Partial Content
expect(/^bytes 0-99\/\d+$/.test(res.headers.get("Content-Range")!)).toBe(true);
@@ -70,6 +79,7 @@ export async function adapterTestSuite(
await test("handles range request - suffix range", async () => {
const headers = new Headers({ Range: "bytes=-100" });
const adapter = getAdapter();
const res = await adapter.getObject(filename, headers);
expect(res.status).toBe(206); // Partial Content
expect(/^bytes \d+-\d+\/\d+$/.test(res.headers.get("Content-Range")!)).toBe(true);
@@ -77,6 +87,7 @@ export async function adapterTestSuite(
await test("handles invalid range request", async () => {
const headers = new Headers({ Range: "bytes=invalid" });
const adapter = getAdapter();
const res = await adapter.getObject(filename, headers);
expect(res.status).toBe(416); // Range Not Satisfiable
expect(/^bytes \*\/\d+$/.test(res.headers.get("Content-Range")!)).toBe(true);
@@ -84,6 +95,7 @@ export async function adapterTestSuite(
}
await test("gets object meta", async () => {
const adapter = getAdapter();
expect(await adapter.getObjectMeta(filename)).toEqual({
type: file.type, // image/png
size: file.size,
@@ -91,6 +103,7 @@ export async function adapterTestSuite(
});
await test("deletes an object", async () => {
const adapter = getAdapter();
expect(await adapter.deleteObject(filename)).toBeUndefined();
if (opts?.skipExistsAfterDelete !== true) {

View File

@@ -10,16 +10,19 @@ export type CodeMode<AdapterConfig extends BkndConfig> = AdapterConfig extends B
? BkndModeConfig<Args, AdapterConfig>
: never;
export function code<Args>(config: BkndCodeModeConfig<Args>): BkndConfig<Args> {
export function code<
Config extends BkndConfig,
Args = Config extends BkndConfig<infer A> ? A : unknown,
>(codeConfig: CodeMode<Config>): BkndConfig<Args> {
return {
...config,
...codeConfig,
app: async (args) => {
const {
config: appConfig,
plugins,
isProd,
syncSchemaOptions,
} = await makeModeConfig(config, args);
} = await makeModeConfig(codeConfig, args);
if (appConfig?.options?.mode && appConfig?.options?.mode !== "code") {
$console.warn("You should not set a different mode than `db` when using code mode");

View File

@@ -1,6 +1,6 @@
import type { BkndConfig } from "bknd/adapter";
import { makeModeConfig, type BkndModeConfig } from "./shared";
import { getDefaultConfig, type MaybePromise, type ModuleConfigs, type Merge } from "bknd";
import { getDefaultConfig, type MaybePromise, type Merge } from "bknd";
import type { DbModuleManager } from "modules/db/DbModuleManager";
import { invariant, $console } from "bknd/utils";
@@ -9,7 +9,7 @@ export type BkndHybridModeOptions = {
* Reader function to read the configuration from the file system.
* This is required for hybrid mode to work.
*/
reader?: (path: string) => MaybePromise<string>;
reader?: (path: string) => MaybePromise<string | object>;
/**
* Provided secrets to be merged into the configuration
*/
@@ -23,42 +23,36 @@ export type HybridMode<AdapterConfig extends BkndConfig> = AdapterConfig extends
? BkndModeConfig<Args, Merge<BkndHybridModeOptions & AdapterConfig>>
: never;
export function hybrid<Args>({
configFilePath = "bknd-config.json",
...rest
}: HybridBkndConfig<Args>): BkndConfig<Args> {
export function hybrid<
Config extends BkndConfig,
Args = Config extends BkndConfig<infer A> ? A : unknown,
>(hybridConfig: HybridMode<Config>): BkndConfig<Args> {
return {
...rest,
config: undefined,
...hybridConfig,
app: async (args) => {
const {
config: appConfig,
isProd,
plugins,
syncSchemaOptions,
} = await makeModeConfig(
{
...rest,
configFilePath,
},
args,
);
} = await makeModeConfig(hybridConfig, args);
const configFilePath = appConfig.configFilePath ?? "bknd-config.json";
if (appConfig?.options?.mode && appConfig?.options?.mode !== "db") {
$console.warn("You should not set a different mode than `db` when using hybrid mode");
}
invariant(
typeof appConfig.reader === "function",
"You must set the `reader` option when using hybrid mode",
"You must set a `reader` option when using hybrid mode",
);
let fileConfig: ModuleConfigs;
try {
fileConfig = JSON.parse(await appConfig.reader!(configFilePath)) as ModuleConfigs;
} catch (e) {
const defaultConfig = (appConfig.config ?? getDefaultConfig()) as ModuleConfigs;
await appConfig.writer!(configFilePath, JSON.stringify(defaultConfig, null, 2));
fileConfig = defaultConfig;
const fileContent = await appConfig.reader?.(configFilePath);
let fileConfig = typeof fileContent === "string" ? JSON.parse(fileContent) : fileContent;
if (!fileConfig) {
$console.warn("No config found, using default config");
fileConfig = getDefaultConfig();
await appConfig.writer?.(configFilePath, JSON.stringify(fileConfig, null, 2));
}
return {
@@ -80,6 +74,13 @@ export function hybrid<Args>({
skipValidation: isProd,
// secrets are required for hybrid mode
secrets: appConfig.secrets,
onModulesBuilt: async (ctx) => {
if (ctx.flags.sync_required && !isProd && syncSchemaOptions.force) {
$console.log("[hybrid] syncing schema");
await ctx.em.schema().sync(syncSchemaOptions);
}
await appConfig?.options?.manager?.onModulesBuilt?.(ctx);
},
...appConfig?.options?.manager,
},
},

View File

@@ -1,7 +1,7 @@
import type { AppPlugin, BkndConfig, MaybePromise, Merge } from "bknd";
import { syncTypes, syncConfig } from "bknd/plugins";
import { syncSecrets } from "plugins/dev/sync-secrets.plugin";
import { invariant, $console } from "bknd/utils";
import { $console } from "bknd/utils";
export type BkndModeOptions = {
/**
@@ -56,6 +56,14 @@ export type BkndModeConfig<Args = any, Additional = {}> = BkndConfig<
Merge<BkndModeOptions & Additional>
>;
function _isProd() {
try {
return process.env.NODE_ENV === "production";
} catch (_e) {
return false;
}
}
export async function makeModeConfig<
Args = any,
Config extends BkndModeConfig<Args> = BkndModeConfig<Args>,
@@ -69,25 +77,24 @@ export async function makeModeConfig<
if (typeof config.isProduction !== "boolean") {
$console.warn(
"You should set `isProduction` option when using managed modes to prevent accidental issues",
"You should set `isProduction` option when using managed modes to prevent accidental issues with writing plugins and syncing schema. As fallback, it is set to",
_isProd(),
);
}
invariant(
typeof config.writer === "function",
"You must set the `writer` option when using managed modes",
);
let needsWriter = false;
const { typesFilePath, configFilePath, writer, syncSecrets: syncSecretsOptions } = config;
const isProd = config.isProduction;
const plugins = appConfig?.options?.plugins ?? ([] as AppPlugin[]);
const isProd = config.isProduction ?? _isProd();
const plugins = config?.options?.plugins ?? ([] as AppPlugin[]);
const syncFallback = typeof config.syncSchema === "boolean" ? config.syncSchema : !isProd;
const syncSchemaOptions =
typeof config.syncSchema === "object"
? config.syncSchema
: {
force: config.syncSchema !== false,
drop: true,
force: syncFallback,
drop: syncFallback,
};
if (!isProd) {
@@ -95,6 +102,7 @@ export async function makeModeConfig<
if (plugins.some((p) => p.name === "bknd-sync-types")) {
throw new Error("You have to unregister the `syncTypes` plugin");
}
needsWriter = true;
plugins.push(
syncTypes({
enabled: true,
@@ -114,6 +122,7 @@ export async function makeModeConfig<
if (plugins.some((p) => p.name === "bknd-sync-config")) {
throw new Error("You have to unregister the `syncConfig` plugin");
}
needsWriter = true;
plugins.push(
syncConfig({
enabled: true,
@@ -142,6 +151,7 @@ export async function makeModeConfig<
.join(".");
}
needsWriter = true;
plugins.push(
syncSecrets({
enabled: true,
@@ -174,6 +184,10 @@ export async function makeModeConfig<
}
}
if (needsWriter && typeof config.writer !== "function") {
$console.warn("You must set a `writer` function, attempts to write will fail");
}
return {
config,
isProd,

View File

@@ -87,7 +87,7 @@ export type ModuleManagerOptions = {
verbosity?: Verbosity;
};
const debug_modules = env("modules_debug");
const debug_modules = env("modules_debug", false);
abstract class ModuleManagerEvent<A = {}> extends Event<{ ctx: ModuleBuildContext } & A> {}
export class ModuleManagerConfigUpdateEvent<
@@ -223,7 +223,7 @@ export class ModuleManager {
}
extractSecrets() {
const moduleConfigs = structuredClone(this.configs());
const moduleConfigs = JSON.parse(JSON.stringify(this.configs()));
const secrets = { ...this.options?.secrets };
const extractedKeys: string[] = [];

View File

@@ -1,4 +1,4 @@
import { mark, stripMark, $console, s, SecretSchema, setPath } from "bknd/utils";
import { mark, stripMark, $console, s, setPath } from "bknd/utils";
import { BkndError } from "core/errors";
import * as $diff from "core/object/diff";
import type { Connection } from "data/connection";
@@ -290,13 +290,12 @@ export class DbModuleManager extends ModuleManager {
updated_at: new Date(),
});
}
} else if (e instanceof TransformPersistFailedException) {
$console.error("ModuleManager: Cannot save invalid config");
this.revertModules();
throw e;
} else {
if (e instanceof TransformPersistFailedException) {
$console.error("ModuleManager: Cannot save invalid config");
}
$console.error("ModuleManager: Aborting");
this.revertModules();
await this.revertModules();
throw e;
}
}

View File

@@ -33,3 +33,5 @@ export const schemaRead = new Permission(
);
export const build = new Permission("system.build");
export const mcp = new Permission("system.mcp");
export const info = new Permission("system.info");
export const openapi = new Permission("system.openapi");

View File

@@ -105,7 +105,10 @@ export class AppServer extends Module<AppServerConfig> {
if (err instanceof Error) {
if (isDebug()) {
return c.json({ error: err.message, stack: err.stack }, 500);
return c.json(
{ error: err.message, stack: err.stack?.split("\n").map((line) => line.trim()) },
500,
);
}
}

View File

@@ -1,5 +1,3 @@
/// <reference types="@cloudflare/workers-types" />
import type { App } from "App";
import {
datetimeStringLocal,
@@ -125,7 +123,7 @@ export class SystemController extends Controller {
private registerConfigController(client: Hono<any>): void {
const { permission } = this.middlewares;
// don't add auth again, it's already added in getController
const hono = this.create(); /* .use(permission(SystemPermissions.configRead)); */
const hono = this.create();
if (!this.app.isReadOnly()) {
const manager = this.app.modules as DbModuleManager;
@@ -317,6 +315,11 @@ export class SystemController extends Controller {
summary: "Get the config for a module",
tags: ["system"],
}),
permission(SystemPermissions.configRead, {
context: (c) => ({
module: c.req.param("module"),
}),
}),
mcpTool("system_config", {
annotations: {
readOnlyHint: true,
@@ -354,7 +357,7 @@ export class SystemController extends Controller {
override getController() {
const { permission, auth } = this.middlewares;
const hono = this.create().use(auth());
const hono = this.create().use(auth()).use(permission(SystemPermissions.accessApi, {}));
this.registerConfigController(hono);
@@ -429,6 +432,9 @@ export class SystemController extends Controller {
hono.get(
"/permissions",
permission(SystemPermissions.schemaRead, {
context: (_c) => ({ module: "auth" }),
}),
describeRoute({
summary: "Get the permissions",
tags: ["system"],
@@ -441,6 +447,7 @@ export class SystemController extends Controller {
hono.post(
"/build",
permission(SystemPermissions.build, {}),
describeRoute({
summary: "Build the app",
tags: ["system"],
@@ -471,6 +478,7 @@ export class SystemController extends Controller {
hono.get(
"/info",
permission(SystemPermissions.info, {}),
mcpTool("system_info"),
describeRoute({
summary: "Get the server info",
@@ -504,6 +512,7 @@ export class SystemController extends Controller {
hono.get(
"/openapi.json",
permission(SystemPermissions.openapi, {}),
openAPISpecs(this.ctx.server, {
info: {
title: "bknd API",
@@ -511,7 +520,11 @@ export class SystemController extends Controller {
},
}),
);
hono.get("/swagger", swaggerUI({ url: "/api/system/openapi.json" }));
hono.get(
"/swagger",
permission(SystemPermissions.openapi, {}),
swaggerUI({ url: "/api/system/openapi.json" }),
);
return hono;
}

View File

@@ -0,0 +1,683 @@
import { afterAll, beforeAll, describe, expect, mock, test, setSystemTime } from "bun:test";
import { emailOTP } from "./email-otp.plugin";
import { createApp } from "core/test/utils";
import { disableConsoleLog, enableConsoleLog } from "core/utils/test";
// Silence console output for the whole file; restored after the suite runs.
beforeAll(disableConsoleLog);
afterAll(enableConsoleLog);
/**
 * Integration tests for the emailOTP plugin: plugin preconditions (auth on,
 * email driver present), protection of the OTP entity from direct mutation,
 * code issuance and verification for login/register, expiry, single-use
 * enforcement, action separation, and invalidation of previously issued codes.
 */
describe("otp plugin", () => {
   test("should not work if auth is not enabled", async () => {
      const app = createApp({
         options: {
            plugins: [emailOTP({ showActualErrors: true })],
         },
      });
      await app.build();
      // Routes are never mounted when the plugin's auth invariant fails.
      const res = await app.server.request("/api/auth/otp/login", {
         method: "POST",
         headers: {
            "Content-Type": "application/json",
         },
         body: JSON.stringify({ email: "test@test.com" }),
      });
      expect(res.status).toBe(404);
   });
   test("should require email driver if sendEmail is true", async () => {
      // No email driver registered + sendEmail default (true) -> routes unavailable.
      const app = createApp({
         config: {
            auth: {
               enabled: true,
            },
         },
         options: {
            plugins: [emailOTP()],
         },
      });
      await app.build();
      const res = await app.server.request("/api/auth/otp/login", {
         method: "POST",
         headers: {
            "Content-Type": "application/json",
         },
         body: JSON.stringify({ email: "test@test.com" }),
      });
      expect(res.status).toBe(404);
      {
         // With sendEmail: false the driver is not required and issuance succeeds.
         const app = createApp({
            config: {
               auth: {
                  enabled: true,
               },
            },
            options: {
               plugins: [emailOTP({ sendEmail: false })],
            },
         });
         await app.build();
         const res = await app.server.request("/api/auth/otp/register", {
            method: "POST",
            headers: {
               "Content-Type": "application/json",
            },
            body: JSON.stringify({ email: "test@test.com" }),
         });
         expect(res.status).toBe(201);
      }
   });
   test("should prevent mutations of the OTP entity", async () => {
      const app = createApp({
         config: {
            auth: {
               enabled: true,
            },
         },
         options: {
            drivers: {
               email: {
                  send: async () => {},
               },
            },
            plugins: [emailOTP({ showActualErrors: true })],
         },
      });
      await app.build();
      const payload = {
         email: "test@test.com",
         code: "123456",
         action: "login",
         created_at: new Date(),
         expires_at: new Date(Date.now() + 1000 * 60 * 60 * 24),
         used_at: null,
      };
      // Direct mutator access must be blocked by the plugin's event listeners.
      // NOTE(review): this rejects-assertion is not awaited — consider `await expect(...)`.
      expect(app.em.mutator("users_otp").insertOne(payload)).rejects.toThrow();
      // Same via the public data API: create must not succeed.
      expect(
         await app
            .getApi()
            .data.createOne("users_otp", payload)
            .then((r) => r.ok),
      ).toBe(false);
   });
   test("should generate a token", async () => {
      const called = mock(() => null);
      const app = createApp({
         config: {
            auth: {
               enabled: true,
            },
         },
         options: {
            plugins: [emailOTP({ showActualErrors: true })],
            drivers: {
               email: {
                  send: async (to) => {
                     expect(to).toBe("test@test.com");
                     called();
                  },
               },
            },
            seed: async (ctx) => {
               await ctx.app.createUser({ email: "test@test.com", password: "12345678" });
            },
         },
      });
      await app.build();
      const res = await app.server.request("/api/auth/otp/login", {
         method: "POST",
         headers: {
            "Content-Type": "application/json",
         },
         body: JSON.stringify({ email: "test@test.com" }),
      });
      // Issuance responds 201 and echoes metadata only (never the code itself).
      expect(res.status).toBe(201);
      const data = (await res.json()) as any;
      expect(data.sent).toBe(true);
      expect(data.data.email).toBe("test@test.com");
      expect(data.data.action).toBe("login");
      expect(data.data.expires_at).toBeDefined();
      {
         // The stored code is a 6-character numeric string.
         const { data } = await app.em.fork().repo("users_otp").findOne({ email: "test@test.com" });
         expect(data?.code).toBeDefined();
         expect(data?.code?.length).toBe(6);
         expect(data?.code?.split("").every((char: string) => Number.isInteger(Number(char)))).toBe(
            true,
         );
         expect(data?.email).toBe("test@test.com");
      }
      expect(called).toHaveBeenCalled();
   });
   test("should login with a code", async () => {
      let code = "";
      const app = createApp({
         config: {
            auth: {
               enabled: true,
               jwt: {
                  secret: "test",
               },
            },
         },
         options: {
            plugins: [
               emailOTP({
                  showActualErrors: true,
                  // Emit the bare code as the email body so the test can capture it.
                  generateEmail: (otp) => ({ subject: "test", body: otp.code }),
               }),
            ],
            drivers: {
               email: {
                  send: async (to, _subject, body) => {
                     expect(to).toBe("test@test.com");
                     code = String(body);
                  },
               },
            },
            seed: async (ctx) => {
               await ctx.app.createUser({ email: "test@test.com", password: "12345678" });
            },
         },
      });
      await app.build();
      // Step 1: request a code.
      await app.server.request("/api/auth/otp/login", {
         method: "POST",
         headers: {
            "Content-Type": "application/json",
         },
         body: JSON.stringify({ email: "test@test.com" }),
      });
      {
         // Step 2: exchange the emailed code for a session (cookie + token).
         const res = await app.server.request("/api/auth/otp/login", {
            method: "POST",
            headers: {
               "Content-Type": "application/json",
            },
            body: JSON.stringify({ email: "test@test.com", code }),
         });
         expect(res.status).toBe(200);
         expect(res.headers.get("set-cookie")).toBeDefined();
         const userData = (await res.json()) as any;
         expect(userData.user.email).toBe("test@test.com");
         expect(userData.token).toBeDefined();
      }
   });
   test("should register with a code", async () => {
      let code = "";
      const app = createApp({
         config: {
            auth: {
               enabled: true,
               jwt: {
                  secret: "test",
               },
            },
         },
         options: {
            plugins: [
               emailOTP({
                  showActualErrors: true,
                  generateEmail: (otp) => ({ subject: "test", body: otp.code }),
               }),
            ],
            drivers: {
               email: {
                  send: async (to, _subject, body) => {
                     expect(to).toBe("test@test.com");
                     code = String(body);
                  },
               },
            },
         },
      });
      await app.build();
      // Step 1: request a registration code (no user seeded on purpose).
      const res = await app.server.request("/api/auth/otp/register", {
         method: "POST",
         headers: {
            "Content-Type": "application/json",
         },
         body: JSON.stringify({ email: "test@test.com" }),
      });
      const data = (await res.json()) as any;
      expect(data.sent).toBe(true);
      expect(data.data.email).toBe("test@test.com");
      expect(data.data.action).toBe("register");
      expect(data.data.expires_at).toBeDefined();
      {
         // Step 2: verify the code — creates the user and signs them in.
         const res = await app.server.request("/api/auth/otp/register", {
            method: "POST",
            headers: {
               "Content-Type": "application/json",
            },
            body: JSON.stringify({ email: "test@test.com", code }),
         });
         expect(res.status).toBe(200);
         expect(res.headers.get("set-cookie")).toBeDefined();
         const userData = (await res.json()) as any;
         expect(userData.user.email).toBe("test@test.com");
         expect(userData.token).toBeDefined();
      }
   });
   test("should not send email if sendEmail is false", async () => {
      const called = mock(() => null);
      const app = createApp({
         config: {
            auth: {
               enabled: true,
            },
         },
         options: {
            plugins: [emailOTP({ sendEmail: false })],
            drivers: {
               email: {
                  send: async () => {
                     called();
                  },
               },
            },
         },
      });
      await app.build();
      const res = await app.server.request("/api/auth/otp/register", {
         method: "POST",
         headers: {
            "Content-Type": "application/json",
         },
         body: JSON.stringify({ email: "test@test.com" }),
      });
      // Code is still issued (201) but the driver must never be invoked.
      expect(res.status).toBe(201);
      expect(called).not.toHaveBeenCalled();
   });
   test("should reject invalid codes", async () => {
      const app = createApp({
         config: {
            auth: {
               enabled: true,
               jwt: {
                  secret: "test",
               },
            },
         },
         options: {
            plugins: [
               emailOTP({
                  showActualErrors: true,
                  generateEmail: (otp) => ({ subject: "test", body: otp.code }),
               }),
            ],
            drivers: {
               email: {
                  send: async () => {},
               },
            },
            seed: async (ctx) => {
               await ctx.app.createUser({ email: "test@test.com", password: "12345678" });
            },
         },
      });
      await app.build();
      // First send a code
      await app.server.request("/api/auth/otp/login", {
         method: "POST",
         headers: {
            "Content-Type": "application/json",
         },
         body: JSON.stringify({ email: "test@test.com" }),
      });
      // Try to use an invalid code
      const res = await app.server.request("/api/auth/otp/login", {
         method: "POST",
         headers: {
            "Content-Type": "application/json",
         },
         body: JSON.stringify({ email: "test@test.com", code: "999999" }),
      });
      expect(res.status).toBe(400);
      const error = await res.json();
      expect(error).toBeDefined();
   });
   test("should reject code reuse", async () => {
      let code = "";
      const app = createApp({
         config: {
            auth: {
               enabled: true,
               jwt: {
                  secret: "test",
               },
            },
         },
         options: {
            plugins: [
               emailOTP({
                  showActualErrors: true,
                  generateEmail: (otp) => ({ subject: "test", body: otp.code }),
               }),
            ],
            drivers: {
               email: {
                  send: async (_to, _subject, body) => {
                     code = String(body);
                  },
               },
            },
            seed: async (ctx) => {
               await ctx.app.createUser({ email: "test@test.com", password: "12345678" });
            },
         },
      });
      await app.build();
      // Send a code
      await app.server.request("/api/auth/otp/login", {
         method: "POST",
         headers: {
            "Content-Type": "application/json",
         },
         body: JSON.stringify({ email: "test@test.com" }),
      });
      // Use the code successfully
      {
         const res = await app.server.request("/api/auth/otp/login", {
            method: "POST",
            headers: {
               "Content-Type": "application/json",
            },
            body: JSON.stringify({ email: "test@test.com", code }),
         });
         expect(res.status).toBe(200);
      }
      // Try to use the same code again
      {
         const res = await app.server.request("/api/auth/otp/login", {
            method: "POST",
            headers: {
               "Content-Type": "application/json",
            },
            body: JSON.stringify({ email: "test@test.com", code }),
         });
         expect(res.status).toBe(400);
         const error = await res.json();
         expect(error).toBeDefined();
      }
   });
   test("should reject expired codes", async () => {
      // Set a fixed system time
      const baseTime = Date.now();
      setSystemTime(new Date(baseTime));
      try {
         const app = createApp({
            config: {
               auth: {
                  enabled: true,
                  jwt: {
                     secret: "test",
                  },
               },
            },
            options: {
               plugins: [
                  emailOTP({
                     showActualErrors: true,
                     ttl: 1, // 1 second TTL
                     generateEmail: (otp) => ({ subject: "test", body: otp.code }),
                  }),
               ],
               drivers: {
                  email: {
                     send: async () => {},
                  },
               },
               seed: async (ctx) => {
                  await ctx.app.createUser({ email: "test@test.com", password: "12345678" });
               },
            },
         });
         await app.build();
         // Send a code
         const sendRes = await app.server.request("/api/auth/otp/login", {
            method: "POST",
            headers: {
               "Content-Type": "application/json",
            },
            body: JSON.stringify({ email: "test@test.com" }),
         });
         expect(sendRes.status).toBe(201);
         // Get the code from the database
         const { data: otpData } = await app.em
            .fork()
            .repo("users_otp")
            .findOne({ email: "test@test.com" });
         expect(otpData?.code).toBeDefined();
         // Advance system time by more than 1 second to expire the code
         setSystemTime(new Date(baseTime + 1100));
         // Try to use the expired code
         const res = await app.server.request("/api/auth/otp/login", {
            method: "POST",
            headers: {
               "Content-Type": "application/json",
            },
            body: JSON.stringify({ email: "test@test.com", code: otpData?.code }),
         });
         expect(res.status).toBe(400);
         const error = await res.json();
         expect(error).toBeDefined();
      } finally {
         // Reset system time
         setSystemTime();
      }
   });
   test("should reject codes with different actions", async () => {
      let loginCode = "";
      let registerCode = "";
      const app = createApp({
         config: {
            auth: {
               enabled: true,
               jwt: {
                  secret: "test",
               },
            },
         },
         options: {
            plugins: [
               emailOTP({
                  showActualErrors: true,
                  generateEmail: (otp) => ({ subject: "test", body: otp.code }),
               }),
            ],
            drivers: {
               email: {
                  send: async () => {},
               },
            },
            seed: async (ctx) => {
               await ctx.app.createUser({ email: "test@test.com", password: "12345678" });
            },
         },
      });
      await app.build();
      // Send a login code
      await app.server.request("/api/auth/otp/login", {
         method: "POST",
         headers: {
            "Content-Type": "application/json",
         },
         body: JSON.stringify({ email: "test@test.com" }),
      });
      // Get the login code
      const { data: loginOtp } = await app
         .getApi()
         .data.readOneBy("users_otp", { where: { email: "test@test.com", action: "login" } });
      loginCode = loginOtp?.code || "";
      // Send a register code
      await app.server.request("/api/auth/otp/register", {
         method: "POST",
         headers: {
            "Content-Type": "application/json",
         },
         body: JSON.stringify({ email: "test@test.com" }),
      });
      // Get the register code
      const { data: registerOtp } = await app
         .getApi()
         .data.readOneBy("users_otp", { where: { email: "test@test.com", action: "register" } });
      registerCode = registerOtp?.code || "";
      // Try to use login code for register
      {
         const res = await app.server.request("/api/auth/otp/register", {
            method: "POST",
            headers: {
               "Content-Type": "application/json",
            },
            body: JSON.stringify({ email: "test@test.com", code: loginCode }),
         });
         expect(res.status).toBe(400);
         const error = await res.json();
         expect(error).toBeDefined();
      }
      // Try to use register code for login
      {
         const res = await app.server.request("/api/auth/otp/login", {
            method: "POST",
            headers: {
               "Content-Type": "application/json",
            },
            body: JSON.stringify({ email: "test@test.com", code: registerCode }),
         });
         expect(res.status).toBe(400);
         const error = await res.json();
         expect(error).toBeDefined();
      }
   });
   test("should invalidate previous codes when sending new code", async () => {
      let firstCode = "";
      let secondCode = "";
      const app = createApp({
         config: {
            auth: {
               enabled: true,
               jwt: {
                  secret: "test",
               },
            },
         },
         options: {
            plugins: [
               emailOTP({
                  showActualErrors: true,
                  generateEmail: (otp) => ({ subject: "test", body: otp.code }),
               }),
            ],
            drivers: {
               email: {
                  send: async () => {},
               },
            },
            seed: async (ctx) => {
               await ctx.app.createUser({ email: "test@test.com", password: "12345678" });
            },
         },
      });
      await app.build();
      const em = app.em.fork();
      // Send first code
      await app.server.request("/api/auth/otp/login", {
         method: "POST",
         headers: {
            "Content-Type": "application/json",
         },
         body: JSON.stringify({ email: "test@test.com" }),
      });
      // Get the first code
      const { data: firstOtp } = await em
         .repo("users_otp")
         .findOne({ email: "test@test.com", action: "login" });
      firstCode = firstOtp?.code || "";
      expect(firstCode).toBeDefined();
      // Send second code (should invalidate the first)
      await app.server.request("/api/auth/otp/login", {
         method: "POST",
         headers: {
            "Content-Type": "application/json",
         },
         body: JSON.stringify({ email: "test@test.com" }),
      });
      // Get the second code
      const { data: secondOtp } = await em
         .repo("users_otp")
         .findOne({ email: "test@test.com", action: "login" });
      secondCode = secondOtp?.code || "";
      expect(secondCode).toBeDefined();
      expect(secondCode).not.toBe(firstCode);
      // Try to use the first code (should fail as it's been invalidated)
      {
         const res = await app.server.request("/api/auth/otp/login", {
            method: "POST",
            headers: {
               "Content-Type": "application/json",
            },
            body: JSON.stringify({ email: "test@test.com", code: firstCode }),
         });
         expect(res.status).toBe(400);
         const error = await res.json();
         expect(error).toBeDefined();
      }
      // The second code should work
      {
         const res = await app.server.request("/api/auth/otp/login", {
            method: "POST",
            headers: {
               "Content-Type": "application/json",
            },
            body: JSON.stringify({ email: "test@test.com", code: secondCode }),
         });
         expect(res.status).toBe(200);
      }
   });
});

View File

@@ -0,0 +1,388 @@
import {
datetime,
em,
entity,
enumm,
Exception,
text,
type App,
type AppPlugin,
type DB,
type FieldSchema,
type MaybePromise,
type EntityConfig,
DatabaseEvents,
} from "bknd";
import {
invariant,
s,
jsc,
HttpStatus,
threwAsync,
randomString,
$console,
pickKeys,
} from "bknd/utils";
import { Hono } from "hono";
/**
 * Options for the {@link emailOTP} plugin. All fields are optional; the
 * defaults issue a random 6-digit numeric code, store it in a "users_otp"
 * entity, and email it with a plain-text body.
 */
export type EmailOTPPluginOptions = {
   /**
    * Customize code generation. If not provided, a random 6-digit code will be generated.
    */
   generateCode?: (user: Pick<DB["users"], "email">) => string;
   /**
    * The base path for the API endpoints.
    * @default "/api/auth/otp"
    */
   apiBasePath?: string;
   /**
    * The TTL for the OTP tokens in seconds.
    * @default 600 (10 minutes)
    */
   ttl?: number;
   /**
    * The name of the OTP entity.
    * @default "users_otp"
    */
   entity?: string;
   /**
    * The config for the OTP entity.
    */
   entityConfig?: EntityConfig;
   /**
    * Customize email content. If not provided, a default email will be sent.
    */
   generateEmail?: (
      otp: EmailOTPFieldSchema,
   ) => MaybePromise<{ subject: string; body: string | { text: string; html: string } }>;
   /**
    * Enable debug mode for error messages.
    * @default false
    */
   showActualErrors?: boolean;
   /**
    * Allow direct mutations (create/update) of OTP codes outside of this plugin,
    * e.g. via API or admin UI. If false, mutations are only allowed via the plugin's flows.
    * @default false
    */
   allowExternalMutations?: boolean;
   /**
    * Whether to send the email with the OTP code.
    * @default true
    */
   sendEmail?: boolean;
};
// Field layout of the generated OTP entity: a code is bound to an email and an
// action ("login" | "register"), carries an expiry, and is marked consumed via
// `used_at` once verified.
const otpFields = {
   action: enumm({
      enum: ["login", "register"],
   }),
   code: text().required(),
   email: text().required(),
   created_at: datetime(),
   expires_at: datetime().required(),
   used_at: datetime(),
};

/** Row shape of the OTP entity, derived from {@link otpFields}. */
export type EmailOTPFieldSchema = FieldSchema<typeof otpFields>;
// Domain error for OTP validation failures (HTTP 400). The route error handler
// re-throws OTPError as-is even when `showActualErrors` is disabled.
class OTPError extends Exception {
   override name = "OTPError";
   override code = HttpStatus.BAD_REQUEST;
}
/**
 * Email OTP (one-time code) authentication plugin.
 *
 * Contributes a generated entity (default "users_otp") that stores short-lived
 * codes, and mounts two endpoints under `apiBasePath`:
 *  - POST /login    — without `code`: issue (and optionally email) a code;
 *                     with `code`: verify it and sign the existing user in.
 *  - POST /register — without `code`: issue a code for a new email;
 *                     with `code`: verify it, create the user, and sign in.
 *
 * Unless `allowExternalMutations` is true, direct inserts/updates on the OTP
 * entity are blocked via database event listeners.
 */
export function emailOTP({
   generateCode: _generateCode,
   apiBasePath = "/api/auth/otp",
   ttl = 600,
   entity: entityName = "users_otp",
   entityConfig,
   generateEmail: _generateEmail,
   showActualErrors = false,
   allowExternalMutations = false,
   sendEmail = true,
}: EmailOTPPluginOptions = {}): AppPlugin {
   return (app: App) => {
      return {
         name: "email-otp",
         // Contribute the OTP entity plus a composite index covering the
         // verification lookup (email, expires_at, code).
         schema: () =>
            em(
               {
                  [entityName]: entity(
                     entityName,
                     otpFields,
                     {
                        name: "Users OTP",
                        sort_dir: "desc",
                        primary_format: app.module.data.config.default_primary_format,
                        ...entityConfig,
                     },
                     "generated",
                  ) as any,
               },
               ({ index }, schema) => {
                  const otp = schema[entityName]!;
                  index(otp).on(["email", "expires_at", "code"]);
               },
            ),
         onBuilt: async () => {
            const auth = app.module.auth;
            // Hard requirements: auth must be enabled, and an email driver
            // must be registered unless sending is explicitly disabled.
            invariant(auth && auth.enabled === true, "Auth is not enabled");
            invariant(!sendEmail || app.drivers?.email, "Email driver is not registered");
            // Defaults: random 6-digit numeric code, plain-text email body.
            const generateCode =
               _generateCode ?? (() => Math.floor(100000 + Math.random() * 900000).toString());
            const generateEmail =
               _generateEmail ??
               ((otp: EmailOTPFieldSchema) => ({
                  subject: "OTP Code",
                  body: `Your OTP code is: ${otp.code}`,
               }));
            const em = app.em.fork();
            const hono = new Hono()
               .post(
                  "/login",
                  jsc(
                     "json",
                     s.strictObject({
                        email: s.string({ format: "email" }),
                        code: s.string({ minLength: 1 }).optional(),
                     }),
                  ),
                  jsc("query", s.object({ redirect: s.string().optional() })),
                  async (c) => {
                     const { email, code } = c.req.valid("json");
                     const { redirect } = c.req.valid("query");
                     // Throws a 400 when the user does not exist.
                     const user = await findUser(app, email);
                     if (code) {
                        // Verification: validate the code, mark it consumed,
                        // then respond with a signed JWT for the user.
                        const otpData = await getValidatedCode(
                           app,
                           entityName,
                           email,
                           code,
                           "login",
                        );
                        await em.mutator(entityName).updateOne(otpData.id, { used_at: new Date() });
                        const jwt = await auth.authenticator.jwt(user);
                        // @ts-expect-error private method
                        return auth.authenticator.respondWithUser(
                           c,
                           { user, token: jwt },
                           { redirect },
                        );
                     } else {
                        // Issuance: invalidate prior codes, store a fresh one,
                        // optionally email it; the code is never echoed back.
                        const otpData = await invalidateAndGenerateCode(
                           app,
                           { generateCode, ttl, entity: entityName },
                           user,
                           "login",
                        );
                        if (sendEmail) {
                           await sendCode(app, otpData, { generateEmail });
                        }
                        return c.json(
                           {
                              sent: true,
                              data: pickKeys(otpData, ["email", "action", "expires_at"]),
                           },
                           HttpStatus.CREATED,
                        );
                     }
                  },
               )
               .post(
                  "/register",
                  jsc(
                     "json",
                     s.object({
                        email: s.string({ format: "email" }),
                        code: s.string({ minLength: 1 }).optional(),
                     }),
                  ),
                  jsc("query", s.object({ redirect: s.string().optional() })),
                  async (c) => {
                     const { email, code, ...rest } = c.req.valid("json");
                     const { redirect } = c.req.valid("query");
                     // throw if user exists
                     if (!(await threwAsync(findUser(app, email)))) {
                        throw new Exception("User already exists", HttpStatus.BAD_REQUEST);
                     }
                     if (code) {
                        // Verification: consume the code, create the account
                        // with a random password (OTP-only login), sign in.
                        const otpData = await getValidatedCode(
                           app,
                           entityName,
                           email,
                           code,
                           "register",
                        );
                        await em.mutator(entityName).updateOne(otpData.id, { used_at: new Date() });
                        const user = await app.createUser({
                           ...rest,
                           email,
                           password: randomString(32, true),
                        });
                        const jwt = await auth.authenticator.jwt(user);
                        // @ts-expect-error private method
                        return auth.authenticator.respondWithUser(
                           c,
                           { user, token: jwt },
                           { redirect },
                        );
                     } else {
                        const otpData = await invalidateAndGenerateCode(
                           app,
                           { generateCode, ttl, entity: entityName },
                           { email },
                           "register",
                        );
                        if (sendEmail) {
                           await sendCode(app, otpData, { generateEmail });
                        }
                        return c.json(
                           {
                              sent: true,
                              data: pickKeys(otpData, ["email", "action", "expires_at"]),
                           },
                           HttpStatus.CREATED,
                        );
                     }
                  },
               )
               .onError((err) => {
                  // Collapse unexpected errors into a generic 400 so internal
                  // state (e.g. whether an email exists) isn't leaked; OTPError
                  // messages and debug mode pass through unchanged.
                  if (showActualErrors || err instanceof OTPError) {
                     throw err;
                  }
                  throw new Exception("Invalid credentials", HttpStatus.BAD_REQUEST);
               });
            app.server.route(apiBasePath, hono);
            if (allowExternalMutations !== true) {
               registerListeners(app, entityName);
            }
         },
      };
   };
}
/**
 * Resolve a user row by email from the configured auth entity.
 * @throws Exception (400) when no matching user exists
 */
async function findUser(app: App, email: string) {
   const usersEntity = app.module.auth.config.entity_name as "users";
   const result = await app.em.repo(usersEntity).findOne({ email });
   if (!result.data) {
      throw new Exception("User not found", HttpStatus.BAD_REQUEST);
   }
   return result.data;
}
/**
 * Invalidate all outstanding codes for the user, then insert and return a
 * fresh OTP row for the given action.
 * @throws OTPError when the configured generator yields an empty code
 */
async function invalidateAndGenerateCode(
   app: App,
   opts: Required<Pick<EmailOTPPluginOptions, "generateCode" | "ttl" | "entity">>,
   user: Pick<DB["users"], "email">,
   action: EmailOTPFieldSchema["action"],
) {
   const { generateCode, ttl, entity: entityName } = opts;
   // `generateCode` is required by the Required<> type, so call it directly
   // (no optional chaining); still guard against custom generators returning "".
   const newCode = generateCode(user);
   if (!newCode) {
      throw new OTPError("Failed to generate code");
   }
   await invalidateAllUserCodes(app, entityName, user.email, ttl);
   const { data: otpData } = await app.em
      .fork()
      .mutator(entityName)
      .insertOne({
         code: newCode,
         email: user.email,
         action,
         created_at: new Date(),
         expires_at: new Date(Date.now() + ttl * 1000),
      });
   // SECURITY: the OTP code is a credential — keep it out of regular logs.
   // Downgraded from `log` to `debug` so it only appears with verbose logging.
   $console.debug("[OTP Code]", newCode);
   return otpData;
}
/** Render the email for an OTP record and dispatch it via the email driver. */
async function sendCode(
   app: App,
   otpData: EmailOTPFieldSchema,
   opts: Required<Pick<EmailOTPPluginOptions, "generateEmail">>,
) {
   const email = await opts.generateEmail(otpData);
   await app.drivers?.email?.send(otpData.email, email.subject, email.body);
}
/**
 * Fetch the OTP row matching (email, code, action) and verify it is usable.
 * Check order is significant: unknown -> expired -> already consumed.
 * @throws OTPError when the code is unknown, expired, or already used
 */
async function getValidatedCode(
   app: App,
   entityName: string,
   email: string,
   code: string,
   action: EmailOTPFieldSchema["action"],
) {
   invariant(email, "[OTP Plugin]: Email is required");
   invariant(code, "[OTP Plugin]: Code is required");
   const result = await app.em.fork().repo(entityName).findOne({ email, code, action });
   const otpData = result.data;
   if (!otpData) {
      throw new OTPError("Invalid code");
   }
   const now = new Date();
   if (otpData.expires_at < now) {
      throw new OTPError("Code expired");
   }
   if (otpData.used_at) {
      throw new OTPError("Code already used");
   }
   return otpData;
}
/**
 * Expire every unused OTP code for the given email by backdating `expires_at`.
 * `ttl` is only sanity-checked here; the new expiry is always 1s in the past.
 */
async function invalidateAllUserCodes(app: App, entityName: string, email: string, ttl: number) {
   invariant(ttl > 0, "[OTP Plugin]: TTL must be greater than 0");
   invariant(email, "[OTP Plugin]: Email is required");
   const backdated = new Date(Date.now() - 1000);
   await app.em
      .fork()
      .mutator(entityName)
      .updateWhere({ expires_at: backdated }, { email, used_at: { $isnull: true } });
}
/**
 * Block direct inserts/updates on the OTP entity so codes can only be written
 * through the plugin's own flows (which use forked entity managers —
 * presumably forks do not carry these listeners; verify against em.fork()).
 */
function registerListeners(app: App, entityName: string) {
   const guard = (e: { params: { entity: { name: string } } }) => {
      if (e.params.entity.name === entityName) {
         throw new OTPError("Mutations of the OTP entity are not allowed");
      }
   };
   for (const event of [DatabaseEvents.MutatorInsertBefore, DatabaseEvents.MutatorUpdateBefore]) {
      app.emgr.onEvent(event, guard, { mode: "sync", id: "bknd-email-otp" });
   }
}

Some files were not shown because too many files have changed in this diff Show More