init opfs and sqlocal as another browser adapter

This commit is contained in:
dswbx
2025-11-25 16:21:16 +01:00
parent 5e5dc62304
commit 36e1bb1867
17 changed files with 844 additions and 110 deletions

View File

@@ -0,0 +1,34 @@
import { describe, beforeAll, vi, afterAll, spyOn } from "bun:test";
import { OpfsStorageAdapter } from "./OpfsStorageAdapter";
// @ts-ignore
import { assetsPath } from "../../../__test__/helper";
import { adapterTestSuite } from "media/storage/adapters/adapter-test-suite";
import { bunTestRunner } from "adapter/bun/test";
import { MockFileSystemDirectoryHandle } from "adapter/browser/mock";
// Runs the shared storage-adapter test suite against OpfsStorageAdapter,
// backed by an in-memory mock of the OPFS root (no real browser required).
describe("OpfsStorageAdapter", async () => {
   let mockRoot: MockFileSystemDirectoryHandle;
   let testSuiteAdapter: OpfsStorageAdapter;
   // spy is created at collection time, before the adapter ever reads `navigator`
   const _mock = spyOn(global, "navigator");
   beforeAll(() => {
      // mock navigator.storage.getDirectory()
      mockRoot = new MockFileSystemDirectoryHandle("opfs-root");
      const mockNavigator = {
         storage: {
            getDirectory: vi.fn().mockResolvedValue(mockRoot),
         },
      };
      // @ts-ignore
      _mock.mockReturnValue(mockNavigator);
      // constructed only after the mock is installed, so the adapter's
      // root promise resolves against the mocked directory handle
      testSuiteAdapter = new OpfsStorageAdapter();
   });
   afterAll(() => {
      _mock.mockRestore();
   });
   const file = Bun.file(`${assetsPath}/image.png`);
   // adapter is passed lazily — it is only assigned inside beforeAll
   await adapterTestSuite(bunTestRunner, () => testSuiteAdapter, file);
});

View File

@@ -0,0 +1,265 @@
import type { FileBody, FileListObject, FileMeta, FileUploadPayload } from "bknd";
import { StorageAdapter, guessMimeType } from "bknd";
import { parse, s, isFile, isBlob } from "bknd/utils";
// Schema for the OPFS adapter configuration. `root` is a slash-separated
// directory path below the OPFS root; "" (the default) means the OPFS root.
export const opfsAdapterConfig = s.object(
   {
      root: s.string({ default: "" }),
   },
   {
      title: "OPFS",
      description: "Origin Private File System storage",
      // reject unknown configuration keys
      additionalProperties: false,
   },
);
// Inferred static TypeScript type of the configuration schema above.
export type OpfsAdapterConfig = s.Static<typeof opfsAdapterConfig>;
/**
 * Storage adapter for OPFS (Origin Private File System)
 * Provides browser-based file storage using the File System Access API
 *
 * NOTE(review): listing is non-recursive — only files directly inside the
 * configured root directory are enumerated.
 */
export class OpfsStorageAdapter extends StorageAdapter {
   private config: OpfsAdapterConfig;
   // handle of the configured root directory; resolved once, awaited by every op
   private rootPromise: Promise<FileSystemDirectoryHandle>;

   constructor(config: Partial<OpfsAdapterConfig> = {}) {
      super();
      this.config = parse(opfsAdapterConfig, config);
      // start resolving eagerly so the first operation doesn't pay the full cost
      this.rootPromise = this.initializeRoot();
   }

   /**
    * Resolves (creating as needed) the nested directory given by
    * `config.root` below the OPFS root. An empty root yields the OPFS root.
    */
   private async initializeRoot(): Promise<FileSystemDirectoryHandle> {
      const opfsRoot = await navigator.storage.getDirectory();
      if (!this.config.root) {
         return opfsRoot;
      }
      // navigate to or create nested directory structure
      const parts = this.config.root.split("/").filter(Boolean);
      let current = opfsRoot;
      for (const part of parts) {
         current = await current.getDirectoryHandle(part, { create: true });
      }
      return current;
   }

   getSchema() {
      return opfsAdapterConfig;
   }

   getName(): string {
      return "opfs";
   }

   /**
    * Lists files directly inside the root directory, optionally filtered by
    * key prefix. Subdirectories are skipped (non-recursive).
    */
   async listObjects(prefix?: string): Promise<FileListObject[]> {
      const root = await this.rootPromise;
      const files: FileListObject[] = [];
      for await (const [name, handle] of root.entries()) {
         if (handle.kind === "file") {
            if (!prefix || name.startsWith(prefix)) {
               const file = await (handle as FileSystemFileHandle).getFile();
               files.push({
                  key: name,
                  last_modified: new Date(file.lastModified),
                  size: file.size,
               });
            }
         }
      }
      return files;
   }

   // SHA-256 of the content, hex-encoded and quoted as an HTTP ETag
   private async computeEtagFromArrayBuffer(buffer: ArrayBuffer): Promise<string> {
      const hashBuffer = await crypto.subtle.digest("SHA-256", buffer);
      const hashArray = Array.from(new Uint8Array(hashBuffer));
      const hashHex = hashArray.map((byte) => byte.toString(16).padStart(2, "0")).join("");
      // wrap the hex string in quotes for ETag format
      return `"${hashHex}"`;
   }

   /**
    * Writes `body` under `key` (creating the file if needed) and returns the
    * content's ETag. Streams are written chunk-by-chunk, but the full content
    * is also buffered in memory to compute the ETag.
    * @throws if body is null; on write failure the writable is aborted
    */
   async putObject(key: string, body: FileBody): Promise<string | FileUploadPayload> {
      if (body === null) {
         throw new Error("Body is empty");
      }
      const root = await this.rootPromise;
      const fileHandle = await root.getFileHandle(key, { create: true });
      const writable = await fileHandle.createWritable();
      try {
         let contentBuffer: ArrayBuffer;
         if (isFile(body)) {
            contentBuffer = await body.arrayBuffer();
            await writable.write(contentBuffer);
         } else if (body instanceof ReadableStream) {
            const chunks: Uint8Array[] = [];
            const reader = body.getReader();
            try {
               while (true) {
                  const { done, value } = await reader.read();
                  if (done) break;
                  chunks.push(value);
                  await writable.write(value);
               }
            } finally {
               reader.releaseLock();
            }
            // compute total size and combine chunks for etag
            const totalSize = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
            const combined = new Uint8Array(totalSize);
            let offset = 0;
            for (const chunk of chunks) {
               combined.set(chunk, offset);
               offset += chunk.length;
            }
            contentBuffer = combined.buffer;
         } else if (isBlob(body)) {
            contentBuffer = await (body as Blob).arrayBuffer();
            await writable.write(contentBuffer);
         } else {
            // body is ArrayBuffer or ArrayBufferView
            if (ArrayBuffer.isView(body)) {
               const view = body as ArrayBufferView;
               // copy only the viewed region (a view may cover part of its buffer)
               contentBuffer = view.buffer.slice(
                  view.byteOffset,
                  view.byteOffset + view.byteLength,
               ) as ArrayBuffer;
            } else {
               contentBuffer = body as ArrayBuffer;
            }
            await writable.write(body);
         }
         await writable.close();
         return await this.computeEtagFromArrayBuffer(contentBuffer);
      } catch (error) {
         // discard partial writes, then surface the original error
         await writable.abort();
         throw error;
      }
   }

   /** Deletes `key`; missing files are silently ignored. */
   async deleteObject(key: string): Promise<void> {
      try {
         const root = await this.rootPromise;
         await root.removeEntry(key);
      } catch {
         // file doesn't exist, which is fine
      }
   }

   /** Returns true if a file handle for `key` can be obtained. */
   async objectExists(key: string): Promise<boolean> {
      try {
         const root = await this.rootPromise;
         await root.getFileHandle(key);
         return true;
      } catch {
         return false;
      }
   }

   /**
    * Parses a single "bytes=start-end" Range header against the file size.
    * Returns an inclusive byte range, or null when malformed/unsatisfiable.
    */
   private parseRangeHeader(
      rangeHeader: string,
      fileSize: number,
   ): { start: number; end: number } | null {
      // parse "bytes=start-end" format (single range only; range lists unsupported)
      const match = rangeHeader.match(/^bytes=(\d*)-(\d*)$/);
      if (!match) return null;
      const [, startStr, endStr] = match;
      let start = startStr ? Number.parseInt(startStr, 10) : 0;
      let end = endStr ? Number.parseInt(endStr, 10) : fileSize - 1;
      // handle suffix-byte-range-spec (e.g., "bytes=-500")
      if (!startStr && endStr) {
         start = Math.max(0, fileSize - Number.parseInt(endStr, 10));
         end = fileSize - 1;
      }
      // RFC 9110 §14.1.2: a last-byte-pos beyond the end is clamped, not rejected
      end = Math.min(end, fileSize - 1);
      // unsatisfiable: negative start, or an empty/inverted range
      if (start < 0 || start > end) {
         return null;
      }
      return { start, end };
   }

   /**
    * Serves the file as an HTTP Response. Supports single byte-range requests
    * (206 Partial Content / 416 Range Not Satisfiable); any read failure maps
    * to a 404 response.
    */
   async getObject(key: string, headers: Headers): Promise<Response> {
      try {
         const root = await this.rootPromise;
         const fileHandle = await root.getFileHandle(key);
         const file = await fileHandle.getFile();
         const fileSize = file.size;
         const mimeType = guessMimeType(key);
         const responseHeaders = new Headers({
            "Accept-Ranges": "bytes",
            "Content-Type": mimeType || "application/octet-stream",
         });
         const rangeHeader = headers.get("range");
         if (rangeHeader) {
            const range = this.parseRangeHeader(rangeHeader, fileSize);
            if (!range) {
               // invalid range - return 416 Range Not Satisfiable
               responseHeaders.set("Content-Range", `bytes */${fileSize}`);
               return new Response("", {
                  status: 416,
                  headers: responseHeaders,
               });
            }
            const { start, end } = range;
            // Blob.slice serves the range without materializing the whole file
            const chunk = file.slice(start, end + 1);
            responseHeaders.set("Content-Range", `bytes ${start}-${end}/${fileSize}`);
            responseHeaders.set("Content-Length", chunk.size.toString());
            return new Response(chunk, {
               status: 206, // Partial Content
               headers: responseHeaders,
            });
         }
         // normal request - stream the entire file without an extra copy
         responseHeaders.set("Content-Length", fileSize.toString());
         return new Response(file, {
            status: 200,
            headers: responseHeaders,
         });
      } catch {
         // handle file reading errors
         return new Response("", { status: 404 });
      }
   }

   /** OPFS files have no publicly reachable URL. */
   getObjectUrl(_key: string): string {
      throw new Error("Method not implemented.");
   }

   /**
    * Returns type/size metadata for `key`.
    * @throws if the file does not exist
    */
   async getObjectMeta(key: string): Promise<FileMeta> {
      const root = await this.rootPromise;
      const fileHandle = await root.getFileHandle(key);
      const file = await fileHandle.getFile();
      return {
         type: guessMimeType(key) || "application/octet-stream",
         size: file.size,
      };
   }

   /** Serializable adapter descriptor (config contains no secrets). */
   toJSON(_secrets?: boolean) {
      return {
         type: this.getName(),
         config: this.config,
      };
   }
}

View File

View File

@@ -0,0 +1,136 @@
// mock OPFS API for testing
// mock OPFS API for testing
class MockFileSystemFileHandle {
   kind: "file" = "file";
   name: string;
   // committed content; only updated when a writable is close()d
   private content: ArrayBuffer;
   private lastModified: number;

   constructor(name: string, content: ArrayBuffer = new ArrayBuffer(0)) {
      this.name = name;
      this.content = content;
      this.lastModified = Date.now();
   }

   /** Returns the committed content as a File with a guessed mime type. */
   async getFile(): Promise<File> {
      return new File([this.content], this.name, {
         lastModified: this.lastModified,
         type: this.guessMimeType(),
      });
   }

   /**
    * Returns a writable mirroring real OPFS semantics: sequential write()
    * calls append (the previous implementation replaced the whole content on
    * every write, so multi-chunk/streamed uploads kept only the last chunk),
    * and the result is committed on close(). abort() discards pending writes.
    * NOTE(review): WriteParams objects ({ type: "write", ... }), seek and
    * truncate are not implemented — confirm callers don't rely on them.
    */
   async createWritable(): Promise<FileSystemWritableFileStream> {
      const handle = this;
      const pending: Uint8Array[] = [];
      return {
         async write(data: any) {
            if (data instanceof ArrayBuffer) {
               // copy so later caller-side mutation can't change the commit
               pending.push(new Uint8Array(data.slice(0)));
            } else if (ArrayBuffer.isView(data)) {
               pending.push(
                  new Uint8Array(
                     data.buffer.slice(data.byteOffset, data.byteOffset + data.byteLength),
                  ),
               );
            } else if (data instanceof Blob) {
               pending.push(new Uint8Array(await data.arrayBuffer()));
            } else if (typeof data === "string") {
               pending.push(new TextEncoder().encode(data));
            }
         },
         async close() {
            // commit: concatenate all written chunks into the new content
            const total = pending.reduce((sum, chunk) => sum + chunk.length, 0);
            const combined = new Uint8Array(total);
            let offset = 0;
            for (const chunk of pending) {
               combined.set(chunk, offset);
               offset += chunk.length;
            }
            handle.content = combined.buffer;
            handle.lastModified = Date.now();
         },
         async abort() {
            pending.length = 0;
         },
         async seek(_position: number) {},
         async truncate(_size: number) {},
      } as FileSystemWritableFileStream;
   }

   /** Minimal extension-based mime type detection for tests. */
   private guessMimeType(): string {
      const ext = this.name.split(".").pop()?.toLowerCase();
      const mimeTypes: Record<string, string> = {
         png: "image/png",
         jpg: "image/jpeg",
         jpeg: "image/jpeg",
         gif: "image/gif",
         webp: "image/webp",
         svg: "image/svg+xml",
         txt: "text/plain",
         json: "application/json",
         pdf: "application/pdf",
      };
      return mimeTypes[ext || ""] || "application/octet-stream";
   }
}
// Minimal in-memory stand-in for a FileSystemDirectoryHandle.
export class MockFileSystemDirectoryHandle {
   kind: "directory" = "directory";
   name: string;
   // flat maps of direct children; nesting is modeled by child directory handles
   private files: Map<string, MockFileSystemFileHandle> = new Map();
   private directories: Map<string, MockFileSystemDirectoryHandle> = new Map();

   constructor(name: string = "root") {
      this.name = name;
   }

   /** Returns an existing file handle, creating one when `options.create` is set. */
   async getFileHandle(
      name: string,
      options?: FileSystemGetFileOptions,
   ): Promise<FileSystemFileHandle> {
      const existing = this.files.get(name);
      if (existing) {
         return existing as any;
      }
      if (!options?.create) {
         throw new Error(`File not found: ${name}`);
      }
      const created = new MockFileSystemFileHandle(name);
      this.files.set(name, created);
      return created as any;
   }

   /** Returns an existing directory handle, creating one when `options.create` is set. */
   async getDirectoryHandle(
      name: string,
      options?: FileSystemGetDirectoryOptions,
   ): Promise<FileSystemDirectoryHandle> {
      const existing = this.directories.get(name);
      if (existing) {
         return existing as any;
      }
      if (!options?.create) {
         throw new Error(`Directory not found: ${name}`);
      }
      const created = new MockFileSystemDirectoryHandle(name);
      this.directories.set(name, created);
      return created as any;
   }

   /** Removes a direct child by name; unknown names are a silent no-op. */
   async removeEntry(name: string, _options?: FileSystemRemoveOptions): Promise<void> {
      this.files.delete(name);
      this.directories.delete(name);
   }

   /** Yields [name, handle] pairs — files first, then directories. */
   async *entries(): AsyncIterableIterator<[string, FileSystemHandle]> {
      for (const pair of this.files) {
         yield pair as unknown as [string, FileSystemHandle];
      }
      for (const pair of this.directories) {
         yield pair as unknown as [string, FileSystemHandle];
      }
   }

   /** Yields child names — files first, then directories. */
   async *keys(): AsyncIterableIterator<string> {
      yield* this.files.keys();
      yield* this.directories.keys();
   }

   /** Yields child handles — files first, then directories. */
   async *values(): AsyncIterableIterator<FileSystemHandle> {
      for (const handle of this.files.values()) {
         yield handle as any;
      }
      for (const handle of this.directories.values()) {
         yield handle as any;
      }
   }

   [Symbol.asyncIterator](): AsyncIterableIterator<[string, FileSystemHandle]> {
      return this.entries();
   }
}

View File

@@ -18,26 +18,39 @@ export type SqliteConnectionConfig<
CustomDialect extends Constructor<Dialect> = Constructor<Dialect>, CustomDialect extends Constructor<Dialect> = Constructor<Dialect>,
> = { > = {
excludeTables?: string[]; excludeTables?: string[];
dialect: CustomDialect;
dialectArgs?: ConstructorParameters<CustomDialect>;
additionalPlugins?: KyselyPlugin[]; additionalPlugins?: KyselyPlugin[];
customFn?: Partial<DbFunctions>; customFn?: Partial<DbFunctions>;
}; } & (
| {
dialect: CustomDialect;
dialectArgs?: ConstructorParameters<CustomDialect>;
}
| {
kysely: Kysely<any>;
}
);
export abstract class SqliteConnection<Client = unknown> extends Connection<Client> { export abstract class SqliteConnection<Client = unknown> extends Connection<Client> {
override name = "sqlite"; override name = "sqlite";
constructor(config: SqliteConnectionConfig) { constructor(config: SqliteConnectionConfig) {
const { excludeTables, dialect, dialectArgs = [], additionalPlugins } = config; const { excludeTables, additionalPlugins } = config;
const plugins = [new ParseJSONResultsPlugin(), ...(additionalPlugins ?? [])]; const plugins = [new ParseJSONResultsPlugin(), ...(additionalPlugins ?? [])];
const kysely = new Kysely({ let kysely: Kysely<any>;
dialect: customIntrospector(dialect, SqliteIntrospector, { if ("dialect" in config) {
excludeTables, kysely = new Kysely({
dialect: customIntrospector(config.dialect, SqliteIntrospector, {
excludeTables,
plugins,
}).create(...(config.dialectArgs ?? [])),
plugins, plugins,
}).create(...dialectArgs), });
plugins, } else if ("kysely" in config) {
}); kysely = config.kysely;
} else {
throw new Error("Either dialect or kysely must be provided");
}
super( super(
kysely, kysely,

View File

@@ -5,7 +5,7 @@ import type { BunFile } from "bun";
export async function adapterTestSuite( export async function adapterTestSuite(
testRunner: TestRunner, testRunner: TestRunner,
adapter: StorageAdapter, _adapter: StorageAdapter | (() => StorageAdapter),
file: File | BunFile, file: File | BunFile,
opts?: { opts?: {
retries?: number; retries?: number;
@@ -25,7 +25,12 @@ export async function adapterTestSuite(
const _filename = randomString(10); const _filename = randomString(10);
const filename = `${_filename}.png`; const filename = `${_filename}.png`;
const getAdapter = (
typeof _adapter === "function" ? _adapter : () => _adapter
) as () => StorageAdapter;
await test("puts an object", async () => { await test("puts an object", async () => {
const adapter = getAdapter();
objects = (await adapter.listObjects()).length; objects = (await adapter.listObjects()).length;
const result = await adapter.putObject(filename, file as unknown as File); const result = await adapter.putObject(filename, file as unknown as File);
expect(result).toBeDefined(); expect(result).toBeDefined();
@@ -38,6 +43,7 @@ export async function adapterTestSuite(
}); });
await test("lists objects", async () => { await test("lists objects", async () => {
const adapter = getAdapter();
const length = await retry( const length = await retry(
() => adapter.listObjects().then((res) => res.length), () => adapter.listObjects().then((res) => res.length),
(length) => length > objects, (length) => length > objects,
@@ -49,10 +55,12 @@ export async function adapterTestSuite(
}); });
await test("file exists", async () => { await test("file exists", async () => {
const adapter = getAdapter();
expect(await adapter.objectExists(filename)).toBe(true); expect(await adapter.objectExists(filename)).toBe(true);
}); });
await test("gets an object", async () => { await test("gets an object", async () => {
const adapter = getAdapter();
const res = await adapter.getObject(filename, new Headers()); const res = await adapter.getObject(filename, new Headers());
expect(res.ok).toBe(true); expect(res.ok).toBe(true);
expect(res.headers.get("Accept-Ranges")).toBe("bytes"); expect(res.headers.get("Accept-Ranges")).toBe("bytes");
@@ -62,6 +70,7 @@ export async function adapterTestSuite(
if (options.testRange) { if (options.testRange) {
await test("handles range request - partial content", async () => { await test("handles range request - partial content", async () => {
const headers = new Headers({ Range: "bytes=0-99" }); const headers = new Headers({ Range: "bytes=0-99" });
const adapter = getAdapter();
const res = await adapter.getObject(filename, headers); const res = await adapter.getObject(filename, headers);
expect(res.status).toBe(206); // Partial Content expect(res.status).toBe(206); // Partial Content
expect(/^bytes 0-99\/\d+$/.test(res.headers.get("Content-Range")!)).toBe(true); expect(/^bytes 0-99\/\d+$/.test(res.headers.get("Content-Range")!)).toBe(true);
@@ -70,6 +79,7 @@ export async function adapterTestSuite(
await test("handles range request - suffix range", async () => { await test("handles range request - suffix range", async () => {
const headers = new Headers({ Range: "bytes=-100" }); const headers = new Headers({ Range: "bytes=-100" });
const adapter = getAdapter();
const res = await adapter.getObject(filename, headers); const res = await adapter.getObject(filename, headers);
expect(res.status).toBe(206); // Partial Content expect(res.status).toBe(206); // Partial Content
expect(/^bytes \d+-\d+\/\d+$/.test(res.headers.get("Content-Range")!)).toBe(true); expect(/^bytes \d+-\d+\/\d+$/.test(res.headers.get("Content-Range")!)).toBe(true);
@@ -77,6 +87,7 @@ export async function adapterTestSuite(
await test("handles invalid range request", async () => { await test("handles invalid range request", async () => {
const headers = new Headers({ Range: "bytes=invalid" }); const headers = new Headers({ Range: "bytes=invalid" });
const adapter = getAdapter();
const res = await adapter.getObject(filename, headers); const res = await adapter.getObject(filename, headers);
expect(res.status).toBe(416); // Range Not Satisfiable expect(res.status).toBe(416); // Range Not Satisfiable
expect(/^bytes \*\/\d+$/.test(res.headers.get("Content-Range")!)).toBe(true); expect(/^bytes \*\/\d+$/.test(res.headers.get("Content-Range")!)).toBe(true);
@@ -84,6 +95,7 @@ export async function adapterTestSuite(
} }
await test("gets object meta", async () => { await test("gets object meta", async () => {
const adapter = getAdapter();
expect(await adapter.getObjectMeta(filename)).toEqual({ expect(await adapter.getObjectMeta(filename)).toEqual({
type: file.type, // image/png type: file.type, // image/png
size: file.size, size: file.size,
@@ -91,6 +103,7 @@ export async function adapterTestSuite(
}); });
await test("deletes an object", async () => { await test("deletes an object", async () => {
const adapter = getAdapter();
expect(await adapter.deleteObject(filename)).toBeUndefined(); expect(await adapter.deleteObject(filename)).toBeUndefined();
if (opts?.skipExistsAfterDelete !== true) { if (opts?.skipExistsAfterDelete !== true) {

View File

@@ -14,7 +14,7 @@
"bknd": "file:../../app", "bknd": "file:../../app",
"react": "^19.0.0", "react": "^19.0.0",
"react-dom": "^19.0.0", "react-dom": "^19.0.0",
"sqlocal": "^0.14.0", "sqlocal": "^0.16.0",
"wouter": "^3.6.0" "wouter": "^3.6.0"
}, },
"devDependencies": { "devDependencies": {
@@ -26,7 +26,7 @@
"tailwindcss": "^4.0.14", "tailwindcss": "^4.0.14",
"typescript": "~5.7.2", "typescript": "~5.7.2",
"typescript-eslint": "^8.24.1", "typescript-eslint": "^8.24.1",
"vite": "^6.2.0", "vite": "^7.2.4",
"vite-tsconfig-paths": "^5.1.4" "vite-tsconfig-paths": "^5.1.4"
} }
} }

View File

@@ -1,21 +1,25 @@
import { lazy, Suspense, useEffect, useState } from "react"; import { lazy, Suspense, useEffect, useState } from "react";
import { checksum } from "bknd/utils"; import { checksum } from "bknd/utils";
import { App, boolean, em, entity, text } from "bknd"; import { App, boolean, em, entity, text, registries } from "bknd";
import { SQLocalConnection } from "@bknd/sqlocal"; import { SQLocalConnection } from "@bknd/sqlocal";
import { Route, Router, Switch } from "wouter"; import { Route, Router, Switch } from "wouter";
import IndexPage from "~/routes/_index"; import IndexPage from "~/routes/_index";
import { Center } from "~/components/Center"; import { Center } from "~/components/Center";
import { ClientProvider } from "bknd/client"; import { type Api, ClientProvider } from "bknd/client";
import { SQLocalKysely } from "sqlocal/kysely";
import { OpfsStorageAdapter } from "~/OpfsStorageAdapter";
const Admin = lazy(() => import("~/routes/admin")); const Admin = lazy(() => import("~/routes/admin"));
export default function () { export default function () {
const [app, setApp] = useState<App | undefined>(undefined); const [app, setApp] = useState<App | undefined>(undefined);
const [api, setApi] = useState<Api | undefined>(undefined);
const [hash, setHash] = useState<string>(""); const [hash, setHash] = useState<string>("");
async function onBuilt(app: App) { async function onBuilt(app: App) {
document.startViewTransition(async () => { document.startViewTransition(async () => {
setApp(app); setApp(app);
setApi(app.getApi());
setHash(await checksum(app.toJSON())); setHash(await checksum(app.toJSON()));
}); });
} }
@@ -26,7 +30,7 @@ export default function () {
.catch(console.error); .catch(console.error);
}, []); }, []);
if (!app) if (!app || !api)
return ( return (
<Center> <Center>
<span className="opacity-20">Loading...</span> <span className="opacity-20">Loading...</span>
@@ -34,27 +38,22 @@ export default function () {
); );
return ( return (
<Router key={hash}> <ClientProvider api={api}>
<Switch> <Router key={hash}>
<Route <Switch>
path="/" <Route path="/" component={() => <IndexPage app={app} />} />
component={() => (
<ClientProvider api={app.getApi()}>
<IndexPage app={app} />
</ClientProvider>
)}
/>
<Route path="/admin/*?"> <Route path="/admin/*?">
<Suspense> <Suspense>
<Admin config={{ basepath: "/admin", logo_return_path: "/../" }} app={app} /> <Admin config={{ basepath: "/admin", logo_return_path: "/../" }} />
</Suspense> </Suspense>
</Route> </Route>
<Route path="*"> <Route path="*">
<Center className="font-mono text-4xl">404</Center> <Center className="font-mono text-4xl">404</Center>
</Route> </Route>
</Switch> </Switch>
</Router> </Router>
</ClientProvider>
); );
} }
@@ -79,16 +78,26 @@ async function setup(opts?: {
if (initialized) return; if (initialized) return;
initialized = true; initialized = true;
const connection = new SQLocalConnection({ const connection = new SQLocalConnection(
databasePath: ":localStorage:", new SQLocalKysely({
verbose: true, databasePath: ":localStorage:",
}); verbose: true,
}),
);
registries.media.register("opfs", OpfsStorageAdapter);
const app = App.create({ const app = App.create({
connection, connection,
// an initial config is only applied if the database is empty // an initial config is only applied if the database is empty
config: { config: {
data: schema.toJSON(), data: schema.toJSON(),
auth: {
enabled: true,
jwt: {
secret: "secret",
},
},
}, },
options: { options: {
// the seed option is only executed if the database was empty // the seed option is only executed if the database was empty
@@ -99,10 +108,10 @@ async function setup(opts?: {
]); ]);
// @todo: auth is currently not working due to POST request // @todo: auth is currently not working due to POST request
/*await ctx.app.module.auth.createUser({ await ctx.app.module.auth.createUser({
email: "test@bknd.io", email: "test@bknd.io",
password: "12345678", password: "12345678",
});*/ });
}, },
}, },
}); });
@@ -112,6 +121,8 @@ async function setup(opts?: {
App.Events.AppBuiltEvent, App.Events.AppBuiltEvent,
async () => { async () => {
await opts.onBuilt?.(app); await opts.onBuilt?.(app);
// @ts-ignore
window.sql = app.connection.client.sql;
}, },
"sync", "sync",
); );

View File

@@ -0,0 +1,265 @@
import type { FileBody, FileListObject, FileMeta, FileUploadPayload } from "bknd";
import { StorageAdapter, guessMimeType } from "bknd";
import { parse, s, isFile, isBlob } from "bknd/utils";
// Schema for the OPFS adapter configuration. `root` is a slash-separated
// directory path below the OPFS root; empty/unset uses the OPFS root itself.
// NOTE(review): `root` is `.optional()` in this copy but required-with-default
// in the packaged adapter — confirm which variant is intended.
export const opfsAdapterConfig = s.object(
   {
      root: s.string({ default: "" }).optional(),
   },
   {
      title: "OPFS",
      description: "Origin Private File System storage",
      // reject unknown configuration keys
      additionalProperties: false,
   },
);
// Inferred static TypeScript type of the configuration schema above.
export type OpfsAdapterConfig = s.Static<typeof opfsAdapterConfig>;
/**
 * Storage adapter for OPFS (Origin Private File System)
 * Provides browser-based file storage using the File System Access API
 *
 * NOTE(review): listing is non-recursive — only files directly inside the
 * configured root directory are enumerated.
 */
export class OpfsStorageAdapter extends StorageAdapter {
   private config: OpfsAdapterConfig;
   // handle of the configured root directory; resolved once, awaited by every op
   private rootPromise: Promise<FileSystemDirectoryHandle>;

   constructor(config: Partial<OpfsAdapterConfig> = {}) {
      super();
      this.config = parse(opfsAdapterConfig, config);
      // start resolving eagerly so the first operation doesn't pay the full cost
      this.rootPromise = this.initializeRoot();
   }

   /**
    * Resolves (creating as needed) the nested directory given by
    * `config.root` below the OPFS root. An empty root yields the OPFS root.
    */
   private async initializeRoot(): Promise<FileSystemDirectoryHandle> {
      const opfsRoot = await navigator.storage.getDirectory();
      if (!this.config.root) {
         return opfsRoot;
      }
      // navigate to or create nested directory structure
      const parts = this.config.root.split("/").filter(Boolean);
      let current = opfsRoot;
      for (const part of parts) {
         current = await current.getDirectoryHandle(part, { create: true });
      }
      return current;
   }

   getSchema() {
      return opfsAdapterConfig;
   }

   getName(): string {
      return "opfs";
   }

   /**
    * Lists files directly inside the root directory, optionally filtered by
    * key prefix. Subdirectories are skipped (non-recursive).
    */
   async listObjects(prefix?: string): Promise<FileListObject[]> {
      const root = await this.rootPromise;
      const files: FileListObject[] = [];
      for await (const [name, handle] of root.entries()) {
         if (handle.kind === "file") {
            if (!prefix || name.startsWith(prefix)) {
               const file = await (handle as FileSystemFileHandle).getFile();
               files.push({
                  key: name,
                  last_modified: new Date(file.lastModified),
                  size: file.size,
               });
            }
         }
      }
      return files;
   }

   // SHA-256 of the content, hex-encoded and quoted as an HTTP ETag
   private async computeEtagFromArrayBuffer(buffer: ArrayBuffer): Promise<string> {
      const hashBuffer = await crypto.subtle.digest("SHA-256", buffer);
      const hashArray = Array.from(new Uint8Array(hashBuffer));
      const hashHex = hashArray.map((byte) => byte.toString(16).padStart(2, "0")).join("");
      // wrap the hex string in quotes for ETag format
      return `"${hashHex}"`;
   }

   /**
    * Writes `body` under `key` (creating the file if needed) and returns the
    * content's ETag. Streams are written chunk-by-chunk, but the full content
    * is also buffered in memory to compute the ETag.
    * @throws if body is null; on write failure the writable is aborted
    */
   async putObject(key: string, body: FileBody): Promise<string | FileUploadPayload> {
      if (body === null) {
         throw new Error("Body is empty");
      }
      const root = await this.rootPromise;
      const fileHandle = await root.getFileHandle(key, { create: true });
      const writable = await fileHandle.createWritable();
      try {
         let contentBuffer: ArrayBuffer;
         if (isFile(body)) {
            contentBuffer = await body.arrayBuffer();
            await writable.write(contentBuffer);
         } else if (body instanceof ReadableStream) {
            const chunks: Uint8Array[] = [];
            const reader = body.getReader();
            try {
               while (true) {
                  const { done, value } = await reader.read();
                  if (done) break;
                  chunks.push(value);
                  await writable.write(value);
               }
            } finally {
               reader.releaseLock();
            }
            // compute total size and combine chunks for etag
            const totalSize = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
            const combined = new Uint8Array(totalSize);
            let offset = 0;
            for (const chunk of chunks) {
               combined.set(chunk, offset);
               offset += chunk.length;
            }
            contentBuffer = combined.buffer;
         } else if (isBlob(body)) {
            contentBuffer = await (body as Blob).arrayBuffer();
            await writable.write(contentBuffer);
         } else {
            // body is ArrayBuffer or ArrayBufferView
            if (ArrayBuffer.isView(body)) {
               const view = body as ArrayBufferView;
               // copy only the viewed region (a view may cover part of its buffer)
               contentBuffer = view.buffer.slice(
                  view.byteOffset,
                  view.byteOffset + view.byteLength,
               ) as ArrayBuffer;
            } else {
               contentBuffer = body as ArrayBuffer;
            }
            await writable.write(body);
         }
         await writable.close();
         return await this.computeEtagFromArrayBuffer(contentBuffer);
      } catch (error) {
         // discard partial writes, then surface the original error
         await writable.abort();
         throw error;
      }
   }

   /** Deletes `key`; missing files are silently ignored. */
   async deleteObject(key: string): Promise<void> {
      try {
         const root = await this.rootPromise;
         await root.removeEntry(key);
      } catch {
         // file doesn't exist, which is fine
      }
   }

   /** Returns true if a file handle for `key` can be obtained. */
   async objectExists(key: string): Promise<boolean> {
      try {
         const root = await this.rootPromise;
         await root.getFileHandle(key);
         return true;
      } catch {
         return false;
      }
   }

   /**
    * Parses a single "bytes=start-end" Range header against the file size.
    * Returns an inclusive byte range, or null when malformed/unsatisfiable.
    */
   private parseRangeHeader(
      rangeHeader: string,
      fileSize: number,
   ): { start: number; end: number } | null {
      // parse "bytes=start-end" format (single range only; range lists unsupported)
      const match = rangeHeader.match(/^bytes=(\d*)-(\d*)$/);
      if (!match) return null;
      const [, startStr, endStr] = match;
      let start = startStr ? Number.parseInt(startStr, 10) : 0;
      let end = endStr ? Number.parseInt(endStr, 10) : fileSize - 1;
      // handle suffix-byte-range-spec (e.g., "bytes=-500")
      if (!startStr && endStr) {
         start = Math.max(0, fileSize - Number.parseInt(endStr, 10));
         end = fileSize - 1;
      }
      // RFC 9110 §14.1.2: a last-byte-pos beyond the end is clamped, not rejected
      end = Math.min(end, fileSize - 1);
      // unsatisfiable: negative start, or an empty/inverted range
      if (start < 0 || start > end) {
         return null;
      }
      return { start, end };
   }

   /**
    * Serves the file as an HTTP Response. Supports single byte-range requests
    * (206 Partial Content / 416 Range Not Satisfiable); any read failure maps
    * to a 404 response.
    */
   async getObject(key: string, headers: Headers): Promise<Response> {
      try {
         const root = await this.rootPromise;
         const fileHandle = await root.getFileHandle(key);
         const file = await fileHandle.getFile();
         const fileSize = file.size;
         const mimeType = guessMimeType(key);
         const responseHeaders = new Headers({
            "Accept-Ranges": "bytes",
            "Content-Type": mimeType || "application/octet-stream",
         });
         const rangeHeader = headers.get("range");
         if (rangeHeader) {
            const range = this.parseRangeHeader(rangeHeader, fileSize);
            if (!range) {
               // invalid range - return 416 Range Not Satisfiable
               responseHeaders.set("Content-Range", `bytes */${fileSize}`);
               return new Response("", {
                  status: 416,
                  headers: responseHeaders,
               });
            }
            const { start, end } = range;
            // Blob.slice serves the range without materializing the whole file
            const chunk = file.slice(start, end + 1);
            responseHeaders.set("Content-Range", `bytes ${start}-${end}/${fileSize}`);
            responseHeaders.set("Content-Length", chunk.size.toString());
            return new Response(chunk, {
               status: 206, // Partial Content
               headers: responseHeaders,
            });
         }
         // normal request - stream the entire file without an extra copy
         responseHeaders.set("Content-Length", fileSize.toString());
         return new Response(file, {
            status: 200,
            headers: responseHeaders,
         });
      } catch {
         // handle file reading errors
         return new Response("", { status: 404 });
      }
   }

   /** OPFS files have no publicly reachable URL. */
   getObjectUrl(_key: string): string {
      throw new Error("Method not implemented.");
   }

   /**
    * Returns type/size metadata for `key`.
    * @throws if the file does not exist
    */
   async getObjectMeta(key: string): Promise<FileMeta> {
      const root = await this.rootPromise;
      const fileHandle = await root.getFileHandle(key);
      const file = await fileHandle.getFile();
      return {
         type: guessMimeType(key) || "application/octet-stream",
         size: file.size,
      };
   }

   /** Serializable adapter descriptor (config contains no secrets). */
   toJSON(_secrets?: boolean) {
      return {
         type: this.getName(),
         config: this.config,
      };
   }
}

View File

@@ -1,10 +1,6 @@
import { Admin, type BkndAdminProps } from "bknd/ui"; import { Admin, type BkndAdminProps } from "bknd/ui";
import type { App } from "bknd";
import "bknd/dist/styles.css"; import "bknd/dist/styles.css";
export default function AdminPage({ export default function AdminPage(props: BkndAdminProps) {
app, return <Admin {...props} />;
...props
}: Omit<BkndAdminProps, "withProvider"> & { app: App }) {
return <Admin {...props} withProvider={{ api: app.getApi() }} />;
} }

View File

@@ -2,6 +2,7 @@ import { defineConfig } from "vite";
import react from "@vitejs/plugin-react"; import react from "@vitejs/plugin-react";
import tailwindcss from "@tailwindcss/vite"; import tailwindcss from "@tailwindcss/vite";
import tsconfigPaths from "vite-tsconfig-paths"; import tsconfigPaths from "vite-tsconfig-paths";
import sqlocal from "sqlocal/vite";
// https://vite.dev/config/ // https://vite.dev/config/
// https://sqlocal.dallashoffman.com/guide/setup#vite-configuration // https://sqlocal.dallashoffman.com/guide/setup#vite-configuration
@@ -9,11 +10,16 @@ export default defineConfig({
optimizeDeps: { optimizeDeps: {
exclude: ["sqlocal"], exclude: ["sqlocal"],
}, },
resolve: {
dedupe: ["react", "react-dom"],
},
plugins: [ plugins: [
sqlocal(),
react(), react(),
tailwindcss(), tailwindcss(),
tsconfigPaths(), tsconfigPaths(),
{ /* {
name: "configure-response-headers", name: "configure-response-headers",
configureServer: (server) => { configureServer: (server) => {
server.middlewares.use((_req, res, next) => { server.middlewares.use((_req, res, next) => {
@@ -22,6 +28,6 @@ export default defineConfig({
next(); next();
}); });
}, },
}, }, */
], ],
}); });

View File

@@ -16,12 +16,12 @@
"prepublishOnly": "bun run typecheck && bun run test && bun run build" "prepublishOnly": "bun run typecheck && bun run test && bun run build"
}, },
"dependencies": { "dependencies": {
"sqlocal": "^0.14.0" "sqlocal": "^0.16.0"
}, },
"devDependencies": { "devDependencies": {
"@vitest/browser": "^3.0.8", "@vitest/browser": "^3.0.8",
"@vitest/ui": "^3.0.8", "@vitest/ui": "^3.0.8",
"@types/node": "^22.13.10", "@types/node": "^24.10.1",
"bknd": "workspace:*", "bknd": "workspace:*",
"kysely": "^0.27.6", "kysely": "^0.27.6",
"tsup": "^8.4.0", "tsup": "^8.4.0",

View File

@@ -1,51 +1,44 @@
import { Kysely, ParseJSONResultsPlugin } from "kysely"; import { Kysely, ParseJSONResultsPlugin } from "kysely";
import { SqliteConnection, SqliteIntrospector } from "bknd/data"; import { SqliteConnection, SqliteIntrospector, type DB } from "bknd";
import { SQLocalKysely } from "sqlocal/kysely"; import type { SQLocalKysely } from "sqlocal/kysely";
import type { ClientConfig } from "sqlocal";
const plugins = [new ParseJSONResultsPlugin()]; const plugins = [new ParseJSONResultsPlugin()];
export type SQLocalConnectionConfig = Omit<ClientConfig, "databasePath"> & { export class SQLocalConnection extends SqliteConnection<SQLocalKysely> {
// make it optional private connected: boolean = false;
databasePath?: ClientConfig["databasePath"];
};
export class SQLocalConnection extends SqliteConnection { constructor(client: SQLocalKysely) {
private _client: SQLocalKysely | undefined; // @ts-expect-error - config is protected
client.config.onConnect = () => {
constructor(private config: SQLocalConnectionConfig) { // we need to listen for the connection, it will be awaited in init()
super(null as any, {}, plugins); this.connected = true;
};
super({
kysely: new Kysely<any>({
dialect: {
...client.dialect,
createIntrospector: (db: Kysely<DB>) => {
return new SqliteIntrospector(db as any, {
plugins,
});
},
},
plugins,
}) as any,
});
this.client = client;
} }
override async init() { override async init() {
if (this.initialized) return; if (this.initialized) return;
let tries = 0;
await new Promise((resolve) => { while (!this.connected && tries < 100) {
this._client = new SQLocalKysely({ tries++;
...this.config, await new Promise((resolve) => setTimeout(resolve, 5));
databasePath: this.config.databasePath ?? "session", }
onConnect: (r) => { if (!this.connected) {
this.kysely = new Kysely<any>({ throw new Error("Failed to connect to SQLite database");
dialect: { }
...this._client!.dialect, this.initialized = true;
createIntrospector: (db: Kysely<any>) => {
return new SqliteIntrospector(db, {
plugins,
});
},
},
plugins,
});
this.config.onConnect?.(r);
resolve(1);
},
});
});
super.init();
}
get client(): SQLocalKysely {
if (!this._client) throw new Error("Client not initialized");
return this._client!;
} }
} }

View File

@@ -1 +1 @@
export { SQLocalConnection, type SQLocalConnectionConfig } from "./SQLocalConnection"; export { SQLocalConnection } from "./SQLocalConnection";

View File

@@ -1,14 +1,15 @@
import { describe, expect, it } from "vitest"; import { describe, expect, it } from "vitest";
import { SQLocalConnection, type SQLocalConnectionConfig } from "../src"; import { SQLocalConnection } from "../src";
import type { ClientConfig } from "sqlocal";
import { SQLocalKysely } from "sqlocal/kysely";
describe(SQLocalConnection, () => { describe(SQLocalConnection, () => {
function create(config: SQLocalConnectionConfig = {}) { function create(config: ClientConfig = { databasePath: ":memory:" }) {
return new SQLocalConnection(config); return new SQLocalConnection(new SQLocalKysely(config));
} }
it("constructs", async () => { it("constructs", async () => {
const connection = create(); const connection = create();
expect(() => connection.client).toThrow();
await connection.init(); await connection.init();
expect(connection.client).toBeDefined(); expect(connection.client).toBeDefined();
expect(await connection.client.sql`SELECT 1`).toEqual([{ "1": 1 }]); expect(await connection.client.sql`SELECT 1`).toEqual([{ "1": 1 }]);

View File

@@ -1,11 +1,12 @@
import { describe, expect, it } from "vitest"; import { describe, expect, it } from "bun:test";
import { SQLocalConnection, type SQLocalConnectionConfig } from "../src"; import { SQLocalConnection } from "../src";
import { createApp } from "bknd"; import { createApp, em, entity, text } from "bknd";
import * as proto from "bknd/data"; import type { ClientConfig } from "sqlocal";
import { SQLocalKysely } from "sqlocal/kysely";
describe("integration", () => { describe("integration", () => {
function create(config: SQLocalConnectionConfig = { databasePath: ":memory:" }) { function create(config: ClientConfig = { databasePath: ":memory:" }) {
return new SQLocalConnection(config); return new SQLocalConnection(new SQLocalKysely(config));
} }
it("should create app and ping", async () => { it("should create app and ping", async () => {
@@ -19,14 +20,14 @@ describe("integration", () => {
}); });
it("should create a basic schema", async () => { it("should create a basic schema", async () => {
const schema = proto.em( const schema = em(
{ {
posts: proto.entity("posts", { posts: entity("posts", {
title: proto.text().required(), title: text().required(),
content: proto.text(), content: text(),
}), }),
comments: proto.entity("comments", { comments: entity("comments", {
content: proto.text(), content: text(),
}), }),
}, },
(fns, s) => { (fns, s) => {

View File

@@ -1,6 +1,6 @@
/// <reference types="vitest" /> /// <reference types="vitest" />
/// <reference types="@vitest/browser/providers/webdriverio" /> /// <reference types="@vitest/browser/providers/webdriverio" />
import { defineConfig } from "vite"; import { defineConfig } from "vitest/config";
// https://github.com/DallasHoff/sqlocal/blob/main/vite.config.ts // https://github.com/DallasHoff/sqlocal/blob/main/vite.config.ts
export default defineConfig({ export default defineConfig({