mirror of
https://github.com/shishantbiswas/bknd.git
synced 2026-03-15 20:17:22 +00:00
init opfs and sqlocal as another browser adapter
This commit is contained in:
34
app/src/adapter/browser/OpfsStorageAdapter.spec.ts
Normal file
34
app/src/adapter/browser/OpfsStorageAdapter.spec.ts
Normal file
@@ -0,0 +1,34 @@
|
||||
import { describe, beforeAll, vi, afterAll, spyOn } from "bun:test";
|
||||
import { OpfsStorageAdapter } from "./OpfsStorageAdapter";
|
||||
// @ts-ignore
|
||||
import { assetsPath } from "../../../__test__/helper";
|
||||
import { adapterTestSuite } from "media/storage/adapters/adapter-test-suite";
|
||||
import { bunTestRunner } from "adapter/bun/test";
|
||||
import { MockFileSystemDirectoryHandle } from "adapter/browser/mock";
|
||||
|
||||
describe("OpfsStorageAdapter", async () => {
|
||||
let mockRoot: MockFileSystemDirectoryHandle;
|
||||
let testSuiteAdapter: OpfsStorageAdapter;
|
||||
|
||||
const _mock = spyOn(global, "navigator");
|
||||
|
||||
beforeAll(() => {
|
||||
// mock navigator.storage.getDirectory()
|
||||
mockRoot = new MockFileSystemDirectoryHandle("opfs-root");
|
||||
const mockNavigator = {
|
||||
storage: {
|
||||
getDirectory: vi.fn().mockResolvedValue(mockRoot),
|
||||
},
|
||||
};
|
||||
// @ts-ignore
|
||||
_mock.mockReturnValue(mockNavigator);
|
||||
testSuiteAdapter = new OpfsStorageAdapter();
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
_mock.mockRestore();
|
||||
});
|
||||
|
||||
const file = Bun.file(`${assetsPath}/image.png`);
|
||||
await adapterTestSuite(bunTestRunner, () => testSuiteAdapter, file);
|
||||
});
|
||||
265
app/src/adapter/browser/OpfsStorageAdapter.ts
Normal file
265
app/src/adapter/browser/OpfsStorageAdapter.ts
Normal file
@@ -0,0 +1,265 @@
|
||||
import type { FileBody, FileListObject, FileMeta, FileUploadPayload } from "bknd";
|
||||
import { StorageAdapter, guessMimeType } from "bknd";
|
||||
import { parse, s, isFile, isBlob } from "bknd/utils";
|
||||
|
||||
export const opfsAdapterConfig = s.object(
|
||||
{
|
||||
root: s.string({ default: "" }),
|
||||
},
|
||||
{
|
||||
title: "OPFS",
|
||||
description: "Origin Private File System storage",
|
||||
additionalProperties: false,
|
||||
},
|
||||
);
|
||||
export type OpfsAdapterConfig = s.Static<typeof opfsAdapterConfig>;
|
||||
|
||||
/**
|
||||
* Storage adapter for OPFS (Origin Private File System)
|
||||
* Provides browser-based file storage using the File System Access API
|
||||
*/
|
||||
export class OpfsStorageAdapter extends StorageAdapter {
|
||||
private config: OpfsAdapterConfig;
|
||||
private rootPromise: Promise<FileSystemDirectoryHandle>;
|
||||
|
||||
constructor(config: Partial<OpfsAdapterConfig> = {}) {
|
||||
super();
|
||||
this.config = parse(opfsAdapterConfig, config);
|
||||
this.rootPromise = this.initializeRoot();
|
||||
}
|
||||
|
||||
private async initializeRoot(): Promise<FileSystemDirectoryHandle> {
|
||||
const opfsRoot = await navigator.storage.getDirectory();
|
||||
if (!this.config.root) {
|
||||
return opfsRoot;
|
||||
}
|
||||
|
||||
// navigate to or create nested directory structure
|
||||
const parts = this.config.root.split("/").filter(Boolean);
|
||||
let current = opfsRoot;
|
||||
for (const part of parts) {
|
||||
current = await current.getDirectoryHandle(part, { create: true });
|
||||
}
|
||||
return current;
|
||||
}
|
||||
|
||||
getSchema() {
|
||||
return opfsAdapterConfig;
|
||||
}
|
||||
|
||||
getName(): string {
|
||||
return "opfs";
|
||||
}
|
||||
|
||||
async listObjects(prefix?: string): Promise<FileListObject[]> {
|
||||
const root = await this.rootPromise;
|
||||
const files: FileListObject[] = [];
|
||||
|
||||
for await (const [name, handle] of root.entries()) {
|
||||
if (handle.kind === "file") {
|
||||
if (!prefix || name.startsWith(prefix)) {
|
||||
const file = await (handle as FileSystemFileHandle).getFile();
|
||||
files.push({
|
||||
key: name,
|
||||
last_modified: new Date(file.lastModified),
|
||||
size: file.size,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return files;
|
||||
}
|
||||
|
||||
private async computeEtagFromArrayBuffer(buffer: ArrayBuffer): Promise<string> {
|
||||
const hashBuffer = await crypto.subtle.digest("SHA-256", buffer);
|
||||
const hashArray = Array.from(new Uint8Array(hashBuffer));
|
||||
const hashHex = hashArray.map((byte) => byte.toString(16).padStart(2, "0")).join("");
|
||||
|
||||
// wrap the hex string in quotes for ETag format
|
||||
return `"${hashHex}"`;
|
||||
}
|
||||
|
||||
async putObject(key: string, body: FileBody): Promise<string | FileUploadPayload> {
|
||||
if (body === null) {
|
||||
throw new Error("Body is empty");
|
||||
}
|
||||
|
||||
const root = await this.rootPromise;
|
||||
const fileHandle = await root.getFileHandle(key, { create: true });
|
||||
const writable = await fileHandle.createWritable();
|
||||
|
||||
try {
|
||||
let contentBuffer: ArrayBuffer;
|
||||
|
||||
if (isFile(body)) {
|
||||
contentBuffer = await body.arrayBuffer();
|
||||
await writable.write(contentBuffer);
|
||||
} else if (body instanceof ReadableStream) {
|
||||
const chunks: Uint8Array[] = [];
|
||||
const reader = body.getReader();
|
||||
try {
|
||||
while (true) {
|
||||
const { done, value } = await reader.read();
|
||||
if (done) break;
|
||||
chunks.push(value);
|
||||
await writable.write(value);
|
||||
}
|
||||
} finally {
|
||||
reader.releaseLock();
|
||||
}
|
||||
// compute total size and combine chunks for etag
|
||||
const totalSize = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
|
||||
const combined = new Uint8Array(totalSize);
|
||||
let offset = 0;
|
||||
for (const chunk of chunks) {
|
||||
combined.set(chunk, offset);
|
||||
offset += chunk.length;
|
||||
}
|
||||
contentBuffer = combined.buffer;
|
||||
} else if (isBlob(body)) {
|
||||
contentBuffer = await (body as Blob).arrayBuffer();
|
||||
await writable.write(contentBuffer);
|
||||
} else {
|
||||
// body is ArrayBuffer or ArrayBufferView
|
||||
if (ArrayBuffer.isView(body)) {
|
||||
const view = body as ArrayBufferView;
|
||||
contentBuffer = view.buffer.slice(
|
||||
view.byteOffset,
|
||||
view.byteOffset + view.byteLength,
|
||||
) as ArrayBuffer;
|
||||
} else {
|
||||
contentBuffer = body as ArrayBuffer;
|
||||
}
|
||||
await writable.write(body);
|
||||
}
|
||||
|
||||
await writable.close();
|
||||
return await this.computeEtagFromArrayBuffer(contentBuffer);
|
||||
} catch (error) {
|
||||
await writable.abort();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async deleteObject(key: string): Promise<void> {
|
||||
try {
|
||||
const root = await this.rootPromise;
|
||||
await root.removeEntry(key);
|
||||
} catch {
|
||||
// file doesn't exist, which is fine
|
||||
}
|
||||
}
|
||||
|
||||
async objectExists(key: string): Promise<boolean> {
|
||||
try {
|
||||
const root = await this.rootPromise;
|
||||
await root.getFileHandle(key);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
private parseRangeHeader(
|
||||
rangeHeader: string,
|
||||
fileSize: number,
|
||||
): { start: number; end: number } | null {
|
||||
// parse "bytes=start-end" format
|
||||
const match = rangeHeader.match(/^bytes=(\d*)-(\d*)$/);
|
||||
if (!match) return null;
|
||||
|
||||
const [, startStr, endStr] = match;
|
||||
let start = startStr ? Number.parseInt(startStr, 10) : 0;
|
||||
let end = endStr ? Number.parseInt(endStr, 10) : fileSize - 1;
|
||||
|
||||
// handle suffix-byte-range-spec (e.g., "bytes=-500")
|
||||
if (!startStr && endStr) {
|
||||
start = Math.max(0, fileSize - Number.parseInt(endStr, 10));
|
||||
end = fileSize - 1;
|
||||
}
|
||||
|
||||
// validate range
|
||||
if (start < 0 || end >= fileSize || start > end) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return { start, end };
|
||||
}
|
||||
|
||||
async getObject(key: string, headers: Headers): Promise<Response> {
|
||||
try {
|
||||
const root = await this.rootPromise;
|
||||
const fileHandle = await root.getFileHandle(key);
|
||||
const file = await fileHandle.getFile();
|
||||
const fileSize = file.size;
|
||||
const mimeType = guessMimeType(key);
|
||||
|
||||
const responseHeaders = new Headers({
|
||||
"Accept-Ranges": "bytes",
|
||||
"Content-Type": mimeType || "application/octet-stream",
|
||||
});
|
||||
|
||||
const rangeHeader = headers.get("range");
|
||||
|
||||
if (rangeHeader) {
|
||||
const range = this.parseRangeHeader(rangeHeader, fileSize);
|
||||
|
||||
if (!range) {
|
||||
// invalid range - return 416 Range Not Satisfiable
|
||||
responseHeaders.set("Content-Range", `bytes */${fileSize}`);
|
||||
return new Response("", {
|
||||
status: 416,
|
||||
headers: responseHeaders,
|
||||
});
|
||||
}
|
||||
|
||||
const { start, end } = range;
|
||||
const arrayBuffer = await file.arrayBuffer();
|
||||
const chunk = arrayBuffer.slice(start, end + 1);
|
||||
|
||||
responseHeaders.set("Content-Range", `bytes ${start}-${end}/${fileSize}`);
|
||||
responseHeaders.set("Content-Length", chunk.byteLength.toString());
|
||||
|
||||
return new Response(chunk, {
|
||||
status: 206, // Partial Content
|
||||
headers: responseHeaders,
|
||||
});
|
||||
} else {
|
||||
// normal request - return entire file
|
||||
const content = await file.arrayBuffer();
|
||||
responseHeaders.set("Content-Length", content.byteLength.toString());
|
||||
|
||||
return new Response(content, {
|
||||
status: 200,
|
||||
headers: responseHeaders,
|
||||
});
|
||||
}
|
||||
} catch {
|
||||
// handle file reading errors
|
||||
return new Response("", { status: 404 });
|
||||
}
|
||||
}
|
||||
|
||||
getObjectUrl(_key: string): string {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
|
||||
async getObjectMeta(key: string): Promise<FileMeta> {
|
||||
const root = await this.rootPromise;
|
||||
const fileHandle = await root.getFileHandle(key);
|
||||
const file = await fileHandle.getFile();
|
||||
|
||||
return {
|
||||
type: guessMimeType(key) || "application/octet-stream",
|
||||
size: file.size,
|
||||
};
|
||||
}
|
||||
|
||||
toJSON(_secrets?: boolean) {
|
||||
return {
|
||||
type: this.getName(),
|
||||
config: this.config,
|
||||
};
|
||||
}
|
||||
}
|
||||
0
app/src/adapter/browser/index.ts
Normal file
0
app/src/adapter/browser/index.ts
Normal file
136
app/src/adapter/browser/mock.ts
Normal file
136
app/src/adapter/browser/mock.ts
Normal file
@@ -0,0 +1,136 @@
|
||||
// mock OPFS API for testing
|
||||
class MockFileSystemFileHandle {
|
||||
kind: "file" = "file";
|
||||
name: string;
|
||||
private content: ArrayBuffer;
|
||||
private lastModified: number;
|
||||
|
||||
constructor(name: string, content: ArrayBuffer = new ArrayBuffer(0)) {
|
||||
this.name = name;
|
||||
this.content = content;
|
||||
this.lastModified = Date.now();
|
||||
}
|
||||
|
||||
async getFile(): Promise<File> {
|
||||
return new File([this.content], this.name, {
|
||||
lastModified: this.lastModified,
|
||||
type: this.guessMimeType(),
|
||||
});
|
||||
}
|
||||
|
||||
async createWritable(): Promise<FileSystemWritableFileStream> {
|
||||
const handle = this;
|
||||
return {
|
||||
async write(data: any) {
|
||||
if (data instanceof ArrayBuffer) {
|
||||
handle.content = data;
|
||||
} else if (ArrayBuffer.isView(data)) {
|
||||
handle.content = data.buffer.slice(
|
||||
data.byteOffset,
|
||||
data.byteOffset + data.byteLength,
|
||||
) as ArrayBuffer;
|
||||
} else if (data instanceof Blob) {
|
||||
handle.content = await data.arrayBuffer();
|
||||
}
|
||||
handle.lastModified = Date.now();
|
||||
},
|
||||
async close() {},
|
||||
async abort() {},
|
||||
async seek(_position: number) {},
|
||||
async truncate(_size: number) {},
|
||||
} as FileSystemWritableFileStream;
|
||||
}
|
||||
|
||||
private guessMimeType(): string {
|
||||
const ext = this.name.split(".").pop()?.toLowerCase();
|
||||
const mimeTypes: Record<string, string> = {
|
||||
png: "image/png",
|
||||
jpg: "image/jpeg",
|
||||
jpeg: "image/jpeg",
|
||||
gif: "image/gif",
|
||||
webp: "image/webp",
|
||||
svg: "image/svg+xml",
|
||||
txt: "text/plain",
|
||||
json: "application/json",
|
||||
pdf: "application/pdf",
|
||||
};
|
||||
return mimeTypes[ext || ""] || "application/octet-stream";
|
||||
}
|
||||
}
|
||||
|
||||
export class MockFileSystemDirectoryHandle {
|
||||
kind: "directory" = "directory";
|
||||
name: string;
|
||||
private files: Map<string, MockFileSystemFileHandle> = new Map();
|
||||
private directories: Map<string, MockFileSystemDirectoryHandle> = new Map();
|
||||
|
||||
constructor(name: string = "root") {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
async getFileHandle(
|
||||
name: string,
|
||||
options?: FileSystemGetFileOptions,
|
||||
): Promise<FileSystemFileHandle> {
|
||||
if (this.files.has(name)) {
|
||||
return this.files.get(name) as any;
|
||||
}
|
||||
if (options?.create) {
|
||||
const handle = new MockFileSystemFileHandle(name);
|
||||
this.files.set(name, handle);
|
||||
return handle as any;
|
||||
}
|
||||
throw new Error(`File not found: ${name}`);
|
||||
}
|
||||
|
||||
async getDirectoryHandle(
|
||||
name: string,
|
||||
options?: FileSystemGetDirectoryOptions,
|
||||
): Promise<FileSystemDirectoryHandle> {
|
||||
if (this.directories.has(name)) {
|
||||
return this.directories.get(name) as any;
|
||||
}
|
||||
if (options?.create) {
|
||||
const handle = new MockFileSystemDirectoryHandle(name);
|
||||
this.directories.set(name, handle);
|
||||
return handle as any;
|
||||
}
|
||||
throw new Error(`Directory not found: ${name}`);
|
||||
}
|
||||
|
||||
async removeEntry(name: string, _options?: FileSystemRemoveOptions): Promise<void> {
|
||||
this.files.delete(name);
|
||||
this.directories.delete(name);
|
||||
}
|
||||
|
||||
async *entries(): AsyncIterableIterator<[string, FileSystemHandle]> {
|
||||
for (const [name, handle] of this.files) {
|
||||
yield [name, handle as any];
|
||||
}
|
||||
for (const [name, handle] of this.directories) {
|
||||
yield [name, handle as any];
|
||||
}
|
||||
}
|
||||
|
||||
async *keys(): AsyncIterableIterator<string> {
|
||||
for (const name of this.files.keys()) {
|
||||
yield name;
|
||||
}
|
||||
for (const name of this.directories.keys()) {
|
||||
yield name;
|
||||
}
|
||||
}
|
||||
|
||||
async *values(): AsyncIterableIterator<FileSystemHandle> {
|
||||
for (const handle of this.files.values()) {
|
||||
yield handle as any;
|
||||
}
|
||||
for (const handle of this.directories.values()) {
|
||||
yield handle as any;
|
||||
}
|
||||
}
|
||||
|
||||
[Symbol.asyncIterator](): AsyncIterableIterator<[string, FileSystemHandle]> {
|
||||
return this.entries();
|
||||
}
|
||||
}
|
||||
@@ -18,26 +18,39 @@ export type SqliteConnectionConfig<
|
||||
CustomDialect extends Constructor<Dialect> = Constructor<Dialect>,
|
||||
> = {
|
||||
excludeTables?: string[];
|
||||
dialect: CustomDialect;
|
||||
dialectArgs?: ConstructorParameters<CustomDialect>;
|
||||
additionalPlugins?: KyselyPlugin[];
|
||||
customFn?: Partial<DbFunctions>;
|
||||
};
|
||||
} & (
|
||||
| {
|
||||
dialect: CustomDialect;
|
||||
dialectArgs?: ConstructorParameters<CustomDialect>;
|
||||
}
|
||||
| {
|
||||
kysely: Kysely<any>;
|
||||
}
|
||||
);
|
||||
|
||||
export abstract class SqliteConnection<Client = unknown> extends Connection<Client> {
|
||||
override name = "sqlite";
|
||||
|
||||
constructor(config: SqliteConnectionConfig) {
|
||||
const { excludeTables, dialect, dialectArgs = [], additionalPlugins } = config;
|
||||
const { excludeTables, additionalPlugins } = config;
|
||||
const plugins = [new ParseJSONResultsPlugin(), ...(additionalPlugins ?? [])];
|
||||
|
||||
const kysely = new Kysely({
|
||||
dialect: customIntrospector(dialect, SqliteIntrospector, {
|
||||
excludeTables,
|
||||
let kysely: Kysely<any>;
|
||||
if ("dialect" in config) {
|
||||
kysely = new Kysely({
|
||||
dialect: customIntrospector(config.dialect, SqliteIntrospector, {
|
||||
excludeTables,
|
||||
plugins,
|
||||
}).create(...(config.dialectArgs ?? [])),
|
||||
plugins,
|
||||
}).create(...dialectArgs),
|
||||
plugins,
|
||||
});
|
||||
});
|
||||
} else if ("kysely" in config) {
|
||||
kysely = config.kysely;
|
||||
} else {
|
||||
throw new Error("Either dialect or kysely must be provided");
|
||||
}
|
||||
|
||||
super(
|
||||
kysely,
|
||||
|
||||
@@ -5,7 +5,7 @@ import type { BunFile } from "bun";
|
||||
|
||||
export async function adapterTestSuite(
|
||||
testRunner: TestRunner,
|
||||
adapter: StorageAdapter,
|
||||
_adapter: StorageAdapter | (() => StorageAdapter),
|
||||
file: File | BunFile,
|
||||
opts?: {
|
||||
retries?: number;
|
||||
@@ -25,7 +25,12 @@ export async function adapterTestSuite(
|
||||
const _filename = randomString(10);
|
||||
const filename = `${_filename}.png`;
|
||||
|
||||
const getAdapter = (
|
||||
typeof _adapter === "function" ? _adapter : () => _adapter
|
||||
) as () => StorageAdapter;
|
||||
|
||||
await test("puts an object", async () => {
|
||||
const adapter = getAdapter();
|
||||
objects = (await adapter.listObjects()).length;
|
||||
const result = await adapter.putObject(filename, file as unknown as File);
|
||||
expect(result).toBeDefined();
|
||||
@@ -38,6 +43,7 @@ export async function adapterTestSuite(
|
||||
});
|
||||
|
||||
await test("lists objects", async () => {
|
||||
const adapter = getAdapter();
|
||||
const length = await retry(
|
||||
() => adapter.listObjects().then((res) => res.length),
|
||||
(length) => length > objects,
|
||||
@@ -49,10 +55,12 @@ export async function adapterTestSuite(
|
||||
});
|
||||
|
||||
await test("file exists", async () => {
|
||||
const adapter = getAdapter();
|
||||
expect(await adapter.objectExists(filename)).toBe(true);
|
||||
});
|
||||
|
||||
await test("gets an object", async () => {
|
||||
const adapter = getAdapter();
|
||||
const res = await adapter.getObject(filename, new Headers());
|
||||
expect(res.ok).toBe(true);
|
||||
expect(res.headers.get("Accept-Ranges")).toBe("bytes");
|
||||
@@ -62,6 +70,7 @@ export async function adapterTestSuite(
|
||||
if (options.testRange) {
|
||||
await test("handles range request - partial content", async () => {
|
||||
const headers = new Headers({ Range: "bytes=0-99" });
|
||||
const adapter = getAdapter();
|
||||
const res = await adapter.getObject(filename, headers);
|
||||
expect(res.status).toBe(206); // Partial Content
|
||||
expect(/^bytes 0-99\/\d+$/.test(res.headers.get("Content-Range")!)).toBe(true);
|
||||
@@ -70,6 +79,7 @@ export async function adapterTestSuite(
|
||||
|
||||
await test("handles range request - suffix range", async () => {
|
||||
const headers = new Headers({ Range: "bytes=-100" });
|
||||
const adapter = getAdapter();
|
||||
const res = await adapter.getObject(filename, headers);
|
||||
expect(res.status).toBe(206); // Partial Content
|
||||
expect(/^bytes \d+-\d+\/\d+$/.test(res.headers.get("Content-Range")!)).toBe(true);
|
||||
@@ -77,6 +87,7 @@ export async function adapterTestSuite(
|
||||
|
||||
await test("handles invalid range request", async () => {
|
||||
const headers = new Headers({ Range: "bytes=invalid" });
|
||||
const adapter = getAdapter();
|
||||
const res = await adapter.getObject(filename, headers);
|
||||
expect(res.status).toBe(416); // Range Not Satisfiable
|
||||
expect(/^bytes \*\/\d+$/.test(res.headers.get("Content-Range")!)).toBe(true);
|
||||
@@ -84,6 +95,7 @@ export async function adapterTestSuite(
|
||||
}
|
||||
|
||||
await test("gets object meta", async () => {
|
||||
const adapter = getAdapter();
|
||||
expect(await adapter.getObjectMeta(filename)).toEqual({
|
||||
type: file.type, // image/png
|
||||
size: file.size,
|
||||
@@ -91,6 +103,7 @@ export async function adapterTestSuite(
|
||||
});
|
||||
|
||||
await test("deletes an object", async () => {
|
||||
const adapter = getAdapter();
|
||||
expect(await adapter.deleteObject(filename)).toBeUndefined();
|
||||
|
||||
if (opts?.skipExistsAfterDelete !== true) {
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
"bknd": "file:../../app",
|
||||
"react": "^19.0.0",
|
||||
"react-dom": "^19.0.0",
|
||||
"sqlocal": "^0.14.0",
|
||||
"sqlocal": "^0.16.0",
|
||||
"wouter": "^3.6.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
@@ -26,7 +26,7 @@
|
||||
"tailwindcss": "^4.0.14",
|
||||
"typescript": "~5.7.2",
|
||||
"typescript-eslint": "^8.24.1",
|
||||
"vite": "^6.2.0",
|
||||
"vite": "^7.2.4",
|
||||
"vite-tsconfig-paths": "^5.1.4"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,21 +1,25 @@
|
||||
import { lazy, Suspense, useEffect, useState } from "react";
|
||||
import { checksum } from "bknd/utils";
|
||||
import { App, boolean, em, entity, text } from "bknd";
|
||||
import { App, boolean, em, entity, text, registries } from "bknd";
|
||||
import { SQLocalConnection } from "@bknd/sqlocal";
|
||||
import { Route, Router, Switch } from "wouter";
|
||||
import IndexPage from "~/routes/_index";
|
||||
import { Center } from "~/components/Center";
|
||||
import { ClientProvider } from "bknd/client";
|
||||
import { type Api, ClientProvider } from "bknd/client";
|
||||
import { SQLocalKysely } from "sqlocal/kysely";
|
||||
import { OpfsStorageAdapter } from "~/OpfsStorageAdapter";
|
||||
|
||||
const Admin = lazy(() => import("~/routes/admin"));
|
||||
|
||||
export default function () {
|
||||
const [app, setApp] = useState<App | undefined>(undefined);
|
||||
const [api, setApi] = useState<Api | undefined>(undefined);
|
||||
const [hash, setHash] = useState<string>("");
|
||||
|
||||
async function onBuilt(app: App) {
|
||||
document.startViewTransition(async () => {
|
||||
setApp(app);
|
||||
setApi(app.getApi());
|
||||
setHash(await checksum(app.toJSON()));
|
||||
});
|
||||
}
|
||||
@@ -26,7 +30,7 @@ export default function () {
|
||||
.catch(console.error);
|
||||
}, []);
|
||||
|
||||
if (!app)
|
||||
if (!app || !api)
|
||||
return (
|
||||
<Center>
|
||||
<span className="opacity-20">Loading...</span>
|
||||
@@ -34,27 +38,22 @@ export default function () {
|
||||
);
|
||||
|
||||
return (
|
||||
<Router key={hash}>
|
||||
<Switch>
|
||||
<Route
|
||||
path="/"
|
||||
component={() => (
|
||||
<ClientProvider api={app.getApi()}>
|
||||
<IndexPage app={app} />
|
||||
</ClientProvider>
|
||||
)}
|
||||
/>
|
||||
<ClientProvider api={api}>
|
||||
<Router key={hash}>
|
||||
<Switch>
|
||||
<Route path="/" component={() => <IndexPage app={app} />} />
|
||||
|
||||
<Route path="/admin/*?">
|
||||
<Suspense>
|
||||
<Admin config={{ basepath: "/admin", logo_return_path: "/../" }} app={app} />
|
||||
</Suspense>
|
||||
</Route>
|
||||
<Route path="*">
|
||||
<Center className="font-mono text-4xl">404</Center>
|
||||
</Route>
|
||||
</Switch>
|
||||
</Router>
|
||||
<Route path="/admin/*?">
|
||||
<Suspense>
|
||||
<Admin config={{ basepath: "/admin", logo_return_path: "/../" }} />
|
||||
</Suspense>
|
||||
</Route>
|
||||
<Route path="*">
|
||||
<Center className="font-mono text-4xl">404</Center>
|
||||
</Route>
|
||||
</Switch>
|
||||
</Router>
|
||||
</ClientProvider>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -79,16 +78,26 @@ async function setup(opts?: {
|
||||
if (initialized) return;
|
||||
initialized = true;
|
||||
|
||||
const connection = new SQLocalConnection({
|
||||
databasePath: ":localStorage:",
|
||||
verbose: true,
|
||||
});
|
||||
const connection = new SQLocalConnection(
|
||||
new SQLocalKysely({
|
||||
databasePath: ":localStorage:",
|
||||
verbose: true,
|
||||
}),
|
||||
);
|
||||
|
||||
registries.media.register("opfs", OpfsStorageAdapter);
|
||||
|
||||
const app = App.create({
|
||||
connection,
|
||||
// an initial config is only applied if the database is empty
|
||||
config: {
|
||||
data: schema.toJSON(),
|
||||
auth: {
|
||||
enabled: true,
|
||||
jwt: {
|
||||
secret: "secret",
|
||||
},
|
||||
},
|
||||
},
|
||||
options: {
|
||||
// the seed option is only executed if the database was empty
|
||||
@@ -99,10 +108,10 @@ async function setup(opts?: {
|
||||
]);
|
||||
|
||||
// @todo: auth is currently not working due to POST request
|
||||
/*await ctx.app.module.auth.createUser({
|
||||
await ctx.app.module.auth.createUser({
|
||||
email: "test@bknd.io",
|
||||
password: "12345678",
|
||||
});*/
|
||||
});
|
||||
},
|
||||
},
|
||||
});
|
||||
@@ -112,6 +121,8 @@ async function setup(opts?: {
|
||||
App.Events.AppBuiltEvent,
|
||||
async () => {
|
||||
await opts.onBuilt?.(app);
|
||||
// @ts-ignore
|
||||
window.sql = app.connection.client.sql;
|
||||
},
|
||||
"sync",
|
||||
);
|
||||
|
||||
265
examples/react/src/OpfsStorageAdapter.ts
Normal file
265
examples/react/src/OpfsStorageAdapter.ts
Normal file
@@ -0,0 +1,265 @@
|
||||
import type { FileBody, FileListObject, FileMeta, FileUploadPayload } from "bknd";
|
||||
import { StorageAdapter, guessMimeType } from "bknd";
|
||||
import { parse, s, isFile, isBlob } from "bknd/utils";
|
||||
|
||||
export const opfsAdapterConfig = s.object(
|
||||
{
|
||||
root: s.string({ default: "" }).optional(),
|
||||
},
|
||||
{
|
||||
title: "OPFS",
|
||||
description: "Origin Private File System storage",
|
||||
additionalProperties: false,
|
||||
},
|
||||
);
|
||||
export type OpfsAdapterConfig = s.Static<typeof opfsAdapterConfig>;
|
||||
|
||||
/**
|
||||
* Storage adapter for OPFS (Origin Private File System)
|
||||
* Provides browser-based file storage using the File System Access API
|
||||
*/
|
||||
export class OpfsStorageAdapter extends StorageAdapter {
|
||||
private config: OpfsAdapterConfig;
|
||||
private rootPromise: Promise<FileSystemDirectoryHandle>;
|
||||
|
||||
constructor(config: Partial<OpfsAdapterConfig> = {}) {
|
||||
super();
|
||||
this.config = parse(opfsAdapterConfig, config);
|
||||
this.rootPromise = this.initializeRoot();
|
||||
}
|
||||
|
||||
private async initializeRoot(): Promise<FileSystemDirectoryHandle> {
|
||||
const opfsRoot = await navigator.storage.getDirectory();
|
||||
if (!this.config.root) {
|
||||
return opfsRoot;
|
||||
}
|
||||
|
||||
// navigate to or create nested directory structure
|
||||
const parts = this.config.root.split("/").filter(Boolean);
|
||||
let current = opfsRoot;
|
||||
for (const part of parts) {
|
||||
current = await current.getDirectoryHandle(part, { create: true });
|
||||
}
|
||||
return current;
|
||||
}
|
||||
|
||||
getSchema() {
|
||||
return opfsAdapterConfig;
|
||||
}
|
||||
|
||||
getName(): string {
|
||||
return "opfs";
|
||||
}
|
||||
|
||||
async listObjects(prefix?: string): Promise<FileListObject[]> {
|
||||
const root = await this.rootPromise;
|
||||
const files: FileListObject[] = [];
|
||||
|
||||
for await (const [name, handle] of root.entries()) {
|
||||
if (handle.kind === "file") {
|
||||
if (!prefix || name.startsWith(prefix)) {
|
||||
const file = await (handle as FileSystemFileHandle).getFile();
|
||||
files.push({
|
||||
key: name,
|
||||
last_modified: new Date(file.lastModified),
|
||||
size: file.size,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return files;
|
||||
}
|
||||
|
||||
private async computeEtagFromArrayBuffer(buffer: ArrayBuffer): Promise<string> {
|
||||
const hashBuffer = await crypto.subtle.digest("SHA-256", buffer);
|
||||
const hashArray = Array.from(new Uint8Array(hashBuffer));
|
||||
const hashHex = hashArray.map((byte) => byte.toString(16).padStart(2, "0")).join("");
|
||||
|
||||
// wrap the hex string in quotes for ETag format
|
||||
return `"${hashHex}"`;
|
||||
}
|
||||
|
||||
async putObject(key: string, body: FileBody): Promise<string | FileUploadPayload> {
|
||||
if (body === null) {
|
||||
throw new Error("Body is empty");
|
||||
}
|
||||
|
||||
const root = await this.rootPromise;
|
||||
const fileHandle = await root.getFileHandle(key, { create: true });
|
||||
const writable = await fileHandle.createWritable();
|
||||
|
||||
try {
|
||||
let contentBuffer: ArrayBuffer;
|
||||
|
||||
if (isFile(body)) {
|
||||
contentBuffer = await body.arrayBuffer();
|
||||
await writable.write(contentBuffer);
|
||||
} else if (body instanceof ReadableStream) {
|
||||
const chunks: Uint8Array[] = [];
|
||||
const reader = body.getReader();
|
||||
try {
|
||||
while (true) {
|
||||
const { done, value } = await reader.read();
|
||||
if (done) break;
|
||||
chunks.push(value);
|
||||
await writable.write(value);
|
||||
}
|
||||
} finally {
|
||||
reader.releaseLock();
|
||||
}
|
||||
// compute total size and combine chunks for etag
|
||||
const totalSize = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
|
||||
const combined = new Uint8Array(totalSize);
|
||||
let offset = 0;
|
||||
for (const chunk of chunks) {
|
||||
combined.set(chunk, offset);
|
||||
offset += chunk.length;
|
||||
}
|
||||
contentBuffer = combined.buffer;
|
||||
} else if (isBlob(body)) {
|
||||
contentBuffer = await (body as Blob).arrayBuffer();
|
||||
await writable.write(contentBuffer);
|
||||
} else {
|
||||
// body is ArrayBuffer or ArrayBufferView
|
||||
if (ArrayBuffer.isView(body)) {
|
||||
const view = body as ArrayBufferView;
|
||||
contentBuffer = view.buffer.slice(
|
||||
view.byteOffset,
|
||||
view.byteOffset + view.byteLength,
|
||||
) as ArrayBuffer;
|
||||
} else {
|
||||
contentBuffer = body as ArrayBuffer;
|
||||
}
|
||||
await writable.write(body);
|
||||
}
|
||||
|
||||
await writable.close();
|
||||
return await this.computeEtagFromArrayBuffer(contentBuffer);
|
||||
} catch (error) {
|
||||
await writable.abort();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async deleteObject(key: string): Promise<void> {
|
||||
try {
|
||||
const root = await this.rootPromise;
|
||||
await root.removeEntry(key);
|
||||
} catch {
|
||||
// file doesn't exist, which is fine
|
||||
}
|
||||
}
|
||||
|
||||
async objectExists(key: string): Promise<boolean> {
|
||||
try {
|
||||
const root = await this.rootPromise;
|
||||
await root.getFileHandle(key);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
private parseRangeHeader(
|
||||
rangeHeader: string,
|
||||
fileSize: number,
|
||||
): { start: number; end: number } | null {
|
||||
// parse "bytes=start-end" format
|
||||
const match = rangeHeader.match(/^bytes=(\d*)-(\d*)$/);
|
||||
if (!match) return null;
|
||||
|
||||
const [, startStr, endStr] = match;
|
||||
let start = startStr ? Number.parseInt(startStr, 10) : 0;
|
||||
let end = endStr ? Number.parseInt(endStr, 10) : fileSize - 1;
|
||||
|
||||
// handle suffix-byte-range-spec (e.g., "bytes=-500")
|
||||
if (!startStr && endStr) {
|
||||
start = Math.max(0, fileSize - Number.parseInt(endStr, 10));
|
||||
end = fileSize - 1;
|
||||
}
|
||||
|
||||
// validate range
|
||||
if (start < 0 || end >= fileSize || start > end) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return { start, end };
|
||||
}
|
||||
|
||||
async getObject(key: string, headers: Headers): Promise<Response> {
|
||||
try {
|
||||
const root = await this.rootPromise;
|
||||
const fileHandle = await root.getFileHandle(key);
|
||||
const file = await fileHandle.getFile();
|
||||
const fileSize = file.size;
|
||||
const mimeType = guessMimeType(key);
|
||||
|
||||
const responseHeaders = new Headers({
|
||||
"Accept-Ranges": "bytes",
|
||||
"Content-Type": mimeType || "application/octet-stream",
|
||||
});
|
||||
|
||||
const rangeHeader = headers.get("range");
|
||||
|
||||
if (rangeHeader) {
|
||||
const range = this.parseRangeHeader(rangeHeader, fileSize);
|
||||
|
||||
if (!range) {
|
||||
// invalid range - return 416 Range Not Satisfiable
|
||||
responseHeaders.set("Content-Range", `bytes */${fileSize}`);
|
||||
return new Response("", {
|
||||
status: 416,
|
||||
headers: responseHeaders,
|
||||
});
|
||||
}
|
||||
|
||||
const { start, end } = range;
|
||||
const arrayBuffer = await file.arrayBuffer();
|
||||
const chunk = arrayBuffer.slice(start, end + 1);
|
||||
|
||||
responseHeaders.set("Content-Range", `bytes ${start}-${end}/${fileSize}`);
|
||||
responseHeaders.set("Content-Length", chunk.byteLength.toString());
|
||||
|
||||
return new Response(chunk, {
|
||||
status: 206, // Partial Content
|
||||
headers: responseHeaders,
|
||||
});
|
||||
} else {
|
||||
// normal request - return entire file
|
||||
const content = await file.arrayBuffer();
|
||||
responseHeaders.set("Content-Length", content.byteLength.toString());
|
||||
|
||||
return new Response(content, {
|
||||
status: 200,
|
||||
headers: responseHeaders,
|
||||
});
|
||||
}
|
||||
} catch {
|
||||
// handle file reading errors
|
||||
return new Response("", { status: 404 });
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Direct object URLs are not supported by this adapter; OPFS files are not
 * addressable by a stable URL from outside the page context.
 * @throws Error always
 */
getObjectUrl(_key: string): string {
   throw new Error("Method not implemented.");
}
|
||||
|
||||
async getObjectMeta(key: string): Promise<FileMeta> {
|
||||
const root = await this.rootPromise;
|
||||
const fileHandle = await root.getFileHandle(key);
|
||||
const file = await fileHandle.getFile();
|
||||
|
||||
return {
|
||||
type: guessMimeType(key) || "application/octet-stream",
|
||||
size: file.size,
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Serialize the adapter's identity and configuration.
 * `_secrets` is accepted for interface compatibility but unused here.
 */
toJSON(_secrets?: boolean) {
   return { type: this.getName(), config: this.config };
}
|
||||
}
|
||||
@@ -1,10 +1,6 @@
|
||||
import { Admin, type BkndAdminProps } from "bknd/ui";
|
||||
import type { App } from "bknd";
|
||||
import "bknd/dist/styles.css";
|
||||
|
||||
export default function AdminPage({
|
||||
app,
|
||||
...props
|
||||
}: Omit<BkndAdminProps, "withProvider"> & { app: App }) {
|
||||
return <Admin {...props} withProvider={{ api: app.getApi() }} />;
|
||||
export default function AdminPage(props: BkndAdminProps) {
|
||||
return <Admin {...props} />;
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ import { defineConfig } from "vite";
|
||||
import react from "@vitejs/plugin-react";
|
||||
import tailwindcss from "@tailwindcss/vite";
|
||||
import tsconfigPaths from "vite-tsconfig-paths";
|
||||
import sqlocal from "sqlocal/vite";
|
||||
|
||||
// https://vite.dev/config/
|
||||
// https://sqlocal.dallashoffman.com/guide/setup#vite-configuration
|
||||
@@ -9,11 +10,16 @@ export default defineConfig({
|
||||
optimizeDeps: {
|
||||
exclude: ["sqlocal"],
|
||||
},
|
||||
|
||||
resolve: {
|
||||
dedupe: ["react", "react-dom"],
|
||||
},
|
||||
plugins: [
|
||||
sqlocal(),
|
||||
react(),
|
||||
tailwindcss(),
|
||||
tsconfigPaths(),
|
||||
{
|
||||
/* {
|
||||
name: "configure-response-headers",
|
||||
configureServer: (server) => {
|
||||
server.middlewares.use((_req, res, next) => {
|
||||
@@ -22,6 +28,6 @@ export default defineConfig({
|
||||
next();
|
||||
});
|
||||
},
|
||||
},
|
||||
}, */
|
||||
],
|
||||
});
|
||||
|
||||
@@ -16,12 +16,12 @@
|
||||
"prepublishOnly": "bun run typecheck && bun run test && bun run build"
|
||||
},
|
||||
"dependencies": {
|
||||
"sqlocal": "^0.14.0"
|
||||
"sqlocal": "^0.16.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@vitest/browser": "^3.0.8",
|
||||
"@vitest/ui": "^3.0.8",
|
||||
"@types/node": "^22.13.10",
|
||||
"@types/node": "^24.10.1",
|
||||
"bknd": "workspace:*",
|
||||
"kysely": "^0.27.6",
|
||||
"tsup": "^8.4.0",
|
||||
|
||||
@@ -1,51 +1,44 @@
|
||||
import { Kysely, ParseJSONResultsPlugin } from "kysely";
|
||||
import { SqliteConnection, SqliteIntrospector } from "bknd/data";
|
||||
import { SQLocalKysely } from "sqlocal/kysely";
|
||||
import type { ClientConfig } from "sqlocal";
|
||||
import { SqliteConnection, SqliteIntrospector, type DB } from "bknd";
|
||||
import type { SQLocalKysely } from "sqlocal/kysely";
|
||||
|
||||
const plugins = [new ParseJSONResultsPlugin()];
|
||||
|
||||
export type SQLocalConnectionConfig = Omit<ClientConfig, "databasePath"> & {
|
||||
// make it optional
|
||||
databasePath?: ClientConfig["databasePath"];
|
||||
};
|
||||
export class SQLocalConnection extends SqliteConnection<SQLocalKysely> {
|
||||
private connected: boolean = false;
|
||||
|
||||
export class SQLocalConnection extends SqliteConnection {
|
||||
private _client: SQLocalKysely | undefined;
|
||||
|
||||
constructor(private config: SQLocalConnectionConfig) {
|
||||
super(null as any, {}, plugins);
|
||||
constructor(client: SQLocalKysely) {
|
||||
// @ts-expect-error - config is protected
|
||||
client.config.onConnect = () => {
|
||||
// we need to listen for the connection, it will be awaited in init()
|
||||
this.connected = true;
|
||||
};
|
||||
super({
|
||||
kysely: new Kysely<any>({
|
||||
dialect: {
|
||||
...client.dialect,
|
||||
createIntrospector: (db: Kysely<DB>) => {
|
||||
return new SqliteIntrospector(db as any, {
|
||||
plugins,
|
||||
});
|
||||
},
|
||||
},
|
||||
plugins,
|
||||
}) as any,
|
||||
});
|
||||
this.client = client;
|
||||
}
|
||||
|
||||
override async init() {
|
||||
if (this.initialized) return;
|
||||
|
||||
await new Promise((resolve) => {
|
||||
this._client = new SQLocalKysely({
|
||||
...this.config,
|
||||
databasePath: this.config.databasePath ?? "session",
|
||||
onConnect: (r) => {
|
||||
this.kysely = new Kysely<any>({
|
||||
dialect: {
|
||||
...this._client!.dialect,
|
||||
createIntrospector: (db: Kysely<any>) => {
|
||||
return new SqliteIntrospector(db, {
|
||||
plugins,
|
||||
});
|
||||
},
|
||||
},
|
||||
plugins,
|
||||
});
|
||||
this.config.onConnect?.(r);
|
||||
resolve(1);
|
||||
},
|
||||
});
|
||||
});
|
||||
super.init();
|
||||
}
|
||||
|
||||
get client(): SQLocalKysely {
|
||||
if (!this._client) throw new Error("Client not initialized");
|
||||
return this._client!;
|
||||
let tries = 0;
|
||||
while (!this.connected && tries < 100) {
|
||||
tries++;
|
||||
await new Promise((resolve) => setTimeout(resolve, 5));
|
||||
}
|
||||
if (!this.connected) {
|
||||
throw new Error("Failed to connect to SQLite database");
|
||||
}
|
||||
this.initialized = true;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1 +1 @@
|
||||
export { SQLocalConnection, type SQLocalConnectionConfig } from "./SQLocalConnection";
|
||||
export { SQLocalConnection } from "./SQLocalConnection";
|
||||
|
||||
@@ -1,14 +1,15 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { SQLocalConnection, type SQLocalConnectionConfig } from "../src";
|
||||
import { SQLocalConnection } from "../src";
|
||||
import type { ClientConfig } from "sqlocal";
|
||||
import { SQLocalKysely } from "sqlocal/kysely";
|
||||
|
||||
describe(SQLocalConnection, () => {
|
||||
function create(config: SQLocalConnectionConfig = {}) {
|
||||
return new SQLocalConnection(config);
|
||||
function create(config: ClientConfig = { databasePath: ":memory:" }) {
|
||||
return new SQLocalConnection(new SQLocalKysely(config));
|
||||
}
|
||||
|
||||
it("constructs", async () => {
|
||||
const connection = create();
|
||||
expect(() => connection.client).toThrow();
|
||||
await connection.init();
|
||||
expect(connection.client).toBeDefined();
|
||||
expect(await connection.client.sql`SELECT 1`).toEqual([{ "1": 1 }]);
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { SQLocalConnection, type SQLocalConnectionConfig } from "../src";
|
||||
import { createApp } from "bknd";
|
||||
import * as proto from "bknd/data";
|
||||
import { describe, expect, it } from "bun:test";
|
||||
import { SQLocalConnection } from "../src";
|
||||
import { createApp, em, entity, text } from "bknd";
|
||||
import type { ClientConfig } from "sqlocal";
|
||||
import { SQLocalKysely } from "sqlocal/kysely";
|
||||
|
||||
describe("integration", () => {
|
||||
function create(config: SQLocalConnectionConfig = { databasePath: ":memory:" }) {
|
||||
return new SQLocalConnection(config);
|
||||
function create(config: ClientConfig = { databasePath: ":memory:" }) {
|
||||
return new SQLocalConnection(new SQLocalKysely(config));
|
||||
}
|
||||
|
||||
it("should create app and ping", async () => {
|
||||
@@ -19,14 +20,14 @@ describe("integration", () => {
|
||||
});
|
||||
|
||||
it("should create a basic schema", async () => {
|
||||
const schema = proto.em(
|
||||
const schema = em(
|
||||
{
|
||||
posts: proto.entity("posts", {
|
||||
title: proto.text().required(),
|
||||
content: proto.text(),
|
||||
posts: entity("posts", {
|
||||
title: text().required(),
|
||||
content: text(),
|
||||
}),
|
||||
comments: proto.entity("comments", {
|
||||
content: proto.text(),
|
||||
comments: entity("comments", {
|
||||
content: text(),
|
||||
}),
|
||||
},
|
||||
(fns, s) => {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
/// <reference types="vitest" />
|
||||
/// <reference types="@vitest/browser/providers/webdriverio" />
|
||||
import { defineConfig } from "vite";
|
||||
import { defineConfig } from "vitest/config";
|
||||
|
||||
// https://github.com/DallasHoff/sqlocal/blob/main/vite.config.ts
|
||||
export default defineConfig({
|
||||
|
||||
Reference in New Issue
Block a user