rewrite the libsql and cloudflare sqlite adapters to use the generic adapter

This commit is contained in:
dswbx
2025-07-02 14:02:33 +02:00
parent a9f3a582eb
commit d41fd5541f
16 changed files with 533 additions and 458 deletions

View File

@@ -4,51 +4,30 @@ import { viTestRunner } from "adapter/node/vitest";
import { connectionTestSuite } from "data/connection/connection-test-suite";
import { Miniflare } from "miniflare";
import { d1Sqlite } from "./D1Connection";
import { sql } from "kysely";
describe("d1Sqlite", async () => {
   // Shared Miniflare instance providing a real D1 binding for the ad-hoc tests below.
   const mf = new Miniflare({
      modules: true,
      script: "export default { async fetch() { return new Response(null); } }",
      d1Databases: ["DB"],
   });
   const binding = (await mf.getD1Database("DB")) as D1Database;

   test("connection", async () => {
      const conn = d1Sqlite({ binding });
      // D1 advertises batching but no soft scans.
      expect(conn.supports("batching")).toBe(true);
      expect(conn.supports("softscans")).toBe(false);
   });

   test("query details", async () => {
      const conn = d1Sqlite({ binding });
      const res = await conn.executeQuery(sql`select 1`.compile(conn.kysely));
      expect(res.rows).toEqual([{ "1": 1 }]);
      // A pure SELECT reports neither affected rows nor an insert id.
      expect(res.numAffectedRows).toBe(undefined);
      expect(res.insertId).toBe(undefined);
      // @ts-expect-error meta is driver-specific extra data, not in QueryResult
      expect(res.meta.changed_db).toBe(false);
      // @ts-expect-error meta is driver-specific extra data, not in QueryResult
      expect(res.meta.rows_read).toBe(0);

      const batchResult = await conn.executeQueries(
         sql`select 1`.compile(conn.kysely),
         sql`select 2`.compile(conn.kysely),
      );
      // Each batched statement yields its own result, in submission order.
      for (const [index, result] of batchResult.entries()) {
         expect(result.rows).toEqual([{ [String(index + 1)]: index + 1 }]);
         expect(result.numAffectedRows).toBe(undefined);
         expect(result.insertId).toBe(undefined);
         // @ts-expect-error meta is driver-specific extra data, not in QueryResult
         expect(result.meta.changed_db).toBe(false);
      }
   });

   connectionTestSuite(viTestRunner, {
      // NOTE(review): `makeConnection` and `rawDialectDetails` each appear twice in
      // this object literal — this looks like old and new diff lines rendered into
      // one document; the later entries win at runtime. Confirm against the real file.
      makeConnection: () => d1Sqlite({ binding }),
      rawDialectDetails: [],
      makeConnection: async () => {
         // Fresh Miniflare per suite run so the suite can dispose it independently.
         const mf = new Miniflare({
            modules: true,
            script: "export default { async fetch() { return new Response(null); } }",
            d1Databases: ["DB"],
         });
         const binding = (await mf.getD1Database("DB")) as D1Database;
         return {
            connection: d1Sqlite({ binding }),
            dispose: () => mf.dispose(),
         };
      },
      rawDialectDetails: [
         "meta.served_by",
         "meta.duration",
         "meta.changes",
         "meta.changed_db",
         "meta.size_after",
         "meta.rows_read",
         "meta.rows_written",
      ],
   });
});

View File

@@ -1,138 +0,0 @@
import {
SqliteAdapter,
SqliteIntrospector,
SqliteQueryCompiler,
type CompiledQuery,
type DatabaseConnection,
type DatabaseIntrospector,
type Dialect,
type Driver,
type Kysely,
type QueryCompiler,
type QueryResult,
} from "kysely";
/**
 * Config for the D1 dialect. Pass your D1 instance to this object that you bound in `wrangler.toml`.
 */
export interface D1DialectConfig {
   // The Cloudflare D1 binding (from the Worker environment) that all queries run against.
   database: D1Database;
}
/**
* D1 dialect that adds support for [Cloudflare D1][0] in [Kysely][1].
* The constructor takes the instance of your D1 database that you bound in `wrangler.toml`.
*
* ```typescript
* new D1Dialect({
* database: env.DB,
* })
* ```
*
* [0]: https://blog.cloudflare.com/introducing-d1/
* [1]: https://github.com/koskimas/kysely
*/
export class D1Dialect implements Dialect {
   // Immutable configuration holding the bound D1 database.
   readonly #config: D1DialectConfig;

   constructor(config: D1DialectConfig) {
      this.#config = config;
   }

   /** D1 speaks SQLite, so the stock SQLite adapter applies unchanged. */
   createAdapter(): SqliteAdapter {
      return new SqliteAdapter();
   }

   /** Produces the driver that bridges Kysely to the D1 binding. */
   createDriver(): Driver {
      return new D1Driver(this.#config);
   }

   /** SQL generation is plain SQLite syntax. */
   createQueryCompiler(): QueryCompiler {
      return new SqliteQueryCompiler();
   }

   /** Schema introspection reuses Kysely's built-in SQLite introspector. */
   createIntrospector(db: Kysely<any>): DatabaseIntrospector {
      return new SqliteIntrospector(db);
   }
}
class D1Driver implements Driver {
   // Shared config; every connection handed out reuses the same D1 binding.
   readonly #config: D1DialectConfig;

   constructor(config: D1DialectConfig) {
      this.#config = config;
   }

   /** Nothing to initialize — the binding is ready when the Worker starts. */
   async init(): Promise<void> {}

   /** D1 has no pooling; each acquisition is a fresh lightweight wrapper. */
   async acquireConnection(): Promise<DatabaseConnection> {
      return new D1Connection(this.#config);
   }

   // Transaction control is delegated to the connection (which rejects it).
   async beginTransaction(conn: D1Connection): Promise<void> {
      await conn.beginTransaction();
   }

   async commitTransaction(conn: D1Connection): Promise<void> {
      await conn.commitTransaction();
   }

   async rollbackTransaction(conn: D1Connection): Promise<void> {
      await conn.rollbackTransaction();
   }

   /** Connections hold no resources, so release and destroy are no-ops. */
   async releaseConnection(_conn: D1Connection): Promise<void> {}

   async destroy(): Promise<void> {}
}
class D1Connection implements DatabaseConnection {
   #config: D1DialectConfig;

   constructor(config: D1DialectConfig) {
      this.#config = config;
   }

   /**
    * Runs a single compiled query through the D1 binding and maps the D1
    * result envelope onto Kysely's QueryResult shape.
    */
   async executeQuery<O>(compiledQuery: CompiledQuery): Promise<QueryResult<O>> {
      const { sql, parameters } = compiledQuery;
      const results = await this.#config.database
         .prepare(sql)
         .bind(...parameters)
         .all();

      if (results.error) {
         throw new Error(results.error);
      }

      const { changes, last_row_id } = results.meta;
      // Kysely expects bigint counts; report undefined when nothing changed.
      const numAffectedRows = changes > 0 ? BigInt(changes) : undefined;
      // `== null` deliberately covers both null and undefined last_row_id.
      const insertId = last_row_id == null ? undefined : BigInt(last_row_id);

      return {
         insertId,
         rows: (results?.results as O[]) || [],
         numAffectedRows,
         // @ts-ignore deprecated in kysely >= 0.23, keep for backward compatibility.
         numUpdatedOrDeletedRows: numAffectedRows,
      };
   }

   // D1 exposes no interactive transaction API, so these always reject.
   async beginTransaction() {
      throw new Error("Transactions are not supported yet.");
   }

   async commitTransaction() {
      throw new Error("Transactions are not supported yet.");
   }

   async rollbackTransaction() {
      throw new Error("Transactions are not supported yet.");
   }

   // biome-ignore lint/correctness/useYield: intentionally throws before yielding
   async *streamQuery<O>(
      _compiledQuery: CompiledQuery,
      _chunkSize: number,
   ): AsyncIterableIterator<QueryResult<O>> {
      throw new Error("D1 Driver does not support streaming");
   }
}

View File

@@ -0,0 +1,83 @@
/// <reference types="@cloudflare/workers-types" />
import {
genericSqlite,
type GenericSqliteConnection,
} from "data/connection/sqlite/GenericSqliteConnection";
import type { QueryResult } from "kysely";
// Connection type for the plain D1 binding.
// NOTE(review): defined here in the DO connection module — looks like shared or
// leftover naming from the D1 adapter; confirm intended location.
export type D1SqliteConnection = GenericSqliteConnection<D1Database>;

// The `sql` handle on a Durable Object's storage.
// NOTE(review): name is missing a "t" (DurableObjectSql); it is exported, so
// renaming would break consumers — keep until a deprecation alias is added.
export type DurableObjecSql = DurableObjectState["storage"]["sql"];

// Either the whole DurableObjectState, or anything exposing a `sql` handle
// directly (used by tests to pass a mock).
// NOTE(review): the "D1" prefix looks copy-pasted from the D1 connection — confirm.
export type D1ConnectionConfig<DB extends DurableObjecSql> =
   | DurableObjectState
   | {
        sql: DB;
     };
/**
 * Creates a connection backed by a Durable Object's SQLite storage.
 *
 * Accepts either the DurableObjectState itself or any object exposing a
 * `sql` storage handle, and wires it into the generic sqlite adapter.
 */
export function doSqlite<DB extends DurableObjecSql>(config: D1ConnectionConfig<DB>) {
   // Normalize both accepted config shapes down to the raw sql handle.
   const db = "sql" in config ? config.sql : config.storage.sql;

   return genericSqlite(
      "do-sqlite",
      db,
      (utils) => {
         // Kept async so it also works with async mocks (e.g. the miniflare
         // RPC stub used in tests), even though DO `exec` is synchronous.
         const execute = async (sql: string, parameters?: any[] | readonly any[]) =>
            await db.exec(sql, ...(parameters || []));

         // Translate a DO storage cursor into Kysely's QueryResult envelope.
         const mapResult = (
            cursor: SqlStorageCursor<Record<string, SqlStorageValue>>,
         ): QueryResult<any> => {
            const numAffectedRows =
               cursor.rowsWritten > 0 ? utils.parseBigInt(cursor.rowsWritten) : undefined;

            return {
               // DO sqlite does not expose last_row_id, so insertId is unknown.
               insertId: undefined,
               numAffectedRows,
               rows: cursor.toArray() || [],
               // @ts-ignore extra driver metadata surfaced for diagnostics
               meta: {
                  rowsWritten: cursor.rowsWritten,
                  rowsRead: cursor.rowsRead,
                  databaseSize: db.databaseSize,
               },
            };
         };

         return {
            db,
            batch: async (stmts) => {
               // Run statements strictly in submission order. D1 wraps batches
               // in an implicit transaction; until that is mirrored here,
               // sequential awaiting at least guarantees ordering — Promise.all
               // would let an async backend (like the test's RPC mock)
               // interleave statement execution.
               // @todo: maybe wrap in a transaction?
               const results: QueryResult<any>[] = [];
               for (const stmt of stmts) {
                  results.push(mapResult(await execute(stmt.sql, stmt.parameters)));
               }
               return results;
            },
            query: utils.buildQueryFn({
               all: async (sql, parameters) => {
                  const cursor = await execute(sql, parameters);
                  return mapResult(cursor).rows;
               },
               run: async (sql, parameters) => {
                  const cursor = await execute(sql, parameters);
                  return mapResult(cursor);
               },
            }),
            // DO storage lives and dies with the object; nothing to close.
            close: () => {},
         };
      },
      {
         supports: {
            batching: true,
            softscans: false,
         },
         // Cloudflare-internal bookkeeping tables, hidden from introspection.
         excludeTables: ["_cf_KV", "_cf_METADATA"],
      },
   );
}

View File

@@ -0,0 +1,92 @@
/// <reference types="@cloudflare/workers-types" />
import { describe, test, expect } from "vitest";
import { viTestRunner } from "adapter/node/vitest";
import { connectionTestSuite } from "data/connection/connection-test-suite";
import { Miniflare } from "miniflare";
import { doSqlite } from "./DoConnection";
// Worker script executed inside Miniflare: a SQLite-backed Durable Object whose
// `exec` RPC serializes cursor results (rows + read/write counters + db size)
// back to the test, since a live SqlStorageCursor cannot cross the RPC boundary.
const script = `
import { DurableObject } from "cloudflare:workers";
export class TestObject extends DurableObject {
constructor(ctx, env) {
super(ctx, env);
this.storage = ctx.storage;
}
async exec(sql, ...parameters) {
//return { sql, parameters }
const cursor = this.storage.sql.exec(sql, ...parameters);
return {
rows: cursor.toArray() || [],
rowsWritten: cursor.rowsWritten,
rowsRead: cursor.rowsRead,
databaseSize: this.storage.sql.databaseSize,
}
}
async databaseSize() {
return this.storage.sql.databaseSize;
}
}
export default {
async fetch(request, env) {
const stub = env.TEST_OBJECT.get(env.TEST_OBJECT.idFromName("test"));
return stub.fetch(request);
}
}
`;
describe("doSqlite", async () => {
   connectionTestSuite(viTestRunner, {
      makeConnection: async () => {
         // Boot an isolated Miniflare instance hosting the SQLite-backed DO.
         const mf = new Miniflare({
            modules: true,
            durableObjects: { TEST_OBJECT: { className: "TestObject", useSQLite: true } },
            script,
         });
         const doNamespace = await mf.getDurableObjectNamespace("TEST_OBJECT");
         const remote = doNamespace.get(doNamespace.idFromName("test")) as unknown as DurableObjectStub<
            Rpc.DurableObjectBranded & {
               exec: (sql: string, ...parameters: any[]) => Promise<any>;
            }
         >;

         // Every serialized RPC result is retained so its disposer can be
         // invoked during teardown.
         const rpcResults: any[] = [];

         // Facade that mimics the DO `sql` handle on top of the RPC stub.
         const sqlFacade = {
            databaseSize: 0,
            // regular function (not arrow) so `this.databaseSize` binds to the facade
            exec: async function (sql: string, ...parameters: any[]) {
               // @ts-ignore
               const result = (await remote.exec(sql, ...parameters)) as any;
               this.databaseSize = result.databaseSize;
               rpcResults.push(result);
               return {
                  toArray: () => result.rows,
                  rowsWritten: result.rowsWritten,
                  rowsRead: result.rowsRead,
               };
            },
         };

         return {
            connection: doSqlite({ sql: sqlFacade as any }),
            dispose: async () => {
               // Best-effort disposal of RPC result stubs before tearing down miniflare.
               await Promise.all(
                  rpcResults.map((result) => {
                     try {
                        return result[Symbol.dispose]();
                     } catch (e) {}
                  }),
               );
               await mf.dispose();
            },
         };
      },
      rawDialectDetails: ["meta.rowsWritten", "meta.rowsRead", "meta.databaseSize"],
   });
});