mirror of
https://github.com/shishantbiswas/bknd.git
synced 2026-03-16 04:27:21 +00:00
move postgres to a separate package
This commit is contained in:
50
packages/postgres/README.md
Normal file
50
packages/postgres/README.md
Normal file
@@ -0,0 +1,50 @@
|
||||
# Postgres adapter for `bknd` (experimental)
|
||||
This package adds an adapter to use a Postgres database with `bknd`. It is based on `pg` and the driver included in `kysely`.
|
||||
|
||||
## Installation
|
||||
Install the adapter with:
|
||||
```bash
|
||||
npm install @bknd/postgres
|
||||
```
|
||||
|
||||
## Usage
|
||||
Create a connection:
|
||||
|
||||
```ts
|
||||
import { PostgresConnection } from "@bknd/postgres";
|
||||
|
||||
const connection = new PostgresConnection({
|
||||
host: "localhost",
|
||||
port: 5432,
|
||||
user: "postgres",
|
||||
password: "postgres",
|
||||
database: "bknd",
|
||||
});
|
||||
```
|
||||
|
||||
Use the connection depending on which framework or runtime you are using. E.g., when using `createApp`, you can use the connection as follows:
|
||||
|
||||
```ts
|
||||
import { createApp } from "bknd";
|
||||
import { PostgresConnection } from "@bknd/postgres";
|
||||
|
||||
const connection = new PostgresConnection();
|
||||
const app = createApp({ connection });
|
||||
```
|
||||
|
||||
Or if you're using it with a framework, say Next.js, you can add the connection object to where you're initializing the app:
|
||||
|
||||
```ts
|
||||
// e.g. in src/app/api/[[...bknd]]/route.ts
|
||||
import { serve } from "bknd/adapter/nextjs";
|
||||
import { PostgresConnection } from "@bknd/postgres";
|
||||
|
||||
const connection = new PostgresConnection();
|
||||
const handler = serve({
|
||||
connection
|
||||
})
|
||||
|
||||
// ...
|
||||
```
|
||||
|
||||
For more information about how to integrate Next.js in general, check out the [Next.js documentation](https://docs.bknd.io/integration/nextjs).
|
||||
37
packages/postgres/package.json
Normal file
37
packages/postgres/package.json
Normal file
@@ -0,0 +1,37 @@
|
||||
{
|
||||
"name": "@bknd/postgres",
|
||||
"version": "0.0.1",
|
||||
"type": "module",
|
||||
"main": "dist/index.js",
|
||||
"module": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
"scripts": {
|
||||
"build": "tsup",
|
||||
"test": "bun test",
|
||||
"docker:start": "docker run --rm --name bknd-test-postgres -d -e POSTGRES_PASSWORD=postgres -e POSTGRES_USER=postgres -e POSTGRES_DB=bknd -p 5430:5432 postgres:17",
|
||||
"docker:stop": "docker stop bknd-test-postgres"
|
||||
},
|
||||
"dependencies": {
|
||||
"pg": "^8.12.0",
|
||||
"kysely": "^0.27.6"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/bun": "^1.2.5",
|
||||
"@types/node": "^22.13.10",
|
||||
"@types/pg": "^8.11.11",
|
||||
"bknd": "workspace:*",
|
||||
"tsup": "^8.4.0",
|
||||
"typescript": "^5.6.3"
|
||||
},
|
||||
"tsup": {
|
||||
"entry": ["src/index.ts"],
|
||||
"format": ["esm"],
|
||||
"target": "es2022",
|
||||
"clean": true,
|
||||
"minify": true,
|
||||
"dts": true,
|
||||
"metafile": true,
|
||||
"external": ["bknd", "pg", "kysely"]
|
||||
},
|
||||
"files": ["dist", "!*.map", "!metafile*.json"]
|
||||
}
|
||||
106
packages/postgres/src/PostgresConnection.ts
Normal file
106
packages/postgres/src/PostgresConnection.ts
Normal file
@@ -0,0 +1,106 @@
|
||||
import { Connection, type FieldSpec, type SchemaResponse } from "bknd/data";
|
||||
import {
|
||||
type ColumnDataType,
|
||||
type ColumnDefinitionBuilder,
|
||||
type DatabaseIntrospector,
|
||||
Kysely,
|
||||
ParseJSONResultsPlugin,
|
||||
PostgresDialect,
|
||||
type SelectQueryBuilder,
|
||||
} from "kysely";
|
||||
import { jsonArrayFrom, jsonBuildObject, jsonObjectFrom } from "kysely/helpers/postgres";
|
||||
import pg from "pg";
|
||||
import { PostgresIntrospector } from "./PostgresIntrospector";
|
||||
|
||||
// The adapter's config is exactly pg's pool options (host, port, user, ...).
export type PostgresConnectionConfig = pg.PoolConfig;
// Shorthand for an arbitrary kysely select builder; used by batch().
export type QB = SelectQueryBuilder<any, any, any>;

// Shared kysely plugin list: parses JSON-typed columns in query results.
const plugins = [new ParseJSONResultsPlugin()];
|
||||
|
||||
// Postgres dialect that swaps kysely's default introspector for the
// bknd-aware PostgresIntrospector defined in this package.
class CustomPostgresDialect extends PostgresDialect {
   override createIntrospector(db: Kysely<any>): DatabaseIntrospector {
      return new PostgresIntrospector(db, {
         excludeTables: [],
      });
   }
}
|
||||
|
||||
export class PostgresConnection extends Connection {
|
||||
protected override readonly supported = {
|
||||
batching: true,
|
||||
};
|
||||
private pool: pg.Pool;
|
||||
|
||||
constructor(config: PostgresConnectionConfig) {
|
||||
const pool = new pg.Pool(config);
|
||||
const kysely = new Kysely({
|
||||
dialect: new CustomPostgresDialect({
|
||||
pool,
|
||||
}),
|
||||
plugins,
|
||||
//log: ["query", "error"],
|
||||
});
|
||||
|
||||
super(
|
||||
kysely,
|
||||
{
|
||||
jsonArrayFrom,
|
||||
jsonBuildObject,
|
||||
jsonObjectFrom,
|
||||
},
|
||||
plugins,
|
||||
);
|
||||
this.pool = pool;
|
||||
}
|
||||
|
||||
override getFieldSchema(spec: FieldSpec): SchemaResponse {
|
||||
this.validateFieldSpecType(spec.type);
|
||||
let type: ColumnDataType = spec.primary ? "serial" : spec.type;
|
||||
|
||||
switch (spec.type) {
|
||||
case "blob":
|
||||
type = "bytea";
|
||||
break;
|
||||
case "date":
|
||||
case "datetime":
|
||||
// https://www.postgresql.org/docs/17/datatype-datetime.html
|
||||
type = "timestamp";
|
||||
break;
|
||||
case "text":
|
||||
// https://www.postgresql.org/docs/17/datatype-character.html
|
||||
type = "varchar";
|
||||
break;
|
||||
}
|
||||
|
||||
return [
|
||||
spec.name,
|
||||
type,
|
||||
(col: ColumnDefinitionBuilder) => {
|
||||
if (spec.primary) {
|
||||
return col.primaryKey();
|
||||
}
|
||||
if (spec.references) {
|
||||
return col
|
||||
.references(spec.references)
|
||||
.onDelete(spec.onDelete ?? "set null")
|
||||
.onUpdate(spec.onUpdate ?? "no action");
|
||||
}
|
||||
return spec.nullable ? col : col.notNull();
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
override async close(): Promise<void> {
|
||||
await this.pool.end();
|
||||
}
|
||||
|
||||
protected override async batch<Queries extends QB[]>(
|
||||
queries: [...Queries],
|
||||
): Promise<{
|
||||
[K in keyof Queries]: Awaited<ReturnType<Queries[K]["execute"]>>;
|
||||
}> {
|
||||
return this.kysely.transaction().execute(async (trx) => {
|
||||
return Promise.all(queries.map((q) => trx.executeQuery(q).then((r) => r.rows)));
|
||||
}) as any;
|
||||
}
|
||||
}
|
||||
127
packages/postgres/src/PostgresIntrospector.ts
Normal file
127
packages/postgres/src/PostgresIntrospector.ts
Normal file
@@ -0,0 +1,127 @@
|
||||
import { type SchemaMetadata, sql } from "kysely";
|
||||
import { BaseIntrospector } from "bknd/data";
|
||||
|
||||
type PostgresSchemaSpec = {
|
||||
name: string;
|
||||
type: "VIEW" | "BASE TABLE";
|
||||
columns: {
|
||||
name: string;
|
||||
type: string;
|
||||
notnull: number;
|
||||
dflt: string;
|
||||
pk: boolean;
|
||||
}[];
|
||||
indices: {
|
||||
name: string;
|
||||
origin: string;
|
||||
partial: number;
|
||||
sql: string;
|
||||
columns: { name: string; seqno: number }[];
|
||||
}[];
|
||||
};
|
||||
|
||||
export class PostgresIntrospector extends BaseIntrospector {
|
||||
async getSchemas(): Promise<SchemaMetadata[]> {
|
||||
const rawSchemas = await this.db
|
||||
.selectFrom("pg_catalog.pg_namespace")
|
||||
.select("nspname")
|
||||
.$castTo<{ nspname: string }>()
|
||||
.execute();
|
||||
|
||||
return rawSchemas.map((it) => ({ name: it.nspname }));
|
||||
}
|
||||
|
||||
async getSchemaSpec() {
|
||||
const query = sql`
|
||||
WITH tables_and_views AS (
|
||||
SELECT table_name AS name,
|
||||
table_type AS type
|
||||
FROM information_schema.tables
|
||||
WHERE table_schema = 'public'
|
||||
AND table_type IN ('BASE TABLE', 'VIEW')
|
||||
AND table_name NOT LIKE 'pg_%'
|
||||
AND table_name NOT IN (${this.getExcludedTableNames().join(", ")})
|
||||
),
|
||||
|
||||
columns_info AS (
|
||||
SELECT table_name AS name,
|
||||
json_agg(json_build_object(
|
||||
'name', column_name,
|
||||
'type', data_type,
|
||||
'notnull', (CASE WHEN is_nullable = 'NO' THEN true ELSE false END),
|
||||
'dflt', column_default,
|
||||
'pk', (SELECT COUNT(*) > 0
|
||||
FROM information_schema.table_constraints tc
|
||||
INNER JOIN information_schema.key_column_usage kcu
|
||||
ON tc.constraint_name = kcu.constraint_name
|
||||
WHERE tc.table_name = c.table_name
|
||||
AND tc.constraint_type = 'PRIMARY KEY'
|
||||
AND kcu.column_name = c.column_name)
|
||||
)) AS columns
|
||||
FROM information_schema.columns c
|
||||
WHERE table_schema = 'public'
|
||||
GROUP BY table_name
|
||||
),
|
||||
|
||||
indices_info AS (
|
||||
SELECT
|
||||
t.relname AS table_name,
|
||||
json_agg(json_build_object(
|
||||
'name', i.relname,
|
||||
'origin', pg_get_indexdef(i.oid),
|
||||
'partial', (CASE WHEN ix.indisvalid THEN false ELSE true END),
|
||||
'sql', pg_get_indexdef(i.oid),
|
||||
'columns', (
|
||||
SELECT json_agg(json_build_object(
|
||||
'name', a.attname,
|
||||
'seqno', x.ordinal_position
|
||||
))
|
||||
FROM unnest(ix.indkey) WITH ORDINALITY AS x(attnum, ordinal_position)
|
||||
JOIN pg_attribute a ON a.attnum = x.attnum AND a.attrelid = t.oid
|
||||
))) AS indices
|
||||
FROM pg_class t
|
||||
LEFT JOIN pg_index ix ON t.oid = ix.indrelid
|
||||
LEFT JOIN pg_class i ON i.oid = ix.indexrelid
|
||||
WHERE t.relkind IN ('r', 'v') -- r = table, v = view
|
||||
AND t.relname NOT LIKE 'pg_%'
|
||||
GROUP BY t.relname
|
||||
)
|
||||
|
||||
SELECT
|
||||
tv.name,
|
||||
tv.type,
|
||||
ci.columns,
|
||||
ii.indices
|
||||
FROM tables_and_views tv
|
||||
LEFT JOIN columns_info ci ON tv.name = ci.name
|
||||
LEFT JOIN indices_info ii ON tv.name = ii.table_name;
|
||||
`;
|
||||
|
||||
const tables = await this.executeWithPlugins<PostgresSchemaSpec[]>(query);
|
||||
|
||||
return tables.map((table) => ({
|
||||
name: table.name,
|
||||
isView: table.type === "VIEW",
|
||||
columns: table.columns.map((col) => {
|
||||
return {
|
||||
name: col.name,
|
||||
dataType: col.type,
|
||||
isNullable: !col.notnull,
|
||||
// @todo: check default value on 'nextval' see https://www.postgresql.org/docs/17/datatype-numeric.html#DATATYPE-SERIAL
|
||||
isAutoIncrementing: true, // just for now
|
||||
hasDefaultValue: col.dflt != null,
|
||||
comment: undefined,
|
||||
};
|
||||
}),
|
||||
indices: table.indices.map((index) => ({
|
||||
name: index.name,
|
||||
table: table.name,
|
||||
isUnique: index.sql?.match(/unique/i) != null,
|
||||
columns: index.columns.map((col) => ({
|
||||
name: col.name,
|
||||
order: col.seqno,
|
||||
})),
|
||||
})),
|
||||
}));
|
||||
}
|
||||
}
|
||||
2
packages/postgres/src/index.ts
Normal file
2
packages/postgres/src/index.ts
Normal file
@@ -0,0 +1,2 @@
|
||||
export { PostgresConnection, type PostgresConnectionConfig } from "./PostgresConnection";
|
||||
export { PostgresIntrospector } from "./PostgresIntrospector";
|
||||
19
packages/postgres/test/base.test.ts
Normal file
19
packages/postgres/test/base.test.ts
Normal file
@@ -0,0 +1,19 @@
|
||||
import { describe, it, expect } from "bun:test";
|
||||
|
||||
import { PostgresConnection } from "../src";
|
||||
import { createConnection, cleanDatabase } from "./setup";
|
||||
|
||||
// Smoke tests against the local postgres instance started via the
// package.json `docker:start` script.
// NOTE(review): connections opened here are never closed — presumably relies
// on process exit; confirm no pool-drain warnings under bun.
describe(PostgresConnection, () => {
   it("should connect to the database", async () => {
      const connection = createConnection();
      expect(await connection.ping()).toBe(true);
   });

   it("should clean the database", async () => {
      const connection = createConnection();
      await cleanDatabase(connection);

      // dropping & recreating the public schema must leave zero tables
      const tables = await connection.getIntrospector().getTables();
      expect(tables).toEqual([]);
   });
});
|
||||
113
packages/postgres/test/integration.test.ts
Normal file
113
packages/postgres/test/integration.test.ts
Normal file
@@ -0,0 +1,113 @@
|
||||
import { describe, it, expect, beforeAll, afterAll, afterEach } from "bun:test";
|
||||
|
||||
import { createApp } from "bknd";
|
||||
import * as proto from "bknd/data";
|
||||
|
||||
import { createConnection, cleanDatabase } from "./setup";
|
||||
import type { PostgresConnection } from "../src";
|
||||
|
||||
// Shared connection for all integration tests; the database is wiped before
// the suite and after each test so cases don't leak state into each other.
let connection: PostgresConnection;
beforeAll(async () => {
   connection = createConnection();
   await cleanDatabase(connection);
});

afterEach(async () => {
   await cleanDatabase(connection);
});

afterAll(async () => {
   await connection.close();
});

describe("integration", () => {
   it("should create app and ping", async () => {
      const app = createApp({
         connection,
      });
      await app.build();

      expect(app.version()).toBeDefined();
      expect(await app.em.ping()).toBe(true);
   });

   it("should create a basic schema", async () => {
      // posts (title required) 1:n comments, plus a unique index on title
      const schema = proto.em(
         {
            posts: proto.entity("posts", {
               title: proto.text().required(),
               content: proto.text(),
            }),
            comments: proto.entity("comments", {
               content: proto.text(),
            }),
         },
         (fns, s) => {
            fns.relation(s.comments).manyToOne(s.posts);
            // second arg `true` — presumably marks the index unique; verify
            fns.index(s.posts).on(["title"], true);
         },
      );

      const app = createApp({
         connection,
         initialConfig: {
            data: schema.toJSON(),
         },
      });

      await app.build();

      expect(app.em.entities.length).toBe(2);
      expect(app.em.entities.map((e) => e.name)).toEqual(["posts", "comments"]);

      const api = app.getApi();

      // bulk insert returns the created rows including generated serial ids
      expect(
         (
            await api.data.createMany("posts", [
               {
                  title: "Hello",
                  content: "World",
               },
               {
                  title: "Hello 2",
                  content: "World 2",
               },
            ])
         ).data,
      ).toEqual([
         {
            id: 1,
            title: "Hello",
            content: "World",
         },
         {
            id: 2,
            title: "Hello 2",
            content: "World 2",
         },
      ] as any);

      // try to create an existing — unique index on title must reject it
      expect(
         (
            await api.data.createOne("posts", {
               title: "Hello",
            })
         ).ok,
      ).toBe(false);

      // add a comment to a post
      await api.data.createOne("comments", {
         content: "Hello",
         posts_id: 1,
      });

      // and then query using a `with` property
      const result = await api.data.readMany("posts", { with: ["comments"] });
      expect(result.length).toBe(2);
      expect(result[0].comments.length).toBe(1);
      expect(result[0].comments[0].content).toBe("Hello");
      expect(result[1].comments.length).toBe(0);
   });
});
|
||||
25
packages/postgres/test/setup.ts
Normal file
25
packages/postgres/test/setup.ts
Normal file
@@ -0,0 +1,25 @@
|
||||
import type { Kysely } from "kysely";
|
||||
import { PostgresConnection, PostgresIntrospector, type PostgresConnectionConfig } from "../src";
|
||||
|
||||
// Credentials matching the container started by the package.json
// `docker:start` script (host port 5430 mapped to the container's 5432).
export const info = {
   host: "localhost",
   port: 5430,
   user: "postgres",
   password: "postgres",
   database: "bknd",
};
|
||||
|
||||
export function createConnection(config: PostgresConnectionConfig = {}) {
|
||||
return new PostgresConnection({
|
||||
...info,
|
||||
...config,
|
||||
});
|
||||
}
|
||||
|
||||
export async function cleanDatabase(connection: PostgresConnection) {
|
||||
const kysely = connection.kysely;
|
||||
|
||||
// drop all tables & create new schema
|
||||
await kysely.schema.dropSchema("public").ifExists().cascade().execute();
|
||||
await kysely.schema.createSchema("public").execute();
|
||||
}
|
||||
29
packages/postgres/tsconfig.json
Normal file
29
packages/postgres/tsconfig.json
Normal file
@@ -0,0 +1,29 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"composite": false,
|
||||
"module": "ESNext",
|
||||
"moduleResolution": "bundler",
|
||||
"allowImportingTsExtensions": false,
|
||||
"target": "ES2022",
|
||||
"noImplicitAny": false,
|
||||
"allowJs": true,
|
||||
"verbatimModuleSyntax": true,
|
||||
"declaration": true,
|
||||
"strict": true,
|
||||
"allowUnusedLabels": false,
|
||||
"allowUnreachableCode": false,
|
||||
"exactOptionalPropertyTypes": false,
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"noImplicitOverride": true,
|
||||
"noImplicitReturns": true,
|
||||
"noPropertyAccessFromIndexSignature": false,
|
||||
"noUncheckedIndexedAccess": true,
|
||||
"noUnusedLocals": false,
|
||||
"noUnusedParameters": false,
|
||||
"isolatedModules": true,
|
||||
"esModuleInterop": true,
|
||||
"skipLibCheck": true
|
||||
},
|
||||
"include": ["./src/**/*.ts"],
|
||||
"exclude": ["node_modules"]
|
||||
}
|
||||
Reference in New Issue
Block a user