move postgres to a separate package

This commit is contained in:
dswbx
2025-03-14 09:42:51 +01:00
parent b1b2e97a5a
commit c4aaa5a90b
20 changed files with 381 additions and 80 deletions

View File

@@ -0,0 +1,106 @@
import { Connection, type FieldSpec, type SchemaResponse } from "bknd/data";
import {
type ColumnDataType,
type ColumnDefinitionBuilder,
type DatabaseIntrospector,
Kysely,
ParseJSONResultsPlugin,
PostgresDialect,
type SelectQueryBuilder,
} from "kysely";
import { jsonArrayFrom, jsonBuildObject, jsonObjectFrom } from "kysely/helpers/postgres";
import pg from "pg";
import { PostgresIntrospector } from "./PostgresIntrospector";
// Connection options are forwarded verbatim to the `pg` pool.
export type PostgresConnectionConfig = pg.PoolConfig;
// Shorthand for any kysely select builder; used by PostgresConnection.batch().
export type QB = SelectQueryBuilder<any, any, any>;
// Shared plugin list (JSON result parsing) — passed both to Kysely and to the base Connection.
const plugins = [new ParseJSONResultsPlugin()];
/**
 * PostgresDialect variant that swaps kysely's default introspector for the
 * package-local PostgresIntrospector (no tables are excluded here; exclusion
 * is handled by the introspector itself).
 */
class CustomPostgresDialect extends PostgresDialect {
   override createIntrospector(db: Kysely<any>): DatabaseIntrospector {
      const options = { excludeTables: [] };
      return new PostgresIntrospector(db, options);
   }
}
export class PostgresConnection extends Connection {
protected override readonly supported = {
batching: true,
};
private pool: pg.Pool;
constructor(config: PostgresConnectionConfig) {
const pool = new pg.Pool(config);
const kysely = new Kysely({
dialect: new CustomPostgresDialect({
pool,
}),
plugins,
//log: ["query", "error"],
});
super(
kysely,
{
jsonArrayFrom,
jsonBuildObject,
jsonObjectFrom,
},
plugins,
);
this.pool = pool;
}
override getFieldSchema(spec: FieldSpec): SchemaResponse {
this.validateFieldSpecType(spec.type);
let type: ColumnDataType = spec.primary ? "serial" : spec.type;
switch (spec.type) {
case "blob":
type = "bytea";
break;
case "date":
case "datetime":
// https://www.postgresql.org/docs/17/datatype-datetime.html
type = "timestamp";
break;
case "text":
// https://www.postgresql.org/docs/17/datatype-character.html
type = "varchar";
break;
}
return [
spec.name,
type,
(col: ColumnDefinitionBuilder) => {
if (spec.primary) {
return col.primaryKey();
}
if (spec.references) {
return col
.references(spec.references)
.onDelete(spec.onDelete ?? "set null")
.onUpdate(spec.onUpdate ?? "no action");
}
return spec.nullable ? col : col.notNull();
},
];
}
override async close(): Promise<void> {
await this.pool.end();
}
protected override async batch<Queries extends QB[]>(
queries: [...Queries],
): Promise<{
[K in keyof Queries]: Awaited<ReturnType<Queries[K]["execute"]>>;
}> {
return this.kysely.transaction().execute(async (trx) => {
return Promise.all(queries.map((q) => trx.executeQuery(q).then((r) => r.rows)));
}) as any;
}
}

View File

@@ -0,0 +1,127 @@
import { type SchemaMetadata, sql } from "kysely";
import { BaseIntrospector } from "bknd/data";
// Raw row shape produced by the introspection query in getSchemaSpec().
type PostgresSchemaSpec = {
   name: string;
   // information_schema reports views vs. plain tables with these literals
   type: "VIEW" | "BASE TABLE";
   columns: {
      name: string;
      // data_type from information_schema.columns (e.g. "character varying")
      type: string;
      // NOTE(review): populated via `CASE WHEN is_nullable = 'NO' THEN true ELSE false END`,
      // so at runtime this looks boolean despite being typed as number — confirm intended type.
      notnull: number;
      // column_default (may be null, e.g. "nextval(...)" for serial columns)
      dflt: string;
      pk: boolean;
   }[];
   indices: {
      name: string;
      // full index definition via pg_get_indexdef()
      origin: string;
      // NOTE(review): derived from `indisvalid`, boolean at runtime despite number type — confirm.
      partial: number;
      // same pg_get_indexdef() output; used to detect UNIQUE indices
      sql: string;
      columns: { name: string; seqno: number }[];
   }[];
};
export class PostgresIntrospector extends BaseIntrospector {
   /**
    * Lists every schema (pg namespace) visible on the server.
    */
   async getSchemas(): Promise<SchemaMetadata[]> {
      const rawSchemas = await this.db
         .selectFrom("pg_catalog.pg_namespace")
         .select("nspname")
         .$castTo<{ nspname: string }>()
         .execute();
      return rawSchemas.map((it) => ({ name: it.nspname }));
   }

   /**
    * Introspects the tables and views of the "public" schema, including
    * their columns and indices.
    *
    * Bug fix: the exclusion list was interpolated as
    * `NOT IN (${names.join(", ")})`. kysely's `sql` tag binds interpolations
    * as parameters, so the joined string became ONE parameter ("a, b") and
    * `NOT IN` only matched that literal — no table was ever excluded.
    * `sql.join()` binds one parameter per name instead. With an empty list
    * the clause is omitted entirely, since `NOT IN ()` is invalid SQL.
    */
   async getSchemaSpec() {
      const excluded = this.getExcludedTableNames();
      const exclusion =
         excluded.length > 0
            ? sql`AND table_name NOT IN (${sql.join(excluded)})`
            : sql``;
      const query = sql`
      WITH tables_and_views AS (
         SELECT table_name AS name,
                table_type AS type
         FROM information_schema.tables
         WHERE table_schema = 'public'
           AND table_type IN ('BASE TABLE', 'VIEW')
           AND table_name NOT LIKE 'pg_%'
           ${exclusion}
      ),
      columns_info AS (
         SELECT table_name AS name,
                json_agg(json_build_object(
                  'name', column_name,
                  'type', data_type,
                  'notnull', (CASE WHEN is_nullable = 'NO' THEN true ELSE false END),
                  'dflt', column_default,
                  'pk', (SELECT COUNT(*) > 0
                         FROM information_schema.table_constraints tc
                         INNER JOIN information_schema.key_column_usage kcu
                            ON tc.constraint_name = kcu.constraint_name
                         WHERE tc.table_name = c.table_name
                           AND tc.constraint_type = 'PRIMARY KEY'
                           AND kcu.column_name = c.column_name)
                )) AS columns
         FROM information_schema.columns c
         WHERE table_schema = 'public'
         GROUP BY table_name
      ),
      indices_info AS (
         SELECT
            t.relname AS table_name,
            json_agg(json_build_object(
               'name', i.relname,
               'origin', pg_get_indexdef(i.oid),
               'partial', (CASE WHEN ix.indisvalid THEN false ELSE true END),
               'sql', pg_get_indexdef(i.oid),
               'columns', (
                  SELECT json_agg(json_build_object(
                     'name', a.attname,
                     'seqno', x.ordinal_position
                  ))
                  FROM unnest(ix.indkey) WITH ORDINALITY AS x(attnum, ordinal_position)
                  JOIN pg_attribute a ON a.attnum = x.attnum AND a.attrelid = t.oid
               ))) AS indices
         FROM pg_class t
         LEFT JOIN pg_index ix ON t.oid = ix.indrelid
         LEFT JOIN pg_class i ON i.oid = ix.indexrelid
         WHERE t.relkind IN ('r', 'v') -- r = table, v = view
           AND t.relname NOT LIKE 'pg_%'
         GROUP BY t.relname
      )
      SELECT
         tv.name,
         tv.type,
         ci.columns,
         ii.indices
      FROM tables_and_views tv
      LEFT JOIN columns_info ci ON tv.name = ci.name
      LEFT JOIN indices_info ii ON tv.name = ii.table_name;
      `;
      const tables = await this.executeWithPlugins<PostgresSchemaSpec[]>(query);
      return tables.map((table) => ({
         name: table.name,
         isView: table.type === "VIEW",
         // guard: the LEFT JOINs above can surface SQL NULL instead of a json array
         columns: (table.columns ?? []).map((col) => ({
            name: col.name,
            dataType: col.type,
            isNullable: !col.notnull,
            // @todo: check default value on 'nextval' see https://www.postgresql.org/docs/17/datatype-numeric.html#DATATYPE-SERIAL
            isAutoIncrementing: true, // just for now
            hasDefaultValue: col.dflt != null,
            comment: undefined,
         })),
         indices: (table.indices ?? []).map((index) => ({
            name: index.name,
            table: table.name,
            isUnique: index.sql?.match(/unique/i) != null,
            columns: (index.columns ?? []).map((col) => ({
               name: col.name,
               order: col.seqno,
            })),
         })),
      }));
   }
}

View File

@@ -0,0 +1,2 @@
// Package entry point: public surface of the postgres adapter.
export { PostgresConnection, type PostgresConnectionConfig } from "./PostgresConnection";
export { PostgresIntrospector } from "./PostgresIntrospector";