Initialize PostgreSQL support

This commit is contained in:
dswbx
2025-03-07 15:02:19 +01:00
parent 8550aef606
commit a5c422d45d
30 changed files with 759 additions and 220 deletions

View File

@@ -1,9 +1,12 @@
import {
type AliasableExpression,
type ColumnBuilderCallback,
type ColumnDataType,
type DatabaseIntrospector,
type Expression,
type Kysely,
type KyselyPlugin,
type OnModifyForeignAction,
type RawBuilder,
type SelectQueryBuilder,
type SelectQueryNode,
@@ -29,6 +32,38 @@ export interface SelectQueryBuilderExpression<O> extends AliasableExpression<O>
toOperationNode(): SelectQueryNode;
}
export type SchemaResponse = [string, ColumnDataType, ColumnBuilderCallback] | undefined;
const FieldSpecTypes = [
"text",
"integer",
"real",
"blob",
"date",
"datetime",
"timestamp",
"boolean",
"json",
] as const;
export type FieldSpec = {
type: (typeof FieldSpecTypes)[number];
name: string;
nullable?: boolean;
dflt?: any;
unique?: boolean;
primary?: boolean;
references?: string;
onDelete?: OnModifyForeignAction;
onUpdate?: OnModifyForeignAction;
};
export type IndexSpec = {
name: string;
columns: string[];
unique?: boolean;
};
export type DbFunctions = {
jsonObjectFrom<O>(expr: SelectQueryBuilderExpression<O>): RawBuilder<Simplify<O> | null>;
jsonArrayFrom<O>(expr: SelectQueryBuilderExpression<O>): RawBuilder<Simplify<O>[]>;
@@ -108,4 +143,15 @@ export abstract class Connection<DB = any> {
return await this.batch(queries);
}
/**
 * Guard that narrows `type` to one of the allowed FieldSpec types.
 * Throws (instead of returning false) when the type is unknown, so a
 * `true` return is the only possible outcome.
 */
protected validateFieldSpecType(type: string): type is FieldSpec["type"] {
   const known: readonly string[] = FieldSpecTypes;
   if (known.includes(type)) {
      return true;
   }
   throw new Error(
      `Invalid field type "${type}". Allowed types are: ${FieldSpecTypes.join(", ")}`,
   );
}
abstract getFieldSchema(spec: FieldSpec, strict?: boolean): SchemaResponse;
}

View File

@@ -12,10 +12,13 @@ export type LibSqlCredentials = Config & {
protocol?: (typeof LIBSQL_PROTOCOLS)[number];
};
const plugins = [new FilterNumericKeysPlugin(), new ParseJSONResultsPlugin()];
// LibsqlDialect with a custom introspector that reuses the module-level
// plugin set and hides libsql's internal wasm function table.
class CustomLibsqlDialect extends LibsqlDialect {
override createIntrospector(db: Kysely<any>): DatabaseIntrospector {
// "libsql_wasm_func_table" is libsql-internal and must not surface as a user table
return new SqliteIntrospector(db, {
excludeTables: ["libsql_wasm_func_table"],
plugins,
});
}
}
@@ -26,7 +29,6 @@ export class LibsqlConnection extends SqliteConnection {
constructor(client: Client);
constructor(credentials: LibSqlCredentials);
constructor(clientOrCredentials: Client | LibSqlCredentials) {
const plugins = [new FilterNumericKeysPlugin(), new ParseJSONResultsPlugin()];
let client: Client;
if (clientOrCredentials && "url" in clientOrCredentials) {
let { url, authToken, protocol } = clientOrCredentials;

View File

@@ -1,6 +1,6 @@
import type { Kysely, KyselyPlugin } from "kysely";
import type { ColumnDataType, ColumnDefinitionBuilder, Kysely, KyselyPlugin } from "kysely";
import { jsonArrayFrom, jsonBuildObject, jsonObjectFrom } from "kysely/helpers/sqlite";
import { Connection, type DbFunctions } from "./Connection";
import { Connection, type DbFunctions, type FieldSpec, type SchemaResponse } from "./Connection";
export class SqliteConnection extends Connection {
constructor(kysely: Kysely<any>, fn: Partial<DbFunctions> = {}, plugins: KyselyPlugin[] = []) {
@@ -19,4 +19,32 @@ export class SqliteConnection extends Connection {
override supportsIndices(): boolean {
return true;
}
/**
 * Maps an abstract FieldSpec onto an SQLite column definition:
 * [column name, column type, constraint-builder callback].
 */
override getFieldSchema(spec: FieldSpec): SchemaResponse {
   this.validateFieldSpecType(spec.type);
   // SQLite has no native JSON column type; JSON is persisted as text.
   const columnType: ColumnDataType = spec.type === "json" ? "text" : spec.type;
   const applyConstraints = (col: ColumnDefinitionBuilder) => {
      if (spec.primary) {
         // primary keys are always non-null auto-increment
         return col.primaryKey().notNull().autoIncrement();
      }
      if (spec.references) {
         let ref = col.references(spec.references);
         if (spec.onDelete) ref = ref.onDelete(spec.onDelete);
         if (spec.onUpdate) ref = ref.onUpdate(spec.onUpdate);
         return ref;
      }
      return spec.nullable ? col : col.notNull();
   };
   return [spec.name, columnType, applyConstraints] as const;
}
}

View File

@@ -1,26 +1,31 @@
import type {
DatabaseIntrospector,
DatabaseMetadata,
DatabaseMetadataOptions,
ExpressionBuilder,
Kysely,
SchemaMetadata,
TableMetadata,
import {
type DatabaseIntrospector,
type DatabaseMetadata,
type DatabaseMetadataOptions,
type Kysely,
ParseJSONResultsPlugin,
type SchemaMetadata,
type TableMetadata,
type KyselyPlugin,
} from "kysely";
import { DEFAULT_MIGRATION_LOCK_TABLE, DEFAULT_MIGRATION_TABLE, sql } from "kysely";
import type { ConnectionIntrospector, IndexMetadata } from "./Connection";
import { KyselyPluginRunner } from "data";
export type SqliteIntrospectorConfig = {
excludeTables?: string[];
plugins?: KyselyPlugin[];
};
export class SqliteIntrospector implements DatabaseIntrospector, ConnectionIntrospector {
readonly #db: Kysely<any>;
readonly _excludeTables: string[] = [];
readonly _plugins: KyselyPlugin[];
constructor(db: Kysely<any>, config: SqliteIntrospectorConfig = {}) {
this.#db = db;
this._excludeTables = config.excludeTables ?? [];
this._plugins = config.plugins ?? [new ParseJSONResultsPlugin()];
}
async getSchemas(): Promise<SchemaMetadata[]> {
@@ -28,86 +33,96 @@ export class SqliteIntrospector implements DatabaseIntrospector, ConnectionIntro
return [];
}
async getIndices(tbl_name?: string): Promise<IndexMetadata[]> {
const indices = await this.#db
.selectFrom("sqlite_master")
.where("type", "=", "index")
.$if(!!tbl_name, (eb) => eb.where("tbl_name", "=", tbl_name))
.select("name")
.$castTo<{ name: string }>()
.execute();
async getSchema() {
const excluded = [
...this._excludeTables,
DEFAULT_MIGRATION_TABLE,
DEFAULT_MIGRATION_LOCK_TABLE,
];
const query = sql`
SELECT m.name, m.type, m.sql,
(SELECT json_group_array(
json_object(
'name', p.name,
'type', p.type,
'notnull', p."notnull",
'default', p.dflt_value,
'primary_key', p.pk
)) FROM pragma_table_info(m.name) p) AS columns,
(SELECT json_group_array(
json_object(
'name', i.name,
'origin', i.origin,
'partial', i.partial,
'sql', im.sql,
'columns', (SELECT json_group_array(
json_object(
'name', ii.name,
'seqno', ii.seqno
)) FROM pragma_index_info(i.name) ii)
)) FROM pragma_index_list(m.name) i
LEFT JOIN sqlite_master im ON im.name = i.name
AND im.type = 'index'
) AS indices
FROM sqlite_master m
WHERE m.type IN ('table', 'view')
and m.name not like 'sqlite_%'
and m.name not in (${excluded.join(", ")})
`;
return Promise.all(indices.map(({ name }) => this.#getIndexMetadata(name)));
}
const result = await query.execute(this.#db);
const runner = new KyselyPluginRunner(this._plugins ?? []);
const tables = (await runner.transformResultRows(result.rows)) as unknown as {
name: string;
type: string;
sql: string;
columns: {
name: string;
type: string;
notnull: number;
dflt_value: any;
pk: number;
}[];
indices: {
name: string;
origin: string;
partial: number;
sql: string;
columns: { name: string; seqno: number }[];
}[];
}[];
async #getIndexMetadata(index: string): Promise<IndexMetadata> {
const db = this.#db;
//console.log("tables", tables);
return tables.map((table) => ({
name: table.name,
isView: table.type === "view",
columns: table.columns.map((col) => {
const autoIncrementCol = table.sql
?.split(/[\(\),]/)
?.find((it) => it.toLowerCase().includes("autoincrement"))
?.trimStart()
?.split(/\s+/)?.[0]
?.replace(/["`]/g, "");
// Get the SQL that was used to create the index.
const indexDefinition = await db
.selectFrom("sqlite_master")
.where("name", "=", index)
.select(["sql", "tbl_name", "type"])
.$castTo<{ sql: string | undefined; tbl_name: string; type: string }>()
.executeTakeFirstOrThrow();
//console.log("--indexDefinition--", indexDefinition, index);
// check unique by looking for the word "unique" in the sql
const isUnique = indexDefinition.sql?.match(/unique/i) != null;
const columns = await db
.selectFrom(
sql<{
seqno: number;
cid: number;
name: string;
}>`pragma_index_info(${index})`.as("index_info"),
)
.select(["seqno", "cid", "name"])
.orderBy("cid")
.execute();
return {
name: index,
table: indexDefinition.tbl_name,
isUnique: isUnique,
columns: columns.map((col) => ({
name: col.name,
order: col.seqno,
return {
name: col.name,
dataType: col.type,
isNullable: !col.notnull,
isAutoIncrementing: col.name === autoIncrementCol,
hasDefaultValue: col.dflt_value != null,
comment: undefined,
};
}),
indices: table.indices.map((index) => ({
name: index.name,
table: table.name,
isUnique: index.sql?.match(/unique/i) != null,
columns: index.columns.map((col) => ({
name: col.name,
order: col.seqno,
})),
})),
};
}
private excludeTables(tables: string[] = []) {
return (eb: ExpressionBuilder<any, any>) => {
const and = tables.map((t) => eb("name", "!=", t));
return eb.and(and);
};
}
async getTables(
options: DatabaseMetadataOptions = { withInternalKyselyTables: false },
): Promise<TableMetadata[]> {
let query = this.#db
.selectFrom("sqlite_master")
.where("type", "in", ["table", "view"])
.where("name", "not like", "sqlite_%")
.select("name")
.orderBy("name")
.$castTo<{ name: string }>();
if (!options.withInternalKyselyTables) {
query = query.where(
this.excludeTables([DEFAULT_MIGRATION_TABLE, DEFAULT_MIGRATION_LOCK_TABLE]),
);
}
if (this._excludeTables.length > 0) {
query = query.where(this.excludeTables(this._excludeTables));
}
const tables = await query.execute();
return Promise.all(tables.map(({ name }) => this.#getTableMetadata(name)));
}));
}
async getMetadata(options?: DatabaseMetadataOptions): Promise<DatabaseMetadata> {
@@ -116,49 +131,21 @@ export class SqliteIntrospector implements DatabaseIntrospector, ConnectionIntro
};
}
async #getTableMetadata(table: string): Promise<TableMetadata> {
const db = this.#db;
async getIndices(tbl_name?: string): Promise<IndexMetadata[]> {
const schema = await this.getSchema();
return schema
.flatMap((table) => table.indices)
.filter((index) => !tbl_name || index.table === tbl_name);
}
// Get the SQL that was used to create the table.
const tableDefinition = await db
.selectFrom("sqlite_master")
.where("name", "=", table)
.select(["sql", "type"])
.$castTo<{ sql: string | undefined; type: string }>()
.executeTakeFirstOrThrow();
// Try to find the name of the column that has `autoincrement` 🤦
const autoIncrementCol = tableDefinition.sql
?.split(/[\(\),]/)
?.find((it) => it.toLowerCase().includes("autoincrement"))
?.trimStart()
?.split(/\s+/)?.[0]
?.replace(/["`]/g, "");
const columns = await db
.selectFrom(
sql<{
name: string;
type: string;
notnull: 0 | 1;
dflt_value: any;
}>`pragma_table_info(${table})`.as("table_info"),
)
.select(["name", "type", "notnull", "dflt_value"])
.orderBy("cid")
.execute();
return {
name: table,
isView: tableDefinition.type === "view",
columns: columns.map((col) => ({
name: col.name,
dataType: col.type,
isNullable: !col.notnull,
isAutoIncrementing: col.name === autoIncrementCol,
hasDefaultValue: col.dflt_value != null,
comment: undefined,
})),
};
async getTables(
options: DatabaseMetadataOptions = { withInternalKyselyTables: false },
): Promise<TableMetadata[]> {
const schema = await this.getSchema();
return schema.map((table) => ({
name: table.name,
isView: table.isView,
columns: table.columns,
}));
}
}

View File

@@ -3,25 +3,25 @@ import { Kysely, SqliteDialect } from "kysely";
import { SqliteConnection } from "./SqliteConnection";
import { SqliteIntrospector } from "./SqliteIntrospector";
const plugins = [new ParseJSONResultsPlugin()];
// SqliteDialect with a custom introspector wired to the module-level plugins.
class CustomSqliteDialect extends SqliteDialect {
override createIntrospector(db: Kysely<any>): DatabaseIntrospector {
// NOTE(review): "test_table" looks like a leftover test fixture exclude —
// confirm this should ship, or make excludeTables configurable per connection.
return new SqliteIntrospector(db, {
excludeTables: ["test_table"],
plugins,
});
}
}
export class SqliteLocalConnection extends SqliteConnection {
constructor(private database: SqliteDatabase) {
const plugins = [new ParseJSONResultsPlugin()];
const kysely = new Kysely({
dialect: new CustomSqliteDialect({ database }),
plugins,
//log: ["query"],
});
super(kysely);
this.plugins = plugins;
super(kysely, {}, plugins);
}
override supportsIndices(): boolean {

View File

@@ -0,0 +1,71 @@
import {
Kysely,
PostgresDialect,
type DatabaseIntrospector,
type ColumnDataType,
type ColumnDefinitionBuilder,
ParseJSONResultsPlugin,
} from "kysely";
import pg from "pg";
import { PostgresIntrospector } from "./PostgresIntrospector";
import { type FieldSpec, type SchemaResponse, Connection } from "data/connection/Connection";
export type PostgresConnectionConfig = pg.PoolConfig;
const plugins = [new ParseJSONResultsPlugin()];
// PostgresDialect that swaps in our own introspector for schema/index discovery.
class CustomPostgresDialect extends PostgresDialect {
override createIntrospector(db: Kysely<any>): DatabaseIntrospector {
return new PostgresIntrospector(db);
}
}
export class PostgresConnection extends Connection {
   /**
    * Creates a Kysely instance backed by a node-postgres pool.
    * @param config node-postgres pool configuration (host, user, database, …)
    */
   constructor(config: PostgresConnectionConfig) {
      const kysely = new Kysely({
         dialect: new CustomPostgresDialect({
            pool: new pg.Pool(config),
         }),
         plugins,
         //log: ["query", "error"],
      });
      super(kysely, {}, plugins);
   }

   override supportsIndices(): boolean {
      return true;
   }

   /**
    * Maps an abstract FieldSpec onto a postgres column definition:
    * [column name, column type, constraint-builder callback].
    */
   override getFieldSchema(spec: FieldSpec): SchemaResponse {
      this.validateFieldSpecType(spec.type);

      // map abstract field types onto native postgres column types
      let type: ColumnDataType = spec.type;
      switch (spec.type) {
         case "date":
         case "datetime":
            type = "timestamp";
            break;
         case "text":
            type = "varchar";
            break;
      }
      // Primary keys use "serial" so they auto-increment. This must be applied
      // AFTER the mapping above: previously "serial" was chosen first and then
      // overwritten by the switch whenever spec.type was "text"/"date"/
      // "datetime", producing a non-incrementing varchar/timestamp key.
      if (spec.primary) {
         type = "serial";
      }

      return [
         spec.name,
         type,
         (col: ColumnDefinitionBuilder) => {
            if (spec.primary) {
               return col.primaryKey();
            }
            if (spec.references) {
               return col
                  .references(spec.references)
                  .onDelete(spec.onDelete ?? "set null")
                  .onUpdate(spec.onUpdate ?? "no action");
            }
            return spec.nullable ? col : col.notNull();
         },
      ];
   }
}

View File

@@ -0,0 +1,185 @@
import {
type DatabaseIntrospector,
type DatabaseMetadata,
type DatabaseMetadataOptions,
type SchemaMetadata,
type TableMetadata,
type Kysely,
type KyselyPlugin,
ParseJSONResultsPlugin,
} from "kysely";
import { DEFAULT_MIGRATION_LOCK_TABLE, DEFAULT_MIGRATION_TABLE, sql } from "kysely";
import { KyselyPluginRunner } from "data";
import type { IndexMetadata } from "data/connection/Connection";
export type PostgresIntrospectorConfig = {
excludeTables?: string[];
plugins?: KyselyPlugin[];
};
export class PostgresIntrospector implements DatabaseIntrospector {
   readonly #db: Kysely<any>;
   readonly _excludeTables: string[] = [];
   readonly _plugins: KyselyPlugin[];

   constructor(db: Kysely<any>, config: PostgresIntrospectorConfig = {}) {
      this.#db = db;
      this._excludeTables = config.excludeTables ?? [];
      this._plugins = config.plugins ?? [new ParseJSONResultsPlugin()];
   }

   /** All namespaces (schemas) known to the database. */
   async getSchemas(): Promise<SchemaMetadata[]> {
      const rawSchemas = await this.#db
         .selectFrom("pg_catalog.pg_namespace")
         .select("nspname")
         .$castTo<RawSchemaMetadata>()
         .execute();
      return rawSchemas.map((it) => ({ name: it.nspname }));
   }

   async getMetadata(options?: DatabaseMetadataOptions): Promise<DatabaseMetadata> {
      return {
         tables: await this.getTables(options),
      };
   }

   /**
    * Introspects tables and views of the "public" schema — columns and
    * indices included — in a single round trip. Kysely's internal migration
    * tables and any configured excludes are filtered out.
    */
   async getSchema() {
      const excluded = [
         ...this._excludeTables,
         DEFAULT_MIGRATION_TABLE,
         DEFAULT_MIGRATION_LOCK_TABLE,
      ];
      // bug fix: `${excluded.join(", ")}` bound the whole list as ONE string
      // parameter ("a, b"), so `NOT IN` compared each name against that single
      // string and excluded nothing. sql.join() binds one parameter per entry:
      // NOT IN ($1, $2, …).
      const query = sql`
      WITH tables_and_views AS (
         SELECT table_name AS name,
                table_type AS type
         FROM information_schema.tables
         WHERE table_schema = 'public'
           AND table_type IN ('BASE TABLE', 'VIEW')
           AND table_name NOT LIKE 'pg_%'
           AND table_name NOT IN (${sql.join(excluded)})
      ),
      columns_info AS (
         SELECT table_name AS name,
            json_agg(json_build_object(
               'name', column_name,
               'type', data_type,
               'notnull', (CASE WHEN is_nullable = 'NO' THEN true ELSE false END),
               'dflt', column_default,
               'pk', (SELECT COUNT(*) > 0
                  FROM information_schema.table_constraints tc
                  INNER JOIN information_schema.key_column_usage kcu
                     ON tc.constraint_name = kcu.constraint_name
                  WHERE tc.table_name = c.table_name
                    AND tc.constraint_type = 'PRIMARY KEY'
                    AND kcu.column_name = c.column_name)
            )) AS columns
         FROM information_schema.columns c
         WHERE table_schema = 'public'
         GROUP BY table_name
      ),
      indices_info AS (
         SELECT
            t.relname AS table_name,
            json_agg(json_build_object(
               'name', i.relname,
               'origin', pg_get_indexdef(i.oid),
               -- bug fix: an index is PARTIAL when it has a predicate
               -- (pg_index.indpred); indisvalid only flags invalid indexes
               'partial', (CASE WHEN ix.indpred IS NOT NULL THEN true ELSE false END),
               'sql', pg_get_indexdef(i.oid),
               'columns', (
                  SELECT json_agg(json_build_object(
                     'name', a.attname,
                     'seqno', x.ordinal_position
                  ))
                  FROM unnest(ix.indkey) WITH ORDINALITY AS x(attnum, ordinal_position)
                  JOIN pg_attribute a ON a.attnum = x.attnum AND a.attrelid = t.oid
               ))) AS indices
         FROM pg_class t
         LEFT JOIN pg_index ix ON t.oid = ix.indrelid
         LEFT JOIN pg_class i ON i.oid = ix.indexrelid
         WHERE t.relkind IN ('r', 'v') -- r = table, v = view
           AND t.relname NOT LIKE 'pg_%'
         GROUP BY t.relname
      )
      SELECT
         tv.name,
         tv.type,
         ci.columns,
         ii.indices
      FROM tables_and_views tv
      LEFT JOIN columns_info ci ON tv.name = ci.name
      LEFT JOIN indices_info ii ON tv.name = ii.table_name;
      `;

      const result = await query.execute(this.#db);
      const runner = new KyselyPluginRunner(this._plugins ?? []);
      // the CASE expressions above emit booleans, so notnull/partial are typed
      // boolean here (previously mis-typed as number)
      const tables = (await runner.transformResultRows(result.rows)) as unknown as {
         name: string;
         type: "VIEW" | "BASE TABLE";
         columns: {
            name: string;
            type: string;
            notnull: boolean;
            dflt: string | null;
            pk: boolean;
         }[] | null;
         indices: {
            name: string | null;
            origin: string;
            partial: boolean;
            sql: string;
            columns: { name: string; seqno: number }[] | null;
         }[] | null;
      }[];

      return tables.map((table) => ({
         name: table.name,
         isView: table.type === "VIEW",
         // the LEFT JOINs can yield null instead of an empty json array
         columns: (table.columns ?? []).map((col) => ({
            name: col.name,
            dataType: col.type,
            isNullable: !col.notnull,
            // serial/bigserial columns default to nextval('…'::regclass); use
            // that instead of the previous hard-coded `true`.
            // NOTE(review): GENERATED … AS IDENTITY columns have no
            // column_default — TODO detect via information_schema is_identity.
            isAutoIncrementing: col.dflt?.includes("nextval") ?? false,
            hasDefaultValue: col.dflt != null,
            comment: undefined,
         })),
         indices: (table.indices ?? [])
            // a table without any index produces one all-null aggregate entry
            .filter((index) => index.name != null)
            .map((index) => ({
               name: index.name!,
               table: table.name,
               isUnique: index.sql?.match(/unique/i) != null,
               columns: (index.columns ?? []).map((col) => ({
                  name: col.name,
                  order: col.seqno,
               })),
            })),
      }));
   }

   /** Index metadata, optionally restricted to a single table. */
   async getIndices(tbl_name?: string): Promise<IndexMetadata[]> {
      const schema = await this.getSchema();
      return schema
         .flatMap((table) => table.indices)
         .filter((index) => !tbl_name || index.table === tbl_name);
   }

   async getTables(
      options: DatabaseMetadataOptions = { withInternalKyselyTables: false },
   ): Promise<TableMetadata[]> {
      const schema = await this.getSchema();
      return schema.map((table) => ({
         name: table.name,
         isView: table.isView,
         columns: table.columns,
      }));
   }
}
interface RawSchemaMetadata {
nspname: string;
}

View File

@@ -32,9 +32,11 @@ export class BooleanField<Required extends true | false = false> extends Field<
}
}
schema() {
// @todo: potentially use "integer" instead
return this.useSchemaHelper("boolean");
// Column spec: inherit the base spec (name/nullable/dflt), stored as boolean.
override schema() {
return Object.freeze({
...super.schema()!,
type: "boolean",
});
}
override getHtmlConfig() {

View File

@@ -32,8 +32,10 @@ export class DateField<Required extends true | false = false> extends Field<
}
override schema() {
const type = this.config.type === "datetime" ? "datetime" : "date";
return this.useSchemaHelper(type);
return Object.freeze({
...super.schema()!,
type: this.config.type === "datetime" ? "datetime" : "date",
});
}
override getHtmlConfig() {

View File

@@ -66,10 +66,6 @@ export class EnumField<Required extends true | false = false, TypeOverride = str
return enumFieldConfigSchema;
}
override schema() {
return this.useSchemaHelper("text");
}
getOptions(): { label: string; value: string }[] {
const options = this.config?.options ?? { type: "strings", values: [] };

View File

@@ -1,16 +1,16 @@
import {
parse,
snakeToPascalWithSpaces,
type Static,
StringEnum,
type TSchema,
Type,
TypeInvalidError,
parse,
snakeToPascalWithSpaces,
} from "core/utils";
import type { ColumnBuilderCallback, ColumnDataType, ColumnDefinitionBuilder } from "kysely";
import type { HTMLInputTypeAttribute, InputHTMLAttributes } from "react";
import type { EntityManager } from "../entities";
import { InvalidFieldConfigException, TransformPersistFailedException } from "../errors";
import type { FieldSpec } from "data/connection/Connection";
// @todo: contexts need to be reworked
// e.g. "table" is irrelevant, because if read is not given, it fails
@@ -67,8 +67,6 @@ export const baseFieldConfigSchema = Type.Object(
);
export type BaseFieldConfig = Static<typeof baseFieldConfigSchema>;
export type SchemaResponse = [string, ColumnDataType, ColumnBuilderCallback] | undefined;
export abstract class Field<
Config extends BaseFieldConfig = BaseFieldConfig,
Type = any,
@@ -106,25 +104,18 @@ export abstract class Field<
protected abstract getSchema(): TSchema;
protected useSchemaHelper(
type: ColumnDataType,
builder?: (col: ColumnDefinitionBuilder) => ColumnDefinitionBuilder,
): SchemaResponse {
return [
this.name,
type,
(col: ColumnDefinitionBuilder) => {
if (builder) return builder(col);
return col;
},
];
}
/**
* Used in SchemaManager.ts
* @param em
*/
abstract schema(em: EntityManager<any>): SchemaResponse;
/**
 * Base column spec for this field, consumed by Connection.getFieldSchema()
 * (see SchemaManager). Subclasses override this and usually spread the base
 * result while replacing `type`; VirtualField returns undefined (no column).
 * Defaults to a nullable "text" column; dflt presumably carries the field's
 * configured default value — see getDefault().
 */
schema(): FieldSpec | undefined {
return Object.freeze({
name: this.name,
type: "text",
nullable: true,
dflt: this.getDefault(),
});
}
hasDefault() {
return this.config.default_value !== undefined;

View File

@@ -18,10 +18,6 @@ export class JsonField<Required extends true | false = false, TypeOverride = obj
return jsonFieldConfigSchema;
}
override schema() {
return this.useSchemaHelper("text");
}
/**
* Transform value after retrieving from database
* @param value

View File

@@ -36,10 +36,6 @@ export class JsonSchemaField<
return jsonSchemaFieldConfigSchema;
}
override schema() {
return this.useSchemaHelper("text");
}
getJsonSchema(): JsonSchema {
return this.config?.schema as JsonSchema;
}

View File

@@ -44,8 +44,11 @@ export class NumberField<Required extends true | false = false> extends Field<
};
}
schema() {
return this.useSchemaHelper("integer");
// Column spec: inherit the base spec (name/nullable/dflt), stored as integer.
override schema() {
return Object.freeze({
...super.schema()!,
type: "integer",
});
}
override getValue(value: any, context?: TRenderContext): any {

View File

@@ -30,9 +30,12 @@ export class PrimaryField<Required extends true | false = false> extends Field<
return baseFieldConfigSchema;
}
schema() {
return this.useSchemaHelper("integer", (col) => {
return col.primaryKey().notNull().autoIncrement();
// Primary-key column: non-nullable integer flagged `primary` so the
// connection applies its key/auto-increment handling.
// NOTE(review): deliberately does not spread super.schema(), so no dflt is
// attached — confirm that is intended.
override schema() {
return Object.freeze({
type: "integer",
name: this.name,
primary: true,
nullable: false,
});
}

View File

@@ -47,10 +47,6 @@ export class TextField<Required extends true | false = false> extends Field<
return textFieldConfigSchema;
}
override schema() {
return this.useSchemaHelper("text");
}
override getHtmlConfig() {
if (this.config.html_config) {
return this.config.html_config as any;

View File

@@ -17,7 +17,7 @@ export class VirtualField extends Field<VirtualFieldConfig> {
return virtualFieldConfigSchema;
}
schema() {
// Virtual fields have no database column, so no spec is produced.
override schema() {
return undefined;
}

View File

@@ -1,6 +1,6 @@
import { type Static, StringEnum, Type } from "core/utils";
import type { EntityManager } from "../entities";
import { Field, type SchemaResponse, baseFieldConfigSchema } from "../fields";
import { Field, baseFieldConfigSchema } from "../fields";
import type { EntityRelation } from "./EntityRelation";
import type { EntityRelationAnchor } from "./EntityRelationAnchor";
@@ -72,14 +72,12 @@ export class RelationField extends Field<RelationFieldConfig> {
return this.config.target_field!;
}
override schema(): SchemaResponse {
return this.useSchemaHelper("integer", (col) => {
//col.references('person.id').onDelete('cascade').notNull()
// @todo: implement cascading?
return col
.references(`${this.config.target}.${this.config.target_field}`)
.onDelete(this.config.on_delete ?? "set null");
// Foreign-key column: integer referencing `<target>.<target_field>`;
// on_delete falls back to "set null" when not configured.
override schema() {
return Object.freeze({
...super.schema()!,
type: "integer",
references: `${this.config.target}.${this.config.target_field}`,
onDelete: this.config.on_delete ?? "set null",
});
}

View File

@@ -1,7 +1,7 @@
import type { AlterTableColumnAlteringBuilder, CompiledQuery, TableMetadata } from "kysely";
import type { IndexMetadata } from "../connection/Connection";
import type { IndexMetadata, SchemaResponse } from "../connection/Connection";
import type { Entity, EntityManager } from "../entities";
import { PrimaryField, type SchemaResponse } from "../fields";
import { PrimaryField } from "../fields";
type IntrospectedTable = TableMetadata & {
indices: IndexMetadata[];
@@ -239,10 +239,9 @@ export class SchemaManager {
for (const column of columns) {
const field = this.em.entity(table).getField(column)!;
const fieldSchema = field.schema(this.em);
if (Array.isArray(fieldSchema) && fieldSchema.length === 3) {
schemas.push(fieldSchema);
//throw new Error(`Field "${field.name}" on entity "${table}" has no schema`);
const fieldSchema = field.schema();
if (fieldSchema) {
schemas.push(this.em.connection.getFieldSchema(fieldSchema));
}
}