Initial commit - Event Planner application

This commit is contained in:
mberlin
2026-03-18 14:55:56 -03:00
commit 86d779eb4d
7548 changed files with 1006324 additions and 0 deletions

View File

@@ -0,0 +1,115 @@
import {
type EntityProperty,
type IsolationLevel,
RawQueryFragment,
type SimpleColumnMeta,
Type,
} from '@mikro-orm/core';
import { AbstractSqlPlatform } from '../../AbstractSqlPlatform.js';
import type { IndexDef } from '../../typings.js';
import { PostgreSqlNativeQueryBuilder } from './PostgreSqlNativeQueryBuilder.js';
import { PostgreSqlSchemaHelper } from './PostgreSqlSchemaHelper.js';
import { PostgreSqlExceptionConverter } from './PostgreSqlExceptionConverter.js';
/**
 * Platform contract shared by PostgreSQL-compatible drivers: column type
 * declarations, full-text search helpers, JSON property access, array
 * (de)marshalling, identifier quoting and identifier-length handling.
 */
export declare class BasePostgreSqlPlatform extends AbstractSqlPlatform {
  #private;
  protected readonly schemaHelper: PostgreSqlSchemaHelper;
  protected readonly exceptionConverter: PostgreSqlExceptionConverter;
  createNativeQueryBuilder(): PostgreSqlNativeQueryBuilder;
  usesReturningStatement(): boolean;
  usesCascadeStatement(): boolean;
  supportsNativeEnums(): boolean;
  usesEnumCheckConstraints(): boolean;
  supportsMaterializedViews(): boolean;
  supportsCustomPrimaryKeyNames(): boolean;
  getCurrentTimestampSQL(length: number): string;
  getDateTimeTypeDeclarationSQL(column: { length?: number }): string;
  getDefaultDateTimeLength(): number;
  getTimeTypeDeclarationSQL(): string;
  getIntegerTypeDeclarationSQL(column: { length?: number; autoincrement?: boolean; generated?: string }): string;
  getBigIntTypeDeclarationSQL(column: { autoincrement?: boolean }): string;
  getTinyIntTypeDeclarationSQL(column: { length?: number; unsigned?: boolean; autoincrement?: boolean }): string;
  getUuidTypeDeclarationSQL(column: { length?: number }): string;
  /** Builds the `@@` match clause used for full-text search conditions. */
  getFullTextWhereClause(prop: EntityProperty): string;
  supportsCreatingFullTextIndex(): boolean;
  /** Builds a `create index ... using gin(...)` DDL statement for full-text search. */
  getFullTextIndexExpression(
    indexName: string,
    schemaName: string | undefined,
    tableName: string,
    columns: SimpleColumnMeta[],
  ): string;
  /** Normalizes PostgreSQL type aliases (e.g. `int4`, `bpchar`) to canonical declarations. */
  normalizeColumnType(
    type: string,
    options: {
      length?: number;
      precision?: number;
      scale?: number;
      autoincrement?: boolean;
    },
  ): string;
  getMappedType(type: string): Type<unknown>;
  getRegExpOperator(val?: unknown, flags?: string): string;
  getRegExpValue(val: RegExp): {
    $re: string;
    $flags?: string;
  };
  isBigIntProperty(prop: EntityProperty): boolean;
  getArrayDeclarationSQL(): string;
  getFloatDeclarationSQL(): string;
  getDoubleDeclarationSQL(): string;
  getEnumTypeDeclarationSQL(column: { fieldNames: string[]; items?: unknown[]; nativeEnumName?: string }): string;
  supportsMultipleStatements(): boolean;
  getBeginTransactionSQL(options?: { isolationLevel?: IsolationLevel; readOnly?: boolean }): string[];
  /** Serializes a string array into PostgreSQL array literal syntax (`{a,b}`). */
  marshallArray(values: string[]): string;
  /** Parses a PostgreSQL array literal back into a string array. */
  unmarshallArray(value: string): string[];
  getVarcharTypeDeclarationSQL(column: { length?: number }): string;
  getCharTypeDeclarationSQL(column: { length?: number }): string;
  getIntervalTypeDeclarationSQL(column: { length?: number }): string;
  getBlobDeclarationSQL(): string;
  getJsonDeclarationSQL(): string;
  /** Builds a `->`/`->>` JSON path accessor, optionally cast to a scalar type. */
  getSearchJsonPropertyKey(
    path: string[],
    type: string | undefined | Type,
    aliased: boolean,
    value?: unknown,
  ): string | RawQueryFragment;
  getJsonIndexDefinition(index: IndexDef): string[];
  quoteIdentifier(
    id:
      | string
      | {
          toString: () => string;
        },
    quote?: string,
  ): string;
  private pad;
  /** @internal */
  formatDate(date: Date): string;
  indexForeignKeys(): boolean;
  getDefaultMappedType(type: string): Type<unknown>;
  supportsSchemas(): boolean;
  getDefaultSchemaName(): string | undefined;
  /**
   * Returns the default name of index for the given columns.
   * Truncates so the name stays within PostgreSQL's 63-byte identifier limit.
   */
  getIndexName(
    tableName: string,
    columns: string[],
    type: 'index' | 'unique' | 'foreign' | 'primary' | 'sequence',
  ): string;
  getDefaultPrimaryName(tableName: string, columns: string[]): string;
  /**
   * @inheritDoc
   */
  castColumn(prop?: { columnTypes?: string[] }): string;
  getJsonArrayFromSQL(
    column: string,
    alias: string,
    _properties: {
      name: string;
      type: string;
    }[],
  ): string;
  getJsonArrayElementPropertySQL(alias: string, property: string, type: string): string;
  getDefaultClientUrl(): string;
}

View File

@@ -0,0 +1,363 @@
import { ALIAS_REPLACEMENT, ARRAY_OPERATORS, raw, RawQueryFragment, Type, Utils } from '@mikro-orm/core';
import { AbstractSqlPlatform } from '../../AbstractSqlPlatform.js';
import { PostgreSqlNativeQueryBuilder } from './PostgreSqlNativeQueryBuilder.js';
import { PostgreSqlSchemaHelper } from './PostgreSqlSchemaHelper.js';
import { PostgreSqlExceptionConverter } from './PostgreSqlExceptionConverter.js';
import { FullTextType } from './FullTextType.js';
export class BasePostgreSqlPlatform extends AbstractSqlPlatform {
schemaHelper = new PostgreSqlSchemaHelper(this);
exceptionConverter = new PostgreSqlExceptionConverter();
/** Maps JS runtime type names to PostgreSQL cast types for JSON property access. @internal */
#jsonTypeCasts = { number: 'float8', bigint: 'int8', boolean: 'bool' };
createNativeQueryBuilder() {
return new PostgreSqlNativeQueryBuilder(this);
}
usesReturningStatement() {
return true;
}
usesCascadeStatement() {
return true;
}
supportsNativeEnums() {
return true;
}
usesEnumCheckConstraints() {
return true;
}
supportsMaterializedViews() {
return true;
}
supportsCustomPrimaryKeyNames() {
return true;
}
getCurrentTimestampSQL(length) {
return `current_timestamp(${length})`;
}
getDateTimeTypeDeclarationSQL(column) {
/* v8 ignore next */
return 'timestamptz' + (column.length != null ? `(${column.length})` : '');
}
getDefaultDateTimeLength() {
return 6;
}
getTimeTypeDeclarationSQL() {
return 'time(0)';
}
getIntegerTypeDeclarationSQL(column) {
if (column.autoincrement && !column.generated) {
return 'serial';
}
return 'int';
}
getBigIntTypeDeclarationSQL(column) {
/* v8 ignore next */
if (column.autoincrement) {
return `bigserial`;
}
return 'bigint';
}
getTinyIntTypeDeclarationSQL(column) {
return 'smallint';
}
getUuidTypeDeclarationSQL(column) {
return `uuid`;
}
getFullTextWhereClause(prop) {
if (prop.customType instanceof FullTextType) {
return `:column: @@ plainto_tsquery('${prop.customType.regconfig}', :query)`;
}
/* v8 ignore next */
if (prop.columnTypes[0] === 'tsvector') {
return `:column: @@ plainto_tsquery('simple', :query)`;
}
return `to_tsvector('simple', :column:) @@ plainto_tsquery('simple', :query)`;
}
supportsCreatingFullTextIndex() {
return true;
}
getFullTextIndexExpression(indexName, schemaName, tableName, columns) {
/* v8 ignore next */
const quotedTableName = this.quoteIdentifier(schemaName ? `${schemaName}.${tableName}` : tableName);
const quotedColumnNames = columns.map(c => this.quoteIdentifier(c.name));
const quotedIndexName = this.quoteIdentifier(indexName);
if (columns.length === 1 && columns[0].type === 'tsvector') {
return `create index ${quotedIndexName} on ${quotedTableName} using gin(${quotedColumnNames[0]})`;
}
return `create index ${quotedIndexName} on ${quotedTableName} using gin(to_tsvector('simple', ${quotedColumnNames.join(` || ' ' || `)}))`;
}
normalizeColumnType(type, options) {
const simpleType = this.extractSimpleType(type);
if (['int', 'int4', 'integer'].includes(simpleType)) {
return this.getIntegerTypeDeclarationSQL({});
}
if (['bigint', 'int8'].includes(simpleType)) {
return this.getBigIntTypeDeclarationSQL({});
}
if (['smallint', 'int2'].includes(simpleType)) {
return this.getSmallIntTypeDeclarationSQL({});
}
if (['boolean', 'bool'].includes(simpleType)) {
return this.getBooleanTypeDeclarationSQL();
}
if (['varchar', 'character varying'].includes(simpleType)) {
return this.getVarcharTypeDeclarationSQL(options);
}
if (['char', 'bpchar'].includes(simpleType)) {
return this.getCharTypeDeclarationSQL(options);
}
if (['decimal', 'numeric'].includes(simpleType)) {
return this.getDecimalTypeDeclarationSQL(options);
}
if (['interval'].includes(simpleType)) {
return this.getIntervalTypeDeclarationSQL(options);
}
return super.normalizeColumnType(type, options);
}
getMappedType(type) {
switch (this.extractSimpleType(type)) {
case 'tsvector':
return Type.getType(FullTextType);
default:
return super.getMappedType(type);
}
}
getRegExpOperator(val, flags) {
/* v8 ignore next */
if ((val instanceof RegExp && val.flags.includes('i')) || flags?.includes('i')) {
return '~*';
}
return '~';
}
/* v8 ignore next */
getRegExpValue(val) {
if (val.flags.includes('i')) {
return { $re: val.source, $flags: val.flags };
}
return { $re: val.source };
}
isBigIntProperty(prop) {
return super.isBigIntProperty(prop) || ['bigserial', 'int8'].includes(prop.columnTypes?.[0]);
}
getArrayDeclarationSQL() {
return 'text[]';
}
getFloatDeclarationSQL() {
return 'real';
}
getDoubleDeclarationSQL() {
return 'double precision';
}
getEnumTypeDeclarationSQL(column) {
/* v8 ignore next */
if (column.nativeEnumName) {
return column.nativeEnumName;
}
if (column.items?.every(item => typeof item === 'string')) {
return 'text';
}
return `smallint`;
}
supportsMultipleStatements() {
return true;
}
getBeginTransactionSQL(options) {
if (options?.isolationLevel || options?.readOnly) {
let sql = 'start transaction';
sql += options.isolationLevel ? ` isolation level ${options.isolationLevel}` : '';
sql += options.readOnly ? ` read only` : '';
return [sql];
}
return ['begin'];
}
marshallArray(values) {
const quote = v => (v === '' || /["{},\\]/.exec(v) ? JSON.stringify(v) : v);
return `{${values.map(v => quote('' + v)).join(',')}}`;
}
/* v8 ignore next */
unmarshallArray(value) {
if (value === '{}') {
return [];
}
return value
.substring(1, value.length - 1)
.split(',')
.map(v => {
if (v === `""`) {
return '';
}
if (/"(.*)"/.exec(v)) {
return v.substring(1, v.length - 1).replaceAll('\\"', '"');
}
return v;
});
}
getVarcharTypeDeclarationSQL(column) {
if (column.length === -1) {
return 'varchar';
}
return super.getVarcharTypeDeclarationSQL(column);
}
getCharTypeDeclarationSQL(column) {
if (column.length === -1) {
return 'char';
}
return super.getCharTypeDeclarationSQL(column);
}
getIntervalTypeDeclarationSQL(column) {
return 'interval' + (column.length != null ? `(${column.length})` : '');
}
getBlobDeclarationSQL() {
return 'bytea';
}
getJsonDeclarationSQL() {
return 'jsonb';
}
getSearchJsonPropertyKey(path, type, aliased, value) {
const first = path.shift();
const last = path.pop();
const root = this.quoteIdentifier(aliased ? `${ALIAS_REPLACEMENT}.${first}` : first);
type = typeof type === 'string' ? this.getMappedType(type).runtimeType : String(type);
const cast = key => raw(type in this.#jsonTypeCasts ? `(${key})::${this.#jsonTypeCasts[type]}` : key);
let lastOperator = '->>';
// force `->` for operator payloads with array values
if (
Utils.isPlainObject(value) &&
Object.keys(value).every(key => ARRAY_OPERATORS.includes(key) && Array.isArray(value[key]))
) {
lastOperator = '->';
}
if (path.length === 0) {
return cast(`${root}${lastOperator}'${last}'`);
}
return cast(`${root}->${path.map(a => this.quoteValue(a)).join('->')}${lastOperator}'${last}'`);
}
getJsonIndexDefinition(index) {
return index.columnNames.map(column => {
if (!column.includes('.')) {
return column;
}
const path = column.split('.');
const first = path.shift();
const last = path.pop();
if (path.length === 0) {
return `(${this.quoteIdentifier(first)}->>${this.quoteValue(last)})`;
}
return `(${this.quoteIdentifier(first)}->${path.map(c => this.quoteValue(c)).join('->')}->>${this.quoteValue(last)})`;
});
}
quoteIdentifier(id, quote = '"') {
if (RawQueryFragment.isKnownFragment(id)) {
return super.quoteIdentifier(id);
}
return `${quote}${id.toString().replace('.', `${quote}.${quote}`)}${quote}`;
}
pad(number, digits) {
return String(number).padStart(digits, '0');
}
/** @internal */
formatDate(date) {
if (this.timezone === 'Z') {
return date.toISOString();
}
let offset = -date.getTimezoneOffset();
let year = date.getFullYear();
const isBCYear = year < 1;
/* v8 ignore next */
if (isBCYear) {
year = Math.abs(year) + 1;
}
const datePart = `${this.pad(year, 4)}-${this.pad(date.getMonth() + 1, 2)}-${this.pad(date.getDate(), 2)}`;
const timePart = `${this.pad(date.getHours(), 2)}:${this.pad(date.getMinutes(), 2)}:${this.pad(date.getSeconds(), 2)}.${this.pad(date.getMilliseconds(), 3)}`;
let ret = `${datePart}T${timePart}`;
/* v8 ignore next */
if (offset < 0) {
ret += '-';
offset *= -1;
} else {
ret += '+';
}
ret += this.pad(Math.floor(offset / 60), 2) + ':' + this.pad(offset % 60, 2);
/* v8 ignore next */
if (isBCYear) {
ret += ' BC';
}
return ret;
}
indexForeignKeys() {
return false;
}
getDefaultMappedType(type) {
const normalizedType = this.extractSimpleType(type);
const map = {
int2: 'smallint',
smallserial: 'smallint',
int: 'integer',
int4: 'integer',
serial: 'integer',
serial4: 'integer',
int8: 'bigint',
bigserial: 'bigint',
serial8: 'bigint',
numeric: 'decimal',
bool: 'boolean',
real: 'float',
float4: 'float',
float8: 'double',
timestamp: 'datetime',
timestamptz: 'datetime',
bytea: 'blob',
jsonb: 'json',
'character varying': 'varchar',
bpchar: 'character',
};
return super.getDefaultMappedType(map[normalizedType] ?? type);
}
supportsSchemas() {
return true;
}
getDefaultSchemaName() {
return 'public';
}
/**
* Returns the default name of index for the given columns
* cannot go past 63 character length for identifiers in MySQL
*/
getIndexName(tableName, columns, type) {
const indexName = super.getIndexName(tableName, columns, type);
if (indexName.length > 63) {
const suffix = type === 'primary' ? 'pkey' : type;
return `${indexName.substring(0, 55 - type.length)}_${Utils.hash(indexName, 5)}_${suffix}`;
}
return indexName;
}
getDefaultPrimaryName(tableName, columns) {
const indexName = `${tableName}_pkey`;
if (indexName.length > 63) {
return `${indexName.substring(0, 55 - 'pkey'.length)}_${Utils.hash(indexName, 5)}_pkey`;
}
return indexName;
}
/**
* @inheritDoc
*/
castColumn(prop) {
switch (prop?.columnTypes?.[0]) {
case this.getUuidTypeDeclarationSQL({}):
return '::text';
case this.getBooleanTypeDeclarationSQL():
return '::int';
default:
return '';
}
}
getJsonArrayFromSQL(column, alias, _properties) {
return `jsonb_array_elements(${column}) as ${this.quoteIdentifier(alias)}`;
}
getJsonArrayElementPropertySQL(alias, property, type) {
const expr = `${this.quoteIdentifier(alias)}->>${this.quoteValue(property)}`;
return type in this.#jsonTypeCasts ? `(${expr})::${this.#jsonTypeCasts[type]}` : expr;
}
getDefaultClientUrl() {
return 'postgresql://postgres@127.0.0.1:5432';
}
}

View File

@@ -0,0 +1,18 @@
import { Type, type TransformContext, type RawQueryFragment } from '@mikro-orm/core';
import type { BasePostgreSqlPlatform } from './BasePostgreSqlPlatform.js';
/** Weight labels accepted by PostgreSQL's `setweight()` function. */
type FullTextWeight = 'A' | 'B' | 'C' | 'D';
/** Maps tsvector weights to the source text indexed under each weight. */
export type WeightedFullTextValue = {
  [K in FullTextWeight]?: string | null;
};
/**
 * Custom type mapping for PostgreSQL `tsvector` columns. Accepts either a
 * plain string or an object of weighted strings.
 */
export declare class FullTextType extends Type<string | WeightedFullTextValue, string | null | RawQueryFragment> {
  /** Text search configuration (regconfig) used by `to_tsvector()`, e.g. `'simple'`. */
  regconfig: string;
  constructor(regconfig?: string);
  compareAsType(): string;
  getColumnType(): string;
  /** Converts the JS value into a `to_tsvector`/`setweight` SQL fragment. */
  convertToDatabaseValue(
    value: string | WeightedFullTextValue,
    platform: BasePostgreSqlPlatform,
    context?: TransformContext | boolean,
  ): string | null | RawQueryFragment;
}
export {};

View File

@@ -0,0 +1,59 @@
import { raw, Type } from '@mikro-orm/core';
/**
 * Custom type mapping for PostgreSQL `tsvector` columns. Values may be plain
 * strings or objects of weighted strings (`{ A: '...', B: '...' }`).
 */
export class FullTextType extends Type {
  // Text search configuration (regconfig) used by `to_tsvector()`.
  regconfig;
  constructor(regconfig = 'simple') {
    super();
    this.regconfig = regconfig;
  }
  compareAsType() {
    return 'any';
  }
  getColumnType() {
    return 'tsvector';
  }
  // Use convertToDatabaseValue to prepare insert queries as this method has
  // access to the raw JS value. Return a raw() fragment to prevent
  // QueryBuilderHelper#mapData from sanitizing the returned chain of SQL functions.
  convertToDatabaseValue(value, platform, context) {
    // Don't convert values from select queries to the to_tsvector notation —
    // these should be compared as string using a special operator or function;
    // this behaviour is defined in Platform#getFullTextWhereClause.
    // This is always a string.
    if (typeof context === 'object' && context.fromQuery) {
      return value;
    }
    // Null values should not be processed
    if (!value) {
      return null;
    }
    // An object that looks like { A: 'test data', B: 'test data2', ... }
    // must be converted to
    // setweight(to_tsvector(regconfig, value), A) || setweight(to_tsvector(regconfig, value), B) ... etc.
    // Use raw() to do parameter binding and sanitization of the bound values,
    // as we return a raw SQL string that should not be sanitized again.
    if (typeof value === 'object') {
      const bindings = [];
      const sqlParts = [];
      for (const [weight, data] of Object.entries(value)) {
        // Check whether the weight is valid according to Postgres,
        // Postgres allows the weight to be upper and lowercase.
        if (!['A', 'B', 'C', 'D'].includes(weight.toUpperCase())) {
          throw new Error('Weight should be one of A, B, C, D.');
        }
        // Ignore all values that are not a string
        if (typeof data === 'string') {
          sqlParts.push('setweight(to_tsvector(?, ?), ?)');
          bindings.push(this.regconfig, data, weight);
        }
      }
      // Return null if the object has no valid strings
      if (sqlParts.length === 0) {
        return null;
      }
      // Join all the `setweight` parts using the PostgreSQL tsvector `||` concatenation operator
      return raw(sqlParts.join(' || '), bindings);
    }
    // If it's not an object, it is expected to be a string which does not have to be wrapped in setweight.
    return raw('to_tsvector(?, ?)', [this.regconfig, value]);
  }
}

View File

@@ -0,0 +1,8 @@
import { ExceptionConverter, type Dictionary, type DriverException } from '@mikro-orm/core';
/** Translates low-level PostgreSQL errors into typed driver exceptions. */
export declare class PostgreSqlExceptionConverter extends ExceptionConverter {
  /**
   * Maps the PostgreSQL SQLSTATE error code carried on the error object to a
   * specific DriverException subclass.
   *
   * @see http://www.postgresql.org/docs/9.4/static/errcodes-appendix.html
   * @see https://github.com/doctrine/dbal/blob/master/src/Driver/AbstractPostgreSQLDriver.php
   */
  convertException(exception: Error & Dictionary): DriverException;
}

View File

@@ -0,0 +1,59 @@
import {
DeadlockException,
ExceptionConverter,
ForeignKeyConstraintViolationException,
InvalidFieldNameException,
NonUniqueFieldNameException,
NotNullConstraintViolationException,
SyntaxErrorException,
TableExistsException,
TableNotFoundException,
UniqueConstraintViolationException,
CheckConstraintViolationException,
} from '@mikro-orm/core';
/** Translates low-level PostgreSQL errors into typed driver exceptions. */
export class PostgreSqlExceptionConverter extends ExceptionConverter {
  /**
   * Maps the PostgreSQL SQLSTATE error code carried on the error object to a
   * specific DriverException subclass, enriching the message with any
   * server-provided detail/hint first.
   *
   * @see http://www.postgresql.org/docs/9.4/static/errcodes-appendix.html
   * @see https://github.com/doctrine/dbal/blob/master/src/Driver/AbstractPostgreSQLDriver.php
   */
  convertException(exception) {
    // Surface the server-provided context directly in the message.
    if (exception.detail?.toString().trim()) {
      exception.message += '\n - detail: ' + exception.detail;
    }
    if (exception.hint?.toString().trim()) {
      exception.message += '\n - hint: ' + exception.hint;
    }
    const code = exception.code;
    // Serialization failure / deadlock detected.
    if (code === '40001' || code === '40P01') {
      return new DeadlockException(exception);
    }
    // Foreign key constraint violations during a TRUNCATE operation
    // are considered "feature not supported" (0A000) in PostgreSQL.
    if (code === '0A000' && exception.message.includes('truncate')) {
      return new ForeignKeyConstraintViolationException(exception);
    }
    // Simple one-to-one SQLSTATE → exception class mappings.
    const byCode = {
      '23502': NotNullConstraintViolationException,
      '23503': ForeignKeyConstraintViolationException,
      '23505': UniqueConstraintViolationException,
      '23514': CheckConstraintViolationException,
      '42601': SyntaxErrorException,
      '42702': NonUniqueFieldNameException,
      '42703': InvalidFieldNameException,
      '42P01': TableNotFoundException,
      '42P07': TableExistsException,
    };
    const ExceptionClass = byCode[code];
    if (ExceptionClass) {
      return new ExceptionClass(exception);
    }
    return super.convertException(exception);
  }
}

View File

@@ -0,0 +1,5 @@
import { NativeQueryBuilder } from '../../query/NativeQueryBuilder.js';
/** @internal PostgreSQL-flavoured native query builder. */
export declare class PostgreSqlNativeQueryBuilder extends NativeQueryBuilder {
  /** Appends `restart identity cascade` to the base truncate statement. */
  protected compileTruncate(): void;
}

View File

@@ -0,0 +1,8 @@
import { NativeQueryBuilder } from '../../query/NativeQueryBuilder.js';
/** @internal PostgreSQL-flavoured native query builder. */
export class PostgreSqlNativeQueryBuilder extends NativeQueryBuilder {
  // Truncate in PostgreSQL also resets sequences and cascades to dependents.
  compileTruncate() {
    super.compileTruncate();
    this.parts.push('restart identity cascade');
  }
}

View File

@@ -0,0 +1,110 @@
import { type Dictionary } from '@mikro-orm/core';
import { SchemaHelper } from '../../schema/SchemaHelper.js';
import type { AbstractSqlConnection } from '../../AbstractSqlConnection.js';
import type { CheckDef, Column, ForeignKey, IndexDef, Table, TableDifference } from '../../typings.js';
import type { DatabaseSchema } from '../../schema/DatabaseSchema.js';
import type { DatabaseTable } from '../../schema/DatabaseTable.js';
/**
 * Schema introspection and DDL generation helper for PostgreSQL: table/view
 * listing, index/check/foreign-key discovery, native enum handling and
 * ALTER TABLE generation.
 */
export declare class PostgreSqlSchemaHelper extends SchemaHelper {
  /** Maps raw default value expressions reported by PostgreSQL to their normalized aliases. */
  static readonly DEFAULT_VALUES: {
    'now()': string[];
    'current_timestamp(?)': string[];
    "('now'::text)::timestamp(?) with time zone": string[];
    "('now'::text)::timestamp(?) without time zone": string[];
    'null::character varying': string[];
    'null::timestamp with time zone': string[];
    'null::timestamp without time zone': string[];
  };
  getSchemaBeginning(charset: string, disableForeignKeys?: boolean): string;
  getCreateDatabaseSQL(name: string): string;
  getListTablesSQL(): string;
  private getIgnoredViewsCondition;
  getListViewsSQL(): string;
  loadViews(schema: DatabaseSchema, connection: AbstractSqlConnection): Promise<void>;
  getListMaterializedViewsSQL(): string;
  loadMaterializedViews(schema: DatabaseSchema, connection: AbstractSqlConnection, schemaName?: string): Promise<void>;
  createMaterializedView(name: string, schema: string | undefined, definition: string, withData?: boolean): string;
  dropMaterializedViewIfExists(name: string, schema?: string): string;
  refreshMaterializedView(name: string, schema?: string, concurrently?: boolean): string;
  /** Lists user-defined schemas (ignoring system/extension namespaces). */
  getNamespaces(connection: AbstractSqlConnection): Promise<string[]>;
  private getIgnoredNamespacesConditionSQL;
  loadInformationSchema(
    schema: DatabaseSchema,
    connection: AbstractSqlConnection,
    tables: Table[],
    schemas?: string[],
  ): Promise<void>;
  getAllIndexes(connection: AbstractSqlConnection, tables: Table[]): Promise<Dictionary<IndexDef[]>>;
  /**
   * Parses column definitions from the full CREATE INDEX expression.
   * Since pg_get_indexdef(oid, col_num, true) doesn't include sort modifiers,
   * we extract them from the full expression instead.
   *
   * We use columnDefs (from individual pg_get_indexdef calls) as the source
   * of column names, and find their modifiers in the expression.
   */
  private parseIndexColumnsFromExpression;
  /**
   * Extracts the content inside parentheses starting at the given position.
   * Handles nested parentheses correctly.
   */
  private extractParenthesizedContent;
  getAllColumns(
    connection: AbstractSqlConnection,
    tablesBySchemas: Map<string | undefined, Table[]>,
    nativeEnums?: Dictionary<{
      name: string;
      schema?: string;
      items: string[];
    }>,
  ): Promise<Dictionary<Column[]>>;
  getAllChecks(
    connection: AbstractSqlConnection,
    tablesBySchemas: Map<string | undefined, Table[]>,
  ): Promise<Dictionary<CheckDef[]>>;
  getAllForeignKeys(
    connection: AbstractSqlConnection,
    tablesBySchemas: Map<string | undefined, Table[]>,
  ): Promise<Dictionary<Dictionary<ForeignKey>>>;
  /** Discovers native `create type ... as enum` definitions in the given schemas. */
  getNativeEnumDefinitions(
    connection: AbstractSqlConnection,
    schemas: string[],
  ): Promise<
    Dictionary<{
      name: string;
      schema?: string;
      items: string[];
    }>
  >;
  getCreateNativeEnumSQL(name: string, values: unknown[], schema?: string): string;
  getDropNativeEnumSQL(name: string, schema?: string): string;
  getAlterNativeEnumSQL(name: string, schema?: string, value?: string, items?: string[], oldItems?: string[]): string;
  private getEnumDefinitions;
  createTableColumn(column: Column, table: DatabaseTable): string | undefined;
  getPreAlterTable(tableDiff: TableDifference, safe: boolean): string[];
  castColumn(name: string, type: string): string;
  dropForeignKey(tableName: string, constraintName: string): string;
  getPostAlterTable(tableDiff: TableDifference, safe: boolean): string[];
  private getAlterColumnAutoincrement;
  getChangeColumnCommentSQL(tableName: string, to: Column, schemaName?: string): string;
  alterTableComment(table: DatabaseTable, comment?: string): string;
  normalizeDefaultValue(defaultValue: string, length: number): string | number;
  appendComments(table: DatabaseTable): string[];
  getDatabaseExistsSQL(name: string): string;
  getDatabaseNotExistsError(dbName: string): string;
  getManagementDbName(): string;
  disableForeignKeysSQL(): string;
  enableForeignKeysSQL(): string;
  getRenameIndexSQL(tableName: string, index: IndexDef, oldIndexName: string): string[];
  dropIndex(table: string, index: IndexDef, oldIndexName?: string): string;
  /**
   * Build the column list for a PostgreSQL index.
   */
  protected getIndexColumns(index: IndexDef): string;
  /**
   * PostgreSQL-specific index options like fill factor.
   */
  protected getCreateIndexSuffix(index: IndexDef): string;
  private getIndexesSQL;
  private getChecksSQL;
  inferLengthFromColumnType(type: string): number | undefined;
}

View File

@@ -0,0 +1,776 @@
import { DeferMode, EnumType, Type, Utils } from '@mikro-orm/core';
import { SchemaHelper } from '../../schema/SchemaHelper.js';
/** PostGIS system views that should be automatically ignored during view introspection. */
const POSTGIS_VIEWS = ['geography_columns', 'geometry_columns'];
export class PostgreSqlSchemaHelper extends SchemaHelper {
// Maps raw default value expressions as reported by PostgreSQL's information
// schema to the normalized forms used when diffing schemas.
static DEFAULT_VALUES = {
  'now()': ['now()', 'current_timestamp'],
  'current_timestamp(?)': ['current_timestamp(?)'],
  "('now'::text)::timestamp(?) with time zone": ['current_timestamp(?)'],
  "('now'::text)::timestamp(?) without time zone": ['current_timestamp(?)'],
  'null::character varying': ['null'],
  'null::timestamp with time zone': ['null'],
  'null::timestamp without time zone': ['null'],
};
getSchemaBeginning(charset, disableForeignKeys) {
if (disableForeignKeys) {
return `set names '${charset}';\n${this.disableForeignKeysSQL()}\n\n`;
}
return `set names '${charset}';\n\n`;
}
/** Returns the statement to create a database with the given (quoted) name. */
getCreateDatabaseSQL(name) {
  return `create database ${this.quote(name)}`;
}
/**
 * SQL that lists user tables (name, schema and comment), skipping system
 * namespaces, PostGIS tables, views and inherited (partition) tables.
 */
getListTablesSQL() {
  return (
    `select table_name, table_schema as schema_name, ` +
    `(select pg_catalog.obj_description(c.oid) from pg_catalog.pg_class c
where c.oid = (select ('"' || table_schema || '"."' || table_name || '"')::regclass::oid) and c.relname = table_name) as table_comment ` +
    `from information_schema.tables ` +
    `where ${this.getIgnoredNamespacesConditionSQL('table_schema')} ` +
    `and table_name != 'geometry_columns' and table_name != 'spatial_ref_sys' and table_type != 'VIEW' ` +
    `and table_name not in (select inhrelid::regclass::text from pg_inherits) ` +
    `order by table_name`
  );
}
// WHERE fragment that filters out the PostGIS system views.
getIgnoredViewsCondition() {
  return POSTGIS_VIEWS.map(v => `table_name != '${v}'`).join(' and ');
}
/** SQL that lists non-materialized views with their definitions. */
getListViewsSQL() {
  return (
    `select table_name as view_name, table_schema as schema_name, view_definition ` +
    `from information_schema.views ` +
    `where ${this.getIgnoredNamespacesConditionSQL('table_schema')} ` +
    `and ${this.getIgnoredViewsCondition()} ` +
    `order by table_name`
  );
}
/** Loads all (non-materialized) view definitions into the schema object. */
async loadViews(schema, connection) {
  const views = await connection.execute(this.getListViewsSQL());
  for (const view of views) {
    // Normalize: trim whitespace and drop the trailing semicolon.
    const definition = view.view_definition?.trim().replace(/;$/, '') ?? '';
    if (definition) {
      schema.addView(view.view_name, view.schema_name, definition);
    }
  }
}
/** SQL that lists materialized views (from `pg_matviews`) with their definitions. */
getListMaterializedViewsSQL() {
  return (
    `select matviewname as view_name, schemaname as schema_name, definition as view_definition ` +
    `from pg_matviews ` +
    `where ${this.getIgnoredNamespacesConditionSQL('schemaname')} ` +
    `order by matviewname`
  );
}
/** Loads all materialized view definitions into the schema object. */
// NOTE(review): the `schemaName` parameter is currently unused — materialized
// views from all (non-ignored) schemas are loaded.
async loadMaterializedViews(schema, connection, schemaName) {
  const views = await connection.execute(this.getListMaterializedViewsSQL());
  for (const view of views) {
    // Normalize: trim whitespace and drop the trailing semicolon.
    const definition = view.view_definition?.trim().replace(/;$/, '') ?? '';
    if (definition) {
      schema.addView(view.view_name, view.schema_name, definition, true);
    }
  }
}
createMaterializedView(name, schema, definition, withData = true) {
const viewName = this.quote(this.getTableName(name, schema));
const dataClause = withData ? ' with data' : ' with no data';
return `create materialized view ${viewName} as ${definition}${dataClause}`;
}
/** Builds a cascading `drop materialized view if exists` statement. */
dropMaterializedViewIfExists(name, schema) {
  return `drop materialized view if exists ${this.quote(this.getTableName(name, schema))} cascade`;
}
/** Builds a `refresh materialized view` statement, optionally `concurrently`. */
refreshMaterializedView(name, schema, concurrently = false) {
  const concurrent = concurrently ? ' concurrently' : '';
  return `refresh materialized view${concurrent} ${this.quote(this.getTableName(name, schema))}`;
}
/** Lists user-defined schema names, excluding system/extension namespaces. */
async getNamespaces(connection) {
  const sql =
    `select schema_name from information_schema.schemata ` +
    `where ${this.getIgnoredNamespacesConditionSQL()} ` +
    `order by schema_name`;
  const res = await connection.execute(sql);
  return res.map(row => row.schema_name);
}
/**
 * WHERE fragment excluding system/extension namespaces: exact names
 * (information_schema, PostGIS tiger/topology, user-configured ignores) and
 * prefixes (pg_, CockroachDB crdb_, TimescaleDB _timescaledb_).
 */
getIgnoredNamespacesConditionSQL(column = 'schema_name') {
  const ignored = [
    'information_schema',
    'tiger',
    'topology',
    /* v8 ignore next */
    ...(this.platform.getConfig().get('schemaGenerator').ignoreSchema ?? []),
  ]
    .map(s => this.platform.quoteValue(s))
    .join(', ');
  const ignoredPrefixes = ['pg_', 'crdb_', '_timescaledb_'].map(p => `"${column}" not like '${p}%'`).join(' and ');
  return `${ignoredPrefixes} and "${column}" not in (${ignored})`;
}
/**
 * Populates the schema object with native enums, and — for the requested
 * tables — columns, indexes, checks, primary keys and foreign keys.
 */
async loadInformationSchema(schema, connection, tables, schemas) {
  // Without an explicit list, derive schemas from the tables (or fall back to the schema's own name).
  schemas ??= tables.length === 0 ? [schema.name] : tables.map(t => t.schema_name);
  const nativeEnums = await this.getNativeEnumDefinitions(connection, schemas);
  schema.setNativeEnums(nativeEnums);
  if (tables.length === 0) {
    return;
  }
  const tablesBySchema = this.getTablesGroupedBySchemas(tables);
  const columns = await this.getAllColumns(connection, tablesBySchema, nativeEnums);
  const indexes = await this.getAllIndexes(connection, tables);
  const checks = await this.getAllChecks(connection, tablesBySchema);
  const fks = await this.getAllForeignKeys(connection, tablesBySchema);
  for (const t of tables) {
    const key = this.getTableKey(t);
    const table = schema.addTable(t.table_name, t.schema_name, t.table_comment);
    const pks = await this.getPrimaryKeys(connection, indexes[key], table.name, table.schema);
    const enums = this.getEnumDefinitions(checks[key] ?? []);
    if (columns[key]) {
      table.init(columns[key], indexes[key], checks[key], pks, fks[key], enums);
    }
  }
}
/**
 * Discovers all indexes for the given tables and normalizes them into
 * IndexDef objects keyed by table, including INCLUDE columns, fill factor,
 * defer mode, index type and per-column sort modifiers.
 */
async getAllIndexes(connection, tables) {
  const sql = this.getIndexesSQL(tables);
  // Strips any quoting characters from identifiers.
  const unquote = str => str.replace(/['"`]/g, '');
  const allIndexes = await connection.execute(sql);
  const ret = {};
  for (const index of allIndexes) {
    const key = this.getTableKey(index);
    // Extract INCLUDE columns from expression first, to filter them from key columns
    const includeMatch = index.expression?.match(/include\s*\(([^)]+)\)/i);
    const includeColumns = includeMatch ? includeMatch[1].split(',').map(col => unquote(col.trim())) : [];
    // Filter out INCLUDE columns from the column definitions to get only key columns
    const keyColumnDefs = index.index_def.filter(col => !includeColumns.includes(unquote(col)));
    // Parse sort order and NULLS ordering from the full expression
    // pg_get_indexdef individual columns don't include sort modifiers, so we parse from full expression
    const columns = this.parseIndexColumnsFromExpression(index.expression, keyColumnDefs, unquote);
    const columnNames = columns.map(col => col.name);
    const hasAdvancedColumnOptions = columns.some(col => col.sort || col.nulls || col.collation);
    const indexDef = {
      columnNames,
      composite: columnNames.length > 1,
      // JSON columns can have unique index but not unique constraint, and we need to distinguish those, so we can properly drop them
      constraint: index.contype === 'u',
      keyName: index.constraint_name,
      unique: index.unique,
      primary: index.primary,
    };
    // Add columns array if there are advanced options
    if (hasAdvancedColumnOptions) {
      indexDef.columns = columns;
    }
    if (index.condeferrable) {
      indexDef.deferMode = index.condeferred ? DeferMode.INITIALLY_DEFERRED : DeferMode.INITIALLY_IMMEDIATE;
    }
    // Keep the raw expression for expression-based or partial (WHERE) indexes.
    if (index.index_def.some(col => /[(): ,"'`]/.exec(col)) || index.expression?.match(/ where /i)) {
      indexDef.expression = index.expression;
    }
    if (index.deferrable) {
      indexDef.deferMode = index.initially_deferred ? DeferMode.INITIALLY_DEFERRED : DeferMode.INITIALLY_IMMEDIATE;
    }
    // Extract fillFactor from reloptions
    if (index.reloptions) {
      const fillFactorMatch = index.reloptions.find(opt => opt.startsWith('fillfactor='));
      if (fillFactorMatch) {
        indexDef.fillFactor = parseInt(fillFactorMatch.split('=')[1], 10);
      }
    }
    // Add INCLUDE columns (already extracted above)
    if (includeColumns.length > 0) {
      indexDef.include = includeColumns;
    }
    // Add index type if not btree (the default)
    if (index.index_type && index.index_type !== 'btree') {
      indexDef.type = index.index_type;
    }
    ret[key] ??= [];
    ret[key].push(indexDef);
  }
  return ret;
}
/**
* Parses column definitions from the full CREATE INDEX expression.
* Since pg_get_indexdef(oid, col_num, true) doesn't include sort modifiers,
* we extract them from the full expression instead.
*
* We use columnDefs (from individual pg_get_indexdef calls) as the source
* of column names, and find their modifiers in the expression.
*/
parseIndexColumnsFromExpression(expression, columnDefs, unquote) {
// Extract just the column list from the expression (between first parens after USING)
// Pattern: ... USING method (...columns...) [INCLUDE (...)] [WHERE ...]
// Note: pg_get_indexdef always returns a valid expression with USING clause
const usingMatch = /using\s+\w+\s*\(/i.exec(expression);
const startIdx = usingMatch.index + usingMatch[0].length - 1; // Position of opening (
const columnsStr = this.extractParenthesizedContent(expression, startIdx);
// Use the column names from columnDefs and find their modifiers in the expression
return columnDefs.map(colDef => {
const name = unquote(colDef);
const result = { name };
// Find this column in the expression and extract modifiers
// Create a pattern that matches the column name (quoted or unquoted) followed by modifiers
const escapedName = name.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
const colPattern = new RegExp(`"?${escapedName}"?\\s*([^,)]*?)(?:,|$)`, 'i');
const colMatch = columnsStr.match(colPattern);
if (colMatch) {
const modifiers = colMatch[1];
// Extract sort order (PostgreSQL omits ASC in output as it's the default)
if (/\bdesc\b/i.test(modifiers)) {
result.sort = 'DESC';
}
// Extract NULLS ordering
const nullsMatch = /nulls\s+(first|last)/i.exec(modifiers);
if (nullsMatch) {
result.nulls = nullsMatch[1].toUpperCase();
}
// Extract collation
const collateMatch = /collate\s+"?([^"\s,)]+)"?/i.exec(modifiers);
if (collateMatch) {
result.collation = collateMatch[1];
}
}
return result;
});
}
/**
* Extracts the content inside parentheses starting at the given position.
* Handles nested parentheses correctly.
*/
extractParenthesizedContent(str, startIdx) {
let depth = 0;
const start = startIdx + 1;
for (let i = startIdx; i < str.length; i++) {
if (str[i] === '(') {
depth++;
} else if (str[i] === ')') {
depth--;
if (depth === 0) {
return str.slice(start, i);
}
}
}
/* v8 ignore next - pg_get_indexdef always returns balanced parentheses */
return '';
}
/**
 * Loads column metadata for the given tables from `information_schema.columns`
 * (joined with pg catalogs for comments and type modifiers), grouped by table
 * key (`schema.table`). Native enum columns get their items attached when a
 * matching entry exists in `nativeEnums`.
 */
async getAllColumns(connection, tablesBySchemas, nativeEnums) {
const sql = `select table_schema as schema_name, table_name, column_name,
column_default,
is_nullable,
udt_name,
udt_schema,
coalesce(datetime_precision, character_maximum_length) length,
atttypmod custom_length,
numeric_precision,
numeric_scale,
data_type,
is_identity,
identity_generation,
generation_expression,
pg_catalog.col_description(pgc.oid, cols.ordinal_position::int) column_comment
from information_schema.columns cols
join pg_class pgc on cols.table_name = pgc.relname
join pg_attribute pga on pgc.oid = pga.attrelid and cols.column_name = pga.attname
where (${[...tablesBySchemas.entries()].map(([schema, tables]) => `(table_schema = ${this.platform.quoteValue(schema)} and table_name in (${tables.map(t => this.platform.quoteValue(t.table_name)).join(',')}))`).join(' or ')})
order by ordinal_position`;
const allColumns = await connection.execute(sql);
// keeps null/undefined as-is, stringifies everything else
const str = val => (val != null ? '' + val : val);
const ret = {};
for (const col of allColumns) {
const mappedType = connection.getPlatform().getMappedType(col.data_type);
// serial-like column: numeric with a nextval() default or an identity column
const increments =
(col.column_default?.includes('nextval') || col.is_identity === 'YES') &&
connection.getPlatform().isNumericColumn(mappedType);
const key = this.getTableKey(col);
ret[key] ??= [];
// array types are reported as `_element`; rewrite to `element[]`
let type = col.data_type.toLowerCase() === 'array' ? col.udt_name.replace(/^_(.*)$/, '$1[]') : col.udt_name;
// schema-qualify user-defined types living outside the default schema
if (
col.data_type === 'USER-DEFINED' &&
col.udt_schema &&
col.udt_schema !== this.platform.getDefaultSchemaName()
) {
type = `${col.udt_schema}.${type}`;
}
// `bpchar` is the internal name of `char`
if (type === 'bpchar') {
type = 'char';
}
// vector dimension is only exposed via atttypmod (-1 means "no modifier")
if (type === 'vector' && col.length == null && col.custom_length != null && col.custom_length !== -1) {
col.length = col.custom_length;
}
if (col.length != null && !type.endsWith(`(${col.length})`) && !['text', 'date'].includes(type)) {
type += `(${col.length})`;
}
if (type === 'numeric' && col.numeric_precision != null && col.numeric_scale != null) {
type += `(${col.numeric_precision},${col.numeric_scale})`;
}
// -1 marks unlimited-length character types (see inferLengthFromColumnType)
const length = this.inferLengthFromColumnType(type) === -1 ? -1 : col.length;
const column = {
name: col.column_name,
type,
mappedType,
length,
precision: col.numeric_precision,
scale: col.numeric_scale,
nullable: col.is_nullable === 'YES',
default: str(this.normalizeDefaultValue(col.column_default, col.length)),
unsigned: increments,
autoincrement: increments,
generated:
col.is_identity === 'YES'
? col.identity_generation === 'BY DEFAULT'
? 'by default as identity'
: 'identity'
: col.generation_expression
? col.generation_expression + ' stored'
: undefined,
comment: col.column_comment,
};
// columns backed by a native enum type get the enum items attached
if (nativeEnums?.[column.type]) {
column.mappedType = Type.getType(EnumType);
column.nativeEnumName = column.type;
column.enumItems = nativeEnums[column.type]?.items;
}
ret[key].push(column);
}
return ret;
}
async getAllChecks(connection, tablesBySchemas) {
const sql = this.getChecksSQL(tablesBySchemas);
const allChecks = await connection.execute(sql);
const ret = {};
const seen = new Set();
for (const check of allChecks) {
const key = this.getTableKey(check);
const dedupeKey = `${key}:${check.name}`;
if (seen.has(dedupeKey)) {
continue;
}
seen.add(dedupeKey);
ret[key] ??= [];
const m = /^check \(\((.*)\)\)$/is.exec(check.expression);
const def = m?.[1].replace(/\((.*?)\)::\w+/g, '$1');
ret[key].push({
name: check.name,
columnName: check.column_name,
definition: check.expression,
expression: def,
});
}
return ret;
}
/**
 * Loads foreign key metadata for the given tables from the pg catalogs and
 * returns it grouped by table key, already normalized via `mapForeignKeys()`.
 */
async getAllForeignKeys(connection, tablesBySchemas) {
const sql = `select nsp1.nspname schema_name, cls1.relname table_name, nsp2.nspname referenced_schema_name,
cls2.relname referenced_table_name, a.attname column_name, af.attname referenced_column_name, conname constraint_name,
confupdtype update_rule, confdeltype delete_rule, array_position(con.conkey,a.attnum) as ord, condeferrable, condeferred,
pg_get_constraintdef(con.oid) as constraint_def
from pg_attribute a
join pg_constraint con on con.conrelid = a.attrelid AND a.attnum = ANY (con.conkey)
join pg_attribute af on af.attnum = con.confkey[array_position(con.conkey,a.attnum)] AND af.attrelid = con.confrelid
join pg_namespace nsp1 on nsp1.oid = con.connamespace
join pg_class cls1 on cls1.oid = con.conrelid
join pg_class cls2 on cls2.oid = confrelid
join pg_namespace nsp2 on nsp2.oid = cls2.relnamespace
where (${[...tablesBySchemas.entries()].map(([schema, tables]) => `(cls1.relname in (${tables.map(t => this.platform.quoteValue(t.table_name)).join(',')}) and nsp1.nspname = ${this.platform.quoteValue(schema)})`).join(' or ')})
and confrelid > 0
order by nsp1.nspname, cls1.relname, constraint_name, ord`;
const allFks = await connection.execute(sql);
const ret = {};
// Maps pg_constraint action codes (confupdtype/confdeltype) to SQL keywords.
// `set null`/`set default` with an explicit column list is recovered from the
// full constraint definition instead.
function mapReferentialIntegrity(value, def) {
const match = ['n', 'd'].includes(value) && /ON DELETE (SET (NULL|DEFAULT) \(.*?\))/.exec(def);
if (match) {
return match[1];
}
/* v8 ignore next */
switch (value) {
case 'r':
return 'RESTRICT';
case 'c':
return 'CASCADE';
case 'n':
return 'SET NULL';
case 'd':
return 'SET DEFAULT';
case 'a':
default:
return 'NO ACTION';
}
}
for (const fk of allFks) {
fk.update_rule = mapReferentialIntegrity(fk.update_rule, fk.constraint_def);
fk.delete_rule = mapReferentialIntegrity(fk.delete_rule, fk.constraint_def);
if (fk.condeferrable) {
fk.defer_mode = fk.condeferred ? DeferMode.INITIALLY_DEFERRED : DeferMode.INITIALLY_IMMEDIATE;
}
const key = this.getTableKey(fk);
ret[key] ??= [];
ret[key].push(fk);
}
// normalize the raw catalog rows into FK definitions, per table
Object.keys(ret).forEach(key => {
const [schemaName, tableName] = key.split('.');
ret[key] = this.mapForeignKeys(ret[key], tableName, schemaName);
});
return ret;
}
/**
 * Loads native enum type definitions for the given schemas, keyed by the enum
 * name (prefixed with `schema.` when outside the default schema).
 */
async getNativeEnumDefinitions(connection, schemas) {
const uniqueSchemas = Utils.unique(schemas);
const res = await connection.execute(
`select t.typname as enum_name, n.nspname as schema_name, array_agg(e.enumlabel order by e.enumsortorder) as enum_value
from pg_type t
join pg_enum e on t.oid = e.enumtypid
join pg_catalog.pg_namespace n on n.oid = t.typnamespace
where n.nspname in (${Array(uniqueSchemas.length).fill('?').join(', ')})
group by t.typname, n.nspname`,
uniqueSchemas,
);
return res.reduce((o, row) => {
let name = row.enum_name;
if (row.schema_name && row.schema_name !== this.platform.getDefaultSchemaName()) {
name = row.schema_name + '.' + name;
}
let items = row.enum_value;
// some drivers return the aggregated labels as a PG array literal string
if (!Array.isArray(items)) {
items = this.platform.unmarshallArray(row.enum_value);
}
o[name] = {
name: row.enum_name,
schema: row.schema_name,
items,
};
return o;
}, {});
}
getCreateNativeEnumSQL(name, values, schema) {
if (schema && schema !== this.platform.getDefaultSchemaName()) {
name = schema + '.' + name;
}
return `create type ${this.quote(name)} as enum (${values.map(value => this.platform.quoteValue(value)).join(', ')})`;
}
getDropNativeEnumSQL(name, schema) {
if (schema && schema !== this.platform.getDefaultSchemaName()) {
name = schema + '.' + name;
}
return `drop type ${this.quote(name)}`;
}
getAlterNativeEnumSQL(name, schema, value, items, oldItems) {
if (schema && schema !== this.platform.getDefaultSchemaName()) {
name = schema + '.' + name;
}
let suffix = '';
if (items && value && oldItems) {
const position = items.indexOf(value);
if (position > 0) {
suffix = ` after ${this.platform.quoteValue(items[position - 1])}`;
} else if (items.length > 1 && oldItems.length > 0) {
suffix = ` before ${this.platform.quoteValue(oldItems[0])}`;
}
}
return `alter type ${this.quote(name)} add value if not exists ${this.platform.quoteValue(value)}${suffix}`;
}
/**
 * Extracts enum item lists from check constraint definitions, keyed by column
 * name. As a side effect, rewrites each matched check's expression/definition
 * into the normalized `"column" in ('a', 'b')` form used for diffing.
 */
getEnumDefinitions(checks) {
return checks.reduce((o, item) => {
// check constraints are defined as one of:
// `CHECK ((type = ANY (ARRAY['local'::text, 'global'::text])))`
// `CHECK (("columnName" = ANY (ARRAY['local'::text, 'global'::text])))`
// `CHECK (((enum_test)::text = ANY ((ARRAY['a'::character varying, 'b'::character varying, 'c'::character varying])::text[])))`
// `CHECK ((("enumTest")::text = ANY ((ARRAY['a'::character varying, 'b'::character varying, 'c'::character varying])::text[])))`
// `CHECK ((type = 'a'::text))`
// m1 captures the column name, m2 the value list (or the single value)
const m1 =
item.definition?.match(/check \(\(\("?(\w+)"?\)::/i) || item.definition?.match(/check \(\("?(\w+)"? = /i);
const m2 = item.definition?.match(/\(array\[(.*)]\)/i) || item.definition?.match(/ = (.*)\)/i);
if (item.columnName && m1 && m2) {
// prefer explicit `'...'::text` tokens; otherwise split the list on commas
const m3 = m2[1].match(/('[^']*'::text)/g);
let items;
/* v8 ignore next */
if (m3) {
items = m3.map(item => /^\(?'(.*)'/.exec(item.trim())?.[1]);
} else {
items = m2[1].split(',').map(item => /^\(?'(.*)'/.exec(item.trim())?.[1]);
}
items = items.filter(item => item !== undefined);
if (items.length > 0) {
o[item.columnName] = items;
// normalize so comparison against metadata-derived checks succeeds
item.expression = `${this.quote(item.columnName)} in ('${items.join("', '")}')`;
item.definition = `check (${item.expression})`;
}
}
return o;
}, {});
}
/**
 * Renders a single column definition for a `create table` statement.
 * Handles serial/identity columns, native enum types (with `*.` schema
 * substitution and array suffix), generated columns, nullability, inline
 * primary key and the default value.
 */
createTableColumn(column, table) {
const pk = table.getPrimaryKey();
const compositePK = pk?.composite;
// custom-named PK constraints are emitted separately, not inline
const primaryKey = !this.hasNonDefaultPrimaryKeyName(table);
const col = [this.quote(column.name)];
if (column.autoincrement && !column.generated && !compositePK) {
// serial/identity shorthand comes from the mapped type
col.push(column.mappedType.getColumnType({ autoincrement: true }, this.platform));
} else {
let columnType = column.type;
if (column.nativeEnumName) {
const parts = column.type.split('.');
// `*.enum` means "use the table's own schema"
if (parts.length === 2 && parts[0] === '*') {
columnType = `${table.schema}.${parts[1]}`;
}
// quote the type name but keep the array suffix outside the quotes
if (columnType.endsWith('[]')) {
columnType = this.quote(columnType.substring(0, columnType.length - 2)) + '[]';
} else {
columnType = this.quote(columnType);
}
}
if (column.generated === 'by default as identity') {
columnType += ` generated ${column.generated}`;
} else if (column.generated) {
columnType += ` generated always as ${column.generated}`;
}
col.push(columnType);
Utils.runIfNotEmpty(() => col.push('null'), column.nullable);
Utils.runIfNotEmpty(() => col.push('not null'), !column.nullable);
}
if (column.autoincrement && !compositePK) {
Utils.runIfNotEmpty(() => col.push('primary key'), primaryKey && column.primary);
}
// `default null` is redundant; autoincrement defaults come from sequences
const useDefault = column.default != null && column.default !== 'null' && !column.autoincrement;
Utils.runIfNotEmpty(() => col.push(`default ${column.default}`), useDefault);
return col.join(' ');
}
/**
 * SQL to run before the main `alter table` statements of a schema diff.
 * Drops defaults on columns converting to/from a native enum (the old default
 * would not cast to the new type), casts `uuid` columns to `text` ahead of a
 * type change, and drops stale defaults when autoincrement is removed.
 *
 * @param tableDiff diff of one table; `name` may be schema-qualified
 * @param safe part of the generic schema-helper contract; unused here
 */
getPreAlterTable(tableDiff, safe) {
const ret = [];
const parts = tableDiff.name.split('.');
const tableName = parts.pop();
const schemaName = parts.pop();
/* v8 ignore next */
const name =
(schemaName && schemaName !== this.platform.getDefaultSchemaName() ? schemaName + '.' : '') + tableName;
const quotedName = this.quote(name);
// detect that the column was an enum before and remove the check constraint in such case here
const changedEnums = Object.values(tableDiff.changedColumns).filter(
col => col.fromColumn.mappedType instanceof EnumType,
);
for (const col of changedEnums) {
// check-constraint enum -> native enum: old default cannot cast
if (!col.fromColumn.nativeEnumName && col.column.nativeEnumName && col.fromColumn.default) {
ret.push(`alter table ${quotedName} alter column "${col.column.name}" drop default`);
}
// native enum -> check-constraint enum: same problem in reverse
if (col.fromColumn.nativeEnumName && !col.column.nativeEnumName && col.fromColumn.default) {
ret.push(`alter table ${quotedName} alter column "${col.column.name}" drop default`);
}
}
// changing uuid column type requires to cast it to text first
const uuids = Object.values(tableDiff.changedColumns).filter(
col => col.changedProperties.has('type') && col.fromColumn.type === 'uuid',
);
for (const col of uuids) {
ret.push(
`alter table ${quotedName} alter column "${col.column.name}" type text using ("${col.column.name}"::text)`,
);
}
// columns losing autoincrement also lose their nextval() default
for (const { column } of Object.values(tableDiff.changedColumns).filter(diff =>
diff.changedProperties.has('autoincrement'),
)) {
if (!column.autoincrement && column.default == null) {
ret.push(`alter table ${quotedName} alter column ${this.quote(column.name)} drop default`);
}
}
return ret;
}
castColumn(name, type) {
if (type === 'uuid') {
type = 'text::uuid';
}
return ` using (${this.quote(name)}::${type})`;
}
dropForeignKey(tableName, constraintName) {
return `alter table ${this.quote(tableName)} drop constraint ${this.quote(constraintName)}`;
}
/**
 * SQL to run after the main `alter table` statements of a schema diff.
 * Restores the defaults dropped by `getPreAlterTable()` for enum conversions,
 * and (re)creates sequences for columns whose `autoincrement` flag changed.
 *
 * @param tableDiff diff of one table; `name` may be schema-qualified
 * @param safe part of the generic schema-helper contract; unused here
 */
getPostAlterTable(tableDiff, safe) {
const ret = [];
const parts = tableDiff.name.split('.');
const tableName = parts.pop();
const schemaName = parts.pop();
/* v8 ignore next */
const name =
(schemaName && schemaName !== this.platform.getDefaultSchemaName() ? schemaName + '.' : '') + tableName;
const quotedName = this.quote(name);
// detect that the column was an enum before and remove the check constraint in such a case here
const changedEnums = Object.values(tableDiff.changedColumns).filter(
col => col.fromColumn.mappedType instanceof EnumType,
);
for (const col of changedEnums) {
// check-constraint enum -> native enum: re-apply the new default
if (!col.fromColumn.nativeEnumName && col.column.nativeEnumName && col.column.default) {
ret.push(`alter table ${quotedName} alter column "${col.column.name}" set default ${col.column.default}`);
}
// native enum -> check-constraint enum: re-apply the new default
if (col.fromColumn.nativeEnumName && !col.column.nativeEnumName && col.column.default) {
ret.push(`alter table ${quotedName} alter column "${col.column.name}" set default ${col.column.default}`);
}
}
for (const { column } of Object.values(tableDiff.changedColumns).filter(diff =>
diff.changedProperties.has('autoincrement'),
)) {
ret.push(...this.getAlterColumnAutoincrement(tableName, column, schemaName));
}
return ret;
}
getAlterColumnAutoincrement(tableName, column, schemaName) {
const ret = [];
/* v8 ignore next */
const name =
(schemaName && schemaName !== this.platform.getDefaultSchemaName() ? schemaName + '.' : '') + tableName;
if (column.autoincrement) {
const seqName = this.platform.getIndexName(tableName, [column.name], 'sequence');
ret.push(`create sequence if not exists ${this.quote(seqName)}`);
ret.push(`select setval('${seqName}', (select max(${this.quote(column.name)}) from ${this.quote(name)}))`);
ret.push(
`alter table ${this.quote(name)} alter column ${this.quote(column.name)} set default nextval('${seqName}')`,
);
}
return ret;
}
getChangeColumnCommentSQL(tableName, to, schemaName) {
const name = this.quote(
(schemaName && schemaName !== this.platform.getDefaultSchemaName() ? schemaName + '.' : '') + tableName,
);
const value = to.comment ? this.platform.quoteValue(to.comment) : 'null';
return `comment on column ${name}.${this.quote(to.name)} is ${value}`;
}
alterTableComment(table, comment) {
return `comment on table ${table.getQuotedName()} is ${this.platform.quoteValue(comment ?? '')}`;
}
normalizeDefaultValue(defaultValue, length) {
if (!defaultValue || typeof defaultValue !== 'string') {
return super.normalizeDefaultValue(defaultValue, length, PostgreSqlSchemaHelper.DEFAULT_VALUES);
}
const match = /^'(.*)'::(.*)$/.exec(defaultValue);
if (match) {
if (match[2] === 'integer') {
return +match[1];
}
return `'${match[1]}'`;
}
return super.normalizeDefaultValue(defaultValue, length, PostgreSqlSchemaHelper.DEFAULT_VALUES);
}
appendComments(table) {
const sql = [];
if (table.comment) {
const comment = this.platform.quoteValue(this.processComment(table.comment));
sql.push(`comment on table ${table.getQuotedName()} is ${comment}`);
}
for (const column of table.getColumns()) {
if (column.comment) {
const comment = this.platform.quoteValue(this.processComment(column.comment));
sql.push(`comment on column ${table.getQuotedName()}.${this.quote(column.name)} is ${comment}`);
}
}
return sql;
}
getDatabaseExistsSQL(name) {
return `select 1 from pg_database where datname = '${name}'`;
}
getDatabaseNotExistsError(dbName) {
return `database ${this.quote(dbName)} does not exist`;
}
getManagementDbName() {
return this.platform.getConfig().get('schemaGenerator', {}).managementDbName ?? 'postgres';
}
disableForeignKeysSQL() {
return `set session_replication_role = 'replica';`;
}
enableForeignKeysSQL() {
return `set session_replication_role = 'origin';`;
}
getRenameIndexSQL(tableName, index, oldIndexName) {
oldIndexName = this.quote(oldIndexName);
const keyName = this.quote(index.keyName);
return [`alter index ${oldIndexName} rename to ${keyName}`];
}
dropIndex(table, index, oldIndexName = index.keyName) {
if (index.primary || (index.unique && index.constraint)) {
return `alter table ${this.quote(table)} drop constraint ${this.quote(oldIndexName)}`;
}
return `drop index ${this.quote(oldIndexName)}`;
}
/**
* Build the column list for a PostgreSQL index.
*/
getIndexColumns(index) {
if (index.columns?.length) {
return index.columns
.map(col => {
let colDef = this.quote(col.name);
// PostgreSQL supports collation with double quotes
if (col.collation) {
colDef += ` collate ${this.quote(col.collation)}`;
}
// PostgreSQL supports sort order
if (col.sort) {
colDef += ` ${col.sort}`;
}
// PostgreSQL supports NULLS FIRST/LAST
if (col.nulls) {
colDef += ` nulls ${col.nulls}`;
}
return colDef;
})
.join(', ');
}
return index.columnNames.map(c => this.quote(c)).join(', ');
}
/**
* PostgreSQL-specific index options like fill factor.
*/
getCreateIndexSuffix(index) {
const withOptions = [];
if (index.fillFactor != null) {
withOptions.push(`fillfactor = ${index.fillFactor}`);
}
if (withOptions.length > 0) {
return ` with (${withOptions.join(', ')})`;
}
return super.getCreateIndexSuffix(index);
}
/**
 * Builds the catalog query backing `getAllIndexes()`. Per index it selects
 * the per-column definitions (`pg_get_indexdef` with a column number), the
 * full index expression, constraint/defer flags, storage options
 * (`reloptions`) and the access method name.
 */
getIndexesSQL(tables) {
return `select indrelid::regclass as table_name, ns.nspname as schema_name, relname as constraint_name, idx.indisunique as unique, idx.indisprimary as primary, contype, condeferrable, condeferred,
array(
select pg_get_indexdef(idx.indexrelid, k + 1, true)
from generate_subscripts(idx.indkey, 1) as k
order by k
) as index_def,
pg_get_indexdef(idx.indexrelid) as expression,
c.condeferrable as deferrable,
c.condeferred as initially_deferred,
i.reloptions,
am.amname as index_type
from pg_index idx
join pg_class as i on i.oid = idx.indexrelid
join pg_namespace as ns on i.relnamespace = ns.oid
join pg_am as am on am.oid = i.relam
left join pg_constraint as c on c.conname = i.relname
where indrelid in (${tables.map(t => `${this.platform.quoteValue(`${this.quote(t.schema_name)}.${this.quote(t.table_name)}`)}::regclass`).join(', ')})
order by relname`;
}
/**
 * Builds the catalog query backing `getAllChecks()`: all `contype = 'c'`
 * constraints of the given tables, with the referenced column name and the
 * full constraint definition text.
 */
getChecksSQL(tablesBySchemas) {
return `select ccu.table_name as table_name, ccu.table_schema as schema_name, pgc.conname as name, conrelid::regclass as table_from, ccu.column_name as column_name, pg_get_constraintdef(pgc.oid) as expression
from pg_constraint pgc
join pg_namespace nsp on nsp.oid = pgc.connamespace
join pg_class cls on pgc.conrelid = cls.oid
join information_schema.constraint_column_usage ccu on pgc.conname = ccu.constraint_name and nsp.nspname = ccu.constraint_schema and cls.relname = ccu.table_name
where contype = 'c' and (${[...tablesBySchemas.entries()].map(([schema, tables]) => `ccu.table_name in (${tables.map(t => this.platform.quoteValue(t.table_name)).join(',')}) and ccu.table_schema = ${this.platform.quoteValue(schema)}`).join(' or ')})
order by pgc.conname`;
}
inferLengthFromColumnType(type) {
const match = /^(\w+(?:\s+\w+)*)\s*(?:\(\s*(\d+)\s*\)|$)/.exec(type);
if (!match) {
return;
}
if (!match[2]) {
switch (match[1]) {
case 'character varying':
case 'varchar':
case 'bpchar':
case 'char':
case 'character':
return -1;
case 'interval':
case 'time':
case 'timestamp':
case 'timestamptz':
return this.platform.getDefaultDateTimeLength();
}
return;
}
return +match[2];
}
}

View File

@@ -0,0 +1,4 @@
export * from './PostgreSqlNativeQueryBuilder.js';
export * from './BasePostgreSqlPlatform.js';
export * from './FullTextType.js';
export * from './PostgreSqlSchemaHelper.js';

View File

@@ -0,0 +1,4 @@
export * from './PostgreSqlNativeQueryBuilder.js';
export * from './BasePostgreSqlPlatform.js';
export * from './FullTextType.js';
export * from './PostgreSqlSchemaHelper.js';