Initial commit - Event Planner application
This commit is contained in:
5
node_modules/@mikro-orm/sql/dialects/index.d.ts
generated
vendored
Normal file
5
node_modules/@mikro-orm/sql/dialects/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
export * from './mssql/index.js';
|
||||
export * from './mysql/index.js';
|
||||
export * from './postgresql/index.js';
|
||||
export * from './sqlite/index.js';
|
||||
export * from './oracledb/index.js';
|
||||
5
node_modules/@mikro-orm/sql/dialects/index.js
generated
vendored
Normal file
5
node_modules/@mikro-orm/sql/dialects/index.js
generated
vendored
Normal file
@@ -0,0 +1,5 @@
// Barrel module: re-exports the runtime implementation of every SQL dialect.
export * from './mssql/index.js';
export * from './mysql/index.js';
export * from './postgresql/index.js';
export * from './sqlite/index.js';
export * from './oracledb/index.js';
16
node_modules/@mikro-orm/sql/dialects/mssql/MsSqlNativeQueryBuilder.d.ts
generated
vendored
Normal file
16
node_modules/@mikro-orm/sql/dialects/mssql/MsSqlNativeQueryBuilder.d.ts
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
import { NativeQueryBuilder } from '../../query/NativeQueryBuilder.js';
|
||||
/** @internal */
|
||||
export declare class MsSqlNativeQueryBuilder extends NativeQueryBuilder {
|
||||
compile(): {
|
||||
sql: string;
|
||||
params: unknown[];
|
||||
};
|
||||
protected compileInsert(): void;
|
||||
private appendOutputTable;
|
||||
private compileUpsert;
|
||||
protected compileSelect(): void;
|
||||
protected addLockClause(): void;
|
||||
protected compileTruncate(): void;
|
||||
/** MSSQL has no RECURSIVE keyword — CTEs are implicitly recursive. */
|
||||
protected getCteKeyword(_hasRecursive: boolean): string;
|
||||
}
|
||||
202
node_modules/@mikro-orm/sql/dialects/mssql/MsSqlNativeQueryBuilder.js
generated
vendored
Normal file
202
node_modules/@mikro-orm/sql/dialects/mssql/MsSqlNativeQueryBuilder.js
generated
vendored
Normal file
@@ -0,0 +1,202 @@
import { LockMode, QueryFlag, RawQueryFragment, Utils } from '@mikro-orm/core';
import { NativeQueryBuilder } from '../../query/NativeQueryBuilder.js';
import { QueryType } from '../../query/enums.js';

/**
 * SQL Server flavour of the low-level native query builder.
 * @internal
 */
export class MsSqlNativeQueryBuilder extends NativeQueryBuilder {

    /**
     * Assembles the accumulated parts into the final SQL string and bound
     * parameters, wrapping the statement with `identity_insert` toggles and the
     * `#out` temp-table plumbing where the corresponding query flags are set.
     */
    compile() {
        if (!this.type) {
            throw new Error('No query type provided');
        }

        this.parts.length = 0;
        this.params.length = 0;

        const identityInsert = this.options.flags?.has(QueryFlag.IDENTITY_INSERT);

        if (identityInsert) {
            this.parts.push(`set identity_insert ${this.getTableName()} on;`);
        }

        const { prefix, suffix } = this.appendOutputTable();

        if (prefix) {
            this.parts.push(prefix);
        }

        for (const comment of this.options.comment ?? []) {
            this.parts.push(`/* ${comment} */`);
        }

        this.compileCtes();

        // Upserts take a completely different code path (MERGE statement).
        const isUpsert = this.options.onConflict && !Utils.isEmpty(Utils.asArray(this.options.data)[0]);

        if (isUpsert) {
            this.compileUpsert();
        } else {
            switch (this.type) {
                case QueryType.SELECT:
                case QueryType.COUNT:
                    this.compileSelect();
                    break;
                case QueryType.INSERT:
                    this.compileInsert();
                    break;
                case QueryType.UPDATE:
                    this.compileUpdate();
                    break;
                case QueryType.DELETE:
                    this.compileDelete();
                    break;
                case QueryType.TRUNCATE:
                    this.compileTruncate();
                    break;
            }

            if (suffix) {
                // Terminate the main statement, then read the captured rows back.
                this.parts[this.parts.length - 1] += ';';
                this.parts.push(suffix);
            } else if ([QueryType.INSERT, QueryType.UPDATE, QueryType.DELETE].includes(this.type)) {
                // Write statements report the affected row count.
                this.parts[this.parts.length - 1] += '; select @@rowcount;';
            }
        }

        if (identityInsert) {
            this.parts.push(`set identity_insert ${this.getTableName()} off;`);
        }

        return this.combineParts();
    }

    /** Compiles an INSERT statement, including the `default values` shortcut. */
    compileInsert() {
        if (!this.options.data) {
            throw new Error('No data provided');
        }

        this.parts.push('insert');
        this.addHintComment();
        this.parts.push(`into ${this.getTableName()}`);

        // Empty payload → let the server fill every column with its default.
        if (Object.keys(this.options.data).length === 0) {
            this.addOutputClause('inserted');
            this.parts.push('default values');
            return;
        }

        const valueParts = this.processInsertData();

        if (this.options.flags?.has(QueryFlag.OUTPUT_TABLE)) {
            // Redirect the output clause (second-to-last part) into the #out temp table.
            this.parts[this.parts.length - 2] += ' into #out ';
        }

        this.parts.push(valueParts.join(', '));
    }

    /**
     * When the OUTPUT_TABLE flag is set, returns statements that create a `#out`
     * temp table matching the returned columns (prefix) and read + drop it (suffix).
     */
    appendOutputTable() {
        if (!this.options.flags?.has(QueryFlag.OUTPUT_TABLE)) {
            return { prefix: '', suffix: '' };
        }

        const table = this.getTableName();
        const selections = this.options.returning
            .map(field => `[t].${this.platform.quoteIdentifier(field)}`)
            .join(',');

        return {
            // `top(0) … into` clones the column structure without copying any rows.
            prefix: `select top(0) ${selections} into #out from ${table} as t left join ${table} on 0 = 1;`,
            suffix: `select ${selections} from #out as t; drop table #out`,
        };
    }

    /** Compiles an upsert as a MERGE statement with a VALUES source table. */
    compileUpsert() {
        const conflict = this.options.onConflict;
        const rows = Utils.asArray(this.options.data);
        const columns = Object.keys(rows[0]);
        const placeholders = columns.map(() => '?');
        const tuples = [];

        for (const row of rows) {
            for (const column of columns) {
                this.params.push(row[column]);
            }

            tuples.push(`(${placeholders.join(', ')})`);
        }

        this.parts.push(`merge into ${this.getTableName()}`);
        this.parts.push(`using (values ${tuples.join(', ')}) as tsource(${columns.map(column => this.quote(column)).join(', ')})`);

        if (conflict.fields instanceof RawQueryFragment) {
            this.parts.push(conflict.fields.sql);
            this.params.push(...conflict.fields.params);
        } else if (conflict.fields.length > 0) {
            const conditions = conflict.fields.map(field => {
                const col = this.quote(field);
                return `${this.getTableName()}.${col} = tsource.${col}`;
            });
            this.parts.push(`on ${conditions.join(' and ')}`);
        }

        const sourceColumns = columns.map(column => `tsource.${this.quote(column)}`).join(', ');
        const targetColumns = columns.map(column => this.quote(column)).join(', ');
        this.parts.push(`when not matched then insert (${targetColumns}) values (${sourceColumns})`);

        if (!conflict.ignore) {
            this.parts.push('when matched');

            if (conflict.where) {
                this.parts.push(`and ${conflict.where.sql}`);
                this.params.push(...conflict.where.params);
            }

            this.parts.push('then update set');

            if (!conflict.merge || Array.isArray(conflict.merge)) {
                // Update the listed columns (or all inserted columns), skipping the match keys.
                const assignments = (conflict.merge || columns)
                    .filter(field => !Array.isArray(conflict.fields) || !conflict.fields.includes(field))
                    .map(column => `${this.quote(column)} = tsource.${this.quote(column)}`);
                this.parts.push(assignments.join(', '));
            } else if (typeof conflict.merge === 'object') {
                // Explicit column → value assignments.
                const assignments = Object.entries(conflict.merge).map(([key, value]) => {
                    this.params.push(value);
                    return `${this.getTableName()}.${this.quote(key)} = ?`;
                });
                this.parts.push(assignments.join(', '));
            }
        }

        this.addOutputClause('inserted');
        this.parts[this.parts.length - 1] += ';';
    }

    /** Compiles a SELECT, using `top` or `offset … fetch` depending on pagination. */
    compileSelect() {
        this.parts.push('select');

        // `top` only works without an offset; otherwise offset/fetch is used below.
        if (this.options.limit != null && this.options.offset == null) {
            this.parts.push(`top (?)`);
            this.params.push(this.options.limit);
        }

        this.addHintComment();
        this.parts.push(`${this.getFields()} from ${this.getTableName()}`);
        this.addLockClause();

        for (const join of this.options.joins ?? []) {
            this.parts.push(join.sql);
            this.params.push(...join.params);
        }

        if (this.options.where?.sql.trim()) {
            this.parts.push(`where ${this.options.where.sql}`);
            this.params.push(...this.options.where.params);
        }

        if (this.options.groupBy) {
            this.parts.push(`group by ${this.options.groupBy.map(field => this.quote(field)).join(', ')}`);
        }

        if (this.options.having) {
            this.parts.push(`having ${this.options.having.sql}`);
            this.params.push(...this.options.having.params);
        }

        if (this.options.orderBy) {
            this.parts.push(`order by ${this.options.orderBy}`);
        }

        if (this.options.offset != null) {
            // MSSQL rejects offset/fetch without an order by clause.
            if (!this.options.orderBy) {
                throw new Error('Order by clause is required for pagination');
            }

            this.parts.push(`offset ? rows`);
            this.params.push(this.options.offset);

            if (this.options.limit != null) {
                this.parts.push(`fetch next ? rows only`);
                this.params.push(this.options.limit);
            }
        }
    }

    /** Adds the table hint implementing pessimistic read/write locks; other modes are no-ops. */
    addLockClause() {
        const mode = this.options.lockMode;

        if (mode !== LockMode.PESSIMISTIC_READ && mode !== LockMode.PESSIMISTIC_WRITE) {
            return;
        }

        this.parts.push(mode === LockMode.PESSIMISTIC_READ ? 'with (holdlock)' : 'with (updlock)');
    }

    /** Emulates TRUNCATE with DELETE + identity reseed (so it works with FKs present). */
    compileTruncate() {
        const tableName = this.getTableName();
        const sql = `delete from ${tableName}; declare @count int = case @@rowcount when 0 then 1 else 0 end; dbcc checkident ('${tableName.replace(/[[\]]/g, '')}', reseed, @count)`;
        this.parts.push(sql);
    }

    /** MSSQL has no RECURSIVE keyword — CTEs are implicitly recursive. */
    getCteKeyword(_hasRecursive) {
        return 'with';
    }

}
1
node_modules/@mikro-orm/sql/dialects/mssql/index.d.ts
generated
vendored
Normal file
1
node_modules/@mikro-orm/sql/dialects/mssql/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
export * from './MsSqlNativeQueryBuilder.js';
|
||||
1
node_modules/@mikro-orm/sql/dialects/mssql/index.js
generated
vendored
Normal file
1
node_modules/@mikro-orm/sql/dialects/mssql/index.js
generated
vendored
Normal file
@@ -0,0 +1 @@
// Re-export the MSSQL dialect implementation.
export * from './MsSqlNativeQueryBuilder.js';
71
node_modules/@mikro-orm/sql/dialects/mysql/BaseMySqlPlatform.d.ts
generated
vendored
Normal file
71
node_modules/@mikro-orm/sql/dialects/mysql/BaseMySqlPlatform.d.ts
generated
vendored
Normal file
@@ -0,0 +1,71 @@
|
||||
import {
|
||||
type SimpleColumnMeta,
|
||||
type Type,
|
||||
type TransformContext,
|
||||
type MikroORM,
|
||||
type IsolationLevel,
|
||||
} from '@mikro-orm/core';
|
||||
import { MySqlSchemaHelper } from './MySqlSchemaHelper.js';
|
||||
import { MySqlExceptionConverter } from './MySqlExceptionConverter.js';
|
||||
import { AbstractSqlPlatform } from '../../AbstractSqlPlatform.js';
|
||||
import type { IndexDef } from '../../typings.js';
|
||||
import { MySqlNativeQueryBuilder } from './MySqlNativeQueryBuilder.js';
|
||||
export declare class BaseMySqlPlatform extends AbstractSqlPlatform {
|
||||
#private;
|
||||
protected readonly schemaHelper: MySqlSchemaHelper;
|
||||
protected readonly exceptionConverter: MySqlExceptionConverter;
|
||||
protected readonly ORDER_BY_NULLS_TRANSLATE: {
|
||||
readonly 'asc nulls first': 'is not null';
|
||||
readonly 'asc nulls last': 'is null';
|
||||
readonly 'desc nulls first': 'is not null';
|
||||
readonly 'desc nulls last': 'is null';
|
||||
};
|
||||
/** @internal */
|
||||
createNativeQueryBuilder(): MySqlNativeQueryBuilder;
|
||||
getDefaultCharset(): string;
|
||||
init(orm: MikroORM): void;
|
||||
getBeginTransactionSQL(options?: { isolationLevel?: IsolationLevel; readOnly?: boolean }): string[];
|
||||
convertJsonToDatabaseValue(value: unknown, context?: TransformContext): unknown;
|
||||
getJsonIndexDefinition(index: IndexDef): string[];
|
||||
getBooleanTypeDeclarationSQL(): string;
|
||||
normalizeColumnType(
|
||||
type: string,
|
||||
options: {
|
||||
length?: number;
|
||||
precision?: number;
|
||||
scale?: number;
|
||||
},
|
||||
): string;
|
||||
getDefaultMappedType(type: string): Type<unknown>;
|
||||
isNumericColumn(mappedType: Type<unknown>): boolean;
|
||||
supportsUnsigned(): boolean;
|
||||
/**
|
||||
* Returns the default name of index for the given columns
|
||||
* cannot go past 64 character length for identifiers in MySQL
|
||||
*/
|
||||
getIndexName(
|
||||
tableName: string,
|
||||
columns: string[],
|
||||
type: 'index' | 'unique' | 'foreign' | 'primary' | 'sequence',
|
||||
): string;
|
||||
getDefaultPrimaryName(tableName: string, columns: string[]): string;
|
||||
supportsCreatingFullTextIndex(): boolean;
|
||||
getFullTextWhereClause(): string;
|
||||
getFullTextIndexExpression(
|
||||
indexName: string,
|
||||
schemaName: string | undefined,
|
||||
tableName: string,
|
||||
columns: SimpleColumnMeta[],
|
||||
): string;
|
||||
getOrderByExpression(column: string, direction: string, collation?: string): string[];
|
||||
getJsonArrayFromSQL(
|
||||
column: string,
|
||||
alias: string,
|
||||
properties: {
|
||||
name: string;
|
||||
type: string;
|
||||
}[],
|
||||
): string;
|
||||
getJsonArrayExistsSQL(from: string, where: string): string;
|
||||
getDefaultClientUrl(): string;
|
||||
}
|
||||
140
node_modules/@mikro-orm/sql/dialects/mysql/BaseMySqlPlatform.js
generated
vendored
Normal file
140
node_modules/@mikro-orm/sql/dialects/mysql/BaseMySqlPlatform.js
generated
vendored
Normal file
@@ -0,0 +1,140 @@
import { Utils, QueryOrder, DecimalType, DoubleType } from '@mikro-orm/core';
import { MySqlSchemaHelper } from './MySqlSchemaHelper.js';
import { MySqlExceptionConverter } from './MySqlExceptionConverter.js';
import { AbstractSqlPlatform } from '../../AbstractSqlPlatform.js';
import { MySqlNativeQueryBuilder } from './MySqlNativeQueryBuilder.js';

/** Shared platform implementation for MySQL-compatible databases. */
export class BaseMySqlPlatform extends AbstractSqlPlatform {

    schemaHelper = new MySqlSchemaHelper(this);
    exceptionConverter = new MySqlExceptionConverter();

    // json_table column casts, keyed by the JS type name of the property.
    #jsonTypeCasts = {
        string: 'text',
        number: 'double',
        bigint: 'bigint',
        boolean: 'unsigned',
    };

    // Null-ordering directives mapped to boolean sort expressions (see getOrderByExpression).
    ORDER_BY_NULLS_TRANSLATE = {
        [QueryOrder.asc_nulls_first]: 'is not null',
        [QueryOrder.asc_nulls_last]: 'is null',
        [QueryOrder.desc_nulls_first]: 'is not null',
        [QueryOrder.desc_nulls_last]: 'is null',
    };

    /** @internal */
    createNativeQueryBuilder() {
        return new MySqlNativeQueryBuilder(this);
    }

    getDefaultCharset() {
        return 'utf8mb4';
    }

    init(orm) {
        super.init(orm);
        // Default to disabling FK checks when clearing the schema, unless configured explicitly.
        orm.config.get('schemaGenerator').disableForeignKeysForClear ??= true;
    }

    /** Builds the statements that open a transaction, honouring isolation level / read-only. */
    getBeginTransactionSQL(options) {
        if (!options?.isolationLevel && !options?.readOnly) {
            return ['begin'];
        }

        const modifiers = [];

        if (options.isolationLevel) {
            modifiers.push(`isolation level ${options.isolationLevel}`);
        }

        if (options.readOnly) {
            modifiers.push('read only');
        }

        return [`set transaction ${modifiers.join(', ')}`, 'begin'];
    }

    convertJsonToDatabaseValue(value, context) {
        // Query values pass through untouched; persisted values are serialized.
        return context?.mode === 'query' ? value : JSON.stringify(value);
    }

    /** Turns dotted JSON paths into `json_value(...)` index expressions. */
    getJsonIndexDefinition(index) {
        return index.columnNames.map(column => {
            if (!column.includes('.')) {
                return column;
            }

            const [root, ...path] = column.split('.');

            return `(json_value(${this.quoteIdentifier(root)}, '$.${path.join('.')}' returning ${index.options?.returning ?? 'char(255)'}))`;
        });
    }

    getBooleanTypeDeclarationSQL() {
        return 'tinyint(1)';
    }

    normalizeColumnType(type, options) {
        const simpleType = this.extractSimpleType(type);

        if (simpleType === 'decimal' || simpleType === 'numeric') {
            return this.getDecimalTypeDeclarationSQL(options);
        }

        return type;
    }

    getDefaultMappedType(type) {
        // tinyint(1) is MySQL's conventional boolean column type.
        return super.getDefaultMappedType(type === 'tinyint(1)' ? 'boolean' : type);
    }

    isNumericColumn(mappedType) {
        return super.isNumericColumn(mappedType)
            || mappedType instanceof DecimalType
            || mappedType instanceof DoubleType;
    }

    supportsUnsigned() {
        return true;
    }

    /**
     * Returns the default name of index for the given columns
     * cannot go past 64 character length for identifiers in MySQL
     */
    getIndexName(tableName, columns, type) {
        if (type === 'primary') {
            return this.getDefaultPrimaryName(tableName, columns);
        }

        const indexName = super.getIndexName(tableName, columns, type);

        if (indexName.length <= 64) {
            return indexName;
        }

        // Truncate and append a short hash so shortened names stay unique.
        return `${indexName.substring(0, 56 - type.length)}_${Utils.hash(indexName, 5)}_${type}`;
    }

    getDefaultPrimaryName(tableName, columns) {
        // https://dev.mysql.com/doc/refman/8.0/en/create-table.html#create-table-indexes-keys
        return 'PRIMARY';
    }

    supportsCreatingFullTextIndex() {
        return true;
    }

    getFullTextWhereClause() {
        return `match(:column:) against (:query in boolean mode)`;
    }

    getFullTextIndexExpression(indexName, schemaName, tableName, columns) {
        const quotedTableName = this.quoteIdentifier(schemaName ? `${schemaName}.${tableName}` : tableName);
        const quotedColumnNames = columns.map(c => this.quoteIdentifier(c.name));
        const quotedIndexName = this.quoteIdentifier(indexName);

        return `alter table ${quotedTableName} add fulltext index ${quotedIndexName}(${quotedColumnNames.join(',')})`;
    }

    /** Emulates `nulls first/last` by prepending an is-null sort expression. */
    getOrderByExpression(column, direction, collation) {
        const expressions = [];
        const dir = direction.toLowerCase();
        const col = collation ? `${column} collate ${this.quoteCollation(collation)}` : column;

        if (dir in this.ORDER_BY_NULLS_TRANSLATE) {
            expressions.push(`${col} ${this.ORDER_BY_NULLS_TRANSLATE[dir]}`);
        }

        expressions.push(`${col} ${dir.replace(/(\s|nulls|first|last)*/gi, '')}`);

        return expressions;
    }

    /** Expands a JSON array column into rows via json_table. */
    getJsonArrayFromSQL(column, alias, properties) {
        const columnDefs = properties
            .map(p => `${this.quoteIdentifier(p.name)} ${this.#jsonTypeCasts[p.type] ?? 'text'} path '$.${this.quoteJsonKey(p.name)}'`)
            .join(', ');

        return `json_table(${column}, '$[*]' columns (${columnDefs})) as ${this.quoteIdentifier(alias)}`;
    }

    // MySQL does not support correlated json_table inside EXISTS subqueries,
    // so we use a semi-join via the comma-join pattern instead.
    getJsonArrayExistsSQL(from, where) {
        return `(select 1 from ${from} where ${where} limit 1) is not null`;
    }

    getDefaultClientUrl() {
        return 'mysql://root@127.0.0.1:3306';
    }

}
9
node_modules/@mikro-orm/sql/dialects/mysql/MySqlExceptionConverter.d.ts
generated
vendored
Normal file
9
node_modules/@mikro-orm/sql/dialects/mysql/MySqlExceptionConverter.d.ts
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
import { ExceptionConverter, type Dictionary, type DriverException } from '@mikro-orm/core';
|
||||
export declare class MySqlExceptionConverter extends ExceptionConverter {
|
||||
/**
|
||||
* @see http://dev.mysql.com/doc/refman/5.7/en/error-messages-client.html
|
||||
* @see http://dev.mysql.com/doc/refman/5.7/en/error-messages-server.html
|
||||
* @see https://github.com/doctrine/dbal/blob/master/src/Driver/AbstractMySQLDriver.php
|
||||
*/
|
||||
convertException(exception: Error & Dictionary): DriverException;
|
||||
}
|
||||
94
node_modules/@mikro-orm/sql/dialects/mysql/MySqlExceptionConverter.js
generated
vendored
Normal file
94
node_modules/@mikro-orm/sql/dialects/mysql/MySqlExceptionConverter.js
generated
vendored
Normal file
@@ -0,0 +1,94 @@
import {
    DeadlockException,
    LockWaitTimeoutException,
    TableExistsException,
    TableNotFoundException,
    ForeignKeyConstraintViolationException,
    UniqueConstraintViolationException,
    InvalidFieldNameException,
    NonUniqueFieldNameException,
    SyntaxErrorException,
    ConnectionException,
    NotNullConstraintViolationException,
    ExceptionConverter,
    CheckConstraintViolationException,
} from '@mikro-orm/core';

// Exception class for each known MySQL errno (same mapping the doctrine/dbal driver uses).
const EXCEPTION_BY_ERRNO = new Map([
    [1213, DeadlockException],
    [1205, LockWaitTimeoutException],
    [1050, TableExistsException],
    [1051, TableNotFoundException],
    [1146, TableNotFoundException],
    [1216, ForeignKeyConstraintViolationException],
    [1217, ForeignKeyConstraintViolationException],
    [1451, ForeignKeyConstraintViolationException],
    [1452, ForeignKeyConstraintViolationException],
    [1701, ForeignKeyConstraintViolationException],
    [3819, CheckConstraintViolationException],
    [4025, CheckConstraintViolationException],
    [1062, UniqueConstraintViolationException],
    [1557, UniqueConstraintViolationException],
    [1569, UniqueConstraintViolationException],
    [1586, UniqueConstraintViolationException],
    [1054, InvalidFieldNameException],
    [1166, InvalidFieldNameException],
    [1611, InvalidFieldNameException],
    [1052, NonUniqueFieldNameException],
    [1060, NonUniqueFieldNameException],
    [1110, NonUniqueFieldNameException],
    [1064, SyntaxErrorException],
    [1149, SyntaxErrorException],
    [1287, SyntaxErrorException],
    [1341, SyntaxErrorException],
    [1342, SyntaxErrorException],
    [1343, SyntaxErrorException],
    [1344, SyntaxErrorException],
    [1382, SyntaxErrorException],
    [1479, SyntaxErrorException],
    [1541, SyntaxErrorException],
    [1554, SyntaxErrorException],
    [1626, SyntaxErrorException],
    [1044, ConnectionException],
    [1045, ConnectionException],
    [1046, ConnectionException],
    [1049, ConnectionException],
    [1095, ConnectionException],
    [1142, ConnectionException],
    [1143, ConnectionException],
    [1227, ConnectionException],
    [1370, ConnectionException],
    [1429, ConnectionException],
    [2002, ConnectionException],
    [2005, ConnectionException],
    [1048, NotNullConstraintViolationException],
    [1121, NotNullConstraintViolationException],
    [1138, NotNullConstraintViolationException],
    [1171, NotNullConstraintViolationException],
    [1252, NotNullConstraintViolationException],
    [1263, NotNullConstraintViolationException],
    [1364, NotNullConstraintViolationException],
    [1566, NotNullConstraintViolationException],
]);

/** Translates MySQL driver errors into MikroORM's driver exception hierarchy. */
export class MySqlExceptionConverter extends ExceptionConverter {

    /**
     * @see http://dev.mysql.com/doc/refman/5.7/en/error-messages-client.html
     * @see http://dev.mysql.com/doc/refman/5.7/en/error-messages-server.html
     * @see https://github.com/doctrine/dbal/blob/master/src/Driver/AbstractMySQLDriver.php
     */
    convertException(exception) {
        const exceptionType = EXCEPTION_BY_ERRNO.get(exception.errno);

        if (exceptionType) {
            return new exceptionType(exception);
        }

        // Unknown errno → let the base class produce a generic DriverException.
        return super.convertException(exception);
    }

}
7
node_modules/@mikro-orm/sql/dialects/mysql/MySqlNativeQueryBuilder.d.ts
generated
vendored
Normal file
7
node_modules/@mikro-orm/sql/dialects/mysql/MySqlNativeQueryBuilder.d.ts
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
import { NativeQueryBuilder } from '../../query/NativeQueryBuilder.js';
|
||||
/** @internal */
|
||||
export declare class MySqlNativeQueryBuilder extends NativeQueryBuilder {
|
||||
protected compileInsert(): void;
|
||||
protected addLockClause(): void;
|
||||
protected addOnConflictClause(): void;
|
||||
}
|
||||
74
node_modules/@mikro-orm/sql/dialects/mysql/MySqlNativeQueryBuilder.js
generated
vendored
Normal file
74
node_modules/@mikro-orm/sql/dialects/mysql/MySqlNativeQueryBuilder.js
generated
vendored
Normal file
@@ -0,0 +1,74 @@
import { LockMode, RawQueryFragment, Utils } from '@mikro-orm/core';
import { NativeQueryBuilder } from '../../query/NativeQueryBuilder.js';

/**
 * MySQL flavour of the low-level native query builder.
 * @internal
 */
export class MySqlNativeQueryBuilder extends NativeQueryBuilder {

    /** Compiles an INSERT statement, using `insert ignore` for ignored conflicts. */
    compileInsert() {
        if (!this.options.data) {
            throw new Error('No data provided');
        }

        this.parts.push('insert');

        if (this.options.onConflict?.ignore) {
            this.parts.push('ignore');
        }

        this.addHintComment();
        this.parts.push(`into ${this.getTableName()}`);

        // Empty payload → let the server fill every column with its default.
        if (Object.keys(this.options.data).length === 0) {
            this.parts.push('default values');
            return;
        }

        this.parts.push(this.processInsertData().join(', '));
    }

    /** Appends the locking clause for pessimistic lock modes; optimistic is a no-op. */
    addLockClause() {
        const mode = this.options.lockMode;

        if (!mode || mode === LockMode.OPTIMISTIC) {
            return;
        }

        const clauses = {
            [LockMode.PESSIMISTIC_READ]: 'lock in share mode',
            [LockMode.PESSIMISTIC_WRITE]: 'for update',
            [LockMode.PESSIMISTIC_PARTIAL_WRITE]: 'for update skip locked',
            [LockMode.PESSIMISTIC_WRITE_OR_FAIL]: 'for update nowait',
            [LockMode.PESSIMISTIC_PARTIAL_READ]: 'lock in share mode skip locked',
            [LockMode.PESSIMISTIC_READ_OR_FAIL]: 'lock in share mode nowait',
        };

        this.parts.push(clauses[mode]);
    }

    /** Appends `on duplicate key update …` (the `ignore` case is handled in compileInsert). */
    addOnConflictClause() {
        const conflict = this.options.onConflict;

        if (!conflict || conflict.ignore) {
            return;
        }

        if (conflict.merge) {
            this.parts.push('on duplicate key update');

            if (Utils.isObject(conflict.merge)) {
                // Explicit column → value assignments.
                const assignments = Object.keys(conflict.merge).map(field => {
                    this.params.push(conflict.merge[field]);
                    return `${this.quote(field)} = ?`;
                });
                this.parts.push(assignments.join(', '));
            } else if (conflict.merge.length === 0) {
                // Empty list → refresh every inserted column from its values().
                const columns = Object.keys(Utils.asArray(this.options.data)[0]);
                this.parts.push(columns.map(key => `${this.quote(key)} = values(${this.quote(key)})`).join(', '));
            } else {
                // Explicit list of columns to refresh from values().
                const assignments = conflict.merge.map(key => `${this.quote(key)} = values(${this.quote(key)})`);
                this.parts.push(assignments.join(', '));
            }

            if (conflict.where) {
                this.parts.push(`where ${conflict.where.sql}`);
                this.params.push(...conflict.where.params);
            }

            return;
        }

        this.parts.push('on conflict');

        if (conflict.fields instanceof RawQueryFragment) {
            this.parts.push(conflict.fields.sql);
            this.params.push(...conflict.fields.params);
        } else if (conflict.fields.length > 0) {
            this.parts.push(`(${conflict.fields.map(field => this.quote(field)).join(', ')})`);
        }
    }

}
||||
47
node_modules/@mikro-orm/sql/dialects/mysql/MySqlSchemaHelper.d.ts
generated
vendored
Normal file
47
node_modules/@mikro-orm/sql/dialects/mysql/MySqlSchemaHelper.d.ts
generated
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
import { type Dictionary, type Type } from '@mikro-orm/core';
|
||||
import type { CheckDef, Column, ForeignKey, IndexDef, Table, TableDifference } from '../../typings.js';
|
||||
import type { AbstractSqlConnection } from '../../AbstractSqlConnection.js';
|
||||
import { SchemaHelper } from '../../schema/SchemaHelper.js';
|
||||
import type { DatabaseSchema } from '../../schema/DatabaseSchema.js';
|
||||
import type { DatabaseTable } from '../../schema/DatabaseTable.js';
|
||||
export declare class MySqlSchemaHelper extends SchemaHelper {
|
||||
#private;
|
||||
static readonly DEFAULT_VALUES: {
|
||||
'now()': string[];
|
||||
'current_timestamp(?)': string[];
|
||||
'0': string[];
|
||||
};
|
||||
getSchemaBeginning(charset: string, disableForeignKeys?: boolean): string;
|
||||
disableForeignKeysSQL(): string;
|
||||
enableForeignKeysSQL(): string;
|
||||
finalizeTable(table: DatabaseTable, charset: string, collate?: string): string;
|
||||
getListTablesSQL(): string;
|
||||
getListViewsSQL(): string;
|
||||
loadViews(schema: DatabaseSchema, connection: AbstractSqlConnection, schemaName?: string): Promise<void>;
|
||||
loadInformationSchema(schema: DatabaseSchema, connection: AbstractSqlConnection, tables: Table[]): Promise<void>;
|
||||
getAllIndexes(connection: AbstractSqlConnection, tables: Table[]): Promise<Dictionary<IndexDef[]>>;
|
||||
getCreateIndexSQL(tableName: string, index: IndexDef, partialExpression?: boolean): string;
|
||||
/**
|
||||
* Build the column list for a MySQL index, with MySQL-specific handling for collation.
|
||||
* MySQL requires collation to be specified as an expression: (column_name COLLATE collation_name)
|
||||
*/
|
||||
protected getIndexColumns(index: IndexDef): string;
|
||||
/**
|
||||
* Append MySQL-specific index suffixes like INVISIBLE.
|
||||
*/
|
||||
protected appendMySqlIndexSuffix(sql: string, index: IndexDef): string;
|
||||
getAllColumns(connection: AbstractSqlConnection, tables: Table[]): Promise<Dictionary<Column[]>>;
|
||||
getAllChecks(connection: AbstractSqlConnection, tables: Table[]): Promise<Dictionary<CheckDef[]>>;
|
||||
getAllForeignKeys(connection: AbstractSqlConnection, tables: Table[]): Promise<Dictionary<Dictionary<ForeignKey>>>;
|
||||
getPreAlterTable(tableDiff: TableDifference, safe: boolean): string[];
|
||||
getRenameColumnSQL(tableName: string, oldColumnName: string, to: Column): string;
|
||||
getRenameIndexSQL(tableName: string, index: IndexDef, oldIndexName: string): string[];
|
||||
getChangeColumnCommentSQL(tableName: string, to: Column, schemaName?: string): string;
|
||||
alterTableColumn(column: Column, table: DatabaseTable, changedProperties: Set<string>): string[];
|
||||
private getColumnDeclarationSQL;
|
||||
getAllEnumDefinitions(connection: AbstractSqlConnection, tables: Table[]): Promise<Dictionary<Dictionary<string[]>>>;
|
||||
private supportsCheckConstraints;
|
||||
protected getChecksSQL(tables: Table[]): string;
|
||||
normalizeDefaultValue(defaultValue: string, length: number): string | number;
|
||||
protected wrap(val: string | null | undefined, type: Type<unknown>): string | null | undefined;
|
||||
}
|
||||
379
node_modules/@mikro-orm/sql/dialects/mysql/MySqlSchemaHelper.js
generated
vendored
Normal file
379
node_modules/@mikro-orm/sql/dialects/mysql/MySqlSchemaHelper.js
generated
vendored
Normal file
@@ -0,0 +1,379 @@
|
||||
import { EnumType, StringType, TextType } from '@mikro-orm/core';
|
||||
import { SchemaHelper } from '../../schema/SchemaHelper.js';
|
||||
/**
 * Schema introspection and DDL helper for the MySQL dialect.
 *
 * Reads table/column/index/check/foreign-key metadata from
 * `information_schema` and builds MySQL-flavoured DDL fragments for the
 * schema generator. All queries are scoped to the current schema via
 * `database()` / `schema()`.
 */
export class MySqlSchemaHelper extends SchemaHelper {

    // Per-instance memoization; currently caches the CHECK-constraint support probe.
    #cache = {};

    // Known aliases of default values, used when normalizing/comparing column defaults.
    static DEFAULT_VALUES = {
        'now()': ['now()', 'current_timestamp'],
        'current_timestamp(?)': ['current_timestamp(?)'],
        0: ['0', 'false'],
    };

    /** SQL emitted at the top of a generated schema dump (charset + optionally FK checks off). */
    getSchemaBeginning(charset, disableForeignKeys) {
        if (disableForeignKeys) {
            return `set names ${charset};\n${this.disableForeignKeysSQL()}\n\n`;
        }
        return `set names ${charset};\n\n`;
    }

    /** SQL that disables foreign key enforcement for the current session. */
    disableForeignKeysSQL() {
        return 'set foreign_key_checks = 0;';
    }

    /** SQL that re-enables foreign key enforcement for the current session. */
    enableForeignKeysSQL() {
        return 'set foreign_key_checks = 1;';
    }

    /** Builds the trailing table options (charset, collation, engine, comment) for CREATE TABLE. */
    finalizeTable(table, charset, collate) {
        let sql = ` default character set ${charset}`;
        if (collate) {
            sql += ` collate ${collate}`;
        }
        sql += ' engine = InnoDB';
        if (table.comment) {
            sql += ` comment = ${this.platform.quoteValue(table.comment)}`;
        }
        return sql;
    }

    /** Query listing base tables of the current schema (`schema_name` is null for the default schema). */
    getListTablesSQL() {
        return `select table_name as table_name, nullif(table_schema, schema()) as schema_name, table_comment as table_comment from information_schema.tables where table_type = 'BASE TABLE' and table_schema = schema()`;
    }

    /** Query listing views of the current schema, including their definition when readable. */
    getListViewsSQL() {
        return `select table_name as view_name, nullif(table_schema, schema()) as schema_name, view_definition from information_schema.views where table_schema = schema()`;
    }

    /**
     * Loads all views of the current schema into the schema model.
     * NOTE(review): the `schemaName` parameter is currently unused here.
     */
    async loadViews(schema, connection, schemaName) {
        const views = await connection.execute(this.getListViewsSQL());
        for (const view of views) {
            // MySQL information_schema.views.view_definition requires SHOW VIEW privilege
            // and may return NULL. Use SHOW CREATE VIEW as fallback.
            let definition = view.view_definition?.trim();
            if (!definition) {
                const createView = await connection.execute(`show create view \`${view.view_name}\``);
                if (createView[0]?.['Create View']) {
                    // Extract SELECT statement from CREATE VIEW ... AS SELECT ...
                    const match = /\bAS\s+(.+)$/is.exec(createView[0]['Create View']);
                    definition = match?.[1]?.trim();
                }
            }
            if (definition) {
                schema.addView(view.view_name, view.schema_name ?? undefined, definition);
            }
        }
    }

    /**
     * Populates the schema model with columns, indexes, checks, foreign keys
     * and enum definitions for the given tables in bulk.
     */
    async loadInformationSchema(schema, connection, tables) {
        if (tables.length === 0) {
            return;
        }
        const columns = await this.getAllColumns(connection, tables);
        const indexes = await this.getAllIndexes(connection, tables);
        const checks = await this.getAllChecks(connection, tables);
        const fks = await this.getAllForeignKeys(connection, tables);
        const enums = await this.getAllEnumDefinitions(connection, tables);
        for (const t of tables) {
            const key = this.getTableKey(t);
            const table = schema.addTable(t.table_name, t.schema_name, t.table_comment);
            const pks = await this.getPrimaryKeys(connection, indexes[key], table.name, table.schema);
            table.init(columns[key], indexes[key], checks[key], pks, fks[key], enums[key]);
        }
    }

    /**
     * Fetches all index metadata for the given tables, keyed by table.
     * The `/*!80013 ... *\/` version-gated comment pulls `expression` and
     * `is_visible` only on MySQL 8.0.13+ servers.
     */
    async getAllIndexes(connection, tables) {
        const sql = `select table_name as table_name, nullif(table_schema, schema()) as schema_name, index_name as index_name, non_unique as non_unique, column_name as column_name, index_type as index_type, sub_part as sub_part, collation as sort_order /*!80013 , expression as expression, is_visible as is_visible */
            from information_schema.statistics where table_schema = database()
            and table_name in (${tables.map(t => this.platform.quoteValue(t.table_name)).join(', ')})
            order by schema_name, table_name, index_name, seq_in_index`;
        const allIndexes = await connection.execute(sql);
        const ret = {};
        for (const index of allIndexes) {
            const key = this.getTableKey(index);
            const indexDef = {
                columnNames: [index.column_name],
                keyName: index.index_name,
                unique: !index.non_unique,
                primary: index.index_name === 'PRIMARY',
                constraint: !index.non_unique,
            };
            // Capture column options (prefix length, sort order)
            if (index.sub_part != null || index.sort_order === 'D') {
                indexDef.columns = [
                    {
                        name: index.column_name,
                        ...(index.sub_part != null && { length: index.sub_part }),
                        ...(index.sort_order === 'D' && { sort: 'DESC' }),
                    },
                ];
            }
            // Capture index type for fulltext and spatial indexes
            if (index.index_type === 'FULLTEXT') {
                indexDef.type = 'fulltext';
            } else if (index.index_type === 'SPATIAL') {
                /* v8 ignore next */
                indexDef.type = 'spatial';
            }
            // Capture invisible flag (MySQL 8.0.13+)
            if (index.is_visible === 'NO') {
                indexDef.invisible = true;
            }
            // Expression/functional indexes have no plain column name; rebuild their DDL.
            if (!index.column_name || index.expression?.match(/ where /i)) {
                indexDef.expression = index.expression; // required for the `getCreateIndexSQL()` call
                indexDef.expression = this.getCreateIndexSQL(index.table_name, indexDef, !!index.expression);
            }
            ret[key] ??= [];
            ret[key].push(indexDef);
        }
        // Merge per-column rows into composite index definitions.
        for (const key of Object.keys(ret)) {
            ret[key] = await this.mapIndexes(ret[key]);
        }
        return ret;
    }

    /**
     * Builds the `alter table ... add index/unique ...` DDL for one index.
     * When `partialExpression` is set, `index.expression` holds only the
     * column-list part, not a full statement.
     */
    getCreateIndexSQL(tableName, index, partialExpression = false) {
        /* v8 ignore next */
        if (index.expression && !partialExpression) {
            return index.expression;
        }
        tableName = this.quote(tableName);
        const keyName = this.quote(index.keyName);
        let sql = `alter table ${tableName} add ${index.unique ? 'unique' : 'index'} ${keyName} `;
        if (index.expression && partialExpression) {
            sql += `(${index.expression})`;
            return this.appendMySqlIndexSuffix(sql, index);
        }
        // JSON columns can have unique index but not unique constraint, and we need to distinguish those, so we can properly drop them
        if (index.columnNames.some(column => column.includes('.'))) {
            const columns = this.platform.getJsonIndexDefinition(index);
            sql = `alter table ${tableName} add ${index.unique ? 'unique ' : ''}index ${keyName} `;
            sql += `(${columns.join(', ')})`;
            return this.appendMySqlIndexSuffix(sql, index);
        }
        // Build column list with advanced options
        const columns = this.getIndexColumns(index);
        sql += `(${columns})`;
        return this.appendMySqlIndexSuffix(sql, index);
    }

    /**
     * Build the column list for a MySQL index, with MySQL-specific handling for collation.
     * MySQL requires collation to be specified as an expression: (column_name COLLATE collation_name)
     */
    getIndexColumns(index) {
        if (index.columns?.length) {
            return index.columns
                .map(col => {
                    const quotedName = this.quote(col.name);
                    // MySQL supports collation via expression: (column_name COLLATE collation_name)
                    // When collation is specified, wrap in parentheses as an expression
                    if (col.collation) {
                        let expr = col.length ? `${quotedName}(${col.length})` : quotedName;
                        expr = `(${expr} collate ${col.collation})`;
                        // Sort order comes after the expression
                        if (col.sort) {
                            expr += ` ${col.sort}`;
                        }
                        return expr;
                    }
                    // Standard column definition without collation
                    let colDef = quotedName;
                    // MySQL supports prefix length
                    if (col.length) {
                        colDef += `(${col.length})`;
                    }
                    // MySQL supports sort order
                    if (col.sort) {
                        colDef += ` ${col.sort}`;
                    }
                    return colDef;
                })
                .join(', ');
        }
        return index.columnNames.map(c => this.quote(c)).join(', ');
    }

    /**
     * Append MySQL-specific index suffixes like INVISIBLE.
     */
    appendMySqlIndexSuffix(sql, index) {
        // MySQL 8.0+ supports INVISIBLE indexes
        if (index.invisible) {
            sql += ' invisible';
        }
        return sql;
    }

    /**
     * Fetches column metadata for the given tables, keyed by table.
     * Maps information_schema rows into the internal column shape
     * (type, default, nullability, PK/unique flags, generated expression, ...).
     */
    async getAllColumns(connection, tables) {
        const sql = `select table_name as table_name,
            nullif(table_schema, schema()) as schema_name,
            column_name as column_name,
            column_default as column_default,
            nullif(column_comment, '') as column_comment,
            is_nullable as is_nullable,
            data_type as data_type,
            column_type as column_type,
            column_key as column_key,
            extra as extra,
            generation_expression as generation_expression,
            numeric_precision as numeric_precision,
            numeric_scale as numeric_scale,
            ifnull(datetime_precision, character_maximum_length) length
            from information_schema.columns where table_schema = database() and table_name in (${tables.map(t => this.platform.quoteValue(t.table_name))})
            order by ordinal_position`;
        const allColumns = await connection.execute(sql);
        // Stringify non-null values so defaults compare consistently.
        const str = val => (val != null ? '' + val : val);
        // Strip the flags that are represented elsewhere (autoincrement/generated) from `extra`.
        const extra = val =>
            val.replace(/auto_increment|default_generated|(stored|virtual) generated/i, '').trim() || undefined;
        const ret = {};
        for (const col of allColumns) {
            const mappedType = this.platform.getMappedType(col.column_type);
            // Boolean columns report '0'/'1' defaults — normalize them to 'false'/'true'.
            const defaultValue = str(
                this.normalizeDefaultValue(
                    mappedType.compareAsType() === 'boolean' && ['0', '1'].includes(col.column_default)
                        ? ['false', 'true'][+col.column_default]
                        : col.column_default,
                    col.length,
                ),
            );
            const key = this.getTableKey(col);
            // Rebuild `(expr) stored|virtual` for generated columns (MySQL escapes quotes as \').
            const generated = col.generation_expression
                ? `(${col.generation_expression.replaceAll(`\\'`, `'`)}) ${col.extra.match(/stored generated/i) ? 'stored' : 'virtual'}`
                : undefined;
            ret[key] ??= [];
            ret[key].push({
                name: col.column_name,
                // Numeric types: drop the display width and `unsigned` suffix from the type name.
                type: this.platform.isNumericColumn(mappedType)
                    ? col.column_type.replace(/ unsigned$/, '').replace(/\(\d+\)$/, '')
                    : col.column_type,
                mappedType,
                unsigned: col.column_type.endsWith(' unsigned'),
                length: col.length,
                default: this.wrap(defaultValue, mappedType),
                nullable: col.is_nullable === 'YES',
                primary: col.column_key === 'PRI',
                unique: col.column_key === 'UNI',
                autoincrement: col.extra === 'auto_increment',
                precision: col.numeric_precision,
                scale: col.numeric_scale,
                comment: col.column_comment,
                extra: extra(col.extra),
                generated,
            });
        }
        return ret;
    }

    /**
     * Fetches CHECK constraints for the given tables, keyed by table.
     * Returns an empty map on servers without information_schema.check_constraints.
     */
    async getAllChecks(connection, tables) {
        /* v8 ignore next */
        if (!(await this.supportsCheckConstraints(connection))) {
            return {};
        }
        const sql = this.getChecksSQL(tables);
        const allChecks = await connection.execute(sql);
        const ret = {};
        for (const check of allChecks) {
            const key = this.getTableKey(check);
            ret[key] ??= [];
            ret[key].push({
                name: check.name,
                columnName: check.column_name,
                definition: `check ${check.expression}`,
                // Strip one pair of surrounding parentheses from the raw clause.
                expression: check.expression.replace(/^\((.*)\)$/, '$1'),
            });
        }
        return ret;
    }

    /** Fetches foreign key metadata for the given tables, keyed and mapped per table. */
    async getAllForeignKeys(connection, tables) {
        const sql = `select k.constraint_name as constraint_name, nullif(k.table_schema, schema()) as schema_name, k.table_name as table_name, k.column_name as column_name, k.referenced_table_name as referenced_table_name, k.referenced_column_name as referenced_column_name, c.update_rule as update_rule, c.delete_rule as delete_rule
            from information_schema.key_column_usage k
            inner join information_schema.referential_constraints c on c.constraint_name = k.constraint_name and c.table_name = k.table_name
            where k.table_name in (${tables.map(t => this.platform.quoteValue(t.table_name)).join(', ')})
            and k.table_schema = database() and c.constraint_schema = database() and k.referenced_column_name is not null
            order by constraint_name, k.ordinal_position`;
        const allFks = await connection.execute(sql);
        const ret = {};
        for (const fk of allFks) {
            const key = this.getTableKey(fk);
            ret[key] ??= [];
            ret[key].push(fk);
        }
        Object.keys(ret).forEach(key => {
            // Table keys are `schema.table` for non-default schemas, plain `table` otherwise.
            const parts = key.split('.');
            /* v8 ignore next */
            const schemaName = parts.length > 1 ? parts[0] : undefined;
            ret[key] = this.mapForeignKeys(ret[key], key, schemaName);
        });
        return ret;
    }

    /**
     * DDL that must run before an ALTER TABLE that drops a primary key.
     */
    getPreAlterTable(tableDiff, safe) {
        // Dropping primary keys requires to unset autoincrement attribute on the particular column first.
        const pk = Object.values(tableDiff.removedIndexes).find(idx => idx.primary);
        if (!pk || safe) {
            return [];
        }
        return pk.columnNames
            .filter(col => tableDiff.fromTable.hasColumn(col))
            .map(col => tableDiff.fromTable.getColumn(col))
            .filter(col => col.autoincrement)
            .map(
                col =>
                    `alter table \`${tableDiff.name}\` modify \`${col.name}\` ${this.getColumnDeclarationSQL({ ...col, autoincrement: false })}`,
            );
    }

    /** Column rename via `change`, which requires restating the full column definition. */
    getRenameColumnSQL(tableName, oldColumnName, to) {
        tableName = this.quote(tableName);
        oldColumnName = this.quote(oldColumnName);
        const columnName = this.quote(to.name);
        return `alter table ${tableName} change ${oldColumnName} ${columnName} ${this.getColumnDeclarationSQL(to)}`;
    }

    /** Index rename (MySQL 5.7+ `rename index`); returns a single-statement array. */
    getRenameIndexSQL(tableName, index, oldIndexName) {
        tableName = this.quote(tableName);
        oldIndexName = this.quote(oldIndexName);
        const keyName = this.quote(index.keyName);
        return [`alter table ${tableName} rename index ${oldIndexName} to ${keyName}`];
    }

    /**
     * Changing a column comment requires restating the whole column via `modify`.
     * NOTE(review): `schemaName` is currently unused here.
     */
    getChangeColumnCommentSQL(tableName, to, schemaName) {
        tableName = this.quote(tableName);
        const columnName = this.quote(to.name);
        return `alter table ${tableName} modify ${columnName} ${this.getColumnDeclarationSQL(to)}`;
    }

    /** Alters a single column in place via `modify`; returns a single-statement array. */
    alterTableColumn(column, table, changedProperties) {
        const col = this.createTableColumn(column, table, changedProperties);
        return [`alter table ${table.getQuotedName()} modify ${col}`];
    }

    /** Renders `type [unsigned] [auto_increment] null|not null [default ...] [comment ...]`. */
    getColumnDeclarationSQL(col) {
        let ret = col.type;
        ret += col.unsigned ? ' unsigned' : '';
        ret += col.autoincrement ? ' auto_increment' : '';
        ret += ' ';
        ret += col.nullable ? 'null' : 'not null';
        ret += col.default ? ' default ' + col.default : '';
        ret += col.comment ? ` comment ${this.platform.quoteValue(col.comment)}` : '';
        return ret;
    }

    /**
     * Extracts enum member lists from `enum(...)` column types, returning
     * a `{ table -> { column -> items[] } }` map.
     */
    async getAllEnumDefinitions(connection, tables) {
        const sql = `select column_name as column_name, column_type as column_type, table_name as table_name
            from information_schema.columns
            where data_type = 'enum' and table_name in (${tables.map(t => `'${t.table_name}'`).join(', ')}) and table_schema = database()`;
        const enums = await connection.execute(sql);
        return enums.reduce((o, item) => {
            o[item.table_name] ??= {};
            // `enum('a','b')` -> ['a', 'b']
            o[item.table_name][item.column_name] = item.column_type
                .match(/enum\((.*)\)/)[1]
                .split(',')
                .map(item => /'(.*)'/.exec(item)[1]);
            return o;
        }, {});
    }

    /**
     * Probes (once per instance) whether the server exposes
     * information_schema.CHECK_CONSTRAINTS (MySQL 8.0.16+ / MariaDB).
     */
    async supportsCheckConstraints(connection) {
        if (this.#cache.supportsCheckConstraints != null) {
            return this.#cache.supportsCheckConstraints;
        }
        const sql = `select 1 from information_schema.tables where table_name = 'CHECK_CONSTRAINTS' and table_schema = 'information_schema'`;
        const res = await connection.execute(sql);
        return (this.#cache.supportsCheckConstraints = res.length > 0);
    }

    /** Query fetching CHECK constraints for the given tables in the current schema. */
    getChecksSQL(tables) {
        return `select cc.constraint_schema as table_schema, tc.table_name as table_name, cc.constraint_name as name, cc.check_clause as expression
            from information_schema.check_constraints cc
            join information_schema.table_constraints tc
            on tc.constraint_schema = cc.constraint_schema
            and tc.constraint_name = cc.constraint_name
            and constraint_type = 'CHECK'
            where tc.table_name in (${tables.map(t => this.platform.quoteValue(t.table_name))}) and tc.constraint_schema = database()
            order by tc.constraint_name`;
    }

    /** Normalizes a raw column default using the MySQL alias table above. */
    normalizeDefaultValue(defaultValue, length) {
        return super.normalizeDefaultValue(defaultValue, length, MySqlSchemaHelper.DEFAULT_VALUES);
    }

    /** Quotes non-empty string defaults for string-ish column types; passes everything else through. */
    wrap(val, type) {
        const stringType = type instanceof StringType || type instanceof TextType || type instanceof EnumType;
        return typeof val === 'string' && val.length > 0 && stringType ? this.platform.quoteValue(val) : val;
    }

}
|
||||
3
node_modules/@mikro-orm/sql/dialects/mysql/index.d.ts
generated
vendored
Normal file
3
node_modules/@mikro-orm/sql/dialects/mysql/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
// Barrel module: re-exports the public pieces of the MySQL dialect.
export * from './MySqlSchemaHelper.js';
export * from './BaseMySqlPlatform.js';
export * from './MySqlNativeQueryBuilder.js';
|
||||
3
node_modules/@mikro-orm/sql/dialects/mysql/index.js
generated
vendored
Normal file
3
node_modules/@mikro-orm/sql/dialects/mysql/index.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
// Barrel module: re-exports the public pieces of the MySQL dialect.
export * from './MySqlSchemaHelper.js';
export * from './BaseMySqlPlatform.js';
export * from './MySqlNativeQueryBuilder.js';
|
||||
107
node_modules/@mikro-orm/sql/dialects/oracledb/OracleDialect.d.ts
generated
vendored
Normal file
107
node_modules/@mikro-orm/sql/dialects/oracledb/OracleDialect.d.ts
generated
vendored
Normal file
@@ -0,0 +1,107 @@
|
||||
import {
|
||||
type AliasNode,
|
||||
CompiledQuery,
|
||||
type DatabaseConnection,
|
||||
type DatabaseIntrospector,
|
||||
DefaultQueryCompiler,
|
||||
type Dialect,
|
||||
DialectAdapterBase,
|
||||
type Driver,
|
||||
type Kysely,
|
||||
type QueryCompiler,
|
||||
type QueryResult,
|
||||
type TransactionSettings,
|
||||
} from 'kysely';
|
||||
/**
|
||||
* Subset of oracledb's Pool interface used by the dialect.
|
||||
* We define our own interface to avoid importing the `oracledb` package directly.
|
||||
*/
|
||||
export interface OraclePool {
    /** Acquires a connection from the pool; the caller is responsible for closing it. */
    getConnection(): Promise<OraclePoolConnection>;
    /** Closes the pool; `drainTime` is forwarded to the underlying oracledb pool close. */
    close(drainTime?: number): Promise<void>;
}
/**
 * Subset of oracledb's Connection interface used by the dialect.
 */
export interface OraclePoolConnection {
    /** Executes a statement with positional bind parameters and driver options. */
    execute<R>(
        sql: string,
        params: unknown[],
        options?: Record<string, unknown>,
    ): Promise<{
        rows?: R[];
        rowsAffected?: number;
        resultSet?: OracleResultSet<R>;
        outBinds?: unknown;
    }>;
    commit(): Promise<void>;
    rollback(): Promise<void>;
    close(): Promise<void>;
}
/** Cursor-style result set used by `streamQuery()`. */
interface OracleResultSet<R> {
    /** Fetches the next row; resolves to a falsy value once exhausted. */
    getRow(): Promise<R>;
    close(): Promise<void>;
}
/** Query compiler emitting Oracle-flavoured SQL: unquoted identifiers, aliases without the `AS` keyword. */
declare class OracleQueryCompiler extends DefaultQueryCompiler {
    protected getLeftIdentifierWrapper(): string;
    protected getRightIdentifierWrapper(): string;
    protected visitAlias(node: AliasNode): void;
}
/** Dialect adapter; reports no support for `returning` or transactional DDL. */
declare class OracleAdapter extends DialectAdapterBase {
    #private;
    get supportsReturning(): boolean;
    get supportsTransactionalDdl(): boolean;
    /** Not implemented — always throws. */
    acquireMigrationLock(_: Kysely<any>): Promise<void>;
    /** Not implemented — always throws. */
    releaseMigrationLock(_: Kysely<any>): Promise<void>;
}
/** Kysely `DatabaseConnection` wrapper around a single oracledb pooled connection. */
declare class OracleConnection implements DatabaseConnection {
    #private;
    /** Monotonically increasing id assigned at construction. */
    readonly id: number;
    constructor(connection: OraclePoolConnection, executeOptions?: Record<string, unknown>);
    executeQuery<R>(compiledQuery: CompiledQuery): Promise<QueryResult<R>>;
    /** Rewrites `$N` placeholders into Oracle's zero-based `:N-1` bind syntax. */
    formatQuery(query: CompiledQuery): {
        sql: string;
        bindParams: unknown[];
    };
    /** Streams rows one at a time via an oracledb result set; `_chunkSize` is ignored. */
    streamQuery<R>(compiledQuery: CompiledQuery, _chunkSize?: number): AsyncIterableIterator<QueryResult<R>>;
    get connection(): OraclePoolConnection;
}
/** Kysely `Driver` backed by a caller-provided oracledb pool. */
declare class OracleDriver implements Driver {
    #private;
    constructor(config: OracleDialectConfig);
    init(): Promise<void>;
    acquireConnection(): Promise<OracleConnection>;
    savepoint(
        connection: OracleConnection,
        savepointName: string,
        compileQuery: QueryCompiler['compileQuery'],
    ): Promise<void>;
    rollbackToSavepoint(
        connection: OracleConnection,
        savepointName: string,
        compileQuery: QueryCompiler['compileQuery'],
    ): Promise<void>;
    releaseSavepoint(
        connection: OracleConnection,
        savepointName: string,
        compileQuery: QueryCompiler['compileQuery'],
    ): Promise<void>;
    beginTransaction(connection: OracleConnection, settings: TransactionSettings): Promise<void>;
    commitTransaction(connection: OracleConnection): Promise<void>;
    rollbackTransaction(connection: OracleConnection): Promise<void>;
    releaseConnection(connection: OracleConnection): Promise<void>;
    destroy(): Promise<void>;
}
/** Configuration for {@link OracleDialect}: a pre-created pool plus optional per-execute options. */
export interface OracleDialectConfig {
    pool: OraclePool;
    executeOptions?: Record<string, unknown>;
}
/** Kysely dialect entry point for Oracle (inlined from kysely-oracledb). */
export declare class OracleDialect implements Dialect {
    #private;
    constructor(config: OracleDialectConfig);
    createDriver(): OracleDriver;
    createAdapter(): OracleAdapter;
    /** Not implemented — always throws. */
    createIntrospector(db: Kysely<any>): DatabaseIntrospector;
    createQueryCompiler(): OracleQueryCompiler;
}
export {};
|
||||
172
node_modules/@mikro-orm/sql/dialects/oracledb/OracleDialect.js
generated
vendored
Normal file
172
node_modules/@mikro-orm/sql/dialects/oracledb/OracleDialect.js
generated
vendored
Normal file
@@ -0,0 +1,172 @@
|
||||
// inlined https://github.com/griffiths-waite/kysely-oracledb with minor adjustments
|
||||
/* v8 ignore start: internal Kysely driver integration, tested through the main Oracle driver */
|
||||
import {
|
||||
CompiledQuery,
|
||||
createQueryId,
|
||||
DefaultQueryCompiler,
|
||||
DialectAdapterBase,
|
||||
IdentifierNode,
|
||||
RawNode,
|
||||
} from 'kysely';
|
||||
/**
 * Builds a raw SQL node for a savepoint command (`savepoint <name>` or
 * `rollback to savepoint <name>`). The name is wrapped in an identifier
 * node so that kysely sanitizes it.
 */
function parseSavepointCommand(command, savepointName) {
    const keyword = RawNode.createWithSql(`${command} `);
    const sanitizedName = IdentifierNode.create(savepointName); // ensures savepointName gets sanitized
    return RawNode.createWithChildren([keyword, sanitizedName]);
}
|
||||
/**
 * Query compiler tweaks for Oracle: identifiers are emitted without any
 * wrapping characters, and aliases are rendered as `<expr> <alias>` (no
 * `AS` keyword).
 */
class OracleQueryCompiler extends DefaultQueryCompiler {
    // No identifier quoting on either side.
    getLeftIdentifierWrapper() {
        return '';
    }

    getRightIdentifierWrapper() {
        return '';
    }

    // Render the aliased expression and the alias separated by a single space.
    visitAlias(node) {
        const { node: aliased, alias } = node;
        this.visitNode(aliased);
        this.append(' ');
        this.visitNode(alias);
    }
}
|
||||
/**
 * Dialect adapter for Oracle. Both capability flags are hard-coded to
 * `false`: this integration handles `returning` outside of kysely and
 * Oracle DDL is not transactional. Migration locking is not implemented.
 */
class OracleAdapter extends DialectAdapterBase {
    #returningSupported = false;
    #transactionalDdlSupported = false;

    get supportsReturning() {
        return this.#returningSupported;
    }

    get supportsTransactionalDdl() {
        return this.#transactionalDdlSupported;
    }

    async acquireMigrationLock(_) {
        throw new Error('Not implemented');
    }

    async releaseMigrationLock(_) {
        throw new Error('Not implemented');
    }
}
|
||||
// Numeric value of oracledb's OUT_FORMAT_OBJECT option, hard-coded here so
// the `oracledb` package does not need to be imported (presumably stable
// across driver versions — confirm when bumping oracledb).
const OUT_FORMAT_OBJECT = 4002;
// Module-level counter used to hand out unique connection ids.
let i = 0;
/**
 * Kysely `DatabaseConnection` implementation wrapping a single oracledb
 * pooled connection.
 */
class OracleConnection {
    // Unique, monotonically increasing id for this connection instance.
    id = i++;
    // Extra options spread into every `execute()` call (may override defaults).
    #executeOptions;
    // The underlying oracledb connection.
    #connection;
    constructor(connection, executeOptions) {
        this.#executeOptions = executeOptions ?? {};
        this.#connection = connection;
    }
    /**
     * Executes a compiled query and adapts the oracledb result into kysely's
     * `QueryResult` shape.
     */
    async executeQuery(compiledQuery) {
        const { sql, bindParams } = this.formatQuery(compiledQuery);
        // User-supplied execute options are spread last so they can override
        // `autoCommit` and `outFormat`.
        const result = await this.#connection.execute(sql, bindParams, {
            autoCommit: compiledQuery.autoCommit,
            outFormat: OUT_FORMAT_OBJECT,
            ...this.#executeOptions,
        });
        return {
            rows: result?.rows || [],
            numAffectedRows: result.rowsAffected ? BigInt(result.rowsAffected) : undefined,
            // @ts-ignore internal extension for Oracle returning clause
            outBinds: result.outBinds,
        };
    }
    /**
     * Rewrites kysely's `$1`, `$2`, … placeholders into Oracle's zero-based
     * bind syntax and passes the parameter list through unchanged.
     */
    formatQuery(query) {
        return {
            sql: query.sql.replace(/\$(\d+)/g, (_match, p1) => `:${parseInt(p1, 10) - 1}`), // Format bind params in Oracle syntax :0, :1, etc.
            bindParams: query.parameters,
        };
    }
    /**
     * Streams rows one at a time using an oracledb result set.
     * `_chunkSize` is accepted for interface compatibility but ignored.
     */
    async *streamQuery(compiledQuery, _chunkSize) {
        const { sql, bindParams } = this.formatQuery(compiledQuery);
        const result = await this.#connection.execute(sql, bindParams, {
            resultSet: true,
            autoCommit: compiledQuery.autoCommit,
            outFormat: OUT_FORMAT_OBJECT,
            ...this.#executeOptions,
        });
        const rs = result.resultSet;
        try {
            let row;
            // `getRow()` resolves to a falsy value once the cursor is exhausted.
            while ((row = await rs.getRow())) {
                yield { rows: [row] };
            }
        } finally {
            // Always release the server-side cursor.
            await rs.close();
        }
    }
    /** The wrapped oracledb connection. */
    get connection() {
        return this.#connection;
    }
}
|
||||
/**
 * Kysely `Driver` implementation backed by a caller-provided oracledb pool.
 * Tracks acquired connections so `destroy()` can release them before
 * closing the pool.
 */
class OracleDriver {
    #config;
    // All currently acquired (not yet released) connections.
    #connections = new Set();
    constructor(config) {
        this.#config = config;
    }
    /** No driver-level initialization needed — the pool is created by the caller. */
    async init() {
        //
    }
    /** Wraps a pooled oracledb connection and registers it for cleanup in `destroy()`. */
    async acquireConnection() {
        const connection = new OracleConnection(await this.#config.pool.getConnection(), this.#config.executeOptions);
        this.#connections.add(connection);
        return connection;
    }
    /** Creates a savepoint; the name is routed through an identifier node for sanitization. */
    async savepoint(connection, savepointName, compileQuery) {
        await connection.executeQuery(compileQuery(parseSavepointCommand('savepoint', savepointName), createQueryId()));
    }
    /** Rolls back to a previously created savepoint. */
    async rollbackToSavepoint(connection, savepointName, compileQuery) {
        await connection.executeQuery(
            compileQuery(parseSavepointCommand('rollback to savepoint', savepointName), createQueryId()),
        );
    }
    /** Intentionally a no-op — Oracle has no statement for releasing a savepoint. */
    async releaseSavepoint(connection, savepointName, compileQuery) {
        //
    }
    /**
     * Starts a transaction. An explicit access mode takes precedence over the
     * isolation level — only one `set transaction` form is issued.
     */
    async beginTransaction(connection, settings) {
        if (settings.accessMode) {
            await connection.executeQuery(CompiledQuery.raw(`set transaction ${settings.accessMode}`));
            return;
        }
        if (settings.isolationLevel) {
            await connection.executeQuery(CompiledQuery.raw(`set transaction isolation level ${settings.isolationLevel}`));
        }
    }
    async commitTransaction(connection) {
        await connection.connection.commit();
    }
    async rollbackTransaction(connection) {
        await connection.connection.rollback();
    }
    /** Returns the connection to the pool; close errors are swallowed (best effort). */
    async releaseConnection(connection) {
        try {
            await connection.connection.close();
        } catch (err) {
            //
        } finally {
            this.#connections.delete(connection);
        }
    }
    /** Releases any still-acquired connections, then closes the pool immediately (drain time 0). */
    async destroy() {
        for (const connection of this.#connections) {
            await this.releaseConnection(connection);
        }
        await this.#config.pool?.close(0);
    }
}
|
||||
/**
 * Kysely dialect entry point for Oracle. A pure factory: each `create*`
 * call returns a fresh driver/adapter/compiler built from the config
 * supplied at construction time. Introspection is not supported.
 */
export class OracleDialect {
    #dialectConfig;

    constructor(config) {
        this.#dialectConfig = config;
    }

    createDriver() {
        return new OracleDriver(this.#dialectConfig);
    }

    createQueryCompiler() {
        return new OracleQueryCompiler();
    }

    createAdapter() {
        return new OracleAdapter();
    }

    /** Schema introspection is not provided by this dialect. */
    createIntrospector(db) {
        throw new Error('Not implemented');
    }
}
|
||||
/* v8 ignore stop */
|
||||
19
node_modules/@mikro-orm/sql/dialects/oracledb/OracleNativeQueryBuilder.d.ts
generated
vendored
Normal file
19
node_modules/@mikro-orm/sql/dialects/oracledb/OracleNativeQueryBuilder.d.ts
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
import { type Dictionary } from '@mikro-orm/core';
|
||||
import { NativeQueryBuilder } from '../../query/NativeQueryBuilder.js';
|
||||
/**
 * Tags an object with a hidden, non-enumerable `__outBindings: true` flag so
 * the Oracle driver can recognize out-binding descriptors.
 * @internal
 */
export declare function markOutBindings(obj: Dictionary): void;
/**
 * Native query builder for Oracle: compiles `merge`-based upserts and
 * `returning ... into` out-bindings, and extends truncate with
 * `drop all storage cascade`.
 * @internal
 */
export declare class OracleNativeQueryBuilder extends NativeQueryBuilder {
    /** Wraps the query in parentheses followed by the quoted alias (no `AS`). */
    as(alias: string): this;
    /** Compiles to SQL + params; multi-row inserts with `returning` compile to a PL/SQL block. */
    compile(): {
        sql: string;
        params: unknown[];
    };
    protected compileTruncate(): void;
    /** Joins the accumulated parts and converts out-binding tuples into oracledb bind descriptors. */
    protected combineParts(): {
        sql: string;
        params: unknown[];
    };
    private compileUpsert;
    protected compileSelect(): void;
}
|
||||
245
node_modules/@mikro-orm/sql/dialects/oracledb/OracleNativeQueryBuilder.js
generated
vendored
Normal file
245
node_modules/@mikro-orm/sql/dialects/oracledb/OracleNativeQueryBuilder.js
generated
vendored
Normal file
@@ -0,0 +1,245 @@
|
||||
import { raw, RawQueryFragment, Utils } from '@mikro-orm/core';
|
||||
import { QueryType } from '../../query/enums.js';
|
||||
import { NativeQueryBuilder } from '../../query/NativeQueryBuilder.js';
|
||||
/**
 * Tags `obj` with a hidden `__outBindings: true` flag so the Oracle driver
 * can recognize it as an out-binding descriptor. The property is defined as
 * non-enumerable so it never leaks into key iteration or serialization,
 * while remaining writable and configurable.
 * @internal
 */
export function markOutBindings(obj) {
    const descriptor = {
        value: true,
        enumerable: false,
        writable: true,
        configurable: true,
    };
    Object.defineProperty(obj, '__outBindings', descriptor);
}
|
||||
/** @internal */
|
||||
export class OracleNativeQueryBuilder extends NativeQueryBuilder {
|
||||
as(alias) {
|
||||
this.wrap('(', `) ${this.platform.quoteIdentifier(alias)}`);
|
||||
return this;
|
||||
}
|
||||
compile() {
|
||||
if (!this.type) {
|
||||
throw new Error('No query type provided');
|
||||
}
|
||||
this.parts.length = 0;
|
||||
this.params.length = 0;
|
||||
/* v8 ignore next 3: query comment branch */
|
||||
if (this.options.comment) {
|
||||
this.parts.push(...this.options.comment.map(comment => `/* ${comment} */`));
|
||||
}
|
||||
let copy;
|
||||
if (this.options.onConflict && !Utils.isEmpty(Utils.asArray(this.options.data)[0])) {
|
||||
this.compileUpsert();
|
||||
} else {
|
||||
if (this.options.returning && Array.isArray(this.options.data) && this.options.data.length > 1) {
|
||||
copy = [...this.options.data];
|
||||
this.options.data.length = 1;
|
||||
}
|
||||
switch (this.type) {
|
||||
case QueryType.SELECT:
|
||||
case QueryType.COUNT:
|
||||
this.compileSelect();
|
||||
break;
|
||||
case QueryType.INSERT:
|
||||
this.compileInsert();
|
||||
break;
|
||||
case QueryType.UPDATE:
|
||||
this.compileUpdate();
|
||||
break;
|
||||
case QueryType.DELETE:
|
||||
this.compileDelete();
|
||||
break;
|
||||
case QueryType.TRUNCATE:
|
||||
this.compileTruncate();
|
||||
break;
|
||||
}
|
||||
this.addOnConflictClause();
|
||||
}
|
||||
if (this.options.returning) {
|
||||
const isUpsert = this.options.onConflict && !Utils.isEmpty(Utils.asArray(this.options.data)[0]);
|
||||
const prefix = isUpsert ? `${this.getTableName()}.` : '';
|
||||
const fields = this.options.returning.map(field => prefix + this.quote(Array.isArray(field) ? field[0] : field));
|
||||
const into = this.options.returning.map(field => ':out_' + (Array.isArray(field) ? field[0] : field));
|
||||
const outBindings = this.options.returning.map(field => {
|
||||
const name = 'out_' + (Array.isArray(field) ? field[0] : field);
|
||||
const type = Array.isArray(field) ? field[1] : 'string';
|
||||
return [name, type];
|
||||
});
|
||||
markOutBindings(outBindings);
|
||||
this.parts.push(`returning ${fields.join(', ')}`);
|
||||
this.parts.push(`into ${into.join(', ')}`);
|
||||
this.params.push(outBindings);
|
||||
}
|
||||
this.addLockClause();
|
||||
if (!copy) {
|
||||
return this.combineParts();
|
||||
}
|
||||
// multi insert with returning
|
||||
const sql = this.parts.join(' ');
|
||||
const blockLines = [];
|
||||
const block2Lines = [];
|
||||
const keys = Object.keys(copy[0]);
|
||||
const last = this.params[this.params.length - 1];
|
||||
/* v8 ignore next 3: defensive check — output bindings are always set by compile() */
|
||||
if (!Array.isArray(last) || !('__outBindings' in last) || !last.__outBindings) {
|
||||
throw new Error('Output bindings are required for multi insert with returning');
|
||||
}
|
||||
const outBindings = {};
|
||||
markOutBindings(outBindings);
|
||||
for (let i = 0; i < copy.length; i++) {
|
||||
const params = [];
|
||||
for (const key of keys) {
|
||||
/* v8 ignore next 3: undefined value branch in multi-insert */
|
||||
if (typeof copy[i][key] === 'undefined') {
|
||||
params.push(this.platform.usesDefaultKeyword() ? raw('default') : null);
|
||||
} else {
|
||||
params.push(copy[i][key]);
|
||||
}
|
||||
}
|
||||
// we need to interpolate to allow proper escaping
|
||||
const formatted = this.platform.formatQuery(sql, params).replaceAll(`'`, `''`);
|
||||
/* v8 ignore next 3: returning field type branches */
|
||||
const using = this.options.returning.map(field => {
|
||||
const name = Array.isArray(field) ? field[0] : field;
|
||||
const type = Array.isArray(field) ? field[1] : 'string';
|
||||
outBindings[`out_${name}__${i}`] = {
|
||||
dir: this.platform.mapToBindType('out'),
|
||||
type: this.platform.mapToBindType(type),
|
||||
};
|
||||
return `out :out_${name}__${i}`;
|
||||
});
|
||||
blockLines.push(` execute immediate '${formatted}' using ${using.join(', ')};`);
|
||||
block2Lines.push(` execute immediate '${sql}' using ${using.join(', ')};`);
|
||||
}
|
||||
const block = `begin\n${blockLines.join('\n')}\n end;`;
|
||||
const block2 = `begin\n${block2Lines.join('\n')}\n end;`;
|
||||
// save raw query without interpolation for logging,
|
||||
Object.defineProperty(outBindings, '__rawQuery', {
|
||||
value: block2,
|
||||
writable: true,
|
||||
configurable: true,
|
||||
enumerable: false,
|
||||
});
|
||||
this.options.data = copy;
|
||||
return { sql: block, params: [outBindings] };
|
||||
}
|
||||
compileTruncate() {
|
||||
super.compileTruncate();
|
||||
this.parts.push('drop all storage cascade');
|
||||
}
|
||||
/**
 * Joins the collected SQL fragments into the final statement and finalizes
 * the parameter list.
 *
 * If the last parameter is an array flagged with `__outBindings` (produced
 * by the insert compilation for RETURNING support), it is replaced with an
 * object mapping each `[name, type]` pair to a driver out-bind descriptor
 * (`{ dir, type }` via `platform.mapToBindType`).
 *
 * @returns `{ sql, params }` ready for execution
 */
combineParts() {
    let sql = this.parts.join(' ');
    const last = this.params[this.params.length - 1];
    // optional wrapping of the whole statement (e.g. surrounding parentheses)
    if (this.options.wrap) {
        const [a, b] = this.options.wrap;
        sql = `${a}${sql}${b}`;
    }
    // no out bindings present - return the query as-is
    if (!(Array.isArray(last) && '__outBindings' in last && last.__outBindings)) {
        return { sql, params: this.params };
    }
    // replace the raw `[name, type]` tuples with out-bind descriptors;
    // markOutBindings tags the object so the executor recognizes it as out bindings
    const out = this.params.pop();
    const outBindings = {};
    markOutBindings(outBindings);
    this.params.push(outBindings);
    for (const item of out) {
        outBindings[item[0]] = {
            dir: this.platform.mapToBindType('out'),
            type: this.platform.mapToBindType(item[1]),
        };
    }
    return { sql, params: this.params };
}
|
||||
/**
 * Compiles an upsert as an Oracle-style `merge into ... using (...) tsource`
 * statement.
 *
 * The data rows become a `union all` of `select ? as col from dual`
 * sub-queries aliased `tsource`, matched against the target table on the
 * conflict fields; unmatched rows are inserted, and matched rows are
 * updated unless `onConflict.ignore` is set.
 */
compileUpsert() {
    const clause = this.options.onConflict;
    const dataAsArray = Utils.asArray(this.options.data);
    // the column list comes from the first row - assumes all rows share the same keys
    const keys = Object.keys(dataAsArray[0]);
    const parts = [];
    // build the `tsource` rows; params are pushed in row-major key order
    for (const data of dataAsArray) {
        for (const key of keys) {
            this.params.push(data[key]);
        }
        parts.push(`select ${keys.map(k => `? as ${this.quote(k)}`).join(', ')} from dual`);
    }
    this.parts.push(`merge into ${this.getTableName()}`);
    this.parts.push(`using (${parts.join(' union all ')}) tsource`);
    /* v8 ignore next 4: RawQueryFragment conflict fields branch */
    if (clause.fields instanceof RawQueryFragment) {
        // user supplied the whole `on (...)` condition as raw SQL
        this.parts.push(clause.fields.sql);
        this.params.push(...clause.fields.params);
    } else if (clause.fields.length > 0) {
        // match on the conflict columns: `table.col = tsource.col and ...`
        const fields = clause.fields.map(field => {
            const col = this.quote(field);
            return `${this.getTableName()}.${col} = tsource.${col}`;
        });
        this.parts.push(`on (${fields.join(' and ')})`);
    }
    const sourceColumns = keys.map(field => `tsource.${this.quote(field)}`).join(', ');
    const destinationColumns = keys.map(field => this.quote(field)).join(', ');
    this.parts.push(`when not matched then insert (${destinationColumns}) values (${sourceColumns})`);
    if (!clause.ignore) {
        /* v8 ignore next: merge type branch */
        if (!clause.merge || Array.isArray(clause.merge)) {
            // array form (or default): update the listed columns (or all data keys),
            // excluding the conflict fields themselves
            const mergeParts = (clause.merge || keys)
                .filter(field => !Array.isArray(clause.fields) || !clause.fields.includes(field))
                .filter(field => keys.includes(field)) // only reference columns present in the source data
                .map(column => `${this.quote(column)} = tsource.${this.quote(column)}`);
            /* v8 ignore next 10: empty mergeParts branch */
            if (mergeParts.length > 0) {
                this.parts.push('when matched');
                // optional extra condition restricting which matched rows get updated
                if (clause.where) {
                    this.parts.push(`and ${clause.where.sql}`);
                    this.params.push(...clause.where.params);
                }
                this.parts.push('then update set');
                this.parts.push(mergeParts.join(', '));
            }
        } /* v8 ignore start: object-form merge branch */ else if (typeof clause.merge === 'object') {
            // object form: explicit `column: value` assignments, bound as parameters
            this.parts.push('when matched');
            if (clause.where) {
                this.parts.push(`and ${clause.where.sql}`);
                this.params.push(...clause.where.params);
            }
            this.parts.push('then update set');
            const parts = Object.entries(clause.merge).map(([key, value]) => {
                this.params.push(value);
                return `${this.getTableName()}.${this.quote(key)} = ?`;
            });
            this.parts.push(parts.join(', '));
        }
        /* v8 ignore stop */
    }
}
||||
/**
 * Compiles a SELECT statement from the collected options: fields, joins,
 * where, group by, having, order by, and pagination using the ANSI
 * `offset ? rows` / `fetch next ? rows only` syntax.
 */
compileSelect() {
    this.parts.push('select');
    // optimizer hint comment is injected right after the `select` keyword
    this.addHintComment();
    this.parts.push(`${this.getFields()} from ${this.getTableName()}`);
    if (this.options.joins) {
        for (const join of this.options.joins) {
            this.parts.push(join.sql);
            this.params.push(...join.params);
        }
    }
    // skip the where clause entirely when its SQL is empty or whitespace-only
    if (this.options.where?.sql.trim()) {
        this.parts.push(`where ${this.options.where.sql}`);
        this.params.push(...this.options.where.params);
    }
    if (this.options.groupBy) {
        const fields = this.options.groupBy.map(field => this.quote(field));
        this.parts.push(`group by ${fields.join(', ')}`);
    }
    if (this.options.having) {
        this.parts.push(`having ${this.options.having.sql}`);
        this.params.push(...this.options.having.params);
    }
    if (this.options.orderBy) {
        // orderBy is pre-rendered SQL at this point, pushed verbatim
        this.parts.push(`order by ${this.options.orderBy}`);
    }
    if (this.options.offset != null) {
        this.parts.push(`offset ? rows`);
        this.params.push(this.options.offset);
    }
    if (this.options.limit != null) {
        this.parts.push(`fetch next ? rows only`);
        this.params.push(this.options.limit);
    }
}
|
||||
}
|
||||
2
node_modules/@mikro-orm/sql/dialects/oracledb/index.d.ts
generated
vendored
Normal file
2
node_modules/@mikro-orm/sql/dialects/oracledb/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
export * from './OracleDialect.js';
|
||||
export * from './OracleNativeQueryBuilder.js';
|
||||
2
node_modules/@mikro-orm/sql/dialects/oracledb/index.js
generated
vendored
Normal file
2
node_modules/@mikro-orm/sql/dialects/oracledb/index.js
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
export * from './OracleDialect.js';
|
||||
export * from './OracleNativeQueryBuilder.js';
|
||||
115
node_modules/@mikro-orm/sql/dialects/postgresql/BasePostgreSqlPlatform.d.ts
generated
vendored
Normal file
115
node_modules/@mikro-orm/sql/dialects/postgresql/BasePostgreSqlPlatform.d.ts
generated
vendored
Normal file
@@ -0,0 +1,115 @@
|
||||
import {
|
||||
type EntityProperty,
|
||||
type IsolationLevel,
|
||||
RawQueryFragment,
|
||||
type SimpleColumnMeta,
|
||||
Type,
|
||||
} from '@mikro-orm/core';
|
||||
import { AbstractSqlPlatform } from '../../AbstractSqlPlatform.js';
|
||||
import type { IndexDef } from '../../typings.js';
|
||||
import { PostgreSqlNativeQueryBuilder } from './PostgreSqlNativeQueryBuilder.js';
|
||||
import { PostgreSqlSchemaHelper } from './PostgreSqlSchemaHelper.js';
|
||||
import { PostgreSqlExceptionConverter } from './PostgreSqlExceptionConverter.js';
|
||||
/**
 * PostgreSQL platform declaration: column type declarations and
 * normalization, type mapping, array/JSON handling, identifier quoting,
 * and schema defaults for PostgreSQL-compatible dialects.
 */
export declare class BasePostgreSqlPlatform extends AbstractSqlPlatform {
    #private;
    protected readonly schemaHelper: PostgreSqlSchemaHelper;
    protected readonly exceptionConverter: PostgreSqlExceptionConverter;
    createNativeQueryBuilder(): PostgreSqlNativeQueryBuilder;
    usesReturningStatement(): boolean;
    usesCascadeStatement(): boolean;
    supportsNativeEnums(): boolean;
    usesEnumCheckConstraints(): boolean;
    supportsMaterializedViews(): boolean;
    supportsCustomPrimaryKeyNames(): boolean;
    getCurrentTimestampSQL(length: number): string;
    getDateTimeTypeDeclarationSQL(column: { length?: number }): string;
    getDefaultDateTimeLength(): number;
    getTimeTypeDeclarationSQL(): string;
    getIntegerTypeDeclarationSQL(column: { length?: number; autoincrement?: boolean; generated?: string }): string;
    getBigIntTypeDeclarationSQL(column: { autoincrement?: boolean }): string;
    getTinyIntTypeDeclarationSQL(column: { length?: number; unsigned?: boolean; autoincrement?: boolean }): string;
    getUuidTypeDeclarationSQL(column: { length?: number }): string;
    getFullTextWhereClause(prop: EntityProperty): string;
    supportsCreatingFullTextIndex(): boolean;
    getFullTextIndexExpression(
        indexName: string,
        schemaName: string | undefined,
        tableName: string,
        columns: SimpleColumnMeta[],
    ): string;
    normalizeColumnType(
        type: string,
        options: {
            length?: number;
            precision?: number;
            scale?: number;
            autoincrement?: boolean;
        },
    ): string;
    getMappedType(type: string): Type<unknown>;
    getRegExpOperator(val?: unknown, flags?: string): string;
    getRegExpValue(val: RegExp): {
        $re: string;
        $flags?: string;
    };
    isBigIntProperty(prop: EntityProperty): boolean;
    getArrayDeclarationSQL(): string;
    getFloatDeclarationSQL(): string;
    getDoubleDeclarationSQL(): string;
    getEnumTypeDeclarationSQL(column: { fieldNames: string[]; items?: unknown[]; nativeEnumName?: string }): string;
    supportsMultipleStatements(): boolean;
    getBeginTransactionSQL(options?: { isolationLevel?: IsolationLevel; readOnly?: boolean }): string[];
    /** Serializes a JS string array into the PostgreSQL array literal form `{a,b,c}`. */
    marshallArray(values: string[]): string;
    /** Parses a PostgreSQL array literal (`{a,b,c}`) back into a JS string array. */
    unmarshallArray(value: string): string[];
    getVarcharTypeDeclarationSQL(column: { length?: number }): string;
    getCharTypeDeclarationSQL(column: { length?: number }): string;
    getIntervalTypeDeclarationSQL(column: { length?: number }): string;
    getBlobDeclarationSQL(): string;
    getJsonDeclarationSQL(): string;
    getSearchJsonPropertyKey(
        path: string[],
        type: string | undefined | Type,
        aliased: boolean,
        value?: unknown,
    ): string | RawQueryFragment;
    getJsonIndexDefinition(index: IndexDef): string[];
    quoteIdentifier(
        id:
            | string
            | {
                toString: () => string;
            },
        quote?: string,
    ): string;
    private pad;
    /** @internal */
    formatDate(date: Date): string;
    indexForeignKeys(): boolean;
    getDefaultMappedType(type: string): Type<unknown>;
    supportsSchemas(): boolean;
    getDefaultSchemaName(): string | undefined;
    /**
     * Returns the default name of index for the given columns
     * cannot go past 63 character length for identifiers in PostgreSQL
     */
    getIndexName(
        tableName: string,
        columns: string[],
        type: 'index' | 'unique' | 'foreign' | 'primary' | 'sequence',
    ): string;
    getDefaultPrimaryName(tableName: string, columns: string[]): string;
    /**
     * @inheritDoc
     */
    castColumn(prop?: { columnTypes?: string[] }): string;
    getJsonArrayFromSQL(
        column: string,
        alias: string,
        _properties: {
            name: string;
            type: string;
        }[],
    ): string;
    getJsonArrayElementPropertySQL(alias: string, property: string, type: string): string;
    getDefaultClientUrl(): string;
}
|
||||
363
node_modules/@mikro-orm/sql/dialects/postgresql/BasePostgreSqlPlatform.js
generated
vendored
Normal file
363
node_modules/@mikro-orm/sql/dialects/postgresql/BasePostgreSqlPlatform.js
generated
vendored
Normal file
@@ -0,0 +1,363 @@
|
||||
import { ALIAS_REPLACEMENT, ARRAY_OPERATORS, raw, RawQueryFragment, Type, Utils } from '@mikro-orm/core';
|
||||
import { AbstractSqlPlatform } from '../../AbstractSqlPlatform.js';
|
||||
import { PostgreSqlNativeQueryBuilder } from './PostgreSqlNativeQueryBuilder.js';
|
||||
import { PostgreSqlSchemaHelper } from './PostgreSqlSchemaHelper.js';
|
||||
import { PostgreSqlExceptionConverter } from './PostgreSqlExceptionConverter.js';
|
||||
import { FullTextType } from './FullTextType.js';
|
||||
/**
 * PostgreSQL platform implementation: column type declarations and
 * normalization, type mapping, array/JSON handling, identifier quoting,
 * and schema defaults.
 */
export class BasePostgreSqlPlatform extends AbstractSqlPlatform {
    schemaHelper = new PostgreSqlSchemaHelper(this);
    exceptionConverter = new PostgreSqlExceptionConverter();
    /** Maps JS runtime type names to PostgreSQL cast types for JSON property access. @internal */
    #jsonTypeCasts = { number: 'float8', bigint: 'int8', boolean: 'bool' };
    createNativeQueryBuilder() {
        return new PostgreSqlNativeQueryBuilder(this);
    }
    /** PostgreSQL supports the `returning` clause. */
    usesReturningStatement() {
        return true;
    }
    usesCascadeStatement() {
        return true;
    }
    supportsNativeEnums() {
        return true;
    }
    usesEnumCheckConstraints() {
        return true;
    }
    supportsMaterializedViews() {
        return true;
    }
    supportsCustomPrimaryKeyNames() {
        return true;
    }
    getCurrentTimestampSQL(length) {
        return `current_timestamp(${length})`;
    }
    /** `timestamptz`, with optional fractional-seconds precision. */
    getDateTimeTypeDeclarationSQL(column) {
        /* v8 ignore next */
        return 'timestamptz' + (column.length != null ? `(${column.length})` : '');
    }
    getDefaultDateTimeLength() {
        return 6;
    }
    getTimeTypeDeclarationSQL() {
        return 'time(0)';
    }
    /** `serial` for plain auto-increment columns, otherwise `int`. */
    getIntegerTypeDeclarationSQL(column) {
        if (column.autoincrement && !column.generated) {
            return 'serial';
        }
        return 'int';
    }
    getBigIntTypeDeclarationSQL(column) {
        /* v8 ignore next */
        if (column.autoincrement) {
            return `bigserial`;
        }
        return 'bigint';
    }
    // PostgreSQL has no 1-byte integer type; smallint is the closest fit
    getTinyIntTypeDeclarationSQL(column) {
        return 'smallint';
    }
    getUuidTypeDeclarationSQL(column) {
        return `uuid`;
    }
    /**
     * Builds the full-text match condition (`:column:`/`:query` are
     * placeholders filled in by the query builder). Columns already stored
     * as tsvector are matched directly; others are wrapped in to_tsvector.
     */
    getFullTextWhereClause(prop) {
        if (prop.customType instanceof FullTextType) {
            return `:column: @@ plainto_tsquery('${prop.customType.regconfig}', :query)`;
        }
        /* v8 ignore next */
        if (prop.columnTypes[0] === 'tsvector') {
            return `:column: @@ plainto_tsquery('simple', :query)`;
        }
        return `to_tsvector('simple', :column:) @@ plainto_tsquery('simple', :query)`;
    }
    supportsCreatingFullTextIndex() {
        return true;
    }
    /** Builds a GIN index expression for full-text search over the given columns. */
    getFullTextIndexExpression(indexName, schemaName, tableName, columns) {
        /* v8 ignore next */
        const quotedTableName = this.quoteIdentifier(schemaName ? `${schemaName}.${tableName}` : tableName);
        const quotedColumnNames = columns.map(c => this.quoteIdentifier(c.name));
        const quotedIndexName = this.quoteIdentifier(indexName);
        // a single tsvector column can be indexed directly
        if (columns.length === 1 && columns[0].type === 'tsvector') {
            return `create index ${quotedIndexName} on ${quotedTableName} using gin(${quotedColumnNames[0]})`;
        }
        // otherwise concatenate the columns and index the computed tsvector
        return `create index ${quotedIndexName} on ${quotedTableName} using gin(to_tsvector('simple', ${quotedColumnNames.join(` || ' ' || `)}))`;
    }
    /** Normalizes PostgreSQL type aliases (int4, bool, bpchar, ...) to canonical declarations. */
    normalizeColumnType(type, options) {
        const simpleType = this.extractSimpleType(type);
        if (['int', 'int4', 'integer'].includes(simpleType)) {
            return this.getIntegerTypeDeclarationSQL({});
        }
        if (['bigint', 'int8'].includes(simpleType)) {
            return this.getBigIntTypeDeclarationSQL({});
        }
        if (['smallint', 'int2'].includes(simpleType)) {
            return this.getSmallIntTypeDeclarationSQL({});
        }
        if (['boolean', 'bool'].includes(simpleType)) {
            return this.getBooleanTypeDeclarationSQL();
        }
        if (['varchar', 'character varying'].includes(simpleType)) {
            return this.getVarcharTypeDeclarationSQL(options);
        }
        if (['char', 'bpchar'].includes(simpleType)) {
            return this.getCharTypeDeclarationSQL(options);
        }
        if (['decimal', 'numeric'].includes(simpleType)) {
            return this.getDecimalTypeDeclarationSQL(options);
        }
        if (['interval'].includes(simpleType)) {
            return this.getIntervalTypeDeclarationSQL(options);
        }
        return super.normalizeColumnType(type, options);
    }
    getMappedType(type) {
        switch (this.extractSimpleType(type)) {
            case 'tsvector':
                return Type.getType(FullTextType);
            default:
                return super.getMappedType(type);
        }
    }
    /** `~*` for case-insensitive regex matching, `~` otherwise. */
    getRegExpOperator(val, flags) {
        /* v8 ignore next */
        if ((val instanceof RegExp && val.flags.includes('i')) || flags?.includes('i')) {
            return '~*';
        }
        return '~';
    }
    /* v8 ignore next */
    getRegExpValue(val) {
        if (val.flags.includes('i')) {
            return { $re: val.source, $flags: val.flags };
        }
        return { $re: val.source };
    }
    isBigIntProperty(prop) {
        return super.isBigIntProperty(prop) || ['bigserial', 'int8'].includes(prop.columnTypes?.[0]);
    }
    getArrayDeclarationSQL() {
        return 'text[]';
    }
    getFloatDeclarationSQL() {
        return 'real';
    }
    getDoubleDeclarationSQL() {
        return 'double precision';
    }
    /** Native enum name if set, `text` for string enums, `smallint` for numeric ones. */
    getEnumTypeDeclarationSQL(column) {
        /* v8 ignore next */
        if (column.nativeEnumName) {
            return column.nativeEnumName;
        }
        if (column.items?.every(item => typeof item === 'string')) {
            return 'text';
        }
        return `smallint`;
    }
    supportsMultipleStatements() {
        return true;
    }
    /** BEGIN, or `start transaction` extended with isolation level / read-only options. */
    getBeginTransactionSQL(options) {
        if (options?.isolationLevel || options?.readOnly) {
            let sql = 'start transaction';
            sql += options.isolationLevel ? ` isolation level ${options.isolationLevel}` : '';
            sql += options.readOnly ? ` read only` : '';
            return [sql];
        }
        return ['begin'];
    }
    /**
     * Serializes a JS array into the PostgreSQL array literal form `{a,b,c}`.
     * Empty strings and values containing special characters are JSON-quoted.
     */
    marshallArray(values) {
        const quote = v => (v === '' || /["{},\\]/.exec(v) ? JSON.stringify(v) : v);
        return `{${values.map(v => quote('' + v)).join(',')}}`;
    }
    /** Parses a PostgreSQL array literal (`{a,b,c}`) back into a JS string array. */
    /* v8 ignore next */
    unmarshallArray(value) {
        if (value === '{}') {
            return [];
        }
        return value
            .substring(1, value.length - 1)
            .split(',')
            .map(v => {
                if (v === `""`) {
                    return '';
                }
                // strip surrounding quotes and unescape embedded double quotes
                if (/"(.*)"/.exec(v)) {
                    return v.substring(1, v.length - 1).replaceAll('\\"', '"');
                }
                return v;
            });
    }
    // length -1 means "no explicit length" - emit the bare type name
    getVarcharTypeDeclarationSQL(column) {
        if (column.length === -1) {
            return 'varchar';
        }
        return super.getVarcharTypeDeclarationSQL(column);
    }
    getCharTypeDeclarationSQL(column) {
        if (column.length === -1) {
            return 'char';
        }
        return super.getCharTypeDeclarationSQL(column);
    }
    getIntervalTypeDeclarationSQL(column) {
        return 'interval' + (column.length != null ? `(${column.length})` : '');
    }
    getBlobDeclarationSQL() {
        return 'bytea';
    }
    getJsonDeclarationSQL() {
        return 'jsonb';
    }
    /**
     * Builds a JSON property access expression (`col->'a'->>'b'`), casting the
     * result via #jsonTypeCasts when the runtime type calls for it.
     * NOTE: mutates the `path` argument (shift/pop).
     */
    getSearchJsonPropertyKey(path, type, aliased, value) {
        const first = path.shift();
        const last = path.pop();
        const root = this.quoteIdentifier(aliased ? `${ALIAS_REPLACEMENT}.${first}` : first);
        type = typeof type === 'string' ? this.getMappedType(type).runtimeType : String(type);
        const cast = key => raw(type in this.#jsonTypeCasts ? `(${key})::${this.#jsonTypeCasts[type]}` : key);
        let lastOperator = '->>';
        // force `->` for operator payloads with array values
        if (
            Utils.isPlainObject(value) &&
            Object.keys(value).every(key => ARRAY_OPERATORS.includes(key) && Array.isArray(value[key]))
        ) {
            lastOperator = '->';
        }
        if (path.length === 0) {
            return cast(`${root}${lastOperator}'${last}'`);
        }
        return cast(`${root}->${path.map(a => this.quoteValue(a)).join('->')}${lastOperator}'${last}'`);
    }
    /** Converts dotted JSON column paths into `(col->'a'->>'b')` index expressions. */
    getJsonIndexDefinition(index) {
        return index.columnNames.map(column => {
            if (!column.includes('.')) {
                return column;
            }
            const path = column.split('.');
            const first = path.shift();
            const last = path.pop();
            if (path.length === 0) {
                return `(${this.quoteIdentifier(first)}->>${this.quoteValue(last)})`;
            }
            return `(${this.quoteIdentifier(first)}->${path.map(c => this.quoteValue(c)).join('->')}->>${this.quoteValue(last)})`;
        });
    }
    /** Double-quotes an identifier, quoting each dotted part separately. */
    quoteIdentifier(id, quote = '"') {
        if (RawQueryFragment.isKnownFragment(id)) {
            return super.quoteIdentifier(id);
        }
        return `${quote}${id.toString().replace('.', `${quote}.${quote}`)}${quote}`;
    }
    // zero-pads a number to the given digit count
    pad(number, digits) {
        return String(number).padStart(digits, '0');
    }
    /**
     * Formats a Date for PostgreSQL: ISO string when the configured timezone
     * is 'Z', otherwise local time with an explicit UTC offset (and a ' BC'
     * suffix for years before 1).
     * @internal
     */
    formatDate(date) {
        if (this.timezone === 'Z') {
            return date.toISOString();
        }
        let offset = -date.getTimezoneOffset();
        let year = date.getFullYear();
        const isBCYear = year < 1;
        /* v8 ignore next */
        if (isBCYear) {
            // JS year 0 is 1 BC, -1 is 2 BC, etc.
            year = Math.abs(year) + 1;
        }
        const datePart = `${this.pad(year, 4)}-${this.pad(date.getMonth() + 1, 2)}-${this.pad(date.getDate(), 2)}`;
        const timePart = `${this.pad(date.getHours(), 2)}:${this.pad(date.getMinutes(), 2)}:${this.pad(date.getSeconds(), 2)}.${this.pad(date.getMilliseconds(), 3)}`;
        let ret = `${datePart}T${timePart}`;
        /* v8 ignore next */
        if (offset < 0) {
            ret += '-';
            offset *= -1;
        } else {
            ret += '+';
        }
        ret += this.pad(Math.floor(offset / 60), 2) + ':' + this.pad(offset % 60, 2);
        /* v8 ignore next */
        if (isBCYear) {
            ret += ' BC';
        }
        return ret;
    }
    indexForeignKeys() {
        return false;
    }
    /** Maps PostgreSQL type aliases to the generic type names of the base platform. */
    getDefaultMappedType(type) {
        const normalizedType = this.extractSimpleType(type);
        const map = {
            int2: 'smallint',
            smallserial: 'smallint',
            int: 'integer',
            int4: 'integer',
            serial: 'integer',
            serial4: 'integer',
            int8: 'bigint',
            bigserial: 'bigint',
            serial8: 'bigint',
            numeric: 'decimal',
            bool: 'boolean',
            real: 'float',
            float4: 'float',
            float8: 'double',
            timestamp: 'datetime',
            timestamptz: 'datetime',
            bytea: 'blob',
            jsonb: 'json',
            'character varying': 'varchar',
            bpchar: 'character',
        };
        return super.getDefaultMappedType(map[normalizedType] ?? type);
    }
    supportsSchemas() {
        return true;
    }
    getDefaultSchemaName() {
        return 'public';
    }
    /**
     * Returns the default name of index for the given columns
     * cannot go past 63 character length for identifiers in PostgreSQL
     */
    getIndexName(tableName, columns, type) {
        const indexName = super.getIndexName(tableName, columns, type);
        if (indexName.length > 63) {
            const suffix = type === 'primary' ? 'pkey' : type;
            // truncate and append a short hash to keep the name unique
            return `${indexName.substring(0, 55 - type.length)}_${Utils.hash(indexName, 5)}_${suffix}`;
        }
        return indexName;
    }
    getDefaultPrimaryName(tableName, columns) {
        const indexName = `${tableName}_pkey`;
        if (indexName.length > 63) {
            return `${indexName.substring(0, 55 - 'pkey'.length)}_${Utils.hash(indexName, 5)}_pkey`;
        }
        return indexName;
    }
    /**
     * @inheritDoc
     */
    castColumn(prop) {
        switch (prop?.columnTypes?.[0]) {
            case this.getUuidTypeDeclarationSQL({}):
                return '::text';
            case this.getBooleanTypeDeclarationSQL():
                return '::int';
            default:
                return '';
        }
    }
    getJsonArrayFromSQL(column, alias, _properties) {
        return `jsonb_array_elements(${column}) as ${this.quoteIdentifier(alias)}`;
    }
    /** Accesses a property of a JSON array element, casting via #jsonTypeCasts when needed. */
    getJsonArrayElementPropertySQL(alias, property, type) {
        const expr = `${this.quoteIdentifier(alias)}->>${this.quoteValue(property)}`;
        return type in this.#jsonTypeCasts ? `(${expr})::${this.#jsonTypeCasts[type]}` : expr;
    }
    getDefaultClientUrl() {
        return 'postgresql://postgres@127.0.0.1:5432';
    }
}
|
||||
18
node_modules/@mikro-orm/sql/dialects/postgresql/FullTextType.d.ts
generated
vendored
Normal file
18
node_modules/@mikro-orm/sql/dialects/postgresql/FullTextType.d.ts
generated
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
import { Type, type TransformContext, type RawQueryFragment } from '@mikro-orm/core';
|
||||
import type { BasePostgreSqlPlatform } from './BasePostgreSqlPlatform.js';
|
||||
/** PostgreSQL tsvector weight label. */
type FullTextWeight = 'A' | 'B' | 'C' | 'D';
/** Weighted input for a tsvector column, e.g. `{ A: 'title', B: 'body' }`. */
export type WeightedFullTextValue = {
    [K in FullTextWeight]?: string | null;
};
/**
 * Custom type for PostgreSQL `tsvector` columns. Accepts a plain string or a
 * weighted-value object and converts it to a `to_tsvector`/`setweight` SQL
 * fragment on write.
 */
export declare class FullTextType extends Type<string | WeightedFullTextValue, string | null | RawQueryFragment> {
    regconfig: string;
    constructor(regconfig?: string);
    compareAsType(): string;
    getColumnType(): string;
    convertToDatabaseValue(
        value: string | WeightedFullTextValue,
        platform: BasePostgreSqlPlatform,
        context?: TransformContext | boolean,
    ): string | null | RawQueryFragment;
}
export {};
|
||||
59
node_modules/@mikro-orm/sql/dialects/postgresql/FullTextType.js
generated
vendored
Normal file
59
node_modules/@mikro-orm/sql/dialects/postgresql/FullTextType.js
generated
vendored
Normal file
@@ -0,0 +1,59 @@
|
||||
import { raw, Type } from '@mikro-orm/core';
|
||||
/**
 * Custom type for PostgreSQL `tsvector` columns. Accepts a plain string or a
 * weighted-value object (`{ A: '...', B: '...' }`) and converts it to a
 * `to_tsvector`/`setweight` SQL fragment on write.
 */
export class FullTextType extends Type {
    // text-search configuration passed to to_tsvector (e.g. 'simple', 'english')
    regconfig;
    constructor(regconfig = 'simple') {
        super();
        this.regconfig = regconfig;
    }
    compareAsType() {
        return 'any';
    }
    getColumnType() {
        return 'tsvector';
    }
    // Use convertToDatabaseValue to prepare insert queries as this method has
    // access to the raw JS value. Returning a raw() fragment prevents the
    // query builder from sanitizing the returned chain of SQL functions again.
    convertToDatabaseValue(value, platform, context) {
        // Don't convert values coming from select queries to the to_tsvector
        // notation - these are compared as strings using a special operator or
        // function; that behaviour is defined in Platform#getFullTextWhereClause.
        // In that case the value is always a string.
        if (typeof context === 'object' && context.fromQuery) {
            return value;
        }
        // Nullish (and empty-string) values are not processed
        if (!value) {
            return null;
        }
        // An object of the form { A: 'test data', B: 'test data2', ... } is
        // converted to:
        //   setweight(to_tsvector(regconfig, value), 'A') || setweight(to_tsvector(regconfig, value), 'B') ...
        // The values are bound through raw() so they are escaped exactly once
        // and the resulting fragment is not sanitized any further.
        if (typeof value === 'object') {
            const bindings = [];
            const sqlParts = [];
            for (const [weight, data] of Object.entries(value)) {
                // Check whether the weight is valid according to Postgres;
                // Postgres allows the weight to be upper- or lowercase.
                if (!['A', 'B', 'C', 'D'].includes(weight.toUpperCase())) {
                    throw new Error('Weight should be one of A, B, C, D.');
                }
                // Ignore all values that are not a string (e.g. null entries)
                if (typeof data === 'string') {
                    sqlParts.push('setweight(to_tsvector(?, ?), ?)');
                    bindings.push(this.regconfig, data, weight);
                }
            }
            // Return null if the object has no valid strings
            if (sqlParts.length === 0) {
                return null;
            }
            // Join all the `setweight` parts using the PostgreSQL tsvector `||` concatenation operator
            return raw(sqlParts.join(' || '), bindings);
        }
        // if it's not an object, it is expected to be a string which does not have to be wrapped in setweight.
        return raw('to_tsvector(?, ?)', [this.regconfig, value]);
    }
}
|
||||
8
node_modules/@mikro-orm/sql/dialects/postgresql/PostgreSqlExceptionConverter.d.ts
generated
vendored
Normal file
8
node_modules/@mikro-orm/sql/dialects/postgresql/PostgreSqlExceptionConverter.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
import { ExceptionConverter, type Dictionary, type DriverException } from '@mikro-orm/core';
|
||||
/** Maps PostgreSQL error codes (SQLSTATE) to the ORM's driver exception classes. */
export declare class PostgreSqlExceptionConverter extends ExceptionConverter {
    /**
     * @see http://www.postgresql.org/docs/9.4/static/errcodes-appendix.html
     * @see https://github.com/doctrine/dbal/blob/master/src/Driver/AbstractPostgreSQLDriver.php
     */
    convertException(exception: Error & Dictionary): DriverException;
}
|
||||
59
node_modules/@mikro-orm/sql/dialects/postgresql/PostgreSqlExceptionConverter.js
generated
vendored
Normal file
59
node_modules/@mikro-orm/sql/dialects/postgresql/PostgreSqlExceptionConverter.js
generated
vendored
Normal file
@@ -0,0 +1,59 @@
|
||||
import {
|
||||
DeadlockException,
|
||||
ExceptionConverter,
|
||||
ForeignKeyConstraintViolationException,
|
||||
InvalidFieldNameException,
|
||||
NonUniqueFieldNameException,
|
||||
NotNullConstraintViolationException,
|
||||
SyntaxErrorException,
|
||||
TableExistsException,
|
||||
TableNotFoundException,
|
||||
UniqueConstraintViolationException,
|
||||
CheckConstraintViolationException,
|
||||
} from '@mikro-orm/core';
|
||||
/** Maps PostgreSQL error codes (SQLSTATE) to the ORM's driver exception classes. */
export class PostgreSqlExceptionConverter extends ExceptionConverter {
    /**
     * @see http://www.postgresql.org/docs/9.4/static/errcodes-appendix.html
     * @see https://github.com/doctrine/dbal/blob/master/src/Driver/AbstractPostgreSQLDriver.php
     */
    convertException(exception) {
        // enrich the message with the server-provided detail/hint when present
        for (const extra of ['detail', 'hint']) {
            const text = exception[extra];
            if (text?.toString().trim()) {
                exception.message += `\n - ${extra}: ${text}`;
            }
        }
        /* v8 ignore next */
        // '0A000' (feature not supported) is raised e.g. when TRUNCATE hits a
        // foreign key constraint; only that variant maps to an FK violation.
        if (exception.code === '0A000' && exception.message.includes('truncate')) {
            return new ForeignKeyConstraintViolationException(exception);
        }
        // straight SQLSTATE -> exception class mapping
        const exceptionByCode = {
            '40001': DeadlockException,
            '40P01': DeadlockException,
            '23502': NotNullConstraintViolationException,
            '23503': ForeignKeyConstraintViolationException,
            '23505': UniqueConstraintViolationException,
            '23514': CheckConstraintViolationException,
            '42601': SyntaxErrorException,
            '42702': NonUniqueFieldNameException,
            '42703': InvalidFieldNameException,
            '42P01': TableNotFoundException,
            '42P07': TableExistsException,
        };
        const ExceptionClass = exceptionByCode[exception.code];
        if (ExceptionClass) {
            return new ExceptionClass(exception);
        }
        return super.convertException(exception);
    }
}
|
||||
5
node_modules/@mikro-orm/sql/dialects/postgresql/PostgreSqlNativeQueryBuilder.d.ts
generated
vendored
Normal file
5
node_modules/@mikro-orm/sql/dialects/postgresql/PostgreSqlNativeQueryBuilder.d.ts
generated
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
import { NativeQueryBuilder } from '../../query/NativeQueryBuilder.js';
|
||||
/** PostgreSQL-specific native query builder (customizes TRUNCATE). @internal */
export declare class PostgreSqlNativeQueryBuilder extends NativeQueryBuilder {
    protected compileTruncate(): void;
}
|
||||
8
node_modules/@mikro-orm/sql/dialects/postgresql/PostgreSqlNativeQueryBuilder.js
generated
vendored
Normal file
8
node_modules/@mikro-orm/sql/dialects/postgresql/PostgreSqlNativeQueryBuilder.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
import { NativeQueryBuilder } from '../../query/NativeQueryBuilder.js';
|
||||
/** PostgreSQL-specific native query builder (customizes TRUNCATE). @internal */
export class PostgreSqlNativeQueryBuilder extends NativeQueryBuilder {
    /**
     * Compiles a TRUNCATE TABLE statement, appending PostgreSQL's
     * `restart identity cascade` to reset sequences and truncate
     * dependent tables as well.
     */
    compileTruncate() {
        super.compileTruncate();
        this.parts.push('restart identity cascade');
    }
}
|
||||
110
node_modules/@mikro-orm/sql/dialects/postgresql/PostgreSqlSchemaHelper.d.ts
generated
vendored
Normal file
110
node_modules/@mikro-orm/sql/dialects/postgresql/PostgreSqlSchemaHelper.d.ts
generated
vendored
Normal file
@@ -0,0 +1,110 @@
|
||||
import { type Dictionary } from '@mikro-orm/core';
|
||||
import { SchemaHelper } from '../../schema/SchemaHelper.js';
|
||||
import type { AbstractSqlConnection } from '../../AbstractSqlConnection.js';
|
||||
import type { CheckDef, Column, ForeignKey, IndexDef, Table, TableDifference } from '../../typings.js';
|
||||
import type { DatabaseSchema } from '../../schema/DatabaseSchema.js';
|
||||
import type { DatabaseTable } from '../../schema/DatabaseTable.js';
|
||||
export declare class PostgreSqlSchemaHelper extends SchemaHelper {
|
||||
static readonly DEFAULT_VALUES: {
|
||||
'now()': string[];
|
||||
'current_timestamp(?)': string[];
|
||||
"('now'::text)::timestamp(?) with time zone": string[];
|
||||
"('now'::text)::timestamp(?) without time zone": string[];
|
||||
'null::character varying': string[];
|
||||
'null::timestamp with time zone': string[];
|
||||
'null::timestamp without time zone': string[];
|
||||
};
|
||||
getSchemaBeginning(charset: string, disableForeignKeys?: boolean): string;
|
||||
getCreateDatabaseSQL(name: string): string;
|
||||
getListTablesSQL(): string;
|
||||
private getIgnoredViewsCondition;
|
||||
getListViewsSQL(): string;
|
||||
loadViews(schema: DatabaseSchema, connection: AbstractSqlConnection): Promise<void>;
|
||||
getListMaterializedViewsSQL(): string;
|
||||
loadMaterializedViews(schema: DatabaseSchema, connection: AbstractSqlConnection, schemaName?: string): Promise<void>;
|
||||
createMaterializedView(name: string, schema: string | undefined, definition: string, withData?: boolean): string;
|
||||
dropMaterializedViewIfExists(name: string, schema?: string): string;
|
||||
refreshMaterializedView(name: string, schema?: string, concurrently?: boolean): string;
|
||||
getNamespaces(connection: AbstractSqlConnection): Promise<string[]>;
|
||||
private getIgnoredNamespacesConditionSQL;
|
||||
loadInformationSchema(
|
||||
schema: DatabaseSchema,
|
||||
connection: AbstractSqlConnection,
|
||||
tables: Table[],
|
||||
schemas?: string[],
|
||||
): Promise<void>;
|
||||
getAllIndexes(connection: AbstractSqlConnection, tables: Table[]): Promise<Dictionary<IndexDef[]>>;
|
||||
/**
|
||||
* Parses column definitions from the full CREATE INDEX expression.
|
||||
* Since pg_get_indexdef(oid, col_num, true) doesn't include sort modifiers,
|
||||
* we extract them from the full expression instead.
|
||||
*
|
||||
* We use columnDefs (from individual pg_get_indexdef calls) as the source
|
||||
* of column names, and find their modifiers in the expression.
|
||||
*/
|
||||
private parseIndexColumnsFromExpression;
|
||||
/**
|
||||
* Extracts the content inside parentheses starting at the given position.
|
||||
* Handles nested parentheses correctly.
|
||||
*/
|
||||
private extractParenthesizedContent;
|
||||
getAllColumns(
|
||||
connection: AbstractSqlConnection,
|
||||
tablesBySchemas: Map<string | undefined, Table[]>,
|
||||
nativeEnums?: Dictionary<{
|
||||
name: string;
|
||||
schema?: string;
|
||||
items: string[];
|
||||
}>,
|
||||
): Promise<Dictionary<Column[]>>;
|
||||
getAllChecks(
|
||||
connection: AbstractSqlConnection,
|
||||
tablesBySchemas: Map<string | undefined, Table[]>,
|
||||
): Promise<Dictionary<CheckDef[]>>;
|
||||
getAllForeignKeys(
|
||||
connection: AbstractSqlConnection,
|
||||
tablesBySchemas: Map<string | undefined, Table[]>,
|
||||
): Promise<Dictionary<Dictionary<ForeignKey>>>;
|
||||
getNativeEnumDefinitions(
|
||||
connection: AbstractSqlConnection,
|
||||
schemas: string[],
|
||||
): Promise<
|
||||
Dictionary<{
|
||||
name: string;
|
||||
schema?: string;
|
||||
items: string[];
|
||||
}>
|
||||
>;
|
||||
getCreateNativeEnumSQL(name: string, values: unknown[], schema?: string): string;
|
||||
getDropNativeEnumSQL(name: string, schema?: string): string;
|
||||
getAlterNativeEnumSQL(name: string, schema?: string, value?: string, items?: string[], oldItems?: string[]): string;
|
||||
private getEnumDefinitions;
|
||||
createTableColumn(column: Column, table: DatabaseTable): string | undefined;
|
||||
getPreAlterTable(tableDiff: TableDifference, safe: boolean): string[];
|
||||
castColumn(name: string, type: string): string;
|
||||
dropForeignKey(tableName: string, constraintName: string): string;
|
||||
getPostAlterTable(tableDiff: TableDifference, safe: boolean): string[];
|
||||
private getAlterColumnAutoincrement;
|
||||
getChangeColumnCommentSQL(tableName: string, to: Column, schemaName?: string): string;
|
||||
alterTableComment(table: DatabaseTable, comment?: string): string;
|
||||
normalizeDefaultValue(defaultValue: string, length: number): string | number;
|
||||
appendComments(table: DatabaseTable): string[];
|
||||
getDatabaseExistsSQL(name: string): string;
|
||||
getDatabaseNotExistsError(dbName: string): string;
|
||||
getManagementDbName(): string;
|
||||
disableForeignKeysSQL(): string;
|
||||
enableForeignKeysSQL(): string;
|
||||
getRenameIndexSQL(tableName: string, index: IndexDef, oldIndexName: string): string[];
|
||||
dropIndex(table: string, index: IndexDef, oldIndexName?: string): string;
|
||||
/**
|
||||
* Build the column list for a PostgreSQL index.
|
||||
*/
|
||||
protected getIndexColumns(index: IndexDef): string;
|
||||
/**
|
||||
* PostgreSQL-specific index options like fill factor.
|
||||
*/
|
||||
protected getCreateIndexSuffix(index: IndexDef): string;
|
||||
private getIndexesSQL;
|
||||
private getChecksSQL;
|
||||
inferLengthFromColumnType(type: string): number | undefined;
|
||||
}
|
||||
776
node_modules/@mikro-orm/sql/dialects/postgresql/PostgreSqlSchemaHelper.js
generated
vendored
Normal file
776
node_modules/@mikro-orm/sql/dialects/postgresql/PostgreSqlSchemaHelper.js
generated
vendored
Normal file
@@ -0,0 +1,776 @@
|
||||
import { DeferMode, EnumType, Type, Utils } from '@mikro-orm/core';
|
||||
import { SchemaHelper } from '../../schema/SchemaHelper.js';
|
||||
/** PostGIS system views that should be automatically ignored */
|
||||
const POSTGIS_VIEWS = ['geography_columns', 'geometry_columns'];
|
||||
export class PostgreSqlSchemaHelper extends SchemaHelper {
|
||||
static DEFAULT_VALUES = {
|
||||
'now()': ['now()', 'current_timestamp'],
|
||||
'current_timestamp(?)': ['current_timestamp(?)'],
|
||||
"('now'::text)::timestamp(?) with time zone": ['current_timestamp(?)'],
|
||||
"('now'::text)::timestamp(?) without time zone": ['current_timestamp(?)'],
|
||||
'null::character varying': ['null'],
|
||||
'null::timestamp with time zone': ['null'],
|
||||
'null::timestamp without time zone': ['null'],
|
||||
};
|
||||
getSchemaBeginning(charset, disableForeignKeys) {
|
||||
if (disableForeignKeys) {
|
||||
return `set names '${charset}';\n${this.disableForeignKeysSQL()}\n\n`;
|
||||
}
|
||||
return `set names '${charset}';\n\n`;
|
||||
}
|
||||
getCreateDatabaseSQL(name) {
|
||||
return `create database ${this.quote(name)}`;
|
||||
}
|
||||
getListTablesSQL() {
|
||||
return (
|
||||
`select table_name, table_schema as schema_name, ` +
|
||||
`(select pg_catalog.obj_description(c.oid) from pg_catalog.pg_class c
|
||||
where c.oid = (select ('"' || table_schema || '"."' || table_name || '"')::regclass::oid) and c.relname = table_name) as table_comment ` +
|
||||
`from information_schema.tables ` +
|
||||
`where ${this.getIgnoredNamespacesConditionSQL('table_schema')} ` +
|
||||
`and table_name != 'geometry_columns' and table_name != 'spatial_ref_sys' and table_type != 'VIEW' ` +
|
||||
`and table_name not in (select inhrelid::regclass::text from pg_inherits) ` +
|
||||
`order by table_name`
|
||||
);
|
||||
}
|
||||
getIgnoredViewsCondition() {
|
||||
return POSTGIS_VIEWS.map(v => `table_name != '${v}'`).join(' and ');
|
||||
}
|
||||
getListViewsSQL() {
|
||||
return (
|
||||
`select table_name as view_name, table_schema as schema_name, view_definition ` +
|
||||
`from information_schema.views ` +
|
||||
`where ${this.getIgnoredNamespacesConditionSQL('table_schema')} ` +
|
||||
`and ${this.getIgnoredViewsCondition()} ` +
|
||||
`order by table_name`
|
||||
);
|
||||
}
|
||||
async loadViews(schema, connection) {
|
||||
const views = await connection.execute(this.getListViewsSQL());
|
||||
for (const view of views) {
|
||||
const definition = view.view_definition?.trim().replace(/;$/, '') ?? '';
|
||||
if (definition) {
|
||||
schema.addView(view.view_name, view.schema_name, definition);
|
||||
}
|
||||
}
|
||||
}
|
||||
getListMaterializedViewsSQL() {
|
||||
return (
|
||||
`select matviewname as view_name, schemaname as schema_name, definition as view_definition ` +
|
||||
`from pg_matviews ` +
|
||||
`where ${this.getIgnoredNamespacesConditionSQL('schemaname')} ` +
|
||||
`order by matviewname`
|
||||
);
|
||||
}
|
||||
async loadMaterializedViews(schema, connection, schemaName) {
|
||||
const views = await connection.execute(this.getListMaterializedViewsSQL());
|
||||
for (const view of views) {
|
||||
const definition = view.view_definition?.trim().replace(/;$/, '') ?? '';
|
||||
if (definition) {
|
||||
schema.addView(view.view_name, view.schema_name, definition, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
createMaterializedView(name, schema, definition, withData = true) {
|
||||
const viewName = this.quote(this.getTableName(name, schema));
|
||||
const dataClause = withData ? ' with data' : ' with no data';
|
||||
return `create materialized view ${viewName} as ${definition}${dataClause}`;
|
||||
}
|
||||
dropMaterializedViewIfExists(name, schema) {
|
||||
return `drop materialized view if exists ${this.quote(this.getTableName(name, schema))} cascade`;
|
||||
}
|
||||
refreshMaterializedView(name, schema, concurrently = false) {
|
||||
const concurrent = concurrently ? ' concurrently' : '';
|
||||
return `refresh materialized view${concurrent} ${this.quote(this.getTableName(name, schema))}`;
|
||||
}
|
||||
async getNamespaces(connection) {
|
||||
const sql =
|
||||
`select schema_name from information_schema.schemata ` +
|
||||
`where ${this.getIgnoredNamespacesConditionSQL()} ` +
|
||||
`order by schema_name`;
|
||||
const res = await connection.execute(sql);
|
||||
return res.map(row => row.schema_name);
|
||||
}
|
||||
getIgnoredNamespacesConditionSQL(column = 'schema_name') {
|
||||
const ignored = [
|
||||
'information_schema',
|
||||
'tiger',
|
||||
'topology',
|
||||
/* v8 ignore next */
|
||||
...(this.platform.getConfig().get('schemaGenerator').ignoreSchema ?? []),
|
||||
]
|
||||
.map(s => this.platform.quoteValue(s))
|
||||
.join(', ');
|
||||
const ignoredPrefixes = ['pg_', 'crdb_', '_timescaledb_'].map(p => `"${column}" not like '${p}%'`).join(' and ');
|
||||
return `${ignoredPrefixes} and "${column}" not in (${ignored})`;
|
||||
}
|
||||
async loadInformationSchema(schema, connection, tables, schemas) {
|
||||
schemas ??= tables.length === 0 ? [schema.name] : tables.map(t => t.schema_name);
|
||||
const nativeEnums = await this.getNativeEnumDefinitions(connection, schemas);
|
||||
schema.setNativeEnums(nativeEnums);
|
||||
if (tables.length === 0) {
|
||||
return;
|
||||
}
|
||||
const tablesBySchema = this.getTablesGroupedBySchemas(tables);
|
||||
const columns = await this.getAllColumns(connection, tablesBySchema, nativeEnums);
|
||||
const indexes = await this.getAllIndexes(connection, tables);
|
||||
const checks = await this.getAllChecks(connection, tablesBySchema);
|
||||
const fks = await this.getAllForeignKeys(connection, tablesBySchema);
|
||||
for (const t of tables) {
|
||||
const key = this.getTableKey(t);
|
||||
const table = schema.addTable(t.table_name, t.schema_name, t.table_comment);
|
||||
const pks = await this.getPrimaryKeys(connection, indexes[key], table.name, table.schema);
|
||||
const enums = this.getEnumDefinitions(checks[key] ?? []);
|
||||
if (columns[key]) {
|
||||
table.init(columns[key], indexes[key], checks[key], pks, fks[key], enums);
|
||||
}
|
||||
}
|
||||
}
|
||||
async getAllIndexes(connection, tables) {
|
||||
const sql = this.getIndexesSQL(tables);
|
||||
const unquote = str => str.replace(/['"`]/g, '');
|
||||
const allIndexes = await connection.execute(sql);
|
||||
const ret = {};
|
||||
for (const index of allIndexes) {
|
||||
const key = this.getTableKey(index);
|
||||
// Extract INCLUDE columns from expression first, to filter them from key columns
|
||||
const includeMatch = index.expression?.match(/include\s*\(([^)]+)\)/i);
|
||||
const includeColumns = includeMatch ? includeMatch[1].split(',').map(col => unquote(col.trim())) : [];
|
||||
// Filter out INCLUDE columns from the column definitions to get only key columns
|
||||
const keyColumnDefs = index.index_def.filter(col => !includeColumns.includes(unquote(col)));
|
||||
// Parse sort order and NULLS ordering from the full expression
|
||||
// pg_get_indexdef individual columns don't include sort modifiers, so we parse from full expression
|
||||
const columns = this.parseIndexColumnsFromExpression(index.expression, keyColumnDefs, unquote);
|
||||
const columnNames = columns.map(col => col.name);
|
||||
const hasAdvancedColumnOptions = columns.some(col => col.sort || col.nulls || col.collation);
|
||||
const indexDef = {
|
||||
columnNames,
|
||||
composite: columnNames.length > 1,
|
||||
// JSON columns can have unique index but not unique constraint, and we need to distinguish those, so we can properly drop them
|
||||
constraint: index.contype === 'u',
|
||||
keyName: index.constraint_name,
|
||||
unique: index.unique,
|
||||
primary: index.primary,
|
||||
};
|
||||
// Add columns array if there are advanced options
|
||||
if (hasAdvancedColumnOptions) {
|
||||
indexDef.columns = columns;
|
||||
}
|
||||
if (index.condeferrable) {
|
||||
indexDef.deferMode = index.condeferred ? DeferMode.INITIALLY_DEFERRED : DeferMode.INITIALLY_IMMEDIATE;
|
||||
}
|
||||
if (index.index_def.some(col => /[(): ,"'`]/.exec(col)) || index.expression?.match(/ where /i)) {
|
||||
indexDef.expression = index.expression;
|
||||
}
|
||||
if (index.deferrable) {
|
||||
indexDef.deferMode = index.initially_deferred ? DeferMode.INITIALLY_DEFERRED : DeferMode.INITIALLY_IMMEDIATE;
|
||||
}
|
||||
// Extract fillFactor from reloptions
|
||||
if (index.reloptions) {
|
||||
const fillFactorMatch = index.reloptions.find(opt => opt.startsWith('fillfactor='));
|
||||
if (fillFactorMatch) {
|
||||
indexDef.fillFactor = parseInt(fillFactorMatch.split('=')[1], 10);
|
||||
}
|
||||
}
|
||||
// Add INCLUDE columns (already extracted above)
|
||||
if (includeColumns.length > 0) {
|
||||
indexDef.include = includeColumns;
|
||||
}
|
||||
// Add index type if not btree (the default)
|
||||
if (index.index_type && index.index_type !== 'btree') {
|
||||
indexDef.type = index.index_type;
|
||||
}
|
||||
ret[key] ??= [];
|
||||
ret[key].push(indexDef);
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
/**
|
||||
* Parses column definitions from the full CREATE INDEX expression.
|
||||
* Since pg_get_indexdef(oid, col_num, true) doesn't include sort modifiers,
|
||||
* we extract them from the full expression instead.
|
||||
*
|
||||
* We use columnDefs (from individual pg_get_indexdef calls) as the source
|
||||
* of column names, and find their modifiers in the expression.
|
||||
*/
|
||||
parseIndexColumnsFromExpression(expression, columnDefs, unquote) {
|
||||
// Extract just the column list from the expression (between first parens after USING)
|
||||
// Pattern: ... USING method (...columns...) [INCLUDE (...)] [WHERE ...]
|
||||
// Note: pg_get_indexdef always returns a valid expression with USING clause
|
||||
const usingMatch = /using\s+\w+\s*\(/i.exec(expression);
|
||||
const startIdx = usingMatch.index + usingMatch[0].length - 1; // Position of opening (
|
||||
const columnsStr = this.extractParenthesizedContent(expression, startIdx);
|
||||
// Use the column names from columnDefs and find their modifiers in the expression
|
||||
return columnDefs.map(colDef => {
|
||||
const name = unquote(colDef);
|
||||
const result = { name };
|
||||
// Find this column in the expression and extract modifiers
|
||||
// Create a pattern that matches the column name (quoted or unquoted) followed by modifiers
|
||||
const escapedName = name.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
||||
const colPattern = new RegExp(`"?${escapedName}"?\\s*([^,)]*?)(?:,|$)`, 'i');
|
||||
const colMatch = columnsStr.match(colPattern);
|
||||
if (colMatch) {
|
||||
const modifiers = colMatch[1];
|
||||
// Extract sort order (PostgreSQL omits ASC in output as it's the default)
|
||||
if (/\bdesc\b/i.test(modifiers)) {
|
||||
result.sort = 'DESC';
|
||||
}
|
||||
// Extract NULLS ordering
|
||||
const nullsMatch = /nulls\s+(first|last)/i.exec(modifiers);
|
||||
if (nullsMatch) {
|
||||
result.nulls = nullsMatch[1].toUpperCase();
|
||||
}
|
||||
// Extract collation
|
||||
const collateMatch = /collate\s+"?([^"\s,)]+)"?/i.exec(modifiers);
|
||||
if (collateMatch) {
|
||||
result.collation = collateMatch[1];
|
||||
}
|
||||
}
|
||||
return result;
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Extracts the content inside parentheses starting at the given position.
|
||||
* Handles nested parentheses correctly.
|
||||
*/
|
||||
extractParenthesizedContent(str, startIdx) {
|
||||
let depth = 0;
|
||||
const start = startIdx + 1;
|
||||
for (let i = startIdx; i < str.length; i++) {
|
||||
if (str[i] === '(') {
|
||||
depth++;
|
||||
} else if (str[i] === ')') {
|
||||
depth--;
|
||||
if (depth === 0) {
|
||||
return str.slice(start, i);
|
||||
}
|
||||
}
|
||||
}
|
||||
/* v8 ignore next - pg_get_indexdef always returns balanced parentheses */
|
||||
return '';
|
||||
}
|
||||
async getAllColumns(connection, tablesBySchemas, nativeEnums) {
|
||||
const sql = `select table_schema as schema_name, table_name, column_name,
|
||||
column_default,
|
||||
is_nullable,
|
||||
udt_name,
|
||||
udt_schema,
|
||||
coalesce(datetime_precision, character_maximum_length) length,
|
||||
atttypmod custom_length,
|
||||
numeric_precision,
|
||||
numeric_scale,
|
||||
data_type,
|
||||
is_identity,
|
||||
identity_generation,
|
||||
generation_expression,
|
||||
pg_catalog.col_description(pgc.oid, cols.ordinal_position::int) column_comment
|
||||
from information_schema.columns cols
|
||||
join pg_class pgc on cols.table_name = pgc.relname
|
||||
join pg_attribute pga on pgc.oid = pga.attrelid and cols.column_name = pga.attname
|
||||
where (${[...tablesBySchemas.entries()].map(([schema, tables]) => `(table_schema = ${this.platform.quoteValue(schema)} and table_name in (${tables.map(t => this.platform.quoteValue(t.table_name)).join(',')}))`).join(' or ')})
|
||||
order by ordinal_position`;
|
||||
const allColumns = await connection.execute(sql);
|
||||
const str = val => (val != null ? '' + val : val);
|
||||
const ret = {};
|
||||
for (const col of allColumns) {
|
||||
const mappedType = connection.getPlatform().getMappedType(col.data_type);
|
||||
const increments =
|
||||
(col.column_default?.includes('nextval') || col.is_identity === 'YES') &&
|
||||
connection.getPlatform().isNumericColumn(mappedType);
|
||||
const key = this.getTableKey(col);
|
||||
ret[key] ??= [];
|
||||
let type = col.data_type.toLowerCase() === 'array' ? col.udt_name.replace(/^_(.*)$/, '$1[]') : col.udt_name;
|
||||
if (
|
||||
col.data_type === 'USER-DEFINED' &&
|
||||
col.udt_schema &&
|
||||
col.udt_schema !== this.platform.getDefaultSchemaName()
|
||||
) {
|
||||
type = `${col.udt_schema}.${type}`;
|
||||
}
|
||||
if (type === 'bpchar') {
|
||||
type = 'char';
|
||||
}
|
||||
if (type === 'vector' && col.length == null && col.custom_length != null && col.custom_length !== -1) {
|
||||
col.length = col.custom_length;
|
||||
}
|
||||
if (col.length != null && !type.endsWith(`(${col.length})`) && !['text', 'date'].includes(type)) {
|
||||
type += `(${col.length})`;
|
||||
}
|
||||
if (type === 'numeric' && col.numeric_precision != null && col.numeric_scale != null) {
|
||||
type += `(${col.numeric_precision},${col.numeric_scale})`;
|
||||
}
|
||||
const length = this.inferLengthFromColumnType(type) === -1 ? -1 : col.length;
|
||||
const column = {
|
||||
name: col.column_name,
|
||||
type,
|
||||
mappedType,
|
||||
length,
|
||||
precision: col.numeric_precision,
|
||||
scale: col.numeric_scale,
|
||||
nullable: col.is_nullable === 'YES',
|
||||
default: str(this.normalizeDefaultValue(col.column_default, col.length)),
|
||||
unsigned: increments,
|
||||
autoincrement: increments,
|
||||
generated:
|
||||
col.is_identity === 'YES'
|
||||
? col.identity_generation === 'BY DEFAULT'
|
||||
? 'by default as identity'
|
||||
: 'identity'
|
||||
: col.generation_expression
|
||||
? col.generation_expression + ' stored'
|
||||
: undefined,
|
||||
comment: col.column_comment,
|
||||
};
|
||||
if (nativeEnums?.[column.type]) {
|
||||
column.mappedType = Type.getType(EnumType);
|
||||
column.nativeEnumName = column.type;
|
||||
column.enumItems = nativeEnums[column.type]?.items;
|
||||
}
|
||||
ret[key].push(column);
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
async getAllChecks(connection, tablesBySchemas) {
|
||||
const sql = this.getChecksSQL(tablesBySchemas);
|
||||
const allChecks = await connection.execute(sql);
|
||||
const ret = {};
|
||||
const seen = new Set();
|
||||
for (const check of allChecks) {
|
||||
const key = this.getTableKey(check);
|
||||
const dedupeKey = `${key}:${check.name}`;
|
||||
if (seen.has(dedupeKey)) {
|
||||
continue;
|
||||
}
|
||||
seen.add(dedupeKey);
|
||||
ret[key] ??= [];
|
||||
const m = /^check \(\((.*)\)\)$/is.exec(check.expression);
|
||||
const def = m?.[1].replace(/\((.*?)\)::\w+/g, '$1');
|
||||
ret[key].push({
|
||||
name: check.name,
|
||||
columnName: check.column_name,
|
||||
definition: check.expression,
|
||||
expression: def,
|
||||
});
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
async getAllForeignKeys(connection, tablesBySchemas) {
|
||||
const sql = `select nsp1.nspname schema_name, cls1.relname table_name, nsp2.nspname referenced_schema_name,
|
||||
cls2.relname referenced_table_name, a.attname column_name, af.attname referenced_column_name, conname constraint_name,
|
||||
confupdtype update_rule, confdeltype delete_rule, array_position(con.conkey,a.attnum) as ord, condeferrable, condeferred,
|
||||
pg_get_constraintdef(con.oid) as constraint_def
|
||||
from pg_attribute a
|
||||
join pg_constraint con on con.conrelid = a.attrelid AND a.attnum = ANY (con.conkey)
|
||||
join pg_attribute af on af.attnum = con.confkey[array_position(con.conkey,a.attnum)] AND af.attrelid = con.confrelid
|
||||
join pg_namespace nsp1 on nsp1.oid = con.connamespace
|
||||
join pg_class cls1 on cls1.oid = con.conrelid
|
||||
join pg_class cls2 on cls2.oid = confrelid
|
||||
join pg_namespace nsp2 on nsp2.oid = cls2.relnamespace
|
||||
where (${[...tablesBySchemas.entries()].map(([schema, tables]) => `(cls1.relname in (${tables.map(t => this.platform.quoteValue(t.table_name)).join(',')}) and nsp1.nspname = ${this.platform.quoteValue(schema)})`).join(' or ')})
|
||||
and confrelid > 0
|
||||
order by nsp1.nspname, cls1.relname, constraint_name, ord`;
|
||||
const allFks = await connection.execute(sql);
|
||||
const ret = {};
|
||||
function mapReferentialIntegrity(value, def) {
|
||||
const match = ['n', 'd'].includes(value) && /ON DELETE (SET (NULL|DEFAULT) \(.*?\))/.exec(def);
|
||||
if (match) {
|
||||
return match[1];
|
||||
}
|
||||
/* v8 ignore next */
|
||||
switch (value) {
|
||||
case 'r':
|
||||
return 'RESTRICT';
|
||||
case 'c':
|
||||
return 'CASCADE';
|
||||
case 'n':
|
||||
return 'SET NULL';
|
||||
case 'd':
|
||||
return 'SET DEFAULT';
|
||||
case 'a':
|
||||
default:
|
||||
return 'NO ACTION';
|
||||
}
|
||||
}
|
||||
for (const fk of allFks) {
|
||||
fk.update_rule = mapReferentialIntegrity(fk.update_rule, fk.constraint_def);
|
||||
fk.delete_rule = mapReferentialIntegrity(fk.delete_rule, fk.constraint_def);
|
||||
if (fk.condeferrable) {
|
||||
fk.defer_mode = fk.condeferred ? DeferMode.INITIALLY_DEFERRED : DeferMode.INITIALLY_IMMEDIATE;
|
||||
}
|
||||
const key = this.getTableKey(fk);
|
||||
ret[key] ??= [];
|
||||
ret[key].push(fk);
|
||||
}
|
||||
Object.keys(ret).forEach(key => {
|
||||
const [schemaName, tableName] = key.split('.');
|
||||
ret[key] = this.mapForeignKeys(ret[key], tableName, schemaName);
|
||||
});
|
||||
return ret;
|
||||
}
|
||||
async getNativeEnumDefinitions(connection, schemas) {
|
||||
const uniqueSchemas = Utils.unique(schemas);
|
||||
const res = await connection.execute(
|
||||
`select t.typname as enum_name, n.nspname as schema_name, array_agg(e.enumlabel order by e.enumsortorder) as enum_value
|
||||
from pg_type t
|
||||
join pg_enum e on t.oid = e.enumtypid
|
||||
join pg_catalog.pg_namespace n on n.oid = t.typnamespace
|
||||
where n.nspname in (${Array(uniqueSchemas.length).fill('?').join(', ')})
|
||||
group by t.typname, n.nspname`,
|
||||
uniqueSchemas,
|
||||
);
|
||||
return res.reduce((o, row) => {
|
||||
let name = row.enum_name;
|
||||
if (row.schema_name && row.schema_name !== this.platform.getDefaultSchemaName()) {
|
||||
name = row.schema_name + '.' + name;
|
||||
}
|
||||
let items = row.enum_value;
|
||||
if (!Array.isArray(items)) {
|
||||
items = this.platform.unmarshallArray(row.enum_value);
|
||||
}
|
||||
o[name] = {
|
||||
name: row.enum_name,
|
||||
schema: row.schema_name,
|
||||
items,
|
||||
};
|
||||
return o;
|
||||
}, {});
|
||||
}
|
||||
getCreateNativeEnumSQL(name, values, schema) {
|
||||
if (schema && schema !== this.platform.getDefaultSchemaName()) {
|
||||
name = schema + '.' + name;
|
||||
}
|
||||
return `create type ${this.quote(name)} as enum (${values.map(value => this.platform.quoteValue(value)).join(', ')})`;
|
||||
}
|
||||
getDropNativeEnumSQL(name, schema) {
|
||||
if (schema && schema !== this.platform.getDefaultSchemaName()) {
|
||||
name = schema + '.' + name;
|
||||
}
|
||||
return `drop type ${this.quote(name)}`;
|
||||
}
|
||||
getAlterNativeEnumSQL(name, schema, value, items, oldItems) {
|
||||
if (schema && schema !== this.platform.getDefaultSchemaName()) {
|
||||
name = schema + '.' + name;
|
||||
}
|
||||
let suffix = '';
|
||||
if (items && value && oldItems) {
|
||||
const position = items.indexOf(value);
|
||||
if (position > 0) {
|
||||
suffix = ` after ${this.platform.quoteValue(items[position - 1])}`;
|
||||
} else if (items.length > 1 && oldItems.length > 0) {
|
||||
suffix = ` before ${this.platform.quoteValue(oldItems[0])}`;
|
||||
}
|
||||
}
|
||||
return `alter type ${this.quote(name)} add value if not exists ${this.platform.quoteValue(value)}${suffix}`;
|
||||
}
|
||||
getEnumDefinitions(checks) {
|
||||
return checks.reduce((o, item) => {
|
||||
// check constraints are defined as one of:
|
||||
// `CHECK ((type = ANY (ARRAY['local'::text, 'global'::text])))`
|
||||
// `CHECK (("columnName" = ANY (ARRAY['local'::text, 'global'::text])))`
|
||||
// `CHECK (((enum_test)::text = ANY ((ARRAY['a'::character varying, 'b'::character varying, 'c'::character varying])::text[])))`
|
||||
// `CHECK ((("enumTest")::text = ANY ((ARRAY['a'::character varying, 'b'::character varying, 'c'::character varying])::text[])))`
|
||||
// `CHECK ((type = 'a'::text))`
|
||||
const m1 =
|
||||
item.definition?.match(/check \(\(\("?(\w+)"?\)::/i) || item.definition?.match(/check \(\("?(\w+)"? = /i);
|
||||
const m2 = item.definition?.match(/\(array\[(.*)]\)/i) || item.definition?.match(/ = (.*)\)/i);
|
||||
if (item.columnName && m1 && m2) {
|
||||
const m3 = m2[1].match(/('[^']*'::text)/g);
|
||||
let items;
|
||||
/* v8 ignore next */
|
||||
if (m3) {
|
||||
items = m3.map(item => /^\(?'(.*)'/.exec(item.trim())?.[1]);
|
||||
} else {
|
||||
items = m2[1].split(',').map(item => /^\(?'(.*)'/.exec(item.trim())?.[1]);
|
||||
}
|
||||
items = items.filter(item => item !== undefined);
|
||||
if (items.length > 0) {
|
||||
o[item.columnName] = items;
|
||||
item.expression = `${this.quote(item.columnName)} in ('${items.join("', '")}')`;
|
||||
item.definition = `check (${item.expression})`;
|
||||
}
|
||||
}
|
||||
return o;
|
||||
}, {});
|
||||
}
|
||||
createTableColumn(column, table) {
|
||||
const pk = table.getPrimaryKey();
|
||||
const compositePK = pk?.composite;
|
||||
const primaryKey = !this.hasNonDefaultPrimaryKeyName(table);
|
||||
const col = [this.quote(column.name)];
|
||||
if (column.autoincrement && !column.generated && !compositePK) {
|
||||
col.push(column.mappedType.getColumnType({ autoincrement: true }, this.platform));
|
||||
} else {
|
||||
let columnType = column.type;
|
||||
if (column.nativeEnumName) {
|
||||
const parts = column.type.split('.');
|
||||
if (parts.length === 2 && parts[0] === '*') {
|
||||
columnType = `${table.schema}.${parts[1]}`;
|
||||
}
|
||||
if (columnType.endsWith('[]')) {
|
||||
columnType = this.quote(columnType.substring(0, columnType.length - 2)) + '[]';
|
||||
} else {
|
||||
columnType = this.quote(columnType);
|
||||
}
|
||||
}
|
||||
if (column.generated === 'by default as identity') {
|
||||
columnType += ` generated ${column.generated}`;
|
||||
} else if (column.generated) {
|
||||
columnType += ` generated always as ${column.generated}`;
|
||||
}
|
||||
col.push(columnType);
|
||||
Utils.runIfNotEmpty(() => col.push('null'), column.nullable);
|
||||
Utils.runIfNotEmpty(() => col.push('not null'), !column.nullable);
|
||||
}
|
||||
if (column.autoincrement && !compositePK) {
|
||||
Utils.runIfNotEmpty(() => col.push('primary key'), primaryKey && column.primary);
|
||||
}
|
||||
const useDefault = column.default != null && column.default !== 'null' && !column.autoincrement;
|
||||
Utils.runIfNotEmpty(() => col.push(`default ${column.default}`), useDefault);
|
||||
return col.join(' ');
|
||||
}
|
||||
getPreAlterTable(tableDiff, safe) {
|
||||
const ret = [];
|
||||
const parts = tableDiff.name.split('.');
|
||||
const tableName = parts.pop();
|
||||
const schemaName = parts.pop();
|
||||
/* v8 ignore next */
|
||||
const name =
|
||||
(schemaName && schemaName !== this.platform.getDefaultSchemaName() ? schemaName + '.' : '') + tableName;
|
||||
const quotedName = this.quote(name);
|
||||
// detect that the column was an enum before and remove the check constraint in such case here
|
||||
const changedEnums = Object.values(tableDiff.changedColumns).filter(
|
||||
col => col.fromColumn.mappedType instanceof EnumType,
|
||||
);
|
||||
for (const col of changedEnums) {
|
||||
if (!col.fromColumn.nativeEnumName && col.column.nativeEnumName && col.fromColumn.default) {
|
||||
ret.push(`alter table ${quotedName} alter column "${col.column.name}" drop default`);
|
||||
}
|
||||
if (col.fromColumn.nativeEnumName && !col.column.nativeEnumName && col.fromColumn.default) {
|
||||
ret.push(`alter table ${quotedName} alter column "${col.column.name}" drop default`);
|
||||
}
|
||||
}
|
||||
// changing uuid column type requires to cast it to text first
|
||||
const uuids = Object.values(tableDiff.changedColumns).filter(
|
||||
col => col.changedProperties.has('type') && col.fromColumn.type === 'uuid',
|
||||
);
|
||||
for (const col of uuids) {
|
||||
ret.push(
|
||||
`alter table ${quotedName} alter column "${col.column.name}" type text using ("${col.column.name}"::text)`,
|
||||
);
|
||||
}
|
||||
for (const { column } of Object.values(tableDiff.changedColumns).filter(diff =>
|
||||
diff.changedProperties.has('autoincrement'),
|
||||
)) {
|
||||
if (!column.autoincrement && column.default == null) {
|
||||
ret.push(`alter table ${quotedName} alter column ${this.quote(column.name)} drop default`);
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
castColumn(name, type) {
|
||||
if (type === 'uuid') {
|
||||
type = 'text::uuid';
|
||||
}
|
||||
return ` using (${this.quote(name)}::${type})`;
|
||||
}
|
||||
dropForeignKey(tableName, constraintName) {
|
||||
return `alter table ${this.quote(tableName)} drop constraint ${this.quote(constraintName)}`;
|
||||
}
|
||||
getPostAlterTable(tableDiff, safe) {
|
||||
const ret = [];
|
||||
const parts = tableDiff.name.split('.');
|
||||
const tableName = parts.pop();
|
||||
const schemaName = parts.pop();
|
||||
/* v8 ignore next */
|
||||
const name =
|
||||
(schemaName && schemaName !== this.platform.getDefaultSchemaName() ? schemaName + '.' : '') + tableName;
|
||||
const quotedName = this.quote(name);
|
||||
// detect that the column was an enum before and remove the check constraint in such a case here
|
||||
const changedEnums = Object.values(tableDiff.changedColumns).filter(
|
||||
col => col.fromColumn.mappedType instanceof EnumType,
|
||||
);
|
||||
for (const col of changedEnums) {
|
||||
if (!col.fromColumn.nativeEnumName && col.column.nativeEnumName && col.column.default) {
|
||||
ret.push(`alter table ${quotedName} alter column "${col.column.name}" set default ${col.column.default}`);
|
||||
}
|
||||
if (col.fromColumn.nativeEnumName && !col.column.nativeEnumName && col.column.default) {
|
||||
ret.push(`alter table ${quotedName} alter column "${col.column.name}" set default ${col.column.default}`);
|
||||
}
|
||||
}
|
||||
for (const { column } of Object.values(tableDiff.changedColumns).filter(diff =>
|
||||
diff.changedProperties.has('autoincrement'),
|
||||
)) {
|
||||
ret.push(...this.getAlterColumnAutoincrement(tableName, column, schemaName));
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
getAlterColumnAutoincrement(tableName, column, schemaName) {
|
||||
const ret = [];
|
||||
/* v8 ignore next */
|
||||
const name =
|
||||
(schemaName && schemaName !== this.platform.getDefaultSchemaName() ? schemaName + '.' : '') + tableName;
|
||||
if (column.autoincrement) {
|
||||
const seqName = this.platform.getIndexName(tableName, [column.name], 'sequence');
|
||||
ret.push(`create sequence if not exists ${this.quote(seqName)}`);
|
||||
ret.push(`select setval('${seqName}', (select max(${this.quote(column.name)}) from ${this.quote(name)}))`);
|
||||
ret.push(
|
||||
`alter table ${this.quote(name)} alter column ${this.quote(column.name)} set default nextval('${seqName}')`,
|
||||
);
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
getChangeColumnCommentSQL(tableName, to, schemaName) {
|
||||
const name = this.quote(
|
||||
(schemaName && schemaName !== this.platform.getDefaultSchemaName() ? schemaName + '.' : '') + tableName,
|
||||
);
|
||||
const value = to.comment ? this.platform.quoteValue(to.comment) : 'null';
|
||||
return `comment on column ${name}.${this.quote(to.name)} is ${value}`;
|
||||
}
|
||||
alterTableComment(table, comment) {
|
||||
return `comment on table ${table.getQuotedName()} is ${this.platform.quoteValue(comment ?? '')}`;
|
||||
}
|
||||
normalizeDefaultValue(defaultValue, length) {
|
||||
if (!defaultValue || typeof defaultValue !== 'string') {
|
||||
return super.normalizeDefaultValue(defaultValue, length, PostgreSqlSchemaHelper.DEFAULT_VALUES);
|
||||
}
|
||||
const match = /^'(.*)'::(.*)$/.exec(defaultValue);
|
||||
if (match) {
|
||||
if (match[2] === 'integer') {
|
||||
return +match[1];
|
||||
}
|
||||
return `'${match[1]}'`;
|
||||
}
|
||||
return super.normalizeDefaultValue(defaultValue, length, PostgreSqlSchemaHelper.DEFAULT_VALUES);
|
||||
}
|
||||
appendComments(table) {
|
||||
const sql = [];
|
||||
if (table.comment) {
|
||||
const comment = this.platform.quoteValue(this.processComment(table.comment));
|
||||
sql.push(`comment on table ${table.getQuotedName()} is ${comment}`);
|
||||
}
|
||||
for (const column of table.getColumns()) {
|
||||
if (column.comment) {
|
||||
const comment = this.platform.quoteValue(this.processComment(column.comment));
|
||||
sql.push(`comment on column ${table.getQuotedName()}.${this.quote(column.name)} is ${comment}`);
|
||||
}
|
||||
}
|
||||
return sql;
|
||||
}
|
||||
getDatabaseExistsSQL(name) {
|
||||
return `select 1 from pg_database where datname = '${name}'`;
|
||||
}
|
||||
getDatabaseNotExistsError(dbName) {
|
||||
return `database ${this.quote(dbName)} does not exist`;
|
||||
}
|
||||
getManagementDbName() {
|
||||
return this.platform.getConfig().get('schemaGenerator', {}).managementDbName ?? 'postgres';
|
||||
}
|
||||
disableForeignKeysSQL() {
|
||||
return `set session_replication_role = 'replica';`;
|
||||
}
|
||||
enableForeignKeysSQL() {
|
||||
return `set session_replication_role = 'origin';`;
|
||||
}
|
||||
getRenameIndexSQL(tableName, index, oldIndexName) {
|
||||
oldIndexName = this.quote(oldIndexName);
|
||||
const keyName = this.quote(index.keyName);
|
||||
return [`alter index ${oldIndexName} rename to ${keyName}`];
|
||||
}
|
||||
dropIndex(table, index, oldIndexName = index.keyName) {
|
||||
if (index.primary || (index.unique && index.constraint)) {
|
||||
return `alter table ${this.quote(table)} drop constraint ${this.quote(oldIndexName)}`;
|
||||
}
|
||||
return `drop index ${this.quote(oldIndexName)}`;
|
||||
}
|
||||
/**
|
||||
* Build the column list for a PostgreSQL index.
|
||||
*/
|
||||
getIndexColumns(index) {
|
||||
if (index.columns?.length) {
|
||||
return index.columns
|
||||
.map(col => {
|
||||
let colDef = this.quote(col.name);
|
||||
// PostgreSQL supports collation with double quotes
|
||||
if (col.collation) {
|
||||
colDef += ` collate ${this.quote(col.collation)}`;
|
||||
}
|
||||
// PostgreSQL supports sort order
|
||||
if (col.sort) {
|
||||
colDef += ` ${col.sort}`;
|
||||
}
|
||||
// PostgreSQL supports NULLS FIRST/LAST
|
||||
if (col.nulls) {
|
||||
colDef += ` nulls ${col.nulls}`;
|
||||
}
|
||||
return colDef;
|
||||
})
|
||||
.join(', ');
|
||||
}
|
||||
return index.columnNames.map(c => this.quote(c)).join(', ');
|
||||
}
|
||||
/**
|
||||
* PostgreSQL-specific index options like fill factor.
|
||||
*/
|
||||
getCreateIndexSuffix(index) {
|
||||
const withOptions = [];
|
||||
if (index.fillFactor != null) {
|
||||
withOptions.push(`fillfactor = ${index.fillFactor}`);
|
||||
}
|
||||
if (withOptions.length > 0) {
|
||||
return ` with (${withOptions.join(', ')})`;
|
||||
}
|
||||
return super.getCreateIndexSuffix(index);
|
||||
}
|
||||
getIndexesSQL(tables) {
|
||||
return `select indrelid::regclass as table_name, ns.nspname as schema_name, relname as constraint_name, idx.indisunique as unique, idx.indisprimary as primary, contype, condeferrable, condeferred,
|
||||
array(
|
||||
select pg_get_indexdef(idx.indexrelid, k + 1, true)
|
||||
from generate_subscripts(idx.indkey, 1) as k
|
||||
order by k
|
||||
) as index_def,
|
||||
pg_get_indexdef(idx.indexrelid) as expression,
|
||||
c.condeferrable as deferrable,
|
||||
c.condeferred as initially_deferred,
|
||||
i.reloptions,
|
||||
am.amname as index_type
|
||||
from pg_index idx
|
||||
join pg_class as i on i.oid = idx.indexrelid
|
||||
join pg_namespace as ns on i.relnamespace = ns.oid
|
||||
join pg_am as am on am.oid = i.relam
|
||||
left join pg_constraint as c on c.conname = i.relname
|
||||
where indrelid in (${tables.map(t => `${this.platform.quoteValue(`${this.quote(t.schema_name)}.${this.quote(t.table_name)}`)}::regclass`).join(', ')})
|
||||
order by relname`;
|
||||
}
|
||||
getChecksSQL(tablesBySchemas) {
|
||||
return `select ccu.table_name as table_name, ccu.table_schema as schema_name, pgc.conname as name, conrelid::regclass as table_from, ccu.column_name as column_name, pg_get_constraintdef(pgc.oid) as expression
|
||||
from pg_constraint pgc
|
||||
join pg_namespace nsp on nsp.oid = pgc.connamespace
|
||||
join pg_class cls on pgc.conrelid = cls.oid
|
||||
join information_schema.constraint_column_usage ccu on pgc.conname = ccu.constraint_name and nsp.nspname = ccu.constraint_schema and cls.relname = ccu.table_name
|
||||
where contype = 'c' and (${[...tablesBySchemas.entries()].map(([schema, tables]) => `ccu.table_name in (${tables.map(t => this.platform.quoteValue(t.table_name)).join(',')}) and ccu.table_schema = ${this.platform.quoteValue(schema)}`).join(' or ')})
|
||||
order by pgc.conname`;
|
||||
}
|
||||
inferLengthFromColumnType(type) {
|
||||
const match = /^(\w+(?:\s+\w+)*)\s*(?:\(\s*(\d+)\s*\)|$)/.exec(type);
|
||||
if (!match) {
|
||||
return;
|
||||
}
|
||||
if (!match[2]) {
|
||||
switch (match[1]) {
|
||||
case 'character varying':
|
||||
case 'varchar':
|
||||
case 'bpchar':
|
||||
case 'char':
|
||||
case 'character':
|
||||
return -1;
|
||||
case 'interval':
|
||||
case 'time':
|
||||
case 'timestamp':
|
||||
case 'timestamptz':
|
||||
return this.platform.getDefaultDateTimeLength();
|
||||
}
|
||||
return;
|
||||
}
|
||||
return +match[2];
|
||||
}
|
||||
}
|
||||
4
node_modules/@mikro-orm/sql/dialects/postgresql/index.d.ts
generated
vendored
Normal file
4
node_modules/@mikro-orm/sql/dialects/postgresql/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
export * from './PostgreSqlNativeQueryBuilder.js';
|
||||
export * from './BasePostgreSqlPlatform.js';
|
||||
export * from './FullTextType.js';
|
||||
export * from './PostgreSqlSchemaHelper.js';
|
||||
4
node_modules/@mikro-orm/sql/dialects/postgresql/index.js
generated
vendored
Normal file
4
node_modules/@mikro-orm/sql/dialects/postgresql/index.js
generated
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
export * from './PostgreSqlNativeQueryBuilder.js';
|
||||
export * from './BasePostgreSqlPlatform.js';
|
||||
export * from './FullTextType.js';
|
||||
export * from './PostgreSqlSchemaHelper.js';
|
||||
8
node_modules/@mikro-orm/sql/dialects/sqlite/BaseSqliteConnection.d.ts
generated
vendored
Normal file
8
node_modules/@mikro-orm/sql/dialects/sqlite/BaseSqliteConnection.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
import { type Dialect } from 'kysely';
|
||||
import type { Dictionary } from '@mikro-orm/core';
|
||||
import { AbstractSqlConnection } from '../../AbstractSqlConnection.js';
|
||||
export declare class BaseSqliteConnection extends AbstractSqlConnection {
|
||||
createKyselyDialect(options: Dictionary): Dialect;
|
||||
connect(options?: { skipOnConnect?: boolean }): Promise<void>;
|
||||
protected attachDatabases(): Promise<void>;
|
||||
}
|
||||
27
node_modules/@mikro-orm/sql/dialects/sqlite/BaseSqliteConnection.js
generated
vendored
Normal file
27
node_modules/@mikro-orm/sql/dialects/sqlite/BaseSqliteConnection.js
generated
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
import { CompiledQuery } from 'kysely';
|
||||
import { AbstractSqlConnection } from '../../AbstractSqlConnection.js';
|
||||
export class BaseSqliteConnection extends AbstractSqlConnection {
|
||||
createKyselyDialect(options) {
|
||||
throw new Error(
|
||||
'No SQLite dialect configured. Pass a Kysely dialect via the `driverOptions` config option, ' +
|
||||
'e.g. `new NodeSqliteDialect(...)` for node:sqlite or a custom dialect for other libraries.',
|
||||
);
|
||||
}
|
||||
async connect(options) {
|
||||
await super.connect(options);
|
||||
await this.getClient().executeQuery(CompiledQuery.raw('pragma foreign_keys = on'));
|
||||
await this.attachDatabases();
|
||||
}
|
||||
async attachDatabases() {
|
||||
const attachDatabases = this.config.get('attachDatabases');
|
||||
if (!attachDatabases?.length) {
|
||||
return;
|
||||
}
|
||||
const { fs } = await import('@mikro-orm/core/fs-utils');
|
||||
const baseDir = this.config.get('baseDir');
|
||||
for (const db of attachDatabases) {
|
||||
const path = fs.absolutePath(db.path, baseDir);
|
||||
await this.execute(`attach database '${path}' as ${this.platform.quoteIdentifier(db.name)}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
21
node_modules/@mikro-orm/sql/dialects/sqlite/NodeSqliteDialect.d.ts
generated
vendored
Normal file
21
node_modules/@mikro-orm/sql/dialects/sqlite/NodeSqliteDialect.d.ts
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
import { SqliteDialect } from 'kysely';
|
||||
/**
|
||||
* Kysely dialect for `node:sqlite` (Node.js 22.5+, Deno 2.2+).
|
||||
*
|
||||
* Bridges `node:sqlite`'s `DatabaseSync` to the `better-sqlite3` interface
|
||||
* that Kysely's `SqliteDialect` expects.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* import { SqliteDriver, NodeSqliteDialect } from '@mikro-orm/sql';
|
||||
*
|
||||
* const orm = await MikroORM.init({
|
||||
* driver: SqliteDriver,
|
||||
* dbName: ':memory:',
|
||||
* driverOptions: new NodeSqliteDialect(':memory:'),
|
||||
* });
|
||||
* ```
|
||||
*/
|
||||
export declare class NodeSqliteDialect extends SqliteDialect {
|
||||
constructor(dbName: string);
|
||||
}
|
||||
43
node_modules/@mikro-orm/sql/dialects/sqlite/NodeSqliteDialect.js
generated
vendored
Normal file
43
node_modules/@mikro-orm/sql/dialects/sqlite/NodeSqliteDialect.js
generated
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
import { SqliteDialect } from 'kysely';
|
||||
/**
|
||||
* Kysely dialect for `node:sqlite` (Node.js 22.5+, Deno 2.2+).
|
||||
*
|
||||
* Bridges `node:sqlite`'s `DatabaseSync` to the `better-sqlite3` interface
|
||||
* that Kysely's `SqliteDialect` expects.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* import { SqliteDriver, NodeSqliteDialect } from '@mikro-orm/sql';
|
||||
*
|
||||
* const orm = await MikroORM.init({
|
||||
* driver: SqliteDriver,
|
||||
* dbName: ':memory:',
|
||||
* driverOptions: new NodeSqliteDialect(':memory:'),
|
||||
* });
|
||||
* ```
|
||||
*/
|
||||
export class NodeSqliteDialect extends SqliteDialect {
|
||||
constructor(dbName) {
|
||||
const { DatabaseSync } = globalThis.process.getBuiltinModule('node:sqlite');
|
||||
super({
|
||||
database: () => {
|
||||
const db = new DatabaseSync(dbName);
|
||||
return {
|
||||
prepare(sql) {
|
||||
const stmt = db.prepare(sql);
|
||||
return {
|
||||
reader: /^\s*(select|pragma|explain|with)/i.test(sql) || /\breturning\b/i.test(sql),
|
||||
all: params => stmt.all(...params),
|
||||
run: params => stmt.run(...params),
|
||||
/* v8 ignore next */
|
||||
get: params => stmt.get(...params),
|
||||
};
|
||||
},
|
||||
close() {
|
||||
db.close();
|
||||
},
|
||||
};
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
12
node_modules/@mikro-orm/sql/dialects/sqlite/SqliteDriver.d.ts
generated
vendored
Normal file
12
node_modules/@mikro-orm/sql/dialects/sqlite/SqliteDriver.d.ts
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
import type { Configuration } from '@mikro-orm/core';
|
||||
import { AbstractSqlDriver } from '../../AbstractSqlDriver.js';
|
||||
import { BaseSqliteConnection } from './BaseSqliteConnection.js';
|
||||
/**
|
||||
* Generic SQLite driver that uses `driverOptions` for the Kysely dialect.
|
||||
* Use this with any SQLite library by passing a Kysely dialect via `driverOptions`.
|
||||
*
|
||||
* For the default better-sqlite3 experience, use `@mikro-orm/sqlite` instead.
|
||||
*/
|
||||
export declare class SqliteDriver extends AbstractSqlDriver<BaseSqliteConnection> {
|
||||
constructor(config: Configuration);
|
||||
}
|
||||
14
node_modules/@mikro-orm/sql/dialects/sqlite/SqliteDriver.js
generated
vendored
Normal file
14
node_modules/@mikro-orm/sql/dialects/sqlite/SqliteDriver.js
generated
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
import { AbstractSqlDriver } from '../../AbstractSqlDriver.js';
|
||||
import { BaseSqliteConnection } from './BaseSqliteConnection.js';
|
||||
import { SqlitePlatform } from './SqlitePlatform.js';
|
||||
/**
|
||||
* Generic SQLite driver that uses `driverOptions` for the Kysely dialect.
|
||||
* Use this with any SQLite library by passing a Kysely dialect via `driverOptions`.
|
||||
*
|
||||
* For the default better-sqlite3 experience, use `@mikro-orm/sqlite` instead.
|
||||
*/
|
||||
export class SqliteDriver extends AbstractSqlDriver {
|
||||
constructor(config) {
|
||||
super(config, new SqlitePlatform(), BaseSqliteConnection, ['kysely']);
|
||||
}
|
||||
}
|
||||
9
node_modules/@mikro-orm/sql/dialects/sqlite/SqliteExceptionConverter.d.ts
generated
vendored
Normal file
9
node_modules/@mikro-orm/sql/dialects/sqlite/SqliteExceptionConverter.d.ts
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
import { ExceptionConverter, type Dictionary, type DriverException } from '@mikro-orm/core';
|
||||
export declare class SqliteExceptionConverter extends ExceptionConverter {
|
||||
/**
|
||||
* @inheritDoc
|
||||
* @see http://www.sqlite.org/c3ref/c_abort.html
|
||||
* @see https://github.com/doctrine/dbal/blob/master/src/Driver/AbstractSQLiteDriver.php
|
||||
*/
|
||||
convertException(exception: Error & Dictionary): DriverException;
|
||||
}
|
||||
70
node_modules/@mikro-orm/sql/dialects/sqlite/SqliteExceptionConverter.js
generated
vendored
Normal file
70
node_modules/@mikro-orm/sql/dialects/sqlite/SqliteExceptionConverter.js
generated
vendored
Normal file
@@ -0,0 +1,70 @@
|
||||
import {
|
||||
ConnectionException,
|
||||
ExceptionConverter,
|
||||
InvalidFieldNameException,
|
||||
LockWaitTimeoutException,
|
||||
NonUniqueFieldNameException,
|
||||
CheckConstraintViolationException,
|
||||
NotNullConstraintViolationException,
|
||||
ReadOnlyException,
|
||||
SyntaxErrorException,
|
||||
TableExistsException,
|
||||
TableNotFoundException,
|
||||
UniqueConstraintViolationException,
|
||||
ForeignKeyConstraintViolationException,
|
||||
} from '@mikro-orm/core';
|
||||
export class SqliteExceptionConverter extends ExceptionConverter {
|
||||
/**
|
||||
* @inheritDoc
|
||||
* @see http://www.sqlite.org/c3ref/c_abort.html
|
||||
* @see https://github.com/doctrine/dbal/blob/master/src/Driver/AbstractSQLiteDriver.php
|
||||
*/
|
||||
convertException(exception) {
|
||||
/* v8 ignore next */
|
||||
if (exception.message.includes('database is locked')) {
|
||||
return new LockWaitTimeoutException(exception);
|
||||
}
|
||||
if (
|
||||
exception.message.includes('must be unique') ||
|
||||
exception.message.includes('is not unique') ||
|
||||
exception.message.includes('are not unique') ||
|
||||
exception.message.includes('UNIQUE constraint failed')
|
||||
) {
|
||||
return new UniqueConstraintViolationException(exception);
|
||||
}
|
||||
if (exception.message.includes('may not be NULL') || exception.message.includes('NOT NULL constraint failed')) {
|
||||
return new NotNullConstraintViolationException(exception);
|
||||
}
|
||||
/* v8 ignore next */
|
||||
if (exception.message.includes('CHECK constraint failed')) {
|
||||
return new CheckConstraintViolationException(exception);
|
||||
}
|
||||
if (exception.message.includes('no such table:')) {
|
||||
return new TableNotFoundException(exception);
|
||||
}
|
||||
if (exception.message.includes('already exists')) {
|
||||
return new TableExistsException(exception);
|
||||
}
|
||||
if (exception.message.includes('no such column:')) {
|
||||
return new InvalidFieldNameException(exception);
|
||||
}
|
||||
if (exception.message.includes('ambiguous column name')) {
|
||||
return new NonUniqueFieldNameException(exception);
|
||||
}
|
||||
if (exception.message.includes('syntax error')) {
|
||||
return new SyntaxErrorException(exception);
|
||||
}
|
||||
/* v8 ignore next */
|
||||
if (exception.message.includes('attempt to write a readonly database')) {
|
||||
return new ReadOnlyException(exception);
|
||||
}
|
||||
/* v8 ignore next */
|
||||
if (exception.message.includes('unable to open database file')) {
|
||||
return new ConnectionException(exception);
|
||||
}
|
||||
if (exception.message.includes('FOREIGN KEY constraint failed')) {
|
||||
return new ForeignKeyConstraintViolationException(exception);
|
||||
}
|
||||
return super.convertException(exception);
|
||||
}
|
||||
}
|
||||
6
node_modules/@mikro-orm/sql/dialects/sqlite/SqliteNativeQueryBuilder.d.ts
generated
vendored
Normal file
6
node_modules/@mikro-orm/sql/dialects/sqlite/SqliteNativeQueryBuilder.d.ts
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
import { NativeQueryBuilder } from '../../query/NativeQueryBuilder.js';
|
||||
/** @internal */
|
||||
export declare class SqliteNativeQueryBuilder extends NativeQueryBuilder {
|
||||
protected compileTruncate(): void;
|
||||
protected addLockClause(): void;
|
||||
}
|
||||
11
node_modules/@mikro-orm/sql/dialects/sqlite/SqliteNativeQueryBuilder.js
generated
vendored
Normal file
11
node_modules/@mikro-orm/sql/dialects/sqlite/SqliteNativeQueryBuilder.js
generated
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
import { NativeQueryBuilder } from '../../query/NativeQueryBuilder.js';
|
||||
/** @internal */
|
||||
export class SqliteNativeQueryBuilder extends NativeQueryBuilder {
|
||||
compileTruncate() {
|
||||
const sql = `delete from ${this.getTableName()}`;
|
||||
this.parts.push(sql);
|
||||
}
|
||||
addLockClause() {
|
||||
return; // not supported
|
||||
}
|
||||
}
|
||||
71
node_modules/@mikro-orm/sql/dialects/sqlite/SqlitePlatform.d.ts
generated
vendored
Normal file
71
node_modules/@mikro-orm/sql/dialects/sqlite/SqlitePlatform.d.ts
generated
vendored
Normal file
@@ -0,0 +1,71 @@
|
||||
import { type EntityProperty, type IsolationLevel } from '@mikro-orm/core';
|
||||
import { AbstractSqlPlatform } from '../../AbstractSqlPlatform.js';
|
||||
import { SqliteNativeQueryBuilder } from './SqliteNativeQueryBuilder.js';
|
||||
import { SqliteSchemaHelper } from './SqliteSchemaHelper.js';
|
||||
import { SqliteExceptionConverter } from './SqliteExceptionConverter.js';
|
||||
export declare class SqlitePlatform extends AbstractSqlPlatform {
|
||||
protected readonly schemaHelper: SqliteSchemaHelper;
|
||||
protected readonly exceptionConverter: SqliteExceptionConverter;
|
||||
/** @internal */
|
||||
createNativeQueryBuilder(): SqliteNativeQueryBuilder;
|
||||
usesDefaultKeyword(): boolean;
|
||||
usesReturningStatement(): boolean;
|
||||
usesEnumCheckConstraints(): boolean;
|
||||
getCurrentTimestampSQL(length: number): string;
|
||||
getDateTimeTypeDeclarationSQL(column: { length: number }): string;
|
||||
getBeginTransactionSQL(options?: { isolationLevel?: IsolationLevel; readOnly?: boolean }): string[];
|
||||
getEnumTypeDeclarationSQL(column: {
|
||||
items?: unknown[];
|
||||
fieldNames: string[];
|
||||
length?: number;
|
||||
unsigned?: boolean;
|
||||
autoincrement?: boolean;
|
||||
}): string;
|
||||
getTinyIntTypeDeclarationSQL(column: { length?: number; unsigned?: boolean; autoincrement?: boolean }): string;
|
||||
getSmallIntTypeDeclarationSQL(column: { length?: number; unsigned?: boolean; autoincrement?: boolean }): string;
|
||||
getIntegerTypeDeclarationSQL(column: { length?: number; unsigned?: boolean; autoincrement?: boolean }): string;
|
||||
getFloatDeclarationSQL(): string;
|
||||
getBooleanTypeDeclarationSQL(): string;
|
||||
getCharTypeDeclarationSQL(column: { length?: number }): string;
|
||||
getVarcharTypeDeclarationSQL(column: { length?: number }): string;
|
||||
normalizeColumnType(
|
||||
type: string,
|
||||
options: {
|
||||
length?: number;
|
||||
precision?: number;
|
||||
scale?: number;
|
||||
},
|
||||
): string;
|
||||
convertsJsonAutomatically(): boolean;
|
||||
/**
|
||||
* This is used to narrow the value of Date properties as they will be stored as timestamps in sqlite.
|
||||
* We use this method to convert Dates to timestamps when computing the changeset, so we have the right
|
||||
* data type in the payload as well as in original entity data. Without that, we would end up with diffs
|
||||
* including all Date properties, as we would be comparing Date object with timestamp.
|
||||
*/
|
||||
processDateProperty(value: unknown): string | number | Date;
|
||||
getIndexName(
|
||||
tableName: string,
|
||||
columns: string[],
|
||||
type: 'index' | 'unique' | 'foreign' | 'primary' | 'sequence',
|
||||
): string;
|
||||
supportsDeferredUniqueConstraints(): boolean;
|
||||
/**
|
||||
* SQLite supports schemas via ATTACH DATABASE. Returns true when there are
|
||||
* attached databases configured.
|
||||
*/
|
||||
supportsSchemas(): boolean;
|
||||
getDefaultSchemaName(): string | undefined;
|
||||
getFullTextWhereClause(): string;
|
||||
escape(value: any): string;
|
||||
convertVersionValue(
|
||||
value: Date | number,
|
||||
prop: EntityProperty,
|
||||
):
|
||||
| number
|
||||
| {
|
||||
$in: (string | number)[];
|
||||
};
|
||||
getJsonArrayElementPropertySQL(alias: string, property: string, _type: string): string;
|
||||
quoteValue(value: any): string;
|
||||
}
|
||||
145
node_modules/@mikro-orm/sql/dialects/sqlite/SqlitePlatform.js
generated
vendored
Normal file
145
node_modules/@mikro-orm/sql/dialects/sqlite/SqlitePlatform.js
generated
vendored
Normal file
@@ -0,0 +1,145 @@
|
||||
import { AbstractSqlPlatform } from '../../AbstractSqlPlatform.js';
|
||||
import { SqliteNativeQueryBuilder } from './SqliteNativeQueryBuilder.js';
|
||||
import { SqliteSchemaHelper } from './SqliteSchemaHelper.js';
|
||||
import { SqliteExceptionConverter } from './SqliteExceptionConverter.js';
|
||||
export class SqlitePlatform extends AbstractSqlPlatform {
|
||||
schemaHelper = new SqliteSchemaHelper(this);
|
||||
exceptionConverter = new SqliteExceptionConverter();
|
||||
/** @internal */
|
||||
createNativeQueryBuilder() {
|
||||
return new SqliteNativeQueryBuilder(this);
|
||||
}
|
||||
usesDefaultKeyword() {
|
||||
return false;
|
||||
}
|
||||
usesReturningStatement() {
|
||||
return true;
|
||||
}
|
||||
usesEnumCheckConstraints() {
|
||||
return true;
|
||||
}
|
||||
getCurrentTimestampSQL(length) {
|
||||
return `(strftime('%s', 'now') * 1000)`;
|
||||
}
|
||||
getDateTimeTypeDeclarationSQL(column) {
|
||||
return 'datetime';
|
||||
}
|
||||
getBeginTransactionSQL(options) {
|
||||
return ['begin'];
|
||||
}
|
||||
getEnumTypeDeclarationSQL(column) {
|
||||
if (column.items?.every(item => typeof item === 'string')) {
|
||||
return 'text';
|
||||
}
|
||||
/* v8 ignore next */
|
||||
return this.getTinyIntTypeDeclarationSQL(column);
|
||||
}
|
||||
getTinyIntTypeDeclarationSQL(column) {
|
||||
return this.getIntegerTypeDeclarationSQL(column);
|
||||
}
|
||||
getSmallIntTypeDeclarationSQL(column) {
|
||||
return this.getIntegerTypeDeclarationSQL(column);
|
||||
}
|
||||
getIntegerTypeDeclarationSQL(column) {
|
||||
return 'integer';
|
||||
}
|
||||
getFloatDeclarationSQL() {
|
||||
return 'real';
|
||||
}
|
||||
getBooleanTypeDeclarationSQL() {
|
||||
return 'integer';
|
||||
}
|
||||
getCharTypeDeclarationSQL(column) {
|
||||
return 'text';
|
||||
}
|
||||
getVarcharTypeDeclarationSQL(column) {
|
||||
return 'text';
|
||||
}
|
||||
normalizeColumnType(type, options) {
|
||||
const simpleType = this.extractSimpleType(type);
|
||||
if (['varchar', 'text'].includes(simpleType)) {
|
||||
return this.getVarcharTypeDeclarationSQL(options);
|
||||
}
|
||||
return simpleType;
|
||||
}
|
||||
convertsJsonAutomatically() {
|
||||
return false;
|
||||
}
|
||||
/**
|
||||
* This is used to narrow the value of Date properties as they will be stored as timestamps in sqlite.
|
||||
* We use this method to convert Dates to timestamps when computing the changeset, so we have the right
|
||||
* data type in the payload as well as in original entity data. Without that, we would end up with diffs
|
||||
* including all Date properties, as we would be comparing Date object with timestamp.
|
||||
*/
|
||||
processDateProperty(value) {
|
||||
if (value instanceof Date) {
|
||||
return +value;
|
||||
}
|
||||
return value;
|
||||
}
|
||||
getIndexName(tableName, columns, type) {
|
||||
if (type === 'primary') {
|
||||
return this.getDefaultPrimaryName(tableName, columns);
|
||||
}
|
||||
return super.getIndexName(tableName, columns, type);
|
||||
}
|
||||
supportsDeferredUniqueConstraints() {
|
||||
return false;
|
||||
}
|
||||
/**
|
||||
* SQLite supports schemas via ATTACH DATABASE. Returns true when there are
|
||||
* attached databases configured.
|
||||
*/
|
||||
supportsSchemas() {
|
||||
const attachDatabases = this.config.get('attachDatabases');
|
||||
return !!attachDatabases?.length;
|
||||
}
|
||||
getDefaultSchemaName() {
|
||||
// Return 'main' only when schema support is active (i.e., databases are attached)
|
||||
return this.supportsSchemas() ? 'main' : undefined;
|
||||
}
|
||||
getFullTextWhereClause() {
|
||||
return `:column: match :query`;
|
||||
}
|
||||
escape(value) {
|
||||
if (value == null) {
|
||||
return 'null';
|
||||
}
|
||||
if (typeof value === 'boolean') {
|
||||
return value ? 'true' : 'false';
|
||||
}
|
||||
if (typeof value === 'number' || typeof value === 'bigint') {
|
||||
return '' + value;
|
||||
}
|
||||
if (value instanceof Date) {
|
||||
return '' + +value;
|
||||
}
|
||||
if (Array.isArray(value)) {
|
||||
return value.map(v => this.escape(v)).join(', ');
|
||||
}
|
||||
if (Buffer.isBuffer(value)) {
|
||||
return `X'${value.toString('hex')}'`;
|
||||
}
|
||||
return `'${String(value).replace(/'/g, "''")}'`;
|
||||
}
|
||||
convertVersionValue(value, prop) {
|
||||
if (prop.runtimeType === 'Date') {
|
||||
const ts = +value;
|
||||
const str = new Date(ts)
|
||||
.toISOString()
|
||||
.replace('T', ' ')
|
||||
.replace(/\.\d{3}Z$/, '');
|
||||
return { $in: [ts, str] };
|
||||
}
|
||||
return value;
|
||||
}
|
||||
getJsonArrayElementPropertySQL(alias, property, _type) {
|
||||
return `json_extract(${this.quoteIdentifier(alias)}.value, '$.${this.quoteJsonKey(property)}')`;
|
||||
}
|
||||
quoteValue(value) {
|
||||
if (value instanceof Date) {
|
||||
return '' + +value;
|
||||
}
|
||||
return super.quoteValue(value);
|
||||
}
|
||||
}
|
||||
78
node_modules/@mikro-orm/sql/dialects/sqlite/SqliteSchemaHelper.d.ts
generated
vendored
Normal file
78
node_modules/@mikro-orm/sql/dialects/sqlite/SqliteSchemaHelper.d.ts
generated
vendored
Normal file
@@ -0,0 +1,78 @@
|
||||
import { type Connection } from '@mikro-orm/core';
|
||||
import type { AbstractSqlConnection } from '../../AbstractSqlConnection.js';
|
||||
import { SchemaHelper } from '../../schema/SchemaHelper.js';
|
||||
import type { Column, IndexDef, Table, TableDifference } from '../../typings.js';
|
||||
import type { DatabaseTable } from '../../schema/DatabaseTable.js';
|
||||
import type { DatabaseSchema } from '../../schema/DatabaseSchema.js';
|
||||
export declare class SqliteSchemaHelper extends SchemaHelper {
|
||||
disableForeignKeysSQL(): string;
|
||||
enableForeignKeysSQL(): string;
|
||||
supportsSchemaConstraints(): boolean;
|
||||
getCreateNamespaceSQL(name: string): string;
|
||||
getDropNamespaceSQL(name: string): string;
|
||||
getListTablesSQL(): string;
|
||||
getAllTables(connection: AbstractSqlConnection, schemas?: string[]): Promise<Table[]>;
|
||||
getNamespaces(connection: AbstractSqlConnection): Promise<string[]>;
|
||||
private getIgnoredViewsCondition;
|
||||
getListViewsSQL(): string;
|
||||
loadViews(schema: DatabaseSchema, connection: AbstractSqlConnection, schemaName?: string): Promise<void>;
|
||||
getDropDatabaseSQL(name: string): string;
|
||||
loadInformationSchema(
|
||||
schema: DatabaseSchema,
|
||||
connection: AbstractSqlConnection,
|
||||
tables: Table[],
|
||||
schemas?: string[],
|
||||
): Promise<void>;
|
||||
createTable(table: DatabaseTable, alter?: boolean): string[];
|
||||
createTableColumn(column: Column, table: DatabaseTable, _changedProperties?: Set<string>): string | undefined;
|
||||
getAddColumnsSQL(table: DatabaseTable, columns: Column[], diff?: TableDifference): string[];
|
||||
dropForeignKey(tableName: string, constraintName: string): string;
|
||||
getDropColumnsSQL(tableName: string, columns: Column[], schemaName?: string): string;
|
||||
getCreateIndexSQL(tableName: string, index: IndexDef): string;
|
||||
private parseTableDefinition;
|
||||
/**
|
||||
* Returns schema prefix for pragma and sqlite_master queries.
|
||||
* Returns empty string for main database (no prefix needed).
|
||||
*/
|
||||
private getSchemaPrefix;
|
||||
/**
|
||||
* Returns all database names excluding 'temp'.
|
||||
*/
|
||||
private getDatabaseList;
|
||||
/**
|
||||
* Extracts the SELECT part from a CREATE VIEW statement.
|
||||
*/
|
||||
private extractViewDefinition;
|
||||
private getColumns;
|
||||
/**
|
||||
* SQLite strips outer parentheses from expression defaults (`DEFAULT (expr)` → `expr` in pragma).
|
||||
* We need to add them back so they match what we generate in DDL.
|
||||
*/
|
||||
private wrapExpressionDefault;
|
||||
private getEnumDefinitions;
|
||||
getPrimaryKeys(
|
||||
connection: AbstractSqlConnection,
|
||||
indexes: IndexDef[],
|
||||
tableName: string,
|
||||
schemaName?: string,
|
||||
): Promise<string[]>;
|
||||
private getIndexes;
|
||||
private getChecks;
|
||||
private getColumnDefinitions;
|
||||
private getForeignKeys;
|
||||
getManagementDbName(): string;
|
||||
getCreateDatabaseSQL(name: string): string;
|
||||
databaseExists(connection: Connection, name: string): Promise<boolean>;
|
||||
/**
|
||||
* Implicit indexes will be ignored when diffing
|
||||
*/
|
||||
isImplicitIndex(name: string): boolean;
|
||||
dropIndex(table: string, index: IndexDef, oldIndexName?: string): string;
|
||||
/**
|
||||
* SQLite does not support schema-qualified table names in REFERENCES clauses.
|
||||
* Foreign key references can only point to tables in the same database.
|
||||
*/
|
||||
getReferencedTableName(referencedTableName: string, schema?: string): string;
|
||||
alterTable(diff: TableDifference, safe?: boolean): string[];
|
||||
private getAlterTempTableSQL;
|
||||
}
|
||||
543
node_modules/@mikro-orm/sql/dialects/sqlite/SqliteSchemaHelper.js
generated
vendored
Normal file
543
node_modules/@mikro-orm/sql/dialects/sqlite/SqliteSchemaHelper.js
generated
vendored
Normal file
@@ -0,0 +1,543 @@
|
||||
import { Utils } from '@mikro-orm/core';
|
||||
import { SchemaHelper } from '../../schema/SchemaHelper.js';
|
||||
/**
 * SpatiaLite system views that should be automatically ignored.
 *
 * These views are created by the SpatiaLite extension for its own metadata
 * bookkeeping; they are filtered out of view introspection so they never show
 * up as user-defined views (see `getIgnoredViewsCondition`). The order of the
 * entries determines the order of the generated `name != '...'` conditions.
 */
const SPATIALITE_VIEWS = [
    'geometry_columns',
    'spatial_ref_sys',
    'views_geometry_columns',
    'virts_geometry_columns',
    'geom_cols_ref_sys',
    'spatial_ref_sys_aux',
    'vector_layers',
    'vector_layers_auth',
    'vector_layers_field_infos',
    'vector_layers_statistics',
    'ElementaryGeometries',
];
||||
/**
 * Schema helper for SQLite.
 *
 * SQLite has no information-schema tables, so introspection is done via
 * `pragma` statements (`table_xinfo`, `index_list`, `foreign_key_list`, ...)
 * and by parsing the raw `CREATE TABLE` SQL stored in `sqlite_master`.
 * Attached databases play the role of schemas/namespaces throughout.
 */
export class SqliteSchemaHelper extends SchemaHelper {
    /** SQL statement that turns off foreign-key enforcement on a connection. */
    disableForeignKeysSQL() {
        return 'pragma foreign_keys = off;';
    }

    /** SQL statement that turns foreign-key enforcement back on. */
    enableForeignKeysSQL() {
        return 'pragma foreign_keys = on;';
    }

    /** Schema-level constraint support is disabled for this dialect. */
    supportsSchemaConstraints() {
        return false;
    }

    /** No-op: namespaces (attached databases) are not created via DDL here. */
    getCreateNamespaceSQL(name) {
        return '';
    }

    /** No-op: namespaces (attached databases) are not dropped via DDL here. */
    getDropNamespaceSQL(name) {
        return '';
    }

    /**
     * Lists user tables from both the persistent and the temporary database,
     * skipping SQLite's internal `sqlite_sequence` table and the two
     * SpatiaLite metadata tables.
     */
    getListTablesSQL() {
        return (
            `select name as table_name from sqlite_master where type = 'table' and name != 'sqlite_sequence' and name != 'geometry_columns' and name != 'spatial_ref_sys' ` +
            `union all select name as table_name from sqlite_temp_master where type = 'table' order by name`
        );
    }

    /**
     * Returns all tables, covering attached databases as separate schemas.
     *
     * When nothing is attached (only 'main') and no schema filter is given,
     * falls back to the single-query `getListTablesSQL()` path; otherwise each
     * target database's `sqlite_master` is queried separately and results are
     * tagged with their `schema_name`.
     */
    async getAllTables(connection, schemas) {
        const databases = await this.getDatabaseList(connection);
        const hasAttachedDbs = databases.length > 1; // More than just 'main'
        // If no attached databases, use original behavior
        if (!hasAttachedDbs && !schemas?.length) {
            return connection.execute(this.getListTablesSQL());
        }
        // With attached databases, query each one
        const targetSchemas = schemas?.length ? schemas : databases;
        const allTables = [];
        for (const dbName of targetSchemas) {
            const prefix = this.getSchemaPrefix(dbName);
            const tables = await connection.execute(
                `select name from ${prefix}sqlite_master where type = 'table' ` +
                `and name != 'sqlite_sequence' and name != 'geometry_columns' and name != 'spatial_ref_sys'`,
            );
            for (const t of tables) {
                allTables.push({ table_name: t.name, schema_name: dbName });
            }
        }
        return allTables;
    }

    /** Namespaces are the attached database names (excluding 'temp'). */
    async getNamespaces(connection) {
        return this.getDatabaseList(connection);
    }

    /** Builds the `name != '...' and ...` filter excluding SpatiaLite views. */
    getIgnoredViewsCondition() {
        return SPATIALITE_VIEWS.map(v => `name != '${v}'`).join(' and ');
    }

    /** Lists user views (name + raw CREATE VIEW SQL) from `sqlite_master`. */
    getListViewsSQL() {
        return `select name as view_name, sql as view_definition from sqlite_master where type = 'view' and ${this.getIgnoredViewsCondition()} order by name`;
    }

    /**
     * Loads views into the schema snapshot, one pass per attached database.
     * The stored `CREATE VIEW` statement is reduced to its SELECT part via
     * `extractViewDefinition` before being registered.
     */
    async loadViews(schema, connection, schemaName) {
        const databases = await this.getDatabaseList(connection);
        const hasAttachedDbs = databases.length > 1; // More than just 'main'
        // If no attached databases and no specific schema, use original behavior
        if (!hasAttachedDbs && !schemaName) {
            const views = await connection.execute(this.getListViewsSQL());
            for (const view of views) {
                schema.addView(view.view_name, schemaName, this.extractViewDefinition(view.view_definition));
            }
            return;
        }
        // With attached databases, query each one
        /* v8 ignore next - schemaName branch not commonly used */
        const targetDbs = schemaName ? [schemaName] : databases;
        for (const dbName of targetDbs) {
            const prefix = this.getSchemaPrefix(dbName);
            const views = await connection.execute(
                `select name as view_name, sql as view_definition from ${prefix}sqlite_master where type = 'view' and ${this.getIgnoredViewsCondition()} order by name`,
            );
            for (const view of views) {
                schema.addView(view.view_name, dbName, this.extractViewDefinition(view.view_definition));
            }
        }
    }

    /**
     * In-memory databases cannot be dropped, so an empty statement is
     * returned for ':memory:'.
     * NOTE(review): SQLite itself has no `drop database` statement; the
     * fallback below is presumably never executed against a real SQLite
     * connection (it is also excluded from coverage) — confirm with callers.
     */
    getDropDatabaseSQL(name) {
        if (name === ':memory:') {
            return '';
        }
        /* v8 ignore next */
        return `drop database if exists ${this.quote(name)}`;
    }

    /**
     * Populates the schema snapshot for the given tables: columns, indexes,
     * checks, primary keys, foreign keys and enum (check-based) definitions
     * are introspected per table and handed to `table.init`.
     * NOTE(review): tables are processed sequentially, one pragma batch each.
     */
    async loadInformationSchema(schema, connection, tables, schemas) {
        for (const t of tables) {
            const table = schema.addTable(t.table_name, t.schema_name, t.table_comment);
            const cols = await this.getColumns(connection, table.name, table.schema);
            const indexes = await this.getIndexes(connection, table.name, table.schema);
            const checks = await this.getChecks(connection, table.name, table.schema);
            const pks = await this.getPrimaryKeys(connection, indexes, table.name, table.schema);
            const fks = await this.getForeignKeys(connection, table.name, table.schema);
            const enums = await this.getEnumDefinitions(connection, table.name, table.schema);
            table.init(cols, indexes, checks, pks, fks, enums);
        }
    }

    /**
     * Builds the `CREATE TABLE` statement (plus follow-up `CREATE INDEX`
     * statements) for a table. A composite primary key is emitted as a
     * table-level `primary key (...)` clause; single-column PKs are emitted
     * inline by `createTableColumn`. Named check constraints and foreign keys
     * are appended as table-level clauses, and the table comment (SQLite has
     * no COMMENT syntax) is embedded as a trailing SQL comment.
     */
    createTable(table, alter) {
        let sql = `create table ${table.getQuotedName()} (`;
        const columns = table.getColumns();
        // assumes the table has at least one column — TODO confirm upstream guarantees this
        const lastColumn = columns[columns.length - 1].name;
        for (const column of columns) {
            const col = this.createTableColumn(column, table);
            if (col) {
                const comma = column.name === lastColumn ? '' : ', ';
                sql += col + comma;
            }
        }
        const primaryKey = table.getPrimaryKey();
        const createPrimary = primaryKey?.composite;
        if (createPrimary && primaryKey) {
            sql += `, primary key (${primaryKey.columnNames.map(c => this.quote(c)).join(', ')})`;
        }
        const parts = [];
        for (const fk of Object.values(table.getForeignKeys())) {
            parts.push(this.createForeignKey(table, fk, false));
        }
        for (const check of table.getChecks()) {
            // NOTE(review): this inner `sql` shadows the outer accumulator — harmless but confusing
            const sql = `constraint ${this.quote(check.name)} check (${check.expression})`;
            parts.push(sql);
        }
        if (parts.length > 0) {
            sql += ', ' + parts.join(', ');
        }
        sql += ')';
        if (table.comment) {
            sql += ` /* ${table.comment} */`;
        }
        const ret = [];
        this.append(ret, sql);
        for (const index of table.getIndexes()) {
            this.append(ret, this.createIndex(index, table));
        }
        return ret;
    }

    /**
     * Renders one column definition for CREATE TABLE / ADD COLUMN.
     *
     * Autoincrement columns are forced to `integer` (a SQLite requirement for
     * rowid aliasing); generated columns get a `generated always as ...`
     * suffix. A single-column check constraint matching this column is
     * inlined and removed (spliced) from the checks array so `createTable`
     * does not emit it again at table level.
     */
    createTableColumn(column, table, _changedProperties) {
        const col = [this.quote(column.name)];
        const checks = table.getChecks();
        const check = checks.findIndex(check => check.columnName === column.name);
        // 'null' here is the string default produced elsewhere, not the JS value
        const useDefault = column.default != null && column.default !== 'null';
        let columnType = column.type;
        if (column.autoincrement) {
            columnType = 'integer';
        }
        if (column.generated) {
            columnType += ` generated always as ${column.generated}`;
        }
        col.push(columnType);
        if (check !== -1) {
            col.push(`check (${checks[check].expression})`);
            checks.splice(check, 1);
        }
        Utils.runIfNotEmpty(() => col.push('null'), column.nullable);
        Utils.runIfNotEmpty(() => col.push('not null'), !column.nullable && !column.generated);
        Utils.runIfNotEmpty(() => col.push('primary key'), column.primary);
        Utils.runIfNotEmpty(() => col.push('autoincrement'), column.autoincrement);
        Utils.runIfNotEmpty(() => col.push(`default ${column.default}`), useDefault);
        return col.join(' ');
    }

    /**
     * One `alter table ... add column` statement per added column. A
     * single-column FK belonging to the new column is appended inline (and
     * removed from the diff so it is not emitted separately — SQLite cannot
     * add an FK to an existing table otherwise).
     */
    getAddColumnsSQL(table, columns, diff) {
        return columns.map(column => {
            let sql = `alter table ${table.getQuotedName()} add column ${this.createTableColumn(column, table)}`;
            const foreignKey = Object.values(diff.addedForeignKeys).find(
                fk => fk.columnNames.length === 1 && fk.columnNames[0] === column.name,
            );
            if (foreignKey && this.options.createForeignKeyConstraints) {
                delete diff.addedForeignKeys[foreignKey.constraintName];
                sql += ' ' + this.createForeignKey(diff.toTable, foreignKey, false, true);
            }
            return sql;
        });
    }

    /** No-op: SQLite cannot drop an FK constraint in place (needs table rebuild). */
    dropForeignKey(tableName, constraintName) {
        return '';
    }

    /** `alter table ... drop column` statements, joined with ';\n'. */
    getDropColumnsSQL(tableName, columns, schemaName) {
        /* v8 ignore next */
        const name = this.quote(
            (schemaName && schemaName !== this.platform.getDefaultSchemaName() ? schemaName + '.' : '') + tableName,
        );
        return columns
            .map(column => {
                return `alter table ${name} drop column ${this.quote(column.name)}`;
            })
            .join(';\n');
    }

    /**
     * Builds a CREATE INDEX statement. In SQLite the schema qualifier goes on
     * the index name, not the table name, so an attached-database schema is
     * moved onto the index identifier.
     */
    getCreateIndexSQL(tableName, index) {
        /* v8 ignore next */
        if (index.expression) {
            return index.expression;
        }
        // SQLite requires: CREATE INDEX schema.index_name ON table_name (columns)
        // NOT: CREATE INDEX index_name ON schema.table_name (columns)
        const [schemaName, rawTableName] = this.splitTableName(tableName);
        const quotedTableName = this.quote(rawTableName);
        // If there's a schema, prefix the index name with it
        let keyName;
        if (schemaName && schemaName !== 'main') {
            keyName = `${this.quote(schemaName)}.${this.quote(index.keyName)}`;
        } else {
            keyName = this.quote(index.keyName);
        }
        const sqlPrefix = `create ${index.unique ? 'unique ' : ''}index ${keyName} on ${quotedTableName}`;
        /* v8 ignore next 4 */
        if (index.columnNames.some(column => column.includes('.'))) {
            // JSON columns can have unique index but not unique constraint, and we need to distinguish those, so we can properly drop them
            const columns = this.platform.getJsonIndexDefinition(index);
            return `${sqlPrefix} (${columns.join(', ')})`;
        }
        // Use getIndexColumns to support advanced options like sort order and collation
        return `${sqlPrefix} (${this.getIndexColumns(index)})`;
    }

    /**
     * Parses a raw `CREATE TABLE` statement (from `sqlite_master`) into
     * per-column definition strings and trailing table-level constraint
     * strings. Columns are matched from the end of the definition backwards,
     * trimming the string as each column is located.
     * NOTE(review): regex-based and sensitive to the exact DDL formatting
     * SQLite stores — do not restructure without fixtures.
     */
    parseTableDefinition(sql, cols) {
        const columns = {};
        const constraints = [];
        // extract all columns definitions
        let columnsDef = new RegExp(`create table [\`"']?.*?[\`"']? \\((.*)\\)`, 'i').exec(sql.replaceAll('\n', ''))?.[1];
        /* v8 ignore next */
        if (columnsDef) {
            if (columnsDef.includes(', constraint ')) {
                constraints.push(...columnsDef.substring(columnsDef.indexOf(', constraint') + 2).split(', '));
                columnsDef = columnsDef.substring(0, columnsDef.indexOf(', constraint'));
            }
            for (let i = cols.length - 1; i >= 0; i--) {
                const col = cols[i];
                const re = ` *, *[\`"']?${col.name}[\`"']? (.*)`;
                const columnDef = new RegExp(re, 'i').exec(columnsDef);
                if (columnDef) {
                    columns[col.name] = { name: col.name, definition: columnDef[1] };
                    columnsDef = columnsDef.substring(0, columnDef.index);
                }
            }
        }
        return { columns, constraints };
    }

    /**
     * Returns schema prefix for pragma and sqlite_master queries.
     * Returns empty string for main database (no prefix needed).
     */
    getSchemaPrefix(schemaName) {
        if (!schemaName || schemaName === 'main') {
            return '';
        }
        return `${this.platform.quoteIdentifier(schemaName)}.`;
    }

    /**
     * Returns all database names excluding 'temp'.
     */
    async getDatabaseList(connection) {
        const databases = await connection.execute('pragma database_list');
        return databases.filter(d => d.name !== 'temp').map(d => d.name);
    }

    /**
     * Extracts the SELECT part from a CREATE VIEW statement.
     */
    extractViewDefinition(viewDefinition) {
        const match = /create\s+view\s+[`"']?\w+[`"']?\s+as\s+(.*)/is.exec(viewDefinition);
        /* v8 ignore next - fallback for non-standard view definitions */
        return match ? match[1] : viewDefinition;
    }

    /**
     * Introspects a table's columns via `pragma table_xinfo` plus the raw DDL
     * from `sqlite_master` (needed for autoincrement and generated-column
     * expressions, which the pragma does not expose directly).
     * `hidden > 1` marks generated columns (2 = virtual, 3 = stored).
     */
    async getColumns(connection, tableName, schemaName) {
        const prefix = this.getSchemaPrefix(schemaName);
        const columns = await connection.execute(`pragma ${prefix}table_xinfo('${tableName}')`);
        const sql = `select sql from ${prefix}sqlite_master where type = ? and name = ?`;
        const tableDefinition = await connection.execute(sql, ['table', tableName], 'get');
        const composite = columns.reduce((count, col) => count + (col.pk ? 1 : 0), 0) > 1;
        // there can be only one, so naive check like this should be enough
        const hasAutoincrement = tableDefinition.sql.toLowerCase().includes('autoincrement');
        const { columns: columnDefinitions } = this.parseTableDefinition(tableDefinition.sql, columns);
        return columns.map(col => {
            const mappedType = connection.getPlatform().getMappedType(col.type);
            let generated;
            if (col.hidden > 1) {
                /* v8 ignore next */
                const storage = col.hidden === 2 ? 'virtual' : 'stored';
                const re = new RegExp(`(generated always)? as \\((.*)\\)( ${storage})?$`, 'i');
                const match = columnDefinitions[col.name].definition.match(re);
                if (match) {
                    generated = `${match[2]} ${storage}`;
                }
            }
            return {
                name: col.name,
                type: col.type,
                default: this.wrapExpressionDefault(col.dflt_value),
                nullable: !col.notnull,
                primary: !!col.pk,
                mappedType,
                unsigned: false,
                autoincrement: !composite && col.pk && this.platform.isNumericColumn(mappedType) && hasAutoincrement,
                generated,
            };
        });
    }

    /**
     * SQLite strips outer parentheses from expression defaults (`DEFAULT (expr)` → `expr` in pragma).
     * We need to add them back so they match what we generate in DDL.
     */
    wrapExpressionDefault(value) {
        if (value == null) {
            return null;
        }
        // simple values that are returned as-is from pragma (no wrapping needed)
        if (
            /^-?\d/.test(value) ||
            /^[xX]'/.test(value) ||
            value.startsWith("'") ||
            value.startsWith('"') ||
            value.startsWith('(')
        ) {
            return value;
        }
        const lower = value.toLowerCase();
        if (['null', 'true', 'false', 'current_timestamp', 'current_date', 'current_time'].includes(lower)) {
            return value;
        }
        // everything else is an expression that had its outer parens stripped
        return `(${value})`;
    }

    /**
     * Recovers enum definitions from `text check (col in ('a', 'b'))`
     * constraints in the stored DDL, returning a map of column name to the
     * list of allowed string values.
     */
    async getEnumDefinitions(connection, tableName, schemaName) {
        const prefix = this.getSchemaPrefix(schemaName);
        const sql = `select sql from ${prefix}sqlite_master where type = ? and name = ?`;
        const tableDefinition = await connection.execute(sql, ['table', tableName], 'get');
        const checkConstraints = [...(tableDefinition.sql.match(/[`["'][^`\]"']+[`\]"'] text check \(.*?\)/gi) ?? [])];
        return checkConstraints.reduce((o, item) => {
            // check constraints are defined as (note that last closing paren is missing):
            // `type` text check (`type` in ('local', 'global')
            const match = /[`["']([^`\]"']+)[`\]"'] text check \(.* \((.*)\)/i.exec(item);
            /* v8 ignore next */
            if (match) {
                o[match[1]] = match[2].split(',').map(item => /^\(?'(.*)'/.exec(item.trim())[1]);
            }
            return o;
        }, {});
    }

    /**
     * Primary key column names, read from `pragma table_info`.
     * The `indexes` parameter is unused here (kept for interface parity).
     */
    async getPrimaryKeys(connection, indexes, tableName, schemaName) {
        const prefix = this.getSchemaPrefix(schemaName);
        const sql = `pragma ${prefix}table_info(\`${tableName}\`)`;
        const cols = await connection.execute(sql);
        return cols.filter(col => !!col.pk).map(col => col.name);
    }

    /**
     * Collects index metadata: a synthetic 'primary' entry per PK column,
     * plus one row per column of each explicit index from
     * `pragma index_list` / `pragma index_info` (implicit sqlite_* indexes
     * are skipped). Rows are then merged by `mapIndexes`.
     */
    async getIndexes(connection, tableName, schemaName) {
        const prefix = this.getSchemaPrefix(schemaName);
        const sql = `pragma ${prefix}table_info(\`${tableName}\`)`;
        const cols = await connection.execute(sql);
        const indexes = await connection.execute(`pragma ${prefix}index_list(\`${tableName}\`)`);
        const ret = [];
        for (const col of cols.filter(c => c.pk)) {
            ret.push({
                columnNames: [col.name],
                keyName: 'primary',
                constraint: true,
                unique: true,
                primary: true,
            });
        }
        for (const index of indexes.filter(index => !this.isImplicitIndex(index.name))) {
            const res = await connection.execute(`pragma ${prefix}index_info(\`${index.name}\`)`);
            ret.push(
                ...res.map(row => ({
                    columnNames: [row.name],
                    keyName: index.name,
                    unique: !!index.unique,
                    constraint: !!index.unique,
                    primary: false,
                })),
            );
        }
        return this.mapIndexes(ret);
    }

    /**
     * Extracts check constraints from the parsed DDL: inline column checks
     * get a generated name from the naming strategy; named table-level
     * `constraint ... check (...)` clauses keep their declared name.
     */
    async getChecks(connection, tableName, schemaName) {
        const { columns, constraints } = await this.getColumnDefinitions(connection, tableName, schemaName);
        const checks = [];
        for (const key of Object.keys(columns)) {
            const column = columns[key];
            const expression = / (check \((.*)\))/i.exec(column.definition);
            if (expression) {
                checks.push({
                    name: this.platform.getConfig().getNamingStrategy().indexName(tableName, [column.name], 'check'),
                    definition: expression[1],
                    expression: expression[2],
                    columnName: column.name,
                });
            }
        }
        for (const constraint of constraints) {
            const expression = /constraint *[`"']?(.*?)[`"']? * (check \((.*)\))/i.exec(constraint);
            if (expression) {
                checks.push({
                    name: expression[1],
                    definition: expression[2],
                    expression: expression[3],
                });
            }
        }
        return checks;
    }

    /** Fetches the stored CREATE TABLE SQL and parses it into column/constraint strings. */
    async getColumnDefinitions(connection, tableName, schemaName) {
        const prefix = this.getSchemaPrefix(schemaName);
        const columns = await connection.execute(`pragma ${prefix}table_xinfo('${tableName}')`);
        const sql = `select sql from ${prefix}sqlite_master where type = ? and name = ?`;
        const tableDefinition = await connection.execute(sql, ['table', tableName], 'get');
        return this.parseTableDefinition(tableDefinition.sql, columns);
    }

    /**
     * Foreign keys via `pragma foreign_key_list`, keyed by a generated
     * constraint name (SQLite does not report FK names). Defer mode is
     * recovered by matching the generated name against the raw constraint
     * text from the DDL.
     * NOTE(review): multi-column FKs collapse to one entry per source column
     * here — presumably merged downstream; confirm against callers.
     */
    async getForeignKeys(connection, tableName, schemaName) {
        const { constraints } = await this.getColumnDefinitions(connection, tableName, schemaName);
        const prefix = this.getSchemaPrefix(schemaName);
        const fks = await connection.execute(`pragma ${prefix}foreign_key_list(\`${tableName}\`)`);
        const qualifiedTableName = schemaName ? `${schemaName}.${tableName}` : tableName;
        return fks.reduce((ret, fk) => {
            const constraintName = this.platform.getIndexName(tableName, [fk.from], 'foreign');
            const constraint = constraints?.find(c => c.includes(constraintName));
            ret[constraintName] = {
                constraintName,
                columnName: fk.from,
                columnNames: [fk.from],
                localTableName: qualifiedTableName,
                referencedTableName: fk.table,
                referencedColumnName: fk.to,
                referencedColumnNames: [fk.to],
                updateRule: fk.on_update.toLowerCase(),
                deleteRule: fk.on_delete.toLowerCase(),
                deferMode: constraint?.match(/ deferrable initially (deferred|immediate)/i)?.[1].toLowerCase(),
            };
            return ret;
        }, {});
    }

    /** No management database exists for file-based SQLite. */
    getManagementDbName() {
        return '';
    }

    /** No-op: SQLite databases are created implicitly when the file is opened. */
    getCreateDatabaseSQL(name) {
        return '';
    }

    /** A database "exists" if it contains at least one user table. */
    async databaseExists(connection, name) {
        const tables = await connection.execute(this.getListTablesSQL());
        return tables.length > 0;
    }

    /**
     * Implicit indexes will be ignored when diffing
     */
    isImplicitIndex(name) {
        // Ignore indexes with reserved names, e.g. autoindexes
        return name.startsWith('sqlite_');
    }

    /** DROP INDEX never takes a table name in SQLite, only the index name. */
    dropIndex(table, index, oldIndexName = index.keyName) {
        return `drop index ${this.quote(oldIndexName)}`;
    }

    /**
     * SQLite does not support schema-qualified table names in REFERENCES clauses.
     * Foreign key references can only point to tables in the same database.
     */
    getReferencedTableName(referencedTableName, schema) {
        const [schemaName, tableName] = this.splitTableName(referencedTableName);
        // Strip any schema prefix - SQLite REFERENCES clause doesn't support it
        return tableName;
    }

    /**
     * Computes the ALTER statements for a table diff.
     *
     * Changes SQLite cannot express with ALTER TABLE (removed/changed checks,
     * changed FKs or columns, and — checked again after additive columns are
     * handled — added FKs/checks) fall back to the temp-table rebuild in
     * `getAlterTempTableSQL`. Everything else is emitted as plain
     * drop-index / drop-column / add-column / rename / create-index SQL.
     */
    alterTable(diff, safe) {
        const ret = [];
        const [schemaName, tableName] = this.splitTableName(diff.name);
        if (
            Utils.hasObjectKeys(diff.removedChecks) ||
            Utils.hasObjectKeys(diff.changedChecks) ||
            Utils.hasObjectKeys(diff.changedForeignKeys) ||
            Utils.hasObjectKeys(diff.changedColumns)
        ) {
            return this.getAlterTempTableSQL(diff);
        }
        for (const index of Object.values(diff.removedIndexes)) {
            this.append(ret, this.dropIndex(diff.name, index));
        }
        for (const index of Object.values(diff.changedIndexes)) {
            this.append(ret, this.dropIndex(diff.name, index));
        }
        /* v8 ignore next */
        if (!safe && Object.values(diff.removedColumns).length > 0) {
            this.append(ret, this.getDropColumnsSQL(tableName, Object.values(diff.removedColumns), schemaName));
        }
        if (Object.values(diff.addedColumns).length > 0) {
            this.append(ret, this.getAddColumnsSQL(diff.toTable, Object.values(diff.addedColumns), diff));
        }
        // re-checked here: getAddColumnsSQL may have consumed single-column FKs from the diff
        if (Utils.hasObjectKeys(diff.addedForeignKeys) || Utils.hasObjectKeys(diff.addedChecks)) {
            return this.getAlterTempTableSQL(diff);
        }
        for (const [oldColumnName, column] of Object.entries(diff.renamedColumns)) {
            this.append(ret, this.getRenameColumnSQL(tableName, oldColumnName, column, schemaName));
        }
        for (const index of Object.values(diff.addedIndexes)) {
            ret.push(this.createIndex(index, diff.toTable));
        }
        for (const index of Object.values(diff.changedIndexes)) {
            ret.push(this.createIndex(index, diff.toTable, true));
        }
        for (const [oldIndexName, index] of Object.entries(diff.renamedIndexes)) {
            if (index.unique) {
                // unique indexes cannot be renamed in place, so drop + recreate
                this.append(ret, this.dropIndex(diff.name, index, oldIndexName));
                this.append(ret, this.createIndex(index, diff.toTable));
            } else {
                this.append(ret, this.getRenameIndexSQL(diff.name, index, oldIndexName));
            }
        }
        return ret;
    }

    /**
     * Implements the documented SQLite "12-step" table rebuild for changes
     * ALTER TABLE cannot express: create the target table under a temp name,
     * copy data over (new columns filled with null), drop the old table and
     * rename the temp one back. FK enforcement is disabled around the whole
     * sequence and index creation statements are replayed afterwards.
     */
    getAlterTempTableSQL(changedTable) {
        const tempName = `${changedTable.toTable.name}__temp_alter`;
        const quotedName = this.quote(changedTable.toTable.name);
        const quotedTempName = this.quote(tempName);
        const [first, ...rest] = this.createTable(changedTable.toTable);
        const sql = [
            'pragma foreign_keys = off;',
            first.replace(`create table ${quotedName}`, `create table ${quotedTempName}`),
        ];
        const columns = [];
        for (const column of changedTable.toTable.getColumns()) {
            const fromColumn = changedTable.fromTable.getColumn(column.name);
            if (fromColumn) {
                columns.push(this.quote(column.name));
            } else {
                // column does not exist in the source table, select null for it
                columns.push(`null as ${this.quote(column.name)}`);
            }
        }
        sql.push(`insert into ${quotedTempName} select ${columns.join(', ')} from ${quotedName};`);
        sql.push(`drop table ${quotedName};`);
        sql.push(`alter table ${quotedTempName} rename to ${quotedName};`);
        sql.push(...rest);
        sql.push('pragma foreign_keys = on;');
        return sql;
    }
}
|
||||
6
node_modules/@mikro-orm/sql/dialects/sqlite/index.d.ts
generated
vendored
Normal file
6
node_modules/@mikro-orm/sql/dialects/sqlite/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
// Barrel declarations for the SQLite dialect: re-export the public surface of
// every module in this directory.
export * from './BaseSqliteConnection.js';
export * from './NodeSqliteDialect.js';
export * from './SqliteDriver.js';
export * from './SqlitePlatform.js';
export * from './SqliteSchemaHelper.js';
export * from './SqliteNativeQueryBuilder.js';
|
||||
6
node_modules/@mikro-orm/sql/dialects/sqlite/index.js
generated
vendored
Normal file
6
node_modules/@mikro-orm/sql/dialects/sqlite/index.js
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
// Barrel module for the SQLite dialect: re-export the public surface of
// every module in this directory.
export * from './BaseSqliteConnection.js';
export * from './NodeSqliteDialect.js';
export * from './SqliteDriver.js';
export * from './SqlitePlatform.js';
export * from './SqliteSchemaHelper.js';
export * from './SqliteNativeQueryBuilder.js';
|
||||
Reference in New Issue
Block a user