Initial commit - Event Planner application

This commit is contained in:
mberlin
2026-03-18 14:55:56 -03:00
commit 86d779eb4d
7548 changed files with 1006324 additions and 0 deletions

104
node_modules/@mikro-orm/sql/AbstractSqlConnection.d.ts generated vendored Normal file
View File

@@ -0,0 +1,104 @@
import { type ControlledTransaction, type Dialect, Kysely } from 'kysely';
import {
type AnyEntity,
Connection,
type Dictionary,
type EntityData,
type IsolationLevel,
type LogContext,
type LoggingOptions,
type MaybePromise,
type QueryResult,
RawQueryFragment,
type Transaction,
type TransactionEventBroadcaster,
} from '@mikro-orm/core';
import type { AbstractSqlPlatform } from './AbstractSqlPlatform.js';
import { NativeQueryBuilder } from './query/NativeQueryBuilder.js';
/** Base class for SQL database connections, built on top of Kysely. */
export declare abstract class AbstractSqlConnection extends Connection {
  #private;
  /** Platform providing SQL generation helpers (savepoint/transaction SQL, query formatting). */
  protected platform: AbstractSqlPlatform;
  /** Creates a Kysely dialect instance with driver-specific configuration. */
  abstract createKyselyDialect(overrides: Dictionary): MaybePromise<Dialect>;
  /** Establishes the database connection and runs the onConnect hook. */
  connect(options?: { skipOnConnect?: boolean }): Promise<void>;
  /** Initializes the Kysely client from driver options or a user-provided Kysely instance. */
  createKysely(): MaybePromise<void>;
  /**
   * @inheritDoc
   */
  close(force?: boolean): Promise<void>;
  /**
   * @inheritDoc
   */
  isConnected(): Promise<boolean>;
  /**
   * @inheritDoc
   */
  checkConnection(): Promise<
    | {
        ok: true;
      }
    | {
        ok: false;
        reason: string;
        error?: Error;
      }
  >;
  /** Returns the underlying Kysely client, creating it synchronously if needed. */
  getClient<T = any>(): Kysely<T>;
  /** Ensures the Kysely client is initialized, creating it asynchronously if needed. */
  initClient(): Promise<void>;
  /** Executes a callback within a transaction, committing on success and rolling back on error. */
  transactional<T>(
    cb: (trx: Transaction<ControlledTransaction<any, any>>) => Promise<T>,
    options?: {
      isolationLevel?: IsolationLevel;
      readOnly?: boolean;
      ctx?: ControlledTransaction<any>;
      eventBroadcaster?: TransactionEventBroadcaster;
      loggerContext?: LogContext;
    },
  ): Promise<T>;
  /** Begins a new transaction or creates a savepoint if a transaction context already exists. */
  begin(options?: {
    isolationLevel?: IsolationLevel;
    readOnly?: boolean;
    ctx?: ControlledTransaction<any, any>;
    eventBroadcaster?: TransactionEventBroadcaster;
    loggerContext?: LogContext;
  }): Promise<ControlledTransaction<any, any>>;
  /** Commits the transaction or releases the savepoint. */
  commit(
    ctx: ControlledTransaction<any, any>,
    eventBroadcaster?: TransactionEventBroadcaster,
    loggerContext?: LogContext,
  ): Promise<void>;
  /** Rolls back the transaction or rolls back to the savepoint. */
  rollback(
    ctx: ControlledTransaction<any, any>,
    eventBroadcaster?: TransactionEventBroadcaster,
    loggerContext?: LogContext,
  ): Promise<void>;
  /** Normalizes a builder/raw-fragment/string query into `{ query, params, formatted }` before execution. */
  private prepareQuery;
  /** Executes a SQL query and returns the result based on the method: `'all'` for rows, `'get'` for single row, `'run'` for affected count. */
  execute<T extends QueryResult | EntityData<AnyEntity> | EntityData<AnyEntity>[] = EntityData<AnyEntity>[]>(
    query: string | NativeQueryBuilder | RawQueryFragment,
    params?: readonly unknown[],
    method?: 'all' | 'get' | 'run',
    ctx?: Transaction,
    loggerContext?: LoggingOptions,
  ): Promise<T>;
  /** Executes a SQL query and returns an async iterable that yields results row by row. */
  stream<T extends EntityData<AnyEntity>>(
    query: string | NativeQueryBuilder | RawQueryFragment,
    params?: readonly unknown[],
    ctx?: Transaction<Kysely<any>>,
    loggerContext?: LoggingOptions,
  ): AsyncIterableIterator<T>;
  /** @inheritDoc */
  executeDump(dump: string): Promise<void>;
  /** Returns the SQL string to log — the param-formatted SQL only when `query-params` logging is enabled. */
  protected getSql(query: string, formatted: string, context?: LogContext): string;
  /** Shapes the raw driver result according to `method` (single row, all rows, or a QueryResult-like object). */
  protected transformRawResult<T>(res: any, method?: 'all' | 'get' | 'run'): T;
}

257
node_modules/@mikro-orm/sql/AbstractSqlConnection.js generated vendored Normal file
View File

@@ -0,0 +1,257 @@
import { CompiledQuery, Kysely } from 'kysely';
import { Connection, EventType, RawQueryFragment, Utils } from '@mikro-orm/core';
import { NativeQueryBuilder } from './query/NativeQueryBuilder.js';
/** Base class for SQL database connections, built on top of Kysely. */
export class AbstractSqlConnection extends Connection {
  #client;
  /** Establishes the database connection and runs the onConnect hook (unless `skipOnConnect` is set). */
  async connect(options) {
    await this.initClient();
    this.connected = true;
    if (options?.skipOnConnect !== true) {
      await this.onConnect();
    }
  }
  /** Initializes the Kysely client from driver options or a user-provided Kysely instance. */
  createKysely() {
    let driverOptions = this.options.driverOptions ?? this.config.get('driverOptions');
    if (typeof driverOptions === 'function') {
      driverOptions = driverOptions();
    }
    if (driverOptions instanceof Kysely) {
      this.logger.log('info', 'Reusing Kysely client provided via `driverOptions`');
      this.#client = driverOptions;
    } else if ('createDriver' in driverOptions) {
      this.logger.log('info', 'Reusing Kysely dialect provided via `driverOptions`');
      this.#client = new Kysely({ dialect: driverOptions });
    } else {
      const dialect = this.createKyselyDialect(driverOptions);
      // some dialects are created asynchronously — return the promise so `initClient()` can await it
      if (dialect instanceof Promise) {
        return dialect.then(d => {
          this.#client = new Kysely({ dialect: d });
        });
      }
      this.#client = new Kysely({ dialect });
    }
  }
  /**
   * @inheritDoc
   */
  async close(force) {
    await super.close(force);
    await this.#client?.destroy();
    this.connected = false;
    this.#client = undefined;
  }
  /**
   * @inheritDoc
   */
  async isConnected() {
    const check = await this.checkConnection();
    return check.ok;
  }
  /**
   * @inheritDoc
   */
  async checkConnection() {
    if (!this.connected) {
      return { ok: false, reason: 'Connection not established' };
    }
    try {
      // cheap round-trip to verify the connection is actually alive
      await this.getClient().executeQuery(CompiledQuery.raw('select 1'));
      return { ok: true };
    } catch (error) {
      return { ok: false, reason: error.message, error };
    }
  }
  /** Returns the underlying Kysely client, creating it synchronously if needed. */
  getClient() {
    if (!this.#client) {
      const maybePromise = this.createKysely();
      /* v8 ignore next */
      if (maybePromise instanceof Promise) {
        throw new Error(
          'Current driver requires async initialization, use `MikroORM.init()` instead of the constructor',
        );
      }
    }
    return this.#client;
  }
  /** Ensures the Kysely client is initialized, creating it asynchronously if needed. */
  async initClient() {
    if (!this.#client) {
      await this.createKysely();
    }
  }
  /** Executes a callback within a transaction, committing on success and rolling back on error. */
  async transactional(cb, options = {}) {
    const trx = await this.begin(options);
    try {
      const ret = await cb(trx);
      await this.commit(trx, options.eventBroadcaster, options.loggerContext);
      return ret;
    } catch (error) {
      await this.rollback(trx, options.eventBroadcaster, options.loggerContext);
      throw error;
    }
  }
  /** Begins a new transaction or creates a savepoint if a transaction context already exists. */
  async begin(options = {}) {
    if (options.ctx) {
      // nested begin → create a savepoint on the existing transaction context
      const ctx = options.ctx;
      await options.eventBroadcaster?.dispatchEvent(EventType.beforeTransactionStart, ctx);
      ctx.index ??= 0;
      const savepointName = `trx${ctx.index + 1}`;
      const trx = await ctx.savepoint(savepointName).execute();
      // track nesting depth and savepoint name on the new context (consumed by commit/rollback)
      Reflect.defineProperty(trx, 'index', { value: ctx.index + 1 });
      Reflect.defineProperty(trx, 'savepointName', { value: savepointName });
      this.logQuery(this.platform.getSavepointSQL(savepointName), options.loggerContext);
      await options.eventBroadcaster?.dispatchEvent(EventType.afterTransactionStart, trx);
      return trx;
    }
    await this.ensureConnection();
    await options.eventBroadcaster?.dispatchEvent(EventType.beforeTransactionStart);
    let trxBuilder = this.getClient().startTransaction();
    if (options.isolationLevel) {
      trxBuilder = trxBuilder.setIsolationLevel(options.isolationLevel);
    }
    if (options.readOnly) {
      trxBuilder = trxBuilder.setAccessMode('read only');
    }
    const trx = await trxBuilder.execute();
    // fix: the original re-checked `options.ctx` here, but that branch was unreachable —
    // the savepoint case always returns from the early branch above, so only the
    // top-level "begin" logging remains here
    for (const query of this.platform.getBeginTransactionSQL(options)) {
      this.logQuery(query, options.loggerContext);
    }
    await options.eventBroadcaster?.dispatchEvent(EventType.afterTransactionStart, trx);
    return trx;
  }
  /** Commits the transaction or releases the savepoint. */
  async commit(ctx, eventBroadcaster, loggerContext) {
    // nothing to commit if the transaction was already rolled back
    if (ctx.isRolledBack) {
      return;
    }
    await eventBroadcaster?.dispatchEvent(EventType.beforeTransactionCommit, ctx);
    if ('savepointName' in ctx) {
      await ctx.releaseSavepoint(ctx.savepointName).execute();
      this.logQuery(this.platform.getReleaseSavepointSQL(ctx.savepointName), loggerContext);
    } else {
      await ctx.commit().execute();
      this.logQuery(this.platform.getCommitTransactionSQL(), loggerContext);
    }
    await eventBroadcaster?.dispatchEvent(EventType.afterTransactionCommit, ctx);
  }
  /** Rolls back the transaction or rolls back to the savepoint. */
  async rollback(ctx, eventBroadcaster, loggerContext) {
    await eventBroadcaster?.dispatchEvent(EventType.beforeTransactionRollback, ctx);
    if ('savepointName' in ctx) {
      await ctx.rollbackToSavepoint(ctx.savepointName).execute();
      this.logQuery(this.platform.getRollbackToSavepointSQL(ctx.savepointName), loggerContext);
    } else {
      await ctx.rollback().execute();
      this.logQuery(this.platform.getRollbackTransactionSQL(), loggerContext);
    }
    await eventBroadcaster?.dispatchEvent(EventType.afterTransactionRollback, ctx);
  }
  /** Normalizes a builder/raw-fragment/string query into `{ query, params, formatted }`. */
  prepareQuery(query, params = []) {
    if (query instanceof NativeQueryBuilder) {
      query = query.toRaw();
    }
    if (query instanceof RawQueryFragment) {
      params = query.params;
      query = query.sql;
    }
    // allow user-land interception/rewriting of the SQL before execution
    query = this.config.get('onQuery')(query, params);
    const formatted = this.platform.formatQuery(query, params);
    return { query, params, formatted };
  }
  /** Executes a SQL query and returns the result based on the method: `'all'` for rows, `'get'` for single row, `'run'` for affected count. */
  async execute(query, params = [], method = 'all', ctx, loggerContext) {
    await this.ensureConnection();
    const q = this.prepareQuery(query, params);
    const sql = this.getSql(q.query, q.formatted, loggerContext);
    return this.executeQuery(
      sql,
      async () => {
        const compiled = CompiledQuery.raw(q.formatted);
        // use `getClient()` for consistency with `stream()` (original read `this.#client` directly;
        // the client is guaranteed to exist after `ensureConnection()` above)
        const res = await (ctx ?? this.getClient()).executeQuery(compiled);
        return this.transformRawResult(res, method);
      },
      { ...q, ...loggerContext },
    );
  }
  /** Executes a SQL query and returns an async iterable that yields results row by row. */
  async *stream(query, params = [], ctx, loggerContext) {
    await this.ensureConnection();
    const q = this.prepareQuery(query, params);
    const sql = this.getSql(q.query, q.formatted, loggerContext);
    // construct the compiled query manually with `kind: 'SelectQueryNode'` to avoid sqlite validation for select queries when streaming
    const compiled = {
      query: {
        kind: 'SelectQueryNode',
      },
      sql: q.formatted,
      parameters: [],
    };
    try {
      const res = (ctx ?? this.getClient()).getExecutor().stream(compiled, 1);
      this.logQuery(sql, {
        sql,
        params,
        ...loggerContext,
        affected: Utils.isPlainObject(res) ? res.affectedRows : undefined,
      });
      // each streamed chunk may hold several rows — flatten and yield one row at a time
      for await (const items of res) {
        for (const row of this.transformRawResult(items, 'all')) {
          yield row;
        }
      }
    } catch (e) {
      this.logQuery(sql, { sql, params, ...loggerContext, level: 'error' });
      throw e;
    }
  }
  /** @inheritDoc */
  async executeDump(dump) {
    await this.ensureConnection();
    try {
      const raw = CompiledQuery.raw(dump);
      await this.getClient().executeQuery(raw);
    } catch (e) {
      /* v8 ignore next */
      throw this.platform.getExceptionConverter().convertException(e);
    }
  }
  /** Returns the SQL to log: formatted (params inlined) only when `query-params` logging is enabled. */
  getSql(query, formatted, context) {
    const logger = this.config.getLogger();
    if (!logger.isEnabled('query', context)) {
      return query;
    }
    if (logger.isEnabled('query-params', context)) {
      return formatted;
    }
    return query;
  }
  /** Shapes the raw Kysely result according to `method`: single row, all rows, or a QueryResult-like object. */
  transformRawResult(res, method) {
    if (method === 'get') {
      return res.rows[0];
    }
    if (method === 'all') {
      return res.rows;
    }
    return {
      affectedRows: Number(res.numAffectedRows ?? res.rows.length),
      // bigint insert ids are narrowed to number; null/undefined are passed through as-is
      insertId: res.insertId != null ? Number(res.insertId) : res.insertId,
      row: res.rows[0],
      rows: res.rows,
    };
  }
}

423
node_modules/@mikro-orm/sql/AbstractSqlDriver.d.ts generated vendored Normal file
View File

@@ -0,0 +1,423 @@
import {
type AnyEntity,
type Collection,
type Configuration,
type ConnectionType,
type Constructor,
type CountOptions,
DatabaseDriver,
type DeleteOptions,
type Dictionary,
type DriverMethodOptions,
type EntityData,
type EntityDictionary,
type EntityField,
EntityManagerType,
type EntityMetadata,
type EntityName,
type EntityProperty,
type FilterQuery,
type FindOneOptions,
type FindOptions,
type FormulaTable,
type LockOptions,
type LoggingOptions,
type NativeInsertUpdateManyOptions,
type NativeInsertUpdateOptions,
type ObjectQuery,
type Options,
type OrderDefinition,
type PopulateOptions,
type PopulatePath,
type Primary,
type QueryOrderMap,
type QueryResult,
type Raw,
RawQueryFragment,
type StreamOptions,
type Transaction,
type UpsertManyOptions,
type UpsertOptions,
} from '@mikro-orm/core';
import type { AbstractSqlConnection } from './AbstractSqlConnection.js';
import type { AbstractSqlPlatform } from './AbstractSqlPlatform.js';
import { type AnyQueryBuilder } from './query/QueryBuilder.js';
import { type NativeQueryBuilder } from './query/NativeQueryBuilder.js';
import { QueryType } from './query/enums.js';
import { SqlEntityManager } from './SqlEntityManager.js';
import type { InternalField } from './typings.js';
/** Base class for SQL database drivers, implementing find/insert/update/delete using QueryBuilder. */
export declare abstract class AbstractSqlDriver<
  Connection extends AbstractSqlConnection = AbstractSqlConnection,
  Platform extends AbstractSqlPlatform = AbstractSqlPlatform,
> extends DatabaseDriver<Connection> {
  [EntityManagerType]: SqlEntityManager<this>;
  protected readonly connection: Connection;
  /** Read-replica connections — presumably used for read queries; confirm against DatabaseDriver. */
  protected readonly replicas: Connection[];
  protected readonly platform: Platform;
  protected constructor(
    config: Configuration,
    platform: Platform,
    connection: Constructor<Connection>,
    connector: string[],
  );
  /** Returns the platform instance bound to this driver. */
  getPlatform(): Platform;
  /** Evaluates a formula callback, handling both string and Raw return values. */
  evaluateFormula(formula: (...args: any[]) => string | Raw, columns: any, table: FormulaTable): string;
  /** For TPT entities, returns ownProps (columns in this table); otherwise returns all props. */
  private getTableProps;
  /** Creates a FormulaTable object for use in formula callbacks. */
  private createFormulaTable;
  private validateSqlOptions;
  /** Creates a SqlEntityManager for this driver. */
  createEntityManager(useContext?: boolean): this[typeof EntityManagerType];
  private createQueryBuilderFromOptions;
  /** Finds entities matching `where`, honoring populate/fields/exclude options. */
  find<T extends object, P extends string = never, F extends string = PopulatePath.ALL, E extends string = never>(
    entityName: EntityName<T>,
    where: ObjectQuery<T>,
    options?: FindOptions<T, P, F, E>,
  ): Promise<EntityData<T>[]>;
  /** Finds a single entity matching `where`, or `null` when none matches. */
  findOne<T extends object, P extends string = never, F extends string = PopulatePath.ALL, E extends string = never>(
    entityName: EntityName<T>,
    where: ObjectQuery<T>,
    options?: FindOneOptions<T, P, F, E>,
  ): Promise<EntityData<T> | null>;
  protected hasToManyJoins<T extends object>(hint: PopulateOptions<T>, meta: EntityMetadata<T>): boolean;
  /** Find implementation for virtual entities (expression/view backed, no own table). */
  findVirtual<T extends object>(
    entityName: EntityName<T>,
    where: ObjectQuery<T>,
    options: FindOptions<T, any, any, any>,
  ): Promise<EntityData<T>[]>;
  /** Count implementation for virtual entities. */
  countVirtual<T extends object>(
    entityName: EntityName<T>,
    where: ObjectQuery<T>,
    options: CountOptions<T, any>,
  ): Promise<number>;
  protected findFromVirtual<T extends object>(
    entityName: EntityName<T>,
    where: ObjectQuery<T>,
    options: FindOptions<T, any> | CountOptions<T, any>,
    type: QueryType,
  ): Promise<EntityData<T>[] | number>;
  protected streamFromVirtual<T extends object>(
    entityName: EntityName<T>,
    where: FilterQuery<T>,
    options: StreamOptions<T, any>,
  ): AsyncIterableIterator<EntityData<T>>;
  protected wrapVirtualExpressionInSubquery<T extends object>(
    meta: EntityMetadata<T>,
    expression: string,
    where: FilterQuery<T>,
    options: FindOptions<T, any>,
    type: QueryType,
  ): Promise<T[] | number>;
  protected wrapVirtualExpressionInSubqueryStream<T extends object>(
    meta: EntityMetadata<T>,
    expression: string,
    where: FilterQuery<T>,
    options: FindOptions<T, any, any, any>,
    type: QueryType.SELECT,
  ): AsyncIterableIterator<T>;
  /**
   * Virtual entities have no PKs, so to-many populate joins can't be deduplicated.
   * Force balanced strategy to load to-many relations via separate queries.
   */
  private forceBalancedStrategy;
  /** Maps a raw result row to entity data, handling joined/TPT columns. */
  mapResult<T extends object>(
    result: EntityData<T>,
    meta: EntityMetadata<T>,
    populate?: PopulateOptions<T>[],
    qb?: AnyQueryBuilder<T>,
    map?: Dictionary,
  ): EntityData<T> | null;
  /**
   * Maps aliased columns from TPT parent tables back to their original field names.
   * TPT parent columns are selected with aliases like `parent_alias__column_name`,
   * and need to be renamed back to `column_name` for the result mapper to work.
   */
  private mapTPTColumns;
  private mapJoinedProps;
  /**
   * Maps a single property from a joined result row into the relation pojo.
   * Handles polymorphic FKs, composite keys, Date parsing, and embedded objects.
   */
  private mapJoinedProp;
  /** Counts entities matching `where`. */
  count<T extends object>(entityName: EntityName<T>, where: any, options?: CountOptions<T>): Promise<number>;
  /** Inserts a single entity. */
  nativeInsert<T extends object>(
    entityName: EntityName<T>,
    data: EntityDictionary<T>,
    options?: NativeInsertUpdateOptions<T>,
  ): Promise<QueryResult<T>>;
  /** Batch-inserts multiple entities; `transform` may post-process the generated SQL. */
  nativeInsertMany<T extends object>(
    entityName: EntityName<T>,
    data: EntityDictionary<T>[],
    options?: NativeInsertUpdateManyOptions<T>,
    transform?: (sql: string) => string,
  ): Promise<QueryResult<T>>;
  /** Updates entities matching `where` (also supports upsert options). */
  nativeUpdate<T extends object>(
    entityName: EntityName<T>,
    where: FilterQuery<T>,
    data: EntityDictionary<T>,
    options?: NativeInsertUpdateOptions<T> & UpsertOptions<T>,
  ): Promise<QueryResult<T>>;
  /** Batch-updates multiple entities; `transform` may post-process the generated SQL. */
  nativeUpdateMany<T extends object>(
    entityName: EntityName<T>,
    where: FilterQuery<T>[],
    data: EntityDictionary<T>[],
    options?: NativeInsertUpdateManyOptions<T> & UpsertManyOptions<T>,
    transform?: (sql: string, params: any[]) => string,
  ): Promise<QueryResult<T>>;
  /** Deletes entities matching `where`. */
  nativeDelete<T extends object>(
    entityName: EntityName<T>,
    where: FilterQuery<T> | string | any,
    options?: DeleteOptions<T>,
  ): Promise<QueryResult<T>>;
  /**
   * Fast comparison for collection snapshots that are represented by PK arrays.
   * Compares scalars via `===` and fallbacks to Utils.equals()` for more complex types like Buffer.
   * Always expects the same length of the arrays, since we only compare PKs of the same entity type.
   */
  private comparePrimaryKeyArrays;
  /** Persists pending changes of the given collections (pivot rows, FK updates). */
  syncCollections<T extends object, O extends object>(
    collections: Iterable<Collection<T, O>>,
    options?: DriverMethodOptions,
  ): Promise<void>;
  /** Loads M:N relation targets for the given owners via their pivot table. */
  loadFromPivotTable<T extends object, O extends object>(
    prop: EntityProperty,
    owners: Primary<O>[][],
    where?: FilterQuery<any>,
    orderBy?: OrderDefinition<T>,
    ctx?: Transaction,
    options?: FindOptions<T, any, any, any>,
    pivotJoin?: boolean,
  ): Promise<Dictionary<T[]>>;
  /**
   * Load from a polymorphic M:N pivot table.
   */
  protected loadFromPolymorphicPivotTable<T extends object, O extends object>(
    prop: EntityProperty,
    owners: Primary<O>[][],
    where?: FilterQuery<any>,
    orderBy?: OrderDefinition<T>,
    ctx?: Transaction,
    options?: FindOptions<T, any, any, any>,
    pivotJoin?: boolean,
  ): Promise<Dictionary<T[]>>;
  /**
   * Load from owner side of polymorphic M:N (e.g., Post -> Tags)
   */
  protected loadPolymorphicPivotOwnerSide<T extends object, O extends object>(
    prop: EntityProperty,
    owners: Primary<O>[][],
    where: FilterQuery<any>,
    orderBy?: OrderDefinition<T>,
    ctx?: Transaction,
    options?: FindOptions<T, any, any, any>,
    pivotJoin?: boolean,
    inverseProp?: EntityProperty,
  ): Promise<Dictionary<T[]>>;
  /**
   * Load from inverse side of polymorphic M:N (e.g., Tag -> Posts)
   * Uses single query with join via virtual relation on pivot.
   */
  protected loadPolymorphicPivotInverseSide<T extends object, O extends object>(
    prop: EntityProperty,
    owners: Primary<O>[][],
    where: FilterQuery<any>,
    orderBy?: OrderDefinition<T>,
    ctx?: Transaction,
    options?: FindOptions<T, any, any, any>,
  ): Promise<Dictionary<T[]>>;
  /**
   * Build a map from owner PKs to their related entities from pivot table results.
   */
  private buildPivotResultMap;
  private wrapPopulateFilter;
  private getPivotOrderBy;
  /** Executes a raw SQL query via the connection — see AbstractSqlConnection.execute(). */
  execute<T extends QueryResult | EntityData<AnyEntity> | EntityData<AnyEntity>[] = EntityData<AnyEntity>[]>(
    query: string | NativeQueryBuilder | RawQueryFragment,
    params?: any[],
    method?: 'all' | 'get' | 'run',
    ctx?: Transaction,
    loggerContext?: LoggingOptions,
  ): Promise<T>;
  /** Streams entities matching `where` row by row. */
  stream<T extends object>(
    entityName: EntityName<T>,
    where: FilterQuery<T>,
    options: StreamOptions<T, any, any, any>,
  ): AsyncIterableIterator<T>;
  /**
   * 1:1 owner side needs to be marked for population so QB auto-joins the owner id
   */
  protected autoJoinOneToOneOwner<T extends object>(
    meta: EntityMetadata<T>,
    populate: PopulateOptions<T>[],
    fields?: readonly EntityField<T, any>[],
  ): PopulateOptions<T>[];
  /**
   * @internal
   */
  joinedProps<T>(
    meta: EntityMetadata,
    populate: readonly PopulateOptions<T>[],
    options?: {
      strategy?: Options['loadStrategy'];
    },
  ): PopulateOptions<T>[];
  /**
   * Merges duplicate root rows produced by to-many joins back into single entities.
   * @internal
   */
  mergeJoinedResult<T extends object>(
    rawResults: EntityData<T>[],
    meta: EntityMetadata<T>,
    joinedProps: PopulateOptions<T>[],
  ): EntityData<T>[];
  protected shouldHaveColumn<T, U>(
    meta: EntityMetadata<T>,
    prop: EntityProperty<U>,
    populate: readonly PopulateOptions<U>[],
    fields?: readonly InternalField<U>[],
    exclude?: readonly InternalField<U>[],
  ): boolean;
  protected getFieldsForJoinedLoad<T extends object>(
    qb: AnyQueryBuilder<T>,
    meta: EntityMetadata<T>,
    options: FieldsForJoinedLoadOptions<T>,
  ): InternalField<T>[];
  /**
   * Adds LEFT JOINs and fields for TPT polymorphic loading when populating a relation to a TPT base class.
   * @internal
   */
  protected addTPTPolymorphicJoinsForRelation<T extends object>(
    qb: AnyQueryBuilder<T>,
    meta: EntityMetadata<T>,
    baseAlias: string,
    fields: InternalField<T>[],
  ): void;
  /**
   * Find the alias for a TPT child table in the query builder.
   * @internal
   */
  protected findTPTChildAlias<T extends object>(qb: AnyQueryBuilder<T>, childMeta: EntityMetadata): string | undefined;
  /**
   * Builds a CASE WHEN expression for TPT discriminator.
   * Determines concrete entity type based on which child table has a non-null PK.
   * @internal
   */
  buildTPTDiscriminatorExpression(
    meta: EntityMetadata,
    descendants: EntityMetadata[],
    aliasMap: Dictionary<string>,
    baseAlias: string,
  ): Raw;
  /**
   * Maps TPT child-specific fields during hydration.
   * When a relation points to a TPT base class, the actual entity might be a child class.
   * This method reads the discriminator to determine the concrete type and maps child-specific fields.
   * @internal
   */
  protected mapTPTChildFields<T extends object>(
    relationPojo: EntityData<T>,
    meta: EntityMetadata<T>,
    relationAlias: string,
    qb: AnyQueryBuilder<T>,
    root: EntityData<T>,
  ): void;
  /**
   * @internal
   */
  mapPropToFieldNames<T extends object>(
    qb: AnyQueryBuilder<T>,
    prop: EntityProperty<T>,
    tableAlias: string,
    meta: EntityMetadata<T>,
    schema?: string,
    explicitFields?: readonly InternalField<T>[],
  ): InternalField<T>[];
  /** @internal */
  createQueryBuilder<T extends object>(
    entityName: EntityName<T> | AnyQueryBuilder<T>,
    ctx?: Transaction,
    preferredConnectionType?: ConnectionType,
    convertCustomTypes?: boolean,
    loggerContext?: LoggingOptions,
    alias?: string,
    em?: SqlEntityManager,
  ): AnyQueryBuilder<T>;
  protected resolveConnectionType(args: { ctx?: Transaction; connectionType?: ConnectionType }): ConnectionType;
  protected extractManyToMany<T>(meta: EntityMetadata<T>, data: EntityDictionary<T>): EntityData<T>;
  protected processManyToMany<T extends object>(
    meta: EntityMetadata<T>,
    pks: Primary<T>[],
    collections: EntityData<T>,
    clear: boolean,
    options?: DriverMethodOptions,
  ): Promise<void>;
  /** Acquires a pessimistic lock on the given entity. */
  lockPessimistic<T extends object>(entity: T, options: LockOptions): Promise<void>;
  protected buildPopulateWhere<T extends object>(
    meta: EntityMetadata<T>,
    joinedProps: PopulateOptions<T>[],
    options: Pick<FindOptions<any>, 'populateWhere'>,
  ): ObjectQuery<T>;
  /**
   * Builds a UNION ALL (or UNION) subquery from `unionWhere` branches and merges it
   * into the main WHERE as `pk IN (branch_1 UNION ALL branch_2 ...)`.
   * Each branch is planned independently by the database, enabling per-table index usage.
   */
  protected applyUnionWhere<T extends object>(
    meta: EntityMetadata<T>,
    where: ObjectQuery<T>,
    options: FindOptions<T, any, any, any> | CountOptions<T> | NativeInsertUpdateOptions<T> | DeleteOptions<T>,
    forDml?: boolean,
  ): Promise<ObjectQuery<T>>;
  protected buildOrderBy<T extends object>(
    qb: AnyQueryBuilder<T>,
    meta: EntityMetadata<T>,
    populate: PopulateOptions<T>[],
    options: Pick<FindOptions<any>, 'strategy' | 'orderBy' | 'populateOrderBy'>,
  ): QueryOrderMap<T>[];
  protected buildPopulateOrderBy<T extends object>(
    qb: AnyQueryBuilder<T>,
    meta: EntityMetadata<T>,
    populateOrderBy: QueryOrderMap<T>[],
    parentPath: string,
    explicit: boolean,
    parentAlias?: string,
  ): QueryOrderMap<T>[];
  protected buildJoinedPropsOrderBy<T extends object>(
    qb: AnyQueryBuilder<T>,
    meta: EntityMetadata<T>,
    populate: PopulateOptions<T>[],
    options?: Pick<FindOptions<any>, 'strategy' | 'orderBy' | 'populateOrderBy'>,
    parentPath?: string,
  ): QueryOrderMap<T>[];
  private buildToManyOrderBy;
  protected normalizeFields<T extends object>(fields: InternalField<T>[], prefix?: string): string[];
  protected processField<T extends object>(
    meta: EntityMetadata<T>,
    prop: EntityProperty<T> | undefined,
    field: string,
    ret: InternalField<T>[],
  ): void;
  protected buildFields<T extends object>(
    meta: EntityMetadata<T>,
    populate: PopulateOptions<T>[],
    joinedProps: PopulateOptions<T>[],
    qb: AnyQueryBuilder<T>,
    alias: string,
    options: Pick<FindOptions<T, any, any, any>, 'strategy' | 'fields' | 'exclude'>,
    schema?: string,
  ): InternalField<T>[];
}
/** Options bag for `getFieldsForJoinedLoad()` — controls which fields are selected for a joined relation. */
interface FieldsForJoinedLoadOptions<T extends object> {
  explicitFields?: readonly InternalField<T>[];
  exclude?: readonly InternalField<T>[];
  populate?: readonly PopulateOptions<T>[];
  strategy?: Options['loadStrategy'];
  populateWhere?: FindOptions<any>['populateWhere'];
  populateFilter?: FindOptions<any>['populateFilter'];
  parentTableAlias: string;
  parentJoinPath?: string;
  count?: boolean;
  schema?: string;
}
export {};

2123
node_modules/@mikro-orm/sql/AbstractSqlDriver.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

90
node_modules/@mikro-orm/sql/AbstractSqlPlatform.d.ts generated vendored Normal file
View File

@@ -0,0 +1,90 @@
import {
type RawQueryFragment,
type Constructor,
type EntityManager,
type EntityRepository,
type IDatabaseDriver,
type IsolationLevel,
type MikroORM,
Platform,
} from '@mikro-orm/core';
import { SqlSchemaGenerator } from './schema/SqlSchemaGenerator.js';
import { type SchemaHelper } from './schema/SchemaHelper.js';
import type { IndexDef } from './typings.js';
import { NativeQueryBuilder } from './query/NativeQueryBuilder.js';
/** Base class for SQL database platforms, providing SQL generation and quoting utilities. */
export declare abstract class AbstractSqlPlatform extends Platform {
  #private;
  protected readonly schemaHelper?: SchemaHelper;
  /** SQL platforms map M:N relations via pivot tables. */
  usesPivotTable(): boolean;
  /** Whether FK columns get an index by default. */
  indexForeignKeys(): boolean;
  /** Default repository class for SQL drivers. */
  getRepositoryClass<T extends object>(): Constructor<EntityRepository<T>>;
  /** Returns the platform's schema helper, if the concrete platform provides one. */
  getSchemaHelper(): SchemaHelper | undefined;
  /** @inheritDoc */
  lookupExtensions(orm: MikroORM): void;
  /** Creates a schema generator bound to the given EM (preferred) or driver. */
  getSchemaGenerator(driver: IDatabaseDriver, em?: EntityManager): SqlSchemaGenerator;
  /** @internal */
  createNativeQueryBuilder(): NativeQueryBuilder;
  /** Returns the statement(s) that open a transaction, optionally with isolation level/read-only mode. */
  getBeginTransactionSQL(options?: { isolationLevel?: IsolationLevel; readOnly?: boolean }): string[];
  /** SQL statement used to commit a transaction. */
  getCommitTransactionSQL(): string;
  /** SQL statement used to roll back a transaction. */
  getRollbackTransactionSQL(): string;
  /** SQL for creating a named savepoint. */
  getSavepointSQL(savepointName: string): string;
  /** SQL for rolling back to a named savepoint. */
  getRollbackToSavepointSQL(savepointName: string): string;
  /** SQL for releasing a named savepoint. */
  getReleaseSavepointSQL(savepointName: string): string;
  /** Quotes/escapes a JS value for inlining into SQL (objects are serialized as JSON). */
  quoteValue(value: any): string;
  /** Builds a JSON-extraction expression from an `a->b->c` style path. */
  getSearchJsonPropertySQL(path: string, type: string, aliased: boolean): string | RawQueryFragment;
  /** Builds a JSON-extraction expression; first path segment is the column, the rest the JSON keys. */
  getSearchJsonPropertyKey(path: string[], type: string, aliased: boolean, value?: unknown): string | RawQueryFragment;
  /**
   * Quotes a key for use inside a JSON path expression (e.g. `$.key`).
   * Simple alphanumeric keys are left unquoted; others are wrapped in double quotes.
   * @internal
   */
  quoteJsonKey(key: string): string;
  /** Maps index column names to expressions, converting dotted JSON paths into extraction expressions. */
  getJsonIndexDefinition(index: IndexDef): string[];
  /** Whether the platform supports the `unionWhere` optimization (see AbstractSqlDriver.applyUnionWhere). */
  supportsUnionWhere(): boolean;
  /** Whether the platform supports multiple schemas; defaults to false here. */
  supportsSchemas(): boolean;
  /** @inheritDoc */
  generateCustomOrder(escapedColumn: string, values: unknown[]): string;
  /**
   * @internal
   */
  getOrderByExpression(column: string, direction: string, collation?: string): string[];
  /**
   * Quotes a collation name for use in COLLATE clauses.
   * @internal
   */
  quoteCollation(collation: string): string;
  /** @internal */
  protected validateCollationName(collation: string): void;
  /** @internal */
  validateJsonPropertyName(name: string): void;
  /**
   * Returns FROM clause for JSON array iteration.
   * @internal
   */
  getJsonArrayFromSQL(
    column: string,
    alias: string,
    _properties: {
      name: string;
      type: string;
    }[],
  ): string;
  /**
   * Returns SQL expression to access an element's property within a JSON array iteration.
   * @internal
   */
  getJsonArrayElementPropertySQL(alias: string, property: string, _type: string): string;
  /**
   * Wraps JSON array FROM clause and WHERE condition into a full EXISTS condition.
   * MySQL overrides this because `json_table` doesn't support correlated subqueries.
   * @internal
   */
  getJsonArrayExistsSQL(from: string, where: string): string;
  /**
   * Maps a runtime type name (e.g. 'string', 'number') to a driver-specific bind type constant.
   * Used by NativeQueryBuilder for output bindings.
   * @internal
   */
  mapToBindType(type: string): unknown;
}

168
node_modules/@mikro-orm/sql/AbstractSqlPlatform.js generated vendored Normal file
View File

@@ -0,0 +1,168 @@
import { isRaw, JsonProperty, Platform, raw, Utils } from '@mikro-orm/core';
import { SqlEntityRepository } from './SqlEntityRepository.js';
import { SqlSchemaGenerator } from './schema/SqlSchemaGenerator.js';
import { NativeQueryBuilder } from './query/NativeQueryBuilder.js';
/** Base class for SQL database platforms, providing SQL generation and quoting utilities. */
export class AbstractSqlPlatform extends Platform {
  // allow-list for JSON property names: identifier-like only (letters/digits/underscore, not starting with a digit)
  static #JSON_PROPERTY_NAME_RE = /^[a-zA-Z_][a-zA-Z0-9_]*$/;
  // schema helper is assigned by concrete platform subclasses; undefined on the abstract base
  schemaHelper;
  /** SQL platforms map M:N relations via pivot tables. */
  usesPivotTable() {
    return true;
  }
  /** FK columns get an index by default on SQL platforms. */
  indexForeignKeys() {
    return true;
  }
  /** Default repository class for SQL drivers. */
  getRepositoryClass() {
    return SqlEntityRepository;
  }
  /** Returns the platform's schema helper, if the concrete platform assigned one. */
  getSchemaHelper() {
    return this.schemaHelper;
  }
  /** @inheritDoc */
  lookupExtensions(orm) {
    // registers the schema generator extension for SQL drivers
    SqlSchemaGenerator.register(orm);
  }
  /* v8 ignore next: kept for type inference only */
  getSchemaGenerator(driver, em) {
    // prefer the EM when available, fall back to the bare driver
    return new SqlSchemaGenerator(em ?? driver);
  }
  /** Creates a fresh NativeQueryBuilder bound to this platform. @internal */
  /* v8 ignore next */
  createNativeQueryBuilder() {
    return new NativeQueryBuilder(this);
  }
getBeginTransactionSQL(options) {
if (options?.isolationLevel) {
return [`set transaction isolation level ${options.isolationLevel}`, 'begin'];
}
return ['begin'];
}
  /** SQL statement used to commit a transaction. */
  getCommitTransactionSQL() {
    return 'commit';
  }
  /** SQL statement used to roll back a transaction. */
  getRollbackTransactionSQL() {
    return 'rollback';
  }
  /** SQL for creating a named savepoint; the name is quoted as an identifier. */
  getSavepointSQL(savepointName) {
    return `savepoint ${this.quoteIdentifier(savepointName)}`;
  }
  /** SQL for rolling back to a named savepoint; the name is quoted as an identifier. */
  getRollbackToSavepointSQL(savepointName) {
    return `rollback to savepoint ${this.quoteIdentifier(savepointName)}`;
  }
  /** SQL for releasing a named savepoint; the name is quoted as an identifier. */
  getReleaseSavepointSQL(savepointName) {
    return `release savepoint ${this.quoteIdentifier(savepointName)}`;
  }
  /** Quotes/escapes a JS value for inlining into SQL (raw fragments are formatted, objects serialized as JSON). */
  quoteValue(value) {
    if (isRaw(value)) {
      return this.formatQuery(value.sql, value.params);
    }
    // plain objects and values tagged as JSON properties are stored as JSON strings
    if (Utils.isPlainObject(value) || value?.[JsonProperty]) {
      return this.escape(JSON.stringify(value));
    }
    return this.escape(value);
  }
  /** Splits an `a->b->c` JSON path and delegates to `getSearchJsonPropertyKey()`. */
  getSearchJsonPropertySQL(path, type, aliased) {
    return this.getSearchJsonPropertyKey(path.split('->'), type, aliased);
  }
  /** Builds a `json_extract(...)` expression — first path segment is the column, the rest are JSON keys. */
  getSearchJsonPropertyKey(path, type, aliased, value) {
    const [a, ...b] = path;
    if (aliased) {
      // defer quoting until the table alias is known (callback resolved later by the query builder)
      return raw(
        alias => `json_extract(${this.quoteIdentifier(`${alias}.${a}`)}, '$.${b.map(this.quoteJsonKey).join('.')}')`,
      );
    }
    return raw(`json_extract(${this.quoteIdentifier(a)}, '$.${b.map(this.quoteJsonKey).join('.')}')`);
  }
/**
* Quotes a key for use inside a JSON path expression (e.g. `$.key`).
* Simple alphanumeric keys are left unquoted; others are wrapped in double quotes.
* @internal
*/
quoteJsonKey(key) {
return /^[a-z]\w*$/i.exec(key) ? key : `"${key}"`;
}
getJsonIndexDefinition(index) {
return index.columnNames.map(column => {
if (!column.includes('.')) {
return column;
}
const [root, ...path] = column.split('.');
return `(json_extract(${root}, '$.${path.join('.')}'))`;
});
}
supportsUnionWhere() {
return true;
}
supportsSchemas() {
return false;
}
/** @inheritDoc */
generateCustomOrder(escapedColumn, values) {
let ret = '(case ';
values.forEach((v, i) => {
ret += `when ${escapedColumn} = ${this.quoteValue(v)} then ${i} `;
});
return ret + 'else null end)';
}
/**
* @internal
*/
getOrderByExpression(column, direction, collation) {
if (collation) {
return [`${column} collate ${this.quoteCollation(collation)} ${direction.toLowerCase()}`];
}
return [`${column} ${direction.toLowerCase()}`];
}
/**
* Quotes a collation name for use in COLLATE clauses.
* @internal
*/
quoteCollation(collation) {
this.validateCollationName(collation);
return this.quoteIdentifier(collation);
}
/** @internal */
validateCollationName(collation) {
if (!/^[\w]+$/.test(collation)) {
throw new Error(`Invalid collation name: '${collation}'. Collation names must contain only word characters.`);
}
}
/** @internal */
validateJsonPropertyName(name) {
if (!AbstractSqlPlatform.#JSON_PROPERTY_NAME_RE.test(name)) {
throw new Error(
`Invalid JSON property name: '${name}'. JSON property names must contain only alphanumeric characters and underscores.`,
);
}
}
/**
* Returns FROM clause for JSON array iteration.
* @internal
*/
getJsonArrayFromSQL(column, alias, _properties) {
return `json_each(${column}) as ${this.quoteIdentifier(alias)}`;
}
/**
* Returns SQL expression to access an element's property within a JSON array iteration.
* @internal
*/
getJsonArrayElementPropertySQL(alias, property, _type) {
return `${this.quoteIdentifier(alias)}.${this.quoteIdentifier(property)}`;
}
/**
* Wraps JSON array FROM clause and WHERE condition into a full EXISTS condition.
* MySQL overrides this because `json_table` doesn't support correlated subqueries.
* @internal
*/
getJsonArrayExistsSQL(from, where) {
return `exists (select 1 from ${from} where ${where})`;
}
/**
* Maps a runtime type name (e.g. 'string', 'number') to a driver-specific bind type constant.
* Used by NativeQueryBuilder for output bindings.
* @internal
*/
mapToBindType(type) {
return type;
}
}

21
node_modules/@mikro-orm/sql/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2018 Martin Adámek
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,36 @@
import {
type Dictionary,
type EntityMetadata,
type EntityProperty,
type Primary,
type Transaction,
} from '@mikro-orm/core';
import { type AbstractSqlDriver } from './AbstractSqlDriver.js';
/**
 * Collects pivot table (M:N) changes for a single entity and executes them in batches.
 */
export declare class PivotCollectionPersister<Entity extends object> {
  #private;
  constructor(
    meta: EntityMetadata<Entity>,
    driver: AbstractSqlDriver,
    ctx?: Transaction,
    schema?: string,
    loggerContext?: Dictionary,
  );
  /**
   * Queues pivot table changes for a collection property: rows from `insertDiff`
   * become inserts (or upserts when the collection was not initialized), and
   * `deleteDiff` rows become deletes (`true` removes all rows for the given `pks`).
   */
  enqueueUpdate(
    prop: EntityProperty<Entity>,
    insertDiff: Primary<Entity>[][],
    deleteDiff: Primary<Entity>[][] | boolean,
    pks: Primary<Entity>[],
    isInitialized?: boolean,
  ): void;
  private enqueueInsert;
  private enqueueUpsert;
  private createInsertStatement;
  private enqueueDelete;
  /**
   * Build the keys and data arrays for pivot table operations.
   * Handles polymorphic M:N by prepending the discriminator column/value.
   */
  private buildPivotKeysAndData;
  private collectStatements;
  /** Executes all queued statements in batches: deletes first, then inserts, then upserts. */
  execute(): Promise<void>;
}

175
node_modules/@mikro-orm/sql/PivotCollectionPersister.js generated vendored Normal file
View File

@@ -0,0 +1,175 @@
/** A single pending pivot-table insert: column keys paired with their row values. */
class InsertStatement {
  order;
  #keys;
  #data;
  constructor(keys, data, order) {
    this.#keys = keys;
    this.#data = data;
    this.order = order;
  }
  /** Serialized row values — used to de-duplicate identical inserts. */
  getHash() {
    return JSON.stringify(this.#data);
  }
  /** Zips column keys with their values into a plain data object. */
  getData() {
    return Object.fromEntries(this.#keys.map((key, idx) => [key, this.#data[idx]]));
  }
}
/** A single pending pivot-table delete: column keys paired with condition values. */
class DeleteStatement {
  #keys;
  #cond;
  constructor(keys, cond) {
    this.#keys = keys;
    this.#cond = cond;
  }
  /** Serialized condition values — used to de-duplicate identical deletes. */
  getHash() {
    return JSON.stringify(this.#cond);
  }
  /** Zips column keys with their condition values into a plain condition object. */
  getCondition() {
    return Object.fromEntries(this.#keys.map((key, idx) => [key, this.#cond[idx]]));
  }
}
/** Collects pivot table (M:N) changes for one entity and flushes them in batches. */
export class PivotCollectionPersister {
  // Statements are keyed by their hash so identical rows are queued only once.
  #inserts = new Map();
  #upserts = new Map();
  #deletes = new Map();
  // Maximum number of rows per emitted statement (from the `batchSize` config option).
  #batchSize;
  // Monotonically increasing enqueue counter; used to restore insert order in collectStatements().
  #order = 0;
  #meta;
  #driver;
  #ctx;
  #schema;
  #loggerContext;
  constructor(meta, driver, ctx, schema, loggerContext) {
    this.#meta = meta;
    this.#driver = driver;
    this.#ctx = ctx;
    this.#schema = schema;
    this.#loggerContext = loggerContext;
    this.#batchSize = this.#driver.config.get('batchSize');
  }
  /**
   * Queues pivot rows for a collection property. Added rows become inserts (or upserts
   * when the collection was not initialized); `deleteDiff === true` removes all rows
   * belonging to the given primary keys.
   */
  enqueueUpdate(prop, insertDiff, deleteDiff, pks, isInitialized = true) {
    if (insertDiff.length) {
      if (isInitialized) {
        this.enqueueInsert(prop, insertDiff, pks);
      } else {
        this.enqueueUpsert(prop, insertDiff, pks);
      }
    }
    if (deleteDiff === true || (Array.isArray(deleteDiff) && deleteDiff.length)) {
      this.enqueueDelete(prop, deleteDiff, pks);
    }
  }
  enqueueInsert(prop, insertDiff, pks) {
    for (const fks of insertDiff) {
      const statement = this.createInsertStatement(prop, fks, pks);
      const hash = statement.getHash();
      // the owning side always overwrites an existing statement with the same hash
      if (prop.owner || !this.#inserts.has(hash)) {
        this.#inserts.set(hash, statement);
      }
    }
  }
  enqueueUpsert(prop, insertDiff, pks) {
    for (const fks of insertDiff) {
      const statement = this.createInsertStatement(prop, fks, pks);
      const hash = statement.getHash();
      // same de-duplication rule as enqueueInsert()
      if (prop.owner || !this.#upserts.has(hash)) {
        this.#upserts.set(hash, statement);
      }
    }
  }
  createInsertStatement(prop, fks, pks) {
    const { data, keys } = this.buildPivotKeysAndData(prop, fks, pks);
    return new InsertStatement(keys, data, this.#order++);
  }
  enqueueDelete(prop, deleteDiff, pks) {
    // `true` means "remove everything for these pks" — match on the join columns only
    if (deleteDiff === true) {
      const { data, keys } = this.buildPivotKeysAndData(prop, [], pks, true);
      const statement = new DeleteStatement(keys, data);
      this.#deletes.set(statement.getHash(), statement);
      return;
    }
    for (const fks of deleteDiff) {
      const { data, keys } = this.buildPivotKeysAndData(prop, fks, pks);
      const statement = new DeleteStatement(keys, data);
      this.#deletes.set(statement.getHash(), statement);
    }
  }
  /**
   * Build the keys and data arrays for pivot table operations.
   * On the owning side FK values precede PK values (paired with inverse join columns
   * before join columns); the inverse side is mirrored. `deleteAll` matches only on
   * the join columns/pks. Handles polymorphic M:N by prepending the discriminator
   * column/value.
   */
  buildPivotKeysAndData(prop, fks, pks, deleteAll = false) {
    let data;
    let keys;
    if (deleteAll) {
      data = pks;
      keys = prop.joinColumns;
    } else {
      data = prop.owner ? [...fks, ...pks] : [...pks, ...fks];
      keys = prop.owner
        ? [...prop.inverseJoinColumns, ...prop.joinColumns]
        : [...prop.joinColumns, ...prop.inverseJoinColumns];
    }
    if (prop.polymorphic && prop.discriminatorColumn && prop.discriminatorValue) {
      data = [prop.discriminatorValue, ...data];
      keys = [prop.discriminatorColumn, ...keys];
    }
    return { data, keys };
  }
  // Returns statement data ordered by enqueue order; holes left by de-duplication
  // (skipped `order` indexes) are dropped by the filter.
  collectStatements(statements) {
    const items = [];
    for (const statement of statements.values()) {
      items[statement.order] = statement.getData();
    }
    return items.filter(Boolean);
  }
  /** Flushes queued statements in batches: deletes first, then inserts, then upserts. */
  async execute() {
    if (this.#deletes.size > 0) {
      const deletes = [...this.#deletes.values()];
      for (let i = 0; i < deletes.length; i += this.#batchSize) {
        const chunk = deletes.slice(i, i + this.#batchSize);
        // combine all conditions of this chunk into a single `$or` delete query
        const cond = { $or: [] };
        for (const item of chunk) {
          cond.$or.push(item.getCondition());
        }
        await this.#driver.nativeDelete(this.#meta.class, cond, {
          ctx: this.#ctx,
          schema: this.#schema,
          loggerContext: this.#loggerContext,
        });
      }
    }
    if (this.#inserts.size > 0) {
      const filtered = this.collectStatements(this.#inserts);
      for (let i = 0; i < filtered.length; i += this.#batchSize) {
        const chunk = filtered.slice(i, i + this.#batchSize);
        await this.#driver.nativeInsertMany(this.#meta.class, chunk, {
          ctx: this.#ctx,
          schema: this.#schema,
          convertCustomTypes: false,
          processCollections: false,
          loggerContext: this.#loggerContext,
        });
      }
    }
    if (this.#upserts.size > 0) {
      const filtered = this.collectStatements(this.#upserts);
      for (let i = 0; i < filtered.length; i += this.#batchSize) {
        const chunk = filtered.slice(i, i + this.#batchSize);
        // upsert with conflicts ignored, so already-existing pivot rows are kept as is
        await this.#driver.nativeUpdateMany(this.#meta.class, [], chunk, {
          ctx: this.#ctx,
          schema: this.#schema,
          convertCustomTypes: false,
          processCollections: false,
          upsert: true,
          onConflictAction: 'ignore',
          loggerContext: this.#loggerContext,
        });
      }
    }
  }
}

225
node_modules/@mikro-orm/sql/README.md generated vendored Normal file
View File

@@ -0,0 +1,225 @@
<h1 align="center">
<a href="https://mikro-orm.io"><img src="https://raw.githubusercontent.com/mikro-orm/mikro-orm/master/docs/static/img/logo-readme.svg?sanitize=true" alt="MikroORM" /></a>
</h1>
TypeScript ORM for Node.js based on Data Mapper, [Unit of Work](https://mikro-orm.io/docs/unit-of-work/) and [Identity Map](https://mikro-orm.io/docs/identity-map/) patterns. Supports MongoDB, MySQL, MariaDB, PostgreSQL, SQLite (including libSQL), MSSQL and Oracle databases.
> Heavily inspired by [Doctrine](https://www.doctrine-project.org/) and [Hibernate](https://hibernate.org/).
[![NPM version](https://img.shields.io/npm/v/@mikro-orm/core.svg)](https://npmx.dev/package/@mikro-orm/core)
[![NPM dev version](https://img.shields.io/npm/v/@mikro-orm/core/next.svg)](https://npmx.dev/package/@mikro-orm/core)
[![Chat on discord](https://img.shields.io/discord/1214904142443839538?label=discord&color=blue)](https://discord.gg/w8bjxFHS7X)
[![Downloads](https://img.shields.io/npm/dm/@mikro-orm/core.svg)](https://npmx.dev/package/@mikro-orm/core)
[![Coverage Status](https://img.shields.io/coveralls/mikro-orm/mikro-orm.svg)](https://coveralls.io/r/mikro-orm/mikro-orm?branch=master)
[![Build Status](https://github.com/mikro-orm/mikro-orm/workflows/tests/badge.svg?branch=master)](https://github.com/mikro-orm/mikro-orm/actions?workflow=tests)
## Quick Start
Install a driver package for your database:
```sh
npm install @mikro-orm/postgresql # PostgreSQL
npm install @mikro-orm/mysql # MySQL
npm install @mikro-orm/mariadb # MariaDB
npm install @mikro-orm/sqlite # SQLite
npm install @mikro-orm/libsql # libSQL / Turso
npm install @mikro-orm/mongodb # MongoDB
npm install @mikro-orm/mssql # MS SQL Server
npm install @mikro-orm/oracledb # Oracle
```
> If you use additional packages like `@mikro-orm/cli`, `@mikro-orm/migrations`, or `@mikro-orm/entity-generator`, install `@mikro-orm/core` explicitly as well. See the [quick start guide](https://mikro-orm.io/docs/quick-start) for details.
### Define Entities
The recommended way to define entities is using [`defineEntity`](https://mikro-orm.io/docs/define-entity) with `setClass`:
```typescript
import { defineEntity, p, MikroORM } from '@mikro-orm/postgresql';
const AuthorSchema = defineEntity({
name: 'Author',
properties: {
id: p.integer().primary(),
name: p.string(),
email: p.string(),
born: p.datetime().nullable(),
books: () => p.oneToMany(Book).mappedBy('author'),
},
});
export class Author extends AuthorSchema.class {}
AuthorSchema.setClass(Author);
const BookSchema = defineEntity({
name: 'Book',
properties: {
id: p.integer().primary(),
title: p.string(),
author: () => p.manyToOne(Author).inversedBy('books'),
},
});
export class Book extends BookSchema.class {}
BookSchema.setClass(Book);
```
You can also define entities using [decorators](https://mikro-orm.io/docs/defining-entities) or [`EntitySchema`](https://mikro-orm.io/docs/entity-schema). See the [defining entities guide](https://mikro-orm.io/docs/defining-entities) for all options.
### Initialize and Use
```typescript
import { MikroORM, RequestContext } from '@mikro-orm/postgresql';
const orm = await MikroORM.init({
entities: [Author, Book],
dbName: 'my-db',
});
// Create new entities
const author = orm.em.create(Author, {
name: 'Jon Snow',
email: 'snow@wall.st',
});
const book = orm.em.create(Book, {
title: 'My Life on The Wall',
author,
});
// Flush persists all tracked changes in a single transaction
await orm.em.flush();
```
### Querying
```typescript
// Find with relations
const authors = await orm.em.findAll(Author, {
populate: ['books'],
orderBy: { name: 'asc' },
});
// Type-safe QueryBuilder
const qb = orm.em.createQueryBuilder(Author);
const result = await qb
.select('*')
.where({ books: { title: { $like: '%Wall%' } } })
.getResult();
```
### Request Context
In web applications, use `RequestContext` to isolate the identity map per request:
```typescript
const app = express();
app.use((req, res, next) => {
RequestContext.create(orm.em, next);
});
```
More info about `RequestContext` is described [here](https://mikro-orm.io/docs/identity-map/#request-context).
## Unit of Work
> Unit of Work maintains a list of objects (_entities_) affected by a business transaction
> and coordinates the writing out of changes. [(Martin Fowler)](https://www.martinfowler.com/eaaCatalog/unitOfWork.html)
When you call `em.flush()`, all computed changes are executed inside a database transaction. This means you can control transaction boundaries simply by making changes to your entities and calling `flush()` when ready.
```typescript
const author = await em.findOneOrFail(Author, 1, {
populate: ['books'],
});
author.name = 'Jon Snow II';
author.books.getItems().forEach(book => book.title += ' (2nd ed.)');
author.books.add(orm.em.create(Book, { title: 'New Book', author }));
// Flush computes change sets and executes them in a single transaction
await em.flush();
```
The above flush will execute:
```sql
begin;
update "author" set "name" = 'Jon Snow II' where "id" = 1;
update "book"
set "title" = case
when ("id" = 1) then 'My Life on The Wall (2nd ed.)'
when ("id" = 2) then 'Another Book (2nd ed.)'
else "title" end
where "id" in (1, 2);
insert into "book" ("title", "author_id") values ('New Book', 1);
commit;
```
## Core Features
- [Clean and Simple Entity Definition](https://mikro-orm.io/docs/defining-entities) — decorators, `EntitySchema`, or `defineEntity`
- [Identity Map](https://mikro-orm.io/docs/identity-map) and [Unit of Work](https://mikro-orm.io/docs/unit-of-work) — automatic change tracking
- [Entity References](https://mikro-orm.io/docs/entity-references) and [Collections](https://mikro-orm.io/docs/collections)
- [QueryBuilder](https://mikro-orm.io/docs/query-builder) and [Kysely Integration](https://mikro-orm.io/docs/kysely)
- [Transactions](https://mikro-orm.io/docs/transactions) and [Cascading](https://mikro-orm.io/docs/cascading)
- [Populating Relations](https://mikro-orm.io/docs/populating-relations) and [Loading Strategies](https://mikro-orm.io/docs/loading-strategies)
- [Filters](https://mikro-orm.io/docs/filters) and [Lifecycle Hooks](https://mikro-orm.io/docs/events#hooks)
- [Schema Generator](https://mikro-orm.io/docs/schema-generator) and [Migrations](https://mikro-orm.io/docs/migrations)
- [Entity Generator](https://mikro-orm.io/docs/entity-generator) and [Seeding](https://mikro-orm.io/docs/seeding)
- [Embeddables](https://mikro-orm.io/docs/embeddables), [Custom Types](https://mikro-orm.io/docs/custom-types), and [Serialization](https://mikro-orm.io/docs/serializing)
- [Composite and Foreign Keys as Primary Key](https://mikro-orm.io/docs/composite-keys)
- [Entity Constructors](https://mikro-orm.io/docs/entity-constructors) and [Property Validation](https://mikro-orm.io/docs/property-validation)
- [Modelling Relationships](https://mikro-orm.io/docs/relationships) and [Vanilla JS Support](https://mikro-orm.io/docs/usage-with-js)
## Documentation
MikroORM documentation, included in this repo in the root directory, is built with [Docusaurus](https://docusaurus.io) and publicly hosted on GitHub Pages at https://mikro-orm.io.
There is also auto-generated [CHANGELOG.md](CHANGELOG.md) file based on commit messages (via `semantic-release`).
## Example Integrations
You can find example integrations for some popular frameworks in the [`mikro-orm-examples` repository](https://github.com/mikro-orm/mikro-orm-examples):
### TypeScript Examples
- [Express + MongoDB](https://github.com/mikro-orm/express-ts-example-app)
- [Nest + MySQL](https://github.com/mikro-orm/nestjs-example-app)
- [RealWorld example app (Nest + MySQL)](https://github.com/mikro-orm/nestjs-realworld-example-app)
- [Koa + SQLite](https://github.com/mikro-orm/koa-ts-example-app)
- [GraphQL + PostgreSQL](https://github.com/driescroons/mikro-orm-graphql-example)
- [Inversify + PostgreSQL](https://github.com/PodaruDragos/inversify-example-app)
- [NextJS + MySQL](https://github.com/jonahallibone/mikro-orm-nextjs)
- [Accounts.js REST and GraphQL authentication + SQLite](https://github.com/darkbasic/mikro-orm-accounts-example)
- [Nest + Shopify + PostgreSQL + GraphQL](https://github.com/Cloudshelf/Shopify_CSConnector)
- [Elysia.js + libSQL + Bun](https://github.com/mikro-orm/elysia-bun-example-app)
- [Electron.js + PostgreSQL](https://github.com/adnanlah/electron-mikro-orm-example-app)
### JavaScript Examples
- [Express + SQLite](https://github.com/mikro-orm/express-js-example-app)
## Contributing
Contributions, issues and feature requests are welcome. Please read [CONTRIBUTING.md](CONTRIBUTING.md) for details on the process for submitting pull requests to us.
## Authors
**Martin Adámek**
- Twitter: [@B4nan](https://twitter.com/B4nan)
- Github: [@b4nan](https://github.com/b4nan)
See also the list of contributors who [participated](https://github.com/mikro-orm/mikro-orm/contributors) in this project.
## Show Your Support
Please star this repository if this project helped you!
> If you'd like to support my open-source work, consider sponsoring me directly at [github.com/sponsors/b4nan](https://github.com/sponsors/b4nan).
## License
Copyright © 2018-present [Martin Adámek](https://github.com/b4nan).
This project is licensed under the MIT License - see the [LICENSE file](LICENSE) for details.

82
node_modules/@mikro-orm/sql/SqlEntityManager.d.ts generated vendored Normal file
View File

@@ -0,0 +1,82 @@
import {
type EntitySchemaWithMeta,
EntityManager,
type AnyEntity,
type ConnectionType,
type EntityData,
type EntityName,
type EntityRepository,
type GetRepository,
type QueryResult,
type FilterQuery,
type LoggingOptions,
type RawQueryFragment,
} from '@mikro-orm/core';
import type { AbstractSqlDriver } from './AbstractSqlDriver.js';
import type { NativeQueryBuilder } from './query/NativeQueryBuilder.js';
import type { QueryBuilder } from './query/QueryBuilder.js';
import type { SqlEntityRepository } from './SqlEntityRepository.js';
import type { Kysely } from 'kysely';
import type { InferClassEntityDB, InferKyselyDB } from './typings.js';
import { type MikroKyselyPluginOptions } from './plugin/index.js';
/** Options for `SqlEntityManager.getKysely()`. */
export interface GetKyselyOptions extends MikroKyselyPluginOptions {
  /** Connection type to use (`'read'` or `'write'`). */
  type?: ConnectionType;
}
/**
 * @inheritDoc
 */
export declare class SqlEntityManager<
  Driver extends AbstractSqlDriver = AbstractSqlDriver,
> extends EntityManager<Driver> {
  /**
   * Creates a QueryBuilder instance
   */
  createQueryBuilder<Entity extends object, RootAlias extends string = never>(
    entityName: EntityName<Entity> | QueryBuilder<Entity>,
    alias?: RootAlias,
    type?: ConnectionType,
    loggerContext?: LoggingOptions,
  ): QueryBuilder<Entity, RootAlias>;
  /**
   * Shortcut for `createQueryBuilder()`
   */
  qb<Entity extends object, RootAlias extends string = never>(
    entityName: EntityName<Entity>,
    alias?: RootAlias,
    type?: ConnectionType,
    loggerContext?: LoggingOptions,
  ): QueryBuilder<Entity, RootAlias>;
  /**
   * Returns configured Kysely instance. The database shape is inferred from the
   * manager's registered entities unless an explicit `TDB` type argument is given.
   */
  getKysely<TDB = undefined, TOptions extends GetKyselyOptions = GetKyselyOptions>(
    options?: TOptions,
  ): Kysely<
    TDB extends undefined
      ? InferKyselyDB<EntitiesFromManager<this>, TOptions> & InferClassEntityDB<AllEntitiesFromManager<this>, TOptions>
      : TDB
  >;
  /** Executes a raw SQL query, using the current transaction context if available. */
  execute<T extends QueryResult | EntityData<AnyEntity> | EntityData<AnyEntity>[] = EntityData<AnyEntity>[]>(
    query: string | NativeQueryBuilder | RawQueryFragment,
    params?: any[],
    method?: 'all' | 'get' | 'run',
    loggerContext?: LoggingOptions,
  ): Promise<T>;
  getRepository<T extends object, U extends EntityRepository<T> = SqlEntityRepository<T>>(
    entityName: EntityName<T>,
  ): GetRepository<T, U>;
  protected applyDiscriminatorCondition<Entity extends object>(
    entityName: EntityName<Entity>,
    where: FilterQuery<Entity>,
  ): FilterQuery<Entity>;
}
/** Entity schemas (with metadata) registered on the given EntityManager type. */
type EntitiesFromManager<TEntityManager extends EntityManager<any>> =
  NonNullable<TEntityManager['~entities']> extends any[]
    ? Extract<NonNullable<TEntityManager['~entities']>[number], EntitySchemaWithMeta>
    : never;
/** All entity types (schemas and classes) registered on the given EntityManager type. */
type AllEntitiesFromManager<TEntityManager extends EntityManager<any>> =
  NonNullable<TEntityManager['~entities']> extends any[] ? NonNullable<TEntityManager['~entities']>[number] : never;
export {};

61
node_modules/@mikro-orm/sql/SqlEntityManager.js generated vendored Normal file
View File

@@ -0,0 +1,61 @@
import { EntityManager } from '@mikro-orm/core';
import { MikroKyselyPlugin } from './plugin/index.js';
/**
 * @inheritDoc
 */
export class SqlEntityManager extends EntityManager {
  /**
   * Creates a QueryBuilder instance
   */
  createQueryBuilder(entityName, alias, type, loggerContext) {
    const ctx = this.getContext(false);
    const loggingOptions = loggerContext ?? ctx.loggerContext;
    return this.driver.createQueryBuilder(entityName, ctx.getTransactionContext(), type, true, loggingOptions, alias, this);
  }
  /**
   * Shortcut for `createQueryBuilder()`
   */
  qb(entityName, alias, type, loggerContext) {
    return this.createQueryBuilder(entityName, alias, type, loggerContext);
  }
  /**
   * Returns configured Kysely instance, wrapped with the Mikro plugin whenever any
   * plugin-related option is provided.
   */
  getKysely(options = {}) {
    const client = this.getConnection(options.type).getClient();
    const pluginOptions = [
      options.columnNamingStrategy,
      options.tableNamingStrategy,
      options.processOnCreateHooks,
      options.processOnUpdateHooks,
      options.convertValues,
    ];
    if (pluginOptions.some(option => option != null)) {
      return client.withPlugin(new MikroKyselyPlugin(this, options));
    }
    return client;
  }
  /** Executes a raw SQL query, using the current transaction context if available. */
  async execute(query, params = [], method = 'all', loggerContext) {
    const trx = this.getContext(false).getTransactionContext();
    return this.getDriver().execute(query, params, method, trx, loggerContext);
  }
  getRepository(entityName) {
    return super.getRepository(entityName);
  }
  applyDiscriminatorCondition(entityName, where) {
    // discriminator conditions are applied by the QueryBuilder for SQL drivers
    return where;
  }
}

20
node_modules/@mikro-orm/sql/SqlEntityRepository.d.ts generated vendored Normal file
View File

@@ -0,0 +1,20 @@
import { EntityRepository, type EntityName } from '@mikro-orm/core';
import type { SqlEntityManager } from './SqlEntityManager.js';
import type { QueryBuilder } from './query/QueryBuilder.js';
/** SQL-specific entity repository with QueryBuilder support. */
export declare class SqlEntityRepository<Entity extends object> extends EntityRepository<Entity> {
  protected readonly em: SqlEntityManager;
  constructor(em: SqlEntityManager, entityName: EntityName<Entity>);
  /**
   * Creates a QueryBuilder instance for this repository's entity.
   */
  createQueryBuilder<RootAlias extends string = never>(alias?: RootAlias): QueryBuilder<Entity, RootAlias>;
  /**
   * Shortcut for `createQueryBuilder()`
   */
  qb<RootAlias extends string = never>(alias?: RootAlias): QueryBuilder<Entity, RootAlias>;
  /**
   * @inheritDoc
   */
  getEntityManager(): SqlEntityManager;
}

27
node_modules/@mikro-orm/sql/SqlEntityRepository.js generated vendored Normal file
View File

@@ -0,0 +1,27 @@
import { EntityRepository } from '@mikro-orm/core';
/** SQL-specific entity repository with QueryBuilder support. */
export class SqlEntityRepository extends EntityRepository {
  em;
  constructor(em, entityName) {
    super(em, entityName);
    this.em = em;
  }
  /**
   * Creates a QueryBuilder instance for this repository's entity.
   */
  createQueryBuilder(alias) {
    const em = this.getEntityManager();
    return em.createQueryBuilder(this.entityName, alias);
  }
  /**
   * Shortcut for `createQueryBuilder()`
   */
  qb(alias) {
    return this.createQueryBuilder(alias);
  }
  /**
   * @inheritDoc
   */
  getEntityManager() {
    return this.em;
  }
}

5
node_modules/@mikro-orm/sql/dialects/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,5 @@
export * from './mssql/index.js';
export * from './mysql/index.js';
export * from './postgresql/index.js';
export * from './sqlite/index.js';
export * from './oracledb/index.js';

5
node_modules/@mikro-orm/sql/dialects/index.js generated vendored Normal file
View File

@@ -0,0 +1,5 @@
export * from './mssql/index.js';
export * from './mysql/index.js';
export * from './postgresql/index.js';
export * from './sqlite/index.js';
export * from './oracledb/index.js';

View File

@@ -0,0 +1,16 @@
import { NativeQueryBuilder } from '../../query/NativeQueryBuilder.js';
/**
 * Compiles queries into MSSQL-flavoured SQL (`top` pagination, `merge` upserts,
 * `output` temp tables, `@@rowcount` counts).
 * @internal
 */
export declare class MsSqlNativeQueryBuilder extends NativeQueryBuilder {
  /** Compiles the query into SQL text and ordered bind parameters. */
  compile(): {
    sql: string;
    params: unknown[];
  };
  protected compileInsert(): void;
  /** Builds the `#out` temp-table prefix/suffix used for OUTPUT_TABLE queries. */
  private appendOutputTable;
  /** Compiles upserts via the `merge` statement. */
  private compileUpsert;
  protected compileSelect(): void;
  /** Adds `with (holdlock)` / `with (updlock)` table hints for pessimistic locks. */
  protected addLockClause(): void;
  /** Empties the table via `delete` and reseeds the identity column. */
  protected compileTruncate(): void;
  /** MSSQL has no RECURSIVE keyword — CTEs are implicitly recursive. */
  protected getCteKeyword(_hasRecursive: boolean): string;
}

View File

@@ -0,0 +1,202 @@
import { LockMode, QueryFlag, RawQueryFragment, Utils } from '@mikro-orm/core';
import { NativeQueryBuilder } from '../../query/NativeQueryBuilder.js';
import { QueryType } from '../../query/enums.js';
/** @internal */
export class MsSqlNativeQueryBuilder extends NativeQueryBuilder {
compile() {
if (!this.type) {
throw new Error('No query type provided');
}
this.parts.length = 0;
this.params.length = 0;
if (this.options.flags?.has(QueryFlag.IDENTITY_INSERT)) {
this.parts.push(`set identity_insert ${this.getTableName()} on;`);
}
const { prefix, suffix } = this.appendOutputTable();
if (prefix) {
this.parts.push(prefix);
}
if (this.options.comment) {
this.parts.push(...this.options.comment.map(comment => `/* ${comment} */`));
}
this.compileCtes();
if (this.options.onConflict && !Utils.isEmpty(Utils.asArray(this.options.data)[0])) {
this.compileUpsert();
} else {
switch (this.type) {
case QueryType.SELECT:
case QueryType.COUNT:
this.compileSelect();
break;
case QueryType.INSERT:
this.compileInsert();
break;
case QueryType.UPDATE:
this.compileUpdate();
break;
case QueryType.DELETE:
this.compileDelete();
break;
case QueryType.TRUNCATE:
this.compileTruncate();
break;
}
if (suffix) {
this.parts[this.parts.length - 1] += ';';
this.parts.push(suffix);
} else if ([QueryType.INSERT, QueryType.UPDATE, QueryType.DELETE].includes(this.type)) {
this.parts[this.parts.length - 1] += '; select @@rowcount;';
}
}
if (this.options.flags?.has(QueryFlag.IDENTITY_INSERT)) {
this.parts.push(`set identity_insert ${this.getTableName()} off;`);
}
return this.combineParts();
}
compileInsert() {
if (!this.options.data) {
throw new Error('No data provided');
}
this.parts.push('insert');
this.addHintComment();
this.parts.push(`into ${this.getTableName()}`);
if (Object.keys(this.options.data).length === 0) {
this.addOutputClause('inserted');
this.parts.push('default values');
return;
}
const parts = this.processInsertData();
if (this.options.flags?.has(QueryFlag.OUTPUT_TABLE)) {
this.parts[this.parts.length - 2] += ' into #out ';
}
this.parts.push(parts.join(', '));
}
appendOutputTable() {
if (!this.options.flags?.has(QueryFlag.OUTPUT_TABLE)) {
return { prefix: '', suffix: '' };
}
const returningFields = this.options.returning;
const selections = returningFields.map(field => `[t].${this.platform.quoteIdentifier(field)}`).join(',');
return {
prefix: `select top(0) ${selections} into #out from ${this.getTableName()} as t left join ${this.getTableName()} on 0 = 1;`,
suffix: `select ${selections} from #out as t; drop table #out`,
};
}
compileUpsert() {
const clause = this.options.onConflict;
const dataAsArray = Utils.asArray(this.options.data);
const keys = Object.keys(dataAsArray[0]);
const values = keys.map(() => '?');
const parts = [];
for (const data of dataAsArray) {
for (const key of keys) {
this.params.push(data[key]);
}
parts.push(`(${values.join(', ')})`);
}
this.parts.push(`merge into ${this.getTableName()}`);
this.parts.push(`using (values ${parts.join(', ')}) as tsource(${keys.map(key => this.quote(key)).join(', ')})`);
if (clause.fields instanceof RawQueryFragment) {
this.parts.push(clause.fields.sql);
this.params.push(...clause.fields.params);
} else if (clause.fields.length > 0) {
const fields = clause.fields.map(field => {
const col = this.quote(field);
return `${this.getTableName()}.${col} = tsource.${col}`;
});
this.parts.push(`on ${fields.join(' and ')}`);
}
const sourceColumns = keys.map(field => `tsource.${this.quote(field)}`).join(', ');
const destinationColumns = keys.map(field => this.quote(field)).join(', ');
this.parts.push(`when not matched then insert (${destinationColumns}) values (${sourceColumns})`);
if (!clause.ignore) {
this.parts.push('when matched');
if (clause.where) {
this.parts.push(`and ${clause.where.sql}`);
this.params.push(...clause.where.params);
}
this.parts.push('then update set');
if (!clause.merge || Array.isArray(clause.merge)) {
const parts = (clause.merge || keys)
.filter(field => !Array.isArray(clause.fields) || !clause.fields.includes(field))
.map(column => `${this.quote(column)} = tsource.${this.quote(column)}`);
this.parts.push(parts.join(', '));
} else if (typeof clause.merge === 'object') {
const parts = Object.entries(clause.merge).map(([key, value]) => {
this.params.push(value);
return `${this.getTableName()}.${this.quote(key)} = ?`;
});
this.parts.push(parts.join(', '));
}
}
this.addOutputClause('inserted');
this.parts[this.parts.length - 1] += ';';
}
compileSelect() {
this.parts.push('select');
if (this.options.limit != null && this.options.offset == null) {
this.parts.push(`top (?)`);
this.params.push(this.options.limit);
}
this.addHintComment();
this.parts.push(`${this.getFields()} from ${this.getTableName()}`);
this.addLockClause();
if (this.options.joins) {
for (const join of this.options.joins) {
this.parts.push(join.sql);
this.params.push(...join.params);
}
}
if (this.options.where?.sql.trim()) {
this.parts.push(`where ${this.options.where.sql}`);
this.params.push(...this.options.where.params);
}
if (this.options.groupBy) {
const fields = this.options.groupBy.map(field => this.quote(field));
this.parts.push(`group by ${fields.join(', ')}`);
}
if (this.options.having) {
this.parts.push(`having ${this.options.having.sql}`);
this.params.push(...this.options.having.params);
}
if (this.options.orderBy) {
this.parts.push(`order by ${this.options.orderBy}`);
}
if (this.options.offset != null) {
/* v8 ignore next */
if (!this.options.orderBy) {
throw new Error('Order by clause is required for pagination');
}
this.parts.push(`offset ? rows`);
this.params.push(this.options.offset);
if (this.options.limit != null) {
this.parts.push(`fetch next ? rows only`);
this.params.push(this.options.limit);
}
}
}
addLockClause() {
if (
!this.options.lockMode ||
![LockMode.PESSIMISTIC_READ, LockMode.PESSIMISTIC_WRITE].includes(this.options.lockMode)
) {
return;
}
const map = {
[LockMode.PESSIMISTIC_READ]: 'with (holdlock)',
[LockMode.PESSIMISTIC_WRITE]: 'with (updlock)',
};
if (this.options.lockMode !== LockMode.OPTIMISTIC) {
this.parts.push(map[this.options.lockMode]);
}
}
/**
 * Emulates TRUNCATE for MSSQL: deletes all rows, then reseeds the identity
 * column via `dbcc checkident` so the next inserted row gets identity 1.
 */
compileTruncate() {
  const tableName = this.getTableName();
  // `@count` picks the reseed value based on @@rowcount of the delete: a table
  // that contained rows is reseeded to 0 (next identity = reseed + 1 = 1), while
  // a table that was already empty is reseeded to 1 (used as-is). Square
  // brackets are stripped because dbcc checkident takes the table name as a
  // quoted string, not a bracketed identifier.
  const sql = `delete from ${tableName}; declare @count int = case @@rowcount when 0 then 1 else 0 end; dbcc checkident ('${tableName.replace(/[[\]]/g, '')}', reseed, @count)`;
  this.parts.push(sql);
}
/**
 * MSSQL has no RECURSIVE keyword — CTEs are implicitly recursive, so the
 * keyword is always plain `with` regardless of whether any CTE is recursive.
 */
getCteKeyword(_hasRecursive) {
  return 'with';
}
}

View File

@@ -0,0 +1 @@
export * from './MsSqlNativeQueryBuilder.js';

1
node_modules/@mikro-orm/sql/dialects/mssql/index.js generated vendored Normal file
View File

@@ -0,0 +1 @@
export * from './MsSqlNativeQueryBuilder.js';

View File

@@ -0,0 +1,71 @@
import {
type SimpleColumnMeta,
type Type,
type TransformContext,
type MikroORM,
type IsolationLevel,
} from '@mikro-orm/core';
import { MySqlSchemaHelper } from './MySqlSchemaHelper.js';
import { MySqlExceptionConverter } from './MySqlExceptionConverter.js';
import { AbstractSqlPlatform } from '../../AbstractSqlPlatform.js';
import type { IndexDef } from '../../typings.js';
import { MySqlNativeQueryBuilder } from './MySqlNativeQueryBuilder.js';
/**
 * Base platform for MySQL-compatible drivers (MySQL/MariaDB), providing
 * dialect-specific SQL generation, default type mapping, index naming limits
 * and JSON helpers.
 */
export declare class BaseMySqlPlatform extends AbstractSqlPlatform {
  #private;
  protected readonly schemaHelper: MySqlSchemaHelper;
  protected readonly exceptionConverter: MySqlExceptionConverter;
  /** Extra sort keys used to emulate `nulls first/last`, which MySQL lacks. */
  protected readonly ORDER_BY_NULLS_TRANSLATE: {
    readonly 'asc nulls first': 'is not null';
    readonly 'asc nulls last': 'is null';
    readonly 'desc nulls first': 'is not null';
    readonly 'desc nulls last': 'is null';
  };
  /** @internal */
  createNativeQueryBuilder(): MySqlNativeQueryBuilder;
  getDefaultCharset(): string;
  init(orm: MikroORM): void;
  /** Returns `set transaction ...` (when options are given) followed by `begin`. */
  getBeginTransactionSQL(options?: { isolationLevel?: IsolationLevel; readOnly?: boolean }): string[];
  convertJsonToDatabaseValue(value: unknown, context?: TransformContext): unknown;
  /** Maps dotted JSON paths to `json_value()` expressions for index definitions. */
  getJsonIndexDefinition(index: IndexDef): string[];
  getBooleanTypeDeclarationSQL(): string;
  normalizeColumnType(
    type: string,
    options: {
      length?: number;
      precision?: number;
      scale?: number;
    },
  ): string;
  getDefaultMappedType(type: string): Type<unknown>;
  isNumericColumn(mappedType: Type<unknown>): boolean;
  supportsUnsigned(): boolean;
  /**
   * Returns the default name of index for the given columns
   * cannot go past 64 character length for identifiers in MySQL
   */
  getIndexName(
    tableName: string,
    columns: string[],
    type: 'index' | 'unique' | 'foreign' | 'primary' | 'sequence',
  ): string;
  getDefaultPrimaryName(tableName: string, columns: string[]): string;
  supportsCreatingFullTextIndex(): boolean;
  getFullTextWhereClause(): string;
  /** Builds the `alter table ... add fulltext index` DDL for the given columns. */
  getFullTextIndexExpression(
    indexName: string,
    schemaName: string | undefined,
    tableName: string,
    columns: SimpleColumnMeta[],
  ): string;
  /** Returns one or two order-by expressions (the extra key emulates nulls ordering). */
  getOrderByExpression(column: string, direction: string, collation?: string): string[];
  /** Builds a `json_table(...)` expression exposing JSON array items as typed columns. */
  getJsonArrayFromSQL(
    column: string,
    alias: string,
    properties: {
      name: string;
      type: string;
    }[],
  ): string;
  getJsonArrayExistsSQL(from: string, where: string): string;
  getDefaultClientUrl(): string;
}

View File

@@ -0,0 +1,140 @@
import { Utils, QueryOrder, DecimalType, DoubleType } from '@mikro-orm/core';
import { MySqlSchemaHelper } from './MySqlSchemaHelper.js';
import { MySqlExceptionConverter } from './MySqlExceptionConverter.js';
import { AbstractSqlPlatform } from '../../AbstractSqlPlatform.js';
import { MySqlNativeQueryBuilder } from './MySqlNativeQueryBuilder.js';
export class BaseMySqlPlatform extends AbstractSqlPlatform {
schemaHelper = new MySqlSchemaHelper(this);
exceptionConverter = new MySqlExceptionConverter();
#jsonTypeCasts = {
string: 'text',
number: 'double',
bigint: 'bigint',
boolean: 'unsigned',
};
ORDER_BY_NULLS_TRANSLATE = {
[QueryOrder.asc_nulls_first]: 'is not null',
[QueryOrder.asc_nulls_last]: 'is null',
[QueryOrder.desc_nulls_first]: 'is not null',
[QueryOrder.desc_nulls_last]: 'is null',
};
/** @internal */
createNativeQueryBuilder() {
return new MySqlNativeQueryBuilder(this);
}
getDefaultCharset() {
return 'utf8mb4';
}
init(orm) {
super.init(orm);
orm.config.get('schemaGenerator').disableForeignKeysForClear ??= true;
}
getBeginTransactionSQL(options) {
if (options?.isolationLevel || options?.readOnly) {
const parts = [];
if (options.isolationLevel) {
parts.push(`isolation level ${options.isolationLevel}`);
}
if (options.readOnly) {
parts.push('read only');
}
const sql = `set transaction ${parts.join(', ')}`;
return [sql, 'begin'];
}
return ['begin'];
}
convertJsonToDatabaseValue(value, context) {
if (context?.mode === 'query') {
return value;
}
return JSON.stringify(value);
}
getJsonIndexDefinition(index) {
return index.columnNames.map(column => {
if (!column.includes('.')) {
return column;
}
const [root, ...path] = column.split('.');
return `(json_value(${this.quoteIdentifier(root)}, '$.${path.join('.')}' returning ${index.options?.returning ?? 'char(255)'}))`;
});
}
getBooleanTypeDeclarationSQL() {
return 'tinyint(1)';
}
normalizeColumnType(type, options) {
const simpleType = this.extractSimpleType(type);
if (['decimal', 'numeric'].includes(simpleType)) {
return this.getDecimalTypeDeclarationSQL(options);
}
return type;
}
getDefaultMappedType(type) {
if (type === 'tinyint(1)') {
return super.getDefaultMappedType('boolean');
}
return super.getDefaultMappedType(type);
}
isNumericColumn(mappedType) {
return super.isNumericColumn(mappedType) || [DecimalType, DoubleType].some(t => mappedType instanceof t);
}
supportsUnsigned() {
return true;
}
/**
* Returns the default name of index for the given columns
* cannot go past 64 character length for identifiers in MySQL
*/
getIndexName(tableName, columns, type) {
if (type === 'primary') {
return this.getDefaultPrimaryName(tableName, columns);
}
const indexName = super.getIndexName(tableName, columns, type);
if (indexName.length > 64) {
return `${indexName.substring(0, 56 - type.length)}_${Utils.hash(indexName, 5)}_${type}`;
}
return indexName;
}
getDefaultPrimaryName(tableName, columns) {
return 'PRIMARY'; // https://dev.mysql.com/doc/refman/8.0/en/create-table.html#create-table-indexes-keys
}
supportsCreatingFullTextIndex() {
return true;
}
getFullTextWhereClause() {
return `match(:column:) against (:query in boolean mode)`;
}
getFullTextIndexExpression(indexName, schemaName, tableName, columns) {
/* v8 ignore next */
const quotedTableName = this.quoteIdentifier(schemaName ? `${schemaName}.${tableName}` : tableName);
const quotedColumnNames = columns.map(c => this.quoteIdentifier(c.name));
const quotedIndexName = this.quoteIdentifier(indexName);
return `alter table ${quotedTableName} add fulltext index ${quotedIndexName}(${quotedColumnNames.join(',')})`;
}
getOrderByExpression(column, direction, collation) {
const ret = [];
const dir = direction.toLowerCase();
const col = collation ? `${column} collate ${this.quoteCollation(collation)}` : column;
if (dir in this.ORDER_BY_NULLS_TRANSLATE) {
ret.push(`${col} ${this.ORDER_BY_NULLS_TRANSLATE[dir]}`);
}
ret.push(`${col} ${dir.replace(/(\s|nulls|first|last)*/gi, '')}`);
return ret;
}
getJsonArrayFromSQL(column, alias, properties) {
const columns = properties
.map(
p =>
`${this.quoteIdentifier(p.name)} ${this.#jsonTypeCasts[p.type] ?? 'text'} path '$.${this.quoteJsonKey(p.name)}'`,
)
.join(', ');
return `json_table(${column}, '$[*]' columns (${columns})) as ${this.quoteIdentifier(alias)}`;
}
// MySQL does not support correlated json_table inside EXISTS subqueries,
// so we use a semi-join via the comma-join pattern instead.
getJsonArrayExistsSQL(from, where) {
return `(select 1 from ${from} where ${where} limit 1) is not null`;
}
getDefaultClientUrl() {
return 'mysql://root@127.0.0.1:3306';
}
}

View File

@@ -0,0 +1,9 @@
import { ExceptionConverter, type Dictionary, type DriverException } from '@mikro-orm/core';
/** Maps MySQL/MariaDB error numbers (`errno`) to MikroORM driver exception types. */
export declare class MySqlExceptionConverter extends ExceptionConverter {
  /**
   * @see http://dev.mysql.com/doc/refman/5.7/en/error-messages-client.html
   * @see http://dev.mysql.com/doc/refman/5.7/en/error-messages-server.html
   * @see https://github.com/doctrine/dbal/blob/master/src/Driver/AbstractMySQLDriver.php
   */
  convertException(exception: Error & Dictionary): DriverException;
}

View File

@@ -0,0 +1,94 @@
import {
DeadlockException,
LockWaitTimeoutException,
TableExistsException,
TableNotFoundException,
ForeignKeyConstraintViolationException,
UniqueConstraintViolationException,
InvalidFieldNameException,
NonUniqueFieldNameException,
SyntaxErrorException,
ConnectionException,
NotNullConstraintViolationException,
ExceptionConverter,
CheckConstraintViolationException,
} from '@mikro-orm/core';
export class MySqlExceptionConverter extends ExceptionConverter {
/**
* @see http://dev.mysql.com/doc/refman/5.7/en/error-messages-client.html
* @see http://dev.mysql.com/doc/refman/5.7/en/error-messages-server.html
* @see https://github.com/doctrine/dbal/blob/master/src/Driver/AbstractMySQLDriver.php
*/
convertException(exception) {
/* v8 ignore next */
switch (exception.errno) {
case 1213:
return new DeadlockException(exception);
case 1205:
return new LockWaitTimeoutException(exception);
case 1050:
return new TableExistsException(exception);
case 1051:
case 1146:
return new TableNotFoundException(exception);
case 1216:
case 1217:
case 1451:
case 1452:
case 1701:
return new ForeignKeyConstraintViolationException(exception);
case 3819:
case 4025:
return new CheckConstraintViolationException(exception);
case 1062:
case 1557:
case 1569:
case 1586:
return new UniqueConstraintViolationException(exception);
case 1054:
case 1166:
case 1611:
return new InvalidFieldNameException(exception);
case 1052:
case 1060:
case 1110:
return new NonUniqueFieldNameException(exception);
case 1064:
case 1149:
case 1287:
case 1341:
case 1342:
case 1343:
case 1344:
case 1382:
case 1479:
case 1541:
case 1554:
case 1626:
return new SyntaxErrorException(exception);
case 1044:
case 1045:
case 1046:
case 1049:
case 1095:
case 1142:
case 1143:
case 1227:
case 1370:
case 1429:
case 2002:
case 2005:
return new ConnectionException(exception);
case 1048:
case 1121:
case 1138:
case 1171:
case 1252:
case 1263:
case 1364:
case 1566:
return new NotNullConstraintViolationException(exception);
}
return super.convertException(exception);
}
}

View File

@@ -0,0 +1,7 @@
import { NativeQueryBuilder } from '../../query/NativeQueryBuilder.js';
/**
 * MySQL flavour of the native query builder: `insert ignore`,
 * `lock in share mode`/`for update` locking and
 * `on duplicate key update` upserts.
 * @internal
 */
export declare class MySqlNativeQueryBuilder extends NativeQueryBuilder {
  protected compileInsert(): void;
  protected addLockClause(): void;
  protected addOnConflictClause(): void;
}

View File

@@ -0,0 +1,74 @@
import { LockMode, RawQueryFragment, Utils } from '@mikro-orm/core';
import { NativeQueryBuilder } from '../../query/NativeQueryBuilder.js';
/** @internal */
export class MySqlNativeQueryBuilder extends NativeQueryBuilder {
compileInsert() {
if (!this.options.data) {
throw new Error('No data provided');
}
this.parts.push('insert');
if (this.options.onConflict?.ignore) {
this.parts.push('ignore');
}
this.addHintComment();
this.parts.push(`into ${this.getTableName()}`);
if (Object.keys(this.options.data).length === 0) {
this.parts.push('default values');
return;
}
const parts = this.processInsertData();
this.parts.push(parts.join(', '));
}
addLockClause() {
if (!this.options.lockMode) {
return;
}
const map = {
[LockMode.PESSIMISTIC_READ]: 'lock in share mode',
[LockMode.PESSIMISTIC_WRITE]: 'for update',
[LockMode.PESSIMISTIC_PARTIAL_WRITE]: 'for update skip locked',
[LockMode.PESSIMISTIC_WRITE_OR_FAIL]: 'for update nowait',
[LockMode.PESSIMISTIC_PARTIAL_READ]: 'lock in share mode skip locked',
[LockMode.PESSIMISTIC_READ_OR_FAIL]: 'lock in share mode nowait',
};
if (this.options.lockMode !== LockMode.OPTIMISTIC) {
this.parts.push(map[this.options.lockMode]);
}
}
addOnConflictClause() {
const clause = this.options.onConflict;
if (!clause || clause.ignore) {
return;
}
if (clause.merge) {
this.parts.push('on duplicate key update');
if (Utils.isObject(clause.merge)) {
const fields = Object.keys(clause.merge).map(field => {
this.params.push(clause.merge[field]);
return `${this.quote(field)} = ?`;
});
this.parts.push(fields.join(', '));
} else if (clause.merge.length === 0) {
const dataAsArray = Utils.asArray(this.options.data);
const keys = Object.keys(dataAsArray[0]);
this.parts.push(keys.map(key => `${this.quote(key)} = values(${this.quote(key)})`).join(', '));
} else {
const fields = clause.merge.map(key => `${this.quote(key)} = values(${this.quote(key)})`);
this.parts.push(fields.join(', '));
}
if (clause.where) {
this.parts.push(`where ${clause.where.sql}`);
this.params.push(...clause.where.params);
}
return;
}
this.parts.push('on conflict');
if (clause.fields instanceof RawQueryFragment) {
this.parts.push(clause.fields.sql);
this.params.push(...clause.fields.params);
} else if (clause.fields.length > 0) {
const fields = clause.fields.map(field => this.quote(field));
this.parts.push(`(${fields.join(', ')})`);
}
}
}

View File

@@ -0,0 +1,47 @@
import { type Dictionary, type Type } from '@mikro-orm/core';
import type { CheckDef, Column, ForeignKey, IndexDef, Table, TableDifference } from '../../typings.js';
import type { AbstractSqlConnection } from '../../AbstractSqlConnection.js';
import { SchemaHelper } from '../../schema/SchemaHelper.js';
import type { DatabaseSchema } from '../../schema/DatabaseSchema.js';
import type { DatabaseTable } from '../../schema/DatabaseTable.js';
/**
 * MySQL implementation of SchemaHelper: introspects tables via
 * `information_schema` and generates MySQL-specific DDL statements.
 */
export declare class MySqlSchemaHelper extends SchemaHelper {
  #private;
  /** Default-value aliases treated as equivalent when diffing schemas. */
  static readonly DEFAULT_VALUES: {
    'now()': string[];
    'current_timestamp(?)': string[];
    '0': string[];
  };
  getSchemaBeginning(charset: string, disableForeignKeys?: boolean): string;
  disableForeignKeysSQL(): string;
  enableForeignKeysSQL(): string;
  /** Table options appended to `create table`: charset, collation, engine, comment. */
  finalizeTable(table: DatabaseTable, charset: string, collate?: string): string;
  getListTablesSQL(): string;
  getListViewsSQL(): string;
  loadViews(schema: DatabaseSchema, connection: AbstractSqlConnection, schemaName?: string): Promise<void>;
  /** Loads columns, indexes, checks, FKs and enums for the given tables into the schema. */
  loadInformationSchema(schema: DatabaseSchema, connection: AbstractSqlConnection, tables: Table[]): Promise<void>;
  getAllIndexes(connection: AbstractSqlConnection, tables: Table[]): Promise<Dictionary<IndexDef[]>>;
  getCreateIndexSQL(tableName: string, index: IndexDef, partialExpression?: boolean): string;
  /**
   * Build the column list for a MySQL index, with MySQL-specific handling for collation.
   * MySQL requires collation to be specified as an expression: (column_name COLLATE collation_name)
   */
  protected getIndexColumns(index: IndexDef): string;
  /**
   * Append MySQL-specific index suffixes like INVISIBLE.
   */
  protected appendMySqlIndexSuffix(sql: string, index: IndexDef): string;
  getAllColumns(connection: AbstractSqlConnection, tables: Table[]): Promise<Dictionary<Column[]>>;
  getAllChecks(connection: AbstractSqlConnection, tables: Table[]): Promise<Dictionary<CheckDef[]>>;
  getAllForeignKeys(connection: AbstractSqlConnection, tables: Table[]): Promise<Dictionary<Dictionary<ForeignKey>>>;
  /** Returns statements that must run before an ALTER TABLE (e.g. dropping auto_increment). */
  getPreAlterTable(tableDiff: TableDifference, safe: boolean): string[];
  getRenameColumnSQL(tableName: string, oldColumnName: string, to: Column): string;
  getRenameIndexSQL(tableName: string, index: IndexDef, oldIndexName: string): string[];
  getChangeColumnCommentSQL(tableName: string, to: Column, schemaName?: string): string;
  alterTableColumn(column: Column, table: DatabaseTable, changedProperties: Set<string>): string[];
  private getColumnDeclarationSQL;
  getAllEnumDefinitions(connection: AbstractSqlConnection, tables: Table[]): Promise<Dictionary<Dictionary<string[]>>>;
  private supportsCheckConstraints;
  protected getChecksSQL(tables: Table[]): string;
  normalizeDefaultValue(defaultValue: string, length: number): string | number;
  protected wrap(val: string | null | undefined, type: Type<unknown>): string | null | undefined;
}

View File

@@ -0,0 +1,379 @@
import { EnumType, StringType, TextType } from '@mikro-orm/core';
import { SchemaHelper } from '../../schema/SchemaHelper.js';
/**
 * MySQL implementation of SchemaHelper: reads table metadata from
 * `information_schema` and generates MySQL-flavoured DDL statements.
 */
export class MySqlSchemaHelper extends SchemaHelper {
  // Per-instance memoization (currently caches check-constraint support detection).
  #cache = {};
  // Default-value aliases treated as equivalent when diffing schemas.
  static DEFAULT_VALUES = {
    'now()': ['now()', 'current_timestamp'],
    'current_timestamp(?)': ['current_timestamp(?)'],
    0: ['0', 'false'],
  };
  /** Dump header: sets the connection charset and optionally disables FK checks. */
  getSchemaBeginning(charset, disableForeignKeys) {
    if (disableForeignKeys) {
      return `set names ${charset};\n${this.disableForeignKeysSQL()}\n\n`;
    }
    return `set names ${charset};\n\n`;
  }
  disableForeignKeysSQL() {
    return 'set foreign_key_checks = 0;';
  }
  enableForeignKeysSQL() {
    return 'set foreign_key_checks = 1;';
  }
  /** Table options appended to `create table`: charset, collation, engine, comment. */
  finalizeTable(table, charset, collate) {
    let sql = ` default character set ${charset}`;
    if (collate) {
      sql += ` collate ${collate}`;
    }
    sql += ' engine = InnoDB';
    if (table.comment) {
      sql += ` comment = ${this.platform.quoteValue(table.comment)}`;
    }
    return sql;
  }
  getListTablesSQL() {
    return `select table_name as table_name, nullif(table_schema, schema()) as schema_name, table_comment as table_comment from information_schema.tables where table_type = 'BASE TABLE' and table_schema = schema()`;
  }
  getListViewsSQL() {
    return `select table_name as view_name, nullif(table_schema, schema()) as schema_name, view_definition from information_schema.views where table_schema = schema()`;
  }
  /** Registers all views of the current schema, falling back to SHOW CREATE VIEW. */
  async loadViews(schema, connection, schemaName) {
    const views = await connection.execute(this.getListViewsSQL());
    for (const view of views) {
      // MySQL information_schema.views.view_definition requires SHOW VIEW privilege
      // and may return NULL. Use SHOW CREATE VIEW as fallback.
      let definition = view.view_definition?.trim();
      if (!definition) {
        const createView = await connection.execute(`show create view \`${view.view_name}\``);
        if (createView[0]?.['Create View']) {
          // Extract SELECT statement from CREATE VIEW ... AS SELECT ...
          const match = /\bAS\s+(.+)$/is.exec(createView[0]['Create View']);
          definition = match?.[1]?.trim();
        }
      }
      if (definition) {
        schema.addView(view.view_name, view.schema_name ?? undefined, definition);
      }
    }
  }
  /** Loads columns, indexes, checks, FKs and enum values for the given tables. */
  async loadInformationSchema(schema, connection, tables) {
    if (tables.length === 0) {
      return;
    }
    const columns = await this.getAllColumns(connection, tables);
    const indexes = await this.getAllIndexes(connection, tables);
    const checks = await this.getAllChecks(connection, tables);
    const fks = await this.getAllForeignKeys(connection, tables);
    const enums = await this.getAllEnumDefinitions(connection, tables);
    for (const t of tables) {
      const key = this.getTableKey(t);
      const table = schema.addTable(t.table_name, t.schema_name, t.table_comment);
      const pks = await this.getPrimaryKeys(connection, indexes[key], table.name, table.schema);
      table.init(columns[key], indexes[key], checks[key], pks, fks[key], enums[key]);
    }
  }
  /** Reads index metadata (one row per column) and folds it into IndexDef entries per table. */
  async getAllIndexes(connection, tables) {
    // The `/*!80013 ... */` version comment only selects expression/visibility on MySQL 8.0.13+.
    const sql = `select table_name as table_name, nullif(table_schema, schema()) as schema_name, index_name as index_name, non_unique as non_unique, column_name as column_name, index_type as index_type, sub_part as sub_part, collation as sort_order /*!80013 , expression as expression, is_visible as is_visible */
from information_schema.statistics where table_schema = database()
and table_name in (${tables.map(t => this.platform.quoteValue(t.table_name)).join(', ')})
order by schema_name, table_name, index_name, seq_in_index`;
    const allIndexes = await connection.execute(sql);
    const ret = {};
    for (const index of allIndexes) {
      const key = this.getTableKey(index);
      const indexDef = {
        columnNames: [index.column_name],
        keyName: index.index_name,
        unique: !index.non_unique,
        primary: index.index_name === 'PRIMARY',
        constraint: !index.non_unique,
      };
      // Capture column options (prefix length, sort order)
      if (index.sub_part != null || index.sort_order === 'D') {
        indexDef.columns = [
          {
            name: index.column_name,
            ...(index.sub_part != null && { length: index.sub_part }),
            ...(index.sort_order === 'D' && { sort: 'DESC' }),
          },
        ];
      }
      // Capture index type for fulltext and spatial indexes
      if (index.index_type === 'FULLTEXT') {
        indexDef.type = 'fulltext';
      } else if (index.index_type === 'SPATIAL') {
        /* v8 ignore next */
        indexDef.type = 'spatial';
      }
      // Capture invisible flag (MySQL 8.0.13+)
      if (index.is_visible === 'NO') {
        indexDef.invisible = true;
      }
      if (!index.column_name || index.expression?.match(/ where /i)) {
        indexDef.expression = index.expression; // required for the `getCreateIndexSQL()` call
        indexDef.expression = this.getCreateIndexSQL(index.table_name, indexDef, !!index.expression);
      }
      ret[key] ??= [];
      ret[key].push(indexDef);
    }
    for (const key of Object.keys(ret)) {
      ret[key] = await this.mapIndexes(ret[key]);
    }
    return ret;
  }
  /** Builds `alter table ... add [unique] index ...` DDL for the given index definition. */
  getCreateIndexSQL(tableName, index, partialExpression = false) {
    /* v8 ignore next */
    if (index.expression && !partialExpression) {
      return index.expression;
    }
    tableName = this.quote(tableName);
    const keyName = this.quote(index.keyName);
    let sql = `alter table ${tableName} add ${index.unique ? 'unique' : 'index'} ${keyName} `;
    if (index.expression && partialExpression) {
      sql += `(${index.expression})`;
      return this.appendMySqlIndexSuffix(sql, index);
    }
    // JSON columns can have unique index but not unique constraint, and we need to distinguish those, so we can properly drop them
    if (index.columnNames.some(column => column.includes('.'))) {
      const columns = this.platform.getJsonIndexDefinition(index);
      sql = `alter table ${tableName} add ${index.unique ? 'unique ' : ''}index ${keyName} `;
      sql += `(${columns.join(', ')})`;
      return this.appendMySqlIndexSuffix(sql, index);
    }
    // Build column list with advanced options
    const columns = this.getIndexColumns(index);
    sql += `(${columns})`;
    return this.appendMySqlIndexSuffix(sql, index);
  }
  /**
   * Build the column list for a MySQL index, with MySQL-specific handling for collation.
   * MySQL requires collation to be specified as an expression: (column_name COLLATE collation_name)
   */
  getIndexColumns(index) {
    if (index.columns?.length) {
      return index.columns
        .map(col => {
          const quotedName = this.quote(col.name);
          // MySQL supports collation via expression: (column_name COLLATE collation_name)
          // When collation is specified, wrap in parentheses as an expression
          if (col.collation) {
            let expr = col.length ? `${quotedName}(${col.length})` : quotedName;
            expr = `(${expr} collate ${col.collation})`;
            // Sort order comes after the expression
            if (col.sort) {
              expr += ` ${col.sort}`;
            }
            return expr;
          }
          // Standard column definition without collation
          let colDef = quotedName;
          // MySQL supports prefix length
          if (col.length) {
            colDef += `(${col.length})`;
          }
          // MySQL supports sort order
          if (col.sort) {
            colDef += ` ${col.sort}`;
          }
          return colDef;
        })
        .join(', ');
    }
    return index.columnNames.map(c => this.quote(c)).join(', ');
  }
  /**
   * Append MySQL-specific index suffixes like INVISIBLE.
   */
  appendMySqlIndexSuffix(sql, index) {
    // MySQL 8.0+ supports INVISIBLE indexes
    if (index.invisible) {
      sql += ' invisible';
    }
    return sql;
  }
  /** Reads column metadata for the given tables, keyed by table. */
  async getAllColumns(connection, tables) {
    // `.join(', ')` made explicit — previously relied on implicit Array#toString comma-joining.
    const sql = `select table_name as table_name,
nullif(table_schema, schema()) as schema_name,
column_name as column_name,
column_default as column_default,
nullif(column_comment, '') as column_comment,
is_nullable as is_nullable,
data_type as data_type,
column_type as column_type,
column_key as column_key,
extra as extra,
generation_expression as generation_expression,
numeric_precision as numeric_precision,
numeric_scale as numeric_scale,
ifnull(datetime_precision, character_maximum_length) length
from information_schema.columns where table_schema = database() and table_name in (${tables.map(t => this.platform.quoteValue(t.table_name)).join(', ')})
order by ordinal_position`;
    const allColumns = await connection.execute(sql);
    const str = val => (val != null ? '' + val : val);
    // Strip generated/auto_increment markers from `extra`, keeping only the remainder.
    const extra = val =>
      val.replace(/auto_increment|default_generated|(stored|virtual) generated/i, '').trim() || undefined;
    const ret = {};
    for (const col of allColumns) {
      const mappedType = this.platform.getMappedType(col.column_type);
      // Boolean defaults are stored as 0/1; translate them to false/true before normalizing.
      const defaultValue = str(
        this.normalizeDefaultValue(
          mappedType.compareAsType() === 'boolean' && ['0', '1'].includes(col.column_default)
            ? ['false', 'true'][+col.column_default]
            : col.column_default,
          col.length,
        ),
      );
      const key = this.getTableKey(col);
      const generated = col.generation_expression
        ? `(${col.generation_expression.replaceAll(`\\'`, `'`)}) ${col.extra.match(/stored generated/i) ? 'stored' : 'virtual'}`
        : undefined;
      ret[key] ??= [];
      ret[key].push({
        name: col.column_name,
        // Numeric columns drop the ` unsigned` suffix and display width, e.g. `int(11)` -> `int`.
        type: this.platform.isNumericColumn(mappedType)
          ? col.column_type.replace(/ unsigned$/, '').replace(/\(\d+\)$/, '')
          : col.column_type,
        mappedType,
        unsigned: col.column_type.endsWith(' unsigned'),
        length: col.length,
        default: this.wrap(defaultValue, mappedType),
        nullable: col.is_nullable === 'YES',
        primary: col.column_key === 'PRI',
        unique: col.column_key === 'UNI',
        autoincrement: col.extra === 'auto_increment',
        precision: col.numeric_precision,
        scale: col.numeric_scale,
        comment: col.column_comment,
        extra: extra(col.extra),
        generated,
      });
    }
    return ret;
  }
  /** Reads check constraints, keyed by table; empty when the server lacks support. */
  async getAllChecks(connection, tables) {
    /* v8 ignore next */
    if (!(await this.supportsCheckConstraints(connection))) {
      return {};
    }
    const sql = this.getChecksSQL(tables);
    const allChecks = await connection.execute(sql);
    const ret = {};
    for (const check of allChecks) {
      const key = this.getTableKey(check);
      ret[key] ??= [];
      ret[key].push({
        name: check.name,
        columnName: check.column_name,
        definition: `check ${check.expression}`,
        // Drop the surrounding parentheses reported by information_schema.
        expression: check.expression.replace(/^\((.*)\)$/, '$1'),
      });
    }
    return ret;
  }
  /** Reads FK metadata (one row per column) and maps it into ForeignKey entries per table. */
  async getAllForeignKeys(connection, tables) {
    const sql = `select k.constraint_name as constraint_name, nullif(k.table_schema, schema()) as schema_name, k.table_name as table_name, k.column_name as column_name, k.referenced_table_name as referenced_table_name, k.referenced_column_name as referenced_column_name, c.update_rule as update_rule, c.delete_rule as delete_rule
from information_schema.key_column_usage k
inner join information_schema.referential_constraints c on c.constraint_name = k.constraint_name and c.table_name = k.table_name
where k.table_name in (${tables.map(t => this.platform.quoteValue(t.table_name)).join(', ')})
and k.table_schema = database() and c.constraint_schema = database() and k.referenced_column_name is not null
order by constraint_name, k.ordinal_position`;
    const allFks = await connection.execute(sql);
    const ret = {};
    for (const fk of allFks) {
      const key = this.getTableKey(fk);
      ret[key] ??= [];
      ret[key].push(fk);
    }
    Object.keys(ret).forEach(key => {
      const parts = key.split('.');
      /* v8 ignore next */
      const schemaName = parts.length > 1 ? parts[0] : undefined;
      ret[key] = this.mapForeignKeys(ret[key], key, schemaName);
    });
    return ret;
  }
  getPreAlterTable(tableDiff, safe) {
    // Dropping primary keys requires to unset autoincrement attribute on the particular column first.
    const pk = Object.values(tableDiff.removedIndexes).find(idx => idx.primary);
    if (!pk || safe) {
      return [];
    }
    return pk.columnNames
      .filter(col => tableDiff.fromTable.hasColumn(col))
      .map(col => tableDiff.fromTable.getColumn(col))
      .filter(col => col.autoincrement)
      .map(
        col =>
          `alter table \`${tableDiff.name}\` modify \`${col.name}\` ${this.getColumnDeclarationSQL({ ...col, autoincrement: false })}`,
      );
  }
  /** MySQL renames a column via `change`, which needs the full column declaration. */
  getRenameColumnSQL(tableName, oldColumnName, to) {
    tableName = this.quote(tableName);
    oldColumnName = this.quote(oldColumnName);
    const columnName = this.quote(to.name);
    return `alter table ${tableName} change ${oldColumnName} ${columnName} ${this.getColumnDeclarationSQL(to)}`;
  }
  getRenameIndexSQL(tableName, index, oldIndexName) {
    tableName = this.quote(tableName);
    oldIndexName = this.quote(oldIndexName);
    const keyName = this.quote(index.keyName);
    return [`alter table ${tableName} rename index ${oldIndexName} to ${keyName}`];
  }
  // NOTE(review): `schemaName` is accepted but unused here — confirm whether the
  // schema should be prepended to the table name as in other helpers.
  getChangeColumnCommentSQL(tableName, to, schemaName) {
    tableName = this.quote(tableName);
    const columnName = this.quote(to.name);
    return `alter table ${tableName} modify ${columnName} ${this.getColumnDeclarationSQL(to)}`;
  }
  alterTableColumn(column, table, changedProperties) {
    const col = this.createTableColumn(column, table, changedProperties);
    return [`alter table ${table.getQuotedName()} modify ${col}`];
  }
  /** Column declaration used by rename/modify statements (type + flags + default + comment). */
  getColumnDeclarationSQL(col) {
    let ret = col.type;
    ret += col.unsigned ? ' unsigned' : '';
    ret += col.autoincrement ? ' auto_increment' : '';
    ret += ' ';
    ret += col.nullable ? 'null' : 'not null';
    ret += col.default ? ' default ' + col.default : '';
    ret += col.comment ? ` comment ${this.platform.quoteValue(col.comment)}` : '';
    return ret;
  }
  /** Reads enum columns and parses their allowed values from `enum('a','b',...)`. */
  async getAllEnumDefinitions(connection, tables) {
    // quoteValue used for consistency with the other information_schema queries above.
    const sql = `select column_name as column_name, column_type as column_type, table_name as table_name
from information_schema.columns
where data_type = 'enum' and table_name in (${tables.map(t => this.platform.quoteValue(t.table_name)).join(', ')}) and table_schema = database()`;
    const enums = await connection.execute(sql);
    return enums.reduce((o, item) => {
      o[item.table_name] ??= {};
      o[item.table_name][item.column_name] = item.column_type
        .match(/enum\((.*)\)/)[1]
        .split(',')
        .map(item => /'(.*)'/.exec(item)[1]);
      return o;
    }, {});
  }
  /** Detects (and caches) whether the server exposes information_schema.CHECK_CONSTRAINTS. */
  async supportsCheckConstraints(connection) {
    if (this.#cache.supportsCheckConstraints != null) {
      return this.#cache.supportsCheckConstraints;
    }
    const sql = `select 1 from information_schema.tables where table_name = 'CHECK_CONSTRAINTS' and table_schema = 'information_schema'`;
    const res = await connection.execute(sql);
    return (this.#cache.supportsCheckConstraints = res.length > 0);
  }
  getChecksSQL(tables) {
    // `.join(', ')` made explicit — previously relied on implicit Array#toString comma-joining.
    return `select cc.constraint_schema as table_schema, tc.table_name as table_name, cc.constraint_name as name, cc.check_clause as expression
from information_schema.check_constraints cc
join information_schema.table_constraints tc
on tc.constraint_schema = cc.constraint_schema
and tc.constraint_name = cc.constraint_name
and constraint_type = 'CHECK'
where tc.table_name in (${tables.map(t => this.platform.quoteValue(t.table_name)).join(', ')}) and tc.constraint_schema = database()
order by tc.constraint_name`;
  }
  normalizeDefaultValue(defaultValue, length) {
    return super.normalizeDefaultValue(defaultValue, length, MySqlSchemaHelper.DEFAULT_VALUES);
  }
  /** Quotes string-ish defaults (string/text/enum columns) so they compare as SQL literals. */
  wrap(val, type) {
    const stringType = type instanceof StringType || type instanceof TextType || type instanceof EnumType;
    return typeof val === 'string' && val.length > 0 && stringType ? this.platform.quoteValue(val) : val;
  }
}

View File

@@ -0,0 +1,3 @@
export * from './MySqlSchemaHelper.js';
export * from './BaseMySqlPlatform.js';
export * from './MySqlNativeQueryBuilder.js';

3
node_modules/@mikro-orm/sql/dialects/mysql/index.js generated vendored Normal file
View File

@@ -0,0 +1,3 @@
export * from './MySqlSchemaHelper.js';
export * from './BaseMySqlPlatform.js';
export * from './MySqlNativeQueryBuilder.js';

View File

@@ -0,0 +1,107 @@
import {
type AliasNode,
CompiledQuery,
type DatabaseConnection,
type DatabaseIntrospector,
DefaultQueryCompiler,
type Dialect,
DialectAdapterBase,
type Driver,
type Kysely,
type QueryCompiler,
type QueryResult,
type TransactionSettings,
} from 'kysely';
/**
 * Subset of oracledb's Pool interface used by the dialect.
 * We define our own interface to avoid importing the `oracledb` package directly.
 */
export interface OraclePool {
  getConnection(): Promise<OraclePoolConnection>;
  close(drainTime?: number): Promise<void>;
}
/**
 * Subset of oracledb's Connection interface used by the dialect.
 */
export interface OraclePoolConnection {
  execute<R>(
    sql: string,
    params: unknown[],
    options?: Record<string, unknown>,
  ): Promise<{
    rows?: R[];
    rowsAffected?: number;
    resultSet?: OracleResultSet<R>;
    outBinds?: unknown;
  }>;
  commit(): Promise<void>;
  rollback(): Promise<void>;
  close(): Promise<void>;
}
/** Cursor-style result set as exposed by oracledb when streaming rows. */
interface OracleResultSet<R> {
  getRow(): Promise<R>;
  close(): Promise<void>;
}
/** Oracle-flavoured Kysely query compiler (identifier wrapping and alias rendering). */
declare class OracleQueryCompiler extends DefaultQueryCompiler {
  protected getLeftIdentifierWrapper(): string;
  protected getRightIdentifierWrapper(): string;
  protected visitAlias(node: AliasNode): void;
}
/** Kysely dialect adapter describing Oracle's capabilities. */
declare class OracleAdapter extends DialectAdapterBase {
  #private;
  get supportsReturning(): boolean;
  get supportsTransactionalDdl(): boolean;
  acquireMigrationLock(_: Kysely<any>): Promise<void>;
  releaseMigrationLock(_: Kysely<any>): Promise<void>;
}
/** Wraps an oracledb pool connection as a Kysely DatabaseConnection. */
declare class OracleConnection implements DatabaseConnection {
  #private;
  readonly id: number;
  constructor(connection: OraclePoolConnection, executeOptions?: Record<string, unknown>);
  executeQuery<R>(compiledQuery: CompiledQuery): Promise<QueryResult<R>>;
  /** Converts a compiled Kysely query to oracledb's positional bind format. */
  formatQuery(query: CompiledQuery): {
    sql: string;
    bindParams: unknown[];
  };
  streamQuery<R>(compiledQuery: CompiledQuery, _chunkSize?: number): AsyncIterableIterator<QueryResult<R>>;
  get connection(): OraclePoolConnection;
}
/** Kysely driver managing connections, transactions and savepoints on an Oracle pool. */
declare class OracleDriver implements Driver {
  #private;
  constructor(config: OracleDialectConfig);
  init(): Promise<void>;
  acquireConnection(): Promise<OracleConnection>;
  savepoint(
    connection: OracleConnection,
    savepointName: string,
    compileQuery: QueryCompiler['compileQuery'],
  ): Promise<void>;
  rollbackToSavepoint(
    connection: OracleConnection,
    savepointName: string,
    compileQuery: QueryCompiler['compileQuery'],
  ): Promise<void>;
  releaseSavepoint(
    connection: OracleConnection,
    savepointName: string,
    compileQuery: QueryCompiler['compileQuery'],
  ): Promise<void>;
  beginTransaction(connection: OracleConnection, settings: TransactionSettings): Promise<void>;
  commitTransaction(connection: OracleConnection): Promise<void>;
  rollbackTransaction(connection: OracleConnection): Promise<void>;
  releaseConnection(connection: OracleConnection): Promise<void>;
  destroy(): Promise<void>;
}
/** Configuration for OracleDialect: a user-supplied oracledb pool plus execute options. */
export interface OracleDialectConfig {
  pool: OraclePool;
  executeOptions?: Record<string, unknown>;
}
/** Kysely Dialect implementation backed by the oracledb driver. */
export declare class OracleDialect implements Dialect {
  #private;
  constructor(config: OracleDialectConfig);
  createDriver(): OracleDriver;
  createAdapter(): OracleAdapter;
  createIntrospector(db: Kysely<any>): DatabaseIntrospector;
  createQueryCompiler(): OracleQueryCompiler;
}
export {};

View File

@@ -0,0 +1,172 @@
// inlined https://github.com/griffiths-waite/kysely-oracledb with minor adjustments
/* v8 ignore start: internal Kysely driver integration, tested through the main Oracle driver */
import {
CompiledQuery,
createQueryId,
DefaultQueryCompiler,
DialectAdapterBase,
IdentifierNode,
RawNode,
} from 'kysely';
/**
 * Builds a raw SQL node for a savepoint command, passing the savepoint name
 * through an identifier node so the compiler sanitizes it.
 */
function parseSavepointCommand(command, savepointName) {
    const children = [
        RawNode.createWithSql(`${command} `),
        IdentifierNode.create(savepointName), // ensures savepointName gets sanitized
    ];
    return RawNode.createWithChildren(children);
}
/** Query compiler producing Oracle-flavoured SQL. */
class OracleQueryCompiler extends DefaultQueryCompiler {
    // Identifiers are emitted unquoted — both wrappers are empty strings.
    getLeftIdentifierWrapper() {
        return '';
    }
    getRightIdentifierWrapper() {
        return '';
    }
    // Oracle does not accept the `as` keyword before table aliases,
    // so an alias is emitted as `node alias` separated by a single space.
    visitAlias(node) {
        this.visitNode(node.node);
        this.append(' ');
        this.visitNode(node.alias);
    }
}
/** Dialect capability flags for Oracle; both capabilities are disabled. */
class OracleAdapter extends DialectAdapterBase {
    // NOTE(review): `returning` appears to be emulated via `returning ... into`
    // out-binds in the native query builder instead — confirm against callers.
    #supportsReturning = false;
    #supportsTransactionalDdl = false;
    get supportsReturning() {
        return this.#supportsReturning;
    }
    get supportsTransactionalDdl() {
        return this.#supportsTransactionalDdl;
    }
    // Migration locking is intentionally unsupported — both methods always throw.
    async acquireMigrationLock(_) {
        throw new Error('Not implemented');
    }
    async releaseMigrationLock(_) {
        throw new Error('Not implemented');
    }
}
const OUT_FORMAT_OBJECT = 4002;
let i = 0;
class OracleConnection {
id = i++;
#executeOptions;
#connection;
constructor(connection, executeOptions) {
this.#executeOptions = executeOptions ?? {};
this.#connection = connection;
}
async executeQuery(compiledQuery) {
const { sql, bindParams } = this.formatQuery(compiledQuery);
const result = await this.#connection.execute(sql, bindParams, {
autoCommit: compiledQuery.autoCommit,
outFormat: OUT_FORMAT_OBJECT,
...this.#executeOptions,
});
return {
rows: result?.rows || [],
numAffectedRows: result.rowsAffected ? BigInt(result.rowsAffected) : undefined,
// @ts-ignore internal extension for Oracle returning clause
outBinds: result.outBinds,
};
}
formatQuery(query) {
return {
sql: query.sql.replace(/\$(\d+)/g, (_match, p1) => `:${parseInt(p1, 10) - 1}`), // Format bind params in Oracle syntax :0, :1, etc.
bindParams: query.parameters,
};
}
async *streamQuery(compiledQuery, _chunkSize) {
const { sql, bindParams } = this.formatQuery(compiledQuery);
const result = await this.#connection.execute(sql, bindParams, {
resultSet: true,
autoCommit: compiledQuery.autoCommit,
outFormat: OUT_FORMAT_OBJECT,
...this.#executeOptions,
});
const rs = result.resultSet;
try {
let row;
while ((row = await rs.getRow())) {
yield { rows: [row] };
}
} finally {
await rs.close();
}
}
get connection() {
return this.#connection;
}
}
/** Kysely driver that acquires connections from a user-supplied oracledb pool. */
class OracleDriver {
    #config;
    #connections = new Set();
    constructor(config) {
        this.#config = config;
    }
    async init() {
        // nothing to initialize — the pool is created and owned by the caller
    }
    /** Borrows a connection from the pool and tracks it so destroy() can clean up stragglers. */
    async acquireConnection() {
        const poolConnection = await this.#config.pool.getConnection();
        const connection = new OracleConnection(poolConnection, this.#config.executeOptions);
        this.#connections.add(connection);
        return connection;
    }
    async savepoint(connection, savepointName, compileQuery) {
        const query = compileQuery(parseSavepointCommand('savepoint', savepointName), createQueryId());
        await connection.executeQuery(query);
    }
    async rollbackToSavepoint(connection, savepointName, compileQuery) {
        const query = compileQuery(parseSavepointCommand('rollback to savepoint', savepointName), createQueryId());
        await connection.executeQuery(query);
    }
    async releaseSavepoint(connection, savepointName, compileQuery) {
        // intentionally a no-op for Oracle
    }
    async beginTransaction(connection, settings) {
        if (settings.accessMode) {
            await connection.executeQuery(CompiledQuery.raw(`set transaction ${settings.accessMode}`));
            return;
        }
        if (settings.isolationLevel) {
            await connection.executeQuery(CompiledQuery.raw(`set transaction isolation level ${settings.isolationLevel}`));
        }
    }
    async commitTransaction(connection) {
        await connection.connection.commit();
    }
    async rollbackTransaction(connection) {
        await connection.connection.rollback();
    }
    /** Closes the underlying connection (best effort) and always drops it from the tracked set. */
    async releaseConnection(connection) {
        try {
            await connection.connection.close();
        } catch (err) {
            // ignore close failures — the connection may already be gone
        } finally {
            this.#connections.delete(connection);
        }
    }
    async destroy() {
        for (const connection of this.#connections) {
            await this.releaseConnection(connection);
        }
        await this.#config.pool?.close(0);
    }
}
/** Kysely dialect backed by a user-provided oracledb connection pool. */
export class OracleDialect {
    #config;
    constructor(config) {
        this.#config = config;
    }
    createAdapter() {
        return new OracleAdapter();
    }
    createDriver() {
        return new OracleDriver(this.#config);
    }
    createQueryCompiler() {
        return new OracleQueryCompiler();
    }
    createIntrospector(db) {
        // introspection is not provided by this dialect
        throw new Error('Not implemented');
    }
}
/* v8 ignore stop */

View File

@@ -0,0 +1,19 @@
import { type Dictionary } from '@mikro-orm/core';
import { NativeQueryBuilder } from '../../query/NativeQueryBuilder.js';
/** Tags `obj` with a hidden `__outBindings` marker recognized by the Oracle query builder. @internal */
export declare function markOutBindings(obj: Dictionary): void;
/** Native query builder emitting Oracle-flavoured SQL (merge-based upserts, `returning ... into` out-binds). @internal */
export declare class OracleNativeQueryBuilder extends NativeQueryBuilder {
  as(alias: string): this;
  compile(): {
    sql: string;
    params: unknown[];
  };
  protected compileTruncate(): void;
  protected combineParts(): {
    sql: string;
    params: unknown[];
  };
  private compileUpsert;
  protected compileSelect(): void;
}

View File

@@ -0,0 +1,245 @@
import { raw, RawQueryFragment, Utils } from '@mikro-orm/core';
import { QueryType } from '../../query/enums.js';
import { NativeQueryBuilder } from '../../query/NativeQueryBuilder.js';
/** @internal */
export function markOutBindings(obj) {
Object.defineProperty(obj, '__outBindings', {
value: true,
writable: true,
configurable: true,
enumerable: false,
});
}
/** @internal */
export class OracleNativeQueryBuilder extends NativeQueryBuilder {
as(alias) {
this.wrap('(', `) ${this.platform.quoteIdentifier(alias)}`);
return this;
}
compile() {
if (!this.type) {
throw new Error('No query type provided');
}
this.parts.length = 0;
this.params.length = 0;
/* v8 ignore next 3: query comment branch */
if (this.options.comment) {
this.parts.push(...this.options.comment.map(comment => `/* ${comment} */`));
}
let copy;
if (this.options.onConflict && !Utils.isEmpty(Utils.asArray(this.options.data)[0])) {
this.compileUpsert();
} else {
if (this.options.returning && Array.isArray(this.options.data) && this.options.data.length > 1) {
copy = [...this.options.data];
this.options.data.length = 1;
}
switch (this.type) {
case QueryType.SELECT:
case QueryType.COUNT:
this.compileSelect();
break;
case QueryType.INSERT:
this.compileInsert();
break;
case QueryType.UPDATE:
this.compileUpdate();
break;
case QueryType.DELETE:
this.compileDelete();
break;
case QueryType.TRUNCATE:
this.compileTruncate();
break;
}
this.addOnConflictClause();
}
if (this.options.returning) {
const isUpsert = this.options.onConflict && !Utils.isEmpty(Utils.asArray(this.options.data)[0]);
const prefix = isUpsert ? `${this.getTableName()}.` : '';
const fields = this.options.returning.map(field => prefix + this.quote(Array.isArray(field) ? field[0] : field));
const into = this.options.returning.map(field => ':out_' + (Array.isArray(field) ? field[0] : field));
const outBindings = this.options.returning.map(field => {
const name = 'out_' + (Array.isArray(field) ? field[0] : field);
const type = Array.isArray(field) ? field[1] : 'string';
return [name, type];
});
markOutBindings(outBindings);
this.parts.push(`returning ${fields.join(', ')}`);
this.parts.push(`into ${into.join(', ')}`);
this.params.push(outBindings);
}
this.addLockClause();
if (!copy) {
return this.combineParts();
}
// multi insert with returning
const sql = this.parts.join(' ');
const blockLines = [];
const block2Lines = [];
const keys = Object.keys(copy[0]);
const last = this.params[this.params.length - 1];
/* v8 ignore next 3: defensive check — output bindings are always set by compile() */
if (!Array.isArray(last) || !('__outBindings' in last) || !last.__outBindings) {
throw new Error('Output bindings are required for multi insert with returning');
}
const outBindings = {};
markOutBindings(outBindings);
for (let i = 0; i < copy.length; i++) {
const params = [];
for (const key of keys) {
/* v8 ignore next 3: undefined value branch in multi-insert */
if (typeof copy[i][key] === 'undefined') {
params.push(this.platform.usesDefaultKeyword() ? raw('default') : null);
} else {
params.push(copy[i][key]);
}
}
// we need to interpolate to allow proper escaping
const formatted = this.platform.formatQuery(sql, params).replaceAll(`'`, `''`);
/* v8 ignore next 3: returning field type branches */
const using = this.options.returning.map(field => {
const name = Array.isArray(field) ? field[0] : field;
const type = Array.isArray(field) ? field[1] : 'string';
outBindings[`out_${name}__${i}`] = {
dir: this.platform.mapToBindType('out'),
type: this.platform.mapToBindType(type),
};
return `out :out_${name}__${i}`;
});
blockLines.push(` execute immediate '${formatted}' using ${using.join(', ')};`);
block2Lines.push(` execute immediate '${sql}' using ${using.join(', ')};`);
}
const block = `begin\n${blockLines.join('\n')}\n end;`;
const block2 = `begin\n${block2Lines.join('\n')}\n end;`;
// save raw query without interpolation for logging,
Object.defineProperty(outBindings, '__rawQuery', {
value: block2,
writable: true,
configurable: true,
enumerable: false,
});
this.options.data = copy;
return { sql: block, params: [outBindings] };
}
compileTruncate() {
super.compileTruncate();
this.parts.push('drop all storage cascade');
}
combineParts() {
let sql = this.parts.join(' ');
const last = this.params[this.params.length - 1];
if (this.options.wrap) {
const [a, b] = this.options.wrap;
sql = `${a}${sql}${b}`;
}
if (!(Array.isArray(last) && '__outBindings' in last && last.__outBindings)) {
return { sql, params: this.params };
}
const out = this.params.pop();
const outBindings = {};
markOutBindings(outBindings);
this.params.push(outBindings);
for (const item of out) {
outBindings[item[0]] = {
dir: this.platform.mapToBindType('out'),
type: this.platform.mapToBindType(item[1]),
};
}
return { sql, params: this.params };
}
compileUpsert() {
const clause = this.options.onConflict;
const dataAsArray = Utils.asArray(this.options.data);
const keys = Object.keys(dataAsArray[0]);
const parts = [];
for (const data of dataAsArray) {
for (const key of keys) {
this.params.push(data[key]);
}
parts.push(`select ${keys.map(k => `? as ${this.quote(k)}`).join(', ')} from dual`);
}
this.parts.push(`merge into ${this.getTableName()}`);
this.parts.push(`using (${parts.join(' union all ')}) tsource`);
/* v8 ignore next 4: RawQueryFragment conflict fields branch */
if (clause.fields instanceof RawQueryFragment) {
this.parts.push(clause.fields.sql);
this.params.push(...clause.fields.params);
} else if (clause.fields.length > 0) {
const fields = clause.fields.map(field => {
const col = this.quote(field);
return `${this.getTableName()}.${col} = tsource.${col}`;
});
this.parts.push(`on (${fields.join(' and ')})`);
}
const sourceColumns = keys.map(field => `tsource.${this.quote(field)}`).join(', ');
const destinationColumns = keys.map(field => this.quote(field)).join(', ');
this.parts.push(`when not matched then insert (${destinationColumns}) values (${sourceColumns})`);
if (!clause.ignore) {
/* v8 ignore next: merge type branch */
if (!clause.merge || Array.isArray(clause.merge)) {
const mergeParts = (clause.merge || keys)
.filter(field => !Array.isArray(clause.fields) || !clause.fields.includes(field))
.filter(field => keys.includes(field)) // only reference columns present in the source data
.map(column => `${this.quote(column)} = tsource.${this.quote(column)}`);
/* v8 ignore next 10: empty mergeParts branch */
if (mergeParts.length > 0) {
this.parts.push('when matched');
if (clause.where) {
this.parts.push(`and ${clause.where.sql}`);
this.params.push(...clause.where.params);
}
this.parts.push('then update set');
this.parts.push(mergeParts.join(', '));
}
} /* v8 ignore start: object-form merge branch */ else if (typeof clause.merge === 'object') {
this.parts.push('when matched');
if (clause.where) {
this.parts.push(`and ${clause.where.sql}`);
this.params.push(...clause.where.params);
}
this.parts.push('then update set');
const parts = Object.entries(clause.merge).map(([key, value]) => {
this.params.push(value);
return `${this.getTableName()}.${this.quote(key)} = ?`;
});
this.parts.push(parts.join(', '));
}
/* v8 ignore stop */
}
}
compileSelect() {
this.parts.push('select');
this.addHintComment();
this.parts.push(`${this.getFields()} from ${this.getTableName()}`);
if (this.options.joins) {
for (const join of this.options.joins) {
this.parts.push(join.sql);
this.params.push(...join.params);
}
}
if (this.options.where?.sql.trim()) {
this.parts.push(`where ${this.options.where.sql}`);
this.params.push(...this.options.where.params);
}
if (this.options.groupBy) {
const fields = this.options.groupBy.map(field => this.quote(field));
this.parts.push(`group by ${fields.join(', ')}`);
}
if (this.options.having) {
this.parts.push(`having ${this.options.having.sql}`);
this.params.push(...this.options.having.params);
}
if (this.options.orderBy) {
this.parts.push(`order by ${this.options.orderBy}`);
}
if (this.options.offset != null) {
this.parts.push(`offset ? rows`);
this.params.push(this.options.offset);
}
if (this.options.limit != null) {
this.parts.push(`fetch next ? rows only`);
this.params.push(this.options.limit);
}
}
}

View File

@@ -0,0 +1,2 @@
export * from './OracleDialect.js';
export * from './OracleNativeQueryBuilder.js';

View File

@@ -0,0 +1,2 @@
export * from './OracleDialect.js';
export * from './OracleNativeQueryBuilder.js';

View File

@@ -0,0 +1,115 @@
import {
type EntityProperty,
type IsolationLevel,
RawQueryFragment,
type SimpleColumnMeta,
Type,
} from '@mikro-orm/core';
import { AbstractSqlPlatform } from '../../AbstractSqlPlatform.js';
import type { IndexDef } from '../../typings.js';
import { PostgreSqlNativeQueryBuilder } from './PostgreSqlNativeQueryBuilder.js';
import { PostgreSqlSchemaHelper } from './PostgreSqlSchemaHelper.js';
import { PostgreSqlExceptionConverter } from './PostgreSqlExceptionConverter.js';
export declare class BasePostgreSqlPlatform extends AbstractSqlPlatform {
#private;
protected readonly schemaHelper: PostgreSqlSchemaHelper;
protected readonly exceptionConverter: PostgreSqlExceptionConverter;
createNativeQueryBuilder(): PostgreSqlNativeQueryBuilder;
usesReturningStatement(): boolean;
usesCascadeStatement(): boolean;
supportsNativeEnums(): boolean;
usesEnumCheckConstraints(): boolean;
supportsMaterializedViews(): boolean;
supportsCustomPrimaryKeyNames(): boolean;
getCurrentTimestampSQL(length: number): string;
getDateTimeTypeDeclarationSQL(column: { length?: number }): string;
getDefaultDateTimeLength(): number;
getTimeTypeDeclarationSQL(): string;
getIntegerTypeDeclarationSQL(column: { length?: number; autoincrement?: boolean; generated?: string }): string;
getBigIntTypeDeclarationSQL(column: { autoincrement?: boolean }): string;
getTinyIntTypeDeclarationSQL(column: { length?: number; unsigned?: boolean; autoincrement?: boolean }): string;
getUuidTypeDeclarationSQL(column: { length?: number }): string;
getFullTextWhereClause(prop: EntityProperty): string;
supportsCreatingFullTextIndex(): boolean;
getFullTextIndexExpression(
indexName: string,
schemaName: string | undefined,
tableName: string,
columns: SimpleColumnMeta[],
): string;
normalizeColumnType(
type: string,
options: {
length?: number;
precision?: number;
scale?: number;
autoincrement?: boolean;
},
): string;
getMappedType(type: string): Type<unknown>;
getRegExpOperator(val?: unknown, flags?: string): string;
getRegExpValue(val: RegExp): {
$re: string;
$flags?: string;
};
isBigIntProperty(prop: EntityProperty): boolean;
getArrayDeclarationSQL(): string;
getFloatDeclarationSQL(): string;
getDoubleDeclarationSQL(): string;
getEnumTypeDeclarationSQL(column: { fieldNames: string[]; items?: unknown[]; nativeEnumName?: string }): string;
supportsMultipleStatements(): boolean;
getBeginTransactionSQL(options?: { isolationLevel?: IsolationLevel; readOnly?: boolean }): string[];
marshallArray(values: string[]): string;
unmarshallArray(value: string): string[];
getVarcharTypeDeclarationSQL(column: { length?: number }): string;
getCharTypeDeclarationSQL(column: { length?: number }): string;
getIntervalTypeDeclarationSQL(column: { length?: number }): string;
getBlobDeclarationSQL(): string;
getJsonDeclarationSQL(): string;
getSearchJsonPropertyKey(
path: string[],
type: string | undefined | Type,
aliased: boolean,
value?: unknown,
): string | RawQueryFragment;
getJsonIndexDefinition(index: IndexDef): string[];
quoteIdentifier(
id:
| string
| {
toString: () => string;
},
quote?: string,
): string;
private pad;
/** @internal */
formatDate(date: Date): string;
indexForeignKeys(): boolean;
getDefaultMappedType(type: string): Type<unknown>;
supportsSchemas(): boolean;
getDefaultSchemaName(): string | undefined;
  /**
   * Returns the default name of the index for the given columns.
   * PostgreSQL identifiers cannot exceed 63 characters, so long names are truncated and hashed.
   */
getIndexName(
tableName: string,
columns: string[],
type: 'index' | 'unique' | 'foreign' | 'primary' | 'sequence',
): string;
getDefaultPrimaryName(tableName: string, columns: string[]): string;
/**
* @inheritDoc
*/
castColumn(prop?: { columnTypes?: string[] }): string;
getJsonArrayFromSQL(
column: string,
alias: string,
_properties: {
name: string;
type: string;
}[],
): string;
getJsonArrayElementPropertySQL(alias: string, property: string, type: string): string;
getDefaultClientUrl(): string;
}

View File

@@ -0,0 +1,363 @@
import { ALIAS_REPLACEMENT, ARRAY_OPERATORS, raw, RawQueryFragment, Type, Utils } from '@mikro-orm/core';
import { AbstractSqlPlatform } from '../../AbstractSqlPlatform.js';
import { PostgreSqlNativeQueryBuilder } from './PostgreSqlNativeQueryBuilder.js';
import { PostgreSqlSchemaHelper } from './PostgreSqlSchemaHelper.js';
import { PostgreSqlExceptionConverter } from './PostgreSqlExceptionConverter.js';
import { FullTextType } from './FullTextType.js';
export class BasePostgreSqlPlatform extends AbstractSqlPlatform {
schemaHelper = new PostgreSqlSchemaHelper(this);
exceptionConverter = new PostgreSqlExceptionConverter();
/** Maps JS runtime type names to PostgreSQL cast types for JSON property access. @internal */
#jsonTypeCasts = { number: 'float8', bigint: 'int8', boolean: 'bool' };
createNativeQueryBuilder() {
return new PostgreSqlNativeQueryBuilder(this);
}
usesReturningStatement() {
return true;
}
usesCascadeStatement() {
return true;
}
supportsNativeEnums() {
return true;
}
usesEnumCheckConstraints() {
return true;
}
supportsMaterializedViews() {
return true;
}
supportsCustomPrimaryKeyNames() {
return true;
}
getCurrentTimestampSQL(length) {
return `current_timestamp(${length})`;
}
getDateTimeTypeDeclarationSQL(column) {
/* v8 ignore next */
return 'timestamptz' + (column.length != null ? `(${column.length})` : '');
}
getDefaultDateTimeLength() {
return 6;
}
getTimeTypeDeclarationSQL() {
return 'time(0)';
}
getIntegerTypeDeclarationSQL(column) {
if (column.autoincrement && !column.generated) {
return 'serial';
}
return 'int';
}
getBigIntTypeDeclarationSQL(column) {
/* v8 ignore next */
if (column.autoincrement) {
return `bigserial`;
}
return 'bigint';
}
getTinyIntTypeDeclarationSQL(column) {
return 'smallint';
}
getUuidTypeDeclarationSQL(column) {
return `uuid`;
}
getFullTextWhereClause(prop) {
if (prop.customType instanceof FullTextType) {
return `:column: @@ plainto_tsquery('${prop.customType.regconfig}', :query)`;
}
/* v8 ignore next */
if (prop.columnTypes[0] === 'tsvector') {
return `:column: @@ plainto_tsquery('simple', :query)`;
}
return `to_tsvector('simple', :column:) @@ plainto_tsquery('simple', :query)`;
}
supportsCreatingFullTextIndex() {
return true;
}
getFullTextIndexExpression(indexName, schemaName, tableName, columns) {
/* v8 ignore next */
const quotedTableName = this.quoteIdentifier(schemaName ? `${schemaName}.${tableName}` : tableName);
const quotedColumnNames = columns.map(c => this.quoteIdentifier(c.name));
const quotedIndexName = this.quoteIdentifier(indexName);
if (columns.length === 1 && columns[0].type === 'tsvector') {
return `create index ${quotedIndexName} on ${quotedTableName} using gin(${quotedColumnNames[0]})`;
}
return `create index ${quotedIndexName} on ${quotedTableName} using gin(to_tsvector('simple', ${quotedColumnNames.join(` || ' ' || `)}))`;
}
normalizeColumnType(type, options) {
const simpleType = this.extractSimpleType(type);
if (['int', 'int4', 'integer'].includes(simpleType)) {
return this.getIntegerTypeDeclarationSQL({});
}
if (['bigint', 'int8'].includes(simpleType)) {
return this.getBigIntTypeDeclarationSQL({});
}
if (['smallint', 'int2'].includes(simpleType)) {
return this.getSmallIntTypeDeclarationSQL({});
}
if (['boolean', 'bool'].includes(simpleType)) {
return this.getBooleanTypeDeclarationSQL();
}
if (['varchar', 'character varying'].includes(simpleType)) {
return this.getVarcharTypeDeclarationSQL(options);
}
if (['char', 'bpchar'].includes(simpleType)) {
return this.getCharTypeDeclarationSQL(options);
}
if (['decimal', 'numeric'].includes(simpleType)) {
return this.getDecimalTypeDeclarationSQL(options);
}
if (['interval'].includes(simpleType)) {
return this.getIntervalTypeDeclarationSQL(options);
}
return super.normalizeColumnType(type, options);
}
getMappedType(type) {
switch (this.extractSimpleType(type)) {
case 'tsvector':
return Type.getType(FullTextType);
default:
return super.getMappedType(type);
}
}
getRegExpOperator(val, flags) {
/* v8 ignore next */
if ((val instanceof RegExp && val.flags.includes('i')) || flags?.includes('i')) {
return '~*';
}
return '~';
}
/* v8 ignore next */
getRegExpValue(val) {
if (val.flags.includes('i')) {
return { $re: val.source, $flags: val.flags };
}
return { $re: val.source };
}
isBigIntProperty(prop) {
return super.isBigIntProperty(prop) || ['bigserial', 'int8'].includes(prop.columnTypes?.[0]);
}
getArrayDeclarationSQL() {
return 'text[]';
}
getFloatDeclarationSQL() {
return 'real';
}
getDoubleDeclarationSQL() {
return 'double precision';
}
getEnumTypeDeclarationSQL(column) {
/* v8 ignore next */
if (column.nativeEnumName) {
return column.nativeEnumName;
}
if (column.items?.every(item => typeof item === 'string')) {
return 'text';
}
return `smallint`;
}
supportsMultipleStatements() {
return true;
}
getBeginTransactionSQL(options) {
if (options?.isolationLevel || options?.readOnly) {
let sql = 'start transaction';
sql += options.isolationLevel ? ` isolation level ${options.isolationLevel}` : '';
sql += options.readOnly ? ` read only` : '';
return [sql];
}
return ['begin'];
}
marshallArray(values) {
const quote = v => (v === '' || /["{},\\]/.exec(v) ? JSON.stringify(v) : v);
return `{${values.map(v => quote('' + v)).join(',')}}`;
}
/* v8 ignore next */
unmarshallArray(value) {
if (value === '{}') {
return [];
}
return value
.substring(1, value.length - 1)
.split(',')
.map(v => {
if (v === `""`) {
return '';
}
if (/"(.*)"/.exec(v)) {
return v.substring(1, v.length - 1).replaceAll('\\"', '"');
}
return v;
});
}
getVarcharTypeDeclarationSQL(column) {
if (column.length === -1) {
return 'varchar';
}
return super.getVarcharTypeDeclarationSQL(column);
}
getCharTypeDeclarationSQL(column) {
if (column.length === -1) {
return 'char';
}
return super.getCharTypeDeclarationSQL(column);
}
getIntervalTypeDeclarationSQL(column) {
return 'interval' + (column.length != null ? `(${column.length})` : '');
}
getBlobDeclarationSQL() {
return 'bytea';
}
getJsonDeclarationSQL() {
return 'jsonb';
}
getSearchJsonPropertyKey(path, type, aliased, value) {
const first = path.shift();
const last = path.pop();
const root = this.quoteIdentifier(aliased ? `${ALIAS_REPLACEMENT}.${first}` : first);
type = typeof type === 'string' ? this.getMappedType(type).runtimeType : String(type);
const cast = key => raw(type in this.#jsonTypeCasts ? `(${key})::${this.#jsonTypeCasts[type]}` : key);
let lastOperator = '->>';
// force `->` for operator payloads with array values
if (
Utils.isPlainObject(value) &&
Object.keys(value).every(key => ARRAY_OPERATORS.includes(key) && Array.isArray(value[key]))
) {
lastOperator = '->';
}
if (path.length === 0) {
return cast(`${root}${lastOperator}'${last}'`);
}
return cast(`${root}->${path.map(a => this.quoteValue(a)).join('->')}${lastOperator}'${last}'`);
}
getJsonIndexDefinition(index) {
return index.columnNames.map(column => {
if (!column.includes('.')) {
return column;
}
const path = column.split('.');
const first = path.shift();
const last = path.pop();
if (path.length === 0) {
return `(${this.quoteIdentifier(first)}->>${this.quoteValue(last)})`;
}
return `(${this.quoteIdentifier(first)}->${path.map(c => this.quoteValue(c)).join('->')}->>${this.quoteValue(last)})`;
});
}
quoteIdentifier(id, quote = '"') {
if (RawQueryFragment.isKnownFragment(id)) {
return super.quoteIdentifier(id);
}
return `${quote}${id.toString().replace('.', `${quote}.${quote}`)}${quote}`;
}
pad(number, digits) {
return String(number).padStart(digits, '0');
}
/** @internal */
formatDate(date) {
if (this.timezone === 'Z') {
return date.toISOString();
}
let offset = -date.getTimezoneOffset();
let year = date.getFullYear();
const isBCYear = year < 1;
/* v8 ignore next */
if (isBCYear) {
year = Math.abs(year) + 1;
}
const datePart = `${this.pad(year, 4)}-${this.pad(date.getMonth() + 1, 2)}-${this.pad(date.getDate(), 2)}`;
const timePart = `${this.pad(date.getHours(), 2)}:${this.pad(date.getMinutes(), 2)}:${this.pad(date.getSeconds(), 2)}.${this.pad(date.getMilliseconds(), 3)}`;
let ret = `${datePart}T${timePart}`;
/* v8 ignore next */
if (offset < 0) {
ret += '-';
offset *= -1;
} else {
ret += '+';
}
ret += this.pad(Math.floor(offset / 60), 2) + ':' + this.pad(offset % 60, 2);
/* v8 ignore next */
if (isBCYear) {
ret += ' BC';
}
return ret;
}
indexForeignKeys() {
return false;
}
getDefaultMappedType(type) {
const normalizedType = this.extractSimpleType(type);
const map = {
int2: 'smallint',
smallserial: 'smallint',
int: 'integer',
int4: 'integer',
serial: 'integer',
serial4: 'integer',
int8: 'bigint',
bigserial: 'bigint',
serial8: 'bigint',
numeric: 'decimal',
bool: 'boolean',
real: 'float',
float4: 'float',
float8: 'double',
timestamp: 'datetime',
timestamptz: 'datetime',
bytea: 'blob',
jsonb: 'json',
'character varying': 'varchar',
bpchar: 'character',
};
return super.getDefaultMappedType(map[normalizedType] ?? type);
}
supportsSchemas() {
return true;
}
getDefaultSchemaName() {
return 'public';
}
/**
* Returns the default name of index for the given columns
* cannot go past 63 character length for identifiers in MySQL
*/
getIndexName(tableName, columns, type) {
const indexName = super.getIndexName(tableName, columns, type);
if (indexName.length > 63) {
const suffix = type === 'primary' ? 'pkey' : type;
return `${indexName.substring(0, 55 - type.length)}_${Utils.hash(indexName, 5)}_${suffix}`;
}
return indexName;
}
getDefaultPrimaryName(tableName, columns) {
const indexName = `${tableName}_pkey`;
if (indexName.length > 63) {
return `${indexName.substring(0, 55 - 'pkey'.length)}_${Utils.hash(indexName, 5)}_pkey`;
}
return indexName;
}
/**
* @inheritDoc
*/
castColumn(prop) {
switch (prop?.columnTypes?.[0]) {
case this.getUuidTypeDeclarationSQL({}):
return '::text';
case this.getBooleanTypeDeclarationSQL():
return '::int';
default:
return '';
}
}
getJsonArrayFromSQL(column, alias, _properties) {
return `jsonb_array_elements(${column}) as ${this.quoteIdentifier(alias)}`;
}
getJsonArrayElementPropertySQL(alias, property, type) {
const expr = `${this.quoteIdentifier(alias)}->>${this.quoteValue(property)}`;
return type in this.#jsonTypeCasts ? `(${expr})::${this.#jsonTypeCasts[type]}` : expr;
}
getDefaultClientUrl() {
return 'postgresql://postgres@127.0.0.1:5432';
}
}

View File

@@ -0,0 +1,18 @@
import { Type, type TransformContext, type RawQueryFragment } from '@mikro-orm/core';
import type { BasePostgreSqlPlatform } from './BasePostgreSqlPlatform.js';
/** Valid PostgreSQL full-text weight labels. */
type FullTextWeight = 'A' | 'B' | 'C' | 'D';
/** Maps weight labels to the text that should be indexed under that weight. */
export type WeightedFullTextValue = {
  [K in FullTextWeight]?: string | null;
};
/** Custom type mapping PostgreSQL `tsvector` columns; accepts a plain string or a weighted object. */
export declare class FullTextType extends Type<string | WeightedFullTextValue, string | null | RawQueryFragment> {
  regconfig: string;
  constructor(regconfig?: string);
  compareAsType(): string;
  getColumnType(): string;
  convertToDatabaseValue(
    value: string | WeightedFullTextValue,
    platform: BasePostgreSqlPlatform,
    context?: TransformContext | boolean,
  ): string | null | RawQueryFragment;
}
export {};

View File

@@ -0,0 +1,59 @@
import { raw, Type } from '@mikro-orm/core';
/**
 * Custom type for PostgreSQL `tsvector` columns. Accepts either a plain string
 * or a weighted object ({ A: '...', B: '...' }) and converts it to the proper
 * `to_tsvector`/`setweight` SQL expression on write.
 */
export class FullTextType extends Type {
    // text search configuration (regconfig) passed to to_tsvector, e.g. 'simple' or 'english'
    regconfig;
    constructor(regconfig = 'simple') {
        super();
        this.regconfig = regconfig;
    }
    compareAsType() {
        return 'any';
    }
    getColumnType() {
        return 'tsvector';
    }
    // Use convertToDatabaseValue to prepare insert queries as this method has
    // access to the raw JS value. Return a raw query fragment to prevent the
    // query builder from sanitizing the returned chain of SQL functions.
    convertToDatabaseValue(value, platform, context) {
        // Don't convert values coming from select queries to the to_tsvector notation —
        // those should be compared as strings using a special operator or function;
        // this behaviour is defined in Platform#getFullTextWhereClause.
        // In that case the value is always a string.
        if (typeof context === 'object' && context.fromQuery) {
            return value;
        }
        // Null-ish values should not be processed.
        if (!value) {
            return null;
        }
        // An object of the form { A: 'test data', B: 'test data2', ... }
        // must be converted to
        // setweight(to_tsvector(regconfig, value), 'A') || setweight(to_tsvector(regconfig, value), 'B') ... etc.
        // Use a raw fragment so the bound values are sanitized exactly once —
        // the returned raw SQL string must not be sanitized again.
        if (typeof value === 'object') {
            const bindings = [];
            const sqlParts = [];
            for (const [weight, data] of Object.entries(value)) {
                // Check whether the weight is valid according to Postgres;
                // Postgres allows the weight to be upper- and lowercase.
                if (!['A', 'B', 'C', 'D'].includes(weight.toUpperCase())) {
                    throw new Error('Weight should be one of A, B, C, D.');
                }
                // Ignore all values that are not a string.
                if (typeof data === 'string') {
                    sqlParts.push('setweight(to_tsvector(?, ?), ?)');
                    bindings.push(this.regconfig, data, weight);
                }
            }
            // Return null if the object contains no valid strings.
            if (sqlParts.length === 0) {
                return null;
            }
            // Join all the `setweight` parts using the PostgreSQL tsvector `||` concatenation operator.
            return raw(sqlParts.join(' || '), bindings);
        }
        // If it's not an object, it is expected to be a string which does not have to be wrapped in setweight.
        return raw('to_tsvector(?, ?)', [this.regconfig, value]);
    }
}

View File

@@ -0,0 +1,8 @@
import { ExceptionConverter, type Dictionary, type DriverException } from '@mikro-orm/core';
/** Maps PostgreSQL error codes (SQLSTATE) to the generic MikroORM driver exception hierarchy. */
export declare class PostgreSqlExceptionConverter extends ExceptionConverter {
  /**
   * @see http://www.postgresql.org/docs/9.4/static/errcodes-appendix.html
   * @see https://github.com/doctrine/dbal/blob/master/src/Driver/AbstractPostgreSQLDriver.php
   */
  convertException(exception: Error & Dictionary): DriverException;
}

View File

@@ -0,0 +1,59 @@
import {
DeadlockException,
ExceptionConverter,
ForeignKeyConstraintViolationException,
InvalidFieldNameException,
NonUniqueFieldNameException,
NotNullConstraintViolationException,
SyntaxErrorException,
TableExistsException,
TableNotFoundException,
UniqueConstraintViolationException,
CheckConstraintViolationException,
} from '@mikro-orm/core';
export class PostgreSqlExceptionConverter extends ExceptionConverter {
    /**
     * @see http://www.postgresql.org/docs/9.4/static/errcodes-appendix.html
     * @see https://github.com/doctrine/dbal/blob/master/src/Driver/AbstractPostgreSQLDriver.php
     */
    convertException(exception) {
        // Enrich the message with the server-provided detail/hint when present.
        for (const extra of ['detail', 'hint']) {
            if (exception[extra]?.toString().trim()) {
                exception.message += `\n - ${extra}: ` + exception[extra];
            }
        }
        const code = exception.code;
        // Serialization failure / deadlock detected.
        if (code === '40001' || code === '40P01') {
            return new DeadlockException(exception);
        }
        // Foreign key constraint violations during a TRUNCATE operation are
        // reported by PostgreSQL as "feature not supported" (0A000).
        if (code === '0A000' && exception.message.includes('truncate')) {
            return new ForeignKeyConstraintViolationException(exception);
        }
        // Straightforward SQLSTATE → exception class mapping.
        const byCode = {
            '23502': NotNullConstraintViolationException,
            '23503': ForeignKeyConstraintViolationException,
            '23505': UniqueConstraintViolationException,
            '23514': CheckConstraintViolationException,
            '42601': SyntaxErrorException,
            '42702': NonUniqueFieldNameException,
            '42703': InvalidFieldNameException,
            '42P01': TableNotFoundException,
            '42P07': TableExistsException,
        };
        const ctor = byCode[code];
        if (ctor) {
            return new ctor(exception);
        }
        return super.convertException(exception);
    }
}

View File

@@ -0,0 +1,5 @@
import { NativeQueryBuilder } from '../../query/NativeQueryBuilder.js';
/**
 * PostgreSQL flavour of the native query builder.
 * @internal
 */
export declare class PostgreSqlNativeQueryBuilder extends NativeQueryBuilder {
  /** Appends `restart identity cascade` to the generated TRUNCATE statement. */
  protected compileTruncate(): void;
}

View File

@@ -0,0 +1,8 @@
import { NativeQueryBuilder } from '../../query/NativeQueryBuilder.js';
/**
 * PostgreSQL flavour of the native query builder.
 * @internal
 */
export class PostgreSqlNativeQueryBuilder extends NativeQueryBuilder {
    /** PostgreSQL truncate additionally resets identity sequences and cascades to dependent tables. */
    compileTruncate() {
        super.compileTruncate();
        const postgresSuffix = 'restart identity cascade';
        this.parts.push(postgresSuffix);
    }
}

View File

@@ -0,0 +1,110 @@
import { type Dictionary } from '@mikro-orm/core';
import { SchemaHelper } from '../../schema/SchemaHelper.js';
import type { AbstractSqlConnection } from '../../AbstractSqlConnection.js';
import type { CheckDef, Column, ForeignKey, IndexDef, Table, TableDifference } from '../../typings.js';
import type { DatabaseSchema } from '../../schema/DatabaseSchema.js';
import type { DatabaseTable } from '../../schema/DatabaseTable.js';
/**
 * PostgreSQL-specific implementation of the schema introspection/DDL helper.
 * Loads tables, views, columns, indexes, checks, foreign keys and native enums
 * from information_schema / pg_catalog, and renders PostgreSQL DDL statements.
 */
export declare class PostgreSqlSchemaHelper extends SchemaHelper {
  /** Recognized textual default-value expressions, mapped to their normalized aliases. */
  static readonly DEFAULT_VALUES: {
    'now()': string[];
    'current_timestamp(?)': string[];
    "('now'::text)::timestamp(?) with time zone": string[];
    "('now'::text)::timestamp(?) without time zone": string[];
    'null::character varying': string[];
    'null::timestamp with time zone': string[];
    'null::timestamp without time zone': string[];
  };
  getSchemaBeginning(charset: string, disableForeignKeys?: boolean): string;
  getCreateDatabaseSQL(name: string): string;
  getListTablesSQL(): string;
  private getIgnoredViewsCondition;
  getListViewsSQL(): string;
  /** Discovers regular views and registers them on the schema. */
  loadViews(schema: DatabaseSchema, connection: AbstractSqlConnection): Promise<void>;
  getListMaterializedViewsSQL(): string;
  /** Discovers materialized views (pg_matviews) and registers them on the schema. */
  loadMaterializedViews(schema: DatabaseSchema, connection: AbstractSqlConnection, schemaName?: string): Promise<void>;
  createMaterializedView(name: string, schema: string | undefined, definition: string, withData?: boolean): string;
  dropMaterializedViewIfExists(name: string, schema?: string): string;
  refreshMaterializedView(name: string, schema?: string, concurrently?: boolean): string;
  /** Lists all user-defined schema names, excluding ignored/system namespaces. */
  getNamespaces(connection: AbstractSqlConnection): Promise<string[]>;
  private getIgnoredNamespacesConditionSQL;
  /** Bulk-loads columns/indexes/checks/FKs for the given tables and initializes the schema. */
  loadInformationSchema(
    schema: DatabaseSchema,
    connection: AbstractSqlConnection,
    tables: Table[],
    schemas?: string[],
  ): Promise<void>;
  getAllIndexes(connection: AbstractSqlConnection, tables: Table[]): Promise<Dictionary<IndexDef[]>>;
  /**
   * Parses column definitions from the full CREATE INDEX expression.
   * Since pg_get_indexdef(oid, col_num, true) doesn't include sort modifiers,
   * we extract them from the full expression instead.
   *
   * We use columnDefs (from individual pg_get_indexdef calls) as the source
   * of column names, and find their modifiers in the expression.
   */
  private parseIndexColumnsFromExpression;
  /**
   * Extracts the content inside parentheses starting at the given position.
   * Handles nested parentheses correctly.
   */
  private extractParenthesizedContent;
  getAllColumns(
    connection: AbstractSqlConnection,
    tablesBySchemas: Map<string | undefined, Table[]>,
    nativeEnums?: Dictionary<{
      name: string;
      schema?: string;
      items: string[];
    }>,
  ): Promise<Dictionary<Column[]>>;
  getAllChecks(
    connection: AbstractSqlConnection,
    tablesBySchemas: Map<string | undefined, Table[]>,
  ): Promise<Dictionary<CheckDef[]>>;
  getAllForeignKeys(
    connection: AbstractSqlConnection,
    tablesBySchemas: Map<string | undefined, Table[]>,
  ): Promise<Dictionary<Dictionary<ForeignKey>>>;
  /** Loads native enum types and their labels for the given schemas. */
  getNativeEnumDefinitions(
    connection: AbstractSqlConnection,
    schemas: string[],
  ): Promise<
    Dictionary<{
      name: string;
      schema?: string;
      items: string[];
    }>
  >;
  getCreateNativeEnumSQL(name: string, values: unknown[], schema?: string): string;
  getDropNativeEnumSQL(name: string, schema?: string): string;
  getAlterNativeEnumSQL(name: string, schema?: string, value?: string, items?: string[], oldItems?: string[]): string;
  private getEnumDefinitions;
  createTableColumn(column: Column, table: DatabaseTable): string | undefined;
  getPreAlterTable(tableDiff: TableDifference, safe: boolean): string[];
  castColumn(name: string, type: string): string;
  dropForeignKey(tableName: string, constraintName: string): string;
  getPostAlterTable(tableDiff: TableDifference, safe: boolean): string[];
  private getAlterColumnAutoincrement;
  getChangeColumnCommentSQL(tableName: string, to: Column, schemaName?: string): string;
  alterTableComment(table: DatabaseTable, comment?: string): string;
  normalizeDefaultValue(defaultValue: string, length: number): string | number;
  appendComments(table: DatabaseTable): string[];
  getDatabaseExistsSQL(name: string): string;
  getDatabaseNotExistsError(dbName: string): string;
  getManagementDbName(): string;
  disableForeignKeysSQL(): string;
  enableForeignKeysSQL(): string;
  getRenameIndexSQL(tableName: string, index: IndexDef, oldIndexName: string): string[];
  dropIndex(table: string, index: IndexDef, oldIndexName?: string): string;
  /**
   * Build the column list for a PostgreSQL index.
   */
  protected getIndexColumns(index: IndexDef): string;
  /**
   * PostgreSQL-specific index options like fill factor.
   */
  protected getCreateIndexSuffix(index: IndexDef): string;
  private getIndexesSQL;
  private getChecksSQL;
  inferLengthFromColumnType(type: string): number | undefined;
}

View File

@@ -0,0 +1,776 @@
import { DeferMode, EnumType, Type, Utils } from '@mikro-orm/core';
import { SchemaHelper } from '../../schema/SchemaHelper.js';
/** PostGIS system views that should be automatically ignored */
const POSTGIS_VIEWS = ['geography_columns', 'geometry_columns'];
export class PostgreSqlSchemaHelper extends SchemaHelper {
    /** Recognized textual default-value expressions, mapped to their normalized aliases. */
    static DEFAULT_VALUES = {
        'now()': ['now()', 'current_timestamp'],
        'current_timestamp(?)': ['current_timestamp(?)'],
        "('now'::text)::timestamp(?) with time zone": ['current_timestamp(?)'],
        "('now'::text)::timestamp(?) without time zone": ['current_timestamp(?)'],
        'null::character varying': ['null'],
        'null::timestamp with time zone': ['null'],
        'null::timestamp without time zone': ['null'],
    };
getSchemaBeginning(charset, disableForeignKeys) {
if (disableForeignKeys) {
return `set names '${charset}';\n${this.disableForeignKeysSQL()}\n\n`;
}
return `set names '${charset}';\n\n`;
}
getCreateDatabaseSQL(name) {
return `create database ${this.quote(name)}`;
}
    /**
     * SQL listing base tables (with schema name and table comment) from information_schema,
     * excluding ignored namespaces, PostGIS helper tables, views, and child partitions
     * (tables appearing in pg_inherits).
     */
    getListTablesSQL() {
        return (
            `select table_name, table_schema as schema_name, ` +
            `(select pg_catalog.obj_description(c.oid) from pg_catalog.pg_class c
       where c.oid = (select ('"' || table_schema || '"."' || table_name || '"')::regclass::oid) and c.relname = table_name) as table_comment ` +
            `from information_schema.tables ` +
            `where ${this.getIgnoredNamespacesConditionSQL('table_schema')} ` +
            `and table_name != 'geometry_columns' and table_name != 'spatial_ref_sys' and table_type != 'VIEW' ` +
            `and table_name not in (select inhrelid::regclass::text from pg_inherits) ` +
            `order by table_name`
        );
    }
getIgnoredViewsCondition() {
return POSTGIS_VIEWS.map(v => `table_name != '${v}'`).join(' and ');
}
    /** SQL listing regular views (name, schema, definition), excluding ignored namespaces and PostGIS views. */
    getListViewsSQL() {
        return (
            `select table_name as view_name, table_schema as schema_name, view_definition ` +
            `from information_schema.views ` +
            `where ${this.getIgnoredNamespacesConditionSQL('table_schema')} ` +
            `and ${this.getIgnoredViewsCondition()} ` +
            `order by table_name`
        );
    }
async loadViews(schema, connection) {
const views = await connection.execute(this.getListViewsSQL());
for (const view of views) {
const definition = view.view_definition?.trim().replace(/;$/, '') ?? '';
if (definition) {
schema.addView(view.view_name, view.schema_name, definition);
}
}
}
    /** SQL listing materialized views (name, schema, definition) from pg_matviews, excluding ignored namespaces. */
    getListMaterializedViewsSQL() {
        return (
            `select matviewname as view_name, schemaname as schema_name, definition as view_definition ` +
            `from pg_matviews ` +
            `where ${this.getIgnoredNamespacesConditionSQL('schemaname')} ` +
            `order by matviewname`
        );
    }
    /**
     * Discovers materialized views and registers them on the schema (flagged as materialized).
     * NOTE(review): the `schemaName` parameter is currently unused — the listing query scans
     * all non-ignored schemas; confirm whether per-schema filtering is intended here.
     */
    async loadMaterializedViews(schema, connection, schemaName) {
        const views = await connection.execute(this.getListMaterializedViewsSQL());
        for (const view of views) {
            // Strip the trailing semicolon from the stored definition.
            const definition = view.view_definition?.trim().replace(/;$/, '') ?? '';
            if (definition) {
                schema.addView(view.view_name, view.schema_name, definition, true);
            }
        }
    }
createMaterializedView(name, schema, definition, withData = true) {
const viewName = this.quote(this.getTableName(name, schema));
const dataClause = withData ? ' with data' : ' with no data';
return `create materialized view ${viewName} as ${definition}${dataClause}`;
}
dropMaterializedViewIfExists(name, schema) {
return `drop materialized view if exists ${this.quote(this.getTableName(name, schema))} cascade`;
}
refreshMaterializedView(name, schema, concurrently = false) {
const concurrent = concurrently ? ' concurrently' : '';
return `refresh materialized view${concurrent} ${this.quote(this.getTableName(name, schema))}`;
}
async getNamespaces(connection) {
const sql =
`select schema_name from information_schema.schemata ` +
`where ${this.getIgnoredNamespacesConditionSQL()} ` +
`order by schema_name`;
const res = await connection.execute(sql);
return res.map(row => row.schema_name);
}
    /**
     * Builds a SQL condition that excludes system/extension namespaces (pg_*, crdb_*,
     * _timescaledb_* prefixes; information_schema; PostGIS tiger/topology) plus any
     * schemas listed in the `schemaGenerator.ignoreSchema` configuration.
     */
    getIgnoredNamespacesConditionSQL(column = 'schema_name') {
        const ignored = [
            'information_schema',
            'tiger',
            'topology',
            /* v8 ignore next */
            ...(this.platform.getConfig().get('schemaGenerator').ignoreSchema ?? []),
        ]
            .map(s => this.platform.quoteValue(s))
            .join(', ');
        const ignoredPrefixes = ['pg_', 'crdb_', '_timescaledb_'].map(p => `"${column}" not like '${p}%'`).join(' and ');
        return `${ignoredPrefixes} and "${column}" not in (${ignored})`;
    }
    /**
     * Populates the schema with native enum definitions, then (when tables were requested)
     * bulk-loads columns, indexes, checks and foreign keys and initializes each table.
     */
    async loadInformationSchema(schema, connection, tables, schemas) {
        // No explicit tables → fall back to the schema's own name for enum discovery.
        schemas ??= tables.length === 0 ? [schema.name] : tables.map(t => t.schema_name);
        const nativeEnums = await this.getNativeEnumDefinitions(connection, schemas);
        schema.setNativeEnums(nativeEnums);
        if (tables.length === 0) {
            return;
        }
        const tablesBySchema = this.getTablesGroupedBySchemas(tables);
        const columns = await this.getAllColumns(connection, tablesBySchema, nativeEnums);
        const indexes = await this.getAllIndexes(connection, tables);
        const checks = await this.getAllChecks(connection, tablesBySchema);
        const fks = await this.getAllForeignKeys(connection, tablesBySchema);
        for (const t of tables) {
            const key = this.getTableKey(t);
            const table = schema.addTable(t.table_name, t.schema_name, t.table_comment);
            const pks = await this.getPrimaryKeys(connection, indexes[key], table.name, table.schema);
            // Enum-like check constraints double as enum definitions.
            const enums = this.getEnumDefinitions(checks[key] ?? []);
            if (columns[key]) {
                table.init(columns[key], indexes[key], checks[key], pks, fks[key], enums);
            }
        }
    }
    /**
     * Loads index metadata for all given tables in one query and maps each row to an
     * `IndexDef`, extracting INCLUDE columns, per-column sort modifiers, defer mode,
     * fill factor and index type from the raw pg_catalog data.
     */
    async getAllIndexes(connection, tables) {
        const sql = this.getIndexesSQL(tables);
        const unquote = str => str.replace(/['"`]/g, '');
        const allIndexes = await connection.execute(sql);
        const ret = {};
        for (const index of allIndexes) {
            const key = this.getTableKey(index);
            // Extract INCLUDE columns from expression first, to filter them from key columns
            const includeMatch = index.expression?.match(/include\s*\(([^)]+)\)/i);
            const includeColumns = includeMatch ? includeMatch[1].split(',').map(col => unquote(col.trim())) : [];
            // Filter out INCLUDE columns from the column definitions to get only key columns
            const keyColumnDefs = index.index_def.filter(col => !includeColumns.includes(unquote(col)));
            // Parse sort order and NULLS ordering from the full expression
            // pg_get_indexdef individual columns don't include sort modifiers, so we parse from full expression
            const columns = this.parseIndexColumnsFromExpression(index.expression, keyColumnDefs, unquote);
            const columnNames = columns.map(col => col.name);
            const hasAdvancedColumnOptions = columns.some(col => col.sort || col.nulls || col.collation);
            const indexDef = {
                columnNames,
                composite: columnNames.length > 1,
                // JSON columns can have unique index but not unique constraint, and we need to distinguish those, so we can properly drop them
                constraint: index.contype === 'u',
                keyName: index.constraint_name,
                unique: index.unique,
                primary: index.primary,
            };
            // Add columns array if there are advanced options
            if (hasAdvancedColumnOptions) {
                indexDef.columns = columns;
            }
            if (index.condeferrable) {
                indexDef.deferMode = index.condeferred ? DeferMode.INITIALLY_DEFERRED : DeferMode.INITIALLY_IMMEDIATE;
            }
            // Expressions with functions/casts/quotes, or partial indexes, are kept verbatim.
            if (index.index_def.some(col => /[(): ,"'`]/.exec(col)) || index.expression?.match(/ where /i)) {
                indexDef.expression = index.expression;
            }
            // NOTE(review): this duplicates the `condeferrable`/`condeferred` handling above using
            // differently named fields — confirm which result-set shape actually sets `deferrable`.
            if (index.deferrable) {
                indexDef.deferMode = index.initially_deferred ? DeferMode.INITIALLY_DEFERRED : DeferMode.INITIALLY_IMMEDIATE;
            }
            // Extract fillFactor from reloptions
            if (index.reloptions) {
                const fillFactorMatch = index.reloptions.find(opt => opt.startsWith('fillfactor='));
                if (fillFactorMatch) {
                    indexDef.fillFactor = parseInt(fillFactorMatch.split('=')[1], 10);
                }
            }
            // Add INCLUDE columns (already extracted above)
            if (includeColumns.length > 0) {
                indexDef.include = includeColumns;
            }
            // Add index type if not btree (the default)
            if (index.index_type && index.index_type !== 'btree') {
                indexDef.type = index.index_type;
            }
            ret[key] ??= [];
            ret[key].push(indexDef);
        }
        return ret;
    }
/**
* Parses column definitions from the full CREATE INDEX expression.
* Since pg_get_indexdef(oid, col_num, true) doesn't include sort modifiers,
* we extract them from the full expression instead.
*
* We use columnDefs (from individual pg_get_indexdef calls) as the source
* of column names, and find their modifiers in the expression.
*/
    parseIndexColumnsFromExpression(expression, columnDefs, unquote) {
        // Extract just the column list from the expression (between first parens after USING)
        // Pattern: ... USING method (...columns...) [INCLUDE (...)] [WHERE ...]
        // Note: pg_get_indexdef always returns a valid expression with USING clause
        // NOTE(review): relies on that guarantee — a missing USING clause would throw here.
        const usingMatch = /using\s+\w+\s*\(/i.exec(expression);
        const startIdx = usingMatch.index + usingMatch[0].length - 1; // Position of opening (
        const columnsStr = this.extractParenthesizedContent(expression, startIdx);
        // Use the column names from columnDefs and find their modifiers in the expression
        return columnDefs.map(colDef => {
            const name = unquote(colDef);
            const result = { name };
            // Find this column in the expression and extract modifiers
            // Create a pattern that matches the column name (quoted or unquoted) followed by modifiers
            const escapedName = name.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
            const colPattern = new RegExp(`"?${escapedName}"?\\s*([^,)]*?)(?:,|$)`, 'i');
            const colMatch = columnsStr.match(colPattern);
            if (colMatch) {
                const modifiers = colMatch[1];
                // Extract sort order (PostgreSQL omits ASC in output as it's the default)
                if (/\bdesc\b/i.test(modifiers)) {
                    result.sort = 'DESC';
                }
                // Extract NULLS ordering
                const nullsMatch = /nulls\s+(first|last)/i.exec(modifiers);
                if (nullsMatch) {
                    result.nulls = nullsMatch[1].toUpperCase();
                }
                // Extract collation
                const collateMatch = /collate\s+"?([^"\s,)]+)"?/i.exec(modifiers);
                if (collateMatch) {
                    result.collation = collateMatch[1];
                }
            }
            return result;
        });
    }
/**
* Extracts the content inside parentheses starting at the given position.
* Handles nested parentheses correctly.
*/
extractParenthesizedContent(str, startIdx) {
let depth = 0;
const start = startIdx + 1;
for (let i = startIdx; i < str.length; i++) {
if (str[i] === '(') {
depth++;
} else if (str[i] === ')') {
depth--;
if (depth === 0) {
return str.slice(start, i);
}
}
}
/* v8 ignore next - pg_get_indexdef always returns balanced parentheses */
return '';
}
    /**
     * Loads column metadata for all requested tables in a single query against
     * information_schema.columns (joined with pg_class/pg_attribute for comments and
     * type modifiers), normalizes the reported types, and maps each row to a `Column`.
     * Returns a dictionary keyed by table key.
     */
    async getAllColumns(connection, tablesBySchemas, nativeEnums) {
        const sql = `select table_schema as schema_name, table_name, column_name,
    column_default,
    is_nullable,
    udt_name,
    udt_schema,
    coalesce(datetime_precision, character_maximum_length) length,
    atttypmod custom_length,
    numeric_precision,
    numeric_scale,
    data_type,
    is_identity,
    identity_generation,
    generation_expression,
    pg_catalog.col_description(pgc.oid, cols.ordinal_position::int) column_comment
    from information_schema.columns cols
    join pg_class pgc on cols.table_name = pgc.relname
    join pg_attribute pga on pgc.oid = pga.attrelid and cols.column_name = pga.attname
    where (${[...tablesBySchemas.entries()].map(([schema, tables]) => `(table_schema = ${this.platform.quoteValue(schema)} and table_name in (${tables.map(t => this.platform.quoteValue(t.table_name)).join(',')}))`).join(' or ')})
    order by ordinal_position`;
        const allColumns = await connection.execute(sql);
        const str = val => (val != null ? '' + val : val);
        const ret = {};
        for (const col of allColumns) {
            const mappedType = connection.getPlatform().getMappedType(col.data_type);
            // Serial (nextval default) or identity numeric columns are treated as autoincrement.
            const increments =
                (col.column_default?.includes('nextval') || col.is_identity === 'YES') &&
                connection.getPlatform().isNumericColumn(mappedType);
            const key = this.getTableKey(col);
            ret[key] ??= [];
            // Array types are reported as data_type 'ARRAY' with a '_'-prefixed udt_name.
            let type = col.data_type.toLowerCase() === 'array' ? col.udt_name.replace(/^_(.*)$/, '$1[]') : col.udt_name;
            if (
                col.data_type === 'USER-DEFINED' &&
                col.udt_schema &&
                col.udt_schema !== this.platform.getDefaultSchemaName()
            ) {
                type = `${col.udt_schema}.${type}`;
            }
            if (type === 'bpchar') {
                type = 'char';
            }
            // pgvector columns carry their dimension in atttypmod rather than character_maximum_length.
            if (type === 'vector' && col.length == null && col.custom_length != null && col.custom_length !== -1) {
                col.length = col.custom_length;
            }
            if (col.length != null && !type.endsWith(`(${col.length})`) && !['text', 'date'].includes(type)) {
                type += `(${col.length})`;
            }
            if (type === 'numeric' && col.numeric_precision != null && col.numeric_scale != null) {
                type += `(${col.numeric_precision},${col.numeric_scale})`;
            }
            const length = this.inferLengthFromColumnType(type) === -1 ? -1 : col.length;
            const column = {
                name: col.column_name,
                type,
                mappedType,
                length,
                precision: col.numeric_precision,
                scale: col.numeric_scale,
                nullable: col.is_nullable === 'YES',
                default: str(this.normalizeDefaultValue(col.column_default, col.length)),
                unsigned: increments,
                autoincrement: increments,
                generated:
                    col.is_identity === 'YES'
                        ? col.identity_generation === 'BY DEFAULT'
                            ? 'by default as identity'
                            : 'identity'
                        : col.generation_expression
                          ? col.generation_expression + ' stored'
                          : undefined,
                comment: col.column_comment,
            };
            // Columns typed as a known native enum are remapped to EnumType with its items.
            if (nativeEnums?.[column.type]) {
                column.mappedType = Type.getType(EnumType);
                column.nativeEnumName = column.type;
                column.enumItems = nativeEnums[column.type]?.items;
            }
            ret[key].push(column);
        }
        return ret;
    }
    /**
     * Loads check constraints for the given tables, de-duplicating rows reported once
     * per referenced column, and extracting a simplified expression when possible.
     */
    async getAllChecks(connection, tablesBySchemas) {
        const sql = this.getChecksSQL(tablesBySchemas);
        const allChecks = await connection.execute(sql);
        const ret = {};
        const seen = new Set();
        for (const check of allChecks) {
            const key = this.getTableKey(check);
            const dedupeKey = `${key}:${check.name}`;
            if (seen.has(dedupeKey)) {
                continue;
            }
            seen.add(dedupeKey);
            ret[key] ??= [];
            // `check ((...))` → inner expression, with `(x)::type` casts unwrapped.
            const m = /^check \(\((.*)\)\)$/is.exec(check.expression);
            const def = m?.[1].replace(/\((.*?)\)::\w+/g, '$1');
            ret[key].push({
                name: check.name,
                columnName: check.column_name,
                definition: check.expression,
                expression: def,
            });
        }
        return ret;
    }
    /**
     * Loads foreign keys for the given tables from pg_catalog in one query, normalizes
     * the single-letter referential action codes to SQL keywords, and groups the result
     * by table key via `mapForeignKeys`.
     */
    async getAllForeignKeys(connection, tablesBySchemas) {
        const sql = `select nsp1.nspname schema_name, cls1.relname table_name, nsp2.nspname referenced_schema_name,
    cls2.relname referenced_table_name, a.attname column_name, af.attname referenced_column_name, conname constraint_name,
    confupdtype update_rule, confdeltype delete_rule, array_position(con.conkey,a.attnum) as ord, condeferrable, condeferred,
    pg_get_constraintdef(con.oid) as constraint_def
    from pg_attribute a
    join pg_constraint con on con.conrelid = a.attrelid AND a.attnum = ANY (con.conkey)
    join pg_attribute af on af.attnum = con.confkey[array_position(con.conkey,a.attnum)] AND af.attrelid = con.confrelid
    join pg_namespace nsp1 on nsp1.oid = con.connamespace
    join pg_class cls1 on cls1.oid = con.conrelid
    join pg_class cls2 on cls2.oid = confrelid
    join pg_namespace nsp2 on nsp2.oid = cls2.relnamespace
    where (${[...tablesBySchemas.entries()].map(([schema, tables]) => `(cls1.relname in (${tables.map(t => this.platform.quoteValue(t.table_name)).join(',')}) and nsp1.nspname = ${this.platform.quoteValue(schema)})`).join(' or ')})
    and confrelid > 0
    order by nsp1.nspname, cls1.relname, constraint_name, ord`;
        const allFks = await connection.execute(sql);
        const ret = {};
        // Maps pg_constraint action codes (r/c/n/d/a) to SQL keywords; for SET NULL/SET DEFAULT
        // with an explicit column list, the verbatim clause is taken from the constraint definition.
        function mapReferentialIntegrity(value, def) {
            const match = ['n', 'd'].includes(value) && /ON DELETE (SET (NULL|DEFAULT) \(.*?\))/.exec(def);
            if (match) {
                return match[1];
            }
            /* v8 ignore next */
            switch (value) {
                case 'r':
                    return 'RESTRICT';
                case 'c':
                    return 'CASCADE';
                case 'n':
                    return 'SET NULL';
                case 'd':
                    return 'SET DEFAULT';
                case 'a':
                default:
                    return 'NO ACTION';
            }
        }
        for (const fk of allFks) {
            fk.update_rule = mapReferentialIntegrity(fk.update_rule, fk.constraint_def);
            fk.delete_rule = mapReferentialIntegrity(fk.delete_rule, fk.constraint_def);
            if (fk.condeferrable) {
                fk.defer_mode = fk.condeferred ? DeferMode.INITIALLY_DEFERRED : DeferMode.INITIALLY_IMMEDIATE;
            }
            const key = this.getTableKey(fk);
            ret[key] ??= [];
            ret[key].push(fk);
        }
        Object.keys(ret).forEach(key => {
            const [schemaName, tableName] = key.split('.');
            ret[key] = this.mapForeignKeys(ret[key], tableName, schemaName);
        });
        return ret;
    }
    /**
     * Loads native enum types and their ordered labels for the given schemas.
     * Keys in the returned dictionary are schema-qualified unless the enum lives
     * in the default schema.
     */
    async getNativeEnumDefinitions(connection, schemas) {
        const uniqueSchemas = Utils.unique(schemas);
        const res = await connection.execute(
            `select t.typname as enum_name, n.nspname as schema_name, array_agg(e.enumlabel order by e.enumsortorder) as enum_value
       from pg_type t
       join pg_enum e on t.oid = e.enumtypid
       join pg_catalog.pg_namespace n on n.oid = t.typnamespace
       where n.nspname in (${Array(uniqueSchemas.length).fill('?').join(', ')})
       group by t.typname, n.nspname`,
            uniqueSchemas,
        );
        return res.reduce((o, row) => {
            let name = row.enum_name;
            if (row.schema_name && row.schema_name !== this.platform.getDefaultSchemaName()) {
                name = row.schema_name + '.' + name;
            }
            // Some drivers return the aggregated labels as a marshalled string instead of an array.
            let items = row.enum_value;
            if (!Array.isArray(items)) {
                items = this.platform.unmarshallArray(row.enum_value);
            }
            o[name] = {
                name: row.enum_name,
                schema: row.schema_name,
                items,
            };
            return o;
        }, {});
    }
getCreateNativeEnumSQL(name, values, schema) {
if (schema && schema !== this.platform.getDefaultSchemaName()) {
name = schema + '.' + name;
}
return `create type ${this.quote(name)} as enum (${values.map(value => this.platform.quoteValue(value)).join(', ')})`;
}
getDropNativeEnumSQL(name, schema) {
if (schema && schema !== this.platform.getDefaultSchemaName()) {
name = schema + '.' + name;
}
return `drop type ${this.quote(name)}`;
}
    /**
     * DDL adding a value to a native enum type, positioned before/after existing items
     * so the final label order matches `items`.
     * NOTE(review): `value` is optional per the declaration; if omitted, `quoteValue(undefined)`
     * would be rendered into the statement — confirm callers always supply it.
     */
    getAlterNativeEnumSQL(name, schema, value, items, oldItems) {
        if (schema && schema !== this.platform.getDefaultSchemaName()) {
            name = schema + '.' + name;
        }
        let suffix = '';
        if (items && value && oldItems) {
            const position = items.indexOf(value);
            if (position > 0) {
                suffix = ` after ${this.platform.quoteValue(items[position - 1])}`;
            } else if (items.length > 1 && oldItems.length > 0) {
                suffix = ` before ${this.platform.quoteValue(oldItems[0])}`;
            }
        }
        return `alter type ${this.quote(name)} add value if not exists ${this.platform.quoteValue(value)}${suffix}`;
    }
    /**
     * Derives enum definitions (column → allowed values) from check constraints that
     * encode an enum-like restriction, and rewrites the matched checks into a canonical
     * `"col" in ('a', 'b')` form.
     */
    getEnumDefinitions(checks) {
        return checks.reduce((o, item) => {
            // check constraints are defined as one of:
            // `CHECK ((type = ANY (ARRAY['local'::text, 'global'::text])))`
            // `CHECK (("columnName" = ANY (ARRAY['local'::text, 'global'::text])))`
            // `CHECK (((enum_test)::text = ANY ((ARRAY['a'::character varying, 'b'::character varying, 'c'::character varying])::text[])))`
            // `CHECK ((("enumTest")::text = ANY ((ARRAY['a'::character varying, 'b'::character varying, 'c'::character varying])::text[])))`
            // `CHECK ((type = 'a'::text))`
            const m1 =
                item.definition?.match(/check \(\(\("?(\w+)"?\)::/i) || item.definition?.match(/check \(\("?(\w+)"? = /i);
            const m2 = item.definition?.match(/\(array\[(.*)]\)/i) || item.definition?.match(/ = (.*)\)/i);
            if (item.columnName && m1 && m2) {
                const m3 = m2[1].match(/('[^']*'::text)/g);
                let items;
                /* v8 ignore next */
                if (m3) {
                    items = m3.map(item => /^\(?'(.*)'/.exec(item.trim())?.[1]);
                } else {
                    items = m2[1].split(',').map(item => /^\(?'(.*)'/.exec(item.trim())?.[1]);
                }
                // Drop entries whose quoted literal could not be parsed.
                items = items.filter(item => item !== undefined);
                if (items.length > 0) {
                    o[item.columnName] = items;
                    item.expression = `${this.quote(item.columnName)} in ('${items.join("', '")}')`;
                    item.definition = `check (${item.expression})`;
                }
            }
            return o;
        }, {});
    }
    /**
     * Renders the column definition fragment for a CREATE TABLE statement:
     * serial/identity handling, native enum quoting, generated columns,
     * nullability, inline primary key and default value.
     */
    createTableColumn(column, table) {
        const pk = table.getPrimaryKey();
        const compositePK = pk?.composite;
        const primaryKey = !this.hasNonDefaultPrimaryKeyName(table);
        const col = [this.quote(column.name)];
        if (column.autoincrement && !column.generated && !compositePK) {
            col.push(column.mappedType.getColumnType({ autoincrement: true }, this.platform));
        } else {
            let columnType = column.type;
            if (column.nativeEnumName) {
                // `*.<enum>` means "current table's schema" — resolve the wildcard.
                const parts = column.type.split('.');
                if (parts.length === 2 && parts[0] === '*') {
                    columnType = `${table.schema}.${parts[1]}`;
                }
                // Quote the enum type name, keeping a trailing `[]` array suffix outside the quotes.
                if (columnType.endsWith('[]')) {
                    columnType = this.quote(columnType.substring(0, columnType.length - 2)) + '[]';
                } else {
                    columnType = this.quote(columnType);
                }
            }
            if (column.generated === 'by default as identity') {
                columnType += ` generated ${column.generated}`;
            } else if (column.generated) {
                columnType += ` generated always as ${column.generated}`;
            }
            col.push(columnType);
            Utils.runIfNotEmpty(() => col.push('null'), column.nullable);
            Utils.runIfNotEmpty(() => col.push('not null'), !column.nullable);
        }
        if (column.autoincrement && !compositePK) {
            Utils.runIfNotEmpty(() => col.push('primary key'), primaryKey && column.primary);
        }
        // Autoincrement columns get their default from the sequence/identity instead.
        const useDefault = column.default != null && column.default !== 'null' && !column.autoincrement;
        Utils.runIfNotEmpty(() => col.push(`default ${column.default}`), useDefault);
        return col.join(' ');
    }
    /**
     * Statements that must run before the main ALTER TABLE: dropping defaults on
     * columns changing to/from native enums, casting uuid columns to text, and
     * dropping defaults on columns losing autoincrement.
     */
    getPreAlterTable(tableDiff, safe) {
        const ret = [];
        const parts = tableDiff.name.split('.');
        const tableName = parts.pop();
        const schemaName = parts.pop();
        /* v8 ignore next */
        const name =
            (schemaName && schemaName !== this.platform.getDefaultSchemaName() ? schemaName + '.' : '') + tableName;
        const quotedName = this.quote(name);
        // detect that the column was an enum before and remove the check constraint in such case here
        const changedEnums = Object.values(tableDiff.changedColumns).filter(
            col => col.fromColumn.mappedType instanceof EnumType,
        );
        for (const col of changedEnums) {
            // Both directions (check-enum → native enum and back) require dropping the old default first.
            if (!col.fromColumn.nativeEnumName && col.column.nativeEnumName && col.fromColumn.default) {
                ret.push(`alter table ${quotedName} alter column "${col.column.name}" drop default`);
            }
            if (col.fromColumn.nativeEnumName && !col.column.nativeEnumName && col.fromColumn.default) {
                ret.push(`alter table ${quotedName} alter column "${col.column.name}" drop default`);
            }
        }
        // changing uuid column type requires to cast it to text first
        const uuids = Object.values(tableDiff.changedColumns).filter(
            col => col.changedProperties.has('type') && col.fromColumn.type === 'uuid',
        );
        for (const col of uuids) {
            ret.push(
                `alter table ${quotedName} alter column "${col.column.name}" type text using ("${col.column.name}"::text)`,
            );
        }
        for (const { column } of Object.values(tableDiff.changedColumns).filter(diff =>
            diff.changedProperties.has('autoincrement'),
        )) {
            // Column loses autoincrement without gaining a default → drop the sequence default.
            if (!column.autoincrement && column.default == null) {
                ret.push(`alter table ${quotedName} alter column ${this.quote(column.name)} drop default`);
            }
        }
        return ret;
    }
castColumn(name, type) {
if (type === 'uuid') {
type = 'text::uuid';
}
return ` using (${this.quote(name)}::${type})`;
}
dropForeignKey(tableName, constraintName) {
return `alter table ${this.quote(tableName)} drop constraint ${this.quote(constraintName)}`;
}
    /**
     * Statements that must run after the main ALTER TABLE: restoring defaults on
     * columns converted to/from native enums and (re)wiring sequences for columns
     * gaining autoincrement.
     */
    getPostAlterTable(tableDiff, safe) {
        const ret = [];
        const parts = tableDiff.name.split('.');
        const tableName = parts.pop();
        const schemaName = parts.pop();
        /* v8 ignore next */
        const name =
            (schemaName && schemaName !== this.platform.getDefaultSchemaName() ? schemaName + '.' : '') + tableName;
        const quotedName = this.quote(name);
        // detect that the column was an enum before and remove the check constraint in such a case here
        const changedEnums = Object.values(tableDiff.changedColumns).filter(
            col => col.fromColumn.mappedType instanceof EnumType,
        );
        for (const col of changedEnums) {
            // Both branches emit the same statement — they cover the two conversion directions.
            if (!col.fromColumn.nativeEnumName && col.column.nativeEnumName && col.column.default) {
                ret.push(`alter table ${quotedName} alter column "${col.column.name}" set default ${col.column.default}`);
            }
            if (col.fromColumn.nativeEnumName && !col.column.nativeEnumName && col.column.default) {
                ret.push(`alter table ${quotedName} alter column "${col.column.name}" set default ${col.column.default}`);
            }
        }
        for (const { column } of Object.values(tableDiff.changedColumns).filter(diff =>
            diff.changedProperties.has('autoincrement'),
        )) {
            ret.push(...this.getAlterColumnAutoincrement(tableName, column, schemaName));
        }
        return ret;
    }
    /**
     * Statements that turn an existing column into an autoincrement one: create the
     * backing sequence, seed it from the current max value, and set it as the default.
     * Returns an empty list when the column is not autoincrement.
     */
    getAlterColumnAutoincrement(tableName, column, schemaName) {
        const ret = [];
        /* v8 ignore next */
        const name =
            (schemaName && schemaName !== this.platform.getDefaultSchemaName() ? schemaName + '.' : '') + tableName;
        if (column.autoincrement) {
            const seqName = this.platform.getIndexName(tableName, [column.name], 'sequence');
            ret.push(`create sequence if not exists ${this.quote(seqName)}`);
            // Seed the sequence from the current max value so new inserts do not collide.
            ret.push(`select setval('${seqName}', (select max(${this.quote(column.name)}) from ${this.quote(name)}))`);
            ret.push(
                `alter table ${this.quote(name)} alter column ${this.quote(column.name)} set default nextval('${seqName}')`,
            );
        }
        return ret;
    }
getChangeColumnCommentSQL(tableName, to, schemaName) {
const name = this.quote(
(schemaName && schemaName !== this.platform.getDefaultSchemaName() ? schemaName + '.' : '') + tableName,
);
const value = to.comment ? this.platform.quoteValue(to.comment) : 'null';
return `comment on column ${name}.${this.quote(to.name)} is ${value}`;
}
alterTableComment(table, comment) {
return `comment on table ${table.getQuotedName()} is ${this.platform.quoteValue(comment ?? '')}`;
}
normalizeDefaultValue(defaultValue, length) {
if (!defaultValue || typeof defaultValue !== 'string') {
return super.normalizeDefaultValue(defaultValue, length, PostgreSqlSchemaHelper.DEFAULT_VALUES);
}
const match = /^'(.*)'::(.*)$/.exec(defaultValue);
if (match) {
if (match[2] === 'integer') {
return +match[1];
}
return `'${match[1]}'`;
}
return super.normalizeDefaultValue(defaultValue, length, PostgreSqlSchemaHelper.DEFAULT_VALUES);
}
appendComments(table) {
const sql = [];
if (table.comment) {
const comment = this.platform.quoteValue(this.processComment(table.comment));
sql.push(`comment on table ${table.getQuotedName()} is ${comment}`);
}
for (const column of table.getColumns()) {
if (column.comment) {
const comment = this.platform.quoteValue(this.processComment(column.comment));
sql.push(`comment on column ${table.getQuotedName()}.${this.quote(column.name)} is ${comment}`);
}
}
return sql;
}
getDatabaseExistsSQL(name) {
return `select 1 from pg_database where datname = '${name}'`;
}
getDatabaseNotExistsError(dbName) {
return `database ${this.quote(dbName)} does not exist`;
}
getManagementDbName() {
return this.platform.getConfig().get('schemaGenerator', {}).managementDbName ?? 'postgres';
}
  // Switching the session to replica role suppresses FK trigger enforcement.
  disableForeignKeysSQL() {
    return `set session_replication_role = 'replica';`;
  }
  // Restores the default session role, re-enabling FK trigger enforcement.
  enableForeignKeysSQL() {
    return `set session_replication_role = 'origin';`;
  }
getRenameIndexSQL(tableName, index, oldIndexName) {
oldIndexName = this.quote(oldIndexName);
const keyName = this.quote(index.keyName);
return [`alter index ${oldIndexName} rename to ${keyName}`];
}
dropIndex(table, index, oldIndexName = index.keyName) {
if (index.primary || (index.unique && index.constraint)) {
return `alter table ${this.quote(table)} drop constraint ${this.quote(oldIndexName)}`;
}
return `drop index ${this.quote(oldIndexName)}`;
}
/**
* Build the column list for a PostgreSQL index.
*/
getIndexColumns(index) {
if (index.columns?.length) {
return index.columns
.map(col => {
let colDef = this.quote(col.name);
// PostgreSQL supports collation with double quotes
if (col.collation) {
colDef += ` collate ${this.quote(col.collation)}`;
}
// PostgreSQL supports sort order
if (col.sort) {
colDef += ` ${col.sort}`;
}
// PostgreSQL supports NULLS FIRST/LAST
if (col.nulls) {
colDef += ` nulls ${col.nulls}`;
}
return colDef;
})
.join(', ');
}
return index.columnNames.map(c => this.quote(c)).join(', ');
}
/**
* PostgreSQL-specific index options like fill factor.
*/
getCreateIndexSuffix(index) {
const withOptions = [];
if (index.fillFactor != null) {
withOptions.push(`fillfactor = ${index.fillFactor}`);
}
if (withOptions.length > 0) {
return ` with (${withOptions.join(', ')})`;
}
return super.getCreateIndexSuffix(index);
}
  // Single catalog query returning indexes for all requested tables.
  // `index_def` expands to one entry per indexed column (via generate_subscripts),
  // `expression` is the full CREATE INDEX statement; pg_constraint is left-joined
  // to detect constraint-backed indexes and their deferrability flags.
  getIndexesSQL(tables) {
    return `select indrelid::regclass as table_name, ns.nspname as schema_name, relname as constraint_name, idx.indisunique as unique, idx.indisprimary as primary, contype, condeferrable, condeferred,
      array(
        select pg_get_indexdef(idx.indexrelid, k + 1, true)
        from generate_subscripts(idx.indkey, 1) as k
        order by k
      ) as index_def,
      pg_get_indexdef(idx.indexrelid) as expression,
      c.condeferrable as deferrable,
      c.condeferred as initially_deferred,
      i.reloptions,
      am.amname as index_type
      from pg_index idx
      join pg_class as i on i.oid = idx.indexrelid
      join pg_namespace as ns on i.relnamespace = ns.oid
      join pg_am as am on am.oid = i.relam
      left join pg_constraint as c on c.conname = i.relname
      where indrelid in (${tables.map(t => `${this.platform.quoteValue(`${this.quote(t.schema_name)}.${this.quote(t.table_name)}`)}::regclass`).join(', ')})
      order by relname`;
  }
  // Reads check constraints (contype = 'c') for the given schema → tables map,
  // joining constraint_column_usage to resolve the owning column of each check.
  getChecksSQL(tablesBySchemas) {
    return `select ccu.table_name as table_name, ccu.table_schema as schema_name, pgc.conname as name, conrelid::regclass as table_from, ccu.column_name as column_name, pg_get_constraintdef(pgc.oid) as expression
      from pg_constraint pgc
      join pg_namespace nsp on nsp.oid = pgc.connamespace
      join pg_class cls on pgc.conrelid = cls.oid
      join information_schema.constraint_column_usage ccu on pgc.conname = ccu.constraint_name and nsp.nspname = ccu.constraint_schema and cls.relname = ccu.table_name
      where contype = 'c' and (${[...tablesBySchemas.entries()].map(([schema, tables]) => `ccu.table_name in (${tables.map(t => this.platform.quoteValue(t.table_name)).join(',')}) and ccu.table_schema = ${this.platform.quoteValue(schema)}`).join(' or ')})
      order by pgc.conname`;
  }
inferLengthFromColumnType(type) {
const match = /^(\w+(?:\s+\w+)*)\s*(?:\(\s*(\d+)\s*\)|$)/.exec(type);
if (!match) {
return;
}
if (!match[2]) {
switch (match[1]) {
case 'character varying':
case 'varchar':
case 'bpchar':
case 'char':
case 'character':
return -1;
case 'interval':
case 'time':
case 'timestamp':
case 'timestamptz':
return this.platform.getDefaultDateTimeLength();
}
return;
}
return +match[2];
}
}

View File

@@ -0,0 +1,4 @@
export * from './PostgreSqlNativeQueryBuilder.js';
export * from './BasePostgreSqlPlatform.js';
export * from './FullTextType.js';
export * from './PostgreSqlSchemaHelper.js';

View File

@@ -0,0 +1,4 @@
export * from './PostgreSqlNativeQueryBuilder.js';
export * from './BasePostgreSqlPlatform.js';
export * from './FullTextType.js';
export * from './PostgreSqlSchemaHelper.js';

View File

@@ -0,0 +1,8 @@
import { type Dialect } from 'kysely';
import type { Dictionary } from '@mikro-orm/core';
import { AbstractSqlConnection } from '../../AbstractSqlConnection.js';
/** SQLite connection base class; the concrete Kysely dialect is supplied via `driverOptions`. */
export declare class BaseSqliteConnection extends AbstractSqlConnection {
  /** Throws by default — a dialect must be configured through `driverOptions`. */
  createKyselyDialect(options: Dictionary): Dialect;
  /** Connects, enables `pragma foreign_keys`, then attaches configured databases. */
  connect(options?: { skipOnConnect?: boolean }): Promise<void>;
  /** Runs `attach database` for every entry in the `attachDatabases` config option. */
  protected attachDatabases(): Promise<void>;
}

View File

@@ -0,0 +1,27 @@
import { CompiledQuery } from 'kysely';
import { AbstractSqlConnection } from '../../AbstractSqlConnection.js';
/** SQLite connection base class; the concrete Kysely dialect is supplied via `driverOptions`. */
export class BaseSqliteConnection extends AbstractSqlConnection {
  /** Throws by default — a dialect must be configured through `driverOptions`. */
  createKyselyDialect(options) {
    throw new Error(
      'No SQLite dialect configured. Pass a Kysely dialect via the `driverOptions` config option, ' +
        'e.g. `new NodeSqliteDialect(...)` for node:sqlite or a custom dialect for other libraries.',
    );
  }

  /** Connects, enables foreign-key enforcement, then attaches configured databases. */
  async connect(options) {
    await super.connect(options);
    // SQLite disables FK enforcement by default; turn it on for every connection
    await this.getClient().executeQuery(CompiledQuery.raw('pragma foreign_keys = on'));
    await this.attachDatabases();
  }

  /** Runs `attach database` for every entry in the `attachDatabases` config option. */
  async attachDatabases() {
    const attachDatabases = this.config.get('attachDatabases');
    if (!attachDatabases?.length) {
      return;
    }
    const { fs } = await import('@mikro-orm/core/fs-utils');
    const baseDir = this.config.get('baseDir');
    for (const db of attachDatabases) {
      const path = fs.absolutePath(db.path, baseDir);
      // escape single quotes so paths containing `'` cannot break the SQL literal
      await this.execute(`attach database '${path.replace(/'/g, "''")}' as ${this.platform.quoteIdentifier(db.name)}`);
    }
  }
}

View File

@@ -0,0 +1,21 @@
import { SqliteDialect } from 'kysely';
/**
* Kysely dialect for `node:sqlite` (Node.js 22.5+, Deno 2.2+).
*
* Bridges `node:sqlite`'s `DatabaseSync` to the `better-sqlite3` interface
* that Kysely's `SqliteDialect` expects.
*
* @example
* ```ts
* import { SqliteDriver, NodeSqliteDialect } from '@mikro-orm/sql';
*
* const orm = await MikroORM.init({
* driver: SqliteDriver,
* dbName: ':memory:',
* driverOptions: new NodeSqliteDialect(':memory:'),
* });
* ```
*/
export declare class NodeSqliteDialect extends SqliteDialect {
  /** @param dbName path to the database file, or `':memory:'` for an in-memory database */
  constructor(dbName: string);
}

View File

@@ -0,0 +1,43 @@
import { SqliteDialect } from 'kysely';
/**
 * Kysely dialect for `node:sqlite` (Node.js 22.5+, Deno 2.2+).
 *
 * Bridges `node:sqlite`'s `DatabaseSync` to the `better-sqlite3` interface
 * that Kysely's `SqliteDialect` expects.
 *
 * @example
 * ```ts
 * import { SqliteDriver, NodeSqliteDialect } from '@mikro-orm/sql';
 *
 * const orm = await MikroORM.init({
 *   driver: SqliteDriver,
 *   dbName: ':memory:',
 *   driverOptions: new NodeSqliteDialect(':memory:'),
 * });
 * ```
 */
export class NodeSqliteDialect extends SqliteDialect {
  constructor(dbName) {
    // `getBuiltinModule` avoids a static import, so loading this module does not
    // fail on runtimes without `node:sqlite`.
    const { DatabaseSync } = globalThis.process.getBuiltinModule('node:sqlite');
    const openDatabase = () => {
      const handle = new DatabaseSync(dbName);
      return {
        prepare: sql => {
          const statement = handle.prepare(sql);
          // statements that produce rows must be flagged so Kysely reads them
          const producesRows = /^\s*(select|pragma|explain|with)/i.test(sql) || /\breturning\b/i.test(sql);
          return {
            reader: producesRows,
            all: params => statement.all(...params),
            run: params => statement.run(...params),
            /* v8 ignore next */
            get: params => statement.get(...params),
          };
        },
        close: () => handle.close(),
      };
    };
    super({ database: openDatabase });
  }
}

View File

@@ -0,0 +1,12 @@
import type { Configuration } from '@mikro-orm/core';
import { AbstractSqlDriver } from '../../AbstractSqlDriver.js';
import { BaseSqliteConnection } from './BaseSqliteConnection.js';
/**
* Generic SQLite driver that uses `driverOptions` for the Kysely dialect.
* Use this with any SQLite library by passing a Kysely dialect via `driverOptions`.
*
* For the default better-sqlite3 experience, use `@mikro-orm/sqlite` instead.
*/
export declare class SqliteDriver extends AbstractSqlDriver<BaseSqliteConnection> {
  /** @param config ORM configuration used to wire the platform and connection */
  constructor(config: Configuration);
}

View File

@@ -0,0 +1,14 @@
import { AbstractSqlDriver } from '../../AbstractSqlDriver.js';
import { BaseSqliteConnection } from './BaseSqliteConnection.js';
import { SqlitePlatform } from './SqlitePlatform.js';
/**
 * Generic SQLite driver that uses `driverOptions` for the Kysely dialect.
 * Use this with any SQLite library by passing a Kysely dialect via `driverOptions`.
 *
 * For the default better-sqlite3 experience, use `@mikro-orm/sqlite` instead.
 */
export class SqliteDriver extends AbstractSqlDriver {
  // NOTE(review): the trailing `['kysely']` presumably lists required runtime
  // dependencies passed to the base driver — confirm against AbstractSqlDriver.
  constructor(config) {
    super(config, new SqlitePlatform(), BaseSqliteConnection, ['kysely']);
  }
}

View File

@@ -0,0 +1,9 @@
import { ExceptionConverter, type Dictionary, type DriverException } from '@mikro-orm/core';
/** Maps SQLite error messages to MikroORM driver exception types. */
export declare class SqliteExceptionConverter extends ExceptionConverter {
  /**
   * @inheritDoc
   * @see http://www.sqlite.org/c3ref/c_abort.html
   * @see https://github.com/doctrine/dbal/blob/master/src/Driver/AbstractSQLiteDriver.php
   */
  convertException(exception: Error & Dictionary): DriverException;
}

View File

@@ -0,0 +1,70 @@
import {
ConnectionException,
ExceptionConverter,
InvalidFieldNameException,
LockWaitTimeoutException,
NonUniqueFieldNameException,
CheckConstraintViolationException,
NotNullConstraintViolationException,
ReadOnlyException,
SyntaxErrorException,
TableExistsException,
TableNotFoundException,
UniqueConstraintViolationException,
ForeignKeyConstraintViolationException,
} from '@mikro-orm/core';
/** Maps SQLite error messages to MikroORM driver exception types. */
export class SqliteExceptionConverter extends ExceptionConverter {
  /**
   * @inheritDoc
   * @see http://www.sqlite.org/c3ref/c_abort.html
   * @see https://github.com/doctrine/dbal/blob/master/src/Driver/AbstractSQLiteDriver.php
   */
  convertException(exception) {
    // message-substring matchers, checked in priority order — first hit wins
    const matchers = [
      [['database is locked'], LockWaitTimeoutException],
      [['must be unique', 'is not unique', 'are not unique', 'UNIQUE constraint failed'], UniqueConstraintViolationException],
      [['may not be NULL', 'NOT NULL constraint failed'], NotNullConstraintViolationException],
      [['CHECK constraint failed'], CheckConstraintViolationException],
      [['no such table:'], TableNotFoundException],
      [['already exists'], TableExistsException],
      [['no such column:'], InvalidFieldNameException],
      [['ambiguous column name'], NonUniqueFieldNameException],
      [['syntax error'], SyntaxErrorException],
      [['attempt to write a readonly database'], ReadOnlyException],
      [['unable to open database file'], ConnectionException],
      [['FOREIGN KEY constraint failed'], ForeignKeyConstraintViolationException],
    ];
    for (const [needles, ExceptionType] of matchers) {
      if (needles.some(needle => exception.message.includes(needle))) {
        return new ExceptionType(exception);
      }
    }
    return super.convertException(exception);
  }
}

View File

@@ -0,0 +1,6 @@
import { NativeQueryBuilder } from '../../query/NativeQueryBuilder.js';
/** @internal */
export declare class SqliteNativeQueryBuilder extends NativeQueryBuilder {
  /** SQLite has no `truncate`; compiles to `delete from <table>` instead. */
  protected compileTruncate(): void;
  /** No-op — SQLite does not support row-level lock clauses. */
  protected addLockClause(): void;
}

View File

@@ -0,0 +1,11 @@
import { NativeQueryBuilder } from '../../query/NativeQueryBuilder.js';
/** @internal */
export class SqliteNativeQueryBuilder extends NativeQueryBuilder {
  /** SQLite has no `truncate`; an unqualified delete empties the table instead. */
  compileTruncate() {
    this.parts.push(`delete from ${this.getTableName()}`);
  }

  /** No-op — SQLite does not support row-level lock clauses. */
  addLockClause() {
    return; // not supported
  }
}

View File

@@ -0,0 +1,71 @@
import { type EntityProperty, type IsolationLevel } from '@mikro-orm/core';
import { AbstractSqlPlatform } from '../../AbstractSqlPlatform.js';
import { SqliteNativeQueryBuilder } from './SqliteNativeQueryBuilder.js';
import { SqliteSchemaHelper } from './SqliteSchemaHelper.js';
import { SqliteExceptionConverter } from './SqliteExceptionConverter.js';
/** Platform definition for SQLite: type mapping, DDL quirks, and value escaping. */
export declare class SqlitePlatform extends AbstractSqlPlatform {
  protected readonly schemaHelper: SqliteSchemaHelper;
  protected readonly exceptionConverter: SqliteExceptionConverter;
  /** @internal */
  createNativeQueryBuilder(): SqliteNativeQueryBuilder;
  usesDefaultKeyword(): boolean;
  usesReturningStatement(): boolean;
  usesEnumCheckConstraints(): boolean;
  /** Current time expression in unix epoch milliseconds; `length` is ignored. */
  getCurrentTimestampSQL(length: number): string;
  getDateTimeTypeDeclarationSQL(column: { length: number }): string;
  /** Always plain `begin` — isolation level / read-only options are ignored. */
  getBeginTransactionSQL(options?: { isolationLevel?: IsolationLevel; readOnly?: boolean }): string[];
  getEnumTypeDeclarationSQL(column: {
    items?: unknown[];
    fieldNames: string[];
    length?: number;
    unsigned?: boolean;
    autoincrement?: boolean;
  }): string;
  getTinyIntTypeDeclarationSQL(column: { length?: number; unsigned?: boolean; autoincrement?: boolean }): string;
  getSmallIntTypeDeclarationSQL(column: { length?: number; unsigned?: boolean; autoincrement?: boolean }): string;
  getIntegerTypeDeclarationSQL(column: { length?: number; unsigned?: boolean; autoincrement?: boolean }): string;
  getFloatDeclarationSQL(): string;
  getBooleanTypeDeclarationSQL(): string;
  getCharTypeDeclarationSQL(column: { length?: number }): string;
  getVarcharTypeDeclarationSQL(column: { length?: number }): string;
  normalizeColumnType(
    type: string,
    options: {
      length?: number;
      precision?: number;
      scale?: number;
    },
  ): string;
  convertsJsonAutomatically(): boolean;
  /**
   * This is used to narrow the value of Date properties as they will be stored as timestamps in sqlite.
   * We use this method to convert Dates to timestamps when computing the changeset, so we have the right
   * data type in the payload as well as in original entity data. Without that, we would end up with diffs
   * including all Date properties, as we would be comparing Date object with timestamp.
   */
  processDateProperty(value: unknown): string | number | Date;
  getIndexName(
    tableName: string,
    columns: string[],
    type: 'index' | 'unique' | 'foreign' | 'primary' | 'sequence',
  ): string;
  supportsDeferredUniqueConstraints(): boolean;
  /**
   * SQLite supports schemas via ATTACH DATABASE. Returns true when there are
   * attached databases configured.
   */
  supportsSchemas(): boolean;
  getDefaultSchemaName(): string | undefined;
  getFullTextWhereClause(): string;
  /** Inlines a JS value as a SQL literal (strings quoted, buffers as blob literals). */
  escape(value: any): string;
  convertVersionValue(
    value: Date | number,
    prop: EntityProperty,
  ):
    | number
    | {
        $in: (string | number)[];
      };
  getJsonArrayElementPropertySQL(alias: string, property: string, _type: string): string;
  quoteValue(value: any): string;
}

View File

@@ -0,0 +1,145 @@
import { AbstractSqlPlatform } from '../../AbstractSqlPlatform.js';
import { SqliteNativeQueryBuilder } from './SqliteNativeQueryBuilder.js';
import { SqliteSchemaHelper } from './SqliteSchemaHelper.js';
import { SqliteExceptionConverter } from './SqliteExceptionConverter.js';
/** Platform definition for SQLite: type mapping, DDL quirks, and value escaping. */
export class SqlitePlatform extends AbstractSqlPlatform {
  schemaHelper = new SqliteSchemaHelper(this);
  exceptionConverter = new SqliteExceptionConverter();
  /** @internal */
  createNativeQueryBuilder() {
    return new SqliteNativeQueryBuilder(this);
  }
  // generated SQL does not use the `default` keyword on this platform
  usesDefaultKeyword() {
    return false;
  }
  usesReturningStatement() {
    return true;
  }
  usesEnumCheckConstraints() {
    return true;
  }
  // current time as unix epoch milliseconds; `length` is ignored
  getCurrentTimestampSQL(length) {
    return `(strftime('%s', 'now') * 1000)`;
  }
  getDateTimeTypeDeclarationSQL(column) {
    return 'datetime';
  }
  // always plain `begin` — isolation level / read-only options are ignored
  getBeginTransactionSQL(options) {
    return ['begin'];
  }
  // string enums become `text` columns; numeric enums fall back to integer
  getEnumTypeDeclarationSQL(column) {
    if (column.items?.every(item => typeof item === 'string')) {
      return 'text';
    }
    /* v8 ignore next */
    return this.getTinyIntTypeDeclarationSQL(column);
  }
  getTinyIntTypeDeclarationSQL(column) {
    return this.getIntegerTypeDeclarationSQL(column);
  }
  getSmallIntTypeDeclarationSQL(column) {
    return this.getIntegerTypeDeclarationSQL(column);
  }
  // all integer widths map to the single `integer` declaration
  getIntegerTypeDeclarationSQL(column) {
    return 'integer';
  }
  getFloatDeclarationSQL() {
    return 'real';
  }
  // booleans are stored as integers
  getBooleanTypeDeclarationSQL() {
    return 'integer';
  }
  getCharTypeDeclarationSQL(column) {
    return 'text';
  }
  getVarcharTypeDeclarationSQL(column) {
    return 'text';
  }
  // varchar/text normalize to plain `text`; other types keep their simple name
  normalizeColumnType(type, options) {
    const simpleType = this.extractSimpleType(type);
    if (['varchar', 'text'].includes(simpleType)) {
      return this.getVarcharTypeDeclarationSQL(options);
    }
    return simpleType;
  }
  convertsJsonAutomatically() {
    return false;
  }
  /**
   * This is used to narrow the value of Date properties as they will be stored as timestamps in sqlite.
   * We use this method to convert Dates to timestamps when computing the changeset, so we have the right
   * data type in the payload as well as in original entity data. Without that, we would end up with diffs
   * including all Date properties, as we would be comparing Date object with timestamp.
   */
  processDateProperty(value) {
    if (value instanceof Date) {
      return +value;
    }
    return value;
  }
  // primary keys use the default primary name; other index types defer to the base class
  getIndexName(tableName, columns, type) {
    if (type === 'primary') {
      return this.getDefaultPrimaryName(tableName, columns);
    }
    return super.getIndexName(tableName, columns, type);
  }
  supportsDeferredUniqueConstraints() {
    return false;
  }
  /**
   * SQLite supports schemas via ATTACH DATABASE. Returns true when there are
   * attached databases configured.
   */
  supportsSchemas() {
    const attachDatabases = this.config.get('attachDatabases');
    return !!attachDatabases?.length;
  }
  getDefaultSchemaName() {
    // Return 'main' only when schema support is active (i.e., databases are attached)
    return this.supportsSchemas() ? 'main' : undefined;
  }
  // full-text search uses the `match` operator
  getFullTextWhereClause() {
    return `:column: match :query`;
  }
  // inlines a JS value as a SQL literal; single quotes in strings are doubled
  escape(value) {
    if (value == null) {
      return 'null';
    }
    if (typeof value === 'boolean') {
      return value ? 'true' : 'false';
    }
    if (typeof value === 'number' || typeof value === 'bigint') {
      return '' + value;
    }
    if (value instanceof Date) {
      // dates are inlined as unix epoch milliseconds
      return '' + +value;
    }
    if (Array.isArray(value)) {
      return value.map(v => this.escape(v)).join(', ');
    }
    if (Buffer.isBuffer(value)) {
      // binary data becomes a hex blob literal
      return `X'${value.toString('hex')}'`;
    }
    return `'${String(value).replace(/'/g, "''")}'`;
  }
  // version (optimistic-lock) values for Date props may be stored either as the
  // millisecond timestamp or as a `YYYY-MM-DD HH:MM:SS` string — match both
  convertVersionValue(value, prop) {
    if (prop.runtimeType === 'Date') {
      const ts = +value;
      const str = new Date(ts)
        .toISOString()
        .replace('T', ' ')
        .replace(/\.\d{3}Z$/, '');
      return { $in: [ts, str] };
    }
    return value;
  }
  getJsonArrayElementPropertySQL(alias, property, _type) {
    return `json_extract(${this.quoteIdentifier(alias)}.value, '$.${this.quoteJsonKey(property)}')`;
  }
  // dates are quoted as raw millisecond timestamps; everything else defers to base
  quoteValue(value) {
    if (value instanceof Date) {
      return '' + +value;
    }
    return super.quoteValue(value);
  }
}

View File

@@ -0,0 +1,78 @@
import { type Connection } from '@mikro-orm/core';
import type { AbstractSqlConnection } from '../../AbstractSqlConnection.js';
import { SchemaHelper } from '../../schema/SchemaHelper.js';
import type { Column, IndexDef, Table, TableDifference } from '../../typings.js';
import type { DatabaseTable } from '../../schema/DatabaseTable.js';
import type { DatabaseSchema } from '../../schema/DatabaseSchema.js';
/** Schema introspection and DDL generation helper for SQLite. */
export declare class SqliteSchemaHelper extends SchemaHelper {
  disableForeignKeysSQL(): string;
  enableForeignKeysSQL(): string;
  supportsSchemaConstraints(): boolean;
  /** No-op — attached databases act as namespaces and are not created via DDL. */
  getCreateNamespaceSQL(name: string): string;
  getDropNamespaceSQL(name: string): string;
  getListTablesSQL(): string;
  /** Lists tables across the main database and any attached databases. */
  getAllTables(connection: AbstractSqlConnection, schemas?: string[]): Promise<Table[]>;
  getNamespaces(connection: AbstractSqlConnection): Promise<string[]>;
  private getIgnoredViewsCondition;
  getListViewsSQL(): string;
  loadViews(schema: DatabaseSchema, connection: AbstractSqlConnection, schemaName?: string): Promise<void>;
  getDropDatabaseSQL(name: string): string;
  loadInformationSchema(
    schema: DatabaseSchema,
    connection: AbstractSqlConnection,
    tables: Table[],
    schemas?: string[],
  ): Promise<void>;
  createTable(table: DatabaseTable, alter?: boolean): string[];
  createTableColumn(column: Column, table: DatabaseTable, _changedProperties?: Set<string>): string | undefined;
  getAddColumnsSQL(table: DatabaseTable, columns: Column[], diff?: TableDifference): string[];
  /** Returns an empty string — SQLite cannot drop FK constraints directly. */
  dropForeignKey(tableName: string, constraintName: string): string;
  getDropColumnsSQL(tableName: string, columns: Column[], schemaName?: string): string;
  getCreateIndexSQL(tableName: string, index: IndexDef): string;
  private parseTableDefinition;
  /**
   * Returns schema prefix for pragma and sqlite_master queries.
   * Returns empty string for main database (no prefix needed).
   */
  private getSchemaPrefix;
  /**
   * Returns all database names excluding 'temp'.
   */
  private getDatabaseList;
  /**
   * Extracts the SELECT part from a CREATE VIEW statement.
   */
  private extractViewDefinition;
  private getColumns;
  /**
   * SQLite strips outer parentheses from expression defaults (`DEFAULT (expr)` → `expr` in pragma).
   * We need to add them back so they match what we generate in DDL.
   */
  private wrapExpressionDefault;
  private getEnumDefinitions;
  getPrimaryKeys(
    connection: AbstractSqlConnection,
    indexes: IndexDef[],
    tableName: string,
    schemaName?: string,
  ): Promise<string[]>;
  private getIndexes;
  private getChecks;
  private getColumnDefinitions;
  private getForeignKeys;
  getManagementDbName(): string;
  getCreateDatabaseSQL(name: string): string;
  databaseExists(connection: Connection, name: string): Promise<boolean>;
  /**
   * Implicit indexes will be ignored when diffing
   */
  isImplicitIndex(name: string): boolean;
  dropIndex(table: string, index: IndexDef, oldIndexName?: string): string;
  /**
   * SQLite does not support schema-qualified table names in REFERENCES clauses.
   * Foreign key references can only point to tables in the same database.
   */
  getReferencedTableName(referencedTableName: string, schema?: string): string;
  alterTable(diff: TableDifference, safe?: boolean): string[];
  private getAlterTempTableSQL;
}

View File

@@ -0,0 +1,543 @@
import { Utils } from '@mikro-orm/core';
import { SchemaHelper } from '../../schema/SchemaHelper.js';
/**
 * SpatiaLite system views that should be automatically ignored
 * during introspection (see `getIgnoredViewsCondition` below).
 */
const SPATIALITE_VIEWS = [
  'geometry_columns',
  'spatial_ref_sys',
  'views_geometry_columns',
  'virts_geometry_columns',
  'geom_cols_ref_sys',
  'spatial_ref_sys_aux',
  'vector_layers',
  'vector_layers_auth',
  'vector_layers_field_infos',
  'vector_layers_statistics',
  'ElementaryGeometries',
];
export class SqliteSchemaHelper extends SchemaHelper {
  // pragma turning off foreign-key enforcement for the current connection
  disableForeignKeysSQL() {
    return 'pragma foreign_keys = off;';
  }
  // pragma turning foreign-key enforcement back on
  enableForeignKeysSQL() {
    return 'pragma foreign_keys = on;';
  }
  // reports that schema-scoped constraints are unavailable on this platform
  supportsSchemaConstraints() {
    return false;
  }
  // no-op: namespaces come from ATTACH DATABASE, not DDL
  getCreateNamespaceSQL(name) {
    return '';
  }
  // no-op: namespaces come from ATTACH DATABASE, not DDL
  getDropNamespaceSQL(name) {
    return '';
  }
  // lists persistent and temp tables, excluding SQLite's internal sequence
  // table and the two core SpatiaLite system tables
  getListTablesSQL() {
    return (
      `select name as table_name from sqlite_master where type = 'table' and name != 'sqlite_sequence' and name != 'geometry_columns' and name != 'spatial_ref_sys' ` +
      `union all select name as table_name from sqlite_temp_master where type = 'table' order by name`
    );
  }
async getAllTables(connection, schemas) {
const databases = await this.getDatabaseList(connection);
const hasAttachedDbs = databases.length > 1; // More than just 'main'
// If no attached databases, use original behavior
if (!hasAttachedDbs && !schemas?.length) {
return connection.execute(this.getListTablesSQL());
}
// With attached databases, query each one
const targetSchemas = schemas?.length ? schemas : databases;
const allTables = [];
for (const dbName of targetSchemas) {
const prefix = this.getSchemaPrefix(dbName);
const tables = await connection.execute(
`select name from ${prefix}sqlite_master where type = 'table' ` +
`and name != 'sqlite_sequence' and name != 'geometry_columns' and name != 'spatial_ref_sys'`,
);
for (const t of tables) {
allTables.push({ table_name: t.name, schema_name: dbName });
}
}
return allTables;
}
  // namespaces map 1:1 to attached database names (excluding `temp`)
  async getNamespaces(connection) {
    return this.getDatabaseList(connection);
  }
  // builds a `name != '...' and ...` conjunction excluding SpatiaLite system views
  getIgnoredViewsCondition() {
    return SPATIALITE_VIEWS.map(v => `name != '${v}'`).join(' and ');
  }
  // lists views (name + CREATE VIEW statement), skipping SpatiaLite system views
  getListViewsSQL() {
    return `select name as view_name, sql as view_definition from sqlite_master where type = 'view' and ${this.getIgnoredViewsCondition()} order by name`;
  }
async loadViews(schema, connection, schemaName) {
const databases = await this.getDatabaseList(connection);
const hasAttachedDbs = databases.length > 1; // More than just 'main'
// If no attached databases and no specific schema, use original behavior
if (!hasAttachedDbs && !schemaName) {
const views = await connection.execute(this.getListViewsSQL());
for (const view of views) {
schema.addView(view.view_name, schemaName, this.extractViewDefinition(view.view_definition));
}
return;
}
// With attached databases, query each one
/* v8 ignore next - schemaName branch not commonly used */
const targetDbs = schemaName ? [schemaName] : databases;
for (const dbName of targetDbs) {
const prefix = this.getSchemaPrefix(dbName);
const views = await connection.execute(
`select name as view_name, sql as view_definition from ${prefix}sqlite_master where type = 'view' and ${this.getIgnoredViewsCondition()} order by name`,
);
for (const view of views) {
schema.addView(view.view_name, dbName, this.extractViewDefinition(view.view_definition));
}
}
}
getDropDatabaseSQL(name) {
if (name === ':memory:') {
return '';
}
/* v8 ignore next */
return `drop database if exists ${this.quote(name)}`;
}
async loadInformationSchema(schema, connection, tables, schemas) {
for (const t of tables) {
const table = schema.addTable(t.table_name, t.schema_name, t.table_comment);
const cols = await this.getColumns(connection, table.name, table.schema);
const indexes = await this.getIndexes(connection, table.name, table.schema);
const checks = await this.getChecks(connection, table.name, table.schema);
const pks = await this.getPrimaryKeys(connection, indexes, table.name, table.schema);
const fks = await this.getForeignKeys(connection, table.name, table.schema);
const enums = await this.getEnumDefinitions(connection, table.name, table.schema);
table.init(cols, indexes, checks, pks, fks, enums);
}
}
createTable(table, alter) {
let sql = `create table ${table.getQuotedName()} (`;
const columns = table.getColumns();
const lastColumn = columns[columns.length - 1].name;
for (const column of columns) {
const col = this.createTableColumn(column, table);
if (col) {
const comma = column.name === lastColumn ? '' : ', ';
sql += col + comma;
}
}
const primaryKey = table.getPrimaryKey();
const createPrimary = primaryKey?.composite;
if (createPrimary && primaryKey) {
sql += `, primary key (${primaryKey.columnNames.map(c => this.quote(c)).join(', ')})`;
}
const parts = [];
for (const fk of Object.values(table.getForeignKeys())) {
parts.push(this.createForeignKey(table, fk, false));
}
for (const check of table.getChecks()) {
const sql = `constraint ${this.quote(check.name)} check (${check.expression})`;
parts.push(sql);
}
if (parts.length > 0) {
sql += ', ' + parts.join(', ');
}
sql += ')';
if (table.comment) {
sql += ` /* ${table.comment} */`;
}
const ret = [];
this.append(ret, sql);
for (const index of table.getIndexes()) {
this.append(ret, this.createIndex(index, table));
}
return ret;
}
createTableColumn(column, table, _changedProperties) {
const col = [this.quote(column.name)];
const checks = table.getChecks();
const check = checks.findIndex(check => check.columnName === column.name);
const useDefault = column.default != null && column.default !== 'null';
let columnType = column.type;
if (column.autoincrement) {
columnType = 'integer';
}
if (column.generated) {
columnType += ` generated always as ${column.generated}`;
}
col.push(columnType);
if (check !== -1) {
col.push(`check (${checks[check].expression})`);
checks.splice(check, 1);
}
Utils.runIfNotEmpty(() => col.push('null'), column.nullable);
Utils.runIfNotEmpty(() => col.push('not null'), !column.nullable && !column.generated);
Utils.runIfNotEmpty(() => col.push('primary key'), column.primary);
Utils.runIfNotEmpty(() => col.push('autoincrement'), column.autoincrement);
Utils.runIfNotEmpty(() => col.push(`default ${column.default}`), useDefault);
return col.join(' ');
}
getAddColumnsSQL(table, columns, diff) {
return columns.map(column => {
let sql = `alter table ${table.getQuotedName()} add column ${this.createTableColumn(column, table)}`;
const foreignKey = Object.values(diff.addedForeignKeys).find(
fk => fk.columnNames.length === 1 && fk.columnNames[0] === column.name,
);
if (foreignKey && this.options.createForeignKeyConstraints) {
delete diff.addedForeignKeys[foreignKey.constraintName];
sql += ' ' + this.createForeignKey(diff.toTable, foreignKey, false, true);
}
return sql;
});
}
  // returns an empty statement — SQLite cannot drop FK constraints directly
  dropForeignKey(tableName, constraintName) {
    return '';
  }
getDropColumnsSQL(tableName, columns, schemaName) {
/* v8 ignore next */
const name = this.quote(
(schemaName && schemaName !== this.platform.getDefaultSchemaName() ? schemaName + '.' : '') + tableName,
);
return columns
.map(column => {
return `alter table ${name} drop column ${this.quote(column.name)}`;
})
.join(';\n');
}
getCreateIndexSQL(tableName, index) {
/* v8 ignore next */
if (index.expression) {
return index.expression;
}
// SQLite requires: CREATE INDEX schema.index_name ON table_name (columns)
// NOT: CREATE INDEX index_name ON schema.table_name (columns)
const [schemaName, rawTableName] = this.splitTableName(tableName);
const quotedTableName = this.quote(rawTableName);
// If there's a schema, prefix the index name with it
let keyName;
if (schemaName && schemaName !== 'main') {
keyName = `${this.quote(schemaName)}.${this.quote(index.keyName)}`;
} else {
keyName = this.quote(index.keyName);
}
const sqlPrefix = `create ${index.unique ? 'unique ' : ''}index ${keyName} on ${quotedTableName}`;
/* v8 ignore next 4 */
if (index.columnNames.some(column => column.includes('.'))) {
// JSON columns can have unique index but not unique constraint, and we need to distinguish those, so we can properly drop them
const columns = this.platform.getJsonIndexDefinition(index);
return `${sqlPrefix} (${columns.join(', ')})`;
}
// Use getIndexColumns to support advanced options like sort order and collation
return `${sqlPrefix} (${this.getIndexColumns(index)})`;
}
  // Parses the raw `create table` DDL (as stored in sqlite_master) back into
  // per-column definition strings plus any trailing `constraint ...` clauses.
  parseTableDefinition(sql, cols) {
    const columns = {};
    const constraints = [];
    // extract all columns definitions
    let columnsDef = new RegExp(`create table [\`"']?.*?[\`"']? \\((.*)\\)`, 'i').exec(sql.replaceAll('\n', ''))?.[1];
    /* v8 ignore next */
    if (columnsDef) {
      if (columnsDef.includes(', constraint ')) {
        // split off table-level constraints that follow the column list
        constraints.push(...columnsDef.substring(columnsDef.indexOf(', constraint') + 2).split(', '));
        columnsDef = columnsDef.substring(0, columnsDef.indexOf(', constraint'));
      }
      // walk columns last-to-first, slicing each definition off the tail so a
      // column name occurring inside a later definition cannot match early
      for (let i = cols.length - 1; i >= 0; i--) {
        const col = cols[i];
        const re = ` *, *[\`"']?${col.name}[\`"']? (.*)`;
        const columnDef = new RegExp(re, 'i').exec(columnsDef);
        if (columnDef) {
          columns[col.name] = { name: col.name, definition: columnDef[1] };
          columnsDef = columnsDef.substring(0, columnDef.index);
        }
      }
    }
    return { columns, constraints };
  }
/**
* Returns schema prefix for pragma and sqlite_master queries.
* Returns empty string for main database (no prefix needed).
*/
getSchemaPrefix(schemaName) {
if (!schemaName || schemaName === 'main') {
return '';
}
return `${this.platform.quoteIdentifier(schemaName)}.`;
}
/**
* Returns all database names excluding 'temp'.
*/
async getDatabaseList(connection) {
const databases = await connection.execute('pragma database_list');
return databases.filter(d => d.name !== 'temp').map(d => d.name);
}
/**
* Extracts the SELECT part from a CREATE VIEW statement.
*/
extractViewDefinition(viewDefinition) {
const match = /create\s+view\s+[`"']?\w+[`"']?\s+as\s+(.*)/is.exec(viewDefinition);
/* v8 ignore next - fallback for non-standard view definitions */
return match ? match[1] : viewDefinition;
}
/**
 * Reads column metadata for a table, combining `pragma table_xinfo` output with
 * the raw DDL from sqlite_master — the DDL is needed for autoincrement
 * detection and generated-column expressions, which the pragma does not expose.
 */
async getColumns(connection, tableName, schemaName) {
  const prefix = this.getSchemaPrefix(schemaName);
  const columns = await connection.execute(`pragma ${prefix}table_xinfo('${tableName}')`);
  const sql = `select sql from ${prefix}sqlite_master where type = ? and name = ?`;
  const tableDefinition = await connection.execute(sql, ['table', tableName], 'get');
  // composite PK → no single column can be autoincrement
  const composite = columns.reduce((count, col) => count + (col.pk ? 1 : 0), 0) > 1;
  // there can be only one, so naive check like this should be enough
  const hasAutoincrement = tableDefinition.sql.toLowerCase().includes('autoincrement');
  const { columns: columnDefinitions } = this.parseTableDefinition(tableDefinition.sql, columns);
  return columns.map(col => {
    const mappedType = connection.getPlatform().getMappedType(col.type);
    let generated;
    // hidden > 1 marks a generated column (2 = virtual, otherwise treated as stored)
    if (col.hidden > 1) {
      /* v8 ignore next */
      const storage = col.hidden === 2 ? 'virtual' : 'stored';
      // recover the generation expression from the parsed column definition
      const re = new RegExp(`(generated always)? as \\((.*)\\)( ${storage})?$`, 'i');
      const match = columnDefinitions[col.name].definition.match(re);
      if (match) {
        generated = `${match[2]} ${storage}`;
      }
    }
    return {
      name: col.name,
      type: col.type,
      // re-wrap expression defaults that the pragma stripped (see wrapExpressionDefault)
      default: this.wrapExpressionDefault(col.dflt_value),
      nullable: !col.notnull,
      primary: !!col.pk,
      mappedType,
      unsigned: false,
      autoincrement: !composite && col.pk && this.platform.isNumericColumn(mappedType) && hasAutoincrement,
      generated,
    };
  });
}
/**
* SQLite strips outer parentheses from expression defaults (`DEFAULT (expr)` → `expr` in pragma).
* We need to add them back so they match what we generate in DDL.
*/
wrapExpressionDefault(value) {
if (value == null) {
return null;
}
// simple values that are returned as-is from pragma (no wrapping needed)
if (
/^-?\d/.test(value) ||
/^[xX]'/.test(value) ||
value.startsWith("'") ||
value.startsWith('"') ||
value.startsWith('(')
) {
return value;
}
const lower = value.toLowerCase();
if (['null', 'true', 'false', 'current_timestamp', 'current_date', 'current_time'].includes(lower)) {
return value;
}
// everything else is an expression that had its outer parens stripped
return `(${value})`;
}
/**
 * Infers enum-like columns from `text check (... in (...))` constraints in the
 * table DDL, returning a map of column name → allowed string values.
 */
async getEnumDefinitions(connection, tableName, schemaName) {
  const prefix = this.getSchemaPrefix(schemaName);
  const sql = `select sql from ${prefix}sqlite_master where type = ? and name = ?`;
  const tableDefinition = await connection.execute(sql, ['table', tableName], 'get');
  // collect every `<col> text check (...)` fragment from the raw DDL
  const checkConstraints = [...(tableDefinition.sql.match(/[`["'][^`\]"']+[`\]"'] text check \(.*?\)/gi) ?? [])];
  return checkConstraints.reduce((o, item) => {
    // check constraints are defined as (note that last closing paren is missing):
    // `type` text check (`type` in ('local', 'global')
    const match = /[`["']([^`\]"']+)[`\]"'] text check \(.* \((.*)\)/i.exec(item);
    /* v8 ignore next */
    if (match) {
      // match[1] is the column name; strip the quotes around each listed value
      o[match[1]] = match[2].split(',').map(item => /^\(?'(.*)'/.exec(item.trim())[1]);
    }
    return o;
  }, {});
}
async getPrimaryKeys(connection, indexes, tableName, schemaName) {
const prefix = this.getSchemaPrefix(schemaName);
const sql = `pragma ${prefix}table_info(\`${tableName}\`)`;
const cols = await connection.execute(sql);
return cols.filter(col => !!col.pk).map(col => col.name);
}
/**
 * Returns index rows for a table: a synthesized `primary` entry per PK column,
 * plus one row per column of each named index, normalized via `mapIndexes`.
 */
async getIndexes(connection, tableName, schemaName) {
  const prefix = this.getSchemaPrefix(schemaName);
  const sql = `pragma ${prefix}table_info(\`${tableName}\`)`;
  const cols = await connection.execute(sql);
  const indexes = await connection.execute(`pragma ${prefix}index_list(\`${tableName}\`)`);
  const ret = [];
  // synthesize the PK entries from table_info, one per PK column
  for (const col of cols.filter(c => c.pk)) {
    ret.push({
      columnNames: [col.name],
      keyName: 'primary',
      constraint: true,
      unique: true,
      primary: true,
    });
  }
  // skip sqlite_* autoindexes; expand each remaining index into per-column rows
  for (const index of indexes.filter(index => !this.isImplicitIndex(index.name))) {
    const res = await connection.execute(`pragma ${prefix}index_info(\`${index.name}\`)`);
    ret.push(
      ...res.map(row => ({
        columnNames: [row.name],
        keyName: index.name,
        unique: !!index.unique,
        // unique indexes are treated as constraints for diffing purposes
        constraint: !!index.unique,
        primary: false,
      })),
    );
  }
  return this.mapIndexes(ret);
}
/**
 * Collects check constraints: column-level ones parsed out of each column's
 * definition, then table-level `constraint <name> check (...)` entries.
 */
async getChecks(connection, tableName, schemaName) {
  const { columns, constraints } = await this.getColumnDefinitions(connection, tableName, schemaName);
  const checks = [];
  for (const key of Object.keys(columns)) {
    const column = columns[key];
    const expression = / (check \((.*)\))/i.exec(column.definition);
    if (expression) {
      checks.push({
        // column-level checks carry no name in DDL → derive a deterministic one
        name: this.platform.getConfig().getNamingStrategy().indexName(tableName, [column.name], 'check'),
        definition: expression[1],
        expression: expression[2],
        columnName: column.name,
      });
    }
  }
  // table-level constraints keep their declared name (match[1])
  for (const constraint of constraints) {
    const expression = /constraint *[`"']?(.*?)[`"']? * (check \((.*)\))/i.exec(constraint);
    if (expression) {
      checks.push({
        name: expression[1],
        definition: expression[2],
        expression: expression[3],
      });
    }
  }
  return checks;
}
async getColumnDefinitions(connection, tableName, schemaName) {
const prefix = this.getSchemaPrefix(schemaName);
const columns = await connection.execute(`pragma ${prefix}table_xinfo('${tableName}')`);
const sql = `select sql from ${prefix}sqlite_master where type = ? and name = ?`;
const tableDefinition = await connection.execute(sql, ['table', tableName], 'get');
return this.parseTableDefinition(tableDefinition.sql, columns);
}
/**
 * Reads foreign keys via `pragma foreign_key_list`, deriving constraint names
 * from the naming strategy and the defer mode from the raw DDL constraints.
 *
 * NOTE(review): the pragma emits one row per referencing column, and entries
 * are keyed here by a single-column constraint name — a composite FK would
 * surface as separate single-column entries; confirm that is handled upstream.
 */
async getForeignKeys(connection, tableName, schemaName) {
  const { constraints } = await this.getColumnDefinitions(connection, tableName, schemaName);
  const prefix = this.getSchemaPrefix(schemaName);
  const fks = await connection.execute(`pragma ${prefix}foreign_key_list(\`${tableName}\`)`);
  const qualifiedTableName = schemaName ? `${schemaName}.${tableName}` : tableName;
  return fks.reduce((ret, fk) => {
    const constraintName = this.platform.getIndexName(tableName, [fk.from], 'foreign');
    // find the matching DDL constraint string to recover the defer mode below
    const constraint = constraints?.find(c => c.includes(constraintName));
    ret[constraintName] = {
      constraintName,
      columnName: fk.from,
      columnNames: [fk.from],
      localTableName: qualifiedTableName,
      referencedTableName: fk.table,
      referencedColumnName: fk.to,
      referencedColumnNames: [fk.to],
      updateRule: fk.on_update.toLowerCase(),
      deleteRule: fk.on_delete.toLowerCase(),
      deferMode: constraint?.match(/ deferrable initially (deferred|immediate)/i)?.[1].toLowerCase(),
    };
    return ret;
  }, {});
}
/** SQLite has no management database — databases are plain files, not server objects. */
getManagementDbName() {
  return '';
}
/** No DDL needed — an SQLite database file is created implicitly on first connect. */
getCreateDatabaseSQL(name) {
  return '';
}
async databaseExists(connection, name) {
const tables = await connection.execute(this.getListTablesSQL());
return tables.length > 0;
}
/**
 * Implicit indexes will be ignored when diffing
 */
isImplicitIndex(name) {
  // Ignore indexes with reserved names, e.g. autoindexes
  // (`sqlite_autoindex_*` created internally for PK/unique constraints)
  return name.startsWith('sqlite_');
}
dropIndex(table, index, oldIndexName = index.keyName) {
return `drop index ${this.quote(oldIndexName)}`;
}
/**
* SQLite does not support schema-qualified table names in REFERENCES clauses.
* Foreign key references can only point to tables in the same database.
*/
getReferencedTableName(referencedTableName, schema) {
const [schemaName, tableName] = this.splitTableName(referencedTableName);
// Strip any schema prefix - SQLite REFERENCES clause doesn't support it
return tableName;
}
/**
 * Builds the statements for an incremental `alter table`, falling back to a
 * full temp-table rebuild for changes SQLite cannot express directly.
 */
alterTable(diff, safe) {
  const ret = [];
  const [schemaName, tableName] = this.splitTableName(diff.name);
  // removed/changed checks, changed FKs and changed column definitions have no
  // `alter table` form in SQLite → rebuild the table
  if (
    Utils.hasObjectKeys(diff.removedChecks) ||
    Utils.hasObjectKeys(diff.changedChecks) ||
    Utils.hasObjectKeys(diff.changedForeignKeys) ||
    Utils.hasObjectKeys(diff.changedColumns)
  ) {
    return this.getAlterTempTableSQL(diff);
  }
  // drop removed and changed indexes first (changed ones are recreated below)
  for (const index of Object.values(diff.removedIndexes)) {
    this.append(ret, this.dropIndex(diff.name, index));
  }
  for (const index of Object.values(diff.changedIndexes)) {
    this.append(ret, this.dropIndex(diff.name, index));
  }
  /* v8 ignore next */
  if (!safe && Object.values(diff.removedColumns).length > 0) {
    this.append(ret, this.getDropColumnsSQL(tableName, Object.values(diff.removedColumns), schemaName));
  }
  if (Object.values(diff.addedColumns).length > 0) {
    this.append(ret, this.getAddColumnsSQL(diff.toTable, Object.values(diff.addedColumns), diff));
  }
  // added FKs/checks also force a rebuild; the statements collected so far are
  // discarded — the rebuild recreates the whole target table (and dropping the
  // old table drops its indexes). NOTE(review): confirm this ordering is intentional.
  if (Utils.hasObjectKeys(diff.addedForeignKeys) || Utils.hasObjectKeys(diff.addedChecks)) {
    return this.getAlterTempTableSQL(diff);
  }
  for (const [oldColumnName, column] of Object.entries(diff.renamedColumns)) {
    this.append(ret, this.getRenameColumnSQL(tableName, oldColumnName, column, schemaName));
  }
  for (const index of Object.values(diff.addedIndexes)) {
    ret.push(this.createIndex(index, diff.toTable));
  }
  for (const index of Object.values(diff.changedIndexes)) {
    ret.push(this.createIndex(index, diff.toTable, true));
  }
  // renamed unique indexes are dropped+recreated; plain ones can be renamed in place
  for (const [oldIndexName, index] of Object.entries(diff.renamedIndexes)) {
    if (index.unique) {
      this.append(ret, this.dropIndex(diff.name, index, oldIndexName));
      this.append(ret, this.createIndex(index, diff.toTable));
    } else {
      this.append(ret, this.getRenameIndexSQL(diff.name, index, oldIndexName));
    }
  }
  return ret;
}
/**
 * Rebuilds a table to apply changes SQLite's `alter table` cannot express:
 * create the new shape under a temp name, copy data over, drop the original,
 * rename the temp table back, then replay the remaining DDL (e.g. indexes).
 * FK enforcement is disabled around the whole sequence.
 */
getAlterTempTableSQL(changedTable) {
  const tempName = `${changedTable.toTable.name}__temp_alter`;
  const quotedName = this.quote(changedTable.toTable.name);
  const quotedTempName = this.quote(tempName);
  // createTable() returns [create-table statement, ...follow-up statements]
  const [first, ...rest] = this.createTable(changedTable.toTable);
  const sql = [
    'pragma foreign_keys = off;',
    first.replace(`create table ${quotedName}`, `create table ${quotedTempName}`),
  ];
  const columns = [];
  for (const column of changedTable.toTable.getColumns()) {
    const fromColumn = changedTable.fromTable.getColumn(column.name);
    if (fromColumn) {
      columns.push(this.quote(column.name));
    } else {
      // brand-new column — no source data, select null so the arity matches
      columns.push(`null as ${this.quote(column.name)}`);
    }
  }
  sql.push(`insert into ${quotedTempName} select ${columns.join(', ')} from ${quotedName};`);
  sql.push(`drop table ${quotedName};`);
  sql.push(`alter table ${quotedTempName} rename to ${quotedName};`);
  sql.push(...rest);
  sql.push('pragma foreign_keys = on;');
  return sql;
}
}

View File

@@ -0,0 +1,6 @@
export * from './BaseSqliteConnection.js';
export * from './NodeSqliteDialect.js';
export * from './SqliteDriver.js';
export * from './SqlitePlatform.js';
export * from './SqliteSchemaHelper.js';
export * from './SqliteNativeQueryBuilder.js';

6
node_modules/@mikro-orm/sql/dialects/sqlite/index.js generated vendored Normal file
View File

@@ -0,0 +1,6 @@
export * from './BaseSqliteConnection.js';
export * from './NodeSqliteDialect.js';
export * from './SqliteDriver.js';
export * from './SqlitePlatform.js';
export * from './SqliteSchemaHelper.js';
export * from './SqliteNativeQueryBuilder.js';

19
node_modules/@mikro-orm/sql/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,19 @@
/**
* @packageDocumentation
* @module sql
*/
export { Kysely } from 'kysely';
export * from '@mikro-orm/core';
export * from './AbstractSqlConnection.js';
export * from './AbstractSqlDriver.js';
export * from './AbstractSqlPlatform.js';
export * from './SqlEntityManager.js';
export * from './SqlEntityRepository.js';
export * from './query/index.js';
export { raw } from './query/index.js';
export * from './schema/index.js';
export * from './dialects/index.js';
export type * from './typings.js';
export * from './plugin/index.js';
export { SqlEntityManager as EntityManager } from './SqlEntityManager.js';
export { SqlEntityRepository as EntityRepository } from './SqlEntityRepository.js';

18
node_modules/@mikro-orm/sql/index.js generated vendored Normal file
View File

@@ -0,0 +1,18 @@
/**
* @packageDocumentation
* @module sql
*/
export { Kysely } from 'kysely';
export * from '@mikro-orm/core';
export * from './AbstractSqlConnection.js';
export * from './AbstractSqlDriver.js';
export * from './AbstractSqlPlatform.js';
export * from './SqlEntityManager.js';
export * from './SqlEntityRepository.js';
export * from './query/index.js';
export { raw } from './query/index.js';
export * from './schema/index.js';
export * from './dialects/index.js';
export * from './plugin/index.js';
export { SqlEntityManager as EntityManager } from './SqlEntityManager.js';
export { SqlEntityRepository as EntityRepository } from './SqlEntityRepository.js';

61
node_modules/@mikro-orm/sql/package.json generated vendored Normal file
View File

@@ -0,0 +1,61 @@
{
"name": "@mikro-orm/sql",
"version": "7.0.2",
"description": "TypeScript ORM for Node.js based on Data Mapper, Unit of Work and Identity Map patterns. Supports MongoDB, MySQL, PostgreSQL and SQLite databases as well as usage with vanilla JavaScript.",
"keywords": [
"data-mapper",
"ddd",
"entity",
"identity-map",
"javascript",
"js",
"mariadb",
"mikro-orm",
"mongo",
"mongodb",
"mysql",
"orm",
"postgresql",
"sqlite",
"sqlite3",
"ts",
"typescript",
"unit-of-work"
],
"homepage": "https://mikro-orm.io",
"bugs": {
"url": "https://github.com/mikro-orm/mikro-orm/issues"
},
"license": "MIT",
"author": "Martin Adámek",
"repository": {
"type": "git",
"url": "git+ssh://git@github.com/mikro-orm/mikro-orm.git"
},
"type": "module",
"exports": {
"./package.json": "./package.json",
".": "./index.js"
},
"publishConfig": {
"access": "public"
},
"scripts": {
"build": "yarn compile && yarn copy",
"clean": "yarn run -T rimraf ./dist",
"compile": "yarn run -T tsc -p tsconfig.build.json",
"copy": "node ../../scripts/copy.mjs"
},
"dependencies": {
"kysely": "0.28.12"
},
"devDependencies": {
"@mikro-orm/core": "^7.0.2"
},
"peerDependencies": {
"@mikro-orm/core": "7.0.2"
},
"engines": {
"node": ">= 22.17.0"
}
}

49
node_modules/@mikro-orm/sql/plugin/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,49 @@
import {
type KyselyPlugin,
type PluginTransformQueryArgs,
type PluginTransformResultArgs,
type QueryResult,
type RootOperationNode,
type UnknownRow,
} from 'kysely';
import type { SqlEntityManager } from '../SqlEntityManager.js';
/**
 * Configuration options for the MikroKyselyPlugin.
 *
 * NOTE(review): result rows are only transformed when `columnNamingStrategy`
 * is 'property' or `convertValues` is true (see MikroKyselyPlugin.transformResult).
 */
export interface MikroKyselyPluginOptions {
  /**
   * Use database table names ('table') or entity names ('entity') in queries.
   *
   * @default 'table'
   */
  tableNamingStrategy?: 'table' | 'entity';
  /**
   * Use database column names ('column') or property names ('property') in queries.
   *
   * @default 'column'
   */
  columnNamingStrategy?: 'column' | 'property';
  /**
   * Automatically process entity `onCreate` hooks in INSERT queries.
   *
   * @default false
   */
  processOnCreateHooks?: boolean;
  /**
   * Automatically process entity `onUpdate` hooks in UPDATE queries.
   *
   * @default false
   */
  processOnUpdateHooks?: boolean;
  /**
   * Convert JavaScript values to database-compatible values (e.g., Date to timestamp, custom types).
   *
   * @default false
   */
  convertValues?: boolean;
}
/** Kysely plugin that transforms queries and results to use MikroORM entity/property naming conventions. */
export declare class MikroKyselyPlugin implements KyselyPlugin {
  #private;
  constructor(em: SqlEntityManager, options?: MikroKyselyPluginOptions);
  /** Rewrites the query AST (table/column names, hooks, value conversion) before compilation. */
  transformQuery(args: PluginTransformQueryArgs): RootOperationNode;
  /** Maps result rows back to property names / JS values when the options require it. */
  transformResult(args: PluginTransformResultArgs): Promise<QueryResult<UnknownRow>>;
}

50
node_modules/@mikro-orm/sql/plugin/index.js generated vendored Normal file
View File

@@ -0,0 +1,50 @@
import {
SelectQueryNode as SelectQueryNodeClass,
InsertQueryNode as InsertQueryNodeClass,
UpdateQueryNode as UpdateQueryNodeClass,
DeleteQueryNode as DeleteQueryNodeClass,
} from 'kysely';
import { MikroTransformer } from './transformer.js';
/** Kysely plugin that transforms queries and results to use MikroORM entity/property naming conventions. */
export class MikroKyselyPlugin {
  /** Per-query entity-map cache keyed by Kysely's queryId object (weakly held). */
  static #queryNodeCache = new WeakMap();
  #transformer;
  #options;
  constructor(em, options = {}) {
    this.#options = options;
    this.#transformer = new MikroTransformer(em, options);
  }
  transformQuery(args) {
    const { node, queryId } = args;
    this.#transformer.reset();
    const transformed = this.#transformer.transformNode(node, queryId);
    const isSupportedRoot =
      SelectQueryNodeClass.is(node) ||
      InsertQueryNodeClass.is(node) ||
      UpdateQueryNodeClass.is(node) ||
      DeleteQueryNodeClass.is(node);
    if (isSupportedRoot) {
      // snapshot the entity map for transformResult — the transformer's
      // internal map is cleared and reused by the next query
      const entityMap = new Map(this.#transformer.getOutputEntityMap());
      MikroKyselyPlugin.#queryNodeCache.set(queryId, { entityMap });
    }
    return transformed;
  }
  async transformResult(args) {
    // rows only need transforming for property naming or value conversion
    const wantsResultTransform = this.#options.columnNamingStrategy === 'property' || this.#options.convertValues;
    if (!wantsResultTransform) {
      return args.result;
    }
    const cached = MikroKyselyPlugin.#queryNodeCache.get(args.queryId);
    if (!cached) {
      return args.result;
    }
    const transformedRows = this.#transformer.transformResult(args.result.rows ?? [], cached.entityMap);
    return {
      ...args.result,
      rows: transformedRows ?? [],
    };
  }
}

122
node_modules/@mikro-orm/sql/plugin/transformer.d.ts generated vendored Normal file
View File

@@ -0,0 +1,122 @@
import { type EntityMetadata, type EntityProperty } from '@mikro-orm/core';
import {
type CommonTableExpressionNameNode,
type DeleteQueryNode,
type IdentifierNode,
type InsertQueryNode,
type JoinNode,
type MergeQueryNode,
type QueryId,
type SelectQueryNode,
type UpdateQueryNode,
type WithNode,
ColumnNode,
OperationNodeTransformer,
TableNode,
} from 'kysely';
import type { MikroKyselyPluginOptions } from './index.js';
import type { SqlEntityManager } from '../SqlEntityManager.js';
/** Kysely node transformer that rewrites query ASTs using MikroORM metadata (naming, hooks, value conversion). */
export declare class MikroTransformer extends OperationNodeTransformer {
  #private;
  constructor(em: SqlEntityManager, options?: MikroKyselyPluginOptions);
  /** Clears per-query state (subquery alias map, entity map); call before each root node. */
  reset(): void;
  /** Map of table name → metadata for entities seen while transforming the last query. */
  getOutputEntityMap(): Map<string, EntityMetadata>;
  /** @internal */
  getContextStack(): Map<string, EntityMetadata | undefined>[];
  /** @internal */
  getSubqueryAliasMap(): Map<string, EntityMetadata | undefined>;
  /** Pushes a scope, registers CTE/FROM/JOIN tables, then delegates to the base transformer. */
  transformSelectQuery(node: SelectQueryNode, queryId: QueryId): SelectQueryNode;
  /** Registers the target table and applies onCreate hooks / value conversion (incl. ON CONFLICT updates). */
  transformInsertQuery(node: InsertQueryNode, queryId?: QueryId): InsertQueryNode;
  /** Registers target/FROM/JOIN tables and applies onUpdate hooks / value conversion. */
  transformUpdateQuery(node: UpdateQueryNode, queryId?: QueryId): UpdateQueryNode;
  /** Registers the target table and JOINed tables for identifier resolution. */
  transformDeleteQuery(node: DeleteQueryNode, queryId?: QueryId): DeleteQueryNode;
  /** Pushes a scope for the duration of the merge query. */
  transformMergeQuery(node: MergeQueryNode, queryId?: QueryId): MergeQueryNode;
  /** Maps entity names → table names and property names → column names per the plugin options. */
  transformIdentifier(node: IdentifierNode, queryId: QueryId): IdentifierNode;
  /**
   * Find owner entity metadata for the current identifier in the context stack.
   * Supports both aliased and non-aliased table references.
   * Searches up the context stack to support correlated subqueries.
   * Also checks subquery/CTE aliases to resolve to their source tables.
   */
  findOwnerEntityInContext(): EntityMetadata | undefined;
  /** Appends columns/values for `onCreate` properties missing from the INSERT column list. */
  processOnCreateHooks(node: InsertQueryNode, meta: EntityMetadata): InsertQueryNode;
  /** Appends `set` entries for `onUpdate` properties the UPDATE does not already assign. */
  processOnUpdateHooks(node: UpdateQueryNode, meta: EntityMetadata): UpdateQueryNode;
  /** Converts INSERT values to database-compatible representations. */
  processInsertValues(node: InsertQueryNode, meta: EntityMetadata): InsertQueryNode;
  /** Converts UPDATE values to database-compatible representations. */
  processUpdateValues(node: UpdateQueryNode, meta: EntityMetadata): UpdateQueryNode;
  /** Resolves each column node to its entity property, or undefined when unknown. */
  mapColumnsToProperties(columns: readonly ColumnNode[], meta: EntityMetadata): (EntityProperty | undefined)[];
  /** Returns the plain column name carried by an identifier node. */
  normalizeColumnName(identifier: IdentifierNode): string;
  /** Finds the entity property matching the given column name, if any. */
  findProperty(meta: EntityMetadata | undefined, columnName?: string): EntityProperty | undefined;
  /** Whether input/output value conversion is enabled (see `convertValues` option). */
  shouldConvertValues(): boolean;
  /** Converts a JS value to its database representation for the given property when enabled. */
  prepareInputValue(prop: EntityProperty | undefined, value: unknown, enabled: boolean): unknown;
  /**
   * Look up a table name/alias in the context stack.
   * Searches from current scope (top of stack) to parent scopes (bottom).
   * This supports correlated subqueries and references to outer query tables.
   */
  lookupInContextStack(tableNameOrAlias: string): EntityMetadata | undefined;
  /**
   * Process WITH node (CTE definitions)
   */
  processWithNode(withNode: WithNode, context: Map<string, EntityMetadata | undefined>): void;
  /**
   * Extract CTE name from CommonTableExpressionNameNode
   */
  getCTEName(nameNode: CommonTableExpressionNameNode): string | undefined;
  /**
   * Process a FROM item (can be TableNode or AliasNode)
   */
  processFromItem(
    from: any, // OperationNode type - can be TableNode, AliasNode, or SelectQueryNode
    context: Map<string, EntityMetadata | undefined>,
  ): void;
  /**
   * Process a JOIN node
   */
  processJoinNode(join: JoinNode, context: Map<string, EntityMetadata | undefined>): void;
  /**
   * Extract the primary source table from a SELECT query
   * This helps resolve columns from subqueries to their original entity tables
   */
  extractSourceTableFromSelectQuery(selectQuery: SelectQueryNode): EntityMetadata | undefined;
  /**
   * Extract alias name from an alias node
   */
  extractAliasName(alias: any): string | undefined;
  /**
   * Extract table name from a TableNode
   */
  getTableName(node: TableNode | undefined): string | undefined;
  /**
   * Find entity metadata by table name or entity name
   */
  findEntityMetadata(name: string): EntityMetadata | undefined;
  /**
   * Transform result rows by mapping database column names to property names
   * This is called for SELECT queries when columnNamingStrategy is 'property'
   */
  transformResult(
    rows: Record<string, any>[] | undefined,
    entityMap: Map<string, EntityMetadata>,
  ): Record<string, any>[] | undefined;
  /** Merges the per-entity field maps of every entity in the map into one lookup table. */
  buildGlobalFieldMap(entityMap: Map<string, EntityMetadata>): Record<string, EntityProperty>;
  /** Merges the per-entity relation field maps of every entity in the map into one lookup table. */
  buildGlobalRelationFieldMap(entityMap: Map<string, EntityMetadata>): Record<string, string>;
  /**
   * Build a mapping from database field names to property objects
   * Format: { 'field_name': EntityProperty }
   */
  buildFieldToPropertyMap(meta: EntityMetadata, alias?: string): Record<string, EntityProperty>;
  /**
   * Build a mapping for relation fields
   * For ManyToOne relations, we need to map from the foreign key field to the relation property
   * Format: { 'foreign_key_field': 'relationPropertyName' }
   */
  buildRelationFieldMap(meta: EntityMetadata, alias?: string): Record<string, string>;
  /**
   * Transform a single row by mapping column names to property names
   */
  transformRow(
    row: Record<string, any>,
    fieldToPropertyMap: Record<string, EntityProperty>,
    relationFieldMap: Record<string, string>,
  ): Record<string, any>;
  /** Converts a database value back to its JS representation for the given property. */
  prepareOutputValue(prop: EntityProperty | undefined, value: unknown): unknown;
}

901
node_modules/@mikro-orm/sql/plugin/transformer.js generated vendored Normal file
View File

@@ -0,0 +1,901 @@
import { ReferenceKind, isRaw } from '@mikro-orm/core';
import {
AliasNode,
ColumnNode,
ColumnUpdateNode,
OperationNodeTransformer,
PrimitiveValueListNode,
ReferenceNode,
SchemableIdentifierNode,
TableNode,
ValueListNode,
ValueNode,
ValuesNode,
} from 'kysely';
export class MikroTransformer extends OperationNodeTransformer {
/**
* Context stack to support nested queries (subqueries, CTEs)
* Each level of query scope has its own Map of table aliases/names to EntityMetadata
* Top of stack (highest index) is the current scope
*/
#contextStack = [];
/**
* Subquery alias map: maps subquery/CTE alias to its source table metadata
* Used to resolve columns from subqueries/CTEs to their original table definitions
*/
#subqueryAliasMap = new Map();
#metadata;
#platform;
/**
* Global map of all entities involved in the query.
* Populated during AST transformation and used for result transformation.
*/
#entityMap = new Map();
#em;
#options;
/** Caches the EntityManager plus its metadata/platform handles used throughout the transformer. */
constructor(em, options = {}) {
  super();
  this.#em = em;
  this.#options = options;
  // resolved once here so individual transforms avoid repeated lookups
  this.#metadata = em.getMetadata();
  this.#platform = em.getDriver().getPlatform();
}
reset() {
this.#subqueryAliasMap.clear();
this.#entityMap.clear();
}
/** Map of table name → entity metadata collected while transforming the last query. */
getOutputEntityMap() {
  return this.#entityMap;
}
/** @internal Exposes the scope stack of alias/table-name → metadata maps (top = current scope). */
getContextStack() {
  return this.#contextStack;
}
/** @internal Exposes the subquery/CTE alias → source-table metadata map. */
getSubqueryAliasMap() {
  return this.#subqueryAliasMap;
}
transformSelectQuery(node, queryId) {
// Push a new context for this query scope (starts with inherited parent context)
const currentContext = new Map();
this.#contextStack.push(currentContext);
try {
// Process WITH clause (CTEs) first - they define names available in this scope
if (node.with) {
this.processWithNode(node.with, currentContext);
}
// Process FROM clause - main tables in this scope
if (node.from?.froms) {
for (const from of node.from.froms) {
this.processFromItem(from, currentContext);
}
}
// Process JOINs - additional tables joined into this scope
if (node.joins) {
for (const join of node.joins) {
this.processJoinNode(join, currentContext);
}
}
return super.transformSelectQuery(node, queryId);
} finally {
// Pop the context when exiting this query scope
this.#contextStack.pop();
}
}
/**
 * Registers the INSERT target table, then applies onCreate hooks and value
 * conversion to the row values — and, via a synthetic UPDATE node, to the
 * ON CONFLICT update set as well.
 */
transformInsertQuery(node, queryId) {
  const currentContext = new Map();
  this.#contextStack.push(currentContext);
  try {
    let entityMeta;
    if (node.into) {
      const tableName = this.getTableName(node.into);
      if (tableName) {
        const meta = this.findEntityMetadata(tableName);
        if (meta) {
          entityMeta = meta;
          currentContext.set(meta.tableName, meta);
          this.#entityMap.set(meta.tableName, meta);
        }
      }
    }
    // hooks first, then value conversion — conversion must see hook-added values
    const nodeWithHooks =
      this.#options.processOnCreateHooks && entityMeta ? this.processOnCreateHooks(node, entityMeta) : node;
    const nodeWithConvertedValues =
      this.#options.convertValues && entityMeta ? this.processInsertValues(nodeWithHooks, entityMeta) : nodeWithHooks;
    // Handle ON CONFLICT clause
    let finalNode = nodeWithConvertedValues;
    if (node.onConflict?.updates && entityMeta) {
      // Create a temporary UpdateQueryNode to reuse processOnUpdateHooks and processUpdateValues
      // We only care about the updates part
      const tempUpdateNode = {
        kind: 'UpdateQueryNode',
        table: node.into, // Dummy table
        updates: node.onConflict.updates,
      };
      const updatesWithHooks = this.#options.processOnUpdateHooks
        ? this.processOnUpdateHooks(tempUpdateNode, entityMeta).updates
        : node.onConflict.updates;
      const tempUpdateNodeWithHooks = {
        ...tempUpdateNode,
        updates: updatesWithHooks,
      };
      const updatesWithConvertedValues = this.#options.convertValues
        ? this.processUpdateValues(tempUpdateNodeWithHooks, entityMeta).updates
        : updatesWithHooks;
      // only rebuild the node when the update set actually changed
      if (updatesWithConvertedValues && updatesWithConvertedValues !== node.onConflict.updates) {
        // Construct the new OnConflictNode with updated values
        finalNode = {
          ...finalNode,
          onConflict: {
            ...node.onConflict,
            updates: updatesWithConvertedValues,
          },
        };
      }
    }
    return super.transformInsertQuery(finalNode, queryId);
  } finally {
    this.#contextStack.pop();
  }
}
transformUpdateQuery(node, queryId) {
const currentContext = new Map();
this.#contextStack.push(currentContext);
try {
let entityMeta;
if (node.table && TableNode.is(node.table)) {
const tableName = this.getTableName(node.table);
if (tableName) {
const meta = this.findEntityMetadata(tableName);
if (meta) {
entityMeta = meta;
currentContext.set(meta.tableName, meta);
this.#entityMap.set(meta.tableName, meta);
}
}
}
// Process FROM clause in UPDATE queries (for UPDATE with JOIN)
if (node.from) {
for (const fromItem of node.from.froms) {
this.processFromItem(fromItem, currentContext);
}
}
// Also process JOINs in UPDATE queries
if (node.joins) {
for (const join of node.joins) {
this.processJoinNode(join, currentContext);
}
}
const nodeWithHooks =
this.#options.processOnUpdateHooks && entityMeta ? this.processOnUpdateHooks(node, entityMeta) : node;
const nodeWithConvertedValues =
this.#options.convertValues && entityMeta ? this.processUpdateValues(nodeWithHooks, entityMeta) : nodeWithHooks;
return super.transformUpdateQuery(nodeWithConvertedValues, queryId);
} finally {
this.#contextStack.pop();
}
}
transformDeleteQuery(node, queryId) {
const currentContext = new Map();
this.#contextStack.push(currentContext);
try {
const froms = node.from?.froms;
if (froms && froms.length > 0) {
const firstFrom = froms[0];
if (TableNode.is(firstFrom)) {
const tableName = this.getTableName(firstFrom);
if (tableName) {
const meta = this.findEntityMetadata(tableName);
if (meta) {
currentContext.set(meta.tableName, meta);
this.#entityMap.set(meta.tableName, meta);
}
}
}
}
// Also process JOINs in DELETE queries
if (node.joins) {
for (const join of node.joins) {
this.processJoinNode(join, currentContext);
}
}
return super.transformDeleteQuery(node, queryId);
} finally {
this.#contextStack.pop();
}
}
transformMergeQuery(node, queryId) {
const currentContext = new Map();
this.#contextStack.push(currentContext);
try {
return super.transformMergeQuery(node, queryId);
} finally {
this.#contextStack.pop();
}
}
/**
 * Rewrites identifier nodes: entity name → table name (for table identifiers)
 * and property name → column name (for column identifiers), depending on the
 * configured naming strategies.
 */
transformIdentifier(node, queryId) {
  node = super.transformIdentifier(node, queryId);
  // nodeStack[len - 1] is this identifier; [len - 2] is its direct parent,
  // which tells us whether the identifier names a table or a column
  const parent = this.nodeStack[this.nodeStack.length - 2];
  // Transform table names when tableNamingStrategy is 'entity'
  if (this.#options.tableNamingStrategy === 'entity' && parent && SchemableIdentifierNode.is(parent)) {
    const meta = this.findEntityMetadata(node.name);
    if (meta) {
      return {
        ...node,
        name: meta.tableName,
      };
    }
  }
  // Transform column names when columnNamingStrategy is 'property'
  // Support ColumnNode, ColumnUpdateNode, and ReferenceNode (for JOIN conditions)
  if (
    this.#options.columnNamingStrategy === 'property' &&
    parent &&
    (ColumnNode.is(parent) || ColumnUpdateNode.is(parent) || ReferenceNode.is(parent))
  ) {
    const ownerMeta = this.findOwnerEntityInContext();
    if (ownerMeta) {
      // identifier carries the property name; replace it with the mapped column
      const prop = ownerMeta.properties[node.name];
      const fieldName = prop?.fieldNames?.[0];
      if (fieldName) {
        return {
          ...node,
          name: fieldName,
        };
      }
    }
  }
  return node;
}
/**
 * Find owner entity metadata for the current identifier in the context stack.
 * Supports both aliased and non-aliased table references.
 * Searches up the context stack to support correlated subqueries.
 * Also checks subquery/CTE aliases to resolve to their source tables.
 */
findOwnerEntityInContext() {
  // Check if current column has a table reference (e.g. u.firstName)
  const reference = this.nodeStack.find(it => ReferenceNode.is(it));
  if (reference?.table && TableNode.is(reference.table)) {
    const tableName = this.getTableName(reference.table);
    if (tableName) {
      // First, check in subquery alias map (for CTE/subquery columns)
      if (this.#subqueryAliasMap.has(tableName)) {
        return this.#subqueryAliasMap.get(tableName);
      }
      // Find entity metadata to get the actual table name
      // Context uses table names (meta.tableName) as keys, not entity names
      const entityMeta = this.findEntityMetadata(tableName);
      if (entityMeta) {
        // Search in context stack using the actual table name
        const meta = this.lookupInContextStack(entityMeta.tableName);
        if (meta) {
          return meta;
        }
        // Also try with the entity name (for cases where context uses entity name)
        const metaByEntityName = this.lookupInContextStack(tableName);
        if (metaByEntityName) {
          return metaByEntityName;
        }
      } else {
        // If entity metadata not found, try direct lookup (for CTE/subquery cases)
        const meta = this.lookupInContextStack(tableName);
        if (meta) {
          return meta;
        }
      }
    }
  }
  // If no explicit table reference, use the first entity in current context.
  // NOTE(review): picking the first entity in scope is a heuristic — in
  // multi-table scopes an unqualified column may resolve to the wrong entity;
  // confirm columns are table-qualified in joined queries.
  if (this.#contextStack.length > 0) {
    const currentContext = this.#contextStack[this.#contextStack.length - 1];
    for (const [alias, meta] of currentContext.entries()) {
      if (meta) {
        return meta;
      }
      // If the context value is undefined but the alias is in subqueryAliasMap,
      // use the mapped metadata (for CTE/subquery cases)
      if (!meta && this.#subqueryAliasMap.has(alias)) {
        const mappedMeta = this.#subqueryAliasMap.get(alias);
        if (mappedMeta) {
          return mappedMeta;
        }
      }
    }
  }
  return undefined;
}
/**
 * Appends columns/values for properties with an `onCreate` hook that are not
 * already present in the INSERT column list. Hook values are computed per row.
 * The node is rebuilt immutably — Kysely operation nodes are frozen.
 */
processOnCreateHooks(node, meta) {
  if (!node.columns || !node.values || !ValuesNode.is(node.values)) {
    return node;
  }
  // resolve which hook properties the statement already assigns
  const existingProps = new Set();
  for (const col of node.columns) {
    const prop = this.findProperty(meta, this.normalizeColumnName(col.column));
    if (prop) {
      existingProps.add(prop.name);
    }
  }
  const missingProps = meta.props.filter(prop => prop.onCreate && !existingProps.has(prop.name));
  if (missingProps.length === 0) {
    return node;
  }
  const newColumns = [...node.columns];
  for (const prop of missingProps) {
    newColumns.push(ColumnNode.create(prop.name));
  }
  // append the hook-computed values to every row, matching the added columns
  const newRows = node.values.values.map(row => {
    const valuesToAdd = missingProps.map(prop => {
      const val = prop.onCreate(undefined, this.#em);
      return val;
    });
    if (ValueListNode.is(row)) {
      const newValues = [...row.values, ...valuesToAdd.map(v => ValueNode.create(v))];
      return ValueListNode.create(newValues);
    }
    if (PrimitiveValueListNode.is(row)) {
      const newValues = [...row.values, ...valuesToAdd];
      return PrimitiveValueListNode.create(newValues);
    }
    // unknown row node kind — leave untouched
    return row;
  });
  return {
    ...node,
    columns: Object.freeze(newColumns),
    values: ValuesNode.create(newRows),
  };
}
processOnUpdateHooks(node, meta) {
if (!node.updates) {
return node;
}
const existingProps = new Set();
for (const update of node.updates) {
if (ColumnNode.is(update.column)) {
const prop = this.findProperty(meta, this.normalizeColumnName(update.column.column));
if (prop) {
existingProps.add(prop.name);
}
}
}
const missingProps = meta.props.filter(prop => prop.onUpdate && !existingProps.has(prop.name));
if (missingProps.length === 0) {
return node;
}
const newUpdates = [...node.updates];
for (const prop of missingProps) {
const val = prop.onUpdate(undefined, this.#em);
newUpdates.push(ColumnUpdateNode.create(ColumnNode.create(prop.name), ValueNode.create(val)));
}
return {
...node,
updates: Object.freeze(newUpdates),
};
}
/**
 * Runs input value conversion (custom types, dates) over the row values of an
 * INSERT statement. Returns the original node when nothing changed.
 */
processInsertValues(node, meta) {
  if (!node.columns?.length || !node.values || !ValuesNode.is(node.values)) {
    return node;
  }
  // Positional mapping: the column at index i owns the value at index i.
  const columnProps = this.mapColumnsToProperties(node.columns, meta);
  const shouldConvert = this.shouldConvertValues();
  let changed = false;
  const convertedRows = node.values.values.map(row => {
    if (ValueListNode.is(row)) {
      // Skip rows whose arity does not match the column list.
      if (row.values.length !== columnProps.length) {
        return row;
      }
      const values = row.values.map((valueNode, idx) => {
        if (!ValueNode.is(valueNode)) {
          return valueNode;
        }
        const converted = this.prepareInputValue(columnProps[idx], valueNode.value, shouldConvert);
        if (converted === valueNode.value) {
          return valueNode;
        }
        changed = true;
        // Preserve the `immediate` flag of the original value node.
        return valueNode.immediate ? ValueNode.createImmediate(converted) : ValueNode.create(converted);
      });
      return ValueListNode.create(values);
    }
    if (PrimitiveValueListNode.is(row)) {
      if (row.values.length !== columnProps.length) {
        return row;
      }
      const values = row.values.map((value, idx) => {
        const converted = this.prepareInputValue(columnProps[idx], value, shouldConvert);
        if (converted !== value) {
          changed = true;
        }
        return converted;
      });
      return PrimitiveValueListNode.create(values);
    }
    // Unknown row kinds are left untouched.
    return row;
  });
  if (!changed) {
    return node;
  }
  return {
    ...node,
    values: ValuesNode.create(convertedRows),
  };
}
processUpdateValues(node, meta) {
if (!node.updates?.length) {
return node;
}
const shouldConvert = this.shouldConvertValues();
let changed = false;
const updates = node.updates.map(updateNode => {
if (!ValueNode.is(updateNode.value)) {
return updateNode;
}
const columnName = ColumnNode.is(updateNode.column)
? this.normalizeColumnName(updateNode.column.column)
: undefined;
const property = this.findProperty(meta, columnName);
const converted = this.prepareInputValue(property, updateNode.value.value, shouldConvert);
if (converted === updateNode.value.value) {
return updateNode;
}
changed = true;
const newValueNode = updateNode.value.immediate
? ValueNode.createImmediate(converted)
: ValueNode.create(converted);
return {
...updateNode,
value: newValueNode,
};
});
if (!changed) {
return node;
}
return {
...node,
updates,
};
}
mapColumnsToProperties(columns, meta) {
return columns.map(column => {
const columnName = this.normalizeColumnName(column.column);
return this.findProperty(meta, columnName);
});
}
normalizeColumnName(identifier) {
const name = identifier.name;
if (!name.includes('.')) {
return name;
}
const parts = name.split('.');
return parts[parts.length - 1] ?? name;
}
findProperty(meta, columnName) {
if (!meta || !columnName) {
return undefined;
}
if (meta.properties[columnName]) {
return meta.properties[columnName];
}
return meta.props.find(prop => prop.fieldNames?.includes(columnName));
}
shouldConvertValues() {
return !!this.#options.convertValues;
}
/**
 * Converts a JS value to its database representation for query parameters.
 * Raw fragments and objects carrying a `kind` (operation nodes) pass through
 * untouched; conversion is skipped entirely unless `enabled` is set.
 */
prepareInputValue(prop, value, enabled) {
  if (!enabled || !prop || value == null) {
    return value;
  }
  if (typeof value === 'object' && value !== null) {
    // Raw SQL fragments must never be converted.
    if (isRaw(value)) {
      return value;
    }
    // Objects with a `kind` are treated as operation nodes — leave them alone.
    if ('kind' in value) {
      return value;
    }
  }
  // Custom types take precedence over the generic date handling below.
  if (prop.customType && !isRaw(value)) {
    return prop.customType.convertToDatabaseValue(value, this.#platform, {
      fromQuery: true,
      key: prop.name,
      mode: 'query-data',
    });
  }
  if (value instanceof Date) {
    return this.#platform.processDateProperty(value);
  }
  return value;
}
/**
* Look up a table name/alias in the context stack.
* Searches from current scope (top of stack) to parent scopes (bottom).
* This supports correlated subqueries and references to outer query tables.
*/
lookupInContextStack(tableNameOrAlias) {
// Search from top of stack (current scope) to bottom (parent scopes)
for (let i = this.#contextStack.length - 1; i >= 0; i--) {
const context = this.#contextStack[i];
if (context.has(tableNameOrAlias)) {
return context.get(tableNameOrAlias);
}
}
return undefined;
}
/**
* Process WITH node (CTE definitions)
*/
processWithNode(withNode, context) {
for (const cte of withNode.expressions) {
const cteName = this.getCTEName(cte.name);
if (cteName) {
// CTEs are not entities, so map to undefined
// They will be transformed recursively by transformSelectQuery
context.set(cteName, undefined);
// Also try to extract the source table from the CTE's expression
// This helps resolve columns in subsequent queries that use the CTE
if (cte.expression?.kind === 'SelectQueryNode') {
const sourceMeta = this.extractSourceTableFromSelectQuery(cte.expression);
if (sourceMeta) {
this.#subqueryAliasMap.set(cteName, sourceMeta);
// Add CTE to entityMap so it can be used for result transformation if needed
// (though CTEs usually don't appear in result rows directly, but their columns might)
this.#entityMap.set(cteName, sourceMeta);
}
}
}
}
}
/**
* Extract CTE name from CommonTableExpressionNameNode
*/
getCTEName(nameNode) {
if (TableNode.is(nameNode.table)) {
return this.getTableName(nameNode.table);
}
return undefined;
}
/**
* Process a FROM item (can be TableNode or AliasNode)
*/
processFromItem(
  from, // OperationNode type - can be TableNode, AliasNode, or SelectQueryNode
  context,
) {
  if (AliasNode.is(from)) {
    if (TableNode.is(from.node)) {
      // Regular table with alias: register the alias -> metadata mapping.
      const tableName = this.getTableName(from.node);
      if (tableName && from.alias) {
        const meta = this.findEntityMetadata(tableName);
        const aliasName = this.extractAliasName(from.alias);
        if (aliasName) {
          context.set(aliasName, meta);
          if (meta) {
            this.#entityMap.set(aliasName, meta);
          }
          // Also map the alias in subqueryAliasMap if the table name is a CTE
          if (this.#subqueryAliasMap.has(tableName)) {
            this.#subqueryAliasMap.set(aliasName, this.#subqueryAliasMap.get(tableName));
          }
        }
      }
    } else if (from.node?.kind === 'SelectQueryNode') {
      // Subquery with alias: no entity metadata for the alias itself.
      const aliasName = this.extractAliasName(from.alias);
      if (aliasName) {
        context.set(aliasName, undefined);
        // Try to extract the source table from the subquery so its columns
        // can still be resolved to an entity later.
        const sourceMeta = this.extractSourceTableFromSelectQuery(from.node);
        if (sourceMeta) {
          this.#subqueryAliasMap.set(aliasName, sourceMeta);
        }
      }
    } else {
      // Other aliased node kinds: register the alias without metadata.
      const aliasName = this.extractAliasName(from.alias);
      if (aliasName) {
        context.set(aliasName, undefined);
      }
    }
  } else if (TableNode.is(from)) {
    // Table without alias: key the context by the table name itself.
    const tableName = this.getTableName(from);
    if (tableName) {
      const meta = this.findEntityMetadata(tableName);
      context.set(tableName, meta);
      if (meta) {
        this.#entityMap.set(tableName, meta);
      }
    }
  }
}
/**
* Process a JOIN node
*/
processJoinNode(join, context) {
  const joinTable = join.table;
  if (AliasNode.is(joinTable)) {
    if (TableNode.is(joinTable.node)) {
      // Regular table with alias in JOIN: register alias -> metadata.
      const tableName = this.getTableName(joinTable.node);
      if (tableName && joinTable.alias) {
        const meta = this.findEntityMetadata(tableName);
        const aliasName = this.extractAliasName(joinTable.alias);
        if (aliasName) {
          context.set(aliasName, meta);
          if (meta) {
            this.#entityMap.set(aliasName, meta);
          }
          // Also map the alias in subqueryAliasMap if the table name is a CTE
          if (this.#subqueryAliasMap.has(tableName)) {
            this.#subqueryAliasMap.set(aliasName, this.#subqueryAliasMap.get(tableName));
          }
        }
      }
    } else if (joinTable.node?.kind === 'SelectQueryNode') {
      // Subquery with alias in JOIN: no entity metadata for the alias itself.
      const aliasName = this.extractAliasName(joinTable.alias);
      if (aliasName) {
        context.set(aliasName, undefined);
        // Try to extract the source table from the subquery
        const sourceMeta = this.extractSourceTableFromSelectQuery(joinTable.node);
        if (sourceMeta) {
          this.#subqueryAliasMap.set(aliasName, sourceMeta);
        }
      }
    } else {
      // Other aliased node kinds: register the alias without metadata.
      const aliasName = this.extractAliasName(joinTable.alias);
      if (aliasName) {
        context.set(aliasName, undefined);
      }
    }
  } else if (TableNode.is(joinTable)) {
    // Table without alias in JOIN
    const tableName = this.getTableName(joinTable);
    if (tableName) {
      const meta = this.findEntityMetadata(tableName);
      // Use table name (meta.tableName) as key to match transformUpdateQuery behavior
      if (meta) {
        context.set(meta.tableName, meta);
        this.#entityMap.set(meta.tableName, meta);
        // Also set with entity name for backward compatibility — the context
        // ends up keyed by both spellings.
        context.set(tableName, meta);
      } else {
        context.set(tableName, undefined);
      }
    }
  }
}
/**
* Extract the primary source table from a SELECT query
* This helps resolve columns from subqueries to their original entity tables
*/
extractSourceTableFromSelectQuery(selectQuery) {
if (!selectQuery.from?.froms || selectQuery.from.froms.length === 0) {
return undefined;
}
// Get the first FROM table
const firstFrom = selectQuery.from.froms[0];
let sourceTable;
if (AliasNode.is(firstFrom) && TableNode.is(firstFrom.node)) {
sourceTable = firstFrom.node;
} else if (TableNode.is(firstFrom)) {
sourceTable = firstFrom;
}
if (sourceTable) {
const tableName = this.getTableName(sourceTable);
if (tableName) {
return this.findEntityMetadata(tableName);
}
}
return undefined;
}
/**
* Extract alias name from an alias node
*/
extractAliasName(alias) {
if (typeof alias === 'object' && 'name' in alias) {
return alias.name;
}
return undefined;
}
/**
* Extract table name from a TableNode
*/
getTableName(node) {
if (!node) {
return undefined;
}
if (TableNode.is(node) && SchemableIdentifierNode.is(node.table)) {
const identifier = node.table.identifier;
if (typeof identifier === 'object' && 'name' in identifier) {
return identifier.name;
}
}
return undefined;
}
/**
* Find entity metadata by table name or entity name
*/
findEntityMetadata(name) {
const byEntity = this.#metadata.getByClassName(name, false);
if (byEntity) {
return byEntity;
}
const allMetadata = Array.from(this.#metadata);
const byTable = allMetadata.find(m => m.tableName === name);
if (byTable) {
return byTable;
}
return undefined;
}
/**
* Transform result rows by mapping database column names to property names
* This is called for SELECT queries when columnNamingStrategy is 'property'
*/
transformResult(rows, entityMap) {
// Only transform if columnNamingStrategy is 'property' or convertValues is true, and we have data
if (
(this.#options.columnNamingStrategy !== 'property' && !this.#options.convertValues) ||
!rows ||
rows.length === 0
) {
return rows;
}
// If no entities found (e.g. raw query without known tables), return rows as is
if (entityMap.size === 0) {
return rows;
}
// Build a global mapping from database field names to property objects
const fieldToPropertyMap = this.buildGlobalFieldMap(entityMap);
const relationFieldMap = this.buildGlobalRelationFieldMap(entityMap);
// Transform each row
return rows.map(row => this.transformRow(row, fieldToPropertyMap, relationFieldMap));
}
buildGlobalFieldMap(entityMap) {
const map = {};
for (const [alias, meta] of entityMap.entries()) {
Object.assign(map, this.buildFieldToPropertyMap(meta, alias));
}
return map;
}
buildGlobalRelationFieldMap(entityMap) {
const map = {};
for (const [alias, meta] of entityMap.entries()) {
Object.assign(map, this.buildRelationFieldMap(meta, alias));
}
return map;
}
/**
* Build a mapping from database field names to property objects
* Format: { 'field_name': EntityProperty }
*/
buildFieldToPropertyMap(meta, alias) {
const map = {};
for (const prop of meta.props) {
if (prop.fieldNames && prop.fieldNames.length > 0) {
for (const fieldName of prop.fieldNames) {
if (!(fieldName in map)) {
map[fieldName] = prop;
}
if (alias) {
const dotted = `${alias}.${fieldName}`;
if (!(dotted in map)) {
map[dotted] = prop;
}
const underscored = `${alias}_${fieldName}`;
if (!(underscored in map)) {
map[underscored] = prop;
}
const doubleUnderscored = `${alias}__${fieldName}`;
if (!(doubleUnderscored in map)) {
map[doubleUnderscored] = prop;
}
}
}
}
if (!(prop.name in map)) {
map[prop.name] = prop;
}
}
return map;
}
/**
* Build a mapping for relation fields
* For ManyToOne relations, we need to map from the foreign key field to the relation property
* Format: { 'foreign_key_field': 'relationPropertyName' }
*/
buildRelationFieldMap(meta, alias) {
const map = {};
for (const prop of meta.props) {
// For ManyToOne/OneToOne relations, find the foreign key field
if (prop.kind === ReferenceKind.MANY_TO_ONE || prop.kind === ReferenceKind.ONE_TO_ONE) {
if (prop.fieldNames && prop.fieldNames.length > 0) {
const fieldName = prop.fieldNames[0];
map[fieldName] = prop.name;
if (alias) {
map[`${alias}.${fieldName}`] = prop.name;
map[`${alias}_${fieldName}`] = prop.name;
map[`${alias}__${fieldName}`] = prop.name;
}
}
}
}
return map;
}
/**
* Transform a single row by mapping column names to property names
*/
transformRow(row, fieldToPropertyMap, relationFieldMap) {
const transformed = { ...row };
// First pass: map regular fields from fieldName to propertyName and convert values
for (const [fieldName, prop] of Object.entries(fieldToPropertyMap)) {
if (!(fieldName in transformed)) {
continue;
}
const converted = this.prepareOutputValue(prop, transformed[fieldName]);
if (this.#options.columnNamingStrategy === 'property' && prop.name !== fieldName) {
if (!(prop.name in transformed)) {
transformed[prop.name] = converted;
} else {
transformed[prop.name] = converted;
}
delete transformed[fieldName];
continue;
}
if (this.#options.convertValues) {
transformed[fieldName] = converted;
}
}
// Second pass: handle relation fields
// Only run if columnNamingStrategy is 'property', as we don't want to rename FKs otherwise
if (this.#options.columnNamingStrategy === 'property') {
for (const [fieldName, relationPropertyName] of Object.entries(relationFieldMap)) {
if (fieldName in transformed && !(relationPropertyName in transformed)) {
// Move the foreign key value to the relation property name
transformed[relationPropertyName] = transformed[fieldName];
delete transformed[fieldName];
}
}
}
return transformed;
}
prepareOutputValue(prop, value) {
if (!this.#options.convertValues || !prop || value == null) {
return value;
}
if (prop.customType) {
return prop.customType.convertToJSValue(value, this.#platform);
}
// Aligned with EntityComparator.getResultMapper logic
if (prop.runtimeType === 'boolean') {
// Use !! conversion like EntityComparator: value == null ? value : !!value
return value == null ? value : !!value;
}
if (prop.runtimeType === 'Date' && !this.#platform.isNumericProperty(prop)) {
// Aligned with EntityComparator: exclude numeric timestamp properties
// If already Date instance or null, return as is
if (value == null || value instanceof Date) {
return value;
}
// Handle timezone like EntityComparator.parseDate
const tz = this.#platform.getTimezone();
if (!tz || tz === 'local') {
return this.#platform.parseDate(value);
}
// For non-local timezone, check if value already has timezone info
// Number (timestamp) doesn't need timezone handling, string needs check
if (
typeof value === 'number' ||
(typeof value === 'string' && (value.includes('+') || value.lastIndexOf('-') > 10 || value.endsWith('Z')))
) {
return this.#platform.parseDate(value);
}
// Append timezone if not present (only for string values)
return this.#platform.parseDate(value + tz);
}
// For all other runtimeTypes (number, string, bigint, Buffer, object, any, etc.)
// EntityComparator just assigns directly without conversion
return value;
}
}

View File

@@ -0,0 +1,11 @@
import { CriteriaNode } from './CriteriaNode.js';
import type { IQueryBuilder, ICriteriaNodeProcessOptions } from '../typings.js';
/**
* @internal
*/
export declare class ArrayCriteriaNode<T extends object> extends CriteriaNode<T> {
    /** Processes every child node and returns the results in order. */
    process(qb: IQueryBuilder<T>, options?: ICriteriaNodeProcessOptions): any;
    /** Unwraps every child node into its raw payload, in order. */
    unwrap(): any;
    /** Returns true when at least one child node would auto-join a relation. */
    willAutoJoin(qb: IQueryBuilder<T>, alias?: string, options?: ICriteriaNodeProcessOptions): boolean;
    /** Returns true when this node or any child node is marked strict. */
    isStrict(): boolean;
}

24
node_modules/@mikro-orm/sql/query/ArrayCriteriaNode.js generated vendored Normal file
View File

@@ -0,0 +1,24 @@
import { CriteriaNode } from './CriteriaNode.js';
/**
* @internal
*/
export class ArrayCriteriaNode extends CriteriaNode {
    /** Processes all children, returning their results in order. */
    process(qb, options) {
        return this.payload.map(child => child.process(qb, options));
    }
    /** Unwraps all children into raw payloads, in order. */
    unwrap() {
        return this.payload.map(child => child.unwrap());
    }
    /** True when at least one child would auto-join. */
    willAutoJoin(qb, alias, options) {
        return this.payload.some(child => child.willAutoJoin(qb, alias, options));
    }
    /** Strict when this node, or any child, is strict. */
    isStrict() {
        return this.strict || this.payload.some(child => child.isStrict());
    }
}

43
node_modules/@mikro-orm/sql/query/CriteriaNode.d.ts generated vendored Normal file
View File

@@ -0,0 +1,43 @@
import {
type EntityKey,
type EntityProperty,
type MetadataStorage,
type RawQueryFragmentSymbol,
type EntityName,
} from '@mikro-orm/core';
import type { ICriteriaNode, ICriteriaNodeProcessOptions, IQueryBuilder } from '../typings.js';
/**
* Helper for working with deeply nested where/orderBy/having criteria. Uses composite pattern to build tree from the payload.
* Auto-joins relations and converts payload from { books: { publisher: { name: '...' } } } to { 'publisher_alias.name': '...' }
* @internal
*/
export declare class CriteriaNode<T extends object> implements ICriteriaNode<T> {
    protected readonly metadata: MetadataStorage;
    /** Entity this node's criteria apply to. */
    readonly entityName: EntityName<T>;
    /** Parent node in the criteria tree (undefined for the root). */
    readonly parent?: ICriteriaNode<T> | undefined;
    /** Key under which this node sits in the parent payload. */
    readonly key?: (EntityKey<T> | RawQueryFragmentSymbol) | undefined;
    /** Whether unknown property keys should raise an error. */
    readonly validate: boolean;
    readonly strict: boolean;
    /** Raw criteria value held by this node. */
    payload: any;
    /** Entity property resolved from `key`, when it maps to one. */
    prop?: EntityProperty<T>;
    /** Branch index within the parent array (set for `$and` branches). */
    index?: number;
    constructor(
        metadata: MetadataStorage,
        entityName: EntityName<T>,
        parent?: ICriteriaNode<T> | undefined,
        key?: (EntityKey<T> | RawQueryFragmentSymbol) | undefined,
        validate?: boolean,
        strict?: boolean,
    );
    /** Processes the node into query-builder criteria (base: returns payload). */
    process(qb: IQueryBuilder<T>, options?: ICriteriaNodeProcessOptions): any;
    /** Returns the raw payload (base: as-is). */
    unwrap(): any;
    shouldInline(payload: any): boolean;
    willAutoJoin(qb: IQueryBuilder<T>, alias?: string, options?: ICriteriaNodeProcessOptions): boolean;
    /** Whether the key should be renamed to the relation PK for this payload. */
    shouldRename(payload: any): boolean;
    /** Maps the field to the PK column(s) of the join target. */
    renameFieldToPK<T>(qb: IQueryBuilder<T>, ownerAlias?: string): string;
    /** Dotted path of this node within the criteria tree (used to match joins). */
    getPath(opts?: { addIndex?: boolean; parentPath?: string }): string;
    private isPivotJoin;
    /** Marks a path as targeting the M:N pivot table rather than the target entity. */
    getPivotPath(path: string): string;
    /** Prefixes the field with the alias when one is provided. */
    aliased(field: string, alias?: string): string;
    isStrict(): boolean;
}

150
node_modules/@mikro-orm/sql/query/CriteriaNode.js generated vendored Normal file
View File

@@ -0,0 +1,150 @@
import { RawQueryFragment, ReferenceKind, Utils, inspect } from '@mikro-orm/core';
/**
* Helper for working with deeply nested where/orderBy/having criteria. Uses composite pattern to build tree from the payload.
* Auto-joins relations and converts payload from { books: { publisher: { name: '...' } } } to { 'publisher_alias.name': '...' }
* @internal
*/
export class CriteriaNode {
    metadata;
    entityName;
    parent;
    key;
    validate;
    strict;
    payload;
    prop;
    index;
    // Resolves `key` against the parent entity's metadata to find the matching
    // property, and (unless disabled) validates that the queried property exists.
    constructor(metadata, entityName, parent, key, validate = true, strict = false) {
        this.metadata = metadata;
        this.entityName = entityName;
        this.parent = parent;
        this.key = key;
        this.validate = validate;
        this.strict = strict;
        const meta = parent && metadata.find(parent.entityName);
        if (meta && key && !RawQueryFragment.isKnownFragmentSymbol(key)) {
            const pks = Utils.splitPrimaryKeys(key);
            // composite keys cannot be resolved to a single property
            if (pks.length > 1) {
                return;
            }
            for (const k of pks) {
                this.prop = meta.props.find(
                    prop =>
                        prop.name === k || (prop.fieldNames?.length === 1 && prop.fieldNames[0] === k && prop.persist !== false),
                );
                const isProp = this.prop || meta.props.find(prop => (prop.fieldNames || []).includes(k));
                // do not validate if the key is prefixed or type casted (e.g. `k::text`)
                if (validate && !isProp && !k.includes('.') && !k.includes('::') && !Utils.isOperator(k)) {
                    throw new Error(`Trying to query by not existing property ${Utils.className(entityName)}.${k}`);
                }
            }
        }
    }
    // Base implementation returns the payload as-is; subclasses override.
    process(qb, options) {
        return this.payload;
    }
    // Base implementation returns the payload as-is; subclasses override.
    unwrap() {
        return this.payload;
    }
    shouldInline(payload) {
        return false;
    }
    willAutoJoin(qb, alias, options) {
        return false;
    }
    // Decides whether the criteria key must be renamed to the relation PK
    // (e.g. when querying a to-many relation by a scalar value or an operator).
    shouldRename(payload) {
        const type = this.prop ? this.prop.kind : null;
        const composite = this.prop?.joinColumns ? this.prop.joinColumns.length > 1 : false;
        const rawField = RawQueryFragment.isKnownFragmentSymbol(this.key);
        const scalar =
            payload === null ||
            Utils.isPrimaryKey(payload) ||
            payload instanceof RegExp ||
            payload instanceof Date ||
            rawField;
        const operator =
            Utils.isPlainObject(payload) && Utils.getObjectQueryKeys(payload).every(k => Utils.isOperator(k, false));
        if (composite) {
            return true;
        }
        switch (type) {
            case ReferenceKind.MANY_TO_ONE:
                return false;
            case ReferenceKind.ONE_TO_ONE:
                // only the inverse side needs renaming (no FK on this table)
                return !this.prop.owner;
            case ReferenceKind.ONE_TO_MANY:
                return scalar || operator;
            case ReferenceKind.MANY_TO_MANY:
                return scalar || operator;
            default:
                return false;
        }
    }
    // Maps the criteria field to the PK column(s) of the join target, reusing
    // the alias of an existing join when one matches this node's path.
    renameFieldToPK(qb, ownerAlias) {
        const joinAlias = qb.getAliasForJoinPath(this.getPath(), { matchPopulateJoins: true });
        if (
            !joinAlias &&
            this.parent &&
            [ReferenceKind.MANY_TO_ONE, ReferenceKind.ONE_TO_ONE].includes(this.prop.kind) &&
            this.prop.owner
        ) {
            // owning to-one without a join — reference the FK columns directly
            const alias = qb.getAliasForJoinPath(this.parent.getPath()) ?? ownerAlias ?? qb.alias;
            return Utils.getPrimaryKeyHash(this.prop.joinColumns.map(col => `${alias}.${col}`));
        }
        const alias = joinAlias ?? ownerAlias ?? qb.alias;
        if (this.prop.kind === ReferenceKind.MANY_TO_MANY) {
            return Utils.getPrimaryKeyHash(this.prop.inverseJoinColumns.map(col => `${alias}.${col}`));
        }
        return Utils.getPrimaryKeyHash(this.prop.referencedColumnNames.map(col => `${alias}.${col}`));
    }
    // Builds the dotted path of this node in the criteria tree, used for
    // matching existing joins (e.g. `Author.books[0].publisher`).
    getPath(opts) {
        // use index on parent only if we are processing to-many relation
        const addParentIndex =
            this.prop && [ReferenceKind.ONE_TO_MANY, ReferenceKind.MANY_TO_MANY].includes(this.prop.kind);
        const parentPath =
            opts?.parentPath ?? this.parent?.getPath({ addIndex: addParentIndex }) ?? Utils.className(this.entityName);
        const index = opts?.addIndex && this.index != null ? `[${this.index}]` : '';
        // ignore group operators to allow easier mapping (e.g. for orderBy)
        const key =
            this.key && !RawQueryFragment.isKnownFragmentSymbol(this.key) && !['$and', '$or', '$not'].includes(this.key)
                ? '.' + this.key
                : '';
        const ret = parentPath + index + key;
        if (this.isPivotJoin()) {
            // distinguish pivot table join from target entity join
            return this.getPivotPath(ret);
        }
        return ret;
    }
    // An M:N relation queried by a scalar or operator targets the pivot table.
    isPivotJoin() {
        if (!this.key || !this.prop) {
            return false;
        }
        const rawField = RawQueryFragment.isKnownFragmentSymbol(this.key);
        const scalar =
            this.payload === null ||
            Utils.isPrimaryKey(this.payload) ||
            this.payload instanceof RegExp ||
            this.payload instanceof Date ||
            rawField;
        const operator =
            Utils.isObject(this.payload) && Utils.getObjectQueryKeys(this.payload).every(k => Utils.isOperator(k, false));
        return this.prop.kind === ReferenceKind.MANY_TO_MANY && (scalar || operator);
    }
    getPivotPath(path) {
        return `${path}[pivot]`;
    }
    // Prefixes the field with the alias when one is provided.
    aliased(field, alias) {
        return alias ? `${alias}.${field}` : field;
    }
    isStrict() {
        return this.strict;
    }
    /** @ignore */
    /* v8 ignore next */
    [Symbol.for('nodejs.util.inspect.custom')]() {
        const o = {};
        ['entityName', 'key', 'index', 'payload'].filter(k => this[k] !== undefined).forEach(k => (o[k] = this[k]));
        return `${this.constructor.name} ${inspect(o)}`;
    }
}

View File

@@ -0,0 +1,55 @@
import {
type Dictionary,
type EntityKey,
type EntityMetadata,
type EntityName,
type MetadataStorage,
type RawQueryFragmentSymbol,
} from '@mikro-orm/core';
import type { ICriteriaNode } from '../typings.js';
/**
* @internal
*/
export declare class CriteriaNodeFactory {
    /** Creates a scalar, array, or object node based on the payload shape. */
    static createNode<T extends object>(
        metadata: MetadataStorage,
        entityName: EntityName<T>,
        payload: any,
        parent?: ICriteriaNode<T>,
        key?: EntityKey<T> | RawQueryFragmentSymbol,
        validate?: boolean,
    ): ICriteriaNode<T>;
    /** Creates a leaf node wrapping a scalar payload. */
    static createScalarNode<T extends object>(
        metadata: MetadataStorage,
        entityName: EntityName<T>,
        payload: any,
        parent?: ICriteriaNode<T>,
        key?: EntityKey<T> | RawQueryFragmentSymbol,
        validate?: boolean,
    ): ICriteriaNode<T>;
    /** Creates an array node whose children are built per array item. */
    static createArrayNode<T extends object>(
        metadata: MetadataStorage,
        entityName: EntityName<T>,
        payload: any[],
        parent?: ICriteriaNode<T>,
        key?: EntityKey<T>,
        validate?: boolean,
    ): ICriteriaNode<T>;
    /** Creates an object node whose children are built per payload key. */
    static createObjectNode<T extends object>(
        metadata: MetadataStorage,
        entityName: EntityName<T>,
        payload: Dictionary,
        parent?: ICriteriaNode<T>,
        key?: EntityKey<T>,
        validate?: boolean,
    ): ICriteriaNode<T>;
    /** Creates the child node for a single object key (handles JSON/embedded props). */
    static createObjectItemNode<T extends object>(
        metadata: MetadataStorage,
        entityName: EntityName<T>,
        node: ICriteriaNode<T>,
        payload: Dictionary,
        key: EntityKey<T> | RawQueryFragmentSymbol,
        meta?: EntityMetadata<T>,
        validate?: boolean,
    ): ICriteriaNode<T>;
}

View File

@@ -0,0 +1,111 @@
import {
GroupOperator,
isRaw,
JsonType,
RawQueryFragment,
ReferenceKind,
Utils,
ValidationError,
} from '@mikro-orm/core';
import { ObjectCriteriaNode } from './ObjectCriteriaNode.js';
import { ArrayCriteriaNode } from './ArrayCriteriaNode.js';
import { ScalarCriteriaNode } from './ScalarCriteriaNode.js';
import { EMBEDDABLE_ARRAY_OPS } from './enums.js';
/**
* @internal
*/
export class CriteriaNodeFactory {
    // Dispatches to the scalar/array/object factory based on payload shape.
    static createNode(metadata, entityName, payload, parent, key, validate = true) {
        const rawField = RawQueryFragment.isKnownFragmentSymbol(key);
        const scalar =
            Utils.isPrimaryKey(payload) || isRaw(payload) || payload instanceof RegExp || payload instanceof Date || rawField;
        if (Array.isArray(payload) && !scalar) {
            return this.createArrayNode(metadata, entityName, payload, parent, key, validate);
        }
        if (Utils.isPlainObject(payload) && !scalar) {
            return this.createObjectNode(metadata, entityName, payload, parent, key, validate);
        }
        return this.createScalarNode(metadata, entityName, payload, parent, key, validate);
    }
    static createScalarNode(metadata, entityName, payload, parent, key, validate = true) {
        const node = new ScalarCriteriaNode(metadata, entityName, parent, key, validate);
        node.payload = payload;
        return node;
    }
    static createArrayNode(metadata, entityName, payload, parent, key, validate = true) {
        const node = new ArrayCriteriaNode(metadata, entityName, parent, key, validate);
        node.payload = payload.map((item, index) => {
            const n = this.createNode(metadata, entityName, item, node, undefined, validate);
            // we care about branching only for $and
            if (key === '$and' && payload.length > 1) {
                n.index = index;
            }
            return n;
        });
        return node;
    }
    static createObjectNode(metadata, entityName, payload, parent, key, validate = true) {
        const meta = metadata.find(entityName);
        // `__strict` on the payload opts this node into strict matching
        const node = new ObjectCriteriaNode(metadata, entityName, parent, key, validate, payload.__strict);
        node.payload = {};
        for (const k of Utils.getObjectQueryKeys(payload)) {
            node.payload[k] = this.createObjectItemNode(metadata, entityName, node, payload, k, meta, validate);
        }
        return node;
    }
    // Builds the child node for one object key, with special handling for JSON
    // custom types and embedded properties.
    static createObjectItemNode(metadata, entityName, node, payload, key, meta, validate = true) {
        const rawField = RawQueryFragment.isKnownFragmentSymbol(key);
        const prop = rawField ? null : meta?.properties[key];
        const childEntity = prop && prop.kind !== ReferenceKind.SCALAR ? prop.targetMeta.class : entityName;
        const isNotEmbedded = rawField || prop?.kind !== ReferenceKind.EMBEDDED;
        const val = payload[key];
        // JSON columns are queried as opaque scalar values
        if (isNotEmbedded && prop?.customType instanceof JsonType) {
            return this.createScalarNode(metadata, childEntity, val, node, key, validate);
        }
        if (prop?.kind === ReferenceKind.SCALAR && val != null && Object.keys(val).some(f => f in GroupOperator)) {
            throw ValidationError.cannotUseGroupOperatorsInsideScalars(entityName, prop.name, payload);
        }
        if (isNotEmbedded) {
            return this.createNode(metadata, childEntity, val, node, key, validate);
        }
        // querying an embedded by null/undefined expands to `null` on every embedded prop
        if (val == null) {
            const map = Object.keys(prop.embeddedProps).reduce((oo, k) => {
                oo[prop.embeddedProps[k].name] = null;
                return oo;
            }, {});
            return this.createNode(metadata, entityName, map, node, key, validate);
        }
        // For array embeddeds stored as real columns, route property-level queries
        // as scalar nodes so QueryBuilderHelper generates EXISTS subqueries with
        // JSON array iteration. Keys containing `~` indicate the property lives
        // inside a parent's object-mode JSON column (MetadataDiscovery uses `~` as
        // the glue for object embeds), where JSON path access is used instead.
        if (prop.array && !String(key).includes('~')) {
            const keys = Object.keys(val);
            const hasOnlyArrayOps = keys.every(k => EMBEDDABLE_ARRAY_OPS.includes(k));
            if (!hasOnlyArrayOps) {
                return this.createScalarNode(metadata, entityName, val, node, key, validate);
            }
        }
        // array operators can be used on embedded properties
        const operator = Object.keys(val).some(f => Utils.isOperator(f) && !EMBEDDABLE_ARRAY_OPS.includes(f));
        if (operator) {
            throw ValidationError.cannotUseOperatorsInsideEmbeddables(entityName, prop.name, payload);
        }
        // remap embedded keys to their property names; unknown non-operator keys throw
        const map = Object.keys(val).reduce((oo, k) => {
            const embeddedProp = prop.embeddedProps[k] ?? Object.values(prop.embeddedProps).find(p => p.name === k);
            if (!embeddedProp && !EMBEDDABLE_ARRAY_OPS.includes(k)) {
                throw ValidationError.invalidEmbeddableQuery(entityName, k, prop.type);
            }
            if (embeddedProp) {
                oo[embeddedProp.name] = val[k];
            } else if (typeof val[k] === 'object') {
                oo[k] = JSON.stringify(val[k]);
            } else {
                oo[k] = val[k];
            }
            return oo;
        }, {});
        return this.createNode(metadata, entityName, map, node, key, validate);
    }
}

View File

@@ -0,0 +1,137 @@
import { type Dictionary, LockMode, type QueryFlag, RawQueryFragment, type Subquery } from '@mikro-orm/core';
import { QueryType } from './enums.js';
import type { AbstractSqlPlatform } from '../AbstractSqlPlatform.js';
/** Options for Common Table Expression (CTE) definitions. */
export interface CteOptions {
  /** Column names for the CTE. */
  columns?: string[];
  /** PostgreSQL: `MATERIALIZED` when true, `NOT MATERIALIZED` when false. */
  materialized?: boolean;
}
/** A compiled CTE entry: name plus the SQL body and its bound parameters. */
interface CteClause extends CteOptions {
  name: string;
  sql: string;
  params: unknown[];
  /** Marks a recursive CTE (see `withRecursive`). */
  recursive?: boolean;
}
/** Internal accumulator of all clause fragments for a native SQL query. */
interface Options {
  tableName?: string | RawQueryFragment;
  indexHint?: string;
  select?: (string | RawQueryFragment)[];
  distinct?: boolean;
  distinctOn?: string[];
  // pre-compiled JOIN fragments with their bound parameters
  joins?: {
    sql: string;
    params: unknown[];
  }[];
  groupBy?: (string | RawQueryFragment)[];
  // pre-compiled WHERE clause
  where?: {
    sql: string;
    params: unknown[];
  };
  // pre-compiled HAVING clause
  having?: {
    sql: string;
    params: unknown[];
  };
  orderBy?: string;
  limit?: number;
  offset?: number;
  // payload for INSERT/UPDATE statements
  data?: Dictionary;
  onConflict?: OnConflictClause;
  lockMode?: LockMode;
  lockTables?: string[];
  returning?: (string | RawQueryFragment | [name: string, type: unknown])[];
  comment?: string[];
  hintComment?: string[];
  flags?: Set<QueryFlag>;
  // wraps the final SQL with a prefix/suffix pair
  wrap?: [prefix: string, suffix: string];
  ctes?: CteClause[];
}
/** Options for specifying the target table in FROM/INTO clauses. */
export interface TableOptions {
  /** Schema to qualify the table name with. */
  schema?: string;
  /** Dialect-specific index hint for the table reference. */
  indexHint?: string;
  /** Alias for the table in the generated SQL. */
  alias?: string;
}
/** Describes an `on conflict` (upsert) clause for INSERT queries. */
interface OnConflictClause {
  /** Conflict target columns, or a raw fragment used verbatim. */
  fields: string[] | RawQueryFragment;
  /** Emit `do nothing` instead of an update. */
  ignore?: boolean;
  /** `do update set` payload: explicit values (dictionary) or column names merged from `excluded`. */
  merge?: Dictionary | (string | RawQueryFragment)[];
  /** Optional WHERE condition appended to the conflict clause. */
  where?: {
    sql: string;
    params: unknown[];
  };
}
/**
 * Low-level string-based SQL builder used internally by the SQL driver.
 * Clause setters record state into an options bag; `compile()` renders the
 * final SQL and positional parameters. Implements `Subquery` so instances can
 * be embedded into other builders.
 * @internal
 */
export declare class NativeQueryBuilder implements Subquery {
  protected readonly platform: AbstractSqlPlatform;
  readonly __subquery: true;
  /** Query verb (SELECT/INSERT/...), set by the corresponding entry method. */
  protected type?: QueryType;
  /** SQL fragments accumulated during compilation. */
  protected parts: string[];
  /** Positional parameters matching `?` placeholders in {@link parts}. */
  protected params: unknown[];
  protected options: Options;
  constructor(platform: AbstractSqlPlatform);
  /** Appends fields to the SELECT projection and sets the query type. */
  select(fields: string | RawQueryFragment | (string | RawQueryFragment)[]): this;
  /** Turns the query into a COUNT over the given fields (default `*`). */
  count(fields?: string | RawQueryFragment | (string | RawQueryFragment)[], distinct?: boolean): this;
  /** Alias of {@link from}, for INSERT readability. */
  into(tableName: string | RawQueryFragment | NativeQueryBuilder, options?: TableOptions): this;
  /** Sets the target table (string, raw fragment, or a sub-query builder). */
  from(tableName: string | RawQueryFragment | NativeQueryBuilder, options?: TableOptions): this;
  /** Sets the pre-rendered WHERE condition (replaces any previous one). */
  where(sql: string, params: unknown[]): this;
  /** Sets the pre-rendered HAVING condition (replaces any previous one). */
  having(sql: string, params: unknown[]): this;
  groupBy(groupBy: (string | RawQueryFragment)[]): this;
  /** Appends a pre-rendered join fragment. */
  join(sql: string, params: unknown[]): this;
  /** Sets the pre-rendered ORDER BY expression (without the keyword). */
  orderBy(orderBy: string): this;
  /**
   * The sub-query is compiled eagerly at call time — later mutations to the
   * sub-query builder will not be reflected in this CTE.
   */
  with(name: string, query: NativeQueryBuilder | RawQueryFragment, options?: CteOptions): this;
  /**
   * Adds a recursive CTE (`WITH RECURSIVE` on PostgreSQL/MySQL/SQLite, plain `WITH` on MSSQL).
   * The sub-query is compiled eagerly — later mutations will not be reflected.
   */
  withRecursive(name: string, query: NativeQueryBuilder | RawQueryFragment, options?: CteOptions): this;
  private addCte;
  /** Renders the fully-formatted SQL with parameters interpolated by the platform. */
  toString(): string;
  /** Renders the query into SQL with `?` placeholders plus the parameter list. */
  compile(): {
    sql: string;
    params: unknown[];
  };
  protected addLockClause(): void;
  protected addOnConflictClause(): void;
  /** Joins accumulated parts and applies the optional wrap prefix/suffix. */
  protected combineParts(): {
    sql: string;
    params: unknown[];
  };
  limit(limit: number): this;
  offset(offset: number): this;
  /** Sets the INSERT payload (replaces previous data). */
  insert(data: Dictionary): this;
  /** Merges data into the UPDATE payload (cumulative across calls). */
  update(data: Dictionary): this;
  delete(): this;
  truncate(): this;
  distinct(): this;
  distinctOn(fields: string[]): this;
  /** Note: returns the clause object (not `this`) so callers can mutate it later. */
  onConflict(options: OnConflictClause): OnConflictClause;
  returning(fields: (string | RawQueryFragment | [name: string, type: unknown])[]): this;
  lockMode(lockMode: LockMode, lockTables?: string[]): this;
  comment(comment: string | string[]): this;
  hintComment(comment: string | string[]): this;
  setFlags(flags: Set<QueryFlag>): this;
  /** Removes a single accumulated clause from the options bag. */
  clear(clause: keyof Options): this;
  wrap(prefix: string, suffix: string): this;
  /** Wraps the query as `(...) as alias` for use as a sub-query. */
  as(alias: string): this;
  /** Compiles and returns the query as a raw fragment. */
  toRaw(): RawQueryFragment;
  protected compileSelect(): void;
  protected getFields(): string;
  protected compileInsert(): void;
  protected addOutputClause(type: 'inserted' | 'deleted'): void;
  protected processInsertData(): string[];
  protected compileUpdate(): void;
  protected compileDelete(): void;
  protected compileTruncate(): void;
  protected addHintComment(): void;
  protected compileCtes(): void;
  protected getCteKeyword(hasRecursive: boolean): string;
  protected getTableName(): string;
  protected quote(id: string | RawQueryFragment | NativeQueryBuilder): string;
}
export {};

490
node_modules/@mikro-orm/sql/query/NativeQueryBuilder.js generated vendored Normal file
View File

@@ -0,0 +1,490 @@
import { LockMode, raw, RawQueryFragment, Utils } from '@mikro-orm/core';
import { QueryType } from './enums.js';
/** @internal */
export class NativeQueryBuilder {
platform;
type;
parts = [];
params = [];
options = {};
constructor(platform) {
this.platform = platform;
}
select(fields) {
this.type = QueryType.SELECT;
this.options.select ??= [];
this.options.select.push(...Utils.asArray(fields));
return this;
}
count(fields = '*', distinct) {
this.type = QueryType.COUNT;
this.options.select = Utils.asArray(fields);
this.options.distinct = distinct;
return this;
}
into(tableName, options) {
return this.from(tableName, options);
}
from(tableName, options) {
if (tableName instanceof NativeQueryBuilder) {
tableName = tableName.toRaw();
}
if (typeof tableName === 'string') {
const asKeyword = this.platform.usesAsKeyword() ? ' as ' : ' ';
const alias = options?.alias ? `${asKeyword}${this.platform.quoteIdentifier(options.alias)}` : '';
const schema =
options?.schema && options.schema !== this.platform.getDefaultSchemaName() ? `${options.schema}.` : '';
tableName = this.quote(schema + tableName) + alias;
}
this.options.tableName = tableName;
this.options.indexHint = options?.indexHint;
return this;
}
where(sql, params) {
this.options.where = { sql, params };
return this;
}
having(sql, params) {
this.options.having = { sql, params };
return this;
}
groupBy(groupBy) {
this.options.groupBy = groupBy;
return this;
}
join(sql, params) {
this.options.joins ??= [];
this.options.joins.push({ sql, params });
return this;
}
orderBy(orderBy) {
this.options.orderBy = orderBy;
return this;
}
/**
* The sub-query is compiled eagerly at call time — later mutations to the
* sub-query builder will not be reflected in this CTE.
*/
with(name, query, options) {
return this.addCte(name, query, options);
}
/**
* Adds a recursive CTE (`WITH RECURSIVE` on PostgreSQL/MySQL/SQLite, plain `WITH` on MSSQL).
* The sub-query is compiled eagerly — later mutations will not be reflected.
*/
withRecursive(name, query, options) {
return this.addCte(name, query, options, true);
}
addCte(name, query, options, recursive) {
this.options.ctes ??= [];
if (this.options.ctes.some(cte => cte.name === name)) {
throw new Error(`CTE with name '${name}' already exists`);
}
const { sql, params } =
query instanceof NativeQueryBuilder ? query.compile() : { sql: query.sql, params: [...query.params] };
this.options.ctes.push({
name,
sql,
params,
recursive,
columns: options?.columns,
materialized: options?.materialized,
});
return this;
}
toString() {
const { sql, params } = this.compile();
return this.platform.formatQuery(sql, params);
}
compile() {
if (!this.type) {
throw new Error('No query type provided');
}
this.parts.length = 0;
this.params.length = 0;
if (this.options.comment) {
this.parts.push(...this.options.comment.map(comment => `/* ${comment} */`));
}
this.compileCtes();
switch (this.type) {
case QueryType.SELECT:
case QueryType.COUNT:
this.compileSelect();
break;
case QueryType.INSERT:
this.compileInsert();
break;
case QueryType.UPDATE:
this.compileUpdate();
break;
case QueryType.DELETE:
this.compileDelete();
break;
case QueryType.TRUNCATE:
this.compileTruncate();
break;
}
this.addOnConflictClause();
if (this.options.returning && this.platform.usesReturningStatement()) {
const fields = this.options.returning.map(field => this.quote(field));
this.parts.push(`returning ${fields.join(', ')}`);
}
this.addLockClause();
return this.combineParts();
}
addLockClause() {
if (!this.options.lockMode) {
return;
}
if (
[LockMode.PESSIMISTIC_READ, LockMode.PESSIMISTIC_PARTIAL_READ, LockMode.PESSIMISTIC_READ_OR_FAIL].includes(
this.options.lockMode,
)
) {
this.parts.push('for share');
}
if (
[LockMode.PESSIMISTIC_WRITE, LockMode.PESSIMISTIC_PARTIAL_WRITE, LockMode.PESSIMISTIC_WRITE_OR_FAIL].includes(
this.options.lockMode,
)
) {
this.parts.push('for update');
}
if (this.options.lockTables?.length) {
const fields = this.options.lockTables.map(field => this.quote(field));
this.parts.push(`of ${fields.join(', ')}`);
}
if ([LockMode.PESSIMISTIC_PARTIAL_READ, LockMode.PESSIMISTIC_PARTIAL_WRITE].includes(this.options.lockMode)) {
this.parts.push('skip locked');
}
if ([LockMode.PESSIMISTIC_READ_OR_FAIL, LockMode.PESSIMISTIC_WRITE_OR_FAIL].includes(this.options.lockMode)) {
this.parts.push('nowait');
}
}
addOnConflictClause() {
const clause = this.options.onConflict;
if (!clause) {
return;
}
this.parts.push('on conflict');
if (clause.fields instanceof RawQueryFragment) {
this.parts.push(clause.fields.sql);
this.params.push(...clause.fields.params);
} else if (clause.fields.length > 0) {
const fields = clause.fields.map(field => this.quote(field));
this.parts.push(`(${fields.join(', ')})`);
}
if (clause.ignore) {
this.parts.push('do nothing');
}
if (Utils.isObject(clause.merge)) {
this.parts.push('do update set');
const fields = Object.keys(clause.merge).map(field => {
this.params.push(clause.merge[field]);
return `${this.quote(field)} = ?`;
});
this.parts.push(fields.join(', '));
} else if (clause.merge) {
this.parts.push('do update set');
if (clause.merge.length) {
const fields = clause.merge.map(field => `${this.quote(field)} = excluded.${this.quote(field)}`);
this.parts.push(fields.join(', '));
} else {
const dataAsArray = Utils.asArray(this.options.data);
const keys = Object.keys(dataAsArray[0]);
const fields = keys.map(field => `${this.quote(field)} = excluded.${this.quote(field)}`);
this.parts.push(fields.join(', '));
}
}
if (clause.where) {
this.parts.push(`where ${clause.where.sql}`);
this.params.push(...clause.where.params);
}
}
combineParts() {
let sql = this.parts.join(' ');
if (this.options.wrap) {
const [a, b] = this.options.wrap;
sql = `${a}${sql}${b}`;
}
return { sql, params: this.params };
}
limit(limit) {
this.options.limit = limit;
return this;
}
offset(offset) {
this.options.offset = offset;
return this;
}
insert(data) {
this.type = QueryType.INSERT;
this.options.data = data;
return this;
}
update(data) {
this.type = QueryType.UPDATE;
this.options.data ??= {};
Object.assign(this.options.data, data);
return this;
}
delete() {
this.type = QueryType.DELETE;
return this;
}
truncate() {
this.type = QueryType.TRUNCATE;
return this;
}
distinct() {
this.options.distinct = true;
return this;
}
distinctOn(fields) {
this.options.distinctOn = fields;
return this;
}
onConflict(options) {
this.options.onConflict = options;
return options;
}
returning(fields) {
this.options.returning = fields;
return this;
}
lockMode(lockMode, lockTables) {
this.options.lockMode = lockMode;
this.options.lockTables = lockTables;
return this;
}
comment(comment) {
this.options.comment ??= [];
this.options.comment.push(...Utils.asArray(comment));
return this;
}
hintComment(comment) {
this.options.hintComment ??= [];
this.options.hintComment.push(...Utils.asArray(comment));
return this;
}
setFlags(flags) {
this.options.flags = flags;
return this;
}
clear(clause) {
delete this.options[clause];
return this;
}
wrap(prefix, suffix) {
this.options.wrap = [prefix, suffix];
return this;
}
as(alias) {
this.wrap('(', `) as ${this.platform.quoteIdentifier(alias)}`);
return this;
}
toRaw() {
const { sql, params } = this.compile();
return raw(sql, params);
}
compileSelect() {
this.parts.push('select');
this.addHintComment();
this.parts.push(`${this.getFields()} from ${this.getTableName()}`);
if (this.options.joins) {
for (const join of this.options.joins) {
this.parts.push(join.sql);
this.params.push(...join.params);
}
}
if (this.options.where?.sql.trim()) {
this.parts.push(`where ${this.options.where.sql}`);
this.options.where.params.forEach(p => this.params.push(p));
}
if (this.options.groupBy) {
const fields = this.options.groupBy.map(field => this.quote(field));
this.parts.push(`group by ${fields.join(', ')}`);
}
if (this.options.having) {
this.parts.push(`having ${this.options.having.sql}`);
this.params.push(...this.options.having.params);
}
if (this.options.orderBy) {
this.parts.push(`order by ${this.options.orderBy}`);
}
if (this.options.limit != null) {
this.parts.push(`limit ?`);
this.params.push(this.options.limit);
}
if (this.options.offset != null) {
this.parts.push(`offset ?`);
this.params.push(this.options.offset);
}
}
getFields() {
if (!this.options.select || this.options.select.length === 0) {
throw new Error('No fields selected');
}
let fields = this.options.select.map(field => this.quote(field)).join(', ');
if (this.options.distinct) {
fields = `distinct ${fields}`;
} else if (this.options.distinctOn) {
fields = `distinct on (${this.options.distinctOn.map(field => this.quote(field)).join(', ')}) ${fields}`;
}
if (this.type === QueryType.COUNT) {
fields = `count(${fields}) as ${this.quote('count')}`;
}
return fields;
}
compileInsert() {
if (!this.options.data) {
throw new Error('No data provided');
}
this.parts.push('insert');
this.addHintComment();
this.parts.push(`into ${this.getTableName()}`);
if (Object.keys(this.options.data).length === 0) {
this.addOutputClause('inserted');
this.parts.push('default values');
return;
}
const parts = this.processInsertData();
this.parts.push(parts.join(', '));
}
addOutputClause(type) {
if (this.options.returning && this.platform.usesOutputStatement()) {
const fields = this.options.returning.map(field => `${type}.${this.quote(field)}`);
this.parts.push(`output ${fields.join(', ')}`);
}
}
processInsertData() {
const dataAsArray = Utils.asArray(this.options.data);
const keys = Object.keys(dataAsArray[0]);
const values = keys.map(() => '?');
const parts = [];
this.parts.push(`(${keys.map(key => this.quote(key)).join(', ')})`);
this.addOutputClause('inserted');
this.parts.push('values');
for (const data of dataAsArray) {
for (const key of keys) {
if (typeof data[key] === 'undefined') {
this.params.push(this.platform.usesDefaultKeyword() ? raw('default') : null);
} else {
this.params.push(data[key]);
}
}
parts.push(`(${values.join(', ')})`);
}
return parts;
}
compileUpdate() {
if (!this.options.data || Object.keys(this.options.data).length === 0) {
throw new Error('No data provided');
}
this.parts.push('update');
this.addHintComment();
this.parts.push(this.getTableName());
if (this.options.joins) {
for (const join of this.options.joins) {
this.parts.push(join.sql);
this.params.push(...join.params);
}
}
this.parts.push('set');
if (this.options.data) {
const parts = [];
for (const key of Object.keys(this.options.data)) {
parts.push(`${this.quote(key)} = ?`);
this.params.push(this.options.data[key]);
}
this.parts.push(parts.join(', '));
}
this.addOutputClause('inserted');
if (this.options.where?.sql.trim()) {
this.parts.push(`where ${this.options.where.sql}`);
this.params.push(...this.options.where.params);
}
}
compileDelete() {
this.parts.push('delete');
this.addHintComment();
this.parts.push(`from ${this.getTableName()}`);
this.addOutputClause('deleted');
if (this.options.where?.sql.trim()) {
this.parts.push(`where ${this.options.where.sql}`);
this.params.push(...this.options.where.params);
}
}
compileTruncate() {
const sql = `truncate table ${this.getTableName()}`;
this.parts.push(sql);
}
addHintComment() {
if (this.options.hintComment) {
this.parts.push(`/*+ ${this.options.hintComment.join(' ')} */`);
}
}
compileCtes() {
const ctes = this.options.ctes;
if (!ctes || ctes.length === 0) {
return;
}
const hasRecursive = ctes.some(cte => cte.recursive);
const keyword = this.getCteKeyword(hasRecursive);
const cteParts = [];
for (const cte of ctes) {
let part = this.quote(cte.name);
if (cte.columns?.length) {
part += ` (${cte.columns.map(c => this.quote(c)).join(', ')})`;
}
part += ' as';
if (cte.materialized === true) {
part += ' materialized';
} else if (cte.materialized === false) {
part += ' not materialized';
}
part += ` (${cte.sql})`;
this.params.push(...cte.params);
cteParts.push(part);
}
this.parts.push(`${keyword} ${cteParts.join(', ')}`);
}
getCteKeyword(hasRecursive) {
return hasRecursive ? 'with recursive' : 'with';
}
getTableName() {
if (!this.options.tableName) {
throw new Error('No table name provided');
}
const indexHint = this.options.indexHint ? ' ' + this.options.indexHint : '';
if (this.options.tableName instanceof RawQueryFragment) {
this.params.push(...this.options.tableName.params);
return this.options.tableName.sql + indexHint;
}
return this.options.tableName + indexHint;
}
quote(id) {
if (id instanceof RawQueryFragment) {
return this.platform.formatQuery(id.sql, id.params);
}
if (id instanceof NativeQueryBuilder) {
const { sql, params } = id.compile();
return this.platform.formatQuery(sql, params);
}
if (id.endsWith('.*')) {
const schema = this.platform.quoteIdentifier(id.substring(0, id.indexOf('.')));
return schema + '.*';
}
if (id.toLowerCase().includes(' as ')) {
const parts = id.split(/ as /i);
const a = this.platform.quoteIdentifier(parts[0]);
const b = this.platform.quoteIdentifier(parts[1]);
const asKeyword = this.platform.usesAsKeyword() ? ' as ' : ' ';
return `${a}${asKeyword}${b}`;
}
if (id === '*') {
return id;
}
return this.platform.quoteIdentifier(id);
}
}

View File

@@ -0,0 +1,19 @@
import { CriteriaNode } from './CriteriaNode.js';
import type { ICriteriaNodeProcessOptions, IQueryBuilder } from '../typings.js';
/**
 * Criteria-tree node representing an object-shaped condition; processing may
 * auto-join relations and rewrites collection operators into sub-queries.
 * @internal
 */
export declare class ObjectCriteriaNode<T extends object> extends CriteriaNode<T> {
  /** Converts this node (and children) into a query-builder condition object. */
  process(qb: IQueryBuilder<T>, options?: ICriteriaNodeProcessOptions): any;
  /** True when this node or any child is marked strict. */
  isStrict(): boolean;
  /** Recursively unwraps child nodes back into a plain payload object. */
  unwrap(): any;
  /** Predicts whether process() would add a join, without mutating the builder. */
  willAutoJoin(qb: IQueryBuilder<T>, alias?: string, options?: ICriteriaNodeProcessOptions): boolean;
  /** True when the child payload should be flattened into the parent condition. */
  shouldInline(payload: any): boolean;
  private getChildKey;
  private inlineArrayChildPayload;
  private inlineChildPayload;
  private inlineCondition;
  private shouldAutoJoin;
  private autoJoin;
  private isPrefixed;
}

324
node_modules/@mikro-orm/sql/query/ObjectCriteriaNode.js generated vendored Normal file
View File

@@ -0,0 +1,324 @@
import {
ALIAS_REPLACEMENT,
GroupOperator,
QueryFlag,
raw,
RawQueryFragment,
ReferenceKind,
Utils,
} from '@mikro-orm/core';
import { CriteriaNode } from './CriteriaNode.js';
import { JoinType, QueryType } from './enums.js';
// Operators valid only on collection (to-many) properties; handled via sub-queries.
const COLLECTION_OPERATORS = ['$some', '$none', '$every', '$size'];
/**
 * Criteria-tree node for object-shaped conditions. process() may auto-join
 * relations, rewrites collection operators into IN/NOT IN sub-queries, and
 * flattens child payloads into the final condition object.
 * @internal
 */
export class ObjectCriteriaNode extends CriteriaNode {
  process(qb, options) {
    // to-one relations may reuse joins that were added for populate hints
    const matchPopulateJoins =
      options?.matchPopulateJoins ||
      (this.prop && [ReferenceKind.MANY_TO_ONE, ReferenceKind.ONE_TO_ONE].includes(this.prop.kind));
    const nestedAlias = qb.getAliasForJoinPath(this.getPath(options), { ...options, matchPopulateJoins });
    const ownerAlias = options?.alias || qb.alias;
    const keys = Utils.getObjectQueryKeys(this.payload);
    let alias = options?.alias;
    if (nestedAlias) {
      alias = nestedAlias;
    }
    if (this.shouldAutoJoin(qb, nestedAlias)) {
      if (keys.some(k => COLLECTION_OPERATORS.includes(k))) {
        if (![ReferenceKind.MANY_TO_MANY, ReferenceKind.ONE_TO_MANY].includes(this.prop.kind)) {
          // ignore collection operators when used on a non-relational property - this can happen when they get into
          // populateWhere via `infer` on m:n properties with select-in strategy
          if (this.parent?.parent) {
            // we validate only usage on top level
            return {};
          }
          throw new Error(
            `Collection operators can be used only inside a collection property context, but it was used for ${this.getPath()}.`,
          );
        }
        // each collection operator becomes a `pk in/not in (sub-query)` condition
        const $and = [];
        const knownKey =
          [ReferenceKind.SCALAR, ReferenceKind.MANY_TO_ONE, ReferenceKind.EMBEDDED].includes(this.prop.kind) ||
          (this.prop.kind === ReferenceKind.ONE_TO_ONE && this.prop.owner);
        const parentMeta = this.metadata.find(this.parent.entityName);
        const primaryKeys = parentMeta.primaryKeys.map(pk => {
          return [QueryType.SELECT, QueryType.COUNT].includes(qb.type) ? `${knownKey ? alias : ownerAlias}.${pk}` : pk;
        });
        for (const key of keys) {
          if (typeof key !== 'string' || !COLLECTION_OPERATORS.includes(key)) {
            throw new Error('Mixing collection operators with other filters is not allowed.');
          }
          const payload = this.payload[key].unwrap();
          const qb2 = qb.clone(true, ['schema']);
          const joinAlias = qb2.getNextAlias(this.prop.targetMeta.class);
          // sub-query selecting parent PKs; $size needs a left join so zero-sized rows survive
          const sub = qb2
            .from(parentMeta.class)
            // eslint-disable-next-line no-unexpected-multiline
            [key === '$size' ? 'leftJoin' : 'innerJoin'](this.key, joinAlias)
            .select(parentMeta.primaryKeys);
          if (key === '$size') {
            // plain number is shorthand for { $eq: number }
            const sizeCondition = typeof payload === 'number' ? { $eq: payload } : payload;
            const pks = this.prop.referencedColumnNames;
            const countExpr = raw(
              `count(${pks.map(() => '??').join(', ')})`,
              pks.map(pk => `${joinAlias}.${pk}`),
            );
            sub.groupBy(parentMeta.primaryKeys);
            sub.having({
              $and: Object.keys(sizeCondition).map(op => ({ [countExpr]: { [op]: sizeCondition[op] } })),
            });
          } else if (key === '$every') {
            // $every == "no element violates the condition" → negate and exclude matches
            sub.where({ $not: { [this.key]: payload } });
          } else {
            sub.where({ [this.key]: payload });
          }
          // $some/$size → IN, $none/$every → NOT IN
          const op = ['$size', '$some'].includes(key) ? '$in' : '$nin';
          $and.push({
            [Utils.getPrimaryKeyHash(primaryKeys)]: { [op]: sub.getNativeQuery().toRaw() },
          });
        }
        if ($and.length === 1) {
          return $and[0];
        }
        return { $and };
      }
      alias = this.autoJoin(qb, ownerAlias, options);
    }
    if (this.prop && nestedAlias) {
      const toOneProperty = [ReferenceKind.MANY_TO_ONE, ReferenceKind.ONE_TO_ONE].includes(this.prop.kind);
      // if the property is nullable and the filter is strict, we need to use left join, so we mimic the inner join behaviour
      // with an exclusive condition on the join columns:
      // - if the owning column is null, the row is missing, we don't apply the filter
      // - if the target column is not null, the row is matched, we apply the filter
      if (toOneProperty && this.prop.nullable && this.isStrict()) {
        const key = this.prop.owner ? this.prop.name : this.prop.referencedPKs;
        qb.andWhere({
          $or: [
            { [ownerAlias + '.' + key]: null },
            { [nestedAlias + '.' + Utils.getPrimaryKeyHash(this.prop.referencedPKs)]: { $ne: null } },
          ],
        });
      }
    }
    // fold every child node into a single condition object
    return keys.reduce((o, field) => {
      const childNode = this.payload[field];
      const payload = childNode.process(qb, { ...options, alias: this.prop ? alias : ownerAlias });
      const operator = Utils.isOperator(field);
      const isRawField = RawQueryFragment.isKnownFragmentSymbol(field);
      // we need to keep the prefixing for formulas otherwise we would lose aliasing context when nesting inside group operators
      const virtual = childNode.prop?.persist === false && !childNode.prop?.formula && !!options?.type;
      // if key is missing, we are inside group operator and we need to prefix with alias
      const primaryKey = this.key && this.metadata.find(this.entityName)?.primaryKeys.includes(field);
      const isToOne =
        childNode.prop && [ReferenceKind.MANY_TO_ONE, ReferenceKind.ONE_TO_ONE].includes(childNode.prop.kind);
      if (childNode.shouldInline(payload)) {
        const childAlias = qb.getAliasForJoinPath(childNode.getPath(), { preferNoBranch: isToOne, ...options });
        const a = qb.helper.isTableNameAliasRequired(qb.type) ? alias : undefined;
        this.inlineChildPayload(o, payload, field, a, childAlias);
      } else if (childNode.shouldRename(payload)) {
        this.inlineCondition(childNode.renameFieldToPK(qb, alias), o, payload);
      } else if (isRawField) {
        const rawField = RawQueryFragment.getKnownFragment(field);
        o[raw(rawField.sql.replaceAll(ALIAS_REPLACEMENT, alias), rawField.params)] = payload;
      } else if (!childNode.validate && !childNode.prop && !field.includes('.') && !operator) {
        // wrap unknown fields in raw() to prevent alias prefixing (e.g. raw SQL aliases in HAVING)
        // use '??' placeholder to properly quote the identifier
        o[raw('??', [field])] = payload;
      } else if (
        primaryKey ||
        virtual ||
        operator ||
        field.includes('.') ||
        ![QueryType.SELECT, QueryType.COUNT].includes(qb.type)
      ) {
        this.inlineCondition(field.replaceAll(ALIAS_REPLACEMENT, alias), o, payload);
      } else {
        // default: prefix a plain property with the resolved alias
        this.inlineCondition(`${alias ?? qb.alias}.${field}`, o, payload);
      }
      return o;
    }, {});
  }
  // strictness propagates upwards from any child node
  isStrict() {
    return (
      this.strict ||
      Utils.getObjectQueryKeys(this.payload).some(key => {
        return this.payload[key].isStrict();
      })
    );
  }
  // rebuild the plain payload object from child nodes
  unwrap() {
    return Utils.getObjectQueryKeys(this.payload).reduce((o, field) => {
      o[field] = this.payload[field].unwrap();
      return o;
    }, {});
  }
  // mirrors process() join decisions without mutating the query builder
  willAutoJoin(qb, alias, options) {
    const nestedAlias = qb.getAliasForJoinPath(this.getPath(options), options);
    const ownerAlias = alias || qb.alias;
    const keys = Utils.getObjectQueryKeys(this.payload);
    if (nestedAlias) {
      alias = nestedAlias;
    }
    if (this.shouldAutoJoin(qb, nestedAlias)) {
      // collection operators are resolved via sub-queries, not joins
      return !keys.some(k => COLLECTION_OPERATORS.includes(k));
    }
    return keys.some(field => {
      const childNode = this.payload[field];
      return childNode.willAutoJoin(qb, this.prop ? alias : ownerAlias, options);
    });
  }
  // inline only non-scalar relation payloads that are not pure operator objects
  shouldInline(payload) {
    const rawField = RawQueryFragment.isKnownFragmentSymbol(this.key);
    const scalar = Utils.isPrimaryKey(payload) || payload instanceof RegExp || payload instanceof Date || rawField;
    const operator =
      Utils.isObject(payload) &&
      Utils.getObjectQueryKeys(payload).every(k => {
        if (k === '$not' && Utils.isPlainObject(payload[k])) {
          // $not wrapping non-operator conditions (entity props) should be inlined
          return Utils.getObjectQueryKeys(payload[k]).every(ik => Utils.isOperator(ik, false));
        }
        return Utils.isOperator(k, false);
      });
    return !!this.prop && this.prop.kind !== ReferenceKind.SCALAR && !scalar && !operator;
  }
  // map a referenced PK back to the owning side's join column when no child alias exists
  getChildKey(k, prop, childAlias, alias) {
    const idx = prop.referencedPKs.indexOf(k);
    return idx !== -1 && !childAlias && ![ReferenceKind.ONE_TO_MANY, ReferenceKind.MANY_TO_MANY].includes(prop.kind)
      ? this.aliased(prop.joinColumns[idx], alias)
      : k;
  }
  // inline a group-operator array ($and/$or) coming from a child payload
  inlineArrayChildPayload(obj, payload, k, prop, childAlias, alias) {
    const key = this.getChildKey(k, prop, childAlias);
    const value = payload.map(child =>
      Utils.getObjectQueryKeys(child).reduce((inner, childKey) => {
        const key =
          RawQueryFragment.isKnownFragmentSymbol(childKey) || this.isPrefixed(childKey) || Utils.isOperator(childKey)
            ? childKey
            : this.aliased(childKey, childAlias);
        inner[key] = child[childKey];
        return inner;
      }, {}),
    );
    this.inlineCondition(key, obj, value);
  }
  // flatten a child payload into the parent condition object `o`
  inlineChildPayload(o, payload, field, alias, childAlias) {
    const prop = this.metadata.find(this.entityName).properties[field];
    for (const k of Utils.getObjectQueryKeys(payload)) {
      if (RawQueryFragment.isKnownFragmentSymbol(k)) {
        o[k] = payload[k];
      } else if (
        k === '$not' &&
        Utils.isPlainObject(payload[k]) &&
        Utils.getObjectQueryKeys(payload[k]).some(ik => !Utils.isOperator(ik, false))
      ) {
        // $not wraps entity conditions (from auto-join), inline at current level
        this.inlineCondition(k, o, payload[k]);
      } else if (Utils.isOperator(k, false)) {
        // operators attach to the parent field itself
        const tmp = payload[k];
        delete payload[k];
        o[this.aliased(field, alias)] = { [k]: tmp, ...o[this.aliased(field, alias)] };
      } else if (k in GroupOperator && Array.isArray(payload[k])) {
        this.inlineArrayChildPayload(o, payload[k], k, prop, childAlias, alias);
      } else if (this.isPrefixed(k) || Utils.isOperator(k) || !childAlias) {
        const key = this.getChildKey(k, prop, childAlias, alias);
        this.inlineCondition(key, o, payload[k]);
      } else {
        o[this.aliased(k, childAlias)] = payload[k];
      }
    }
  }
  // merge a key into `o`; duplicate keys get combined under $and to avoid clobbering
  inlineCondition(key, o, value) {
    if (!(key in o)) {
      o[key] = value;
      return;
    }
    /* v8 ignore next */
    if (key === '$and') {
      o.$and.push({ [key]: value });
      return;
    }
    const $and = o.$and ?? [];
    $and.push({ [key]: o[key] }, { [key]: value });
    delete o[key];
    o.$and = $and;
  }
  // decide whether processing this node requires adding a join to the builder
  shouldAutoJoin(qb, nestedAlias) {
    if (!this.prop || !this.parent) {
      return false;
    }
    const keys = Utils.getObjectQueryKeys(this.payload);
    // all keys already alias-prefixed with the root alias → no join needed
    if (keys.every(k => typeof k === 'string' && k.includes('.') && k.startsWith(`${qb.alias}.`))) {
      return false;
    }
    if (keys.some(k => COLLECTION_OPERATORS.includes(k))) {
      return true;
    }
    const meta = this.metadata.find(this.entityName);
    const embeddable = this.prop.kind === ReferenceKind.EMBEDDED;
    const knownKey =
      [ReferenceKind.SCALAR, ReferenceKind.MANY_TO_ONE, ReferenceKind.EMBEDDED].includes(this.prop.kind) ||
      (this.prop.kind === ReferenceKind.ONE_TO_ONE && this.prop.owner);
    const operatorKeys =
      knownKey &&
      keys.every(key => {
        if (key === '$not') {
          // $not wraps conditions like $and/$or, check if it wraps entity property conditions (needs auto-join)
          // vs simple operator conditions on the FK (doesn't need auto-join)
          const childPayload = this.payload[key].payload;
          if (Utils.isPlainObject(childPayload)) {
            return Utils.getObjectQueryKeys(childPayload).every(k => Utils.isOperator(k, false));
          }
        }
        return Utils.isOperator(key, false);
      });
    // conditions touching only PKs (or nested target PKs) can use the FK columns directly
    const primaryKeys =
      knownKey &&
      keys.every(key => {
        if (typeof key !== 'string' || !meta.primaryKeys.includes(key)) {
          return false;
        }
        if (
          !Utils.isPlainObject(this.payload[key].payload) ||
          ![ReferenceKind.ONE_TO_ONE, ReferenceKind.MANY_TO_ONE].includes(meta.properties[key].kind)
        ) {
          return true;
        }
        return Utils.getObjectQueryKeys(this.payload[key].payload).every(
          k => typeof k === 'string' && meta.properties[key].targetMeta.primaryKeys.includes(k),
        );
      });
    return !primaryKeys && !nestedAlias && !operatorKeys && !embeddable;
  }
  // add the join for this relation and return the alias used for it
  autoJoin(qb, alias, options) {
    const nestedAlias = qb.getNextAlias(this.prop?.pivotEntity ?? this.entityName);
    const rawField = RawQueryFragment.isKnownFragmentSymbol(this.key);
    const scalar =
      Utils.isPrimaryKey(this.payload) || this.payload instanceof RegExp || this.payload instanceof Date || rawField;
    const operator =
      Utils.isPlainObject(this.payload) &&
      Utils.getObjectQueryKeys(this.payload).every(k => Utils.isOperator(k, false));
    const field = `${alias}.${this.prop.name}`;
    const method = qb.hasFlag(QueryFlag.INFER_POPULATE) ? 'joinAndSelect' : 'join';
    const path = this.getPath();
    if (this.prop.kind === ReferenceKind.MANY_TO_MANY && (scalar || operator)) {
      // m:n with a scalar/operator payload only needs the pivot table
      qb.join(field, nestedAlias, undefined, JoinType.pivotJoin, path);
    } else {
      const prev = qb.state.fields?.slice();
      // non-nullable to-one relations can use inner join safely
      const toOneProperty = [ReferenceKind.MANY_TO_ONE, ReferenceKind.ONE_TO_ONE].includes(this.prop.kind);
      const joinType = toOneProperty && !this.prop.nullable ? JoinType.innerJoin : JoinType.leftJoin;
      qb[method](field, nestedAlias, undefined, joinType, path);
      if (!qb.hasFlag(QueryFlag.INFER_POPULATE)) {
        // joinAndSelect may have touched selected fields; restore them
        qb.state.fields = prev;
      }
    }
    if (options?.type !== 'orderBy') {
      qb.scheduleFilterCheck(path);
    }
    return nestedAlias;
  }
  // true when the field already carries an alias/table prefix (`foo.bar`)
  isPrefixed(field) {
    return !!/\w+\./.exec(field);
  }
}

1646
node_modules/@mikro-orm/sql/query/QueryBuilder.d.ts generated vendored Normal file

File diff suppressed because it is too large Load Diff

2312
node_modules/@mikro-orm/sql/query/QueryBuilder.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,166 @@
import {
type Dictionary,
type EntityData,
type EntityKey,
type EntityMetadata,
type EntityName,
type EntityProperty,
type FilterQuery,
type FlatQueryOrderMap,
type FormulaTable,
LockMode,
type QueryOrderMap,
Raw,
type RawQueryFragment,
type RawQueryFragmentSymbol,
} from '@mikro-orm/core';
import { JoinType, QueryType } from './enums.js';
import type { InternalField, JoinOptions } from '../typings.js';
import type { AbstractSqlDriver } from '../AbstractSqlDriver.js';
import type { NativeQueryBuilder } from './NativeQueryBuilder.js';
/**
 * Stateless-ish helper shared by the query builder: maps entity fields to
 * columns, builds join expressions, renders conditions/ordering, and handles
 * locking and upsert clauses on top of {@link NativeQueryBuilder}.
 * @internal
 */
export declare class QueryBuilderHelper {
  #private;
  constructor(
    entityName: EntityName,
    alias: string,
    aliasMap: Dictionary<Alias<any>>,
    subQueries: Dictionary<string>,
    driver: AbstractSqlDriver,
    tptAliasMap?: Dictionary<string>,
  );
  /**
   * For TPT inheritance, finds the correct alias for a property based on which entity owns it.
   * Returns the main alias if not a TPT property or if the property belongs to the main entity.
   */
  getTPTAliasForProperty(propName: string, defaultAlias: string): string;
  /** Maps an entity field expression to its quoted/aliased SQL column form. */
  mapper(field: string | Raw | RawQueryFragmentSymbol, type?: QueryType): string;
  mapper(
    field: string | Raw | RawQueryFragmentSymbol,
    type?: QueryType,
    value?: any,
    alias?: string | null,
    schema?: string,
  ): string;
  /** Converts entity data to column data (custom types, multi-insert handling). */
  processData(data: Dictionary, convertCustomTypes: boolean, multi?: boolean): any;
  /** Builds join metadata for a 1:1/1:m reference. */
  joinOneToReference(
    prop: EntityProperty,
    ownerAlias: string,
    alias: string,
    type: JoinType,
    cond?: Dictionary,
    schema?: string,
  ): JoinOptions;
  /** Builds join metadata for a m:1 reference. */
  joinManyToOneReference(
    prop: EntityProperty,
    ownerAlias: string,
    alias: string,
    type: JoinType,
    cond?: Dictionary,
    schema?: string,
  ): JoinOptions;
  /** Builds join metadata for a m:n reference (via the pivot table). */
  joinManyToManyReference(
    prop: EntityProperty,
    ownerAlias: string,
    alias: string,
    pivotAlias: string,
    type: JoinType,
    cond: Dictionary,
    path: string,
    schema?: string,
  ): Dictionary<JoinOptions>;
  /** Renders accumulated joins into the native query builder. */
  processJoins(qb: NativeQueryBuilder, joins: Dictionary<JoinOptions>, schema?: string, schemaOverride?: string): void;
  /** Renders a single join into its SQL fragment and parameters. */
  createJoinExpression(
    join: JoinOptions,
    joins: Dictionary<JoinOptions>,
    schema?: string,
    schemaOverride?: string,
  ): {
    sql: string;
    params: unknown[];
  };
  mapJoinColumns(type: QueryType, join: JoinOptions): (string | Raw)[];
  isOneToOneInverse(field: string, meta?: EntityMetadata): boolean;
  getTableName(entityName: EntityName): string;
  /**
   * Checks whether the RE can be rewritten to simple LIKE query
   */
  isSimpleRegExp(re: any): re is RegExp;
  /** Converts a simple RegExp to its LIKE pattern equivalent. */
  getRegExpParam(re: RegExp): string;
  appendOnConflictClause<T>(type: QueryType, onConflict: OnConflictClause<T>[], qb: NativeQueryBuilder): void;
  /** Renders a filter-query condition tree into the WHERE/HAVING clause. */
  appendQueryCondition(
    type: QueryType,
    cond: any,
    qb: NativeQueryBuilder,
    operator?: '$and' | '$or',
    method?: 'where' | 'having',
  ): void;
  /** Same as appendQueryCondition, but returns the SQL instead of appending it. */
  _appendQueryCondition(
    type: QueryType,
    cond: any,
    operator?: '$and' | '$or',
  ): {
    sql: string;
    params: unknown[];
  };
  private append;
  private appendQuerySubCondition;
  private processObjectSubCondition;
  private getValueReplacement;
  private getOperatorReplacement;
  /** Throws on malformed ORDER BY input. */
  validateQueryOrder<T>(orderBy: QueryOrderMap<T>): void;
  getQueryOrder(
    type: QueryType,
    orderBy: FlatQueryOrderMap | FlatQueryOrderMap[],
    populate: Dictionary<string>,
    collation?: string,
  ): string[];
  getQueryOrderFromObject(
    type: QueryType,
    orderBy: FlatQueryOrderMap,
    populate: Dictionary<string>,
    collation?: string,
  ): string[];
  /** Splits `alias.field` into [alias, field, remainder] parts. */
  splitField<T>(field: EntityKey<T>, greedyAlias?: boolean): [string, EntityKey<T>, string | undefined];
  /** Applies the lock clause for the given mode onto the native builder. */
  getLockSQL(
    qb: NativeQueryBuilder,
    lockMode: LockMode,
    lockTables?: string[],
    joinsMap?: Dictionary<JoinOptions>,
  ): void;
  /** Adds the optimistic-lock version bump to an UPDATE payload. */
  updateVersionProperty(qb: NativeQueryBuilder, data: Dictionary): void;
  private prefix;
  private appendGroupCondition;
  private isPrefixed;
  private fieldName;
  getProperty(field: string, alias?: string): EntityProperty | undefined;
  /** Whether the given query type requires alias-prefixing of column names. */
  isTableNameAliasRequired(type: QueryType): boolean;
  private processEmbeddedArrayCondition;
  private buildJsonArrayExists;
  private resolveEmbeddedProp;
  private buildEmbeddedArrayOperatorCondition;
  private processJsonElemMatch;
  /**
   * Shared logic for building WHERE conditions inside JSON array EXISTS subqueries.
   * Used by both embedded array queries (metadata-driven) and $elemMatch (type-inferred).
   */
  private buildArrayElementWhere;
  private inferJsonValueType;
  processOnConflictCondition(cond: FilterQuery<any>, schema?: string): FilterQuery<any>;
  createFormulaTable(alias: string, meta: EntityMetadata, schema?: string): FormulaTable;
}
export interface Alias<T> {
  /** SQL alias used for this entity in the query. */
  aliasName: string;
  /** Entity (name or class) this alias points at. */
  entityName: EntityName<T>;
  /** Resolved metadata for the aliased entity. */
  meta: EntityMetadata<T>;
  /** When aliasing a derived table, the sub-query (builder or raw fragment) it represents. */
  subQuery?: NativeQueryBuilder | RawQueryFragment;
  /** Explicit table name override — presumably bypasses metadata table resolution; confirm usage in QueryBuilder. */
  rawTableName?: string;
}
export interface OnConflictClause<T> {
  /** Conflict target columns (or a raw fragment). */
  fields: string[] | Raw;
  /** When set, conflicting rows are skipped rather than merged — confirm rendering against the driver. */
  ignore?: boolean;
  /** Data or fields to merge into conflicting rows (upsert update clause). */
  merge?: EntityData<T> | InternalField<T>[];
  /** Optional condition limiting which conflicting rows are updated. */
  where?: FilterQuery<T>;
}

1113
node_modules/@mikro-orm/sql/query/QueryBuilderHelper.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,10 @@
import { CriteriaNode } from './CriteriaNode.js';
import type { ICriteriaNodeProcessOptions, IQueryBuilder } from '../typings.js';
/**
 * Criteria node representing a scalar (leaf) condition value.
 * @internal
 */
export declare class ScalarCriteriaNode<T extends object> extends CriteriaNode<T> {
  /** Processes this scalar condition, auto-joining the targeted relation when required, and returns the payload. */
  process(qb: IQueryBuilder<T>, options?: ICriteriaNodeProcessOptions): any;
  /** Whether processing this node would trigger an automatic join. */
  willAutoJoin(qb: IQueryBuilder<T>, alias?: string, options?: ICriteriaNodeProcessOptions): boolean;
  private shouldJoin;
}

View File

@@ -0,0 +1,65 @@
import { ARRAY_OPERATORS, ReferenceKind } from '@mikro-orm/core';
import { CriteriaNode } from './CriteriaNode.js';
import { JoinType, QueryType } from './enums.js';
import { QueryBuilder } from './QueryBuilder.js';
/**
* @internal
*/
export class ScalarCriteriaNode extends CriteriaNode {
process(qb, options) {
const matchPopulateJoins =
options?.matchPopulateJoins ||
(this.prop && [ReferenceKind.MANY_TO_ONE, ReferenceKind.ONE_TO_ONE].includes(this.prop.kind));
const nestedAlias = qb.getAliasForJoinPath(this.getPath(options), { ...options, matchPopulateJoins });
if (this.shouldJoin(qb, nestedAlias)) {
const path = this.getPath();
const parentPath = this.parent.getPath(); // the parent is always there, otherwise `shouldJoin` would return `false`
const nestedAlias = qb.getAliasForJoinPath(path) || qb.getNextAlias(this.prop?.pivotEntity ?? this.entityName);
const field = this.aliased(this.prop.name, options?.alias);
const type = this.prop.kind === ReferenceKind.MANY_TO_MANY ? JoinType.pivotJoin : JoinType.leftJoin;
qb.join(field, nestedAlias, undefined, type, path);
// select the owner as virtual property when joining from 1:1 inverse side, but only if the parent is root entity
if (
this.prop.kind === ReferenceKind.ONE_TO_ONE &&
!parentPath.includes('.') &&
!qb.state.fields?.includes(field)
) {
qb.addSelect(field);
}
}
if (this.payload instanceof QueryBuilder) {
return this.payload.toRaw();
}
if (this.payload && typeof this.payload === 'object') {
const keys = Object.keys(this.payload).filter(
key => ARRAY_OPERATORS.includes(key) && Array.isArray(this.payload[key]),
);
for (const key of keys) {
this.payload[key] = JSON.stringify(this.payload[key]);
}
}
return this.payload;
}
willAutoJoin(qb, alias, options) {
return this.shouldJoin(qb, alias);
}
shouldJoin(qb, nestedAlias) {
if (
!this.parent ||
!this.prop ||
(nestedAlias && [QueryType.SELECT, QueryType.COUNT].includes(qb.type ?? QueryType.SELECT))
) {
return false;
}
switch (this.prop.kind) {
case ReferenceKind.ONE_TO_MANY:
return true;
case ReferenceKind.MANY_TO_MANY:
return true;
case ReferenceKind.ONE_TO_ONE:
return !this.prop.owner;
default:
return false; // SCALAR, MANY_TO_ONE
}
}
}

22
node_modules/@mikro-orm/sql/query/enums.d.ts generated vendored Normal file
View File

@@ -0,0 +1,22 @@
/** Type of SQL query to be generated. */
export declare enum QueryType {
  TRUNCATE = 'TRUNCATE',
  SELECT = 'SELECT',
  COUNT = 'COUNT',
  INSERT = 'INSERT',
  UPDATE = 'UPDATE',
  DELETE = 'DELETE',
  UPSERT = 'UPSERT',
}
/** Operators that apply to the embedded array column itself, not to individual elements. */
export declare const EMBEDDABLE_ARRAY_OPS: string[];
/** Type of SQL JOIN clause. */
export declare enum JoinType {
  leftJoin = 'left join',
  innerJoin = 'inner join',
  nestedLeftJoin = 'nested left join',
  nestedInnerJoin = 'nested inner join',
  /** Join through an M:N pivot table. */
  pivotJoin = 'pivot join',
  innerJoinLateral = 'inner join lateral',
  leftJoinLateral = 'left join lateral',
}

24
node_modules/@mikro-orm/sql/query/enums.js generated vendored Normal file
View File

@@ -0,0 +1,24 @@
/** Type of SQL query to be generated. */
export const QueryType = {
  TRUNCATE: 'TRUNCATE',
  SELECT: 'SELECT',
  COUNT: 'COUNT',
  INSERT: 'INSERT',
  UPDATE: 'UPDATE',
  DELETE: 'DELETE',
  UPSERT: 'UPSERT',
};
/** Operators that apply to the embedded array column itself, not to individual elements. */
export const EMBEDDABLE_ARRAY_OPS = ['$contains', '$contained', '$overlap'];
/** Type of SQL JOIN clause. */
export const JoinType = {
  leftJoin: 'left join',
  innerJoin: 'inner join',
  nestedLeftJoin: 'nested left join',
  nestedInnerJoin: 'nested inner join',
  pivotJoin: 'pivot join',
  innerJoinLateral: 'inner join lateral',
  leftJoinLateral: 'left join lateral',
};

10
node_modules/@mikro-orm/sql/query/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,10 @@
/** Barrel module re-exporting the query-builder API of this package. */
export * from './enums.js';
export * from './QueryBuilderHelper.js';
export * from './QueryBuilder.js';
export * from './CriteriaNode.js';
export * from './ArrayCriteriaNode.js';
export * from './ObjectCriteriaNode.js';
export * from './ScalarCriteriaNode.js';
export * from './CriteriaNodeFactory.js';
export * from './NativeQueryBuilder.js';
export * from './raw.js';

10
node_modules/@mikro-orm/sql/query/index.js generated vendored Normal file
View File

@@ -0,0 +1,10 @@
/** Barrel module re-exporting the query-builder API of this package. */
export * from './enums.js';
export * from './QueryBuilderHelper.js';
export * from './QueryBuilder.js';
export * from './CriteriaNode.js';
export * from './ArrayCriteriaNode.js';
export * from './ObjectCriteriaNode.js';
export * from './ScalarCriteriaNode.js';
export * from './CriteriaNodeFactory.js';
export * from './NativeQueryBuilder.js';
export * from './raw.js';

77
node_modules/@mikro-orm/sql/query/raw.d.ts generated vendored Normal file
View File

@@ -0,0 +1,77 @@
import { type AnyString, type Dictionary, type EntityKey, type RawQueryFragment } from '@mikro-orm/core';
import type { SelectQueryBuilder as KyselySelectQueryBuilder } from 'kysely';
/** @internal Type for QueryBuilder instances passed to raw() - uses toRaw to distinguish from Kysely QueryBuilder */
type QueryBuilderLike = {
  /** Returns the SQL string together with its bound parameters. */
  toQuery(): {
    sql: string;
    params: readonly unknown[];
  };
  /** Converts the builder to a raw query fragment; its presence marks an ORM builder (vs a Kysely one). */
  toRaw(): RawQueryFragment;
};
/**
* Creates raw SQL query fragment that can be assigned to a property or part of a filter. This fragment is represented
* by `RawQueryFragment` class instance that can be serialized to a string, so it can be used both as an object value
* and key. When serialized, the fragment key gets cached and only such cached key will be recognized by the ORM.
* This adds a runtime safety to the raw query fragments.
*
* > **`raw()` helper is required since v6 to use a raw fragment in your query, both through EntityManager and QueryBuilder.**
*
* ```ts
* // as a value
* await em.find(User, { time: raw('now()') });
*
* // as a key
* await em.find(User, { [raw('lower(name)')]: name.toLowerCase() });
*
* // value can be empty array
* await em.find(User, { [raw('(select 1 = 1)')]: [] });
* ```
*
* The `raw` helper supports several signatures, you can pass in a callback that receives the current property alias:
*
* ```ts
* await em.find(User, { [raw(alias => `lower(${alias}.name)`)]: name.toLowerCase() });
* ```
*
* You can also use the `sql` tagged template function, which works the same, but supports only the simple string signature:
*
* ```ts
* await em.find(User, { [sql`lower(name)`]: name.toLowerCase() });
* ```
*
* When using inside filters, you might have to use a callback signature to create new raw instance for every filter usage.
*
* ```ts
* @Filter({ name: 'long', cond: () => ({ [raw('length(perex)')]: { $gt: 10000 } }) })
* ```
*
* The `raw` helper can be used within indexes and uniques to write database-agnostic SQL expressions. In that case, you can use `'??'` to tag your database identifiers (table name, column names, index name, ...) inside your expression, and pass those identifiers as a second parameter to the `raw` helper. Internally, those will automatically be quoted according to the database in use:
*
* ```ts
* // On postgres, will produce: create index "index custom_idx_on_name" on "library.author" ("country")
* // On mysql, will produce: create index `index custom_idx_on_name` on `library.author` (`country`)
* @Index({ name: 'custom_idx_on_name', expression: (table, columns) => raw(`create index ?? on ?? (??)`, ['custom_idx_on_name', table, columns.name]) })
* @Entity({ schema: 'library' })
* export class Author { ... }
* ```
*
* You can also use the `quote` tag function to write database-agnostic SQL expressions. The end-result is the same as using the `raw` function regarding database identifiers quoting, only to have a more elegant expression syntax:
*
* ```ts
* @Index({ name: 'custom_idx_on_name', expression: (table, columns) => quote`create index ${'custom_idx_on_name'} on ${table} (${columns.name})` })
* @Entity({ schema: 'library' })
* export class Author { ... }
* ```
*/
export declare function raw<R = RawQueryFragment & symbol, T extends object = any>(
  sql:
    | QueryBuilderLike // MikroORM QueryBuilder (detected via its `toQuery()` method)
    | KyselySelectQueryBuilder<any, any, any> // Kysely builder (its `compile()` output is used)
    | EntityKey<T>
    | EntityKey<T>[]
    | AnyString
    | ((alias: string) => string) // callback receiving the current property alias
    | RawQueryFragment,
  params?: readonly unknown[] | Dictionary<unknown>, // positional or named bound parameters
): R;
export {};

67
node_modules/@mikro-orm/sql/query/raw.js generated vendored Normal file
View File

@@ -0,0 +1,67 @@
import { raw as raw_, Utils } from '@mikro-orm/core';
/**
* Creates raw SQL query fragment that can be assigned to a property or part of a filter. This fragment is represented
* by `RawQueryFragment` class instance that can be serialized to a string, so it can be used both as an object value
* and key. When serialized, the fragment key gets cached and only such cached key will be recognized by the ORM.
* This adds a runtime safety to the raw query fragments.
*
* > **`raw()` helper is required since v6 to use a raw fragment in your query, both through EntityManager and QueryBuilder.**
*
* ```ts
* // as a value
* await em.find(User, { time: raw('now()') });
*
* // as a key
* await em.find(User, { [raw('lower(name)')]: name.toLowerCase() });
*
* // value can be empty array
* await em.find(User, { [raw('(select 1 = 1)')]: [] });
* ```
*
* The `raw` helper supports several signatures, you can pass in a callback that receives the current property alias:
*
* ```ts
* await em.find(User, { [raw(alias => `lower(${alias}.name)`)]: name.toLowerCase() });
* ```
*
* You can also use the `sql` tagged template function, which works the same, but supports only the simple string signature:
*
* ```ts
* await em.find(User, { [sql`lower(name)`]: name.toLowerCase() });
* ```
*
* When using inside filters, you might have to use a callback signature to create new raw instance for every filter usage.
*
* ```ts
* @Filter({ name: 'long', cond: () => ({ [raw('length(perex)')]: { $gt: 10000 } }) })
* ```
*
* The `raw` helper can be used within indexes and uniques to write database-agnostic SQL expressions. In that case, you can use `'??'` to tag your database identifiers (table name, column names, index name, ...) inside your expression, and pass those identifiers as a second parameter to the `raw` helper. Internally, those will automatically be quoted according to the database in use:
*
* ```ts
* // On postgres, will produce: create index "index custom_idx_on_name" on "library.author" ("country")
* // On mysql, will produce: create index `index custom_idx_on_name` on `library.author` (`country`)
* @Index({ name: 'custom_idx_on_name', expression: (table, columns) => raw(`create index ?? on ?? (??)`, ['custom_idx_on_name', table, columns.name]) })
* @Entity({ schema: 'library' })
* export class Author { ... }
* ```
*
* You can also use the `quote` tag function to write database-agnostic SQL expressions. The end-result is the same as using the `raw` function regarding database identifiers quoting, only to have a more elegant expression syntax:
*
* ```ts
* @Index({ name: 'custom_idx_on_name', expression: (table, columns) => quote`create index ${'custom_idx_on_name'} on ${table} (${columns.name})` })
* @Entity({ schema: 'library' })
* export class Author { ... }
* ```
*/
/**
 * Wraps the core `raw` helper: query builders are first resolved to their SQL text and
 * bound parameters, everything else is passed through unchanged.
 */
export function raw(sql, params) {
  if (Utils.isObject(sql)) {
    // Kysely query builders expose `compile()` returning { sql, parameters }
    if ('compile' in sql) {
      const { sql: text, parameters } = sql.compile();
      return raw_(text, parameters);
    }
    // MikroORM query builders expose `toQuery()` returning { sql, params }
    if ('toQuery' in sql) {
      const { sql: text, params: values } = sql.toQuery();
      return raw_(text, values);
    }
  }
  // plain string, callback, key list or existing fragment
  return raw_(sql, params);
}

83
node_modules/@mikro-orm/sql/schema/DatabaseSchema.d.ts generated vendored Normal file
View File

@@ -0,0 +1,83 @@
import { type Configuration, type Dictionary, type EntityMetadata } from '@mikro-orm/core';
import { DatabaseTable } from './DatabaseTable.js';
import type { AbstractSqlConnection } from '../AbstractSqlConnection.js';
import type { DatabaseView } from '../typings.js';
import type { AbstractSqlPlatform } from '../AbstractSqlPlatform.js';
/**
* @internal
*/
export declare class DatabaseSchema {
  #private;
  readonly name: string;
  constructor(platform: AbstractSqlPlatform, name: string);
  /** Registers a new table (and its namespace) in this schema. */
  addTable(name: string, schema: string | undefined | null, comment?: string): DatabaseTable;
  getTables(): DatabaseTable[];
  /** @internal */
  setTables(tables: DatabaseTable[]): void;
  /** @internal */
  setNamespaces(namespaces: Set<string>): void;
  /** Looks up a table by simple or schema-qualified (`schema.table`) name. */
  getTable(name: string): DatabaseTable | undefined;
  hasTable(name: string): boolean;
  /** Registers a database view (optionally materialized) in this schema. */
  addView(
    name: string,
    schema: string | undefined | null,
    definition: string,
    materialized?: boolean,
    withData?: boolean,
  ): DatabaseView;
  getViews(): DatabaseView[];
  /** @internal */
  setViews(views: DatabaseView[]): void;
  getView(name: string): DatabaseView | undefined;
  hasView(name: string): boolean;
  /** Registers native enum definitions (used on PostgreSQL) and tracks their namespaces. */
  setNativeEnums(
    nativeEnums: Dictionary<{
      name: string;
      schema?: string;
      items: string[];
    }>,
  ): void;
  getNativeEnums(): Dictionary<{
    name: string;
    schema?: string;
    items: string[];
  }>;
  getNativeEnum(name: string): {
    name: string;
    schema?: string;
    items: string[];
  };
  hasNamespace(namespace: string): boolean;
  hasNativeEnum(name: string): boolean;
  getNamespaces(): string[];
  /** Introspects an existing database into a DatabaseSchema instance (tables, views, enums). */
  static create(
    connection: AbstractSqlConnection,
    platform: AbstractSqlPlatform,
    config: Configuration,
    schemaName?: string,
    schemas?: string[],
    takeTables?: (string | RegExp)[],
    skipTables?: (string | RegExp)[],
    skipViews?: (string | RegExp)[],
  ): Promise<DatabaseSchema>;
  /** Builds the target schema (tables, views, native enums, indexes, FKs, checks) from entity metadata. */
  static fromMetadata(
    metadata: EntityMetadata[],
    platform: AbstractSqlPlatform,
    config: Configuration,
    schemaName?: string,
    em?: any,
  ): DatabaseSchema;
  private static getViewDefinition;
  private static getSchemaName;
  /**
   * Add a foreign key from a TPT child entity's PK to its parent entity's PK.
   * This FK uses ON DELETE CASCADE to ensure child rows are deleted when parent is deleted.
   */
  private static addTPTForeignKey;
  private static matchName;
  private static isNameAllowed;
  private static isTableNameAllowed;
  private static shouldHaveColumn;
  toJSON(): Dictionary;
  /** Drops tables/views not belonging to the given schema, then removes namespaces left empty. */
  prune(schema: string | undefined, wildcardSchemaTables: string[]): void;
}

360
node_modules/@mikro-orm/sql/schema/DatabaseSchema.js generated vendored Normal file
View File

@@ -0,0 +1,360 @@
import { ReferenceKind, isRaw } from '@mikro-orm/core';
import { DatabaseTable } from './DatabaseTable.js';
/**
* @internal
*/
export class DatabaseSchema {
name;
#tables = [];
#views = [];
#namespaces = new Set();
#nativeEnums = {}; // for postgres
#platform;
constructor(platform, name) {
this.name = name;
this.#platform = platform;
}
addTable(name, schema, comment) {
const namespaceName = schema ?? this.name;
const table = new DatabaseTable(this.#platform, name, namespaceName);
table.nativeEnums = this.#nativeEnums;
table.comment = comment;
this.#tables.push(table);
if (namespaceName != null) {
this.#namespaces.add(namespaceName);
}
return table;
}
getTables() {
return this.#tables;
}
/** @internal */
setTables(tables) {
this.#tables = tables;
}
/** @internal */
setNamespaces(namespaces) {
this.#namespaces = namespaces;
}
getTable(name) {
return this.#tables.find(t => t.name === name || `${t.schema}.${t.name}` === name);
}
hasTable(name) {
return !!this.getTable(name);
}
addView(name, schema, definition, materialized, withData) {
const namespaceName = schema ?? this.name;
const view = { name, schema: namespaceName, definition, materialized, withData };
this.#views.push(view);
if (namespaceName != null) {
this.#namespaces.add(namespaceName);
}
return view;
}
getViews() {
return this.#views;
}
/** @internal */
setViews(views) {
this.#views = views;
}
getView(name) {
return this.#views.find(v => v.name === name || `${v.schema}.${v.name}` === name);
}
hasView(name) {
return !!this.getView(name);
}
setNativeEnums(nativeEnums) {
this.#nativeEnums = nativeEnums;
for (const nativeEnum of Object.values(nativeEnums)) {
if (nativeEnum.schema && nativeEnum.schema !== '*') {
this.#namespaces.add(nativeEnum.schema);
}
}
}
getNativeEnums() {
return this.#nativeEnums;
}
getNativeEnum(name) {
return this.#nativeEnums[name];
}
hasNamespace(namespace) {
return this.#namespaces.has(namespace);
}
hasNativeEnum(name) {
return name in this.#nativeEnums;
}
getNamespaces() {
return [...this.#namespaces];
}
static async create(connection, platform, config, schemaName, schemas, takeTables, skipTables, skipViews) {
const schema = new DatabaseSchema(platform, schemaName ?? config.get('schema') ?? platform.getDefaultSchemaName());
const allTables = await platform.getSchemaHelper().getAllTables(connection, schemas);
const parts = config.get('migrations').tableName.split('.');
const migrationsTableName = parts[1] ?? parts[0];
const migrationsSchemaName = parts.length > 1 ? parts[0] : config.get('schema', platform.getDefaultSchemaName());
const tables = allTables.filter(
t =>
this.isTableNameAllowed(t.table_name, takeTables, skipTables) &&
(t.table_name !== migrationsTableName || (t.schema_name && t.schema_name !== migrationsSchemaName)),
);
await platform
.getSchemaHelper()
.loadInformationSchema(schema, connection, tables, schemas && schemas.length > 0 ? schemas : undefined);
// Load views from database
await platform.getSchemaHelper().loadViews(schema, connection);
// Load materialized views (PostgreSQL only)
if (platform.supportsMaterializedViews()) {
await platform.getSchemaHelper().loadMaterializedViews(schema, connection, schemaName);
}
// Filter out skipped views
if (skipViews && skipViews.length > 0) {
schema.#views = schema.#views.filter(v => this.isNameAllowed(v.name, skipViews));
}
return schema;
}
static fromMetadata(metadata, platform, config, schemaName, em) {
const schema = new DatabaseSchema(platform, schemaName ?? config.get('schema'));
const nativeEnums = {};
const skipColumns = config.get('schemaGenerator').skipColumns || {};
for (const meta of metadata) {
// Skip view entities when collecting native enums
if (meta.view) {
continue;
}
for (const prop of meta.props) {
if (prop.nativeEnumName) {
let key = prop.nativeEnumName;
let enumName = prop.nativeEnumName;
let enumSchema = meta.schema ?? schema.name;
if (key.includes('.')) {
const [explicitSchema, ...parts] = prop.nativeEnumName.split('.');
enumName = parts.join('.');
key = enumName;
enumSchema = explicitSchema;
}
if (enumSchema && enumSchema !== '*' && enumSchema !== platform.getDefaultSchemaName()) {
key = enumSchema + '.' + key;
}
nativeEnums[key] = {
name: enumName,
schema: enumSchema,
items: prop.items?.map(val => '' + val) ?? [],
};
}
}
}
schema.setNativeEnums(nativeEnums);
for (const meta of metadata) {
// Handle view entities separately
if (meta.view) {
const viewDefinition = this.getViewDefinition(meta, em, platform);
if (viewDefinition) {
schema.addView(
meta.collection,
this.getSchemaName(meta, config, schemaName),
viewDefinition,
meta.materialized,
meta.withData,
);
}
continue;
}
const table = schema.addTable(meta.collection, this.getSchemaName(meta, config, schemaName));
table.comment = meta.comment;
// For TPT child entities, only use ownProps (properties defined in this entity only)
// For all other entities (including TPT root), use all props
const propsToProcess =
meta.inheritanceType === 'tpt' && meta.tptParent && meta.ownProps ? meta.ownProps : meta.props;
for (const prop of propsToProcess) {
if (!this.shouldHaveColumn(meta, prop, skipColumns)) {
continue;
}
table.addColumnFromProperty(prop, meta, config);
}
// For TPT child entities, always include the PK columns (they form the FK to parent)
if (meta.inheritanceType === 'tpt' && meta.tptParent) {
const pkProps = meta.primaryKeys.map(pk => meta.properties[pk]);
for (const pkProp of pkProps) {
// Only add if not already added (it might be in ownProps if defined in this entity)
if (!propsToProcess.includes(pkProp)) {
table.addColumnFromProperty(pkProp, meta, config);
}
// Child PK must not be autoincrement — it references the parent PK via FK
for (const field of pkProp.fieldNames) {
const col = table.getColumn(field);
if (col) {
col.autoincrement = false;
}
}
}
// Add FK from child PK to parent PK with ON DELETE CASCADE
this.addTPTForeignKey(table, meta, config, platform);
}
meta.indexes.forEach(index => table.addIndex(meta, index, 'index'));
meta.uniques.forEach(index => table.addIndex(meta, index, 'unique'));
// For TPT child entities, the PK is also defined here
const pkPropsForIndex =
meta.inheritanceType === 'tpt' && meta.tptParent
? meta.primaryKeys.map(pk => meta.properties[pk])
: meta.props.filter(prop => prop.primary);
table.addIndex(meta, { properties: pkPropsForIndex.map(prop => prop.name) }, 'primary');
for (const check of meta.checks) {
const columnName = check.property ? meta.properties[check.property].fieldNames[0] : undefined;
const expression = isRaw(check.expression)
? platform.formatQuery(check.expression.sql, check.expression.params)
: check.expression;
table.addCheck({
name: check.name,
expression,
definition: `check (${expression})`,
columnName,
});
}
}
return schema;
}
static getViewDefinition(meta, em, platform) {
if (typeof meta.expression === 'string') {
return meta.expression;
}
// Expression is a function, need to evaluate it
/* v8 ignore next */
if (!em) {
return undefined;
}
const result = meta.expression(em, {}, {});
// Async expressions are not supported for view entities
if (result && typeof result.then === 'function') {
throw new Error(
`View entity ${meta.className} expression returned a Promise. Async expressions are not supported for view entities.`,
);
}
/* v8 ignore next */
if (typeof result === 'string') {
return result;
}
/* v8 ignore next */
if (isRaw(result)) {
return platform.formatQuery(result.sql, result.params);
}
// Check if it's a QueryBuilder (has getFormattedQuery method)
if (result && typeof result.getFormattedQuery === 'function') {
return result.getFormattedQuery();
}
/* v8 ignore next - fallback for unknown result types */
return undefined;
}
static getSchemaName(meta, config, schema) {
return (meta.schema === '*' ? schema : meta.schema) ?? config.get('schema');
}
/**
* Add a foreign key from a TPT child entity's PK to its parent entity's PK.
* This FK uses ON DELETE CASCADE to ensure child rows are deleted when parent is deleted.
*/
static addTPTForeignKey(table, meta, config, platform) {
const parent = meta.tptParent;
const pkColumnNames = meta.primaryKeys.flatMap(pk => meta.properties[pk].fieldNames);
const parentPkColumnNames = parent.primaryKeys.flatMap(pk => parent.properties[pk].fieldNames);
// Determine the parent table name with schema
const parentSchema =
parent.schema === '*' ? undefined : (parent.schema ?? config.get('schema', platform.getDefaultSchemaName()));
const parentTableName = parentSchema ? `${parentSchema}.${parent.tableName}` : parent.tableName;
// Create FK constraint name
const constraintName = platform.getIndexName(table.name, pkColumnNames, 'foreign');
// Add the foreign key to the table
const fks = table.getForeignKeys();
fks[constraintName] = {
constraintName,
columnNames: pkColumnNames,
localTableName: table.getShortestName(false),
referencedColumnNames: parentPkColumnNames,
referencedTableName: parentTableName,
deleteRule: 'cascade', // TPT always uses cascade delete
updateRule: 'cascade', // TPT always uses cascade update
};
}
static matchName(name, nameToMatch) {
return typeof nameToMatch === 'string'
? name.toLocaleLowerCase() === nameToMatch.toLocaleLowerCase()
: nameToMatch.test(name);
}
static isNameAllowed(name, skipNames) {
return !(skipNames?.some(pattern => this.matchName(name, pattern)) ?? false);
}
static isTableNameAllowed(tableName, takeTables, skipTables) {
return (
(takeTables?.some(tableNameToMatch => this.matchName(tableName, tableNameToMatch)) ?? true) &&
this.isNameAllowed(tableName, skipTables)
);
}
static shouldHaveColumn(meta, prop, skipColumns) {
if (prop.persist === false || (prop.columnTypes?.length ?? 0) === 0) {
return false;
}
// Check if column should be skipped
if (skipColumns) {
const tableName = meta.tableName;
const tableSchema = meta.schema;
const fullTableName = tableSchema ? `${tableSchema}.${tableName}` : tableName;
// Check for skipColumns by table name or fully qualified table name
const columnsToSkip = skipColumns[tableName] || skipColumns[fullTableName];
if (columnsToSkip) {
for (const fieldName of prop.fieldNames) {
if (columnsToSkip.some(pattern => this.matchName(fieldName, pattern))) {
return false;
}
}
}
}
if (prop.kind === ReferenceKind.EMBEDDED && prop.object) {
return true;
}
const getRootProperty = prop => (prop.embedded ? getRootProperty(meta.properties[prop.embedded[0]]) : prop);
const rootProp = getRootProperty(prop);
if (rootProp.kind === ReferenceKind.EMBEDDED) {
return prop === rootProp || !rootProp.object;
}
return (
[ReferenceKind.SCALAR, ReferenceKind.MANY_TO_ONE].includes(prop.kind) ||
(prop.kind === ReferenceKind.ONE_TO_ONE && prop.owner)
);
}
toJSON() {
return {
name: this.name,
namespaces: [...this.#namespaces],
tables: this.#tables,
views: this.#views,
nativeEnums: this.#nativeEnums,
};
}
prune(schema, wildcardSchemaTables) {
const hasWildcardSchema = wildcardSchemaTables.length > 0;
this.#tables = this.#tables.filter(table => {
return (
(!schema && !hasWildcardSchema) || // no schema specified and we don't have any multi-schema entity
table.schema === schema || // specified schema matches the table's one
(!schema && !wildcardSchemaTables.includes(table.name))
); // no schema specified and the table has fixed one provided
});
this.#views = this.#views.filter(view => {
/* v8 ignore next */
return (
(!schema && !hasWildcardSchema) ||
view.schema === schema ||
(!schema && !wildcardSchemaTables.includes(view.name))
);
});
// remove namespaces of ignored tables and views
for (const ns of this.#namespaces) {
if (
!this.#tables.some(t => t.schema === ns) &&
!this.#views.some(v => v.schema === ns) &&
!Object.values(this.#nativeEnums).some(e => e.schema === ns)
) {
this.#namespaces.delete(ns);
}
}
}
}

105
node_modules/@mikro-orm/sql/schema/DatabaseTable.d.ts generated vendored Normal file
View File

@@ -0,0 +1,105 @@
import {
type Configuration,
type DeferMode,
type Dictionary,
type EntityMetadata,
type EntityProperty,
type IndexCallback,
type NamingStrategy,
} from '@mikro-orm/core';
import type { SchemaHelper } from './SchemaHelper.js';
import type { CheckDef, Column, ForeignKey, IndexDef } from '../typings.js';
import type { AbstractSqlPlatform } from '../AbstractSqlPlatform.js';
/**
* @internal
*/
export declare class DatabaseTable {
  #private;
  readonly name: string;
  readonly schema?: string | undefined;
  /** Native enum definitions visible to this table (shared from the owning schema). */
  nativeEnums: Dictionary<{
    name: string;
    schema?: string;
    items: string[];
  }>;
  comment?: string;
  constructor(platform: AbstractSqlPlatform, name: string, schema?: string | undefined);
  /** Returns the platform-quoted (and schema-qualified where applicable) table name. */
  getQuotedName(): string;
  getColumns(): Column[];
  getColumn(name: string): Column | undefined;
  removeColumn(name: string): void;
  getIndexes(): IndexDef[];
  getChecks(): CheckDef[];
  /** @internal */
  setIndexes(indexes: IndexDef[]): void;
  /** @internal */
  setChecks(checks: CheckDef[]): void;
  /** @internal */
  setForeignKeys(fks: Dictionary<ForeignKey>): void;
  /** Initializes the table from introspected columns, indexes, checks, PKs, FKs and enums. */
  init(
    cols: Column[],
    indexes: IndexDef[] | undefined,
    checks: CheckDef[] | undefined,
    pks: string[],
    fks?: Dictionary<ForeignKey>,
    enums?: Dictionary<string[]>,
  ): void;
  addColumn(column: Column): void;
  /** Adds the column(s) mapped by an entity property to this table. */
  addColumnFromProperty(prop: EntityProperty, meta: EntityMetadata, config: Configuration): void;
  private getIndexName;
  /** Reverse-engineers an EntityMetadata declaration from this table (used by entity generation). */
  getEntityDeclaration(
    namingStrategy: NamingStrategy,
    schemaHelper: SchemaHelper,
    scalarPropertiesForRelations: 'always' | 'never' | 'smart',
  ): EntityMetadata;
  private foreignKeysToProps;
  private findFkIndex;
  private getIndexProperties;
  private getSafeBaseNameForFkProp;
  /**
   * The shortest name is stripped of the default namespace. All other namespaced elements are returned as full-qualified names.
   */
  getShortestName(skipDefaultSchema?: boolean): string;
  getForeignKeys(): Dictionary<ForeignKey>;
  hasColumn(columnName: string): boolean;
  getIndex(indexName: string): IndexDef | undefined;
  hasIndex(indexName: string): boolean;
  getCheck(checkName: string): CheckDef | undefined;
  hasCheck(checkName: string): boolean;
  getPrimaryKey(): IndexDef | undefined;
  hasPrimaryKey(): boolean;
  private getForeignKeyDeclaration;
  private getPropertyDeclaration;
  private getReferenceKind;
  private getPropertyName;
  private getPropertyTypeForForeignKey;
  private getPropertyTypeForColumn;
  private getPropertyDefaultValue;
  private processIndexExpression;
  /** Registers an index/unique/primary definition on this table. */
  addIndex(
    meta: EntityMetadata,
    index: {
      properties?: string | string[];
      name?: string;
      type?: string;
      expression?: string | IndexCallback<any>;
      deferMode?: DeferMode | `${DeferMode}`;
      options?: Dictionary;
      columns?: {
        name: string;
        sort?: 'ASC' | 'DESC' | 'asc' | 'desc';
        nulls?: 'FIRST' | 'LAST' | 'first' | 'last';
        length?: number;
        collation?: string;
      }[];
      include?: string | string[];
      fillFactor?: number;
      invisible?: boolean;
      disabled?: boolean;
      clustered?: boolean;
    },
    type: 'index' | 'unique' | 'primary',
  ): void;
  addCheck(check: CheckDef): void;
  toJSON(): Dictionary;
}

1022
node_modules/@mikro-orm/sql/schema/DatabaseTable.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,68 @@
import { type Dictionary } from '@mikro-orm/core';
import type { Column, ForeignKey, IndexDef, SchemaDifference, TableDifference } from '../typings.js';
import type { DatabaseSchema } from './DatabaseSchema.js';
import type { DatabaseTable } from './DatabaseTable.js';
import type { AbstractSqlPlatform } from '../AbstractSqlPlatform.js';
/**
* Compares two Schemas and return an instance of SchemaDifference.
*/
export declare class SchemaComparator {
#private;
constructor(platform: AbstractSqlPlatform);
/**
* Returns a SchemaDifference object containing the differences between the schemas fromSchema and toSchema.
*
* The returned differences are returned in such a way that they contain the
* operations to change the schema stored in fromSchema to the schema that is
* stored in toSchema.
*/
compare(fromSchema: DatabaseSchema, toSchema: DatabaseSchema, inverseDiff?: SchemaDifference): SchemaDifference;
/**
* Returns the difference between the tables fromTable and toTable.
* If there are no differences this method returns the boolean false.
*/
diffTable(
fromTable: DatabaseTable,
toTable: DatabaseTable,
inverseTableDiff?: TableDifference,
): TableDifference | false;
/**
* Try to find columns that only changed their name, rename operations maybe cheaper than add/drop
* however ambiguities between different possibilities should not lead to renaming at all.
*/
private detectColumnRenamings;
/**
* Try to find indexes that only changed their name, rename operations maybe cheaper than add/drop
* however ambiguities between different possibilities should not lead to renaming at all.
*/
private detectIndexRenamings;
diffForeignKey(key1: ForeignKey, key2: ForeignKey, tableDifferences: TableDifference): boolean;
/**
* Returns the difference between the columns
*/
diffColumn(fromColumn: Column, toColumn: Column, fromTable: DatabaseTable, logging?: boolean): Set<string>;
diffEnumItems(items1?: string[], items2?: string[]): boolean;
diffComment(comment1?: string, comment2?: string): boolean;
/**
* Finds the difference between the indexes index1 and index2.
* Compares index1 with index2 and returns index2 if there are any differences or false in case there are no differences.
*/
diffIndex(index1: IndexDef, index2: IndexDef): boolean;
/**
* Checks if the other index already fulfills all the indexing and constraint needs of the current one.
*/
isIndexFulfilledBy(index1: IndexDef, index2: IndexDef): boolean;
/**
* Compare advanced column options between two indexes.
*/
private compareIndexColumns;
/**
* Compare two arrays for equality (order matters).
*/
private compareArrays;
diffExpression(expr1: string, expr2: string): boolean;
parseJsonDefault(defaultValue?: string | null): Dictionary | string | null;
hasSameDefaultValue(from: Column, to: Column): boolean;
private mapColumnToProperty;
private log;
}

753
node_modules/@mikro-orm/sql/schema/SchemaComparator.js generated vendored Normal file
View File

@@ -0,0 +1,753 @@
import { ArrayType, BooleanType, DateTimeType, inspect, JsonType, parseJsonSafe, Utils } from '@mikro-orm/core';
/**
* Compares two Schemas and return an instance of SchemaDifference.
*/
export class SchemaComparator {
#helper;
#logger;
#platform;
constructor(platform) {
this.#platform = platform;
this.#helper = this.#platform.getSchemaHelper();
this.#logger = this.#platform.getConfig().getLogger();
}
/**
* Returns a SchemaDifference object containing the differences between the schemas fromSchema and toSchema.
*
* The returned differences are returned in such a way that they contain the
* operations to change the schema stored in fromSchema to the schema that is
* stored in toSchema.
*/
compare(fromSchema, toSchema, inverseDiff) {
const diff = {
newTables: {},
removedTables: {},
changedTables: {},
newViews: {},
changedViews: {},
removedViews: {},
orphanedForeignKeys: [],
newNativeEnums: [],
removedNativeEnums: [],
newNamespaces: new Set(),
removedNamespaces: new Set(),
fromSchema,
};
const foreignKeysToTable = {};
for (const namespace of toSchema.getNamespaces()) {
if (fromSchema.hasNamespace(namespace) || namespace === this.#platform.getDefaultSchemaName()) {
continue;
}
diff.newNamespaces.add(namespace);
}
for (const namespace of fromSchema.getNamespaces()) {
if (toSchema.hasNamespace(namespace) || namespace === this.#platform.getDefaultSchemaName()) {
continue;
}
diff.removedNamespaces.add(namespace);
}
for (const [key, nativeEnum] of Object.entries(toSchema.getNativeEnums())) {
if (fromSchema.hasNativeEnum(key)) {
continue;
}
if (nativeEnum.schema === '*' && fromSchema.hasNativeEnum(`${toSchema.name}.${key}`)) {
continue;
}
diff.newNativeEnums.push(nativeEnum);
}
for (const [key, nativeEnum] of Object.entries(fromSchema.getNativeEnums())) {
if (toSchema.hasNativeEnum(key)) {
continue;
}
if (
key.startsWith(`${fromSchema.name}.`) &&
(fromSchema.name !== toSchema.name ||
toSchema.getNativeEnum(key.substring(fromSchema.name.length + 1))?.schema === '*')
) {
continue;
}
diff.removedNativeEnums.push(nativeEnum);
}
for (const table of toSchema.getTables()) {
const tableName = table.getShortestName(false);
if (!fromSchema.hasTable(tableName)) {
diff.newTables[tableName] = toSchema.getTable(tableName);
} else {
const tableDifferences = this.diffTable(
fromSchema.getTable(tableName),
toSchema.getTable(tableName),
inverseDiff?.changedTables[tableName],
);
if (tableDifferences !== false) {
diff.changedTables[tableName] = tableDifferences;
}
}
}
// Check if there are tables removed
for (let table of fromSchema.getTables()) {
const tableName = table.getShortestName();
table = fromSchema.getTable(tableName);
if (!toSchema.hasTable(tableName)) {
diff.removedTables[tableName] = table;
}
// also remember all foreign keys that point to a specific table
for (const foreignKey of Object.values(table.getForeignKeys())) {
if (!foreignKeysToTable[foreignKey.referencedTableName]) {
foreignKeysToTable[foreignKey.referencedTableName] = [];
}
foreignKeysToTable[foreignKey.referencedTableName].push(foreignKey);
}
}
for (const table of Object.values(diff.removedTables)) {
const tableName = (table.schema ? table.schema + '.' : '') + table.name;
if (!foreignKeysToTable[tableName]) {
continue;
}
diff.orphanedForeignKeys.push(...foreignKeysToTable[tableName]);
// Deleting duplicated foreign keys present both on the orphanedForeignKey and the removedForeignKeys from changedTables.
for (const foreignKey of foreignKeysToTable[tableName]) {
const localTableName = foreignKey.localTableName;
if (!diff.changedTables[localTableName]) {
continue;
}
for (const [key, fk] of Object.entries(diff.changedTables[localTableName].removedForeignKeys)) {
// We check if the key is from the removed table, if not we skip.
if (tableName !== fk.referencedTableName) {
continue;
}
delete diff.changedTables[localTableName].removedForeignKeys[key];
}
}
}
// Compare views
for (const toView of toSchema.getViews()) {
const viewName = toView.schema ? `${toView.schema}.${toView.name}` : toView.name;
if (!fromSchema.hasView(toView.name) && !fromSchema.hasView(viewName)) {
diff.newViews[viewName] = toView;
this.log(`view ${viewName} added`);
} else {
const fromView = fromSchema.getView(toView.name) ?? fromSchema.getView(viewName);
if (fromView && this.diffExpression(fromView.definition, toView.definition)) {
diff.changedViews[viewName] = { from: fromView, to: toView };
this.log(`view ${viewName} changed`);
}
}
}
// Check for removed views
for (const fromView of fromSchema.getViews()) {
const viewName = fromView.schema ? `${fromView.schema}.${fromView.name}` : fromView.name;
if (!toSchema.hasView(fromView.name) && !toSchema.hasView(viewName)) {
diff.removedViews[viewName] = fromView;
this.log(`view ${viewName} removed`);
}
}
return diff;
}
/**
* Returns the difference between the tables fromTable and toTable.
* If there are no differences this method returns the boolean false.
*/
diffTable(fromTable, toTable, inverseTableDiff) {
let changes = 0;
const tableDifferences = {
name: fromTable.getShortestName(),
addedColumns: {},
addedForeignKeys: {},
addedIndexes: {},
addedChecks: {},
changedColumns: {},
changedForeignKeys: {},
changedIndexes: {},
changedChecks: {},
removedColumns: {},
removedForeignKeys: {},
removedIndexes: {},
removedChecks: {},
renamedColumns: {},
renamedIndexes: {},
fromTable,
toTable,
};
if (this.diffComment(fromTable.comment, toTable.comment)) {
tableDifferences.changedComment = toTable.comment;
this.log(`table comment changed for ${tableDifferences.name}`, {
fromTableComment: fromTable.comment,
toTableComment: toTable.comment,
});
changes++;
}
const fromTableColumns = fromTable.getColumns();
const toTableColumns = toTable.getColumns();
// See if all the columns in "from" table exist in "to" table
for (const column of toTableColumns) {
if (fromTable.hasColumn(column.name)) {
continue;
}
tableDifferences.addedColumns[column.name] = column;
this.log(`column ${tableDifferences.name}.${column.name} of type ${column.type} added`);
changes++;
}
/* See if there are any removed columns in "to" table */
for (const column of fromTableColumns) {
// See if column is removed in "to" table.
if (!toTable.hasColumn(column.name)) {
tableDifferences.removedColumns[column.name] = column;
this.log(`column ${tableDifferences.name}.${column.name} removed`);
changes++;
continue;
}
// See if column has changed properties in "to" table.
const changedProperties = this.diffColumn(column, toTable.getColumn(column.name), fromTable, true);
if (changedProperties.size === 0) {
continue;
}
if (changedProperties.size === 1 && changedProperties.has('generated')) {
tableDifferences.addedColumns[column.name] = toTable.getColumn(column.name);
tableDifferences.removedColumns[column.name] = column;
changes++;
continue;
}
tableDifferences.changedColumns[column.name] = {
oldColumnName: column.name,
fromColumn: column,
column: toTable.getColumn(column.name),
changedProperties,
};
this.log(`column ${tableDifferences.name}.${column.name} changed`, { changedProperties });
changes++;
}
this.detectColumnRenamings(tableDifferences, inverseTableDiff);
const fromTableIndexes = fromTable.getIndexes();
const toTableIndexes = toTable.getIndexes();
// See if all the indexes in "from" table exist in "to" table
for (const index of Object.values(toTableIndexes)) {
if ((index.primary && fromTableIndexes.find(i => i.primary)) || fromTable.hasIndex(index.keyName)) {
continue;
}
tableDifferences.addedIndexes[index.keyName] = index;
this.log(`index ${index.keyName} added to table ${tableDifferences.name}`, { index });
changes++;
}
// See if there are any removed indexes in "to" table
for (const index of fromTableIndexes) {
// See if index is removed in "to" table.
if ((index.primary && !toTable.hasPrimaryKey()) || (!index.primary && !toTable.hasIndex(index.keyName))) {
tableDifferences.removedIndexes[index.keyName] = index;
this.log(`index ${index.keyName} removed from table ${tableDifferences.name}`);
changes++;
continue;
}
// See if index has changed in "to" table.
const toTableIndex = index.primary ? toTable.getPrimaryKey() : toTable.getIndex(index.keyName);
if (!this.diffIndex(index, toTableIndex)) {
continue;
}
tableDifferences.changedIndexes[index.keyName] = toTableIndex;
this.log(`index ${index.keyName} changed in table ${tableDifferences.name}`, {
fromTableIndex: index,
toTableIndex,
});
changes++;
}
this.detectIndexRenamings(tableDifferences);
const fromTableChecks = fromTable.getChecks();
const toTableChecks = toTable.getChecks();
// See if all the checks in "from" table exist in "to" table
for (const check of toTableChecks) {
if (fromTable.hasCheck(check.name)) {
continue;
}
tableDifferences.addedChecks[check.name] = check;
this.log(`check constraint ${check.name} added to table ${tableDifferences.name}`, { check });
changes++;
}
// See if there are any removed checks in "to" table
for (const check of fromTableChecks) {
if (!toTable.hasCheck(check.name)) {
tableDifferences.removedChecks[check.name] = check;
this.log(`check constraint ${check.name} removed from table ${tableDifferences.name}`);
changes++;
continue;
}
// See if check has changed in "to" table
const toTableCheck = toTable.getCheck(check.name);
const toColumn = toTable.getColumn(check.columnName);
const fromColumn = fromTable.getColumn(check.columnName);
if (!this.diffExpression(check.expression, toTableCheck.expression)) {
continue;
}
if (
fromColumn?.enumItems &&
toColumn?.enumItems &&
!this.diffEnumItems(fromColumn.enumItems, toColumn.enumItems)
) {
continue;
}
this.log(`check constraint ${check.name} changed in table ${tableDifferences.name}`, {
fromTableCheck: check,
toTableCheck,
});
tableDifferences.changedChecks[check.name] = toTableCheck;
changes++;
}
const fromForeignKeys = { ...fromTable.getForeignKeys() };
const toForeignKeys = { ...toTable.getForeignKeys() };
for (const fromConstraint of Object.values(fromForeignKeys)) {
for (const toConstraint of Object.values(toForeignKeys)) {
if (!this.diffForeignKey(fromConstraint, toConstraint, tableDifferences)) {
delete fromForeignKeys[fromConstraint.constraintName];
delete toForeignKeys[toConstraint.constraintName];
} else if (fromConstraint.constraintName.toLowerCase() === toConstraint.constraintName.toLowerCase()) {
this.log(`FK constraint ${fromConstraint.constraintName} changed in table ${tableDifferences.name}`, {
fromConstraint,
toConstraint,
});
tableDifferences.changedForeignKeys[toConstraint.constraintName] = toConstraint;
changes++;
delete fromForeignKeys[fromConstraint.constraintName];
delete toForeignKeys[toConstraint.constraintName];
}
}
}
for (const fromConstraint of Object.values(fromForeignKeys)) {
tableDifferences.removedForeignKeys[fromConstraint.constraintName] = fromConstraint;
this.log(`FK constraint ${fromConstraint.constraintName} removed from table ${tableDifferences.name}`);
changes++;
}
for (const toConstraint of Object.values(toForeignKeys)) {
tableDifferences.addedForeignKeys[toConstraint.constraintName] = toConstraint;
this.log(`FK constraint ${toConstraint.constraintName} added to table ${tableDifferences.name}`, {
constraint: toConstraint,
});
changes++;
}
return changes ? tableDifferences : false;
}
/**
* Try to find columns that only changed their name, rename operations maybe cheaper than add/drop
* however ambiguities between different possibilities should not lead to renaming at all.
*/
detectColumnRenamings(tableDifferences, inverseTableDiff) {
const renameCandidates = {};
const oldFKs = Object.values(tableDifferences.fromTable.getForeignKeys());
const newFKs = Object.values(tableDifferences.toTable.getForeignKeys());
for (const addedColumn of Object.values(tableDifferences.addedColumns)) {
for (const removedColumn of Object.values(tableDifferences.removedColumns)) {
const diff = this.diffColumn(addedColumn, removedColumn, tableDifferences.fromTable);
if (diff.size !== 0) {
continue;
}
const wasFK = oldFKs.some(fk => fk.columnNames.includes(removedColumn.name));
const isFK = newFKs.some(fk => fk.columnNames.includes(addedColumn.name));
if (wasFK !== isFK) {
continue;
}
const renamedColumn = inverseTableDiff?.renamedColumns[addedColumn.name];
if (renamedColumn && renamedColumn?.name !== removedColumn.name) {
continue;
}
renameCandidates[addedColumn.name] = renameCandidates[addedColumn.name] ?? [];
renameCandidates[addedColumn.name].push([removedColumn, addedColumn]);
}
}
for (const candidateColumns of Object.values(renameCandidates)) {
if (candidateColumns.length !== 1) {
continue;
}
const [removedColumn, addedColumn] = candidateColumns[0];
const removedColumnName = removedColumn.name;
const addedColumnName = addedColumn.name;
/* v8 ignore next */
if (tableDifferences.renamedColumns[removedColumnName]) {
continue;
}
tableDifferences.renamedColumns[removedColumnName] = addedColumn;
delete tableDifferences.addedColumns[addedColumnName];
delete tableDifferences.removedColumns[removedColumnName];
this.log(`renamed column detected in table ${tableDifferences.name}`, {
old: removedColumnName,
new: addedColumnName,
});
}
}
/**
* Try to find indexes that only changed their name, rename operations maybe cheaper than add/drop
* however ambiguities between different possibilities should not lead to renaming at all.
*/
detectIndexRenamings(tableDifferences) {
const renameCandidates = {};
// Gather possible rename candidates by comparing each added and removed index based on semantics.
for (const addedIndex of Object.values(tableDifferences.addedIndexes)) {
for (const removedIndex of Object.values(tableDifferences.removedIndexes)) {
if (this.diffIndex(addedIndex, removedIndex)) {
continue;
}
renameCandidates[addedIndex.keyName] = renameCandidates[addedIndex.keyName] ?? [];
renameCandidates[addedIndex.keyName].push([removedIndex, addedIndex]);
}
}
for (const candidateIndexes of Object.values(renameCandidates)) {
// If the current rename candidate contains exactly one semantically equal index, we can safely rename it.
// Otherwise it is unclear if a rename action is really intended, therefore we let those ambiguous indexes be added/dropped.
if (candidateIndexes.length !== 1) {
continue;
}
const [removedIndex, addedIndex] = candidateIndexes[0];
const removedIndexName = removedIndex.keyName;
const addedIndexName = addedIndex.keyName;
if (tableDifferences.renamedIndexes[removedIndexName]) {
continue;
}
tableDifferences.renamedIndexes[removedIndexName] = addedIndex;
delete tableDifferences.addedIndexes[addedIndexName];
delete tableDifferences.removedIndexes[removedIndexName];
this.log(`renamed index detected in table ${tableDifferences.name}`, {
old: removedIndexName,
new: addedIndexName,
});
}
}
diffForeignKey(key1, key2, tableDifferences) {
if (key1.columnNames.join('~').toLowerCase() !== key2.columnNames.join('~').toLowerCase()) {
return true;
}
if (key1.referencedColumnNames.join('~').toLowerCase() !== key2.referencedColumnNames.join('~').toLowerCase()) {
return true;
}
if (key1.constraintName !== key2.constraintName) {
return true;
}
if (key1.referencedTableName !== key2.referencedTableName) {
return true;
}
if (key1.deferMode !== key2.deferMode) {
return true;
}
if (key1.localTableName === key1.referencedTableName && !this.#platform.supportsMultipleCascadePaths()) {
return false;
}
if (key1.columnNames.some(col => tableDifferences.changedColumns[col]?.changedProperties.has('type'))) {
return true;
}
const defaultRule = ['restrict', 'no action'];
const rule = (key, method) => {
return (key[method] ?? defaultRule[0]).toLowerCase().replace(defaultRule[1], defaultRule[0]).replace(/"/g, '');
};
const compare = method => rule(key1, method) === rule(key2, method);
// Skip updateRule comparison for platforms that don't support ON UPDATE (e.g., Oracle)
const updateRuleDiffers = this.#platform.supportsOnUpdate() && !compare('updateRule');
return updateRuleDiffers || !compare('deleteRule');
}
/**
* Returns the difference between the columns
*/
diffColumn(fromColumn, toColumn, fromTable, logging) {
const changedProperties = new Set();
const fromProp = this.mapColumnToProperty({ ...fromColumn, autoincrement: false });
const toProp = this.mapColumnToProperty({ ...toColumn, autoincrement: false });
const fromColumnType = this.#platform.normalizeColumnType(
fromColumn.mappedType.getColumnType(fromProp, this.#platform).toLowerCase(),
fromProp,
);
const fromNativeEnum =
fromTable.nativeEnums[fromColumnType] ??
Object.values(fromTable.nativeEnums).find(e => e.name === fromColumnType && e.schema !== '*');
let toColumnType = this.#platform.normalizeColumnType(
toColumn.mappedType.getColumnType(toProp, this.#platform).toLowerCase(),
toProp,
);
const log = (msg, params) => {
if (logging) {
const copy = Utils.copy(params);
Utils.dropUndefinedProperties(copy);
this.log(msg, copy);
}
};
if (
fromColumnType !== toColumnType &&
(!fromNativeEnum || `${fromNativeEnum.schema}.${fromNativeEnum.name}` !== toColumnType) &&
!(fromColumn.ignoreSchemaChanges?.includes('type') || toColumn.ignoreSchemaChanges?.includes('type')) &&
!fromColumn.generated &&
!toColumn.generated
) {
if (
!toColumnType.includes('.') &&
fromTable.schema &&
fromTable.schema !== this.#platform.getDefaultSchemaName()
) {
toColumnType = `${fromTable.schema}.${toColumnType}`;
}
if (fromColumnType !== toColumnType) {
log(`'type' changed for column ${fromTable.name}.${fromColumn.name}`, { fromColumnType, toColumnType });
changedProperties.add('type');
}
}
if (!!fromColumn.nullable !== !!toColumn.nullable && !fromColumn.generated && !toColumn.generated) {
log(`'nullable' changed for column ${fromTable.name}.${fromColumn.name}`, { fromColumn, toColumn });
changedProperties.add('nullable');
}
if (this.diffExpression(fromColumn.generated, toColumn.generated)) {
log(`'generated' changed for column ${fromTable.name}.${fromColumn.name}`, { fromColumn, toColumn });
changedProperties.add('generated');
}
if (!!fromColumn.autoincrement !== !!toColumn.autoincrement) {
log(`'autoincrement' changed for column ${fromTable.name}.${fromColumn.name}`, { fromColumn, toColumn });
changedProperties.add('autoincrement');
}
if (!!fromColumn.unsigned !== !!toColumn.unsigned && this.#platform.supportsUnsigned()) {
log(`'unsigned' changed for column ${fromTable.name}.${fromColumn.name}`, { fromColumn, toColumn });
changedProperties.add('unsigned');
}
if (
!(fromColumn.ignoreSchemaChanges?.includes('default') || toColumn.ignoreSchemaChanges?.includes('default')) &&
!this.hasSameDefaultValue(fromColumn, toColumn)
) {
log(`'default' changed for column ${fromTable.name}.${fromColumn.name}`, { fromColumn, toColumn });
changedProperties.add('default');
}
if (this.diffComment(fromColumn.comment, toColumn.comment)) {
log(`'comment' changed for column ${fromTable.name}.${fromColumn.name}`, { fromColumn, toColumn });
changedProperties.add('comment');
}
if (
!(fromColumn.mappedType instanceof ArrayType) &&
!(toColumn.mappedType instanceof ArrayType) &&
this.diffEnumItems(fromColumn.enumItems, toColumn.enumItems)
) {
log(`'enumItems' changed for column ${fromTable.name}.${fromColumn.name}`, { fromColumn, toColumn });
changedProperties.add('enumItems');
}
if (
(fromColumn.extra || '').toLowerCase() !== (toColumn.extra || '').toLowerCase() &&
!(fromColumn.ignoreSchemaChanges?.includes('extra') || toColumn.ignoreSchemaChanges?.includes('extra'))
) {
log(`'extra' changed for column ${fromTable.name}.${fromColumn.name}`, { fromColumn, toColumn });
changedProperties.add('extra');
}
return changedProperties;
}
diffEnumItems(items1 = [], items2 = []) {
return items1.length !== items2.length || items1.some((v, i) => v !== items2[i]);
}
diffComment(comment1, comment2) {
// A null value and an empty string are actually equal for a comment so they should not trigger a change.
// eslint-disable-next-line eqeqeq
return comment1 != comment2 && !(comment1 == null && comment2 === '') && !(comment2 == null && comment1 === '');
}
/**
* Finds the difference between the indexes index1 and index2.
* Compares index1 with index2 and returns index2 if there are any differences or false in case there are no differences.
*/
diffIndex(index1, index2) {
// if one of them is a custom expression or full text index, compare only by name
if (index1.expression || index2.expression || index1.type === 'fulltext' || index2.type === 'fulltext') {
return index1.keyName !== index2.keyName;
}
return !this.isIndexFulfilledBy(index1, index2) || !this.isIndexFulfilledBy(index2, index1);
}
/**
* Checks if the other index already fulfills all the indexing and constraint needs of the current one.
*/
isIndexFulfilledBy(index1, index2) {
// allow the other index to be equally large only. It being larger is an option but it creates a problem with scenarios of the kind PRIMARY KEY(foo,bar) UNIQUE(foo)
if (index1.columnNames.length !== index2.columnNames.length) {
return false;
}
function spansColumns() {
for (let i = 0; i < index1.columnNames.length; i++) {
if (index1.columnNames[i] === index2.columnNames[i]) {
continue;
}
return false;
}
return true;
}
// Check if columns are the same, and even in the same order
if (!spansColumns()) {
return false;
}
// Compare advanced column options (sort order, nulls, length, collation)
if (!this.compareIndexColumns(index1, index2)) {
return false;
}
// Compare INCLUDE columns for covering indexes
if (!this.compareArrays(index1.include, index2.include)) {
return false;
}
// Compare fill factor
if (index1.fillFactor !== index2.fillFactor) {
return false;
}
// Compare invisible flag
if (!!index1.invisible !== !!index2.invisible) {
return false;
}
// Compare disabled flag
if (!!index1.disabled !== !!index2.disabled) {
return false;
}
// Compare clustered flag
if (!!index1.clustered !== !!index2.clustered) {
return false;
}
if (!index1.unique && !index1.primary) {
// this is a special case: If the current key is neither primary or unique, any unique or
// primary key will always have the same effect for the index and there cannot be any constraint
// overlaps. This means a primary or unique index can always fulfill the requirements of just an
// index that has no constraints.
return true;
}
if (this.#platform.supportsDeferredUniqueConstraints() && index1.deferMode !== index2.deferMode) {
return false;
}
return index1.primary === index2.primary && index1.unique === index2.unique;
}
/**
* Compare advanced column options between two indexes.
*/
compareIndexColumns(index1, index2) {
const cols1 = index1.columns ?? [];
const cols2 = index2.columns ?? [];
// If neither has column options, they match
if (cols1.length === 0 && cols2.length === 0) {
return true;
}
// If only one has column options, they don't match
if (cols1.length !== cols2.length) {
return false;
}
// Compare each column's options
// Note: We don't check c1.name !== c2.name because the indexes already have matching columnNames
// and the columns array is derived from those same column names
for (let i = 0; i < cols1.length; i++) {
const c1 = cols1[i];
const c2 = cols2[i];
const sort1 = c1.sort?.toUpperCase() ?? 'ASC';
const sort2 = c2.sort?.toUpperCase() ?? 'ASC';
if (sort1 !== sort2) {
return false;
}
const defaultNulls = s => (s === 'DESC' ? 'FIRST' : 'LAST');
const nulls1 = c1.nulls?.toUpperCase() ?? defaultNulls(sort1);
const nulls2 = c2.nulls?.toUpperCase() ?? defaultNulls(sort2);
if (nulls1 !== nulls2) {
return false;
}
if (c1.length !== c2.length) {
return false;
}
if (c1.collation !== c2.collation) {
return false;
}
}
return true;
}
/**
* Compare two arrays for equality (order matters).
*/
compareArrays(arr1, arr2) {
if (!arr1 && !arr2) {
return true;
}
if (!arr1 || !arr2 || arr1.length !== arr2.length) {
return false;
}
return arr1.every((val, i) => val === arr2[i]);
}
diffExpression(expr1, expr2) {
// expressions like check constraints might be normalized by the driver,
// e.g. quotes might be added (https://github.com/mikro-orm/mikro-orm/issues/3827)
const simplify = str => {
return (
str
?.replace(/_\w+'(.*?)'/g, '$1')
.replace(/in\s*\((.*?)\)/gi, '= any (array[$1])')
// MySQL normalizes count(*) to count(0)
.replace(/\bcount\s*\(\s*0\s*\)/gi, 'count(*)')
// Remove quotes first so we can process identifiers
.replace(/['"`]/g, '')
// MySQL adds table/alias prefixes to columns (e.g., a.name or table_name.column vs just column)
// Strip these prefixes - match word.word patterns and keep only the last part
.replace(/\b\w+\.(\w+)/g, '$1')
// Normalize JOIN syntax: inner join -> join (equivalent in SQL)
.replace(/\binner\s+join\b/gi, 'join')
// Remove redundant column aliases like `title AS title` -> `title`
.replace(/\b(\w+)\s+as\s+\1\b/gi, '$1')
// Remove AS keyword (optional in SQL, MySQL may add/remove it)
.replace(/\bas\b/gi, '')
// Remove remaining special chars, parentheses, type casts, asterisks, and normalize whitespace
.replace(/[()\n[\]*]|::\w+| +/g, '')
.replace(/anyarray\[(.*)]/gi, '$1')
.toLowerCase()
// PostgreSQL adds default aliases to aggregate functions (e.g., count(*) AS count)
// After removing AS and whitespace, this results in duplicate adjacent words
// Remove these duplicates: "countcount" -> "count", "minmin" -> "min"
// Use lookahead to match repeated patterns of 3+ chars (avoid false positives on short sequences)
.replace(/(\w{3,})\1/g, '$1')
// Remove trailing semicolon (PostgreSQL adds it to view definitions)
.replace(/;$/, '')
);
};
return simplify(expr1) !== simplify(expr2);
}
parseJsonDefault(defaultValue) {
/* v8 ignore next */
if (!defaultValue) {
return null;
}
const val = defaultValue.replace(/^(_\w+\\)?'(.*?)\\?'$/, '$2').replace(/^\(?'(.*?)'\)?$/, '$1');
return parseJsonSafe(val);
}
hasSameDefaultValue(from, to) {
if (
from.default == null ||
from.default.toString().toLowerCase() === 'null' ||
from.default.toString().startsWith('nextval(')
) {
return to.default == null || to.default.toLowerCase() === 'null';
}
if (to.mappedType instanceof BooleanType) {
const defaultValueFrom = !['0', 'false', 'f', 'n', 'no', 'off'].includes('' + from.default);
const defaultValueTo = !['0', 'false', 'f', 'n', 'no', 'off'].includes('' + to.default);
return defaultValueFrom === defaultValueTo;
}
if (to.mappedType instanceof JsonType) {
const defaultValueFrom = this.parseJsonDefault(from.default);
const defaultValueTo = this.parseJsonDefault(to.default);
return Utils.equals(defaultValueFrom, defaultValueTo);
}
if (to.mappedType instanceof DateTimeType && from.default && to.default) {
// normalize now/current_timestamp defaults, also remove `()` from the end of default expression
const defaultValueFrom = from.default.toLowerCase().replace('current_timestamp', 'now').replace(/\(\)$/, '');
const defaultValueTo = to.default.toLowerCase().replace('current_timestamp', 'now').replace(/\(\)$/, '');
return defaultValueFrom === defaultValueTo;
}
if (from.default && to.default) {
return from.default.toString().toLowerCase() === to.default.toString().toLowerCase();
}
if (['', this.#helper.getDefaultEmptyString()].includes(to.default) && from.default != null) {
return ['', this.#helper.getDefaultEmptyString()].includes(from.default.toString());
}
// eslint-disable-next-line eqeqeq
return from.default == to.default; // == intentionally
}
mapColumnToProperty(column) {
const length = /\w+\((\d+)\)/.exec(column.type);
const match = /\w+\((\d+), ?(\d+)\)/.exec(column.type);
return {
fieldNames: [column.name],
columnTypes: [column.type],
items: column.enumItems,
...column,
length: length ? +length[1] : column.length,
precision: match ? +match[1] : column.precision,
scale: match ? +match[2] : column.scale,
};
}
log(message, params) {
if (params) {
message += ' ' + inspect(params);
}
this.#logger.log('schema', message);
}
}

118
node_modules/@mikro-orm/sql/schema/SchemaHelper.d.ts generated vendored Normal file
View File

@@ -0,0 +1,118 @@
import { type Connection, type Dictionary, type Options, RawQueryFragment } from '@mikro-orm/core';
import type { AbstractSqlConnection } from '../AbstractSqlConnection.js';
import type { AbstractSqlPlatform } from '../AbstractSqlPlatform.js';
import type { CheckDef, Column, ForeignKey, IndexDef, Table, TableDifference } from '../typings.js';
import type { DatabaseSchema } from './DatabaseSchema.js';
import type { DatabaseTable } from './DatabaseTable.js';
/** Base class for database-specific schema helpers. Provides SQL generation for DDL operations. */
export declare abstract class SchemaHelper {
  /** Platform instance used for dialect-specific quoting and SQL generation. */
  protected readonly platform: AbstractSqlPlatform;
  constructor(platform: AbstractSqlPlatform);
  /** Returns SQL to prepend to schema migration scripts (e.g., disabling FK checks). */
  getSchemaBeginning(_charset: string, disableForeignKeys?: boolean): string;
  /** Returns SQL to disable foreign key checks. */
  disableForeignKeysSQL(): string;
  /** Returns SQL to re-enable foreign key checks. */
  enableForeignKeysSQL(): string;
  /** Returns SQL to append to schema migration scripts (e.g., re-enabling FK checks). */
  getSchemaEnd(disableForeignKeys?: boolean): string;
  /** Returns trailing SQL for a CREATE TABLE statement — presumably charset/collation table options; confirm against driver implementations. */
  finalizeTable(table: DatabaseTable, charset: string, collate?: string): string;
  /** Returns extra SQL statements that attach comments to the table — NOTE(review): exact scope (table vs. column comments) depends on the driver. */
  appendComments(table: DatabaseTable): string[];
  /** Whether the dialect supports schema-level (named) constraints. */
  supportsSchemaConstraints(): boolean;
  /** Resolves the primary key column names of a table, using the given index metadata and/or the connection. */
  getPrimaryKeys(
    connection: AbstractSqlConnection,
    indexes: IndexDef[] | undefined,
    tableName: string,
    schemaName?: string,
  ): Promise<string[]>;
  /** Infers a column length from its raw type string (e.g. `varchar(255)`), or `undefined` when none is encoded. */
  inferLengthFromColumnType(type: string): number | undefined;
  /** Returns a unique lookup key for a table — presumably `schema.table`; confirm in implementation. */
  protected getTableKey(t: Table): string;
  /** Returns SQL to create a native enum type (dialect-specific, e.g. PostgreSQL `CREATE TYPE`). */
  getCreateNativeEnumSQL(name: string, values: unknown[], schema?: string): string;
  /** Returns SQL to drop a native enum type. */
  getDropNativeEnumSQL(name: string, schema?: string): string;
  /** Returns SQL to alter a native enum type (add a value or rebuild the item list). */
  getAlterNativeEnumSQL(name: string, schema?: string, value?: string, items?: string[], oldItems?: string[]): string;
  /** Loads table metadata (columns, indexes, foreign keys) from the database information schema. */
  abstract loadInformationSchema(
    schema: DatabaseSchema,
    connection: AbstractSqlConnection,
    tables: Table[],
    schemas?: string[],
  ): Promise<void>;
  /** Returns the SQL query to list all tables in the database. */
  getListTablesSQL(): string;
  /** Retrieves all tables from the database. */
  getAllTables(connection: AbstractSqlConnection, schemas?: string[]): Promise<Table[]>;
  /** Returns the SQL query to list all views in the database. */
  getListViewsSQL(): string;
  /** Loads view definitions from the database into the given schema object. */
  loadViews(schema: DatabaseSchema, connection: AbstractSqlConnection, schemaName?: string): Promise<void>;
  /** Returns SQL to rename a column in a table. */
  getRenameColumnSQL(tableName: string, oldColumnName: string, to: Column, schemaName?: string): string;
  /** Returns SQL to create an index on a table. */
  getCreateIndexSQL(tableName: string, index: IndexDef): string;
  /**
   * Hook for adding driver-specific index options (e.g., fill factor for PostgreSQL).
   */
  protected getCreateIndexSuffix(_index: IndexDef): string;
  /**
   * Build the column list for an index, supporting advanced options like sort order, nulls ordering, and collation.
   * Note: Prefix length is only supported by MySQL/MariaDB which override this method.
   */
  protected getIndexColumns(index: IndexDef): string;
  /** Returns SQL to drop an index. */
  getDropIndexSQL(tableName: string, index: IndexDef): string;
  /** Returns the SQL statements to rename an index — an array, presumably drop+create for dialects without native RENAME INDEX. */
  getRenameIndexSQL(tableName: string, index: IndexDef, oldIndexName: string): string[];
  /** Returns SQL statements to apply a table difference (add/drop/alter columns, indexes, foreign keys). */
  alterTable(diff: TableDifference, safe?: boolean): string[];
  /** Returns SQL to add columns to an existing table. */
  getAddColumnsSQL(table: DatabaseTable, columns: Column[]): string[];
  /** Returns SQL to drop columns from a table. */
  getDropColumnsSQL(tableName: string, columns: Column[], schemaName?: string): string;
  /** Whether the table's primary key uses a name other than the platform default. */
  hasNonDefaultPrimaryKeyName(table: DatabaseTable): boolean;
  /** Returns a SQL expression casting the named column to the given type — used during column alteration; confirm exact usage in driver. */
  castColumn(name: string, type: string): string;
  /** Returns SQL clauses to alter an existing column based on the set of changed properties. */
  alterTableColumn(column: Column, table: DatabaseTable, changedProperties: Set<string>): string[];
  /** Returns the SQL fragment defining a column inside CREATE/ALTER TABLE, or `undefined` when nothing is needed. */
  createTableColumn(column: Column, table: DatabaseTable, changedProperties?: Set<string>): string | undefined;
  /** Returns SQL statements to run before the main ALTER TABLE statements. */
  getPreAlterTable(tableDiff: TableDifference, safe: boolean): string[];
  /** Returns SQL statements to run after the main ALTER TABLE statements. */
  getPostAlterTable(tableDiff: TableDifference, safe: boolean): string[];
  /** Returns SQL to change a column's comment. */
  getChangeColumnCommentSQL(tableName: string, to: Column, schemaName?: string): string;
  /** Lists the namespaces (schemas) present in the database. */
  getNamespaces(connection: AbstractSqlConnection): Promise<string[]>;
  /** Post-processes raw index rows into normalized index definitions. */
  protected mapIndexes(indexes: IndexDef[]): Promise<IndexDef[]>;
  /** Maps raw foreign-key rows into a dictionary of FK definitions for the given table. */
  mapForeignKeys(fks: any[], tableName: string, schemaName?: string): Dictionary;
  /** Normalizes a raw column default value read from the information schema into a comparable form. */
  normalizeDefaultValue(
    defaultValue: string | RawQueryFragment,
    length?: number,
    defaultValues?: Dictionary<string[]>,
  ): string | number;
  /** Returns SQL to create a database. */
  getCreateDatabaseSQL(name: string): string;
  /** Returns SQL to drop a database. */
  getDropDatabaseSQL(name: string): string;
  /** Returns SQL to create a namespace (schema). */
  getCreateNamespaceSQL(name: string): string;
  /** Returns SQL to drop a namespace (schema). */
  getDropNamespaceSQL(name: string): string;
  /** Returns SQL that checks whether a database exists. */
  getDatabaseExistsSQL(name: string): string;
  /** Returns the driver's error message/code for a missing database — used to detect "database does not exist" failures. */
  getDatabaseNotExistsError(dbName: string): string;
  /** Returns the name of the management database to connect to for create/drop operations (e.g. `postgres`). */
  getManagementDbName(): string;
  /** Returns the dialect's representation of an empty-string default — TODO confirm exact semantics per driver. */
  getDefaultEmptyString(): string;
  /** Checks whether the named database exists. */
  databaseExists(connection: Connection, name: string): Promise<boolean>;
  /** Appends one or more SQL statements to `array`, optionally padding — NOTE(review): mutates `array` in place. */
  append(array: string[], sql: string | string[], pad?: boolean): void;
  /** Returns SQL statements to create a table with all its columns, primary key, indexes, and checks. */
  createTable(table: DatabaseTable, alter?: boolean): string[];
  /** Returns SQL to set or clear a table's comment. */
  alterTableComment(table: DatabaseTable, comment?: string): string;
  /** Returns SQL to create a foreign key constraint on a table. */
  createForeignKey(table: DatabaseTable, foreignKey: ForeignKey, alterTable?: boolean, inline?: boolean): string;
  /** Splits a possibly schema-qualified table name into `[schema, table]`; schema is `undefined` when absent (or skipped when default). */
  splitTableName(name: string, skipDefaultSchema?: boolean): [string | undefined, string];
  /** Returns the (possibly schema-qualified) name to use when referencing a table from an FK. */
  getReferencedTableName(referencedTableName: string, schema?: string): string;
  /** Returns the SQL fragment/statement creating an index (or the primary key, when `createPrimary` is set). */
  createIndex(index: IndexDef, table: DatabaseTable, createPrimary?: boolean): string;
  /** Returns the SQL fragment/statement creating a check constraint. */
  createCheck(table: DatabaseTable, check: CheckDef): string;
  /** Returns the quoted, optionally schema-qualified table name. */
  protected getTableName(table: string, schema?: string): string;
  /** Groups tables by their schema name (key `undefined` for the default schema). */
  getTablesGroupedBySchemas(tables: Table[]): Map<string | undefined, Table[]>;
  /** Schema generator options resolved from the ORM configuration. */
  get options(): NonNullable<Options['schemaGenerator']>;
  /** Normalizes/escapes a comment string for embedding in DDL. */
  protected processComment(comment: string): string;
  /** Quotes and joins the given identifier parts, skipping `undefined` entries — confirm join behavior in implementation. */
  protected quote(...keys: (string | undefined)[]): string;
  /** Returns SQL to drop a foreign key constraint. */
  dropForeignKey(tableName: string, constraintName: string): string;
  /** Returns SQL to drop an index, optionally by its old name during a rename. */
  dropIndex(table: string, index: IndexDef, oldIndexName?: string): string;
  /** Returns SQL to drop a named constraint. */
  dropConstraint(table: string, name: string): string;
  /** Returns SQL to drop a table if it exists. */
  dropTableIfExists(name: string, schema?: string): string;
  /** Returns SQL to create a view from the given definition. */
  createView(name: string, schema: string | undefined, definition: string): string;
  /** Returns SQL to drop a view if it exists. */
  dropViewIfExists(name: string, schema?: string): string;
  /** Returns SQL to create a materialized view, optionally populated (`WITH DATA`). */
  createMaterializedView(name: string, schema: string | undefined, definition: string, withData?: boolean): string;
  /** Returns SQL to drop a materialized view if it exists. */
  dropMaterializedViewIfExists(name: string, schema?: string): string;
  /** Returns SQL to refresh a materialized view, optionally concurrently. */
  refreshMaterializedView(name: string, schema?: string, concurrently?: boolean): string;
  /** Returns the SQL query to list all materialized views in the database. */
  getListMaterializedViewsSQL(): string;
  /** Loads materialized view definitions from the database into the given schema object. */
  loadMaterializedViews(schema: DatabaseSchema, connection: AbstractSqlConnection, schemaName?: string): Promise<void>;
}

Some files were not shown because too many files have changed in this diff Show More