Initial commit - Event Planner application
This commit is contained in:
121
node_modules/@mikro-orm/core/utils/AbstractMigrator.d.ts
generated
vendored
Normal file
121
node_modules/@mikro-orm/core/utils/AbstractMigrator.d.ts
generated
vendored
Normal file
@@ -0,0 +1,121 @@
|
||||
import type {
|
||||
Constructor,
|
||||
IMigrationGenerator,
|
||||
IMigrationRunner,
|
||||
IMigrator,
|
||||
IMigratorStorage,
|
||||
MaybePromise,
|
||||
Migration,
|
||||
MigrationInfo,
|
||||
MigrationRow,
|
||||
MigratorEvent,
|
||||
} from '../typings.js';
|
||||
import type { Transaction } from '../connections/Connection.js';
|
||||
import type { Configuration, MigrationsOptions } from './Configuration.js';
|
||||
import type { EntityManagerType, IDatabaseDriver } from '../drivers/IDatabaseDriver.js';
|
||||
interface RunnableMigration {
|
||||
name: string;
|
||||
path?: string;
|
||||
up: () => MaybePromise<void>;
|
||||
down: () => MaybePromise<void>;
|
||||
}
|
||||
type MigrateOptions = {
|
||||
from?: string | number;
|
||||
to?: string | number;
|
||||
migrations?: string[];
|
||||
transaction?: Transaction;
|
||||
};
|
||||
export declare abstract class AbstractMigrator<D extends IDatabaseDriver> implements IMigrator {
|
||||
#private;
|
||||
protected readonly em: D[typeof EntityManagerType];
|
||||
protected runner: IMigrationRunner;
|
||||
protected storage: IMigratorStorage;
|
||||
protected generator: IMigrationGenerator;
|
||||
protected readonly driver: D;
|
||||
protected readonly config: Configuration;
|
||||
protected readonly options: MigrationsOptions;
|
||||
protected absolutePath: string;
|
||||
protected initialized: boolean;
|
||||
constructor(em: D[typeof EntityManagerType]);
|
||||
protected abstract createRunner(): IMigrationRunner;
|
||||
protected abstract createStorage(): IMigratorStorage;
|
||||
protected abstract getDefaultGenerator(): IMigrationGenerator;
|
||||
abstract create(
|
||||
path?: string,
|
||||
blank?: boolean,
|
||||
initial?: boolean,
|
||||
name?: string,
|
||||
): Promise<{
|
||||
fileName: string;
|
||||
code: string;
|
||||
diff: {
|
||||
up: string[];
|
||||
down: string[];
|
||||
};
|
||||
}>;
|
||||
abstract checkSchema(): Promise<boolean>;
|
||||
abstract createInitial(
|
||||
path?: string,
|
||||
name?: string,
|
||||
blank?: boolean,
|
||||
): Promise<{
|
||||
fileName: string;
|
||||
code: string;
|
||||
diff: {
|
||||
up: string[];
|
||||
down: string[];
|
||||
};
|
||||
}>;
|
||||
/**
|
||||
* @inheritDoc
|
||||
*/
|
||||
on(eventName: MigratorEvent, listener: (event: MigrationInfo) => MaybePromise<void>): this;
|
||||
/**
|
||||
* @inheritDoc
|
||||
*/
|
||||
off(eventName: MigratorEvent, listener: (event: MigrationInfo) => MaybePromise<void>): this;
|
||||
/**
|
||||
* @inheritDoc
|
||||
*/
|
||||
getExecuted(): Promise<MigrationRow[]>;
|
||||
/**
|
||||
* @inheritDoc
|
||||
*/
|
||||
getPending(): Promise<MigrationInfo[]>;
|
||||
/**
|
||||
* @inheritDoc
|
||||
*/
|
||||
up(options?: string | string[] | MigrateOptions): Promise<MigrationInfo[]>;
|
||||
/**
|
||||
* @inheritDoc
|
||||
*/
|
||||
down(options?: string | string[] | Omit<MigrateOptions, 'from'>): Promise<MigrationInfo[]>;
|
||||
abstract getStorage(): IMigratorStorage;
|
||||
protected init(): Promise<void>;
|
||||
protected initServices(): void;
|
||||
protected resolve(params: { name: string; path: string }): RunnableMigration;
|
||||
protected initialize(MigrationClass: Constructor<Migration>, name: string): RunnableMigration;
|
||||
/**
|
||||
* Checks if `src` folder exists, it so, tries to adjust the migrations and seeders paths automatically to use it.
|
||||
* If there is a `dist` or `build` folder, it will be used for the JS variant (`path` option), while the `src` folder will be
|
||||
* used for the TS variant (`pathTs` option).
|
||||
*
|
||||
* If the default folder exists (e.g. `/migrations`), the config will respect that, so this auto-detection should not
|
||||
* break existing projects, only help with the new ones.
|
||||
*/
|
||||
private detectSourceFolder;
|
||||
private registerDefaultListeners;
|
||||
private emit;
|
||||
private discoverMigrations;
|
||||
private executeMigrations;
|
||||
private filterUp;
|
||||
private filterDown;
|
||||
private getMigrationFilename;
|
||||
private prefix;
|
||||
protected runMigrations(
|
||||
method: 'up' | 'down',
|
||||
options?: string | string[] | MigrateOptions,
|
||||
): Promise<MigrationInfo[]>;
|
||||
private runInTransaction;
|
||||
}
|
||||
export {};
|
||||
303
node_modules/@mikro-orm/core/utils/AbstractMigrator.js
generated
vendored
Normal file
303
node_modules/@mikro-orm/core/utils/AbstractMigrator.js
generated
vendored
Normal file
@@ -0,0 +1,303 @@
|
||||
import { Utils } from './Utils.js';
|
||||
export class AbstractMigrator {
|
||||
em;
|
||||
runner;
|
||||
storage;
|
||||
generator;
|
||||
driver;
|
||||
config;
|
||||
options;
|
||||
absolutePath;
|
||||
initialized = false;
|
||||
#listeners = new Map();
|
||||
constructor(em) {
|
||||
this.em = em;
|
||||
this.driver = this.em.getDriver();
|
||||
this.config = this.em.config;
|
||||
this.options = this.config.get('migrations');
|
||||
this.initServices();
|
||||
this.registerDefaultListeners();
|
||||
}
|
||||
/**
|
||||
* @inheritDoc
|
||||
*/
|
||||
on(eventName, listener) {
|
||||
if (!this.#listeners.has(eventName)) {
|
||||
this.#listeners.set(eventName, new Set());
|
||||
}
|
||||
this.#listeners.get(eventName).add(listener);
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* @inheritDoc
|
||||
*/
|
||||
off(eventName, listener) {
|
||||
this.#listeners.get(eventName)?.delete(listener);
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* @inheritDoc
|
||||
*/
|
||||
async getExecuted() {
|
||||
await this.init();
|
||||
return this.storage.getExecutedMigrations();
|
||||
}
|
||||
/**
|
||||
* @inheritDoc
|
||||
*/
|
||||
async getPending() {
|
||||
await this.init();
|
||||
const all = await this.discoverMigrations();
|
||||
const executed = new Set(await this.storage.executed());
|
||||
return all.filter(m => !executed.has(m.name)).map(m => ({ name: m.name, path: m.path }));
|
||||
}
|
||||
/**
|
||||
* @inheritDoc
|
||||
*/
|
||||
async up(options) {
|
||||
return this.runMigrations('up', options);
|
||||
}
|
||||
/**
|
||||
* @inheritDoc
|
||||
*/
|
||||
async down(options) {
|
||||
return this.runMigrations('down', options);
|
||||
}
|
||||
async init() {
|
||||
if (this.initialized) {
|
||||
return;
|
||||
}
|
||||
this.initialized = true;
|
||||
if (!this.options.migrationsList) {
|
||||
const { fs } = await import('@mikro-orm/core/fs-utils');
|
||||
this.detectSourceFolder(fs);
|
||||
/* v8 ignore next */
|
||||
const key =
|
||||
this.config.get('preferTs', Utils.detectTypeScriptSupport()) && this.options.pathTs ? 'pathTs' : 'path';
|
||||
this.absolutePath = fs.absolutePath(this.options[key], this.config.get('baseDir'));
|
||||
fs.ensureDir(this.absolutePath);
|
||||
}
|
||||
}
|
||||
initServices() {
|
||||
this.runner = this.createRunner();
|
||||
this.storage = this.createStorage();
|
||||
if (this.options.generator) {
|
||||
this.generator = new this.options.generator(this.driver, this.config.getNamingStrategy(), this.options);
|
||||
} else {
|
||||
this.generator = this.getDefaultGenerator();
|
||||
}
|
||||
}
|
||||
resolve(params) {
|
||||
const createMigrationHandler = async method => {
|
||||
const { fs } = await import('@mikro-orm/core/fs-utils');
|
||||
const migration = await fs.dynamicImport(params.path);
|
||||
const MigrationClass = Object.values(migration).find(
|
||||
cls => typeof cls === 'function' && typeof cls.constructor === 'function',
|
||||
);
|
||||
const instance = new MigrationClass(this.driver, this.config);
|
||||
await this.runner.run(instance, method);
|
||||
};
|
||||
return {
|
||||
name: this.storage.getMigrationName(params.name),
|
||||
path: params.path,
|
||||
up: () => createMigrationHandler('up'),
|
||||
down: () => createMigrationHandler('down'),
|
||||
};
|
||||
}
|
||||
initialize(MigrationClass, name) {
|
||||
const instance = new MigrationClass(this.driver, this.config);
|
||||
return {
|
||||
name: this.storage.getMigrationName(name),
|
||||
up: () => this.runner.run(instance, 'up'),
|
||||
down: () => this.runner.run(instance, 'down'),
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Checks if `src` folder exists, it so, tries to adjust the migrations and seeders paths automatically to use it.
|
||||
* If there is a `dist` or `build` folder, it will be used for the JS variant (`path` option), while the `src` folder will be
|
||||
* used for the TS variant (`pathTs` option).
|
||||
*
|
||||
* If the default folder exists (e.g. `/migrations`), the config will respect that, so this auto-detection should not
|
||||
* break existing projects, only help with the new ones.
|
||||
*/
|
||||
detectSourceFolder(fs) {
|
||||
const baseDir = this.config.get('baseDir');
|
||||
const defaultPath = './migrations';
|
||||
if (!fs.pathExists(baseDir + '/src')) {
|
||||
this.options.path ??= defaultPath;
|
||||
return;
|
||||
}
|
||||
const exists = fs.pathExists(`${baseDir}/${defaultPath}`);
|
||||
const distDir = fs.pathExists(baseDir + '/dist');
|
||||
const buildDir = fs.pathExists(baseDir + '/build');
|
||||
// if neither `dist` nor `build` exist, we use the `src` folder as it might be a JS project without building, but with `src` folder
|
||||
/* v8 ignore next */
|
||||
const path = distDir ? './dist' : buildDir ? './build' : './src';
|
||||
// only if the user did not provide any values and if the default path does not exist
|
||||
if (!this.options.path && !this.options.pathTs && !exists) {
|
||||
this.options.path = `${path}/migrations`;
|
||||
this.options.pathTs = './src/migrations';
|
||||
}
|
||||
}
|
||||
registerDefaultListeners() {
|
||||
/* v8 ignore else */
|
||||
if (!this.options.silent) {
|
||||
const logger = this.config.getLogger();
|
||||
this.on('migrating', event => logger.log('migrator', `Processing '${event.name}'`, { enabled: true }));
|
||||
this.on('migrated', event => logger.log('migrator', `Applied '${event.name}'`, { enabled: true }));
|
||||
this.on('reverting', event => logger.log('migrator', `Processing '${event.name}'`, { enabled: true }));
|
||||
this.on('reverted', event => logger.log('migrator', `Reverted '${event.name}'`, { enabled: true }));
|
||||
}
|
||||
}
|
||||
async emit(event, data) {
|
||||
for (const listener of this.#listeners.get(event) ?? []) {
|
||||
await listener(data);
|
||||
}
|
||||
}
|
||||
async discoverMigrations() {
|
||||
if (this.options.migrationsList) {
|
||||
return this.options.migrationsList.map(migration => {
|
||||
if (typeof migration === 'function') {
|
||||
return this.initialize(migration, migration.name);
|
||||
}
|
||||
return this.initialize(migration.class, migration.name);
|
||||
});
|
||||
}
|
||||
const { fs } = await import('@mikro-orm/core/fs-utils');
|
||||
const pattern = fs.normalizePath(this.absolutePath, this.options.glob);
|
||||
const files = fs.glob(pattern).sort();
|
||||
return files.map(filePath =>
|
||||
this.resolve({
|
||||
name: filePath.replace(/\\/g, '/').split('/').pop(),
|
||||
path: filePath,
|
||||
}),
|
||||
);
|
||||
}
|
||||
async executeMigrations(method, options = {}) {
|
||||
const all = await this.discoverMigrations();
|
||||
const executed = await this.storage.executed();
|
||||
const executedSet = new Set(executed);
|
||||
let toRun;
|
||||
if (method === 'up') {
|
||||
toRun = this.filterUp(all, executedSet, options);
|
||||
} else {
|
||||
toRun = this.filterDown(all, executed, options);
|
||||
}
|
||||
const result = [];
|
||||
const eventBefore = method === 'up' ? 'migrating' : 'reverting';
|
||||
const eventAfter = method === 'up' ? 'migrated' : 'reverted';
|
||||
for (const migration of toRun) {
|
||||
const event = { name: migration.name, path: migration.path };
|
||||
await this.emit(eventBefore, event);
|
||||
await migration[method]();
|
||||
if (method === 'up') {
|
||||
await this.storage.logMigration({ name: migration.name });
|
||||
} else {
|
||||
await this.storage.unlogMigration({ name: migration.name });
|
||||
}
|
||||
await this.emit(eventAfter, event);
|
||||
result.push(event);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
filterUp(all, executed, options) {
|
||||
let pending = all.filter(m => !executed.has(m.name));
|
||||
if (options.migrations) {
|
||||
const set = new Set(options.migrations);
|
||||
return pending.filter(m => set.has(m.name));
|
||||
}
|
||||
if (options.from) {
|
||||
const idx = all.findIndex(m => m.name === options.from);
|
||||
if (idx >= 0) {
|
||||
const names = new Set(all.slice(idx + 1).map(m => m.name));
|
||||
pending = pending.filter(m => names.has(m.name));
|
||||
}
|
||||
}
|
||||
if (options.to && typeof options.to === 'string') {
|
||||
const idx = all.findIndex(m => m.name === options.to);
|
||||
if (idx >= 0) {
|
||||
const names = new Set(all.slice(0, idx + 1).map(m => m.name));
|
||||
pending = pending.filter(m => names.has(m.name));
|
||||
}
|
||||
}
|
||||
return pending;
|
||||
}
|
||||
filterDown(all, executed, options) {
|
||||
const migrationMap = new Map(all.map(m => [m.name, m]));
|
||||
const executedReversed = [...executed].reverse();
|
||||
if (options.migrations) {
|
||||
const set = new Set(options.migrations);
|
||||
return executedReversed
|
||||
.filter(name => set.has(name))
|
||||
.map(name => migrationMap.get(name))
|
||||
.filter(Boolean);
|
||||
}
|
||||
if (options.to === 0) {
|
||||
return executedReversed.map(name => migrationMap.get(name)).filter(Boolean);
|
||||
}
|
||||
if (options.to) {
|
||||
const result = [];
|
||||
for (const name of executedReversed) {
|
||||
if (name === String(options.to)) {
|
||||
break;
|
||||
}
|
||||
const m = migrationMap.get(name);
|
||||
if (m) {
|
||||
result.push(m);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
// Default: revert last 1
|
||||
if (executedReversed.length > 0) {
|
||||
const m = migrationMap.get(executedReversed[0]);
|
||||
return m ? [m] : [];
|
||||
}
|
||||
return [];
|
||||
}
|
||||
getMigrationFilename(name) {
|
||||
name = name.replace(/\.[jt]s$/, '');
|
||||
return /^\d{14}$/.exec(name) ? this.options.fileName(name) : name;
|
||||
}
|
||||
prefix(options) {
|
||||
if (typeof options === 'string' || Array.isArray(options)) {
|
||||
return { migrations: Utils.asArray(options).map(name => this.getMigrationFilename(name)) };
|
||||
}
|
||||
if (!options) {
|
||||
return {};
|
||||
}
|
||||
const result = {};
|
||||
if (options.migrations) {
|
||||
result.migrations = options.migrations.map(name => this.getMigrationFilename(name));
|
||||
}
|
||||
if (options.from) {
|
||||
result.from = this.getMigrationFilename(String(options.from));
|
||||
}
|
||||
if (options.to && options.to !== 0) {
|
||||
result.to = this.getMigrationFilename(String(options.to));
|
||||
} else if (options.to === 0) {
|
||||
result.to = 0;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
async runMigrations(method, options) {
|
||||
await this.init();
|
||||
if (!this.options.transactional || !this.options.allOrNothing) {
|
||||
return this.executeMigrations(method, this.prefix(options));
|
||||
}
|
||||
if (Utils.isObject(options) && options.transaction) {
|
||||
return this.runInTransaction(options.transaction, method, options);
|
||||
}
|
||||
return this.driver.getConnection().transactional(trx => this.runInTransaction(trx, method, options));
|
||||
}
|
||||
async runInTransaction(trx, method, options) {
|
||||
this.runner.setMasterMigration(trx);
|
||||
this.storage.setMasterMigration(trx);
|
||||
try {
|
||||
return await this.executeMigrations(method, this.prefix(options));
|
||||
} finally {
|
||||
this.runner.unsetMasterMigration();
|
||||
this.storage.unsetMasterMigration();
|
||||
}
|
||||
}
|
||||
}
|
||||
48
node_modules/@mikro-orm/core/utils/AbstractSchemaGenerator.d.ts
generated
vendored
Normal file
48
node_modules/@mikro-orm/core/utils/AbstractSchemaGenerator.d.ts
generated
vendored
Normal file
@@ -0,0 +1,48 @@
|
||||
import type {
|
||||
ClearDatabaseOptions,
|
||||
DropSchemaOptions,
|
||||
EntityMetadata,
|
||||
ISchemaGenerator,
|
||||
UpdateSchemaOptions,
|
||||
CreateSchemaOptions,
|
||||
RefreshDatabaseOptions,
|
||||
EnsureDatabaseOptions,
|
||||
} from '../typings.js';
|
||||
import { type EntityManagerType, type IDatabaseDriver } from '../drivers/IDatabaseDriver.js';
|
||||
import type { MetadataStorage } from '../metadata/MetadataStorage.js';
|
||||
import type { Configuration } from './Configuration.js';
|
||||
export declare abstract class AbstractSchemaGenerator<D extends IDatabaseDriver> implements ISchemaGenerator {
|
||||
protected readonly em?: D[typeof EntityManagerType];
|
||||
protected readonly driver: D;
|
||||
protected readonly config: Configuration;
|
||||
protected readonly metadata: MetadataStorage;
|
||||
protected readonly platform: ReturnType<D['getPlatform']>;
|
||||
protected readonly connection: ReturnType<D['getConnection']>;
|
||||
constructor(em: D | D[typeof EntityManagerType]);
|
||||
create(options?: CreateSchemaOptions): Promise<void>;
|
||||
/**
|
||||
* Returns true if the database was created.
|
||||
*/
|
||||
ensureDatabase(options?: EnsureDatabaseOptions): Promise<boolean>;
|
||||
refresh(options?: RefreshDatabaseOptions): Promise<void>;
|
||||
clear(options?: ClearDatabaseOptions): Promise<void>;
|
||||
protected clearIdentityMap(): void;
|
||||
getCreateSchemaSQL(options?: CreateSchemaOptions): Promise<string>;
|
||||
drop(options?: DropSchemaOptions): Promise<void>;
|
||||
getDropSchemaSQL(options?: Omit<DropSchemaOptions, 'dropDb'>): Promise<string>;
|
||||
update(options?: UpdateSchemaOptions): Promise<void>;
|
||||
getUpdateSchemaSQL(options?: UpdateSchemaOptions): Promise<string>;
|
||||
getUpdateSchemaMigrationSQL(options?: UpdateSchemaOptions): Promise<{
|
||||
up: string;
|
||||
down: string;
|
||||
}>;
|
||||
/**
|
||||
* creates new database and connects to it
|
||||
*/
|
||||
createDatabase(name?: string): Promise<void>;
|
||||
dropDatabase(name?: string): Promise<void>;
|
||||
execute(query: string): Promise<void>;
|
||||
ensureIndexes(): Promise<void>;
|
||||
protected getOrderedMetadata(schema?: string): EntityMetadata[];
|
||||
protected notImplemented(): never;
|
||||
}
|
||||
125
node_modules/@mikro-orm/core/utils/AbstractSchemaGenerator.js
generated
vendored
Normal file
125
node_modules/@mikro-orm/core/utils/AbstractSchemaGenerator.js
generated
vendored
Normal file
@@ -0,0 +1,125 @@
|
||||
import { CommitOrderCalculator } from '../unit-of-work/CommitOrderCalculator.js';
|
||||
import { EntityManager } from '../EntityManager.js';
|
||||
export class AbstractSchemaGenerator {
|
||||
em;
|
||||
driver;
|
||||
config;
|
||||
metadata;
|
||||
platform;
|
||||
connection;
|
||||
constructor(em) {
|
||||
this.em = em instanceof EntityManager ? em : undefined;
|
||||
this.driver = em instanceof EntityManager ? em.getDriver() : em;
|
||||
this.config = this.driver.config;
|
||||
this.metadata = this.driver.getMetadata();
|
||||
this.platform = this.driver.getPlatform();
|
||||
this.connection = this.driver.getConnection();
|
||||
}
|
||||
async create(options) {
|
||||
this.notImplemented();
|
||||
}
|
||||
/**
|
||||
* Returns true if the database was created.
|
||||
*/
|
||||
async ensureDatabase(options) {
|
||||
this.notImplemented();
|
||||
}
|
||||
async refresh(options) {
|
||||
if (options?.dropDb) {
|
||||
const name = this.config.get('dbName');
|
||||
await this.dropDatabase(name);
|
||||
await this.createDatabase(name);
|
||||
} else {
|
||||
await this.ensureDatabase();
|
||||
await this.drop(options);
|
||||
}
|
||||
if (options?.createSchema !== false) {
|
||||
await this.create(options);
|
||||
}
|
||||
}
|
||||
async clear(options) {
|
||||
for (const meta of this.getOrderedMetadata(options?.schema).reverse()) {
|
||||
await this.driver.nativeDelete(meta.class, {}, options);
|
||||
}
|
||||
if (options?.clearIdentityMap ?? true) {
|
||||
this.clearIdentityMap();
|
||||
}
|
||||
}
|
||||
clearIdentityMap() {
|
||||
/* v8 ignore next */
|
||||
if (!this.em) {
|
||||
return;
|
||||
}
|
||||
const allowGlobalContext = this.config.get('allowGlobalContext');
|
||||
this.config.set('allowGlobalContext', true);
|
||||
this.em.clear();
|
||||
this.config.set('allowGlobalContext', allowGlobalContext);
|
||||
}
|
||||
async getCreateSchemaSQL(options) {
|
||||
this.notImplemented();
|
||||
}
|
||||
async drop(options) {
|
||||
this.notImplemented();
|
||||
}
|
||||
async getDropSchemaSQL(options) {
|
||||
this.notImplemented();
|
||||
}
|
||||
async update(options) {
|
||||
this.notImplemented();
|
||||
}
|
||||
async getUpdateSchemaSQL(options) {
|
||||
this.notImplemented();
|
||||
}
|
||||
async getUpdateSchemaMigrationSQL(options) {
|
||||
this.notImplemented();
|
||||
}
|
||||
/**
|
||||
* creates new database and connects to it
|
||||
*/
|
||||
async createDatabase(name) {
|
||||
this.notImplemented();
|
||||
}
|
||||
async dropDatabase(name) {
|
||||
this.notImplemented();
|
||||
}
|
||||
async execute(query) {
|
||||
this.notImplemented();
|
||||
}
|
||||
async ensureIndexes() {
|
||||
this.notImplemented();
|
||||
}
|
||||
getOrderedMetadata(schema) {
|
||||
const metadata = [...this.metadata.getAll().values()].filter(meta => {
|
||||
const isRootEntity = meta.root.class === meta.class;
|
||||
const isTPTChild = meta.inheritanceType === 'tpt' && meta.tptParent;
|
||||
return (isRootEntity || isTPTChild) && !meta.embeddable && !meta.virtual;
|
||||
});
|
||||
const calc = new CommitOrderCalculator();
|
||||
metadata.forEach(meta => {
|
||||
const nodeId = meta.inheritanceType === 'tpt' && meta.tptParent ? meta._id : meta.root._id;
|
||||
calc.addNode(nodeId);
|
||||
});
|
||||
let meta = metadata.pop();
|
||||
while (meta) {
|
||||
const nodeId = meta.inheritanceType === 'tpt' && meta.tptParent ? meta._id : meta.root._id;
|
||||
for (const prop of meta.relations) {
|
||||
calc.discoverProperty(prop, nodeId);
|
||||
}
|
||||
if (meta.inheritanceType === 'tpt' && meta.tptParent) {
|
||||
const parentId = meta.tptParent._id;
|
||||
calc.addDependency(parentId, nodeId, 1);
|
||||
}
|
||||
meta = metadata.pop();
|
||||
}
|
||||
return calc
|
||||
.sort()
|
||||
.map(cls => this.metadata.getById(cls))
|
||||
.filter(meta => {
|
||||
const targetSchema = meta.schema ?? this.config.get('schema', this.platform.getDefaultSchemaName());
|
||||
return schema ? [schema, '*'].includes(targetSchema) : meta.schema !== '*';
|
||||
});
|
||||
}
|
||||
notImplemented() {
|
||||
throw new Error(`This method is not supported by ${this.driver.constructor.name} driver`);
|
||||
}
|
||||
}
|
||||
6
node_modules/@mikro-orm/core/utils/AsyncContext.d.ts
generated
vendored
Normal file
6
node_modules/@mikro-orm/core/utils/AsyncContext.d.ts
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
export interface AsyncContext<T> {
|
||||
getStore(): T | undefined;
|
||||
run<R>(store: T, callback: () => R): R;
|
||||
enterWith(store: T): void;
|
||||
}
|
||||
export declare function createAsyncContext<T>(): AsyncContext<T>;
|
||||
41
node_modules/@mikro-orm/core/utils/AsyncContext.js
generated
vendored
Normal file
41
node_modules/@mikro-orm/core/utils/AsyncContext.js
generated
vendored
Normal file
@@ -0,0 +1,41 @@
|
||||
function getNodeAsyncContext() {
|
||||
const mod = globalThis.process?.getBuiltinModule?.('node:async_hooks');
|
||||
/* v8 ignore next */
|
||||
if (!mod?.AsyncLocalStorage) {
|
||||
throw new Error('AsyncLocalStorage not available');
|
||||
}
|
||||
return new mod.AsyncLocalStorage();
|
||||
}
|
||||
/* v8 ignore next */
|
||||
function createFallbackAsyncContext() {
|
||||
let store;
|
||||
// eslint-disable-next-line no-console
|
||||
console.warn('AsyncLocalStorage not available');
|
||||
return {
|
||||
getStore: () => store,
|
||||
enterWith: value => (store = value),
|
||||
run: (value, cb) => {
|
||||
const prev = store;
|
||||
store = value;
|
||||
try {
|
||||
return cb();
|
||||
} finally {
|
||||
store = prev;
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
export function createAsyncContext() {
|
||||
/* v8 ignore next */
|
||||
const ALS = globalThis.AsyncLocalStorage;
|
||||
/* v8 ignore next */
|
||||
if (typeof ALS === 'function' && ALS.prototype?.run) {
|
||||
return new ALS();
|
||||
}
|
||||
/* v8 ignore else */
|
||||
if (globalThis.process?.versions?.node) {
|
||||
return getNodeAsyncContext();
|
||||
}
|
||||
/* v8 ignore next */
|
||||
return createFallbackAsyncContext();
|
||||
}
|
||||
954
node_modules/@mikro-orm/core/utils/Configuration.d.ts
generated
vendored
Normal file
954
node_modules/@mikro-orm/core/utils/Configuration.d.ts
generated
vendored
Normal file
@@ -0,0 +1,954 @@
|
||||
import type { NamingStrategy } from '../naming-strategy/NamingStrategy.js';
|
||||
import { type CacheAdapter, type SyncCacheAdapter } from '../cache/CacheAdapter.js';
|
||||
import type { EntityRepository } from '../entity/EntityRepository.js';
|
||||
import type {
|
||||
AnyEntity,
|
||||
CompiledFunctions,
|
||||
Constructor,
|
||||
Dictionary,
|
||||
EnsureDatabaseOptions,
|
||||
EntityClass,
|
||||
EntityMetadata,
|
||||
FilterDef,
|
||||
GenerateOptions,
|
||||
Highlighter,
|
||||
HydratorConstructor,
|
||||
IHydrator,
|
||||
IMigrationGenerator,
|
||||
IPrimaryKey,
|
||||
MaybePromise,
|
||||
Migration,
|
||||
MigrationObject,
|
||||
Seeder,
|
||||
SeederObject,
|
||||
} from '../typings.js';
|
||||
import { type Logger, type LoggerNamespace, type LoggerOptions } from '../logging/Logger.js';
|
||||
import type { EntityManager } from '../EntityManager.js';
|
||||
import type { Platform } from '../platforms/Platform.js';
|
||||
import type { EntitySchema } from '../metadata/EntitySchema.js';
|
||||
import { MetadataProvider } from '../metadata/MetadataProvider.js';
|
||||
import type { MetadataStorage } from '../metadata/MetadataStorage.js';
|
||||
import type { EventSubscriber } from '../events/EventSubscriber.js';
|
||||
import type { AssignOptions } from '../entity/EntityAssigner.js';
|
||||
import type { EntityManagerType, IDatabaseDriver } from '../drivers/IDatabaseDriver.js';
|
||||
import { DataloaderType, FlushMode, LoadStrategy, PopulateHint, type EmbeddedPrefixMode } from '../enums.js';
|
||||
import { EntityComparator } from './EntityComparator.js';
|
||||
import type { Type } from '../types/Type.js';
|
||||
import type { MikroORM } from '../MikroORM.js';
|
||||
/** Holds and validates all ORM configuration options, providing access to drivers, loggers, cache adapters, and other services. */
|
||||
export declare class Configuration<
|
||||
D extends IDatabaseDriver = IDatabaseDriver,
|
||||
EM extends EntityManager<D> = D[typeof EntityManagerType] & EntityManager<D>,
|
||||
> {
|
||||
#private;
|
||||
constructor(options: Partial<Options>, validate?: boolean);
|
||||
/** Returns the database platform instance. */
|
||||
getPlatform(): ReturnType<D['getPlatform']>;
|
||||
/**
|
||||
* Gets specific configuration option. Falls back to specified `defaultValue` if provided.
|
||||
*/
|
||||
get<T extends keyof Options<D, EM>, U extends Options<D, EM>[T]>(key: T, defaultValue?: U): U;
|
||||
/** Returns all configuration options. */
|
||||
getAll(): Options<D, EM>;
|
||||
/**
|
||||
* Overrides specified configuration value.
|
||||
*/
|
||||
set<T extends keyof Options<D, EM>, U extends Options<D, EM>[T]>(key: T, value: U): void;
|
||||
/**
|
||||
* Resets the configuration to its default value
|
||||
*/
|
||||
reset<T extends keyof Options<D, EM>>(key: T): void;
|
||||
/**
|
||||
* Gets Logger instance.
|
||||
*/
|
||||
getLogger(): Logger;
|
||||
/**
|
||||
* Gets the logger instance for slow queries.
|
||||
* Falls back to the main logger if no custom slow query logger factory is configured.
|
||||
*/
|
||||
getSlowQueryLogger(): Logger;
|
||||
/** Returns the configured dataloader type, normalizing boolean values. */
|
||||
getDataloaderType(): DataloaderType;
|
||||
/** Returns the configured schema name, optionally skipping the platform's default schema. */
|
||||
getSchema(skipDefaultSchema?: boolean): string | undefined;
|
||||
/**
|
||||
* Gets current database driver instance.
|
||||
*/
|
||||
getDriver(): D;
|
||||
/** Registers a lazily-initialized extension by name. */
|
||||
registerExtension(name: string, cb: () => unknown): void;
|
||||
/** Returns a previously registered extension by name, initializing it on first access. */
|
||||
getExtension<T>(name: string): T | undefined;
|
||||
/**
|
||||
* Gets instance of NamingStrategy. (cached)
|
||||
*/
|
||||
getNamingStrategy(): NamingStrategy;
|
||||
/**
|
||||
* Gets instance of Hydrator. (cached)
|
||||
*/
|
||||
getHydrator(metadata: MetadataStorage): IHydrator;
|
||||
/**
|
||||
* Gets instance of Comparator. (cached)
|
||||
*/
|
||||
getComparator(metadata: MetadataStorage): EntityComparator;
|
||||
/**
|
||||
* Gets instance of MetadataProvider. (cached)
|
||||
*/
|
||||
getMetadataProvider(): MetadataProvider;
|
||||
/**
|
||||
* Gets instance of metadata CacheAdapter. (cached)
|
||||
*/
|
||||
getMetadataCacheAdapter(): SyncCacheAdapter;
|
||||
/**
|
||||
* Gets instance of CacheAdapter for result cache. (cached)
|
||||
*/
|
||||
getResultCacheAdapter(): CacheAdapter;
|
||||
/**
|
||||
* Gets EntityRepository class to be instantiated.
|
||||
*/
|
||||
getRepositoryClass(repository: () => EntityClass<EntityRepository<AnyEntity>>): Options<D, EM>['entityRepository'];
|
||||
/**
|
||||
* Creates instance of given service and caches it.
|
||||
*/
|
||||
getCachedService<
|
||||
T extends {
|
||||
new (...args: any[]): InstanceType<T>;
|
||||
},
|
||||
>(cls: T, ...args: ConstructorParameters<T>): InstanceType<T>;
|
||||
/** Clears the cached service instances, forcing re-creation on next access. */
|
||||
resetServiceCache(): void;
|
||||
private init;
|
||||
private sync;
|
||||
private validateOptions;
|
||||
}
|
||||
/**
|
||||
* Type helper to make it easier to use `mikro-orm.config.js`.
|
||||
*/
|
||||
export declare function defineConfig<
|
||||
D extends IDatabaseDriver = IDatabaseDriver,
|
||||
EM extends EntityManager<D> = EntityManager<D>,
|
||||
Entities extends (string | EntityClass<AnyEntity> | EntitySchema)[] = (
|
||||
| string
|
||||
| EntityClass<AnyEntity>
|
||||
| EntitySchema
|
||||
)[],
|
||||
>(options: Partial<Options<D, EM, Entities>>): Partial<Options<D, EM, Entities>>;
|
||||
/**
|
||||
* Connection configuration options for database connections.
|
||||
* @see https://mikro-orm.io/docs/configuration#connection
|
||||
*/
|
||||
export interface ConnectionOptions {
|
||||
/** Name of the database to connect to. */
|
||||
dbName?: string;
|
||||
/** Default database schema to use. */
|
||||
schema?: string;
|
||||
/** Name of the connection (used for logging when replicas are used). */
|
||||
name?: string;
|
||||
/** Full client connection URL. Overrides individual connection options. */
|
||||
clientUrl?: string;
|
||||
/** Database server hostname. */
|
||||
host?: string;
|
||||
/** Database server port number. */
|
||||
port?: number;
|
||||
/** Database user name. */
|
||||
user?: string;
|
||||
/**
|
||||
* Database password. Can be a string or a callback function that returns the password.
|
||||
* The callback is useful for short-lived tokens from cloud providers.
|
||||
* @example
|
||||
* password: async () => someCallToGetTheToken()
|
||||
*/
|
||||
password?: string | (() => MaybePromise<string>);
|
||||
/** Character set for the connection. */
|
||||
charset?: string;
|
||||
/** Collation for the connection. */
|
||||
collate?: string;
|
||||
/**
|
||||
* Enable multiple statements in a single query.
|
||||
* Required for importing database dump files.
|
||||
* Should be disabled in production for security.
|
||||
* @default false
|
||||
*/
|
||||
multipleStatements?: boolean;
|
||||
/** Connection pool configuration. */
|
||||
pool?: PoolConfig;
|
||||
/**
|
||||
* Additional driver-specific options.
|
||||
* The object will be deeply merged with internal driver options.
|
||||
*/
|
||||
driverOptions?: Dictionary;
|
||||
/** Callback to execute when a new connection is created. */
|
||||
onCreateConnection?: (connection: unknown) => Promise<void>;
|
||||
/**
|
||||
* SQLite/libSQL: databases to attach on connection.
|
||||
* Each attached database acts as a schema, accessible via `schema.table` syntax.
|
||||
* Entities can reference attached databases via `@Entity({ schema: 'db_name' })`.
|
||||
* Note: Not supported for remote libSQL connections.
|
||||
* @example
|
||||
* attachDatabases: [
|
||||
* { name: 'users_db', path: './users.db' },
|
||||
* { name: 'logs_db', path: '/var/data/logs.db' },
|
||||
* ]
|
||||
*/
|
||||
attachDatabases?: {
|
||||
name: string;
|
||||
path: string;
|
||||
}[];
|
||||
}
|
||||
/**
|
||||
* Configuration options for database migrations.
|
||||
* @see https://mikro-orm.io/docs/migrations
|
||||
*/
|
||||
export type MigrationsOptions = {
|
||||
/**
|
||||
* Name of the migrations table.
|
||||
* @default 'mikro_orm_migrations'
|
||||
*/
|
||||
tableName?: string;
|
||||
/**
|
||||
* Path to the folder with migration files (for compiled JavaScript files).
|
||||
* @default './migrations'
|
||||
*/
|
||||
path?: string;
|
||||
/**
|
||||
* Path to the folder with migration files (for TypeScript source files).
|
||||
* Used when running in TypeScript mode.
|
||||
*/
|
||||
pathTs?: string;
|
||||
/**
|
||||
* Glob pattern to match migration files.
|
||||
* @default '!(*.d).{js,ts,cjs}'
|
||||
*/
|
||||
glob?: string;
|
||||
/**
|
||||
* Disable logging for migration operations.
|
||||
* @default false
|
||||
*/
|
||||
silent?: boolean;
|
||||
/**
|
||||
* Run each migration inside a transaction.
|
||||
* @default true
|
||||
*/
|
||||
transactional?: boolean;
|
||||
/**
|
||||
* Try to disable foreign key checks during migrations.
|
||||
* @default false
|
||||
*/
|
||||
disableForeignKeys?: boolean;
|
||||
/**
|
||||
* Run all migrations in the current batch in a master transaction.
|
||||
* @default true
|
||||
*/
|
||||
allOrNothing?: boolean;
|
||||
/**
|
||||
* Allow dropping tables during schema diff.
|
||||
* @default true
|
||||
*/
|
||||
dropTables?: boolean;
|
||||
/**
|
||||
* Safe mode - only allow adding new tables and columns, never dropping existing ones.
|
||||
* @default false
|
||||
*/
|
||||
safe?: boolean;
|
||||
/**
|
||||
* Create a snapshot of the current schema after migration generation.
|
||||
* @default true
|
||||
*/
|
||||
snapshot?: boolean;
|
||||
/** Custom name for the snapshot file. */
|
||||
snapshotName?: string;
|
||||
/**
|
||||
* File extension for generated migration files.
|
||||
* @default 'ts'
|
||||
*/
|
||||
emit?: 'js' | 'ts' | 'cjs';
|
||||
/** Custom migration generator class. */
|
||||
generator?: Constructor<IMigrationGenerator>;
|
||||
/**
|
||||
* Custom function to generate migration file names.
|
||||
* @default (timestamp, name) => `Migration${timestamp}${name ? '_' + name : ''}`
|
||||
*/
|
||||
fileName?: (timestamp: string, name?: string) => string;
|
||||
/** List of migration classes or objects to use instead of file-based discovery. */
|
||||
migrationsList?: (MigrationObject | Constructor<Migration>)[];
|
||||
};
|
||||
/**
|
||||
* Configuration options for database seeders.
|
||||
* @see https://mikro-orm.io/docs/seeding
|
||||
*/
|
||||
export interface SeederOptions {
|
||||
/**
|
||||
* Path to the folder with seeder files (for compiled JavaScript files).
|
||||
* @default './seeders'
|
||||
*/
|
||||
path?: string;
|
||||
/**
|
||||
* Path to the folder with seeder files (for TypeScript source files).
|
||||
* Used when running in TypeScript mode.
|
||||
*/
|
||||
pathTs?: string;
|
||||
/**
|
||||
* Glob pattern to match seeder files.
|
||||
* @default '!(*.d).{js,ts}'
|
||||
*/
|
||||
glob?: string;
|
||||
/**
|
||||
* Name of the default seeder class to run.
|
||||
* @default 'DatabaseSeeder'
|
||||
*/
|
||||
defaultSeeder?: string;
|
||||
/**
|
||||
* File extension for generated seeder files.
|
||||
* @default 'ts'
|
||||
*/
|
||||
emit?: 'js' | 'ts';
|
||||
/**
|
||||
* Custom function to generate seeder file names.
|
||||
* @default (className) => className
|
||||
*/
|
||||
fileName?: (className: string) => string;
|
||||
/** List of seeder classes or objects to use instead of file-based discovery. */
|
||||
seedersList?: (SeederObject | Constructor<Seeder>)[];
|
||||
}
|
||||
/**
|
||||
* Connection pool configuration.
|
||||
* @see https://mikro-orm.io/docs/configuration#connection
|
||||
*/
|
||||
export interface PoolConfig {
|
||||
/** Minimum number of connections to keep in the pool. */
|
||||
min?: number;
|
||||
/** Maximum number of connections allowed in the pool. */
|
||||
max?: number;
|
||||
/** Time in milliseconds before an idle connection is closed. */
|
||||
idleTimeoutMillis?: number;
|
||||
}
|
||||
/**
|
||||
* Configuration options for metadata discovery.
|
||||
* @see https://mikro-orm.io/docs/configuration#entity-discovery
|
||||
*/
|
||||
export interface MetadataDiscoveryOptions {
|
||||
/**
|
||||
* Throw an error when no entities are discovered.
|
||||
* @default true
|
||||
*/
|
||||
warnWhenNoEntities?: boolean;
|
||||
/**
|
||||
* Check for duplicate table names and throw an error if found.
|
||||
* @default true
|
||||
*/
|
||||
checkDuplicateTableNames?: boolean;
|
||||
/**
|
||||
* Check for duplicate field names and throw an error if found.
|
||||
* @default true
|
||||
*/
|
||||
checkDuplicateFieldNames?: boolean;
|
||||
/**
|
||||
* Check for composite primary keys marked as `persist: false` and throw an error if found.
|
||||
* @default true
|
||||
*/
|
||||
checkNonPersistentCompositeProps?: boolean;
|
||||
/**
|
||||
* Infer default values from property initializers when possible
|
||||
* (if the constructor can be invoked without parameters).
|
||||
* @default true
|
||||
*/
|
||||
inferDefaultValues?: boolean;
|
||||
/**
|
||||
* Custom callback to override default type mapping.
|
||||
* Allows customizing how property types are mapped to database column types.
|
||||
* @example
|
||||
* getMappedType(type, platform) {
|
||||
* if (type === 'string') {
|
||||
* return Type.getType(TextType);
|
||||
* }
|
||||
* return platform.getDefaultMappedType(type);
|
||||
* }
|
||||
*/
|
||||
getMappedType?: (type: string, platform: Platform) => Type<unknown> | undefined;
|
||||
/**
|
||||
* Hook called for each entity metadata during discovery.
|
||||
* Can be used to modify metadata dynamically before defaults are filled in.
|
||||
* The hook can be async when using `MikroORM.init()`.
|
||||
*/
|
||||
onMetadata?: (meta: EntityMetadata, platform: Platform) => MaybePromise<void>;
|
||||
/**
|
||||
* Hook called after all entities are discovered.
|
||||
* Can be used to access and modify all metadata at once.
|
||||
*/
|
||||
afterDiscovered?: (storage: MetadataStorage, platform: Platform) => MaybePromise<void>;
|
||||
/** Path to the TypeScript configuration file for ts-morph metadata provider. */
|
||||
tsConfigPath?: string;
|
||||
/** @internal */
|
||||
skipSyncDiscovery?: boolean;
|
||||
}
|
||||
/**
|
||||
* MikroORM configuration options.
|
||||
* @see https://mikro-orm.io/docs/configuration
|
||||
*/
|
||||
export interface Options<
|
||||
Driver extends IDatabaseDriver = IDatabaseDriver,
|
||||
EM extends EntityManager<Driver> & Driver[typeof EntityManagerType] = EntityManager<Driver> &
|
||||
Driver[typeof EntityManagerType],
|
||||
Entities extends (string | EntityClass<AnyEntity> | EntitySchema)[] = (
|
||||
| string
|
||||
| EntityClass<AnyEntity>
|
||||
| EntitySchema
|
||||
)[],
|
||||
> extends ConnectionOptions {
|
||||
/** Connection pool configuration. */
|
||||
pool: PoolConfig;
|
||||
/**
|
||||
* Additional driver-specific options.
|
||||
* The object will be deeply merged with internal driver options.
|
||||
*/
|
||||
driverOptions: Dictionary;
|
||||
/**
|
||||
* Array of entity classes or paths to entity modules.
|
||||
* Paths support glob patterns for automatic discovery.
|
||||
* @example
|
||||
* entities: [Author, Book, Publisher] // class references
|
||||
* entities: ['./dist/entities'] // folder paths
|
||||
*/
|
||||
entities: Entities;
|
||||
/**
|
||||
* Array of TypeScript entity source paths.
|
||||
* Used when running in TypeScript mode (e.g., via `tsx` or `swc`).
|
||||
* Should always be specified when using folder-based discovery.
|
||||
* @example
|
||||
* entitiesTs: ['./src/entities']
|
||||
*/
|
||||
entitiesTs: Entities;
|
||||
/**
|
||||
* ORM extensions to register (e.g., Migrator, EntityGenerator, SeedManager).
|
||||
* Extensions registered here are available via shortcuts like `orm.migrator`.
|
||||
* @example
|
||||
* extensions: [Migrator, EntityGenerator, SeedManager]
|
||||
*/
|
||||
extensions: {
|
||||
register: (orm: MikroORM) => void;
|
||||
}[];
|
||||
/**
|
||||
* Event subscribers to register.
|
||||
* Can be class references or instances.
|
||||
*/
|
||||
subscribers: Iterable<EventSubscriber | Constructor<EventSubscriber>>;
|
||||
/**
|
||||
* Global entity filters to apply.
|
||||
* Filters are applied by default unless explicitly disabled.
|
||||
* @see https://mikro-orm.io/docs/filters
|
||||
*/
|
||||
filters: Dictionary<
|
||||
{
|
||||
name?: string;
|
||||
} & Omit<FilterDef, 'name'>
|
||||
>;
|
||||
/**
|
||||
* Metadata discovery configuration options.
|
||||
* Controls how entities are discovered and validated.
|
||||
*/
|
||||
discovery: MetadataDiscoveryOptions;
|
||||
/**
|
||||
* Database driver class to use.
|
||||
* Should be imported from the specific driver package (e.g. `@mikro-orm/mysql`, `@mikro-orm/postgresql`).
|
||||
* Alternatively, use the `defineConfig` helper or `MikroORM` class exported from the driver package.
|
||||
* @example
|
||||
* import { MySqlDriver } from '@mikro-orm/mysql';
|
||||
*
|
||||
* MikroORM.init({
|
||||
* driver: MySqlDriver,
|
||||
* dbName: 'my_db',
|
||||
* });
|
||||
*/
|
||||
driver?: {
|
||||
new (config: Configuration): Driver;
|
||||
};
|
||||
/**
|
||||
* Custom naming strategy class for mapping entity/property names to database table/column names.
|
||||
* Built-in options: `UnderscoreNamingStrategy`, `MongoNamingStrategy`, `EntityCaseNamingStrategy`.
|
||||
* @see https://mikro-orm.io/docs/naming-strategy
|
||||
*/
|
||||
namingStrategy?: {
|
||||
new (): NamingStrategy;
|
||||
};
|
||||
/**
|
||||
* Enable implicit transactions for all write operations.
|
||||
* When enabled, all queries will be wrapped in a transaction.
|
||||
* Disabled for MongoDB driver by default.
|
||||
*/
|
||||
implicitTransactions?: boolean;
|
||||
/**
|
||||
* Disable all transactions.
|
||||
* When enabled, no queries will be wrapped in transactions, even when explicitly requested.
|
||||
* @default false
|
||||
*/
|
||||
disableTransactions?: boolean;
|
||||
/**
|
||||
* Enable verbose logging of internal operations.
|
||||
* @default false
|
||||
*/
|
||||
verbose: boolean;
|
||||
/**
|
||||
* Ignore `undefined` values in find queries instead of treating them as `null`.
|
||||
* @default false
|
||||
* @example
|
||||
* // With ignoreUndefinedInQuery: true
|
||||
* em.find(User, { email: undefined }) // resolves to em.find(User, {})
|
||||
*/
|
||||
ignoreUndefinedInQuery: boolean;
|
||||
/**
|
||||
* Hook to modify SQL queries before execution.
|
||||
* Useful for adding observability hints or query modifications.
|
||||
* @param sql - The generated SQL query
|
||||
* @param params - Query parameters
|
||||
* @returns Modified SQL query
|
||||
*/
|
||||
onQuery: (sql: string, params: readonly unknown[]) => string;
|
||||
/**
|
||||
* Automatically join the owning side of 1:1 relations when querying the inverse side.
|
||||
* @default true
|
||||
*/
|
||||
autoJoinOneToOneOwner: boolean;
|
||||
/**
|
||||
* Automatically join M:1 and 1:1 relations when filters are defined on them.
|
||||
* Important for implementing soft deletes via filters.
|
||||
* @default true
|
||||
*/
|
||||
autoJoinRefsForFilters: boolean;
|
||||
/**
|
||||
* Apply filters to relations in queries.
|
||||
* @default true
|
||||
*/
|
||||
filtersOnRelations: boolean;
|
||||
/**
|
||||
* Enable propagation of changes on entity prototypes.
|
||||
* @default true
|
||||
*/
|
||||
propagationOnPrototype: boolean;
|
||||
/**
|
||||
* Mark all relations as populated after flush for new entities.
|
||||
* This aligns serialized output of loaded entities and just-inserted ones.
|
||||
* @default true
|
||||
*/
|
||||
populateAfterFlush: boolean;
|
||||
/**
|
||||
* Serialization options for `toJSON()` and `serialize()` methods.
|
||||
*/
|
||||
serialization: {
|
||||
/**
|
||||
* Include primary keys in serialized output.
|
||||
* @default true
|
||||
*/
|
||||
includePrimaryKeys?: boolean;
|
||||
/**
|
||||
* Enforce unpopulated references to be returned as objects.
|
||||
* When enabled, references are serialized as `{ author: { id: 1 } }` instead of `{ author: 1 }`.
|
||||
* @default false
|
||||
*/
|
||||
forceObject?: boolean;
|
||||
};
|
||||
/**
|
||||
* Default options for entity assignment via `em.assign()`.
|
||||
* @see https://mikro-orm.io/docs/entity-helper
|
||||
*/
|
||||
assign: AssignOptions<boolean>;
|
||||
/**
|
||||
* Automatically call `em.persist()` on entities created via `em.create()`.
|
||||
* @default true
|
||||
*/
|
||||
persistOnCreate: boolean;
|
||||
/**
|
||||
* When upsert creates a new entity, mark it as managed in the identity map.
|
||||
* @default true
|
||||
*/
|
||||
upsertManaged: boolean;
|
||||
/**
|
||||
* Force use of entity constructors when creating entity instances.
|
||||
* Required when using native private properties inside entities.
|
||||
* Can be `true` for all entities or an array of specific entity classes/names.
|
||||
* @default false
|
||||
*/
|
||||
forceEntityConstructor: boolean | (Constructor<AnyEntity> | string)[];
|
||||
/**
|
||||
* Convert `null` values from database to `undefined` when hydrating entities.
|
||||
* @default false
|
||||
*/
|
||||
forceUndefined: boolean;
|
||||
/**
|
||||
* Property `onCreate` hooks are normally executed during `flush` operation.
|
||||
* With this option, they will be processed early inside `em.create()` method.
|
||||
* @default true
|
||||
*/
|
||||
processOnCreateHooksEarly: boolean;
|
||||
/**
|
||||
* Force `Date` values to be stored in UTC for datetime columns without timezone.
|
||||
* Works for MySQL (`datetime` type), PostgreSQL (`timestamp` type), and MSSQL (`datetime`/`datetime2` types).
|
||||
* SQLite does this by default.
|
||||
* @default true
|
||||
*/
|
||||
forceUtcTimezone: boolean;
|
||||
/**
|
||||
* Timezone to use for date operations.
|
||||
* @example '+02:00'
|
||||
*/
|
||||
timezone?: string;
|
||||
/**
|
||||
* Ensure the database exists when initializing the ORM.
|
||||
* When `true`, will create the database if it doesn't exist.
|
||||
* @default true
|
||||
*/
|
||||
ensureDatabase: boolean | EnsureDatabaseOptions;
|
||||
/**
|
||||
* Ensure database indexes exist on startup. This option works only with the MongoDB driver.
|
||||
* When enabled, indexes will be created based on entity metadata.
|
||||
* @default false
|
||||
*/
|
||||
ensureIndexes: boolean;
|
||||
/**
|
||||
* Use batch insert queries for better performance.
|
||||
* @default true
|
||||
*/
|
||||
useBatchInserts?: boolean;
|
||||
/**
|
||||
* Use batch update queries for better performance.
|
||||
* @default true
|
||||
*/
|
||||
useBatchUpdates?: boolean;
|
||||
/**
|
||||
* Number of entities to process in each batch for batch inserts/updates.
|
||||
* @default 300
|
||||
*/
|
||||
batchSize: number;
|
||||
/**
|
||||
* Custom hydrator class for assigning database values to entities.
|
||||
* @default ObjectHydrator
|
||||
*/
|
||||
hydrator: HydratorConstructor;
|
||||
/**
|
||||
* Pre-generated compiled functions for hydration and comparison.
|
||||
* Use the `compile` CLI command to create these functions.
|
||||
* Enables deployment to runtimes that prohibit `new Function`/eval (e.g. Cloudflare Workers).
|
||||
*/
|
||||
compiledFunctions?: CompiledFunctions;
|
||||
/**
|
||||
* Default loading strategy for relations.
|
||||
* - `'joined'`: Use SQL JOINs (single query, may cause cartesian product)
|
||||
* - `'select-in'`: Use separate SELECT IN queries (multiple queries)
|
||||
* - `'balanced'`: Decides based on relation type and context.
|
||||
* @default 'balanced'
|
||||
*/
|
||||
loadStrategy: LoadStrategy | `${LoadStrategy}`;
|
||||
/**
|
||||
* Enable dataloader for batching reference loading.
|
||||
* - `true` or `DataloaderType.ALL`: Enable for all relation types
|
||||
* - `false` or `DataloaderType.NONE`: Disable dataloader
|
||||
* - `DataloaderType.REFERENCE`: Enable only for scalar references
|
||||
* - `DataloaderType.COLLECTION`: Enable only for collections
|
||||
* @default DataloaderType.NONE
|
||||
*/
|
||||
dataloader: DataloaderType | boolean;
|
||||
/**
|
||||
* Determines how where conditions are applied during population.
|
||||
* - `'all'`: Populate all matching relations (default in v5+)
|
||||
* - `'infer'`: Infer conditions from the original query (v4 behavior)
|
||||
* @default 'all'
|
||||
*/
|
||||
populateWhere: PopulateHint | `${PopulateHint}`;
|
||||
/**
|
||||
* Default flush mode for the entity manager.
|
||||
* - `'commit'`: Flush only on explicit commit
|
||||
* - `'auto'`: Flush before queries when needed
|
||||
* - `'always'`: Always flush before queries
|
||||
* @default 'auto'
|
||||
*/
|
||||
flushMode: FlushMode | `${FlushMode}`;
|
||||
/**
|
||||
* Custom base repository class for all entities.
|
||||
* Entity-specific repositories can still be defined and will take precedence.
|
||||
* @see https://mikro-orm.io/docs/repositories
|
||||
*/
|
||||
entityRepository?: EntityClass<EntityRepository<any>>;
|
||||
/**
|
||||
* Custom entity manager class to use.
|
||||
*/
|
||||
entityManager?: Constructor<EM>;
|
||||
/**
|
||||
* Read replica connection configurations.
|
||||
* Each replica can override parts of the main connection options.
|
||||
* @see https://mikro-orm.io/docs/read-connections
|
||||
*/
|
||||
replicas?: ConnectionOptions[];
|
||||
/**
|
||||
* Validate that required properties are set on new entities before insert.
|
||||
* @default true
|
||||
*/
|
||||
validateRequired: boolean;
|
||||
/**
|
||||
* Callback to get the current request context's EntityManager.
|
||||
* Used for automatic context propagation in web frameworks.
|
||||
* @default RequestContext.getEntityManager
|
||||
*/
|
||||
context: (name: string) => EntityManager | undefined;
|
||||
/**
|
||||
* Name of the context for multi-ORM setups.
|
||||
* @default 'default'
|
||||
*/
|
||||
contextName: string;
|
||||
/**
|
||||
* Allow using the global EntityManager without a request context.
|
||||
* Not recommended for production - each request should have its own context.
|
||||
* Can also be set via `MIKRO_ORM_ALLOW_GLOBAL_CONTEXT` environment variable.
|
||||
* @default false
|
||||
*/
|
||||
allowGlobalContext: boolean;
|
||||
/**
|
||||
* When enabled, environment variables take precedence over explicitly provided config options.
|
||||
* By default, explicit options win over env vars.
|
||||
* @default false
|
||||
*/
|
||||
preferEnvVars?: boolean;
|
||||
/**
|
||||
* Disable the identity map.
|
||||
* When disabled, each query returns new entity instances.
|
||||
* Not recommended for most use cases.
|
||||
* @default false
|
||||
*/
|
||||
disableIdentityMap?: boolean;
|
||||
/**
|
||||
* Custom logger function for ORM output.
|
||||
* @default console.log
|
||||
*/
|
||||
logger: (message: string) => void;
|
||||
/**
|
||||
* Enable colored output in logs.
|
||||
* @default true
|
||||
*/
|
||||
colors: boolean;
|
||||
/**
|
||||
* Factory function to create a custom logger instance.
|
||||
* @default DefaultLogger.create
|
||||
*/
|
||||
loggerFactory?: (options: LoggerOptions) => Logger;
|
||||
/**
|
||||
* Threshold in milliseconds for logging slow queries.
|
||||
* Queries taking at least this long will be logged via the 'slow-query' namespace at warning level.
|
||||
* Slow query logs are always emitted when the threshold is met, regardless of the `debug` setting.
|
||||
* Set to `0` to log every query as slow.
|
||||
* @default undefined (slow query logging disabled)
|
||||
*/
|
||||
slowQueryThreshold?: number;
|
||||
/**
|
||||
* Factory function to create a custom logger instance for slow queries.
|
||||
* Has the same shape as `loggerFactory`. When not provided, the main logger instance is used.
|
||||
*
|
||||
* Note: slow query log entries are emitted with `context.enabled = true` to bypass the
|
||||
* debug-mode check. Custom logger implementations must respect `context.enabled` in their
|
||||
* `isEnabled()` method (as `DefaultLogger` does) to ensure slow query logs are always emitted.
|
||||
* @default undefined (falls back to main logger)
|
||||
*/
|
||||
slowQueryLoggerFactory?: (options: LoggerOptions) => Logger;
|
||||
/**
|
||||
* Custom error handler for `em.findOneOrFail()` when no entity is found.
|
||||
* @param entityName - Name of the entity being queried
|
||||
* @param where - Query conditions
|
||||
* @returns Error instance to throw
|
||||
*/
|
||||
findOneOrFailHandler: (entityName: string, where: Dictionary | IPrimaryKey) => Error;
|
||||
/**
|
||||
* Custom error handler for `em.findExactlyOneOrFail()` when entity count is not exactly one.
|
||||
* Used when strict mode is enabled.
|
||||
* @param entityName - Name of the entity being queried
|
||||
* @param where - Query conditions
|
||||
* @returns Error instance to throw
|
||||
*/
|
||||
findExactlyOneOrFailHandler: (entityName: string, where: Dictionary | IPrimaryKey) => Error;
|
||||
/**
|
||||
* Enable debug logging.
|
||||
* Can be `true` for all namespaces or an array of specific namespaces.
|
||||
* Available namespaces: `'query'`, `'query-params'`, `'discovery'`, `'info'`.
|
||||
* @default false
|
||||
* @see https://mikro-orm.io/docs/logging
|
||||
*/
|
||||
debug: boolean | LoggerNamespace[];
|
||||
/**
|
||||
* Ignore deprecation warnings.
|
||||
* Can be `true` to ignore all or an array of specific deprecation labels.
|
||||
* @default false
|
||||
* @see https://mikro-orm.io/docs/logging#deprecation-warnings
|
||||
*/
|
||||
ignoreDeprecations: boolean | string[];
|
||||
/**
|
||||
* Syntax highlighter for SQL queries in logs.
|
||||
* @default NullHighlighter
|
||||
*/
|
||||
highlighter: Highlighter;
|
||||
/**
|
||||
* Force the ORM to use TypeScript options regardless of detection.
|
||||
* Uses `entitiesTs` for discovery and `pathTs` for migrations/seeders.
|
||||
* Should only be used for tests, not production builds.
|
||||
* @default false
|
||||
*/
|
||||
preferTs?: boolean;
|
||||
/**
|
||||
* Base directory for resolving relative paths.
|
||||
* @default process.cwd()
|
||||
*/
|
||||
baseDir: string;
|
||||
/**
|
||||
* Migration configuration options.
|
||||
* @see https://mikro-orm.io/docs/migrations
|
||||
*/
|
||||
migrations: MigrationsOptions;
|
||||
/**
|
||||
* Schema generator configuration options.
|
||||
*/
|
||||
schemaGenerator: {
|
||||
/**
|
||||
* Try to disable foreign key checks during schema operations.
|
||||
* @default false
|
||||
*/
|
||||
disableForeignKeys?: boolean;
|
||||
/**
|
||||
* Try to disable foreign key checks during `schema.clear()`. Enabled by default for MySQL/MariaDB.
|
||||
*/
|
||||
disableForeignKeysForClear?: boolean;
|
||||
/**
|
||||
* Generate foreign key constraints.
|
||||
* @default true
|
||||
*/
|
||||
createForeignKeyConstraints?: boolean;
|
||||
/**
|
||||
* Schema names to ignore when comparing schemas.
|
||||
* @default []
|
||||
*/
|
||||
ignoreSchema?: string[];
|
||||
/**
|
||||
* Table names or patterns to skip during schema generation.
|
||||
* @default []
|
||||
*/
|
||||
skipTables?: (string | RegExp)[];
|
||||
/**
|
||||
* View names or patterns to skip during schema generation (e.g. PostGIS system views).
|
||||
* @default []
|
||||
*/
|
||||
skipViews?: (string | RegExp)[];
|
||||
/**
|
||||
* Column names or patterns to skip during schema generation, keyed by table name.
|
||||
* @default {}
|
||||
*/
|
||||
skipColumns?: Dictionary<(string | RegExp)[]>;
|
||||
/**
|
||||
* Database name to use for management operations (e.g., creating/dropping databases).
|
||||
*/
|
||||
managementDbName?: string;
|
||||
/**
|
||||
* Default ON UPDATE rule for foreign keys.
|
||||
* When not set, no rule is emitted and the database uses its native default (NO ACTION/RESTRICT).
|
||||
*/
|
||||
defaultUpdateRule?: 'cascade' | 'no action' | 'set null' | 'set default' | 'restrict';
|
||||
/**
|
||||
* Default ON DELETE rule for foreign keys.
|
||||
* When not set, no rule is emitted and the database uses its native default (NO ACTION/RESTRICT).
|
||||
*/
|
||||
defaultDeleteRule?: 'cascade' | 'no action' | 'set null' | 'set default' | 'restrict';
|
||||
tableSpace?: string;
|
||||
};
|
||||
/**
|
||||
* Embeddable entity configuration options.
|
||||
*/
|
||||
embeddables: {
|
||||
/**
|
||||
* Mode for generating column prefixes for embedded properties.
|
||||
* @default 'relative'
|
||||
*/
|
||||
prefixMode: EmbeddedPrefixMode;
|
||||
};
|
||||
/**
|
||||
* Entity generator (code generation) configuration options.
|
||||
* @see https://mikro-orm.io/docs/entity-generator
|
||||
*/
|
||||
entityGenerator: GenerateOptions;
|
||||
/**
|
||||
* Metadata cache configuration for improved startup performance.
|
||||
* @see https://mikro-orm.io/docs/metadata-cache
|
||||
*/
|
||||
metadataCache: {
|
||||
/**
|
||||
* Enable metadata caching.
|
||||
* Defaults based on the metadata provider's `useCache()` method.
|
||||
*/
|
||||
enabled?: boolean;
|
||||
/**
|
||||
* Combine all metadata into a single cache file.
|
||||
* Can be `true` for default path or a custom path string.
|
||||
*/
|
||||
combined?: boolean | string;
|
||||
/**
|
||||
* Pretty print JSON cache files.
|
||||
* @default false
|
||||
*/
|
||||
pretty?: boolean;
|
||||
/**
|
||||
* Cache adapter class to use. When cache is enabled, and no adapter is provided explicitly, {@link FileCacheAdapter} is used automatically - but only if you use the async `MikroORM.init()` method.
|
||||
*/
|
||||
adapter?: {
|
||||
new (...params: any[]): SyncCacheAdapter;
|
||||
};
|
||||
/**
|
||||
* Options passed to the cache adapter constructor.
|
||||
* @default { cacheDir: process.cwd() + '/temp' }
|
||||
*/
|
||||
options?: Dictionary;
|
||||
};
|
||||
/**
|
||||
* Result cache configuration for query result caching.
|
||||
*/
|
||||
resultCache: {
|
||||
/**
|
||||
* Default cache expiration time in milliseconds.
|
||||
* @default 1000
|
||||
*/
|
||||
expiration?: number;
|
||||
/**
|
||||
* Cache adapter class to use.
|
||||
* @default MemoryCacheAdapter
|
||||
*/
|
||||
adapter?: {
|
||||
new (...params: any[]): CacheAdapter;
|
||||
};
|
||||
/**
|
||||
* Options passed to the cache adapter constructor.
|
||||
* @default {}
|
||||
*/
|
||||
options?: Dictionary;
|
||||
/**
|
||||
* Enable global result caching for all queries.
|
||||
* Can be `true`, an expiration number, or a tuple of `[key, expiration]`.
|
||||
*/
|
||||
global?: boolean | number | [string, number];
|
||||
};
|
||||
/**
|
||||
* Metadata provider class for entity discovery.
|
||||
* Built-in options: `ReflectMetadataProvider` (default), `TsMorphMetadataProvider`.
|
||||
* @default ReflectMetadataProvider
|
||||
* @see https://mikro-orm.io/docs/metadata-providers
|
||||
*/
|
||||
metadataProvider: {
|
||||
new (config: Configuration): MetadataProvider;
|
||||
useCache?: MetadataProvider['useCache'];
|
||||
};
|
||||
/**
|
||||
* Seeder configuration options.
|
||||
* @see https://mikro-orm.io/docs/seeding
|
||||
*/
|
||||
seeder: SeederOptions;
|
||||
/**
|
||||
* Prefer read replicas for read operations when available.
|
||||
* @default true
|
||||
*/
|
||||
preferReadReplicas: boolean;
|
||||
/**
|
||||
* Custom dynamic import provider for loading modules.
|
||||
* @default (id) => import(id)
|
||||
*/
|
||||
dynamicImportProvider: (id: string) => Promise<unknown>;
|
||||
}
|
||||
405
node_modules/@mikro-orm/core/utils/Configuration.js
generated
vendored
Normal file
405
node_modules/@mikro-orm/core/utils/Configuration.js
generated
vendored
Normal file
@@ -0,0 +1,405 @@
|
||||
import { NullCacheAdapter } from '../cache/NullCacheAdapter.js';
|
||||
import { ObjectHydrator } from '../hydration/ObjectHydrator.js';
|
||||
import { NullHighlighter } from '../utils/NullHighlighter.js';
|
||||
import { DefaultLogger } from '../logging/DefaultLogger.js';
|
||||
import { colors } from '../logging/colors.js';
|
||||
import { Utils } from '../utils/Utils.js';
|
||||
import { MetadataProvider } from '../metadata/MetadataProvider.js';
|
||||
import { NotFoundError } from '../errors.js';
|
||||
import { RequestContext } from './RequestContext.js';
|
||||
import { DataloaderType, FlushMode, LoadStrategy, PopulateHint } from '../enums.js';
|
||||
import { MemoryCacheAdapter } from '../cache/MemoryCacheAdapter.js';
|
||||
import { EntityComparator } from './EntityComparator.js';
|
||||
import { setEnv } from './env-vars.js';
|
||||
// Default values for every ORM configuration option. Merged (deeply, via
// Utils.mergeConfig) with user-provided options in the Configuration
// constructor below; user options win over these defaults.
const DEFAULTS = {
    pool: {},
    entities: [],
    entitiesTs: [],
    extensions: [],
    subscribers: [],
    filters: {},
    // Metadata discovery: all validation checks are on by default.
    discovery: {
        warnWhenNoEntities: true,
        checkDuplicateTableNames: true,
        checkDuplicateFieldNames: true,
        checkDuplicateEntities: true,
        checkNonPersistentCompositeProps: true,
        inferDefaultValues: true,
    },
    validateRequired: true,
    // Request-scoped EntityManager resolution goes through RequestContext.
    context: name => RequestContext.getEntityManager(name),
    contextName: 'default',
    allowGlobalContext: false,
    // eslint-disable-next-line no-console
    logger: console.log.bind(console),
    colors: true,
    findOneOrFailHandler: (entityName, where) => NotFoundError.findOneFailed(entityName, where),
    findExactlyOneOrFailHandler: (entityName, where) => NotFoundError.findExactlyOneFailed(entityName, where),
    // Optional chaining keeps this working on runtimes without `process` (e.g. browsers/workers).
    baseDir: globalThis.process?.cwd?.(),
    hydrator: ObjectHydrator,
    flushMode: FlushMode.AUTO,
    loadStrategy: LoadStrategy.BALANCED,
    dataloader: DataloaderType.NONE,
    populateWhere: PopulateHint.ALL,
    ignoreUndefinedInQuery: false,
    // Identity hook: by default queries pass through unmodified.
    onQuery: sql => sql,
    autoJoinOneToOneOwner: true,
    autoJoinRefsForFilters: true,
    filtersOnRelations: true,
    propagationOnPrototype: true,
    populateAfterFlush: true,
    serialization: {
        includePrimaryKeys: true,
    },
    // Defaults for `em.assign()`.
    assign: {
        updateNestedEntities: true,
        updateByPrimaryKey: true,
        mergeObjectProperties: false,
        mergeEmbeddedProperties: true,
        ignoreUndefined: false,
    },
    persistOnCreate: true,
    upsertManaged: true,
    forceEntityConstructor: false,
    forceUndefined: false,
    forceUtcTimezone: true,
    processOnCreateHooksEarly: true,
    ensureDatabase: true,
    ensureIndexes: false,
    batchSize: 300,
    debug: false,
    ignoreDeprecations: false,
    verbose: false,
    driverOptions: {},
    // Migration defaults; `path`/`pathTs` are intentionally unset here.
    migrations: {
        tableName: 'mikro_orm_migrations',
        glob: '!(*.d).{js,ts,cjs}',
        silent: false,
        transactional: true,
        allOrNothing: true,
        dropTables: true,
        safe: false,
        snapshot: true,
        emit: 'ts',
        fileName: (timestamp, name) => `Migration${timestamp}${name ? '_' + name : ''}`,
    },
    schemaGenerator: {
        createForeignKeyConstraints: true,
        ignoreSchema: [],
        skipTables: [],
        skipViews: [],
        skipColumns: {},
    },
    embeddables: {
        prefixMode: 'relative',
    },
    entityGenerator: {
        forceUndefined: true,
        undefinedDefaults: false,
        scalarTypeInDecorator: false,
        bidirectionalRelations: true,
        identifiedReferences: true,
        scalarPropertiesForRelations: 'never',
        entityDefinition: 'defineEntity',
        decorators: 'legacy',
        enumMode: 'dictionary',
        /* v8 ignore next */
        fileName: className => className,
        onlyPurePivotTables: false,
        outputPurePivotTables: false,
        readOnlyPivotTables: false,
        useCoreBaseEntity: false,
    },
    metadataCache: {},
    resultCache: {
        adapter: MemoryCacheAdapter,
        expiration: 1000, // 1s
        options: {},
    },
    metadataProvider: MetadataProvider,
    highlighter: new NullHighlighter(),
    seeder: {
        defaultSeeder: 'DatabaseSeeder',
        glob: '!(*.d).{js,ts}',
        emit: 'ts',
        fileName: className => className,
    },
    preferReadReplicas: true,
    dynamicImportProvider: /* v8 ignore next */ id => import(id),
};
/** Holds and validates all ORM configuration options, providing access to drivers, loggers, cache adapters, and other services. */
export class Configuration {

  #options;
  #logger;
  #slowQueryLogger;
  #driver;
  #platform;
  // Cached service instances and resolved extensions, keyed by class name / extension name.
  #cache = new Map();
  // Lazily-initialized extension factories registered via `registerExtension()`.
  #extensions = new Map();

  constructor(options, validate = true) {
    if (options.dynamicImportProvider) {
      // Expose the custom dynamic import provider globally so helpers outside this class can use it.
      globalThis.dynamicImportProvider = options.dynamicImportProvider;
    }
    // Deep-merge user options over the defaults; the user options win.
    this.#options = Utils.mergeConfig({}, DEFAULTS, options);
    if (validate) {
      this.validateOptions();
    }
    this.#options.loggerFactory ??= DefaultLogger.create;
    this.#logger = this.#options.loggerFactory({
      debugMode: this.#options.debug,
      ignoreDeprecations: this.#options.ignoreDeprecations,
      usesReplicas: (this.#options.replicas?.length ?? 0) > 0,
      highlighter: this.#options.highlighter,
      writer: this.#options.logger,
    });
    const cf = this.#options.compiledFunctions;
    // Warn when precompiled functions were generated by a different ORM version than the one running.
    if (cf && cf.__version !== Utils.getORMVersion()) {
      this.#logger.warn(
        'discovery',
        `Compiled functions were generated with MikroORM v${cf.__version ?? 'unknown'}, but the current version is v${Utils.getORMVersion()}. Please regenerate with \`npx mikro-orm compile\`.`,
      );
    }
    if (this.#options.driver) {
      this.#driver = new this.#options.driver(this);
      this.#platform = this.#driver.getPlatform();
      this.#platform.setConfig(this);
      // Driver-dependent option resolution (client URL, schema, charset, …) happens in `init()`.
      this.init(validate);
    }
  }

  /** Returns the database platform instance. */
  getPlatform() {
    return this.#platform;
  }

  /**
   * Gets specific configuration option. Falls back to specified `defaultValue` if provided.
   */
  get(key, defaultValue) {
    if (typeof this.#options[key] !== 'undefined') {
      return this.#options[key];
    }
    return defaultValue;
  }

  /** Returns all configuration options. */
  getAll() {
    return this.#options;
  }

  /**
   * Overrides specified configuration value.
   */
  set(key, value) {
    this.#options[key] = value;
    // Re-sync derived state (env vars, logger debug mode, cached slow query logger).
    this.sync();
  }

  /**
   * Resets the configuration to its default value
   */
  reset(key) {
    this.#options[key] = DEFAULTS[key];
  }

  /**
   * Gets Logger instance.
   */
  getLogger() {
    return this.#logger;
  }

  /**
   * Gets the logger instance for slow queries.
   * Falls back to the main logger if no custom slow query logger factory is configured.
   */
  getSlowQueryLogger() {
    // Lazily created and cached; `sync()` clears the cache so option changes take effect.
    this.#slowQueryLogger ??=
      this.#options.slowQueryLoggerFactory?.({
        debugMode: this.#options.debug,
        writer: this.#options.logger,
        highlighter: this.#options.highlighter,
        usesReplicas: (this.#options.replicas?.length ?? 0) > 0,
      }) ?? this.#logger;
    return this.#slowQueryLogger;
  }

  /** Returns the configured dataloader type, normalizing boolean values. */
  getDataloaderType() {
    if (typeof this.#options.dataloader === 'boolean') {
      return this.#options.dataloader ? DataloaderType.ALL : DataloaderType.NONE;
    }
    return this.#options.dataloader;
  }

  /** Returns the configured schema name, optionally skipping the platform's default schema. */
  getSchema(skipDefaultSchema = false) {
    if (skipDefaultSchema && this.#options.schema === this.#platform.getDefaultSchemaName()) {
      return undefined;
    }
    return this.#options.schema;
  }

  /**
   * Gets current database driver instance.
   */
  getDriver() {
    return this.#driver;
  }

  /** Registers a lazily-initialized extension by name. */
  registerExtension(name, cb) {
    this.#extensions.set(name, cb);
  }

  /** Returns a previously registered extension by name, initializing it on first access. */
  getExtension(name) {
    if (this.#cache.has(name)) {
      return this.#cache.get(name);
    }
    const ext = this.#extensions.get(name);
    /* v8 ignore next */
    if (!ext) {
      return undefined;
    }
    // Initialize via the registered factory and memoize the result.
    this.#cache.set(name, ext());
    return this.#cache.get(name);
  }

  /**
   * Gets instance of NamingStrategy. (cached)
   */
  getNamingStrategy() {
    return this.getCachedService(this.#options.namingStrategy || this.#platform.getNamingStrategy());
  }

  /**
   * Gets instance of Hydrator. (cached)
   */
  getHydrator(metadata) {
    return this.getCachedService(this.#options.hydrator, metadata, this.#platform, this);
  }

  /**
   * Gets instance of Comparator. (cached)
   */
  getComparator(metadata) {
    return this.getCachedService(EntityComparator, metadata, this.#platform, this);
  }

  /**
   * Gets instance of MetadataProvider. (cached)
   */
  getMetadataProvider() {
    return this.getCachedService(this.#options.metadataProvider, this);
  }

  /**
   * Gets instance of metadata CacheAdapter. (cached)
   */
  getMetadataCacheAdapter() {
    return this.getCachedService(
      this.#options.metadataCache.adapter,
      this.#options.metadataCache.options,
      this.#options.baseDir,
      this.#options.metadataCache.pretty,
    );
  }

  /**
   * Gets instance of CacheAdapter for result cache. (cached)
   */
  getResultCacheAdapter() {
    // `expiration` acts as a default that explicit adapter options can override.
    return this.getCachedService(this.#options.resultCache.adapter, {
      expiration: this.#options.resultCache.expiration,
      ...this.#options.resultCache.options,
    });
  }

  /**
   * Gets EntityRepository class to be instantiated.
   */
  getRepositoryClass(repository) {
    // Precedence: per-entity repository factory > global `entityRepository` option > platform default.
    if (repository) {
      return repository();
    }
    if (this.#options.entityRepository) {
      return this.#options.entityRepository;
    }
    return this.#platform.getRepositoryClass();
  }

  /**
   * Creates instance of given service and caches it.
   */
  getCachedService(cls, ...args) {
    // Keyed by class name — constructor args are only used on first instantiation.
    if (!this.#cache.has(cls.name)) {
      this.#cache.set(cls.name, new cls(...args));
    }
    return this.#cache.get(cls.name);
  }

  /** Clears the cached service instances, forcing re-creation on next access. */
  resetServiceCache() {
    this.#cache.clear();
  }

  // Resolves driver/platform-dependent defaults; called from the constructor once the driver exists.
  init(validate) {
    const useCache = this.getMetadataProvider().useCache();
    const metadataCache = this.#options.metadataCache;
    if (!useCache) {
      metadataCache.adapter = NullCacheAdapter;
    }
    metadataCache.enabled ??= useCache;
    this.#options.clientUrl ??= this.#platform.getDefaultClientUrl();
    this.#options.implicitTransactions ??= this.#platform.usesImplicitTransactions();
    if (validate && metadataCache.enabled && !metadataCache.adapter) {
      throw new Error(
        'No metadata cache adapter specified, please fill in `metadataCache.adapter` option or use the async MikroORM.init() method which can autoload it.',
      );
    }
    try {
      // Derive `dbName` from the client URL pathname unless it was set explicitly.
      const url = new URL(this.#options.clientUrl);
      if (url.pathname) {
        this.#options.dbName = this.get('dbName', decodeURIComponent(url.pathname).substring(1));
      }
    } catch {
      // `clientUrl` is not WHATWG-parsable — fall back to a regexp to extract the database name.
      const url = /:\/\/.*\/([^?]+)/.exec(this.#options.clientUrl);
      if (url) {
        this.#options.dbName = this.get('dbName', decodeURIComponent(url[1]));
      }
    }
    if (validate && !this.#options.dbName && this.#options.clientUrl) {
      throw new Error("No database specified, `clientUrl` option provided but it's missing the pathname.");
    }
    this.#options.schema ??= this.#platform.getDefaultSchemaName();
    this.#options.charset ??= this.#platform.getDefaultCharset();
    // Filters default to enabled unless explicitly configured otherwise.
    Object.keys(this.#options.filters).forEach(key => {
      this.#options.filters[key].default ??= true;
    });
    if (!this.#options.filtersOnRelations) {
      this.#options.autoJoinRefsForFilters ??= false;
    }
    // Subscribers may be provided as classes — instantiate those, keep existing instances as-is.
    this.#options.subscribers = [...this.#options.subscribers].map(subscriber => {
      return subscriber.constructor.name === 'Function' ? new subscriber() : subscriber;
    });
    this.sync();
    // When the terminal does not support colors, disable SQL highlighting entirely.
    if (!colors.enabled()) {
      this.#options.highlighter = new NullHighlighter();
    }
  }

  // Propagates current option values into derived state; called after every `set()`.
  sync() {
    setEnv('MIKRO_ORM_COLORS', this.#options.colors);
    this.#logger.setDebugMode(this.#options.debug);
    // Invalidate the cached slow query logger so it is rebuilt with fresh options.
    this.#slowQueryLogger = undefined;
  }

  // Validates the raw user-provided options; throws on fatal misconfiguration.
  validateOptions() {
    /* v8 ignore next */
    if ('type' in this.#options) {
      throw new Error(
        "The `type` option has been removed in v6, please fill in the `driver` option instead or use `defineConfig` helper (to define your ORM config) or `MikroORM` class (to call the `init` method) exported from the driver package (e.g. `import { defineConfig } from '@mikro-orm/mysql'; export default defineConfig({ ... })`).",
      );
    }
    if (!this.#options.driver) {
      throw new Error(
        "No driver specified, please fill in the `driver` option or use `defineConfig` helper (to define your ORM config) or `MikroORM` class (to call the `init` method) exported from the driver package (e.g. `import { defineConfig } from '@mikro-orm/mysql'; export defineConfig({ ... })`).",
      );
    }
    if (!this.#options.dbName && !this.#options.clientUrl) {
      throw new Error('No database specified, please fill in `dbName` or `clientUrl` option');
    }
    if (this.#options.entities.length === 0 && this.#options.discovery.warnWhenNoEntities) {
      throw new Error('No entities found, please use `entities` option');
    }
    if (
      typeof this.#options.driverOptions === 'function' &&
      this.#options.driverOptions.constructor.name === 'AsyncFunction'
    ) {
      throw new Error('`driverOptions` callback cannot be async');
    }
  }

}
|
||||
/**
 * Type helper to make it easier to use `mikro-orm.config.js`.
 *
 * Returns the given configuration object unchanged — its only purpose is to
 * let TypeScript infer and validate the options shape at the definition site.
 */
export function defineConfig(config) {
  // Pure identity: no copying, no validation, same reference back.
  return config;
}
|
||||
1
node_modules/@mikro-orm/core/utils/ConfigurationLoader.d.ts
generated
vendored
Normal file
1
node_modules/@mikro-orm/core/utils/ConfigurationLoader.d.ts
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
export {};
|
||||
1
node_modules/@mikro-orm/core/utils/ConfigurationLoader.js
generated
vendored
Normal file
1
node_modules/@mikro-orm/core/utils/ConfigurationLoader.js
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
export {};
|
||||
91
node_modules/@mikro-orm/core/utils/Cursor.d.ts
generated
vendored
Normal file
91
node_modules/@mikro-orm/core/utils/Cursor.d.ts
generated
vendored
Normal file
@@ -0,0 +1,91 @@
|
||||
import type { EntityKey, EntityMetadata, FilterObject, Loaded } from '../typings.js';
|
||||
import type { FindByCursorOptions, OrderDefinition } from '../drivers/IDatabaseDriver.js';
|
||||
import { type QueryOrder } from '../enums.js';
|
||||
/**
 * As an alternative to the offset-based pagination with `limit` and `offset`, we can paginate based on a cursor.
 * A cursor is an opaque string that defines a specific place in ordered entity graph. You can use `em.findByCursor()`
 * to access those options. Under the hood, it will call `em.find()` and `em.count()` just like the `em.findAndCount()`
 * method, but will use the cursor options instead.
 *
 * Supports `before`, `after`, `first` and `last` options while disallowing `limit` and `offset`. Explicit `orderBy` option is required.
 *
 * Use `first` and `after` for forward pagination, or `last` and `before` for backward pagination.
 *
 * - `first` and `last` are numbers and serve as an alternative to `offset`, those options are mutually exclusive, use only one at a time
 * - `before` and `after` specify the previous cursor value
 *
 * ```ts
 * const currentCursor = await em.findByCursor(User, {}, {
 *   first: 10,
 *   after: previousCursor, // can be either string or `Cursor` instance
 *   orderBy: { id: 'desc' },
 * });
 *
 * // to fetch next page
 * const nextCursor = await em.findByCursor(User, {}, {
 *   first: 10,
 *   after: currentCursor.endCursor, // or simply the `currentCursor` instance
 *   orderBy: { id: 'desc' },
 * });
 * ```
 *
 * The `Cursor` object provides the following interface:
 *
 * ```ts
 * Cursor<User> {
 *   items: [
 *     User { ... },
 *     User { ... },
 *     User { ... },
 *     ...
 *   ],
 *   totalCount: 50,
 *   length: 10,
 *   startCursor: 'WzRd',
 *   endCursor: 'WzZd',
 *   hasPrevPage: true,
 *   hasNextPage: true,
 * }
 * ```
 */
export declare class Cursor<
  Entity extends object,
  Hint extends string = never,
  Fields extends string = '*',
  Excludes extends string = never,
  IncludeCount extends boolean = true,
> {
  #private;
  /** Entities of the current page, loaded with the given populate hints. */
  readonly items: Loaded<Entity, Hint, Fields, Excludes>[];
  /** Total number of matching entities — `number` when `IncludeCount` is `true`, otherwise `undefined`. */
  readonly totalCount: IncludeCount extends true ? number : undefined;
  /** Whether a previous page exists. */
  readonly hasPrevPage: boolean;
  /** Whether a next page exists. */
  readonly hasNextPage: boolean;
  constructor(
    items: Loaded<Entity, Hint, Fields, Excludes>[],
    totalCount: IncludeCount extends true ? number : undefined,
    options: FindByCursorOptions<Entity, Hint, Fields, Excludes, IncludeCount>,
    meta: EntityMetadata<Entity>,
  );
  /** Cursor value of the first item of this page, or `null` when the page is empty. */
  get startCursor(): string | null;
  /** Cursor value of the last item of this page, or `null` when the page is empty. */
  get endCursor(): string | null;
  /**
   * Computes the cursor value for a given entity.
   */
  from(entity: Entity | Loaded<Entity, Hint, Fields, Excludes>): string;
  /** Iterates over `items`. */
  [Symbol.iterator](): IterableIterator<Loaded<Entity, Hint, Fields, Excludes>>;
  /** Number of items in the current page. */
  get length(): number;
  /**
   * Computes the cursor value for given entity and order definition.
   */
  static for<Entity extends object>(
    meta: EntityMetadata<Entity>,
    entity: FilterObject<Entity>,
    orderBy: OrderDefinition<Entity>,
  ): string;
  /** Serializes an array of cursor key values into an opaque string. */
  static encode(value: unknown[]): string;
  /** Parses an opaque cursor string back into its key values. */
  static decode(value: string): unknown[];
  /** Normalizes an `orderBy` definition into `[key, direction]` pairs usable for cursor computation. */
  static getDefinition<Entity extends object>(
    meta: EntityMetadata<Entity>,
    orderBy: OrderDefinition<Entity>,
  ): [EntityKey, QueryOrder][];
}
|
||||
196
node_modules/@mikro-orm/core/utils/Cursor.js
generated
vendored
Normal file
196
node_modules/@mikro-orm/core/utils/Cursor.js
generated
vendored
Normal file
@@ -0,0 +1,196 @@
|
||||
import { Utils } from './Utils.js';
|
||||
import { ReferenceKind } from '../enums.js';
|
||||
import { Reference } from '../entity/Reference.js';
|
||||
import { helper } from '../entity/wrap.js';
|
||||
import { Raw } from '../utils/RawQueryFragment.js';
|
||||
import { CursorError } from '../errors.js';
|
||||
import { inspect } from '../logging/inspect.js';
|
||||
/**
 * As an alternative to the offset-based pagination with `limit` and `offset`, we can paginate based on a cursor.
 * A cursor is an opaque string that defines a specific place in ordered entity graph. You can use `em.findByCursor()`
 * to access those options. Under the hood, it will call `em.find()` and `em.count()` just like the `em.findAndCount()`
 * method, but will use the cursor options instead.
 *
 * Supports `before`, `after`, `first` and `last` options while disallowing `limit` and `offset`. Explicit `orderBy` option is required.
 *
 * Use `first` and `after` for forward pagination, or `last` and `before` for backward pagination.
 *
 * - `first` and `last` are numbers and serve as an alternative to `offset`, those options are mutually exclusive, use only one at a time
 * - `before` and `after` specify the previous cursor value
 *
 * ```ts
 * const currentCursor = await em.findByCursor(User, {}, {
 *   first: 10,
 *   after: previousCursor, // can be either string or `Cursor` instance
 *   orderBy: { id: 'desc' },
 * });
 *
 * // to fetch next page
 * const nextCursor = await em.findByCursor(User, {}, {
 *   first: 10,
 *   after: currentCursor.endCursor, // or simply the `currentCursor` instance
 *   orderBy: { id: 'desc' },
 * });
 * ```
 *
 * The `Cursor` object provides the following interface:
 *
 * ```ts
 * Cursor<User> {
 *   items: [
 *     User { ... },
 *     User { ... },
 *     User { ... },
 *     ...
 *   ],
 *   totalCount: 50,
 *   length: 10,
 *   startCursor: 'WzRd',
 *   endCursor: 'WzZd',
 *   hasPrevPage: true,
 *   hasNextPage: true,
 * }
 * ```
 */
export class Cursor {

  items;
  totalCount;
  hasPrevPage;
  hasNextPage;
  // Normalized `[key, direction]` pairs derived from `orderBy`, used to compute cursor values in `from()`.
  #definition;

  constructor(items, totalCount, options, meta) {
    this.items = items;
    this.totalCount = totalCount;
    const { first, last, before, after, orderBy, overfetch } = options;
    const limit = first ?? last;
    // `last` only takes effect when `first` is not provided (the options are mutually exclusive).
    const isLast = !first && !!last;
    // With `overfetch`, one extra item beyond `limit` was loaded purely to detect another page.
    const hasMorePages = !!overfetch && limit != null && items.length > limit;
    this.hasPrevPage = isLast ? hasMorePages : !!after;
    this.hasNextPage = isLast ? !!before : hasMorePages;
    if (hasMorePages) {
      // Drop the overfetched item: it sits at the start for backward pagination, at the end for forward.
      if (isLast) {
        items.shift();
      } else {
        items.pop();
      }
    }
    this.#definition = Cursor.getDefinition(meta, orderBy);
  }

  /** Cursor value of the first item of this page, or `null` when the page is empty. */
  get startCursor() {
    if (this.items.length === 0) {
      return null;
    }
    return this.from(this.items[0]);
  }

  /** Cursor value of the last item of this page, or `null` when the page is empty. */
  get endCursor() {
    if (this.items.length === 0) {
      return null;
    }
    return this.from(this.items[this.items.length - 1]);
  }

  /**
   * Computes the cursor value for a given entity.
   */
  from(entity) {
    // Recursively resolves the value for one ordered key; `direction` may be a nested
    // order object for relations, in which case the result is a plain object of values.
    const processEntity = (entity, prop, direction, object = false) => {
      if (Utils.isPlainObject(direction)) {
        const unwrapped = Reference.unwrapReference(entity[prop]);
        // Check if the relation is loaded - for nested properties, undefined means not populated
        if (Utils.isEntity(unwrapped) && !helper(unwrapped).isInitialized()) {
          throw CursorError.entityNotPopulated(entity, prop);
        }
        return Utils.keys(direction).reduce((o, key) => {
          Object.assign(o, processEntity(unwrapped, key, direction[key], true));
          return o;
        }, {});
      }
      let value = entity[prop];
      // Allow null/undefined values in cursor - they will be handled in createCursorCondition
      // undefined can occur with forceUndefined config option which converts null to undefined
      if (value == null) {
        return object ? { [prop]: null } : null;
      }
      // Relations collapse to their primary key; scalar references are unwrapped to raw values.
      if (Utils.isEntity(value, true)) {
        value = helper(value).getPrimaryKey();
      }
      if (Utils.isScalarReference(value)) {
        value = value.unwrap();
      }
      if (object) {
        return { [prop]: value };
      }
      return value;
    };
    const value = this.#definition.map(([key, direction]) => processEntity(entity, key, direction));
    return Cursor.encode(value);
  }

  /** Iterates over `items`. */
  *[Symbol.iterator]() {
    for (const item of this.items) {
      yield item;
    }
  }

  /** Number of items in the current page. */
  get length() {
    return this.items.length;
  }

  /**
   * Computes the cursor value for given entity and order definition.
   *
   * Unlike `from()`, this works on a plain filter object and throws when a key
   * from the order definition is missing on it.
   */
  static for(meta, entity, orderBy) {
    const definition = this.getDefinition(meta, orderBy);
    return Cursor.encode(
      definition.map(([key]) => {
        const value = entity[key];
        if (value === undefined) {
          throw CursorError.missingValue(meta.className, key);
        }
        return value;
      }),
    );
  }

  /** Serializes cursor key values as base64url-encoded JSON. */
  static encode(value) {
    return Buffer.from(JSON.stringify(value)).toString('base64url');
  }

  /** Parses a base64url cursor string; strings shaped like ISO timestamps are revived as `Date` instances. */
  static decode(value) {
    return JSON.parse(Buffer.from(value, 'base64url').toString('utf8')).map(value => {
      if (typeof value === 'string' && /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}/.exec(value)) {
        return new Date(value);
      }
      return value;
    });
  }

  /**
   * Normalizes an `orderBy` definition into `[key, direction]` pairs, keeping only keys that can back a
   * cursor: known raw-fragment symbols, scalars, embeddeds, M:1 relations, and owning 1:1 relations.
   */
  static getDefinition(meta, orderBy) {
    return Utils.asArray(orderBy).flatMap(order => {
      const ret = [];
      for (const key of Utils.getObjectQueryKeys(order)) {
        if (Raw.isKnownFragmentSymbol(key)) {
          ret.push([key, order[key]]);
          continue;
        }
        const prop = meta.properties[key];
        /* v8 ignore next */
        if (
          !prop ||
          !(
            [ReferenceKind.SCALAR, ReferenceKind.EMBEDDED, ReferenceKind.MANY_TO_ONE].includes(prop.kind) ||
            (prop.kind === ReferenceKind.ONE_TO_ONE && prop.owner)
          )
        ) {
          continue;
        }
        ret.push([prop.name, order[prop.name]]);
      }
      return ret;
    });
  }

  /** @ignore */
  /* v8 ignore next */
  [Symbol.for('nodejs.util.inspect.custom')]() {
    const type = this.items[0]?.constructor.name;
    const { items, startCursor, endCursor, hasPrevPage, hasNextPage, totalCount, length } = this;
    const options = inspect(
      { startCursor, endCursor, totalCount, hasPrevPage, hasNextPage, items, length },
      { depth: 0 },
    );
    // Collapse the items array in the printed representation to keep the output short.
    return `Cursor${type ? `<${type}>` : ''} ${options.replace('items: [Array]', 'items: [...]')}`;
  }

}
|
||||
66
node_modules/@mikro-orm/core/utils/DataloaderUtils.d.ts
generated
vendored
Normal file
66
node_modules/@mikro-orm/core/utils/DataloaderUtils.d.ts
generated
vendored
Normal file
@@ -0,0 +1,66 @@
|
||||
import type { Constructor, Primary, Ref } from '../typings.js';
|
||||
import { Collection, type InitCollectionOptions } from '../entity/Collection.js';
|
||||
import { type EntityManager } from '../EntityManager.js';
|
||||
import { type LoadReferenceOptions } from '../entity/Reference.js';
|
||||
type BatchLoadFn<K, V> = (keys: readonly K[]) => PromiseLike<ArrayLike<V | Error>>;
|
||||
/** Static helpers backing the reference and collection dataloaders used by the EntityManager. */
export declare class DataloaderUtils {
  // NOTE(review): presumably caches the dynamically imported `dataloader` constructor
  // resolved by `getDataLoader()` — confirm against the implementation.
  private static DataLoader?;
  /**
   * Groups identified references by entity and returns a Map with the
   * class name as the index and the corresponding primary keys as the value.
   */
  static groupPrimaryKeysByEntityAndOpts(
    refsWithOpts: readonly [Ref<any>, Omit<LoadReferenceOptions<any, any>, 'dataloader'>?][],
  ): Map<string, Set<Primary<any>>>;
  /**
   * Returns the reference dataloader batchLoadFn, which aggregates references by entity,
   * makes one query per entity and maps each input reference to the corresponding result.
   */
  static getRefBatchLoadFn(
    em: EntityManager,
  ): BatchLoadFn<[Ref<any>, Omit<LoadReferenceOptions<any, any>, 'dataloader'>?], any>;
  /**
   * Groups collections by entity and returns a Map whose keys are the entity names and whose values are filter Maps
   * which we can use to narrow down the find query to return just the items of the collections that have been dataloaded.
   * The entries of the filter Map will be used as the values of an $or operator so we end up with a query per entity.
   */
  static groupInversedOrMappedKeysByEntityAndOpts(
    collsWithOpts: readonly [Collection<any>, Omit<InitCollectionOptions<any, any>, 'dataloader'>?][],
  ): Map<string, Map<string, Set<Primary<any>>>>;
  /**
   * Turn the entity+options map into actual queries.
   * The keys are the entity names + a stringified version of the options and the values are filter Maps which will be used as the values of an $or operator so we end up with a query per entity+opts.
   * We must populate the inverse side of the relationship in order to be able to later retrieve the PK(s) from its item(s).
   * Together with the query the promises will also return the key which can be used to narrow down the results pertaining to a certain set of options.
   */
  static entitiesAndOptsMapToQueries(
    entitiesAndOptsMap: Map<string, Map<string, Set<Primary<any>>>>,
    em: EntityManager,
  ): Promise<[string, any[]]>[];
  /**
   * Creates a filter which returns the results pertaining to a certain collection.
   * First checks if the Entity type matches, then retrieves the inverse side of the relationship
   * where the filtering will be done in order to match the target collection.
   */
  static getColFilter<T, S extends T>(collection: Collection<any>): (result: T) => result is S;
  /**
   * Returns the 1:M collection dataloader batchLoadFn, which aggregates collections by entity,
   * makes one query per entity and maps each input collection to the corresponding result.
   */
  static getColBatchLoadFn(
    em: EntityManager,
  ): BatchLoadFn<[Collection<any>, Omit<InitCollectionOptions<any, any>, 'dataloader'>?], any>;
  /**
   * Returns the M:N collection dataloader batchLoadFn, which aggregates collections by entity,
   * makes one query per entity and maps each input collection to the corresponding result.
   */
  static getManyToManyColBatchLoadFn(
    em: EntityManager,
  ): BatchLoadFn<[Collection<any>, Omit<InitCollectionOptions<any, any>, 'dataloader'>?], any>;
  /** Resolves the `DataLoader` constructor asynchronously (the dependency is loaded on demand). */
  static getDataLoader(): Promise<
    Constructor<{
      load: (...args: unknown[]) => Promise<unknown>;
    }>
  >;
}
|
||||
export {};
|
||||
238
node_modules/@mikro-orm/core/utils/DataloaderUtils.js
generated
vendored
Normal file
238
node_modules/@mikro-orm/core/utils/DataloaderUtils.js
generated
vendored
Normal file
@@ -0,0 +1,238 @@
|
||||
import { Collection } from '../entity/Collection.js';
|
||||
import { helper } from '../entity/wrap.js';
|
||||
import { Reference } from '../entity/Reference.js';
|
||||
import { Utils } from './Utils.js';
|
||||
export class DataloaderUtils {
|
||||
static DataLoader;
|
||||
/**
|
||||
* Groups identified references by entity and returns a Map with the
|
||||
* class name as the index and the corresponding primary keys as the value.
|
||||
*/
|
||||
static groupPrimaryKeysByEntityAndOpts(refsWithOpts) {
|
||||
const map = new Map();
|
||||
for (const [ref, opts] of refsWithOpts) {
|
||||
/* The key is a combination of the uniqueName (a unique table name based identifier) and a stringified version if the load options because we want
|
||||
to map each combination of entities/options into separate find queries in order to return accurate results.
|
||||
This could be further optimized finding the "lowest common denominator" among the different options
|
||||
for each Entity and firing a single query for each Entity instead of Entity+options combination.
|
||||
The former is the approach taken by the out-of-tree "find" dataloader: https://github.com/darkbasic/mikro-orm-dataloaders
|
||||
In real-world scenarios (GraphQL) most of the time you will end up batching the same sets of options anyway,
|
||||
so we end up getting most of the benefits with the much simpler implementation.
|
||||
Also there are scenarios where the single query per entity implementation may end up being slower, for example
|
||||
if the vast majority of the references batched for a certain entity don't have populate options while a few ones have
|
||||
a wildcard populate so you end up doing the additional joins for all the entities.
|
||||
Thus such approach should probably be configurable, if not opt-in.
|
||||
NOTE: meta + opts multi maps (https://github.com/martian17/ds-js) might be a more elegant way
|
||||
to implement this but not necessarily faster. */
|
||||
const key = `${helper(ref).__meta.uniqueName}|${JSON.stringify(opts ?? {})}`;
|
||||
let primaryKeysSet = map.get(key);
|
||||
if (primaryKeysSet == null) {
|
||||
primaryKeysSet = new Set();
|
||||
map.set(key, primaryKeysSet);
|
||||
}
|
||||
primaryKeysSet.add(helper(ref).getPrimaryKey());
|
||||
}
|
||||
return map;
|
||||
}
|
||||
/**
|
||||
* Returns the reference dataloader batchLoadFn, which aggregates references by entity,
|
||||
* makes one query per entity and maps each input reference to the corresponding result.
|
||||
*/
|
||||
static getRefBatchLoadFn(em) {
|
||||
return async refsWithOpts => {
|
||||
const groupedIdsMap = DataloaderUtils.groupPrimaryKeysByEntityAndOpts(refsWithOpts);
|
||||
const promises = Array.from(groupedIdsMap).map(([key, idsSet]) => {
|
||||
const uniqueName = key.substring(0, key.indexOf('|'));
|
||||
const opts = JSON.parse(key.substring(key.indexOf('|') + 1));
|
||||
const meta = em.getMetadata().getByUniqueName(uniqueName);
|
||||
return em.find(meta.class, Array.from(idsSet), opts);
|
||||
});
|
||||
await Promise.all(promises);
|
||||
/* Instead of assigning each find result to the original reference we use a shortcut
|
||||
which takes advantage of the already existing Mikro-ORM caching mechanism:
|
||||
when it calls ref.unwrap it will automatically retrieve the entity
|
||||
from the cache (it will hit the cache because of the previous find query).
|
||||
This trick won't be possible for collections where we will be forced to map the results. */
|
||||
return refsWithOpts.map(([ref]) => ref.unwrap());
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Groups collections by entity and returns a Map whose keys are the entity names and whose values are filter Maps
|
||||
* which we can use to narrow down the find query to return just the items of the collections that have been dataloaded.
|
||||
* The entries of the filter Map will be used as the values of an $or operator so we end up with a query per entity.
|
||||
*/
|
||||
static groupInversedOrMappedKeysByEntityAndOpts(collsWithOpts) {
|
||||
const entitiesMap = new Map();
|
||||
for (const [col, opts] of collsWithOpts) {
|
||||
/*
|
||||
We first get the entity name of the Collection and together with its options (see groupPrimaryKeysByEntityAndOpts
|
||||
for a full explanation) we use it as the key of the first Map.
|
||||
With that we know that we have to look for entities of this type (and with the same options) in order to fulfill the collection.
|
||||
The value is another Map which we can use to filter the find query to get results pertaining to the collections that have been dataloaded:
|
||||
its keys are the props we are going to filter to and its values are the corresponding PKs.
|
||||
*/
|
||||
const key = `${col.property.targetMeta.uniqueName}|${JSON.stringify(opts ?? {})}`;
|
||||
let filterMap = entitiesMap.get(key); // We are going to use this map to filter the entities pertaining to the collections that have been dataloaded.
|
||||
if (filterMap == null) {
|
||||
filterMap = new Map();
|
||||
entitiesMap.set(key, filterMap);
|
||||
}
|
||||
// The Collection dataloader relies on the inverse side of the relationship (inversedBy/mappedBy), which is going to be
|
||||
// the key of the filter Map and it's the prop that we use to filter the results pertaining to the Collection.
|
||||
const inversedProp = col.property.inversedBy ?? col.property.mappedBy; // Many to Many vs One to Many
|
||||
let primaryKeys = filterMap.get(inversedProp);
|
||||
if (primaryKeys == null) {
|
||||
primaryKeys = new Set();
|
||||
filterMap.set(inversedProp, primaryKeys);
|
||||
}
|
||||
// This is the PK that in conjunction with the filter Map key (the prop) will lead to this specific Collection
|
||||
primaryKeys.add(helper(col.owner).getPrimaryKey());
|
||||
}
|
||||
return entitiesMap;
|
||||
}
|
||||
/**
|
||||
* Turn the entity+options map into actual queries.
|
||||
* The keys are the entity names + a stringified version of the options and the values are filter Maps which will be used as the values of an $or operator so we end up with a query per entity+opts.
|
||||
* We must populate the inverse side of the relationship in order to be able to later retrieve the PK(s) from its item(s).
|
||||
* Together with the query the promises will also return the key which can be used to narrow down the results pertaining to a certain set of options.
|
||||
*/
|
||||
static entitiesAndOptsMapToQueries(entitiesAndOptsMap, em) {
|
||||
return Array.from(entitiesAndOptsMap, async ([key, filterMap]) => {
|
||||
const uniqueName = key.substring(0, key.indexOf('|'));
|
||||
const opts = JSON.parse(key.substring(key.indexOf('|') + 1));
|
||||
const meta = em.getMetadata().getByUniqueName(uniqueName);
|
||||
const res = await em.find(
|
||||
meta.class,
|
||||
opts?.where != null && Object.keys(opts.where).length > 0
|
||||
? {
|
||||
$and: [
|
||||
{
|
||||
$or: Array.from(filterMap.entries()).map(([prop, pks]) => {
|
||||
return { [prop]: Array.from(pks) };
|
||||
}),
|
||||
},
|
||||
opts.where,
|
||||
],
|
||||
}
|
||||
: {
|
||||
// The entries of the filter Map will be used as the values of the $or operator
|
||||
$or: Array.from(filterMap.entries()).map(([prop, pks]) => {
|
||||
return { [prop]: Array.from(pks) };
|
||||
}),
|
||||
},
|
||||
{
|
||||
...opts,
|
||||
// We need to populate the inverse side of the relationship in order to be able to later retrieve the PK(s) from its item(s)
|
||||
populate: [
|
||||
...(opts.populate === false ? [] : (opts.populate ?? [])),
|
||||
...Array.from(filterMap.keys()).filter(
|
||||
// We need to do so only if the inverse side is a collection, because we can already retrieve the PK from a reference without having to load it
|
||||
prop => meta.properties[prop]?.ref !== true,
|
||||
),
|
||||
],
|
||||
},
|
||||
);
|
||||
return [key, res];
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Creates a filter which returns the results pertaining to a certain collection.
|
||||
* First checks if the Entity type matches, then retrieves the inverse side of the relationship
|
||||
* where the filtering will be done in order to match the target collection.
|
||||
*/
|
||||
static getColFilter(collection) {
|
||||
return result => {
|
||||
// There is no need to check if Entity matches because we already matched the key which is entity+options.
|
||||
// This is the inverse side of the relationship where the filtering will be done in order to match the target collection
|
||||
// Either inversedBy or mappedBy exist because we already checked in groupInversedOrMappedKeysByEntity
|
||||
const inverseProp = collection.property.inversedBy ?? collection.property.mappedBy;
|
||||
const target = Reference.unwrapReference(result[inverseProp]);
|
||||
if (target instanceof Collection) {
|
||||
for (const item of target) {
|
||||
if (item === collection.owner) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
} else if (target) {
|
||||
return target === collection.owner;
|
||||
}
|
||||
return false;
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Returns the 1:M collection dataloader batchLoadFn, which aggregates collections by entity,
|
||||
* makes one query per entity and maps each input collection to the corresponding result.
|
||||
*/
|
||||
static getColBatchLoadFn(em) {
|
||||
return async collsWithOpts => {
|
||||
const entitiesAndOptsMap = DataloaderUtils.groupInversedOrMappedKeysByEntityAndOpts(collsWithOpts);
|
||||
const promises = DataloaderUtils.entitiesAndOptsMapToQueries(entitiesAndOptsMap, em);
|
||||
const resultsMap = new Map(await Promise.all(promises));
|
||||
// We need to filter the results in order to map each input collection
|
||||
// to a subset of each query matching the collection items.
|
||||
return collsWithOpts.map(([col, opts]) => {
|
||||
const key = `${col.property.targetMeta.uniqueName}|${JSON.stringify(opts ?? {})}`;
|
||||
const entities = resultsMap.get(key);
|
||||
if (entities == null) {
|
||||
// Should never happen
|
||||
/* v8 ignore next */
|
||||
throw new Error('Cannot match results');
|
||||
}
|
||||
return entities.filter(DataloaderUtils.getColFilter(col));
|
||||
});
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Returns the M:N collection dataloader batchLoadFn, which aggregates collections by entity,
|
||||
* makes one query per entity and maps each input collection to the corresponding result.
|
||||
*/
|
||||
static getManyToManyColBatchLoadFn(em) {
|
||||
return async collsWithOpts => {
|
||||
const groups = new Map();
|
||||
for (const [col, opts] of collsWithOpts) {
|
||||
const key = `${col.property.targetMeta.uniqueName}.${col.property.name}|${JSON.stringify(opts ?? {})}`;
|
||||
const value = groups.get(key) ?? [];
|
||||
value.push([col, opts ?? {}]);
|
||||
groups.set(key, value);
|
||||
}
|
||||
const ret = [];
|
||||
for (const group of groups.values()) {
|
||||
const prop = group[0][0].property;
|
||||
const options = {};
|
||||
const wrap = cond => ({ [prop.name]: cond });
|
||||
const orderBy = Utils.asArray(group[0][1]?.orderBy).map(o => wrap(o));
|
||||
const populate = wrap(group[0][1]?.populate);
|
||||
const owners = group.map(c => c[0].owner);
|
||||
const $or = [];
|
||||
// a bit of a hack, but we need to prefix the key, since we have only a column name, not a property name
|
||||
const alias = em.config.getNamingStrategy().aliasName(Utils.className(prop.pivotEntity), 0);
|
||||
const fk = `${alias}.${Utils.getPrimaryKeyHash(prop.joinColumns)}`;
|
||||
for (const c of group) {
|
||||
$or.push({ $and: [c[1]?.where ?? {}, { [fk]: c[0].owner }] });
|
||||
options.refresh ??= c[1]?.refresh;
|
||||
}
|
||||
options.where = wrap({ $or });
|
||||
const r = await em
|
||||
.getEntityLoader()
|
||||
.findChildrenFromPivotTable(owners, prop, options, orderBy, populate, group[0][1]?.ref);
|
||||
ret.push(...r);
|
||||
}
|
||||
return ret;
|
||||
};
|
||||
}
|
||||
static async getDataLoader() {
|
||||
if (this.DataLoader) {
|
||||
return this.DataLoader;
|
||||
}
|
||||
try {
|
||||
const mod = await import('dataloader');
|
||||
const DataLoader = mod.default;
|
||||
return (this.DataLoader ??= DataLoader);
|
||||
} catch {
|
||||
/* v8 ignore next */
|
||||
throw new Error(
|
||||
"DataLoader is not found, make sure `dataloader` package is installed in your project's dependencies.",
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
110
node_modules/@mikro-orm/core/utils/EntityComparator.d.ts
generated
vendored
Normal file
110
node_modules/@mikro-orm/core/utils/EntityComparator.d.ts
generated
vendored
Normal file
@@ -0,0 +1,110 @@
|
||||
import type {
|
||||
EntityData,
|
||||
EntityDictionary,
|
||||
EntityMetadata,
|
||||
EntityName,
|
||||
EntityProperty,
|
||||
IMetadataStorage,
|
||||
Primary,
|
||||
} from '../typings.js';
|
||||
import type { Platform } from '../platforms/Platform.js';
|
||||
import type { Configuration } from './Configuration.js';
|
||||
type Comparator<T> = (
|
||||
a: T,
|
||||
b: T,
|
||||
options?: {
|
||||
includeInverseSides?: boolean;
|
||||
},
|
||||
) => EntityData<T>;
|
||||
type ResultMapper<T> = (result: EntityData<T>) => EntityData<T> | null;
|
||||
type SnapshotGenerator<T> = (entity: T) => EntityData<T>;
|
||||
type PkGetter<T> = (entity: T) => Primary<T>;
|
||||
type PkSerializer<T> = (entity: T) => string;
|
||||
type CompositeKeyPart = string | CompositeKeyPart[];
|
||||
/** @internal Generates and caches JIT-compiled functions for comparing, snapshotting, and mapping entity data. */
|
||||
export declare class EntityComparator {
|
||||
#private;
|
||||
constructor(metadata: IMetadataStorage, platform: Platform, config?: Configuration);
|
||||
/**
|
||||
* Computes difference between two entities.
|
||||
*/
|
||||
diffEntities<T extends object>(
|
||||
entityName: EntityName<T>,
|
||||
a: EntityData<T>,
|
||||
b: EntityData<T>,
|
||||
options?: {
|
||||
includeInverseSides?: boolean;
|
||||
},
|
||||
): EntityData<T>;
|
||||
/** Returns true if two entity snapshots are identical (no differences). */
|
||||
matching<T extends object>(entityName: EntityName<T>, a: EntityData<T>, b: EntityData<T>): boolean;
|
||||
/**
|
||||
* Removes ORM specific code from entities and prepares it for serializing. Used before change set computation.
|
||||
* References will be mapped to primary keys, collections to arrays of primary keys.
|
||||
*/
|
||||
prepareEntity<T extends object>(entity: T): EntityData<T>;
|
||||
/**
|
||||
* Maps database columns to properties.
|
||||
*/
|
||||
mapResult<T>(meta: EntityMetadata<T>, result: EntityDictionary<T>): EntityData<T>;
|
||||
/**
|
||||
* @internal Highly performance-sensitive method.
|
||||
*/
|
||||
getPkGetter<T>(meta: EntityMetadata<T>): PkGetter<T>;
|
||||
/**
|
||||
* @internal Highly performance-sensitive method.
|
||||
*/
|
||||
getPkGetterConverted<T>(meta: EntityMetadata<T>): PkGetter<T>;
|
||||
/**
|
||||
* @internal Highly performance-sensitive method.
|
||||
*/
|
||||
getPkSerializer<T>(meta: EntityMetadata<T>): PkSerializer<T>;
|
||||
/**
|
||||
* @internal Highly performance-sensitive method.
|
||||
*/
|
||||
getSnapshotGenerator<T>(entityName: EntityName<T>): SnapshotGenerator<T>;
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
propName(name: string, parent?: string): string;
|
||||
/**
|
||||
* @internal respects nested composite keys, e.g. `[1, [2, 3]]`
|
||||
*/
|
||||
createCompositeKeyArray(prop: EntityProperty, parents?: EntityProperty[]): string;
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
formatCompositeKeyPart(part: CompositeKeyPart): string;
|
||||
/**
|
||||
* @internal Highly performance-sensitive method.
|
||||
*/
|
||||
getResultMapper<T>(meta: EntityMetadata<T>): ResultMapper<T>;
|
||||
private getPropertyCondition;
|
||||
private getEmbeddedArrayPropertySnapshot;
|
||||
/**
|
||||
* we need to serialize only object embeddables, and only the top level ones, so root object embeddable
|
||||
* properties and first child nested object embeddables with inlined parent
|
||||
*/
|
||||
private shouldSerialize;
|
||||
private getEmbeddedPropertySnapshot;
|
||||
private registerCustomType;
|
||||
private getPropertySnapshot;
|
||||
/**
|
||||
* @internal Highly performance-sensitive method.
|
||||
*/
|
||||
getEntityComparator<T extends object>(entityName: EntityName<T>): Comparator<T>;
|
||||
private getGenericComparator;
|
||||
private getPropertyComparator;
|
||||
private wrap;
|
||||
private safeKey;
|
||||
/**
|
||||
* Sets the toArray helper in the context if not already set.
|
||||
* Used for converting composite PKs to arrays.
|
||||
*/
|
||||
private setToArrayHelper;
|
||||
/**
|
||||
* perf: used to generate list of comparable properties during discovery, so we speed up the runtime comparison
|
||||
*/
|
||||
static isComparable<T>(prop: EntityProperty<T>, root: EntityMetadata): boolean;
|
||||
}
|
||||
export {};
|
||||
843
node_modules/@mikro-orm/core/utils/EntityComparator.js
generated
vendored
Normal file
843
node_modules/@mikro-orm/core/utils/EntityComparator.js
generated
vendored
Normal file
@@ -0,0 +1,843 @@
|
||||
import { clone } from './clone.js';
|
||||
import { ReferenceKind } from '../enums.js';
|
||||
import {
|
||||
compareArrays,
|
||||
compareBooleans,
|
||||
compareBuffers,
|
||||
compareObjects,
|
||||
equals,
|
||||
parseJsonSafe,
|
||||
Utils,
|
||||
} from './Utils.js';
|
||||
import { JsonType } from '../types/JsonType.js';
|
||||
import { Raw } from './RawQueryFragment.js';
|
||||
import { EntityIdentifier } from '../entity/EntityIdentifier.js';
|
||||
import { PolymorphicRef } from '../entity/PolymorphicRef.js';
|
||||
/** @internal Generates and caches JIT-compiled functions for comparing, snapshotting, and mapping entity data. */
|
||||
export class EntityComparator {
|
||||
#comparators = new Map();
|
||||
#mappers = new Map();
|
||||
#snapshotGenerators = new Map();
|
||||
#pkGetters = new Map();
|
||||
#pkGettersConverted = new Map();
|
||||
#pkSerializers = new Map();
|
||||
#tmpIndex = 0;
|
||||
#metadata;
|
||||
#platform;
|
||||
#config;
|
||||
constructor(metadata, platform, config) {
|
||||
this.#metadata = metadata;
|
||||
this.#platform = platform;
|
||||
this.#config = config;
|
||||
}
|
||||
/**
|
||||
* Computes difference between two entities.
|
||||
*/
|
||||
diffEntities(entityName, a, b, options) {
|
||||
const comparator = this.getEntityComparator(entityName);
|
||||
return Utils.callCompiledFunction(comparator, a, b, options);
|
||||
}
|
||||
/** Returns true if two entity snapshots are identical (no differences). */
|
||||
matching(entityName, a, b) {
|
||||
const diff = this.diffEntities(entityName, a, b);
|
||||
return Utils.getObjectKeysSize(diff) === 0;
|
||||
}
|
||||
/**
|
||||
* Removes ORM specific code from entities and prepares it for serializing. Used before change set computation.
|
||||
* References will be mapped to primary keys, collections to arrays of primary keys.
|
||||
*/
|
||||
prepareEntity(entity) {
|
||||
const generator = this.getSnapshotGenerator(entity.constructor);
|
||||
return Utils.callCompiledFunction(generator, entity);
|
||||
}
|
||||
/**
|
||||
* Maps database columns to properties.
|
||||
*/
|
||||
mapResult(meta, result) {
|
||||
const mapper = this.getResultMapper(meta);
|
||||
return Utils.callCompiledFunction(mapper, result);
|
||||
}
|
||||
/**
|
||||
* @internal Highly performance-sensitive method.
|
||||
*/
|
||||
getPkGetter(meta) {
|
||||
const exists = this.#pkGetters.get(meta);
|
||||
/* v8 ignore next */
|
||||
if (exists) {
|
||||
return exists;
|
||||
}
|
||||
const lines = [];
|
||||
const context = new Map();
|
||||
if (meta.primaryKeys.length > 1) {
|
||||
lines.push(` const cond = {`);
|
||||
meta.primaryKeys.forEach(pk => {
|
||||
if (meta.properties[pk].kind !== ReferenceKind.SCALAR) {
|
||||
lines.push(
|
||||
` ${pk}: (entity${this.wrap(pk)} != null && (entity${this.wrap(pk)}.__entity || entity${this.wrap(pk)}.__reference)) ? entity${this.wrap(pk)}.__helper.getPrimaryKey() : entity${this.wrap(pk)},`,
|
||||
);
|
||||
} else {
|
||||
lines.push(` ${pk}: entity${this.wrap(pk)},`);
|
||||
}
|
||||
});
|
||||
lines.push(` };`);
|
||||
lines.push(` if (${meta.primaryKeys.map(pk => `cond.${pk} == null`).join(' || ')}) return null;`);
|
||||
lines.push(` return cond;`);
|
||||
} else {
|
||||
const pk = meta.primaryKeys[0];
|
||||
if (meta.properties[pk].kind !== ReferenceKind.SCALAR) {
|
||||
lines.push(
|
||||
` if (entity${this.wrap(pk)} != null && (entity${this.wrap(pk)}.__entity || entity${this.wrap(pk)}.__reference)) {`,
|
||||
);
|
||||
lines.push(` const pk = entity${this.wrap(pk)}.__helper.getPrimaryKey();`);
|
||||
if (meta.properties[pk].targetMeta.compositePK) {
|
||||
lines.push(` if (typeof pk === 'object' && pk != null) {`);
|
||||
lines.push(` return [`);
|
||||
for (const childPK of meta.properties[pk].targetMeta.primaryKeys) {
|
||||
lines.push(` pk${this.wrap(childPK)},`);
|
||||
}
|
||||
lines.push(` ];`);
|
||||
lines.push(` }`);
|
||||
}
|
||||
lines.push(` return pk;`);
|
||||
lines.push(` }`);
|
||||
}
|
||||
lines.push(` return entity${this.wrap(pk)};`);
|
||||
}
|
||||
const code =
|
||||
`// compiled pk getter for entity ${meta.className}\n` + `return function(entity) {\n${lines.join('\n')}\n}`;
|
||||
const fnKey = `pkGetter-${meta.uniqueName}`;
|
||||
const pkSerializer = Utils.createFunction(context, code, this.#config?.get('compiledFunctions'), fnKey);
|
||||
this.#pkGetters.set(meta, pkSerializer);
|
||||
return pkSerializer;
|
||||
}
|
||||
/**
|
||||
* @internal Highly performance-sensitive method.
|
||||
*/
|
||||
getPkGetterConverted(meta) {
|
||||
const exists = this.#pkGettersConverted.get(meta);
|
||||
/* v8 ignore next */
|
||||
if (exists) {
|
||||
return exists;
|
||||
}
|
||||
const lines = [];
|
||||
const context = new Map();
|
||||
if (meta.primaryKeys.length > 1) {
|
||||
lines.push(` const cond = {`);
|
||||
meta.primaryKeys.forEach(pk => {
|
||||
if (meta.properties[pk].kind !== ReferenceKind.SCALAR) {
|
||||
lines.push(
|
||||
` ${pk}: (entity${this.wrap(pk)} != null && (entity${this.wrap(pk)}.__entity || entity${this.wrap(pk)}.__reference)) ? entity${this.wrap(pk)}.__helper.getPrimaryKey(true) : entity${this.wrap(pk)},`,
|
||||
);
|
||||
} else {
|
||||
if (meta.properties[pk].customType) {
|
||||
const convertorKey = this.registerCustomType(meta.properties[pk], context);
|
||||
lines.push(` ${pk}: convertToDatabaseValue_${convertorKey}(entity${this.wrap(pk)}),`);
|
||||
} else {
|
||||
lines.push(` ${pk}: entity${this.wrap(pk)},`);
|
||||
}
|
||||
}
|
||||
});
|
||||
lines.push(` };`);
|
||||
lines.push(` if (${meta.primaryKeys.map(pk => `cond.${pk} == null`).join(' || ')}) return null;`);
|
||||
lines.push(` return cond;`);
|
||||
} else {
|
||||
const pk = meta.primaryKeys[0];
|
||||
if (meta.properties[pk].kind !== ReferenceKind.SCALAR) {
|
||||
lines.push(
|
||||
` if (entity${this.wrap(pk)} != null && (entity${this.wrap(pk)}.__entity || entity${this.wrap(pk)}.__reference)) return entity${this.wrap(pk)}.__helper.getPrimaryKey(true);`,
|
||||
);
|
||||
}
|
||||
if (meta.properties[pk].customType) {
|
||||
const convertorKey = this.registerCustomType(meta.properties[pk], context);
|
||||
lines.push(` return convertToDatabaseValue_${convertorKey}(entity${this.wrap(pk)});`);
|
||||
} else {
|
||||
lines.push(` return entity${this.wrap(pk)};`);
|
||||
}
|
||||
}
|
||||
const code =
|
||||
`// compiled pk getter (with converted custom types) for entity ${meta.className}\n` +
|
||||
`return function(entity) {\n${lines.join('\n')}\n}`;
|
||||
const fnKey = `pkGetterConverted-${meta.uniqueName}`;
|
||||
const pkSerializer = Utils.createFunction(context, code, this.#config?.get('compiledFunctions'), fnKey);
|
||||
this.#pkGettersConverted.set(meta, pkSerializer);
|
||||
return pkSerializer;
|
||||
}
|
||||
/**
|
||||
* @internal Highly performance-sensitive method.
|
||||
*/
|
||||
getPkSerializer(meta) {
|
||||
const exists = this.#pkSerializers.get(meta);
|
||||
/* v8 ignore next */
|
||||
if (exists) {
|
||||
return exists;
|
||||
}
|
||||
const lines = [];
|
||||
const context = new Map();
|
||||
context.set('getCompositeKeyValue', val =>
|
||||
Utils.flatten(Utils.getCompositeKeyValue(val, meta, 'convertToDatabaseValue', this.#platform)),
|
||||
);
|
||||
context.set('getPrimaryKeyHash', val => Utils.getPrimaryKeyHash(Utils.asArray(val)));
|
||||
if (meta.primaryKeys.length > 1) {
|
||||
lines.push(` const pks = entity.__helper.__pk ? getCompositeKeyValue(entity.__helper.__pk) : [`);
|
||||
meta.primaryKeys.forEach(pk => {
|
||||
if (meta.properties[pk].kind !== ReferenceKind.SCALAR) {
|
||||
lines.push(
|
||||
` (entity${this.wrap(pk)} != null && (entity${this.wrap(pk)}.__entity || entity${this.wrap(pk)}.__reference)) ? entity${this.wrap(pk)}.__helper.getSerializedPrimaryKey() : entity${this.wrap(pk)},`,
|
||||
);
|
||||
} else {
|
||||
lines.push(` entity${this.wrap(pk)},`);
|
||||
}
|
||||
});
|
||||
lines.push(` ];`);
|
||||
lines.push(` return pks.join('${Utils.PK_SEPARATOR}');`);
|
||||
} else {
|
||||
const pk = meta.primaryKeys[0];
|
||||
const prop = meta.properties[pk];
|
||||
if (prop.kind !== ReferenceKind.SCALAR) {
|
||||
lines.push(
|
||||
` if (entity${this.wrap(pk)} != null && (entity${this.wrap(pk)}.__entity || entity${this.wrap(pk)}.__reference)) return entity${this.wrap(pk)}.__helper.getSerializedPrimaryKey();`,
|
||||
);
|
||||
}
|
||||
const serializedPrimaryKey = meta.props.find(p => p.serializedPrimaryKey);
|
||||
if (serializedPrimaryKey) {
|
||||
lines.push(` return '' + entity.${serializedPrimaryKey.name};`);
|
||||
} else if (prop.customType) {
|
||||
const convertorKey = this.registerCustomType(meta.properties[pk], context);
|
||||
const idx = this.#tmpIndex++;
|
||||
lines.push(` const val_${idx} = convertToDatabaseValue_${convertorKey}(entity${this.wrap(pk)});`);
|
||||
lines.push(` return getPrimaryKeyHash(val_${idx});`);
|
||||
} else {
|
||||
lines.push(` return '' + entity${this.wrap(pk)};`);
|
||||
}
|
||||
}
|
||||
const code =
|
||||
`// compiled pk serializer for entity ${meta.className}\n` + `return function(entity) {\n${lines.join('\n')}\n}`;
|
||||
const fnKey = `pkSerializer-${meta.uniqueName}`;
|
||||
const pkSerializer = Utils.createFunction(context, code, this.#config?.get('compiledFunctions'), fnKey);
|
||||
this.#pkSerializers.set(meta, pkSerializer);
|
||||
return pkSerializer;
|
||||
}
|
||||
/**
|
||||
* @internal Highly performance-sensitive method.
|
||||
*/
|
||||
getSnapshotGenerator(entityName) {
|
||||
const meta = this.#metadata.find(entityName);
|
||||
const exists = this.#snapshotGenerators.get(meta);
|
||||
if (exists) {
|
||||
return exists;
|
||||
}
|
||||
const lines = [];
|
||||
const context = new Map();
|
||||
context.set('clone', clone);
|
||||
context.set('cloneEmbeddable', o => this.#platform.cloneEmbeddable(o)); // do not clone prototypes
|
||||
if (meta.root.inheritanceType === 'sti' && meta.discriminatorValue) {
|
||||
lines.push(` ret${this.wrap(meta.root.discriminatorColumn)} = '${meta.discriminatorValue}'`);
|
||||
}
|
||||
const getRootProperty = prop => (prop.embedded ? getRootProperty(meta.properties[prop.embedded[0]]) : prop);
|
||||
// copy all comparable props, ignore collections and references, process custom types
|
||||
meta.comparableProps
|
||||
.filter(prop => {
|
||||
const root = getRootProperty(prop);
|
||||
return prop === root || root.kind !== ReferenceKind.EMBEDDED;
|
||||
})
|
||||
.forEach(prop =>
|
||||
lines.push(
|
||||
this.getPropertySnapshot(meta, prop, context, this.wrap(prop.name), this.wrap(prop.name), [prop.name]),
|
||||
),
|
||||
);
|
||||
const code = `return function(entity) {\n const ret = {};\n${lines.join('\n')}\n return ret;\n}`;
|
||||
const fnKey = `snapshotGenerator-${meta.uniqueName}`;
|
||||
const snapshotGenerator = Utils.createFunction(context, code, this.#config?.get('compiledFunctions'), fnKey);
|
||||
this.#snapshotGenerators.set(meta, snapshotGenerator);
|
||||
return snapshotGenerator;
|
||||
}
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
propName(name, parent = 'result') {
|
||||
return parent + this.wrap(name);
|
||||
}
|
||||
/**
|
||||
* @internal respects nested composite keys, e.g. `[1, [2, 3]]`
|
||||
*/
|
||||
createCompositeKeyArray(prop, parents = []) {
|
||||
if (!prop.targetMeta) {
|
||||
let fieldName = prop.fieldNames[0];
|
||||
// traverse all parents, mapping my field name to each parent's field name until we reach the root
|
||||
for (let i = parents.length - 1; i >= 0; i--) {
|
||||
const parent = parents[i];
|
||||
// skip m:n since it does not represent any column directly
|
||||
if (parent.pivotEntity) {
|
||||
continue;
|
||||
}
|
||||
const idx = parent.referencedColumnNames.indexOf(fieldName);
|
||||
fieldName = parent.fieldNames[idx];
|
||||
}
|
||||
return this.propName(fieldName);
|
||||
}
|
||||
const parts = [];
|
||||
prop.targetMeta.getPrimaryProps().forEach(pk => {
|
||||
const part = this.createCompositeKeyArray(pk, [...parents, prop]);
|
||||
parts.push(part);
|
||||
});
|
||||
return this.formatCompositeKeyPart(parts);
|
||||
}
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
formatCompositeKeyPart(part) {
|
||||
if (!Array.isArray(part)) {
|
||||
return part;
|
||||
}
|
||||
if (part.length === 1) {
|
||||
return this.formatCompositeKeyPart(part[0]);
|
||||
}
|
||||
const formatted = part.map(p => this.formatCompositeKeyPart(p)).join(', ');
|
||||
return `[${formatted}]`;
|
||||
}
|
||||
/**
|
||||
* @internal Highly performance-sensitive method.
|
||||
*/
|
||||
getResultMapper(meta) {
|
||||
const exists = this.#mappers.get(meta);
|
||||
if (exists) {
|
||||
return exists;
|
||||
}
|
||||
const lines = [];
|
||||
const context = new Map();
|
||||
context.set('PolymorphicRef', PolymorphicRef);
|
||||
const tz = this.#platform.getTimezone();
|
||||
const parseDate = (key, value, padding = '') => {
|
||||
lines.push(`${padding} if (${value} == null || ${value} instanceof Date) {`);
|
||||
lines.push(`${padding} ${key} = ${value};`);
|
||||
if (!tz || tz === 'local') {
|
||||
lines.push(`${padding} } else if (typeof ${value} === 'bigint') {`);
|
||||
lines.push(`${padding} ${key} = parseDate(Number(${value}));`);
|
||||
lines.push(`${padding} } else {`);
|
||||
lines.push(`${padding} ${key} = parseDate(${value});`);
|
||||
} else {
|
||||
lines.push(`${padding} } else if (typeof ${value} === 'bigint') {`);
|
||||
lines.push(`${padding} ${key} = parseDate(Number(${value}));`);
|
||||
lines.push(
|
||||
`${padding} } else if (typeof ${value} === 'number' || ${value}.includes('+') || ${value}.lastIndexOf('-') > 10 || ${value}.endsWith('Z')) {`,
|
||||
);
|
||||
lines.push(`${padding} ${key} = parseDate(${value});`);
|
||||
lines.push(`${padding} } else {`);
|
||||
lines.push(`${padding} ${key} = parseDate(${value} + '${tz}');`);
|
||||
}
|
||||
lines.push(`${padding} }`);
|
||||
};
|
||||
lines.push(` const mapped = {};`);
|
||||
const mapEntityProperties = (meta, padding = '') => {
|
||||
for (const prop of meta.props) {
|
||||
if (!prop.fieldNames) {
|
||||
continue;
|
||||
}
|
||||
if (prop.polymorphic && prop.fieldNames.length >= 2) {
|
||||
const discriminatorField = prop.fieldNames[0];
|
||||
const idFields = prop.fieldNames.slice(1);
|
||||
lines.push(
|
||||
`${padding} if (${prop.fieldNames.map(field => `typeof ${this.propName(field)} === 'undefined'`).join(' && ')}) {`,
|
||||
);
|
||||
lines.push(
|
||||
`${padding} } else if (${prop.fieldNames.map(field => `${this.propName(field)} != null`).join(' && ')}) {`,
|
||||
);
|
||||
if (idFields.length === 1) {
|
||||
lines.push(
|
||||
`${padding} ret${this.wrap(prop.name)} = new PolymorphicRef(${this.propName(discriminatorField)}, ${this.propName(idFields[0])});`,
|
||||
);
|
||||
} else {
|
||||
lines.push(
|
||||
`${padding} ret${this.wrap(prop.name)} = new PolymorphicRef(${this.propName(discriminatorField)}, [${idFields.map(f => this.propName(f)).join(', ')}]);`,
|
||||
);
|
||||
}
|
||||
lines.push(...prop.fieldNames.map(field => `${padding} ${this.propName(field, 'mapped')} = true;`));
|
||||
lines.push(
|
||||
`${padding} } else if (${prop.fieldNames.map(field => `${this.propName(field)} == null`).join(' && ')}) {\n${padding} ret${this.wrap(prop.name)} = null;`,
|
||||
);
|
||||
lines.push(...prop.fieldNames.map(field => `${padding} ${this.propName(field, 'mapped')} = true;`), ' }');
|
||||
continue;
|
||||
}
|
||||
if (prop.targetMeta && prop.fieldNames.length > 1) {
|
||||
lines.push(
|
||||
`${padding} if (${prop.fieldNames.map(field => `typeof ${this.propName(field)} === 'undefined'`).join(' && ')}) {`,
|
||||
);
|
||||
lines.push(
|
||||
`${padding} } else if (${prop.fieldNames.map(field => `${this.propName(field)} != null`).join(' && ')}) {`,
|
||||
);
|
||||
lines.push(`${padding} ret${this.wrap(prop.name)} = ${this.createCompositeKeyArray(prop)};`);
|
||||
lines.push(...prop.fieldNames.map(field => `${padding} ${this.propName(field, 'mapped')} = true;`));
|
||||
lines.push(
|
||||
`${padding} } else if (${prop.fieldNames.map(field => `${this.propName(field)} == null`).join(' && ')}) {\n${padding} ret${this.wrap(prop.name)} = null;`,
|
||||
);
|
||||
lines.push(...prop.fieldNames.map(field => `${padding} ${this.propName(field, 'mapped')} = true;`), ' }');
|
||||
continue;
|
||||
}
|
||||
if (prop.embedded && (meta.embeddable || meta.properties[prop.embedded[0]].object)) {
|
||||
continue;
|
||||
}
|
||||
if (prop.runtimeType === 'boolean') {
|
||||
lines.push(`${padding} if (typeof ${this.propName(prop.fieldNames[0])} !== 'undefined') {`);
|
||||
lines.push(
|
||||
`${padding} ret${this.wrap(prop.name)} = ${this.propName(prop.fieldNames[0])} == null ? ${this.propName(prop.fieldNames[0])} : !!${this.propName(prop.fieldNames[0])};`,
|
||||
);
|
||||
lines.push(`${padding} ${this.propName(prop.fieldNames[0], 'mapped')} = true;`);
|
||||
lines.push(`${padding} }`);
|
||||
} else if (prop.runtimeType === 'Date' && !this.#platform.isNumericProperty(prop)) {
|
||||
lines.push(`${padding} if (typeof ${this.propName(prop.fieldNames[0])} !== 'undefined') {`);
|
||||
context.set('parseDate', value => this.#platform.parseDate(value));
|
||||
parseDate('ret' + this.wrap(prop.name), this.propName(prop.fieldNames[0]), padding);
|
||||
lines.push(`${padding} ${this.propName(prop.fieldNames[0], 'mapped')} = true;`);
|
||||
lines.push(`${padding} }`);
|
||||
} else if (prop.kind === ReferenceKind.EMBEDDED && (prop.object || meta.embeddable)) {
|
||||
const idx = this.#tmpIndex++;
|
||||
context.set(`mapEmbeddedResult_${idx}`, data => {
|
||||
const item = parseJsonSafe(data);
|
||||
if (Array.isArray(item)) {
|
||||
return item.map(row => (row == null ? row : this.getResultMapper(prop.targetMeta)(row)));
|
||||
}
|
||||
return item == null ? item : this.getResultMapper(prop.targetMeta)(item);
|
||||
});
|
||||
lines.push(`${padding} if (typeof ${this.propName(prop.fieldNames[0])} !== 'undefined') {`);
|
||||
lines.push(
|
||||
`${padding} ret${this.wrap(prop.name)} = ${this.propName(prop.fieldNames[0])} == null ? ${this.propName(prop.fieldNames[0])} : mapEmbeddedResult_${idx}(${this.propName(prop.fieldNames[0])});`,
|
||||
);
|
||||
lines.push(`${padding} ${this.propName(prop.fieldNames[0], 'mapped')} = true;`);
|
||||
lines.push(`${padding} }`);
|
||||
} else if (prop.kind !== ReferenceKind.EMBEDDED) {
|
||||
lines.push(`${padding} if (typeof ${this.propName(prop.fieldNames[0])} !== 'undefined') {`);
|
||||
lines.push(`${padding} ret${this.wrap(prop.name)} = ${this.propName(prop.fieldNames[0])};`);
|
||||
lines.push(`${padding} ${this.propName(prop.fieldNames[0], 'mapped')} = true;`);
|
||||
lines.push(`${padding} }`);
|
||||
}
|
||||
}
|
||||
};
|
||||
if (meta.polymorphs && meta.discriminatorColumn) {
|
||||
for (const polymorph of meta.polymorphs) {
|
||||
const first = polymorph === meta.polymorphs[0];
|
||||
lines.push(
|
||||
` ${first ? '' : 'else '}if (${this.propName(meta.discriminatorColumn)} == '${polymorph.discriminatorValue}') {`,
|
||||
);
|
||||
mapEntityProperties(polymorph, ' ');
|
||||
lines.push(` }`);
|
||||
}
|
||||
lines.push(` else {`);
|
||||
mapEntityProperties(meta, ' ');
|
||||
lines.push(` }`);
|
||||
} else {
|
||||
mapEntityProperties(meta);
|
||||
}
|
||||
lines.push(
|
||||
` for (let k in result) { if (Object.hasOwn(result, k) && !mapped[k] && ret[k] === undefined) ret[k] = result[k]; }`,
|
||||
);
|
||||
const code =
|
||||
`// compiled mapper for entity ${meta.className}\n` +
|
||||
`return function(result) {\n const ret = {};\n${lines.join('\n')}\n return ret;\n}`;
|
||||
const fnKey = `resultMapper-${meta.uniqueName}`;
|
||||
const resultMapper = Utils.createFunction(context, code, this.#config?.get('compiledFunctions'), fnKey);
|
||||
this.#mappers.set(meta, resultMapper);
|
||||
return resultMapper;
|
||||
}
|
||||
getPropertyCondition(path) {
|
||||
const parts = path.slice(); // copy first
|
||||
if (parts.length > 1) {
|
||||
parts.pop();
|
||||
}
|
||||
let tail = '';
|
||||
return parts
|
||||
.map(k => {
|
||||
if (/^\[idx_\d+]$/.exec(k)) {
|
||||
tail += k;
|
||||
return '';
|
||||
}
|
||||
const mapped = `typeof entity${tail ? '.' + tail : ''}${this.wrap(k)} !== 'undefined'`;
|
||||
tail += tail ? '.' + k : k;
|
||||
return mapped;
|
||||
})
|
||||
.filter(k => k)
|
||||
.join(' && ');
|
||||
}
|
||||
getEmbeddedArrayPropertySnapshot(meta, prop, context, level, path, dataKey) {
|
||||
const entityKey = path.map(k => this.wrap(k)).join('');
|
||||
const ret = [];
|
||||
const padding = ' '.repeat(level * 2);
|
||||
const idx = this.#tmpIndex++;
|
||||
ret.push(`${padding}if (Array.isArray(entity${entityKey})) {`);
|
||||
ret.push(`${padding} ret${dataKey} = [];`);
|
||||
ret.push(`${padding} entity${entityKey}.forEach((_, idx_${idx}) => {`);
|
||||
ret.push(
|
||||
this.getEmbeddedPropertySnapshot(
|
||||
meta,
|
||||
prop,
|
||||
context,
|
||||
level + 2,
|
||||
[...path, `[idx_${idx}]`],
|
||||
`${dataKey}[idx_${idx}]`,
|
||||
true,
|
||||
),
|
||||
);
|
||||
ret.push(`${padding} });`);
|
||||
if (this.shouldSerialize(prop, dataKey)) {
|
||||
ret.push(`${padding} ret${dataKey} = cloneEmbeddable(ret${dataKey});`);
|
||||
}
|
||||
ret.push(`${padding}}`);
|
||||
return ret.join('\n');
|
||||
}
|
||||
/**
|
||||
* we need to serialize only object embeddables, and only the top level ones, so root object embeddable
|
||||
* properties and first child nested object embeddables with inlined parent
|
||||
*/
|
||||
shouldSerialize(prop, dataKey) {
|
||||
dataKey = dataKey.replace(/^\./, '');
|
||||
const contains = (str, re) => (str.match(re) || []).length > 0;
|
||||
const a = contains(dataKey, /\./g);
|
||||
const b = contains(dataKey, /\[idx_/g);
|
||||
return !!prop.object && !a && !b;
|
||||
}
|
||||
/**
 * Generates snapshot code for an embedded property. `object` controls whether
 * the embeddable is stored as a nested object (JSON) or flattened into the
 * parent (inline columns); null handling differs accordingly.
 */
getEmbeddedPropertySnapshot(meta, prop, context, level, path, dataKey, object = prop.object) {
    const padding = ' '.repeat(level * 2);
    const nullCond = `entity${path.map(k => this.wrap(k)).join('')} === null`;
    let ret = level === 1 ? '' : '\n';
    if (object) {
        // object mode: a null embeddable maps to a single null value
        ret += `${padding}if (${nullCond}) ret${dataKey} = null;\n`;
    }
    else {
        // inline mode: a null embeddable nulls out each child column individually
        ret += `${padding}if (${nullCond}) {\n`;
        ret +=
            meta.props
                .filter(p =>
                    p.embedded?.[0] === prop.name &&
                    // object for JSON embeddable
                    (p.object || p.persist !== false))
                .map(childProp => {
                    const childDataKey =
                        meta.embeddable || prop.object ? dataKey + this.wrap(childProp.embedded[1]) : this.wrap(childProp.name);
                    return `${padding} ret${childDataKey} = null;`;
                })
                .join('\n') + `\n`;
        ret += `${padding}}\n`;
    }
    const cond = `entity${path.map(k => this.wrap(k)).join('')} != null`;
    ret += `${padding}if (${cond}) {\n`;
    if (object) {
        ret += `${padding} ret${dataKey} = {};\n`;
    }
    // JSON custom types are skipped in object mode (the raw value is stored as-is)
    function shouldProcessCustomType(childProp) {
        if (!childProp.customType) {
            return false;
        }
        if (childProp.customType instanceof JsonType) {
            return !prop.object;
        }
        return true;
    }
    ret +=
        meta.props
            .filter(p =>
                p.embedded?.[0] === prop.name &&
                // object for JSON embeddable
                (p.object || p.persist !== false))
            .map(childProp => {
                const childDataKey =
                    meta.embeddable || prop.object ? dataKey + this.wrap(childProp.embedded[1]) : this.wrap(childProp.name);
                const childEntityKey = [...path, childProp.embedded[1]].map(k => this.wrap(k)).join('');
                const childCond = `typeof entity${childEntityKey} !== 'undefined'`;
                // nested embeddables recurse one level deeper
                if (childProp.kind === ReferenceKind.EMBEDDED) {
                    return this.getPropertySnapshot(
                        meta,
                        childProp,
                        context,
                        childDataKey,
                        childEntityKey,
                        [...path, childProp.embedded[1]],
                        level + 1,
                        prop.object,
                    );
                }
                // relations inside embeddables reuse the generic snapshot, re-indented
                if (childProp.kind !== ReferenceKind.SCALAR) {
                    return this.getPropertySnapshot(
                        meta,
                        childProp,
                        context,
                        childDataKey,
                        childEntityKey,
                        [...path, childProp.embedded[1]],
                        level,
                        prop.object,
                    )
                        .split('\n')
                        .map(l => padding + l)
                        .join('\n');
                }
                if (shouldProcessCustomType(childProp)) {
                    const convertorKey = this.registerCustomType(childProp, context);
                    // primitives need no cloning after conversion
                    if (['number', 'string', 'boolean', 'bigint'].includes(childProp.customType.compareAsType().toLowerCase())) {
                        return `${padding} if (${childCond}) ret${childDataKey} = convertToDatabaseValue_${convertorKey}(entity${childEntityKey});`;
                    }
                    return `${padding} if (${childCond}) ret${childDataKey} = clone(convertToDatabaseValue_${convertorKey}(entity${childEntityKey}));`;
                }
                return `${padding} if (${childCond}) ret${childDataKey} = clone(entity${childEntityKey});`;
            })
            .join('\n') + `\n`;
    if (this.shouldSerialize(prop, dataKey)) {
        return `${ret + padding} ret${dataKey} = cloneEmbeddable(ret${dataKey});\n${padding}}`;
    }
    return `${ret}${padding}}`;
}
|
||||
registerCustomType(prop, context) {
|
||||
const convertorKey = this.safeKey(prop.name);
|
||||
context.set(`convertToDatabaseValue_${convertorKey}`, val => {
|
||||
/* v8 ignore next */
|
||||
if (Raw.isKnownFragment(val)) {
|
||||
return val;
|
||||
}
|
||||
return prop.customType.convertToDatabaseValue(val, this.#platform, { mode: 'serialization' });
|
||||
});
|
||||
return convertorKey;
|
||||
}
|
||||
/**
 * Generates snapshot code for a single property, dispatching on its kind:
 * plain scalars are assigned directly, embeddables recurse, owning references
 * store their PK(s), custom types run through `convertToDatabaseValue`, and
 * everything else is deep-cloned.
 */
getPropertySnapshot(meta, prop, context, dataKey, entityKey, path, level = 1, object) {
    const unwrap = prop.ref ? '?.unwrap()' : '';
    let ret = ` if (${this.getPropertyCondition(path)}) {\n`;
    // primitives can be assigned directly, no cloning needed
    if (['number', 'string', 'boolean'].includes(prop.type.toLowerCase())) {
        return ret + ` ret${dataKey} = entity${entityKey}${unwrap};\n }\n`;
    }
    if (prop.kind === ReferenceKind.EMBEDDED) {
        if (prop.array) {
            return this.getEmbeddedArrayPropertySnapshot(meta, prop, context, level, path, dataKey) + '\n';
        }
        return this.getEmbeddedPropertySnapshot(meta, prop, context, level, path, dataKey, object) + '\n';
    }
    if (prop.kind === ReferenceKind.ONE_TO_ONE || prop.kind === ReferenceKind.MANY_TO_ONE) {
        if (prop.mapToPk) {
            if (prop.customType) {
                const convertorKey = this.registerCustomType(prop, context);
                ret += ` ret${dataKey} = convertToDatabaseValue_${convertorKey}(entity${entityKey});\n`;
            }
            else {
                ret += ` ret${dataKey} = entity${entityKey};\n`;
            }
        }
        else if (prop.polymorphic) {
            // polymorphic refs snapshot as a (discriminator, PK) pair
            const discriminatorMapKey = `discriminatorMapReverse_${prop.name}`;
            const reverseMap = new Map();
            for (const [key, value] of Object.entries(prop.discriminatorMap)) {
                reverseMap.set(value, key);
            }
            context.set(discriminatorMapKey, reverseMap);
            this.setToArrayHelper(context);
            context.set('EntityIdentifier', EntityIdentifier);
            context.set('PolymorphicRef', PolymorphicRef);
            ret += ` if (entity${entityKey} === null) {\n`;
            ret += ` ret${dataKey} = null;\n`;
            ret += ` } else if (typeof entity${entityKey} !== 'undefined') {\n`;
            ret += ` const val${level} = entity${entityKey}${unwrap};\n`;
            ret += ` const discriminator = ${discriminatorMapKey}.get(val${level}?.constructor);\n`;
            // entities without a PK yet snapshot their identifier placeholder instead
            ret += ` const pk = val${level}?.__helper?.__identifier && !val${level}?.__helper?.hasPrimaryKey()\n`;
            ret += ` ? val${level}.__helper.__identifier\n`;
            ret += ` : toArray(val${level}?.__helper?.getPrimaryKey(true));\n`;
            ret += ` ret${dataKey} = new PolymorphicRef(discriminator, pk);\n`;
            ret += ` }\n`;
        }
        else if (prop.targetKey) {
            // When targetKey is set, extract that property value instead of the PK
            const targetProp = prop.targetMeta?.properties[prop.targetKey];
            ret += ` if (entity${entityKey} === null) {\n`;
            ret += ` ret${dataKey} = null;\n`;
            ret += ` } else if (typeof entity${entityKey} !== 'undefined') {\n`;
            ret += ` const val${level} = entity${entityKey}${unwrap};\n`;
            if (targetProp?.customType) {
                // If targetKey property has a custom type, convert to database value
                const convertorKey = this.registerCustomType(targetProp, context);
                ret += ` ret${dataKey} = convertToDatabaseValue_${convertorKey}(val${level}?.${prop.targetKey});\n`;
            }
            else {
                ret += ` ret${dataKey} = val${level}?.${prop.targetKey};\n`;
            }
            ret += ` }\n`;
        }
        else {
            this.setToArrayHelper(context);
            context.set('EntityIdentifier', EntityIdentifier);
            ret += ` if (entity${entityKey} === null) {\n`;
            ret += ` ret${dataKey} = null;\n`;
            // no PK assigned yet -> snapshot the identifier placeholder
            ret += ` } else if (entity${entityKey}?.__helper.__identifier && !entity${entityKey}.__helper.hasPrimaryKey()) {\n`;
            ret += ` ret${dataKey} = entity${entityKey}?.__helper.__identifier;\n`;
            ret += ` } else if (typeof entity${entityKey} !== 'undefined') {\n`;
            ret += ` ret${dataKey} = toArray(entity${entityKey}.__helper.getPrimaryKey(true));\n`;
            ret += ` }\n`;
        }
        return ret + ' }\n';
    }
    if (prop.customType) {
        const convertorKey = this.registerCustomType(prop, context);
        // primitives need no cloning after conversion
        if (['number', 'string', 'boolean', 'bigint'].includes(prop.customType.compareAsType().toLowerCase())) {
            return ret + ` ret${dataKey} = convertToDatabaseValue_${convertorKey}(entity${entityKey}${unwrap});\n }\n`;
        }
        return (
            ret + ` ret${dataKey} = clone(convertToDatabaseValue_${convertorKey}(entity${entityKey}${unwrap}));\n }\n`
        );
    }
    if (prop.runtimeType === 'Date') {
        // platform hook normalizes Date values before snapshotting
        context.set('processDateProperty', this.#platform.processDateProperty.bind(this.#platform));
        return ret + ` ret${dataKey} = clone(processDateProperty(entity${entityKey}${unwrap}));\n }\n`;
    }
    return ret + ` ret${dataKey} = clone(entity${entityKey}${unwrap});\n }\n`;
}
|
||||
/**
 * @internal Highly performance-sensitive method.
 *
 * Returns a compiled diff function `(last, current, options) => diff` for the
 * given entity, building and caching it on first use.
 */
getEntityComparator(entityName) {
    const meta = this.#metadata.find(entityName);
    // comparators are compiled once per entity and cached
    const exists = this.#comparators.get(meta);
    if (exists) {
        return exists;
    }
    const lines = [];
    const context = new Map();
    // comparison helpers available to the generated function body
    context.set('compareArrays', compareArrays);
    context.set('compareBooleans', compareBooleans);
    context.set('compareBuffers', compareBuffers);
    context.set('compareObjects', compareObjects);
    context.set('equals', equals);
    for (const prop of meta.comparableProps) {
        lines.push(this.getPropertyComparator(prop, context));
    }
    // also compare 1:1 inverse sides, important for `factory.mergeData`
    lines.push(`if (options?.includeInverseSides) {`);
    for (const prop of meta.bidirectionalRelations) {
        if (prop.kind === ReferenceKind.ONE_TO_ONE && !prop.owner && prop.hydrate !== false) {
            lines.push(this.getPropertyComparator(prop, context));
        }
    }
    lines.push(`}`);
    const code =
        `// compiled comparator for entity ${meta.className}\n` +
        `return function(last, current, options) {\n const diff = {};\n${lines.join('\n')}\n return diff;\n}`;
    const fnKey = `comparator-${meta.uniqueName}`;
    const comparator = Utils.createFunction(context, code, this.#config?.get('compiledFunctions'), fnKey);
    this.#comparators.set(meta, comparator);
    return comparator;
}
|
||||
getGenericComparator(prop, cond) {
|
||||
return (
|
||||
` if (current${prop} === null && last${prop} === undefined) {\n` +
|
||||
` diff${prop} = current${prop};\n` +
|
||||
` } else if (current${prop} == null && last${prop} == null) {\n\n` +
|
||||
` } else if ((current${prop} != null && last${prop} == null) || (current${prop} == null && last${prop} != null)) {\n` +
|
||||
` diff${prop} = current${prop};\n` +
|
||||
` } else if (${cond}) {\n` +
|
||||
` diff${prop} = current${prop};\n` +
|
||||
` }\n`
|
||||
);
|
||||
}
|
||||
/**
 * Generates the diff code for one property, choosing the comparison strategy
 * from the resolved type (relations compare via their target PK type).
 */
getPropertyComparator(prop, context) {
    let type = prop.type.toLowerCase();
    // relations: compare by target PK type, or as a tuple for composite PKs
    if (prop.kind !== ReferenceKind.SCALAR && prop.kind !== ReferenceKind.EMBEDDED) {
        if (prop.polymorphic) {
            type = 'object';
        }
        else {
            const meta2 = prop.targetMeta;
            if (meta2.primaryKeys.length > 1) {
                type = 'array';
            }
            else {
                type = meta2.getPrimaryProp().type.toLowerCase();
            }
        }
    }
    if (prop.customType) {
        // custom types may provide their own equality implementation
        if (prop.customType.compareValues) {
            const idx = this.#tmpIndex++;
            context.set(`compareValues_${idx}`, (a, b) => {
                // raw SQL fragments compare by fragment identity, not custom equality
                if (Raw.isKnownFragment(a) || Raw.isKnownFragment(b)) {
                    return Raw.getKnownFragment(a) === Raw.getKnownFragment(b);
                }
                return prop.customType.compareValues(a, b);
            });
            return this.getGenericComparator(
                this.wrap(prop.name),
                `!compareValues_${idx}(last${this.wrap(prop.name)}, current${this.wrap(prop.name)})`,
            );
        }
        type = prop.customType.compareAsType().toLowerCase();
    }
    if (type.endsWith('[]')) {
        type = 'array';
    }
    if (['string', 'number', 'bigint'].includes(type)) {
        return this.getGenericComparator(
            this.wrap(prop.name),
            `last${this.wrap(prop.name)} !== current${this.wrap(prop.name)}`,
        );
    }
    if (type === 'boolean') {
        return this.getGenericComparator(
            this.wrap(prop.name),
            `!compareBooleans(last${this.wrap(prop.name)}, current${this.wrap(prop.name)})`,
        );
    }
    if (['array'].includes(type) || type.endsWith('[]')) {
        return this.getGenericComparator(
            this.wrap(prop.name),
            `!compareArrays(last${this.wrap(prop.name)}, current${this.wrap(prop.name)})`,
        );
    }
    if (['buffer', 'uint8array'].includes(type)) {
        return this.getGenericComparator(
            this.wrap(prop.name),
            `!compareBuffers(last${this.wrap(prop.name)}, current${this.wrap(prop.name)})`,
        );
    }
    if (type === 'date') {
        return this.getGenericComparator(
            this.wrap(prop.name),
            `last${this.wrap(prop.name)}.valueOf() !== current${this.wrap(prop.name)}.valueOf()`,
        );
    }
    if (type === 'objectid') {
        // We might be comparing PK to object, in case we compare with cached data of populated entity
        // in such case we just ignore the comparison and fallback to `equals()` (which will still mark
        // it as not equal as we compare PK to plain object).
        const cond = `last${this.wrap(prop.name)}.toHexString?.() !== current${this.wrap(prop.name)}.toHexString?.()`;
        return this.getGenericComparator(this.wrap(prop.name), cond);
    }
    // fallback: structural deep equality
    return this.getGenericComparator(
        this.wrap(prop.name),
        `!equals(last${this.wrap(prop.name)}, current${this.wrap(prop.name)})`,
    );
}
|
||||
wrap(key) {
|
||||
if (/^\[.*]$/.exec(key)) {
|
||||
return key;
|
||||
}
|
||||
return /^\w+$/.exec(key) ? `.${key}` : `['${key}']`;
|
||||
}
|
||||
safeKey(key) {
|
||||
return key.replace(/\W/g, '_');
|
||||
}
|
||||
/**
|
||||
* Sets the toArray helper in the context if not already set.
|
||||
* Used for converting composite PKs to arrays.
|
||||
*/
|
||||
setToArrayHelper(context) {
|
||||
if (context.has('toArray')) {
|
||||
return;
|
||||
}
|
||||
const toArray = val => {
|
||||
if (Utils.isPlainObject(val)) {
|
||||
return Object.values(val).map(v => toArray(v));
|
||||
}
|
||||
return val;
|
||||
};
|
||||
context.set('toArray', toArray);
|
||||
}
|
||||
/**
|
||||
* perf: used to generate list of comparable properties during discovery, so we speed up the runtime comparison
|
||||
*/
|
||||
static isComparable(prop, root) {
|
||||
const virtual = prop.persist === false || (prop.generated && !prop.primary);
|
||||
const inverse = prop.kind === ReferenceKind.ONE_TO_ONE && !prop.owner;
|
||||
const discriminator = prop.name === root.discriminatorColumn;
|
||||
const collection = prop.kind === ReferenceKind.ONE_TO_MANY || prop.kind === ReferenceKind.MANY_TO_MANY;
|
||||
return !virtual && !collection && !inverse && !discriminator && !prop.version;
|
||||
}
|
||||
}
|
||||
5
node_modules/@mikro-orm/core/utils/NullHighlighter.d.ts
generated
vendored
Normal file
5
node_modules/@mikro-orm/core/utils/NullHighlighter.d.ts
generated
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
import type { Highlighter } from '../typings.js';
/** No-op highlighter that returns SQL text unchanged. Used as the default when no syntax highlighting is configured. */
export declare class NullHighlighter implements Highlighter {
    /** Returns `text` as-is, without applying any highlighting. */
    highlight(text: string): string;
}
|
||||
6
node_modules/@mikro-orm/core/utils/NullHighlighter.js
generated
vendored
Normal file
6
node_modules/@mikro-orm/core/utils/NullHighlighter.js
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
/** No-op highlighter that returns SQL text unchanged. Used as the default when no syntax highlighting is configured. */
export class NullHighlighter {
    /** Identity transform: the given SQL string is returned untouched. */
    highlight(text) {
        return text;
    }
}
|
||||
90
node_modules/@mikro-orm/core/utils/QueryHelper.d.ts
generated
vendored
Normal file
90
node_modules/@mikro-orm/core/utils/QueryHelper.d.ts
generated
vendored
Normal file
@@ -0,0 +1,90 @@
|
||||
import type { Dictionary, EntityMetadata, EntityName, EntityProperty, FilterDef, FilterQuery } from '../typings.js';
|
||||
import { type QueryOrderMap } from '../enums.js';
|
||||
import type { Platform } from '../platforms/Platform.js';
|
||||
import type { MetadataStorage } from '../metadata/MetadataStorage.js';
|
||||
import type { FilterOptions } from '../drivers/IDatabaseDriver.js';
|
||||
/** @internal */
export declare class QueryHelper {
    /** Inline comparison operators allowed directly inside query keys. */
    static readonly SUPPORTED_OPERATORS: string[];
    /**
     * Finds the discriminator value (key) for a given entity class in a discriminator map.
     */
    static findDiscriminatorValue<T>(discriminatorMap: Dictionary<T>, targetClass: T): string | undefined;
    /** Recursively normalizes query params: entities/references to PK(s), `undefined` to `null`. */
    static processParams(params: unknown): any;
    /** Processes every queryable key of the given object in place via `processParams`. */
    static processObjectParams<T extends Dictionary>(params?: T): T;
    /**
     * converts `{ account: { $or: [ [Object], [Object] ] } }`
     * to `{ $or: [ { account: [Object] }, { account: [Object] } ] }`
     */
    static liftGroupOperators<T extends object>(
        where: Dictionary,
        meta: EntityMetadata<T>,
        metadata: MetadataStorage,
        key?: string,
    ): string | undefined;
    /** Returns true when `where` holds only the PK(s) and can be inlined into plain PK values. */
    static inlinePrimaryKeyObjects<T extends object>(
        where: Dictionary,
        meta: EntityMetadata<T>,
        metadata: MetadataStorage,
        key?: string,
    ): boolean;
    /** Main entry point: normalizes a `where` condition tree before it reaches the driver. */
    static processWhere<T extends object>(options: ProcessWhereOptions<T>): FilterQuery<T>;
    /** Resolves which registered filters apply for the given entity and filter options. */
    static getActiveFilters<T>(
        meta: EntityMetadata<T>,
        options: FilterOptions | undefined,
        filters: Dictionary<FilterDef>,
    ): FilterDef[];
    /** Merges property-level filter options with call-level ones (`true` short-circuits). */
    static mergePropertyFilters(
        propFilters: FilterOptions | undefined,
        options: FilterOptions | undefined,
    ): FilterOptions | undefined;
    /** Whether a single filter is enabled for the given entity and options. */
    static isFilterActive<T>(
        meta: EntityMetadata<T>,
        filterName: string,
        filter: FilterDef,
        options: Dictionary<boolean | Dictionary>,
    ): boolean;
    /** Runs a condition through the property's custom type `convertToDatabaseValue`. */
    static processCustomType<T extends object>(
        prop: EntityProperty<T>,
        cond: FilterQuery<T>,
        platform: Platform,
        key?: string,
        fromQuery?: boolean,
    ): FilterQuery<T>;
    private static isSupportedOperator;
    private static processJsonCondition;
    /** Resolves the `EntityProperty` for a possibly-aliased field name (`alias.prop`). */
    static findProperty<T>(fieldName: string, options: ProcessWhereOptions<T>): EntityProperty<T> | undefined;
    /**
     * Converts entity references for composite FK properties into flat arrays
     * of correctly-ordered join column values, before processParams flattens them
     * incorrectly due to shared FK columns.
     */
    private static convertCompositeEntityRefs;
    /**
     * Extracts values for a FK's join columns from an entity by traversing the FK chain.
     * Handles shared FK columns (e.g., tenant_id referenced by multiple FKs) correctly.
     */
    private static extractJoinColumnValues;
    /**
     * Extracts the value for a specific column from an entity by finding which PK property
     * owns that column and recursively traversing FK references.
     */
    private static extractColumnValue;
    /**
     * Merges multiple orderBy sources with key-level deduplication (first-seen key wins).
     * RawQueryFragment symbol keys are never deduped (each is unique).
     */
    static mergeOrderBy<T>(...sources: (QueryOrderMap<T> | QueryOrderMap<T>[] | undefined)[]): QueryOrderMap<T>[];
}
|
||||
/** Options bag for `QueryHelper.processWhere`. */
interface ProcessWhereOptions<T> {
    where: FilterQuery<T>;
    entityName: EntityName<T>;
    metadata: MetadataStorage;
    platform: Platform;
    /** whether the condition may reference query aliases */
    aliased?: boolean;
    /** alias -> entity name lookup used by `findProperty` */
    aliasMap?: Dictionary<EntityName>;
    convertCustomTypes?: boolean;
    /** true on the top-level call, false while recursing */
    root?: boolean;
    type?: 'where' | 'orderBy';
}
|
||||
export {};
|
||||
397
node_modules/@mikro-orm/core/utils/QueryHelper.js
generated
vendored
Normal file
397
node_modules/@mikro-orm/core/utils/QueryHelper.js
generated
vendored
Normal file
@@ -0,0 +1,397 @@
|
||||
import { Reference } from '../entity/Reference.js';
|
||||
import { Utils } from './Utils.js';
|
||||
import { ARRAY_OPERATORS, GroupOperator, JSON_KEY_OPERATORS, ReferenceKind } from '../enums.js';
|
||||
import { JsonType } from '../types/JsonType.js';
|
||||
import { helper } from '../entity/wrap.js';
|
||||
import { isRaw, Raw } from './RawQueryFragment.js';
|
||||
/** @internal */
|
||||
export class QueryHelper {
|
||||
// inline comparison operators that may appear directly inside query keys
static SUPPORTED_OPERATORS = ['>', '<', '<=', '>=', '!', '!='];
|
||||
/**
|
||||
* Finds the discriminator value (key) for a given entity class in a discriminator map.
|
||||
*/
|
||||
static findDiscriminatorValue(discriminatorMap, targetClass) {
|
||||
return Object.entries(discriminatorMap).find(([, cls]) => cls === targetClass)?.[0];
|
||||
}
|
||||
/**
 * Recursively normalizes query parameters: references are unwrapped, entities
 * reduce to their primary key(s), `undefined` becomes `null`, and arrays and
 * plain objects are processed per item (objects in place).
 */
static processParams(params) {
    if (Reference.isReference(params)) {
        params = params.unwrap();
    }
    if (Utils.isEntity(params)) {
        // composite PK entities map to an array of PK values
        if (helper(params).__meta.compositePK) {
            return helper(params).__primaryKeys;
        }
        return helper(params).getPrimaryKey();
    }
    // `undefined` is not a valid SQL parameter, coerce to null
    if (params === undefined) {
        return null;
    }
    if (Array.isArray(params)) {
        return params.map(item => QueryHelper.processParams(item));
    }
    if (Utils.isPlainObject(params)) {
        QueryHelper.processObjectParams(params);
    }
    return params;
}
|
||||
static processObjectParams(params = {}) {
|
||||
Utils.getObjectQueryKeys(params).forEach(k => {
|
||||
params[k] = QueryHelper.processParams(params[k]);
|
||||
});
|
||||
return params;
|
||||
}
|
||||
/**
 * converts `{ account: { $or: [ [Object], [Object] ] } }`
 * to `{ $or: [ { account: [Object] }, { account: [Object] } ] }`
 *
 * Returns the detected group operator name when `where` itself is liftable,
 * otherwise lifts eligible nested relation conditions in place and returns undefined.
 */
static liftGroupOperators(where, meta, metadata, key) {
    if (!Utils.isPlainObject(where)) {
        return undefined;
    }
    const keys = Object.keys(where);
    // detect a group operator whose every branch targets only the PK(s)
    const groupOperator = keys.find(k => {
        return (
            k in GroupOperator &&
            Array.isArray(where[k]) &&
            where[k].every(cond => {
                return (
                    Utils.isPlainObject(cond) &&
                    Object.keys(cond).every(k2 => {
                        if (Utils.isOperator(k2, false)) {
                            if (k2 === '$not') {
                                return Object.keys(cond[k2]).every(k3 => meta.primaryKeys.includes(k3));
                            }
                            /* v8 ignore next */
                            return true;
                        }
                        return meta.primaryKeys.includes(k2);
                    })
                );
            })
        );
    });
    if (groupOperator) {
        return groupOperator;
    }
    for (const k of keys) {
        const value = where[k];
        const prop = meta.properties[k];
        // Polymorphic relations use multiple columns (discriminator + FK), so they cannot
        // participate in the standard single-column FK expansion. Query by discriminator
        // column directly instead, e.g. { likeableType: 'post', likeableId: 1 }.
        if (!prop || ![ReferenceKind.MANY_TO_ONE, ReferenceKind.ONE_TO_ONE].includes(prop.kind) || prop.polymorphic) {
            continue;
        }
        const op = this.liftGroupOperators(value, prop.targetMeta, metadata, k);
        if (op) {
            // lift: move the group operator above the relation key
            delete where[k];
            where[op] = value[op].map(v => {
                return { [k]: v };
            });
        }
    }
    return undefined;
}
|
||||
/**
 * Detects `where` sub-objects that contain only primary key(s), so they can be
 * inlined into plain PK values (avoiding an unnecessary join). Returns true
 * when the caller should replace the inspected object with its PK values;
 * otherwise recurses and rewrites nested candidates in place.
 */
static inlinePrimaryKeyObjects(where, meta, metadata, key) {
    if (Array.isArray(where)) {
        where.forEach((item, i) => {
            if (this.inlinePrimaryKeyObjects(item, meta, metadata, key)) {
                where[i] = Utils.getPrimaryKeyValues(item, meta, false);
            }
        });
    }
    // JSON-typed properties keep their object conditions untouched
    if (!Utils.isPlainObject(where) || (key && meta.properties[key]?.customType instanceof JsonType)) {
        return false;
    }
    // candidate: the object contains exactly the PK(s) and nothing else
    if (meta.primaryKeys.every(pk => pk in where) && Utils.getObjectKeysSize(where) === meta.primaryKeys.length) {
        return (
            !!key &&
            !GroupOperator[key] &&
            key !== '$not' &&
            Object.keys(where).every(
                k =>
                    !Utils.isPlainObject(where[k]) ||
                    Object.keys(where[k]).every(v => {
                        if (Utils.isOperator(v, false)) {
                            return true;
                        }
                        // FK that is itself part of the PK: recurse into the target entity
                        if (
                            meta.properties[k].primary &&
                            [ReferenceKind.ONE_TO_ONE, ReferenceKind.MANY_TO_ONE].includes(meta.properties[k].kind)
                        ) {
                            return this.inlinePrimaryKeyObjects(where[k], meta.properties[k].targetMeta, metadata, v);
                        }
                        /* v8 ignore next */
                        return true;
                    }),
            )
        );
    }
    // otherwise recurse into each sub-condition with its property's target metadata
    Object.keys(where).forEach(k => {
        const meta2 = metadata.find(meta.properties[k]?.targetMeta?.class) || meta;
        if (this.inlinePrimaryKeyObjects(where[k], meta2, metadata, k)) {
            where[k] = Utils.getPrimaryKeyValues(where[k], meta2, true);
        }
    });
    return false;
}
|
||||
/**
 * Main `where` normalization pipeline: lifts group operators, inlines PK-only
 * objects, converts composite entity refs, normalizes params and custom types,
 * wraps bare/top-level PK conditions, and recurses through the condition tree.
 */
static processWhere(options) {
    // eslint-disable-next-line prefer-const
    let { where, entityName, metadata, platform, aliased = true, convertCustomTypes = true, root = true } = options;
    const meta = metadata.find(entityName);
    // inline PK-only objects in M:N queries, so we don't join the target entity when not needed
    if (meta && root) {
        QueryHelper.liftGroupOperators(where, meta, metadata);
        QueryHelper.inlinePrimaryKeyObjects(where, meta, metadata);
    }
    if (meta && root) {
        QueryHelper.convertCompositeEntityRefs(where, meta);
    }
    if (platform.getConfig().get('ignoreUndefinedInQuery') && where && typeof where === 'object') {
        Utils.dropUndefinedProperties(where);
    }
    where = QueryHelper.processParams(where) ?? {};
    /* v8 ignore next */
    if (!root && Utils.isPrimaryKey(where)) {
        return where;
    }
    // a bare PK value at the root becomes `{ <pkHash>: value }`
    if (meta && Utils.isPrimaryKey(where, meta.compositePK)) {
        where = { [Utils.getPrimaryKeyHash(meta.primaryKeys)]: where };
    }
    if (Array.isArray(where) && root) {
        const rootPrimaryKey = meta ? Utils.getPrimaryKeyHash(meta.primaryKeys) : Utils.className(entityName);
        let cond = { [rootPrimaryKey]: { $in: where } };
        // @ts-ignore
        // detect tuple comparison, use `$or` in case the number of constituents don't match
        if (
            meta &&
            !where.every(
                c =>
                    Utils.isPrimaryKey(c) ||
                    (Array.isArray(c) && c.length === meta.primaryKeys.length && c.every(i => Utils.isPrimaryKey(i))),
            )
        ) {
            cond = { $or: where };
        }
        return QueryHelper.processWhere({ ...options, where: cond, root: false });
    }
    if (!Utils.isPlainObject(where)) {
        return where;
    }
    return Utils.getObjectQueryKeys(where).reduce((o, key) => {
        let value = where[key];
        const customExpression = Raw.isKnownFragmentSymbol(key);
        // empty arrays under raw fragments are kept verbatim
        if (Array.isArray(value) && value.length === 0 && customExpression) {
            o[key] = value;
            return o;
        }
        // group operators recurse into each branch at the same root level
        if (key in GroupOperator) {
            o[key] = value.map(sub => QueryHelper.processWhere({ ...options, where: sub, root }));
            return o;
        }
        // wrap top level operators (except platform allowed operators) with PK
        if (Utils.isOperator(key) && root && meta && !platform.isAllowedTopLevelOperator(key)) {
            const rootPrimaryKey = Utils.getPrimaryKeyHash(meta.primaryKeys);
            o[rootPrimaryKey] = { [key]: QueryHelper.processWhere({ ...options, where: value, root: false }) };
            return o;
        }
        const prop = customExpression ? null : this.findProperty(key, options);
        const keys = prop?.joinColumns?.length ?? 0;
        const composite = keys > 1;
        if (prop?.customType && convertCustomTypes && !isRaw(value)) {
            value = QueryHelper.processCustomType(prop, value, platform, undefined, true);
        }
        // oxfmt-ignore
        const isJsonProperty = prop?.customType instanceof JsonType && !isRaw(value) && (Utils.isPlainObject(value) ? !['$eq', '$elemMatch'].includes(Object.keys(value)[0]) : !Array.isArray(value));
        if (isJsonProperty && prop?.kind !== ReferenceKind.EMBEDDED) {
            return this.processJsonCondition(o, value, [prop.fieldNames[0]], platform, aliased);
        }
        // oxfmt-ignore
        if (Array.isArray(value) && !Utils.isOperator(key) && !QueryHelper.isSupportedOperator(key) && !(customExpression && Raw.getKnownFragment(key).params.length > 0) && options.type !== 'orderBy') {
            // comparing single composite key - use $eq instead of $in
            const op = composite && !value.every(v => Array.isArray(v)) ? '$eq' : '$in';
            o[key] = { [op]: value };
            return o;
        }
        if (Utils.isPlainObject(value)) {
            o[key] = QueryHelper.processWhere({
                ...options,
                where: value,
                entityName: prop?.targetMeta?.class ?? entityName,
                root: false,
            });
        }
        else {
            o[key] = value;
        }
        return o;
    }, {});
}
|
||||
static getActiveFilters(meta, options, filters) {
|
||||
if (options === false) {
|
||||
return [];
|
||||
}
|
||||
const opts = {};
|
||||
if (Array.isArray(options)) {
|
||||
options.forEach(filter => (opts[filter] = true));
|
||||
} else if (Utils.isPlainObject(options)) {
|
||||
Object.keys(options).forEach(filter => (opts[filter] = options[filter]));
|
||||
}
|
||||
return Object.keys(filters)
|
||||
.filter(f => QueryHelper.isFilterActive(meta, f, filters[f], opts))
|
||||
.map(f => {
|
||||
filters[f].name = f;
|
||||
return filters[f];
|
||||
});
|
||||
}
|
||||
static mergePropertyFilters(propFilters, options) {
|
||||
if (!options || !propFilters || options === true || propFilters === true) {
|
||||
return options ?? propFilters;
|
||||
}
|
||||
if (Array.isArray(propFilters)) {
|
||||
propFilters = propFilters.reduce((o, item) => {
|
||||
o[item] = true;
|
||||
return o;
|
||||
}, {});
|
||||
}
|
||||
if (Array.isArray(options)) {
|
||||
options = options.reduce((o, item) => {
|
||||
o[item] = true;
|
||||
return o;
|
||||
}, {});
|
||||
}
|
||||
return Utils.mergeConfig({}, propFilters, options);
|
||||
}
|
||||
static isFilterActive(meta, filterName, filter, options) {
|
||||
if (filter.entity && !filter.entity.includes(meta.className)) {
|
||||
return false;
|
||||
}
|
||||
if (options[filterName] === false) {
|
||||
return false;
|
||||
}
|
||||
return filter.default || filterName in options;
|
||||
}
|
||||
/**
 * Runs a condition through the property's custom type `convertToDatabaseValue`,
 * recursing through operators and referenced PKs, while passing raw fragments
 * through untouched and marshalling arrays for JSON key operators.
 */
static processCustomType(prop, cond, platform, key, fromQuery) {
    if (Utils.isPlainObject(cond)) {
        return Utils.getObjectQueryKeys(cond).reduce((o, k) => {
            // only operator keys and referenced PK keys are converted; the rest pass through
            if (!Raw.isKnownFragmentSymbol(k) && (Utils.isOperator(k, true) || prop.referencedPKs?.includes(k))) {
                o[k] = QueryHelper.processCustomType(prop, cond[k], platform, k, fromQuery);
            }
            else {
                o[k] = cond[k];
            }
            return o;
        }, {});
    }
    // JSON key operators take arrays in platform-specific marshalled form
    if (key && JSON_KEY_OPERATORS.includes(key)) {
        return Array.isArray(cond) ? platform.marshallArray(cond) : cond;
    }
    // convert array items individually, unless the operator expects a whole array
    if (Array.isArray(cond) && !(key && ARRAY_OPERATORS.includes(key))) {
        return cond.map(v => QueryHelper.processCustomType(prop, v, platform, key, fromQuery));
    }
    if (isRaw(cond)) {
        return cond;
    }
    return prop.customType.convertToDatabaseValue(cond, platform, { fromQuery, key, mode: 'query' });
}
|
||||
static isSupportedOperator(key) {
|
||||
return !!QueryHelper.SUPPORTED_OPERATORS.find(op => key === op);
|
||||
}
|
||||
/** Delegates JSON path conditions to the platform, which knows the dialect-specific JSON syntax. */
static processJsonCondition(o, value, path, platform, alias) {
    return platform.processJsonCondition(o, value, path, alias);
}
|
||||
static findProperty(fieldName, options) {
|
||||
const parts = fieldName.split('.');
|
||||
const propName = parts.pop();
|
||||
const alias = parts.length > 0 ? parts.join('.') : undefined;
|
||||
const entityName = alias ? options.aliasMap?.[alias] : options.entityName;
|
||||
const meta = entityName ? options.metadata.find(entityName) : undefined;
|
||||
return meta?.properties[propName];
|
||||
}
|
||||
/**
 * Converts entity references for composite FK properties into flat arrays
 * of correctly-ordered join column values, before processParams flattens them
 * incorrectly due to shared FK columns.
 */
static convertCompositeEntityRefs(where, meta) {
    if (!Utils.isPlainObject(where)) {
        return;
    }
    for (const k of Object.keys(where)) {
        // group operators hold arrays of sub-conditions -> recurse into each
        if (k in GroupOperator) {
            if (Array.isArray(where[k])) {
                where[k].forEach(sub => this.convertCompositeEntityRefs(sub, meta));
            }
            continue;
        }
        // $not wraps a single sub-condition
        if (k === '$not') {
            this.convertCompositeEntityRefs(where[k], meta);
            continue;
        }
        const prop = meta.properties[k];
        // only composite FKs (more than one join column) need conversion
        if (!prop?.joinColumns || prop.joinColumns.length <= 1) {
            continue;
        }
        const w = where[k];
        if (Utils.isEntity(w)) {
            // direct entity reference -> replaced in place with ordered join column values
            where[k] = this.extractJoinColumnValues(w, prop);
        } else if (Utils.isPlainObject(w)) {
            // operator object like `{ $in: [entity, ...] }` -> convert entities inside array operands
            for (const op of Object.keys(w)) {
                if (Utils.isOperator(op, false) && Array.isArray(w[op])) {
                    w[op] = w[op].map(item => (Utils.isEntity(item) ? this.extractJoinColumnValues(item, prop) : item));
                }
            }
        }
    }
}
|
||||
/**
|
||||
* Extracts values for a FK's join columns from an entity by traversing the FK chain.
|
||||
* Handles shared FK columns (e.g., tenant_id referenced by multiple FKs) correctly.
|
||||
*/
|
||||
static extractJoinColumnValues(entity, prop) {
|
||||
return prop.referencedColumnNames.map(refCol => {
|
||||
return this.extractColumnValue(entity, prop.targetMeta, refCol);
|
||||
});
|
||||
}
|
||||
/**
 * Extracts the value for a specific column from an entity by finding which PK property
 * owns that column and recursively traversing FK references.
 */
static extractColumnValue(entity, meta, columnName) {
    for (const pk of meta.primaryKeys) {
        const pkProp = meta.properties[pk];
        // does this PK property own the requested column?
        const colIdx = pkProp.fieldNames.indexOf(columnName);
        if (colIdx !== -1) {
            const value = entity[pk];
            // the PK is itself a FK holding an entity -> follow the chain to the referenced column
            if (pkProp.targetMeta && Utils.isEntity(value, true)) {
                const refCol = pkProp.referencedColumnNames[colIdx];
                return this.extractColumnValue(value, pkProp.targetMeta, refCol);
            }
            return value;
        }
    }
    // no PK property owns this column — presumably unreachable for valid metadata; TODO confirm
    return null;
}
|
||||
/**
|
||||
* Merges multiple orderBy sources with key-level deduplication (first-seen key wins).
|
||||
* RawQueryFragment symbol keys are never deduped (each is unique).
|
||||
*/
|
||||
static mergeOrderBy(...sources) {
|
||||
const result = [];
|
||||
const seenKeys = new Set();
|
||||
for (const source of sources) {
|
||||
if (source == null) {
|
||||
continue;
|
||||
}
|
||||
for (const item of Utils.asArray(source)) {
|
||||
for (const key of Utils.getObjectQueryKeys(item)) {
|
||||
if (typeof key === 'symbol') {
|
||||
result.push({ [key]: item[key] });
|
||||
} else if (!seenKeys.has(key)) {
|
||||
seenKeys.add(key);
|
||||
result.push({ [key]: item[key] });
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
152
node_modules/@mikro-orm/core/utils/RawQueryFragment.d.ts
generated
vendored
Normal file
152
node_modules/@mikro-orm/core/utils/RawQueryFragment.d.ts
generated
vendored
Normal file
@@ -0,0 +1,152 @@
|
||||
import type { AnyString, Dictionary, EntityKey } from '../typings.js';
|
||||
declare const rawFragmentSymbolBrand: unique symbol;
|
||||
/** Branded symbol type used as a unique key for tracking raw SQL fragments in object properties. */
|
||||
export type RawQueryFragmentSymbol = symbol & {
|
||||
readonly [rawFragmentSymbolBrand]: true;
|
||||
};
|
||||
/** Represents a raw SQL fragment with optional parameters, usable as both a value and an object key via Symbol coercion. */
export declare class RawQueryFragment<Alias extends string = string> {
    #private;
    /** The SQL text of the fragment (may contain `?`/`??` placeholders). */
    readonly sql: string;
    /** Positional parameters bound to the placeholders in `sql`. */
    readonly params: unknown[];
    /** @internal Type-level only - used to track the alias for type inference */
    private readonly __alias?;
    constructor(sql: string, params?: unknown[]);
    /** Returns a unique symbol key for this fragment, creating and caching it on first access. */
    get key(): RawQueryFragmentSymbol;
    /** Creates a new fragment with an alias appended via `as ??`. */
    as<A extends string>(alias: A): RawQueryFragment<A>;
    /** Coerces to the fragment's symbol key for string hints; other hints throw at runtime. */
    [Symbol.toPrimitive](hint: 'string'): RawQueryFragmentSymbol;
    get [Symbol.toStringTag](): string;
    /** Serializes as `raw('<sql>')`. */
    toJSON(): string;
    /** Fragments are immutable; cloning returns the same instance. */
    clone(): this;
    /** Checks whether the given value is a symbol that maps to a known raw query fragment. */
    static isKnownFragmentSymbol(key: unknown): key is RawQueryFragmentSymbol;
    /** Checks whether an object has any symbol keys that are known raw query fragments. */
    static hasObjectFragments(object: unknown): boolean;
    /** Checks whether the given value is a RawQueryFragment instance or a known fragment symbol. */
    static isKnownFragment(key: unknown): key is RawQueryFragment | symbol;
    /** Retrieves the RawQueryFragment associated with the given key (instance or symbol). */
    static getKnownFragment(key: unknown): RawQueryFragment | undefined;
}
|
||||
export { RawQueryFragment as Raw };
|
||||
/** Checks whether the given value is a `RawQueryFragment` instance. */
|
||||
export declare function isRaw(value: unknown): value is RawQueryFragment;
|
||||
/** @internal */
|
||||
export declare const ALIAS_REPLACEMENT = '[::alias::]';
|
||||
/** @internal */
|
||||
export declare const ALIAS_REPLACEMENT_RE = '\\[::alias::\\]';
|
||||
/**
|
||||
* Creates raw SQL query fragment that can be assigned to a property or part of a filter. This fragment is represented
|
||||
* by `RawQueryFragment` class instance that can be serialized to a string, so it can be used both as an object value
|
||||
* and key. When serialized, the fragment key gets cached and only such cached key will be recognized by the ORM.
|
||||
* This adds a runtime safety to the raw query fragments.
|
||||
*
|
||||
* > **`raw()` helper is required since v6 to use a raw fragment in your query, both through EntityManager and QueryBuilder.**
|
||||
*
|
||||
* ```ts
|
||||
* // as a value
|
||||
* await em.find(User, { time: raw('now()') });
|
||||
*
|
||||
* // as a key
|
||||
* await em.find(User, { [raw('lower(name)')]: name.toLowerCase() });
|
||||
*
|
||||
* // value can be empty array
|
||||
* await em.find(User, { [raw('(select 1 = 1)')]: [] });
|
||||
* ```
|
||||
*
|
||||
* The `raw` helper supports several signatures, you can pass in a callback that receives the current property alias:
|
||||
*
|
||||
* ```ts
|
||||
* await em.find(User, { [raw(alias => `lower(${alias}.name)`)]: name.toLowerCase() });
|
||||
* ```
|
||||
*
|
||||
* You can also use the `sql` tagged template function, which works the same, but supports only the simple string signature:
|
||||
*
|
||||
* ```ts
|
||||
* await em.find(User, { [sql`lower(name)`]: name.toLowerCase() });
|
||||
* ```
|
||||
*
|
||||
* When using inside filters, you might have to use a callback signature to create new raw instance for every filter usage.
|
||||
*
|
||||
* ```ts
|
||||
* @Filter({ name: 'long', cond: () => ({ [raw('length(perex)')]: { $gt: 10000 } }) })
|
||||
* ```
|
||||
*
|
||||
* The `raw` helper can be used within indexes and uniques to write database-agnostic SQL expressions. In that case, you can use `'??'` to tag your database identifiers (table name, column names, index name, ...) inside your expression, and pass those identifiers as a second parameter to the `raw` helper. Internally, those will automatically be quoted according to the database in use:
|
||||
*
|
||||
* ```ts
|
||||
* // On postgres, will produce: create index "index custom_idx_on_name" on "library.author" ("country")
|
||||
* // On mysql, will produce: create index `index custom_idx_on_name` on `library.author` (`country`)
|
||||
* @Index({ name: 'custom_idx_on_name', expression: (table, columns) => raw(`create index ?? on ?? (??)`, ['custom_idx_on_name', table, columns.name]) })
|
||||
* @Entity({ schema: 'library' })
|
||||
* export class Author { ... }
|
||||
* ```
|
||||
*
|
||||
* You can also use the `quote` tag function to write database-agnostic SQL expressions. The end-result is the same as using the `raw` function regarding database identifiers quoting, only to have a more elegant expression syntax:
|
||||
*
|
||||
* ```ts
|
||||
* @Index({ name: 'custom_idx_on_name', expression: (table, columns) => quote`create index ${'custom_idx_on_name'} on ${table} (${columns.name})` })
|
||||
* @Entity({ schema: 'library' })
|
||||
* export class Author { ... }
|
||||
* ```
|
||||
*/
|
||||
export declare function raw<R = RawQueryFragment & symbol, T extends object = any>(
|
||||
sql: EntityKey<T> | EntityKey<T>[] | AnyString | ((alias: string) => string) | RawQueryFragment,
|
||||
params?: readonly unknown[] | Dictionary<unknown>,
|
||||
): R;
|
||||
/**
|
||||
* Alternative to the `raw()` helper allowing to use it as a tagged template function for the simple cases.
|
||||
*
|
||||
* ```ts
|
||||
* // as a value
|
||||
* await em.find(User, { time: sql`now()` });
|
||||
*
|
||||
* // as a key
|
||||
* await em.find(User, { [sql`lower(name)`]: name.toLowerCase() });
|
||||
*
|
||||
* // value can be empty array
|
||||
* await em.find(User, { [sql`(select 1 = 1)`]: [] });
|
||||
*
|
||||
* // with type parameter for assignment without casting
|
||||
* entity.date = sql<Date>`now()`;
|
||||
* ```
|
||||
*/
|
||||
export declare function sql<R = RawQueryFragment & symbol>(sql: readonly string[], ...values: unknown[]): R;
|
||||
export declare namespace sql {
|
||||
var ref: <T extends object = any>(...keys: string[]) => RawQueryFragment & symbol;
|
||||
var now: (length?: number) => RawQueryFragment & symbol;
|
||||
var lower: <R = RawQueryFragment<string> & symbol, T extends object = any>(
|
||||
key: string | ((alias: string) => string),
|
||||
) => R;
|
||||
var upper: <R = RawQueryFragment<string> & symbol, T extends object = any>(
|
||||
key: string | ((alias: string) => string),
|
||||
) => R;
|
||||
}
|
||||
/** Creates a raw SQL function expression wrapping the given key (e.g., `lower(name)`). */
|
||||
export declare function createSqlFunction<R = RawQueryFragment & symbol, T extends object = any>(
|
||||
func: string,
|
||||
key: string | ((alias: string) => string),
|
||||
): R;
|
||||
/**
|
||||
* Tag function providing quoting of db identifiers (table name, columns names, index names, ...).
|
||||
*
|
||||
* Within the template literal on which the tag function is applied, all placeholders are considered to be database identifiers, and will thus be quoted as so according to the database in use.
|
||||
*
|
||||
* ```ts
|
||||
* // On postgres, will produce: create index "index custom_idx_on_name" on "library.author" ("name")
|
||||
* // On mysql, will produce: create index `index custom_idx_on_name` on `library.author` (`name`)
|
||||
* @Index({ name: 'custom_idx_on_name', expression: (table, columns, indexName) => quote`create index ${indexName} on ${table} (${columns.name})` })
|
||||
* @Entity({ schema: 'library' })
|
||||
* export class Author { ... }
|
||||
* ```
|
||||
*/
|
||||
export declare function quote(
|
||||
expParts: readonly string[],
|
||||
...values: (
|
||||
| string
|
||||
| {
|
||||
toString(): string;
|
||||
}
|
||||
)[]
|
||||
): RawQueryFragment & symbol;
|
||||
217
node_modules/@mikro-orm/core/utils/RawQueryFragment.js
generated
vendored
Normal file
217
node_modules/@mikro-orm/core/utils/RawQueryFragment.js
generated
vendored
Normal file
@@ -0,0 +1,217 @@
|
||||
import { Utils } from './Utils.js';
|
||||
/** Represents a raw SQL fragment with optional parameters, usable as both a value and an object key via Symbol coercion. */
export class RawQueryFragment {
    sql;
    params;
    // symbol -> fragment lookup; WeakMap with symbol keys so entries can be collected once the symbol is unreachable
    static #rawQueryReferences = new WeakMap();
    // lazily created symbol key, cached so repeated accesses return the same symbol
    #key;
    constructor(sql, params = []) {
        this.sql = sql;
        this.params = params;
    }
    /** Returns a unique symbol key for this fragment, creating and caching it on first access. */
    get key() {
        if (!this.#key) {
            this.#key = Symbol(this.toJSON());
            RawQueryFragment.#rawQueryReferences.set(this.#key, this);
        }
        return this.#key;
    }
    /** Creates a new fragment with an alias appended via `as ??`. */
    as(alias) {
        return new RawQueryFragment(`${this.sql} as ??`, [...this.params, alias]);
    }
    [Symbol.toPrimitive](hint) {
        // if a fragment is converted to string (used as an object key), return a unique symbol
        // and save a weak reference to map so we can retrieve it when compiling the query
        if (hint === 'string') {
            return this.key;
        }
        // any non-string coercion (arithmetic, comparison) indicates misuse of the fragment
        throw new Error(`Trying to modify raw SQL fragment: '${this.sql}'`);
    }
    get [Symbol.toStringTag]() {
        return this.toJSON();
    }
    toJSON() {
        return `raw('${this.sql}')`;
    }
    // fragments are treated as immutable, so cloning returns the same instance
    clone() {
        return this;
    }
    /** Checks whether the given value is a symbol that maps to a known raw query fragment. */
    static isKnownFragmentSymbol(key) {
        return typeof key === 'symbol' && this.#rawQueryReferences.has(key);
    }
    /** Checks whether an object has any symbol keys that are known raw query fragments. */
    static hasObjectFragments(object) {
        return (
            Utils.isPlainObject(object) &&
            Object.getOwnPropertySymbols(object).some(symbol => this.isKnownFragmentSymbol(symbol))
        );
    }
    /** Checks whether the given value is a RawQueryFragment instance or a known fragment symbol. */
    static isKnownFragment(key) {
        if (key instanceof RawQueryFragment) {
            return true;
        }
        return this.isKnownFragmentSymbol(key);
    }
    /** Retrieves the RawQueryFragment associated with the given key (instance or symbol). */
    static getKnownFragment(key) {
        if (key instanceof RawQueryFragment) {
            return key;
        }
        if (typeof key !== 'symbol') {
            return;
        }
        return this.#rawQueryReferences.get(key);
    }
    /** @ignore */
    /* v8 ignore next */
    [Symbol.for('nodejs.util.inspect.custom')]() {
        if (this.params) {
            return { sql: this.sql, params: this.params };
        }
        return { sql: this.sql };
    }
}
|
||||
export { RawQueryFragment as Raw };
// brand the prototype with a non-enumerable `__raw` marker so `isRaw()` can detect
// instances structurally — presumably to avoid instanceof issues across duplicated
// module copies; verify against isRaw() usage
Object.defineProperties(RawQueryFragment.prototype, {
    __raw: { value: true, enumerable: false },
});
|
||||
/** Checks whether the given value is a `RawQueryFragment` instance (branded via the `__raw` prototype flag). */
export function isRaw(value) {
    if (value === null || typeof value !== 'object') {
        return false;
    }
    // `in` walks the prototype chain, so instances are recognised via the branded prototype
    return '__raw' in value;
}
|
||||
/** @internal placeholder token substituted with the current property alias when the query is compiled */
export const ALIAS_REPLACEMENT = '[::alias::]';
/** @internal regex-escaped form of {@link ALIAS_REPLACEMENT} */
export const ALIAS_REPLACEMENT_RE = '\\[::alias::\\]';
|
||||
/**
|
||||
* Creates raw SQL query fragment that can be assigned to a property or part of a filter. This fragment is represented
|
||||
* by `RawQueryFragment` class instance that can be serialized to a string, so it can be used both as an object value
|
||||
* and key. When serialized, the fragment key gets cached and only such cached key will be recognized by the ORM.
|
||||
* This adds a runtime safety to the raw query fragments.
|
||||
*
|
||||
* > **`raw()` helper is required since v6 to use a raw fragment in your query, both through EntityManager and QueryBuilder.**
|
||||
*
|
||||
* ```ts
|
||||
* // as a value
|
||||
* await em.find(User, { time: raw('now()') });
|
||||
*
|
||||
* // as a key
|
||||
* await em.find(User, { [raw('lower(name)')]: name.toLowerCase() });
|
||||
*
|
||||
* // value can be empty array
|
||||
* await em.find(User, { [raw('(select 1 = 1)')]: [] });
|
||||
* ```
|
||||
*
|
||||
* The `raw` helper supports several signatures, you can pass in a callback that receives the current property alias:
|
||||
*
|
||||
* ```ts
|
||||
* await em.find(User, { [raw(alias => `lower(${alias}.name)`)]: name.toLowerCase() });
|
||||
* ```
|
||||
*
|
||||
* You can also use the `sql` tagged template function, which works the same, but supports only the simple string signature:
|
||||
*
|
||||
* ```ts
|
||||
* await em.find(User, { [sql`lower(name)`]: name.toLowerCase() });
|
||||
* ```
|
||||
*
|
||||
* When using inside filters, you might have to use a callback signature to create new raw instance for every filter usage.
|
||||
*
|
||||
* ```ts
|
||||
* @Filter({ name: 'long', cond: () => ({ [raw('length(perex)')]: { $gt: 10000 } }) })
|
||||
* ```
|
||||
*
|
||||
* The `raw` helper can be used within indexes and uniques to write database-agnostic SQL expressions. In that case, you can use `'??'` to tag your database identifiers (table name, column names, index name, ...) inside your expression, and pass those identifiers as a second parameter to the `raw` helper. Internally, those will automatically be quoted according to the database in use:
|
||||
*
|
||||
* ```ts
|
||||
* // On postgres, will produce: create index "index custom_idx_on_name" on "library.author" ("country")
|
||||
* // On mysql, will produce: create index `index custom_idx_on_name` on `library.author` (`country`)
|
||||
* @Index({ name: 'custom_idx_on_name', expression: (table, columns) => raw(`create index ?? on ?? (??)`, ['custom_idx_on_name', table, columns.name]) })
|
||||
* @Entity({ schema: 'library' })
|
||||
* export class Author { ... }
|
||||
* ```
|
||||
*
|
||||
* You can also use the `quote` tag function to write database-agnostic SQL expressions. The end-result is the same as using the `raw` function regarding database identifiers quoting, only to have a more elegant expression syntax:
|
||||
*
|
||||
* ```ts
|
||||
* @Index({ name: 'custom_idx_on_name', expression: (table, columns) => quote`create index ${'custom_idx_on_name'} on ${table} (${columns.name})` })
|
||||
* @Entity({ schema: 'library' })
|
||||
* export class Author { ... }
|
||||
* ```
|
||||
*/
|
||||
export function raw(sql, params) {
    // already a fragment -> pass through unchanged (raw(raw(x)) === raw(x))
    if (sql instanceof RawQueryFragment) {
        return sql;
    }
    // callback signature: invoke with the alias placeholder token, resolved at compile time
    if (sql instanceof Function) {
        sql = sql(ALIAS_REPLACEMENT);
    }
    // bare `??` with array params -> identifier-quoting fragment
    if (sql === '??' && Array.isArray(params)) {
        return new RawQueryFragment(sql, params);
    }
    if (Array.isArray(sql)) {
        // for composite FK we return just a simple string
        return Utils.getPrimaryKeyHash(sql);
    }
    // named-params object: `:key:` becomes `??` (identifier), `:key` becomes `?` (value)
    if (typeof params === 'object' && !Array.isArray(params)) {
        const pairs = Object.entries(params);
        const objectParams = [];
        for (const [key, value] of pairs) {
            // NOTE(review): String#replace substitutes only the FIRST occurrence of each
            // placeholder, and a `null` params object would reach this branch — presumably
            // callers pass each named param once and never pass null; verify upstream
            sql = sql.replace(`:${key}:`, '??');
            sql = sql.replace(`:${key}`, '?');
            objectParams.push(value);
        }
        return new RawQueryFragment(sql, objectParams);
    }
    return new RawQueryFragment(sql, params);
}
|
||||
/**
|
||||
* Alternative to the `raw()` helper allowing to use it as a tagged template function for the simple cases.
|
||||
*
|
||||
* ```ts
|
||||
* // as a value
|
||||
* await em.find(User, { time: sql`now()` });
|
||||
*
|
||||
* // as a key
|
||||
* await em.find(User, { [sql`lower(name)`]: name.toLowerCase() });
|
||||
*
|
||||
* // value can be empty array
|
||||
* await em.find(User, { [sql`(select 1 = 1)`]: [] });
|
||||
*
|
||||
* // with type parameter for assignment without casting
|
||||
* entity.date = sql<Date>`now()`;
|
||||
* ```
|
||||
*/
|
||||
/** Tagged-template variant of `raw()` — joins the template chunks with `?` placeholders bound to the interpolated values. */
export function sql(sql, ...values) {
    const text = sql.join('?');
    return raw(text, values);
}
|
||||
/** Creates a raw SQL function expression wrapping the given key (e.g. `lower(name)`); accepts a string key or an alias callback. */
export function createSqlFunction(func, key) {
    return typeof key === 'string'
        ? raw(`${func}(${key})`)
        : raw(alias => `${func}(${key(alias)})`);
}
|
||||
// `sql.ref('a', 'b')` -> `??` fragment quoting `a.b` as a database identifier
sql.ref = (...keys) => raw('??', [keys.join('.')]);
// `sql.now()` -> `current_timestamp`, optionally with precision, e.g. `current_timestamp(3)`
sql.now = length => raw('current_timestamp' + (length == null ? '' : `(${length})`));
// `sql.lower(key)` / `sql.upper(key)` -> `lower(...)` / `upper(...)` wrappers
sql.lower = key => createSqlFunction('lower', key);
sql.upper = key => createSqlFunction('upper', key);
|
||||
/**
|
||||
* Tag function providing quoting of db identifiers (table name, columns names, index names, ...).
|
||||
*
|
||||
* Within the template literal on which the tag function is applied, all placeholders are considered to be database identifiers, and will thus be quoted as so according to the database in use.
|
||||
*
|
||||
* ```ts
|
||||
* // On postgres, will produce: create index "index custom_idx_on_name" on "library.author" ("name")
|
||||
* // On mysql, will produce: create index `index custom_idx_on_name` on `library.author` (`name`)
|
||||
* @Index({ name: 'custom_idx_on_name', expression: (table, columns, indexName) => quote`create index ${indexName} on ${table} (${columns.name})` })
|
||||
* @Entity({ schema: 'library' })
|
||||
* export class Author { ... }
|
||||
* ```
|
||||
*/
|
||||
export function quote(expParts, ...values) {
    // every interpolation slot becomes a `??` identifier placeholder for `raw()` to quote per-database
    const expression = expParts.join('??');
    return raw(expression, values);
}
|
||||
42
node_modules/@mikro-orm/core/utils/RequestContext.d.ts
generated
vendored
Normal file
42
node_modules/@mikro-orm/core/utils/RequestContext.d.ts
generated
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
import type { EntityManager } from '../EntityManager.js';
|
||||
import { type LoggingOptions } from '../logging/Logger.js';
|
||||
/**
 * Uses `AsyncLocalStorage` to create async context that holds the current EM fork.
 */
export declare class RequestContext {
    /** Map of EntityManager name to its context-bound fork for this request. */
    readonly map: Map<string, EntityManager>;
    private static storage;
    private static counter;
    /** Monotonically increasing id of this context. */
    readonly id: number;
    constructor(map: Map<string, EntityManager>);
    /**
     * Returns default EntityManager.
     */
    get em(): EntityManager | undefined;
    /**
     * Creates new RequestContext instance and runs the code inside its domain.
     * If the handler is async, the return value needs to be awaited.
     * Uses `AsyncLocalStorage.run()`, suitable for regular express style middlewares with a `next` callback.
     */
    static create<T>(em: EntityManager | EntityManager[], next: (...args: any[]) => T, options?: CreateContextOptions): T;
    /**
     * Creates new RequestContext instance and runs the code inside its domain.
     * If the handler is async, the return value needs to be awaited.
     * Uses `AsyncLocalStorage.enterWith()`, suitable for elysia style middlewares without a `next` callback.
     */
    static enter(em: EntityManager | EntityManager[], options?: CreateContextOptions): void;
    /**
     * Returns current RequestContext (if available).
     */
    static currentRequestContext(): RequestContext | undefined;
    /**
     * Returns current EntityManager (if available).
     */
    static getEntityManager(name?: string): EntityManager | undefined;
    private static createContext;
}
|
||||
/** Options for creating a new RequestContext, allowing schema and logger overrides. */
export interface CreateContextOptions {
    /** Forwarded to `em.fork()` when the context forks are created. */
    schema?: string;
    /** Forwarded to `em.fork()` when the context forks are created. */
    loggerContext?: LoggingOptions;
}
|
||||
59
node_modules/@mikro-orm/core/utils/RequestContext.js
generated
vendored
Normal file
59
node_modules/@mikro-orm/core/utils/RequestContext.js
generated
vendored
Normal file
@@ -0,0 +1,59 @@
|
||||
import { createAsyncContext } from './AsyncContext.js';
|
||||
/**
|
||||
* Uses `AsyncLocalStorage` to create async context that holds the current EM fork.
|
||||
*/
|
||||
export class RequestContext {
    map;
    static storage = createAsyncContext();
    static counter = 1;
    id = RequestContext.counter++;
    constructor(map) {
        this.map = map;
    }
    /**
     * Returns default EntityManager.
     */
    get em() {
        return this.map.get('default');
    }
    /**
     * Creates new RequestContext instance and runs the code inside its domain.
     * If the handler is async, the return value needs to be awaited.
     * Uses `AsyncLocalStorage.run()`, suitable for regular express style middlewares with a `next` callback.
     */
    static create(em, next, options = {}) {
        return this.storage.run(this.createContext(em, options), next);
    }
    /**
     * Creates new RequestContext instance and runs the code inside its domain.
     * If the handler is async, the return value needs to be awaited.
     * Uses `AsyncLocalStorage.enterWith()`, suitable for elysia style middlewares without a `next` callback.
     */
    static enter(em, options = {}) {
        this.storage.enterWith(this.createContext(em, options));
    }
    /**
     * Returns current RequestContext (if available).
     */
    static currentRequestContext() {
        return this.storage.getStore();
    }
    /**
     * Returns current EntityManager (if available).
     */
    static getEntityManager(name = 'default') {
        return RequestContext.currentRequestContext()?.map.get(name);
    }
    // forks every given EM with `useContext: true` and wraps the forks in a fresh context
    static createContext(em, options = {}) {
        const managers = Array.isArray(em) ? em : [em];
        const forks = new Map();
        for (const manager of managers) {
            forks.set(manager.name, manager.fork({ useContext: true, ...options }));
        }
        return new RequestContext(forks);
    }
}
|
||||
20
node_modules/@mikro-orm/core/utils/TransactionContext.d.ts
generated
vendored
Normal file
20
node_modules/@mikro-orm/core/utils/TransactionContext.d.ts
generated
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
import type { EntityManager } from '../EntityManager.js';
|
||||
/** Uses `AsyncLocalStorage` to maintain a transaction-scoped EntityManager context across async operations. */
export declare class TransactionContext {
    /** The EntityManager this transaction context wraps. */
    readonly em: EntityManager;
    private static storage;
    /** Context id, mirroring the wrapped EntityManager's internal id. */
    readonly id: number;
    constructor(em: EntityManager);
    /**
     * Creates new TransactionContext instance and runs the code inside its domain.
     */
    static create<T>(em: EntityManager, next: (...args: any[]) => T): T;
    /**
     * Returns current TransactionContext (if available).
     */
    static currentTransactionContext(): TransactionContext | undefined;
    /**
     * Returns current EntityManager (if available).
     * Yields `undefined` when no context is active or the active EM's name differs.
     */
    static getEntityManager(name?: string): EntityManager | undefined;
}
|
||||
31
node_modules/@mikro-orm/core/utils/TransactionContext.js
generated
vendored
Normal file
31
node_modules/@mikro-orm/core/utils/TransactionContext.js
generated
vendored
Normal file
@@ -0,0 +1,31 @@
|
||||
import { createAsyncContext } from './AsyncContext.js';
|
||||
/** Uses `AsyncLocalStorage` to maintain a transaction-scoped EntityManager context across async operations. */
|
||||
export class TransactionContext {
    em;
    static storage = createAsyncContext();
    id;
    constructor(em) {
        this.em = em;
        // reuse the EM's internal id as the context id
        this.id = em._id;
    }
    /**
     * Creates new TransactionContext instance and runs the code inside its domain.
     */
    static create(em, next) {
        const ctx = new TransactionContext(em);
        return this.storage.run(ctx, next);
    }
    /**
     * Returns current TransactionContext (if available).
     */
    static currentTransactionContext() {
        return this.storage.getStore();
    }
    /**
     * Returns current EntityManager (if available) — only when its name matches.
     */
    static getEntityManager(name = 'default') {
        const ctx = TransactionContext.currentTransactionContext();
        if (ctx && ctx.em.name === name) {
            return ctx.em;
        }
        return undefined;
    }
}
|
||||
65
node_modules/@mikro-orm/core/utils/TransactionManager.d.ts
generated
vendored
Normal file
65
node_modules/@mikro-orm/core/utils/TransactionManager.d.ts
generated
vendored
Normal file
@@ -0,0 +1,65 @@
|
||||
import type { EntityManager } from '../EntityManager.js';
|
||||
import { type TransactionOptions } from '../enums.js';
|
||||
/**
 * Manages transaction lifecycle and propagation for EntityManager.
 */
export declare class TransactionManager {
    /** The EntityManager whose transactions this manager orchestrates. */
    private readonly em;
    constructor(em: EntityManager);
    /**
     * Main entry point for handling transactional operations with propagation support.
     */
    handle<T>(cb: (em: EntityManager) => T | Promise<T>, options?: TransactionOptions): Promise<T>;
    /**
     * Executes the callback with the specified propagation type.
     */
    private executeWithPropagation;
    /**
     * Suspends the current transaction and returns the suspended resources.
     */
    private suspendTransaction;
    /**
     * Resumes a previously suspended transaction.
     */
    private resumeTransaction;
    /**
     * Executes operation without transaction context.
     */
    private executeWithoutTransaction;
    /**
     * Creates new independent transaction, suspending any existing one.
     */
    private executeWithNewTransaction;
    /**
     * Creates new transaction context.
     */
    private createNewTransaction;
    /**
     * Executes nested transaction with savepoint.
     */
    private executeNestedTransaction;
    /**
     * Creates a fork of the EntityManager with the given options.
     */
    private createFork;
    /**
     * Determines if changes should be propagated to the upper context.
     */
    private shouldPropagateToUpperContext;
    /**
     * Merges entities from fork to parent EntityManager.
     */
    private mergeEntitiesToParent;
    /**
     * Registers a deletion handler to unset entity identities after flush.
     */
    private registerDeletionHandler;
    /**
     * Processes transaction execution.
     */
    private processTransaction;
    /**
     * Executes transaction workflow with entity synchronization.
     */
    private executeTransactionFlow;
}
|
||||
225
node_modules/@mikro-orm/core/utils/TransactionManager.js
generated
vendored
Normal file
225
node_modules/@mikro-orm/core/utils/TransactionManager.js
generated
vendored
Normal file
@@ -0,0 +1,225 @@
|
||||
import { ReferenceKind, TransactionPropagation } from '../enums.js';
|
||||
import { TransactionEventBroadcaster } from '../events/TransactionEventBroadcaster.js';
|
||||
import { TransactionContext } from '../utils/TransactionContext.js';
|
||||
import { ChangeSetType } from '../unit-of-work/ChangeSet.js';
|
||||
import { TransactionStateError } from '../errors.js';
|
||||
import { helper } from '../entity/wrap.js';
|
||||
/**
|
||||
* Manages transaction lifecycle and propagation for EntityManager.
|
||||
*/
|
||||
export class TransactionManager {
|
||||
// the EntityManager this manager drives transactions for
em;
constructor(em) {
    this.em = em;
}
|
||||
/**
 * Main entry point for handling transactional operations with propagation support.
 * NOTE: mutates the passed `options` object — fills in `propagation` (default NESTED)
 * and `ctx` defaults before dispatching.
 */
async handle(cb, options = {}) {
    const em = this.em.getContext(false);
    options.propagation ??= TransactionPropagation.NESTED;
    options.ctx ??= em.getTransactionContext();
    // capture the caller's transaction state up front, so propagation decisions use it
    const hasExistingTransaction = !!em.getTransactionContext();
    return this.executeWithPropagation(options.propagation, em, cb, options, hasExistingTransaction);
}
|
||||
/**
 * Executes the callback with the specified propagation type.
 */
async executeWithPropagation(propagation, em, cb, options, hasExistingTransaction) {
    switch (propagation) {
        // always run outside a transaction
        case TransactionPropagation.NOT_SUPPORTED:
            return this.executeWithoutTransaction(em, cb, options);
        // always run in a brand-new transaction, independent of any existing one
        case TransactionPropagation.REQUIRES_NEW:
            return this.executeWithNewTransaction(em, cb, options, hasExistingTransaction);
        // join the existing transaction, or start one if none is active
        case TransactionPropagation.REQUIRED:
            if (hasExistingTransaction) {
                return cb(em);
            }
            return this.createNewTransaction(em, cb, options);
        // run in a savepoint-backed nested transaction when one is active, otherwise start fresh
        case TransactionPropagation.NESTED:
            if (hasExistingTransaction) {
                return this.executeNestedTransaction(em, cb, options);
            }
            return this.createNewTransaction(em, cb, options);
        // join an existing transaction if present, otherwise run non-transactionally
        case TransactionPropagation.SUPPORTS:
            if (hasExistingTransaction) {
                return cb(em);
            }
            return this.executeWithoutTransaction(em, cb, options);
        // an active transaction is required; reject otherwise
        case TransactionPropagation.MANDATORY:
            if (!hasExistingTransaction) {
                throw TransactionStateError.requiredTransactionNotFound(propagation);
            }
            return cb(em);
        // an active transaction is forbidden; reject if one exists
        case TransactionPropagation.NEVER:
            if (hasExistingTransaction) {
                throw TransactionStateError.transactionNotAllowed(propagation);
            }
            return this.executeWithoutTransaction(em, cb, options);
        default:
            throw TransactionStateError.invalidPropagation(propagation);
    }
}
|
||||
/**
|
||||
* Suspends the current transaction and returns the suspended resources.
|
||||
*/
|
||||
suspendTransaction(em) {
|
||||
const suspended = em.getTransactionContext();
|
||||
em.resetTransactionContext();
|
||||
return suspended;
|
||||
}
|
||||
/**
|
||||
* Resumes a previously suspended transaction.
|
||||
*/
|
||||
resumeTransaction(em, suspended) {
|
||||
if (suspended != null) {
|
||||
em.setTransactionContext(suspended);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Executes operation without transaction context.
|
||||
*/
|
||||
async executeWithoutTransaction(em, cb, options) {
|
||||
const suspended = this.suspendTransaction(em);
|
||||
const fork = this.createFork(em, { ...options, disableTransactions: true });
|
||||
const propagateToUpperContext = this.shouldPropagateToUpperContext(em);
|
||||
try {
|
||||
return await this.executeTransactionFlow(fork, cb, propagateToUpperContext, em);
|
||||
} finally {
|
||||
this.resumeTransaction(em, suspended);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Creates new independent transaction, suspending any existing one.
|
||||
*/
|
||||
async executeWithNewTransaction(em, cb, options, hasExistingTransaction) {
|
||||
const fork = this.createFork(em, options);
|
||||
let suspended = null;
|
||||
// Suspend existing transaction if present
|
||||
if (hasExistingTransaction) {
|
||||
suspended = this.suspendTransaction(em);
|
||||
}
|
||||
const newOptions = { ...options, ctx: undefined };
|
||||
try {
|
||||
return await this.processTransaction(em, fork, cb, newOptions);
|
||||
} finally {
|
||||
if (suspended != null) {
|
||||
this.resumeTransaction(em, suspended);
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Creates new transaction context.
|
||||
*/
|
||||
async createNewTransaction(em, cb, options) {
|
||||
const fork = this.createFork(em, options);
|
||||
return this.processTransaction(em, fork, cb, options);
|
||||
}
|
||||
/**
|
||||
* Executes nested transaction with savepoint.
|
||||
*/
|
||||
async executeNestedTransaction(em, cb, options) {
|
||||
const fork = this.createFork(em, options);
|
||||
// Pass existing context to create savepoint
|
||||
const nestedOptions = { ...options, ctx: em.getTransactionContext() };
|
||||
return this.processTransaction(em, fork, cb, nestedOptions);
|
||||
}
|
||||
/**
|
||||
* Creates a fork of the EntityManager with the given options.
|
||||
*/
|
||||
createFork(em, options) {
|
||||
return em.fork({
|
||||
clear: options.clear ?? false,
|
||||
flushMode: options.flushMode,
|
||||
cloneEventManager: true,
|
||||
disableTransactions: options.ignoreNestedTransactions,
|
||||
loggerContext: options.loggerContext,
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Determines if changes should be propagated to the upper context.
|
||||
*/
|
||||
shouldPropagateToUpperContext(em) {
|
||||
return !em.global || this.em.config.get('allowGlobalContext');
|
||||
}
|
||||
/**
|
||||
* Merges entities from fork to parent EntityManager.
|
||||
*/
|
||||
mergeEntitiesToParent(fork, parent) {
|
||||
const parentUoW = parent.getUnitOfWork(false);
|
||||
// perf: if parent is empty, we can just move all entities from the fork to skip the `em.merge` overhead
|
||||
if (parentUoW.getIdentityMap().keys().length === 0) {
|
||||
for (const entity of fork.getUnitOfWork(false).getIdentityMap()) {
|
||||
parentUoW.getIdentityMap().store(entity);
|
||||
helper(entity).__em = parent;
|
||||
}
|
||||
return;
|
||||
}
|
||||
for (const entity of fork.getUnitOfWork(false).getIdentityMap()) {
|
||||
const wrapped = helper(entity);
|
||||
const meta = wrapped.__meta;
|
||||
const parentEntity = parentUoW.getById(meta.class, wrapped.getPrimaryKey(), parent.schema, true);
|
||||
if (parentEntity && parentEntity !== entity) {
|
||||
const parentWrapped = helper(parentEntity);
|
||||
parentWrapped.__data = wrapped.__data;
|
||||
parentWrapped.__originalEntityData = wrapped.__originalEntityData;
|
||||
for (const prop of meta.hydrateProps) {
|
||||
if (prop.kind === ReferenceKind.SCALAR) {
|
||||
parentEntity[prop.name] = entity[prop.name];
|
||||
}
|
||||
}
|
||||
} else {
|
||||
parentUoW.merge(entity, new Set([entity]));
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Registers a deletion handler to unset entity identities after flush.
|
||||
*/
|
||||
registerDeletionHandler(fork, parent) {
|
||||
fork.getEventManager().registerSubscriber({
|
||||
afterFlush: args => {
|
||||
const deletionChangeSets = args.uow
|
||||
.getChangeSets()
|
||||
.filter(cs => cs.type === ChangeSetType.DELETE || cs.type === ChangeSetType.DELETE_EARLY);
|
||||
for (const cs of deletionChangeSets) {
|
||||
parent.getUnitOfWork(false).unsetIdentity(cs.entity);
|
||||
}
|
||||
},
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Processes transaction execution.
|
||||
*/
|
||||
async processTransaction(em, fork, cb, options) {
|
||||
const propagateToUpperContext = this.shouldPropagateToUpperContext(em);
|
||||
const eventBroadcaster = new TransactionEventBroadcaster(fork, undefined);
|
||||
return TransactionContext.create(fork, () =>
|
||||
fork.getConnection().transactional(
|
||||
async trx => {
|
||||
fork.setTransactionContext(trx);
|
||||
return this.executeTransactionFlow(fork, cb, propagateToUpperContext, em);
|
||||
},
|
||||
{ ...options, eventBroadcaster },
|
||||
),
|
||||
);
|
||||
}
|
||||
/**
|
||||
* Executes transaction workflow with entity synchronization.
|
||||
*/
|
||||
async executeTransactionFlow(fork, cb, propagateToUpperContext, parentEm) {
|
||||
if (!propagateToUpperContext) {
|
||||
const ret = await cb(fork);
|
||||
await fork.flush();
|
||||
return ret;
|
||||
}
|
||||
// Setup: Register deletion handler before execution
|
||||
this.registerDeletionHandler(fork, parentEm);
|
||||
// Execute callback and flush
|
||||
const ret = await cb(fork);
|
||||
await fork.flush();
|
||||
// Synchronization: Merge entities back to the parent
|
||||
this.mergeEntitiesToParent(fork, parentEm);
|
||||
return ret;
|
||||
}
|
||||
}
|
||||
225
node_modules/@mikro-orm/core/utils/Utils.d.ts
generated
vendored
Normal file
225
node_modules/@mikro-orm/core/utils/Utils.d.ts
generated
vendored
Normal file
@@ -0,0 +1,225 @@
|
||||
import type {
|
||||
CompiledFunctions,
|
||||
Dictionary,
|
||||
EntityData,
|
||||
EntityDictionary,
|
||||
EntityKey,
|
||||
EntityMetadata,
|
||||
EntityName,
|
||||
EntityProperty,
|
||||
Primary,
|
||||
} from '../typings.js';
|
||||
import type { Collection } from '../entity/Collection.js';
|
||||
import type { Platform } from '../platforms/Platform.js';
|
||||
import { type ScalarReference } from '../entity/Reference.js';
|
||||
import { type RawQueryFragmentSymbol } from './RawQueryFragment.js';
|
||||
/**
 * Deeply compares two objects for equality, handling dates, regexes, and raw fragments.
 * @throws Error when either side contains an invalid `Date` (per the implementation in `Utils.js`)
 */
export declare function compareObjects(a: any, b: any): boolean;
/** Compares two arrays element-by-element for deep equality. */
export declare function compareArrays(a: any[] | string, b: any[] | string): boolean;
/** Compares two boolean values, treating numeric 0/1 as false/true. */
export declare function compareBooleans(a: unknown, b: unknown): boolean;
/** Compares two byte arrays element-by-element. */
export declare function compareBuffers(a: Uint8Array, b: Uint8Array): boolean;
/**
 * Checks if arguments are deeply (but not strictly) equal.
 */
export declare function equals(a: any, b: any): boolean;
/** Parses a JSON string safely, returning the original value if parsing fails. */
export declare function parseJsonSafe<T = unknown>(value: unknown): T;
|
||||
/** Collection of general-purpose utility methods used throughout the ORM. */
export declare class Utils {
    #private;
    /** Separator used to join composite-PK parts into a single identity-map hash (see `getPrimaryKeyHash`). */
    static readonly PK_SEPARATOR = '~~~';
    /**
     * Checks if the argument is instance of `Object`. Returns false for arrays.
     */
    static isObject<T = Dictionary>(o: any): o is T;
    /**
     * Removes `undefined` properties (recursively) so they are not saved as nulls
     */
    static dropUndefinedProperties(o: any, value?: null, visited?: Set<unknown>): void;
    /**
     * Returns the number of properties on `obj`. This is 20x faster than Object.keys(obj).length.
     * @see https://github.com/deepkit/deepkit-framework/blob/master/packages/core/src/core.ts
     */
    static getObjectKeysSize(object: Dictionary): number;
    /**
     * Returns true if `obj` has at least one property. This is 20x faster than Object.keys(obj).length.
     * @see https://github.com/deepkit/deepkit-framework/blob/master/packages/core/src/core.ts
     */
    static hasObjectKeys(object: Dictionary): boolean;
    /**
     * Checks if arguments are deeply (but not strictly) equal.
     */
    static equals(a: any, b: any): boolean;
    /**
     * Gets array without duplicates.
     */
    static unique<T = string>(items: T[]): T[];
    /**
     * Merges all sources into the target recursively. Mutates and returns `target`.
     */
    static merge(target: any, ...sources: any[]): any;
    /**
     * Merges all sources into the target recursively. Ignores `undefined` values.
     */
    static mergeConfig<T>(target: T, ...sources: Dictionary[]): T;
    /**
     * Merges all sources into the target recursively.
     */
    private static _merge;
    /**
     * Creates deep copy of given object.
     */
    static copy<T>(entity: T, respectCustomCloneMethod?: boolean): T;
    /**
     * Normalize the argument to always be an array.
     */
    static asArray<T>(data?: T | readonly T[] | Iterable<T>, strict?: boolean): T[];
    /**
     * Checks if the value is iterable, but considers strings and buffers as not iterable.
     */
    static isIterable<T>(value: unknown): value is Iterable<T>;
    /**
     * Renames object key, keeps order of properties.
     */
    static renameKey<T>(payload: T, from: string | keyof T, to: string): void;
    /**
     * Returns array of functions argument names. Uses basic regex for source code analysis, might not work with advanced syntax.
     */
    static getConstructorParams(func: { toString(): string }): string[] | undefined;
    /**
     * Checks whether the argument looks like primary key (string, number or ObjectId).
     */
    static isPrimaryKey<T>(key: any, allowComposite?: boolean): key is Primary<T>;
    /**
     * Extracts primary key from `data`. Accepts objects or primary keys directly.
     */
    static extractPK<T extends object>(data: any, meta?: EntityMetadata<T>, strict?: boolean): Primary<T> | string | null;
    /** Returns the composite PK of `data` as an array ordered by `meta.primaryKeys`, optionally converting custom types. */
    static getCompositeKeyValue<T>(
        data: EntityData<T>,
        meta: EntityMetadata<T>,
        convertCustomTypes?: boolean | 'convertToDatabaseValue' | 'convertToJSValue',
        platform?: Platform,
    ): Primary<T>;
    /** Builds the identity-map hash string for a composite PK (see `getPrimaryKeyHash`). */
    static getCompositeKeyHash<T>(
        data: EntityData<T>,
        meta: EntityMetadata<T>,
        convertCustomTypes?: boolean,
        platform?: Platform,
        flat?: boolean,
    ): string;
    /** Joins PK values with `PK_SEPARATOR`; Buffers are hex-encoded and Dates ISO-formatted first. */
    static getPrimaryKeyHash(pks: (string | Buffer | Date)[]): string;
    /** Splits a composite-PK hash produced by `getPrimaryKeyHash` back into its string parts. */
    static splitPrimaryKeys<T extends object>(key: string): EntityKey<T>[];
    /** Extracts PK value(s) from an entity or DTO; composite PKs come back as a flat array, single PKs as `[pk]` (or the bare value with `allowScalar`). */
    static getPrimaryKeyValues<T>(
        entity: T,
        meta: EntityMetadata<T>,
        allowScalar?: boolean,
        convertCustomTypes?: boolean,
    ): any;
    /** Builds a `{ pk: value }` condition object; returns `null` when any PK value cannot be extracted. */
    static getPrimaryKeyCond<T>(entity: T, primaryKeys: EntityKey<T>[]): Record<string, Primary<T>> | null;
    /**
     * Maps nested FKs from `[1, 2, 3]` to `[1, [2, 3]]`.
     */
    static mapFlatCompositePrimaryKey(
        fk: Primary<any>[],
        prop: EntityProperty,
        fieldNames?: string[],
        idx?: number,
    ): Primary<any> | Primary<any>[];
    /** Builds a PK condition object from an ordered array of PK values. */
    static getPrimaryKeyCondFromArray<T extends object>(
        pks: Primary<T>[],
        meta: EntityMetadata<T>,
    ): Record<string, Primary<T>>;
    /** Normalizes `id` (scalar, array or DTO) into a flat array of PK values ordered by `meta.primaryKeys`. */
    static getOrderedPrimaryKeys<T>(
        id: Primary<T> | Record<string, Primary<T>>,
        meta: EntityMetadata<T>,
        platform?: Platform,
        convertCustomTypes?: boolean,
        allowScalar?: boolean,
    ): Primary<T>[];
    /**
     * Checks whether given object is an entity instance.
     */
    static isEntity<T = unknown>(data: any, allowReference?: boolean): data is T & {};
    /**
     * Checks whether given object is a scalar reference.
     */
    static isScalarReference<T = unknown>(data: any, allowReference?: boolean): data is ScalarReference<any> & {};
    /**
     * Checks whether the argument is empty (array without items, object without keys or falsy value).
     */
    static isEmpty(data: any): boolean;
    /**
     * Gets string name of given class.
     */
    static className<T>(classOrName: string | EntityName<T>): string;
    /** NOTE(review): appears to select child items of `prefix` from `items` — implementation not in view, confirm semantics. */
    static extractChildElements(items: string[], prefix: string, allSymbol?: string): string[];
    /**
     * Tries to detect TypeScript support.
     */
    static detectTypeScriptSupport(): boolean;
    /**
     * Gets the type of the argument.
     */
    static getObjectType(value: any): string;
    /**
     * Checks whether the value is POJO (e.g. `{ foo: 'bar' }`, and not instance of `Foo`)
     */
    static isPlainObject<T extends Dictionary>(value: any): value is T;
    /**
     * Executes the `cb` promise serially on every element of the `items` array and returns array of resolved values.
     */
    static runSerial<T = any, U = any>(items: Iterable<U>, cb: (item: U) => Promise<T>): Promise<T[]>;
    /** Checks whether given object is a `Collection` instance. */
    static isCollection<T extends object, O extends object = object>(item: any): item is Collection<T, O>;
    /** Hashes `data`, optionally truncated to `length` chars — presumably a digest; impl not in view, confirm. */
    static hash(data: string, length?: number): string;
    /** Runs `clause` only when `data` is non-empty — impl not in view, confirm against `isEmpty`. */
    static runIfNotEmpty(clause: () => any, data: any): void;
    /** Assigns `defaultValue` to `prop[option]` when unset — impl not in view, confirm. */
    static defaultValue<T extends Dictionary>(prop: T, option: keyof T, defaultValue: any): void;
    /** Presumably returns values occurring more than once in `items` — impl not in view, confirm. */
    static findDuplicates<T>(items: T[]): T[];
    /** Presumably returns `items` with duplicates removed — impl not in view, confirm vs `unique`. */
    static removeDuplicates<T>(items: T[]): T[];
    /** Random integer between `min` and `max` — impl not in view; confirm bound inclusivity. */
    static randomInt(min: number, max: number): number;
    /**
     * Extracts all possible values of a TS enum. Works with both string and numeric enums.
     */
    static extractEnumValues(target: Dictionary): (string | number)[];
    /** Flattens nested arrays — impl not in view; confirm meaning of `deep`. */
    static flatten<T>(arrays: T[][], deep?: boolean): T[];
    /** Checks whether `key` is a query operator, optionally including group operators — impl not in view. */
    static isOperator(key: PropertyKey, includeGroupOperators?: boolean): boolean;
    /** Checks whether `key` occurs anywhere inside `object` — impl not in view, confirm. */
    static hasNestedKey(object: unknown, key: string): boolean;
    /** Returns the ORM version string (e.g. '7.0.2', see the baked-in `#ORM_VERSION` in `Utils.js`). */
    static getORMVersion(): string;
    /** Compiles `code` into a function with `context` in scope, optionally caching — impl not in view, confirm. */
    static createFunction(
        context: Map<string, any>,
        code: string,
        compiledFunctions?: CompiledFunctions,
        key?: string,
    ): any;
    /** Invokes a function produced by `createFunction` — impl not in view, confirm error handling. */
    static callCompiledFunction<T extends unknown[], R>(fn: (...args: T) => R, ...args: T): R;
    /** NOTE(review): impl not in view — presumably resolves (possibly embedded) property values with their index paths. */
    static unwrapProperty<T>(
        entity: T,
        meta: EntityMetadata<T>,
        prop: EntityProperty<T>,
        payload?: boolean,
    ): [unknown, number[]][];
    /** NOTE(review): impl not in view — presumably writes `value` back at the `idx` path produced by `unwrapProperty`. */
    static setPayloadProperty<T>(
        entity: EntityDictionary<T>,
        meta: EntityMetadata<T>,
        prop: EntityProperty<T>,
        value: unknown,
        idx: number[],
    ): void;
    /** Dynamically imports `module`, resolving to `undefined` (optionally warning) when unavailable — impl not in view, confirm. */
    static tryImport<T extends Dictionary = any>({
        module,
        warning,
    }: {
        module: string;
        warning?: string;
    }): Promise<T | undefined>;
    /** Logical XOR of two booleans. */
    static xor(a: boolean, b: boolean): boolean;
    /** Typed wrapper around `Object.keys`. */
    static keys<T extends object>(obj: T): (keyof T)[];
    /** Typed wrapper around `Object.values`. */
    static values<T extends object>(obj: T): T[keyof T][];
    /** Typed wrapper around `Object.entries`. */
    static entries<T extends object>(obj: T): [keyof T, T[keyof T]][];
    /** NOTE(review): impl not in view — presumably expands a scalar PK into an object keyed by PK properties. */
    static primaryKeyToObject<T>(meta: EntityMetadata<T>, primaryKey: Primary<T> | T, visible?: (keyof T)[]): T;
    /** NOTE(review): impl not in view — presumably like `Object.keys` but preserving raw-query-fragment symbol keys. */
    static getObjectQueryKeys<T extends Dictionary, K extends string = Extract<keyof T, string>>(
        obj: T,
    ): (K | RawQueryFragmentSymbol)[];
}
|
||||
852
node_modules/@mikro-orm/core/utils/Utils.js
generated
vendored
Normal file
852
node_modules/@mikro-orm/core/utils/Utils.js
generated
vendored
Normal file
@@ -0,0 +1,852 @@
|
||||
import { clone } from './clone.js';
|
||||
import { GroupOperator, PlainObject, QueryOperator, ReferenceKind } from '../enums.js';
|
||||
import { helper } from '../entity/wrap.js';
|
||||
import { Raw } from './RawQueryFragment.js';
|
||||
/**
 * Checks whether two values share a constructor.
 * A missing constructor (e.g. `Object.create(null)`) is treated as equivalent
 * to a plain `Object` instance.
 */
function compareConstructors(a, b) {
    const ctorA = a.constructor;
    const ctorB = b.constructor;
    if (ctorA === ctorB) {
        return true;
    }
    if (!ctorA) {
        return ctorB === Object;
    }
    return !ctorB ? ctorA === Object : false;
}
|
||||
/** Deeply compares two objects for equality, handling dates, regexes, and raw fragments. */
export function compareObjects(a, b) {
    // identical references (or both nullish) are trivially equal
    if (a === b || (a == null && b == null)) {
        return true;
    }
    const bothObjects = !!a && !!b && typeof a === 'object' && typeof b === 'object';
    if (!bothObjects || !compareConstructors(a, b)) {
        return false;
    }
    if (a.__raw && b.__raw) {
        // raw SQL fragments are equal when the SQL text and bound params match
        // eslint-disable-next-line @typescript-eslint/no-use-before-define
        return a.sql === b.sql && compareArrays(a.params, b.params);
    }
    if (a instanceof Date && b instanceof Date) {
        const left = a.getTime();
        const right = b.getTime();
        if (isNaN(left) || isNaN(right)) {
            throw new Error('Comparing invalid dates is not supported');
        }
        return left === right;
    }
    /* v8 ignore next */
    const comparableViaString =
        (typeof a === 'function' && typeof b === 'function') ||
        (a instanceof RegExp && b instanceof RegExp) ||
        (a instanceof String && b instanceof String) ||
        (a instanceof Number && b instanceof Number);
    if (comparableViaString) {
        return a.toString() === b.toString();
    }
    const ownKeys = Object.keys(a);
    if (ownKeys.length !== Object.keys(b).length) {
        return false;
    }
    // phase 1: `b` must own every key of `a` (counts match, so key sets are equal)
    for (let i = ownKeys.length - 1; i >= 0; i--) {
        if (!Object.hasOwn(b, ownKeys[i])) {
            return false;
        }
    }
    // phase 2: corresponding values must be deeply equal
    for (let i = ownKeys.length - 1; i >= 0; i--) {
        const key = ownKeys[i];
        // eslint-disable-next-line @typescript-eslint/no-use-before-define
        if (!equals(a[key], b[key])) {
            return false;
        }
    }
    return true;
}
|
||||
/** Compares two arrays element-by-element for deep equality. */
export function compareArrays(a, b) {
    if (a.length !== b.length) {
        return false;
    }
    // iterate back-to-front, mirroring the key comparison in `compareObjects`
    for (let idx = a.length - 1; idx >= 0; idx--) {
        // eslint-disable-next-line @typescript-eslint/no-use-before-define
        if (!equals(a[idx], b[idx])) {
            return false;
        }
    }
    return true;
}
|
||||
/** Compares two boolean values, treating numeric 0/1 as false/true. */
export function compareBooleans(a, b) {
    const left = typeof a === 'number' ? Boolean(a) : a;
    const right = typeof b === 'number' ? Boolean(b) : b;
    return left === right;
}
|
||||
/** Compares two byte arrays element-by-element. */
export function compareBuffers(a, b) {
    const size = a.length;
    if (size !== b.length) {
        return false;
    }
    for (let idx = 0; idx < size; idx++) {
        if (a[idx] !== b[idx]) {
            return false;
        }
    }
    return true;
}
|
||||
/**
 * Checks if arguments are deeply (but not strictly) equal.
 */
export function equals(a, b) {
    if (a === b) {
        return true;
    }
    const bothNonNullObjects = !!a && !!b && typeof a === 'object' && typeof b === 'object';
    if (!bothNonNullObjects) {
        // the only non-identical primitives considered equal are two NaNs
        return Number.isNaN(a) && Number.isNaN(b);
    }
    if (Array.isArray(a)) {
        return compareArrays(a, b);
    }
    if (ArrayBuffer.isView(a) && ArrayBuffer.isView(b)) {
        return compareBuffers(a, b);
    }
    return compareObjects(a, b);
}
// alias so `Utils.equals` can delegate without colliding with its own name
const equalsFn = equals;
|
||||
/** Parses a JSON string safely, returning the original value if parsing fails. */
export function parseJsonSafe(value) {
    if (typeof value !== 'string') {
        return value;
    }
    /* v8 ignore next */
    try {
        return JSON.parse(value);
    } catch {
        // ignore and return the value, as sometimes we get the parsed value,
        // e.g. when it is a string value in JSON column
        return value;
    }
}
|
||||
/** Collection of general-purpose utility methods used throughout the ORM. */
export class Utils {
    // separator used to join composite-PK parts into an identity-map hash (see `getPrimaryKeyHash`)
    static PK_SEPARATOR = '~~~';
    // version string baked in at build time; presumably returned by `getORMVersion()` — impl not in view, confirm
    static #ORM_VERSION = '7.0.2';
|
||||
/**
|
||||
* Checks if the argument is instance of `Object`. Returns false for arrays.
|
||||
*/
|
||||
static isObject(o) {
|
||||
return !!o && typeof o === 'object' && !Array.isArray(o);
|
||||
}
|
||||
/**
|
||||
* Removes `undefined` properties (recursively) so they are not saved as nulls
|
||||
*/
|
||||
static dropUndefinedProperties(o, value, visited = new Set()) {
|
||||
if (Array.isArray(o)) {
|
||||
for (const item of o) {
|
||||
Utils.dropUndefinedProperties(item, value, visited);
|
||||
}
|
||||
return;
|
||||
}
|
||||
if (!Utils.isPlainObject(o) || visited.has(o)) {
|
||||
return;
|
||||
}
|
||||
visited.add(o);
|
||||
for (const key of Object.keys(o)) {
|
||||
if (o[key] === value) {
|
||||
delete o[key];
|
||||
continue;
|
||||
}
|
||||
Utils.dropUndefinedProperties(o[key], value, visited);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Returns the number of properties on `obj`. This is 20x faster than Object.keys(obj).length.
|
||||
* @see https://github.com/deepkit/deepkit-framework/blob/master/packages/core/src/core.ts
|
||||
*/
|
||||
static getObjectKeysSize(object) {
|
||||
let size = 0;
|
||||
for (const key in object) {
|
||||
if (Object.hasOwn(object, key)) {
|
||||
size++;
|
||||
}
|
||||
}
|
||||
return size;
|
||||
}
|
||||
/**
|
||||
* Returns true if `obj` has at least one property. This is 20x faster than Object.keys(obj).length.
|
||||
* @see https://github.com/deepkit/deepkit-framework/blob/master/packages/core/src/core.ts
|
||||
*/
|
||||
static hasObjectKeys(object) {
|
||||
for (const key in object) {
|
||||
if (Object.hasOwn(object, key)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
    /**
     * Checks if arguments are deeply (but not strictly) equal.
     */
    static equals(a, b) {
        // delegates to the module-level `equals` (aliased as `equalsFn` to avoid name shadowing)
        return equalsFn(a, b);
    }
|
||||
/**
|
||||
* Gets array without duplicates.
|
||||
*/
|
||||
static unique(items) {
|
||||
if (items.length < 2) {
|
||||
return items;
|
||||
}
|
||||
return [...new Set(items)];
|
||||
}
|
||||
    /**
     * Merges all sources into the target recursively. Mutates and returns `target`.
     */
    static merge(target, ...sources) {
        return Utils._merge(target, sources, false);
    }
|
||||
    /**
     * Merges all sources into the target recursively. Ignores `undefined` values.
     */
    static mergeConfig(target, ...sources) {
        return Utils._merge(target, sources, true);
    }
|
||||
    /**
     * Merges all sources into the target recursively.
     * Plain-object values are merged (or deep-copied when the target side is not
     * an object); everything else is assigned by reference.
     */
    static _merge(target, sources, ignoreUndefined) {
        if (!sources.length) {
            return target;
        }
        // consume one source per recursion level — note this mutates the `sources` array
        const source = sources.shift();
        if (Utils.isObject(target) && Utils.isPlainObject(source)) {
            for (const [key, value] of Object.entries(source)) {
                if (ignoreUndefined && typeof value === 'undefined') {
                    continue;
                }
                if (Utils.isPlainObject(value)) {
                    // target side is not an object — deep-copy so later mutations of
                    // `source` do not leak into `target`
                    if (!Utils.isObject(target[key])) {
                        target[key] = Utils.copy(value);
                        continue;
                    }
                    /* v8 ignore next */
                    if (!(key in target)) {
                        Object.assign(target, { [key]: {} });
                    }
                    Utils._merge(target[key], [value], ignoreUndefined);
                } else {
                    // scalars, arrays and class instances are assigned, not merged
                    Object.assign(target, { [key]: value });
                }
            }
        }
        // recurse for the remaining sources
        return Utils._merge(target, sources, ignoreUndefined);
    }
|
||||
    /**
     * Creates deep copy of given object.
     * When `respectCustomCloneMethod` is true, a custom clone hook may be honored
     * by the underlying `clone` helper (impl not in view — confirm).
     */
    static copy(entity, respectCustomCloneMethod = true) {
        return clone(entity, respectCustomCloneMethod);
    }
|
||||
/**
|
||||
* Normalize the argument to always be an array.
|
||||
*/
|
||||
static asArray(data, strict = false) {
|
||||
if (typeof data === 'undefined' && !strict) {
|
||||
return [];
|
||||
}
|
||||
if (this.isIterable(data)) {
|
||||
return Array.from(data);
|
||||
}
|
||||
return [data];
|
||||
}
|
||||
/**
|
||||
* Checks if the value is iterable, but considers strings and buffers as not iterable.
|
||||
*/
|
||||
static isIterable(value) {
|
||||
if (value == null || typeof value === 'string' || ArrayBuffer.isView(value)) {
|
||||
return false;
|
||||
}
|
||||
return typeof Object(value)[Symbol.iterator] === 'function';
|
||||
}
|
||||
    /**
     * Renames object key, keeps order of properties.
     */
    static renameKey(payload, from, to) {
        // only act when the old key exists and the new one does not (no clobbering)
        if (Utils.isObject(payload) && from in payload && !(to in payload)) {
            // deleting and re-inserting EVERY key preserves insertion order,
            // with `from` replaced by `to` at its original position
            for (const key of Object.keys(payload)) {
                const value = payload[key];
                delete payload[key];
                payload[from === key ? to : key] = value;
            }
        }
    }
|
||||
    /**
     * Returns array of functions argument names. Uses basic regex for source code analysis, might not work with advanced syntax.
     */
    static getConstructorParams(func) {
        const source = func.toString();
        const i = source.indexOf('constructor');
        if (i === -1) {
            // no explicit constructor in the source (e.g. implicit default constructor)
            return undefined;
        }
        const start = source.indexOf('(', i);
        if (start === -1) {
            return undefined;
        }
        // walk forward matching parentheses so nested parens inside default values
        // don't end the parameter list early (parens inside string literals still confuse it)
        let depth = 0;
        let end = start;
        for (; end < source.length; end++) {
            if (source[end] === '(') {
                depth++;
            }
            if (source[end] === ')') {
                depth--;
            }
            if (depth === 0) {
                break;
            }
        }
        const raw = source.slice(start + 1, end);
        return raw
            .split(',')
            // strip default values after `=`
            .map(s => s.trim().replace(/=.*$/, '').trim())
            .filter(Boolean)
            // destructured parameters have no single usable name — map them to ''
            .map(raw => (raw.startsWith('{') && raw.endsWith('}') ? '' : raw));
    }
|
||||
/**
|
||||
* Checks whether the argument looks like primary key (string, number or ObjectId).
|
||||
*/
|
||||
static isPrimaryKey(key, allowComposite = false) {
|
||||
if (['string', 'number', 'bigint'].includes(typeof key)) {
|
||||
return true;
|
||||
}
|
||||
if (allowComposite && Array.isArray(key) && key.every(v => Utils.isPrimaryKey(v, true))) {
|
||||
return true;
|
||||
}
|
||||
if (Utils.isObject(key)) {
|
||||
if (key.constructor?.name === 'ObjectId') {
|
||||
return true;
|
||||
}
|
||||
if (!Utils.isPlainObject(key) && !Utils.isEntity(key, true)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
    /**
     * Extracts primary key from `data`. Accepts objects or primary keys directly.
     * Returns `null` when no PK can be determined.
     */
    static extractPK(data, meta, strict = false) {
        // scalar PKs (string/number/bigint/ObjectId/…) pass straight through
        if (Utils.isPrimaryKey(data)) {
            return data;
        }
        if (Utils.isEntity(data, true)) {
            const wrapped = helper(data);
            if (wrapped.__meta.compositePK) {
                return wrapped.getPrimaryKeys();
            }
            return wrapped.getPrimaryKey();
        }
        // in strict mode a DTO must contain exactly the PK properties, nothing more
        if (strict && meta && Utils.getObjectKeysSize(data) !== meta.primaryKeys.length) {
            return null;
        }
        if (Utils.isPlainObject(data) && meta) {
            if (meta.compositePK) {
                return this.getCompositeKeyValue(data, meta);
            }
            // fall back to the serialized PK alias when the raw PK property is absent
            return data[meta.primaryKeys[0]] ?? data[meta.serializedPrimaryKey] ?? null;
        }
        return null;
    }
|
||||
    /**
     * Returns the composite PK of `data` as an array ordered by `meta.primaryKeys`.
     * Relation PKs recurse into the target metadata; custom types are converted
     * when `convertCustomTypes` is enabled and a platform is provided.
     */
    static getCompositeKeyValue(data, meta, convertCustomTypes = false, platform) {
        return meta.primaryKeys.map((pk, idx) => {
            // `data` may be an ordered array of PK values or an object keyed by PK name
            const value = Array.isArray(data) ? data[idx] : data[pk];
            const prop = meta.properties[pk];
            if (prop.targetMeta && Utils.isPlainObject(value)) {
                return this.getCompositeKeyValue(value, prop.targetMeta);
            }
            if (prop.customType && platform && convertCustomTypes) {
                // `convertCustomTypes` may name the conversion direction explicitly
                const method = typeof convertCustomTypes === 'string' ? convertCustomTypes : 'convertToJSValue';
                return prop.customType[method](value, platform);
            }
            return value;
        });
    }
|
||||
static getCompositeKeyHash(data, meta, convertCustomTypes = false, platform, flat = false) {
|
||||
let pks = this.getCompositeKeyValue(data, meta, convertCustomTypes, platform);
|
||||
if (flat) {
|
||||
pks = Utils.flatten(pks);
|
||||
}
|
||||
return Utils.getPrimaryKeyHash(pks);
|
||||
}
|
||||
static getPrimaryKeyHash(pks) {
|
||||
return pks
|
||||
.map(pk => {
|
||||
if (Buffer.isBuffer(pk)) {
|
||||
return pk.toString('hex');
|
||||
}
|
||||
if (pk instanceof Date) {
|
||||
return pk.toISOString();
|
||||
}
|
||||
return pk;
|
||||
})
|
||||
.join(this.PK_SEPARATOR);
|
||||
}
|
||||
    /** Splits a composite-PK hash produced by `getPrimaryKeyHash` back into its string parts. */
    static splitPrimaryKeys(key) {
        return key.split(this.PK_SEPARATOR);
    }
|
||||
    /**
     * Extracts PK value(s) from an entity or DTO.
     * Composite PKs come back as a flattened array; single PKs as `[pk]`,
     * or the bare value when `allowScalar` is enabled.
     */
    static getPrimaryKeyValues(entity, meta, allowScalar = false, convertCustomTypes = false) {
        /* v8 ignore next */
        if (entity == null) {
            return entity;
        }
        // recursively spreads nested PK objects into a flat array of leaf values
        function toArray(val) {
            if (Utils.isPlainObject(val)) {
                return Object.values(val).flatMap(v => toArray(v));
            }
            return val;
        }
        let pk;
        if (Utils.isEntity(entity, true)) {
            pk = helper(entity).getPrimaryKey(convertCustomTypes);
        } else {
            // DTO: collect each PK property, recursing into relation targets
            pk = meta.primaryKeys.reduce((o, pk) => {
                const targetMeta = meta.properties[pk].targetMeta;
                if (targetMeta && Utils.isPlainObject(entity[pk])) {
                    o[pk] = Utils.getPrimaryKeyValues(entity[pk], targetMeta, allowScalar, convertCustomTypes);
                } else {
                    o[pk] = entity[pk];
                }
                return o;
            }, {});
        }
        if (meta.primaryKeys.length > 1) {
            return toArray(pk);
        }
        if (allowScalar) {
            if (Utils.isPlainObject(pk)) {
                return pk[meta.primaryKeys[0]];
            }
            return pk;
        }
        return [pk];
    }
|
||||
static getPrimaryKeyCond(entity, primaryKeys) {
|
||||
const cond = primaryKeys.reduce((o, pk) => {
|
||||
o[pk] = Utils.extractPK(entity[pk]);
|
||||
return o;
|
||||
}, {});
|
||||
if (Object.values(cond).some(v => v === null)) {
|
||||
return null;
|
||||
}
|
||||
return cond;
|
||||
}
|
||||
    /**
     * Maps nested FKs from `[1, 2, 3]` to `[1, [2, 3]]`.
     */
    static mapFlatCompositePrimaryKey(fk, prop, fieldNames = prop.fieldNames, idx = 0) {
        // scalar leaf — consume a single value at the current offset
        if (!prop.targetMeta) {
            return fk[idx++];
        }
        const parts = [];
        for (const pk of prop.targetMeta.getPrimaryProps()) {
            parts.push(this.mapFlatCompositePrimaryKey(fk, pk, fieldNames, idx));
            // advance the offset by however many columns this sub-PK spans
            idx += pk.fieldNames.length;
        }
        // single-part PKs collapse to the bare value instead of a 1-element array
        if (parts.length < 2) {
            return parts[0];
        }
        return parts;
    }
|
||||
static getPrimaryKeyCondFromArray(pks, meta) {
|
||||
return meta.getPrimaryProps().reduce((o, pk, idx) => {
|
||||
if (Array.isArray(pks[idx]) && pk.targetMeta) {
|
||||
o[pk.name] = pks[idx];
|
||||
} else {
|
||||
o[pk.name] = Utils.extractPK(pks[idx], meta);
|
||||
}
|
||||
return o;
|
||||
}, {});
|
||||
}
|
||||
/**
 * Normalizes `id` (scalar, array, or DTO) into a flat, ordered array of PK values
 * matching `meta.primaryKeys` order. Recurses into relation targets, optionally
 * converting custom types. With `allowScalar` a single PK is returned bare.
 */
static getOrderedPrimaryKeys(id, meta, platform, convertCustomTypes = false, allowScalar = false) {
    // wrap a bare scalar PK into a DTO shape keyed by the (single) PK name
    const data = Utils.isPrimaryKey(id) ? { [meta.primaryKeys[0]]: id } : id;
    const pks = meta.primaryKeys.map((pk, idx) => {
        const prop = meta.properties[pk];
        // `data` can be a composite PK in form of array of PKs, or a DTO
        let value = Array.isArray(data) ? data[idx] : (data[pk] ?? data);
        if (convertCustomTypes && platform && prop.customType && !prop.targetMeta) {
            value = prop.customType.convertToJSValue(value, platform);
        }
        if (prop.kind !== ReferenceKind.SCALAR && prop.targetMeta) {
            // relation PK — resolve the target's own (possibly composite) PK recursively
            const value2 = this.getOrderedPrimaryKeys(value, prop.targetMeta, platform, convertCustomTypes, allowScalar);
            value = value2.length > 1 ? value2 : value2[0];
        }
        return value;
    });
    if (allowScalar && pks.length === 1) {
        return pks[0];
    }
    // we need to flatten the PKs as composite PKs can be build from another composite PKs
    // and this method is used to get the PK hash in identity map, that expects flat array
    return Utils.flatten(pks);
}
|
||||
/**
|
||||
* Checks whether given object is an entity instance.
|
||||
*/
|
||||
static isEntity(data, allowReference = false) {
|
||||
if (!Utils.isObject(data)) {
|
||||
return false;
|
||||
}
|
||||
if (allowReference && !!data.__reference) {
|
||||
return true;
|
||||
}
|
||||
return !!data.__entity;
|
||||
}
|
||||
/**
|
||||
* Checks whether given object is a scalar reference.
|
||||
*/
|
||||
static isScalarReference(data, allowReference = false) {
|
||||
return typeof data === 'object' && data?.__scalarReference;
|
||||
}
|
||||
/**
|
||||
* Checks whether the argument is empty (array without items, object without keys or falsy value).
|
||||
*/
|
||||
static isEmpty(data) {
|
||||
if (Array.isArray(data)) {
|
||||
return data.length === 0;
|
||||
}
|
||||
if (Utils.isObject(data)) {
|
||||
return !Utils.hasObjectKeys(data);
|
||||
}
|
||||
return !data;
|
||||
}
|
||||
/**
|
||||
* Gets string name of given class.
|
||||
*/
|
||||
static className(classOrName) {
|
||||
if (typeof classOrName === 'string') {
|
||||
return classOrName;
|
||||
}
|
||||
return classOrName.name;
|
||||
}
|
||||
static extractChildElements(items, prefix, allSymbol) {
|
||||
return items
|
||||
.filter(field => field === allSymbol || field.startsWith(`${prefix}.`))
|
||||
.map(field => (field === allSymbol ? allSymbol : field.substring(prefix.length + 1)));
|
||||
}
|
||||
/**
 * Tries to detect TypeScript support by probing the runtime environment:
 * ts-node/tsx/swc loaders, ts-jest/vitest env vars, bun, or a `.ts` entry file.
 */
static detectTypeScriptSupport() {
    /* v8 ignore next */
    const process = globalThis.process ?? {};
    /* v8 ignore next */
    return (
        process.argv?.[0]?.endsWith('ts-node') || // running via ts-node directly
        !!process.env?.MIKRO_ORM_CLI_ALWAYS_ALLOW_TS || // forced explicitly or enabled via `registerTypeScriptSupport()`
        !!process.env?.TS_JEST || // check if ts-jest is used
        !!process.env?.VITEST || // check if vitest is used
        !!process.versions?.bun || // check if bun is used
        process.argv?.slice(1).some(arg => /\.([mc]?ts|tsx)$/.exec(arg)) || // executing `.ts` file
        process.execArgv?.some(arg => {
            return (
                arg.includes('ts-node') || // check for ts-node loader
                arg.includes('@swc-node/register') || // check for swc-node/register loader
                arg.includes('node_modules/tsx/')
            ); // check for tsx loader
        })
    );
}
|
||||
/**
|
||||
* Gets the type of the argument.
|
||||
*/
|
||||
static getObjectType(value) {
|
||||
const simple = typeof value;
|
||||
if (['string', 'number', 'boolean', 'bigint'].includes(simple)) {
|
||||
return simple;
|
||||
}
|
||||
const objectType = Object.prototype.toString.call(value);
|
||||
const type = /^\[object (.+)]$/.exec(objectType)[1];
|
||||
if (type === 'Uint8Array') {
|
||||
return 'Buffer';
|
||||
}
|
||||
return type;
|
||||
}
|
||||
/**
 * Checks whether the value is POJO (e.g. `{ foo: 'bar' }`, and not instance of `Foo`)
 * Accepts: objects whose constructor prototype is `Object.prototype` (or null-proto),
 * null-prototype objects, and instances of the internal `PlainObject` marker class.
 */
static isPlainObject(value) {
    return (
        (value !== null &&
            typeof value === 'object' &&
            typeof value.constructor === 'function' &&
            // `isPrototypeOf` as own property identifies `Object.prototype` itself
            (Object.hasOwn(value.constructor.prototype, 'isPrototypeOf') ||
                Object.getPrototypeOf(value.constructor.prototype) === null)) ||
        (value && Object.getPrototypeOf(value) === null) ||
        value instanceof PlainObject
    );
}
|
||||
/**
|
||||
* Executes the `cb` promise serially on every element of the `items` array and returns array of resolved values.
|
||||
*/
|
||||
static async runSerial(items, cb) {
|
||||
const ret = [];
|
||||
for (const item of items) {
|
||||
ret.push(await cb(item));
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
static isCollection(item) {
|
||||
return item?.__collection;
|
||||
}
|
||||
// FNV-1a 64-bit
|
||||
static hash(data, length) {
|
||||
let h1 = 0xcbf29ce484222325n;
|
||||
for (let i = 0; i < data.length; i++) {
|
||||
h1 ^= BigInt(data.charCodeAt(i));
|
||||
h1 = (h1 * 0x100000001b3n) & 0xffffffffffffffffn;
|
||||
}
|
||||
const hash = h1.toString(16).padStart(16, '0');
|
||||
if (length) {
|
||||
return hash.substring(0, length);
|
||||
}
|
||||
return hash;
|
||||
}
|
||||
static runIfNotEmpty(clause, data) {
|
||||
if (!Utils.isEmpty(data)) {
|
||||
clause();
|
||||
}
|
||||
}
|
||||
static defaultValue(prop, option, defaultValue) {
|
||||
prop[option] = option in prop ? prop[option] : defaultValue;
|
||||
}
|
||||
static findDuplicates(items) {
|
||||
return items.reduce((acc, v, i, arr) => {
|
||||
return arr.indexOf(v) !== i && !acc.includes(v) ? acc.concat(v) : acc;
|
||||
}, []);
|
||||
}
|
||||
static removeDuplicates(items) {
|
||||
const ret = [];
|
||||
const contains = (arr, val) => !!arr.find(v => equals(val, v));
|
||||
for (const item of items) {
|
||||
if (!contains(ret, item)) {
|
||||
ret.push(item);
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
static randomInt(min, max) {
|
||||
return Math.round(Math.random() * (max - min)) + min;
|
||||
}
|
||||
/**
 * Extracts all possible values of a TS enum. Works with both string and numeric enums.
 * Numeric (and detected const) enums carry TS's reverse name→value mapping, so the
 * key names are filtered out of the value list.
 */
static extractEnumValues(target) {
    const keys = Object.keys(target);
    const values = Object.values(target);
    const numeric = !!values.find(v => typeof v === 'number');
    // heuristic detection of a transpiled const enum object
    const constEnum =
        values.length % 2 === 0 && // const enum will have even number of items
        values.slice(0, values.length / 2).every(v => typeof v === 'string') && // first half are strings
        values.slice(values.length / 2).every(v => typeof v === 'number') && // second half are numbers
        this.equals(
            keys,
            values
                .slice(values.length / 2)
                .concat(values.slice(0, values.length / 2))
                .map(v => '' + v),
        ); // and when swapped, it will match the keys
    if (numeric || constEnum) {
        // drop reverse-mapping entries (value → name)
        return values.filter(val => !keys.includes(val));
    }
    return values;
}
|
||||
static flatten(arrays, deep) {
|
||||
return arrays.flatMap(v => (deep && Array.isArray(v) ? this.flatten(v, true) : v));
|
||||
}
|
||||
static isOperator(key, includeGroupOperators = true) {
|
||||
if (!includeGroupOperators) {
|
||||
return key in QueryOperator;
|
||||
}
|
||||
return key in GroupOperator || key in QueryOperator;
|
||||
}
|
||||
static hasNestedKey(object, key) {
|
||||
if (!object) {
|
||||
return false;
|
||||
}
|
||||
if (Array.isArray(object)) {
|
||||
return object.some(o => this.hasNestedKey(o, key));
|
||||
}
|
||||
if (typeof object === 'object') {
|
||||
return Object.entries(object).some(([k, v]) => k === key || this.hasNestedKey(v, key));
|
||||
}
|
||||
return false;
|
||||
}
|
||||
/** Returns the ORM version baked into the private `#ORM_VERSION` static field. */
static getORMVersion() {
    return this.#ORM_VERSION;
}
|
||||
/**
 * Compiles `code` into a function via `new Function` (JIT for generated hydrators etc.),
 * with `context` entries (a Map) injected as named parameters. When a precompiled
 * function is available under `key`, it is invoked instead of recompiling.
 * NOTE: `code` is evaluated — callers must only pass trusted, ORM-generated code.
 */
static createFunction(context, code, compiledFunctions, key) {
    if (key && compiledFunctions?.[key]) {
        // precompiled cache hit — skip the eval path entirely
        return compiledFunctions[key](...context.values());
    }
    try {
        // eslint-disable-next-line @typescript-eslint/no-implied-eval
        return new Function(...context.keys(), `'use strict';\n` + code)(...context.values());
        /* v8 ignore next */
    } catch (e) {
        // dump the failing generated source before rethrowing, to aid debugging
        // eslint-disable-next-line no-console
        console.error(code);
        throw e;
    }
}
|
||||
/**
 * Invokes a JIT-compiled function, and on common runtime errors prints the
 * function source with a `>` marker and caret pointing at the failing position
 * (parsed from the `<anonymous>` frame in the stack trace) before rethrowing.
 */
static callCompiledFunction(fn, ...args) {
    try {
        return fn(...args);
    } catch (e) {
        /* v8 ignore next */
        if ([SyntaxError, TypeError, EvalError, ReferenceError].some(t => e instanceof t)) {
            const position = e.stack.match(/<anonymous>:(\d+):(\d+)/);
            let code = fn.toString();
            if (position) {
                // offsets compensate for the `'use strict'` prologue added at compile time
                const lines = code.split('\n').map((line, idx) => {
                    if (idx === +position[1] - 5) {
                        return '> ' + line;
                    }
                    return ' ' + line;
                });
                lines.splice(+position[1] - 4, 0, ' '.repeat(+position[2]) + '^');
                code = lines.join('\n');
            }
            // eslint-disable-next-line no-console
            console.error(`JIT runtime error: ${e.message}\n\n${code}`);
        }
        throw e;
    }
}
|
||||
/**
 * Resolves the value(s) of an (possibly deeply) embedded property on `entity`.
 * Returns an array of `[value, arrayIndexes]` pairs — `arrayIndexes` records the
 * positions taken through embedded-array levels so `setPayloadProperty` can write back.
 */
static unwrapProperty(entity, meta, prop, payload = false) {
    let p = prop;
    const path = [];
    if (!prop.object && !prop.array && !prop.embedded) {
        // plain property — single value (if set), no traversal path
        return entity[prop.name] != null ? [[entity[prop.name], []]] : [];
    }
    // walk up the embedded chain, building the access path root-first
    while (p.embedded) {
        const child = meta.properties[p.embedded[0]];
        if (payload && !child.object && !child.array) {
            // in payload mode, stop at inlined (non-object) embeddables
            break;
        }
        path.shift();
        path.unshift(p.embedded[0], p.embedded[1]);
        p = child;
    }
    const ret = [];
    // depth-first walk along `path`; fans out over arrays, recording indexes in `i`
    const follow = (t, idx = 0, i = []) => {
        const k = path[idx];
        if (Array.isArray(t)) {
            for (const t1 of t) {
                const ii = t.indexOf(t1);
                follow(t1, idx, [...i, ii]);
            }
            return;
        }
        if (t == null) {
            return;
        }
        const target = t[k];
        if (path[++idx]) {
            follow(target, idx, i);
        } else if (target != null) {
            ret.push([target, i]);
        }
    };
    follow(entity);
    return ret;
}
|
||||
/**
 * Inverse of `unwrapProperty` for payload objects — writes `value` to the location
 * of an (possibly deeply) embedded property. `idx` supplies the array indexes
 * recorded during unwrapping, consumed in order when traversing array levels.
 */
static setPayloadProperty(entity, meta, prop, value, idx) {
    if (!prop.object && !prop.array && !prop.embedded) {
        // plain property — direct assignment
        entity[prop.name] = value;
        return;
    }
    let target = entity;
    let p = prop;
    const path = [];
    // rebuild the access path root-first, stopping at the first non-object parent
    while (p.embedded) {
        path.shift();
        path.unshift(p.embedded[0], p.embedded[1]);
        const prev = p;
        p = meta.properties[p.embedded[0]];
        if (!p.object) {
            // inlined embeddable — address it via the child's own (prefixed) name
            path.shift();
            path[0] = prev.name;
            break;
        }
    }
    let j = 0;
    for (const k of path) {
        const i = path.indexOf(k);
        if (i === path.length - 1) {
            // last segment — perform the write (indexing into arrays via `idx`)
            if (Array.isArray(target)) {
                target[idx[j++]][k] = value;
            } else {
                target[k] = value;
            }
        } else {
            // intermediate segment — descend
            if (Array.isArray(target)) {
                target = target[idx[j++]][k];
            } else {
                target = target[k];
            }
        }
    }
}
|
||||
static async tryImport({ module, warning }) {
|
||||
try {
|
||||
return await import(module);
|
||||
} catch (err) {
|
||||
if (err.code === 'ERR_MODULE_NOT_FOUND') {
|
||||
if (warning) {
|
||||
// eslint-disable-next-line no-console
|
||||
console.warn(warning);
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
static xor(a, b) {
|
||||
return (a || b) && !(a && b);
|
||||
}
|
||||
/** Typed wrapper over `Object.keys`. */
static keys(obj) {
    return Object.keys(obj);
}
/** Typed wrapper over `Object.values`. */
static values(obj) {
    return Object.values(obj);
}
/** Typed wrapper over `Object.entries`. */
static entries(obj) {
    return Object.entries(obj);
}
|
||||
/**
 * Converts a PK value (scalar, array, or plain object for composite PKs) into an
 * object keyed by PK property names. `visible` optionally restricts which PK
 * properties are included (e.g. for serialization).
 */
static primaryKeyToObject(meta, primaryKey, visible) {
    // normalize to an ordered array of PK values
    const pks =
        meta.compositePK && Utils.isPlainObject(primaryKey) ? Object.values(primaryKey) : Utils.asArray(primaryKey);
    const pkProps = meta.getPrimaryProps();
    return meta.primaryKeys.reduce((o, pk, idx) => {
        const pkProp = pkProps[idx];
        if (visible && !visible.includes(pkProp.name)) {
            return o;
        }
        if (Utils.isPlainObject(pks[idx]) && pkProp.targetMeta) {
            // nested composite FK — normalize its shape recursively
            o[pk] = Utils.getOrderedPrimaryKeys(pks[idx], pkProp.targetMeta);
            return o;
        }
        o[pk] = pks[idx];
        return o;
    }, {});
}
|
||||
static getObjectQueryKeys(obj) {
|
||||
return Reflect.ownKeys(obj).filter(key => {
|
||||
if (!Object.prototype.propertyIsEnumerable.call(obj, key)) {
|
||||
return false;
|
||||
}
|
||||
return typeof key === 'string' || Raw.isKnownFragmentSymbol(key);
|
||||
});
|
||||
}
|
||||
}
|
||||
6
node_modules/@mikro-orm/core/utils/clone.d.ts
generated
vendored
Normal file
6
node_modules/@mikro-orm/core/utils/clone.d.ts
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
/**
|
||||
* Inspired by https://github.com/pvorb/clone but simplified and never tries to
|
||||
* clone `EventEmitter`s to get around https://github.com/mikro-orm/mikro-orm/issues/2748
|
||||
* @internal
|
||||
*/
|
||||
export declare function clone<T>(parent: T, respectCustomCloneMethod?: boolean): T;
|
||||
121
node_modules/@mikro-orm/core/utils/clone.js
generated
vendored
Normal file
121
node_modules/@mikro-orm/core/utils/clone.js
generated
vendored
Normal file
@@ -0,0 +1,121 @@
|
||||
/**
|
||||
* Inspired by https://github.com/pvorb/clone but simplified and never tries to
|
||||
* clone `EventEmitter`s to get around https://github.com/mikro-orm/mikro-orm/issues/2748
|
||||
* @internal
|
||||
*/
|
||||
/**
 * Get the property descriptor of a property on an object or its prototype chain.
 *
 * @param obj - The object to get the property descriptor from.
 * @param prop - The property to get the descriptor for.
 * @returns The first descriptor found walking up the chain, or `null`.
 */
function getPropertyDescriptor(obj, prop) {
    let current = obj;
    while (current != null) {
        const descriptor = Object.getOwnPropertyDescriptor(current, prop);
        if (descriptor) {
            return descriptor;
        }
        current = Object.getPrototypeOf(current);
    }
    return null;
}
|
||||
// shared base class of all typed arrays (Uint8Array, Float64Array, ...)
const TypedArray = Object.getPrototypeOf(Uint8Array);
export function clone(parent, respectCustomCloneMethod = true) {
    // parallel arrays used as a seen-map to preserve cyclic references
    const allParents = [];
    const allChildren = [];
    function _clone(parent) {
        // cloning null always returns null
        if (parent === null) {
            return null;
        }
        // primitives (and functions) are returned as-is
        if (typeof parent !== 'object') {
            return parent;
        }
        // objects with their own `clone()` get to clone themselves
        if (respectCustomCloneMethod && 'clone' in parent && typeof parent.clone === 'function') {
            return parent.clone();
        }
        let child;
        let proto;
        if (parent instanceof Map) {
            child = new Map();
        } else if (parent instanceof Set) {
            child = new Set();
        } else if (parent instanceof Promise) {
            // NOTE(review): `resolve.bind(null, _clone)` resolves the new promise with the
            // `_clone` function itself, not with a clone of the resolved value — looks
            // suspicious; verify against upstream intent before relying on promise cloning.
            child = new Promise((resolve, reject) => {
                parent.then(resolve.bind(null, _clone), reject.bind(null, _clone));
            });
        } else if (Array.isArray(parent)) {
            child = [];
        } else if (parent instanceof RegExp) {
            // rebuild flags manually (only g/i/m are preserved)
            let flags = '';
            if (parent.global) {
                flags += 'g';
            }
            if (parent.ignoreCase) {
                flags += 'i';
            }
            if (parent.multiline) {
                flags += 'm';
            }
            child = new RegExp(parent.source, flags);
            if (parent.lastIndex) {
                child.lastIndex = parent.lastIndex;
            }
        } else if (parent instanceof Date) {
            child = new Date(parent.getTime());
        } else if (Buffer.isBuffer(parent)) {
            // buffers have no extra enumerable props worth copying — return early
            child = Buffer.allocUnsafe(parent.length);
            parent.copy(child);
            return child;
        } else if (parent instanceof TypedArray) {
            // typed arrays are copied wholesale — return early
            child = parent.slice();
            return child;
        } else if (parent instanceof Error) {
            child = new parent.constructor(parent.message);
        } else {
            // generic object — keep its prototype (class instances stay instances)
            proto = Object.getPrototypeOf(parent);
            child = Object.create(proto);
        }
        // cycle check: reuse the already-created clone for a previously seen parent
        const index = allParents.indexOf(parent);
        if (index !== -1) {
            return allChildren[index];
        }
        allParents.push(parent);
        allChildren.push(child);
        if (parent instanceof Map) {
            parent.forEach((value, key) => {
                const keyChild = _clone(key);
                const valueChild = _clone(value);
                child.set(keyChild, valueChild);
            });
        }
        if (parent instanceof Set) {
            parent.forEach(value => {
                const entryChild = _clone(value);
                child.add(entryChild);
            });
        }
        for (const i in parent) {
            let attrs;
            if (proto) {
                attrs = getPropertyDescriptor(proto, i);
            }
            // skip getter-only accessors defined on the prototype (no setter to write through)
            if (attrs && typeof attrs.get === 'function' && attrs.set == null) {
                continue;
            }
            child[i] = _clone(parent[i]);
        }
        // also copy enumerable symbol-keyed own properties
        const symbols = Object.getOwnPropertySymbols(parent);
        for (let i = 0; i < symbols.length; i++) {
            const symbol = symbols[i];
            const descriptor = Object.getOwnPropertyDescriptor(parent, symbol);
            if (descriptor && !descriptor.enumerable) {
                continue;
            }
            child[symbol] = _clone(parent[symbol]);
        }
        return child;
    }
    return _clone(parent);
}
|
||||
7
node_modules/@mikro-orm/core/utils/env-vars.d.ts
generated
vendored
Normal file
7
node_modules/@mikro-orm/core/utils/env-vars.d.ts
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
import { type Options } from './Configuration.js';
|
||||
/** @internal */
|
||||
export declare function setEnv(key: string, value: unknown): void;
|
||||
/** @internal */
|
||||
export declare function getEnv(key: string): string | undefined;
|
||||
/** @internal */
|
||||
export declare function loadEnvironmentVars(): Partial<Options>;
|
||||
100
node_modules/@mikro-orm/core/utils/env-vars.js
generated
vendored
Normal file
100
node_modules/@mikro-orm/core/utils/env-vars.js
generated
vendored
Normal file
@@ -0,0 +1,100 @@
|
||||
import { Utils } from './Utils.js';
|
||||
/** @internal Sets an environment variable (stringified) when a `process.env` exists. */
export function setEnv(key, value) {
    const env = globalThis.process?.env;
    if (env) {
        env[key] = String(value);
    }
}
|
||||
/** @internal Reads an environment variable, tolerating environments without `process`. */
export function getEnv(key) {
    const env = globalThis.process?.env ?? {};
    return env[key];
}
|
||||
/**
 * @internal
 * Builds a partial ORM `Options` object from `MIKRO_ORM_*` environment variables.
 * camelCase option names map to UPPER_SNAKE env keys; values are coerced via the
 * `array`/`bool`/`num` mappers. Nested sections (discovery, migrations, …) use
 * their own prefixes and are removed again when no matching env var was set.
 */
export function loadEnvironmentVars() {
    const ret = {};
    // camelCase -> MIKRO_ORM_UPPER_SNAKE (handles acronym boundaries too)
    const getEnvKey = (key, envPrefix = 'MIKRO_ORM_') => {
        return (
            envPrefix +
            key
                .replace(/([a-z0-9])([A-Z])/g, '$1_$2')
                .replace(/([A-Z])([A-Z][a-z])/g, '$1_$2')
                .toUpperCase()
        );
    };
    // value coercion helpers for comma lists, booleans and numbers
    const array = v => v.split(',').map(vv => vv.trim());
    const bool = v => ['true', 't', '1'].includes(v.toLowerCase());
    const num = v => +v;
    // copies one env var (if present) onto `o[key]`, applying `mapper`
    const read = (o, envPrefix, key, mapper = v => v) => {
        const envKey = getEnvKey(key, envPrefix);
        /* v8 ignore next */
        if (envKey in (globalThis.process?.env ?? {})) {
            o[key] = mapper(getEnv(envKey));
        }
    };
    // drops an empty nested section so it does not override config defaults
    const cleanup = (o, k) => (Utils.hasObjectKeys(o[k]) ? {} : delete o[k]);
    const read0 = read.bind(null, ret, 'MIKRO_ORM_');
    read0('baseDir');
    read0('entities', array);
    read0('entitiesTs', array);
    read0('clientUrl');
    read0('host');
    read0('port', num);
    read0('user');
    read0('password');
    read0('dbName');
    read0('schema');
    read0('loadStrategy');
    read0('batchSize', num);
    read0('useBatchInserts', bool);
    read0('useBatchUpdates', bool);
    read0('allowGlobalContext', bool);
    read0('autoJoinOneToOneOwner', bool);
    read0('populateAfterFlush', bool);
    read0('forceEntityConstructor', bool);
    read0('forceUndefined', bool);
    read0('forceUtcTimezone', bool);
    read0('timezone');
    read0('ensureIndexes', bool);
    read0('implicitTransactions', bool);
    read0('debug', bool);
    read0('colors', bool);
    // discovery options: MIKRO_ORM_DISCOVERY_*
    ret.discovery = {};
    const read1 = read.bind(null, ret.discovery, 'MIKRO_ORM_DISCOVERY_');
    read1('warnWhenNoEntities', bool);
    read1('checkDuplicateTableNames', bool);
    read1('checkDuplicateFieldNames', bool);
    read1('checkNonPersistentCompositeProps', bool);
    read1('inferDefaultValues', bool);
    read1('tsConfigPath');
    cleanup(ret, 'discovery');
    // migrations options: MIKRO_ORM_MIGRATIONS_*
    ret.migrations = {};
    const read2 = read.bind(null, ret.migrations, 'MIKRO_ORM_MIGRATIONS_');
    read2('tableName');
    read2('path');
    read2('pathTs');
    read2('glob');
    read2('transactional', bool);
    read2('disableForeignKeys', bool);
    read2('allOrNothing', bool);
    read2('dropTables', bool);
    read2('safe', bool);
    read2('silent', bool);
    read2('emit');
    read2('snapshot', bool);
    read2('snapshotName');
    cleanup(ret, 'migrations');
    // schema generator options: MIKRO_ORM_SCHEMA_GENERATOR_*
    ret.schemaGenerator = {};
    const read3 = read.bind(null, ret.schemaGenerator, 'MIKRO_ORM_SCHEMA_GENERATOR_');
    read3('disableForeignKeys', bool);
    read3('createForeignKeyConstraints', bool);
    cleanup(ret, 'schemaGenerator');
    // seeder options: MIKRO_ORM_SEEDER_*
    ret.seeder = {};
    const read4 = read.bind(null, ret.seeder, 'MIKRO_ORM_SEEDER_');
    read4('path');
    read4('pathTs');
    read4('glob');
    read4('emit');
    read4('defaultSeeder');
    cleanup(ret, 'seeder');
    return ret;
}
|
||||
20
node_modules/@mikro-orm/core/utils/fs-utils.d.ts
generated
vendored
Normal file
20
node_modules/@mikro-orm/core/utils/fs-utils.d.ts
generated
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
import { type Dictionary } from '../typings.js';
|
||||
export interface FsUtils {
|
||||
init(): Promise<void>;
|
||||
pathExists(path: string): boolean;
|
||||
ensureDir(path: string): void;
|
||||
readJSONSync<T = Dictionary>(path: string): T;
|
||||
glob(input: string | string[], cwd?: string): string[];
|
||||
resolveGlob(input: string | string[], cwd?: string): string[];
|
||||
getPackageConfig<T extends Dictionary>(basePath?: string): T;
|
||||
getORMPackages(): Set<string>;
|
||||
getORMPackageVersion(name: string): string | undefined;
|
||||
checkPackageVersion(): void;
|
||||
normalizePath(...parts: string[]): string;
|
||||
relativePath(path: string, relativeTo: string): string;
|
||||
absolutePath(path: string, baseDir?: string): string;
|
||||
writeFile(path: string, data: string, options?: Record<string, any>): Promise<void>;
|
||||
dynamicImport<T = any>(id: string): Promise<T>;
|
||||
}
|
||||
export declare const fs: FsUtils;
|
||||
export * from '../cache/FileCacheAdapter.js';
|
||||
192
node_modules/@mikro-orm/core/utils/fs-utils.js
generated
vendored
Normal file
192
node_modules/@mikro-orm/core/utils/fs-utils.js
generated
vendored
Normal file
@@ -0,0 +1,192 @@
|
||||
import { existsSync, globSync as nodeGlobSync, mkdirSync, readFileSync, realpathSync, statSync } from 'node:fs';
|
||||
import { writeFile as nodeWriteFile } from 'node:fs/promises';
|
||||
import { isAbsolute, join, normalize, relative } from 'node:path';
|
||||
import { fileURLToPath, pathToFileURL } from 'node:url';
|
||||
import { Utils } from './Utils.js';
|
||||
import { colors } from '../logging/colors.js';
|
||||
// Default glob implementation backed by node:fs globSync; replaced by a
// tinyglobby-based variant in `fs.init()` when that package is installed.
// Filters to plain files only and returns joined absolute-ish paths.
let globSync = (patterns, options) => {
    const files = nodeGlobSync(patterns, { ...options, withFileTypes: true });
    return files.filter(f => f.isFile()).map(f => join(f.parentPath, f.name));
};
|
||||
export const fs = {
    /** Swaps the glob implementation for tinyglobby when available (normalizes Windows separators). */
    async init() {
        const tinyGlobby = await import('tinyglobby').catch(() => null);
        if (tinyGlobby) {
            globSync = (patterns, options) => {
                patterns = Utils.asArray(patterns).map(p => p.replace(/\\/g, '/'));
                if (options?.cwd) {
                    options = { ...options, cwd: options.cwd.replace(/\\/g, '/') };
                }
                return tinyGlobby.globSync(patterns, { ...options, expandDirectories: false });
            };
        }
    },
    /** Checks existence of a path; glob patterns count as existing when they match anything. */
    pathExists(path) {
        if (/[*?[\]]/.test(path)) {
            return globSync(path).length > 0;
        }
        return existsSync(path);
    },
    /** Creates the directory (and parents) if it does not exist yet. */
    ensureDir(path) {
        if (!existsSync(path)) {
            mkdirSync(path, { recursive: true });
        }
    },
    /** Reads and parses a JSON file synchronously. */
    readJSONSync(path) {
        const file = readFileSync(path);
        return JSON.parse(file.toString());
    },
    /** Resolves glob patterns, honouring `!`-prefixed negative patterns as exclusions. */
    glob(input, cwd) {
        const patterns = Array.isArray(input) ? input : [input];
        const positive = [];
        const negative = [];
        for (const p of patterns) {
            if (p.startsWith('!')) {
                negative.push(p.slice(1));
            } else {
                positive.push(p);
            }
        }
        const included = new Set(this.resolveGlob(positive, cwd));
        if (included.size > 0 && negative.length > 0) {
            const excluded = this.resolveGlob(negative, cwd);
            for (const file of excluded) {
                included.delete(file);
            }
        }
        return [...included];
    },
    /** Resolves one pattern (or many); bare directory paths expand to `dir/**`. */
    resolveGlob(input, cwd) {
        if (Array.isArray(input)) {
            return input.flatMap(paths => this.resolveGlob(paths, cwd));
        }
        const hasGlobChars = /[*?[\]]/.test(input);
        if (!hasGlobChars) {
            try {
                const s = statSync(cwd ? this.normalizePath(cwd, input) : input);
                if (s.isDirectory()) {
                    return globSync(join(input, '**'), { cwd });
                }
            } catch {
                // ignore
            }
        }
        return globSync(input, { cwd });
    },
    /** Finds and parses the nearest `package.json`, walking up from `basePath` to the FS root. */
    getPackageConfig(basePath = process.cwd()) {
        if (this.pathExists(`${basePath}/package.json`)) {
            try {
                const path = this.normalizePath(import.meta.resolve(`${basePath}/package.json`));
                return this.readJSONSync(path);
            } catch (e) {
                /* v8 ignore next */
                return {};
            }
        }
        const parentFolder = realpathSync(`${basePath}/..`);
        // we reached the root folder
        if (basePath === parentFolder) {
            return {};
        }
        return this.getPackageConfig(parentFolder);
    },
    /** Lists all dependency + devDependency package names from the nearest `package.json`. */
    getORMPackages() {
        const pkg = this.getPackageConfig();
        return new Set([...Object.keys(pkg.dependencies ?? {}), ...Object.keys(pkg.devDependencies ?? {})]);
    },
    /** Reads the installed version of a package from its own `package.json`, or `undefined`. */
    getORMPackageVersion(name) {
        try {
            const path = import.meta.resolve(`${name}/package.json`);
            const pkg = this.readJSONSync(fileURLToPath(path));
            return pkg?.version;
        } catch (e) {
            return undefined;
        }
    },
    // inspired by https://github.com/facebook/docusaurus/pull/3386
    /** Throws when any installed @mikro-orm/* package version differs from @mikro-orm/core. */
    checkPackageVersion() {
        const coreVersion = Utils.getORMVersion();
        // skip the check when explicitly disabled or when running from an unbuilt source tree
        if (process.env.MIKRO_ORM_ALLOW_VERSION_MISMATCH || coreVersion === '[[MIKRO_ORM_VERSION]]') {
            return;
        }
        const deps = this.getORMPackages();
        const exceptions = new Set(['nestjs', 'sql-highlighter', 'mongo-highlighter']);
        const ormPackages = [...deps].filter(
            d => d.startsWith('@mikro-orm/') && d !== '@mikro-orm/core' && !exceptions.has(d.substring('@mikro-orm/'.length)),
        );
        for (const ormPackage of ormPackages) {
            const version = this.getORMPackageVersion(ormPackage);
            if (version != null && version !== coreVersion) {
                throw new Error(
                    `Bad ${colors.cyan(ormPackage)} version ${colors.yellow('' + version)}.\n` +
                        `All official @mikro-orm/* packages need to have the exact same version as @mikro-orm/core (${colors.green(coreVersion)}).\n` +
                        `Only exceptions are packages that don't live in the 'mikro-orm' repository: ${[...exceptions].join(', ')}.\n` +
                        `Maybe you want to check, or regenerate your yarn.lock or package-lock.json file?`,
                );
            }
        }
    },
    /**
     * Resolves and normalizes a series of path parts relative to each preceding part.
     * If any part is a `file:` URL, it is converted to a local path. If any part is an
     * absolute path, it replaces preceding paths (similar to `path.resolve` in NodeJS).
     * Trailing directory separators are removed, and all directory separators are converted
     * to POSIX-style separators (`/`).
     */
    normalizePath(...parts) {
        let start = 0;
        for (let i = 0; i < parts.length; i++) {
            const part = parts[i];
            if (isAbsolute(part)) {
                start = i;
            } else if (part.startsWith('file:')) {
                start = i;
                parts[i] = fileURLToPath(part);
            }
        }
        // an absolute/file: part discards everything before it
        if (start > 0) {
            parts = parts.slice(start);
        }
        let path = parts.join('/').replace(/\\/g, '/').replace(/\/$/, '');
        path = normalize(path).replace(/\\/g, '/');
        // prefix bare relative paths with './' (but keep negated glob patterns intact)
        return /^[/.]|[a-zA-Z]:/.exec(path) || path.startsWith('!') ? path : './' + path;
    },
    /**
     * Determines the relative path between two paths. If either path is a `file:` URL,
     * it is converted to a local path.
     */
    relativePath(path, relativeTo) {
        if (!path) {
            return path;
        }
        path = this.normalizePath(path);
        if (path.startsWith('.')) {
            // already relative
            return path;
        }
        path = relative(this.normalizePath(relativeTo), path);
        return this.normalizePath(path);
    },
    /**
     * Computes the absolute path to for the given path relative to the provided base directory.
     * If either `path` or `baseDir` are `file:` URLs, they are converted to local paths.
     */
    absolutePath(path, baseDir = process.cwd()) {
        if (!path) {
            return this.normalizePath(baseDir);
        }
        if (!isAbsolute(path) && !path.startsWith('file://')) {
            path = baseDir + '/' + path;
        }
        return this.normalizePath(path);
    },
    /** Thin async wrapper over `node:fs/promises.writeFile`. */
    async writeFile(path, data, options) {
        await nodeWriteFile(path, data, options);
    },
    /** Dynamically imports `id`, routing through an overridable global provider (used by bundlers/tests). */
    async dynamicImport(id) {
        /* v8 ignore next */
        const specifier = id.startsWith('file://') ? id : pathToFileURL(id).href;
        const dynamicImportProvider = globalThis.dynamicImportProvider ?? (id => import(id));
        return dynamicImportProvider(specifier);
    },
};
|
||||
export * from '../cache/FileCacheAdapter.js';
|
||||
12
node_modules/@mikro-orm/core/utils/index.d.ts
generated
vendored
Normal file
12
node_modules/@mikro-orm/core/utils/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
export * from './Configuration.js';
|
||||
export * from './Cursor.js';
|
||||
export * from './Utils.js';
|
||||
export * from './RequestContext.js';
|
||||
export * from './TransactionContext.js';
|
||||
export * from './TransactionManager.js';
|
||||
export * from './QueryHelper.js';
|
||||
export * from './NullHighlighter.js';
|
||||
export * from './EntityComparator.js';
|
||||
export * from './RawQueryFragment.js';
|
||||
export * from './env-vars.js';
|
||||
export * from './upsert-utils.js';
|
||||
12
node_modules/@mikro-orm/core/utils/index.js
generated
vendored
Normal file
12
node_modules/@mikro-orm/core/utils/index.js
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
export * from './Configuration.js';
|
||||
export * from './Cursor.js';
|
||||
export * from './Utils.js';
|
||||
export * from './RequestContext.js';
|
||||
export * from './TransactionContext.js';
|
||||
export * from './TransactionManager.js';
|
||||
export * from './QueryHelper.js';
|
||||
export * from './NullHighlighter.js';
|
||||
export * from './EntityComparator.js';
|
||||
export * from './RawQueryFragment.js';
|
||||
export * from './env-vars.js';
|
||||
export * from './upsert-utils.js';
|
||||
27
node_modules/@mikro-orm/core/utils/upsert-utils.d.ts
generated
vendored
Normal file
27
node_modules/@mikro-orm/core/utils/upsert-utils.d.ts
generated
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
import type { EntityData, EntityMetadata, FilterQuery } from '../typings.js';
|
||||
import type { UpsertOptions } from '../drivers/IDatabaseDriver.js';
|
||||
import { type Raw } from '../utils/RawQueryFragment.js';
|
||||
/**
 * Resolves the fields to merge on conflict (the `on conflict do update set …` part).
 * Uses `options.onConflictMergeFields` when provided; otherwise all keys of `data`
 * outside of `uniqueFields`, optionally minus `options.onConflictExcludeFields`.
 * Embedded (non-object) properties are expanded to their child property names.
 * @internal
 */
export declare function getOnConflictFields<T>(
    meta: EntityMetadata<T> | undefined,
    data: EntityData<T>,
    uniqueFields: (keyof T)[] | Raw,
    options: UpsertOptions<T>,
): (keyof T)[];
|
||||
/**
 * Computes which fields should be reloaded after an upsert: autoincrement
 * properties and comparable properties outside of `uniqueFields` (plus the
 * version property when present), further filtered based on the conflict
 * options. Returns `'*'` when no metadata is available.
 * @internal
 */
export declare function getOnConflictReturningFields<T, P extends string>(
    meta: EntityMetadata<T> | undefined,
    data: EntityData<T>,
    uniqueFields: (keyof T)[] | Raw,
    options: UpsertOptions<T, P>,
): (keyof T)[] | '*';
|
||||
/**
 * Builds the `where` condition used to locate the conflicting row of an
 * upsert, derived from the conflict fields (or the unique properties from the
 * metadata) and the values present in `data`. `propIndex` is the index of the
 * matched unique field, or `false` when the unique fields are a raw fragment.
 * @internal
 */
export declare function getWhereCondition<T extends object>(
    meta: EntityMetadata<T>,
    onConflictFields: (keyof T)[] | Raw | undefined,
    data: EntityData<T>,
    where: FilterQuery<T>,
): {
    where: FilterQuery<T>;
    propIndex: number | false;
};
|
||||
142
node_modules/@mikro-orm/core/utils/upsert-utils.js
generated
vendored
Normal file
142
node_modules/@mikro-orm/core/utils/upsert-utils.js
generated
vendored
Normal file
@@ -0,0 +1,142 @@
|
||||
import { isRaw } from '../utils/RawQueryFragment.js';
|
||||
import { Utils } from './Utils.js';
|
||||
/**
 * Recursively collects the property names contributed by an embedded property.
 * When `prop.object` is set, the embeddable maps to just its own name;
 * otherwise it expands to the names of its `embeddedProps` children,
 * recursing into nested inline embeddables.
 * NOTE(review): the `key` parameter is accepted but never used — callers pass
 * a dot-path remainder (see `expandFields`), which is silently ignored, so a
 * request for `embedded.child` currently expands to ALL child properties.
 * Confirm against upstream whether per-child filtering was intended.
 */
function expandEmbeddedProperties(prop, key) {
    if (prop.object) {
        return [prop.name];
    }
    return Object.values(prop.embeddedProps).flatMap(p => {
        /* v8 ignore next */
        if (p.embeddable && !p.object) {
            // nested inline embeddable — expand its children too
            return expandEmbeddedProperties(p);
        }
        return [p.name];
    });
}
|
||||
/**
 * Expands dot paths and stars.
 * `'*'` expands to all non-lazy, non-embeddable comparable properties;
 * `embedded.*` and `embedded.child` expand via `expandEmbeddedProperties`;
 * anything else passes through unchanged (embeddables expand to children).
 */
function expandFields(meta, fields) {
    const expanded = [];
    for (const field of fields) {
        if (field === '*' && meta) {
            for (const p of meta.comparableProps) {
                if (!p.lazy && !p.embeddable) {
                    expanded.push(p.name);
                }
            }
            continue;
        }
        const dotIndex = field.indexOf('.');
        if (dotIndex >= 0) {
            const head = field.substring(0, dotIndex);
            const rest = field.substring(dotIndex + 1);
            const headProp = meta?.properties[head];
            if (headProp?.embeddable) {
                const parts = rest === '*'
                    ? expandEmbeddedProperties(headProp)
                    : expandEmbeddedProperties(headProp, rest);
                expanded.push(...parts);
                continue;
            }
        }
        const prop = meta?.properties[field];
        if (prop?.embeddable) {
            expanded.push(...expandEmbeddedProperties(prop));
        } else {
            expanded.push(field);
        }
    }
    return expanded;
}
|
||||
/**
 * Resolves the fields to merge on conflict (the `do update set …` part).
 * With `options.onConflictMergeFields`, those are expanded and used directly;
 * otherwise every key of `data` that is outside of `uniqueFields` is taken,
 * minus `options.onConflictExcludeFields` when given. Inline (non-object)
 * embeddables are expanded to their child property names.
 * @internal
 */
export function getOnConflictFields(meta, data, uniqueFields, options) {
    if (options.onConflictMergeFields) {
        return expandFields(meta, options.onConflictMergeFields).flatMap(field => {
            const prop = meta?.properties[field];
            /* v8 ignore next */
            if (prop?.embeddable && !prop.object) {
                return Object.values(prop.embeddedProps).map(p => p.name);
            }
            return field;
        });
    }
    const keys = [];
    for (const field of Object.keys(data)) {
        // keep only fields outside of an explicit array of unique fields
        if (!Array.isArray(uniqueFields) || uniqueFields.includes(field)) {
            continue;
        }
        const prop = meta?.properties[field];
        if (prop?.embeddable && !prop.object) {
            keys.push(...expandEmbeddedProperties(prop));
        } else {
            keys.push(field);
        }
    }
    if (options.onConflictExcludeFields) {
        const excluded = expandFields(meta, options.onConflictExcludeFields);
        return keys.filter(field => !excluded.includes(field));
    }
    return keys;
}
|
||||
/**
 * Computes which fields should be reloaded after an upsert: autoincrement
 * properties plus comparable properties outside of `uniqueFields` (and the
 * version property when present). The result is further filtered based on the
 * conflict options; without metadata, `'*'` (everything) is returned.
 * @internal
 */
export function getOnConflictReturningFields(meta, data, uniqueFields, options) {
    /* v8 ignore next */
    if (!meta) {
        return '*';
    }
    const keys = [];
    for (const prop of meta.comparableProps) {
        if (prop.lazy || prop.embeddable) {
            continue;
        }
        // autoincrement values are always reloaded; other props only when
        // they are not part of the conflict target
        if (prop.autoincrement || (Array.isArray(uniqueFields) && !uniqueFields.includes(prop.name))) {
            keys.push(prop.name);
        }
    }
    if (meta.versionProperty) {
        keys.push(meta.versionProperty);
    }
    if (options.onConflictAction === 'ignore') {
        return keys;
    }
    if (options.onConflictMergeFields) {
        const merged = expandFields(meta, options.onConflictMergeFields);
        return keys.filter(key => !merged.includes(key));
    }
    if (options.onConflictExcludeFields) {
        const excluded = expandFields(meta, options.onConflictExcludeFields);
        return [...new Set([...keys, ...excluded])];
    }
    // default: only reload fields the caller did not provide in `data`
    return keys.filter(key => !(key in data));
}
|
||||
/**
 * Reads a possibly dot-separated `key` out of `obj`.
 * NOTE: intermediate segments that are nullish are created as empty objects,
 * so this mutates `obj` along the path (matching how the caller later assigns
 * into the same structure).
 */
function getPropertyValue(obj, key) {
    const segments = key.split('.');
    if (segments.length === 1) {
        return obj[key];
    }
    const last = segments.pop();
    let node = obj;
    for (const segment of segments) {
        if (node[segment] == null) {
            node[segment] = {};
        }
        node = node[segment];
    }
    return node[last];
}
|
||||
/**
 * Builds the `where` condition used to locate the conflicting row of an
 * upsert. Returns the (possibly replaced) `where` together with `propIndex`,
 * the index of the first unique field with a usable value in `data`, or
 * `false` when the unique fields are a raw fragment.
 * @internal
 */
export function getWhereCondition(meta, onConflictFields, data, where) {
    // Conflict target: explicit `onConflictFields`, otherwise every property
    // flagged as unique in the metadata.
    const unique = onConflictFields ?? meta.props.filter(p => p.unique).map(p => p.name);
    // First unique field with a value in `data`. NOTE(review): `??` binds
    // looser than `!=`, so this reads as `data[p] ?? (data[head] != null)` —
    // a present-but-falsy value (0, '') in `data[p]` makes the predicate
    // false; confirm against upstream whether that is intentional.
    const propIndex = !isRaw(unique) && unique.findIndex(p => data[p] ?? data[p.substring(0, p.indexOf('.'))] != null);
    if (onConflictFields || where == null) {
        if (propIndex !== false && propIndex >= 0) {
            let key = unique[propIndex];
            if (key.includes('.')) {
                // Dot path into an embedded/JSON value — swap the leading
                // property name for its first field name (`fieldNames[0]`),
                // keeping the rest of the path intact.
                const prop = meta.properties[key.substring(0, key.indexOf('.'))];
                if (prop) {
                    key = `${prop.fieldNames[0]}${key.substring(key.indexOf('.'))}`;
                }
            }
            where = { [key]: getPropertyValue(data, unique[propIndex]) };
        } else if (meta.uniques.length > 0) {
            // No single unique field matched — fall back to the first
            // composite unique constraint fully populated in `data`.
            for (const u of meta.uniques) {
                if (Utils.asArray(u.properties).every(p => data[p] != null)) {
                    where = Utils.asArray(u.properties).reduce((o, key) => {
                        o[key] = data[key];
                        return o;
                    }, {});
                    break;
                }
            }
        }
    }
    return { where, propIndex };
}
|
||||
Reference in New Issue
Block a user