Initial commit - Event Planner application

This commit is contained in:
mberlin
2026-03-18 14:55:56 -03:00
commit 86d779eb4d
7548 changed files with 1006324 additions and 0 deletions

View File

@@ -0,0 +1,62 @@
/**
 * An interface for getting the database metadata (names of the tables and columns etc.)
 */
export interface DatabaseIntrospector {
    /**
     * Get schema metadata.
     */
    getSchemas(): Promise<SchemaMetadata[]>;
    /**
     * Get tables and views metadata.
     */
    getTables(options?: DatabaseMetadataOptions): Promise<TableMetadata[]>;
    /**
     * Get the database metadata such as table and column names.
     *
     * @deprecated Use getTables() instead.
     */
    getMetadata(options?: DatabaseMetadataOptions): Promise<DatabaseMetadata>;
}
export interface DatabaseMetadataOptions {
    /**
     * If this is true, the metadata contains the internal kysely tables
     * such as the migration tables.
     */
    withInternalKyselyTables: boolean;
}
export interface SchemaMetadata {
    /** The name of the schema. */
    readonly name: string;
}
export interface DatabaseMetadata {
    /**
     * The tables and views found in the database.
     * The property isView can be used to tell them apart.
     */
    readonly tables: TableMetadata[];
}
export interface TableMetadata {
    /** The name of the table or view. */
    readonly name: string;
    /** True when this entry is a view rather than a table. */
    readonly isView: boolean;
    /** Metadata for each column of the table or view. */
    readonly columns: ColumnMetadata[];
    /** The schema the table lives in, when the dialect reports one. */
    readonly schema?: string;
}
export interface ColumnMetadata {
    /** The name of the column. */
    readonly name: string;
    /**
     * The data type of the column as reported by the database.
     *
     * NOTE: This value is whatever the database engine returns and it will be
     * different on different dialects even if you run the same migrations.
     * For example `integer` datatype in a migration will produce `int4`
     * on PostgreSQL, `INTEGER` on SQLite and `int` on MySQL.
     */
    readonly dataType: string;
    /**
     * The schema this column's data type was created in.
     */
    readonly dataTypeSchema?: string;
    /** True when the column is auto-incrementing. */
    readonly isAutoIncrementing: boolean;
    /** True when the column accepts NULL values. */
    readonly isNullable: boolean;
    /** True when the column has a default value defined. */
    readonly hasDefaultValue: boolean;
    /** The column's comment, when the dialect reports one. */
    readonly comment?: string;
}

View File

@@ -0,0 +1,2 @@
/// <reference types="./database-introspector.d.ts" />
// Type-only module: all content lives in the referenced .d.ts. The empty
// export marks this file as an ES module.
export {};

View File

@@ -0,0 +1,94 @@
import type { Kysely } from '../kysely.js';
import type { DialectAdapter, MigrationLockOptions } from './dialect-adapter.js';
/**
* A basic implementation of `DialectAdapter` with sensible default values.
* Third-party dialects can extend this instead of implementing the `DialectAdapter`
* interface from scratch. That way all new settings will get default values when
* they are added and there will be less breaking changes.
*/
export declare abstract class DialectAdapterBase implements DialectAdapter {
    /**
     * Whether or not this dialect supports `if not exists` in creation of tables/schemas/views/etc.
     *
     * If this is false, Kysely's internal migrations tables and schemas are created
     * without `if not exists` in migrations. This is not a problem if the dialect
     * supports transactional DDL.
     */
    get supportsCreateIfNotExists(): boolean;
    /**
     * Whether or not this dialect supports transactional DDL.
     *
     * If this is true, migrations are executed inside a transaction.
     */
    get supportsTransactionalDdl(): boolean;
    /**
     * Whether or not this dialect supports the `returning` in inserts,
     * updates and deletes.
     */
    get supportsReturning(): boolean;
    /**
     * Whether or not this dialect supports the `output` clause in inserts,
     * updates and deletes.
     */
    get supportsOutput(): boolean;
    /**
     * This method is used to acquire a lock for the migrations so that
     * it's not possible for two migration operations to run in parallel.
     *
     * Most dialects have explicit locks that can be used, like advisory locks
     * in PostgreSQL and the get_lock function in MySQL.
     *
     * If the dialect doesn't have explicit locks the {@link MigrationLockOptions.lockTable}
     * created by Kysely can be used instead. You can access it through the `options` object.
     * The lock table has two columns `id` and `is_locked` and there's only one row in the table
     * whose id is {@link MigrationLockOptions.lockRowId}. `is_locked` is an integer. Kysely
     * takes care of creating the lock table and inserting the one single row to it before this
     * method is executed. If the dialect supports schemas and the user has specified a custom
     * schema in their migration settings, the options object also contains the schema name in
     * {@link MigrationLockOptions.lockTableSchema}.
     *
     * Here's an example of how you might implement this method for a dialect that doesn't
     * have explicit locks but supports `FOR UPDATE` row locks and transactional DDL:
     *
     * ```ts
     * import { DialectAdapterBase, type MigrationLockOptions, Kysely } from 'kysely'
     *
     * export class MyAdapter extends DialectAdapterBase {
     *   override async acquireMigrationLock(
     *     db: Kysely<any>,
     *     options: MigrationLockOptions
     *   ): Promise<void> {
     *     const queryDb = options.lockTableSchema
     *       ? db.withSchema(options.lockTableSchema)
     *       : db
     *
     *     // Since our imaginary dialect supports transactional DDL and has
     *     // row locks, we can simply take a row lock here and it will guarantee
     *     // all subsequent calls to this method from other transactions will
     *     // wait until this transaction finishes.
     *     await queryDb
     *       .selectFrom(options.lockTable)
     *       .selectAll()
     *       .where('id', '=', options.lockRowId)
     *       .forUpdate()
     *       .execute()
     *   }
     *
     *   override async releaseMigrationLock() {
     *     // noop
     *   }
     * }
     * ```
     *
     * If `supportsTransactionalDdl` is `true` then the `db` passed to this method
     * is a transaction inside which the migrations will be executed. Otherwise
     * `db` is a single connection (session) that will be used to execute the
     * migrations.
     */
    abstract acquireMigrationLock(db: Kysely<any>, options: MigrationLockOptions): Promise<void>;
    /**
     * Releases the migration lock. See {@link acquireMigrationLock}.
     *
     * If `supportsTransactionalDdl` is `true` then the `db` passed to this method
     * is a transaction inside which the migrations were executed. Otherwise `db`
     * is a single connection (session) that was used to execute the migrations
     * and the `acquireMigrationLock` call.
     */
    abstract releaseMigrationLock(db: Kysely<any>, options: MigrationLockOptions): Promise<void>;
}

View File

@@ -0,0 +1,21 @@
/// <reference types="./dialect-adapter-base.d.ts" />
/**
 * Default implementation of `DialectAdapter`.
 *
 * Third-party dialects can extend this class instead of implementing
 * `DialectAdapter` from scratch; newly added settings then pick up these
 * defaults automatically, which keeps breaking changes to a minimum.
 */
export class DialectAdapterBase {
    /** `if not exists` support is assumed unless a dialect opts out. */
    get supportsCreateIfNotExists() {
        return true;
    }
    /** Transactional DDL is assumed unsupported unless a dialect opts in. */
    get supportsTransactionalDdl() {
        return false;
    }
    /** The `returning` clause is assumed unsupported unless a dialect opts in. */
    get supportsReturning() {
        return false;
    }
    /** The `output` clause is assumed unsupported unless a dialect opts in. */
    get supportsOutput() {
        return false;
    }
}

View File

@@ -0,0 +1,115 @@
import type { Kysely } from '../kysely.js';
/**
* A `DialectAdapter` encapsulates all differences between dialects outside
* of `Driver` and `QueryCompiler`.
*
* For example, some databases support transactional DDL and therefore we want
* to run migrations inside a transaction, while other databases don't support
* it. For that there's a `supportsTransactionalDdl` boolean in this interface.
*/
export interface DialectAdapter {
    /**
     * Whether or not this dialect supports `if not exists` in creation of tables/schemas/views/etc.
     *
     * If this is false, Kysely's internal migrations tables and schemas are created
     * without `if not exists` in migrations. This is not a problem if the dialect
     * supports transactional DDL.
     */
    readonly supportsCreateIfNotExists: boolean;
    /**
     * Whether or not this dialect supports transactional DDL.
     *
     * If this is true, migrations are executed inside a transaction.
     */
    readonly supportsTransactionalDdl: boolean;
    /**
     * Whether or not this dialect supports the `returning` in inserts,
     * updates and deletes.
     */
    readonly supportsReturning: boolean;
    /**
     * Whether or not this dialect supports the `output` clause in inserts,
     * updates and deletes.
     */
    readonly supportsOutput?: boolean;
    /**
     * This method is used to acquire a lock for the migrations so that
     * it's not possible for two migration operations to run in parallel.
     *
     * Most dialects have explicit locks that can be used, like advisory locks
     * in PostgreSQL and the get_lock function in MySQL.
     *
     * If the dialect doesn't have explicit locks the {@link MigrationLockOptions.lockTable}
     * created by Kysely can be used instead. You can access it through the `options` object.
     * The lock table has two columns `id` and `is_locked` and there's only one row in the table
     * whose id is {@link MigrationLockOptions.lockRowId}. `is_locked` is an integer. Kysely
     * takes care of creating the lock table and inserting the one single row to it before this
     * method is executed. If the dialect supports schemas and the user has specified a custom
     * schema in their migration settings, the options object also contains the schema name in
     * {@link MigrationLockOptions.lockTableSchema}.
     *
     * Here's an example of how you might implement this method for a dialect that doesn't
     * have explicit locks but supports `FOR UPDATE` row locks and transactional DDL:
     *
     * ```ts
     * import { DialectAdapterBase, type MigrationLockOptions, Kysely } from 'kysely'
     *
     * export class MyAdapter extends DialectAdapterBase {
     *   override async acquireMigrationLock(
     *     db: Kysely<any>,
     *     options: MigrationLockOptions
     *   ): Promise<void> {
     *     const queryDb = options.lockTableSchema
     *       ? db.withSchema(options.lockTableSchema)
     *       : db
     *
     *     // Since our imaginary dialect supports transactional DDL and has
     *     // row locks, we can simply take a row lock here and it will guarantee
     *     // all subsequent calls to this method from other transactions will
     *     // wait until this transaction finishes.
     *     await queryDb
     *       .selectFrom(options.lockTable)
     *       .selectAll()
     *       .where('id', '=', options.lockRowId)
     *       .forUpdate()
     *       .execute()
     *   }
     *
     *   override async releaseMigrationLock() {
     *     // noop
     *   }
     * }
     * ```
     *
     * If `supportsTransactionalDdl` is `true` then the `db` passed to this method
     * is a transaction inside which the migrations will be executed. Otherwise
     * `db` is a single connection (session) that will be used to execute the
     * migrations.
     */
    acquireMigrationLock(db: Kysely<any>, options: MigrationLockOptions): Promise<void>;
    /**
     * Releases the migration lock. See {@link acquireMigrationLock}.
     *
     * If `supportsTransactionalDdl` is `true` then the `db` passed to this method
     * is a transaction inside which the migrations were executed. Otherwise `db`
     * is a single connection (session) that was used to execute the migrations
     * and the `acquireMigrationLock` call.
     */
    releaseMigrationLock(db: Kysely<any>, options: MigrationLockOptions): Promise<void>;
}
export interface MigrationLockOptions {
    /**
     * The name of the migration lock table.
     */
    readonly lockTable: string;
    /**
     * The id of the only row in the migration lock table.
     */
    readonly lockRowId: string;
    /**
     * The schema in which the migration lock table lives. This is only
     * defined if the user has specified a custom schema in the migration
     * settings.
     */
    readonly lockTableSchema?: string;
}

View File

@@ -0,0 +1,2 @@
/// <reference types="./dialect-adapter.d.ts" />
// Type-only module: all content lives in the referenced .d.ts. The empty
// export marks this file as an ES module.
export {};

34
node_modules/kysely/dist/esm/dialect/dialect.d.ts generated vendored Normal file
View File

@@ -0,0 +1,34 @@
import type { Driver } from '../driver/driver.js';
import type { Kysely } from '../kysely.js';
import type { QueryCompiler } from '../query-compiler/query-compiler.js';
import type { DatabaseIntrospector } from './database-introspector.js';
import type { DialectAdapter } from './dialect-adapter.js';
/**
* A Dialect is the glue between Kysely and the underlying database engine.
*
* See the built-in {@link PostgresDialect} as an example of a dialect.
* Users can implement their own dialects and use them by passing one
* in the {@link KyselyConfig.dialect} property.
*/
export interface Dialect {
    /**
     * Creates a driver for the dialect.
     */
    createDriver(): Driver;
    /**
     * Creates a query compiler for the dialect.
     */
    createQueryCompiler(): QueryCompiler;
    /**
     * Creates an adapter for the dialect.
     *
     * The adapter encapsulates dialect differences outside of the driver and
     * query compiler (see {@link DialectAdapter}).
     */
    createAdapter(): DialectAdapter;
    /**
     * Creates a database introspector that can be used to get database metadata
     * such as the table names and column names of those tables.
     *
     * `db` never has any plugins installed. It's created using
     * {@link Kysely.withoutPlugins}.
     */
    createIntrospector(db: Kysely<any>): DatabaseIntrospector;
}

2
node_modules/kysely/dist/esm/dialect/dialect.js generated vendored Normal file
View File

@@ -0,0 +1,2 @@
/// <reference types="./dialect.d.ts" />
// Type-only module: all content lives in the referenced .d.ts. The empty
// export marks this file as an ES module.
export {};

View File

@@ -0,0 +1,83 @@
import type { Kysely } from '../../kysely.js';
import { DialectAdapterBase } from '../dialect-adapter-base.js';
export declare class MssqlAdapter extends DialectAdapterBase {
    /**
     * Whether or not this dialect supports `if not exists` in creation of tables/schemas/views/etc.
     *
     * If this is false, Kysely's internal migrations tables and schemas are created
     * without `if not exists` in migrations. This is not a problem if the dialect
     * supports transactional DDL.
     */
    get supportsCreateIfNotExists(): boolean;
    /**
     * Whether or not this dialect supports transactional DDL.
     *
     * If this is true, migrations are executed inside a transaction.
     */
    get supportsTransactionalDdl(): boolean;
    /**
     * Whether or not this dialect supports the `output` clause in inserts,
     * updates and deletes.
     */
    get supportsOutput(): boolean;
    /**
     * This method is used to acquire a lock for the migrations so that
     * it's not possible for two migration operations to run in parallel.
     *
     * Most dialects have explicit locks that can be used, like advisory locks
     * in PostgreSQL and the get_lock function in MySQL.
     *
     * If the dialect doesn't have explicit locks the {@link MigrationLockOptions.lockTable}
     * created by Kysely can be used instead. You can access it through the `options` object.
     * The lock table has two columns `id` and `is_locked` and there's only one row in the table
     * whose id is {@link MigrationLockOptions.lockRowId}. `is_locked` is an integer. Kysely
     * takes care of creating the lock table and inserting the one single row to it before this
     * method is executed. If the dialect supports schemas and the user has specified a custom
     * schema in their migration settings, the options object also contains the schema name in
     * {@link MigrationLockOptions.lockTableSchema}.
     *
     * Here's an example of how you might implement this method for a dialect that doesn't
     * have explicit locks but supports `FOR UPDATE` row locks and transactional DDL:
     *
     * ```ts
     * import { DialectAdapterBase, type MigrationLockOptions, Kysely } from 'kysely'
     *
     * export class MyAdapter extends DialectAdapterBase {
     *   override async acquireMigrationLock(
     *     db: Kysely<any>,
     *     options: MigrationLockOptions
     *   ): Promise<void> {
     *     const queryDb = options.lockTableSchema
     *       ? db.withSchema(options.lockTableSchema)
     *       : db
     *
     *     // Since our imaginary dialect supports transactional DDL and has
     *     // row locks, we can simply take a row lock here and it will guarantee
     *     // all subsequent calls to this method from other transactions will
     *     // wait until this transaction finishes.
     *     await queryDb
     *       .selectFrom(options.lockTable)
     *       .selectAll()
     *       .where('id', '=', options.lockRowId)
     *       .forUpdate()
     *       .execute()
     *   }
     *
     *   override async releaseMigrationLock() {
     *     // noop
     *   }
     * }
     * ```
     *
     * If `supportsTransactionalDdl` is `true` then the `db` passed to this method
     * is a transaction inside which the migrations will be executed. Otherwise
     * `db` is a single connection (session) that will be used to execute the
     * migrations.
     */
    acquireMigrationLock(db: Kysely<any>): Promise<void>;
    /**
     * Releases the migration lock. See {@link acquireMigrationLock}.
     *
     * If `supportsTransactionalDdl` is `true` then the `db` passed to this method
     * is a transaction inside which the migrations were executed. Otherwise `db`
     * is a single connection (session) that was used to execute the migrations
     * and the `acquireMigrationLock` call.
     */
    releaseMigrationLock(): Promise<void>;
}

View File

@@ -0,0 +1,25 @@
/// <reference types="./mssql-adapter.d.ts" />
import { DEFAULT_MIGRATION_TABLE } from '../../migration/migrator.js';
import { sql } from '../../raw-builder/sql.js';
import { DialectAdapterBase } from '../dialect-adapter-base.js';
export class MssqlAdapter extends DialectAdapterBase {
    // This dialect does not use `if not exists` when creating objects.
    get supportsCreateIfNotExists() {
        return false;
    }
    // DDL statements run inside the migration transaction on this dialect.
    get supportsTransactionalDdl() {
        return true;
    }
    // The `output` clause is available in inserts, updates and deletes.
    get supportsOutput() {
        return true;
    }
    async acquireMigrationLock(db) {
        // Take a transaction-scoped exclusive application lock named after the
        // migrations table.
        // https://learn.microsoft.com/en-us/sql/relational-databases/system-stored-procedures/sp-getapplock-transact-sql?view=sql-server-ver16
        const principal = sql.lit('dbo');
        const resource = sql.lit(DEFAULT_MIGRATION_TABLE);
        const lockMode = sql.lit('Exclusive');
        await sql`exec sp_getapplock @DbPrincipal = ${principal}, @Resource = ${resource}, @LockMode = ${lockMode}`.execute(db);
    }
    async releaseMigrationLock() {
        // Intentionally empty: `sp_getapplock` is released automatically at the
        // end of the transaction, and because `supportsTransactionalDdl` is true
        // the `db` passed to `acquireMigrationLock` is a transaction.
    }
}

View File

@@ -0,0 +1,181 @@
import type { KyselyTypeError } from '../../util/type-error.js';
export interface MssqlDialectConfig {
    /**
     * When `true`, connections are reset to their initial states when released
     * back to the pool, resulting in additional requests to the database.
     *
     * Defaults to `false`.
     */
    resetConnectionsOnRelease?: boolean;
    /**
     * This dialect uses the `tarn` package to manage the connection pool to your
     * database. To use it as a peer dependency and not bundle it with Kysely's code,
     * you need to pass the `tarn` package itself. You also need to pass some pool options
     * (excluding `create`, `destroy` and `validate` functions which are controlled by this dialect),
     * `min` & `max` connections at the very least.
     *
     * ### Examples
     *
     * ```ts
     * import { MssqlDialect } from 'kysely'
     * import * as Tarn from 'tarn'
     * import * as Tedious from 'tedious'
     *
     * const dialect = new MssqlDialect({
     *   tarn: { ...Tarn, options: { max: 10, min: 0 } },
     *   tedious: {
     *     ...Tedious,
     *     connectionFactory: () => new Tedious.Connection({
     *       // ...
     *       server: 'localhost',
     *       // ...
     *     }),
     *   }
     * })
     * ```
     */
    tarn: Tarn;
    /**
     * This dialect uses the `tedious` package to communicate with your MS SQL Server
     * database. To use it as a peer dependency and not bundle it with Kysely's code,
     * you need to pass the `tedious` package itself. You also need to pass a factory
     * function that creates new `tedious` `Connection` instances on demand.
     *
     * ### Examples
     *
     * ```ts
     * import { MssqlDialect } from 'kysely'
     * import * as Tarn from 'tarn'
     * import * as Tedious from 'tedious'
     *
     * const dialect = new MssqlDialect({
     *   tarn: { ...Tarn, options: { max: 10, min: 0 } },
     *   tedious: {
     *     ...Tedious,
     *     connectionFactory: () => new Tedious.Connection({
     *       // ...
     *       server: 'localhost',
     *       // ...
     *     }),
     *   }
     * })
     * ```
     */
    tedious: Tedious;
    /**
     * When `true`, connections are validated before being acquired from the pool,
     * resulting in additional requests to the database.
     *
     * Defaults to `true`.
     */
    validateConnections?: boolean;
}
/**
 * The subset of the `tedious` package used by this dialect, plus a factory
 * for creating connections on demand.
 */
export interface Tedious {
    connectionFactory: () => TediousConnection | Promise<TediousConnection>;
    ISOLATION_LEVEL: TediousIsolationLevel;
    Request: TediousRequestClass;
    /**
     * @deprecated use {@link MssqlDialectConfig.resetConnectionsOnRelease} instead.
     */
    resetConnectionOnRelease?: KyselyTypeError<'deprecated: use `MssqlDialectConfig.resetConnectionsOnRelease` instead'>;
    TYPES: TediousTypes;
}
/**
 * The subset of a `tedious` `Connection` instance used by this dialect.
 */
export interface TediousConnection {
    beginTransaction(callback: (err: Error | null | undefined, transactionDescriptor?: any) => void, name?: string | undefined, isolationLevel?: number | undefined): void;
    cancel(): boolean;
    close(): void;
    commitTransaction(callback: (err: Error | null | undefined) => void, name?: string | undefined): void;
    connect(connectListener: (err?: Error) => void): void;
    execSql(request: TediousRequest): void;
    off(event: 'error', listener: (error: unknown) => void): this;
    off(event: string, listener: (...args: any[]) => void): this;
    on(event: 'error', listener: (error: unknown) => void): this;
    on(event: string, listener: (...args: any[]) => void): this;
    once(event: 'end', listener: () => void): this;
    once(event: string, listener: (...args: any[]) => void): this;
    reset(callback: (err: Error | null | undefined) => void): void;
    rollbackTransaction(callback: (err: Error | null | undefined) => void, name?: string | undefined): void;
    saveTransaction(callback: (err: Error | null | undefined) => void, name: string): void;
}
/** Maps isolation level names to `tedious`' numeric constants. */
export type TediousIsolationLevel = Record<string, number>;
/** Constructor shape of `tedious`' `Request` class. */
export interface TediousRequestClass {
    new (sqlTextOrProcedure: string | undefined, callback: (error?: Error | null, rowCount?: number, rows?: any) => void, options?: {
        statementColumnEncryptionSetting?: any;
    }): TediousRequest;
}
/** The subset of a `tedious` `Request` instance used by this dialect. */
export declare class TediousRequest {
    addParameter(name: string, dataType: TediousDataType, value?: unknown, options?: Readonly<{
        output?: boolean;
        length?: number;
        precision?: number;
        scale?: number;
    }> | null): void;
    off(event: 'row', listener: (columns: any) => void): this;
    off(event: string, listener: (...args: any[]) => void): this;
    on(event: 'row', listener: (columns: any) => void): this;
    on(event: string, listener: (...args: any[]) => void): this;
    once(event: 'requestCompleted', listener: () => void): this;
    once(event: string, listener: (...args: any[]) => void): this;
    pause(): void;
    resume(): void;
}
/**
 * The `TYPES` export of `tedious`: named SQL data types plus an index
 * signature covering the rest.
 */
export interface TediousTypes {
    NVarChar: TediousDataType;
    BigInt: TediousDataType;
    Int: TediousDataType;
    Float: TediousDataType;
    Bit: TediousDataType;
    DateTime: TediousDataType;
    VarBinary: TediousDataType;
    [x: string]: TediousDataType;
}
/** Opaque marker for a `tedious` data type value. */
export interface TediousDataType {
}
/** A single column of a result row as produced by `tedious`. */
export interface TediousColumnValue {
    metadata: {
        colName: string;
    };
    value: any;
}
/** The subset of the `tarn` package used for connection pooling. */
export interface Tarn {
    /**
     * Tarn.js' pool options, excluding `create`, `destroy` and `validate` functions,
     * which must be implemented by this dialect.
     */
    options: Omit<TarnPoolOptions<any>, 'create' | 'destroy' | 'validate'> & {
        /**
         * @deprecated use {@link MssqlDialectConfig.validateConnections} instead.
         */
        validateConnections?: KyselyTypeError<'deprecated: use `MssqlDialectConfig.validateConnections` instead'>;
    };
    /**
     * Tarn.js' Pool class.
     */
    Pool: typeof TarnPool;
}
/** The shape of Tarn.js' `Pool` class used by this dialect. */
export declare class TarnPool<R> {
    constructor(opt: TarnPoolOptions<R>);
    acquire(): TarnPendingRequest<R>;
    destroy(): any;
    release(resource: R): void;
}
/** Tarn.js' pool options. */
export interface TarnPoolOptions<R> {
    acquireTimeoutMillis?: number;
    create(cb: (err: Error | null, resource: R) => void): any | (() => Promise<R>);
    createRetryIntervalMillis?: number;
    createTimeoutMillis?: number;
    destroy(resource: R): any;
    destroyTimeoutMillis?: number;
    idleTimeoutMillis?: number;
    log?(msg: string): any;
    max: number;
    min: number;
    propagateCreateError?: boolean;
    reapIntervalMillis?: number;
    validate?(resource: R): boolean;
}
/** The pending-acquire handle returned by Tarn.js' `Pool.acquire()`. */
export interface TarnPendingRequest<R> {
    promise: Promise<R>;
    resolve: (resource: R) => void;
    reject: (err: Error) => void;
}

View File

@@ -0,0 +1,2 @@
/// <reference types="./mssql-dialect-config.d.ts" />
// Type-only module: all content lives in the referenced .d.ts. The empty
// export marks this file as an ES module.
export {};

View File

@@ -0,0 +1,70 @@
import type { Driver } from '../../driver/driver.js';
import type { Kysely } from '../../kysely.js';
import type { QueryCompiler } from '../../query-compiler/query-compiler.js';
import type { DatabaseIntrospector } from '../database-introspector.js';
import type { DialectAdapter } from '../dialect-adapter.js';
import type { Dialect } from '../dialect.js';
import type { MssqlDialectConfig } from './mssql-dialect-config.js';
/**
* MS SQL Server dialect that uses the [tedious](https://tediousjs.github.io/tedious)
* library.
*
* The constructor takes an instance of {@link MssqlDialectConfig}.
*
* ```ts
* import * as Tedious from 'tedious'
* import * as Tarn from 'tarn'
*
* const dialect = new MssqlDialect({
* tarn: {
* ...Tarn,
* options: {
* min: 0,
* max: 10,
* },
* },
* tedious: {
* ...Tedious,
* connectionFactory: () => new Tedious.Connection({
* authentication: {
* options: {
* password: 'password',
* userName: 'username',
* },
* type: 'default',
* },
* options: {
* database: 'some_db',
* port: 1433,
* trustServerCertificate: true,
* },
* server: 'localhost',
* }),
* },
* })
* ```
*/
export declare class MssqlDialect implements Dialect {
    #private;
    /**
     * Creates a new MS SQL Server dialect. See {@link MssqlDialectConfig}
     * for the required `tarn` and `tedious` settings.
     */
    constructor(config: MssqlDialectConfig);
    /**
     * Creates a driver for the dialect.
     */
    createDriver(): Driver;
    /**
     * Creates a query compiler for the dialect.
     */
    createQueryCompiler(): QueryCompiler;
    /**
     * Creates an adapter for the dialect.
     */
    createAdapter(): DialectAdapter;
    /**
     * Creates a database introspector that can be used to get database metadata
     * such as the table names and column names of those tables.
     *
     * `db` never has any plugins installed. It's created using
     * {@link Kysely.withoutPlugins}.
     */
    createIntrospector(db: Kysely<any>): DatabaseIntrospector;
}

View File

@@ -0,0 +1,62 @@
/// <reference types="./mssql-dialect.d.ts" />
import { MssqlAdapter } from './mssql-adapter.js';
import { MssqlDriver } from './mssql-driver.js';
import { MssqlIntrospector } from './mssql-introspector.js';
import { MssqlQueryCompiler } from './mssql-query-compiler.js';
/**
* MS SQL Server dialect that uses the [tedious](https://tediousjs.github.io/tedious)
* library.
*
* The constructor takes an instance of {@link MssqlDialectConfig}.
*
* ```ts
* import * as Tedious from 'tedious'
* import * as Tarn from 'tarn'
*
* const dialect = new MssqlDialect({
* tarn: {
* ...Tarn,
* options: {
* min: 0,
* max: 10,
* },
* },
* tedious: {
* ...Tedious,
* connectionFactory: () => new Tedious.Connection({
* authentication: {
* options: {
* password: 'password',
* userName: 'username',
* },
* type: 'default',
* },
* options: {
* database: 'some_db',
* port: 1433,
* trustServerCertificate: true,
* },
* server: 'localhost',
* }),
* },
* })
* ```
*/
export class MssqlDialect {
    /** The configuration object given at construction time. */
    #config;
    constructor(config) {
        this.#config = config;
    }
    /** Builds the tedious/tarn based driver for this dialect. */
    createDriver() {
        return new MssqlDriver(this.#config);
    }
    /** Builds the MS SQL Server query compiler. */
    createQueryCompiler() {
        return new MssqlQueryCompiler();
    }
    /** Builds the MS SQL Server dialect adapter. */
    createAdapter() {
        return new MssqlAdapter();
    }
    /** Builds an introspector bound to the given plugin-free Kysely instance. */
    createIntrospector(db) {
        return new MssqlIntrospector(db);
    }
}

View File

@@ -0,0 +1,59 @@
import type { DatabaseConnection, QueryResult } from '../../driver/database-connection.js';
import type { Driver, TransactionSettings } from '../../driver/driver.js';
import type { MssqlDialectConfig, Tedious, TediousConnection } from './mssql-dialect-config.js';
import { CompiledQuery } from '../../query-compiler/compiled-query.js';
// Symbol-typed method keys: these pool-lifecycle hooks exist on
// MssqlConnection but are deliberately kept off the public
// DatabaseConnection surface.
declare const PRIVATE_RESET_METHOD: unique symbol;
declare const PRIVATE_DESTROY_METHOD: unique symbol;
declare const PRIVATE_VALIDATE_METHOD: unique symbol;
export declare class MssqlDriver implements Driver {
    #private;
    constructor(config: MssqlDialectConfig);
    /**
     * Initializes the driver.
     *
     * After calling this method the driver should be usable and `acquireConnection` etc.
     * methods should be callable.
     */
    init(): Promise<void>;
    /**
     * Acquires a new connection from the pool.
     */
    acquireConnection(): Promise<DatabaseConnection>;
    /**
     * Begins a transaction.
     */
    beginTransaction(connection: MssqlConnection, settings: TransactionSettings): Promise<void>;
    /**
     * Commits a transaction.
     */
    commitTransaction(connection: MssqlConnection): Promise<void>;
    /**
     * Rolls back a transaction.
     */
    rollbackTransaction(connection: MssqlConnection): Promise<void>;
    /**
     * Creates a named savepoint in the current transaction.
     */
    savepoint(connection: MssqlConnection, savepointName: string): Promise<void>;
    /**
     * Rolls the current transaction back to the named savepoint.
     */
    rollbackToSavepoint(connection: MssqlConnection, savepointName: string): Promise<void>;
    /**
     * Releases a connection back to the pool.
     */
    releaseConnection(connection: MssqlConnection): Promise<void>;
    /**
     * Destroys the driver and releases all resources.
     */
    destroy(): Promise<void>;
}
/**
 * A pooled database connection wrapping a tedious connection.
 * Not exported; the pool accesses its lifecycle hooks through the
 * symbol keys declared above.
 */
declare class MssqlConnection implements DatabaseConnection {
    #private;
    constructor(connection: TediousConnection, tedious: Tedious);
    beginTransaction(settings: TransactionSettings): Promise<void>;
    commitTransaction(): Promise<void>;
    connect(): Promise<this>;
    executeQuery<O>(compiledQuery: CompiledQuery): Promise<QueryResult<O>>;
    rollbackTransaction(savepointName?: string): Promise<void>;
    savepoint(savepointName: string): Promise<void>;
    streamQuery<O>(compiledQuery: CompiledQuery, chunkSize: number): AsyncIterableIterator<QueryResult<O>>;
    [PRIVATE_DESTROY_METHOD](): Promise<void>;
    [PRIVATE_RESET_METHOD](): Promise<void>;
    [PRIVATE_VALIDATE_METHOD](): Promise<boolean>;
}
export {};

View File

@@ -0,0 +1,356 @@
/// <reference types="./mssql-driver.d.ts" />
import { freeze, isBigInt, isBoolean, isBuffer, isDate, isNull, isNumber, isString, isUndefined, } from '../../util/object-utils.js';
import { CompiledQuery } from '../../query-compiler/compiled-query.js';
import { extendStackTrace } from '../../util/stack-trace-utils.js';
import { randomString } from '../../util/random-string.js';
import { Deferred } from '../../util/deferred.js';
// Module-private symbols used as method keys so that MssqlConnection's
// pool-lifecycle hooks are not reachable through the public class surface.
const PRIVATE_RESET_METHOD = Symbol();
const PRIVATE_DESTROY_METHOD = Symbol();
const PRIVATE_VALIDATE_METHOD = Symbol();
export class MssqlDriver {
    // Frozen copy of the dialect config.
    #config;
    // Tarn pool of MssqlConnection instances.
    #pool;
    constructor(config) {
        this.#config = freeze({ ...config });
        const { tarn, tedious, validateConnections } = this.#config;
        // Strip the deprecated tarn-level flag out of the options forwarded to
        // the pool; it is only consulted below when deciding whether to validate.
        const { validateConnections: deprecatedValidateConnections, ...poolOptions } = tarn.options;
        this.#pool = new tarn.Pool({
            ...poolOptions,
            // A pooled resource is a fully connected MssqlConnection.
            create: async () => {
                const connection = await tedious.connectionFactory();
                return await new MssqlConnection(connection, tedious).connect();
            },
            destroy: async (connection) => {
                await connection[PRIVATE_DESTROY_METHOD]();
            },
            // Validation runs unless disabled via `validateConnections: false`
            // or the deprecated `tarn.options.validateConnections: false`.
            // @ts-ignore `tarn` accepts a function that returns a promise here, but
            // the types are not aligned and it type errors.
            validate: validateConnections === false ||
                deprecatedValidateConnections === false
                ? undefined
                : (connection) => connection[PRIVATE_VALIDATE_METHOD](),
        });
    }
    async init() {
        // noop
    }
    async acquireConnection() {
        // Tarn's acquire() returns a pending request; its promise resolves to
        // a pooled connection.
        return await this.#pool.acquire().promise;
    }
    async beginTransaction(connection, settings) {
        await connection.beginTransaction(settings);
    }
    async commitTransaction(connection) {
        await connection.commitTransaction();
    }
    async rollbackTransaction(connection) {
        await connection.rollbackTransaction();
    }
    async savepoint(connection, savepointName) {
        await connection.savepoint(savepointName);
    }
    async rollbackToSavepoint(connection, savepointName) {
        // Rolling back to a savepoint is a named rollback in tedious.
        await connection.rollbackTransaction(savepointName);
    }
    async releaseConnection(connection) {
        // Optionally reset session state (per config flag or the deprecated
        // tedious-level flag) before handing the connection back to the pool.
        if (this.#config.resetConnectionsOnRelease ||
            this.#config.tedious.resetConnectionOnRelease) {
            await connection[PRIVATE_RESET_METHOD]();
        }
        this.#pool.release(connection);
    }
    async destroy() {
        await this.#pool.destroy();
    }
}
class MssqlConnection {
#connection;
#hasSocketError;
#tedious;
constructor(connection, tedious) {
    this.#connection = connection;
    // Set when an 'ESOCKET' error is observed (see connect()); presumably
    // consulted by the pool's validate hook — not visible in this chunk.
    this.#hasSocketError = false;
    this.#tedious = tedious;
}
async beginTransaction(settings) {
const { isolationLevel } = settings;
await new Promise((resolve, reject) => this.#connection.beginTransaction((error) => {
if (error)
reject(error);
else
resolve(undefined);
}, isolationLevel ? randomString(8) : undefined, isolationLevel
? this.#getTediousIsolationLevel(isolationLevel)
: undefined));
}
async commitTransaction() {
await new Promise((resolve, reject) => this.#connection.commitTransaction((error) => {
if (error)
reject(error);
else
resolve(undefined);
}));
}
async connect() {
    const { promise: waitForConnected, reject, resolve } = new Deferred();
    this.#connection.connect((error) => {
        if (error) {
            return reject(error);
        }
        resolve();
    });
    // Permanent 'error' listener: records socket-level failures and fails a
    // still-pending connect attempt. NOTE(review): this also fires for errors
    // after connect() has resolved — presumably a no-op on the settled
    // deferred; confirm against Deferred's implementation.
    this.#connection.on('error', (error) => {
        if (error instanceof Error &&
            'code' in error &&
            error.code === 'ESOCKET') {
            this.#hasSocketError = true;
        }
        console.error(error);
        reject(error);
    });
    // Guard against the socket closing before the connect callback fires.
    function endListener() {
        reject(new Error('The connection ended without ever completing the connection'));
    }
    this.#connection.once('end', endListener);
    await waitForConnected;
    // Connected successfully; the 'end' guard is no longer needed.
    this.#connection.off('end', endListener);
    return this;
}
async executeQuery(compiledQuery) {
try {
const deferred = new Deferred();
const request = new MssqlRequest({
compiledQuery,
tedious: this.#tedious,
onDone: deferred,
});
this.#connection.execSql(request.request);
const { rowCount, rows } = await deferred.promise;
return {
numAffectedRows: rowCount !== undefined ? BigInt(rowCount) : undefined,
rows,
};
}
catch (err) {
throw extendStackTrace(err, new Error());
}
}
async rollbackTransaction(savepointName) {
await new Promise((resolve, reject) => this.#connection.rollbackTransaction((error) => {
if (error)
reject(error);
else
resolve(undefined);
}, savepointName));
}
async savepoint(savepointName) {
await new Promise((resolve, reject) => this.#connection.saveTransaction((error) => {
if (error)
reject(error);
else
resolve(undefined);
}, savepointName));
}
async *streamQuery(compiledQuery, chunkSize) {
if (!Number.isInteger(chunkSize) || chunkSize <= 0) {
throw new Error('chunkSize must be a positive integer');
}
const request = new MssqlRequest({
compiledQuery,
streamChunkSize: chunkSize,
tedious: this.#tedious,
});
this.#connection.execSql(request.request);
try {
while (true) {
const rows = await request.readChunk();
if (rows.length === 0) {
break;
}
yield { rows };
if (rows.length < chunkSize) {
break;
}
}
}
finally {
await this.#cancelRequest(request);
}
}
#getTediousIsolationLevel(isolationLevel) {
const { ISOLATION_LEVEL } = this.#tedious;
const mapper = {
'read committed': ISOLATION_LEVEL.READ_COMMITTED,
'read uncommitted': ISOLATION_LEVEL.READ_UNCOMMITTED,
'repeatable read': ISOLATION_LEVEL.REPEATABLE_READ,
serializable: ISOLATION_LEVEL.SERIALIZABLE,
snapshot: ISOLATION_LEVEL.SNAPSHOT,
};
const tediousIsolationLevel = mapper[isolationLevel];
if (tediousIsolationLevel === undefined) {
throw new Error(`Unknown isolation level: ${isolationLevel}`);
}
return tediousIsolationLevel;
}
#cancelRequest(request) {
return new Promise((resolve) => {
request.request.once('requestCompleted', resolve);
const wasCanceled = this.#connection.cancel();
if (!wasCanceled) {
request.request.off('requestCompleted', resolve);
resolve();
}
});
}
[PRIVATE_DESTROY_METHOD]() {
if ('closed' in this.#connection && this.#connection.closed) {
return Promise.resolve();
}
return new Promise((resolve) => {
this.#connection.once('end', resolve);
this.#connection.close();
});
}
async [PRIVATE_RESET_METHOD]() {
await new Promise((resolve, reject) => {
this.#connection.reset((error) => {
if (error) {
return reject(error);
}
resolve();
});
});
}
async [PRIVATE_VALIDATE_METHOD]() {
if (this.#hasSocketError || this.#isConnectionClosed()) {
return false;
}
try {
const deferred = new Deferred();
const request = new MssqlRequest({
compiledQuery: CompiledQuery.raw('select 1'),
onDone: deferred,
tedious: this.#tedious,
});
this.#connection.execSql(request.request);
await deferred.promise;
return true;
}
catch {
return false;
}
}
#isConnectionClosed() {
return 'closed' in this.#connection && Boolean(this.#connection.closed);
}
}
/**
 * Wraps a tedious `Request` and buffers its rows.
 *
 * Interested parties register callbacks in `#subscribers`; each subscriber
 * is invoked with one of three events: 'chunkReady' (streaming only, enough
 * rows buffered), 'completed' (request finished) or 'error'.
 */
class MssqlRequest {
    // The underlying tedious Request.
    #request;
    // Rows buffered so far (drained by readChunk when streaming).
    #rows;
    // When set, the request streams: the tedious request is paused whenever
    // this many rows are buffered.
    #streamChunkSize;
    // Event subscribers keyed by a subscription name.
    #subscribers;
    // The injected tedious module.
    #tedious;
    // Row count reported by tedious when the request completes.
    #rowCount;
    constructor(props) {
        const { compiledQuery, onDone, streamChunkSize, tedious } = props;
        this.#rows = [];
        this.#streamChunkSize = streamChunkSize;
        this.#subscribers = {};
        this.#tedious = tedious;
        if (onDone) {
            // Non-streaming mode: settle the given deferred once the whole
            // result set is in, ignoring intermediate 'chunkReady' events.
            const subscriptionKey = 'onDone';
            this.#subscribers[subscriptionKey] = (event, error) => {
                if (event === 'chunkReady') {
                    return;
                }
                delete this.#subscribers[subscriptionKey];
                if (event === 'error') {
                    return onDone.reject(error);
                }
                onDone.resolve({
                    rowCount: this.#rowCount,
                    rows: this.#rows,
                });
            };
        }
        // The tedious completion callback: errors fan out to all
        // subscribers (unwrapping AggregateError), success stores the
        // reported row count for the 'completed' event handlers.
        this.#request = new this.#tedious.Request(compiledQuery.sql, (err, rowCount) => {
            if (err) {
                return Object.values(this.#subscribers).forEach((subscriber) => subscriber('error', err instanceof AggregateError ? err.errors : err));
            }
            this.#rowCount = rowCount;
        });
        this.#addParametersToRequest(compiledQuery.parameters);
        this.#attachListeners();
    }
    get request() {
        return this.#request;
    }
    // Resumes the (paused) request and resolves with the next chunk of
    // buffered rows once 'chunkReady' or 'completed' fires.
    readChunk() {
        const subscriptionKey = this.readChunk.name;
        return new Promise((resolve, reject) => {
            this.#subscribers[subscriptionKey] = (event, error) => {
                delete this.#subscribers[subscriptionKey];
                if (event === 'error') {
                    return reject(error);
                }
                resolve(this.#rows.splice(0, this.#streamChunkSize));
            };
            this.#request.resume();
        });
    }
    // Binds positional parameters under 1-based names ('1', '2', ...),
    // matching the '@1', '@2', ... placeholders the query compiler emits.
    #addParametersToRequest(parameters) {
        for (let i = 0; i < parameters.length; i++) {
            const parameter = parameters[i];
            this.#request.addParameter(String(i + 1), this.#getTediousDataType(parameter), parameter);
        }
    }
    #attachListeners() {
        // In streaming mode, pause the request once a full chunk is
        // buffered and tell subscribers a chunk is ready. No-op otherwise.
        const pauseAndEmitChunkReady = this.#streamChunkSize
            ? () => {
                if (this.#streamChunkSize <= this.#rows.length) {
                    this.#request.pause();
                    Object.values(this.#subscribers).forEach((subscriber) => subscriber('chunkReady'));
                }
            }
            : () => { };
        const rowListener = (columns) => {
            const row = {};
            for (const column of columns) {
                row[column.metadata.colName] = column.value;
            }
            this.#rows.push(row);
            pauseAndEmitChunkReady();
        };
        this.#request.on('row', rowListener);
        this.#request.once('requestCompleted', () => {
            Object.values(this.#subscribers).forEach((subscriber) => subscriber('completed'));
            this.#request.off('row', rowListener);
        });
    }
    // Picks a tedious parameter type based on the JS value. Whole numbers
    // outside the 32-bit signed integer range are bound as BigInt; anything
    // unrecognized falls back to NVarChar.
    #getTediousDataType(value) {
        if (isNull(value) || isUndefined(value) || isString(value)) {
            return this.#tedious.TYPES.NVarChar;
        }
        if (isBigInt(value) || (isNumber(value) && value % 1 === 0)) {
            if (value < -2147483648 || value > 2147483647) {
                return this.#tedious.TYPES.BigInt;
            }
            else {
                return this.#tedious.TYPES.Int;
            }
        }
        if (isNumber(value)) {
            return this.#tedious.TYPES.Float;
        }
        if (isBoolean(value)) {
            return this.#tedious.TYPES.Bit;
        }
        if (isDate(value)) {
            return this.#tedious.TYPES.DateTime;
        }
        if (isBuffer(value)) {
            return this.#tedious.TYPES.VarBinary;
        }
        return this.#tedious.TYPES.NVarChar;
    }
}

View File

@@ -0,0 +1,20 @@
import type { Kysely } from '../../kysely.js';
import type { DatabaseIntrospector, DatabaseMetadata, DatabaseMetadataOptions, SchemaMetadata, TableMetadata } from '../database-introspector.js';
/**
 * MSSQL implementation of {@link DatabaseIntrospector}. Reads metadata
 * from the `sys` catalog views (`sys.schemas`, `sys.tables`, `sys.views`,
 * `sys.columns`, `sys.types`).
 */
export declare class MssqlIntrospector implements DatabaseIntrospector {
    #private;
    constructor(db: Kysely<any>);
    /**
     * Get schema metadata.
     */
    getSchemas(): Promise<SchemaMetadata[]>;
    /**
     * Get tables and views metadata.
     */
    getTables(options?: DatabaseMetadataOptions): Promise<TableMetadata[]>;
    /**
     * Get the database metadata such as table and column names.
     *
     * @deprecated Use getTables() instead.
     */
    getMetadata(options?: DatabaseMetadataOptions): Promise<DatabaseMetadata>;
}

View File

@@ -0,0 +1,107 @@
/// <reference types="./mssql-introspector.d.ts" />
import { DEFAULT_MIGRATION_LOCK_TABLE, DEFAULT_MIGRATION_TABLE, } from '../../migration/migrator.js';
import { freeze } from '../../util/object-utils.js';
/**
 * MSSQL implementation of the database introspector. Reads table, view and
 * column metadata from the `sys` catalog views.
 */
export class MssqlIntrospector {
    #db;
    constructor(db) {
        this.#db = db;
    }
    async getSchemas() {
        return await this.#db.selectFrom('sys.schemas').select('name').execute();
    }
    async getTables(options = { withInternalKyselyTables: false }) {
        // One row per column; tables and views are fetched with two
        // structurally identical queries combined with UNION ALL. Column
        // comments come from the MS_Description extended property.
        const rawColumns = await this.#db
            .selectFrom('sys.tables as tables')
            .leftJoin('sys.schemas as table_schemas', 'table_schemas.schema_id', 'tables.schema_id')
            .innerJoin('sys.columns as columns', 'columns.object_id', 'tables.object_id')
            .innerJoin('sys.types as types', 'types.user_type_id', 'columns.user_type_id')
            .leftJoin('sys.schemas as type_schemas', 'type_schemas.schema_id', 'types.schema_id')
            .leftJoin('sys.extended_properties as comments', (join) => join
            .onRef('comments.major_id', '=', 'tables.object_id')
            .onRef('comments.minor_id', '=', 'columns.column_id')
            .on('comments.name', '=', 'MS_Description'))
            .$if(!options.withInternalKyselyTables, (qb) => qb
            .where('tables.name', '!=', DEFAULT_MIGRATION_TABLE)
            .where('tables.name', '!=', DEFAULT_MIGRATION_LOCK_TABLE))
            .select([
            'tables.name as table_name',
            (eb) => eb
                .ref('tables.type')
                .$castTo()
                .as('table_type'),
            'table_schemas.name as table_schema_name',
            'columns.default_object_id as column_default_object_id',
            'columns.generated_always_type_desc as column_generated_always_type',
            'columns.is_computed as column_is_computed',
            'columns.is_identity as column_is_identity',
            'columns.is_nullable as column_is_nullable',
            'columns.is_rowguidcol as column_is_rowguidcol',
            'columns.name as column_name',
            'types.is_nullable as type_is_nullable',
            'types.name as type_name',
            'type_schemas.name as type_schema_name',
            'comments.value as column_comment',
        ])
            .unionAll(this.#db
            .selectFrom('sys.views as views')
            .leftJoin('sys.schemas as view_schemas', 'view_schemas.schema_id', 'views.schema_id')
            .innerJoin('sys.columns as columns', 'columns.object_id', 'views.object_id')
            .innerJoin('sys.types as types', 'types.user_type_id', 'columns.user_type_id')
            .leftJoin('sys.schemas as type_schemas', 'type_schemas.schema_id', 'types.schema_id')
            .leftJoin('sys.extended_properties as comments', (join) => join
            .onRef('comments.major_id', '=', 'views.object_id')
            .onRef('comments.minor_id', '=', 'columns.column_id')
            .on('comments.name', '=', 'MS_Description'))
            .select([
            'views.name as table_name',
            'views.type as table_type',
            'view_schemas.name as table_schema_name',
            'columns.default_object_id as column_default_object_id',
            'columns.generated_always_type_desc as column_generated_always_type',
            'columns.is_computed as column_is_computed',
            'columns.is_identity as column_is_identity',
            'columns.is_nullable as column_is_nullable',
            'columns.is_rowguidcol as column_is_rowguidcol',
            'columns.name as column_name',
            'types.is_nullable as type_is_nullable',
            'types.name as type_name',
            'type_schemas.name as type_schema_name',
            'comments.value as column_comment',
        ]))
            .orderBy('table_schema_name')
            .orderBy('table_name')
            .orderBy('column_name')
            .execute();
        // Group the per-column rows into TableMetadata objects keyed by
        // "<schema>.<table>".
        const tableDictionary = {};
        for (const rawColumn of rawColumns) {
            const key = `${rawColumn.table_schema_name}.${rawColumn.table_name}`;
            const table = (tableDictionary[key] =
                tableDictionary[key] ||
                    freeze({
                        columns: [],
                        // sys objects report type as a padded char; views are 'V '.
                        isView: rawColumn.table_type === 'V ',
                        name: rawColumn.table_name,
                        schema: rawColumn.table_schema_name ?? undefined,
                    }));
            table.columns.push(freeze({
                dataType: rawColumn.type_name,
                dataTypeSchema: rawColumn.type_schema_name ?? undefined,
                // A column counts as defaulted when it has a default object,
                // is generated-always, identity, computed, or a rowguidcol.
                hasDefaultValue: rawColumn.column_default_object_id > 0 ||
                    rawColumn.column_generated_always_type !== 'NOT_APPLICABLE' ||
                    rawColumn.column_is_identity ||
                    rawColumn.column_is_computed ||
                    rawColumn.column_is_rowguidcol,
                isAutoIncrementing: rawColumn.column_is_identity,
                isNullable: rawColumn.column_is_nullable && rawColumn.type_is_nullable,
                name: rawColumn.column_name,
                comment: rawColumn.column_comment ?? undefined,
            }));
        }
        return Object.values(tableDictionary);
    }
    async getMetadata(options) {
        return {
            tables: await this.getTables(options),
        };
    }
}

View File

@@ -0,0 +1,17 @@
import type { AddColumnNode } from '../../operation-node/add-column-node.js';
import type { AlterTableColumnAlterationNode } from '../../operation-node/alter-table-node.js';
import type { DropColumnNode } from '../../operation-node/drop-column-node.js';
import type { OffsetNode } from '../../operation-node/offset-node.js';
import type { MergeQueryNode } from '../../operation-node/merge-query-node.js';
import { DefaultQueryCompiler } from '../../query-compiler/default-query-compiler.js';
import type { CollateNode } from '../../operation-node/collate-node.js';
/**
 * Query compiler for MSSQL (T-SQL) specifics: named `@N` parameters,
 * `offset ... rows`, grouped `alter table` column commands and statement
 * terminators for `merge` queries.
 */
export declare class MssqlQueryCompiler extends DefaultQueryCompiler {
    /** MSSQL parameter placeholders are named: `@1`, `@2`, ... */
    protected getCurrentParameterPlaceholder(): string;
    /** Appends ` rows` after the offset value (`offset N rows`). */
    protected visitOffset(node: OffsetNode): void;
    /** Groups column alterations by kind since the command keyword may appear only once. */
    protected compileColumnAlterations(columnAlterations: readonly AlterTableColumnAlterationNode[]): void;
    protected visitAddColumn(node: AddColumnNode): void;
    protected visitDropColumn(node: DropColumnNode): void;
    /** MSSQL requires `merge` statements to be terminated with `;`. */
    protected visitMergeQuery(node: MergeQueryNode): void;
    /** Validates and appends a collation name. */
    protected visitCollate(node: CollateNode): void;
    protected announcesNewColumnDataType(): boolean;
}

View File

@@ -0,0 +1,80 @@
/// <reference types="./mssql-query-compiler.d.ts" />
import { DefaultQueryCompiler } from '../../query-compiler/default-query-compiler.js';
const COLLATION_CHAR_REGEX = /^[a-z0-9_]$/i;
/**
 * Query compiler for MSSQL (T-SQL) specifics: named `@N` parameters,
 * `offset ... rows`, grouped `alter table` column commands and statement
 * terminators for `merge` queries.
 */
export class MssqlQueryCompiler extends DefaultQueryCompiler {
    // MSSQL parameter placeholders are named: `@1`, `@2`, ...
    getCurrentParameterPlaceholder() {
        return `@${this.numParameters}`;
    }
    // T-SQL offset syntax is `offset N rows`.
    visitOffset(node) {
        super.visitOffset(node);
        this.append(' rows');
    }
    // mssql allows multi-column alterations in a single statement,
    // but you can only use the command keyword/s once.
    // it also doesn't support multiple kinds of commands in the same
    // alter table statement, but we compile that anyway for the sake
    // of WYSIWYG.
    compileColumnAlterations(columnAlterations) {
        // Group the alterations by node kind so each command keyword is
        // emitted once, followed by a comma-separated list.
        const nodesByKind = {};
        for (const columnAlteration of columnAlterations) {
            if (!nodesByKind[columnAlteration.kind]) {
                nodesByKind[columnAlteration.kind] = [];
            }
            nodesByKind[columnAlteration.kind].push(columnAlteration);
        }
        // `first` tracks whether anything has been emitted yet, so that a
        // `, ` separator is placed between every pair of command groups.
        // NOTE: it must be cleared in *every* branch — previously only the
        // add-column branch cleared it, which dropped the separator between
        // e.g. an alter-column group and a drop-column group.
        let first = true;
        if (nodesByKind.AddColumnNode) {
            this.append('add ');
            this.compileList(nodesByKind.AddColumnNode);
            first = false;
        }
        // multiple of these are not really supported by mssql,
        // but for the sake of WYSIWYG.
        if (nodesByKind.AlterColumnNode) {
            if (!first)
                this.append(', ');
            this.compileList(nodesByKind.AlterColumnNode);
            first = false;
        }
        if (nodesByKind.DropColumnNode) {
            if (!first)
                this.append(', ');
            this.append('drop column ');
            this.compileList(nodesByKind.DropColumnNode);
            first = false;
        }
        // not really supported by mssql, but for the sake of WYSIWYG.
        if (nodesByKind.ModifyColumnNode) {
            if (!first)
                this.append(', ');
            this.compileList(nodesByKind.ModifyColumnNode);
            first = false;
        }
        // not really supported by mssql, but for the sake of WYSIWYG.
        if (nodesByKind.RenameColumnNode) {
            if (!first)
                this.append(', ');
            this.compileList(nodesByKind.RenameColumnNode);
        }
    }
    visitAddColumn(node) {
        this.visitNode(node.column);
    }
    visitDropColumn(node) {
        this.visitNode(node.column);
    }
    // MSSQL requires `merge` statements to be terminated with a semicolon.
    visitMergeQuery(node) {
        super.visitMergeQuery(node);
        this.append(';');
    }
    visitCollate(node) {
        this.append('collate ');
        const { name } = node.collation;
        // Collation names can't be parameterized, so restrict them to a safe
        // character set to prevent SQL injection through the collation name.
        for (const char of name) {
            if (!COLLATION_CHAR_REGEX.test(char)) {
                throw new Error(`Invalid collation: ${name}`);
            }
        }
        this.append(name);
    }
    announcesNewColumnDataType() {
        return false;
    }
}

View File

@@ -0,0 +1,80 @@
import type { Kysely } from '../../kysely.js';
import { DialectAdapterBase } from '../dialect-adapter-base.js';
import type { MigrationLockOptions } from '../dialect-adapter.js';
/**
 * Dialect adapter for MySQL. Migration locking is implemented with MySQL's
 * `get_lock`/`release_lock` functions; transactional DDL and `returning`
 * are not supported.
 */
export declare class MysqlAdapter extends DialectAdapterBase {
    /**
     * Whether or not this dialect supports transactional DDL.
     *
     * If this is true, migrations are executed inside a transaction.
     */
    get supportsTransactionalDdl(): boolean;
    /**
     * Whether or not this dialect supports the `returning` in inserts
     * updates and deletes.
     */
    get supportsReturning(): boolean;
    /**
     * This method is used to acquire a lock for the migrations so that
     * it's not possible for two migration operations to run in parallel.
     *
     * Most dialects have explicit locks that can be used, like advisory locks
     * in PostgreSQL and the get_lock function in MySQL.
     *
     * If the dialect doesn't have explicit locks the {@link MigrationLockOptions.lockTable}
     * created by Kysely can be used instead. You can access it through the `options` object.
     * The lock table has two columns `id` and `is_locked` and there's only one row in the table
     * whose id is {@link MigrationLockOptions.lockRowId}. `is_locked` is an integer. Kysely
     * takes care of creating the lock table and inserting the one single row to it before this
     * method is executed. If the dialect supports schemas and the user has specified a custom
     * schema in their migration settings, the options object also contains the schema name in
     * {@link MigrationLockOptions.lockTableSchema}.
     *
     * Here's an example of how you might implement this method for a dialect that doesn't
     * have explicit locks but supports `FOR UPDATE` row locks and transactional DDL:
     *
     * ```ts
     * import { DialectAdapterBase, type MigrationLockOptions, Kysely } from 'kysely'
     *
     * export class MyAdapter extends DialectAdapterBase {
     *   override async acquireMigrationLock(
     *     db: Kysely<any>,
     *     options: MigrationLockOptions
     *   ): Promise<void> {
     *     const queryDb = options.lockTableSchema
     *       ? db.withSchema(options.lockTableSchema)
     *       : db
     *
     *     // Since our imaginary dialect supports transactional DDL and has
     *     // row locks, we can simply take a row lock here and it will guarantee
     *     // all subsequent calls to this method from other transactions will
     *     // wait until this transaction finishes.
     *     await queryDb
     *       .selectFrom(options.lockTable)
     *       .selectAll()
     *       .where('id', '=', options.lockRowId)
     *       .forUpdate()
     *       .execute()
     *   }
     *
     *   override async releaseMigrationLock() {
     *     // noop
     *   }
     * }
     * ```
     *
     * If `supportsTransactionalDdl` is `true` then the `db` passed to this method
     * is a transaction inside which the migrations will be executed. Otherwise
     * `db` is a single connection (session) that will be used to execute the
     * migrations.
     */
    acquireMigrationLock(db: Kysely<any>, _opt: MigrationLockOptions): Promise<void>;
    /**
     * Releases the migration lock. See {@link acquireMigrationLock}.
     *
     * If `supportsTransactionalDdl` is `true` then the `db` passed to this method
     * is a transaction inside which the migrations were executed. Otherwise `db`
     * is a single connection (session) that was used to execute the migrations
     * and the `acquireMigrationLock` call.
     */
    releaseMigrationLock(db: Kysely<any>, _opt: MigrationLockOptions): Promise<void>;
}

View File

@@ -0,0 +1,25 @@
/// <reference types="./mysql-adapter.d.ts" />
import { sql } from '../../raw-builder/sql.js';
import { DialectAdapterBase } from '../dialect-adapter-base.js';
// A fixed, globally unique name for the migration advisory lock.
const LOCK_ID = 'ea586330-2c93-47c8-908d-981d9d270f9d';
// How long `get_lock` waits for the lock before giving up (one hour).
const LOCK_TIMEOUT_SECONDS = 60 * 60;
/**
 * Dialect adapter for MySQL. Migration locking is implemented with MySQL's
 * `get_lock`/`release_lock` functions.
 */
export class MysqlAdapter extends DialectAdapterBase {
    get supportsTransactionalDdl() {
        return false;
    }
    get supportsReturning() {
        return false;
    }
    async acquireMigrationLock(db, _opt) {
        // Kysely uses a single connection to run the migrations. Because of that, we
        // can take a lock using `get_lock`. Locks acquired using `get_lock` get
        // released when the connection is destroyed (session ends) or when the lock
        // is released using `release_lock`. This way we know that the lock is either
        // released by us after successful or failed migrations OR it's released by
        // MySQL if the process gets killed for some reason.
        await sql `select get_lock(${sql.lit(LOCK_ID)}, ${sql.lit(LOCK_TIMEOUT_SECONDS)})`.execute(db);
    }
    async releaseMigrationLock(db, _opt) {
        await sql `select release_lock(${sql.lit(LOCK_ID)})`.execute(db);
    }
}

View File

@@ -0,0 +1,56 @@
import type { DatabaseConnection } from '../../driver/database-connection.js';
/**
* Config for the MySQL dialect.
*
* https://github.com/sidorares/node-mysql2#using-connection-pools
*/
export interface MysqlDialectConfig {
    /**
     * A mysql2 Pool instance or a function that returns one.
     *
     * If a function is provided, it's called once when the first query is executed.
     *
     * https://github.com/sidorares/node-mysql2#using-connection-pools
     */
    pool: MysqlPool | (() => Promise<MysqlPool>);
    /**
     * Called once for each created connection.
     */
    onCreateConnection?: (connection: DatabaseConnection) => Promise<void>;
    /**
     * Called every time a connection is acquired from the connection pool.
     */
    onReserveConnection?: (connection: DatabaseConnection) => Promise<void>;
}
/**
 * This interface is the subset of mysql2 driver's `Pool` class that
 * kysely needs.
 *
 * We don't use the type from `mysql2` here to not have a dependency to it.
 *
 * https://github.com/sidorares/node-mysql2#using-connection-pools
 */
export interface MysqlPool {
    getConnection(callback: (error: unknown, connection: MysqlPoolConnection) => void): void;
    end(callback: (error: unknown) => void): void;
}
/**
 * The subset of mysql2's `PoolConnection` that kysely needs. The first
 * `query` overload (no callback) is the streaming form, the second is the
 * buffered, callback-based form.
 */
export interface MysqlPoolConnection {
    query(sql: string, parameters: Array<unknown>): {
        stream: <T>(options: MysqlStreamOptions) => MysqlStream<T>;
    };
    query(sql: string, parameters: Array<unknown>, callback: (error: unknown, result: MysqlQueryResult) => void): void;
    release(): void;
}
/**
 * Options accepted by mysql2's query `stream` method.
 */
export interface MysqlStreamOptions {
    highWaterMark?: number;
    objectMode?: true;
}
/**
 * An async-iterable row stream returned by mysql2.
 */
export interface MysqlStream<T> {
    [Symbol.asyncIterator](): AsyncIterableIterator<T>;
}
/**
 * The "OK packet" mysql2 returns for statements that don't produce a
 * result set (inserts, updates, deletes).
 */
export interface MysqlOkPacket {
    affectedRows: number;
    changedRows: number;
    insertId: number;
}
/** A query result: either an OK packet or an array of row objects. */
export type MysqlQueryResult = MysqlOkPacket | Record<string, unknown>[];

View File

@@ -0,0 +1,2 @@
/// <reference types="./mysql-dialect-config.d.ts" />
export {};

View File

@@ -0,0 +1,61 @@
import type { Driver } from '../../driver/driver.js';
import type { Kysely } from '../../kysely.js';
import type { QueryCompiler } from '../../query-compiler/query-compiler.js';
import type { Dialect } from '../dialect.js';
import type { DatabaseIntrospector } from '../database-introspector.js';
import type { DialectAdapter } from '../dialect-adapter.js';
import type { MysqlDialectConfig } from './mysql-dialect-config.js';
/**
* MySQL dialect that uses the [mysql2](https://github.com/sidorares/node-mysql2#readme) library.
*
* The constructor takes an instance of {@link MysqlDialectConfig}.
*
* ```ts
* import { createPool } from 'mysql2'
*
* new MysqlDialect({
* pool: createPool({
* database: 'some_db',
* host: 'localhost',
* })
* })
* ```
*
* If you want the pool to only be created once it's first used, `pool`
* can be a function:
*
* ```ts
* import { createPool } from 'mysql2'
*
* new MysqlDialect({
* pool: async () => createPool({
* database: 'some_db',
* host: 'localhost',
* })
* })
* ```
*/
export declare class MysqlDialect implements Dialect {
    #private;
    /** @param config The dialect configuration, including the mysql2 pool. */
    constructor(config: MysqlDialectConfig);
    /**
     * Creates a driver for the dialect.
     */
    createDriver(): Driver;
    /**
     * Creates a query compiler for the dialect.
     */
    createQueryCompiler(): QueryCompiler;
    /**
     * Creates an adapter for the dialect.
     */
    createAdapter(): DialectAdapter;
    /**
     * Creates a database introspector that can be used to get database metadata
     * such as the table names and column names of those tables.
     *
     * `db` never has any plugins installed. It's created using
     * {@link Kysely.withoutPlugins}.
     */
    createIntrospector(db: Kysely<any>): DatabaseIntrospector;
}

View File

@@ -0,0 +1,53 @@
/// <reference types="./mysql-dialect.d.ts" />
import { MysqlDriver } from './mysql-driver.js';
import { MysqlQueryCompiler } from './mysql-query-compiler.js';
import { MysqlIntrospector } from './mysql-introspector.js';
import { MysqlAdapter } from './mysql-adapter.js';
/**
* MySQL dialect that uses the [mysql2](https://github.com/sidorares/node-mysql2#readme) library.
*
* The constructor takes an instance of {@link MysqlDialectConfig}.
*
* ```ts
* import { createPool } from 'mysql2'
*
* new MysqlDialect({
* pool: createPool({
* database: 'some_db',
* host: 'localhost',
* })
* })
* ```
*
* If you want the pool to only be created once it's first used, `pool`
* can be a function:
*
* ```ts
* import { createPool } from 'mysql2'
*
* new MysqlDialect({
* pool: async () => createPool({
* database: 'some_db',
* host: 'localhost',
* })
* })
* ```
*/
export class MysqlDialect {
    // The configuration given at construction time; handed to the driver
    // verbatim.
    #dialectConfig;
    constructor(config) {
        this.#dialectConfig = config;
    }
    /** Creates the mysql2-backed driver for this dialect. */
    createDriver() {
        return new MysqlDriver(this.#dialectConfig);
    }
    /** Creates the MySQL query compiler. */
    createQueryCompiler() {
        return new MysqlQueryCompiler();
    }
    /** Creates the MySQL dialect adapter. */
    createAdapter() {
        return new MysqlAdapter();
    }
    /** Creates an introspector for reading table/column metadata. */
    createIntrospector(db) {
        return new MysqlIntrospector(db);
    }
}

View File

@@ -0,0 +1,52 @@
import type { DatabaseConnection, QueryResult } from '../../driver/database-connection.js';
import type { Driver, TransactionSettings } from '../../driver/driver.js';
import { CompiledQuery } from '../../query-compiler/compiled-query.js';
import type { QueryCompiler } from '../../query-compiler/query-compiler.js';
import type { MysqlDialectConfig, MysqlPoolConnection } from './mysql-dialect-config.js';
// Module-private symbol used to expose the connection's release method to
// the driver without making it part of the public API.
declare const PRIVATE_RELEASE_METHOD: unique symbol;
export declare class MysqlDriver implements Driver {
    #private;
    constructor(configOrPool: MysqlDialectConfig);
    /**
     * Initializes the driver.
     *
     * After calling this method the driver should be usable and `acquireConnection` etc.
     * methods should be callable.
     */
    init(): Promise<void>;
    /**
     * Acquires a new connection from the pool.
     */
    acquireConnection(): Promise<DatabaseConnection>;
    /**
     * Begins a transaction.
     */
    beginTransaction(connection: DatabaseConnection, settings: TransactionSettings): Promise<void>;
    /**
     * Commits a transaction.
     */
    commitTransaction(connection: DatabaseConnection): Promise<void>;
    /**
     * Rolls back a transaction.
     */
    rollbackTransaction(connection: DatabaseConnection): Promise<void>;
    /** Creates a savepoint by compiling and executing a `savepoint` command. */
    savepoint(connection: DatabaseConnection, savepointName: string, compileQuery: QueryCompiler['compileQuery']): Promise<void>;
    /** Rolls back to a previously created savepoint. */
    rollbackToSavepoint(connection: DatabaseConnection, savepointName: string, compileQuery: QueryCompiler['compileQuery']): Promise<void>;
    /** Releases (discards) a previously created savepoint. */
    releaseSavepoint(connection: DatabaseConnection, savepointName: string, compileQuery: QueryCompiler['compileQuery']): Promise<void>;
    /**
     * Releases a connection back to the pool.
     */
    releaseConnection(connection: MysqlConnection): Promise<void>;
    /**
     * Destroys the driver and releases all resources.
     */
    destroy(): Promise<void>;
}
/** A kysely database connection wrapping a mysql2 pool connection. */
declare class MysqlConnection implements DatabaseConnection {
    #private;
    constructor(rawConnection: MysqlPoolConnection);
    executeQuery<O>(compiledQuery: CompiledQuery): Promise<QueryResult<O>>;
    streamQuery<O>(compiledQuery: CompiledQuery, _chunkSize: number): AsyncIterableIterator<QueryResult<O>>;
    [PRIVATE_RELEASE_METHOD](): void;
}
export {};

View File

@@ -0,0 +1,177 @@
/// <reference types="./mysql-driver.d.ts" />
import { parseSavepointCommand } from '../../parser/savepoint-parser.js';
import { CompiledQuery } from '../../query-compiler/compiled-query.js';
import { isFunction, isObject, freeze } from '../../util/object-utils.js';
import { createQueryId } from '../../util/query-id.js';
import { extendStackTrace } from '../../util/stack-trace-utils.js';
// Module-private symbol used to expose the connection's release method to
// the driver without making it part of the public API.
const PRIVATE_RELEASE_METHOD = Symbol();
/**
 * Driver that runs queries through a mysql2 connection pool.
 */
export class MysqlDriver {
    #config;
    // Maps each raw mysql2 connection to its kysely wrapper so the wrapper
    // (and the onCreateConnection hook) is created only once per raw
    // connection.
    #connections = new WeakMap();
    #pool;
    constructor(configOrPool) {
        this.#config = freeze({ ...configOrPool });
    }
    async init() {
        // `pool` may be a factory function; in that case it's invoked once
        // here, lazily, when the driver is initialized.
        this.#pool = isFunction(this.#config.pool)
            ? await this.#config.pool()
            : this.#config.pool;
    }
    async acquireConnection() {
        const rawConnection = await this.#acquireConnection();
        let connection = this.#connections.get(rawConnection);
        if (!connection) {
            connection = new MysqlConnection(rawConnection);
            this.#connections.set(rawConnection, connection);
            // The driver must take care of calling `onCreateConnection` when a new
            // connection is created. The `mysql2` module doesn't provide an async hook
            // for the connection creation. We need to call the method explicitly.
            if (this.#config?.onCreateConnection) {
                await this.#config.onCreateConnection(connection);
            }
        }
        // Unlike onCreateConnection, this hook runs on every acquisition.
        if (this.#config?.onReserveConnection) {
            await this.#config.onReserveConnection(connection);
        }
        return connection;
    }
    // Promisifies mysql2's callback-based getConnection.
    async #acquireConnection() {
        return new Promise((resolve, reject) => {
            this.#pool.getConnection(async (err, rawConnection) => {
                if (err) {
                    reject(err);
                }
                else {
                    resolve(rawConnection);
                }
            });
        });
    }
    async beginTransaction(connection, settings) {
        if (settings.isolationLevel || settings.accessMode) {
            const parts = [];
            if (settings.isolationLevel) {
                parts.push(`isolation level ${settings.isolationLevel}`);
            }
            if (settings.accessMode) {
                parts.push(settings.accessMode);
            }
            const sql = `set transaction ${parts.join(', ')}`;
            // On MySQL this sets the isolation level of the next transaction.
            await connection.executeQuery(CompiledQuery.raw(sql));
        }
        await connection.executeQuery(CompiledQuery.raw('begin'));
    }
    async commitTransaction(connection) {
        await connection.executeQuery(CompiledQuery.raw('commit'));
    }
    async rollbackTransaction(connection) {
        await connection.executeQuery(CompiledQuery.raw('rollback'));
    }
    async savepoint(connection, savepointName, compileQuery) {
        await connection.executeQuery(compileQuery(parseSavepointCommand('savepoint', savepointName), createQueryId()));
    }
    async rollbackToSavepoint(connection, savepointName, compileQuery) {
        await connection.executeQuery(compileQuery(parseSavepointCommand('rollback to', savepointName), createQueryId()));
    }
    async releaseSavepoint(connection, savepointName, compileQuery) {
        await connection.executeQuery(compileQuery(parseSavepointCommand('release savepoint', savepointName), createQueryId()));
    }
    async releaseConnection(connection) {
        // Returns the underlying mysql2 connection to the pool.
        connection[PRIVATE_RELEASE_METHOD]();
    }
    async destroy() {
        return new Promise((resolve, reject) => {
            this.#pool.end((err) => {
                if (err) {
                    reject(err);
                }
                else {
                    resolve();
                }
            });
        });
    }
}
// True when `obj` looks like a mysql2 OK packet (the result of a statement
// that produces no result set) rather than an array of rows.
function isOkPacket(obj) {
    const okPacketKeys = ['insertId', 'affectedRows'];
    return isObject(obj) && okPacketKeys.every((key) => key in obj);
}
/**
 * A kysely database connection wrapping a mysql2 pool connection.
 */
class MysqlConnection {
    // The raw mysql2 pool connection.
    #rawConnection;
    constructor(rawConnection) {
        this.#rawConnection = rawConnection;
    }
    async executeQuery(compiledQuery) {
        try {
            const result = await this.#executeQuery(compiledQuery);
            if (isOkPacket(result)) {
                // Mutation results: translate the OK packet's counters into
                // kysely's bigint-based result shape. An insertId of '0'
                // means no auto-generated id, so it's reported as undefined.
                const { insertId, affectedRows, changedRows } = result;
                return {
                    insertId: insertId !== undefined &&
                        insertId !== null &&
                        insertId.toString() !== '0'
                        ? BigInt(insertId)
                        : undefined,
                    numAffectedRows: affectedRows !== undefined && affectedRows !== null
                        ? BigInt(affectedRows)
                        : undefined,
                    numChangedRows: changedRows !== undefined && changedRows !== null
                        ? BigInt(changedRows)
                        : undefined,
                    rows: [],
                };
            }
            else if (Array.isArray(result)) {
                // A select-like result: an array of row objects.
                return {
                    rows: result,
                };
            }
            return {
                rows: [],
            };
        }
        catch (err) {
            throw extendStackTrace(err, new Error());
        }
    }
    // Promisifies mysql2's callback-based query method.
    #executeQuery(compiledQuery) {
        return new Promise((resolve, reject) => {
            this.#rawConnection.query(compiledQuery.sql, compiledQuery.parameters, (err, result) => {
                if (err) {
                    reject(err);
                }
                else {
                    resolve(result);
                }
            });
        });
    }
    async *streamQuery(compiledQuery, _chunkSize) {
        // mysql2 streams row-by-row; the chunk size parameter is ignored.
        const stream = this.#rawConnection
            .query(compiledQuery.sql, compiledQuery.parameters)
            .stream({
            objectMode: true,
        });
        try {
            for await (const row of stream) {
                yield {
                    rows: [row],
                };
            }
        }
        catch (ex) {
            if (ex &&
                typeof ex === 'object' &&
                'code' in ex &&
                // @ts-ignore
                ex.code === 'ERR_STREAM_PREMATURE_CLOSE') {
                // Most likely because of https://github.com/mysqljs/mysql/blob/master/lib/protocol/sequences/Query.js#L220
                return;
            }
            throw ex;
        }
    }
    [PRIVATE_RELEASE_METHOD]() {
        this.#rawConnection.release();
    }
}

View File

@@ -0,0 +1,20 @@
import type { DatabaseIntrospector, DatabaseMetadata, DatabaseMetadataOptions, SchemaMetadata, TableMetadata } from '../database-introspector.js';
import type { Kysely } from '../../kysely.js';
/**
 * MySQL implementation of {@link DatabaseIntrospector}. Reads metadata
 * from the `information_schema` views.
 */
export declare class MysqlIntrospector implements DatabaseIntrospector {
    #private;
    constructor(db: Kysely<any>);
    /**
     * Get schema metadata.
     */
    getSchemas(): Promise<SchemaMetadata[]>;
    /**
     * Get tables and views metadata.
     */
    getTables(options?: DatabaseMetadataOptions): Promise<TableMetadata[]>;
    /**
     * Get the database metadata such as table and column names.
     *
     * @deprecated Use getTables() instead.
     */
    getMetadata(options?: DatabaseMetadataOptions): Promise<DatabaseMetadata>;
}

View File

@@ -0,0 +1,76 @@
/// <reference types="./mysql-introspector.d.ts" />
import { DEFAULT_MIGRATION_LOCK_TABLE, DEFAULT_MIGRATION_TABLE, } from '../../migration/migrator.js';
import { freeze } from '../../util/object-utils.js';
import { sql } from '../../raw-builder/sql.js';
/**
 * MySQL implementation of the database introspector. Reads table and
 * column metadata from the `information_schema` views of the current
 * database (`database()`).
 */
export class MysqlIntrospector {
    #db;
    constructor(db) {
        this.#db = db;
    }
    async getSchemas() {
        // information_schema result columns come back upper-cased.
        let rawSchemas = await this.#db
            .selectFrom('information_schema.schemata')
            .select('schema_name')
            .$castTo()
            .execute();
        return rawSchemas.map((it) => ({ name: it.SCHEMA_NAME }));
    }
    async getTables(options = { withInternalKyselyTables: false }) {
        // One row per column, ordered by table then ordinal position so the
        // grouping below sees each table's columns contiguously.
        let query = this.#db
            .selectFrom('information_schema.columns as columns')
            .innerJoin('information_schema.tables as tables', (b) => b
            .onRef('columns.TABLE_CATALOG', '=', 'tables.TABLE_CATALOG')
            .onRef('columns.TABLE_SCHEMA', '=', 'tables.TABLE_SCHEMA')
            .onRef('columns.TABLE_NAME', '=', 'tables.TABLE_NAME'))
            .select([
            'columns.COLUMN_NAME',
            'columns.COLUMN_DEFAULT',
            'columns.TABLE_NAME',
            'columns.TABLE_SCHEMA',
            'tables.TABLE_TYPE',
            'columns.IS_NULLABLE',
            'columns.DATA_TYPE',
            'columns.EXTRA',
            'columns.COLUMN_COMMENT',
        ])
            .where('columns.TABLE_SCHEMA', '=', sql `database()`)
            .orderBy('columns.TABLE_NAME')
            .orderBy('columns.ORDINAL_POSITION')
            .$castTo();
        if (!options.withInternalKyselyTables) {
            // Hide kysely's own migration bookkeeping tables by default.
            query = query
                .where('columns.TABLE_NAME', '!=', DEFAULT_MIGRATION_TABLE)
                .where('columns.TABLE_NAME', '!=', DEFAULT_MIGRATION_LOCK_TABLE);
        }
        const rawColumns = await query.execute();
        return this.#parseTableMetadata(rawColumns);
    }
    async getMetadata(options) {
        return {
            tables: await this.getTables(options),
        };
    }
    // Groups the per-column rows into TableMetadata objects.
    #parseTableMetadata(columns) {
        return columns.reduce((tables, it) => {
            let table = tables.find((tbl) => tbl.name === it.TABLE_NAME);
            if (!table) {
                table = freeze({
                    name: it.TABLE_NAME,
                    isView: it.TABLE_TYPE === 'VIEW',
                    schema: it.TABLE_SCHEMA,
                    columns: [],
                });
                tables.push(table);
            }
            table.columns.push(freeze({
                name: it.COLUMN_NAME,
                dataType: it.DATA_TYPE,
                isNullable: it.IS_NULLABLE === 'YES',
                // EXTRA contains e.g. 'auto_increment' for identity columns.
                isAutoIncrementing: it.EXTRA.toLowerCase().includes('auto_increment'),
                hasDefaultValue: it.COLUMN_DEFAULT !== null,
                // MySQL reports a missing comment as an empty string.
                comment: it.COLUMN_COMMENT === '' ? undefined : it.COLUMN_COMMENT,
            }));
            return tables;
        }, []);
    }
}

View File

@@ -0,0 +1,13 @@
import type { CreateIndexNode } from '../../operation-node/create-index-node.js';
import { DefaultQueryCompiler } from '../../query-compiler/default-query-compiler.js';
/**
 * Query compiler for the MySQL dialect.
 *
 * Uses `?` positional parameter placeholders and backtick-quoted
 * identifiers, and overrides `visitCreateIndex` to emit MySQL's
 * `create index` clause order.
 */
export declare class MysqlQueryCompiler extends DefaultQueryCompiler {
    protected getCurrentParameterPlaceholder(): string;
    protected getLeftExplainOptionsWrapper(): string;
    protected getExplainOptionAssignment(): string;
    protected getExplainOptionsDelimiter(): string;
    protected getRightExplainOptionsWrapper(): string;
    protected getLeftIdentifierWrapper(): string;
    protected getRightIdentifierWrapper(): string;
    /** Escapes backticks inside identifiers by doubling them. */
    protected sanitizeIdentifier(identifier: string): string;
    protected visitCreateIndex(node: CreateIndexNode): void;
}

View File

@@ -0,0 +1,57 @@
/// <reference types="./mysql-query-compiler.d.ts" />
import { DefaultQueryCompiler } from '../../query-compiler/default-query-compiler.js';
/**
 * Query compiler for the MySQL dialect.
 *
 * Emits `?` positional parameter placeholders, backtick-quoted identifiers
 * and a MySQL-flavored `create index` statement.
 */
export class MysqlQueryCompiler extends DefaultQueryCompiler {
    getCurrentParameterPlaceholder() {
        return '?';
    }
    getLeftExplainOptionsWrapper() {
        return '';
    }
    getRightExplainOptionsWrapper() {
        return '';
    }
    getExplainOptionAssignment() {
        return '=';
    }
    getExplainOptionsDelimiter() {
        return ' ';
    }
    getLeftIdentifierWrapper() {
        return '`';
    }
    getRightIdentifierWrapper() {
        return '`';
    }
    sanitizeIdentifier(identifier) {
        // A backtick inside an identifier is escaped by doubling it.
        return identifier.replaceAll('`', '``');
    }
    visitCreateIndex(node) {
        this.append(node.unique ? 'create unique index ' : 'create index ');
        if (node.ifNotExists) {
            this.append('if not exists ');
        }
        this.visitNode(node.name);
        if (node.using) {
            this.append(' using ');
            this.visitNode(node.using);
        }
        if (node.table) {
            this.append(' on ');
            this.visitNode(node.table);
        }
        if (node.columns) {
            this.append(' (');
            this.compileList(node.columns);
            this.append(')');
        }
        if (node.where) {
            this.append(' ');
            this.visitNode(node.where);
        }
    }
}

View File

@@ -0,0 +1,80 @@
import type { Kysely } from '../../kysely.js';
import { DialectAdapterBase } from '../dialect-adapter-base.js';
import type { MigrationLockOptions } from '../dialect-adapter.js';
/**
 * Dialect adapter for PostgreSQL.
 */
export declare class PostgresAdapter extends DialectAdapterBase {
    /**
     * Whether or not this dialect supports transactional DDL.
     *
     * If this is true, migrations are executed inside a transaction.
     */
    get supportsTransactionalDdl(): boolean;
    /**
     * Whether or not this dialect supports the `returning` in inserts
     * updates and deletes.
     */
    get supportsReturning(): boolean;
    /**
     * This method is used to acquire a lock for the migrations so that
     * it's not possible for two migration operations to run in parallel.
     *
     * Most dialects have explicit locks that can be used, like advisory locks
     * in PostgreSQL and the get_lock function in MySQL.
     *
     * If the dialect doesn't have explicit locks the {@link MigrationLockOptions.lockTable}
     * created by Kysely can be used instead. You can access it through the `options` object.
     * The lock table has two columns `id` and `is_locked` and there's only one row in the table
     * whose id is {@link MigrationLockOptions.lockRowId}. `is_locked` is an integer. Kysely
     * takes care of creating the lock table and inserting the one single row to it before this
     * method is executed. If the dialect supports schemas and the user has specified a custom
     * schema in their migration settings, the options object also contains the schema name in
     * {@link MigrationLockOptions.lockTableSchema}.
     *
     * Here's an example of how you might implement this method for a dialect that doesn't
     * have explicit locks but supports `FOR UPDATE` row locks and transactional DDL:
     *
     * ```ts
     * import { DialectAdapterBase, type MigrationLockOptions, Kysely } from 'kysely'
     *
     * export class MyAdapter extends DialectAdapterBase {
     *   override async acquireMigrationLock(
     *     db: Kysely<any>,
     *     options: MigrationLockOptions
     *   ): Promise<void> {
     *     const queryDb = options.lockTableSchema
     *       ? db.withSchema(options.lockTableSchema)
     *       : db
     *
     *     // Since our imaginary dialect supports transactional DDL and has
     *     // row locks, we can simply take a row lock here and it will guarantee
     *     // all subsequent calls to this method from other transactions will
     *     // wait until this transaction finishes.
     *     await queryDb
     *       .selectFrom(options.lockTable)
     *       .selectAll()
     *       .where('id', '=', options.lockRowId)
     *       .forUpdate()
     *       .execute()
     *   }
     *
     *   override async releaseMigrationLock() {
     *     // noop
     *   }
     * }
     * ```
     *
     * If `supportsTransactionalDdl` is `true` then the `db` passed to this method
     * is a transaction inside which the migrations will be executed. Otherwise
     * `db` is a single connection (session) that will be used to execute the
     * migrations.
     */
    acquireMigrationLock(db: Kysely<any>, _opt: MigrationLockOptions): Promise<void>;
    /**
     * Releases the migration lock. See {@link acquireMigrationLock}.
     *
     * If `supportsTransactionalDdl` is `true` then the `db` passed to this method
     * is a transaction inside which the migrations were executed. Otherwise `db`
     * is a single connection (session) that was used to execute the migrations
     * and the `acquireMigrationLock` call.
     */
    releaseMigrationLock(_db: Kysely<any>, _opt: MigrationLockOptions): Promise<void>;
}

View File

@@ -0,0 +1,22 @@
/// <reference types="./postgres-adapter.d.ts" />
import { sql } from '../../raw-builder/sql.js';
import { DialectAdapterBase } from '../dialect-adapter-base.js';
// A fixed, arbitrary id for the transaction-scoped advisory lock used by
// the migration system. The value itself is meaningless; it only needs to
// be the same in every process that runs migrations.
const MIGRATION_LOCK_ID = BigInt('3853314791062309107');
/**
 * Dialect adapter for PostgreSQL.
 */
export class PostgresAdapter extends DialectAdapterBase {
    get supportsTransactionalDdl() {
        // DDL can run inside a transaction, so migrations are executed
        // transactionally.
        return true;
    }
    get supportsReturning() {
        return true;
    }
    async acquireMigrationLock(db, _opt) {
        // Take a transaction-level advisory lock. Concurrent migrators block
        // here until the transaction holding the lock ends.
        await sql `select pg_advisory_xact_lock(${sql.lit(MIGRATION_LOCK_ID)})`.execute(db);
    }
    async releaseMigrationLock(_db, _opt) {
        // Intentionally a no-op: `pg_advisory_xact_lock` is released
        // automatically when the transaction ends, and because
        // `supportsTransactionalDdl` is true, the `db` passed to
        // `acquireMigrationLock` is always a transaction.
    }
}

View File

@@ -0,0 +1,68 @@
import type { DatabaseConnection } from '../../driver/database-connection.js';
/**
* Config for the PostgreSQL dialect.
*/
export interface PostgresDialectConfig {
    /**
     * A postgres Pool instance or a function that returns one.
     *
     * If a function is provided, it's called once when the first query is executed.
     *
     * https://node-postgres.com/apis/pool
     */
    pool: PostgresPool | (() => Promise<PostgresPool>);
    /**
     * Cursor constructor used for streaming query results (see the example
     * below). Streaming queries throw if this is not provided.
     *
     * https://github.com/brianc/node-postgres/tree/master/packages/pg-cursor
     *
     * ```ts
     * import { PostgresDialect } from 'kysely'
     * import { Pool } from 'pg'
     * import Cursor from 'pg-cursor'
     * // or import * as Cursor from 'pg-cursor'
     *
     * new PostgresDialect({
     *   cursor: Cursor,
     *   pool: new Pool('postgres://localhost:5432/mydb')
     * })
     * ```
     */
    cursor?: PostgresCursorConstructor;
    /**
     * Called once for each created connection.
     */
    onCreateConnection?: (connection: DatabaseConnection) => Promise<void>;
    /**
     * Called every time a connection is acquired from the pool.
     */
    onReserveConnection?: (connection: DatabaseConnection) => Promise<void>;
}
/**
 * This interface is the subset of pg driver's `Pool` class that
 * kysely needs.
 *
 * We don't use the type from `pg` here to not have a dependency to it.
 *
 * https://node-postgres.com/apis/pool
 */
export interface PostgresPool {
    /** Checks out a client from the pool. */
    connect(): Promise<PostgresPoolClient>;
    /** Ends the pool; called when the driver is destroyed. */
    end(): Promise<void>;
}
/** The subset of pg driver's pool client that kysely needs. */
export interface PostgresPoolClient {
    /** Executes a parameterized SQL string. */
    query<R>(sql: string, parameters: ReadonlyArray<unknown>): Promise<PostgresQueryResult<R>>;
    /** Submits a cursor for streaming query results. */
    query<R>(cursor: PostgresCursor<R>): PostgresCursor<R>;
    /** Returns the client to the pool. */
    release(): void;
}
/** The subset of a `pg-cursor` instance that kysely needs. */
export interface PostgresCursor<T> {
    /**
     * Reads up to `rowsCount` rows. An empty result means the cursor is
     * exhausted.
     */
    read(rowsCount: number): Promise<T[]>;
    close(): Promise<void>;
}
export type PostgresCursorConstructor = new <T>(sql: string, parameters: unknown[]) => PostgresCursor<T>;
export interface PostgresQueryResult<R> {
    command: 'UPDATE' | 'DELETE' | 'INSERT' | 'SELECT' | 'MERGE';
    /** Number of rows affected by a data-modifying command. */
    rowCount: number;
    rows: R[];
}
export interface PostgresStream<T> {
    [Symbol.asyncIterator](): AsyncIterableIterator<T>;
}

View File

@@ -0,0 +1,2 @@
/// <reference types="./postgres-dialect-config.d.ts" />
export {};

View File

@@ -0,0 +1,61 @@
import type { Driver } from '../../driver/driver.js';
import type { Kysely } from '../../kysely.js';
import type { QueryCompiler } from '../../query-compiler/query-compiler.js';
import type { Dialect } from '../dialect.js';
import type { DatabaseIntrospector } from '../database-introspector.js';
import type { DialectAdapter } from '../dialect-adapter.js';
import type { PostgresDialectConfig } from './postgres-dialect-config.js';
/**
* PostgreSQL dialect that uses the [pg](https://node-postgres.com/) library.
*
* The constructor takes an instance of {@link PostgresDialectConfig}.
*
* ```ts
* import { Pool } from 'pg'
*
* new PostgresDialect({
* pool: new Pool({
* database: 'some_db',
* host: 'localhost',
* })
* })
* ```
*
* If you want the pool to only be created once it's first used, `pool`
* can be a function:
*
* ```ts
* import { Pool } from 'pg'
*
* new PostgresDialect({
* pool: async () => new Pool({
* database: 'some_db',
* host: 'localhost',
* })
* })
* ```
*/
export declare class PostgresDialect implements Dialect {
    #private;
    /**
     * Creates a new PostgreSQL dialect. See {@link PostgresDialectConfig}
     * for the available options.
     */
    constructor(config: PostgresDialectConfig);
    /**
     * Creates a driver for the dialect.
     */
    createDriver(): Driver;
    /**
     * Creates a query compiler for the dialect.
     */
    createQueryCompiler(): QueryCompiler;
    /**
     * Creates an adapter for the dialect.
     */
    createAdapter(): DialectAdapter;
    /**
     * Creates a database introspector that can be used to get database metadata
     * such as the table names and column names of those tables.
     *
     * `db` never has any plugins installed. It's created using
     * {@link Kysely.withoutPlugins}.
     */
    createIntrospector(db: Kysely<any>): DatabaseIntrospector;
}

View File

@@ -0,0 +1,53 @@
/// <reference types="./postgres-dialect.d.ts" />
import { PostgresDriver } from './postgres-driver.js';
import { PostgresIntrospector } from './postgres-introspector.js';
import { PostgresQueryCompiler } from './postgres-query-compiler.js';
import { PostgresAdapter } from './postgres-adapter.js';
/**
* PostgreSQL dialect that uses the [pg](https://node-postgres.com/) library.
*
* The constructor takes an instance of {@link PostgresDialectConfig}.
*
* ```ts
* import { Pool } from 'pg'
*
* new PostgresDialect({
* pool: new Pool({
* database: 'some_db',
* host: 'localhost',
* })
* })
* ```
*
* If you want the pool to only be created once it's first used, `pool`
* can be a function:
*
* ```ts
* import { Pool } from 'pg'
*
* new PostgresDialect({
* pool: async () => new Pool({
* database: 'some_db',
* host: 'localhost',
* })
* })
* ```
*/
export class PostgresDialect {
    // The user-supplied dialect configuration, handed to the driver.
    #dialectConfig;
    constructor(config) {
        this.#dialectConfig = config;
    }
    // Each factory below builds one collaborator of the dialect.
    createAdapter() {
        return new PostgresAdapter();
    }
    createDriver() {
        return new PostgresDriver(this.#dialectConfig);
    }
    createIntrospector(db) {
        return new PostgresIntrospector(db);
    }
    createQueryCompiler() {
        return new PostgresQueryCompiler();
    }
}

View File

@@ -0,0 +1,55 @@
import type { DatabaseConnection, QueryResult } from '../../driver/database-connection.js';
import type { Driver, TransactionSettings } from '../../driver/driver.js';
import { CompiledQuery } from '../../query-compiler/compiled-query.js';
import type { QueryCompiler } from '../../query-compiler/query-compiler.js';
import type { PostgresCursorConstructor, PostgresDialectConfig, PostgresPoolClient } from './postgres-dialect-config.js';
/**
 * Symbol used as a "private" method key so that only the driver, which
 * holds the symbol, can release a connection back to the pool.
 */
declare const PRIVATE_RELEASE_METHOD: unique symbol;
export declare class PostgresDriver implements Driver {
    #private;
    constructor(config: PostgresDialectConfig);
    /**
     * Initializes the driver.
     *
     * After calling this method the driver should be usable and `acquireConnection` etc.
     * methods should be callable.
     */
    init(): Promise<void>;
    /**
     * Acquires a new connection from the pool.
     */
    acquireConnection(): Promise<DatabaseConnection>;
    /**
     * Begins a transaction.
     */
    beginTransaction(connection: DatabaseConnection, settings: TransactionSettings): Promise<void>;
    /**
     * Commits a transaction.
     */
    commitTransaction(connection: DatabaseConnection): Promise<void>;
    /**
     * Rolls back a transaction.
     */
    rollbackTransaction(connection: DatabaseConnection): Promise<void>;
    /**
     * Creates a savepoint with the given name in the given transaction.
     */
    savepoint(connection: DatabaseConnection, savepointName: string, compileQuery: QueryCompiler['compileQuery']): Promise<void>;
    /**
     * Rolls back to the savepoint with the given name.
     */
    rollbackToSavepoint(connection: DatabaseConnection, savepointName: string, compileQuery: QueryCompiler['compileQuery']): Promise<void>;
    /**
     * Releases the savepoint with the given name.
     */
    releaseSavepoint(connection: DatabaseConnection, savepointName: string, compileQuery: QueryCompiler['compileQuery']): Promise<void>;
    /**
     * Releases a connection back to the pool.
     */
    releaseConnection(connection: PostgresConnection): Promise<void>;
    /**
     * Destroys the driver and releases all resources.
     */
    destroy(): Promise<void>;
}
/** Constructor options for {@link PostgresConnection}. */
interface PostgresConnectionOptions {
    cursor: PostgresCursorConstructor | null;
}
/** A single `pg` pool client wrapped as a Kysely `DatabaseConnection`. */
declare class PostgresConnection implements DatabaseConnection {
    #private;
    constructor(client: PostgresPoolClient, options: PostgresConnectionOptions);
    executeQuery<O>(compiledQuery: CompiledQuery): Promise<QueryResult<O>>;
    streamQuery<O>(compiledQuery: CompiledQuery, chunkSize: number): AsyncIterableIterator<QueryResult<O>>;
    [PRIVATE_RELEASE_METHOD](): void;
}
export {};

View File

@@ -0,0 +1,131 @@
/// <reference types="./postgres-driver.d.ts" />
import { parseSavepointCommand } from '../../parser/savepoint-parser.js';
import { CompiledQuery } from '../../query-compiler/compiled-query.js';
import { isFunction, freeze } from '../../util/object-utils.js';
import { createQueryId } from '../../util/query-id.js';
import { extendStackTrace } from '../../util/stack-trace-utils.js';
// Symbol used as a "private" method key: only the driver holds it, so only
// the driver can release a connection back to the pool.
const PRIVATE_RELEASE_METHOD = Symbol();
/**
 * `Driver` implementation for the PostgreSQL dialect, backed by a `pg`
 * connection pool.
 */
export class PostgresDriver {
    // Frozen copy of the user-supplied dialect configuration.
    #config;
    // Maps each pool client to the PostgresConnection wrapper created for
    // it, so the wrapper (and the onCreateConnection hook) only happens
    // once per underlying client.
    #connections = new WeakMap();
    // Set in init(); cleared again in destroy().
    #pool;
    constructor(config) {
        this.#config = freeze({ ...config });
    }
    /**
     * Resolves the pool. The `pool` config option may be either a pool
     * instance or an async factory that produces one.
     */
    async init() {
        this.#pool = isFunction(this.#config.pool)
            ? await this.#config.pool()
            : this.#config.pool;
    }
    async acquireConnection() {
        const client = await this.#pool.connect();
        let connection = this.#connections.get(client);
        if (!connection) {
            connection = new PostgresConnection(client, {
                cursor: this.#config.cursor ?? null,
            });
            this.#connections.set(client, connection);
            // The driver must take care of calling `onCreateConnection` when a new
            // connection is created. The `pg` module doesn't provide an async hook
            // for the connection creation. We need to call the method explicitly.
            if (this.#config.onCreateConnection) {
                await this.#config.onCreateConnection(connection);
            }
        }
        // Unlike onCreateConnection above, this hook runs on every
        // acquisition, not just the first one per client.
        if (this.#config.onReserveConnection) {
            await this.#config.onReserveConnection(connection);
        }
        return connection;
    }
    async beginTransaction(connection, settings) {
        if (settings.isolationLevel || settings.accessMode) {
            // Build a single `start transaction` statement carrying the
            // optional isolation level and access mode.
            let sql = 'start transaction';
            if (settings.isolationLevel) {
                sql += ` isolation level ${settings.isolationLevel}`;
            }
            if (settings.accessMode) {
                sql += ` ${settings.accessMode}`;
            }
            await connection.executeQuery(CompiledQuery.raw(sql));
        }
        else {
            await connection.executeQuery(CompiledQuery.raw('begin'));
        }
    }
    async commitTransaction(connection) {
        await connection.executeQuery(CompiledQuery.raw('commit'));
    }
    async rollbackTransaction(connection) {
        await connection.executeQuery(CompiledQuery.raw('rollback'));
    }
    async savepoint(connection, savepointName, compileQuery) {
        await connection.executeQuery(compileQuery(parseSavepointCommand('savepoint', savepointName), createQueryId()));
    }
    async rollbackToSavepoint(connection, savepointName, compileQuery) {
        await connection.executeQuery(compileQuery(parseSavepointCommand('rollback to', savepointName), createQueryId()));
    }
    async releaseSavepoint(connection, savepointName, compileQuery) {
        await connection.executeQuery(compileQuery(parseSavepointCommand('release', savepointName), createQueryId()));
    }
    async releaseConnection(connection) {
        // Delegate to the connection's symbol-keyed release method.
        connection[PRIVATE_RELEASE_METHOD]();
    }
    async destroy() {
        if (this.#pool) {
            // Clear #pool before awaiting so a second destroy() call won't
            // end the pool twice.
            const pool = this.#pool;
            this.#pool = undefined;
            await pool.end();
        }
    }
}
/**
 * A single `pg` pool client wrapped as a Kysely `DatabaseConnection`.
 */
class PostgresConnection {
    // The underlying pg pool client.
    #client;
    // Connection options; `cursor` is the optional pg-cursor constructor.
    #options;
    constructor(client, options) {
        this.#client = client;
        this.#options = options;
    }
    async executeQuery(compiledQuery) {
        try {
            const { command, rowCount, rows } = await this.#client.query(compiledQuery.sql, [...compiledQuery.parameters]);
            return {
                // Only data-modifying commands report an affected-row count.
                numAffectedRows: command === 'INSERT' ||
                    command === 'UPDATE' ||
                    command === 'DELETE' ||
                    command === 'MERGE'
                    ? BigInt(rowCount)
                    : undefined,
                rows: rows ?? [],
            };
        }
        catch (err) {
            // Attach a stack trace created at this call site so the error
            // points at the failed query rather than at driver internals.
            throw extendStackTrace(err, new Error());
        }
    }
    // Streams results in chunks of `chunkSize` rows via the configured
    // cursor class. Requires the `cursor` dialect config option.
    async *streamQuery(compiledQuery, chunkSize) {
        if (!this.#options.cursor) {
            throw new Error("'cursor' is not present in your postgres dialect config. It's required to make streaming work in postgres.");
        }
        if (!Number.isInteger(chunkSize) || chunkSize <= 0) {
            throw new Error('chunkSize must be a positive integer');
        }
        const cursor = this.#client.query(new this.#options.cursor(compiledQuery.sql, compiledQuery.parameters.slice()));
        try {
            while (true) {
                const rows = await cursor.read(chunkSize);
                // An empty read means the cursor is exhausted.
                if (rows.length === 0) {
                    break;
                }
                yield {
                    rows,
                };
            }
        }
        finally {
            // Always close the cursor, also when the consumer stops
            // iterating early or an error is thrown mid-stream.
            await cursor.close();
        }
    }
    // Symbol-keyed so only the driver (which holds the symbol) can return
    // the client to the pool.
    [PRIVATE_RELEASE_METHOD]() {
        this.#client.release();
    }
}

View File

@@ -0,0 +1,20 @@
import type { DatabaseIntrospector, DatabaseMetadata, DatabaseMetadataOptions, SchemaMetadata, TableMetadata } from '../database-introspector.js';
import type { Kysely } from '../../kysely.js';
/**
 * A {@link DatabaseIntrospector} for PostgreSQL that reads table and
 * column metadata from the `pg_catalog` system catalogs.
 */
export declare class PostgresIntrospector implements DatabaseIntrospector {
    #private;
    constructor(db: Kysely<any>);
    /**
     * Get schema metadata.
     */
    getSchemas(): Promise<SchemaMetadata[]>;
    /**
     * Get tables and views metadata.
     */
    getTables(options?: DatabaseMetadataOptions): Promise<TableMetadata[]>;
    /**
     * Get the database metadata such as table and column names.
     *
     * @deprecated Use getTables() instead.
     */
    getMetadata(options?: DatabaseMetadataOptions): Promise<DatabaseMetadata>;
}

View File

@@ -0,0 +1,97 @@
/// <reference types="./postgres-introspector.d.ts" />
import { DEFAULT_MIGRATION_LOCK_TABLE, DEFAULT_MIGRATION_TABLE, } from '../../migration/migrator.js';
import { freeze } from '../../util/object-utils.js';
import { sql } from '../../raw-builder/sql.js';
/**
 * `DatabaseIntrospector` implementation for PostgreSQL. Metadata is read
 * directly from the `pg_catalog` system catalogs rather than from
 * `information_schema`.
 */
export class PostgresIntrospector {
    // Kysely instance used to run the introspection queries.
    #db;
    constructor(db) {
        this.#db = db;
    }
    /**
     * Returns the names of all namespaces (schemas) in the database.
     */
    async getSchemas() {
        let rawSchemas = await this.#db
            .selectFrom('pg_catalog.pg_namespace')
            .select('nspname')
            .$castTo()
            .execute();
        return rawSchemas.map((it) => ({ name: it.nspname }));
    }
    /**
     * Returns table and view metadata.
     *
     * @param options - When `withInternalKyselyTables` is true, the result
     *   also includes kysely's own migration bookkeeping tables.
     */
    async getTables(options = { withInternalKyselyTables: false }) {
        let query = this.#db
            // column
            .selectFrom('pg_catalog.pg_attribute as a')
            // table
            .innerJoin('pg_catalog.pg_class as c', 'a.attrelid', 'c.oid')
            // table schema
            .innerJoin('pg_catalog.pg_namespace as ns', 'c.relnamespace', 'ns.oid')
            // column data type
            .innerJoin('pg_catalog.pg_type as typ', 'a.atttypid', 'typ.oid')
            // column data type schema
            .innerJoin('pg_catalog.pg_namespace as dtns', 'typ.typnamespace', 'dtns.oid')
            .select([
            'a.attname as column',
            'a.attnotnull as not_null',
            'a.atthasdef as has_default',
            'c.relname as table',
            'c.relkind as table_type',
            'ns.nspname as schema',
            'typ.typname as type',
            'dtns.nspname as type_schema',
            // The column's comment, if any.
            sql `col_description(a.attrelid, a.attnum)`.as('column_description'),
            // Non-null when the column is backed by a serial sequence,
            // i.e. is auto-incrementing.
            sql `pg_get_serial_sequence(quote_ident(ns.nspname) || '.' || quote_ident(c.relname), a.attname)`.as('auto_incrementing'),
        ])
            .where('c.relkind', 'in', [
            'r' /*regular table*/,
            'v' /*view*/,
            'p' /*partitioned table*/,
        ])
            // Skip postgres' own system schemas.
            .where('ns.nspname', '!~', '^pg_')
            .where('ns.nspname', '!=', 'information_schema')
            // Filter out internal cockroachdb schema
            .where('ns.nspname', '!=', 'crdb_internal')
            // Only schemas where we are allowed access
            .where(sql `has_schema_privilege(ns.nspname, 'USAGE')`)
            // No system columns
            .where('a.attnum', '>=', 0)
            // Skip columns that have been dropped but still physically exist.
            .where('a.attisdropped', '!=', true)
            .orderBy('ns.nspname')
            .orderBy('c.relname')
            .orderBy('a.attnum')
            .$castTo();
        if (!options.withInternalKyselyTables) {
            // Hide kysely's internal migration bookkeeping tables by default.
            query = query
                .where('c.relname', '!=', DEFAULT_MIGRATION_TABLE)
                .where('c.relname', '!=', DEFAULT_MIGRATION_LOCK_TABLE);
        }
        const rawColumns = await query.execute();
        return this.#parseTableMetadata(rawColumns);
    }
    /**
     * @deprecated Use {@link getTables} instead.
     */
    async getMetadata(options) {
        return {
            tables: await this.getTables(options),
        };
    }
    // Groups the flat list of column rows into one TableMetadata entry per
    // (schema, table) pair. Relies on `freeze` being shallow here so that
    // the nested `columns` array stays mutable after the table is frozen.
    #parseTableMetadata(columns) {
        return columns.reduce((tables, it) => {
            let table = tables.find((tbl) => tbl.name === it.table && tbl.schema === it.schema);
            if (!table) {
                table = freeze({
                    name: it.table,
                    isView: it.table_type === 'v',
                    schema: it.schema,
                    columns: [],
                });
                tables.push(table);
            }
            table.columns.push(freeze({
                name: it.column,
                dataType: it.type,
                dataTypeSchema: it.type_schema,
                isNullable: !it.not_null,
                isAutoIncrementing: it.auto_incrementing !== null,
                hasDefaultValue: it.has_default,
                comment: it.column_description ?? undefined,
            }));
            return tables;
        }, []);
    }
}

View File

@@ -0,0 +1,4 @@
import { DefaultQueryCompiler } from '../../query-compiler/default-query-compiler.js';
/**
 * Query compiler for the PostgreSQL dialect.
 */
export declare class PostgresQueryCompiler extends DefaultQueryCompiler {
    /** Escapes double quotes inside identifiers by doubling them. */
    protected sanitizeIdentifier(identifier: string): string;
}

View File

@@ -0,0 +1,8 @@
/// <reference types="./postgres-query-compiler.d.ts" />
import { DefaultQueryCompiler } from '../../query-compiler/default-query-compiler.js';
/**
 * Query compiler for the PostgreSQL dialect.
 */
export class PostgresQueryCompiler extends DefaultQueryCompiler {
    sanitizeIdentifier(identifier) {
        // A double quote inside a quoted identifier is escaped by
        // doubling it.
        return identifier.replaceAll('"', '""');
    }
}

View File

@@ -0,0 +1,80 @@
import type { Kysely } from '../../kysely.js';
import { DialectAdapterBase } from '../dialect-adapter-base.js';
import type { MigrationLockOptions } from '../dialect-adapter.js';
/**
 * Dialect adapter for SQLite.
 */
export declare class SqliteAdapter extends DialectAdapterBase {
    /**
     * Whether or not this dialect supports transactional DDL.
     *
     * If this is true, migrations are executed inside a transaction.
     */
    get supportsTransactionalDdl(): boolean;
    /**
     * Whether or not this dialect supports the `returning` in inserts
     * updates and deletes.
     */
    get supportsReturning(): boolean;
    /**
     * This method is used to acquire a lock for the migrations so that
     * it's not possible for two migration operations to run in parallel.
     *
     * Most dialects have explicit locks that can be used, like advisory locks
     * in PostgreSQL and the get_lock function in MySQL.
     *
     * If the dialect doesn't have explicit locks the {@link MigrationLockOptions.lockTable}
     * created by Kysely can be used instead. You can access it through the `options` object.
     * The lock table has two columns `id` and `is_locked` and there's only one row in the table
     * whose id is {@link MigrationLockOptions.lockRowId}. `is_locked` is an integer. Kysely
     * takes care of creating the lock table and inserting the one single row to it before this
     * method is executed. If the dialect supports schemas and the user has specified a custom
     * schema in their migration settings, the options object also contains the schema name in
     * {@link MigrationLockOptions.lockTableSchema}.
     *
     * Here's an example of how you might implement this method for a dialect that doesn't
     * have explicit locks but supports `FOR UPDATE` row locks and transactional DDL:
     *
     * ```ts
     * import { DialectAdapterBase, type MigrationLockOptions, Kysely } from 'kysely'
     *
     * export class MyAdapter extends DialectAdapterBase {
     *   override async acquireMigrationLock(
     *     db: Kysely<any>,
     *     options: MigrationLockOptions
     *   ): Promise<void> {
     *     const queryDb = options.lockTableSchema
     *       ? db.withSchema(options.lockTableSchema)
     *       : db
     *
     *     // Since our imaginary dialect supports transactional DDL and has
     *     // row locks, we can simply take a row lock here and it will guarantee
     *     // all subsequent calls to this method from other transactions will
     *     // wait until this transaction finishes.
     *     await queryDb
     *       .selectFrom(options.lockTable)
     *       .selectAll()
     *       .where('id', '=', options.lockRowId)
     *       .forUpdate()
     *       .execute()
     *   }
     *
     *   override async releaseMigrationLock() {
     *     // noop
     *   }
     * }
     * ```
     *
     * If `supportsTransactionalDdl` is `true` then the `db` passed to this method
     * is a transaction inside which the migrations will be executed. Otherwise
     * `db` is a single connection (session) that will be used to execute the
     * migrations.
     */
    acquireMigrationLock(_db: Kysely<any>, _opt: MigrationLockOptions): Promise<void>;
    /**
     * Releases the migration lock. See {@link acquireMigrationLock}.
     *
     * If `supportsTransactionalDdl` is `true` then the `db` passed to this method
     * is a transaction inside which the migrations were executed. Otherwise `db`
     * is a single connection (session) that was used to execute the migrations
     * and the `acquireMigrationLock` call.
     */
    releaseMigrationLock(_db: Kysely<any>, _opt: MigrationLockOptions): Promise<void>;
}

View File

@@ -0,0 +1,20 @@
/// <reference types="./sqlite-adapter.d.ts" />
import { DialectAdapterBase } from '../dialect-adapter-base.js';
/**
 * Dialect adapter for SQLite.
 */
export class SqliteAdapter extends DialectAdapterBase {
    get supportsTransactionalDdl() {
        // Migrations are not wrapped in a transaction for this dialect.
        return false;
    }
    get supportsReturning() {
        return true;
    }
    async acquireMigrationLock(_db, _opt) {
        // Intentionally empty: the dialect has a single connection that the
        // migration system reserves exclusively for the whole time between
        // acquireMigrationLock and releaseMigrationLock, so no additional
        // locking is needed.
    }
    async releaseMigrationLock(_db, _opt) {
        // Intentionally empty for the same reason: the single connection is
        // held exclusively by the migration system between
        // acquireMigrationLock and releaseMigrationLock.
    }
}

View File

@@ -0,0 +1,41 @@
import type { DatabaseConnection } from '../../driver/database-connection.js';
/**
* Config for the SQLite dialect.
*/
export interface SqliteDialectConfig {
    /**
     * An sqlite Database instance or a function that returns one.
     *
     * If a function is provided, it's called once when the first query is executed.
     *
     * https://github.com/JoshuaWise/better-sqlite3/blob/master/docs/api.md#new-databasepath-options
     */
    database: SqliteDatabase | (() => Promise<SqliteDatabase>);
    /**
     * Called once when the first query is executed.
     *
     * This is a Kysely specific feature and does not come from the `better-sqlite3` module.
     */
    onCreateConnection?: (connection: DatabaseConnection) => Promise<void>;
}
/**
 * This interface is the subset of better-sqlite3 driver's `Database` class that
 * kysely needs.
 *
 * We don't use the type from `better-sqlite3` here to not have a dependency to it.
 *
 * https://github.com/JoshuaWise/better-sqlite3/blob/master/docs/api.md#new-databasepath-options
 */
export interface SqliteDatabase {
    /** Closes the database handle; called when the driver is destroyed. */
    close(): void;
    /** Compiles a SQL string into a reusable prepared statement. */
    prepare(sql: string): SqliteStatement;
}
/**
 * The subset of better-sqlite3's prepared statement class that kysely needs.
 */
export interface SqliteStatement {
    // Whether the statement returns rows. NOTE(review): presumably used to
    // choose between `all()`/`iterate()` and `run()` — confirm against the
    // SqliteConnection implementation.
    readonly reader: boolean;
    /** Runs the statement and returns all result rows. */
    all(parameters: ReadonlyArray<unknown>): unknown[];
    /** Runs a statement that does not return rows. */
    run(parameters: ReadonlyArray<unknown>): {
        changes: number | bigint;
        lastInsertRowid: number | bigint;
    };
    /** Runs the statement and yields result rows one by one. */
    iterate(parameters: ReadonlyArray<unknown>): IterableIterator<unknown>;
}

View File

@@ -0,0 +1,2 @@
/// <reference types="./sqlite-dialect-config.d.ts" />
export {};

View File

@@ -0,0 +1,55 @@
import type { Driver } from '../../driver/driver.js';
import type { Kysely } from '../../kysely.js';
import type { QueryCompiler } from '../../query-compiler/query-compiler.js';
import type { Dialect } from '../dialect.js';
import type { DatabaseIntrospector } from '../database-introspector.js';
import type { DialectAdapter } from '../dialect-adapter.js';
import type { SqliteDialectConfig } from './sqlite-dialect-config.js';
/**
* SQLite dialect that uses the [better-sqlite3](https://github.com/JoshuaWise/better-sqlite3) library.
*
* The constructor takes an instance of {@link SqliteDialectConfig}.
*
* ```ts
* import Database from 'better-sqlite3'
*
* new SqliteDialect({
* database: new Database('db.sqlite')
* })
* ```
*
* If you want the pool to only be created once it's first used, `database`
* can be a function:
*
* ```ts
* import Database from 'better-sqlite3'
*
* new SqliteDialect({
* database: async () => new Database('db.sqlite')
* })
* ```
*/
export declare class SqliteDialect implements Dialect {
    #private;
    /**
     * Creates a new SQLite dialect. See {@link SqliteDialectConfig} for
     * the available options.
     */
    constructor(config: SqliteDialectConfig);
    /**
     * Creates a driver for the dialect.
     */
    createDriver(): Driver;
    /**
     * Creates a query compiler for the dialect.
     */
    createQueryCompiler(): QueryCompiler;
    /**
     * Creates an adapter for the dialect.
     */
    createAdapter(): DialectAdapter;
    /**
     * Creates a database introspector that can be used to get database metadata
     * such as the table names and column names of those tables.
     *
     * `db` never has any plugins installed. It's created using
     * {@link Kysely.withoutPlugins}.
     */
    createIntrospector(db: Kysely<any>): DatabaseIntrospector;
}

View File

@@ -0,0 +1,48 @@
/// <reference types="./sqlite-dialect.d.ts" />
import { SqliteDriver } from './sqlite-driver.js';
import { SqliteQueryCompiler } from './sqlite-query-compiler.js';
import { SqliteIntrospector } from './sqlite-introspector.js';
import { SqliteAdapter } from './sqlite-adapter.js';
import { freeze } from '../../util/object-utils.js';
/**
* SQLite dialect that uses the [better-sqlite3](https://github.com/JoshuaWise/better-sqlite3) library.
*
* The constructor takes an instance of {@link SqliteDialectConfig}.
*
* ```ts
* import Database from 'better-sqlite3'
*
* new SqliteDialect({
* database: new Database('db.sqlite')
* })
* ```
*
* If you want the pool to only be created once it's first used, `database`
* can be a function:
*
* ```ts
* import Database from 'better-sqlite3'
*
* new SqliteDialect({
* database: async () => new Database('db.sqlite')
* })
* ```
*/
export class SqliteDialect {
    // Frozen shallow copy of the user-supplied configuration.
    #dialectConfig;
    constructor(config) {
        this.#dialectConfig = freeze({ ...config });
    }
    // Each factory below builds one collaborator of the dialect.
    createAdapter() {
        return new SqliteAdapter();
    }
    createDriver() {
        return new SqliteDriver(this.#dialectConfig);
    }
    createIntrospector(db) {
        return new SqliteIntrospector(db);
    }
    createQueryCompiler() {
        return new SqliteQueryCompiler();
    }
}

View File

@@ -0,0 +1,42 @@
import type { DatabaseConnection } from '../../driver/database-connection.js';
import type { Driver } from '../../driver/driver.js';
import type { QueryCompiler } from '../../query-compiler/query-compiler.js';
import type { SqliteDialectConfig } from './sqlite-dialect-config.js';
export declare class SqliteDriver implements Driver {
    #private;
    constructor(config: SqliteDialectConfig);
    /**
     * Initializes the driver.
     *
     * After calling this method the driver should be usable and `acquireConnection` etc.
     * methods should be callable.
     */
    init(): Promise<void>;
    /**
     * Acquires a new connection from the pool.
     */
    acquireConnection(): Promise<DatabaseConnection>;
    /**
     * Begins a transaction.
     */
    beginTransaction(connection: DatabaseConnection): Promise<void>;
    /**
     * Commits a transaction.
     */
    commitTransaction(connection: DatabaseConnection): Promise<void>;
    /**
     * Rolls back a transaction.
     */
    rollbackTransaction(connection: DatabaseConnection): Promise<void>;
    /**
     * Creates a savepoint with the given name in the given transaction.
     */
    savepoint(connection: DatabaseConnection, savepointName: string, compileQuery: QueryCompiler['compileQuery']): Promise<void>;
    /**
     * Rolls back to the savepoint with the given name.
     */
    rollbackToSavepoint(connection: DatabaseConnection, savepointName: string, compileQuery: QueryCompiler['compileQuery']): Promise<void>;
    /**
     * Releases the savepoint with the given name.
     */
    releaseSavepoint(connection: DatabaseConnection, savepointName: string, compileQuery: QueryCompiler['compileQuery']): Promise<void>;
    /**
     * Releases a connection back to the pool.
     */
    releaseConnection(): Promise<void>;
    /**
     * Destroys the driver and releases all resources.
     */
    destroy(): Promise<void>;
}

View File

@@ -0,0 +1,110 @@
/// <reference types="./sqlite-driver.d.ts" />
import { SelectQueryNode } from '../../operation-node/select-query-node.js';
import { parseSavepointCommand } from '../../parser/savepoint-parser.js';
import { CompiledQuery } from '../../query-compiler/compiled-query.js';
import { freeze, isFunction } from '../../util/object-utils.js';
import { createQueryId } from '../../util/query-id.js';
export class SqliteDriver {
    // Frozen copy of the dialect configuration.
    #dialectConfig;
    // Serializes access to the one and only SQLite connection.
    #mutex = new ConnectionMutex();
    #database;
    #singleConnection;
    constructor(config) {
        this.#dialectConfig = freeze({ ...config });
    }
    /**
     * Opens the database (invoking the factory function if one was given
     * instead of an instance) and wraps it in a single connection object.
     */
    async init() {
        const { database, onCreateConnection } = this.#dialectConfig;
        this.#database = isFunction(database) ? await database() : database;
        this.#singleConnection = new SqliteConnection(this.#database);
        if (onCreateConnection) {
            await onCreateConnection(this.#singleConnection);
        }
    }
    async acquireConnection() {
        // SQLite only has one single connection. We use a mutex here to wait
        // until the single connection has been released.
        await this.#mutex.lock();
        return this.#singleConnection;
    }
    async beginTransaction(connection) {
        await connection.executeQuery(CompiledQuery.raw('begin'));
    }
    async commitTransaction(connection) {
        await connection.executeQuery(CompiledQuery.raw('commit'));
    }
    async rollbackTransaction(connection) {
        await connection.executeQuery(CompiledQuery.raw('rollback'));
    }
    savepoint(connection, savepointName, compileQuery) {
        return this.#runSavepointCommand(connection, 'savepoint', savepointName, compileQuery);
    }
    rollbackToSavepoint(connection, savepointName, compileQuery) {
        return this.#runSavepointCommand(connection, 'rollback to', savepointName, compileQuery);
    }
    releaseSavepoint(connection, savepointName, compileQuery) {
        return this.#runSavepointCommand(connection, 'release', savepointName, compileQuery);
    }
    // Shared implementation for the three savepoint operations: compiles
    // the savepoint command node and executes it on the connection.
    async #runSavepointCommand(connection, command, savepointName, compileQuery) {
        const node = parseSavepointCommand(command, savepointName);
        await connection.executeQuery(compileQuery(node, createQueryId()));
    }
    async releaseConnection() {
        this.#mutex.unlock();
    }
    async destroy() {
        this.#database?.close();
    }
}
class SqliteConnection {
    // The underlying better-sqlite3 database handle.
    #db;
    constructor(db) {
        this.#db = db;
    }
    /**
     * Prepares and runs the compiled query synchronously, returning the
     * result wrapped in an already-resolved promise.
     */
    executeQuery(compiledQuery) {
        const { sql, parameters } = compiledQuery;
        const stmt = this.#db.prepare(sql);
        // `stmt.reader` is true for statements that return rows.
        if (stmt.reader) {
            return Promise.resolve({ rows: stmt.all(parameters) });
        }
        // Converts better-sqlite3's number | bigint counters to bigint,
        // leaving missing values as undefined.
        const asBigInt = (value) => value !== undefined && value !== null ? BigInt(value) : undefined;
        const { changes, lastInsertRowid } = stmt.run(parameters);
        return Promise.resolve({
            numAffectedRows: asBigInt(changes),
            insertId: asBigInt(lastInsertRowid),
            rows: [],
        });
    }
    /**
     * Streams the rows of a select query one at a time. Non-select queries
     * cannot be streamed and cause an error when the generator is consumed.
     */
    async *streamQuery(compiledQuery, _chunkSize) {
        const { sql, parameters, query } = compiledQuery;
        const stmt = this.#db.prepare(sql);
        if (!SelectQueryNode.is(query)) {
            throw new Error('Sqlite driver only supports streaming of select queries');
        }
        for (const row of stmt.iterate(parameters)) {
            yield { rows: [row] };
        }
    }
}
class ConnectionMutex {
    // Pending promise while the mutex is held; undefined when free.
    #held;
    // Resolver that releases the current #held promise.
    #release;
    /**
     * Resolves once the mutex has been acquired. Waiters loop because a
     * release wakes every waiter, and only one of them may take the lock.
     */
    async lock() {
        while (this.#held !== undefined) {
            await this.#held;
        }
        this.#held = new Promise((resolve) => {
            this.#release = resolve;
        });
    }
    /** Frees the mutex and wakes any callers blocked in `lock()`. */
    unlock() {
        const release = this.#release;
        this.#held = undefined;
        this.#release = undefined;
        release?.();
    }
}

View File

@@ -0,0 +1,20 @@
import type { DatabaseIntrospector, DatabaseMetadata, DatabaseMetadataOptions, SchemaMetadata, TableMetadata } from '../database-introspector.js';
import type { Kysely } from '../../kysely.js';
/**
 * An introspector for reading table, view and column metadata from a
 * SQLite database through a Kysely instance.
 */
export declare class SqliteIntrospector implements DatabaseIntrospector {
    #private;
    constructor(db: Kysely<any>);
    /**
     * Get schema metadata.
     *
     * SQLite has no schemas, so the result is always an empty array.
     */
    getSchemas(): Promise<SchemaMetadata[]>;
    /**
     * Get tables and views metadata.
     */
    getTables(options?: DatabaseMetadataOptions): Promise<TableMetadata[]>;
    /**
     * Get the database metadata such as table and column names.
     *
     * @deprecated Use getTables() instead.
     */
    getMetadata(options?: DatabaseMetadataOptions): Promise<DatabaseMetadata>;
}

View File

@@ -0,0 +1,91 @@
/// <reference types="./sqlite-introspector.d.ts" />
import { DEFAULT_MIGRATION_LOCK_TABLE, DEFAULT_MIGRATION_TABLE, } from '../../migration/migrator.js';
import { sql } from '../../raw-builder/sql.js';
export class SqliteIntrospector {
    // The Kysely instance used to run the metadata queries.
    #db;
    constructor(db) {
        this.#db = db;
    }
    async getSchemas() {
        // Sqlite doesn't support schemas.
        return [];
    }
    // Lists tables and views; internal Kysely migration tables are
    // excluded unless `withInternalKyselyTables` is true.
    async getTables(options = { withInternalKyselyTables: false }) {
        return await this.#getTableMetadata(options);
    }
    // Deprecated wrapper around getTables() kept for backwards compatibility.
    async getMetadata(options) {
        return {
            tables: await this.getTables(options),
        };
    }
    // Builds the query that selects name, creation SQL and type of all
    // user tables and views from sqlite_master, ordered by name.
    // `sqlite_%` names are reserved internal SQLite objects.
    #tablesQuery(qb, options) {
        let tablesQuery = qb
            .selectFrom('sqlite_master')
            .where('type', 'in', ['table', 'view'])
            .where('name', 'not like', 'sqlite_%')
            .select(['name', 'sql', 'type'])
            .orderBy('name');
        if (!options.withInternalKyselyTables) {
            tablesQuery = tablesQuery
                .where('name', '!=', DEFAULT_MIGRATION_TABLE)
                .where('name', '!=', DEFAULT_MIGRATION_LOCK_TABLE);
        }
        return tablesQuery;
    }
    async #getTableMetadata(options) {
        const tablesResult = await this.#tablesQuery(this.#db, options).execute();
        // Join each table/view against pragma_table_info() to get one row
        // per column (cid, name, type, notnull, dflt_value, pk).
        const tableMetadata = await this.#db
            .with('table_list', (qb) => this.#tablesQuery(qb, options))
            .selectFrom([
            'table_list as tl',
            sql `pragma_table_info(tl.name)`.as('p'),
        ])
            .select([
            'tl.name as table',
            'p.cid',
            'p.name',
            'p.type',
            'p.notnull',
            'p.dflt_value',
            'p.pk',
        ])
            .orderBy('tl.name')
            .orderBy('p.cid')
            .execute();
        // Group the flat column rows by their owning table name.
        const columnsByTable = {};
        for (const row of tableMetadata) {
            columnsByTable[row.table] ??= [];
            columnsByTable[row.table].push(row);
        }
        return tablesResult.map(({ name, sql, type }) => {
            // Try to find the name of the column that has `autoincrement` by
            // parsing the table's CREATE statement, since pragma_table_info
            // doesn't expose it. The statement is split on parens/commas,
            // and the first word of the matching fragment (with quoting
            // characters stripped) is taken as the column name.
            let autoIncrementCol = sql
                ?.split(/[\(\),]/)
                ?.find((it) => it.toLowerCase().includes('autoincrement'))
                ?.trimStart()
                ?.split(/\s+/)?.[0]
                ?.replace(/["`]/g, '');
            const columns = columnsByTable[name] ?? [];
            // Otherwise, check for an INTEGER PRIMARY KEY
            // https://www.sqlite.org/autoinc.html
            if (!autoIncrementCol) {
                const pkCols = columns.filter((r) => r.pk > 0);
                if (pkCols.length === 1 && pkCols[0].type.toLowerCase() === 'integer') {
                    autoIncrementCol = pkCols[0].name;
                }
            }
            return {
                name: name,
                isView: type === 'view',
                columns: columns.map((col) => ({
                    name: col.name,
                    dataType: col.type,
                    isNullable: !col.notnull,
                    isAutoIncrementing: col.name === autoIncrementCol,
                    hasDefaultValue: col.dflt_value != null,
                    // SQLite does not store column comments.
                    comment: undefined,
                })),
            };
        });
    }
}

View File

@@ -0,0 +1,14 @@
import type { DefaultInsertValueNode } from '../../operation-node/default-insert-value-node.js';
import type { OrActionNode } from '../../operation-node/or-action-node.js';
import { DefaultQueryCompiler } from '../../query-compiler/default-query-compiler.js';
/**
 * A query compiler that overrides the defaults with SQLite-specific SQL.
 */
export declare class SqliteQueryCompiler extends DefaultQueryCompiler {
    /** Appends an `or <action>` conflict clause (e.g. `or replace`). */
    protected visitOrAction(node: OrActionNode): void;
    /** SQLite uses positional `?` parameter placeholders. */
    protected getCurrentParameterPlaceholder(): string;
    protected getLeftExplainOptionsWrapper(): string;
    protected getRightExplainOptionsWrapper(): string;
    /** Identifiers are wrapped in double quotes. */
    protected getLeftIdentifierWrapper(): string;
    protected getRightIdentifierWrapper(): string;
    protected getAutoIncrement(): string;
    /** Escapes double quotes inside identifiers by doubling them. */
    protected sanitizeIdentifier(identifier: string): string;
    /** SQLite has no `default` keyword in inserts; emits `null` instead. */
    protected visitDefaultInsertValue(_: DefaultInsertValueNode): void;
}

View File

@@ -0,0 +1,34 @@
/// <reference types="./sqlite-query-compiler.d.ts" />
import { DefaultQueryCompiler } from '../../query-compiler/default-query-compiler.js';
// Matches every double quote so identifiers can be escaped by doubling them.
const ID_WRAP_REGEX = /"/g;
/**
 * Query compiler producing SQLite-flavored SQL.
 */
export class SqliteQueryCompiler extends DefaultQueryCompiler {
    // Emits `or <action>` conflict clauses, e.g. `insert or replace`.
    visitOrAction(node) {
        this.append('or ');
        this.append(node.action);
    }
    // SQLite uses positional `?` placeholders.
    getCurrentParameterPlaceholder() {
        return '?';
    }
    // Identifiers are double-quoted; embedded quotes are doubled.
    getLeftIdentifierWrapper() {
        return '"';
    }
    getRightIdentifierWrapper() {
        return '"';
    }
    sanitizeIdentifier(identifier) {
        return identifier.replace(ID_WRAP_REGEX, '""');
    }
    // Explain options are emitted bare, without any wrapper characters.
    getLeftExplainOptionsWrapper() {
        return '';
    }
    getRightExplainOptionsWrapper() {
        return '';
    }
    getAutoIncrement() {
        return 'autoincrement';
    }
    visitDefaultInsertValue(_) {
        // sqlite doesn't support the `default` keyword in inserts.
        this.append('null');
    }
}