node_modules ignore

This commit is contained in:
2025-05-08 23:43:47 +02:00
parent e19d52f172
commit 4574544c9f
65041 changed files with 10593536 additions and 0 deletions

37
server/node_modules/@strapi/database/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,37 @@
Copyright (c) 2015-present Strapi Solutions SAS
Portions of the Strapi software are licensed as follows:
* All software that resides under an "ee/" directory (the “EE Software”), if that directory exists, is licensed under the license defined below.
Enterprise License
If you or the company you represent has entered into a written agreement referencing the Enterprise Edition of the Strapi source code available at
https://github.com/strapi/strapi, then such agreement applies to your use of the Enterprise Edition of the Strapi Software. If you or the company you
represent is using the Enterprise Edition of the Strapi Software in connection with a subscription to our cloud offering, then the agreement you have
agreed to with respect to our cloud offering and the licenses included in such agreement apply to your use of the Enterprise Edition of the Strapi Software.
Otherwise, the Strapi Enterprise Software License Agreement (found here https://strapi.io/enterprise-terms) applies to your use of the Enterprise Edition of the Strapi Software.
BY ACCESSING OR USING THE ENTERPRISE EDITION OF THE STRAPI SOFTWARE, YOU ARE AGREEING TO BE BOUND BY THE RELEVANT REFERENCED AGREEMENT.
IF YOU ARE NOT AUTHORIZED TO ACCEPT THESE TERMS ON BEHALF OF THE COMPANY YOU REPRESENT OR IF YOU DO NOT AGREE TO ALL OF THE RELEVANT TERMS AND CONDITIONS REFERENCED AND YOU
HAVE NOT OTHERWISE EXECUTED A WRITTEN AGREEMENT WITH STRAPI, YOU ARE NOT AUTHORIZED TO ACCESS OR USE OR ALLOW ANY USER TO ACCESS OR USE ANY PART OF
THE ENTERPRISE EDITION OF THE STRAPI SOFTWARE. YOUR ACCESS RIGHTS ARE CONDITIONAL ON YOUR CONSENT TO THE RELEVANT REFERENCED TERMS TO THE EXCLUSION OF ALL OTHER TERMS;
IF THE RELEVANT REFERENCED TERMS ARE CONSIDERED AN OFFER BY YOU, ACCEPTANCE IS EXPRESSLY LIMITED TO THE RELEVANT REFERENCED TERMS.
* All software outside of the above-mentioned directories or restrictions above is available under the "MIT Expat" license as set forth below.
MIT Expat License
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

3
server/node_modules/@strapi/database/README.md generated vendored Normal file
View File

@@ -0,0 +1,3 @@
# @strapi/database
> ⚠️ This package is only meant to be used inside Strapi

View File

@@ -0,0 +1,4 @@
import knex from 'knex';
import type { Knex } from 'knex';
/**
 * Builds the knex connection instance for the database layer.
 *
 * Maps the Strapi-facing client name in `userConfig.client` to the concrete
 * npm driver (sqlite → better-sqlite3, mysql → mysql2, postgres → pg) and,
 * when `strapiConfig.pool.afterCreate` is provided, chains that hook in front
 * of any user-supplied `pool.afterCreate`. Throws on unsupported clients.
 */
export declare const createConnection: (userConfig: Knex.Config, strapiConfig?: Partial<Knex.Config>) => knex.Knex<any, unknown[]>;
//# sourceMappingURL=connection.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"connection.d.ts","sourceRoot":"","sources":["../src/connection.ts"],"names":[],"mappings":"AAAA,OAAO,IAAI,MAAM,MAAM,CAAC;AACxB,OAAO,KAAK,EAAE,IAAI,EAAE,MAAM,MAAM,CAAC;AAYjC,eAAO,MAAM,gBAAgB,eAAgB,KAAK,MAAM,iBAAiB,QAAQ,KAAK,MAAM,CAAC,8BA8B5F,CAAC"}

View File

@@ -0,0 +1,43 @@
'use strict';
var knex = require('knex');
// Translates the Strapi-facing client name into the npm driver knex loads.
const clientMap = {
    sqlite: 'better-sqlite3',
    mysql: 'mysql2',
    postgres: 'pg'
};
// True when `config.client` is one of the supported keys of `clientMap`.
function isClientValid(config) {
    return Object.keys(clientMap).includes(config.client);
}
/**
 * Builds the knex connection from `userConfig`, swapping the client name for
 * its driver package and, when Strapi supplies a `pool.afterCreate` hook,
 * chaining it in front of any hook the user configured.
 */
const createConnection = (userConfig, strapiConfig) => {
    if (!isClientValid(userConfig)) {
        throw new Error(`Unsupported database client ${userConfig.client}`);
    }
    const knexConfig = {
        ...userConfig,
        client: clientMap[userConfig.client]
    };
    // initialization code to run upon opening a new connection:
    // Strapi's hook must run first on every freshly created native connection.
    const strapiAfterCreate = strapiConfig?.pool?.afterCreate;
    if (strapiAfterCreate) {
        knexConfig.pool = knexConfig.pool || {};
        // Keep the user's own afterCreate working by invoking it after Strapi's.
        const userAfterCreate = knexConfig.pool.afterCreate;
        knexConfig.pool.afterCreate = (conn, done) => {
            strapiAfterCreate(conn, (err, nativeConn) => {
                if (err) {
                    return done(err, nativeConn);
                }
                return userAfterCreate ? userAfterCreate(nativeConn, done) : done(null, nativeConn);
            });
        };
    }
    return knex(knexConfig);
};
exports.createConnection = createConnection;
//# sourceMappingURL=connection.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"connection.js","sources":["../src/connection.ts"],"sourcesContent":["import knex from 'knex';\nimport type { Knex } from 'knex';\n\nconst clientMap = {\n sqlite: 'better-sqlite3',\n mysql: 'mysql2',\n postgres: 'pg',\n};\n\nfunction isClientValid(config: { client?: unknown }): config is { client: keyof typeof clientMap } {\n return Object.keys(clientMap).includes(config.client as string);\n}\n\nexport const createConnection = (userConfig: Knex.Config, strapiConfig?: Partial<Knex.Config>) => {\n if (!isClientValid(userConfig)) {\n throw new Error(`Unsupported database client ${userConfig.client}`);\n }\n\n const knexConfig: Knex.Config = { ...userConfig, client: (clientMap as any)[userConfig.client] };\n\n // initialization code to run upon opening a new connection\n if (strapiConfig?.pool?.afterCreate) {\n knexConfig.pool = knexConfig.pool || {};\n // if the user has set their own afterCreate in config, we will replace it and call it\n const userAfterCreate = knexConfig.pool?.afterCreate;\n const strapiAfterCreate = strapiConfig.pool.afterCreate;\n knexConfig.pool.afterCreate = (\n conn: unknown,\n done: (err: Error | null | undefined, connection: any) => void\n ) => {\n strapiAfterCreate(conn, (err: Error | null | undefined, nativeConn: any) => {\n if (err) {\n return done(err, nativeConn);\n }\n if (userAfterCreate) {\n return userAfterCreate(nativeConn, done);\n }\n return done(null, nativeConn);\n });\n };\n }\n\n return 
knex(knexConfig);\n};\n"],"names":["clientMap","sqlite","mysql","postgres","isClientValid","config","Object","keys","includes","client","createConnection","userConfig","strapiConfig","Error","knexConfig","pool","afterCreate","userAfterCreate","strapiAfterCreate","conn","done","err","nativeConn","knex"],"mappings":";;;;AAGA,MAAMA,SAAY,GAAA;IAChBC,MAAQ,EAAA,gBAAA;IACRC,KAAO,EAAA,QAAA;IACPC,QAAU,EAAA;AACZ,CAAA;AAEA,SAASC,cAAcC,MAA4B,EAAA;AACjD,IAAA,OAAOC,OAAOC,IAAI,CAACP,WAAWQ,QAAQ,CAACH,OAAOI,MAAM,CAAA;AACtD;AAEO,MAAMC,gBAAmB,GAAA,CAACC,UAAyBC,EAAAA,YAAAA,GAAAA;IACxD,IAAI,CAACR,cAAcO,UAAa,CAAA,EAAA;QAC9B,MAAM,IAAIE,MAAM,CAAC,4BAA4B,EAAEF,UAAWF,CAAAA,MAAM,CAAC,CAAC,CAAA;AACpE;AAEA,IAAA,MAAMK,UAA0B,GAAA;AAAE,QAAA,GAAGH,UAAU;AAAEF,QAAAA,MAAAA,EAAQ,SAAkB,CAACE,UAAAA,CAAWF,MAAM;AAAE,KAAA;;IAG/F,IAAIG,YAAAA,EAAcG,MAAMC,WAAa,EAAA;AACnCF,QAAAA,UAAAA,CAAWC,IAAI,GAAGD,UAAWC,CAAAA,IAAI,IAAI,EAAC;;QAEtC,MAAME,eAAAA,GAAkBH,UAAWC,CAAAA,IAAI,EAAEC,WAAAA;AACzC,QAAA,MAAME,iBAAoBN,GAAAA,YAAAA,CAAaG,IAAI,CAACC,WAAW;AACvDF,QAAAA,UAAAA,CAAWC,IAAI,CAACC,WAAW,GAAG,CAC5BG,IACAC,EAAAA,IAAAA,GAAAA;YAEAF,iBAAkBC,CAAAA,IAAAA,EAAM,CAACE,GAA+BC,EAAAA,UAAAA,GAAAA;AACtD,gBAAA,IAAID,GAAK,EAAA;AACP,oBAAA,OAAOD,KAAKC,GAAKC,EAAAA,UAAAA,CAAAA;AACnB;AACA,gBAAA,IAAIL,eAAiB,EAAA;AACnB,oBAAA,OAAOA,gBAAgBK,UAAYF,EAAAA,IAAAA,CAAAA;AACrC;AACA,gBAAA,OAAOA,KAAK,IAAME,EAAAA,UAAAA,CAAAA;AACpB,aAAA,CAAA;AACF,SAAA;AACF;AAEA,IAAA,OAAOC,IAAKT,CAAAA,UAAAA,CAAAA;AACd;;;;"}

View File

@@ -0,0 +1,41 @@
import knex from 'knex';
// Translates the Strapi-facing client name into the npm driver knex loads.
const clientMap = {
    sqlite: 'better-sqlite3',
    mysql: 'mysql2',
    postgres: 'pg'
};
// True when `config.client` is one of the supported keys of `clientMap`.
function isClientValid(config) {
    return Object.keys(clientMap).includes(config.client);
}
/**
 * Builds the knex connection from `userConfig`, swapping the client name for
 * its driver package and, when Strapi supplies a `pool.afterCreate` hook,
 * chaining it in front of any hook the user configured.
 */
const createConnection = (userConfig, strapiConfig) => {
    if (!isClientValid(userConfig)) {
        throw new Error(`Unsupported database client ${userConfig.client}`);
    }
    const knexConfig = {
        ...userConfig,
        client: clientMap[userConfig.client]
    };
    // initialization code to run upon opening a new connection:
    // Strapi's hook must run first on every freshly created native connection.
    const strapiAfterCreate = strapiConfig?.pool?.afterCreate;
    if (strapiAfterCreate) {
        knexConfig.pool = knexConfig.pool || {};
        // Keep the user's own afterCreate working by invoking it after Strapi's.
        const userAfterCreate = knexConfig.pool.afterCreate;
        knexConfig.pool.afterCreate = (conn, done) => {
            strapiAfterCreate(conn, (err, nativeConn) => {
                if (err) {
                    return done(err, nativeConn);
                }
                return userAfterCreate ? userAfterCreate(nativeConn, done) : done(null, nativeConn);
            });
        };
    }
    return knex(knexConfig);
};
export { createConnection };
//# sourceMappingURL=connection.mjs.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"connection.mjs","sources":["../src/connection.ts"],"sourcesContent":["import knex from 'knex';\nimport type { Knex } from 'knex';\n\nconst clientMap = {\n sqlite: 'better-sqlite3',\n mysql: 'mysql2',\n postgres: 'pg',\n};\n\nfunction isClientValid(config: { client?: unknown }): config is { client: keyof typeof clientMap } {\n return Object.keys(clientMap).includes(config.client as string);\n}\n\nexport const createConnection = (userConfig: Knex.Config, strapiConfig?: Partial<Knex.Config>) => {\n if (!isClientValid(userConfig)) {\n throw new Error(`Unsupported database client ${userConfig.client}`);\n }\n\n const knexConfig: Knex.Config = { ...userConfig, client: (clientMap as any)[userConfig.client] };\n\n // initialization code to run upon opening a new connection\n if (strapiConfig?.pool?.afterCreate) {\n knexConfig.pool = knexConfig.pool || {};\n // if the user has set their own afterCreate in config, we will replace it and call it\n const userAfterCreate = knexConfig.pool?.afterCreate;\n const strapiAfterCreate = strapiConfig.pool.afterCreate;\n knexConfig.pool.afterCreate = (\n conn: unknown,\n done: (err: Error | null | undefined, connection: any) => void\n ) => {\n strapiAfterCreate(conn, (err: Error | null | undefined, nativeConn: any) => {\n if (err) {\n return done(err, nativeConn);\n }\n if (userAfterCreate) {\n return userAfterCreate(nativeConn, done);\n }\n return done(null, nativeConn);\n });\n };\n }\n\n return 
knex(knexConfig);\n};\n"],"names":["clientMap","sqlite","mysql","postgres","isClientValid","config","Object","keys","includes","client","createConnection","userConfig","strapiConfig","Error","knexConfig","pool","afterCreate","userAfterCreate","strapiAfterCreate","conn","done","err","nativeConn","knex"],"mappings":";;AAGA,MAAMA,SAAY,GAAA;IAChBC,MAAQ,EAAA,gBAAA;IACRC,KAAO,EAAA,QAAA;IACPC,QAAU,EAAA;AACZ,CAAA;AAEA,SAASC,cAAcC,MAA4B,EAAA;AACjD,IAAA,OAAOC,OAAOC,IAAI,CAACP,WAAWQ,QAAQ,CAACH,OAAOI,MAAM,CAAA;AACtD;AAEO,MAAMC,gBAAmB,GAAA,CAACC,UAAyBC,EAAAA,YAAAA,GAAAA;IACxD,IAAI,CAACR,cAAcO,UAAa,CAAA,EAAA;QAC9B,MAAM,IAAIE,MAAM,CAAC,4BAA4B,EAAEF,UAAWF,CAAAA,MAAM,CAAC,CAAC,CAAA;AACpE;AAEA,IAAA,MAAMK,UAA0B,GAAA;AAAE,QAAA,GAAGH,UAAU;AAAEF,QAAAA,MAAAA,EAAQ,SAAkB,CAACE,UAAAA,CAAWF,MAAM;AAAE,KAAA;;IAG/F,IAAIG,YAAAA,EAAcG,MAAMC,WAAa,EAAA;AACnCF,QAAAA,UAAAA,CAAWC,IAAI,GAAGD,UAAWC,CAAAA,IAAI,IAAI,EAAC;;QAEtC,MAAME,eAAAA,GAAkBH,UAAWC,CAAAA,IAAI,EAAEC,WAAAA;AACzC,QAAA,MAAME,iBAAoBN,GAAAA,YAAAA,CAAaG,IAAI,CAACC,WAAW;AACvDF,QAAAA,UAAAA,CAAWC,IAAI,CAACC,WAAW,GAAG,CAC5BG,IACAC,EAAAA,IAAAA,GAAAA;YAEAF,iBAAkBC,CAAAA,IAAAA,EAAM,CAACE,GAA+BC,EAAAA,UAAAA,GAAAA;AACtD,gBAAA,IAAID,GAAK,EAAA;AACP,oBAAA,OAAOD,KAAKC,GAAKC,EAAAA,UAAAA,CAAAA;AACnB;AACA,gBAAA,IAAIL,eAAiB,EAAA;AACnB,oBAAA,OAAOA,gBAAgBK,UAAYF,EAAAA,IAAAA,CAAAA;AACrC;AACA,gBAAA,OAAOA,KAAK,IAAME,EAAAA,UAAAA,CAAAA;AACpB,aAAA,CAAA;AACF,SAAA;AACF;AAEA,IAAA,OAAOC,IAAKT,CAAAA,UAAAA,CAAAA;AACd;;;;"}

View File

@@ -0,0 +1,30 @@
import type { Database } from '..';
import type { ForeignKey, Index, Schema } from '../schema';
/**
 * Contract for reading schema metadata (tables, indexes, foreign keys)
 * out of a live database connection.
 */
export interface SchemaInspector {
    getSchema(): Promise<Schema>;
    getIndexes(tableName: string): Promise<Index[]>;
    getForeignKeys(tableName: string): Promise<ForeignKey[]>;
    getTables(): Promise<string[]>;
}
/**
 * Base class answering dialect capability questions with conservative
 * defaults; the base `getTables` throws, so a concrete dialect is
 * presumably expected to override it and install a real
 * `schemaInspector` (NOTE(review): inferred from the base defaults —
 * confirm against the postgres/mysql/sqlite subclasses).
 */
export default class Dialect {
    db: Database;
    schemaInspector: SchemaInspector;
    client: string;
    constructor(db: Database, client: string);
    /** Hook to adjust a raw connection/config; no-op in the base class. */
    configure(conn?: any): void;
    /** Async setup hook; no-op in the base class. */
    initialize(_nativeConnection?: unknown): Promise<void>;
    /** Base implementation throws "not implemented". */
    getTables(): void;
    /** Maps an abstract column type to a dialect SQL type; identity by default. */
    getSqlType(type: unknown): unknown;
    canAlterConstraints(): boolean;
    usesForeignKeys(): boolean;
    useReturning(): boolean;
    supportsUnsigned(): boolean;
    supportsOperator(operator?: string): boolean;
    /** Hook run before a schema migration; no-op in the base class. */
    startSchemaUpdate(): Promise<void>;
    /** Hook run after a schema migration; no-op in the base class. */
    endSchemaUpdate(): Promise<void>;
    /** Normalizes anything error-like into a thrown Error instance. */
    transformErrors(error: Error | {
        message: string;
    }): void;
    canAddIncrements(): boolean;
}
//# sourceMappingURL=dialect.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"dialect.d.ts","sourceRoot":"","sources":["../../src/dialects/dialect.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,IAAI,CAAC;AACnC,OAAO,KAAK,EAAE,UAAU,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,WAAW,CAAC;AAE3D,MAAM,WAAW,eAAe;IAC9B,SAAS,IAAI,OAAO,CAAC,MAAM,CAAC,CAAC;IAC7B,UAAU,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;IAChD,cAAc,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,EAAE,CAAC,CAAC;IACzD,SAAS,IAAI,OAAO,CAAC,MAAM,EAAE,CAAC,CAAC;CAChC;AAED,MAAM,CAAC,OAAO,OAAO,OAAO;IAC1B,EAAE,EAAE,QAAQ,CAAC;IAEb,eAAe,EAAE,eAAe,CAAyB;IAEzD,MAAM,EAAE,MAAM,CAAC;gBAEH,EAAE,EAAE,QAAQ,EAAE,MAAM,EAAE,MAAM;IAMxC,SAAS,CAAC,IAAI,CAAC,EAAE,GAAG;IAGd,UAAU,CAAC,iBAAiB,CAAC,EAAE,OAAO;IAI5C,SAAS;IAIT,UAAU,CAAC,IAAI,EAAE,OAAO;IAIxB,mBAAmB;IAInB,eAAe;IAIf,YAAY;IAIZ,gBAAgB;IAIhB,gBAAgB,CAAC,QAAQ,CAAC,EAAE,MAAM,GAAG,OAAO;IAKtC,iBAAiB;IAIjB,eAAe;IAIrB,eAAe,CAAC,KAAK,EAAE,KAAK,GAAG;QAAE,OAAO,EAAE,MAAM,CAAA;KAAE;IAQlD,gBAAgB;CAGjB"}

View File

@@ -0,0 +1,54 @@
'use strict';
class Dialect {
// eslint-disable-next-line @typescript-eslint/no-unused-vars
configure(conn) {}
// eslint-disable-next-line @typescript-eslint/no-unused-vars
async initialize(_nativeConnection) {
// noop
}
getTables() {
throw new Error('getTables not implemented for this dialect');
}
getSqlType(type) {
return type;
}
canAlterConstraints() {
return true;
}
usesForeignKeys() {
return false;
}
useReturning() {
return false;
}
supportsUnsigned() {
return false;
}
supportsOperator() {
return true;
}
async startSchemaUpdate() {
// noop
}
async endSchemaUpdate() {
// noop
}
transformErrors(error) {
if (error instanceof Error) {
throw error;
}
throw new Error(error.message);
}
canAddIncrements() {
return true;
}
constructor(db, client){
this.schemaInspector = {};
this.db = db;
this.client = client;
}
}
module.exports = Dialect;
//# sourceMappingURL=dialect.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"dialect.js","sources":["../../src/dialects/dialect.ts"],"sourcesContent":["import type { Database } from '..';\nimport type { ForeignKey, Index, Schema } from '../schema';\n\nexport interface SchemaInspector {\n getSchema(): Promise<Schema>;\n getIndexes(tableName: string): Promise<Index[]>;\n getForeignKeys(tableName: string): Promise<ForeignKey[]>;\n getTables(): Promise<string[]>;\n}\n\nexport default class Dialect {\n db: Database;\n\n schemaInspector: SchemaInspector = {} as SchemaInspector;\n\n client: string;\n\n constructor(db: Database, client: string) {\n this.db = db;\n this.client = client;\n }\n\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n configure(conn?: any) {}\n\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n async initialize(_nativeConnection?: unknown) {\n // noop\n }\n\n getTables() {\n throw new Error('getTables not implemented for this dialect');\n }\n\n getSqlType(type: unknown) {\n return type;\n }\n\n canAlterConstraints() {\n return true;\n }\n\n usesForeignKeys() {\n return false;\n }\n\n useReturning() {\n return false;\n }\n\n supportsUnsigned() {\n return false;\n }\n\n supportsOperator(operator?: string): boolean;\n supportsOperator(): boolean {\n return true;\n }\n\n async startSchemaUpdate() {\n // noop\n }\n\n async endSchemaUpdate() {\n // noop\n }\n\n transformErrors(error: Error | { message: string }) {\n if (error instanceof Error) {\n throw error;\n }\n\n throw new Error(error.message);\n }\n\n canAddIncrements() {\n return true;\n 
}\n}\n"],"names":["Dialect","configure","conn","initialize","_nativeConnection","getTables","Error","getSqlType","type","canAlterConstraints","usesForeignKeys","useReturning","supportsUnsigned","supportsOperator","startSchemaUpdate","endSchemaUpdate","transformErrors","error","message","canAddIncrements","constructor","db","client","schemaInspector"],"mappings":";;AAUe,MAAMA,OAAAA,CAAAA;;IAanBC,SAAUC,CAAAA,IAAU,EAAE;;IAGtB,MAAMC,UAAAA,CAAWC,iBAA2B,EAAE;;AAE9C;IAEAC,SAAY,GAAA;AACV,QAAA,MAAM,IAAIC,KAAM,CAAA,4CAAA,CAAA;AAClB;AAEAC,IAAAA,UAAAA,CAAWC,IAAa,EAAE;QACxB,OAAOA,IAAAA;AACT;IAEAC,mBAAsB,GAAA;QACpB,OAAO,IAAA;AACT;IAEAC,eAAkB,GAAA;QAChB,OAAO,KAAA;AACT;IAEAC,YAAe,GAAA;QACb,OAAO,KAAA;AACT;IAEAC,gBAAmB,GAAA;QACjB,OAAO,KAAA;AACT;IAGAC,gBAA4B,GAAA;QAC1B,OAAO,IAAA;AACT;AAEA,IAAA,MAAMC,iBAAoB,GAAA;;AAE1B;AAEA,IAAA,MAAMC,eAAkB,GAAA;;AAExB;AAEAC,IAAAA,eAAAA,CAAgBC,KAAkC,EAAE;AAClD,QAAA,IAAIA,iBAAiBX,KAAO,EAAA;YAC1B,MAAMW,KAAAA;AACR;QAEA,MAAM,IAAIX,KAAMW,CAAAA,KAAAA,CAAMC,OAAO,CAAA;AAC/B;IAEAC,gBAAmB,GAAA;QACjB,OAAO,IAAA;AACT;IA5DAC,WAAYC,CAAAA,EAAY,EAAEC,MAAc,CAAE;AAJ1CC,QAAAA,IAAAA,CAAAA,eAAAA,GAAmC,EAAC;QAKlC,IAAI,CAACF,EAAE,GAAGA,EAAAA;QACV,IAAI,CAACC,MAAM,GAAGA,MAAAA;AAChB;AA0DF;;;;"}

View File

@@ -0,0 +1,52 @@
class Dialect {
// eslint-disable-next-line @typescript-eslint/no-unused-vars
configure(conn) {}
// eslint-disable-next-line @typescript-eslint/no-unused-vars
async initialize(_nativeConnection) {
// noop
}
getTables() {
throw new Error('getTables not implemented for this dialect');
}
getSqlType(type) {
return type;
}
canAlterConstraints() {
return true;
}
usesForeignKeys() {
return false;
}
useReturning() {
return false;
}
supportsUnsigned() {
return false;
}
supportsOperator() {
return true;
}
async startSchemaUpdate() {
// noop
}
async endSchemaUpdate() {
// noop
}
transformErrors(error) {
if (error instanceof Error) {
throw error;
}
throw new Error(error.message);
}
canAddIncrements() {
return true;
}
constructor(db, client){
this.schemaInspector = {};
this.db = db;
this.client = client;
}
}
export { Dialect as default };
//# sourceMappingURL=dialect.mjs.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"dialect.mjs","sources":["../../src/dialects/dialect.ts"],"sourcesContent":["import type { Database } from '..';\nimport type { ForeignKey, Index, Schema } from '../schema';\n\nexport interface SchemaInspector {\n getSchema(): Promise<Schema>;\n getIndexes(tableName: string): Promise<Index[]>;\n getForeignKeys(tableName: string): Promise<ForeignKey[]>;\n getTables(): Promise<string[]>;\n}\n\nexport default class Dialect {\n db: Database;\n\n schemaInspector: SchemaInspector = {} as SchemaInspector;\n\n client: string;\n\n constructor(db: Database, client: string) {\n this.db = db;\n this.client = client;\n }\n\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n configure(conn?: any) {}\n\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n async initialize(_nativeConnection?: unknown) {\n // noop\n }\n\n getTables() {\n throw new Error('getTables not implemented for this dialect');\n }\n\n getSqlType(type: unknown) {\n return type;\n }\n\n canAlterConstraints() {\n return true;\n }\n\n usesForeignKeys() {\n return false;\n }\n\n useReturning() {\n return false;\n }\n\n supportsUnsigned() {\n return false;\n }\n\n supportsOperator(operator?: string): boolean;\n supportsOperator(): boolean {\n return true;\n }\n\n async startSchemaUpdate() {\n // noop\n }\n\n async endSchemaUpdate() {\n // noop\n }\n\n transformErrors(error: Error | { message: string }) {\n if (error instanceof Error) {\n throw error;\n }\n\n throw new Error(error.message);\n }\n\n canAddIncrements() {\n return true;\n 
}\n}\n"],"names":["Dialect","configure","conn","initialize","_nativeConnection","getTables","Error","getSqlType","type","canAlterConstraints","usesForeignKeys","useReturning","supportsUnsigned","supportsOperator","startSchemaUpdate","endSchemaUpdate","transformErrors","error","message","canAddIncrements","constructor","db","client","schemaInspector"],"mappings":"AAUe,MAAMA,OAAAA,CAAAA;;IAanBC,SAAUC,CAAAA,IAAU,EAAE;;IAGtB,MAAMC,UAAAA,CAAWC,iBAA2B,EAAE;;AAE9C;IAEAC,SAAY,GAAA;AACV,QAAA,MAAM,IAAIC,KAAM,CAAA,4CAAA,CAAA;AAClB;AAEAC,IAAAA,UAAAA,CAAWC,IAAa,EAAE;QACxB,OAAOA,IAAAA;AACT;IAEAC,mBAAsB,GAAA;QACpB,OAAO,IAAA;AACT;IAEAC,eAAkB,GAAA;QAChB,OAAO,KAAA;AACT;IAEAC,YAAe,GAAA;QACb,OAAO,KAAA;AACT;IAEAC,gBAAmB,GAAA;QACjB,OAAO,KAAA;AACT;IAGAC,gBAA4B,GAAA;QAC1B,OAAO,IAAA;AACT;AAEA,IAAA,MAAMC,iBAAoB,GAAA;;AAE1B;AAEA,IAAA,MAAMC,eAAkB,GAAA;;AAExB;AAEAC,IAAAA,eAAAA,CAAgBC,KAAkC,EAAE;AAClD,QAAA,IAAIA,iBAAiBX,KAAO,EAAA;YAC1B,MAAMW,KAAAA;AACR;QAEA,MAAM,IAAIX,KAAMW,CAAAA,KAAAA,CAAMC,OAAO,CAAA;AAC/B;IAEAC,gBAAmB,GAAA;QACjB,OAAO,IAAA;AACT;IA5DAC,WAAYC,CAAAA,EAAY,EAAEC,MAAc,CAAE;AAJ1CC,QAAAA,IAAAA,CAAAA,eAAAA,GAAmC,EAAC;QAKlC,IAAI,CAACF,EAAE,GAAGA,EAAAA;QACV,IAAI,CAACC,MAAM,GAAGA,MAAAA;AAChB;AA0DF;;;;"}

View File

@@ -0,0 +1,5 @@
import type { Database } from '..';
import Dialect from './dialect';
/**
 * Resolves and instantiates the Dialect implementation matching
 * `db.config.connection.client` (postgres, mysql or sqlite); throws
 * `Unknown dialect ...` for any other client value.
 */
declare const getDialect: (db: Database) => Dialect;
export { Dialect, getDialect };
//# sourceMappingURL=index.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/dialects/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,IAAI,CAAC;AACnC,OAAO,OAAO,MAAM,WAAW,CAAC;AAqChC,QAAA,MAAM,UAAU,OAAQ,QAAQ,YAQ/B,CAAC;AAEF,OAAO,EAAE,OAAO,EAAE,UAAU,EAAE,CAAC"}

View File

@@ -0,0 +1,44 @@
'use strict';
var PostgresDialect = require('./postgresql/index.js');
var MysqlDialect = require('./mysql/index.js');
var SqliteDialect = require('./sqlite/index.js');
/**
 * Require our dialect-specific code: resolves the class implementing a
 * given dialect name.
 */
const getDialectClass = (client) => {
    if (client === 'postgres') return PostgresDialect;
    if (client === 'mysql') return MysqlDialect;
    if (client === 'sqlite') return SqliteDialect;
    throw new Error(`Unknown dialect ${client}`);
};
/**
 * Get the dialect of a database client: validates the configured client
 * value and returns it as a canonical dialect name.
 */
const getDialectName = (client) => {
    switch (client) {
        case 'postgres':
        case 'mysql':
        case 'sqlite':
            return client;
        default:
            throw new Error(`Unknown dialect ${client}`);
    }
};
/**
 * Instantiates the dialect object for `db.config.connection.client`.
 */
const getDialect = (db) => {
    const { client } = db.config.connection;
    const dialectName = getDialectName(client);
    const DialectClass = getDialectClass(dialectName);
    return new DialectClass(db, dialectName);
};
exports.getDialect = getDialect;
//# sourceMappingURL=index.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sources":["../../src/dialects/index.ts"],"sourcesContent":["import type { Database } from '..';\nimport Dialect from './dialect';\nimport PostgresClass from './postgresql';\nimport MysqlClass from './mysql';\nimport SqliteClass from './sqlite';\n\n/**\n * Require our dialect-specific code\n */\nconst getDialectClass = (client: string): typeof Dialect => {\n switch (client) {\n case 'postgres':\n return PostgresClass;\n case 'mysql':\n return MysqlClass;\n case 'sqlite':\n return SqliteClass;\n default:\n throw new Error(`Unknown dialect ${client}`);\n }\n};\n\n/**\n * Get the dialect of a database client\n */\nconst getDialectName = (client: unknown) => {\n switch (client) {\n case 'postgres':\n return 'postgres';\n case 'mysql':\n return 'mysql';\n case 'sqlite':\n return 'sqlite';\n default:\n throw new Error(`Unknown dialect ${client}`);\n }\n};\n\nconst getDialect = (db: Database) => {\n const { client } = db.config.connection;\n const dialectName = getDialectName(client);\n\n const constructor = getDialectClass(dialectName);\n const dialect = new constructor(db, dialectName);\n\n return dialect;\n};\n\nexport { Dialect, getDialect 
};\n"],"names":["getDialectClass","client","PostgresClass","MysqlClass","SqliteClass","Error","getDialectName","getDialect","db","config","connection","dialectName","constructor","dialect"],"mappings":";;;;;;AAMA;;IAGA,MAAMA,kBAAkB,CAACC,MAAAA,GAAAA;IACvB,OAAQA,MAAAA;QACN,KAAK,UAAA;YACH,OAAOC,OAAAA;QACT,KAAK,OAAA;YACH,OAAOC,OAAAA;QACT,KAAK,QAAA;YACH,OAAOC,KAAAA;AACT,QAAA;AACE,YAAA,MAAM,IAAIC,KAAM,CAAA,CAAC,gBAAgB,EAAEJ,OAAO,CAAC,CAAA;AAC/C;AACF,CAAA;AAEA;;IAGA,MAAMK,iBAAiB,CAACL,MAAAA,GAAAA;IACtB,OAAQA,MAAAA;QACN,KAAK,UAAA;YACH,OAAO,UAAA;QACT,KAAK,OAAA;YACH,OAAO,OAAA;QACT,KAAK,QAAA;YACH,OAAO,QAAA;AACT,QAAA;AACE,YAAA,MAAM,IAAII,KAAM,CAAA,CAAC,gBAAgB,EAAEJ,OAAO,CAAC,CAAA;AAC/C;AACF,CAAA;AAEA,MAAMM,aAAa,CAACC,EAAAA,GAAAA;AAClB,IAAA,MAAM,EAAEP,MAAM,EAAE,GAAGO,EAAGC,CAAAA,MAAM,CAACC,UAAU;AACvC,IAAA,MAAMC,cAAcL,cAAeL,CAAAA,MAAAA,CAAAA;AAEnC,IAAA,MAAMW,cAAcZ,eAAgBW,CAAAA,WAAAA,CAAAA;IACpC,MAAME,OAAAA,GAAU,IAAID,WAAAA,CAAYJ,EAAIG,EAAAA,WAAAA,CAAAA;IAEpC,OAAOE,OAAAA;AACT;;;;"}

View File

@@ -0,0 +1,42 @@
import PostgresDialect from './postgresql/index.mjs';
import MysqlDialect from './mysql/index.mjs';
import SqliteDialect from './sqlite/index.mjs';
/**
 * Require our dialect-specific code: resolves the class implementing a
 * given dialect name.
 */
const getDialectClass = (client) => {
    if (client === 'postgres') return PostgresDialect;
    if (client === 'mysql') return MysqlDialect;
    if (client === 'sqlite') return SqliteDialect;
    throw new Error(`Unknown dialect ${client}`);
};
/**
 * Get the dialect of a database client: validates the configured client
 * value and returns it as a canonical dialect name.
 */
const getDialectName = (client) => {
    switch (client) {
        case 'postgres':
        case 'mysql':
        case 'sqlite':
            return client;
        default:
            throw new Error(`Unknown dialect ${client}`);
    }
};
/**
 * Instantiates the dialect object for `db.config.connection.client`.
 */
const getDialect = (db) => {
    const { client } = db.config.connection;
    const dialectName = getDialectName(client);
    const DialectClass = getDialectClass(dialectName);
    return new DialectClass(db, dialectName);
};
export { getDialect };
//# sourceMappingURL=index.mjs.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.mjs","sources":["../../src/dialects/index.ts"],"sourcesContent":["import type { Database } from '..';\nimport Dialect from './dialect';\nimport PostgresClass from './postgresql';\nimport MysqlClass from './mysql';\nimport SqliteClass from './sqlite';\n\n/**\n * Require our dialect-specific code\n */\nconst getDialectClass = (client: string): typeof Dialect => {\n switch (client) {\n case 'postgres':\n return PostgresClass;\n case 'mysql':\n return MysqlClass;\n case 'sqlite':\n return SqliteClass;\n default:\n throw new Error(`Unknown dialect ${client}`);\n }\n};\n\n/**\n * Get the dialect of a database client\n */\nconst getDialectName = (client: unknown) => {\n switch (client) {\n case 'postgres':\n return 'postgres';\n case 'mysql':\n return 'mysql';\n case 'sqlite':\n return 'sqlite';\n default:\n throw new Error(`Unknown dialect ${client}`);\n }\n};\n\nconst getDialect = (db: Database) => {\n const { client } = db.config.connection;\n const dialectName = getDialectName(client);\n\n const constructor = getDialectClass(dialectName);\n const dialect = new constructor(db, dialectName);\n\n return dialect;\n};\n\nexport { Dialect, getDialect 
};\n"],"names":["getDialectClass","client","PostgresClass","MysqlClass","SqliteClass","Error","getDialectName","getDialect","db","config","connection","dialectName","constructor","dialect"],"mappings":";;;;AAMA;;IAGA,MAAMA,kBAAkB,CAACC,MAAAA,GAAAA;IACvB,OAAQA,MAAAA;QACN,KAAK,UAAA;YACH,OAAOC,eAAAA;QACT,KAAK,OAAA;YACH,OAAOC,YAAAA;QACT,KAAK,QAAA;YACH,OAAOC,aAAAA;AACT,QAAA;AACE,YAAA,MAAM,IAAIC,KAAM,CAAA,CAAC,gBAAgB,EAAEJ,OAAO,CAAC,CAAA;AAC/C;AACF,CAAA;AAEA;;IAGA,MAAMK,iBAAiB,CAACL,MAAAA,GAAAA;IACtB,OAAQA,MAAAA;QACN,KAAK,UAAA;YACH,OAAO,UAAA;QACT,KAAK,OAAA;YACH,OAAO,OAAA;QACT,KAAK,QAAA;YACH,OAAO,QAAA;AACT,QAAA;AACE,YAAA,MAAM,IAAII,KAAM,CAAA,CAAC,gBAAgB,EAAEJ,OAAO,CAAC,CAAA;AAC/C;AACF,CAAA;AAEA,MAAMM,aAAa,CAACC,EAAAA,GAAAA;AAClB,IAAA,MAAM,EAAEP,MAAM,EAAE,GAAGO,EAAGC,CAAAA,MAAM,CAACC,UAAU;AACvC,IAAA,MAAMC,cAAcL,cAAeL,CAAAA,MAAAA,CAAAA;AAEnC,IAAA,MAAMW,cAAcZ,eAAgBW,CAAAA,WAAAA,CAAAA;IACpC,MAAME,OAAAA,GAAU,IAAID,WAAAA,CAAYJ,EAAIG,EAAAA,WAAAA,CAAAA;IAEpC,OAAOE,OAAAA;AACT;;;;"}

View File

@@ -0,0 +1,3 @@
/** Flavour marker reported by the MySQL database inspector for a MySQL server. */
export declare const MYSQL = "MYSQL";
/** Flavour marker reported by the MySQL database inspector for a MariaDB server. */
export declare const MARIADB = "MARIADB";
//# sourceMappingURL=constants.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"constants.d.ts","sourceRoot":"","sources":["../../../src/dialects/mysql/constants.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,KAAK,UAAU,CAAC;AAC7B,eAAO,MAAM,OAAO,YAAY,CAAC"}

View File

@@ -0,0 +1,8 @@
'use strict';
// Flavour markers reported by the MySQL database inspector.
const MARIADB = 'MARIADB';
const MYSQL = 'MYSQL';
exports.MARIADB = MARIADB;
exports.MYSQL = MYSQL;
//# sourceMappingURL=constants.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"constants.js","sources":["../../../src/dialects/mysql/constants.ts"],"sourcesContent":["export const MYSQL = 'MYSQL';\nexport const MARIADB = 'MARIADB';\n"],"names":["MYSQL","MARIADB"],"mappings":";;AAAO,MAAMA,QAAQ;AACd,MAAMC,UAAU;;;;;"}

View File

@@ -0,0 +1,5 @@
// Flavour markers reported by the MySQL database inspector.
const MARIADB = 'MARIADB';
const MYSQL = 'MYSQL';
export { MARIADB, MYSQL };
//# sourceMappingURL=constants.mjs.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"constants.mjs","sources":["../../../src/dialects/mysql/constants.ts"],"sourcesContent":["export const MYSQL = 'MYSQL';\nexport const MARIADB = 'MARIADB';\n"],"names":["MYSQL","MARIADB"],"mappings":"AAAO,MAAMA,QAAQ;AACd,MAAMC,UAAU;;;;"}

View File

@@ -0,0 +1,12 @@
import { MARIADB, MYSQL } from './constants';
import type { Database } from '../..';
export interface Information {
database: typeof MARIADB | typeof MYSQL | null;
version: string | null;
}
export default class MysqlDatabaseInspector {
db: Database;
constructor(db: Database);
getInformation(nativeConnection?: unknown): Promise<Information>;
}
//# sourceMappingURL=database-inspector.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"database-inspector.d.ts","sourceRoot":"","sources":["../../../src/dialects/mysql/database-inspector.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,KAAK,EAAE,MAAM,aAAa,CAAC;AAC7C,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,OAAO,CAAC;AAEtC,MAAM,WAAW,WAAW;IAC1B,QAAQ,EAAE,OAAO,OAAO,GAAG,OAAO,KAAK,GAAG,IAAI,CAAC;IAC/C,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;CACxB;AAMD,MAAM,CAAC,OAAO,OAAO,sBAAsB;IACzC,EAAE,EAAE,QAAQ,CAAC;gBAED,EAAE,EAAE,QAAQ;IAIlB,cAAc,CAAC,gBAAgB,CAAC,EAAE,OAAO,GAAG,OAAO,CAAC,WAAW,CAAC;CAuBvE"}

View File

@@ -0,0 +1,35 @@
'use strict';
var constants = require('./constants.js');
// Single query used to probe the server's version string.
const SQL_QUERIES = {
    VERSION: `SELECT version() as version`
};
/**
 * Probes a MySQL-compatible server for its flavour (MySQL vs MariaDB) and
 * version number by parsing the `version()` result. Any failure is treated
 * as "unknown" rather than an error.
 */
class MysqlDatabaseInspector {
    constructor(db) {
        this.db = db;
    }
    async getInformation(nativeConnection) {
        try {
            const [results] = await this.db.connection.raw(SQL_QUERIES.VERSION).connection(nativeConnection);
            // MariaDB version strings carry the flavour after a dash,
            // e.g. "10.6.5-MariaDB"; plain MySQL reports just "8.0.30".
            const [versionNumber, flavour] = results[0].version.split('-');
            const database = flavour && flavour.toLowerCase() === 'mariadb' ? constants.MARIADB : constants.MYSQL;
            return {
                database,
                version: versionNumber
            };
        } catch (e) {
            // Deliberate best-effort: an unreachable server yields nulls.
            return {
                database: null,
                version: null
            };
        }
    }
}
module.exports = MysqlDatabaseInspector;
//# sourceMappingURL=database-inspector.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"database-inspector.js","sources":["../../../src/dialects/mysql/database-inspector.ts"],"sourcesContent":["import { MARIADB, MYSQL } from './constants';\nimport type { Database } from '../..';\n\nexport interface Information {\n database: typeof MARIADB | typeof MYSQL | null;\n version: string | null;\n}\n\nconst SQL_QUERIES = {\n VERSION: `SELECT version() as version`,\n};\n\nexport default class MysqlDatabaseInspector {\n db: Database;\n\n constructor(db: Database) {\n this.db = db;\n }\n\n async getInformation(nativeConnection?: unknown): Promise<Information> {\n let database: Information['database'];\n let versionNumber: Information['version'];\n try {\n const [results] = await this.db.connection\n .raw(SQL_QUERIES.VERSION)\n .connection(nativeConnection);\n const versionSplit = results[0].version.split('-');\n const databaseName = versionSplit[1];\n versionNumber = versionSplit[0];\n database = databaseName && databaseName.toLowerCase() === 'mariadb' ? MARIADB : MYSQL;\n } catch (e) {\n return {\n database: null,\n version: null,\n };\n }\n\n return {\n database,\n version: versionNumber,\n };\n 
}\n}\n"],"names":["SQL_QUERIES","VERSION","MysqlDatabaseInspector","getInformation","nativeConnection","database","versionNumber","results","db","connection","raw","versionSplit","version","split","databaseName","toLowerCase","MARIADB","MYSQL","e","constructor"],"mappings":";;;;AAQA,MAAMA,WAAc,GAAA;IAClBC,OAAS,EAAA,CAAC,2BAA2B;AACvC,CAAA;AAEe,MAAMC,sBAAAA,CAAAA;IAOnB,MAAMC,cAAAA,CAAeC,gBAA0B,EAAwB;QACrE,IAAIC,QAAAA;QACJ,IAAIC,aAAAA;QACJ,IAAI;AACF,YAAA,MAAM,CAACC,OAAQ,CAAA,GAAG,MAAM,IAAI,CAACC,EAAE,CAACC,UAAU,CACvCC,GAAG,CAACV,WAAAA,CAAYC,OAAO,CAAA,CACvBQ,UAAU,CAACL,gBAAAA,CAAAA;YACd,MAAMO,YAAAA,GAAeJ,OAAO,CAAC,CAAA,CAAE,CAACK,OAAO,CAACC,KAAK,CAAC,GAAA,CAAA;YAC9C,MAAMC,YAAAA,GAAeH,YAAY,CAAC,CAAE,CAAA;YACpCL,aAAgBK,GAAAA,YAAY,CAAC,CAAE,CAAA;AAC/BN,YAAAA,QAAAA,GAAWS,YAAgBA,IAAAA,YAAAA,CAAaC,WAAW,EAAA,KAAO,YAAYC,iBAAUC,GAAAA,eAAAA;AAClF,SAAA,CAAE,OAAOC,CAAG,EAAA;YACV,OAAO;gBACLb,QAAU,EAAA,IAAA;gBACVO,OAAS,EAAA;AACX,aAAA;AACF;QAEA,OAAO;AACLP,YAAAA,QAAAA;YACAO,OAASN,EAAAA;AACX,SAAA;AACF;AA1BAa,IAAAA,WAAAA,CAAYX,EAAY,CAAE;QACxB,IAAI,CAACA,EAAE,GAAGA,EAAAA;AACZ;AAyBF;;;;"}

View File

@@ -0,0 +1,33 @@
import { MARIADB, MYSQL } from './constants.mjs';
const SQL_QUERIES = {
    VERSION: `SELECT version() as version`
};
/**
 * Inspects a MySQL-compatible server to work out whether it is MySQL or
 * MariaDB and which version it runs.
 */
class MysqlDatabaseInspector {
    constructor(db) {
        // Database instance whose knex connection is used for the raw query.
        this.db = db;
    }
    /**
     * Runs `SELECT version()` and parses the result. MariaDB reports version
     * strings such as "10.6.5-MariaDB", so a "mariadb" token after the first
     * dash identifies the flavor; anything else is treated as plain MySQL.
     * Resolves with `{ database: null, version: null }` when the query fails.
     */
    async getInformation(nativeConnection) {
        try {
            const [rows] = await this.db.connection
                .raw(SQL_QUERIES.VERSION)
                .connection(nativeConnection);
            const [version, flavor] = rows[0].version.split('-');
            const isMariadb = flavor && flavor.toLowerCase() === 'mariadb';
            return {
                database: isMariadb ? MARIADB : MYSQL,
                version
            };
        } catch (e) {
            // Lack of permissions or a dropped connection: report "unknown".
            return {
                database: null,
                version: null
            };
        }
    }
}
export { MysqlDatabaseInspector as default };
//# sourceMappingURL=database-inspector.mjs.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"database-inspector.mjs","sources":["../../../src/dialects/mysql/database-inspector.ts"],"sourcesContent":["import { MARIADB, MYSQL } from './constants';\nimport type { Database } from '../..';\n\nexport interface Information {\n database: typeof MARIADB | typeof MYSQL | null;\n version: string | null;\n}\n\nconst SQL_QUERIES = {\n VERSION: `SELECT version() as version`,\n};\n\nexport default class MysqlDatabaseInspector {\n db: Database;\n\n constructor(db: Database) {\n this.db = db;\n }\n\n async getInformation(nativeConnection?: unknown): Promise<Information> {\n let database: Information['database'];\n let versionNumber: Information['version'];\n try {\n const [results] = await this.db.connection\n .raw(SQL_QUERIES.VERSION)\n .connection(nativeConnection);\n const versionSplit = results[0].version.split('-');\n const databaseName = versionSplit[1];\n versionNumber = versionSplit[0];\n database = databaseName && databaseName.toLowerCase() === 'mariadb' ? MARIADB : MYSQL;\n } catch (e) {\n return {\n database: null,\n version: null,\n };\n }\n\n return {\n database,\n version: versionNumber,\n };\n 
}\n}\n"],"names":["SQL_QUERIES","VERSION","MysqlDatabaseInspector","getInformation","nativeConnection","database","versionNumber","results","db","connection","raw","versionSplit","version","split","databaseName","toLowerCase","MARIADB","MYSQL","e","constructor"],"mappings":";;AAQA,MAAMA,WAAc,GAAA;IAClBC,OAAS,EAAA,CAAC,2BAA2B;AACvC,CAAA;AAEe,MAAMC,sBAAAA,CAAAA;IAOnB,MAAMC,cAAAA,CAAeC,gBAA0B,EAAwB;QACrE,IAAIC,QAAAA;QACJ,IAAIC,aAAAA;QACJ,IAAI;AACF,YAAA,MAAM,CAACC,OAAQ,CAAA,GAAG,MAAM,IAAI,CAACC,EAAE,CAACC,UAAU,CACvCC,GAAG,CAACV,WAAAA,CAAYC,OAAO,CAAA,CACvBQ,UAAU,CAACL,gBAAAA,CAAAA;YACd,MAAMO,YAAAA,GAAeJ,OAAO,CAAC,CAAA,CAAE,CAACK,OAAO,CAACC,KAAK,CAAC,GAAA,CAAA;YAC9C,MAAMC,YAAAA,GAAeH,YAAY,CAAC,CAAE,CAAA;YACpCL,aAAgBK,GAAAA,YAAY,CAAC,CAAE,CAAA;AAC/BN,YAAAA,QAAAA,GAAWS,YAAgBA,IAAAA,YAAAA,CAAaC,WAAW,EAAA,KAAO,YAAYC,OAAUC,GAAAA,KAAAA;AAClF,SAAA,CAAE,OAAOC,CAAG,EAAA;YACV,OAAO;gBACLb,QAAU,EAAA,IAAA;gBACVO,OAAS,EAAA;AACX,aAAA;AACF;QAEA,OAAO;AACLP,YAAAA,QAAAA;YACAO,OAASN,EAAAA;AACX,SAAA;AACF;AA1BAa,IAAAA,WAAAA,CAAYX,EAAY,CAAE;QACxB,IAAI,CAACA,EAAE,GAAGA,EAAAA;AACZ;AAyBF;;;;"}

View File

@@ -0,0 +1,19 @@
import Dialect from '../dialect';
import MysqlSchemaInspector from './schema-inspector';
import MysqlDatabaseInspector from './database-inspector';
import type { Database } from '../..';
import type { Information } from './database-inspector';
/** Dialect implementation for MySQL/MariaDB. */
export default class MysqlDialect extends Dialect {
    schemaInspector: MysqlSchemaInspector;
    databaseInspector: MysqlDatabaseInspector;
    /** Cached server flavor/version, populated lazily by initialize(). */
    info: Information | null;
    constructor(db: Database);
    /** Tunes the mysql driver connection options (big numbers, type casting). */
    configure(): void;
    /** Per-connection session setup; also caches server info on first call. */
    initialize(nativeConnection: unknown): Promise<void>;
    /** Relaxes FK / primary-key session checks before a schema migration. */
    startSchemaUpdate(): Promise<void>;
    /** Restores FK session checks after a schema migration. */
    endSchemaUpdate(): Promise<void>;
    supportsUnsigned(): boolean;
    usesForeignKeys(): boolean;
    transformErrors(error: Error): void;
}
//# sourceMappingURL=index.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/dialects/mysql/index.ts"],"names":[],"mappings":"AAEA,OAAO,OAAO,MAAM,YAAY,CAAC;AACjC,OAAO,oBAAoB,MAAM,oBAAoB,CAAC;AACtD,OAAO,sBAAsB,MAAM,sBAAsB,CAAC;AAC1D,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,OAAO,CAAC;AAEtC,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,sBAAsB,CAAC;AAExD,MAAM,CAAC,OAAO,OAAO,YAAa,SAAQ,OAAO;IAC/C,eAAe,EAAE,oBAAoB,CAAC;IAEtC,iBAAiB,EAAE,sBAAsB,CAAC;IAE1C,IAAI,EAAE,WAAW,GAAG,IAAI,CAAQ;gBAEpB,EAAE,EAAE,QAAQ;IAOxB,SAAS;IA+BH,UAAU,CAAC,gBAAgB,EAAE,OAAO;IAoBpC,iBAAiB;IASjB,eAAe;IAIrB,gBAAgB;IAIhB,eAAe;IAIf,eAAe,CAAC,KAAK,EAAE,KAAK;CAG7B"}

View File

@@ -0,0 +1,75 @@
'use strict';
var dialect = require('../dialect.js');
var schemaInspector = require('./schema-inspector.js');
var databaseInspector = require('./database-inspector.js');
/**
 * Knex dialect for MySQL and MariaDB. Wires up the schema and database
 * inspectors and tunes the mysql driver so values round-trip predictably.
 */
class MysqlDialect extends dialect {
    /**
     * Adjusts the raw mysql driver connection options before connecting.
     * Installs a typeCast hook so that:
     *  - DECIMAL/NEWDECIMAL values come back as JS numbers (null preserved),
     *  - TINY(1) columns come back as booleans ('1' => true, null/'' => null),
     *  - DATE columns are returned as the raw string from the driver.
     */
    configure() {
        const connection = this.db.config.connection.connection;
        // supportBigNumbers is a mysql driver option — presumably needed so
        // large BIGINT/DECIMAL values survive; confirm against driver docs.
        connection.supportBigNumbers = true;
        // Only allow bigNumberStrings option set to be true if no connection option passed
        // Otherwise bigNumberStrings option should be allowed to used from DB config
        if (connection.bigNumberStrings === undefined) {
            connection.bigNumberStrings = true;
        }
        connection.typeCast = (field, next)=>{
            if (field.type === 'DECIMAL' || field.type === 'NEWDECIMAL') {
                const value = field.string();
                return value === null ? null : Number(value);
            }
            if (field.type === 'TINY' && field.length === 1) {
                const value = field.string();
                // NULL (and empty string) map to null; otherwise boolean.
                return value ? value === '1' : null;
            }
            if (field.type === 'DATE') {
                return field.string();
            }
            // Fall through to the driver's default conversion.
            return next();
        };
    }
    /**
     * Per-connection setup: best-effort disables sql_require_primary_key for
     * the session, then caches server flavor/version info on first use.
     */
    async initialize(nativeConnection) {
        try {
            await this.db.connection.raw(`set session sql_require_primary_key = 0;`).connection(nativeConnection);
        } catch (err) {
        // Ignore error due to lack of session permissions
        }
        // We only need to get info on the first connection in the pool.
        // Note: there is a race condition here where, if two connections are
        // opened at the same time, both will retrieve db info. It doesn't
        // cause issues — just one wasted query, once — so we leave it rather
        // than add extra complexity.
        if (!this.info) {
            this.info = await this.databaseInspector.getInformation(nativeConnection);
        }
    }
    /** Relaxes FK and primary-key session checks before a schema migration. */
    async startSchemaUpdate() {
        try {
            await this.db.connection.raw(`set foreign_key_checks = 0;`);
            await this.db.connection.raw(`set session sql_require_primary_key = 0;`);
        } catch (err) {
        // Ignore error due to lack of session permissions
        }
    }
    /** Re-enables FK checks after a schema migration. */
    async endSchemaUpdate() {
        await this.db.connection.raw(`set foreign_key_checks = 1;`);
    }
    /** MySQL supports UNSIGNED integer columns. */
    supportsUnsigned() {
        return true;
    }
    /** MySQL enforces foreign key constraints. */
    usesForeignKeys() {
        return true;
    }
    /** No MySQL-specific error mapping; defer entirely to the base dialect. */
    transformErrors(error) {
        super.transformErrors(error);
    }
    constructor(db){
        super(db, 'mysql');
        // Cached flavor/version info, filled in lazily by initialize().
        this.info = null;
        this.schemaInspector = new schemaInspector(db);
        this.databaseInspector = new databaseInspector(db);
    }
}
module.exports = MysqlDialect;
//# sourceMappingURL=index.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,73 @@
import Dialect from '../dialect.mjs';
import MysqlSchemaInspector from './schema-inspector.mjs';
import MysqlDatabaseInspector from './database-inspector.mjs';
/**
 * Knex dialect for MySQL and MariaDB. Wires up the schema and database
 * inspectors and tunes the mysql driver so values round-trip predictably.
 */
class MysqlDialect extends Dialect {
    /**
     * Adjusts the raw mysql driver connection options before connecting.
     * Installs a typeCast hook so that:
     *  - DECIMAL/NEWDECIMAL values come back as JS numbers (null preserved),
     *  - TINY(1) columns come back as booleans ('1' => true, null/'' => null),
     *  - DATE columns are returned as the raw string from the driver.
     */
    configure() {
        const connection = this.db.config.connection.connection;
        // supportBigNumbers is a mysql driver option — presumably needed so
        // large BIGINT/DECIMAL values survive; confirm against driver docs.
        connection.supportBigNumbers = true;
        // Only allow bigNumberStrings option set to be true if no connection option passed
        // Otherwise bigNumberStrings option should be allowed to used from DB config
        if (connection.bigNumberStrings === undefined) {
            connection.bigNumberStrings = true;
        }
        connection.typeCast = (field, next)=>{
            if (field.type === 'DECIMAL' || field.type === 'NEWDECIMAL') {
                const value = field.string();
                return value === null ? null : Number(value);
            }
            if (field.type === 'TINY' && field.length === 1) {
                const value = field.string();
                // NULL (and empty string) map to null; otherwise boolean.
                return value ? value === '1' : null;
            }
            if (field.type === 'DATE') {
                return field.string();
            }
            // Fall through to the driver's default conversion.
            return next();
        };
    }
    /**
     * Per-connection setup: best-effort disables sql_require_primary_key for
     * the session, then caches server flavor/version info on first use.
     */
    async initialize(nativeConnection) {
        try {
            await this.db.connection.raw(`set session sql_require_primary_key = 0;`).connection(nativeConnection);
        } catch (err) {
        // Ignore error due to lack of session permissions
        }
        // We only need to get info on the first connection in the pool.
        // Note: there is a race condition here where, if two connections are
        // opened at the same time, both will retrieve db info. It doesn't
        // cause issues — just one wasted query, once — so we leave it rather
        // than add extra complexity.
        if (!this.info) {
            this.info = await this.databaseInspector.getInformation(nativeConnection);
        }
    }
    /** Relaxes FK and primary-key session checks before a schema migration. */
    async startSchemaUpdate() {
        try {
            await this.db.connection.raw(`set foreign_key_checks = 0;`);
            await this.db.connection.raw(`set session sql_require_primary_key = 0;`);
        } catch (err) {
        // Ignore error due to lack of session permissions
        }
    }
    /** Re-enables FK checks after a schema migration. */
    async endSchemaUpdate() {
        await this.db.connection.raw(`set foreign_key_checks = 1;`);
    }
    /** MySQL supports UNSIGNED integer columns. */
    supportsUnsigned() {
        return true;
    }
    /** MySQL enforces foreign key constraints. */
    usesForeignKeys() {
        return true;
    }
    /** No MySQL-specific error mapping; defer entirely to the base dialect. */
    transformErrors(error) {
        super.transformErrors(error);
    }
    constructor(db){
        super(db, 'mysql');
        // Cached flavor/version info, filled in lazily by initialize().
        this.info = null;
        this.schemaInspector = new MysqlSchemaInspector(db);
        this.databaseInspector = new MysqlDatabaseInspector(db);
    }
}
export { MysqlDialect as default };
//# sourceMappingURL=index.mjs.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,13 @@
import type { Column, ForeignKey, Index, Schema } from '../../schema/types';
import type { SchemaInspector } from '../dialect';
import type { Database } from '../..';
/** Reads the current MySQL schema: tables, columns, indexes, foreign keys. */
export default class MysqlSchemaInspector implements SchemaInspector {
    db: Database;
    constructor(db: Database);
    /** Full snapshot: every base table with its columns, indexes and FKs. */
    getSchema(): Promise<Schema>;
    /** Names of the base tables in the current database. */
    getTables(): Promise<string[]>;
    /** Column descriptors for one table (type, default, nullability, …). */
    getColumns(tableName: string): Promise<Column[]>;
    /** Index descriptors for one table (the primary id index is skipped). */
    getIndexes(tableName: string): Promise<Index[]>;
    /** Foreign-key descriptors for one table, including ON UPDATE/DELETE rules. */
    getForeignKeys(tableName: string): Promise<ForeignKey[]>;
}
//# sourceMappingURL=schema-inspector.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"schema-inspector.d.ts","sourceRoot":"","sources":["../../../src/dialects/mysql/schema-inspector.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,EAAE,UAAU,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAC5E,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,YAAY,CAAC;AAClD,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,OAAO,CAAC;AAsItC,MAAM,CAAC,OAAO,OAAO,oBAAqB,YAAW,eAAe;IAClE,EAAE,EAAE,QAAQ,CAAC;gBAED,EAAE,EAAE,QAAQ;IAIlB,SAAS;IAuBT,SAAS,IAAI,OAAO,CAAC,MAAM,EAAE,CAAC;IAM9B,UAAU,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC;IAoBhD,UAAU,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,KAAK,EAAE,CAAC;IA4B/C,cAAc,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,EAAE,CAAC;CA+C/D"}

View File

@@ -0,0 +1,297 @@
'use strict';
const SQL_QUERIES = {
    TABLE_LIST: /* sql */ `
      SELECT
        t.table_name as table_name
      FROM information_schema.tables t
      WHERE table_type = 'BASE TABLE'
      AND table_schema = schema();
    `,
    LIST_COLUMNS: /* sql */ `
      SELECT
        c.data_type as data_type,
        c.column_name as column_name,
        c.character_maximum_length as character_maximum_length,
        c.column_default as column_default,
        c.is_nullable as is_nullable,
        c.column_type as column_type,
        c.column_key as column_key
      FROM information_schema.columns c
      WHERE table_schema = database()
      AND table_name = ?;
    `,
    INDEX_LIST: /* sql */ `
      show index from ??;
    `,
    FOREIGN_KEY_LIST: /* sql */ `
      SELECT
        tc.constraint_name as constraint_name
      FROM information_schema.table_constraints tc
      WHERE tc.constraint_type = 'FOREIGN KEY'
      AND tc.table_schema = database()
      AND tc.table_name = ?;
    `,
    FOREIGN_KEY_REFERENCES: /* sql */ `
      SELECT
        kcu.constraint_name as constraint_name,
        kcu.column_name as column_name,
        kcu.referenced_table_name as referenced_table_name,
        kcu.referenced_column_name as referenced_column_name
      FROM information_schema.key_column_usage kcu
      WHERE kcu.constraint_name in (?)
      AND kcu.table_schema = database()
      AND kcu.table_name = ?;
    `,
    FOREIGN_KEY_REFERENTIALS_CONSTRAINTS: /* sql */ `
      SELECT
        rc.constraint_name as constraint_name,
        rc.update_rule as on_update,
        rc.delete_rule as on_delete
      FROM information_schema.referential_constraints AS rc
      WHERE rc.constraint_name in (?)
      AND rc.constraint_schema = database()
      AND rc.table_name = ?;
    `
};
/**
 * MySQL root types that translate to a fixed Strapi descriptor, i.e. ones
 * that need no extra column metadata. Each entry is a factory so every call
 * hands back a fresh object that callers may safely mutate.
 */
const FIXED_TYPE_FACTORIES = {
    decimal: () => ({ type: 'decimal', args: [10, 2] }),
    double: () => ({ type: 'double' }),
    bigint: () => ({ type: 'bigInteger' }),
    enum: () => ({ type: 'string' }),
    tinyint: () => ({ type: 'boolean' }),
    longtext: () => ({ type: 'text', args: ['longtext'] }),
    varchar: null, // handled separately (needs character_maximum_length)
    datetime: () => ({ type: 'datetime', args: [{ useTz: false, precision: 6 }] }),
    date: () => ({ type: 'date' }),
    time: () => ({ type: 'time', args: [{ precision: 3 }] }),
    timestamp: () => ({ type: 'timestamp', args: [{ useTz: false, precision: 6 }] }),
    json: () => ({ type: 'jsonb' })
};
/**
 * Translates an information_schema column row into a Strapi column
 * descriptor of shape { type, args?, unsigned? }.
 */
const toStrapiType = (column) => {
    // First identifier of the type, e.g. "int" out of "int(11) unsigned".
    const rootType = column.data_type.toLowerCase().match(/[^(), ]+/)?.[0];
    if (rootType === 'int') {
        // Integer primary keys are modelled as auto-incrementing ids.
        if (column.column_key === 'PRI') {
            return {
                type: 'increments',
                args: [{ primary: true, primaryKey: true }],
                unsigned: false
            };
        }
        return { type: 'integer' };
    }
    if (rootType === 'varchar') {
        return { type: 'string', args: [column.character_maximum_length] };
    }
    const factory = rootType && Object.prototype.hasOwnProperty.call(FIXED_TYPE_FACTORIES, rootType)
        ? FIXED_TYPE_FACTORIES[rootType]
        : null;
    if (factory) {
        return factory();
    }
    // Anything unrecognized is passed through verbatim.
    return { type: 'specificType', args: [column.data_type] };
};
/** Reads the current MySQL schema: tables, columns, indexes, foreign keys. */
class MysqlSchemaInspector {
    constructor(db) {
        this.db = db;
    }
    /** Full snapshot: every base table with its columns, indexes and FKs. */
    async getSchema() {
        const tableNames = await this.getTables();
        const tables = await Promise.all(tableNames.map(async (name) => ({
            name,
            columns: await this.getColumns(name),
            indexes: await this.getIndexes(name),
            foreignKeys: await this.getForeignKeys(name)
        })));
        return { tables };
    }
    /** Names of the base tables in the current database. */
    async getTables() {
        const [rows] = await this.db.connection.raw(SQL_QUERIES.TABLE_LIST);
        return rows.map(({ table_name: tableName }) => tableName);
    }
    /** Column descriptors for one table. */
    async getColumns(tableName) {
        const [rows] = await this.db.connection.raw(SQL_QUERIES.LIST_COLUMNS, [tableName]);
        return rows.map((row) => {
            const { type, args = [], ...rest } = toStrapiType(row);
            // `rest` is spread last so type-specific fields (e.g. unsigned:false
            // on increments) win over the generic column_type check.
            return {
                type,
                args,
                defaultTo: row.column_default,
                name: row.column_name,
                notNullable: row.is_nullable === 'NO',
                unsigned: row.column_type.endsWith(' unsigned'),
                ...rest
            };
        });
    }
    /** Index descriptors for one table; the primary id column is skipped. */
    async getIndexes(tableName) {
        const [rows] = await this.db.connection.raw(SQL_QUERIES.INDEX_LIST, [tableName]);
        const indexesByName = {};
        for (const row of rows) {
            if (row.Column_name === 'id') {
                continue;
            }
            const existing = indexesByName[row.Key_name];
            if (existing) {
                // Additional column of a composite index.
                existing.columns.push(row.Column_name);
                continue;
            }
            const indexInfo = {
                columns: [row.Column_name],
                name: row.Key_name
            };
            // Non_unique is 0 (or '0' depending on driver) for unique indexes.
            if (!row.Non_unique || row.Non_unique === '0') {
                indexInfo.type = 'unique';
            }
            indexesByName[row.Key_name] = indexInfo;
        }
        return Object.values(indexesByName);
    }
    /** Foreign-key descriptors for one table, with ON UPDATE/DELETE rules. */
    async getForeignKeys(tableName) {
        const [rows] = await this.db.connection.raw(SQL_QUERIES.FOREIGN_KEY_LIST, [tableName]);
        const fksByName = {};
        for (const { constraint_name: name } of rows) {
            fksByName[name] = {
                name,
                columns: [],
                referencedColumns: [],
                referencedTable: null,
                onUpdate: null,
                onDelete: null
            };
        }
        const constraintNames = Object.keys(fksByName);
        if (constraintNames.length > 0) {
            // Resolve the columns each constraint covers and what they point at.
            const [references] = await this.db.connection.raw(SQL_QUERIES.FOREIGN_KEY_REFERENCES, [constraintNames, tableName]);
            for (const reference of references) {
                const fk = fksByName[reference.constraint_name];
                fk.referencedTable = reference.referenced_table_name;
                fk.columns.push(reference.column_name);
                fk.referencedColumns.push(reference.referenced_column_name);
            }
            // Resolve the referential actions (CASCADE, SET NULL, ...).
            const [referentialConstraints] = await this.db.connection.raw(SQL_QUERIES.FOREIGN_KEY_REFERENTIALS_CONSTRAINTS, [constraintNames, tableName]);
            for (const rc of referentialConstraints) {
                fksByName[rc.constraint_name].onUpdate = rc.on_update.toUpperCase();
                fksByName[rc.constraint_name].onDelete = rc.on_delete.toUpperCase();
            }
        }
        return Object.values(fksByName);
    }
}
module.exports = MysqlSchemaInspector;
//# sourceMappingURL=schema-inspector.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,295 @@
const SQL_QUERIES = {
    TABLE_LIST: /* sql */ `
      SELECT
        t.table_name as table_name
      FROM information_schema.tables t
      WHERE table_type = 'BASE TABLE'
      AND table_schema = schema();
    `,
    LIST_COLUMNS: /* sql */ `
      SELECT
        c.data_type as data_type,
        c.column_name as column_name,
        c.character_maximum_length as character_maximum_length,
        c.column_default as column_default,
        c.is_nullable as is_nullable,
        c.column_type as column_type,
        c.column_key as column_key
      FROM information_schema.columns c
      WHERE table_schema = database()
      AND table_name = ?;
    `,
    INDEX_LIST: /* sql */ `
      show index from ??;
    `,
    FOREIGN_KEY_LIST: /* sql */ `
      SELECT
        tc.constraint_name as constraint_name
      FROM information_schema.table_constraints tc
      WHERE tc.constraint_type = 'FOREIGN KEY'
      AND tc.table_schema = database()
      AND tc.table_name = ?;
    `,
    FOREIGN_KEY_REFERENCES: /* sql */ `
      SELECT
        kcu.constraint_name as constraint_name,
        kcu.column_name as column_name,
        kcu.referenced_table_name as referenced_table_name,
        kcu.referenced_column_name as referenced_column_name
      FROM information_schema.key_column_usage kcu
      WHERE kcu.constraint_name in (?)
      AND kcu.table_schema = database()
      AND kcu.table_name = ?;
    `,
    FOREIGN_KEY_REFERENTIALS_CONSTRAINTS: /* sql */ `
      SELECT
        rc.constraint_name as constraint_name,
        rc.update_rule as on_update,
        rc.delete_rule as on_delete
      FROM information_schema.referential_constraints AS rc
      WHERE rc.constraint_name in (?)
      AND rc.constraint_schema = database()
      AND rc.table_name = ?;
    `
};
/**
 * MySQL root types that translate to a fixed Strapi descriptor, i.e. ones
 * that need no extra column metadata. Each entry is a factory so every call
 * hands back a fresh object that callers may safely mutate.
 */
const FIXED_TYPE_FACTORIES = {
    decimal: () => ({ type: 'decimal', args: [10, 2] }),
    double: () => ({ type: 'double' }),
    bigint: () => ({ type: 'bigInteger' }),
    enum: () => ({ type: 'string' }),
    tinyint: () => ({ type: 'boolean' }),
    longtext: () => ({ type: 'text', args: ['longtext'] }),
    varchar: null, // handled separately (needs character_maximum_length)
    datetime: () => ({ type: 'datetime', args: [{ useTz: false, precision: 6 }] }),
    date: () => ({ type: 'date' }),
    time: () => ({ type: 'time', args: [{ precision: 3 }] }),
    timestamp: () => ({ type: 'timestamp', args: [{ useTz: false, precision: 6 }] }),
    json: () => ({ type: 'jsonb' })
};
/**
 * Translates an information_schema column row into a Strapi column
 * descriptor of shape { type, args?, unsigned? }.
 */
const toStrapiType = (column) => {
    // First identifier of the type, e.g. "int" out of "int(11) unsigned".
    const rootType = column.data_type.toLowerCase().match(/[^(), ]+/)?.[0];
    if (rootType === 'int') {
        // Integer primary keys are modelled as auto-incrementing ids.
        if (column.column_key === 'PRI') {
            return {
                type: 'increments',
                args: [{ primary: true, primaryKey: true }],
                unsigned: false
            };
        }
        return { type: 'integer' };
    }
    if (rootType === 'varchar') {
        return { type: 'string', args: [column.character_maximum_length] };
    }
    const factory = rootType && Object.prototype.hasOwnProperty.call(FIXED_TYPE_FACTORIES, rootType)
        ? FIXED_TYPE_FACTORIES[rootType]
        : null;
    if (factory) {
        return factory();
    }
    // Anything unrecognized is passed through verbatim.
    return { type: 'specificType', args: [column.data_type] };
};
/** Reads the current MySQL schema: tables, columns, indexes, foreign keys. */
class MysqlSchemaInspector {
    constructor(db) {
        this.db = db;
    }
    /** Full snapshot: every base table with its columns, indexes and FKs. */
    async getSchema() {
        const tableNames = await this.getTables();
        const tables = await Promise.all(tableNames.map(async (name) => ({
            name,
            columns: await this.getColumns(name),
            indexes: await this.getIndexes(name),
            foreignKeys: await this.getForeignKeys(name)
        })));
        return { tables };
    }
    /** Names of the base tables in the current database. */
    async getTables() {
        const [rows] = await this.db.connection.raw(SQL_QUERIES.TABLE_LIST);
        return rows.map(({ table_name: tableName }) => tableName);
    }
    /** Column descriptors for one table. */
    async getColumns(tableName) {
        const [rows] = await this.db.connection.raw(SQL_QUERIES.LIST_COLUMNS, [tableName]);
        return rows.map((row) => {
            const { type, args = [], ...rest } = toStrapiType(row);
            // `rest` is spread last so type-specific fields (e.g. unsigned:false
            // on increments) win over the generic column_type check.
            return {
                type,
                args,
                defaultTo: row.column_default,
                name: row.column_name,
                notNullable: row.is_nullable === 'NO',
                unsigned: row.column_type.endsWith(' unsigned'),
                ...rest
            };
        });
    }
    /** Index descriptors for one table; the primary id column is skipped. */
    async getIndexes(tableName) {
        const [rows] = await this.db.connection.raw(SQL_QUERIES.INDEX_LIST, [tableName]);
        const indexesByName = {};
        for (const row of rows) {
            if (row.Column_name === 'id') {
                continue;
            }
            const existing = indexesByName[row.Key_name];
            if (existing) {
                // Additional column of a composite index.
                existing.columns.push(row.Column_name);
                continue;
            }
            const indexInfo = {
                columns: [row.Column_name],
                name: row.Key_name
            };
            // Non_unique is 0 (or '0' depending on driver) for unique indexes.
            if (!row.Non_unique || row.Non_unique === '0') {
                indexInfo.type = 'unique';
            }
            indexesByName[row.Key_name] = indexInfo;
        }
        return Object.values(indexesByName);
    }
    /** Foreign-key descriptors for one table, with ON UPDATE/DELETE rules. */
    async getForeignKeys(tableName) {
        const [rows] = await this.db.connection.raw(SQL_QUERIES.FOREIGN_KEY_LIST, [tableName]);
        const fksByName = {};
        for (const { constraint_name: name } of rows) {
            fksByName[name] = {
                name,
                columns: [],
                referencedColumns: [],
                referencedTable: null,
                onUpdate: null,
                onDelete: null
            };
        }
        const constraintNames = Object.keys(fksByName);
        if (constraintNames.length > 0) {
            // Resolve the columns each constraint covers and what they point at.
            const [references] = await this.db.connection.raw(SQL_QUERIES.FOREIGN_KEY_REFERENCES, [constraintNames, tableName]);
            for (const reference of references) {
                const fk = fksByName[reference.constraint_name];
                fk.referencedTable = reference.referenced_table_name;
                fk.columns.push(reference.column_name);
                fk.referencedColumns.push(reference.referenced_column_name);
            }
            // Resolve the referential actions (CASCADE, SET NULL, ...).
            const [referentialConstraints] = await this.db.connection.raw(SQL_QUERIES.FOREIGN_KEY_REFERENTIALS_CONSTRAINTS, [constraintNames, tableName]);
            for (const rc of referentialConstraints) {
                fksByName[rc.constraint_name].onUpdate = rc.on_update.toUpperCase();
                fksByName[rc.constraint_name].onDelete = rc.on_delete.toUpperCase();
            }
        }
        return Object.values(fksByName);
    }
}
export { MysqlSchemaInspector as default };
//# sourceMappingURL=schema-inspector.mjs.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,14 @@
/// <reference types="node" />
import type { Database } from '../..';
import Dialect from '../dialect';
import PostgresqlSchemaInspector from './schema-inspector';
/** Dialect implementation for PostgreSQL. */
export default class PostgresDialect extends Dialect {
    schemaInspector: PostgresqlSchemaInspector;
    constructor(db: Database);
    /** Postgres supports RETURNING on insert/update. */
    useReturning(): boolean;
    /** Per-connection setup: pg type parsers and optional search_path. */
    initialize(nativeConnection: unknown): Promise<void>;
    usesForeignKeys(): boolean;
    /** Maps generic type names to dialect-specific ones (timestamp -> datetime). */
    getSqlType(type: string): string;
    /** Converts Postgres error codes (e.g. 23502) into Strapi database errors. */
    transformErrors(error: NodeJS.ErrnoException): void;
}
//# sourceMappingURL=index.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/dialects/postgresql/index.ts"],"names":[],"mappings":";AACA,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,OAAO,CAAC;AACtC,OAAO,OAAO,MAAM,YAAY,CAAC;AACjC,OAAO,yBAAyB,MAAM,oBAAoB,CAAC;AAE3D,MAAM,CAAC,OAAO,OAAO,eAAgB,SAAQ,OAAO;IAClD,eAAe,EAAE,yBAAyB,CAAC;gBAE/B,EAAE,EAAE,QAAQ;IAMxB,YAAY;IAIN,UAAU,CAAC,gBAAgB,EAAE,OAAO;IA+B1C,eAAe;IAIf,UAAU,CAAC,IAAI,EAAE,MAAM;IAWvB,eAAe,CAAC,KAAK,EAAE,MAAM,CAAC,cAAc;CAY7C"}

View File

@@ -0,0 +1,62 @@
'use strict';
var notNull = require('../../errors/not-null.js');
var dialect = require('../dialect.js');
var schemaInspector = require('./schema-inspector.js');
/**
 * Knex dialect for PostgreSQL. Configures pg type parsing and maps
 * Postgres error codes onto Strapi database errors.
 */
class PostgresDialect extends dialect {
    /** Postgres can hand back inserted/updated rows via RETURNING. */
    useReturning() {
        return true;
    }
    /**
     * Per-connection setup:
     *  - DATE and JSONB values are kept as raw text (no automatic casting),
     *  - NUMERIC values are parsed to JS numbers with parseFloat,
     *  - when a schema is configured, search_path is pointed at it.
     */
    async initialize(nativeConnection) {
        // Don't cast DATE string to Date()
        this.db.connection.client.driver.types.setTypeParser(this.db.connection.client.driver.types.builtins.DATE, 'text', (v)=>v);
        // Don't parse JSONB automatically
        this.db.connection.client.driver.types.setTypeParser(this.db.connection.client.driver.types.builtins.JSONB, 'text', (v)=>v);
        // NOTE(review): parseFloat may lose precision on very large NUMERIC values.
        this.db.connection.client.driver.types.setTypeParser(this.db.connection.client.driver.types.builtins.NUMERIC, 'text', parseFloat);
        // If we're using a schema, set the default path for all table names in queries to use that schema
        // Ideally we would rely on Knex config.searchPath to do this for us
        // However, createConnection must remain synchronous and if the user is using a connection function,
        // we do not know what their schema is until after the connection is resolved
        const schemaName = this.db.getSchemaName();
        if (schemaName) {
            await this.db.connection.raw(`SET search_path TO "${schemaName}"`).connection(nativeConnection);
        }
    }
    /** Postgres enforces foreign key constraints. */
    usesForeignKeys() {
        return true;
    }
    /** Maps generic type names to the dialect-specific equivalent. */
    getSqlType(type) {
        switch(type){
            case 'timestamp':
                {
                    return 'datetime';
                }
            default:
                {
                    return type;
                }
        }
    }
    /**
     * Translates driver errors into Strapi errors. Code 23502 is Postgres'
     * not_null_violation; anything else is delegated to the base dialect.
     */
    transformErrors(error) {
        switch(error.code){
            case '23502':
                {
                    throw new notNull({
                        column: 'column' in error ? `${error.column}` : undefined
                    });
                }
            default:
                {
                    super.transformErrors(error);
                }
        }
    }
    constructor(db){
        super(db, 'postgres');
        this.schemaInspector = new schemaInspector(db);
    }
}
module.exports = PostgresDialect;
//# sourceMappingURL=index.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sources":["../../../src/dialects/postgresql/index.ts"],"sourcesContent":["import * as errors from '../../errors';\nimport type { Database } from '../..';\nimport Dialect from '../dialect';\nimport PostgresqlSchemaInspector from './schema-inspector';\n\nexport default class PostgresDialect extends Dialect {\n schemaInspector: PostgresqlSchemaInspector;\n\n constructor(db: Database) {\n super(db, 'postgres');\n\n this.schemaInspector = new PostgresqlSchemaInspector(db);\n }\n\n useReturning() {\n return true;\n }\n\n async initialize(nativeConnection: unknown) {\n // Don't cast DATE string to Date()\n this.db.connection.client.driver.types.setTypeParser(\n this.db.connection.client.driver.types.builtins.DATE,\n 'text',\n (v: unknown) => v\n );\n // Don't parse JSONB automatically\n this.db.connection.client.driver.types.setTypeParser(\n this.db.connection.client.driver.types.builtins.JSONB,\n 'text',\n (v: unknown) => v\n );\n this.db.connection.client.driver.types.setTypeParser(\n this.db.connection.client.driver.types.builtins.NUMERIC,\n 'text',\n parseFloat\n );\n\n // If we're using a schema, set the default path for all table names in queries to use that schema\n // Ideally we would rely on Knex config.searchPath to do this for us\n // However, createConnection must remain synchronous and if the user is using a connection function,\n // we do not know what their schema is until after the connection is resolved\n const schemaName = this.db.getSchemaName();\n if (schemaName) {\n await this.db.connection\n .raw(`SET search_path TO \"${schemaName}\"`)\n .connection(nativeConnection);\n }\n }\n\n usesForeignKeys() {\n return true;\n }\n\n getSqlType(type: string) {\n switch (type) {\n case 'timestamp': {\n return 'datetime';\n }\n default: {\n return type;\n }\n }\n }\n\n transformErrors(error: NodeJS.ErrnoException) {\n switch (error.code) {\n case '23502': {\n throw new errors.NotNullError({\n column: 'column' in error ? 
`${error.column}` : undefined,\n });\n }\n default: {\n super.transformErrors(error);\n }\n }\n }\n}\n"],"names":["PostgresDialect","Dialect","useReturning","initialize","nativeConnection","db","connection","client","driver","types","setTypeParser","builtins","DATE","v","JSONB","NUMERIC","parseFloat","schemaName","getSchemaName","raw","usesForeignKeys","getSqlType","type","transformErrors","error","code","errors","column","undefined","constructor","schemaInspector","PostgresqlSchemaInspector"],"mappings":";;;;;;AAKe,MAAMA,eAAwBC,SAAAA,OAAAA,CAAAA;IAS3CC,YAAe,GAAA;QACb,OAAO,IAAA;AACT;IAEA,MAAMC,UAAAA,CAAWC,gBAAyB,EAAE;;AAE1C,QAAA,IAAI,CAACC,EAAE,CAACC,UAAU,CAACC,MAAM,CAACC,MAAM,CAACC,KAAK,CAACC,aAAa,CAClD,IAAI,CAACL,EAAE,CAACC,UAAU,CAACC,MAAM,CAACC,MAAM,CAACC,KAAK,CAACE,QAAQ,CAACC,IAAI,EACpD,MAAA,EACA,CAACC,CAAeA,GAAAA,CAAAA,CAAAA;;AAGlB,QAAA,IAAI,CAACR,EAAE,CAACC,UAAU,CAACC,MAAM,CAACC,MAAM,CAACC,KAAK,CAACC,aAAa,CAClD,IAAI,CAACL,EAAE,CAACC,UAAU,CAACC,MAAM,CAACC,MAAM,CAACC,KAAK,CAACE,QAAQ,CAACG,KAAK,EACrD,MAAA,EACA,CAACD,CAAeA,GAAAA,CAAAA,CAAAA;AAElB,QAAA,IAAI,CAACR,EAAE,CAACC,UAAU,CAACC,MAAM,CAACC,MAAM,CAACC,KAAK,CAACC,aAAa,CAClD,IAAI,CAACL,EAAE,CAACC,UAAU,CAACC,MAAM,CAACC,MAAM,CAACC,KAAK,CAACE,QAAQ,CAACI,OAAO,EACvD,MACAC,EAAAA,UAAAA,CAAAA;;;;;AAOF,QAAA,MAAMC,UAAa,GAAA,IAAI,CAACZ,EAAE,CAACa,aAAa,EAAA;AACxC,QAAA,IAAID,UAAY,EAAA;AACd,YAAA,MAAM,IAAI,CAACZ,EAAE,CAACC,UAAU,CACrBa,GAAG,CAAC,CAAC,oBAAoB,EAAEF,UAAW,CAAA,CAAC,CAAC,CAAA,CACxCX,UAAU,CAACF,gBAAAA,CAAAA;AAChB;AACF;IAEAgB,eAAkB,GAAA;QAChB,OAAO,IAAA;AACT;AAEAC,IAAAA,UAAAA,CAAWC,IAAY,EAAE;QACvB,OAAQA,IAAAA;YACN,KAAK,WAAA;AAAa,gBAAA;oBAChB,OAAO,UAAA;AACT;AACA,YAAA;AAAS,gBAAA;oBACP,OAAOA,IAAAA;AACT;AACF;AACF;AAEAC,IAAAA,eAAAA,CAAgBC,KAA4B,EAAE;AAC5C,QAAA,OAAQA,MAAMC,IAAI;YAChB,KAAK,OAAA;AAAS,gBAAA;oBACZ,MAAM,IAAIC,OAAmB,CAAC;wBAC5BC,MAAQ,EAAA,QAAA,IAAYH,QAAQ,CAAC,EAAEA,MAAMG,MAAM,CAAC,CAAC,GAAGC;AAClD,qBAAA,CAAA;AACF;AACA,YAAA;AAAS,gBAAA;AACP,oBAAA,KAAK,CAACL,eAAgBC,CAAAA,KAAAA,CAAAA;AACxB;AACF;AACF;AAnEAK,IAAAA,WAAAA
,CAAYxB,EAAY,CAAE;AACxB,QAAA,KAAK,CAACA,EAAI,EAAA,UAAA,CAAA;AAEV,QAAA,IAAI,CAACyB,eAAe,GAAG,IAAIC,eAA0B1B,CAAAA,EAAAA,CAAAA;AACvD;AAgEF;;;;"}

View File

@@ -0,0 +1,60 @@
import NotNullError from '../../errors/not-null.mjs';
import Dialect from '../dialect.mjs';
import PostgresqlSchemaInspector from './schema-inspector.mjs';
/**
 * Knex dialect for PostgreSQL. Configures pg type parsing and maps
 * Postgres error codes onto Strapi database errors.
 */
class PostgresDialect extends Dialect {
    /** Postgres can hand back inserted/updated rows via RETURNING. */
    useReturning() {
        return true;
    }
    /**
     * Per-connection setup:
     *  - DATE and JSONB values are kept as raw text (no automatic casting),
     *  - NUMERIC values are parsed to JS numbers with parseFloat,
     *  - when a schema is configured, search_path is pointed at it.
     */
    async initialize(nativeConnection) {
        // Don't cast DATE string to Date()
        this.db.connection.client.driver.types.setTypeParser(this.db.connection.client.driver.types.builtins.DATE, 'text', (v)=>v);
        // Don't parse JSONB automatically
        this.db.connection.client.driver.types.setTypeParser(this.db.connection.client.driver.types.builtins.JSONB, 'text', (v)=>v);
        // NOTE(review): parseFloat may lose precision on very large NUMERIC values.
        this.db.connection.client.driver.types.setTypeParser(this.db.connection.client.driver.types.builtins.NUMERIC, 'text', parseFloat);
        // If we're using a schema, set the default path for all table names in queries to use that schema
        // Ideally we would rely on Knex config.searchPath to do this for us
        // However, createConnection must remain synchronous and if the user is using a connection function,
        // we do not know what their schema is until after the connection is resolved
        const schemaName = this.db.getSchemaName();
        if (schemaName) {
            await this.db.connection.raw(`SET search_path TO "${schemaName}"`).connection(nativeConnection);
        }
    }
    /** Postgres enforces foreign key constraints. */
    usesForeignKeys() {
        return true;
    }
    /** Maps generic type names to the dialect-specific equivalent. */
    getSqlType(type) {
        switch(type){
            case 'timestamp':
                {
                    return 'datetime';
                }
            default:
                {
                    return type;
                }
        }
    }
    /**
     * Translates driver errors into Strapi errors. Code 23502 is Postgres'
     * not_null_violation; anything else is delegated to the base dialect.
     */
    transformErrors(error) {
        switch(error.code){
            case '23502':
                {
                    throw new NotNullError({
                        column: 'column' in error ? `${error.column}` : undefined
                    });
                }
            default:
                {
                    super.transformErrors(error);
                }
        }
    }
    constructor(db){
        super(db, 'postgres');
        this.schemaInspector = new PostgresqlSchemaInspector(db);
    }
}
export { PostgresDialect as default };
//# sourceMappingURL=index.mjs.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.mjs","sources":["../../../src/dialects/postgresql/index.ts"],"sourcesContent":["import * as errors from '../../errors';\nimport type { Database } from '../..';\nimport Dialect from '../dialect';\nimport PostgresqlSchemaInspector from './schema-inspector';\n\nexport default class PostgresDialect extends Dialect {\n schemaInspector: PostgresqlSchemaInspector;\n\n constructor(db: Database) {\n super(db, 'postgres');\n\n this.schemaInspector = new PostgresqlSchemaInspector(db);\n }\n\n useReturning() {\n return true;\n }\n\n async initialize(nativeConnection: unknown) {\n // Don't cast DATE string to Date()\n this.db.connection.client.driver.types.setTypeParser(\n this.db.connection.client.driver.types.builtins.DATE,\n 'text',\n (v: unknown) => v\n );\n // Don't parse JSONB automatically\n this.db.connection.client.driver.types.setTypeParser(\n this.db.connection.client.driver.types.builtins.JSONB,\n 'text',\n (v: unknown) => v\n );\n this.db.connection.client.driver.types.setTypeParser(\n this.db.connection.client.driver.types.builtins.NUMERIC,\n 'text',\n parseFloat\n );\n\n // If we're using a schema, set the default path for all table names in queries to use that schema\n // Ideally we would rely on Knex config.searchPath to do this for us\n // However, createConnection must remain synchronous and if the user is using a connection function,\n // we do not know what their schema is until after the connection is resolved\n const schemaName = this.db.getSchemaName();\n if (schemaName) {\n await this.db.connection\n .raw(`SET search_path TO \"${schemaName}\"`)\n .connection(nativeConnection);\n }\n }\n\n usesForeignKeys() {\n return true;\n }\n\n getSqlType(type: string) {\n switch (type) {\n case 'timestamp': {\n return 'datetime';\n }\n default: {\n return type;\n }\n }\n }\n\n transformErrors(error: NodeJS.ErrnoException) {\n switch (error.code) {\n case '23502': {\n throw new errors.NotNullError({\n column: 'column' in error ? 
`${error.column}` : undefined,\n });\n }\n default: {\n super.transformErrors(error);\n }\n }\n }\n}\n"],"names":["PostgresDialect","Dialect","useReturning","initialize","nativeConnection","db","connection","client","driver","types","setTypeParser","builtins","DATE","v","JSONB","NUMERIC","parseFloat","schemaName","getSchemaName","raw","usesForeignKeys","getSqlType","type","transformErrors","error","code","errors","column","undefined","constructor","schemaInspector","PostgresqlSchemaInspector"],"mappings":";;;;AAKe,MAAMA,eAAwBC,SAAAA,OAAAA,CAAAA;IAS3CC,YAAe,GAAA;QACb,OAAO,IAAA;AACT;IAEA,MAAMC,UAAAA,CAAWC,gBAAyB,EAAE;;AAE1C,QAAA,IAAI,CAACC,EAAE,CAACC,UAAU,CAACC,MAAM,CAACC,MAAM,CAACC,KAAK,CAACC,aAAa,CAClD,IAAI,CAACL,EAAE,CAACC,UAAU,CAACC,MAAM,CAACC,MAAM,CAACC,KAAK,CAACE,QAAQ,CAACC,IAAI,EACpD,MAAA,EACA,CAACC,CAAeA,GAAAA,CAAAA,CAAAA;;AAGlB,QAAA,IAAI,CAACR,EAAE,CAACC,UAAU,CAACC,MAAM,CAACC,MAAM,CAACC,KAAK,CAACC,aAAa,CAClD,IAAI,CAACL,EAAE,CAACC,UAAU,CAACC,MAAM,CAACC,MAAM,CAACC,KAAK,CAACE,QAAQ,CAACG,KAAK,EACrD,MAAA,EACA,CAACD,CAAeA,GAAAA,CAAAA,CAAAA;AAElB,QAAA,IAAI,CAACR,EAAE,CAACC,UAAU,CAACC,MAAM,CAACC,MAAM,CAACC,KAAK,CAACC,aAAa,CAClD,IAAI,CAACL,EAAE,CAACC,UAAU,CAACC,MAAM,CAACC,MAAM,CAACC,KAAK,CAACE,QAAQ,CAACI,OAAO,EACvD,MACAC,EAAAA,UAAAA,CAAAA;;;;;AAOF,QAAA,MAAMC,UAAa,GAAA,IAAI,CAACZ,EAAE,CAACa,aAAa,EAAA;AACxC,QAAA,IAAID,UAAY,EAAA;AACd,YAAA,MAAM,IAAI,CAACZ,EAAE,CAACC,UAAU,CACrBa,GAAG,CAAC,CAAC,oBAAoB,EAAEF,UAAW,CAAA,CAAC,CAAC,CAAA,CACxCX,UAAU,CAACF,gBAAAA,CAAAA;AAChB;AACF;IAEAgB,eAAkB,GAAA;QAChB,OAAO,IAAA;AACT;AAEAC,IAAAA,UAAAA,CAAWC,IAAY,EAAE;QACvB,OAAQA,IAAAA;YACN,KAAK,WAAA;AAAa,gBAAA;oBAChB,OAAO,UAAA;AACT;AACA,YAAA;AAAS,gBAAA;oBACP,OAAOA,IAAAA;AACT;AACF;AACF;AAEAC,IAAAA,eAAAA,CAAgBC,KAA4B,EAAE;AAC5C,QAAA,OAAQA,MAAMC,IAAI;YAChB,KAAK,OAAA;AAAS,gBAAA;oBACZ,MAAM,IAAIC,YAAmB,CAAC;wBAC5BC,MAAQ,EAAA,QAAA,IAAYH,QAAQ,CAAC,EAAEA,MAAMG,MAAM,CAAC,CAAC,GAAGC;AAClD,qBAAA,CAAA;AACF;AACA,YAAA;AAAS,gBAAA;AACP,oBAAA,KAAK,CAACL,eAAgBC,CAAAA,KAAAA,CAAAA;AACxB;AACF;AACF;AAnEAK,IAAAA,WAAAA,C
AAYxB,EAAY,CAAE;AACxB,QAAA,KAAK,CAACA,EAAI,EAAA,UAAA,CAAA;AAEV,QAAA,IAAI,CAACyB,eAAe,GAAG,IAAIC,yBAA0B1B,CAAAA,EAAAA,CAAAA;AACvD;AAgEF;;;;"}

View File

@@ -0,0 +1,14 @@
import type { Database } from '../..';
import type { Schema, Column, Index, ForeignKey } from '../../schema/types';
import type { SchemaInspector } from '../dialect';
/**
 * PostgreSQL implementation of the dialect {@link SchemaInspector} contract:
 * reads the live database layout (tables, columns, indexes, foreign keys)
 * and exposes it as Strapi schema objects.
 */
export default class PostgresqlSchemaInspector implements SchemaInspector {
    db: Database;
    constructor(db: Database);
    /** Full description of every table in the inspected schema. */
    getSchema(): Promise<Schema>;
    /** Name of the schema being inspected. */
    getDatabaseSchema(): string;
    getTables(): Promise<string[]>;
    getColumns(tableName: string): Promise<Column[]>;
    getIndexes(tableName: string): Promise<Index[]>;
    getForeignKeys(tableName: string): Promise<ForeignKey[]>;
}
//# sourceMappingURL=schema-inspector.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"schema-inspector.d.ts","sourceRoot":"","sources":["../../../src/dialects/postgresql/schema-inspector.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,OAAO,CAAC;AACtC,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,UAAU,EAAE,MAAM,oBAAoB,CAAC;AAC5E,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,YAAY,CAAC;AA8JlD,MAAM,CAAC,OAAO,OAAO,yBAA0B,YAAW,eAAe;IACvE,EAAE,EAAE,QAAQ,CAAC;gBAED,EAAE,EAAE,QAAQ;IAIlB,SAAS;IAuBf,iBAAiB,IAAI,MAAM;IAIrB,SAAS,IAAI,OAAO,CAAC,MAAM,EAAE,CAAC;IAQ9B,UAAU,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC;IAwBhD,UAAU,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,KAAK,EAAE,CAAC;IA2B/C,cAAc,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,EAAE,CAAC;CAsD/D"}

View File

@@ -0,0 +1,316 @@
'use strict';
// Raw SQL used to introspect a PostgreSQL database via information_schema and
// the pg_catalog tables. All queries are parameterized with `?` bindings
// (schema name, table name, constraint names).
const SQL_QUERIES = {
    // All base tables in the given schema, excluding PostGIS bookkeeping tables.
    TABLE_LIST: /* sql */ `
    SELECT *
    FROM information_schema.tables
    WHERE
      table_schema = ?
      AND table_type = 'BASE TABLE'
      AND table_name != 'geometry_columns'
      AND table_name != 'spatial_ref_sys';
  `,
    // Column metadata of one table (type, length, default, nullability).
    LIST_COLUMNS: /* sql */ `
    SELECT data_type, column_name, character_maximum_length, column_default, is_nullable
    FROM information_schema.columns
    WHERE table_schema = ? AND table_name = ?;
  `,
    // One row per (index, column) pair of a table, with unique/primary flags.
    INDEX_LIST: /* sql */ `
    SELECT
      ix.indexrelid,
      i.relname as index_name,
      a.attname as column_name,
      ix.indisunique as is_unique,
      ix.indisprimary as is_primary
    FROM
      pg_class t,
      pg_namespace s,
      pg_class i,
      pg_index ix,
      pg_attribute a
    WHERE
      t.oid = ix.indrelid
      AND i.oid = ix.indexrelid
      AND a.attrelid = t.oid
      AND a.attnum = ANY(ix.indkey)
      AND t.relkind = 'r'
      AND t.relnamespace = s.oid
      AND s.nspname = ?
      AND t.relname = ?;
  `,
    // Names of all FOREIGN KEY constraints declared on a table.
    FOREIGN_KEY_LIST: /* sql */ `
    SELECT
      tco."constraint_name" as constraint_name
    FROM information_schema.table_constraints tco
    WHERE
      tco.constraint_type = 'FOREIGN KEY'
      AND tco.constraint_schema = ?
      AND tco.table_name = ?
  `,
    // Local columns participating in the given constraints.
    FOREIGN_KEY_REFERENCES: /* sql */ `
    SELECT
      kcu."constraint_name" as constraint_name,
      kcu."column_name" as column_name
    FROM information_schema.key_column_usage kcu
    WHERE kcu.constraint_name=ANY(?)
    AND kcu.table_schema = ?
    AND kcu.table_name = ?;
  `,
    // ON UPDATE / ON DELETE rules and the referenced unique constraint.
    FOREIGN_KEY_REFERENCES_CONSTRAIN: /* sql */ `
    SELECT
      rco.update_rule as on_update,
      rco.delete_rule as on_delete,
      rco."unique_constraint_name" as unique_constraint_name
    FROM information_schema.referential_constraints rco
    WHERE rco.constraint_name=ANY(?)
    AND rco.constraint_schema = ?
  `,
    // Table and columns targeted by the referenced unique constraint.
    // (Key name has an upstream typo: "RFERENCE".)
    FOREIGN_KEY_REFERENCES_CONSTRAIN_RFERENCE: /* sql */ `
    SELECT
      rel_kcu."table_name" as foreign_table,
      rel_kcu."column_name" as fk_column_name
    FROM information_schema.key_column_usage rel_kcu
    WHERE rel_kcu.constraint_name=?
    AND rel_kcu.table_schema = ?
  `
};
/**
 * Map an information_schema column row to a Strapi/Knex column descriptor
 * `{ type, args? }`, where `args` are forwarded to the Knex column builder.
 */
const toStrapiType = (column)=>{
    // Keep only the leading type keyword, e.g. "character varying(255)" -> "character".
    const rootType = column.data_type.toLowerCase().match(/[^(), ]+/)?.[0];
    if (rootType === 'integer') {
        // find a way to figure out the increments
        return { type: 'integer' };
    }
    if (rootType === 'text') {
        return { type: 'text', args: ['longtext'] };
    }
    if (rootType === 'boolean') {
        return { type: 'boolean' };
    }
    if (rootType === 'character') {
        return { type: 'string', args: [column.character_maximum_length] };
    }
    if (rootType === 'timestamp') {
        return { type: 'datetime', args: [{ useTz: false, precision: 6 }] };
    }
    if (rootType === 'date') {
        return { type: 'date' };
    }
    if (rootType === 'time') {
        return { type: 'time', args: [{ precision: 3 }] };
    }
    if (rootType === 'numeric') {
        return { type: 'decimal', args: [10, 2] };
    }
    if (rootType === 'real' || rootType === 'double') {
        return { type: 'double' };
    }
    if (rootType === 'bigint') {
        return { type: 'bigInteger' };
    }
    if (rootType === 'jsonb') {
        return { type: 'jsonb' };
    }
    // Unknown type: pass the raw SQL type through verbatim.
    return { type: 'specificType', args: [column.data_type] };
};
/**
 * Classify an index row from pg_index: 'primary' wins over 'unique';
 * a plain index yields undefined (no type flag).
 */
const getIndexType = (index)=>{
    if (index.is_primary) {
        return 'primary';
    }
    return index.is_unique ? 'unique' : undefined;
};
// Reads the live PostgreSQL layout through the parameterized queries in
// SQL_QUERIES, returning plain objects describing tables, columns, indexes
// and foreign keys.
class PostgresqlSchemaInspector {
    // Describe every table in the inspected schema; per-table details are
    // fetched concurrently via Promise.all.
    async getSchema() {
        const schema = {
            tables: []
        };
        const tables = await this.getTables();
        schema.tables = await Promise.all(tables.map(async (tableName)=>{
            const columns = await this.getColumns(tableName);
            const indexes = await this.getIndexes(tableName);
            const foreignKeys = await this.getForeignKeys(tableName);
            return {
                name: tableName,
                columns,
                indexes,
                foreignKeys
            };
        }));
        return schema;
    }
    // Schema to inspect; falls back to Postgres' default "public" schema.
    getDatabaseSchema() {
        return this.db.getSchemaName() || 'public';
    }
    // Names of all base tables in the inspected schema.
    async getTables() {
        const { rows } = await this.db.connection.raw(SQL_QUERIES.TABLE_LIST, [
            this.getDatabaseSchema()
        ]);
        return rows.map((row)=>row.table_name);
    }
    // Column descriptors ({ type, args, defaultTo, name, notNullable, ... })
    // for the given table, mapped through toStrapiType.
    async getColumns(tableName) {
        const { rows } = await this.db.connection.raw(SQL_QUERIES.LIST_COLUMNS, [
            this.getDatabaseSchema(),
            tableName
        ]);
        return rows.map((row)=>{
            const { type, args = [], ...rest } = toStrapiType(row);
            // Sequence-backed defaults ("nextval(...)") are not user-set defaults; drop them.
            const defaultTo = row.column_default && row.column_default.includes('nextval(') ? null : row.column_default;
            return {
                type,
                args,
                defaultTo,
                name: row.column_name,
                notNullable: row.is_nullable === 'NO',
                unsigned: false,
                ...rest
            };
        });
    }
    // Indexes of the given table, grouped by index oid (indexrelid) so a
    // multi-column index comes back as one entry listing all its columns.
    async getIndexes(tableName) {
        const { rows } = await this.db.connection.raw(SQL_QUERIES.INDEX_LIST, [
            this.getDatabaseSchema(),
            tableName
        ]);
        const ret = {};
        for (const index of rows){
            // The `id` column's index is skipped — presumably implied by the
            // auto-increment primary key; confirm against the schema builder.
            if (index.column_name === 'id') {
                continue;
            }
            if (!ret[index.indexrelid]) {
                ret[index.indexrelid] = {
                    columns: [
                        index.column_name
                    ],
                    name: index.index_name,
                    type: getIndexType(index)
                };
            } else {
                ret[index.indexrelid].columns.push(index.column_name);
            }
        }
        return Object.values(ret);
    }
    // Foreign keys of the given table with local columns, referenced
    // table/columns and ON UPDATE / ON DELETE rules.
    async getForeignKeys(tableName) {
        const { rows } = await this.db.connection.raw(SQL_QUERIES.FOREIGN_KEY_LIST, [
            this.getDatabaseSchema(),
            tableName
        ]);
        const ret = {};
        for (const fk of rows){
            ret[fk.constraint_name] = {
                name: fk.constraint_name,
                columns: [],
                referencedColumns: [],
                referencedTable: null,
                onUpdate: null,
                onDelete: null
            };
        }
        const constraintNames = Object.keys(ret);
        const dbSchema = this.getDatabaseSchema();
        if (constraintNames.length > 0) {
            // NOTE(review): the bindings wrap constraintNames in an extra array
            // for the ANY(?) placeholder — looks deliberate for the driver's
            // array binding; confirm before changing.
            const { rows: fkReferences } = await this.db.connection.raw(SQL_QUERIES.FOREIGN_KEY_REFERENCES, [
                [
                    constraintNames
                ],
                dbSchema,
                tableName
            ]);
            // N+1 pattern: one referential_constraints query per FK column row,
            // then one key_column_usage query per referenced unique constraint.
            for (const fkReference of fkReferences){
                ret[fkReference.constraint_name].columns.push(fkReference.column_name);
                const { rows: fkReferencesConstraint } = await this.db.connection.raw(SQL_QUERIES.FOREIGN_KEY_REFERENCES_CONSTRAIN, [
                    [
                        fkReference.constraint_name
                    ],
                    dbSchema
                ]);
                for (const fkReferenceC of fkReferencesConstraint){
                    const { rows: fkReferencesConstraintReferece } = await this.db.connection.raw(SQL_QUERIES.FOREIGN_KEY_REFERENCES_CONSTRAIN_RFERENCE, [
                        fkReferenceC.unique_constraint_name,
                        dbSchema
                    ]);
                    for (const fkReferenceConst of fkReferencesConstraintReferece){
                        ret[fkReference.constraint_name].referencedTable = fkReferenceConst.foreign_table;
                        ret[fkReference.constraint_name].referencedColumns.push(fkReferenceConst.fk_column_name);
                    }
                    ret[fkReference.constraint_name].onUpdate = fkReferenceC.on_update.toUpperCase();
                    ret[fkReference.constraint_name].onDelete = fkReferenceC.on_delete.toUpperCase();
                }
            }
        }
        return Object.values(ret);
    }
    constructor(db){
        // Database instance providing the knex connection and schema name.
        this.db = db;
    }
}
module.exports = PostgresqlSchemaInspector;
//# sourceMappingURL=schema-inspector.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,314 @@
// Raw SQL used to introspect a PostgreSQL database via information_schema and
// the pg_catalog tables. All queries are parameterized with `?` bindings
// (schema name, table name, constraint names).
const SQL_QUERIES = {
    // All base tables in the given schema, excluding PostGIS bookkeeping tables.
    TABLE_LIST: /* sql */ `
    SELECT *
    FROM information_schema.tables
    WHERE
      table_schema = ?
      AND table_type = 'BASE TABLE'
      AND table_name != 'geometry_columns'
      AND table_name != 'spatial_ref_sys';
  `,
    // Column metadata of one table (type, length, default, nullability).
    LIST_COLUMNS: /* sql */ `
    SELECT data_type, column_name, character_maximum_length, column_default, is_nullable
    FROM information_schema.columns
    WHERE table_schema = ? AND table_name = ?;
  `,
    // One row per (index, column) pair of a table, with unique/primary flags.
    INDEX_LIST: /* sql */ `
    SELECT
      ix.indexrelid,
      i.relname as index_name,
      a.attname as column_name,
      ix.indisunique as is_unique,
      ix.indisprimary as is_primary
    FROM
      pg_class t,
      pg_namespace s,
      pg_class i,
      pg_index ix,
      pg_attribute a
    WHERE
      t.oid = ix.indrelid
      AND i.oid = ix.indexrelid
      AND a.attrelid = t.oid
      AND a.attnum = ANY(ix.indkey)
      AND t.relkind = 'r'
      AND t.relnamespace = s.oid
      AND s.nspname = ?
      AND t.relname = ?;
  `,
    // Names of all FOREIGN KEY constraints declared on a table.
    FOREIGN_KEY_LIST: /* sql */ `
    SELECT
      tco."constraint_name" as constraint_name
    FROM information_schema.table_constraints tco
    WHERE
      tco.constraint_type = 'FOREIGN KEY'
      AND tco.constraint_schema = ?
      AND tco.table_name = ?
  `,
    // Local columns participating in the given constraints.
    FOREIGN_KEY_REFERENCES: /* sql */ `
    SELECT
      kcu."constraint_name" as constraint_name,
      kcu."column_name" as column_name
    FROM information_schema.key_column_usage kcu
    WHERE kcu.constraint_name=ANY(?)
    AND kcu.table_schema = ?
    AND kcu.table_name = ?;
  `,
    // ON UPDATE / ON DELETE rules and the referenced unique constraint.
    FOREIGN_KEY_REFERENCES_CONSTRAIN: /* sql */ `
    SELECT
      rco.update_rule as on_update,
      rco.delete_rule as on_delete,
      rco."unique_constraint_name" as unique_constraint_name
    FROM information_schema.referential_constraints rco
    WHERE rco.constraint_name=ANY(?)
    AND rco.constraint_schema = ?
  `,
    // Table and columns targeted by the referenced unique constraint.
    // (Key name has an upstream typo: "RFERENCE".)
    FOREIGN_KEY_REFERENCES_CONSTRAIN_RFERENCE: /* sql */ `
    SELECT
      rel_kcu."table_name" as foreign_table,
      rel_kcu."column_name" as fk_column_name
    FROM information_schema.key_column_usage rel_kcu
    WHERE rel_kcu.constraint_name=?
    AND rel_kcu.table_schema = ?
  `
};
/**
 * Map an information_schema column row to a Strapi/Knex column descriptor
 * `{ type, args? }`, where `args` are forwarded to the Knex column builder.
 */
const toStrapiType = (column)=>{
    // Keep only the leading type keyword, e.g. "character varying(255)" -> "character".
    const rootType = column.data_type.toLowerCase().match(/[^(), ]+/)?.[0];
    if (rootType === 'integer') {
        // find a way to figure out the increments
        return { type: 'integer' };
    }
    if (rootType === 'text') {
        return { type: 'text', args: ['longtext'] };
    }
    if (rootType === 'boolean') {
        return { type: 'boolean' };
    }
    if (rootType === 'character') {
        return { type: 'string', args: [column.character_maximum_length] };
    }
    if (rootType === 'timestamp') {
        return { type: 'datetime', args: [{ useTz: false, precision: 6 }] };
    }
    if (rootType === 'date') {
        return { type: 'date' };
    }
    if (rootType === 'time') {
        return { type: 'time', args: [{ precision: 3 }] };
    }
    if (rootType === 'numeric') {
        return { type: 'decimal', args: [10, 2] };
    }
    if (rootType === 'real' || rootType === 'double') {
        return { type: 'double' };
    }
    if (rootType === 'bigint') {
        return { type: 'bigInteger' };
    }
    if (rootType === 'jsonb') {
        return { type: 'jsonb' };
    }
    // Unknown type: pass the raw SQL type through verbatim.
    return { type: 'specificType', args: [column.data_type] };
};
/**
 * Classify an index row from pg_index: 'primary' wins over 'unique';
 * a plain index yields undefined (no type flag).
 */
const getIndexType = (index)=>{
    if (index.is_primary) {
        return 'primary';
    }
    return index.is_unique ? 'unique' : undefined;
};
// Reads the live PostgreSQL layout through the parameterized queries in
// SQL_QUERIES, returning plain objects describing tables, columns, indexes
// and foreign keys.
class PostgresqlSchemaInspector {
    // Describe every table in the inspected schema; per-table details are
    // fetched concurrently via Promise.all.
    async getSchema() {
        const schema = {
            tables: []
        };
        const tables = await this.getTables();
        schema.tables = await Promise.all(tables.map(async (tableName)=>{
            const columns = await this.getColumns(tableName);
            const indexes = await this.getIndexes(tableName);
            const foreignKeys = await this.getForeignKeys(tableName);
            return {
                name: tableName,
                columns,
                indexes,
                foreignKeys
            };
        }));
        return schema;
    }
    // Schema to inspect; falls back to Postgres' default "public" schema.
    getDatabaseSchema() {
        return this.db.getSchemaName() || 'public';
    }
    // Names of all base tables in the inspected schema.
    async getTables() {
        const { rows } = await this.db.connection.raw(SQL_QUERIES.TABLE_LIST, [
            this.getDatabaseSchema()
        ]);
        return rows.map((row)=>row.table_name);
    }
    // Column descriptors ({ type, args, defaultTo, name, notNullable, ... })
    // for the given table, mapped through toStrapiType.
    async getColumns(tableName) {
        const { rows } = await this.db.connection.raw(SQL_QUERIES.LIST_COLUMNS, [
            this.getDatabaseSchema(),
            tableName
        ]);
        return rows.map((row)=>{
            const { type, args = [], ...rest } = toStrapiType(row);
            // Sequence-backed defaults ("nextval(...)") are not user-set defaults; drop them.
            const defaultTo = row.column_default && row.column_default.includes('nextval(') ? null : row.column_default;
            return {
                type,
                args,
                defaultTo,
                name: row.column_name,
                notNullable: row.is_nullable === 'NO',
                unsigned: false,
                ...rest
            };
        });
    }
    // Indexes of the given table, grouped by index oid (indexrelid) so a
    // multi-column index comes back as one entry listing all its columns.
    async getIndexes(tableName) {
        const { rows } = await this.db.connection.raw(SQL_QUERIES.INDEX_LIST, [
            this.getDatabaseSchema(),
            tableName
        ]);
        const ret = {};
        for (const index of rows){
            // The `id` column's index is skipped — presumably implied by the
            // auto-increment primary key; confirm against the schema builder.
            if (index.column_name === 'id') {
                continue;
            }
            if (!ret[index.indexrelid]) {
                ret[index.indexrelid] = {
                    columns: [
                        index.column_name
                    ],
                    name: index.index_name,
                    type: getIndexType(index)
                };
            } else {
                ret[index.indexrelid].columns.push(index.column_name);
            }
        }
        return Object.values(ret);
    }
    // Foreign keys of the given table with local columns, referenced
    // table/columns and ON UPDATE / ON DELETE rules.
    async getForeignKeys(tableName) {
        const { rows } = await this.db.connection.raw(SQL_QUERIES.FOREIGN_KEY_LIST, [
            this.getDatabaseSchema(),
            tableName
        ]);
        const ret = {};
        for (const fk of rows){
            ret[fk.constraint_name] = {
                name: fk.constraint_name,
                columns: [],
                referencedColumns: [],
                referencedTable: null,
                onUpdate: null,
                onDelete: null
            };
        }
        const constraintNames = Object.keys(ret);
        const dbSchema = this.getDatabaseSchema();
        if (constraintNames.length > 0) {
            // NOTE(review): the bindings wrap constraintNames in an extra array
            // for the ANY(?) placeholder — looks deliberate for the driver's
            // array binding; confirm before changing.
            const { rows: fkReferences } = await this.db.connection.raw(SQL_QUERIES.FOREIGN_KEY_REFERENCES, [
                [
                    constraintNames
                ],
                dbSchema,
                tableName
            ]);
            // N+1 pattern: one referential_constraints query per FK column row,
            // then one key_column_usage query per referenced unique constraint.
            for (const fkReference of fkReferences){
                ret[fkReference.constraint_name].columns.push(fkReference.column_name);
                const { rows: fkReferencesConstraint } = await this.db.connection.raw(SQL_QUERIES.FOREIGN_KEY_REFERENCES_CONSTRAIN, [
                    [
                        fkReference.constraint_name
                    ],
                    dbSchema
                ]);
                for (const fkReferenceC of fkReferencesConstraint){
                    const { rows: fkReferencesConstraintReferece } = await this.db.connection.raw(SQL_QUERIES.FOREIGN_KEY_REFERENCES_CONSTRAIN_RFERENCE, [
                        fkReferenceC.unique_constraint_name,
                        dbSchema
                    ]);
                    for (const fkReferenceConst of fkReferencesConstraintReferece){
                        ret[fkReference.constraint_name].referencedTable = fkReferenceConst.foreign_table;
                        ret[fkReference.constraint_name].referencedColumns.push(fkReferenceConst.fk_column_name);
                    }
                    ret[fkReference.constraint_name].onUpdate = fkReferenceC.on_update.toUpperCase();
                    ret[fkReference.constraint_name].onDelete = fkReferenceC.on_delete.toUpperCase();
                }
            }
        }
        return Object.values(ret);
    }
    constructor(db){
        // Database instance providing the knex connection and schema name.
        this.db = db;
    }
}
export { PostgresqlSchemaInspector as default };
//# sourceMappingURL=schema-inspector.mjs.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,20 @@
/// <reference types="node" />
import type { Knex } from 'knex';
import Dialect from '../dialect';
import SqliteSchemaInspector from './schema-inspector';
import type { Database } from '../..';
/**
 * SQLite implementation of the {@link Dialect} contract: file-based
 * connection setup, per-connection pragmas, SQL type mapping and driver
 * error translation.
 */
export default class SqliteDialect extends Dialect {
    schemaInspector: SqliteSchemaInspector;
    constructor(db: Database);
    /** Resolves the database filename and ensures its directory exists. */
    configure(conn?: Knex.Sqlite3ConnectionConfig): void;
    useReturning(): boolean;
    initialize(nativeConnection: unknown): Promise<void>;
    canAlterConstraints(): boolean;
    getSqlType(type: string): string;
    /** False for operators the dialect cannot translate to SQLite SQL. */
    supportsOperator(operator: string): boolean;
    startSchemaUpdate(): Promise<void>;
    endSchemaUpdate(): Promise<void>;
    transformErrors(error: NodeJS.ErrnoException): void;
    canAddIncrements(): boolean;
}
//# sourceMappingURL=index.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/dialects/sqlite/index.ts"],"names":[],"mappings":";AAEA,OAAO,KAAK,EAAE,IAAI,EAAE,MAAM,MAAM,CAAC;AAGjC,OAAO,OAAO,MAAM,YAAY,CAAC;AACjC,OAAO,qBAAqB,MAAM,oBAAoB,CAAC;AACvD,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,OAAO,CAAC;AAItC,MAAM,CAAC,OAAO,OAAO,aAAc,SAAQ,OAAO;IAChD,eAAe,EAAE,qBAAqB,CAAC;gBAE3B,EAAE,EAAE,QAAQ;IAMxB,SAAS,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,uBAAuB;IAY7C,YAAY;IAIN,UAAU,CAAC,gBAAgB,EAAE,OAAO;IAI1C,mBAAmB;IAInB,UAAU,CAAC,IAAI,EAAE,MAAM;IAkBvB,gBAAgB,CAAC,QAAQ,EAAE,MAAM;IAI3B,iBAAiB;IAIjB,eAAe;IAIrB,eAAe,CAAC,KAAK,EAAE,MAAM,CAAC,cAAc;IAW5C,gBAAgB;CAGjB"}

View File

@@ -0,0 +1,82 @@
'use strict';
var path = require('path');
var fse = require('fs-extra');
var notNull = require('../../errors/not-null.js');
var dialect = require('../dialect.js');
var schemaInspector = require('./schema-inspector.js');
// Query operators this dialect cannot translate to SQLite SQL; consulted by
// SqliteDialect.supportsOperator below.
const UNSUPPORTED_OPERATORS = [
    '$jsonSupersetOf'
];
/**
 * SQLite-specific behaviour: resolves the database file location, enforces
 * foreign keys per connection, maps SQL types and translates driver errors.
 */
class SqliteDialect extends dialect {
    constructor(db){
        super(db, 'sqlite');
        this.schemaInspector = new schemaInspector(db);
    }
    configure(conn) {
        // Fall back to the connection object from the database configuration.
        const connection = conn || this.db.config.connection.connection;
        if (typeof connection !== 'string') {
            // Normalize to an absolute path so the database location does not
            // depend on the process working directory.
            connection.filename = path.resolve(connection.filename);
        }
        // Make sure the directory that will hold the database file exists.
        fse.ensureDirSync(path.dirname(connection.filename));
    }
    useReturning() {
        return true;
    }
    async initialize(nativeConnection) {
        // SQLite ships with foreign keys disabled; turn them on per connection.
        await this.db.connection.raw('pragma foreign_keys = on').connection(nativeConnection);
    }
    canAlterConstraints() {
        return false;
    }
    getSqlType(type) {
        if (type === 'enum') {
            return 'text';
        }
        if (type === 'double' || type === 'decimal') {
            return 'float';
        }
        if (type === 'timestamp') {
            return 'datetime';
        }
        return type;
    }
    supportsOperator(operator) {
        return !UNSUPPORTED_OPERATORS.includes(operator);
    }
    async startSchemaUpdate() {
        // Suspend FK enforcement while tables are being rebuilt.
        await this.db.connection.raw(`pragma foreign_keys = off`);
    }
    async endSchemaUpdate() {
        await this.db.connection.raw(`pragma foreign_keys = on`);
    }
    transformErrors(error) {
        // errno 19 = SQLITE_CONSTRAINT.
        if (error.errno === 19) {
            throw new notNull(); // TODO: extract column name
        }
        super.transformErrors(error);
    }
    canAddIncrements() {
        return false;
    }
}
module.exports = SqliteDialect;
//# sourceMappingURL=index.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sources":["../../../src/dialects/sqlite/index.ts"],"sourcesContent":["import path from 'path';\nimport fse from 'fs-extra';\nimport type { Knex } from 'knex';\n\nimport * as errors from '../../errors';\nimport Dialect from '../dialect';\nimport SqliteSchemaInspector from './schema-inspector';\nimport type { Database } from '../..';\n\nconst UNSUPPORTED_OPERATORS = ['$jsonSupersetOf'];\n\nexport default class SqliteDialect extends Dialect {\n schemaInspector: SqliteSchemaInspector;\n\n constructor(db: Database) {\n super(db, 'sqlite');\n\n this.schemaInspector = new SqliteSchemaInspector(db);\n }\n\n configure(conn?: Knex.Sqlite3ConnectionConfig) {\n const connection =\n conn || (this.db.config.connection.connection as Knex.Sqlite3ConnectionConfig);\n if (typeof connection !== 'string') {\n connection.filename = path.resolve(connection.filename);\n }\n\n const dbDir = path.dirname(connection.filename);\n\n fse.ensureDirSync(dbDir);\n }\n\n useReturning() {\n return true;\n }\n\n async initialize(nativeConnection: unknown) {\n await this.db.connection.raw('pragma foreign_keys = on').connection(nativeConnection);\n }\n\n canAlterConstraints() {\n return false;\n }\n\n getSqlType(type: string) {\n switch (type) {\n case 'enum': {\n return 'text';\n }\n case 'double':\n case 'decimal': {\n return 'float';\n }\n case 'timestamp': {\n return 'datetime';\n }\n default: {\n return type;\n }\n }\n }\n\n supportsOperator(operator: string) {\n return !UNSUPPORTED_OPERATORS.includes(operator);\n }\n\n async startSchemaUpdate() {\n await this.db.connection.raw(`pragma foreign_keys = off`);\n }\n\n async endSchemaUpdate() {\n await this.db.connection.raw(`pragma foreign_keys = on`);\n }\n\n transformErrors(error: NodeJS.ErrnoException) {\n switch (error.errno) {\n case 19: {\n throw new errors.NotNullError(); // TODO: extract column name\n }\n default: {\n super.transformErrors(error);\n }\n }\n }\n\n canAddIncrements() {\n return false;\n 
}\n}\n"],"names":["UNSUPPORTED_OPERATORS","SqliteDialect","Dialect","configure","conn","connection","db","config","filename","path","resolve","dbDir","dirname","fse","ensureDirSync","useReturning","initialize","nativeConnection","raw","canAlterConstraints","getSqlType","type","supportsOperator","operator","includes","startSchemaUpdate","endSchemaUpdate","transformErrors","error","errno","errors","canAddIncrements","constructor","schemaInspector","SqliteSchemaInspector"],"mappings":";;;;;;;;AASA,MAAMA,qBAAwB,GAAA;AAAC,IAAA;AAAkB,CAAA;AAElC,MAAMC,aAAsBC,SAAAA,OAAAA,CAAAA;AASzCC,IAAAA,SAAAA,CAAUC,IAAmC,EAAE;QAC7C,MAAMC,UAAAA,GACJD,IAAS,IAAA,IAAI,CAACE,EAAE,CAACC,MAAM,CAACF,UAAU,CAACA,UAAU;QAC/C,IAAI,OAAOA,eAAe,QAAU,EAAA;AAClCA,YAAAA,UAAAA,CAAWG,QAAQ,GAAGC,IAAAA,CAAKC,OAAO,CAACL,WAAWG,QAAQ,CAAA;AACxD;AAEA,QAAA,MAAMG,KAAQF,GAAAA,IAAAA,CAAKG,OAAO,CAACP,WAAWG,QAAQ,CAAA;AAE9CK,QAAAA,GAAAA,CAAIC,aAAa,CAACH,KAAAA,CAAAA;AACpB;IAEAI,YAAe,GAAA;QACb,OAAO,IAAA;AACT;IAEA,MAAMC,UAAAA,CAAWC,gBAAyB,EAAE;QAC1C,MAAM,IAAI,CAACX,EAAE,CAACD,UAAU,CAACa,GAAG,CAAC,0BAA4Bb,CAAAA,CAAAA,UAAU,CAACY,gBAAAA,CAAAA;AACtE;IAEAE,mBAAsB,GAAA;QACpB,OAAO,KAAA;AACT;AAEAC,IAAAA,UAAAA,CAAWC,IAAY,EAAE;QACvB,OAAQA,IAAAA;YACN,KAAK,MAAA;AAAQ,gBAAA;oBACX,OAAO,MAAA;AACT;YACA,KAAK,QAAA;YACL,KAAK,SAAA;AAAW,gBAAA;oBACd,OAAO,OAAA;AACT;YACA,KAAK,WAAA;AAAa,gBAAA;oBAChB,OAAO,UAAA;AACT;AACA,YAAA;AAAS,gBAAA;oBACP,OAAOA,IAAAA;AACT;AACF;AACF;AAEAC,IAAAA,gBAAAA,CAAiBC,QAAgB,EAAE;QACjC,OAAO,CAACvB,qBAAsBwB,CAAAA,QAAQ,CAACD,QAAAA,CAAAA;AACzC;AAEA,IAAA,MAAME,iBAAoB,GAAA;QACxB,MAAM,IAAI,CAACnB,EAAE,CAACD,UAAU,CAACa,GAAG,CAAC,CAAC,yBAAyB,CAAC,CAAA;AAC1D;AAEA,IAAA,MAAMQ,eAAkB,GAAA;QACtB,MAAM,IAAI,CAACpB,EAAE,CAACD,UAAU,CAACa,GAAG,CAAC,CAAC,wBAAwB,CAAC,CAAA;AACzD;AAEAS,IAAAA,eAAAA,CAAgBC,KAA4B,EAAE;AAC5C,QAAA,OAAQA,MAAMC,KAAK;YACjB,KAAK,EAAA;AAAI,gBAAA;AACP,oBAAA,MAAM,IAAIC,OAAmB,EAAA,CAAA;AAC/B;AACA,YAAA;AAAS,gBAAA;AACP,oBAAA,KAAK,CAACH,eAAgBC,CAAAA,KAAAA,CAAAA;AACxB;AACF;AACF;IAEAG,gBAAmB,GAAA;QACjB,OAAO,KAAA;AACT;AAzEAC,IAAAA,WAA
AA,CAAY1B,EAAY,CAAE;AACxB,QAAA,KAAK,CAACA,EAAI,EAAA,QAAA,CAAA;AAEV,QAAA,IAAI,CAAC2B,eAAe,GAAG,IAAIC,eAAsB5B,CAAAA,EAAAA,CAAAA;AACnD;AAsEF;;;;"}

View File

@@ -0,0 +1,80 @@
import path from 'path';
import fse from 'fs-extra';
import NotNullError from '../../errors/not-null.mjs';
import Dialect from '../dialect.mjs';
import SqliteSchemaInspector from './schema-inspector.mjs';
// Query operators this dialect cannot translate to SQLite SQL; consulted by
// SqliteDialect.supportsOperator below.
const UNSUPPORTED_OPERATORS = [
    '$jsonSupersetOf'
];
/**
 * SQLite-specific behaviour: resolves the database file location, enforces
 * foreign keys per connection, maps SQL types and translates driver errors.
 */
class SqliteDialect extends Dialect {
    constructor(db){
        super(db, 'sqlite');
        this.schemaInspector = new SqliteSchemaInspector(db);
    }
    configure(conn) {
        // Fall back to the connection object from the database configuration.
        const connection = conn || this.db.config.connection.connection;
        if (typeof connection !== 'string') {
            // Normalize to an absolute path so the database location does not
            // depend on the process working directory.
            connection.filename = path.resolve(connection.filename);
        }
        // Make sure the directory that will hold the database file exists.
        fse.ensureDirSync(path.dirname(connection.filename));
    }
    useReturning() {
        return true;
    }
    async initialize(nativeConnection) {
        // SQLite ships with foreign keys disabled; turn them on per connection.
        await this.db.connection.raw('pragma foreign_keys = on').connection(nativeConnection);
    }
    canAlterConstraints() {
        return false;
    }
    getSqlType(type) {
        if (type === 'enum') {
            return 'text';
        }
        if (type === 'double' || type === 'decimal') {
            return 'float';
        }
        if (type === 'timestamp') {
            return 'datetime';
        }
        return type;
    }
    supportsOperator(operator) {
        return !UNSUPPORTED_OPERATORS.includes(operator);
    }
    async startSchemaUpdate() {
        // Suspend FK enforcement while tables are being rebuilt.
        await this.db.connection.raw(`pragma foreign_keys = off`);
    }
    async endSchemaUpdate() {
        await this.db.connection.raw(`pragma foreign_keys = on`);
    }
    transformErrors(error) {
        // errno 19 = SQLITE_CONSTRAINT.
        if (error.errno === 19) {
            throw new NotNullError(); // TODO: extract column name
        }
        super.transformErrors(error);
    }
    canAddIncrements() {
        return false;
    }
}
export { SqliteDialect as default };
//# sourceMappingURL=index.mjs.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.mjs","sources":["../../../src/dialects/sqlite/index.ts"],"sourcesContent":["import path from 'path';\nimport fse from 'fs-extra';\nimport type { Knex } from 'knex';\n\nimport * as errors from '../../errors';\nimport Dialect from '../dialect';\nimport SqliteSchemaInspector from './schema-inspector';\nimport type { Database } from '../..';\n\nconst UNSUPPORTED_OPERATORS = ['$jsonSupersetOf'];\n\nexport default class SqliteDialect extends Dialect {\n schemaInspector: SqliteSchemaInspector;\n\n constructor(db: Database) {\n super(db, 'sqlite');\n\n this.schemaInspector = new SqliteSchemaInspector(db);\n }\n\n configure(conn?: Knex.Sqlite3ConnectionConfig) {\n const connection =\n conn || (this.db.config.connection.connection as Knex.Sqlite3ConnectionConfig);\n if (typeof connection !== 'string') {\n connection.filename = path.resolve(connection.filename);\n }\n\n const dbDir = path.dirname(connection.filename);\n\n fse.ensureDirSync(dbDir);\n }\n\n useReturning() {\n return true;\n }\n\n async initialize(nativeConnection: unknown) {\n await this.db.connection.raw('pragma foreign_keys = on').connection(nativeConnection);\n }\n\n canAlterConstraints() {\n return false;\n }\n\n getSqlType(type: string) {\n switch (type) {\n case 'enum': {\n return 'text';\n }\n case 'double':\n case 'decimal': {\n return 'float';\n }\n case 'timestamp': {\n return 'datetime';\n }\n default: {\n return type;\n }\n }\n }\n\n supportsOperator(operator: string) {\n return !UNSUPPORTED_OPERATORS.includes(operator);\n }\n\n async startSchemaUpdate() {\n await this.db.connection.raw(`pragma foreign_keys = off`);\n }\n\n async endSchemaUpdate() {\n await this.db.connection.raw(`pragma foreign_keys = on`);\n }\n\n transformErrors(error: NodeJS.ErrnoException) {\n switch (error.errno) {\n case 19: {\n throw new errors.NotNullError(); // TODO: extract column name\n }\n default: {\n super.transformErrors(error);\n }\n }\n }\n\n canAddIncrements() {\n return false;\n 
}\n}\n"],"names":["UNSUPPORTED_OPERATORS","SqliteDialect","Dialect","configure","conn","connection","db","config","filename","path","resolve","dbDir","dirname","fse","ensureDirSync","useReturning","initialize","nativeConnection","raw","canAlterConstraints","getSqlType","type","supportsOperator","operator","includes","startSchemaUpdate","endSchemaUpdate","transformErrors","error","errno","errors","canAddIncrements","constructor","schemaInspector","SqliteSchemaInspector"],"mappings":";;;;;;AASA,MAAMA,qBAAwB,GAAA;AAAC,IAAA;AAAkB,CAAA;AAElC,MAAMC,aAAsBC,SAAAA,OAAAA,CAAAA;AASzCC,IAAAA,SAAAA,CAAUC,IAAmC,EAAE;QAC7C,MAAMC,UAAAA,GACJD,IAAS,IAAA,IAAI,CAACE,EAAE,CAACC,MAAM,CAACF,UAAU,CAACA,UAAU;QAC/C,IAAI,OAAOA,eAAe,QAAU,EAAA;AAClCA,YAAAA,UAAAA,CAAWG,QAAQ,GAAGC,IAAAA,CAAKC,OAAO,CAACL,WAAWG,QAAQ,CAAA;AACxD;AAEA,QAAA,MAAMG,KAAQF,GAAAA,IAAAA,CAAKG,OAAO,CAACP,WAAWG,QAAQ,CAAA;AAE9CK,QAAAA,GAAAA,CAAIC,aAAa,CAACH,KAAAA,CAAAA;AACpB;IAEAI,YAAe,GAAA;QACb,OAAO,IAAA;AACT;IAEA,MAAMC,UAAAA,CAAWC,gBAAyB,EAAE;QAC1C,MAAM,IAAI,CAACX,EAAE,CAACD,UAAU,CAACa,GAAG,CAAC,0BAA4Bb,CAAAA,CAAAA,UAAU,CAACY,gBAAAA,CAAAA;AACtE;IAEAE,mBAAsB,GAAA;QACpB,OAAO,KAAA;AACT;AAEAC,IAAAA,UAAAA,CAAWC,IAAY,EAAE;QACvB,OAAQA,IAAAA;YACN,KAAK,MAAA;AAAQ,gBAAA;oBACX,OAAO,MAAA;AACT;YACA,KAAK,QAAA;YACL,KAAK,SAAA;AAAW,gBAAA;oBACd,OAAO,OAAA;AACT;YACA,KAAK,WAAA;AAAa,gBAAA;oBAChB,OAAO,UAAA;AACT;AACA,YAAA;AAAS,gBAAA;oBACP,OAAOA,IAAAA;AACT;AACF;AACF;AAEAC,IAAAA,gBAAAA,CAAiBC,QAAgB,EAAE;QACjC,OAAO,CAACvB,qBAAsBwB,CAAAA,QAAQ,CAACD,QAAAA,CAAAA;AACzC;AAEA,IAAA,MAAME,iBAAoB,GAAA;QACxB,MAAM,IAAI,CAACnB,EAAE,CAACD,UAAU,CAACa,GAAG,CAAC,CAAC,yBAAyB,CAAC,CAAA;AAC1D;AAEA,IAAA,MAAMQ,eAAkB,GAAA;QACtB,MAAM,IAAI,CAACpB,EAAE,CAACD,UAAU,CAACa,GAAG,CAAC,CAAC,wBAAwB,CAAC,CAAA;AACzD;AAEAS,IAAAA,eAAAA,CAAgBC,KAA4B,EAAE;AAC5C,QAAA,OAAQA,MAAMC,KAAK;YACjB,KAAK,EAAA;AAAI,gBAAA;AACP,oBAAA,MAAM,IAAIC,YAAmB,EAAA,CAAA;AAC/B;AACA,YAAA;AAAS,gBAAA;AACP,oBAAA,KAAK,CAACH,eAAgBC,CAAAA,KAAAA,CAAAA;AACxB;AACF;AACF;IAEAG,gBAAmB,GAAA;QACjB,OAAO,KAAA;AACT;AAzEAC,IAAAA,WAAAA
,CAAY1B,EAAY,CAAE;AACxB,QAAA,KAAK,CAACA,EAAI,EAAA,QAAA,CAAA;AAEV,QAAA,IAAI,CAAC2B,eAAe,GAAG,IAAIC,qBAAsB5B,CAAAA,EAAAA,CAAAA;AACnD;AAsEF;;;;"}

View File

@@ -0,0 +1,13 @@
import type { Database } from '../..';
import type { Schema, Column, Index, ForeignKey } from '../../schema/types';
import type { SchemaInspector } from '../dialect';
/**
 * SQLite implementation of the dialect {@link SchemaInspector} contract:
 * reads the live database layout (tables, columns, indexes, foreign keys)
 * and exposes it as Strapi schema objects.
 */
export default class SqliteSchemaInspector implements SchemaInspector {
    db: Database;
    constructor(db: Database);
    /** Full description of every table in the database. */
    getSchema(): Promise<Schema>;
    getTables(): Promise<string[]>;
    getColumns(tableName: string): Promise<Column[]>;
    getIndexes(tableName: string): Promise<Index[]>;
    getForeignKeys(tableName: string): Promise<ForeignKey[]>;
}
//# sourceMappingURL=schema-inspector.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"schema-inspector.d.ts","sourceRoot":"","sources":["../../../src/dialects/sqlite/schema-inspector.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,OAAO,CAAC;AACtC,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,UAAU,EAAE,MAAM,oBAAoB,CAAC;AAC5E,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,YAAY,CAAC;AAqGlD,MAAM,CAAC,OAAO,OAAO,qBAAsB,YAAW,eAAe;IACnE,EAAE,EAAE,QAAQ,CAAC;gBAED,EAAE,EAAE,QAAQ;IAIlB,SAAS;IAoBT,SAAS,IAAI,OAAO,CAAC,MAAM,EAAE,CAAC;IAM9B,UAAU,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC;IAkBhD,UAAU,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,KAAK,EAAE,CAAC;IAyB/C,cAAc,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,EAAE,CAAC;CA0B/D"}

View File

@@ -0,0 +1,211 @@
'use strict';
// Raw introspection queries for SQLite. The `??` placeholders are replaced by
// knex with an escaped identifier (a table or index name).
const SQL_QUERIES = {
    // All user tables; internal sqlite_* tables are excluded.
    TABLE_LIST: `select name from sqlite_master where type = 'table' and name NOT LIKE 'sqlite%'`,
    TABLE_INFO: `pragma table_info(??)`,
    INDEX_LIST: 'pragma index_list(??)',
    INDEX_INFO: 'pragma index_info(??)',
    FOREIGN_KEY_LIST: 'pragma foreign_key_list(??)'
};
// Maps an SQLite column description (from `pragma table_info`) to the Strapi
// schema-builder type descriptor `{ type, args? }`.
const toStrapiType = (column)=>{
    const { type } = column;
    // First token of the lowercased SQL type, e.g. "varchar(255)" -> "varchar".
    const rootType = type.toLowerCase().match(/[^(), ]+/)?.[0];
    if (rootType === 'integer') {
        // An integer primary key maps to an auto-incrementing column.
        if (column.pk) {
            return { type: 'increments', args: [{ primary: true, primaryKey: true }] };
        }
        return { type: 'integer' };
    }
    if (rootType === 'float') {
        return { type: 'float', args: [10, 2] };
    }
    if (rootType === 'bigint') {
        return { type: 'bigInteger' };
    }
    if (rootType === 'varchar') {
        // The declared length sits between "varchar(" and the closing parenthesis.
        const declaredLength = type.slice(8, type.length - 1);
        return { type: 'string', args: [Number(declaredLength)] };
    }
    if (rootType === 'text') {
        return { type: 'text', args: ['longtext'] };
    }
    if (rootType === 'json') {
        return { type: 'jsonb' };
    }
    if (rootType === 'boolean') {
        return { type: 'boolean' };
    }
    if (rootType === 'datetime') {
        return { type: 'datetime', args: [{ useTz: false, precision: 6 }] };
    }
    if (rootType === 'date') {
        return { type: 'date' };
    }
    if (rootType === 'time') {
        return { type: 'time', args: [{ precision: 3 }] };
    }
    // Unknown root type: pass the raw SQLite type through via `specificType`.
    return { type: 'specificType', args: [column.data_type] };
};
// Introspects an SQLite database through PRAGMA statements and exposes its
// structure (tables, columns, indexes, foreign keys) as a Strapi schema.
class SqliteSchemaInspector {
    // Builds the full schema description: one entry per user table with its
    // columns, indexes and foreign keys. Tables are inspected sequentially.
    async getSchema() {
        const schema = {
            tables: []
        };
        const tables = await this.getTables();
        for (const tableName of tables){
            const columns = await this.getColumns(tableName);
            const indexes = await this.getIndexes(tableName);
            const foreignKeys = await this.getForeignKeys(tableName);
            schema.tables.push({
                name: tableName,
                columns,
                indexes,
                foreignKeys
            });
        }
        return schema;
    }
    // Lists user table names (internal sqlite_* tables are filtered by the query).
    async getTables() {
        const rows = await this.db.connection.raw(SQL_QUERIES.TABLE_LIST);
        return rows.map((row)=>row.name);
    }
    // Describes the columns of `tableName`, mapping SQLite types to Strapi types.
    async getColumns(tableName) {
        const rows = await this.db.connection.raw(SQL_QUERIES.TABLE_INFO, [
            tableName
        ]);
        return rows.map((row)=>{
            const { type, args = [], ...rest } = toStrapiType(row);
            return {
                type,
                args,
                name: row.name,
                defaultTo: row.dflt_value,
                // `notnull` comes from the pragma as 0/1 — assumed; null is passed through.
                notNullable: row.notnull !== null ? Boolean(row.notnull) : null,
                // SQLite has no unsigned column modifier.
                unsigned: false,
                ...rest
            };
        });
    }
    // Lists the indexes of `tableName`, skipping SQLite's implicit sqlite_* indexes.
    async getIndexes(tableName) {
        const indexes = await this.db.connection.raw(SQL_QUERIES.INDEX_LIST, [
            tableName
        ]);
        const ret = [];
        for (const index of indexes.filter((index)=>!index.name.startsWith('sqlite_'))){
            // A second pragma call resolves the columns covered by each index.
            const res = await this.db.connection.raw(SQL_QUERIES.INDEX_INFO, [
                index.name
            ]);
            const indexInfo = {
                columns: res.map((row)=>row.name),
                name: index.name
            };
            if (index.unique) {
                indexInfo.type = 'unique';
            }
            ret.push(indexInfo);
        }
        return ret;
    }
    // Lists the foreign keys of `tableName`. Pragma rows sharing the same `id`
    // belong to one composite foreign key and are merged into a single entry.
    async getForeignKeys(tableName) {
        const fks = await this.db.connection.raw(SQL_QUERIES.FOREIGN_KEY_LIST, [
            tableName
        ]);
        const ret = {};
        for (const fk of fks){
            if (!ret[fk.id]) {
                ret[fk.id] = {
                    // TODO: name, // find name
                    name: '',
                    columns: [
                        fk.from
                    ],
                    referencedColumns: [
                        fk.to
                    ],
                    referencedTable: fk.table,
                    onUpdate: fk.on_update.toUpperCase(),
                    onDelete: fk.on_delete.toUpperCase()
                };
            } else {
                // Additional column pair of a composite foreign key.
                ret[fk.id].columns.push(fk.from);
                ret[fk.id].referencedColumns.push(fk.to);
            }
        }
        return Object.values(ret);
    }
    constructor(db){
        this.db = db;
    }
}
module.exports = SqliteSchemaInspector;
//# sourceMappingURL=schema-inspector.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,209 @@
// Raw introspection queries for SQLite. The `??` placeholders are replaced by
// knex with an escaped identifier (a table or index name).
const SQL_QUERIES = {
    // All user tables; internal sqlite_* tables are excluded.
    TABLE_LIST: `select name from sqlite_master where type = 'table' and name NOT LIKE 'sqlite%'`,
    TABLE_INFO: `pragma table_info(??)`,
    INDEX_LIST: 'pragma index_list(??)',
    INDEX_INFO: 'pragma index_info(??)',
    FOREIGN_KEY_LIST: 'pragma foreign_key_list(??)'
};
// Maps an SQLite column description (from `pragma table_info`) to the Strapi
// schema-builder type descriptor `{ type, args? }`.
const toStrapiType = (column)=>{
    const { type } = column;
    // First token of the lowercased SQL type, e.g. "varchar(255)" -> "varchar".
    const rootType = type.toLowerCase().match(/[^(), ]+/)?.[0];
    if (rootType === 'integer') {
        // An integer primary key maps to an auto-incrementing column.
        if (column.pk) {
            return { type: 'increments', args: [{ primary: true, primaryKey: true }] };
        }
        return { type: 'integer' };
    }
    if (rootType === 'float') {
        return { type: 'float', args: [10, 2] };
    }
    if (rootType === 'bigint') {
        return { type: 'bigInteger' };
    }
    if (rootType === 'varchar') {
        // The declared length sits between "varchar(" and the closing parenthesis.
        const declaredLength = type.slice(8, type.length - 1);
        return { type: 'string', args: [Number(declaredLength)] };
    }
    if (rootType === 'text') {
        return { type: 'text', args: ['longtext'] };
    }
    if (rootType === 'json') {
        return { type: 'jsonb' };
    }
    if (rootType === 'boolean') {
        return { type: 'boolean' };
    }
    if (rootType === 'datetime') {
        return { type: 'datetime', args: [{ useTz: false, precision: 6 }] };
    }
    if (rootType === 'date') {
        return { type: 'date' };
    }
    if (rootType === 'time') {
        return { type: 'time', args: [{ precision: 3 }] };
    }
    // Unknown root type: pass the raw SQLite type through via `specificType`.
    return { type: 'specificType', args: [column.data_type] };
};
// Introspects an SQLite database through PRAGMA statements and exposes its
// structure (tables, columns, indexes, foreign keys) as a Strapi schema.
class SqliteSchemaInspector {
    // Builds the full schema description: one entry per user table with its
    // columns, indexes and foreign keys. Tables are inspected sequentially.
    async getSchema() {
        const schema = {
            tables: []
        };
        const tables = await this.getTables();
        for (const tableName of tables){
            const columns = await this.getColumns(tableName);
            const indexes = await this.getIndexes(tableName);
            const foreignKeys = await this.getForeignKeys(tableName);
            schema.tables.push({
                name: tableName,
                columns,
                indexes,
                foreignKeys
            });
        }
        return schema;
    }
    // Lists user table names (internal sqlite_* tables are filtered by the query).
    async getTables() {
        const rows = await this.db.connection.raw(SQL_QUERIES.TABLE_LIST);
        return rows.map((row)=>row.name);
    }
    // Describes the columns of `tableName`, mapping SQLite types to Strapi types.
    async getColumns(tableName) {
        const rows = await this.db.connection.raw(SQL_QUERIES.TABLE_INFO, [
            tableName
        ]);
        return rows.map((row)=>{
            const { type, args = [], ...rest } = toStrapiType(row);
            return {
                type,
                args,
                name: row.name,
                defaultTo: row.dflt_value,
                // `notnull` comes from the pragma as 0/1 — assumed; null is passed through.
                notNullable: row.notnull !== null ? Boolean(row.notnull) : null,
                // SQLite has no unsigned column modifier.
                unsigned: false,
                ...rest
            };
        });
    }
    // Lists the indexes of `tableName`, skipping SQLite's implicit sqlite_* indexes.
    async getIndexes(tableName) {
        const indexes = await this.db.connection.raw(SQL_QUERIES.INDEX_LIST, [
            tableName
        ]);
        const ret = [];
        for (const index of indexes.filter((index)=>!index.name.startsWith('sqlite_'))){
            // A second pragma call resolves the columns covered by each index.
            const res = await this.db.connection.raw(SQL_QUERIES.INDEX_INFO, [
                index.name
            ]);
            const indexInfo = {
                columns: res.map((row)=>row.name),
                name: index.name
            };
            if (index.unique) {
                indexInfo.type = 'unique';
            }
            ret.push(indexInfo);
        }
        return ret;
    }
    // Lists the foreign keys of `tableName`. Pragma rows sharing the same `id`
    // belong to one composite foreign key and are merged into a single entry.
    async getForeignKeys(tableName) {
        const fks = await this.db.connection.raw(SQL_QUERIES.FOREIGN_KEY_LIST, [
            tableName
        ]);
        const ret = {};
        for (const fk of fks){
            if (!ret[fk.id]) {
                ret[fk.id] = {
                    // TODO: name, // find name
                    name: '',
                    columns: [
                        fk.from
                    ],
                    referencedColumns: [
                        fk.to
                    ],
                    referencedTable: fk.table,
                    onUpdate: fk.on_update.toUpperCase(),
                    onDelete: fk.on_delete.toUpperCase()
                };
            } else {
                // Additional column pair of a composite foreign key.
                ret[fk.id].columns.push(fk.from);
                ret[fk.id].referencedColumns.push(fk.to);
            }
        }
        return Object.values(ret);
    }
    constructor(db){
        this.db = db;
    }
}
export { SqliteSchemaInspector as default };
//# sourceMappingURL=schema-inspector.mjs.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,4 @@
import type { Database } from '..';
import type { Repository } from './types';
/** Builds the CRUD repository bound to model `uid`, delegating to `db`'s entity manager. */
export declare const createRepository: (uid: string, db: Database) => Repository;
//# sourceMappingURL=entity-repository.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"entity-repository.d.ts","sourceRoot":"","sources":["../../src/entity-manager/entity-repository.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,IAAI,CAAC;AACnC,OAAO,KAAK,EAAE,UAAU,EAAU,MAAM,SAAS,CAAC;AAkClD,eAAO,MAAM,gBAAgB,QAAS,MAAM,MAAM,QAAQ,KAAG,UA8H5D,CAAC"}

View File

@@ -0,0 +1,139 @@
'use strict';
var _ = require('lodash/fp');
// Splits the pagination fields off `params` and applies the defaults
// (page 1, 10 entries per page), coercing both values to numbers.
const withDefaultPagination = (params)=>{
    const { page, pageSize, ...otherParams } = params;
    const resolvedPage = page === undefined ? 1 : page;
    const resolvedPageSize = pageSize === undefined ? 10 : pageSize;
    return {
        page: Number(resolvedPage),
        pageSize: Number(resolvedPageSize),
        ...otherParams
    };
};
// Translates 1-based page/pageSize pagination into a limit/offset query.
// Returns the query plus the normalized pagination info.
const withOffsetLimit = (params)=>{
    const { page, pageSize, ...restParams } = withDefaultPagination(params);
    // Clamp so page <= 0 still yields offset 0.
    const zeroBasedPage = Math.max(page - 1, 0);
    const pagedQuery = {
        ...restParams,
        limit: pageSize,
        offset: zeroBasedPage * pageSize
    };
    return [
        pagedQuery,
        {
            page,
            pageSize
        }
    ];
};
// Creates the repository of persistence operations for the model `uid`.
// Every method delegates to the shared entity manager, pre-bound to `uid`.
const createRepository = (uid, db)=>{
    return {
        findOne (params = {}) {
            return db.entityManager.findOne(uid, params);
        },
        findMany (params = {}) {
            return db.entityManager.findMany(uid, params);
        },
        // Runs the find and the count in parallel; resolves to [results, total].
        findWithCount (params = {}) {
            return Promise.all([
                db.entityManager.findMany(uid, params),
                db.entityManager.count(uid, params)
            ]);
        },
        // Paginated find: translates page/pageSize into limit/offset and
        // returns the results together with pagination metadata.
        async findPage (params) {
            const [query, { page, pageSize }] = withOffsetLimit(params);
            const [results, total] = await Promise.all([
                db.entityManager.findMany(uid, query),
                db.entityManager.count(uid, query)
            ]);
            return {
                results,
                pagination: {
                    page,
                    pageSize,
                    pageCount: Math.ceil(total / pageSize),
                    total
                }
            };
        },
        create (params) {
            return db.entityManager.create(uid, params);
        },
        createMany (params) {
            return db.entityManager.createMany(uid, params);
        },
        update (params) {
            return db.entityManager.update(uid, params);
        },
        updateMany (params) {
            return db.entityManager.updateMany(uid, params);
        },
        delete (params) {
            return db.entityManager.delete(uid, params);
        },
        deleteMany (params = {}) {
            return db.entityManager.deleteMany(uid, params);
        },
        count (params) {
            return db.entityManager.count(uid, params);
        },
        attachRelations (id, data) {
            return db.entityManager.attachRelations(uid, id, data);
        },
        // Updates relations inside a dedicated transaction, rolling back and
        // rethrowing on failure.
        async updateRelations (id, data) {
            const trx = await db.transaction();
            try {
                await db.entityManager.updateRelations(uid, id, data, {
                    transaction: trx.get()
                });
                return await trx.commit();
            } catch (e) {
                await trx.rollback();
                throw e;
            }
        },
        deleteRelations (id) {
            return db.entityManager.deleteRelations(uid, id);
        },
        populate (entity, populate) {
            return db.entityManager.populate(uid, entity, populate);
        },
        load (entity, fields, params) {
            return db.entityManager.load(uid, entity, fields, params);
        },
        // Paginated load of a single anyToMany relational field. Throws when
        // `field` is not a string naming a oneToMany/manyToMany relation.
        async loadPages (entity, field, params) {
            if (!_.isString(field)) {
                throw new Error(`Invalid load. Expected ${field} to be a string`);
            }
            const { attributes } = db.metadata.get(uid);
            const attribute = attributes[field];
            if (!attribute || attribute.type !== 'relation' || !attribute.relation || ![
                'oneToMany',
                'manyToMany'
            ].includes(attribute.relation)) {
                throw new Error(`Invalid load. Expected ${field} to be an anyToMany relational attribute`);
            }
            const [query, { page, pageSize }] = withOffsetLimit(params);
            // Load the relation page and the relation count in parallel.
            const [results, { count: total }] = await Promise.all([
                db.entityManager.load(uid, entity, field, query),
                db.entityManager.load(uid, entity, field, {
                    ...query,
                    count: true
                })
            ]);
            return {
                results,
                pagination: {
                    page,
                    pageSize,
                    pageCount: Math.ceil(total / pageSize),
                    total
                }
            };
        }
    };
};
exports.createRepository = createRepository;
//# sourceMappingURL=entity-repository.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,137 @@
import { isString } from 'lodash/fp';
// Splits the pagination fields off `params` and applies the defaults
// (page 1, 10 entries per page), coercing both values to numbers.
const withDefaultPagination = (params)=>{
    const { page, pageSize, ...otherParams } = params;
    const resolvedPage = page === undefined ? 1 : page;
    const resolvedPageSize = pageSize === undefined ? 10 : pageSize;
    return {
        page: Number(resolvedPage),
        pageSize: Number(resolvedPageSize),
        ...otherParams
    };
};
// Translates 1-based page/pageSize pagination into a limit/offset query.
// Returns the query plus the normalized pagination info.
const withOffsetLimit = (params)=>{
    const { page, pageSize, ...restParams } = withDefaultPagination(params);
    // Clamp so page <= 0 still yields offset 0.
    const zeroBasedPage = Math.max(page - 1, 0);
    const pagedQuery = {
        ...restParams,
        limit: pageSize,
        offset: zeroBasedPage * pageSize
    };
    return [
        pagedQuery,
        {
            page,
            pageSize
        }
    ];
};
// Creates the repository of persistence operations for the model `uid`.
// Every method delegates to the shared entity manager, pre-bound to `uid`.
const createRepository = (uid, db)=>{
    return {
        findOne (params = {}) {
            return db.entityManager.findOne(uid, params);
        },
        findMany (params = {}) {
            return db.entityManager.findMany(uid, params);
        },
        // Runs the find and the count in parallel; resolves to [results, total].
        findWithCount (params = {}) {
            return Promise.all([
                db.entityManager.findMany(uid, params),
                db.entityManager.count(uid, params)
            ]);
        },
        // Paginated find: translates page/pageSize into limit/offset and
        // returns the results together with pagination metadata.
        async findPage (params) {
            const [query, { page, pageSize }] = withOffsetLimit(params);
            const [results, total] = await Promise.all([
                db.entityManager.findMany(uid, query),
                db.entityManager.count(uid, query)
            ]);
            return {
                results,
                pagination: {
                    page,
                    pageSize,
                    pageCount: Math.ceil(total / pageSize),
                    total
                }
            };
        },
        create (params) {
            return db.entityManager.create(uid, params);
        },
        createMany (params) {
            return db.entityManager.createMany(uid, params);
        },
        update (params) {
            return db.entityManager.update(uid, params);
        },
        updateMany (params) {
            return db.entityManager.updateMany(uid, params);
        },
        delete (params) {
            return db.entityManager.delete(uid, params);
        },
        deleteMany (params = {}) {
            return db.entityManager.deleteMany(uid, params);
        },
        count (params) {
            return db.entityManager.count(uid, params);
        },
        attachRelations (id, data) {
            return db.entityManager.attachRelations(uid, id, data);
        },
        // Updates relations inside a dedicated transaction, rolling back and
        // rethrowing on failure.
        async updateRelations (id, data) {
            const trx = await db.transaction();
            try {
                await db.entityManager.updateRelations(uid, id, data, {
                    transaction: trx.get()
                });
                return await trx.commit();
            } catch (e) {
                await trx.rollback();
                throw e;
            }
        },
        deleteRelations (id) {
            return db.entityManager.deleteRelations(uid, id);
        },
        populate (entity, populate) {
            return db.entityManager.populate(uid, entity, populate);
        },
        load (entity, fields, params) {
            return db.entityManager.load(uid, entity, fields, params);
        },
        // Paginated load of a single anyToMany relational field. Throws when
        // `field` is not a string naming a oneToMany/manyToMany relation.
        async loadPages (entity, field, params) {
            if (!isString(field)) {
                throw new Error(`Invalid load. Expected ${field} to be a string`);
            }
            const { attributes } = db.metadata.get(uid);
            const attribute = attributes[field];
            if (!attribute || attribute.type !== 'relation' || !attribute.relation || ![
                'oneToMany',
                'manyToMany'
            ].includes(attribute.relation)) {
                throw new Error(`Invalid load. Expected ${field} to be an anyToMany relational attribute`);
            }
            const [query, { page, pageSize }] = withOffsetLimit(params);
            // Load the relation page and the relation count in parallel.
            const [results, { count: total }] = await Promise.all([
                db.entityManager.load(uid, entity, field, query),
                db.entityManager.load(uid, entity, field, {
                    ...query,
                    count: true
                })
            ]);
            return {
                results,
                pagination: {
                    page,
                    pageSize,
                    pageCount: Math.ceil(total / pageSize),
                    total
                }
            };
        }
    };
};
export { createRepository };
//# sourceMappingURL=entity-repository.mjs.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,5 @@
import type { Database } from '..';
import { EntityManager } from './types';
export * from './types';
/** Builds the entity manager exposing CRUD and relation handling backed by `db`. */
export declare const createEntityManager: (db: Database) => EntityManager;
//# sourceMappingURL=index.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/entity-manager/index.ts"],"names":[],"mappings":"AA8CA,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,IAAI,CAAC;AAGnC,OAAO,EAAE,aAAa,EAAsB,MAAM,SAAS,CAAC;AAE5D,cAAc,SAAS,CAAC;AA2LxB,eAAO,MAAM,mBAAmB,OAAQ,QAAQ,KAAG,aAmzClD,CAAC"}

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,25 @@
/// <reference types="lodash" />
import type { Knex } from 'knex';
import type { Database } from '..';
import type { MorphJoinTable } from '../types';
type Rows = Record<string, unknown>[];
/**
 * After a morphToMany update, removes the join rows whose target attribute is
 * a `morphOne` pointing back at `uid`/`attributeName` (a morphOne may only
 * hold a single relation).
 */
export declare const deleteRelatedMorphOneRelationsAfterMorphToManyUpdate: (rows: Rows, { uid, attributeName, joinTable, db, transaction: trx, }: {
    uid: string;
    attributeName: string;
    joinTable: MorphJoinTable;
    db: Database;
    transaction?: Knex.Transaction;
}) => Promise<void>;
/**
 * Encoding utilities for polymorphic relations.
 *
 * In some scenarios is useful to encode both the id & __type of the relation
 * to have a unique identifier for the relation. (e.g. relations reordering)
 */
export declare const encodePolymorphicId: (id: number | string, __type: string) => string;
/** Curried helper that encodes a relation's id (and positional anchors) with `encodePolymorphicId`. */
export declare const encodePolymorphicRelation: import("lodash").CurriedFunction2<{
    idColumn: any;
    typeColumn: any;
}, any, any>;
export {};
//# sourceMappingURL=morph-relations.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"morph-relations.d.ts","sourceRoot":"","sources":["../../src/entity-manager/morph-relations.ts"],"names":[],"mappings":";AAEA,OAAO,KAAK,EAAE,IAAI,EAAE,MAAM,MAAM,CAAC;AAGjC,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,IAAI,CAAC;AACnC,OAAO,KAAK,EAAE,cAAc,EAAY,MAAM,UAAU,CAAC;AAEzD,KAAK,IAAI,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EAAE,CAAC;AA8BtC,eAAO,MAAM,oDAAoD,yEAQ5D;IACD,GAAG,EAAE,MAAM,CAAC;IACZ,aAAa,EAAE,MAAM,CAAC;IACtB,SAAS,EAAE,cAAc,CAAC;IAC1B,EAAE,EAAE,QAAQ,CAAC;IACb,WAAW,CAAC,EAAE,KAAK,WAAW,CAAC;CAChC,kBAoCF,CAAC;AAEF;;;;;GAKG;AAEH,eAAO,MAAM,mBAAmB,OAAQ,MAAM,GAAG,MAAM,UAAU,MAAM,WAEtE,CAAC;AAEF,eAAO,MAAM,yBAAyB;;;YAkBpC,CAAC"}

View File

@@ -0,0 +1,73 @@
'use strict';
var _ = require('lodash/fp');
var queryBuilder = require('../query/query-builder.js');
/* eslint-disable @typescript-eslint/naming-convention */ // allow __type
// Filters morph join rows, keeping only those whose target attribute is a
// morphOne owned by this model (`uid`) and bound to `attributeName`.
const getMorphToManyRowsLinkedToMorphOne = (rows, { uid, attributeName, typeColumn, db })=>{
    return rows.filter((row)=>{
        const relatedType = row[typeColumn.name];
        const targetAttribute = db.metadata.get(relatedType).attributes[row.field];
        // ensure targeted field is the right one + check if it is a morphOne
        const matchesTarget = targetAttribute?.target === uid;
        const matchesMorphBy = targetAttribute?.morphBy === attributeName;
        const isMorphOne = targetAttribute?.relation === 'morphOne';
        return matchesTarget && matchesMorphBy && isMorphOne;
    });
};
// After a morphToMany update, deletes join rows whose target attribute is a
// morphOne pointing back at us: those targets may hold a single relation, so
// the previous rows must go.
const deleteRelatedMorphOneRelationsAfterMorphToManyUpdate = async (rows, { uid, attributeName, joinTable, db, transaction: trx })=>{
    const { idColumn, typeColumn } = joinTable.morphColumn;
    const rowsLinkedToMorphOne = getMorphToManyRowsLinkedToMorphOne(rows, {
        uid,
        attributeName,
        typeColumn,
        db
    });
    // Shape: { [relatedType]: { [field]: rows[] } }
    const rowsByTypeAndField = _.mapValues(_.groupBy('field'), _.groupBy(typeColumn.name)(rowsLinkedToMorphOne));
    const deletionCriteria = [];
    for (const [relatedType, rowsByField] of Object.entries(rowsByTypeAndField)){
        for (const [field, fieldRows] of Object.entries(rowsByField)){
            deletionCriteria.push({
                [typeColumn.name]: relatedType,
                field,
                [idColumn.name]: {
                    $in: _.map(idColumn.name, fieldRows)
                }
            });
        }
    }
    if (deletionCriteria.length > 0) {
        await queryBuilder(joinTable.name, db).delete().where({
            $or: deletionCriteria
        }).transacting(trx).execute();
    }
};
/**
 * Encoding utilities for polymorphic relations.
 *
 * In some scenarios is useful to encode both the id & __type of the relation
 * to have a unique identifier for the relation. (e.g. relations reordering)
 */ const encodePolymorphicId = (id, __type)=>{
    // Composite key "<id>:::<type>" uniquely identifies a polymorphic target.
    return [id, __type].join(':::');
};
// Curried: returns a copy of `relation` whose id (and positional anchors,
// when present) are replaced by "<id>:::<type>" composite keys.
const encodePolymorphicRelation = _.curry(({ idColumn, typeColumn }, relation)=>{
    // Encode the id of the relation and the positional argument if it exist
    const encoded = {
        ...relation,
        [idColumn]: encodePolymorphicId(relation[idColumn], relation[typeColumn])
    };
    if (relation.position) {
        const { before, after } = relation.position;
        // Positional anchors reference sibling relations of the same type.
        const anchorType = relation.position.__type || relation.__type;
        encoded.position = {
            ...relation.position
        };
        if (before) {
            encoded.position.before = encodePolymorphicId(before, anchorType);
        }
        if (after) {
            encoded.position.after = encodePolymorphicId(after, anchorType);
        }
    }
    return encoded;
});
exports.deleteRelatedMorphOneRelationsAfterMorphToManyUpdate = deleteRelatedMorphOneRelationsAfterMorphToManyUpdate;
exports.encodePolymorphicId = encodePolymorphicId;
exports.encodePolymorphicRelation = encodePolymorphicRelation;
//# sourceMappingURL=morph-relations.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,69 @@
import { curry, groupBy, pipe, mapValues, map, isEmpty } from 'lodash/fp';
import createQueryBuilder from '../query/query-builder.mjs';
/* eslint-disable @typescript-eslint/naming-convention */ // allow __type
// Filters morph join rows, keeping only those whose target attribute is a
// morphOne owned by this model (`uid`) and bound to `attributeName`.
const getMorphToManyRowsLinkedToMorphOne = (rows, { uid, attributeName, typeColumn, db })=>{
    return rows.filter((row)=>{
        const relatedType = row[typeColumn.name];
        const targetAttribute = db.metadata.get(relatedType).attributes[row.field];
        // ensure targeted field is the right one + check if it is a morphOne
        const matchesTarget = targetAttribute?.target === uid;
        const matchesMorphBy = targetAttribute?.morphBy === attributeName;
        const isMorphOne = targetAttribute?.relation === 'morphOne';
        return matchesTarget && matchesMorphBy && isMorphOne;
    });
};
// After a morphToMany update, deletes join rows whose target attribute is a
// morphOne pointing back at us: those targets may hold a single relation, so
// the previous rows must go.
const deleteRelatedMorphOneRelationsAfterMorphToManyUpdate = async (rows, { uid, attributeName, joinTable, db, transaction: trx })=>{
    const { idColumn, typeColumn } = joinTable.morphColumn;
    const rowsLinkedToMorphOne = getMorphToManyRowsLinkedToMorphOne(rows, {
        uid,
        attributeName,
        typeColumn,
        db
    });
    // Shape: { [relatedType]: { [field]: rows[] } }
    const rowsByTypeAndField = mapValues(groupBy('field'), groupBy(typeColumn.name)(rowsLinkedToMorphOne));
    const deletionCriteria = [];
    for (const [relatedType, rowsByField] of Object.entries(rowsByTypeAndField)){
        for (const [field, fieldRows] of Object.entries(rowsByField)){
            deletionCriteria.push({
                [typeColumn.name]: relatedType,
                field,
                [idColumn.name]: {
                    $in: map(idColumn.name, fieldRows)
                }
            });
        }
    }
    if (deletionCriteria.length > 0) {
        await createQueryBuilder(joinTable.name, db).delete().where({
            $or: deletionCriteria
        }).transacting(trx).execute();
    }
};
/**
 * Encoding utilities for polymorphic relations.
 *
 * In some scenarios is useful to encode both the id & __type of the relation
 * to have a unique identifier for the relation. (e.g. relations reordering)
 */ const encodePolymorphicId = (id, __type)=>{
    // Composite key "<id>:::<type>" uniquely identifies a polymorphic target.
    return [id, __type].join(':::');
};
const encodePolymorphicRelation = curry(({ idColumn, typeColumn }, relation)=>{
// Encode the id of the relation and the positional argument if it exist
const newRelation = {
...relation,
[idColumn]: encodePolymorphicId(relation[idColumn], relation[typeColumn])
};
if (relation.position) {
const { before, after } = relation.position;
const __type = relation.position.__type || relation.__type;
newRelation.position = {
...relation.position
};
if (before) newRelation.position.before = encodePolymorphicId(before, __type);
if (after) newRelation.position.after = encodePolymorphicId(after, __type);
}
return newRelation;
});
export { deleteRelatedMorphOneRelationsAfterMorphToManyUpdate, encodePolymorphicId, encodePolymorphicRelation };
//# sourceMappingURL=morph-relations.mjs.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,58 @@
/// <reference types="node" />
/// <reference types="node" />
/// <reference types="node" />
/// <reference types="node" />
/// <reference types="node" />
import type { Knex } from 'knex';
import type { Database } from '..';
import type { ID, Relation } from '../types';
// Augments knex so `transacting` accepts an optional (possibly undefined)
// transaction, matching how these helpers thread `trx` through.
declare module 'knex' {
    namespace Knex {
        interface ChainableInterface {
            transacting(trx?: Knex.Transaction): this;
        }
    }
}
/**
 * If some relations currently exist for this oneToX relation, on the one side, this function removes them and update the inverse order if needed.
 */
declare const deletePreviousOneToAnyRelations: ({ id, attribute, relIdsToadd, db, transaction: trx, }: {
    id: ID;
    attribute: Relation.Bidirectional;
    relIdsToadd: ID[];
    db: Database;
    transaction?: Knex.Transaction;
}) => Promise<void>;
/**
 * If a relation currently exists for this xToOne relations, this function removes it and update the inverse order if needed.
 */
declare const deletePreviousAnyToOneRelations: ({ id, attribute, relIdToadd, db, transaction: trx, }: {
    id: ID;
    attribute: Relation.Bidirectional;
    relIdToadd: ID;
    db: Database;
    transaction?: Knex.Transaction;
}) => Promise<void>;
/**
 * Delete all or some relations of entity field
 */
declare const deleteRelations: ({ id, attribute, db, relIdsToNotDelete, relIdsToDelete, transaction: trx, }: {
    id: ID;
    attribute: Relation.Bidirectional;
    db: Database;
    relIdsToNotDelete?: ID[];
    /** 'all' removes every relation of the field. */
    relIdsToDelete?: ID[] | 'all';
    transaction?: Knex.Transaction;
}) => Promise<void>;
/**
 * Clean the order columns by ensuring the order value are continuous (ex: 1, 2, 3 and not 1, 5, 10)
 */
declare const cleanOrderColumns: ({ id, attribute, db, inverseRelIds, transaction: trx, }: {
    id?: ID;
    attribute: Relation.Bidirectional;
    db: Database;
    inverseRelIds?: ID[];
    transaction?: Knex.Transaction;
}) => Promise<[void, void] | undefined>;
export { deletePreviousOneToAnyRelations, deletePreviousAnyToOneRelations, deleteRelations, cleanOrderColumns, };
//# sourceMappingURL=regular-relations.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"regular-relations.d.ts","sourceRoot":"","sources":["../../src/entity-manager/regular-relations.ts"],"names":[],"mappings":";;;;;AAEA,OAAO,KAAK,EAAE,IAAI,EAAE,MAAM,MAAM,CAAC;AAYjC,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,IAAI,CAAC;AACnC,OAAO,KAAK,EAAE,EAAE,EAAE,QAAQ,EAAS,MAAM,UAAU,CAAC;AAEpD,OAAO,QAAQ,MAAM,CAAC;IACpB,UAAU,IAAI,CAAC;QACb,UAAU,kBAAkB;YAC1B,WAAW,CAAC,GAAG,CAAC,EAAE,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC;SAC3C;KACF;CACF;AAiCD;;GAEG;AACH,QAAA,MAAM,+BAA+B,0DAMlC;IACD,EAAE,EAAE,EAAE,CAAC;IACP,SAAS,EAAE,SAAS,aAAa,CAAC;IAClC,WAAW,EAAE,EAAE,EAAE,CAAC;IAClB,EAAE,EAAE,QAAQ,CAAC;IACb,WAAW,CAAC,EAAE,KAAK,WAAW,CAAC;CAChC,kBAsBA,CAAC;AAEF;;GAEG;AACH,QAAA,MAAM,+BAA+B,yDAMlC;IACD,EAAE,EAAE,EAAE,CAAC;IACP,SAAS,EAAE,SAAS,aAAa,CAAC;IAClC,UAAU,EAAE,EAAE,CAAC;IACf,EAAE,EAAE,QAAQ,CAAC;IACb,WAAW,CAAC,EAAE,KAAK,WAAW,CAAC;CAChC,kBAkDA,CAAC;AAEF;;GAEG;AACH,QAAA,MAAM,eAAe,gFAOlB;IACD,EAAE,EAAE,EAAE,CAAC;IACP,SAAS,EAAE,SAAS,aAAa,CAAC;IAClC,EAAE,EAAE,QAAQ,CAAC;IACb,iBAAiB,CAAC,EAAE,EAAE,EAAE,CAAC;IACzB,cAAc,CAAC,EAAE,EAAE,EAAE,GAAG,KAAK,CAAC;IAC9B,WAAW,CAAC,EAAE,KAAK,WAAW,CAAC;CAChC,kBAsDA,CAAC;AAEF;;GAEG;AACH,QAAA,MAAM,iBAAiB,4DAMpB;IACD,EAAE,CAAC,EAAE,EAAE,CAAC;IACR,SAAS,EAAE,SAAS,aAAa,CAAC;IAClC,EAAE,EAAE,QAAQ,CAAC;IACb,aAAa,CAAC,EAAE,EAAE,EAAE,CAAC;IACrB,WAAW,CAAC,EAAE,KAAK,WAAW,CAAC;CAChC,sCAiIA,CAAC;AAEF,OAAO,EACL,+BAA+B,EAC/B,+BAA+B,EAC/B,eAAe,EACf,iBAAiB,GAClB,CAAC"}

View File

@@ -0,0 +1,247 @@
'use strict';
var _ = require('lodash/fp');
var relations = require('../metadata/relations.js');
require('../utils/identifiers/index.js');
var queryBuilder = require('../query/query-builder.js');
var knex = require('../utils/knex.js');
// TODO: This is a short term solution, to not steal relations from the same document.
// Returns either a literal id list or a knex subquery callback that selects
// the ids of every entry sharing the document_id of `id` (its draft/locale
// siblings). NOTE(review): relies on the global `strapi` instance — confirm
// it is always available when this module runs.
const getDocumentSiblingIdsQuery = (tableName, id)=>{
    // Find if the model is a content type or something else (e.g. component)
    // to only get the documentId if it's a content type
    const models = Array.from(strapi.db.metadata.values());
    const isContentType = models.find((model)=>{
        return model.tableName === tableName && model.attributes.documentId;
    });
    if (!isContentType) {
        // Not a content type: the only "sibling" is the entry itself.
        return [
            id
        ];
    }
    // NOTE: SubQueries are wrapped in a function to not reuse the same connection,
    // which causes infinite self references
    return function(query) {
        query.select('id').from(tableName)// Get all child ids of the document id
        .whereIn('document_id', (documentIDSubQuery)=>{
            documentIDSubQuery.from(tableName)// get document id related to the current id
            .select('document_id').where('id', id);
        });
    };
};
/**
 * If some relations currently exist for this oneToX relation, on the one side, this function removes them and update the inverse order if needed.
 */ const deletePreviousOneToAnyRelations = async ({ id, attribute, relIdsToadd, db, transaction: trx })=>{
    if (!(relations.isBidirectional(attribute) && relations.isOneToAny(attribute))) {
        throw new Error('deletePreviousOneToAnyRelations can only be called for bidirectional oneToAny relations');
    }
    const { joinTable } = attribute;
    const { joinColumn, inverseJoinColumn } = joinTable;
    const con = db.getConnection();
    // On the "one" side an inverse entity may be linked only once: drop the
    // existing links to the ids being connected, except links owned by the
    // current document's own entries (draft/locale siblings).
    await con.delete().from(joinTable.name)// Exclude the ids of the current document
    .whereNotIn(joinColumn.name, getDocumentSiblingIdsQuery(joinColumn.referencedTable, id))// Include all the ids that are being connected
    .whereIn(inverseJoinColumn.name, relIdsToadd).where(joinTable.on || {}).transacting(trx);
    // Deleting rows can leave holes in the inverse order column; re-pack it.
    await cleanOrderColumns({
        attribute,
        db,
        inverseRelIds: relIdsToadd,
        transaction: trx
    });
};
/**
 * If a relation currently exists for this xToOne relations, this function removes it and update the inverse order if needed.
 */ const deletePreviousAnyToOneRelations = async ({ id, attribute, relIdToadd, db, transaction: trx })=>{
    const { joinTable } = attribute;
    const { joinColumn, inverseJoinColumn } = joinTable;
    const con = db.getConnection();
    if (!relations.isAnyToOne(attribute)) {
        throw new Error('deletePreviousAnyToOneRelations can only be called for anyToOne relations');
    }
    // handling manyToOne
    if (relations.isManyToAny(attribute)) {
        // if the database integrity was not broken relsToDelete is supposed to be of length 1
        const relsToDelete = await con.select(inverseJoinColumn.name).from(joinTable.name).where(joinColumn.name, id).whereNotIn(inverseJoinColumn.name, getDocumentSiblingIdsQuery(inverseJoinColumn.referencedTable, relIdToadd)).where(joinTable.on || {}).transacting(trx);
        const relIdsToDelete = _.map(inverseJoinColumn.name, relsToDelete);
        // Remove the stale links, then re-pack the inverse order column of the
        // entries that lost a relation.
        await queryBuilder(joinTable.name, db).delete().where({
            [joinColumn.name]: id,
            [inverseJoinColumn.name]: {
                $in: relIdsToDelete
            }
        }).where(joinTable.on || {}).transacting(trx).execute();
        await cleanOrderColumns({
            attribute,
            db,
            inverseRelIds: relIdsToDelete,
            transaction: trx
        });
    // handling oneToOne
    } else {
        // A single row may exist; delete it unless it belongs to a sibling
        // entry of the same document (draft/locale).
        await con.delete().from(joinTable.name).where(joinColumn.name, id)// Exclude the ids of the current document
        .whereNotIn(inverseJoinColumn.name, getDocumentSiblingIdsQuery(inverseJoinColumn.referencedTable, relIdToadd)).where(joinTable.on || {}).transacting(trx);
    }
};
/**
 * Delete all or some relations of entity field
 */ const deleteRelations = async ({ id, attribute, db, relIdsToNotDelete = [], relIdsToDelete = [], transaction: trx })=>{
    const { joinTable } = attribute;
    const { joinColumn, inverseJoinColumn } = joinTable;
    // `relIdsToDelete === 'all'` means "remove every relation of the field".
    const all = relIdsToDelete === 'all';
    if (relations.hasOrderColumn(attribute) || relations.hasInverseOrderColumn(attribute)) {
        // Ordered relations: delete in batches so the order columns can be
        // re-packed after each batch.
        let lastId = 0;
        let done = false;
        const batchSize = 100;
        while(!done){
            const batchToDelete = await queryBuilder(joinTable.name, db).select(inverseJoinColumn.name).where({
                [joinColumn.name]: id,
                id: {
                    $gt: lastId
                },
                [inverseJoinColumn.name]: {
                    $notIn: relIdsToNotDelete
                },
                // NOTE(review): when `relIdsToDelete` is a list, this spread re-assigns
                // the same computed key and replaces the `$notIn` filter above —
                // confirm callers never pass both options together.
                ...all ? {} : {
                    [inverseJoinColumn.name]: {
                        $in: relIdsToDelete
                    }
                }
            }).where(joinTable.on || {}).orderBy('id').limit(batchSize).transacting(trx).execute();
            done = batchToDelete.length < batchSize;
            // NOTE(review): only inverseJoinColumn is selected above, so `?.id` is
            // likely undefined and lastId stays 0; each batch still shrinks because
            // its rows are deleted below — verify against the query builder output.
            lastId = batchToDelete[batchToDelete.length - 1]?.id || 0;
            const batchIds = _.map(inverseJoinColumn.name, batchToDelete);
            await queryBuilder(joinTable.name, db).delete().where({
                [joinColumn.name]: id,
                [inverseJoinColumn.name]: {
                    $in: batchIds
                }
            }).where(joinTable.on || {}).transacting(trx).execute();
            // Re-pack order values so they stay continuous after the deletions.
            await cleanOrderColumns({
                attribute,
                db,
                id,
                inverseRelIds: batchIds,
                transaction: trx
            });
        }
    } else {
        // No order columns to maintain: a single bulk delete is enough.
        await queryBuilder(joinTable.name, db).delete().where({
            [joinColumn.name]: id,
            [inverseJoinColumn.name]: {
                $notIn: relIdsToNotDelete
            },
            ...all ? {} : {
                [inverseJoinColumn.name]: {
                    $in: relIdsToDelete
                }
            }
        }).where(joinTable.on || {}).transacting(trx).execute();
    }
};
/**
 * Clean the order columns by ensuring the order value are continuous (ex: 1, 2, 3 and not 1, 5, 10)
 *
 * Re-packs `orderColumnName` for the join rows attached to `id`, and
 * `inverseOrderColumnName` for the join rows attached to `inverseRelIds`,
 * by assigning ROW_NUMBER() over the current order within each partition.
 */ const cleanOrderColumns = async ({ id, attribute, db, inverseRelIds = [], transaction: trx })=>{
    // Skip when there is nothing to re-pack: we need either an order column
    // together with a source id, or an inverse order column together with
    // a non-empty list of inverse ids.
    if (!(relations.hasOrderColumn(attribute) && id) && !(relations.hasInverseOrderColumn(attribute) && !_.isEmpty(inverseRelIds))) {
        return;
    }
    const { joinTable } = attribute;
    const { joinColumn, inverseJoinColumn, orderColumnName, inverseOrderColumnName } = joinTable;
    /**
    UPDATE :joinTable: as a,
    (
      SELECT
        id,
        ROW_NUMBER() OVER ( PARTITION BY :joinColumn: ORDER BY :orderColumn:) AS src_order,
      FROM :joinTable:
      WHERE :joinColumn: = :id
    ) AS b
    SET :orderColumn: = b.src_order
    WHERE b.id = a.id;
  */ const updateOrderColumn = async ()=>{
        if (!relations.hasOrderColumn(attribute) || !id) {
            return;
        }
        // Numbers the join rows belonging to `id` (1..n) by their current
        // order value, exposing the rank as `src_order`.
        const selectRowsToOrder = (joinTableName)=>db.connection(joinTableName).select('id').rowNumber('src_order', orderColumnName, joinColumn.name).where(joinColumn.name, id).toSQL();
        switch(strapi.db.dialect.client){
            case 'mysql':
                {
                    // Here it's MariaDB and MySQL 8
                    const select = selectRowsToOrder(joinTable.name);
                    // Binding order follows the placeholders left-to-right:
                    // table ??, then the interpolated subquery's own bindings,
                    // then the SET column ??.
                    await db.getConnection().raw(`UPDATE ?? as a, ( ${select.sql} ) AS b
        SET ?? = b.src_order
        WHERE b.id = a.id`, [
                        joinTable.name,
                        ...select.bindings,
                        orderColumnName
                    ]).transacting(trx);
                    break;
                }
            default:
                {
                    const joinTableName = knex.addSchema(db, joinTable.name);
                    const select = selectRowsToOrder(joinTableName);
                    // raw query as knex doesn't allow updating from a subquery
                    // Binding order follows the placeholders left-to-right:
                    // table ??, SET column ??, then the subquery's bindings.
                    await db.connection.raw(`UPDATE ?? as a
        SET ?? = b.src_order
        FROM ( ${select.sql} ) AS b
        WHERE b.id = a.id`, [
                        joinTableName,
                        orderColumnName,
                        ...select.bindings
                    ]).transacting(trx);
                }
        }
    };
    /**
    UPDATE :joinTable: as a,
    (
      SELECT
        id,
        ROW_NUMBER() OVER ( PARTITION BY :inverseJoinColumn: ORDER BY :inverseOrderColumn:) AS inv_order
      FROM :joinTable:
      WHERE :inverseJoinColumn: IN (:inverseRelIds)
    ) AS b
    SET :inverseOrderColumn: = b.inv_order
    WHERE b.id = a.id;
  */ const updateInverseOrderColumn = async ()=>{
        if (!relations.hasInverseOrderColumn(attribute) || _.isEmpty(inverseRelIds)) return;
        // Same ranking as above, but partitioned by the inverse column over
        // the given inverse ids, exposing the rank as `inv_order`.
        const selectRowsToOrder = (joinTableName)=>db.connection(joinTableName).select('id').rowNumber('inv_order', inverseOrderColumnName, inverseJoinColumn.name).where(inverseJoinColumn.name, 'in', inverseRelIds).toSQL();
        switch(strapi.db.dialect.client){
            case 'mysql':
                {
                    // Here it's MariaDB and MySQL 8
                    const select = selectRowsToOrder(joinTable.name);
                    // Bindings: table ??, subquery bindings, SET column ??.
                    await db.getConnection().raw(`UPDATE ?? as a, ( ${select.sql} ) AS b
        SET ?? = b.inv_order
        WHERE b.id = a.id`, [
                        joinTable.name,
                        ...select.bindings,
                        inverseOrderColumnName
                    ]).transacting(trx);
                    break;
                }
            default:
                {
                    const joinTableName = knex.addSchema(db, joinTable.name);
                    const select = selectRowsToOrder(joinTableName);
                    // raw query as knex doesn't allow updating from a subquery
                    // Bindings: table ??, SET column ??, subquery bindings.
                    await db.connection.raw(`UPDATE ?? as a
        SET ?? = b.inv_order
        FROM ( ${select.sql} ) AS b
        WHERE b.id = a.id`, [
                        joinTableName,
                        inverseOrderColumnName,
                        ...select.bindings
                    ]).transacting(trx);
                }
        }
    };
    // Each update is a no-op when its precondition is not met; run both.
    return Promise.all([
        updateOrderColumn(),
        updateInverseOrderColumn()
    ]);
};
// Public CommonJS surface of this compiled module.
exports.cleanOrderColumns = cleanOrderColumns;
exports.deletePreviousAnyToOneRelations = deletePreviousAnyToOneRelations;
exports.deletePreviousOneToAnyRelations = deletePreviousOneToAnyRelations;
exports.deleteRelations = deleteRelations;
//# sourceMappingURL=regular-relations.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,242 @@
import { map, isEmpty } from 'lodash/fp';
import { isBidirectional, isOneToAny, isAnyToOne, isManyToAny, hasOrderColumn, hasInverseOrderColumn } from '../metadata/relations.mjs';
import '../utils/identifiers/index.mjs';
import createQueryBuilder from '../query/query-builder.mjs';
import { addSchema } from '../utils/knex.mjs';
// TODO: Short-term workaround so relations are not stolen from rows that
// belong to the same document (other locales/versions sharing a document_id).
const getDocumentSiblingIdsQuery = (tableName, id)=>{
    // Only content types carry a `documentId` attribute; components and other
    // models do not, so for those we simply match against the raw id.
    const allModels = Array.from(strapi.db.metadata.values());
    const targetIsContentType = allModels.some(
        (model)=>model.tableName === tableName && Boolean(model.attributes.documentId)
    );
    if (!targetIsContentType) {
        return [id];
    }
    // NOTE: sub-queries are wrapped in callbacks so knex does not reuse the
    // same builder, which would cause infinite self references.
    return function(outerQuery) {
        // Select every row id that shares the document_id of the given id.
        outerQuery
            .select('id')
            .from(tableName)
            .whereIn('document_id', (innerQuery)=>{
                innerQuery.from(tableName).select('document_id').where('id', id);
            });
    };
};
/**
 * If some relations currently exist on the "one" side of this oneToX relation,
 * remove them and then re-pack the inverse order column when needed.
 */ const deletePreviousOneToAnyRelations = async ({ id, attribute, relIdsToadd, db, transaction: trx })=>{
    // Guard: this helper only makes sense for bidirectional oneToAny relations.
    if (!isBidirectional(attribute) || !isOneToAny(attribute)) {
        throw new Error('deletePreviousOneToAnyRelations can only be called for bidirectional oneToAny relations');
    }
    const { joinTable } = attribute;
    const { joinColumn, inverseJoinColumn } = joinTable;
    const knexConnection = db.getConnection();
    // Drop every link pointing at one of the ids being connected, unless the
    // link belongs to the current document (or one of its siblings).
    await knexConnection
        .delete()
        .from(joinTable.name)
        .whereNotIn(joinColumn.name, getDocumentSiblingIdsQuery(joinColumn.referencedTable, id))
        .whereIn(inverseJoinColumn.name, relIdsToadd)
        .where(joinTable.on || {})
        .transacting(trx);
    // Rows may have been removed: re-pack the inverse order values.
    await cleanOrderColumns({
        attribute,
        db,
        inverseRelIds: relIdsToadd,
        transaction: trx
    });
};
/**
 * If a relation currently exists for this xToOne relation, remove it and then
 * re-pack the inverse order column when needed.
 */ const deletePreviousAnyToOneRelations = async ({ id, attribute, relIdToadd, db, transaction: trx })=>{
    const { joinTable } = attribute;
    const { joinColumn, inverseJoinColumn } = joinTable;
    const knexConnection = db.getConnection();
    // Guard: this helper only makes sense for anyToOne relations.
    if (!isAnyToOne(attribute)) {
        throw new Error('deletePreviousAnyToOneRelations can only be called for anyToOne relations');
    }
    if (isManyToAny(attribute)) {
        // manyToOne: we need the target ids being detached (to re-pack their
        // inverse order afterwards), so select them before deleting.
        // If database integrity was not broken this holds at most one row.
        const rowsToDelete = await knexConnection
            .select(inverseJoinColumn.name)
            .from(joinTable.name)
            .where(joinColumn.name, id)
            .whereNotIn(inverseJoinColumn.name, getDocumentSiblingIdsQuery(inverseJoinColumn.referencedTable, relIdToadd))
            .where(joinTable.on || {})
            .transacting(trx);
        const idsToDelete = map(inverseJoinColumn.name, rowsToDelete);
        await createQueryBuilder(joinTable.name, db)
            .delete()
            .where({
                [joinColumn.name]: id,
                [inverseJoinColumn.name]: { $in: idsToDelete }
            })
            .where(joinTable.on || {})
            .transacting(trx)
            .execute();
        await cleanOrderColumns({
            attribute,
            db,
            inverseRelIds: idsToDelete,
            transaction: trx
        });
        return;
    }
    // oneToOne: a plain delete is enough, there is no order column to maintain.
    await knexConnection
        .delete()
        .from(joinTable.name)
        .where(joinColumn.name, id)
        .whereNotIn(inverseJoinColumn.name, getDocumentSiblingIdsQuery(inverseJoinColumn.referencedTable, relIdToadd))
        .where(joinTable.on || {})
        .transacting(trx);
};
/**
 * Delete all or some relations of an entity field.
 *
 * `relIdsToDelete` may be the literal string 'all' to target every related id,
 * or a list of specific ids. `relIdsToNotDelete` protects ids from deletion —
 * but note that when an explicit id list is given, the `$in` filter supersedes
 * the `$notIn` filter on the same column (this matches the original
 * object-spread semantics, where the later key overwrote the earlier one).
 */ const deleteRelations = async ({ id, attribute, db, relIdsToNotDelete = [], relIdsToDelete = [], transaction: trx })=>{
    const { joinTable } = attribute;
    const { joinColumn, inverseJoinColumn } = joinTable;
    const deleteAll = relIdsToDelete === 'all';
    // Filter on the inverse column: either "everything except the protected
    // ids" or "exactly the requested ids" (see note above).
    const targetFilter = deleteAll
        ? { [inverseJoinColumn.name]: { $notIn: relIdsToNotDelete } }
        : { [inverseJoinColumn.name]: { $in: relIdsToDelete } };
    if (hasOrderColumn(attribute) || hasInverseOrderColumn(attribute)) {
        // Order columns must stay continuous, so delete in batches and re-pack
        // the order values after each batch.
        const BATCH_SIZE = 100;
        let cursor = 0;
        for (;;) {
            const batch = await createQueryBuilder(joinTable.name, db)
                .select(inverseJoinColumn.name)
                .where({
                    [joinColumn.name]: id,
                    id: { $gt: cursor },
                    ...targetFilter
                })
                .where(joinTable.on || {})
                .orderBy('id')
                .limit(BATCH_SIZE)
                .transacting(trx)
                .execute();
            cursor = batch[batch.length - 1]?.id || 0;
            const batchIds = map(inverseJoinColumn.name, batch);
            await createQueryBuilder(joinTable.name, db)
                .delete()
                .where({
                    [joinColumn.name]: id,
                    [inverseJoinColumn.name]: { $in: batchIds }
                })
                .where(joinTable.on || {})
                .transacting(trx)
                .execute();
            await cleanOrderColumns({
                attribute,
                db,
                id,
                inverseRelIds: batchIds,
                transaction: trx
            });
            // A short batch means the matching rows are exhausted.
            if (batch.length < BATCH_SIZE) {
                break;
            }
        }
    } else {
        // No order bookkeeping needed: a single delete is enough.
        await createQueryBuilder(joinTable.name, db)
            .delete()
            .where({
                [joinColumn.name]: id,
                ...targetFilter
            })
            .where(joinTable.on || {})
            .transacting(trx)
            .execute();
    }
};
/**
 * Clean the order columns by ensuring the order value are continuous (ex: 1, 2, 3 and not 1, 5, 10)
 *
 * Re-packs `orderColumnName` for the join rows attached to `id`, and
 * `inverseOrderColumnName` for the join rows attached to `inverseRelIds`,
 * by assigning ROW_NUMBER() over the current order within each partition.
 */ const cleanOrderColumns = async ({ id, attribute, db, inverseRelIds = [], transaction: trx })=>{
    // Skip when there is nothing to re-pack: we need either an order column
    // together with a source id, or an inverse order column together with
    // a non-empty list of inverse ids.
    if (!(hasOrderColumn(attribute) && id) && !(hasInverseOrderColumn(attribute) && !isEmpty(inverseRelIds))) {
        return;
    }
    const { joinTable } = attribute;
    const { joinColumn, inverseJoinColumn, orderColumnName, inverseOrderColumnName } = joinTable;
    /**
    UPDATE :joinTable: as a,
    (
      SELECT
        id,
        ROW_NUMBER() OVER ( PARTITION BY :joinColumn: ORDER BY :orderColumn:) AS src_order,
      FROM :joinTable:
      WHERE :joinColumn: = :id
    ) AS b
    SET :orderColumn: = b.src_order
    WHERE b.id = a.id;
  */ const updateOrderColumn = async ()=>{
        if (!hasOrderColumn(attribute) || !id) {
            return;
        }
        // Numbers the join rows belonging to `id` (1..n) by their current
        // order value, exposing the rank as `src_order`.
        const selectRowsToOrder = (joinTableName)=>db.connection(joinTableName).select('id').rowNumber('src_order', orderColumnName, joinColumn.name).where(joinColumn.name, id).toSQL();
        switch(strapi.db.dialect.client){
            case 'mysql':
                {
                    // Here it's MariaDB and MySQL 8
                    const select = selectRowsToOrder(joinTable.name);
                    // Binding order follows the placeholders left-to-right:
                    // table ??, then the interpolated subquery's own bindings,
                    // then the SET column ??.
                    await db.getConnection().raw(`UPDATE ?? as a, ( ${select.sql} ) AS b
        SET ?? = b.src_order
        WHERE b.id = a.id`, [
                        joinTable.name,
                        ...select.bindings,
                        orderColumnName
                    ]).transacting(trx);
                    break;
                }
            default:
                {
                    const joinTableName = addSchema(db, joinTable.name);
                    const select = selectRowsToOrder(joinTableName);
                    // raw query as knex doesn't allow updating from a subquery
                    // Binding order follows the placeholders left-to-right:
                    // table ??, SET column ??, then the subquery's bindings.
                    await db.connection.raw(`UPDATE ?? as a
        SET ?? = b.src_order
        FROM ( ${select.sql} ) AS b
        WHERE b.id = a.id`, [
                        joinTableName,
                        orderColumnName,
                        ...select.bindings
                    ]).transacting(trx);
                }
        }
    };
    /**
    UPDATE :joinTable: as a,
    (
      SELECT
        id,
        ROW_NUMBER() OVER ( PARTITION BY :inverseJoinColumn: ORDER BY :inverseOrderColumn:) AS inv_order
      FROM :joinTable:
      WHERE :inverseJoinColumn: IN (:inverseRelIds)
    ) AS b
    SET :inverseOrderColumn: = b.inv_order
    WHERE b.id = a.id;
  */ const updateInverseOrderColumn = async ()=>{
        if (!hasInverseOrderColumn(attribute) || isEmpty(inverseRelIds)) return;
        // Same ranking as above, but partitioned by the inverse column over
        // the given inverse ids, exposing the rank as `inv_order`.
        const selectRowsToOrder = (joinTableName)=>db.connection(joinTableName).select('id').rowNumber('inv_order', inverseOrderColumnName, inverseJoinColumn.name).where(inverseJoinColumn.name, 'in', inverseRelIds).toSQL();
        switch(strapi.db.dialect.client){
            case 'mysql':
                {
                    // Here it's MariaDB and MySQL 8
                    const select = selectRowsToOrder(joinTable.name);
                    // Bindings: table ??, subquery bindings, SET column ??.
                    await db.getConnection().raw(`UPDATE ?? as a, ( ${select.sql} ) AS b
        SET ?? = b.inv_order
        WHERE b.id = a.id`, [
                        joinTable.name,
                        ...select.bindings,
                        inverseOrderColumnName
                    ]).transacting(trx);
                    break;
                }
            default:
                {
                    const joinTableName = addSchema(db, joinTable.name);
                    const select = selectRowsToOrder(joinTableName);
                    // raw query as knex doesn't allow updating from a subquery
                    // Bindings: table ??, SET column ??, subquery bindings.
                    await db.connection.raw(`UPDATE ?? as a
        SET ?? = b.inv_order
        FROM ( ${select.sql} ) AS b
        WHERE b.id = a.id`, [
                        joinTableName,
                        inverseOrderColumnName,
                        ...select.bindings
                    ]).transacting(trx);
                }
        }
    };
    // Each update is a no-op when its precondition is not met; run both.
    return Promise.all([
        updateOrderColumn(),
        updateInverseOrderColumn()
    ]);
};
// Public ESM surface of this compiled module.
export { cleanOrderColumns, deletePreviousAnyToOneRelations, deletePreviousOneToAnyRelations, deleteRelations };
//# sourceMappingURL=regular-relations.mjs.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,74 @@
import type { ID } from '../types';
/** A relation entry as accepted by the connect/disconnect APIs. */
interface Link {
    id: ID;
    /** Positional anchor: connect before/after a given id, or at start/end. */
    position?: {
        before?: ID;
        after?: ID;
        start?: true;
        end?: true;
    };
    /** Order value, possibly fractional while being computed. */
    order?: number;
    /** Component uid for component relations (part of the relation identity). */
    __component?: string;
}
/** A link whose order has been resolved; `init` marks pre-existing rows. */
interface OrderedLink extends Link {
    init?: boolean;
    order: number;
}
/**
* When connecting relations, the order you connect them matters.
*
* Example, if you connect the following relations:
* { id: 5, position: { before: 1 } }
* { id: 1, position: { before: 2 } }
* { id: 2, position: { end: true } }
*
* Going through the connect array, id 5 has to be connected before id 1,
* so the order of id5 = id1 - 1. But the order value of id 1 is unknown.
* The only way to know the order of id 1 is to connect it first.
*
* This function makes sure the relations are connected in the right order:
* { id: 2, position: { end: true } }
* { id: 1, position: { before: 2 } }
* { id: 5, position: { before: 1 } }
*
*/
declare const sortConnectArray: (connectArr: Link[], initialArr?: Link[], strictSort?: boolean) => Link[];
/**
* Responsible for calculating the relations order when connecting them.
*
* The connect method takes an array of relations with positional attributes:
* - before: the id of the relation to connect before
* - after: the id of the relation to connect after
* - end: it should be at the end
* - start: it should be at the start
*
* Example:
* - Having a connect array like:
* [ { id: 4, before: 2 }, { id: 4, before: 3}, {id: 5, before: 4} ]
* - With the initial relations:
* [ { id: 2, order: 4 }, { id: 3, order: 10 } ]
* - Step by step, going through the connect array, the array of relations would be:
* [ { id: 4, order: 3.5 }, { id: 2, order: 4 }, { id: 3, order: 10 } ]
* [ { id: 2, order: 4 }, { id: 4, order: 3.5 }, { id: 3, order: 10 } ]
* [ { id: 2, order: 4 }, { id: 5, order: 3.5 }, { id: 4, order: 3.5 }, { id: 3, order: 10 } ]
* - The final step would be to recalculate fractional order values.
* [ { id: 2, order: 4 }, { id: 5, order: 3.33 }, { id: 4, order: 3.66 }, { id: 3, order: 10 } ]
*
* @param {Array<*>} initArr - array of relations to initialize the class with
* @param {string} idColumn - the column name of the id
* @param {string} orderColumn - the column name of the order
* @param {boolean} strict - if true, will throw an error if a relation is connected adjacent to
* another one that does not exist
* @return {*}
*/
declare const relationsOrderer: <TRelation extends Record<string, ID | null>>(initArr: TRelation[], idColumn: keyof TRelation, orderColumn: keyof TRelation, strict?: boolean) => {
disconnect(relations: Link | Link[]): any;
connect(relations: Link | Link[]): any;
get(): OrderedLink[];
/**
* Get a map between the relation id and its order
*/
getOrderMap(): Record<ID, number>;
};
export { relationsOrderer, sortConnectArray };
//# sourceMappingURL=relations-orderer.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"relations-orderer.d.ts","sourceRoot":"","sources":["../../src/entity-manager/relations-orderer.ts"],"names":[],"mappings":"AAIA,OAAO,KAAK,EAAE,EAAE,EAAE,MAAM,UAAU,CAAC;AAEnC,UAAU,IAAI;IACZ,EAAE,EAAE,EAAE,CAAC;IACP,QAAQ,CAAC,EAAE;QAAE,MAAM,CAAC,EAAE,EAAE,CAAC;QAAC,KAAK,CAAC,EAAE,EAAE,CAAC;QAAC,KAAK,CAAC,EAAE,IAAI,CAAC;QAAC,GAAG,CAAC,EAAE,IAAI,CAAA;KAAE,CAAC;IACjE,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,WAAW,CAAC,EAAE,MAAM,CAAC;CACtB;AAED,UAAU,WAAY,SAAQ,IAAI;IAChC,IAAI,CAAC,EAAE,OAAO,CAAC;IACf,KAAK,EAAE,MAAM,CAAC;CACf;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,QAAA,MAAM,gBAAgB,eAAgB,IAAI,EAAE,eAAc,IAAI,EAAE,iCA0G/D,CAAC;AAEF;;;;;;;;;;;;;;;;;;;;;;;;;;;GA2BG;AACH,QAAA,MAAM,gBAAgB,yDACX,SAAS,EAAE,YACV,MAAM,SAAS,eACZ,MAAM,SAAS,WACnB,OAAO;0BAuDQ,IAAI,GAAG,IAAI,EAAE;uBAMhB,IAAI,GAAG,IAAI,EAAE;;IAqBhC;;OAEG;;CAgBN,CAAC;AAEF,OAAO,EAAE,gBAAgB,EAAE,gBAAgB,EAAE,CAAC"}

View File

@@ -0,0 +1,221 @@
'use strict';
var _ = require('lodash/fp');
var _$1 = require('lodash');
var invalidRelation = require('../errors/invalid-relation.js');
/**
 * Sorts a `connect` array so that every relation is connected after the
 * relation it is positioned against (its before/after anchor).
 *
 * Example: connecting
 *   { id: 5, position: { before: 1 } }
 *   { id: 1, position: { before: 2 } }
 *   { id: 2, position: { end: true } }
 * must happen in the order [2, 1, 5], because the order value of an anchor
 * is only known once the anchor itself has been connected.
 *
 * @param connectArr relations to connect, possibly out of order
 * @param initialArr relations already present (e.g. in the database)
 * @param strictSort when true, throw if an anchor id is neither in the
 *                   connect array nor in `initialArr`; when false, fall back
 *                   to connecting such relations at the end
 */ const sortConnectArray = (connectArr, initialArr = [], strictSort = true)=>{
    const orderedConnect = [];
    // Whether any relation anchors on an id that is not readily available,
    // which forces us to re-order the array.
    let requiresSorting = false;
    // Lookup of ids that already exist before this connect.
    const presentInitially = {};
    for (const rel of initialArr){
        presentInitially[rel.id] = true;
    }
    /**
     * First occurrence of each relation id in the connect array
     * (`computed` tracks whether it has been placed in `orderedConnect`).
     *
     * Duplicate relations are rejected: the id alone identifies a regular
     * relation; for component relations the identity is the (id, component)
     * pair, so two entries with the same id but different components may
     * coexist.
     */ const relationById = {};
    for (const relation of connectArr){
        const anchorId = relation.position?.before || relation.position?.after;
        if (!anchorId || !presentInitially[anchorId] && !relationById[anchorId]) {
            requiresSorting = true;
        }
        const previousEntry = relationById[relation.id];
        const previousHasNoComponent = previousEntry && !('__component' in previousEntry);
        const sameComponent = previousEntry && previousEntry.__component === relation.__component;
        if (previousEntry && (previousHasNoComponent || sameComponent)) {
            throw new invalidRelation(`The relation with id ${relation.id} is already connected. ` + 'You cannot connect the same relation twice.');
        }
        // Keep the FIRST occurrence (this matches the original reduce, where
        // the `...mapper` spread let existing keys win over the new entry).
        if (!previousEntry) {
            relationById[relation.id] = {
                ...relation,
                computed: false
            };
        }
    }
    // Nothing anchors on an unknown id: the array can be used as-is.
    if (!requiresSorting) return connectArr;
    // Depth-first placement: place the anchor first, then the relation itself.
    const placeRelation = (relation, seenInBranch)=>{
        const anchorId = relation.position?.before || relation.position?.after;
        const anchorRelation = relationById[anchorId];
        // An anchor already visited in this branch means a circular reference.
        if (anchorId && seenInBranch[anchorId]) {
            throw new invalidRelation('A circular reference was found in the connect array. ' + 'One relation is trying to connect before/after another one that is trying to connect before/after it');
        }
        // Already placed through another branch.
        if (relationById[relation.id]?.computed) {
            return;
        }
        relationById[relation.id].computed = true;
        // No anchor, or the anchor already exists: place directly.
        if (!anchorId || presentInitially[anchorId]) {
            orderedConnect.push(relation);
            return;
        }
        if (relationById[anchorId]) {
            // The anchor is connected elsewhere in the array: place it first.
            placeRelation(anchorRelation, {
                ...seenInBranch,
                [relation.id]: true
            });
            orderedConnect.push(relation);
        } else if (strictSort) {
            // The anchor is neither in the connect array nor pre-existing.
            throw new invalidRelation(`There was a problem connecting relation with id ${relation.id} at position ${JSON.stringify(relation.position)}. The relation with id ${anchorId} needs to be connected first.`);
        } else {
            // Non-strict mode: fall back to appending the relation at the end.
            orderedConnect.push({
                id: relation.id,
                position: {
                    end: true
                }
            });
        }
    };
    for (const relation of connectArr){
        placeRelation(relation, {});
    }
    return orderedConnect;
};
/**
 * Responsible for calculating the relations order when connecting them.
 *
 * The connect method takes an array of relations with positional attributes:
 * - before: the id of the relation to connect before
 * - after: the id of the relation to connect after
 * - end: it should be at the end
 * - start: it should be at the start
 *
 * Example:
 *  - Having a connect array like:
 *     [ { id: 4, before: 2 }, { id: 4, before: 3}, {id: 5, before: 4} ]
 *  - With the initial relations:
 *     [ { id: 2, order: 4 }, { id: 3, order: 10 } ]
 *  - Step by step, going through the connect array, the array of relations would be:
 *     [ { id: 4, order: 3.5 }, { id: 2, order: 4 }, { id: 3, order: 10 } ]
 *     [ { id: 2, order: 4 }, { id: 4, order: 3.5 }, { id: 3, order: 10 } ]
 *     [ { id: 2, order: 4 }, { id: 5, order: 3.5 }, { id: 4, order: 3.5 }, { id: 3, order: 10 } ]
 *  - The final step would be to recalculate fractional order values.
 *     [ { id: 2, order: 4 }, { id: 5, order: 3.33 }, { id: 4, order: 3.66 }, { id: 3, order: 10 } ]
 *
 * @param {Array<*>} initArr - array of relations to initialize the class with
 * @param {string} idColumn - the column name of the id
 * @param {string} orderColumn - the column name of the order
 * @param {boolean} strict - if true, will throw an error if a relation is connected adjacent to
 *                           another one that does not exist
 * @return {*}
 */ const relationsOrderer = (initArr, idColumn, orderColumn, strict)=>{
    // Normalize the initial rows; a missing/invalid order value defaults to 1.
    // `init: true` marks rows that pre-existed (their order is persisted).
    const computedRelations = _.castArray(initArr ?? []).map((r)=>({
            init: true,
            id: r[idColumn],
            order: Number(r[orderColumn]) || 1
        }));
    const maxOrder = _.maxBy('order', computedRelations)?.order || 0;
    // Locate a relation by id. NOTE: `relation` is undefined when the id is
    // absent (idx === -1); insertRelation then throws a TypeError which
    // connect() catches and rethrows with a descriptive message.
    const findRelation = (id)=>{
        const idx = computedRelations.findIndex((r)=>r.id === id);
        return {
            idx,
            relation: computedRelations[idx]
        };
    };
    // Remove a relation from the computed list, if present.
    const removeRelation = (r)=>{
        const { idx } = findRelation(r.id);
        if (idx >= 0) {
            computedRelations.splice(idx, 1);
        }
    };
    // Place `r` in the list, assigning a (possibly fractional) order value.
    const insertRelation = (r)=>{
        let idx;
        if (r.position?.before) {
            const { idx: _idx, relation } = findRelation(r.position.before);
            if (relation.init) {
                // Anchor is a pre-existing row: slot just before its order.
                r.order = relation.order - 0.5;
            } else {
                // Anchor was itself just connected: share its fractional order.
                r.order = relation.order;
            }
            idx = _idx;
        } else if (r.position?.after) {
            const { idx: _idx, relation } = findRelation(r.position.after);
            if (relation.init) {
                r.order = relation.order + 0.5;
            } else {
                r.order = relation.order;
            }
            idx = _idx + 1;
        } else if (r.position?.start) {
            r.order = 0.5;
            idx = 0;
        } else {
            // Default / explicit end: place after the highest initial order.
            r.order = maxOrder + 0.5;
            idx = computedRelations.length;
        }
        // Insert the relation in the array
        computedRelations.splice(idx, 0, r);
    };
    return {
        disconnect (relations) {
            _.castArray(relations).forEach((relation)=>{
                removeRelation(relation);
            });
            return this;
        },
        connect (relations) {
            // Sort first so each before/after anchor is placed before its dependents.
            sortConnectArray(_.castArray(relations), computedRelations, strict).forEach((relation)=>{
                this.disconnect(relation);
                try {
                    insertRelation(relation);
                } catch (err) {
                    throw new Error(`There was a problem connecting relation with id ${relation.id} at position ${JSON.stringify(relation.position)}. The list of connect relations is not valid`);
                }
            });
            return this;
        },
        get () {
            return computedRelations;
        },
        /**
     * Get a map between the relation id and its order
     */ getOrderMap () {
            // Relations sharing the same (fractional) order are spread evenly
            // inside the open interval (floor(order), floor(order) + 1).
            // Groups led by a pre-existing row are skipped: their order is
            // already persisted and must not be rewritten.
            return _$1(computedRelations).groupBy('order').reduce((acc, relations)=>{
                if (relations[0]?.init) return acc;
                relations.forEach((relation, idx)=>{
                    acc[relation.id] = Math.floor(relation.order) + (idx + 1) / (relations.length + 1);
                });
                return acc;
            }, {});
        }
    };
};
// Public CommonJS surface of this compiled module.
exports.relationsOrderer = relationsOrderer;
exports.sortConnectArray = sortConnectArray;
//# sourceMappingURL=relations-orderer.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,218 @@
import { castArray, maxBy } from 'lodash/fp';
import _ from 'lodash';
import InvalidRelationError from '../errors/invalid-relation.mjs';
/**
 * Sorts a `connect` array so that every relation is connected after the
 * relation it is positioned against (its before/after anchor).
 *
 * Example: connecting
 *   { id: 5, position: { before: 1 } }
 *   { id: 1, position: { before: 2 } }
 *   { id: 2, position: { end: true } }
 * must happen in the order [2, 1, 5], because the order value of an anchor
 * is only known once the anchor itself has been connected.
 *
 * @param connectArr relations to connect, possibly out of order
 * @param initialArr relations already present (e.g. in the database)
 * @param strictSort when true, throw if an anchor id is neither in the
 *                   connect array nor in `initialArr`; when false, fall back
 *                   to connecting such relations at the end
 */ const sortConnectArray = (connectArr, initialArr = [], strictSort = true)=>{
    const orderedConnect = [];
    // Whether any relation anchors on an id that is not readily available,
    // which forces us to re-order the array.
    let requiresSorting = false;
    // Lookup of ids that already exist before this connect.
    const presentInitially = {};
    for (const rel of initialArr){
        presentInitially[rel.id] = true;
    }
    /**
     * First occurrence of each relation id in the connect array
     * (`computed` tracks whether it has been placed in `orderedConnect`).
     *
     * Duplicate relations are rejected: the id alone identifies a regular
     * relation; for component relations the identity is the (id, component)
     * pair, so two entries with the same id but different components may
     * coexist.
     */ const relationById = {};
    for (const relation of connectArr){
        const anchorId = relation.position?.before || relation.position?.after;
        if (!anchorId || !presentInitially[anchorId] && !relationById[anchorId]) {
            requiresSorting = true;
        }
        const previousEntry = relationById[relation.id];
        const previousHasNoComponent = previousEntry && !('__component' in previousEntry);
        const sameComponent = previousEntry && previousEntry.__component === relation.__component;
        if (previousEntry && (previousHasNoComponent || sameComponent)) {
            throw new InvalidRelationError(`The relation with id ${relation.id} is already connected. ` + 'You cannot connect the same relation twice.');
        }
        // Keep the FIRST occurrence (this matches the original reduce, where
        // the `...mapper` spread let existing keys win over the new entry).
        if (!previousEntry) {
            relationById[relation.id] = {
                ...relation,
                computed: false
            };
        }
    }
    // Nothing anchors on an unknown id: the array can be used as-is.
    if (!requiresSorting) return connectArr;
    // Depth-first placement: place the anchor first, then the relation itself.
    const placeRelation = (relation, seenInBranch)=>{
        const anchorId = relation.position?.before || relation.position?.after;
        const anchorRelation = relationById[anchorId];
        // An anchor already visited in this branch means a circular reference.
        if (anchorId && seenInBranch[anchorId]) {
            throw new InvalidRelationError('A circular reference was found in the connect array. ' + 'One relation is trying to connect before/after another one that is trying to connect before/after it');
        }
        // Already placed through another branch.
        if (relationById[relation.id]?.computed) {
            return;
        }
        relationById[relation.id].computed = true;
        // No anchor, or the anchor already exists: place directly.
        if (!anchorId || presentInitially[anchorId]) {
            orderedConnect.push(relation);
            return;
        }
        if (relationById[anchorId]) {
            // The anchor is connected elsewhere in the array: place it first.
            placeRelation(anchorRelation, {
                ...seenInBranch,
                [relation.id]: true
            });
            orderedConnect.push(relation);
        } else if (strictSort) {
            // The anchor is neither in the connect array nor pre-existing.
            throw new InvalidRelationError(`There was a problem connecting relation with id ${relation.id} at position ${JSON.stringify(relation.position)}. The relation with id ${anchorId} needs to be connected first.`);
        } else {
            // Non-strict mode: fall back to appending the relation at the end.
            orderedConnect.push({
                id: relation.id,
                position: {
                    end: true
                }
            });
        }
    };
    for (const relation of connectArr){
        placeRelation(relation, {});
    }
    return orderedConnect;
};
/**
 * Responsible for calculating the relations order when connecting them.
 *
 * The connect method takes an array of relations with positional attributes:
 * - before: the id of the relation to connect before
 * - after: the id of the relation to connect after
 * - end: it should be at the end
 * - start: it should be at the start
 *
 * Example:
 *  - Having a connect array like:
 *     [ { id: 4, before: 2 }, { id: 4, before: 3}, {id: 5, before: 4} ]
 *  - With the initial relations:
 *     [ { id: 2, order: 4 }, { id: 3, order: 10 } ]
 *  - Step by step, going through the connect array, the array of relations would be:
 *     [ { id: 4, order: 3.5 }, { id: 2, order: 4 }, { id: 3, order: 10 } ]
 *     [ { id: 2, order: 4 }, { id: 4, order: 3.5 }, { id: 3, order: 10 } ]
 *     [ { id: 2, order: 4 }, { id: 5, order: 3.5 }, { id: 4, order: 3.5 }, { id: 3, order: 10 } ]
 *  - The final step would be to recalculate fractional order values.
 *     [ { id: 2, order: 4 }, { id: 5, order: 3.33 }, { id: 4, order: 3.66 }, { id: 3, order: 10 } ]
 *
 * @param {Array<*>} initArr - array of relations to initialize the class with
 * @param {string} idColumn - the column name of the id
 * @param {string} orderColumn - the column name of the order
 * @param {boolean} strict - if true, will throw an error if a relation is connected adjacent to
 *                           another one that does not exist
 * @return {*}
 */ const relationsOrderer = (initArr, idColumn, orderColumn, strict)=>{
    // Normalize the initial rows; a missing/invalid order value defaults to 1.
    // `init: true` marks rows that pre-existed (their order is persisted).
    const computedRelations = castArray(initArr ?? []).map((r)=>({
            init: true,
            id: r[idColumn],
            order: Number(r[orderColumn]) || 1
        }));
    const maxOrder = maxBy('order', computedRelations)?.order || 0;
    // Locate a relation by id. NOTE: `relation` is undefined when the id is
    // absent (idx === -1); insertRelation then throws a TypeError which
    // connect() catches and rethrows with a descriptive message.
    const findRelation = (id)=>{
        const idx = computedRelations.findIndex((r)=>r.id === id);
        return {
            idx,
            relation: computedRelations[idx]
        };
    };
    // Remove a relation from the computed list, if present.
    const removeRelation = (r)=>{
        const { idx } = findRelation(r.id);
        if (idx >= 0) {
            computedRelations.splice(idx, 1);
        }
    };
    // Place `r` in the list, assigning a (possibly fractional) order value.
    const insertRelation = (r)=>{
        let idx;
        if (r.position?.before) {
            const { idx: _idx, relation } = findRelation(r.position.before);
            if (relation.init) {
                // Anchor is a pre-existing row: slot just before its order.
                r.order = relation.order - 0.5;
            } else {
                // Anchor was itself just connected: share its fractional order.
                r.order = relation.order;
            }
            idx = _idx;
        } else if (r.position?.after) {
            const { idx: _idx, relation } = findRelation(r.position.after);
            if (relation.init) {
                r.order = relation.order + 0.5;
            } else {
                r.order = relation.order;
            }
            idx = _idx + 1;
        } else if (r.position?.start) {
            r.order = 0.5;
            idx = 0;
        } else {
            // Default / explicit end: place after the highest initial order.
            r.order = maxOrder + 0.5;
            idx = computedRelations.length;
        }
        // Insert the relation in the array
        computedRelations.splice(idx, 0, r);
    };
    return {
        disconnect (relations) {
            castArray(relations).forEach((relation)=>{
                removeRelation(relation);
            });
            return this;
        },
        connect (relations) {
            // Sort first so each before/after anchor is placed before its dependents.
            sortConnectArray(castArray(relations), computedRelations, strict).forEach((relation)=>{
                this.disconnect(relation);
                try {
                    insertRelation(relation);
                } catch (err) {
                    throw new Error(`There was a problem connecting relation with id ${relation.id} at position ${JSON.stringify(relation.position)}. The list of connect relations is not valid`);
                }
            });
            return this;
        },
        get () {
            return computedRelations;
        },
        /**
     * Get a map between the relation id and its order
     */ getOrderMap () {
            // Relations sharing the same (fractional) order are spread evenly
            // inside the open interval (floor(order), floor(order) + 1).
            // Groups led by a pre-existing row are skipped: their order is
            // already persisted and must not be rewritten.
            return _(computedRelations).groupBy('order').reduce((acc, relations)=>{
                if (relations[0]?.init) return acc;
                relations.forEach((relation, idx)=>{
                    acc[relation.id] = Math.floor(relation.order) + (idx + 1) / (relations.length + 1);
                });
                return acc;
            }, {});
        }
    };
};
// Public ESM surface of this compiled module.
export { relationsOrderer, sortConnectArray };
//# sourceMappingURL=relations-orderer.mjs.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,79 @@
import type { Knex } from 'knex';
import type { CountResult, ID } from '../types';
import { QueryBuilder } from '../query/query-builder';
export type Data = Record<string, unknown>;
export type Params = {
where?: any;
filters?: any;
select?: any;
populate?: any;
orderBy?: any;
_q?: string;
data?: any;
page?: number;
pageSize?: number;
limit?: number;
offset?: number;
count?: boolean;
};
export type FindOneParams = Pick<Params, 'where' | 'select' | 'populate' | '_q' | 'orderBy'>;
export interface Repository {
findOne(params?: FindOneParams): Promise<any>;
findMany(params?: Params): Promise<any[]>;
findWithCount(params?: Params): Promise<[any[], number]>;
findPage(params: Params): Promise<{
results: any[];
pagination: {
page: number;
pageSize: number;
pageCount: number;
total: number;
};
}>;
create(params: Params): Promise<any>;
createMany(params: Params): Promise<CountResult & {
ids: ID[];
}>;
update(params: Params): Promise<any>;
updateMany(params: Params): Promise<CountResult>;
delete(params: Params): Promise<any>;
deleteMany(params?: Params): Promise<CountResult>;
count(params?: Params): Promise<number>;
attachRelations(id: ID, data: Data): Promise<any>;
updateRelations(id: ID, data: Data): Promise<any>;
deleteRelations(id: ID): Promise<any>;
populate(entity: Entity, populate: Params['populate']): Promise<any>;
load(entity: any, field: string | string[], populate?: Params['populate']): Promise<any>;
loadPages<TField extends string>(entity: any, field: TField | TField[], populate?: Params['populate']): Promise<any>;
}
/** A persisted entry: a primary key plus arbitrary attribute values. */
export type Entity = {
    id: ID;
    [key: string]: any;
};
/**
 * Model-agnostic data-access facade: every method takes the target model's
 * `uid` explicitly. {@link getRepository} returns a {@link Repository} with
 * the `uid` pre-bound.
 */
export interface EntityManager {
    /** Fetch the first entry of model `uid` matching `params`. */
    findOne(uid: string, params: Params): Promise<any>;
    /** Fetch all entries of model `uid` matching `params`. */
    findMany(uid: string, params: Params): Promise<any[]>;
    /** Count entries of model `uid` matching `params`. */
    count(uid: string, params?: Params): Promise<number>;
    /** Insert a single entry (`params.data`) into model `uid`. */
    create(uid: string, params: Params): Promise<any>;
    /** Insert multiple entries; resolves with a count and the inserted ids. */
    createMany(uid: string, params: Params): Promise<CountResult & {
        ids: ID[];
    }>;
    /** Update a single matching entry. */
    update(uid: string, params: Params): Promise<any>;
    /** Update all matching entries; resolves with an affected-row count. */
    updateMany(uid: string, params: Params): Promise<CountResult>;
    /** Delete a single matching entry. */
    delete(uid: string, params: Params): Promise<any>;
    /** Delete all matching entries; resolves with an affected-row count. */
    deleteMany(uid: string, params: Params): Promise<CountResult>;
    /** Populate the requested relations on an already-fetched entity. */
    populate(uid: string, entity: Entity, populate: Params['populate']): Promise<Entity>;
    /** Load the value of one or more relational fields for an entity. */
    load(uid: string, entity: Entity, field: string | string[], populate?: Params['populate']): Promise<any>;
    /** Create relation rows for entry `id`, optionally inside a caller-supplied transaction. */
    attachRelations(uid: string, id: ID, data: any, options?: {
        transaction?: Knex.Transaction;
    }): Promise<any>;
    /** Replace/update relation rows for entry `id`, optionally inside a caller-supplied transaction. */
    updateRelations(uid: string, id: ID, data: any, options?: {
        transaction?: Knex.Transaction;
    }): Promise<any>;
    /** Remove relation rows for entry `id`, optionally inside a caller-supplied transaction. */
    deleteRelations(uid: string, id: ID, options?: {
        transaction?: Knex.Transaction;
    }): Promise<void>;
    /** Start a low-level query against model `uid`. */
    createQueryBuilder(uid: string): QueryBuilder;
    /** Return a {@link Repository} with `uid` pre-bound. */
    getRepository(uid: string): Repository;
}
//# sourceMappingURL=types.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../src/entity-manager/types.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,IAAI,EAAE,MAAM,MAAM,CAAC;AACjC,OAAO,KAAK,EAAE,WAAW,EAAE,EAAE,EAAE,MAAM,UAAU,CAAC;AAChD,OAAO,EAAE,YAAY,EAAE,MAAM,wBAAwB,CAAC;AAEtD,MAAM,MAAM,IAAI,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AAE3C,MAAM,MAAM,MAAM,GAAG;IACnB,KAAK,CAAC,EAAE,GAAG,CAAC;IACZ,OAAO,CAAC,EAAE,GAAG,CAAC;IACd,MAAM,CAAC,EAAE,GAAG,CAAC;IACb,QAAQ,CAAC,EAAE,GAAG,CAAC;IACf,OAAO,CAAC,EAAE,GAAG,CAAC;IACd,EAAE,CAAC,EAAE,MAAM,CAAC;IACZ,IAAI,CAAC,EAAE,GAAG,CAAC;IACX,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,KAAK,CAAC,EAAE,OAAO,CAAC;CACjB,CAAC;AAEF,MAAM,MAAM,aAAa,GAAG,IAAI,CAAC,MAAM,EAAE,OAAO,GAAG,QAAQ,GAAG,UAAU,GAAG,IAAI,GAAG,SAAS,CAAC,CAAC;AAE7F,MAAM,WAAW,UAAU;IACzB,OAAO,CAAC,MAAM,CAAC,EAAE,aAAa,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC;IAC9C,QAAQ,CAAC,MAAM,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC,GAAG,EAAE,CAAC,CAAC;IAC1C,aAAa,CAAC,MAAM,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC,CAAC,GAAG,EAAE,EAAE,MAAM,CAAC,CAAC,CAAC;IACzD,QAAQ,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC;QAChC,OAAO,EAAE,GAAG,EAAE,CAAC;QACf,UAAU,EAAE;YACV,IAAI,EAAE,MAAM,CAAC;YACb,QAAQ,EAAE,MAAM,CAAC;YACjB,SAAS,EAAE,MAAM,CAAC;YAClB,KAAK,EAAE,MAAM,CAAC;SACf,CAAC;KACH,CAAC,CAAC;IACH,MAAM,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC;IACrC,UAAU,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,WAAW,GAAG;QAAE,GAAG,EAAE,EAAE,EAAE,CAAA;KAAE,CAAC,CAAC;IACjE,MAAM,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC;IACrC,UAAU,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,WAAW,CAAC,CAAC;IACjD,MAAM,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC;IACrC,UAAU,CAAC,MAAM,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC,WAAW,CAAC,CAAC;IAClD,KAAK,CAAC,MAAM,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC;IACxC,eAAe,CAAC,EAAE,EAAE,EAAE,EAAE,IAAI,EAAE,IAAI,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC;IAClD,eAAe,CAAC,EAAE,EAAE,EAAE,EAAE,IAAI,EAAE,IAAI,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC;IAClD,eAAe,CAAC,EAAE,EAAE,EAAE,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC;IACtC,QA
AQ,CAAC,MAAM,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,CAAC,UAAU,CAAC,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC;IACrE,IAAI,CAAC,MAAM,EAAE,GAAG,EAAE,KAAK,EAAE,MAAM,GAAG,MAAM,EAAE,EAAE,QAAQ,CAAC,EAAE,MAAM,CAAC,UAAU,CAAC,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC;IACzF,SAAS,CAAC,MAAM,SAAS,MAAM,EAC7B,MAAM,EAAE,GAAG,EACX,KAAK,EAAE,MAAM,GAAG,MAAM,EAAE,EACxB,QAAQ,CAAC,EAAE,MAAM,CAAC,UAAU,CAAC,GAC5B,OAAO,CAAC,GAAG,CAAC,CAAC;CACjB;AAED,MAAM,MAAM,MAAM,GAAG;IACnB,EAAE,EAAE,EAAE,CAAC;IACP,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAC;CACpB,CAAC;AAGF,MAAM,WAAW,aAAa;IAC5B,OAAO,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC;IACnD,QAAQ,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,GAAG,EAAE,CAAC,CAAC;IACtD,KAAK,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC;IACrD,MAAM,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC;IAClD,UAAU,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,WAAW,GAAG;QAAE,GAAG,EAAE,EAAE,EAAE,CAAA;KAAE,CAAC,CAAC;IAC9E,MAAM,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC;IAClD,UAAU,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,WAAW,CAAC,CAAC;IAC9D,MAAM,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC;IAClD,UAAU,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,WAAW,CAAC,CAAC;IAC9D,QAAQ,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,CAAC,UAAU,CAAC,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC;IACrF,IAAI,CACF,GAAG,EAAE,MAAM,EACX,MAAM,EAAE,MAAM,EACd,KAAK,EAAE,MAAM,GAAG,MAAM,EAAE,EACxB,QAAQ,CAAC,EAAE,MAAM,CAAC,UAAU,CAAC,GAC5B,OAAO,CAAC,GAAG,CAAC,CAAC;IAChB,eAAe,CACb,GAAG,EAAE,MAAM,EACX,EAAE,EAAE,EAAE,EACN,IAAI,EAAE,GAAG,EACT,OAAO,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,IAAI,CAAC,WAAW,CAAA;KAAE,GAC3C,OAAO,CAAC,GAAG,CAAC,CAAC;IAChB,eAAe,CACb,GAAG,EAAE,MAAM,EACX,EAAE,EAAE,EAAE,EACN,IAAI,EAAE,GAAG,EACT,OAAO,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,IAAI,CAAC,WAAW,CAAA;KAAE,GAC3C,OAAO,CAAC,GAAG,CAAC,CAAC;IAChB,eAAe,CAAC,GAAG,EAAE,MAAM,EAAE,EAAE,EAAE,EAAE,EAAE,OAAO,CAAC,EAAE;QAAE,WAAW,CAAC,EAAE,IAAI,CAAC,W
AAW,CAAA;KAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IAClG,kBAAkB,CAAC,GAAG,EAAE,MAAM,GAAG,YAAY,CAAC;IAC9C,aAAa,CAAC,GAAG,EAAE,MAAM,GAAG,UAAU,CAAC;CACxC"}

Some files were not shown because too many files have changed in this diff Show More