node_modules ignore

This commit is contained in:
2025-05-08 23:43:47 +02:00
parent e19d52f172
commit 4574544c9f
65041 changed files with 10593536 additions and 0 deletions

View File

@@ -0,0 +1,34 @@
import type { Resolver } from 'umzug';
import type { Knex } from 'knex';
import type { Database } from '..';
/**
 * Contract for migrations supplied by the application/user.
 * `shouldRun` gates whether `up`/`down` are executed at all.
 */
export interface UserMigrationProvider {
shouldRun(): Promise<boolean>;
up(): Promise<void>;
down(): Promise<void>;
}
/**
 * Contract for the internal (built-in) migrations. Unlike the user
 * provider it also accepts additional migrations via `register`.
 */
export interface InternalMigrationProvider {
register(migration: Migration): void;
shouldRun(): Promise<boolean>;
up(): Promise<void>;
down(): Promise<void>;
}
/**
 * Aggregate provider combining user and internal migrations. Exposes the
 * internal provider under `providers.internal` so callers can register
 * extra migrations on it.
 */
export interface MigrationProvider {
providers: {
internal: InternalMigrationProvider;
};
shouldRun(): Promise<boolean>;
up(): Promise<void>;
down(): Promise<void>;
}
/** Context object passed to each umzug migration resolver. */
export type Context = {
db: Database;
};
export type MigrationResolver = Resolver<Context>;
/** One migration step: receives the active transaction and the database. */
export type MigrationFn = (knex: Knex.Transaction, db: Database) => Promise<void>;
/** A named migration with its `up` and `down` steps. */
export type Migration = {
name: string;
up: MigrationFn;
down: MigrationFn;
};
/**
 * Curries a migration function into a zero-argument runner that executes it
 * inside a transaction on `db`.
 * NOTE(review): the generated `Promise<Promise<void>>` return type flattens
 * to a single promise at runtime when awaited.
 */
export declare const wrapTransaction: (db: Database) => (fn: MigrationFn) => () => Promise<Promise<void>>;
//# sourceMappingURL=common.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"common.d.ts","sourceRoot":"","sources":["../../src/migrations/common.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,OAAO,CAAC;AACtC,OAAO,KAAK,EAAE,IAAI,EAAE,MAAM,MAAM,CAAC;AAEjC,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,IAAI,CAAC;AAEnC,MAAM,WAAW,qBAAqB;IACpC,SAAS,IAAI,OAAO,CAAC,OAAO,CAAC,CAAC;IAC9B,EAAE,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC;IACpB,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC;CACvB;AAED,MAAM,WAAW,yBAAyB;IACxC,QAAQ,CAAC,SAAS,EAAE,SAAS,GAAG,IAAI,CAAC;IACrC,SAAS,IAAI,OAAO,CAAC,OAAO,CAAC,CAAC;IAC9B,EAAE,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC;IACpB,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC;CACvB;AACD,MAAM,WAAW,iBAAiB;IAChC,SAAS,EAAE;QAAE,QAAQ,EAAE,yBAAyB,CAAA;KAAE,CAAC;IACnD,SAAS,IAAI,OAAO,CAAC,OAAO,CAAC,CAAC;IAC9B,EAAE,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC;IACpB,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC;CACvB;AAED,MAAM,MAAM,OAAO,GAAG;IAAE,EAAE,EAAE,QAAQ,CAAA;CAAE,CAAC;AAEvC,MAAM,MAAM,iBAAiB,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC;AAElD,MAAM,MAAM,WAAW,GAAG,CAAC,IAAI,EAAE,IAAI,CAAC,WAAW,EAAE,EAAE,EAAE,QAAQ,KAAK,OAAO,CAAC,IAAI,CAAC,CAAC;AAElF,MAAM,MAAM,SAAS,GAAG;IACtB,IAAI,EAAE,MAAM,CAAC;IACb,EAAE,EAAE,WAAW,CAAC;IAChB,IAAI,EAAE,WAAW,CAAC;CACnB,CAAC;AAEF,eAAO,MAAM,eAAe,OAAQ,QAAQ,UAAU,WAAW,iCAEhE,CAAC"}

View File

@@ -0,0 +1,8 @@
'use strict';
/**
 * Builds a transaction-wrapped runner for a migration function.
 * Curried as (db) => (fn) => thunk, so the final thunk can be invoked
 * with no arguments by the migration runner.
 */
function wrapTransaction(db) {
    return function (fn) {
        return function () {
            return db.transaction(({ trx }) => Promise.resolve(fn(trx, db)));
        };
    };
}
exports.wrapTransaction = wrapTransaction;
//# sourceMappingURL=common.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"common.js","sources":["../../src/migrations/common.ts"],"sourcesContent":["import type { Resolver } from 'umzug';\nimport type { Knex } from 'knex';\n\nimport type { Database } from '..';\n\nexport interface UserMigrationProvider {\n shouldRun(): Promise<boolean>;\n up(): Promise<void>;\n down(): Promise<void>;\n}\n\nexport interface InternalMigrationProvider {\n register(migration: Migration): void;\n shouldRun(): Promise<boolean>;\n up(): Promise<void>;\n down(): Promise<void>;\n}\nexport interface MigrationProvider {\n providers: { internal: InternalMigrationProvider };\n shouldRun(): Promise<boolean>;\n up(): Promise<void>;\n down(): Promise<void>;\n}\n\nexport type Context = { db: Database };\n\nexport type MigrationResolver = Resolver<Context>;\n\nexport type MigrationFn = (knex: Knex.Transaction, db: Database) => Promise<void>;\n\nexport type Migration = {\n name: string;\n up: MigrationFn;\n down: MigrationFn;\n};\n\nexport const wrapTransaction = (db: Database) => (fn: MigrationFn) => () => {\n return db.transaction(({ trx }) => Promise.resolve(fn(trx, db)));\n};\n"],"names":["wrapTransaction","db","fn","transaction","trx","Promise","resolve"],"mappings":";;AAoCaA,MAAAA,eAAAA,GAAkB,CAACC,EAAAA,GAAiB,CAACC,EAAoB,GAAA,IAAA;AACpE,YAAA,OAAOD,EAAGE,CAAAA,WAAW,CAAC,CAAC,EAAEC,GAAG,EAAE,GAAKC,OAAQC,CAAAA,OAAO,CAACJ,EAAAA,CAAGE,GAAKH,EAAAA,EAAAA,CAAAA,CAAAA,CAAAA;;;;;"}

View File

@@ -0,0 +1,6 @@
/**
 * Wraps a migration function so it runs inside a database transaction.
 * Curried: wrapTransaction(db)(fn) yields a zero-argument thunk.
 */
function wrapTransaction(db) {
    return (fn) => () => db.transaction(({ trx }) => Promise.resolve(fn(trx, db)));
}
export { wrapTransaction };
//# sourceMappingURL=common.mjs.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"common.mjs","sources":["../../src/migrations/common.ts"],"sourcesContent":["import type { Resolver } from 'umzug';\nimport type { Knex } from 'knex';\n\nimport type { Database } from '..';\n\nexport interface UserMigrationProvider {\n shouldRun(): Promise<boolean>;\n up(): Promise<void>;\n down(): Promise<void>;\n}\n\nexport interface InternalMigrationProvider {\n register(migration: Migration): void;\n shouldRun(): Promise<boolean>;\n up(): Promise<void>;\n down(): Promise<void>;\n}\nexport interface MigrationProvider {\n providers: { internal: InternalMigrationProvider };\n shouldRun(): Promise<boolean>;\n up(): Promise<void>;\n down(): Promise<void>;\n}\n\nexport type Context = { db: Database };\n\nexport type MigrationResolver = Resolver<Context>;\n\nexport type MigrationFn = (knex: Knex.Transaction, db: Database) => Promise<void>;\n\nexport type Migration = {\n name: string;\n up: MigrationFn;\n down: MigrationFn;\n};\n\nexport const wrapTransaction = (db: Database) => (fn: MigrationFn) => () => {\n return db.transaction(({ trx }) => Promise.resolve(fn(trx, db)));\n};\n"],"names":["wrapTransaction","db","fn","transaction","trx","Promise","resolve"],"mappings":"AAoCaA,MAAAA,eAAAA,GAAkB,CAACC,EAAAA,GAAiB,CAACC,EAAoB,GAAA,IAAA;AACpE,YAAA,OAAOD,EAAGE,CAAAA,WAAW,CAAC,CAAC,EAAEC,GAAG,EAAE,GAAKC,OAAQC,CAAAA,OAAO,CAACJ,EAAAA,CAAGE,GAAKH,EAAAA,EAAAA,CAAAA,CAAAA,CAAAA;;;;;"}

View File

@@ -0,0 +1,5 @@
import type { MigrationProvider, Migration } from './common';
import type { Database } from '..';
export type { MigrationProvider, Migration };
/**
 * Creates the aggregate migration provider (user + internal migrations)
 * for the given database.
 */
export declare const createMigrationsProvider: (db: Database) => MigrationProvider;
//# sourceMappingURL=index.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/migrations/index.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,EAAE,iBAAiB,EAAE,SAAS,EAAE,MAAM,UAAU,CAAC;AAC7D,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,IAAI,CAAC;AAEnC,YAAY,EAAE,iBAAiB,EAAE,SAAS,EAAE,CAAC;AAE7C,eAAO,MAAM,wBAAwB,OAAQ,QAAQ,KAAG,iBA+BvD,CAAC"}

View File

@@ -0,0 +1,39 @@
'use strict';
var users = require('./users.js');
var internal = require('./internal.js');
/**
 * Builds the aggregate migration provider. User-defined migrations are
 * listed before internal ones, so `up`/`down` process them first. The
 * internal provider is exposed on `providers.internal` so callers can
 * register additional migrations on it.
 */
const createMigrationsProvider = (db) => {
    const fromUsers = users.createUserMigrationProvider(db);
    const fromInternal = internal.createInternalMigrationProvider(db);
    const all = [fromUsers, fromInternal];
    // Runs the given step ('up' or 'down') on every provider that reports
    // pending work, preserving provider order.
    const runPending = async (step) => {
        for (const provider of all) {
            if (await provider.shouldRun()) {
                await provider[step]();
            }
        }
    };
    return {
        providers: {
            internal: fromInternal
        },
        async shouldRun() {
            // The aggregate should run when at least one provider has work.
            const answers = await Promise.all(all.map((provider) => provider.shouldRun()));
            return answers.some((answer) => answer);
        },
        up: () => runPending('up'),
        down: () => runPending('down')
    };
};
exports.createMigrationsProvider = createMigrationsProvider;
//# sourceMappingURL=index.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sources":["../../src/migrations/index.ts"],"sourcesContent":["import { createUserMigrationProvider } from './users';\nimport { createInternalMigrationProvider } from './internal';\n\nimport type { MigrationProvider, Migration } from './common';\nimport type { Database } from '..';\n\nexport type { MigrationProvider, Migration };\n\nexport const createMigrationsProvider = (db: Database): MigrationProvider => {\n const userProvider = createUserMigrationProvider(db);\n const internalProvider = createInternalMigrationProvider(db);\n const providers = [userProvider, internalProvider];\n\n return {\n providers: {\n internal: internalProvider,\n },\n async shouldRun() {\n const shouldRunResponses = await Promise.all(\n providers.map((provider) => provider.shouldRun())\n );\n\n return shouldRunResponses.some((shouldRun) => shouldRun);\n },\n async up() {\n for (const provider of providers) {\n if (await provider.shouldRun()) {\n await provider.up();\n }\n }\n },\n async down() {\n for (const provider of providers) {\n if (await provider.shouldRun()) {\n await provider.down();\n }\n }\n },\n 
};\n};\n"],"names":["createMigrationsProvider","db","userProvider","createUserMigrationProvider","internalProvider","createInternalMigrationProvider","providers","internal","shouldRun","shouldRunResponses","Promise","all","map","provider","some","up","down"],"mappings":";;;;;AAQO,MAAMA,2BAA2B,CAACC,EAAAA,GAAAA;AACvC,IAAA,MAAMC,eAAeC,iCAA4BF,CAAAA,EAAAA,CAAAA;AACjD,IAAA,MAAMG,mBAAmBC,wCAAgCJ,CAAAA,EAAAA,CAAAA;AACzD,IAAA,MAAMK,SAAY,GAAA;AAACJ,QAAAA,YAAAA;AAAcE,QAAAA;AAAiB,KAAA;IAElD,OAAO;QACLE,SAAW,EAAA;YACTC,QAAUH,EAAAA;AACZ,SAAA;QACA,MAAMI,SAAAA,CAAAA,GAAAA;YACJ,MAAMC,kBAAAA,GAAqB,MAAMC,OAAAA,CAAQC,GAAG,CAC1CL,SAAUM,CAAAA,GAAG,CAAC,CAACC,QAAaA,GAAAA,QAAAA,CAASL,SAAS,EAAA,CAAA,CAAA;AAGhD,YAAA,OAAOC,kBAAmBK,CAAAA,IAAI,CAAC,CAACN,SAAcA,GAAAA,SAAAA,CAAAA;AAChD,SAAA;QACA,MAAMO,EAAAA,CAAAA,GAAAA;YACJ,KAAK,MAAMF,YAAYP,SAAW,CAAA;gBAChC,IAAI,MAAMO,QAASL,CAAAA,SAAS,EAAI,EAAA;AAC9B,oBAAA,MAAMK,SAASE,EAAE,EAAA;AACnB;AACF;AACF,SAAA;QACA,MAAMC,IAAAA,CAAAA,GAAAA;YACJ,KAAK,MAAMH,YAAYP,SAAW,CAAA;gBAChC,IAAI,MAAMO,QAASL,CAAAA,SAAS,EAAI,EAAA;AAC9B,oBAAA,MAAMK,SAASG,IAAI,EAAA;AACrB;AACF;AACF;AACF,KAAA;AACF;;;;"}

View File

@@ -0,0 +1,37 @@
import { createUserMigrationProvider } from './users.mjs';
import { createInternalMigrationProvider } from './internal.mjs';
/**
 * Builds the aggregate migration provider. User-defined migrations are
 * listed before internal ones, so `up`/`down` process them first. The
 * internal provider is exposed on `providers.internal` so callers can
 * register additional migrations on it.
 */
const createMigrationsProvider = (db) => {
    const fromUsers = createUserMigrationProvider(db);
    const fromInternal = createInternalMigrationProvider(db);
    const all = [fromUsers, fromInternal];
    // Runs the given step ('up' or 'down') on every provider that reports
    // pending work, preserving provider order.
    const runPending = async (step) => {
        for (const provider of all) {
            if (await provider.shouldRun()) {
                await provider[step]();
            }
        }
    };
    return {
        providers: {
            internal: fromInternal
        },
        async shouldRun() {
            // The aggregate should run when at least one provider has work.
            const answers = await Promise.all(all.map((provider) => provider.shouldRun()));
            return answers.some((answer) => answer);
        },
        up: () => runPending('up'),
        down: () => runPending('down')
    };
};
export { createMigrationsProvider };
//# sourceMappingURL=index.mjs.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.mjs","sources":["../../src/migrations/index.ts"],"sourcesContent":["import { createUserMigrationProvider } from './users';\nimport { createInternalMigrationProvider } from './internal';\n\nimport type { MigrationProvider, Migration } from './common';\nimport type { Database } from '..';\n\nexport type { MigrationProvider, Migration };\n\nexport const createMigrationsProvider = (db: Database): MigrationProvider => {\n const userProvider = createUserMigrationProvider(db);\n const internalProvider = createInternalMigrationProvider(db);\n const providers = [userProvider, internalProvider];\n\n return {\n providers: {\n internal: internalProvider,\n },\n async shouldRun() {\n const shouldRunResponses = await Promise.all(\n providers.map((provider) => provider.shouldRun())\n );\n\n return shouldRunResponses.some((shouldRun) => shouldRun);\n },\n async up() {\n for (const provider of providers) {\n if (await provider.shouldRun()) {\n await provider.up();\n }\n }\n },\n async down() {\n for (const provider of providers) {\n if (await provider.shouldRun()) {\n await provider.down();\n }\n }\n },\n 
};\n};\n"],"names":["createMigrationsProvider","db","userProvider","createUserMigrationProvider","internalProvider","createInternalMigrationProvider","providers","internal","shouldRun","shouldRunResponses","Promise","all","map","provider","some","up","down"],"mappings":";;;AAQO,MAAMA,2BAA2B,CAACC,EAAAA,GAAAA;AACvC,IAAA,MAAMC,eAAeC,2BAA4BF,CAAAA,EAAAA,CAAAA;AACjD,IAAA,MAAMG,mBAAmBC,+BAAgCJ,CAAAA,EAAAA,CAAAA;AACzD,IAAA,MAAMK,SAAY,GAAA;AAACJ,QAAAA,YAAAA;AAAcE,QAAAA;AAAiB,KAAA;IAElD,OAAO;QACLE,SAAW,EAAA;YACTC,QAAUH,EAAAA;AACZ,SAAA;QACA,MAAMI,SAAAA,CAAAA,GAAAA;YACJ,MAAMC,kBAAAA,GAAqB,MAAMC,OAAAA,CAAQC,GAAG,CAC1CL,SAAUM,CAAAA,GAAG,CAAC,CAACC,QAAaA,GAAAA,QAAAA,CAASL,SAAS,EAAA,CAAA,CAAA;AAGhD,YAAA,OAAOC,kBAAmBK,CAAAA,IAAI,CAAC,CAACN,SAAcA,GAAAA,SAAAA,CAAAA;AAChD,SAAA;QACA,MAAMO,EAAAA,CAAAA,GAAAA;YACJ,KAAK,MAAMF,YAAYP,SAAW,CAAA;gBAChC,IAAI,MAAMO,QAASL,CAAAA,SAAS,EAAI,EAAA;AAC9B,oBAAA,MAAMK,SAASE,EAAE,EAAA;AACnB;AACF;AACF,SAAA;QACA,MAAMC,IAAAA,CAAAA,GAAAA;YACJ,KAAK,MAAMH,YAAYP,SAAW,CAAA;gBAChC,IAAI,MAAMO,QAASL,CAAAA,SAAS,EAAI,EAAA;AAC9B,oBAAA,MAAMK,SAASG,IAAI,EAAA;AACrB;AACF;AACF;AACF,KAAA;AACF;;;;"}

View File

@@ -0,0 +1,3 @@
import type { Migration } from '../common';
/**
 * Migration that renames tables, columns and indexes from their full
 * (over-long) generated names to their shortened equivalents.
 */
export declare const renameIdentifiersLongerThanMaxLength: Migration;
//# sourceMappingURL=5.0.0-01-convert-identifiers-long-than-max-length.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"5.0.0-01-convert-identifiers-long-than-max-length.d.ts","sourceRoot":"","sources":["../../../src/migrations/internal-migrations/5.0.0-01-convert-identifiers-long-than-max-length.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,WAAW,CAAC;AAqB3C,eAAO,MAAM,oCAAoC,EAAE,SA4ClD,CAAC"}

View File

@@ -0,0 +1,179 @@
'use strict';
var createDebug = require('debug');
var index = require('../../utils/identifiers/index.js');
const debug = createDebug('strapi::database::migration');
/**
 * Renames identifiers (indexes, columns, then tables) from their full
 * names to the shortened names in the current metadata. Step order is
 * significant: indexes and columns are renamed while the original (full)
 * table names still exist, so each step can address rows by that name.
 */
const renameIdentifiersLongerThanMaxLength = {
name: '5.0.0-rename-identifiers-longer-than-max-length',
async up (knex, db) {
const md = db.metadata;
// Compute every table/column/index whose full name differs from its
// shortened name.
const diffs = findDiffs(md);
// migrate indexes before tables so we know to target the original tableName
for (const indexDiff of diffs.indexes){
await renameIndex(knex, db, indexDiff);
}
// migrate columns before table names so we know to target the original tableName
for (const columnDiff of diffs.columns){
const { full, short } = columnDiff;
const tableName = full.tableName;
// Skip tables/columns that don't exist (e.g. schema never created).
const hasTable = await knex.schema.hasTable(tableName);
if (hasTable) {
// tablebuilder methods MUST be synchronous and so you cannot use async inside it, which is why we check the column here
const hasColumn = await knex.schema.hasColumn(tableName, full.columnName);
if (hasColumn) {
await knex.schema.alterTable(tableName, async (table)=>{
debug(`renaming column ${full.columnName} to ${short.columnName}`);
table.renameColumn(full.columnName, short.columnName);
});
}
}
}
// migrate table names
for (const tableDiff of diffs.tables){
const hasTable = await knex.schema.hasTable(tableDiff.full.tableName);
if (hasTable) {
debug(`renaming table ${tableDiff.full.tableName} to ${tableDiff.short.tableName}`);
await knex.schema.renameTable(tableDiff.full.tableName, tableDiff.short.tableName);
}
}
},
// Irreversible: the mapping back to full names is not retained.
async down () {
throw new Error('not implemented');
}
};
/**
 * Renames one index from its full (over-long) name to its shortened name
 * using dialect-specific SQL. No-ops when the name is unchanged, for
 * fk-suffixed indexes (schema sync recreates those anyway), and on SQLite
 * (no index-rename support). Failures are logged and swallowed: if schema
 * creation never ran the index doesn't exist yet, and schema sync will
 * create it with the correct name regardless.
 */
const renameIndex = async (knex, db, diff) => {
    const { full, short } = diff;
    const client = db.config.connection.client;
    if (full.indexName === short.indexName) {
        debug(`not renaming index ${full.indexName} because name hasn't changed`);
        return;
    }
    // fk indexes can't easily be renamed; if the loose string match misses
    // one, db sync simply rebuilds it, so this is not critical.
    if (short.indexName.endsWith('fk') || full.indexName.endsWith('fk')) {
        return;
    }
    debug(`renaming index from ${full.indexName} to ${short.indexName}`);
    // Nested transaction so an error (e.g. missing index) can be suppressed
    // without aborting the outer migration transaction. There is no portable
    // "IF EXISTS" for index renames, so suppression is the pragmatic choice.
    try {
        await knex.transaction(async (trx) => {
            switch (client) {
                case 'mysql':
                case 'mariadb':
                    await knex
                        .raw('ALTER TABLE ?? RENAME INDEX ?? TO ??', [full.tableName, full.indexName, short.indexName])
                        .transacting(trx);
                    break;
                case 'pg':
                case 'postgres':
                    await knex
                        .raw('ALTER INDEX ?? RENAME TO ??', [full.indexName, short.indexName])
                        .transacting(trx);
                    break;
                case 'sqlite':
                case 'sqlite3':
                case 'better-sqlite3':
                    debug(`SQLite does not support index renaming, not renaming index ${full.indexName}`);
                    break;
                default:
                    debug(`No db client name matches, not renaming index ${full.indexName}`);
            }
        });
    } catch (err) {
        debug(`error creating index: ${JSON.stringify(err)}`);
    }
};
/**
 * Walks the metadata map (keyed by uid, values are per-model metadata with
 * shortened identifiers) and collects every table, column and index whose
 * shortened name differs from its full (unshortened) name.
 * Note: `index$1` is the bundler-renamed forEach position index, renamed to
 * avoid colliding with the `index` module import.
 */
const findDiffs = (shortMap)=>{
const diffs = {
tables: [],
columns: [],
indexes: []
};
const shortArr = Array.from(shortMap.entries());
shortArr.forEach(([, shortObj], index$1)=>{
// getUnshortenedName maps a shortened identifier back to its original
// full name; a missing mapping is treated as fatal.
const fullTableName = index.identifiers.getUnshortenedName(shortObj.tableName);
if (!fullTableName) {
throw new Error(`Missing full table name for ${shortObj.tableName}`);
}
// find table name diffs
if (shortObj.tableName !== fullTableName) {
diffs.tables.push({
full: {
index: index$1,
key: 'tableName',
tableName: fullTableName
},
short: {
index: index$1,
key: 'tableName',
tableName: shortObj.tableName
}
});
}
// find column name diffs
// eslint-disable-next-line guard-for-in
for(const attrKey in shortObj.attributes){
// Relations have no own column on this table; skip them.
if (shortObj.attributes[attrKey].type === 'relation') {
continue;
}
// TODO: add more type checks so we don't need any
const attr = shortObj.attributes[attrKey];
const shortColumnName = attr.columnName;
const longColumnName = index.identifiers.getUnshortenedName(shortColumnName);
if (!shortColumnName || !longColumnName) {
throw new Error(`missing column name(s) for attribute ${JSON.stringify(attr, null, 2)}`);
}
// Both entries use fullTableName on purpose: columns are renamed
// before tables, while the full table name still exists.
if (shortColumnName && longColumnName && shortColumnName !== longColumnName) {
diffs.columns.push({
short: {
index: index$1,
tableName: fullTableName,
key: `attributes.${attrKey}`,
columnName: shortColumnName
},
full: {
index: index$1,
tableName: fullTableName,
key: `attributes.${attrKey}`,
columnName: longColumnName
}
});
}
}
// find index name diffs
// eslint-disable-next-line guard-for-in
for(const attrKey in shortObj.indexes){
const shortIndexName = shortObj.indexes[attrKey].name;
const longIndexName = index.identifiers.getUnshortenedName(shortIndexName);
if (!longIndexName) {
throw new Error(`Missing full index name for ${shortIndexName}`);
}
if (shortIndexName && longIndexName && shortIndexName !== longIndexName) {
diffs.indexes.push({
short: {
index: index$1,
tableName: fullTableName,
key: `indexes.${attrKey}`,
indexName: shortIndexName
},
full: {
index: index$1,
tableName: fullTableName,
key: `indexes.${attrKey}`,
indexName: longIndexName
}
});
}
}
});
return diffs;
};
exports.renameIdentifiersLongerThanMaxLength = renameIdentifiersLongerThanMaxLength;
//# sourceMappingURL=5.0.0-01-convert-identifiers-long-than-max-length.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,177 @@
import createDebug from 'debug';
import { identifiers } from '../../utils/identifiers/index.mjs';
const debug = createDebug('strapi::database::migration');
/**
 * Renames identifiers (indexes, columns, then tables) from their full
 * names to the shortened names in the current metadata. Step order is
 * significant: indexes and columns are renamed while the original (full)
 * table names still exist, so each step can address rows by that name.
 */
const renameIdentifiersLongerThanMaxLength = {
name: '5.0.0-rename-identifiers-longer-than-max-length',
async up (knex, db) {
const md = db.metadata;
// Compute every table/column/index whose full name differs from its
// shortened name.
const diffs = findDiffs(md);
// migrate indexes before tables so we know to target the original tableName
for (const indexDiff of diffs.indexes){
await renameIndex(knex, db, indexDiff);
}
// migrate columns before table names so we know to target the original tableName
for (const columnDiff of diffs.columns){
const { full, short } = columnDiff;
const tableName = full.tableName;
// Skip tables/columns that don't exist (e.g. schema never created).
const hasTable = await knex.schema.hasTable(tableName);
if (hasTable) {
// tablebuilder methods MUST be synchronous and so you cannot use async inside it, which is why we check the column here
const hasColumn = await knex.schema.hasColumn(tableName, full.columnName);
if (hasColumn) {
await knex.schema.alterTable(tableName, async (table)=>{
debug(`renaming column ${full.columnName} to ${short.columnName}`);
table.renameColumn(full.columnName, short.columnName);
});
}
}
}
// migrate table names
for (const tableDiff of diffs.tables){
const hasTable = await knex.schema.hasTable(tableDiff.full.tableName);
if (hasTable) {
debug(`renaming table ${tableDiff.full.tableName} to ${tableDiff.short.tableName}`);
await knex.schema.renameTable(tableDiff.full.tableName, tableDiff.short.tableName);
}
}
},
// Irreversible: the mapping back to full names is not retained.
async down () {
throw new Error('not implemented');
}
};
/**
 * Renames one index from its full (over-long) name to its shortened name
 * using dialect-specific SQL. No-ops when the name is unchanged, for
 * fk-suffixed indexes (schema sync recreates those anyway), and on SQLite
 * (no index-rename support). Failures are logged and swallowed: if schema
 * creation never ran the index doesn't exist yet, and schema sync will
 * create it with the correct name regardless.
 */
const renameIndex = async (knex, db, diff) => {
    const { full, short } = diff;
    const client = db.config.connection.client;
    if (full.indexName === short.indexName) {
        debug(`not renaming index ${full.indexName} because name hasn't changed`);
        return;
    }
    // fk indexes can't easily be renamed; if the loose string match misses
    // one, db sync simply rebuilds it, so this is not critical.
    if (short.indexName.endsWith('fk') || full.indexName.endsWith('fk')) {
        return;
    }
    debug(`renaming index from ${full.indexName} to ${short.indexName}`);
    // Nested transaction so an error (e.g. missing index) can be suppressed
    // without aborting the outer migration transaction. There is no portable
    // "IF EXISTS" for index renames, so suppression is the pragmatic choice.
    try {
        await knex.transaction(async (trx) => {
            switch (client) {
                case 'mysql':
                case 'mariadb':
                    await knex
                        .raw('ALTER TABLE ?? RENAME INDEX ?? TO ??', [full.tableName, full.indexName, short.indexName])
                        .transacting(trx);
                    break;
                case 'pg':
                case 'postgres':
                    await knex
                        .raw('ALTER INDEX ?? RENAME TO ??', [full.indexName, short.indexName])
                        .transacting(trx);
                    break;
                case 'sqlite':
                case 'sqlite3':
                case 'better-sqlite3':
                    debug(`SQLite does not support index renaming, not renaming index ${full.indexName}`);
                    break;
                default:
                    debug(`No db client name matches, not renaming index ${full.indexName}`);
            }
        });
    } catch (err) {
        debug(`error creating index: ${JSON.stringify(err)}`);
    }
};
/**
 * Walks the metadata map (keyed by uid, values are per-model metadata with
 * shortened identifiers) and collects every table, column and index whose
 * shortened name differs from its full (unshortened) name.
 */
const findDiffs = (shortMap)=>{
const diffs = {
tables: [],
columns: [],
indexes: []
};
const shortArr = Array.from(shortMap.entries());
shortArr.forEach(([, shortObj], index)=>{
// getUnshortenedName maps a shortened identifier back to its original
// full name; a missing mapping is treated as fatal.
const fullTableName = identifiers.getUnshortenedName(shortObj.tableName);
if (!fullTableName) {
throw new Error(`Missing full table name for ${shortObj.tableName}`);
}
// find table name diffs
if (shortObj.tableName !== fullTableName) {
diffs.tables.push({
full: {
index,
key: 'tableName',
tableName: fullTableName
},
short: {
index,
key: 'tableName',
tableName: shortObj.tableName
}
});
}
// find column name diffs
// eslint-disable-next-line guard-for-in
for(const attrKey in shortObj.attributes){
// Relations have no own column on this table; skip them.
if (shortObj.attributes[attrKey].type === 'relation') {
continue;
}
// TODO: add more type checks so we don't need any
const attr = shortObj.attributes[attrKey];
const shortColumnName = attr.columnName;
const longColumnName = identifiers.getUnshortenedName(shortColumnName);
if (!shortColumnName || !longColumnName) {
throw new Error(`missing column name(s) for attribute ${JSON.stringify(attr, null, 2)}`);
}
// Both entries use fullTableName on purpose: columns are renamed
// before tables, while the full table name still exists.
if (shortColumnName && longColumnName && shortColumnName !== longColumnName) {
diffs.columns.push({
short: {
index,
tableName: fullTableName,
key: `attributes.${attrKey}`,
columnName: shortColumnName
},
full: {
index,
tableName: fullTableName,
key: `attributes.${attrKey}`,
columnName: longColumnName
}
});
}
}
// find index name diffs
// eslint-disable-next-line guard-for-in
for(const attrKey in shortObj.indexes){
const shortIndexName = shortObj.indexes[attrKey].name;
const longIndexName = identifiers.getUnshortenedName(shortIndexName);
if (!longIndexName) {
throw new Error(`Missing full index name for ${shortIndexName}`);
}
if (shortIndexName && longIndexName && shortIndexName !== longIndexName) {
diffs.indexes.push({
short: {
index,
tableName: fullTableName,
key: `indexes.${attrKey}`,
indexName: shortIndexName
},
full: {
index,
tableName: fullTableName,
key: `indexes.${attrKey}`,
indexName: longIndexName
}
});
}
}
});
return diffs;
};
export { renameIdentifiersLongerThanMaxLength };
//# sourceMappingURL=5.0.0-01-convert-identifiers-long-than-max-length.mjs.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,3 @@
import type { Migration } from '../common';
/**
 * Migration that adds a `document_id` column to content-type tables and
 * backfills it with generated ids.
 */
export declare const createdDocumentId: Migration;
//# sourceMappingURL=5.0.0-02-document-id.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"5.0.0-02-document-id.d.ts","sourceRoot":"","sources":["../../../src/migrations/internal-migrations/5.0.0-02-document-id.ts"],"names":[],"mappings":"AAiBA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,WAAW,CAAC;AA2I3C,eAAO,MAAM,iBAAiB,EAAE,SAgC/B,CAAC"}

View File

@@ -0,0 +1,125 @@
'use strict';
var cuid2 = require('@paralleldrive/cuid2');
var _ = require('lodash/fp');
/**
 * Dialect-specific queries that fetch one row still missing a document_id:
 * its id plus a comma-separated aggregation (`other_ids`) of the ids linked
 * to it through the localizations join table. Keyed by knex client name.
 * Result shapes differ per driver, hence the per-dialect unwrapping.
 */
const QUERIES = {
// postgres: knex.raw resolves to an object whose rows live under `.rows`.
async postgres (knex, params) {
const res = await knex.raw(`
 SELECT :tableName:.id as id, string_agg(DISTINCT :inverseJoinColumn:::character varying, ',') as other_ids
 FROM :tableName:
 LEFT JOIN :joinTableName: ON :tableName:.id = :joinTableName:.:joinColumn:
 WHERE :tableName:.document_id IS NULL
 GROUP BY :tableName:.id, :joinTableName:.:joinColumn:
 LIMIT 1;
 `, params);
return res.rows;
},
// mysql: knex.raw resolves to [rows, fields]; only the rows are needed.
async mysql (knex, params) {
const [res] = await knex.raw(`
 SELECT :tableName:.id as id, group_concat(DISTINCT :inverseJoinColumn:) as other_ids
 FROM :tableName:
 LEFT JOIN :joinTableName: ON :tableName:.id = :joinTableName:.:joinColumn:
 WHERE :tableName:.document_id IS NULL
 GROUP BY :tableName:.id, :joinTableName:.:joinColumn:
 LIMIT 1;
 `, params);
return res;
},
// sqlite: the raw result is returned as-is — presumably already an array
// of rows for the sqlite drivers. NOTE(review): this query groups only by
// the join column (unlike pg/mysql); confirm intentional.
async sqlite (knex, params) {
return knex.raw(`
 SELECT :tableName:.id as id, group_concat(DISTINCT :inverseJoinColumn:) as other_ids
 FROM :tableName:
 LEFT JOIN :joinTableName: ON :tableName:.id = :joinTableName:.:joinColumn:
 WHERE :tableName:.document_id IS NULL
 GROUP BY :joinTableName:.:joinColumn:
 LIMIT 1;
 `, params);
}
};
/**
 * Returns the ids of the next batch of rows that still lack a document_id:
 * one "main" row id plus the ids aggregated in its `other_ids` column (the
 * rows linked through the localizations join table). Empty array when
 * nothing is left to migrate.
 */
async function getNextIdsToCreateDocumentId(db, knex, { joinColumn, inverseJoinColumn, tableName, joinTableName }) {
    // Dispatch to the dialect-specific query for this knex client.
    const rows = await QUERIES[db.dialect.client](knex, {
        joinColumn,
        inverseJoinColumn,
        tableName,
        joinTableName
    });
    if (rows.length === 0) {
        return [];
    }
    const [row] = rows;
    const linkedIds = row.other_ids ? row.other_ids.split(',').map((value) => parseInt(value, 10)) : [];
    return [row.id, ...linkedIds];
}
// Migrate document ids for tables that have localizations
const migrateDocumentIdsWithLocalizations = async (db, knex, meta)=>{
const singularName = meta.singularName.toLowerCase();
const joinColumn = _.snakeCase(`${singularName}_id`);
const inverseJoinColumn = _.snakeCase(`inv_${singularName}_id`);
let ids;
do {
ids = await getNextIdsToCreateDocumentId(db, knex, {
joinColumn,
inverseJoinColumn,
tableName: meta.tableName,
joinTableName: _.snakeCase(`${meta.tableName}_localizations_links`)
});
if (ids.length > 0) {
await knex(meta.tableName).update({
document_id: cuid2.createId()
}).whereIn('id', ids);
}
}while (ids.length > 0)
};
// Migrate document ids for tables that don't have localizations
const migrationDocumentIds = async (db, knex, meta)=>{
let updatedRows;
do {
updatedRows = await knex(meta.tableName).update({
document_id: cuid2.createId()
}).whereIn('id', knex(meta.tableName).select('id').from(knex(meta.tableName).select('id').whereNull('document_id').limit(1).as('sub_query')));
}while (updatedRows > 0)
};
// Adds a (nullable) string `document_id` column to the given table.
async function createDocumentIdColumn(knex, tableName) {
    const addColumn = (table) => {
        table.string('document_id');
    };
    await knex.schema.alterTable(tableName, addColumn);
}
// True when a `<tableName>_localizations_links` join table exists for the
// given table (i.e. this content type has localization links to migrate).
async function hasLocalizationsJoinTable(knex, tableName) {
    return knex.schema.hasTable(_.snakeCase(`${tableName}_localizations_links`));
}
/**
 * Adds and backfills the `document_id` column for every content-type table
 * whose metadata declares a `documentId` attribute. Tables that already
 * have the column are assumed migrated and skipped.
 */
const createdDocumentId = {
name: '5.0.0-02-created-document-id',
async up (knex, db) {
// Walk all known models and migrate each backing table that exists.
for (const meta of db.metadata.values()){
const hasTable = await knex.schema.hasTable(meta.tableName);
if (!hasTable) {
continue;
}
if ('documentId' in meta.attributes) {
// add column if doesn't exist
const hasDocumentIdColumn = await knex.schema.hasColumn(meta.tableName, 'document_id');
if (hasDocumentIdColumn) {
continue;
}
await createDocumentIdColumn(knex, meta.tableName);
// Localized tables share one document_id across localization groups;
// others get a fresh id per row.
if (await hasLocalizationsJoinTable(knex, meta.tableName)) {
await migrateDocumentIdsWithLocalizations(db, knex, meta);
} else {
await migrationDocumentIds(db, knex, meta);
}
}
}
},
// Irreversible: generated ids cannot be meaningfully removed.
async down () {
throw new Error('not implemented');
}
};
exports.createdDocumentId = createdDocumentId;
//# sourceMappingURL=5.0.0-02-document-id.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,123 @@
import { createId } from '@paralleldrive/cuid2';
import { snakeCase } from 'lodash/fp';
/**
 * Dialect-specific queries that fetch one row still missing a document_id:
 * its id plus a comma-separated aggregation (`other_ids`) of the ids linked
 * to it through the localizations join table. Keyed by knex client name.
 * Result shapes differ per driver, hence the per-dialect unwrapping.
 */
const QUERIES = {
// postgres: knex.raw resolves to an object whose rows live under `.rows`.
async postgres (knex, params) {
const res = await knex.raw(`
 SELECT :tableName:.id as id, string_agg(DISTINCT :inverseJoinColumn:::character varying, ',') as other_ids
 FROM :tableName:
 LEFT JOIN :joinTableName: ON :tableName:.id = :joinTableName:.:joinColumn:
 WHERE :tableName:.document_id IS NULL
 GROUP BY :tableName:.id, :joinTableName:.:joinColumn:
 LIMIT 1;
 `, params);
return res.rows;
},
// mysql: knex.raw resolves to [rows, fields]; only the rows are needed.
async mysql (knex, params) {
const [res] = await knex.raw(`
 SELECT :tableName:.id as id, group_concat(DISTINCT :inverseJoinColumn:) as other_ids
 FROM :tableName:
 LEFT JOIN :joinTableName: ON :tableName:.id = :joinTableName:.:joinColumn:
 WHERE :tableName:.document_id IS NULL
 GROUP BY :tableName:.id, :joinTableName:.:joinColumn:
 LIMIT 1;
 `, params);
return res;
},
// sqlite: the raw result is returned as-is — presumably already an array
// of rows for the sqlite drivers. NOTE(review): this query groups only by
// the join column (unlike pg/mysql); confirm intentional.
async sqlite (knex, params) {
return knex.raw(`
 SELECT :tableName:.id as id, group_concat(DISTINCT :inverseJoinColumn:) as other_ids
 FROM :tableName:
 LEFT JOIN :joinTableName: ON :tableName:.id = :joinTableName:.:joinColumn:
 WHERE :tableName:.document_id IS NULL
 GROUP BY :joinTableName:.:joinColumn:
 LIMIT 1;
 `, params);
}
};
/**
 * Returns the ids of the next batch of rows that still lack a document_id:
 * one "main" row id plus the ids aggregated in its `other_ids` column (the
 * rows linked through the localizations join table). Empty array when
 * nothing is left to migrate.
 */
async function getNextIdsToCreateDocumentId(db, knex, { joinColumn, inverseJoinColumn, tableName, joinTableName }) {
    // Dispatch to the dialect-specific query for this knex client.
    const rows = await QUERIES[db.dialect.client](knex, {
        joinColumn,
        inverseJoinColumn,
        tableName,
        joinTableName
    });
    if (rows.length === 0) {
        return [];
    }
    const [row] = rows;
    const linkedIds = row.other_ids ? row.other_ids.split(',').map((value) => parseInt(value, 10)) : [];
    return [row.id, ...linkedIds];
}
// Migrate document ids for tables that have localizations
// Migrate document ids for tables that have localizations.
// Each iteration picks one entity still missing a document_id together with
// its linked localization rows, and assigns the same freshly generated id
// to the whole group; loops until no such rows remain.
const migrateDocumentIdsWithLocalizations = async (db, knex, meta)=>{
const singularName = meta.singularName.toLowerCase();
// Column names in the `<table>_localizations_links` join table.
const joinColumn = snakeCase(`${singularName}_id`);
const inverseJoinColumn = snakeCase(`inv_${singularName}_id`);
let ids;
do {
ids = await getNextIdsToCreateDocumentId(db, knex, {
joinColumn,
inverseJoinColumn,
tableName: meta.tableName,
joinTableName: snakeCase(`${meta.tableName}_localizations_links`)
});
if (ids.length > 0) {
// Same document_id for the entity and all of its localizations.
await knex(meta.tableName).update({
document_id: createId()
}).whereIn('id', ids);
}
}while (ids.length > 0)
};
// Migrate document ids for tables that don't have localizations
// Migrate document ids for tables that don't have localizations.
// Updates one row per iteration (LIMIT 1 subquery) until the driver reports
// zero updated rows. The doubly nested subquery wraps the limited select —
// presumably to work around MySQL's restriction on selecting from the table
// being updated (TODO confirm). `db` is unused but kept for a uniform
// signature with the localized variant.
const migrationDocumentIds = async (db, knex, meta)=>{
let updatedRows;
do {
updatedRows = await knex(meta.tableName).update({
document_id: createId()
}).whereIn('id', knex(meta.tableName).select('id').from(knex(meta.tableName).select('id').whereNull('document_id').limit(1).as('sub_query')));
}while (updatedRows > 0)
};
// Adds a (nullable) string `document_id` column to the given table.
async function createDocumentIdColumn(knex, tableName) {
    const addColumn = (table) => {
        table.string('document_id');
    };
    await knex.schema.alterTable(tableName, addColumn);
}
// True when a `<tableName>_localizations_links` join table exists for the
// given table (i.e. this content type has localization links to migrate).
async function hasLocalizationsJoinTable(knex, tableName) {
    return knex.schema.hasTable(snakeCase(`${tableName}_localizations_links`));
}
/**
 * Adds and backfills the `document_id` column for every content-type table
 * whose metadata declares a `documentId` attribute. Tables that already
 * have the column are assumed migrated and skipped.
 */
const createdDocumentId = {
name: '5.0.0-02-created-document-id',
async up (knex, db) {
// Walk all known models and migrate each backing table that exists.
for (const meta of db.metadata.values()){
const hasTable = await knex.schema.hasTable(meta.tableName);
if (!hasTable) {
continue;
}
if ('documentId' in meta.attributes) {
// add column if doesn't exist
const hasDocumentIdColumn = await knex.schema.hasColumn(meta.tableName, 'document_id');
if (hasDocumentIdColumn) {
continue;
}
await createDocumentIdColumn(knex, meta.tableName);
// Localized tables share one document_id across localization groups;
// others get a fresh id per row.
if (await hasLocalizationsJoinTable(knex, meta.tableName)) {
await migrateDocumentIdsWithLocalizations(db, knex, meta);
} else {
await migrationDocumentIds(db, knex, meta);
}
}
}
},
// Irreversible: generated ids cannot be meaningfully removed.
async down () {
throw new Error('not implemented');
}
};
export { createdDocumentId };
//# sourceMappingURL=5.0.0-02-document-id.mjs.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,3 @@
import type { Migration } from '../common';
export declare const createdLocale: Migration;
//# sourceMappingURL=5.0.0-03-locale.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"5.0.0-03-locale.d.ts","sourceRoot":"","sources":["../../../src/migrations/internal-migrations/5.0.0-03-locale.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,WAAW,CAAC;AAe3C,eAAO,MAAM,aAAa,EAAE,SA4B3B,CAAC"}

View File

@@ -0,0 +1,41 @@
'use strict';
/**
* In v4, content types with disabled i18n did not have any locale column.
* In v5, we need to add a `locale` column to all content types.
* Other downstream migrations will make use of this column.
*
* This function creates the `locale` column if it doesn't exist.
*/ const createLocaleColumn = async (db, tableName)=>{
await db.schema.alterTable(tableName, (table)=>{
table.string('locale');
});
};
const createdLocale = {
name: '5.0.0-03-created-locale',
async up (knex, db) {
for (const meta of db.metadata.values()){
const hasTable = await knex.schema.hasTable(meta.tableName);
if (!hasTable) {
continue;
}
// Ignore non-content types
const uid = meta.uid;
const model = strapi.getModel(uid);
if (!model) {
continue;
}
// Create locale column if it doesn't exist
const hasLocaleColumn = await knex.schema.hasColumn(meta.tableName, 'locale');
if (meta.attributes.locale && !hasLocaleColumn) {
await createLocaleColumn(knex, meta.tableName);
}
}
},
async down () {
throw new Error('not implemented');
}
};
exports.createdLocale = createdLocale;
//# sourceMappingURL=5.0.0-03-locale.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"5.0.0-03-locale.js","sources":["../../../src/migrations/internal-migrations/5.0.0-03-locale.ts"],"sourcesContent":["import type { Knex } from 'knex';\n\nimport type { Migration } from '../common';\n\n/**\n * In v4, content types with disabled i18n did not have any locale column.\n * In v5, we need to add a `locale` column to all content types.\n * Other downstream migrations will make use of this column.\n *\n * This function creates the `locale` column if it doesn't exist.\n */\nconst createLocaleColumn = async (db: Knex, tableName: string) => {\n await db.schema.alterTable(tableName, (table) => {\n table.string('locale');\n });\n};\n\nexport const createdLocale: Migration = {\n name: '5.0.0-03-created-locale',\n async up(knex, db) {\n for (const meta of db.metadata.values()) {\n const hasTable = await knex.schema.hasTable(meta.tableName);\n\n if (!hasTable) {\n continue;\n }\n\n // Ignore non-content types\n const uid = meta.uid;\n const model = strapi.getModel(uid);\n if (!model) {\n continue;\n }\n\n // Create locale column if it doesn't exist\n const hasLocaleColumn = await knex.schema.hasColumn(meta.tableName, 'locale');\n\n if (meta.attributes.locale && !hasLocaleColumn) {\n await createLocaleColumn(knex, meta.tableName);\n }\n }\n },\n async down() {\n throw new Error('not implemented');\n 
},\n};\n"],"names":["createLocaleColumn","db","tableName","schema","alterTable","table","string","createdLocale","name","up","knex","meta","metadata","values","hasTable","uid","model","strapi","getModel","hasLocaleColumn","hasColumn","attributes","locale","down","Error"],"mappings":";;AAIA;;;;;;IAOA,MAAMA,kBAAqB,GAAA,OAAOC,EAAUC,EAAAA,SAAAA,GAAAA;AAC1C,IAAA,MAAMD,GAAGE,MAAM,CAACC,UAAU,CAACF,WAAW,CAACG,KAAAA,GAAAA;AACrCA,QAAAA,KAAAA,CAAMC,MAAM,CAAC,QAAA,CAAA;AACf,KAAA,CAAA;AACF,CAAA;MAEaC,aAA2B,GAAA;IACtCC,IAAM,EAAA,yBAAA;IACN,MAAMC,EAAAA,CAAAA,CAAGC,IAAI,EAAET,EAAE,EAAA;AACf,QAAA,KAAK,MAAMU,IAAQV,IAAAA,EAAAA,CAAGW,QAAQ,CAACC,MAAM,EAAI,CAAA;YACvC,MAAMC,QAAAA,GAAW,MAAMJ,IAAKP,CAAAA,MAAM,CAACW,QAAQ,CAACH,KAAKT,SAAS,CAAA;AAE1D,YAAA,IAAI,CAACY,QAAU,EAAA;AACb,gBAAA;AACF;;YAGA,MAAMC,GAAAA,GAAMJ,KAAKI,GAAG;YACpB,MAAMC,KAAAA,GAAQC,MAAOC,CAAAA,QAAQ,CAACH,GAAAA,CAAAA;AAC9B,YAAA,IAAI,CAACC,KAAO,EAAA;AACV,gBAAA;AACF;;YAGA,MAAMG,eAAAA,GAAkB,MAAMT,IAAKP,CAAAA,MAAM,CAACiB,SAAS,CAACT,IAAKT,CAAAA,SAAS,EAAE,QAAA,CAAA;AAEpE,YAAA,IAAIS,KAAKU,UAAU,CAACC,MAAM,IAAI,CAACH,eAAiB,EAAA;gBAC9C,MAAMnB,kBAAAA,CAAmBU,IAAMC,EAAAA,IAAAA,CAAKT,SAAS,CAAA;AAC/C;AACF;AACF,KAAA;IACA,MAAMqB,IAAAA,CAAAA,GAAAA;AACJ,QAAA,MAAM,IAAIC,KAAM,CAAA,iBAAA,CAAA;AAClB;AACF;;;;"}

View File

@@ -0,0 +1,39 @@
/**
* In v4, content types with disabled i18n did not have any locale column.
* In v5, we need to add a `locale` column to all content types.
* Other downstream migrations will make use of this column.
*
* This function creates the `locale` column if it doesn't exist.
*/ const createLocaleColumn = async (db, tableName)=>{
await db.schema.alterTable(tableName, (table)=>{
table.string('locale');
});
};
const createdLocale = {
name: '5.0.0-03-created-locale',
async up (knex, db) {
for (const meta of db.metadata.values()){
const hasTable = await knex.schema.hasTable(meta.tableName);
if (!hasTable) {
continue;
}
// Ignore non-content types
const uid = meta.uid;
const model = strapi.getModel(uid);
if (!model) {
continue;
}
// Create locale column if it doesn't exist
const hasLocaleColumn = await knex.schema.hasColumn(meta.tableName, 'locale');
if (meta.attributes.locale && !hasLocaleColumn) {
await createLocaleColumn(knex, meta.tableName);
}
}
},
async down () {
throw new Error('not implemented');
}
};
export { createdLocale };
//# sourceMappingURL=5.0.0-03-locale.mjs.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"5.0.0-03-locale.mjs","sources":["../../../src/migrations/internal-migrations/5.0.0-03-locale.ts"],"sourcesContent":["import type { Knex } from 'knex';\n\nimport type { Migration } from '../common';\n\n/**\n * In v4, content types with disabled i18n did not have any locale column.\n * In v5, we need to add a `locale` column to all content types.\n * Other downstream migrations will make use of this column.\n *\n * This function creates the `locale` column if it doesn't exist.\n */\nconst createLocaleColumn = async (db: Knex, tableName: string) => {\n await db.schema.alterTable(tableName, (table) => {\n table.string('locale');\n });\n};\n\nexport const createdLocale: Migration = {\n name: '5.0.0-03-created-locale',\n async up(knex, db) {\n for (const meta of db.metadata.values()) {\n const hasTable = await knex.schema.hasTable(meta.tableName);\n\n if (!hasTable) {\n continue;\n }\n\n // Ignore non-content types\n const uid = meta.uid;\n const model = strapi.getModel(uid);\n if (!model) {\n continue;\n }\n\n // Create locale column if it doesn't exist\n const hasLocaleColumn = await knex.schema.hasColumn(meta.tableName, 'locale');\n\n if (meta.attributes.locale && !hasLocaleColumn) {\n await createLocaleColumn(knex, meta.tableName);\n }\n }\n },\n async down() {\n throw new Error('not implemented');\n 
},\n};\n"],"names":["createLocaleColumn","db","tableName","schema","alterTable","table","string","createdLocale","name","up","knex","meta","metadata","values","hasTable","uid","model","strapi","getModel","hasLocaleColumn","hasColumn","attributes","locale","down","Error"],"mappings":"AAIA;;;;;;IAOA,MAAMA,kBAAqB,GAAA,OAAOC,EAAUC,EAAAA,SAAAA,GAAAA;AAC1C,IAAA,MAAMD,GAAGE,MAAM,CAACC,UAAU,CAACF,WAAW,CAACG,KAAAA,GAAAA;AACrCA,QAAAA,KAAAA,CAAMC,MAAM,CAAC,QAAA,CAAA;AACf,KAAA,CAAA;AACF,CAAA;MAEaC,aAA2B,GAAA;IACtCC,IAAM,EAAA,yBAAA;IACN,MAAMC,EAAAA,CAAAA,CAAGC,IAAI,EAAET,EAAE,EAAA;AACf,QAAA,KAAK,MAAMU,IAAQV,IAAAA,EAAAA,CAAGW,QAAQ,CAACC,MAAM,EAAI,CAAA;YACvC,MAAMC,QAAAA,GAAW,MAAMJ,IAAKP,CAAAA,MAAM,CAACW,QAAQ,CAACH,KAAKT,SAAS,CAAA;AAE1D,YAAA,IAAI,CAACY,QAAU,EAAA;AACb,gBAAA;AACF;;YAGA,MAAMC,GAAAA,GAAMJ,KAAKI,GAAG;YACpB,MAAMC,KAAAA,GAAQC,MAAOC,CAAAA,QAAQ,CAACH,GAAAA,CAAAA;AAC9B,YAAA,IAAI,CAACC,KAAO,EAAA;AACV,gBAAA;AACF;;YAGA,MAAMG,eAAAA,GAAkB,MAAMT,IAAKP,CAAAA,MAAM,CAACiB,SAAS,CAACT,IAAKT,CAAAA,SAAS,EAAE,QAAA,CAAA;AAEpE,YAAA,IAAIS,KAAKU,UAAU,CAACC,MAAM,IAAI,CAACH,eAAiB,EAAA;gBAC9C,MAAMnB,kBAAAA,CAAmBU,IAAMC,EAAAA,IAAAA,CAAKT,SAAS,CAAA;AAC/C;AACF;AACF,KAAA;IACA,MAAMqB,IAAAA,CAAAA,GAAAA;AACJ,QAAA,MAAM,IAAIC,KAAM,CAAA,iBAAA,CAAA;AAClB;AACF;;;;"}

View File

@@ -0,0 +1,3 @@
import type { Migration } from '../common';
export declare const createdPublishedAt: Migration;
//# sourceMappingURL=5.0.0-04-published-at.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"5.0.0-04-published-at.d.ts","sourceRoot":"","sources":["../../../src/migrations/internal-migrations/5.0.0-04-published-at.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,WAAW,CAAC;AAkB3C,eAAO,MAAM,kBAAkB,EAAE,SA4BhC,CAAC"}

View File

@@ -0,0 +1,45 @@
'use strict';
/**
* In v4, content types with disabled D&P did not have any `published_at` column.
* In v5, we need to add a `published_at` column to all content types.
* Other downstream migrations will make use of this column.
*
* This function creates the `published_at` column if it doesn't exist.
*/ const createPublishedAtColumn = async (db, tableName)=>{
await db.schema.alterTable(tableName, (table)=>{
table.string('published_at');
});
// Non DP content types should have their `published_at` column set to a date
await db(tableName).update({
published_at: new Date()
});
};
const createdPublishedAt = {
name: '5.0.0-04-created-published-at',
async up (knex, db) {
for (const meta of db.metadata.values()){
const hasTable = await knex.schema.hasTable(meta.tableName);
if (!hasTable) {
continue;
}
// Ignore non-content types
const uid = meta.uid;
const model = strapi.getModel(uid);
if (!model) {
continue;
}
// Create publishedAt column if it doesn't exist
const hasPublishedAtColumn = await knex.schema.hasColumn(meta.tableName, 'published_at');
if (meta.attributes.publishedAt && !hasPublishedAtColumn) {
await createPublishedAtColumn(knex, meta.tableName);
}
}
},
async down () {
throw new Error('not implemented');
}
};
exports.createdPublishedAt = createdPublishedAt;
//# sourceMappingURL=5.0.0-04-published-at.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"5.0.0-04-published-at.js","sources":["../../../src/migrations/internal-migrations/5.0.0-04-published-at.ts"],"sourcesContent":["import type { Knex } from 'knex';\n\nimport type { Migration } from '../common';\n\n/**\n * In v4, content types with disabled D&P did not have any `published_at` column.\n * In v5, we need to add a `published_at` column to all content types.\n * Other downstream migrations will make use of this column.\n *\n * This function creates the `published_at` column if it doesn't exist.\n */\nconst createPublishedAtColumn = async (db: Knex, tableName: string) => {\n await db.schema.alterTable(tableName, (table) => {\n table.string('published_at');\n });\n\n // Non DP content types should have their `published_at` column set to a date\n await db(tableName).update({ published_at: new Date() });\n};\n\nexport const createdPublishedAt: Migration = {\n name: '5.0.0-04-created-published-at',\n async up(knex, db) {\n for (const meta of db.metadata.values()) {\n const hasTable = await knex.schema.hasTable(meta.tableName);\n\n if (!hasTable) {\n continue;\n }\n\n // Ignore non-content types\n const uid = meta.uid;\n const model = strapi.getModel(uid);\n if (!model) {\n continue;\n }\n\n // Create publishedAt column if it doesn't exist\n const hasPublishedAtColumn = await knex.schema.hasColumn(meta.tableName, 'published_at');\n\n if (meta.attributes.publishedAt && !hasPublishedAtColumn) {\n await createPublishedAtColumn(knex, meta.tableName);\n }\n }\n },\n async down() {\n throw new Error('not implemented');\n 
},\n};\n"],"names":["createPublishedAtColumn","db","tableName","schema","alterTable","table","string","update","published_at","Date","createdPublishedAt","name","up","knex","meta","metadata","values","hasTable","uid","model","strapi","getModel","hasPublishedAtColumn","hasColumn","attributes","publishedAt","down","Error"],"mappings":";;AAIA;;;;;;IAOA,MAAMA,uBAA0B,GAAA,OAAOC,EAAUC,EAAAA,SAAAA,GAAAA;AAC/C,IAAA,MAAMD,GAAGE,MAAM,CAACC,UAAU,CAACF,WAAW,CAACG,KAAAA,GAAAA;AACrCA,QAAAA,KAAAA,CAAMC,MAAM,CAAC,cAAA,CAAA;AACf,KAAA,CAAA;;IAGA,MAAML,EAAAA,CAAGC,SAAWK,CAAAA,CAAAA,MAAM,CAAC;AAAEC,QAAAA,YAAAA,EAAc,IAAIC,IAAAA;AAAO,KAAA,CAAA;AACxD,CAAA;MAEaC,kBAAgC,GAAA;IAC3CC,IAAM,EAAA,+BAAA;IACN,MAAMC,EAAAA,CAAAA,CAAGC,IAAI,EAAEZ,EAAE,EAAA;AACf,QAAA,KAAK,MAAMa,IAAQb,IAAAA,EAAAA,CAAGc,QAAQ,CAACC,MAAM,EAAI,CAAA;YACvC,MAAMC,QAAAA,GAAW,MAAMJ,IAAKV,CAAAA,MAAM,CAACc,QAAQ,CAACH,KAAKZ,SAAS,CAAA;AAE1D,YAAA,IAAI,CAACe,QAAU,EAAA;AACb,gBAAA;AACF;;YAGA,MAAMC,GAAAA,GAAMJ,KAAKI,GAAG;YACpB,MAAMC,KAAAA,GAAQC,MAAOC,CAAAA,QAAQ,CAACH,GAAAA,CAAAA;AAC9B,YAAA,IAAI,CAACC,KAAO,EAAA;AACV,gBAAA;AACF;;YAGA,MAAMG,oBAAAA,GAAuB,MAAMT,IAAKV,CAAAA,MAAM,CAACoB,SAAS,CAACT,IAAKZ,CAAAA,SAAS,EAAE,cAAA,CAAA;AAEzE,YAAA,IAAIY,KAAKU,UAAU,CAACC,WAAW,IAAI,CAACH,oBAAsB,EAAA;gBACxD,MAAMtB,uBAAAA,CAAwBa,IAAMC,EAAAA,IAAAA,CAAKZ,SAAS,CAAA;AACpD;AACF;AACF,KAAA;IACA,MAAMwB,IAAAA,CAAAA,GAAAA;AACJ,QAAA,MAAM,IAAIC,KAAM,CAAA,iBAAA,CAAA;AAClB;AACF;;;;"}

View File

@@ -0,0 +1,43 @@
/**
* In v4, content types with disabled D&P did not have any `published_at` column.
* In v5, we need to add a `published_at` column to all content types.
* Other downstream migrations will make use of this column.
*
* This function creates the `published_at` column if it doesn't exist.
*/ const createPublishedAtColumn = async (db, tableName)=>{
await db.schema.alterTable(tableName, (table)=>{
table.string('published_at');
});
// Non DP content types should have their `published_at` column set to a date
await db(tableName).update({
published_at: new Date()
});
};
const createdPublishedAt = {
name: '5.0.0-04-created-published-at',
async up (knex, db) {
for (const meta of db.metadata.values()){
const hasTable = await knex.schema.hasTable(meta.tableName);
if (!hasTable) {
continue;
}
// Ignore non-content types
const uid = meta.uid;
const model = strapi.getModel(uid);
if (!model) {
continue;
}
// Create publishedAt column if it doesn't exist
const hasPublishedAtColumn = await knex.schema.hasColumn(meta.tableName, 'published_at');
if (meta.attributes.publishedAt && !hasPublishedAtColumn) {
await createPublishedAtColumn(knex, meta.tableName);
}
}
},
async down () {
throw new Error('not implemented');
}
};
export { createdPublishedAt };
//# sourceMappingURL=5.0.0-04-published-at.mjs.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"5.0.0-04-published-at.mjs","sources":["../../../src/migrations/internal-migrations/5.0.0-04-published-at.ts"],"sourcesContent":["import type { Knex } from 'knex';\n\nimport type { Migration } from '../common';\n\n/**\n * In v4, content types with disabled D&P did not have any `published_at` column.\n * In v5, we need to add a `published_at` column to all content types.\n * Other downstream migrations will make use of this column.\n *\n * This function creates the `published_at` column if it doesn't exist.\n */\nconst createPublishedAtColumn = async (db: Knex, tableName: string) => {\n await db.schema.alterTable(tableName, (table) => {\n table.string('published_at');\n });\n\n // Non DP content types should have their `published_at` column set to a date\n await db(tableName).update({ published_at: new Date() });\n};\n\nexport const createdPublishedAt: Migration = {\n name: '5.0.0-04-created-published-at',\n async up(knex, db) {\n for (const meta of db.metadata.values()) {\n const hasTable = await knex.schema.hasTable(meta.tableName);\n\n if (!hasTable) {\n continue;\n }\n\n // Ignore non-content types\n const uid = meta.uid;\n const model = strapi.getModel(uid);\n if (!model) {\n continue;\n }\n\n // Create publishedAt column if it doesn't exist\n const hasPublishedAtColumn = await knex.schema.hasColumn(meta.tableName, 'published_at');\n\n if (meta.attributes.publishedAt && !hasPublishedAtColumn) {\n await createPublishedAtColumn(knex, meta.tableName);\n }\n }\n },\n async down() {\n throw new Error('not implemented');\n 
},\n};\n"],"names":["createPublishedAtColumn","db","tableName","schema","alterTable","table","string","update","published_at","Date","createdPublishedAt","name","up","knex","meta","metadata","values","hasTable","uid","model","strapi","getModel","hasPublishedAtColumn","hasColumn","attributes","publishedAt","down","Error"],"mappings":"AAIA;;;;;;IAOA,MAAMA,uBAA0B,GAAA,OAAOC,EAAUC,EAAAA,SAAAA,GAAAA;AAC/C,IAAA,MAAMD,GAAGE,MAAM,CAACC,UAAU,CAACF,WAAW,CAACG,KAAAA,GAAAA;AACrCA,QAAAA,KAAAA,CAAMC,MAAM,CAAC,cAAA,CAAA;AACf,KAAA,CAAA;;IAGA,MAAML,EAAAA,CAAGC,SAAWK,CAAAA,CAAAA,MAAM,CAAC;AAAEC,QAAAA,YAAAA,EAAc,IAAIC,IAAAA;AAAO,KAAA,CAAA;AACxD,CAAA;MAEaC,kBAAgC,GAAA;IAC3CC,IAAM,EAAA,+BAAA;IACN,MAAMC,EAAAA,CAAAA,CAAGC,IAAI,EAAEZ,EAAE,EAAA;AACf,QAAA,KAAK,MAAMa,IAAQb,IAAAA,EAAAA,CAAGc,QAAQ,CAACC,MAAM,EAAI,CAAA;YACvC,MAAMC,QAAAA,GAAW,MAAMJ,IAAKV,CAAAA,MAAM,CAACc,QAAQ,CAACH,KAAKZ,SAAS,CAAA;AAE1D,YAAA,IAAI,CAACe,QAAU,EAAA;AACb,gBAAA;AACF;;YAGA,MAAMC,GAAAA,GAAMJ,KAAKI,GAAG;YACpB,MAAMC,KAAAA,GAAQC,MAAOC,CAAAA,QAAQ,CAACH,GAAAA,CAAAA;AAC9B,YAAA,IAAI,CAACC,KAAO,EAAA;AACV,gBAAA;AACF;;YAGA,MAAMG,oBAAAA,GAAuB,MAAMT,IAAKV,CAAAA,MAAM,CAACoB,SAAS,CAACT,IAAKZ,CAAAA,SAAS,EAAE,cAAA,CAAA;AAEzE,YAAA,IAAIY,KAAKU,UAAU,CAACC,WAAW,IAAI,CAACH,oBAAsB,EAAA;gBACxD,MAAMtB,uBAAAA,CAAwBa,IAAMC,EAAAA,IAAAA,CAAKZ,SAAS,CAAA;AACpD;AACF;AACF,KAAA;IACA,MAAMwB,IAAAA,CAAAA,GAAAA;AACJ,QAAA,MAAM,IAAIC,KAAM,CAAA,iBAAA,CAAA;AAClB;AACF;;;;"}

View File

@@ -0,0 +1,3 @@
import type { Migration } from '../common';
export declare const dropSlugFieldsIndex: Migration;
//# sourceMappingURL=5.0.0-05-drop-slug-unique-index.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"5.0.0-05-drop-slug-unique-index.d.ts","sourceRoot":"","sources":["../../../src/migrations/internal-migrations/5.0.0-05-drop-slug-unique-index.ts"],"names":[],"mappings":"AASA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,WAAW,CAAC;AAc3C,eAAO,MAAM,mBAAmB,EAAE,SAmBjC,CAAC"}

View File

@@ -0,0 +1,43 @@
'use strict';
/**
* In V4 slug fields contained a unique index.
* In V5 slug fields should not have a unique index.
*
* This migration drops existing unique indexes from slug fields so downstream migrations
* can work on the data without violating the unique index.
*/ const dropIndex = async (knex, tableName, columnName)=>{
try {
await knex.schema.alterTable(tableName, (table)=>{
// NOTE: Can not use "identifiers" utility, as the 5.0.0-01 migration does not rename this particular index
// to `tableName_columnName_uq`.
table.dropUnique([
columnName
], `${tableName}_${columnName}_unique`);
});
} catch (error) {
// If unique index does not exist, do nothing
}
};
const dropSlugFieldsIndex = {
name: '5.0.0-05-drop-slug-fields-index',
async up (knex, db) {
for (const meta of db.metadata.values()){
const hasTable = await knex.schema.hasTable(meta.tableName);
if (!hasTable) {
continue;
}
for (const attribute of Object.values(meta.attributes)){
if (attribute.type === 'uid' && attribute.columnName) {
await dropIndex(knex, meta.tableName, attribute.columnName);
}
}
}
},
async down () {
throw new Error('not implemented');
}
};
exports.dropSlugFieldsIndex = dropSlugFieldsIndex;
//# sourceMappingURL=5.0.0-05-drop-slug-unique-index.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"5.0.0-05-drop-slug-unique-index.js","sources":["../../../src/migrations/internal-migrations/5.0.0-05-drop-slug-unique-index.ts"],"sourcesContent":["/**\n * In V4 slug fields contained a unique index.\n * In V5 slug fields should not have a unique index.\n *\n * This migration drops existing unique indexes from slug fields so downstream migrations\n * can work on the data without violating the unique index.\n */\nimport type { Knex } from 'knex';\n\nimport type { Migration } from '../common';\n\nconst dropIndex = async (knex: Knex, tableName: string, columnName: string) => {\n try {\n await knex.schema.alterTable(tableName, (table) => {\n // NOTE: Can not use \"identifiers\" utility, as the 5.0.0-01 migration does not rename this particular index\n // to `tableName_columnName_uq`.\n table.dropUnique([columnName], `${tableName}_${columnName}_unique`);\n });\n } catch (error) {\n // If unique index does not exist, do nothing\n }\n};\n\nexport const dropSlugFieldsIndex: Migration = {\n name: '5.0.0-05-drop-slug-fields-index',\n async up(knex, db) {\n for (const meta of db.metadata.values()) {\n const hasTable = await knex.schema.hasTable(meta.tableName);\n if (!hasTable) {\n continue;\n }\n\n for (const attribute of Object.values(meta.attributes)) {\n if (attribute.type === 'uid' && attribute.columnName) {\n await dropIndex(knex, meta.tableName, attribute.columnName);\n }\n }\n }\n },\n async down() {\n throw new Error('not implemented');\n 
},\n};\n"],"names":["dropIndex","knex","tableName","columnName","schema","alterTable","table","dropUnique","error","dropSlugFieldsIndex","name","up","db","meta","metadata","values","hasTable","attribute","Object","attributes","type","down","Error"],"mappings":";;AAAA;;;;;;AAMC,IAKD,MAAMA,SAAAA,GAAY,OAAOC,IAAAA,EAAYC,SAAmBC,EAAAA,UAAAA,GAAAA;IACtD,IAAI;AACF,QAAA,MAAMF,KAAKG,MAAM,CAACC,UAAU,CAACH,WAAW,CAACI,KAAAA,GAAAA;;;AAGvCA,YAAAA,KAAAA,CAAMC,UAAU,CAAC;AAACJ,gBAAAA;AAAW,aAAA,EAAE,CAAC,EAAED,SAAAA,CAAU,CAAC,EAAEC,UAAAA,CAAW,OAAO,CAAC,CAAA;AACpE,SAAA,CAAA;AACF,KAAA,CAAE,OAAOK,KAAO,EAAA;;AAEhB;AACF,CAAA;MAEaC,mBAAiC,GAAA;IAC5CC,IAAM,EAAA,iCAAA;IACN,MAAMC,EAAAA,CAAAA,CAAGV,IAAI,EAAEW,EAAE,EAAA;AACf,QAAA,KAAK,MAAMC,IAAQD,IAAAA,EAAAA,CAAGE,QAAQ,CAACC,MAAM,EAAI,CAAA;YACvC,MAAMC,QAAAA,GAAW,MAAMf,IAAKG,CAAAA,MAAM,CAACY,QAAQ,CAACH,KAAKX,SAAS,CAAA;AAC1D,YAAA,IAAI,CAACc,QAAU,EAAA;AACb,gBAAA;AACF;AAEA,YAAA,KAAK,MAAMC,SAAaC,IAAAA,MAAAA,CAAOH,MAAM,CAACF,IAAAA,CAAKM,UAAU,CAAG,CAAA;AACtD,gBAAA,IAAIF,UAAUG,IAAI,KAAK,KAASH,IAAAA,SAAAA,CAAUd,UAAU,EAAE;AACpD,oBAAA,MAAMH,UAAUC,IAAMY,EAAAA,IAAAA,CAAKX,SAAS,EAAEe,UAAUd,UAAU,CAAA;AAC5D;AACF;AACF;AACF,KAAA;IACA,MAAMkB,IAAAA,CAAAA,GAAAA;AACJ,QAAA,MAAM,IAAIC,KAAM,CAAA,iBAAA,CAAA;AAClB;AACF;;;;"}

View File

@@ -0,0 +1,41 @@
/**
* In V4 slug fields contained a unique index.
* In V5 slug fields should not have a unique index.
*
* This migration drops existing unique indexes from slug fields so downstream migrations
* can work on the data without violating the unique index.
*/ const dropIndex = async (knex, tableName, columnName)=>{
try {
await knex.schema.alterTable(tableName, (table)=>{
// NOTE: Can not use "identifiers" utility, as the 5.0.0-01 migration does not rename this particular index
// to `tableName_columnName_uq`.
table.dropUnique([
columnName
], `${tableName}_${columnName}_unique`);
});
} catch (error) {
// If unique index does not exist, do nothing
}
};
const dropSlugFieldsIndex = {
name: '5.0.0-05-drop-slug-fields-index',
async up (knex, db) {
for (const meta of db.metadata.values()){
const hasTable = await knex.schema.hasTable(meta.tableName);
if (!hasTable) {
continue;
}
for (const attribute of Object.values(meta.attributes)){
if (attribute.type === 'uid' && attribute.columnName) {
await dropIndex(knex, meta.tableName, attribute.columnName);
}
}
}
},
async down () {
throw new Error('not implemented');
}
};
export { dropSlugFieldsIndex };
//# sourceMappingURL=5.0.0-05-drop-slug-unique-index.mjs.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"5.0.0-05-drop-slug-unique-index.mjs","sources":["../../../src/migrations/internal-migrations/5.0.0-05-drop-slug-unique-index.ts"],"sourcesContent":["/**\n * In V4 slug fields contained a unique index.\n * In V5 slug fields should not have a unique index.\n *\n * This migration drops existing unique indexes from slug fields so downstream migrations\n * can work on the data without violating the unique index.\n */\nimport type { Knex } from 'knex';\n\nimport type { Migration } from '../common';\n\nconst dropIndex = async (knex: Knex, tableName: string, columnName: string) => {\n try {\n await knex.schema.alterTable(tableName, (table) => {\n // NOTE: Can not use \"identifiers\" utility, as the 5.0.0-01 migration does not rename this particular index\n // to `tableName_columnName_uq`.\n table.dropUnique([columnName], `${tableName}_${columnName}_unique`);\n });\n } catch (error) {\n // If unique index does not exist, do nothing\n }\n};\n\nexport const dropSlugFieldsIndex: Migration = {\n name: '5.0.0-05-drop-slug-fields-index',\n async up(knex, db) {\n for (const meta of db.metadata.values()) {\n const hasTable = await knex.schema.hasTable(meta.tableName);\n if (!hasTable) {\n continue;\n }\n\n for (const attribute of Object.values(meta.attributes)) {\n if (attribute.type === 'uid' && attribute.columnName) {\n await dropIndex(knex, meta.tableName, attribute.columnName);\n }\n }\n }\n },\n async down() {\n throw new Error('not implemented');\n 
},\n};\n"],"names":["dropIndex","knex","tableName","columnName","schema","alterTable","table","dropUnique","error","dropSlugFieldsIndex","name","up","db","meta","metadata","values","hasTable","attribute","Object","attributes","type","down","Error"],"mappings":"AAAA;;;;;;AAMC,IAKD,MAAMA,SAAAA,GAAY,OAAOC,IAAAA,EAAYC,SAAmBC,EAAAA,UAAAA,GAAAA;IACtD,IAAI;AACF,QAAA,MAAMF,KAAKG,MAAM,CAACC,UAAU,CAACH,WAAW,CAACI,KAAAA,GAAAA;;;AAGvCA,YAAAA,KAAAA,CAAMC,UAAU,CAAC;AAACJ,gBAAAA;AAAW,aAAA,EAAE,CAAC,EAAED,SAAAA,CAAU,CAAC,EAAEC,UAAAA,CAAW,OAAO,CAAC,CAAA;AACpE,SAAA,CAAA;AACF,KAAA,CAAE,OAAOK,KAAO,EAAA;;AAEhB;AACF,CAAA;MAEaC,mBAAiC,GAAA;IAC5CC,IAAM,EAAA,iCAAA;IACN,MAAMC,EAAAA,CAAAA,CAAGV,IAAI,EAAEW,EAAE,EAAA;AACf,QAAA,KAAK,MAAMC,IAAQD,IAAAA,EAAAA,CAAGE,QAAQ,CAACC,MAAM,EAAI,CAAA;YACvC,MAAMC,QAAAA,GAAW,MAAMf,IAAKG,CAAAA,MAAM,CAACY,QAAQ,CAACH,KAAKX,SAAS,CAAA;AAC1D,YAAA,IAAI,CAACc,QAAU,EAAA;AACb,gBAAA;AACF;AAEA,YAAA,KAAK,MAAMC,SAAaC,IAAAA,MAAAA,CAAOH,MAAM,CAACF,IAAAA,CAAKM,UAAU,CAAG,CAAA;AACtD,gBAAA,IAAIF,UAAUG,IAAI,KAAK,KAASH,IAAAA,SAAAA,CAAUd,UAAU,EAAE;AACpD,oBAAA,MAAMH,UAAUC,IAAMY,EAAAA,IAAAA,CAAKX,SAAS,EAAEe,UAAUd,UAAU,CAAA;AAC5D;AACF;AACF;AACF,KAAA;IACA,MAAMkB,IAAAA,CAAAA,GAAAA;AACJ,QAAA,MAAM,IAAIC,KAAM,CAAA,iBAAA,CAAA;AAClB;AACF;;;;"}

View File

@@ -0,0 +1,12 @@
import type { Migration } from '../common';
/**
* List of all the internal migrations. The array order will be the order in which they are executed.
*
* {
* name: 'some-name',
* async up(knex: Knex, db: Database) {},
* async down(knex: Knex, db: Database) {},
* },
*/
export declare const internalMigrations: Migration[];
//# sourceMappingURL=index.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/migrations/internal-migrations/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,WAAW,CAAC;AAO3C;;;;;;;;GAQG;AACH,eAAO,MAAM,kBAAkB,EAAE,SAAS,EAMzC,CAAC"}

View File

@@ -0,0 +1,26 @@
'use strict';
var _5_0_002DocumentId = require('./5.0.0-02-document-id.js');
var _5_0_001ConvertIdentifiersLongThanMaxLength = require('./5.0.0-01-convert-identifiers-long-than-max-length.js');
var _5_0_003Locale = require('./5.0.0-03-locale.js');
var _5_0_004PublishedAt = require('./5.0.0-04-published-at.js');
var _5_0_005DropSlugUniqueIndex = require('./5.0.0-05-drop-slug-unique-index.js');
/**
 * List of all the internal migrations. The array order will be the order in which they are executed.
 *
 * Shape of each entry:
 * {
 *   name: 'some-name',
 *   async up(knex: Knex, db: Database) {},
 *   async down(knex: Knex, db: Database) {},
 * },
 *
 * NOTE(review): the identifier-shortening migration (5.0.0-01) is listed first;
 * the later migrations appear to operate on the renamed identifiers — confirm
 * before reordering.
 */
const internalMigrations = [
    _5_0_001ConvertIdentifiersLongThanMaxLength.renameIdentifiersLongerThanMaxLength,
    _5_0_002DocumentId.createdDocumentId,
    _5_0_003Locale.createdLocale,
    _5_0_004PublishedAt.createdPublishedAt,
    _5_0_005DropSlugUniqueIndex.dropSlugFieldsIndex
];
exports.internalMigrations = internalMigrations;
//# sourceMappingURL=index.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sources":["../../../src/migrations/internal-migrations/index.ts"],"sourcesContent":["import type { Migration } from '../common';\nimport { createdDocumentId } from './5.0.0-02-document-id';\nimport { renameIdentifiersLongerThanMaxLength } from './5.0.0-01-convert-identifiers-long-than-max-length';\nimport { createdLocale } from './5.0.0-03-locale';\nimport { createdPublishedAt } from './5.0.0-04-published-at';\nimport { dropSlugFieldsIndex } from './5.0.0-05-drop-slug-unique-index';\n\n/**\n * List of all the internal migrations. The array order will be the order in which they are executed.\n *\n * {\n * name: 'some-name',\n * async up(knex: Knex, db: Database) {},\n * async down(knex: Knex, db: Database) {},\n * },\n */\nexport const internalMigrations: Migration[] = [\n renameIdentifiersLongerThanMaxLength,\n createdDocumentId,\n createdLocale,\n createdPublishedAt,\n dropSlugFieldsIndex,\n];\n"],"names":["internalMigrations","renameIdentifiersLongerThanMaxLength","createdDocumentId","createdLocale","createdPublishedAt","dropSlugFieldsIndex"],"mappings":";;;;;;;;AAOA;;;;;;;;UASaA,kBAAkC,GAAA;AAC7CC,IAAAA,gFAAAA;AACAC,IAAAA,oCAAAA;AACAC,IAAAA,4BAAAA;AACAC,IAAAA,sCAAAA;AACAC,IAAAA;;;;;"}

View File

@@ -0,0 +1,24 @@
import { createdDocumentId } from './5.0.0-02-document-id.mjs';
import { renameIdentifiersLongerThanMaxLength } from './5.0.0-01-convert-identifiers-long-than-max-length.mjs';
import { createdLocale } from './5.0.0-03-locale.mjs';
import { createdPublishedAt } from './5.0.0-04-published-at.mjs';
import { dropSlugFieldsIndex } from './5.0.0-05-drop-slug-unique-index.mjs';
/**
 * List of all the internal migrations. The array order will be the order in which they are executed.
 *
 * Shape of each entry:
 * {
 *   name: 'some-name',
 *   async up(knex: Knex, db: Database) {},
 *   async down(knex: Knex, db: Database) {},
 * },
 *
 * NOTE(review): the identifier-shortening migration (5.0.0-01) is listed first;
 * the later migrations appear to operate on the renamed identifiers — confirm
 * before reordering.
 */
const internalMigrations = [
    renameIdentifiersLongerThanMaxLength,
    createdDocumentId,
    createdLocale,
    createdPublishedAt,
    dropSlugFieldsIndex
];
export { internalMigrations };
//# sourceMappingURL=index.mjs.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.mjs","sources":["../../../src/migrations/internal-migrations/index.ts"],"sourcesContent":["import type { Migration } from '../common';\nimport { createdDocumentId } from './5.0.0-02-document-id';\nimport { renameIdentifiersLongerThanMaxLength } from './5.0.0-01-convert-identifiers-long-than-max-length';\nimport { createdLocale } from './5.0.0-03-locale';\nimport { createdPublishedAt } from './5.0.0-04-published-at';\nimport { dropSlugFieldsIndex } from './5.0.0-05-drop-slug-unique-index';\n\n/**\n * List of all the internal migrations. The array order will be the order in which they are executed.\n *\n * {\n * name: 'some-name',\n * async up(knex: Knex, db: Database) {},\n * async down(knex: Knex, db: Database) {},\n * },\n */\nexport const internalMigrations: Migration[] = [\n renameIdentifiersLongerThanMaxLength,\n createdDocumentId,\n createdLocale,\n createdPublishedAt,\n dropSlugFieldsIndex,\n];\n"],"names":["internalMigrations","renameIdentifiersLongerThanMaxLength","createdDocumentId","createdLocale","createdPublishedAt","dropSlugFieldsIndex"],"mappings":";;;;;;AAOA;;;;;;;;UASaA,kBAAkC,GAAA;AAC7CC,IAAAA,oCAAAA;AACAC,IAAAA,iBAAAA;AACAC,IAAAA,aAAAA;AACAC,IAAAA,kBAAAA;AACAC,IAAAA;;;;;"}

View File

@@ -0,0 +1,4 @@
import type { InternalMigrationProvider } from './common';
import type { Database } from '..';
export declare const createInternalMigrationProvider: (db: Database) => InternalMigrationProvider;
//# sourceMappingURL=internal.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"internal.d.ts","sourceRoot":"","sources":["../../src/migrations/internal.ts"],"names":[],"mappings":"AAOA,OAAO,KAAK,EAAE,yBAAyB,EAAa,MAAM,UAAU,CAAC;AACrE,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,IAAI,CAAC;AAEnC,eAAO,MAAM,+BAA+B,OAAQ,QAAQ,KAAG,yBA+C9D,CAAC"}

View File

@@ -0,0 +1,63 @@
'use strict';
var umzug = require('umzug');
var common = require('./common.js');
var index = require('./internal-migrations/index.js');
var storage = require('./storage.js');
var logger = require('./logger.js');
const createInternalMigrationProvider = (db)=>{
    const context = {
        db
    };
    // Built-in migrations come first; more can be appended through `register`.
    const registeredMigrations = [
        ...index.internalMigrations
    ];
    // Adapt one of our migrations to the shape umzug expects, wrapping both
    // directions in a transaction.
    const toUmzugMigration = (migration)=>({
            name: migration.name,
            up: common.wrapTransaction(context.db)(migration.up),
            down: common.wrapTransaction(context.db)(migration.down)
        });
    const umzugProvider = new umzug.Umzug({
        storage: storage.createStorage({
            db,
            tableName: 'strapi_migrations_internal'
        }),
        logger: {
            // NOTE: internal migration `info` output is only surfaced at debug level
            info (message) {
                db.logger.debug(logger.transformLogMessage('info', message));
            },
            warn (message) {
                db.logger.warn(logger.transformLogMessage('warn', message));
            },
            error (message) {
                db.logger.error(logger.transformLogMessage('error', message));
            },
            debug (message) {
                db.logger.debug(logger.transformLogMessage('debug', message));
            }
        },
        context,
        migrations: ()=>registeredMigrations.map(toUmzugMigration)
    });
    return {
        async register (migration) {
            registeredMigrations.push(migration);
        },
        async shouldRun () {
            const pending = await umzugProvider.pending();
            return pending.length > 0;
        },
        async up () {
            await umzugProvider.up();
        },
        async down () {
            await umzugProvider.down();
        }
    };
};
exports.createInternalMigrationProvider = createInternalMigrationProvider;
//# sourceMappingURL=internal.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"internal.js","sources":["../../src/migrations/internal.ts"],"sourcesContent":["import { Umzug } from 'umzug';\n\nimport { wrapTransaction } from './common';\nimport { internalMigrations } from './internal-migrations';\nimport { createStorage } from './storage';\nimport { transformLogMessage } from './logger';\n\nimport type { InternalMigrationProvider, Migration } from './common';\nimport type { Database } from '..';\n\nexport const createInternalMigrationProvider = (db: Database): InternalMigrationProvider => {\n const context = { db };\n const migrations: Migration[] = [...internalMigrations];\n\n const umzugProvider = new Umzug({\n storage: createStorage({ db, tableName: 'strapi_migrations_internal' }),\n logger: {\n info(message) {\n // NOTE: only log internal migration in debug mode\n db.logger.debug(transformLogMessage('info', message));\n },\n warn(message) {\n db.logger.warn(transformLogMessage('warn', message));\n },\n error(message) {\n db.logger.error(transformLogMessage('error', message));\n },\n debug(message) {\n db.logger.debug(transformLogMessage('debug', message));\n },\n },\n context,\n migrations: () =>\n migrations.map((migration) => {\n return {\n name: migration.name,\n up: wrapTransaction(context.db)(migration.up),\n down: wrapTransaction(context.db)(migration.down),\n };\n }),\n });\n\n return {\n async register(migration: Migration) {\n migrations.push(migration);\n },\n async shouldRun() {\n const pendingMigrations = await umzugProvider.pending();\n return pendingMigrations.length > 0;\n },\n async up() {\n await umzugProvider.up();\n },\n async down() {\n await umzugProvider.down();\n },\n 
};\n};\n"],"names":["createInternalMigrationProvider","db","context","migrations","internalMigrations","umzugProvider","Umzug","storage","createStorage","tableName","logger","info","message","debug","transformLogMessage","warn","error","map","migration","name","up","wrapTransaction","down","register","push","shouldRun","pendingMigrations","pending","length"],"mappings":";;;;;;;;AAUO,MAAMA,kCAAkC,CAACC,EAAAA,GAAAA;AAC9C,IAAA,MAAMC,OAAU,GAAA;AAAED,QAAAA;AAAG,KAAA;AACrB,IAAA,MAAME,UAA0B,GAAA;AAAIC,QAAAA,GAAAA;AAAmB,KAAA;IAEvD,MAAMC,aAAAA,GAAgB,IAAIC,WAAM,CAAA;AAC9BC,QAAAA,OAAAA,EAASC,qBAAc,CAAA;AAAEP,YAAAA,EAAAA;YAAIQ,SAAW,EAAA;AAA6B,SAAA,CAAA;QACrEC,MAAQ,EAAA;AACNC,YAAAA,IAAAA,CAAAA,CAAKC,OAAO,EAAA;;AAEVX,gBAAAA,EAAAA,CAAGS,MAAM,CAACG,KAAK,CAACC,2BAAoB,MAAQF,EAAAA,OAAAA,CAAAA,CAAAA;AAC9C,aAAA;AACAG,YAAAA,IAAAA,CAAAA,CAAKH,OAAO,EAAA;AACVX,gBAAAA,EAAAA,CAAGS,MAAM,CAACK,IAAI,CAACD,2BAAoB,MAAQF,EAAAA,OAAAA,CAAAA,CAAAA;AAC7C,aAAA;AACAI,YAAAA,KAAAA,CAAAA,CAAMJ,OAAO,EAAA;AACXX,gBAAAA,EAAAA,CAAGS,MAAM,CAACM,KAAK,CAACF,2BAAoB,OAASF,EAAAA,OAAAA,CAAAA,CAAAA;AAC/C,aAAA;AACAC,YAAAA,KAAAA,CAAAA,CAAMD,OAAO,EAAA;AACXX,gBAAAA,EAAAA,CAAGS,MAAM,CAACG,KAAK,CAACC,2BAAoB,OAASF,EAAAA,OAAAA,CAAAA,CAAAA;AAC/C;AACF,SAAA;AACAV,QAAAA,OAAAA;AACAC,QAAAA,UAAAA,EAAY,IACVA,UAAAA,CAAWc,GAAG,CAAC,CAACC,SAAAA,GAAAA;gBACd,OAAO;AACLC,oBAAAA,IAAAA,EAAMD,UAAUC,IAAI;AACpBC,oBAAAA,EAAAA,EAAIC,sBAAgBnB,CAAAA,OAAAA,CAAQD,EAAE,CAAA,CAAEiB,UAAUE,EAAE,CAAA;AAC5CE,oBAAAA,IAAAA,EAAMD,sBAAgBnB,CAAAA,OAAAA,CAAQD,EAAE,CAAA,CAAEiB,UAAUI,IAAI;AAClD,iBAAA;AACF,aAAA;AACJ,KAAA,CAAA;IAEA,OAAO;AACL,QAAA,MAAMC,UAASL,SAAoB,EAAA;AACjCf,YAAAA,UAAAA,CAAWqB,IAAI,CAACN,SAAAA,CAAAA;AAClB,SAAA;QACA,MAAMO,SAAAA,CAAAA,GAAAA;YACJ,MAAMC,iBAAAA,GAAoB,MAAMrB,aAAAA,CAAcsB,OAAO,EAAA;YACrD,OAAOD,iBAAAA,CAAkBE,MAAM,GAAG,CAAA;AACpC,SAAA;QACA,MAAMR,EAAAA,CAAAA,GAAAA;AACJ,YAAA,MAAMf,cAAce,EAAE,EAAA;AACxB,SAAA;QACA,MAAME,IAAAA,CAAAA,GAAAA;AACJ,YAAA,MAAMjB,cAAciB,IAAI,EAAA;AAC1B;AACF,KAAA;AACF;;;;"}

View File

@@ -0,0 +1,61 @@
import { Umzug } from 'umzug';
import { wrapTransaction } from './common.mjs';
import { internalMigrations } from './internal-migrations/index.mjs';
import { createStorage } from './storage.mjs';
import { transformLogMessage } from './logger.mjs';
const createInternalMigrationProvider = (db)=>{
    const context = {
        db
    };
    // Built-in migrations come first; more can be appended through `register`.
    const registeredMigrations = [
        ...internalMigrations
    ];
    // Adapt one of our migrations to the shape umzug expects, wrapping both
    // directions in a transaction.
    const toUmzugMigration = (migration)=>({
            name: migration.name,
            up: wrapTransaction(context.db)(migration.up),
            down: wrapTransaction(context.db)(migration.down)
        });
    const umzugProvider = new Umzug({
        storage: createStorage({
            db,
            tableName: 'strapi_migrations_internal'
        }),
        logger: {
            // NOTE: internal migration `info` output is only surfaced at debug level
            info (message) {
                db.logger.debug(transformLogMessage('info', message));
            },
            warn (message) {
                db.logger.warn(transformLogMessage('warn', message));
            },
            error (message) {
                db.logger.error(transformLogMessage('error', message));
            },
            debug (message) {
                db.logger.debug(transformLogMessage('debug', message));
            }
        },
        context,
        migrations: ()=>registeredMigrations.map(toUmzugMigration)
    });
    return {
        async register (migration) {
            registeredMigrations.push(migration);
        },
        async shouldRun () {
            const pending = await umzugProvider.pending();
            return pending.length > 0;
        },
        async up () {
            await umzugProvider.up();
        },
        async down () {
            await umzugProvider.down();
        }
    };
};
export { createInternalMigrationProvider };
//# sourceMappingURL=internal.mjs.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"internal.mjs","sources":["../../src/migrations/internal.ts"],"sourcesContent":["import { Umzug } from 'umzug';\n\nimport { wrapTransaction } from './common';\nimport { internalMigrations } from './internal-migrations';\nimport { createStorage } from './storage';\nimport { transformLogMessage } from './logger';\n\nimport type { InternalMigrationProvider, Migration } from './common';\nimport type { Database } from '..';\n\nexport const createInternalMigrationProvider = (db: Database): InternalMigrationProvider => {\n const context = { db };\n const migrations: Migration[] = [...internalMigrations];\n\n const umzugProvider = new Umzug({\n storage: createStorage({ db, tableName: 'strapi_migrations_internal' }),\n logger: {\n info(message) {\n // NOTE: only log internal migration in debug mode\n db.logger.debug(transformLogMessage('info', message));\n },\n warn(message) {\n db.logger.warn(transformLogMessage('warn', message));\n },\n error(message) {\n db.logger.error(transformLogMessage('error', message));\n },\n debug(message) {\n db.logger.debug(transformLogMessage('debug', message));\n },\n },\n context,\n migrations: () =>\n migrations.map((migration) => {\n return {\n name: migration.name,\n up: wrapTransaction(context.db)(migration.up),\n down: wrapTransaction(context.db)(migration.down),\n };\n }),\n });\n\n return {\n async register(migration: Migration) {\n migrations.push(migration);\n },\n async shouldRun() {\n const pendingMigrations = await umzugProvider.pending();\n return pendingMigrations.length > 0;\n },\n async up() {\n await umzugProvider.up();\n },\n async down() {\n await umzugProvider.down();\n },\n 
};\n};\n"],"names":["createInternalMigrationProvider","db","context","migrations","internalMigrations","umzugProvider","Umzug","storage","createStorage","tableName","logger","info","message","debug","transformLogMessage","warn","error","map","migration","name","up","wrapTransaction","down","register","push","shouldRun","pendingMigrations","pending","length"],"mappings":";;;;;;AAUO,MAAMA,kCAAkC,CAACC,EAAAA,GAAAA;AAC9C,IAAA,MAAMC,OAAU,GAAA;AAAED,QAAAA;AAAG,KAAA;AACrB,IAAA,MAAME,UAA0B,GAAA;AAAIC,QAAAA,GAAAA;AAAmB,KAAA;IAEvD,MAAMC,aAAAA,GAAgB,IAAIC,KAAM,CAAA;AAC9BC,QAAAA,OAAAA,EAASC,aAAc,CAAA;AAAEP,YAAAA,EAAAA;YAAIQ,SAAW,EAAA;AAA6B,SAAA,CAAA;QACrEC,MAAQ,EAAA;AACNC,YAAAA,IAAAA,CAAAA,CAAKC,OAAO,EAAA;;AAEVX,gBAAAA,EAAAA,CAAGS,MAAM,CAACG,KAAK,CAACC,oBAAoB,MAAQF,EAAAA,OAAAA,CAAAA,CAAAA;AAC9C,aAAA;AACAG,YAAAA,IAAAA,CAAAA,CAAKH,OAAO,EAAA;AACVX,gBAAAA,EAAAA,CAAGS,MAAM,CAACK,IAAI,CAACD,oBAAoB,MAAQF,EAAAA,OAAAA,CAAAA,CAAAA;AAC7C,aAAA;AACAI,YAAAA,KAAAA,CAAAA,CAAMJ,OAAO,EAAA;AACXX,gBAAAA,EAAAA,CAAGS,MAAM,CAACM,KAAK,CAACF,oBAAoB,OAASF,EAAAA,OAAAA,CAAAA,CAAAA;AAC/C,aAAA;AACAC,YAAAA,KAAAA,CAAAA,CAAMD,OAAO,EAAA;AACXX,gBAAAA,EAAAA,CAAGS,MAAM,CAACG,KAAK,CAACC,oBAAoB,OAASF,EAAAA,OAAAA,CAAAA,CAAAA;AAC/C;AACF,SAAA;AACAV,QAAAA,OAAAA;AACAC,QAAAA,UAAAA,EAAY,IACVA,UAAAA,CAAWc,GAAG,CAAC,CAACC,SAAAA,GAAAA;gBACd,OAAO;AACLC,oBAAAA,IAAAA,EAAMD,UAAUC,IAAI;AACpBC,oBAAAA,EAAAA,EAAIC,eAAgBnB,CAAAA,OAAAA,CAAQD,EAAE,CAAA,CAAEiB,UAAUE,EAAE,CAAA;AAC5CE,oBAAAA,IAAAA,EAAMD,eAAgBnB,CAAAA,OAAAA,CAAQD,EAAE,CAAA,CAAEiB,UAAUI,IAAI;AAClD,iBAAA;AACF,aAAA;AACJ,KAAA,CAAA;IAEA,OAAO;AACL,QAAA,MAAMC,UAASL,SAAoB,EAAA;AACjCf,YAAAA,UAAAA,CAAWqB,IAAI,CAACN,SAAAA,CAAAA;AAClB,SAAA;QACA,MAAMO,SAAAA,CAAAA,GAAAA;YACJ,MAAMC,iBAAAA,GAAoB,MAAMrB,aAAAA,CAAcsB,OAAO,EAAA;YACrD,OAAOD,iBAAAA,CAAkBE,MAAM,GAAG,CAAA;AACpC,SAAA;QACA,MAAMR,EAAAA,CAAAA,GAAAA;AACJ,YAAA,MAAMf,cAAce,EAAE,EAAA;AACxB,SAAA;QACA,MAAME,IAAAA,CAAAA,GAAAA;AACJ,YAAA,MAAMjB,cAAciB,IAAI,EAAA;AAC1B;AACF,KAAA;AACF;;;;"}

View File

@@ -0,0 +1,10 @@
/**
 * Normalizes an umzug log message for the database logger.
 * String messages become `{ level, message }`; structured records carrying
 * `event` and `name` are formatted as "[internal migration]: <event> <name>"
 * with a timestamp; any other input yields an empty string.
 */
export declare const transformLogMessage: (level: string, message: unknown) => "" | {
    level: string;
    message: string;
    timestamp?: undefined;
} | {
    level: string;
    message: string;
    timestamp: number;
};
//# sourceMappingURL=logger.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"logger.d.ts","sourceRoot":"","sources":["../../src/migrations/logger.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,mBAAmB,UAAW,MAAM,WAAW,OAAO;;;;;;;;CAiBlE,CAAC"}

View File

@@ -0,0 +1,24 @@
'use strict';
const transformLogMessage = (level, message)=>{
if (typeof message === 'string') {
return {
level,
message
};
}
if (typeof message === 'object' && message !== null) {
if ('event' in message && 'name' in message) {
return {
level,
message: `[internal migration]: ${message.event} ${message?.name}`,
timestamp: Date.now()
};
}
}
// NOTE: the message typing are too loose so in practice we should never arrive here.
return '';
};
exports.transformLogMessage = transformLogMessage;
//# sourceMappingURL=logger.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"logger.js","sources":["../../src/migrations/logger.ts"],"sourcesContent":["export const transformLogMessage = (level: string, message: unknown) => {\n if (typeof message === 'string') {\n return { level, message };\n }\n\n if (typeof message === 'object' && message !== null) {\n if ('event' in message && 'name' in message) {\n return {\n level,\n message: `[internal migration]: ${message.event} ${message?.name}`,\n timestamp: Date.now(),\n };\n }\n }\n\n // NOTE: the message typing are too loose so in practice we should never arrive here.\n return '';\n};\n"],"names":["transformLogMessage","level","message","event","name","timestamp","Date","now"],"mappings":";;AAAO,MAAMA,mBAAsB,GAAA,CAACC,KAAeC,EAAAA,OAAAA,GAAAA;IACjD,IAAI,OAAOA,YAAY,QAAU,EAAA;QAC/B,OAAO;AAAED,YAAAA,KAAAA;AAAOC,YAAAA;AAAQ,SAAA;AAC1B;AAEA,IAAA,IAAI,OAAOA,OAAAA,KAAY,QAAYA,IAAAA,OAAAA,KAAY,IAAM,EAAA;QACnD,IAAI,OAAA,IAAWA,OAAW,IAAA,MAAA,IAAUA,OAAS,EAAA;YAC3C,OAAO;AACLD,gBAAAA,KAAAA;gBACAC,OAAS,EAAA,CAAC,sBAAsB,EAAEA,OAAQC,CAAAA,KAAK,CAAC,CAAC,EAAED,OAASE,EAAAA,IAAAA,CAAK,CAAC;AAClEC,gBAAAA,SAAAA,EAAWC,KAAKC,GAAG;AACrB,aAAA;AACF;AACF;;IAGA,OAAO,EAAA;AACT;;;;"}

View File

@@ -0,0 +1,22 @@
const transformLogMessage = (level, message)=>{
if (typeof message === 'string') {
return {
level,
message
};
}
if (typeof message === 'object' && message !== null) {
if ('event' in message && 'name' in message) {
return {
level,
message: `[internal migration]: ${message.event} ${message?.name}`,
timestamp: Date.now()
};
}
}
// NOTE: the message typing are too loose so in practice we should never arrive here.
return '';
};
export { transformLogMessage };
//# sourceMappingURL=logger.mjs.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"logger.mjs","sources":["../../src/migrations/logger.ts"],"sourcesContent":["export const transformLogMessage = (level: string, message: unknown) => {\n if (typeof message === 'string') {\n return { level, message };\n }\n\n if (typeof message === 'object' && message !== null) {\n if ('event' in message && 'name' in message) {\n return {\n level,\n message: `[internal migration]: ${message.event} ${message?.name}`,\n timestamp: Date.now(),\n };\n }\n }\n\n // NOTE: the message typing are too loose so in practice we should never arrive here.\n return '';\n};\n"],"names":["transformLogMessage","level","message","event","name","timestamp","Date","now"],"mappings":"AAAO,MAAMA,mBAAsB,GAAA,CAACC,KAAeC,EAAAA,OAAAA,GAAAA;IACjD,IAAI,OAAOA,YAAY,QAAU,EAAA;QAC/B,OAAO;AAAED,YAAAA,KAAAA;AAAOC,YAAAA;AAAQ,SAAA;AAC1B;AAEA,IAAA,IAAI,OAAOA,OAAAA,KAAY,QAAYA,IAAAA,OAAAA,KAAY,IAAM,EAAA;QACnD,IAAI,OAAA,IAAWA,OAAW,IAAA,MAAA,IAAUA,OAAS,EAAA;YAC3C,OAAO;AACLD,gBAAAA,KAAAA;gBACAC,OAAS,EAAA,CAAC,sBAAsB,EAAEA,OAAQC,CAAAA,KAAK,CAAC,CAAC,EAAED,OAASE,EAAAA,IAAAA,CAAK,CAAC;AAClEC,gBAAAA,SAAAA,EAAWC,KAAKC,GAAG;AACrB,aAAA;AACF;AACF;;IAGA,OAAO,EAAA;AACT;;;;"}

View File

@@ -0,0 +1,15 @@
import type { Database } from '..';
/** Dependencies required to build a migration storage backend. */
export interface Options {
    db: Database;
    tableName: string;
}
/**
 * Creates an umzug-compatible storage backend that persists migration
 * history in the given database table (columns: id, name, time).
 */
export declare const createStorage: (opts: Options) => {
    logMigration({ name }: {
        name: string;
    }): Promise<void>;
    unlogMigration({ name }: {
        name: string;
    }): Promise<void>;
    executed(): Promise<any>;
};
//# sourceMappingURL=storage.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"storage.d.ts","sourceRoot":"","sources":["../../src/migrations/storage.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,IAAI,CAAC;AAEnC,MAAM,WAAW,OAAO;IACtB,EAAE,EAAE,QAAQ,CAAC;IACb,SAAS,EAAE,MAAM,CAAC;CACnB;AAED,eAAO,MAAM,aAAa,SAAU,OAAO;2BAcV;QAAE,IAAI,EAAE,MAAM,CAAA;KAAE;6BAUd;QAAE,IAAI,EAAE,MAAM,CAAA;KAAE;;CAelD,CAAC"}

View File

@@ -0,0 +1,39 @@
'use strict';
const createStorage = (opts)=>{
const { db, tableName } = opts;
const hasMigrationTable = ()=>db.getSchemaConnection().hasTable(tableName);
const createMigrationTable = ()=>{
return db.getSchemaConnection().createTable(tableName, (table)=>{
table.increments('id');
table.string('name');
table.datetime('time', {
useTz: false
});
});
};
return {
async logMigration ({ name }) {
await db.getConnection().insert({
name,
time: new Date()
}).into(tableName);
},
async unlogMigration ({ name }) {
await db.getConnection(tableName).del().where({
name
});
},
async executed () {
if (!await hasMigrationTable()) {
await createMigrationTable();
return [];
}
const logs = await db.getConnection(tableName).select().from(tableName).orderBy('time');
return logs.map((log)=>log.name);
}
};
};
exports.createStorage = createStorage;
//# sourceMappingURL=storage.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"storage.js","sources":["../../src/migrations/storage.ts"],"sourcesContent":["import type { Database } from '..';\n\nexport interface Options {\n db: Database;\n tableName: string;\n}\n\nexport const createStorage = (opts: Options) => {\n const { db, tableName } = opts;\n\n const hasMigrationTable = () => db.getSchemaConnection().hasTable(tableName);\n\n const createMigrationTable = () => {\n return db.getSchemaConnection().createTable(tableName, (table) => {\n table.increments('id');\n table.string('name');\n table.datetime('time', { useTz: false });\n });\n };\n\n return {\n async logMigration({ name }: { name: string }) {\n await db\n .getConnection()\n .insert({\n name,\n time: new Date(),\n })\n .into(tableName);\n },\n\n async unlogMigration({ name }: { name: string }) {\n await db.getConnection(tableName).del().where({ name });\n },\n\n async executed() {\n if (!(await hasMigrationTable())) {\n await createMigrationTable();\n return [];\n }\n\n const logs = await db.getConnection(tableName).select().from(tableName).orderBy('time');\n\n return logs.map((log: { name: string }) => log.name);\n },\n 
};\n};\n"],"names":["createStorage","opts","db","tableName","hasMigrationTable","getSchemaConnection","hasTable","createMigrationTable","createTable","table","increments","string","datetime","useTz","logMigration","name","getConnection","insert","time","Date","into","unlogMigration","del","where","executed","logs","select","from","orderBy","map","log"],"mappings":";;AAOO,MAAMA,gBAAgB,CAACC,IAAAA,GAAAA;AAC5B,IAAA,MAAM,EAAEC,EAAE,EAAEC,SAAS,EAAE,GAAGF,IAAAA;AAE1B,IAAA,MAAMG,oBAAoB,IAAMF,EAAAA,CAAGG,mBAAmB,EAAA,CAAGC,QAAQ,CAACH,SAAAA,CAAAA;AAElE,IAAA,MAAMI,oBAAuB,GAAA,IAAA;AAC3B,QAAA,OAAOL,GAAGG,mBAAmB,EAAA,CAAGG,WAAW,CAACL,WAAW,CAACM,KAAAA,GAAAA;AACtDA,YAAAA,KAAAA,CAAMC,UAAU,CAAC,IAAA,CAAA;AACjBD,YAAAA,KAAAA,CAAME,MAAM,CAAC,MAAA,CAAA;YACbF,KAAMG,CAAAA,QAAQ,CAAC,MAAQ,EAAA;gBAAEC,KAAO,EAAA;AAAM,aAAA,CAAA;AACxC,SAAA,CAAA;AACF,KAAA;IAEA,OAAO;QACL,MAAMC,YAAAA,CAAAA,CAAa,EAAEC,IAAI,EAAoB,EAAA;AAC3C,YAAA,MAAMb,EACHc,CAAAA,aAAa,EACbC,CAAAA,MAAM,CAAC;AACNF,gBAAAA,IAAAA;AACAG,gBAAAA,IAAAA,EAAM,IAAIC,IAAAA;AACZ,aAAA,CAAA,CACCC,IAAI,CAACjB,SAAAA,CAAAA;AACV,SAAA;QAEA,MAAMkB,cAAAA,CAAAA,CAAe,EAAEN,IAAI,EAAoB,EAAA;AAC7C,YAAA,MAAMb,GAAGc,aAAa,CAACb,WAAWmB,GAAG,EAAA,CAAGC,KAAK,CAAC;AAAER,gBAAAA;AAAK,aAAA,CAAA;AACvD,SAAA;QAEA,MAAMS,QAAAA,CAAAA,GAAAA;YACJ,IAAI,CAAE,MAAMpB,iBAAsB,EAAA,EAAA;gBAChC,MAAMG,oBAAAA,EAAAA;AACN,gBAAA,OAAO,EAAE;AACX;AAEA,YAAA,MAAMkB,IAAO,GAAA,MAAMvB,EAAGc,CAAAA,aAAa,CAACb,SAAAA,CAAAA,CAAWuB,MAAM,EAAA,CAAGC,IAAI,CAACxB,SAAWyB,CAAAA,CAAAA,OAAO,CAAC,MAAA,CAAA;AAEhF,YAAA,OAAOH,KAAKI,GAAG,CAAC,CAACC,GAAAA,GAA0BA,IAAIf,IAAI,CAAA;AACrD;AACF,KAAA;AACF;;;;"}

View File

@@ -0,0 +1,37 @@
const createStorage = (opts)=>{
const { db, tableName } = opts;
const hasMigrationTable = ()=>db.getSchemaConnection().hasTable(tableName);
const createMigrationTable = ()=>{
return db.getSchemaConnection().createTable(tableName, (table)=>{
table.increments('id');
table.string('name');
table.datetime('time', {
useTz: false
});
});
};
return {
async logMigration ({ name }) {
await db.getConnection().insert({
name,
time: new Date()
}).into(tableName);
},
async unlogMigration ({ name }) {
await db.getConnection(tableName).del().where({
name
});
},
async executed () {
if (!await hasMigrationTable()) {
await createMigrationTable();
return [];
}
const logs = await db.getConnection(tableName).select().from(tableName).orderBy('time');
return logs.map((log)=>log.name);
}
};
};
export { createStorage };
//# sourceMappingURL=storage.mjs.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"storage.mjs","sources":["../../src/migrations/storage.ts"],"sourcesContent":["import type { Database } from '..';\n\nexport interface Options {\n db: Database;\n tableName: string;\n}\n\nexport const createStorage = (opts: Options) => {\n const { db, tableName } = opts;\n\n const hasMigrationTable = () => db.getSchemaConnection().hasTable(tableName);\n\n const createMigrationTable = () => {\n return db.getSchemaConnection().createTable(tableName, (table) => {\n table.increments('id');\n table.string('name');\n table.datetime('time', { useTz: false });\n });\n };\n\n return {\n async logMigration({ name }: { name: string }) {\n await db\n .getConnection()\n .insert({\n name,\n time: new Date(),\n })\n .into(tableName);\n },\n\n async unlogMigration({ name }: { name: string }) {\n await db.getConnection(tableName).del().where({ name });\n },\n\n async executed() {\n if (!(await hasMigrationTable())) {\n await createMigrationTable();\n return [];\n }\n\n const logs = await db.getConnection(tableName).select().from(tableName).orderBy('time');\n\n return logs.map((log: { name: string }) => log.name);\n },\n 
};\n};\n"],"names":["createStorage","opts","db","tableName","hasMigrationTable","getSchemaConnection","hasTable","createMigrationTable","createTable","table","increments","string","datetime","useTz","logMigration","name","getConnection","insert","time","Date","into","unlogMigration","del","where","executed","logs","select","from","orderBy","map","log"],"mappings":"AAOO,MAAMA,gBAAgB,CAACC,IAAAA,GAAAA;AAC5B,IAAA,MAAM,EAAEC,EAAE,EAAEC,SAAS,EAAE,GAAGF,IAAAA;AAE1B,IAAA,MAAMG,oBAAoB,IAAMF,EAAAA,CAAGG,mBAAmB,EAAA,CAAGC,QAAQ,CAACH,SAAAA,CAAAA;AAElE,IAAA,MAAMI,oBAAuB,GAAA,IAAA;AAC3B,QAAA,OAAOL,GAAGG,mBAAmB,EAAA,CAAGG,WAAW,CAACL,WAAW,CAACM,KAAAA,GAAAA;AACtDA,YAAAA,KAAAA,CAAMC,UAAU,CAAC,IAAA,CAAA;AACjBD,YAAAA,KAAAA,CAAME,MAAM,CAAC,MAAA,CAAA;YACbF,KAAMG,CAAAA,QAAQ,CAAC,MAAQ,EAAA;gBAAEC,KAAO,EAAA;AAAM,aAAA,CAAA;AACxC,SAAA,CAAA;AACF,KAAA;IAEA,OAAO;QACL,MAAMC,YAAAA,CAAAA,CAAa,EAAEC,IAAI,EAAoB,EAAA;AAC3C,YAAA,MAAMb,EACHc,CAAAA,aAAa,EACbC,CAAAA,MAAM,CAAC;AACNF,gBAAAA,IAAAA;AACAG,gBAAAA,IAAAA,EAAM,IAAIC,IAAAA;AACZ,aAAA,CAAA,CACCC,IAAI,CAACjB,SAAAA,CAAAA;AACV,SAAA;QAEA,MAAMkB,cAAAA,CAAAA,CAAe,EAAEN,IAAI,EAAoB,EAAA;AAC7C,YAAA,MAAMb,GAAGc,aAAa,CAACb,WAAWmB,GAAG,EAAA,CAAGC,KAAK,CAAC;AAAER,gBAAAA;AAAK,aAAA,CAAA;AACvD,SAAA;QAEA,MAAMS,QAAAA,CAAAA,GAAAA;YACJ,IAAI,CAAE,MAAMpB,iBAAsB,EAAA,EAAA;gBAChC,MAAMG,oBAAAA,EAAAA;AACN,gBAAA,OAAO,EAAE;AACX;AAEA,YAAA,MAAMkB,IAAO,GAAA,MAAMvB,EAAGc,CAAAA,aAAa,CAACb,SAAAA,CAAAA,CAAWuB,MAAM,EAAA,CAAGC,IAAI,CAACxB,SAAWyB,CAAAA,CAAAA,OAAO,CAAC,MAAA,CAAA;AAEhF,YAAA,OAAOH,KAAKI,GAAG,CAAC,CAACC,GAAAA,GAA0BA,IAAIf,IAAI,CAAA;AACrD;AACF,KAAA;AACF;;;;"}

View File

@@ -0,0 +1,4 @@
import type { UserMigrationProvider } from './common';
import type { Database } from '..';
/**
 * Creates the provider that runs user-defined migrations (.js/.sql files
 * found in the configured migrations directory).
 */
export declare const createUserMigrationProvider: (db: Database) => UserMigrationProvider;
//# sourceMappingURL=users.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"users.d.ts","sourceRoot":"","sources":["../../src/migrations/users.ts"],"names":[],"mappings":"AAOA,OAAO,KAAK,EAAqB,qBAAqB,EAAE,MAAM,UAAU,CAAC;AACzE,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,IAAI,CAAC;AAiCnC,eAAO,MAAM,2BAA2B,OAAQ,QAAQ,KAAG,qBA2C1D,CAAC"}

View File

@@ -0,0 +1,87 @@
'use strict';
var fse = require('fs-extra');
var umzug = require('umzug');
var storage = require('./storage.js');
var common = require('./common.js');
var logger = require('./logger.js');
// TODO: check multiple commands in one sql statement
const migrationResolver = ({ name, path, context })=>{
const { db } = context;
if (!path) {
throw new Error(`Migration ${name} has no path`);
}
// if sql file run with knex raw
if (path.match(/\.sql$/)) {
const sql = fse.readFileSync(path, 'utf8');
return {
name,
up: common.wrapTransaction(db)((knex)=>knex.raw(sql)),
async down () {
throw new Error('Down migration is not supported for sql files');
}
};
}
// NOTE: we can add some ts register if we want to handle ts migration files at some point
// eslint-disable-next-line @typescript-eslint/no-var-requires
const migration = require(path);
return {
name,
up: common.wrapTransaction(db)(migration.up),
down: common.wrapTransaction(db)(migration.down)
};
};
const createUserMigrationProvider = (db)=>{
    const dir = db.config.settings.migrations.dir;
    // Make sure the user migrations folder exists before umzug globs it.
    fse.ensureDirSync(dir);
    const context = {
        db
    };
    // Forward umzug output to the db logger, one method per level.
    const umzugLogger = {
        info (message) {
            db.logger.info(logger.transformLogMessage('info', message));
        },
        warn (message) {
            db.logger.warn(logger.transformLogMessage('warn', message));
        },
        error (message) {
            db.logger.error(logger.transformLogMessage('error', message));
        },
        debug (message) {
            db.logger.debug(logger.transformLogMessage('debug', message));
        }
    };
    const umzugProvider = new umzug.Umzug({
        storage: storage.createStorage({
            db,
            tableName: 'strapi_migrations'
        }),
        logger: umzugLogger,
        context,
        // Every .js or .sql file in the migrations folder is a candidate migration.
        migrations: {
            glob: [
                '*.{js,sql}',
                {
                    cwd: dir
                }
            ],
            resolve: migrationResolver
        }
    });
    return {
        // Pending work alone is not enough: migrations must also be enabled in config.
        async shouldRun () {
            const pending = await umzugProvider.pending();
            return pending.length > 0 && db.config?.settings?.runMigrations === true;
        },
        async up () {
            await umzugProvider.up();
        },
        async down () {
            await umzugProvider.down();
        }
    };
};
exports.createUserMigrationProvider = createUserMigrationProvider;
//# sourceMappingURL=users.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,85 @@
import { createRequire } from 'node:module';
import fse from 'fs-extra';
import { Umzug } from 'umzug';
import { createStorage } from './storage.mjs';
import { wrapTransaction } from './common.mjs';
import { transformLogMessage } from './logger.mjs';
// TODO: check multiple commands in one sql statement
const migrationResolver = ({ name, path, context })=>{
const { db } = context;
if (!path) {
throw new Error(`Migration ${name} has no path`);
}
// if sql file run with knex raw
if (path.match(/\.sql$/)) {
const sql = fse.readFileSync(path, 'utf8');
return {
name,
up: wrapTransaction(db)((knex)=>knex.raw(sql)),
async down () {
throw new Error('Down migration is not supported for sql files');
}
};
}
// NOTE: we can add some ts register if we want to handle ts migration files at some point
// eslint-disable-next-line @typescript-eslint/no-var-requires
const migration = require(path);
return {
name,
up: wrapTransaction(db)(migration.up),
down: wrapTransaction(db)(migration.down)
};
};
const createUserMigrationProvider = (db)=>{
    const dir = db.config.settings.migrations.dir;
    // Make sure the user migrations folder exists before umzug globs it.
    fse.ensureDirSync(dir);
    const context = {
        db
    };
    // Forward umzug output to the db logger, one method per level.
    const umzugLogger = {
        info (message) {
            db.logger.info(transformLogMessage('info', message));
        },
        warn (message) {
            db.logger.warn(transformLogMessage('warn', message));
        },
        error (message) {
            db.logger.error(transformLogMessage('error', message));
        },
        debug (message) {
            db.logger.debug(transformLogMessage('debug', message));
        }
    };
    const umzugProvider = new Umzug({
        storage: createStorage({
            db,
            tableName: 'strapi_migrations'
        }),
        logger: umzugLogger,
        context,
        // Every .js or .sql file in the migrations folder is a candidate migration.
        migrations: {
            glob: [
                '*.{js,sql}',
                {
                    cwd: dir
                }
            ],
            resolve: migrationResolver
        }
    });
    return {
        // Pending work alone is not enough: migrations must also be enabled in config.
        async shouldRun () {
            const pending = await umzugProvider.pending();
            return pending.length > 0 && db.config?.settings?.runMigrations === true;
        },
        async up () {
            await umzugProvider.up();
        },
        async down () {
            await umzugProvider.down();
        }
    };
};
export { createUserMigrationProvider };
//# sourceMappingURL=users.mjs.map

File diff suppressed because one or more lines are too long