node_modules ignore

This commit is contained in:
2025-05-08 23:43:47 +02:00
parent e19d52f172
commit 4574544c9f
65041 changed files with 10593536 additions and 0 deletions

View File

@@ -0,0 +1,3 @@
import type { Migration } from '../common';
/** Internal migration: renames tables, columns and indexes whose identifiers exceed the database's maximum identifier length. */
export declare const renameIdentifiersLongerThanMaxLength: Migration;
//# sourceMappingURL=5.0.0-01-convert-identifiers-long-than-max-length.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"5.0.0-01-convert-identifiers-long-than-max-length.d.ts","sourceRoot":"","sources":["../../../src/migrations/internal-migrations/5.0.0-01-convert-identifiers-long-than-max-length.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,WAAW,CAAC;AAqB3C,eAAO,MAAM,oCAAoC,EAAE,SA4ClD,CAAC"}

View File

@@ -0,0 +1,179 @@
'use strict';
var createDebug = require('debug');
var index = require('../../utils/identifiers/index.js');
const debug = createDebug('strapi::database::migration');
const renameIdentifiersLongerThanMaxLength = {
  name: '5.0.0-rename-identifiers-longer-than-max-length',
  /**
   * Renames indexes, columns and tables from their full (too-long) names to
   * the shortened names present in the database metadata.
   *
   * Order matters: indexes and columns are renamed first so their statements
   * can still target the original (full) table name.
   */
  async up (knex, db) {
    const md = db.metadata;
    const diffs = findDiffs(md);
    // migrate indexes before tables so we know to target the original tableName
    for (const indexDiff of diffs.indexes) {
      await renameIndex(knex, db, indexDiff);
    }
    // migrate columns before table names so we know to target the original tableName
    for (const columnDiff of diffs.columns) {
      const { full, short } = columnDiff;
      const tableName = full.tableName;
      const hasTable = await knex.schema.hasTable(tableName);
      if (hasTable) {
        // tablebuilder methods MUST be synchronous and so you cannot use async inside it, which is why we check the column here
        const hasColumn = await knex.schema.hasColumn(tableName, full.columnName);
        if (hasColumn) {
          // FIX: the alterTable callback is now synchronous — it was declared
          // `async` even though, per the note above, builder callbacks must not be.
          await knex.schema.alterTable(tableName, (table) => {
            debug(`renaming column ${full.columnName} to ${short.columnName}`);
            table.renameColumn(full.columnName, short.columnName);
          });
        }
      }
    }
    // migrate table names
    for (const tableDiff of diffs.tables) {
      const hasTable = await knex.schema.hasTable(tableDiff.full.tableName);
      if (hasTable) {
        debug(`renaming table ${tableDiff.full.tableName} to ${tableDiff.short.tableName}`);
        await knex.schema.renameTable(tableDiff.full.tableName, tableDiff.short.tableName);
      }
    }
  },
  // Reverting identifier shortening is intentionally unsupported.
  async down () {
    throw new Error('not implemented');
  }
};
// Renames a single index from its full name to its shortened name, using the
// dialect-appropriate DDL. Failures are swallowed (logged via debug) because
// the schema sync will recreate missing indexes anyway.
const renameIndex = async (knex, db, diff) => {
  const client = db.config.connection.client;
  const short = diff.short;
  const full = diff.full;
  if (full.indexName === short.indexName) {
    debug(`not renaming index ${full.indexName} because name hasn't changed`);
    return;
  }
  // fk indexes can't be easily renamed, and will be recreated by db sync
  // if this misses something due to the loose string matching, it's not critical, it just means index will be rebuilt in db sync
  if (short.indexName.endsWith('fk') || full.indexName.endsWith('fk')) {
    return;
  }
  debug(`renaming index from ${full.indexName} to ${short.indexName}`);
  // If schema creation has never actually run before, none of these will exist, and they will throw an error
  // we have no way of running an "if exists" other than a per-dialect manual check, which we won't do
  // because even if it fails for some other reason, the schema sync will recreate them anyway
  // Therefore, we wrap this in a nested transaction (considering we are running this migration in a transaction)
  // so that we can suppress the error
  try {
    await knex.transaction(async (trx) => {
      if (client === 'mysql' || client === 'mariadb') {
        await knex.raw('ALTER TABLE ?? RENAME INDEX ?? TO ??', [
          full.tableName,
          full.indexName,
          short.indexName
        ]).transacting(trx);
      } else if (client === 'pg' || client === 'postgres') {
        await knex.raw('ALTER INDEX ?? RENAME TO ??', [
          full.indexName,
          short.indexName
        ]).transacting(trx);
      } else if ([
        'sqlite',
        'sqlite3',
        'better-sqlite3'
      ].includes(client)) {
        // SQLite doesn't support renaming, so rather than trying to drop/recreate we'll let db sync handle it
        debug(`SQLite does not support index renaming, not renaming index ${full.indexName}`);
      } else {
        debug(`No db client name matches, not renaming index ${full.indexName}`);
      }
    });
  } catch (err) {
    // FIX: this operation is a rename (the previous message said "creating"),
    // and JSON.stringify(err) yields '{}' for Error instances, hiding the cause.
    const reason = err instanceof Error ? err.message : JSON.stringify(err);
    debug(`error renaming index ${full.indexName}: ${reason}`);
  }
};
// Compares every shortened identifier in the metadata map with its original
// (unshortened) name and collects the pairs that differ, grouped by kind.
// Throws when an unshortened name cannot be resolved.
const findDiffs = (shortMap) => {
  const diffs = {
    tables: [],
    columns: [],
    indexes: []
  };
  const shortArr = Array.from(shortMap.entries());
  shortArr.forEach(([, shortObj], index$1) => {
    const fullTableName = index.identifiers.getUnshortenedName(shortObj.tableName);
    if (!fullTableName) {
      throw new Error(`Missing full table name for ${shortObj.tableName}`);
    }
    // find table name diffs
    if (shortObj.tableName !== fullTableName) {
      diffs.tables.push({
        full: {
          index: index$1,
          key: 'tableName',
          tableName: fullTableName
        },
        short: {
          index: index$1,
          key: 'tableName',
          tableName: shortObj.tableName
        }
      });
    }
    // find column name diffs
    // eslint-disable-next-line guard-for-in
    for (const attrKey in shortObj.attributes) {
      // relations have no column of their own on this table
      if (shortObj.attributes[attrKey].type === 'relation') {
        continue;
      }
      // TODO: add more type checks so we don't need any
      const attr = shortObj.attributes[attrKey];
      const shortColumnName = attr.columnName;
      const longColumnName = index.identifiers.getUnshortenedName(shortColumnName);
      if (!shortColumnName || !longColumnName) {
        throw new Error(`missing column name(s) for attribute ${JSON.stringify(attr, null, 2)}`);
      }
      // FIX: both names are guaranteed truthy past the throw above, so the
      // redundant `shortColumnName && longColumnName &&` guard was dropped.
      if (shortColumnName !== longColumnName) {
        diffs.columns.push({
          short: {
            index: index$1,
            tableName: fullTableName,
            key: `attributes.${attrKey}`,
            columnName: shortColumnName
          },
          full: {
            index: index$1,
            tableName: fullTableName,
            key: `attributes.${attrKey}`,
            columnName: longColumnName
          }
        });
      }
    }
    // find index name diffs
    // eslint-disable-next-line guard-for-in
    for (const attrKey in shortObj.indexes) {
      const shortIndexName = shortObj.indexes[attrKey].name;
      const longIndexName = index.identifiers.getUnshortenedName(shortIndexName);
      if (!longIndexName) {
        throw new Error(`Missing full index name for ${shortIndexName}`);
      }
      // longIndexName is guaranteed truthy here; shortIndexName may still be
      // empty, so it stays in the guard (redundant longIndexName check dropped).
      if (shortIndexName && shortIndexName !== longIndexName) {
        diffs.indexes.push({
          short: {
            index: index$1,
            tableName: fullTableName,
            key: `indexes.${attrKey}`,
            indexName: shortIndexName
          },
          full: {
            index: index$1,
            tableName: fullTableName,
            key: `indexes.${attrKey}`,
            indexName: longIndexName
          }
        });
      }
    }
  });
  return diffs;
};
exports.renameIdentifiersLongerThanMaxLength = renameIdentifiersLongerThanMaxLength;
//# sourceMappingURL=5.0.0-01-convert-identifiers-long-than-max-length.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,177 @@
import createDebug from 'debug';
import { identifiers } from '../../utils/identifiers/index.mjs';
const debug = createDebug('strapi::database::migration');
const renameIdentifiersLongerThanMaxLength = {
  name: '5.0.0-rename-identifiers-longer-than-max-length',
  /**
   * Renames indexes, columns and tables from their full (too-long) names to
   * the shortened names present in the database metadata.
   *
   * Order matters: indexes and columns are renamed first so their statements
   * can still target the original (full) table name.
   */
  async up (knex, db) {
    const md = db.metadata;
    const diffs = findDiffs(md);
    // migrate indexes before tables so we know to target the original tableName
    for (const indexDiff of diffs.indexes) {
      await renameIndex(knex, db, indexDiff);
    }
    // migrate columns before table names so we know to target the original tableName
    for (const columnDiff of diffs.columns) {
      const { full, short } = columnDiff;
      const tableName = full.tableName;
      const hasTable = await knex.schema.hasTable(tableName);
      if (hasTable) {
        // tablebuilder methods MUST be synchronous and so you cannot use async inside it, which is why we check the column here
        const hasColumn = await knex.schema.hasColumn(tableName, full.columnName);
        if (hasColumn) {
          // FIX: the alterTable callback is now synchronous — it was declared
          // `async` even though, per the note above, builder callbacks must not be.
          await knex.schema.alterTable(tableName, (table) => {
            debug(`renaming column ${full.columnName} to ${short.columnName}`);
            table.renameColumn(full.columnName, short.columnName);
          });
        }
      }
    }
    // migrate table names
    for (const tableDiff of diffs.tables) {
      const hasTable = await knex.schema.hasTable(tableDiff.full.tableName);
      if (hasTable) {
        debug(`renaming table ${tableDiff.full.tableName} to ${tableDiff.short.tableName}`);
        await knex.schema.renameTable(tableDiff.full.tableName, tableDiff.short.tableName);
      }
    }
  },
  // Reverting identifier shortening is intentionally unsupported.
  async down () {
    throw new Error('not implemented');
  }
};
// Renames a single index from its full name to its shortened name, using the
// dialect-appropriate DDL. Failures are swallowed (logged via debug) because
// the schema sync will recreate missing indexes anyway.
const renameIndex = async (knex, db, diff) => {
  const client = db.config.connection.client;
  const short = diff.short;
  const full = diff.full;
  if (full.indexName === short.indexName) {
    debug(`not renaming index ${full.indexName} because name hasn't changed`);
    return;
  }
  // fk indexes can't be easily renamed, and will be recreated by db sync
  // if this misses something due to the loose string matching, it's not critical, it just means index will be rebuilt in db sync
  if (short.indexName.endsWith('fk') || full.indexName.endsWith('fk')) {
    return;
  }
  debug(`renaming index from ${full.indexName} to ${short.indexName}`);
  // If schema creation has never actually run before, none of these will exist, and they will throw an error
  // we have no way of running an "if exists" other than a per-dialect manual check, which we won't do
  // because even if it fails for some other reason, the schema sync will recreate them anyway
  // Therefore, we wrap this in a nested transaction (considering we are running this migration in a transaction)
  // so that we can suppress the error
  try {
    await knex.transaction(async (trx) => {
      if (client === 'mysql' || client === 'mariadb') {
        await knex.raw('ALTER TABLE ?? RENAME INDEX ?? TO ??', [
          full.tableName,
          full.indexName,
          short.indexName
        ]).transacting(trx);
      } else if (client === 'pg' || client === 'postgres') {
        await knex.raw('ALTER INDEX ?? RENAME TO ??', [
          full.indexName,
          short.indexName
        ]).transacting(trx);
      } else if ([
        'sqlite',
        'sqlite3',
        'better-sqlite3'
      ].includes(client)) {
        // SQLite doesn't support renaming, so rather than trying to drop/recreate we'll let db sync handle it
        debug(`SQLite does not support index renaming, not renaming index ${full.indexName}`);
      } else {
        debug(`No db client name matches, not renaming index ${full.indexName}`);
      }
    });
  } catch (err) {
    // FIX: this operation is a rename (the previous message said "creating"),
    // and JSON.stringify(err) yields '{}' for Error instances, hiding the cause.
    const reason = err instanceof Error ? err.message : JSON.stringify(err);
    debug(`error renaming index ${full.indexName}: ${reason}`);
  }
};
// Compares every shortened identifier in the metadata map with its original
// (unshortened) name and collects the pairs that differ, grouped by kind.
// Throws when an unshortened name cannot be resolved.
const findDiffs = (shortMap) => {
  const diffs = {
    tables: [],
    columns: [],
    indexes: []
  };
  const shortArr = Array.from(shortMap.entries());
  shortArr.forEach(([, shortObj], index) => {
    const fullTableName = identifiers.getUnshortenedName(shortObj.tableName);
    if (!fullTableName) {
      throw new Error(`Missing full table name for ${shortObj.tableName}`);
    }
    // find table name diffs
    if (shortObj.tableName !== fullTableName) {
      diffs.tables.push({
        full: {
          index,
          key: 'tableName',
          tableName: fullTableName
        },
        short: {
          index,
          key: 'tableName',
          tableName: shortObj.tableName
        }
      });
    }
    // find column name diffs
    // eslint-disable-next-line guard-for-in
    for (const attrKey in shortObj.attributes) {
      // relations have no column of their own on this table
      if (shortObj.attributes[attrKey].type === 'relation') {
        continue;
      }
      // TODO: add more type checks so we don't need any
      const attr = shortObj.attributes[attrKey];
      const shortColumnName = attr.columnName;
      const longColumnName = identifiers.getUnshortenedName(shortColumnName);
      if (!shortColumnName || !longColumnName) {
        throw new Error(`missing column name(s) for attribute ${JSON.stringify(attr, null, 2)}`);
      }
      // FIX: both names are guaranteed truthy past the throw above, so the
      // redundant `shortColumnName && longColumnName &&` guard was dropped.
      if (shortColumnName !== longColumnName) {
        diffs.columns.push({
          short: {
            index,
            tableName: fullTableName,
            key: `attributes.${attrKey}`,
            columnName: shortColumnName
          },
          full: {
            index,
            tableName: fullTableName,
            key: `attributes.${attrKey}`,
            columnName: longColumnName
          }
        });
      }
    }
    // find index name diffs
    // eslint-disable-next-line guard-for-in
    for (const attrKey in shortObj.indexes) {
      const shortIndexName = shortObj.indexes[attrKey].name;
      const longIndexName = identifiers.getUnshortenedName(shortIndexName);
      if (!longIndexName) {
        throw new Error(`Missing full index name for ${shortIndexName}`);
      }
      // longIndexName is guaranteed truthy here; shortIndexName may still be
      // empty, so it stays in the guard (redundant longIndexName check dropped).
      if (shortIndexName && shortIndexName !== longIndexName) {
        diffs.indexes.push({
          short: {
            index,
            tableName: fullTableName,
            key: `indexes.${attrKey}`,
            indexName: shortIndexName
          },
          full: {
            index,
            tableName: fullTableName,
            key: `indexes.${attrKey}`,
            indexName: longIndexName
          }
        });
      }
    }
  });
  return diffs;
};
export { renameIdentifiersLongerThanMaxLength };
//# sourceMappingURL=5.0.0-01-convert-identifiers-long-than-max-length.mjs.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,3 @@
import type { Migration } from '../common';
/** Internal migration: adds and backfills the `document_id` column on content-type tables. */
export declare const createdDocumentId: Migration;
//# sourceMappingURL=5.0.0-02-document-id.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"5.0.0-02-document-id.d.ts","sourceRoot":"","sources":["../../../src/migrations/internal-migrations/5.0.0-02-document-id.ts"],"names":[],"mappings":"AAiBA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,WAAW,CAAC;AA2I3C,eAAO,MAAM,iBAAiB,EAAE,SAgC/B,CAAC"}

View File

@@ -0,0 +1,125 @@
'use strict';
var cuid2 = require('@paralleldrive/cuid2');
var _ = require('lodash/fp');
const QUERIES = {
  // Dialect-specific queries. Each returns, for a single entity whose
  // document_id is still NULL, its `id` plus `other_ids`: a comma-separated
  // list of the ids of the rows linked to it through the localizations join
  // table (empty when the entity has no localizations).
  async postgres (knex, params) {
    // `:inverseJoinColumn:::character varying` is an identifier binding
    // followed by a `::character varying` cast (string_agg needs text input).
    const res = await knex.raw(`
      SELECT :tableName:.id as id, string_agg(DISTINCT :inverseJoinColumn:::character varying, ',') as other_ids
      FROM :tableName:
      LEFT JOIN :joinTableName: ON :tableName:.id = :joinTableName:.:joinColumn:
      WHERE :tableName:.document_id IS NULL
      GROUP BY :tableName:.id, :joinTableName:.:joinColumn:
      LIMIT 1;
    `, params);
    return res.rows;
  },
  async mysql (knex, params) {
    // knex.raw on mysql resolves to [rows, fields]; only the rows are needed.
    const [res] = await knex.raw(`
      SELECT :tableName:.id as id, group_concat(DISTINCT :inverseJoinColumn:) as other_ids
      FROM :tableName:
      LEFT JOIN :joinTableName: ON :tableName:.id = :joinTableName:.:joinColumn:
      WHERE :tableName:.document_id IS NULL
      GROUP BY :tableName:.id, :joinTableName:.:joinColumn:
      LIMIT 1;
    `, params);
    return res;
  },
  async sqlite (knex, params) {
    // sqlite resolves directly to the row array, so no unwrapping is needed.
    return knex.raw(`
      SELECT :tableName:.id as id, group_concat(DISTINCT :inverseJoinColumn:) as other_ids
      FROM :tableName:
      LEFT JOIN :joinTableName: ON :tableName:.id = :joinTableName:.:joinColumn:
      WHERE :tableName:.document_id IS NULL
      GROUP BY :joinTableName:.:joinColumn:
      LIMIT 1;
    `, params);
  }
};
// Returns the ids that should share the next document_id: the next entity
// with a NULL document_id, followed by the ids of its linked localizations.
// Returns an empty array when no such entity remains.
const getNextIdsToCreateDocumentId = async (db, knex, { joinColumn, inverseJoinColumn, tableName, joinTableName }) => {
  // Delegate to the dialect-specific query for the current client.
  const rows = await QUERIES[db.dialect.client](knex, {
    joinColumn,
    inverseJoinColumn,
    tableName,
    joinTableName
  });
  if (rows.length === 0) {
    return [];
  }
  const [row] = rows;
  const localizationIds = row.other_ids
    ? row.other_ids.split(',').map((value) => Number.parseInt(value, 10))
    : [];
  return [row.id, ...localizationIds];
};
// Migrate document ids for tables that have localizations: every entity and
// its linked localizations receive one shared document_id per group.
const migrateDocumentIdsWithLocalizations = async (db, knex, meta) => {
  const name = meta.singularName.toLowerCase();
  // Join-table column names follow the snake_case naming convention.
  const queryParams = {
    joinColumn: _.snakeCase(`${name}_id`),
    inverseJoinColumn: _.snakeCase(`inv_${name}_id`),
    tableName: meta.tableName,
    joinTableName: _.snakeCase(`${meta.tableName}_localizations_links`)
  };
  // Process one localization group per iteration until none remain.
  for (;;) {
    const ids = await getNextIdsToCreateDocumentId(db, knex, queryParams);
    if (ids.length === 0) {
      break;
    }
    await knex(meta.tableName)
      .update({ document_id: cuid2.createId() })
      .whereIn('id', ids);
  }
};
// Migrate document ids for tables that don't have localizations: each row
// gets its own fresh document_id, one row per iteration, until none are NULL.
// NOTE(review): the doubly-nested subselect looks like a workaround for
// MySQL's restriction on selecting from the table being updated — confirm.
const migrationDocumentIds = async (db, knex, meta) => {
  let affected;
  do {
    const oneUnsetRow = knex(meta.tableName)
      .select('id')
      .whereNull('document_id')
      .limit(1)
      .as('sub_query');
    affected = await knex(meta.tableName)
      .update({ document_id: cuid2.createId() })
      .whereIn('id', knex(meta.tableName).select('id').from(oneUnsetRow));
  } while (affected > 0);
};
// Adds a nullable string `document_id` column to the given table.
const createDocumentIdColumn = async (knex, tableName) => {
  const addColumn = (table) => {
    table.string('document_id');
  };
  await knex.schema.alterTable(tableName, addColumn);
};
// A table has localizations iff its `<table>_localizations_links` join table
// exists in the database.
const hasLocalizationsJoinTable = async (knex, tableName) => {
  return knex.schema.hasTable(_.snakeCase(`${tableName}_localizations_links`));
};
const createdDocumentId = {
  name: '5.0.0-02-created-document-id',
  /**
   * Adds a `document_id` column to every content-type table whose metadata
   * declares a `documentId` attribute, then backfills it: entities linked
   * through a localizations join table share one id per group; all others get
   * an individual id.
   */
  async up (knex, db) {
    for (const meta of db.metadata.values()) {
      const tableExists = await knex.schema.hasTable(meta.tableName);
      if (!tableExists) {
        continue;
      }
      if (!('documentId' in meta.attributes)) {
        continue;
      }
      // Skip tables that already have the column (already migrated).
      const columnExists = await knex.schema.hasColumn(meta.tableName, 'document_id');
      if (columnExists) {
        continue;
      }
      await createDocumentIdColumn(knex, meta.tableName);
      if (await hasLocalizationsJoinTable(knex, meta.tableName)) {
        await migrateDocumentIdsWithLocalizations(db, knex, meta);
      } else {
        await migrationDocumentIds(db, knex, meta);
      }
    }
  },
  // Reverting this migration is not supported.
  async down () {
    throw new Error('not implemented');
  }
};
exports.createdDocumentId = createdDocumentId;
//# sourceMappingURL=5.0.0-02-document-id.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,123 @@
import { createId } from '@paralleldrive/cuid2';
import { snakeCase } from 'lodash/fp';
const QUERIES = {
  // Dialect-specific queries. Each returns, for a single entity whose
  // document_id is still NULL, its `id` plus `other_ids`: a comma-separated
  // list of the ids of the rows linked to it through the localizations join
  // table (empty when the entity has no localizations).
  async postgres (knex, params) {
    // `:inverseJoinColumn:::character varying` is an identifier binding
    // followed by a `::character varying` cast (string_agg needs text input).
    const res = await knex.raw(`
      SELECT :tableName:.id as id, string_agg(DISTINCT :inverseJoinColumn:::character varying, ',') as other_ids
      FROM :tableName:
      LEFT JOIN :joinTableName: ON :tableName:.id = :joinTableName:.:joinColumn:
      WHERE :tableName:.document_id IS NULL
      GROUP BY :tableName:.id, :joinTableName:.:joinColumn:
      LIMIT 1;
    `, params);
    return res.rows;
  },
  async mysql (knex, params) {
    // knex.raw on mysql resolves to [rows, fields]; only the rows are needed.
    const [res] = await knex.raw(`
      SELECT :tableName:.id as id, group_concat(DISTINCT :inverseJoinColumn:) as other_ids
      FROM :tableName:
      LEFT JOIN :joinTableName: ON :tableName:.id = :joinTableName:.:joinColumn:
      WHERE :tableName:.document_id IS NULL
      GROUP BY :tableName:.id, :joinTableName:.:joinColumn:
      LIMIT 1;
    `, params);
    return res;
  },
  async sqlite (knex, params) {
    // sqlite resolves directly to the row array, so no unwrapping is needed.
    return knex.raw(`
      SELECT :tableName:.id as id, group_concat(DISTINCT :inverseJoinColumn:) as other_ids
      FROM :tableName:
      LEFT JOIN :joinTableName: ON :tableName:.id = :joinTableName:.:joinColumn:
      WHERE :tableName:.document_id IS NULL
      GROUP BY :joinTableName:.:joinColumn:
      LIMIT 1;
    `, params);
  }
};
// Returns the ids that should share the next document_id: the next entity
// with a NULL document_id, followed by the ids of its linked localizations.
// Returns an empty array when no such entity remains.
const getNextIdsToCreateDocumentId = async (db, knex, { joinColumn, inverseJoinColumn, tableName, joinTableName }) => {
  // Delegate to the dialect-specific query for the current client.
  const rows = await QUERIES[db.dialect.client](knex, {
    joinColumn,
    inverseJoinColumn,
    tableName,
    joinTableName
  });
  if (rows.length === 0) {
    return [];
  }
  const [row] = rows;
  const localizationIds = row.other_ids
    ? row.other_ids.split(',').map((value) => Number.parseInt(value, 10))
    : [];
  return [row.id, ...localizationIds];
};
// Migrate document ids for tables that have localizations: every entity and
// its linked localizations receive one shared document_id per group.
const migrateDocumentIdsWithLocalizations = async (db, knex, meta) => {
  const name = meta.singularName.toLowerCase();
  // Join-table column names follow the snake_case naming convention.
  const queryParams = {
    joinColumn: snakeCase(`${name}_id`),
    inverseJoinColumn: snakeCase(`inv_${name}_id`),
    tableName: meta.tableName,
    joinTableName: snakeCase(`${meta.tableName}_localizations_links`)
  };
  // Process one localization group per iteration until none remain.
  for (;;) {
    const ids = await getNextIdsToCreateDocumentId(db, knex, queryParams);
    if (ids.length === 0) {
      break;
    }
    await knex(meta.tableName)
      .update({ document_id: createId() })
      .whereIn('id', ids);
  }
};
// Migrate document ids for tables that don't have localizations: each row
// gets its own fresh document_id, one row per iteration, until none are NULL.
// NOTE(review): the doubly-nested subselect looks like a workaround for
// MySQL's restriction on selecting from the table being updated — confirm.
const migrationDocumentIds = async (db, knex, meta) => {
  let affected;
  do {
    const oneUnsetRow = knex(meta.tableName)
      .select('id')
      .whereNull('document_id')
      .limit(1)
      .as('sub_query');
    affected = await knex(meta.tableName)
      .update({ document_id: createId() })
      .whereIn('id', knex(meta.tableName).select('id').from(oneUnsetRow));
  } while (affected > 0);
};
// Adds a nullable string `document_id` column to the given table.
const createDocumentIdColumn = async (knex, tableName) => {
  const addColumn = (table) => {
    table.string('document_id');
  };
  await knex.schema.alterTable(tableName, addColumn);
};
// A table has localizations iff its `<table>_localizations_links` join table
// exists in the database.
const hasLocalizationsJoinTable = async (knex, tableName) => {
  return knex.schema.hasTable(snakeCase(`${tableName}_localizations_links`));
};
const createdDocumentId = {
  name: '5.0.0-02-created-document-id',
  /**
   * Adds a `document_id` column to every content-type table whose metadata
   * declares a `documentId` attribute, then backfills it: entities linked
   * through a localizations join table share one id per group; all others get
   * an individual id.
   */
  async up (knex, db) {
    for (const meta of db.metadata.values()) {
      const tableExists = await knex.schema.hasTable(meta.tableName);
      if (!tableExists) {
        continue;
      }
      if (!('documentId' in meta.attributes)) {
        continue;
      }
      // Skip tables that already have the column (already migrated).
      const columnExists = await knex.schema.hasColumn(meta.tableName, 'document_id');
      if (columnExists) {
        continue;
      }
      await createDocumentIdColumn(knex, meta.tableName);
      if (await hasLocalizationsJoinTable(knex, meta.tableName)) {
        await migrateDocumentIdsWithLocalizations(db, knex, meta);
      } else {
        await migrationDocumentIds(db, knex, meta);
      }
    }
  },
  // Reverting this migration is not supported.
  async down () {
    throw new Error('not implemented');
  }
};
export { createdDocumentId };
//# sourceMappingURL=5.0.0-02-document-id.mjs.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,3 @@
import type { Migration } from '../common';
/** Internal migration: adds the `locale` column to content-type tables that declare a locale attribute. */
export declare const createdLocale: Migration;
//# sourceMappingURL=5.0.0-03-locale.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"5.0.0-03-locale.d.ts","sourceRoot":"","sources":["../../../src/migrations/internal-migrations/5.0.0-03-locale.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,WAAW,CAAC;AAe3C,eAAO,MAAM,aAAa,EAAE,SA4B3B,CAAC"}

View File

@@ -0,0 +1,41 @@
'use strict';
/**
 * In v4, content types with disabled i18n did not have any locale column.
 * In v5, we need to add a `locale` column to all content types.
 * Other downstream migrations will make use of this column.
 *
 * Adds the nullable string `locale` column to `tableName`.
 */ const createLocaleColumn = async (db, tableName) => {
  const addLocale = (table) => {
    table.string('locale');
  };
  await db.schema.alterTable(tableName, addLocale);
};
const createdLocale = {
  name: '5.0.0-03-created-locale',
  /**
   * Adds the `locale` column to every content-type table whose metadata
   * declares a `locale` attribute and whose table exists but lacks the column.
   */
  async up (knex, db) {
    for (const meta of db.metadata.values()) {
      const tableExists = await knex.schema.hasTable(meta.tableName);
      if (!tableExists) {
        continue;
      }
      // Ignore non-content types: skip uids with no registered model.
      // NOTE(review): relies on the global `strapi` instance being available.
      if (!strapi.getModel(meta.uid)) {
        continue;
      }
      // Create locale column if it doesn't exist
      const columnExists = await knex.schema.hasColumn(meta.tableName, 'locale');
      if (meta.attributes.locale && !columnExists) {
        await createLocaleColumn(knex, meta.tableName);
      }
    }
  },
  async down () {
    throw new Error('not implemented');
  }
};
exports.createdLocale = createdLocale;
//# sourceMappingURL=5.0.0-03-locale.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"5.0.0-03-locale.js","sources":["../../../src/migrations/internal-migrations/5.0.0-03-locale.ts"],"sourcesContent":["import type { Knex } from 'knex';\n\nimport type { Migration } from '../common';\n\n/**\n * In v4, content types with disabled i18n did not have any locale column.\n * In v5, we need to add a `locale` column to all content types.\n * Other downstream migrations will make use of this column.\n *\n * This function creates the `locale` column if it doesn't exist.\n */\nconst createLocaleColumn = async (db: Knex, tableName: string) => {\n await db.schema.alterTable(tableName, (table) => {\n table.string('locale');\n });\n};\n\nexport const createdLocale: Migration = {\n name: '5.0.0-03-created-locale',\n async up(knex, db) {\n for (const meta of db.metadata.values()) {\n const hasTable = await knex.schema.hasTable(meta.tableName);\n\n if (!hasTable) {\n continue;\n }\n\n // Ignore non-content types\n const uid = meta.uid;\n const model = strapi.getModel(uid);\n if (!model) {\n continue;\n }\n\n // Create locale column if it doesn't exist\n const hasLocaleColumn = await knex.schema.hasColumn(meta.tableName, 'locale');\n\n if (meta.attributes.locale && !hasLocaleColumn) {\n await createLocaleColumn(knex, meta.tableName);\n }\n }\n },\n async down() {\n throw new Error('not implemented');\n 
},\n};\n"],"names":["createLocaleColumn","db","tableName","schema","alterTable","table","string","createdLocale","name","up","knex","meta","metadata","values","hasTable","uid","model","strapi","getModel","hasLocaleColumn","hasColumn","attributes","locale","down","Error"],"mappings":";;AAIA;;;;;;IAOA,MAAMA,kBAAqB,GAAA,OAAOC,EAAUC,EAAAA,SAAAA,GAAAA;AAC1C,IAAA,MAAMD,GAAGE,MAAM,CAACC,UAAU,CAACF,WAAW,CAACG,KAAAA,GAAAA;AACrCA,QAAAA,KAAAA,CAAMC,MAAM,CAAC,QAAA,CAAA;AACf,KAAA,CAAA;AACF,CAAA;MAEaC,aAA2B,GAAA;IACtCC,IAAM,EAAA,yBAAA;IACN,MAAMC,EAAAA,CAAAA,CAAGC,IAAI,EAAET,EAAE,EAAA;AACf,QAAA,KAAK,MAAMU,IAAQV,IAAAA,EAAAA,CAAGW,QAAQ,CAACC,MAAM,EAAI,CAAA;YACvC,MAAMC,QAAAA,GAAW,MAAMJ,IAAKP,CAAAA,MAAM,CAACW,QAAQ,CAACH,KAAKT,SAAS,CAAA;AAE1D,YAAA,IAAI,CAACY,QAAU,EAAA;AACb,gBAAA;AACF;;YAGA,MAAMC,GAAAA,GAAMJ,KAAKI,GAAG;YACpB,MAAMC,KAAAA,GAAQC,MAAOC,CAAAA,QAAQ,CAACH,GAAAA,CAAAA;AAC9B,YAAA,IAAI,CAACC,KAAO,EAAA;AACV,gBAAA;AACF;;YAGA,MAAMG,eAAAA,GAAkB,MAAMT,IAAKP,CAAAA,MAAM,CAACiB,SAAS,CAACT,IAAKT,CAAAA,SAAS,EAAE,QAAA,CAAA;AAEpE,YAAA,IAAIS,KAAKU,UAAU,CAACC,MAAM,IAAI,CAACH,eAAiB,EAAA;gBAC9C,MAAMnB,kBAAAA,CAAmBU,IAAMC,EAAAA,IAAAA,CAAKT,SAAS,CAAA;AAC/C;AACF;AACF,KAAA;IACA,MAAMqB,IAAAA,CAAAA,GAAAA;AACJ,QAAA,MAAM,IAAIC,KAAM,CAAA,iBAAA,CAAA;AAClB;AACF;;;;"}

View File

@@ -0,0 +1,39 @@
/**
 * In v4, content types with disabled i18n did not have any locale column.
 * In v5, we need to add a `locale` column to all content types.
 * Other downstream migrations will make use of this column.
 *
 * Adds the nullable string `locale` column to `tableName`.
 */ const createLocaleColumn = async (db, tableName) => {
  const addLocale = (table) => {
    table.string('locale');
  };
  await db.schema.alterTable(tableName, addLocale);
};
const createdLocale = {
  name: '5.0.0-03-created-locale',
  /**
   * Adds the `locale` column to every content-type table whose metadata
   * declares a `locale` attribute and whose table exists but lacks the column.
   */
  async up (knex, db) {
    for (const meta of db.metadata.values()) {
      const tableExists = await knex.schema.hasTable(meta.tableName);
      if (!tableExists) {
        continue;
      }
      // Ignore non-content types: skip uids with no registered model.
      // NOTE(review): relies on the global `strapi` instance being available.
      if (!strapi.getModel(meta.uid)) {
        continue;
      }
      // Create locale column if it doesn't exist
      const columnExists = await knex.schema.hasColumn(meta.tableName, 'locale');
      if (meta.attributes.locale && !columnExists) {
        await createLocaleColumn(knex, meta.tableName);
      }
    }
  },
  async down () {
    throw new Error('not implemented');
  }
};
export { createdLocale };
//# sourceMappingURL=5.0.0-03-locale.mjs.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"5.0.0-03-locale.mjs","sources":["../../../src/migrations/internal-migrations/5.0.0-03-locale.ts"],"sourcesContent":["import type { Knex } from 'knex';\n\nimport type { Migration } from '../common';\n\n/**\n * In v4, content types with disabled i18n did not have any locale column.\n * In v5, we need to add a `locale` column to all content types.\n * Other downstream migrations will make use of this column.\n *\n * This function creates the `locale` column if it doesn't exist.\n */\nconst createLocaleColumn = async (db: Knex, tableName: string) => {\n await db.schema.alterTable(tableName, (table) => {\n table.string('locale');\n });\n};\n\nexport const createdLocale: Migration = {\n name: '5.0.0-03-created-locale',\n async up(knex, db) {\n for (const meta of db.metadata.values()) {\n const hasTable = await knex.schema.hasTable(meta.tableName);\n\n if (!hasTable) {\n continue;\n }\n\n // Ignore non-content types\n const uid = meta.uid;\n const model = strapi.getModel(uid);\n if (!model) {\n continue;\n }\n\n // Create locale column if it doesn't exist\n const hasLocaleColumn = await knex.schema.hasColumn(meta.tableName, 'locale');\n\n if (meta.attributes.locale && !hasLocaleColumn) {\n await createLocaleColumn(knex, meta.tableName);\n }\n }\n },\n async down() {\n throw new Error('not implemented');\n 
},\n};\n"],"names":["createLocaleColumn","db","tableName","schema","alterTable","table","string","createdLocale","name","up","knex","meta","metadata","values","hasTable","uid","model","strapi","getModel","hasLocaleColumn","hasColumn","attributes","locale","down","Error"],"mappings":"AAIA;;;;;;IAOA,MAAMA,kBAAqB,GAAA,OAAOC,EAAUC,EAAAA,SAAAA,GAAAA;AAC1C,IAAA,MAAMD,GAAGE,MAAM,CAACC,UAAU,CAACF,WAAW,CAACG,KAAAA,GAAAA;AACrCA,QAAAA,KAAAA,CAAMC,MAAM,CAAC,QAAA,CAAA;AACf,KAAA,CAAA;AACF,CAAA;MAEaC,aAA2B,GAAA;IACtCC,IAAM,EAAA,yBAAA;IACN,MAAMC,EAAAA,CAAAA,CAAGC,IAAI,EAAET,EAAE,EAAA;AACf,QAAA,KAAK,MAAMU,IAAQV,IAAAA,EAAAA,CAAGW,QAAQ,CAACC,MAAM,EAAI,CAAA;YACvC,MAAMC,QAAAA,GAAW,MAAMJ,IAAKP,CAAAA,MAAM,CAACW,QAAQ,CAACH,KAAKT,SAAS,CAAA;AAE1D,YAAA,IAAI,CAACY,QAAU,EAAA;AACb,gBAAA;AACF;;YAGA,MAAMC,GAAAA,GAAMJ,KAAKI,GAAG;YACpB,MAAMC,KAAAA,GAAQC,MAAOC,CAAAA,QAAQ,CAACH,GAAAA,CAAAA;AAC9B,YAAA,IAAI,CAACC,KAAO,EAAA;AACV,gBAAA;AACF;;YAGA,MAAMG,eAAAA,GAAkB,MAAMT,IAAKP,CAAAA,MAAM,CAACiB,SAAS,CAACT,IAAKT,CAAAA,SAAS,EAAE,QAAA,CAAA;AAEpE,YAAA,IAAIS,KAAKU,UAAU,CAACC,MAAM,IAAI,CAACH,eAAiB,EAAA;gBAC9C,MAAMnB,kBAAAA,CAAmBU,IAAMC,EAAAA,IAAAA,CAAKT,SAAS,CAAA;AAC/C;AACF;AACF,KAAA;IACA,MAAMqB,IAAAA,CAAAA,GAAAA;AACJ,QAAA,MAAM,IAAIC,KAAM,CAAA,iBAAA,CAAA;AAClB;AACF;;;;"}

View File

@@ -0,0 +1,3 @@
import type { Migration } from '../common';
/** Internal migration: adds and backfills the `published_at` column on content-type tables. */
export declare const createdPublishedAt: Migration;
//# sourceMappingURL=5.0.0-04-published-at.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"5.0.0-04-published-at.d.ts","sourceRoot":"","sources":["../../../src/migrations/internal-migrations/5.0.0-04-published-at.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,WAAW,CAAC;AAkB3C,eAAO,MAAM,kBAAkB,EAAE,SA4BhC,CAAC"}

View File

@@ -0,0 +1,45 @@
'use strict';
/**
 * In v4, content types with disabled D&P did not have any `published_at` column.
 * In v5, we need to add a `published_at` column to all content types.
 * Other downstream migrations will make use of this column.
 *
 * Adds the `published_at` column and backfills every row with the current date.
 */ const createPublishedAtColumn = async (db, tableName) => {
  const addColumn = (table) => {
    // NOTE(review): created as a string column, mirroring the original source
    // — confirm this matches the type expected by the schema sync.
    table.string('published_at');
  };
  await db.schema.alterTable(tableName, addColumn);
  // Non DP content types should have their `published_at` column set to a date
  await db(tableName).update({ published_at: new Date() });
};
const createdPublishedAt = {
  name: '5.0.0-04-created-published-at',
  /**
   * Adds the `published_at` column to every content-type table whose metadata
   * declares a `publishedAt` attribute and whose table exists but lacks it.
   */
  async up (knex, db) {
    for (const meta of db.metadata.values()) {
      const tableExists = await knex.schema.hasTable(meta.tableName);
      if (!tableExists) {
        continue;
      }
      // Ignore non-content types: skip uids with no registered model.
      // NOTE(review): relies on the global `strapi` instance being available.
      if (!strapi.getModel(meta.uid)) {
        continue;
      }
      // Create publishedAt column if it doesn't exist
      const columnExists = await knex.schema.hasColumn(meta.tableName, 'published_at');
      if (meta.attributes.publishedAt && !columnExists) {
        await createPublishedAtColumn(knex, meta.tableName);
      }
    }
  },
  async down () {
    throw new Error('not implemented');
  }
};
exports.createdPublishedAt = createdPublishedAt;
//# sourceMappingURL=5.0.0-04-published-at.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"5.0.0-04-published-at.js","sources":["../../../src/migrations/internal-migrations/5.0.0-04-published-at.ts"],"sourcesContent":["import type { Knex } from 'knex';\n\nimport type { Migration } from '../common';\n\n/**\n * In v4, content types with disabled D&P did not have any `published_at` column.\n * In v5, we need to add a `published_at` column to all content types.\n * Other downstream migrations will make use of this column.\n *\n * This function creates the `published_at` column if it doesn't exist.\n */\nconst createPublishedAtColumn = async (db: Knex, tableName: string) => {\n await db.schema.alterTable(tableName, (table) => {\n table.string('published_at');\n });\n\n // Non DP content types should have their `published_at` column set to a date\n await db(tableName).update({ published_at: new Date() });\n};\n\nexport const createdPublishedAt: Migration = {\n name: '5.0.0-04-created-published-at',\n async up(knex, db) {\n for (const meta of db.metadata.values()) {\n const hasTable = await knex.schema.hasTable(meta.tableName);\n\n if (!hasTable) {\n continue;\n }\n\n // Ignore non-content types\n const uid = meta.uid;\n const model = strapi.getModel(uid);\n if (!model) {\n continue;\n }\n\n // Create publishedAt column if it doesn't exist\n const hasPublishedAtColumn = await knex.schema.hasColumn(meta.tableName, 'published_at');\n\n if (meta.attributes.publishedAt && !hasPublishedAtColumn) {\n await createPublishedAtColumn(knex, meta.tableName);\n }\n }\n },\n async down() {\n throw new Error('not implemented');\n 
},\n};\n"],"names":["createPublishedAtColumn","db","tableName","schema","alterTable","table","string","update","published_at","Date","createdPublishedAt","name","up","knex","meta","metadata","values","hasTable","uid","model","strapi","getModel","hasPublishedAtColumn","hasColumn","attributes","publishedAt","down","Error"],"mappings":";;AAIA;;;;;;IAOA,MAAMA,uBAA0B,GAAA,OAAOC,EAAUC,EAAAA,SAAAA,GAAAA;AAC/C,IAAA,MAAMD,GAAGE,MAAM,CAACC,UAAU,CAACF,WAAW,CAACG,KAAAA,GAAAA;AACrCA,QAAAA,KAAAA,CAAMC,MAAM,CAAC,cAAA,CAAA;AACf,KAAA,CAAA;;IAGA,MAAML,EAAAA,CAAGC,SAAWK,CAAAA,CAAAA,MAAM,CAAC;AAAEC,QAAAA,YAAAA,EAAc,IAAIC,IAAAA;AAAO,KAAA,CAAA;AACxD,CAAA;MAEaC,kBAAgC,GAAA;IAC3CC,IAAM,EAAA,+BAAA;IACN,MAAMC,EAAAA,CAAAA,CAAGC,IAAI,EAAEZ,EAAE,EAAA;AACf,QAAA,KAAK,MAAMa,IAAQb,IAAAA,EAAAA,CAAGc,QAAQ,CAACC,MAAM,EAAI,CAAA;YACvC,MAAMC,QAAAA,GAAW,MAAMJ,IAAKV,CAAAA,MAAM,CAACc,QAAQ,CAACH,KAAKZ,SAAS,CAAA;AAE1D,YAAA,IAAI,CAACe,QAAU,EAAA;AACb,gBAAA;AACF;;YAGA,MAAMC,GAAAA,GAAMJ,KAAKI,GAAG;YACpB,MAAMC,KAAAA,GAAQC,MAAOC,CAAAA,QAAQ,CAACH,GAAAA,CAAAA;AAC9B,YAAA,IAAI,CAACC,KAAO,EAAA;AACV,gBAAA;AACF;;YAGA,MAAMG,oBAAAA,GAAuB,MAAMT,IAAKV,CAAAA,MAAM,CAACoB,SAAS,CAACT,IAAKZ,CAAAA,SAAS,EAAE,cAAA,CAAA;AAEzE,YAAA,IAAIY,KAAKU,UAAU,CAACC,WAAW,IAAI,CAACH,oBAAsB,EAAA;gBACxD,MAAMtB,uBAAAA,CAAwBa,IAAMC,EAAAA,IAAAA,CAAKZ,SAAS,CAAA;AACpD;AACF;AACF,KAAA;IACA,MAAMwB,IAAAA,CAAAA,GAAAA;AACJ,QAAA,MAAM,IAAIC,KAAM,CAAA,iBAAA,CAAA;AAClB;AACF;;;;"}

View File

@@ -0,0 +1,43 @@
/**
* In v4, content types with disabled D&P did not have any `published_at` column.
* In v5, we need to add a `published_at` column to all content types.
* Other downstream migrations will make use of this column.
*
* This function creates the `published_at` column if it doesn't exist.
*/ const createPublishedAtColumn = async (db, tableName)=>{
await db.schema.alterTable(tableName, (table)=>{
table.string('published_at');
});
// Non DP content types should have their `published_at` column set to a date
await db(tableName).update({
published_at: new Date()
});
};
const createdPublishedAt = {
name: '5.0.0-04-created-published-at',
async up (knex, db) {
for (const meta of db.metadata.values()){
const hasTable = await knex.schema.hasTable(meta.tableName);
if (!hasTable) {
continue;
}
// Ignore non-content types
const uid = meta.uid;
const model = strapi.getModel(uid);
if (!model) {
continue;
}
// Create publishedAt column if it doesn't exist
const hasPublishedAtColumn = await knex.schema.hasColumn(meta.tableName, 'published_at');
if (meta.attributes.publishedAt && !hasPublishedAtColumn) {
await createPublishedAtColumn(knex, meta.tableName);
}
}
},
async down () {
throw new Error('not implemented');
}
};
export { createdPublishedAt };
//# sourceMappingURL=5.0.0-04-published-at.mjs.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"5.0.0-04-published-at.mjs","sources":["../../../src/migrations/internal-migrations/5.0.0-04-published-at.ts"],"sourcesContent":["import type { Knex } from 'knex';\n\nimport type { Migration } from '../common';\n\n/**\n * In v4, content types with disabled D&P did not have any `published_at` column.\n * In v5, we need to add a `published_at` column to all content types.\n * Other downstream migrations will make use of this column.\n *\n * This function creates the `published_at` column if it doesn't exist.\n */\nconst createPublishedAtColumn = async (db: Knex, tableName: string) => {\n await db.schema.alterTable(tableName, (table) => {\n table.string('published_at');\n });\n\n // Non DP content types should have their `published_at` column set to a date\n await db(tableName).update({ published_at: new Date() });\n};\n\nexport const createdPublishedAt: Migration = {\n name: '5.0.0-04-created-published-at',\n async up(knex, db) {\n for (const meta of db.metadata.values()) {\n const hasTable = await knex.schema.hasTable(meta.tableName);\n\n if (!hasTable) {\n continue;\n }\n\n // Ignore non-content types\n const uid = meta.uid;\n const model = strapi.getModel(uid);\n if (!model) {\n continue;\n }\n\n // Create publishedAt column if it doesn't exist\n const hasPublishedAtColumn = await knex.schema.hasColumn(meta.tableName, 'published_at');\n\n if (meta.attributes.publishedAt && !hasPublishedAtColumn) {\n await createPublishedAtColumn(knex, meta.tableName);\n }\n }\n },\n async down() {\n throw new Error('not implemented');\n 
},\n};\n"],"names":["createPublishedAtColumn","db","tableName","schema","alterTable","table","string","update","published_at","Date","createdPublishedAt","name","up","knex","meta","metadata","values","hasTable","uid","model","strapi","getModel","hasPublishedAtColumn","hasColumn","attributes","publishedAt","down","Error"],"mappings":"AAIA;;;;;;IAOA,MAAMA,uBAA0B,GAAA,OAAOC,EAAUC,EAAAA,SAAAA,GAAAA;AAC/C,IAAA,MAAMD,GAAGE,MAAM,CAACC,UAAU,CAACF,WAAW,CAACG,KAAAA,GAAAA;AACrCA,QAAAA,KAAAA,CAAMC,MAAM,CAAC,cAAA,CAAA;AACf,KAAA,CAAA;;IAGA,MAAML,EAAAA,CAAGC,SAAWK,CAAAA,CAAAA,MAAM,CAAC;AAAEC,QAAAA,YAAAA,EAAc,IAAIC,IAAAA;AAAO,KAAA,CAAA;AACxD,CAAA;MAEaC,kBAAgC,GAAA;IAC3CC,IAAM,EAAA,+BAAA;IACN,MAAMC,EAAAA,CAAAA,CAAGC,IAAI,EAAEZ,EAAE,EAAA;AACf,QAAA,KAAK,MAAMa,IAAQb,IAAAA,EAAAA,CAAGc,QAAQ,CAACC,MAAM,EAAI,CAAA;YACvC,MAAMC,QAAAA,GAAW,MAAMJ,IAAKV,CAAAA,MAAM,CAACc,QAAQ,CAACH,KAAKZ,SAAS,CAAA;AAE1D,YAAA,IAAI,CAACe,QAAU,EAAA;AACb,gBAAA;AACF;;YAGA,MAAMC,GAAAA,GAAMJ,KAAKI,GAAG;YACpB,MAAMC,KAAAA,GAAQC,MAAOC,CAAAA,QAAQ,CAACH,GAAAA,CAAAA;AAC9B,YAAA,IAAI,CAACC,KAAO,EAAA;AACV,gBAAA;AACF;;YAGA,MAAMG,oBAAAA,GAAuB,MAAMT,IAAKV,CAAAA,MAAM,CAACoB,SAAS,CAACT,IAAKZ,CAAAA,SAAS,EAAE,cAAA,CAAA;AAEzE,YAAA,IAAIY,KAAKU,UAAU,CAACC,WAAW,IAAI,CAACH,oBAAsB,EAAA;gBACxD,MAAMtB,uBAAAA,CAAwBa,IAAMC,EAAAA,IAAAA,CAAKZ,SAAS,CAAA;AACpD;AACF;AACF,KAAA;IACA,MAAMwB,IAAAA,CAAAA,GAAAA;AACJ,QAAA,MAAM,IAAIC,KAAM,CAAA,iBAAA,CAAA;AAClB;AACF;;;;"}

View File

@@ -0,0 +1,3 @@
import type { Migration } from '../common';
/**
 * v5 migration: drops the v4 `<table>_<column>_unique` indexes from slug
 * (`uid`) fields so downstream migrations can rewrite data without
 * violating the old unique constraint.
 */
export declare const dropSlugFieldsIndex: Migration;
//# sourceMappingURL=5.0.0-05-drop-slug-unique-index.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"5.0.0-05-drop-slug-unique-index.d.ts","sourceRoot":"","sources":["../../../src/migrations/internal-migrations/5.0.0-05-drop-slug-unique-index.ts"],"names":[],"mappings":"AASA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,WAAW,CAAC;AAc3C,eAAO,MAAM,mBAAmB,EAAE,SAmBjC,CAAC"}

View File

@@ -0,0 +1,43 @@
'use strict';
/**
* In V4 slug fields contained a unique index.
* In V5 slug fields should not have a unique index.
*
* This migration drops existing unique indexes from slug fields so downstream migrations
* can work on the data without violating the unique index.
*/ const dropIndex = async (knex, tableName, columnName)=>{
try {
await knex.schema.alterTable(tableName, (table)=>{
// NOTE: Can not use "identifiers" utility, as the 5.0.0-01 migration does not rename this particular index
// to `tableName_columnName_uq`.
table.dropUnique([
columnName
], `${tableName}_${columnName}_unique`);
});
} catch (error) {
// If unique index does not exist, do nothing
}
};
const dropSlugFieldsIndex = {
name: '5.0.0-05-drop-slug-fields-index',
async up (knex, db) {
for (const meta of db.metadata.values()){
const hasTable = await knex.schema.hasTable(meta.tableName);
if (!hasTable) {
continue;
}
for (const attribute of Object.values(meta.attributes)){
if (attribute.type === 'uid' && attribute.columnName) {
await dropIndex(knex, meta.tableName, attribute.columnName);
}
}
}
},
async down () {
throw new Error('not implemented');
}
};
exports.dropSlugFieldsIndex = dropSlugFieldsIndex;
//# sourceMappingURL=5.0.0-05-drop-slug-unique-index.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"5.0.0-05-drop-slug-unique-index.js","sources":["../../../src/migrations/internal-migrations/5.0.0-05-drop-slug-unique-index.ts"],"sourcesContent":["/**\n * In V4 slug fields contained a unique index.\n * In V5 slug fields should not have a unique index.\n *\n * This migration drops existing unique indexes from slug fields so downstream migrations\n * can work on the data without violating the unique index.\n */\nimport type { Knex } from 'knex';\n\nimport type { Migration } from '../common';\n\nconst dropIndex = async (knex: Knex, tableName: string, columnName: string) => {\n try {\n await knex.schema.alterTable(tableName, (table) => {\n // NOTE: Can not use \"identifiers\" utility, as the 5.0.0-01 migration does not rename this particular index\n // to `tableName_columnName_uq`.\n table.dropUnique([columnName], `${tableName}_${columnName}_unique`);\n });\n } catch (error) {\n // If unique index does not exist, do nothing\n }\n};\n\nexport const dropSlugFieldsIndex: Migration = {\n name: '5.0.0-05-drop-slug-fields-index',\n async up(knex, db) {\n for (const meta of db.metadata.values()) {\n const hasTable = await knex.schema.hasTable(meta.tableName);\n if (!hasTable) {\n continue;\n }\n\n for (const attribute of Object.values(meta.attributes)) {\n if (attribute.type === 'uid' && attribute.columnName) {\n await dropIndex(knex, meta.tableName, attribute.columnName);\n }\n }\n }\n },\n async down() {\n throw new Error('not implemented');\n 
},\n};\n"],"names":["dropIndex","knex","tableName","columnName","schema","alterTable","table","dropUnique","error","dropSlugFieldsIndex","name","up","db","meta","metadata","values","hasTable","attribute","Object","attributes","type","down","Error"],"mappings":";;AAAA;;;;;;AAMC,IAKD,MAAMA,SAAAA,GAAY,OAAOC,IAAAA,EAAYC,SAAmBC,EAAAA,UAAAA,GAAAA;IACtD,IAAI;AACF,QAAA,MAAMF,KAAKG,MAAM,CAACC,UAAU,CAACH,WAAW,CAACI,KAAAA,GAAAA;;;AAGvCA,YAAAA,KAAAA,CAAMC,UAAU,CAAC;AAACJ,gBAAAA;AAAW,aAAA,EAAE,CAAC,EAAED,SAAAA,CAAU,CAAC,EAAEC,UAAAA,CAAW,OAAO,CAAC,CAAA;AACpE,SAAA,CAAA;AACF,KAAA,CAAE,OAAOK,KAAO,EAAA;;AAEhB;AACF,CAAA;MAEaC,mBAAiC,GAAA;IAC5CC,IAAM,EAAA,iCAAA;IACN,MAAMC,EAAAA,CAAAA,CAAGV,IAAI,EAAEW,EAAE,EAAA;AACf,QAAA,KAAK,MAAMC,IAAQD,IAAAA,EAAAA,CAAGE,QAAQ,CAACC,MAAM,EAAI,CAAA;YACvC,MAAMC,QAAAA,GAAW,MAAMf,IAAKG,CAAAA,MAAM,CAACY,QAAQ,CAACH,KAAKX,SAAS,CAAA;AAC1D,YAAA,IAAI,CAACc,QAAU,EAAA;AACb,gBAAA;AACF;AAEA,YAAA,KAAK,MAAMC,SAAaC,IAAAA,MAAAA,CAAOH,MAAM,CAACF,IAAAA,CAAKM,UAAU,CAAG,CAAA;AACtD,gBAAA,IAAIF,UAAUG,IAAI,KAAK,KAASH,IAAAA,SAAAA,CAAUd,UAAU,EAAE;AACpD,oBAAA,MAAMH,UAAUC,IAAMY,EAAAA,IAAAA,CAAKX,SAAS,EAAEe,UAAUd,UAAU,CAAA;AAC5D;AACF;AACF;AACF,KAAA;IACA,MAAMkB,IAAAA,CAAAA,GAAAA;AACJ,QAAA,MAAM,IAAIC,KAAM,CAAA,iBAAA,CAAA;AAClB;AACF;;;;"}

View File

@@ -0,0 +1,41 @@
/**
* In V4 slug fields contained a unique index.
* In V5 slug fields should not have a unique index.
*
* This migration drops existing unique indexes from slug fields so downstream migrations
* can work on the data without violating the unique index.
*/ const dropIndex = async (knex, tableName, columnName)=>{
try {
await knex.schema.alterTable(tableName, (table)=>{
// NOTE: Can not use "identifiers" utility, as the 5.0.0-01 migration does not rename this particular index
// to `tableName_columnName_uq`.
table.dropUnique([
columnName
], `${tableName}_${columnName}_unique`);
});
} catch (error) {
// If unique index does not exist, do nothing
}
};
const dropSlugFieldsIndex = {
name: '5.0.0-05-drop-slug-fields-index',
async up (knex, db) {
for (const meta of db.metadata.values()){
const hasTable = await knex.schema.hasTable(meta.tableName);
if (!hasTable) {
continue;
}
for (const attribute of Object.values(meta.attributes)){
if (attribute.type === 'uid' && attribute.columnName) {
await dropIndex(knex, meta.tableName, attribute.columnName);
}
}
}
},
async down () {
throw new Error('not implemented');
}
};
export { dropSlugFieldsIndex };
//# sourceMappingURL=5.0.0-05-drop-slug-unique-index.mjs.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"5.0.0-05-drop-slug-unique-index.mjs","sources":["../../../src/migrations/internal-migrations/5.0.0-05-drop-slug-unique-index.ts"],"sourcesContent":["/**\n * In V4 slug fields contained a unique index.\n * In V5 slug fields should not have a unique index.\n *\n * This migration drops existing unique indexes from slug fields so downstream migrations\n * can work on the data without violating the unique index.\n */\nimport type { Knex } from 'knex';\n\nimport type { Migration } from '../common';\n\nconst dropIndex = async (knex: Knex, tableName: string, columnName: string) => {\n try {\n await knex.schema.alterTable(tableName, (table) => {\n // NOTE: Can not use \"identifiers\" utility, as the 5.0.0-01 migration does not rename this particular index\n // to `tableName_columnName_uq`.\n table.dropUnique([columnName], `${tableName}_${columnName}_unique`);\n });\n } catch (error) {\n // If unique index does not exist, do nothing\n }\n};\n\nexport const dropSlugFieldsIndex: Migration = {\n name: '5.0.0-05-drop-slug-fields-index',\n async up(knex, db) {\n for (const meta of db.metadata.values()) {\n const hasTable = await knex.schema.hasTable(meta.tableName);\n if (!hasTable) {\n continue;\n }\n\n for (const attribute of Object.values(meta.attributes)) {\n if (attribute.type === 'uid' && attribute.columnName) {\n await dropIndex(knex, meta.tableName, attribute.columnName);\n }\n }\n }\n },\n async down() {\n throw new Error('not implemented');\n 
},\n};\n"],"names":["dropIndex","knex","tableName","columnName","schema","alterTable","table","dropUnique","error","dropSlugFieldsIndex","name","up","db","meta","metadata","values","hasTable","attribute","Object","attributes","type","down","Error"],"mappings":"AAAA;;;;;;AAMC,IAKD,MAAMA,SAAAA,GAAY,OAAOC,IAAAA,EAAYC,SAAmBC,EAAAA,UAAAA,GAAAA;IACtD,IAAI;AACF,QAAA,MAAMF,KAAKG,MAAM,CAACC,UAAU,CAACH,WAAW,CAACI,KAAAA,GAAAA;;;AAGvCA,YAAAA,KAAAA,CAAMC,UAAU,CAAC;AAACJ,gBAAAA;AAAW,aAAA,EAAE,CAAC,EAAED,SAAAA,CAAU,CAAC,EAAEC,UAAAA,CAAW,OAAO,CAAC,CAAA;AACpE,SAAA,CAAA;AACF,KAAA,CAAE,OAAOK,KAAO,EAAA;;AAEhB;AACF,CAAA;MAEaC,mBAAiC,GAAA;IAC5CC,IAAM,EAAA,iCAAA;IACN,MAAMC,EAAAA,CAAAA,CAAGV,IAAI,EAAEW,EAAE,EAAA;AACf,QAAA,KAAK,MAAMC,IAAQD,IAAAA,EAAAA,CAAGE,QAAQ,CAACC,MAAM,EAAI,CAAA;YACvC,MAAMC,QAAAA,GAAW,MAAMf,IAAKG,CAAAA,MAAM,CAACY,QAAQ,CAACH,KAAKX,SAAS,CAAA;AAC1D,YAAA,IAAI,CAACc,QAAU,EAAA;AACb,gBAAA;AACF;AAEA,YAAA,KAAK,MAAMC,SAAaC,IAAAA,MAAAA,CAAOH,MAAM,CAACF,IAAAA,CAAKM,UAAU,CAAG,CAAA;AACtD,gBAAA,IAAIF,UAAUG,IAAI,KAAK,KAASH,IAAAA,SAAAA,CAAUd,UAAU,EAAE;AACpD,oBAAA,MAAMH,UAAUC,IAAMY,EAAAA,IAAAA,CAAKX,SAAS,EAAEe,UAAUd,UAAU,CAAA;AAC5D;AACF;AACF;AACF,KAAA;IACA,MAAMkB,IAAAA,CAAAA,GAAAA;AACJ,QAAA,MAAM,IAAIC,KAAM,CAAA,iBAAA,CAAA;AAClB;AACF;;;;"}

View File

@@ -0,0 +1,12 @@
import type { Migration } from '../common';
/**
 * List of all the internal migrations. The array order will be the order in which they are executed.
 *
 * Each entry follows the `Migration` shape:
 * {
 *   name: 'some-name',
 *   async up(knex: Knex, db: Database) {},
 *   async down(knex: Knex, db: Database) {},
 * },
 */
export declare const internalMigrations: Migration[];
//# sourceMappingURL=index.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/migrations/internal-migrations/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,WAAW,CAAC;AAO3C;;;;;;;;GAQG;AACH,eAAO,MAAM,kBAAkB,EAAE,SAAS,EAMzC,CAAC"}

View File

@@ -0,0 +1,26 @@
'use strict';
var _5_0_002DocumentId = require('./5.0.0-02-document-id.js');
var _5_0_001ConvertIdentifiersLongThanMaxLength = require('./5.0.0-01-convert-identifiers-long-than-max-length.js');
var _5_0_003Locale = require('./5.0.0-03-locale.js');
var _5_0_004PublishedAt = require('./5.0.0-04-published-at.js');
var _5_0_005DropSlugUniqueIndex = require('./5.0.0-05-drop-slug-unique-index.js');
/**
 * List of all the internal migrations. The array order will be the order in which they are executed.
 * Do not reorder: the identifier-shortening migration runs first so that
 * later migrations address the renamed tables/columns.
 *
 * Each entry follows the `Migration` shape:
 * {
 *   name: 'some-name',
 *   async up(knex: Knex, db: Database) {},
 *   async down(knex: Knex, db: Database) {},
 * },
 */ const internalMigrations = [
    _5_0_001ConvertIdentifiersLongThanMaxLength.renameIdentifiersLongerThanMaxLength,
    _5_0_002DocumentId.createdDocumentId,
    _5_0_003Locale.createdLocale,
    _5_0_004PublishedAt.createdPublishedAt,
    _5_0_005DropSlugUniqueIndex.dropSlugFieldsIndex
];
exports.internalMigrations = internalMigrations;
//# sourceMappingURL=index.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sources":["../../../src/migrations/internal-migrations/index.ts"],"sourcesContent":["import type { Migration } from '../common';\nimport { createdDocumentId } from './5.0.0-02-document-id';\nimport { renameIdentifiersLongerThanMaxLength } from './5.0.0-01-convert-identifiers-long-than-max-length';\nimport { createdLocale } from './5.0.0-03-locale';\nimport { createdPublishedAt } from './5.0.0-04-published-at';\nimport { dropSlugFieldsIndex } from './5.0.0-05-drop-slug-unique-index';\n\n/**\n * List of all the internal migrations. The array order will be the order in which they are executed.\n *\n * {\n * name: 'some-name',\n * async up(knex: Knex, db: Database) {},\n * async down(knex: Knex, db: Database) {},\n * },\n */\nexport const internalMigrations: Migration[] = [\n renameIdentifiersLongerThanMaxLength,\n createdDocumentId,\n createdLocale,\n createdPublishedAt,\n dropSlugFieldsIndex,\n];\n"],"names":["internalMigrations","renameIdentifiersLongerThanMaxLength","createdDocumentId","createdLocale","createdPublishedAt","dropSlugFieldsIndex"],"mappings":";;;;;;;;AAOA;;;;;;;;UASaA,kBAAkC,GAAA;AAC7CC,IAAAA,gFAAAA;AACAC,IAAAA,oCAAAA;AACAC,IAAAA,4BAAAA;AACAC,IAAAA,sCAAAA;AACAC,IAAAA;;;;;"}

View File

@@ -0,0 +1,24 @@
import { createdDocumentId } from './5.0.0-02-document-id.mjs';
import { renameIdentifiersLongerThanMaxLength } from './5.0.0-01-convert-identifiers-long-than-max-length.mjs';
import { createdLocale } from './5.0.0-03-locale.mjs';
import { createdPublishedAt } from './5.0.0-04-published-at.mjs';
import { dropSlugFieldsIndex } from './5.0.0-05-drop-slug-unique-index.mjs';
/**
 * List of all the internal migrations. The array order will be the order in which they are executed.
 * Do not reorder: the identifier-shortening migration runs first so that
 * later migrations address the renamed tables/columns.
 *
 * Each entry follows the `Migration` shape:
 * {
 *   name: 'some-name',
 *   async up(knex: Knex, db: Database) {},
 *   async down(knex: Knex, db: Database) {},
 * },
 */ const internalMigrations = [
    renameIdentifiersLongerThanMaxLength,
    createdDocumentId,
    createdLocale,
    createdPublishedAt,
    dropSlugFieldsIndex
];
export { internalMigrations };
//# sourceMappingURL=index.mjs.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.mjs","sources":["../../../src/migrations/internal-migrations/index.ts"],"sourcesContent":["import type { Migration } from '../common';\nimport { createdDocumentId } from './5.0.0-02-document-id';\nimport { renameIdentifiersLongerThanMaxLength } from './5.0.0-01-convert-identifiers-long-than-max-length';\nimport { createdLocale } from './5.0.0-03-locale';\nimport { createdPublishedAt } from './5.0.0-04-published-at';\nimport { dropSlugFieldsIndex } from './5.0.0-05-drop-slug-unique-index';\n\n/**\n * List of all the internal migrations. The array order will be the order in which they are executed.\n *\n * {\n * name: 'some-name',\n * async up(knex: Knex, db: Database) {},\n * async down(knex: Knex, db: Database) {},\n * },\n */\nexport const internalMigrations: Migration[] = [\n renameIdentifiersLongerThanMaxLength,\n createdDocumentId,\n createdLocale,\n createdPublishedAt,\n dropSlugFieldsIndex,\n];\n"],"names":["internalMigrations","renameIdentifiersLongerThanMaxLength","createdDocumentId","createdLocale","createdPublishedAt","dropSlugFieldsIndex"],"mappings":";;;;;;AAOA;;;;;;;;UASaA,kBAAkC,GAAA;AAC7CC,IAAAA,oCAAAA;AACAC,IAAAA,iBAAAA;AACAC,IAAAA,aAAAA;AACAC,IAAAA,kBAAAA;AACAC,IAAAA;;;;;"}