node_modules ignore

commit 4574544c9f
parent e19d52f172
2025-05-08 23:43:47 +02:00
65041 changed files with 10593536 additions and 0 deletions

server/node_modules/@strapi/data-transfer/LICENSE generated vendored Normal file

@@ -0,0 +1,37 @@
Copyright (c) 2015-present Strapi Solutions SAS
Portions of the Strapi software are licensed as follows:
* All software that resides under an "ee/" directory (the “EE Software”), if that directory exists, is licensed under the license defined below.
Enterprise License
If you or the company you represent has entered into a written agreement referencing the Enterprise Edition of the Strapi source code available at
https://github.com/strapi/strapi, then such agreement applies to your use of the Enterprise Edition of the Strapi Software. If you or the company you
represent is using the Enterprise Edition of the Strapi Software in connection with a subscription to our cloud offering, then the agreement you have
agreed to with respect to our cloud offering and the licenses included in such agreement apply to your use of the Enterprise Edition of the Strapi Software.
Otherwise, the Strapi Enterprise Software License Agreement (found here https://strapi.io/enterprise-terms) applies to your use of the Enterprise Edition of the Strapi Software.
BY ACCESSING OR USING THE ENTERPRISE EDITION OF THE STRAPI SOFTWARE, YOU ARE AGREEING TO BE BOUND BY THE RELEVANT REFERENCED AGREEMENT.
IF YOU ARE NOT AUTHORIZED TO ACCEPT THESE TERMS ON BEHALF OF THE COMPANY YOU REPRESENT OR IF YOU DO NOT AGREE TO ALL OF THE RELEVANT TERMS AND CONDITIONS REFERENCED AND YOU
HAVE NOT OTHERWISE EXECUTED A WRITTEN AGREEMENT WITH STRAPI, YOU ARE NOT AUTHORIZED TO ACCESS OR USE OR ALLOW ANY USER TO ACCESS OR USE ANY PART OF
THE ENTERPRISE EDITION OF THE STRAPI SOFTWARE. YOUR ACCESS RIGHTS ARE CONDITIONAL ON YOUR CONSENT TO THE RELEVANT REFERENCED TERMS TO THE EXCLUSION OF ALL OTHER TERMS;
IF THE RELEVANT REFERENCED TERMS ARE CONSIDERED AN OFFER BY YOU, ACCEPTANCE IS EXPRESSLY LIMITED TO THE RELEVANT REFERENCED TERMS.
* All software outside of the above-mentioned directories or restrictions above is available under the "MIT Expat" license as set forth below.
MIT Expat License
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.


@@ -0,0 +1,29 @@
import { DataTransferError, Severity } from '../errors';
type TransferEngineStep = 'initialization' | 'validation' | 'transfer';
type TransferEngineErrorDetails<P extends TransferEngineStep = TransferEngineStep, U = never> = {
step: P;
} & ([U] extends [never] ? unknown : {
details?: U;
});
declare class TransferEngineError<P extends TransferEngineStep = TransferEngineStep, U = never, T extends TransferEngineErrorDetails<P, U> = TransferEngineErrorDetails<P, U>> extends DataTransferError<T> {
constructor(severity: Severity, message?: string, details?: T | null);
}
declare class TransferEngineInitializationError extends TransferEngineError<'initialization'> {
constructor(message?: string);
}
declare class TransferEngineValidationError<T extends {
check: string;
} = {
check: string;
}> extends TransferEngineError<'validation', T> {
constructor(message?: string, details?: T);
}
declare class TransferEngineTransferError<T extends {
check: string;
} = {
check: string;
}> extends TransferEngineError<'transfer', T> {
constructor(message?: string, details?: T);
}
export { TransferEngineError, TransferEngineInitializationError, TransferEngineValidationError, TransferEngineTransferError, };
//# sourceMappingURL=errors.d.ts.map
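
The declarations above define the engine's error hierarchy: each subclass narrows the `step` field ('initialization', 'validation' or 'transfer'), and the validation/transfer variants accept a `{ check: string }` details payload. A minimal, hypothetical usage sketch follows; the deep import path and the `check` value are assumptions for illustration, not taken from this diff.

```ts
// Sketch only: the import specifier below is an assumption; adjust it to however
// the package exposes its engine errors in your setup.
import {
  TransferEngineError,
  TransferEngineValidationError,
} from '@strapi/data-transfer/dist/engine/errors';

// Throw a validation error with the `{ check: string }` details shape declared above.
function assertSameSchemaCount(sourceCount: number, destinationCount: number): void {
  if (sourceCount !== destinationCount) {
    throw new TransferEngineValidationError(
      `Schema count mismatch: ${sourceCount} (source) vs ${destinationCount} (destination)`,
      { check: 'schema.count' } // hypothetical check identifier
    );
  }
}

try {
  assertSameSchemaCount(12, 11);
} catch (error) {
  // All engine errors share the TransferEngineError base class.
  if (error instanceof TransferEngineError) {
    console.error(error.message);
  }
}
```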


@@ -0,0 +1 @@
{"version":3,"file":"errors.d.ts","sourceRoot":"","sources":["../../src/engine/errors.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,iBAAiB,EAAE,QAAQ,EAAgB,MAAM,WAAW,CAAC;AAEtE,KAAK,kBAAkB,GAAG,gBAAgB,GAAG,YAAY,GAAG,UAAU,CAAC;AAEvE,KAAK,0BAA0B,CAAC,CAAC,SAAS,kBAAkB,GAAG,kBAAkB,EAAE,CAAC,GAAG,KAAK,IAAI;IAC9F,IAAI,EAAE,CAAC,CAAC;CACT,GAAG,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,KAAK,CAAC,GAAG,OAAO,GAAG;IAAE,OAAO,CAAC,EAAE,CAAC,CAAA;CAAE,CAAC,CAAC;AAEtD,cAAM,mBAAmB,CACvB,CAAC,SAAS,kBAAkB,GAAG,kBAAkB,EACjD,CAAC,GAAG,KAAK,EACT,CAAC,SAAS,0BAA0B,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,0BAA0B,CAAC,CAAC,EAAE,CAAC,CAAC,CAC7E,SAAQ,iBAAiB,CAAC,CAAC,CAAC;gBAChB,QAAQ,EAAE,QAAQ,EAAE,OAAO,CAAC,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,CAAC,GAAG,IAAI;CAGrE;AAED,cAAM,iCAAkC,SAAQ,mBAAmB,CAAC,gBAAgB,CAAC;gBACvE,OAAO,CAAC,EAAE,MAAM;CAG7B;AAED,cAAM,6BAA6B,CACjC,CAAC,SAAS;IAAE,KAAK,EAAE,MAAM,CAAA;CAAE,GAAG;IAAE,KAAK,EAAE,MAAM,CAAA;CAAE,CAC/C,SAAQ,mBAAmB,CAAC,YAAY,EAAE,CAAC,CAAC;gBAChC,OAAO,CAAC,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,CAAC;CAG1C;AAED,cAAM,2BAA2B,CAC/B,CAAC,SAAS;IAAE,KAAK,EAAE,MAAM,CAAA;CAAE,GAAG;IAAE,KAAK,EAAE,MAAM,CAAA;CAAE,CAC/C,SAAQ,mBAAmB,CAAC,UAAU,EAAE,CAAC,CAAC;gBAC9B,OAAO,CAAC,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,CAAC;CAG1C;AAED,OAAO,EACL,mBAAmB,EACnB,iCAAiC,EACjC,6BAA6B,EAC7B,2BAA2B,GAC5B,CAAC"}


@@ -0,0 +1,39 @@
'use strict';
var constants = require('../errors/constants.js');
var base = require('../errors/base.js');
class TransferEngineError extends base.DataTransferError {
constructor(severity, message, details){
super('engine', severity, message, details);
}
}
class TransferEngineInitializationError extends TransferEngineError {
constructor(message){
super(constants.SeverityKind.FATAL, message, {
step: 'initialization'
});
}
}
class TransferEngineValidationError extends TransferEngineError {
constructor(message, details){
super(constants.SeverityKind.FATAL, message, {
step: 'validation',
details
});
}
}
class TransferEngineTransferError extends TransferEngineError {
constructor(message, details){
super(constants.SeverityKind.FATAL, message, {
step: 'transfer',
details
});
}
}
exports.TransferEngineError = TransferEngineError;
exports.TransferEngineInitializationError = TransferEngineInitializationError;
exports.TransferEngineTransferError = TransferEngineTransferError;
exports.TransferEngineValidationError = TransferEngineValidationError;
//# sourceMappingURL=errors.js.map


@@ -0,0 +1 @@
{"version":3,"file":"errors.js","sources":["../../src/engine/errors.ts"],"sourcesContent":["import { DataTransferError, Severity, SeverityKind } from '../errors';\n\ntype TransferEngineStep = 'initialization' | 'validation' | 'transfer';\n\ntype TransferEngineErrorDetails<P extends TransferEngineStep = TransferEngineStep, U = never> = {\n step: P;\n} & ([U] extends [never] ? unknown : { details?: U });\n\nclass TransferEngineError<\n P extends TransferEngineStep = TransferEngineStep,\n U = never,\n T extends TransferEngineErrorDetails<P, U> = TransferEngineErrorDetails<P, U>,\n> extends DataTransferError<T> {\n constructor(severity: Severity, message?: string, details?: T | null) {\n super('engine', severity, message, details);\n }\n}\n\nclass TransferEngineInitializationError extends TransferEngineError<'initialization'> {\n constructor(message?: string) {\n super(SeverityKind.FATAL, message, { step: 'initialization' });\n }\n}\n\nclass TransferEngineValidationError<\n T extends { check: string } = { check: string },\n> extends TransferEngineError<'validation', T> {\n constructor(message?: string, details?: T) {\n super(SeverityKind.FATAL, message, { step: 'validation', details });\n }\n}\n\nclass TransferEngineTransferError<\n T extends { check: string } = { check: string },\n> extends TransferEngineError<'transfer', T> {\n constructor(message?: string, details?: T) {\n super(SeverityKind.FATAL, message, { step: 'transfer', details });\n }\n}\n\nexport {\n TransferEngineError,\n TransferEngineInitializationError,\n TransferEngineValidationError,\n TransferEngineTransferError,\n};\n"],"names":["TransferEngineError","DataTransferError","constructor","severity","message","details","TransferEngineInitializationError","SeverityKind","FATAL","step","TransferEngineValidationError","TransferEngineTransferError"],"mappings":";;;;;AAQA,MAAMA,mBAIIC,SAAAA,sBAAAA,CAAAA;AACRC,IAAAA,WAAAA,CAAYC,QAAkB,EAAEC,OAAgB,EAAEC,OAAkB,CAAE;QACpE,KAAK,CAAC,QAAUF,EAAAA,QAAAA,EAAUC,OAASC,EAAAA,OAAAA,CAAAA;AACrC;AACF;AAEA,MAAMC,iCAA0CN,SAAAA,mBAAAA,CAAAA;AAC9CE,IAAAA,WAAAA,CAAYE,OAAgB,CAAE;AAC5B,QAAA,KAAK,CAACG,sBAAAA,CAAaC,KAAK,EAAEJ,OAAS,EAAA;YAAEK,IAAM,EAAA;AAAiB,SAAA,CAAA;AAC9D;AACF;AAEA,MAAMC,6BAEIV,SAAAA,mBAAAA,CAAAA;IACRE,WAAYE,CAAAA,OAAgB,EAAEC,OAAW,CAAE;AACzC,QAAA,KAAK,CAACE,sBAAAA,CAAaC,KAAK,EAAEJ,OAAS,EAAA;YAAEK,IAAM,EAAA,YAAA;AAAcJ,YAAAA;AAAQ,SAAA,CAAA;AACnE;AACF;AAEA,MAAMM,2BAEIX,SAAAA,mBAAAA,CAAAA;IACRE,WAAYE,CAAAA,OAAgB,EAAEC,OAAW,CAAE;AACzC,QAAA,KAAK,CAACE,sBAAAA,CAAaC,KAAK,EAAEJ,OAAS,EAAA;YAAEK,IAAM,EAAA,UAAA;AAAYJ,YAAAA;AAAQ,SAAA,CAAA;AACjE;AACF;;;;;;;"}


@@ -0,0 +1,34 @@
import { SeverityKind } from '../errors/constants.mjs';
import { DataTransferError } from '../errors/base.mjs';
class TransferEngineError extends DataTransferError {
constructor(severity, message, details){
super('engine', severity, message, details);
}
}
class TransferEngineInitializationError extends TransferEngineError {
constructor(message){
super(SeverityKind.FATAL, message, {
step: 'initialization'
});
}
}
class TransferEngineValidationError extends TransferEngineError {
constructor(message, details){
super(SeverityKind.FATAL, message, {
step: 'validation',
details
});
}
}
class TransferEngineTransferError extends TransferEngineError {
constructor(message, details){
super(SeverityKind.FATAL, message, {
step: 'transfer',
details
});
}
}
export { TransferEngineError, TransferEngineInitializationError, TransferEngineTransferError, TransferEngineValidationError };
//# sourceMappingURL=errors.mjs.map


@@ -0,0 +1 @@
{"version":3,"file":"errors.mjs","sources":["../../src/engine/errors.ts"],"sourcesContent":["import { DataTransferError, Severity, SeverityKind } from '../errors';\n\ntype TransferEngineStep = 'initialization' | 'validation' | 'transfer';\n\ntype TransferEngineErrorDetails<P extends TransferEngineStep = TransferEngineStep, U = never> = {\n step: P;\n} & ([U] extends [never] ? unknown : { details?: U });\n\nclass TransferEngineError<\n P extends TransferEngineStep = TransferEngineStep,\n U = never,\n T extends TransferEngineErrorDetails<P, U> = TransferEngineErrorDetails<P, U>,\n> extends DataTransferError<T> {\n constructor(severity: Severity, message?: string, details?: T | null) {\n super('engine', severity, message, details);\n }\n}\n\nclass TransferEngineInitializationError extends TransferEngineError<'initialization'> {\n constructor(message?: string) {\n super(SeverityKind.FATAL, message, { step: 'initialization' });\n }\n}\n\nclass TransferEngineValidationError<\n T extends { check: string } = { check: string },\n> extends TransferEngineError<'validation', T> {\n constructor(message?: string, details?: T) {\n super(SeverityKind.FATAL, message, { step: 'validation', details });\n }\n}\n\nclass TransferEngineTransferError<\n T extends { check: string } = { check: string },\n> extends TransferEngineError<'transfer', T> {\n constructor(message?: string, details?: T) {\n super(SeverityKind.FATAL, message, { step: 'transfer', details });\n }\n}\n\nexport {\n TransferEngineError,\n TransferEngineInitializationError,\n TransferEngineValidationError,\n TransferEngineTransferError,\n};\n"],"names":["TransferEngineError","DataTransferError","constructor","severity","message","details","TransferEngineInitializationError","SeverityKind","FATAL","step","TransferEngineValidationError","TransferEngineTransferError"],"mappings":";;;AAQA,MAAMA,mBAIIC,SAAAA,iBAAAA,CAAAA;AACRC,IAAAA,WAAAA,CAAYC,QAAkB,EAAEC,OAAgB,EAAEC,OAAkB,CAAE;QACpE,KAAK,CAAC,QAAUF,EAAAA,QAAAA,EAAUC,OAASC,EAAAA,OAAAA,CAAAA;AACrC;AACF;AAEA,MAAMC,iCAA0CN,SAAAA,mBAAAA,CAAAA;AAC9CE,IAAAA,WAAAA,CAAYE,OAAgB,CAAE;AAC5B,QAAA,KAAK,CAACG,YAAAA,CAAaC,KAAK,EAAEJ,OAAS,EAAA;YAAEK,IAAM,EAAA;AAAiB,SAAA,CAAA;AAC9D;AACF;AAEA,MAAMC,6BAEIV,SAAAA,mBAAAA,CAAAA;IACRE,WAAYE,CAAAA,OAAgB,EAAEC,OAAW,CAAE;AACzC,QAAA,KAAK,CAACE,YAAAA,CAAaC,KAAK,EAAEJ,OAAS,EAAA;YAAEK,IAAM,EAAA,YAAA;AAAcJ,YAAAA;AAAQ,SAAA,CAAA;AACnE;AACF;AAEA,MAAMM,2BAEIX,SAAAA,mBAAAA,CAAAA;IACRE,WAAYE,CAAAA,OAAgB,EAAEC,OAAW,CAAE;AACzC,QAAA,KAAK,CAACE,YAAAA,CAAaC,KAAK,EAAEJ,OAAS,EAAA;YAAEK,IAAM,EAAA,UAAA;AAAYJ,YAAAA;AAAQ,SAAA,CAAA;AACjE;AACF;;;;"}


@@ -0,0 +1,66 @@
/// <reference types="node" />
import { PassThrough } from 'stream';
import type { IDestinationProvider, ISourceProvider, ITransferEngine, ITransferEngineOptions, TransferProgress, ITransferResults, TransferStage, TransferFilters, TransferFilterPreset, SchemaDiffHandler, SchemaDiffHandlerContext, ErrorHandler, ErrorHandlerContext, ErrorCode } from '../../types';
import { IDiagnosticReporter, ErrorDiagnosticSeverity } from '../utils/diagnostic';
export declare const TRANSFER_STAGES: ReadonlyArray<TransferStage>;
export type TransferGroupFilter = Record<TransferFilterPreset, TransferFilters>;
/**
* Preset filters for only/exclude options
* */
export declare const TransferGroupPresets: TransferGroupFilter;
export declare const DEFAULT_VERSION_STRATEGY = "ignore";
export declare const DEFAULT_SCHEMA_STRATEGY = "strict";
declare class TransferEngine<S extends ISourceProvider = ISourceProvider, D extends IDestinationProvider = IDestinationProvider> implements ITransferEngine {
#private;
sourceProvider: ISourceProvider;
destinationProvider: IDestinationProvider;
options: ITransferEngineOptions;
progress: {
data: TransferProgress;
stream: PassThrough;
};
diagnostics: IDiagnosticReporter;
onSchemaDiff(handler: SchemaDiffHandler): void;
addErrorHandler(handlerName: ErrorCode, handler: ErrorHandler): void;
attemptResolveError(error: Error): Promise<boolean>;
constructor(sourceProvider: S, destinationProvider: D, options: ITransferEngineOptions);
/**
* Report a fatal error and throw it
*/
panic(error: Error): void;
/**
* Report an error diagnostic
*/
reportError(error: Error, severity: ErrorDiagnosticSeverity): void;
/**
* Report a warning diagnostic
*/
reportWarning(message: string, origin?: string): void;
/**
* Report an info diagnostic
*/
reportInfo(message: string, params?: unknown): void;
shouldSkipStage(stage: TransferStage): boolean;
abortTransfer(): Promise<void>;
init(): Promise<void>;
/**
* Run the bootstrap method in both source and destination providers
*/
bootstrap(): Promise<void>;
/**
* Run the close method in both source and destination providers
*/
close(): Promise<void>;
integrityCheck(): Promise<void>;
transfer(): Promise<ITransferResults<S, D>>;
beforeTransfer(): Promise<void>;
transferSchemas(): Promise<void>;
transferEntities(): Promise<void>;
transferLinks(): Promise<void>;
transferAssets(): Promise<void>;
transferConfiguration(): Promise<void>;
}
export declare const createTransferEngine: <S extends ISourceProvider, D extends IDestinationProvider>(sourceProvider: S, destinationProvider: D, options: ITransferEngineOptions) => TransferEngine<S, D>;
export type { TransferEngine, ITransferEngine, ITransferEngineOptions, ISourceProvider, IDestinationProvider, TransferStage, TransferFilterPreset, ErrorHandlerContext, SchemaDiffHandlerContext, ITransferResults, };
export * as errors from './errors';
//# sourceMappingURL=index.d.ts.map
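
The index.d.ts above exposes `createTransferEngine` plus the engine's public surface: the transfer stages, the `only`/`exclude` group presets, the default version strategy (`'ignore'`) and schema strategy (`'strict'`), and a `progress.stream` PassThrough that re-emits transfer and stage events. Below is a hedged sketch of how a caller might wire it up; the providers, the root import specifier and the option cast are placeholders rather than something defined in this diff.

```ts
// Sketch only: the import specifier, the provider values and the option cast are
// assumptions for illustration; this diff does not define concrete providers.
import type {
  IDestinationProvider,
  ISourceProvider,
  ITransferEngineOptions,
} from '@strapi/data-transfer';
import { createTransferEngine } from '@strapi/data-transfer';

async function runTransfer(source: ISourceProvider, destination: IDestinationProvider) {
  const options: Partial<ITransferEngineOptions> = {
    versionStrategy: 'ignore', // DEFAULT_VERSION_STRATEGY
    schemaStrategy: 'strict', // DEFAULT_SCHEMA_STRATEGY
    only: ['content'], // preset covering the 'entities' and 'links' stages
  };

  // The cast papers over option fields that are not shown in this diff.
  const engine = createTransferEngine(source, destination, options as ITransferEngineOptions);

  // progress.stream is a PassThrough that re-emits `stage::*` and `transfer::*` events.
  engine.progress.stream.on('stage::progress', ({ stage, data }) => {
    console.log(`[${stage}]`, data[stage]?.count ?? 0, 'items so far');
  });

  const results = await engine.transfer();
  console.log(results.engine); // per-stage count, bytes and timing data
}
```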


@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/engine/index.ts"],"names":[],"mappings":";AAAA,OAAO,EAAE,WAAW,EAAiC,MAAM,QAAQ,CAAC;AAWpE,OAAO,KAAK,EAEV,oBAAoB,EAIpB,eAAe,EACf,eAAe,EACf,sBAAsB,EACtB,gBAAgB,EAChB,gBAAgB,EAChB,aAAa,EAGb,eAAe,EACf,oBAAoB,EAEpB,iBAAiB,EACjB,wBAAwB,EACxB,YAAY,EACZ,mBAAmB,EAEnB,SAAS,EACV,MAAM,aAAa,CAAC;AAMrB,OAAO,EAEL,mBAAmB,EACnB,uBAAuB,EACxB,MAAM,qBAAqB,CAAC;AAK7B,eAAO,MAAM,eAAe,EAAE,aAAa,CAAC,aAAa,CAMvD,CAAC;AAEH,MAAM,MAAM,mBAAmB,GAAG,MAAM,CAAC,oBAAoB,EAAE,eAAe,CAAC,CAAC;AAEhF;;KAEK;AACL,eAAO,MAAM,oBAAoB,EAAE,mBAoBlC,CAAC;AAEF,eAAO,MAAM,wBAAwB,WAAW,CAAC;AACjD,eAAO,MAAM,uBAAuB,WAAW,CAAC;AAIhD,cAAM,cAAc,CAClB,CAAC,SAAS,eAAe,GAAG,eAAe,EAC3C,CAAC,SAAS,oBAAoB,GAAG,oBAAoB,CACrD,YAAW,eAAe;;IAE1B,cAAc,EAAE,eAAe,CAAC;IAEhC,mBAAmB,EAAE,oBAAoB,CAAC;IAE1C,OAAO,EAAE,sBAAsB,CAAC;IAOhC,QAAQ,EAAE;QAER,IAAI,EAAE,gBAAgB,CAAC;QAEvB,MAAM,EAAE,WAAW,CAAC;KACrB,CAAC;IAEF,WAAW,EAAE,mBAAmB,CAAC;IAcjC,YAAY,CAAC,OAAO,EAAE,iBAAiB;IAIvC,eAAe,CAAC,WAAW,EAAE,SAAS,EAAE,OAAO,EAAE,YAAY;IAOvD,mBAAmB,CAAC,KAAK,EAAE,KAAK;gBAa1B,cAAc,EAAE,CAAC,EAAE,mBAAmB,EAAE,CAAC,EAAE,OAAO,EAAE,sBAAsB;IAatF;;OAEG;IACH,KAAK,CAAC,KAAK,EAAE,KAAK;IAMlB;;OAEG;IACH,WAAW,CAAC,KAAK,EAAE,KAAK,EAAE,QAAQ,EAAE,uBAAuB;IAa3D;;OAEG;IACH,aAAa,CAAC,OAAO,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,MAAM;IAO9C;;OAEG;IACH,UAAU,CAAC,OAAO,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,OAAO;IAqR5C,eAAe,CAAC,KAAK,EAAE,aAAa;IAuH9B,aAAa,IAAI,OAAO,CAAC,IAAI,CAAC;IAM9B,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC;IAa3B;;OAEG;IACG,SAAS,IAAI,OAAO,CAAC,IAAI,CAAC;IAahC;;OAEG;IACG,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAyCtB,cAAc;IAyDd,QAAQ,IAAI,OAAO,CAAC,gBAAgB,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;IAmD3C,cAAc,IAAI,OAAO,CAAC,IAAI,CAAC;IAwB/B,eAAe,IAAI,OAAO,CAAC,IAAI,CAAC;IAiBhC,gBAAgB,IAAI,OAAO,CAAC,IAAI,CAAC;IA6CjC,aAAa,IAAI,OAAO,CAAC,IAAI,CAAC;IAsC9B,cAAc,IAAI,OAAO,CAAC,IAAI,CAAC;IAkB/B,qBAAqB,IAAI,OAAO,CAAC,IAAI,CAAC;CAc7C;AAED,eAAO,MAAM,oBAAoB,8EACf,CAAC,uBACI,CAAC,WACb,sBAAsB,KAC9B,eAAe,CAAC,EAAE,CAAC,CAErB,CAAC;AAEF,YAAY,EACV,cAAc,EACd,eAAe,EACf,sBAAsB,EACtB,eAAe,EACf,oBAAoB,EACpB,aAAa,EACb,oBAAoB,EACpB,mBAAmB,EACnB,wBAAwB,EACxB,gBAAgB,GACjB,CAAC;AAEF,OAAO,KAAK,MAAM,MAAM,UAAU,CAAC"}


@@ -0,0 +1,797 @@
'use strict';
var stream = require('stream');
var promises = require('stream/promises');
var path = require('path');
var os = require('os');
var streamChain = require('stream-chain');
var fp = require('lodash/fp');
var semver = require('semver');
var index = require('./validation/schemas/index.js');
var provider = require('./validation/provider.js');
var errors = require('./errors.js');
var diagnostic = require('../utils/diagnostic.js');
require('crypto');
var stream$1 = require('../utils/stream.js');
var json = require('../utils/json.js');
require('events');
var middleware = require('../utils/middleware.js');
var providers = require('../errors/providers.js');
function _class_private_field_loose_base(receiver, privateKey) {
if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) {
throw new TypeError("attempted to use private field on non-instance");
}
return receiver;
}
var id = 0;
function _class_private_field_loose_key(name) {
return "__private_" + id++ + "_" + name;
}
const TRANSFER_STAGES = Object.freeze([
'entities',
'links',
'assets',
'schemas',
'configuration'
]);
/**
* Preset filters for only/exclude options
* */ const TransferGroupPresets = {
content: {
links: true,
entities: true
},
files: {
assets: true
},
config: {
configuration: true
}
};
const DEFAULT_VERSION_STRATEGY = 'ignore';
const DEFAULT_SCHEMA_STRATEGY = 'strict';
var _metadata = /*#__PURE__*/ _class_private_field_loose_key("_metadata"), _schema = /*#__PURE__*/ _class_private_field_loose_key("_schema"), _handlers = /*#__PURE__*/ _class_private_field_loose_key("_handlers"), _currentStreamController = /*#__PURE__*/ _class_private_field_loose_key("_currentStreamController"), _aborted = /*#__PURE__*/ _class_private_field_loose_key("_aborted"), /**
* Create and return a transform stream based on the given stage and options.
*
 * Allowed transformations include 'filter' and 'map'.
*/ _createStageTransformStream = /*#__PURE__*/ _class_private_field_loose_key("_createStageTransformStream"), /**
* Update the Engine's transfer progress data for a given stage.
*
 * Providing aggregate options enables custom computation to get the size (bytes) or the aggregate key associated with the data
*/ _updateTransferProgress = /*#__PURE__*/ _class_private_field_loose_key("_updateTransferProgress"), /**
* Create and return a PassThrough stream.
*
* Upon writing data into it, it'll update the Engine's transfer progress data and trigger stage update events.
*/ _progressTracker = /*#__PURE__*/ _class_private_field_loose_key("_progressTracker"), /**
 * Shorthand method used to trigger transfer update events to every listener
*/ _emitTransferUpdate = /*#__PURE__*/ _class_private_field_loose_key("_emitTransferUpdate"), /**
 * Shorthand method used to trigger stage update events to every listener
*/ _emitStageUpdate = /*#__PURE__*/ _class_private_field_loose_key("_emitStageUpdate"), /**
 * Run a version check between two Strapi versions (source and destination) using the strategy given to the engine during initialization.
*
* If there is a mismatch, throws a validation error.
*/ _assertStrapiVersionIntegrity = /*#__PURE__*/ _class_private_field_loose_key("_assertStrapiVersionIntegrity"), /**
 * Run a check between two sets of schemas (source and destination) using the strategy given to the engine during initialization.
*
* If there are differences and/or incompatibilities between source and destination schemas, then throw a validation error.
*/ _assertSchemasMatching = /*#__PURE__*/ _class_private_field_loose_key("_assertSchemasMatching"), _transferStage = /*#__PURE__*/ _class_private_field_loose_key("_transferStage"), _resolveProviderResource = /*#__PURE__*/ _class_private_field_loose_key("_resolveProviderResource"), _getSchemas = /*#__PURE__*/ _class_private_field_loose_key("_getSchemas");
class TransferEngine {
onSchemaDiff(handler) {
_class_private_field_loose_base(this, _handlers)[_handlers]?.schemaDiff?.push(handler);
}
addErrorHandler(handlerName, handler) {
if (!_class_private_field_loose_base(this, _handlers)[_handlers].errors[handlerName]) {
_class_private_field_loose_base(this, _handlers)[_handlers].errors[handlerName] = [];
}
_class_private_field_loose_base(this, _handlers)[_handlers].errors[handlerName]?.push(handler);
}
async attemptResolveError(error) {
const context = {};
if (error instanceof providers.ProviderTransferError && error.details?.details.code) {
const errorCode = error.details?.details.code;
if (!_class_private_field_loose_base(this, _handlers)[_handlers].errors[errorCode]) {
_class_private_field_loose_base(this, _handlers)[_handlers].errors[errorCode] = [];
}
await middleware.runMiddleware(context ?? {}, _class_private_field_loose_base(this, _handlers)[_handlers].errors[errorCode] ?? []);
}
return !!context.ignore;
}
/**
* Report a fatal error and throw it
*/ panic(error) {
this.reportError(error, 'fatal');
throw error;
}
/**
* Report an error diagnostic
*/ reportError(error, severity) {
this.diagnostics.report({
kind: 'error',
details: {
severity,
createdAt: new Date(),
name: error.name,
message: error.message,
error
}
});
}
/**
* Report a warning diagnostic
*/ reportWarning(message, origin1) {
this.diagnostics.report({
kind: 'warning',
details: {
createdAt: new Date(),
message,
origin: origin1
}
});
}
/**
* Report an info diagnostic
*/ reportInfo(message, params) {
this.diagnostics.report({
kind: 'info',
details: {
createdAt: new Date(),
message,
params,
origin: 'engine'
}
});
}
shouldSkipStage(stage) {
const { exclude, only } = this.options;
// schemas must always be included
if (stage === 'schemas') {
return false;
}
// everything is included by default unless 'only' has been set
let included = fp.isEmpty(only);
if (only && only.length > 0) {
included = only.some((transferGroup)=>{
return TransferGroupPresets[transferGroup][stage];
});
}
if (exclude && exclude.length > 0) {
if (included) {
included = !exclude.some((transferGroup)=>{
return TransferGroupPresets[transferGroup][stage];
});
}
}
return !included;
}
// Cause an ongoing transfer to abort gracefully
async abortTransfer() {
_class_private_field_loose_base(this, _aborted)[_aborted] = true;
_class_private_field_loose_base(this, _currentStreamController)[_currentStreamController]?.abort();
throw new errors.TransferEngineError('fatal', 'Transfer aborted.');
}
async init() {
// Resolve providers' resource and store
// them in the engine's internal state
await _class_private_field_loose_base(this, _resolveProviderResource)[_resolveProviderResource]();
// Update the destination provider's source metadata
const { source: sourceMetadata } = _class_private_field_loose_base(this, _metadata)[_metadata];
if (sourceMetadata) {
this.destinationProvider.setMetadata?.('source', sourceMetadata);
}
}
/**
* Run the bootstrap method in both source and destination providers
*/ async bootstrap() {
const results = await Promise.allSettled([
this.sourceProvider.bootstrap?.(this.diagnostics),
this.destinationProvider.bootstrap?.(this.diagnostics)
]);
results.forEach((result)=>{
if (result.status === 'rejected') {
this.panic(result.reason);
}
});
}
/**
* Run the close method in both source and destination providers
*/ async close() {
const results = await Promise.allSettled([
this.sourceProvider.close?.(),
this.destinationProvider.close?.()
]);
results.forEach((result)=>{
if (result.status === 'rejected') {
this.panic(result.reason);
}
});
}
async integrityCheck() {
const sourceMetadata = await this.sourceProvider.getMetadata();
const destinationMetadata = await this.destinationProvider.getMetadata();
if (sourceMetadata && destinationMetadata) {
_class_private_field_loose_base(this, _assertStrapiVersionIntegrity)[_assertStrapiVersionIntegrity](sourceMetadata?.strapi?.version, destinationMetadata?.strapi?.version);
}
const { sourceSchemas, destinationSchemas } = await _class_private_field_loose_base(this, _getSchemas)[_getSchemas]();
try {
if (sourceSchemas && destinationSchemas) {
_class_private_field_loose_base(this, _assertSchemasMatching)[_assertSchemasMatching](sourceSchemas, destinationSchemas);
}
} catch (error) {
// if this is a schema matching error, allow handlers to resolve it
if (error instanceof errors.TransferEngineValidationError && error.details?.details?.diffs) {
const schemaDiffs = error.details?.details?.diffs;
const context = {
ignoredDiffs: {},
diffs: schemaDiffs,
source: this.sourceProvider,
destination: this.destinationProvider
};
// if we don't have any handlers, throw the original error
if (fp.isEmpty(_class_private_field_loose_base(this, _handlers)[_handlers].schemaDiff)) {
throw error;
}
await middleware.runMiddleware(context, _class_private_field_loose_base(this, _handlers)[_handlers].schemaDiff);
// if there are any remaining diffs that weren't ignored
const unresolvedDiffs = json.diff(context.diffs, context.ignoredDiffs);
if (unresolvedDiffs.length) {
this.panic(new errors.TransferEngineValidationError('Unresolved differences in schema', {
check: 'schema.changes',
unresolvedDiffs
}));
}
return;
}
throw error;
}
}
async transfer() {
// reset data between transfers
this.progress.data = {};
try {
_class_private_field_loose_base(this, _emitTransferUpdate)[_emitTransferUpdate]('init');
await this.bootstrap();
await this.init();
await this.integrityCheck();
_class_private_field_loose_base(this, _emitTransferUpdate)[_emitTransferUpdate]('start');
await this.beforeTransfer();
// Run the transfer stages
await this.transferSchemas();
await this.transferEntities();
await this.transferAssets();
await this.transferLinks();
await this.transferConfiguration();
// Gracefully close the providers
await this.close();
_class_private_field_loose_base(this, _emitTransferUpdate)[_emitTransferUpdate]('finish');
} catch (e) {
_class_private_field_loose_base(this, _emitTransferUpdate)[_emitTransferUpdate]('error', {
error: e
});
const lastDiagnostic = fp.last(this.diagnostics.stack.items);
// Do not report an error diagnostic if the last one reported the same error
if (e instanceof Error && (!lastDiagnostic || lastDiagnostic.kind !== 'error' || lastDiagnostic.details.error !== e)) {
this.reportError(e, e.severity || 'fatal');
}
// Rollback the destination provider if an exception is thrown during the transfer
// Note: This will be configurable in the future
await this.destinationProvider.rollback?.(e);
throw e;
}
return {
source: this.sourceProvider.results,
destination: this.destinationProvider.results,
engine: this.progress.data
};
}
async beforeTransfer() {
const runWithDiagnostic = async (provider)=>{
try {
await provider.beforeTransfer?.();
} catch (error) {
if (error instanceof Error) {
const resolved = await this.attemptResolveError(error);
if (resolved) {
return;
}
this.panic(error);
} else {
this.panic(new Error(`Unknown error when executing "beforeTransfer" on the ${origin} provider`));
}
}
};
await runWithDiagnostic(this.sourceProvider);
await runWithDiagnostic(this.destinationProvider);
}
async transferSchemas() {
const stage = 'schemas';
if (this.shouldSkipStage(stage)) {
return;
}
const source = await this.sourceProvider.createSchemasReadStream?.();
const destination = await this.destinationProvider.createSchemasWriteStream?.();
const transform = _class_private_field_loose_base(this, _createStageTransformStream)[_createStageTransformStream](stage);
const tracker = _class_private_field_loose_base(this, _progressTracker)[_progressTracker](stage, {
key: (value)=>value.modelType
});
await _class_private_field_loose_base(this, _transferStage)[_transferStage]({
stage,
source,
destination,
transform,
tracker
});
}
async transferEntities() {
const stage = 'entities';
if (this.shouldSkipStage(stage)) {
return;
}
const source = await this.sourceProvider.createEntitiesReadStream?.();
const destination = await this.destinationProvider.createEntitiesWriteStream?.();
const transform = streamChain.chain([
_class_private_field_loose_base(this, _createStageTransformStream)[_createStageTransformStream](stage),
new stream.Transform({
objectMode: true,
transform: async (entity, _encoding, callback)=>{
const { destinationSchemas: schemas } = await _class_private_field_loose_base(this, _getSchemas)[_getSchemas]();
if (!schemas) {
return callback(null, entity);
}
// TODO: this would be safer if we only ignored things in ignoredDiffs, otherwise continue and let an error be thrown
const availableContentTypes = Object.entries(schemas).filter(([, schema])=>schema.modelType === 'contentType').map(([uid])=>uid);
// If the type of the transferred entity doesn't exist in the destination, then discard it
if (!availableContentTypes.includes(entity.type)) {
return callback(null, undefined);
}
const { type, data } = entity;
const attributes = schemas[type].attributes;
const attributesToKeep = Object.keys(attributes).concat('documentId');
const updatedEntity = fp.set('data', fp.pick(attributesToKeep, data), entity);
callback(null, updatedEntity);
}
})
]);
const tracker = _class_private_field_loose_base(this, _progressTracker)[_progressTracker](stage, {
key: (value)=>value.type
});
await _class_private_field_loose_base(this, _transferStage)[_transferStage]({
stage,
source,
destination,
transform,
tracker
});
}
async transferLinks() {
const stage = 'links';
if (this.shouldSkipStage(stage)) {
return;
}
const source = await this.sourceProvider.createLinksReadStream?.();
const destination = await this.destinationProvider.createLinksWriteStream?.();
const transform = streamChain.chain([
_class_private_field_loose_base(this, _createStageTransformStream)[_createStageTransformStream](stage),
new stream.Transform({
objectMode: true,
transform: async (link, _encoding, callback)=>{
const { destinationSchemas: schemas } = await _class_private_field_loose_base(this, _getSchemas)[_getSchemas]();
if (!schemas) {
return callback(null, link);
}
// TODO: this would be safer if we only ignored things in ignoredDiffs, otherwise continue and let an error be thrown
const availableContentTypes = Object.keys(schemas);
const isValidType = (uid)=>availableContentTypes.includes(uid);
if (!isValidType(link.left.type) || !isValidType(link.right.type)) {
return callback(null, undefined); // ignore the link
}
callback(null, link);
}
})
]);
const tracker = _class_private_field_loose_base(this, _progressTracker)[_progressTracker](stage);
await _class_private_field_loose_base(this, _transferStage)[_transferStage]({
stage,
source,
destination,
transform,
tracker
});
}
async transferAssets() {
const stage = 'assets';
if (this.shouldSkipStage(stage)) {
return;
}
const source = await this.sourceProvider.createAssetsReadStream?.();
const destination = await this.destinationProvider.createAssetsWriteStream?.();
const transform = _class_private_field_loose_base(this, _createStageTransformStream)[_createStageTransformStream](stage);
const tracker = _class_private_field_loose_base(this, _progressTracker)[_progressTracker](stage, {
size: (value)=>value.stats.size,
key: (value)=>path.extname(value.filename) || 'No extension'
});
await _class_private_field_loose_base(this, _transferStage)[_transferStage]({
stage,
source,
destination,
transform,
tracker
});
}
async transferConfiguration() {
const stage = 'configuration';
if (this.shouldSkipStage(stage)) {
return;
}
const source = await this.sourceProvider.createConfigurationReadStream?.();
const destination = await this.destinationProvider.createConfigurationWriteStream?.();
const transform = _class_private_field_loose_base(this, _createStageTransformStream)[_createStageTransformStream](stage);
const tracker = _class_private_field_loose_base(this, _progressTracker)[_progressTracker](stage);
await _class_private_field_loose_base(this, _transferStage)[_transferStage]({
stage,
source,
destination,
transform,
tracker
});
}
constructor(sourceProvider, destinationProvider, options){
Object.defineProperty(this, _createStageTransformStream, {
value: createStageTransformStream
});
Object.defineProperty(this, _updateTransferProgress, {
value: updateTransferProgress
});
Object.defineProperty(this, _progressTracker, {
value: progressTracker
});
Object.defineProperty(this, _emitTransferUpdate, {
value: emitTransferUpdate
});
Object.defineProperty(this, _emitStageUpdate, {
value: emitStageUpdate
});
Object.defineProperty(this, _assertStrapiVersionIntegrity, {
value: assertStrapiVersionIntegrity
});
Object.defineProperty(this, _assertSchemasMatching, {
value: assertSchemasMatching
});
Object.defineProperty(this, _transferStage, {
value: transferStage
});
Object.defineProperty(this, _resolveProviderResource, {
value: resolveProviderResource
});
Object.defineProperty(this, _getSchemas, {
value: getSchemas
});
Object.defineProperty(this, _metadata, {
writable: true,
value: void 0
});
Object.defineProperty(this, _schema, {
writable: true,
value: void 0
});
Object.defineProperty(this, _handlers, {
writable: true,
value: void 0
});
Object.defineProperty(this, _currentStreamController, {
writable: true,
value: void 0
});
Object.defineProperty(this, _aborted, {
writable: true,
value: void 0
});
_class_private_field_loose_base(this, _metadata)[_metadata] = {};
_class_private_field_loose_base(this, _schema)[_schema] = {};
_class_private_field_loose_base(this, _handlers)[_handlers] = {
schemaDiff: [],
errors: {}
};
_class_private_field_loose_base(this, _aborted)[_aborted] = false;
this.diagnostics = diagnostic.createDiagnosticReporter();
provider.validateProvider('source', sourceProvider);
provider.validateProvider('destination', destinationProvider);
this.sourceProvider = sourceProvider;
this.destinationProvider = destinationProvider;
this.options = options;
this.progress = {
data: {},
stream: new stream.PassThrough({
objectMode: true
})
};
}
}
function createStageTransformStream(key, options = {}) {
const { includeGlobal = true } = options;
const { throttle } = this.options;
const { global: globalTransforms, [key]: stageTransforms } = this.options?.transforms ?? {};
let stream$2 = new stream.PassThrough({
objectMode: true
});
const applyTransforms = (transforms = [])=>{
const chainTransforms = [];
for (const transform of transforms){
if ('filter' in transform) {
chainTransforms.push(stream$1.filter(transform.filter));
}
if ('map' in transform) {
chainTransforms.push(stream$1.map(transform.map));
}
}
if (chainTransforms.length) {
stream$2 = stream$2.pipe(streamChain.chain(chainTransforms));
}
};
if (includeGlobal) {
applyTransforms(globalTransforms);
}
if (fp.isNumber(throttle) && throttle > 0) {
stream$2 = stream$2.pipe(new stream.PassThrough({
objectMode: true,
async transform (data, _encoding, callback) {
await new Promise((resolve)=>{
setTimeout(resolve, throttle);
});
callback(null, data);
}
}));
}
applyTransforms(stageTransforms);
return stream$2;
}
function updateTransferProgress(stage, data, aggregate) {
if (!this.progress.data[stage]) {
this.progress.data[stage] = {
count: 0,
bytes: 0,
startTime: Date.now()
};
}
const stageProgress = this.progress.data[stage];
if (!stageProgress) {
return;
}
const size = aggregate?.size?.(data) ?? JSON.stringify(data).length;
const key = aggregate?.key?.(data);
stageProgress.count += 1;
stageProgress.bytes += size;
// Handle aggregate updates if necessary
if (key) {
if (!stageProgress.aggregates) {
stageProgress.aggregates = {};
}
const { aggregates } = stageProgress;
if (!aggregates[key]) {
aggregates[key] = {
count: 0,
bytes: 0
};
}
aggregates[key].count += 1;
aggregates[key].bytes += size;
}
}
function progressTracker(stage, aggregate) {
return new stream.PassThrough({
objectMode: true,
transform: (data, _encoding, callback)=>{
_class_private_field_loose_base(this, _updateTransferProgress)[_updateTransferProgress](stage, data, aggregate);
_class_private_field_loose_base(this, _emitStageUpdate)[_emitStageUpdate]('progress', stage);
callback(null, data);
}
});
}
function emitTransferUpdate(type, payload) {
this.progress.stream.emit(`transfer::${type}`, payload);
}
function emitStageUpdate(type, transferStage) {
this.progress.stream.emit(`stage::${type}`, {
data: this.progress.data,
stage: transferStage
});
}
function assertStrapiVersionIntegrity(sourceVersion, destinationVersion) {
const strategy = this.options.versionStrategy || DEFAULT_VERSION_STRATEGY;
const reject = ()=>{
throw new errors.TransferEngineValidationError(`The source and destination providers are targeting incompatible Strapi versions (using the "${strategy}" strategy). The source (${this.sourceProvider.name}) version is ${sourceVersion} and the destination (${this.destinationProvider.name}) version is ${destinationVersion}`, {
check: 'strapi.version',
strategy,
versions: {
source: sourceVersion,
destination: destinationVersion
}
});
};
if (!sourceVersion || !destinationVersion || strategy === 'ignore' || destinationVersion === sourceVersion) {
return;
}
let diff;
try {
diff = semver.diff(sourceVersion, destinationVersion);
} catch {
reject();
}
if (!diff) {
return;
}
const validPatch = [
'prelease',
'build'
];
const validMinor = [
...validPatch,
'patch',
'prepatch'
];
const validMajor = [
...validMinor,
'minor',
'preminor'
];
if (strategy === 'patch' && validPatch.includes(diff)) {
return;
}
if (strategy === 'minor' && validMinor.includes(diff)) {
return;
}
if (strategy === 'major' && validMajor.includes(diff)) {
return;
}
reject();
}
function assertSchemasMatching(sourceSchemas, destinationSchemas) {
const strategy = this.options.schemaStrategy || DEFAULT_SCHEMA_STRATEGY;
if (strategy === 'ignore') {
return;
}
const keys = fp.uniq(Object.keys(sourceSchemas).concat(Object.keys(destinationSchemas)));
const diffs = {};
keys.forEach((key)=>{
const sourceSchema = sourceSchemas[key];
const destinationSchema = destinationSchemas[key];
const schemaDiffs = index.compareSchemas(sourceSchema, destinationSchema, strategy);
if (schemaDiffs.length) {
diffs[key] = schemaDiffs;
}
});
if (!fp.isEmpty(diffs)) {
const formattedDiffs = Object.entries(diffs).map(([uid, ctDiffs])=>{
let msg = `- ${uid}:${os.EOL}`;
msg += ctDiffs.sort((a, b)=>a.kind > b.kind ? -1 : 1).map((diff)=>{
const path = diff.path.join('.');
if (diff.kind === 'added') {
return `${path} exists in destination schema but not in source schema and the data will not be transferred.`;
}
if (diff.kind === 'deleted') {
return `${path} exists in source schema but not in destination schema and the data will not be transferred.`;
}
if (diff.kind === 'modified') {
if (diff.types[0] === diff.types[1]) {
return `Schema value changed at "${path}": "${diff.values[0]}" (${diff.types[0]}) => "${diff.values[1]}" (${diff.types[1]})`;
}
return `Schema has differing data types at "${path}": "${diff.values[0]}" (${diff.types[0]}) => "${diff.values[1]}" (${diff.types[1]})`;
}
throw new errors.TransferEngineValidationError(`Invalid diff found for "${uid}"`, {
check: `schema on ${uid}`
});
}).map((line)=>` - ${line}`).join(os.EOL);
return msg;
}).join(os.EOL);
throw new errors.TransferEngineValidationError(`Invalid schema changes detected during integrity checks (using the ${strategy} strategy). Please find a summary of the changes below:\n${formattedDiffs}`, {
check: 'schema.changes',
strategy,
diffs
});
}
}
async function transferStage(options) {
if (_class_private_field_loose_base(this, _aborted)[_aborted]) {
throw new errors.TransferEngineError('fatal', 'Transfer aborted.');
}
const { stage, source, destination, transform, tracker } = options;
const updateEndTime = ()=>{
const stageData = this.progress.data[stage];
if (stageData) {
stageData.endTime = Date.now();
}
};
if (!source || !destination || this.shouldSkipStage(stage)) {
// Wait until source and destination are closed
const results = await Promise.allSettled([
source,
destination
].map((stream)=>{
// if stream is undefined or already closed, resolve immediately
if (!stream || stream.destroyed) {
return Promise.resolve();
}
// Wait until the close event is produced and then destroy the stream and resolve
return new Promise((resolve, reject)=>{
stream.on('close', resolve).on('error', reject).destroy();
});
}));
results.forEach((state)=>{
if (state.status === 'rejected') {
this.reportWarning(state.reason, `transfer(${stage})`);
}
});
_class_private_field_loose_base(this, _emitStageUpdate)[_emitStageUpdate]('skip', stage);
return;
}
_class_private_field_loose_base(this, _emitStageUpdate)[_emitStageUpdate]('start', stage);
try {
const streams = [
source
];
if (transform) {
streams.push(transform);
}
if (tracker) {
streams.push(tracker);
}
streams.push(destination);
// NOTE: to debug/confirm backpressure issues from misbehaving stream, uncomment the following lines
// source.on('pause', () => console.log(`[${stage}] Source paused due to backpressure`));
// source.on('resume', () => console.log(`[${stage}] Source resumed`));
// destination.on('drain', () =>
// console.log(`[${stage}] Destination drained, resuming data flow`)
// );
// destination.on('error', (err) => console.error(`[${stage}] Destination error:`, err));
const controller = new AbortController();
const { signal } = controller;
// Store the controller so you can cancel later
_class_private_field_loose_base(this, _currentStreamController)[_currentStreamController] = controller;
await promises.pipeline(streams, {
signal
});
_class_private_field_loose_base(this, _emitStageUpdate)[_emitStageUpdate]('finish', stage);
} catch (e) {
updateEndTime();
_class_private_field_loose_base(this, _emitStageUpdate)[_emitStageUpdate]('error', stage);
this.reportError(e, 'error');
if (!destination.destroyed) {
destination.destroy(e);
}
} finally{
updateEndTime();
}
}
async function resolveProviderResource() {
const sourceMetadata = await this.sourceProvider.getMetadata();
const destinationMetadata = await this.destinationProvider.getMetadata();
if (sourceMetadata) {
_class_private_field_loose_base(this, _metadata)[_metadata].source = sourceMetadata;
}
if (destinationMetadata) {
_class_private_field_loose_base(this, _metadata)[_metadata].destination = destinationMetadata;
}
}
async function getSchemas() {
if (!_class_private_field_loose_base(this, _schema)[_schema].source) {
_class_private_field_loose_base(this, _schema)[_schema].source = await this.sourceProvider.getSchemas?.();
}
if (!_class_private_field_loose_base(this, _schema)[_schema].destination) {
_class_private_field_loose_base(this, _schema)[_schema].destination = await this.destinationProvider.getSchemas?.();
}
return {
sourceSchemas: _class_private_field_loose_base(this, _schema)[_schema].source,
destinationSchemas: _class_private_field_loose_base(this, _schema)[_schema].destination
};
}
const createTransferEngine = (sourceProvider, destinationProvider, options)=>{
return new TransferEngine(sourceProvider, destinationProvider, options);
};
exports.errors = errors;
exports.DEFAULT_SCHEMA_STRATEGY = DEFAULT_SCHEMA_STRATEGY;
exports.DEFAULT_VERSION_STRATEGY = DEFAULT_VERSION_STRATEGY;
exports.TRANSFER_STAGES = TRANSFER_STAGES;
exports.TransferGroupPresets = TransferGroupPresets;
exports.createTransferEngine = createTransferEngine;
//# sourceMappingURL=index.js.map
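
One behaviour of the engine above worth calling out: `shouldSkipStage` never skips the 'schemas' stage, treats every stage as included until `only` is set, and lets `exclude` win over `only`. The stand-alone sketch below mirrors that decision table for illustration only; it re-implements the visible logic rather than importing anything from the package.

```ts
// Illustration only: mirrors the shouldSkipStage / TransferGroupPresets logic above.
type TransferStage = 'entities' | 'links' | 'assets' | 'schemas' | 'configuration';
type Preset = 'content' | 'files' | 'config';

const TransferGroupPresets: Record<Preset, Partial<Record<TransferStage, boolean>>> = {
  content: { links: true, entities: true },
  files: { assets: true },
  config: { configuration: true },
};

function shouldSkipStage(stage: TransferStage, only?: Preset[], exclude?: Preset[]): boolean {
  if (stage === 'schemas') return false; // schemas are always transferred

  // Everything is included by default unless `only` has been set.
  let included = !only || only.length === 0;
  if (only && only.length > 0) {
    included = only.some((group) => TransferGroupPresets[group][stage] === true);
  }

  // `exclude` can only remove stages that `only` (or the default) kept.
  if (exclude && exclude.length > 0 && included) {
    included = !exclude.some((group) => TransferGroupPresets[group][stage] === true);
  }

  return !included;
}

console.log(shouldSkipStage('assets', ['content'])); // true: assets are not in 'content'
console.log(shouldSkipStage('entities', undefined, ['config'])); // false: entities still run
console.log(shouldSkipStage('links', ['content'], ['content'])); // true: exclude wins
```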

File diff suppressed because one or more lines are too long


@@ -0,0 +1,792 @@
import { Transform, PassThrough } from 'stream';
import { pipeline } from 'stream/promises';
import { extname } from 'path';
import { EOL } from 'os';
import { chain } from 'stream-chain';
import { isEmpty, last, set, pick, isNumber, uniq } from 'lodash/fp';
import { diff as diff$1 } from 'semver';
import { compareSchemas } from './validation/schemas/index.mjs';
import { validateProvider } from './validation/provider.mjs';
import { TransferEngineError, TransferEngineValidationError } from './errors.mjs';
import * as errors from './errors.mjs';
export { errors };
import { createDiagnosticReporter } from '../utils/diagnostic.mjs';
import 'crypto';
import { filter, map } from '../utils/stream.mjs';
import { diff } from '../utils/json.mjs';
import 'events';
import { runMiddleware } from '../utils/middleware.mjs';
import { ProviderTransferError } from '../errors/providers.mjs';
function _class_private_field_loose_base(receiver, privateKey) {
if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) {
throw new TypeError("attempted to use private field on non-instance");
}
return receiver;
}
var id = 0;
function _class_private_field_loose_key(name) {
return "__private_" + id++ + "_" + name;
}
const TRANSFER_STAGES = Object.freeze([
'entities',
'links',
'assets',
'schemas',
'configuration'
]);
/**
* Preset filters for only/exclude options
* */ const TransferGroupPresets = {
content: {
links: true,
entities: true
},
files: {
assets: true
},
config: {
configuration: true
}
};
const DEFAULT_VERSION_STRATEGY = 'ignore';
const DEFAULT_SCHEMA_STRATEGY = 'strict';
var _metadata = /*#__PURE__*/ _class_private_field_loose_key("_metadata"), _schema = /*#__PURE__*/ _class_private_field_loose_key("_schema"), _handlers = /*#__PURE__*/ _class_private_field_loose_key("_handlers"), _currentStreamController = /*#__PURE__*/ _class_private_field_loose_key("_currentStreamController"), _aborted = /*#__PURE__*/ _class_private_field_loose_key("_aborted"), /**
* Create and return a transform stream based on the given stage and options.
*
 * Allowed transformations include 'filter' and 'map'.
*/ _createStageTransformStream = /*#__PURE__*/ _class_private_field_loose_key("_createStageTransformStream"), /**
* Update the Engine's transfer progress data for a given stage.
*
 * Providing aggregate options enables custom computation to get the size (bytes) or the aggregate key associated with the data
*/ _updateTransferProgress = /*#__PURE__*/ _class_private_field_loose_key("_updateTransferProgress"), /**
* Create and return a PassThrough stream.
*
* Upon writing data into it, it'll update the Engine's transfer progress data and trigger stage update events.
*/ _progressTracker = /*#__PURE__*/ _class_private_field_loose_key("_progressTracker"), /**
 * Shorthand method used to trigger transfer update events to every listener
*/ _emitTransferUpdate = /*#__PURE__*/ _class_private_field_loose_key("_emitTransferUpdate"), /**
 * Shorthand method used to trigger stage update events to every listener
*/ _emitStageUpdate = /*#__PURE__*/ _class_private_field_loose_key("_emitStageUpdate"), /**
 * Run a version check between two Strapi versions (source and destination) using the strategy given to the engine during initialization.
*
* If there is a mismatch, throws a validation error.
*/ _assertStrapiVersionIntegrity = /*#__PURE__*/ _class_private_field_loose_key("_assertStrapiVersionIntegrity"), /**
 * Run a check between two sets of schemas (source and destination) using the strategy given to the engine during initialization.
*
* If there are differences and/or incompatibilities between source and destination schemas, then throw a validation error.
*/ _assertSchemasMatching = /*#__PURE__*/ _class_private_field_loose_key("_assertSchemasMatching"), _transferStage = /*#__PURE__*/ _class_private_field_loose_key("_transferStage"), _resolveProviderResource = /*#__PURE__*/ _class_private_field_loose_key("_resolveProviderResource"), _getSchemas = /*#__PURE__*/ _class_private_field_loose_key("_getSchemas");
class TransferEngine {
onSchemaDiff(handler) {
_class_private_field_loose_base(this, _handlers)[_handlers]?.schemaDiff?.push(handler);
}
addErrorHandler(handlerName, handler) {
if (!_class_private_field_loose_base(this, _handlers)[_handlers].errors[handlerName]) {
_class_private_field_loose_base(this, _handlers)[_handlers].errors[handlerName] = [];
}
_class_private_field_loose_base(this, _handlers)[_handlers].errors[handlerName]?.push(handler);
}
async attemptResolveError(error) {
const context = {};
if (error instanceof ProviderTransferError && error.details?.details.code) {
const errorCode = error.details?.details.code;
if (!_class_private_field_loose_base(this, _handlers)[_handlers].errors[errorCode]) {
_class_private_field_loose_base(this, _handlers)[_handlers].errors[errorCode] = [];
}
await runMiddleware(context ?? {}, _class_private_field_loose_base(this, _handlers)[_handlers].errors[errorCode] ?? []);
}
return !!context.ignore;
}
/**
* Report a fatal error and throw it
*/ panic(error) {
this.reportError(error, 'fatal');
throw error;
}
/**
* Report an error diagnostic
*/ reportError(error, severity) {
this.diagnostics.report({
kind: 'error',
details: {
severity,
createdAt: new Date(),
name: error.name,
message: error.message,
error
}
});
}
/**
* Report a warning diagnostic
*/ reportWarning(message, origin1) {
this.diagnostics.report({
kind: 'warning',
details: {
createdAt: new Date(),
message,
origin: origin1
}
});
}
/**
* Report an info diagnostic
*/ reportInfo(message, params) {
this.diagnostics.report({
kind: 'info',
details: {
createdAt: new Date(),
message,
params,
origin: 'engine'
}
});
}
shouldSkipStage(stage) {
const { exclude, only } = this.options;
// schemas must always be included
if (stage === 'schemas') {
return false;
}
// everything is included by default unless 'only' has been set
let included = isEmpty(only);
if (only && only.length > 0) {
included = only.some((transferGroup)=>{
return TransferGroupPresets[transferGroup][stage];
});
}
if (exclude && exclude.length > 0) {
if (included) {
included = !exclude.some((transferGroup)=>{
return TransferGroupPresets[transferGroup][stage];
});
}
}
return !included;
}
// Cause an ongoing transfer to abort gracefully
async abortTransfer() {
_class_private_field_loose_base(this, _aborted)[_aborted] = true;
_class_private_field_loose_base(this, _currentStreamController)[_currentStreamController]?.abort();
throw new TransferEngineError('fatal', 'Transfer aborted.');
}
async init() {
// Resolve providers' resource and store
// them in the engine's internal state
await _class_private_field_loose_base(this, _resolveProviderResource)[_resolveProviderResource]();
// Update the destination provider's source metadata
const { source: sourceMetadata } = _class_private_field_loose_base(this, _metadata)[_metadata];
if (sourceMetadata) {
this.destinationProvider.setMetadata?.('source', sourceMetadata);
}
}
/**
* Run the bootstrap method in both source and destination providers
*/ async bootstrap() {
const results = await Promise.allSettled([
this.sourceProvider.bootstrap?.(this.diagnostics),
this.destinationProvider.bootstrap?.(this.diagnostics)
]);
results.forEach((result)=>{
if (result.status === 'rejected') {
this.panic(result.reason);
}
});
}
/**
* Run the close method in both source and destination providers
*/ async close() {
const results = await Promise.allSettled([
this.sourceProvider.close?.(),
this.destinationProvider.close?.()
]);
results.forEach((result)=>{
if (result.status === 'rejected') {
this.panic(result.reason);
}
});
}
async integrityCheck() {
const sourceMetadata = await this.sourceProvider.getMetadata();
const destinationMetadata = await this.destinationProvider.getMetadata();
if (sourceMetadata && destinationMetadata) {
_class_private_field_loose_base(this, _assertStrapiVersionIntegrity)[_assertStrapiVersionIntegrity](sourceMetadata?.strapi?.version, destinationMetadata?.strapi?.version);
}
const { sourceSchemas, destinationSchemas } = await _class_private_field_loose_base(this, _getSchemas)[_getSchemas]();
try {
if (sourceSchemas && destinationSchemas) {
_class_private_field_loose_base(this, _assertSchemasMatching)[_assertSchemasMatching](sourceSchemas, destinationSchemas);
}
} catch (error) {
// if this is a schema matching error, allow handlers to resolve it
if (error instanceof TransferEngineValidationError && error.details?.details?.diffs) {
const schemaDiffs = error.details?.details?.diffs;
const context = {
ignoredDiffs: {},
diffs: schemaDiffs,
source: this.sourceProvider,
destination: this.destinationProvider
};
// if we don't have any handlers, throw the original error
if (isEmpty(_class_private_field_loose_base(this, _handlers)[_handlers].schemaDiff)) {
throw error;
}
await runMiddleware(context, _class_private_field_loose_base(this, _handlers)[_handlers].schemaDiff);
// if there are any remaining diffs that weren't ignored
const unresolvedDiffs = diff(context.diffs, context.ignoredDiffs);
if (unresolvedDiffs.length) {
this.panic(new TransferEngineValidationError('Unresolved differences in schema', {
check: 'schema.changes',
unresolvedDiffs
}));
}
return;
}
throw error;
}
}
async transfer() {
// reset data between transfers
this.progress.data = {};
try {
_class_private_field_loose_base(this, _emitTransferUpdate)[_emitTransferUpdate]('init');
await this.bootstrap();
await this.init();
await this.integrityCheck();
_class_private_field_loose_base(this, _emitTransferUpdate)[_emitTransferUpdate]('start');
await this.beforeTransfer();
// Run the transfer stages
await this.transferSchemas();
await this.transferEntities();
await this.transferAssets();
await this.transferLinks();
await this.transferConfiguration();
// Gracefully close the providers
await this.close();
_class_private_field_loose_base(this, _emitTransferUpdate)[_emitTransferUpdate]('finish');
} catch (e) {
_class_private_field_loose_base(this, _emitTransferUpdate)[_emitTransferUpdate]('error', {
error: e
});
const lastDiagnostic = last(this.diagnostics.stack.items);
// Do not report an error diagnostic if the last one reported the same error
if (e instanceof Error && (!lastDiagnostic || lastDiagnostic.kind !== 'error' || lastDiagnostic.details.error !== e)) {
this.reportError(e, e.severity || 'fatal');
}
// Rollback the destination provider if an exception is thrown during the transfer
// Note: This will be configurable in the future
await this.destinationProvider.rollback?.(e);
throw e;
}
return {
source: this.sourceProvider.results,
destination: this.destinationProvider.results,
engine: this.progress.data
};
}
async beforeTransfer() {
const runWithDiagnostic = async (provider)=>{
try {
await provider.beforeTransfer?.();
} catch (error) {
if (error instanceof Error) {
const resolved = await this.attemptResolveError(error);
if (resolved) {
return;
}
this.panic(error);
} else {
this.panic(new Error(`Unknown error when executing "beforeTransfer" on the ${origin} provider`));
}
}
};
await runWithDiagnostic(this.sourceProvider);
await runWithDiagnostic(this.destinationProvider);
}
async transferSchemas() {
const stage = 'schemas';
if (this.shouldSkipStage(stage)) {
return;
}
const source = await this.sourceProvider.createSchemasReadStream?.();
const destination = await this.destinationProvider.createSchemasWriteStream?.();
const transform = _class_private_field_loose_base(this, _createStageTransformStream)[_createStageTransformStream](stage);
const tracker = _class_private_field_loose_base(this, _progressTracker)[_progressTracker](stage, {
key: (value)=>value.modelType
});
await _class_private_field_loose_base(this, _transferStage)[_transferStage]({
stage,
source,
destination,
transform,
tracker
});
}
async transferEntities() {
const stage = 'entities';
if (this.shouldSkipStage(stage)) {
return;
}
const source = await this.sourceProvider.createEntitiesReadStream?.();
const destination = await this.destinationProvider.createEntitiesWriteStream?.();
const transform = chain([
_class_private_field_loose_base(this, _createStageTransformStream)[_createStageTransformStream](stage),
new Transform({
objectMode: true,
transform: async (entity, _encoding, callback)=>{
const { destinationSchemas: schemas } = await _class_private_field_loose_base(this, _getSchemas)[_getSchemas]();
if (!schemas) {
return callback(null, entity);
}
// TODO: this would be safer if we only ignored things in ignoredDiffs, otherwise continue and let an error be thrown
const availableContentTypes = Object.entries(schemas).filter(([, schema])=>schema.modelType === 'contentType').map(([uid])=>uid);
// If the type of the transferred entity doesn't exist in the destination, then discard it
if (!availableContentTypes.includes(entity.type)) {
return callback(null, undefined);
}
const { type, data } = entity;
const attributes = schemas[type].attributes;
const attributesToKeep = Object.keys(attributes).concat('documentId');
const updatedEntity = set('data', pick(attributesToKeep, data), entity);
callback(null, updatedEntity);
}
})
]);
const tracker = _class_private_field_loose_base(this, _progressTracker)[_progressTracker](stage, {
key: (value)=>value.type
});
await _class_private_field_loose_base(this, _transferStage)[_transferStage]({
stage,
source,
destination,
transform,
tracker
});
}
async transferLinks() {
const stage = 'links';
if (this.shouldSkipStage(stage)) {
return;
}
const source = await this.sourceProvider.createLinksReadStream?.();
const destination = await this.destinationProvider.createLinksWriteStream?.();
const transform = chain([
_class_private_field_loose_base(this, _createStageTransformStream)[_createStageTransformStream](stage),
new Transform({
objectMode: true,
transform: async (link, _encoding, callback)=>{
const { destinationSchemas: schemas } = await _class_private_field_loose_base(this, _getSchemas)[_getSchemas]();
if (!schemas) {
return callback(null, link);
}
// TODO: this would be safer if we only ignored things in ignoredDiffs, otherwise continue and let an error be thrown
const availableContentTypes = Object.keys(schemas);
const isValidType = (uid)=>availableContentTypes.includes(uid);
if (!isValidType(link.left.type) || !isValidType(link.right.type)) {
return callback(null, undefined); // ignore the link
}
callback(null, link);
}
})
]);
const tracker = _class_private_field_loose_base(this, _progressTracker)[_progressTracker](stage);
await _class_private_field_loose_base(this, _transferStage)[_transferStage]({
stage,
source,
destination,
transform,
tracker
});
}
async transferAssets() {
const stage = 'assets';
if (this.shouldSkipStage(stage)) {
return;
}
const source = await this.sourceProvider.createAssetsReadStream?.();
const destination = await this.destinationProvider.createAssetsWriteStream?.();
const transform = _class_private_field_loose_base(this, _createStageTransformStream)[_createStageTransformStream](stage);
const tracker = _class_private_field_loose_base(this, _progressTracker)[_progressTracker](stage, {
size: (value)=>value.stats.size,
key: (value)=>extname(value.filename) || 'No extension'
});
await _class_private_field_loose_base(this, _transferStage)[_transferStage]({
stage,
source,
destination,
transform,
tracker
});
}
async transferConfiguration() {
const stage = 'configuration';
if (this.shouldSkipStage(stage)) {
return;
}
const source = await this.sourceProvider.createConfigurationReadStream?.();
const destination = await this.destinationProvider.createConfigurationWriteStream?.();
const transform = _class_private_field_loose_base(this, _createStageTransformStream)[_createStageTransformStream](stage);
const tracker = _class_private_field_loose_base(this, _progressTracker)[_progressTracker](stage);
await _class_private_field_loose_base(this, _transferStage)[_transferStage]({
stage,
source,
destination,
transform,
tracker
});
}
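    // The Object.defineProperty calls below attach the transpiled equivalents of the
    // class's #private members (keys created via _class_private_field_loose_key).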
constructor(sourceProvider, destinationProvider, options){
Object.defineProperty(this, _createStageTransformStream, {
value: createStageTransformStream
});
Object.defineProperty(this, _updateTransferProgress, {
value: updateTransferProgress
});
Object.defineProperty(this, _progressTracker, {
value: progressTracker
});
Object.defineProperty(this, _emitTransferUpdate, {
value: emitTransferUpdate
});
Object.defineProperty(this, _emitStageUpdate, {
value: emitStageUpdate
});
Object.defineProperty(this, _assertStrapiVersionIntegrity, {
value: assertStrapiVersionIntegrity
});
Object.defineProperty(this, _assertSchemasMatching, {
value: assertSchemasMatching
});
Object.defineProperty(this, _transferStage, {
value: transferStage
});
Object.defineProperty(this, _resolveProviderResource, {
value: resolveProviderResource
});
Object.defineProperty(this, _getSchemas, {
value: getSchemas
});
Object.defineProperty(this, _metadata, {
writable: true,
value: void 0
});
Object.defineProperty(this, _schema, {
writable: true,
value: void 0
});
Object.defineProperty(this, _handlers, {
writable: true,
value: void 0
});
Object.defineProperty(this, _currentStreamController, {
writable: true,
value: void 0
});
Object.defineProperty(this, _aborted, {
writable: true,
value: void 0
});
_class_private_field_loose_base(this, _metadata)[_metadata] = {};
_class_private_field_loose_base(this, _schema)[_schema] = {};
_class_private_field_loose_base(this, _handlers)[_handlers] = {
schemaDiff: [],
errors: {}
};
_class_private_field_loose_base(this, _aborted)[_aborted] = false;
this.diagnostics = createDiagnosticReporter();
validateProvider('source', sourceProvider);
validateProvider('destination', destinationProvider);
this.sourceProvider = sourceProvider;
this.destinationProvider = destinationProvider;
this.options = options;
this.progress = {
data: {},
stream: new PassThrough({
objectMode: true
})
};
}
}
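// Builds the transform pipeline for a stage from the configured global and per-stage
// transforms (filter/map entries), optionally delaying each chunk by `throttle`
// milliseconds when that option is a positive number.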
function createStageTransformStream(key, options = {}) {
const { includeGlobal = true } = options;
const { throttle } = this.options;
const { global: globalTransforms, [key]: stageTransforms } = this.options?.transforms ?? {};
let stream = new PassThrough({
objectMode: true
});
const applyTransforms = (transforms = [])=>{
const chainTransforms = [];
for (const transform of transforms){
if ('filter' in transform) {
chainTransforms.push(filter(transform.filter));
}
if ('map' in transform) {
chainTransforms.push(map(transform.map));
}
}
if (chainTransforms.length) {
stream = stream.pipe(chain(chainTransforms));
}
};
if (includeGlobal) {
applyTransforms(globalTransforms);
}
if (isNumber(throttle) && throttle > 0) {
stream = stream.pipe(new PassThrough({
objectMode: true,
async transform (data, _encoding, callback) {
await new Promise((resolve)=>{
setTimeout(resolve, throttle);
});
callback(null, data);
}
}));
}
applyTransforms(stageTransforms);
return stream;
}
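// Accumulates per-stage progress (count, bytes and optional per-key aggregates);
// when no `size` callback is given, a chunk's size defaults to the length of its
// JSON representation.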
function updateTransferProgress(stage, data, aggregate) {
if (!this.progress.data[stage]) {
this.progress.data[stage] = {
count: 0,
bytes: 0,
startTime: Date.now()
};
}
const stageProgress = this.progress.data[stage];
if (!stageProgress) {
return;
}
const size = aggregate?.size?.(data) ?? JSON.stringify(data).length;
const key = aggregate?.key?.(data);
stageProgress.count += 1;
stageProgress.bytes += size;
// Handle aggregate updates if necessary
if (key) {
if (!stageProgress.aggregates) {
stageProgress.aggregates = {};
}
const { aggregates } = stageProgress;
if (!aggregates[key]) {
aggregates[key] = {
count: 0,
bytes: 0
};
}
aggregates[key].count += 1;
aggregates[key].bytes += size;
}
}
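// Returns a pass-through stream that records progress for each chunk flowing through
// the given stage and emits a `stage::progress` event.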
function progressTracker(stage, aggregate) {
return new PassThrough({
objectMode: true,
transform: (data, _encoding, callback)=>{
_class_private_field_loose_base(this, _updateTransferProgress)[_updateTransferProgress](stage, data, aggregate);
_class_private_field_loose_base(this, _emitStageUpdate)[_emitStageUpdate]('progress', stage);
callback(null, data);
}
});
}
function emitTransferUpdate(type, payload) {
this.progress.stream.emit(`transfer::${type}`, payload);
}
function emitStageUpdate(type, transferStage) {
this.progress.stream.emit(`stage::${type}`, {
data: this.progress.data,
stage: transferStage
});
}
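// Checks that the source and destination Strapi versions are compatible with the
// configured `versionStrategy`: 'ignore' always passes, 'patch'/'minor'/'major'
// tolerate increasingly large semver differences, and any other strategy requires
// an exact match.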
function assertStrapiVersionIntegrity(sourceVersion, destinationVersion) {
const strategy = this.options.versionStrategy || DEFAULT_VERSION_STRATEGY;
const reject = ()=>{
        throw new TransferEngineValidationError(`The source and destination providers are targeting incompatible Strapi versions (using the "${strategy}" strategy). The source (${this.sourceProvider.name}) version is ${sourceVersion} and the destination (${this.destinationProvider.name}) version is ${destinationVersion}`, {
check: 'strapi.version',
strategy,
versions: {
source: sourceVersion,
destination: destinationVersion
}
});
};
if (!sourceVersion || !destinationVersion || strategy === 'ignore' || destinationVersion === sourceVersion) {
return;
}
let diff;
try {
diff = diff$1(sourceVersion, destinationVersion);
} catch {
reject();
}
if (!diff) {
return;
}
const validPatch = [
        'prerelease',
'build'
];
const validMinor = [
...validPatch,
'patch',
'prepatch'
];
const validMajor = [
...validMinor,
'minor',
'preminor'
];
if (strategy === 'patch' && validPatch.includes(diff)) {
return;
}
if (strategy === 'minor' && validMinor.includes(diff)) {
return;
}
if (strategy === 'major' && validMajor.includes(diff)) {
return;
}
reject();
}
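// Compares every source schema with its destination counterpart using the configured
// `schemaStrategy` and throws a validation error summarizing the diffs when
// incompatible changes are found ('ignore' skips the check entirely).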
function assertSchemasMatching(sourceSchemas, destinationSchemas) {
const strategy = this.options.schemaStrategy || DEFAULT_SCHEMA_STRATEGY;
if (strategy === 'ignore') {
return;
}
const keys = uniq(Object.keys(sourceSchemas).concat(Object.keys(destinationSchemas)));
const diffs = {};
keys.forEach((key)=>{
const sourceSchema = sourceSchemas[key];
const destinationSchema = destinationSchemas[key];
const schemaDiffs = compareSchemas(sourceSchema, destinationSchema, strategy);
if (schemaDiffs.length) {
diffs[key] = schemaDiffs;
}
});
if (!isEmpty(diffs)) {
const formattedDiffs = Object.entries(diffs).map(([uid, ctDiffs])=>{
let msg = `- ${uid}:${EOL}`;
msg += ctDiffs.sort((a, b)=>a.kind > b.kind ? -1 : 1).map((diff)=>{
const path = diff.path.join('.');
if (diff.kind === 'added') {
return `${path} exists in destination schema but not in source schema and the data will not be transferred.`;
}
if (diff.kind === 'deleted') {
return `${path} exists in source schema but not in destination schema and the data will not be transferred.`;
}
if (diff.kind === 'modified') {
if (diff.types[0] === diff.types[1]) {
return `Schema value changed at "${path}": "${diff.values[0]}" (${diff.types[0]}) => "${diff.values[1]}" (${diff.types[1]})`;
}
return `Schema has differing data types at "${path}": "${diff.values[0]}" (${diff.types[0]}) => "${diff.values[1]}" (${diff.types[1]})`;
}
throw new TransferEngineValidationError(`Invalid diff found for "${uid}"`, {
check: `schema on ${uid}`
});
}).map((line)=>` - ${line}`).join(EOL);
return msg;
}).join(EOL);
throw new TransferEngineValidationError(`Invalid schema changes detected during integrity checks (using the ${strategy} strategy). Please find a summary of the changes below:\n${formattedDiffs}`, {
check: 'schema.changes',
strategy,
diffs
});
}
}
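// Pipes a single stage (source -> transform -> tracker -> destination) through an
// abortable pipeline. When the stage is skipped or one of the streams is missing,
// both streams are destroyed gracefully and a `stage::skip` event is emitted instead.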
async function transferStage(options) {
if (_class_private_field_loose_base(this, _aborted)[_aborted]) {
throw new TransferEngineError('fatal', 'Transfer aborted.');
}
const { stage, source, destination, transform, tracker } = options;
const updateEndTime = ()=>{
const stageData = this.progress.data[stage];
if (stageData) {
stageData.endTime = Date.now();
}
};
if (!source || !destination || this.shouldSkipStage(stage)) {
// Wait until source and destination are closed
const results = await Promise.allSettled([
source,
destination
].map((stream)=>{
// if stream is undefined or already closed, resolve immediately
if (!stream || stream.destroyed) {
return Promise.resolve();
}
            // Destroy the stream, then resolve once its 'close' event fires (or reject on error)
return new Promise((resolve, reject)=>{
stream.on('close', resolve).on('error', reject).destroy();
});
}));
results.forEach((state)=>{
if (state.status === 'rejected') {
this.reportWarning(state.reason, `transfer(${stage})`);
}
});
_class_private_field_loose_base(this, _emitStageUpdate)[_emitStageUpdate]('skip', stage);
return;
}
_class_private_field_loose_base(this, _emitStageUpdate)[_emitStageUpdate]('start', stage);
try {
const streams = [
source
];
if (transform) {
streams.push(transform);
}
if (tracker) {
streams.push(tracker);
}
streams.push(destination);
        // NOTE: to debug/confirm backpressure issues from a misbehaving stream, uncomment the following lines
// source.on('pause', () => console.log(`[${stage}] Source paused due to backpressure`));
// source.on('resume', () => console.log(`[${stage}] Source resumed`));
// destination.on('drain', () =>
// console.log(`[${stage}] Destination drained, resuming data flow`)
// );
// destination.on('error', (err) => console.error(`[${stage}] Destination error:`, err));
const controller = new AbortController();
const { signal } = controller;
// Store the controller so you can cancel later
_class_private_field_loose_base(this, _currentStreamController)[_currentStreamController] = controller;
await pipeline(streams, {
signal
});
_class_private_field_loose_base(this, _emitStageUpdate)[_emitStageUpdate]('finish', stage);
} catch (e) {
updateEndTime();
_class_private_field_loose_base(this, _emitStageUpdate)[_emitStageUpdate]('error', stage);
this.reportError(e, 'error');
if (!destination.destroyed) {
destination.destroy(e);
}
} finally{
updateEndTime();
}
}
async function resolveProviderResource() {
const sourceMetadata = await this.sourceProvider.getMetadata();
const destinationMetadata = await this.destinationProvider.getMetadata();
if (sourceMetadata) {
_class_private_field_loose_base(this, _metadata)[_metadata].source = sourceMetadata;
}
if (destinationMetadata) {
_class_private_field_loose_base(this, _metadata)[_metadata].destination = destinationMetadata;
}
}
async function getSchemas() {
if (!_class_private_field_loose_base(this, _schema)[_schema].source) {
_class_private_field_loose_base(this, _schema)[_schema].source = await this.sourceProvider.getSchemas?.();
}
if (!_class_private_field_loose_base(this, _schema)[_schema].destination) {
_class_private_field_loose_base(this, _schema)[_schema].destination = await this.destinationProvider.getSchemas?.();
}
return {
sourceSchemas: _class_private_field_loose_base(this, _schema)[_schema].source,
destinationSchemas: _class_private_field_loose_base(this, _schema)[_schema].destination
};
}
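// Minimal usage sketch (illustrative only; `sourceProvider` and `destinationProvider`
// are assumed to be valid provider instances created elsewhere):
//
//   const engine = createTransferEngine(sourceProvider, destinationProvider, {
//     versionStrategy: 'ignore',
//     schemaStrategy: 'strict',
//     throttle: 0,
//   });
//   engine.progress.stream.on('stage::progress', ({ stage, data }) => {
//     console.log(`progress on ${stage}`, data);
//   });
//   const results = await engine.transfer();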
const createTransferEngine = (sourceProvider, destinationProvider, options)=>{
return new TransferEngine(sourceProvider, destinationProvider, options);
};
export { DEFAULT_SCHEMA_STRATEGY, DEFAULT_VERSION_STRATEGY, TRANSFER_STAGES, TransferGroupPresets, createTransferEngine };
//# sourceMappingURL=index.mjs.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,3 @@
export * from './schemas';
export * from './provider';
//# sourceMappingURL=index.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/engine/validation/index.ts"],"names":[],"mappings":"AAAA,cAAc,WAAW,CAAC;AAC1B,cAAc,YAAY,CAAC"}

View File

@@ -0,0 +1,4 @@
import type { IDestinationProvider, ISourceProvider, ProviderType } from '../../../types';
declare const validateProvider: <T extends ProviderType>(type: ProviderType, provider?: ([T] extends ['source'] ? ISourceProvider : IDestinationProvider) | null) => undefined;
export { validateProvider };
//# sourceMappingURL=provider.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"provider.d.ts","sourceRoot":"","sources":["../../../src/engine/validation/provider.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,oBAAoB,EAAE,eAAe,EAAE,YAAY,EAAE,MAAM,gBAAgB,CAAC;AAO1F,QAAA,MAAM,gBAAgB,iCACd,YAAY,aACP,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,QAAQ,CAAC,GAAG,eAAe,GAAG,oBAAoB,CAAC,GAAG,IAAI,cAapF,CAAC;AAEF,OAAO,EAAE,gBAAgB,EAAE,CAAC"}

View File

@@ -0,0 +1,19 @@
'use strict';
var fp = require('lodash/fp');
var errors = require('../errors.js');
const reject = (reason)=>{
throw new errors.TransferEngineValidationError(`Invalid provider supplied. ${reason}`);
};
const validateProvider = (type, provider)=>{
if (!provider) {
return reject(`Expected an instance of "${fp.capitalize(type)}Provider", but got "${typeof provider}" instead.`);
}
if (provider.type !== type) {
return reject(`Expected the provider to be of type "${type}" but got "${provider.type}" instead.`);
}
};
exports.validateProvider = validateProvider;
//# sourceMappingURL=provider.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"provider.js","sources":["../../../src/engine/validation/provider.ts"],"sourcesContent":["import { capitalize } from 'lodash/fp';\n\nimport type { IDestinationProvider, ISourceProvider, ProviderType } from '../../../types';\nimport { TransferEngineValidationError } from '../errors';\n\nconst reject = (reason: string): never => {\n throw new TransferEngineValidationError(`Invalid provider supplied. ${reason}`);\n};\n\nconst validateProvider = <T extends ProviderType>(\n type: ProviderType,\n provider?: ([T] extends ['source'] ? ISourceProvider : IDestinationProvider) | null\n) => {\n if (!provider) {\n return reject(\n `Expected an instance of \"${capitalize(type)}Provider\", but got \"${typeof provider}\" instead.`\n );\n }\n\n if (provider.type !== type) {\n return reject(\n `Expected the provider to be of type \"${type}\" but got \"${provider.type}\" instead.`\n );\n }\n};\n\nexport { validateProvider };\n"],"names":["reject","reason","TransferEngineValidationError","validateProvider","type","provider","capitalize"],"mappings":";;;;;AAKA,MAAMA,SAAS,CAACC,MAAAA,GAAAA;AACd,IAAA,MAAM,IAAIC,oCAA8B,CAAA,CAAC,2BAA2B,EAAED,OAAO,CAAC,CAAA;AAChF,CAAA;AAEME,MAAAA,gBAAAA,GAAmB,CACvBC,IACAC,EAAAA,QAAAA,GAAAA;AAEA,IAAA,IAAI,CAACA,QAAU,EAAA;QACb,OAAOL,MAAAA,CACL,CAAC,yBAAyB,EAAEM,aAAAA,CAAWF,IAAM,CAAA,CAAA,oBAAoB,EAAE,OAAOC,QAAS,CAAA,UAAU,CAAC,CAAA;AAElG;IAEA,IAAIA,QAAAA,CAASD,IAAI,KAAKA,IAAM,EAAA;QAC1B,OAAOJ,MAAAA,CACL,CAAC,qCAAqC,EAAEI,IAAAA,CAAK,WAAW,EAAEC,QAASD,CAAAA,IAAI,CAAC,UAAU,CAAC,CAAA;AAEvF;AACF;;;;"}

View File

@@ -0,0 +1,17 @@
import { capitalize } from 'lodash/fp';
import { TransferEngineValidationError } from '../errors.mjs';
const reject = (reason)=>{
throw new TransferEngineValidationError(`Invalid provider supplied. ${reason}`);
};
const validateProvider = (type, provider)=>{
if (!provider) {
return reject(`Expected an instance of "${capitalize(type)}Provider", but got "${typeof provider}" instead.`);
}
if (provider.type !== type) {
return reject(`Expected the provider to be of type "${type}" but got "${provider.type}" instead.`);
}
};
export { validateProvider };
//# sourceMappingURL=provider.mjs.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"provider.mjs","sources":["../../../src/engine/validation/provider.ts"],"sourcesContent":["import { capitalize } from 'lodash/fp';\n\nimport type { IDestinationProvider, ISourceProvider, ProviderType } from '../../../types';\nimport { TransferEngineValidationError } from '../errors';\n\nconst reject = (reason: string): never => {\n throw new TransferEngineValidationError(`Invalid provider supplied. ${reason}`);\n};\n\nconst validateProvider = <T extends ProviderType>(\n type: ProviderType,\n provider?: ([T] extends ['source'] ? ISourceProvider : IDestinationProvider) | null\n) => {\n if (!provider) {\n return reject(\n `Expected an instance of \"${capitalize(type)}Provider\", but got \"${typeof provider}\" instead.`\n );\n }\n\n if (provider.type !== type) {\n return reject(\n `Expected the provider to be of type \"${type}\" but got \"${provider.type}\" instead.`\n );\n }\n};\n\nexport { validateProvider };\n"],"names":["reject","reason","TransferEngineValidationError","validateProvider","type","provider","capitalize"],"mappings":";;;AAKA,MAAMA,SAAS,CAACC,MAAAA,GAAAA;AACd,IAAA,MAAM,IAAIC,6BAA8B,CAAA,CAAC,2BAA2B,EAAED,OAAO,CAAC,CAAA;AAChF,CAAA;AAEME,MAAAA,gBAAAA,GAAmB,CACvBC,IACAC,EAAAA,QAAAA,GAAAA;AAEA,IAAA,IAAI,CAACA,QAAU,EAAA;QACb,OAAOL,MAAAA,CACL,CAAC,yBAAyB,EAAEM,UAAAA,CAAWF,IAAM,CAAA,CAAA,oBAAoB,EAAE,OAAOC,QAAS,CAAA,UAAU,CAAC,CAAA;AAElG;IAEA,IAAIA,QAAAA,CAASD,IAAI,KAAKA,IAAM,EAAA;QAC1B,OAAOJ,MAAAA,CACL,CAAC,qCAAqC,EAAEI,IAAAA,CAAK,WAAW,EAAEC,QAASD,CAAAA,IAAI,CAAC,UAAU,CAAC,CAAA;AAEvF;AACF;;;;"}

View File

@@ -0,0 +1,8 @@
import type { Diff } from '../../../utils/json';
declare const strategies: {
exact(diffs: Diff[]): Diff[];
strict(diffs: Diff[]): Diff[];
};
declare const compareSchemas: <T, P>(a: T, b: P, strategy: keyof typeof strategies) => Diff[];
export { compareSchemas };
//# sourceMappingURL=index.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/engine/validation/schemas/index.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,IAAI,EAAE,MAAM,qBAAqB,CAAC;AAqChD,QAAA,MAAM,UAAU;iBAED,IAAI,EAAE;kBAOL,IAAI,EAAE;CAGrB,CAAC;AAEF,QAAA,MAAM,cAAc,YAAa,CAAC,KAAK,CAAC,YAAY,MAAM,iBAAiB,WAG1E,CAAC;AAEF,OAAO,EAAE,cAAc,EAAE,CAAC"}

View File

@@ -0,0 +1,57 @@
'use strict';
var fp = require('lodash/fp');
require('crypto');
require('stream');
var json = require('../../../utils/json.js');
require('events');
const OPTIONAL_CONTENT_TYPES = [
'audit-log'
];
const isAttributeIgnorable = (diff)=>{
return diff.path.length === 3 && // Root property must be attributes
diff.path[0] === 'attributes' && // Need a valid string attribute name
typeof diff.path[1] === 'string' && // The diff must be on ignorable attribute properties
[
'private',
'required',
'configurable',
'default'
].includes(diff.path[2]);
};
// TODO: clean up the type checking, which will require cleaning up the typings in utils/json.ts
// exclude admin tables that are not transferable and are optionally available (such as audit logs which are only available in EE)
const isOptionalAdminType = (diff)=>{
// added/deleted
if ('value' in diff && fp.isObject(diff.value)) {
const name = diff?.value?.info?.singularName;
return OPTIONAL_CONTENT_TYPES.includes(name);
}
// modified
if ('values' in diff && fp.isArray(diff.values) && fp.isObject(diff.values[0])) {
const name = diff?.values[0]?.info?.singularName;
return OPTIONAL_CONTENT_TYPES.includes(name);
}
return false;
};
const isIgnorableStrict = (diff)=>isAttributeIgnorable(diff) || isOptionalAdminType(diff);
const strategies = {
// No diffs
exact (diffs) {
return diffs;
},
// Strict: all content types must match except:
// - the property within a content type is an ignorable one
// - those that are (not transferrable and optionally available), for example EE features such as audit logs
strict (diffs) {
return fp.reject(isIgnorableStrict, diffs);
}
};
const compareSchemas = (a, b, strategy)=>{
const diffs = json.diff(a, b);
return strategies[strategy](diffs);
};
exports.compareSchemas = compareSchemas;
//# sourceMappingURL=index.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sources":["../../../../src/engine/validation/schemas/index.ts"],"sourcesContent":["import type { Struct } from '@strapi/types';\nimport { isArray, isObject, reject } from 'lodash/fp';\nimport type { Diff } from '../../../utils/json';\nimport * as utils from '../../../utils';\n\nconst OPTIONAL_CONTENT_TYPES = ['audit-log'] as const;\n\nconst isAttributeIgnorable = (diff: Diff) => {\n return (\n diff.path.length === 3 &&\n // Root property must be attributes\n diff.path[0] === 'attributes' &&\n // Need a valid string attribute name\n typeof diff.path[1] === 'string' &&\n // The diff must be on ignorable attribute properties\n ['private', 'required', 'configurable', 'default'].includes(diff.path[2])\n );\n};\n\n// TODO: clean up the type checking, which will require cleaning up the typings in utils/json.ts\n// exclude admin tables that are not transferable and are optionally available (such as audit logs which are only available in EE)\nconst isOptionalAdminType = (diff: Diff) => {\n // added/deleted\n if ('value' in diff && isObject(diff.value)) {\n const name = (diff?.value as Struct.ContentTypeSchema)?.info?.singularName;\n return (OPTIONAL_CONTENT_TYPES as ReadonlyArray<string | undefined>).includes(name);\n }\n\n // modified\n if ('values' in diff && isArray(diff.values) && isObject(diff.values[0])) {\n const name = (diff?.values[0] as Struct.ContentTypeSchema)?.info?.singularName;\n return (OPTIONAL_CONTENT_TYPES as ReadonlyArray<string | undefined>).includes(name);\n }\n\n return false;\n};\n\nconst isIgnorableStrict = (diff: Diff) => isAttributeIgnorable(diff) || isOptionalAdminType(diff);\n\nconst strategies = {\n // No diffs\n exact(diffs: Diff[]) {\n return diffs;\n },\n\n // Strict: all content types must match except:\n // - the property within a content type is an ignorable one\n // - those that are (not transferrable and optionally available), for example EE features such as audit logs\n strict(diffs: Diff[]) {\n return reject(isIgnorableStrict, diffs);\n },\n};\n\nconst compareSchemas = <T, P>(a: T, b: P, strategy: keyof typeof strategies) => {\n const diffs = utils.json.diff(a, b);\n return strategies[strategy](diffs);\n};\n\nexport { compareSchemas 
};\n"],"names":["OPTIONAL_CONTENT_TYPES","isAttributeIgnorable","diff","path","length","includes","isOptionalAdminType","isObject","value","name","info","singularName","isArray","values","isIgnorableStrict","strategies","exact","diffs","strict","reject","compareSchemas","a","b","strategy","utils"],"mappings":";;;;;;;;AAKA,MAAMA,sBAAyB,GAAA;AAAC,IAAA;AAAY,CAAA;AAE5C,MAAMC,uBAAuB,CAACC,IAAAA,GAAAA;AAC5B,IAAA,OACEA,KAAKC,IAAI,CAACC,MAAM,KAAK;AAErBF,IAAAA,IAAAA,CAAKC,IAAI,CAAC,CAAE,CAAA,KAAK;AAEjB,IAAA,OAAOD,KAAKC,IAAI,CAAC,CAAE,CAAA,KAAK;AAExB,IAAA;AAAC,QAAA,SAAA;AAAW,QAAA,UAAA;AAAY,QAAA,cAAA;AAAgB,QAAA;AAAU,KAAA,CAACE,QAAQ,CAACH,IAAKC,CAAAA,IAAI,CAAC,CAAE,CAAA,CAAA;AAE5E,CAAA;AAEA;AACA;AACA,MAAMG,sBAAsB,CAACJ,IAAAA,GAAAA;;AAE3B,IAAA,IAAI,OAAWA,IAAAA,IAAAA,IAAQK,WAASL,CAAAA,IAAAA,CAAKM,KAAK,CAAG,EAAA;QAC3C,MAAMC,IAAAA,GAAQP,IAAMM,EAAAA,KAAAA,EAAoCE,IAAMC,EAAAA,YAAAA;QAC9D,OAAQX,sBAA6DK,CAAAA,QAAQ,CAACI,IAAAA,CAAAA;AAChF;;IAGA,IAAI,QAAA,IAAYP,IAAQU,IAAAA,UAAAA,CAAQV,IAAKW,CAAAA,MAAM,CAAKN,IAAAA,WAAAA,CAASL,IAAKW,CAAAA,MAAM,CAAC,CAAA,CAAE,CAAG,EAAA;AACxE,QAAA,MAAMJ,OAAQP,IAAMW,EAAAA,MAAM,CAAC,CAAA,CAAE,EAA+BH,IAAMC,EAAAA,YAAAA;QAClE,OAAQX,sBAA6DK,CAAAA,QAAQ,CAACI,IAAAA,CAAAA;AAChF;IAEA,OAAO,KAAA;AACT,CAAA;AAEA,MAAMK,iBAAoB,GAAA,CAACZ,IAAeD,GAAAA,oBAAAA,CAAqBC,SAASI,mBAAoBJ,CAAAA,IAAAA,CAAAA;AAE5F,MAAMa,UAAa,GAAA;;AAEjBC,IAAAA,KAAAA,CAAAA,CAAMC,KAAa,EAAA;QACjB,OAAOA,KAAAA;AACT,KAAA;;;;AAKAC,IAAAA,MAAAA,CAAAA,CAAOD,KAAa,EAAA;AAClB,QAAA,OAAOE,UAAOL,iBAAmBG,EAAAA,KAAAA,CAAAA;AACnC;AACF,CAAA;AAEMG,MAAAA,cAAAA,GAAiB,CAAOC,CAAAA,EAAMC,CAAMC,EAAAA,QAAAA,GAAAA;AACxC,IAAA,MAAMN,QAAQO,SAAe,CAACH,CAAGC,EAAAA,CAAAA,CAAAA;IACjC,OAAOP,UAAU,CAACQ,QAAAA,CAAS,CAACN,KAAAA,CAAAA;AAC9B;;;;"}

View File

@@ -0,0 +1,55 @@
import { reject, isObject, isArray } from 'lodash/fp';
import 'crypto';
import 'stream';
import { diff } from '../../../utils/json.mjs';
import 'events';
const OPTIONAL_CONTENT_TYPES = [
'audit-log'
];
const isAttributeIgnorable = (diff)=>{
return diff.path.length === 3 && // Root property must be attributes
diff.path[0] === 'attributes' && // Need a valid string attribute name
typeof diff.path[1] === 'string' && // The diff must be on ignorable attribute properties
[
'private',
'required',
'configurable',
'default'
].includes(diff.path[2]);
};
// TODO: clean up the type checking, which will require cleaning up the typings in utils/json.ts
// exclude admin tables that are not transferable and are optionally available (such as audit logs which are only available in EE)
const isOptionalAdminType = (diff)=>{
// added/deleted
if ('value' in diff && isObject(diff.value)) {
const name = diff?.value?.info?.singularName;
return OPTIONAL_CONTENT_TYPES.includes(name);
}
// modified
if ('values' in diff && isArray(diff.values) && isObject(diff.values[0])) {
const name = diff?.values[0]?.info?.singularName;
return OPTIONAL_CONTENT_TYPES.includes(name);
}
return false;
};
const isIgnorableStrict = (diff)=>isAttributeIgnorable(diff) || isOptionalAdminType(diff);
const strategies = {
// No diffs
exact (diffs) {
return diffs;
},
// Strict: all content types must match except:
// - the property within a content type is an ignorable one
// - those that are (not transferrable and optionally available), for example EE features such as audit logs
strict (diffs) {
return reject(isIgnorableStrict, diffs);
}
};
const compareSchemas = (a, b, strategy)=>{
const diffs = diff(a, b);
return strategies[strategy](diffs);
};
export { compareSchemas };
//# sourceMappingURL=index.mjs.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.mjs","sources":["../../../../src/engine/validation/schemas/index.ts"],"sourcesContent":["import type { Struct } from '@strapi/types';\nimport { isArray, isObject, reject } from 'lodash/fp';\nimport type { Diff } from '../../../utils/json';\nimport * as utils from '../../../utils';\n\nconst OPTIONAL_CONTENT_TYPES = ['audit-log'] as const;\n\nconst isAttributeIgnorable = (diff: Diff) => {\n return (\n diff.path.length === 3 &&\n // Root property must be attributes\n diff.path[0] === 'attributes' &&\n // Need a valid string attribute name\n typeof diff.path[1] === 'string' &&\n // The diff must be on ignorable attribute properties\n ['private', 'required', 'configurable', 'default'].includes(diff.path[2])\n );\n};\n\n// TODO: clean up the type checking, which will require cleaning up the typings in utils/json.ts\n// exclude admin tables that are not transferable and are optionally available (such as audit logs which are only available in EE)\nconst isOptionalAdminType = (diff: Diff) => {\n // added/deleted\n if ('value' in diff && isObject(diff.value)) {\n const name = (diff?.value as Struct.ContentTypeSchema)?.info?.singularName;\n return (OPTIONAL_CONTENT_TYPES as ReadonlyArray<string | undefined>).includes(name);\n }\n\n // modified\n if ('values' in diff && isArray(diff.values) && isObject(diff.values[0])) {\n const name = (diff?.values[0] as Struct.ContentTypeSchema)?.info?.singularName;\n return (OPTIONAL_CONTENT_TYPES as ReadonlyArray<string | undefined>).includes(name);\n }\n\n return false;\n};\n\nconst isIgnorableStrict = (diff: Diff) => isAttributeIgnorable(diff) || isOptionalAdminType(diff);\n\nconst strategies = {\n // No diffs\n exact(diffs: Diff[]) {\n return diffs;\n },\n\n // Strict: all content types must match except:\n // - the property within a content type is an ignorable one\n // - those that are (not transferrable and optionally available), for example EE features such as audit logs\n strict(diffs: Diff[]) {\n return reject(isIgnorableStrict, diffs);\n },\n};\n\nconst compareSchemas = <T, P>(a: T, b: P, strategy: keyof typeof strategies) => {\n const diffs = utils.json.diff(a, b);\n return strategies[strategy](diffs);\n};\n\nexport { compareSchemas 
};\n"],"names":["OPTIONAL_CONTENT_TYPES","isAttributeIgnorable","diff","path","length","includes","isOptionalAdminType","isObject","value","name","info","singularName","isArray","values","isIgnorableStrict","strategies","exact","diffs","strict","reject","compareSchemas","a","b","strategy","utils"],"mappings":";;;;;;AAKA,MAAMA,sBAAyB,GAAA;AAAC,IAAA;AAAY,CAAA;AAE5C,MAAMC,uBAAuB,CAACC,IAAAA,GAAAA;AAC5B,IAAA,OACEA,KAAKC,IAAI,CAACC,MAAM,KAAK;AAErBF,IAAAA,IAAAA,CAAKC,IAAI,CAAC,CAAE,CAAA,KAAK;AAEjB,IAAA,OAAOD,KAAKC,IAAI,CAAC,CAAE,CAAA,KAAK;AAExB,IAAA;AAAC,QAAA,SAAA;AAAW,QAAA,UAAA;AAAY,QAAA,cAAA;AAAgB,QAAA;AAAU,KAAA,CAACE,QAAQ,CAACH,IAAKC,CAAAA,IAAI,CAAC,CAAE,CAAA,CAAA;AAE5E,CAAA;AAEA;AACA;AACA,MAAMG,sBAAsB,CAACJ,IAAAA,GAAAA;;AAE3B,IAAA,IAAI,OAAWA,IAAAA,IAAAA,IAAQK,QAASL,CAAAA,IAAAA,CAAKM,KAAK,CAAG,EAAA;QAC3C,MAAMC,IAAAA,GAAQP,IAAMM,EAAAA,KAAAA,EAAoCE,IAAMC,EAAAA,YAAAA;QAC9D,OAAQX,sBAA6DK,CAAAA,QAAQ,CAACI,IAAAA,CAAAA;AAChF;;IAGA,IAAI,QAAA,IAAYP,IAAQU,IAAAA,OAAAA,CAAQV,IAAKW,CAAAA,MAAM,CAAKN,IAAAA,QAAAA,CAASL,IAAKW,CAAAA,MAAM,CAAC,CAAA,CAAE,CAAG,EAAA;AACxE,QAAA,MAAMJ,OAAQP,IAAMW,EAAAA,MAAM,CAAC,CAAA,CAAE,EAA+BH,IAAMC,EAAAA,YAAAA;QAClE,OAAQX,sBAA6DK,CAAAA,QAAQ,CAACI,IAAAA,CAAAA;AAChF;IAEA,OAAO,KAAA;AACT,CAAA;AAEA,MAAMK,iBAAoB,GAAA,CAACZ,IAAeD,GAAAA,oBAAAA,CAAqBC,SAASI,mBAAoBJ,CAAAA,IAAAA,CAAAA;AAE5F,MAAMa,UAAa,GAAA;;AAEjBC,IAAAA,KAAAA,CAAAA,CAAMC,KAAa,EAAA;QACjB,OAAOA,KAAAA;AACT,KAAA;;;;AAKAC,IAAAA,MAAAA,CAAAA,CAAOD,KAAa,EAAA;AAClB,QAAA,OAAOE,OAAOL,iBAAmBG,EAAAA,KAAAA,CAAAA;AACnC;AACF,CAAA;AAEMG,MAAAA,cAAAA,GAAiB,CAAOC,CAAAA,EAAMC,CAAMC,EAAAA,QAAAA,GAAAA;AACxC,IAAA,MAAMN,QAAQO,IAAe,CAACH,CAAGC,EAAAA,CAAAA,CAAAA;IACjC,OAAOP,UAAU,CAACQ,QAAAA,CAAS,CAACN,KAAAA,CAAAA;AAC9B;;;;"}

View File

@@ -0,0 +1,9 @@
import { Severity } from './constants';
declare class DataTransferError<T = unknown> extends Error {
origin: string;
severity: Severity;
details: T | null;
constructor(origin: string, severity: Severity, message?: string, details?: T | null);
}
export { DataTransferError };
//# sourceMappingURL=base.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"base.d.ts","sourceRoot":"","sources":["../../src/errors/base.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAC;AAEvC,cAAM,iBAAiB,CAAC,CAAC,GAAG,OAAO,CAAE,SAAQ,KAAK;IAChD,MAAM,EAAE,MAAM,CAAC;IAEf,QAAQ,EAAE,QAAQ,CAAC;IAEnB,OAAO,EAAE,CAAC,GAAG,IAAI,CAAC;gBAEN,MAAM,EAAE,MAAM,EAAE,QAAQ,EAAE,QAAQ,EAAE,OAAO,CAAC,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,CAAC,GAAG,IAAI;CAOrF;AAED,OAAO,EAAE,iBAAiB,EAAE,CAAC"}

View File

@@ -0,0 +1,13 @@
'use strict';
class DataTransferError extends Error {
constructor(origin, severity, message, details){
super(message);
this.origin = origin;
this.severity = severity;
this.details = details ?? null;
}
}
exports.DataTransferError = DataTransferError;
//# sourceMappingURL=base.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"base.js","sources":["../../src/errors/base.ts"],"sourcesContent":["import { Severity } from './constants';\n\nclass DataTransferError<T = unknown> extends Error {\n origin: string;\n\n severity: Severity;\n\n details: T | null;\n\n constructor(origin: string, severity: Severity, message?: string, details?: T | null) {\n super(message);\n\n this.origin = origin;\n this.severity = severity;\n this.details = details ?? null;\n }\n}\n\nexport { DataTransferError };\n"],"names":["DataTransferError","Error","constructor","origin","severity","message","details"],"mappings":";;AAEA,MAAMA,iBAAuCC,SAAAA,KAAAA,CAAAA;AAO3CC,IAAAA,WAAAA,CAAYC,MAAc,EAAEC,QAAkB,EAAEC,OAAgB,EAAEC,OAAkB,CAAE;AACpF,QAAA,KAAK,CAACD,OAAAA,CAAAA;QAEN,IAAI,CAACF,MAAM,GAAGA,MAAAA;QACd,IAAI,CAACC,QAAQ,GAAGA,QAAAA;QAChB,IAAI,CAACE,OAAO,GAAGA,OAAW,IAAA,IAAA;AAC5B;AACF;;;;"}

View File

@@ -0,0 +1,11 @@
class DataTransferError extends Error {
constructor(origin, severity, message, details){
super(message);
this.origin = origin;
this.severity = severity;
this.details = details ?? null;
}
}
export { DataTransferError };
//# sourceMappingURL=base.mjs.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"base.mjs","sources":["../../src/errors/base.ts"],"sourcesContent":["import { Severity } from './constants';\n\nclass DataTransferError<T = unknown> extends Error {\n origin: string;\n\n severity: Severity;\n\n details: T | null;\n\n constructor(origin: string, severity: Severity, message?: string, details?: T | null) {\n super(message);\n\n this.origin = origin;\n this.severity = severity;\n this.details = details ?? null;\n }\n}\n\nexport { DataTransferError };\n"],"names":["DataTransferError","Error","constructor","origin","severity","message","details"],"mappings":"AAEA,MAAMA,iBAAuCC,SAAAA,KAAAA,CAAAA;AAO3CC,IAAAA,WAAAA,CAAYC,MAAc,EAAEC,QAAkB,EAAEC,OAAgB,EAAEC,OAAkB,CAAE;AACpF,QAAA,KAAK,CAACD,OAAAA,CAAAA;QAEN,IAAI,CAACF,MAAM,GAAGA,MAAAA;QACd,IAAI,CAACC,QAAQ,GAAGA,QAAAA;QAChB,IAAI,CAACE,OAAO,GAAGA,OAAW,IAAA,IAAA;AAC5B;AACF;;;;"}

View File

@@ -0,0 +1,4 @@
import { ErrorDiagnosticSeverity } from '../utils/diagnostic';
export declare const SeverityKind: Record<string, ErrorDiagnosticSeverity>;
export type Severity = (typeof SeverityKind)[keyof typeof SeverityKind];
//# sourceMappingURL=constants.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"constants.d.ts","sourceRoot":"","sources":["../../src/errors/constants.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,uBAAuB,EAAE,MAAM,qBAAqB,CAAC;AAE9D,eAAO,MAAM,YAAY,EAAE,MAAM,CAAC,MAAM,EAAE,uBAAuB,CAIvD,CAAC;AACX,MAAM,MAAM,QAAQ,GAAG,CAAC,OAAO,YAAY,CAAC,CAAC,MAAM,OAAO,YAAY,CAAC,CAAC"}

View File

@@ -0,0 +1,10 @@
'use strict';
const SeverityKind = {
FATAL: 'fatal',
ERROR: 'error',
SILLY: 'silly'
};
exports.SeverityKind = SeverityKind;
//# sourceMappingURL=constants.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"constants.js","sources":["../../src/errors/constants.ts"],"sourcesContent":["import { ErrorDiagnosticSeverity } from '../utils/diagnostic';\n\nexport const SeverityKind: Record<string, ErrorDiagnosticSeverity> = {\n FATAL: 'fatal',\n ERROR: 'error',\n SILLY: 'silly',\n} as const;\nexport type Severity = (typeof SeverityKind)[keyof typeof SeverityKind];\n"],"names":["SeverityKind","FATAL","ERROR","SILLY"],"mappings":";;MAEaA,YAAwD,GAAA;IACnEC,KAAO,EAAA,OAAA;IACPC,KAAO,EAAA,OAAA;IACPC,KAAO,EAAA;AACT;;;;"}

View File

@@ -0,0 +1,8 @@
const SeverityKind = {
FATAL: 'fatal',
ERROR: 'error',
SILLY: 'silly'
};
export { SeverityKind };
//# sourceMappingURL=constants.mjs.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"constants.mjs","sources":["../../src/errors/constants.ts"],"sourcesContent":["import { ErrorDiagnosticSeverity } from '../utils/diagnostic';\n\nexport const SeverityKind: Record<string, ErrorDiagnosticSeverity> = {\n FATAL: 'fatal',\n ERROR: 'error',\n SILLY: 'silly',\n} as const;\nexport type Severity = (typeof SeverityKind)[keyof typeof SeverityKind];\n"],"names":["SeverityKind","FATAL","ERROR","SILLY"],"mappings":"MAEaA,YAAwD,GAAA;IACnEC,KAAO,EAAA,OAAA;IACPC,KAAO,EAAA,OAAA;IACPC,KAAO,EAAA;AACT;;;;"}

View File

@@ -0,0 +1,3 @@
export * from './constants';
export * from './base';
//# sourceMappingURL=index.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/errors/index.ts"],"names":[],"mappings":"AAAA,cAAc,aAAa,CAAC;AAC5B,cAAc,QAAQ,CAAC"}

View File

@@ -0,0 +1,24 @@
import { ErrorCode } from '../../types';
import { DataTransferError } from './base';
import { Severity } from './constants';
type ProviderStep = 'initialization' | 'validation' | 'transfer';
export type ProviderErrorDetails<P extends ProviderStep = ProviderStep, U = never> = {
step: P;
code?: ErrorCode;
} & ([U] extends [never] ? unknown : {
details?: U;
});
export declare class ProviderError<P extends ProviderStep = ProviderStep, U = never, T extends ProviderErrorDetails<P, U> = ProviderErrorDetails<P, U>> extends DataTransferError<T> {
constructor(severity: Severity, message?: string, details?: T | null);
}
export declare class ProviderInitializationError extends ProviderError<'initialization'> {
constructor(message?: string);
}
export declare class ProviderValidationError<T = ProviderErrorDetails> extends ProviderError<'validation', T> {
constructor(message?: string, details?: T);
}
export declare class ProviderTransferError<T = ProviderErrorDetails> extends ProviderError<'transfer', T> {
constructor(message?: string, details?: T);
}
export {};
//# sourceMappingURL=providers.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"providers.d.ts","sourceRoot":"","sources":["../../src/errors/providers.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AACxC,OAAO,EAAE,iBAAiB,EAAE,MAAM,QAAQ,CAAC;AAC3C,OAAO,EAAE,QAAQ,EAAgB,MAAM,aAAa,CAAC;AAErD,KAAK,YAAY,GAAG,gBAAgB,GAAG,YAAY,GAAG,UAAU,CAAC;AAEjE,MAAM,MAAM,oBAAoB,CAAC,CAAC,SAAS,YAAY,GAAG,YAAY,EAAE,CAAC,GAAG,KAAK,IAAI;IACnF,IAAI,EAAE,CAAC,CAAC;IACR,IAAI,CAAC,EAAE,SAAS,CAAC;CAClB,GAAG,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,KAAK,CAAC,GAAG,OAAO,GAAG;IAAE,OAAO,CAAC,EAAE,CAAC,CAAA;CAAE,CAAC,CAAC;AAEtD,qBAAa,aAAa,CACxB,CAAC,SAAS,YAAY,GAAG,YAAY,EACrC,CAAC,GAAG,KAAK,EACT,CAAC,SAAS,oBAAoB,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,oBAAoB,CAAC,CAAC,EAAE,CAAC,CAAC,CACjE,SAAQ,iBAAiB,CAAC,CAAC,CAAC;gBAChB,QAAQ,EAAE,QAAQ,EAAE,OAAO,CAAC,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,CAAC,GAAG,IAAI;CAGrE;AAED,qBAAa,2BAA4B,SAAQ,aAAa,CAAC,gBAAgB,CAAC;gBAClE,OAAO,CAAC,EAAE,MAAM;CAG7B;AAGD,qBAAa,uBAAuB,CAAC,CAAC,GAAG,oBAAoB,CAAE,SAAQ,aAAa,CAClF,YAAY,EACZ,CAAC,CACF;gBACa,OAAO,CAAC,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,CAAC;CAG1C;AAED,qBAAa,qBAAqB,CAAC,CAAC,GAAG,oBAAoB,CAAE,SAAQ,aAAa,CAAC,UAAU,EAAE,CAAC,CAAC;gBACnF,OAAO,CAAC,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,CAAC;CAG1C"}

View File

@@ -0,0 +1,41 @@
'use strict';
var base = require('./base.js');
var constants = require('./constants.js');
class ProviderError extends base.DataTransferError {
constructor(severity, message, details){
super('provider', severity, message, details);
}
}
class ProviderInitializationError extends ProviderError {
constructor(message){
super(constants.SeverityKind.FATAL, message, {
step: 'initialization'
});
}
}
// TODO: these types are not working correctly, ProviderTransferError() is accepting any details object rather than requiring T
class ProviderValidationError extends ProviderError {
constructor(message, details){
super(constants.SeverityKind.SILLY, message, {
step: 'validation',
details
});
}
}
// TODO: these types are not working correctly, ProviderTransferError() is accepting any details object rather than requiring T
class ProviderTransferError extends ProviderError {
constructor(message, details){
super(constants.SeverityKind.FATAL, message, {
step: 'transfer',
details
});
}
}
exports.ProviderError = ProviderError;
exports.ProviderInitializationError = ProviderInitializationError;
exports.ProviderTransferError = ProviderTransferError;
exports.ProviderValidationError = ProviderValidationError;
//# sourceMappingURL=providers.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"providers.js","sources":["../../src/errors/providers.ts"],"sourcesContent":["import { ErrorCode } from '../../types';\nimport { DataTransferError } from './base';\nimport { Severity, SeverityKind } from './constants';\n\ntype ProviderStep = 'initialization' | 'validation' | 'transfer';\n\nexport type ProviderErrorDetails<P extends ProviderStep = ProviderStep, U = never> = {\n step: P;\n code?: ErrorCode;\n} & ([U] extends [never] ? unknown : { details?: U });\n\nexport class ProviderError<\n P extends ProviderStep = ProviderStep,\n U = never,\n T extends ProviderErrorDetails<P, U> = ProviderErrorDetails<P, U>,\n> extends DataTransferError<T> {\n constructor(severity: Severity, message?: string, details?: T | null) {\n super('provider', severity, message, details);\n }\n}\n\nexport class ProviderInitializationError extends ProviderError<'initialization'> {\n constructor(message?: string) {\n super(SeverityKind.FATAL, message, { step: 'initialization' });\n }\n}\n\n// TODO: these types are not working correctly, ProviderTransferError() is accepting any details object rather than requiring T\nexport class ProviderValidationError<T = ProviderErrorDetails> extends ProviderError<\n 'validation',\n T\n> {\n constructor(message?: string, details?: T) {\n super(SeverityKind.SILLY, message, { step: 'validation', details });\n }\n}\n// TODO: these types are not working correctly, ProviderTransferError() is accepting any details object rather than requiring T\nexport class ProviderTransferError<T = ProviderErrorDetails> extends ProviderError<'transfer', T> {\n constructor(message?: string, details?: T) {\n super(SeverityKind.FATAL, message, { step: 'transfer', details });\n }\n}\n"],"names":["ProviderError","DataTransferError","constructor","severity","message","details","ProviderInitializationError","SeverityKind","FATAL","step","ProviderValidationError","SILLY","ProviderTransferError"],"mappings":";;;;;AAWO,MAAMA,aAIHC,SAAAA,sBAAAA,CAAAA;AACRC,IAAAA,WAAAA,CAAYC,QAAkB,EAAEC,OAAgB,EAAEC,OAAkB,CAAE;QACpE,KAAK,CAAC,UAAYF,EAAAA,QAAAA,EAAUC,OAASC,EAAAA,OAAAA,CAAAA;AACvC;AACF;AAEO,MAAMC,2BAAoCN,SAAAA,aAAAA,CAAAA;AAC/CE,IAAAA,WAAAA,CAAYE,OAAgB,CAAE;AAC5B,QAAA,KAAK,CAACG,sBAAAA,CAAaC,KAAK,EAAEJ,OAAS,EAAA;YAAEK,IAAM,EAAA;AAAiB,SAAA,CAAA;AAC9D;AACF;AAEA;AACO,MAAMC,uBAA0DV,SAAAA,aAAAA,CAAAA;IAIrEE,WAAYE,CAAAA,OAAgB,EAAEC,OAAW,CAAE;AACzC,QAAA,KAAK,CAACE,sBAAAA,CAAaI,KAAK,EAAEP,OAAS,EAAA;YAAEK,IAAM,EAAA,YAAA;AAAcJ,YAAAA;AAAQ,SAAA,CAAA;AACnE;AACF;AACA;AACO,MAAMO,qBAAwDZ,SAAAA,aAAAA,CAAAA;IACnEE,WAAYE,CAAAA,OAAgB,EAAEC,OAAW,CAAE;AACzC,QAAA,KAAK,CAACE,sBAAAA,CAAaC,KAAK,EAAEJ,OAAS,EAAA;YAAEK,IAAM,EAAA,UAAA;AAAYJ,YAAAA;AAAQ,SAAA,CAAA;AACjE;AACF;;;;;;;"}

View File

@@ -0,0 +1,36 @@
import { DataTransferError } from './base.mjs';
import { SeverityKind } from './constants.mjs';
class ProviderError extends DataTransferError {
constructor(severity, message, details){
super('provider', severity, message, details);
}
}
class ProviderInitializationError extends ProviderError {
constructor(message){
super(SeverityKind.FATAL, message, {
step: 'initialization'
});
}
}
// TODO: these types are not working correctly, ProviderTransferError() is accepting any details object rather than requiring T
class ProviderValidationError extends ProviderError {
constructor(message, details){
super(SeverityKind.SILLY, message, {
step: 'validation',
details
});
}
}
// TODO: these types are not working correctly, ProviderTransferError() is accepting any details object rather than requiring T
class ProviderTransferError extends ProviderError {
constructor(message, details){
super(SeverityKind.FATAL, message, {
step: 'transfer',
details
});
}
}
export { ProviderError, ProviderInitializationError, ProviderTransferError, ProviderValidationError };
//# sourceMappingURL=providers.mjs.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"providers.mjs","sources":["../../src/errors/providers.ts"],"sourcesContent":["import { ErrorCode } from '../../types';\nimport { DataTransferError } from './base';\nimport { Severity, SeverityKind } from './constants';\n\ntype ProviderStep = 'initialization' | 'validation' | 'transfer';\n\nexport type ProviderErrorDetails<P extends ProviderStep = ProviderStep, U = never> = {\n step: P;\n code?: ErrorCode;\n} & ([U] extends [never] ? unknown : { details?: U });\n\nexport class ProviderError<\n P extends ProviderStep = ProviderStep,\n U = never,\n T extends ProviderErrorDetails<P, U> = ProviderErrorDetails<P, U>,\n> extends DataTransferError<T> {\n constructor(severity: Severity, message?: string, details?: T | null) {\n super('provider', severity, message, details);\n }\n}\n\nexport class ProviderInitializationError extends ProviderError<'initialization'> {\n constructor(message?: string) {\n super(SeverityKind.FATAL, message, { step: 'initialization' });\n }\n}\n\n// TODO: these types are not working correctly, ProviderTransferError() is accepting any details object rather than requiring T\nexport class ProviderValidationError<T = ProviderErrorDetails> extends ProviderError<\n 'validation',\n T\n> {\n constructor(message?: string, details?: T) {\n super(SeverityKind.SILLY, message, { step: 'validation', details });\n }\n}\n// TODO: these types are not working correctly, ProviderTransferError() is accepting any details object rather than requiring T\nexport class ProviderTransferError<T = ProviderErrorDetails> extends ProviderError<'transfer', T> {\n constructor(message?: string, details?: T) {\n super(SeverityKind.FATAL, message, { step: 'transfer', details });\n }\n}\n"],"names":["ProviderError","DataTransferError","constructor","severity","message","details","ProviderInitializationError","SeverityKind","FATAL","step","ProviderValidationError","SILLY","ProviderTransferError"],"mappings":";;;AAWO,MAAMA,aAIHC,SAAAA,iBAAAA,CAAAA;AACRC,IAAAA,WAAAA,CAAYC,QAAkB,EAAEC,OAAgB,EAAEC,OAAkB,CAAE;QACpE,KAAK,CAAC,UAAYF,EAAAA,QAAAA,EAAUC,OAASC,EAAAA,OAAAA,CAAAA;AACvC;AACF;AAEO,MAAMC,2BAAoCN,SAAAA,aAAAA,CAAAA;AAC/CE,IAAAA,WAAAA,CAAYE,OAAgB,CAAE;AAC5B,QAAA,KAAK,CAACG,YAAAA,CAAaC,KAAK,EAAEJ,OAAS,EAAA;YAAEK,IAAM,EAAA;AAAiB,SAAA,CAAA;AAC9D;AACF;AAEA;AACO,MAAMC,uBAA0DV,SAAAA,aAAAA,CAAAA;IAIrEE,WAAYE,CAAAA,OAAgB,EAAEC,OAAW,CAAE;AACzC,QAAA,KAAK,CAACE,YAAAA,CAAaI,KAAK,EAAEP,OAAS,EAAA;YAAEK,IAAM,EAAA,YAAA;AAAcJ,YAAAA;AAAQ,SAAA,CAAA;AACnE;AACF;AACA;AACO,MAAMO,qBAAwDZ,SAAAA,aAAAA,CAAAA;IACnEE,WAAYE,CAAAA,OAAgB,EAAEC,OAAW,CAAE;AACzC,QAAA,KAAK,CAACE,YAAAA,CAAaC,KAAK,EAAEJ,OAAS,EAAA;YAAEK,IAAM,EAAA,UAAA;AAAYJ,YAAAA;AAAQ,SAAA,CAAA;AACjE;AACF;;;;"}

View File

@@ -0,0 +1,2 @@
export * as providers from './providers';
//# sourceMappingURL=index.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/file/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,SAAS,MAAM,aAAa,CAAC"}

View File

@@ -0,0 +1,8 @@
'use strict';
var index = require('./providers/index.js');
exports.providers = index;
//# sourceMappingURL=index.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;"}

View File

@@ -0,0 +1,3 @@
import * as index from './providers/index.mjs';
export { index as providers };
//# sourceMappingURL=index.mjs.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.mjs","sources":[],"sourcesContent":[],"names":[],"mappings":";"}

View File

@@ -0,0 +1,48 @@
/// <reference types="node" />
/// <reference types="stream-chain" />
/// <reference types="node" />
import zlib from 'zlib';
import { Writable } from 'stream';
import type { IDestinationProvider, IDestinationProviderTransferResults, IMetadata, ProviderType } from '../../../../types';
import type { IDiagnosticReporter } from '../../../utils/diagnostic';
export interface ILocalFileDestinationProviderOptions {
encryption: {
enabled: boolean;
key?: string;
};
compression: {
enabled: boolean;
};
file: {
path: string;
maxSize?: number;
maxSizeJsonl?: number;
};
}
export interface ILocalFileDestinationProviderTransferResults extends IDestinationProviderTransferResults {
file?: {
path?: string;
};
}
export declare const createLocalFileDestinationProvider: (options: ILocalFileDestinationProviderOptions) => LocalFileDestinationProvider;
declare class LocalFileDestinationProvider implements IDestinationProvider {
#private;
name: string;
type: ProviderType;
options: ILocalFileDestinationProviderOptions;
results: ILocalFileDestinationProviderTransferResults;
constructor(options: ILocalFileDestinationProviderOptions);
setMetadata(target: ProviderType, metadata: IMetadata): IDestinationProvider;
createGzip(): zlib.Gzip;
bootstrap(diagnostics: IDiagnosticReporter): void | Promise<void>;
close(): Promise<void>;
rollback(): Promise<void>;
getMetadata(): null;
createSchemasWriteStream(): import("stream-chain");
createEntitiesWriteStream(): Writable;
createLinksWriteStream(): Writable;
createConfigurationWriteStream(): Writable;
createAssetsWriteStream(): Writable;
}
export {};
//# sourceMappingURL=index.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/file/providers/destination/index.ts"],"names":[],"mappings":";;;AACA,OAAO,IAAI,MAAM,MAAM,CAAC;AACxB,OAAO,EAAY,QAAQ,EAAE,MAAM,QAAQ,CAAC;AAO5C,OAAO,KAAK,EAEV,oBAAoB,EACpB,mCAAmC,EACnC,SAAS,EACT,YAAY,EAEb,MAAM,mBAAmB,CAAC;AAC3B,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,2BAA2B,CAAC;AAIrE,MAAM,WAAW,oCAAoC;IACnD,UAAU,EAAE;QACV,OAAO,EAAE,OAAO,CAAC;QACjB,GAAG,CAAC,EAAE,MAAM,CAAC;KACd,CAAC;IAEF,WAAW,EAAE;QACX,OAAO,EAAE,OAAO,CAAC;KAClB,CAAC;IAEF,IAAI,EAAE;QACJ,IAAI,EAAE,MAAM,CAAC;QACb,OAAO,CAAC,EAAE,MAAM,CAAC;QACjB,YAAY,CAAC,EAAE,MAAM,CAAC;KACvB,CAAC;CACH;AAED,MAAM,WAAW,4CACf,SAAQ,mCAAmC;IAC3C,IAAI,CAAC,EAAE;QACL,IAAI,CAAC,EAAE,MAAM,CAAC;KACf,CAAC;CACH;AAED,eAAO,MAAM,kCAAkC,YACpC,oCAAoC,iCAG9C,CAAC;AAEF,cAAM,4BAA6B,YAAW,oBAAoB;;IAChE,IAAI,SAA6B;IAEjC,IAAI,EAAE,YAAY,CAAiB;IAEnC,OAAO,EAAE,oCAAoC,CAAC;IAE9C,OAAO,EAAE,4CAA4C,CAAM;gBAQ/C,OAAO,EAAE,oCAAoC;IA+BzD,WAAW,CAAC,MAAM,EAAE,YAAY,EAAE,QAAQ,EAAE,SAAS,GAAG,oBAAoB;IAM5E,UAAU,IAAI,IAAI,CAAC,IAAI;IAKvB,SAAS,CAAC,WAAW,EAAE,mBAAmB,GAAG,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;IAoC3D,KAAK;IAiBL,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;IAM/B,WAAW;IA4BX,wBAAwB;IAgBxB,yBAAyB,IAAI,QAAQ;IAgBrC,sBAAsB,IAAI,QAAQ;IAgBlC,8BAA8B,IAAI,QAAQ;IAgB1C,uBAAuB,IAAI,QAAQ;CA8CpC"}

View File

@@ -0,0 +1,248 @@
'use strict';
var path = require('path');
var zip = require('zlib');
var stream = require('stream');
var fse = require('fs-extra');
var tar = require('tar-stream');
var Stringer = require('stream-json/jsonl/Stringer');
var streamChain = require('stream-chain');
var encrypt = require('../../../utils/encryption/encrypt.js');
require('crypto');
var utils = require('./utils.js');
var providers = require('../../../errors/providers.js');
function _class_private_field_loose_base(receiver, privateKey) {
if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) {
throw new TypeError("attempted to use private field on non-instance");
}
return receiver;
}
var id = 0;
function _class_private_field_loose_key(name) {
return "__private_" + id++ + "_" + name;
}
const createLocalFileDestinationProvider = (options)=>{
return new LocalFileDestinationProvider(options);
};
var _providersMetadata = /*#__PURE__*/ _class_private_field_loose_key("_providersMetadata"), _archive = /*#__PURE__*/ _class_private_field_loose_key("_archive"), _diagnostics = /*#__PURE__*/ _class_private_field_loose_key("_diagnostics"), _reportInfo = /*#__PURE__*/ _class_private_field_loose_key("_reportInfo"), _archivePath = /*#__PURE__*/ _class_private_field_loose_key("_archivePath"), _writeMetadata = /*#__PURE__*/ _class_private_field_loose_key("_writeMetadata"), _getMetadataStream = /*#__PURE__*/ _class_private_field_loose_key("_getMetadataStream");
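// Destination provider that writes the transfer output to a local tar archive,
// optionally gzip-compressed and/or encrypted (the chosen options are reflected in
// the final file extension).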
class LocalFileDestinationProvider {
setMetadata(target, metadata) {
_class_private_field_loose_base(this, _providersMetadata)[_providersMetadata][target] = metadata;
return this;
}
createGzip() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating gzip');
return zip.createGzip();
}
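    // Sets up the archive pipeline: a tar pack stream piped through the optional gzip
    // and encryption transforms into a file write stream at the computed archive path.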
bootstrap(diagnostics) {
_class_private_field_loose_base(this, _diagnostics)[_diagnostics] = diagnostics;
const { compression, encryption } = this.options;
if (encryption.enabled && !encryption.key) {
throw new Error("Can't encrypt without a key");
}
_class_private_field_loose_base(this, _archive)[_archive].stream = tar.pack();
const outStream = fse.createWriteStream(_class_private_field_loose_base(this, _archivePath)[_archivePath]);
outStream.on('error', (err)=>{
if (err.code === 'ENOSPC') {
throw new providers.ProviderTransferError("Your server doesn't have space to proceed with the import.");
}
throw err;
});
const archiveTransforms = [];
if (compression.enabled) {
archiveTransforms.push(this.createGzip());
}
if (encryption.enabled && encryption.key) {
archiveTransforms.push(encrypt.createEncryptionCipher(encryption.key));
}
_class_private_field_loose_base(this, _archive)[_archive].pipeline = streamChain.chain([
_class_private_field_loose_base(this, _archive)[_archive].stream,
...archiveTransforms,
outStream
]);
this.results.file = {
path: _class_private_field_loose_base(this, _archivePath)[_archivePath]
};
}
async close() {
const { stream, pipeline } = _class_private_field_loose_base(this, _archive)[_archive];
if (!stream) {
return;
}
await _class_private_field_loose_base(this, _writeMetadata)[_writeMetadata]();
stream.finalize();
if (pipeline && !pipeline.closed) {
await new Promise((resolve, reject)=>{
pipeline.on('close', resolve).on('error', reject);
});
}
}
async rollback() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('rolling back');
await this.close();
await fse.rm(_class_private_field_loose_base(this, _archivePath)[_archivePath], {
force: true
});
}
getMetadata() {
return null;
}
createSchemasWriteStream() {
if (!_class_private_field_loose_base(this, _archive)[_archive].stream) {
throw new Error('Archive stream is unavailable');
}
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating schemas write stream');
const filePathFactory = utils.createFilePathFactory('schemas');
const entryStream = utils.createTarEntryStream(_class_private_field_loose_base(this, _archive)[_archive].stream, filePathFactory, this.options.file.maxSizeJsonl);
return streamChain.chain([
Stringer.stringer(),
entryStream
]);
}
createEntitiesWriteStream() {
if (!_class_private_field_loose_base(this, _archive)[_archive].stream) {
throw new Error('Archive stream is unavailable');
}
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating entities write stream');
const filePathFactory = utils.createFilePathFactory('entities');
const entryStream = utils.createTarEntryStream(_class_private_field_loose_base(this, _archive)[_archive].stream, filePathFactory, this.options.file.maxSizeJsonl);
return streamChain.chain([
Stringer.stringer(),
entryStream
]);
}
createLinksWriteStream() {
if (!_class_private_field_loose_base(this, _archive)[_archive].stream) {
throw new Error('Archive stream is unavailable');
}
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating links write stream');
const filePathFactory = utils.createFilePathFactory('links');
const entryStream = utils.createTarEntryStream(_class_private_field_loose_base(this, _archive)[_archive].stream, filePathFactory, this.options.file.maxSizeJsonl);
return streamChain.chain([
Stringer.stringer(),
entryStream
]);
}
createConfigurationWriteStream() {
if (!_class_private_field_loose_base(this, _archive)[_archive].stream) {
throw new Error('Archive stream is unavailable');
}
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating configuration write stream');
const filePathFactory = utils.createFilePathFactory('configuration');
const entryStream = utils.createTarEntryStream(_class_private_field_loose_base(this, _archive)[_archive].stream, filePathFactory, this.options.file.maxSizeJsonl);
return streamChain.chain([
Stringer.stringer(),
entryStream
]);
}
createAssetsWriteStream() {
const { stream: archiveStream } = _class_private_field_loose_base(this, _archive)[_archive];
if (!archiveStream) {
throw new Error('Archive stream is unavailable');
}
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating assets write stream');
return new stream.Writable({
objectMode: true,
write (data, _encoding, callback) {
// always write tar files with posix paths so we have a standard format for paths regardless of system
const entryPath = path.posix.join('assets', 'uploads', data.filename);
const entryMetadataPath = path.posix.join('assets', 'metadata', `${data.filename}.json`);
const stringifiedMetadata = JSON.stringify(data.metadata);
archiveStream.entry({
name: entryMetadataPath,
size: stringifiedMetadata.length
}, stringifiedMetadata);
const entry = archiveStream.entry({
name: entryPath,
size: data.stats.size
});
if (!entry) {
callback(new Error(`Failed to create an asset tar entry for ${entryPath}`));
return;
}
data.stream.pipe(entry);
entry.on('finish', ()=>{
callback(null);
}).on('error', (error)=>{
callback(error);
});
}
});
}
constructor(options){
Object.defineProperty(this, _reportInfo, {
value: reportInfo
});
Object.defineProperty(this, _archivePath, {
get: get_archivePath,
set: void 0
});
Object.defineProperty(this, _writeMetadata, {
value: writeMetadata
});
Object.defineProperty(this, _getMetadataStream, {
value: getMetadataStream
});
Object.defineProperty(this, _providersMetadata, {
writable: true,
value: void 0
});
Object.defineProperty(this, _archive, {
writable: true,
value: void 0
});
Object.defineProperty(this, _diagnostics, {
writable: true,
value: void 0
});
this.name = 'destination::local-file';
this.type = 'destination';
this.results = {};
_class_private_field_loose_base(this, _providersMetadata)[_providersMetadata] = {};
_class_private_field_loose_base(this, _archive)[_archive] = {};
this.options = options;
}
}
function reportInfo(message) {
_class_private_field_loose_base(this, _diagnostics)[_diagnostics]?.report({
details: {
createdAt: new Date(),
message,
origin: 'file-destination-provider'
},
kind: 'info'
});
}
function get_archivePath() {
const { encryption, compression, file } = this.options;
let filePath = `${file.path}.tar`;
if (compression.enabled) {
filePath += '.gz';
}
if (encryption.enabled) {
filePath += '.enc';
}
return filePath;
}
async function writeMetadata() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('writing metadata');
const metadata = _class_private_field_loose_base(this, _providersMetadata)[_providersMetadata].source;
if (metadata) {
await new Promise((resolve)=>{
const outStream = _class_private_field_loose_base(this, _getMetadataStream)[_getMetadataStream]();
const data = JSON.stringify(metadata, null, 2);
stream.Readable.from(data).pipe(outStream).on('close', resolve);
});
}
}
function getMetadataStream() {
const { stream } = _class_private_field_loose_base(this, _archive)[_archive];
if (!stream) {
throw new Error('Archive stream is unavailable');
}
return utils.createTarEntryStream(stream, ()=>'metadata.json');
}
exports.createLocalFileDestinationProvider = createLocalFileDestinationProvider;
//# sourceMappingURL=index.js.map
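A minimal usage sketch for the provider above (option values are illustrative; the option shape is inferred from bootstrap() and get_archivePath()):

// Sketch only: createLocalFileDestinationProvider is the export of this file,
// `diagnostics` is assumed to be an IDiagnosticReporter instance.
const provider = createLocalFileDestinationProvider({
  file: { path: './backup', maxSizeJsonl: 2.56e8 }, // archive is written to ./backup.tar[.gz][.enc]
  compression: { enabled: true },                   // adds .gz and pipes the tar pack through zlib.createGzip()
  encryption: { enabled: false },                   // when enabled, a key is mandatory or bootstrap() throws
});
provider.bootstrap(diagnostics);
// ...write schemas/entities/links/configuration/assets through the returned write streams...
await provider.close(); // writes metadata.json, finalizes the tar pack and waits for the pipeline to close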

File diff suppressed because one or more lines are too long


@@ -0,0 +1,246 @@
import path from 'path';
import zip from 'zlib';
import { Writable, Readable } from 'stream';
import { createWriteStream, rm } from 'fs-extra';
import tar from 'tar-stream';
import { stringer } from 'stream-json/jsonl/Stringer';
import { chain } from 'stream-chain';
import { createEncryptionCipher } from '../../../utils/encryption/encrypt.mjs';
import 'crypto';
import { createTarEntryStream, createFilePathFactory } from './utils.mjs';
import { ProviderTransferError } from '../../../errors/providers.mjs';
function _class_private_field_loose_base(receiver, privateKey) {
if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) {
throw new TypeError("attempted to use private field on non-instance");
}
return receiver;
}
var id = 0;
function _class_private_field_loose_key(name) {
return "__private_" + id++ + "_" + name;
}
const createLocalFileDestinationProvider = (options)=>{
return new LocalFileDestinationProvider(options);
};
var _providersMetadata = /*#__PURE__*/ _class_private_field_loose_key("_providersMetadata"), _archive = /*#__PURE__*/ _class_private_field_loose_key("_archive"), _diagnostics = /*#__PURE__*/ _class_private_field_loose_key("_diagnostics"), _reportInfo = /*#__PURE__*/ _class_private_field_loose_key("_reportInfo"), _archivePath = /*#__PURE__*/ _class_private_field_loose_key("_archivePath"), _writeMetadata = /*#__PURE__*/ _class_private_field_loose_key("_writeMetadata"), _getMetadataStream = /*#__PURE__*/ _class_private_field_loose_key("_getMetadataStream");
class LocalFileDestinationProvider {
setMetadata(target, metadata) {
_class_private_field_loose_base(this, _providersMetadata)[_providersMetadata][target] = metadata;
return this;
}
createGzip() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating gzip');
return zip.createGzip();
}
bootstrap(diagnostics) {
_class_private_field_loose_base(this, _diagnostics)[_diagnostics] = diagnostics;
const { compression, encryption } = this.options;
if (encryption.enabled && !encryption.key) {
throw new Error("Can't encrypt without a key");
}
_class_private_field_loose_base(this, _archive)[_archive].stream = tar.pack();
const outStream = createWriteStream(_class_private_field_loose_base(this, _archivePath)[_archivePath]);
outStream.on('error', (err)=>{
if (err.code === 'ENOSPC') {
throw new ProviderTransferError("Your server doesn't have space to proceed with the import.");
}
throw err;
});
const archiveTransforms = [];
if (compression.enabled) {
archiveTransforms.push(this.createGzip());
}
if (encryption.enabled && encryption.key) {
archiveTransforms.push(createEncryptionCipher(encryption.key));
}
_class_private_field_loose_base(this, _archive)[_archive].pipeline = chain([
_class_private_field_loose_base(this, _archive)[_archive].stream,
...archiveTransforms,
outStream
]);
this.results.file = {
path: _class_private_field_loose_base(this, _archivePath)[_archivePath]
};
}
async close() {
const { stream, pipeline } = _class_private_field_loose_base(this, _archive)[_archive];
if (!stream) {
return;
}
await _class_private_field_loose_base(this, _writeMetadata)[_writeMetadata]();
stream.finalize();
if (pipeline && !pipeline.closed) {
await new Promise((resolve, reject)=>{
pipeline.on('close', resolve).on('error', reject);
});
}
}
async rollback() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('rolling back');
await this.close();
await rm(_class_private_field_loose_base(this, _archivePath)[_archivePath], {
force: true
});
}
getMetadata() {
return null;
}
createSchemasWriteStream() {
if (!_class_private_field_loose_base(this, _archive)[_archive].stream) {
throw new Error('Archive stream is unavailable');
}
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating schemas write stream');
const filePathFactory = createFilePathFactory('schemas');
const entryStream = createTarEntryStream(_class_private_field_loose_base(this, _archive)[_archive].stream, filePathFactory, this.options.file.maxSizeJsonl);
return chain([
stringer(),
entryStream
]);
}
createEntitiesWriteStream() {
if (!_class_private_field_loose_base(this, _archive)[_archive].stream) {
throw new Error('Archive stream is unavailable');
}
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating entities write stream');
const filePathFactory = createFilePathFactory('entities');
const entryStream = createTarEntryStream(_class_private_field_loose_base(this, _archive)[_archive].stream, filePathFactory, this.options.file.maxSizeJsonl);
return chain([
stringer(),
entryStream
]);
}
createLinksWriteStream() {
if (!_class_private_field_loose_base(this, _archive)[_archive].stream) {
throw new Error('Archive stream is unavailable');
}
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating links write stream');
const filePathFactory = createFilePathFactory('links');
const entryStream = createTarEntryStream(_class_private_field_loose_base(this, _archive)[_archive].stream, filePathFactory, this.options.file.maxSizeJsonl);
return chain([
stringer(),
entryStream
]);
}
createConfigurationWriteStream() {
if (!_class_private_field_loose_base(this, _archive)[_archive].stream) {
throw new Error('Archive stream is unavailable');
}
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating configuration write stream');
const filePathFactory = createFilePathFactory('configuration');
const entryStream = createTarEntryStream(_class_private_field_loose_base(this, _archive)[_archive].stream, filePathFactory, this.options.file.maxSizeJsonl);
return chain([
stringer(),
entryStream
]);
}
createAssetsWriteStream() {
const { stream: archiveStream } = _class_private_field_loose_base(this, _archive)[_archive];
if (!archiveStream) {
throw new Error('Archive stream is unavailable');
}
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating assets write stream');
return new Writable({
objectMode: true,
write (data, _encoding, callback) {
// always write tar files with posix paths so we have a standard format for paths regardless of system
const entryPath = path.posix.join('assets', 'uploads', data.filename);
const entryMetadataPath = path.posix.join('assets', 'metadata', `${data.filename}.json`);
const stringifiedMetadata = JSON.stringify(data.metadata);
archiveStream.entry({
name: entryMetadataPath,
size: stringifiedMetadata.length
}, stringifiedMetadata);
const entry = archiveStream.entry({
name: entryPath,
size: data.stats.size
});
if (!entry) {
callback(new Error(`Failed to create an asset tar entry for ${entryPath}`));
return;
}
data.stream.pipe(entry);
entry.on('finish', ()=>{
callback(null);
}).on('error', (error)=>{
callback(error);
});
}
});
}
constructor(options){
Object.defineProperty(this, _reportInfo, {
value: reportInfo
});
Object.defineProperty(this, _archivePath, {
get: get_archivePath,
set: void 0
});
Object.defineProperty(this, _writeMetadata, {
value: writeMetadata
});
Object.defineProperty(this, _getMetadataStream, {
value: getMetadataStream
});
Object.defineProperty(this, _providersMetadata, {
writable: true,
value: void 0
});
Object.defineProperty(this, _archive, {
writable: true,
value: void 0
});
Object.defineProperty(this, _diagnostics, {
writable: true,
value: void 0
});
this.name = 'destination::local-file';
this.type = 'destination';
this.results = {};
_class_private_field_loose_base(this, _providersMetadata)[_providersMetadata] = {};
_class_private_field_loose_base(this, _archive)[_archive] = {};
this.options = options;
}
}
function reportInfo(message) {
_class_private_field_loose_base(this, _diagnostics)[_diagnostics]?.report({
details: {
createdAt: new Date(),
message,
origin: 'file-destination-provider'
},
kind: 'info'
});
}
function get_archivePath() {
const { encryption, compression, file } = this.options;
let filePath = `${file.path}.tar`;
if (compression.enabled) {
filePath += '.gz';
}
if (encryption.enabled) {
filePath += '.enc';
}
return filePath;
}
async function writeMetadata() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('writing metadata');
const metadata = _class_private_field_loose_base(this, _providersMetadata)[_providersMetadata].source;
if (metadata) {
await new Promise((resolve)=>{
const outStream = _class_private_field_loose_base(this, _getMetadataStream)[_getMetadataStream]();
const data = JSON.stringify(metadata, null, 2);
Readable.from(data).pipe(outStream).on('close', resolve);
});
}
}
function getMetadataStream() {
const { stream } = _class_private_field_loose_base(this, _archive)[_archive];
if (!stream) {
throw new Error('Archive stream is unavailable');
}
return createTarEntryStream(stream, ()=>'metadata.json');
}
export { createLocalFileDestinationProvider };
//# sourceMappingURL=index.mjs.map

File diff suppressed because one or more lines are too long


@@ -0,0 +1,10 @@
/// <reference types="node" />
import { Writable } from 'stream';
import tar from 'tar-stream';
/**
* Create a file path factory for a given path & prefix.
* Upon being called, the factory will return a file path for a given index
*/
export declare const createFilePathFactory: (type: string) => (fileIndex?: number) => string;
export declare const createTarEntryStream: (archive: tar.Pack, pathFactory: (index?: number) => string, maxSize?: number) => Writable;
//# sourceMappingURL=utils.d.ts.map


@@ -0,0 +1 @@
{"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../../../../src/file/providers/destination/utils.ts"],"names":[],"mappings":";AAAA,OAAO,EAAE,QAAQ,EAAE,MAAM,QAAQ,CAAC;AAElC,OAAO,GAAG,MAAM,YAAY,CAAC;AAE7B;;;GAGG;AACH,eAAO,MAAM,qBAAqB,SACzB,MAAM,6BACI,MAQhB,CAAC;AAEJ,eAAO,MAAM,oBAAoB,YACtB,IAAI,IAAI,eACJ,CAAC,KAAK,CAAC,EAAE,MAAM,KAAK,MAAM,+BAuDxC,CAAC"}


@@ -0,0 +1,63 @@
'use strict';
var stream = require('stream');
var path = require('path');
/**
* Create a file path factory for a given path & prefix.
* Upon being called, the factory will return a file path for a given index
*/ const createFilePathFactory = (type)=>(fileIndex = 0)=>{
// always write tar files with posix paths so we have a standard format for paths regardless of system
return path.posix.join(// "{type}" directory
type, // "${type}_XXXXX.jsonl" file
`${type}_${String(fileIndex).padStart(5, '0')}.jsonl`);
};
const createTarEntryStream = (archive, pathFactory, maxSize = 2.56e8)=>{
let fileIndex = 0;
let buffer = '';
const flush = async ()=>{
if (!buffer) {
return;
}
fileIndex += 1;
const name = pathFactory(fileIndex);
const size = buffer.length;
await new Promise((resolve, reject)=>{
archive.entry({
name,
size
}, buffer, (err)=>{
if (err) {
reject(err);
}
resolve();
});
});
buffer = '';
};
const push = (chunk)=>{
buffer += chunk;
};
return new stream.Writable({
async destroy (err, callback) {
await flush();
callback(err);
},
async write (chunk, _encoding, callback) {
const size = chunk.length;
if (chunk.length > maxSize) {
callback(new Error(`payload too large: ${chunk.length}>${maxSize}`));
return;
}
if (buffer.length + size > maxSize) {
await flush();
}
push(chunk);
callback(null);
}
});
};
exports.createFilePathFactory = createFilePathFactory;
exports.createTarEntryStream = createTarEntryStream;
//# sourceMappingURL=utils.js.map
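As an illustration of how the two helpers above compose (values are examples, not taken from the package docs):

const toPath = createFilePathFactory('entities');
toPath();   // 'entities/entities_00000.jsonl'
toPath(12); // 'entities/entities_00012.jsonl'

// createTarEntryStream buffers incoming chunks and flushes them into numbered tar
// entries whenever the buffer would exceed maxSize (default 2.56e8 bytes); since the
// counter is incremented before each flush, the first entry written is *_00001.jsonl.
// A single chunk larger than maxSize is rejected with a "payload too large" error.
const entryStream = createTarEntryStream(tar.pack(), toPath, 1024);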


@@ -0,0 +1 @@
{"version":3,"file":"utils.js","sources":["../../../../src/file/providers/destination/utils.ts"],"sourcesContent":["import { Writable } from 'stream';\nimport { posix } from 'path';\nimport tar from 'tar-stream';\n\n/**\n * Create a file path factory for a given path & prefix.\n * Upon being called, the factory will return a file path for a given index\n */\nexport const createFilePathFactory =\n (type: string) =>\n (fileIndex = 0): string => {\n // always write tar files with posix paths so we have a standard format for paths regardless of system\n return posix.join(\n // \"{type}\" directory\n type,\n // \"${type}_XXXXX.jsonl\" file\n `${type}_${String(fileIndex).padStart(5, '0')}.jsonl`\n );\n };\n\nexport const createTarEntryStream = (\n archive: tar.Pack,\n pathFactory: (index?: number) => string,\n maxSize = 2.56e8\n) => {\n let fileIndex = 0;\n let buffer = '';\n\n const flush = async () => {\n if (!buffer) {\n return;\n }\n\n fileIndex += 1;\n const name = pathFactory(fileIndex);\n const size = buffer.length;\n\n await new Promise<void>((resolve, reject) => {\n archive.entry({ name, size }, buffer, (err) => {\n if (err) {\n reject(err);\n }\n\n resolve();\n });\n });\n\n buffer = '';\n };\n\n const push = (chunk: string | Buffer) => {\n buffer += chunk;\n };\n\n return new Writable({\n async destroy(err, callback) {\n await flush();\n callback(err);\n },\n\n async write(chunk, _encoding, callback) {\n const size = chunk.length;\n\n if (chunk.length > maxSize) {\n callback(new Error(`payload too large: ${chunk.length}>${maxSize}`));\n return;\n }\n\n if (buffer.length + size > maxSize) {\n await flush();\n }\n\n push(chunk);\n\n callback(null);\n },\n });\n};\n"],"names":["createFilePathFactory","type","fileIndex","posix","join","String","padStart","createTarEntryStream","archive","pathFactory","maxSize","buffer","flush","name","size","length","Promise","resolve","reject","entry","err","push","chunk","Writable","destroy","callback","write","_encoding","Error"],"mappings":";;;;;AAIA;;;AAGC,IACYA,MAAAA,qBAAAA,GACX,CAACC,IACD,GAAA,CAACC,YAAY,CAAC,GAAA;;QAEZ,OAAOC,UAAAA,CAAMC,IAAI;AAEfH,QAAAA,IAAAA;QAEA,CAAC,EAAEA,IAAK,CAAA,CAAC,EAAEI,MAAAA,CAAOH,SAAWI,CAAAA,CAAAA,QAAQ,CAAC,CAAA,EAAG,GAAK,CAAA,CAAA,MAAM,CAAC,CAAA;;MAI9CC,oBAAuB,GAAA,CAClCC,OACAC,EAAAA,WAAAA,EACAC,UAAU,MAAM,GAAA;AAEhB,IAAA,IAAIR,SAAY,GAAA,CAAA;AAChB,IAAA,IAAIS,MAAS,GAAA,EAAA;AAEb,IAAA,MAAMC,KAAQ,GAAA,UAAA;AACZ,QAAA,IAAI,CAACD,MAAQ,EAAA;AACX,YAAA;AACF;QAEAT,SAAa,IAAA,CAAA;AACb,QAAA,MAAMW,OAAOJ,WAAYP,CAAAA,SAAAA,CAAAA;QACzB,MAAMY,IAAAA,GAAOH,OAAOI,MAAM;QAE1B,MAAM,IAAIC,OAAc,CAAA,CAACC,OAASC,EAAAA,MAAAA,GAAAA;AAChCV,YAAAA,OAAAA,CAAQW,KAAK,CAAC;AAAEN,gBAAAA,IAAAA;AAAMC,gBAAAA;AAAK,aAAA,EAAGH,QAAQ,CAACS,GAAAA,GAAAA;AACrC,gBAAA,IAAIA,GAAK,EAAA;oBACPF,MAAOE,CAAAA,GAAAA,CAAAA;AACT;AAEAH,gBAAAA,OAAAA,EAAAA;AACF,aAAA,CAAA;AACF,SAAA,CAAA;QAEAN,MAAS,GAAA,EAAA;AACX,KAAA;AAEA,IAAA,MAAMU,OAAO,CAACC,KAAAA,GAAAA;QACZX,MAAUW,IAAAA,KAAAA;AACZ,KAAA;AAEA,IAAA,OAAO,IAAIC,eAAS,CAAA;QAClB,MAAMC,OAAAA,CAAAA,CAAQJ,GAAG,EAAEK,QAAQ,EAAA;YACzB,MAAMb,KAAAA,EAAAA;YACNa,QAASL,CAAAA,GAAAA,CAAAA;AACX,SAAA;AAEA,QAAA,MAAMM,KAAMJ,CAAAA,CAAAA,KAAK,EAAEK,SAAS,EAAEF,QAAQ,EAAA;YACpC,MAAMX,IAAAA,GAAOQ,MAAMP,MAAM;YAEzB,IAAIO,KAAAA,CAAMP,MAAM,GAAGL,OAAS,EAAA;gBAC1Be,QAAS,CAAA,IAAIG,KAAM,CAAA,CAAC,mBAAmB,EAAEN,KAAMP,CAAAA,MAAM,CAAC,CAAC,EAAEL,OAAAA,CAAQ,CAAC,CAAA,CAAA;AAClE,gBAAA;AACF;AAEA,YAAA,IAAIC,MAAOI,CAAAA,MAAM,GAAGD,IAAAA,GAAOJ,OAAS,EAAA;gBAClC,MAAME,KAAAA,EAAAA;AACR;YAEAS,IAAKC,CAAAA,KAAAA,CAAAA;YAELG,QAAS,CAAA,IAAA,CAAA;AACX;AACF,KAAA,CAAA;AACF;;;;;"}


@@ -0,0 +1,60 @@
import { Writable } from 'stream';
import { posix } from 'path';
/**
* Create a file path factory for a given path & prefix.
* Upon being called, the factory will return a file path for a given index
*/ const createFilePathFactory = (type)=>(fileIndex = 0)=>{
// always write tar files with posix paths so we have a standard format for paths regardless of system
return posix.join(// "{type}" directory
type, // "${type}_XXXXX.jsonl" file
`${type}_${String(fileIndex).padStart(5, '0')}.jsonl`);
};
const createTarEntryStream = (archive, pathFactory, maxSize = 2.56e8)=>{
let fileIndex = 0;
let buffer = '';
const flush = async ()=>{
if (!buffer) {
return;
}
fileIndex += 1;
const name = pathFactory(fileIndex);
const size = buffer.length;
await new Promise((resolve, reject)=>{
archive.entry({
name,
size
}, buffer, (err)=>{
if (err) {
reject(err);
}
resolve();
});
});
buffer = '';
};
const push = (chunk)=>{
buffer += chunk;
};
return new Writable({
async destroy (err, callback) {
await flush();
callback(err);
},
async write (chunk, _encoding, callback) {
const size = chunk.length;
if (chunk.length > maxSize) {
callback(new Error(`payload too large: ${chunk.length}>${maxSize}`));
return;
}
if (buffer.length + size > maxSize) {
await flush();
}
push(chunk);
callback(null);
}
});
};
export { createFilePathFactory, createTarEntryStream };
//# sourceMappingURL=utils.mjs.map


@@ -0,0 +1 @@
{"version":3,"file":"utils.mjs","sources":["../../../../src/file/providers/destination/utils.ts"],"sourcesContent":["import { Writable } from 'stream';\nimport { posix } from 'path';\nimport tar from 'tar-stream';\n\n/**\n * Create a file path factory for a given path & prefix.\n * Upon being called, the factory will return a file path for a given index\n */\nexport const createFilePathFactory =\n (type: string) =>\n (fileIndex = 0): string => {\n // always write tar files with posix paths so we have a standard format for paths regardless of system\n return posix.join(\n // \"{type}\" directory\n type,\n // \"${type}_XXXXX.jsonl\" file\n `${type}_${String(fileIndex).padStart(5, '0')}.jsonl`\n );\n };\n\nexport const createTarEntryStream = (\n archive: tar.Pack,\n pathFactory: (index?: number) => string,\n maxSize = 2.56e8\n) => {\n let fileIndex = 0;\n let buffer = '';\n\n const flush = async () => {\n if (!buffer) {\n return;\n }\n\n fileIndex += 1;\n const name = pathFactory(fileIndex);\n const size = buffer.length;\n\n await new Promise<void>((resolve, reject) => {\n archive.entry({ name, size }, buffer, (err) => {\n if (err) {\n reject(err);\n }\n\n resolve();\n });\n });\n\n buffer = '';\n };\n\n const push = (chunk: string | Buffer) => {\n buffer += chunk;\n };\n\n return new Writable({\n async destroy(err, callback) {\n await flush();\n callback(err);\n },\n\n async write(chunk, _encoding, callback) {\n const size = chunk.length;\n\n if (chunk.length > maxSize) {\n callback(new Error(`payload too large: ${chunk.length}>${maxSize}`));\n return;\n }\n\n if (buffer.length + size > maxSize) {\n await flush();\n }\n\n push(chunk);\n\n callback(null);\n },\n });\n};\n"],"names":["createFilePathFactory","type","fileIndex","posix","join","String","padStart","createTarEntryStream","archive","pathFactory","maxSize","buffer","flush","name","size","length","Promise","resolve","reject","entry","err","push","chunk","Writable","destroy","callback","write","_encoding","Error"],"mappings":";;;AAIA;;;AAGC,IACYA,MAAAA,qBAAAA,GACX,CAACC,IACD,GAAA,CAACC,YAAY,CAAC,GAAA;;QAEZ,OAAOC,KAAAA,CAAMC,IAAI;AAEfH,QAAAA,IAAAA;QAEA,CAAC,EAAEA,IAAK,CAAA,CAAC,EAAEI,MAAAA,CAAOH,SAAWI,CAAAA,CAAAA,QAAQ,CAAC,CAAA,EAAG,GAAK,CAAA,CAAA,MAAM,CAAC,CAAA;;MAI9CC,oBAAuB,GAAA,CAClCC,OACAC,EAAAA,WAAAA,EACAC,UAAU,MAAM,GAAA;AAEhB,IAAA,IAAIR,SAAY,GAAA,CAAA;AAChB,IAAA,IAAIS,MAAS,GAAA,EAAA;AAEb,IAAA,MAAMC,KAAQ,GAAA,UAAA;AACZ,QAAA,IAAI,CAACD,MAAQ,EAAA;AACX,YAAA;AACF;QAEAT,SAAa,IAAA,CAAA;AACb,QAAA,MAAMW,OAAOJ,WAAYP,CAAAA,SAAAA,CAAAA;QACzB,MAAMY,IAAAA,GAAOH,OAAOI,MAAM;QAE1B,MAAM,IAAIC,OAAc,CAAA,CAACC,OAASC,EAAAA,MAAAA,GAAAA;AAChCV,YAAAA,OAAAA,CAAQW,KAAK,CAAC;AAAEN,gBAAAA,IAAAA;AAAMC,gBAAAA;AAAK,aAAA,EAAGH,QAAQ,CAACS,GAAAA,GAAAA;AACrC,gBAAA,IAAIA,GAAK,EAAA;oBACPF,MAAOE,CAAAA,GAAAA,CAAAA;AACT;AAEAH,gBAAAA,OAAAA,EAAAA;AACF,aAAA,CAAA;AACF,SAAA,CAAA;QAEAN,MAAS,GAAA,EAAA;AACX,KAAA;AAEA,IAAA,MAAMU,OAAO,CAACC,KAAAA,GAAAA;QACZX,MAAUW,IAAAA,KAAAA;AACZ,KAAA;AAEA,IAAA,OAAO,IAAIC,QAAS,CAAA;QAClB,MAAMC,OAAAA,CAAAA,CAAQJ,GAAG,EAAEK,QAAQ,EAAA;YACzB,MAAMb,KAAAA,EAAAA;YACNa,QAASL,CAAAA,GAAAA,CAAAA;AACX,SAAA;AAEA,QAAA,MAAMM,KAAMJ,CAAAA,CAAAA,KAAK,EAAEK,SAAS,EAAEF,QAAQ,EAAA;YACpC,MAAMX,IAAAA,GAAOQ,MAAMP,MAAM;YAEzB,IAAIO,KAAAA,CAAMP,MAAM,GAAGL,OAAS,EAAA;gBAC1Be,QAAS,CAAA,IAAIG,KAAM,CAAA,CAAC,mBAAmB,EAAEN,KAAMP,CAAAA,MAAM,CAAC,CAAC,EAAEL,OAAAA,CAAQ,CAAC,CAAA,CAAA;AAClE,gBAAA;AACF;AAEA,YAAA,IAAIC,MAAOI,CAAAA,MAAM,GAAGD,IAAAA,GAAOJ,OAAS,EAAA;gBAClC,MAAME,KAAAA,EAAAA;AACR;YAEAS,IAAKC,CAAAA,KAAAA,CAAAA;YAELG,QAAS,CAAA,IAAA,CAAA;AACX;AACF,KAAA,CAAA;AACF;;;;"}


@@ -0,0 +1,3 @@
export * from './source';
export * from './destination';
//# sourceMappingURL=index.d.ts.map


@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/file/providers/index.ts"],"names":[],"mappings":"AAAA,cAAc,UAAU,CAAC;AACzB,cAAc,eAAe,CAAC"}


@@ -0,0 +1,10 @@
'use strict';
var index = require('./source/index.js');
var index$1 = require('./destination/index.js');
exports.createLocalFileSourceProvider = index.createLocalFileSourceProvider;
exports.createLocalFileDestinationProvider = index$1.createLocalFileDestinationProvider;
//# sourceMappingURL=index.js.map


@@ -0,0 +1 @@
{"version":3,"file":"index.js","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;"}


@@ -0,0 +1,3 @@
export { createLocalFileSourceProvider } from './source/index.mjs';
export { createLocalFileDestinationProvider } from './destination/index.mjs';
//# sourceMappingURL=index.mjs.map


@@ -0,0 +1 @@
{"version":3,"file":"index.mjs","sources":[],"sourcesContent":[],"names":[],"mappings":";"}


@@ -0,0 +1,40 @@
/// <reference types="node" />
import type { Readable } from 'stream';
import type { IMetadata, ISourceProvider, ProviderType } from '../../../../types';
import type { IDiagnosticReporter } from '../../../utils/diagnostic';
/**
* Provider options
*/
export interface ILocalFileSourceProviderOptions {
file: {
path: string;
};
encryption: {
enabled: boolean;
key?: string;
};
compression: {
enabled: boolean;
};
}
export declare const createLocalFileSourceProvider: (options: ILocalFileSourceProviderOptions) => LocalFileSourceProvider;
declare class LocalFileSourceProvider implements ISourceProvider {
#private;
type: ProviderType;
name: string;
options: ILocalFileSourceProviderOptions;
constructor(options: ILocalFileSourceProviderOptions);
/**
* Pre-flight checks regarding the provided options, making sure that the file can be opened (decrypted, decompressed), etc.
*/
bootstrap(diagnostics: IDiagnosticReporter): Promise<void>;
getMetadata(): Promise<IMetadata | null>;
getSchemas(): Promise<any>;
createEntitiesReadStream(): Readable;
createSchemasReadStream(): Readable;
createLinksReadStream(): Readable;
createConfigurationReadStream(): Readable;
createAssetsReadStream(): Readable | Promise<Readable>;
}
export {};
//# sourceMappingURL=index.d.ts.map
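A minimal sketch of constructing the source provider against the interface above; the backup path and the diagnostics reporter are placeholders:

const source = createLocalFileSourceProvider({
  file: { path: './backup.tar.gz' },
  compression: { enabled: true },
  encryption: { enabled: false }, // the constructor throws 'Missing encryption key' if enabled without a key
});
// bootstrap() parses metadata.json up front so an unreadable archive (wrong key,
// unexpected compression, corruption) fails before any data is streamed.
await source.bootstrap(diagnostics);
const metadata = await source.getMetadata();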


@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/file/providers/source/index.ts"],"names":[],"mappings":";AAAA,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,QAAQ,CAAC;AAYvC,OAAO,KAAK,EAAU,SAAS,EAAE,eAAe,EAAE,YAAY,EAAS,MAAM,mBAAmB,CAAC;AACjG,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,2BAA2B,CAAC;AAarE;;GAEG;AACH,MAAM,WAAW,+BAA+B;IAC9C,IAAI,EAAE;QACJ,IAAI,EAAE,MAAM,CAAC;KACd,CAAC;IAEF,UAAU,EAAE;QACV,OAAO,EAAE,OAAO,CAAC;QACjB,GAAG,CAAC,EAAE,MAAM,CAAC;KACd,CAAC;IAEF,WAAW,EAAE;QACX,OAAO,EAAE,OAAO,CAAC;KAClB,CAAC;CACH;AAED,eAAO,MAAM,6BAA6B,YAAa,+BAA+B,4BAErF,CAAC;AAEF,cAAM,uBAAwB,YAAW,eAAe;;IACtD,IAAI,EAAE,YAAY,CAAY;IAE9B,IAAI,SAAwB;IAE5B,OAAO,EAAE,+BAA+B,CAAC;gBAM7B,OAAO,EAAE,+BAA+B;IAqBpD;;OAEG;IACG,SAAS,CAAC,WAAW,EAAE,mBAAmB;IAgC1C,WAAW;IASX,UAAU;IAiBhB,wBAAwB,IAAI,QAAQ;IAKpC,uBAAuB,IAAI,QAAQ;IAKnC,qBAAqB,IAAI,QAAQ;IAKjC,6BAA6B,IAAI,QAAQ;IAMzC,sBAAsB,IAAI,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;CA6KvD"}


@@ -0,0 +1,288 @@
'use strict';
var zip = require('zlib');
var path = require('path');
var stream$1 = require('stream');
var fse = require('fs-extra');
var tar = require('tar');
var fp = require('lodash/fp');
var streamChain = require('stream-chain');
var Parser = require('stream-json/jsonl/Parser');
require('crypto');
var decrypt = require('../../../utils/encryption/decrypt.js');
var stream = require('../../../utils/stream.js');
var schema = require('../../../utils/schema.js');
require('events');
var providers = require('../../../errors/providers.js');
var utils = require('./utils.js');
function _class_private_field_loose_base(receiver, privateKey) {
if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) {
throw new TypeError("attempted to use private field on non-instance");
}
return receiver;
}
var id = 0;
function _class_private_field_loose_key(name) {
return "__private_" + id++ + "_" + name;
}
/**
* Constant for the metadata file path
*/ const METADATA_FILE_PATH = 'metadata.json';
const createLocalFileSourceProvider = (options)=>{
return new LocalFileSourceProvider(options);
};
var _metadata = /*#__PURE__*/ _class_private_field_loose_key("_metadata"), _diagnostics = /*#__PURE__*/ _class_private_field_loose_key("_diagnostics"), _reportInfo = /*#__PURE__*/ _class_private_field_loose_key("_reportInfo"), _loadMetadata = /*#__PURE__*/ _class_private_field_loose_key("_loadMetadata"), _loadAssetMetadata = /*#__PURE__*/ _class_private_field_loose_key("_loadAssetMetadata"), _getBackupStream = /*#__PURE__*/ _class_private_field_loose_key("_getBackupStream"), // `directory` must be posix formatted path
_streamJsonlDirectory = /*#__PURE__*/ _class_private_field_loose_key("_streamJsonlDirectory"), _parseJSONFile = /*#__PURE__*/ _class_private_field_loose_key("_parseJSONFile");
class LocalFileSourceProvider {
/**
* Pre-flight checks regarding the provided options, making sure that the file can be opened (decrypted, decompressed), etc.
*/ async bootstrap(diagnostics) {
_class_private_field_loose_base(this, _diagnostics)[_diagnostics] = diagnostics;
const { path: filePath } = this.options.file;
try {
// Read the metadata to ensure the file can be parsed
await _class_private_field_loose_base(this, _loadMetadata)[_loadMetadata]();
// TODO: we might also need to read the schema.jsonl files & implement a custom stream-check
} catch (e) {
if (this.options?.encryption?.enabled) {
throw new providers.ProviderInitializationError(`Key is incorrect or the file '${filePath}' is not a valid Strapi data file.`);
}
throw new providers.ProviderInitializationError(`File '${filePath}' is not a valid Strapi data file.`);
}
if (!_class_private_field_loose_base(this, _metadata)[_metadata]) {
throw new providers.ProviderInitializationError('Could not load metadata from Strapi data file.');
}
}
async getMetadata() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('getting metadata');
if (!_class_private_field_loose_base(this, _metadata)[_metadata]) {
await _class_private_field_loose_base(this, _loadMetadata)[_loadMetadata]();
}
return _class_private_field_loose_base(this, _metadata)[_metadata] ?? null;
}
async getSchemas() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('getting schemas');
const schemaCollection = await stream.collect(this.createSchemasReadStream());
if (fp.isEmpty(schemaCollection)) {
throw new providers.ProviderInitializationError('Could not load schemas from Strapi data file.');
}
// Group schema by UID
const schemas = fp.keyBy('uid', schemaCollection);
// Transform to valid JSON
return schema.schemasToValidJSON(schemas);
}
createEntitiesReadStream() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating entities read stream');
return _class_private_field_loose_base(this, _streamJsonlDirectory)[_streamJsonlDirectory]('entities');
}
createSchemasReadStream() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating schemas read stream');
return _class_private_field_loose_base(this, _streamJsonlDirectory)[_streamJsonlDirectory]('schemas');
}
createLinksReadStream() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating links read stream');
return _class_private_field_loose_base(this, _streamJsonlDirectory)[_streamJsonlDirectory]('links');
}
createConfigurationReadStream() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating configuration read stream');
// NOTE: TBD
return _class_private_field_loose_base(this, _streamJsonlDirectory)[_streamJsonlDirectory]('configuration');
}
createAssetsReadStream() {
const inStream = _class_private_field_loose_base(this, _getBackupStream)[_getBackupStream]();
const outStream = new stream$1.PassThrough({
objectMode: true
});
const loadAssetMetadata = _class_private_field_loose_base(this, _loadAssetMetadata)[_loadAssetMetadata].bind(this);
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating assets read stream');
stream$1.pipeline([
inStream,
new tar.Parse({
// find only files in the assets/uploads folder
filter (filePath, entry) {
if (entry.type !== 'File') {
return false;
}
return utils.isFilePathInDirname('assets/uploads', filePath);
},
async onentry (entry) {
const { path: filePath, size = 0 } = entry;
const normalizedPath = utils.unknownPathToPosix(filePath);
const file = path.basename(normalizedPath);
let metadata;
try {
metadata = await loadAssetMetadata(`assets/metadata/${file}.json`);
} catch (error) {
throw new Error(`Failed to read metadata for ${file}`);
}
const asset = {
metadata,
filename: file,
filepath: normalizedPath,
stats: {
size
},
stream: entry
};
outStream.write(asset);
}
})
], ()=>outStream.end());
return outStream;
}
constructor(options){
Object.defineProperty(this, _reportInfo, {
value: reportInfo
});
Object.defineProperty(this, _loadMetadata, {
value: loadMetadata
});
Object.defineProperty(this, _loadAssetMetadata, {
value: loadAssetMetadata
});
Object.defineProperty(this, _getBackupStream, {
value: getBackupStream
});
Object.defineProperty(this, _streamJsonlDirectory, {
value: streamJsonlDirectory
});
// For collecting an entire JSON file then parsing it, not for streaming JSONL
Object.defineProperty(this, _parseJSONFile, {
value: parseJSONFile
});
Object.defineProperty(this, _metadata, {
writable: true,
value: void 0
});
Object.defineProperty(this, _diagnostics, {
writable: true,
value: void 0
});
this.type = 'source';
this.name = 'source::local-file';
this.options = options;
const { encryption } = this.options;
if (encryption.enabled && encryption.key === undefined) {
throw new Error('Missing encryption key');
}
}
}
function reportInfo(message) {
_class_private_field_loose_base(this, _diagnostics)[_diagnostics]?.report({
details: {
createdAt: new Date(),
message,
origin: 'file-source-provider'
},
kind: 'info'
});
}
async function loadMetadata() {
const backupStream = _class_private_field_loose_base(this, _getBackupStream)[_getBackupStream]();
_class_private_field_loose_base(this, _metadata)[_metadata] = await _class_private_field_loose_base(this, _parseJSONFile)[_parseJSONFile](backupStream, METADATA_FILE_PATH);
}
async function loadAssetMetadata(path) {
const backupStream = _class_private_field_loose_base(this, _getBackupStream)[_getBackupStream]();
return _class_private_field_loose_base(this, _parseJSONFile)[_parseJSONFile](backupStream, path);
}
function getBackupStream() {
const { file, encryption, compression } = this.options;
const streams = [];
try {
streams.push(fse.createReadStream(file.path));
} catch (e) {
throw new Error(`Could not read backup file path provided at "${this.options.file.path}"`);
}
if (encryption.enabled && encryption.key) {
streams.push(decrypt.createDecryptionCipher(encryption.key));
}
if (compression.enabled) {
streams.push(zip.createGunzip());
}
return streamChain.chain(streams);
}
function streamJsonlDirectory(directory) {
const inStream = _class_private_field_loose_base(this, _getBackupStream)[_getBackupStream]();
const outStream = new stream$1.PassThrough({
objectMode: true
});
stream$1.pipeline([
inStream,
new tar.Parse({
filter (filePath, entry) {
if (entry.type !== 'File') {
return false;
}
return utils.isFilePathInDirname(directory, filePath);
},
async onentry (entry) {
const transforms = [
// JSONL parser to read the data chunks one by one (line by line)
Parser.parser({
checkErrors: true
}),
// The JSONL parser returns each line as key/value
(line)=>line.value
];
const stream = entry.pipe(streamChain.chain(transforms));
try {
for await (const chunk of stream){
outStream.write(chunk);
}
} catch (e) {
outStream.destroy(new providers.ProviderTransferError(`Error parsing backup files from backup file ${entry.path}: ${e.message}`, {
details: {
error: e
}
}));
}
}
})
], async ()=>{
// Manually send the 'end' event to the out stream
// once every entry has finished streaming its content
outStream.end();
});
return outStream;
}
async function parseJSONFile(fileStream, filePath) {
return new Promise((resolve, reject)=>{
stream$1.pipeline([
fileStream,
// Custom backup archive parsing
new tar.Parse({
/**
* Filter the parsed entries to only keep the one that matches the given filepath
*/ filter (entryPath, entry) {
if (entry.type !== 'File') {
return false;
}
return utils.isPathEquivalent(entryPath, filePath);
},
async onentry (entry) {
// Collect all the content of the entry file
const content = await entry.collect();
try {
// Parse from buffer array to string to JSON
const parsedContent = JSON.parse(Buffer.concat(content).toString());
// Resolve the Promise with the parsed content
resolve(parsedContent);
} catch (e) {
reject(e);
} finally{
// Cleanup (close the stream associated to the entry)
entry.destroy();
}
}
})
], ()=>{
// If the promise hasn't been resolved and we've parsed all
// the archive entries, then the file doesn't exist
reject(new Error(`File "${filePath}" not found`));
});
});
}
exports.createLocalFileSourceProvider = createLocalFileSourceProvider;
//# sourceMappingURL=index.js.map
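For orientation, a sketch of the archive layout this provider walks, inferred from METADATA_FILE_PATH, streamJsonlDirectory() and createAssetsReadStream() above (file names are examples):

// Layout the provider expects inside backup.tar[.gz][.enc]:
//   metadata.json
//   schemas/schemas_00001.jsonl        entities/entities_00001.jsonl
//   links/links_00001.jsonl            configuration/configuration_00001.jsonl
//   assets/uploads/<filename>          assets/metadata/<filename>.json
//
// Each *ReadStream() re-opens the backup and yields one parsed JSONL line per chunk:
for await (const entity of source.createEntitiesReadStream()) {
  // `entity` is the value of a parsed JSONL line from the entities/ directory
}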

File diff suppressed because one or more lines are too long


@@ -0,0 +1,286 @@
import zip from 'zlib';
import path from 'path';
import { PassThrough, pipeline } from 'stream';
import fse__default from 'fs-extra';
import tar from 'tar';
import { isEmpty, keyBy } from 'lodash/fp';
import { chain } from 'stream-chain';
import { parser } from 'stream-json/jsonl/Parser';
import 'crypto';
import { createDecryptionCipher } from '../../../utils/encryption/decrypt.mjs';
import { collect } from '../../../utils/stream.mjs';
import { schemasToValidJSON } from '../../../utils/schema.mjs';
import 'events';
import { ProviderInitializationError, ProviderTransferError } from '../../../errors/providers.mjs';
import { isFilePathInDirname, unknownPathToPosix, isPathEquivalent } from './utils.mjs';
function _class_private_field_loose_base(receiver, privateKey) {
if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) {
throw new TypeError("attempted to use private field on non-instance");
}
return receiver;
}
var id = 0;
function _class_private_field_loose_key(name) {
return "__private_" + id++ + "_" + name;
}
/**
* Constant for the metadata file path
*/ const METADATA_FILE_PATH = 'metadata.json';
const createLocalFileSourceProvider = (options)=>{
return new LocalFileSourceProvider(options);
};
var _metadata = /*#__PURE__*/ _class_private_field_loose_key("_metadata"), _diagnostics = /*#__PURE__*/ _class_private_field_loose_key("_diagnostics"), _reportInfo = /*#__PURE__*/ _class_private_field_loose_key("_reportInfo"), _loadMetadata = /*#__PURE__*/ _class_private_field_loose_key("_loadMetadata"), _loadAssetMetadata = /*#__PURE__*/ _class_private_field_loose_key("_loadAssetMetadata"), _getBackupStream = /*#__PURE__*/ _class_private_field_loose_key("_getBackupStream"), // `directory` must be posix formatted path
_streamJsonlDirectory = /*#__PURE__*/ _class_private_field_loose_key("_streamJsonlDirectory"), _parseJSONFile = /*#__PURE__*/ _class_private_field_loose_key("_parseJSONFile");
class LocalFileSourceProvider {
/**
* Pre-flight checks regarding the provided options, making sure that the file can be opened (decrypted, decompressed), etc.
*/ async bootstrap(diagnostics) {
_class_private_field_loose_base(this, _diagnostics)[_diagnostics] = diagnostics;
const { path: filePath } = this.options.file;
try {
// Read the metadata to ensure the file can be parsed
await _class_private_field_loose_base(this, _loadMetadata)[_loadMetadata]();
// TODO: we might also need to read the schema.jsonl files & implement a custom stream-check
} catch (e) {
if (this.options?.encryption?.enabled) {
throw new ProviderInitializationError(`Key is incorrect or the file '${filePath}' is not a valid Strapi data file.`);
}
throw new ProviderInitializationError(`File '${filePath}' is not a valid Strapi data file.`);
}
if (!_class_private_field_loose_base(this, _metadata)[_metadata]) {
throw new ProviderInitializationError('Could not load metadata from Strapi data file.');
}
}
async getMetadata() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('getting metadata');
if (!_class_private_field_loose_base(this, _metadata)[_metadata]) {
await _class_private_field_loose_base(this, _loadMetadata)[_loadMetadata]();
}
return _class_private_field_loose_base(this, _metadata)[_metadata] ?? null;
}
async getSchemas() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('getting schemas');
const schemaCollection = await collect(this.createSchemasReadStream());
if (isEmpty(schemaCollection)) {
throw new ProviderInitializationError('Could not load schemas from Strapi data file.');
}
// Group schema by UID
const schemas = keyBy('uid', schemaCollection);
// Transform to valid JSON
return schemasToValidJSON(schemas);
}
createEntitiesReadStream() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating entities read stream');
return _class_private_field_loose_base(this, _streamJsonlDirectory)[_streamJsonlDirectory]('entities');
}
createSchemasReadStream() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating schemas read stream');
return _class_private_field_loose_base(this, _streamJsonlDirectory)[_streamJsonlDirectory]('schemas');
}
createLinksReadStream() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating links read stream');
return _class_private_field_loose_base(this, _streamJsonlDirectory)[_streamJsonlDirectory]('links');
}
createConfigurationReadStream() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating configuration read stream');
// NOTE: TBD
return _class_private_field_loose_base(this, _streamJsonlDirectory)[_streamJsonlDirectory]('configuration');
}
createAssetsReadStream() {
const inStream = _class_private_field_loose_base(this, _getBackupStream)[_getBackupStream]();
const outStream = new PassThrough({
objectMode: true
});
const loadAssetMetadata = _class_private_field_loose_base(this, _loadAssetMetadata)[_loadAssetMetadata].bind(this);
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating assets read stream');
pipeline([
inStream,
new tar.Parse({
// find only files in the assets/uploads folder
filter (filePath, entry) {
if (entry.type !== 'File') {
return false;
}
return isFilePathInDirname('assets/uploads', filePath);
},
async onentry (entry) {
const { path: filePath, size = 0 } = entry;
const normalizedPath = unknownPathToPosix(filePath);
const file = path.basename(normalizedPath);
let metadata;
try {
metadata = await loadAssetMetadata(`assets/metadata/${file}.json`);
} catch (error) {
throw new Error(`Failed to read metadata for ${file}`);
}
const asset = {
metadata,
filename: file,
filepath: normalizedPath,
stats: {
size
},
stream: entry
};
outStream.write(asset);
}
})
], ()=>outStream.end());
return outStream;
}
constructor(options){
Object.defineProperty(this, _reportInfo, {
value: reportInfo
});
Object.defineProperty(this, _loadMetadata, {
value: loadMetadata
});
Object.defineProperty(this, _loadAssetMetadata, {
value: loadAssetMetadata
});
Object.defineProperty(this, _getBackupStream, {
value: getBackupStream
});
Object.defineProperty(this, _streamJsonlDirectory, {
value: streamJsonlDirectory
});
// For collecting an entire JSON file then parsing it, not for streaming JSONL
Object.defineProperty(this, _parseJSONFile, {
value: parseJSONFile
});
Object.defineProperty(this, _metadata, {
writable: true,
value: void 0
});
Object.defineProperty(this, _diagnostics, {
writable: true,
value: void 0
});
this.type = 'source';
this.name = 'source::local-file';
this.options = options;
const { encryption } = this.options;
if (encryption.enabled && encryption.key === undefined) {
throw new Error('Missing encryption key');
}
}
}
function reportInfo(message) {
_class_private_field_loose_base(this, _diagnostics)[_diagnostics]?.report({
details: {
createdAt: new Date(),
message,
origin: 'file-source-provider'
},
kind: 'info'
});
}
async function loadMetadata() {
const backupStream = _class_private_field_loose_base(this, _getBackupStream)[_getBackupStream]();
_class_private_field_loose_base(this, _metadata)[_metadata] = await _class_private_field_loose_base(this, _parseJSONFile)[_parseJSONFile](backupStream, METADATA_FILE_PATH);
}
async function loadAssetMetadata(path) {
const backupStream = _class_private_field_loose_base(this, _getBackupStream)[_getBackupStream]();
return _class_private_field_loose_base(this, _parseJSONFile)[_parseJSONFile](backupStream, path);
}
function getBackupStream() {
const { file, encryption, compression } = this.options;
const streams = [];
try {
streams.push(fse__default.createReadStream(file.path));
} catch (e) {
throw new Error(`Could not read backup file path provided at "${this.options.file.path}"`);
}
if (encryption.enabled && encryption.key) {
streams.push(createDecryptionCipher(encryption.key));
}
if (compression.enabled) {
streams.push(zip.createGunzip());
}
return chain(streams);
}
function streamJsonlDirectory(directory) {
const inStream = _class_private_field_loose_base(this, _getBackupStream)[_getBackupStream]();
const outStream = new PassThrough({
objectMode: true
});
pipeline([
inStream,
new tar.Parse({
filter (filePath, entry) {
if (entry.type !== 'File') {
return false;
}
return isFilePathInDirname(directory, filePath);
},
async onentry (entry) {
const transforms = [
// JSONL parser to read the data chunks one by one (line by line)
parser({
checkErrors: true
}),
// The JSONL parser returns each line as key/value
(line)=>line.value
];
const stream = entry.pipe(chain(transforms));
try {
for await (const chunk of stream){
outStream.write(chunk);
}
} catch (e) {
outStream.destroy(new ProviderTransferError(`Error parsing backup files from backup file ${entry.path}: ${e.message}`, {
details: {
error: e
}
}));
}
}
})
], async ()=>{
// Manually send the 'end' event to the out stream
// once every entry has finished streaming its content
outStream.end();
});
return outStream;
}
async function parseJSONFile(fileStream, filePath) {
return new Promise((resolve, reject)=>{
pipeline([
fileStream,
// Custom backup archive parsing
new tar.Parse({
/**
* Filter the parsed entries to only keep the one that matches the given filepath
*/ filter (entryPath, entry) {
if (entry.type !== 'File') {
return false;
}
return isPathEquivalent(entryPath, filePath);
},
async onentry (entry) {
// Collect all the content of the entry file
const content = await entry.collect();
try {
// Parse from buffer array to string to JSON
const parsedContent = JSON.parse(Buffer.concat(content).toString());
// Resolve the Promise with the parsed content
resolve(parsedContent);
} catch (e) {
reject(e);
} finally{
// Cleanup (close the stream associated to the entry)
entry.destroy();
}
}
})
], ()=>{
// If the promise hasn't been resolved and we've parsed all
// the archive entries, then the file doesn't exist
reject(new Error(`File "${filePath}" not found`));
});
});
}
export { createLocalFileSourceProvider };
//# sourceMappingURL=index.mjs.map

File diff suppressed because one or more lines are too long


@@ -0,0 +1,38 @@
/**
* Note: in versions of the transfer engine <=4.9.0, exports were generated with windows paths
* on Windows systems, and posix paths on posix systems.
*
* We now store all paths as posix, but need to leave a separator conversion for legacy purposes, and to
* support manually-created tar files coming from Windows systems (ie, if a user creates a
* backup file with a windows tar tool rather than using the `export` command)
*
* Because of this, export/import files may never contain files with a forward slash in the name, even escaped
*
* */
/**
* Check if the directory of a given filePath (which can be either posix or win32) resolves to the same as the given posix-format path posixDirName
* We must be able to assume the first argument is a path to a directory and the second is a path to a file, otherwise path.dirname will interpret a path without any slashes as the filename
*
* @param {string} posixDirName A posix path pointing to a directory
* @param {string} filePath an unknown filesystem path pointing to a file
* @returns {boolean} is the file located in the given directory
*/
export declare const isFilePathInDirname: (posixDirName: string, filePath: string) => boolean;
/**
* Check if two paths that can be either in posix or win32 format resolve to the same file
*
* @param {string} pathA a path that may be either win32 or posix
* @param {string} pathB a path that may be either win32 or posix
*
* @returns {boolean} do paths point to the same place
*/
export declare const isPathEquivalent: (pathA: string, pathB: string) => boolean;
/**
* Convert an unknown format path (win32 or posix) to a posix path
*
* @param {string} filePath a path that may be either win32 or posix
*
* @returns {string} a posix path
*/
export declare const unknownPathToPosix: (filePath: string) => string;
//# sourceMappingURL=utils.d.ts.map


@@ -0,0 +1 @@
{"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../../../../src/file/providers/source/utils.ts"],"names":[],"mappings":"AAEA;;;;;;;;;;KAUK;AAEL;;;;;;;GAOG;AACH,eAAO,MAAM,mBAAmB,iBAAkB,MAAM,YAAY,MAAM,YAGzE,CAAC;AAEF;;;;;;;GAOG;AACH,eAAO,MAAM,gBAAgB,UAAW,MAAM,SAAS,MAAM,YAM5D,CAAC;AAEF;;;;;;GAMG;AACH,eAAO,MAAM,kBAAkB,aAAc,MAAM,WAOlD,CAAC"}


@@ -0,0 +1,56 @@
'use strict';
var path = require('path');
/**
* Note: in versions of the transfer engine <=4.9.0, exports were generated with windows paths
* on Windows systems, and posix paths on posix systems.
*
* We now store all paths as posix, but need to leave a separator conversion for legacy purposes, and to
* support manually-created tar files coming from Windows systems (ie, if a user creates a
* backup file with a windows tar tool rather than using the `export` command)
*
* Because of this, export/import files may never contain files with a forward slash in the name, even escaped
*
* */ /**
* Check if the directory of a given filePath (which can be either posix or win32) resolves to the same as the given posix-format path posixDirName
* We must be able to assume the first argument is a path to a directory and the second is a path to a file, otherwise path.dirname will interpret a path without any slashes as the filename
*
* @param {string} posixDirName A posix path pointing to a directory
* @param {string} filePath an unknown filesystem path pointing to a file
* @returns {boolean} is the file located in the given directory
*/ const isFilePathInDirname = (posixDirName, filePath)=>{
const normalizedDir = path.posix.dirname(unknownPathToPosix(filePath));
return isPathEquivalent(posixDirName, normalizedDir);
};
/**
* Check if two paths that can be either in posix or win32 format resolve to the same file
*
* @param {string} pathA a path that may be either win32 or posix
* @param {string} pathB a path that may be either win32 or posix
*
* @returns {boolean} do paths point to the same place
*/ const isPathEquivalent = (pathA, pathB)=>{
// Check if paths appear to be win32 or posix, and if win32 convert to posix
const normalizedPathA = path.posix.normalize(unknownPathToPosix(pathA));
const normalizedPathB = path.posix.normalize(unknownPathToPosix(pathB));
return !path.posix.relative(normalizedPathB, normalizedPathA).length;
};
/**
* Convert an unknown format path (win32 or posix) to a posix path
*
* @param {string} filePath a path that may be either win32 or posix
*
* @returns {string} a posix path
*/ const unknownPathToPosix = (filePath)=>{
// if it includes a forward slash, it must be posix already -- we will not support win32 with mixed path separators
if (filePath.includes(path.posix.sep)) {
return filePath;
}
return path.normalize(filePath).split(path.win32.sep).join(path.posix.sep);
};
exports.isFilePathInDirname = isFilePathInDirname;
exports.isPathEquivalent = isPathEquivalent;
exports.unknownPathToPosix = unknownPathToPosix;
//# sourceMappingURL=utils.js.map
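Illustrative behaviour of the path helpers above (inputs are examples):

unknownPathToPosix('assets\\uploads\\img.png');                    // 'assets/uploads/img.png'
unknownPathToPosix('assets/uploads/img.png');                      // unchanged: already posix
isFilePathInDirname('assets/uploads', 'assets\\uploads\\img.png'); // true
isPathEquivalent('./entities/', 'entities');                       // true: both normalize to 'entities'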


@@ -0,0 +1 @@
{"version":3,"file":"utils.js","sources":["../../../../src/file/providers/source/utils.ts"],"sourcesContent":["import path from 'path';\n\n/**\n * Note: in versions of the transfer engine <=4.9.0, exports were generated with windows paths\n * on Windows systems, and posix paths on posix systems.\n *\n * We now store all paths as posix, but need to leave a separator conversion for legacy purposes, and to\n * support manually-created tar files coming from Windows systems (ie, if a user creates a\n * backup file with a windows tar tool rather than using the `export` command)\n *\n * Because of this, export/import files may never contain files with a forward slash in the name, even escaped\n *\n * */\n\n/**\n * Check if the directory of a given filePath (which can be either posix or win32) resolves to the same as the given posix-format path posixDirName\n * We must be able to assume the first argument is a path to a directory and the second is a path to a file, otherwise path.dirname will interpret a path without any slashes as the filename\n *\n * @param {string} posixDirName A posix path pointing to a directory\n * @param {string} filePath an unknown filesystem path pointing to a file\n * @returns {boolean} is the file located in the given directory\n */\nexport const isFilePathInDirname = (posixDirName: string, filePath: string) => {\n const normalizedDir = path.posix.dirname(unknownPathToPosix(filePath));\n return isPathEquivalent(posixDirName, normalizedDir);\n};\n\n/**\n * Check if two paths that can be either in posix or win32 format resolves to the same file\n *\n * @param {string} pathA a path that may be either win32 or posix\n * @param {string} pathB a path that may be either win32 or posix\n *\n * @returns {boolean} do paths point to the same place\n */\nexport const isPathEquivalent = (pathA: string, pathB: string) => {\n // Check if paths appear to be win32 or posix, and if win32 convert to posix\n const normalizedPathA = path.posix.normalize(unknownPathToPosix(pathA));\n const normalizedPathB = path.posix.normalize(unknownPathToPosix(pathB));\n\n return !path.posix.relative(normalizedPathB, normalizedPathA).length;\n};\n\n/**\n * Convert an unknown format path (win32 or posix) to a posix path\n *\n * @param {string} filePath a path that may be either win32 or posix\n *\n * @returns {string} a posix path\n */\nexport const unknownPathToPosix = (filePath: string) => {\n // if it includes a forward slash, it must be posix already -- we will not support win32 with mixed path separators\n if (filePath.includes(path.posix.sep)) {\n return filePath;\n }\n\n return 
path.normalize(filePath).split(path.win32.sep).join(path.posix.sep);\n};\n"],"names":["isFilePathInDirname","posixDirName","filePath","normalizedDir","path","posix","dirname","unknownPathToPosix","isPathEquivalent","pathA","pathB","normalizedPathA","normalize","normalizedPathB","relative","length","includes","sep","split","win32","join"],"mappings":";;;;AAEA;;;;;;;;;;;;;;;;;AAmBC,IACM,MAAMA,mBAAsB,GAAA,CAACC,YAAsBC,EAAAA,QAAAA,GAAAA;AACxD,IAAA,MAAMC,gBAAgBC,IAAKC,CAAAA,KAAK,CAACC,OAAO,CAACC,kBAAmBL,CAAAA,QAAAA,CAAAA,CAAAA;AAC5D,IAAA,OAAOM,iBAAiBP,YAAcE,EAAAA,aAAAA,CAAAA;AACxC;AAEA;;;;;;;AAOC,IACM,MAAMK,gBAAmB,GAAA,CAACC,KAAeC,EAAAA,KAAAA,GAAAA;;AAE9C,IAAA,MAAMC,kBAAkBP,IAAKC,CAAAA,KAAK,CAACO,SAAS,CAACL,kBAAmBE,CAAAA,KAAAA,CAAAA,CAAAA;AAChE,IAAA,MAAMI,kBAAkBT,IAAKC,CAAAA,KAAK,CAACO,SAAS,CAACL,kBAAmBG,CAAAA,KAAAA,CAAAA,CAAAA;IAEhE,OAAO,CAACN,KAAKC,KAAK,CAACS,QAAQ,CAACD,eAAAA,EAAiBF,iBAAiBI,MAAM;AACtE;AAEA;;;;;;IAOaR,MAAAA,kBAAAA,GAAqB,CAACL,QAAAA,GAAAA;;AAEjC,IAAA,IAAIA,SAASc,QAAQ,CAACZ,KAAKC,KAAK,CAACY,GAAG,CAAG,EAAA;QACrC,OAAOf,QAAAA;AACT;AAEA,IAAA,OAAOE,KAAKQ,SAAS,CAACV,QAAUgB,CAAAA,CAAAA,KAAK,CAACd,IAAKe,CAAAA,KAAK,CAACF,GAAG,EAAEG,IAAI,CAAChB,IAAKC,CAAAA,KAAK,CAACY,GAAG,CAAA;AAC3E;;;;;;"}


@@ -0,0 +1,52 @@
import path from 'path';
/**
* Note: in versions of the transfer engine <=4.9.0, exports were generated with windows paths
* on Windows systems, and posix paths on posix systems.
*
* We now store all paths as posix, but need to leave a separator conversion for legacy purposes, and to
* support manually-created tar files coming from Windows systems (ie, if a user creates a
* backup file with a windows tar tool rather than using the `export` command)
*
* Because of this, export/import files may never contain files with a forward slash in the name, even escaped
*
* */ /**
* Check if the directory of a given filePath (which can be either posix or win32) resolves to the same as the given posix-format path posixDirName
* We must be able to assume the first argument is a path to a directory and the second is a path to a file, otherwise path.dirname will interpret a path without any slashes as the filename
*
* @param {string} posixDirName A posix path pointing to a directory
* @param {string} filePath an unknown filesystem path pointing to a file
* @returns {boolean} is the file located in the given directory
*/ const isFilePathInDirname = (posixDirName, filePath)=>{
const normalizedDir = path.posix.dirname(unknownPathToPosix(filePath));
return isPathEquivalent(posixDirName, normalizedDir);
};
/**
 * Check if two paths that can be either in posix or win32 format resolve to the same file
*
* @param {string} pathA a path that may be either win32 or posix
* @param {string} pathB a path that may be either win32 or posix
*
* @returns {boolean} do paths point to the same place
*/ const isPathEquivalent = (pathA, pathB)=>{
// Check if paths appear to be win32 or posix, and if win32 convert to posix
const normalizedPathA = path.posix.normalize(unknownPathToPosix(pathA));
const normalizedPathB = path.posix.normalize(unknownPathToPosix(pathB));
return !path.posix.relative(normalizedPathB, normalizedPathA).length;
};
/**
* Convert an unknown format path (win32 or posix) to a posix path
*
* @param {string} filePath a path that may be either win32 or posix
*
* @returns {string} a posix path
*/ const unknownPathToPosix = (filePath)=>{
// if it includes a forward slash, it must be posix already -- we will not support win32 with mixed path separators
if (filePath.includes(path.posix.sep)) {
return filePath;
}
return path.normalize(filePath).split(path.win32.sep).join(path.posix.sep);
};
export { isFilePathInDirname, isPathEquivalent, unknownPathToPosix };
//# sourceMappingURL=utils.mjs.map
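
For orientation, a minimal usage sketch of the three helpers in utils.mjs above. The relative import path and the sample file names are assumptions for illustration only; these helpers are internal to the package rather than part of its public entry point.

// Hypothetical usage of the internal path helpers shown above (utils.mjs).
// The import path and sample paths are illustrative assumptions, not package API.
import { isFilePathInDirname, isPathEquivalent, unknownPathToPosix } from './utils.mjs';

// A win32-style path (no forward slashes) is converted to posix separators.
console.log(unknownPathToPosix('entities\\users.jsonl')); // 'entities/users.jsonl'

// A path that already contains a forward slash is returned unchanged.
console.log(unknownPathToPosix('entities/users.jsonl')); // 'entities/users.jsonl'

// Normalization makes trailing separators irrelevant when comparing locations.
console.log(isPathEquivalent('entities', 'entities/')); // true
console.log(isPathEquivalent('entities', 'schemas'));   // false

// The file's directory (in either separator style) is compared to the posix dir name.
console.log(isFilePathInDirname('entities', 'entities\\users.jsonl')); // true
console.log(isFilePathInDirname('entities', 'schemas/users.jsonl'));   // false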

View File

@@ -0,0 +1 @@
{"version":3,"file":"utils.mjs","sources":["../../../../src/file/providers/source/utils.ts"],"sourcesContent":["import path from 'path';\n\n/**\n * Note: in versions of the transfer engine <=4.9.0, exports were generated with windows paths\n * on Windows systems, and posix paths on posix systems.\n *\n * We now store all paths as posix, but need to leave a separator conversion for legacy purposes, and to\n * support manually-created tar files coming from Windows systems (ie, if a user creates a\n * backup file with a windows tar tool rather than using the `export` command)\n *\n * Because of this, export/import files may never contain files with a forward slash in the name, even escaped\n *\n * */\n\n/**\n * Check if the directory of a given filePath (which can be either posix or win32) resolves to the same as the given posix-format path posixDirName\n * We must be able to assume the first argument is a path to a directory and the second is a path to a file, otherwise path.dirname will interpret a path without any slashes as the filename\n *\n * @param {string} posixDirName A posix path pointing to a directory\n * @param {string} filePath an unknown filesystem path pointing to a file\n * @returns {boolean} is the file located in the given directory\n */\nexport const isFilePathInDirname = (posixDirName: string, filePath: string) => {\n const normalizedDir = path.posix.dirname(unknownPathToPosix(filePath));\n return isPathEquivalent(posixDirName, normalizedDir);\n};\n\n/**\n * Check if two paths that can be either in posix or win32 format resolves to the same file\n *\n * @param {string} pathA a path that may be either win32 or posix\n * @param {string} pathB a path that may be either win32 or posix\n *\n * @returns {boolean} do paths point to the same place\n */\nexport const isPathEquivalent = (pathA: string, pathB: string) => {\n // Check if paths appear to be win32 or posix, and if win32 convert to posix\n const normalizedPathA = path.posix.normalize(unknownPathToPosix(pathA));\n const normalizedPathB = path.posix.normalize(unknownPathToPosix(pathB));\n\n return !path.posix.relative(normalizedPathB, normalizedPathA).length;\n};\n\n/**\n * Convert an unknown format path (win32 or posix) to a posix path\n *\n * @param {string} filePath a path that may be either win32 or posix\n *\n * @returns {string} a posix path\n */\nexport const unknownPathToPosix = (filePath: string) => {\n // if it includes a forward slash, it must be posix already -- we will not support win32 with mixed path separators\n if (filePath.includes(path.posix.sep)) {\n return filePath;\n }\n\n return 
path.normalize(filePath).split(path.win32.sep).join(path.posix.sep);\n};\n"],"names":["isFilePathInDirname","posixDirName","filePath","normalizedDir","path","posix","dirname","unknownPathToPosix","isPathEquivalent","pathA","pathB","normalizedPathA","normalize","normalizedPathB","relative","length","includes","sep","split","win32","join"],"mappings":";;AAEA;;;;;;;;;;;;;;;;;AAmBC,IACM,MAAMA,mBAAsB,GAAA,CAACC,YAAsBC,EAAAA,QAAAA,GAAAA;AACxD,IAAA,MAAMC,gBAAgBC,IAAKC,CAAAA,KAAK,CAACC,OAAO,CAACC,kBAAmBL,CAAAA,QAAAA,CAAAA,CAAAA;AAC5D,IAAA,OAAOM,iBAAiBP,YAAcE,EAAAA,aAAAA,CAAAA;AACxC;AAEA;;;;;;;AAOC,IACM,MAAMK,gBAAmB,GAAA,CAACC,KAAeC,EAAAA,KAAAA,GAAAA;;AAE9C,IAAA,MAAMC,kBAAkBP,IAAKC,CAAAA,KAAK,CAACO,SAAS,CAACL,kBAAmBE,CAAAA,KAAAA,CAAAA,CAAAA;AAChE,IAAA,MAAMI,kBAAkBT,IAAKC,CAAAA,KAAK,CAACO,SAAS,CAACL,kBAAmBG,CAAAA,KAAAA,CAAAA,CAAAA;IAEhE,OAAO,CAACN,KAAKC,KAAK,CAACS,QAAQ,CAACD,eAAAA,EAAiBF,iBAAiBI,MAAM;AACtE;AAEA;;;;;;IAOaR,MAAAA,kBAAAA,GAAqB,CAACL,QAAAA,GAAAA;;AAEjC,IAAA,IAAIA,SAASc,QAAQ,CAACZ,KAAKC,KAAK,CAACY,GAAG,CAAG,EAAA;QACrC,OAAOf,QAAAA;AACT;AAEA,IAAA,OAAOE,KAAKQ,SAAS,CAACV,QAAUgB,CAAAA,CAAAA,KAAK,CAACd,IAAKe,CAAAA,KAAK,CAACF,GAAG,EAAEG,IAAI,CAAChB,IAAKC,CAAAA,KAAK,CAACY,GAAG,CAAA;AAC3E;;;;"}

View File

@@ -0,0 +1,5 @@
export * as engine from './engine';
export * as strapi from './strapi';
export * as file from './file';
export * as utils from './utils';
//# sourceMappingURL=index.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,MAAM,MAAM,UAAU,CAAC;AACnC,OAAO,KAAK,MAAM,MAAM,UAAU,CAAC;AACnC,OAAO,KAAK,IAAI,MAAM,QAAQ,CAAC;AAC/B,OAAO,KAAK,KAAK,MAAM,SAAS,CAAC"}

View File

@@ -0,0 +1,14 @@
'use strict';
var index = require('./engine/index.js');
var index$1 = require('./strapi/index.js');
var index$2 = require('./file/index.js');
var index$3 = require('./utils/index.js');
exports.engine = index;
exports.strapi = index$1;
exports.file = index$2;
exports.utils = index$3;
//# sourceMappingURL=index.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;;;;;"}

View File

@@ -0,0 +1,9 @@
import * as index from './engine/index.mjs';
export { index as engine };
import * as index$1 from './strapi/index.mjs';
export { index$1 as strapi };
import * as index$2 from './file/index.mjs';
export { index$2 as file };
import * as index$3 from './utils/index.mjs';
export { index$3 as utils };
//# sourceMappingURL=index.mjs.map
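
Taken together, the root entry points above (index.js for CommonJS, index.mjs for ESM) simply re-export four namespaces. Below is a small sketch of what that surface looks like to a consumer; only the namespace names visible in this diff are referenced, and nothing is assumed about their contents beyond that.

// Sketch of the package's root surface as re-exported above; only names that
// appear in the files in this diff are referenced here.
import { engine, strapi, file, utils } from '@strapi/data-transfer';

console.log(Object.keys({ engine, strapi, file, utils }));
// [ 'engine', 'strapi', 'file', 'utils' ]

// The CommonJS build (dist/index.js above) exposes the same four namespaces:
// const { engine, strapi, file, utils } = require('@strapi/data-transfer');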

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.mjs","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;"}

View File

@@ -0,0 +1,4 @@
export * as providers from './providers';
export * as queries from './queries';
export * as remote from './remote';
//# sourceMappingURL=index.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/strapi/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,SAAS,MAAM,aAAa,CAAC;AACzC,OAAO,KAAK,OAAO,MAAM,WAAW,CAAC;AACrC,OAAO,KAAK,MAAM,MAAM,UAAU,CAAC"}

View File

@@ -0,0 +1,12 @@
'use strict';
var index = require('./providers/index.js');
var index$1 = require('./queries/index.js');
var index$2 = require('./remote/index.js');
exports.providers = index;
exports.queries = index$1;
exports.remote = index$2;
//# sourceMappingURL=index.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;;;"}

View File

@@ -0,0 +1,7 @@
import * as index from './providers/index.mjs';
export { index as providers };
import * as index$1 from './queries/index.mjs';
export { index$1 as queries };
import * as index$2 from './remote/index.mjs';
export { index$2 as remote };
//# sourceMappingURL=index.mjs.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.mjs","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;"}

View File

@@ -0,0 +1,5 @@
export * from './local-destination';
export * from './local-source';
export * from './remote-destination';
export * from './remote-source';
//# sourceMappingURL=index.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/strapi/providers/index.ts"],"names":[],"mappings":"AACA,cAAc,qBAAqB,CAAC;AACpC,cAAc,gBAAgB,CAAC;AAG/B,cAAc,sBAAsB,CAAC;AACrC,cAAc,iBAAiB,CAAC"}

View File

@@ -0,0 +1,16 @@
'use strict';
var index = require('./local-destination/index.js');
var index$1 = require('./local-source/index.js');
var index$2 = require('./remote-destination/index.js');
var index$3 = require('./remote-source/index.js');
// Local
exports.DEFAULT_CONFLICT_STRATEGY = index.DEFAULT_CONFLICT_STRATEGY;
exports.VALID_CONFLICT_STRATEGIES = index.VALID_CONFLICT_STRATEGIES;
exports.createLocalStrapiDestinationProvider = index.createLocalStrapiDestinationProvider;
exports.createLocalStrapiSourceProvider = index$1.createLocalStrapiSourceProvider;
exports.createRemoteStrapiDestinationProvider = index$2.createRemoteStrapiDestinationProvider;
exports.createRemoteStrapiSourceProvider = index$3.createRemoteStrapiSourceProvider;
//# sourceMappingURL=index.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sources":["../../../src/strapi/providers/index.ts"],"sourcesContent":["// Local\nexport * from './local-destination';\nexport * from './local-source';\n\n// Remote\nexport * from './remote-destination';\nexport * from './remote-source';\n"],"names":[],"mappings":";;;;;;;AAAA;;;;;;;;;"}

View File

@@ -0,0 +1,7 @@
export { DEFAULT_CONFLICT_STRATEGY, VALID_CONFLICT_STRATEGIES, createLocalStrapiDestinationProvider } from './local-destination/index.mjs';
export { createLocalStrapiSourceProvider } from './local-source/index.mjs';
export { createRemoteStrapiDestinationProvider } from './remote-destination/index.mjs';
export { createRemoteStrapiSourceProvider } from './remote-source/index.mjs';
// Local
//# sourceMappingURL=index.mjs.map
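
For reference, the provider namespace re-exported above surfaces two conflict-strategy constants and four provider factories. Below is a hedged sketch of reaching them through the package's root strapi namespace; the factories' option objects are not shown in this diff, so no call signatures are assumed.

// Sketch: the strapi.providers namespace aggregates the exports listed above.
// The factories are only inspected here; their options are defined elsewhere
// in the package and are not assumed.
import { strapi } from '@strapi/data-transfer';

const { providers } = strapi;
console.log(providers.DEFAULT_CONFLICT_STRATEGY);
console.log(providers.VALID_CONFLICT_STRATEGIES);
console.log(typeof providers.createLocalStrapiSourceProvider);       // 'function'
console.log(typeof providers.createLocalStrapiDestinationProvider);  // 'function'
console.log(typeof providers.createRemoteStrapiSourceProvider);      // 'function'
console.log(typeof providers.createRemoteStrapiDestinationProvider); // 'function'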

Some files were not shown because too many files have changed in this diff.