node_modules ignore

2025-05-08 23:43:47 +02:00
parent e19d52f172
commit 4574544c9f
65041 changed files with 10593536 additions and 0 deletions

View File

@@ -0,0 +1,2 @@
export * as providers from './providers';
//# sourceMappingURL=index.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/file/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,SAAS,MAAM,aAAa,CAAC"}

View File

@@ -0,0 +1,8 @@
'use strict';
var index = require('./providers/index.js');
exports.providers = index;
//# sourceMappingURL=index.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;"}

View File

@@ -0,0 +1,3 @@
import * as index from './providers/index.mjs';
export { index as providers };
//# sourceMappingURL=index.mjs.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.mjs","sources":[],"sourcesContent":[],"names":[],"mappings":";"}

View File

@@ -0,0 +1,48 @@
/// <reference types="node" />
/// <reference types="stream-chain" />
/// <reference types="node" />
import zlib from 'zlib';
import { Writable } from 'stream';
import type { IDestinationProvider, IDestinationProviderTransferResults, IMetadata, ProviderType } from '../../../../types';
import type { IDiagnosticReporter } from '../../../utils/diagnostic';
export interface ILocalFileDestinationProviderOptions {
encryption: {
enabled: boolean;
key?: string;
};
compression: {
enabled: boolean;
};
file: {
path: string;
maxSize?: number;
maxSizeJsonl?: number;
};
}
export interface ILocalFileDestinationProviderTransferResults extends IDestinationProviderTransferResults {
file?: {
path?: string;
};
}
export declare const createLocalFileDestinationProvider: (options: ILocalFileDestinationProviderOptions) => LocalFileDestinationProvider;
declare class LocalFileDestinationProvider implements IDestinationProvider {
#private;
name: string;
type: ProviderType;
options: ILocalFileDestinationProviderOptions;
results: ILocalFileDestinationProviderTransferResults;
constructor(options: ILocalFileDestinationProviderOptions);
setMetadata(target: ProviderType, metadata: IMetadata): IDestinationProvider;
createGzip(): zlib.Gzip;
bootstrap(diagnostics: IDiagnosticReporter): void | Promise<void>;
close(): Promise<void>;
rollback(): Promise<void>;
getMetadata(): null;
createSchemasWriteStream(): import("stream-chain");
createEntitiesWriteStream(): Writable;
createLinksWriteStream(): Writable;
createConfigurationWriteStream(): Writable;
createAssetsWriteStream(): Writable;
}
export {};
//# sourceMappingURL=index.d.ts.map
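
The declaration above fixes the shape of the destination provider's options. A minimal usage sketch, assuming a local import path and illustrative option values (neither is taken from this package):

// Hypothetical usage sketch: the import specifier and values are assumptions.
import { createLocalFileDestinationProvider } from './dist/file/providers/destination/index.mjs';

const destination = createLocalFileDestinationProvider({
  // the archive is written to this path with .tar(.gz)(.enc) appended
  file: { path: './backup', maxSizeJsonl: 1024 * 1024 },
  compression: { enabled: true },
  // bootstrap() throws if encryption is enabled without a key
  encryption: { enabled: false },
});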

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/file/providers/destination/index.ts"],"names":[],"mappings":";;;AACA,OAAO,IAAI,MAAM,MAAM,CAAC;AACxB,OAAO,EAAY,QAAQ,EAAE,MAAM,QAAQ,CAAC;AAO5C,OAAO,KAAK,EAEV,oBAAoB,EACpB,mCAAmC,EACnC,SAAS,EACT,YAAY,EAEb,MAAM,mBAAmB,CAAC;AAC3B,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,2BAA2B,CAAC;AAIrE,MAAM,WAAW,oCAAoC;IACnD,UAAU,EAAE;QACV,OAAO,EAAE,OAAO,CAAC;QACjB,GAAG,CAAC,EAAE,MAAM,CAAC;KACd,CAAC;IAEF,WAAW,EAAE;QACX,OAAO,EAAE,OAAO,CAAC;KAClB,CAAC;IAEF,IAAI,EAAE;QACJ,IAAI,EAAE,MAAM,CAAC;QACb,OAAO,CAAC,EAAE,MAAM,CAAC;QACjB,YAAY,CAAC,EAAE,MAAM,CAAC;KACvB,CAAC;CACH;AAED,MAAM,WAAW,4CACf,SAAQ,mCAAmC;IAC3C,IAAI,CAAC,EAAE;QACL,IAAI,CAAC,EAAE,MAAM,CAAC;KACf,CAAC;CACH;AAED,eAAO,MAAM,kCAAkC,YACpC,oCAAoC,iCAG9C,CAAC;AAEF,cAAM,4BAA6B,YAAW,oBAAoB;;IAChE,IAAI,SAA6B;IAEjC,IAAI,EAAE,YAAY,CAAiB;IAEnC,OAAO,EAAE,oCAAoC,CAAC;IAE9C,OAAO,EAAE,4CAA4C,CAAM;gBAQ/C,OAAO,EAAE,oCAAoC;IA+BzD,WAAW,CAAC,MAAM,EAAE,YAAY,EAAE,QAAQ,EAAE,SAAS,GAAG,oBAAoB;IAM5E,UAAU,IAAI,IAAI,CAAC,IAAI;IAKvB,SAAS,CAAC,WAAW,EAAE,mBAAmB,GAAG,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;IAoC3D,KAAK;IAiBL,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;IAM/B,WAAW;IA4BX,wBAAwB;IAgBxB,yBAAyB,IAAI,QAAQ;IAgBrC,sBAAsB,IAAI,QAAQ;IAgBlC,8BAA8B,IAAI,QAAQ;IAgB1C,uBAAuB,IAAI,QAAQ;CA8CpC"}

View File

@@ -0,0 +1,248 @@
'use strict';
var path = require('path');
var zip = require('zlib');
var stream = require('stream');
var fse = require('fs-extra');
var tar = require('tar-stream');
var Stringer = require('stream-json/jsonl/Stringer');
var streamChain = require('stream-chain');
var encrypt = require('../../../utils/encryption/encrypt.js');
require('crypto');
var utils = require('./utils.js');
var providers = require('../../../errors/providers.js');
function _class_private_field_loose_base(receiver, privateKey) {
if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) {
throw new TypeError("attempted to use private field on non-instance");
}
return receiver;
}
var id = 0;
function _class_private_field_loose_key(name) {
return "__private_" + id++ + "_" + name;
}
const createLocalFileDestinationProvider = (options)=>{
return new LocalFileDestinationProvider(options);
};
var _providersMetadata = /*#__PURE__*/ _class_private_field_loose_key("_providersMetadata"), _archive = /*#__PURE__*/ _class_private_field_loose_key("_archive"), _diagnostics = /*#__PURE__*/ _class_private_field_loose_key("_diagnostics"), _reportInfo = /*#__PURE__*/ _class_private_field_loose_key("_reportInfo"), _archivePath = /*#__PURE__*/ _class_private_field_loose_key("_archivePath"), _writeMetadata = /*#__PURE__*/ _class_private_field_loose_key("_writeMetadata"), _getMetadataStream = /*#__PURE__*/ _class_private_field_loose_key("_getMetadataStream");
class LocalFileDestinationProvider {
setMetadata(target, metadata) {
_class_private_field_loose_base(this, _providersMetadata)[_providersMetadata][target] = metadata;
return this;
}
createGzip() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating gzip');
return zip.createGzip();
}
bootstrap(diagnostics) {
_class_private_field_loose_base(this, _diagnostics)[_diagnostics] = diagnostics;
const { compression, encryption } = this.options;
if (encryption.enabled && !encryption.key) {
throw new Error("Can't encrypt without a key");
}
_class_private_field_loose_base(this, _archive)[_archive].stream = tar.pack();
const outStream = fse.createWriteStream(_class_private_field_loose_base(this, _archivePath)[_archivePath]);
outStream.on('error', (err)=>{
if (err.code === 'ENOSPC') {
throw new providers.ProviderTransferError("Your server doesn't have space to proceed with the import.");
}
throw err;
});
const archiveTransforms = [];
if (compression.enabled) {
archiveTransforms.push(this.createGzip());
}
if (encryption.enabled && encryption.key) {
archiveTransforms.push(encrypt.createEncryptionCipher(encryption.key));
}
_class_private_field_loose_base(this, _archive)[_archive].pipeline = streamChain.chain([
_class_private_field_loose_base(this, _archive)[_archive].stream,
...archiveTransforms,
outStream
]);
this.results.file = {
path: _class_private_field_loose_base(this, _archivePath)[_archivePath]
};
}
async close() {
const { stream, pipeline } = _class_private_field_loose_base(this, _archive)[_archive];
if (!stream) {
return;
}
await _class_private_field_loose_base(this, _writeMetadata)[_writeMetadata]();
stream.finalize();
if (pipeline && !pipeline.closed) {
await new Promise((resolve, reject)=>{
pipeline.on('close', resolve).on('error', reject);
});
}
}
async rollback() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('rolling back');
await this.close();
await fse.rm(_class_private_field_loose_base(this, _archivePath)[_archivePath], {
force: true
});
}
getMetadata() {
return null;
}
createSchemasWriteStream() {
if (!_class_private_field_loose_base(this, _archive)[_archive].stream) {
throw new Error('Archive stream is unavailable');
}
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating schemas write stream');
const filePathFactory = utils.createFilePathFactory('schemas');
const entryStream = utils.createTarEntryStream(_class_private_field_loose_base(this, _archive)[_archive].stream, filePathFactory, this.options.file.maxSizeJsonl);
return streamChain.chain([
Stringer.stringer(),
entryStream
]);
}
createEntitiesWriteStream() {
if (!_class_private_field_loose_base(this, _archive)[_archive].stream) {
throw new Error('Archive stream is unavailable');
}
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating entities write stream');
const filePathFactory = utils.createFilePathFactory('entities');
const entryStream = utils.createTarEntryStream(_class_private_field_loose_base(this, _archive)[_archive].stream, filePathFactory, this.options.file.maxSizeJsonl);
return streamChain.chain([
Stringer.stringer(),
entryStream
]);
}
createLinksWriteStream() {
if (!_class_private_field_loose_base(this, _archive)[_archive].stream) {
throw new Error('Archive stream is unavailable');
}
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating links write stream');
const filePathFactory = utils.createFilePathFactory('links');
const entryStream = utils.createTarEntryStream(_class_private_field_loose_base(this, _archive)[_archive].stream, filePathFactory, this.options.file.maxSizeJsonl);
return streamChain.chain([
Stringer.stringer(),
entryStream
]);
}
createConfigurationWriteStream() {
if (!_class_private_field_loose_base(this, _archive)[_archive].stream) {
throw new Error('Archive stream is unavailable');
}
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating configuration write stream');
const filePathFactory = utils.createFilePathFactory('configuration');
const entryStream = utils.createTarEntryStream(_class_private_field_loose_base(this, _archive)[_archive].stream, filePathFactory, this.options.file.maxSizeJsonl);
return streamChain.chain([
Stringer.stringer(),
entryStream
]);
}
createAssetsWriteStream() {
const { stream: archiveStream } = _class_private_field_loose_base(this, _archive)[_archive];
if (!archiveStream) {
throw new Error('Archive stream is unavailable');
}
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating assets write stream');
return new stream.Writable({
objectMode: true,
write (data, _encoding, callback) {
// always write tar files with posix paths so we have a standard format for paths regardless of system
const entryPath = path.posix.join('assets', 'uploads', data.filename);
const entryMetadataPath = path.posix.join('assets', 'metadata', `${data.filename}.json`);
const stringifiedMetadata = JSON.stringify(data.metadata);
archiveStream.entry({
name: entryMetadataPath,
size: stringifiedMetadata.length
}, stringifiedMetadata);
const entry = archiveStream.entry({
name: entryPath,
size: data.stats.size
});
if (!entry) {
callback(new Error(`Failed to create an asset tar entry for ${entryPath}`));
return;
}
data.stream.pipe(entry);
entry.on('finish', ()=>{
callback(null);
}).on('error', (error)=>{
callback(error);
});
}
});
}
constructor(options){
Object.defineProperty(this, _reportInfo, {
value: reportInfo
});
Object.defineProperty(this, _archivePath, {
get: get_archivePath,
set: void 0
});
Object.defineProperty(this, _writeMetadata, {
value: writeMetadata
});
Object.defineProperty(this, _getMetadataStream, {
value: getMetadataStream
});
Object.defineProperty(this, _providersMetadata, {
writable: true,
value: void 0
});
Object.defineProperty(this, _archive, {
writable: true,
value: void 0
});
Object.defineProperty(this, _diagnostics, {
writable: true,
value: void 0
});
this.name = 'destination::local-file';
this.type = 'destination';
this.results = {};
_class_private_field_loose_base(this, _providersMetadata)[_providersMetadata] = {};
_class_private_field_loose_base(this, _archive)[_archive] = {};
this.options = options;
}
}
function reportInfo(message) {
_class_private_field_loose_base(this, _diagnostics)[_diagnostics]?.report({
details: {
createdAt: new Date(),
message,
origin: 'file-destination-provider'
},
kind: 'info'
});
}
function get_archivePath() {
const { encryption, compression, file } = this.options;
let filePath = `${file.path}.tar`;
if (compression.enabled) {
filePath += '.gz';
}
if (encryption.enabled) {
filePath += '.enc';
}
return filePath;
}
async function writeMetadata() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('writing metadata');
const metadata = _class_private_field_loose_base(this, _providersMetadata)[_providersMetadata].source;
if (metadata) {
await new Promise((resolve)=>{
const outStream = _class_private_field_loose_base(this, _getMetadataStream)[_getMetadataStream]();
const data = JSON.stringify(metadata, null, 2);
stream.Readable.from(data).pipe(outStream).on('close', resolve);
});
}
}
function getMetadataStream() {
const { stream } = _class_private_field_loose_base(this, _archive)[_archive];
if (!stream) {
throw new Error('Archive stream is unavailable');
}
return utils.createTarEntryStream(stream, ()=>'metadata.json');
}
exports.createLocalFileDestinationProvider = createLocalFileDestinationProvider;
//# sourceMappingURL=index.js.map
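
The private #archivePath getter (get_archivePath above) derives the output file name from the configured path plus the enabled flags. A short sketch of the same extension logic, with an assumed base path of 'backup':

// Mirrors the extension logic of get_archivePath above; the base path is illustrative.
const archivePath = (base: string, compression: boolean, encryption: boolean): string =>
  `${base}.tar${compression ? '.gz' : ''}${encryption ? '.enc' : ''}`;

archivePath('backup', false, false); // 'backup.tar'
archivePath('backup', true, true);   // 'backup.tar.gz.enc'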

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,246 @@
import path from 'path';
import zip from 'zlib';
import { Writable, Readable } from 'stream';
import { createWriteStream, rm } from 'fs-extra';
import tar from 'tar-stream';
import { stringer } from 'stream-json/jsonl/Stringer';
import { chain } from 'stream-chain';
import { createEncryptionCipher } from '../../../utils/encryption/encrypt.mjs';
import 'crypto';
import { createTarEntryStream, createFilePathFactory } from './utils.mjs';
import { ProviderTransferError } from '../../../errors/providers.mjs';
function _class_private_field_loose_base(receiver, privateKey) {
if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) {
throw new TypeError("attempted to use private field on non-instance");
}
return receiver;
}
var id = 0;
function _class_private_field_loose_key(name) {
return "__private_" + id++ + "_" + name;
}
const createLocalFileDestinationProvider = (options)=>{
return new LocalFileDestinationProvider(options);
};
var _providersMetadata = /*#__PURE__*/ _class_private_field_loose_key("_providersMetadata"), _archive = /*#__PURE__*/ _class_private_field_loose_key("_archive"), _diagnostics = /*#__PURE__*/ _class_private_field_loose_key("_diagnostics"), _reportInfo = /*#__PURE__*/ _class_private_field_loose_key("_reportInfo"), _archivePath = /*#__PURE__*/ _class_private_field_loose_key("_archivePath"), _writeMetadata = /*#__PURE__*/ _class_private_field_loose_key("_writeMetadata"), _getMetadataStream = /*#__PURE__*/ _class_private_field_loose_key("_getMetadataStream");
class LocalFileDestinationProvider {
setMetadata(target, metadata) {
_class_private_field_loose_base(this, _providersMetadata)[_providersMetadata][target] = metadata;
return this;
}
createGzip() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating gzip');
return zip.createGzip();
}
bootstrap(diagnostics) {
_class_private_field_loose_base(this, _diagnostics)[_diagnostics] = diagnostics;
const { compression, encryption } = this.options;
if (encryption.enabled && !encryption.key) {
throw new Error("Can't encrypt without a key");
}
_class_private_field_loose_base(this, _archive)[_archive].stream = tar.pack();
const outStream = createWriteStream(_class_private_field_loose_base(this, _archivePath)[_archivePath]);
outStream.on('error', (err)=>{
if (err.code === 'ENOSPC') {
throw new ProviderTransferError("Your server doesn't have space to proceed with the import.");
}
throw err;
});
const archiveTransforms = [];
if (compression.enabled) {
archiveTransforms.push(this.createGzip());
}
if (encryption.enabled && encryption.key) {
archiveTransforms.push(createEncryptionCipher(encryption.key));
}
_class_private_field_loose_base(this, _archive)[_archive].pipeline = chain([
_class_private_field_loose_base(this, _archive)[_archive].stream,
...archiveTransforms,
outStream
]);
this.results.file = {
path: _class_private_field_loose_base(this, _archivePath)[_archivePath]
};
}
async close() {
const { stream, pipeline } = _class_private_field_loose_base(this, _archive)[_archive];
if (!stream) {
return;
}
await _class_private_field_loose_base(this, _writeMetadata)[_writeMetadata]();
stream.finalize();
if (pipeline && !pipeline.closed) {
await new Promise((resolve, reject)=>{
pipeline.on('close', resolve).on('error', reject);
});
}
}
async rollback() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('rolling back');
await this.close();
await rm(_class_private_field_loose_base(this, _archivePath)[_archivePath], {
force: true
});
}
getMetadata() {
return null;
}
createSchemasWriteStream() {
if (!_class_private_field_loose_base(this, _archive)[_archive].stream) {
throw new Error('Archive stream is unavailable');
}
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating schemas write stream');
const filePathFactory = createFilePathFactory('schemas');
const entryStream = createTarEntryStream(_class_private_field_loose_base(this, _archive)[_archive].stream, filePathFactory, this.options.file.maxSizeJsonl);
return chain([
stringer(),
entryStream
]);
}
createEntitiesWriteStream() {
if (!_class_private_field_loose_base(this, _archive)[_archive].stream) {
throw new Error('Archive stream is unavailable');
}
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating entities write stream');
const filePathFactory = createFilePathFactory('entities');
const entryStream = createTarEntryStream(_class_private_field_loose_base(this, _archive)[_archive].stream, filePathFactory, this.options.file.maxSizeJsonl);
return chain([
stringer(),
entryStream
]);
}
createLinksWriteStream() {
if (!_class_private_field_loose_base(this, _archive)[_archive].stream) {
throw new Error('Archive stream is unavailable');
}
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating links write stream');
const filePathFactory = createFilePathFactory('links');
const entryStream = createTarEntryStream(_class_private_field_loose_base(this, _archive)[_archive].stream, filePathFactory, this.options.file.maxSizeJsonl);
return chain([
stringer(),
entryStream
]);
}
createConfigurationWriteStream() {
if (!_class_private_field_loose_base(this, _archive)[_archive].stream) {
throw new Error('Archive stream is unavailable');
}
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating configuration write stream');
const filePathFactory = createFilePathFactory('configuration');
const entryStream = createTarEntryStream(_class_private_field_loose_base(this, _archive)[_archive].stream, filePathFactory, this.options.file.maxSizeJsonl);
return chain([
stringer(),
entryStream
]);
}
createAssetsWriteStream() {
const { stream: archiveStream } = _class_private_field_loose_base(this, _archive)[_archive];
if (!archiveStream) {
throw new Error('Archive stream is unavailable');
}
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating assets write stream');
return new Writable({
objectMode: true,
write (data, _encoding, callback) {
// always write tar files with posix paths so we have a standard format for paths regardless of system
const entryPath = path.posix.join('assets', 'uploads', data.filename);
const entryMetadataPath = path.posix.join('assets', 'metadata', `${data.filename}.json`);
const stringifiedMetadata = JSON.stringify(data.metadata);
archiveStream.entry({
name: entryMetadataPath,
size: stringifiedMetadata.length
}, stringifiedMetadata);
const entry = archiveStream.entry({
name: entryPath,
size: data.stats.size
});
if (!entry) {
callback(new Error(`Failed to create an asset tar entry for ${entryPath}`));
return;
}
data.stream.pipe(entry);
entry.on('finish', ()=>{
callback(null);
}).on('error', (error)=>{
callback(error);
});
}
});
}
constructor(options){
Object.defineProperty(this, _reportInfo, {
value: reportInfo
});
Object.defineProperty(this, _archivePath, {
get: get_archivePath,
set: void 0
});
Object.defineProperty(this, _writeMetadata, {
value: writeMetadata
});
Object.defineProperty(this, _getMetadataStream, {
value: getMetadataStream
});
Object.defineProperty(this, _providersMetadata, {
writable: true,
value: void 0
});
Object.defineProperty(this, _archive, {
writable: true,
value: void 0
});
Object.defineProperty(this, _diagnostics, {
writable: true,
value: void 0
});
this.name = 'destination::local-file';
this.type = 'destination';
this.results = {};
_class_private_field_loose_base(this, _providersMetadata)[_providersMetadata] = {};
_class_private_field_loose_base(this, _archive)[_archive] = {};
this.options = options;
}
}
function reportInfo(message) {
_class_private_field_loose_base(this, _diagnostics)[_diagnostics]?.report({
details: {
createdAt: new Date(),
message,
origin: 'file-destination-provider'
},
kind: 'info'
});
}
function get_archivePath() {
const { encryption, compression, file } = this.options;
let filePath = `${file.path}.tar`;
if (compression.enabled) {
filePath += '.gz';
}
if (encryption.enabled) {
filePath += '.enc';
}
return filePath;
}
async function writeMetadata() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('writing metadata');
const metadata = _class_private_field_loose_base(this, _providersMetadata)[_providersMetadata].source;
if (metadata) {
await new Promise((resolve)=>{
const outStream = _class_private_field_loose_base(this, _getMetadataStream)[_getMetadataStream]();
const data = JSON.stringify(metadata, null, 2);
Readable.from(data).pipe(outStream).on('close', resolve);
});
}
}
function getMetadataStream() {
const { stream } = _class_private_field_loose_base(this, _archive)[_archive];
if (!stream) {
throw new Error('Archive stream is unavailable');
}
return createTarEntryStream(stream, ()=>'metadata.json');
}
export { createLocalFileDestinationProvider };
//# sourceMappingURL=index.mjs.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,10 @@
/// <reference types="node" />
import { Writable } from 'stream';
import tar from 'tar-stream';
/**
* Create a file path factory for a given path & prefix.
* Upon being called, the factory will return a file path for a given index
*/
export declare const createFilePathFactory: (type: string) => (fileIndex?: number) => string;
export declare const createTarEntryStream: (archive: tar.Pack, pathFactory: (index?: number) => string, maxSize?: number) => Writable;
//# sourceMappingURL=utils.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../../../../src/file/providers/destination/utils.ts"],"names":[],"mappings":";AAAA,OAAO,EAAE,QAAQ,EAAE,MAAM,QAAQ,CAAC;AAElC,OAAO,GAAG,MAAM,YAAY,CAAC;AAE7B;;;GAGG;AACH,eAAO,MAAM,qBAAqB,SACzB,MAAM,6BACI,MAQhB,CAAC;AAEJ,eAAO,MAAM,oBAAoB,YACtB,IAAI,IAAI,eACJ,CAAC,KAAK,CAAC,EAAE,MAAM,KAAK,MAAM,+BAuDxC,CAAC"}

View File

@@ -0,0 +1,63 @@
'use strict';
var stream = require('stream');
var path = require('path');
/**
* Create a file path factory for a given path & prefix.
* Upon being called, the factory will return a file path for a given index
*/ const createFilePathFactory = (type)=>(fileIndex = 0)=>{
// always write tar files with posix paths so we have a standard format for paths regardless of system
return path.posix.join(// "{type}" directory
type, // "${type}_XXXXX.jsonl" file
`${type}_${String(fileIndex).padStart(5, '0')}.jsonl`);
};
const createTarEntryStream = (archive, pathFactory, maxSize = 2.56e8)=>{
let fileIndex = 0;
let buffer = '';
const flush = async ()=>{
if (!buffer) {
return;
}
fileIndex += 1;
const name = pathFactory(fileIndex);
const size = buffer.length;
await new Promise((resolve, reject)=>{
archive.entry({
name,
size
}, buffer, (err)=>{
if (err) {
reject(err);
}
resolve();
});
});
buffer = '';
};
const push = (chunk)=>{
buffer += chunk;
};
return new stream.Writable({
async destroy (err, callback) {
await flush();
callback(err);
},
async write (chunk, _encoding, callback) {
const size = chunk.length;
if (chunk.length > maxSize) {
callback(new Error(`payload too large: ${chunk.length}>${maxSize}`));
return;
}
if (buffer.length + size > maxSize) {
await flush();
}
push(chunk);
callback(null);
}
});
};
exports.createFilePathFactory = createFilePathFactory;
exports.createTarEntryStream = createTarEntryStream;
//# sourceMappingURL=utils.js.map
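
The two helpers above define how JSONL data is laid out inside the archive: createFilePathFactory names the entries and createTarEntryStream buffers writes, flushing a new numbered entry whenever maxSize would be exceeded. A short sketch of the names the factory produces:

// Expected results of the path factory implemented above.
const entitiesPath = createFilePathFactory('entities');

entitiesPath();  // 'entities/entities_00000.jsonl'
entitiesPath(3); // 'entities/entities_00003.jsonl'

// createTarEntryStream calls the factory with 1, 2, 3, ... as it flushes chunks,
// so the first flushed entry is 'entities/entities_00001.jsonl'.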

View File

@@ -0,0 +1 @@
{"version":3,"file":"utils.js","sources":["../../../../src/file/providers/destination/utils.ts"],"sourcesContent":["import { Writable } from 'stream';\nimport { posix } from 'path';\nimport tar from 'tar-stream';\n\n/**\n * Create a file path factory for a given path & prefix.\n * Upon being called, the factory will return a file path for a given index\n */\nexport const createFilePathFactory =\n (type: string) =>\n (fileIndex = 0): string => {\n // always write tar files with posix paths so we have a standard format for paths regardless of system\n return posix.join(\n // \"{type}\" directory\n type,\n // \"${type}_XXXXX.jsonl\" file\n `${type}_${String(fileIndex).padStart(5, '0')}.jsonl`\n );\n };\n\nexport const createTarEntryStream = (\n archive: tar.Pack,\n pathFactory: (index?: number) => string,\n maxSize = 2.56e8\n) => {\n let fileIndex = 0;\n let buffer = '';\n\n const flush = async () => {\n if (!buffer) {\n return;\n }\n\n fileIndex += 1;\n const name = pathFactory(fileIndex);\n const size = buffer.length;\n\n await new Promise<void>((resolve, reject) => {\n archive.entry({ name, size }, buffer, (err) => {\n if (err) {\n reject(err);\n }\n\n resolve();\n });\n });\n\n buffer = '';\n };\n\n const push = (chunk: string | Buffer) => {\n buffer += chunk;\n };\n\n return new Writable({\n async destroy(err, callback) {\n await flush();\n callback(err);\n },\n\n async write(chunk, _encoding, callback) {\n const size = chunk.length;\n\n if (chunk.length > maxSize) {\n callback(new Error(`payload too large: ${chunk.length}>${maxSize}`));\n return;\n }\n\n if (buffer.length + size > maxSize) {\n await flush();\n }\n\n push(chunk);\n\n callback(null);\n },\n });\n};\n"],"names":["createFilePathFactory","type","fileIndex","posix","join","String","padStart","createTarEntryStream","archive","pathFactory","maxSize","buffer","flush","name","size","length","Promise","resolve","reject","entry","err","push","chunk","Writable","destroy","callback","write","_encoding","Error"],"mappings":";;;;;AAIA;;;AAGC,IACYA,MAAAA,qBAAAA,GACX,CAACC,IACD,GAAA,CAACC,YAAY,CAAC,GAAA;;QAEZ,OAAOC,UAAAA,CAAMC,IAAI;AAEfH,QAAAA,IAAAA;QAEA,CAAC,EAAEA,IAAK,CAAA,CAAC,EAAEI,MAAAA,CAAOH,SAAWI,CAAAA,CAAAA,QAAQ,CAAC,CAAA,EAAG,GAAK,CAAA,CAAA,MAAM,CAAC,CAAA;;MAI9CC,oBAAuB,GAAA,CAClCC,OACAC,EAAAA,WAAAA,EACAC,UAAU,MAAM,GAAA;AAEhB,IAAA,IAAIR,SAAY,GAAA,CAAA;AAChB,IAAA,IAAIS,MAAS,GAAA,EAAA;AAEb,IAAA,MAAMC,KAAQ,GAAA,UAAA;AACZ,QAAA,IAAI,CAACD,MAAQ,EAAA;AACX,YAAA;AACF;QAEAT,SAAa,IAAA,CAAA;AACb,QAAA,MAAMW,OAAOJ,WAAYP,CAAAA,SAAAA,CAAAA;QACzB,MAAMY,IAAAA,GAAOH,OAAOI,MAAM;QAE1B,MAAM,IAAIC,OAAc,CAAA,CAACC,OAASC,EAAAA,MAAAA,GAAAA;AAChCV,YAAAA,OAAAA,CAAQW,KAAK,CAAC;AAAEN,gBAAAA,IAAAA;AAAMC,gBAAAA;AAAK,aAAA,EAAGH,QAAQ,CAACS,GAAAA,GAAAA;AACrC,gBAAA,IAAIA,GAAK,EAAA;oBACPF,MAAOE,CAAAA,GAAAA,CAAAA;AACT;AAEAH,gBAAAA,OAAAA,EAAAA;AACF,aAAA,CAAA;AACF,SAAA,CAAA;QAEAN,MAAS,GAAA,EAAA;AACX,KAAA;AAEA,IAAA,MAAMU,OAAO,CAACC,KAAAA,GAAAA;QACZX,MAAUW,IAAAA,KAAAA;AACZ,KAAA;AAEA,IAAA,OAAO,IAAIC,eAAS,CAAA;QAClB,MAAMC,OAAAA,CAAAA,CAAQJ,GAAG,EAAEK,QAAQ,EAAA;YACzB,MAAMb,KAAAA,EAAAA;YACNa,QAASL,CAAAA,GAAAA,CAAAA;AACX,SAAA;AAEA,QAAA,MAAMM,KAAMJ,CAAAA,CAAAA,KAAK,EAAEK,SAAS,EAAEF,QAAQ,EAAA;YACpC,MAAMX,IAAAA,GAAOQ,MAAMP,MAAM;YAEzB,IAAIO,KAAAA,CAAMP,MAAM,GAAGL,OAAS,EAAA;gBAC1Be,QAAS,CAAA,IAAIG,KAAM,CAAA,CAAC,mBAAmB,EAAEN,KAAMP,CAAAA,MAAM,CAAC,CAAC,EAAEL,OAAAA,CAAQ,CAAC,CAAA,CAAA;AAClE,gBAAA;AACF;AAEA,YAAA,IAAIC,MAAOI,CAAAA,MAAM,GAAGD,IAAAA,GAAOJ,OAAS,EAAA;gBAClC,MAAME,KAAAA,EAAAA;AACR;YAEAS,IAAKC,CAAAA,KAAAA,CAAAA;YAELG,QAAS,CAAA,IAAA,CAAA;AACX;AACF,KAAA,CAAA;AACF;;;;;"}

View File

@@ -0,0 +1,60 @@
import { Writable } from 'stream';
import { posix } from 'path';
/**
* Create a file path factory for a given path & prefix.
* Upon being called, the factory will return a file path for a given index
*/ const createFilePathFactory = (type)=>(fileIndex = 0)=>{
// always write tar files with posix paths so we have a standard format for paths regardless of system
return posix.join(// "{type}" directory
type, // "${type}_XXXXX.jsonl" file
`${type}_${String(fileIndex).padStart(5, '0')}.jsonl`);
};
const createTarEntryStream = (archive, pathFactory, maxSize = 2.56e8)=>{
let fileIndex = 0;
let buffer = '';
const flush = async ()=>{
if (!buffer) {
return;
}
fileIndex += 1;
const name = pathFactory(fileIndex);
const size = buffer.length;
await new Promise((resolve, reject)=>{
archive.entry({
name,
size
}, buffer, (err)=>{
if (err) {
reject(err);
}
resolve();
});
});
buffer = '';
};
const push = (chunk)=>{
buffer += chunk;
};
return new Writable({
async destroy (err, callback) {
await flush();
callback(err);
},
async write (chunk, _encoding, callback) {
const size = chunk.length;
if (chunk.length > maxSize) {
callback(new Error(`payload too large: ${chunk.length}>${maxSize}`));
return;
}
if (buffer.length + size > maxSize) {
await flush();
}
push(chunk);
callback(null);
}
});
};
export { createFilePathFactory, createTarEntryStream };
//# sourceMappingURL=utils.mjs.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"utils.mjs","sources":["../../../../src/file/providers/destination/utils.ts"],"sourcesContent":["import { Writable } from 'stream';\nimport { posix } from 'path';\nimport tar from 'tar-stream';\n\n/**\n * Create a file path factory for a given path & prefix.\n * Upon being called, the factory will return a file path for a given index\n */\nexport const createFilePathFactory =\n (type: string) =>\n (fileIndex = 0): string => {\n // always write tar files with posix paths so we have a standard format for paths regardless of system\n return posix.join(\n // \"{type}\" directory\n type,\n // \"${type}_XXXXX.jsonl\" file\n `${type}_${String(fileIndex).padStart(5, '0')}.jsonl`\n );\n };\n\nexport const createTarEntryStream = (\n archive: tar.Pack,\n pathFactory: (index?: number) => string,\n maxSize = 2.56e8\n) => {\n let fileIndex = 0;\n let buffer = '';\n\n const flush = async () => {\n if (!buffer) {\n return;\n }\n\n fileIndex += 1;\n const name = pathFactory(fileIndex);\n const size = buffer.length;\n\n await new Promise<void>((resolve, reject) => {\n archive.entry({ name, size }, buffer, (err) => {\n if (err) {\n reject(err);\n }\n\n resolve();\n });\n });\n\n buffer = '';\n };\n\n const push = (chunk: string | Buffer) => {\n buffer += chunk;\n };\n\n return new Writable({\n async destroy(err, callback) {\n await flush();\n callback(err);\n },\n\n async write(chunk, _encoding, callback) {\n const size = chunk.length;\n\n if (chunk.length > maxSize) {\n callback(new Error(`payload too large: ${chunk.length}>${maxSize}`));\n return;\n }\n\n if (buffer.length + size > maxSize) {\n await flush();\n }\n\n push(chunk);\n\n callback(null);\n },\n });\n};\n"],"names":["createFilePathFactory","type","fileIndex","posix","join","String","padStart","createTarEntryStream","archive","pathFactory","maxSize","buffer","flush","name","size","length","Promise","resolve","reject","entry","err","push","chunk","Writable","destroy","callback","write","_encoding","Error"],"mappings":";;;AAIA;;;AAGC,IACYA,MAAAA,qBAAAA,GACX,CAACC,IACD,GAAA,CAACC,YAAY,CAAC,GAAA;;QAEZ,OAAOC,KAAAA,CAAMC,IAAI;AAEfH,QAAAA,IAAAA;QAEA,CAAC,EAAEA,IAAK,CAAA,CAAC,EAAEI,MAAAA,CAAOH,SAAWI,CAAAA,CAAAA,QAAQ,CAAC,CAAA,EAAG,GAAK,CAAA,CAAA,MAAM,CAAC,CAAA;;MAI9CC,oBAAuB,GAAA,CAClCC,OACAC,EAAAA,WAAAA,EACAC,UAAU,MAAM,GAAA;AAEhB,IAAA,IAAIR,SAAY,GAAA,CAAA;AAChB,IAAA,IAAIS,MAAS,GAAA,EAAA;AAEb,IAAA,MAAMC,KAAQ,GAAA,UAAA;AACZ,QAAA,IAAI,CAACD,MAAQ,EAAA;AACX,YAAA;AACF;QAEAT,SAAa,IAAA,CAAA;AACb,QAAA,MAAMW,OAAOJ,WAAYP,CAAAA,SAAAA,CAAAA;QACzB,MAAMY,IAAAA,GAAOH,OAAOI,MAAM;QAE1B,MAAM,IAAIC,OAAc,CAAA,CAACC,OAASC,EAAAA,MAAAA,GAAAA;AAChCV,YAAAA,OAAAA,CAAQW,KAAK,CAAC;AAAEN,gBAAAA,IAAAA;AAAMC,gBAAAA;AAAK,aAAA,EAAGH,QAAQ,CAACS,GAAAA,GAAAA;AACrC,gBAAA,IAAIA,GAAK,EAAA;oBACPF,MAAOE,CAAAA,GAAAA,CAAAA;AACT;AAEAH,gBAAAA,OAAAA,EAAAA;AACF,aAAA,CAAA;AACF,SAAA,CAAA;QAEAN,MAAS,GAAA,EAAA;AACX,KAAA;AAEA,IAAA,MAAMU,OAAO,CAACC,KAAAA,GAAAA;QACZX,MAAUW,IAAAA,KAAAA;AACZ,KAAA;AAEA,IAAA,OAAO,IAAIC,QAAS,CAAA;QAClB,MAAMC,OAAAA,CAAAA,CAAQJ,GAAG,EAAEK,QAAQ,EAAA;YACzB,MAAMb,KAAAA,EAAAA;YACNa,QAASL,CAAAA,GAAAA,CAAAA;AACX,SAAA;AAEA,QAAA,MAAMM,KAAMJ,CAAAA,CAAAA,KAAK,EAAEK,SAAS,EAAEF,QAAQ,EAAA;YACpC,MAAMX,IAAAA,GAAOQ,MAAMP,MAAM;YAEzB,IAAIO,KAAAA,CAAMP,MAAM,GAAGL,OAAS,EAAA;gBAC1Be,QAAS,CAAA,IAAIG,KAAM,CAAA,CAAC,mBAAmB,EAAEN,KAAMP,CAAAA,MAAM,CAAC,CAAC,EAAEL,OAAAA,CAAQ,CAAC,CAAA,CAAA;AAClE,gBAAA;AACF;AAEA,YAAA,IAAIC,MAAOI,CAAAA,MAAM,GAAGD,IAAAA,GAAOJ,OAAS,EAAA;gBAClC,MAAME,KAAAA,EAAAA;AACR;YAEAS,IAAKC,CAAAA,KAAAA,CAAAA;YAELG,QAAS,CAAA,IAAA,CAAA;AACX;AACF,KAAA,CAAA;AACF;;;;"}

View File

@@ -0,0 +1,3 @@
export * from './source';
export * from './destination';
//# sourceMappingURL=index.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/file/providers/index.ts"],"names":[],"mappings":"AAAA,cAAc,UAAU,CAAC;AACzB,cAAc,eAAe,CAAC"}

View File

@@ -0,0 +1,10 @@
'use strict';
var index = require('./source/index.js');
var index$1 = require('./destination/index.js');
exports.createLocalFileSourceProvider = index.createLocalFileSourceProvider;
exports.createLocalFileDestinationProvider = index$1.createLocalFileDestinationProvider;
//# sourceMappingURL=index.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;"}

View File

@@ -0,0 +1,3 @@
export { createLocalFileSourceProvider } from './source/index.mjs';
export { createLocalFileDestinationProvider } from './destination/index.mjs';
//# sourceMappingURL=index.mjs.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.mjs","sources":[],"sourcesContent":[],"names":[],"mappings":";"}

View File

@@ -0,0 +1,40 @@
/// <reference types="node" />
import type { Readable } from 'stream';
import type { IMetadata, ISourceProvider, ProviderType } from '../../../../types';
import type { IDiagnosticReporter } from '../../../utils/diagnostic';
/**
* Provider options
*/
export interface ILocalFileSourceProviderOptions {
file: {
path: string;
};
encryption: {
enabled: boolean;
key?: string;
};
compression: {
enabled: boolean;
};
}
export declare const createLocalFileSourceProvider: (options: ILocalFileSourceProviderOptions) => LocalFileSourceProvider;
declare class LocalFileSourceProvider implements ISourceProvider {
#private;
type: ProviderType;
name: string;
options: ILocalFileSourceProviderOptions;
constructor(options: ILocalFileSourceProviderOptions);
/**
* Pre flight checks regarding the provided options, making sure that the file can be opened (decrypted, decompressed), etc.
*/
bootstrap(diagnostics: IDiagnosticReporter): Promise<void>;
getMetadata(): Promise<IMetadata | null>;
getSchemas(): Promise<any>;
createEntitiesReadStream(): Readable;
createSchemasReadStream(): Readable;
createLinksReadStream(): Readable;
createConfigurationReadStream(): Readable;
createAssetsReadStream(): Readable | Promise<Readable>;
}
export {};
//# sourceMappingURL=index.d.ts.map
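
A minimal consumption sketch for the source provider declared above, assuming a local import path, a stub diagnostics reporter, and an archive name (all illustrative):

// Hypothetical usage sketch: import path, reporter, and archive name are assumptions.
import { createLocalFileSourceProvider } from './dist/file/providers/source/index.mjs';

const source = createLocalFileSourceProvider({
  file: { path: './backup.tar.gz' },
  compression: { enabled: true },
  encryption: { enabled: false },
});

// bootstrap() parses metadata.json up front to validate the archive
await source.bootstrap({ report: () => {} } as any);
const metadata = await source.getMetadata(); // IMetadata | null
const schemas = await source.getSchemas();   // schemas keyed by uid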

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/file/providers/source/index.ts"],"names":[],"mappings":";AAAA,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,QAAQ,CAAC;AAYvC,OAAO,KAAK,EAAU,SAAS,EAAE,eAAe,EAAE,YAAY,EAAS,MAAM,mBAAmB,CAAC;AACjG,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,2BAA2B,CAAC;AAarE;;GAEG;AACH,MAAM,WAAW,+BAA+B;IAC9C,IAAI,EAAE;QACJ,IAAI,EAAE,MAAM,CAAC;KACd,CAAC;IAEF,UAAU,EAAE;QACV,OAAO,EAAE,OAAO,CAAC;QACjB,GAAG,CAAC,EAAE,MAAM,CAAC;KACd,CAAC;IAEF,WAAW,EAAE;QACX,OAAO,EAAE,OAAO,CAAC;KAClB,CAAC;CACH;AAED,eAAO,MAAM,6BAA6B,YAAa,+BAA+B,4BAErF,CAAC;AAEF,cAAM,uBAAwB,YAAW,eAAe;;IACtD,IAAI,EAAE,YAAY,CAAY;IAE9B,IAAI,SAAwB;IAE5B,OAAO,EAAE,+BAA+B,CAAC;gBAM7B,OAAO,EAAE,+BAA+B;IAqBpD;;OAEG;IACG,SAAS,CAAC,WAAW,EAAE,mBAAmB;IAgC1C,WAAW;IASX,UAAU;IAiBhB,wBAAwB,IAAI,QAAQ;IAKpC,uBAAuB,IAAI,QAAQ;IAKnC,qBAAqB,IAAI,QAAQ;IAKjC,6BAA6B,IAAI,QAAQ;IAMzC,sBAAsB,IAAI,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;CA6KvD"}

View File

@@ -0,0 +1,288 @@
'use strict';
var zip = require('zlib');
var path = require('path');
var stream$1 = require('stream');
var fse = require('fs-extra');
var tar = require('tar');
var fp = require('lodash/fp');
var streamChain = require('stream-chain');
var Parser = require('stream-json/jsonl/Parser');
require('crypto');
var decrypt = require('../../../utils/encryption/decrypt.js');
var stream = require('../../../utils/stream.js');
var schema = require('../../../utils/schema.js');
require('events');
var providers = require('../../../errors/providers.js');
var utils = require('./utils.js');
function _class_private_field_loose_base(receiver, privateKey) {
if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) {
throw new TypeError("attempted to use private field on non-instance");
}
return receiver;
}
var id = 0;
function _class_private_field_loose_key(name) {
return "__private_" + id++ + "_" + name;
}
/**
* Constant for the metadata file path
*/ const METADATA_FILE_PATH = 'metadata.json';
const createLocalFileSourceProvider = (options)=>{
return new LocalFileSourceProvider(options);
};
var _metadata = /*#__PURE__*/ _class_private_field_loose_key("_metadata"), _diagnostics = /*#__PURE__*/ _class_private_field_loose_key("_diagnostics"), _reportInfo = /*#__PURE__*/ _class_private_field_loose_key("_reportInfo"), _loadMetadata = /*#__PURE__*/ _class_private_field_loose_key("_loadMetadata"), _loadAssetMetadata = /*#__PURE__*/ _class_private_field_loose_key("_loadAssetMetadata"), _getBackupStream = /*#__PURE__*/ _class_private_field_loose_key("_getBackupStream"), // `directory` must be posix formatted path
_streamJsonlDirectory = /*#__PURE__*/ _class_private_field_loose_key("_streamJsonlDirectory"), _parseJSONFile = /*#__PURE__*/ _class_private_field_loose_key("_parseJSONFile");
class LocalFileSourceProvider {
/**
* Pre flight checks regarding the provided options, making sure that the file can be opened (decrypted, decompressed), etc.
*/ async bootstrap(diagnostics) {
_class_private_field_loose_base(this, _diagnostics)[_diagnostics] = diagnostics;
const { path: filePath } = this.options.file;
try {
// Read the metadata to ensure the file can be parsed
await _class_private_field_loose_base(this, _loadMetadata)[_loadMetadata]();
// TODO: we might also need to read the schema.jsonl files & implement a custom stream-check
} catch (e) {
if (this.options?.encryption?.enabled) {
throw new providers.ProviderInitializationError(`Key is incorrect or the file '${filePath}' is not a valid Strapi data file.`);
}
throw new providers.ProviderInitializationError(`File '${filePath}' is not a valid Strapi data file.`);
}
if (!_class_private_field_loose_base(this, _metadata)[_metadata]) {
throw new providers.ProviderInitializationError('Could not load metadata from Strapi data file.');
}
}
async getMetadata() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('getting metadata');
if (!_class_private_field_loose_base(this, _metadata)[_metadata]) {
await _class_private_field_loose_base(this, _loadMetadata)[_loadMetadata]();
}
return _class_private_field_loose_base(this, _metadata)[_metadata] ?? null;
}
async getSchemas() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('getting schemas');
const schemaCollection = await stream.collect(this.createSchemasReadStream());
if (fp.isEmpty(schemaCollection)) {
throw new providers.ProviderInitializationError('Could not load schemas from Strapi data file.');
}
// Group schema by UID
const schemas = fp.keyBy('uid', schemaCollection);
// Transform to valid JSON
return schema.schemasToValidJSON(schemas);
}
createEntitiesReadStream() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating entities read stream');
return _class_private_field_loose_base(this, _streamJsonlDirectory)[_streamJsonlDirectory]('entities');
}
createSchemasReadStream() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating schemas read stream');
return _class_private_field_loose_base(this, _streamJsonlDirectory)[_streamJsonlDirectory]('schemas');
}
createLinksReadStream() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating links read stream');
return _class_private_field_loose_base(this, _streamJsonlDirectory)[_streamJsonlDirectory]('links');
}
createConfigurationReadStream() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating configuration read stream');
// NOTE: TBD
return _class_private_field_loose_base(this, _streamJsonlDirectory)[_streamJsonlDirectory]('configuration');
}
createAssetsReadStream() {
const inStream = _class_private_field_loose_base(this, _getBackupStream)[_getBackupStream]();
const outStream = new stream$1.PassThrough({
objectMode: true
});
const loadAssetMetadata = _class_private_field_loose_base(this, _loadAssetMetadata)[_loadAssetMetadata].bind(this);
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating assets read stream');
stream$1.pipeline([
inStream,
new tar.Parse({
// find only files in the assets/uploads folder
filter (filePath, entry) {
if (entry.type !== 'File') {
return false;
}
return utils.isFilePathInDirname('assets/uploads', filePath);
},
async onentry (entry) {
const { path: filePath, size = 0 } = entry;
const normalizedPath = utils.unknownPathToPosix(filePath);
const file = path.basename(normalizedPath);
let metadata;
try {
metadata = await loadAssetMetadata(`assets/metadata/${file}.json`);
} catch (error) {
throw new Error(`Failed to read metadata for ${file}`);
}
const asset = {
metadata,
filename: file,
filepath: normalizedPath,
stats: {
size
},
stream: entry
};
outStream.write(asset);
}
})
], ()=>outStream.end());
return outStream;
}
constructor(options){
Object.defineProperty(this, _reportInfo, {
value: reportInfo
});
Object.defineProperty(this, _loadMetadata, {
value: loadMetadata
});
Object.defineProperty(this, _loadAssetMetadata, {
value: loadAssetMetadata
});
Object.defineProperty(this, _getBackupStream, {
value: getBackupStream
});
Object.defineProperty(this, _streamJsonlDirectory, {
value: streamJsonlDirectory
});
// For collecting an entire JSON file then parsing it, not for streaming JSONL
Object.defineProperty(this, _parseJSONFile, {
value: parseJSONFile
});
Object.defineProperty(this, _metadata, {
writable: true,
value: void 0
});
Object.defineProperty(this, _diagnostics, {
writable: true,
value: void 0
});
this.type = 'source';
this.name = 'source::local-file';
this.options = options;
const { encryption } = this.options;
if (encryption.enabled && encryption.key === undefined) {
throw new Error('Missing encryption key');
}
}
}
function reportInfo(message) {
_class_private_field_loose_base(this, _diagnostics)[_diagnostics]?.report({
details: {
createdAt: new Date(),
message,
origin: 'file-source-provider'
},
kind: 'info'
});
}
async function loadMetadata() {
const backupStream = _class_private_field_loose_base(this, _getBackupStream)[_getBackupStream]();
_class_private_field_loose_base(this, _metadata)[_metadata] = await _class_private_field_loose_base(this, _parseJSONFile)[_parseJSONFile](backupStream, METADATA_FILE_PATH);
}
async function loadAssetMetadata(path) {
const backupStream = _class_private_field_loose_base(this, _getBackupStream)[_getBackupStream]();
return _class_private_field_loose_base(this, _parseJSONFile)[_parseJSONFile](backupStream, path);
}
function getBackupStream() {
const { file, encryption, compression } = this.options;
const streams = [];
try {
streams.push(fse.createReadStream(file.path));
} catch (e) {
throw new Error(`Could not read backup file path provided at "${this.options.file.path}"`);
}
if (encryption.enabled && encryption.key) {
streams.push(decrypt.createDecryptionCipher(encryption.key));
}
if (compression.enabled) {
streams.push(zip.createGunzip());
}
return streamChain.chain(streams);
}
function streamJsonlDirectory(directory) {
const inStream = _class_private_field_loose_base(this, _getBackupStream)[_getBackupStream]();
const outStream = new stream$1.PassThrough({
objectMode: true
});
stream$1.pipeline([
inStream,
new tar.Parse({
filter (filePath, entry) {
if (entry.type !== 'File') {
return false;
}
return utils.isFilePathInDirname(directory, filePath);
},
async onentry (entry) {
const transforms = [
// JSONL parser to read the data chunks one by one (line by line)
Parser.parser({
checkErrors: true
}),
// The JSONL parser returns each line as key/value
(line)=>line.value
];
const stream = entry.pipe(streamChain.chain(transforms));
try {
for await (const chunk of stream){
outStream.write(chunk);
}
} catch (e) {
outStream.destroy(new providers.ProviderTransferError(`Error parsing backup files from backup file ${entry.path}: ${e.message}`, {
details: {
error: e
}
}));
}
}
})
], async ()=>{
// Manually send the 'end' event to the out stream
// once every entry has finished streaming its content
outStream.end();
});
return outStream;
}
async function parseJSONFile(fileStream, filePath) {
return new Promise((resolve, reject)=>{
stream$1.pipeline([
fileStream,
// Custom backup archive parsing
new tar.Parse({
/**
* Filter the parsed entries to only keep the one that matches the given filepath
*/ filter (entryPath, entry) {
if (entry.type !== 'File') {
return false;
}
return utils.isPathEquivalent(entryPath, filePath);
},
async onentry (entry) {
// Collect all the content of the entry file
const content = await entry.collect();
try {
// Parse from buffer array to string to JSON
const parsedContent = JSON.parse(Buffer.concat(content).toString());
// Resolve the Promise with the parsed content
resolve(parsedContent);
} catch (e) {
reject(e);
} finally{
// Cleanup (close the stream associated to the entry)
entry.destroy();
}
}
})
], ()=>{
// If the promise hasn't been resolved and we've parsed all
// the archive entries, then the file doesn't exist
reject(new Error(`File "${filePath}" not found`));
});
});
}
exports.createLocalFileSourceProvider = createLocalFileSourceProvider;
//# sourceMappingURL=index.js.map
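
createAssetsReadStream above emits one object per file found under assets/uploads, with the fields assembled in its onentry handler. A sketch of a consumer, reusing the `source` instance from the earlier sketch:

// Sketch of consuming the object-mode assets stream (field names taken from `asset` above).
const assets = await source.createAssetsReadStream();

for await (const asset of assets) {
  // asset.filename  - base name of the uploaded file
  // asset.filepath  - posix path of the entry inside the archive
  // asset.metadata  - parsed assets/metadata/<filename>.json
  // asset.stats.size and asset.stream - size and readable content of the entry
  console.log(asset.filename, asset.stats.size);
}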

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,286 @@
import zip from 'zlib';
import path from 'path';
import { PassThrough, pipeline } from 'stream';
import fse__default from 'fs-extra';
import tar from 'tar';
import { isEmpty, keyBy } from 'lodash/fp';
import { chain } from 'stream-chain';
import { parser } from 'stream-json/jsonl/Parser';
import 'crypto';
import { createDecryptionCipher } from '../../../utils/encryption/decrypt.mjs';
import { collect } from '../../../utils/stream.mjs';
import { schemasToValidJSON } from '../../../utils/schema.mjs';
import 'events';
import { ProviderInitializationError, ProviderTransferError } from '../../../errors/providers.mjs';
import { isFilePathInDirname, unknownPathToPosix, isPathEquivalent } from './utils.mjs';
function _class_private_field_loose_base(receiver, privateKey) {
if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) {
throw new TypeError("attempted to use private field on non-instance");
}
return receiver;
}
var id = 0;
function _class_private_field_loose_key(name) {
return "__private_" + id++ + "_" + name;
}
/**
* Constant for the metadata file path
*/ const METADATA_FILE_PATH = 'metadata.json';
const createLocalFileSourceProvider = (options)=>{
return new LocalFileSourceProvider(options);
};
var _metadata = /*#__PURE__*/ _class_private_field_loose_key("_metadata"), _diagnostics = /*#__PURE__*/ _class_private_field_loose_key("_diagnostics"), _reportInfo = /*#__PURE__*/ _class_private_field_loose_key("_reportInfo"), _loadMetadata = /*#__PURE__*/ _class_private_field_loose_key("_loadMetadata"), _loadAssetMetadata = /*#__PURE__*/ _class_private_field_loose_key("_loadAssetMetadata"), _getBackupStream = /*#__PURE__*/ _class_private_field_loose_key("_getBackupStream"), // `directory` must be posix formatted path
_streamJsonlDirectory = /*#__PURE__*/ _class_private_field_loose_key("_streamJsonlDirectory"), _parseJSONFile = /*#__PURE__*/ _class_private_field_loose_key("_parseJSONFile");
class LocalFileSourceProvider {
/**
* Pre flight checks regarding the provided options, making sure that the file can be opened (decrypted, decompressed), etc.
*/ async bootstrap(diagnostics) {
_class_private_field_loose_base(this, _diagnostics)[_diagnostics] = diagnostics;
const { path: filePath } = this.options.file;
try {
// Read the metadata to ensure the file can be parsed
await _class_private_field_loose_base(this, _loadMetadata)[_loadMetadata]();
// TODO: we might also need to read the schema.jsonl files & implement a custom stream-check
} catch (e) {
if (this.options?.encryption?.enabled) {
throw new ProviderInitializationError(`Key is incorrect or the file '${filePath}' is not a valid Strapi data file.`);
}
throw new ProviderInitializationError(`File '${filePath}' is not a valid Strapi data file.`);
}
if (!_class_private_field_loose_base(this, _metadata)[_metadata]) {
throw new ProviderInitializationError('Could not load metadata from Strapi data file.');
}
}
async getMetadata() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('getting metadata');
if (!_class_private_field_loose_base(this, _metadata)[_metadata]) {
await _class_private_field_loose_base(this, _loadMetadata)[_loadMetadata]();
}
return _class_private_field_loose_base(this, _metadata)[_metadata] ?? null;
}
async getSchemas() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('getting schemas');
const schemaCollection = await collect(this.createSchemasReadStream());
if (isEmpty(schemaCollection)) {
throw new ProviderInitializationError('Could not load schemas from Strapi data file.');
}
// Group schema by UID
const schemas = keyBy('uid', schemaCollection);
// Transform to valid JSON
return schemasToValidJSON(schemas);
}
createEntitiesReadStream() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating entities read stream');
return _class_private_field_loose_base(this, _streamJsonlDirectory)[_streamJsonlDirectory]('entities');
}
createSchemasReadStream() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating schemas read stream');
return _class_private_field_loose_base(this, _streamJsonlDirectory)[_streamJsonlDirectory]('schemas');
}
createLinksReadStream() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating links read stream');
return _class_private_field_loose_base(this, _streamJsonlDirectory)[_streamJsonlDirectory]('links');
}
createConfigurationReadStream() {
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating configuration read stream');
// NOTE: TBD
return _class_private_field_loose_base(this, _streamJsonlDirectory)[_streamJsonlDirectory]('configuration');
}
createAssetsReadStream() {
const inStream = _class_private_field_loose_base(this, _getBackupStream)[_getBackupStream]();
const outStream = new PassThrough({
objectMode: true
});
const loadAssetMetadata = _class_private_field_loose_base(this, _loadAssetMetadata)[_loadAssetMetadata].bind(this);
_class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating assets read stream');
pipeline([
inStream,
new tar.Parse({
// find only files in the assets/uploads folder
filter (filePath, entry) {
if (entry.type !== 'File') {
return false;
}
return isFilePathInDirname('assets/uploads', filePath);
},
async onentry (entry) {
const { path: filePath, size = 0 } = entry;
const normalizedPath = unknownPathToPosix(filePath);
const file = path.basename(normalizedPath);
let metadata;
try {
metadata = await loadAssetMetadata(`assets/metadata/${file}.json`);
} catch (error) {
throw new Error(`Failed to read metadata for ${file}`);
}
const asset = {
metadata,
filename: file,
filepath: normalizedPath,
stats: {
size
},
stream: entry
};
outStream.write(asset);
}
})
], ()=>outStream.end());
return outStream;
}
constructor(options){
Object.defineProperty(this, _reportInfo, {
value: reportInfo
});
Object.defineProperty(this, _loadMetadata, {
value: loadMetadata
});
Object.defineProperty(this, _loadAssetMetadata, {
value: loadAssetMetadata
});
Object.defineProperty(this, _getBackupStream, {
value: getBackupStream
});
Object.defineProperty(this, _streamJsonlDirectory, {
value: streamJsonlDirectory
});
// For collecting an entire JSON file then parsing it, not for streaming JSONL
Object.defineProperty(this, _parseJSONFile, {
value: parseJSONFile
});
Object.defineProperty(this, _metadata, {
writable: true,
value: void 0
});
Object.defineProperty(this, _diagnostics, {
writable: true,
value: void 0
});
this.type = 'source';
this.name = 'source::local-file';
this.options = options;
const { encryption } = this.options;
if (encryption.enabled && encryption.key === undefined) {
throw new Error('Missing encryption key');
}
}
}
function reportInfo(message) {
_class_private_field_loose_base(this, _diagnostics)[_diagnostics]?.report({
details: {
createdAt: new Date(),
message,
origin: 'file-source-provider'
},
kind: 'info'
});
}
async function loadMetadata() {
const backupStream = _class_private_field_loose_base(this, _getBackupStream)[_getBackupStream]();
_class_private_field_loose_base(this, _metadata)[_metadata] = await _class_private_field_loose_base(this, _parseJSONFile)[_parseJSONFile](backupStream, METADATA_FILE_PATH);
}
async function loadAssetMetadata(path) {
const backupStream = _class_private_field_loose_base(this, _getBackupStream)[_getBackupStream]();
return _class_private_field_loose_base(this, _parseJSONFile)[_parseJSONFile](backupStream, path);
}
function getBackupStream() {
const { file, encryption, compression } = this.options;
const streams = [];
try {
streams.push(fse__default.createReadStream(file.path));
} catch (e) {
throw new Error(`Could not read backup file path provided at "${this.options.file.path}"`);
}
if (encryption.enabled && encryption.key) {
streams.push(createDecryptionCipher(encryption.key));
}
if (compression.enabled) {
streams.push(zip.createGunzip());
}
return chain(streams);
}
function streamJsonlDirectory(directory) {
const inStream = _class_private_field_loose_base(this, _getBackupStream)[_getBackupStream]();
const outStream = new PassThrough({
objectMode: true
});
pipeline([
inStream,
new tar.Parse({
filter (filePath, entry) {
if (entry.type !== 'File') {
return false;
}
return isFilePathInDirname(directory, filePath);
},
async onentry (entry) {
const transforms = [
// JSONL parser to read the data chunks one by one (line by line)
parser({
checkErrors: true
}),
// The JSONL parser returns each line as key/value
(line)=>line.value
];
const stream = entry.pipe(chain(transforms));
try {
for await (const chunk of stream){
outStream.write(chunk);
}
} catch (e) {
outStream.destroy(new ProviderTransferError(`Error parsing backup files from backup file ${entry.path}: ${e.message}`, {
details: {
error: e
}
}));
}
}
})
], async ()=>{
// Manually send the 'end' event to the out stream
// once every entry has finished streaming its content
outStream.end();
});
return outStream;
}
async function parseJSONFile(fileStream, filePath) {
return new Promise((resolve, reject)=>{
pipeline([
fileStream,
// Custom backup archive parsing
new tar.Parse({
/**
* Filter the parsed entries to only keep the one that matches the given filepath
*/ filter (entryPath, entry) {
if (entry.type !== 'File') {
return false;
}
return isPathEquivalent(entryPath, filePath);
},
async onentry (entry) {
// Collect all the content of the entry file
const content = await entry.collect();
try {
// Parse from buffer array to string to JSON
const parsedContent = JSON.parse(Buffer.concat(content).toString());
// Resolve the Promise with the parsed content
resolve(parsedContent);
} catch (e) {
reject(e);
} finally{
// Cleanup (close the stream associated to the entry)
entry.destroy();
}
}
})
], ()=>{
// If the promise hasn't been resolved and we've parsed all
// the archive entries, then the file doesn't exist
reject(new Error(`File "${filePath}" not found`));
});
});
}
export { createLocalFileSourceProvider };
//# sourceMappingURL=index.mjs.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,38 @@
/**
* Note: in versions of the transfer engine <=4.9.0, exports were generated with windows paths
* on Windows systems, and posix paths on posix systems.
*
* We now store all paths as posix, but need to leave a separator conversion for legacy purposes, and to
* support manually-created tar files coming from Windows systems (ie, if a user creates a
* backup file with a windows tar tool rather than using the `export` command)
*
* Because of this, export/import files may never contain files with a backslash in the name, even escaped
*
* */
/**
* Check if the directory of a given filePath (which can be either posix or win32) resolves to the same as the given posix-format path posixDirName
* We must be able to assume the first argument is a path to a directory and the second is a path to a file, otherwise path.dirname will interpret a path without any slashes as the filename
*
* @param {string} posixDirName A posix path pointing to a directory
* @param {string} filePath an unknown filesystem path pointing to a file
* @returns {boolean} is the file located in the given directory
*/
export declare const isFilePathInDirname: (posixDirName: string, filePath: string) => boolean;
/**
 * Check if two paths, each of which can be in either posix or win32 format, resolve to the same file
*
* @param {string} pathA a path that may be either win32 or posix
* @param {string} pathB a path that may be either win32 or posix
*
* @returns {boolean} do paths point to the same place
*/
export declare const isPathEquivalent: (pathA: string, pathB: string) => boolean;
/**
* Convert an unknown format path (win32 or posix) to a posix path
*
* @param {string} filePath a path that may be either win32 or posix
*
* @returns {string} a posix path
*/
export declare const unknownPathToPosix: (filePath: string) => string;
//# sourceMappingURL=utils.d.ts.map
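
To make the contract of these declarations concrete, a small usage sketch follows (editorial, not part of the package). The results shown follow from the implementation in utils.js/utils.mjs below: win32 separators are converted to posix before comparison. The import path and file names are illustrative assumptions.

// Editorial sketch; import path and file names are hypothetical.
import { isFilePathInDirname, isPathEquivalent, unknownPathToPosix } from './utils.mjs';

unknownPathToPosix('entities\\entities_00001.jsonl');
// => 'entities/entities_00001.jsonl' (backslash separators converted to posix)

isPathEquivalent('entities/entities_00001.jsonl', 'entities\\entities_00001.jsonl');
// => true (both normalise to the same posix path)

isFilePathInDirname('entities', 'entities\\entities_00001.jsonl');
// => true (the file's directory resolves to the posix directory 'entities')

isFilePathInDirname('links', 'entities/entities_00001.jsonl');
// => false (the file lives under 'entities', not 'links')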

View File

@@ -0,0 +1 @@
{"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../../../../src/file/providers/source/utils.ts"],"names":[],"mappings":"AAEA;;;;;;;;;;KAUK;AAEL;;;;;;;GAOG;AACH,eAAO,MAAM,mBAAmB,iBAAkB,MAAM,YAAY,MAAM,YAGzE,CAAC;AAEF;;;;;;;GAOG;AACH,eAAO,MAAM,gBAAgB,UAAW,MAAM,SAAS,MAAM,YAM5D,CAAC;AAEF;;;;;;GAMG;AACH,eAAO,MAAM,kBAAkB,aAAc,MAAM,WAOlD,CAAC"}

View File

@@ -0,0 +1,56 @@
'use strict';
var path = require('path');
/**
* Note: in versions of the transfer engine <=4.9.0, exports were generated with windows paths
* on Windows systems, and posix paths on posix systems.
*
* We now store all paths as posix, but need to leave a separator conversion for legacy purposes, and to
 * support manually-created tar files coming from Windows systems (i.e., if a user creates a
* backup file with a windows tar tool rather than using the `export` command)
*
* Because of this, export/import files may never contain files with a forward slash in the name, even escaped
*
* */ /**
 * Check if the directory of a given filePath (which can be either posix or win32) resolves to the same directory as the given posix-format path posixDirName.
 * We must be able to assume the first argument is a path to a directory and the second is a path to a file; otherwise path.dirname would interpret a path without any slashes as a filename.
*
* @param {string} posixDirName A posix path pointing to a directory
* @param {string} filePath an unknown filesystem path pointing to a file
* @returns {boolean} is the file located in the given directory
*/ const isFilePathInDirname = (posixDirName, filePath)=>{
const normalizedDir = path.posix.dirname(unknownPathToPosix(filePath));
return isPathEquivalent(posixDirName, normalizedDir);
};
/**
 * Check if two paths, each of which can be in either posix or win32 format, resolve to the same file
*
* @param {string} pathA a path that may be either win32 or posix
* @param {string} pathB a path that may be either win32 or posix
*
* @returns {boolean} do paths point to the same place
*/ const isPathEquivalent = (pathA, pathB)=>{
// Check if paths appear to be win32 or posix, and if win32 convert to posix
const normalizedPathA = path.posix.normalize(unknownPathToPosix(pathA));
const normalizedPathB = path.posix.normalize(unknownPathToPosix(pathB));
return !path.posix.relative(normalizedPathB, normalizedPathA).length;
};
/**
* Convert an unknown format path (win32 or posix) to a posix path
*
* @param {string} filePath a path that may be either win32 or posix
*
* @returns {string} a posix path
*/ const unknownPathToPosix = (filePath)=>{
// if it includes a forward slash, it must be posix already -- we will not support win32 with mixed path separators
if (filePath.includes(path.posix.sep)) {
return filePath;
}
return path.normalize(filePath).split(path.win32.sep).join(path.posix.sep);
};
exports.isFilePathInDirname = isFilePathInDirname;
exports.isPathEquivalent = isPathEquivalent;
exports.unknownPathToPosix = unknownPathToPosix;
//# sourceMappingURL=utils.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"utils.js","sources":["../../../../src/file/providers/source/utils.ts"],"sourcesContent":["import path from 'path';\n\n/**\n * Note: in versions of the transfer engine <=4.9.0, exports were generated with windows paths\n * on Windows systems, and posix paths on posix systems.\n *\n * We now store all paths as posix, but need to leave a separator conversion for legacy purposes, and to\n * support manually-created tar files coming from Windows systems (ie, if a user creates a\n * backup file with a windows tar tool rather than using the `export` command)\n *\n * Because of this, export/import files may never contain files with a forward slash in the name, even escaped\n *\n * */\n\n/**\n * Check if the directory of a given filePath (which can be either posix or win32) resolves to the same as the given posix-format path posixDirName\n * We must be able to assume the first argument is a path to a directory and the second is a path to a file, otherwise path.dirname will interpret a path without any slashes as the filename\n *\n * @param {string} posixDirName A posix path pointing to a directory\n * @param {string} filePath an unknown filesystem path pointing to a file\n * @returns {boolean} is the file located in the given directory\n */\nexport const isFilePathInDirname = (posixDirName: string, filePath: string) => {\n const normalizedDir = path.posix.dirname(unknownPathToPosix(filePath));\n return isPathEquivalent(posixDirName, normalizedDir);\n};\n\n/**\n * Check if two paths that can be either in posix or win32 format resolves to the same file\n *\n * @param {string} pathA a path that may be either win32 or posix\n * @param {string} pathB a path that may be either win32 or posix\n *\n * @returns {boolean} do paths point to the same place\n */\nexport const isPathEquivalent = (pathA: string, pathB: string) => {\n // Check if paths appear to be win32 or posix, and if win32 convert to posix\n const normalizedPathA = path.posix.normalize(unknownPathToPosix(pathA));\n const normalizedPathB = path.posix.normalize(unknownPathToPosix(pathB));\n\n return !path.posix.relative(normalizedPathB, normalizedPathA).length;\n};\n\n/**\n * Convert an unknown format path (win32 or posix) to a posix path\n *\n * @param {string} filePath a path that may be either win32 or posix\n *\n * @returns {string} a posix path\n */\nexport const unknownPathToPosix = (filePath: string) => {\n // if it includes a forward slash, it must be posix already -- we will not support win32 with mixed path separators\n if (filePath.includes(path.posix.sep)) {\n return filePath;\n }\n\n return 
path.normalize(filePath).split(path.win32.sep).join(path.posix.sep);\n};\n"],"names":["isFilePathInDirname","posixDirName","filePath","normalizedDir","path","posix","dirname","unknownPathToPosix","isPathEquivalent","pathA","pathB","normalizedPathA","normalize","normalizedPathB","relative","length","includes","sep","split","win32","join"],"mappings":";;;;AAEA;;;;;;;;;;;;;;;;;AAmBC,IACM,MAAMA,mBAAsB,GAAA,CAACC,YAAsBC,EAAAA,QAAAA,GAAAA;AACxD,IAAA,MAAMC,gBAAgBC,IAAKC,CAAAA,KAAK,CAACC,OAAO,CAACC,kBAAmBL,CAAAA,QAAAA,CAAAA,CAAAA;AAC5D,IAAA,OAAOM,iBAAiBP,YAAcE,EAAAA,aAAAA,CAAAA;AACxC;AAEA;;;;;;;AAOC,IACM,MAAMK,gBAAmB,GAAA,CAACC,KAAeC,EAAAA,KAAAA,GAAAA;;AAE9C,IAAA,MAAMC,kBAAkBP,IAAKC,CAAAA,KAAK,CAACO,SAAS,CAACL,kBAAmBE,CAAAA,KAAAA,CAAAA,CAAAA;AAChE,IAAA,MAAMI,kBAAkBT,IAAKC,CAAAA,KAAK,CAACO,SAAS,CAACL,kBAAmBG,CAAAA,KAAAA,CAAAA,CAAAA;IAEhE,OAAO,CAACN,KAAKC,KAAK,CAACS,QAAQ,CAACD,eAAAA,EAAiBF,iBAAiBI,MAAM;AACtE;AAEA;;;;;;IAOaR,MAAAA,kBAAAA,GAAqB,CAACL,QAAAA,GAAAA;;AAEjC,IAAA,IAAIA,SAASc,QAAQ,CAACZ,KAAKC,KAAK,CAACY,GAAG,CAAG,EAAA;QACrC,OAAOf,QAAAA;AACT;AAEA,IAAA,OAAOE,KAAKQ,SAAS,CAACV,QAAUgB,CAAAA,CAAAA,KAAK,CAACd,IAAKe,CAAAA,KAAK,CAACF,GAAG,EAAEG,IAAI,CAAChB,IAAKC,CAAAA,KAAK,CAACY,GAAG,CAAA;AAC3E;;;;;;"}

View File

@@ -0,0 +1,52 @@
import path from 'path';
/**
* Note: in versions of the transfer engine <=4.9.0, exports were generated with windows paths
* on Windows systems, and posix paths on posix systems.
*
* We now store all paths as posix, but need to leave a separator conversion for legacy purposes, and to
 * support manually-created tar files coming from Windows systems (i.e., if a user creates a
* backup file with a windows tar tool rather than using the `export` command)
*
* Because of this, export/import files may never contain files with a forward slash in the name, even escaped
*
* */ /**
 * Check if the directory of a given filePath (which can be either posix or win32) resolves to the same directory as the given posix-format path posixDirName.
 * We must be able to assume the first argument is a path to a directory and the second is a path to a file; otherwise path.dirname would interpret a path without any slashes as a filename.
*
* @param {string} posixDirName A posix path pointing to a directory
* @param {string} filePath an unknown filesystem path pointing to a file
* @returns {boolean} is the file located in the given directory
*/ const isFilePathInDirname = (posixDirName, filePath)=>{
const normalizedDir = path.posix.dirname(unknownPathToPosix(filePath));
return isPathEquivalent(posixDirName, normalizedDir);
};
/**
 * Check if two paths, each of which can be in either posix or win32 format, resolve to the same file
*
* @param {string} pathA a path that may be either win32 or posix
* @param {string} pathB a path that may be either win32 or posix
*
* @returns {boolean} do paths point to the same place
*/ const isPathEquivalent = (pathA, pathB)=>{
// Check if paths appear to be win32 or posix, and if win32 convert to posix
const normalizedPathA = path.posix.normalize(unknownPathToPosix(pathA));
const normalizedPathB = path.posix.normalize(unknownPathToPosix(pathB));
return !path.posix.relative(normalizedPathB, normalizedPathA).length;
};
/**
* Convert an unknown format path (win32 or posix) to a posix path
*
* @param {string} filePath a path that may be either win32 or posix
*
* @returns {string} a posix path
*/ const unknownPathToPosix = (filePath)=>{
// if it includes a forward slash, it must be posix already -- we will not support win32 with mixed path separators
if (filePath.includes(path.posix.sep)) {
return filePath;
}
return path.normalize(filePath).split(path.win32.sep).join(path.posix.sep);
};
export { isFilePathInDirname, isPathEquivalent, unknownPathToPosix };
//# sourceMappingURL=utils.mjs.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"utils.mjs","sources":["../../../../src/file/providers/source/utils.ts"],"sourcesContent":["import path from 'path';\n\n/**\n * Note: in versions of the transfer engine <=4.9.0, exports were generated with windows paths\n * on Windows systems, and posix paths on posix systems.\n *\n * We now store all paths as posix, but need to leave a separator conversion for legacy purposes, and to\n * support manually-created tar files coming from Windows systems (ie, if a user creates a\n * backup file with a windows tar tool rather than using the `export` command)\n *\n * Because of this, export/import files may never contain files with a forward slash in the name, even escaped\n *\n * */\n\n/**\n * Check if the directory of a given filePath (which can be either posix or win32) resolves to the same as the given posix-format path posixDirName\n * We must be able to assume the first argument is a path to a directory and the second is a path to a file, otherwise path.dirname will interpret a path without any slashes as the filename\n *\n * @param {string} posixDirName A posix path pointing to a directory\n * @param {string} filePath an unknown filesystem path pointing to a file\n * @returns {boolean} is the file located in the given directory\n */\nexport const isFilePathInDirname = (posixDirName: string, filePath: string) => {\n const normalizedDir = path.posix.dirname(unknownPathToPosix(filePath));\n return isPathEquivalent(posixDirName, normalizedDir);\n};\n\n/**\n * Check if two paths that can be either in posix or win32 format resolves to the same file\n *\n * @param {string} pathA a path that may be either win32 or posix\n * @param {string} pathB a path that may be either win32 or posix\n *\n * @returns {boolean} do paths point to the same place\n */\nexport const isPathEquivalent = (pathA: string, pathB: string) => {\n // Check if paths appear to be win32 or posix, and if win32 convert to posix\n const normalizedPathA = path.posix.normalize(unknownPathToPosix(pathA));\n const normalizedPathB = path.posix.normalize(unknownPathToPosix(pathB));\n\n return !path.posix.relative(normalizedPathB, normalizedPathA).length;\n};\n\n/**\n * Convert an unknown format path (win32 or posix) to a posix path\n *\n * @param {string} filePath a path that may be either win32 or posix\n *\n * @returns {string} a posix path\n */\nexport const unknownPathToPosix = (filePath: string) => {\n // if it includes a forward slash, it must be posix already -- we will not support win32 with mixed path separators\n if (filePath.includes(path.posix.sep)) {\n return filePath;\n }\n\n return 
path.normalize(filePath).split(path.win32.sep).join(path.posix.sep);\n};\n"],"names":["isFilePathInDirname","posixDirName","filePath","normalizedDir","path","posix","dirname","unknownPathToPosix","isPathEquivalent","pathA","pathB","normalizedPathA","normalize","normalizedPathB","relative","length","includes","sep","split","win32","join"],"mappings":";;AAEA;;;;;;;;;;;;;;;;;AAmBC,IACM,MAAMA,mBAAsB,GAAA,CAACC,YAAsBC,EAAAA,QAAAA,GAAAA;AACxD,IAAA,MAAMC,gBAAgBC,IAAKC,CAAAA,KAAK,CAACC,OAAO,CAACC,kBAAmBL,CAAAA,QAAAA,CAAAA,CAAAA;AAC5D,IAAA,OAAOM,iBAAiBP,YAAcE,EAAAA,aAAAA,CAAAA;AACxC;AAEA;;;;;;;AAOC,IACM,MAAMK,gBAAmB,GAAA,CAACC,KAAeC,EAAAA,KAAAA,GAAAA;;AAE9C,IAAA,MAAMC,kBAAkBP,IAAKC,CAAAA,KAAK,CAACO,SAAS,CAACL,kBAAmBE,CAAAA,KAAAA,CAAAA,CAAAA;AAChE,IAAA,MAAMI,kBAAkBT,IAAKC,CAAAA,KAAK,CAACO,SAAS,CAACL,kBAAmBG,CAAAA,KAAAA,CAAAA,CAAAA;IAEhE,OAAO,CAACN,KAAKC,KAAK,CAACS,QAAQ,CAACD,eAAAA,EAAiBF,iBAAiBI,MAAM;AACtE;AAEA;;;;;;IAOaR,MAAAA,kBAAAA,GAAqB,CAACL,QAAAA,GAAAA;;AAEjC,IAAA,IAAIA,SAASc,QAAQ,CAACZ,KAAKC,KAAK,CAACY,GAAG,CAAG,EAAA;QACrC,OAAOf,QAAAA;AACT;AAEA,IAAA,OAAOE,KAAKQ,SAAS,CAACV,QAAUgB,CAAAA,CAAAA,KAAK,CAACd,IAAKe,CAAAA,KAAK,CAACF,GAAG,EAAEG,IAAI,CAAChB,IAAKC,CAAAA,KAAK,CAACY,GAAG,CAAA;AAC3E;;;;"}