node_modules ignore

This commit is contained in:
2025-05-08 23:43:47 +02:00
parent e19d52f172
commit 4574544c9f
65041 changed files with 10593536 additions and 0 deletions

View File

@@ -0,0 +1,48 @@
/// <reference types="node" />
/// <reference types="stream-chain" />
/// <reference types="node" />
import zlib from 'zlib';
import { Writable } from 'stream';
import type { IDestinationProvider, IDestinationProviderTransferResults, IMetadata, ProviderType } from '../../../../types';
import type { IDiagnosticReporter } from '../../../utils/diagnostic';
/**
 * Configuration accepted by the local-file destination provider.
 */
export interface ILocalFileDestinationProviderOptions {
    /** Encryption settings; `key` is required when `enabled` is true (bootstrap throws otherwise). */
    encryption: {
        enabled: boolean;
        key?: string;
    };
    /** When enabled, the tar archive is piped through gzip. */
    compression: {
        enabled: boolean;
    };
    file: {
        /** Base output path; `.tar` (and `.gz` / `.enc` as applicable) is appended. */
        path: string;
        /** NOTE(review): not referenced by the visible provider code — confirm intended use. */
        maxSize?: number;
        /** Maximum size of a single `.jsonl` tar entry — presumably bytes; verify against `createTarEntryStream`. */
        maxSizeJsonl?: number;
    };
}
/**
 * Transfer results reported by the local-file destination provider, extended
 * with the path of the archive file that was written.
 */
export interface ILocalFileDestinationProviderTransferResults extends IDestinationProviderTransferResults {
    file?: {
        /** Resolved archive path, set during bootstrap. */
        path?: string;
    };
}
/** Factory returning a new {@link LocalFileDestinationProvider} for the given options. */
export declare const createLocalFileDestinationProvider: (options: ILocalFileDestinationProviderOptions) => LocalFileDestinationProvider;
/**
 * Destination provider that streams transfer data into a local tar archive,
 * optionally gzip-compressed and/or encrypted.
 */
declare class LocalFileDestinationProvider implements IDestinationProvider {
    #private;
    /** Provider identifier (`destination::local-file`). */
    name: string;
    /** Always `'destination'`. */
    type: ProviderType;
    options: ILocalFileDestinationProviderOptions;
    /** Populated during bootstrap with the resolved archive file path. */
    results: ILocalFileDestinationProviderTransferResults;
    constructor(options: ILocalFileDestinationProviderOptions);
    /** Records the metadata reported for the given provider side; returns `this` for chaining. */
    setMetadata(target: ProviderType, metadata: IMetadata): IDestinationProvider;
    /** Gzip transform used in the output pipeline when compression is enabled. */
    createGzip(): zlib.Gzip;
    /** Prepares the tar pack stream and output pipeline; throws if encryption is enabled without a key. */
    bootstrap(diagnostics: IDiagnosticReporter): void | Promise<void>;
    /** Writes the metadata entry, finalizes the tar stream and waits for the pipeline to close. */
    close(): Promise<void>;
    /** Closes the archive and removes the output file. */
    rollback(): Promise<void>;
    getMetadata(): null;
    createSchemasWriteStream(): import("stream-chain");
    createEntitiesWriteStream(): Writable;
    createLinksWriteStream(): Writable;
    createConfigurationWriteStream(): Writable;
    createAssetsWriteStream(): Writable;
}
export {};
//# sourceMappingURL=index.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../../src/file/providers/destination/index.ts"],"names":[],"mappings":";;;AACA,OAAO,IAAI,MAAM,MAAM,CAAC;AACxB,OAAO,EAAY,QAAQ,EAAE,MAAM,QAAQ,CAAC;AAO5C,OAAO,KAAK,EAEV,oBAAoB,EACpB,mCAAmC,EACnC,SAAS,EACT,YAAY,EAEb,MAAM,mBAAmB,CAAC;AAC3B,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,2BAA2B,CAAC;AAIrE,MAAM,WAAW,oCAAoC;IACnD,UAAU,EAAE;QACV,OAAO,EAAE,OAAO,CAAC;QACjB,GAAG,CAAC,EAAE,MAAM,CAAC;KACd,CAAC;IAEF,WAAW,EAAE;QACX,OAAO,EAAE,OAAO,CAAC;KAClB,CAAC;IAEF,IAAI,EAAE;QACJ,IAAI,EAAE,MAAM,CAAC;QACb,OAAO,CAAC,EAAE,MAAM,CAAC;QACjB,YAAY,CAAC,EAAE,MAAM,CAAC;KACvB,CAAC;CACH;AAED,MAAM,WAAW,4CACf,SAAQ,mCAAmC;IAC3C,IAAI,CAAC,EAAE;QACL,IAAI,CAAC,EAAE,MAAM,CAAC;KACf,CAAC;CACH;AAED,eAAO,MAAM,kCAAkC,YACpC,oCAAoC,iCAG9C,CAAC;AAEF,cAAM,4BAA6B,YAAW,oBAAoB;;IAChE,IAAI,SAA6B;IAEjC,IAAI,EAAE,YAAY,CAAiB;IAEnC,OAAO,EAAE,oCAAoC,CAAC;IAE9C,OAAO,EAAE,4CAA4C,CAAM;gBAQ/C,OAAO,EAAE,oCAAoC;IA+BzD,WAAW,CAAC,MAAM,EAAE,YAAY,EAAE,QAAQ,EAAE,SAAS,GAAG,oBAAoB;IAM5E,UAAU,IAAI,IAAI,CAAC,IAAI;IAKvB,SAAS,CAAC,WAAW,EAAE,mBAAmB,GAAG,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;IAoC3D,KAAK;IAiBL,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC;IAM/B,WAAW;IA4BX,wBAAwB;IAgBxB,yBAAyB,IAAI,QAAQ;IAgBrC,sBAAsB,IAAI,QAAQ;IAgBlC,8BAA8B,IAAI,QAAQ;IAgB1C,uBAAuB,IAAI,QAAQ;CA8CpC"}

View File

@@ -0,0 +1,248 @@
'use strict';
var path = require('path');
var zip = require('zlib');
var stream = require('stream');
var fse = require('fs-extra');
var tar = require('tar-stream');
var Stringer = require('stream-json/jsonl/Stringer');
var streamChain = require('stream-chain');
var encrypt = require('../../../utils/encryption/encrypt.js');
require('crypto');
var utils = require('./utils.js');
var providers = require('../../../errors/providers.js');
function _class_private_field_loose_base(receiver, privateKey) {
if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) {
throw new TypeError("attempted to use private field on non-instance");
}
return receiver;
}
var id = 0;
function _class_private_field_loose_key(name) {
return "__private_" + id++ + "_" + name;
}
/**
 * Factory for the local-file destination provider.
 */ const createLocalFileDestinationProvider = (options)=>new LocalFileDestinationProvider(options);
var _providersMetadata = /*#__PURE__*/ _class_private_field_loose_key("_providersMetadata"), _archive = /*#__PURE__*/ _class_private_field_loose_key("_archive"), _diagnostics = /*#__PURE__*/ _class_private_field_loose_key("_diagnostics"), _reportInfo = /*#__PURE__*/ _class_private_field_loose_key("_reportInfo"), _archivePath = /*#__PURE__*/ _class_private_field_loose_key("_archivePath"), _writeMetadata = /*#__PURE__*/ _class_private_field_loose_key("_writeMetadata"), _getMetadataStream = /*#__PURE__*/ _class_private_field_loose_key("_getMetadataStream");
/**
 * Destination provider that streams transfer data (schemas, entities, links,
 * configuration, assets and metadata) into a tar archive on the local
 * filesystem, optionally gzip-compressed and/or encrypted.
 *
 * NOTE: transpiled output — `#private` members are emulated through the
 * `_class_private_field_loose_*` helpers defined above.
 */
class LocalFileDestinationProvider {
    // Record the metadata reported for the given provider side (e.g. 'source').
    setMetadata(target, metadata) {
        _class_private_field_loose_base(this, _providersMetadata)[_providersMetadata][target] = metadata;
        return this;
    }
    // Gzip transform used in the output pipeline when compression is enabled.
    createGzip() {
        _class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating gzip');
        return zip.createGzip();
    }
    // Set up the tar pack stream and the output pipeline:
    // tar -> [gzip] -> [cipher] -> file.
    bootstrap(diagnostics) {
        _class_private_field_loose_base(this, _diagnostics)[_diagnostics] = diagnostics;
        const { compression, encryption } = this.options;
        if (encryption.enabled && !encryption.key) {
            throw new Error("Can't encrypt without a key");
        }
        _class_private_field_loose_base(this, _archive)[_archive].stream = tar.pack();
        const outStream = fse.createWriteStream(_class_private_field_loose_base(this, _archivePath)[_archivePath]);
        outStream.on('error', (err)=>{
            if (err.code === 'ENOSPC') {
                throw new providers.ProviderTransferError("Your server doesn't have space to proceed with the import.");
            }
            throw err;
        });
        const archiveTransforms = [];
        if (compression.enabled) {
            archiveTransforms.push(this.createGzip());
        }
        if (encryption.enabled && encryption.key) {
            archiveTransforms.push(encrypt.createEncryptionCipher(encryption.key));
        }
        _class_private_field_loose_base(this, _archive)[_archive].pipeline = streamChain.chain([
            _class_private_field_loose_base(this, _archive)[_archive].stream,
            ...archiveTransforms,
            outStream
        ]);
        this.results.file = {
            path: _class_private_field_loose_base(this, _archivePath)[_archivePath]
        };
    }
    // Write the metadata entry, finalize the tar stream, then wait for the
    // whole pipeline to close so the file is fully flushed to disk.
    async close() {
        const { stream, pipeline } = _class_private_field_loose_base(this, _archive)[_archive];
        if (!stream) {
            return;
        }
        await _class_private_field_loose_base(this, _writeMetadata)[_writeMetadata]();
        stream.finalize();
        if (pipeline && !pipeline.closed) {
            await new Promise((resolve, reject)=>{
                pipeline.on('close', resolve).on('error', reject);
            });
        }
    }
    // Close the archive and delete the (possibly partial) output file.
    async rollback() {
        _class_private_field_loose_base(this, _reportInfo)[_reportInfo]('rolling back');
        await this.close();
        await fse.rm(_class_private_field_loose_base(this, _archivePath)[_archivePath], {
            force: true
        });
    }
    // A file destination has no metadata of its own to report.
    getMetadata() {
        return null;
    }
    // JSONL sink writing schema entries under `schemas/` in the archive.
    createSchemasWriteStream() {
        if (!_class_private_field_loose_base(this, _archive)[_archive].stream) {
            throw new Error('Archive stream is unavailable');
        }
        _class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating schemas write stream');
        const filePathFactory = utils.createFilePathFactory('schemas');
        const entryStream = utils.createTarEntryStream(_class_private_field_loose_base(this, _archive)[_archive].stream, filePathFactory, this.options.file.maxSizeJsonl);
        return streamChain.chain([
            Stringer.stringer(),
            entryStream
        ]);
    }
    // JSONL sink writing entity entries under `entities/` in the archive.
    createEntitiesWriteStream() {
        if (!_class_private_field_loose_base(this, _archive)[_archive].stream) {
            throw new Error('Archive stream is unavailable');
        }
        _class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating entities write stream');
        const filePathFactory = utils.createFilePathFactory('entities');
        const entryStream = utils.createTarEntryStream(_class_private_field_loose_base(this, _archive)[_archive].stream, filePathFactory, this.options.file.maxSizeJsonl);
        return streamChain.chain([
            Stringer.stringer(),
            entryStream
        ]);
    }
    // JSONL sink writing link entries under `links/` in the archive.
    createLinksWriteStream() {
        if (!_class_private_field_loose_base(this, _archive)[_archive].stream) {
            throw new Error('Archive stream is unavailable');
        }
        _class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating links write stream');
        const filePathFactory = utils.createFilePathFactory('links');
        const entryStream = utils.createTarEntryStream(_class_private_field_loose_base(this, _archive)[_archive].stream, filePathFactory, this.options.file.maxSizeJsonl);
        return streamChain.chain([
            Stringer.stringer(),
            entryStream
        ]);
    }
    // JSONL sink writing configuration entries under `configuration/`.
    createConfigurationWriteStream() {
        if (!_class_private_field_loose_base(this, _archive)[_archive].stream) {
            throw new Error('Archive stream is unavailable');
        }
        _class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating configuration write stream');
        const filePathFactory = utils.createFilePathFactory('configuration');
        const entryStream = utils.createTarEntryStream(_class_private_field_loose_base(this, _archive)[_archive].stream, filePathFactory, this.options.file.maxSizeJsonl);
        return streamChain.chain([
            Stringer.stringer(),
            entryStream
        ]);
    }
    // Object-mode sink for assets: each incoming asset produces a JSON
    // metadata entry plus the file content entry in the tar archive.
    createAssetsWriteStream() {
        const { stream: archiveStream } = _class_private_field_loose_base(this, _archive)[_archive];
        if (!archiveStream) {
            throw new Error('Archive stream is unavailable');
        }
        _class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating assets write stream');
        return new stream.Writable({
            objectMode: true,
            write (data, _encoding, callback) {
                // always write tar files with posix paths so we have a standard format for paths regardless of system
                const entryPath = path.posix.join('assets', 'uploads', data.filename);
                const entryMetadataPath = path.posix.join('assets', 'metadata', `${data.filename}.json`);
                const stringifiedMetadata = JSON.stringify(data.metadata);
                archiveStream.entry({
                    name: entryMetadataPath,
                    // Fix: tar entry sizes are byte counts; String#length counts
                    // UTF-16 code units and under-reports multi-byte JSON, which
                    // would corrupt the archive.
                    size: Buffer.byteLength(stringifiedMetadata)
                }, stringifiedMetadata);
                const entry = archiveStream.entry({
                    name: entryPath,
                    size: data.stats.size
                });
                if (!entry) {
                    callback(new Error(`Failed to create an asset tar entry for ${entryPath}`));
                    return;
                }
                data.stream.pipe(entry);
                entry.on('finish', ()=>{
                    callback(null);
                }).on('error', (error)=>{
                    callback(error);
                });
            }
        });
    }
    // Wires the loose-field private members (transpiled `#private` emulation)
    // and initializes provider identity/state.
    constructor(options){
        Object.defineProperty(this, _reportInfo, {
            value: reportInfo
        });
        Object.defineProperty(this, _archivePath, {
            get: get_archivePath,
            set: void 0
        });
        Object.defineProperty(this, _writeMetadata, {
            value: writeMetadata
        });
        Object.defineProperty(this, _getMetadataStream, {
            value: getMetadataStream
        });
        Object.defineProperty(this, _providersMetadata, {
            writable: true,
            value: void 0
        });
        Object.defineProperty(this, _archive, {
            writable: true,
            value: void 0
        });
        Object.defineProperty(this, _diagnostics, {
            writable: true,
            value: void 0
        });
        this.name = 'destination::local-file';
        this.type = 'destination';
        this.results = {};
        _class_private_field_loose_base(this, _providersMetadata)[_providersMetadata] = {};
        _class_private_field_loose_base(this, _archive)[_archive] = {};
        this.options = options;
    }
}
/**
 * Emit an informational diagnostic message, if a reporter was supplied to
 * bootstrap(); a no-op otherwise.
 */ function reportInfo(message) {
    const reporter = _class_private_field_loose_base(this, _diagnostics)[_diagnostics];
    reporter?.report({
        details: {
            createdAt: new Date(),
            message,
            origin: 'file-destination-provider'
        },
        kind: 'info'
    });
}
/**
 * Compute the output archive path from the configured base file path:
 * `.tar` always, plus `.gz` when compression is on and `.enc` when
 * encryption is on (in that order).
 */ function get_archivePath() {
    const { encryption, compression, file } = this.options;
    const extensions = ['.tar'];
    if (compression.enabled) {
        extensions.push('.gz');
    }
    if (encryption.enabled) {
        extensions.push('.enc');
    }
    return `${file.path}${extensions.join('')}`;
}
/**
 * Serialize the source provider's metadata (if any) as pretty-printed JSON
 * and stream it into the archive's `metadata.json` entry.
 */ async function writeMetadata() {
    _class_private_field_loose_base(this, _reportInfo)[_reportInfo]('writing metadata');
    const metadata = _class_private_field_loose_base(this, _providersMetadata)[_providersMetadata].source;
    if (!metadata) {
        return;
    }
    const payload = JSON.stringify(metadata, null, 2);
    const target = _class_private_field_loose_base(this, _getMetadataStream)[_getMetadataStream]();
    await new Promise((resolve)=>{
        stream.Readable.from(payload).pipe(target).on('close', resolve);
    });
}
/**
 * Build the tar entry stream used to persist `metadata.json` at the archive
 * root; throws if bootstrap() has not initialized the tar pack stream yet.
 */ function getMetadataStream() {
    const archive = _class_private_field_loose_base(this, _archive)[_archive];
    if (!archive.stream) {
        throw new Error('Archive stream is unavailable');
    }
    return utils.createTarEntryStream(archive.stream, ()=>'metadata.json');
}
exports.createLocalFileDestinationProvider = createLocalFileDestinationProvider;
//# sourceMappingURL=index.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,246 @@
import path from 'path';
import zip from 'zlib';
import { Writable, Readable } from 'stream';
import { createWriteStream, rm } from 'fs-extra';
import tar from 'tar-stream';
import { stringer } from 'stream-json/jsonl/Stringer';
import { chain } from 'stream-chain';
import { createEncryptionCipher } from '../../../utils/encryption/encrypt.mjs';
import 'crypto';
import { createTarEntryStream, createFilePathFactory } from './utils.mjs';
import { ProviderTransferError } from '../../../errors/providers.mjs';
function _class_private_field_loose_base(receiver, privateKey) {
if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) {
throw new TypeError("attempted to use private field on non-instance");
}
return receiver;
}
var id = 0;
function _class_private_field_loose_key(name) {
return "__private_" + id++ + "_" + name;
}
/**
 * Factory for the local-file destination provider.
 */ const createLocalFileDestinationProvider = (options)=>new LocalFileDestinationProvider(options);
var _providersMetadata = /*#__PURE__*/ _class_private_field_loose_key("_providersMetadata"), _archive = /*#__PURE__*/ _class_private_field_loose_key("_archive"), _diagnostics = /*#__PURE__*/ _class_private_field_loose_key("_diagnostics"), _reportInfo = /*#__PURE__*/ _class_private_field_loose_key("_reportInfo"), _archivePath = /*#__PURE__*/ _class_private_field_loose_key("_archivePath"), _writeMetadata = /*#__PURE__*/ _class_private_field_loose_key("_writeMetadata"), _getMetadataStream = /*#__PURE__*/ _class_private_field_loose_key("_getMetadataStream");
/**
 * Destination provider that streams transfer data (schemas, entities, links,
 * configuration, assets and metadata) into a tar archive on the local
 * filesystem, optionally gzip-compressed and/or encrypted.
 *
 * NOTE: transpiled output — `#private` members are emulated through the
 * `_class_private_field_loose_*` helpers defined above.
 */
class LocalFileDestinationProvider {
    // Record the metadata reported for the given provider side (e.g. 'source').
    setMetadata(target, metadata) {
        _class_private_field_loose_base(this, _providersMetadata)[_providersMetadata][target] = metadata;
        return this;
    }
    // Gzip transform used in the output pipeline when compression is enabled.
    createGzip() {
        _class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating gzip');
        return zip.createGzip();
    }
    // Set up the tar pack stream and the output pipeline:
    // tar -> [gzip] -> [cipher] -> file.
    bootstrap(diagnostics) {
        _class_private_field_loose_base(this, _diagnostics)[_diagnostics] = diagnostics;
        const { compression, encryption } = this.options;
        if (encryption.enabled && !encryption.key) {
            throw new Error("Can't encrypt without a key");
        }
        _class_private_field_loose_base(this, _archive)[_archive].stream = tar.pack();
        const outStream = createWriteStream(_class_private_field_loose_base(this, _archivePath)[_archivePath]);
        outStream.on('error', (err)=>{
            if (err.code === 'ENOSPC') {
                throw new ProviderTransferError("Your server doesn't have space to proceed with the import.");
            }
            throw err;
        });
        const archiveTransforms = [];
        if (compression.enabled) {
            archiveTransforms.push(this.createGzip());
        }
        if (encryption.enabled && encryption.key) {
            archiveTransforms.push(createEncryptionCipher(encryption.key));
        }
        _class_private_field_loose_base(this, _archive)[_archive].pipeline = chain([
            _class_private_field_loose_base(this, _archive)[_archive].stream,
            ...archiveTransforms,
            outStream
        ]);
        this.results.file = {
            path: _class_private_field_loose_base(this, _archivePath)[_archivePath]
        };
    }
    // Write the metadata entry, finalize the tar stream, then wait for the
    // whole pipeline to close so the file is fully flushed to disk.
    async close() {
        const { stream, pipeline } = _class_private_field_loose_base(this, _archive)[_archive];
        if (!stream) {
            return;
        }
        await _class_private_field_loose_base(this, _writeMetadata)[_writeMetadata]();
        stream.finalize();
        if (pipeline && !pipeline.closed) {
            await new Promise((resolve, reject)=>{
                pipeline.on('close', resolve).on('error', reject);
            });
        }
    }
    // Close the archive and delete the (possibly partial) output file.
    async rollback() {
        _class_private_field_loose_base(this, _reportInfo)[_reportInfo]('rolling back');
        await this.close();
        await rm(_class_private_field_loose_base(this, _archivePath)[_archivePath], {
            force: true
        });
    }
    // A file destination has no metadata of its own to report.
    getMetadata() {
        return null;
    }
    // JSONL sink writing schema entries under `schemas/` in the archive.
    createSchemasWriteStream() {
        if (!_class_private_field_loose_base(this, _archive)[_archive].stream) {
            throw new Error('Archive stream is unavailable');
        }
        _class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating schemas write stream');
        const filePathFactory = createFilePathFactory('schemas');
        const entryStream = createTarEntryStream(_class_private_field_loose_base(this, _archive)[_archive].stream, filePathFactory, this.options.file.maxSizeJsonl);
        return chain([
            stringer(),
            entryStream
        ]);
    }
    // JSONL sink writing entity entries under `entities/` in the archive.
    createEntitiesWriteStream() {
        if (!_class_private_field_loose_base(this, _archive)[_archive].stream) {
            throw new Error('Archive stream is unavailable');
        }
        _class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating entities write stream');
        const filePathFactory = createFilePathFactory('entities');
        const entryStream = createTarEntryStream(_class_private_field_loose_base(this, _archive)[_archive].stream, filePathFactory, this.options.file.maxSizeJsonl);
        return chain([
            stringer(),
            entryStream
        ]);
    }
    // JSONL sink writing link entries under `links/` in the archive.
    createLinksWriteStream() {
        if (!_class_private_field_loose_base(this, _archive)[_archive].stream) {
            throw new Error('Archive stream is unavailable');
        }
        _class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating links write stream');
        const filePathFactory = createFilePathFactory('links');
        const entryStream = createTarEntryStream(_class_private_field_loose_base(this, _archive)[_archive].stream, filePathFactory, this.options.file.maxSizeJsonl);
        return chain([
            stringer(),
            entryStream
        ]);
    }
    // JSONL sink writing configuration entries under `configuration/`.
    createConfigurationWriteStream() {
        if (!_class_private_field_loose_base(this, _archive)[_archive].stream) {
            throw new Error('Archive stream is unavailable');
        }
        _class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating configuration write stream');
        const filePathFactory = createFilePathFactory('configuration');
        const entryStream = createTarEntryStream(_class_private_field_loose_base(this, _archive)[_archive].stream, filePathFactory, this.options.file.maxSizeJsonl);
        return chain([
            stringer(),
            entryStream
        ]);
    }
    // Object-mode sink for assets: each incoming asset produces a JSON
    // metadata entry plus the file content entry in the tar archive.
    createAssetsWriteStream() {
        const { stream: archiveStream } = _class_private_field_loose_base(this, _archive)[_archive];
        if (!archiveStream) {
            throw new Error('Archive stream is unavailable');
        }
        _class_private_field_loose_base(this, _reportInfo)[_reportInfo]('creating assets write stream');
        return new Writable({
            objectMode: true,
            write (data, _encoding, callback) {
                // always write tar files with posix paths so we have a standard format for paths regardless of system
                const entryPath = path.posix.join('assets', 'uploads', data.filename);
                const entryMetadataPath = path.posix.join('assets', 'metadata', `${data.filename}.json`);
                const stringifiedMetadata = JSON.stringify(data.metadata);
                archiveStream.entry({
                    name: entryMetadataPath,
                    // Fix: tar entry sizes are byte counts; String#length counts
                    // UTF-16 code units and under-reports multi-byte JSON, which
                    // would corrupt the archive.
                    size: Buffer.byteLength(stringifiedMetadata)
                }, stringifiedMetadata);
                const entry = archiveStream.entry({
                    name: entryPath,
                    size: data.stats.size
                });
                if (!entry) {
                    callback(new Error(`Failed to create an asset tar entry for ${entryPath}`));
                    return;
                }
                data.stream.pipe(entry);
                entry.on('finish', ()=>{
                    callback(null);
                }).on('error', (error)=>{
                    callback(error);
                });
            }
        });
    }
    // Wires the loose-field private members (transpiled `#private` emulation)
    // and initializes provider identity/state.
    constructor(options){
        Object.defineProperty(this, _reportInfo, {
            value: reportInfo
        });
        Object.defineProperty(this, _archivePath, {
            get: get_archivePath,
            set: void 0
        });
        Object.defineProperty(this, _writeMetadata, {
            value: writeMetadata
        });
        Object.defineProperty(this, _getMetadataStream, {
            value: getMetadataStream
        });
        Object.defineProperty(this, _providersMetadata, {
            writable: true,
            value: void 0
        });
        Object.defineProperty(this, _archive, {
            writable: true,
            value: void 0
        });
        Object.defineProperty(this, _diagnostics, {
            writable: true,
            value: void 0
        });
        this.name = 'destination::local-file';
        this.type = 'destination';
        this.results = {};
        _class_private_field_loose_base(this, _providersMetadata)[_providersMetadata] = {};
        _class_private_field_loose_base(this, _archive)[_archive] = {};
        this.options = options;
    }
}
/**
 * Emit an informational diagnostic message, if a reporter was supplied to
 * bootstrap(); a no-op otherwise.
 */ function reportInfo(message) {
    const reporter = _class_private_field_loose_base(this, _diagnostics)[_diagnostics];
    reporter?.report({
        details: {
            createdAt: new Date(),
            message,
            origin: 'file-destination-provider'
        },
        kind: 'info'
    });
}
/**
 * Compute the output archive path from the configured base file path:
 * `.tar` always, plus `.gz` when compression is on and `.enc` when
 * encryption is on (in that order).
 */ function get_archivePath() {
    const { encryption, compression, file } = this.options;
    const extensions = ['.tar'];
    if (compression.enabled) {
        extensions.push('.gz');
    }
    if (encryption.enabled) {
        extensions.push('.enc');
    }
    return `${file.path}${extensions.join('')}`;
}
/**
 * Serialize the source provider's metadata (if any) as pretty-printed JSON
 * and stream it into the archive's `metadata.json` entry.
 */ async function writeMetadata() {
    _class_private_field_loose_base(this, _reportInfo)[_reportInfo]('writing metadata');
    const metadata = _class_private_field_loose_base(this, _providersMetadata)[_providersMetadata].source;
    if (!metadata) {
        return;
    }
    const payload = JSON.stringify(metadata, null, 2);
    const target = _class_private_field_loose_base(this, _getMetadataStream)[_getMetadataStream]();
    await new Promise((resolve)=>{
        Readable.from(payload).pipe(target).on('close', resolve);
    });
}
/**
 * Build the tar entry stream used to persist `metadata.json` at the archive
 * root; throws if bootstrap() has not initialized the tar pack stream yet.
 */ function getMetadataStream() {
    const archive = _class_private_field_loose_base(this, _archive)[_archive];
    if (!archive.stream) {
        throw new Error('Archive stream is unavailable');
    }
    return createTarEntryStream(archive.stream, ()=>'metadata.json');
}
export { createLocalFileDestinationProvider };
//# sourceMappingURL=index.mjs.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,10 @@
/// <reference types="node" />
import { Writable } from 'stream';
import tar from 'tar-stream';
/**
 * Create a file path factory for a given path & prefix.
 * Upon being called, the factory will return a file path for a given index
 * (e.g. `entities/entities_00001.jsonl`).
 */
export declare const createFilePathFactory: (type: string) => (fileIndex?: number) => string;
/**
 * Create a writable stream that splits incoming payloads into numbered tar
 * entries inside `archive`, using `pathFactory` to name each entry and
 * capping each entry at `maxSize` — presumably bytes; verify against the
 * implementation.
 */
export declare const createTarEntryStream: (archive: tar.Pack, pathFactory: (index?: number) => string, maxSize?: number) => Writable;
//# sourceMappingURL=utils.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../../../../src/file/providers/destination/utils.ts"],"names":[],"mappings":";AAAA,OAAO,EAAE,QAAQ,EAAE,MAAM,QAAQ,CAAC;AAElC,OAAO,GAAG,MAAM,YAAY,CAAC;AAE7B;;;GAGG;AACH,eAAO,MAAM,qBAAqB,SACzB,MAAM,6BACI,MAQhB,CAAC;AAEJ,eAAO,MAAM,oBAAoB,YACtB,IAAI,IAAI,eACJ,CAAC,KAAK,CAAC,EAAE,MAAM,KAAK,MAAM,+BAuDxC,CAAC"}

View File

@@ -0,0 +1,63 @@
'use strict';
var stream = require('stream');
var path = require('path');
/**
 * Create a file path factory for a given type prefix. The returned function
 * maps a numeric index to a path such as `entities/entities_00007.jsonl`.
 * Posix separators are used so archives look identical regardless of the
 * host operating system.
 */ const createFilePathFactory = (type)=>{
    return (fileIndex = 0)=>{
        const paddedIndex = String(fileIndex).padStart(5, '0');
        return path.posix.join(type, `${type}_${paddedIndex}.jsonl`);
    };
};
const createTarEntryStream = (archive, pathFactory, maxSize = 2.56e8)=>{
let fileIndex = 0;
let buffer = '';
const flush = async ()=>{
if (!buffer) {
return;
}
fileIndex += 1;
const name = pathFactory(fileIndex);
const size = buffer.length;
await new Promise((resolve, reject)=>{
archive.entry({
name,
size
}, buffer, (err)=>{
if (err) {
reject(err);
}
resolve();
});
});
buffer = '';
};
const push = (chunk)=>{
buffer += chunk;
};
return new stream.Writable({
async destroy (err, callback) {
await flush();
callback(err);
},
async write (chunk, _encoding, callback) {
const size = chunk.length;
if (chunk.length > maxSize) {
callback(new Error(`payload too large: ${chunk.length}>${maxSize}`));
return;
}
if (buffer.length + size > maxSize) {
await flush();
}
push(chunk);
callback(null);
}
});
};
exports.createFilePathFactory = createFilePathFactory;
exports.createTarEntryStream = createTarEntryStream;
//# sourceMappingURL=utils.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"utils.js","sources":["../../../../src/file/providers/destination/utils.ts"],"sourcesContent":["import { Writable } from 'stream';\nimport { posix } from 'path';\nimport tar from 'tar-stream';\n\n/**\n * Create a file path factory for a given path & prefix.\n * Upon being called, the factory will return a file path for a given index\n */\nexport const createFilePathFactory =\n (type: string) =>\n (fileIndex = 0): string => {\n // always write tar files with posix paths so we have a standard format for paths regardless of system\n return posix.join(\n // \"{type}\" directory\n type,\n // \"${type}_XXXXX.jsonl\" file\n `${type}_${String(fileIndex).padStart(5, '0')}.jsonl`\n );\n };\n\nexport const createTarEntryStream = (\n archive: tar.Pack,\n pathFactory: (index?: number) => string,\n maxSize = 2.56e8\n) => {\n let fileIndex = 0;\n let buffer = '';\n\n const flush = async () => {\n if (!buffer) {\n return;\n }\n\n fileIndex += 1;\n const name = pathFactory(fileIndex);\n const size = buffer.length;\n\n await new Promise<void>((resolve, reject) => {\n archive.entry({ name, size }, buffer, (err) => {\n if (err) {\n reject(err);\n }\n\n resolve();\n });\n });\n\n buffer = '';\n };\n\n const push = (chunk: string | Buffer) => {\n buffer += chunk;\n };\n\n return new Writable({\n async destroy(err, callback) {\n await flush();\n callback(err);\n },\n\n async write(chunk, _encoding, callback) {\n const size = chunk.length;\n\n if (chunk.length > maxSize) {\n callback(new Error(`payload too large: ${chunk.length}>${maxSize}`));\n return;\n }\n\n if (buffer.length + size > maxSize) {\n await flush();\n }\n\n push(chunk);\n\n callback(null);\n },\n 
});\n};\n"],"names":["createFilePathFactory","type","fileIndex","posix","join","String","padStart","createTarEntryStream","archive","pathFactory","maxSize","buffer","flush","name","size","length","Promise","resolve","reject","entry","err","push","chunk","Writable","destroy","callback","write","_encoding","Error"],"mappings":";;;;;AAIA;;;AAGC,IACYA,MAAAA,qBAAAA,GACX,CAACC,IACD,GAAA,CAACC,YAAY,CAAC,GAAA;;QAEZ,OAAOC,UAAAA,CAAMC,IAAI;AAEfH,QAAAA,IAAAA;QAEA,CAAC,EAAEA,IAAK,CAAA,CAAC,EAAEI,MAAAA,CAAOH,SAAWI,CAAAA,CAAAA,QAAQ,CAAC,CAAA,EAAG,GAAK,CAAA,CAAA,MAAM,CAAC,CAAA;;MAI9CC,oBAAuB,GAAA,CAClCC,OACAC,EAAAA,WAAAA,EACAC,UAAU,MAAM,GAAA;AAEhB,IAAA,IAAIR,SAAY,GAAA,CAAA;AAChB,IAAA,IAAIS,MAAS,GAAA,EAAA;AAEb,IAAA,MAAMC,KAAQ,GAAA,UAAA;AACZ,QAAA,IAAI,CAACD,MAAQ,EAAA;AACX,YAAA;AACF;QAEAT,SAAa,IAAA,CAAA;AACb,QAAA,MAAMW,OAAOJ,WAAYP,CAAAA,SAAAA,CAAAA;QACzB,MAAMY,IAAAA,GAAOH,OAAOI,MAAM;QAE1B,MAAM,IAAIC,OAAc,CAAA,CAACC,OAASC,EAAAA,MAAAA,GAAAA;AAChCV,YAAAA,OAAAA,CAAQW,KAAK,CAAC;AAAEN,gBAAAA,IAAAA;AAAMC,gBAAAA;AAAK,aAAA,EAAGH,QAAQ,CAACS,GAAAA,GAAAA;AACrC,gBAAA,IAAIA,GAAK,EAAA;oBACPF,MAAOE,CAAAA,GAAAA,CAAAA;AACT;AAEAH,gBAAAA,OAAAA,EAAAA;AACF,aAAA,CAAA;AACF,SAAA,CAAA;QAEAN,MAAS,GAAA,EAAA;AACX,KAAA;AAEA,IAAA,MAAMU,OAAO,CAACC,KAAAA,GAAAA;QACZX,MAAUW,IAAAA,KAAAA;AACZ,KAAA;AAEA,IAAA,OAAO,IAAIC,eAAS,CAAA;QAClB,MAAMC,OAAAA,CAAAA,CAAQJ,GAAG,EAAEK,QAAQ,EAAA;YACzB,MAAMb,KAAAA,EAAAA;YACNa,QAASL,CAAAA,GAAAA,CAAAA;AACX,SAAA;AAEA,QAAA,MAAMM,KAAMJ,CAAAA,CAAAA,KAAK,EAAEK,SAAS,EAAEF,QAAQ,EAAA;YACpC,MAAMX,IAAAA,GAAOQ,MAAMP,MAAM;YAEzB,IAAIO,KAAAA,CAAMP,MAAM,GAAGL,OAAS,EAAA;gBAC1Be,QAAS,CAAA,IAAIG,KAAM,CAAA,CAAC,mBAAmB,EAAEN,KAAMP,CAAAA,MAAM,CAAC,CAAC,EAAEL,OAAAA,CAAQ,CAAC,CAAA,CAAA;AAClE,gBAAA;AACF;AAEA,YAAA,IAAIC,MAAOI,CAAAA,MAAM,GAAGD,IAAAA,GAAOJ,OAAS,EAAA;gBAClC,MAAME,KAAAA,EAAAA;AACR;YAEAS,IAAKC,CAAAA,KAAAA,CAAAA;YAELG,QAAS,CAAA,IAAA,CAAA;AACX;AACF,KAAA,CAAA;AACF;;;;;"}

View File

@@ -0,0 +1,60 @@
import { Writable } from 'stream';
import { posix } from 'path';
/**
 * Create a file path factory for a given type prefix. The returned function
 * maps a numeric index to a path such as `entities/entities_00007.jsonl`.
 * Posix separators are used so archives look identical regardless of the
 * host operating system.
 */ const createFilePathFactory = (type)=>{
    return (fileIndex = 0)=>{
        const paddedIndex = String(fileIndex).padStart(5, '0');
        return posix.join(type, `${type}_${paddedIndex}.jsonl`);
    };
};
const createTarEntryStream = (archive, pathFactory, maxSize = 2.56e8)=>{
let fileIndex = 0;
let buffer = '';
const flush = async ()=>{
if (!buffer) {
return;
}
fileIndex += 1;
const name = pathFactory(fileIndex);
const size = buffer.length;
await new Promise((resolve, reject)=>{
archive.entry({
name,
size
}, buffer, (err)=>{
if (err) {
reject(err);
}
resolve();
});
});
buffer = '';
};
const push = (chunk)=>{
buffer += chunk;
};
return new Writable({
async destroy (err, callback) {
await flush();
callback(err);
},
async write (chunk, _encoding, callback) {
const size = chunk.length;
if (chunk.length > maxSize) {
callback(new Error(`payload too large: ${chunk.length}>${maxSize}`));
return;
}
if (buffer.length + size > maxSize) {
await flush();
}
push(chunk);
callback(null);
}
});
};
export { createFilePathFactory, createTarEntryStream };
//# sourceMappingURL=utils.mjs.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"utils.mjs","sources":["../../../../src/file/providers/destination/utils.ts"],"sourcesContent":["import { Writable } from 'stream';\nimport { posix } from 'path';\nimport tar from 'tar-stream';\n\n/**\n * Create a file path factory for a given path & prefix.\n * Upon being called, the factory will return a file path for a given index\n */\nexport const createFilePathFactory =\n (type: string) =>\n (fileIndex = 0): string => {\n // always write tar files with posix paths so we have a standard format for paths regardless of system\n return posix.join(\n // \"{type}\" directory\n type,\n // \"${type}_XXXXX.jsonl\" file\n `${type}_${String(fileIndex).padStart(5, '0')}.jsonl`\n );\n };\n\nexport const createTarEntryStream = (\n archive: tar.Pack,\n pathFactory: (index?: number) => string,\n maxSize = 2.56e8\n) => {\n let fileIndex = 0;\n let buffer = '';\n\n const flush = async () => {\n if (!buffer) {\n return;\n }\n\n fileIndex += 1;\n const name = pathFactory(fileIndex);\n const size = buffer.length;\n\n await new Promise<void>((resolve, reject) => {\n archive.entry({ name, size }, buffer, (err) => {\n if (err) {\n reject(err);\n }\n\n resolve();\n });\n });\n\n buffer = '';\n };\n\n const push = (chunk: string | Buffer) => {\n buffer += chunk;\n };\n\n return new Writable({\n async destroy(err, callback) {\n await flush();\n callback(err);\n },\n\n async write(chunk, _encoding, callback) {\n const size = chunk.length;\n\n if (chunk.length > maxSize) {\n callback(new Error(`payload too large: ${chunk.length}>${maxSize}`));\n return;\n }\n\n if (buffer.length + size > maxSize) {\n await flush();\n }\n\n push(chunk);\n\n callback(null);\n },\n 
});\n};\n"],"names":["createFilePathFactory","type","fileIndex","posix","join","String","padStart","createTarEntryStream","archive","pathFactory","maxSize","buffer","flush","name","size","length","Promise","resolve","reject","entry","err","push","chunk","Writable","destroy","callback","write","_encoding","Error"],"mappings":";;;AAIA;;;AAGC,IACYA,MAAAA,qBAAAA,GACX,CAACC,IACD,GAAA,CAACC,YAAY,CAAC,GAAA;;QAEZ,OAAOC,KAAAA,CAAMC,IAAI;AAEfH,QAAAA,IAAAA;QAEA,CAAC,EAAEA,IAAK,CAAA,CAAC,EAAEI,MAAAA,CAAOH,SAAWI,CAAAA,CAAAA,QAAQ,CAAC,CAAA,EAAG,GAAK,CAAA,CAAA,MAAM,CAAC,CAAA;;MAI9CC,oBAAuB,GAAA,CAClCC,OACAC,EAAAA,WAAAA,EACAC,UAAU,MAAM,GAAA;AAEhB,IAAA,IAAIR,SAAY,GAAA,CAAA;AAChB,IAAA,IAAIS,MAAS,GAAA,EAAA;AAEb,IAAA,MAAMC,KAAQ,GAAA,UAAA;AACZ,QAAA,IAAI,CAACD,MAAQ,EAAA;AACX,YAAA;AACF;QAEAT,SAAa,IAAA,CAAA;AACb,QAAA,MAAMW,OAAOJ,WAAYP,CAAAA,SAAAA,CAAAA;QACzB,MAAMY,IAAAA,GAAOH,OAAOI,MAAM;QAE1B,MAAM,IAAIC,OAAc,CAAA,CAACC,OAASC,EAAAA,MAAAA,GAAAA;AAChCV,YAAAA,OAAAA,CAAQW,KAAK,CAAC;AAAEN,gBAAAA,IAAAA;AAAMC,gBAAAA;AAAK,aAAA,EAAGH,QAAQ,CAACS,GAAAA,GAAAA;AACrC,gBAAA,IAAIA,GAAK,EAAA;oBACPF,MAAOE,CAAAA,GAAAA,CAAAA;AACT;AAEAH,gBAAAA,OAAAA,EAAAA;AACF,aAAA,CAAA;AACF,SAAA,CAAA;QAEAN,MAAS,GAAA,EAAA;AACX,KAAA;AAEA,IAAA,MAAMU,OAAO,CAACC,KAAAA,GAAAA;QACZX,MAAUW,IAAAA,KAAAA;AACZ,KAAA;AAEA,IAAA,OAAO,IAAIC,QAAS,CAAA;QAClB,MAAMC,OAAAA,CAAAA,CAAQJ,GAAG,EAAEK,QAAQ,EAAA;YACzB,MAAMb,KAAAA,EAAAA;YACNa,QAASL,CAAAA,GAAAA,CAAAA;AACX,SAAA;AAEA,QAAA,MAAMM,KAAMJ,CAAAA,CAAAA,KAAK,EAAEK,SAAS,EAAEF,QAAQ,EAAA;YACpC,MAAMX,IAAAA,GAAOQ,MAAMP,MAAM;YAEzB,IAAIO,KAAAA,CAAMP,MAAM,GAAGL,OAAS,EAAA;gBAC1Be,QAAS,CAAA,IAAIG,KAAM,CAAA,CAAC,mBAAmB,EAAEN,KAAMP,CAAAA,MAAM,CAAC,CAAC,EAAEL,OAAAA,CAAQ,CAAC,CAAA,CAAA;AAClE,gBAAA;AACF;AAEA,YAAA,IAAIC,MAAOI,CAAAA,MAAM,GAAGD,IAAAA,GAAOJ,OAAS,EAAA;gBAClC,MAAME,KAAAA,EAAAA;AACR;YAEAS,IAAKC,CAAAA,KAAAA,CAAAA;YAELG,QAAS,CAAA,IAAA,CAAA;AACX;AACF,KAAA,CAAA;AACF;;;;"}