Integrate with PackXZExtract to support pack.xz libs.

All pack.xz libraries now have their MD5s properly calculated.
Sha1 validations are performed on jar libraries. The checksums
forge provides for compressed files are neither the SHA-1 nor the MD5
of the initial or extracted file. Ignoring those for now.

Still TODO is integration with baseurl. Might move both that
and base path to the .env file to reduce redundancy in command processing.
This commit is contained in:
Daniel Scalzi
2020-01-12 05:27:35 -05:00
parent 0674bd5808
commit 21d80fef82
9 changed files with 173 additions and 9 deletions

View File

@@ -2,6 +2,23 @@
Generate a distribution.json for Helios.
## Requirements
* Node.js 12
* Java 8+ (https://adoptopenjdk.net/)
* This is required to process [XZ](https://tukaani.org/xz/format.html) files.
## Setup
1. Clone the repository
2. Install the dependencies (`npm i`)
3. Create a `.env` file at the root directory and set the required values.
Example
```properties
JAVA_EXECUTABLE=C:\Program Files\AdoptOpenJDK\jdk-8.0.222.10-hotspot\bin\java.exe
```
## Usage
Nebula is not complete. The following usage is tentative.

Binary file not shown.

5
package-lock.json generated
View File

@@ -222,6 +222,11 @@
"integrity": "sha512-s2+XdvhPCOF01LRQBC8hf4vhbVmI2CGS5aZnxLJlT5FtdhPCDFq80q++zK2KlrVorVDdL5BOGZ/VfLrVtYNF+Q==",
"dev": true
},
"dotenv": {
"version": "8.2.0",
"resolved": "https://registry.npmjs.org/dotenv/-/dotenv-8.2.0.tgz",
"integrity": "sha512-8sJ78ElpbDJBHNeBzUbUVLsqKdccaa/BXF1uPTw3GrvQTBgrQrtObr2mUrE38vzYd8cEv+m/JBfDLioYcfXoaw=="
},
"emoji-regex": {
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",

View File

@@ -37,6 +37,7 @@
"dependencies": {
"adm-zip": "^0.4.13",
"axios": "^0.19.1",
"dotenv": "^8.2.0",
"fs-extra": "^8.1.0",
"yargs": "^15.1.0"
}

View File

@@ -1,11 +1,15 @@
/* tslint:disable:no-shadowed-variable */
import dotenv from 'dotenv'
import { writeFile } from 'fs-extra'
import { resolve as resolvePath } from 'path'
import { URL } from 'url'
import { inspect } from 'util'
import yargs from 'yargs'
import { DistributionStructure } from './model/struct/model/distribution.struct'
import { ResolverRegistry } from './resolver/ResolverRegistry'
dotenv.config()
function rootOption(yargs: yargs.Argv) {
return yargs.option('root', {
describe: 'File structure root.',
@@ -176,10 +180,11 @@ const testCommand: yargs.CommandModule = {
},
handler: async (argv) => {
console.debug(`Invoked test with mcVer ${argv.mcVer} forgeVer ${argv.forgeVer}`)
console.log(process.cwd())
const resolver = ResolverRegistry.getForgeResolver('1.12.2', '14.23.5.2847', 'D:/TestRoot2', 'D:/TestRoot2')
if (resolver != null) {
const mdl = await resolver.getModule()
console.log(mdl)
console.log(inspect(mdl, false, null, true))
}
}
}

View File

@@ -1,3 +1,4 @@
import { join } from 'path'
import { BaseFileStructure } from '../BaseFileStructure'
import { ForgeRepoStructure } from './forgerepo.struct'
import { LibRepoStructure } from './librepo.struct'
@@ -38,4 +39,8 @@ export class RepoStructure extends BaseFileStructure {
return this.libRepoStruct
}
public getTempDirectory() {
return join(this.absoluteRoot, 'temp')
}
}

View File

@@ -1,12 +1,14 @@
import AdmZip from 'adm-zip'
import { createHash } from 'crypto'
import { lstat, readFile, Stats } from 'fs-extra'
import { copy, lstat, mkdirs, pathExists, readFile, remove, Stats } from 'fs-extra'
import { basename, join } from 'path'
import { VersionManifest } from '../../../model/forge/versionmanifest'
import { Artifact } from '../../../model/spec/artifact'
import { Module } from '../../../model/spec/module'
import { Type } from '../../../model/spec/type'
import { ForgeRepoStructure } from '../../../model/struct/repo/forgerepo.struct'
import { MavenUtil } from '../../../util/maven'
import { PackXZExtractWrapper } from '../../../util/PackXZExtractWrapper'
import { ForgeResolver } from '../forge.resolver'
export class Forge18Adapter extends ForgeResolver {
@@ -80,6 +82,8 @@ export class Forge18Adapter extends ForgeResolver {
subModules: []
}
const postProcessQueue = []
for (const lib of versionManifest.libraries) {
if (lib.name.startsWith('net.minecraftforge:forge:')) {
// We've already processed forge.
@@ -91,23 +95,66 @@ export class Forge18Adapter extends ForgeResolver {
const extension = this.determineExtension(lib.checksums)
const localPath = libRepo.getArtifactById(lib.name, extension) as string
if (!await libRepo.artifactExists(localPath)) {
const postProcess = extension === 'jar.pack.xz'
let queueDownload = !await libRepo.artifactExists(localPath)
let libBuf
if (!queueDownload) {
libBuf = await readFile(localPath)
// VERIFY HASH
if (!postProcess) { // Checksums for .pack.xz in the version.json are completely useless.
if (lib.checksums != null) {
const sha1 = createHash('sha1').update(libBuf).digest('hex')
if (sha1 !== lib.checksums[0]) {
console.debug('Hashes do not match, redownloading..')
queueDownload = true
}
}
}
} else {
console.debug(`Not found locally, downloading..`)
queueDownload = true
}
if (queueDownload) {
await libRepo.downloadArtifactById(lib.url || 'https://libraries.minecraft.net/', lib.name, extension)
libBuf = await readFile(localPath)
} else {
console.debug('Using local copy.')
}
const libBuf = await readFile(localPath)
const stats = await lstat(localPath)
const mavenComponents = MavenUtil.getMavenComponents(lib.name)
const properId = MavenUtil.mavenComponentsToIdentifier(
mavenComponents.group, mavenComponents.artifact, mavenComponents.version,
mavenComponents.classifier, extension
)
forgeModule.subModules?.push({
id: lib.name,
name: `Minecraft Forge (${MavenUtil.getMavenComponents(lib.name)?.artifact})`,
id: properId,
name: `Minecraft Forge (${mavenComponents?.artifact})`,
type: Type.Library,
artifact: this.generateArtifact(libBuf, stats)
artifact: this.generateArtifact(libBuf as Buffer, stats)
})
if (postProcess) {
postProcessQueue.push({
id: properId,
localPath
})
}
}
for (const entry of await this.processPackXZFiles(postProcessQueue)) {
const el = forgeModule.subModules?.find((element) => element.id === entry.id)
if (el != null) {
el.artifact.MD5 = entry.MD5
} else {
console.error(`Error during post processing, could not update ${entry.id}`)
}
}
return forgeModule
@@ -125,4 +172,45 @@ export class Forge18Adapter extends ForgeResolver {
return checksums != null && checksums.length > 1 ? 'jar.pack.xz' : 'jar'
}
private async processPackXZFiles(
processingQueue: Array<{id: string, localPath: string}>): Promise<Array<{id: string, MD5: string}>> {
const accumulator = []
const tempDir = this.repoStructure.getTempDirectory()
if (await pathExists(tempDir)) {
await remove(tempDir)
}
await mkdirs(tempDir)
const files = []
for (const entry of processingQueue) {
const tmpFile = join(tempDir, basename(entry.localPath))
await copy(entry.localPath, tmpFile)
files.push(tmpFile)
}
console.debug('Spawning PackXZExtract.')
await PackXZExtractWrapper.extractUnpack(files)
console.debug('All filex extracted, calculating hashes..')
for (const entry of processingQueue) {
const tmpFileName = basename(entry.localPath)
const tmpFile = join(tempDir, tmpFileName.substring(0, tmpFileName.indexOf('.pack.xz')))
const buf = await readFile(tmpFile)
accumulator.push({
id: entry.id,
MD5: createHash('md5').update(buf).digest('hex')
})
}
console.debug('Complete, removing temp directory..')
await remove(tempDir)
return accumulator
}
}

View File

@@ -0,0 +1,43 @@
import { spawn } from 'child_process'
import { join } from 'path'
export class PackXZExtractWrapper {
public static getJavaExecutable() {
return process.env.JAVA_EXECUTABLE as string
}
public static getPackXZExtract() {
return join(process.cwd(), 'libraries', 'java', 'PackXZExtract.jar')
}
public static extractUnpack(paths: string[]) {
return PackXZExtractWrapper.execute('-packxz', paths)
}
public static extract(paths: string[]) {
return PackXZExtractWrapper.execute('-xz', paths)
}
public static unpack(paths: string[]) {
return PackXZExtractWrapper.execute('-pack', paths)
}
private static execute(command: string, paths: string[]) {
return new Promise((resolve, reject) => {
const child = spawn(PackXZExtractWrapper.getJavaExecutable(), [
'-jar',
PackXZExtractWrapper.getPackXZExtract(),
command,
paths.join(',')
])
child.stdout.on('data', (data) => console.log('[PackXZExtract]', data.toString('utf8')))
child.stderr.on('data', (data) => console.error('[PackXZExtract]', data.toString('utf8')))
child.on('close', (code, signal) => {
console.log('[PackXZExtract]', 'Exited with code', code)
resolve()
})
})
}
}

View File

@@ -17,7 +17,7 @@ export class MavenUtil {
public static getMavenComponents(id: string, extension = 'jar') {
if (!MavenUtil.isMavenIdentifier(id)) {
return null
throw new Error('Id is not a maven identifier.')
}
let result
@@ -38,7 +38,7 @@ export class MavenUtil {
}
}
return null
throw new Error('Failed to process maven data.')
}
public static mavenIdentifierToString(id: string, extension = 'jar') {