Integrate with PackXZExtract to support pack.xz libs.

All pack.xz libraries now have their MD5s properly calculated.
Sha1 validations are performed on jar libraries. The checksums
forge provides for compressed files are neither the sha1 nor the md5
of the initial or the extracted file. Ignoring those for now.

Still TODO is integration with baseurl. Might move both that
and base path to the .env file to reduce redundancy in command processing.
This commit is contained in:
Daniel Scalzi
2020-01-12 05:27:35 -05:00
parent 0674bd5808
commit 21d80fef82
9 changed files with 173 additions and 9 deletions

View File

@@ -1,12 +1,14 @@
import AdmZip from 'adm-zip'
import { createHash } from 'crypto'
import { lstat, readFile, Stats } from 'fs-extra'
import { copy, lstat, mkdirs, pathExists, readFile, remove, Stats } from 'fs-extra'
import { basename, join } from 'path'
import { VersionManifest } from '../../../model/forge/versionmanifest'
import { Artifact } from '../../../model/spec/artifact'
import { Module } from '../../../model/spec/module'
import { Type } from '../../../model/spec/type'
import { ForgeRepoStructure } from '../../../model/struct/repo/forgerepo.struct'
import { MavenUtil } from '../../../util/maven'
import { PackXZExtractWrapper } from '../../../util/PackXZExtractWrapper'
import { ForgeResolver } from '../forge.resolver'
export class Forge18Adapter extends ForgeResolver {
@@ -80,6 +82,8 @@ export class Forge18Adapter extends ForgeResolver {
subModules: []
}
const postProcessQueue = []
for (const lib of versionManifest.libraries) {
if (lib.name.startsWith('net.minecraftforge:forge:')) {
// We've already processed forge.
@@ -91,23 +95,66 @@ export class Forge18Adapter extends ForgeResolver {
const extension = this.determineExtension(lib.checksums)
const localPath = libRepo.getArtifactById(lib.name, extension) as string
if (!await libRepo.artifactExists(localPath)) {
const postProcess = extension === 'jar.pack.xz'
let queueDownload = !await libRepo.artifactExists(localPath)
let libBuf
if (!queueDownload) {
libBuf = await readFile(localPath)
// VERIFY HASH
if (!postProcess) { // Checksums for .pack.xz in the version.json are completely useless.
if (lib.checksums != null) {
const sha1 = createHash('sha1').update(libBuf).digest('hex')
if (sha1 !== lib.checksums[0]) {
console.debug('Hashes do not match, redownloading..')
queueDownload = true
}
}
}
} else {
console.debug(`Not found locally, downloading..`)
queueDownload = true
}
if (queueDownload) {
await libRepo.downloadArtifactById(lib.url || 'https://libraries.minecraft.net/', lib.name, extension)
libBuf = await readFile(localPath)
} else {
console.debug('Using local copy.')
}
const libBuf = await readFile(localPath)
const stats = await lstat(localPath)
const mavenComponents = MavenUtil.getMavenComponents(lib.name)
const properId = MavenUtil.mavenComponentsToIdentifier(
mavenComponents.group, mavenComponents.artifact, mavenComponents.version,
mavenComponents.classifier, extension
)
forgeModule.subModules?.push({
id: lib.name,
name: `Minecraft Forge (${MavenUtil.getMavenComponents(lib.name)?.artifact})`,
id: properId,
name: `Minecraft Forge (${mavenComponents?.artifact})`,
type: Type.Library,
artifact: this.generateArtifact(libBuf, stats)
artifact: this.generateArtifact(libBuf as Buffer, stats)
})
if (postProcess) {
postProcessQueue.push({
id: properId,
localPath
})
}
}
for (const entry of await this.processPackXZFiles(postProcessQueue)) {
const el = forgeModule.subModules?.find((element) => element.id === entry.id)
if (el != null) {
el.artifact.MD5 = entry.MD5
} else {
console.error(`Error during post processing, could not update ${entry.id}`)
}
}
return forgeModule
@@ -125,4 +172,45 @@ export class Forge18Adapter extends ForgeResolver {
return checksums != null && checksums.length > 1 ? 'jar.pack.xz' : 'jar'
}
private async processPackXZFiles(
processingQueue: Array<{id: string, localPath: string}>): Promise<Array<{id: string, MD5: string}>> {
const accumulator = []
const tempDir = this.repoStructure.getTempDirectory()
if (await pathExists(tempDir)) {
await remove(tempDir)
}
await mkdirs(tempDir)
const files = []
for (const entry of processingQueue) {
const tmpFile = join(tempDir, basename(entry.localPath))
await copy(entry.localPath, tmpFile)
files.push(tmpFile)
}
console.debug('Spawning PackXZExtract.')
await PackXZExtractWrapper.extractUnpack(files)
console.debug('All filex extracted, calculating hashes..')
for (const entry of processingQueue) {
const tmpFileName = basename(entry.localPath)
const tmpFile = join(tempDir, tmpFileName.substring(0, tmpFileName.indexOf('.pack.xz')))
const buf = await readFile(tmpFile)
accumulator.push({
id: entry.id,
MD5: createHash('md5').update(buf).digest('hex')
})
}
console.debug('Complete, removing temp directory..')
await remove(tempDir)
return accumulator
}
}