2017-05-16 23:26:46 -07:00
|
|
|
// Requirements
|
2018-06-04 20:08:03 -07:00
|
|
|
const AdmZip = require('adm-zip')
|
|
|
|
const async = require('async')
|
2017-05-17 22:49:28 -07:00
|
|
|
const child_process = require('child_process')
|
2018-06-04 20:08:03 -07:00
|
|
|
const crypto = require('crypto')
|
|
|
|
const EventEmitter = require('events')
|
2018-12-01 05:20:42 -08:00
|
|
|
const fs = require('fs-extra')
|
2022-11-26 13:57:30 -08:00
|
|
|
const nodeDiskInfo = require('node-disk-info')
|
2021-10-14 20:17:40 -07:00
|
|
|
const StreamZip = require('node-stream-zip')
|
2018-06-04 20:08:03 -07:00
|
|
|
const path = require('path')
|
|
|
|
const Registry = require('winreg')
|
|
|
|
const request = require('request')
|
|
|
|
const tar = require('tar-fs')
|
|
|
|
const zlib = require('zlib')
|
2017-05-16 23:26:46 -07:00
|
|
|
|
2018-07-22 08:40:15 -07:00
|
|
|
const ConfigManager = require('./configmanager')
|
|
|
|
const DistroManager = require('./distromanager')
|
2018-09-22 23:19:16 -07:00
|
|
|
const isDev = require('./isdev')
|
2018-07-22 08:40:15 -07:00
|
|
|
|
2017-05-16 23:26:46 -07:00
|
|
|
// Classes
|
|
|
|
|
|
|
|
/** Class representing a base asset. */
class Asset {

    /**
     * Create an asset.
     * 
     * @param {any} id The id of the asset.
     * @param {string} hash The hash value of the asset.
     * @param {number} size The size in bytes of the asset.
     * @param {string} from The url where the asset can be found.
     * @param {string} to The absolute local file path of the asset.
     */
    constructor(id, hash, size, from, to){
        // Copy all construction data onto the instance in declaration order.
        Object.assign(this, { id, hash, size, from, to })
    }

}
|
|
|
|
|
|
|
|
/** Class representing a mojang library. */
class Library extends Asset {

    /**
     * Converts the process.platform OS names to match mojang's OS names.
     * 
     * @returns {string} 'osx', 'windows', or 'linux' for the supported
     * platforms, otherwise 'unknown_os'.
     */
    static mojangFriendlyOS(){
        switch(process.platform){
            case 'darwin':
                return 'osx'
            case 'win32':
                return 'windows'
            case 'linux':
                return 'linux'
            default:
                return 'unknown_os'
        }
    }

    /**
     * Checks whether or not a library is valid for download on a particular OS, following
     * the rule format specified in the mojang version data index. If the allow property has
     * an OS specified, then the library can ONLY be downloaded on that OS. If the disallow
     * property has instead specified an OS, the library can be downloaded on any OS EXCLUDING
     * the one specified.
     * 
     * If the rules are undefined, the natives property will be checked for a matching entry
     * for the current OS.
     * 
     * @param {Array.<Object>} rules The Library's download rules.
     * @param {Object} natives The Library's natives object.
     * @returns {boolean} True if the Library follows the specified rules, otherwise false.
     */
    static validateRules(rules, natives){
        // No rules at all: fall back to the natives map when one is present.
        if(rules == null) {
            return natives == null ? true : natives[Library.mojangFriendlyOS()] != null
        }

        const currentOS = Library.mojangFriendlyOS()
        for(const rule of rules){
            const action = rule.action
            const osProp = rule.os
            // The first rule naming both an action and an OS decides the outcome.
            if(action != null && osProp != null){
                if(action === 'allow'){
                    return osProp.name === currentOS
                }
                if(action === 'disallow'){
                    return osProp.name !== currentOS
                }
            }
        }
        // No decisive rule found: allow by default.
        return true
    }

}
|
|
|
|
|
2017-05-21 15:06:48 -07:00
|
|
|
/** Class representing a module queued from the custom distribution index. */
class DistroModule extends Asset {

    /**
     * Create a DistroModule. This is for processing,
     * not equivalent to the module objects in the
     * distro index.
     * 
     * @param {any} id The id of the asset.
     * @param {string} hash The hash value of the asset.
     * @param {number} size The size in bytes of the asset.
     * @param {string} from The url where the asset can be found.
     * @param {string} to The absolute local file path of the asset.
     * @param {string} type The module type.
     */
    constructor(id, hash, size, from, to, type){
        // Delegate the common asset fields, then record the distro module type.
        super(id, hash, size, from, to)
        this.type = type
    }

}
|
|
|
|
|
2017-05-16 23:26:46 -07:00
|
|
|
/**
 * Class representing a download tracker. This is used to store meta data
 * about a download queue, including the queue itself.
 */
class DLTracker {

    /**
     * Create a DLTracker
     * 
     * @param {Array.<Asset>} dlqueue An array containing assets queued for download.
     * @param {number} dlsize The combined size of each asset in the download queue array.
     * @param {function(Asset)} callback Optional callback which is called when an asset finishes downloading.
     */
    constructor(dlqueue, dlsize, callback = null){
        Object.assign(this, { dlqueue, dlsize, callback })
    }

}
|
|
|
|
|
2019-03-05 20:05:01 -08:00
|
|
|
class Util {

    /**
     * Returns true if the actual version is greater than
     * or equal to the desired version.
     * 
     * Versions are compared numerically component-by-component from the
     * left (e.g. '1.12.2' => [1, 12, 2]). Components missing from the
     * actual version are treated as 0.
     * 
     * @param {string} desired The desired version.
     * @param {string} actual The actual version.
     * @returns {boolean} True if actual >= desired, otherwise false.
     */
    static mcVersionAtLeast(desired, actual){
        const des = desired.split('.')
        const act = actual.split('.')

        for(let i=0; i<des.length; i++){
            const d = parseInt(des[i], 10)
            const a = act[i] != null ? parseInt(act[i], 10) : 0
            // A non-numeric component cannot satisfy the requirement.
            if(Number.isNaN(a)){
                return false
            }
            // Fix: a larger earlier component now decides immediately
            // (e.g. actual 2.0 vs desired 1.17 is at-least). The old code
            // required EVERY component to be >=, which wrongly failed
            // such comparisons.
            if(a > d){
                return true
            }
            if(a < d){
                return false
            }
        }
        // All compared components equal.
        return true
    }

    /**
     * Determine whether a Forge version uses the ForgeGradle 3 (new
     * installer) format. True for all of MC 1.13+, and for 1.12.2 Forge
     * builds newer than 14.23.5.2847.
     * 
     * @param {string} mcVersion The Minecraft version, ex. 1.12.2.
     * @param {string} forgeVersion The Forge version string, ex. 1.12.2-14.23.5.2854.
     * @returns {boolean} True if the version uses ForgeGradle 3.
     * @throws {Error} If the Forge version string cannot be parsed.
     */
    static isForgeGradle3(mcVersion, forgeVersion) {

        if(Util.mcVersionAtLeast('1.13', mcVersion)) {
            return true
        }

        try {
            const forgeVer = forgeVersion.split('-')[1]

            // Last ForgeGradle 2 build for 1.12.2.
            const maxFG2 = [14, 23, 5, 2847]
            const verSplit = forgeVer.split('.').map(v => Number(v))

            for(let i=0; i<maxFG2.length; i++) {
                if(verSplit[i] > maxFG2[i]) {
                    return true
                } else if(verSplit[i] < maxFG2[i]) {
                    return false
                }
            }

            // Exactly the boundary build: still ForgeGradle 2.
            return false

        } catch(err) {
            throw new Error('Forge version is complex (changed).. launcher requires a patch.')
        }
    }

    /**
     * Whether the server autoconnect feature is broken on the given
     * Forge version. Affects Forge 31.x builds older than 31.2.15.
     * 
     * @param {string} forgeVersion The Forge version, ex. 31.2.14.
     * @returns {boolean} True if autoconnect is broken on this version.
     */
    static isAutoconnectBroken(forgeVersion) {

        const minWorking = [31, 2, 15]
        const verSplit = forgeVersion.split('.').map(v => Number(v))

        if(verSplit[0] === 31) {
            for(let i=0; i<minWorking.length; i++) {
                if(verSplit[i] > minWorking[i]) {
                    return false
                } else if(verSplit[i] < minWorking[i]) {
                    return true
                }
            }
        }

        return false
    }

}
|
2017-05-16 23:26:46 -07:00
|
|
|
|
2018-05-07 15:15:59 -07:00
|
|
|
|
2019-03-05 20:05:01 -08:00
|
|
|
class JavaGuard extends EventEmitter {
|
2018-05-07 15:15:59 -07:00
|
|
|
|
2019-03-05 20:05:01 -08:00
|
|
|
    /**
     * Create a JavaGuard for the given Minecraft version.
     * 
     * @param {string} mcVersion The Minecraft version that discovered Java
     * installations will be validated against (see _validateJVMProperties).
     */
    constructor(mcVersion){
        super()
        this.mcVersion = mcVersion
    }
|
2017-05-21 15:06:48 -07:00
|
|
|
|
2018-03-31 10:05:05 -07:00
|
|
|
    /**
     * @typedef OpenJDKData
     * @property {string} uri The base uri of the JRE.
     * @property {number} size The size of the download.
     * @property {string} name The name of the artifact.
     */

    /**
     * Fetch the last open JDK binary.
     * 
     * HOTFIX: Uses Corretto 8 for macOS.
     * See: https://github.com/dscalzi/HeliosLauncher/issues/70
     * See: https://github.com/AdoptOpenJDK/openjdk-support/issues/101
     * 
     * @param {string} major The major version of Java to fetch.
     * 
     * @returns {Promise.<OpenJDKData>} Promise which resolved to an object containing the JRE download data.
     */
    static _latestOpenJDK(major = '8'){

        if(process.platform === 'darwin') {
            // macOS builds are sourced from Corretto (see HOTFIX note above).
            return this._latestCorretto(major)
        } else {
            return this._latestAdoptium(major)
        }
    }
|
|
|
|
|
2021-10-14 20:17:40 -07:00
|
|
|
    /**
     * Fetch the latest Eclipse Adoptium (Temurin) JDK binary for the
     * requested major version on the current platform (x64 only).
     * 
     * @param {string} major The major version of Java to fetch.
     * @returns {Promise.<OpenJDKData>} Promise which resolves to the download data,
     * or null if the request fails or no matching binary is published.
     */
    static _latestAdoptium(major) {

        const majorNum = Number(major)
        // Adoptium's API names platforms 'windows'/'mac'; other values
        // (e.g. 'linux') match process.platform directly.
        const sanitizedOS = process.platform === 'win32' ? 'windows' : (process.platform === 'darwin' ? 'mac' : process.platform)
        const url = `https://api.adoptium.net/v3/assets/latest/${major}/hotspot?vendor=eclipse`

        return new Promise((resolve, reject) => {
            request({url, json: true}, (err, resp, body) => {
                if(!err && body.length > 0){

                    // Find the first published asset matching our version/OS/arch needs.
                    const targetBinary = body.find(entry => {
                        return entry.version.major === majorNum
                            && entry.binary.os === sanitizedOS
                            && entry.binary.image_type === 'jdk'
                            && entry.binary.architecture === 'x64'
                    })

                    if(targetBinary != null) {
                        resolve({
                            uri: targetBinary.binary.package.link,
                            size: targetBinary.binary.package.size,
                            name: targetBinary.binary.package.name
                        })
                    } else {
                        // No x64 JDK published for this platform/major combination.
                        resolve(null)
                    }
                } else {
                    // Request error or empty body — resolve null rather than reject.
                    resolve(null)
                }
            })
        })
    }
|
|
|
|
|
|
|
|
    /**
     * Fetch the latest Amazon Corretto JDK binary for the requested major
     * version on the current platform (x64 only). Uses a HEAD request
     * against Corretto's stable 'latest' download URL.
     * 
     * @param {string} major The major version of Java to fetch.
     * @returns {Promise.<OpenJDKData>} Promise which resolves to the download data,
     * or null if the artifact is unavailable.
     */
    static _latestCorretto(major) {

        let sanitizedOS, ext

        // Map process.platform to Corretto's OS naming and archive format.
        switch(process.platform) {
            case 'win32':
                sanitizedOS = 'windows'
                ext = 'zip'
                break
            case 'darwin':
                sanitizedOS = 'macos'
                ext = 'tar.gz'
                break
            case 'linux':
                sanitizedOS = 'linux'
                ext = 'tar.gz'
                break
            default:
                // Unknown platform: pass it through and hope the URL resolves.
                sanitizedOS = process.platform
                ext = 'tar.gz'
                break
        }

        const url = `https://corretto.aws/downloads/latest/amazon-corretto-${major}-x64-${sanitizedOS}-jdk.${ext}`

        return new Promise((resolve, reject) => {
            // HEAD request: we only need the size header, not the artifact itself.
            request.head({url, json: true}, (err, resp) => {
                if(!err && resp.statusCode === 200){
                    resolve({
                        uri: url,
                        size: parseInt(resp.headers['content-length']),
                        name: url.substr(url.lastIndexOf('/')+1)
                    })
                } else {
                    // Artifact not found or network failure.
                    resolve(null)
                }
            })
        })

    }
|
|
|
|
|
2018-04-07 15:06:49 -07:00
|
|
|
/**
|
|
|
|
* Returns the path of the OS-specific executable for the given Java
|
|
|
|
* installation. Supported OS's are win32, darwin, linux.
|
|
|
|
*
|
|
|
|
* @param {string} rootDir The root directory of the Java installation.
|
2018-04-12 13:12:45 -07:00
|
|
|
* @returns {string} The path to the Java executable.
|
2018-04-07 15:06:49 -07:00
|
|
|
*/
|
|
|
|
static javaExecFromRoot(rootDir){
|
|
|
|
if(process.platform === 'win32'){
|
|
|
|
return path.join(rootDir, 'bin', 'javaw.exe')
|
|
|
|
} else if(process.platform === 'darwin'){
|
2018-04-12 19:13:26 -07:00
|
|
|
return path.join(rootDir, 'Contents', 'Home', 'bin', 'java')
|
2018-04-07 15:06:49 -07:00
|
|
|
} else if(process.platform === 'linux'){
|
|
|
|
return path.join(rootDir, 'bin', 'java')
|
|
|
|
}
|
|
|
|
return rootDir
|
|
|
|
}
|
|
|
|
|
2018-04-12 13:12:45 -07:00
|
|
|
/**
|
|
|
|
* Check to see if the given path points to a Java executable.
|
|
|
|
*
|
|
|
|
* @param {string} pth The path to check against.
|
|
|
|
* @returns {boolean} True if the path points to a Java executable, otherwise false.
|
|
|
|
*/
|
|
|
|
static isJavaExecPath(pth){
|
2022-11-26 13:57:30 -08:00
|
|
|
if(pth == null) {
|
|
|
|
return false
|
|
|
|
}
|
2018-04-12 13:12:45 -07:00
|
|
|
if(process.platform === 'win32'){
|
|
|
|
return pth.endsWith(path.join('bin', 'javaw.exe'))
|
|
|
|
} else if(process.platform === 'darwin'){
|
|
|
|
return pth.endsWith(path.join('bin', 'java'))
|
|
|
|
} else if(process.platform === 'linux'){
|
|
|
|
return pth.endsWith(path.join('bin', 'java'))
|
|
|
|
}
|
|
|
|
return false
|
|
|
|
}
|
|
|
|
|
2018-03-31 17:45:24 -07:00
|
|
|
/**
|
|
|
|
* Load Mojang's launcher.json file.
|
|
|
|
*
|
|
|
|
* @returns {Promise.<Object>} Promise which resolves to Mojang's launcher.json object.
|
|
|
|
*/
|
|
|
|
static loadMojangLauncherData(){
|
|
|
|
return new Promise((resolve, reject) => {
|
|
|
|
request.get('https://launchermeta.mojang.com/mc/launcher.json', (err, resp, body) => {
|
|
|
|
if(err){
|
|
|
|
resolve(null)
|
|
|
|
} else {
|
|
|
|
resolve(JSON.parse(body))
|
|
|
|
}
|
|
|
|
})
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2018-04-12 13:12:45 -07:00
|
|
|
/**
|
2018-04-12 18:38:27 -07:00
|
|
|
* Parses a **full** Java Runtime version string and resolves
|
2018-12-31 07:39:27 -08:00
|
|
|
* the version information. Dynamically detects the formatting
|
|
|
|
* to use.
|
2018-04-12 18:38:27 -07:00
|
|
|
*
|
|
|
|
* @param {string} verString Full version string to parse.
|
|
|
|
* @returns Object containing the version information.
|
|
|
|
*/
|
|
|
|
static parseJavaRuntimeVersion(verString){
|
2018-12-31 07:39:27 -08:00
|
|
|
const major = verString.split('.')[0]
|
|
|
|
if(major == 1){
|
2019-03-05 20:05:01 -08:00
|
|
|
return JavaGuard._parseJavaRuntimeVersion_8(verString)
|
2018-12-31 07:39:27 -08:00
|
|
|
} else {
|
2019-03-05 20:05:01 -08:00
|
|
|
return JavaGuard._parseJavaRuntimeVersion_9(verString)
|
2018-12-31 07:39:27 -08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Parses a **full** Java Runtime version string and resolves
|
|
|
|
* the version information. Uses Java 8 formatting.
|
|
|
|
*
|
|
|
|
* @param {string} verString Full version string to parse.
|
|
|
|
* @returns Object containing the version information.
|
|
|
|
*/
|
|
|
|
static _parseJavaRuntimeVersion_8(verString){
|
2018-04-12 18:38:27 -07:00
|
|
|
// 1.{major}.0_{update}-b{build}
|
|
|
|
// ex. 1.8.0_152-b16
|
|
|
|
const ret = {}
|
|
|
|
let pts = verString.split('-')
|
|
|
|
ret.build = parseInt(pts[1].substring(1))
|
2018-04-12 19:40:48 -07:00
|
|
|
pts = pts[0].split('_')
|
2018-04-12 18:38:27 -07:00
|
|
|
ret.update = parseInt(pts[1])
|
2018-04-12 19:40:48 -07:00
|
|
|
ret.major = parseInt(pts[0].split('.')[1])
|
2018-04-12 18:38:27 -07:00
|
|
|
return ret
|
|
|
|
}
|
|
|
|
|
2018-12-31 07:39:27 -08:00
|
|
|
/**
|
|
|
|
* Parses a **full** Java Runtime version string and resolves
|
|
|
|
* the version information. Uses Java 9+ formatting.
|
|
|
|
*
|
|
|
|
* @param {string} verString Full version string to parse.
|
|
|
|
* @returns Object containing the version information.
|
|
|
|
*/
|
|
|
|
static _parseJavaRuntimeVersion_9(verString){
|
|
|
|
// {major}.{minor}.{revision}+{build}
|
|
|
|
// ex. 10.0.2+13
|
|
|
|
const ret = {}
|
|
|
|
let pts = verString.split('+')
|
|
|
|
ret.build = parseInt(pts[1])
|
|
|
|
pts = pts[0].split('.')
|
|
|
|
ret.major = parseInt(pts[0])
|
|
|
|
ret.minor = parseInt(pts[1])
|
|
|
|
ret.revision = parseInt(pts[2])
|
|
|
|
return ret
|
|
|
|
}
|
|
|
|
|
2018-04-12 18:38:27 -07:00
|
|
|
    /**
     * Validates the output of a JVM's properties. Currently validates that a JRE is x64
     * and that the major = 8, update > 52.
     * 
     * Two properties must pass (arch and version) for the JVM to be
     * considered valid; `checksum` counts passes against `goal`.
     * 
     * @param {string} stderr The output to validate.
     * 
     * @returns {Promise.<Object>} A promise which resolves to a meta object about the JVM.
     * The validity is stored inside the `valid` property.
     */
    _validateJVMProperties(stderr){
        const res = stderr
        const props = res.split('\n')

        // Two checks must pass: 64-bit arch and an acceptable runtime version.
        const goal = 2
        let checksum = 0

        const meta = {}

        for(let i=0; i<props.length; i++){
            if(props[i].indexOf('sun.arch.data.model') > -1){
                let arch = props[i].split('=')[1].trim()
                arch = parseInt(arch)
                console.log(props[i].trim())
                // Only 64-bit JVMs are accepted.
                if(arch === 64){
                    meta.arch = arch
                    ++checksum
                    if(checksum === goal){
                        break
                    }
                }
            } else if(props[i].indexOf('java.runtime.version') > -1){
                let verString = props[i].split('=')[1].trim()
                console.log(props[i].trim())
                const verOb = JavaGuard.parseJavaRuntimeVersion(verString)
                // TODO implement a support matrix eventually. Right now this is good enough
                // 1.7-1.16 = Java 8
                // 1.17+ = Java 17
                // Actual support may vary, but we're going with this rule for simplicity.
                if(verOb.major < 9){
                    // Java 8
                    if(!Util.mcVersionAtLeast('1.17', this.mcVersion)){
                        // Pre-1.17 MC requires Java 8 with update > 52.
                        if(verOb.major === 8 && verOb.update > 52){
                            meta.version = verOb
                            ++checksum
                            if(checksum === goal){
                                break
                            }
                        }
                    }
                } else if(verOb.major >= 17) {
                    // Java 9+
                    if(Util.mcVersionAtLeast('1.17', this.mcVersion)){
                        meta.version = verOb
                        ++checksum
                        if(checksum === goal){
                            break
                        }
                    }
                }
                // Space included so we get only the vendor.
            } else if(props[i].lastIndexOf('java.vendor ') > -1) {
                let vendorName = props[i].split('=')[1].trim()
                console.log(props[i].trim())
                // Vendor is informational only; it does not affect validity.
                meta.vendor = vendorName
            }
        }

        meta.valid = checksum === goal

        return meta
    }
|
|
|
|
|
2017-11-30 00:00:06 -08:00
|
|
|
/**
|
2018-03-30 05:31:54 -07:00
|
|
|
* Validates that a Java binary is at least 64 bit. This makes use of the non-standard
|
|
|
|
* command line option -XshowSettings:properties. The output of this contains a property,
|
|
|
|
* sun.arch.data.model = ARCH, in which ARCH is either 32 or 64. This option is supported
|
|
|
|
* in Java 8 and 9. Since this is a non-standard option. This will resolve to true if
|
|
|
|
* the function's code throws errors. That would indicate that the option is changed or
|
|
|
|
* removed.
|
2017-11-30 00:00:06 -08:00
|
|
|
*
|
2018-04-07 15:06:49 -07:00
|
|
|
* @param {string} binaryExecPath Path to the java executable we wish to validate.
|
2018-03-30 05:31:54 -07:00
|
|
|
*
|
2018-05-14 22:05:10 -07:00
|
|
|
* @returns {Promise.<Object>} A promise which resolves to a meta object about the JVM.
|
|
|
|
* The validity is stored inside the `valid` property.
|
2017-11-30 00:00:06 -08:00
|
|
|
*/
|
2019-03-05 20:05:01 -08:00
|
|
|
_validateJavaBinary(binaryExecPath){
|
2018-03-30 05:31:54 -07:00
|
|
|
|
|
|
|
return new Promise((resolve, reject) => {
|
2019-03-05 20:05:01 -08:00
|
|
|
if(!JavaGuard.isJavaExecPath(binaryExecPath)){
|
2018-06-14 00:49:55 -07:00
|
|
|
resolve({valid: false})
|
|
|
|
} else if(fs.existsSync(binaryExecPath)){
|
2019-06-02 15:11:39 -07:00
|
|
|
// Workaround (javaw.exe no longer outputs this information.)
|
|
|
|
console.log(typeof binaryExecPath)
|
|
|
|
if(binaryExecPath.indexOf('javaw.exe') > -1) {
|
|
|
|
binaryExecPath.replace('javaw.exe', 'java.exe')
|
|
|
|
}
|
2018-04-07 15:06:49 -07:00
|
|
|
child_process.exec('"' + binaryExecPath + '" -XshowSettings:properties', (err, stdout, stderr) => {
|
2018-03-30 05:31:54 -07:00
|
|
|
try {
|
|
|
|
// Output is stored in stderr?
|
2019-03-05 20:05:01 -08:00
|
|
|
resolve(this._validateJVMProperties(stderr))
|
2018-03-30 05:31:54 -07:00
|
|
|
} catch (err){
|
|
|
|
// Output format might have changed, validation cannot be completed.
|
2018-05-14 22:05:10 -07:00
|
|
|
resolve({valid: false})
|
2018-03-30 05:31:54 -07:00
|
|
|
}
|
|
|
|
})
|
|
|
|
} else {
|
2018-05-14 22:05:10 -07:00
|
|
|
resolve({valid: false})
|
2018-03-30 05:31:54 -07:00
|
|
|
}
|
|
|
|
})
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Checks for the presence of the environment variable JAVA_HOME. If it exits, we will check
|
|
|
|
* to see if the value points to a path which exists. If the path exits, the path is returned.
|
|
|
|
*
|
|
|
|
* @returns {string} The path defined by JAVA_HOME, if it exists. Otherwise null.
|
|
|
|
*/
|
|
|
|
static _scanJavaHome(){
|
|
|
|
const jHome = process.env.JAVA_HOME
|
|
|
|
try {
|
|
|
|
let res = fs.existsSync(jHome)
|
|
|
|
return res ? jHome : null
|
|
|
|
} catch (err) {
|
|
|
|
// Malformed JAVA_HOME property.
|
|
|
|
return null
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
    /**
     * Scans the registry for 64-bit Java entries. The paths of each entry are added to
     * a set and returned. Currently, only Java 8 (1.8) is supported.
     * 
     * All winreg calls are asynchronous; the keysDone/numDone counters track
     * completion so the promise resolves exactly once, after every registry
     * key and subkey has been processed.
     * 
     * @returns {Promise.<Set.<string>>} A promise which resolves to a set of 64-bit Java root
     * paths found in the registry.
     */
    static _scanRegistry(){

        return new Promise((resolve, reject) => {
            // Keys for Java v9.0.0 and later:
            // 'SOFTWARE\\JavaSoft\\JRE'
            // 'SOFTWARE\\JavaSoft\\JDK'
            // Forge does not yet support Java 9, therefore we do not.

            // Keys for Java 1.8 and prior:
            const regKeys = [
                '\\SOFTWARE\\JavaSoft\\Java Runtime Environment',
                '\\SOFTWARE\\JavaSoft\\Java Development Kit'
            ]

            // Number of top-level registry keys fully processed so far.
            let keysDone = 0

            const candidates = new Set()

            for(let i=0; i<regKeys.length; i++){
                const key = new Registry({
                    hive: Registry.HKLM,
                    key: regKeys[i],
                    arch: 'x64'
                })
                key.keyExists((err, exists) => {
                    if(exists) {
                        key.keys((err, javaVers) => {
                            if(err){
                                keysDone++
                                console.error(err)

                                // REG KEY DONE
                                // DUE TO ERROR
                                if(keysDone === regKeys.length){
                                    resolve(candidates)
                                }
                            } else {
                                if(javaVers.length === 0){
                                    // REG KEY DONE
                                    // NO SUBKEYS
                                    keysDone++
                                    if(keysDone === regKeys.length){
                                        resolve(candidates)
                                    }
                                } else {

                                    // Number of version subkeys processed for this key.
                                    let numDone = 0

                                    for(let j=0; j<javaVers.length; j++){
                                        const javaVer = javaVers[j]
                                        const vKey = javaVer.key.substring(javaVer.key.lastIndexOf('\\')+1)
                                        // Only Java 8 is supported currently.
                                        if(parseFloat(vKey) === 1.8){
                                            javaVer.get('JavaHome', (err, res) => {
                                                const jHome = res.value
                                                // Exclude 32-bit installs under 'Program Files (x86)'.
                                                if(jHome.indexOf('(x86)') === -1){
                                                    candidates.add(jHome)
                                                }

                                                // SUBKEY DONE

                                                numDone++
                                                if(numDone === javaVers.length){
                                                    keysDone++
                                                    if(keysDone === regKeys.length){
                                                        resolve(candidates)
                                                    }
                                                }
                                            })
                                        } else {

                                            // SUBKEY DONE
                                            // NOT JAVA 8

                                            numDone++
                                            if(numDone === javaVers.length){
                                                keysDone++
                                                if(keysDone === regKeys.length){
                                                    resolve(candidates)
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        })
                    } else {

                        // REG KEY DONE
                        // DUE TO NON-EXISTENCE

                        keysDone++
                        if(keysDone === regKeys.length){
                            resolve(candidates)
                        }
                    }
                })
            }

        })

    }
|
|
|
|
|
2018-05-14 22:05:10 -07:00
|
|
|
/**
|
|
|
|
* See if JRE exists in the Internet Plug-Ins folder.
|
|
|
|
*
|
|
|
|
* @returns {string} The path of the JRE if found, otherwise null.
|
|
|
|
*/
|
|
|
|
static _scanInternetPlugins(){
|
|
|
|
// /Library/Internet Plug-Ins/JavaAppletPlugin.plugin/Contents/Home/bin/java
|
|
|
|
const pth = '/Library/Internet Plug-Ins/JavaAppletPlugin.plugin'
|
2019-03-05 20:05:01 -08:00
|
|
|
const res = fs.existsSync(JavaGuard.javaExecFromRoot(pth))
|
2018-05-14 22:05:10 -07:00
|
|
|
return res ? pth : null
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Scan a directory for root JVM folders.
|
|
|
|
*
|
|
|
|
* @param {string} scanDir The directory to scan.
|
|
|
|
* @returns {Promise.<Set.<string>>} A promise which resolves to a set of the discovered
|
|
|
|
* root JVM folders.
|
|
|
|
*/
|
2020-09-13 00:05:08 -07:00
|
|
|
static async _scanFileSystem(scanDir){
|
2018-05-14 22:05:10 -07:00
|
|
|
|
2020-09-13 00:05:08 -07:00
|
|
|
let res = new Set()
|
2018-05-14 22:05:10 -07:00
|
|
|
|
2020-09-13 00:05:08 -07:00
|
|
|
if(await fs.pathExists(scanDir)) {
|
2018-05-14 22:05:10 -07:00
|
|
|
|
2020-09-13 00:05:08 -07:00
|
|
|
const files = await fs.readdir(scanDir)
|
|
|
|
for(let i=0; i<files.length; i++){
|
2018-05-14 22:05:10 -07:00
|
|
|
|
2020-09-13 00:05:08 -07:00
|
|
|
const combinedPath = path.join(scanDir, files[i])
|
|
|
|
const execPath = JavaGuard.javaExecFromRoot(combinedPath)
|
2018-05-14 22:05:10 -07:00
|
|
|
|
2020-09-13 00:05:08 -07:00
|
|
|
if(await fs.pathExists(execPath)) {
|
|
|
|
res.add(combinedPath)
|
2018-05-14 22:05:10 -07:00
|
|
|
}
|
2020-09-13 00:05:08 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return res
|
2018-05-14 22:05:10 -07:00
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
*
|
|
|
|
* @param {Set.<string>} rootSet A set of JVM root strings to validate.
|
|
|
|
* @returns {Promise.<Object[]>} A promise which resolves to an array of meta objects
|
|
|
|
* for each valid JVM root directory.
|
|
|
|
*/
|
2019-03-05 20:05:01 -08:00
|
|
|
async _validateJavaRootSet(rootSet){
|
2018-05-14 22:05:10 -07:00
|
|
|
|
|
|
|
const rootArr = Array.from(rootSet)
|
|
|
|
const validArr = []
|
|
|
|
|
|
|
|
for(let i=0; i<rootArr.length; i++){
|
|
|
|
|
2019-03-05 20:05:01 -08:00
|
|
|
const execPath = JavaGuard.javaExecFromRoot(rootArr[i])
|
|
|
|
const metaOb = await this._validateJavaBinary(execPath)
|
2018-05-14 22:05:10 -07:00
|
|
|
|
|
|
|
if(metaOb.valid){
|
|
|
|
metaOb.execPath = execPath
|
|
|
|
validArr.push(metaOb)
|
|
|
|
}
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
return validArr
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
    /**
     * Sort an array of JVM meta objects. Best candidates are placed before all others.
     * Sorts based on version and gives priority to JREs over JDKs if versions match.
     * 
     * NOTE: Array.prototype.sort sorts in place; the input array is mutated
     * and also returned.
     * 
     * @param {Object[]} validArr An array of JVM meta objects.
     * @returns {Object[]} A sorted array of JVM meta objects.
     */
    static _sortValidJavaArray(validArr){
        const retArr = validArr.sort((a, b) => {

            if(a.version.major === b.version.major){
                
                if(a.version.major < 9){
                    // Java 8: compare update, then build.
                    if(a.version.update === b.version.update){
                        if(a.version.build === b.version.build){
    
                            // Same version, give priority to JRE.
                            if(a.execPath.toLowerCase().indexOf('jdk') > -1){
                                return b.execPath.toLowerCase().indexOf('jdk') > -1 ? 0 : 1
                            } else {
                                return -1
                            }
    
                        } else {
                            // Higher build first.
                            return a.version.build > b.version.build ? -1 : 1
                        }
                    } else {
                        // Higher update first.
                        return a.version.update > b.version.update ? -1 : 1
                    }
                } else {
                    // Java 9+: compare minor, then revision.
                    if(a.version.minor === b.version.minor){
                        if(a.version.revision === b.version.revision){
    
                            // Same version, give priority to JRE.
                            if(a.execPath.toLowerCase().indexOf('jdk') > -1){
                                return b.execPath.toLowerCase().indexOf('jdk') > -1 ? 0 : 1
                            } else {
                                return -1
                            }
    
                        } else {
                            // Higher revision first.
                            return a.version.revision > b.version.revision ? -1 : 1
                        }
                    } else {
                        // Higher minor first.
                        return a.version.minor > b.version.minor ? -1 : 1
                    }
                }

            } else {
                // Higher major versions precede lower ones.
                return a.version.major > b.version.major ? -1 : 1
            }

        })

        return retArr

    }
|
|
|
|
|
2018-03-30 05:31:54 -07:00
|
|
|
/**
|
|
|
|
* Attempts to find a valid x64 installation of Java on Windows machines.
|
|
|
|
* Possible paths will be pulled from the registry and the JAVA_HOME environment
|
|
|
|
* variable. The paths will be sorted with higher versions preceeding lower, and
|
|
|
|
* JREs preceeding JDKs. The binaries at the sorted paths will then be validated.
|
|
|
|
* The first validated is returned.
|
|
|
|
*
|
|
|
|
* Higher versions > Lower versions
|
|
|
|
* If versions are equal, JRE > JDK.
|
|
|
|
*
|
2018-04-02 15:40:32 -07:00
|
|
|
* @param {string} dataDir The base launcher directory.
|
2018-04-07 15:06:49 -07:00
|
|
|
* @returns {Promise.<string>} A Promise which resolves to the executable path of a valid
|
2018-03-31 17:45:24 -07:00
|
|
|
* x64 Java installation. If none are found, null is returned.
|
2018-03-30 05:31:54 -07:00
|
|
|
*/
|
2019-03-05 20:05:01 -08:00
|
|
|
async _win32JavaValidate(dataDir){
|
2018-03-30 05:31:54 -07:00
|
|
|
|
|
|
|
// Get possible paths from the registry.
|
2019-03-05 20:05:01 -08:00
|
|
|
let pathSet1 = await JavaGuard._scanRegistry()
|
2020-09-13 00:05:08 -07:00
|
|
|
if(pathSet1.size === 0){
|
2022-11-26 13:57:30 -08:00
|
|
|
|
2018-05-14 22:05:10 -07:00
|
|
|
// Do a manual file system scan of program files.
|
2022-11-26 13:57:30 -08:00
|
|
|
// Check all drives
|
|
|
|
const driveMounts = nodeDiskInfo.getDiskInfoSync().map(({ mounted }) => mounted)
|
|
|
|
for(const mount of driveMounts) {
|
|
|
|
pathSet1 = new Set([
|
|
|
|
...pathSet1,
|
|
|
|
...(await JavaGuard._scanFileSystem(`${mount}\\Program Files\\Java`)),
|
|
|
|
...(await JavaGuard._scanFileSystem(`${mount}\\Program Files\\Eclipse Adoptium`)),
|
|
|
|
...(await JavaGuard._scanFileSystem(`${mount}\\Program Files\\Eclipse Foundation`)),
|
|
|
|
...(await JavaGuard._scanFileSystem(`${mount}\\Program Files\\AdoptOpenJDK`))
|
|
|
|
])
|
|
|
|
}
|
|
|
|
|
2018-05-14 22:05:10 -07:00
|
|
|
}
|
2018-03-30 05:31:54 -07:00
|
|
|
|
2018-04-02 15:40:32 -07:00
|
|
|
// Get possible paths from the data directory.
|
2019-03-05 20:05:01 -08:00
|
|
|
const pathSet2 = await JavaGuard._scanFileSystem(path.join(dataDir, 'runtime', 'x64'))
|
2018-04-02 15:40:32 -07:00
|
|
|
|
2018-05-14 22:05:10 -07:00
|
|
|
// Merge the results.
|
|
|
|
const uberSet = new Set([...pathSet1, ...pathSet2])
|
|
|
|
|
|
|
|
// Validate JAVA_HOME.
|
2019-03-05 20:05:01 -08:00
|
|
|
const jHome = JavaGuard._scanJavaHome()
|
2018-03-30 05:31:54 -07:00
|
|
|
if(jHome != null && jHome.indexOf('(x86)') === -1){
|
2018-05-14 22:05:10 -07:00
|
|
|
uberSet.add(jHome)
|
2018-03-30 05:31:54 -07:00
|
|
|
}
|
|
|
|
|
2019-03-05 20:05:01 -08:00
|
|
|
let pathArr = await this._validateJavaRootSet(uberSet)
|
|
|
|
pathArr = JavaGuard._sortValidJavaArray(pathArr)
|
2018-03-30 05:31:54 -07:00
|
|
|
|
2018-05-14 22:05:10 -07:00
|
|
|
if(pathArr.length > 0){
|
|
|
|
return pathArr[0].execPath
|
|
|
|
} else {
|
|
|
|
return null
|
2017-11-30 00:00:06 -08:00
|
|
|
}
|
2018-03-30 05:31:54 -07:00
|
|
|
|
|
|
|
}
|
|
|
|
|
2018-04-12 13:12:45 -07:00
|
|
|
/**
 * Attempts to find a valid x64 installation of Java on MacOS.
 * The system JVM directory is scanned for possible installations.
 * The JAVA_HOME environment variable and internet plugins directory
 * are also scanned and validated.
 * 
 * Higher versions > Lower versions
 * If versions are equal, JRE > JDK.
 * 
 * @param {string} dataDir The base launcher directory.
 * @returns {Promise.<string>} A Promise which resolves to the executable path of a valid
 * x64 Java installation. If none are found, null is returned.
 */
|
2019-03-05 20:05:01 -08:00
|
|
|
async _darwinJavaValidate(dataDir){
|
2018-04-12 13:12:45 -07:00
|
|
|
|
2019-03-05 20:05:01 -08:00
|
|
|
const pathSet1 = await JavaGuard._scanFileSystem('/Library/Java/JavaVirtualMachines')
|
|
|
|
const pathSet2 = await JavaGuard._scanFileSystem(path.join(dataDir, 'runtime', 'x64'))
|
2018-05-14 22:05:10 -07:00
|
|
|
|
|
|
|
const uberSet = new Set([...pathSet1, ...pathSet2])
|
2018-04-12 13:12:45 -07:00
|
|
|
|
2018-04-12 18:38:27 -07:00
|
|
|
// Check Internet Plugins folder.
|
2019-03-05 20:05:01 -08:00
|
|
|
const iPPath = JavaGuard._scanInternetPlugins()
|
2018-04-12 13:12:45 -07:00
|
|
|
if(iPPath != null){
|
2018-05-14 22:05:10 -07:00
|
|
|
uberSet.add(iPPath)
|
2018-04-12 13:12:45 -07:00
|
|
|
}
|
|
|
|
|
2018-04-12 18:38:27 -07:00
|
|
|
// Check the JAVA_HOME environment variable.
|
2019-03-05 20:05:01 -08:00
|
|
|
let jHome = JavaGuard._scanJavaHome()
|
2018-04-12 13:12:45 -07:00
|
|
|
if(jHome != null){
|
2018-04-12 19:13:26 -07:00
|
|
|
// Ensure we are at the absolute root.
|
|
|
|
if(jHome.contains('/Contents/Home')){
|
|
|
|
jHome = jHome.substring(0, jHome.indexOf('/Contents/Home'))
|
|
|
|
}
|
2018-05-14 22:05:10 -07:00
|
|
|
uberSet.add(jHome)
|
2018-04-12 13:12:45 -07:00
|
|
|
}
|
|
|
|
|
2019-03-05 20:05:01 -08:00
|
|
|
let pathArr = await this._validateJavaRootSet(uberSet)
|
|
|
|
pathArr = JavaGuard._sortValidJavaArray(pathArr)
|
2018-04-12 18:38:27 -07:00
|
|
|
|
2018-05-14 22:05:10 -07:00
|
|
|
if(pathArr.length > 0){
|
|
|
|
return pathArr[0].execPath
|
|
|
|
} else {
|
|
|
|
return null
|
2018-04-12 13:12:45 -07:00
|
|
|
}
|
2017-11-30 00:00:06 -08:00
|
|
|
}
|
|
|
|
|
2018-03-30 05:31:54 -07:00
|
|
|
/**
 * Attempts to find a valid x64 installation of Java on Linux.
 * The system JVM directory is scanned for possible installations.
 * The JAVA_HOME environment variable is also scanned and validated.
 * 
 * Higher versions > Lower versions
 * If versions are equal, JRE > JDK.
 * 
 * @param {string} dataDir The base launcher directory.
 * @returns {Promise.<string>} A Promise which resolves to the executable path of a valid
 * x64 Java installation. If none are found, null is returned.
 */
|
2019-03-05 20:05:01 -08:00
|
|
|
async _linuxJavaValidate(dataDir){
|
2018-05-14 22:05:10 -07:00
|
|
|
|
2019-03-05 20:05:01 -08:00
|
|
|
const pathSet1 = await JavaGuard._scanFileSystem('/usr/lib/jvm')
|
|
|
|
const pathSet2 = await JavaGuard._scanFileSystem(path.join(dataDir, 'runtime', 'x64'))
|
2018-05-14 22:05:10 -07:00
|
|
|
|
|
|
|
const uberSet = new Set([...pathSet1, ...pathSet2])
|
|
|
|
|
|
|
|
// Validate JAVA_HOME
|
2019-03-05 20:05:01 -08:00
|
|
|
const jHome = JavaGuard._scanJavaHome()
|
2018-05-14 22:05:10 -07:00
|
|
|
if(jHome != null){
|
|
|
|
uberSet.add(jHome)
|
|
|
|
}
|
|
|
|
|
2019-03-05 20:05:01 -08:00
|
|
|
let pathArr = await this._validateJavaRootSet(uberSet)
|
|
|
|
pathArr = JavaGuard._sortValidJavaArray(pathArr)
|
2018-05-14 22:05:10 -07:00
|
|
|
|
|
|
|
if(pathArr.length > 0){
|
|
|
|
return pathArr[0].execPath
|
|
|
|
} else {
|
|
|
|
return null
|
|
|
|
}
|
2018-03-30 05:31:54 -07:00
|
|
|
}
|
|
|
|
|
2018-03-31 17:45:24 -07:00
|
|
|
/**
|
|
|
|
* Retrieve the path of a valid x64 Java installation.
|
|
|
|
*
|
2018-04-02 15:40:32 -07:00
|
|
|
* @param {string} dataDir The base launcher directory.
|
2018-03-31 17:45:24 -07:00
|
|
|
* @returns {string} A path to a valid x64 Java installation, null if none found.
|
|
|
|
*/
|
2019-03-05 20:05:01 -08:00
|
|
|
async validateJava(dataDir){
|
|
|
|
return await this['_' + process.platform + 'JavaValidate'](dataDir)
|
|
|
|
}
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/**
 * Central object class used for control flow. This object stores data about
 * categories of downloads. Each category is assigned an identifier with a
 * DLTracker object as its value. Combined information is also stored, such as
 * the total size of all the queued files in each category. This object is used
 * to emit events so that external modules can listen into processing done in
 * this module.
 */
|
|
|
|
class AssetGuard extends EventEmitter {
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Create an instance of AssetGuard.
|
|
|
|
* On creation the object's properties are never-null default
|
|
|
|
* values. Each identifier is resolved to an empty DLTracker.
|
|
|
|
*
|
|
|
|
* @param {string} commonPath The common path for shared game files.
|
|
|
|
* @param {string} javaexec The path to a java executable which will be used
|
|
|
|
* to finalize installation.
|
|
|
|
*/
|
|
|
|
constructor(commonPath, javaexec){
|
|
|
|
super()
|
|
|
|
this.totaldlsize = 0
|
|
|
|
this.progress = 0
|
|
|
|
this.assets = new DLTracker([], 0)
|
|
|
|
this.libraries = new DLTracker([], 0)
|
|
|
|
this.files = new DLTracker([], 0)
|
|
|
|
this.forge = new DLTracker([], 0)
|
|
|
|
this.java = new DLTracker([], 0)
|
|
|
|
this.extractQueue = []
|
|
|
|
this.commonPath = commonPath
|
|
|
|
this.javaexec = javaexec
|
|
|
|
}
|
|
|
|
|
|
|
|
// Static Utility Functions
|
|
|
|
// #region
|
|
|
|
|
|
|
|
// Static Hash Validation Functions
|
|
|
|
// #region
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Calculates the hash for a file using the specified algorithm.
|
|
|
|
*
|
|
|
|
* @param {Buffer} buf The buffer containing file data.
|
|
|
|
* @param {string} algo The hash algorithm.
|
|
|
|
* @returns {string} The calculated hash in hex.
|
|
|
|
*/
|
|
|
|
static _calculateHash(buf, algo){
|
|
|
|
return crypto.createHash(algo).update(buf).digest('hex')
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Used to parse a checksums file. This is specifically designed for
|
|
|
|
* the checksums.sha1 files found inside the forge scala dependencies.
|
|
|
|
*
|
|
|
|
* @param {string} content The string content of the checksums file.
|
|
|
|
* @returns {Object} An object with keys being the file names, and values being the hashes.
|
|
|
|
*/
|
|
|
|
static _parseChecksumsFile(content){
|
|
|
|
let finalContent = {}
|
|
|
|
let lines = content.split('\n')
|
|
|
|
for(let i=0; i<lines.length; i++){
|
|
|
|
let bits = lines[i].split(' ')
|
|
|
|
if(bits[1] == null) {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
finalContent[bits[1]] = bits[0]
|
|
|
|
}
|
|
|
|
return finalContent
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Validate that a file exists and matches a given hash value.
|
|
|
|
*
|
|
|
|
* @param {string} filePath The path of the file to validate.
|
|
|
|
* @param {string} algo The hash algorithm to check against.
|
|
|
|
* @param {string} hash The existing hash to check against.
|
|
|
|
* @returns {boolean} True if the file exists and calculated hash matches the given hash, otherwise false.
|
|
|
|
*/
|
|
|
|
static _validateLocal(filePath, algo, hash){
|
|
|
|
if(fs.existsSync(filePath)){
|
|
|
|
//No hash provided, have to assume it's good.
|
|
|
|
if(hash == null){
|
|
|
|
return true
|
|
|
|
}
|
|
|
|
let buf = fs.readFileSync(filePath)
|
|
|
|
let calcdhash = AssetGuard._calculateHash(buf, algo)
|
2020-07-07 19:47:22 -07:00
|
|
|
return calcdhash === hash.toLowerCase()
|
2019-03-05 20:05:01 -08:00
|
|
|
}
|
|
|
|
return false
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Validates a file in the style used by forge's version index.
|
|
|
|
*
|
|
|
|
* @param {string} filePath The path of the file to validate.
|
|
|
|
* @param {Array.<string>} checksums The checksums listed in the forge version index.
|
|
|
|
* @returns {boolean} True if the file exists and the hashes match, otherwise false.
|
|
|
|
*/
|
|
|
|
static _validateForgeChecksum(filePath, checksums){
|
|
|
|
if(fs.existsSync(filePath)){
|
|
|
|
if(checksums == null || checksums.length === 0){
|
|
|
|
return true
|
|
|
|
}
|
|
|
|
let buf = fs.readFileSync(filePath)
|
|
|
|
let calcdhash = AssetGuard._calculateHash(buf, 'sha1')
|
|
|
|
let valid = checksums.includes(calcdhash)
|
|
|
|
if(!valid && filePath.endsWith('.jar')){
|
|
|
|
valid = AssetGuard._validateForgeJar(filePath, checksums)
|
|
|
|
}
|
|
|
|
return valid
|
|
|
|
}
|
|
|
|
return false
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Validates a forge jar file dependency who declares a checksums.sha1 file.
|
|
|
|
* This can be an expensive task as it usually requires that we calculate thousands
|
|
|
|
* of hashes.
|
|
|
|
*
|
|
|
|
* @param {Buffer} buf The buffer of the jar file.
|
|
|
|
* @param {Array.<string>} checksums The checksums listed in the forge version index.
|
|
|
|
* @returns {boolean} True if all hashes declared in the checksums.sha1 file match the actual hashes.
|
|
|
|
*/
|
|
|
|
static _validateForgeJar(buf, checksums){
|
|
|
|
// Double pass method was the quickest I found. I tried a version where we store data
|
|
|
|
// to only require a single pass, plus some quick cleanup but that seemed to take slightly more time.
|
|
|
|
|
|
|
|
const hashes = {}
|
|
|
|
let expected = {}
|
|
|
|
|
|
|
|
const zip = new AdmZip(buf)
|
|
|
|
const zipEntries = zip.getEntries()
|
|
|
|
|
|
|
|
//First pass
|
|
|
|
for(let i=0; i<zipEntries.length; i++){
|
|
|
|
let entry = zipEntries[i]
|
|
|
|
if(entry.entryName === 'checksums.sha1'){
|
|
|
|
expected = AssetGuard._parseChecksumsFile(zip.readAsText(entry))
|
|
|
|
}
|
|
|
|
hashes[entry.entryName] = AssetGuard._calculateHash(entry.getData(), 'sha1')
|
|
|
|
}
|
|
|
|
|
|
|
|
if(!checksums.includes(hashes['checksums.sha1'])){
|
|
|
|
return false
|
|
|
|
}
|
|
|
|
|
|
|
|
//Check against expected
|
|
|
|
const expectedEntries = Object.keys(expected)
|
|
|
|
for(let i=0; i<expectedEntries.length; i++){
|
|
|
|
if(expected[expectedEntries[i]] !== hashes[expectedEntries[i]]){
|
|
|
|
return false
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return true
|
|
|
|
}
|
|
|
|
|
|
|
|
// #endregion
|
|
|
|
|
|
|
|
// Miscellaneous Static Functions
|
|
|
|
// #region
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Extracts and unpacks a file from .pack.xz format.
|
|
|
|
*
|
|
|
|
* @param {Array.<string>} filePaths The paths of the files to be extracted and unpacked.
|
|
|
|
* @returns {Promise.<void>} An empty promise to indicate the extraction has completed.
|
|
|
|
*/
|
|
|
|
static _extractPackXZ(filePaths, javaExecutable){
|
|
|
|
console.log('[PackXZExtract] Starting')
|
|
|
|
return new Promise((resolve, reject) => {
|
|
|
|
|
|
|
|
let libPath
|
|
|
|
if(isDev){
|
|
|
|
libPath = path.join(process.cwd(), 'libraries', 'java', 'PackXZExtract.jar')
|
|
|
|
} else {
|
|
|
|
if(process.platform === 'darwin'){
|
|
|
|
libPath = path.join(process.cwd(),'Contents', 'Resources', 'libraries', 'java', 'PackXZExtract.jar')
|
|
|
|
} else {
|
|
|
|
libPath = path.join(process.cwd(), 'resources', 'libraries', 'java', 'PackXZExtract.jar')
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
const filePath = filePaths.join(',')
|
|
|
|
const child = child_process.spawn(javaExecutable, ['-jar', libPath, '-packxz', filePath])
|
|
|
|
child.stdout.on('data', (data) => {
|
|
|
|
console.log('[PackXZExtract]', data.toString('utf8'))
|
|
|
|
})
|
|
|
|
child.stderr.on('data', (data) => {
|
|
|
|
console.log('[PackXZExtract]', data.toString('utf8'))
|
|
|
|
})
|
|
|
|
child.on('close', (code, signal) => {
|
|
|
|
console.log('[PackXZExtract]', 'Exited with code', code)
|
|
|
|
resolve()
|
|
|
|
})
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Function which finalizes the forge installation process. This creates a 'version'
|
|
|
|
* instance for forge and saves its version.json file into that instance. If that
|
|
|
|
* instance already exists, the contents of the version.json file are read and returned
|
|
|
|
* in a promise.
|
|
|
|
*
|
|
|
|
* @param {Asset} asset The Asset object representing Forge.
|
|
|
|
* @param {string} commonPath The common path for shared game files.
|
|
|
|
* @returns {Promise.<Object>} A promise which resolves to the contents of forge's version.json.
|
|
|
|
*/
|
|
|
|
static _finalizeForgeAsset(asset, commonPath){
|
|
|
|
return new Promise((resolve, reject) => {
|
|
|
|
fs.readFile(asset.to, (err, data) => {
|
|
|
|
const zip = new AdmZip(data)
|
|
|
|
const zipEntries = zip.getEntries()
|
|
|
|
|
|
|
|
for(let i=0; i<zipEntries.length; i++){
|
|
|
|
if(zipEntries[i].entryName === 'version.json'){
|
|
|
|
const forgeVersion = JSON.parse(zip.readAsText(zipEntries[i]))
|
|
|
|
const versionPath = path.join(commonPath, 'versions', forgeVersion.id)
|
|
|
|
const versionFile = path.join(versionPath, forgeVersion.id + '.json')
|
|
|
|
if(!fs.existsSync(versionFile)){
|
|
|
|
fs.ensureDirSync(versionPath)
|
|
|
|
fs.writeFileSync(path.join(versionPath, forgeVersion.id + '.json'), zipEntries[i].getData())
|
|
|
|
resolve(forgeVersion)
|
|
|
|
} else {
|
|
|
|
//Read the saved file to allow for user modifications.
|
|
|
|
resolve(JSON.parse(fs.readFileSync(versionFile, 'utf-8')))
|
|
|
|
}
|
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
//We didn't find forge's version.json.
|
|
|
|
reject('Unable to finalize Forge processing, version.json not found! Has forge changed their format?')
|
|
|
|
})
|
|
|
|
})
|
2018-03-30 05:31:54 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
// #endregion
|
|
|
|
|
|
|
|
// #endregion
|
|
|
|
|
2017-11-30 00:00:06 -08:00
|
|
|
// Validation Functions
|
2018-03-30 05:31:54 -07:00
|
|
|
// #region
|
2017-11-30 00:00:06 -08:00
|
|
|
|
|
|
|
/**
|
|
|
|
* Loads the version data for a given minecraft version.
|
|
|
|
*
|
2018-03-28 13:42:10 -07:00
|
|
|
* @param {string} version The game version for which to load the index data.
|
|
|
|
* @param {boolean} force Optional. If true, the version index will be downloaded even if it exists locally. Defaults to false.
|
|
|
|
* @returns {Promise.<Object>} Promise which resolves to the version data object.
|
2017-11-30 00:00:06 -08:00
|
|
|
*/
|
2017-12-02 19:41:47 -08:00
|
|
|
loadVersionData(version, force = false){
|
|
|
|
const self = this
|
2018-10-25 09:45:32 -07:00
|
|
|
return new Promise(async (resolve, reject) => {
|
2018-06-03 21:17:20 -07:00
|
|
|
const versionPath = path.join(self.commonPath, 'versions', version)
|
2018-10-25 09:45:32 -07:00
|
|
|
const versionFile = path.join(versionPath, version + '.json')
|
2017-11-30 00:00:06 -08:00
|
|
|
if(!fs.existsSync(versionFile) || force){
|
2018-10-25 09:45:32 -07:00
|
|
|
const url = await self._getVersionDataUrl(version)
|
2017-11-30 00:00:06 -08:00
|
|
|
//This download will never be tracked as it's essential and trivial.
|
2018-03-31 17:45:24 -07:00
|
|
|
console.log('Preparing download of ' + version + ' assets.')
|
2018-12-01 05:20:42 -08:00
|
|
|
fs.ensureDirSync(versionPath)
|
2018-03-31 17:45:24 -07:00
|
|
|
const stream = request(url).pipe(fs.createWriteStream(versionFile))
|
|
|
|
stream.on('finish', () => {
|
|
|
|
resolve(JSON.parse(fs.readFileSync(versionFile)))
|
2017-11-30 00:00:06 -08:00
|
|
|
})
|
2017-05-16 23:26:46 -07:00
|
|
|
} else {
|
2018-03-31 17:45:24 -07:00
|
|
|
resolve(JSON.parse(fs.readFileSync(versionFile)))
|
2017-05-16 23:26:46 -07:00
|
|
|
}
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2018-10-25 09:45:32 -07:00
|
|
|
/**
|
|
|
|
* Parses Mojang's version manifest and retrieves the url of the version
|
|
|
|
* data index.
|
|
|
|
*
|
|
|
|
* @param {string} version The version to lookup.
|
|
|
|
* @returns {Promise.<string>} Promise which resolves to the url of the version data index.
|
|
|
|
* If the version could not be found, resolves to null.
|
|
|
|
*/
|
|
|
|
_getVersionDataUrl(version){
|
|
|
|
return new Promise((resolve, reject) => {
|
|
|
|
request('https://launchermeta.mojang.com/mc/game/version_manifest.json', (error, resp, body) => {
|
|
|
|
if(error){
|
|
|
|
reject(error)
|
|
|
|
} else {
|
|
|
|
const manifest = JSON.parse(body)
|
|
|
|
|
|
|
|
for(let v of manifest.versions){
|
|
|
|
if(v.id === version){
|
|
|
|
resolve(v.url)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
resolve(null)
|
|
|
|
}
|
|
|
|
})
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2018-03-30 05:31:54 -07:00
|
|
|
|
|
|
|
// Asset (Category=''') Validation Functions
|
|
|
|
// #region
|
|
|
|
|
2017-11-30 00:00:06 -08:00
|
|
|
/**
|
|
|
|
* Public asset validation function. This function will handle the validation of assets.
|
|
|
|
* It will parse the asset index specified in the version data, analyzing each
|
|
|
|
* asset entry. In this analysis it will check to see if the local file exists and is valid.
|
|
|
|
* If not, it will be added to the download queue for the 'assets' identifier.
|
|
|
|
*
|
2018-03-28 13:42:10 -07:00
|
|
|
* @param {Object} versionData The version data for the assets.
|
|
|
|
* @param {boolean} force Optional. If true, the asset index will be downloaded even if it exists locally. Defaults to false.
|
|
|
|
* @returns {Promise.<void>} An empty promise to indicate the async processing has completed.
|
2017-11-30 00:00:06 -08:00
|
|
|
*/
|
2017-12-02 19:41:47 -08:00
|
|
|
validateAssets(versionData, force = false){
|
2017-11-30 00:00:06 -08:00
|
|
|
const self = this
|
2018-05-07 15:15:59 -07:00
|
|
|
return new Promise((resolve, reject) => {
|
2017-12-02 19:41:47 -08:00
|
|
|
self._assetChainIndexData(versionData, force).then(() => {
|
2018-05-07 15:15:59 -07:00
|
|
|
resolve()
|
2017-05-16 23:26:46 -07:00
|
|
|
})
|
|
|
|
})
|
2017-11-30 00:00:06 -08:00
|
|
|
}
|
2017-05-16 23:26:46 -07:00
|
|
|
|
2017-11-30 00:00:06 -08:00
|
|
|
//Chain the asset tasks to provide full async. The below functions are private.
|
|
|
|
/**
|
|
|
|
* Private function used to chain the asset validation process. This function retrieves
|
|
|
|
* the index data.
|
|
|
|
* @param {Object} versionData
|
2018-03-28 13:42:10 -07:00
|
|
|
* @param {boolean} force
|
|
|
|
* @returns {Promise.<void>} An empty promise to indicate the async processing has completed.
|
2017-11-30 00:00:06 -08:00
|
|
|
*/
|
2017-12-02 19:41:47 -08:00
|
|
|
_assetChainIndexData(versionData, force = false){
|
2017-11-30 00:00:06 -08:00
|
|
|
const self = this
|
2018-05-07 15:15:59 -07:00
|
|
|
return new Promise((resolve, reject) => {
|
2017-11-30 00:00:06 -08:00
|
|
|
//Asset index constants.
|
|
|
|
const assetIndex = versionData.assetIndex
|
|
|
|
const name = assetIndex.id + '.json'
|
2018-06-03 21:17:20 -07:00
|
|
|
const indexPath = path.join(self.commonPath, 'assets', 'indexes')
|
2017-11-30 00:00:06 -08:00
|
|
|
const assetIndexLoc = path.join(indexPath, name)
|
|
|
|
|
|
|
|
let data = null
|
|
|
|
if(!fs.existsSync(assetIndexLoc) || force){
|
|
|
|
console.log('Downloading ' + versionData.id + ' asset index.')
|
2018-12-01 05:20:42 -08:00
|
|
|
fs.ensureDirSync(indexPath)
|
2017-11-30 00:00:06 -08:00
|
|
|
const stream = request(assetIndex.url).pipe(fs.createWriteStream(assetIndexLoc))
|
2018-05-07 15:15:59 -07:00
|
|
|
stream.on('finish', () => {
|
2017-11-30 00:00:06 -08:00
|
|
|
data = JSON.parse(fs.readFileSync(assetIndexLoc, 'utf-8'))
|
2017-12-02 19:41:47 -08:00
|
|
|
self._assetChainValidateAssets(versionData, data).then(() => {
|
2018-05-07 15:15:59 -07:00
|
|
|
resolve()
|
2017-11-30 00:00:06 -08:00
|
|
|
})
|
|
|
|
})
|
|
|
|
} else {
|
2017-05-16 23:26:46 -07:00
|
|
|
data = JSON.parse(fs.readFileSync(assetIndexLoc, 'utf-8'))
|
2017-12-02 19:41:47 -08:00
|
|
|
self._assetChainValidateAssets(versionData, data).then(() => {
|
2018-05-07 15:15:59 -07:00
|
|
|
resolve()
|
2017-05-16 23:26:46 -07:00
|
|
|
})
|
2017-11-30 00:00:06 -08:00
|
|
|
}
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Private function used to chain the asset validation process. This function processes
|
|
|
|
* the assets and enqueues missing or invalid files.
|
|
|
|
* @param {Object} versionData
|
2018-03-28 13:42:10 -07:00
|
|
|
* @param {boolean} force
|
|
|
|
* @returns {Promise.<void>} An empty promise to indicate the async processing has completed.
|
2017-11-30 00:00:06 -08:00
|
|
|
*/
|
2017-12-02 19:41:47 -08:00
|
|
|
_assetChainValidateAssets(versionData, indexData){
|
2017-11-30 00:00:06 -08:00
|
|
|
const self = this
|
2018-05-07 15:15:59 -07:00
|
|
|
return new Promise((resolve, reject) => {
|
2017-11-30 00:00:06 -08:00
|
|
|
|
|
|
|
//Asset constants
|
2020-09-16 10:49:10 -07:00
|
|
|
const resourceURL = 'https://resources.download.minecraft.net/'
|
2018-06-03 21:17:20 -07:00
|
|
|
const localPath = path.join(self.commonPath, 'assets')
|
2017-11-30 00:00:06 -08:00
|
|
|
const objectPath = path.join(localPath, 'objects')
|
|
|
|
|
|
|
|
const assetDlQueue = []
|
2018-04-07 15:06:49 -07:00
|
|
|
let dlSize = 0
|
|
|
|
let acc = 0
|
|
|
|
const total = Object.keys(indexData.objects).length
|
2017-11-30 00:00:06 -08:00
|
|
|
//const objKeys = Object.keys(data.objects)
|
2018-05-07 15:15:59 -07:00
|
|
|
async.forEachOfLimit(indexData.objects, 10, (value, key, cb) => {
|
2018-04-07 15:06:49 -07:00
|
|
|
acc++
|
2018-07-22 08:40:15 -07:00
|
|
|
self.emit('progress', 'assets', acc, total)
|
2017-11-30 00:00:06 -08:00
|
|
|
const hash = value.hash
|
|
|
|
const assetName = path.join(hash.substring(0, 2), hash)
|
2018-07-22 10:31:15 -07:00
|
|
|
const urlName = hash.substring(0, 2) + '/' + hash
|
2018-07-22 08:40:15 -07:00
|
|
|
const ast = new Asset(key, hash, value.size, resourceURL + urlName, path.join(objectPath, assetName))
|
2017-11-30 00:00:06 -08:00
|
|
|
if(!AssetGuard._validateLocal(ast.to, 'sha1', ast.hash)){
|
|
|
|
dlSize += (ast.size*1)
|
|
|
|
assetDlQueue.push(ast)
|
|
|
|
}
|
|
|
|
cb()
|
2018-05-07 15:15:59 -07:00
|
|
|
}, (err) => {
|
2017-11-30 00:00:06 -08:00
|
|
|
self.assets = new DLTracker(assetDlQueue, dlSize)
|
2018-05-07 15:15:59 -07:00
|
|
|
resolve()
|
2017-05-16 23:26:46 -07:00
|
|
|
})
|
2017-11-30 00:00:06 -08:00
|
|
|
})
|
|
|
|
}
|
2018-03-30 05:31:54 -07:00
|
|
|
|
|
|
|
// #endregion
|
|
|
|
|
|
|
|
// Library (Category=''') Validation Functions
|
|
|
|
// #region
|
2017-11-30 00:00:06 -08:00
|
|
|
|
|
|
|
/**
|
|
|
|
* Public library validation function. This function will handle the validation of libraries.
|
|
|
|
* It will parse the version data, analyzing each library entry. In this analysis, it will
|
|
|
|
* check to see if the local file exists and is valid. If not, it will be added to the download
|
|
|
|
* queue for the 'libraries' identifier.
|
|
|
|
*
|
2018-03-28 13:42:10 -07:00
|
|
|
* @param {Object} versionData The version data for the assets.
|
|
|
|
* @returns {Promise.<void>} An empty promise to indicate the async processing has completed.
|
2017-11-30 00:00:06 -08:00
|
|
|
*/
|
2017-12-02 19:41:47 -08:00
|
|
|
validateLibraries(versionData){
|
2017-11-30 00:00:06 -08:00
|
|
|
const self = this
|
2018-05-07 15:15:59 -07:00
|
|
|
return new Promise((resolve, reject) => {
|
2017-11-30 00:00:06 -08:00
|
|
|
|
|
|
|
const libArr = versionData.libraries
|
2018-06-03 21:17:20 -07:00
|
|
|
const libPath = path.join(self.commonPath, 'libraries')
|
2017-11-30 00:00:06 -08:00
|
|
|
|
|
|
|
const libDlQueue = []
|
|
|
|
let dlSize = 0
|
|
|
|
|
|
|
|
//Check validity of each library. If the hashs don't match, download the library.
|
2018-05-07 15:15:59 -07:00
|
|
|
async.eachLimit(libArr, 5, (lib, cb) => {
|
2018-12-16 21:11:23 -08:00
|
|
|
if(Library.validateRules(lib.rules, lib.natives)){
|
2018-11-18 01:19:17 -08:00
|
|
|
let artifact = (lib.natives == null) ? lib.downloads.artifact : lib.downloads.classifiers[lib.natives[Library.mojangFriendlyOS()].replace('${arch}', process.arch.replace('x', ''))]
|
2017-11-30 00:00:06 -08:00
|
|
|
const libItm = new Library(lib.name, artifact.sha1, artifact.size, artifact.url, path.join(libPath, artifact.path))
|
|
|
|
if(!AssetGuard._validateLocal(libItm.to, 'sha1', libItm.hash)){
|
|
|
|
dlSize += (libItm.size*1)
|
|
|
|
libDlQueue.push(libItm)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
cb()
|
2018-05-07 15:15:59 -07:00
|
|
|
}, (err) => {
|
2017-11-30 00:00:06 -08:00
|
|
|
self.libraries = new DLTracker(libDlQueue, dlSize)
|
2018-05-07 15:15:59 -07:00
|
|
|
resolve()
|
2017-05-16 23:26:46 -07:00
|
|
|
})
|
2017-11-30 00:00:06 -08:00
|
|
|
})
|
|
|
|
}
|
2017-05-16 23:26:46 -07:00
|
|
|
|
2018-03-30 05:31:54 -07:00
|
|
|
// #endregion
|
|
|
|
|
|
|
|
// Miscellaneous (Category=files) Validation Functions
|
|
|
|
// #region
|
|
|
|
|
2017-11-30 00:00:06 -08:00
|
|
|
/**
|
|
|
|
* Public miscellaneous mojang file validation function. These files will be enqueued under
|
|
|
|
* the 'files' identifier.
|
|
|
|
*
|
2018-03-28 13:42:10 -07:00
|
|
|
* @param {Object} versionData The version data for the assets.
|
|
|
|
* @returns {Promise.<void>} An empty promise to indicate the async processing has completed.
|
2017-11-30 00:00:06 -08:00
|
|
|
*/
|
2017-12-02 19:41:47 -08:00
|
|
|
validateMiscellaneous(versionData){
|
2017-11-30 00:00:06 -08:00
|
|
|
const self = this
|
2018-05-07 15:15:59 -07:00
|
|
|
return new Promise(async (resolve, reject) => {
|
2017-12-02 19:41:47 -08:00
|
|
|
await self.validateClient(versionData)
|
|
|
|
await self.validateLogConfig(versionData)
|
2018-05-07 15:15:59 -07:00
|
|
|
resolve()
|
2017-05-16 23:26:46 -07:00
|
|
|
})
|
2017-11-30 00:00:06 -08:00
|
|
|
}
|
2017-05-16 23:26:46 -07:00
|
|
|
|
2017-11-30 00:00:06 -08:00
|
|
|
/**
|
|
|
|
* Validate client file - artifact renamed from client.jar to '{version}'.jar.
|
|
|
|
*
|
2018-03-28 13:42:10 -07:00
|
|
|
* @param {Object} versionData The version data for the assets.
|
|
|
|
* @param {boolean} force Optional. If true, the asset index will be downloaded even if it exists locally. Defaults to false.
|
|
|
|
* @returns {Promise.<void>} An empty promise to indicate the async processing has completed.
|
2017-11-30 00:00:06 -08:00
|
|
|
*/
|
2017-12-02 19:41:47 -08:00
|
|
|
validateClient(versionData, force = false){
|
2017-11-30 00:00:06 -08:00
|
|
|
const self = this
|
2018-05-07 15:15:59 -07:00
|
|
|
return new Promise((resolve, reject) => {
|
2017-11-30 00:00:06 -08:00
|
|
|
const clientData = versionData.downloads.client
|
|
|
|
const version = versionData.id
|
2018-06-03 21:17:20 -07:00
|
|
|
const targetPath = path.join(self.commonPath, 'versions', version)
|
2017-11-30 00:00:06 -08:00
|
|
|
const targetFile = version + '.jar'
|
|
|
|
|
|
|
|
let client = new Asset(version + ' client', clientData.sha1, clientData.size, clientData.url, path.join(targetPath, targetFile))
|
|
|
|
|
|
|
|
if(!AssetGuard._validateLocal(client.to, 'sha1', client.hash) || force){
|
|
|
|
self.files.dlqueue.push(client)
|
|
|
|
self.files.dlsize += client.size*1
|
2018-05-07 15:15:59 -07:00
|
|
|
resolve()
|
2017-11-30 00:00:06 -08:00
|
|
|
} else {
|
2018-05-07 15:15:59 -07:00
|
|
|
resolve()
|
2017-05-16 23:26:46 -07:00
|
|
|
}
|
|
|
|
})
|
2017-11-30 00:00:06 -08:00
|
|
|
}
|
2017-05-16 23:26:46 -07:00
|
|
|
|
2017-11-30 00:00:06 -08:00
|
|
|
/**
|
|
|
|
* Validate log config.
|
|
|
|
*
|
2018-03-28 13:42:10 -07:00
|
|
|
* @param {Object} versionData The version data for the assets.
|
|
|
|
* @param {boolean} force Optional. If true, the asset index will be downloaded even if it exists locally. Defaults to false.
|
|
|
|
* @returns {Promise.<void>} An empty promise to indicate the async processing has completed.
|
2017-11-30 00:00:06 -08:00
|
|
|
*/
|
2017-12-02 19:41:47 -08:00
|
|
|
validateLogConfig(versionData){
|
2017-11-30 00:00:06 -08:00
|
|
|
const self = this
|
2018-05-07 15:15:59 -07:00
|
|
|
return new Promise((resolve, reject) => {
|
2017-11-30 00:00:06 -08:00
|
|
|
const client = versionData.logging.client
|
|
|
|
const file = client.file
|
2018-06-03 21:17:20 -07:00
|
|
|
const targetPath = path.join(self.commonPath, 'assets', 'log_configs')
|
2017-11-30 00:00:06 -08:00
|
|
|
|
|
|
|
let logConfig = new Asset(file.id, file.sha1, file.size, file.url, path.join(targetPath, file.id))
|
|
|
|
|
|
|
|
if(!AssetGuard._validateLocal(logConfig.to, 'sha1', logConfig.hash)){
|
|
|
|
self.files.dlqueue.push(logConfig)
|
|
|
|
self.files.dlsize += logConfig.size*1
|
2018-05-07 15:15:59 -07:00
|
|
|
resolve()
|
2017-11-30 00:00:06 -08:00
|
|
|
} else {
|
2018-05-07 15:15:59 -07:00
|
|
|
resolve()
|
2017-11-30 00:00:06 -08:00
|
|
|
}
|
|
|
|
})
|
|
|
|
}
|
2017-05-16 23:26:46 -07:00
|
|
|
|
2018-03-30 05:31:54 -07:00
|
|
|
// #endregion
|
|
|
|
|
|
|
|
// Distribution (Category=forge) Validation Functions
|
|
|
|
// #region
|
|
|
|
|
2017-11-30 00:00:06 -08:00
|
|
|
/**
|
|
|
|
* Validate the distribution.
|
|
|
|
*
|
2018-07-22 08:40:15 -07:00
|
|
|
* @param {Server} server The Server to validate.
|
2018-03-28 13:42:10 -07:00
|
|
|
* @returns {Promise.<Object>} A promise which resolves to the server distribution object.
|
2017-11-30 00:00:06 -08:00
|
|
|
*/
|
2018-07-22 08:40:15 -07:00
|
|
|
validateDistribution(server){
|
2017-11-30 00:00:06 -08:00
|
|
|
const self = this
|
2018-05-07 15:15:59 -07:00
|
|
|
return new Promise((resolve, reject) => {
|
2018-07-22 08:40:15 -07:00
|
|
|
self.forge = self._parseDistroModules(server.getModules(), server.getMinecraftVersion(), server.getID())
|
|
|
|
resolve(server)
|
2017-11-30 00:00:06 -08:00
|
|
|
})
|
|
|
|
}
|
2017-05-16 23:26:46 -07:00
|
|
|
|
2018-06-03 21:17:20 -07:00
|
|
|
_parseDistroModules(modules, version, servid){
|
2017-11-30 00:00:06 -08:00
|
|
|
let alist = []
|
2018-07-22 10:31:15 -07:00
|
|
|
let asize = 0
|
2018-07-22 08:40:15 -07:00
|
|
|
for(let ob of modules){
|
|
|
|
let obArtifact = ob.getArtifact()
|
|
|
|
let obPath = obArtifact.getPath()
|
2018-10-30 22:25:38 -07:00
|
|
|
let artifact = new DistroModule(ob.getIdentifier(), obArtifact.getHash(), obArtifact.getSize(), obArtifact.getURL(), obPath, ob.getType())
|
2017-11-30 00:00:06 -08:00
|
|
|
const validationPath = obPath.toLowerCase().endsWith('.pack.xz') ? obPath.substring(0, obPath.toLowerCase().lastIndexOf('.pack.xz')) : obPath
|
|
|
|
if(!AssetGuard._validateLocal(validationPath, 'MD5', artifact.hash)){
|
|
|
|
asize += artifact.size*1
|
|
|
|
alist.push(artifact)
|
2018-10-30 22:25:38 -07:00
|
|
|
if(validationPath !== obPath) this.extractQueue.push(obPath)
|
2017-11-30 00:00:06 -08:00
|
|
|
}
|
|
|
|
//Recursively process the submodules then combine the results.
|
2018-07-22 08:40:15 -07:00
|
|
|
if(ob.getSubModules() != null){
|
|
|
|
let dltrack = this._parseDistroModules(ob.getSubModules(), version, servid)
|
2017-11-30 00:00:06 -08:00
|
|
|
asize += dltrack.dlsize*1
|
|
|
|
alist = alist.concat(dltrack.dlqueue)
|
|
|
|
}
|
2017-05-16 23:26:46 -07:00
|
|
|
}
|
|
|
|
|
2018-10-30 22:25:38 -07:00
|
|
|
return new DLTracker(alist, asize)
|
2017-11-30 00:00:06 -08:00
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Loads Forge's version.json data into memory for the specified server id.
|
|
|
|
*
|
2018-07-22 08:40:15 -07:00
|
|
|
* @param {string} server The Server to load Forge data for.
|
2018-03-28 13:42:10 -07:00
|
|
|
* @returns {Promise.<Object>} A promise which resolves to Forge's version.json data.
|
2017-11-30 00:00:06 -08:00
|
|
|
*/
|
2018-07-22 08:40:15 -07:00
|
|
|
loadForgeData(server){
|
2017-11-30 00:00:06 -08:00
|
|
|
const self = this
|
2018-05-07 15:15:59 -07:00
|
|
|
return new Promise(async (resolve, reject) => {
|
2018-07-22 08:40:15 -07:00
|
|
|
const modules = server.getModules()
|
|
|
|
for(let ob of modules){
|
|
|
|
const type = ob.getType()
|
|
|
|
if(type === DistroManager.Types.ForgeHosted || type === DistroManager.Types.Forge){
|
2020-06-02 16:30:12 -07:00
|
|
|
if(Util.isForgeGradle3(server.getMinecraftVersion(), ob.getVersion())){
|
2019-03-06 19:47:45 -08:00
|
|
|
// Read Manifest
|
2019-02-18 03:31:01 -08:00
|
|
|
for(let sub of ob.getSubModules()){
|
|
|
|
if(sub.getType() === DistroManager.Types.VersionManifest){
|
2019-03-06 19:47:45 -08:00
|
|
|
resolve(JSON.parse(fs.readFileSync(sub.getArtifact().getPath(), 'utf-8')))
|
2019-02-18 03:31:01 -08:00
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
reject('No forge version manifest found!')
|
|
|
|
return
|
|
|
|
} else {
|
|
|
|
let obArtifact = ob.getArtifact()
|
|
|
|
let obPath = obArtifact.getPath()
|
|
|
|
let asset = new DistroModule(ob.getIdentifier(), obArtifact.getHash(), obArtifact.getSize(), obArtifact.getURL(), obPath, type)
|
|
|
|
try {
|
|
|
|
let forgeData = await AssetGuard._finalizeForgeAsset(asset, self.commonPath)
|
|
|
|
resolve(forgeData)
|
|
|
|
} catch (err){
|
|
|
|
reject(err)
|
|
|
|
}
|
|
|
|
return
|
|
|
|
}
|
2017-05-21 15:06:48 -07:00
|
|
|
}
|
2017-05-21 01:56:39 -07:00
|
|
|
}
|
2017-11-30 00:00:06 -08:00
|
|
|
reject('No forge module found!')
|
2017-05-20 22:06:01 -07:00
|
|
|
})
|
|
|
|
}
|
2017-08-26 11:43:24 -07:00
|
|
|
|
2017-11-30 00:00:06 -08:00
|
|
|
    // Intentionally unimplemented — kept as a placeholder; see TODO below.
    _parseForgeLibraries(){
        /* TODO
        * Forge asset validations are already implemented. When there's nothing much
        * to work on, implement forge downloads using forge's version.json. This is to
        * have the code on standby if we ever need it (since it's half implemented already).
        */
    }
|
2017-05-21 15:06:48 -07:00
|
|
|
|
2018-03-31 10:05:05 -07:00
|
|
|
// #endregion
|
|
|
|
|
|
|
|
// Java (Category=''') Validation (download) Functions
|
|
|
|
// #region
|
|
|
|
|
2022-11-27 14:45:31 -08:00
|
|
|
    /**
     * Look up the latest OpenJDK build and, if one is available, enqueue it on
     * the `java` DLTracker. Extraction (zip or tar.gz) happens in the tracker's
     * completion callback, which finally emits 'complete' with the java
     * executable path.
     *
     * @param {string} dataDir The base launcher data directory.
     * @param {string} mcVersion The minecraft version being launched.
     * @returns {Promise.<boolean>} Resolves true if a JDK was enqueued, false otherwise.
     */
    _enqueueOpenJDK(dataDir, mcVersion){
        return new Promise((resolve, reject) => {
            // MC 1.17+ requires Java 17; earlier versions run on Java 8.
            const major = Util.mcVersionAtLeast('1.17', mcVersion) ? '17' : '8'
            JavaGuard._latestOpenJDK(major).then(verData => {
                if(verData != null){

                    dataDir = path.join(dataDir, 'runtime', 'x64')
                    const fDir = path.join(dataDir, verData.name)
                    const jre = new Asset(verData.name, null, verData.size, verData.uri, fDir)
                    // Callback runs after the archive finishes downloading.
                    this.java = new DLTracker([jre], jre.size, (a, self) => {
                        if(verData.name.endsWith('zip')){

                            this._extractJdkZip(a.to, dataDir, self)

                        } else {
                            // Tar.gz
                            // `h` captures the first entry name, i.e. the archive's top-level folder.
                            let h = null
                            fs.createReadStream(a.to)
                                .on('error', err => console.log(err))
                                .pipe(zlib.createGunzip())
                                .on('error', err => console.log(err))
                                .pipe(tar.extract(dataDir, {
                                    map: (header) => {
                                        if(h == null){
                                            h = header.name
                                        }
                                    }
                                }))
                                .on('error', err => console.log(err))
                                .on('finish', () => {
                                    // Remove the downloaded archive; failure is non-fatal.
                                    fs.unlink(a.to, err => {
                                        if(err){
                                            console.log(err)
                                        }
                                        // Reduce the entry name to the root folder of the JDK.
                                        if(h.indexOf('/') > -1){
                                            h = h.substring(0, h.indexOf('/'))
                                        }
                                        const pos = path.join(dataDir, h)
                                        self.emit('complete', 'java', JavaGuard.javaExecFromRoot(pos))
                                    })
                                })
                        }
                    })
                    // Enqueued — the actual download is driven by startAsyncProcess('java').
                    resolve(true)

                } else {
                    // No build metadata available for this major version.
                    resolve(false)
                }
            })
        })

    }
|
|
|
|
|
2021-10-14 20:17:40 -07:00
|
|
|
async _extractJdkZip(zipPath, runtimeDir, self) {
|
|
|
|
|
|
|
|
const zip = new StreamZip.async({
|
|
|
|
file: zipPath,
|
|
|
|
storeEntries: true
|
|
|
|
})
|
|
|
|
|
|
|
|
let pos = ''
|
|
|
|
try {
|
|
|
|
const entries = await zip.entries()
|
|
|
|
pos = path.join(runtimeDir, Object.keys(entries)[0])
|
|
|
|
|
|
|
|
console.log('Extracting jdk..')
|
|
|
|
await zip.extract(null, runtimeDir)
|
|
|
|
console.log('Cleaning up..')
|
|
|
|
await fs.remove(zipPath)
|
|
|
|
console.log('Jdk extraction complete.')
|
|
|
|
|
|
|
|
} catch(err) {
|
|
|
|
console.log(err)
|
|
|
|
} finally {
|
|
|
|
zip.close()
|
|
|
|
self.emit('complete', 'java', JavaGuard.javaExecFromRoot(pos))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-06-02 15:11:39 -07:00
|
|
|
// _enqueueMojangJRE(dir){
|
|
|
|
// return new Promise((resolve, reject) => {
|
|
|
|
// // Mojang does not host the JRE for linux.
|
|
|
|
// if(process.platform === 'linux'){
|
|
|
|
// resolve(false)
|
|
|
|
// }
|
|
|
|
// AssetGuard.loadMojangLauncherData().then(data => {
|
|
|
|
// if(data != null) {
|
|
|
|
|
|
|
|
// try {
|
|
|
|
// const mJRE = data[Library.mojangFriendlyOS()]['64'].jre
|
|
|
|
// const url = mJRE.url
|
|
|
|
|
|
|
|
// request.head(url, (err, resp, body) => {
|
|
|
|
// if(err){
|
|
|
|
// resolve(false)
|
|
|
|
// } else {
|
|
|
|
// const name = url.substring(url.lastIndexOf('/')+1)
|
|
|
|
// const fDir = path.join(dir, name)
|
|
|
|
// const jre = new Asset('jre' + mJRE.version, mJRE.sha1, resp.headers['content-length'], url, fDir)
|
|
|
|
// this.java = new DLTracker([jre], jre.size, a => {
|
|
|
|
// fs.readFile(a.to, (err, data) => {
|
|
|
|
// // Data buffer needs to be decompressed from lzma,
|
|
|
|
// // not really possible using node.js
|
|
|
|
// })
|
|
|
|
// })
|
|
|
|
// }
|
|
|
|
// })
|
|
|
|
// } catch (err){
|
|
|
|
// resolve(false)
|
|
|
|
// }
|
|
|
|
|
|
|
|
// }
|
|
|
|
// })
|
|
|
|
// })
|
|
|
|
// }
|
2018-03-31 10:05:05 -07:00
|
|
|
|
|
|
|
|
2018-03-30 05:31:54 -07:00
|
|
|
// #endregion
|
|
|
|
|
|
|
|
// #endregion
|
|
|
|
|
|
|
|
// Control Flow Functions
|
|
|
|
// #region
|
|
|
|
|
|
|
|
    /**
     * Initiate an async download process for an AssetGuard DLTracker.
     *
     * @param {string} identifier The identifier of the AssetGuard DLTracker.
     * @param {number} limit Optional. The number of async processes to run in parallel.
     * @returns {boolean} True if the process began, otherwise false.
     */
    startAsyncProcess(identifier, limit = 5){

        const self = this
        const dlTracker = this[identifier]
        const dlQueue = dlTracker.dlqueue

        if(dlQueue.length > 0){
            console.log('DLQueue', dlQueue)

            // Download up to `limit` assets concurrently.
            async.eachLimit(dlQueue, limit, (asset, cb) => {

                // Make sure the destination directory exists before streaming into it.
                fs.ensureDirSync(path.join(asset.to, '..'))

                let req = request(asset.from)
                // Pause until the response status has been inspected, so no bytes
                // are consumed for failed requests.
                req.pause()

                req.on('response', (resp) => {

                    if(resp.statusCode === 200){

                        let doHashCheck = false
                        const contentLength = parseInt(resp.headers['content-length'])

                        // A size mismatch against the distro index triggers a post-download
                        // hash check and an adjustment of the total progress target.
                        if(contentLength !== asset.size){
                            console.log(`WARN: Got ${contentLength} bytes for ${asset.id}: Expected ${asset.size}`)
                            doHashCheck = true

                            // Adjust download
                            this.totaldlsize -= asset.size
                            this.totaldlsize += contentLength
                        }

                        let writeStream = fs.createWriteStream(asset.to)
                        writeStream.on('close', () => {
                            // Per-asset post-processing hook (e.g. JDK extraction).
                            if(dlTracker.callback != null){
                                dlTracker.callback.apply(dlTracker, [asset, self])
                            }

                            if(doHashCheck){
                                // Assets with a `type` are distro modules (MD5); the rest use SHA-1.
                                const v = AssetGuard._validateLocal(asset.to, asset.type != null ? 'md5' : 'sha1', asset.hash)
                                if(v){
                                    console.log(`Hashes match for ${asset.id}, byte mismatch is an issue in the distro index.`)
                                } else {
                                    console.error(`Hashes do not match, ${asset.id} may be corrupted.`)
                                }
                            }

                            cb()
                        })
                        req.pipe(writeStream)
                        req.resume()

                    } else {

                        // Non-200: skip the asset but still count its bytes so the
                        // overall progress can reach the total.
                        req.abort()
                        console.log(`Failed to download ${asset.id}(${typeof asset.from === 'object' ? asset.from.url : asset.from}). Response code ${resp.statusCode}`)
                        self.progress += asset.size*1
                        self.emit('progress', 'download', self.progress, self.totaldlsize)
                        cb()

                    }

                })

                req.on('error', (err) => {
                    self.emit('error', 'download', err)
                })

                // Stream-level progress reporting.
                req.on('data', (chunk) => {
                    self.progress += chunk.length
                    self.emit('progress', 'download', self.progress, self.totaldlsize)
                })

            }, (err) => {

                if(err){
                    console.log('An item in ' + identifier + ' failed to process')
                } else {
                    console.log('All ' + identifier + ' have been processed successfully')
                }

                //self.totaldlsize -= dlTracker.dlsize
                //self.progress -= dlTracker.dlsize
                // Reset this queue so a re-run starts clean.
                self[identifier] = new DLTracker([], 0)

                // Only the queue that finishes last fires completion. Any queued
                // .pack.xz artifacts are extracted first.
                if(self.progress >= self.totaldlsize) {
                    if(self.extractQueue.length > 0){
                        self.emit('progress', 'extract', 1, 1)
                        //self.emit('extracting')
                        AssetGuard._extractPackXZ(self.extractQueue, self.javaexec).then(() => {
                            self.extractQueue = []
                            self.emit('complete', 'download')
                        })
                    } else {
                        self.emit('complete', 'download')
                    }
                }

            })

            return true

        } else {
            // Nothing to download for this identifier.
            return false
        }
    }
|
|
|
|
|
2017-11-30 00:00:06 -08:00
|
|
|
/**
|
|
|
|
* This function will initiate the download processed for the specified identifiers. If no argument is
|
|
|
|
* given, all identifiers will be initiated. Note that in order for files to be processed you need to run
|
|
|
|
* the processing function corresponding to that identifier. If you run this function without processing
|
|
|
|
* the files, it is likely nothing will be enqueued in the object and processing will complete
|
2018-07-22 08:40:15 -07:00
|
|
|
* immediately. Once all downloads are complete, this function will fire the 'complete' event on the
|
2017-11-30 00:00:06 -08:00
|
|
|
* global object instance.
|
|
|
|
*
|
2018-03-28 13:42:10 -07:00
|
|
|
* @param {Array.<{id: string, limit: number}>} identifiers Optional. The identifiers to process and corresponding parallel async task limit.
|
2017-08-26 11:43:24 -07:00
|
|
|
*/
|
2017-11-30 00:00:06 -08:00
|
|
|
processDlQueues(identifiers = [{id:'assets', limit:20}, {id:'libraries', limit:5}, {id:'files', limit:5}, {id:'forge', limit:5}]){
|
2018-07-22 08:40:15 -07:00
|
|
|
return new Promise((resolve, reject) => {
|
|
|
|
let shouldFire = true
|
2017-05-22 22:45:20 -07:00
|
|
|
|
2018-07-22 08:40:15 -07:00
|
|
|
// Assign dltracking variables.
|
|
|
|
this.totaldlsize = 0
|
|
|
|
this.progress = 0
|
2017-05-16 23:26:46 -07:00
|
|
|
|
2018-07-22 08:40:15 -07:00
|
|
|
for(let iden of identifiers){
|
|
|
|
this.totaldlsize += this[iden.id].dlsize
|
|
|
|
}
|
2017-05-16 23:26:46 -07:00
|
|
|
|
2018-07-22 08:40:15 -07:00
|
|
|
this.once('complete', (data) => {
|
|
|
|
resolve()
|
|
|
|
})
|
2017-11-27 01:31:54 -08:00
|
|
|
|
2018-07-22 08:40:15 -07:00
|
|
|
for(let iden of identifiers){
|
|
|
|
let r = this.startAsyncProcess(iden.id, iden.limit)
|
|
|
|
if(r) shouldFire = false
|
|
|
|
}
|
|
|
|
|
|
|
|
if(shouldFire){
|
|
|
|
this.emit('complete', 'download')
|
|
|
|
}
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
    /**
     * Run the full validation pipeline for a server: pull the distribution,
     * validate every asset category (enqueueing missing files), download the
     * queues, then load Forge's version data. Never throws — failures are
     * returned via the `error` property.
     *
     * @param {string} serverid The id of the server to validate.
     * @param {boolean} dev Optional. Whether to pull the distribution in dev mode.
     * @returns {Promise.<Object>} Resolves to {versionData, forgeData} on success,
     * or {versionData: null, forgeData: null, error} on failure.
     */
    async validateEverything(serverid, dev = false){

        try {
            if(!ConfigManager.isLoaded()){
                ConfigManager.load()
            }
            DistroManager.setDevMode(dev)
            const dI = await DistroManager.pullLocal()

            const server = dI.getServer(serverid)

            // Validate Everything

            // These steps are intentionally sequential: each validation step
            // enqueues downloads that processDlQueues() drains afterwards, and
            // a 'validate' event is emitted after each stage for UI progress.
            await this.validateDistribution(server)
            this.emit('validate', 'distribution')
            const versionData = await this.loadVersionData(server.getMinecraftVersion())
            this.emit('validate', 'version')
            await this.validateAssets(versionData)
            this.emit('validate', 'assets')
            await this.validateLibraries(versionData)
            this.emit('validate', 'libraries')
            await this.validateMiscellaneous(versionData)
            this.emit('validate', 'files')
            await this.processDlQueues()
            //this.emit('complete', 'download')
            // Forge data is loaded last so any freshly downloaded artifacts exist.
            const forgeData = await this.loadForgeData(server)

            return {
                versionData,
                forgeData
            }

        } catch (err){
            // Surface the failure to the caller instead of throwing.
            return {
                versionData: null,
                forgeData: null,
                error: err
            }
        }

    }
|
|
|
|
|
2018-03-30 05:31:54 -07:00
|
|
|
// #endregion
|
|
|
|
|
2017-05-16 23:26:46 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
// Public API of this module.
module.exports = {
    Util,
    AssetGuard,
    JavaGuard,
    Asset,
    Library
}
|