Mirror of https://github.com/dscalzi/HeliosLauncher.git (synced 2024-12-22 11:42:14 -08:00)
Large update to AssetGuard to make the module purely object-based. This fixes several issues that were present in the static implementation and appears to have improved performance. Several other bugs in the front-end scripts have also been fixed.
parent 4c2c46f535
commit 52ab270ce3
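The practical effect of the change, as the diff below shows, is that callers no longer work through a module-level singleton (the free functions and AssetGuard.instance); each caller constructs its own AssetGuard, calls its methods, and listens for events on that instance. A minimal sketch of the new usage, based only on the calls exercised in the renderer script in this diff (the function name validateAndLaunchSketch and the hard-coded '1.11.2' / 'WesterosCraft-1.11.2' ids are illustrative, mirroring the test code):

const path = require('path')
const {AssetGuard} = require(path.join(__dirname, 'assets', 'js', 'assetguard.js'))
const {GAME_DIRECTORY} = require(path.join(__dirname, 'assets', 'js', 'constants.js'))

// Sketch only: mirrors the testdownloads flow shown in the diff below.
async function validateAndLaunchSketch(){
    // One throwaway instance per download session, instead of the old shared AssetGuard.instance.
    const tracker = new AssetGuard()

    const versionData = await tracker.loadVersionData('1.11.2', GAME_DIRECTORY)
    await tracker.validateAssets(versionData, GAME_DIRECTORY)
    await tracker.validateLibraries(versionData, GAME_DIRECTORY)
    await tracker.validateMiscellaneous(versionData, GAME_DIRECTORY)
    await tracker.validateDistribution('WesterosCraft-1.11.2', GAME_DIRECTORY)

    // Progress and completion events are emitted by the instance itself.
    tracker.on('totaldlprogress', (data) => {
        console.log(Math.round((data.acc / data.total) * 100) + '%')
    })
    tracker.on('dlcomplete', () => {
        console.log('All download queues processed.')
    })

    tracker.processDlQueues()
}

Dropping the reference once the 'dlcomplete' handler has run (tracker = null in the diff) lets the instance and its queues be garbage collected between sessions.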
@@ -1,10 +1,10 @@
 const mojang = require('mojang')
 const path = require('path')
-const AssetGuard = require(path.join(__dirname, 'assets', 'js', 'assetguard.js'))
+const {AssetGuard} = require(path.join(__dirname, 'assets', 'js', 'assetguard.js'))
 const ProcessBuilder = require(path.join(__dirname, 'assets', 'js', 'processbuilder.js'))
 const {GAME_DIRECTORY, DEFAULT_CONFIG} = require(path.join(__dirname, 'assets', 'js', 'constants.js'))
 
-document.onreadystatechange = function(){
+document.addEventListener('readystatechange', function(){
 if (document.readyState === 'interactive'){
 
 // Bind launch button
@@ -14,7 +14,10 @@ document.onreadystatechange = function(){
 })
 
 }
-}
+}, false)
 
+// Keep reference to AssetGuard object temporarily
+let tracker;
+
 testdownloads = async function(){
 const content = document.getElementById("launch_content")
@@ -28,45 +31,48 @@ testdownloads = async function(){
 details.style.display = 'flex'
 content.style.display = 'none'
 
+tracker = new AssetGuard()
+
 det_text.innerHTML = 'Loading version information..'
-const versionData = await AssetGuard.loadVersionData('1.11.2', GAME_DIRECTORY)
+const versionData = await tracker.loadVersionData('1.11.2', GAME_DIRECTORY)
 progress.setAttribute('value', 20)
 progress_text.innerHTML = '20%'
 
 det_text.innerHTML = 'Validating asset integrity..'
-await AssetGuard.validateAssets(versionData, GAME_DIRECTORY)
+await tracker.validateAssets(versionData, GAME_DIRECTORY)
 progress.setAttribute('value', 40)
 progress_text.innerHTML = '40%'
 console.log('assets done')
 
 det_text.innerHTML = 'Validating library integrity..'
-await AssetGuard.validateLibraries(versionData, GAME_DIRECTORY)
+await tracker.validateLibraries(versionData, GAME_DIRECTORY)
 progress.setAttribute('value', 60)
 progress_text.innerHTML = '60%'
 console.log('libs done')
 
 det_text.innerHTML = 'Validating miscellaneous file integrity..'
-await AssetGuard.validateMiscellaneous(versionData, GAME_DIRECTORY)
+await tracker.validateMiscellaneous(versionData, GAME_DIRECTORY)
 progress.setAttribute('value', 80)
 progress_text.innerHTML = '80%'
 console.log('files done')
 
 det_text.innerHTML = 'Validating server distribution files..'
-const serv = await AssetGuard.validateDistribution('WesterosCraft-1.11.2', GAME_DIRECTORY)
+const serv = await tracker.validateDistribution('WesterosCraft-1.11.2', GAME_DIRECTORY)
 progress.setAttribute('value', 100)
 progress_text.innerHTML = '100%'
 console.log('forge stuff done')
 
 det_text.innerHTML = 'Downloading files..'
-AssetGuard.instance.on('totaldlprogress', function(data){
+tracker.on('totaldlprogress', function(data){
 progress.setAttribute('max', data.total)
 progress.setAttribute('value', data.acc)
 progress_text.innerHTML = parseInt((data.acc/data.total)*100) + '%'
 })
-AssetGuard.instance.on('dlcomplete', async function(){
+
+tracker.on('dlcomplete', async function(){
 
 det_text.innerHTML = 'Preparing to launch..'
-const forgeData = await AssetGuard.loadForgeData('WesterosCraft-1.11.2', GAME_DIRECTORY)
+const forgeData = await tracker.loadForgeData('WesterosCraft-1.11.2', GAME_DIRECTORY)
 const authUser = await mojang.auth('EMAIL', 'PASS', DEFAULT_CONFIG.getClientToken(), {
 name: 'Minecraft',
 version: 1
@@ -94,6 +100,8 @@ testdownloads = async function(){
 content.style.display = 'inline-flex'
 }, 5000)
 }
+// Remove reference to tracker.
+tracker = null
 })
-AssetGuard.processDlQueues()
+tracker.processDlQueues()
 }
@@ -2,21 +2,21 @@
 * AssetGuard
 *
 * This module aims to provide a comprehensive and stable method for processing
-* and downloading game assets for the WesterosCraft server. A central object
-* stores download meta for several identifiers (categories). This meta data
-* is initially empty until one of the module's processing functions are called.
-* That function will process the corresponding asset index and validate any exisitng
-* local files. If a file is missing or fails validation, it will be placed into an
-* array which acts as a queue. This queue is wrapped in a download tracker object
+* and downloading game assets for the WesterosCraft server. Download meta is
+* for several identifiers (categories) is stored inside of an AssetGuard object.
+* This meta data is initially empty until one of the module's processing functions
+* are called. That function will process the corresponding asset index and validate
+* any exisitng local files. If a file is missing or fails validation, it will be
+* placed into a download queue (array). This queue is wrapped in a download tracker object
 * so that essential information can be cached. The download tracker object is then
-* assigned as the value of the identifier in the central object. These download
+* assigned as the value of the identifier in the AssetGuard object. These download
 * trackers will remain idle until an async process is started to process them.
 *
-* Once the async process is started, any enqueued assets will be downloaded. The central
+* Once the async process is started, any enqueued assets will be downloaded. The AssetGuard
 * object will emit events throughout the download whose name correspond to the identifier
 * being processed. For example, if the 'assets' identifier was being processed, whenever
 * the download stream recieves data, the event 'assetsdlprogress' will be emitted off of
-* the central object instance. This can be listened to by external modules allowing for
+* the AssetGuard instance. This can be listened to by external modules allowing for
 * categorical tracking of the downloading process.
 *
 * @module assetguard
@@ -136,6 +136,7 @@ class DistroModule extends Asset {
 * about a download queue, including the queue itself.
 */
 class DLTracker {
+
 /**
 * Create a DLTracker
 *
@@ -148,6 +149,7 @@ class DLTracker {
 this.dlsize = dlsize
 this.callback = callback
 }
+
 }
 
 /**
@@ -159,6 +161,7 @@ class DLTracker {
 * this module.
 */
 class AssetGuard extends EventEmitter {
+
 /**
 * AssetGuard class should only ever have one instance which is defined in
 * this module. On creation the object's properties are never-null default
@@ -173,14 +176,8 @@ class AssetGuard extends EventEmitter{
 this.files = new DLTracker([], 0)
 this.forge = new DLTracker([], 0)
 }
-}
 
-/**
-* Global static final instance of AssetGuard
-*/
-const instance = new AssetGuard()
-
-// Utility Functions
+// Static Utility Functions
 
 /**
 * Resolve an artifact id into a path. For example, on windows
@@ -191,7 +188,7 @@ const instance = new AssetGuard()
 * @param {String} extension - the extension of the file at the resolved path.
 * @returns {String} - the resolved relative path from the artifact id.
 */
-function _resolvePath(artifactid, extension){
+static _resolvePath(artifactid, extension){
 let ps = artifactid.split(':')
 let cs = ps[0].split('.')
 
@@ -211,7 +208,7 @@ function _resolvePath(artifactid, extension){
 * @param {String} extension - the extension of the file at the resolved url.
 * @returns {String} - the resolved relative URL from the artifact id.
 */
-function _resolveURL(artifactid, extension){
+static _resolveURL(artifactid, extension){
 let ps = artifactid.split(':')
 let cs = ps[0].split('.')
 
@@ -229,7 +226,7 @@ function _resolveURL(artifactid, extension){
 * @param {String} algo - the hash algorithm.
 * @returns {String} - the calculated hash in hex.
 */
-function _calculateHash(buf, algo){
+static _calculateHash(buf, algo){
 return crypto.createHash(algo).update(buf).digest('hex')
 }
 
@@ -240,7 +237,7 @@ function _calculateHash(buf, algo){
 * @param {String} content - the string content of the checksums file.
 * @returns {Object} - an object with keys being the file names, and values being the hashes.
 */
-function _parseChecksumsFile(content){
+static _parseChecksumsFile(content){
 let finalContent = {}
 let lines = content.split('\n')
 for(let i=0; i<lines.length; i++){
@@ -261,7 +258,7 @@ function _parseChecksumsFile(content){
 * @param {String} hash - the existing hash to check against.
 * @returns {Boolean} - true if the file exists and calculated hash matches the given hash, otherwise false.
 */
-function _validateLocal(filePath, algo, hash){
+static _validateLocal(filePath, algo, hash){
 if(fs.existsSync(filePath)){
 //No hash provided, have to assume it's good.
 if(hash == null){
@@ -269,7 +266,7 @@ function _validateLocal(filePath, algo, hash){
 }
 let fileName = path.basename(filePath)
 let buf = fs.readFileSync(filePath)
-let calcdhash = _calculateHash(buf, algo)
+let calcdhash = AssetGuard._calculateHash(buf, algo)
 return calcdhash === hash
 }
 return false;
@@ -282,16 +279,16 @@ function _validateLocal(filePath, algo, hash){
 * @param {Array.<String>} checksums - the checksums listed in the forge version index.
 * @returns {Boolean} - true if the file exists and the hashes match, otherwise false.
 */
-function _validateForgeChecksum(filePath, checksums){
+static _validateForgeChecksum(filePath, checksums){
 if(fs.existsSync(filePath)){
 if(checksums == null || checksums.length === 0){
 return true
 }
 let buf = fs.readFileSync(filePath)
-let calcdhash = _calculateHash(buf, 'sha1')
+let calcdhash = AssetGuard._calculateHash(buf, 'sha1')
 let valid = checksums.includes(calcdhash)
 if(!valid && filePath.endsWith('.jar')){
-valid = _validateForgeJar(filePath, checksums)
+valid = AssetGuard._validateForgeJar(filePath, checksums)
 }
 return valid
 }
@@ -307,7 +304,7 @@ function _validateForgeChecksum(filePath, checksums){
 * @param {Array.<String>} checksums - the checksums listed in the forge version index.
 * @returns {Boolean} - true if all hashes declared in the checksums.sha1 file match the actual hashes.
 */
-function _validateForgeJar(buf, checksums){
+static _validateForgeJar(buf, checksums){
 // Double pass method was the quickest I found. I tried a version where we store data
 // to only require a single pass, plus some quick cleanup but that seemed to take slightly more time.
 
@@ -321,9 +318,9 @@ function _validateForgeJar(buf, checksums){
 for(let i=0; i<zipEntries.length; i++){
 let entry = zipEntries[i]
 if(entry.entryName === 'checksums.sha1'){
-expected = _parseChecksumsFile(zip.readAsText(entry))
+expected = AssetGuard._parseChecksumsFile(zip.readAsText(entry))
 }
-hashes[entry.entryName] = _calculateHash(entry.getData(), 'sha1')
+hashes[entry.entryName] = AssetGuard._calculateHash(entry.getData(), 'sha1')
 }
 
 if(!checksums.includes(hashes['checksums.sha1'])){
@@ -346,7 +343,7 @@ function _validateForgeJar(buf, checksums){
 * @param {Array.<String>} filePaths - The paths of the files to be extracted and unpacked.
 * @returns {Promise.<Void>} - An empty promise to indicate the extraction has completed.
 */
-function _extractPackXZ(filePaths){
+static _extractPackXZ(filePaths){
 return new Promise(function(fulfill, reject){
 const libPath = path.join(__dirname, '..', 'libraries', 'java', 'PackXZExtract.jar')
 const filePath = filePaths.join(',')
@@ -374,7 +371,7 @@ function _extractPackXZ(filePaths){
 * @param {String} basePath
 * @returns {Promise.<Object>} - A promise which resolves to the contents of forge's version.json.
 */
-function _finalizeForgeAsset(asset, basePath){
+static _finalizeForgeAsset(asset, basePath){
 return new Promise(function(fulfill, reject){
 fs.readFile(asset.to, (err, data) => {
 const zip = new AdmZip(data)
@@ -409,10 +406,11 @@ function _finalizeForgeAsset(asset, basePath){
 * @param {Number} limit - optional. The number of async processes to run in parallel.
 * @returns {Boolean} - true if the process began, otherwise false.
 */
-function startAsyncProcess(identifier, limit = 5){
+startAsyncProcess(identifier, limit = 5){
+const self = this
 let win = remote.getCurrentWindow()
 let acc = 0
-const concurrentDlTracker = instance[identifier]
+const concurrentDlTracker = this[identifier]
 const concurrentDlQueue = concurrentDlTracker.dlqueue.slice(0)
 if(concurrentDlQueue.length === 0){
 return false
@@ -437,35 +435,35 @@ function startAsyncProcess(identifier, limit = 5){
 } else {
 req.abort()
 console.log('Failed to download ' + asset.from + '. Response code', resp.statusCode)
-instance.progress += asset.size*1
-win.setProgressBar(instance.progress/instance.totaldlsize)
-instance.emit('totaldlprogress', {acc: instance.progress, total: instance.totaldlsize})
+self.progress += asset.size*1
+win.setProgressBar(self.progress/self.totaldlsize)
+self.emit('totaldlprogress', {acc: self.progress, total: self.totaldlsize})
 cb()
 }
 })
 req.on('data', function(chunk){
 count += chunk.length
-instance.progress += chunk.length
+self.progress += chunk.length
 acc += chunk.length
-instance.emit(identifier + 'dlprogress', acc)
-//console.log(identifier + ' Progress', acc/instance[identifier].dlsize)
-win.setProgressBar(instance.progress/instance.totaldlsize)
-instance.emit('totaldlprogress', {acc: instance.progress, total: instance.totaldlsize})
+self.emit(identifier + 'dlprogress', acc)
+//console.log(identifier + ' Progress', acc/this[identifier].dlsize)
+win.setProgressBar(self.progress/self.totaldlsize)
+self.emit('totaldlprogress', {acc: self.progress, total: self.totaldlsize})
 })
 }, function(err){
 if(err){
-instance.emit(identifier + 'dlerror')
+self.emit(identifier + 'dlerror')
 console.log('An item in ' + identifier + ' failed to process');
 } else {
-instance.emit(identifier + 'dlcomplete')
+self.emit(identifier + 'dlcomplete')
 console.log('All ' + identifier + ' have been processed successfully')
 }
-instance.totaldlsize -= instance[identifier].dlsize
-instance.progress -= instance[identifier].dlsize
-instance[identifier] = new DLTracker([], 0)
-if(instance.totaldlsize === 0) {
+self.totaldlsize -= self[identifier].dlsize
+self.progress -= self[identifier].dlsize
+self[identifier] = new DLTracker([], 0)
+if(self.totaldlsize === 0) {
 win.setProgressBar(-1)
-instance.emit('dlcomplete')
+self.emit('dlcomplete')
 }
 })
 return true
@@ -482,7 +480,7 @@ function startAsyncProcess(identifier, limit = 5){
 * @param {Boolean} force - optional. If true, the version index will be downloaded even if it exists locally. Defaults to false.
 * @returns {Promise.<Object>} - Promise which resolves to the version data object.
 */
-function loadVersionData(version, basePath, force = false){
+loadVersionData(version, basePath, force = false){
 return new Promise(function(fulfill, reject){
 const name = version + '.json'
 const url = 'https://s3.amazonaws.com/Minecraft.Download/versions/' + version + '/' + name
@@ -515,9 +513,10 @@ function loadVersionData(version, basePath, force = false){
 * @param {Boolean} force - optional. If true, the asset index will be downloaded even if it exists locally. Defaults to false.
 * @returns {Promise.<Void>} - An empty promise to indicate the async processing has completed.
 */
-function validateAssets(versionData, basePath, force = false){
+validateAssets(versionData, basePath, force = false){
+const self = this
 return new Promise(function(fulfill, reject){
-_assetChainIndexData(versionData, basePath, force).then(() => {
+self._assetChainIndexData(versionData, basePath, force).then(() => {
 fulfill()
 })
 })
@@ -532,7 +531,8 @@ function validateAssets(versionData, basePath, force = false){
 * @param {Boolean} force
 * @returns {Promise.<Void>} - An empty promise to indicate the async processing has completed.
 */
-function _assetChainIndexData(versionData, basePath, force = false){
+_assetChainIndexData(versionData, basePath, force = false){
+const self = this
 return new Promise(function(fulfill, reject){
 //Asset index constants.
 const assetIndex = versionData.assetIndex
@@ -547,13 +547,13 @@ function _assetChainIndexData(versionData, basePath, force = false){
 const stream = request(assetIndex.url).pipe(fs.createWriteStream(assetIndexLoc))
 stream.on('finish', function() {
 data = JSON.parse(fs.readFileSync(assetIndexLoc, 'utf-8'))
-_assetChainValidateAssets(versionData, basePath, data).then(() => {
+self._assetChainValidateAssets(versionData, basePath, data).then(() => {
 fulfill()
 })
 })
 } else {
 data = JSON.parse(fs.readFileSync(assetIndexLoc, 'utf-8'))
-_assetChainValidateAssets(versionData, basePath, data).then(() => {
+self._assetChainValidateAssets(versionData, basePath, data).then(() => {
 fulfill()
 })
 }
@@ -568,7 +568,8 @@ function _assetChainIndexData(versionData, basePath, force = false){
 * @param {Boolean} force
 * @returns {Promise.<Void>} - An empty promise to indicate the async processing has completed.
 */
-function _assetChainValidateAssets(versionData, basePath, indexData){
+_assetChainValidateAssets(versionData, basePath, indexData){
+const self = this
 return new Promise(function(fulfill, reject){
 
 //Asset constants
@@ -585,13 +586,13 @@ function _assetChainValidateAssets(versionData, basePath, indexData){
 const assetName = path.join(hash.substring(0, 2), hash)
 const urlName = hash.substring(0, 2) + "/" + hash
 const ast = new Asset(key, hash, String(value.size), resourceURL + urlName, path.join(objectPath, assetName))
-if(!_validateLocal(ast.to, 'sha1', ast.hash)){
+if(!AssetGuard._validateLocal(ast.to, 'sha1', ast.hash)){
 dlSize += (ast.size*1)
 assetDlQueue.push(ast)
 }
 cb()
 }, function(err){
-instance.assets = new DLTracker(assetDlQueue, dlSize)
+self.assets = new DLTracker(assetDlQueue, dlSize)
 fulfill()
 })
 })
@@ -607,7 +608,8 @@ function _assetChainValidateAssets(versionData, basePath, indexData){
 * @param {String} basePath - the absolute file path which will be prepended to the given relative paths.
 * @returns {Promise.<Void>} - An empty promise to indicate the async processing has completed.
 */
-function validateLibraries(versionData, basePath){
+validateLibraries(versionData, basePath){
+const self = this
 return new Promise(function(fulfill, reject){
 
 const libArr = versionData.libraries
@@ -621,14 +623,14 @@ function validateLibraries(versionData, basePath){
 if(Library.validateRules(lib.rules)){
 let artifact = (lib.natives == null) ? lib.downloads.artifact : lib.downloads.classifiers[lib.natives[Library.mojangFriendlyOS()]]
 const libItm = new Library(lib.name, artifact.sha1, artifact.size, artifact.url, path.join(libPath, artifact.path))
-if(!_validateLocal(libItm.to, 'sha1', libItm.hash)){
+if(!AssetGuard._validateLocal(libItm.to, 'sha1', libItm.hash)){
 dlSize += (libItm.size*1)
 libDlQueue.push(libItm)
 }
 }
 cb()
 }, function(err){
-instance.libraries = new DLTracker(libDlQueue, dlSize)
+self.libraries = new DLTracker(libDlQueue, dlSize)
 fulfill()
 })
 })
@@ -642,10 +644,11 @@ function validateLibraries(versionData, basePath){
 * @param {String} basePath - the absolute file path which will be prepended to the given relative paths.
 * @returns {Promise.<Void>} - An empty promise to indicate the async processing has completed.
 */
-function validateMiscellaneous(versionData, basePath){
+validateMiscellaneous(versionData, basePath){
+const self = this
 return new Promise(async function(fulfill, reject){
-await validateClient(versionData, basePath)
-await validateLogConfig(versionData, basePath)
+await self.validateClient(versionData, basePath)
+await self.validateLogConfig(versionData, basePath)
 fulfill()
 })
 }
@@ -658,7 +661,8 @@ function validateMiscellaneous(versionData, basePath){
 * @param {Boolean} force - optional. If true, the asset index will be downloaded even if it exists locally. Defaults to false.
 * @returns {Promise.<Void>} - An empty promise to indicate the async processing has completed.
 */
-function validateClient(versionData, basePath, force = false){
+validateClient(versionData, basePath, force = false){
+const self = this
 return new Promise(function(fulfill, reject){
 const clientData = versionData.downloads.client
 const version = versionData.id
@@ -667,9 +671,9 @@ function validateClient(versionData, basePath, force = false){
 
 let client = new Asset(version + ' client', clientData.sha1, clientData.size, clientData.url, path.join(targetPath, targetFile))
 
-if(!_validateLocal(client.to, 'sha1', client.hash) || force){
-instance.files.dlqueue.push(client)
-instance.files.dlsize += client.size*1
+if(!AssetGuard._validateLocal(client.to, 'sha1', client.hash) || force){
+self.files.dlqueue.push(client)
+self.files.dlsize += client.size*1
 fulfill()
 } else {
 fulfill()
@@ -685,7 +689,8 @@ function validateClient(versionData, basePath, force = false){
 * @param {Boolean} force - optional. If true, the asset index will be downloaded even if it exists locally. Defaults to false.
 * @returns {Promise.<Void>} - An empty promise to indicate the async processing has completed.
 */
-function validateLogConfig(versionData, basePath){
+validateLogConfig(versionData, basePath){
+const self = this
 return new Promise(function(fulfill, reject){
 const client = versionData.logging.client
 const file = client.file
@@ -693,9 +698,9 @@ function validateLogConfig(versionData, basePath){
 
 let logConfig = new Asset(file.id, file.sha1, file.size, file.url, path.join(targetPath, file.id))
 
-if(!_validateLocal(logConfig.to, 'sha1', logConfig.hash)){
-instance.files.dlqueue.push(logConfig)
-instance.files.dlsize += logConfig.size*1
+if(!AssetGuard._validateLocal(logConfig.to, 'sha1', logConfig.hash)){
+self.files.dlqueue.push(logConfig)
+self.files.dlsize += logConfig.size*1
 fulfill()
 } else {
 fulfill()
@@ -710,9 +715,10 @@ function validateLogConfig(versionData, basePath){
 * @param {String} basePath - the absolute file path which will be prepended to the given relative paths.
 * @returns {Promise.<Object>} - A promise which resolves to the server distribution object.
 */
-function validateDistribution(serverpackid, basePath){
+validateDistribution(serverpackid, basePath){
+const self = this
 return new Promise(function(fulfill, reject){
-_chainValidateDistributionIndex(basePath).then((value) => {
+self._chainValidateDistributionIndex(basePath).then((value) => {
 let servers = value.servers
 let serv = null
 for(let i=0; i<servers.length; i++){
@@ -722,15 +728,15 @@ function validateDistribution(serverpackid, basePath){
 }
 }
 
-instance.forge = _parseDistroModules(serv.modules, basePath, serv.mc_version)
+self.forge = self._parseDistroModules(serv.modules, basePath, serv.mc_version)
 //Correct our workaround here.
-let decompressqueue = instance.forge.callback
-instance.forge.callback = function(asset){
+let decompressqueue = self.forge.callback
+self.forge.callback = function(asset){
 if(asset.to.toLowerCase().endsWith('.pack.xz')){
-_extractPackXZ([asset.to])
+AssetGuard._extractPackXZ([asset.to])
 }
 if(asset.type === 'forge-hosted' || asset.type === 'forge'){
-_finalizeForgeAsset(asset, basePath)
+AssetGuard._finalizeForgeAsset(asset, basePath)
 }
 }
 fulfill(serv)
@@ -740,7 +746,7 @@ function validateDistribution(serverpackid, basePath){
 
 //TODO The distro index should be downloaded in the 'pre-loader'. This is because
 //we will eventually NEED the index to generate the server list on the ui.
-function _chainValidateDistributionIndex(basePath){
+_chainValidateDistributionIndex(basePath){
 return new Promise(function(fulfill, reject){
 //const distroURL = 'http://mc.westeroscraft.com/WesterosCraftLauncher/westeroscraft.json'
 const targetFile = path.join(basePath, 'westeroscraft.json')
@@ -752,7 +758,7 @@ function _chainValidateDistributionIndex(basePath){
 })
 }
 
-function _parseDistroModules(modules, basePath, version){
+_parseDistroModules(modules, basePath, version){
 let alist = []
 let asize = 0;
 //This may be removed soon, considering the most efficient way to extract.
@@ -761,7 +767,7 @@ function _parseDistroModules(modules, basePath, version){
 let ob = modules[i]
 let obType = ob.type
 let obArtifact = ob.artifact
-let obPath = obArtifact.path == null ? _resolvePath(ob.id, obArtifact.extension) : obArtifact.path
+let obPath = obArtifact.path == null ? AssetGuard._resolvePath(ob.id, obArtifact.extension) : obArtifact.path
 switch(obType){
 case 'forge-hosted':
 case 'forge':
@@ -782,14 +788,14 @@ function _parseDistroModules(modules, basePath, version){
 }
 let artifact = new DistroModule(ob.id, obArtifact.MD5, obArtifact.size, obArtifact.url, obPath, obType)
 const validationPath = obPath.toLowerCase().endsWith('.pack.xz') ? obPath.substring(0, obPath.toLowerCase().lastIndexOf('.pack.xz')) : obPath
-if(!_validateLocal(validationPath, 'MD5', artifact.hash)){
+if(!AssetGuard._validateLocal(validationPath, 'MD5', artifact.hash)){
 asize += artifact.size*1
 alist.push(artifact)
 if(validationPath !== obPath) decompressqueue.push(obPath)
 }
 //Recursively process the submodules then combine the results.
 if(ob.sub_modules != null){
-let dltrack = _parseDistroModules(ob.sub_modules, basePath, version)
+let dltrack = this._parseDistroModules(ob.sub_modules, basePath, version)
 asize += dltrack.dlsize*1
 alist = alist.concat(dltrack.dlqueue)
 decompressqueue = decompressqueue.concat(dltrack.callback)
@@ -807,9 +813,10 @@ function _parseDistroModules(modules, basePath, version){
 * @param {String} basePath
 * @returns {Promise.<Object>} - A promise which resolves to Forge's version.json data.
 */
-function loadForgeData(serverpack, basePath){
+loadForgeData(serverpack, basePath){
+const self = this
 return new Promise(async function(fulfill, reject){
-let distro = await _chainValidateDistributionIndex(basePath)
+let distro = await self._chainValidateDistributionIndex(basePath)
 
 const servers = distro.servers
 let serv = null
@@ -825,9 +832,9 @@ function loadForgeData(serverpack, basePath){
 const ob = modules[i]
 if(ob.type === 'forge-hosted' || ob.type === 'forge'){
 let obArtifact = ob.artifact
-let obPath = obArtifact.path == null ? path.join(basePath, 'libraries', _resolvePath(ob.id, obArtifact.extension)) : obArtifact.path
+let obPath = obArtifact.path == null ? path.join(basePath, 'libraries', AssetGuard._resolvePath(ob.id, obArtifact.extension)) : obArtifact.path
 let asset = new DistroModule(ob.id, obArtifact.MD5, obArtifact.size, obArtifact.url, obPath, ob.type)
-let forgeData = await _finalizeForgeAsset(asset, basePath)
+let forgeData = await AssetGuard._finalizeForgeAsset(asset, basePath)
 fulfill(forgeData)
 return
 }
@@ -836,7 +843,7 @@ function loadForgeData(serverpack, basePath){
 })
 }
 
-function _parseForgeLibraries(){
+_parseForgeLibraries(){
 /* TODO
 * Forge asset validations are already implemented. When there's nothing much
 * to work on, implement forge downloads using forge's version.json. This is to
@@ -848,46 +855,40 @@ function _parseForgeLibraries(){
 * This function will initiate the download processed for the specified identifiers. If no argument is
 * given, all identifiers will be initiated. Note that in order for files to be processed you need to run
 * the processing function corresponding to that identifier. If you run this function without processing
-* the files, it is likely nothing will be enqueued in the global object and processing will complete
+* the files, it is likely nothing will be enqueued in the object and processing will complete
 * immediately. Once all downloads are complete, this function will fire the 'dlcomplete' event on the
 * global object instance.
 *
 * @param {Array.<{id: string, limit: number}>} identifiers - optional. The identifiers to process and corresponding parallel async task limit.
 */
-function processDlQueues(identifiers = [{id:'assets', limit:20}, {id:'libraries', limit:5}, {id:'files', limit:5}, {id:'forge', limit:5}]){
+processDlQueues(identifiers = [{id:'assets', limit:20}, {id:'libraries', limit:5}, {id:'files', limit:5}, {id:'forge', limit:5}]){
 this.progress = 0;
 let win = remote.getCurrentWindow()
 
 let shouldFire = true
 
-// Assign global dltracking variables.
-instance.totaldlsize = 0
-instance.progress = 0
+// Assign dltracking variables.
+this.totaldlsize = 0
+this.progress = 0
 for(let i=0; i<identifiers.length; i++){
-instance.totaldlsize += instance[identifiers[i].id].dlsize
+this.totaldlsize += this[identifiers[i].id].dlsize
 }
 
 for(let i=0; i<identifiers.length; i++){
 let iden = identifiers[i]
-let r = startAsyncProcess(iden.id, iden.limit)
+let r = this.startAsyncProcess(iden.id, iden.limit)
 if(r) shouldFire = false
 }
 
 if(shouldFire){
-instance.emit('dlcomplete')
+this.emit('dlcomplete')
 }
 }
 
-module.exports = {
-loadVersionData,
-loadForgeData,
-validateAssets,
-validateLibraries,
-validateMiscellaneous,
-validateDistribution,
-processDlQueues,
-instance,
-Asset,
-Library,
-_resolvePath
+}
+
+module.exports = {
+AssetGuard,
+Asset,
+Library
 }
@@ -6,7 +6,7 @@
 * TODO why are logs not working??????
 */
 const AdmZip = require('adm-zip')
-const ag = require('./assetguard.js')
+const {AssetGuard, Library} = require('./assetguard.js')
 const child_process = require('child_process')
 const {DEFAULT_CONFIG} = require('./constants')
 const fs = require('fs')
@@ -232,7 +232,7 @@ class ProcessBuilder {
 const nativePath = path.join(this.dir, 'natives')
 for(let i=0; i<libArr.length; i++){
 const lib = libArr[i]
-if(ag.Library.validateRules(lib.rules)){
+if(Library.validateRules(lib.rules)){
 if(lib.natives == null){
 const dlInfo = lib.downloads
 const artifact = dlInfo.artifact
@@ -243,7 +243,7 @@ class ProcessBuilder {
 const natives = lib.natives
 const extractInst = lib.extract
 const exclusionArr = extractInst.exclude
-const opSys = ag.Library.mojangFriendlyOS()
+const opSys = Library.mojangFriendlyOS()
 const indexId = natives[opSys]
 const dlInfo = lib.downloads
 const classifiers = dlInfo.classifiers
@@ -304,7 +304,7 @@ class ProcessBuilder {
 for(let i=0; i<mdles.length; i++){
 if(mdles[i].type != null && (mdles[i].type === 'forge-hosted' || mdles[i].type === 'library')){
 let lib = mdles[i]
-libs.push(path.join(this.libPath, lib.artifact.path == null ? ag._resolvePath(lib.id, lib.artifact.extension) : lib.artifact.path))
+libs.push(path.join(this.libPath, lib.artifact.path == null ? AssetGuard._resolvePath(lib.id, lib.artifact.extension) : lib.artifact.path))
 if(lib.sub_modules != null){
 const res = this._resolveModuleLibraries(lib)
 if(res.length > 0){
@@ -341,7 +341,7 @@ class ProcessBuilder {
 for(let i=0; i<mdle.sub_modules.length; i++){
 const sm = mdle.sub_modules[i]
 if(sm.type != null && sm.type == 'library'){
-libs.push(path.join(this.libPath, sm.artifact.path == null ? ag._resolvePath(sm.id, sm.artifact.extension) : sm.artifact.path))
+libs.push(path.join(this.libPath, sm.artifact.path == null ? AssetGuard._resolvePath(sm.id, sm.artifact.extension) : sm.artifact.path))
 }
 // If this module has submodules, we need to resolve the libraries for those.
 // To avoid unnecessary recursive calls, base case is checked here.
@@ -10,9 +10,8 @@ $(function(){
 console.log('UICore Initialized');
 })*/
-
-document.onreadystatechange = function () {
-if (document.readyState === "interactive") {
+document.addEventListener('readystatechange', function () {
+if (document.readyState === 'interactive'){
 
 console.log('UICore Initializing..');
 
 // Bind close button.
@@ -37,6 +36,8 @@ document.onreadystatechange = function () {
 window.minimize()
 })
 
+} else if(document.readyState === 'complete'){
+
 // Bind progress bar length to length of bot wrapper
 const targetWidth = document.getElementById("launch_content").getBoundingClientRect().width
 const targetWidth2 = document.getElementById("server_selection").getBoundingClientRect().width
@@ -45,8 +46,10 @@ document.onreadystatechange = function () {
 document.getElementById("launch_progress").style.width = targetWidth2
 document.getElementById("launch_details_right").style.maxWidth = targetWidth2
 document.getElementById("launch_progress_label").style.width = targetWidth3
+
 }
-}
+
+}, false)
 
 /**
 * Open web links in the user's default browser.