const fs = require('fs')
const request = require('request')
const path = require('path')
const mkpath = require('mkdirp')
const async = require('async')

function Asset(from, to, size){
    this.from = from
    this.to = to
    this.size = size
}

function AssetIndex(id, sha1, size, url, totalSize){
    this.id = id
    this.sha1 = sha1
    this.size = size
    this.url = url
    this.totalSize = totalSize
}
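
// The AssetIndex fields above mirror the 'assetIndex' block of the version data
// that downloadAssets() reads below. A sketch of that block, with illustrative
// values only (real ids, hashes, sizes and the url come from Mojang's manifest):
//
//   "assetIndex": {
//       "id": "1.11",
//       "sha1": "<40-character sha1 of the index file>",
//       "size": 169253,
//       "url": "<url of the asset index json>",
//       "totalSize": 127037169
//   }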

function ClientDownload(){

}

function ServerDownload(){

}

function Library(){

}

/**
 * This function will download the version index data and read it into a JavaScript
 * object. This object will then be returned.
 */
exports.parseVersionData = function(version, basePath){
    const name = version + '.json'
    const baseURL = 'https://s3.amazonaws.com/Minecraft.Download/versions/' + version + '/' + name
    const versionPath = path.join(basePath, 'versions', version)

    return new Promise(function(fulfill, reject){
        request.head(baseURL, function(err, res, body){
            if(err){
                reject(err)
                return
            }
            console.log('Preparing download of ' + version + ' version data.')
            mkpath.sync(versionPath)
            const stream = request(baseURL).pipe(fs.createWriteStream(path.join(versionPath, name)))
            stream.on('finish', function(){
                fulfill(JSON.parse(fs.readFileSync(path.join(versionPath, name))))
            })
        })
    })
}
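
/*
 * A minimal usage sketch (the module path, version id and base path are
 * illustrative): the returned promise resolves with the parsed version data,
 * which the other helpers in this module consume.
 *
 *   const downloader = require('./<this module>')
 *   downloader.parseVersionData('1.11.2', 'path/to/gamedir').then(function(versionData){
 *       downloader.downloadClient(versionData, 'path/to/gamedir')
 *       downloader.downloadAssets(versionData, 'path/to/gamedir')
 *   })
 */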

/**
 * Download the client for the given version. Mojang serves this file as
 * 'client.jar', so it is saved locally as '{version}.jar'.
 */
exports.downloadClient = function(versionData, basePath){
    const dls = versionData['downloads']
    const clientData = dls['client']
    const url = clientData['url']
    const size = clientData['size']
    const version = versionData['id']
    const targetPath = path.join(basePath, 'versions', version)
    const targetFile = version + '.jar'

    request.head(url, function(err, res, body){
        console.log('Downloading ' + version + ' client..')
        mkpath.sync(targetPath)
        const stream = request(url).pipe(fs.createWriteStream(path.join(targetPath, targetFile)))
        stream.on('finish', function(){
            console.log('Finished downloading ' + version + ' client.')
        })
    })
}

/**
 * Download the log configuration file declared by the version data. The file
 * is saved to {basePath}/assets/log_configs under the id Mojang assigns it.
 */
exports.downloadLogConfig = function(versionData, basePath){
    const logging = versionData['logging']
    const client = logging['client']
    const file = client['file']
    const version = versionData['id']
    const targetPath = path.join(basePath, 'assets', 'log_configs')
    const name = file['id']
    const url = file['url']

    request.head(url, function(err, res, body){
        console.log('Downloading ' + version + ' log config..')
        mkpath.sync(targetPath)
        const stream = request(url).pipe(fs.createWriteStream(path.join(targetPath, name)))
        stream.on('finish', function(){
            console.log('Finished downloading ' + version + ' log config.')
        })
    })
}

/**
 * Download the libraries declared by the version data. Libraries whose rules
 * exclude the current OS are skipped, and native libraries are not yet handled
 * (see the TODO below).
 */
exports.downloadLibraries = function(versionData, basePath){
    const libArr = versionData['libraries']
    const libPath = path.join(basePath, 'libraries')
    async.eachLimit(libArr, 1, function(lib, cb){
        if(validateRules(lib['rules'])){
            if(lib['natives'] == null){
                const dlInfo = lib['downloads']
                const artifact = dlInfo['artifact']
                const libSize = artifact['size']
                const to = path.join(libPath, artifact['path'])
                const from = artifact['url']

                mkpath.sync(path.join(to, ".."))
                let req = request(from)
                let writeStream = fs.createWriteStream(to)
                req.pipe(writeStream)

                let acc = 0;
                req.on('data', function(chunk){
                    acc += chunk.length
                    console.log('Progress', acc/libSize)
                })
                writeStream.on('close', function(){
                    cb()
                })
            } else {
                //TODO Perform native extraction.
                cb()
            }
        } else {
            //Excluded by its rules on this OS, nothing to download.
            cb()
        }
    }, function(err){
        if(err){
            console.log('A file failed to process');
        } else {
            console.log('All files have been processed successfully');
        }
    })
}
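
/*
 * downloadLibraries() expects each entry of versionData['libraries'] to carry a
 * downloads.artifact block with 'path', 'url' and 'size', plus optional 'rules'
 * and 'natives' fields. A sketch of one entry, with illustrative values:
 *
 *   {
 *       "name": "com.google.code.gson:gson:2.8.0",
 *       "downloads": {
 *           "artifact": {
 *               "path": "com/google/code/gson/gson/2.8.0/gson-2.8.0.jar",
 *               "url": "<library url>",
 *               "size": 231952
 *           }
 *       }
 *   }
 */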

/**
 * Evaluate a library's rule list against the current OS. With no rules the
 * library is always allowed; a 'disallow' rule whose os name matches the
 * current platform excludes it.
 */
function validateRules(rules){
    if(rules == null) return true;

    let allowed = true
    rules.forEach(function(rule){
        const action = rule['action']
        if(action != null && action === 'disallow'){
            const os = rule['os']
            if(os != null && os['name'] === mojangFriendlyOS()){
                //Returning from the forEach callback would not exit
                //validateRules, so track the result in a flag instead.
                allowed = false
            }
        }
    })
    return allowed
}
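
/*
 * Rule evaluation sketch, assuming Mojang's rule shape of
 * { "action": ..., "os": { "name": ... } }:
 *
 *   validateRules(null)                                           // true - no rules means allowed
 *   validateRules([{action: 'disallow', os: {name: 'osx'}}])      // false on macOS, true elsewhere
 *   validateRules([{action: 'allow'}])                            // true - only 'disallow' is handled here
 */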

/**
 * Map process.platform onto the OS names Mojang uses in its rule objects.
 */
function mojangFriendlyOS(){
    const opSys = process.platform
    if (opSys === 'darwin') {
        return 'osx'
    } else if (opSys === 'win32'){
        return 'windows'
    } else if (opSys === 'linux'){
        return 'linux'
    } else {
        return 'unknown_os'
    }
}

/**
 * Given the version data, this function will asynchronously download the
 * asset index and the assets associated with that version.
 */
exports.downloadAssets = function(versionData, basePath){
    //Asset index constants.
    const assetIndex = versionData['assetIndex']
    const indexURL = assetIndex['url']
    const datasize = assetIndex['totalSize']
    const gameVersion = versionData['id']
    const assetVersion = assetIndex['id']
    const name = assetVersion + '.json'

    //Asset constants
    const resourceURL = 'http://resources.download.minecraft.net/'
    const localPath = path.join(basePath, 'assets')
    const indexPath = path.join(localPath, 'indexes')
    const objectPath = path.join(localPath, 'objects')

    request.head(indexURL, function (err, res, body) {
        console.log('Downloading ' + gameVersion + ' asset index.')
        mkpath.sync(indexPath)
        const stream = request(indexURL).pipe(fs.createWriteStream(path.join(indexPath, name)))
        stream.on('finish', function() {
            const data = JSON.parse(fs.readFileSync(path.join(indexPath, name), 'utf-8'))
            const assetArr = []
            Object.keys(data['objects']).forEach(function(key, index){
                const ob = data['objects'][key]
                const hash = String(ob['hash'])
                const assetName = path.join(hash.substring(0, 2), hash)
                const urlName = hash.substring(0, 2) + "/" + hash
                const ast = new Asset(resourceURL + urlName, path.join(objectPath, assetName), ob['size'])
                assetArr.push(ast)
            })
            let acc = 0;
            async.eachLimit(assetArr, 5, function(asset, cb){
                mkpath.sync(path.join(asset.to, ".."))
                let req = request(asset.from)
                let writeStream = fs.createWriteStream(asset.to)
                req.pipe(writeStream)
                req.on('data', function(chunk){
                    acc += chunk.length
                    console.log('Progress', acc/datasize)
                })
                writeStream.on('close', function(){
                    cb()
                })
            }, function(err){
                if(err){
                    console.log('A file failed to process');
                } else {
                    console.log('All files have been processed successfully');
                }
            })
        })
    })
}
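
/*
 * The downloaded asset index maps resource names to hashes. A sketch of its
 * 'objects' block (the key, hash and size are illustrative):
 *
 *   "objects": {
 *       "minecraft/sounds/ambient/cave/cave1.ogg": {
 *           "hash": "953d9e9d56c4b159a52da2e91e8a9b2bba58e22b",
 *           "size": 18603
 *       }
 *   }
 *
 * Each entry is fetched from resourceURL + '<first two hash chars>/<hash>' and
 * written to {basePath}/assets/objects/<first two hash chars>/<hash>.
 */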