From e91f8f693b0e103ce4ebc317c61fa6f78551f386 Mon Sep 17 00:00:00 2001
From: Jai A
Date: Sun, 7 Nov 2021 18:42:33 -0700
Subject: [PATCH] Add local libs to modrinth maven and other fixes

---
 daedalus/Cargo.toml              |   2 +-
 daedalus/src/lib.rs              |  66 +++++++---
 daedalus/src/modded.rs           |   5 +-
 daedalus_client/Cargo.toml       |   2 +-
 daedalus_client/src/fabric.rs    |   6 +-
 daedalus_client/src/forge.rs     | 209 +++++++++++++++++++++++--------
 daedalus_client/src/main.rs      |  67 ++++++----
 daedalus_client/src/minecraft.rs |  10 +-
 8 files changed, 261 insertions(+), 106 deletions(-)

diff --git a/daedalus/Cargo.toml b/daedalus/Cargo.toml
index d7252669b..4f674a30a 100644
--- a/daedalus/Cargo.toml
+++ b/daedalus/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "daedalus"
-version = "0.1.3"
+version = "0.1.4"
 authors = ["Jai A "]
 edition = "2018"
 license = "MIT"
diff --git a/daedalus/src/lib.rs b/daedalus/src/lib.rs
index b24d304a0..77657323d 100644
--- a/daedalus/src/lib.rs
+++ b/daedalus/src/lib.rs
@@ -51,23 +51,57 @@ pub fn get_path_from_artifact(artifact: &str) -> Result<String, Error> {
     let name = name_items.get(1).ok_or_else(|| {
         Error::ParseError(format!("Unable to find name for library {}", &artifact))
     })?;
-    let version_ext = name_items.get(2).ok_or_else(|| {
-        Error::ParseError(format!("Unable to find version for library {}", &artifact))
-    })?.split('@').collect::<Vec<&str>>();
-    let version = version_ext.get(0).ok_or_else(|| {
-        Error::ParseError(format!("Unable to find version for library {}", &artifact))
-    })?;
-    let ext = version_ext.get(1);
-    Ok(format!(
-        "{}/{}/{}/{}-{}.{}",
-        package.replace(".", "/"),
-        name,
-        version,
-        name,
-        version,
-        ext.unwrap_or(&"jar")
-    ))
+    if name_items.len() == 3 {
+        let version_ext = name_items
+            .get(2)
+            .ok_or_else(|| {
+                Error::ParseError(format!("Unable to find version for library {}", &artifact))
+            })?
+            .split('@')
+            .collect::<Vec<&str>>();
+        let version = version_ext.get(0).ok_or_else(|| {
+            Error::ParseError(format!("Unable to find version for library {}", &artifact))
+        })?;
+        let ext = version_ext.get(1);
+
+        Ok(format!(
+            "{}/{}/{}/{}-{}.{}",
+            package.replace(".", "/"),
+            name,
+            version,
+            name,
+            version,
+            ext.unwrap_or(&"jar")
+        ))
+    } else {
+        let version = name_items.get(2).ok_or_else(|| {
+            Error::ParseError(format!("Unable to find version for library {}", &artifact))
+        })?;
+
+        let data_ext = name_items
+            .get(3)
+            .ok_or_else(|| {
+                Error::ParseError(format!("Unable to find data for library {}", &artifact))
+            })?
+            .split('@')
+            .collect::<Vec<&str>>();
+        let data = data_ext.get(0).ok_or_else(|| {
+            Error::ParseError(format!("Unable to find data for library {}", &artifact))
+        })?;
+        let ext = data_ext.get(1);
+
+        Ok(format!(
+            "{}/{}/{}/{}-{}-{}.{}",
+            package.replace(".", "/"),
+            name,
+            version,
+            name,
+            version,
+            data,
+            ext.unwrap_or(&"jar")
+        ))
+    }
 }
 
 /// Downloads a file from specified mirrors
diff --git a/daedalus/src/modded.rs b/daedalus/src/modded.rs
index 471701c88..6cc42098b 100644
--- a/daedalus/src/modded.rs
+++ b/daedalus/src/modded.rs
@@ -76,7 +76,10 @@ pub fn merge_partial_version(partial: PartialVersionInfo, merge: VersionInfo) ->
     if let Some(merge_args) = merge.arguments {
         let mut new_map = HashMap::new();
 
-        fn add_keys(new_map: &mut HashMap<ArgumentType, Vec<Argument>>, args: HashMap<ArgumentType, Vec<Argument>>) {
+        fn add_keys(
+            new_map: &mut HashMap<ArgumentType, Vec<Argument>>,
+            args: HashMap<ArgumentType, Vec<Argument>>,
+        ) {
             for (type_, arguments) in args {
                 for arg in arguments {
                     if let Some(vec) = new_map.get_mut(&type_) {
diff --git a/daedalus_client/Cargo.toml b/daedalus_client/Cargo.toml
index f36f6f3b1..f30de5b53 100644
--- a/daedalus_client/Cargo.toml
+++ b/daedalus_client/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "daedalus_client"
-version = "0.1.0"
+version = "0.1.4"
 authors = ["Jai A "]
 edition = "2018"
 
diff --git a/daedalus_client/src/fabric.rs b/daedalus_client/src/fabric.rs
index 2734bc26f..0024a6ab2 100644
--- a/daedalus_client/src/fabric.rs
+++ b/daedalus_client/src/fabric.rs
@@ -2,12 +2,12 @@ use crate::{format_url, upload_file_to_bucket, Error};
 use daedalus::download_file;
 use daedalus::minecraft::Library;
 use daedalus::modded::{LoaderType, LoaderVersion, Manifest, PartialVersionInfo, Version};
-use tokio::sync::Mutex;
+use log::info;
 use serde::{Deserialize, Serialize};
 use std::collections::HashMap;
 use std::sync::Arc;
 use std::time::{Duration, Instant};
-use log::info;
+use tokio::sync::Mutex;
 
 pub async fn retrieve_data(uploaded_files: &mut Vec<String>) -> Result<(), Error> {
     let mut list = fetch_fabric_versions(None).await?;
@@ -125,7 +125,7 @@ pub async fn retrieve_data(uploaded_files: &mut Vec<String>) -> Result<(), Error
                         Some("application/java-archive".to_string()),
                         uploaded_files_mutex.as_ref(),
                     )
-                        .await?;
+                    .await?;
 
                     Ok::<Library, Error>(lib)
                 },
diff --git a/daedalus_client/src/forge.rs b/daedalus_client/src/forge.rs
index ff20be0cb..48dae3abe 100644
--- a/daedalus_client/src/forge.rs
+++ b/daedalus_client/src/forge.rs
@@ -6,6 +6,7 @@ use daedalus::modded::{
     LoaderType, LoaderVersion, Manifest, PartialVersionInfo, Processor, SidedDataEntry,
 };
 use lazy_static::lazy_static;
+use log::info;
 use semver::{Version, VersionReq};
 use serde::{Deserialize, Serialize};
 use std::collections::HashMap;
@@ -13,7 +14,6 @@ use std::io::Read;
 use std::sync::Arc;
 use std::time::{Duration, Instant};
 use tokio::sync::Mutex;
-use log::info;
 
 lazy_static! {
     static ref FORGE_MANIFEST_V1_QUERY: VersionReq =
@@ -99,26 +99,30 @@ pub async fn retrieve_data(uploaded_files: &mut Vec<String>) -> Result<(), Error
                 info!("Forge - Installer Start {}", loader_version_full.clone());
                 let bytes = download_file(&*format!("https://maven.minecraftforge.net/net/minecraftforge/forge/{0}/forge-{0}-installer.jar", loader_version_full), None).await?;
-                let reader = std::io::Cursor::new(&*bytes);
+                let reader = std::io::Cursor::new(bytes);
 
-                if let Ok(mut archive) = zip::ZipArchive::new(reader) {
+                if let Ok(archive) = zip::ZipArchive::new(reader) {
                     if FORGE_MANIFEST_V1_QUERY.matches(&version) {
-                        let profile = {
-                            let mut install_profile = archive.by_name("install_profile.json")?;
+                        let mut archive_clone = archive.clone();
+                        let profile = tokio::task::spawn_blocking(move || {
+                            let mut install_profile = archive_clone.by_name("install_profile.json")?;
                             let mut contents = String::new();
                             install_profile.read_to_string(&mut contents)?;
-                            serde_json::from_str::<ForgeInstallerProfileV1>(&*contents)?
-                        };
+                            Ok::<ForgeInstallerProfileV1, Error>(serde_json::from_str::<ForgeInstallerProfileV1>(&*contents)?)
+                        }).await??;
 
-                        let forge_universal_bytes = {
-                            let mut forge_universal_file = archive.by_name(&*profile.install.file_path)?;
+                        let mut archive_clone = archive.clone();
+                        let file_path = profile.install.file_path.clone();
+                        let forge_universal_bytes = tokio::task::spawn_blocking(move || {
+                            let mut forge_universal_file = archive_clone.by_name(&*file_path)?;
                             let mut forge_universal = Vec::new();
                             forge_universal_file.read_to_end(&mut forge_universal)?;
-                            bytes::Bytes::from(forge_universal)
-                        };
+
+                            Ok::<bytes::Bytes, Error>(bytes::Bytes::from(forge_universal))
+                        }).await??;
 
                         let forge_universal_path = profile.install.path.clone();
 
                         let now = Instant::now();
@@ -205,73 +209,170 @@ pub async fn retrieve_data(uploaded_files: &mut Vec<String>) -> Result<(), Error
                                 loaders: map
                             })
                     } else if FORGE_MANIFEST_V2_QUERY_P1.matches(&version) || FORGE_MANIFEST_V2_QUERY_P2.matches(&version) || FORGE_MANIFEST_V3_QUERY.matches(&version) {
-                        let profile = {
-                            let mut install_profile = archive.by_name("install_profile.json")?;
+                        let mut archive_clone = archive.clone();
+                        let mut profile = tokio::task::spawn_blocking(move || {
+                            let mut install_profile = archive_clone.by_name("install_profile.json")?;
                             let mut contents = String::new();
                             install_profile.read_to_string(&mut contents)?;
-                            serde_json::from_str::<ForgeInstallerProfileV2>(&*contents)?
-                        };
+                            Ok::<ForgeInstallerProfileV2, Error>(serde_json::from_str::<ForgeInstallerProfileV2>(&*contents)?)
+                        }).await??;
 
-                        let version_info = {
-                            let mut install_profile = archive.by_name("version.json")?;
+                        let mut archive_clone = archive.clone();
+                        let version_info = tokio::task::spawn_blocking(move || {
+                            let mut install_profile = archive_clone.by_name("version.json")?;
                             let mut contents = String::new();
                             install_profile.read_to_string(&mut contents)?;
-                            serde_json::from_str::<PartialVersionInfo>(&*contents)?
-                        };
-                        let forge_universal_bytes = {
-                            if let Some(path) = &profile.path {
-                                let mut forge_universal_file = archive.by_name(&*format!("maven/{}", daedalus::get_path_from_artifact(&*path)?))?;
-                                let mut forge_universal = Vec::new();
-                                forge_universal_file.read_to_end(&mut forge_universal)?;
+                            Ok::<PartialVersionInfo, Error>(serde_json::from_str::<PartialVersionInfo>(&*contents)?)
+                        }).await??;
 
-                                Some(bytes::Bytes::from(forge_universal))
-                            } else {
-                                None
+                        let mut libs : Vec<Library> = profile.libraries.into_iter().chain(version_info.libraries).collect();
+
+                        let mut local_libs : HashMap<String, bytes::Bytes> = HashMap::new();
+
+                        for lib in &libs {
+                            if lib.downloads.as_ref().map(|x| x.artifact.as_ref().map(|x| x.url.is_empty())).flatten().unwrap_or(false) {
+                                let mut archive_clone = archive.clone();
+                                let lib_name_clone = lib.name.clone();
+
+                                let lib_bytes = tokio::task::spawn_blocking(move || {
+                                    let mut lib_file = archive_clone.by_name(&*format!("maven/{}", daedalus::get_path_from_artifact(&*lib_name_clone)?))?;
+                                    let mut lib_bytes = Vec::new();
+                                    lib_file.read_to_end(&mut lib_bytes)?;
+
+                                    Ok::<bytes::Bytes, Error>(bytes::Bytes::from(lib_bytes))
+                                }).await??;
+
+                                local_libs.insert(lib.name.clone(), lib_bytes);
                             }
-                        };
+                        }
 
-                        let now = Instant::now();
-                        let libs = futures::future::try_join_all(profile.libraries.into_iter().chain(version_info.libraries).map(|mut lib| async {
-                            if let Some(ref mut downloads) = lib.downloads {
-                                if let Some(ref mut artifact) = downloads.artifact {
-                                    {
-                                        let mut visited_assets = visited_assets.lock().await;
+                        let path = profile.path.clone();
+                        let version = profile.version.clone();
 
-                                        if visited_assets.contains(&lib.name) {
-                                            artifact.url = format_url(&*format!("maven/{}", artifact.path));
+                        for entry in profile.data.values_mut() {
+                            if entry.client.starts_with('/') || entry.server.starts_with('/') {
+                                macro_rules! read_data {
+                                    ($value:expr) => {
+                                        let mut archive_clone = archive.clone();
+                                        let value_clone = $value.clone();
+                                        let lib_bytes = tokio::task::spawn_blocking(move || {
+                                            let mut lib_file = archive_clone.by_name(&value_clone[1..value_clone.len()])?;
+                                            let mut lib_bytes = Vec::new();
+                                            lib_file.read_to_end(&mut lib_bytes)?;
 
-                                            return Ok::<Library, Error>(lib);
-                                        } else {
-                                            visited_assets.push(lib.name.clone())
+                                            Ok::<bytes::Bytes, Error>(bytes::Bytes::from(lib_bytes))
+                                        }).await??;
+
+                                        let split = $value.split('/').last();
+
+                                        if let Some(last) = split {
+                                            let mut file = last.split('.');
+
+                                            if let Some(file_name) = file.next() {
+                                                if let Some(ext) = file.next() {
+                                                    let path = format!("{}:{}@{}", path.as_deref().unwrap_or(&*format!("net.minecraftforge:forge:{}", version)), file_name, ext);
+                                                    $value = format!("[{}]", &path);
+                                                    local_libs.insert(path.clone(), bytes::Bytes::from(lib_bytes));
+
+                                                    libs.push(Library {
+                                                        downloads: None,
+                                                        extract: None,
+                                                        name: path,
+                                                        url: Some("".to_string()),
+                                                        natives: None,
+                                                        rules: None,
+                                                        checksums: None
+                                                    });
+                                                }
+                                            }
                                         }
                                     }
+                                }
 
-                                    let artifact_path =
-                                        daedalus::get_path_from_artifact(&*lib.name)?;
+                                if entry.client.starts_with('/') {
+                                    read_data!(entry.client);
+                                }
 
-                                    let artifact_bytes = if artifact.url.is_empty() {
-                                        forge_universal_bytes.clone().unwrap_or_default()
+                                // Do we really need to support server installs? Keeping this here
+                                // just in case
+                                //
+                                // if entry.server.starts_with('/') {
+                                //     read_data!(entry.server);
+                                // }
+                            }
+                        }
+
+                        let now = Instant::now();
+                        let libs = futures::future::try_join_all(libs.into_iter().map(|mut lib| async {
+                            let artifact_path =
+                                daedalus::get_path_from_artifact(&*lib.name)?;
+
+                            {
+                                let mut visited_assets = visited_assets.lock().await;
+
+                                if visited_assets.contains(&lib.name) {
+                                    if let Some(ref mut downloads) = lib.downloads {
+                                        if let Some(ref mut artifact) = downloads.artifact {
+                                            artifact.url = format_url(&*format!("maven/{}", artifact_path));
+                                        }
+                                    } else if lib.url.is_some() {
+                                        lib.url = Some(format_url(&*format!("maven/{}", artifact_path)));
+                                    }
+
+                                    return Ok::<Library, Error>(lib);
+                                } else {
+                                    visited_assets.push(lib.name.clone())
+                                }
+                            }
+
+                            let artifact_bytes = if let Some(ref mut downloads) = lib.downloads {
+                                if let Some(ref mut artifact) = downloads.artifact {
+                                    let res = if artifact.url.is_empty() {
+                                        local_libs.get(&lib.name).cloned()
                                     } else {
-                                        daedalus::download_file(
+                                        Some(daedalus::download_file(
                                             &*artifact.url,
                                             Some(&*artifact.sha1),
                                         )
-                                        .await?
+                                        .await?)
                                     };
 
-                                    artifact.url = format_url(&*format!("maven/{}", artifact.path));
+                                    if res.is_some() {
+                                        artifact.url = format_url(&*format!("maven/{}", artifact_path));
+                                    }
 
-                                    upload_file_to_bucket(
-                                        format!("{}/{}", "maven", artifact_path),
-                                        artifact_bytes.to_vec(),
-                                        Some("application/java-archive".to_string()),
-                                        uploaded_files_mutex.as_ref()
-                                    ).await?;
+                                    res
+                                } else { None }
+                            } else if let Some(ref mut url) = lib.url {
+                                let res = if url.is_empty() {
+                                    local_libs.get(&lib.name).cloned()
+                                } else {
+                                    Some(daedalus::download_file(
+                                        url,
+                                        None,
+                                    )
+                                    .await?)
+                                };
+
+                                if res.is_some() {
+                                    lib.url = Some(format_url(&*format!("maven/{}", artifact_path)));
                                 }
+
+                                res
+                            } else { None };
+
+                            if let Some(bytes) = artifact_bytes {
+                                upload_file_to_bucket(
+                                    format!("{}/{}", "maven", artifact_path),
+                                    bytes.to_vec(),
+                                    Some("application/java-archive".to_string()),
+                                    uploaded_files_mutex.as_ref()
+                                ).await?;
                             }
 
                             Ok::<Library, Error>(lib)
@@ -334,7 +435,7 @@ pub async fn retrieve_data(uploaded_files: &mut Vec<String>) -> Result<(), Error
         info!("Chunk {} Start", chunk_index);
         let now = Instant::now();
 
-        let chunk: Vec<_> = versions_peek.by_ref().take(100).collect();
+        let chunk: Vec<_> = versions_peek.by_ref().take(10).collect();
         futures::future::try_join_all(chunk).await?;
 
         tokio::time::sleep(Duration::from_secs(1)).await;
@@ -356,7 +457,7 @@ pub async fn retrieve_data(uploaded_files: &mut Vec<String>) -> Result<(), Error
             game_versions: versions.into_inner(),
         })?,
         Some("application/json".to_string()),
-        uploaded_files_mutex.as_ref()
+        uploaded_files_mutex.as_ref(),
     )
     .await?;
 }
diff --git a/daedalus_client/src/main.rs b/daedalus_client/src/main.rs
index f599b118b..123e2b4f2 100644
--- a/daedalus_client/src/main.rs
+++ b/daedalus_client/src/main.rs
@@ -62,7 +62,7 @@ async fn main() {
         Err(err) => error!("{:?}", err),
     };
 
-    match purge_digitalocean_cache(uploaded_files).await {
+    match purge_digitalocean_cache(uploaded_files).await {
         Ok(..) => {}
         Err(err) => error!("{:?}", err),
     };
@@ -139,27 +139,39 @@ pub async fn upload_file_to_bucket(
 ) -> Result<(), Error> {
     let key = format!("{}/{}", &*dotenv::var("BASE_FOLDER").unwrap(), path);
 
-    CLIENT
-        .put_object(PutObjectRequest {
-            bucket: dotenv::var("S3_BUCKET_NAME").unwrap(),
-            key: key.clone(),
-            body: Some(bytes.into()),
-            acl: Some("public-read".to_string()),
-            content_type,
-            ..Default::default()
-        })
-        .await
-        .map_err(|err| Error::S3Error {
-            inner: err,
-            file: format!("{}/{}", &*dotenv::var("BASE_FOLDER").unwrap(), path),
-        })?;
+    for attempt in 1..=4 {
+        let result = CLIENT
+            .put_object(PutObjectRequest {
+                bucket: dotenv::var("S3_BUCKET_NAME").unwrap(),
+                key: key.clone(),
+                body: Some(bytes.clone().into()),
+                acl: Some("public-read".to_string()),
+                content_type: content_type.clone(),
+                ..Default::default()
+            })
+            .await
+            .map_err(|err| Error::S3Error {
+                inner: err,
+                file: format!("{}/{}", &*dotenv::var("BASE_FOLDER").unwrap(), path),
+            });
 
-    {
-        let mut uploaded_files = uploaded_files.lock().await;
-        uploaded_files.push(key);
+        match result {
+            Ok(_) => {
+                {
+                    let mut uploaded_files = uploaded_files.lock().await;
+                    uploaded_files.push(key);
+                }
+
+                return Ok(());
+            }
+            Err(_) if attempt <= 3 => continue,
+            Err(_) => {
+                result?;
+            }
+        }
     }
 
-    Ok(())
+    unreachable!()
 }
 
 pub fn format_url(path: &str) -> String {
@@ -181,9 +193,9 @@ pub async fn purge_digitalocean_cache(files: Vec<String>) -> Result<(), Error> {
         .ok()
         .map(|x| x.parse::<bool>().ok())
         .flatten()
-        .unwrap_or(false) {
-
-        return Ok(())
+        .unwrap_or(false)
+    {
+        return Ok(());
     }
 
     let client = reqwest::Client::new();
@@ -193,11 +205,16 @@ pub async fn purge_digitalocean_cache(files: Vec<String>) -> Result<(), Error> {
             "https://api.digitalocean.com/v2/cdn/endpoints/{}/cache",
             &*dotenv::var("DO_ENDPOINT_ID").unwrap()
         ))
-        .header("Authorization", &*format!("Bearer {}", &*dotenv::var("DO_ACCESS_KEY").unwrap()))
+        .header(
+            "Authorization",
+            &*format!("Bearer {}", &*dotenv::var("DO_ACCESS_KEY").unwrap()),
+        )
         .json(&PurgeCacheRequest { files })
-        .send().await.map_err(|err| Error::FetchError {
+        .send()
+        .await
+        .map_err(|err| Error::FetchError {
             inner: err,
-            item: "purging digital ocean cache".to_string()
+            item: "purging digital ocean cache".to_string(),
         })?;
 
     Ok(())
diff --git a/daedalus_client/src/minecraft.rs b/daedalus_client/src/minecraft.rs
index 69484d806..babb6a189 100644
--- a/daedalus_client/src/minecraft.rs
+++ b/daedalus_client/src/minecraft.rs
@@ -1,9 +1,9 @@
 use crate::{format_url, upload_file_to_bucket, Error};
 use daedalus::download_file;
-use tokio::sync::Mutex;
+use log::info;
 use std::sync::Arc;
 use std::time::{Duration, Instant};
-use log::info;
+use tokio::sync::Mutex;
 
 pub async fn retrieve_data(uploaded_files: &mut Vec<String>) -> Result<(), Error> {
     let old_manifest =
@@ -107,7 +107,7 @@ pub async fn retrieve_data(uploaded_files: &mut Vec<String>) -> Result<(), Error
                 assets_path,
                 assets_index.to_vec(),
                 Some("application/json".to_string()),
-                uploaded_files_mutex.as_ref()
+                uploaded_files_mutex.as_ref(),
             ));
         }
     }
@@ -117,7 +117,7 @@ pub async fn retrieve_data(uploaded_files: &mut Vec<String>) -> Result<(), Error
             version_path,
             serde_json::to_vec(&version_info)?,
             Some("application/json".to_string()),
-            uploaded_files_mutex.as_ref()
+            uploaded_files_mutex.as_ref(),
         ));
     }
 
@@ -156,7 +156,7 @@ pub async fn retrieve_data(uploaded_files: &mut Vec<String>) -> Result<(), Error
         ),
         serde_json::to_vec(&*cloned_manifest.lock().await)?,
        Some("application/json".to_string()),
-        uploaded_files_mutex.as_ref()
+        uploaded_files_mutex.as_ref(),
    )
    .await?;
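
As a usage sketch of the reworked get_path_from_artifact above (not part of the patch): it maps Maven-style coordinates onto repository paths, including the new group:name:version:data@ext form used for the locally bundled Forge libraries. Assuming the daedalus crate from this change as a dependency, and with purely illustrative Forge coordinates:

// Illustrative only; the coordinates below are hypothetical examples.
fn main() -> Result<(), daedalus::Error> {
    // group:name:version -> group/.../name/version/name-version.jar
    assert_eq!(
        daedalus::get_path_from_artifact("net.minecraftforge:forge:1.18-38.0.1")?,
        "net/minecraftforge/forge/1.18-38.0.1/forge-1.18-38.0.1.jar"
    );
    // group:name:version:data@ext -> group/.../name/version/name-version-data.ext
    assert_eq!(
        daedalus::get_path_from_artifact("net.minecraftforge:forge:1.18-38.0.1:universal@jar")?,
        "net/minecraftforge/forge/1.18-38.0.1/forge-1.18-38.0.1-universal.jar"
    );
    Ok(())
}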