Daedalus Rewrite + Code Cleanup (#16)

* [wip] rewrite daedalus, vanilla, fabric, and quilt

* finish forge + neo

* fix docker

* fix neoforge 1.21+

* update concurrency limit

* finish

* remove mac garb
This commit is contained in:
Geometrically
2024-06-25 15:47:27 -07:00
committed by GitHub
parent ac07ac5234
commit 8b16cd1b36
19 changed files with 2334 additions and 2529 deletions

View File

@@ -0,0 +1,63 @@
use tracing_error::InstrumentError;
/// All error conditions that can arise while fetching, transforming, and
/// uploading launcher metadata. Wrapped in [`Error`] so that the tracing
/// span active at the point of failure is captured alongside the kind.
#[derive(thiserror::Error, Debug)]
pub enum ErrorKind {
    /// Error bubbled up from the `daedalus` metadata library.
    #[error("Daedalus Error: {0}")]
    Daedalus(#[from] daedalus::Error),
    /// Caller-supplied input failed validation.
    #[error("Invalid input: {0}")]
    InvalidInput(String),
    /// A spawned tokio task failed to join (panicked or was cancelled).
    #[error("Error while managing asynchronous tasks")]
    TaskError(#[from] tokio::task::JoinError),
    /// JSON (de)serialization failure.
    #[error("Error while deserializing JSON: {0}")]
    SerdeJSON(#[from] serde_json::Error),
    /// XML (de)serialization failure.
    #[error("Error while deserializing XML: {0}")]
    SerdeXML(#[from] serde_xml_rs::Error),
    /// A downloaded file's hash never matched the expected value, even
    /// after `tries` download attempts.
    #[error("Failed to validate file checksum at url {url} with hash {hash} after {tries} tries")]
    ChecksumFailure {
        // Expected checksum the downloaded bytes were compared against.
        hash: String,
        // URL the file was fetched from.
        url: String,
        // Number of attempts made before giving up.
        tries: u32,
    },
    /// An HTTP request for `item` failed; `inner` holds the reqwest error.
    #[error("Unable to fetch {item}")]
    Fetch { inner: reqwest::Error, item: String },
    /// Uploading `file` to the S3 bucket failed.
    #[error("Error while uploading file to S3: {file}")]
    S3 {
        inner: s3::error::S3Error,
        file: String,
    },
    /// Acquiring a permit from a tokio semaphore failed (semaphore closed).
    #[error("Error acquiring semaphore: {0}")]
    Acquire(#[from] tokio::sync::AcquireError),
    /// Installing the global tracing subscriber failed.
    #[error("Tracing error: {0}")]
    Tracing(#[from] tracing::subscriber::SetGlobalDefaultError),
    /// Reading or writing a zip archive failed.
    #[error("Zip error: {0}")]
    Zip(#[from] async_zip::error::ZipError),
}
/// Top-level error type for the crate: an [`ErrorKind`] wrapped in a
/// `tracing_error::TracedError`, which preserves the span trace captured
/// when the error was created for richer diagnostics.
#[derive(Debug)]
pub struct Error {
    // The underlying kind, annotated with the tracing span it arose in.
    pub source: tracing_error::TracedError<ErrorKind>,
}
impl std::fmt::Display for Error {
    /// Delegates formatting to the wrapped traced error, so the text shown
    /// to users is the underlying [`ErrorKind`]'s display message.
    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::Display::fmt(&self.source, fmt)
    }
}
impl<E: Into<ErrorKind>> From<E> for Error {
fn from(source: E) -> Self {
let error = Into::<ErrorKind>::into(source);
Self {
source: error.in_current_span(),
}
}
}
impl ErrorKind {
pub fn as_error(self) -> Error {
self.into()
}
}
/// Crate-wide convenience alias: a `Result` whose error type is this
/// crate's [`Error`].
pub type Result<T> = core::result::Result<T, Error>;

View File

@@ -1,372 +1,276 @@
use crate::{download_file, format_url, upload_file_to_bucket, Error};
use daedalus::minecraft::{Library, VersionManifest};
use daedalus::modded::{
LoaderVersion, Manifest, PartialVersionInfo, Version, DUMMY_REPLACE_STRING,
};
use serde::{Deserialize, Serialize};
use crate::util::{download_file, fetch_json, format_url};
use crate::{insert_mirrored_artifact, Error, MirrorArtifact, UploadFile};
use daedalus::modded::{Manifest, PartialVersionInfo, DUMMY_REPLACE_STRING};
use dashmap::DashMap;
use serde::Deserialize;
use std::sync::Arc;
use tokio::sync::{Mutex, RwLock, Semaphore};
use tokio::sync::Semaphore;
pub async fn retrieve_data(
minecraft_versions: &VersionManifest,
uploaded_files: &mut Vec<String>,
#[tracing::instrument(skip(semaphore, upload_files, mirror_artifacts))]
pub async fn fetch_fabric(
semaphore: Arc<Semaphore>,
upload_files: &DashMap<String, UploadFile>,
mirror_artifacts: &DashMap<String, MirrorArtifact>,
) -> Result<(), Error> {
let list = fetch_fabric_versions(None, semaphore.clone()).await?;
let old_manifest = daedalus::modded::fetch_manifest(&format_url(&format!(
"fabric/v{}/manifest.json",
fetch(
daedalus::modded::CURRENT_FABRIC_FORMAT_VERSION,
)))
"fabric",
"https://meta.fabricmc.net/v2",
"https://maven.fabricmc.net/",
semaphore,
upload_files,
mirror_artifacts,
)
.await
}
#[tracing::instrument(skip(semaphore, upload_files, mirror_artifacts))]
pub async fn fetch_quilt(
semaphore: Arc<Semaphore>,
upload_files: &DashMap<String, UploadFile>,
mirror_artifacts: &DashMap<String, MirrorArtifact>,
) -> Result<(), Error> {
fetch(
daedalus::modded::CURRENT_QUILT_FORMAT_VERSION,
"quilt",
"https://meta.quiltmc.org/v3",
"https://meta.quiltmc.org/",
semaphore,
upload_files,
mirror_artifacts,
)
.await
}
#[tracing::instrument(skip(semaphore, upload_files, mirror_artifacts))]
async fn fetch(
format_version: usize,
mod_loader: &str,
meta_url: &str,
maven_url: &str,
semaphore: Arc<Semaphore>,
upload_files: &DashMap<String, UploadFile>,
mirror_artifacts: &DashMap<String, MirrorArtifact>,
) -> Result<(), Error> {
let modrinth_manifest = fetch_json::<Manifest>(
&format_url(&format!("{mod_loader}/v{format_version}/manifest.json",)),
&semaphore,
)
.await
.ok();
let mut versions = if let Some(old_manifest) = old_manifest {
old_manifest.game_versions
} else {
Vec::new()
};
let loaders_mutex = RwLock::new(Vec::new());
{
let mut loaders = loaders_mutex.write().await;
for (index, loader) in list.loader.iter().enumerate() {
if versions.iter().any(|x| {
x.id == DUMMY_REPLACE_STRING
&& x.loaders.iter().any(|x| x.id == loader.version)
}) {
if index == 0 {
loaders.push((
Box::new(loader.stable),
loader.version.clone(),
Box::new(true),
))
}
} else {
loaders.push((
Box::new(loader.stable),
loader.version.clone(),
Box::new(false),
))
}
}
}
const DUMMY_GAME_VERSION: &str = "1.19.4-rc2";
let loader_version_mutex = Mutex::new(Vec::new());
let uploaded_files_mutex = Arc::new(Mutex::new(Vec::new()));
let loader_versions = futures::future::try_join_all(
loaders_mutex.read().await.clone().into_iter().map(
|(stable, loader, skip_upload)| async {
let version = fetch_fabric_version(
DUMMY_GAME_VERSION,
&loader,
semaphore.clone(),
)
.await?;
Ok::<(Box<bool>, String, PartialVersionInfo, Box<bool>), Error>(
(stable, loader, version, skip_upload),
)
},
),
let fabric_manifest = fetch_json::<FabricVersions>(
&format!("{meta_url}/versions"),
&semaphore,
)
.await?;
let visited_artifacts_mutex = Arc::new(Mutex::new(Vec::new()));
futures::future::try_join_all(loader_versions.into_iter()
.map(
|(stable, loader, version, skip_upload)| async {
let libs = futures::future::try_join_all(
version.libraries.into_iter().map(|mut lib| async {
{
let mut visited_assets =
visited_artifacts_mutex.lock().await;
// We check Modrinth's fabric version manifest and compare if the fabric version exists in Modrinth's database
// We also check intermediary versions that are newly added to query
let (fetch_fabric_versions, fetch_intermediary_versions) =
if let Some(modrinth_manifest) = modrinth_manifest {
let (mut fetch_versions, mut fetch_intermediary_versions) =
(Vec::new(), Vec::new());
if visited_assets.contains(&lib.name) {
lib.name = lib.name.replace(DUMMY_GAME_VERSION, DUMMY_REPLACE_STRING);
lib.url = Some(format_url("maven/"));
for version in &fabric_manifest.loader {
if !modrinth_manifest
.game_versions
.iter()
.any(|x| x.loaders.iter().any(|x| x.id == version.version))
{
fetch_versions.push(version);
}
}
return Ok(lib);
} else {
visited_assets.push(lib.name.clone())
}
for version in &fabric_manifest.intermediary {
if !modrinth_manifest
.game_versions
.iter()
.any(|x| x.id == version.version)
&& fabric_manifest
.game
.iter()
.any(|x| x.version == version.version)
{
fetch_intermediary_versions.push(version);
}
}
(fetch_versions, fetch_intermediary_versions)
} else {
(
fabric_manifest.loader.iter().collect(),
fabric_manifest.intermediary.iter().collect(),
)
};
const DUMMY_GAME_VERSION: &str = "1.21";
if !fetch_intermediary_versions.is_empty() {
for x in &fetch_intermediary_versions {
insert_mirrored_artifact(
&x.maven,
maven_url.to_string(),
mirror_artifacts,
)?;
}
}
if !fetch_fabric_versions.is_empty() {
let fabric_version_manifest_urls = fetch_fabric_versions
.iter()
.map(|x| {
format!(
"{}/versions/loader/{}/{}/profile/json",
meta_url, DUMMY_GAME_VERSION, x.version
)
})
.collect::<Vec<_>>();
let fabric_version_manifests = futures::future::try_join_all(
fabric_version_manifest_urls
.iter()
.map(|x| download_file(x, None, &semaphore)),
)
.await?
.into_iter()
.map(|x| serde_json::from_slice(&x))
.collect::<Result<Vec<PartialVersionInfo>, serde_json::Error>>()?;
let patched_version_manifests = fabric_version_manifests
.into_iter()
.map(|mut version_info| {
for lib in &mut version_info.libraries {
let new_name = lib
.name
.replace(DUMMY_GAME_VERSION, DUMMY_REPLACE_STRING);
// If a library is not intermediary, we add it to mirror artifacts to be mirrored
if lib.name == new_name {
insert_mirrored_artifact(
&new_name,
lib.url
.clone()
.unwrap_or_else(|| maven_url.to_string()),
mirror_artifacts,
)?;
} else {
lib.name = new_name;
}
if lib.name.contains(DUMMY_GAME_VERSION) {
lib.name = lib.name.replace(DUMMY_GAME_VERSION, DUMMY_REPLACE_STRING);
futures::future::try_join_all(list.game.clone().into_iter().map(|game_version| async {
let semaphore = semaphore.clone();
let uploaded_files_mutex = uploaded_files_mutex.clone();
let lib_name = lib.name.clone();
let lib_url = lib.url.clone();
async move {
let artifact_path =
daedalus::get_path_from_artifact(&lib_name.replace(DUMMY_REPLACE_STRING, &game_version.version))?;
let artifact = download_file(
&format!(
"{}{}",
lib_url.unwrap_or_else(|| {
"https://maven.fabricmc.net/".to_string()
}),
artifact_path
),
None,
semaphore.clone(),
)
.await?;
upload_file_to_bucket(
format!("{}/{}", "maven", artifact_path),
artifact.to_vec(),
Some("application/java-archive".to_string()),
&uploaded_files_mutex,
semaphore.clone(),
)
.await?;
Ok::<(), Error>(())
}.await?;
Ok::<(), Error>(())
})).await?;
lib.url = Some(format_url("maven/"));
return Ok(lib);
}
let artifact_path =
daedalus::get_path_from_artifact(&lib.name)?;
let artifact = download_file(
&format!(
"{}{}",
lib.url.unwrap_or_else(|| {
"https://maven.fabricmc.net/".to_string()
}),
artifact_path
),
None,
semaphore.clone(),
)
.await?;
lib.url = Some(format_url("maven/"));
upload_file_to_bucket(
format!("{}/{}", "maven", artifact_path),
artifact.to_vec(),
Some("application/java-archive".to_string()),
&uploaded_files_mutex,
semaphore.clone(),
)
.await?;
Ok::<Library, Error>(lib)
}),
)
.await?;
if async move {
*skip_upload
}.await {
return Ok::<(), Error>(())
}
let version_path = format!(
"fabric/v{}/versions/{}.json",
daedalus::modded::CURRENT_FABRIC_FORMAT_VERSION,
&loader
);
upload_file_to_bucket(
version_path.clone(),
serde_json::to_vec(&PartialVersionInfo {
arguments: version.arguments,
id: version
.id
.replace(DUMMY_GAME_VERSION, DUMMY_REPLACE_STRING),
main_class: version.main_class,
release_time: version.release_time,
time: version.time,
type_: version.type_,
inherits_from: version
.inherits_from
.replace(DUMMY_GAME_VERSION, DUMMY_REPLACE_STRING),
libraries: libs,
minecraft_arguments: version.minecraft_arguments,
processors: None,
data: None,
})?,
Some("application/json".to_string()),
&uploaded_files_mutex,
semaphore.clone(),
)
.await?;
{
let mut loader_version_map = loader_version_mutex.lock().await;
async move {
loader_version_map.push(LoaderVersion {
id: loader.to_string(),
url: format_url(&version_path),
stable: *stable,
});
}
.await;
}
Ok::<(), Error>(())
},
))
.await?;
version_info.id = version_info
.id
.replace(DUMMY_GAME_VERSION, DUMMY_REPLACE_STRING);
version_info.inherits_from = version_info
.inherits_from
.replace(DUMMY_GAME_VERSION, DUMMY_REPLACE_STRING);
let mut loader_version_mutex = loader_version_mutex.into_inner();
if !loader_version_mutex.is_empty() {
if let Some(version) =
versions.iter_mut().find(|x| x.id == DUMMY_REPLACE_STRING)
{
version.loaders.append(&mut loader_version_mutex);
} else {
versions.push(Version {
id: DUMMY_REPLACE_STRING.to_string(),
stable: true,
loaders: loader_version_mutex,
});
}
}
for version in &list.game {
if !versions.iter().any(|x| x.id == version.version) {
versions.push(Version {
id: version.version.clone(),
stable: version.stable,
loaders: vec![],
});
}
}
versions.sort_by(|x, y| {
minecraft_versions
.versions
Ok(version_info)
})
.collect::<Result<Vec<_>, Error>>()?;
let serialized_version_manifests = patched_version_manifests
.iter()
.position(|z| x.id == z.id)
.unwrap_or_default()
.cmp(
&minecraft_versions
.versions
.iter()
.position(|z| y.id == z.id)
.unwrap_or_default(),
)
});
.map(|x| serde_json::to_vec(x).map(bytes::Bytes::from))
.collect::<Result<Vec<_>, serde_json::Error>>()?;
for version in &mut versions {
version.loaders.sort_by(|x, y| {
list.loader
.iter()
.position(|z| x.id == *z.version)
.unwrap_or_default()
.cmp(
&list
.loader
.iter()
.position(|z| y.id == z.version)
.unwrap_or_default(),
)
})
serialized_version_manifests
.into_iter()
.enumerate()
.for_each(|(index, bytes)| {
let loader = fetch_fabric_versions[index];
let version_path = format!(
"{mod_loader}/v{format_version}/versions/{}.json",
loader.version
);
upload_files.insert(
version_path,
UploadFile {
file: bytes,
content_type: Some("application/json".to_string()),
},
);
});
}
upload_file_to_bucket(
format!(
"fabric/v{}/manifest.json",
daedalus::modded::CURRENT_FABRIC_FORMAT_VERSION,
),
serde_json::to_vec(&Manifest {
game_versions: versions,
})?,
Some("application/json".to_string()),
&uploaded_files_mutex,
semaphore,
)
.await?;
if !fetch_fabric_versions.is_empty()
|| !fetch_intermediary_versions.is_empty()
{
let fabric_manifest_path =
format!("{mod_loader}/v{format_version}/manifest.json",);
if let Ok(uploaded_files_mutex) = Arc::try_unwrap(uploaded_files_mutex) {
uploaded_files.extend(uploaded_files_mutex.into_inner());
let loader_versions = daedalus::modded::Version {
id: DUMMY_REPLACE_STRING.to_string(),
stable: true,
loaders: fabric_manifest
.loader
.into_iter()
.map(|x| {
let version_path = format!(
"{mod_loader}/v{format_version}/versions/{}.json",
x.version,
);
daedalus::modded::LoaderVersion {
id: x.version,
url: format_url(&version_path),
stable: x.stable,
}
})
.collect(),
};
let manifest = daedalus::modded::Manifest {
game_versions: std::iter::once(loader_versions)
.chain(fabric_manifest.game.into_iter().map(|x| {
daedalus::modded::Version {
id: x.version,
stable: x.stable,
loaders: vec![],
}
}))
.collect(),
};
upload_files.insert(
fabric_manifest_path,
UploadFile {
file: bytes::Bytes::from(serde_json::to_vec(&manifest)?),
content_type: Some("application/json".to_string()),
},
);
}
Ok(())
}
const FABRIC_META_URL: &str = "https://meta.fabricmc.net/v2";
async fn fetch_fabric_version(
version_number: &str,
loader_version: &str,
semaphore: Arc<Semaphore>,
) -> Result<PartialVersionInfo, Error> {
Ok(serde_json::from_slice(
&download_file(
&format!(
"{}/versions/loader/{}/{}/profile/json",
FABRIC_META_URL, version_number, loader_version
),
None,
semaphore,
)
.await?,
)?)
}
#[derive(Serialize, Deserialize, Debug, Clone)]
/// Versions of fabric components
#[derive(Deserialize, Debug, Clone)]
struct FabricVersions {
/// Versions of Minecraft that fabric supports
pub game: Vec<FabricGameVersion>,
/// Available versions of the fabric loader
pub loader: Vec<FabricLoaderVersion>,
pub game: Vec<FabricGameVersion>,
#[serde(alias = "hashed")]
pub intermediary: Vec<FabricIntermediaryVersion>,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
/// A version of Minecraft that fabric supports
struct FabricGameVersion {
/// The version number of the game
pub version: String,
/// Whether the Minecraft version is stable or not
pub stable: bool,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
/// A version of the fabric loader
#[derive(Deserialize, Debug, Clone)]
struct FabricLoaderVersion {
/// The separator to get the build number
pub separator: String,
/// The build number
pub build: u32,
/// The maven artifact
pub maven: String,
/// The version number of the fabric loader
// pub separator: String,
// pub build: u32,
// pub maven: String,
pub version: String,
/// Whether the loader is stable or not
#[serde(default)]
pub stable: bool,
}
/// Fetches the list of fabric versions
async fn fetch_fabric_versions(
url: Option<&str>,
semaphore: Arc<Semaphore>,
) -> Result<FabricVersions, Error> {
Ok(serde_json::from_slice(
&download_file(
url.unwrap_or(&*format!("{}/versions", FABRIC_META_URL)),
None,
semaphore,
)
.await?,
)?)
#[derive(Deserialize, Debug, Clone)]
struct FabricIntermediaryVersion {
pub maven: String,
pub version: String,
}
#[derive(Deserialize, Debug, Clone)]
struct FabricGameVersion {
pub version: String,
pub stable: bool,
}

File diff suppressed because it is too large. (Load Diff)

View File

@@ -1,118 +1,147 @@
use log::{error, info, warn};
use s3::creds::Credentials;
use s3::error::S3Error;
use s3::{Bucket, Region};
use crate::util::{
format_url, upload_file_to_bucket, upload_url_to_bucket_mirrors,
REQWEST_CLIENT,
};
use daedalus::get_path_from_artifact;
use dashmap::{DashMap, DashSet};
use std::sync::Arc;
use std::time::Duration;
use tokio::sync::Semaphore;
use tracing_error::ErrorLayer;
use tracing_subscriber::{fmt, prelude::*, EnvFilter};
mod error;
mod fabric;
mod forge;
mod minecraft;
mod neo;
mod quilt;
pub mod util;
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("{0}")]
DaedalusError(#[from] daedalus::Error),
#[error("Error while deserializing JSON")]
SerdeError(#[from] serde_json::Error),
#[error("Error while deserializing XML")]
XMLError(#[from] serde_xml_rs::Error),
#[error("Unable to fetch {item}")]
FetchError { inner: reqwest::Error, item: String },
#[error("Error while managing asynchronous tasks")]
TaskError(#[from] tokio::task::JoinError),
#[error("Error while uploading file to S3")]
S3Error { inner: S3Error, file: String },
#[error("Error while parsing version as semver: {0}")]
SemVerError(#[from] semver::Error),
#[error("Error while reading zip file: {0}")]
ZipError(#[from] zip::result::ZipError),
#[error("Error while reading zip file: {0}")]
IoError(#[from] std::io::Error),
#[error("Error while obtaining strong reference to Arc")]
ArcError,
#[error("Error acquiring semaphore: {0}")]
AcquireError(#[from] tokio::sync::AcquireError),
}
pub use error::{Error, ErrorKind, Result};
#[tokio::main]
async fn main() {
env_logger::init();
async fn main() -> Result<()> {
dotenvy::dotenv().ok();
let subscriber = tracing_subscriber::registry()
.with(fmt::layer())
.with(EnvFilter::from_default_env())
.with(ErrorLayer::default());
tracing::subscriber::set_global_default(subscriber)?;
tracing::info!("Initialized tracing. Starting Daedalus!");
if check_env_vars() {
error!("Some environment variables are missing!");
tracing::error!("Some environment variables are missing!");
return;
return Ok(());
}
let mut timer = tokio::time::interval(Duration::from_secs(60 * 60));
let semaphore = Arc::new(Semaphore::new(10));
let semaphore = Arc::new(Semaphore::new(
dotenvy::var("CONCURRENCY_LIMIT")
.ok()
.and_then(|x| x.parse().ok())
.unwrap_or(10),
));
loop {
timer.tick().await;
// path, upload file
let upload_files: DashMap<String, UploadFile> = DashMap::new();
// path, mirror artifact
let mirror_artifacts: DashMap<String, MirrorArtifact> = DashMap::new();
let mut uploaded_files = Vec::new();
minecraft::fetch(semaphore.clone(), &upload_files, &mirror_artifacts)
.await?;
fabric::fetch_fabric(semaphore.clone(), &upload_files, &mirror_artifacts)
.await?;
fabric::fetch_quilt(semaphore.clone(), &upload_files, &mirror_artifacts)
.await?;
forge::fetch_neo(semaphore.clone(), &upload_files, &mirror_artifacts)
.await?;
forge::fetch_forge(semaphore.clone(), &upload_files, &mirror_artifacts)
.await?;
let versions = match minecraft::retrieve_data(
&mut uploaded_files,
semaphore.clone(),
futures::future::try_join_all(upload_files.iter().map(|x| {
upload_file_to_bucket(
x.key().clone(),
x.value().file.clone(),
x.value().content_type.clone(),
&semaphore,
)
.await
{
Ok(res) => Some(res),
Err(err) => {
error!("{:?}", err);
}))
.await?;
None
futures::future::try_join_all(mirror_artifacts.iter().map(|x| {
upload_url_to_bucket_mirrors(
format!("maven/{}", x.key()),
x.value().mirrors.iter().map(|x| x.key().clone()).collect(),
&semaphore,
)
}))
.await?;
if let Ok(token) = dotenvy::var("CLOUDFLARE_TOKEN") {
if let Ok(zone_id) = dotenvy::var("CLOUDFLARE_ZONE_ID") {
let cache_clears = upload_files
.into_iter()
.map(|x| format_url(&x.0))
.chain(
mirror_artifacts
.into_iter()
.map(|x| format_url(&format!("maven/{}", x.0))),
)
.collect::<Vec<_>>();
// Cloudflare ratelimits cache clears to 500 files per request
for chunk in cache_clears.chunks(500) {
REQWEST_CLIENT.post(format!("https://api.cloudflare.com/client/v4/zones/{zone_id}/purge_cache"))
.bearer_auth(&token)
.json(&serde_json::json!({
"files": chunk
}))
.send()
.await
.map_err(|err| {
ErrorKind::Fetch {
inner: err,
item: "cloudflare clear cache".to_string(),
}
})?
.error_for_status()
.map_err(|err| {
ErrorKind::Fetch {
inner: err,
item: "cloudflare clear cache".to_string(),
}
})?;
}
};
if let Some(manifest) = versions {
match fabric::retrieve_data(
&manifest,
&mut uploaded_files,
semaphore.clone(),
)
.await
{
Ok(..) => {}
Err(err) => error!("{:?}", err),
};
match forge::retrieve_data(
&manifest,
&mut uploaded_files,
semaphore.clone(),
)
.await
{
Ok(..) => {}
Err(err) => error!("{:?}", err),
};
match quilt::retrieve_data(
&manifest,
&mut uploaded_files,
semaphore.clone(),
)
.await
{
Ok(..) => {}
Err(err) => error!("{:?}", err),
};
match neo::retrieve_data(
&manifest,
&mut uploaded_files,
semaphore.clone(),
)
.await
{
Ok(..) => {}
Err(err) => error!("{:?}", err),
};
}
}
Ok(())
}
pub struct UploadFile {
file: bytes::Bytes,
content_type: Option<String>,
}
pub struct MirrorArtifact {
pub mirrors: DashSet<String>,
}
pub fn insert_mirrored_artifact(
artifact: &str,
mirror: String,
mirror_artifacts: &DashMap<String, MirrorArtifact>,
) -> Result<()> {
mirror_artifacts
.entry(get_path_from_artifact(artifact)?)
.or_insert(MirrorArtifact {
mirrors: DashSet::new(),
})
.mirrors
.insert(mirror);
Ok(())
}
fn check_env_vars() -> bool {
@@ -124,7 +153,7 @@ fn check_env_vars() -> bool {
.and_then(|s| s.parse::<T>().ok())
.is_none()
{
warn!(
tracing::warn!(
"Variable `{}` missing in dotenvy or not of type `{}`",
var,
std::any::type_name::<T>()
@@ -143,110 +172,14 @@ fn check_env_vars() -> bool {
failed |= check_var::<String>("S3_REGION");
failed |= check_var::<String>("S3_BUCKET_NAME");
if dotenvy::var("CLOUDFLARE_INTEGRATION")
.ok()
.and_then(|x| x.parse::<bool>().ok())
.unwrap_or(false)
{
failed |= check_var::<String>("CLOUDFLARE_TOKEN");
failed |= check_var::<String>("CLOUDFLARE_ZONE_ID");
}
failed
}
lazy_static::lazy_static! {
static ref CLIENT : Bucket = {
let region = dotenvy::var("S3_REGION").unwrap();
let b = Bucket::new(
&dotenvy::var("S3_BUCKET_NAME").unwrap(),
if &*region == "r2" {
Region::R2 {
account_id: dotenvy::var("S3_URL").unwrap(),
}
} else {
Region::Custom {
region: region.clone(),
endpoint: dotenvy::var("S3_URL").unwrap(),
}
},
Credentials::new(
Some(&*dotenvy::var("S3_ACCESS_TOKEN").unwrap()),
Some(&*dotenvy::var("S3_SECRET").unwrap()),
None,
None,
None,
).unwrap(),
).unwrap();
if region == "path-style" {
b.with_path_style()
} else {
b
}
};
}
pub async fn upload_file_to_bucket(
path: String,
bytes: Vec<u8>,
content_type: Option<String>,
uploaded_files: &tokio::sync::Mutex<Vec<String>>,
semaphore: Arc<Semaphore>,
) -> Result<(), Error> {
let _permit = semaphore.acquire().await?;
info!("{} started uploading", path);
let key = path.clone();
for attempt in 1..=4 {
let result = if let Some(ref content_type) = content_type {
CLIENT
.put_object_with_content_type(key.clone(), &bytes, content_type)
.await
} else {
CLIENT.put_object(key.clone(), &bytes).await
}
.map_err(|err| Error::S3Error {
inner: err,
file: path.clone(),
});
match result {
Ok(_) => {
{
info!("{} done uploading", path);
let mut uploaded_files = uploaded_files.lock().await;
uploaded_files.push(key);
}
return Ok(());
}
Err(_) if attempt <= 3 => continue,
Err(_) => {
result?;
}
}
}
unreachable!()
}
pub fn format_url(path: &str) -> String {
format!("{}/{}", &*dotenvy::var("BASE_URL").unwrap(), path)
}
pub async fn download_file(
url: &str,
sha1: Option<&str>,
semaphore: Arc<Semaphore>,
) -> Result<bytes::Bytes, Error> {
let _permit = semaphore.acquire().await?;
info!("{} started downloading", url);
let val = daedalus::download_file(url, sha1).await?;
info!("{} finished downloading", url);
Ok(val)
}
pub async fn download_file_mirrors(
base: &str,
mirrors: &[&str],
sha1: Option<&str>,
semaphore: Arc<Semaphore>,
) -> Result<bytes::Bytes, Error> {
let _permit = semaphore.acquire().await?;
info!("{} started downloading", base);
let val = daedalus::download_file_mirrors(base, mirrors, sha1).await?;
info!("{} finished downloading", base);
Ok(val)
}

View File

@@ -1,286 +1,181 @@
use crate::download_file;
use crate::{format_url, upload_file_to_bucket, Error};
use daedalus::get_hash;
use daedalus::minecraft::{
merge_partial_library, Library, PartialLibrary, VersionManifest,
use crate::util::fetch_json;
use crate::{
util::download_file, util::format_url, util::sha1_async, Error,
MirrorArtifact, UploadFile,
};
use log::info;
use daedalus::minecraft::{
merge_partial_library, Library, PartialLibrary, VersionInfo,
VersionManifest, VERSION_MANIFEST_URL,
};
use dashmap::DashMap;
use serde::Deserialize;
use std::sync::Arc;
use std::time::Instant;
use tokio::sync::{Mutex, Semaphore};
use tokio::sync::Semaphore;
pub async fn retrieve_data(
uploaded_files: &mut Vec<String>,
#[tracing::instrument(skip(semaphore, upload_files, _mirror_artifacts))]
pub async fn fetch(
semaphore: Arc<Semaphore>,
) -> Result<VersionManifest, Error> {
let old_manifest = daedalus::minecraft::fetch_version_manifest(Some(
&*format_url(&format!(
upload_files: &DashMap<String, UploadFile>,
_mirror_artifacts: &DashMap<String, MirrorArtifact>,
) -> Result<(), Error> {
let modrinth_manifest = fetch_json::<VersionManifest>(
&format_url(&format!(
"minecraft/v{}/manifest.json",
daedalus::minecraft::CURRENT_FORMAT_VERSION
)),
))
&semaphore,
)
.await
.ok();
let mojang_manifest =
fetch_json::<VersionManifest>(VERSION_MANIFEST_URL, &semaphore).await?;
let mut manifest =
daedalus::minecraft::fetch_version_manifest(None).await?;
let cloned_manifest =
Arc::new(Mutex::new(old_manifest.clone().unwrap_or(manifest.clone())));
// TODO: experimental snapshots: https://github.com/PrismLauncher/meta/blob/main/meta/common/mojang-minecraft-experiments.json
// TODO: old snapshots: https://github.com/PrismLauncher/meta/blob/main/meta/common/mojang-minecraft-old-snapshots.json
let patches = fetch_library_patches()?;
let cloned_patches = Arc::new(&patches);
// We check Modrinth's version manifest and compare if the version 1) exists in Modrinth's database and 2) is unchanged
// If they are not, we will fetch them
let (fetch_versions, existing_versions) =
if let Some(mut modrinth_manifest) = modrinth_manifest {
let (mut fetch_versions, mut existing_versions) =
(Vec::new(), Vec::new());
let visited_assets_mutex = Arc::new(Mutex::new(Vec::new()));
let uploaded_files_mutex = Arc::new(Mutex::new(Vec::new()));
for version in mojang_manifest.versions {
if let Some(index) = modrinth_manifest
.versions
.iter()
.position(|x| x.id == version.id)
{
let modrinth_version =
modrinth_manifest.versions.remove(index);
let now = Instant::now();
let mut version_futures = Vec::new();
for version in manifest.versions.iter_mut() {
version_futures.push(async {
let old_version = if let Some(old_manifest) = &old_manifest {
old_manifest.versions.iter().find(|x| x.id == version.id)
} else {
None
};
if old_version.is_some() {
return Ok(());
if modrinth_version
.original_sha1
.as_ref()
.map(|x| x == &version.sha1)
.unwrap_or(false)
{
existing_versions.push(modrinth_version);
} else {
fetch_versions.push(version);
}
} else {
fetch_versions.push(version);
}
}
let visited_assets_mutex = Arc::clone(&visited_assets_mutex);
let cloned_manifest_mutex = Arc::clone(&cloned_manifest);
let uploaded_files_mutex = Arc::clone(&uploaded_files_mutex);
let semaphore = Arc::clone(&semaphore);
let patches = Arc::clone(&cloned_patches);
(fetch_versions, existing_versions)
} else {
(mojang_manifest.versions, Vec::new())
};
let assets_hash =
old_version.and_then(|x| x.assets_index_sha1.clone());
if !fetch_versions.is_empty() {
let version_manifests = futures::future::try_join_all(
fetch_versions
.iter()
.map(|x| download_file(&x.url, Some(&x.sha1), &semaphore)),
)
.await?
.into_iter()
.map(|x| serde_json::from_slice(&x))
.collect::<Result<Vec<VersionInfo>, serde_json::Error>>()?;
async move {
let mut upload_futures = Vec::new();
let mut version_info =
daedalus::minecraft::fetch_version_info(version).await?;
fn patch_library(
patches: &Vec<LibraryPatch>,
mut library: Library,
) -> Vec<Library> {
let mut val = Vec::new();
let actual_patches = patches
.iter()
.filter(|x| x.match_.contains(&library.name))
.collect::<Vec<_>>();
if !actual_patches.is_empty() {
for patch in actual_patches {
if let Some(override_) = &patch.override_ {
library = merge_partial_library(
override_.clone(),
library,
);
}
if let Some(additional_libraries) =
&patch.additional_libraries
{
for additional_library in additional_libraries {
if patch
.patch_additional_libraries
.unwrap_or(false)
{
let mut libs = patch_library(
patches,
additional_library.clone(),
);
val.append(&mut libs)
} else {
val.push(additional_library.clone());
}
}
}
}
val.push(library);
} else {
val.push(library);
// Patch libraries of Minecraft versions for M-series Mac Support, Better Linux Compatibility, etc
let library_patches = fetch_library_patches()?;
let patched_version_manifests = version_manifests
.into_iter()
.map(|mut x| {
if !library_patches.is_empty() {
let mut new_libraries = Vec::new();
for library in x.libraries {
let mut libs = patch_library(&library_patches, library);
new_libraries.append(&mut libs)
}
val
x.libraries = new_libraries
}
let mut new_libraries = Vec::new();
for library in version_info.libraries.clone() {
let mut libs = patch_library(&patches, library);
new_libraries.append(&mut libs)
}
version_info.libraries = new_libraries;
x
})
.collect::<Vec<_>>();
let version_info_hash = get_hash(bytes::Bytes::from(
serde_json::to_vec(&version_info)?,
))
.await?;
// serialize + compute hashes
let serialized_version_manifests = patched_version_manifests
.iter()
.map(|x| serde_json::to_vec(x).map(bytes::Bytes::from))
.collect::<Result<Vec<_>, serde_json::Error>>()?;
let hashes_version_manifests = futures::future::try_join_all(
serialized_version_manifests
.iter()
.map(|x| sha1_async(x.clone())),
)
.await?;
// We upload the new version manifests and add them to the versions list
let mut new_versions = patched_version_manifests
.into_iter()
.zip(serialized_version_manifests.into_iter())
.zip(hashes_version_manifests.into_iter())
.map(|((version, bytes), hash)| {
let version_path = format!(
"minecraft/v{}/versions/{}.json",
daedalus::minecraft::CURRENT_FORMAT_VERSION,
version.id
);
let assets_path = format!(
"minecraft/v{}/assets/{}.json",
daedalus::minecraft::CURRENT_FORMAT_VERSION,
version_info.asset_index.id
let url = format_url(&version_path);
upload_files.insert(
version_path,
UploadFile {
file: bytes,
content_type: Some("application/json".to_string()),
},
);
let assets_index_url = version_info.asset_index.url.clone();
{
let mut cloned_manifest =
cloned_manifest_mutex.lock().await;
if let Some(position) = cloned_manifest
.versions
daedalus::minecraft::Version {
original_sha1: fetch_versions
.iter()
.position(|x| version.id == x.id)
{
cloned_manifest.versions[position].url =
format_url(&version_path);
cloned_manifest.versions[position].assets_index_sha1 =
Some(version_info.asset_index.sha1.clone());
cloned_manifest.versions[position].assets_index_url =
Some(format_url(&assets_path));
cloned_manifest.versions[position].sha1 =
version_info_hash;
} else {
cloned_manifest.versions.insert(
0,
daedalus::minecraft::Version {
id: version_info.id.clone(),
type_: version_info.type_.clone(),
url: format_url(&version_path),
time: version_info.time,
release_time: version_info.release_time,
sha1: version_info_hash,
compliance_level: 1,
assets_index_url: Some(
version_info.asset_index.sha1.clone(),
),
assets_index_sha1: Some(
version_info.asset_index.sha1.clone(),
),
},
)
}
.find(|x| x.id == version.id)
.map(|x| x.sha1.clone()),
id: version.id,
type_: version.type_,
url,
time: version.time,
release_time: version.release_time,
sha1: hash,
compliance_level: 1,
}
})
.chain(existing_versions.into_iter())
.collect::<Vec<_>>();
let mut download_assets = false;
new_versions.sort_by(|a, b| b.release_time.cmp(&a.release_time));
{
let mut visited_assets = visited_assets_mutex.lock().await;
if !visited_assets.contains(&version_info.asset_index.id) {
if let Some(assets_hash) = assets_hash {
if version_info.asset_index.sha1 != assets_hash {
download_assets = true;
}
} else {
download_assets = true;
}
}
if download_assets {
visited_assets
.push(version_info.asset_index.id.clone());
}
}
if download_assets {
let assets_index = download_file(
&assets_index_url,
Some(&version_info.asset_index.sha1),
semaphore.clone(),
)
.await?;
{
upload_futures.push(upload_file_to_bucket(
assets_path,
assets_index.to_vec(),
Some("application/json".to_string()),
uploaded_files_mutex.as_ref(),
semaphore.clone(),
));
}
}
{
upload_futures.push(upload_file_to_bucket(
version_path,
serde_json::to_vec(&version_info)?,
Some("application/json".to_string()),
uploaded_files_mutex.as_ref(),
semaphore.clone(),
));
}
futures::future::try_join_all(upload_futures).await?;
Ok::<(), Error>(())
}
.await?;
Ok::<(), Error>(())
})
}
{
let mut versions = version_futures.into_iter().peekable();
let mut chunk_index = 0;
while versions.peek().is_some() {
let now = Instant::now();
let chunk: Vec<_> = versions.by_ref().take(100).collect();
futures::future::try_join_all(chunk).await?;
chunk_index += 1;
let elapsed = now.elapsed();
info!("Chunk {} Elapsed: {:.2?}", chunk_index, elapsed);
}
}
//futures::future::try_join_all(version_futures).await?;
upload_file_to_bucket(
format!(
// create and upload the new manifest
let version_manifest_path = format!(
"minecraft/v{}/manifest.json",
daedalus::minecraft::CURRENT_FORMAT_VERSION
),
serde_json::to_vec(&*cloned_manifest.lock().await)?,
Some("application/json".to_string()),
uploaded_files_mutex.as_ref(),
semaphore,
)
.await?;
);
if let Ok(uploaded_files_mutex) = Arc::try_unwrap(uploaded_files_mutex) {
uploaded_files.extend(uploaded_files_mutex.into_inner());
let new_manifest = VersionManifest {
latest: mojang_manifest.latest,
versions: new_versions,
};
upload_files.insert(
version_manifest_path,
UploadFile {
file: bytes::Bytes::from(serde_json::to_vec(&new_manifest)?),
content_type: Some("application/json".to_string()),
},
);
}
let elapsed = now.elapsed();
info!("Elapsed: {:.2?}", elapsed);
Ok(Arc::try_unwrap(cloned_manifest)
.map_err(|_| Error::ArcError)?
.into_inner())
Ok(())
}
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
/// A version of the fabric loader
struct LibraryPatch {
pub struct LibraryPatch {
#[serde(rename = "_comment")]
pub _comment: String,
#[serde(rename = "match")]
@@ -291,8 +186,45 @@ struct LibraryPatch {
pub patch_additional_libraries: Option<bool>,
}
/// Loads the bundled library patch definitions from `library-patches.json`
/// (compiled into the binary via `include_bytes!`).
fn fetch_library_patches() -> Result<Vec<LibraryPatch>, Error> {
    let patches = include_bytes!("../library-patches.json");
    Ok(serde_json::from_slice(patches)?)
}
/// Applies every matching [`LibraryPatch`] to `library`.
///
/// Returns the (possibly overridden) library itself plus any additional
/// libraries contributed by the matching patches. When a patch sets
/// `patch_additional_libraries`, its additional libraries are themselves run
/// through `patch_library` recursively.
pub fn patch_library(
    patches: &[LibraryPatch],
    mut library: Library,
) -> Vec<Library> {
    let mut val = Vec::new();
    // Collect the matching patches up front: overrides below may rewrite
    // `library`, and matching must be done against the original name.
    let actual_patches = patches
        .iter()
        .filter(|x| x.match_.contains(&library.name))
        .collect::<Vec<_>>();
    for patch in actual_patches {
        if let Some(override_) = &patch.override_ {
            library = merge_partial_library(override_.clone(), library);
        }
        if let Some(additional_libraries) = &patch.additional_libraries {
            for additional_library in additional_libraries {
                if patch.patch_additional_libraries.unwrap_or(false) {
                    // Additional libraries may themselves need patching.
                    val.extend(patch_library(
                        patches,
                        additional_library.clone(),
                    ));
                } else {
                    val.push(additional_library.clone());
                }
            }
        }
    }
    // The input library is always part of the result, patched or not
    // (when no patch matches, the loop above is a no-op).
    val.push(library);
    val
}

View File

@@ -1,495 +0,0 @@
use crate::{download_file, format_url, upload_file_to_bucket, Error};
use daedalus::minecraft::{Library, VersionManifest};
use daedalus::modded::{
LoaderVersion, Manifest, PartialVersionInfo, Processor, SidedDataEntry,
};
use log::info;
use semver::Version;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::io::Read;
use std::sync::Arc;
use std::time::Instant;
use tokio::sync::{Mutex, Semaphore};
/// Generates NeoForge (and legacy NeoForged "forge"-id) loader metadata:
/// downloads each loader installer jar from the NeoForged Maven, mirrors its
/// libraries and embedded data files into our bucket, uploads one version
/// JSON per loader version, and finally uploads the versioned manifest.
///
/// Loader versions already present in the previously published manifest are
/// reused without reprocessing. `uploaded_files` collects every bucket key
/// written; `semaphore` bounds concurrent network operations.
pub async fn retrieve_data(
    minecraft_versions: &VersionManifest,
    uploaded_files: &mut Vec<String>,
    semaphore: Arc<Semaphore>,
) -> Result<(), Error> {
    let maven_metadata = fetch_maven_metadata(semaphore.clone()).await?;
    // Previously published manifest (if any) — used to skip known versions.
    let old_manifest = daedalus::modded::fetch_manifest(&format_url(&format!(
        "neo/v{}/manifest.json",
        daedalus::modded::CURRENT_NEOFORGE_FORMAT_VERSION,
    )))
    .await
    .ok();
    let old_versions =
        Arc::new(Mutex::new(if let Some(old_manifest) = old_manifest {
            old_manifest.game_versions
        } else {
            Vec::new()
        }));
    let versions = Arc::new(Mutex::new(Vec::new()));
    let visited_assets_mutex = Arc::new(Mutex::new(Vec::new()));
    let uploaded_files_mutex = Arc::new(Mutex::new(Vec::new()));
    let mut version_futures = Vec::new();
    for (minecraft_version, loader_versions) in maven_metadata.clone() {
        let mut loaders = Vec::new();
        for (full, loader_version, new_forge) in loader_versions {
            // Parsed only to validate that the loader version is semver.
            let version = Version::parse(&loader_version)?;
            loaders.push((full, version, new_forge.to_string()))
        }
        if !loaders.is_empty() {
            version_futures.push(async {
                let mut loaders_versions = Vec::new();
                {
                    let loaders_futures = loaders.into_iter().map(|(loader_version_full, _, new_forge)| async {
                        let versions_mutex = Arc::clone(&old_versions);
                        let visited_assets = Arc::clone(&visited_assets_mutex);
                        let uploaded_files_mutex = Arc::clone(&uploaded_files_mutex);
                        let semaphore = Arc::clone(&semaphore);
                        let minecraft_version = minecraft_version.clone();
                        async move {
                            // Reuse the entry from the old manifest if this
                            // loader version was already processed.
                            {
                                let versions = versions_mutex.lock().await;
                                let version = versions.iter().find(|x|
                                    x.id == minecraft_version).and_then(|x| x.loaders.iter().find(|x| x.id == loader_version_full));
                                if let Some(version) = version {
                                    return Ok::<Option<LoaderVersion>, Error>(Some(version.clone()));
                                }
                            }
                            info!("Forge - Installer Start {}", loader_version_full.clone());
                            // New-style versions live under the "neoforge" artifact id,
                            // old-style ones under "forge".
                            let bytes = download_file(&format!("https://maven.neoforged.net/net/neoforged/{1}/{0}/{1}-{0}-installer.jar", loader_version_full, if &*new_forge == "true" { "neoforge" } else { "forge" }), None, semaphore.clone()).await?;
                            let reader = std::io::Cursor::new(bytes);
                            if let Ok(archive) = zip::ZipArchive::new(reader) {
                                // install_profile.json holds processor/data definitions;
                                // version.json is the launcher-facing version info.
                                let mut archive_clone = archive.clone();
                                let mut profile = tokio::task::spawn_blocking(move || {
                                    let mut install_profile = archive_clone.by_name("install_profile.json")?;
                                    let mut contents = String::new();
                                    install_profile.read_to_string(&mut contents)?;
                                    Ok::<ForgeInstallerProfileV2, Error>(serde_json::from_str::<ForgeInstallerProfileV2>(&contents)?)
                                }).await??;
                                let mut archive_clone = archive.clone();
                                let version_info = tokio::task::spawn_blocking(move || {
                                    let mut install_profile = archive_clone.by_name("version.json")?;
                                    let mut contents = String::new();
                                    install_profile.read_to_string(&mut contents)?;
                                    Ok::<PartialVersionInfo, Error>(serde_json::from_str::<PartialVersionInfo>(&contents)?)
                                }).await??;
                                // Installer-profile libraries are excluded from the
                                // classpath (include_in_classpath: false).
                                let mut libs: Vec<Library> = version_info.libraries.into_iter().chain(profile.libraries.into_iter().map(|x| Library {
                                    downloads: x.downloads,
                                    extract: x.extract,
                                    name: x.name,
                                    url: x.url,
                                    natives: x.natives,
                                    rules: x.rules,
                                    checksums: x.checksums,
                                    include_in_classpath: false
                                })).collect();
                                // Libraries with an empty artifact URL are bundled inside
                                // the installer jar under maven/ — extract them here so
                                // they can be re-uploaded below.
                                let mut local_libs: HashMap<String, bytes::Bytes> = HashMap::new();
                                for lib in &libs {
                                    if lib.downloads.as_ref().and_then(|x| x.artifact.as_ref().map(|x| x.url.is_empty())).unwrap_or(false) {
                                        let mut archive_clone = archive.clone();
                                        let lib_name_clone = lib.name.clone();
                                        let lib_bytes = tokio::task::spawn_blocking(move || {
                                            let mut lib_file = archive_clone.by_name(&format!("maven/{}", daedalus::get_path_from_artifact(&lib_name_clone)?))?;
                                            let mut lib_bytes = Vec::new();
                                            lib_file.read_to_end(&mut lib_bytes)?;
                                            Ok::<bytes::Bytes, Error>(bytes::Bytes::from(lib_bytes))
                                        }).await??;
                                        local_libs.insert(lib.name.clone(), lib_bytes);
                                    }
                                }
                                let path = profile.path.clone();
                                let version = profile.version.clone();
                                // Data entries beginning with '/' reference files inside
                                // the installer jar; extract them and expose them as
                                // pseudo-libraries with a synthesized maven coordinate.
                                for entry in profile.data.values_mut() {
                                    if entry.client.starts_with('/') || entry.server.starts_with('/') {
                                        macro_rules! read_data {
                                            ($value:expr) => {
                                                let mut archive_clone = archive.clone();
                                                let value_clone = $value.clone();
                                                let lib_bytes = tokio::task::spawn_blocking(move || {
                                                    // Strip the leading '/' to get the zip entry name.
                                                    let mut lib_file = archive_clone.by_name(&value_clone[1..value_clone.len()])?;
                                                    let mut lib_bytes = Vec::new();
                                                    lib_file.read_to_end(&mut lib_bytes)?;
                                                    Ok::<bytes::Bytes, Error>(bytes::Bytes::from(lib_bytes))
                                                }).await??;
                                                let split = $value.split('/').last();
                                                if let Some(last) = split {
                                                    let mut file = last.split('.');
                                                    if let Some(file_name) = file.next() {
                                                        if let Some(ext) = file.next() {
                                                            // Coordinate form: "<group:artifact:version>:<file>@<ext>".
                                                            let path = format!("{}:{}@{}", path.as_deref().unwrap_or(&*format!("net.minecraftforge:forge:{}", version)), file_name, ext);
                                                            $value = format!("[{}]", &path);
                                                            local_libs.insert(path.clone(), bytes::Bytes::from(lib_bytes));
                                                            libs.push(Library {
                                                                downloads: None,
                                                                extract: None,
                                                                name: path,
                                                                url: Some("".to_string()),
                                                                natives: None,
                                                                rules: None,
                                                                checksums: None,
                                                                include_in_classpath: false,
                                                            });
                                                        }
                                                    }
                                                }
                                            }
                                        }
                                        if entry.client.starts_with('/') {
                                            read_data!(entry.client);
                                        }
                                        if entry.server.starts_with('/') {
                                            read_data!(entry.server);
                                        }
                                    }
                                }
                                let now = Instant::now();
                                // Mirror every library into the bucket and rewrite its
                                // URL to point at our maven mirror.
                                let libs = futures::future::try_join_all(libs.into_iter().map(|mut lib| async {
                                    let artifact_path =
                                        daedalus::get_path_from_artifact(&lib.name)?;
                                    {
                                        // Already mirrored by a previous loader version:
                                        // just rewrite the URL and skip the upload.
                                        let mut visited_assets = visited_assets.lock().await;
                                        if visited_assets.contains(&lib.name) {
                                            if let Some(ref mut downloads) = lib.downloads {
                                                if let Some(ref mut artifact) = downloads.artifact {
                                                    artifact.url = format_url(&format!("maven/{}", artifact_path));
                                                }
                                            } else if lib.url.is_some() {
                                                lib.url = Some(format_url("maven/"));
                                            }
                                            return Ok::<Library, Error>(lib);
                                        } else {
                                            visited_assets.push(lib.name.clone())
                                        }
                                    }
                                    // Fetch the artifact: from the installer jar when the
                                    // URL is empty, otherwise from the remote maven.
                                    let artifact_bytes = if let Some(ref mut downloads) = lib.downloads {
                                        if let Some(ref mut artifact) = downloads.artifact {
                                            let res = if artifact.url.is_empty() {
                                                local_libs.get(&lib.name).cloned()
                                            } else {
                                                Some(download_file(
                                                    &artifact.url,
                                                    Some(&*artifact.sha1),
                                                    semaphore.clone(),
                                                )
                                                .await?)
                                            };
                                            if res.is_some() {
                                                artifact.url = format_url(&format!("maven/{}", artifact_path));
                                            }
                                            res
                                        } else { None }
                                    } else if let Some(ref mut url) = lib.url {
                                        let res = if url.is_empty() {
                                            local_libs.get(&lib.name).cloned()
                                        } else {
                                            Some(download_file(
                                                url,
                                                None,
                                                semaphore.clone(),
                                            )
                                            .await?)
                                        };
                                        if res.is_some() {
                                            lib.url = Some(format_url("maven/"));
                                        }
                                        res
                                    } else { None };
                                    if let Some(bytes) = artifact_bytes {
                                        upload_file_to_bucket(
                                            format!("{}/{}", "maven", artifact_path),
                                            bytes.to_vec(),
                                            Some("application/java-archive".to_string()),
                                            uploaded_files_mutex.as_ref(),
                                            semaphore.clone(),
                                        ).await?;
                                    }
                                    Ok::<Library, Error>(lib)
                                })).await?;
                                let elapsed = now.elapsed();
                                info!("Elapsed lib DL: {:.2?}", elapsed);
                                // Merge processor data from the install profile into the
                                // launcher version info before uploading.
                                let new_profile = PartialVersionInfo {
                                    id: version_info.id,
                                    inherits_from: version_info.inherits_from,
                                    release_time: version_info.release_time,
                                    time: version_info.time,
                                    main_class: version_info.main_class,
                                    minecraft_arguments: version_info.minecraft_arguments,
                                    arguments: version_info.arguments,
                                    libraries: libs,
                                    type_: version_info.type_,
                                    data: Some(profile.data),
                                    processors: Some(profile.processors),
                                };
                                let version_path = format!(
                                    "neo/v{}/versions/{}.json",
                                    daedalus::modded::CURRENT_NEOFORGE_FORMAT_VERSION,
                                    new_profile.id
                                );
                                upload_file_to_bucket(
                                    version_path.clone(),
                                    serde_json::to_vec(&new_profile)?,
                                    Some("application/json".to_string()),
                                    uploaded_files_mutex.as_ref(),
                                    semaphore.clone(),
                                ).await?;
                                return Ok(Some(LoaderVersion {
                                    id: loader_version_full,
                                    url: format_url(&version_path),
                                    stable: false
                                }));
                            }
                            // Installer was not a readable zip: skip this loader version.
                            Ok(None)
                        }.await
                    });
                    // Drive the loader futures one at a time (chunks of 1),
                    // logging per-chunk timing.
                    {
                        let len = loaders_futures.len();
                        let mut versions = loaders_futures.into_iter().peekable();
                        let mut chunk_index = 0;
                        while versions.peek().is_some() {
                            let now = Instant::now();
                            let chunk: Vec<_> = versions.by_ref().take(1).collect();
                            let res = futures::future::try_join_all(chunk).await?;
                            loaders_versions.extend(res.into_iter().flatten());
                            chunk_index += 1;
                            let elapsed = now.elapsed();
                            info!("Loader Chunk {}/{len} Elapsed: {:.2?}", chunk_index, elapsed);
                        }
                    }
                }
                versions.lock().await.push(daedalus::modded::Version {
                    id: minecraft_version,
                    stable: true,
                    loaders: loaders_versions
                });
                Ok::<(), Error>(())
            });
        }
    }
    // Drive the per-game-version futures, again sequentially.
    {
        let len = version_futures.len();
        let mut versions = version_futures.into_iter().peekable();
        let mut chunk_index = 0;
        while versions.peek().is_some() {
            let now = Instant::now();
            let chunk: Vec<_> = versions.by_ref().take(1).collect();
            futures::future::try_join_all(chunk).await?;
            chunk_index += 1;
            let elapsed = now.elapsed();
            info!("Chunk {}/{len} Elapsed: {:.2?}", chunk_index, elapsed);
        }
    }
    if let Ok(versions) = Arc::try_unwrap(versions) {
        let mut versions = versions.into_inner();
        // Order game versions to match the Mojang version manifest ordering.
        versions.sort_by(|x, y| {
            minecraft_versions
                .versions
                .iter()
                .position(|z| x.id == z.id)
                .unwrap_or_default()
                .cmp(
                    &minecraft_versions
                        .versions
                        .iter()
                        .position(|z| y.id == z.id)
                        .unwrap_or_default(),
                )
        });
        // Order each version's loaders according to the Maven metadata ordering.
        for version in &mut versions {
            let loader_versions = maven_metadata.get(&version.id);
            if let Some(loader_versions) = loader_versions {
                version.loaders.sort_by(|x, y| {
                    loader_versions
                        .iter()
                        .position(|z| y.id == z.1)
                        .unwrap_or_default()
                        .cmp(
                            &loader_versions
                                .iter()
                                .position(|z| x.id == z.1)
                                .unwrap_or_default(),
                        )
                });
                version.loaders.reverse();
            }
        }
        upload_file_to_bucket(
            format!(
                "neo/v{}/manifest.json",
                daedalus::modded::CURRENT_NEOFORGE_FORMAT_VERSION,
            ),
            serde_json::to_vec(&Manifest {
                game_versions: versions,
            })?,
            Some("application/json".to_string()),
            uploaded_files_mutex.as_ref(),
            semaphore,
        )
        .await?;
    }
    // Hand the collected bucket keys back to the caller.
    if let Ok(uploaded_files_mutex) = Arc::try_unwrap(uploaded_files_mutex) {
        uploaded_files.extend(uploaded_files_mutex.into_inner());
    }
    Ok(())
}
/// Maven metadata listing NeoForged artifacts published under the legacy
/// `forge` artifact id.
const DEFAULT_MAVEN_METADATA_URL_1: &str =
    "https://maven.neoforged.net/net/neoforged/forge/maven-metadata.xml";
/// Maven metadata listing artifacts published under the `neoforge` artifact id.
const DEFAULT_MAVEN_METADATA_URL_2: &str =
    "https://maven.neoforged.net/net/neoforged/neoforge/maven-metadata.xml";
/// Root element of a Maven `maven-metadata.xml` document.
#[derive(Debug, Deserialize)]
struct Metadata {
    versioning: Versioning,
}
/// `<versioning>` element of a Maven metadata document.
#[derive(Debug, Deserialize)]
struct Versioning {
    versions: Versions,
}
/// `<versions>` element: the list of published artifact version strings.
#[derive(Debug, Deserialize)]
struct Versions {
    version: Vec<String>,
}
/// Fetches both NeoForged Maven metadata files (legacy `forge` and current
/// `neoforge` artifact ids) and groups loader versions by Minecraft version.
///
/// Map values are tuples of
/// `(full artifact version, loader version string, is "neoforge" artifact)`.
///
/// # Errors
/// Returns an error if a metadata file cannot be downloaded or parsed as XML.
pub async fn fetch_maven_metadata(
    semaphore: Arc<Semaphore>,
) -> Result<HashMap<String, Vec<(String, String, bool)>>, Error> {
    async fn fetch_values(
        url: &str,
        semaphore: Arc<Semaphore>,
    ) -> Result<Metadata, Error> {
        // Parse the XML straight from the downloaded bytes. The previous
        // String::from_utf8(..).unwrap_or_default() silently turned invalid
        // UTF-8 into an empty document, hiding the real failure.
        Ok(serde_xml_rs::from_reader(
            &*download_file(url, None, semaphore).await?,
        )?)
    }
    let forge_values =
        fetch_values(DEFAULT_MAVEN_METADATA_URL_1, semaphore.clone()).await?;
    let neo_values =
        fetch_values(DEFAULT_MAVEN_METADATA_URL_2, semaphore).await?;
    let mut map: HashMap<String, Vec<(String, String, bool)>> = HashMap::new();
    // Legacy artifacts are versioned as "<game version>-<loader version>";
    // anything not matching that shape is ignored.
    for value in forge_values.versioning.versions.version {
        let original = value.clone();
        let parts: Vec<&str> = value.split('-').collect();
        if parts.len() == 2 {
            map.entry(parts[0].to_string()).or_default().push((
                original,
                parts[1].to_string(),
                false,
            ));
        }
    }
    // New artifacts are versioned as "<major>.<minor>.<build...>", where the
    // game version is reconstructed as "1.<major>.<minor>".
    for value in neo_values.versioning.versions.version {
        let original = value.clone();
        let mut parts = value.split('.');
        if let Some(major) = parts.next() {
            if let Some(minor) = parts.next() {
                let game_version = format!("1.{}.{}", major, minor);
                map.entry(game_version.clone()).or_default().push((
                    original.clone(),
                    format!("{}-{}", game_version, original),
                    true,
                ));
            }
        }
    }
    Ok(map)
}
/// The `install_profile.json` embedded in a (Neo)Forge installer jar
/// (installer spec v2).
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
struct ForgeInstallerProfileV2 {
    pub spec: i32,
    pub profile: String,
    pub version: String,
    pub json: String,
    pub path: Option<String>,
    pub minecraft: String,
    // Client/server data values consumed by the installer processors.
    pub data: HashMap<String, SidedDataEntry>,
    pub libraries: Vec<Library>,
    pub processors: Vec<Processor>,
}

View File

@@ -1,370 +0,0 @@
use crate::{download_file, format_url, upload_file_to_bucket, Error};
use daedalus::minecraft::{Library, VersionManifest};
use daedalus::modded::{
LoaderVersion, Manifest, PartialVersionInfo, Version, DUMMY_REPLACE_STRING,
};
use serde::{Deserialize, Serialize};
use std::sync::Arc;
use tokio::sync::{Mutex, RwLock, Semaphore};
/// Generates Quilt loader metadata: mirrors each loader's libraries into our
/// bucket, uploads one templated version JSON per loader version (with the
/// dummy game version replaced by `DUMMY_REPLACE_STRING`), and uploads the
/// versioned manifest.
///
/// `uploaded_files` collects every bucket key written; `semaphore` bounds
/// concurrent network operations.
pub async fn retrieve_data(
    minecraft_versions: &VersionManifest,
    uploaded_files: &mut Vec<String>,
    semaphore: Arc<Semaphore>,
) -> Result<(), Error> {
    let list = fetch_quilt_versions(None, semaphore.clone()).await?;
    // Previously published manifest, reused so known loaders can be skipped.
    let old_manifest = daedalus::modded::fetch_manifest(&format_url(&format!(
        "quilt/v{}/manifest.json",
        daedalus::modded::CURRENT_QUILT_FORMAT_VERSION,
    )))
    .await
    .ok();
    let mut versions = if let Some(old_manifest) = old_manifest {
        old_manifest.game_versions
    } else {
        Vec::new()
    };
    // Tuples are (stable flag, loader version, skip-upload flag). Loaders
    // already in the old manifest are skipped, except the newest (index 0),
    // which is re-fetched with skip_upload = true.
    let loaders_mutex = RwLock::new(Vec::new());
    {
        let mut loaders = loaders_mutex.write().await;
        for (index, loader) in list.loader.iter().enumerate() {
            if versions.iter().any(|x| {
                x.id == DUMMY_REPLACE_STRING
                    && x.loaders.iter().any(|x| x.id == loader.version)
            }) {
                if index == 0 {
                    loaders.push((
                        Box::new(false),
                        loader.version.clone(),
                        Box::new(true),
                    ))
                }
            } else {
                loaders.push((
                    Box::new(false),
                    loader.version.clone(),
                    Box::new(false),
                ))
            }
        }
    }
    // Concrete game version used as the template; its id is later replaced
    // by DUMMY_REPLACE_STRING in the uploaded profiles.
    const DUMMY_GAME_VERSION: &str = "1.19.4-rc2";
    let loader_version_mutex = Mutex::new(Vec::new());
    let uploaded_files_mutex = Arc::new(Mutex::new(Vec::new()));
    // Fetch the template profile for every loader version in parallel.
    let loader_versions = futures::future::try_join_all(
        loaders_mutex.read().await.clone().into_iter().map(
            |(stable, loader, skip_upload)| async {
                let version = fetch_quilt_version(
                    DUMMY_GAME_VERSION,
                    &loader,
                    semaphore.clone(),
                )
                .await?;
                Ok::<(Box<bool>, String, PartialVersionInfo, Box<bool>), Error>(
                    (stable, loader, version, skip_upload),
                )
            },
        ),
    )
    .await?;
    let visited_artifacts_mutex = Arc::new(Mutex::new(Vec::new()));
    futures::future::try_join_all(loader_versions.into_iter()
        .map(
            |(stable, loader, version, skip_upload)| async {
                // Mirror each of this loader's libraries into the bucket.
                let libs = futures::future::try_join_all(
                    version.libraries.into_iter().map(|mut lib| async {
                        {
                            let mut visited_assets =
                                visited_artifacts_mutex.lock().await;
                            if visited_assets.contains(&lib.name) {
                                // Already mirrored — only rewrite name and URL.
                                lib.name = lib.name.replace(DUMMY_GAME_VERSION, DUMMY_REPLACE_STRING);
                                lib.url = Some(format_url("maven/"));
                                return Ok(lib);
                            } else {
                                visited_assets.push(lib.name.clone())
                            }
                        }
                        if lib.name.contains(DUMMY_GAME_VERSION) {
                            // Game-version-dependent library: mirror one copy
                            // per supported game version.
                            lib.name = lib.name.replace(DUMMY_GAME_VERSION, DUMMY_REPLACE_STRING);
                            futures::future::try_join_all(list.game.clone().into_iter().map(|game_version| async {
                                let semaphore = semaphore.clone();
                                let uploaded_files_mutex = uploaded_files_mutex.clone();
                                let lib_name = lib.name.clone();
                                let lib_url = lib.url.clone();
                                async move {
                                    let artifact_path =
                                        daedalus::get_path_from_artifact(&lib_name.replace(DUMMY_REPLACE_STRING, &game_version.version))?;
                                    let artifact = download_file(
                                        &format!(
                                            "{}{}",
                                            lib_url.unwrap_or_else(|| {
                                                "https://maven.quiltmc.org/".to_string()
                                            }),
                                            artifact_path
                                        ),
                                        None,
                                        semaphore.clone(),
                                    )
                                    .await?;
                                    upload_file_to_bucket(
                                        format!("{}/{}", "maven", artifact_path),
                                        artifact.to_vec(),
                                        Some("application/java-archive".to_string()),
                                        &uploaded_files_mutex,
                                        semaphore.clone(),
                                    )
                                    .await?;
                                    Ok::<(), Error>(())
                                }.await?;
                                Ok::<(), Error>(())
                            })).await?;
                            lib.url = Some(format_url("maven/"));
                            return Ok(lib);
                        }
                        // Version-independent library: mirror a single copy.
                        let artifact_path =
                            daedalus::get_path_from_artifact(&lib.name)?;
                        let artifact = download_file(
                            &format!(
                                "{}{}",
                                lib.url.unwrap_or_else(|| {
                                    "https://maven.quiltmc.org/".to_string()
                                }),
                                artifact_path
                            ),
                            None,
                            semaphore.clone(),
                        )
                        .await?;
                        lib.url = Some(format_url("maven/"));
                        upload_file_to_bucket(
                            format!("{}/{}", "maven", artifact_path),
                            artifact.to_vec(),
                            Some("application/java-archive".to_string()),
                            &uploaded_files_mutex,
                            semaphore.clone(),
                        )
                        .await?;
                        Ok::<Library, Error>(lib)
                    }),
                )
                .await?;
                // NOTE(review): the `async move { .. }.await` wrappers here
                // and further down are no-ops; plain expressions would do.
                if async move {
                    *skip_upload
                }.await {
                    return Ok::<(), Error>(())
                }
                let version_path = format!(
                    "quilt/v{}/versions/{}.json",
                    daedalus::modded::CURRENT_QUILT_FORMAT_VERSION,
                    &loader
                );
                // Upload the templated profile with the dummy game version
                // swapped for the placeholder string.
                upload_file_to_bucket(
                    version_path.clone(),
                    serde_json::to_vec(&PartialVersionInfo {
                        arguments: version.arguments,
                        id: version
                            .id
                            .replace(DUMMY_GAME_VERSION, DUMMY_REPLACE_STRING),
                        main_class: version.main_class,
                        release_time: version.release_time,
                        time: version.time,
                        type_: version.type_,
                        inherits_from: version
                            .inherits_from
                            .replace(DUMMY_GAME_VERSION, DUMMY_REPLACE_STRING),
                        libraries: libs,
                        minecraft_arguments: version.minecraft_arguments,
                        processors: None,
                        data: None,
                    })?,
                    Some("application/json".to_string()),
                    &uploaded_files_mutex,
                    semaphore.clone(),
                )
                .await?;
                {
                    let mut loader_version_map = loader_version_mutex.lock().await;
                    async move {
                        loader_version_map.push(LoaderVersion {
                            id: loader.to_string(),
                            url: format_url(&version_path),
                            stable: *stable,
                        });
                    }
                    .await;
                }
                Ok::<(), Error>(())
            },
        ))
    .await?;
    // Register the freshly uploaded loaders under the placeholder version.
    let mut loader_version_mutex = loader_version_mutex.into_inner();
    if !loader_version_mutex.is_empty() {
        if let Some(version) =
            versions.iter_mut().find(|x| x.id == DUMMY_REPLACE_STRING)
        {
            version.loaders.append(&mut loader_version_mutex);
        } else {
            versions.push(Version {
                id: DUMMY_REPLACE_STRING.to_string(),
                stable: true,
                loaders: loader_version_mutex,
            });
        }
    }
    // Ensure every supported game version appears in the manifest, even with
    // no loaders of its own.
    for version in &list.game {
        if !versions.iter().any(|x| x.id == version.version) {
            versions.push(Version {
                id: version.version.clone(),
                stable: version.stable,
                loaders: vec![],
            });
        }
    }
    // Order game versions to match the Mojang version manifest ordering.
    versions.sort_by(|x, y| {
        minecraft_versions
            .versions
            .iter()
            .position(|z| x.id == z.id)
            .unwrap_or_default()
            .cmp(
                &minecraft_versions
                    .versions
                    .iter()
                    .position(|z| y.id == z.id)
                    .unwrap_or_default(),
            )
    });
    // Order loaders according to the Quilt meta listing order.
    for version in &mut versions {
        version.loaders.sort_by(|x, y| {
            list.loader
                .iter()
                .position(|z| x.id == *z.version)
                .unwrap_or_default()
                .cmp(
                    &list
                        .loader
                        .iter()
                        .position(|z| y.id == z.version)
                        .unwrap_or_default(),
                )
        })
    }
    upload_file_to_bucket(
        format!(
            "quilt/v{}/manifest.json",
            daedalus::modded::CURRENT_QUILT_FORMAT_VERSION,
        ),
        serde_json::to_vec(&Manifest {
            game_versions: versions,
        })?,
        Some("application/json".to_string()),
        &uploaded_files_mutex,
        semaphore,
    )
    .await?;
    // Hand the collected bucket keys back to the caller.
    if let Ok(uploaded_files_mutex) = Arc::try_unwrap(uploaded_files_mutex) {
        uploaded_files.extend(uploaded_files_mutex.into_inner());
    }
    Ok(())
}
const QUILT_META_URL: &str = "https://meta.quiltmc.org/v3";
/// Downloads and deserializes the quilt loader profile JSON for the given
/// game version / loader version pair from the Quilt meta API.
async fn fetch_quilt_version(
    version_number: &str,
    loader_version: &str,
    semaphore: Arc<Semaphore>,
) -> Result<PartialVersionInfo, Error> {
    let url = format!(
        "{}/versions/loader/{}/{}/profile/json",
        QUILT_META_URL, version_number, loader_version
    );
    let body = download_file(&url, None, semaphore).await?;
    Ok(serde_json::from_slice(&body)?)
}
#[derive(Serialize, Deserialize, Debug, Clone)]
/// Versions of quilt components, as returned by the Quilt meta `/versions`
/// endpoint.
struct QuiltVersions {
    /// Versions of Minecraft that quilt supports
    pub game: Vec<QuiltGameVersion>,
    /// Available versions of the quilt loader
    pub loader: Vec<QuiltLoaderVersion>,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
/// A version of Minecraft that quilt supports, as reported by the Quilt
/// meta API.
struct QuiltGameVersion {
    /// The version number of the game
    pub version: String,
    /// Whether the Minecraft version is stable or not
    pub stable: bool,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
/// A version of the quilt loader, as reported by the Quilt meta API.
struct QuiltLoaderVersion {
    /// The separator to get the build number
    pub separator: String,
    /// The build number
    pub build: u32,
    /// The maven artifact
    pub maven: String,
    /// The version number of the quilt loader
    pub version: String,
}
/// Fetches the list of quilt versions from the Quilt meta API.
///
/// When `url` is `None`, the default `{QUILT_META_URL}/versions` endpoint
/// is queried.
async fn fetch_quilt_versions(
    url: Option<&str>,
    semaphore: Arc<Semaphore>,
) -> Result<QuiltVersions, Error> {
    let default_url = format!("{}/versions", QUILT_META_URL);
    let target = url.unwrap_or(&default_url);
    let body = download_file(target, None, semaphore).await?;
    Ok(serde_json::from_slice(&body)?)
}

369
daedalus_client/src/util.rs Normal file
View File

@@ -0,0 +1,369 @@
use crate::{Error, ErrorKind};
use bytes::{Bytes, BytesMut};
use futures::StreamExt;
use s3::creds::Credentials;
use s3::{Bucket, Region};
use serde::de::DeserializeOwned;
use std::sync::Arc;
use tokio::sync::Semaphore;
lazy_static::lazy_static! {
    /// S3 bucket handle built lazily from environment configuration.
    ///
    /// `S3_REGION` selects the flavor: `"r2"` uses Cloudflare R2 with
    /// `S3_URL` as the account id; `"path-style"` uses a custom region with
    /// path-style addressing; anything else is a custom region whose
    /// endpoint is `S3_URL`. Credentials come from `S3_ACCESS_TOKEN` and
    /// `S3_SECRET`.
    // NOTE(review): the `.unwrap()`s panic at first use if any S3_* env var
    // is missing — presumably intentional fail-fast for this tool; confirm.
    static ref BUCKET : Bucket = {
        let region = dotenvy::var("S3_REGION").unwrap();
        let b = Bucket::new(
            &dotenvy::var("S3_BUCKET_NAME").unwrap(),
            if &*region == "r2" {
                Region::R2 {
                    account_id: dotenvy::var("S3_URL").unwrap(),
                }
            } else {
                Region::Custom {
                    region: region.clone(),
                    endpoint: dotenvy::var("S3_URL").unwrap(),
                }
            },
            Credentials::new(
                Some(&*dotenvy::var("S3_ACCESS_TOKEN").unwrap()),
                Some(&*dotenvy::var("S3_SECRET").unwrap()),
                None,
                None,
                None,
            ).unwrap(),
        ).unwrap();
        if region == "path-style" {
            b.with_path_style()
        } else {
            b
        }
    };
}
lazy_static::lazy_static! {
    /// Shared HTTP client: daedalus User-Agent, 10s TCP keepalive, and a
    /// 15-second overall request timeout.
    pub static ref REQWEST_CLIENT: reqwest::Client = {
        let mut headers = reqwest::header::HeaderMap::new();
        // A malformed version string would simply leave the header unset.
        if let Ok(header) = reqwest::header::HeaderValue::from_str(&format!(
            "modrinth/daedalus/{} (support@modrinth.com)",
            env!("CARGO_PKG_VERSION")
        )) {
            headers.insert(reqwest::header::USER_AGENT, header);
        }
        reqwest::Client::builder()
            .tcp_keepalive(Some(std::time::Duration::from_secs(10)))
            .timeout(std::time::Duration::from_secs(15))
            .default_headers(headers)
            .build()
            .unwrap()
    };
}
/// Uploads `bytes` to the bucket at `path`, retrying up to three additional
/// times before surfacing the S3 error.
///
/// When `content_type` is provided it is set on the object. A semaphore
/// permit bounds concurrent S3 operations.
#[tracing::instrument(skip(bytes, semaphore))]
pub async fn upload_file_to_bucket(
    path: String,
    bytes: Bytes,
    content_type: Option<String>,
    semaphore: &Arc<Semaphore>,
) -> Result<(), Error> {
    let _permit = semaphore.acquire().await?;
    const RETRIES: i32 = 3;
    let key = path.clone();
    let mut attempt = 1;
    loop {
        tracing::trace!("Attempting file upload, attempt {attempt}");
        let outcome = match content_type {
            Some(ref content_type) => {
                BUCKET
                    .put_object_with_content_type(
                        key.clone(),
                        &bytes,
                        content_type,
                    )
                    .await
            }
            None => BUCKET.put_object(key.clone(), &bytes).await,
        };
        match outcome {
            Ok(_) => return Ok(()),
            // Swallow the error and try again while attempts remain.
            Err(_) if attempt <= RETRIES => attempt += 1,
            Err(err) => {
                return Err(ErrorKind::S3 {
                    inner: err,
                    file: path.clone(),
                }
                .into())
            }
        }
    }
}
/// Uploads the file at `base` by streaming it from the first mirror that
/// succeeds.
///
/// Each mirror URL is `mirror + base`. Mirrors are tried in order; the
/// result of the final mirror (success or failure) is returned if all
/// earlier ones fail.
///
/// # Errors
/// Returns `InvalidInput` when `mirrors` is empty, otherwise propagates the
/// last upload error.
pub async fn upload_url_to_bucket_mirrors(
    base: String,
    mirrors: Vec<String>,
    semaphore: &Arc<Semaphore>,
) -> Result<(), Error> {
    if mirrors.is_empty() {
        return Err(ErrorKind::InvalidInput(
            "No mirrors provided!".to_string(),
        )
        .into());
    }
    let last = mirrors.len() - 1;
    for (index, mirror) in mirrors.iter().enumerate() {
        let result = upload_url_to_bucket(
            &base,
            &format!("{}{}", mirror, base),
            semaphore,
        )
        .await;
        // Stop on the first success; otherwise keep trying until the final
        // mirror, whose result is returned as-is.
        if result.is_ok() || index == last {
            return result;
        }
    }
    unreachable!()
}
/// Streams the file at `url` into the bucket at `path`, retrying the whole
/// transfer up to three additional times on failure.
///
/// Bodies smaller than 5 MiB (or with no Content-Length) are uploaded with
/// a single PUT; larger bodies go through an S3 multipart upload in ~5 MiB
/// parts.
#[tracing::instrument(skip(semaphore))]
pub async fn upload_url_to_bucket(
    path: &str,
    url: &str,
    semaphore: &Arc<Semaphore>,
) -> Result<(), Error> {
    let _permit = semaphore.acquire().await?;
    const RETRIES: i32 = 3;
    for attempt in 1..=(RETRIES + 1) {
        tracing::trace!("Attempting streaming file upload, attempt {attempt}");
        let result: Result<(), Error> = {
            let response =
                REQWEST_CLIENT.get(url).send().await.map_err(|err| {
                    ErrorKind::Fetch {
                        inner: err,
                        item: url.to_string(),
                    }
                })?;
            let content_type = response
                .headers()
                .get(reqwest::header::CONTENT_TYPE)
                .and_then(|ct| ct.to_str().ok())
                .unwrap_or("application/octet-stream")
                .to_string();
            // Missing Content-Length yields 0, which routes into the
            // single-PUT branch below.
            let total_size = response.content_length().unwrap_or(0);
            const MIN_PART_SIZE: usize = 5 * 1024 * 1024;
            if total_size < MIN_PART_SIZE as u64 {
                let data =
                    response.bytes().await.map_err(|err| ErrorKind::Fetch {
                        inner: err,
                        item: url.to_string(),
                    })?;
                BUCKET.put_object(&path, &data).await.map_err(|err| {
                    ErrorKind::S3 {
                        inner: err,
                        file: path.to_string(),
                    }
                })?;
            } else {
                let mut stream = response.bytes_stream();
                let multipart = BUCKET
                    .initiate_multipart_upload(path, &content_type)
                    .await
                    .map_err(|err| ErrorKind::S3 {
                        inner: err,
                        file: path.to_string(),
                    })?;
                let mut parts = Vec::new();
                let mut buffer = BytesMut::new();
                // Uploads one accumulated buffer as the next multipart part
                // and records the part for the final completion call.
                async fn upload_part(
                    parts: &mut Vec<s3::serde_types::Part>,
                    buffer: Vec<u8>,
                    path: &str,
                    upload_id: &str,
                    content_type: &str,
                ) -> Result<(), Error> {
                    let part = BUCKET
                        .put_multipart_chunk(
                            buffer,
                            path,
                            (parts.len() + 1) as u32,
                            upload_id,
                            content_type,
                        )
                        .await
                        .map_err(|err| ErrorKind::S3 {
                            inner: err,
                            file: path.to_string(),
                        })?;
                    parts.push(part);
                    Ok(())
                }
                // Accumulate streamed chunks until a full part is available.
                while let Some(chunk) = stream.next().await {
                    let chunk = chunk.map_err(|err| ErrorKind::Fetch {
                        inner: err,
                        item: url.to_string(),
                    })?;
                    buffer.extend_from_slice(&chunk);
                    if buffer.len() >= MIN_PART_SIZE {
                        upload_part(
                            &mut parts,
                            buffer.to_vec(),
                            path,
                            &multipart.upload_id,
                            &content_type,
                        )
                        .await?;
                        buffer.clear();
                    }
                }
                // Flush whatever remains as the final (possibly short) part.
                if !buffer.is_empty() {
                    let part = BUCKET
                        .put_multipart_chunk(
                            buffer.to_vec(),
                            path,
                            (parts.len() + 1) as u32,
                            &multipart.upload_id,
                            &content_type,
                        )
                        .await
                        .map_err(|err| ErrorKind::S3 {
                            inner: err,
                            file: path.to_string(),
                        })?;
                    parts.push(part);
                }
                // NOTE(review): a failed attempt leaves the multipart upload
                // dangling — there is no abort_multipart_upload on the error
                // paths above; confirm lifecycle rules clean these up.
                BUCKET
                    .complete_multipart_upload(
                        path,
                        &multipart.upload_id,
                        parts,
                    )
                    .await
                    .map_err(|err| ErrorKind::S3 {
                        inner: err,
                        file: path.to_string(),
                    })?;
            }
            Ok(())
        };
        match result {
            Ok(_) => return Ok(()),
            Err(_) if attempt <= RETRIES => continue,
            Err(_) => {
                result?;
            }
        }
    }
    unreachable!()
}
/// Computes the hex-encoded SHA-1 digest of `bytes`.
///
/// The hashing runs on a blocking worker thread so CPU-bound work stays off
/// the async executor.
#[tracing::instrument(skip(bytes))]
pub async fn sha1_async(bytes: Bytes) -> Result<String, Error> {
    let digest = tokio::task::spawn_blocking(move || {
        sha1_smol::Sha1::from(bytes).hexdigest()
    })
    .await?;
    Ok(digest)
}
/// Downloads `url`, optionally verifying the body against an expected SHA-1
/// hash, with retries.
///
/// Network/HTTP failures are retried up to `RETRIES` (10) times; checksum
/// mismatches are retried only 3 times before returning `ChecksumFailure`.
#[tracing::instrument(skip(semaphore))]
pub async fn download_file(
    url: &str,
    sha1: Option<&str>,
    semaphore: &Arc<Semaphore>,
) -> Result<bytes::Bytes, crate::Error> {
    let _permit = semaphore.acquire().await?;
    tracing::trace!("Starting file download");
    const RETRIES: u32 = 10;
    for attempt in 1..=(RETRIES + 1) {
        let result = REQWEST_CLIENT
            .get(url)
            .send()
            .await
            .and_then(|x| x.error_for_status());
        match result {
            Ok(x) => {
                let bytes = x.bytes().await;
                if let Ok(bytes) = bytes {
                    if let Some(sha1) = sha1 {
                        if &*sha1_async(bytes.clone()).await? != sha1 {
                            // NOTE(review): checksum retries are capped at 3
                            // while transport retries use RETRIES (10) —
                            // confirm this asymmetry is intentional.
                            if attempt <= 3 {
                                continue;
                            } else {
                                return Err(
                                    crate::ErrorKind::ChecksumFailure {
                                        hash: sha1.to_string(),
                                        url: url.to_string(),
                                        tries: attempt,
                                    }
                                    .into(),
                                );
                            }
                        }
                    }
                    return Ok(bytes);
                } else if attempt <= RETRIES {
                    // Body read failed; retry the whole request.
                    continue;
                } else if let Err(err) = bytes {
                    return Err(crate::ErrorKind::Fetch {
                        inner: err,
                        item: url.to_string(),
                    }
                    .into());
                }
            }
            Err(_) if attempt <= RETRIES => continue,
            Err(err) => {
                return Err(crate::ErrorKind::Fetch {
                    inner: err,
                    item: url.to_string(),
                }
                .into())
            }
        }
    }
    unreachable!()
}
/// Downloads `url` and deserializes the response body as JSON into `T`.
pub async fn fetch_json<T: DeserializeOwned>(
    url: &str,
    semaphore: &Arc<Semaphore>,
) -> Result<T, Error> {
    let body = download_file(url, None, semaphore).await?;
    Ok(serde_json::from_slice(&body)?)
}
/// Downloads `url` and deserializes the response body as XML into `T`.
pub async fn fetch_xml<T: DeserializeOwned>(
    url: &str,
    semaphore: &Arc<Semaphore>,
) -> Result<T, Error> {
    let body = download_file(url, None, semaphore).await?;
    Ok(serde_xml_rs::from_reader(&*body)?)
}
/// Joins `path` onto the configured `BASE_URL` with a single `/`
/// (no slash normalization is performed).
///
/// # Panics
/// Panics if the `BASE_URL` environment variable is unset.
pub fn format_url(path: &str) -> String {
    format!(
        "{}/{}",
        dotenvy::var("BASE_URL").expect("BASE_URL must be set"),
        path
    )
}