Daedalus Rewrite + Code Cleanup (#16)

* [wip] rewrite daedalus, vanilla, fabric, and quilt

* finish forge + neo

* fix docker

* fix neoforge 1.21+

* update concurrency limit

* finish

* remove mac garb
This commit is contained in:
Geometrically
2024-06-25 15:47:27 -07:00
committed by GitHub
parent ac07ac5234
commit 8b16cd1b36
19 changed files with 2334 additions and 2529 deletions

View File

@@ -12,30 +12,6 @@ pub mod modded;
#[derive(thiserror::Error, Debug)]
/// An error type representing possible errors when fetching metadata
pub enum Error {
#[error("Failed to validate file checksum at url {url} with hash {hash} after {tries} tries")]
/// A checksum was failed to validate for a file
ChecksumFailure {
/// The checksum's hash
hash: String,
/// The URL of the file attempted to be downloaded
url: String,
/// The amount of tries that the file was downloaded until failure
tries: u32,
},
/// There was an error while deserializing metadata
#[error("Error while deserializing JSON")]
SerdeError(#[from] serde_json::Error),
/// There was a network error when fetching an object
#[error("Unable to fetch {item}")]
FetchError {
/// The internal reqwest error
inner: reqwest::Error,
/// The item that was failed to be fetched
item: String,
},
/// There was an error when managing async tasks
#[error("Error while managing asynchronous tasks")]
TaskError(#[from] tokio::task::JoinError),
/// Error while parsing input
#[error("{0}")]
ParseError(String),
@@ -124,100 +100,3 @@ pub fn get_path_from_artifact(artifact: &str) -> Result<String, Error> {
))
}
}
/// Downloads a file from specified mirrors
///
/// Each mirror is prepended to `base` to form the full URL, and mirrors are
/// tried in order. The first successful download is returned; if every mirror
/// fails, the error from the *last* mirror is returned.
///
/// # Errors
/// - `Error::ParseError` if `mirrors` is empty
/// - whatever `download_file` returns for the final mirror when all mirrors fail
pub async fn download_file_mirrors(
    base: &str,
    mirrors: &[&str],
    sha1: Option<&str>,
) -> Result<bytes::Bytes, Error> {
    // Splitting off the last mirror lets us return its result directly,
    // removing the redundant `is_err()` check and the `unreachable!()` that
    // the index-comparison formulation required.
    let Some((last, rest)) = mirrors.split_last() else {
        return Err(Error::ParseError("No mirrors provided!".to_string()));
    };
    // Try every mirror except the last; any failure just falls through to
    // the next mirror.
    for mirror in rest {
        if let Ok(bytes) =
            download_file(&format!("{}{}", mirror, base), sha1).await
        {
            return Ok(bytes);
        }
    }
    // Last mirror: propagate its result (Ok or Err) as-is.
    download_file(&format!("{}{}", last, base), sha1).await
}
/// Downloads a file with retry and checksum functionality
///
/// Makes up to 4 attempts against `url`. An attempt is retried when the
/// request fails, when the response body cannot be read, or — if `sha1` is
/// provided — when the downloaded bytes do not match the expected SHA-1
/// hex digest. Failures on the 4th (final) attempt are returned to the caller.
///
/// # Errors
/// - `Error::FetchError` if the client cannot be built, or if the final
///   attempt fails at the request/body level
/// - `Error::ChecksumFailure` if the final attempt downloads successfully
///   but fails the hash comparison
pub async fn download_file(
    url: &str,
    sha1: Option<&str>,
) -> Result<bytes::Bytes, Error> {
    // Identify this client to upstream servers. Insertion is silently
    // skipped if the formatted string is somehow not a valid header value.
    let mut headers = reqwest::header::HeaderMap::new();
    if let Ok(header) = reqwest::header::HeaderValue::from_str(&format!(
        "modrinth/daedalus/{} (support@modrinth.com)",
        env!("CARGO_PKG_VERSION")
    )) {
        headers.insert(reqwest::header::USER_AGENT, header);
    }
    // A fresh client per call, with TCP keepalive and a 15-second total
    // request timeout. Builder failure is mapped to a FetchError tagged
    // with the URL we were trying to reach.
    let client = reqwest::Client::builder()
        .tcp_keepalive(Some(std::time::Duration::from_secs(10)))
        .timeout(std::time::Duration::from_secs(15))
        .default_headers(headers)
        .build()
        .map_err(|err| Error::FetchError {
            inner: err,
            item: url.to_string(),
        })?;
    // Retry loop: attempts 1-3 `continue` on any failure; attempt 4
    // surfaces the failure to the caller instead.
    for attempt in 1..=4 {
        let result = client.get(url).send().await;
        match result {
            Ok(x) => {
                let bytes = x.bytes().await;
                if let Ok(bytes) = bytes {
                    if let Some(sha1) = sha1 {
                        // Verify the download against the expected SHA-1
                        // hex digest; a mismatch is retried like any other
                        // failure until the final attempt.
                        if &*get_hash(bytes.clone()).await? != sha1 {
                            if attempt <= 3 {
                                continue;
                            } else {
                                return Err(Error::ChecksumFailure {
                                    hash: sha1.to_string(),
                                    url: url.to_string(),
                                    tries: attempt,
                                });
                            }
                        }
                    }
                    return Ok(bytes);
                } else if attempt <= 3 {
                    // Body read failed; retry.
                    continue;
                } else if let Err(err) = bytes {
                    // Final attempt: report the body-read failure.
                    return Err(Error::FetchError {
                        inner: err,
                        item: url.to_string(),
                    });
                }
            }
            // Request itself failed: retry, or report on the final attempt.
            Err(_) if attempt <= 3 => continue,
            Err(err) => {
                return Err(Error::FetchError {
                    inner: err,
                    item: url.to_string(),
                })
            }
        }
    }
    // Every branch of the 4th iteration returns above, so the loop cannot
    // fall through.
    unreachable!()
}
/// Computes a checksum of the input bytes
///
/// The SHA-1 digest is computed on a blocking worker thread via
/// `spawn_blocking` so the async executor is not stalled by the hashing
/// work, and the result is returned as a hex string.
///
/// # Errors
/// Returns `Error::TaskError` if the blocking task panics or is cancelled.
pub async fn get_hash(bytes: bytes::Bytes) -> Result<String, Error> {
    let digest_task = tokio::task::spawn_blocking(move || {
        sha1::Sha1::from(bytes).hexdigest()
    });
    Ok(digest_task.await?)
}