AstralRinth/apps/daedalus_client/src/main.rs
Josiah Glosson cf190d86d5 Update Rust dependencies (#4139)
* Update Rust version
* Update async-compression 0.4.25 -> 0.4.27
* Update async-tungstenite 0.29.1 -> 0.30.0
* Update bytemuck 1.23.0 -> 1.23.1
* Update clap 4.5.40 -> 4.5.43
* Update deadpool-redis 0.21.1 -> 0.22.0 and redis 0.31.0 -> 0.32.4
* Update enumset 1.1.6 -> 1.1.7
* Update hyper-util 0.1.14 -> 0.1.16
* Update indexmap 2.9.0 -> 2.10.0
* Update indicatif 0.17.11 -> 0.18.0
* Update jemalloc_pprof 0.7.0 -> 0.8.1
* Update lettre 0.11.17 -> 0.11.18
* Update meilisearch-sdk 0.28.0 -> 0.29.1
* Update notify 8.0.0 -> 8.2.0 and notify-debouncer-mini 0.6.0 -> 0.7.0
* Update quick-xml 0.37.5 -> 0.38.1
* Fix theseus lint
* Update reqwest 0.12.20 -> 0.12.22
* Cargo fmt in theseus
* Update rgb 0.8.50 -> 0.8.52
* Update sentry 0.41.0 -> 0.42.0 and sentry-actix 0.41.0 -> 0.42.0
* Update serde_json 1.0.140 -> 1.0.142
* Update serde_with 3.13.0 -> 3.14.0
* Update spdx 0.10.8 -> 0.10.9
* Update sysinfo 0.35.2 -> 0.36.1
* Update tauri suite
* Fix build by updating mappings
* Update tokio 1.45.1 -> 1.47.1 and tokio-util 0.7.15 -> 0.7.16
* Update tracing-actix-web 0.7.18 -> 0.7.19
* Update zip 4.2.0 -> 4.3.0
* Misc Cargo.lock updates
* Update Dockerfiles
2025-08-08 22:50:44 +00:00


use crate::util::{
    REQWEST_CLIENT, format_url, upload_file_to_bucket,
    upload_url_to_bucket_mirrors,
};
use daedalus::get_path_from_artifact;
use dashmap::{DashMap, DashSet};
use std::sync::Arc;
use tokio::sync::Semaphore;
use tracing_error::ErrorLayer;
use tracing_subscriber::{EnvFilter, fmt, prelude::*};

mod error;
mod fabric;
mod forge;
mod minecraft;
pub mod util;

pub use error::{Error, ErrorKind, Result};
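
// Entry point: load .env, initialize tracing, verify required configuration,
// fetch loader metadata, upload and mirror the results, then optionally purge
// the Cloudflare cache.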
#[tokio::main]
async fn main() -> Result<()> {
    dotenvy::dotenv().ok();

    let subscriber = tracing_subscriber::registry()
        .with(fmt::layer())
        .with(EnvFilter::from_default_env())
        .with(ErrorLayer::default());
    tracing::subscriber::set_global_default(subscriber)?;
    tracing::info!("Initialized tracing. Starting Daedalus!");

    if check_env_vars() {
        tracing::error!("Some environment variables are missing!");
        return Ok(());
    }
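
    // Bound concurrent fetches and uploads; `CONCURRENCY_LIMIT` is read from
    // the environment and defaults to 10 permits.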
    let semaphore = Arc::new(Semaphore::new(
        dotenvy::var("CONCURRENCY_LIMIT")
            .ok()
            .and_then(|x| x.parse().ok())
            .unwrap_or(10),
    ));

    // path, upload file
    let upload_files: DashMap<String, UploadFile> = DashMap::new();
    // path, mirror artifact
    let mirror_artifacts: DashMap<String, MirrorArtifact> = DashMap::new();
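
    // Populate `upload_files` and `mirror_artifacts` with metadata for each
    // supported loader: vanilla Minecraft, Fabric, Quilt, NeoForge, and Forge.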
    minecraft::fetch(semaphore.clone(), &upload_files, &mirror_artifacts)
        .await?;
    fabric::fetch_fabric(semaphore.clone(), &upload_files, &mirror_artifacts)
        .await?;
    fabric::fetch_quilt(semaphore.clone(), &upload_files, &mirror_artifacts)
        .await?;
    forge::fetch_neo(semaphore.clone(), &upload_files, &mirror_artifacts)
        .await?;
    forge::fetch_forge(semaphore.clone(), &upload_files, &mirror_artifacts)
        .await?;
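
    // Upload every collected file to the configured bucket concurrently,
    // bounded by the semaphore.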
    futures::future::try_join_all(upload_files.iter().map(|x| {
        upload_file_to_bucket(
            x.key().clone(),
            x.value().file.clone(),
            x.value().content_type.clone(),
            &semaphore,
        )
    }))
    .await?;
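
    // Mirror external artifacts under `maven/`; each mirror entry is either a
    // complete URL or a base path that the artifact path is appended to.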
    futures::future::try_join_all(mirror_artifacts.iter().map(|x| {
        upload_url_to_bucket_mirrors(
            format!("maven/{}", x.key()),
            x.value()
                .mirrors
                .iter()
                .map(|mirror| {
                    if mirror.entire_url {
                        mirror.path.clone()
                    } else {
                        format!("{}{}", mirror.path, x.key())
                    }
                })
                .collect(),
            x.sha1.clone(),
            &semaphore,
        )
    }))
    .await?;
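
    // When Cloudflare integration is enabled, purge the CDN cache for every
    // uploaded and mirrored URL.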
if dotenvy::var("CLOUDFLARE_INTEGRATION")
.ok()
.and_then(|x| x.parse::<bool>().ok())
.unwrap_or(false)
&& let Ok(token) = dotenvy::var("CLOUDFLARE_TOKEN")
&& let Ok(zone_id) = dotenvy::var("CLOUDFLARE_ZONE_ID")
{
let cache_clears = upload_files
.into_iter()
.map(|x| format_url(&x.0))
.chain(
mirror_artifacts
.into_iter()
.map(|x| format_url(&format!("maven/{}", x.0))),
)
.collect::<Vec<_>>();
// Cloudflare ratelimits cache clears to 500 files per request
for chunk in cache_clears.chunks(500) {
REQWEST_CLIENT.post(format!("https://api.cloudflare.com/client/v4/zones/{zone_id}/purge_cache"))
.bearer_auth(&token)
.json(&serde_json::json!({
"files": chunk
}))
.send()
.await
.map_err(|err| {
ErrorKind::Fetch {
inner: err,
item: "cloudflare clear cache".to_string(),
}
})?
.error_for_status()
.map_err(|err| {
ErrorKind::Fetch {
inner: err,
item: "cloudflare clear cache".to_string(),
}
})?;
}
}
Ok(())
}
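
/// In-memory contents of a file staged for upload, with its optional
/// Content-Type.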
pub struct UploadFile {
    file: bytes::Bytes,
    content_type: Option<String>,
}
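
/// An artifact to mirror: the set of sources it can be fetched from and an
/// optional SHA-1 checksum.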
pub struct MirrorArtifact {
    pub sha1: Option<String>,
    pub mirrors: DashSet<Mirror>,
}
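
/// A single mirror source. `entire_url` marks `path` as a complete URL rather
/// than a base to which the artifact path is appended.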
#[derive(Eq, PartialEq, Hash)]
pub struct Mirror {
    path: String,
    entire_url: bool,
}
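
/// Registers `artifact` for mirroring under the path returned by
/// `get_path_from_artifact`, merging the given mirrors into any existing
/// entry for that path.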
#[tracing::instrument(skip(mirror_artifacts))]
pub fn insert_mirrored_artifact(
    artifact: &str,
    sha1: Option<String>,
    mirrors: Vec<String>,
    entire_url: bool,
    mirror_artifacts: &DashMap<String, MirrorArtifact>,
) -> Result<()> {
    let val = mirror_artifacts
        .entry(get_path_from_artifact(artifact)?)
        .or_insert(MirrorArtifact {
            sha1,
            mirrors: DashSet::new(),
        });

    for mirror in mirrors {
        val.mirrors.insert(Mirror {
            path: mirror,
            entire_url,
        });
    }

    Ok(())
}
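
/// Returns `true` if any required environment variable is missing or fails to
/// parse; the Cloudflare variables are only required when
/// `CLOUDFLARE_INTEGRATION` is enabled.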
fn check_env_vars() -> bool {
    let mut failed = false;

    fn check_var<T: std::str::FromStr>(var: &str) -> bool {
        if dotenvy::var(var)
            .ok()
            .and_then(|s| s.parse::<T>().ok())
            .is_none()
        {
            tracing::warn!(
                "Variable `{}` missing in dotenvy or not of type `{}`",
                var,
                std::any::type_name::<T>()
            );
            true
        } else {
            false
        }
    }

    failed |= check_var::<String>("BASE_URL");
    failed |= check_var::<String>("S3_ACCESS_TOKEN");
    failed |= check_var::<String>("S3_SECRET");
    failed |= check_var::<String>("S3_URL");
    failed |= check_var::<String>("S3_REGION");
    failed |= check_var::<String>("S3_BUCKET_NAME");

    if dotenvy::var("CLOUDFLARE_INTEGRATION")
        .ok()
        .and_then(|x| x.parse::<bool>().ok())
        .unwrap_or(false)
    {
        failed |= check_var::<String>("CLOUDFLARE_TOKEN");
        failed |= check_var::<String>("CLOUDFLARE_ZONE_ID");
    }

    failed
}