Daedalus Rewrite + Code Cleanup (#16)

* [wip] rewrite daedalus, vanilla, fabric, and quilt
* finish forge + neo
* fix docker
* fix neoforge 1.21+
* update concurrency limit
* finish
* remove mac garb

.env (16 changed lines)
@@ -1,9 +1,15 @@
-RUST_LOG=info
+RUST_LOG=warn,daedalus_client=trace

-BASE_URL=https://modrinth-cdn-staging.nyc3.digitaloceanspaces.com
+BASE_URL=http://localhost:9000/meta

+CONCURRENCY_LIMIT=10
+
 S3_ACCESS_TOKEN=none
 S3_SECRET=none
-S3_URL=none
-S3_REGION=none
-S3_BUCKET_NAME=none
+S3_URL=http://localhost:9000
+S3_REGION=path-style
+S3_BUCKET_NAME=meta
+
+CLOUDFLARE_INTEGRATION=false
+CLOUDFLARE_TOKEN=none
+CLOUDFLARE_ZONE_ID=none
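As a quick illustration of how these settings might be consumed, here is a minimal sketch using dotenvy and std::env (dotenvy appears in daedalus_client's dependency list further down). The helper name and its defaults are illustrative, not the crate's actual startup code.

// Illustrative sketch only: reads the .env values shown above via dotenvy.
fn read_config() -> Result<(String, usize), std::env::VarError> {
    // Load .env into the process environment if present; ignore a missing file.
    let _ = dotenvy::dotenv();

    let base_url = std::env::var("BASE_URL")?;
    let concurrency = std::env::var("CONCURRENCY_LIMIT")
        .ok()
        .and_then(|v| v.parse::<usize>().ok())
        .unwrap_or(10); // falls back to the value used in the sample .env

    Ok((base_url, concurrency))
}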
.github/workflows/run.yml (new file, 49 lines)

@@ -0,0 +1,49 @@
name: Run Meta

on:
  schedule:
    - cron: '*/5 * * * *'

jobs:
  run-docker:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3

      - name: Log in to GitHub Container Registry
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Pull Docker image from GHCR
        run: docker pull ghcr.io/modrinth/daedalus:latest

      - name: Run Docker container
        env:
          BASE_URL: ${{ secrets.BASE_URL }}
          S3_ACCESS_TOKEN: ${{ secrets.S3_ACCESS_TOKEN }}
          S3_SECRET: ${{ secrets.S3_SECRET }}
          S3_URL: ${{ secrets.S3_URL }}
          S3_REGION: ${{ secrets.S3_REGION }}
          S3_BUCKET_NAME: ${{ secrets.S3_BUCKET_NAME }}
          CLOUDFLARE_INTEGRATION: ${{ secrets.CLOUDFLARE_INTEGRATION }}
          CLOUDFLARE_TOKEN: ${{ secrets.CLOUDFLARE_TOKEN }}
          CLOUDFLARE_ZONE_ID: ${{ secrets.CLOUDFLARE_ZONE_ID }}
        run: |
          docker run -d \
            --name daedalus \
            -e BASE_URL=$BASE_URL \
            -e S3_ACCESS_TOKEN=$S3_ACCESS_TOKEN \
            -e S3_SECRET=$S3_SECRET \
            -e S3_URL=$S3_URL \
            -e S3_REGION=$S3_REGION \
            -e S3_BUCKET_NAME=$S3_BUCKET_NAME \
            -e CLOUDFLARE_INTEGRATION=$CLOUDFLARE_INTEGRATION \
            -e CLOUDFLARE_TOKEN=$CLOUDFLARE_TOKEN \
            -e CLOUDFLARE_ZONE_ID=$CLOUDFLARE_ZONE_ID \
            ghcr.io/modrinth/daedalus:latest
.gitignore (1 changed line)

@@ -1,6 +1,7 @@
 ### Intellij ###
 # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
 # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
+caches/

 # User-specific stuff
 .idea/**/workspace.xml
.idea/daedalus.iml (1 changed line)

@@ -6,6 +6,7 @@
     <sourceFolder url="file://$MODULE_DIR$/daedalus/src" isTestSource="false" />
     <sourceFolder url="file://$MODULE_DIR$/daedalus_client/src" isTestSource="false" />
     <sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" />
+    <sourceFolder url="file://$MODULE_DIR$/daedalus_client_new/src" isTestSource="false" />
     <excludeFolder url="file://$MODULE_DIR$/target" />
   </content>
   <orderEntry type="inheritedJdk" />
@@ -1,4 +1,4 @@
-FROM rust:1.68.2 as build
+FROM rust:1.79.0 as build
 ENV PKG_CONFIG_ALLOW_CROSS=1

 WORKDIR /usr/src/daedalus
daedalus/Cargo.toml

@@ -1,8 +1,8 @@
 [package]
 name = "daedalus"
-version = "0.1.27"
-authors = ["Jai A <jaiagr+gpg@pm.me>"]
-edition = "2018"
+version = "0.2.0"
+authors = ["Jai A <jai@modrinth.com>"]
+edition = "2021"
 license = "MIT"
 description = "Utilities for querying and parsing Minecraft metadata"
 repository = "https://github.com/modrinth/daedalus/"

@@ -14,12 +14,8 @@ readme = "README.md"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
reqwest = { version = "0.11", features = ["json"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
chrono = { version = "0.4", features = ["serde"] }
bytes = "1"
thiserror = "1.0"
tokio = { version = "1", features = ["full"] }
sha1 = { version = "0.6.1", features = ["std"]}
bincode = {version = "2.0.0-rc.2", features = ["serde"], optional = true}
@@ -12,30 +12,6 @@ pub mod modded;
#[derive(thiserror::Error, Debug)]
/// An error type representing possible errors when fetching metadata
pub enum Error {
    #[error("Failed to validate file checksum at url {url} with hash {hash} after {tries} tries")]
    /// A checksum was failed to validate for a file
    ChecksumFailure {
        /// The checksum's hash
        hash: String,
        /// The URL of the file attempted to be downloaded
        url: String,
        /// The amount of tries that the file was downloaded until failure
        tries: u32,
    },
    /// There was an error while deserializing metadata
    #[error("Error while deserializing JSON")]
    SerdeError(#[from] serde_json::Error),
    /// There was a network error when fetching an object
    #[error("Unable to fetch {item}")]
    FetchError {
        /// The internal reqwest error
        inner: reqwest::Error,
        /// The item that was failed to be fetched
        item: String,
    },
    /// There was an error when managing async tasks
    #[error("Error while managing asynchronous tasks")]
    TaskError(#[from] tokio::task::JoinError),
    /// Error while parsing input
    #[error("{0}")]
    ParseError(String),
@@ -124,100 +100,3 @@ pub fn get_path_from_artifact(artifact: &str) -> Result<String, Error> {
        ))
    }
}

/// Downloads a file from specified mirrors
pub async fn download_file_mirrors(
    base: &str,
    mirrors: &[&str],
    sha1: Option<&str>,
) -> Result<bytes::Bytes, Error> {
    if mirrors.is_empty() {
        return Err(Error::ParseError("No mirrors provided!".to_string()));
    }

    for (index, mirror) in mirrors.iter().enumerate() {
        let result = download_file(&format!("{}{}", mirror, base), sha1).await;

        if result.is_ok() || (result.is_err() && index == (mirrors.len() - 1)) {
            return result;
        }
    }

    unreachable!()
}

/// Downloads a file with retry and checksum functionality
pub async fn download_file(
    url: &str,
    sha1: Option<&str>,
) -> Result<bytes::Bytes, Error> {
    let mut headers = reqwest::header::HeaderMap::new();
    if let Ok(header) = reqwest::header::HeaderValue::from_str(&format!(
        "modrinth/daedalus/{} (support@modrinth.com)",
        env!("CARGO_PKG_VERSION")
    )) {
        headers.insert(reqwest::header::USER_AGENT, header);
    }
    let client = reqwest::Client::builder()
        .tcp_keepalive(Some(std::time::Duration::from_secs(10)))
        .timeout(std::time::Duration::from_secs(15))
        .default_headers(headers)
        .build()
        .map_err(|err| Error::FetchError {
            inner: err,
            item: url.to_string(),
        })?;

    for attempt in 1..=4 {
        let result = client.get(url).send().await;

        match result {
            Ok(x) => {
                let bytes = x.bytes().await;

                if let Ok(bytes) = bytes {
                    if let Some(sha1) = sha1 {
                        if &*get_hash(bytes.clone()).await? != sha1 {
                            if attempt <= 3 {
                                continue;
                            } else {
                                return Err(Error::ChecksumFailure {
                                    hash: sha1.to_string(),
                                    url: url.to_string(),
                                    tries: attempt,
                                });
                            }
                        }
                    }

                    return Ok(bytes);
                } else if attempt <= 3 {
                    continue;
                } else if let Err(err) = bytes {
                    return Err(Error::FetchError {
                        inner: err,
                        item: url.to_string(),
                    });
                }
            }
            Err(_) if attempt <= 3 => continue,
            Err(err) => {
                return Err(Error::FetchError {
                    inner: err,
                    item: url.to_string(),
                })
            }
        }
    }

    unreachable!()
}

/// Computes a checksum of the input bytes
pub async fn get_hash(bytes: bytes::Bytes) -> Result<String, Error> {
    let hash =
        tokio::task::spawn_blocking(|| sha1::Sha1::from(bytes).hexdigest())
            .await?;

    Ok(hash)
}
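For reference, a hedged usage sketch of the mirror/retry helper above (most of which this hunk removes from lib.rs). The artifact path, mirror hosts, and hash below are placeholders, not values from the repository.

// Hypothetical call site: try each mirror in order, verifying the SHA1 when known.
let jar = daedalus::download_file_mirrors(
    "com/example/library/1.0/library-1.0.jar", // path appended to each mirror base
    &["https://mirror-a.example.invalid/", "https://mirror-b.example.invalid/"],
    Some("0123456789abcdef0123456789abcdef01234567"), // expected SHA1, if available
)
.await?;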
@@ -1,16 +1,11 @@
|
||||
use crate::modded::{Processor, SidedDataEntry};
|
||||
use crate::{download_file, Error};
|
||||
use chrono::{DateTime, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
|
||||
#[cfg(feature = "bincode")]
|
||||
use bincode::{Decode, Encode};
|
||||
|
||||
/// The latest version of the format the model structs deserialize to
|
||||
pub const CURRENT_FORMAT_VERSION: usize = 0;
|
||||
|
||||
#[cfg_attr(feature = "bincode", derive(Encode, Decode))]
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
/// The version type
|
||||
@@ -37,7 +32,6 @@ impl VersionType {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "bincode", derive(Encode, Decode))]
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
/// A game version of Minecraft
|
||||
@@ -50,26 +44,18 @@ pub struct Version {
|
||||
/// A link to additional information about the version
|
||||
pub url: String,
|
||||
/// The latest time a file in this version was updated
|
||||
#[cfg_attr(feature = "bincode", bincode(with_serde))]
|
||||
pub time: DateTime<Utc>,
|
||||
/// The time this version was released
|
||||
#[cfg_attr(feature = "bincode", bincode(with_serde))]
|
||||
pub release_time: DateTime<Utc>,
|
||||
/// The SHA1 hash of the additional information about the version
|
||||
pub sha1: String,
|
||||
/// Whether the version supports the latest player safety features
|
||||
pub compliance_level: u32,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
/// (Modrinth Provided) The link to the assets index for this version
|
||||
/// This is only available when using the Modrinth mirror
|
||||
pub assets_index_url: Option<String>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
/// (Modrinth Provided) The SHA1 hash of the assets index for this version
|
||||
/// This is only available when using the Modrinth mirror
|
||||
pub assets_index_sha1: Option<String>,
|
||||
/// (Modrinth Provided) The SHA1 hash of the original unmodified Minecraft versions JSON
|
||||
pub original_sha1: Option<String>,
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "bincode", derive(Encode, Decode))]
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
/// The latest snapshot and release of the game
|
||||
pub struct LatestVersion {
|
||||
@@ -79,7 +65,6 @@ pub struct LatestVersion {
|
||||
pub snapshot: String,
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "bincode", derive(Encode, Decode))]
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
/// Data of all game versions of Minecraft
|
||||
pub struct VersionManifest {
|
||||
@@ -93,16 +78,6 @@ pub struct VersionManifest {
|
||||
pub const VERSION_MANIFEST_URL: &str =
|
||||
"https://piston-meta.mojang.com/mc/game/version_manifest_v2.json";
|
||||
|
||||
/// Fetches a version manifest from the specified URL. If no URL is specified, the default is used.
|
||||
pub async fn fetch_version_manifest(
|
||||
url: Option<&str>,
|
||||
) -> Result<VersionManifest, Error> {
|
||||
Ok(serde_json::from_slice(
|
||||
&download_file(url.unwrap_or(VERSION_MANIFEST_URL), None).await?,
|
||||
)?)
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "bincode", derive(Encode, Decode))]
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
/// Information about the assets of the game
|
||||
@@ -119,7 +94,6 @@ pub struct AssetIndex {
|
||||
pub url: String,
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "bincode", derive(Encode, Decode))]
|
||||
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Hash)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
/// The type of download
|
||||
@@ -136,7 +110,6 @@ pub enum DownloadType {
|
||||
WindowsServer,
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "bincode", derive(Encode, Decode))]
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
/// Download information of a file
|
||||
pub struct Download {
|
||||
@@ -148,7 +121,6 @@ pub struct Download {
|
||||
pub url: String,
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "bincode", derive(Encode, Decode))]
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
/// Download information of a library
|
||||
pub struct LibraryDownload {
|
||||
@@ -163,7 +135,6 @@ pub struct LibraryDownload {
|
||||
pub url: String,
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "bincode", derive(Encode, Decode))]
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
/// A list of files that should be downloaded for libraries
|
||||
pub struct LibraryDownloads {
|
||||
@@ -176,7 +147,6 @@ pub struct LibraryDownloads {
|
||||
pub classifiers: Option<HashMap<String, LibraryDownload>>,
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "bincode", derive(Encode, Decode))]
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
/// The action a rule can follow
|
||||
@@ -187,7 +157,6 @@ pub enum RuleAction {
|
||||
Disallow,
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "bincode", derive(Encode, Decode))]
|
||||
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Hash, Clone)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
/// An enum representing the different types of operating systems
|
||||
@@ -210,7 +179,6 @@ pub enum Os {
|
||||
Unknown,
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "bincode", derive(Encode, Decode))]
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
/// A rule which depends on what OS the user is on
|
||||
pub struct OsRule {
|
||||
@@ -225,7 +193,6 @@ pub struct OsRule {
|
||||
pub arch: Option<String>,
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "bincode", derive(Encode, Decode))]
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
/// A rule which depends on the toggled features of the launcher
|
||||
pub struct FeatureRule {
|
||||
@@ -248,7 +215,6 @@ pub struct FeatureRule {
|
||||
pub is_quick_play_realms: Option<bool>,
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "bincode", derive(Encode, Decode))]
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
/// A rule deciding whether a file is downloaded, an argument is used, etc.
|
||||
pub struct Rule {
|
||||
@@ -262,7 +228,6 @@ pub struct Rule {
|
||||
pub features: Option<FeatureRule>,
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "bincode", derive(Encode, Decode))]
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
/// Information delegating the extraction of the library
|
||||
pub struct LibraryExtract {
|
||||
@@ -271,7 +236,6 @@ pub struct LibraryExtract {
|
||||
pub exclude: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "bincode", derive(Encode, Decode))]
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
/// Information about the java version the game needs
|
||||
@@ -282,7 +246,6 @@ pub struct JavaVersion {
|
||||
pub major_version: u32,
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "bincode", derive(Encode, Decode))]
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
/// A library which the game relies on to run
|
||||
pub struct Library {
|
||||
@@ -309,6 +272,9 @@ pub struct Library {
|
||||
#[serde(default = "default_include_in_classpath")]
|
||||
/// Whether the library should be included in the classpath at the game's launch
|
||||
pub include_in_classpath: bool,
|
||||
#[serde(default = "default_downloadable")]
|
||||
/// Whether the library should be downloaded
|
||||
pub downloadable: bool,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug, Clone)]
|
||||
@@ -397,8 +363,10 @@ pub fn merge_partial_library(
|
||||
fn default_include_in_classpath() -> bool {
|
||||
true
|
||||
}
|
||||
fn default_downloadable() -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "bincode", derive(Encode, Decode))]
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
#[serde(untagged)]
|
||||
/// A container for an argument or multiple arguments
|
||||
@@ -409,7 +377,6 @@ pub enum ArgumentValue {
|
||||
Many(Vec<String>),
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "bincode", derive(Encode, Decode))]
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
#[serde(untagged)]
|
||||
/// A command line argument passed to a program
|
||||
@@ -425,7 +392,6 @@ pub enum Argument {
|
||||
},
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "bincode", derive(Encode, Decode))]
|
||||
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Hash, Clone, Copy)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
/// The type of argument
|
||||
@@ -436,7 +402,6 @@ pub enum ArgumentType {
|
||||
Jvm,
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "bincode", derive(Encode, Decode))]
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
/// Information about a version
|
||||
@@ -481,16 +446,6 @@ pub struct VersionInfo {
|
||||
pub processors: Option<Vec<Processor>>,
|
||||
}
|
||||
|
||||
/// Fetches detailed information about a version from the manifest
|
||||
pub async fn fetch_version_info(
|
||||
version: &Version,
|
||||
) -> Result<VersionInfo, Error> {
|
||||
Ok(serde_json::from_slice(
|
||||
&download_file(&version.url, Some(&version.sha1)).await?,
|
||||
)?)
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "bincode", derive(Encode, Decode))]
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
/// An asset of the game
|
||||
pub struct Asset {
|
||||
@@ -500,23 +455,9 @@ pub struct Asset {
|
||||
pub size: u32,
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "bincode", derive(Encode, Decode))]
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
/// An index containing all assets the game needs
|
||||
pub struct AssetsIndex {
|
||||
/// A hashmap containing the filename (key) and asset (value)
|
||||
pub objects: HashMap<String, Asset>,
|
||||
}
|
||||
|
||||
/// Fetches the assets index from the version info
|
||||
pub async fn fetch_assets_index(
|
||||
version: &VersionInfo,
|
||||
) -> Result<AssetsIndex, Error> {
|
||||
Ok(serde_json::from_slice(
|
||||
&download_file(
|
||||
&version.asset_index.url,
|
||||
Some(&version.asset_index.sha1),
|
||||
)
|
||||
.await?,
|
||||
)?)
|
||||
}
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
use crate::{download_file, Error};
|
||||
|
||||
use crate::minecraft::{
|
||||
Argument, ArgumentType, Library, VersionInfo, VersionType,
|
||||
};
|
||||
@@ -7,9 +5,6 @@ use chrono::{DateTime, TimeZone, Utc};
|
||||
use serde::{Deserialize, Deserializer, Serialize};
|
||||
use std::collections::HashMap;
|
||||
|
||||
#[cfg(feature = "bincode")]
|
||||
use bincode::{Decode, Encode};
|
||||
|
||||
/// The latest version of the format the fabric model structs deserialize to
|
||||
pub const CURRENT_FABRIC_FORMAT_VERSION: usize = 0;
|
||||
/// The latest version of the format the fabric model structs deserialize to
|
||||
@@ -23,7 +18,6 @@ pub const CURRENT_NEOFORGE_FORMAT_VERSION: usize = 0;
|
||||
pub const DUMMY_REPLACE_STRING: &str = "${modrinth.gameVersion}";
|
||||
|
||||
/// A data variable entry that depends on the side of the installation
|
||||
#[cfg_attr(feature = "bincode", derive(Encode, Decode))]
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct SidedDataEntry {
|
||||
/// The value on the client
|
||||
@@ -43,7 +37,6 @@ where
|
||||
.map_err(serde::de::Error::custom)
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "bincode", derive(Encode, Decode))]
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
/// A partial version returned by fabric meta
|
||||
@@ -53,11 +46,9 @@ pub struct PartialVersionInfo {
|
||||
/// The version ID this partial version inherits from
|
||||
pub inherits_from: String,
|
||||
/// The time that the version was released
|
||||
#[cfg_attr(feature = "bincode", bincode(with_serde))]
|
||||
#[serde(deserialize_with = "deserialize_date")]
|
||||
pub release_time: DateTime<Utc>,
|
||||
/// The latest time a file in this version was updated
|
||||
#[cfg_attr(feature = "bincode", bincode(with_serde))]
|
||||
#[serde(deserialize_with = "deserialize_date")]
|
||||
pub time: DateTime<Utc>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
@@ -83,7 +74,6 @@ pub struct PartialVersionInfo {
|
||||
}
|
||||
|
||||
/// A processor to be ran after downloading the files
|
||||
#[cfg_attr(feature = "bincode", derive(Encode, Decode))]
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct Processor {
|
||||
/// Maven coordinates for the JAR library of this processor.
|
||||
@@ -101,13 +91,6 @@ pub struct Processor {
|
||||
pub sides: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
/// Fetches the version manifest of a game version's URL
|
||||
pub async fn fetch_partial_version(
|
||||
url: &str,
|
||||
) -> Result<PartialVersionInfo, Error> {
|
||||
Ok(serde_json::from_slice(&download_file(url, None).await?)?)
|
||||
}
|
||||
|
||||
/// Merges a partial version into a complete one
|
||||
pub fn merge_partial_version(
|
||||
partial: PartialVersionInfo,
|
||||
@@ -154,15 +137,10 @@ pub fn merge_partial_version(
|
||||
.libraries
|
||||
.into_iter()
|
||||
.chain(merge.libraries)
|
||||
.map(|x| Library {
|
||||
downloads: x.downloads,
|
||||
extract: x.extract,
|
||||
name: x.name.replace(DUMMY_REPLACE_STRING, &merge_id),
|
||||
url: x.url,
|
||||
natives: x.natives,
|
||||
rules: x.rules,
|
||||
checksums: x.checksums,
|
||||
include_in_classpath: x.include_in_classpath,
|
||||
.map(|mut x| {
|
||||
x.name = x.name.replace(DUMMY_REPLACE_STRING, &merge_id);
|
||||
|
||||
x
|
||||
})
|
||||
.collect::<Vec<_>>(),
|
||||
main_class: if let Some(main_class) = partial.main_class {
|
||||
@@ -180,7 +158,6 @@ pub fn merge_partial_version(
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "bincode", derive(Encode, Decode))]
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
/// A manifest containing information about a mod loader's versions
|
||||
@@ -189,7 +166,6 @@ pub struct Manifest {
|
||||
pub game_versions: Vec<Version>,
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "bincode", derive(Encode, Decode))]
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
/// A game version of Minecraft
|
||||
pub struct Version {
|
||||
@@ -201,7 +177,6 @@ pub struct Version {
|
||||
pub loaders: Vec<LoaderVersion>,
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "bincode", derive(Encode, Decode))]
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
/// A version of a Minecraft mod loader
|
||||
pub struct LoaderVersion {
|
||||
@@ -212,8 +187,3 @@ pub struct LoaderVersion {
|
||||
/// Whether the loader is stable or not
|
||||
pub stable: bool,
|
||||
}
|
||||
|
||||
/// Fetches the manifest of a mod loader
|
||||
pub async fn fetch_manifest(url: &str) -> Result<Manifest, Error> {
|
||||
Ok(serde_json::from_slice(&download_file(url, None).await?)?)
|
||||
}
|
||||
|
||||
daedalus_client/Cargo.toml

@@ -1,8 +1,8 @@
 [package]
 name = "daedalus_client"
-version = "0.1.27"
-authors = ["Jai A <jaiagr+gpg@pm.me>"]
-edition = "2018"
+version = "0.2.0"
+authors = ["Jai A <jai@modrinth.com>"]
+edition = "2021"

 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

@@ -11,16 +11,23 @@ daedalus = { path = "../daedalus" }
tokio = { version = "1", features = ["full"] }
futures = "0.3.25"
dotenvy = "0.15.6"
log = "0.4.17"
env_logger= "0.10.0"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
serde-xml-rs = "0.6.0"
lazy_static = "1.4.0"
thiserror = "1.0"
reqwest = "0.11.13"
zip = "0.6.3"
reqwest = { version = "0.12.5", features = ["stream", "json"] }
async_zip = { version = "0.0.17", features = ["full"] }
semver = "1.0"
chrono = { version = "0.4", features = ["serde"] }
bytes = "1.3.0"
rust-s3 = "0.33.0"
bytes = "1.6.0"
rust-s3 = "0.34.0"
dashmap = "5.5.3"
sha1_smol = { version = "1.0.0", features = ["std"] }
indexmap = { version = "2.2.6", features = ["serde"]}
itertools = "0.13.0"
tracing-error = "0.2.0"

tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
tracing-futures = { version = "0.2.5", features = ["futures", "tokio"] }
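Given the move from log/env_logger to the tracing stack above and the RUST_LOG default in .env (warn,daedalus_client=trace), a minimal, assumed initialization could look like the sketch below; the rewritten client's actual setup may differ.

// Hedged sketch: wire tracing-subscriber's env-filter to RUST_LOG.
fn init_tracing() {
    use tracing_subscriber::EnvFilter;

    let filter = EnvFilter::try_from_default_env()
        .unwrap_or_else(|_| EnvFilter::new("warn,daedalus_client=trace"));

    tracing_subscriber::fmt().with_env_filter(filter).init();
}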
@@ -1,14 +1,23 @@
 [
   {
-    "_comment": "Only allow osx-arm64 for existing LWJGL 3.3.2",
+    "_comment": "Only allow osx-arm64 for existing LWJGL 3.3.2/3.3.3",
     "match": [
       "org.lwjgl:lwjgl-freetype-natives-macos-arm64:3.3.2",
       "org.lwjgl:lwjgl-glfw-natives-macos-arm64:3.3.2",
       "org.lwjgl:lwjgl-jemalloc-natives-macos-arm64:3.3.2",
       "org.lwjgl:lwjgl-openal-natives-macos-arm64:3.3.2",
       "org.lwjgl:lwjgl-opengl-natives-macos-arm64:3.3.2",
       "org.lwjgl:lwjgl-stb-natives-macos-arm64:3.3.2",
       "org.lwjgl:lwjgl-tinyfd-natives-macos-arm64:3.3.2",
-      "org.lwjgl:lwjgl-natives-macos-arm64:3.3.2"
+      "org.lwjgl:lwjgl-natives-macos-arm64:3.3.2",
+      "org.lwjgl:lwjgl-freetype-natives-macos-arm64:3.3.3",
+      "org.lwjgl:lwjgl-glfw-natives-macos-arm64:3.3.3",
+      "org.lwjgl:lwjgl-jemalloc-natives-macos-arm64:3.3.3",
+      "org.lwjgl:lwjgl-openal-natives-macos-arm64:3.3.3",
+      "org.lwjgl:lwjgl-opengl-natives-macos-arm64:3.3.3",
+      "org.lwjgl:lwjgl-stb-natives-macos-arm64:3.3.3",
+      "org.lwjgl:lwjgl-tinyfd-natives-macos-arm64:3.3.3",
+      "org.lwjgl:lwjgl-natives-macos-arm64:3.3.3"
     ],
     "override": {
       "rules": [
@@ -22,15 +31,24 @@
|
||||
}
|
||||
},
|
||||
{
|
||||
"_comment": "Only allow windows-arm64 for existing LWJGL 3.3.2",
|
||||
"_comment": "Only allow windows-arm64 for existing LWJGL 3.3.2/3.3.3",
|
||||
"match": [
|
||||
"org.lwjgl:lwjgl-freetype-natives-windows-arm64:3.3.2",
|
||||
"org.lwjgl:lwjgl-glfw-natives-windows-arm64:3.3.2",
|
||||
"org.lwjgl:lwjgl-jemalloc-natives-windows-arm64:3.3.2",
|
||||
"org.lwjgl:lwjgl-openal-natives-windows-arm64:3.3.2",
|
||||
"org.lwjgl:lwjgl-opengl-natives-windows-arm64:3.3.2",
|
||||
"org.lwjgl:lwjgl-stb-natives-windows-arm64:3.3.2",
|
||||
"org.lwjgl:lwjgl-tinyfd-natives-windows-arm64:3.3.2",
|
||||
"org.lwjgl:lwjgl-natives-windows-arm64:3.3.2"
|
||||
"org.lwjgl:lwjgl-natives-windows-arm64:3.3.2",
|
||||
"org.lwjgl:lwjgl-freetype-natives-windows-arm64:3.3.3",
|
||||
"org.lwjgl:lwjgl-glfw-natives-windows-arm64:3.3.3",
|
||||
"org.lwjgl:lwjgl-jemalloc-natives-windows-arm64:3.3.3",
|
||||
"org.lwjgl:lwjgl-openal-natives-windows-arm64:3.3.3",
|
||||
"org.lwjgl:lwjgl-opengl-natives-windows-arm64:3.3.3",
|
||||
"org.lwjgl:lwjgl-stb-natives-windows-arm64:3.3.3",
|
||||
"org.lwjgl:lwjgl-tinyfd-natives-windows-arm64:3.3.3",
|
||||
"org.lwjgl:lwjgl-natives-windows-arm64:3.3.3"
|
||||
],
|
||||
"override": {
|
||||
"rules": [
|
||||
@@ -165,7 +183,9 @@
|
||||
],
|
||||
"override": {
|
||||
"rules": [
|
||||
|
||||
{
|
||||
"action": "allow"
|
||||
},
|
||||
{
|
||||
"action": "disallow",
|
||||
"os": {
|
||||
@@ -202,12 +222,13 @@
|
||||
"org.lwjgl.lwjgl:lwjgl:2.9.1-nightly-20131120",
|
||||
"org.lwjgl.lwjgl:lwjgl:2.9.1-nightly-20131017",
|
||||
"org.lwjgl.lwjgl:lwjgl:2.9.1-nightly-20130708-debug3",
|
||||
"org.lwjgl.lwjgl:lwjgl:2.9.1",
|
||||
"org.lwjgl.lwjgl:lwjgl:2.9.0"
|
||||
"org.lwjgl.lwjgl:lwjgl:2.9.1"
|
||||
],
|
||||
"override": {
|
||||
"rules": [
|
||||
|
||||
{
|
||||
"action": "allow"
|
||||
},
|
||||
{
|
||||
"action": "disallow",
|
||||
"os": {
|
||||
@@ -268,12 +289,13 @@
|
||||
"org.lwjgl.lwjgl:lwjgl_util:2.9.1-nightly-20131120",
|
||||
"org.lwjgl.lwjgl:lwjgl_util:2.9.1-nightly-20131017",
|
||||
"org.lwjgl.lwjgl:lwjgl_util:2.9.1-nightly-20130708-debug3",
|
||||
"org.lwjgl.lwjgl:lwjgl_util:2.9.1",
|
||||
"org.lwjgl.lwjgl:lwjgl_util:2.9.0"
|
||||
"org.lwjgl.lwjgl:lwjgl_util:2.9.1"
|
||||
],
|
||||
"override": {
|
||||
"rules": [
|
||||
|
||||
{
|
||||
"action": "allow"
|
||||
},
|
||||
{
|
||||
"action": "disallow",
|
||||
"os": {
|
||||
@@ -335,8 +357,7 @@
|
||||
"org.lwjgl.lwjgl:lwjgl-platform:2.9.1-nightly-20131120",
|
||||
"org.lwjgl.lwjgl:lwjgl-platform:2.9.1-nightly-20131017",
|
||||
"org.lwjgl.lwjgl:lwjgl-platform:2.9.1-nightly-20130708-debug3",
|
||||
"org.lwjgl.lwjgl:lwjgl-platform:2.9.1",
|
||||
"org.lwjgl.lwjgl:lwjgl-platform:2.9.0"
|
||||
"org.lwjgl.lwjgl:lwjgl-platform:2.9.1"
|
||||
],
|
||||
"override": {
|
||||
"downloads": {
|
||||
@@ -375,7 +396,9 @@
|
||||
],
|
||||
"override": {
|
||||
"rules": [
|
||||
|
||||
{
|
||||
"action": "allow"
|
||||
},
|
||||
{
|
||||
"action": "disallow",
|
||||
"os": {
|
||||
@@ -547,7 +570,9 @@
|
||||
],
|
||||
"override": {
|
||||
"rules": [
|
||||
|
||||
{
|
||||
"action": "allow"
|
||||
},
|
||||
{
|
||||
"action": "disallow",
|
||||
"os": {
|
||||
@@ -719,7 +744,9 @@
|
||||
],
|
||||
"override": {
|
||||
"rules": [
|
||||
|
||||
{
|
||||
"action": "allow"
|
||||
},
|
||||
{
|
||||
"action": "disallow",
|
||||
"os": {
|
||||
@@ -891,7 +918,9 @@
|
||||
],
|
||||
"override": {
|
||||
"rules": [
|
||||
|
||||
{
|
||||
"action": "allow"
|
||||
},
|
||||
{
|
||||
"action": "disallow",
|
||||
"os": {
|
||||
@@ -1063,7 +1092,9 @@
|
||||
],
|
||||
"override": {
|
||||
"rules": [
|
||||
|
||||
{
|
||||
"action": "allow"
|
||||
},
|
||||
{
|
||||
"action": "disallow",
|
||||
"os": {
|
||||
@@ -1235,7 +1266,9 @@
|
||||
],
|
||||
"override": {
|
||||
"rules": [
|
||||
|
||||
{
|
||||
"action": "allow"
|
||||
},
|
||||
{
|
||||
"action": "disallow",
|
||||
"os": {
|
||||
@@ -1407,7 +1440,9 @@
|
||||
],
|
||||
"override": {
|
||||
"rules": [
|
||||
|
||||
{
|
||||
"action": "allow"
|
||||
},
|
||||
{
|
||||
"action": "disallow",
|
||||
"os": {
|
||||
@@ -1993,6 +2028,32 @@
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"_comment": "Add linux-arm64 support for LWJGL 3.3.2",
|
||||
"match": [
|
||||
"org.lwjgl:lwjgl-freetype:3.3.2"
|
||||
],
|
||||
"additionalLibraries": [
|
||||
{
|
||||
"downloads": {
|
||||
"artifact": {
|
||||
"sha1": "896e7d9b8f60d7273f3d491c69270afc67ece3ce",
|
||||
"size": 1073374,
|
||||
"url": "https://build.lwjgl.org/release/3.3.2/bin/lwjgl-freetype/lwjgl-freetype-natives-linux-arm64.jar"
|
||||
}
|
||||
},
|
||||
"name": "org.lwjgl:lwjgl-freetype-natives-linux-arm64:3.3.2-lwjgl.1",
|
||||
"rules": [
|
||||
{
|
||||
"action": "allow",
|
||||
"os": {
|
||||
"name": "linux-arm64"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"_comment": "Add linux-arm64 support for LWJGL 3.3.2",
|
||||
"match": [
|
||||
@@ -2175,6 +2236,32 @@
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"_comment": "Add linux-arm32 support for LWJGL 3.3.2",
|
||||
"match": [
|
||||
"org.lwjgl:lwjgl-freetype:3.3.2"
|
||||
],
|
||||
"additionalLibraries": [
|
||||
{
|
||||
"downloads": {
|
||||
"artifact": {
|
||||
"sha1": "b7f77ceb951182659fd400437272aa7e96709968",
|
||||
"size": 924657,
|
||||
"url": "https://build.lwjgl.org/release/3.3.2/bin/lwjgl-freetype/lwjgl-freetype-natives-linux-arm32.jar"
|
||||
}
|
||||
},
|
||||
"name": "org.lwjgl:lwjgl-freetype-natives-linux-arm32:3.3.2-lwjgl.1",
|
||||
"rules": [
|
||||
{
|
||||
"action": "allow",
|
||||
"os": {
|
||||
"name": "linux-arm32"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"_comment": "Add linux-arm32 support for LWJGL 3.3.2",
|
||||
"match": [
|
||||
@@ -2357,6 +2444,422 @@
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"_comment": "Add linux-arm64 support for LWJGL 3.3.3",
|
||||
"match": [
|
||||
"org.lwjgl:lwjgl-freetype:3.3.3"
|
||||
],
|
||||
"additionalLibraries": [
|
||||
{
|
||||
"downloads": {
|
||||
"artifact": {
|
||||
"sha1": "498965aac06c4a0d42df1fbef6bacd05bde7f974",
|
||||
"size": 1093516,
|
||||
"url": "https://build.lwjgl.org/release/3.3.3/bin/lwjgl-freetype/lwjgl-freetype-natives-linux-arm64.jar"
|
||||
}
|
||||
},
|
||||
"name": "org.lwjgl:lwjgl-freetype-natives-linux-arm64:3.3.3-lwjgl.1",
|
||||
"rules": [
|
||||
{
|
||||
"action": "allow",
|
||||
"os": {
|
||||
"name": "linux-arm64"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"_comment": "Add linux-arm64 support for LWJGL 3.3.3",
|
||||
"match": [
|
||||
"org.lwjgl:lwjgl-glfw:3.3.3"
|
||||
],
|
||||
"additionalLibraries": [
|
||||
{
|
||||
"downloads": {
|
||||
"artifact": {
|
||||
"sha1": "492a0f11f85b85899a6568f07511160c1b87cd38",
|
||||
"size": 122159,
|
||||
"url": "https://build.lwjgl.org/release/3.3.3/bin/lwjgl-glfw/lwjgl-glfw-natives-linux-arm64.jar"
|
||||
}
|
||||
},
|
||||
"name": "org.lwjgl:lwjgl-glfw-natives-linux-arm64:3.3.3-lwjgl.1",
|
||||
"rules": [
|
||||
{
|
||||
"action": "allow",
|
||||
"os": {
|
||||
"name": "linux-arm64"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"_comment": "Add linux-arm64 support for LWJGL 3.3.3",
|
||||
"match": [
|
||||
"org.lwjgl:lwjgl-jemalloc:3.3.3"
|
||||
],
|
||||
"additionalLibraries": [
|
||||
{
|
||||
"downloads": {
|
||||
"artifact": {
|
||||
"sha1": "eff8b86798191192fe2cba2dc2776109f30c239d",
|
||||
"size": 209315,
|
||||
"url": "https://build.lwjgl.org/release/3.3.3/bin/lwjgl-jemalloc/lwjgl-jemalloc-natives-linux-arm64.jar"
|
||||
}
|
||||
},
|
||||
"name": "org.lwjgl:lwjgl-jemalloc-natives-linux-arm64:3.3.3-lwjgl.1",
|
||||
"rules": [
|
||||
{
|
||||
"action": "allow",
|
||||
"os": {
|
||||
"name": "linux-arm64"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"_comment": "Add linux-arm64 support for LWJGL 3.3.3",
|
||||
"match": [
|
||||
"org.lwjgl:lwjgl-openal:3.3.3"
|
||||
],
|
||||
"additionalLibraries": [
|
||||
{
|
||||
"downloads": {
|
||||
"artifact": {
|
||||
"sha1": "ad8f302118a65bb8d615f8a2a680db58fb8f835e",
|
||||
"size": 592963,
|
||||
"url": "https://build.lwjgl.org/release/3.3.3/bin/lwjgl-openal/lwjgl-openal-natives-linux-arm64.jar"
|
||||
}
|
||||
},
|
||||
"name": "org.lwjgl:lwjgl-openal-natives-linux-arm64:3.3.3-lwjgl.1",
|
||||
"rules": [
|
||||
{
|
||||
"action": "allow",
|
||||
"os": {
|
||||
"name": "linux-arm64"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"_comment": "Add linux-arm64 support for LWJGL 3.3.3",
|
||||
"match": [
|
||||
"org.lwjgl:lwjgl-opengl:3.3.3"
|
||||
],
|
||||
"additionalLibraries": [
|
||||
{
|
||||
"downloads": {
|
||||
"artifact": {
|
||||
"sha1": "2096f6b94b2d68745d858fbfe53aacf5f0c8074c",
|
||||
"size": 58625,
|
||||
"url": "https://build.lwjgl.org/release/3.3.3/bin/lwjgl-opengl/lwjgl-opengl-natives-linux-arm64.jar"
|
||||
}
|
||||
},
|
||||
"name": "org.lwjgl:lwjgl-opengl-natives-linux-arm64:3.3.3-lwjgl.1",
|
||||
"rules": [
|
||||
{
|
||||
"action": "allow",
|
||||
"os": {
|
||||
"name": "linux-arm64"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"_comment": "Add linux-arm64 support for LWJGL 3.3.3",
|
||||
"match": [
|
||||
"org.lwjgl:lwjgl-stb:3.3.3"
|
||||
],
|
||||
"additionalLibraries": [
|
||||
{
|
||||
"downloads": {
|
||||
"artifact": {
|
||||
"sha1": "ddc177afc2be1ee8d93684b11363b80589a13fe1",
|
||||
"size": 207418,
|
||||
"url": "https://build.lwjgl.org/release/3.3.3/bin/lwjgl-stb/lwjgl-stb-natives-linux-arm64.jar"
|
||||
}
|
||||
},
|
||||
"name": "org.lwjgl:lwjgl-stb-natives-linux-arm64:3.3.3-lwjgl.1",
|
||||
"rules": [
|
||||
{
|
||||
"action": "allow",
|
||||
"os": {
|
||||
"name": "linux-arm64"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"_comment": "Add linux-arm64 support for LWJGL 3.3.3",
|
||||
"match": [
|
||||
"org.lwjgl:lwjgl-tinyfd:3.3.3"
|
||||
],
|
||||
"additionalLibraries": [
|
||||
{
|
||||
"downloads": {
|
||||
"artifact": {
|
||||
"sha1": "2823a8c955c758d0954d282888075019ef99cec7",
|
||||
"size": 43864,
|
||||
"url": "https://build.lwjgl.org/release/3.3.3/bin/lwjgl-tinyfd/lwjgl-tinyfd-natives-linux-arm64.jar"
|
||||
}
|
||||
},
|
||||
"name": "org.lwjgl:lwjgl-tinyfd-natives-linux-arm64:3.3.3-lwjgl.1",
|
||||
"rules": [
|
||||
{
|
||||
"action": "allow",
|
||||
"os": {
|
||||
"name": "linux-arm64"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"_comment": "Add linux-arm64 support for LWJGL 3.3.3",
|
||||
"match": [
|
||||
"org.lwjgl:lwjgl:3.3.3"
|
||||
],
|
||||
"additionalLibraries": [
|
||||
{
|
||||
"downloads": {
|
||||
"artifact": {
|
||||
"sha1": "f35d8b6ffe1ac1e3a5eb1d4e33de80f044ad5fd8",
|
||||
"size": 91294,
|
||||
"url": "https://build.lwjgl.org/release/3.3.3/bin/lwjgl/lwjgl-natives-linux-arm64.jar"
|
||||
}
|
||||
},
|
||||
"name": "org.lwjgl:lwjgl-natives-linux-arm64:3.3.3-lwjgl.1",
|
||||
"rules": [
|
||||
{
|
||||
"action": "allow",
|
||||
"os": {
|
||||
"name": "linux-arm64"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"_comment": "Add linux-arm32 support for LWJGL 3.3.3",
|
||||
"match": [
|
||||
"org.lwjgl:lwjgl-freetype:3.3.3"
|
||||
],
|
||||
"additionalLibraries": [
|
||||
{
|
||||
"downloads": {
|
||||
"artifact": {
|
||||
"sha1": "7dd3b1f751571adaf2c4dc882bc675a5d1e796e6",
|
||||
"size": 942636,
|
||||
"url": "https://build.lwjgl.org/release/3.3.3/bin/lwjgl-freetype/lwjgl-freetype-natives-linux-arm32.jar"
|
||||
}
|
||||
},
|
||||
"name": "org.lwjgl:lwjgl-freetype-natives-linux-arm32:3.3.3-lwjgl.1",
|
||||
"rules": [
|
||||
{
|
||||
"action": "allow",
|
||||
"os": {
|
||||
"name": "linux-arm32"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"_comment": "Add linux-arm32 support for LWJGL 3.3.3",
|
||||
"match": [
|
||||
"org.lwjgl:lwjgl-glfw:3.3.3"
|
||||
],
|
||||
"additionalLibraries": [
|
||||
{
|
||||
"downloads": {
|
||||
"artifact": {
|
||||
"sha1": "d9af485c32545b37dd5359b163161d42d7534dcf",
|
||||
"size": 112560,
|
||||
"url": "https://build.lwjgl.org/release/3.3.3/bin/lwjgl-glfw/lwjgl-glfw-natives-linux-arm32.jar"
|
||||
}
|
||||
},
|
||||
"name": "org.lwjgl:lwjgl-glfw-natives-linux-arm32:3.3.3-lwjgl.1",
|
||||
"rules": [
|
||||
{
|
||||
"action": "allow",
|
||||
"os": {
|
||||
"name": "linux-arm32"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"_comment": "Add linux-arm32 support for LWJGL 3.3.3",
|
||||
"match": [
|
||||
"org.lwjgl:lwjgl-jemalloc:3.3.3"
|
||||
],
|
||||
"additionalLibraries": [
|
||||
{
|
||||
"downloads": {
|
||||
"artifact": {
|
||||
"sha1": "109b6931880d02d4e65ced38928a16e41d19873e",
|
||||
"size": 178324,
|
||||
"url": "https://build.lwjgl.org/release/3.3.3/bin/lwjgl-jemalloc/lwjgl-jemalloc-natives-linux-arm32.jar"
|
||||
}
|
||||
},
|
||||
"name": "org.lwjgl:lwjgl-jemalloc-natives-linux-arm32:3.3.3-lwjgl.1",
|
||||
"rules": [
|
||||
{
|
||||
"action": "allow",
|
||||
"os": {
|
||||
"name": "linux-arm32"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"_comment": "Add linux-arm32 support for LWJGL 3.3.3",
|
||||
"match": [
|
||||
"org.lwjgl:lwjgl-openal:3.3.3"
|
||||
],
|
||||
"additionalLibraries": [
|
||||
{
|
||||
"downloads": {
|
||||
"artifact": {
|
||||
"sha1": "e1702aa09d20359d6cf5cb2999fa7685a785eca7",
|
||||
"size": 505618,
|
||||
"url": "https://build.lwjgl.org/release/3.3.3/bin/lwjgl-openal/lwjgl-openal-natives-linux-arm32.jar"
|
||||
}
|
||||
},
|
||||
"name": "org.lwjgl:lwjgl-openal-natives-linux-arm32:3.3.3-lwjgl.1",
|
||||
"rules": [
|
||||
{
|
||||
"action": "allow",
|
||||
"os": {
|
||||
"name": "linux-arm32"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"_comment": "Add linux-arm32 support for LWJGL 3.3.3",
|
||||
"match": [
|
||||
"org.lwjgl:lwjgl-opengl:3.3.3"
|
||||
],
|
||||
"additionalLibraries": [
|
||||
{
|
||||
"downloads": {
|
||||
"artifact": {
|
||||
"sha1": "dbba17fc5ac0985d14a57c11f9537617d67b9952",
|
||||
"size": 59263,
|
||||
"url": "https://build.lwjgl.org/release/3.3.3/bin/lwjgl-opengl/lwjgl-opengl-natives-linux-arm32.jar"
|
||||
}
|
||||
},
|
||||
"name": "org.lwjgl:lwjgl-opengl-natives-linux-arm32:3.3.3-lwjgl.1",
|
||||
"rules": [
|
||||
{
|
||||
"action": "allow",
|
||||
"os": {
|
||||
"name": "linux-arm32"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"_comment": "Add linux-arm32 support for LWJGL 3.3.3",
|
||||
"match": [
|
||||
"org.lwjgl:lwjgl-stb:3.3.3"
|
||||
],
|
||||
"additionalLibraries": [
|
||||
{
|
||||
"downloads": {
|
||||
"artifact": {
|
||||
"sha1": "1ae28ff044699ff29b0e980ffabd73fba8a664b3",
|
||||
"size": 154931,
|
||||
"url": "https://build.lwjgl.org/release/3.3.3/bin/lwjgl-stb/lwjgl-stb-natives-linux-arm32.jar"
|
||||
}
|
||||
},
|
||||
"name": "org.lwjgl:lwjgl-stb-natives-linux-arm32:3.3.3-lwjgl.1",
|
||||
"rules": [
|
||||
{
|
||||
"action": "allow",
|
||||
"os": {
|
||||
"name": "linux-arm32"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"_comment": "Add linux-arm32 support for LWJGL 3.3.3",
|
||||
"match": [
|
||||
"org.lwjgl:lwjgl-tinyfd:3.3.3"
|
||||
],
|
||||
"additionalLibraries": [
|
||||
{
|
||||
"downloads": {
|
||||
"artifact": {
|
||||
"sha1": "c2a0a05c82c4b9f69ded0b6ad5f417addea78ce2",
|
||||
"size": 49495,
|
||||
"url": "https://build.lwjgl.org/release/3.3.3/bin/lwjgl-tinyfd/lwjgl-tinyfd-natives-linux-arm32.jar"
|
||||
}
|
||||
},
|
||||
"name": "org.lwjgl:lwjgl-tinyfd-natives-linux-arm32:3.3.3-lwjgl.1",
|
||||
"rules": [
|
||||
{
|
||||
"action": "allow",
|
||||
"os": {
|
||||
"name": "linux-arm32"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"_comment": "Add linux-arm32 support for LWJGL 3.3.3",
|
||||
"match": [
|
||||
"org.lwjgl:lwjgl:3.3.3"
|
||||
],
|
||||
"additionalLibraries": [
|
||||
{
|
||||
"downloads": {
|
||||
"artifact": {
|
||||
"sha1": "2075c51a80f0ef0f22ba616ba54007ac2b0debd4",
|
||||
"size": 89565,
|
||||
"url": "https://build.lwjgl.org/release/3.3.3/bin/lwjgl/lwjgl-natives-linux-arm32.jar"
|
||||
}
|
||||
},
|
||||
"name": "org.lwjgl:lwjgl-natives-linux-arm32:3.3.3-lwjgl.1",
|
||||
"rules": [
|
||||
{
|
||||
"action": "allow",
|
||||
"os": {
|
||||
"name": "linux-arm32"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"_comment": "Replace glfw from 3.3.1 with version from 3.3.2 to prevent stack smashing",
|
||||
"match": [
|
||||
|
||||
daedalus_client/src/error.rs (new file, 63 lines)

@@ -0,0 +1,63 @@
use tracing_error::InstrumentError;

#[derive(thiserror::Error, Debug)]
pub enum ErrorKind {
    #[error("Daedalus Error: {0}")]
    Daedalus(#[from] daedalus::Error),
    #[error("Invalid input: {0}")]
    InvalidInput(String),
    #[error("Error while managing asynchronous tasks")]
    TaskError(#[from] tokio::task::JoinError),
    #[error("Error while deserializing JSON: {0}")]
    SerdeJSON(#[from] serde_json::Error),
    #[error("Error while deserializing XML: {0}")]
    SerdeXML(#[from] serde_xml_rs::Error),
    #[error("Failed to validate file checksum at url {url} with hash {hash} after {tries} tries")]
    ChecksumFailure {
        hash: String,
        url: String,
        tries: u32,
    },
    #[error("Unable to fetch {item}")]
    Fetch { inner: reqwest::Error, item: String },
    #[error("Error while uploading file to S3: {file}")]
    S3 {
        inner: s3::error::S3Error,
        file: String,
    },
    #[error("Error acquiring semaphore: {0}")]
    Acquire(#[from] tokio::sync::AcquireError),
    #[error("Tracing error: {0}")]
    Tracing(#[from] tracing::subscriber::SetGlobalDefaultError),
    #[error("Zip error: {0}")]
    Zip(#[from] async_zip::error::ZipError),
}

#[derive(Debug)]
pub struct Error {
    pub source: tracing_error::TracedError<ErrorKind>,
}

impl std::fmt::Display for Error {
    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(fmt, "{}", self.source)
    }
}

impl<E: Into<ErrorKind>> From<E> for Error {
    fn from(source: E) -> Self {
        let error = Into::<ErrorKind>::into(source);

        Self {
            source: error.in_current_span(),
        }
    }
}

impl ErrorKind {
    pub fn as_error(self) -> Error {
        self.into()
    }
}

pub type Result<T> = core::result::Result<T, Error>;
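A short, assumed usage sketch of the error module above: the blanket From impl lets any ErrorKind source propagate with `?`, and ErrorKind::as_error raises a variant explicitly. The parse_manifest helper is illustrative and assumes the file is declared as `mod error;` in the client crate.

// Illustrative only: exercises the conversions provided by error.rs above.
use crate::error::{ErrorKind, Result};

fn parse_manifest(bytes: &[u8]) -> Result<serde_json::Value> {
    if bytes.is_empty() {
        // Raise an ErrorKind explicitly; as_error() wraps it in a traced Error.
        return Err(ErrorKind::InvalidInput("empty manifest body".to_string())
            .as_error());
    }

    // serde_json::Error -> ErrorKind::SerdeJSON -> Error happens via `?`.
    Ok(serde_json::from_slice(bytes)?)
}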
@@ -1,372 +1,276 @@
|
||||
use crate::{download_file, format_url, upload_file_to_bucket, Error};
|
||||
use daedalus::minecraft::{Library, VersionManifest};
|
||||
use daedalus::modded::{
|
||||
LoaderVersion, Manifest, PartialVersionInfo, Version, DUMMY_REPLACE_STRING,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use crate::util::{download_file, fetch_json, format_url};
|
||||
use crate::{insert_mirrored_artifact, Error, MirrorArtifact, UploadFile};
|
||||
use daedalus::modded::{Manifest, PartialVersionInfo, DUMMY_REPLACE_STRING};
|
||||
use dashmap::DashMap;
|
||||
use serde::Deserialize;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::{Mutex, RwLock, Semaphore};
|
||||
use tokio::sync::Semaphore;
|
||||
|
||||
pub async fn retrieve_data(
|
||||
minecraft_versions: &VersionManifest,
|
||||
uploaded_files: &mut Vec<String>,
|
||||
#[tracing::instrument(skip(semaphore, upload_files, mirror_artifacts))]
|
||||
pub async fn fetch_fabric(
|
||||
semaphore: Arc<Semaphore>,
|
||||
upload_files: &DashMap<String, UploadFile>,
|
||||
mirror_artifacts: &DashMap<String, MirrorArtifact>,
|
||||
) -> Result<(), Error> {
|
||||
let list = fetch_fabric_versions(None, semaphore.clone()).await?;
|
||||
let old_manifest = daedalus::modded::fetch_manifest(&format_url(&format!(
|
||||
"fabric/v{}/manifest.json",
|
||||
fetch(
|
||||
daedalus::modded::CURRENT_FABRIC_FORMAT_VERSION,
|
||||
)))
|
||||
"fabric",
|
||||
"https://meta.fabricmc.net/v2",
|
||||
"https://maven.fabricmc.net/",
|
||||
semaphore,
|
||||
upload_files,
|
||||
mirror_artifacts,
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(semaphore, upload_files, mirror_artifacts))]
|
||||
pub async fn fetch_quilt(
|
||||
semaphore: Arc<Semaphore>,
|
||||
upload_files: &DashMap<String, UploadFile>,
|
||||
mirror_artifacts: &DashMap<String, MirrorArtifact>,
|
||||
) -> Result<(), Error> {
|
||||
fetch(
|
||||
daedalus::modded::CURRENT_QUILT_FORMAT_VERSION,
|
||||
"quilt",
|
||||
"https://meta.quiltmc.org/v3",
|
||||
"https://meta.quiltmc.org/",
|
||||
semaphore,
|
||||
upload_files,
|
||||
mirror_artifacts,
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(semaphore, upload_files, mirror_artifacts))]
|
||||
async fn fetch(
|
||||
format_version: usize,
|
||||
mod_loader: &str,
|
||||
meta_url: &str,
|
||||
maven_url: &str,
|
||||
semaphore: Arc<Semaphore>,
|
||||
upload_files: &DashMap<String, UploadFile>,
|
||||
mirror_artifacts: &DashMap<String, MirrorArtifact>,
|
||||
) -> Result<(), Error> {
|
||||
let modrinth_manifest = fetch_json::<Manifest>(
|
||||
&format_url(&format!("{mod_loader}/v{format_version}/manifest.json",)),
|
||||
&semaphore,
|
||||
)
|
||||
.await
|
||||
.ok();
|
||||
|
||||
let mut versions = if let Some(old_manifest) = old_manifest {
|
||||
old_manifest.game_versions
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
|
||||
let loaders_mutex = RwLock::new(Vec::new());
|
||||
|
||||
{
|
||||
let mut loaders = loaders_mutex.write().await;
|
||||
|
||||
for (index, loader) in list.loader.iter().enumerate() {
|
||||
if versions.iter().any(|x| {
|
||||
x.id == DUMMY_REPLACE_STRING
|
||||
&& x.loaders.iter().any(|x| x.id == loader.version)
|
||||
}) {
|
||||
if index == 0 {
|
||||
loaders.push((
|
||||
Box::new(loader.stable),
|
||||
loader.version.clone(),
|
||||
Box::new(true),
|
||||
))
|
||||
}
|
||||
} else {
|
||||
loaders.push((
|
||||
Box::new(loader.stable),
|
||||
loader.version.clone(),
|
||||
Box::new(false),
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const DUMMY_GAME_VERSION: &str = "1.19.4-rc2";
|
||||
|
||||
let loader_version_mutex = Mutex::new(Vec::new());
|
||||
let uploaded_files_mutex = Arc::new(Mutex::new(Vec::new()));
|
||||
|
||||
let loader_versions = futures::future::try_join_all(
|
||||
loaders_mutex.read().await.clone().into_iter().map(
|
||||
|(stable, loader, skip_upload)| async {
|
||||
let version = fetch_fabric_version(
|
||||
DUMMY_GAME_VERSION,
|
||||
&loader,
|
||||
semaphore.clone(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok::<(Box<bool>, String, PartialVersionInfo, Box<bool>), Error>(
|
||||
(stable, loader, version, skip_upload),
|
||||
)
|
||||
},
|
||||
),
|
||||
let fabric_manifest = fetch_json::<FabricVersions>(
|
||||
&format!("{meta_url}/versions"),
|
||||
&semaphore,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let visited_artifacts_mutex = Arc::new(Mutex::new(Vec::new()));
|
||||
futures::future::try_join_all(loader_versions.into_iter()
|
||||
.map(
|
||||
|(stable, loader, version, skip_upload)| async {
|
||||
let libs = futures::future::try_join_all(
|
||||
version.libraries.into_iter().map(|mut lib| async {
|
||||
{
|
||||
let mut visited_assets =
|
||||
visited_artifacts_mutex.lock().await;
|
||||
// We check Modrinth's fabric version manifest and compare if the fabric version exists in Modrinth's database
|
||||
// We also check intermediary versions that are newly added to query
|
||||
let (fetch_fabric_versions, fetch_intermediary_versions) =
|
||||
if let Some(modrinth_manifest) = modrinth_manifest {
|
||||
let (mut fetch_versions, mut fetch_intermediary_versions) =
|
||||
(Vec::new(), Vec::new());
|
||||
|
||||
if visited_assets.contains(&lib.name) {
|
||||
lib.name = lib.name.replace(DUMMY_GAME_VERSION, DUMMY_REPLACE_STRING);
|
||||
lib.url = Some(format_url("maven/"));
|
||||
for version in &fabric_manifest.loader {
|
||||
if !modrinth_manifest
|
||||
.game_versions
|
||||
.iter()
|
||||
.any(|x| x.loaders.iter().any(|x| x.id == version.version))
|
||||
{
|
||||
fetch_versions.push(version);
|
||||
}
|
||||
}
|
||||
|
||||
return Ok(lib);
|
||||
} else {
|
||||
visited_assets.push(lib.name.clone())
|
||||
}
|
||||
for version in &fabric_manifest.intermediary {
|
||||
if !modrinth_manifest
|
||||
.game_versions
|
||||
.iter()
|
||||
.any(|x| x.id == version.version)
|
||||
&& fabric_manifest
|
||||
.game
|
||||
.iter()
|
||||
.any(|x| x.version == version.version)
|
||||
{
|
||||
fetch_intermediary_versions.push(version);
|
||||
}
|
||||
}
|
||||
|
||||
(fetch_versions, fetch_intermediary_versions)
|
||||
} else {
|
||||
(
|
||||
fabric_manifest.loader.iter().collect(),
|
||||
fabric_manifest.intermediary.iter().collect(),
|
||||
)
|
||||
};
|
||||
|
||||
const DUMMY_GAME_VERSION: &str = "1.21";
|
||||
|
||||
if !fetch_intermediary_versions.is_empty() {
|
||||
for x in &fetch_intermediary_versions {
|
||||
insert_mirrored_artifact(
|
||||
&x.maven,
|
||||
maven_url.to_string(),
|
||||
mirror_artifacts,
|
||||
)?;
|
||||
}
|
||||
}
|
||||
|
||||
if !fetch_fabric_versions.is_empty() {
|
||||
let fabric_version_manifest_urls = fetch_fabric_versions
|
||||
.iter()
|
||||
.map(|x| {
|
||||
format!(
|
||||
"{}/versions/loader/{}/{}/profile/json",
|
||||
meta_url, DUMMY_GAME_VERSION, x.version
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
let fabric_version_manifests = futures::future::try_join_all(
|
||||
fabric_version_manifest_urls
|
||||
.iter()
|
||||
.map(|x| download_file(x, None, &semaphore)),
|
||||
)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|x| serde_json::from_slice(&x))
|
||||
.collect::<Result<Vec<PartialVersionInfo>, serde_json::Error>>()?;
|
||||
|
||||
let patched_version_manifests = fabric_version_manifests
|
||||
.into_iter()
|
||||
.map(|mut version_info| {
|
||||
for lib in &mut version_info.libraries {
|
||||
let new_name = lib
|
||||
.name
|
||||
.replace(DUMMY_GAME_VERSION, DUMMY_REPLACE_STRING);
|
||||
// If a library is not intermediary, we add it to mirror artifacts to be mirrored
|
||||
if lib.name == new_name {
|
||||
insert_mirrored_artifact(
|
||||
&new_name,
|
||||
lib.url
|
||||
.clone()
|
||||
.unwrap_or_else(|| maven_url.to_string()),
|
||||
mirror_artifacts,
|
||||
)?;
|
||||
} else {
|
||||
lib.name = new_name;
|
||||
}
|
||||
|
||||
if lib.name.contains(DUMMY_GAME_VERSION) {
|
||||
lib.name = lib.name.replace(DUMMY_GAME_VERSION, DUMMY_REPLACE_STRING);
|
||||
futures::future::try_join_all(list.game.clone().into_iter().map(|game_version| async {
|
||||
let semaphore = semaphore.clone();
|
||||
let uploaded_files_mutex = uploaded_files_mutex.clone();
|
||||
let lib_name = lib.name.clone();
|
||||
let lib_url = lib.url.clone();
|
||||
|
||||
async move {
|
||||
let artifact_path =
|
||||
daedalus::get_path_from_artifact(&lib_name.replace(DUMMY_REPLACE_STRING, &game_version.version))?;
|
||||
|
||||
let artifact = download_file(
|
||||
&format!(
|
||||
"{}{}",
|
||||
lib_url.unwrap_or_else(|| {
|
||||
"https://maven.fabricmc.net/".to_string()
|
||||
}),
|
||||
artifact_path
|
||||
),
|
||||
None,
|
||||
semaphore.clone(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
upload_file_to_bucket(
|
||||
format!("{}/{}", "maven", artifact_path),
|
||||
artifact.to_vec(),
|
||||
Some("application/java-archive".to_string()),
|
||||
&uploaded_files_mutex,
|
||||
semaphore.clone(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok::<(), Error>(())
|
||||
}.await?;
|
||||
|
||||
Ok::<(), Error>(())
|
||||
})).await?;
|
||||
lib.url = Some(format_url("maven/"));
|
||||
|
||||
return Ok(lib);
|
||||
}
|
||||
|
||||
let artifact_path =
|
||||
daedalus::get_path_from_artifact(&lib.name)?;
|
||||
|
||||
let artifact = download_file(
|
||||
&format!(
|
||||
"{}{}",
|
||||
lib.url.unwrap_or_else(|| {
|
||||
"https://maven.fabricmc.net/".to_string()
|
||||
}),
|
||||
artifact_path
|
||||
),
|
||||
None,
|
||||
semaphore.clone(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
lib.url = Some(format_url("maven/"));
|
||||
|
||||
upload_file_to_bucket(
|
||||
format!("{}/{}", "maven", artifact_path),
|
||||
artifact.to_vec(),
|
||||
Some("application/java-archive".to_string()),
|
||||
&uploaded_files_mutex,
|
||||
semaphore.clone(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok::<Library, Error>(lib)
|
||||
}),
|
||||
)
|
||||
.await?;
|
||||
|
||||
if async move {
|
||||
*skip_upload
|
||||
}.await {
|
||||
return Ok::<(), Error>(())
|
||||
}
|
||||
|
||||
|
||||
let version_path = format!(
|
||||
"fabric/v{}/versions/{}.json",
|
||||
daedalus::modded::CURRENT_FABRIC_FORMAT_VERSION,
|
||||
&loader
|
||||
);
|
||||
|
||||
upload_file_to_bucket(
|
||||
version_path.clone(),
|
||||
serde_json::to_vec(&PartialVersionInfo {
|
||||
arguments: version.arguments,
|
||||
id: version
|
||||
.id
|
||||
.replace(DUMMY_GAME_VERSION, DUMMY_REPLACE_STRING),
|
||||
main_class: version.main_class,
|
||||
release_time: version.release_time,
|
||||
time: version.time,
|
||||
type_: version.type_,
|
||||
inherits_from: version
|
||||
.inherits_from
|
||||
.replace(DUMMY_GAME_VERSION, DUMMY_REPLACE_STRING),
|
||||
libraries: libs,
|
||||
minecraft_arguments: version.minecraft_arguments,
|
||||
processors: None,
|
||||
data: None,
|
||||
})?,
|
||||
Some("application/json".to_string()),
|
||||
&uploaded_files_mutex,
|
||||
semaphore.clone(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
{
|
||||
let mut loader_version_map = loader_version_mutex.lock().await;
|
||||
async move {
|
||||
loader_version_map.push(LoaderVersion {
|
||||
id: loader.to_string(),
|
||||
url: format_url(&version_path),
|
||||
stable: *stable,
|
||||
});
|
||||
}
|
||||
.await;
|
||||
}
|
||||
|
||||
Ok::<(), Error>(())
|
||||
},
|
||||
))
|
||||
.await?;
|
||||
version_info.id = version_info
|
||||
.id
|
||||
.replace(DUMMY_GAME_VERSION, DUMMY_REPLACE_STRING);
|
||||
version_info.inherits_from = version_info
|
||||
.inherits_from
|
||||
.replace(DUMMY_GAME_VERSION, DUMMY_REPLACE_STRING);
|
||||
|
||||
let mut loader_version_mutex = loader_version_mutex.into_inner();
|
||||
if !loader_version_mutex.is_empty() {
|
||||
if let Some(version) =
|
||||
versions.iter_mut().find(|x| x.id == DUMMY_REPLACE_STRING)
|
||||
{
|
||||
version.loaders.append(&mut loader_version_mutex);
|
||||
} else {
|
||||
versions.push(Version {
|
||||
id: DUMMY_REPLACE_STRING.to_string(),
|
||||
stable: true,
|
||||
loaders: loader_version_mutex,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
for version in &list.game {
|
||||
if !versions.iter().any(|x| x.id == version.version) {
|
||||
versions.push(Version {
|
||||
id: version.version.clone(),
|
||||
stable: version.stable,
|
||||
loaders: vec![],
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
versions.sort_by(|x, y| {
    minecraft_versions
        .versions
        .iter()
        .position(|z| x.id == z.id)
        .unwrap_or_default()
        .cmp(
            &minecraft_versions
                .versions
                .iter()
                .position(|z| y.id == z.id)
                .unwrap_or_default(),
        )
});

            Ok(version_info)
        })
        .collect::<Result<Vec<_>, Error>>()?;

    let serialized_version_manifests = patched_version_manifests
        .iter()
        .map(|x| serde_json::to_vec(x).map(bytes::Bytes::from))
        .collect::<Result<Vec<_>, serde_json::Error>>()?;
|
||||
|
||||
for version in &mut versions {
|
||||
version.loaders.sort_by(|x, y| {
|
||||
list.loader
|
||||
.iter()
|
||||
.position(|z| x.id == *z.version)
|
||||
.unwrap_or_default()
|
||||
.cmp(
|
||||
&list
|
||||
.loader
|
||||
.iter()
|
||||
.position(|z| y.id == z.version)
|
||||
.unwrap_or_default(),
|
||||
)
|
||||
})
|
||||
serialized_version_manifests
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.for_each(|(index, bytes)| {
|
||||
let loader = fetch_fabric_versions[index];
|
||||
|
||||
let version_path = format!(
|
||||
"{mod_loader}/v{format_version}/versions/{}.json",
|
||||
loader.version
|
||||
);
|
||||
|
||||
upload_files.insert(
|
||||
version_path,
|
||||
UploadFile {
|
||||
file: bytes,
|
||||
content_type: Some("application/json".to_string()),
|
||||
},
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
upload_file_to_bucket(
|
||||
format!(
|
||||
"fabric/v{}/manifest.json",
|
||||
daedalus::modded::CURRENT_FABRIC_FORMAT_VERSION,
|
||||
),
|
||||
serde_json::to_vec(&Manifest {
|
||||
game_versions: versions,
|
||||
})?,
|
||||
Some("application/json".to_string()),
|
||||
&uploaded_files_mutex,
|
||||
semaphore,
|
||||
)
|
||||
.await?;
|
||||
if !fetch_fabric_versions.is_empty()
|
||||
|| !fetch_intermediary_versions.is_empty()
|
||||
{
|
||||
let fabric_manifest_path =
|
||||
format!("{mod_loader}/v{format_version}/manifest.json",);
|
||||
|
||||
if let Ok(uploaded_files_mutex) = Arc::try_unwrap(uploaded_files_mutex) {
|
||||
uploaded_files.extend(uploaded_files_mutex.into_inner());
|
||||
let loader_versions = daedalus::modded::Version {
|
||||
id: DUMMY_REPLACE_STRING.to_string(),
|
||||
stable: true,
|
||||
loaders: fabric_manifest
|
||||
.loader
|
||||
.into_iter()
|
||||
.map(|x| {
|
||||
let version_path = format!(
|
||||
"{mod_loader}/v{format_version}/versions/{}.json",
|
||||
x.version,
|
||||
);
|
||||
|
||||
daedalus::modded::LoaderVersion {
|
||||
id: x.version,
|
||||
url: format_url(&version_path),
|
||||
stable: x.stable,
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
};
|
||||
|
||||
let manifest = daedalus::modded::Manifest {
|
||||
game_versions: std::iter::once(loader_versions)
|
||||
.chain(fabric_manifest.game.into_iter().map(|x| {
|
||||
daedalus::modded::Version {
|
||||
id: x.version,
|
||||
stable: x.stable,
|
||||
loaders: vec![],
|
||||
}
|
||||
}))
|
||||
.collect(),
|
||||
};
|
||||
|
||||
upload_files.insert(
|
||||
fabric_manifest_path,
|
||||
UploadFile {
|
||||
file: bytes::Bytes::from(serde_json::to_vec(&manifest)?),
|
||||
content_type: Some("application/json".to_string()),
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
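// Illustrative sketch (not from the upstream diff): the
// `std::iter::once(..).chain(..)` construction in the function above, which
// places the synthetic placeholder entry ahead of the real game versions.
// Plain strings stand in for the manifest types; the placeholder text is an
// assumption made for the example.
#[cfg(test)]
mod manifest_chain_sketch {
    #[test]
    fn placeholder_entry_comes_first() {
        let placeholder = "${modrinth.gameVersion}".to_string(); // assumed placeholder text
        let game_versions = vec!["1.20.1".to_string(), "1.20.2".to_string()];

        let all: Vec<String> = std::iter::once(placeholder.clone())
            .chain(game_versions)
            .collect();

        assert_eq!(all.first(), Some(&placeholder));
        assert_eq!(all.len(), 3);
    }
}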
|
||||
|
||||
const FABRIC_META_URL: &str = "https://meta.fabricmc.net/v2";
|
||||
|
||||
async fn fetch_fabric_version(
|
||||
version_number: &str,
|
||||
loader_version: &str,
|
||||
semaphore: Arc<Semaphore>,
|
||||
) -> Result<PartialVersionInfo, Error> {
|
||||
Ok(serde_json::from_slice(
|
||||
&download_file(
|
||||
&format!(
|
||||
"{}/versions/loader/{}/{}/profile/json",
|
||||
FABRIC_META_URL, version_number, loader_version
|
||||
),
|
||||
None,
|
||||
semaphore,
|
||||
)
|
||||
.await?,
|
||||
)?)
|
||||
}
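// Illustrative sketch (not from the upstream diff): what the profile URL built
// above looks like for a concrete pair of versions. Both version numbers are
// assumptions chosen for the example, not values taken from the code.
#[cfg(test)]
mod fabric_profile_url_sketch {
    #[test]
    fn builds_the_profile_endpoint() {
        let game_version = "1.19.4-rc2";
        let loader_version = "0.15.0"; // hypothetical fabric-loader version
        let url = format!(
            "{}/versions/loader/{}/{}/profile/json",
            super::FABRIC_META_URL, game_version, loader_version
        );
        assert_eq!(
            url,
            "https://meta.fabricmc.net/v2/versions/loader/1.19.4-rc2/0.15.0/profile/json"
        );
    }
}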
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
/// Versions of fabric components
struct FabricVersions {
    /// Versions of Minecraft that fabric supports
    pub game: Vec<FabricGameVersion>,
    /// Available versions of the fabric loader
    pub loader: Vec<FabricLoaderVersion>,
}

#[derive(Deserialize, Debug, Clone)]
struct FabricVersions {
    pub loader: Vec<FabricLoaderVersion>,
    pub game: Vec<FabricGameVersion>,
    #[serde(alias = "hashed")]
    pub intermediary: Vec<FabricIntermediaryVersion>,
}
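// Illustrative sketch (not from the upstream diff): what `#[serde(alias = "hashed")]`
// buys in the struct above — the same field deserializes from either key. The
// stand-in struct and JSON below are minimal assumptions, not the real meta payload.
#[cfg(test)]
mod intermediary_alias_sketch {
    use serde::Deserialize;

    #[derive(Deserialize, Debug, PartialEq)]
    struct Versions {
        #[serde(alias = "hashed")]
        intermediary: Vec<String>,
    }

    #[test]
    fn accepts_either_key() {
        let a: Versions =
            serde_json::from_str(r#"{"intermediary": ["1.20.1"]}"#).unwrap();
        let b: Versions =
            serde_json::from_str(r#"{"hashed": ["1.20.1"]}"#).unwrap();
        assert_eq!(a, b);
    }
}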
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
/// A version of Minecraft that fabric supports
|
||||
struct FabricGameVersion {
|
||||
/// The version number of the game
|
||||
pub version: String,
|
||||
/// Whether the Minecraft version is stable or not
|
||||
pub stable: bool,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
/// A version of the fabric loader
struct FabricLoaderVersion {
    /// The separator to get the build number
    pub separator: String,
    /// The build number
    pub build: u32,
    /// The maven artifact
    pub maven: String,
    /// The version number of the fabric loader
    pub version: String,
    /// Whether the loader is stable or not
    pub stable: bool,
}

#[derive(Deserialize, Debug, Clone)]
struct FabricLoaderVersion {
    // pub separator: String,
    // pub build: u32,
    // pub maven: String,
    pub version: String,
    #[serde(default)]
    pub stable: bool,
}
|
||||
/// Fetches the list of fabric versions
|
||||
async fn fetch_fabric_versions(
|
||||
url: Option<&str>,
|
||||
semaphore: Arc<Semaphore>,
|
||||
) -> Result<FabricVersions, Error> {
|
||||
Ok(serde_json::from_slice(
|
||||
&download_file(
|
||||
url.unwrap_or(&*format!("{}/versions", FABRIC_META_URL)),
|
||||
None,
|
||||
semaphore,
|
||||
)
|
||||
.await?,
|
||||
)?)
|
||||
|
||||
#[derive(Deserialize, Debug, Clone)]
|
||||
struct FabricIntermediaryVersion {
|
||||
pub maven: String,
|
||||
pub version: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug, Clone)]
|
||||
struct FabricGameVersion {
|
||||
pub version: String,
|
||||
pub stable: bool,
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,118 +1,147 @@
|
||||
use log::{error, info, warn};
|
||||
use s3::creds::Credentials;
|
||||
use s3::error::S3Error;
|
||||
use s3::{Bucket, Region};
|
||||
use crate::util::{
|
||||
format_url, upload_file_to_bucket, upload_url_to_bucket_mirrors,
|
||||
REQWEST_CLIENT,
|
||||
};
|
||||
use daedalus::get_path_from_artifact;
|
||||
use dashmap::{DashMap, DashSet};
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
use tokio::sync::Semaphore;
|
||||
use tracing_error::ErrorLayer;
|
||||
use tracing_subscriber::{fmt, prelude::*, EnvFilter};
|
||||
|
||||
mod error;
|
||||
mod fabric;
|
||||
mod forge;
|
||||
mod minecraft;
|
||||
mod neo;
|
||||
mod quilt;
|
||||
pub mod util;
|
||||
|
||||
#[derive(thiserror::Error, Debug)]
|
||||
pub enum Error {
|
||||
#[error("{0}")]
|
||||
DaedalusError(#[from] daedalus::Error),
|
||||
#[error("Error while deserializing JSON")]
|
||||
SerdeError(#[from] serde_json::Error),
|
||||
#[error("Error while deserializing XML")]
|
||||
XMLError(#[from] serde_xml_rs::Error),
|
||||
#[error("Unable to fetch {item}")]
|
||||
FetchError { inner: reqwest::Error, item: String },
|
||||
#[error("Error while managing asynchronous tasks")]
|
||||
TaskError(#[from] tokio::task::JoinError),
|
||||
#[error("Error while uploading file to S3")]
|
||||
S3Error { inner: S3Error, file: String },
|
||||
#[error("Error while parsing version as semver: {0}")]
|
||||
SemVerError(#[from] semver::Error),
|
||||
#[error("Error while reading zip file: {0}")]
|
||||
ZipError(#[from] zip::result::ZipError),
|
||||
#[error("Error while reading zip file: {0}")]
|
||||
IoError(#[from] std::io::Error),
|
||||
#[error("Error while obtaining strong reference to Arc")]
|
||||
ArcError,
|
||||
#[error("Error acquiring semaphore: {0}")]
|
||||
AcquireError(#[from] tokio::sync::AcquireError),
|
||||
}
|
||||
pub use error::{Error, ErrorKind, Result};
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() {
|
||||
env_logger::init();
|
||||
async fn main() -> Result<()> {
|
||||
dotenvy::dotenv().ok();
|
||||
|
||||
let subscriber = tracing_subscriber::registry()
|
||||
.with(fmt::layer())
|
||||
.with(EnvFilter::from_default_env())
|
||||
.with(ErrorLayer::default());
|
||||
|
||||
tracing::subscriber::set_global_default(subscriber)?;
|
||||
|
||||
tracing::info!("Initialized tracing. Starting Daedalus!");
|
||||
|
||||
if check_env_vars() {
|
||||
error!("Some environment variables are missing!");
|
||||
tracing::error!("Some environment variables are missing!");
|
||||
|
||||
return;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let mut timer = tokio::time::interval(Duration::from_secs(60 * 60));
|
||||
let semaphore = Arc::new(Semaphore::new(10));
|
||||
let semaphore = Arc::new(Semaphore::new(
|
||||
dotenvy::var("CONCURRENCY_LIMIT")
|
||||
.ok()
|
||||
.and_then(|x| x.parse().ok())
|
||||
.unwrap_or(10),
|
||||
));
|
||||
|
||||
loop {
|
||||
timer.tick().await;
|
||||
// path, upload file
|
||||
let upload_files: DashMap<String, UploadFile> = DashMap::new();
|
||||
// path, mirror artifact
|
||||
let mirror_artifacts: DashMap<String, MirrorArtifact> = DashMap::new();
|
||||
|
||||
let mut uploaded_files = Vec::new();
|
||||
minecraft::fetch(semaphore.clone(), &upload_files, &mirror_artifacts)
|
||||
.await?;
|
||||
fabric::fetch_fabric(semaphore.clone(), &upload_files, &mirror_artifacts)
|
||||
.await?;
|
||||
fabric::fetch_quilt(semaphore.clone(), &upload_files, &mirror_artifacts)
|
||||
.await?;
|
||||
forge::fetch_neo(semaphore.clone(), &upload_files, &mirror_artifacts)
|
||||
.await?;
|
||||
forge::fetch_forge(semaphore.clone(), &upload_files, &mirror_artifacts)
|
||||
.await?;
|
||||
|
||||
        let versions = match minecraft::retrieve_data(
            &mut uploaded_files,
            semaphore.clone(),
        )
        .await
        {
            Ok(res) => Some(res),
            Err(err) => {
                error!("{:?}", err);

                None

    futures::future::try_join_all(upload_files.iter().map(|x| {
        upload_file_to_bucket(
            x.key().clone(),
            x.value().file.clone(),
            x.value().content_type.clone(),
            &semaphore,
        )
    }))
    .await?;
|
||||
futures::future::try_join_all(mirror_artifacts.iter().map(|x| {
|
||||
upload_url_to_bucket_mirrors(
|
||||
format!("maven/{}", x.key()),
|
||||
x.value().mirrors.iter().map(|x| x.key().clone()).collect(),
|
||||
&semaphore,
|
||||
)
|
||||
}))
|
||||
.await?;
|
||||
|
||||
if let Ok(token) = dotenvy::var("CLOUDFLARE_TOKEN") {
|
||||
if let Ok(zone_id) = dotenvy::var("CLOUDFLARE_ZONE_ID") {
|
||||
let cache_clears = upload_files
|
||||
.into_iter()
|
||||
.map(|x| format_url(&x.0))
|
||||
.chain(
|
||||
mirror_artifacts
|
||||
.into_iter()
|
||||
.map(|x| format_url(&format!("maven/{}", x.0))),
|
||||
)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
// Cloudflare ratelimits cache clears to 500 files per request
|
||||
for chunk in cache_clears.chunks(500) {
|
||||
REQWEST_CLIENT.post(format!("https://api.cloudflare.com/client/v4/zones/{zone_id}/purge_cache"))
|
||||
.bearer_auth(&token)
|
||||
.json(&serde_json::json!({
|
||||
"files": chunk
|
||||
}))
|
||||
.send()
|
||||
.await
|
||||
.map_err(|err| {
|
||||
ErrorKind::Fetch {
|
||||
inner: err,
|
||||
item: "cloudflare clear cache".to_string(),
|
||||
}
|
||||
})?
|
||||
.error_for_status()
|
||||
.map_err(|err| {
|
||||
ErrorKind::Fetch {
|
||||
inner: err,
|
||||
item: "cloudflare clear cache".to_string(),
|
||||
}
|
||||
})?;
|
||||
}
|
||||
};
|
||||
|
||||
if let Some(manifest) = versions {
|
||||
match fabric::retrieve_data(
|
||||
&manifest,
|
||||
&mut uploaded_files,
|
||||
semaphore.clone(),
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(..) => {}
|
||||
Err(err) => error!("{:?}", err),
|
||||
};
|
||||
match forge::retrieve_data(
|
||||
&manifest,
|
||||
&mut uploaded_files,
|
||||
semaphore.clone(),
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(..) => {}
|
||||
Err(err) => error!("{:?}", err),
|
||||
};
|
||||
match quilt::retrieve_data(
|
||||
&manifest,
|
||||
&mut uploaded_files,
|
||||
semaphore.clone(),
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(..) => {}
|
||||
Err(err) => error!("{:?}", err),
|
||||
};
|
||||
match neo::retrieve_data(
|
||||
&manifest,
|
||||
&mut uploaded_files,
|
||||
semaphore.clone(),
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(..) => {}
|
||||
Err(err) => error!("{:?}", err),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
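// Illustrative sketch (not from the upstream diff): the batching used for the
// Cloudflare purge in `main` above, where `chunks(500)` keeps each purge
// request under the 500-file limit. The URLs are made up; only the batching
// behaviour is shown.
#[cfg(test)]
mod purge_chunking_sketch {
    #[test]
    fn batches_of_at_most_500_files() {
        let cache_clears: Vec<String> = (0..1_201)
            .map(|i| format!("https://cdn.example/meta/file-{i}.json"))
            .collect();

        let batches: Vec<&[String]> = cache_clears.chunks(500).collect();

        assert_eq!(batches.len(), 3);
        assert_eq!(batches[0].len(), 500);
        assert_eq!(batches[2].len(), 201);
    }
}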
|
||||
|
||||
pub struct UploadFile {
|
||||
file: bytes::Bytes,
|
||||
content_type: Option<String>,
|
||||
}
|
||||
|
||||
pub struct MirrorArtifact {
|
||||
pub mirrors: DashSet<String>,
|
||||
}
|
||||
|
||||
pub fn insert_mirrored_artifact(
|
||||
artifact: &str,
|
||||
mirror: String,
|
||||
mirror_artifacts: &DashMap<String, MirrorArtifact>,
|
||||
) -> Result<()> {
|
||||
mirror_artifacts
|
||||
.entry(get_path_from_artifact(artifact)?)
|
||||
.or_insert(MirrorArtifact {
|
||||
mirrors: DashSet::new(),
|
||||
})
|
||||
.mirrors
|
||||
.insert(mirror);
|
||||
|
||||
Ok(())
|
||||
}
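// Illustrative sketch (not from the upstream diff): registering one artifact
// from several mirrors with the helper above. The maven coordinates and mirror
// URLs are made-up examples; the point is that `mirrors` is a DashSet, so
// repeated mirror URLs collapse into one entry.
#[cfg(test)]
mod mirror_artifact_sketch {
    use super::{insert_mirrored_artifact, MirrorArtifact};
    use dashmap::DashMap;

    #[test]
    fn deduplicates_mirrors() {
        let mirror_artifacts: DashMap<String, MirrorArtifact> = DashMap::new();
        let artifact = "com.example:library:1.0.0"; // hypothetical coordinates

        insert_mirrored_artifact(artifact, "https://maven-a.example/".to_string(), &mirror_artifacts).unwrap();
        insert_mirrored_artifact(artifact, "https://maven-b.example/".to_string(), &mirror_artifacts).unwrap();
        insert_mirrored_artifact(artifact, "https://maven-a.example/".to_string(), &mirror_artifacts).unwrap();

        // One entry keyed by the artifact's maven path, with two distinct mirrors.
        assert_eq!(mirror_artifacts.len(), 1);
        assert_eq!(mirror_artifacts.iter().next().unwrap().value().mirrors.len(), 2);
    }
}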
|
||||
|
||||
fn check_env_vars() -> bool {
|
||||
@@ -124,7 +153,7 @@ fn check_env_vars() -> bool {
|
||||
.and_then(|s| s.parse::<T>().ok())
|
||||
.is_none()
|
||||
{
|
||||
warn!(
|
||||
tracing::warn!(
|
||||
"Variable `{}` missing in dotenvy or not of type `{}`",
|
||||
var,
|
||||
std::any::type_name::<T>()
|
||||
@@ -143,110 +172,14 @@ fn check_env_vars() -> bool {
|
||||
failed |= check_var::<String>("S3_REGION");
|
||||
failed |= check_var::<String>("S3_BUCKET_NAME");
|
||||
|
||||
if dotenvy::var("CLOUDFLARE_INTEGRATION")
|
||||
.ok()
|
||||
.and_then(|x| x.parse::<bool>().ok())
|
||||
.unwrap_or(false)
|
||||
{
|
||||
failed |= check_var::<String>("CLOUDFLARE_TOKEN");
|
||||
failed |= check_var::<String>("CLOUDFLARE_ZONE_ID");
|
||||
}
|
||||
|
||||
failed
|
||||
}
|
||||
|
||||
lazy_static::lazy_static! {
|
||||
static ref CLIENT : Bucket = {
|
||||
let region = dotenvy::var("S3_REGION").unwrap();
|
||||
let b = Bucket::new(
|
||||
&dotenvy::var("S3_BUCKET_NAME").unwrap(),
|
||||
if &*region == "r2" {
|
||||
Region::R2 {
|
||||
account_id: dotenvy::var("S3_URL").unwrap(),
|
||||
}
|
||||
} else {
|
||||
Region::Custom {
|
||||
region: region.clone(),
|
||||
endpoint: dotenvy::var("S3_URL").unwrap(),
|
||||
}
|
||||
},
|
||||
Credentials::new(
|
||||
Some(&*dotenvy::var("S3_ACCESS_TOKEN").unwrap()),
|
||||
Some(&*dotenvy::var("S3_SECRET").unwrap()),
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
).unwrap(),
|
||||
).unwrap();
|
||||
|
||||
if region == "path-style" {
|
||||
b.with_path_style()
|
||||
} else {
|
||||
b
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
pub async fn upload_file_to_bucket(
|
||||
path: String,
|
||||
bytes: Vec<u8>,
|
||||
content_type: Option<String>,
|
||||
uploaded_files: &tokio::sync::Mutex<Vec<String>>,
|
||||
semaphore: Arc<Semaphore>,
|
||||
) -> Result<(), Error> {
|
||||
let _permit = semaphore.acquire().await?;
|
||||
info!("{} started uploading", path);
|
||||
let key = path.clone();
|
||||
|
||||
for attempt in 1..=4 {
|
||||
let result = if let Some(ref content_type) = content_type {
|
||||
CLIENT
|
||||
.put_object_with_content_type(key.clone(), &bytes, content_type)
|
||||
.await
|
||||
} else {
|
||||
CLIENT.put_object(key.clone(), &bytes).await
|
||||
}
|
||||
.map_err(|err| Error::S3Error {
|
||||
inner: err,
|
||||
file: path.clone(),
|
||||
});
|
||||
|
||||
match result {
|
||||
Ok(_) => {
|
||||
{
|
||||
info!("{} done uploading", path);
|
||||
let mut uploaded_files = uploaded_files.lock().await;
|
||||
uploaded_files.push(key);
|
||||
}
|
||||
|
||||
return Ok(());
|
||||
}
|
||||
Err(_) if attempt <= 3 => continue,
|
||||
Err(_) => {
|
||||
result?;
|
||||
}
|
||||
}
|
||||
}
|
||||
unreachable!()
|
||||
}
|
||||
|
||||
pub fn format_url(path: &str) -> String {
|
||||
format!("{}/{}", &*dotenvy::var("BASE_URL").unwrap(), path)
|
||||
}
|
||||
|
||||
pub async fn download_file(
|
||||
url: &str,
|
||||
sha1: Option<&str>,
|
||||
semaphore: Arc<Semaphore>,
|
||||
) -> Result<bytes::Bytes, Error> {
|
||||
let _permit = semaphore.acquire().await?;
|
||||
info!("{} started downloading", url);
|
||||
let val = daedalus::download_file(url, sha1).await?;
|
||||
info!("{} finished downloading", url);
|
||||
Ok(val)
|
||||
}
|
||||
|
||||
pub async fn download_file_mirrors(
|
||||
base: &str,
|
||||
mirrors: &[&str],
|
||||
sha1: Option<&str>,
|
||||
semaphore: Arc<Semaphore>,
|
||||
) -> Result<bytes::Bytes, Error> {
|
||||
let _permit = semaphore.acquire().await?;
|
||||
info!("{} started downloading", base);
|
||||
let val = daedalus::download_file_mirrors(base, mirrors, sha1).await?;
|
||||
info!("{} finished downloading", base);
|
||||
|
||||
Ok(val)
|
||||
}
|
||||
|
||||
@@ -1,286 +1,181 @@
|
||||
use crate::download_file;
|
||||
use crate::{format_url, upload_file_to_bucket, Error};
|
||||
use daedalus::get_hash;
|
||||
use daedalus::minecraft::{
|
||||
merge_partial_library, Library, PartialLibrary, VersionManifest,
|
||||
use crate::util::fetch_json;
|
||||
use crate::{
|
||||
util::download_file, util::format_url, util::sha1_async, Error,
|
||||
MirrorArtifact, UploadFile,
|
||||
};
|
||||
use log::info;
|
||||
use daedalus::minecraft::{
|
||||
merge_partial_library, Library, PartialLibrary, VersionInfo,
|
||||
VersionManifest, VERSION_MANIFEST_URL,
|
||||
};
|
||||
use dashmap::DashMap;
|
||||
use serde::Deserialize;
|
||||
use std::sync::Arc;
|
||||
use std::time::Instant;
|
||||
use tokio::sync::{Mutex, Semaphore};
|
||||
use tokio::sync::Semaphore;
|
||||
|
||||
pub async fn retrieve_data(
|
||||
uploaded_files: &mut Vec<String>,
|
||||
#[tracing::instrument(skip(semaphore, upload_files, _mirror_artifacts))]
|
||||
pub async fn fetch(
|
||||
semaphore: Arc<Semaphore>,
|
||||
) -> Result<VersionManifest, Error> {
|
||||
let old_manifest = daedalus::minecraft::fetch_version_manifest(Some(
|
||||
&*format_url(&format!(
|
||||
upload_files: &DashMap<String, UploadFile>,
|
||||
_mirror_artifacts: &DashMap<String, MirrorArtifact>,
|
||||
) -> Result<(), Error> {
|
||||
let modrinth_manifest = fetch_json::<VersionManifest>(
|
||||
&format_url(&format!(
|
||||
"minecraft/v{}/manifest.json",
|
||||
daedalus::minecraft::CURRENT_FORMAT_VERSION
|
||||
)),
|
||||
))
|
||||
&semaphore,
|
||||
)
|
||||
.await
|
||||
.ok();
|
||||
let mojang_manifest =
|
||||
fetch_json::<VersionManifest>(VERSION_MANIFEST_URL, &semaphore).await?;
|
||||
|
||||
let mut manifest =
|
||||
daedalus::minecraft::fetch_version_manifest(None).await?;
|
||||
let cloned_manifest =
|
||||
Arc::new(Mutex::new(old_manifest.clone().unwrap_or(manifest.clone())));
|
||||
// TODO: experimental snapshots: https://github.com/PrismLauncher/meta/blob/main/meta/common/mojang-minecraft-experiments.json
|
||||
// TODO: old snapshots: https://github.com/PrismLauncher/meta/blob/main/meta/common/mojang-minecraft-old-snapshots.json
|
||||
|
||||
let patches = fetch_library_patches()?;
|
||||
let cloned_patches = Arc::new(&patches);
|
||||
    // We check Modrinth's version manifest to see whether each version 1) exists in Modrinth's database and 2) is unchanged
    // If either check fails, we fetch that version again
|
||||
let (fetch_versions, existing_versions) =
|
||||
if let Some(mut modrinth_manifest) = modrinth_manifest {
|
||||
let (mut fetch_versions, mut existing_versions) =
|
||||
(Vec::new(), Vec::new());
|
||||
|
||||
let visited_assets_mutex = Arc::new(Mutex::new(Vec::new()));
|
||||
let uploaded_files_mutex = Arc::new(Mutex::new(Vec::new()));
|
||||
for version in mojang_manifest.versions {
|
||||
if let Some(index) = modrinth_manifest
|
||||
.versions
|
||||
.iter()
|
||||
.position(|x| x.id == version.id)
|
||||
{
|
||||
let modrinth_version =
|
||||
modrinth_manifest.versions.remove(index);
|
||||
|
||||
let now = Instant::now();
|
||||
|
||||
let mut version_futures = Vec::new();
|
||||
|
||||
for version in manifest.versions.iter_mut() {
|
||||
version_futures.push(async {
|
||||
let old_version = if let Some(old_manifest) = &old_manifest {
|
||||
old_manifest.versions.iter().find(|x| x.id == version.id)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
if old_version.is_some() {
|
||||
return Ok(());
|
||||
if modrinth_version
|
||||
.original_sha1
|
||||
.as_ref()
|
||||
.map(|x| x == &version.sha1)
|
||||
.unwrap_or(false)
|
||||
{
|
||||
existing_versions.push(modrinth_version);
|
||||
} else {
|
||||
fetch_versions.push(version);
|
||||
}
|
||||
} else {
|
||||
fetch_versions.push(version);
|
||||
}
|
||||
}
|
||||
|
||||
let visited_assets_mutex = Arc::clone(&visited_assets_mutex);
|
||||
let cloned_manifest_mutex = Arc::clone(&cloned_manifest);
|
||||
let uploaded_files_mutex = Arc::clone(&uploaded_files_mutex);
|
||||
let semaphore = Arc::clone(&semaphore);
|
||||
let patches = Arc::clone(&cloned_patches);
|
||||
(fetch_versions, existing_versions)
|
||||
} else {
|
||||
(mojang_manifest.versions, Vec::new())
|
||||
};
|
||||
|
||||
let assets_hash =
|
||||
old_version.and_then(|x| x.assets_index_sha1.clone());
|
||||
if !fetch_versions.is_empty() {
|
||||
let version_manifests = futures::future::try_join_all(
|
||||
fetch_versions
|
||||
.iter()
|
||||
.map(|x| download_file(&x.url, Some(&x.sha1), &semaphore)),
|
||||
)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|x| serde_json::from_slice(&x))
|
||||
.collect::<Result<Vec<VersionInfo>, serde_json::Error>>()?;
|
||||
|
||||
async move {
|
||||
let mut upload_futures = Vec::new();
|
||||
|
||||
let mut version_info =
|
||||
daedalus::minecraft::fetch_version_info(version).await?;
|
||||
|
||||
fn patch_library(
|
||||
patches: &Vec<LibraryPatch>,
|
||||
mut library: Library,
|
||||
) -> Vec<Library> {
|
||||
let mut val = Vec::new();
|
||||
|
||||
let actual_patches = patches
|
||||
.iter()
|
||||
.filter(|x| x.match_.contains(&library.name))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if !actual_patches.is_empty() {
|
||||
for patch in actual_patches {
|
||||
if let Some(override_) = &patch.override_ {
|
||||
library = merge_partial_library(
|
||||
override_.clone(),
|
||||
library,
|
||||
);
|
||||
}
|
||||
|
||||
if let Some(additional_libraries) =
|
||||
&patch.additional_libraries
|
||||
{
|
||||
for additional_library in additional_libraries {
|
||||
if patch
|
||||
.patch_additional_libraries
|
||||
.unwrap_or(false)
|
||||
{
|
||||
let mut libs = patch_library(
|
||||
patches,
|
||||
additional_library.clone(),
|
||||
);
|
||||
val.append(&mut libs)
|
||||
} else {
|
||||
val.push(additional_library.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
val.push(library);
|
||||
} else {
|
||||
val.push(library);
|
||||
    // Patch libraries of Minecraft versions for M-series Mac support, better Linux compatibility, etc.
|
||||
let library_patches = fetch_library_patches()?;
|
||||
let patched_version_manifests = version_manifests
|
||||
.into_iter()
|
||||
.map(|mut x| {
|
||||
if !library_patches.is_empty() {
|
||||
let mut new_libraries = Vec::new();
|
||||
for library in x.libraries {
|
||||
let mut libs = patch_library(&library_patches, library);
|
||||
new_libraries.append(&mut libs)
|
||||
}
|
||||
|
||||
val
|
||||
x.libraries = new_libraries
|
||||
}
|
||||
|
||||
let mut new_libraries = Vec::new();
|
||||
for library in version_info.libraries.clone() {
|
||||
let mut libs = patch_library(&patches, library);
|
||||
new_libraries.append(&mut libs)
|
||||
}
|
||||
version_info.libraries = new_libraries;
|
||||
x
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let version_info_hash = get_hash(bytes::Bytes::from(
|
||||
serde_json::to_vec(&version_info)?,
|
||||
))
|
||||
.await?;
|
||||
// serialize + compute hashes
|
||||
let serialized_version_manifests = patched_version_manifests
|
||||
.iter()
|
||||
.map(|x| serde_json::to_vec(x).map(bytes::Bytes::from))
|
||||
.collect::<Result<Vec<_>, serde_json::Error>>()?;
|
||||
let hashes_version_manifests = futures::future::try_join_all(
|
||||
serialized_version_manifests
|
||||
.iter()
|
||||
.map(|x| sha1_async(x.clone())),
|
||||
)
|
||||
.await?;
|
||||
|
||||
// We upload the new version manifests and add them to the versions list
|
||||
let mut new_versions = patched_version_manifests
|
||||
.into_iter()
|
||||
.zip(serialized_version_manifests.into_iter())
|
||||
.zip(hashes_version_manifests.into_iter())
|
||||
.map(|((version, bytes), hash)| {
|
||||
let version_path = format!(
|
||||
"minecraft/v{}/versions/{}.json",
|
||||
daedalus::minecraft::CURRENT_FORMAT_VERSION,
|
||||
version.id
|
||||
);
|
||||
let assets_path = format!(
|
||||
"minecraft/v{}/assets/{}.json",
|
||||
daedalus::minecraft::CURRENT_FORMAT_VERSION,
|
||||
version_info.asset_index.id
|
||||
|
||||
let url = format_url(&version_path);
|
||||
upload_files.insert(
|
||||
version_path,
|
||||
UploadFile {
|
||||
file: bytes,
|
||||
content_type: Some("application/json".to_string()),
|
||||
},
|
||||
);
|
||||
let assets_index_url = version_info.asset_index.url.clone();
|
||||
|
||||
{
|
||||
let mut cloned_manifest =
|
||||
cloned_manifest_mutex.lock().await;
|
||||
|
||||
if let Some(position) = cloned_manifest
|
||||
.versions
|
||||
daedalus::minecraft::Version {
|
||||
original_sha1: fetch_versions
|
||||
.iter()
|
||||
.position(|x| version.id == x.id)
|
||||
{
|
||||
cloned_manifest.versions[position].url =
|
||||
format_url(&version_path);
|
||||
cloned_manifest.versions[position].assets_index_sha1 =
|
||||
Some(version_info.asset_index.sha1.clone());
|
||||
cloned_manifest.versions[position].assets_index_url =
|
||||
Some(format_url(&assets_path));
|
||||
cloned_manifest.versions[position].sha1 =
|
||||
version_info_hash;
|
||||
} else {
|
||||
cloned_manifest.versions.insert(
|
||||
0,
|
||||
daedalus::minecraft::Version {
|
||||
id: version_info.id.clone(),
|
||||
type_: version_info.type_.clone(),
|
||||
url: format_url(&version_path),
|
||||
time: version_info.time,
|
||||
release_time: version_info.release_time,
|
||||
sha1: version_info_hash,
|
||||
compliance_level: 1,
|
||||
assets_index_url: Some(
|
||||
version_info.asset_index.sha1.clone(),
|
||||
),
|
||||
assets_index_sha1: Some(
|
||||
version_info.asset_index.sha1.clone(),
|
||||
),
|
||||
},
|
||||
)
|
||||
}
|
||||
.find(|x| x.id == version.id)
|
||||
.map(|x| x.sha1.clone()),
|
||||
id: version.id,
|
||||
type_: version.type_,
|
||||
url,
|
||||
time: version.time,
|
||||
release_time: version.release_time,
|
||||
sha1: hash,
|
||||
compliance_level: 1,
|
||||
}
|
||||
})
|
||||
.chain(existing_versions.into_iter())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let mut download_assets = false;
|
||||
new_versions.sort_by(|a, b| b.release_time.cmp(&a.release_time));
|
||||
|
||||
{
|
||||
let mut visited_assets = visited_assets_mutex.lock().await;
|
||||
|
||||
if !visited_assets.contains(&version_info.asset_index.id) {
|
||||
if let Some(assets_hash) = assets_hash {
|
||||
if version_info.asset_index.sha1 != assets_hash {
|
||||
download_assets = true;
|
||||
}
|
||||
} else {
|
||||
download_assets = true;
|
||||
}
|
||||
}
|
||||
|
||||
if download_assets {
|
||||
visited_assets
|
||||
.push(version_info.asset_index.id.clone());
|
||||
}
|
||||
}
|
||||
|
||||
if download_assets {
|
||||
let assets_index = download_file(
|
||||
&assets_index_url,
|
||||
Some(&version_info.asset_index.sha1),
|
||||
semaphore.clone(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
{
|
||||
upload_futures.push(upload_file_to_bucket(
|
||||
assets_path,
|
||||
assets_index.to_vec(),
|
||||
Some("application/json".to_string()),
|
||||
uploaded_files_mutex.as_ref(),
|
||||
semaphore.clone(),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
upload_futures.push(upload_file_to_bucket(
|
||||
version_path,
|
||||
serde_json::to_vec(&version_info)?,
|
||||
Some("application/json".to_string()),
|
||||
uploaded_files_mutex.as_ref(),
|
||||
semaphore.clone(),
|
||||
));
|
||||
}
|
||||
|
||||
futures::future::try_join_all(upload_futures).await?;
|
||||
|
||||
Ok::<(), Error>(())
|
||||
}
|
||||
.await?;
|
||||
|
||||
Ok::<(), Error>(())
|
||||
})
|
||||
}
|
||||
|
||||
{
|
||||
let mut versions = version_futures.into_iter().peekable();
|
||||
let mut chunk_index = 0;
|
||||
while versions.peek().is_some() {
|
||||
let now = Instant::now();
|
||||
|
||||
let chunk: Vec<_> = versions.by_ref().take(100).collect();
|
||||
futures::future::try_join_all(chunk).await?;
|
||||
|
||||
chunk_index += 1;
|
||||
|
||||
let elapsed = now.elapsed();
|
||||
info!("Chunk {} Elapsed: {:.2?}", chunk_index, elapsed);
|
||||
}
|
||||
}
|
||||
//futures::future::try_join_all(version_futures).await?;
|
||||
|
||||
upload_file_to_bucket(
|
||||
format!(
|
||||
// create and upload the new manifest
|
||||
let version_manifest_path = format!(
|
||||
"minecraft/v{}/manifest.json",
|
||||
daedalus::minecraft::CURRENT_FORMAT_VERSION
|
||||
),
|
||||
serde_json::to_vec(&*cloned_manifest.lock().await)?,
|
||||
Some("application/json".to_string()),
|
||||
uploaded_files_mutex.as_ref(),
|
||||
semaphore,
|
||||
)
|
||||
.await?;
|
||||
);
|
||||
|
||||
if let Ok(uploaded_files_mutex) = Arc::try_unwrap(uploaded_files_mutex) {
|
||||
uploaded_files.extend(uploaded_files_mutex.into_inner());
|
||||
let new_manifest = VersionManifest {
|
||||
latest: mojang_manifest.latest,
|
||||
versions: new_versions,
|
||||
};
|
||||
|
||||
upload_files.insert(
|
||||
version_manifest_path,
|
||||
UploadFile {
|
||||
file: bytes::Bytes::from(serde_json::to_vec(&new_manifest)?),
|
||||
content_type: Some("application/json".to_string()),
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
let elapsed = now.elapsed();
|
||||
info!("Elapsed: {:.2?}", elapsed);
|
||||
|
||||
Ok(Arc::try_unwrap(cloned_manifest)
|
||||
.map_err(|_| Error::ArcError)?
|
||||
.into_inner())
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
/// A version of the fabric loader
|
||||
struct LibraryPatch {
|
||||
pub struct LibraryPatch {
|
||||
#[serde(rename = "_comment")]
|
||||
pub _comment: String,
|
||||
#[serde(rename = "match")]
|
||||
@@ -291,8 +186,45 @@ struct LibraryPatch {
|
||||
pub patch_additional_libraries: Option<bool>,
|
||||
}
|
||||
|
||||
/// Fetches the list of fabric versions
|
||||
fn fetch_library_patches() -> Result<Vec<LibraryPatch>, Error> {
|
||||
let patches = include_bytes!("../library-patches.json");
|
||||
Ok(serde_json::from_slice(patches)?)
|
||||
}
|
||||
|
||||
pub fn patch_library(
|
||||
patches: &Vec<LibraryPatch>,
|
||||
mut library: Library,
|
||||
) -> Vec<Library> {
|
||||
let mut val = Vec::new();
|
||||
|
||||
let actual_patches = patches
|
||||
.iter()
|
||||
.filter(|x| x.match_.contains(&library.name))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if !actual_patches.is_empty() {
|
||||
for patch in actual_patches {
|
||||
if let Some(override_) = &patch.override_ {
|
||||
library = merge_partial_library(override_.clone(), library);
|
||||
}
|
||||
|
||||
if let Some(additional_libraries) = &patch.additional_libraries {
|
||||
for additional_library in additional_libraries {
|
||||
if patch.patch_additional_libraries.unwrap_or(false) {
|
||||
let mut libs =
|
||||
patch_library(patches, additional_library.clone());
|
||||
val.append(&mut libs)
|
||||
} else {
|
||||
val.push(additional_library.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
val.push(library);
|
||||
} else {
|
||||
val.push(library);
|
||||
}
|
||||
|
||||
val
|
||||
}
|
||||
|
||||
@@ -1,495 +0,0 @@
|
||||
use crate::{download_file, format_url, upload_file_to_bucket, Error};
|
||||
use daedalus::minecraft::{Library, VersionManifest};
|
||||
use daedalus::modded::{
|
||||
LoaderVersion, Manifest, PartialVersionInfo, Processor, SidedDataEntry,
|
||||
};
|
||||
use log::info;
|
||||
use semver::Version;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::io::Read;
|
||||
use std::sync::Arc;
|
||||
use std::time::Instant;
|
||||
use tokio::sync::{Mutex, Semaphore};
|
||||
|
||||
pub async fn retrieve_data(
|
||||
minecraft_versions: &VersionManifest,
|
||||
uploaded_files: &mut Vec<String>,
|
||||
semaphore: Arc<Semaphore>,
|
||||
) -> Result<(), Error> {
|
||||
let maven_metadata = fetch_maven_metadata(semaphore.clone()).await?;
|
||||
let old_manifest = daedalus::modded::fetch_manifest(&format_url(&format!(
|
||||
"neo/v{}/manifest.json",
|
||||
daedalus::modded::CURRENT_NEOFORGE_FORMAT_VERSION,
|
||||
)))
|
||||
.await
|
||||
.ok();
|
||||
|
||||
let old_versions =
|
||||
Arc::new(Mutex::new(if let Some(old_manifest) = old_manifest {
|
||||
old_manifest.game_versions
|
||||
} else {
|
||||
Vec::new()
|
||||
}));
|
||||
|
||||
let versions = Arc::new(Mutex::new(Vec::new()));
|
||||
|
||||
let visited_assets_mutex = Arc::new(Mutex::new(Vec::new()));
|
||||
let uploaded_files_mutex = Arc::new(Mutex::new(Vec::new()));
|
||||
|
||||
let mut version_futures = Vec::new();
|
||||
|
||||
for (minecraft_version, loader_versions) in maven_metadata.clone() {
|
||||
let mut loaders = Vec::new();
|
||||
|
||||
for (full, loader_version, new_forge) in loader_versions {
|
||||
let version = Version::parse(&loader_version)?;
|
||||
|
||||
loaders.push((full, version, new_forge.to_string()))
|
||||
}
|
||||
|
||||
if !loaders.is_empty() {
|
||||
version_futures.push(async {
|
||||
let mut loaders_versions = Vec::new();
|
||||
|
||||
{
|
||||
let loaders_futures = loaders.into_iter().map(|(loader_version_full, _, new_forge)| async {
|
||||
let versions_mutex = Arc::clone(&old_versions);
|
||||
let visited_assets = Arc::clone(&visited_assets_mutex);
|
||||
let uploaded_files_mutex = Arc::clone(&uploaded_files_mutex);
|
||||
let semaphore = Arc::clone(&semaphore);
|
||||
let minecraft_version = minecraft_version.clone();
|
||||
|
||||
async move {
|
||||
{
|
||||
let versions = versions_mutex.lock().await;
|
||||
let version = versions.iter().find(|x|
|
||||
x.id == minecraft_version).and_then(|x| x.loaders.iter().find(|x| x.id == loader_version_full));
|
||||
|
||||
if let Some(version) = version {
|
||||
return Ok::<Option<LoaderVersion>, Error>(Some(version.clone()));
|
||||
}
|
||||
}
|
||||
|
||||
info!("Forge - Installer Start {}", loader_version_full.clone());
|
||||
let bytes = download_file(&format!("https://maven.neoforged.net/net/neoforged/{1}/{0}/{1}-{0}-installer.jar", loader_version_full, if &*new_forge == "true" { "neoforge" } else { "forge" }), None, semaphore.clone()).await?;
|
||||
|
||||
let reader = std::io::Cursor::new(bytes);
|
||||
|
||||
if let Ok(archive) = zip::ZipArchive::new(reader) {
|
||||
let mut archive_clone = archive.clone();
|
||||
let mut profile = tokio::task::spawn_blocking(move || {
|
||||
let mut install_profile = archive_clone.by_name("install_profile.json")?;
|
||||
|
||||
let mut contents = String::new();
|
||||
install_profile.read_to_string(&mut contents)?;
|
||||
|
||||
Ok::<ForgeInstallerProfileV2, Error>(serde_json::from_str::<ForgeInstallerProfileV2>(&contents)?)
|
||||
}).await??;
|
||||
|
||||
let mut archive_clone = archive.clone();
|
||||
let version_info = tokio::task::spawn_blocking(move || {
|
||||
let mut install_profile = archive_clone.by_name("version.json")?;
|
||||
|
||||
let mut contents = String::new();
|
||||
install_profile.read_to_string(&mut contents)?;
|
||||
|
||||
Ok::<PartialVersionInfo, Error>(serde_json::from_str::<PartialVersionInfo>(&contents)?)
|
||||
}).await??;
|
||||
|
||||
|
||||
let mut libs : Vec<Library> = version_info.libraries.into_iter().chain(profile.libraries.into_iter().map(|x| Library {
|
||||
downloads: x.downloads,
|
||||
extract: x.extract,
|
||||
name: x.name,
|
||||
url: x.url,
|
||||
natives: x.natives,
|
||||
rules: x.rules,
|
||||
checksums: x.checksums,
|
||||
include_in_classpath: false
|
||||
})).collect();
|
||||
|
||||
let mut local_libs : HashMap<String, bytes::Bytes> = HashMap::new();
|
||||
|
||||
for lib in &libs {
|
||||
if lib.downloads.as_ref().and_then(|x| x.artifact.as_ref().map(|x| x.url.is_empty())).unwrap_or(false) {
|
||||
let mut archive_clone = archive.clone();
|
||||
let lib_name_clone = lib.name.clone();
|
||||
|
||||
let lib_bytes = tokio::task::spawn_blocking(move || {
|
||||
let mut lib_file = archive_clone.by_name(&format!("maven/{}", daedalus::get_path_from_artifact(&lib_name_clone)?))?;
|
||||
let mut lib_bytes = Vec::new();
|
||||
lib_file.read_to_end(&mut lib_bytes)?;
|
||||
|
||||
Ok::<bytes::Bytes, Error>(bytes::Bytes::from(lib_bytes))
|
||||
}).await??;
|
||||
|
||||
local_libs.insert(lib.name.clone(), lib_bytes);
|
||||
}
|
||||
}
|
||||
|
||||
let path = profile.path.clone();
|
||||
let version = profile.version.clone();
|
||||
|
||||
for entry in profile.data.values_mut() {
|
||||
if entry.client.starts_with('/') || entry.server.starts_with('/') {
|
||||
macro_rules! read_data {
|
||||
($value:expr) => {
|
||||
let mut archive_clone = archive.clone();
|
||||
let value_clone = $value.clone();
|
||||
let lib_bytes = tokio::task::spawn_blocking(move || {
|
||||
let mut lib_file = archive_clone.by_name(&value_clone[1..value_clone.len()])?;
|
||||
let mut lib_bytes = Vec::new();
|
||||
lib_file.read_to_end(&mut lib_bytes)?;
|
||||
|
||||
Ok::<bytes::Bytes, Error>(bytes::Bytes::from(lib_bytes))
|
||||
}).await??;
|
||||
|
||||
let split = $value.split('/').last();
|
||||
|
||||
if let Some(last) = split {
|
||||
let mut file = last.split('.');
|
||||
|
||||
if let Some(file_name) = file.next() {
|
||||
if let Some(ext) = file.next() {
|
||||
let path = format!("{}:{}@{}", path.as_deref().unwrap_or(&*format!("net.minecraftforge:forge:{}", version)), file_name, ext);
|
||||
$value = format!("[{}]", &path);
|
||||
local_libs.insert(path.clone(), bytes::Bytes::from(lib_bytes));
|
||||
|
||||
libs.push(Library {
|
||||
downloads: None,
|
||||
extract: None,
|
||||
name: path,
|
||||
url: Some("".to_string()),
|
||||
natives: None,
|
||||
rules: None,
|
||||
checksums: None,
|
||||
include_in_classpath: false,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if entry.client.starts_with('/') {
|
||||
read_data!(entry.client);
|
||||
}
|
||||
|
||||
if entry.server.starts_with('/') {
|
||||
read_data!(entry.server);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let now = Instant::now();
|
||||
let libs = futures::future::try_join_all(libs.into_iter().map(|mut lib| async {
|
||||
let artifact_path =
|
||||
daedalus::get_path_from_artifact(&lib.name)?;
|
||||
|
||||
{
|
||||
let mut visited_assets = visited_assets.lock().await;
|
||||
|
||||
if visited_assets.contains(&lib.name) {
|
||||
if let Some(ref mut downloads) = lib.downloads {
|
||||
if let Some(ref mut artifact) = downloads.artifact {
|
||||
artifact.url = format_url(&format!("maven/{}", artifact_path));
|
||||
}
|
||||
} else if lib.url.is_some() {
|
||||
lib.url = Some(format_url("maven/"));
|
||||
}
|
||||
|
||||
return Ok::<Library, Error>(lib);
|
||||
} else {
|
||||
visited_assets.push(lib.name.clone())
|
||||
}
|
||||
}
|
||||
|
||||
let artifact_bytes = if let Some(ref mut downloads) = lib.downloads {
|
||||
if let Some(ref mut artifact) = downloads.artifact {
|
||||
let res = if artifact.url.is_empty() {
|
||||
local_libs.get(&lib.name).cloned()
|
||||
} else {
|
||||
Some(download_file(
|
||||
&artifact.url,
|
||||
Some(&*artifact.sha1),
|
||||
semaphore.clone(),
|
||||
)
|
||||
.await?)
|
||||
};
|
||||
|
||||
if res.is_some() {
|
||||
artifact.url = format_url(&format!("maven/{}", artifact_path));
|
||||
}
|
||||
|
||||
res
|
||||
} else { None }
|
||||
} else if let Some(ref mut url) = lib.url {
|
||||
let res = if url.is_empty() {
|
||||
local_libs.get(&lib.name).cloned()
|
||||
} else {
|
||||
Some(download_file(
|
||||
url,
|
||||
None,
|
||||
semaphore.clone(),
|
||||
)
|
||||
.await?)
|
||||
};
|
||||
|
||||
if res.is_some() {
|
||||
lib.url = Some(format_url("maven/"));
|
||||
}
|
||||
|
||||
res
|
||||
} else { None };
|
||||
|
||||
if let Some(bytes) = artifact_bytes {
|
||||
upload_file_to_bucket(
|
||||
format!("{}/{}", "maven", artifact_path),
|
||||
bytes.to_vec(),
|
||||
Some("application/java-archive".to_string()),
|
||||
uploaded_files_mutex.as_ref(),
|
||||
semaphore.clone(),
|
||||
).await?;
|
||||
}
|
||||
|
||||
Ok::<Library, Error>(lib)
|
||||
})).await?;
|
||||
|
||||
let elapsed = now.elapsed();
|
||||
info!("Elapsed lib DL: {:.2?}", elapsed);
|
||||
|
||||
let new_profile = PartialVersionInfo {
|
||||
id: version_info.id,
|
||||
inherits_from: version_info.inherits_from,
|
||||
release_time: version_info.release_time,
|
||||
time: version_info.time,
|
||||
main_class: version_info.main_class,
|
||||
minecraft_arguments: version_info.minecraft_arguments,
|
||||
arguments: version_info.arguments,
|
||||
libraries: libs,
|
||||
type_: version_info.type_,
|
||||
data: Some(profile.data),
|
||||
processors: Some(profile.processors),
|
||||
};
|
||||
|
||||
let version_path = format!(
|
||||
"neo/v{}/versions/{}.json",
|
||||
daedalus::modded::CURRENT_NEOFORGE_FORMAT_VERSION,
|
||||
new_profile.id
|
||||
);
|
||||
|
||||
upload_file_to_bucket(
|
||||
version_path.clone(),
|
||||
serde_json::to_vec(&new_profile)?,
|
||||
Some("application/json".to_string()),
|
||||
uploaded_files_mutex.as_ref(),
|
||||
semaphore.clone(),
|
||||
).await?;
|
||||
|
||||
return Ok(Some(LoaderVersion {
|
||||
id: loader_version_full,
|
||||
url: format_url(&version_path),
|
||||
stable: false
|
||||
}));
|
||||
}
|
||||
|
||||
Ok(None)
|
||||
}.await
|
||||
});
|
||||
|
||||
{
|
||||
let len = loaders_futures.len();
|
||||
let mut versions = loaders_futures.into_iter().peekable();
|
||||
let mut chunk_index = 0;
|
||||
while versions.peek().is_some() {
|
||||
let now = Instant::now();
|
||||
|
||||
let chunk: Vec<_> = versions.by_ref().take(1).collect();
|
||||
let res = futures::future::try_join_all(chunk).await?;
|
||||
loaders_versions.extend(res.into_iter().flatten());
|
||||
|
||||
chunk_index += 1;
|
||||
|
||||
let elapsed = now.elapsed();
|
||||
info!("Loader Chunk {}/{len} Elapsed: {:.2?}", chunk_index, elapsed);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
versions.lock().await.push(daedalus::modded::Version {
|
||||
id: minecraft_version,
|
||||
stable: true,
|
||||
loaders: loaders_versions
|
||||
});
|
||||
|
||||
Ok::<(), Error>(())
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
let len = version_futures.len();
|
||||
let mut versions = version_futures.into_iter().peekable();
|
||||
let mut chunk_index = 0;
|
||||
while versions.peek().is_some() {
|
||||
let now = Instant::now();
|
||||
|
||||
let chunk: Vec<_> = versions.by_ref().take(1).collect();
|
||||
futures::future::try_join_all(chunk).await?;
|
||||
|
||||
chunk_index += 1;
|
||||
|
||||
let elapsed = now.elapsed();
|
||||
info!("Chunk {}/{len} Elapsed: {:.2?}", chunk_index, elapsed);
|
||||
}
|
||||
}
|
||||
|
||||
if let Ok(versions) = Arc::try_unwrap(versions) {
|
||||
let mut versions = versions.into_inner();
|
||||
|
||||
versions.sort_by(|x, y| {
|
||||
minecraft_versions
|
||||
.versions
|
||||
.iter()
|
||||
.position(|z| x.id == z.id)
|
||||
.unwrap_or_default()
|
||||
.cmp(
|
||||
&minecraft_versions
|
||||
.versions
|
||||
.iter()
|
||||
.position(|z| y.id == z.id)
|
||||
.unwrap_or_default(),
|
||||
)
|
||||
});
|
||||
|
||||
for version in &mut versions {
|
||||
let loader_versions = maven_metadata.get(&version.id);
|
||||
if let Some(loader_versions) = loader_versions {
|
||||
version.loaders.sort_by(|x, y| {
|
||||
loader_versions
|
||||
.iter()
|
||||
.position(|z| y.id == z.1)
|
||||
.unwrap_or_default()
|
||||
.cmp(
|
||||
&loader_versions
|
||||
.iter()
|
||||
.position(|z| x.id == z.1)
|
||||
.unwrap_or_default(),
|
||||
)
|
||||
});
|
||||
version.loaders.reverse();
|
||||
}
|
||||
}
|
||||
|
||||
upload_file_to_bucket(
|
||||
format!(
|
||||
"neo/v{}/manifest.json",
|
||||
daedalus::modded::CURRENT_NEOFORGE_FORMAT_VERSION,
|
||||
),
|
||||
serde_json::to_vec(&Manifest {
|
||||
game_versions: versions,
|
||||
})?,
|
||||
Some("application/json".to_string()),
|
||||
uploaded_files_mutex.as_ref(),
|
||||
semaphore,
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if let Ok(uploaded_files_mutex) = Arc::try_unwrap(uploaded_files_mutex) {
|
||||
uploaded_files.extend(uploaded_files_mutex.into_inner());
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
const DEFAULT_MAVEN_METADATA_URL_1: &str =
|
||||
"https://maven.neoforged.net/net/neoforged/forge/maven-metadata.xml";
|
||||
const DEFAULT_MAVEN_METADATA_URL_2: &str =
|
||||
"https://maven.neoforged.net/net/neoforged/neoforge/maven-metadata.xml";
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct Metadata {
|
||||
versioning: Versioning,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct Versioning {
|
||||
versions: Versions,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct Versions {
|
||||
version: Vec<String>,
|
||||
}
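// Illustrative sketch (not from the upstream diff): the nested structs above
// mirror the shape of maven-metadata.xml, so a trimmed document parses
// directly. The XML snippet is an assumption shaped like the real metadata,
// not a copy of it.
#[cfg(test)]
mod maven_metadata_xml_sketch {
    #[test]
    fn parses_a_minimal_metadata_document() {
        let xml = r#"
            <metadata>
              <versioning>
                <versions>
                  <version>1.20.1-47.2.0</version>
                  <version>1.20.1-47.2.1</version>
                </versions>
              </versioning>
            </metadata>
        "#;

        let parsed: super::Metadata = serde_xml_rs::from_str(xml).unwrap();
        assert_eq!(parsed.versioning.versions.version.len(), 2);
    }
}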
|
||||
|
||||
pub async fn fetch_maven_metadata(
|
||||
semaphore: Arc<Semaphore>,
|
||||
) -> Result<HashMap<String, Vec<(String, String, bool)>>, Error> {
|
||||
async fn fetch_values(
|
||||
url: &str,
|
||||
semaphore: Arc<Semaphore>,
|
||||
) -> Result<Metadata, Error> {
|
||||
Ok(serde_xml_rs::from_str(
|
||||
&String::from_utf8(
|
||||
download_file(url, None, semaphore).await?.to_vec(),
|
||||
)
|
||||
.unwrap_or_default(),
|
||||
)?)
|
||||
}
|
||||
|
||||
let forge_values =
|
||||
fetch_values(DEFAULT_MAVEN_METADATA_URL_1, semaphore.clone()).await?;
|
||||
let neo_values =
|
||||
fetch_values(DEFAULT_MAVEN_METADATA_URL_2, semaphore).await?;
|
||||
|
||||
let mut map: HashMap<String, Vec<(String, String, bool)>> = HashMap::new();
|
||||
|
||||
for value in forge_values.versioning.versions.version {
|
||||
let original = value.clone();
|
||||
|
||||
let parts: Vec<&str> = value.split('-').collect();
|
||||
if parts.len() == 2 {
|
||||
map.entry(parts[0].to_string()).or_default().push((
|
||||
original,
|
||||
parts[1].to_string(),
|
||||
false,
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
for value in neo_values.versioning.versions.version {
|
||||
let original = value.clone();
|
||||
|
||||
let mut parts = value.split('.');
|
||||
|
||||
if let Some(major) = parts.next() {
|
||||
if let Some(minor) = parts.next() {
|
||||
let game_version = format!("1.{}.{}", major, minor);
|
||||
|
||||
map.entry(game_version.clone()).or_default().push((
|
||||
original.clone(),
|
||||
format!("{}-{}", game_version, original),
|
||||
true,
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(map)
|
||||
}
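// Illustrative sketch (not from the upstream diff): the two version formats
// the loops above distinguish. Legacy forge-style versions are
// "<game>-<loader>", while NeoForge versions encode the game version in their
// first two components. The concrete version strings are assumptions.
#[cfg(test)]
mod neoforge_version_parsing_sketch {
    #[test]
    fn splits_forge_and_neoforge_version_strings() {
        // Forge style: one '-' separates game version from loader version.
        let forge = "1.20.1-47.2.0";
        let parts: Vec<&str> = forge.split('-').collect();
        assert_eq!((parts[0], parts[1]), ("1.20.1", "47.2.0"));

        // NeoForge style: "20.4.80" maps to Minecraft "1.20.4".
        let neo = "20.4.80";
        let mut components = neo.split('.');
        let (major, minor) = (components.next().unwrap(), components.next().unwrap());
        assert_eq!(format!("1.{}.{}", major, minor), "1.20.4");
    }
}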
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct ForgeInstallerProfileV2 {
|
||||
pub spec: i32,
|
||||
pub profile: String,
|
||||
pub version: String,
|
||||
pub json: String,
|
||||
pub path: Option<String>,
|
||||
pub minecraft: String,
|
||||
pub data: HashMap<String, SidedDataEntry>,
|
||||
pub libraries: Vec<Library>,
|
||||
pub processors: Vec<Processor>,
|
||||
}
|
||||
@@ -1,370 +0,0 @@
|
||||
use crate::{download_file, format_url, upload_file_to_bucket, Error};
|
||||
use daedalus::minecraft::{Library, VersionManifest};
|
||||
use daedalus::modded::{
|
||||
LoaderVersion, Manifest, PartialVersionInfo, Version, DUMMY_REPLACE_STRING,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::{Mutex, RwLock, Semaphore};
|
||||
|
||||
pub async fn retrieve_data(
|
||||
minecraft_versions: &VersionManifest,
|
||||
uploaded_files: &mut Vec<String>,
|
||||
semaphore: Arc<Semaphore>,
|
||||
) -> Result<(), Error> {
|
||||
let list = fetch_quilt_versions(None, semaphore.clone()).await?;
|
||||
let old_manifest = daedalus::modded::fetch_manifest(&format_url(&format!(
|
||||
"quilt/v{}/manifest.json",
|
||||
daedalus::modded::CURRENT_QUILT_FORMAT_VERSION,
|
||||
)))
|
||||
.await
|
||||
.ok();
|
||||
|
||||
let mut versions = if let Some(old_manifest) = old_manifest {
|
||||
old_manifest.game_versions
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
|
||||
let loaders_mutex = RwLock::new(Vec::new());
|
||||
|
||||
{
|
||||
let mut loaders = loaders_mutex.write().await;
|
||||
|
||||
for (index, loader) in list.loader.iter().enumerate() {
|
||||
if versions.iter().any(|x| {
|
||||
x.id == DUMMY_REPLACE_STRING
|
||||
&& x.loaders.iter().any(|x| x.id == loader.version)
|
||||
}) {
|
||||
if index == 0 {
|
||||
loaders.push((
|
||||
Box::new(false),
|
||||
loader.version.clone(),
|
||||
Box::new(true),
|
||||
))
|
||||
}
|
||||
} else {
|
||||
loaders.push((
|
||||
Box::new(false),
|
||||
loader.version.clone(),
|
||||
Box::new(false),
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const DUMMY_GAME_VERSION: &str = "1.19.4-rc2";
|
||||
|
||||
let loader_version_mutex = Mutex::new(Vec::new());
|
||||
let uploaded_files_mutex = Arc::new(Mutex::new(Vec::new()));
|
||||
|
||||
let loader_versions = futures::future::try_join_all(
|
||||
loaders_mutex.read().await.clone().into_iter().map(
|
||||
|(stable, loader, skip_upload)| async {
|
||||
let version = fetch_quilt_version(
|
||||
DUMMY_GAME_VERSION,
|
||||
&loader,
|
||||
semaphore.clone(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok::<(Box<bool>, String, PartialVersionInfo, Box<bool>), Error>(
|
||||
(stable, loader, version, skip_upload),
|
||||
)
|
||||
},
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
|
||||
let visited_artifacts_mutex = Arc::new(Mutex::new(Vec::new()));
|
||||
futures::future::try_join_all(loader_versions.into_iter()
|
||||
.map(
|
||||
|(stable, loader, version, skip_upload)| async {
|
||||
let libs = futures::future::try_join_all(
|
||||
version.libraries.into_iter().map(|mut lib| async {
|
||||
{
|
||||
let mut visited_assets =
|
||||
visited_artifacts_mutex.lock().await;
|
||||
|
||||
if visited_assets.contains(&lib.name) {
|
||||
lib.name = lib.name.replace(DUMMY_GAME_VERSION, DUMMY_REPLACE_STRING);
|
||||
lib.url = Some(format_url("maven/"));
|
||||
|
||||
return Ok(lib);
|
||||
} else {
|
||||
visited_assets.push(lib.name.clone())
|
||||
}
|
||||
}
|
||||
|
||||
if lib.name.contains(DUMMY_GAME_VERSION) {
|
||||
lib.name = lib.name.replace(DUMMY_GAME_VERSION, DUMMY_REPLACE_STRING);
|
||||
futures::future::try_join_all(list.game.clone().into_iter().map(|game_version| async {
|
||||
let semaphore = semaphore.clone();
|
||||
let uploaded_files_mutex = uploaded_files_mutex.clone();
|
||||
let lib_name = lib.name.clone();
|
||||
let lib_url = lib.url.clone();
|
||||
|
||||
async move {
|
||||
let artifact_path =
|
||||
daedalus::get_path_from_artifact(&lib_name.replace(DUMMY_REPLACE_STRING, &game_version.version))?;
|
||||
|
||||
let artifact = download_file(
|
||||
&format!(
|
||||
"{}{}",
|
||||
lib_url.unwrap_or_else(|| {
|
||||
"https://maven.quiltmc.org/".to_string()
|
||||
}),
|
||||
artifact_path
|
||||
),
|
||||
None,
|
||||
semaphore.clone(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
upload_file_to_bucket(
|
||||
format!("{}/{}", "maven", artifact_path),
|
||||
artifact.to_vec(),
|
||||
Some("application/java-archive".to_string()),
|
||||
&uploaded_files_mutex,
|
||||
semaphore.clone(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok::<(), Error>(())
|
||||
}.await?;
|
||||
|
||||
Ok::<(), Error>(())
|
||||
})).await?;
|
||||
lib.url = Some(format_url("maven/"));
|
||||
|
||||
return Ok(lib);
|
||||
}
|
||||
|
||||
let artifact_path =
|
||||
daedalus::get_path_from_artifact(&lib.name)?;
|
||||
|
||||
let artifact = download_file(
|
||||
&format!(
|
||||
"{}{}",
|
||||
lib.url.unwrap_or_else(|| {
|
||||
"https://maven.quiltmc.org/".to_string()
|
||||
}),
|
||||
artifact_path
|
||||
),
|
||||
None,
|
||||
semaphore.clone(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
lib.url = Some(format_url("maven/"));
|
||||
|
||||
upload_file_to_bucket(
|
||||
format!("{}/{}", "maven", artifact_path),
|
||||
artifact.to_vec(),
|
||||
Some("application/java-archive".to_string()),
|
||||
&uploaded_files_mutex,
|
||||
semaphore.clone(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok::<Library, Error>(lib)
|
||||
}),
|
||||
)
|
||||
.await?;
|
||||
|
||||
if async move {
|
||||
*skip_upload
|
||||
}.await {
|
||||
return Ok::<(), Error>(())
|
||||
}
|
||||
|
||||
let version_path = format!(
|
||||
"quilt/v{}/versions/{}.json",
|
||||
daedalus::modded::CURRENT_QUILT_FORMAT_VERSION,
|
||||
&loader
|
||||
);
|
||||
|
||||
upload_file_to_bucket(
|
||||
version_path.clone(),
|
||||
serde_json::to_vec(&PartialVersionInfo {
|
||||
arguments: version.arguments,
|
||||
id: version
|
||||
.id
|
||||
.replace(DUMMY_GAME_VERSION, DUMMY_REPLACE_STRING),
|
||||
main_class: version.main_class,
|
||||
release_time: version.release_time,
|
||||
time: version.time,
|
||||
type_: version.type_,
|
||||
inherits_from: version
|
||||
.inherits_from
|
||||
.replace(DUMMY_GAME_VERSION, DUMMY_REPLACE_STRING),
|
||||
libraries: libs,
|
||||
minecraft_arguments: version.minecraft_arguments,
|
||||
processors: None,
|
||||
data: None,
|
||||
})?,
|
||||
Some("application/json".to_string()),
|
||||
&uploaded_files_mutex,
|
||||
semaphore.clone(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
{
|
||||
let mut loader_version_map = loader_version_mutex.lock().await;
|
||||
async move {
|
||||
loader_version_map.push(LoaderVersion {
|
||||
id: loader.to_string(),
|
||||
url: format_url(&version_path),
|
||||
stable: *stable,
|
||||
});
|
||||
}
|
||||
.await;
|
||||
}
|
||||
|
||||
Ok::<(), Error>(())
|
||||
},
|
||||
))
|
||||
.await?;
|
||||
|
||||
let mut loader_version_mutex = loader_version_mutex.into_inner();
|
||||
if !loader_version_mutex.is_empty() {
|
||||
if let Some(version) =
|
||||
versions.iter_mut().find(|x| x.id == DUMMY_REPLACE_STRING)
|
||||
{
|
||||
version.loaders.append(&mut loader_version_mutex);
|
||||
} else {
|
||||
versions.push(Version {
|
||||
id: DUMMY_REPLACE_STRING.to_string(),
|
||||
stable: true,
|
||||
loaders: loader_version_mutex,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
for version in &list.game {
|
||||
if !versions.iter().any(|x| x.id == version.version) {
|
||||
versions.push(Version {
|
||||
id: version.version.clone(),
|
||||
stable: version.stable,
|
||||
loaders: vec![],
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
versions.sort_by(|x, y| {
|
||||
minecraft_versions
|
||||
.versions
|
||||
.iter()
|
||||
.position(|z| x.id == z.id)
|
||||
.unwrap_or_default()
|
||||
.cmp(
|
||||
&minecraft_versions
|
||||
.versions
|
||||
.iter()
|
||||
.position(|z| y.id == z.id)
|
||||
.unwrap_or_default(),
|
||||
)
|
||||
});
|
||||
|
||||
for version in &mut versions {
|
||||
version.loaders.sort_by(|x, y| {
|
||||
list.loader
|
||||
.iter()
|
||||
.position(|z| x.id == *z.version)
|
||||
.unwrap_or_default()
|
||||
.cmp(
|
||||
&list
|
||||
.loader
|
||||
.iter()
|
||||
.position(|z| y.id == z.version)
|
||||
.unwrap_or_default(),
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
upload_file_to_bucket(
|
||||
format!(
|
||||
"quilt/v{}/manifest.json",
|
||||
daedalus::modded::CURRENT_QUILT_FORMAT_VERSION,
|
||||
),
|
||||
serde_json::to_vec(&Manifest {
|
||||
game_versions: versions,
|
||||
})?,
|
||||
Some("application/json".to_string()),
|
||||
&uploaded_files_mutex,
|
||||
semaphore,
|
||||
)
|
||||
.await?;
|
||||
|
||||
if let Ok(uploaded_files_mutex) = Arc::try_unwrap(uploaded_files_mutex) {
|
||||
uploaded_files.extend(uploaded_files_mutex.into_inner());
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
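// Illustrative sketch (not from the upstream diff): the placeholder
// substitution performed in the function above, where library names resolved
// against DUMMY_GAME_VERSION are rewritten to a template and later expanded
// for each real game version. The placeholder text and version strings are
// assumptions made for the example.
#[cfg(test)]
mod dummy_replace_sketch {
    #[test]
    fn round_trips_the_game_version_placeholder() {
        let dummy_game_version = "1.19.4-rc2";
        let dummy_replace_string = "${modrinth.gameVersion}"; // assumed placeholder text

        let resolved = format!("org.quiltmc:hashed:{dummy_game_version}");
        let templated = resolved.replace(dummy_game_version, dummy_replace_string);
        let expanded = templated.replace(dummy_replace_string, "1.20.1");

        assert_eq!(templated, "org.quiltmc:hashed:${modrinth.gameVersion}");
        assert_eq!(expanded, "org.quiltmc:hashed:1.20.1");
    }
}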
|
||||
|
||||
const QUILT_META_URL: &str = "https://meta.quiltmc.org/v3";
|
||||
|
||||
async fn fetch_quilt_version(
|
||||
version_number: &str,
|
||||
loader_version: &str,
|
||||
semaphore: Arc<Semaphore>,
|
||||
) -> Result<PartialVersionInfo, Error> {
|
||||
Ok(serde_json::from_slice(
|
||||
&download_file(
|
||||
&format!(
|
||||
"{}/versions/loader/{}/{}/profile/json",
|
||||
QUILT_META_URL, version_number, loader_version
|
||||
),
|
||||
None,
|
||||
semaphore,
|
||||
)
|
||||
.await?,
|
||||
)?)
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
/// Versions of quilt components
|
||||
struct QuiltVersions {
|
||||
/// Versions of Minecraft that quilt supports
|
||||
pub game: Vec<QuiltGameVersion>,
|
||||
/// Available versions of the quilt loader
|
||||
pub loader: Vec<QuiltLoaderVersion>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
/// A version of Minecraft that quilt supports
|
||||
struct QuiltGameVersion {
|
||||
/// The version number of the game
|
||||
pub version: String,
|
||||
/// Whether the Minecraft version is stable or not
|
||||
pub stable: bool,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
/// A version of the quilt loader
|
||||
struct QuiltLoaderVersion {
|
||||
/// The separator to get the build number
|
||||
pub separator: String,
|
||||
/// The build number
|
||||
pub build: u32,
|
||||
/// The maven artifact
|
||||
pub maven: String,
|
||||
/// The version number of the quilt loader
|
||||
pub version: String,
|
||||
}
|
||||
|
||||
/// Fetches the list of quilt versions
|
||||
async fn fetch_quilt_versions(
|
||||
url: Option<&str>,
|
||||
semaphore: Arc<Semaphore>,
|
||||
) -> Result<QuiltVersions, Error> {
|
||||
Ok(serde_json::from_slice(
|
||||
&download_file(
|
||||
url.unwrap_or(&*format!("{}/versions", QUILT_META_URL)),
|
||||
None,
|
||||
semaphore,
|
||||
)
|
||||
.await?,
|
||||
)?)
|
||||
}
|
||||
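For orientation, here is a minimal standalone sketch of the two Quilt meta endpoints the functions above consume (the version list and a per-version launch profile). It is illustrative only and not part of this commit: the struct names, the sample game version 1.20.1, the newest-first ordering assumption, and the tokio, reqwest (with the json feature), and serde dependencies are all assumptions.

use serde::Deserialize;

// Hypothetical, trimmed-down mirrors of the QuiltVersions/QuiltLoaderVersion
// structs above; only the fields this probe needs are kept.
#[derive(Deserialize, Debug)]
struct Versions {
    loader: Vec<LoaderEntry>,
}

#[derive(Deserialize, Debug)]
struct LoaderEntry {
    version: String,
}

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    const QUILT_META_URL: &str = "https://meta.quiltmc.org/v3";

    // Same endpoint fetch_quilt_versions hits; deserialize only the loader list.
    let versions: Versions = reqwest::get(format!("{QUILT_META_URL}/versions"))
        .await?
        .json()
        .await?;

    // Assumes the meta service lists loader versions newest-first.
    let loader = &versions.loader.first().expect("no loader versions").version;

    // Same endpoint shape fetch_quilt_version uses; "1.20.1" is only a sample
    // game version for illustration.
    let profile = reqwest::get(format!(
        "{QUILT_META_URL}/versions/loader/1.20.1/{loader}/profile/json"
    ))
    .await?
    .text()
    .await?;

    println!("profile JSON is {} bytes", profile.len());
    Ok(())
}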
369
daedalus_client/src/util.rs
Normal file
@@ -0,0 +1,369 @@
use crate::{Error, ErrorKind};
use bytes::{Bytes, BytesMut};
use futures::StreamExt;
use s3::creds::Credentials;
use s3::{Bucket, Region};
use serde::de::DeserializeOwned;
use std::sync::Arc;
use tokio::sync::Semaphore;

lazy_static::lazy_static! {
    static ref BUCKET : Bucket = {
        let region = dotenvy::var("S3_REGION").unwrap();
        let b = Bucket::new(
            &dotenvy::var("S3_BUCKET_NAME").unwrap(),
            if &*region == "r2" {
                Region::R2 {
                    account_id: dotenvy::var("S3_URL").unwrap(),
                }
            } else {
                Region::Custom {
                    region: region.clone(),
                    endpoint: dotenvy::var("S3_URL").unwrap(),
                }
            },
            Credentials::new(
                Some(&*dotenvy::var("S3_ACCESS_TOKEN").unwrap()),
                Some(&*dotenvy::var("S3_SECRET").unwrap()),
                None,
                None,
                None,
            ).unwrap(),
        ).unwrap();

        if region == "path-style" {
            b.with_path_style()
        } else {
            b
        }
    };
}

lazy_static::lazy_static! {
    pub static ref REQWEST_CLIENT: reqwest::Client = {
        let mut headers = reqwest::header::HeaderMap::new();
        if let Ok(header) = reqwest::header::HeaderValue::from_str(&format!(
            "modrinth/daedalus/{} (support@modrinth.com)",
            env!("CARGO_PKG_VERSION")
        )) {
            headers.insert(reqwest::header::USER_AGENT, header);
        }

        reqwest::Client::builder()
            .tcp_keepalive(Some(std::time::Duration::from_secs(10)))
            .timeout(std::time::Duration::from_secs(15))
            .default_headers(headers)
            .build()
            .unwrap()
    };
}

#[tracing::instrument(skip(bytes, semaphore))]
pub async fn upload_file_to_bucket(
    path: String,
    bytes: Bytes,
    content_type: Option<String>,
    semaphore: &Arc<Semaphore>,
) -> Result<(), Error> {
    let _permit = semaphore.acquire().await?;
    let key = path.clone();

    const RETRIES: i32 = 3;
    for attempt in 1..=(RETRIES + 1) {
        tracing::trace!("Attempting file upload, attempt {attempt}");
        let result = if let Some(ref content_type) = content_type {
            BUCKET
                .put_object_with_content_type(key.clone(), &bytes, content_type)
                .await
        } else {
            BUCKET.put_object(key.clone(), &bytes).await
        }
        .map_err(|err| ErrorKind::S3 {
            inner: err,
            file: path.clone(),
        });

        match result {
            Ok(_) => return Ok(()),
            Err(_) if attempt <= RETRIES => continue,
            Err(_) => {
                result?;
            }
        }
    }
    unreachable!()
}

pub async fn upload_url_to_bucket_mirrors(
    base: String,
    mirrors: Vec<String>,
    semaphore: &Arc<Semaphore>,
) -> Result<(), Error> {
    if mirrors.is_empty() {
        return Err(ErrorKind::InvalidInput(
            "No mirrors provided!".to_string(),
        )
        .into());
    }

    for (index, mirror) in mirrors.iter().enumerate() {
        let result = upload_url_to_bucket(
            &base,
            &format!("{}{}", mirror, base),
            semaphore,
        )
        .await;

        if result.is_ok() || (result.is_err() && index == (mirrors.len() - 1)) {
            return result;
        }
    }

    unreachable!()
}

#[tracing::instrument(skip(semaphore))]
pub async fn upload_url_to_bucket(
    path: &str,
    url: &str,
    semaphore: &Arc<Semaphore>,
) -> Result<(), Error> {
    let _permit = semaphore.acquire().await?;

    const RETRIES: i32 = 3;
    for attempt in 1..=(RETRIES + 1) {
        tracing::trace!("Attempting streaming file upload, attempt {attempt}");

        let result: Result<(), Error> = {
            let response =
                REQWEST_CLIENT.get(url).send().await.map_err(|err| {
                    ErrorKind::Fetch {
                        inner: err,
                        item: url.to_string(),
                    }
                })?;

            let content_type = response
                .headers()
                .get(reqwest::header::CONTENT_TYPE)
                .and_then(|ct| ct.to_str().ok())
                .unwrap_or("application/octet-stream")
                .to_string();

            let total_size = response.content_length().unwrap_or(0);

            const MIN_PART_SIZE: usize = 5 * 1024 * 1024;

            if total_size < MIN_PART_SIZE as u64 {
                let data =
                    response.bytes().await.map_err(|err| ErrorKind::Fetch {
                        inner: err,
                        item: url.to_string(),
                    })?;
                BUCKET.put_object(&path, &data).await.map_err(|err| {
                    ErrorKind::S3 {
                        inner: err,
                        file: path.to_string(),
                    }
                })?;
            } else {
                let mut stream = response.bytes_stream();

                let multipart = BUCKET
                    .initiate_multipart_upload(path, &content_type)
                    .await
                    .map_err(|err| ErrorKind::S3 {
                        inner: err,
                        file: path.to_string(),
                    })?;

                let mut parts = Vec::new();
                let mut buffer = BytesMut::new();

                async fn upload_part(
                    parts: &mut Vec<s3::serde_types::Part>,
                    buffer: Vec<u8>,
                    path: &str,
                    upload_id: &str,
                    content_type: &str,
                ) -> Result<(), Error> {
                    let part = BUCKET
                        .put_multipart_chunk(
                            buffer,
                            path,
                            (parts.len() + 1) as u32,
                            upload_id,
                            content_type,
                        )
                        .await
                        .map_err(|err| ErrorKind::S3 {
                            inner: err,
                            file: path.to_string(),
                        })?;

                    parts.push(part);

                    Ok(())
                }

                while let Some(chunk) = stream.next().await {
                    let chunk = chunk.map_err(|err| ErrorKind::Fetch {
                        inner: err,
                        item: url.to_string(),
                    })?;

                    buffer.extend_from_slice(&chunk);

                    if buffer.len() >= MIN_PART_SIZE {
                        upload_part(
                            &mut parts,
                            buffer.to_vec(),
                            path,
                            &multipart.upload_id,
                            &content_type,
                        )
                        .await?;
                        buffer.clear();
                    }
                }

                if !buffer.is_empty() {
                    let part = BUCKET
                        .put_multipart_chunk(
                            buffer.to_vec(),
                            path,
                            (parts.len() + 1) as u32,
                            &multipart.upload_id,
                            &content_type,
                        )
                        .await
                        .map_err(|err| ErrorKind::S3 {
                            inner: err,
                            file: path.to_string(),
                        })?;

                    parts.push(part);
                }

                BUCKET
                    .complete_multipart_upload(
                        path,
                        &multipart.upload_id,
                        parts,
                    )
                    .await
                    .map_err(|err| ErrorKind::S3 {
                        inner: err,
                        file: path.to_string(),
                    })?;
            }

            Ok(())
        };

        match result {
            Ok(_) => return Ok(()),
            Err(_) if attempt <= RETRIES => continue,
            Err(_) => {
                result?;
            }
        }
    }
    unreachable!()
}

#[tracing::instrument(skip(bytes))]
pub async fn sha1_async(bytes: Bytes) -> Result<String, Error> {
    let hash = tokio::task::spawn_blocking(move || {
        sha1_smol::Sha1::from(bytes).hexdigest()
    })
    .await?;

    Ok(hash)
}

#[tracing::instrument(skip(semaphore))]
pub async fn download_file(
    url: &str,
    sha1: Option<&str>,
    semaphore: &Arc<Semaphore>,
) -> Result<bytes::Bytes, crate::Error> {
    let _permit = semaphore.acquire().await?;
    tracing::trace!("Starting file download");

    const RETRIES: u32 = 10;
    for attempt in 1..=(RETRIES + 1) {
        let result = REQWEST_CLIENT
            .get(url)
            .send()
            .await
            .and_then(|x| x.error_for_status());

        match result {
            Ok(x) => {
                let bytes = x.bytes().await;

                if let Ok(bytes) = bytes {
                    if let Some(sha1) = sha1 {
                        if &*sha1_async(bytes.clone()).await? != sha1 {
                            if attempt <= 3 {
                                continue;
                            } else {
                                return Err(
                                    crate::ErrorKind::ChecksumFailure {
                                        hash: sha1.to_string(),
                                        url: url.to_string(),
                                        tries: attempt,
                                    }
                                    .into(),
                                );
                            }
                        }
                    }

                    return Ok(bytes);
                } else if attempt <= RETRIES {
                    continue;
                } else if let Err(err) = bytes {
                    return Err(crate::ErrorKind::Fetch {
                        inner: err,
                        item: url.to_string(),
                    }
                    .into());
                }
            }
            Err(_) if attempt <= RETRIES => continue,
            Err(err) => {
                return Err(crate::ErrorKind::Fetch {
                    inner: err,
                    item: url.to_string(),
                }
                .into())
            }
        }
    }

    unreachable!()
}

pub async fn fetch_json<T: DeserializeOwned>(
    url: &str,
    semaphore: &Arc<Semaphore>,
) -> Result<T, Error> {
    Ok(serde_json::from_slice(
        &download_file(url, None, semaphore).await?,
    )?)
}

pub async fn fetch_xml<T: DeserializeOwned>(
    url: &str,
    semaphore: &Arc<Semaphore>,
) -> Result<T, Error> {
    Ok(serde_xml_rs::from_reader(
        &*download_file(url, None, semaphore).await?,
    )?)
}

pub fn format_url(path: &str) -> String {
    format!("{}/{}", &*dotenvy::var("BASE_URL").unwrap(), path)
}
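As a standalone illustration of the pattern util.rs applies throughout: every network call first takes a semaphore permit to bound concurrency, then retries a fixed number of times before surfacing the last error. This is a simplified sketch, not the crate's API; the helper name download_with_retries, the permit count of 10, and the tokio, reqwest, and bytes dependencies are assumptions for the example.

use std::sync::Arc;
use tokio::sync::Semaphore;

// Simplified stand-in for download_file above: take a permit, then retry a
// bounded number of times.
async fn download_with_retries(
    url: &str,
    semaphore: &Arc<Semaphore>,
) -> Result<bytes::Bytes, reqwest::Error> {
    // Hold the permit for the whole request so parallel downloads stay bounded.
    let _permit = semaphore.acquire().await.expect("semaphore closed");

    const RETRIES: u32 = 3;
    let mut last_err = None;
    for _attempt in 0..=RETRIES {
        match reqwest::get(url).await.and_then(|r| r.error_for_status()) {
            Ok(response) => return response.bytes().await,
            Err(err) => last_err = Some(err),
        }
    }
    Err(last_err.expect("at least one attempt ran"))
}

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // 10 permits is an arbitrary example of a concurrency limit.
    let semaphore = Arc::new(Semaphore::new(10));
    let body = download_with_retries(
        "https://meta.quiltmc.org/v3/versions",
        &semaphore,
    )
    .await?;
    println!("fetched {} bytes", body.len());
    Ok(())
}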