Start monorepo migration

This commit is contained in:
Jai A
2024-10-19 14:40:58 -07:00
parent 679ffbcce7
commit f212fcf892
31 changed files with 69 additions and 220 deletions


@@ -0,0 +1,63 @@
use tracing_error::InstrumentError;
#[derive(thiserror::Error, Debug)]
pub enum ErrorKind {
#[error("Daedalus Error: {0}")]
Daedalus(#[from] daedalus::Error),
#[error("Invalid input: {0}")]
InvalidInput(String),
#[error("Error while managing asynchronous tasks")]
TaskError(#[from] tokio::task::JoinError),
#[error("Error while deserializing JSON: {0}")]
SerdeJSON(#[from] serde_json::Error),
#[error("Error while deserializing XML: {0}")]
SerdeXML(#[from] serde_xml_rs::Error),
#[error("Failed to validate file checksum at url {url} with hash {hash} after {tries} tries")]
ChecksumFailure {
hash: String,
url: String,
tries: u32,
},
#[error("Unable to fetch {item}")]
Fetch { inner: reqwest::Error, item: String },
#[error("Error while uploading file to S3: {file}")]
S3 {
inner: s3::error::S3Error,
file: String,
},
#[error("Error acquiring semaphore: {0}")]
Acquire(#[from] tokio::sync::AcquireError),
#[error("Tracing error: {0}")]
Tracing(#[from] tracing::subscriber::SetGlobalDefaultError),
#[error("Zip error: {0}")]
Zip(#[from] async_zip::error::ZipError),
}
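// Wraps `ErrorKind` in a `TracedError`, capturing the tracing span that is active
// when the error is converted so span context travels with the error.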
#[derive(Debug)]
pub struct Error {
pub source: tracing_error::TracedError<ErrorKind>,
}
impl std::fmt::Display for Error {
fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(fmt, "{}", self.source)
}
}
impl<E: Into<ErrorKind>> From<E> for Error {
fn from(source: E) -> Self {
let error = Into::<ErrorKind>::into(source);
Self {
source: error.in_current_span(),
}
}
}
impl ErrorKind {
pub fn as_error(self) -> Error {
self.into()
}
}
pub type Result<T> = core::result::Result<T, Error>;
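// Illustrative usage (not part of this commit): with the blanket `From` impl above,
// `?` converts any `Into<ErrorKind>` error and records the current span:
//
//     fn parse_manifest(raw: &[u8]) -> Result<serde_json::Value> {
//         Ok(serde_json::from_slice(raw)?) // serde_json::Error -> ErrorKind::SerdeJSON -> Error
//     }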


@@ -0,0 +1,301 @@
use crate::util::{download_file, fetch_json, format_url};
use crate::{insert_mirrored_artifact, Error, MirrorArtifact, UploadFile};
use daedalus::modded::{Manifest, PartialVersionInfo, DUMMY_REPLACE_STRING};
use dashmap::DashMap;
use serde::Deserialize;
use std::sync::Arc;
use tokio::sync::Semaphore;
#[tracing::instrument(skip(semaphore, upload_files, mirror_artifacts))]
pub async fn fetch_fabric(
semaphore: Arc<Semaphore>,
upload_files: &DashMap<String, UploadFile>,
mirror_artifacts: &DashMap<String, MirrorArtifact>,
) -> Result<(), Error> {
fetch(
daedalus::modded::CURRENT_FABRIC_FORMAT_VERSION,
"fabric",
"https://meta.fabricmc.net/v2",
"https://maven.fabricmc.net/",
&[],
semaphore,
upload_files,
mirror_artifacts,
)
.await
}
#[tracing::instrument(skip(semaphore, upload_files, mirror_artifacts))]
pub async fn fetch_quilt(
semaphore: Arc<Semaphore>,
upload_files: &DashMap<String, UploadFile>,
mirror_artifacts: &DashMap<String, MirrorArtifact>,
) -> Result<(), Error> {
fetch(
daedalus::modded::CURRENT_QUILT_FORMAT_VERSION,
"quilt",
"https://meta.quiltmc.org/v3",
"https://maven.quiltmc.org/repository/release/",
&[
// This version is broken as it contains invalid library coordinates
"0.17.5-beta.4",
],
semaphore,
upload_files,
mirror_artifacts,
)
.await
}
#[allow(clippy::too_many_arguments)]
#[tracing::instrument(skip(semaphore, upload_files, mirror_artifacts))]
async fn fetch(
format_version: usize,
mod_loader: &str,
meta_url: &str,
maven_url: &str,
skip_versions: &[&str],
semaphore: Arc<Semaphore>,
upload_files: &DashMap<String, UploadFile>,
mirror_artifacts: &DashMap<String, MirrorArtifact>,
) -> Result<(), Error> {
let modrinth_manifest = fetch_json::<Manifest>(
&format_url(&format!("{mod_loader}/v{format_version}/manifest.json",)),
&semaphore,
)
.await
.ok();
let fabric_manifest = fetch_json::<FabricVersions>(
&format!("{meta_url}/versions"),
&semaphore,
)
.await?;
// We compare against Modrinth's existing fabric manifest to find loader versions that are not yet in Modrinth's database
// We also collect newly added intermediary versions so they can be queried and mirrored
let (fetch_fabric_versions, fetch_intermediary_versions) =
if let Some(modrinth_manifest) = modrinth_manifest {
let (mut fetch_versions, mut fetch_intermediary_versions) =
(Vec::new(), Vec::new());
for version in &fabric_manifest.loader {
if !modrinth_manifest
.game_versions
.iter()
.any(|x| x.loaders.iter().any(|x| x.id == version.version))
&& !skip_versions.contains(&&*version.version)
{
fetch_versions.push(version);
}
}
for version in &fabric_manifest.intermediary {
if !modrinth_manifest
.game_versions
.iter()
.any(|x| x.id == version.version)
&& fabric_manifest
.game
.iter()
.any(|x| x.version == version.version)
{
fetch_intermediary_versions.push(version);
}
}
(fetch_versions, fetch_intermediary_versions)
} else {
(
fabric_manifest
.loader
.iter()
.filter(|x| !skip_versions.contains(&&*x.version))
.collect(),
fabric_manifest.intermediary.iter().collect(),
)
};
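// Loader profiles are effectively identical across game versions apart from the
// intermediary coordinate, so we fetch a single profile against a placeholder game
// version and swap the placeholder for DUMMY_REPLACE_STRING below.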
const DUMMY_GAME_VERSION: &str = "1.21";
if !fetch_intermediary_versions.is_empty() {
for x in &fetch_intermediary_versions {
insert_mirrored_artifact(
&x.maven,
None,
vec![maven_url.to_string()],
false,
mirror_artifacts,
)?;
}
}
if !fetch_fabric_versions.is_empty() {
let fabric_version_manifest_urls = fetch_fabric_versions
.iter()
.map(|x| {
format!(
"{}/versions/loader/{}/{}/profile/json",
meta_url, DUMMY_GAME_VERSION, x.version
)
})
.collect::<Vec<_>>();
let fabric_version_manifests = futures::future::try_join_all(
fabric_version_manifest_urls
.iter()
.map(|x| download_file(x, None, &semaphore)),
)
.await?
.into_iter()
.map(|x| serde_json::from_slice(&x))
.collect::<Result<Vec<PartialVersionInfo>, serde_json::Error>>()?;
let patched_version_manifests = fabric_version_manifests
.into_iter()
.map(|mut version_info| {
for lib in &mut version_info.libraries {
let new_name = lib
.name
.replace(DUMMY_GAME_VERSION, DUMMY_REPLACE_STRING);
// Hard-coded exception: this library is not present on Fabric's maven, so we fetch it from Minecraft's libraries
if &*lib.name == "net.minecraft:launchwrapper:1.12" {
lib.url = Some(
"https://libraries.minecraft.net/".to_string(),
);
}
// If the library is not the intermediary (its name contains no game version), register it to be mirrored
if lib.name == new_name {
insert_mirrored_artifact(
&new_name,
None,
vec![lib
.url
.clone()
.unwrap_or_else(|| maven_url.to_string())],
false,
mirror_artifacts,
)?;
} else {
lib.name = new_name;
}
lib.url = Some(format_url("maven/"));
}
version_info.id = version_info
.id
.replace(DUMMY_GAME_VERSION, DUMMY_REPLACE_STRING);
version_info.inherits_from = version_info
.inherits_from
.replace(DUMMY_GAME_VERSION, DUMMY_REPLACE_STRING);
Ok(version_info)
})
.collect::<Result<Vec<_>, Error>>()?;
let serialized_version_manifests = patched_version_manifests
.iter()
.map(|x| serde_json::to_vec(x).map(bytes::Bytes::from))
.collect::<Result<Vec<_>, serde_json::Error>>()?;
serialized_version_manifests
.into_iter()
.enumerate()
.for_each(|(index, bytes)| {
let loader = fetch_fabric_versions[index];
let version_path = format!(
"{mod_loader}/v{format_version}/versions/{}.json",
loader.version
);
upload_files.insert(
version_path,
UploadFile {
file: bytes,
content_type: Some("application/json".to_string()),
},
);
});
}
if !fetch_fabric_versions.is_empty()
|| !fetch_intermediary_versions.is_empty()
{
let fabric_manifest_path =
format!("{mod_loader}/v{format_version}/manifest.json",);
let loader_versions = daedalus::modded::Version {
id: DUMMY_REPLACE_STRING.to_string(),
stable: true,
loaders: fabric_manifest
.loader
.into_iter()
.map(|x| {
let version_path = format!(
"{mod_loader}/v{format_version}/versions/{}.json",
x.version,
);
daedalus::modded::LoaderVersion {
id: x.version,
url: format_url(&version_path),
stable: x.stable,
}
})
.collect(),
};
let manifest = daedalus::modded::Manifest {
game_versions: std::iter::once(loader_versions)
.chain(fabric_manifest.game.into_iter().map(|x| {
daedalus::modded::Version {
id: x.version,
stable: x.stable,
loaders: vec![],
}
}))
.collect(),
};
upload_files.insert(
fabric_manifest_path,
UploadFile {
file: bytes::Bytes::from(serde_json::to_vec(&manifest)?),
content_type: Some("application/json".to_string()),
},
);
}
Ok(())
}
#[derive(Deserialize, Debug, Clone)]
struct FabricVersions {
pub loader: Vec<FabricLoaderVersion>,
pub game: Vec<FabricGameVersion>,
#[serde(alias = "hashed")]
pub intermediary: Vec<FabricIntermediaryVersion>,
}
#[derive(Deserialize, Debug, Clone)]
struct FabricLoaderVersion {
// pub separator: String,
// pub build: u32,
// pub maven: String,
pub version: String,
#[serde(default)]
pub stable: bool,
}
#[derive(Deserialize, Debug, Clone)]
struct FabricIntermediaryVersion {
pub maven: String,
pub version: String,
}
#[derive(Deserialize, Debug, Clone)]
struct FabricGameVersion {
pub version: String,
pub stable: bool,
}
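// For reference, the `{meta_url}/versions` response deserialized into `FabricVersions`
// has roughly this shape (abridged, illustrative values); Quilt reports "hashed"
// instead of "intermediary", hence the serde alias above:
//
//     {
//       "game":         [{ "version": "1.21", "stable": true }],
//       "intermediary": [{ "maven": "net.fabricmc:intermediary:1.21", "version": "1.21" }],
//       "loader":       [{ "version": "0.16.0", "stable": true }]
//     }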


@@ -0,0 +1,793 @@
use crate::util::{
download_file, fetch_json, fetch_xml, format_url,
};
use crate::{insert_mirrored_artifact, Error, MirrorArtifact, UploadFile};
use chrono::{DateTime, Utc};
use daedalus::get_path_from_artifact;
use daedalus::modded::PartialVersionInfo;
use dashmap::DashMap;
use futures::io::Cursor;
use indexmap::IndexMap;
use itertools::Itertools;
use serde::de::DeserializeOwned;
use serde::Deserialize;
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::Semaphore;
#[tracing::instrument(skip(semaphore, upload_files, mirror_artifacts))]
pub async fn fetch_forge(
semaphore: Arc<Semaphore>,
upload_files: &DashMap<String, UploadFile>,
mirror_artifacts: &DashMap<String, MirrorArtifact>,
) -> Result<(), Error> {
let forge_manifest = fetch_json::<IndexMap<String, Vec<String>>>(
"https://files.minecraftforge.net/net/minecraftforge/forge/maven-metadata.json",
&semaphore,
)
.await?;
let mut format_version = 0;
let forge_versions = forge_manifest.into_iter().flat_map(|(game_version, versions)| versions.into_iter().map(|loader_version| {
// Forge versions can be in these specific formats:
// 1.10.2-12.18.1.2016-failtests
// 1.9-12.16.0.1886
// 1.9-12.16.0.1880-1.9
// 1.14.4-28.1.30
// This parses them to get the actual Forge version. Ex: 1.15.2-31.1.87 -> 31.1.87
let version_split = loader_version.split('-').nth(1).unwrap_or(&loader_version).to_string();
// Forge has 3 installer formats:
// - Format 0 (unsupported at the moment): Forge Legacy (pre-1.5.2). Uses a binary-patch method to install.
// To install: Download the patch and the Minecraft client JAR, combine the two, and delete META-INF/.
// (pre-1.3.2) Client URL: https://maven.minecraftforge.net/net/minecraftforge/forge/{version}/forge-{version}-client.zip
// (pre-1.3.2) Server URL: https://maven.minecraftforge.net/net/minecraftforge/forge/{version}/forge-{version}-server.zip
// (1.3.2 onwards) Universal URL: https://maven.minecraftforge.net/net/minecraftforge/forge/{version}/forge-{version}-universal.zip
// - Format 1: Forge Installer Legacy (1.5.2 through roughly 1.12.2)
// To install: Extract install_profile.json from archive. "versionInfo" is the profile's version info. Convert it to the modern format
// Extract forge library from archive. Path is at "install"."path".
// - Format 2: Forge Installer Modern
// To install: Extract install_profile.json from archive. Extract version.json from archive. Combine the two and extract all libraries
// which are embedded into the installer JAR.
// Then upload. The launcher will need to run processors!
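// Format detection below relies on the manifest being ordered: 7.8.0.684 and
// 14.23.5.2851 are treated as the first format 1 and format 2 installers respectively,
// and every version after each sentinel keeps that format.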
if format_version != 1 && &*version_split == "7.8.0.684" {
format_version = 1;
} else if format_version != 2 && &*version_split == "14.23.5.2851" {
format_version = 2;
}
ForgeVersion {
format_version,
installer_url: format!("https://maven.minecraftforge.net/net/minecraftforge/forge/{0}/forge-{0}-installer.jar", loader_version),
raw: loader_version,
loader_version: version_split,
game_version: game_version.clone(),
}
})
.collect::<Vec<_>>())
// TODO: support format version 0 (see above)
.filter(|x| x.format_version != 0)
.filter(|x| {
// The following Forge versions are broken and cannot be installed
const BLACKLIST : &[&str] = &[
// Not supported due to `data` field being `[]` even though the type is a map
"1.12.2-14.23.5.2851",
// Malformed Archives
"1.6.1-8.9.0.749",
"1.6.1-8.9.0.751",
"1.6.4-9.11.1.960",
"1.6.4-9.11.1.961",
"1.6.4-9.11.1.963",
"1.6.4-9.11.1.964",
];
!BLACKLIST.contains(&&*x.raw)
})
.collect::<Vec<_>>();
fetch(
daedalus::modded::CURRENT_FORGE_FORMAT_VERSION,
"forge",
"https://maven.minecraftforge.net/",
forge_versions,
semaphore,
upload_files,
mirror_artifacts,
)
.await
}
#[tracing::instrument(skip(semaphore, upload_files, mirror_artifacts))]
pub async fn fetch_neo(
semaphore: Arc<Semaphore>,
upload_files: &DashMap<String, UploadFile>,
mirror_artifacts: &DashMap<String, MirrorArtifact>,
) -> Result<(), Error> {
#[derive(Debug, Deserialize)]
struct Metadata {
versioning: Versioning,
}
#[derive(Debug, Deserialize)]
struct Versioning {
versions: Versions,
}
#[derive(Debug, Deserialize)]
struct Versions {
version: Vec<String>,
}
let forge_versions = fetch_xml::<Metadata>(
"https://maven.neoforged.net/net/neoforged/forge/maven-metadata.xml",
&semaphore,
)
.await?;
let neo_versions = fetch_xml::<Metadata>(
"https://maven.neoforged.net/net/neoforged/neoforge/maven-metadata.xml",
&semaphore,
)
.await?;
let parsed_versions = forge_versions.versioning.versions.version.into_iter().map(|loader_version| {
// Legacy NeoForge versions (the net.neoforged:forge artifact) can be in these specific formats:
// 1.20.1-47.1.74
// 47.1.82
// This parses them to get the actual Forge version. Ex: 1.20.1-47.1.74 -> 47.1.74
let version_split = loader_version.split('-').nth(1).unwrap_or(&loader_version).to_string();
Ok(ForgeVersion {
format_version: 2,
installer_url: format!("https://maven.neoforged.net/net/neoforged/forge/{0}/forge-{0}-installer.jar", loader_version),
raw: loader_version,
loader_version: version_split,
game_version: "1.20.1".to_string(), // All NeoForge Forge versions are for 1.20.1
})
}).chain(neo_versions.versioning.versions.version.into_iter().map(|loader_version| {
let mut parts = loader_version.split('.');
// NeoForge versions (the net.neoforged:neoforge artifact) are in this format: 20.2.29-beta, 20.6.119
// The first two numbers map to the Minecraft version (e.g. 20.2.x -> 1.20.2) and the third is the NeoForge build
let major = parts.next().ok_or_else(
|| crate::ErrorKind::InvalidInput(format!("Unable to find major game version for NeoForge {loader_version}"))
)?;
let minor = parts.next().ok_or_else(
|| crate::ErrorKind::InvalidInput(format!("Unable to find minor game version for NeoForge {loader_version}"))
)?;
let game_version = if minor == "0" {
format!("1.{major}")
} else {
format!("1.{major}.{minor}")
};
Ok(ForgeVersion {
format_version: 2,
installer_url: format!("https://maven.neoforged.net/net/neoforged/neoforge/{0}/neoforge-{0}-installer.jar", loader_version),
loader_version: loader_version.clone(),
raw: loader_version,
game_version,
})
}))
.collect::<Result<Vec<_>, Error>>()?
.into_iter()
.filter(|x| {
// The following Forge versions are broken and cannot be installed
const BLACKLIST : &[&str] = &[
// Unreachable / 404
"1.20.1-47.1.7",
"47.1.82",
];
!BLACKLIST.contains(&&*x.raw)
}).collect();
fetch(
daedalus::modded::CURRENT_NEOFORGE_FORMAT_VERSION,
"neo",
"https://maven.neoforged.net/",
parsed_versions,
semaphore,
upload_files,
mirror_artifacts,
)
.await
}
#[tracing::instrument(skip(
forge_versions,
semaphore,
upload_files,
mirror_artifacts
))]
async fn fetch(
format_version: usize,
mod_loader: &str,
maven_url: &str,
forge_versions: Vec<ForgeVersion>,
semaphore: Arc<Semaphore>,
upload_files: &DashMap<String, UploadFile>,
mirror_artifacts: &DashMap<String, MirrorArtifact>,
) -> Result<(), Error> {
let modrinth_manifest = fetch_json::<daedalus::modded::Manifest>(
&format_url(&format!("{mod_loader}/v{format_version}/manifest.json",)),
&semaphore,
)
.await
.ok();
let fetch_versions = if let Some(modrinth_manifest) = modrinth_manifest {
let mut fetch_versions = Vec::new();
for version in &forge_versions {
if !modrinth_manifest.game_versions.iter().any(|x| {
x.id == version.game_version
&& x.loaders.iter().any(|x| x.id == version.loader_version)
}) {
fetch_versions.push(version);
}
}
fetch_versions
} else {
forge_versions.iter().collect()
};
if !fetch_versions.is_empty() {
let forge_installers = futures::future::try_join_all(
fetch_versions
.iter()
.map(|x| download_file(&x.installer_url, None, &semaphore)),
)
.await?;
#[tracing::instrument(skip(raw, upload_files, mirror_artifacts))]
async fn read_forge_installer(
raw: bytes::Bytes,
loader: &ForgeVersion,
maven_url: &str,
mod_loader: &str,
upload_files: &DashMap<String, UploadFile>,
mirror_artifacts: &DashMap<String, MirrorArtifact>,
) -> Result<PartialVersionInfo, Error> {
tracing::trace!(
"Reading forge installer for {}",
loader.loader_version
);
type ZipFileReader = async_zip::base::read::seek::ZipFileReader<
Cursor<bytes::Bytes>,
>;
let cursor = Cursor::new(raw);
let mut zip = ZipFileReader::new(cursor).await?;
#[tracing::instrument(skip(zip))]
async fn read_file(
zip: &mut ZipFileReader,
file_name: &str,
) -> Result<Option<Vec<u8>>, Error> {
let zip_index_option =
zip.file().entries().iter().position(|f| {
f.filename().as_str().unwrap_or_default() == file_name
});
if let Some(zip_index) = zip_index_option {
let mut buffer = Vec::new();
let mut reader = zip.reader_with_entry(zip_index).await?;
reader.read_to_end_checked(&mut buffer).await?;
Ok(Some(buffer))
} else {
Ok(None)
}
}
#[tracing::instrument(skip(zip))]
async fn read_json<T: DeserializeOwned>(
zip: &mut ZipFileReader,
file_name: &str,
) -> Result<Option<T>, Error> {
if let Some(file) = read_file(zip, file_name).await? {
Ok(Some(serde_json::from_slice(&file)?))
} else {
Ok(None)
}
}
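// Format 1 (legacy installers): install_profile.json carries both the install data and
// the version info, and the Forge universal JAR is extracted from the installer archive
// and re-uploaded to our maven mirror.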
if loader.format_version == 1 {
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
struct ForgeInstallerProfileInstallDataV1 {
// pub mirror_list: String,
// pub target: String,
/// Path to the Forge universal library
pub file_path: String,
// pub logo: String,
// pub welcome: String,
// pub version: String,
/// Maven coordinates of the Forge universal library
pub path: String,
// pub profile_name: String,
pub minecraft: String,
}
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
struct ForgeInstallerProfileManifestV1 {
pub id: String,
pub libraries: Vec<daedalus::minecraft::Library>,
pub main_class: Option<String>,
pub minecraft_arguments: Option<String>,
pub release_time: DateTime<Utc>,
pub time: DateTime<Utc>,
pub type_: daedalus::minecraft::VersionType,
// pub assets: Option<String>,
// pub inherits_from: Option<String>,
// pub jar: Option<String>,
}
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
struct ForgeInstallerProfileV1 {
pub install: ForgeInstallerProfileInstallDataV1,
pub version_info: ForgeInstallerProfileManifestV1,
}
let install_profile = read_json::<ForgeInstallerProfileV1>(
&mut zip,
"install_profile.json",
)
.await?
.ok_or_else(|| {
crate::ErrorKind::InvalidInput(format!(
"No install_profile.json present for loader {}",
loader.installer_url
))
})?;
let forge_library =
read_file(&mut zip, &install_profile.install.file_path)
.await?
.ok_or_else(|| {
crate::ErrorKind::InvalidInput(format!(
"No forge library present for loader {}",
loader.installer_url
))
})?;
upload_files.insert(
format!(
"maven/{}",
get_path_from_artifact(&install_profile.install.path)?
),
UploadFile {
file: bytes::Bytes::from(forge_library),
content_type: None,
},
);
Ok(PartialVersionInfo {
id: install_profile.version_info.id,
inherits_from: install_profile.install.minecraft,
release_time: install_profile.version_info.release_time,
time: install_profile.version_info.time,
main_class: install_profile.version_info.main_class,
minecraft_arguments: install_profile
.version_info
.minecraft_arguments
.clone(),
arguments: install_profile
.version_info
.minecraft_arguments
.map(|x| {
[(
daedalus::minecraft::ArgumentType::Game,
x.split(' ')
.map(|x| {
daedalus::minecraft::Argument::Normal(
x.to_string(),
)
})
.collect(),
)]
.iter()
.cloned()
.collect()
}),
libraries: install_profile
.version_info
.libraries
.into_iter()
.map(|mut lib| {
// Every library except the extracted Forge library is mirrored from maven servers,
// unless its URL is empty/null or it is already available on Minecraft's servers
if let Some(ref url) = lib.url {
if lib.name == install_profile.install.path {
lib.url = Some(format_url("maven/"));
} else if !url.is_empty()
&& !url.contains(
"https://libraries.minecraft.net/",
)
{
insert_mirrored_artifact(
&lib.name,
None,
vec![
url.clone(),
"https://maven.creeperhost.net/"
.to_string(),
maven_url.to_string(),
],
false,
mirror_artifacts,
)?;
lib.url = Some(format_url("maven/"));
}
}
Ok(lib)
})
.collect::<Result<Vec<_>, Error>>()?,
type_: install_profile.version_info.type_,
data: None,
processors: None,
})
} else if loader.format_version == 2 {
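// Format 2 (modern installers): install_profile.json and version.json are read
// separately; processors are attached to the version info, and libraries embedded in
// the installer are mirrored or extracted below.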
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
struct ForgeInstallerProfileV2 {
// pub spec: i32,
// pub profile: String,
// pub version: String,
// pub json: String,
// pub path: Option<String>,
// pub minecraft: String,
pub data: HashMap<String, daedalus::modded::SidedDataEntry>,
pub libraries: Vec<daedalus::minecraft::Library>,
pub processors: Vec<daedalus::modded::Processor>,
}
let install_profile = read_json::<ForgeInstallerProfileV2>(
&mut zip,
"install_profile.json",
)
.await?
.ok_or_else(|| {
crate::ErrorKind::InvalidInput(format!(
"No install_profile.json present for loader {}",
loader.installer_url
))
})?;
let mut version_info =
read_json::<PartialVersionInfo>(&mut zip, "version.json")
.await?
.ok_or_else(|| {
crate::ErrorKind::InvalidInput(format!(
"No version.json present for loader {}",
loader.installer_url
))
})?;
version_info.processors = Some(install_profile.processors);
version_info.libraries.extend(
install_profile.libraries.into_iter().map(|mut x| {
x.include_in_classpath = false;
x
}),
);
async fn mirror_forge_library(
mut zip: ZipFileReader,
mut lib: daedalus::minecraft::Library,
maven_url: &str,
upload_files: &DashMap<String, UploadFile>,
mirror_artifacts: &DashMap<String, MirrorArtifact>,
) -> Result<daedalus::minecraft::Library, Error>
{
let artifact_path = get_path_from_artifact(&lib.name)?;
if let Some(ref mut artifact) =
lib.downloads.as_mut().and_then(|x| x.artifact.as_mut())
{
if !artifact.url.is_empty() {
insert_mirrored_artifact(
&lib.name,
Some(artifact.sha1.clone()),
vec![artifact.url.clone()],
true,
mirror_artifacts,
)?;
artifact.url =
format_url(&format!("maven/{}", artifact_path));
return Ok(lib);
}
} else if let Some(url) = &lib.url {
if !url.is_empty() {
insert_mirrored_artifact(
&lib.name,
None,
vec![
url.clone(),
"https://libraries.minecraft.net/"
.to_string(),
"https://maven.creeperhost.net/"
.to_string(),
maven_url.to_string(),
],
false,
mirror_artifacts,
)?;
lib.url = Some(format_url("maven/"));
return Ok(lib);
}
}
// Other libraries are generally available in the "maven" directory of the installer. If they are
// not present here, they will be generated by Forge processors.
let extract_path = format!("maven/{artifact_path}");
if let Some(file) =
read_file(&mut zip, &extract_path).await?
{
upload_files.insert(
extract_path,
UploadFile {
file: bytes::Bytes::from(file),
content_type: None,
},
);
lib.url = Some(format_url("maven/"));
} else {
lib.downloadable = false;
}
Ok(lib)
}
version_info.libraries = futures::future::try_join_all(
version_info.libraries.into_iter().map(|lib| {
mirror_forge_library(
zip.clone(),
lib,
maven_url,
upload_files,
mirror_artifacts,
)
}),
)
.await?;
// In modern Minecraft Forge installers, processors are run during the install process. Some processor
// data files are embedded in the installer JAR. The code below finds these files, extracts and uploads
// them, and registers them as libraries instead.
// Ex:
// "BINPATCH": {
// "client": "/data/client.lzma",
// "server": "/data/server.lzma"
// },
// Becomes:
// "BINPATCH": {
// "client": "[net.minecraftforge:forge:1.20.3-49.0.1:shim:client@lzma]",
// "server": "[net.minecraftforge:forge:1.20.3-49.0.1:shim:server@lzma]"
// },
// And the resulting library is added to the profile's libraries
let mut new_data = HashMap::new();
for (key, entry) in install_profile.data {
async fn extract_data(
zip: &mut ZipFileReader,
key: &str,
value: &str,
upload_files: &DashMap<String, UploadFile>,
libs: &mut Vec<daedalus::minecraft::Library>,
mod_loader: &str,
version: &ForgeVersion,
) -> Result<String, Error> {
let extract_file =
read_file(zip, &value[1..])
.await?
.ok_or_else(|| {
crate::ErrorKind::InvalidInput(format!(
"Unable reading data key {key} at path {value}",
))
})?;
let file_name = value.split('/').last()
.ok_or_else(|| {
crate::ErrorKind::InvalidInput(format!(
"Unable reading filename for data key {key} at path {value}",
))
})?;
let mut file = file_name.split('.');
let file_name = file.next()
.ok_or_else(|| {
crate::ErrorKind::InvalidInput(format!(
"Unable reading filename only for data key {key} at path {value}",
))
})?;
let ext = file.next()
.ok_or_else(|| {
crate::ErrorKind::InvalidInput(format!(
"Unable reading extension only for data key {key} at path {value}",
))
})?;
let path = format!(
"com.modrinth.daedalus:{}-installer-extracts:{}:{}@{}",
mod_loader,
version.raw,
file_name,
ext
);
upload_files.insert(
format!("maven/{}", get_path_from_artifact(&path)?),
UploadFile {
file: bytes::Bytes::from(extract_file),
content_type: None,
},
);
libs.push(daedalus::minecraft::Library {
downloads: None,
extract: None,
name: path.clone(),
url: Some(format_url("maven/")),
natives: None,
rules: None,
checksums: None,
include_in_classpath: false,
downloadable: true,
});
Ok(format!("[{path}]"))
}
let client = if entry.client.starts_with('/') {
extract_data(
&mut zip,
&key,
&entry.client,
upload_files,
&mut version_info.libraries,
mod_loader,
loader,
)
.await?
} else {
entry.client.clone()
};
let server = if entry.server.starts_with('/') {
extract_data(
&mut zip,
&key,
&entry.server,
upload_files,
&mut version_info.libraries,
mod_loader,
loader,
)
.await?
} else {
entry.server.clone()
};
new_data.insert(
key.clone(),
daedalus::modded::SidedDataEntry { client, server },
);
}
version_info.data = Some(new_data);
Ok(version_info)
} else {
Err(crate::ErrorKind::InvalidInput(format!(
"Unknown format version {} for loader {}",
loader.format_version, loader.installer_url
))
.into())
}
}
let forge_version_infos = futures::future::try_join_all(
forge_installers
.into_iter()
.enumerate()
.map(|(index, raw)| {
let loader = fetch_versions[index];
read_forge_installer(
raw,
loader,
maven_url,
mod_loader,
upload_files,
mirror_artifacts,
)
}),
)
.await?;
let serialized_version_manifests = forge_version_infos
.iter()
.map(|x| serde_json::to_vec(x).map(bytes::Bytes::from))
.collect::<Result<Vec<_>, serde_json::Error>>()?;
serialized_version_manifests
.into_iter()
.enumerate()
.for_each(|(index, bytes)| {
let loader = fetch_versions[index];
let version_path = format!(
"{mod_loader}/v{format_version}/versions/{}.json",
loader.loader_version
);
upload_files.insert(
version_path,
UploadFile {
file: bytes,
content_type: Some("application/json".to_string()),
},
);
});
let forge_manifest_path =
format!("{mod_loader}/v{format_version}/manifest.json",);
let manifest = daedalus::modded::Manifest {
game_versions: forge_versions
.into_iter()
.rev()
.chunk_by(|x| x.game_version.clone())
.into_iter()
.map(|(game_version, loaders)| daedalus::modded::Version {
id: game_version,
stable: true,
loaders: loaders
.map(|x| daedalus::modded::LoaderVersion {
url: format_url(&format!(
"{mod_loader}/v{format_version}/versions/{}.json",
x.loader_version
)),
id: x.loader_version,
stable: false,
})
.collect(),
})
.collect(),
};
upload_files.insert(
forge_manifest_path,
UploadFile {
file: bytes::Bytes::from(serde_json::to_vec(&manifest)?),
content_type: Some("application/json".to_string()),
},
);
}
Ok(())
}
#[derive(Debug)]
struct ForgeVersion {
pub format_version: usize,
pub raw: String,
pub loader_version: String,
pub game_version: String,
pub installer_url: String,
}


@@ -0,0 +1,218 @@
use crate::util::{
format_url, upload_file_to_bucket, upload_url_to_bucket_mirrors,
REQWEST_CLIENT,
};
use daedalus::get_path_from_artifact;
use dashmap::{DashMap, DashSet};
use std::sync::Arc;
use tokio::sync::Semaphore;
use tracing_error::ErrorLayer;
use tracing_subscriber::{fmt, prelude::*, EnvFilter};
mod error;
mod fabric;
mod forge;
mod minecraft;
pub mod util;
pub use error::{Error, ErrorKind, Result};
#[tokio::main]
async fn main() -> Result<()> {
dotenvy::dotenv().ok();
let subscriber = tracing_subscriber::registry()
.with(fmt::layer())
.with(EnvFilter::from_default_env())
.with(ErrorLayer::default());
tracing::subscriber::set_global_default(subscriber)?;
tracing::info!("Initialized tracing. Starting Daedalus!");
if check_env_vars() {
tracing::error!("Some environment variables are missing!");
return Ok(());
}
let semaphore = Arc::new(Semaphore::new(
dotenvy::var("CONCURRENCY_LIMIT")
.ok()
.and_then(|x| x.parse().ok())
.unwrap_or(10),
));
// Map of upload path -> file to upload
let upload_files: DashMap<String, UploadFile> = DashMap::new();
// Map of artifact path -> artifact to mirror
let mirror_artifacts: DashMap<String, MirrorArtifact> = DashMap::new();
minecraft::fetch(semaphore.clone(), &upload_files, &mirror_artifacts)
.await?;
fabric::fetch_fabric(semaphore.clone(), &upload_files, &mirror_artifacts)
.await?;
fabric::fetch_quilt(semaphore.clone(), &upload_files, &mirror_artifacts)
.await?;
forge::fetch_neo(semaphore.clone(), &upload_files, &mirror_artifacts)
.await?;
forge::fetch_forge(semaphore.clone(), &upload_files, &mirror_artifacts)
.await?;
futures::future::try_join_all(upload_files.iter().map(|x| {
upload_file_to_bucket(
x.key().clone(),
x.value().file.clone(),
x.value().content_type.clone(),
&semaphore,
)
}))
.await?;
futures::future::try_join_all(mirror_artifacts.iter().map(|x| {
upload_url_to_bucket_mirrors(
format!("maven/{}", x.key()),
x.value()
.mirrors
.iter()
.map(|mirror| {
if mirror.entire_url {
mirror.path.clone()
} else {
format!("{}{}", mirror.path, x.key())
}
})
.collect(),
x.sha1.clone(),
&semaphore,
)
}))
.await?;
if dotenvy::var("CLOUDFLARE_INTEGRATION")
.ok()
.and_then(|x| x.parse::<bool>().ok())
.unwrap_or(false)
{
if let Ok(token) = dotenvy::var("CLOUDFLARE_TOKEN") {
if let Ok(zone_id) = dotenvy::var("CLOUDFLARE_ZONE_ID") {
let cache_clears = upload_files
.into_iter()
.map(|x| format_url(&x.0))
.chain(
mirror_artifacts
.into_iter()
.map(|x| format_url(&format!("maven/{}", x.0))),
)
.collect::<Vec<_>>();
// Cloudflare rate-limits cache purges to 500 files per request
for chunk in cache_clears.chunks(500) {
REQWEST_CLIENT.post(format!("https://api.cloudflare.com/client/v4/zones/{zone_id}/purge_cache"))
.bearer_auth(&token)
.json(&serde_json::json!({
"files": chunk
}))
.send()
.await
.map_err(|err| {
ErrorKind::Fetch {
inner: err,
item: "cloudflare clear cache".to_string(),
}
})?
.error_for_status()
.map_err(|err| {
ErrorKind::Fetch {
inner: err,
item: "cloudflare clear cache".to_string(),
}
})?;
}
}
}
}
Ok(())
}
pub struct UploadFile {
file: bytes::Bytes,
content_type: Option<String>,
}
pub struct MirrorArtifact {
pub sha1: Option<String>,
pub mirrors: DashSet<Mirror>,
}
#[derive(Eq, PartialEq, Hash)]
pub struct Mirror {
path: String,
entire_url: bool,
}
#[tracing::instrument(skip(mirror_artifacts))]
pub fn insert_mirrored_artifact(
artifact: &str,
sha1: Option<String>,
mirrors: Vec<String>,
entire_url: bool,
mirror_artifacts: &DashMap<String, MirrorArtifact>,
) -> Result<()> {
let val = mirror_artifacts
.entry(get_path_from_artifact(artifact)?)
.or_insert(MirrorArtifact {
sha1,
mirrors: DashSet::new(),
});
for mirror in mirrors {
val.mirrors.insert(Mirror {
path: mirror,
entire_url,
});
}
Ok(())
}
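// Required configuration: BASE_URL, S3_ACCESS_TOKEN, S3_SECRET, S3_URL, S3_REGION and
// S3_BUCKET_NAME. CONCURRENCY_LIMIT is optional (defaults to 10), and CLOUDFLARE_TOKEN /
// CLOUDFLARE_ZONE_ID are only required when CLOUDFLARE_INTEGRATION is set to true.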
fn check_env_vars() -> bool {
let mut failed = false;
fn check_var<T: std::str::FromStr>(var: &str) -> bool {
if dotenvy::var(var)
.ok()
.and_then(|s| s.parse::<T>().ok())
.is_none()
{
tracing::warn!(
"Variable `{}` missing in dotenvy or not of type `{}`",
var,
std::any::type_name::<T>()
);
true
} else {
false
}
}
failed |= check_var::<String>("BASE_URL");
failed |= check_var::<String>("S3_ACCESS_TOKEN");
failed |= check_var::<String>("S3_SECRET");
failed |= check_var::<String>("S3_URL");
failed |= check_var::<String>("S3_REGION");
failed |= check_var::<String>("S3_BUCKET_NAME");
if dotenvy::var("CLOUDFLARE_INTEGRATION")
.ok()
.and_then(|x| x.parse::<bool>().ok())
.unwrap_or(false)
{
failed |= check_var::<String>("CLOUDFLARE_TOKEN");
failed |= check_var::<String>("CLOUDFLARE_ZONE_ID");
}
failed
}


@@ -0,0 +1,230 @@
use crate::util::fetch_json;
use crate::{
util::download_file, util::format_url, util::sha1_async, Error,
MirrorArtifact, UploadFile,
};
use daedalus::minecraft::{
merge_partial_library, Library, PartialLibrary, VersionInfo,
VersionManifest, VERSION_MANIFEST_URL,
};
use dashmap::DashMap;
use serde::Deserialize;
use std::sync::Arc;
use tokio::sync::Semaphore;
#[tracing::instrument(skip(semaphore, upload_files, _mirror_artifacts))]
pub async fn fetch(
semaphore: Arc<Semaphore>,
upload_files: &DashMap<String, UploadFile>,
_mirror_artifacts: &DashMap<String, MirrorArtifact>,
) -> Result<(), Error> {
let modrinth_manifest = fetch_json::<VersionManifest>(
&format_url(&format!(
"minecraft/v{}/manifest.json",
daedalus::minecraft::CURRENT_FORMAT_VERSION
)),
&semaphore,
)
.await
.ok();
let mojang_manifest =
fetch_json::<VersionManifest>(VERSION_MANIFEST_URL, &semaphore).await?;
// TODO: experimental snapshots: https://github.com/PrismLauncher/meta/blob/main/meta/common/mojang-minecraft-experiments.json
// TODO: old snapshots: https://github.com/PrismLauncher/meta/blob/main/meta/common/mojang-minecraft-old-snapshots.json
// We compare against Modrinth's version manifest to check that each version 1) exists in Modrinth's database and 2) is unchanged upstream (same SHA-1)
// Versions that fail either check are re-fetched
let (fetch_versions, existing_versions) =
if let Some(mut modrinth_manifest) = modrinth_manifest {
let (mut fetch_versions, mut existing_versions) =
(Vec::new(), Vec::new());
for version in mojang_manifest.versions {
if let Some(index) = modrinth_manifest
.versions
.iter()
.position(|x| x.id == version.id)
{
let modrinth_version =
modrinth_manifest.versions.remove(index);
if modrinth_version
.original_sha1
.as_ref()
.map(|x| x == &version.sha1)
.unwrap_or(false)
{
existing_versions.push(modrinth_version);
} else {
fetch_versions.push(version);
}
} else {
fetch_versions.push(version);
}
}
(fetch_versions, existing_versions)
} else {
(mojang_manifest.versions, Vec::new())
};
if !fetch_versions.is_empty() {
let version_manifests = futures::future::try_join_all(
fetch_versions
.iter()
.map(|x| download_file(&x.url, Some(&x.sha1), &semaphore)),
)
.await?
.into_iter()
.map(|x| serde_json::from_slice(&x))
.collect::<Result<Vec<VersionInfo>, serde_json::Error>>()?;
// Patch the libraries of Minecraft versions for M-series Mac support, better Linux compatibility, etc.
let library_patches = fetch_library_patches()?;
let patched_version_manifests = version_manifests
.into_iter()
.map(|mut x| {
if !library_patches.is_empty() {
let mut new_libraries = Vec::new();
for library in x.libraries {
let mut libs = patch_library(&library_patches, library);
new_libraries.append(&mut libs)
}
x.libraries = new_libraries
}
x
})
.collect::<Vec<_>>();
// serialize + compute hashes
let serialized_version_manifests = patched_version_manifests
.iter()
.map(|x| serde_json::to_vec(x).map(bytes::Bytes::from))
.collect::<Result<Vec<_>, serde_json::Error>>()?;
let hashes_version_manifests = futures::future::try_join_all(
serialized_version_manifests
.iter()
.map(|x| sha1_async(x.clone())),
)
.await?;
// We upload the new version manifests and add them to the versions list
let mut new_versions = patched_version_manifests
.into_iter()
.zip(serialized_version_manifests.into_iter())
.zip(hashes_version_manifests.into_iter())
.map(|((version, bytes), hash)| {
let version_path = format!(
"minecraft/v{}/versions/{}.json",
daedalus::minecraft::CURRENT_FORMAT_VERSION,
version.id
);
let url = format_url(&version_path);
upload_files.insert(
version_path,
UploadFile {
file: bytes,
content_type: Some("application/json".to_string()),
},
);
daedalus::minecraft::Version {
original_sha1: fetch_versions
.iter()
.find(|x| x.id == version.id)
.map(|x| x.sha1.clone()),
id: version.id,
type_: version.type_,
url,
time: version.time,
release_time: version.release_time,
sha1: hash,
compliance_level: 1,
}
})
.chain(existing_versions.into_iter())
.collect::<Vec<_>>();
new_versions.sort_by(|a, b| b.release_time.cmp(&a.release_time));
// create and upload the new manifest
let version_manifest_path = format!(
"minecraft/v{}/manifest.json",
daedalus::minecraft::CURRENT_FORMAT_VERSION
);
let new_manifest = VersionManifest {
latest: mojang_manifest.latest,
versions: new_versions,
};
upload_files.insert(
version_manifest_path,
UploadFile {
file: bytes::Bytes::from(serde_json::to_vec(&new_manifest)?),
content_type: Some("application/json".to_string()),
},
);
}
Ok(())
}
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct LibraryPatch {
#[serde(rename = "_comment")]
pub _comment: String,
#[serde(rename = "match")]
pub match_: Vec<String>,
pub additional_libraries: Option<Vec<Library>>,
#[serde(rename = "override")]
pub override_: Option<PartialLibrary>,
pub patch_additional_libraries: Option<bool>,
}
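// Illustrative shape (hypothetical values) of an entry in library-patches.json,
// matching the serde renames above:
//
//     {
//       "_comment": "Use an ARM64-capable LWJGL build on Apple Silicon",
//       "match": ["org.lwjgl:lwjgl:3.2.1"],
//       "override": { "name": "org.lwjgl:lwjgl:3.3.1" },
//       "patchAdditionalLibraries": false
//     }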
fn fetch_library_patches() -> Result<Vec<LibraryPatch>, Error> {
let patches = include_bytes!("../library-patches.json");
Ok(serde_json::from_slice(patches)?)
}
pub fn patch_library(
patches: &Vec<LibraryPatch>,
mut library: Library,
) -> Vec<Library> {
let mut val = Vec::new();
let actual_patches = patches
.iter()
.filter(|x| x.match_.contains(&library.name))
.collect::<Vec<_>>();
if !actual_patches.is_empty() {
for patch in actual_patches {
if let Some(override_) = &patch.override_ {
library = merge_partial_library(override_.clone(), library);
}
if let Some(additional_libraries) = &patch.additional_libraries {
for additional_library in additional_libraries {
if patch.patch_additional_libraries.unwrap_or(false) {
let mut libs =
patch_library(patches, additional_library.clone());
val.append(&mut libs)
} else {
val.push(additional_library.clone());
}
}
}
}
val.push(library);
} else {
val.push(library);
}
val
}


@@ -0,0 +1,234 @@
use crate::{Error, ErrorKind};
use bytes::Bytes;
use s3::creds::Credentials;
use s3::{Bucket, Region};
use serde::de::DeserializeOwned;
use std::sync::Arc;
use tokio::sync::Semaphore;
lazy_static::lazy_static! {
static ref BUCKET : Bucket = {
let region = dotenvy::var("S3_REGION").unwrap();
let b = Bucket::new(
&dotenvy::var("S3_BUCKET_NAME").unwrap(),
if &*region == "r2" {
Region::R2 {
account_id: dotenvy::var("S3_URL").unwrap(),
}
} else {
Region::Custom {
region: region.clone(),
endpoint: dotenvy::var("S3_URL").unwrap(),
}
},
Credentials::new(
Some(&*dotenvy::var("S3_ACCESS_TOKEN").unwrap()),
Some(&*dotenvy::var("S3_SECRET").unwrap()),
None,
None,
None,
).unwrap(),
).unwrap();
if region == "path-style" {
b.with_path_style()
} else {
b
}
};
}
lazy_static::lazy_static! {
pub static ref REQWEST_CLIENT: reqwest::Client = {
let mut headers = reqwest::header::HeaderMap::new();
if let Ok(header) = reqwest::header::HeaderValue::from_str(&format!(
"modrinth/daedalus/{} (support@modrinth.com)",
env!("CARGO_PKG_VERSION")
)) {
headers.insert(reqwest::header::USER_AGENT, header);
}
reqwest::Client::builder()
.tcp_keepalive(Some(std::time::Duration::from_secs(10)))
.timeout(std::time::Duration::from_secs(15))
.default_headers(headers)
.build()
.unwrap()
};
}
#[tracing::instrument(skip(bytes, semaphore))]
pub async fn upload_file_to_bucket(
path: String,
bytes: Bytes,
content_type: Option<String>,
semaphore: &Arc<Semaphore>,
) -> Result<(), Error> {
let _permit = semaphore.acquire().await?;
let key = path.clone();
const RETRIES: i32 = 3;
for attempt in 1..=(RETRIES + 1) {
tracing::trace!("Attempting file upload, attempt {attempt}");
let result = if let Some(ref content_type) = content_type {
BUCKET
.put_object_with_content_type(key.clone(), &bytes, content_type)
.await
} else {
BUCKET.put_object(key.clone(), &bytes).await
}
.map_err(|err| ErrorKind::S3 {
inner: err,
file: path.clone(),
});
match result {
Ok(_) => return Ok(()),
Err(_) if attempt <= RETRIES => continue,
Err(_) => {
result?;
}
}
}
unreachable!()
}
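// Tries each mirror in order and uploads the first successful download; if every
// mirror fails, the error from the final attempt is returned.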
pub async fn upload_url_to_bucket_mirrors(
upload_path: String,
mirrors: Vec<String>,
sha1: Option<String>,
semaphore: &Arc<Semaphore>,
) -> Result<(), Error> {
if mirrors.is_empty() {
return Err(ErrorKind::InvalidInput(
"No mirrors provided!".to_string(),
)
.into());
}
for (index, mirror) in mirrors.iter().enumerate() {
let result = upload_url_to_bucket(
upload_path.clone(),
mirror.clone(),
sha1.clone(),
semaphore,
)
.await;
if result.is_ok() || (result.is_err() && index == (mirrors.len() - 1)) {
return result;
}
}
unreachable!()
}
#[tracing::instrument(skip(semaphore))]
pub async fn upload_url_to_bucket(
path: String,
url: String,
sha1: Option<String>,
semaphore: &Arc<Semaphore>,
) -> Result<(), Error> {
let data = download_file(&url, sha1.as_deref(), semaphore).await?;
upload_file_to_bucket(path, data, None, semaphore).await?;
Ok(())
}
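// SHA-1 hashing is offloaded to a blocking thread so large payloads don't stall the
// async runtime.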
#[tracing::instrument(skip(bytes))]
pub async fn sha1_async(bytes: Bytes) -> Result<String, Error> {
let hash = tokio::task::spawn_blocking(move || {
sha1_smol::Sha1::from(bytes).hexdigest()
})
.await?;
Ok(hash)
}
#[tracing::instrument(skip(semaphore))]
pub async fn download_file(
url: &str,
sha1: Option<&str>,
semaphore: &Arc<Semaphore>,
) -> Result<bytes::Bytes, crate::Error> {
let _permit = semaphore.acquire().await?;
tracing::trace!("Starting file download");
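// Transient network failures are retried up to RETRIES extra times; checksum
// mismatches are only retried for the first 3 attempts before a ChecksumFailure
// is returned.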
const RETRIES: u32 = 10;
for attempt in 1..=(RETRIES + 1) {
let result = REQWEST_CLIENT
.get(url.replace("http://", "https://"))
.send()
.await
.and_then(|x| x.error_for_status());
match result {
Ok(x) => {
let bytes = x.bytes().await;
if let Ok(bytes) = bytes {
if let Some(sha1) = sha1 {
if &*sha1_async(bytes.clone()).await? != sha1 {
if attempt <= 3 {
continue;
} else {
return Err(
crate::ErrorKind::ChecksumFailure {
hash: sha1.to_string(),
url: url.to_string(),
tries: attempt,
}
.into(),
);
}
}
}
return Ok(bytes);
} else if attempt <= RETRIES {
continue;
} else if let Err(err) = bytes {
return Err(crate::ErrorKind::Fetch {
inner: err,
item: url.to_string(),
}
.into());
}
}
Err(_) if attempt <= RETRIES => continue,
Err(err) => {
return Err(crate::ErrorKind::Fetch {
inner: err,
item: url.to_string(),
}
.into())
}
}
}
unreachable!()
}
pub async fn fetch_json<T: DeserializeOwned>(
url: &str,
semaphore: &Arc<Semaphore>,
) -> Result<T, Error> {
Ok(serde_json::from_slice(
&download_file(url, None, semaphore).await?,
)?)
}
pub async fn fetch_xml<T: DeserializeOwned>(
url: &str,
semaphore: &Arc<Semaphore>,
) -> Result<T, Error> {
Ok(serde_xml_rs::from_reader(
&*download_file(url, None, semaphore).await?,
)?)
}
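// Prefixes a path with the configured BASE_URL to produce its public URL.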
pub fn format_url(path: &str) -> String {
format!("{}/{}", &*dotenvy::var("BASE_URL").unwrap(), path)
}