Move files in preparation for monorepo migration

Jai A
2024-07-03 13:23:21 -07:00
parent 8140db32dd
commit a04cb54d86
232 changed files with 0 additions and 7116 deletions

libs/theseus/Cargo.toml

@@ -0,0 +1,74 @@
[package]
name = "theseus"
version = "0.7.2"
authors = ["Jai A <jaiagr+gpg@pm.me>"]
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
theseus_macros = { path = "../theseus_macros" }
bytes = "1"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
serde_ini = "0.2.0"
toml = "0.8.12"
sha1_smol = { version = "1.0.0", features = ["std"] }
sha2 = "0.10.8"
url = "2.2"
uuid = { version = "1.1", features = ["serde", "v4"] }
zip = "0.6.5"
async_zip = { version = "0.0.17", features = ["full"] }
flate2 = "1.0.28"
tempfile = "3.5.0"
urlencoding = "2.1.3"
chrono = { version = "0.4.19", features = ["serde"] }
daedalus = { version = "0.1.25" }
dirs = "5.0.1"
regex = "1.5"
sys-info = "0.9.0"
sysinfo = "0.30.8"
thiserror = "1.0"
tracing = "0.1.37"
tracing-subscriber = { version = "0.3.18", features = ["chrono", "env-filter"] }
tracing-error = "0.2.0"
tracing-appender = "0.2.3"
paste = { version = "1.0" }
tauri = { version = "1.6.1", optional = true }
indicatif = { version = "0.17.3", optional = true }
async-tungstenite = { version = "0.25.1", features = ["tokio-runtime", "tokio-native-tls"] }
futures = "0.3"
reqwest = { version = "0.12.3", features = ["json", "stream", "deflate", "gzip", "brotli"] }
tokio = { version = "1", features = ["full"] }
tokio-stream = { version = "0.1", features = ["fs"] }
async-recursion = "1.0.4"
notify = { version = "6.1.1", default-features = false }
notify-debouncer-mini = { version = "0.4.1", default-features = false }
lazy_static = "1.4.0"
dunce = "1.0.3"
whoami = "1.4.0"
discord-rich-presence = "0.2.3"
p256 = { version = "0.13.2", features = ["ecdsa"] }
rand = "0.8"
byteorder = "1.5.0"
base64 = "0.22.0"
[target.'cfg(windows)'.dependencies]
winreg = "0.52.0"
[features]
tauri = ["dep:tauri"]
cli = ["dep:indicatif"]
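
The [features] table above makes the Tauri and CLI integrations opt-in: each feature does nothing more than enable its optional dependency. A minimal sketch (not part of this commit; the function names are made up) of how code in this crate can be gated on those features:

#[cfg(feature = "tauri")]
fn register_tauri_integration() {
    // Compiled only when built with --features tauri, which pulls in the
    // optional tauri dependency via dep:tauri.
}

#[cfg(feature = "cli")]
fn show_cli_progress() {
    // Compiled only with --features cli, which enables indicatif.
}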

Binary file not shown.


@@ -0,0 +1,22 @@
public final class JavaInfo {
private static final String[] CHECKED_PROPERTIES = new String[] {
"os.arch",
"java.version"
};
public static void main(String[] args) {
int returnCode = 0;
for (String key : CHECKED_PROPERTIES) {
String property = System.getProperty(key);
if (property != null) {
System.out.println(key + "=" + property);
} else {
returnCode = 1;
}
}
System.exit(returnCode);
}
}
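
The JavaInfo class above is a small probe: it prints os.arch and java.version as key=value lines and exits non-zero if either property is missing. A minimal sketch (not part of this commit) of how a launcher could run the compiled class and parse its output; the helper name and class-path handling are assumptions:

use std::collections::HashMap;
use std::process::Command;

fn probe_java(class_dir: &str) -> std::io::Result<HashMap<String, String>> {
    // Run `java -cp <class_dir> JavaInfo` and capture its stdout.
    let output = Command::new("java")
        .args(["-cp", class_dir, "JavaInfo"])
        .output()?;
    // Each stdout line is "os.arch=..." or "java.version=..."; a non-zero exit
    // code means at least one property was missing.
    let props = String::from_utf8_lossy(&output.stdout)
        .lines()
        .filter_map(|line| line.split_once('='))
        .map(|(k, v)| (k.to_string(), v.to_string()))
        .collect();
    Ok(props)
}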


@@ -0,0 +1,75 @@
use std::path::PathBuf;
use crate::{
event::{
emit::{emit_command, emit_warning},
CommandPayload,
},
util::io,
};
/// Handles external commands (such as those arriving through URL deep links).
/// The sublink is the value extracted from the URL, in a path-like format such as
/// subdomain1/subdomain2 (the modrinth:// prefix is not included).
pub async fn handle_url(sublink: &str) -> crate::Result<CommandPayload> {
Ok(match sublink.split_once('/') {
// /mod/{id} - Installs a mod of mod id
Some(("mod", id)) => CommandPayload::InstallMod { id: id.to_string() },
// /version/{id} - Installs a specific version of id
Some(("version", id)) => {
CommandPayload::InstallVersion { id: id.to_string() }
}
// /modpack/{id} - Installs a modpack of modpack id
Some(("modpack", id)) => {
CommandPayload::InstallModpack { id: id.to_string() }
}
_ => {
emit_warning(&format!(
"Invalid command, unrecognized path: {sublink}"
))
.await?;
return Err(crate::ErrorKind::InputError(format!(
"Invalid command, unrecognized path: {sublink}"
))
.into());
}
})
}
pub async fn parse_command(
command_string: &str,
) -> crate::Result<CommandPayload> {
tracing::debug!("Parsing command: {}", &command_string);
// modrinth://some-command
// This occurs when following a web redirect link
if let Some(sublink) = command_string.strip_prefix("modrinth://") {
Ok(handle_url(sublink).await?)
} else {
// We assume anything else is a filepath to an .mrpack file
let path = PathBuf::from(command_string);
let path = io::canonicalize(path)?;
if let Some(ext) = path.extension() {
if ext == "mrpack" {
return Ok(CommandPayload::RunMRPack { path });
}
}
emit_warning(&format!(
"Invalid command, unrecognized filetype: {}",
path.display()
))
.await?;
Err(crate::ErrorKind::InputError(format!(
"Invalid command, unrecognized filetype: {}",
path.display()
))
.into())
}
}
pub async fn parse_and_emit_command(command_string: &str) -> crate::Result<()> {
let command = parse_command(command_string).await?;
emit_command(command).await?;
Ok(())
}
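
A minimal usage sketch for the handler above (not part of this commit; the module path and the crate-level Result alias assume these api modules are re-exported at the crate root):

async fn on_deep_link(link: &str) -> theseus::Result<()> {
    // link may be "modrinth://mod/{id}", "modrinth://version/{id}",
    // "modrinth://modpack/{id}", or a filesystem path to an .mrpack file;
    // the parsed CommandPayload is emitted to any listening frontend.
    theseus::handler::parse_and_emit_command(link).await?;
    Ok(())
}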

libs/theseus/src/api/jre.rs

@@ -0,0 +1,174 @@
//! Java runtime (JRE) discovery and automatic installation interface
use reqwest::Method;
use serde::Deserialize;
use std::path::PathBuf;
use crate::event::emit::{emit_loading, init_loading};
use crate::state::CredentialsStore;
use crate::util::fetch::{fetch_advanced, fetch_json};
use crate::util::io;
use crate::util::jre::extract_java_majorminor_version;
use crate::{
util::jre::{self, JavaVersion},
LoadingBarType, State,
};
// Searches for JREs on the system, optionally filtered to a given major Java version (e.g. 8, 17)
// If no version is given, all detected JREs are returned
pub async fn find_filtered_jres(
java_version: Option<u32>,
) -> crate::Result<Vec<JavaVersion>> {
let jres = jre::get_all_jre().await?;
// Keep only JREs whose major version matches the requested one, if a version was given
Ok(if let Some(java_version) = java_version {
jres.into_iter()
.filter(|jre| {
let jre_version = extract_java_majorminor_version(&jre.version);
if let Ok(jre_version) = jre_version {
jre_version.1 == java_version
} else {
false
}
})
.collect()
} else {
jres
})
}
#[theseus_macros::debug_pin]
pub async fn auto_install_java(java_version: u32) -> crate::Result<PathBuf> {
let state = State::get().await?;
let loading_bar = init_loading(
LoadingBarType::JavaDownload {
version: java_version,
},
100.0,
"Downloading java version",
)
.await?;
#[derive(Deserialize)]
struct Package {
pub download_url: String,
pub name: PathBuf,
}
emit_loading(&loading_bar, 0.0, Some("Fetching java version")).await?;
let packages = fetch_json::<Vec<Package>>(
Method::GET,
&format!(
"https://api.azul.com/metadata/v1/zulu/packages?arch={}&java_version={}&os={}&archive_type=zip&javafx_bundled=false&java_package_type=jre&page_size=1",
std::env::consts::ARCH, java_version, std::env::consts::OS
),
None,
None,
&state.fetch_semaphore,
&CredentialsStore(None),
).await?;
emit_loading(&loading_bar, 10.0, Some("Downloading java version")).await?;
if let Some(download) = packages.first() {
let file = fetch_advanced(
Method::GET,
&download.download_url,
None,
None,
None,
Some((&loading_bar, 80.0)),
&state.fetch_semaphore,
&CredentialsStore(None),
)
.await?;
let path = state.directories.java_versions_dir().await;
let mut archive = zip::ZipArchive::new(std::io::Cursor::new(file))
.map_err(|_| {
crate::Error::from(crate::ErrorKind::InputError(
"Failed to read java zip".to_string(),
))
})?;
// removes the old installation of java
if let Some(file) = archive.file_names().next() {
if let Some(dir) = file.split('/').next() {
let path = path.join(dir);
if path.exists() {
io::remove_dir_all(path).await?;
}
}
}
emit_loading(&loading_bar, 0.0, Some("Extracting java")).await?;
archive.extract(&path).map_err(|_| {
crate::Error::from(crate::ErrorKind::InputError(
"Failed to extract java zip".to_string(),
))
})?;
emit_loading(&loading_bar, 10.0, Some("Done extracting java")).await?;
let mut base_path = path.join(
download
.name
.file_stem()
.unwrap_or_default()
.to_string_lossy()
.to_string(),
);
#[cfg(target_os = "macos")]
{
base_path = base_path
.join(format!("zulu-{}.jre", java_version))
.join("Contents")
.join("Home")
.join("bin")
.join("java")
}
#[cfg(not(target_os = "macos"))]
{
base_path = base_path.join("bin").join(jre::JAVA_BIN)
}
Ok(base_path)
} else {
Err(crate::ErrorKind::LauncherError(format!(
"No Java Version found for Java version {}, OS {}, and Architecture {}",
java_version, std::env::consts::OS, std::env::consts::ARCH,
)).into())
}
}
// Validates the JRE at a given path
pub async fn check_jre(path: PathBuf) -> crate::Result<Option<JavaVersion>> {
Ok(jre::check_java_at_filepath(&path).await)
}
// Test JRE at a given path
pub async fn test_jre(
path: PathBuf,
major_version: u32,
) -> crate::Result<bool> {
let jre = match jre::check_java_at_filepath(&path).await {
Some(jre) => jre,
None => return Ok(false),
};
let (major, _) = extract_java_majorminor_version(&jre.version)?;
Ok(major == major_version)
}
// Gets the maximum available memory (total system memory) in KiB.
pub async fn get_max_memory() -> crate::Result<u64> {
Ok(sys_info::mem_info()
.map_err(|_| {
crate::Error::from(crate::ErrorKind::LauncherError(
"Unable to get computer memory".to_string(),
))
})?
.total)
}
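
A minimal sketch (not part of this commit) tying the functions above together: prefer an existing Java 17 installation, otherwise fall back to the Azul-backed auto-installer. It assumes JavaVersion exposes its install path as a String field named path:

use theseus::prelude::*;

async fn ensure_java_17() -> theseus::Result<std::path::PathBuf> {
    // Look for a JRE already on the system whose major version matches.
    let candidates = jre::find_filtered_jres(Some(17)).await?;
    if let Some(existing) = candidates.first() {
        return Ok(std::path::PathBuf::from(&existing.path));
    }
    // Otherwise download and extract one, returning the path to its java binary.
    jre::auto_install_java(17).await
}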


@@ -0,0 +1,372 @@
use std::io::{Read, SeekFrom};
use std::time::SystemTime;
use futures::TryFutureExt;
use serde::{Deserialize, Serialize};
use tokio::{
fs::File,
io::{AsyncReadExt, AsyncSeekExt},
};
use crate::{
prelude::{Credentials, DirectoryInfo},
util::io::{self, IOError},
{state::ProfilePathId, State},
};
#[derive(Serialize, Debug)]
pub struct Logs {
pub log_type: LogType,
pub filename: String,
pub age: u64,
pub output: Option<CensoredString>,
}
#[derive(Serialize, Deserialize, Debug, Clone, Copy, Eq, PartialEq)]
pub enum LogType {
InfoLog,
CrashReport,
}
#[derive(Serialize, Debug)]
pub struct LatestLogCursor {
pub cursor: u64,
pub output: CensoredString,
pub new_file: bool,
}
#[derive(Serialize, Debug)] // Not deserialize
#[serde(transparent)]
pub struct CensoredString(String);
impl CensoredString {
pub fn censor(mut s: String, credentials_set: &Vec<Credentials>) -> Self {
let username = whoami::username();
s = s
.replace(&format!("/{}/", username), "/{COMPUTER_USERNAME}/")
.replace(&format!("\\{}\\", username), "\\{COMPUTER_USERNAME}\\");
for credentials in credentials_set {
s = s
.replace(&credentials.access_token, "{MINECRAFT_ACCESS_TOKEN}")
.replace(&credentials.username, "{MINECRAFT_USERNAME}")
.replace(
&credentials.id.as_simple().to_string(),
"{MINECRAFT_UUID}",
)
.replace(
&credentials.id.as_hyphenated().to_string(),
"{MINECRAFT_UUID}",
);
}
Self(s)
}
}
impl Logs {
async fn build(
log_type: LogType,
age: SystemTime,
profile_subpath: &ProfilePathId,
filename: String,
clear_contents: Option<bool>,
) -> crate::Result<Self> {
Ok(Self {
log_type,
age: age
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap_or_else(|_| std::time::Duration::from_secs(0))
.as_secs(),
output: if clear_contents.unwrap_or(false) {
None
} else {
Some(
get_output_by_filename(
profile_subpath,
log_type,
&filename,
)
.await?,
)
},
filename,
})
}
}
#[tracing::instrument]
pub async fn get_logs_from_type(
profile_path: &ProfilePathId,
log_type: LogType,
clear_contents: Option<bool>,
logs: &mut Vec<crate::Result<Logs>>,
) -> crate::Result<()> {
let logs_folder = match log_type {
LogType::InfoLog => {
DirectoryInfo::profile_logs_dir(profile_path).await?
}
LogType::CrashReport => {
DirectoryInfo::crash_reports_dir(profile_path).await?
}
};
if logs_folder.exists() {
for entry in std::fs::read_dir(&logs_folder)
.map_err(|e| IOError::with_path(e, &logs_folder))?
{
let entry: std::fs::DirEntry =
entry.map_err(|e| IOError::with_path(e, &logs_folder))?;
let age = entry.metadata()?.created().unwrap_or_else(|_| SystemTime::UNIX_EPOCH);
let path = entry.path();
if !path.is_file() {
continue;
}
if let Some(file_name) = path.file_name() {
let file_name = file_name.to_string_lossy().to_string();
logs.push(
Logs::build(
log_type,
age,
&profile_path,
file_name,
clear_contents,
)
.await,
);
}
}
}
Ok(())
}
#[tracing::instrument]
pub async fn get_logs(
profile_path_id: ProfilePathId,
clear_contents: Option<bool>,
) -> crate::Result<Vec<Logs>> {
let profile_path = profile_path_id.profile_path().await?;
let mut logs = Vec::new();
get_logs_from_type(
&profile_path,
LogType::InfoLog,
clear_contents,
&mut logs,
)
.await?;
get_logs_from_type(
&profile_path,
LogType::CrashReport,
clear_contents,
&mut logs,
)
.await?;
let mut logs = logs.into_iter().collect::<crate::Result<Vec<Logs>>>()?;
logs.sort_by(|a, b| b.age.cmp(&a.age).then(b.filename.cmp(&a.filename)));
Ok(logs)
}
#[tracing::instrument]
pub async fn get_logs_by_filename(
profile_path_id: ProfilePathId,
log_type: LogType,
filename: String,
) -> crate::Result<Logs> {
let profile_path = profile_path_id.profile_path().await?;
let path = match log_type {
LogType::InfoLog => {
DirectoryInfo::profile_logs_dir(&profile_path).await
}
LogType::CrashReport => {
DirectoryInfo::crash_reports_dir(&profile_path).await
}
}?
.join(&filename);
let metadata = std::fs::metadata(&path)?;
let age = metadata.created().unwrap_or_else(|_| SystemTime::UNIX_EPOCH);
Logs::build(log_type, age, &profile_path, filename, Some(true)).await
}
#[tracing::instrument]
pub async fn get_output_by_filename(
profile_subpath: &ProfilePathId,
log_type: LogType,
file_name: &str,
) -> crate::Result<CensoredString> {
let state = State::get().await?;
let logs_folder = match log_type {
LogType::InfoLog => {
DirectoryInfo::profile_logs_dir(profile_subpath).await?
}
LogType::CrashReport => {
DirectoryInfo::crash_reports_dir(profile_subpath).await?
}
};
let path = logs_folder.join(file_name);
let credentials: Vec<Credentials> = state
.users
.read()
.await
.users
.clone()
.into_values()
.collect();
// Load .gz file into String
if let Some(ext) = path.extension() {
if ext == "gz" {
let file = std::fs::File::open(&path)
.map_err(|e| IOError::with_path(e, &path))?;
let mut contents = [0; 1024];
let mut result = String::new();
let mut gz =
flate2::read::GzDecoder::new(std::io::BufReader::new(file));
// Append only the bytes actually read in each chunk
loop {
let read = gz
.read(&mut contents)
.map_err(|e| IOError::with_path(e, &path))?;
if read == 0 {
break;
}
result.push_str(&String::from_utf8_lossy(&contents[..read]));
}
return Ok(CensoredString::censor(result, &credentials));
} else if ext == "log" || ext == "txt" {
let mut result = String::new();
let mut contents = [0; 1024];
let mut file = std::fs::File::open(&path)
.map_err(|e| IOError::with_path(e, &path))?;
// iteratively read the file to a String
loop {
let read = file
.read(&mut contents)
.map_err(|e| IOError::with_path(e, &path))?;
if read == 0 {
break;
}
result.push_str(&String::from_utf8_lossy(&contents[..read]));
}
let result = CensoredString::censor(result, &credentials);
return Ok(result);
}
}
Err(crate::ErrorKind::OtherError(format!(
"File extension not supported: {}",
path.display()
))
.into())
}
#[tracing::instrument]
pub async fn delete_logs(profile_path_id: ProfilePathId) -> crate::Result<()> {
let profile_path = profile_path_id.profile_path().await?;
let logs_folder = DirectoryInfo::profile_logs_dir(&profile_path).await?;
for entry in std::fs::read_dir(&logs_folder)
.map_err(|e| IOError::with_path(e, &logs_folder))?
{
let entry = entry.map_err(|e| IOError::with_path(e, &logs_folder))?;
let path = entry.path();
if path.is_dir() {
io::remove_dir_all(&path).await?;
}
}
Ok(())
}
#[tracing::instrument]
pub async fn delete_logs_by_filename(
profile_path_id: ProfilePathId,
log_type: LogType,
filename: &str,
) -> crate::Result<()> {
let profile_path = profile_path_id.profile_path().await?;
let logs_folder = match log_type {
LogType::InfoLog => {
DirectoryInfo::profile_logs_dir(&profile_path).await
}
LogType::CrashReport => {
DirectoryInfo::crash_reports_dir(&profile_path).await
}
}?;
let path = logs_folder.join(filename);
io::remove_dir_all(&path).await?;
Ok(())
}
#[tracing::instrument]
pub async fn get_latest_log_cursor(
profile_path: ProfilePathId,
cursor: u64, // 0 to start at beginning of file
) -> crate::Result<LatestLogCursor> {
get_generic_live_log_cursor(profile_path, "latest.log", cursor).await
}
#[tracing::instrument]
pub async fn get_generic_live_log_cursor(
profile_path_id: ProfilePathId,
log_file_name: &str,
mut cursor: u64, // 0 to start at beginning of file
) -> crate::Result<LatestLogCursor> {
let profile_path = profile_path_id.profile_path().await?;
let state = State::get().await?;
let logs_folder = DirectoryInfo::profile_logs_dir(&profile_path).await?;
let path = logs_folder.join(log_file_name);
if !path.exists() {
// Allow silent failure if latest.log doesn't exist (as the instance may have been launched, but not yet created the file)
return Ok(LatestLogCursor {
cursor: 0,
new_file: false,
output: CensoredString("".to_string()),
});
}
let mut file = File::open(&path)
.await
.map_err(|e| IOError::with_path(e, &path))?;
let metadata = file
.metadata()
.await
.map_err(|e| IOError::with_path(e, &path))?;
let mut new_file = false;
if cursor > metadata.len() {
// Cursor is greater than file length, reset cursor to 0
// Likely cause is that the file was rotated while the log was being read
cursor = 0;
new_file = true;
}
let mut buffer = Vec::new();
file.seek(SeekFrom::Start(cursor))
.map_err(|e| IOError::with_path(e, &path))
.await?; // Seek to cursor
let bytes_read = file
.read_to_end(&mut buffer)
.map_err(|e| IOError::with_path(e, &path))
.await?; // Read to end of file
let output = String::from_utf8_lossy(&buffer).to_string(); // Convert to String
let cursor = cursor + bytes_read as u64; // Update cursor
let credentials: Vec<Credentials> = state
.users
.read()
.await
.users
.clone()
.into_values()
.collect();
let output = CensoredString::censor(output, &credentials);
Ok(LatestLogCursor {
cursor,
new_file,
output,
})
}
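
A minimal sketch (not part of this commit) of polling a profile's live latest.log with the cursor API above; the logs module path is assumed to be reachable from the crate root:

async fn tail_latest_log(profile: theseus::prelude::ProfilePathId) -> theseus::Result<()> {
    let mut cursor = 0;
    for _ in 0..30 {
        // Each call returns only the bytes appended since `cursor`; new_file
        // signals that latest.log was rotated and reading restarted at 0.
        let chunk = theseus::logs::get_latest_log_cursor(profile.clone(), cursor).await?;
        cursor = chunk.cursor;
        tokio::time::sleep(std::time::Duration::from_secs(1)).await;
    }
    Ok(())
}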


@@ -0,0 +1,43 @@
use crate::State;
pub use daedalus::minecraft::VersionManifest;
pub use daedalus::modded::Manifest;
#[tracing::instrument]
pub async fn get_minecraft_versions() -> crate::Result<VersionManifest> {
let state = State::get().await?;
let tags = state.metadata.read().await.minecraft.clone();
Ok(tags)
}
#[tracing::instrument]
pub async fn get_fabric_versions() -> crate::Result<Manifest> {
let state = State::get().await?;
let tags = state.metadata.read().await.fabric.clone();
Ok(tags)
}
#[tracing::instrument]
pub async fn get_forge_versions() -> crate::Result<Manifest> {
let state = State::get().await?;
let tags = state.metadata.read().await.forge.clone();
Ok(tags)
}
#[tracing::instrument]
pub async fn get_quilt_versions() -> crate::Result<Manifest> {
let state = State::get().await?;
let tags = state.metadata.read().await.quilt.clone();
Ok(tags)
}
#[tracing::instrument]
pub async fn get_neoforge_versions() -> crate::Result<Manifest> {
let state = State::get().await?;
let tags = state.metadata.read().await.neoforge.clone();
Ok(tags)
}
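
A minimal sketch (not part of this commit) reading the cached metadata above; it assumes daedalus' VersionManifest mirrors Mojang's manifest and carries a latest.release field:

use theseus::prelude::*;

async fn latest_release() -> theseus::Result<String> {
    let manifest = metadata::get_minecraft_versions().await?;
    Ok(manifest.latest.release)
}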


@@ -0,0 +1,76 @@
//! Authentication flow interface
use crate::state::{Credentials, MinecraftLoginFlow};
use crate::State;
#[tracing::instrument]
pub async fn begin_login() -> crate::Result<MinecraftLoginFlow> {
let state = State::get().await?;
let mut users = state.users.write().await;
users.login_begin().await
}
#[tracing::instrument]
pub async fn finish_login(
code: &str,
flow: MinecraftLoginFlow,
) -> crate::Result<Credentials> {
let state = State::get().await?;
let mut users = state.users.write().await;
users.login_finish(code, flow).await
}
#[tracing::instrument]
pub async fn get_default_user() -> crate::Result<Option<uuid::Uuid>> {
let state = State::get().await?;
let users = state.users.read().await;
Ok(users.default_user)
}
#[tracing::instrument]
pub async fn set_default_user(user: uuid::Uuid) -> crate::Result<()> {
let user = get_user(user).await?;
let state = State::get().await?;
let mut users = state.users.write().await;
users.default_user = Some(user.id);
users.save().await?;
Ok(())
}
/// Remove a user account from the database
#[tracing::instrument]
pub async fn remove_user(user: uuid::Uuid) -> crate::Result<()> {
let state = State::get().await?;
let mut users = state.users.write().await;
users.remove(user).await?;
Ok(())
}
/// Get a copy of the list of all user credentials
#[tracing::instrument]
pub async fn users() -> crate::Result<Vec<Credentials>> {
let state = State::get().await?;
let users = state.users.read().await;
Ok(users.users.values().cloned().collect())
}
/// Get a specific user by user ID
/// Prefer to use 'refresh' instead of this function
#[tracing::instrument]
pub async fn get_user(user: uuid::Uuid) -> crate::Result<Credentials> {
let state = State::get().await?;
let users = state.users.read().await;
let user = users
.users
.get(&user)
.ok_or_else(|| {
crate::ErrorKind::OtherError(format!(
"Tried to get nonexistent user with ID {user}"
))
.as_error()
})?
.clone();
Ok(user)
}
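
A minimal sketch (not part of this commit) of the two-step login above; presenting the login URL (assumed to come from the returned flow) and capturing the OAuth redirect code are left to the embedder:

use theseus::prelude::*;

async fn add_account(code_from_redirect: &str) -> theseus::Result<()> {
    let flow = minecraft_auth::begin_login().await?;
    // ...show the flow's login URL to the user, wait for the redirect code...
    let creds = minecraft_auth::finish_login(code_from_redirect, flow).await?;
    minecraft_auth::set_default_user(creds.id).await?;
    Ok(())
}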


@@ -0,0 +1,40 @@
//! API for interacting with Theseus
pub mod handler;
pub mod jre;
pub mod logs;
pub mod metadata;
pub mod minecraft_auth;
pub mod mr_auth;
pub mod pack;
pub mod process;
pub mod profile;
pub mod safety;
pub mod settings;
pub mod tags;
pub mod data {
pub use crate::state::{
Credentials, DirectoryInfo, Hooks, JavaSettings, LinkedData,
MemorySettings, ModLoader, ModrinthCredentials,
ModrinthCredentialsResult, ModrinthProject, ModrinthTeamMember,
ModrinthUser, ModrinthVersion, ProfileMetadata, ProjectMetadata,
Settings, Theme, WindowSize,
};
}
pub mod prelude {
pub use crate::{
data::*,
event::CommandPayload,
jre, metadata, minecraft_auth, pack, process,
profile::{self, create, Profile},
settings,
state::JavaGlobals,
state::{Dependency, ProfilePathId, ProjectPathId},
util::{
io::{canonicalize, IOError},
jre::JavaVersion,
},
State,
};
}
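
The prelude above is intended to be the single import most embedders need; a minimal sketch (not part of this commit):

use theseus::prelude::*;

async fn boot() -> theseus::Result<()> {
    // State::get() returns the shared launcher state used by the api modules above.
    let _state = State::get().await?;
    Ok(())
}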


@@ -0,0 +1,144 @@
use crate::state::{
ModrinthAuthFlow, ModrinthCredentials, ModrinthCredentialsResult,
};
use crate::ErrorKind;
#[tracing::instrument]
pub async fn authenticate_begin_flow(provider: &str) -> crate::Result<String> {
let state = crate::State::get().await?;
// Don't start an uncompletable new flow if there's an existing locked one
let mut write: tokio::sync::RwLockWriteGuard<'_, Option<ModrinthAuthFlow>> =
state.modrinth_auth_flow.write().await;
let mut flow = ModrinthAuthFlow::new(provider).await?;
let url = flow.prepare_login_url().await?;
*write = Some(flow);
Ok(url)
}
#[tracing::instrument]
pub async fn authenticate_await_complete_flow(
) -> crate::Result<ModrinthCredentialsResult> {
let state = crate::State::get().await?;
let mut write = state.modrinth_auth_flow.write().await;
if let Some(ref mut flow) = *write {
let creds = flow.extract_credentials(&state.fetch_semaphore).await?;
if let ModrinthCredentialsResult::Credentials(creds) = &creds {
let mut write = state.credentials.write().await;
write.login(creds.clone()).await?;
}
Ok(creds)
} else {
Err(ErrorKind::OtherError(
"No active Modrinth authenication flow!".to_string(),
)
.into())
}
}
#[tracing::instrument]
pub async fn cancel_flow() -> crate::Result<()> {
let state = crate::State::get().await?;
let mut write = state.modrinth_auth_flow.write().await;
if let Some(ref mut flow) = *write {
flow.close().await?;
}
*write = None;
Ok(())
}
pub async fn login_password(
username: &str,
password: &str,
challenge: &str,
) -> crate::Result<ModrinthCredentialsResult> {
let state = crate::State::get().await?;
let creds = crate::state::login_password(
username,
password,
challenge,
&state.fetch_semaphore,
)
.await?;
if let ModrinthCredentialsResult::Credentials(creds) = &creds {
let mut write = state.credentials.write().await;
write.login(creds.clone()).await?;
}
Ok(creds)
}
#[tracing::instrument]
pub async fn login_2fa(
code: &str,
flow: &str,
) -> crate::Result<ModrinthCredentials> {
let state = crate::State::get().await?;
let creds =
crate::state::login_2fa(code, flow, &state.fetch_semaphore).await?;
let mut write = state.credentials.write().await;
write.login(creds.clone()).await?;
Ok(creds)
}
#[tracing::instrument]
pub async fn create_account(
username: &str,
email: &str,
password: &str,
challenge: &str,
sign_up_newsletter: bool,
) -> crate::Result<ModrinthCredentials> {
let state = crate::State::get().await?;
let creds = crate::state::create_account(
username,
email,
password,
challenge,
sign_up_newsletter,
&state.fetch_semaphore,
)
.await?;
let mut write = state.credentials.write().await;
write.login(creds.clone()).await?;
Ok(creds)
}
#[tracing::instrument]
pub async fn refresh() -> crate::Result<()> {
let state = crate::State::get().await?;
let mut write = state.credentials.write().await;
crate::state::refresh_credentials(&mut write, &state.fetch_semaphore)
.await?;
Ok(())
}
#[tracing::instrument]
pub async fn logout() -> crate::Result<()> {
let state = crate::State::get().await?;
let mut write = state.credentials.write().await;
write.logout().await?;
Ok(())
}
#[tracing::instrument]
pub async fn get_credentials() -> crate::Result<Option<ModrinthCredentials>> {
let state = crate::State::get().await?;
let read = state.credentials.read().await;
Ok(read.0.clone())
}
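
A minimal sketch (not part of this commit) of the browser-based Modrinth login above; the provider string, how the URL is opened, and the mr_auth module path at the crate root are all assumptions left to the embedder:

async fn modrinth_login(provider: &str, open_url: impl Fn(&str)) -> theseus::Result<()> {
    // Returns the login page URL for the chosen provider.
    let url = theseus::mr_auth::authenticate_begin_flow(provider).await?;
    open_url(&url);
    // Waits for the flow to finish; on success the credentials are stored in state.
    let _result = theseus::mr_auth::authenticate_await_complete_flow().await?;
    Ok(())
}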


@@ -0,0 +1,266 @@
use std::{collections::HashMap, path::PathBuf};
use serde::{Deserialize, Serialize};
use crate::{
pack::{
self,
import::{self, copy_dotminecraft},
install_from::CreatePackDescription,
},
prelude::{ModLoader, Profile, ProfilePathId},
state::{LinkedData, ProfileInstallStage},
util::io,
State,
};
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ATInstance {
pub id: String, // minecraft version id ie: 1.12.1, not a name
pub launcher: ATLauncher,
pub java_version: ATJavaVersion,
}
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ATLauncher {
pub name: String,
pub pack: String,
pub version: String, // ie: 1.6
pub loader_version: ATLauncherLoaderVersion,
pub modrinth_project: Option<ATLauncherModrinthProject>,
pub modrinth_version: Option<ATLauncherModrinthVersion>,
pub modrinth_manifest: Option<pack::install_from::PackFormat>,
}
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ATJavaVersion {
pub major_version: u8,
pub component: String,
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct ATLauncherLoaderVersion {
pub r#type: String,
pub version: String,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct ATLauncherModrinthProject {
pub id: String,
pub slug: String,
pub project_type: String,
pub team: String,
pub client_side: Option<String>,
pub server_side: Option<String>,
pub categories: Vec<String>,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct ATLauncherModrinthVersion {
pub id: String,
pub project_id: String,
pub name: String,
pub version_number: String,
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct ATLauncherModrinthVersionFile {
pub hashes: HashMap<String, String>,
pub url: String,
pub filename: String,
pub primary: bool,
pub size: u64,
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct ATLauncherModrinthVersionDependency {
pub project_id: Option<String>,
pub version_id: Option<String>,
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct ATLauncherMod {
pub name: String,
pub version: String,
pub file: String,
pub modrinth_project: Option<ATLauncherModrinthProject>,
pub modrinth_version: Option<ATLauncherModrinthVersion>,
}
// Check if the folder has an instance.json that parses
pub async fn is_valid_atlauncher(instance_folder: PathBuf) -> bool {
let instance: String =
io::read_to_string(&instance_folder.join("instance.json"))
.await
.unwrap_or("".to_string());
let instance: Result<ATInstance, serde_json::Error> =
serde_json::from_str::<ATInstance>(&instance);
if let Err(e) = instance {
tracing::warn!(
"Could not parse instance.json at {}: {}",
instance_folder.display(),
e
);
false
} else {
true
}
}
#[tracing::instrument]
#[theseus_macros::debug_pin]
pub async fn import_atlauncher(
atlauncher_base_path: PathBuf, // path to base atlauncher folder
instance_folder: String, // instance folder in atlauncher_base_path
profile_path: ProfilePathId, // path to profile
) -> crate::Result<()> {
let atlauncher_instance_path = atlauncher_base_path
.join("instances")
.join(instance_folder.clone());
// Load instance.json
let atinstance: String =
io::read_to_string(&atlauncher_instance_path.join("instance.json"))
.await?;
let atinstance: ATInstance =
serde_json::from_str::<ATInstance>(&atinstance)?;
// Icon path should be {instance_folder}/instance.png if it exists,
// Second possibility is ATLauncher/configs/images/{safe_pack_name}.png (safe pack name is alphanumeric lowercase)
let icon_path_primary = atlauncher_instance_path.join("instance.png");
let safe_pack_name = atinstance
.launcher
.pack
.replace(|c: char| !c.is_alphanumeric(), "")
.to_lowercase();
let icon_path_secondary = atlauncher_base_path
.join("configs")
.join("images")
.join(safe_pack_name + ".png");
let icon = match (icon_path_primary.exists(), icon_path_secondary.exists())
{
(true, _) => import::recache_icon(icon_path_primary).await?,
(_, true) => import::recache_icon(icon_path_secondary).await?,
_ => None,
};
// Create description from instance.cfg
let description = CreatePackDescription {
icon,
override_title: Some(atinstance.launcher.name.clone()),
project_id: None,
version_id: None,
existing_loading_bar: None,
profile_path: profile_path.clone(),
};
let backup_name = format!("ATLauncher-{}", instance_folder);
let minecraft_folder = atlauncher_instance_path;
import_atlauncher_unmanaged(
profile_path,
minecraft_folder,
backup_name,
description,
atinstance,
)
.await?;
Ok(())
}
async fn import_atlauncher_unmanaged(
profile_path: ProfilePathId,
minecraft_folder: PathBuf,
backup_name: String,
description: CreatePackDescription,
atinstance: ATInstance,
) -> crate::Result<()> {
let mod_loader = format!(
"\"{}\"",
atinstance.launcher.loader_version.r#type.to_lowercase()
);
let mod_loader: ModLoader = serde_json::from_str::<ModLoader>(&mod_loader)
.map_err(|_| {
crate::ErrorKind::InputError(format!(
"Could not parse mod loader type: {}",
mod_loader
))
})?;
let game_version = atinstance.id;
let loader_version = if mod_loader != ModLoader::Vanilla {
crate::profile::create::get_loader_version_from_loader(
game_version.clone(),
mod_loader,
Some(atinstance.launcher.loader_version.version.clone()),
)
.await?
} else {
None
};
// Set profile data to created default profile
crate::api::profile::edit(&profile_path, |prof| {
prof.metadata.name = description
.override_title
.clone()
.unwrap_or_else(|| backup_name.to_string());
prof.install_stage = ProfileInstallStage::PackInstalling;
prof.metadata.linked_data = Some(LinkedData {
project_id: description.project_id.clone(),
version_id: description.version_id.clone(),
locked: Some(
description.project_id.is_some()
&& description.version_id.is_some(),
),
});
prof.metadata.icon = description.icon.clone();
prof.metadata.game_version = game_version.clone();
prof.metadata.loader_version = loader_version.clone();
prof.metadata.loader = mod_loader;
async { Ok(()) }
})
.await?;
// Moves .minecraft folder over (ie: overrides such as resourcepacks, mods, etc)
let state = State::get().await?;
let loading_bar = copy_dotminecraft(
profile_path.clone(),
minecraft_folder,
&state.io_semaphore,
None,
)
.await?;
if let Some(profile_val) =
crate::api::profile::get(&profile_path, None).await?
{
crate::launcher::install_minecraft(
&profile_val,
Some(loading_bar),
false,
)
.await?;
{
let state = State::get().await?;
let mut file_watcher = state.file_watcher.write().await;
Profile::watch_fs(
&profile_val.get_profile_full_path().await?,
&mut file_watcher,
)
.await?;
}
State::sync().await?;
}
Ok(())
}
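
A minimal sketch (not part of this commit) of importing a single ATLauncher instance into an already-created blank profile; creating that profile, and the pack::import module being publicly reachable, are assumptions:

use theseus::prelude::*;

async fn import_one_atlauncher(
    atlauncher_dir: std::path::PathBuf,
    instance_name: String,
    blank_profile: ProfilePathId,
) -> theseus::Result<()> {
    // import_atlauncher expects instances/{name}/instance.json to parse.
    let folder = atlauncher_dir.join("instances").join(&instance_name);
    if pack::import::atlauncher::is_valid_atlauncher(folder).await {
        pack::import::atlauncher::import_atlauncher(
            atlauncher_dir,
            instance_name,
            blank_profile,
        )
        .await?;
    }
    Ok(())
}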


@@ -0,0 +1,198 @@
use std::path::PathBuf;
use serde::{Deserialize, Serialize};
use crate::prelude::Profile;
use crate::state::CredentialsStore;
use crate::{
prelude::{ModLoader, ProfilePathId},
state::ProfileInstallStage,
util::{
fetch::{fetch, write_cached_icon},
io,
},
State,
};
use super::{copy_dotminecraft, recache_icon};
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct MinecraftInstance {
pub name: Option<String>,
pub base_mod_loader: Option<MinecraftInstanceModLoader>,
pub profile_image_path: Option<PathBuf>,
pub installed_modpack: Option<InstalledModpack>,
pub game_version: String, // Minecraft game version. Non-prioritized, use this if Vanilla
}
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct MinecraftInstanceModLoader {
pub name: String,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct InstalledModpack {
pub thumbnail_url: Option<String>,
}
// Check if folder has a minecraftinstance.json that parses
pub async fn is_valid_curseforge(instance_folder: PathBuf) -> bool {
let minecraftinstance: String =
io::read_to_string(&instance_folder.join("minecraftinstance.json"))
.await
.unwrap_or("".to_string());
let minecraftinstance: Result<MinecraftInstance, serde_json::Error> =
serde_json::from_str::<MinecraftInstance>(&minecraftinstance);
minecraftinstance.is_ok()
}
pub async fn import_curseforge(
curseforge_instance_folder: PathBuf, // instance's folder
profile_path: ProfilePathId, // path to profile
) -> crate::Result<()> {
// Load minecraftinstance.json
let minecraft_instance: String = io::read_to_string(
&curseforge_instance_folder.join("minecraftinstance.json"),
)
.await?;
let minecraft_instance: MinecraftInstance =
serde_json::from_str::<MinecraftInstance>(&minecraft_instance)?;
let override_title: Option<String> = minecraft_instance.name.clone();
let backup_name = format!(
"Curseforge-{}",
curseforge_instance_folder
.file_name()
.map(|a| a.to_string_lossy().to_string())
.unwrap_or("Unknown".to_string())
);
let state = State::get().await?;
// Recache Curseforge Icon if it exists
let mut icon = None;
if let Some(icon_path) = minecraft_instance.profile_image_path.clone() {
icon = recache_icon(icon_path).await?;
} else if let Some(InstalledModpack {
thumbnail_url: Some(thumbnail_url),
}) = minecraft_instance.installed_modpack.clone()
{
let icon_bytes = fetch(
&thumbnail_url,
None,
&state.fetch_semaphore,
&CredentialsStore(None),
)
.await?;
// The icon's file name is the last path segment of the URL
let filename = thumbnail_url.rsplit('/').next();
if let Some(filename) = filename {
icon = Some(
write_cached_icon(
filename,
&state.directories.caches_dir(),
icon_bytes,
&state.io_semaphore,
)
.await?,
);
}
}
// base mod loader is always None for vanilla
if let Some(instance_mod_loader) = minecraft_instance.base_mod_loader {
let game_version = minecraft_instance.game_version;
// CF allows Forge, Fabric, and Vanilla
let mut mod_loader = None;
let mut loader_version = None;
match instance_mod_loader.name.split('-').collect::<Vec<&str>>()[..] {
["forge", version] => {
mod_loader = Some(ModLoader::Forge);
loader_version = Some(version.to_string());
}
["fabric", version, _game_version] => {
mod_loader = Some(ModLoader::Fabric);
loader_version = Some(version.to_string());
}
_ => {}
}
let mod_loader = mod_loader.unwrap_or(ModLoader::Vanilla);
let loader_version = if mod_loader != ModLoader::Vanilla {
crate::profile::create::get_loader_version_from_loader(
game_version.clone(),
mod_loader,
loader_version,
)
.await?
} else {
None
};
// Set profile data to created default profile
crate::api::profile::edit(&profile_path, |prof| {
prof.metadata.name = override_title
.clone()
.unwrap_or_else(|| backup_name.to_string());
prof.install_stage = ProfileInstallStage::PackInstalling;
prof.metadata.icon = icon.clone();
prof.metadata.game_version = game_version.clone();
prof.metadata.loader_version = loader_version.clone();
prof.metadata.loader = mod_loader;
async { Ok(()) }
})
.await?;
} else {
// create a vanilla profile
crate::api::profile::edit(&profile_path, |prof| {
prof.metadata.name = override_title
.clone()
.unwrap_or_else(|| backup_name.to_string());
prof.metadata.icon = icon.clone();
prof.metadata.game_version =
minecraft_instance.game_version.clone();
prof.metadata.loader_version = None;
prof.metadata.loader = ModLoader::Vanilla;
async { Ok(()) }
})
.await?;
}
// Copy in contained folders as overrides
let state = State::get().await?;
let loading_bar = copy_dotminecraft(
profile_path.clone(),
curseforge_instance_folder,
&state.io_semaphore,
None,
)
.await?;
if let Some(profile_val) =
crate::api::profile::get(&profile_path, None).await?
{
crate::launcher::install_minecraft(
&profile_val,
Some(loading_bar),
false,
)
.await?;
{
let state = State::get().await?;
let mut file_watcher = state.file_watcher.write().await;
Profile::watch_fs(
&profile_val.get_profile_full_path().await?,
&mut file_watcher,
)
.await?;
}
State::sync().await?;
}
Ok(())
}


@@ -0,0 +1,134 @@
use std::path::PathBuf;
use serde::{Deserialize, Serialize};
use crate::{
prelude::{ModLoader, Profile, ProfilePathId},
state::ProfileInstallStage,
util::io,
State,
};
use super::{copy_dotminecraft, recache_icon};
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct GDLauncherConfig {
pub background: Option<String>,
pub loader: GDLauncherLoader,
// pub mods: Vec<GDLauncherMod>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct GDLauncherLoader {
pub loader_type: ModLoader,
pub loader_version: Option<String>,
pub mc_version: String,
pub source: Option<String>,
pub source_name: Option<String>,
}
// Check if folder has a config.json that parses
pub async fn is_valid_gdlauncher(instance_folder: PathBuf) -> bool {
let config: String =
io::read_to_string(&instance_folder.join("config.json"))
.await
.unwrap_or("".to_string());
let config: Result<GDLauncherConfig, serde_json::Error> =
serde_json::from_str::<GDLauncherConfig>(&config);
config.is_ok()
}
pub async fn import_gdlauncher(
gdlauncher_instance_folder: PathBuf, // instance's folder
profile_path: ProfilePathId, // path to profile
) -> crate::Result<()> {
// Load config.json
let config: String =
io::read_to_string(&gdlauncher_instance_folder.join("config.json"))
.await?;
let config: GDLauncherConfig =
serde_json::from_str::<GDLauncherConfig>(&config)?;
let override_title: Option<String> = config.loader.source_name.clone();
let backup_name = format!(
"GDLauncher-{}",
gdlauncher_instance_folder
.file_name()
.map(|a| a.to_string_lossy().to_string())
.unwrap_or("Unknown".to_string())
);
// Re-cache icon
let icon = config
.background
.clone()
.map(|b| gdlauncher_instance_folder.join(b));
let icon = if let Some(icon) = icon {
recache_icon(icon).await?
} else {
None
};
let game_version = config.loader.mc_version;
let mod_loader = config.loader.loader_type;
let loader_version = config.loader.loader_version;
let loader_version = if mod_loader != ModLoader::Vanilla {
crate::profile::create::get_loader_version_from_loader(
game_version.clone(),
mod_loader,
loader_version,
)
.await?
} else {
None
};
// Set profile data to created default profile
crate::api::profile::edit(&profile_path, |prof| {
prof.metadata.name = override_title
.clone()
.unwrap_or_else(|| backup_name.to_string());
prof.install_stage = ProfileInstallStage::PackInstalling;
prof.metadata.icon = icon.clone();
prof.metadata.game_version = game_version.clone();
prof.metadata.loader_version = loader_version.clone();
prof.metadata.loader = mod_loader;
async { Ok(()) }
})
.await?;
// Copy in contained folders as overrides
let state = State::get().await?;
let loading_bar = copy_dotminecraft(
profile_path.clone(),
gdlauncher_instance_folder,
&state.io_semaphore,
None,
)
.await?;
if let Some(profile_val) =
crate::api::profile::get(&profile_path, None).await?
{
crate::launcher::install_minecraft(
&profile_val,
Some(loading_bar),
false,
)
.await?;
{
let state = State::get().await?;
let mut file_watcher = state.file_watcher.write().await;
Profile::watch_fs(
&profile_val.get_profile_full_path().await?,
&mut file_watcher,
)
.await?;
}
State::sync().await?;
}
Ok(())
}


@@ -0,0 +1,344 @@
use std::path::{Path, PathBuf};
use serde::{de, Deserialize, Serialize};
use crate::{
pack::{
import::{self, copy_dotminecraft},
install_from::{self, CreatePackDescription, PackDependency},
},
prelude::{Profile, ProfilePathId},
util::io,
State,
};
// instance.cfg
// https://github.com/PrismLauncher/PrismLauncher/blob/develop/launcher/minecraft/MinecraftInstance.cpp
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "PascalCase")]
#[serde(untagged)]
enum MMCInstanceEnum {
General(MMCInstanceGeneral),
Instance(MMCInstance),
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "PascalCase")]
struct MMCInstanceGeneral {
pub general: MMCInstance,
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "PascalCase")]
pub struct MMCInstance {
pub java_path: Option<String>,
pub jvm_args: Option<String>,
#[serde(default)]
#[serde(deserialize_with = "deserialize_optional_bool")]
pub managed_pack: Option<bool>,
#[serde(rename = "ManagedPackID")]
pub managed_pack_id: Option<String>,
pub managed_pack_type: Option<MMCManagedPackType>,
#[serde(rename = "ManagedPackVersionID")]
pub managed_pack_version_id: Option<String>,
pub managed_pack_version_name: Option<String>,
#[serde(rename = "iconKey")]
pub icon_key: Option<String>,
#[serde(rename = "name")]
pub name: Option<String>,
}
// serde_ini reads 'true' and 'false' as strings, so we need to convert them to booleans
fn deserialize_optional_bool<'de, D>(
deserializer: D,
) -> Result<Option<bool>, D::Error>
where
D: de::Deserializer<'de>,
{
let s = Option::<String>::deserialize(deserializer)?;
match s {
Some(string) => match string.as_str() {
"true" => Ok(Some(true)),
"false" => Ok(Some(false)),
_ => Err(de::Error::custom("expected 'true' or 'false'")),
},
None => Ok(None),
}
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "lowercase")]
pub enum MMCManagedPackType {
Modrinth,
Flame,
ATLauncher,
#[serde(other)]
Unknown,
}
// mmc-pack.json
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct MMCPack {
components: Vec<MMCComponent>,
format_version: u32,
}
// https://github.com/PrismLauncher/PrismLauncher/blob/develop/launcher/minecraft/Component.h
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct MMCComponent {
pub uid: String,
#[serde(default)]
pub version: Option<String>,
#[serde(default)]
pub dependency_only: bool,
#[serde(default)]
pub important: bool,
#[serde(default)]
pub disabled: bool,
pub cached_name: Option<String>,
pub cached_version: Option<String>,
#[serde(default)]
pub cached_requires: Vec<MMCComponentRequirement>,
#[serde(default)]
pub cached_conflicts: Vec<MMCComponentRequirement>,
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct MMCComponentRequirement {
pub uid: String,
pub equals_version: Option<String>,
pub suggests: Option<String>,
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "PascalCase")]
#[serde(untagged)]
enum MMCLauncherEnum {
General(MMCLauncherGeneral),
Instance(MMCLauncher),
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "PascalCase")]
struct MMCLauncherGeneral {
pub general: MMCLauncher,
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "PascalCase")]
pub struct MMCLauncher {
instance_dir: String,
}
// Checks that it is a folder containing instance.cfg and mmc-pack.json, and that both parse
#[tracing::instrument]
pub async fn is_valid_mmc(instance_folder: PathBuf) -> bool {
let instance_cfg = instance_folder.join("instance.cfg");
let mmc_pack = instance_folder.join("mmc-pack.json");
let mmc_pack = match io::read_to_string(&mmc_pack).await {
Ok(mmc_pack) => mmc_pack,
Err(_) => return false,
};
load_instance_cfg(&instance_cfg).await.is_ok()
&& serde_json::from_str::<MMCPack>(&mmc_pack).is_ok()
}
#[tracing::instrument]
pub async fn get_instances_subpath(config: PathBuf) -> Option<String> {
let launcher = io::read_to_string(&config).await.ok()?;
let launcher: MMCLauncherEnum = serde_ini::from_str(&launcher).ok()?;
match launcher {
MMCLauncherEnum::General(p) => Some(p.general.instance_dir),
MMCLauncherEnum::Instance(p) => Some(p.instance_dir),
}
}
// Loading the INI (instance.cfg) file
async fn load_instance_cfg(file_path: &Path) -> crate::Result<MMCInstance> {
let instance_cfg: String = io::read_to_string(file_path).await?;
let instance_cfg_enum: MMCInstanceEnum =
serde_ini::from_str::<MMCInstanceEnum>(&instance_cfg)?;
match instance_cfg_enum {
MMCInstanceEnum::General(instance_cfg) => Ok(instance_cfg.general),
MMCInstanceEnum::Instance(instance_cfg) => Ok(instance_cfg),
}
}
#[tracing::instrument]
#[theseus_macros::debug_pin]
pub async fn import_mmc(
mmc_base_path: PathBuf, // path to base mmc folder
instance_folder: String, // instance folder in mmc_base_path
profile_path: ProfilePathId, // path to profile
) -> crate::Result<()> {
let mmc_instance_path = mmc_base_path
.join("instances")
.join(instance_folder.clone());
let mmc_pack =
io::read_to_string(&mmc_instance_path.join("mmc-pack.json")).await?;
let mmc_pack: MMCPack = serde_json::from_str::<MMCPack>(&mmc_pack)?;
let instance_cfg =
load_instance_cfg(&mmc_instance_path.join("instance.cfg")).await?;
// Re-cache icon
let icon = if let Some(icon_key) = instance_cfg.icon_key {
let icon_path = mmc_base_path.join("icons").join(icon_key);
import::recache_icon(icon_path).await?
} else {
None
};
// Create description from instance.cfg
let description = CreatePackDescription {
icon,
override_title: instance_cfg.name,
project_id: instance_cfg.managed_pack_id,
version_id: instance_cfg.managed_pack_version_id,
existing_loading_bar: None,
profile_path: profile_path.clone(),
};
// Managed pack
let backup_name = "Imported Modpack".to_string();
if instance_cfg.managed_pack.unwrap_or(false) {
match instance_cfg.managed_pack_type {
Some(MMCManagedPackType::Modrinth) => {
// Modrinth Managed Pack
// Kept separate as we may in the future want to add special handling for modrinth managed packs
let backup_name = "Imported Modrinth Modpack".to_string();
let minecraft_folder = mmc_base_path.join("instances").join(instance_folder).join(".minecraft");
import_mmc_unmanaged(profile_path, minecraft_folder, backup_name, description, mmc_pack).await?;
}
Some(MMCManagedPackType::Flame) | Some(MMCManagedPackType::ATLauncher) => {
// For flame/atlauncher managed packs
// Treat as unmanaged, but with 'minecraft' folder instead of '.minecraft'
let minecraft_folder = mmc_base_path.join("instances").join(instance_folder).join("minecraft");
import_mmc_unmanaged(profile_path, minecraft_folder, backup_name, description, mmc_pack).await?;
},
Some(_) => {
// For managed packs that aren't modrinth, flame, atlauncher
// Treat as unmanaged
let backup_name = "ImportedModpack".to_string();
let minecraft_folder = mmc_base_path.join("instances").join(instance_folder).join(".minecraft");
import_mmc_unmanaged(profile_path, minecraft_folder, backup_name, description, mmc_pack).await?;
},
_ => return Err(crate::ErrorKind::InputError({
"Instance is managed, but managed pack type not specified in instance.cfg".to_string()
}).into())
}
} else {
// Directly import unmanaged pack
let backup_name = "Imported Modpack".to_string();
let minecraft_folder = mmc_base_path
.join("instances")
.join(instance_folder)
.join(".minecraft");
import_mmc_unmanaged(
profile_path,
minecraft_folder,
backup_name,
description,
mmc_pack,
)
.await?;
}
Ok(())
}
async fn import_mmc_unmanaged(
profile_path: ProfilePathId,
minecraft_folder: PathBuf,
backup_name: String,
description: CreatePackDescription,
mmc_pack: MMCPack,
) -> crate::Result<()> {
// Pack dependencies stored in mmc-pack.json, we convert to .mrpack pack dependencies
let dependencies = mmc_pack
.components
.iter()
.filter_map(|component| {
if component.uid.starts_with("net.fabricmc.fabric-loader") {
return Some((
PackDependency::FabricLoader,
component.version.clone().unwrap_or_default(),
));
}
if component.uid.starts_with("net.minecraftforge") {
return Some((
PackDependency::Forge,
component.version.clone().unwrap_or_default(),
));
}
if component.uid.starts_with("org.quiltmc.quilt-loader") {
return Some((
PackDependency::QuiltLoader,
component.version.clone().unwrap_or_default(),
));
}
if component.uid.starts_with("net.minecraft") {
return Some((
PackDependency::Minecraft,
component.version.clone().unwrap_or_default(),
));
}
None
})
.collect();
// Sets profile information to be that loaded from mmc-pack.json and instance.cfg
install_from::set_profile_information(
profile_path.clone(),
&description,
&backup_name,
&dependencies,
false,
)
.await?;
// Moves .minecraft folder over (ie: overrides such as resourcepacks, mods, etc)
let state = State::get().await?;
let loading_bar = copy_dotminecraft(
profile_path.clone(),
minecraft_folder,
&state.io_semaphore,
None,
)
.await?;
if let Some(profile_val) =
crate::api::profile::get(&profile_path, None).await?
{
crate::launcher::install_minecraft(
&profile_val,
Some(loading_bar),
false,
)
.await?;
{
let state = State::get().await?;
let mut file_watcher = state.file_watcher.write().await;
Profile::watch_fs(
&profile_val.get_profile_full_path().await?,
&mut file_watcher,
)
.await?;
}
State::sync().await?;
}
Ok(())
}
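
The custom deserializer above exists because serde_ini hands every value over as a string; a minimal sketch (not part of this commit), written as if inside this module, of a config fragment it accepts (the values are made up):

fn parse_cfg_fragment() -> crate::Result<MMCInstance> {
    // ManagedPack goes through deserialize_optional_bool, name uses its explicit
    // rename, and any missing Option fields simply become None.
    let cfg = "ManagedPack=true\r\nManagedPackID=abcdef12\r\nname=My Instance\r\n";
    Ok(serde_ini::from_str::<MMCInstance>(cfg)?)
}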


@@ -0,0 +1,320 @@
use std::{
fmt,
path::{Path, PathBuf},
};
use io::IOError;
use serde::{Deserialize, Serialize};
use crate::{
event::{
emit::{emit_loading, init_or_edit_loading},
LoadingBarId,
},
prelude::ProfilePathId,
state::Profiles,
util::{
fetch::{self, IoSemaphore},
io,
},
};
pub mod atlauncher;
pub mod curseforge;
pub mod gdlauncher;
pub mod mmc;
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
pub enum ImportLauncherType {
MultiMC,
PrismLauncher,
ATLauncher,
GDLauncher,
Curseforge,
#[serde(other)]
Unknown,
}
// impl display
impl fmt::Display for ImportLauncherType {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
ImportLauncherType::MultiMC => write!(f, "MultiMC"),
ImportLauncherType::PrismLauncher => write!(f, "PrismLauncher"),
ImportLauncherType::ATLauncher => write!(f, "ATLauncher"),
ImportLauncherType::GDLauncher => write!(f, "GDLauncher"),
ImportLauncherType::Curseforge => write!(f, "Curseforge"),
ImportLauncherType::Unknown => write!(f, "Unknown"),
}
}
}
// Return a list of importable instances for a launcher type and base path, by iterating through the instances folder and checking each subfolder for a valid instance
pub async fn get_importable_instances(
launcher_type: ImportLauncherType,
base_path: PathBuf,
) -> crate::Result<Vec<String>> {
// Some launchers have a different folder structure for instances
let instances_subfolder = match launcher_type {
ImportLauncherType::GDLauncher | ImportLauncherType::ATLauncher => {
"instances".to_string()
}
ImportLauncherType::Curseforge => "Instances".to_string(),
ImportLauncherType::MultiMC => {
mmc::get_instances_subpath(base_path.clone().join("multimc.cfg"))
.await
.unwrap_or_else(|| "instances".to_string())
}
ImportLauncherType::PrismLauncher => mmc::get_instances_subpath(
base_path.clone().join("prismlauncher.cfg"),
)
.await
.unwrap_or_else(|| "instances".to_string()),
ImportLauncherType::Unknown => {
return Err(crate::ErrorKind::InputError(
"Launcher type Unknown".to_string(),
)
.into())
}
};
let instances_folder = base_path.join(&instances_subfolder);
let mut instances = Vec::new();
let mut dir = io::read_dir(&instances_folder).await.map_err(|_| {
crate::ErrorKind::InputError(format!(
"Invalid {launcher_type} launcher path, could not find '{instances_subfolder}' subfolder."
))
})?;
while let Some(entry) = dir
.next_entry()
.await
.map_err(|e| IOError::with_path(e, &instances_folder))?
{
let path = entry.path();
if path.is_dir() {
// Check instance is valid of this launcher type
if is_valid_importable_instance(path.clone(), launcher_type).await {
let name = path.file_name();
if let Some(name) = name {
instances.push(name.to_string_lossy().to_string());
}
}
}
}
Ok(instances)
}
// Import an instance from a launcher type and base path
// Note: this *deletes* the submitted empty profile
#[theseus_macros::debug_pin]
#[tracing::instrument]
pub async fn import_instance(
profile_path: ProfilePathId, // This should be a blank profile
launcher_type: ImportLauncherType,
base_path: PathBuf,
instance_folder: String,
) -> crate::Result<()> {
tracing::debug!("Importing instance from {instance_folder}");
let res = match launcher_type {
ImportLauncherType::MultiMC | ImportLauncherType::PrismLauncher => {
mmc::import_mmc(
base_path, // path to base mmc folder
instance_folder, // instance folder in mmc_base_path
profile_path.clone(), // path to profile
)
.await
}
ImportLauncherType::ATLauncher => {
atlauncher::import_atlauncher(
base_path, // path to atlauncher folder
instance_folder, // instance folder in atlauncher
profile_path.clone(), // path to profile
)
.await
}
ImportLauncherType::GDLauncher => {
gdlauncher::import_gdlauncher(
base_path.join("instances").join(instance_folder), // path to gdlauncher folder
profile_path.clone(), // path to profile
)
.await
}
ImportLauncherType::Curseforge => {
curseforge::import_curseforge(
base_path.join("Instances").join(instance_folder), // path to curseforge folder
profile_path.clone(), // path to profile
)
.await
}
ImportLauncherType::Unknown => {
return Err(crate::ErrorKind::InputError(
"Launcher type Unknown".to_string(),
)
.into());
}
};
// If import failed, delete the profile
match res {
Ok(_) => {}
Err(e) => {
tracing::warn!("Import failed: {:?}", e);
let _ = crate::api::profile::remove(&profile_path).await;
return Err(e);
}
}
// Check existing managed packs for potential updates
tokio::task::spawn(Profiles::update_modrinth_versions());
tracing::debug!("Completed import.");
Ok(())
}
/// Returns the default path for the given launcher type
/// None if it can't be found or doesn't exist
pub fn get_default_launcher_path(
r#type: ImportLauncherType,
) -> Option<PathBuf> {
let path = match r#type {
ImportLauncherType::MultiMC => None, // multimc data is *in* app dir
ImportLauncherType::PrismLauncher => {
Some(dirs::data_dir()?.join("PrismLauncher"))
}
ImportLauncherType::ATLauncher => {
Some(dirs::data_dir()?.join("ATLauncher"))
}
ImportLauncherType::GDLauncher => {
Some(dirs::data_dir()?.join("gdlauncher_next"))
}
ImportLauncherType::Curseforge => {
Some(dirs::home_dir()?.join("curseforge").join("minecraft"))
}
ImportLauncherType::Unknown => None,
};
let path = path?;
if path.exists() {
Some(path)
} else {
None
}
}
/// Checks if this PathBuf is a valid instance for the given launcher type
#[theseus_macros::debug_pin]
#[tracing::instrument]
pub async fn is_valid_importable_instance(
instance_path: PathBuf,
r#type: ImportLauncherType,
) -> bool {
match r#type {
ImportLauncherType::MultiMC | ImportLauncherType::PrismLauncher => {
mmc::is_valid_mmc(instance_path).await
}
ImportLauncherType::ATLauncher => {
atlauncher::is_valid_atlauncher(instance_path).await
}
ImportLauncherType::GDLauncher => {
gdlauncher::is_valid_gdlauncher(instance_path).await
}
ImportLauncherType::Curseforge => {
curseforge::is_valid_curseforge(instance_path).await
}
ImportLauncherType::Unknown => false,
}
}
/// Caches an image file in the filesystem into the cache directory, and returns the path to the cached file.
#[theseus_macros::debug_pin]
#[tracing::instrument]
pub async fn recache_icon(
icon_path: PathBuf,
) -> crate::Result<Option<PathBuf>> {
let state = crate::State::get().await?;
let bytes = tokio::fs::read(&icon_path).await;
if let Ok(bytes) = bytes {
let bytes = bytes::Bytes::from(bytes);
let cache_dir = &state.directories.caches_dir();
let semaphore = &state.io_semaphore;
Ok(Some(
fetch::write_cached_icon(
&icon_path.to_string_lossy(),
cache_dir,
bytes,
semaphore,
)
.await?,
))
} else {
// could not find icon (for instance, prism default icon, etc)
Ok(None)
}
}
pub async fn copy_dotminecraft(
profile_path_id: ProfilePathId,
dotminecraft: PathBuf,
io_semaphore: &IoSemaphore,
existing_loading_bar: Option<LoadingBarId>,
) -> crate::Result<LoadingBarId> {
// Get full path to profile
let profile_path = profile_path_id.get_full_path().await?;
// Gets all subfiles recursively in src
let subfiles = get_all_subfiles(&dotminecraft).await?;
let total_subfiles = subfiles.len() as u64;
let loading_bar = init_or_edit_loading(
existing_loading_bar,
crate::LoadingBarType::CopyProfile {
import_location: dotminecraft.clone(),
profile_name: profile_path_id.to_string(),
},
total_subfiles as f64,
"Copying files in profile",
)
.await?;
// Copy each file
for src_child in subfiles {
let dst_child =
src_child.strip_prefix(&dotminecraft).map_err(|_| {
crate::ErrorKind::InputError(format!(
"Invalid file: {}",
&src_child.display()
))
})?;
let dst_child = profile_path.join(dst_child);
// Briefly sleep between copies to avoid hogging the CPU
tokio::time::sleep(std::time::Duration::from_millis(1)).await;
fetch::copy(&src_child, &dst_child, io_semaphore).await?;
emit_loading(&loading_bar, 1.0, None).await?;
}
Ok(loading_bar)
}
/// Recursively get a list of all subfiles in src
/// uses async recursion
#[theseus_macros::debug_pin]
#[async_recursion::async_recursion]
#[tracing::instrument]
pub async fn get_all_subfiles(src: &Path) -> crate::Result<Vec<PathBuf>> {
if !src.is_dir() {
return Ok(vec![src.to_path_buf()]);
}
let mut files = Vec::new();
let mut dir = io::read_dir(&src).await?;
while let Some(child) = dir
.next_entry()
.await
.map_err(|e| IOError::with_path(e, src))?
{
let src_child = child.path();
files.append(&mut get_all_subfiles(&src_child).await?);
}
Ok(files)
}

View File

@@ -0,0 +1,419 @@
use crate::config::MODRINTH_API_URL;
use crate::data::ModLoader;
use crate::event::emit::{emit_loading, init_loading};
use crate::event::{LoadingBarId, LoadingBarType};
use crate::prelude::ProfilePathId;
use crate::state::{
LinkedData, ModrinthProject, ModrinthVersion, ProfileInstallStage, SideType,
};
use crate::util::fetch::{
fetch, fetch_advanced, fetch_json, write_cached_icon,
};
use crate::util::io;
use crate::{InnerProjectPathUnix, State};
use reqwest::Method;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::PathBuf;
#[derive(Serialize, Deserialize, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct PackFormat {
pub game: String,
pub format_version: i32,
pub version_id: String,
pub name: String,
pub summary: Option<String>,
pub files: Vec<PackFile>,
pub dependencies: HashMap<PackDependency, String>,
}
#[derive(Serialize, Deserialize, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct PackFile {
pub path: InnerProjectPathUnix,
pub hashes: HashMap<PackFileHash, String>,
pub env: Option<HashMap<EnvType, SideType>>,
pub downloads: Vec<String>,
pub file_size: u32,
}
#[derive(Serialize, Deserialize, Eq, PartialEq, Hash)]
#[serde(rename_all = "camelCase", from = "String")]
pub enum PackFileHash {
Sha1,
Sha512,
Unknown(String),
}
impl From<String> for PackFileHash {
fn from(s: String) -> Self {
match s.as_str() {
    "sha1" => PackFileHash::Sha1,
    "sha512" => PackFileHash::Sha512,
    _ => PackFileHash::Unknown(s),
}
}
}
#[derive(Serialize, Deserialize, Eq, PartialEq, Hash)]
#[serde(rename_all = "camelCase")]
pub enum EnvType {
Client,
Server,
}
#[derive(Serialize, Deserialize, Clone, Copy, Hash, PartialEq, Eq, Debug)]
pub enum PackDependency {
#[serde(rename = "forge")]
Forge,
#[serde(rename = "neoforge")]
#[serde(alias = "neo-forge")]
NeoForge,
#[serde(rename = "fabric-loader")]
FabricLoader,
#[serde(rename = "quilt-loader")]
QuiltLoader,
#[serde(rename = "minecraft")]
Minecraft,
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase", tag = "type")]
pub enum CreatePackLocation {
// Create a pack from a modrinth version ID (such as a modpack)
FromVersionId {
project_id: String,
version_id: String,
title: String,
icon_url: Option<String>,
},
// Create a pack from a file (such as an .mrpack for installing from a file, or a folder name for importing)
FromFile {
path: PathBuf,
},
}
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CreatePackProfile {
pub name: String, // the name of the profile, and relative path
pub game_version: String, // the game version of the profile
pub modloader: ModLoader, // the modloader to use
pub loader_version: Option<String>, // the modloader version to use, set to "latest", "stable", or the ID of your chosen loader. defaults to latest
pub icon: Option<PathBuf>, // the icon for the profile
pub icon_url: Option<String>, // the URL icon for a profile (ONLY USED FOR TEMPORARY PROFILES)
pub linked_data: Option<LinkedData>, // the linked project ID (mainly for modpacks)- used for updating
pub skip_install_profile: Option<bool>,
pub no_watch: Option<bool>,
}
// default
impl Default for CreatePackProfile {
fn default() -> Self {
CreatePackProfile {
name: "Untitled".to_string(),
game_version: "1.19.4".to_string(),
modloader: ModLoader::Vanilla,
loader_version: None,
icon: None,
icon_url: None,
linked_data: None,
skip_install_profile: Some(true),
no_watch: Some(false),
}
}
}
#[derive(Clone)]
pub struct CreatePack {
pub file: bytes::Bytes,
pub description: CreatePackDescription,
}
#[derive(Clone, Debug)]
pub struct CreatePackDescription {
pub icon: Option<PathBuf>,
pub override_title: Option<String>,
pub project_id: Option<String>,
pub version_id: Option<String>,
pub existing_loading_bar: Option<LoadingBarId>,
pub profile_path: ProfilePathId,
}
pub fn get_profile_from_pack(
location: CreatePackLocation,
) -> CreatePackProfile {
match location {
CreatePackLocation::FromVersionId {
project_id,
version_id,
title,
icon_url,
} => CreatePackProfile {
name: title,
icon_url,
linked_data: Some(LinkedData {
project_id: Some(project_id),
version_id: Some(version_id),
locked: Some(true),
}),
..Default::default()
},
CreatePackLocation::FromFile { path } => {
let file_name = path
.file_stem()
.unwrap_or_default()
.to_string_lossy()
.to_string();
CreatePackProfile {
name: file_name,
..Default::default()
}
}
}
}
#[tracing::instrument]
#[theseus_macros::debug_pin]
pub async fn generate_pack_from_version_id(
project_id: String,
version_id: String,
title: String,
icon_url: Option<String>,
profile_path: ProfilePathId,
// Optional loading bar that is already initialized with the PackFileDownload bar type
// (unlike existing_loading_bar elsewhere, which gets initialized/edited inside this function)
// For example, you might use this if multiple packs are being downloaded at once and you want them to share one loading bar
initialized_loading_bar: Option<LoadingBarId>,
) -> crate::Result<CreatePack> {
let state = State::get().await?;
let loading_bar = if let Some(bar) = initialized_loading_bar {
emit_loading(&bar, 0.0, Some("Downloading pack file")).await?;
bar
} else {
init_loading(
LoadingBarType::PackFileDownload {
profile_path: profile_path.get_full_path().await?,
pack_name: title,
icon: icon_url,
pack_version: version_id.clone(),
},
100.0,
"Downloading pack file",
)
.await?
};
emit_loading(&loading_bar, 0.0, Some("Fetching version")).await?;
let creds = state.credentials.read().await;
let version: ModrinthVersion = fetch_json(
Method::GET,
&format!("{}version/{}", MODRINTH_API_URL, version_id),
None,
None,
&state.fetch_semaphore,
&creds,
)
.await?;
emit_loading(&loading_bar, 10.0, None).await?;
let (url, hash) =
if let Some(file) = version.files.iter().find(|x| x.primary) {
Some((file.url.clone(), file.hashes.get("sha1")))
} else {
version
.files
.first()
.map(|file| (file.url.clone(), file.hashes.get("sha1")))
}
.ok_or_else(|| {
crate::ErrorKind::InputError(
"Specified version has no files".to_string(),
)
})?;
let file = fetch_advanced(
Method::GET,
&url,
hash.map(|x| &**x),
None,
None,
Some((&loading_bar, 70.0)),
&state.fetch_semaphore,
&creds,
)
.await?;
emit_loading(&loading_bar, 0.0, Some("Fetching project metadata")).await?;
let project: ModrinthProject = fetch_json(
Method::GET,
&format!("{}project/{}", MODRINTH_API_URL, version.project_id),
None,
None,
&state.fetch_semaphore,
&creds,
)
.await?;
emit_loading(&loading_bar, 10.0, Some("Retrieving icon")).await?;
let icon = if let Some(icon_url) = project.icon_url {
let state = State::get().await?;
let icon_bytes =
fetch(&icon_url, None, &state.fetch_semaphore, &creds).await?;
drop(creds);
let filename = icon_url.rsplit('/').next();
if let Some(filename) = filename {
Some(
write_cached_icon(
filename,
&state.directories.caches_dir(),
icon_bytes,
&state.io_semaphore,
)
.await?,
)
} else {
None
}
} else {
None
};
emit_loading(&loading_bar, 10.0, None).await?;
Ok(CreatePack {
file,
description: CreatePackDescription {
icon,
override_title: None,
project_id: Some(project_id),
version_id: Some(version_id),
existing_loading_bar: Some(loading_bar),
profile_path,
},
})
}
#[tracing::instrument]
#[theseus_macros::debug_pin]
pub async fn generate_pack_from_file(
path: PathBuf,
profile_path: ProfilePathId,
) -> crate::Result<CreatePack> {
let file = io::read(&path).await?;
Ok(CreatePack {
file: bytes::Bytes::from(file),
description: CreatePackDescription {
icon: None,
override_title: None,
project_id: None,
version_id: None,
existing_loading_bar: None,
profile_path,
},
})
}
/// Sets generated profile attributes to the pack ones (using profile::edit)
/// This includes the pack name, icon, game version, loader version, and loader
#[theseus_macros::debug_pin]
pub async fn set_profile_information(
profile_path: ProfilePathId,
description: &CreatePackDescription,
backup_name: &str,
dependencies: &HashMap<PackDependency, String>,
ignore_lock: bool, // do not change locked status
) -> crate::Result<()> {
let mut game_version: Option<&String> = None;
let mut mod_loader = None;
let mut loader_version = None;
for (key, value) in dependencies {
match key {
PackDependency::Forge => {
mod_loader = Some(ModLoader::Forge);
loader_version = Some(value);
}
PackDependency::NeoForge => {
mod_loader = Some(ModLoader::NeoForge);
loader_version = Some(value);
}
PackDependency::FabricLoader => {
mod_loader = Some(ModLoader::Fabric);
loader_version = Some(value);
}
PackDependency::QuiltLoader => {
mod_loader = Some(ModLoader::Quilt);
loader_version = Some(value);
}
PackDependency::Minecraft => game_version = Some(value),
}
}
let game_version = if let Some(game_version) = game_version {
game_version
} else {
return Err(crate::ErrorKind::InputError(
"Pack did not specify Minecraft version".to_string(),
)
.into());
};
let mod_loader = mod_loader.unwrap_or(ModLoader::Vanilla);
let loader_version = if mod_loader != ModLoader::Vanilla {
crate::profile::create::get_loader_version_from_loader(
game_version.clone(),
mod_loader,
loader_version.cloned(),
)
.await?
} else {
None
};
// Sets values in profile
crate::api::profile::edit(&profile_path, |prof| {
prof.metadata.name = description
.override_title
.clone()
.unwrap_or_else(|| backup_name.to_string());
prof.install_stage = ProfileInstallStage::PackInstalling;
let project_id = description.project_id.clone();
let version_id = description.version_id.clone();
prof.metadata.linked_data = if project_id.is_some()
&& version_id.is_some()
{
Some(LinkedData {
project_id,
version_id,
locked: if !ignore_lock {
Some(true)
} else {
prof.metadata.linked_data.as_ref().and_then(|x| x.locked)
},
})
} else {
None
};
prof.metadata.icon = description.icon.clone();
prof.metadata.game_version = game_version.clone();
prof.metadata.loader_version = loader_version.clone();
prof.metadata.loader = mod_loader;
async { Ok(()) }
})
.await?;
Ok(())
}

View File

@@ -0,0 +1,430 @@
use crate::config::MODRINTH_API_URL;
use crate::event::emit::{
emit_loading, init_or_edit_loading, loading_try_for_each_concurrent,
};
use crate::event::LoadingBarType;
use crate::pack::install_from::{
set_profile_information, EnvType, PackFile, PackFileHash,
};
use crate::prelude::{ModrinthVersion, ProfilePathId, ProjectMetadata};
use crate::state::{ProfileInstallStage, Profiles, SideType};
use crate::util::fetch::{fetch_json, fetch_mirrors, write};
use crate::util::io;
use crate::{profile, State};
use async_zip::base::read::seek::ZipFileReader;
use reqwest::Method;
use serde_json::json;
use std::collections::HashMap;
use std::io::Cursor;
use std::path::{Component, PathBuf};
use super::install_from::{
generate_pack_from_file, generate_pack_from_version_id, CreatePack,
CreatePackLocation, PackFormat,
};
/// Install a pack
/// Wrapper around install_zipped_mrpack_files that generates a pack creation description and
/// attempts to install the pack files. If that fails, it removes the profile (fails safely).
/// Installs a modpack from an .mrpack file (Modrinth's zip-based modpack format)
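/// A hedged usage sketch (`profile_path` is assumed to be an already-created ProfilePathId,
/// and the .mrpack path is illustrative):
/// ```ignore
/// let location = CreatePackLocation::FromFile {
///     path: PathBuf::from("/downloads/some-pack.mrpack"),
/// };
/// let installed = install_zipped_mrpack(location, profile_path).await?;
/// ```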
#[theseus_macros::debug_pin]
pub async fn install_zipped_mrpack(
location: CreatePackLocation,
profile_path: ProfilePathId,
) -> crate::Result<ProfilePathId> {
// Get file from description
let create_pack: CreatePack = match location {
CreatePackLocation::FromVersionId {
project_id,
version_id,
title,
icon_url,
} => {
generate_pack_from_version_id(
project_id,
version_id,
title,
icon_url,
profile_path.clone(),
None,
)
.await?
}
CreatePackLocation::FromFile { path } => {
generate_pack_from_file(path, profile_path.clone()).await?
}
};
// Install pack files, and if it fails, fail safely by removing the profile
let result = install_zipped_mrpack_files(create_pack, false).await;
// Check existing managed packs for potential updates
tokio::task::spawn(Profiles::update_modrinth_versions());
match result {
Ok(profile) => Ok(profile),
Err(err) => {
let _ = crate::api::profile::remove(&profile_path).await;
Err(err)
}
}
}
/// Install all pack files from a description
/// Does not remove the profile if it fails
#[theseus_macros::debug_pin]
pub async fn install_zipped_mrpack_files(
create_pack: CreatePack,
ignore_lock: bool,
) -> crate::Result<ProfilePathId> {
let state = &State::get().await?;
let file = create_pack.file;
let description = create_pack.description.clone(); // make a copy for profile edit function
let icon = create_pack.description.icon;
let project_id = create_pack.description.project_id;
let version_id = create_pack.description.version_id;
let existing_loading_bar = create_pack.description.existing_loading_bar;
let profile_path = create_pack.description.profile_path;
let icon_exists = icon.is_some();
let reader: Cursor<&bytes::Bytes> = Cursor::new(&file);
// Create zip reader around file
let mut zip_reader =
ZipFileReader::with_tokio(reader).await.map_err(|_| {
crate::Error::from(crate::ErrorKind::InputError(
"Failed to read input modpack zip".to_string(),
))
})?;
// Extract index of modrinth.index.json
let zip_index_option = zip_reader.file().entries().iter().position(|f| {
f.filename().as_str().unwrap_or_default() == "modrinth.index.json"
});
if let Some(zip_index) = zip_index_option {
let mut manifest = String::new();
let mut reader = zip_reader.reader_with_entry(zip_index).await?;
reader.read_to_string_checked(&mut manifest).await?;
let pack: PackFormat = serde_json::from_str(&manifest)?;
if &*pack.game != "minecraft" {
return Err(crate::ErrorKind::InputError(
"Pack does not support Minecraft".to_string(),
)
.into());
}
// Sets generated profile attributes to the pack ones (using profile::edit)
set_profile_information(
profile_path.clone(),
&description,
&pack.name,
&pack.dependencies,
ignore_lock,
)
.await?;
let profile_path = profile_path.clone();
let loading_bar = init_or_edit_loading(
existing_loading_bar,
LoadingBarType::PackDownload {
profile_path: profile_path.get_full_path().await?.clone(),
pack_name: pack.name.clone(),
icon,
pack_id: project_id,
pack_version: version_id,
},
100.0,
"Downloading modpack",
)
.await?;
let num_files = pack.files.len();
use futures::StreamExt;
loading_try_for_each_concurrent(
futures::stream::iter(pack.files.into_iter())
.map(Ok::<PackFile, crate::Error>),
None,
Some(&loading_bar),
70.0,
num_files,
None,
|project| {
let profile_path = profile_path.clone();
async move {
//TODO: Future update: prompt user for optional files in a modpack
if let Some(env) = project.env {
if env
.get(&EnvType::Client)
.map(|x| x == &SideType::Unsupported)
.unwrap_or(false)
{
return Ok(());
}
}
let creds = state.credentials.read().await;
let file = fetch_mirrors(
&project
.downloads
.iter()
.map(|x| &**x)
.collect::<Vec<&str>>(),
project.hashes.get(&PackFileHash::Sha1).map(|x| &**x),
&state.fetch_semaphore,
&creds,
)
.await?;
drop(creds);
let project_path = project.path.to_string();
let path =
std::path::Path::new(&project_path).components().next();
if let Some(path) = path {
match path {
Component::CurDir | Component::Normal(_) => {
let path = profile_path
.get_full_path()
.await?
.join(&project_path);
write(&path, &file, &state.io_semaphore)
.await?;
}
_ => {}
};
}
Ok(())
}
},
)
.await?;
emit_loading(&loading_bar, 0.0, Some("Extracting overrides")).await?;
let mut total_len = 0;
for index in 0..zip_reader.file().entries().len() {
let file = zip_reader.file().entries().get(index).unwrap();
let filename = file.filename().as_str().unwrap_or_default();
if (filename.starts_with("overrides")
|| filename.starts_with("client-overrides"))
&& !filename.ends_with('/')
{
total_len += 1;
}
}
for index in 0..zip_reader.file().entries().len() {
let file = zip_reader.file().entries().get(index).unwrap();
let filename = file.filename().as_str().unwrap_or_default();
let file_path = PathBuf::from(filename);
if (filename.starts_with("overrides")
|| filename.starts_with("client-overrides"))
&& !filename.ends_with('/')
{
// Reads the file into the 'content' variable
let mut content = Vec::new();
let mut reader = zip_reader.reader_with_entry(index).await?;
reader.read_to_end_checked(&mut content).await?;
let mut new_path = PathBuf::new();
let components = file_path.components().skip(1);
for component in components {
new_path.push(component);
}
if new_path.file_name().is_some() {
write(
&profile_path.get_full_path().await?.join(new_path),
&content,
&state.io_semaphore,
)
.await?;
}
emit_loading(
&loading_bar,
30.0 / total_len as f64,
Some(&format!(
"Extracting override {}/{}",
index, total_len
)),
)
.await?;
}
}
// If no icon was set on the profile, check whether the pack's overrides produced an icon.png
// at the profile root; if so, cache it and use it as the profile icon
let potential_icon =
profile_path.get_full_path().await?.join("icon.png");
if !icon_exists && potential_icon.exists() {
profile::edit_icon(&profile_path, Some(&potential_icon)).await?;
}
if let Some(profile_val) = profile::get(&profile_path, None).await? {
crate::launcher::install_minecraft(
&profile_val,
Some(loading_bar),
false,
)
.await?;
State::sync().await?;
}
Ok::<ProfilePathId, crate::Error>(profile_path.clone())
} else {
Err(crate::Error::from(crate::ErrorKind::InputError(
"No pack manifest found in mrpack".to_string(),
)))
}
}
#[tracing::instrument(skip(mrpack_file))]
#[theseus_macros::debug_pin]
pub async fn remove_all_related_files(
profile_path: ProfilePathId,
mrpack_file: bytes::Bytes,
) -> crate::Result<()> {
let reader: Cursor<&bytes::Bytes> = Cursor::new(&mrpack_file);
// Create zip reader around file
let mut zip_reader =
ZipFileReader::with_tokio(reader).await.map_err(|_| {
crate::Error::from(crate::ErrorKind::InputError(
"Failed to read input modpack zip".to_string(),
))
})?;
// Extract index of modrinth.index.json
let zip_index_option = zip_reader.file().entries().iter().position(|f| {
f.filename().as_str().unwrap_or_default() == "modrinth.index.json"
});
if let Some(zip_index) = zip_index_option {
let mut manifest = String::new();
let mut reader = zip_reader.reader_with_entry(zip_index).await?;
reader.read_to_string_checked(&mut manifest).await?;
let pack: PackFormat = serde_json::from_str(&manifest)?;
if &*pack.game != "minecraft" {
return Err(crate::ErrorKind::InputError(
"Pack does not support Minecraft".to_string(),
)
.into());
}
// Set install stage to installing, and do not change it back (as files are being removed and are not being reinstalled here)
crate::api::profile::edit(&profile_path, |prof| {
prof.install_stage = ProfileInstallStage::PackInstalling;
async { Ok(()) }
})
.await?;
// First, remove all modrinth projects by their version hashes
// We need to do a fetch to get the project ids from Modrinth
let state = State::get().await?;
let all_hashes = pack
.files
.iter()
.filter_map(|f| Some(f.hashes.get(&PackFileHash::Sha512)?.clone()))
.collect::<Vec<_>>();
let creds = state.credentials.read().await;
// First, get project info by hash
let files_url = format!("{}version_files", MODRINTH_API_URL);
let hash_projects = fetch_json::<HashMap<String, ModrinthVersion>>(
Method::POST,
&files_url,
None,
Some(json!({
"hashes": all_hashes,
"algorithm": "sha512",
})),
&state.fetch_semaphore,
&creds,
)
.await?;
let to_remove = hash_projects
.into_values()
.map(|p| p.project_id)
.collect::<Vec<_>>();
let profile =
profile::get(&profile_path, None).await?.ok_or_else(|| {
crate::ErrorKind::UnmanagedProfileError(
profile_path.to_string(),
)
})?;
for (project_id, project) in &profile.projects {
if let ProjectMetadata::Modrinth { project, .. } = &project.metadata
{
if to_remove.contains(&project.id) {
let path = profile
.get_profile_full_path()
.await?
.join(project_id.0.clone());
if path.exists() {
io::remove_file(&path).await?;
}
}
}
}
// Iterate over all Modrinth project file paths in the json, and remove them
// (There should be few, but this removes any files the .mrpack intended as Modrinth projects but were unrecognized)
for file in pack.files {
let path: PathBuf = profile_path
.get_full_path()
.await?
.join(file.path.to_string());
if path.exists() {
io::remove_file(&path).await?;
}
}
// Iterate over each 'overrides' file and remove it
for index in 0..zip_reader.file().entries().len() {
let file = zip_reader.file().entries().get(index).unwrap();
let filename = file.filename().as_str().unwrap_or_default();
let file_path = PathBuf::from(filename);
if (filename.starts_with("overrides")
|| filename.starts_with("client-overrides"))
&& !filename.ends_with('/')
{
let mut new_path = PathBuf::new();
let components = file_path.components().skip(1);
for component in components {
new_path.push(component);
}
// Remove this file if a corresponding one exists in the filesystem
let existing_file =
profile_path.get_full_path().await?.join(&new_path);
if existing_file.exists() {
io::remove_file(&existing_file).await?;
}
}
}
Ok(())
} else {
Err(crate::Error::from(crate::ErrorKind::InputError(
"No pack manifest found in mrpack".to_string(),
)))
}
}

View File

@@ -0,0 +1,3 @@
pub mod import;
pub mod install_from;
pub mod install_mrpack;

View File

@@ -0,0 +1,130 @@
//! Theseus process management interface
use uuid::Uuid;
use crate::state::{MinecraftChild, ProfilePathId};
pub use crate::{
state::{
Hooks, JavaSettings, MemorySettings, Profile, Settings, WindowSize,
},
State,
};
// Gets whether a child process stored in the state by UUID has finished
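// Hypothetical usage sketch (`child_uuid` is assumed to come from an earlier launch call):
//     if has_finished_by_uuid(child_uuid).await? {
//         println!("process {} has exited", child_uuid);
//     }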
#[tracing::instrument]
pub async fn has_finished_by_uuid(uuid: Uuid) -> crate::Result<bool> {
Ok(get_exit_status_by_uuid(uuid).await?.is_some())
}
// Gets the exit status of a child process stored in the state by UUID
#[tracing::instrument]
pub async fn get_exit_status_by_uuid(
uuid: Uuid,
) -> crate::Result<Option<i32>> {
let state = State::get().await?;
let children = state.children.read().await;
children.exit_status(uuid).await
}
// Gets the UUID of each stored process in the state
#[tracing::instrument]
pub async fn get_all_uuids() -> crate::Result<Vec<Uuid>> {
let state = State::get().await?;
let children = state.children.read().await;
Ok(children.keys())
}
// Gets the UUID of each *running* stored process in the state
#[tracing::instrument]
pub async fn get_all_running_uuids() -> crate::Result<Vec<Uuid>> {
let state = State::get().await?;
let children = state.children.read().await;
children.running_keys().await
}
// Gets the Profile paths of each *running* stored process in the state
#[tracing::instrument]
pub async fn get_all_running_profile_paths() -> crate::Result<Vec<ProfilePathId>>
{
let state = State::get().await?;
let children = state.children.read().await;
children.running_profile_paths().await
}
// Gets the Profiles (cloned) of each *running* stored process in the state
#[tracing::instrument]
pub async fn get_all_running_profiles() -> crate::Result<Vec<Profile>> {
let state = State::get().await?;
let children = state.children.read().await;
children.running_profiles().await
}
// Gets the UUID of each stored process in the state by profile path
#[tracing::instrument]
pub async fn get_uuids_by_profile_path(
profile_path: ProfilePathId,
) -> crate::Result<Vec<Uuid>> {
let state = State::get().await?;
let children = state.children.read().await;
children.running_keys_with_profile(profile_path).await
}
// Kill a child process stored in the state by UUID
#[tracing::instrument]
pub async fn kill_by_uuid(uuid: Uuid) -> crate::Result<()> {
let state = State::get().await?;
let children = state.children.read().await;
if let Some(mchild) = children.get(uuid) {
let mut mchild = mchild.write().await;
kill(&mut mchild).await
} else {
// No error returned for already finished process
Ok(())
}
}
// Wait for a child process stored in the state by UUID
#[tracing::instrument]
pub async fn wait_for_by_uuid(uuid: Uuid) -> crate::Result<()> {
let state = State::get().await?;
let children = state.children.read().await;
// No error returned for already killed process
if let Some(mchild) = children.get(uuid) {
let mut mchild = mchild.write().await;
wait_for(&mut mchild).await
} else {
// No error returned for already finished process
Ok(())
}
}
// Kill a running child process directly
#[tracing::instrument(skip(running))]
pub async fn kill(running: &mut MinecraftChild) -> crate::Result<()> {
running.current_child.write().await.kill().await?;
Ok(())
}
// Await on the completion of a child process directly
#[tracing::instrument(skip(running))]
pub async fn wait_for(running: &mut MinecraftChild) -> crate::Result<()> {
// We do not wait on the Child directly, but wait on the thread manager.
// This way we can still run all cleanup hook functions that happen after.
running
.manager
.take()
.ok_or_else(|| {
crate::ErrorKind::LauncherError(format!(
"Process manager already completed or missing for process {}",
running.uuid
))
})?
.await?
.map_err(|err| {
crate::ErrorKind::LauncherError(format!(
"Error running minecraft: {err}"
))
})?;
Ok(())
}

View File

@@ -0,0 +1,315 @@
//! Theseus profile management interface
use crate::pack::install_from::CreatePackProfile;
use crate::prelude::ProfilePathId;
use crate::state::LinkedData;
use crate::util::io::{self, canonicalize};
use crate::{
event::{emit::emit_profile, ProfilePayloadType},
prelude::ModLoader,
};
use crate::{pack, profile, ErrorKind};
pub use crate::{
state::{JavaSettings, Profile},
State,
};
use daedalus::modded::LoaderVersion;
use std::path::PathBuf;
use tracing::{info, trace};
use uuid::Uuid;
// Creates a profile of a given name and adds it to the in-memory state
// Returns relative filepath as ProfilePathId which can be used to access it in the State
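// Hedged usage sketch (all values below are illustrative, not taken from the original source):
//     let profile_id = profile_create(
//         "My Fabric Profile".to_string(),
//         "1.20.1".to_string(),
//         ModLoader::Fabric,
//         Some("stable".to_string()),
//         None, // icon
//         None, // icon_url
//         None, // linked_data
//         None, // skip_install_profile
//         None, // no_watch
//     )
//     .await?;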
#[tracing::instrument]
#[theseus_macros::debug_pin]
#[allow(clippy::too_many_arguments)]
pub async fn profile_create(
mut name: String, // the name of the profile, and relative path
game_version: String, // the game version of the profile
modloader: ModLoader, // the modloader to use
loader_version: Option<String>, // the modloader version to use, set to "latest", "stable", or the ID of your chosen loader. defaults to latest
icon: Option<PathBuf>, // the icon for the profile
icon_url: Option<String>, // the URL icon for a profile (ONLY USED FOR TEMPORARY PROFILES)
linked_data: Option<LinkedData>, // the linked project ID (mainly for modpacks)- used for updating
skip_install_profile: Option<bool>,
no_watch: Option<bool>,
) -> crate::Result<ProfilePathId> {
name = profile::sanitize_profile_name(&name);
trace!("Creating new profile. {}", name);
let state = State::get().await?;
let uuid = Uuid::new_v4();
let mut path = state.directories.profiles_dir().await.join(&name);
if path.exists() {
let mut new_name;
let mut new_path;
let mut which = 1;
loop {
new_name = format!("{name} ({which})");
new_path = state.directories.profiles_dir().await.join(&new_name);
if !new_path.exists() {
break;
}
which += 1;
}
tracing::debug!(
"Folder collision: {}, renaming to: {}",
path.display(),
new_path.display()
);
path = new_path;
name = new_name;
}
io::create_dir_all(&path).await?;
info!(
"Creating profile at path {}",
&canonicalize(&path)?.display()
);
let loader = if modloader != ModLoader::Vanilla {
get_loader_version_from_loader(
game_version.clone(),
modloader,
loader_version,
)
.await?
} else {
None
};
let mut profile = Profile::new(uuid, name, game_version).await?;
let result = async {
if let Some(ref icon) = icon {
let bytes =
io::read(state.directories.caches_dir().join(icon)).await?;
profile
.set_icon(
&state.directories.caches_dir(),
&state.io_semaphore,
bytes::Bytes::from(bytes),
&icon.to_string_lossy(),
)
.await?;
}
profile.metadata.icon_url = icon_url;
if let Some(loader_version) = loader {
profile.metadata.loader = modloader;
profile.metadata.loader_version = Some(loader_version);
}
profile.metadata.linked_data = linked_data;
if let Some(linked_data) = &mut profile.metadata.linked_data {
linked_data.locked = Some(
linked_data.project_id.is_some()
&& linked_data.version_id.is_some(),
);
}
emit_profile(
uuid,
&profile.profile_id(),
&profile.metadata.name,
ProfilePayloadType::Created,
)
.await?;
{
let mut profiles = state.profiles.write().await;
profiles
.insert(profile.clone(), no_watch.unwrap_or_default())
.await?;
}
if !skip_install_profile.unwrap_or(false) {
crate::launcher::install_minecraft(&profile, None, false).await?;
}
State::sync().await?;
Ok(profile.profile_id())
}
.await;
match result {
Ok(profile) => Ok(profile),
Err(err) => {
let _ = crate::api::profile::remove(&profile.profile_id()).await;
Err(err)
}
}
}
pub async fn profile_create_from_creator(
profile: CreatePackProfile,
) -> crate::Result<ProfilePathId> {
profile_create(
profile.name,
profile.game_version,
profile.modloader,
profile.loader_version,
profile.icon,
profile.icon_url,
profile.linked_data,
profile.skip_install_profile,
profile.no_watch,
)
.await
}
pub async fn profile_create_from_duplicate(
copy_from: ProfilePathId,
) -> crate::Result<ProfilePathId> {
// Original profile
let profile = profile::get(&copy_from, None).await?.ok_or_else(|| {
ErrorKind::UnmanagedProfileError(copy_from.to_string())
})?;
let profile_path_id = profile_create(
profile.metadata.name.clone(),
profile.metadata.game_version.clone(),
profile.metadata.loader,
profile.metadata.loader_version.clone().map(|it| it.id),
profile.metadata.icon.clone(),
profile.metadata.icon_url.clone(),
profile.metadata.linked_data.clone(),
Some(true),
Some(true),
)
.await?;
// Copy it over using the import system (essentially importing from the same profile)
let state = State::get().await?;
let bar = pack::import::copy_dotminecraft(
profile_path_id.clone(),
copy_from.get_full_path().await?,
&state.io_semaphore,
None,
)
.await?;
let duplicated_profile =
profile::get(&profile_path_id, None).await?.ok_or_else(|| {
ErrorKind::UnmanagedProfileError(profile_path_id.to_string())
})?;
crate::launcher::install_minecraft(&duplicated_profile, Some(bar), false)
.await?;
{
let state = State::get().await?;
let mut file_watcher = state.file_watcher.write().await;
Profile::watch_fs(
&profile.get_profile_full_path().await?,
&mut file_watcher,
)
.await?;
}
// emit profile edited
emit_profile(
profile.uuid,
&profile.profile_id(),
&profile.metadata.name,
ProfilePayloadType::Edited,
)
.await?;
State::sync().await?;
Ok(profile_path_id)
}
#[tracing::instrument]
#[theseus_macros::debug_pin]
pub(crate) async fn get_loader_version_from_loader(
game_version: String,
loader: ModLoader,
loader_version: Option<String>,
) -> crate::Result<Option<LoaderVersion>> {
let state = State::get().await?;
let metadata = state.metadata.read().await;
let version = loader_version.unwrap_or_else(|| "latest".to_string());
let filter = |it: &LoaderVersion| match version.as_str() {
"latest" => true,
"stable" => it.stable,
id => {
it.id == *id
|| format!("{}-{}", game_version, id) == it.id
|| format!("{}-{}-{}", game_version, id, game_version) == it.id
}
};
let loader_data = match loader {
ModLoader::Forge => &metadata.forge,
ModLoader::Fabric => &metadata.fabric,
ModLoader::Quilt => &metadata.quilt,
ModLoader::NeoForge => &metadata.neoforge,
_ => {
return Err(
ProfileCreationError::NoManifest(loader.to_string()).into()
)
}
};
let loaders = &loader_data
.game_versions
.iter()
.find(|it| {
it.id
.replace(daedalus::modded::DUMMY_REPLACE_STRING, &game_version)
== game_version
})
.ok_or_else(|| {
ProfileCreationError::ModloaderUnsupported(
loader.to_string(),
game_version.clone(),
)
})?
.loaders;
let loader_version = loaders
.iter()
.find(|&x| filter(x))
.cloned()
.or(
// If stable was searched for but not found, return latest by default
if version == "stable" {
loaders.iter().next().cloned()
} else {
None
},
)
.ok_or_else(|| {
ProfileCreationError::InvalidVersionModloader(
version,
loader.to_string(),
)
})?;
Ok(Some(loader_version))
}
#[derive(thiserror::Error, Debug)]
pub enum ProfileCreationError {
#[error("Profile .json exists: {0}")]
ProfileExistsError(PathBuf),
#[error("Modloader {0} unsupported for Minecraft version {1}")]
ModloaderUnsupported(String, String),
#[error("Invalid version {0} for modloader {1}")]
InvalidVersionModloader(String, String),
#[error("Could not get manifest for loader {0}. This is a bug in the GUI")]
NoManifest(String),
#[error("Could not get State.")]
NoState,
#[error("Attempted to create project in something other than a folder.")]
NotFolder,
#[error("You are trying to create a profile in a non-empty directory")]
NotEmptyFolder,
#[error("IO error: {0}")]
IOError(#[from] std::io::Error),
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,233 @@
use crate::{
event::{
emit::{emit_profile, init_loading, loading_try_for_each_concurrent},
ProfilePayloadType,
},
pack::{self, install_from::generate_pack_from_version_id},
prelude::{ProfilePathId, ProjectPathId},
profile::get,
state::{ProfileInstallStage, Project},
LoadingBarType, State,
};
use futures::try_join;
/// Updates a managed modrinth pack to the version specified by new_version_id
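/// Hedged usage sketch (`profile_path` and the new version id are assumed for illustration):
/// ```ignore
/// update_managed_modrinth_version(&profile_path, &"newVersionId".to_string()).await?;
/// ```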
#[tracing::instrument]
#[theseus_macros::debug_pin]
pub async fn update_managed_modrinth_version(
profile_path: &ProfilePathId,
new_version_id: &String,
) -> crate::Result<()> {
let profile = get(profile_path, None).await?.ok_or_else(|| {
crate::ErrorKind::UnmanagedProfileError(profile_path.to_string())
.as_error()
})?;
let unmanaged_err = || {
crate::ErrorKind::InputError(
format!("Profile at {} is not a managed modrinth pack, or has been disconnected.", profile_path),
)
};
// Extract modrinth pack information, if appropriate
let linked_data = profile
.metadata
.linked_data
.as_ref()
.ok_or_else(unmanaged_err)?;
let project_id: &String =
linked_data.project_id.as_ref().ok_or_else(unmanaged_err)?;
let version_id =
linked_data.version_id.as_ref().ok_or_else(unmanaged_err)?;
// Replace the pack with the new version
replace_managed_modrinth(
profile_path,
&profile,
project_id,
version_id,
Some(new_version_id),
true, // switching versions should ignore the lock
)
.await?;
emit_profile(
profile.uuid,
profile_path,
&profile.metadata.name,
ProfilePayloadType::Edited,
)
.await?;
State::sync().await?;
Ok(())
}
/// Repair a managed modrinth pack by 'updating' it to the current version
#[tracing::instrument]
#[theseus_macros::debug_pin]
pub async fn repair_managed_modrinth(
profile_path: &ProfilePathId,
) -> crate::Result<()> {
let profile = get(profile_path, None).await?.ok_or_else(|| {
crate::ErrorKind::UnmanagedProfileError(profile_path.to_string())
.as_error()
})?;
let unmanaged_err = || {
crate::ErrorKind::InputError(
format!("Profile at {} is not a managed modrinth pack, or has been disconnected.", profile_path),
)
};
// For repairing specifically, first remove all installed projects (so that projects no longer present in the pack are removed as well)
// We remove the projects first, then everything listed in the .mrpack, so that only
// relevant projects are removed and not things like save files
let projects_map = profile.projects.clone();
let stream = futures::stream::iter(
projects_map
.into_iter()
.map(Ok::<(ProjectPathId, Project), crate::Error>),
);
loading_try_for_each_concurrent(
stream,
None,
None,
0.0,
0,
None,
|(project_id, _)| {
let profile = profile.clone();
async move {
profile.remove_project(&project_id, Some(true)).await?;
Ok(())
}
},
)
.await?;
// Extract modrinth pack information, if appropriate
let linked_data = profile
.metadata
.linked_data
.as_ref()
.ok_or_else(unmanaged_err)?;
let project_id: &String =
linked_data.project_id.as_ref().ok_or_else(unmanaged_err)?;
let version_id =
linked_data.version_id.as_ref().ok_or_else(unmanaged_err)?;
// Replace the pack with the same version
replace_managed_modrinth(
profile_path,
&profile,
project_id,
version_id,
None,
false, // do not ignore lock, as repairing can reset the lock
)
.await?;
emit_profile(
profile.uuid,
profile_path,
&profile.metadata.name,
ProfilePayloadType::Edited,
)
.await?;
State::sync().await?;
Ok(())
}
/// Replace a managed modrinth pack with a new version
/// If new_version_id is None, the pack is 'reinstalled' in-place
#[tracing::instrument(skip(profile))]
#[theseus_macros::debug_pin]
async fn replace_managed_modrinth(
profile_path: &ProfilePathId,
profile: &crate::state::Profile,
project_id: &String,
version_id: &String,
new_version_id: Option<&String>,
ignore_lock: bool,
) -> crate::Result<()> {
crate::profile::edit(profile_path, |profile| {
profile.install_stage = ProfileInstallStage::Installing;
async { Ok(()) }
})
.await?;
// Fetch .mrpacks for both old and new versions
// TODO: this will need to be updated if we revert the hacky pack method we needed for compiler speed
let (old_pack_creator, new_pack_creator) =
if let Some(new_version_id) = new_version_id {
let shared_loading_bar = init_loading(
LoadingBarType::PackFileDownload {
profile_path: profile_path.get_full_path().await?,
pack_name: profile.metadata.name.clone(),
icon: None,
pack_version: version_id.clone(),
},
200.0, // These two downloads will share the same loading bar
"Downloading pack file",
)
.await?;
// download in parallel, then join.
try_join!(
generate_pack_from_version_id(
project_id.clone(),
version_id.clone(),
profile.metadata.name.clone(),
None,
profile_path.clone(),
Some(shared_loading_bar.clone())
),
generate_pack_from_version_id(
project_id.clone(),
new_version_id.clone(),
profile.metadata.name.clone(),
None,
profile_path.clone(),
Some(shared_loading_bar)
)
)?
} else {
// If new_version_id is None, we don't need to download the new pack, so we clone the old one
let mut old_pack_creator = generate_pack_from_version_id(
project_id.clone(),
version_id.clone(),
profile.metadata.name.clone(),
None,
profile_path.clone(),
None,
)
.await?;
old_pack_creator.description.existing_loading_bar = None;
(old_pack_creator.clone(), old_pack_creator)
};
// Removal - remove all files that were added by the old pack
// - remove all installed projects
// - remove all overrides
pack::install_mrpack::remove_all_related_files(
profile_path.clone(),
old_pack_creator.file,
)
.await?;
// Reinstallation - install all files that are added by the new pack
// - install all projects
// - install all overrides
// - edits the profile to update the new data
// - (functions almost identically to reinstalling the pack 'in-place')
pack::install_mrpack::install_zipped_mrpack_files(
new_pack_creator,
ignore_lock,
)
.await?;
Ok(())
}

View File

@@ -0,0 +1,5 @@
use crate::state::{ProcessType, SafeProcesses};
pub async fn check_safe_loading_bars() -> crate::Result<bool> {
SafeProcesses::is_complete(ProcessType::LoadingBar).await
}

View File

@@ -0,0 +1,246 @@
//! Theseus profile management interface
use std::path::{Path, PathBuf};
use tokio::fs;
use io::IOError;
use tokio::sync::RwLock;
use crate::{
event::emit::{emit_loading, init_loading},
prelude::DirectoryInfo,
state::{self, Profiles},
util::{fetch, io},
};
pub use crate::{
state::{
Hooks, JavaSettings, MemorySettings, Profile, Settings, WindowSize,
},
State,
};
/// Gets entire settings
#[tracing::instrument]
pub async fn get() -> crate::Result<Settings> {
let state = State::get().await?;
let settings = state.settings.read().await;
Ok(settings.clone())
}
/// Sets entire settings
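/// Typical read-modify-write sketch (the `disable_discord_rpc` field is used here only as an
/// illustration of editing a single setting before saving; it is assumed to be a plain flag):
/// ```ignore
/// let mut settings = get().await?;
/// settings.disable_discord_rpc = true;
/// set(settings).await?;
/// ```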
#[tracing::instrument]
pub async fn set(settings: Settings) -> crate::Result<()> {
let state = State::get().await?;
if settings.loaded_config_dir
!= state.settings.read().await.loaded_config_dir
{
return Err(crate::ErrorKind::OtherError(
"Cannot change config directory as setting".to_string(),
)
.as_error());
}
let (reset_io, reset_fetch) = async {
let read = state.settings.read().await;
(
settings.max_concurrent_writes != read.max_concurrent_writes,
settings.max_concurrent_downloads != read.max_concurrent_downloads,
)
}
.await;
let updated_discord_rpc = {
let read = state.settings.read().await;
settings.disable_discord_rpc != read.disable_discord_rpc
};
{
*state.settings.write().await = settings;
}
if updated_discord_rpc {
state.discord_rpc.clear_to_default(true).await?;
}
if reset_io {
state.reset_io_semaphore().await;
}
if reset_fetch {
state.reset_fetch_semaphore().await;
}
State::sync().await?;
Ok(())
}
/// Sets the new config dir, the location of all Theseus data except for the settings.json and caches
/// Takes control of the entire state and blocks until completion
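/// A hedged usage sketch (the target directory is illustrative and must already exist and be writeable):
/// ```ignore
/// set_config_dir(PathBuf::from("D:/TheseusData")).await?;
/// ```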
pub async fn set_config_dir(new_config_dir: PathBuf) -> crate::Result<()> {
tracing::trace!("Changing config dir to: {}", new_config_dir.display());
if !new_config_dir.is_dir() {
return Err(crate::ErrorKind::FSError(format!(
"New config dir is not a folder: {}",
new_config_dir.display()
))
.as_error());
}
if !is_dir_writeable(new_config_dir.clone()).await? {
return Err(crate::ErrorKind::FSError(format!(
"New config dir is not writeable: {}",
new_config_dir.display()
))
.as_error());
}
let loading_bar = init_loading(
crate::LoadingBarType::ConfigChange {
new_path: new_config_dir.clone(),
},
100.0,
"Changing configuration directory",
)
.await?;
tracing::trace!("Changing config dir, taking control of the state");
// Take control of the state
let mut state_write = State::get_write().await?;
let old_config_dir =
state_write.directories.config_dir.read().await.clone();
// Reset file watcher
tracing::trace!("Reset file watcher");
let file_watcher = state::init_watcher().await?;
state_write.file_watcher = RwLock::new(file_watcher);
// Getting files to be moved
let mut config_entries = io::read_dir(&old_config_dir).await?;
let across_drives = is_different_drive(&old_config_dir, &new_config_dir);
let mut entries = vec![];
let mut deletable_entries = vec![];
while let Some(entry) = config_entries
.next_entry()
.await
.map_err(|e| IOError::with_path(e, &old_config_dir))?
{
let entry_path = entry.path();
if let Some(file_name) = entry_path.file_name() {
// We are only moving the profiles and metadata folders
if file_name == state::PROFILES_FOLDER_NAME
|| file_name == state::METADATA_FOLDER_NAME
{
if across_drives {
entries.extend(
crate::pack::import::get_all_subfiles(&entry_path)
.await?,
);
deletable_entries.push(entry_path.clone());
} else {
entries.push(entry_path.clone());
}
}
}
}
tracing::trace!("Moving files");
let semaphore = &state_write.io_semaphore;
let num_entries = entries.len() as f64;
for entry_path in entries {
let relative_path = entry_path.strip_prefix(&old_config_dir)?;
let new_path = new_config_dir.join(relative_path);
if across_drives {
fetch::copy(&entry_path, &new_path, semaphore).await?;
} else {
io::rename(entry_path.clone(), new_path.clone()).await?;
}
emit_loading(&loading_bar, 80.0 * (1.0 / num_entries), None).await?;
}
tracing::trace!("Setting configuration setting");
// Set load config dir setting
let settings = {
let mut settings = state_write.settings.write().await;
settings.loaded_config_dir = Some(new_config_dir.clone());
// Some java paths are hardcoded to within our config dir, so we need to update them
tracing::trace!("Updating java keys");
for key in settings.java_globals.keys() {
if let Some(java) = settings.java_globals.get_mut(&key) {
// If the path is within the old config dir path, update it to the new config dir
if let Ok(relative_path) = PathBuf::from(java.path.clone())
.strip_prefix(&old_config_dir)
{
java.path = new_config_dir
.join(relative_path)
.to_string_lossy()
.to_string();
}
}
}
tracing::trace!("Syncing settings");
settings
.sync(&state_write.directories.settings_file())
.await?;
settings.clone()
};
tracing::trace!("Reinitializing directory");
// Set new state information
state_write.directories = DirectoryInfo::init(&settings)?;
// Delete entries that were from a different drive
let deletable_entries_len = deletable_entries.len();
if deletable_entries_len > 0 {
tracing::trace!("Deleting old files");
}
for entry in deletable_entries {
io::remove_dir_all(entry).await?;
emit_loading(
&loading_bar,
10.0 * (1.0 / deletable_entries_len as f64),
None,
)
.await?;
}
// Reset file watcher
tracing::trace!("Reset file watcher");
let mut file_watcher = state::init_watcher().await?;
// Reset profiles (for filepaths, file watcher, etc)
state_write.profiles = RwLock::new(
Profiles::init(&state_write.directories, &mut file_watcher).await?,
);
state_write.file_watcher = RwLock::new(file_watcher);
emit_loading(&loading_bar, 10.0, None).await?;
tracing::info!(
"Successfully switched config folder to: {}",
new_config_dir.display()
);
Ok(())
}
// Function to check if two paths are on different drives/roots
fn is_different_drive(path1: &Path, path2: &Path) -> bool {
let root1 = path1.components().next();
let root2 = path2.components().next();
root1 != root2
}
pub async fn is_dir_writeable(new_config_dir: PathBuf) -> crate::Result<bool> {
let temp_path = new_config_dir.join(".tmp");
match fs::write(temp_path.clone(), "test").await {
Ok(_) => {
fs::remove_file(temp_path).await?;
Ok(true)
}
Err(e) => {
tracing::error!("Error writing to new config dir: {}", e);
Ok(false)
}
}
}

View File

@@ -0,0 +1,60 @@
//! Theseus tag management interface
pub use crate::{
state::{Category, DonationPlatform, GameVersion, Loader, Tags},
State,
};
// Get bundled set of tags
#[tracing::instrument]
pub async fn get_tag_bundle() -> crate::Result<Tags> {
let state = State::get().await?;
let tags = state.tags.read().await;
Ok(tags.get_tag_bundle())
}
/// Get category tags
#[tracing::instrument]
pub async fn get_category_tags() -> crate::Result<Vec<Category>> {
let state = State::get().await?;
let tags = state.tags.read().await;
Ok(tags.get_categories())
}
/// Get report type tags
#[tracing::instrument]
pub async fn get_report_type_tags() -> crate::Result<Vec<String>> {
let state = State::get().await?;
let tags = state.tags.read().await;
Ok(tags.get_report_types())
}
/// Get loader tags
#[tracing::instrument]
pub async fn get_loader_tags() -> crate::Result<Vec<Loader>> {
let state = State::get().await?;
let tags = state.tags.read().await;
Ok(tags.get_loaders())
}
/// Get game version tags
#[tracing::instrument]
pub async fn get_game_version_tags() -> crate::Result<Vec<GameVersion>> {
let state = State::get().await?;
let tags = state.tags.read().await;
Ok(tags.get_game_versions())
}
/// Get donation platform tags
#[tracing::instrument]
pub async fn get_donation_platform_tags() -> crate::Result<Vec<DonationPlatform>>
{
let state = State::get().await?;
let tags = state.tags.read().await;
Ok(tags.get_donation_platforms())
}

View File

@@ -0,0 +1,3 @@
//! Configuration structs
pub const MODRINTH_API_URL: &str = "https://api.modrinth.com/v2/";

143
libs/theseus/src/error.rs Normal file
View File

@@ -0,0 +1,143 @@
//! Theseus error type
use crate::{profile, util};
use tracing_error::InstrumentError;
#[derive(thiserror::Error, Debug)]
pub enum ErrorKind {
#[error("Filesystem error: {0}")]
FSError(String),
#[error("Serialization error (INI): {0}")]
INIError(#[from] serde_ini::de::Error),
#[error("Serialization error (JSON): {0}")]
JSONError(#[from] serde_json::Error),
#[error("Error parsing UUID: {0}")]
UUIDError(#[from] uuid::Error),
#[error("Error parsing URL: {0}")]
URLError(#[from] url::ParseError),
#[error("Unable to read {0} from any source")]
NoValueFor(String),
#[error("Metadata error: {0}")]
MetadataError(#[from] daedalus::Error),
#[error("Minecraft authentication error: {0}")]
MinecraftAuthenticationError(
#[from] crate::state::MinecraftAuthenticationError,
),
#[error("I/O error: {0}")]
IOError(#[from] util::io::IOError),
#[error("I/O (std) error: {0}")]
StdIOError(#[from] std::io::Error),
#[error("Error launching Minecraft: {0}")]
LauncherError(String),
#[error("Error fetching URL: {0}")]
FetchError(#[from] reqwest::Error),
#[error("Websocket error: {0}")]
WSError(#[from] async_tungstenite::tungstenite::Error),
#[error("Websocket closed before {0} could be received!")]
WSClosedError(String),
#[error("Incorrect Sha1 hash for download: {0} != {1}")]
HashError(String, String),
#[error("Regex error: {0}")]
RegexError(#[from] regex::Error),
#[error("Paths stored in the database need to be valid UTF-8: {0}")]
UTFError(std::path::PathBuf),
#[error("Invalid input: {0}")]
InputError(String),
#[error("Join handle error: {0}")]
JoinError(#[from] tokio::task::JoinError),
#[error("Recv error: {0}")]
RecvError(#[from] tokio::sync::oneshot::error::RecvError),
#[error("Error acquiring semaphore: {0}")]
AcquireError(#[from] tokio::sync::AcquireError),
#[error("Profile {0} is not managed by the app!")]
UnmanagedProfileError(String),
#[error("Could not create profile: {0}")]
ProfileCreationError(#[from] profile::create::ProfileCreationError),
#[error("User is not logged in, no credentials available!")]
NoCredentialsError,
#[error("JRE error: {0}")]
JREError(#[from] crate::util::jre::JREError),
#[error("Error parsing date: {0}")]
ChronoParseError(#[from] chrono::ParseError),
#[error("Event error: {0}")]
EventError(#[from] crate::event::EventError),
#[error("Zip error: {0}")]
ZipError(#[from] async_zip::error::ZipError),
#[error("File watching error: {0}")]
NotifyError(#[from] notify::Error),
#[error("Error stripping prefix: {0}")]
StripPrefixError(#[from] std::path::StripPrefixError),
#[error("Error: {0}")]
OtherError(String),
#[cfg(feature = "tauri")]
#[error("Tauri error: {0}")]
TauriError(#[from] tauri::Error),
}
#[derive(Debug)]
pub struct Error {
pub raw: std::sync::Arc<ErrorKind>,
pub source: tracing_error::TracedError<std::sync::Arc<ErrorKind>>,
}
impl std::error::Error for Error {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
self.source.source()
}
}
impl std::fmt::Display for Error {
fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(fmt, "{}", self.source)
}
}
impl<E: Into<ErrorKind>> From<E> for Error {
fn from(source: E) -> Self {
let error = Into::<ErrorKind>::into(source);
let boxed_error = std::sync::Arc::new(error);
Self {
raw: boxed_error.clone(),
source: boxed_error.in_current_span(),
}
}
}
impl ErrorKind {
pub fn as_error(self) -> Error {
self.into()
}
}
pub type Result<T> = core::result::Result<T, Error>;

View File

@@ -0,0 +1,415 @@
use super::LoadingBarId;
use crate::{
event::{
CommandPayload, EventError, LoadingBar, LoadingBarType,
ProcessPayloadType, ProfilePayloadType,
},
prelude::ProfilePathId,
state::{ProcessType, SafeProcesses},
};
use futures::prelude::*;
#[cfg(feature = "tauri")]
use crate::event::{
LoadingPayload, ProcessPayload, ProfilePayload, WarningPayload,
};
#[cfg(feature = "tauri")]
use tauri::Manager;
use uuid::Uuid;
#[cfg(feature = "cli")]
const CLI_PROGRESS_BAR_TOTAL: u64 = 1000;
/*
Events are a way we can communicate with the Tauri frontend from the Rust backend.
We include a feature flag for Tauri, so that we can compile this code without Tauri.
To use events, we need to do the following:
1) Make sure we are using the tauri feature flag
2) Initialize the EventState with EventState::init() *before* initializing the theseus State
3) Call emit_x functions to send events to the frontend
For emit_loading() specifically, we need to initialize the loading bar with init_loading() first and pass the received LoadingBarId in
For example:
pub async fn loading_function() -> crate::Result<()> {
let loading_bar = init_loading(LoadingBarType::StateInit, 100.0, "Loading something long...").await?;
for _ in 0..100 {
emit_loading(&loading_bar, 1.0, None).await?;
tokio::time::sleep(Duration::from_millis(100)).await;
}
Ok(())
}
*/
/// Initialize a loading bar for use in emit_loading
/// This will generate a LoadingBarId, which is used to refer to the loading bar uniquely.
/// total is the total amount of work to be done- all emissions will be considered a fraction of this value (should be 1 or 100 for simplicity)
/// title is the title of the loading bar
/// The app will wait for this loading bar to finish before exiting, as it is considered safe.
#[theseus_macros::debug_pin]
pub async fn init_loading(
bar_type: LoadingBarType,
total: f64,
title: &str,
) -> crate::Result<LoadingBarId> {
let key = init_loading_unsafe(bar_type, total, title).await?;
SafeProcesses::add_uuid(ProcessType::LoadingBar, key.0).await?;
Ok(key)
}
/// An unsafe loading bar can be created without adding it to the SafeProcesses list,
/// meaning that the app won't ask to wait for it to finish before exiting.
#[theseus_macros::debug_pin]
pub async fn init_loading_unsafe(
bar_type: LoadingBarType,
total: f64,
title: &str,
) -> crate::Result<LoadingBarId> {
let event_state = crate::EventState::get().await?;
let key = LoadingBarId(Uuid::new_v4());
event_state.loading_bars.write().await.insert(
key.0,
LoadingBar {
loading_bar_uuid: key.0,
message: title.to_string(),
total,
current: 0.0,
last_sent: 0.0,
bar_type,
#[cfg(feature = "cli")]
cli_progress_bar: {
let pb = indicatif::ProgressBar::new(CLI_PROGRESS_BAR_TOTAL);
pb.set_position(0);
pb.set_style(
indicatif::ProgressStyle::default_bar()
.template(
"{spinner:.green} [{elapsed_precise}] [{bar:.lime/green}] {pos}/{len} {msg}",
).unwrap()
.progress_chars("#>-"),
);
pb
},
},
);
// attempt an initial loading_emit event to the frontend
emit_loading(&key, 0.0, None).await?;
Ok(key)
}
pub async fn init_or_edit_loading(
id: Option<LoadingBarId>,
bar_type: LoadingBarType,
total: f64,
title: &str,
) -> crate::Result<LoadingBarId> {
if let Some(id) = id {
edit_loading(&id, bar_type, total, title).await?;
Ok(id)
} else {
init_loading(bar_type, total, title).await
}
}
// Edits a loading bar's type
// This also resets the bar's current progress to 0
pub async fn edit_loading(
id: &LoadingBarId,
bar_type: LoadingBarType,
total: f64,
title: &str,
) -> crate::Result<()> {
let event_state = crate::EventState::get().await?;
if let Some(bar) = event_state.loading_bars.write().await.get_mut(&id.0) {
bar.bar_type = bar_type;
bar.total = total;
bar.message = title.to_string();
bar.current = 0.0;
bar.last_sent = 0.0;
#[cfg(feature = "cli")]
{
bar.cli_progress_bar.reset(); // indicatif::ProgressBar::new(CLI_PROGRESS_BAR_TOTAL as u64);
}
};
emit_loading(id, 0.0, None).await?;
Ok(())
}
// emit_loading emits a loading event to the frontend
// key refers to the loading bar to update
// increment refers to the relative amount (as a share of the loading bar's total) to advance the bar by
// message is the message to display on the loading bar- if None, use the loading bar's default one
// By convention, fraction is the fraction of the progress bar that is filled
#[allow(unused_variables)]
#[tracing::instrument(level = "debug")]
#[theseus_macros::debug_pin]
pub async fn emit_loading(
key: &LoadingBarId,
increment_frac: f64,
message: Option<&str>,
) -> crate::Result<()> {
let event_state = crate::EventState::get().await?;
let mut loading_bar = event_state.loading_bars.write().await;
let loading_bar = match loading_bar.get_mut(&key.0) {
Some(f) => f,
None => {
return Err(EventError::NoLoadingBar(key.0).into());
}
};
// Tick up loading bar
loading_bar.current += increment_frac;
let display_frac = loading_bar.current / loading_bar.total;
let opt_display_frac = if display_frac >= 1.0 {
None // by convention, when it's done, we submit None
// any further updates will be ignored (also sending None)
} else {
Some(display_frac)
};
if f64::abs(display_frac - loading_bar.last_sent) > 0.005 {
// Emit event to indicatif progress bar
#[cfg(feature = "cli")]
{
loading_bar.cli_progress_bar.set_message(
message
.map(|x| x.to_string())
.unwrap_or(loading_bar.message.clone()),
);
loading_bar.cli_progress_bar.set_position(
(display_frac * CLI_PROGRESS_BAR_TOTAL as f64).round() as u64,
);
}
// Emit event to Tauri
#[cfg(feature = "tauri")]
event_state
.app
.emit_all(
"loading",
LoadingPayload {
fraction: opt_display_frac,
message: message
.unwrap_or(&loading_bar.message)
.to_string(),
event: loading_bar.bar_type.clone(),
loader_uuid: loading_bar.loading_bar_uuid,
},
)
.map_err(EventError::from)?;
loading_bar.last_sent = display_frac;
}
Ok(())
}
// emit_warning(message)
#[allow(dead_code)]
#[allow(unused_variables)]
pub async fn emit_warning(message: &str) -> crate::Result<()> {
#[cfg(feature = "tauri")]
{
let event_state = crate::EventState::get().await?;
event_state
.app
.emit_all(
"warning",
WarningPayload {
message: message.to_string(),
},
)
.map_err(EventError::from)?;
}
tracing::warn!("{}", message);
Ok(())
}
// emit_offline(bool)
// This is used to emit an event to the frontend that the app is offline after a refresh (or online)
#[allow(dead_code)]
#[allow(unused_variables)]
pub async fn emit_offline(offline: bool) -> crate::Result<()> {
#[cfg(feature = "tauri")]
{
let event_state = crate::EventState::get().await?;
event_state
.app
.emit_all("offline", offline)
.map_err(EventError::from)?;
}
Ok(())
}
// emit_command(CommandPayload::Something { something })
// ie: installing a pack, opening an .mrpack, etc
// Generally used for URL deep links and file opens that we want to handle in the frontend
#[allow(dead_code)]
#[allow(unused_variables)]
pub async fn emit_command(command: CommandPayload) -> crate::Result<()> {
tracing::debug!("Command: {}", serde_json::to_string(&command)?);
#[cfg(feature = "tauri")]
{
let event_state = crate::EventState::get().await?;
event_state
.app
.emit_all("command", command)
.map_err(EventError::from)?;
}
Ok(())
}
// emit_process(uuid, pid, event, message)
#[allow(unused_variables)]
pub async fn emit_process(
uuid: Uuid,
pid: u32,
event: ProcessPayloadType,
message: &str,
) -> crate::Result<()> {
#[cfg(feature = "tauri")]
{
let event_state = crate::EventState::get().await?;
event_state
.app
.emit_all(
"process",
ProcessPayload {
uuid,
pid,
event,
message: message.to_string(),
},
)
.map_err(EventError::from)?;
}
Ok(())
}
// emit_profile(path, event)
#[allow(unused_variables)]
pub async fn emit_profile(
uuid: Uuid,
profile_path_id: &ProfilePathId,
name: &str,
event: ProfilePayloadType,
) -> crate::Result<()> {
#[cfg(feature = "tauri")]
{
let path = profile_path_id.get_full_path().await?;
let event_state = crate::EventState::get().await?;
event_state
.app
.emit_all(
"profile",
ProfilePayload {
uuid,
profile_path_id: profile_path_id.clone(),
path,
name: name.to_string(),
event,
},
)
.map_err(EventError::from)?;
}
Ok(())
}
// loading_join! macro
// loading_join!(key: Option<&LoadingBarId>, total: f64, message: Option<&str>; task1, task2, task3...)
// This will submit a loading event with the given message for each task as it completes
// task1, task2, task3 are async tasks (bound to plain identifiers) that you want to join and await on
// key selects which loading bar to submit these results to: an Option<&LoadingBarId>. If None, no loading events are emitted
// total is the total amount of progress that all futures in this call should take up on the loading bar (split evenly amongst them)
// If message is Some(t), it overrides this loading bar's default message
// For example, if you want three tasks to together take up 0.1 of the progress bar, you would do:
// loading_join!(Some(&loading_bar), 0.1, None; task1, task2, task3)
// This will await on each of the tasks, and as each completes, it will emit a loading event advancing the bar by 0.033 (0.033, 0.066, 0.099 cumulatively)
// This should function as a drop-in replacement for tokio::try_join! in most cases; the wrapped futures call ? on emit_loading internally, so loading errors surface through the joined Result.
// (A commented usage sketch follows the macro definition below.)
#[macro_export]
macro_rules! count {
() => (0usize);
( $x:tt $($xs:tt)* ) => (1usize + $crate::count!($($xs)*));
}
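// Illustrative note (not part of the original source): count! expands to a compile-time token
// count, e.g. count!(a b c) becomes 1usize + 1usize + 1usize + 0usize == 3. loading_join! below
// uses it to split `total` evenly across the joined tasks.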
#[macro_export]
macro_rules! loading_join {
($key:expr, $total:expr, $message:expr; $($task:expr $(,)?)+) => {
{
let key = $key;
let message : Option<&str> = $message;
let num_futures = $crate::count!($($task)*);
let increment = $total / num_futures as f64;
paste::paste! {
$( let [ <unique_name $task>] = {
{
let key = key.clone();
let message = message.clone();
async move {
let res = $task.await;
if let Some(key) = key {
$crate::event::emit::emit_loading(key, increment, message).await?;
}
res
}
}
};)+
}
paste::paste! {
tokio::try_join! (
$( [ <unique_name $task>] ),+
)
}
}
};
}
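// Illustrative usage sketch (not part of the original source): fut_a and fut_b are hypothetical
// futures returning crate::Result<_>, bound to plain identifiers so paste! can derive wrapper names.
//
//     let fut_a = fetch_one_thing();      // hypothetical
//     let fut_b = fetch_another_thing();  // hypothetical
//     let (a, b) = loading_join!(Some(&loading_bar), 20.0, None; fut_a, fut_b)?;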
// A drop-in replacement for try_for_each_concurrent that emits loading events as it goes
// key selects which loading bar to update: an Option<&LoadingBarId>. If None, no loading events are emitted
// total is the total amount of progress that all futures in this call should take up on the loading bar (split evenly amongst them)
// If message is Some(t), it overrides this loading bar's default message
// num_futs is the number of futures that will be run, which must be passed in because the stream/iterator may not have a known size
// (A commented usage sketch follows the function below.)
#[tracing::instrument(skip(stream, f))]
#[theseus_macros::debug_pin]
pub async fn loading_try_for_each_concurrent<I, F, Fut, T>(
stream: I,
limit: Option<usize>,
key: Option<&LoadingBarId>,
total: f64,
num_futs: usize, // num is in here as we allow Iterator to be passed in, which doesn't have a size
message: Option<&str>,
f: F,
) -> crate::Result<()>
where
I: futures::TryStreamExt<Error = crate::Error> + TryStream<Ok = T>,
F: FnMut(T) -> Fut + Send,
Fut: Future<Output = crate::Result<()>> + Send,
T: Send,
{
let mut f = f;
stream
.try_for_each_concurrent(limit, |item| {
let f = f(item);
async move {
f.await?;
if let Some(key) = key {
emit_loading(key, total / (num_futs as f64), message)
.await?;
}
Ok(())
}
})
.await
}
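// Illustrative usage sketch (not part of the original source): `items`, `process_item`, and the
// loading bar reference are hypothetical. The stream must yield crate::Result items, and num_futs
// tells the helper how many equal increments to split `total` into.
//
//     let items: Vec<u32> = vec![1, 2, 3];
//     let num_futs = items.len();
//     loading_try_for_each_concurrent(
//         futures::stream::iter(items.into_iter().map(Ok::<u32, crate::Error>)),
//         None,                // no concurrency limit
//         Some(&loading_bar),  // which loading bar to advance
//         30.0,                // total progress taken up by this call
//         num_futs,
//         None,                // keep the bar's default message
//         |item| async move { process_item(item).await },
//     )
//     .await?;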

View File

@@ -0,0 +1,278 @@
//! Theseus state management system
use serde::{Deserialize, Serialize};
use std::{collections::HashMap, path::PathBuf, sync::Arc};
use tokio::sync::OnceCell;
use tokio::sync::RwLock;
use uuid::Uuid;
use crate::prelude::ProfilePathId;
use crate::state::SafeProcesses;
pub mod emit;
// Global event state
// Stores the Tauri app handle and other event-related state variables
static EVENT_STATE: OnceCell<Arc<EventState>> = OnceCell::const_new();
pub struct EventState {
/// Tauri app
#[cfg(feature = "tauri")]
pub app: tauri::AppHandle,
pub loading_bars: RwLock<HashMap<Uuid, LoadingBar>>,
}
impl EventState {
#[cfg(feature = "tauri")]
pub async fn init(app: tauri::AppHandle) -> crate::Result<Arc<Self>> {
EVENT_STATE
.get_or_try_init(|| async {
Ok(Arc::new(Self {
app,
loading_bars: RwLock::new(HashMap::new()),
}))
})
.await
.cloned()
}
#[cfg(not(feature = "tauri"))]
pub async fn init() -> crate::Result<Arc<Self>> {
EVENT_STATE
.get_or_try_init(|| async {
Ok(Arc::new(Self {
loading_bars: RwLock::new(HashMap::new()),
}))
})
.await
.cloned()
}
#[cfg(feature = "tauri")]
pub async fn get() -> crate::Result<Arc<Self>> {
Ok(EVENT_STATE.get().ok_or(EventError::NotInitialized)?.clone())
}
// Initialization requires no app handle in non-tauri mode, so we can just use the same function
#[cfg(not(feature = "tauri"))]
pub async fn get() -> crate::Result<Arc<Self>> {
Self::init().await
}
// Values provided should not be used directly, as they are clones and are not guaranteed to be up-to-date
pub async fn list_progress_bars() -> crate::Result<HashMap<Uuid, LoadingBar>>
{
let value = Self::get().await?;
let read = value.loading_bars.read().await;
let mut display_list: HashMap<Uuid, LoadingBar> = HashMap::new();
for (uuid, loading_bar) in read.iter() {
display_list.insert(*uuid, loading_bar.clone());
}
Ok(display_list)
}
#[cfg(feature = "tauri")]
pub async fn get_main_window() -> crate::Result<Option<tauri::Window>> {
use tauri::Manager;
let value = Self::get().await?;
Ok(value.app.get_window("main"))
}
}
#[derive(Serialize, Debug, Clone)]
pub struct LoadingBar {
// loading_bar_uuid should not be used directly by external functions, as it may not reflect the current state of the loading bar/hashmap
pub loading_bar_uuid: Uuid,
pub message: String,
pub total: f64,
pub current: f64,
#[serde(skip)]
pub last_sent: f64,
pub bar_type: LoadingBarType,
#[cfg(feature = "cli")]
#[serde(skip)]
pub cli_progress_bar: indicatif::ProgressBar,
}
#[derive(Serialize, Debug, Clone)]
pub struct LoadingBarId(Uuid);
// When Loading bar id is dropped, we should remove it from the hashmap
impl Drop for LoadingBarId {
fn drop(&mut self) {
let loader_uuid = self.0;
tokio::spawn(async move {
if let Ok(event_state) = EventState::get().await {
let mut bars = event_state.loading_bars.write().await;
#[cfg(any(feature = "tauri", feature = "cli"))]
if let Some(bar) = bars.remove(&loader_uuid) {
#[cfg(feature = "tauri")]
{
let loader_uuid = bar.loading_bar_uuid;
let event = bar.bar_type.clone();
let fraction = bar.current / bar.total;
use tauri::Manager;
let _ = event_state.app.emit_all(
"loading",
LoadingPayload {
fraction: None,
message: "Completed".to_string(),
event,
loader_uuid,
},
);
tracing::trace!(
"Exited at {fraction} for loading bar: {:?}",
loader_uuid
);
}
// Finish the indicatif progress bar
#[cfg(feature = "cli")]
{
let cli_progress_bar = bar.cli_progress_bar;
cli_progress_bar.finish();
}
}
#[cfg(not(any(feature = "tauri", feature = "cli")))]
bars.remove(&loader_uuid);
}
// complete() accesses State, and since a LoadingBarId is created during state initialization, we only complete if State is already initialized
// to avoid an infinite loop.
if crate::State::initialized() {
let _ = SafeProcesses::complete(
crate::state::ProcessType::LoadingBar,
loader_uuid,
)
.await;
}
});
}
}
#[derive(Serialize, Deserialize, Clone, Debug, Hash, PartialEq, Eq)]
#[serde(tag = "type")]
#[serde(rename_all = "snake_case")]
pub enum LoadingBarType {
StateInit,
JavaDownload {
version: u32,
},
PackFileDownload {
profile_path: PathBuf,
pack_name: String,
icon: Option<String>,
pack_version: String,
},
PackDownload {
profile_path: PathBuf,
pack_name: String,
icon: Option<PathBuf>,
pack_id: Option<String>,
pack_version: Option<String>,
},
MinecraftDownload {
profile_path: PathBuf,
profile_name: String,
},
ProfileUpdate {
profile_path: PathBuf,
profile_name: String,
},
ZipExtract {
profile_path: PathBuf,
profile_name: String,
},
ConfigChange {
new_path: PathBuf,
},
CopyProfile {
import_location: PathBuf,
profile_name: String,
},
}
#[derive(Serialize, Clone)]
pub struct LoadingPayload {
pub event: LoadingBarType,
pub loader_uuid: Uuid,
pub fraction: Option<f64>, // by convention, if optional, it means the loading is done
pub message: String,
}
#[derive(Serialize, Clone)]
pub struct OfflinePayload {
pub offline: bool,
}
#[derive(Serialize, Clone)]
pub struct WarningPayload {
pub message: String,
}
#[derive(Serialize, Clone)]
#[serde(tag = "event")]
pub enum CommandPayload {
InstallMod {
id: String,
},
InstallVersion {
id: String,
},
InstallModpack {
id: String,
},
RunMRPack {
// run or install .mrpack
path: PathBuf,
},
}
#[derive(Serialize, Clone)]
pub struct ProcessPayload {
pub uuid: Uuid, // processes in state are going to be identified by UUIDs, as they might change to different processes
pub pid: u32,
pub event: ProcessPayloadType,
pub message: String,
}
#[derive(Serialize, Clone, Debug)]
#[serde(rename_all = "snake_case")]
pub enum ProcessPayloadType {
Launched,
Updated, // eg: if the MinecraftChild changes to its post-command process instead of the Minecraft process
Finished,
}
#[derive(Serialize, Clone)]
pub struct ProfilePayload {
pub uuid: Uuid,
pub profile_path_id: ProfilePathId,
pub path: PathBuf,
pub name: String,
pub event: ProfilePayloadType,
}
#[derive(Serialize, Clone)]
#[serde(rename_all = "snake_case")]
pub enum ProfilePayloadType {
Created,
Added, // also triggered when Created
Synced,
Edited,
Removed,
}
#[derive(Debug, thiserror::Error)]
pub enum EventError {
#[error("Event state was not properly initialized")]
NotInitialized,
#[error("Non-existent loading bar of key: {0}")]
NoLoadingBar(Uuid),
#[cfg(feature = "tauri")]
#[error("Tauri error: {0}")]
TauriError(#[from] tauri::Error),
}

View File

@@ -0,0 +1,436 @@
//! Minecraft CLI argument logic
use crate::launcher::parse_rules;
use crate::state::Credentials;
use crate::{
state::{MemorySettings, WindowSize},
util::{io::IOError, platform::classpath_separator},
};
use daedalus::{
get_path_from_artifact,
minecraft::{Argument, ArgumentValue, Library, VersionType},
modded::SidedDataEntry,
};
use dunce::canonicalize;
use std::collections::HashSet;
use std::io::{BufRead, BufReader};
use std::{collections::HashMap, path::Path};
use uuid::Uuid;
// Replaces the space separator with a newline character, so as not to split the arguments
const TEMPORARY_REPLACE_CHAR: &str = "\n";
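// Illustrative note (not part of the original source): parse_arguments below replaces the literal
// spaces in an argument template with this character before substitution, and splits the parsed
// result on it afterwards. That way a substituted value that itself contains spaces (for example a
// game directory path) stays a single argument; only the template's original spaces create
// argument boundaries.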
pub fn get_class_paths(
libraries_path: &Path,
libraries: &[Library],
client_path: &Path,
java_arch: &str,
minecraft_updated: bool,
) -> crate::Result<String> {
let mut cps = libraries
.iter()
.filter_map(|library| {
if let Some(rules) = &library.rules {
if !parse_rules(rules, java_arch, minecraft_updated) {
return None;
}
}
if !library.include_in_classpath {
return None;
}
Some(get_lib_path(libraries_path, &library.name, false))
})
.collect::<Result<HashSet<_>, _>>()?;
cps.insert(
canonicalize(client_path)
.map_err(|_| {
crate::ErrorKind::LauncherError(format!(
"Specified class path {} does not exist",
client_path.to_string_lossy()
))
.as_error()
})?
.to_string_lossy()
.to_string(),
);
Ok(cps
.into_iter()
.collect::<Vec<_>>()
.join(classpath_separator(java_arch)))
}
pub fn get_class_paths_jar<T: AsRef<str>>(
libraries_path: &Path,
libraries: &[T],
java_arch: &str,
) -> crate::Result<String> {
let cps = libraries
.iter()
.map(|library| get_lib_path(libraries_path, library.as_ref(), false))
.collect::<Result<Vec<_>, _>>()?;
Ok(cps.join(classpath_separator(java_arch)))
}
pub fn get_lib_path(
libraries_path: &Path,
lib: &str,
allow_not_exist: bool,
) -> crate::Result<String> {
let mut path = libraries_path.to_path_buf();
path.push(get_path_from_artifact(lib)?);
if !path.exists() && allow_not_exist {
return Ok(path.to_string_lossy().to_string());
}
let path = &canonicalize(&path).map_err(|_| {
crate::ErrorKind::LauncherError(format!(
"Library file at path {} does not exist",
path.to_string_lossy()
))
.as_error()
})?;
Ok(path.to_string_lossy().to_string())
}
#[allow(clippy::too_many_arguments)]
pub fn get_jvm_arguments(
arguments: Option<&[Argument]>,
natives_path: &Path,
libraries_path: &Path,
class_paths: &str,
version_name: &str,
memory: MemorySettings,
custom_args: Vec<String>,
java_arch: &str,
) -> crate::Result<Vec<String>> {
let mut parsed_arguments = Vec::new();
if let Some(args) = arguments {
parse_arguments(
args,
&mut parsed_arguments,
|arg| {
parse_jvm_argument(
arg.to_string(),
natives_path,
libraries_path,
class_paths,
version_name,
java_arch,
)
},
java_arch,
)?;
} else {
parsed_arguments.push(format!(
"-Djava.library.path={}",
canonicalize(natives_path)
.map_err(|_| crate::ErrorKind::LauncherError(format!(
"Specified natives path {} does not exist",
natives_path.to_string_lossy()
))
.as_error())?
.to_string_lossy()
));
parsed_arguments.push("-cp".to_string());
parsed_arguments.push(class_paths.to_string());
}
parsed_arguments.push(format!("-Xmx{}M", memory.maximum));
for arg in custom_args {
if !arg.is_empty() {
parsed_arguments.push(arg);
}
}
Ok(parsed_arguments)
}
fn parse_jvm_argument(
mut argument: String,
natives_path: &Path,
libraries_path: &Path,
class_paths: &str,
version_name: &str,
java_arch: &str,
) -> crate::Result<String> {
argument.retain(|c| !c.is_whitespace());
Ok(argument
.replace(
"${natives_directory}",
&canonicalize(natives_path)
.map_err(|_| {
crate::ErrorKind::LauncherError(format!(
"Specified natives path {} does not exist",
natives_path.to_string_lossy()
))
.as_error()
})?
.to_string_lossy(),
)
.replace(
"${library_directory}",
&canonicalize(libraries_path)
.map_err(|_| {
crate::ErrorKind::LauncherError(format!(
"Specified libraries path {} does not exist",
libraries_path.to_string_lossy()
))
.as_error()
})?
.to_string_lossy(),
)
.replace("${classpath_separator}", classpath_separator(java_arch))
.replace("${launcher_name}", "theseus")
.replace("${launcher_version}", env!("CARGO_PKG_VERSION"))
.replace("${version_name}", version_name)
.replace("${classpath}", class_paths))
}
#[allow(clippy::too_many_arguments)]
pub fn get_minecraft_arguments(
arguments: Option<&[Argument]>,
legacy_arguments: Option<&str>,
credentials: &Credentials,
version: &str,
asset_index_name: &str,
game_directory: &Path,
assets_directory: &Path,
version_type: &VersionType,
resolution: WindowSize,
java_arch: &str,
) -> crate::Result<Vec<String>> {
if let Some(arguments) = arguments {
let mut parsed_arguments = Vec::new();
parse_arguments(
arguments,
&mut parsed_arguments,
|arg| {
parse_minecraft_argument(
arg,
&credentials.access_token,
&credentials.username,
credentials.id,
version,
asset_index_name,
game_directory,
assets_directory,
version_type,
resolution,
)
},
java_arch,
)?;
Ok(parsed_arguments)
} else if let Some(legacy_arguments) = legacy_arguments {
let mut parsed_arguments = Vec::new();
for x in legacy_arguments.split(' ') {
parsed_arguments.push(parse_minecraft_argument(
&x.replace(' ', TEMPORARY_REPLACE_CHAR),
&credentials.access_token,
&credentials.username,
credentials.id,
version,
asset_index_name,
game_directory,
assets_directory,
version_type,
resolution,
)?);
}
Ok(parsed_arguments)
} else {
Ok(Vec::new())
}
}
#[allow(clippy::too_many_arguments)]
fn parse_minecraft_argument(
argument: &str,
access_token: &str,
username: &str,
uuid: Uuid,
version: &str,
asset_index_name: &str,
game_directory: &Path,
assets_directory: &Path,
version_type: &VersionType,
resolution: WindowSize,
) -> crate::Result<String> {
Ok(argument
.replace("${accessToken}", access_token)
.replace("${auth_access_token}", access_token)
.replace("${auth_session}", access_token)
.replace("${auth_player_name}", username)
// TODO: add auth xuid eventually
.replace("${auth_xuid}", "0")
.replace("${auth_uuid}", &uuid.simple().to_string())
.replace("${uuid}", &uuid.simple().to_string())
.replace("${clientid}", "c4502edb-87c6-40cb-b595-64a280cf8906")
.replace("${user_properties}", "{}")
.replace("${user_type}", "msa")
.replace("${version_name}", version)
.replace("${assets_index_name}", asset_index_name)
.replace(
"${game_directory}",
&canonicalize(game_directory)
.map_err(|_| {
crate::ErrorKind::LauncherError(format!(
"Specified game directory {} does not exist",
game_directory.to_string_lossy()
))
.as_error()
})?
.to_string_lossy(),
)
.replace(
"${assets_root}",
&canonicalize(assets_directory)
.map_err(|_| {
crate::ErrorKind::LauncherError(format!(
"Specified assets directory {} does not exist",
assets_directory.to_string_lossy()
))
.as_error()
})?
.to_string_lossy(),
)
.replace(
"${game_assets}",
&canonicalize(assets_directory)
.map_err(|_| {
crate::ErrorKind::LauncherError(format!(
"Specified assets directory {} does not exist",
assets_directory.to_string_lossy()
))
.as_error()
})?
.to_string_lossy(),
)
.replace("${version_type}", version_type.as_str())
.replace("${resolution_width}", &resolution.0.to_string())
.replace("${resolution_height}", &resolution.1.to_string()))
}
fn parse_arguments<F>(
arguments: &[Argument],
parsed_arguments: &mut Vec<String>,
parse_function: F,
java_arch: &str,
) -> crate::Result<()>
where
F: Fn(&str) -> crate::Result<String>,
{
for argument in arguments {
match argument {
Argument::Normal(arg) => {
let parsed =
parse_function(&arg.replace(' ', TEMPORARY_REPLACE_CHAR))?;
for arg in parsed.split(TEMPORARY_REPLACE_CHAR) {
parsed_arguments.push(arg.to_string());
}
}
Argument::Ruled { rules, value } => {
if parse_rules(rules, java_arch, true) {
match value {
ArgumentValue::Single(arg) => {
parsed_arguments.push(parse_function(
&arg.replace(' ', TEMPORARY_REPLACE_CHAR),
)?);
}
ArgumentValue::Many(args) => {
for arg in args {
parsed_arguments.push(parse_function(
&arg.replace(' ', TEMPORARY_REPLACE_CHAR),
)?);
}
}
}
}
}
}
}
Ok(())
}
pub fn get_processor_arguments<T: AsRef<str>>(
libraries_path: &Path,
arguments: &[T],
data: &HashMap<String, SidedDataEntry>,
) -> crate::Result<Vec<String>> {
let mut new_arguments = Vec::new();
for argument in arguments {
let trimmed_arg = &argument.as_ref()[1..argument.as_ref().len() - 1];
if argument.as_ref().starts_with('{') {
if let Some(entry) = data.get(trimmed_arg) {
new_arguments.push(if entry.client.starts_with('[') {
get_lib_path(
libraries_path,
&entry.client[1..entry.client.len() - 1],
true,
)?
} else {
entry.client.clone()
})
}
} else if argument.as_ref().starts_with('[') {
new_arguments.push(get_lib_path(libraries_path, trimmed_arg, true)?)
} else {
new_arguments.push(argument.as_ref().to_string())
}
}
Ok(new_arguments)
}
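// Illustrative walkthrough (not part of the original source), with hypothetical values:
// - "{SOME_KEY}"           -> looked up in `data`; if its client value is "[group:artifact:1.0]"
//                             it resolves to that artifact's path under libraries_path, otherwise
//                             the client value is used verbatim
// - "[group:artifact:1.0]" -> resolved directly to the artifact's path under libraries_path
// - "--fix-light"          -> passed through unchanged (hypothetical flag)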
pub async fn get_processor_main_class(
path: String,
) -> crate::Result<Option<String>> {
let main_class = tokio::task::spawn_blocking(move || {
let zipfile = std::fs::File::open(&path)
.map_err(|e| IOError::with_path(e, &path))?;
let mut archive = zip::ZipArchive::new(zipfile).map_err(|_| {
crate::ErrorKind::LauncherError(format!(
"Cannot read processor at {}",
path
))
.as_error()
})?;
let file = archive.by_name("META-INF/MANIFEST.MF").map_err(|_| {
crate::ErrorKind::LauncherError(format!(
"Cannot read processor manifest at {}",
path
))
.as_error()
})?;
let reader = BufReader::new(file);
for line in reader.lines() {
let mut line = line.map_err(IOError::from)?;
line.retain(|c| !c.is_whitespace());
if line.starts_with("Main-Class:") {
if let Some(class) = line.split(':').nth(1) {
return Ok(Some(class.to_string()));
}
}
}
Ok::<Option<String>, crate::Error>(None)
})
.await??;
Ok(main_class)
}

View File

@@ -0,0 +1,353 @@
//! Downloader for Minecraft data
use crate::launcher::parse_rules;
use crate::state::CredentialsStore;
use crate::{
event::{
emit::{emit_loading, loading_try_for_each_concurrent},
LoadingBarId,
},
state::State,
util::{fetch::*, io, platform::OsExt},
};
use daedalus::{
self as d,
minecraft::{
Asset, AssetsIndex, Library, Os, Version as GameVersion,
VersionInfo as GameVersionInfo,
},
modded::LoaderVersion,
};
use futures::prelude::*;
use tokio::sync::OnceCell;
#[tracing::instrument(skip(st, version))]
pub async fn download_minecraft(
st: &State,
version: &GameVersionInfo,
loading_bar: &LoadingBarId,
java_arch: &str,
force: bool,
minecraft_updated: bool,
) -> crate::Result<()> {
tracing::info!("Downloading Minecraft version {}", version.id);
// 5
let assets_index =
download_assets_index(st, version, Some(loading_bar), force).await?;
let amount = if version
.processors
.as_ref()
.map(|x| !x.is_empty())
.unwrap_or(false)
{
25.0
} else {
40.0
};
tokio::try_join! {
// Total loading sums to 90 (or 60 when processors are present, since amount is 25 instead of 40)
download_client(st, version, Some(loading_bar), force), // 10
download_assets(st, version.assets == "legacy", &assets_index, Some(loading_bar), amount, force), // 40
download_libraries(st, version.libraries.as_slice(), &version.id, Some(loading_bar), amount, java_arch, force, minecraft_updated) // 40
}?;
tracing::info!("Done downloading Minecraft!");
Ok(())
}
#[tracing::instrument(skip_all, fields(version = version.id.as_str(), loader = ?loader))]
#[theseus_macros::debug_pin]
pub async fn download_version_info(
st: &State,
version: &GameVersion,
loader: Option<&LoaderVersion>,
force: Option<bool>,
loading_bar: Option<&LoadingBarId>,
) -> crate::Result<GameVersionInfo> {
let version_id = loader
.map_or(version.id.clone(), |it| format!("{}-{}", version.id, it.id));
tracing::debug!("Loading version info for Minecraft {version_id}");
let path = st
.directories
.version_dir(&version_id)
.await
.join(format!("{version_id}.json"));
let res = if path.exists() && !force.unwrap_or(false) {
io::read(path)
.err_into::<crate::Error>()
.await
.and_then(|ref it| Ok(serde_json::from_slice(it)?))
} else {
tracing::info!("Downloading version info for version {}", &version.id);
let mut info = d::minecraft::fetch_version_info(version).await?;
if let Some(loader) = loader {
let partial = d::modded::fetch_partial_version(&loader.url).await?;
info = d::modded::merge_partial_version(partial, info);
}
info.id = version_id.clone();
write(&path, &serde_json::to_vec(&info)?, &st.io_semaphore).await?;
Ok(info)
}?;
if let Some(loading_bar) = loading_bar {
emit_loading(loading_bar, 5.0, None).await?;
}
tracing::debug!("Loaded version info for Minecraft {version_id}");
Ok(res)
}
#[tracing::instrument(skip_all)]
#[theseus_macros::debug_pin]
pub async fn download_client(
st: &State,
version_info: &GameVersionInfo,
loading_bar: Option<&LoadingBarId>,
force: bool,
) -> crate::Result<()> {
let version = &version_info.id;
tracing::debug!("Locating client for version {version}");
let client_download = version_info
.downloads
.get(&d::minecraft::DownloadType::Client)
.ok_or(
crate::ErrorKind::LauncherError(format!(
"No client downloads exist for version {version}"
))
.as_error(),
)?;
let path = st
.directories
.version_dir(version)
.await
.join(format!("{version}.jar"));
if !path.exists() || force {
let bytes = fetch(
&client_download.url,
Some(&client_download.sha1),
&st.fetch_semaphore,
&CredentialsStore(None),
)
.await?;
write(&path, &bytes, &st.io_semaphore).await?;
tracing::trace!("Fetched client version {version}");
}
if let Some(loading_bar) = loading_bar {
emit_loading(loading_bar, 9.0, None).await?;
}
tracing::debug!("Client loaded for version {version}!");
Ok(())
}
#[tracing::instrument(skip_all)]
#[theseus_macros::debug_pin]
pub async fn download_assets_index(
st: &State,
version: &GameVersionInfo,
loading_bar: Option<&LoadingBarId>,
force: bool,
) -> crate::Result<AssetsIndex> {
tracing::debug!("Loading assets index");
let path = st
.directories
.assets_index_dir()
.await
.join(format!("{}.json", &version.asset_index.id));
let res = if path.exists() && !force {
io::read(path)
.err_into::<crate::Error>()
.await
.and_then(|ref it| Ok(serde_json::from_slice(it)?))
} else {
let index = d::minecraft::fetch_assets_index(version).await?;
write(&path, &serde_json::to_vec(&index)?, &st.io_semaphore).await?;
tracing::info!("Fetched assets index");
Ok(index)
}?;
if let Some(loading_bar) = loading_bar {
emit_loading(loading_bar, 5.0, None).await?;
}
tracing::debug!("Assets index successfully loaded!");
Ok(res)
}
#[tracing::instrument(skip(st, index))]
#[theseus_macros::debug_pin]
pub async fn download_assets(
st: &State,
with_legacy: bool,
index: &AssetsIndex,
loading_bar: Option<&LoadingBarId>,
loading_amount: f64,
force: bool,
) -> crate::Result<()> {
tracing::debug!("Loading assets");
let num_futs = index.objects.len();
let assets = stream::iter(index.objects.iter())
.map(Ok::<(&String, &Asset), crate::Error>);
loading_try_for_each_concurrent(assets,
None,
loading_bar,
loading_amount,
num_futs,
None,
|(name, asset)| async move {
let hash = &asset.hash;
let resource_path = st.directories.object_dir(hash).await;
let url = format!(
"https://resources.download.minecraft.net/{sub_hash}/{hash}",
sub_hash = &hash[..2]
);
let fetch_cell = OnceCell::<bytes::Bytes>::new();
tokio::try_join! {
async {
if !resource_path.exists() || force {
let resource = fetch_cell
.get_or_try_init(|| fetch(&url, Some(hash), &st.fetch_semaphore, &CredentialsStore(None)))
.await?;
write(&resource_path, resource, &st.io_semaphore).await?;
tracing::trace!("Fetched asset with hash {hash}");
}
Ok::<_, crate::Error>(())
},
async {
let resource_path = st.directories.legacy_assets_dir().await.join(
name.replace('/', &String::from(std::path::MAIN_SEPARATOR))
);
if with_legacy && !resource_path.exists() || force {
let resource = fetch_cell
.get_or_try_init(|| fetch(&url, Some(hash), &st.fetch_semaphore, &CredentialsStore(None)))
.await?;
write(&resource_path, resource, &st.io_semaphore).await?;
tracing::trace!("Fetched legacy asset with hash {hash}");
}
Ok::<_, crate::Error>(())
},
}?;
tracing::trace!("Loaded asset with hash {hash}");
Ok(())
}).await?;
tracing::debug!("Done loading assets!");
Ok(())
}
#[tracing::instrument(skip(st, libraries))]
#[theseus_macros::debug_pin]
#[allow(clippy::too_many_arguments)]
pub async fn download_libraries(
st: &State,
libraries: &[Library],
version: &str,
loading_bar: Option<&LoadingBarId>,
loading_amount: f64,
java_arch: &str,
force: bool,
minecraft_updated: bool,
) -> crate::Result<()> {
tracing::debug!("Loading libraries");
tokio::try_join! {
io::create_dir_all(st.directories.libraries_dir().await),
io::create_dir_all(st.directories.version_natives_dir(version).await)
}?;
let num_files = libraries.len();
loading_try_for_each_concurrent(
stream::iter(libraries.iter())
.map(Ok::<&Library, crate::Error>), None, loading_bar,loading_amount,num_files, None,|library| async move {
if let Some(rules) = &library.rules {
if !parse_rules(rules, java_arch, minecraft_updated) {
tracing::trace!("Skipped library {}", &library.name);
return Ok(());
}
}
tokio::try_join! {
async {
let artifact_path = d::get_path_from_artifact(&library.name)?;
let path = st.directories.libraries_dir().await.join(&artifact_path);
match library.downloads {
_ if path.exists() && !force => Ok(()),
Some(d::minecraft::LibraryDownloads {
artifact: Some(ref artifact),
..
}) => {
let bytes = fetch(&artifact.url, Some(&artifact.sha1), &st.fetch_semaphore, &CredentialsStore(None))
.await?;
write(&path, &bytes, &st.io_semaphore).await?;
tracing::trace!("Fetched library {} to path {:?}", &library.name, &path);
Ok::<_, crate::Error>(())
}
_ => {
let url = [
library
.url
.as_deref()
.unwrap_or("https://libraries.minecraft.net/"),
&artifact_path
].concat();
let bytes = fetch(&url, None, &st.fetch_semaphore, &CredentialsStore(None)).await?;
write(&path, &bytes, &st.io_semaphore).await?;
tracing::trace!("Fetched library {} to path {:?}", &library.name, &path);
Ok::<_, crate::Error>(())
}
}
},
async {
// HACK: pseudo try block using or else
if let Some((os_key, classifiers)) = None.or_else(|| Some((
library
.natives
.as_ref()?
.get(&Os::native_arch(java_arch))?,
library
.downloads
.as_ref()?
.classifiers
.as_ref()?
))) {
let parsed_key = os_key.replace(
"${arch}",
crate::util::platform::ARCH_WIDTH,
);
if let Some(native) = classifiers.get(&parsed_key) {
let data = fetch(&native.url, Some(&native.sha1), &st.fetch_semaphore, &CredentialsStore(None)).await?;
let reader = std::io::Cursor::new(&data);
if let Ok(mut archive) = zip::ZipArchive::new(reader) {
match archive.extract(st.directories.version_natives_dir(version).await) {
Ok(_) => tracing::debug!("Fetched native {}", &library.name),
Err(err) => tracing::error!("Failed extracting native {}. err: {}", &library.name, err)
}
} else {
tracing::error!("Failed extracting native {}", &library.name)
}
}
}
Ok(())
}
}?;
tracing::debug!("Loaded library {}", library.name);
Ok(())
}
).await?;
tracing::debug!("Done loading libraries!");
Ok(())
}

View File

@@ -0,0 +1,633 @@
//! Logic for launching Minecraft
use crate::event::emit::{emit_loading, init_or_edit_loading};
use crate::event::{LoadingBarId, LoadingBarType};
use crate::launcher::io::IOError;
use crate::prelude::JavaVersion;
use crate::state::{Credentials, ProfileInstallStage};
use crate::util::io;
use crate::{
process,
state::{self as st, MinecraftChild},
State,
};
use chrono::Utc;
use daedalus as d;
use daedalus::minecraft::{RuleAction, VersionInfo};
use st::Profile;
use std::collections::HashMap;
use std::sync::Arc;
use tokio::process::Command;
use uuid::Uuid;
mod args;
pub mod download;
// All rules evaluate to None -> disallowed
// At least one Some(false) -> disallowed
// Otherwise (at least one Some(true) and no Some(false)) -> allowed
// (If every rule's action is Disallow and none match, the list is treated as allowed)
#[tracing::instrument]
pub fn parse_rules(
rules: &[d::minecraft::Rule],
java_version: &str,
minecraft_updated: bool,
) -> bool {
let mut x = rules
.iter()
.map(|x| parse_rule(x, java_version, minecraft_updated))
.collect::<Vec<Option<bool>>>();
if rules
.iter()
.all(|x| matches!(x.action, RuleAction::Disallow))
{
x.push(Some(true))
}
!(x.iter().any(|x| x == &Some(false)) || x.iter().all(|x| x.is_none()))
}
// A rule whose condition matches, with action Disallow -> Some(false): must NOT be included
// A rule whose condition does not match, with action Disallow -> None: doesn't factor into the final result
// A rule whose condition does not match, with action Allow -> None: doesn't factor into the final result
// A rule whose condition matches, with action Allow -> Some(true): should be included
#[tracing::instrument]
pub fn parse_rule(
rule: &d::minecraft::Rule,
java_version: &str,
minecraft_updated: bool,
) -> Option<bool> {
use d::minecraft::{Rule, RuleAction};
let res = match rule {
Rule {
os: Some(ref os), ..
} => {
crate::util::platform::os_rule(os, java_version, minecraft_updated)
}
Rule {
features: Some(ref features),
..
} => {
!features.is_demo_user.unwrap_or(true)
|| features.has_custom_resolution.unwrap_or(false)
|| !features.has_quick_plays_support.unwrap_or(true)
|| !features.is_quick_play_multiplayer.unwrap_or(true)
|| !features.is_quick_play_realms.unwrap_or(true)
|| !features.is_quick_play_singleplayer.unwrap_or(true)
}
_ => return Some(true),
};
match rule.action {
RuleAction::Allow => {
if res {
Some(true)
} else {
None
}
}
RuleAction::Disallow => {
if res {
Some(false)
} else {
None
}
}
}
}
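// Illustrative walkthrough (not part of the original source; the os names are hypothetical): for a
// library carrying the single rule { action: Allow, os: "osx" }, parse_rule yields Some(true) on
// macOS and None elsewhere, so parse_rules allows it on macOS and disallows it on other platforms
// (all None -> disallowed). If a second rule { action: Disallow, os: "linux" } also matched, its
// Some(false) would win and the library would be excluded regardless of the Allow.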
macro_rules! processor_rules {
($dest:expr; $($name:literal : client => $client:expr, server => $server:expr;)+) => {
$(std::collections::HashMap::insert(
$dest,
String::from($name),
daedalus::modded::SidedDataEntry {
client: String::from($client),
server: String::from($server),
},
);)+
}
}
pub async fn get_java_version_from_profile(
profile: &Profile,
version_info: &VersionInfo,
) -> crate::Result<Option<JavaVersion>> {
if let Some(java) = profile.java.clone().and_then(|x| x.override_version) {
Ok(Some(java))
} else {
let key = version_info
.java_version
.as_ref()
.map(|it| it.major_version)
.unwrap_or(8);
let state = State::get().await?;
let settings = state.settings.read().await;
if let Some(java) = settings.java_globals.get(&format!("JAVA_{key}")) {
return Ok(Some(java.clone()));
}
Ok(None)
}
}
#[tracing::instrument(skip(profile))]
#[theseus_macros::debug_pin]
pub async fn install_minecraft(
profile: &Profile,
existing_loading_bar: Option<LoadingBarId>,
repairing: bool,
) -> crate::Result<()> {
let sync_projects = existing_loading_bar.is_some();
let loading_bar = init_or_edit_loading(
existing_loading_bar,
LoadingBarType::MinecraftDownload {
// If we are downloading minecraft for a profile, provide its name and path
profile_name: profile.metadata.name.clone(),
profile_path: profile.get_profile_full_path().await?,
},
100.0,
"Downloading Minecraft",
)
.await?;
crate::api::profile::edit(&profile.profile_id(), |prof| {
prof.install_stage = ProfileInstallStage::Installing;
async { Ok(()) }
})
.await?;
State::sync().await?;
if sync_projects {
Profile::sync_projects_task(profile.profile_id(), true);
}
let state = State::get().await?;
let instance_path =
&io::canonicalize(profile.get_profile_full_path().await?)?;
let metadata = state.metadata.read().await;
let version_index = metadata
.minecraft
.versions
.iter()
.position(|it| it.id == profile.metadata.game_version)
.ok_or(crate::ErrorKind::LauncherError(format!(
"Invalid game version: {}",
profile.metadata.game_version
)))?;
let version = &metadata.minecraft.versions[version_index];
let minecraft_updated = version_index
<= metadata
.minecraft
.versions
.iter()
.position(|x| x.id == "22w16a")
.unwrap_or(0);
let version_jar = profile
.metadata
.loader_version
.as_ref()
.map_or(version.id.clone(), |it| {
format!("{}-{}", version.id.clone(), it.id.clone())
});
// Download version info (5)
let mut version_info = download::download_version_info(
&state,
version,
profile.metadata.loader_version.as_ref(),
Some(repairing),
Some(&loading_bar),
)
.await?;
// TODO: check if java exists; if not, install it and add it to the install step
let key = version_info
.java_version
.as_ref()
.map(|it| it.major_version)
.unwrap_or(8);
let (java_version, set_java) = if let Some(java_version) =
get_java_version_from_profile(profile, &version_info).await?
{
(std::path::PathBuf::from(java_version.path), false)
} else {
let path = crate::api::jre::auto_install_java(key).await?;
(path, true)
};
// Test jre version
let java_version = crate::api::jre::check_jre(java_version.clone())
.await?
.ok_or_else(|| {
crate::ErrorKind::LauncherError(format!(
"Java path invalid or non-functional: {:?}",
java_version
))
})?;
if set_java {
{
let mut settings = state.settings.write().await;
settings
.java_globals
.insert(format!("JAVA_{key}"), java_version.clone());
}
State::sync().await?;
}
// Download minecraft (5-90)
download::download_minecraft(
&state,
&version_info,
&loading_bar,
&java_version.architecture,
repairing,
minecraft_updated,
)
.await?;
if let Some(processors) = &version_info.processors {
let client_path = state
.directories
.version_dir(&version_jar)
.await
.join(format!("{version_jar}.jar"));
let libraries_dir = state.directories.libraries_dir().await;
if let Some(ref mut data) = version_info.data {
processor_rules! {
data;
"SIDE":
client => "client",
server => "";
"MINECRAFT_JAR" :
client => client_path.to_string_lossy(),
server => "";
"MINECRAFT_VERSION":
client => profile.metadata.game_version.clone(),
server => "";
"ROOT":
client => instance_path.to_string_lossy(),
server => "";
"LIBRARY_DIR":
client => libraries_dir.to_string_lossy(),
server => "";
}
emit_loading(&loading_bar, 0.0, Some("Running forge processors"))
.await?;
let total_length = processors.len();
// Forge processors (90-100)
for (index, processor) in processors.iter().enumerate() {
if let Some(sides) = &processor.sides {
if !sides.contains(&String::from("client")) {
continue;
}
}
let cp = wrap_ref_builder!(cp = processor.classpath.clone() => {
cp.push(processor.jar.clone())
});
let child = Command::new(&java_version.path)
.arg("-cp")
.arg(args::get_class_paths_jar(
&libraries_dir,
&cp,
&java_version.architecture,
)?)
.arg(
args::get_processor_main_class(args::get_lib_path(
&libraries_dir,
&processor.jar,
false,
)?)
.await?
.ok_or_else(|| {
crate::ErrorKind::LauncherError(format!(
"Could not find processor main class for {}",
processor.jar
))
})?,
)
.args(args::get_processor_arguments(
&libraries_dir,
&processor.args,
data,
)?)
.output()
.await
.map_err(|e| IOError::with_path(e, &java_version.path))
.map_err(|err| {
crate::ErrorKind::LauncherError(format!(
"Error running processor: {err}",
))
})?;
if !child.status.success() {
return Err(crate::ErrorKind::LauncherError(format!(
"Processor error: {}",
String::from_utf8_lossy(&child.stderr)
))
.as_error());
}
emit_loading(
&loading_bar,
30.0 / total_length as f64,
Some(&format!(
"Running forge processor {}/{}",
index, total_length
)),
)
.await?;
}
}
}
crate::api::profile::edit(&profile.profile_id(), |prof| {
prof.install_stage = ProfileInstallStage::Installed;
async { Ok(()) }
})
.await?;
State::sync().await?;
emit_loading(&loading_bar, 1.0, Some("Finished installing")).await?;
Ok(())
}
#[tracing::instrument(skip_all)]
#[theseus_macros::debug_pin]
#[allow(clippy::too_many_arguments)]
pub async fn launch_minecraft(
java_args: &[String],
env_args: &[(String, String)],
mc_set_options: &[(String, String)],
wrapper: &Option<String>,
memory: &st::MemorySettings,
resolution: &st::WindowSize,
credentials: &Credentials,
post_exit_hook: Option<String>,
profile: &Profile,
) -> crate::Result<Arc<tokio::sync::RwLock<MinecraftChild>>> {
if profile.install_stage == ProfileInstallStage::PackInstalling
|| profile.install_stage == ProfileInstallStage::Installing
{
return Err(crate::ErrorKind::LauncherError(
"Profile is still installing".to_string(),
)
.into());
}
if profile.install_stage != ProfileInstallStage::Installed {
install_minecraft(profile, None, false).await?;
}
let state = State::get().await?;
let metadata = state.metadata.read().await;
let instance_path = profile.get_profile_full_path().await?;
let instance_path = &io::canonicalize(instance_path)?;
let version_index = metadata
.minecraft
.versions
.iter()
.position(|it| it.id == profile.metadata.game_version)
.ok_or(crate::ErrorKind::LauncherError(format!(
"Invalid game version: {}",
profile.metadata.game_version
)))?;
let version = &metadata.minecraft.versions[version_index];
let minecraft_updated = version_index
<= metadata
.minecraft
.versions
.iter()
.position(|x| x.id == "22w16a")
.unwrap_or(0);
let version_jar = profile
.metadata
.loader_version
.as_ref()
.map_or(version.id.clone(), |it| {
format!("{}-{}", version.id.clone(), it.id.clone())
});
let version_info = download::download_version_info(
&state,
version,
profile.metadata.loader_version.as_ref(),
None,
None,
)
.await?;
let java_version = get_java_version_from_profile(profile, &version_info)
.await?
.ok_or_else(|| {
crate::ErrorKind::LauncherError(
"Missing correct java installation".to_string(),
)
})?;
// Test jre version
let java_version =
crate::api::jre::check_jre(java_version.path.clone().into())
.await?
.ok_or_else(|| {
crate::ErrorKind::LauncherError(format!(
"Java path invalid or non-functional: {}",
java_version.path
))
})?;
let client_path = state
.directories
.version_dir(&version_jar)
.await
.join(format!("{version_jar}.jar"));
let args = version_info.arguments.clone().unwrap_or_default();
let mut command = match wrapper {
Some(hook) => {
wrap_ref_builder!(it = Command::new(hook) => {it.arg(&java_version.path)})
}
None => Command::new(&java_version.path),
};
let env_args = Vec::from(env_args);
// Check if profile has a running profile, and reject running the command if it does
// Done late so a quick double call doesn't launch two instances
let existing_processes =
process::get_uuids_by_profile_path(profile.profile_id()).await?;
if let Some(uuid) = existing_processes.first() {
return Err(crate::ErrorKind::LauncherError(format!(
"Profile {} is already running at UUID: {uuid}",
profile.profile_id()
))
.as_error());
}
command
.args(
args::get_jvm_arguments(
args.get(&d::minecraft::ArgumentType::Jvm)
.map(|x| x.as_slice()),
&state.directories.version_natives_dir(&version_jar).await,
&state.directories.libraries_dir().await,
&args::get_class_paths(
&state.directories.libraries_dir().await,
version_info.libraries.as_slice(),
&client_path,
&java_version.architecture,
minecraft_updated,
)?,
&version_jar,
*memory,
Vec::from(java_args),
&java_version.architecture,
)?
.into_iter()
.collect::<Vec<_>>(),
)
.arg(version_info.main_class.clone())
.args(
args::get_minecraft_arguments(
args.get(&d::minecraft::ArgumentType::Game)
.map(|x| x.as_slice()),
version_info.minecraft_arguments.as_deref(),
credentials,
&version.id,
&version_info.asset_index.id,
instance_path,
&state.directories.assets_dir().await,
&version.type_,
*resolution,
&java_version.architecture,
)?
.into_iter()
.collect::<Vec<_>>(),
)
.current_dir(instance_path.clone());
// The Cargo-set DYLD_FALLBACK_LIBRARY_PATH breaks Minecraft on macOS when testing through the playground
#[cfg(target_os = "macos")]
if std::env::var("CARGO").is_ok() {
command.env_remove("DYLD_FALLBACK_LIBRARY_PATH");
}
// Java options should be set in instance options (the existence of _JAVA_OPTIONS overrides them)
command.env_remove("_JAVA_OPTIONS");
command.envs(env_args);
// Overwrites the minecraft options.txt file with the settings from the profile
// Uses 'key:value' syntax, which is not quite YAML
use regex::Regex;
if !mc_set_options.is_empty() {
let options_path = instance_path.join("options.txt");
let mut options_string = String::new();
if options_path.exists() {
options_string = io::read_to_string(&options_path).await?;
}
for (key, value) in mc_set_options {
let re = Regex::new(&format!(r"(?m)^{}:.*$", regex::escape(key)))?;
// check if the regex exists in the file
if !re.is_match(&options_string) {
// The key was not found in the file, so append it
options_string.push_str(&format!("\n{}:{}", key, value));
} else {
let replaced_string = re
.replace_all(&options_string, &format!("{}:{}", key, value))
.to_string();
options_string = replaced_string;
}
}
io::write(&options_path, options_string).await?;
}
crate::api::profile::edit(&profile.profile_id(), |prof| {
prof.metadata.last_played = Some(Utc::now());
async { Ok(()) }
})
.await?;
State::sync().await?;
let mut censor_strings = HashMap::new();
let username = whoami::username();
censor_strings.insert(
format!("/{}/", username),
"/{COMPUTER_USERNAME}/".to_string(),
);
censor_strings.insert(
format!("\\{}\\", username),
"\\{COMPUTER_USERNAME}\\".to_string(),
);
censor_strings.insert(
credentials.access_token.clone(),
"{MINECRAFT_ACCESS_TOKEN}".to_string(),
);
censor_strings.insert(
credentials.username.clone(),
"{MINECRAFT_USERNAME}".to_string(),
);
censor_strings.insert(
credentials.id.as_simple().to_string(),
"{MINECRAFT_UUID}".to_string(),
);
censor_strings.insert(
credentials.id.as_hyphenated().to_string(),
"{MINECRAFT_UUID}".to_string(),
);
// If in tauri, and the 'minimize on launch' setting is enabled, minimize the window
#[cfg(feature = "tauri")]
{
use crate::EventState;
let window = EventState::get_main_window().await?;
if let Some(window) = window {
let settings = state.settings.read().await;
if settings.hide_on_process {
window.minimize()?;
}
}
}
if !*state.offline.read().await {
// Add game played to discord rich presence
let _ = state
.discord_rpc
.set_activity(&format!("Playing {}", profile.metadata.name), true)
.await;
}
// Create Minecraft child by inserting it into the state
// This also spawns the process and prepares the subsequent processes
let mut state_children = state.children.write().await;
state_children
.insert_new_process(
Uuid::new_v4(),
profile.profile_id(),
command,
post_exit_hook,
censor_strings,
)
.await
}

26
libs/theseus/src/lib.rs Normal file
View File

@@ -0,0 +1,26 @@
/*!
# Theseus
Theseus is a library which provides utilities for launching Minecraft, creating Modrinth modpacks,
and launching Modrinth modpacks
*/
#![warn(unused_import_braces)]
#![deny(unused_must_use)]
#[macro_use]
mod util;
mod api;
mod config;
mod error;
mod event;
mod launcher;
mod logger;
mod state;
pub use api::*;
pub use error::*;
pub use event::{EventState, LoadingBar, LoadingBarType};
pub use logger::start_logger;
pub use state::InnerProjectPathUnix;
pub use state::State;

View File

@@ -0,0 +1,77 @@
/*
tracing is configured based on the environment variable RUST_LOG=xxx, which controls how many logs to show
ERROR > WARN > INFO > DEBUG > TRACE
eg. RUST_LOG=info will show info, warn, and error logs
RUST_LOG="theseus=trace" will show *all* messages, but from theseus only (and not from dependencies using similar crates)
Error messages returned to Tauri will display as traced error logs if they return an error.
This will also include an attached span trace if the error is from a tracing error and the level is set to info, debug, or trace
on unix:
RUST_LOG="theseus=trace" {run command}
The default is theseus=info, meaning only logs from theseus will be displayed, and only at the info level or higher.
*/
use tracing_appender::non_blocking::WorkerGuard;
// Handling for the live development logging
// This will log to the console, and will not log to a file
#[cfg(debug_assertions)]
pub fn start_logger() -> Option<WorkerGuard> {
use tracing_subscriber::prelude::*;
let filter = tracing_subscriber::EnvFilter::try_from_default_env()
.unwrap_or_else(|_| {
tracing_subscriber::EnvFilter::new("theseus=info,theseus_gui=info")
});
let subscriber = tracing_subscriber::registry()
.with(tracing_subscriber::fmt::layer())
.with(filter)
.with(tracing_error::ErrorLayer::default());
tracing::subscriber::set_global_default(subscriber)
.expect("setting default subscriber failed");
None
}
// Handling for the live production logging
// This will log to a file in the logs directory, and will not show any logs in the console
#[cfg(not(debug_assertions))]
pub fn start_logger() -> Option<WorkerGuard> {
use crate::prelude::DirectoryInfo;
use tracing_appender::rolling::{RollingFileAppender, Rotation};
use tracing_subscriber::fmt::time::ChronoLocal;
use tracing_subscriber::prelude::*;
// Initialize and get logs directory path
let logs_dir = if let Some(d) = DirectoryInfo::launcher_logs_dir() {
d
} else {
eprintln!("Could not start logger");
return None;
};
let filter = tracing_subscriber::EnvFilter::try_from_default_env()
.unwrap_or_else(|_| tracing_subscriber::EnvFilter::new("theseus=info"));
let file_appender =
RollingFileAppender::new(Rotation::DAILY, logs_dir, "theseus.log");
let (non_blocking, guard) = tracing_appender::non_blocking(file_appender);
let subscriber = tracing_subscriber::registry()
.with(
tracing_subscriber::fmt::layer()
.with_writer(non_blocking)
.with_ansi(false) // disable ANSI escape codes
.with_timer(ChronoLocal::rfc_3339()),
)
.with(filter)
.with(tracing_error::ErrorLayer::default());
tracing::subscriber::set_global_default(subscriber)
.expect("Setting default subscriber failed");
Some(guard)
}
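// Illustrative usage sketch (not part of the original source): the returned WorkerGuard must be
// kept alive for the lifetime of the program so buffered log lines keep being flushed; dropping it
// stops the background writer.
//
//     let _log_guard = theseus::start_logger();
//     // ... run the launcher ...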

View File

@@ -0,0 +1,728 @@
use super::{Profile, ProfilePathId};
use chrono::{DateTime, Utc};
use serde::Deserialize;
use serde::Serialize;
use std::{collections::HashMap, sync::Arc};
use tokio::process::Child;
use tokio::process::Command;
use tokio::sync::RwLock;
use crate::event::emit::emit_process;
use crate::event::ProcessPayloadType;
use crate::util::fetch::read_json;
use crate::util::io::IOError;
use crate::{profile, ErrorKind};
use tokio::task::JoinHandle;
use uuid::Uuid;
const PROCESSES_JSON: &str = "processes.json";
// Child processes (instances of Minecraft)
// A wrapper over a HashMap connecting UUID -> MinecraftChild
pub struct Children(HashMap<Uuid, Arc<RwLock<MinecraftChild>>>);
#[derive(Debug)]
pub enum ChildType {
// A child process that is being managed by tokio
TokioChild(Child),
// A child process that was rescued from a cache (e.g. a process that was launched by theseus before the launcher was restarted)
// This may not have all the same functionality as a TokioChild
RescuedPID(u32),
}
#[derive(Serialize, Deserialize, Debug)]
pub struct ProcessCache {
pub pid: u32,
pub uuid: Uuid,
pub start_time: u64,
pub name: String,
pub exe: String,
pub profile_relative_path: ProfilePathId,
pub post_command: Option<String>,
}
impl ChildType {
pub async fn try_wait(&mut self) -> crate::Result<Option<i32>> {
match self {
ChildType::TokioChild(child) => Ok(child
.try_wait()
.map_err(IOError::from)?
.map(|x| x.code().unwrap_or(0))),
ChildType::RescuedPID(pid) => {
let mut system = sysinfo::System::new();
if !system.refresh_process(sysinfo::Pid::from_u32(*pid)) {
return Ok(Some(0));
}
let process = system.process(sysinfo::Pid::from_u32(*pid));
if let Some(process) = process {
if process.status() == sysinfo::ProcessStatus::Run {
Ok(None)
} else {
Ok(Some(0))
}
} else {
Ok(Some(0))
}
}
}
}
pub async fn kill(&mut self) -> crate::Result<()> {
match self {
ChildType::TokioChild(child) => {
Ok(child.kill().await.map_err(IOError::from)?)
}
ChildType::RescuedPID(pid) => {
let mut system = sysinfo::System::new();
if system.refresh_process(sysinfo::Pid::from_u32(*pid)) {
let process = system.process(sysinfo::Pid::from_u32(*pid));
if let Some(process) = process {
process.kill();
}
}
Ok(())
}
}
}
pub fn id(&self) -> Option<u32> {
match self {
ChildType::TokioChild(child) => child.id(),
ChildType::RescuedPID(pid) => Some(*pid),
}
}
// Caches the process so that it can be restored if the launcher is restarted
// Stored in the caches/metadata/processes.json file
pub async fn cache_process(
&self,
uuid: uuid::Uuid,
profile_path_id: ProfilePathId,
post_command: Option<String>,
) -> crate::Result<()> {
let pid = match self {
ChildType::TokioChild(child) => child.id().unwrap_or(0),
ChildType::RescuedPID(pid) => *pid,
};
let state = crate::State::get().await?;
let mut system = sysinfo::System::new();
system.refresh_processes();
let process =
system.process(sysinfo::Pid::from_u32(pid)).ok_or_else(|| {
crate::ErrorKind::LauncherError(format!(
"Could not find process {}",
pid
))
})?;
let start_time = process.start_time();
let name = process.name().to_string();
let Some(path) = process.exe() else {
return Err(ErrorKind::LauncherError(format!(
"Cached process {} has no accessable path",
pid
))
.into());
};
let exe = path.to_string_lossy().to_string();
let cached_process = ProcessCache {
pid,
start_time,
name,
exe,
post_command,
uuid,
profile_relative_path: profile_path_id,
};
let children_path = state
.directories
.caches_meta_dir()
.await
.join(PROCESSES_JSON);
let mut children_caches = if let Ok(children_json) =
read_json::<HashMap<uuid::Uuid, ProcessCache>>(
&children_path,
&state.io_semaphore,
)
.await
{
children_json
} else {
HashMap::new()
};
children_caches.insert(uuid, cached_process);
crate::util::fetch::write(
&children_path,
&serde_json::to_vec(&children_caches)?,
&state.io_semaphore,
)
.await?;
Ok(())
}
// Removes the process from the cache (ie: on process exit)
pub async fn remove_cache(&self, uuid: uuid::Uuid) -> crate::Result<()> {
let state = crate::State::get().await?;
let children_path = state
.directories
.caches_meta_dir()
.await
.join(PROCESSES_JSON);
let mut children_caches = if let Ok(children_json) =
read_json::<HashMap<uuid::Uuid, ProcessCache>>(
&children_path,
&state.io_semaphore,
)
.await
{
children_json
} else {
HashMap::new()
};
children_caches.remove(&uuid);
crate::util::fetch::write(
&children_path,
&serde_json::to_vec(&children_caches)?,
&state.io_semaphore,
)
.await?;
Ok(())
}
}
// Minecraft Child, bundles together the PID, the actual Child, and the easily queryable stdout and stderr streams (if needed)
#[derive(Debug)]
pub struct MinecraftChild {
pub uuid: Uuid,
pub profile_relative_path: ProfilePathId,
pub manager: Option<JoinHandle<crate::Result<i32>>>, // None when future has completed and been handled
pub current_child: Arc<RwLock<ChildType>>,
pub last_updated_playtime: DateTime<Utc>, // The last time we updated the playtime for the associated profile
}
impl Children {
pub fn new() -> Self {
Children(HashMap::new())
}
// Loads cached processes from the caches/metadata/processes.json file, re-inserts them into the hashmap, and removes them from the file
// This will only be called once, on startup. Only processes that match a cached process (name, time started, pid, etc) will be re-inserted
pub async fn rescue_cache(&mut self) -> crate::Result<()> {
let state = crate::State::get().await?;
let children_path = state
.directories
.caches_meta_dir()
.await
.join(PROCESSES_JSON);
let mut children_caches = if let Ok(children_json) =
read_json::<HashMap<uuid::Uuid, ProcessCache>>(
&children_path,
&state.io_semaphore,
)
.await
{
// Overwrite the file with an empty hashmap- we will re-insert the cached processes
let empty = HashMap::<uuid::Uuid, ProcessCache>::new();
crate::util::fetch::write(
&children_path,
&serde_json::to_vec(&empty)?,
&state.io_semaphore,
)
.await?;
// Return the cached processes
children_json
} else {
HashMap::new()
};
for (_, cache) in children_caches.drain() {
let uuid = cache.uuid;
match self.insert_cached_process(cache).await {
Ok(child) => {
self.0.insert(uuid, child);
}
Err(e) => tracing::warn!(
"Failed to rescue cached process {}: {}",
uuid,
e
),
}
}
Ok(())
}
// Runs the command as a new child process, inserts it into the map to keep track of it, and returns a reference to the container struct MinecraftChild
// The threads for stdout and stderr are spawned here
// Unlike a Hashmap's 'insert', this directly returns the reference to the MinecraftChild rather than any previously stored MinecraftChild that may exist
#[tracing::instrument(skip(
self,
uuid,
mc_command,
post_command,
censor_strings
))]
#[tracing::instrument(level = "trace", skip(self))]
#[theseus_macros::debug_pin]
pub async fn insert_new_process(
&mut self,
uuid: Uuid,
profile_relative_path: ProfilePathId,
mut mc_command: Command,
post_command: Option<String>, // Command to run after minecraft.
censor_strings: HashMap<String, String>,
) -> crate::Result<Arc<RwLock<MinecraftChild>>> {
// Spawns the Minecraft command as a child process
let mc_proc = mc_command.spawn().map_err(IOError::from)?;
let child = ChildType::TokioChild(mc_proc);
// Slots child into manager
let pid = child.id().ok_or_else(|| {
crate::ErrorKind::LauncherError(
"Process immediately failed, could not get PID".to_string(),
)
})?;
// Caches process so that it can be restored if the launcher is restarted
child
.cache_process(
uuid,
profile_relative_path.clone(),
post_command.clone(),
)
.await?;
let current_child = Arc::new(RwLock::new(child));
let manager = Some(tokio::spawn(Self::sequential_process_manager(
uuid,
post_command,
pid,
current_child.clone(),
profile_relative_path.clone(),
)));
emit_process(
uuid,
pid,
ProcessPayloadType::Launched,
"Launched Minecraft",
)
.await?;
let last_updated_playtime = Utc::now();
// Create MinecraftChild
let mchild = MinecraftChild {
uuid,
profile_relative_path,
current_child,
manager,
last_updated_playtime,
};
let mchild = Arc::new(RwLock::new(mchild));
self.0.insert(uuid, mchild.clone());
Ok(mchild)
}
// Rescues a cached process, inserts a child process to keep track of, and returns a reference to the container struct MinecraftChild
// Essentially 'reconnects' to a process that was launched by theseus before the launcher was restarted
// However, this may not have all the same functionality as a TokioChild, as we only have the PID and not the actual Child
// Only processes that match a cached process (name, time started, pid, etc) will be re-inserted. The function fails with an error if the process is notably different.
#[tracing::instrument(skip(self, cached_process,))]
#[tracing::instrument(level = "trace", skip(self))]
#[theseus_macros::debug_pin]
pub async fn insert_cached_process(
&mut self,
cached_process: ProcessCache,
) -> crate::Result<Arc<RwLock<MinecraftChild>>> {
let _state = crate::State::get().await?;
// Checks running processes and compares the cached process to the actual one
// Fails if notably different (meaning that the PID was reused, and we shouldn't reconnect to it)
{
let mut system = sysinfo::System::new();
system.refresh_processes();
let process = system
.process(sysinfo::Pid::from_u32(cached_process.pid))
.ok_or_else(|| {
crate::ErrorKind::LauncherError(format!(
"Could not find process {}",
cached_process.pid
))
})?;
if cached_process.start_time != process.start_time() {
return Err(ErrorKind::LauncherError(format!("Cached process {} has different start time than actual process {}", cached_process.pid, process.start_time())).into());
}
if cached_process.name != process.name() {
return Err(ErrorKind::LauncherError(format!("Cached process {} has different name than actual process {}", cached_process.pid, process.name())).into());
}
if let Some(path) = process.exe() {
if cached_process.exe != path.to_string_lossy() {
return Err(ErrorKind::LauncherError(format!("Cached process {} has different exe than actual process {}", cached_process.pid, path.to_string_lossy())).into());
}
} else {
return Err(ErrorKind::LauncherError(format!(
"Cached process {} has no accessable path",
cached_process.pid
))
.into());
}
}
let child = ChildType::RescuedPID(cached_process.pid);
// Slots child into manager
let pid = child.id().ok_or_else(|| {
crate::ErrorKind::LauncherError(
"Process immediately failed, could not get PID".to_string(),
)
})?;
// Re-caches process so that it can be restored if the launcher is restarted
child
.cache_process(
cached_process.uuid,
cached_process.profile_relative_path.clone(),
cached_process.post_command.clone(),
)
.await?;
let current_child = Arc::new(RwLock::new(child));
let manager = Some(tokio::spawn(Self::sequential_process_manager(
cached_process.uuid,
cached_process.post_command,
pid,
current_child.clone(),
cached_process.profile_relative_path.clone(),
)));
emit_process(
cached_process.uuid,
pid,
ProcessPayloadType::Launched,
"Launched Minecraft",
)
.await?;
let last_updated_playtime = Utc::now();
// Create MinecraftChild
let mchild = MinecraftChild {
uuid: cached_process.uuid,
profile_relative_path: cached_process.profile_relative_path,
current_child,
manager,
last_updated_playtime,
};
let mchild = Arc::new(RwLock::new(mchild));
self.0.insert(cached_process.uuid, mchild.clone());
Ok(mchild)
}
// Waits on a spawned child process until it completes, updating playtime as it runs
// Also, as the process ends, it spawns the follow-up process if it exists
// By convention, ExitStatus is last command's exit status, and we exit on the first non-zero exit status
#[tracing::instrument(skip(current_child))]
#[theseus_macros::debug_pin]
async fn sequential_process_manager(
uuid: Uuid,
post_command: Option<String>,
mut current_pid: u32,
current_child: Arc<RwLock<ChildType>>,
associated_profile: ProfilePathId,
) -> crate::Result<i32> {
let current_child = current_child.clone();
// Wait on current Minecraft Child
let mut mc_exit_status;
let mut last_updated_playtime = Utc::now();
loop {
if let Some(t) = current_child.write().await.try_wait().await? {
mc_exit_status = t;
break;
}
// sleep for 50ms between checks
tokio::time::sleep(tokio::time::Duration::from_millis(50)).await;
// Auto-update playtime every minute
let diff = Utc::now()
.signed_duration_since(last_updated_playtime)
.num_seconds();
if diff >= 60 {
if let Err(e) = profile::edit(&associated_profile, |prof| {
prof.metadata.recent_time_played += diff as u64;
async { Ok(()) }
})
.await
{
tracing::warn!(
"Failed to update playtime for profile {}: {}",
&associated_profile,
e
);
}
last_updated_playtime = Utc::now();
}
}
// Now fully complete- update playtime one last time
let diff = Utc::now()
.signed_duration_since(last_updated_playtime)
.num_seconds();
if let Err(e) = profile::edit(&associated_profile, |prof| {
prof.metadata.recent_time_played += diff as u64;
async { Ok(()) }
})
.await
{
tracing::warn!(
"Failed to update playtime for profile {}: {}",
&associated_profile,
e
);
}
// Publish play time update
// Allow failure, it will be stored locally and sent next time
// Sent in another thread as first call may take a couple seconds and hold up process ending
let associated_profile_clone = associated_profile.clone();
tokio::spawn(async move {
if let Err(e) =
profile::try_update_playtime(&associated_profile_clone.clone())
.await
{
tracing::warn!(
"Failed to update playtime for profile {}: {}",
&associated_profile_clone,
e
);
}
});
{
// Clear game played for Discord RPC
// May have other active processes, so we clear to the next running process
let state = crate::State::get().await?;
let _ = state.discord_rpc.clear_to_default(true).await;
}
// If in tauri, the window should show itself again after the process exits if it was hidden
#[cfg(feature = "tauri")]
{
let window = crate::EventState::get_main_window().await?;
if let Some(window) = window {
window.unminimize()?;
}
}
{
let current_child = current_child.write().await;
current_child.remove_cache(uuid).await?;
}
if mc_exit_status != 0 {
emit_process(
uuid,
current_pid,
ProcessPayloadType::Finished,
"Exited process",
)
.await?;
return Ok(mc_exit_status); // Err for a non-zero exit is handled in helper
}
// If a post-command exists, switch to it and wait on it
// First, create the command by splitting arguments
let post_command = if let Some(hook) = post_command {
let mut cmd = hook.split(' ');
if let Some(command) = cmd.next() {
let mut command = Command::new(command);
command
.args(&cmd.collect::<Vec<&str>>())
.current_dir(associated_profile.get_full_path().await?);
Some(command)
} else {
None
}
} else {
None
};
if let Some(mut m_command) = post_command {
{
let mut current_child: tokio::sync::RwLockWriteGuard<
'_,
ChildType,
> = current_child.write().await;
let new_child = m_command.spawn().map_err(IOError::from)?;
current_pid = new_child.id().ok_or_else(|| {
crate::ErrorKind::LauncherError(
"Process immediately failed, could not get PID"
.to_string(),
)
})?;
*current_child = ChildType::TokioChild(new_child);
}
emit_process(
uuid,
current_pid,
ProcessPayloadType::Updated,
"Completed Minecraft, switching to post-commands",
)
.await?;
loop {
if let Some(t) = current_child.write().await.try_wait().await? {
mc_exit_status = t;
break;
}
// sleep for 10ms
tokio::time::sleep(tokio::time::Duration::from_millis(10))
.await;
}
}
emit_process(
uuid,
current_pid,
ProcessPayloadType::Finished,
"Exited process",
)
.await?;
Ok(mc_exit_status)
}
// Returns a cloned handle (Arc) to the child
pub fn get(&self, uuid: Uuid) -> Option<Arc<RwLock<MinecraftChild>>> {
self.0.get(&uuid).cloned()
}
// Gets all UUID keys
pub fn keys(&self) -> Vec<Uuid> {
self.0.keys().cloned().collect()
}
// Get exit status of a child by UUID
// Returns None if the child is still running
pub async fn exit_status(&self, uuid: Uuid) -> crate::Result<Option<i32>> {
if let Some(child) = self.get(uuid) {
let child = child.write().await;
let status = child.current_child.write().await.try_wait().await?;
Ok(status)
} else {
Ok(None)
}
}
// Gets all UUID keys of running children
pub async fn running_keys(&self) -> crate::Result<Vec<Uuid>> {
let mut keys = Vec::new();
for key in self.keys() {
if let Some(child) = self.get(key) {
let child = child.clone();
let child = child.write().await;
if child
.current_child
.write()
.await
.try_wait()
.await?
.is_none()
{
keys.push(key);
}
}
}
Ok(keys)
}
// Gets all UUID keys of running children with a given profile path
pub async fn running_keys_with_profile(
&self,
profile_path: ProfilePathId,
) -> crate::Result<Vec<Uuid>> {
let running_keys = self.running_keys().await?;
let mut keys = Vec::new();
for key in running_keys {
if let Some(child) = self.get(key) {
let child = child.clone();
let child = child.read().await;
if child.profile_relative_path == profile_path {
keys.push(key);
}
}
}
Ok(keys)
}
// Gets all profile paths of running children
pub async fn running_profile_paths(
&self,
) -> crate::Result<Vec<ProfilePathId>> {
let mut profiles = Vec::new();
for key in self.keys() {
if let Some(child) = self.get(key) {
let child = child.clone();
let child = child.write().await;
if child
.current_child
.write()
.await
.try_wait()
.await?
.is_none()
{
profiles.push(child.profile_relative_path.clone());
}
}
}
Ok(profiles)
}
// Gets all profiles of running children
// Returns clones because it would be serialized anyway
pub async fn running_profiles(&self) -> crate::Result<Vec<Profile>> {
let mut profiles = Vec::new();
for key in self.keys() {
if let Some(child) = self.get(key) {
let child = child.clone();
let child = child.write().await;
if child
.current_child
.write()
.await
.try_wait()
.await?
.is_none()
{
if let Some(prof) = crate::api::profile::get(
&child.profile_relative_path.clone(),
None,
)
.await?
{
profiles.push(prof);
}
}
}
}
Ok(profiles)
}
}
impl Default for Children {
fn default() -> Self {
Self::new()
}
}
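// A minimal, self-contained sketch of the polling pattern used by sequential_process_manager
// above: spawn a child with tokio, then poll try_wait() on an interval instead of blocking on
// wait(), so periodic bookkeeping (like the playtime updates) can run between checks.
// The "sleep 1" command is only a stand-in for the real Minecraft command; assumes a Unix-like system.
async fn poll_child_sketch() -> std::io::Result<i32> {
    let mut child = tokio::process::Command::new("sleep").arg("1").spawn()?;
    loop {
        if let Some(status) = child.try_wait()? {
            // Mirror the convention above: report the exit code, defaulting when killed by a signal
            return Ok(status.code().unwrap_or(-1));
        }
        // Periodic work (e.g. playtime accumulation) would go here
        tokio::time::sleep(std::time::Duration::from_millis(50)).await;
    }
}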

View File

@@ -0,0 +1,211 @@
//! Theseus directory information
use std::fs;
use std::path::PathBuf;
use tokio::sync::RwLock;
use super::{ProfilePathId, Settings};
pub const SETTINGS_FILE_NAME: &str = "settings.json";
pub const CACHES_FOLDER_NAME: &str = "caches";
pub const LAUNCHER_LOGS_FOLDER_NAME: &str = "launcher_logs";
pub const PROFILES_FOLDER_NAME: &str = "profiles";
pub const METADATA_FOLDER_NAME: &str = "meta";
#[derive(Debug)]
pub struct DirectoryInfo {
pub settings_dir: PathBuf, // Base settings directory- settings.json and icon cache.
pub config_dir: RwLock<PathBuf>, // Base config directory- instances, minecraft downloads, etc. Changeable as a setting.
pub working_dir: PathBuf,
}
impl DirectoryInfo {
// Get the settings directory
// init() is not needed for this function
pub fn get_initial_settings_dir() -> Option<PathBuf> {
Self::env_path("THESEUS_CONFIG_DIR")
.or_else(|| Some(dirs::config_dir()?.join("com.modrinth.theseus")))
}
#[inline]
pub fn get_initial_settings_file() -> crate::Result<PathBuf> {
let settings_dir = Self::get_initial_settings_dir().ok_or(
crate::ErrorKind::FSError(
"Could not find valid config dir".to_string(),
),
)?;
Ok(settings_dir.join("settings.json"))
}
/// Get all paths needed for Theseus to operate properly
#[tracing::instrument]
pub fn init(settings: &Settings) -> crate::Result<Self> {
// Working directory
let working_dir = std::env::current_dir().map_err(|err| {
crate::ErrorKind::FSError(format!(
"Could not open working directory: {err}"
))
})?;
let settings_dir = Self::get_initial_settings_dir().ok_or(
crate::ErrorKind::FSError(
"Could not find valid settings dir".to_string(),
),
)?;
fs::create_dir_all(&settings_dir).map_err(|err| {
crate::ErrorKind::FSError(format!(
"Error creating Theseus config directory: {err}"
))
})?;
// config directory (for instances, etc.)
// by default this is the same as the settings directory
let config_dir = settings.loaded_config_dir.clone().ok_or(
crate::ErrorKind::FSError(
"Could not find valid config dir".to_string(),
),
)?;
Ok(Self {
settings_dir,
config_dir: RwLock::new(config_dir),
working_dir,
})
}
/// Get the Minecraft instance metadata directory
#[inline]
pub async fn metadata_dir(&self) -> PathBuf {
self.config_dir.read().await.join(METADATA_FOLDER_NAME)
}
/// Get the Minecraft java versions metadata directory
#[inline]
pub async fn java_versions_dir(&self) -> PathBuf {
self.metadata_dir().await.join("java_versions")
}
/// Get the Minecraft versions metadata directory
#[inline]
pub async fn versions_dir(&self) -> PathBuf {
self.metadata_dir().await.join("versions")
}
/// Get the metadata directory for a given version
#[inline]
pub async fn version_dir(&self, version: &str) -> PathBuf {
self.versions_dir().await.join(version)
}
/// Get the Minecraft libraries metadata directory
#[inline]
pub async fn libraries_dir(&self) -> PathBuf {
self.metadata_dir().await.join("libraries")
}
/// Get the Minecraft assets metadata directory
#[inline]
pub async fn assets_dir(&self) -> PathBuf {
self.metadata_dir().await.join("assets")
}
/// Get the assets index directory
#[inline]
pub async fn assets_index_dir(&self) -> PathBuf {
self.assets_dir().await.join("indexes")
}
/// Get the assets objects directory
#[inline]
pub async fn objects_dir(&self) -> PathBuf {
self.assets_dir().await.join("objects")
}
/// Get the directory for a specific object
#[inline]
pub async fn object_dir(&self, hash: &str) -> PathBuf {
self.objects_dir().await.join(&hash[..2]).join(hash)
}
/// Get the Minecraft legacy assets metadata directory
#[inline]
pub async fn legacy_assets_dir(&self) -> PathBuf {
self.metadata_dir().await.join("resources")
}
/// Get the Minecraft natives metadata directory
#[inline]
pub async fn natives_dir(&self) -> PathBuf {
self.metadata_dir().await.join("natives")
}
/// Get the natives directory for a version of Minecraft
#[inline]
pub async fn version_natives_dir(&self, version: &str) -> PathBuf {
self.natives_dir().await.join(version)
}
/// Get the directory containing instance icons
#[inline]
pub async fn icon_dir(&self) -> PathBuf {
self.config_dir.read().await.join("icons")
}
/// Get the profiles directory for created profiles
#[inline]
pub async fn profiles_dir(&self) -> PathBuf {
self.config_dir.read().await.join(PROFILES_FOLDER_NAME)
}
/// Gets the logs dir for a given profile
#[inline]
pub async fn profile_logs_dir(
profile_id: &ProfilePathId,
) -> crate::Result<PathBuf> {
Ok(profile_id.get_full_path().await?.join("logs"))
}
/// Gets the crash reports dir for a given profile
#[inline]
pub async fn crash_reports_dir(
profile_id: &ProfilePathId,
) -> crate::Result<PathBuf> {
Ok(profile_id.get_full_path().await?.join("crash-reports"))
}
#[inline]
pub fn launcher_logs_dir() -> Option<PathBuf> {
Self::get_initial_settings_dir()
.map(|d| d.join(LAUNCHER_LOGS_FOLDER_NAME))
}
/// Get the file containing the global database
#[inline]
pub async fn database_file(&self) -> PathBuf {
self.config_dir.read().await.join("data.bin")
}
/// Get the settings file for Theseus
#[inline]
pub fn settings_file(&self) -> PathBuf {
self.settings_dir.join(SETTINGS_FILE_NAME)
}
/// Get the cache directory for Theseus
#[inline]
pub fn caches_dir(&self) -> PathBuf {
self.settings_dir.join(CACHES_FOLDER_NAME)
}
#[inline]
pub async fn caches_meta_dir(&self) -> PathBuf {
self.caches_dir().join("metadata")
}
/// Get path from environment variable
#[inline]
fn env_path(name: &str) -> Option<PathBuf> {
std::env::var_os(name).map(PathBuf::from)
}
}
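// A minimal sketch of how the default settings directory above resolves, using only std and the
// dirs crate: an explicit THESEUS_CONFIG_DIR override wins, otherwise fall back to the platform
// config directory joined with the launcher identifier.
fn settings_dir_sketch() -> Option<std::path::PathBuf> {
    std::env::var_os("THESEUS_CONFIG_DIR")
        .map(std::path::PathBuf::from)
        .or_else(|| Some(dirs::config_dir()?.join("com.modrinth.theseus")))
}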

View File

@@ -0,0 +1,216 @@
use std::sync::{atomic::AtomicBool, Arc};
use discord_rich_presence::{
activity::{Activity, Assets},
DiscordIpc, DiscordIpcClient,
};
use tokio::sync::RwLock;
use crate::State;
pub struct DiscordGuard {
client: Arc<RwLock<DiscordIpcClient>>,
connected: Arc<AtomicBool>,
}
impl DiscordGuard {
/// Initialize discord IPC client, and attempt to connect to it
/// If it fails, it will still return a DiscordGuard, but the client will be unconnected
pub async fn init(is_offline: bool) -> crate::Result<DiscordGuard> {
let mut dipc =
DiscordIpcClient::new("1123683254248148992").map_err(|e| {
crate::ErrorKind::OtherError(format!(
"Could not create Discord client {}",
e,
))
})?;
let connected = if !is_offline {
let res = dipc.connect(); // Do not need to connect to Discord to use app
if res.is_ok() {
Arc::new(AtomicBool::new(true))
} else {
Arc::new(AtomicBool::new(false))
}
} else {
Arc::new(AtomicBool::new(false))
};
let client = Arc::new(RwLock::new(dipc));
Ok(DiscordGuard { client, connected })
}
/// If the client failed connecting during init(), this will check for connection and attempt to reconnect
/// This MUST be called first in any client method that requires a connection, because those can PANIC if the client is not connected
/// (No connection is different than a failed connection, the latter will not panic and can be retried)
pub async fn retry_if_not_ready(&self) -> bool {
let mut client = self.client.write().await;
if !self.connected.load(std::sync::atomic::Ordering::Relaxed) {
if client.connect().is_ok() {
self.connected
.store(true, std::sync::atomic::Ordering::Relaxed);
return true;
}
return false;
}
true
}
// check online
pub async fn check_online(&self) -> bool {
let state = match State::get().await {
Ok(s) => s,
Err(_) => return false,
};
let offline = state.offline.read().await;
if *offline {
return false;
}
true
}
/// Set the activity to the given message
/// First checks if discord is disabled, and if so, clear the activity instead
pub async fn set_activity(
&self,
msg: &str,
reconnect_if_fail: bool,
) -> crate::Result<()> {
if !self.check_online().await {
return Ok(());
}
// Check if discord is disabled, and if so, clear the activity instead
let state = State::get().await?;
let settings = state.settings.read().await;
if settings.disable_discord_rpc {
Ok(self.clear_activity(true).await?)
} else {
Ok(self.force_set_activity(msg, reconnect_if_fail).await?)
}
}
/// Sets the activity to the given message, regardless of if discord is disabled or offline
/// Should not be used except for in the above method, or if it is already known that discord is enabled (specifically for state initialization) and we are connected to the internet
pub async fn force_set_activity(
&self,
msg: &str,
reconnect_if_fail: bool,
) -> crate::Result<()> {
// Attempt to connect if not connected. Do not continue if it fails, as the client.set_activity can panic if it never was connected
if !self.retry_if_not_ready().await {
return Ok(());
}
let activity = Activity::new().state(msg).assets(
Assets::new()
.large_image("modrinth_simple")
.large_text("Modrinth Logo"),
);
// Attempt to set the activity
// If the existing connection fails, attempt to reconnect and try again
let mut client: tokio::sync::RwLockWriteGuard<'_, DiscordIpcClient> =
self.client.write().await;
let res = client.set_activity(activity.clone());
let could_not_set_err = |e: Box<dyn serde::ser::StdError>| {
crate::ErrorKind::OtherError(format!(
"Could not update Discord activity {}",
e,
))
};
if reconnect_if_fail {
if let Err(_e) = res {
client.reconnect().map_err(|e| {
crate::ErrorKind::OtherError(format!(
"Could not reconnect to Discord IPC {}",
e,
))
})?;
return Ok(client
.set_activity(activity)
.map_err(could_not_set_err)?); // try again, but don't reconnect if it fails again
}
} else {
res.map_err(could_not_set_err)?;
}
Ok(())
}
/// Clear the activity entirely ('disabling' the RPC until the next set_activity)
pub async fn clear_activity(
&self,
reconnect_if_fail: bool,
) -> crate::Result<()> {
// Attempt to connect if not connected. Do not continue if it fails, as the client.clear_activity can panic if it never was connected
if !self.check_online().await || !self.retry_if_not_ready().await {
return Ok(());
}
// Attempt to clear the activity
// If the existing connection fails, attempt to reconnect and try again
let mut client = self.client.write().await;
let res = client.clear_activity();
let could_not_clear_err = |e: Box<dyn serde::ser::StdError>| {
crate::ErrorKind::OtherError(format!(
"Could not clear Discord activity {}",
e,
))
};
if reconnect_if_fail {
if res.is_err() {
client.reconnect().map_err(|e| {
crate::ErrorKind::OtherError(format!(
"Could not reconnect to Discord IPC {}",
e,
))
})?;
return Ok(client
.clear_activity()
.map_err(could_not_clear_err)?); // try again, but don't reconnect if it fails again
}
} else {
res.map_err(could_not_clear_err)?;
}
Ok(())
}
/// Clear the activity, but if there is a running profile, set the activity to that instead
pub async fn clear_to_default(
&self,
reconnect_if_fail: bool,
) -> crate::Result<()> {
let state: Arc<tokio::sync::RwLockReadGuard<'_, State>> =
State::get().await?;
{
let settings = state.settings.read().await;
if settings.disable_discord_rpc {
println!("Discord is disabled, clearing activity");
return self.clear_activity(true).await;
}
}
if let Some(existing_child) = state
.children
.read()
.await
.running_profile_paths()
.await?
.first()
{
self.set_activity(
&format!("Playing {}", existing_child),
reconnect_if_fail,
)
.await?;
} else {
self.set_activity("Idling...", reconnect_if_fail).await?;
}
Ok(())
}
}
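// A minimal sketch of the discord-rich-presence calls that DiscordGuard wraps, outside of any
// launcher state. The client id below is a placeholder; connect() can fail if Discord is not
// running, which is why the guard above tracks a `connected` flag and retries rather than
// assuming success.
fn discord_rpc_sketch() -> Result<(), Box<dyn std::error::Error>> {
    use discord_rich_presence::{activity::Activity, DiscordIpc, DiscordIpcClient};

    let mut client = DiscordIpcClient::new("0000000000000000000")?; // placeholder app id
    client.connect()?;
    client.set_activity(Activity::new().state("Idling..."))?;
    client.clear_activity()?;
    Ok(())
}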

View File

@@ -0,0 +1,66 @@
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::PathBuf;
use crate::prelude::JavaVersion;
use crate::util::jre;
// All stored Java versions, chosen by the user
// A wrapper over a Hashmap connecting key -> java version
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct JavaGlobals(HashMap<String, JavaVersion>);
impl JavaGlobals {
pub fn new() -> JavaGlobals {
JavaGlobals(HashMap::new())
}
pub fn insert(&mut self, key: String, java: JavaVersion) {
self.0.insert(key, java);
}
pub fn remove(&mut self, key: &String) {
self.0.remove(key);
}
pub fn get(&self, key: &String) -> Option<&JavaVersion> {
self.0.get(key)
}
pub fn get_mut(&mut self, key: &String) -> Option<&mut JavaVersion> {
self.0.get_mut(key)
}
pub fn count(&self) -> usize {
self.0.len()
}
pub fn keys(&self) -> Vec<String> {
self.0.keys().cloned().collect()
}
// Validates that every path here is a valid Java version and that the version matches the version stored here
// If false, when checked, the user should be prompted to reselect the Java version
pub async fn is_all_valid(&self) -> bool {
for (_, java) in self.0.iter() {
let jre = jre::check_java_at_filepath(
PathBuf::from(&java.path).as_path(),
)
.await;
if let Some(jre) = jre {
if jre.version != java.version {
return false;
}
} else {
return false;
}
}
true
}
}
impl Default for JavaGlobals {
fn default() -> Self {
Self::new()
}
}

View File

@@ -0,0 +1,173 @@
//! Theseus metadata
use crate::data::DirectoryInfo;
use crate::util::fetch::{read_json, write, IoSemaphore};
use crate::State;
use daedalus::{
minecraft::{fetch_version_manifest, VersionManifest as MinecraftManifest},
modded::{
fetch_manifest as fetch_loader_manifest, Manifest as LoaderManifest,
},
};
use serde::{Deserialize, Serialize};
const METADATA_URL: &str = "https://meta.modrinth.com";
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Metadata {
pub minecraft: MinecraftManifest,
pub forge: LoaderManifest,
pub fabric: LoaderManifest,
pub quilt: LoaderManifest,
pub neoforge: LoaderManifest,
}
impl Metadata {
fn get_manifest(name: &str) -> String {
format!("{METADATA_URL}/{name}/v0/manifest.json")
}
pub async fn fetch() -> crate::Result<Self> {
let (minecraft, forge, fabric, quilt, neoforge) = tokio::try_join! {
async {
let url = Self::get_manifest("minecraft");
fetch_version_manifest(Some(&url)).await
},
async {
let url = Self::get_manifest("forge");
fetch_loader_manifest(&url).await
},
async {
let url = Self::get_manifest("fabric");
fetch_loader_manifest(&url).await
},
async {
let url = Self::get_manifest("quilt");
fetch_loader_manifest(&url).await
},
async {
let url = Self::get_manifest("neo");
fetch_loader_manifest(&url).await
}
}?;
Ok(Self {
minecraft,
forge,
fabric,
quilt,
neoforge,
})
}
// Attempt to fetch metadata and cache it on disk, keeping a backup copy
#[tracing::instrument(skip(io_semaphore))]
#[theseus_macros::debug_pin]
pub async fn init(
dirs: &DirectoryInfo,
fetch_online: bool,
io_semaphore: &IoSemaphore,
) -> crate::Result<Self> {
let mut metadata = None;
let metadata_path = dirs.caches_meta_dir().await.join("metadata.json");
let metadata_backup_path =
dirs.caches_meta_dir().await.join("metadata.json.bak");
if let Ok(metadata_json) =
read_json::<Metadata>(&metadata_path, io_semaphore).await
{
metadata = Some(metadata_json);
} else if fetch_online {
let res = async {
let metadata_fetch = Self::fetch().await?;
write(
&metadata_path,
&serde_json::to_vec(&metadata_fetch).unwrap_or_default(),
io_semaphore,
)
.await?;
write(
&metadata_backup_path,
&serde_json::to_vec(&metadata_fetch).unwrap_or_default(),
io_semaphore,
)
.await?;
metadata = Some(metadata_fetch);
Ok::<(), crate::Error>(())
}
.await;
match res {
Ok(()) => {}
Err(err) => {
tracing::warn!("Unable to fetch launcher metadata: {err}")
}
}
} else if let Ok(metadata_json) =
read_json::<Metadata>(&metadata_backup_path, io_semaphore).await
{
metadata = Some(metadata_json);
std::fs::copy(&metadata_backup_path, &metadata_path).map_err(
|err| {
crate::ErrorKind::FSError(format!(
"Error restoring metadata backup: {err}"
))
.as_error()
},
)?;
}
if let Some(meta) = metadata {
Ok(meta)
} else {
Err(
crate::ErrorKind::NoValueFor(String::from("launcher metadata"))
.as_error(),
)
}
}
pub async fn update() {
let res = async {
let metadata_fetch = Metadata::fetch().await?;
let state = State::get().await?;
let metadata_path = state
.directories
.caches_meta_dir()
.await
.join("metadata.json");
let metadata_backup_path = state
.directories
.caches_meta_dir()
.await
.join("metadata.json.bak");
if metadata_path.exists() {
std::fs::copy(&metadata_path, &metadata_backup_path)?;
}
write(
&metadata_path,
&serde_json::to_vec(&metadata_fetch)?,
&state.io_semaphore,
)
.await?;
let mut old_metadata = state.metadata.write().await;
*old_metadata = metadata_fetch;
Ok::<(), crate::Error>(())
}
.await;
match res {
Ok(()) => {}
Err(err) => {
tracing::warn!("Unable to update launcher metadata: {err}")
}
};
}
}
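// A minimal sketch of the cache-with-backup strategy Metadata::init uses, with plain std::fs and
// serde_json instead of the launcher's IO-semaphore helpers. `fetch_remote` is a placeholder for
// the real network fetch; the idea is: prefer the cache, otherwise fetch and write both cache and
// backup, and as a last resort restore from the backup copy.
fn cached_fetch_sketch<T, F>(
    cache: &std::path::Path,
    backup: &std::path::Path,
    fetch_remote: F,
) -> Option<T>
where
    T: serde::de::DeserializeOwned + serde::Serialize,
    F: FnOnce() -> Option<T>,
{
    if let Ok(bytes) = std::fs::read(cache) {
        if let Ok(value) = serde_json::from_slice::<T>(&bytes) {
            return Some(value);
        }
    }
    if let Some(value) = fetch_remote() {
        if let Ok(bytes) = serde_json::to_vec(&value) {
            let _ = std::fs::write(cache, &bytes);
            let _ = std::fs::write(backup, &bytes);
        }
        return Some(value);
    }
    // Last resort: restore from the backup copy
    let bytes = std::fs::read(backup).ok()?;
    let value = serde_json::from_slice::<T>(&bytes).ok()?;
    let _ = std::fs::copy(backup, cache);
    Some(value)
}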

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,410 @@
//! Theseus state management system
use crate::event::emit::{emit_loading, emit_offline, init_loading_unsafe};
use std::path::PathBuf;
use crate::event::LoadingBarType;
use crate::loading_join;
use crate::util::fetch::{self, FetchSemaphore, IoSemaphore};
use notify::RecommendedWatcher;
use notify_debouncer_mini::{new_debouncer, DebounceEventResult, Debouncer};
use std::sync::Arc;
use std::time::Duration;
use tokio::join;
use tokio::sync::{OnceCell, RwLock, Semaphore};
use futures::{channel::mpsc::channel, SinkExt, StreamExt};
// Submodules
mod dirs;
pub use self::dirs::*;
mod metadata;
pub use self::metadata::*;
mod profiles;
pub use self::profiles::*;
mod settings;
pub use self::settings::*;
mod projects;
pub use self::projects::*;
mod children;
pub use self::children::*;
mod tags;
pub use self::tags::*;
mod java_globals;
pub use self::java_globals::*;
mod safe_processes;
pub use self::safe_processes::*;
mod discord;
pub use self::discord::*;
mod minecraft_auth;
pub use self::minecraft_auth::*;
mod mr_auth;
pub use self::mr_auth::*;
// Global state
// RwLock on state only has concurrent reads, except for config dir change which takes control of the State
static LAUNCHER_STATE: OnceCell<RwLock<State>> = OnceCell::const_new();
pub struct State {
/// Whether or not the launcher is currently operating in 'offline mode'
pub offline: RwLock<bool>,
/// Information on the location of files used in the launcher
pub directories: DirectoryInfo,
/// Semaphore used to limit concurrent network requests and avoid errors
pub fetch_semaphore: FetchSemaphore,
/// Stored maximum number of permits of the current fetch_semaphore
pub fetch_semaphore_max: RwLock<u32>,
/// Semaphore used to limit concurrent I/O and avoid errors
pub io_semaphore: IoSemaphore,
/// Stored maximum number of permits of the current io_semaphore
pub io_semaphore_max: RwLock<u32>,
/// Launcher metadata
pub metadata: RwLock<Metadata>,
/// Launcher configuration
pub settings: RwLock<Settings>,
/// Reference to minecraft process children
pub children: RwLock<Children>,
/// Launcher profile metadata
pub(crate) profiles: RwLock<Profiles>,
/// Launcher tags
pub(crate) tags: RwLock<Tags>,
/// Launcher processes that should be safely exited on shutdown
pub(crate) safety_processes: RwLock<SafeProcesses>,
/// Launcher user account info
pub(crate) users: RwLock<MinecraftAuthStore>,
/// Modrinth Credentials Store
pub credentials: RwLock<CredentialsStore>,
/// Modrinth auth flow
pub modrinth_auth_flow: RwLock<Option<ModrinthAuthFlow>>,
/// Discord RPC
pub discord_rpc: DiscordGuard,
/// File watcher debouncer
pub(crate) file_watcher: RwLock<Debouncer<RecommendedWatcher>>,
}
impl State {
/// Get the current launcher state, initializing it if needed
pub async fn get(
) -> crate::Result<Arc<tokio::sync::RwLockReadGuard<'static, Self>>> {
Ok(Arc::new(
LAUNCHER_STATE
.get_or_try_init(Self::initialize_state)
.await?
.read()
.await,
))
}
/// Get the current launcher state, initializing it if needed
/// Takes writing control of the state, blocking all other uses of it
/// Only used for state change such as changing the config directory
pub async fn get_write(
) -> crate::Result<tokio::sync::RwLockWriteGuard<'static, Self>> {
Ok(LAUNCHER_STATE
.get_or_try_init(Self::initialize_state)
.await?
.write()
.await)
}
pub fn initialized() -> bool {
LAUNCHER_STATE.initialized()
}
#[tracing::instrument]
#[theseus_macros::debug_pin]
async fn initialize_state() -> crate::Result<RwLock<State>> {
let loading_bar = init_loading_unsafe(
LoadingBarType::StateInit,
100.0,
"Initializing launcher",
)
.await?;
// Settings
let settings =
Settings::init(&DirectoryInfo::get_initial_settings_file()?)
.await?;
let directories = DirectoryInfo::init(&settings)?;
emit_loading(&loading_bar, 10.0, None).await?;
let mut file_watcher = init_watcher().await?;
let fetch_semaphore = FetchSemaphore(RwLock::new(Semaphore::new(
settings.max_concurrent_downloads,
)));
let io_semaphore = IoSemaphore(RwLock::new(Semaphore::new(
settings.max_concurrent_writes,
)));
emit_loading(&loading_bar, 10.0, None).await?;
let is_offline = !fetch::check_internet(3).await;
let metadata_fut =
Metadata::init(&directories, !is_offline, &io_semaphore);
let profiles_fut = Profiles::init(&directories, &mut file_watcher);
let tags_fut = Tags::init(
&directories,
!is_offline,
&io_semaphore,
&fetch_semaphore,
&CredentialsStore(None),
);
let users_fut = MinecraftAuthStore::init(&directories, &io_semaphore);
let creds_fut = CredentialsStore::init(&directories, &io_semaphore);
// Launcher data
let (metadata, profiles, tags, users, creds) = loading_join! {
Some(&loading_bar), 70.0, Some("Loading metadata");
metadata_fut,
profiles_fut,
tags_fut,
users_fut,
creds_fut,
}?;
let safety_processes = SafeProcesses::new();
let discord_rpc = DiscordGuard::init(is_offline).await?;
if !settings.disable_discord_rpc && !is_offline {
// Add default Idling to discord rich presence
// Force add to avoid recursion
let _ = discord_rpc.force_set_activity("Idling...", true).await;
}
let children = Children::new();
// Starts a loop of checking if we are online, and updating
Self::offline_check_loop();
emit_loading(&loading_bar, 10.0, None).await?;
Ok::<RwLock<Self>, crate::Error>(RwLock::new(Self {
offline: RwLock::new(is_offline),
directories,
fetch_semaphore,
fetch_semaphore_max: RwLock::new(
settings.max_concurrent_downloads as u32,
),
io_semaphore,
io_semaphore_max: RwLock::new(
settings.max_concurrent_writes as u32,
),
metadata: RwLock::new(metadata),
settings: RwLock::new(settings),
profiles: RwLock::new(profiles),
users: RwLock::new(users),
children: RwLock::new(children),
credentials: RwLock::new(creds),
tags: RwLock::new(tags),
discord_rpc,
safety_processes: RwLock::new(safety_processes),
file_watcher: RwLock::new(file_watcher),
modrinth_auth_flow: RwLock::new(None),
}))
}
/// Starts a loop of checking if we are online, and updating
pub fn offline_check_loop() {
tokio::task::spawn(async {
loop {
let state = Self::get().await;
if let Ok(state) = state {
let _ = state.refresh_offline().await;
}
// Wait 5 seconds
tokio::time::sleep(Duration::from_secs(5)).await;
}
});
}
/// Updates state with data from the web, if we are online
pub fn update() {
tokio::task::spawn(async {
if let Ok(state) = crate::State::get().await {
if !*state.offline.read().await {
let res1 = Profiles::update_modrinth_versions();
let res2 = Tags::update();
let res3 = Metadata::update();
let res4 = Profiles::update_projects();
let res6 = CredentialsStore::update_creds();
let _ = join!(res1, res2, res3, res4, res6);
}
}
});
}
#[tracing::instrument]
#[theseus_macros::debug_pin]
/// Synchronize in-memory state with persistent state
pub async fn sync() -> crate::Result<()> {
let state = Self::get().await?;
let sync_settings = async {
let state = Arc::clone(&state);
tokio::spawn(async move {
let reader = state.settings.read().await;
reader.sync(&state.directories.settings_file()).await?;
Ok::<_, crate::Error>(())
})
.await?
};
let sync_profiles = async {
let state = Arc::clone(&state);
tokio::spawn(async move {
let profiles = state.profiles.read().await;
profiles.sync().await?;
Ok::<_, crate::Error>(())
})
.await?
};
tokio::try_join!(sync_settings, sync_profiles)?;
Ok(())
}
/// Reset IO semaphore to default values
/// This will block until all uses of the semaphore are complete, so it should only be called
/// when we are not in the middle of downloading something (ie: changing the settings!)
pub async fn reset_io_semaphore(&self) {
let settings = self.settings.read().await;
let mut io_semaphore = self.io_semaphore.0.write().await;
let mut total_permits = self.io_semaphore_max.write().await;
// Wait to get all permits back
let _ = io_semaphore.acquire_many(*total_permits).await;
// Reset the semaphore
io_semaphore.close();
*total_permits = settings.max_concurrent_writes as u32;
*io_semaphore = Semaphore::new(settings.max_concurrent_writes);
}
/// Reset fetch semaphore to default values
/// This will block until all uses of the semaphore are complete, so it should only be called
/// when we are not in the middle of downloading something (ie: changing the settings!)
pub async fn reset_fetch_semaphore(&self) {
let settings = self.settings.read().await;
let mut fetch_semaphore = self.fetch_semaphore.0.write().await;
let mut total_permits = self.fetch_semaphore_max.write().await;
// Wait to get all permits back
let _ = fetch_semaphore.acquire_many(*total_permits).await;
// Reset the semaphore
fetch_semaphore.close();
*total_permits = settings.max_concurrent_downloads as u32;
*fetch_semaphore = Semaphore::new(settings.max_concurrent_downloads);
}
/// Refreshes whether or not the launcher should be offline, by whether or not there is an internet connection
pub async fn refresh_offline(&self) -> crate::Result<()> {
let is_online = fetch::check_internet(3).await;
let mut offline = self.offline.write().await;
if *offline != is_online {
return Ok(());
}
emit_offline(!is_online).await?;
*offline = !is_online;
Ok(())
}
}
pub async fn init_watcher() -> crate::Result<Debouncer<RecommendedWatcher>> {
let (mut tx, mut rx) = channel(1);
let file_watcher = new_debouncer(
Duration::from_secs_f32(2.0),
move |res: DebounceEventResult| {
futures::executor::block_on(async {
tx.send(res).await.unwrap();
})
},
)?;
tokio::task::spawn(async move {
let span = tracing::span!(tracing::Level::INFO, "init_watcher");
tracing::info!(parent: &span, "Initting watcher");
while let Some(res) = rx.next().await {
let _span = span.enter();
match res {
Ok(mut events) => {
let mut visited_paths = Vec::new();
// sort events by e.path
events.sort_by(|a, b| a.path.cmp(&b.path));
events.iter().for_each(|e| {
let mut new_path = PathBuf::new();
let mut components_iterator = e.path.components();
let mut found = false;
for component in components_iterator.by_ref() {
new_path.push(component);
if found {
break;
}
if component.as_os_str() == "profiles" {
found = true;
}
}
// if any remain, it's a subfile of the profile folder and not the profile folder itself
let subfile = components_iterator.next().is_some();
// At this point, new_path is the path to the profile, and subfile is whether it's a subfile of the profile or not
let profile_path_id =
ProfilePathId::new(PathBuf::from(
new_path.file_name().unwrap_or_default(),
));
if e.path
.components()
.any(|x| x.as_os_str() == "crash-reports")
&& e.path
.extension()
.map(|x| x == "txt")
.unwrap_or(false)
{
Profile::crash_task(profile_path_id);
} else if !visited_paths.contains(&new_path) {
if subfile {
Profile::sync_projects_task(
profile_path_id,
false,
);
visited_paths.push(new_path);
} else {
Profiles::sync_available_profiles_task(
profile_path_id,
);
}
}
});
}
Err(error) => tracing::warn!("Unable to watch file: {error}"),
}
}
});
Ok(file_watcher)
}
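// A minimal sketch of the global-state pattern used above, reduced to tokio primitives: a
// const-initialized OnceCell holding an RwLock, lazily initialized on first access. The
// `DemoState` type and its inline init closure are placeholders for the real initialize_state.
struct DemoState {
    counter: u64,
}

static DEMO_STATE: tokio::sync::OnceCell<tokio::sync::RwLock<DemoState>> =
    tokio::sync::OnceCell::const_new();

async fn demo_state_get(
) -> Result<tokio::sync::RwLockReadGuard<'static, DemoState>, std::io::Error> {
    Ok(DEMO_STATE
        .get_or_try_init(|| async {
            // Expensive, fallible initialization would go here
            Ok::<_, std::io::Error>(tokio::sync::RwLock::new(DemoState { counter: 0 }))
        })
        .await?
        .read()
        .await)
}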

View File

@@ -0,0 +1,376 @@
use crate::config::MODRINTH_API_URL;
use crate::state::DirectoryInfo;
use crate::util::fetch::{
fetch_advanced, read_json, write, FetchSemaphore, IoSemaphore,
};
use crate::State;
use chrono::{DateTime, Duration, Utc};
use futures::TryStreamExt;
use reqwest::Method;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::collections::HashMap;
const AUTH_JSON: &str = "auth.json";
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ModrinthUser {
pub id: String,
pub username: String,
pub name: Option<String>,
pub avatar_url: Option<String>,
pub bio: Option<String>,
pub created: DateTime<Utc>,
pub role: String,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ModrinthCredentials {
pub session: String,
pub expires_at: DateTime<Utc>,
pub user: ModrinthUser,
}
#[derive(Serialize)]
#[serde(tag = "type")]
#[serde(rename_all = "snake_case")]
pub enum ModrinthCredentialsResult {
TwoFactorRequired { flow: String },
Credentials(ModrinthCredentials),
}
#[derive(Debug)]
pub struct CredentialsStore(pub Option<ModrinthCredentials>);
impl CredentialsStore {
pub async fn init(
dirs: &DirectoryInfo,
io_semaphore: &IoSemaphore,
) -> crate::Result<Self> {
let auth_path = dirs.caches_meta_dir().await.join(AUTH_JSON);
let user = read_json(&auth_path, io_semaphore).await.ok();
if let Some(user) = user {
Ok(Self(Some(user)))
} else {
Ok(Self(None))
}
}
pub async fn save(&self) -> crate::Result<()> {
let state = State::get().await?;
let auth_path =
state.directories.caches_meta_dir().await.join(AUTH_JSON);
if let Some(creds) = &self.0 {
write(&auth_path, &serde_json::to_vec(creds)?, &state.io_semaphore)
.await?;
}
Ok(())
}
pub async fn login(
&mut self,
credentials: ModrinthCredentials,
) -> crate::Result<&Self> {
self.0 = Some(credentials);
self.save().await?;
Ok(self)
}
#[tracing::instrument]
pub async fn update_creds() {
let res = async {
let state = State::get().await?;
let mut creds_write = state.credentials.write().await;
refresh_credentials(&mut creds_write, &state.fetch_semaphore)
.await?;
Ok::<(), crate::Error>(())
}
.await;
match res {
Ok(()) => {}
Err(err) => {
tracing::warn!("Unable to update credentials: {err}")
}
};
}
pub async fn logout(&mut self) -> crate::Result<&Self> {
self.0 = None;
self.save().await?;
Ok(self)
}
}
pub struct ModrinthAuthFlow {
socket: async_tungstenite::WebSocketStream<
async_tungstenite::tokio::ConnectStream,
>,
}
impl ModrinthAuthFlow {
pub async fn new(provider: &str) -> crate::Result<Self> {
let (socket, _) = async_tungstenite::tokio::connect_async(format!(
"wss://api.modrinth.com/v2/auth/ws?provider={provider}"
))
.await?;
Ok(Self { socket })
}
pub async fn prepare_login_url(&mut self) -> crate::Result<String> {
let code_resp = self
.socket
.try_next()
.await?
.ok_or(
crate::ErrorKind::WSClosedError(String::from(
"login socket URL",
))
.as_error(),
)?
.into_data();
#[derive(Deserialize)]
struct Url {
url: String,
}
let response = serde_json::from_slice::<Url>(&code_resp)?;
Ok(response.url)
}
pub async fn extract_credentials(
&mut self,
semaphore: &FetchSemaphore,
) -> crate::Result<ModrinthCredentialsResult> {
// Modrinth auth code response from the login socket
let token_resp = self
.socket
.try_next()
.await?
.ok_or(
crate::ErrorKind::WSClosedError(String::from(
"login socket URL",
))
.as_error(),
)?
.into_data();
let response =
serde_json::from_slice::<HashMap<String, Value>>(&token_resp)?;
get_result_from_res("code", response, semaphore).await
}
pub async fn close(&mut self) -> crate::Result<()> {
self.socket.close(None).await?;
Ok(())
}
}
async fn get_result_from_res(
code_key: &str,
response: HashMap<String, Value>,
semaphore: &FetchSemaphore,
) -> crate::Result<ModrinthCredentialsResult> {
if let Some(flow) = response.get("flow").and_then(|x| x.as_str()) {
Ok(ModrinthCredentialsResult::TwoFactorRequired {
flow: flow.to_string(),
})
} else if let Some(code) = response.get(code_key).and_then(|x| x.as_str()) {
let info = fetch_info(code, semaphore).await?;
Ok(ModrinthCredentialsResult::Credentials(
ModrinthCredentials {
session: code.to_string(),
expires_at: Utc::now() + Duration::weeks(2),
user: info,
},
))
} else if let Some(error) =
response.get("description").and_then(|x| x.as_str())
{
Err(crate::ErrorKind::OtherError(format!(
"Failed to login with error {error}"
))
.as_error())
} else {
Err(crate::ErrorKind::OtherError(String::from(
"Flow/code/error not found in response!",
))
.as_error())
}
}
#[derive(Deserialize)]
struct Session {
session: String,
}
pub async fn login_password(
username: &str,
password: &str,
challenge: &str,
semaphore: &FetchSemaphore,
) -> crate::Result<ModrinthCredentialsResult> {
let resp = fetch_advanced(
Method::POST,
&format!("{MODRINTH_API_URL}auth/login"),
None,
Some(serde_json::json!({
"username": username,
"password": password,
"challenge": challenge,
})),
None,
None,
semaphore,
&CredentialsStore(None),
)
.await?;
let value = serde_json::from_slice::<HashMap<String, Value>>(&resp)?;
get_result_from_res("session", value, semaphore).await
}
async fn get_creds_from_res(
response: HashMap<String, Value>,
semaphore: &FetchSemaphore,
) -> crate::Result<ModrinthCredentials> {
if let Some(code) = response.get("session").and_then(|x| x.as_str()) {
let info = fetch_info(code, semaphore).await?;
Ok(ModrinthCredentials {
session: code.to_string(),
expires_at: Utc::now() + Duration::weeks(2),
user: info,
})
} else if let Some(error) =
response.get("description").and_then(|x| x.as_str())
{
Err(crate::ErrorKind::OtherError(format!(
"Failed to login with error {error}"
))
.as_error())
} else {
Err(crate::ErrorKind::OtherError(String::from(
"Flow/code/error not found in response!",
))
.as_error())
}
}
pub async fn login_2fa(
code: &str,
flow: &str,
semaphore: &FetchSemaphore,
) -> crate::Result<ModrinthCredentials> {
let resp = fetch_advanced(
Method::POST,
&format!("{MODRINTH_API_URL}auth/login/2fa"),
None,
Some(serde_json::json!({
"code": code,
"flow": flow,
})),
None,
None,
semaphore,
&CredentialsStore(None),
)
.await?;
let response = serde_json::from_slice::<HashMap<String, Value>>(&resp)?;
get_creds_from_res(response, semaphore).await
}
pub async fn create_account(
username: &str,
email: &str,
password: &str,
challenge: &str,
sign_up_newsletter: bool,
semaphore: &FetchSemaphore,
) -> crate::Result<ModrinthCredentials> {
let resp = fetch_advanced(
Method::POST,
&format!("{MODRINTH_API_URL}auth/create"),
None,
Some(serde_json::json!({
"username": username,
"email": email,
"password": password,
"challenge": challenge,
"sign_up_newsletter": sign_up_newsletter,
})),
None,
None,
semaphore,
&CredentialsStore(None),
)
.await?;
let response = serde_json::from_slice::<HashMap<String, Value>>(&resp)?;
get_creds_from_res(response, semaphore).await
}
pub async fn refresh_credentials(
credentials_store: &mut CredentialsStore,
semaphore: &FetchSemaphore,
) -> crate::Result<()> {
if let Some(ref mut credentials) = credentials_store.0 {
let token = &credentials.session;
let resp = fetch_advanced(
Method::POST,
&format!("{MODRINTH_API_URL}session/refresh"),
None,
None,
Some(("Authorization", token)),
None,
semaphore,
&CredentialsStore(None),
)
.await
.ok()
.and_then(|resp| serde_json::from_slice::<Session>(&resp).ok());
if let Some(value) = resp {
credentials.user = fetch_info(&value.session, semaphore).await?;
credentials.session = value.session;
credentials.expires_at = Utc::now() + Duration::weeks(2);
} else if credentials.expires_at < Utc::now() {
credentials_store.0 = None;
}
}
credentials_store.save().await?;
Ok(())
}
async fn fetch_info(
token: &str,
semaphore: &FetchSemaphore,
) -> crate::Result<ModrinthUser> {
let result = fetch_advanced(
Method::GET,
&format!("{MODRINTH_API_URL}user"),
None,
None,
Some(("Authorization", token)),
None,
semaphore,
&CredentialsStore(None),
)
.await?;
let value = serde_json::from_slice(&result)?;
Ok(value)
}
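// A minimal sketch of the response-shape handling in get_result_from_res above: the auth
// endpoint is expected to return either a `flow` id (two-factor required), a session/code, or an
// error `description`. The returned strings here are illustrative only.
fn classify_auth_response_sketch(raw: &[u8]) -> Result<String, String> {
    let response: std::collections::HashMap<String, serde_json::Value> =
        serde_json::from_slice(raw).map_err(|e| e.to_string())?;
    if let Some(flow) = response.get("flow").and_then(|x| x.as_str()) {
        Ok(format!("two-factor required, flow {flow}"))
    } else if let Some(session) = response.get("session").and_then(|x| x.as_str()) {
        Ok(format!("logged in with session {session}"))
    } else if let Some(error) = response.get("description").and_then(|x| x.as_str()) {
        Err(format!("login failed: {error}"))
    } else {
        Err("flow/session/error not found in response".to_string())
    }
}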

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,807 @@
//! Project management + inference
use crate::config::MODRINTH_API_URL;
use crate::state::{CredentialsStore, ModrinthUser, Profile};
use crate::util::fetch::{
fetch_json, write_cached_icon, FetchSemaphore, IoSemaphore,
};
use crate::util::io::IOError;
use async_zip::tokio::read::fs::ZipFileReader;
use chrono::{DateTime, Utc};
use futures::StreamExt;
use reqwest::Method;
use serde::{Deserialize, Serialize};
use serde_json::json;
use sha2::Digest;
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use tokio::io::AsyncReadExt;
use super::ProjectPathId;
#[derive(Serialize, Deserialize, Clone, Debug)]
#[serde(rename_all = "lowercase")]
pub enum ProjectType {
Mod,
DataPack,
ResourcePack,
ShaderPack,
}
impl ProjectType {
pub fn get_from_loaders(loaders: Vec<String>) -> Option<Self> {
if loaders
.iter()
.any(|x| ["fabric", "forge", "quilt"].contains(&&**x))
{
Some(ProjectType::Mod)
} else if loaders.iter().any(|x| x == "datapack") {
Some(ProjectType::DataPack)
} else if loaders.iter().any(|x| ["iris", "optifine"].contains(&&**x)) {
Some(ProjectType::ShaderPack)
} else if loaders
.iter()
.any(|x| ["vanilla", "canvas", "minecraft"].contains(&&**x))
{
Some(ProjectType::ResourcePack)
} else {
None
}
}
pub fn get_from_parent_folder(path: PathBuf) -> Option<Self> {
// Get parent folder
let path = path.parent()?.file_name()?;
match path.to_str()? {
"mods" => Some(ProjectType::Mod),
"datapacks" => Some(ProjectType::DataPack),
"resourcepacks" => Some(ProjectType::ResourcePack),
"shaderpacks" => Some(ProjectType::ShaderPack),
_ => None,
}
}
pub fn get_name(&self) -> &'static str {
match self {
ProjectType::Mod => "mod",
ProjectType::DataPack => "datapack",
ProjectType::ResourcePack => "resourcepack",
ProjectType::ShaderPack => "shaderpack",
}
}
pub fn get_folder(&self) -> &'static str {
match self {
ProjectType::Mod => "mods",
ProjectType::DataPack => "datapacks",
ProjectType::ResourcePack => "resourcepacks",
ProjectType::ShaderPack => "shaderpacks",
}
}
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Project {
pub sha512: String,
pub disabled: bool,
pub metadata: ProjectMetadata,
pub file_name: String,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ModrinthProject {
pub id: String,
pub slug: Option<String>,
pub project_type: String,
pub team: String,
pub title: String,
pub description: String,
pub body: String,
pub published: DateTime<Utc>,
pub updated: DateTime<Utc>,
pub client_side: SideType,
pub server_side: SideType,
pub downloads: u32,
pub followers: u32,
pub categories: Vec<String>,
pub additional_categories: Vec<String>,
pub game_versions: Vec<String>,
pub loaders: Vec<String>,
pub versions: Vec<String>,
pub icon_url: Option<String>,
}
/// A specific version of a project
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ModrinthVersion {
pub id: String,
pub project_id: String,
pub author_id: String,
pub featured: bool,
pub name: String,
pub version_number: String,
pub changelog: String,
pub changelog_url: Option<String>,
pub date_published: DateTime<Utc>,
pub downloads: u32,
pub version_type: String,
pub files: Vec<ModrinthVersionFile>,
pub dependencies: Vec<Dependency>,
pub game_versions: Vec<String>,
pub loaders: Vec<String>,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ModrinthVersionFile {
pub hashes: HashMap<String, String>,
pub url: String,
pub filename: String,
pub primary: bool,
pub size: u32,
pub file_type: Option<FileType>,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Dependency {
pub version_id: Option<String>,
pub project_id: Option<String>,
pub file_name: Option<String>,
pub dependency_type: DependencyType,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ModrinthTeamMember {
pub team_id: String,
pub user: ModrinthUser,
pub role: String,
pub ordering: i64,
}
#[derive(Serialize, Deserialize, Copy, Clone, Debug)]
#[serde(rename_all = "lowercase")]
pub enum DependencyType {
Required,
Optional,
Incompatible,
Embedded,
}
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
#[serde(rename_all = "kebab-case")]
pub enum SideType {
Required,
Optional,
Unsupported,
Unknown,
}
#[derive(Serialize, Deserialize, Copy, Clone, Debug)]
#[serde(rename_all = "kebab-case")]
pub enum FileType {
RequiredResourcePack,
OptionalResourcePack,
Unknown,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum ProjectMetadata {
Modrinth {
project: Box<ModrinthProject>,
version: Box<ModrinthVersion>,
members: Vec<ModrinthTeamMember>,
update_version: Option<Box<ModrinthVersion>>,
incompatible: bool,
},
Inferred {
title: Option<String>,
description: Option<String>,
authors: Vec<String>,
version: Option<String>,
icon: Option<PathBuf>,
project_type: Option<String>,
},
Unknown,
}
#[tracing::instrument(skip(io_semaphore))]
#[theseus_macros::debug_pin]
async fn read_icon_from_file(
icon_path: Option<String>,
cache_dir: &Path,
path: &PathBuf,
io_semaphore: &IoSemaphore,
) -> crate::Result<Option<PathBuf>> {
if let Some(icon_path) = icon_path {
// we have to reopen the zip twice here :(
let zip_file_reader = ZipFileReader::new(path).await;
if let Ok(zip_file_reader) = zip_file_reader {
// Get index of icon file and open it
let zip_index_option =
zip_file_reader.file().entries().iter().position(|f| {
f.filename().as_str().unwrap_or_default() == icon_path
});
if let Some(zip_index) = zip_index_option {
let mut bytes = Vec::new();
if zip_file_reader
.reader_with_entry(zip_index)
.await?
.read_to_end_checked(&mut bytes)
.await
.is_ok()
{
let bytes = bytes::Bytes::from(bytes);
let path = write_cached_icon(
&icon_path,
cache_dir,
bytes,
io_semaphore,
)
.await?;
return Ok(Some(path));
}
}
}
}
Ok(None)
}
// Creates Project data from the existing files in the file system, for a given Profile
// Paths must be the full paths to the files in the FS, and not the relative paths
// eg: with get_profile_full_project_paths
#[tracing::instrument(skip(paths, profile, io_semaphore, fetch_semaphore))]
#[theseus_macros::debug_pin]
pub async fn infer_data_from_files(
profile: Profile,
paths: Vec<PathBuf>,
cache_dir: PathBuf,
io_semaphore: &IoSemaphore,
fetch_semaphore: &FetchSemaphore,
credentials: &CredentialsStore,
) -> crate::Result<HashMap<ProjectPathId, Project>> {
let mut file_path_hashes = HashMap::new();
for path in paths {
if !path.exists() {
continue;
}
if let Some(ext) = path.extension() {
// Ignore txt configuration files
if ext == "txt" {
continue;
}
}
let mut file = tokio::fs::File::open(path.clone())
.await
.map_err(|e| IOError::with_path(e, &path))?;
let mut buffer = [0u8; 4096]; // Buffer to read chunks
let mut hasher = sha2::Sha512::new(); // Hasher
loop {
let bytes_read =
file.read(&mut buffer).await.map_err(IOError::from)?;
if bytes_read == 0 {
break;
}
hasher.update(&buffer[..bytes_read]);
}
let hash = format!("{:x}", hasher.finalize());
file_path_hashes.insert(hash, path.clone());
}
let files_url = format!("{}version_files", MODRINTH_API_URL);
let updates_url = format!("{}version_files/update", MODRINTH_API_URL);
let (files, update_versions) = tokio::try_join!(
fetch_json::<HashMap<String, ModrinthVersion>>(
Method::POST,
&files_url,
None,
Some(json!({
"hashes": file_path_hashes.keys().collect::<Vec<_>>(),
"algorithm": "sha512",
})),
fetch_semaphore,
credentials,
),
fetch_json::<HashMap<String, ModrinthVersion>>(
Method::POST,
&updates_url,
None,
Some(json!({
"hashes": file_path_hashes.keys().collect::<Vec<_>>(),
"algorithm": "sha512",
"loaders": [profile.metadata.loader],
"game_versions": [profile.metadata.game_version]
})),
fetch_semaphore,
credentials,
)
)?;
let projects: Vec<ModrinthProject> = fetch_json(
Method::GET,
&format!(
"{}projects?ids={}",
MODRINTH_API_URL,
serde_json::to_string(
&files
.values()
.map(|x| x.project_id.clone())
.collect::<Vec<_>>()
)?
),
None,
None,
fetch_semaphore,
credentials,
)
.await?;
let teams: Vec<ModrinthTeamMember> = fetch_json::<
Vec<Vec<ModrinthTeamMember>>,
>(
Method::GET,
&format!(
"{}teams?ids={}",
MODRINTH_API_URL,
serde_json::to_string(
&projects.iter().map(|x| x.team.clone()).collect::<Vec<_>>()
)?
),
None,
None,
fetch_semaphore,
credentials,
)
.await?
.into_iter()
.flatten()
.collect();
let mut return_projects: Vec<(PathBuf, Project)> = Vec::new();
let mut further_analyze_projects: Vec<(String, PathBuf)> = Vec::new();
for (hash, path) in file_path_hashes {
if let Some(version) = files.get(&hash) {
if let Some(project) =
projects.iter().find(|x| version.project_id == x.id)
{
let file_name = path
.file_name()
.unwrap_or_default()
.to_string_lossy()
.to_string();
return_projects.push((
path,
Project {
disabled: file_name.ends_with(".disabled"),
metadata: ProjectMetadata::Modrinth {
project: Box::new(project.clone()),
version: Box::new(version.clone()),
members: teams
.iter()
.filter(|x| x.team_id == project.team)
.cloned()
.collect::<Vec<_>>(),
update_version: if let Some(value) =
update_versions.get(&hash)
{
if value.id != version.id {
Some(Box::new(value.clone()))
} else {
None
}
} else {
None
},
incompatible: !version.loaders.contains(
&profile
.metadata
.loader
.as_api_str()
.to_string(),
) || !version
.game_versions
.contains(&profile.metadata.game_version),
},
sha512: hash,
file_name,
},
));
continue;
}
}
further_analyze_projects.push((hash, path));
}
for (hash, path) in further_analyze_projects {
let file_name = path
.file_name()
.unwrap_or_default()
.to_string_lossy()
.to_string();
let zip_file_reader = if let Ok(zip_file_reader) =
ZipFileReader::new(path.clone()).await
{
zip_file_reader
} else {
return_projects.push((
path.clone(),
Project {
sha512: hash,
disabled: file_name.ends_with(".disabled"),
metadata: ProjectMetadata::Unknown,
file_name,
},
));
continue;
};
// Forge (META-INF/mods.toml)
let zip_index_option =
zip_file_reader.file().entries().iter().position(|f| {
f.filename().as_str().unwrap_or_default()
== "META-INF/mods.toml"
});
if let Some(index) = zip_index_option {
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct ForgeModInfo {
pub mods: Vec<ForgeMod>,
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct ForgeMod {
mod_id: String,
version: Option<String>,
display_name: Option<String>,
description: Option<String>,
logo_file: Option<String>,
authors: Option<String>,
}
let mut file_str = String::new();
if zip_file_reader
.reader_with_entry(index)
.await?
.read_to_string_checked(&mut file_str)
.await
.is_ok()
{
if let Ok(pack) = toml::from_str::<ForgeModInfo>(&file_str) {
if let Some(pack) = pack.mods.first() {
let icon = read_icon_from_file(
pack.logo_file.clone(),
&cache_dir,
&path,
io_semaphore,
)
.await?;
return_projects.push((
path.clone(),
Project {
sha512: hash,
disabled: file_name.ends_with(".disabled"),
file_name,
metadata: ProjectMetadata::Inferred {
title: Some(
pack.display_name
.clone()
.unwrap_or_else(|| {
pack.mod_id.clone()
}),
),
description: pack.description.clone(),
authors: pack
.authors
.clone()
.map(|x| vec![x])
.unwrap_or_default(),
version: pack.version.clone(),
icon,
project_type: Some("mod".to_string()),
},
},
));
continue;
}
}
}
}
// Legacy Forge (mcmod.info)
let zip_index_option =
zip_file_reader.file().entries().iter().position(|f| {
f.filename().as_str().unwrap_or_default() == "mcmod.info"
});
if let Some(index) = zip_index_option {
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct ForgeMod {
modid: String,
name: String,
description: Option<String>,
version: Option<String>,
author_list: Option<Vec<String>>,
logo_file: Option<String>,
}
let mut file_str = String::new();
if zip_file_reader
.reader_with_entry(index)
.await?
.read_to_string_checked(&mut file_str)
.await
.is_ok()
{
if let Ok(pack) = serde_json::from_str::<ForgeMod>(&file_str) {
let icon = read_icon_from_file(
pack.logo_file,
&cache_dir,
&path,
io_semaphore,
)
.await?;
return_projects.push((
path.clone(),
Project {
sha512: hash,
disabled: file_name.ends_with(".disabled"),
file_name,
metadata: ProjectMetadata::Inferred {
title: Some(if pack.name.is_empty() {
pack.modid
} else {
pack.name
}),
description: pack.description,
authors: pack.author_list.unwrap_or_default(),
version: pack.version,
icon,
project_type: Some("mod".to_string()),
},
},
));
continue;
}
}
}
// Fabric
let zip_index_option =
zip_file_reader.file().entries().iter().position(|f| {
f.filename().as_str().unwrap_or_default() == "fabric.mod.json"
});
if let Some(index) = zip_index_option {
#[derive(Deserialize)]
#[serde(untagged)]
enum FabricAuthor {
String(String),
Object { name: String },
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct FabricMod {
id: String,
version: String,
name: Option<String>,
description: Option<String>,
authors: Vec<FabricAuthor>,
icon: Option<String>,
}
let mut file_str = String::new();
if zip_file_reader
.reader_with_entry(index)
.await?
.read_to_string_checked(&mut file_str)
.await
.is_ok()
{
if let Ok(pack) = serde_json::from_str::<FabricMod>(&file_str) {
let icon = read_icon_from_file(
pack.icon,
&cache_dir,
&path,
io_semaphore,
)
.await?;
return_projects.push((
path.clone(),
Project {
sha512: hash,
disabled: file_name.ends_with(".disabled"),
file_name,
metadata: ProjectMetadata::Inferred {
title: Some(pack.name.unwrap_or(pack.id)),
description: pack.description,
authors: pack
.authors
.into_iter()
.map(|x| match x {
FabricAuthor::String(name) => name,
FabricAuthor::Object { name } => name,
})
.collect(),
version: Some(pack.version),
icon,
project_type: Some("mod".to_string()),
},
},
));
continue;
}
}
}
// Quilt
let zip_index_option =
zip_file_reader.file().entries().iter().position(|f| {
f.filename().as_str().unwrap_or_default() == "quilt.mod.json"
});
if let Some(index) = zip_index_option {
#[derive(Deserialize)]
struct QuiltMetadata {
pub name: Option<String>,
pub description: Option<String>,
pub contributors: Option<HashMap<String, String>>,
pub icon: Option<String>,
}
#[derive(Deserialize)]
struct QuiltMod {
id: String,
version: String,
metadata: Option<QuiltMetadata>,
}
let mut file_str = String::new();
if zip_file_reader
.reader_with_entry(index)
.await?
.read_to_string_checked(&mut file_str)
.await
.is_ok()
{
if let Ok(pack) = serde_json::from_str::<QuiltMod>(&file_str) {
let icon = read_icon_from_file(
pack.metadata.as_ref().and_then(|x| x.icon.clone()),
&cache_dir,
&path,
io_semaphore,
)
.await?;
return_projects.push((
path.clone(),
Project {
sha512: hash,
disabled: file_name.ends_with(".disabled"),
file_name,
metadata: ProjectMetadata::Inferred {
title: Some(
pack.metadata
.as_ref()
.and_then(|x| x.name.clone())
.unwrap_or(pack.id),
),
description: pack
.metadata
.as_ref()
.and_then(|x| x.description.clone()),
authors: pack
.metadata
.map(|x| {
x.contributors
.unwrap_or_default()
.keys()
.cloned()
.collect()
})
.unwrap_or_default(),
version: Some(pack.version),
icon,
project_type: Some("mod".to_string()),
},
},
));
continue;
}
}
}
// Other
let zip_index_option =
zip_file_reader.file().entries().iter().position(|f| {
f.filename().as_str().unwrap_or_default() == "pack.mcmeta"
});
if let Some(index) = zip_index_option {
#[derive(Deserialize)]
struct Pack {
description: Option<String>,
}
let mut file_str = String::new();
if zip_file_reader
.reader_with_entry(index)
.await?
.read_to_string_checked(&mut file_str)
.await
.is_ok()
{
if let Ok(pack) = serde_json::from_str::<Pack>(&file_str) {
let icon = read_icon_from_file(
Some("pack.png".to_string()),
&cache_dir,
&path,
io_semaphore,
)
.await?;
// Guess the project type from the filepath
let project_type =
ProjectType::get_from_parent_folder(path.clone());
return_projects.push((
path.clone(),
Project {
sha512: hash,
disabled: file_name.ends_with(".disabled"),
file_name,
metadata: ProjectMetadata::Inferred {
title: None,
description: pack.description,
authors: Vec::new(),
version: None,
icon,
project_type: project_type
.map(|x| x.get_name().to_string()),
},
},
));
continue;
}
}
}
return_projects.push((
path.clone(),
Project {
sha512: hash,
disabled: file_name.ends_with(".disabled"),
file_name,
metadata: ProjectMetadata::Unknown,
},
));
}
// Project paths should be relative
let mut corrected_hashmap = HashMap::new();
let mut stream = tokio_stream::iter(return_projects);
while let Some((h, v)) = stream.next().await {
let h = ProjectPathId::from_fs_path(&h).await?;
corrected_hashmap.insert(h, v);
}
Ok(corrected_hashmap)
}
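// A minimal test sketch, not part of the original file: it shows how the
// untagged `FabricAuthor` enum used above accepts both the bare-string and
// object author forms found in fabric.mod.json. The structs are redeclared
// here purely for illustration; field names mirror the in-function versions.
#[cfg(test)]
mod fabric_metadata_tests {
    use serde::Deserialize;

    #[derive(Deserialize)]
    #[serde(untagged)]
    enum FabricAuthor {
        String(String),
        Object { name: String },
    }

    #[derive(Deserialize)]
    struct FabricMod {
        id: String,
        version: String,
        name: Option<String>,
        authors: Vec<FabricAuthor>,
    }

    #[test]
    fn parses_both_author_forms() {
        let raw = r#"{
            "id": "example-mod",
            "version": "1.0.0",
            "name": "Example Mod",
            "authors": ["Alice", { "name": "Bob" }]
        }"#;
        let parsed: FabricMod = serde_json::from_str(raw).unwrap();
        assert_eq!(parsed.id, "example-mod");
        assert_eq!(parsed.version, "1.0.0");
        assert_eq!(parsed.name.as_deref(), Some("Example Mod"));
        // Both forms collapse to plain author names, as in the code above.
        let authors: Vec<String> = parsed
            .authors
            .into_iter()
            .map(|a| match a {
                FabricAuthor::String(name) => name,
                FabricAuthor::Object { name } => name,
            })
            .collect();
        assert_eq!(authors, vec!["Alice".to_string(), "Bob".to_string()]);
    }
}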

View File

@@ -0,0 +1,69 @@
use uuid::Uuid;
use crate::State;
// We implement a store for safe loading bars such that we can wait for them to complete
// We create this store separately from the loading bars themselves, because this may be extended as needed
pub struct SafeProcesses {
pub loading_bars: Vec<Uuid>,
}
#[derive(Debug, Copy, Clone)]
pub enum ProcessType {
LoadingBar,
// Potentially other types of processes (e.g. IO operations)
}
impl SafeProcesses {
// init
pub fn new() -> Self {
Self {
loading_bars: Vec::new(),
}
}
// Adds a new running safe process to the list by uuid
pub async fn add_uuid(
r#type: ProcessType,
uuid: Uuid,
) -> crate::Result<Uuid> {
let state = State::get().await?;
let mut safe_processes = state.safety_processes.write().await;
match r#type {
ProcessType::LoadingBar => {
safe_processes.loading_bars.push(uuid);
}
}
Ok(uuid)
}
// Mark a safe process as finishing
pub async fn complete(
r#type: ProcessType,
uuid: Uuid,
) -> crate::Result<()> {
let state = State::get().await?;
let mut safe_processes = state.safety_processes.write().await;
match r#type {
ProcessType::LoadingBar => {
safe_processes.loading_bars.retain(|x| *x != uuid);
}
}
Ok(())
}
// Check whether all safe processes of a given type have completed
pub async fn is_complete(r#type: ProcessType) -> crate::Result<bool> {
let state = State::get().await?;
let safe_processes = state.safety_processes.read().await;
match r#type {
ProcessType::LoadingBar => {
if safe_processes.loading_bars.is_empty() {
return Ok(true);
}
}
}
Ok(false)
}
}
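// A minimal usage sketch, not part of the original file: how a caller might
// register a loading bar as a safe process and mark it complete once the
// guarded work finishes. Assumes `State` has already been initialized.
#[allow(dead_code)]
async fn safe_process_example() -> crate::Result<()> {
    let id = Uuid::new_v4();
    // Register the loading bar so shutdown logic can wait on it.
    SafeProcesses::add_uuid(ProcessType::LoadingBar, id).await?;
    // ... do the work the loading bar tracks ...
    // Mark it finished; `is_complete` now reports whether any bars remain.
    SafeProcesses::complete(ProcessType::LoadingBar, id).await?;
    let _all_done = SafeProcesses::is_complete(ProcessType::LoadingBar).await?;
    Ok(())
}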

View File

@@ -0,0 +1,183 @@
//! Theseus settings file
use serde::{Deserialize, Serialize};
use std::path::{Path, PathBuf};
use tokio::fs;
use super::{DirectoryInfo, JavaGlobals};
// TODO: convert to semver?
const CURRENT_FORMAT_VERSION: u32 = 1;
// Types
/// Global Theseus settings
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Settings {
pub theme: Theme,
pub memory: MemorySettings,
#[serde(default)]
pub force_fullscreen: bool,
pub game_resolution: WindowSize,
pub custom_java_args: Vec<String>,
pub custom_env_args: Vec<(String, String)>,
pub java_globals: JavaGlobals,
pub hooks: Hooks,
pub max_concurrent_downloads: usize,
pub max_concurrent_writes: usize,
pub version: u32,
pub collapsed_navigation: bool,
#[serde(default)]
pub disable_discord_rpc: bool,
#[serde(default)]
pub hide_on_process: bool,
#[serde(default)]
pub native_decorations: bool,
#[serde(default)]
pub default_page: DefaultPage,
#[serde(default)]
pub developer_mode: bool,
#[serde(default)]
pub opt_out_analytics: bool,
#[serde(default)]
pub advanced_rendering: bool,
#[serde(default)]
pub fully_onboarded: bool,
#[serde(default = "DirectoryInfo::get_initial_settings_dir")]
pub loaded_config_dir: Option<PathBuf>,
}
impl Settings {
#[tracing::instrument]
pub async fn init(file: &Path) -> crate::Result<Self> {
let mut rescued = false;
let settings = if file.exists() {
let loaded_settings = fs::read(&file)
.await
.map_err(|err| {
crate::ErrorKind::FSError(format!(
"Error reading settings file: {err}"
))
.as_error()
})
.and_then(|it| {
serde_json::from_slice::<Settings>(&it)
.map_err(crate::Error::from)
});
// If the settings file is corrupted, back it up and create a new one
if let Err(ref err) = loaded_settings {
tracing::error!("Failed to load settings file: {err}. ");
let backup_file = file.with_extension("json.bak");
tracing::error!("Corrupted settings file will be backed up as {}, and a new settings file will be created.", backup_file.display());
let _ = fs::rename(file, backup_file).await;
rescued = true;
}
loaded_settings.ok()
} else {
None
};
if let Some(settings) = settings {
Ok(settings)
} else {
// Create new settings file
let settings = Self {
theme: Theme::Dark,
memory: MemorySettings::default(),
force_fullscreen: false,
game_resolution: WindowSize::default(),
custom_java_args: Vec::new(),
custom_env_args: Vec::new(),
java_globals: JavaGlobals::new(),
hooks: Hooks::default(),
max_concurrent_downloads: 10,
max_concurrent_writes: 10,
version: CURRENT_FORMAT_VERSION,
collapsed_navigation: false,
disable_discord_rpc: false,
hide_on_process: false,
native_decorations: false,
default_page: DefaultPage::Home,
developer_mode: false,
opt_out_analytics: false,
advanced_rendering: true,
fully_onboarded: rescued, // If we rescued the settings file, we should consider the user fully onboarded
// By default, the config directory is the same as the settings directory
loaded_config_dir: DirectoryInfo::get_initial_settings_dir(),
};
if rescued {
settings.sync(file).await?;
}
Ok(settings)
}
}
#[tracing::instrument(skip(self))]
pub async fn sync(&self, to: &Path) -> crate::Result<()> {
fs::write(to, serde_json::to_vec(self)?)
.await
.map_err(|err| {
crate::ErrorKind::FSError(format!(
"Error saving settings to file: {err}"
))
.as_error()
})?;
Ok(())
}
}
/// Theseus theme
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum Theme {
Dark,
Light,
Oled,
}
/// Minecraft memory settings
#[derive(Serialize, Deserialize, Debug, Clone, Copy)]
pub struct MemorySettings {
pub maximum: u32,
}
impl Default for MemorySettings {
fn default() -> Self {
Self { maximum: 2048 }
}
}
/// Game window size
#[derive(Serialize, Deserialize, Debug, Clone, Copy)]
pub struct WindowSize(pub u16, pub u16);
impl Default for WindowSize {
fn default() -> Self {
Self(854, 480)
}
}
/// Game initialization hooks
#[derive(Serialize, Deserialize, Debug, Clone, Default)]
#[serde(default)]
pub struct Hooks {
#[serde(skip_serializing_if = "Option::is_none")]
pub pre_launch: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub wrapper: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub post_exit: Option<String>,
}
/// Opening window to start with
#[derive(Serialize, Deserialize, Debug, Clone, Copy)]
pub enum DefaultPage {
Home,
Library,
}
impl Default for DefaultPage {
fn default() -> Self {
Self::Home
}
}
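// A minimal usage sketch, not part of the original file: load settings from a
// hypothetical path, change a field, and persist the change with `sync`.
#[allow(dead_code)]
async fn settings_usage_example() -> crate::Result<()> {
    let path = Path::new("settings.json"); // hypothetical location
    let mut settings = Settings::init(path).await?;
    settings.theme = Theme::Oled;
    settings.sync(path).await?;
    Ok(())
}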

View File

@@ -0,0 +1,261 @@
use std::path::PathBuf;
use reqwest::Method;
use serde::{Deserialize, Serialize};
use crate::config::MODRINTH_API_URL;
use crate::data::DirectoryInfo;
use crate::state::CredentialsStore;
use crate::util::fetch::{
fetch_json, read_json, write, FetchSemaphore, IoSemaphore,
};
// Serializable struct for all tags to be fetched together by the frontend
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Tags {
pub categories: Vec<Category>,
pub loaders: Vec<Loader>,
pub game_versions: Vec<GameVersion>,
pub donation_platforms: Vec<DonationPlatform>,
pub report_types: Vec<String>,
}
impl Tags {
#[tracing::instrument(skip(io_semaphore, fetch_semaphore))]
#[theseus_macros::debug_pin]
pub async fn init(
dirs: &DirectoryInfo,
fetch_online: bool,
io_semaphore: &IoSemaphore,
fetch_semaphore: &FetchSemaphore,
credentials: &CredentialsStore,
) -> crate::Result<Self> {
let mut tags = None;
let tags_path = dirs.caches_meta_dir().await.join("tags.json");
let tags_path_backup =
dirs.caches_meta_dir().await.join("tags.json.bak");
if let Ok(tags_json) = read_json::<Self>(&tags_path, io_semaphore).await
{
tags = Some(tags_json);
} else if fetch_online {
match Self::fetch(fetch_semaphore, credentials).await {
Ok(tags_fetch) => tags = Some(tags_fetch),
Err(err) => {
tracing::warn!("Unable to fetch launcher tags: {err}")
}
}
} else if let Ok(tags_json) =
read_json::<Self>(&tags_path_backup, io_semaphore).await
{
tags = Some(tags_json);
std::fs::copy(&tags_path_backup, &tags_path).map_err(|err| {
crate::ErrorKind::FSError(format!(
"Error restoring tags backup: {err}"
))
.as_error()
})?;
}
if let Some(tags_data) = tags {
write(&tags_path, &serde_json::to_vec(&tags_data)?, io_semaphore)
.await?;
write(
&tags_path_backup,
&serde_json::to_vec(&tags_data)?,
io_semaphore,
)
.await?;
Ok(tags_data)
} else {
Err(crate::ErrorKind::NoValueFor(String::from("launcher tags"))
.as_error())
}
}
#[tracing::instrument]
#[theseus_macros::debug_pin]
pub async fn update() {
let res = async {
let state = crate::State::get().await?;
let creds = state.credentials.read().await;
let tags_fetch =
Tags::fetch(&state.fetch_semaphore, &creds).await?;
drop(creds);
let tags_path =
state.directories.caches_meta_dir().await.join("tags.json");
let tags_path_backup = state
.directories
.caches_meta_dir()
.await
.join("tags.json.bak");
if tags_path.exists() {
std::fs::copy(&tags_path, &tags_path_backup).unwrap();
}
write(
&tags_path,
&serde_json::to_vec(&tags_fetch)?,
&state.io_semaphore,
)
.await?;
let mut old_tags = state.tags.write().await;
*old_tags = tags_fetch;
Ok::<(), crate::Error>(())
}
.await;
match res {
Ok(()) => {}
Err(err) => {
tracing::warn!("Unable to update launcher tags: {err}")
}
};
}
// Checks the database for the categories tag, returns a Vec::new() if it doesn't exist, otherwise returns the categories
#[tracing::instrument(skip(self))]
pub fn get_categories(&self) -> Vec<Category> {
self.categories.clone()
}
// Checks the database for the loaders tag, returns a Vec::new() if it doesn't exist, otherwise returns the loaders
#[tracing::instrument(skip(self))]
pub fn get_loaders(&self) -> Vec<Loader> {
self.loaders.clone()
}
// Checks the database for the game_versions tag, returns a Vec::new() if it doesn't exist, otherwise returns the game_versions
#[tracing::instrument(skip(self))]
pub fn get_game_versions(&self) -> Vec<GameVersion> {
self.game_versions.clone()
}
// Checks the database for the donation_platforms tag, returns a Vec::new() if it doesn't exist, otherwise returns the donation_platforms
#[tracing::instrument(skip(self))]
pub fn get_donation_platforms(&self) -> Vec<DonationPlatform> {
self.donation_platforms.clone()
}
// Checks the database for the report_types tag, returns a Vec::new() if it doesn't exist, otherwise returns the report_types
#[tracing::instrument(skip(self))]
pub fn get_report_types(&self) -> Vec<String> {
self.report_types.clone()
}
// Gets all tags together as a serializable bundle
#[tracing::instrument(skip(self))]
pub fn get_tag_bundle(&self) -> Tags {
self.clone()
}
// Fetches all tag lists from the Modrinth API
pub async fn fetch(
semaphore: &FetchSemaphore,
credentials: &CredentialsStore,
) -> crate::Result<Self> {
let categories = format!("{MODRINTH_API_URL}tag/category");
let loaders = format!("{MODRINTH_API_URL}tag/loader");
let game_versions = format!("{MODRINTH_API_URL}tag/game_version");
let donation_platforms =
format!("{MODRINTH_API_URL}tag/donation_platform");
let report_types = format!("{MODRINTH_API_URL}tag/report_type");
let categories_fut = fetch_json::<Vec<Category>>(
Method::GET,
&categories,
None,
None,
semaphore,
credentials,
);
let loaders_fut = fetch_json::<Vec<Loader>>(
Method::GET,
&loaders,
None,
None,
semaphore,
credentials,
);
let game_versions_fut = fetch_json::<Vec<GameVersion>>(
Method::GET,
&game_versions,
None,
None,
semaphore,
credentials,
);
let donation_platforms_fut = fetch_json::<Vec<DonationPlatform>>(
Method::GET,
&donation_platforms,
None,
None,
semaphore,
credentials,
);
let report_types_fut = fetch_json::<Vec<String>>(
Method::GET,
&report_types,
None,
None,
semaphore,
credentials,
);
let (
categories,
loaders,
game_versions,
donation_platforms,
report_types,
) = tokio::try_join!(
categories_fut,
loaders_fut,
game_versions_fut,
donation_platforms_fut,
report_types_fut
)?;
Ok(Self {
categories,
loaders,
game_versions,
donation_platforms,
report_types,
})
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Category {
pub name: String,
pub project_type: String,
pub header: String,
pub icon: PathBuf,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Loader {
pub name: String,
pub icon: PathBuf,
pub supported_project_types: Vec<String>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DonationPlatform {
pub short: String,
pub name: String,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GameVersion {
pub version: String,
pub version_type: String,
pub date: String,
pub major: bool,
}
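// A minimal usage sketch, not part of the original file: read the cached tag
// bundle out of the global state and list the loader names. It assumes `State`
// is initialized and that `state.tags` is the `RwLock<Tags>` written by
// `Tags::update` above.
#[allow(dead_code)]
async fn tags_usage_example() -> crate::Result<Vec<String>> {
    let state = crate::State::get().await?;
    let tags = state.tags.read().await;
    Ok(tags.get_loaders().into_iter().map(|loader| loader.name).collect())
}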

View File

@@ -0,0 +1,345 @@
//! Functions for fetching information from the Internet
use crate::event::emit::emit_loading;
use crate::event::LoadingBarId;
use crate::state::CredentialsStore;
use bytes::Bytes;
use lazy_static::lazy_static;
use reqwest::Method;
use serde::de::DeserializeOwned;
use std::ffi::OsStr;
use std::path::{Path, PathBuf};
use std::time::{self, Duration};
use tokio::sync::{RwLock, Semaphore};
use tokio::{fs::File, io::AsyncWriteExt};
use super::io::{self, IOError};
#[derive(Debug)]
pub struct IoSemaphore(pub RwLock<Semaphore>);
#[derive(Debug)]
pub struct FetchSemaphore(pub RwLock<Semaphore>);
lazy_static! {
pub static ref REQWEST_CLIENT: reqwest::Client = {
let mut headers = reqwest::header::HeaderMap::new();
let header = reqwest::header::HeaderValue::from_str(&format!(
"modrinth/theseus/{} (support@modrinth.com)",
env!("CARGO_PKG_VERSION")
))
.unwrap();
headers.insert(reqwest::header::USER_AGENT, header);
reqwest::Client::builder()
.tcp_keepalive(Some(time::Duration::from_secs(10)))
.default_headers(headers)
.build()
.expect("Reqwest Client Building Failed")
};
}
const FETCH_ATTEMPTS: usize = 3;
#[tracing::instrument(skip(semaphore))]
pub async fn fetch(
url: &str,
sha1: Option<&str>,
semaphore: &FetchSemaphore,
credentials: &CredentialsStore,
) -> crate::Result<Bytes> {
fetch_advanced(
Method::GET,
url,
sha1,
None,
None,
None,
semaphore,
credentials,
)
.await
}
#[tracing::instrument(skip(json_body, semaphore))]
pub async fn fetch_json<T>(
method: Method,
url: &str,
sha1: Option<&str>,
json_body: Option<serde_json::Value>,
semaphore: &FetchSemaphore,
credentials: &CredentialsStore,
) -> crate::Result<T>
where
T: DeserializeOwned,
{
let result = fetch_advanced(
method,
url,
sha1,
json_body,
None,
None,
semaphore,
credentials,
)
.await?;
let value = serde_json::from_slice(&result)?;
Ok(value)
}
/// Downloads a file with retry and checksum functionality
#[tracing::instrument(skip(json_body, semaphore))]
#[theseus_macros::debug_pin]
#[allow(clippy::too_many_arguments)]
pub async fn fetch_advanced(
method: Method,
url: &str,
sha1: Option<&str>,
json_body: Option<serde_json::Value>,
header: Option<(&str, &str)>,
loading_bar: Option<(&LoadingBarId, f64)>,
semaphore: &FetchSemaphore,
credentials: &CredentialsStore,
) -> crate::Result<Bytes> {
let io_semaphore = semaphore.0.read().await;
let _permit = io_semaphore.acquire().await?;
for attempt in 1..=(FETCH_ATTEMPTS + 1) {
let mut req = REQWEST_CLIENT.request(method.clone(), url);
if let Some(body) = json_body.clone() {
req = req.json(&body);
}
if let Some(header) = header {
req = req.header(header.0, header.1);
}
if url.starts_with("https://cdn.modrinth.com") {
if let Some(creds) = &credentials.0 {
req = req.header("Authorization", &creds.session);
}
}
let result = req.send().await;
match result {
Ok(x) => {
let bytes = if let Some((bar, total)) = &loading_bar {
let length = x.content_length();
if let Some(total_size) = length {
use futures::StreamExt;
let mut stream = x.bytes_stream();
let mut bytes = Vec::new();
while let Some(item) = stream.next().await {
let chunk = item.or(Err(
crate::error::ErrorKind::NoValueFor(
"fetch bytes".to_string(),
),
))?;
bytes.append(&mut chunk.to_vec());
emit_loading(
bar,
(chunk.len() as f64 / total_size as f64)
* total,
None,
)
.await?;
}
Ok(bytes::Bytes::from(bytes))
} else {
x.bytes().await
}
} else {
x.bytes().await
};
if let Ok(bytes) = bytes {
if let Some(sha1) = sha1 {
let hash = sha1_async(bytes.clone()).await?;
if &*hash != sha1 {
if attempt <= FETCH_ATTEMPTS {
continue;
} else {
return Err(crate::ErrorKind::HashError(
sha1.to_string(),
hash,
)
.into());
}
}
}
tracing::trace!("Done downloading URL {url}");
return Ok(bytes);
} else if attempt <= FETCH_ATTEMPTS {
continue;
} else if let Err(err) = bytes {
return Err(err.into());
}
}
Err(_) if attempt <= FETCH_ATTEMPTS => continue,
Err(err) => {
return Err(err.into());
}
}
}
unreachable!()
}
/// Downloads a file from specified mirrors
#[tracing::instrument(skip(semaphore))]
#[theseus_macros::debug_pin]
pub async fn fetch_mirrors(
mirrors: &[&str],
sha1: Option<&str>,
semaphore: &FetchSemaphore,
credentials: &CredentialsStore,
) -> crate::Result<Bytes> {
if mirrors.is_empty() {
return Err(crate::ErrorKind::InputError(
"No mirrors provided!".to_string(),
)
.into());
}
for (index, mirror) in mirrors.iter().enumerate() {
let result = fetch(mirror, sha1, semaphore, credentials).await;
if result.is_ok() || (result.is_err() && index == (mirrors.len() - 1)) {
return result;
}
}
unreachable!()
}
/// Checks whether an internet connection is available by querying a Modrinth endpoint, with a timeout in seconds
#[tracing::instrument]
#[theseus_macros::debug_pin]
pub async fn check_internet(timeout: u64) -> bool {
REQWEST_CLIENT
.get("https://launcher-files.modrinth.com/detect.txt")
.timeout(Duration::from_secs(timeout))
.send()
.await
.is_ok()
}
/// Posts a JSON to a URL
#[tracing::instrument(skip(json_body, semaphore))]
#[theseus_macros::debug_pin]
pub async fn post_json<T>(
url: &str,
json_body: serde_json::Value,
semaphore: &FetchSemaphore,
credentials: &CredentialsStore,
) -> crate::Result<T>
where
T: DeserializeOwned,
{
let io_semaphore = semaphore.0.read().await;
let _permit = io_semaphore.acquire().await?;
let mut req = REQWEST_CLIENT.post(url).json(&json_body);
if let Some(creds) = &credentials.0 {
req = req.header("Authorization", &creds.session);
}
let result = req.send().await?.error_for_status()?;
let value = result.json().await?;
Ok(value)
}
pub async fn read_json<T>(
path: &Path,
semaphore: &IoSemaphore,
) -> crate::Result<T>
where
T: DeserializeOwned,
{
let io_semaphore = semaphore.0.read().await;
let _permit = io_semaphore.acquire().await?;
let json = io::read(path).await?;
let json = serde_json::from_slice::<T>(&json)?;
Ok(json)
}
#[tracing::instrument(skip(bytes, semaphore))]
pub async fn write<'a>(
path: &Path,
bytes: &[u8],
semaphore: &IoSemaphore,
) -> crate::Result<()> {
let io_semaphore = semaphore.0.read().await;
let _permit = io_semaphore.acquire().await?;
if let Some(parent) = path.parent() {
io::create_dir_all(parent).await?;
}
let mut file = File::create(path)
.await
.map_err(|e| IOError::with_path(e, path))?;
file.write_all(bytes)
.await
.map_err(|e| IOError::with_path(e, path))?;
tracing::trace!("Done writing file {}", path.display());
Ok(())
}
pub async fn copy(
src: impl AsRef<std::path::Path>,
dest: impl AsRef<std::path::Path>,
semaphore: &IoSemaphore,
) -> crate::Result<()> {
let src: &Path = src.as_ref();
let dest = dest.as_ref();
let io_semaphore = semaphore.0.read().await;
let _permit = io_semaphore.acquire().await?;
if let Some(parent) = dest.parent() {
io::create_dir_all(parent).await?;
}
io::copy(src, dest).await?;
tracing::trace!(
"Done copying file {} to {}",
src.display(),
dest.display()
);
Ok(())
}
// Writes an icon to the cache and returns the absolute path of the icon within the cache directory
#[tracing::instrument(skip(bytes, semaphore))]
pub async fn write_cached_icon(
icon_path: &str,
cache_dir: &Path,
bytes: Bytes,
semaphore: &IoSemaphore,
) -> crate::Result<PathBuf> {
let extension = Path::new(&icon_path).extension().and_then(OsStr::to_str);
let hash = sha1_async(bytes.clone()).await?;
let path = cache_dir.join("icons").join(if let Some(ext) = extension {
format!("{hash}.{ext}")
} else {
hash
});
write(&path, &bytes, semaphore).await?;
let path = io::canonicalize(path)?;
Ok(path)
}
async fn sha1_async(bytes: Bytes) -> crate::Result<String> {
let hash = tokio::task::spawn_blocking(move || {
sha1_smol::Sha1::from(bytes).hexdigest()
})
.await?;
Ok(hash)
}
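// A minimal usage sketch, not part of the original file: fetch a JSON document
// through the shared semaphores and credentials held by `State`. The endpoint
// URL is illustrative only; any `DeserializeOwned` target type works.
#[allow(dead_code)]
async fn fetch_usage_example() -> crate::Result<serde_json::Value> {
    let state = crate::State::get().await?;
    let creds = state.credentials.read().await;
    fetch_json::<serde_json::Value>(
        Method::GET,
        "https://api.modrinth.com/v2/tag/loader", // illustrative endpoint
        None,
        None,
        &state.fetch_semaphore,
        &creds,
    )
    .await
}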

194
libs/theseus/src/util/io.rs Normal file
View File

@@ -0,0 +1,194 @@
// IO error
// A wrapper around the tokio IO functions that adds the path to the error message, instead of the uninformative std::io::Error.
use std::{io::Write, path::Path};
use tempfile::NamedTempFile;
use tokio::task::spawn_blocking;
#[derive(Debug, thiserror::Error)]
pub enum IOError {
#[error("{source}, path: {path}")]
IOPathError {
#[source]
source: std::io::Error,
path: String,
},
#[error(transparent)]
IOError(#[from] std::io::Error),
}
impl IOError {
pub fn from(source: std::io::Error) -> Self {
Self::IOError(source)
}
pub fn with_path(
source: std::io::Error,
path: impl AsRef<std::path::Path>,
) -> Self {
let path = path.as_ref();
Self::IOPathError {
source,
path: path.to_string_lossy().to_string(),
}
}
}
// dunce canonicalize
pub fn canonicalize(
path: impl AsRef<std::path::Path>,
) -> Result<std::path::PathBuf, IOError> {
let path = path.as_ref();
dunce::canonicalize(path).map_err(|e| IOError::IOPathError {
source: e,
path: path.to_string_lossy().to_string(),
})
}
// read_dir
pub async fn read_dir(
path: impl AsRef<std::path::Path>,
) -> Result<tokio::fs::ReadDir, IOError> {
let path = path.as_ref();
tokio::fs::read_dir(path)
.await
.map_err(|e| IOError::IOPathError {
source: e,
path: path.to_string_lossy().to_string(),
})
}
// create_dir_all
pub async fn create_dir_all(
path: impl AsRef<std::path::Path>,
) -> Result<(), IOError> {
let path = path.as_ref();
tokio::fs::create_dir_all(path)
.await
.map_err(|e| IOError::IOPathError {
source: e,
path: path.to_string_lossy().to_string(),
})
}
// remove_dir_all
pub async fn remove_dir_all(
path: impl AsRef<std::path::Path>,
) -> Result<(), IOError> {
let path = path.as_ref();
tokio::fs::remove_dir_all(path)
.await
.map_err(|e| IOError::IOPathError {
source: e,
path: path.to_string_lossy().to_string(),
})
}
// read_to_string
pub async fn read_to_string(
path: impl AsRef<std::path::Path>,
) -> Result<String, IOError> {
let path = path.as_ref();
tokio::fs::read_to_string(path)
.await
.map_err(|e| IOError::IOPathError {
source: e,
path: path.to_string_lossy().to_string(),
})
}
// read
pub async fn read(
path: impl AsRef<std::path::Path>,
) -> Result<Vec<u8>, IOError> {
let path = path.as_ref();
tokio::fs::read(path)
.await
.map_err(|e| IOError::IOPathError {
source: e,
path: path.to_string_lossy().to_string(),
})
}
// write
pub async fn write(
path: impl AsRef<std::path::Path>,
data: impl AsRef<[u8]>,
) -> Result<(), IOError> {
let path = path.as_ref().to_owned();
let data = data.as_ref().to_owned();
spawn_blocking(move || {
let cloned_path = path.clone();
sync_write(data, path).map_err(|e| IOError::IOPathError {
source: e,
path: cloned_path.to_string_lossy().to_string(),
})
})
.await
.map_err(|_| {
std::io::Error::new(std::io::ErrorKind::Other, "background task failed")
})??;
Ok(())
}
fn sync_write(
data: impl AsRef<[u8]>,
path: impl AsRef<Path>,
) -> Result<(), std::io::Error> {
let mut tempfile =
NamedTempFile::new_in(path.as_ref().parent().ok_or_else(|| {
std::io::Error::new(
std::io::ErrorKind::Other,
"could not get parent directory for temporary file",
)
})?)?;
tempfile.write_all(data.as_ref())?;
let tmp_path = tempfile.into_temp_path();
let path = path.as_ref();
tmp_path.persist(path)?;
std::io::Result::Ok(())
}
// rename
pub async fn rename(
from: impl AsRef<std::path::Path>,
to: impl AsRef<std::path::Path>,
) -> Result<(), IOError> {
let from = from.as_ref();
let to = to.as_ref();
tokio::fs::rename(from, to)
.await
.map_err(|e| IOError::IOPathError {
source: e,
path: from.to_string_lossy().to_string(),
})
}
// copy
pub async fn copy(
from: impl AsRef<std::path::Path>,
to: impl AsRef<std::path::Path>,
) -> Result<u64, IOError> {
let from: &Path = from.as_ref();
let to = to.as_ref();
tokio::fs::copy(from, to)
.await
.map_err(|e| IOError::IOPathError {
source: e,
path: from.to_string_lossy().to_string(),
})
}
// remove file
pub async fn remove_file(
path: impl AsRef<std::path::Path>,
) -> Result<(), IOError> {
let path = path.as_ref();
tokio::fs::remove_file(path)
.await
.map_err(|e| IOError::IOPathError {
source: e,
path: path.to_string_lossy().to_string(),
})
}
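// A minimal usage sketch, not part of the original file: the wrappers above
// behave like their tokio counterparts but include the offending path in any
// error. The file name is hypothetical.
#[allow(dead_code)]
async fn io_usage_example() -> Result<(), IOError> {
    let path = std::path::Path::new("example.txt");
    write(path, b"hello").await?;
    let contents = read_to_string(path).await?;
    debug_assert_eq!(contents, "hello");
    remove_file(path).await?;
    Ok(())
}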

View File

@@ -0,0 +1,388 @@
use super::io;
use futures::prelude::*;
use serde::{Deserialize, Serialize};
use std::env;
use std::path::PathBuf;
use std::process::Command;
use std::{collections::HashSet, path::Path};
use tokio::task::JoinError;
use crate::State;
#[cfg(target_os = "windows")]
use winreg::{
enums::{HKEY_LOCAL_MACHINE, KEY_READ, KEY_WOW64_32KEY, KEY_WOW64_64KEY},
RegKey,
};
#[derive(Debug, PartialEq, Eq, Hash, Serialize, Deserialize, Clone)]
pub struct JavaVersion {
pub path: String,
pub version: String,
pub architecture: String,
}
// Entrypoint function (Windows)
// Returns a Vec of unique JavaVersions from the PATH, Windows Registry Keys and common Java locations
#[cfg(target_os = "windows")]
#[tracing::instrument]
pub async fn get_all_jre() -> Result<Vec<JavaVersion>, JREError> {
let mut jre_paths = HashSet::new();
// Add JREs directly on PATH
jre_paths.extend(get_all_jre_path().await);
jre_paths.extend(get_all_autoinstalled_jre_path().await?);
if let Ok(java_home) = env::var("JAVA_HOME") {
jre_paths.insert(PathBuf::from(java_home));
}
// Hard-coded locations where Java is commonly installed
let java_paths = [
r"C:/Program Files/Java",
r"C:/Program Files (x86)/Java",
r"C:\Program Files\Eclipse Adoptium",
r"C:\Program Files (x86)\Eclipse Adoptium",
];
for java_path in java_paths {
let Ok(java_subpaths) = std::fs::read_dir(java_path) else {
continue;
};
for java_subpath in java_subpaths.flatten() {
let path = java_subpath.path();
jre_paths.insert(path.join("bin"));
}
}
// Windows Registry Keys
let key_paths = [
r"SOFTWARE\JavaSoft\Java Runtime Environment", // Oracle
r"SOFTWARE\JavaSoft\Java Development Kit",
r"SOFTWARE\\JavaSoft\\JRE", // Oracle
r"SOFTWARE\\JavaSoft\\JDK",
r"SOFTWARE\\Eclipse Foundation\\JDK", // Eclipse
r"SOFTWARE\\Eclipse Adoptium\\JRE", // Eclipse
r"SOFTWARE\\Eclipse Foundation\\JDK", // Eclipse
r"SOFTWARE\\Microsoft\\JDK", // Microsoft
];
for key in key_paths {
if let Ok(jre_key) = RegKey::predef(HKEY_LOCAL_MACHINE)
.open_subkey_with_flags(key, KEY_READ | KEY_WOW64_32KEY)
{
jre_paths.extend(get_paths_from_jre_winregkey(jre_key));
}
if let Ok(jre_key) = RegKey::predef(HKEY_LOCAL_MACHINE)
.open_subkey_with_flags(key, KEY_READ | KEY_WOW64_64KEY)
{
jre_paths.extend(get_paths_from_jre_winregkey(jre_key));
}
}
// Get JRE versions from potential paths concurrently
let j = check_java_at_filepaths(jre_paths)
.await
.into_iter()
.collect();
Ok(j)
}
// Collects paths rather than searching directly, as RegKey values cannot be held across await points (they do not implement Send)
#[cfg(target_os = "windows")]
#[tracing::instrument]
pub fn get_paths_from_jre_winregkey(jre_key: RegKey) -> HashSet<PathBuf> {
let mut jre_paths = HashSet::new();
for subkey in jre_key.enum_keys().flatten() {
if let Ok(subkey) = jre_key.open_subkey(subkey) {
let subkey_value_names =
[r"JavaHome", r"InstallationPath", r"\\hotspot\\MSI"];
for subkey_value in subkey_value_names {
let path: Result<String, std::io::Error> =
subkey.get_value(subkey_value);
let Ok(path) = path else { continue };
jre_paths.insert(PathBuf::from(path).join("bin"));
}
}
}
jre_paths
}
// Entrypoint function (Mac)
// Returns a Vec of unique JavaVersions from the PATH, and common Java locations
#[cfg(target_os = "macos")]
#[tracing::instrument]
pub async fn get_all_jre() -> Result<Vec<JavaVersion>, JREError> {
// Use HashSet to avoid duplicates
let mut jre_paths = HashSet::new();
// Add JREs directly on PATH
jre_paths.extend(get_all_jre_path().await);
jre_paths.extend(get_all_autoinstalled_jre_path().await?);
// Hard-coded locations where Java is commonly installed
let java_paths = [
r"/Applications/Xcode.app/Contents/Applications/Application Loader.app/Contents/MacOS/itms/java",
r"/Library/Internet Plug-Ins/JavaAppletPlugin.plugin/Contents/Home",
r"/System/Library/Frameworks/JavaVM.framework/Versions/Current/Commands",
];
for path in java_paths {
jre_paths.insert(PathBuf::from(path));
}
// Iterate over JavaVirtualMachines/(something)/Contents/Home/bin
let base_path = PathBuf::from("/Library/Java/JavaVirtualMachines/");
if let Ok(dir) = std::fs::read_dir(base_path) {
for entry in dir.flatten() {
let entry = entry.path().join("Contents/Home/bin");
jre_paths.insert(entry);
}
}
// Get JRE versions from potential paths concurrently
let j = check_java_at_filepaths(jre_paths)
.await
.into_iter()
.collect();
Ok(j)
}
// Entrypoint function (Linux)
// Returns a Vec of unique JavaVersions from the PATH, and common Java locations
#[cfg(target_os = "linux")]
#[tracing::instrument]
pub async fn get_all_jre() -> Result<Vec<JavaVersion>, JREError> {
// Use HashSet to avoid duplicates
let mut jre_paths = HashSet::new();
// Add JREs directly on PATH
jre_paths.extend(get_all_jre_path().await);
jre_paths.extend(get_all_autoinstalled_jre_path().await?);
// Hard-coded locations where Java is commonly installed
let java_paths = [
r"/usr",
r"/usr/java",
r"/usr/lib/jvm",
r"/usr/lib64/jvm",
r"/opt/jdk",
r"/opt/jdks",
];
for path in java_paths {
let path = PathBuf::from(path);
jre_paths.insert(PathBuf::from(&path).join("jre").join("bin"));
jre_paths.insert(PathBuf::from(&path).join("bin"));
if let Ok(dir) = std::fs::read_dir(path) {
for entry in dir.flatten() {
let entry_path = entry.path();
jre_paths.insert(entry_path.join("jre").join("bin"));
jre_paths.insert(entry_path.join("bin"));
}
}
}
// Get JRE versions from potential paths concurrently
let j = check_java_at_filepaths(jre_paths)
.await
.into_iter()
.collect();
Ok(j)
}
// Gets all JREs that the launcher has auto-installed into its Java versions directory
#[tracing::instrument]
#[theseus_macros::debug_pin]
async fn get_all_autoinstalled_jre_path() -> Result<HashSet<PathBuf>, JREError>
{
Box::pin(async move {
let state = State::get().await.map_err(|_| JREError::StateError)?;
let mut jre_paths = HashSet::new();
let base_path = state.directories.java_versions_dir().await;
if base_path.is_dir() {
if let Ok(dir) = std::fs::read_dir(base_path) {
for entry in dir.flatten() {
let file_path = entry.path().join("bin");
if let Ok(contents) =
std::fs::read_to_string(file_path.clone())
{
let entry = entry.path().join(contents);
jre_paths.insert(entry);
} else {
#[cfg(not(target_os = "macos"))]
{
let file_path = file_path.join(JAVA_BIN);
jre_paths.insert(file_path);
}
}
}
}
}
Ok(jre_paths)
})
.await
}
// Gets all JREs from the PATH env variable
#[tracing::instrument]
async fn get_all_jre_path() -> HashSet<PathBuf> {
// Iterate over values in PATH variable, where accessible JREs are referenced
let paths =
env::var("PATH").map(|x| env::split_paths(&x).collect::<HashSet<_>>());
paths.unwrap_or_else(|_| HashSet::new())
}
#[cfg(target_os = "windows")]
#[allow(dead_code)]
pub const JAVA_BIN: &str = "javaw.exe";
#[cfg(not(target_os = "windows"))]
#[allow(dead_code)]
pub const JAVA_BIN: &str = "java";
// For each candidate filepath in 'paths', run check_java_at_filepath concurrently
// and returning a JavaVersion for every valid path that points to a java bin
#[tracing::instrument]
pub async fn check_java_at_filepaths(
paths: HashSet<PathBuf>,
) -> HashSet<JavaVersion> {
let jres = stream::iter(paths.into_iter())
.map(|p: PathBuf| {
tokio::task::spawn(async move { check_java_at_filepath(&p).await })
})
.buffer_unordered(64)
.collect::<Vec<_>>()
.await;
jres.into_iter().flat_map(|x| x.ok()).flatten().collect()
}
// For a candidate filepath 'path', attempt to resolve it and get the Java version at this path
// If no such path exists, or no such valid java at this path exists, returns None
#[tracing::instrument]
#[theseus_macros::debug_pin]
pub async fn check_java_at_filepath(path: &Path) -> Option<JavaVersion> {
// Attempt to canonicalize the potential java filepath
// If it fails, this path does not exist and None is returned (no Java here)
let Ok(path) = io::canonicalize(path) else {
return None;
};
// Checks for existence of Java at this filepath
// Adds JAVA_BIN to the end of the path if it is not already there
let java = if path.file_name()?.to_str()? != JAVA_BIN {
path.join(JAVA_BIN)
} else {
path
};
if !java.exists() {
return None;
};
let bytes = include_bytes!("../../library/JavaInfo.class");
let tempdir: PathBuf = tempfile::tempdir().ok()?.into_path();
if !tempdir.exists() {
return None;
}
let file_path = tempdir.join("JavaInfo.class");
io::write(&file_path, bytes).await.ok()?;
let output = Command::new(&java)
.arg("-cp")
.arg(file_path.parent().unwrap())
.arg("JavaInfo")
.output()
.ok()?;
let stdout = String::from_utf8_lossy(&output.stdout);
let mut java_version = None;
let mut java_arch = None;
for line in stdout.lines() {
let mut parts = line.split('=');
let key = parts.next().unwrap_or_default();
let value = parts.next().unwrap_or_default();
if key == "os.arch" {
java_arch = Some(value);
} else if key == "java.version" {
java_version = Some(value);
}
}
// Extract version info from it
if let Some(arch) = java_arch {
if let Some(version) = java_version {
let path = java.to_string_lossy().to_string();
return Some(JavaVersion {
path,
version: version.to_string(),
architecture: arch.to_string(),
});
}
}
None
}
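// A minimal usage sketch, not part of the original file: probe a single
// candidate directory for a Java binary. The path is hypothetical; on success
// the returned `JavaVersion` carries the resolved path, version and
// architecture reported by the bundled JavaInfo class.
#[allow(dead_code)]
async fn probe_single_path_example() -> Option<JavaVersion> {
    check_java_at_filepath(Path::new("/usr/lib/jvm/java-17-openjdk/bin")).await
}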
/// Extract major/minor version from a java version string
/// Returns the (major, minor) pair or an error; assumes a major version of 1 when none is present
/// "1.8.0_361" -> (1, 8)
/// "20" -> (1, 20)
pub fn extract_java_majorminor_version(
version: &str,
) -> Result<(u32, u32), JREError> {
let mut split = version.split('.');
let major_opt = split.next();
let mut major;
// Try the minor component. If it doesn't exist, the string is in a format like "20", so use the single value
let mut minor = if let Some(minor) = split.next() {
major = major_opt.unwrap_or("1").parse::<u32>()?;
minor.parse::<u32>()?
} else {
// Formatted like "20", only one value means that is minor version
major = 1;
major_opt
.ok_or_else(|| JREError::InvalidJREVersion(version.to_string()))?
.parse::<u32>()?
};
// The leading component should always be 1. If it is greater, the string is formatted like "17.0.1" and actually starts with the minor version
if major > 1 {
minor = major;
major = 1;
}
Ok((major, minor))
}
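// A small test sketch, not part of the original file, exercising the version
// normalization documented above.
#[cfg(test)]
mod version_parse_tests {
    use super::extract_java_majorminor_version;

    #[test]
    fn parses_common_version_strings() {
        // Legacy "1.x" strings keep their minor component.
        assert_eq!(extract_java_majorminor_version("1.8.0_361").unwrap(), (1, 8));
        // A bare major version such as "20" normalizes to (1, 20).
        assert_eq!(extract_java_majorminor_version("20").unwrap(), (1, 20));
        // Modern "17.0.1" style strings normalize to (1, 17).
        assert_eq!(extract_java_majorminor_version("17.0.1").unwrap(), (1, 17));
    }
}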
#[derive(thiserror::Error, Debug)]
pub enum JREError {
#[error("Command error : {0}")]
IOError(#[from] std::io::Error),
#[error("Env error: {0}")]
EnvError(#[from] env::VarError),
#[error("No JRE found for required version: {0}")]
NoJREFound(String),
#[error("Invalid JRE version string: {0}")]
InvalidJREVersion(String),
#[error("Parsing error: {0}")]
ParseError(#[from] std::num::ParseIntError),
#[error("Join error: {0}")]
JoinError(#[from] JoinError),
#[error("No stored tag for Minecraft Version {0}")]
NoMinecraftVersionFound(String),
#[error("Error getting launcher sttae")]
StateError,
}

View File

@@ -0,0 +1,17 @@
//! Theseus utility functions
pub mod fetch;
pub mod io;
pub mod jre;
pub mod platform;
/// Wrap a builder which uses a mut reference into one which outputs an owned value
macro_rules! wrap_ref_builder {
($id:ident = $init:expr => $transform:block) => {{
let mut it = $init;
{
let $id = &mut it;
$transform;
}
it
}};
}
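// A small usage sketch, not part of the original file: `wrap_ref_builder!`
// turns an expression that mutates through a `&mut` binding into an owned
// value.
#[cfg(test)]
mod wrap_ref_builder_tests {
    #[test]
    fn builds_owned_value() {
        let built = wrap_ref_builder!(s = String::from("hello") => {
            s.push_str(", world")
        });
        assert_eq!(built, "hello, world");
    }
}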

View File

@@ -0,0 +1,102 @@
//! Platform-related code
use daedalus::minecraft::{Os, OsRule};
use regex::Regex;
// OS detection
pub trait OsExt {
/// Get the OS of the current system
fn native() -> Self;
/// Gets the OS + Arch of the current system
fn native_arch(java_arch: &str) -> Self;
}
impl OsExt for Os {
fn native_arch(java_arch: &str) -> Self {
if std::env::consts::OS == "windows" {
if java_arch == "aarch64" {
Os::WindowsArm64
} else {
Os::Windows
}
} else if std::env::consts::OS == "linux" {
if java_arch == "aarch64" {
Os::LinuxArm64
} else if java_arch == "arm" {
Os::LinuxArm32
} else {
Os::Linux
}
} else if std::env::consts::OS == "macos" {
if java_arch == "aarch64" {
Os::OsxArm64
} else {
Os::Osx
}
} else {
Os::Unknown
}
}
fn native() -> Self {
match std::env::consts::OS {
"windows" => Self::Windows,
"macos" => Self::Osx,
"linux" => Self::Linux,
_ => Self::Unknown,
}
}
}
// Bit width
#[cfg(target_pointer_width = "64")]
pub const ARCH_WIDTH: &str = "64";
#[cfg(target_pointer_width = "32")]
pub const ARCH_WIDTH: &str = "32";
// Platform rule handling
pub fn os_rule(
rule: &OsRule,
java_arch: &str,
// Whether the Minecraft version is newer than 1.18.2 (which supports macOS natively)
minecraft_updated: bool,
) -> bool {
let mut rule_match = true;
if let Some(ref arch) = rule.arch {
rule_match &= !matches!(arch.as_str(), "x86" | "arm");
}
if let Some(name) = &rule.name {
if minecraft_updated
&& (name != &Os::LinuxArm64 || name != &Os::LinuxArm32)
{
rule_match &=
&Os::native() == name || &Os::native_arch(java_arch) == name;
} else {
rule_match &= &Os::native_arch(java_arch) == name;
}
}
if let Some(version) = &rule.version {
if let Ok(regex) = Regex::new(version.as_str()) {
rule_match &=
regex.is_match(&sys_info::os_release().unwrap_or_default());
}
}
rule_match
}
pub fn classpath_separator(java_arch: &str) -> &'static str {
match Os::native_arch(java_arch) {
Os::Osx
| Os::OsxArm64
| Os::Linux
| Os::LinuxArm32
| Os::LinuxArm64
| Os::Unknown => ":",
Os::Windows | Os::WindowsArm64 => ";",
}
}
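// A small test sketch, not part of the original file: the classpath separator
// follows the host platform convention regardless of the Java architecture
// string passed in.
#[cfg(test)]
mod platform_tests {
    use super::classpath_separator;

    #[test]
    fn separator_matches_host_os() {
        #[cfg(windows)]
        assert_eq!(classpath_separator("x86_64"), ";");
        #[cfg(not(windows))]
        assert_eq!(classpath_separator("x86_64"), ":");
    }
}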

View File

@@ -0,0 +1,11 @@
[package]
name = "theseus_macros"
version = "0.1.0"
edition = "2021"
[lib]
proc-macro = true
[dependencies]
syn = { version = "2.0.58", features = ["full"] }
quote = "1.0"

View File

@@ -0,0 +1,34 @@
extern crate proc_macro;
use proc_macro::TokenStream;
use quote::quote;
use syn::{parse_macro_input, ItemFn};
#[proc_macro_attribute]
pub fn debug_pin(_attr: TokenStream, item: TokenStream) -> TokenStream {
let input = parse_macro_input!(item as ItemFn);
let attrs = &input.attrs;
let vis = &input.vis;
let sig = &input.sig;
let body = &input.block;
#[cfg(debug_assertions)]
let result = quote! {
#(#attrs)*
#vis #sig {
Box::pin(async move {
#body
}).await
}
};
#[cfg(not(debug_assertions))]
let result = quote! {
#(#attrs)*
#vis #sig {
#body
}
};
TokenStream::from(result)
}
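// A minimal usage sketch, not part of the original file. Applied to an async
// fn elsewhere in the workspace, the attribute leaves release builds untouched
// and, in debug builds, wraps the body in `Box::pin(async move { ... }).await`,
// presumably to keep large generated futures off the stack:
//
//     #[theseus_macros::debug_pin]
//     pub async fn heavy_task() -> Result<(), Error> {
//         // large async state machine
//         Ok(())
//     }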