Migrate to Turborepo (#1251)

Evan Song
2024-07-04 21:46:29 -07:00
committed by GitHub
parent 6fa1acc461
commit 0f2ddb452c
811 changed files with 5623 additions and 7832 deletions

View File

@@ -0,0 +1,74 @@
[package]
name = "theseus"
version = "0.7.2"
authors = ["Jai A <jaiagr+gpg@pm.me>"]
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
theseus_macros = { path = "../app-macros" }
bytes = "1"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
serde_ini = "0.2.0"
toml = "0.8.12"
sha1_smol = { version = "1.0.0", features = ["std"] }
sha2 = "0.10.8"
url = "2.2"
uuid = { version = "1.1", features = ["serde", "v4"] }
zip = "0.6.5"
async_zip = { version = "0.0.17", features = ["full"] }
flate2 = "1.0.28"
tempfile = "3.5.0"
urlencoding = "2.1.3"
chrono = { version = "0.4.19", features = ["serde"] }
daedalus = { version = "0.1.25" }
dirs = "5.0.1"
regex = "1.5"
sys-info = "0.9.0"
sysinfo = "0.30.8"
thiserror = "1.0"
tracing = "0.1.37"
tracing-subscriber = { version = "0.3.18", features = ["chrono", "env-filter"] }
tracing-error = "0.2.0"
tracing-appender = "0.2.3"
paste = { version = "1.0" }
tauri = { version = "1.6.1", optional = true }
indicatif = { version = "0.17.3", optional = true }
async-tungstenite = { version = "0.25.1", features = ["tokio-runtime", "tokio-native-tls"] }
futures = "0.3"
reqwest = { version = "0.12.3", features = ["json", "stream", "deflate", "gzip", "brotli"] }
tokio = { version = "1", features = ["full"] }
tokio-stream = { version = "0.1", features = ["fs"] }
async-recursion = "1.0.4"
notify = { version = "6.1.1", default-features = false }
notify-debouncer-mini = { version = "0.4.1", default-features = false }
lazy_static = "1.4.0"
dunce = "1.0.3"
whoami = "1.4.0"
discord-rich-presence = "0.2.3"
p256 = { version = "0.13.2", features = ["ecdsa"] }
rand = "0.8"
byteorder = "1.5.0"
base64 = "0.22.0"
[target.'cfg(windows)'.dependencies]
winreg = "0.52.0"
[features]
tauri = ["dep:tauri"]
cli = ["dep:indicatif"]
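
The `tauri` and `cli` features above gate the optional `tauri` and `indicatif` dependencies. A hedged sketch (not part of this commit; both function names are hypothetical) of how crate code is typically gated on them:

#[cfg(feature = "tauri")]
pub fn attach_app_handle(app: &tauri::AppHandle) {
    // Hypothetical integration point; only compiled with `--features tauri`.
    let _ = app;
}

#[cfg(feature = "cli")]
pub fn progress_bar(total: u64) -> indicatif::ProgressBar {
    // Hypothetical helper; only compiled with `--features cli`.
    indicatif::ProgressBar::new(total)
}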

Binary file not shown.

View File

@@ -0,0 +1,22 @@
public final class JavaInfo {
private static final String[] CHECKED_PROPERTIES = new String[] {
"os.arch",
"java.version"
};
public static void main(String[] args) {
int returnCode = 0;
for (String key : CHECKED_PROPERTIES) {
String property = System.getProperty(key);
if (property != null) {
System.out.println(key + "=" + property);
} else {
returnCode = 1;
}
}
System.exit(returnCode);
}
}
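
A minimal sketch (not part of this commit) of how the launcher side might run this probe against a candidate Java binary and parse its key=value output; the helper name and classpath argument are hypothetical:

use std::process::Command;

// Hypothetical helper: run JavaInfo with a candidate java binary and collect the
// two printed properties. Returns None if the probe fails or output is incomplete.
fn probe_java(java_bin: &str, classpath: &str) -> Option<(String, String)> {
    let output = Command::new(java_bin)
        .args(["-cp", classpath, "JavaInfo"])
        .output()
        .ok()?;
    if !output.status.success() {
        // JavaInfo exits with a non-zero code when a property is missing
        return None;
    }
    let stdout = String::from_utf8_lossy(&output.stdout);
    let mut arch = None;
    let mut version = None;
    for line in stdout.lines() {
        match line.split_once('=') {
            Some(("os.arch", value)) => arch = Some(value.to_string()),
            Some(("java.version", value)) => version = Some(value.to_string()),
            _ => {}
        }
    }
    Some((arch?, version?))
}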

View File

@@ -0,0 +1,75 @@
use std::path::PathBuf;
use crate::{
event::{
emit::{emit_command, emit_warning},
CommandPayload,
},
util::io,
};
/// Handles external commands (such as those arriving via URL deep links)
/// `sublink` is the extracted link value in URL-like form, such as
/// subdomain1/subdomain2
/// (it does not include the modrinth:// prefix)
pub async fn handle_url(sublink: &str) -> crate::Result<CommandPayload> {
Ok(match sublink.split_once('/') {
// /mod/{id} - Installs a mod of mod id
Some(("mod", id)) => CommandPayload::InstallMod { id: id.to_string() },
// /version/{id} - Installs a specific version of id
Some(("version", id)) => {
CommandPayload::InstallVersion { id: id.to_string() }
}
// /modpack/{id} - Installs a modpack of modpack id
Some(("modpack", id)) => {
CommandPayload::InstallModpack { id: id.to_string() }
}
_ => {
emit_warning(&format!(
"Invalid command, unrecognized path: {sublink}"
))
.await?;
return Err(crate::ErrorKind::InputError(format!(
"Invalid command, unrecognized path: {sublink}"
))
.into());
}
})
}
pub async fn parse_command(
command_string: &str,
) -> crate::Result<CommandPayload> {
tracing::debug!("Parsing command: {}", &command_string);
// modrinth://some-command
// This occurs when following a web redirect link
if let Some(sublink) = command_string.strip_prefix("modrinth://") {
Ok(handle_url(sublink).await?)
} else {
// We assume anything else is a filepath to an .mrpack file
let path = PathBuf::from(command_string);
let path = io::canonicalize(path)?;
if let Some(ext) = path.extension() {
if ext == "mrpack" {
return Ok(CommandPayload::RunMRPack { path });
}
}
emit_warning(&format!(
"Invalid command, unrecognized filetype: {}",
path.display()
))
.await?;
Err(crate::ErrorKind::InputError(format!(
"Invalid command, unrecognized filetype: {}",
path.display()
))
.into())
}
}
pub async fn parse_and_emit_command(command_string: &str) -> crate::Result<()> {
let command = parse_command(command_string).await?;
emit_command(command).await?;
Ok(())
}
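
A brief usage sketch (not part of this commit), written against the functions above; the link and file path are illustrative, and `CommandPayload` is the import from `crate::event` shown at the top of the file:

async fn demo_commands() -> crate::Result<()> {
    // A deep link resolves to an install payload
    let payload = parse_command("modrinth://mod/sodium").await?;
    assert!(matches!(payload, CommandPayload::InstallMod { .. }));

    // Anything that is not a modrinth:// link is treated as a path to an .mrpack
    // file; the path must exist because it is canonicalized first.
    let payload = parse_command("/tmp/example.mrpack").await?;
    assert!(matches!(payload, CommandPayload::RunMRPack { .. }));
    Ok(())
}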

View File

@@ -0,0 +1,174 @@
//! JRE discovery and installation interface
use reqwest::Method;
use serde::Deserialize;
use std::path::PathBuf;
use crate::event::emit::{emit_loading, init_loading};
use crate::state::CredentialsStore;
use crate::util::fetch::{fetch_advanced, fetch_json};
use crate::util::io;
use crate::util::jre::extract_java_majorminor_version;
use crate::{
util::jre::{self, JavaVersion},
LoadingBarType, State,
};
// Searches the system for JREs matching the given major Java version (e.g. 8, 17, 18)
// If no version is given, all detected JREs are returned
pub async fn find_filtered_jres(
java_version: Option<u32>,
) -> crate::Result<Vec<JavaVersion>> {
let jres = jre::get_all_jre().await?;
    // Keep only the JREs whose version matches the requested Java version
Ok(if let Some(java_version) = java_version {
jres.into_iter()
.filter(|jre| {
let jre_version = extract_java_majorminor_version(&jre.version);
if let Ok(jre_version) = jre_version {
jre_version.1 == java_version
} else {
false
}
})
.collect()
} else {
jres
})
}
#[theseus_macros::debug_pin]
pub async fn auto_install_java(java_version: u32) -> crate::Result<PathBuf> {
let state = State::get().await?;
let loading_bar = init_loading(
LoadingBarType::JavaDownload {
version: java_version,
},
100.0,
"Downloading java version",
)
.await?;
#[derive(Deserialize)]
struct Package {
pub download_url: String,
pub name: PathBuf,
}
emit_loading(&loading_bar, 0.0, Some("Fetching java version")).await?;
let packages = fetch_json::<Vec<Package>>(
Method::GET,
&format!(
"https://api.azul.com/metadata/v1/zulu/packages?arch={}&java_version={}&os={}&archive_type=zip&javafx_bundled=false&java_package_type=jre&page_size=1",
std::env::consts::ARCH, java_version, std::env::consts::OS
),
None,
None,
&state.fetch_semaphore,
&CredentialsStore(None),
).await?;
emit_loading(&loading_bar, 10.0, Some("Downloading java version")).await?;
if let Some(download) = packages.first() {
let file = fetch_advanced(
Method::GET,
&download.download_url,
None,
None,
None,
Some((&loading_bar, 80.0)),
&state.fetch_semaphore,
&CredentialsStore(None),
)
.await?;
let path = state.directories.java_versions_dir().await;
let mut archive = zip::ZipArchive::new(std::io::Cursor::new(file))
.map_err(|_| {
crate::Error::from(crate::ErrorKind::InputError(
"Failed to read java zip".to_string(),
))
})?;
// removes the old installation of java
if let Some(file) = archive.file_names().next() {
if let Some(dir) = file.split('/').next() {
let path = path.join(dir);
if path.exists() {
io::remove_dir_all(path).await?;
}
}
}
emit_loading(&loading_bar, 0.0, Some("Extracting java")).await?;
archive.extract(&path).map_err(|_| {
crate::Error::from(crate::ErrorKind::InputError(
"Failed to extract java zip".to_string(),
))
})?;
emit_loading(&loading_bar, 10.0, Some("Done extracting java")).await?;
let mut base_path = path.join(
download
.name
.file_stem()
.unwrap_or_default()
.to_string_lossy()
.to_string(),
);
#[cfg(target_os = "macos")]
{
base_path = base_path
.join(format!("zulu-{}.jre", java_version))
.join("Contents")
.join("Home")
.join("bin")
.join("java")
}
#[cfg(not(target_os = "macos"))]
{
base_path = base_path.join("bin").join(jre::JAVA_BIN)
}
Ok(base_path)
} else {
Err(crate::ErrorKind::LauncherError(format!(
"No Java Version found for Java version {}, OS {}, and Architecture {}",
java_version, std::env::consts::OS, std::env::consts::ARCH,
)).into())
}
}
// Validates the JRE at a given path
pub async fn check_jre(path: PathBuf) -> crate::Result<Option<JavaVersion>> {
Ok(jre::check_java_at_filepath(&path).await)
}
// Tests whether the JRE at the given path matches the given major version
pub async fn test_jre(
path: PathBuf,
major_version: u32,
) -> crate::Result<bool> {
let jre = match jre::check_java_at_filepath(&path).await {
Some(jre) => jre,
None => return Ok(false),
};
let (major, _) = extract_java_majorminor_version(&jre.version)?;
Ok(major == major_version)
}
// Gets maximum memory in KiB.
pub async fn get_max_memory() -> crate::Result<u64> {
Ok(sys_info::mem_info()
.map_err(|_| {
crate::Error::from(crate::ErrorKind::LauncherError(
"Unable to get computer memory".to_string(),
))
})?
.total)
}
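
A usage sketch (not part of this commit) tying the functions above together; it assumes `JavaVersion` (from `util::jre`) exposes a `path` field, which is not shown in this diff:

// Hedged sketch: prefer an already-detected Java 17, otherwise auto-install one.
async fn pick_java_17() -> crate::Result<std::path::PathBuf> {
    let installed = find_filtered_jres(Some(17)).await?;
    let java_bin = match installed.into_iter().next() {
        // `path` field assumed; adjust to the real JavaVersion definition.
        Some(jre) => std::path::PathBuf::from(jre.path),
        None => auto_install_java(17).await?,
    };
    // Sanity-check that the chosen binary really is a Java 17 runtime.
    if test_jre(java_bin.clone(), 17).await? {
        Ok(java_bin)
    } else {
        Err(crate::ErrorKind::LauncherError(
            "Selected Java binary is not a Java 17 runtime".to_string(),
        )
        .into())
    }
}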

View File

@@ -0,0 +1,372 @@
use std::io::{Read, SeekFrom};
use std::time::SystemTime;
use futures::TryFutureExt;
use serde::{Deserialize, Serialize};
use tokio::{
fs::File,
io::{AsyncReadExt, AsyncSeekExt},
};
use crate::{
prelude::{Credentials, DirectoryInfo},
util::io::{self, IOError},
{state::ProfilePathId, State},
};
#[derive(Serialize, Debug)]
pub struct Logs {
pub log_type: LogType,
pub filename: String,
pub age: u64,
pub output: Option<CensoredString>,
}
#[derive(Serialize, Deserialize, Debug, Clone, Copy, Eq, PartialEq)]
pub enum LogType {
InfoLog,
CrashReport,
}
#[derive(Serialize, Debug)]
pub struct LatestLogCursor {
pub cursor: u64,
pub output: CensoredString,
pub new_file: bool,
}
#[derive(Serialize, Debug)] // Not deserialize
#[serde(transparent)]
pub struct CensoredString(String);
impl CensoredString {
pub fn censor(mut s: String, credentials_set: &Vec<Credentials>) -> Self {
let username = whoami::username();
s = s
.replace(&format!("/{}/", username), "/{COMPUTER_USERNAME}/")
.replace(&format!("\\{}\\", username), "\\{COMPUTER_USERNAME}\\");
for credentials in credentials_set {
s = s
.replace(&credentials.access_token, "{MINECRAFT_ACCESS_TOKEN}")
.replace(&credentials.username, "{MINECRAFT_USERNAME}")
.replace(
&credentials.id.as_simple().to_string(),
"{MINECRAFT_UUID}",
)
.replace(
&credentials.id.as_hyphenated().to_string(),
"{MINECRAFT_UUID}",
);
}
Self(s)
}
}
impl Logs {
async fn build(
log_type: LogType,
age: SystemTime,
profile_subpath: &ProfilePathId,
filename: String,
clear_contents: Option<bool>,
) -> crate::Result<Self> {
Ok(Self {
log_type,
age: age
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap_or_else(|_| std::time::Duration::from_secs(0))
.as_secs(),
output: if clear_contents.unwrap_or(false) {
None
} else {
Some(
get_output_by_filename(
profile_subpath,
log_type,
&filename,
)
.await?,
)
},
filename,
})
}
}
#[tracing::instrument]
pub async fn get_logs_from_type(
profile_path: &ProfilePathId,
log_type: LogType,
clear_contents: Option<bool>,
logs: &mut Vec<crate::Result<Logs>>,
) -> crate::Result<()> {
let logs_folder = match log_type {
LogType::InfoLog => {
DirectoryInfo::profile_logs_dir(profile_path).await?
}
LogType::CrashReport => {
DirectoryInfo::crash_reports_dir(profile_path).await?
}
};
if logs_folder.exists() {
for entry in std::fs::read_dir(&logs_folder)
.map_err(|e| IOError::with_path(e, &logs_folder))?
{
let entry: std::fs::DirEntry =
entry.map_err(|e| IOError::with_path(e, &logs_folder))?;
let age = entry.metadata()?.created().unwrap_or_else(|_| SystemTime::UNIX_EPOCH);
let path = entry.path();
if !path.is_file() {
continue;
}
if let Some(file_name) = path.file_name() {
let file_name = file_name.to_string_lossy().to_string();
logs.push(
Logs::build(
log_type,
age,
&profile_path,
file_name,
clear_contents,
)
.await,
);
}
}
}
Ok(())
}
#[tracing::instrument]
pub async fn get_logs(
profile_path_id: ProfilePathId,
clear_contents: Option<bool>,
) -> crate::Result<Vec<Logs>> {
let profile_path = profile_path_id.profile_path().await?;
let mut logs = Vec::new();
get_logs_from_type(
&profile_path,
LogType::InfoLog,
clear_contents,
&mut logs,
)
.await?;
get_logs_from_type(
&profile_path,
LogType::CrashReport,
clear_contents,
&mut logs,
)
.await?;
let mut logs = logs.into_iter().collect::<crate::Result<Vec<Logs>>>()?;
logs.sort_by(|a, b| b.age.cmp(&a.age).then(b.filename.cmp(&a.filename)));
Ok(logs)
}
#[tracing::instrument]
pub async fn get_logs_by_filename(
profile_path_id: ProfilePathId,
log_type: LogType,
filename: String,
) -> crate::Result<Logs> {
let profile_path = profile_path_id.profile_path().await?;
let path = match log_type {
LogType::InfoLog => {
DirectoryInfo::profile_logs_dir(&profile_path).await
}
LogType::CrashReport => {
DirectoryInfo::crash_reports_dir(&profile_path).await
}
}?
.join(&filename);
let metadata = std::fs::metadata(&path)?;
let age = metadata.created().unwrap_or_else(|_| SystemTime::UNIX_EPOCH);
Logs::build(log_type, age, &profile_path, filename, Some(true)).await
}
#[tracing::instrument]
pub async fn get_output_by_filename(
profile_subpath: &ProfilePathId,
log_type: LogType,
file_name: &str,
) -> crate::Result<CensoredString> {
let state = State::get().await?;
let logs_folder = match log_type {
LogType::InfoLog => {
DirectoryInfo::profile_logs_dir(profile_subpath).await?
}
LogType::CrashReport => {
DirectoryInfo::crash_reports_dir(profile_subpath).await?
}
};
let path = logs_folder.join(file_name);
let credentials: Vec<Credentials> = state
.users
.read()
.await
.users
.clone()
.into_values()
.collect();
    // Load the log file into a String, decompressing it first if it is a .gz file
if let Some(ext) = path.extension() {
if ext == "gz" {
let file = std::fs::File::open(&path)
.map_err(|e| IOError::with_path(e, &path))?;
let mut contents = [0; 1024];
let mut result = String::new();
let mut gz =
flate2::read::GzDecoder::new(std::io::BufReader::new(file));
            loop {
                let bytes_read = gz
                    .read(&mut contents)
                    .map_err(|e| IOError::with_path(e, &path))?;
                if bytes_read == 0 {
                    break;
                }
                // Only append the bytes that were actually read this iteration
                result.push_str(&String::from_utf8_lossy(
                    &contents[..bytes_read],
                ));
            }
return Ok(CensoredString::censor(result, &credentials));
} else if ext == "log" || ext == "txt" {
let mut result = String::new();
let mut contents = [0; 1024];
let mut file = std::fs::File::open(&path)
.map_err(|e| IOError::with_path(e, &path))?;
            // Iteratively read the file into a String
            loop {
                let bytes_read = file
                    .read(&mut contents)
                    .map_err(|e| IOError::with_path(e, &path))?;
                if bytes_read == 0 {
                    break;
                }
                // Only append the bytes that were actually read this iteration
                result.push_str(&String::from_utf8_lossy(
                    &contents[..bytes_read],
                ));
            }
let result = CensoredString::censor(result, &credentials);
return Ok(result);
}
}
Err(crate::ErrorKind::OtherError(format!(
"File extension not supported: {}",
path.display()
))
.into())
}
#[tracing::instrument]
pub async fn delete_logs(profile_path_id: ProfilePathId) -> crate::Result<()> {
let profile_path = profile_path_id.profile_path().await?;
let logs_folder = DirectoryInfo::profile_logs_dir(&profile_path).await?;
for entry in std::fs::read_dir(&logs_folder)
.map_err(|e| IOError::with_path(e, &logs_folder))?
{
let entry = entry.map_err(|e| IOError::with_path(e, &logs_folder))?;
let path = entry.path();
if path.is_dir() {
io::remove_dir_all(&path).await?;
}
}
Ok(())
}
#[tracing::instrument]
pub async fn delete_logs_by_filename(
profile_path_id: ProfilePathId,
log_type: LogType,
filename: &str,
) -> crate::Result<()> {
let profile_path = profile_path_id.profile_path().await?;
let logs_folder = match log_type {
LogType::InfoLog => {
DirectoryInfo::profile_logs_dir(&profile_path).await
}
LogType::CrashReport => {
DirectoryInfo::crash_reports_dir(&profile_path).await
}
}?;
let path = logs_folder.join(filename);
io::remove_dir_all(&path).await?;
Ok(())
}
#[tracing::instrument]
pub async fn get_latest_log_cursor(
profile_path: ProfilePathId,
cursor: u64, // 0 to start at beginning of file
) -> crate::Result<LatestLogCursor> {
get_generic_live_log_cursor(profile_path, "latest.log", cursor).await
}
#[tracing::instrument]
pub async fn get_generic_live_log_cursor(
profile_path_id: ProfilePathId,
log_file_name: &str,
mut cursor: u64, // 0 to start at beginning of file
) -> crate::Result<LatestLogCursor> {
let profile_path = profile_path_id.profile_path().await?;
let state = State::get().await?;
let logs_folder = DirectoryInfo::profile_logs_dir(&profile_path).await?;
let path = logs_folder.join(log_file_name);
if !path.exists() {
// Allow silent failure if latest.log doesn't exist (as the instance may have been launched, but not yet created the file)
return Ok(LatestLogCursor {
cursor: 0,
new_file: false,
output: CensoredString("".to_string()),
});
}
let mut file = File::open(&path)
.await
.map_err(|e| IOError::with_path(e, &path))?;
let metadata = file
.metadata()
.await
.map_err(|e| IOError::with_path(e, &path))?;
let mut new_file = false;
if cursor > metadata.len() {
// Cursor is greater than file length, reset cursor to 0
// Likely cause is that the file was rotated while the log was being read
cursor = 0;
new_file = true;
}
let mut buffer = Vec::new();
file.seek(SeekFrom::Start(cursor))
.map_err(|e| IOError::with_path(e, &path))
.await?; // Seek to cursor
let bytes_read = file
.read_to_end(&mut buffer)
.map_err(|e| IOError::with_path(e, &path))
.await?; // Read to end of file
let output = String::from_utf8_lossy(&buffer).to_string(); // Convert to String
let cursor = cursor + bytes_read as u64; // Update cursor
let credentials: Vec<Credentials> = state
.users
.read()
.await
.users
.clone()
.into_values()
.collect();
let output = CensoredString::censor(output, &credentials);
Ok(LatestLogCursor {
cursor,
new_file,
output,
})
}
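
A usage sketch (not part of this commit) of polling the live-log cursor above; obtaining the `ProfilePathId` and forwarding output to a frontend are left abstract:

// Hedged sketch: tail latest.log for a profile, resuming from the returned cursor.
async fn tail_latest_log(profile: ProfilePathId) -> crate::Result<()> {
    let mut cursor = 0;
    loop {
        let chunk = get_latest_log_cursor(profile.clone(), cursor).await?;
        // `new_file` is set when latest.log was rotated and reading restarted from
        // the top; `output` holds the newly appended, already-censored text.
        // Forward `chunk.output` to the frontend here.
        cursor = chunk.cursor;
        tokio::time::sleep(std::time::Duration::from_millis(500)).await;
    }
}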

View File

@@ -0,0 +1,43 @@
use crate::State;
pub use daedalus::minecraft::VersionManifest;
pub use daedalus::modded::Manifest;
#[tracing::instrument]
pub async fn get_minecraft_versions() -> crate::Result<VersionManifest> {
let state = State::get().await?;
let tags = state.metadata.read().await.minecraft.clone();
Ok(tags)
}
#[tracing::instrument]
pub async fn get_fabric_versions() -> crate::Result<Manifest> {
let state = State::get().await?;
let tags = state.metadata.read().await.fabric.clone();
Ok(tags)
}
#[tracing::instrument]
pub async fn get_forge_versions() -> crate::Result<Manifest> {
let state = State::get().await?;
let tags = state.metadata.read().await.forge.clone();
Ok(tags)
}
#[tracing::instrument]
pub async fn get_quilt_versions() -> crate::Result<Manifest> {
let state = State::get().await?;
let tags = state.metadata.read().await.quilt.clone();
Ok(tags)
}
#[tracing::instrument]
pub async fn get_neoforge_versions() -> crate::Result<Manifest> {
let state = State::get().await?;
let tags = state.metadata.read().await.neoforge.clone();
Ok(tags)
}
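
A short usage sketch (not part of this commit); the manifest layout follows `daedalus::minecraft::VersionManifest`, so the `versions` field below is an assumption not shown in this diff:

async fn log_version_count() -> crate::Result<()> {
    let manifest = get_minecraft_versions().await?;
    // `versions` is assumed from daedalus' manifest type.
    tracing::info!("{} Minecraft versions in cached metadata", manifest.versions.len());
    Ok(())
}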

View File

@@ -0,0 +1,76 @@
//! Authentication flow interface
use crate::state::{Credentials, MinecraftLoginFlow};
use crate::State;
#[tracing::instrument]
pub async fn begin_login() -> crate::Result<MinecraftLoginFlow> {
let state = State::get().await?;
let mut users = state.users.write().await;
users.login_begin().await
}
#[tracing::instrument]
pub async fn finish_login(
code: &str,
flow: MinecraftLoginFlow,
) -> crate::Result<Credentials> {
let state = State::get().await?;
let mut users = state.users.write().await;
users.login_finish(code, flow).await
}
#[tracing::instrument]
pub async fn get_default_user() -> crate::Result<Option<uuid::Uuid>> {
let state = State::get().await?;
let users = state.users.read().await;
Ok(users.default_user)
}
#[tracing::instrument]
pub async fn set_default_user(user: uuid::Uuid) -> crate::Result<()> {
let user = get_user(user).await?;
let state = State::get().await?;
let mut users = state.users.write().await;
users.default_user = Some(user.id);
users.save().await?;
Ok(())
}
/// Remove a user account from the database
#[tracing::instrument]
pub async fn remove_user(user: uuid::Uuid) -> crate::Result<()> {
let state = State::get().await?;
let mut users = state.users.write().await;
users.remove(user).await?;
Ok(())
}
/// Get a copy of the list of all user credentials
#[tracing::instrument]
pub async fn users() -> crate::Result<Vec<Credentials>> {
let state = State::get().await?;
let users = state.users.read().await;
Ok(users.users.values().cloned().collect())
}
/// Get a specific user by user ID
/// Prefer to use 'refresh' instead of this function
#[tracing::instrument]
pub async fn get_user(user: uuid::Uuid) -> crate::Result<Credentials> {
let state = State::get().await?;
let users = state.users.read().await;
let user = users
.users
.get(&user)
.ok_or_else(|| {
crate::ErrorKind::OtherError(format!(
"Tried to get nonexistent user with ID {user}"
))
.as_error()
})?
.clone();
Ok(user)
}
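
A usage sketch (not part of this commit) of the login flow above; opening the Microsoft OAuth page and obtaining the returned authorization code are left to the caller:

// Hedged sketch: complete a Minecraft login and make it the default account.
async fn login_with_code(auth_code: &str) -> crate::Result<Credentials> {
    let flow = begin_login().await?;
    // The caller opens the OAuth URL carried by `flow` in a browser and passes
    // the resulting authorization code back in here.
    let creds = finish_login(auth_code, flow).await?;
    set_default_user(creds.id).await?;
    Ok(creds)
}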

View File

@@ -0,0 +1,40 @@
//! API for interacting with Theseus
pub mod handler;
pub mod jre;
pub mod logs;
pub mod metadata;
pub mod minecraft_auth;
pub mod mr_auth;
pub mod pack;
pub mod process;
pub mod profile;
pub mod safety;
pub mod settings;
pub mod tags;
pub mod data {
pub use crate::state::{
Credentials, DirectoryInfo, Hooks, JavaSettings, LinkedData,
MemorySettings, ModLoader, ModrinthCredentials,
ModrinthCredentialsResult, ModrinthProject, ModrinthTeamMember,
ModrinthUser, ModrinthVersion, ProfileMetadata, ProjectMetadata,
Settings, Theme, WindowSize,
};
}
pub mod prelude {
pub use crate::{
data::*,
event::CommandPayload,
jre, metadata, minecraft_auth, pack, process,
profile::{self, create, Profile},
settings,
state::JavaGlobals,
state::{Dependency, ProfilePathId, ProjectPathId},
util::{
io::{canonicalize, IOError},
jre::JavaVersion,
},
State,
};
}
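
A small consumer-side sketch (not part of this commit); it assumes the crate root re-exports this api module and a `Result` alias, as the `crate::` paths in the prelude imply:

use theseus::prelude::*;

async fn boot() -> theseus::Result<()> {
    // State::get() returns the shared launcher state used by every API call above.
    let _state = State::get().await?;
    Ok(())
}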

View File

@@ -0,0 +1,144 @@
use crate::state::{
ModrinthAuthFlow, ModrinthCredentials, ModrinthCredentialsResult,
};
use crate::ErrorKind;
#[tracing::instrument]
pub async fn authenticate_begin_flow(provider: &str) -> crate::Result<String> {
let state = crate::State::get().await?;
    // Don't start an uncompletable new flow if there's an existing locked one
let mut write: tokio::sync::RwLockWriteGuard<'_, Option<ModrinthAuthFlow>> =
state.modrinth_auth_flow.write().await;
let mut flow = ModrinthAuthFlow::new(provider).await?;
let url = flow.prepare_login_url().await?;
*write = Some(flow);
Ok(url)
}
#[tracing::instrument]
pub async fn authenticate_await_complete_flow(
) -> crate::Result<ModrinthCredentialsResult> {
let state = crate::State::get().await?;
let mut write = state.modrinth_auth_flow.write().await;
if let Some(ref mut flow) = *write {
let creds = flow.extract_credentials(&state.fetch_semaphore).await?;
if let ModrinthCredentialsResult::Credentials(creds) = &creds {
let mut write = state.credentials.write().await;
write.login(creds.clone()).await?;
}
Ok(creds)
} else {
Err(ErrorKind::OtherError(
"No active Modrinth authenication flow!".to_string(),
)
.into())
}
}
#[tracing::instrument]
pub async fn cancel_flow() -> crate::Result<()> {
let state = crate::State::get().await?;
let mut write = state.modrinth_auth_flow.write().await;
if let Some(ref mut flow) = *write {
flow.close().await?;
}
*write = None;
Ok(())
}
pub async fn login_password(
username: &str,
password: &str,
challenge: &str,
) -> crate::Result<ModrinthCredentialsResult> {
let state = crate::State::get().await?;
let creds = crate::state::login_password(
username,
password,
challenge,
&state.fetch_semaphore,
)
.await?;
if let ModrinthCredentialsResult::Credentials(creds) = &creds {
let mut write = state.credentials.write().await;
write.login(creds.clone()).await?;
}
Ok(creds)
}
#[tracing::instrument]
pub async fn login_2fa(
code: &str,
flow: &str,
) -> crate::Result<ModrinthCredentials> {
let state = crate::State::get().await?;
let creds =
crate::state::login_2fa(code, flow, &state.fetch_semaphore).await?;
let mut write = state.credentials.write().await;
write.login(creds.clone()).await?;
Ok(creds)
}
#[tracing::instrument]
pub async fn create_account(
username: &str,
email: &str,
password: &str,
challenge: &str,
sign_up_newsletter: bool,
) -> crate::Result<ModrinthCredentials> {
let state = crate::State::get().await?;
let creds = crate::state::create_account(
username,
email,
password,
challenge,
sign_up_newsletter,
&state.fetch_semaphore,
)
.await?;
let mut write = state.credentials.write().await;
write.login(creds.clone()).await?;
Ok(creds)
}
#[tracing::instrument]
pub async fn refresh() -> crate::Result<()> {
let state = crate::State::get().await?;
let mut write = state.credentials.write().await;
crate::state::refresh_credentials(&mut write, &state.fetch_semaphore)
.await?;
Ok(())
}
#[tracing::instrument]
pub async fn logout() -> crate::Result<()> {
let state = crate::State::get().await?;
let mut write = state.credentials.write().await;
write.logout().await?;
Ok(())
}
#[tracing::instrument]
pub async fn get_credentials() -> crate::Result<Option<ModrinthCredentials>> {
let state = crate::State::get().await?;
let read = state.credentials.read().await;
Ok(read.0.clone())
}
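
A usage sketch (not part of this commit) for the browser-based flow above; the provider string is illustrative, and handling of non-credential results is left schematic:

async fn modrinth_sign_in() -> crate::Result<()> {
    let url = authenticate_begin_flow("github").await?;
    tracing::info!("Open {url} in a browser to continue the Modrinth login");
    match authenticate_await_complete_flow().await? {
        ModrinthCredentialsResult::Credentials(_creds) => {
            // Credentials were already persisted by the call above.
        }
        _ => {
            // e.g. two-factor authentication required; continue with login_2fa().
        }
    }
    Ok(())
}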

View File

@@ -0,0 +1,266 @@
use std::{collections::HashMap, path::PathBuf};
use serde::{Deserialize, Serialize};
use crate::{
pack::{
self,
import::{self, copy_dotminecraft},
install_from::CreatePackDescription,
},
prelude::{ModLoader, Profile, ProfilePathId},
state::{LinkedData, ProfileInstallStage},
util::io,
State,
};
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ATInstance {
pub id: String, // minecraft version id ie: 1.12.1, not a name
pub launcher: ATLauncher,
pub java_version: ATJavaVersion,
}
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ATLauncher {
pub name: String,
pub pack: String,
pub version: String, // ie: 1.6
pub loader_version: ATLauncherLoaderVersion,
pub modrinth_project: Option<ATLauncherModrinthProject>,
pub modrinth_version: Option<ATLauncherModrinthVersion>,
pub modrinth_manifest: Option<pack::install_from::PackFormat>,
}
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ATJavaVersion {
pub major_version: u8,
pub component: String,
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct ATLauncherLoaderVersion {
pub r#type: String,
pub version: String,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct ATLauncherModrinthProject {
pub id: String,
pub slug: String,
pub project_type: String,
pub team: String,
pub client_side: Option<String>,
pub server_side: Option<String>,
pub categories: Vec<String>,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct ATLauncherModrinthVersion {
pub id: String,
pub project_id: String,
pub name: String,
pub version_number: String,
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct ATLauncherModrinthVersionFile {
pub hashes: HashMap<String, String>,
pub url: String,
pub filename: String,
pub primary: bool,
pub size: u64,
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct ATLauncherModrinthVersionDependency {
pub project_id: Option<String>,
pub version_id: Option<String>,
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct ATLauncherMod {
pub name: String,
pub version: String,
pub file: String,
pub modrinth_project: Option<ATLauncherModrinthProject>,
pub modrinth_version: Option<ATLauncherModrinthVersion>,
}
// Check if the folder has an instance.json that parses
pub async fn is_valid_atlauncher(instance_folder: PathBuf) -> bool {
let instance: String =
io::read_to_string(&instance_folder.join("instance.json"))
.await
.unwrap_or("".to_string());
let instance: Result<ATInstance, serde_json::Error> =
serde_json::from_str::<ATInstance>(&instance);
if let Err(e) = instance {
tracing::warn!(
"Could not parse instance.json at {}: {}",
instance_folder.display(),
e
);
false
} else {
true
}
}
#[tracing::instrument]
#[theseus_macros::debug_pin]
pub async fn import_atlauncher(
atlauncher_base_path: PathBuf, // path to base atlauncher folder
instance_folder: String, // instance folder in atlauncher_base_path
profile_path: ProfilePathId, // path to profile
) -> crate::Result<()> {
let atlauncher_instance_path = atlauncher_base_path
.join("instances")
.join(instance_folder.clone());
// Load instance.json
let atinstance: String =
io::read_to_string(&atlauncher_instance_path.join("instance.json"))
.await?;
let atinstance: ATInstance =
serde_json::from_str::<ATInstance>(&atinstance)?;
// Icon path should be {instance_folder}/instance.png if it exists,
// Second possibility is ATLauncher/configs/images/{safe_pack_name}.png (safe pack name is alphanumeric lowercase)
let icon_path_primary = atlauncher_instance_path.join("instance.png");
let safe_pack_name = atinstance
.launcher
.pack
.replace(|c: char| !c.is_alphanumeric(), "")
.to_lowercase();
let icon_path_secondary = atlauncher_base_path
.join("configs")
.join("images")
.join(safe_pack_name + ".png");
let icon = match (icon_path_primary.exists(), icon_path_secondary.exists())
{
(true, _) => import::recache_icon(icon_path_primary).await?,
(_, true) => import::recache_icon(icon_path_secondary).await?,
_ => None,
};
// Create description from instance.cfg
let description = CreatePackDescription {
icon,
override_title: Some(atinstance.launcher.name.clone()),
project_id: None,
version_id: None,
existing_loading_bar: None,
profile_path: profile_path.clone(),
};
let backup_name = format!("ATLauncher-{}", instance_folder);
let minecraft_folder = atlauncher_instance_path;
import_atlauncher_unmanaged(
profile_path,
minecraft_folder,
backup_name,
description,
atinstance,
)
.await?;
Ok(())
}
async fn import_atlauncher_unmanaged(
profile_path: ProfilePathId,
minecraft_folder: PathBuf,
backup_name: String,
description: CreatePackDescription,
atinstance: ATInstance,
) -> crate::Result<()> {
let mod_loader = format!(
"\"{}\"",
atinstance.launcher.loader_version.r#type.to_lowercase()
);
let mod_loader: ModLoader = serde_json::from_str::<ModLoader>(&mod_loader)
.map_err(|_| {
crate::ErrorKind::InputError(format!(
"Could not parse mod loader type: {}",
mod_loader
))
})?;
let game_version = atinstance.id;
let loader_version = if mod_loader != ModLoader::Vanilla {
crate::profile::create::get_loader_version_from_loader(
game_version.clone(),
mod_loader,
Some(atinstance.launcher.loader_version.version.clone()),
)
.await?
} else {
None
};
// Set profile data to created default profile
crate::api::profile::edit(&profile_path, |prof| {
prof.metadata.name = description
.override_title
.clone()
.unwrap_or_else(|| backup_name.to_string());
prof.install_stage = ProfileInstallStage::PackInstalling;
prof.metadata.linked_data = Some(LinkedData {
project_id: description.project_id.clone(),
version_id: description.version_id.clone(),
locked: Some(
description.project_id.is_some()
&& description.version_id.is_some(),
),
});
prof.metadata.icon = description.icon.clone();
prof.metadata.game_version = game_version.clone();
prof.metadata.loader_version = loader_version.clone();
prof.metadata.loader = mod_loader;
async { Ok(()) }
})
.await?;
// Moves .minecraft folder over (ie: overrides such as resourcepacks, mods, etc)
let state = State::get().await?;
let loading_bar = copy_dotminecraft(
profile_path.clone(),
minecraft_folder,
&state.io_semaphore,
None,
)
.await?;
if let Some(profile_val) =
crate::api::profile::get(&profile_path, None).await?
{
crate::launcher::install_minecraft(
&profile_val,
Some(loading_bar),
false,
)
.await?;
{
let state = State::get().await?;
let mut file_watcher = state.file_watcher.write().await;
Profile::watch_fs(
&profile_val.get_profile_full_path().await?,
&mut file_watcher,
)
.await?;
}
State::sync().await?;
}
Ok(())
}
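
A usage sketch (not part of this commit) of the two entry points above; the base path, instance folder, and blank destination profile are supplied by the caller:

// Hedged sketch: instance.json is read from <base>/instances/<folder>/instance.json,
// and the icon falls back to <base>/configs/images/<safe_pack_name>.png as above.
async fn import_one_atlauncher_instance(
    atlauncher_base: PathBuf,
    instance_folder: String,
    blank_profile: ProfilePathId,
) -> crate::Result<()> {
    let instance_dir = atlauncher_base.join("instances").join(&instance_folder);
    if is_valid_atlauncher(instance_dir).await {
        import_atlauncher(atlauncher_base, instance_folder, blank_profile).await?;
    }
    Ok(())
}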

View File

@@ -0,0 +1,198 @@
use std::path::PathBuf;
use serde::{Deserialize, Serialize};
use crate::prelude::Profile;
use crate::state::CredentialsStore;
use crate::{
prelude::{ModLoader, ProfilePathId},
state::ProfileInstallStage,
util::{
fetch::{fetch, write_cached_icon},
io,
},
State,
};
use super::{copy_dotminecraft, recache_icon};
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct MinecraftInstance {
pub name: Option<String>,
pub base_mod_loader: Option<MinecraftInstanceModLoader>,
pub profile_image_path: Option<PathBuf>,
pub installed_modpack: Option<InstalledModpack>,
pub game_version: String, // Minecraft game version. Non-prioritized, use this if Vanilla
}
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct MinecraftInstanceModLoader {
pub name: String,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct InstalledModpack {
pub thumbnail_url: Option<String>,
}
// Check if folder has a minecraftinstance.json that parses
pub async fn is_valid_curseforge(instance_folder: PathBuf) -> bool {
let minecraftinstance: String =
io::read_to_string(&instance_folder.join("minecraftinstance.json"))
.await
.unwrap_or("".to_string());
let minecraftinstance: Result<MinecraftInstance, serde_json::Error> =
serde_json::from_str::<MinecraftInstance>(&minecraftinstance);
minecraftinstance.is_ok()
}
pub async fn import_curseforge(
curseforge_instance_folder: PathBuf, // instance's folder
profile_path: ProfilePathId, // path to profile
) -> crate::Result<()> {
// Load minecraftinstance.json
let minecraft_instance: String = io::read_to_string(
&curseforge_instance_folder.join("minecraftinstance.json"),
)
.await?;
let minecraft_instance: MinecraftInstance =
serde_json::from_str::<MinecraftInstance>(&minecraft_instance)?;
let override_title: Option<String> = minecraft_instance.name.clone();
let backup_name = format!(
"Curseforge-{}",
curseforge_instance_folder
.file_name()
.map(|a| a.to_string_lossy().to_string())
.unwrap_or("Unknown".to_string())
);
let state = State::get().await?;
// Recache Curseforge Icon if it exists
let mut icon = None;
if let Some(icon_path) = minecraft_instance.profile_image_path.clone() {
icon = recache_icon(icon_path).await?;
} else if let Some(InstalledModpack {
thumbnail_url: Some(thumbnail_url),
}) = minecraft_instance.installed_modpack.clone()
{
let icon_bytes = fetch(
&thumbnail_url,
None,
&state.fetch_semaphore,
&CredentialsStore(None),
)
.await?;
        // rsplit('/').next() yields the last path segment of the URL (the icon's filename)
        let filename = thumbnail_url.rsplit('/').next();
if let Some(filename) = filename {
icon = Some(
write_cached_icon(
filename,
&state.directories.caches_dir(),
icon_bytes,
&state.io_semaphore,
)
.await?,
);
}
}
// base mod loader is always None for vanilla
if let Some(instance_mod_loader) = minecraft_instance.base_mod_loader {
let game_version = minecraft_instance.game_version;
// CF allows Forge, Fabric, and Vanilla
let mut mod_loader = None;
let mut loader_version = None;
match instance_mod_loader.name.split('-').collect::<Vec<&str>>()[..] {
["forge", version] => {
mod_loader = Some(ModLoader::Forge);
loader_version = Some(version.to_string());
}
["fabric", version, _game_version] => {
mod_loader = Some(ModLoader::Fabric);
loader_version = Some(version.to_string());
}
_ => {}
}
let mod_loader = mod_loader.unwrap_or(ModLoader::Vanilla);
let loader_version = if mod_loader != ModLoader::Vanilla {
crate::profile::create::get_loader_version_from_loader(
game_version.clone(),
mod_loader,
loader_version,
)
.await?
} else {
None
};
// Set profile data to created default profile
crate::api::profile::edit(&profile_path, |prof| {
prof.metadata.name = override_title
.clone()
.unwrap_or_else(|| backup_name.to_string());
prof.install_stage = ProfileInstallStage::PackInstalling;
prof.metadata.icon = icon.clone();
prof.metadata.game_version = game_version.clone();
prof.metadata.loader_version = loader_version.clone();
prof.metadata.loader = mod_loader;
async { Ok(()) }
})
.await?;
} else {
// create a vanilla profile
crate::api::profile::edit(&profile_path, |prof| {
prof.metadata.name = override_title
.clone()
.unwrap_or_else(|| backup_name.to_string());
prof.metadata.icon = icon.clone();
prof.metadata.game_version =
minecraft_instance.game_version.clone();
prof.metadata.loader_version = None;
prof.metadata.loader = ModLoader::Vanilla;
async { Ok(()) }
})
.await?;
}
// Copy in contained folders as overrides
let state = State::get().await?;
let loading_bar = copy_dotminecraft(
profile_path.clone(),
curseforge_instance_folder,
&state.io_semaphore,
None,
)
.await?;
if let Some(profile_val) =
crate::api::profile::get(&profile_path, None).await?
{
crate::launcher::install_minecraft(
&profile_val,
Some(loading_bar),
false,
)
.await?;
{
let state = State::get().await?;
let mut file_watcher = state.file_watcher.write().await;
Profile::watch_fs(
&profile_val.get_profile_full_path().await?,
&mut file_watcher,
)
.await?;
}
State::sync().await?;
}
Ok(())
}
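
A standalone sketch (not part of this commit) mirroring the `baseModLoader.name` split above, with illustrative inputs, to document the convention the importer relies on:

fn parse_cf_loader(name: &str) -> (&'static str, Option<&str>) {
    // e.g. "forge-47.2.20"        -> ("forge", Some("47.2.20"))
    //      "fabric-0.15.7-1.20.4" -> ("fabric", Some("0.15.7"))
    //      anything else          -> treated as vanilla
    match name.split('-').collect::<Vec<&str>>()[..] {
        ["forge", version] => ("forge", Some(version)),
        ["fabric", version, _game_version] => ("fabric", Some(version)),
        _ => ("vanilla", None),
    }
}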

View File

@@ -0,0 +1,134 @@
use std::path::PathBuf;
use serde::{Deserialize, Serialize};
use crate::{
prelude::{ModLoader, Profile, ProfilePathId},
state::ProfileInstallStage,
util::io,
State,
};
use super::{copy_dotminecraft, recache_icon};
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct GDLauncherConfig {
pub background: Option<String>,
pub loader: GDLauncherLoader,
// pub mods: Vec<GDLauncherMod>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct GDLauncherLoader {
pub loader_type: ModLoader,
pub loader_version: Option<String>,
pub mc_version: String,
pub source: Option<String>,
pub source_name: Option<String>,
}
// Check if folder has a config.json that parses
pub async fn is_valid_gdlauncher(instance_folder: PathBuf) -> bool {
let config: String =
io::read_to_string(&instance_folder.join("config.json"))
.await
.unwrap_or("".to_string());
let config: Result<GDLauncherConfig, serde_json::Error> =
serde_json::from_str::<GDLauncherConfig>(&config);
config.is_ok()
}
pub async fn import_gdlauncher(
gdlauncher_instance_folder: PathBuf, // instance's folder
profile_path: ProfilePathId, // path to profile
) -> crate::Result<()> {
// Load config.json
let config: String =
io::read_to_string(&gdlauncher_instance_folder.join("config.json"))
.await?;
let config: GDLauncherConfig =
serde_json::from_str::<GDLauncherConfig>(&config)?;
let override_title: Option<String> = config.loader.source_name.clone();
let backup_name = format!(
"GDLauncher-{}",
gdlauncher_instance_folder
.file_name()
.map(|a| a.to_string_lossy().to_string())
.unwrap_or("Unknown".to_string())
);
// Re-cache icon
let icon = config
.background
.clone()
.map(|b| gdlauncher_instance_folder.join(b));
let icon = if let Some(icon) = icon {
recache_icon(icon).await?
} else {
None
};
let game_version = config.loader.mc_version;
let mod_loader = config.loader.loader_type;
let loader_version = config.loader.loader_version;
let loader_version = if mod_loader != ModLoader::Vanilla {
crate::profile::create::get_loader_version_from_loader(
game_version.clone(),
mod_loader,
loader_version,
)
.await?
} else {
None
};
// Set profile data to created default profile
crate::api::profile::edit(&profile_path, |prof| {
prof.metadata.name = override_title
.clone()
.unwrap_or_else(|| backup_name.to_string());
prof.install_stage = ProfileInstallStage::PackInstalling;
prof.metadata.icon = icon.clone();
prof.metadata.game_version = game_version.clone();
prof.metadata.loader_version = loader_version.clone();
prof.metadata.loader = mod_loader;
async { Ok(()) }
})
.await?;
// Copy in contained folders as overrides
let state = State::get().await?;
let loading_bar = copy_dotminecraft(
profile_path.clone(),
gdlauncher_instance_folder,
&state.io_semaphore,
None,
)
.await?;
if let Some(profile_val) =
crate::api::profile::get(&profile_path, None).await?
{
crate::launcher::install_minecraft(
&profile_val,
Some(loading_bar),
false,
)
.await?;
{
let state = State::get().await?;
let mut file_watcher = state.file_watcher.write().await;
Profile::watch_fs(
&profile_val.get_profile_full_path().await?,
&mut file_watcher,
)
.await?;
}
State::sync().await?;
}
Ok(())
}
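
A sketch (not part of this commit) of a minimal config.json accepted by `GDLauncherConfig` above; the lowercase loader name assumes `ModLoader`'s serde representation matches what the ATLauncher importer earlier in this commit implies:

fn parse_example_gdlauncher_config() -> Result<GDLauncherConfig, serde_json::Error> {
    // Keys are camelCase per the serde attributes above; `background` may be omitted.
    serde_json::from_str(
        r#"{
            "loader": {
                "loaderType": "fabric",
                "loaderVersion": "0.15.7",
                "mcVersion": "1.20.4"
            }
        }"#,
    )
}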

View File

@@ -0,0 +1,344 @@
use std::path::{Path, PathBuf};
use serde::{de, Deserialize, Serialize};
use crate::{
pack::{
import::{self, copy_dotminecraft},
install_from::{self, CreatePackDescription, PackDependency},
},
prelude::{Profile, ProfilePathId},
util::io,
State,
};
// instance.cfg
// https://github.com/PrismLauncher/PrismLauncher/blob/develop/launcher/minecraft/MinecraftInstance.cpp
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "PascalCase")]
#[serde(untagged)]
enum MMCInstanceEnum {
General(MMCInstanceGeneral),
Instance(MMCInstance),
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "PascalCase")]
struct MMCInstanceGeneral {
pub general: MMCInstance,
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "PascalCase")]
pub struct MMCInstance {
pub java_path: Option<String>,
pub jvm_args: Option<String>,
#[serde(default)]
#[serde(deserialize_with = "deserialize_optional_bool")]
pub managed_pack: Option<bool>,
#[serde(rename = "ManagedPackID")]
pub managed_pack_id: Option<String>,
pub managed_pack_type: Option<MMCManagedPackType>,
#[serde(rename = "ManagedPackVersionID")]
pub managed_pack_version_id: Option<String>,
pub managed_pack_version_name: Option<String>,
#[serde(rename = "iconKey")]
pub icon_key: Option<String>,
#[serde(rename = "name")]
pub name: Option<String>,
}
// serde_ini reads 'true' and 'false' as strings, so we need to convert them to booleans
fn deserialize_optional_bool<'de, D>(
deserializer: D,
) -> Result<Option<bool>, D::Error>
where
D: de::Deserializer<'de>,
{
let s = Option::<String>::deserialize(deserializer)?;
match s {
Some(string) => match string.as_str() {
"true" => Ok(Some(true)),
"false" => Ok(Some(false)),
_ => Err(de::Error::custom("expected 'true' or 'false'")),
},
None => Ok(None),
}
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "lowercase")]
pub enum MMCManagedPackType {
Modrinth,
Flame,
ATLauncher,
#[serde(other)]
Unknown,
}
// mmc-pack.json
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct MMCPack {
components: Vec<MMCComponent>,
format_version: u32,
}
// https://github.com/PrismLauncher/PrismLauncher/blob/develop/launcher/minecraft/Component.h
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct MMCComponent {
pub uid: String,
#[serde(default)]
pub version: Option<String>,
#[serde(default)]
pub dependency_only: bool,
#[serde(default)]
pub important: bool,
#[serde(default)]
pub disabled: bool,
pub cached_name: Option<String>,
pub cached_version: Option<String>,
#[serde(default)]
pub cached_requires: Vec<MMCComponentRequirement>,
#[serde(default)]
pub cached_conflicts: Vec<MMCComponentRequirement>,
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct MMCComponentRequirement {
pub uid: String,
pub equals_version: Option<String>,
pub suggests: Option<String>,
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "PascalCase")]
#[serde(untagged)]
enum MMCLauncherEnum {
General(MMCLauncherGeneral),
Instance(MMCLauncher),
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "PascalCase")]
struct MMCLauncherGeneral {
pub general: MMCLauncher,
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "PascalCase")]
pub struct MMCLauncher {
instance_dir: String,
}
// Checks that the folder contains an instance.cfg and an mmc-pack.json, and that both parse
#[tracing::instrument]
pub async fn is_valid_mmc(instance_folder: PathBuf) -> bool {
let instance_cfg = instance_folder.join("instance.cfg");
let mmc_pack = instance_folder.join("mmc-pack.json");
let mmc_pack = match io::read_to_string(&mmc_pack).await {
Ok(mmc_pack) => mmc_pack,
Err(_) => return false,
};
load_instance_cfg(&instance_cfg).await.is_ok()
&& serde_json::from_str::<MMCPack>(&mmc_pack).is_ok()
}
#[tracing::instrument]
pub async fn get_instances_subpath(config: PathBuf) -> Option<String> {
let launcher = io::read_to_string(&config).await.ok()?;
let launcher: MMCLauncherEnum = serde_ini::from_str(&launcher).ok()?;
match launcher {
MMCLauncherEnum::General(p) => Some(p.general.instance_dir),
MMCLauncherEnum::Instance(p) => Some(p.instance_dir),
}
}
// Loading the INI (instance.cfg) file
async fn load_instance_cfg(file_path: &Path) -> crate::Result<MMCInstance> {
let instance_cfg: String = io::read_to_string(file_path).await?;
let instance_cfg_enum: MMCInstanceEnum =
serde_ini::from_str::<MMCInstanceEnum>(&instance_cfg)?;
match instance_cfg_enum {
MMCInstanceEnum::General(instance_cfg) => Ok(instance_cfg.general),
MMCInstanceEnum::Instance(instance_cfg) => Ok(instance_cfg),
}
}
#[tracing::instrument]
#[theseus_macros::debug_pin]
pub async fn import_mmc(
mmc_base_path: PathBuf, // path to base mmc folder
instance_folder: String, // instance folder in mmc_base_path
profile_path: ProfilePathId, // path to profile
) -> crate::Result<()> {
let mmc_instance_path = mmc_base_path
.join("instances")
.join(instance_folder.clone());
let mmc_pack =
io::read_to_string(&mmc_instance_path.join("mmc-pack.json")).await?;
let mmc_pack: MMCPack = serde_json::from_str::<MMCPack>(&mmc_pack)?;
let instance_cfg =
load_instance_cfg(&mmc_instance_path.join("instance.cfg")).await?;
// Re-cache icon
let icon = if let Some(icon_key) = instance_cfg.icon_key {
let icon_path = mmc_base_path.join("icons").join(icon_key);
import::recache_icon(icon_path).await?
} else {
None
};
// Create description from instance.cfg
let description = CreatePackDescription {
icon,
override_title: instance_cfg.name,
project_id: instance_cfg.managed_pack_id,
version_id: instance_cfg.managed_pack_version_id,
existing_loading_bar: None,
profile_path: profile_path.clone(),
};
// Managed pack
let backup_name = "Imported Modpack".to_string();
if instance_cfg.managed_pack.unwrap_or(false) {
match instance_cfg.managed_pack_type {
Some(MMCManagedPackType::Modrinth) => {
// Modrinth Managed Pack
// Kept separate as we may in the future want to add special handling for modrinth managed packs
let backup_name = "Imported Modrinth Modpack".to_string();
let minecraft_folder = mmc_base_path.join("instances").join(instance_folder).join(".minecraft");
import_mmc_unmanaged(profile_path, minecraft_folder, backup_name, description, mmc_pack).await?;
}
Some(MMCManagedPackType::Flame) | Some(MMCManagedPackType::ATLauncher) => {
// For flame/atlauncher managed packs
// Treat as unmanaged, but with 'minecraft' folder instead of '.minecraft'
let minecraft_folder = mmc_base_path.join("instances").join(instance_folder).join("minecraft");
import_mmc_unmanaged(profile_path, minecraft_folder, backup_name, description, mmc_pack).await?;
},
Some(_) => {
// For managed packs that aren't modrinth, flame, atlauncher
// Treat as unmanaged
let backup_name = "ImportedModpack".to_string();
let minecraft_folder = mmc_base_path.join("instances").join(instance_folder).join(".minecraft");
import_mmc_unmanaged(profile_path, minecraft_folder, backup_name, description, mmc_pack).await?;
},
_ => return Err(crate::ErrorKind::InputError({
"Instance is managed, but managed pack type not specified in instance.cfg".to_string()
}).into())
}
} else {
        // Directly import the unmanaged pack
let backup_name = "Imported Modpack".to_string();
let minecraft_folder = mmc_base_path
.join("instances")
.join(instance_folder)
.join(".minecraft");
import_mmc_unmanaged(
profile_path,
minecraft_folder,
backup_name,
description,
mmc_pack,
)
.await?;
}
Ok(())
}
async fn import_mmc_unmanaged(
profile_path: ProfilePathId,
minecraft_folder: PathBuf,
backup_name: String,
description: CreatePackDescription,
mmc_pack: MMCPack,
) -> crate::Result<()> {
    // Pack dependencies are stored in mmc-pack.json; convert them to .mrpack pack dependencies
let dependencies = mmc_pack
.components
.iter()
.filter_map(|component| {
if component.uid.starts_with("net.fabricmc.fabric-loader") {
return Some((
PackDependency::FabricLoader,
component.version.clone().unwrap_or_default(),
));
}
if component.uid.starts_with("net.minecraftforge") {
return Some((
PackDependency::Forge,
component.version.clone().unwrap_or_default(),
));
}
if component.uid.starts_with("org.quiltmc.quilt-loader") {
return Some((
PackDependency::QuiltLoader,
component.version.clone().unwrap_or_default(),
));
}
if component.uid.starts_with("net.minecraft") {
return Some((
PackDependency::Minecraft,
component.version.clone().unwrap_or_default(),
));
}
None
})
.collect();
// Sets profile information to be that loaded from mmc-pack.json and instance.cfg
install_from::set_profile_information(
profile_path.clone(),
&description,
&backup_name,
&dependencies,
false,
)
.await?;
// Moves .minecraft folder over (ie: overrides such as resourcepacks, mods, etc)
let state = State::get().await?;
let loading_bar = copy_dotminecraft(
profile_path.clone(),
minecraft_folder,
&state.io_semaphore,
None,
)
.await?;
if let Some(profile_val) =
crate::api::profile::get(&profile_path, None).await?
{
crate::launcher::install_minecraft(
&profile_val,
Some(loading_bar),
false,
)
.await?;
{
let state = State::get().await?;
let mut file_watcher = state.file_watcher.write().await;
Profile::watch_fs(
&profile_val.get_profile_full_path().await?,
&mut file_watcher,
)
.await?;
}
State::sync().await?;
}
Ok(())
}
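
A sketch (not part of this commit) of a minimal mmc-pack.json accepted by `MMCPack` above; the uids are exactly the prefixes the dependency mapping looks for:

fn parse_example_mmc_pack() -> Result<MMCPack, serde_json::Error> {
    // Omitted component fields fall back to their serde defaults.
    serde_json::from_str(
        r#"{
            "formatVersion": 1,
            "components": [
                { "uid": "net.minecraft", "version": "1.20.4" },
                { "uid": "net.fabricmc.fabric-loader", "version": "0.15.7" }
            ]
        }"#,
    )
}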

View File

@@ -0,0 +1,320 @@
use std::{
fmt,
path::{Path, PathBuf},
};
use io::IOError;
use serde::{Deserialize, Serialize};
use crate::{
event::{
emit::{emit_loading, init_or_edit_loading},
LoadingBarId,
},
prelude::ProfilePathId,
state::Profiles,
util::{
fetch::{self, IoSemaphore},
io,
},
};
pub mod atlauncher;
pub mod curseforge;
pub mod gdlauncher;
pub mod mmc;
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
pub enum ImportLauncherType {
MultiMC,
PrismLauncher,
ATLauncher,
GDLauncher,
Curseforge,
#[serde(other)]
Unknown,
}
// impl display
impl fmt::Display for ImportLauncherType {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
ImportLauncherType::MultiMC => write!(f, "MultiMC"),
ImportLauncherType::PrismLauncher => write!(f, "PrismLauncher"),
ImportLauncherType::ATLauncher => write!(f, "ATLauncher"),
ImportLauncherType::GDLauncher => write!(f, "GDLauncher"),
ImportLauncherType::Curseforge => write!(f, "Curseforge"),
ImportLauncherType::Unknown => write!(f, "Unknown"),
}
}
}
// Return a list of importable instances for a launcher type and base path, by iterating through the instances folder and checking each entry for validity
pub async fn get_importable_instances(
launcher_type: ImportLauncherType,
base_path: PathBuf,
) -> crate::Result<Vec<String>> {
// Some launchers have a different folder structure for instances
let instances_subfolder = match launcher_type {
ImportLauncherType::GDLauncher | ImportLauncherType::ATLauncher => {
"instances".to_string()
}
ImportLauncherType::Curseforge => "Instances".to_string(),
ImportLauncherType::MultiMC => {
mmc::get_instances_subpath(base_path.clone().join("multimc.cfg"))
.await
.unwrap_or_else(|| "instances".to_string())
}
ImportLauncherType::PrismLauncher => mmc::get_instances_subpath(
base_path.clone().join("prismlauncher.cfg"),
)
.await
.unwrap_or_else(|| "instances".to_string()),
ImportLauncherType::Unknown => {
return Err(crate::ErrorKind::InputError(
"Launcher type Unknown".to_string(),
)
.into())
}
};
let instances_folder = base_path.join(&instances_subfolder);
let mut instances = Vec::new();
let mut dir = io::read_dir(&instances_folder).await.map_err(| _ | {
crate::ErrorKind::InputError(format!(
"Invalid {launcher_type} launcher path, could not find '{instances_subfolder}' subfolder."
))
})?;
while let Some(entry) = dir
.next_entry()
.await
.map_err(|e| IOError::with_path(e, &instances_folder))?
{
let path = entry.path();
if path.is_dir() {
            // Check that the instance is valid for this launcher type
if is_valid_importable_instance(path.clone(), launcher_type).await {
let name = path.file_name();
if let Some(name) = name {
instances.push(name.to_string_lossy().to_string());
}
}
}
}
Ok(instances)
}
// Import an instance from a launcher type and base path
// Note: if the import fails, the submitted (blank) profile is deleted
#[theseus_macros::debug_pin]
#[tracing::instrument]
pub async fn import_instance(
profile_path: ProfilePathId, // This should be a blank profile
launcher_type: ImportLauncherType,
base_path: PathBuf,
instance_folder: String,
) -> crate::Result<()> {
tracing::debug!("Importing instance from {instance_folder}");
let res = match launcher_type {
ImportLauncherType::MultiMC | ImportLauncherType::PrismLauncher => {
mmc::import_mmc(
base_path, // path to base mmc folder
instance_folder, // instance folder in mmc_base_path
profile_path.clone(), // path to profile
)
.await
}
ImportLauncherType::ATLauncher => {
atlauncher::import_atlauncher(
base_path, // path to atlauncher folder
instance_folder, // instance folder in atlauncher
profile_path.clone(), // path to profile
)
.await
}
ImportLauncherType::GDLauncher => {
gdlauncher::import_gdlauncher(
base_path.join("instances").join(instance_folder), // path to gdlauncher folder
profile_path.clone(), // path to profile
)
.await
}
ImportLauncherType::Curseforge => {
curseforge::import_curseforge(
base_path.join("Instances").join(instance_folder), // path to curseforge folder
profile_path.clone(), // path to profile
)
.await
}
ImportLauncherType::Unknown => {
return Err(crate::ErrorKind::InputError(
"Launcher type Unknown".to_string(),
)
.into());
}
};
// If import failed, delete the profile
match res {
Ok(_) => {}
Err(e) => {
tracing::warn!("Import failed: {:?}", e);
let _ = crate::api::profile::remove(&profile_path).await;
return Err(e);
}
}
// Check existing managed packs for potential updates
tokio::task::spawn(Profiles::update_modrinth_versions());
tracing::debug!("Completed import.");
Ok(())
}
/// Returns the default path for the given launcher type
/// None if it can't be found or doesn't exist
pub fn get_default_launcher_path(
r#type: ImportLauncherType,
) -> Option<PathBuf> {
let path = match r#type {
ImportLauncherType::MultiMC => None, // multimc data is *in* app dir
ImportLauncherType::PrismLauncher => {
Some(dirs::data_dir()?.join("PrismLauncher"))
}
ImportLauncherType::ATLauncher => {
Some(dirs::data_dir()?.join("ATLauncher"))
}
ImportLauncherType::GDLauncher => {
Some(dirs::data_dir()?.join("gdlauncher_next"))
}
ImportLauncherType::Curseforge => {
Some(dirs::home_dir()?.join("curseforge").join("minecraft"))
}
ImportLauncherType::Unknown => None,
};
let path = path?;
if path.exists() {
Some(path)
} else {
None
}
}
/// Checks if this PathBuf is a valid instance for the given launcher type
#[theseus_macros::debug_pin]
#[tracing::instrument]
pub async fn is_valid_importable_instance(
instance_path: PathBuf,
r#type: ImportLauncherType,
) -> bool {
match r#type {
ImportLauncherType::MultiMC | ImportLauncherType::PrismLauncher => {
mmc::is_valid_mmc(instance_path).await
}
ImportLauncherType::ATLauncher => {
atlauncher::is_valid_atlauncher(instance_path).await
}
ImportLauncherType::GDLauncher => {
gdlauncher::is_valid_gdlauncher(instance_path).await
}
ImportLauncherType::Curseforge => {
curseforge::is_valid_curseforge(instance_path).await
}
ImportLauncherType::Unknown => false,
}
}
/// Caches an image file in the filesystem into the cache directory, and returns the path to the cached file.
#[theseus_macros::debug_pin]
#[tracing::instrument]
pub async fn recache_icon(
icon_path: PathBuf,
) -> crate::Result<Option<PathBuf>> {
let state = crate::State::get().await?;
let bytes = tokio::fs::read(&icon_path).await;
if let Ok(bytes) = bytes {
let bytes = bytes::Bytes::from(bytes);
let cache_dir = &state.directories.caches_dir();
let semaphore = &state.io_semaphore;
Ok(Some(
fetch::write_cached_icon(
&icon_path.to_string_lossy(),
cache_dir,
bytes,
semaphore,
)
.await?,
))
} else {
// could not find icon (for instance, prism default icon, etc)
Ok(None)
}
}
pub async fn copy_dotminecraft(
profile_path_id: ProfilePathId,
dotminecraft: PathBuf,
io_semaphore: &IoSemaphore,
existing_loading_bar: Option<LoadingBarId>,
) -> crate::Result<LoadingBarId> {
// Get full path to profile
let profile_path = profile_path_id.get_full_path().await?;
// Gets all subfiles recursively in src
let subfiles = get_all_subfiles(&dotminecraft).await?;
let total_subfiles = subfiles.len() as u64;
let loading_bar = init_or_edit_loading(
existing_loading_bar,
crate::LoadingBarType::CopyProfile {
import_location: dotminecraft.clone(),
profile_name: profile_path_id.to_string(),
},
total_subfiles as f64,
"Copying files in profile",
)
.await?;
// Copy each file
for src_child in subfiles {
let dst_child =
src_child.strip_prefix(&dotminecraft).map_err(|_| {
crate::ErrorKind::InputError(format!(
"Invalid file: {}",
&src_child.display()
))
})?;
let dst_child = profile_path.join(dst_child);
        // Sleep for 1 ms between copies to avoid hogging the CPU
tokio::time::sleep(std::time::Duration::from_millis(1)).await;
fetch::copy(&src_child, &dst_child, io_semaphore).await?;
emit_loading(&loading_bar, 1.0, None).await?;
}
Ok(loading_bar)
}
/// Recursively get a list of all subfiles in src
/// uses async recursion
#[theseus_macros::debug_pin]
#[async_recursion::async_recursion]
#[tracing::instrument]
pub async fn get_all_subfiles(src: &Path) -> crate::Result<Vec<PathBuf>> {
if !src.is_dir() {
return Ok(vec![src.to_path_buf()]);
}
let mut files = Vec::new();
let mut dir = io::read_dir(&src).await?;
while let Some(child) = dir
.next_entry()
.await
.map_err(|e| IOError::with_path(e, src))?
{
let src_child = child.path();
files.append(&mut get_all_subfiles(&src_child).await?);
}
Ok(files)
}
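
A usage sketch (not part of this commit) of the end-to-end import flow above; creating the blank destination profile is left to the caller:

// Hedged sketch: discover a PrismLauncher install and import its first instance.
async fn import_first_prism_instance(
    blank_profile: ProfilePathId,
) -> crate::Result<()> {
    let launcher = ImportLauncherType::PrismLauncher;
    let Some(base_path) = get_default_launcher_path(launcher) else {
        return Ok(());
    };
    if let Some(instance) = get_importable_instances(launcher, base_path.clone())
        .await?
        .into_iter()
        .next()
    {
        // On failure, import_instance already deletes the blank profile.
        import_instance(blank_profile, launcher, base_path, instance).await?;
    }
    Ok(())
}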

View File

@@ -0,0 +1,419 @@
use crate::config::MODRINTH_API_URL;
use crate::data::ModLoader;
use crate::event::emit::{emit_loading, init_loading};
use crate::event::{LoadingBarId, LoadingBarType};
use crate::prelude::ProfilePathId;
use crate::state::{
LinkedData, ModrinthProject, ModrinthVersion, ProfileInstallStage, SideType,
};
use crate::util::fetch::{
fetch, fetch_advanced, fetch_json, write_cached_icon,
};
use crate::util::io;
use crate::{InnerProjectPathUnix, State};
use reqwest::Method;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::PathBuf;
#[derive(Serialize, Deserialize, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct PackFormat {
pub game: String,
pub format_version: i32,
pub version_id: String,
pub name: String,
pub summary: Option<String>,
pub files: Vec<PackFile>,
pub dependencies: HashMap<PackDependency, String>,
}
#[derive(Serialize, Deserialize, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct PackFile {
pub path: InnerProjectPathUnix,
pub hashes: HashMap<PackFileHash, String>,
pub env: Option<HashMap<EnvType, SideType>>,
pub downloads: Vec<String>,
pub file_size: u32,
}
#[derive(Serialize, Deserialize, Eq, PartialEq, Hash)]
#[serde(rename_all = "camelCase", from = "String")]
pub enum PackFileHash {
Sha1,
Sha512,
Unknown(String),
}
impl From<String> for PackFileHash {
fn from(s: String) -> Self {
return match s.as_str() {
"sha1" => PackFileHash::Sha1,
"sha512" => PackFileHash::Sha512,
_ => PackFileHash::Unknown(s),
};
}
}
#[derive(Serialize, Deserialize, Eq, PartialEq, Hash)]
#[serde(rename_all = "camelCase")]
pub enum EnvType {
Client,
Server,
}
#[derive(Serialize, Deserialize, Clone, Copy, Hash, PartialEq, Eq, Debug)]
pub enum PackDependency {
#[serde(rename = "forge")]
Forge,
#[serde(rename = "neoforge")]
#[serde(alias = "neo-forge")]
NeoForge,
#[serde(rename = "fabric-loader")]
FabricLoader,
#[serde(rename = "quilt-loader")]
QuiltLoader,
#[serde(rename = "minecraft")]
Minecraft,
}
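// Illustrative only: a minimal modrinth.index.json that the types above are meant to
// deserialize. Field names follow the serde attributes above; the concrete values are
// made up, and the env/side strings follow the public mrpack spec.
//
// {
//   "game": "minecraft",
//   "formatVersion": 1,
//   "versionId": "1.0.0",
//   "name": "Example Pack",
//   "summary": "A made-up pack used only for illustration",
//   "files": [{
//     "path": "mods/example-mod.jar",
//     "hashes": { "sha1": "...", "sha512": "..." },
//     "env": { "client": "required", "server": "unsupported" },
//     "downloads": ["https://cdn.modrinth.com/..."],
//     "fileSize": 1024
//   }],
//   "dependencies": { "minecraft": "1.20.1", "fabric-loader": "0.15.3" }
// }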
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase", tag = "type")]
pub enum CreatePackLocation {
// Create a pack from a modrinth version ID (such as a modpack)
FromVersionId {
project_id: String,
version_id: String,
title: String,
icon_url: Option<String>,
},
// Create a pack from a file (such as an .mrpack for installing from a file, or a folder name for importing)
FromFile {
path: PathBuf,
},
}
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CreatePackProfile {
pub name: String, // the name of the profile, and relative path
pub game_version: String, // the game version of the profile
pub modloader: ModLoader, // the modloader to use
pub loader_version: Option<String>, // the modloader version to use; set to "latest", "stable", or the ID of your chosen loader (defaults to latest)
pub icon: Option<PathBuf>, // the icon for the profile
pub icon_url: Option<String>, // the URL icon for a profile (ONLY USED FOR TEMPORARY PROFILES)
pub linked_data: Option<LinkedData>, // the linked project ID (mainly for modpacks), used for updating
pub skip_install_profile: Option<bool>,
pub no_watch: Option<bool>,
}
// default
impl Default for CreatePackProfile {
fn default() -> Self {
CreatePackProfile {
name: "Untitled".to_string(),
game_version: "1.19.4".to_string(),
modloader: ModLoader::Vanilla,
loader_version: None,
icon: None,
icon_url: None,
linked_data: None,
skip_install_profile: Some(true),
no_watch: Some(false),
}
}
}
#[derive(Clone)]
pub struct CreatePack {
pub file: bytes::Bytes,
pub description: CreatePackDescription,
}
#[derive(Clone, Debug)]
pub struct CreatePackDescription {
pub icon: Option<PathBuf>,
pub override_title: Option<String>,
pub project_id: Option<String>,
pub version_id: Option<String>,
pub existing_loading_bar: Option<LoadingBarId>,
pub profile_path: ProfilePathId,
}
pub fn get_profile_from_pack(
location: CreatePackLocation,
) -> CreatePackProfile {
match location {
CreatePackLocation::FromVersionId {
project_id,
version_id,
title,
icon_url,
} => CreatePackProfile {
name: title,
icon_url,
linked_data: Some(LinkedData {
project_id: Some(project_id),
version_id: Some(version_id),
locked: Some(true),
}),
..Default::default()
},
CreatePackLocation::FromFile { path } => {
let file_name = path
.file_stem()
.unwrap_or_default()
.to_string_lossy()
.to_string();
CreatePackProfile {
name: file_name,
..Default::default()
}
}
}
}
#[tracing::instrument]
#[theseus_macros::debug_pin]
pub async fn generate_pack_from_version_id(
project_id: String,
version_id: String,
title: String,
icon_url: Option<String>,
profile_path: ProfilePathId,
// Optional pre-initialized loading bar. Unlike existing_loading_bar, this one is expected to already be a PackFileDownload bar
// For example, you might use this if multiple packs are being downloaded at once and you want them to share one loading bar
initialized_loading_bar: Option<LoadingBarId>,
) -> crate::Result<CreatePack> {
let state = State::get().await?;
let loading_bar = if let Some(bar) = initialized_loading_bar {
emit_loading(&bar, 0.0, Some("Downloading pack file")).await?;
bar
} else {
init_loading(
LoadingBarType::PackFileDownload {
profile_path: profile_path.get_full_path().await?,
pack_name: title,
icon: icon_url,
pack_version: version_id.clone(),
},
100.0,
"Downloading pack file",
)
.await?
};
emit_loading(&loading_bar, 0.0, Some("Fetching version")).await?;
let creds = state.credentials.read().await;
let version: ModrinthVersion = fetch_json(
Method::GET,
&format!("{}version/{}", MODRINTH_API_URL, version_id),
None,
None,
&state.fetch_semaphore,
&creds,
)
.await?;
emit_loading(&loading_bar, 10.0, None).await?;
let (url, hash) =
if let Some(file) = version.files.iter().find(|x| x.primary) {
Some((file.url.clone(), file.hashes.get("sha1")))
} else {
version
.files
.first()
.map(|file| (file.url.clone(), file.hashes.get("sha1")))
}
.ok_or_else(|| {
crate::ErrorKind::InputError(
"Specified version has no files".to_string(),
)
})?;
let file = fetch_advanced(
Method::GET,
&url,
hash.map(|x| &**x),
None,
None,
Some((&loading_bar, 70.0)),
&state.fetch_semaphore,
&creds,
)
.await?;
emit_loading(&loading_bar, 0.0, Some("Fetching project metadata")).await?;
let project: ModrinthProject = fetch_json(
Method::GET,
&format!("{}project/{}", MODRINTH_API_URL, version.project_id),
None,
None,
&state.fetch_semaphore,
&creds,
)
.await?;
emit_loading(&loading_bar, 10.0, Some("Retrieving icon")).await?;
let icon = if let Some(icon_url) = project.icon_url {
let state = State::get().await?;
let icon_bytes =
fetch(&icon_url, None, &state.fetch_semaphore, &creds).await?;
drop(creds);
let filename = icon_url.rsplit('/').next();
if let Some(filename) = filename {
Some(
write_cached_icon(
filename,
&state.directories.caches_dir(),
icon_bytes,
&state.io_semaphore,
)
.await?,
)
} else {
None
}
} else {
None
};
emit_loading(&loading_bar, 10.0, None).await?;
Ok(CreatePack {
file,
description: CreatePackDescription {
icon,
override_title: None,
project_id: Some(project_id),
version_id: Some(version_id),
existing_loading_bar: Some(loading_bar),
profile_path,
},
})
}
#[tracing::instrument]
#[theseus_macros::debug_pin]
pub async fn generate_pack_from_file(
path: PathBuf,
profile_path: ProfilePathId,
) -> crate::Result<CreatePack> {
let file = io::read(&path).await?;
Ok(CreatePack {
file: bytes::Bytes::from(file),
description: CreatePackDescription {
icon: None,
override_title: None,
project_id: None,
version_id: None,
existing_loading_bar: None,
profile_path,
},
})
}
/// Sets generated profile attributes to the pack ones (using profile::edit)
/// This includes the pack name, icon, game version, loader version, and loader
#[theseus_macros::debug_pin]
pub async fn set_profile_information(
profile_path: ProfilePathId,
description: &CreatePackDescription,
backup_name: &str,
dependencies: &HashMap<PackDependency, String>,
ignore_lock: bool, // do not change locked status
) -> crate::Result<()> {
let mut game_version: Option<&String> = None;
let mut mod_loader = None;
let mut loader_version = None;
for (key, value) in dependencies {
match key {
PackDependency::Forge => {
mod_loader = Some(ModLoader::Forge);
loader_version = Some(value);
}
PackDependency::NeoForge => {
mod_loader = Some(ModLoader::NeoForge);
loader_version = Some(value);
}
PackDependency::FabricLoader => {
mod_loader = Some(ModLoader::Fabric);
loader_version = Some(value);
}
PackDependency::QuiltLoader => {
mod_loader = Some(ModLoader::Quilt);
loader_version = Some(value);
}
PackDependency::Minecraft => game_version = Some(value),
}
}
let game_version = if let Some(game_version) = game_version {
game_version
} else {
return Err(crate::ErrorKind::InputError(
"Pack did not specify Minecraft version".to_string(),
)
.into());
};
let mod_loader = mod_loader.unwrap_or(ModLoader::Vanilla);
let loader_version = if mod_loader != ModLoader::Vanilla {
crate::profile::create::get_loader_version_from_loader(
game_version.clone(),
mod_loader,
loader_version.cloned(),
)
.await?
} else {
None
};
// Sets values in profile
crate::api::profile::edit(&profile_path, |prof| {
prof.metadata.name = description
.override_title
.clone()
.unwrap_or_else(|| backup_name.to_string());
prof.install_stage = ProfileInstallStage::PackInstalling;
let project_id = description.project_id.clone();
let version_id = description.version_id.clone();
prof.metadata.linked_data = if project_id.is_some()
&& version_id.is_some()
{
Some(LinkedData {
project_id,
version_id,
locked: if !ignore_lock {
Some(true)
} else {
prof.metadata.linked_data.as_ref().and_then(|x| x.locked)
},
})
} else {
None
};
prof.metadata.icon = description.icon.clone();
prof.metadata.game_version = game_version.clone();
prof.metadata.loader_version = loader_version.clone();
prof.metadata.loader = mod_loader;
async { Ok(()) }
})
.await?;
Ok(())
}
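// Illustrative only: for a Fabric pack, the modrinth.index.json dependencies map would look
// like { "minecraft": "1.20.1", "fabric-loader": "0.15.3" } (versions made up). The loop in
// set_profile_information above turns that into game_version = "1.20.1", ModLoader::Fabric,
// and loader_version = Some("0.15.3") before resolving the concrete LoaderVersion.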

View File

@@ -0,0 +1,430 @@
use crate::config::MODRINTH_API_URL;
use crate::event::emit::{
emit_loading, init_or_edit_loading, loading_try_for_each_concurrent,
};
use crate::event::LoadingBarType;
use crate::pack::install_from::{
set_profile_information, EnvType, PackFile, PackFileHash,
};
use crate::prelude::{ModrinthVersion, ProfilePathId, ProjectMetadata};
use crate::state::{ProfileInstallStage, Profiles, SideType};
use crate::util::fetch::{fetch_json, fetch_mirrors, write};
use crate::util::io;
use crate::{profile, State};
use async_zip::base::read::seek::ZipFileReader;
use reqwest::Method;
use serde_json::json;
use std::collections::HashMap;
use std::io::Cursor;
use std::path::{Component, PathBuf};
use super::install_from::{
generate_pack_from_file, generate_pack_from_version_id, CreatePack,
CreatePackLocation, PackFormat,
};
/// Install a modpack from an .mrpack file (Modrinth's zip-based pack format)
/// Wrapper around install_zipped_mrpack_files that generates a pack creation description and
/// attempts to install the pack files. If installation fails, the profile is removed (fails safely)
#[theseus_macros::debug_pin]
pub async fn install_zipped_mrpack(
location: CreatePackLocation,
profile_path: ProfilePathId,
) -> crate::Result<ProfilePathId> {
// Get file from description
let create_pack: CreatePack = match location {
CreatePackLocation::FromVersionId {
project_id,
version_id,
title,
icon_url,
} => {
generate_pack_from_version_id(
project_id,
version_id,
title,
icon_url,
profile_path.clone(),
None,
)
.await?
}
CreatePackLocation::FromFile { path } => {
generate_pack_from_file(path, profile_path.clone()).await?
}
};
// Install pack files, and if it fails, fail safely by removing the profile
let result = install_zipped_mrpack_files(create_pack, false).await;
// Check existing managed packs for potential updates
tokio::task::spawn(Profiles::update_modrinth_versions());
match result {
Ok(profile) => Ok(profile),
Err(err) => {
let _ = crate::api::profile::remove(&profile_path).await;
Err(err)
}
}
}
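// Hypothetical call-site sketch, kept as comments because the exact imports are an
// assumption: install a local .mrpack into a freshly created profile. The file path is an
// example; get_profile_from_pack (install_from.rs) and profile_create_from_creator (the
// profile creation API) are assumed to be in scope.
//
// let profile = get_profile_from_pack(CreatePackLocation::FromFile {
//     path: PathBuf::from("downloads/example-pack.mrpack"),
// });
// let profile_path = profile_create_from_creator(profile).await?;
// let profile_path = install_zipped_mrpack(
//     CreatePackLocation::FromFile { path: PathBuf::from("downloads/example-pack.mrpack") },
//     profile_path,
// ).await?;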
/// Install all pack files from a description
/// Does not remove the profile if it fails
#[theseus_macros::debug_pin]
pub async fn install_zipped_mrpack_files(
create_pack: CreatePack,
ignore_lock: bool,
) -> crate::Result<ProfilePathId> {
let state = &State::get().await?;
let file = create_pack.file;
let description = create_pack.description.clone(); // make a copy for profile edit function
let icon = create_pack.description.icon;
let project_id = create_pack.description.project_id;
let version_id = create_pack.description.version_id;
let existing_loading_bar = create_pack.description.existing_loading_bar;
let profile_path = create_pack.description.profile_path;
let icon_exists = icon.is_some();
let reader: Cursor<&bytes::Bytes> = Cursor::new(&file);
// Create zip reader around file
let mut zip_reader =
ZipFileReader::with_tokio(reader).await.map_err(|_| {
crate::Error::from(crate::ErrorKind::InputError(
"Failed to read input modpack zip".to_string(),
))
})?;
// Extract index of modrinth.index.json
let zip_index_option = zip_reader.file().entries().iter().position(|f| {
f.filename().as_str().unwrap_or_default() == "modrinth.index.json"
});
if let Some(zip_index) = zip_index_option {
let mut manifest = String::new();
let mut reader = zip_reader.reader_with_entry(zip_index).await?;
reader.read_to_string_checked(&mut manifest).await?;
let pack: PackFormat = serde_json::from_str(&manifest)?;
if &*pack.game != "minecraft" {
return Err(crate::ErrorKind::InputError(
"Pack does not support Minecraft".to_string(),
)
.into());
}
// Sets generated profile attributes to the pack ones (using profile::edit)
set_profile_information(
profile_path.clone(),
&description,
&pack.name,
&pack.dependencies,
ignore_lock,
)
.await?;
let profile_path = profile_path.clone();
let loading_bar = init_or_edit_loading(
existing_loading_bar,
LoadingBarType::PackDownload {
profile_path: profile_path.get_full_path().await?.clone(),
pack_name: pack.name.clone(),
icon,
pack_id: project_id,
pack_version: version_id,
},
100.0,
"Downloading modpack",
)
.await?;
let num_files = pack.files.len();
use futures::StreamExt;
loading_try_for_each_concurrent(
futures::stream::iter(pack.files.into_iter())
.map(Ok::<PackFile, crate::Error>),
None,
Some(&loading_bar),
70.0,
num_files,
None,
|project| {
let profile_path = profile_path.clone();
async move {
//TODO: Future update: prompt user for optional files in a modpack
if let Some(env) = project.env {
if env
.get(&EnvType::Client)
.map(|x| x == &SideType::Unsupported)
.unwrap_or(false)
{
return Ok(());
}
}
let creds = state.credentials.read().await;
let file = fetch_mirrors(
&project
.downloads
.iter()
.map(|x| &**x)
.collect::<Vec<&str>>(),
project.hashes.get(&PackFileHash::Sha1).map(|x| &**x),
&state.fetch_semaphore,
&creds,
)
.await?;
drop(creds);
let project_path = project.path.to_string();
let path =
std::path::Path::new(&project_path).components().next();
if let Some(path) = path {
match path {
Component::CurDir | Component::Normal(_) => {
let path = profile_path
.get_full_path()
.await?
.join(&project_path);
write(&path, &file, &state.io_semaphore)
.await?;
}
_ => {}
};
}
Ok(())
}
},
)
.await?;
emit_loading(&loading_bar, 0.0, Some("Extracting overrides")).await?;
let mut total_len = 0;
for index in 0..zip_reader.file().entries().len() {
let file = zip_reader.file().entries().get(index).unwrap();
let filename = file.filename().as_str().unwrap_or_default();
if (filename.starts_with("overrides")
|| filename.starts_with("client-overrides"))
&& !filename.ends_with('/')
{
total_len += 1;
}
}
for index in 0..zip_reader.file().entries().len() {
let file = zip_reader.file().entries().get(index).unwrap();
let filename = file.filename().as_str().unwrap_or_default();
let file_path = PathBuf::from(filename);
if (filename.starts_with("overrides")
|| filename.starts_with("client-overrides"))
&& !filename.ends_with('/')
{
// Reads the file into the 'content' variable
let mut content = Vec::new();
let mut reader = zip_reader.reader_with_entry(index).await?;
reader.read_to_end_checked(&mut content).await?;
let mut new_path = PathBuf::new();
let components = file_path.components().skip(1);
for component in components {
new_path.push(component);
}
if new_path.file_name().is_some() {
write(
&profile_path.get_full_path().await?.join(new_path),
&content,
&state.io_semaphore,
)
.await?;
}
emit_loading(
&loading_bar,
30.0 / total_len as f64,
Some(&format!(
"Extracting override {}/{}",
index, total_len
)),
)
.await?;
}
}
// If the icon doesn't exist, we expect icon.png to be a potential icon.
// If it doesn't exist, and an override to icon.png exists, cache and use that
let potential_icon =
profile_path.get_full_path().await?.join("icon.png");
if !icon_exists && potential_icon.exists() {
profile::edit_icon(&profile_path, Some(&potential_icon)).await?;
}
if let Some(profile_val) = profile::get(&profile_path, None).await? {
crate::launcher::install_minecraft(
&profile_val,
Some(loading_bar),
false,
)
.await?;
State::sync().await?;
}
Ok::<ProfilePathId, crate::Error>(profile_path.clone())
} else {
Err(crate::Error::from(crate::ErrorKind::InputError(
"No pack manifest found in mrpack".to_string(),
)))
}
}
#[tracing::instrument(skip(mrpack_file))]
#[theseus_macros::debug_pin]
pub async fn remove_all_related_files(
profile_path: ProfilePathId,
mrpack_file: bytes::Bytes,
) -> crate::Result<()> {
let reader: Cursor<&bytes::Bytes> = Cursor::new(&mrpack_file);
// Create zip reader around file
let mut zip_reader =
ZipFileReader::with_tokio(reader).await.map_err(|_| {
crate::Error::from(crate::ErrorKind::InputError(
"Failed to read input modpack zip".to_string(),
))
})?;
// Extract index of modrinth.index.json
let zip_index_option = zip_reader.file().entries().iter().position(|f| {
f.filename().as_str().unwrap_or_default() == "modrinth.index.json"
});
if let Some(zip_index) = zip_index_option {
let mut manifest = String::new();
let mut reader = zip_reader.reader_with_entry(zip_index).await?;
reader.read_to_string_checked(&mut manifest).await?;
let pack: PackFormat = serde_json::from_str(&manifest)?;
if &*pack.game != "minecraft" {
return Err(crate::ErrorKind::InputError(
"Pack does not support Minecraft".to_string(),
)
.into());
}
// Set install stage to installing, and do not change it back (as files are being removed and are not being reinstalled here)
crate::api::profile::edit(&profile_path, |prof| {
prof.install_stage = ProfileInstallStage::PackInstalling;
async { Ok(()) }
})
.await?;
// First, remove all modrinth projects by their version hashes
// We need to do a fetch to get the project ids from Modrinth
let state = State::get().await?;
let all_hashes = pack
.files
.iter()
.filter_map(|f| Some(f.hashes.get(&PackFileHash::Sha512)?.clone()))
.collect::<Vec<_>>();
let creds = state.credentials.read().await;
// First, get project info by hash
let files_url = format!("{}version_files", MODRINTH_API_URL);
let hash_projects = fetch_json::<HashMap<String, ModrinthVersion>>(
Method::POST,
&files_url,
None,
Some(json!({
"hashes": all_hashes,
"algorithm": "sha512",
})),
&state.fetch_semaphore,
&creds,
)
.await?;
let to_remove = hash_projects
.into_values()
.map(|p| p.project_id)
.collect::<Vec<_>>();
let profile =
profile::get(&profile_path, None).await?.ok_or_else(|| {
crate::ErrorKind::UnmanagedProfileError(
profile_path.to_string(),
)
})?;
for (project_id, project) in &profile.projects {
if let ProjectMetadata::Modrinth { project, .. } = &project.metadata
{
if to_remove.contains(&project.id) {
let path = profile
.get_profile_full_path()
.await?
.join(project_id.0.clone());
if path.exists() {
io::remove_file(&path).await?;
}
}
}
}
// Iterate over all Modrinth project file paths in the json, and remove them
// (There should be few, but this removes any files the .mrpack intended as Modrinth projects but were unrecognized)
for file in pack.files {
let path: PathBuf = profile_path
.get_full_path()
.await?
.join(file.path.to_string());
if path.exists() {
io::remove_file(&path).await?;
}
}
// Iterate over each 'overrides' file and remove it
for index in 0..zip_reader.file().entries().len() {
let file = zip_reader.file().entries().get(index).unwrap();
let filename = file.filename().as_str().unwrap_or_default();
let file_path = PathBuf::from(filename);
if (filename.starts_with("overrides")
|| filename.starts_with("client-overrides"))
&& !filename.ends_with('/')
{
let mut new_path = PathBuf::new();
let components = file_path.components().skip(1);
for component in components {
new_path.push(component);
}
// Remove this file if a corresponding one exists in the filesystem
let existing_file =
profile_path.get_full_path().await?.join(&new_path);
if existing_file.exists() {
io::remove_file(&existing_file).await?;
}
}
}
Ok(())
} else {
Err(crate::Error::from(crate::ErrorKind::InputError(
"No pack manifest found in mrpack".to_string(),
)))
}
}

View File

@@ -0,0 +1,3 @@
pub mod import;
pub mod install_from;
pub mod install_mrpack;

View File

@@ -0,0 +1,130 @@
//! Theseus process management interface
use uuid::Uuid;
use crate::state::{MinecraftChild, ProfilePathId};
pub use crate::{
state::{
Hooks, JavaSettings, MemorySettings, Profile, Settings, WindowSize,
},
State,
};
// Gets whether a child process stored in the state by UUID has finished
#[tracing::instrument]
pub async fn has_finished_by_uuid(uuid: Uuid) -> crate::Result<bool> {
Ok(get_exit_status_by_uuid(uuid).await?.is_some())
}
// Gets the exit status of a child process stored in the state by UUID
#[tracing::instrument]
pub async fn get_exit_status_by_uuid(
uuid: Uuid,
) -> crate::Result<Option<i32>> {
let state = State::get().await?;
let children = state.children.read().await;
children.exit_status(uuid).await
}
// Gets the UUID of each stored process in the state
#[tracing::instrument]
pub async fn get_all_uuids() -> crate::Result<Vec<Uuid>> {
let state = State::get().await?;
let children = state.children.read().await;
Ok(children.keys())
}
// Gets the UUID of each *running* stored process in the state
#[tracing::instrument]
pub async fn get_all_running_uuids() -> crate::Result<Vec<Uuid>> {
let state = State::get().await?;
let children = state.children.read().await;
children.running_keys().await
}
// Gets the Profile paths of each *running* stored process in the state
#[tracing::instrument]
pub async fn get_all_running_profile_paths() -> crate::Result<Vec<ProfilePathId>>
{
let state = State::get().await?;
let children = state.children.read().await;
children.running_profile_paths().await
}
// Gets the Profiles (cloned) of each *running* stored process in the state
#[tracing::instrument]
pub async fn get_all_running_profiles() -> crate::Result<Vec<Profile>> {
let state = State::get().await?;
let children = state.children.read().await;
children.running_profiles().await
}
// Gets the UUID of each *running* stored process in the state with the given profile path
#[tracing::instrument]
pub async fn get_uuids_by_profile_path(
profile_path: ProfilePathId,
) -> crate::Result<Vec<Uuid>> {
let state = State::get().await?;
let children = state.children.read().await;
children.running_keys_with_profile(profile_path).await
}
// Kill a child process stored in the state by UUID
#[tracing::instrument]
pub async fn kill_by_uuid(uuid: Uuid) -> crate::Result<()> {
let state = State::get().await?;
let children = state.children.read().await;
if let Some(mchild) = children.get(uuid) {
let mut mchild = mchild.write().await;
kill(&mut mchild).await
} else {
// No error returned for already finished process
Ok(())
}
}
// Wait for a child process stored in the state by UUID
#[tracing::instrument]
pub async fn wait_for_by_uuid(uuid: Uuid) -> crate::Result<()> {
let state = State::get().await?;
let children = state.children.read().await;
// No error returned for already killed process
if let Some(mchild) = children.get(uuid) {
let mut mchild = mchild.write().await;
wait_for(&mut mchild).await
} else {
// No error returned for already finished process
Ok(())
}
}
// Kill a running child process directly
#[tracing::instrument(skip(running))]
pub async fn kill(running: &mut MinecraftChild) -> crate::Result<()> {
running.current_child.write().await.kill().await?;
Ok(())
}
// Await on the completion of a child process directly
#[tracing::instrument(skip(running))]
pub async fn wait_for(running: &mut MinecraftChild) -> crate::Result<()> {
// We do not wait on the Child directly, but wait on the thread manager.
// This way we can still run all cleanup hook functions that happen after.
running
.manager
.take()
.ok_or_else(|| {
crate::ErrorKind::LauncherError(format!(
"Process manager already completed or missing for process {}",
running.uuid
))
})?
.await?
.map_err(|err| {
crate::ErrorKind::LauncherError(format!(
"Error running minecraft: {err}"
))
})?;
Ok(())
}
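// Illustrative helper (not part of the interface): block until every tracked Minecraft
// instance has exited, e.g. before the launcher shuts down. Assumes a tokio runtime and
// that the caller handles the returned Result.
#[allow(dead_code)]
async fn example_wait_for_all_running() -> crate::Result<()> {
    for uuid in get_all_running_uuids().await? {
        wait_for_by_uuid(uuid).await?;
    }
    Ok(())
}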

View File

@@ -0,0 +1,315 @@
//! Theseus profile management interface
use crate::pack::install_from::CreatePackProfile;
use crate::prelude::ProfilePathId;
use crate::state::LinkedData;
use crate::util::io::{self, canonicalize};
use crate::{
event::{emit::emit_profile, ProfilePayloadType},
prelude::ModLoader,
};
use crate::{pack, profile, ErrorKind};
pub use crate::{
state::{JavaSettings, Profile},
State,
};
use daedalus::modded::LoaderVersion;
use std::path::PathBuf;
use tracing::{info, trace};
use uuid::Uuid;
// Creates a profile of a given name and adds it to the in-memory state
// Returns relative filepath as ProfilePathId which can be used to access it in the State
#[tracing::instrument]
#[theseus_macros::debug_pin]
#[allow(clippy::too_many_arguments)]
pub async fn profile_create(
mut name: String, // the name of the profile, and relative path
game_version: String, // the game version of the profile
modloader: ModLoader, // the modloader to use
loader_version: Option<String>, // the modloader version to use; set to "latest", "stable", or the ID of your chosen loader (defaults to latest)
icon: Option<PathBuf>, // the icon for the profile
icon_url: Option<String>, // the URL icon for a profile (ONLY USED FOR TEMPORARY PROFILES)
linked_data: Option<LinkedData>, // the linked project ID (mainly for modpacks), used for updating
skip_install_profile: Option<bool>,
no_watch: Option<bool>,
) -> crate::Result<ProfilePathId> {
name = profile::sanitize_profile_name(&name);
trace!("Creating new profile. {}", name);
let state = State::get().await?;
let uuid = Uuid::new_v4();
let mut path = state.directories.profiles_dir().await.join(&name);
if path.exists() {
let mut new_name;
let mut new_path;
let mut which = 1;
loop {
new_name = format!("{name} ({which})");
new_path = state.directories.profiles_dir().await.join(&new_name);
if !new_path.exists() {
break;
}
which += 1;
}
tracing::debug!(
"Folder collision: {}, renaming to: {}",
path.display(),
new_path.display()
);
path = new_path;
name = new_name;
}
io::create_dir_all(&path).await?;
info!(
"Creating profile at path {}",
&canonicalize(&path)?.display()
);
let loader = if modloader != ModLoader::Vanilla {
get_loader_version_from_loader(
game_version.clone(),
modloader,
loader_version,
)
.await?
} else {
None
};
let mut profile = Profile::new(uuid, name, game_version).await?;
let result = async {
if let Some(ref icon) = icon {
let bytes =
io::read(state.directories.caches_dir().join(icon)).await?;
profile
.set_icon(
&state.directories.caches_dir(),
&state.io_semaphore,
bytes::Bytes::from(bytes),
&icon.to_string_lossy(),
)
.await?;
}
profile.metadata.icon_url = icon_url;
if let Some(loader_version) = loader {
profile.metadata.loader = modloader;
profile.metadata.loader_version = Some(loader_version);
}
profile.metadata.linked_data = linked_data;
if let Some(linked_data) = &mut profile.metadata.linked_data {
linked_data.locked = Some(
linked_data.project_id.is_some()
&& linked_data.version_id.is_some(),
);
}
emit_profile(
uuid,
&profile.profile_id(),
&profile.metadata.name,
ProfilePayloadType::Created,
)
.await?;
{
let mut profiles = state.profiles.write().await;
profiles
.insert(profile.clone(), no_watch.unwrap_or_default())
.await?;
}
if !skip_install_profile.unwrap_or(false) {
crate::launcher::install_minecraft(&profile, None, false).await?;
}
State::sync().await?;
Ok(profile.profile_id())
}
.await;
match result {
Ok(profile) => Ok(profile),
Err(err) => {
let _ = crate::api::profile::remove(&profile.profile_id()).await;
Err(err)
}
}
}
pub async fn profile_create_from_creator(
profile: CreatePackProfile,
) -> crate::Result<ProfilePathId> {
profile_create(
profile.name,
profile.game_version,
profile.modloader,
profile.loader_version,
profile.icon,
profile.icon_url,
profile.linked_data,
profile.skip_install_profile,
profile.no_watch,
)
.await
}
pub async fn profile_create_from_duplicate(
copy_from: ProfilePathId,
) -> crate::Result<ProfilePathId> {
// Original profile
let profile = profile::get(&copy_from, None).await?.ok_or_else(|| {
ErrorKind::UnmanagedProfileError(copy_from.to_string())
})?;
let profile_path_id = profile_create(
profile.metadata.name.clone(),
profile.metadata.game_version.clone(),
profile.metadata.loader,
profile.metadata.loader_version.clone().map(|it| it.id),
profile.metadata.icon.clone(),
profile.metadata.icon_url.clone(),
profile.metadata.linked_data.clone(),
Some(true),
Some(true),
)
.await?;
// Copy it over using the import system (essentially importing from the same profile)
let state = State::get().await?;
let bar = pack::import::copy_dotminecraft(
profile_path_id.clone(),
copy_from.get_full_path().await?,
&state.io_semaphore,
None,
)
.await?;
let duplicated_profile =
profile::get(&profile_path_id, None).await?.ok_or_else(|| {
ErrorKind::UnmanagedProfileError(profile_path_id.to_string())
})?;
crate::launcher::install_minecraft(&duplicated_profile, Some(bar), false)
.await?;
{
let state = State::get().await?;
let mut file_watcher = state.file_watcher.write().await;
Profile::watch_fs(
&profile.get_profile_full_path().await?,
&mut file_watcher,
)
.await?;
}
// emit profile edited
emit_profile(
profile.uuid,
&profile.profile_id(),
&profile.metadata.name,
ProfilePayloadType::Edited,
)
.await?;
State::sync().await?;
Ok(profile_path_id)
}
#[tracing::instrument]
#[theseus_macros::debug_pin]
pub(crate) async fn get_loader_version_from_loader(
game_version: String,
loader: ModLoader,
loader_version: Option<String>,
) -> crate::Result<Option<LoaderVersion>> {
let state = State::get().await?;
let metadata = state.metadata.read().await;
let version = loader_version.unwrap_or_else(|| "latest".to_string());
let filter = |it: &LoaderVersion| match version.as_str() {
"latest" => true,
"stable" => it.stable,
id => {
it.id == *id
|| format!("{}-{}", game_version, id) == it.id
|| format!("{}-{}-{}", game_version, id, game_version) == it.id
}
};
let loader_data = match loader {
ModLoader::Forge => &metadata.forge,
ModLoader::Fabric => &metadata.fabric,
ModLoader::Quilt => &metadata.quilt,
ModLoader::NeoForge => &metadata.neoforge,
_ => {
return Err(
ProfileCreationError::NoManifest(loader.to_string()).into()
)
}
};
let loaders = &loader_data
.game_versions
.iter()
.find(|it| {
it.id
.replace(daedalus::modded::DUMMY_REPLACE_STRING, &game_version)
== game_version
})
.ok_or_else(|| {
ProfileCreationError::ModloaderUnsupported(
loader.to_string(),
game_version.clone(),
)
})?
.loaders;
let loader_version = loaders
.iter()
.find(|&x| filter(x))
.cloned()
.or(
// If stable was searched for but not found, return latest by default
if version == "stable" {
loaders.iter().next().cloned()
} else {
None
},
)
.ok_or_else(|| {
ProfileCreationError::InvalidVersionModloader(
version,
loader.to_string(),
)
})?;
Ok(Some(loader_version))
}
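// Illustrative only: the filter above matches loader ids either exactly or with the game
// version folded in. For a hypothetical Forge entry with id "1.20.1-47.2.0", requesting
// "47.2.0" on game version "1.20.1" matches via the format!("{}-{}", game_version, id) branch.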
#[derive(thiserror::Error, Debug)]
pub enum ProfileCreationError {
#[error("Profile .json exists: {0}")]
ProfileExistsError(PathBuf),
#[error("Modloader {0} unsupported for Minecraft version {1}")]
ModloaderUnsupported(String, String),
#[error("Invalid version {0} for modloader {1}")]
InvalidVersionModloader(String, String),
#[error("Could not get manifest for loader {0}. This is a bug in the GUI")]
NoManifest(String),
#[error("Could not get State.")]
NoState,
#[error("Attempted to create project in something other than a folder.")]
NotFolder,
#[error("You are trying to create a profile in a non-empty directory")]
NotEmptyFolder,
#[error("IO error: {0}")]
IOError(#[from] std::io::Error),
}

File diff suppressed because it is too large

View File

@@ -0,0 +1,233 @@
use crate::{
event::{
emit::{emit_profile, init_loading, loading_try_for_each_concurrent},
ProfilePayloadType,
},
pack::{self, install_from::generate_pack_from_version_id},
prelude::{ProfilePathId, ProjectPathId},
profile::get,
state::{ProfileInstallStage, Project},
LoadingBarType, State,
};
use futures::try_join;
/// Updates a managed modrinth pack to the version specified by new_version_id
#[tracing::instrument]
#[theseus_macros::debug_pin]
pub async fn update_managed_modrinth_version(
profile_path: &ProfilePathId,
new_version_id: &String,
) -> crate::Result<()> {
let profile = get(profile_path, None).await?.ok_or_else(|| {
crate::ErrorKind::UnmanagedProfileError(profile_path.to_string())
.as_error()
})?;
let unmanaged_err = || {
crate::ErrorKind::InputError(
format!("Profile at {} is not a managed modrinth pack, or has been disconnected.", profile_path),
)
};
// Extract modrinth pack information, if appropriate
let linked_data = profile
.metadata
.linked_data
.as_ref()
.ok_or_else(unmanaged_err)?;
let project_id: &String =
linked_data.project_id.as_ref().ok_or_else(unmanaged_err)?;
let version_id =
linked_data.version_id.as_ref().ok_or_else(unmanaged_err)?;
// Replace the pack with the new version
replace_managed_modrinth(
profile_path,
&profile,
project_id,
version_id,
Some(new_version_id),
true, // switching versions should ignore the lock
)
.await?;
emit_profile(
profile.uuid,
profile_path,
&profile.metadata.name,
ProfilePayloadType::Edited,
)
.await?;
State::sync().await?;
Ok(())
}
/// Repair a managed modrinth pack by 'updating' it to the current version
#[tracing::instrument]
#[theseus_macros::debug_pin]
pub async fn repair_managed_modrinth(
profile_path: &ProfilePathId,
) -> crate::Result<()> {
let profile = get(profile_path, None).await?.ok_or_else(|| {
crate::ErrorKind::UnmanagedProfileError(profile_path.to_string())
.as_error()
})?;
let unmanaged_err = || {
crate::ErrorKind::InputError(
format!("Profile at {} is not a managed modrinth pack, or has been disconnected.", profile_path),
)
};
// For repairing specifically, first we remove all installed projects (to ensure we do remove ones that aren't in the pack)
// We do a project removal followed by removing everything in the .mrpack, to ensure we only
// remove relevant projects and not things like save files
let projects_map = profile.projects.clone();
let stream = futures::stream::iter(
projects_map
.into_iter()
.map(Ok::<(ProjectPathId, Project), crate::Error>),
);
loading_try_for_each_concurrent(
stream,
None,
None,
0.0,
0,
None,
|(project_id, _)| {
let profile = profile.clone();
async move {
profile.remove_project(&project_id, Some(true)).await?;
Ok(())
}
},
)
.await?;
// Extract modrinth pack information, if appropriate
let linked_data = profile
.metadata
.linked_data
.as_ref()
.ok_or_else(unmanaged_err)?;
let project_id: &String =
linked_data.project_id.as_ref().ok_or_else(unmanaged_err)?;
let version_id =
linked_data.version_id.as_ref().ok_or_else(unmanaged_err)?;
// Replace the pack with the same version
replace_managed_modrinth(
profile_path,
&profile,
project_id,
version_id,
None,
false, // do not ignore lock, as repairing can reset the lock
)
.await?;
emit_profile(
profile.uuid,
profile_path,
&profile.metadata.name,
ProfilePayloadType::Edited,
)
.await?;
State::sync().await?;
Ok(())
}
/// Replace a managed modrinth pack with a new version
/// If new_version_id is None, the pack is 'reinstalled' in-place
#[tracing::instrument(skip(profile))]
#[theseus_macros::debug_pin]
async fn replace_managed_modrinth(
profile_path: &ProfilePathId,
profile: &crate::state::Profile,
project_id: &String,
version_id: &String,
new_version_id: Option<&String>,
ignore_lock: bool,
) -> crate::Result<()> {
crate::profile::edit(profile_path, |profile| {
profile.install_stage = ProfileInstallStage::Installing;
async { Ok(()) }
})
.await?;
// Fetch .mrpacks for both old and new versions
// TODO: this will need to be updated if we revert the hacky pack method we needed for compiler speed
let (old_pack_creator, new_pack_creator) =
if let Some(new_version_id) = new_version_id {
let shared_loading_bar = init_loading(
LoadingBarType::PackFileDownload {
profile_path: profile_path.get_full_path().await?,
pack_name: profile.metadata.name.clone(),
icon: None,
pack_version: version_id.clone(),
},
200.0, // These two downloads will share the same loading bar
"Downloading pack file",
)
.await?;
// download in parallel, then join.
try_join!(
generate_pack_from_version_id(
project_id.clone(),
version_id.clone(),
profile.metadata.name.clone(),
None,
profile_path.clone(),
Some(shared_loading_bar.clone())
),
generate_pack_from_version_id(
project_id.clone(),
new_version_id.clone(),
profile.metadata.name.clone(),
None,
profile_path.clone(),
Some(shared_loading_bar)
)
)?
} else {
// If new_version_id is None, we don't need to download the new pack, so we clone the old one
let mut old_pack_creator = generate_pack_from_version_id(
project_id.clone(),
version_id.clone(),
profile.metadata.name.clone(),
None,
profile_path.clone(),
None,
)
.await?;
old_pack_creator.description.existing_loading_bar = None;
(old_pack_creator.clone(), old_pack_creator)
};
// Removal - remove all files that were added by the old pack
// - remove all installed projects
// - remove all overrides
pack::install_mrpack::remove_all_related_files(
profile_path.clone(),
old_pack_creator.file,
)
.await?;
// Reinstallation - install all files that are added by the new pack
// - install all projects
// - install all overrides
// - edits the profile to update the new data
// - (functions almost identically to reinstalling the pack 'in-place')
pack::install_mrpack::install_zipped_mrpack_files(
new_pack_creator,
ignore_lock,
)
.await?;
Ok(())
}

View File

@@ -0,0 +1,5 @@
use crate::state::{ProcessType, SafeProcesses};
pub async fn check_safe_loading_bars() -> crate::Result<bool> {
SafeProcesses::is_complete(ProcessType::LoadingBar).await
}

View File

@@ -0,0 +1,246 @@
//! Theseus profile management interface
use std::path::{Path, PathBuf};
use tokio::fs;
use io::IOError;
use tokio::sync::RwLock;
use crate::{
event::emit::{emit_loading, init_loading},
prelude::DirectoryInfo,
state::{self, Profiles},
util::{fetch, io},
};
pub use crate::{
state::{
Hooks, JavaSettings, MemorySettings, Profile, Settings, WindowSize,
},
State,
};
/// Gets entire settings
#[tracing::instrument]
pub async fn get() -> crate::Result<Settings> {
let state = State::get().await?;
let settings = state.settings.read().await;
Ok(settings.clone())
}
/// Sets entire settings
#[tracing::instrument]
pub async fn set(settings: Settings) -> crate::Result<()> {
let state = State::get().await?;
if settings.loaded_config_dir
!= state.settings.read().await.loaded_config_dir
{
return Err(crate::ErrorKind::OtherError(
"Cannot change config directory as setting".to_string(),
)
.as_error());
}
let (reset_io, reset_fetch) = async {
let read = state.settings.read().await;
(
settings.max_concurrent_writes != read.max_concurrent_writes,
settings.max_concurrent_downloads != read.max_concurrent_downloads,
)
}
.await;
let updated_discord_rpc = {
let read = state.settings.read().await;
settings.disable_discord_rpc != read.disable_discord_rpc
};
{
*state.settings.write().await = settings;
}
if updated_discord_rpc {
state.discord_rpc.clear_to_default(true).await?;
}
if reset_io {
state.reset_io_semaphore().await;
}
if reset_fetch {
state.reset_fetch_semaphore().await;
}
State::sync().await?;
Ok(())
}
/// Sets the new config dir, the location of all Theseus data except for the settings.json and caches
/// Takes control of the entire state and blocks until completion
pub async fn set_config_dir(new_config_dir: PathBuf) -> crate::Result<()> {
tracing::trace!("Changing config dir to: {}", new_config_dir.display());
if !new_config_dir.is_dir() {
return Err(crate::ErrorKind::FSError(format!(
"New config dir is not a folder: {}",
new_config_dir.display()
))
.as_error());
}
if !is_dir_writeable(new_config_dir.clone()).await? {
return Err(crate::ErrorKind::FSError(format!(
"New config dir is not writeable: {}",
new_config_dir.display()
))
.as_error());
}
let loading_bar = init_loading(
crate::LoadingBarType::ConfigChange {
new_path: new_config_dir.clone(),
},
100.0,
"Changing configuration directory",
)
.await?;
tracing::trace!("Changing config dir, taking control of the state");
// Take control of the state
let mut state_write = State::get_write().await?;
let old_config_dir =
state_write.directories.config_dir.read().await.clone();
// Reset file watcher
tracing::trace!("Reset file watcher");
let file_watcher = state::init_watcher().await?;
state_write.file_watcher = RwLock::new(file_watcher);
// Getting files to be moved
let mut config_entries = io::read_dir(&old_config_dir).await?;
let across_drives = is_different_drive(&old_config_dir, &new_config_dir);
let mut entries = vec![];
let mut deletable_entries = vec![];
while let Some(entry) = config_entries
.next_entry()
.await
.map_err(|e| IOError::with_path(e, &old_config_dir))?
{
let entry_path = entry.path();
if let Some(file_name) = entry_path.file_name() {
// We are only moving the profiles and metadata folders
if file_name == state::PROFILES_FOLDER_NAME
|| file_name == state::METADATA_FOLDER_NAME
{
if across_drives {
entries.extend(
crate::pack::import::get_all_subfiles(&entry_path)
.await?,
);
deletable_entries.push(entry_path.clone());
} else {
entries.push(entry_path.clone());
}
}
}
}
tracing::trace!("Moving files");
let semaphore = &state_write.io_semaphore;
let num_entries = entries.len() as f64;
for entry_path in entries {
let relative_path = entry_path.strip_prefix(&old_config_dir)?;
let new_path = new_config_dir.join(relative_path);
if across_drives {
fetch::copy(&entry_path, &new_path, semaphore).await?;
} else {
io::rename(entry_path.clone(), new_path.clone()).await?;
}
emit_loading(&loading_bar, 80.0 * (1.0 / num_entries), None).await?;
}
tracing::trace!("Setting configuration setting");
// Set load config dir setting
let settings = {
let mut settings = state_write.settings.write().await;
settings.loaded_config_dir = Some(new_config_dir.clone());
// Some java paths are hardcoded to within our config dir, so we need to update them
tracing::trace!("Updating java keys");
for key in settings.java_globals.keys() {
if let Some(java) = settings.java_globals.get_mut(&key) {
// If the path is within the old config dir path, update it to the new config dir
if let Ok(relative_path) = PathBuf::from(java.path.clone())
.strip_prefix(&old_config_dir)
{
java.path = new_config_dir
.join(relative_path)
.to_string_lossy()
.to_string();
}
}
}
tracing::trace!("Syncing settings");
settings
.sync(&state_write.directories.settings_file())
.await?;
settings.clone()
};
tracing::trace!("Reinitializing directory");
// Set new state information
state_write.directories = DirectoryInfo::init(&settings)?;
// Delete entries that were from a different drive
let deletable_entries_len = deletable_entries.len();
if deletable_entries_len > 0 {
tracing::trace!("Deleting old files");
}
for entry in deletable_entries {
io::remove_dir_all(entry).await?;
emit_loading(
&loading_bar,
10.0 * (1.0 / deletable_entries_len as f64),
None,
)
.await?;
}
// Reset file watcher
tracing::trace!("Reset file watcher");
let mut file_watcher = state::init_watcher().await?;
// Reset profiles (for filepaths, file watcher, etc)
state_write.profiles = RwLock::new(
Profiles::init(&state_write.directories, &mut file_watcher).await?,
);
state_write.file_watcher = RwLock::new(file_watcher);
emit_loading(&loading_bar, 10.0, None).await?;
tracing::info!(
"Successfully switched config folder to: {}",
new_config_dir.display()
);
Ok(())
}
// Function to check if two paths are on different drives/roots
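// On Windows the first component is the drive prefix (e.g. `C:` vs `D:`), so paths on
// different drives compare as unequal; on Unix both absolute paths start with the root
// component, so everything is treated as the same "drive".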
fn is_different_drive(path1: &Path, path2: &Path) -> bool {
let root1 = path1.components().next();
let root2 = path2.components().next();
root1 != root2
}
pub async fn is_dir_writeable(new_config_dir: PathBuf) -> crate::Result<bool> {
let temp_path = new_config_dir.join(".tmp");
match fs::write(temp_path.clone(), "test").await {
Ok(_) => {
fs::remove_file(temp_path).await?;
Ok(true)
}
Err(e) => {
tracing::error!("Error writing to new config dir: {}", e);
Ok(false)
}
}
}

View File

@@ -0,0 +1,60 @@
//! Theseus tag management interface
pub use crate::{
state::{Category, DonationPlatform, GameVersion, Loader, Tags},
State,
};
// Get bundled set of tags
#[tracing::instrument]
pub async fn get_tag_bundle() -> crate::Result<Tags> {
let state = State::get().await?;
let tags = state.tags.read().await;
Ok(tags.get_tag_bundle())
}
/// Get category tags
#[tracing::instrument]
pub async fn get_category_tags() -> crate::Result<Vec<Category>> {
let state = State::get().await?;
let tags = state.tags.read().await;
Ok(tags.get_categories())
}
/// Get report type tags
#[tracing::instrument]
pub async fn get_report_type_tags() -> crate::Result<Vec<String>> {
let state = State::get().await?;
let tags = state.tags.read().await;
Ok(tags.get_report_types())
}
/// Get loader tags
#[tracing::instrument]
pub async fn get_loader_tags() -> crate::Result<Vec<Loader>> {
let state = State::get().await?;
let tags = state.tags.read().await;
Ok(tags.get_loaders())
}
/// Get game version tags
#[tracing::instrument]
pub async fn get_game_version_tags() -> crate::Result<Vec<GameVersion>> {
let state = State::get().await?;
let tags = state.tags.read().await;
Ok(tags.get_game_versions())
}
/// Get donation platform tags
#[tracing::instrument]
pub async fn get_donation_platform_tags() -> crate::Result<Vec<DonationPlatform>>
{
let state = State::get().await?;
let tags = state.tags.read().await;
Ok(tags.get_donation_platforms())
}

View File

@@ -0,0 +1,3 @@
//! Configuration structs
pub const MODRINTH_API_URL: &str = "https://api.modrinth.com/v2/";
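// Note: the trailing slash matters; call sites append endpoints without a leading slash,
// e.g. format!("{}version/{}", MODRINTH_API_URL, id) yields "https://api.modrinth.com/v2/version/{id}".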

View File

@@ -0,0 +1,143 @@
//! Theseus error type
use crate::{profile, util};
use tracing_error::InstrumentError;
#[derive(thiserror::Error, Debug)]
pub enum ErrorKind {
#[error("Filesystem error: {0}")]
FSError(String),
#[error("Serialization error (INI): {0}")]
INIError(#[from] serde_ini::de::Error),
#[error("Serialization error (JSON): {0}")]
JSONError(#[from] serde_json::Error),
#[error("Error parsing UUID: {0}")]
UUIDError(#[from] uuid::Error),
#[error("Error parsing URL: {0}")]
URLError(#[from] url::ParseError),
#[error("Unable to read {0} from any source")]
NoValueFor(String),
#[error("Metadata error: {0}")]
MetadataError(#[from] daedalus::Error),
#[error("Minecraft authentication error: {0}")]
MinecraftAuthenticationError(
#[from] crate::state::MinecraftAuthenticationError,
),
#[error("I/O error: {0}")]
IOError(#[from] util::io::IOError),
#[error("I/O (std) error: {0}")]
StdIOError(#[from] std::io::Error),
#[error("Error launching Minecraft: {0}")]
LauncherError(String),
#[error("Error fetching URL: {0}")]
FetchError(#[from] reqwest::Error),
#[error("Websocket error: {0}")]
WSError(#[from] async_tungstenite::tungstenite::Error),
#[error("Websocket closed before {0} could be received!")]
WSClosedError(String),
#[error("Incorrect Sha1 hash for download: {0} != {1}")]
HashError(String, String),
#[error("Regex error: {0}")]
RegexError(#[from] regex::Error),
#[error("Paths stored in the database need to be valid UTF-8: {0}")]
UTFError(std::path::PathBuf),
#[error("Invalid input: {0}")]
InputError(String),
#[error("Join handle error: {0}")]
JoinError(#[from] tokio::task::JoinError),
#[error("Recv error: {0}")]
RecvError(#[from] tokio::sync::oneshot::error::RecvError),
#[error("Error acquiring semaphore: {0}")]
AcquireError(#[from] tokio::sync::AcquireError),
#[error("Profile {0} is not managed by the app!")]
UnmanagedProfileError(String),
#[error("Could not create profile: {0}")]
ProfileCreationError(#[from] profile::create::ProfileCreationError),
#[error("User is not logged in, no credentials available!")]
NoCredentialsError,
#[error("JRE error: {0}")]
JREError(#[from] crate::util::jre::JREError),
#[error("Error parsing date: {0}")]
ChronoParseError(#[from] chrono::ParseError),
#[error("Event error: {0}")]
EventError(#[from] crate::event::EventError),
#[error("Zip error: {0}")]
ZipError(#[from] async_zip::error::ZipError),
#[error("File watching error: {0}")]
NotifyError(#[from] notify::Error),
#[error("Error stripping prefix: {0}")]
StripPrefixError(#[from] std::path::StripPrefixError),
#[error("Error: {0}")]
OtherError(String),
#[cfg(feature = "tauri")]
#[error("Tauri error: {0}")]
TauriError(#[from] tauri::Error),
}
#[derive(Debug)]
pub struct Error {
pub raw: std::sync::Arc<ErrorKind>,
pub source: tracing_error::TracedError<std::sync::Arc<ErrorKind>>,
}
impl std::error::Error for Error {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
self.source.source()
}
}
impl std::fmt::Display for Error {
fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(fmt, "{}", self.source)
}
}
impl<E: Into<ErrorKind>> From<E> for Error {
fn from(source: E) -> Self {
let error = Into::<ErrorKind>::into(source);
let boxed_error = std::sync::Arc::new(error);
Self {
raw: boxed_error.clone(),
source: boxed_error.in_current_span(),
}
}
}
impl ErrorKind {
pub fn as_error(self) -> Error {
self.into()
}
}
pub type Result<T> = core::result::Result<T, Error>;

View File

@@ -0,0 +1,415 @@
use super::LoadingBarId;
use crate::{
event::{
CommandPayload, EventError, LoadingBar, LoadingBarType,
ProcessPayloadType, ProfilePayloadType,
},
prelude::ProfilePathId,
state::{ProcessType, SafeProcesses},
};
use futures::prelude::*;
#[cfg(feature = "tauri")]
use crate::event::{
LoadingPayload, ProcessPayload, ProfilePayload, WarningPayload,
};
#[cfg(feature = "tauri")]
use tauri::Manager;
use uuid::Uuid;
#[cfg(feature = "cli")]
const CLI_PROGRESS_BAR_TOTAL: u64 = 1000;
/*
Events are a way we can communicate with the Tauri frontend from the Rust backend.
We include a feature flag for Tauri, so that we can compile this code without Tauri.
To use events, we need to do the following:
1) Make sure we are using the tauri feature flag
2) Initialize the EventState with EventState::init() *before* initializing the theseus State
3) Call emit_x functions to send events to the frontend
For emit_loading() specifically, we need to initialize the loading bar with init_loading() first and pass the returned LoadingBarId in
For example:
pub async fn caller() -> crate::Result<()> {
    loading_function().await
}
pub async fn loading_function() -> crate::Result<()> {
    let loading_bar = init_loading(LoadingBarType::StateInit, 100.0, "Loading something long...").await?;
    for _ in 0..100 {
        emit_loading(&loading_bar, 1.0, None).await?;
        tokio::time::sleep(Duration::from_millis(100)).await;
    }
    Ok(())
}
*/
/// Initialize a loading bar for use in emit_loading
/// This will generate a LoadingBarId, which is used to refer to the loading bar uniquely.
/// total is the total amount of work to be done; all emissions are counted as a fraction of this value (1.0 or 100.0 keeps the math simple)
/// title is the title of the loading bar
/// The app will wait for this loading bar to finish before exiting, as it is registered as a safe process.
#[theseus_macros::debug_pin]
pub async fn init_loading(
bar_type: LoadingBarType,
total: f64,
title: &str,
) -> crate::Result<LoadingBarId> {
let key = init_loading_unsafe(bar_type, total, title).await?;
SafeProcesses::add_uuid(ProcessType::LoadingBar, key.0).await?;
Ok(key)
}
/// An unsafe loading bar can be created without adding it to the SafeProcesses list,
/// meaning that the app won't ask to wait for it to finish before exiting.
#[theseus_macros::debug_pin]
pub async fn init_loading_unsafe(
bar_type: LoadingBarType,
total: f64,
title: &str,
) -> crate::Result<LoadingBarId> {
let event_state = crate::EventState::get().await?;
let key = LoadingBarId(Uuid::new_v4());
event_state.loading_bars.write().await.insert(
key.0,
LoadingBar {
loading_bar_uuid: key.0,
message: title.to_string(),
total,
current: 0.0,
last_sent: 0.0,
bar_type,
#[cfg(feature = "cli")]
cli_progress_bar: {
let pb = indicatif::ProgressBar::new(CLI_PROGRESS_BAR_TOTAL);
pb.set_position(0);
pb.set_style(
indicatif::ProgressStyle::default_bar()
.template(
"{spinner:.green} [{elapsed_precise}] [{bar:.lime/green}] {pos}/{len} {msg}",
).unwrap()
.progress_chars("#>-"),
);
pb
},
},
);
// attempt an initial loading_emit event to the frontend
emit_loading(&key, 0.0, None).await?;
Ok(key)
}
pub async fn init_or_edit_loading(
id: Option<LoadingBarId>,
bar_type: LoadingBarType,
total: f64,
title: &str,
) -> crate::Result<LoadingBarId> {
if let Some(id) = id {
edit_loading(&id, bar_type, total, title).await?;
Ok(id)
} else {
init_loading(bar_type, total, title).await
}
}
// Edits a loading bar's type
// This also resets the bar's current progress to 0
pub async fn edit_loading(
id: &LoadingBarId,
bar_type: LoadingBarType,
total: f64,
title: &str,
) -> crate::Result<()> {
let event_state = crate::EventState::get().await?;
if let Some(bar) = event_state.loading_bars.write().await.get_mut(&id.0) {
bar.bar_type = bar_type;
bar.total = total;
bar.message = title.to_string();
bar.current = 0.0;
bar.last_sent = 0.0;
#[cfg(feature = "cli")]
{
bar.cli_progress_bar.reset(); // reset the existing CLI bar rather than recreating it
}
};
emit_loading(id, 0.0, None).await?;
Ok(())
}
// emit_loading emits a loading event to the frontend
// key refers to the loading bar to update
// increment_frac is the relative amount (out of the loading bar's total) to advance the bar by
// message is the message to display on the loading bar; if None, the bar's default message is used
// By convention, fraction is the fraction of the progress bar that is filled
#[allow(unused_variables)]
#[tracing::instrument(level = "debug")]
#[theseus_macros::debug_pin]
pub async fn emit_loading(
key: &LoadingBarId,
increment_frac: f64,
message: Option<&str>,
) -> crate::Result<()> {
let event_state = crate::EventState::get().await?;
let mut loading_bar = event_state.loading_bars.write().await;
let loading_bar = match loading_bar.get_mut(&key.0) {
Some(f) => f,
None => {
return Err(EventError::NoLoadingBar(key.0).into());
}
};
// Tick up loading bar
loading_bar.current += increment_frac;
let display_frac = loading_bar.current / loading_bar.total;
let opt_display_frac = if display_frac >= 1.0 {
None // by convention, when it's done, we submit None
// any further updates will be ignored (also sending None)
} else {
Some(display_frac)
};
if f64::abs(display_frac - loading_bar.last_sent) > 0.005 {
// Emit event to indicatif progress bar
#[cfg(feature = "cli")]
{
loading_bar.cli_progress_bar.set_message(
message
.map(|x| x.to_string())
.unwrap_or(loading_bar.message.clone()),
);
loading_bar.cli_progress_bar.set_position(
(display_frac * CLI_PROGRESS_BAR_TOTAL as f64).round() as u64,
);
}
//Emit event to tauri
#[cfg(feature = "tauri")]
event_state
.app
.emit_all(
"loading",
LoadingPayload {
fraction: opt_display_frac,
message: message
.unwrap_or(&loading_bar.message)
.to_string(),
event: loading_bar.bar_type.clone(),
loader_uuid: loading_bar.loading_bar_uuid,
},
)
.map_err(EventError::from)?;
loading_bar.last_sent = display_frac;
}
Ok(())
}
// emit_warning(message)
#[allow(dead_code)]
#[allow(unused_variables)]
pub async fn emit_warning(message: &str) -> crate::Result<()> {
#[cfg(feature = "tauri")]
{
let event_state = crate::EventState::get().await?;
event_state
.app
.emit_all(
"warning",
WarningPayload {
message: message.to_string(),
},
)
.map_err(EventError::from)?;
}
tracing::warn!("{}", message);
Ok(())
}
// emit_offline(bool)
// This is used to emit an event to the frontend that the app is offline after a refresh (or online)
#[allow(dead_code)]
#[allow(unused_variables)]
pub async fn emit_offline(offline: bool) -> crate::Result<()> {
#[cfg(feature = "tauri")]
{
let event_state = crate::EventState::get().await?;
event_state
.app
.emit_all("offline", offline)
.map_err(EventError::from)?;
}
Ok(())
}
// emit_command(CommandPayload::Something { something })
// ie: installing a pack, opening an .mrpack, etc
// Generally used for URL deep links and file opens that we want to handle in the frontend
#[allow(dead_code)]
#[allow(unused_variables)]
pub async fn emit_command(command: CommandPayload) -> crate::Result<()> {
tracing::debug!("Command: {}", serde_json::to_string(&command)?);
#[cfg(feature = "tauri")]
{
let event_state = crate::EventState::get().await?;
event_state
.app
.emit_all("command", command)
.map_err(EventError::from)?;
}
Ok(())
}
// emit_process(uuid, pid, event, message)
#[allow(unused_variables)]
pub async fn emit_process(
uuid: Uuid,
pid: u32,
event: ProcessPayloadType,
message: &str,
) -> crate::Result<()> {
#[cfg(feature = "tauri")]
{
let event_state = crate::EventState::get().await?;
event_state
.app
.emit_all(
"process",
ProcessPayload {
uuid,
pid,
event,
message: message.to_string(),
},
)
.map_err(EventError::from)?;
}
Ok(())
}
// emit_profile(path, event)
#[allow(unused_variables)]
pub async fn emit_profile(
uuid: Uuid,
profile_path_id: &ProfilePathId,
name: &str,
event: ProfilePayloadType,
) -> crate::Result<()> {
#[cfg(feature = "tauri")]
{
let path = profile_path_id.get_full_path().await?;
let event_state = crate::EventState::get().await?;
event_state
.app
.emit_all(
"profile",
ProfilePayload {
uuid,
profile_path_id: profile_path_id.clone(),
path,
name: name.to_string(),
event,
},
)
.map_err(EventError::from)?;
}
Ok(())
}
// loading_join! macro
// loading_join!(key: Option<&LoadingBarId>, total: f64, message: Option<&str>; task1, task2, task3...)
// This will submit a loading event with the given message for each task as it completes
// task1, task2, task3 are the async tasks that you want to join and await on
// Key is the key to use for which loading bar to submit these results to- a LoadingBarId. If None, it does nothing
// Total is the total amount of progress that the loading bar should take up across all futures in this call (split evenly amongst them).
// If message is Some(t), you will overwrite this loading bar's message with a custom one
// For example, if you want three tasks to together take up 0.1 of the progress bar, you would do:
// loading_join!(Some(&loading_bar), 0.1, None; task1, task2, task3)
// This will await on each of the tasks, and as each completes, it will emit a loading event of ~0.033 (so the bar advances to 0.033, 0.066, 0.099, etc)
// This should function as a drop-in replacement for tokio::try_join! in most cases- except that the macro *itself* calls ? on the internal emit_loading calls rather than needing it.
#[macro_export]
macro_rules! count {
() => (0usize);
( $x:tt $($xs:tt)* ) => (1usize + $crate::count!($($xs)*));
}
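// For example, count!(a b c) expands to 1usize + 1usize + 1usize + 0usize = 3; loading_join!
// below uses this to split its total evenly across the tasks it is given.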
#[macro_export]
macro_rules! loading_join {
($key:expr, $total:expr, $message:expr; $($task:expr $(,)?)+) => {
{
let key = $key;
let message : Option<&str> = $message;
let num_futures = $crate::count!($($task)*);
let increment = $total / num_futures as f64;
paste::paste! {
$( let [ <unique_name $task>] = {
{
let key = key.clone();
let message = message.clone();
async move {
let res = $task.await;
if let Some(key) = key {
$crate::event::emit::emit_loading(key, increment, message).await?;
}
res
}
}
};)+
}
paste::paste! {
tokio::try_join! (
$( [ <unique_name $task>] ),+
)
}
}
};
}
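// Illustrative usage sketch (not part of the original file): three hypothetical in-place tasks
// (task_a, task_b, task_c are placeholders) splitting 30 loading units evenly, so each completion
// emits 10.0. The results come back as a tuple, like tokio::try_join!.
#[allow(dead_code)]
async fn example_loading_join(bar: &LoadingBarId) -> crate::Result<()> {
    let task_a = async { Ok::<_, crate::Error>(1) };
    let task_b = async { Ok::<_, crate::Error>(2) };
    let task_c = async { Ok::<_, crate::Error>(3) };
    let (a, b, c) = loading_join!(
        Some(bar), 30.0, Some("Running example tasks");
        task_a, task_b, task_c,
    )?;
    tracing::debug!("example results: {} {} {}", a, b, c);
    Ok(())
}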
// A drop-in replacement for try_for_each_concurrent that emits loading events as it goes
// Key is the key to use for which loading bar- a LoadingBarId. If None, does nothing
// Total is the total amount of progress that the loading bar should take up across all futures in this call (will be split evenly amongst them).
// If message is Some(t), you will overwrite this loading bar's message with a custom one
// num_futs is the number of futures that will be run, which is needed because we allow an Iterator-backed stream to be passed in, which doesn't have a known size
#[tracing::instrument(skip(stream, f))]
#[theseus_macros::debug_pin]
pub async fn loading_try_for_each_concurrent<I, F, Fut, T>(
stream: I,
limit: Option<usize>,
key: Option<&LoadingBarId>,
total: f64,
num_futs: usize, // num is in here as we allow Iterator to be passed in, which doesn't have a size
message: Option<&str>,
f: F,
) -> crate::Result<()>
where
I: futures::TryStreamExt<Error = crate::Error> + TryStream<Ok = T>,
F: FnMut(T) -> Fut + Send,
Fut: Future<Output = crate::Result<()>> + Send,
T: Send,
{
let mut f = f;
stream
.try_for_each_concurrent(limit, |item| {
let f = f(item);
async move {
f.await?;
if let Some(key) = key {
emit_loading(key, total / (num_futs as f64), message)
.await?;
}
Ok(())
}
})
.await
}
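// Illustrative usage sketch (not part of the original file): spreading 20 loading units over a
// small in-memory stream, at most two items in flight at once. The items and the per-item work
// here are placeholders.
#[allow(dead_code)]
async fn example_loading_stream(bar: &LoadingBarId) -> crate::Result<()> {
    let items = vec![1u32, 2, 3, 4];
    let num_futs = items.len();
    loading_try_for_each_concurrent(
        futures::stream::iter(items.into_iter().map(Ok::<u32, crate::Error>)),
        Some(2),   // at most two items processed concurrently
        Some(bar),
        20.0,      // these items advance the bar by 20 units in total
        num_futs,
        None,
        |item| async move {
            tracing::trace!("processed example item {}", item);
            Ok(())
        },
    )
    .await
}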

View File

@@ -0,0 +1,278 @@
//! Theseus state management system
use serde::{Deserialize, Serialize};
use std::{collections::HashMap, path::PathBuf, sync::Arc};
use tokio::sync::OnceCell;
use tokio::sync::RwLock;
use uuid::Uuid;
use crate::prelude::ProfilePathId;
use crate::state::SafeProcesses;
pub mod emit;
// Global event state
// Stores the Tauri app handle and other event-related state variables
static EVENT_STATE: OnceCell<Arc<EventState>> = OnceCell::const_new();
pub struct EventState {
/// Tauri app
#[cfg(feature = "tauri")]
pub app: tauri::AppHandle,
pub loading_bars: RwLock<HashMap<Uuid, LoadingBar>>,
}
impl EventState {
#[cfg(feature = "tauri")]
pub async fn init(app: tauri::AppHandle) -> crate::Result<Arc<Self>> {
EVENT_STATE
.get_or_try_init(|| async {
Ok(Arc::new(Self {
app,
loading_bars: RwLock::new(HashMap::new()),
}))
})
.await
.cloned()
}
#[cfg(not(feature = "tauri"))]
pub async fn init() -> crate::Result<Arc<Self>> {
EVENT_STATE
.get_or_try_init(|| async {
Ok(Arc::new(Self {
loading_bars: RwLock::new(HashMap::new()),
}))
})
.await
.cloned()
}
#[cfg(feature = "tauri")]
pub async fn get() -> crate::Result<Arc<Self>> {
Ok(EVENT_STATE.get().ok_or(EventError::NotInitialized)?.clone())
}
// Initialization requires no app handle in non-tauri mode, so we can just use the same function
#[cfg(not(feature = "tauri"))]
pub async fn get() -> crate::Result<Arc<Self>> {
Self::init().await
}
// Values provided should not be used directly, as they are clones and are not guaranteed to be up-to-date
pub async fn list_progress_bars() -> crate::Result<HashMap<Uuid, LoadingBar>>
{
let value = Self::get().await?;
let read = value.loading_bars.read().await;
let mut display_list: HashMap<Uuid, LoadingBar> = HashMap::new();
for (uuid, loading_bar) in read.iter() {
display_list.insert(*uuid, loading_bar.clone());
}
Ok(display_list)
}
#[cfg(feature = "tauri")]
pub async fn get_main_window() -> crate::Result<Option<tauri::Window>> {
use tauri::Manager;
let value = Self::get().await?;
Ok(value.app.get_window("main"))
}
}
#[derive(Serialize, Debug, Clone)]
pub struct LoadingBar {
// loading_bar_uuid should not be used directly by external functions, as it may not reflect the current state of the loading bar/hashmap
pub loading_bar_uuid: Uuid,
pub message: String,
pub total: f64,
pub current: f64,
#[serde(skip)]
pub last_sent: f64,
pub bar_type: LoadingBarType,
#[cfg(feature = "cli")]
#[serde(skip)]
pub cli_progress_bar: indicatif::ProgressBar,
}
#[derive(Serialize, Debug, Clone)]
pub struct LoadingBarId(Uuid);
// When Loading bar id is dropped, we should remove it from the hashmap
impl Drop for LoadingBarId {
fn drop(&mut self) {
let loader_uuid = self.0;
tokio::spawn(async move {
if let Ok(event_state) = EventState::get().await {
let mut bars = event_state.loading_bars.write().await;
#[cfg(any(feature = "tauri", feature = "cli"))]
if let Some(bar) = bars.remove(&loader_uuid) {
#[cfg(feature = "tauri")]
{
let loader_uuid = bar.loading_bar_uuid;
let event = bar.bar_type.clone();
let fraction = bar.current / bar.total;
use tauri::Manager;
let _ = event_state.app.emit_all(
"loading",
LoadingPayload {
fraction: None,
message: "Completed".to_string(),
event,
loader_uuid,
},
);
tracing::trace!(
"Exited at {fraction} for loading bar: {:?}",
loader_uuid
);
}
// Finish the indicatif progress bar
#[cfg(feature = "cli")]
{
let cli_progress_bar = bar.cli_progress_bar;
cli_progress_bar.finish();
}
}
#[cfg(not(any(feature = "tauri", feature = "cli")))]
bars.remove(&loader_uuid);
}
// complete() accesses State, and since a LoadingBarId is created during state initialization, we only complete if it's already initialized
// to avoid an infinite loop.
if crate::State::initialized() {
let _ = SafeProcesses::complete(
crate::state::ProcessType::LoadingBar,
loader_uuid,
)
.await;
}
});
}
}
#[derive(Serialize, Deserialize, Clone, Debug, Hash, PartialEq, Eq)]
#[serde(tag = "type")]
#[serde(rename_all = "snake_case")]
pub enum LoadingBarType {
StateInit,
JavaDownload {
version: u32,
},
PackFileDownload {
profile_path: PathBuf,
pack_name: String,
icon: Option<String>,
pack_version: String,
},
PackDownload {
profile_path: PathBuf,
pack_name: String,
icon: Option<PathBuf>,
pack_id: Option<String>,
pack_version: Option<String>,
},
MinecraftDownload {
profile_path: PathBuf,
profile_name: String,
},
ProfileUpdate {
profile_path: PathBuf,
profile_name: String,
},
ZipExtract {
profile_path: PathBuf,
profile_name: String,
},
ConfigChange {
new_path: PathBuf,
},
CopyProfile {
import_location: PathBuf,
profile_name: String,
},
}
#[derive(Serialize, Clone)]
pub struct LoadingPayload {
pub event: LoadingBarType,
pub loader_uuid: Uuid,
pub fraction: Option<f64>, // by convention, if optional, it means the loading is done
pub message: String,
}
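// Illustrative sketch (not part of the original file): with the serde attributes above, one of
// these payloads serializes roughly as follows (values are made up):
//
//   {
//     "event": { "type": "java_download", "version": 17 },
//     "loader_uuid": "67e55044-10b1-426f-9247-bb680e5fe0c8",
//     "fraction": 0.25,
//     "message": "Downloading Java 17"
//   }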
#[derive(Serialize, Clone)]
pub struct OfflinePayload {
pub offline: bool,
}
#[derive(Serialize, Clone)]
pub struct WarningPayload {
pub message: String,
}
#[derive(Serialize, Clone)]
#[serde(tag = "event")]
pub enum CommandPayload {
InstallMod {
id: String,
},
InstallVersion {
id: String,
},
InstallModpack {
id: String,
},
RunMRPack {
// run or install .mrpack
path: PathBuf,
},
}
#[derive(Serialize, Clone)]
pub struct ProcessPayload {
pub uuid: Uuid, // processes in state are identified by UUIDs rather than PIDs, as the underlying process may change
pub pid: u32,
pub event: ProcessPayloadType,
pub message: String,
}
#[derive(Serialize, Clone, Debug)]
#[serde(rename_all = "snake_case")]
pub enum ProcessPayloadType {
Launched,
Updated, // eg: if the MinecraftChild changes to its post-command process instead of the Minecraft process
Finished,
}
#[derive(Serialize, Clone)]
pub struct ProfilePayload {
pub uuid: Uuid,
pub profile_path_id: ProfilePathId,
pub path: PathBuf,
pub name: String,
pub event: ProfilePayloadType,
}
#[derive(Serialize, Clone)]
#[serde(rename_all = "snake_case")]
pub enum ProfilePayloadType {
Created,
Added, // also triggered when Created
Synced,
Edited,
Removed,
}
#[derive(Debug, thiserror::Error)]
pub enum EventError {
#[error("Event state was not properly initialized")]
NotInitialized,
#[error("Non-existent loading bar of key: {0}")]
NoLoadingBar(Uuid),
#[cfg(feature = "tauri")]
#[error("Tauri error: {0}")]
TauriError(#[from] tauri::Error),
}

View File

@@ -0,0 +1,436 @@
//! Minecraft CLI argument logic
use crate::launcher::parse_rules;
use crate::state::Credentials;
use crate::{
state::{MemorySettings, WindowSize},
util::{io::IOError, platform::classpath_separator},
};
use daedalus::{
get_path_from_artifact,
minecraft::{Argument, ArgumentValue, Library, VersionType},
modded::SidedDataEntry,
};
use dunce::canonicalize;
use std::collections::HashSet;
use std::io::{BufRead, BufReader};
use std::{collections::HashMap, path::Path};
use uuid::Uuid;
// Replaces the space separator with a newline character, as to not split the arguments
const TEMPORARY_REPLACE_CHAR: &str = "\n";
pub fn get_class_paths(
libraries_path: &Path,
libraries: &[Library],
client_path: &Path,
java_arch: &str,
minecraft_updated: bool,
) -> crate::Result<String> {
let mut cps = libraries
.iter()
.filter_map(|library| {
if let Some(rules) = &library.rules {
if !parse_rules(rules, java_arch, minecraft_updated) {
return None;
}
}
if !library.include_in_classpath {
return None;
}
Some(get_lib_path(libraries_path, &library.name, false))
})
.collect::<Result<HashSet<_>, _>>()?;
cps.insert(
canonicalize(client_path)
.map_err(|_| {
crate::ErrorKind::LauncherError(format!(
"Specified class path {} does not exist",
client_path.to_string_lossy()
))
.as_error()
})?
.to_string_lossy()
.to_string(),
);
Ok(cps
.into_iter()
.collect::<Vec<_>>()
.join(classpath_separator(java_arch)))
}
pub fn get_class_paths_jar<T: AsRef<str>>(
libraries_path: &Path,
libraries: &[T],
java_arch: &str,
) -> crate::Result<String> {
let cps = libraries
.iter()
.map(|library| get_lib_path(libraries_path, library.as_ref(), false))
.collect::<Result<Vec<_>, _>>()?;
Ok(cps.join(classpath_separator(java_arch)))
}
pub fn get_lib_path(
libraries_path: &Path,
lib: &str,
allow_not_exist: bool,
) -> crate::Result<String> {
let mut path = libraries_path.to_path_buf();
path.push(get_path_from_artifact(lib)?);
if !path.exists() && allow_not_exist {
return Ok(path.to_string_lossy().to_string());
}
let path = &canonicalize(&path).map_err(|_| {
crate::ErrorKind::LauncherError(format!(
"Library file at path {} does not exist",
path.to_string_lossy()
))
.as_error()
})?;
Ok(path.to_string_lossy().to_string())
}
#[allow(clippy::too_many_arguments)]
pub fn get_jvm_arguments(
arguments: Option<&[Argument]>,
natives_path: &Path,
libraries_path: &Path,
class_paths: &str,
version_name: &str,
memory: MemorySettings,
custom_args: Vec<String>,
java_arch: &str,
) -> crate::Result<Vec<String>> {
let mut parsed_arguments = Vec::new();
if let Some(args) = arguments {
parse_arguments(
args,
&mut parsed_arguments,
|arg| {
parse_jvm_argument(
arg.to_string(),
natives_path,
libraries_path,
class_paths,
version_name,
java_arch,
)
},
java_arch,
)?;
} else {
parsed_arguments.push(format!(
"-Djava.library.path={}",
canonicalize(natives_path)
.map_err(|_| crate::ErrorKind::LauncherError(format!(
"Specified natives path {} does not exist",
natives_path.to_string_lossy()
))
.as_error())?
.to_string_lossy()
));
parsed_arguments.push("-cp".to_string());
parsed_arguments.push(class_paths.to_string());
}
parsed_arguments.push(format!("-Xmx{}M", memory.maximum));
for arg in custom_args {
if !arg.is_empty() {
parsed_arguments.push(arg);
}
}
Ok(parsed_arguments)
}
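// Illustrative note (not part of the original file): when the version manifest provides no JVM
// argument list, and assuming memory.maximum = 2048 with no custom args, the result is roughly
//   ["-Djava.library.path=<canonicalized natives dir>", "-cp", "<class paths>", "-Xmx2048M"]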
fn parse_jvm_argument(
mut argument: String,
natives_path: &Path,
libraries_path: &Path,
class_paths: &str,
version_name: &str,
java_arch: &str,
) -> crate::Result<String> {
argument.retain(|c| !c.is_whitespace());
Ok(argument
.replace(
"${natives_directory}",
&canonicalize(natives_path)
.map_err(|_| {
crate::ErrorKind::LauncherError(format!(
"Specified natives path {} does not exist",
natives_path.to_string_lossy()
))
.as_error()
})?
.to_string_lossy(),
)
.replace(
"${library_directory}",
&canonicalize(libraries_path)
.map_err(|_| {
crate::ErrorKind::LauncherError(format!(
"Specified libraries path {} does not exist",
libraries_path.to_string_lossy()
))
.as_error()
})?
.to_string_lossy(),
)
.replace("${classpath_separator}", classpath_separator(java_arch))
.replace("${launcher_name}", "theseus")
.replace("${launcher_version}", env!("CARGO_PKG_VERSION"))
.replace("${version_name}", version_name)
.replace("${classpath}", class_paths))
}
#[allow(clippy::too_many_arguments)]
pub fn get_minecraft_arguments(
arguments: Option<&[Argument]>,
legacy_arguments: Option<&str>,
credentials: &Credentials,
version: &str,
asset_index_name: &str,
game_directory: &Path,
assets_directory: &Path,
version_type: &VersionType,
resolution: WindowSize,
java_arch: &str,
) -> crate::Result<Vec<String>> {
if let Some(arguments) = arguments {
let mut parsed_arguments = Vec::new();
parse_arguments(
arguments,
&mut parsed_arguments,
|arg| {
parse_minecraft_argument(
arg,
&credentials.access_token,
&credentials.username,
credentials.id,
version,
asset_index_name,
game_directory,
assets_directory,
version_type,
resolution,
)
},
java_arch,
)?;
Ok(parsed_arguments)
} else if let Some(legacy_arguments) = legacy_arguments {
let mut parsed_arguments = Vec::new();
for x in legacy_arguments.split(' ') {
parsed_arguments.push(parse_minecraft_argument(
&x.replace(' ', TEMPORARY_REPLACE_CHAR),
&credentials.access_token,
&credentials.username,
credentials.id,
version,
asset_index_name,
game_directory,
assets_directory,
version_type,
resolution,
)?);
}
Ok(parsed_arguments)
} else {
Ok(Vec::new())
}
}
#[allow(clippy::too_many_arguments)]
fn parse_minecraft_argument(
argument: &str,
access_token: &str,
username: &str,
uuid: Uuid,
version: &str,
asset_index_name: &str,
game_directory: &Path,
assets_directory: &Path,
version_type: &VersionType,
resolution: WindowSize,
) -> crate::Result<String> {
Ok(argument
.replace("${accessToken}", access_token)
.replace("${auth_access_token}", access_token)
.replace("${auth_session}", access_token)
.replace("${auth_player_name}", username)
// TODO: add auth xuid eventually
.replace("${auth_xuid}", "0")
.replace("${auth_uuid}", &uuid.simple().to_string())
.replace("${uuid}", &uuid.simple().to_string())
.replace("${clientid}", "c4502edb-87c6-40cb-b595-64a280cf8906")
.replace("${user_properties}", "{}")
.replace("${user_type}", "msa")
.replace("${version_name}", version)
.replace("${assets_index_name}", asset_index_name)
.replace(
"${game_directory}",
&canonicalize(game_directory)
.map_err(|_| {
crate::ErrorKind::LauncherError(format!(
"Specified game directory {} does not exist",
game_directory.to_string_lossy()
))
.as_error()
})?
.to_string_lossy(),
)
.replace(
"${assets_root}",
&canonicalize(assets_directory)
.map_err(|_| {
crate::ErrorKind::LauncherError(format!(
"Specified assets directory {} does not exist",
assets_directory.to_string_lossy()
))
.as_error()
})?
.to_string_lossy(),
)
.replace(
"${game_assets}",
&canonicalize(assets_directory)
.map_err(|_| {
crate::ErrorKind::LauncherError(format!(
"Specified assets directory {} does not exist",
assets_directory.to_string_lossy()
))
.as_error()
})?
.to_string_lossy(),
)
.replace("${version_type}", version_type.as_str())
.replace("${resolution_width}", &resolution.0.to_string())
.replace("${resolution_height}", &resolution.1.to_string()))
}
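// Illustrative note (not part of the original file): given the replacements above, a template
// argument such as "--username ${auth_player_name}" becomes "--username <the account's username>",
// and "${version_type}" becomes e.g. "release".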
fn parse_arguments<F>(
arguments: &[Argument],
parsed_arguments: &mut Vec<String>,
parse_function: F,
java_arch: &str,
) -> crate::Result<()>
where
F: Fn(&str) -> crate::Result<String>,
{
for argument in arguments {
match argument {
Argument::Normal(arg) => {
let parsed =
parse_function(&arg.replace(' ', TEMPORARY_REPLACE_CHAR))?;
for arg in parsed.split(TEMPORARY_REPLACE_CHAR) {
parsed_arguments.push(arg.to_string());
}
}
Argument::Ruled { rules, value } => {
if parse_rules(rules, java_arch, true) {
match value {
ArgumentValue::Single(arg) => {
parsed_arguments.push(parse_function(
&arg.replace(' ', TEMPORARY_REPLACE_CHAR),
)?);
}
ArgumentValue::Many(args) => {
for arg in args {
parsed_arguments.push(parse_function(
&arg.replace(' ', TEMPORARY_REPLACE_CHAR),
)?);
}
}
}
}
}
}
}
Ok(())
}
pub fn get_processor_arguments<T: AsRef<str>>(
libraries_path: &Path,
arguments: &[T],
data: &HashMap<String, SidedDataEntry>,
) -> crate::Result<Vec<String>> {
let mut new_arguments = Vec::new();
for argument in arguments {
let trimmed_arg = &argument.as_ref()[1..argument.as_ref().len() - 1];
if argument.as_ref().starts_with('{') {
if let Some(entry) = data.get(trimmed_arg) {
new_arguments.push(if entry.client.starts_with('[') {
get_lib_path(
libraries_path,
&entry.client[1..entry.client.len() - 1],
true,
)?
} else {
entry.client.clone()
})
}
} else if argument.as_ref().starts_with('[') {
new_arguments.push(get_lib_path(libraries_path, trimmed_arg, true)?)
} else {
new_arguments.push(argument.as_ref().to_string())
}
}
Ok(new_arguments)
}
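// Illustrative sketch (not part of the original file): assuming data maps "SIDE" to a client
// value of "client", an argument list like ["{SIDE}", "[de.example:lib:1.0]", "--flag"]
// (hypothetical coordinates) resolves roughly to
// ["client", "<libraries_path>/de/example/lib/1.0/lib-1.0.jar", "--flag"],
// with the Maven-style path coming from daedalus::get_path_from_artifact.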
pub async fn get_processor_main_class(
path: String,
) -> crate::Result<Option<String>> {
let main_class = tokio::task::spawn_blocking(move || {
let zipfile = std::fs::File::open(&path)
.map_err(|e| IOError::with_path(e, &path))?;
let mut archive = zip::ZipArchive::new(zipfile).map_err(|_| {
crate::ErrorKind::LauncherError(format!(
"Cannot read processor at {}",
path
))
.as_error()
})?;
let file = archive.by_name("META-INF/MANIFEST.MF").map_err(|_| {
crate::ErrorKind::LauncherError(format!(
"Cannot read processor manifest at {}",
path
))
.as_error()
})?;
let reader = BufReader::new(file);
for line in reader.lines() {
let mut line = line.map_err(IOError::from)?;
line.retain(|c| !c.is_whitespace());
if line.starts_with("Main-Class:") {
if let Some(class) = line.split(':').nth(1) {
return Ok(Some(class.to_string()));
}
}
}
Ok::<Option<String>, crate::Error>(None)
})
.await??;
Ok(main_class)
}
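// Illustrative note (not part of the original file): this picks up a manifest line such as
// "Main-Class: com.example.Main" (hypothetical); after the whitespace strip above it becomes
// "Main-Class:com.example.Main", so splitting on ':' yields "com.example.Main".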

View File

@@ -0,0 +1,353 @@
//! Downloader for Minecraft data
use crate::launcher::parse_rules;
use crate::state::CredentialsStore;
use crate::{
event::{
emit::{emit_loading, loading_try_for_each_concurrent},
LoadingBarId,
},
state::State,
util::{fetch::*, io, platform::OsExt},
};
use daedalus::{
self as d,
minecraft::{
Asset, AssetsIndex, Library, Os, Version as GameVersion,
VersionInfo as GameVersionInfo,
},
modded::LoaderVersion,
};
use futures::prelude::*;
use tokio::sync::OnceCell;
#[tracing::instrument(skip(st, version))]
pub async fn download_minecraft(
st: &State,
version: &GameVersionInfo,
loading_bar: &LoadingBarId,
java_arch: &str,
force: bool,
minecraft_updated: bool,
) -> crate::Result<()> {
tracing::info!("Downloading Minecraft version {}", version.id);
// 5
let assets_index =
download_assets_index(st, version, Some(loading_bar), force).await?;
let amount = if version
.processors
.as_ref()
.map(|x| !x.is_empty())
.unwrap_or(false)
{
25.0
} else {
40.0
};
tokio::try_join! {
// Total loading across these three downloads sums to 90, or 60 when forge processors will run afterwards
download_client(st, version, Some(loading_bar), force), // 10
download_assets(st, version.assets == "legacy", &assets_index, Some(loading_bar), amount, force), // 40
download_libraries(st, version.libraries.as_slice(), &version.id, Some(loading_bar), amount, java_arch, force, minecraft_updated) // 40
}?;
tracing::info!("Done downloading Minecraft!");
Ok(())
}
#[tracing::instrument(skip_all, fields(version = version.id.as_str(), loader = ?loader))]
#[theseus_macros::debug_pin]
pub async fn download_version_info(
st: &State,
version: &GameVersion,
loader: Option<&LoaderVersion>,
force: Option<bool>,
loading_bar: Option<&LoadingBarId>,
) -> crate::Result<GameVersionInfo> {
let version_id = loader
.map_or(version.id.clone(), |it| format!("{}-{}", version.id, it.id));
tracing::debug!("Loading version info for Minecraft {version_id}");
let path = st
.directories
.version_dir(&version_id)
.await
.join(format!("{version_id}.json"));
let res = if path.exists() && !force.unwrap_or(false) {
io::read(path)
.err_into::<crate::Error>()
.await
.and_then(|ref it| Ok(serde_json::from_slice(it)?))
} else {
tracing::info!("Downloading version info for version {}", &version.id);
let mut info = d::minecraft::fetch_version_info(version).await?;
if let Some(loader) = loader {
let partial = d::modded::fetch_partial_version(&loader.url).await?;
info = d::modded::merge_partial_version(partial, info);
}
info.id = version_id.clone();
write(&path, &serde_json::to_vec(&info)?, &st.io_semaphore).await?;
Ok(info)
}?;
if let Some(loading_bar) = loading_bar {
emit_loading(loading_bar, 5.0, None).await?;
}
tracing::debug!("Loaded version info for Minecraft {version_id}");
Ok(res)
}
#[tracing::instrument(skip_all)]
#[theseus_macros::debug_pin]
pub async fn download_client(
st: &State,
version_info: &GameVersionInfo,
loading_bar: Option<&LoadingBarId>,
force: bool,
) -> crate::Result<()> {
let version = &version_info.id;
tracing::debug!("Locating client for version {version}");
let client_download = version_info
.downloads
.get(&d::minecraft::DownloadType::Client)
.ok_or(
crate::ErrorKind::LauncherError(format!(
"No client downloads exist for version {version}"
))
.as_error(),
)?;
let path = st
.directories
.version_dir(version)
.await
.join(format!("{version}.jar"));
if !path.exists() || force {
let bytes = fetch(
&client_download.url,
Some(&client_download.sha1),
&st.fetch_semaphore,
&CredentialsStore(None),
)
.await?;
write(&path, &bytes, &st.io_semaphore).await?;
tracing::trace!("Fetched client version {version}");
}
if let Some(loading_bar) = loading_bar {
emit_loading(loading_bar, 9.0, None).await?;
}
tracing::debug!("Client loaded for version {version}!");
Ok(())
}
#[tracing::instrument(skip_all)]
#[theseus_macros::debug_pin]
pub async fn download_assets_index(
st: &State,
version: &GameVersionInfo,
loading_bar: Option<&LoadingBarId>,
force: bool,
) -> crate::Result<AssetsIndex> {
tracing::debug!("Loading assets index");
let path = st
.directories
.assets_index_dir()
.await
.join(format!("{}.json", &version.asset_index.id));
let res = if path.exists() && !force {
io::read(path)
.err_into::<crate::Error>()
.await
.and_then(|ref it| Ok(serde_json::from_slice(it)?))
} else {
let index = d::minecraft::fetch_assets_index(version).await?;
write(&path, &serde_json::to_vec(&index)?, &st.io_semaphore).await?;
tracing::info!("Fetched assets index");
Ok(index)
}?;
if let Some(loading_bar) = loading_bar {
emit_loading(loading_bar, 5.0, None).await?;
}
tracing::debug!("Assets index successfully loaded!");
Ok(res)
}
#[tracing::instrument(skip(st, index))]
#[theseus_macros::debug_pin]
pub async fn download_assets(
st: &State,
with_legacy: bool,
index: &AssetsIndex,
loading_bar: Option<&LoadingBarId>,
loading_amount: f64,
force: bool,
) -> crate::Result<()> {
tracing::debug!("Loading assets");
let num_futs = index.objects.len();
let assets = stream::iter(index.objects.iter())
.map(Ok::<(&String, &Asset), crate::Error>);
loading_try_for_each_concurrent(assets,
None,
loading_bar,
loading_amount,
num_futs,
None,
|(name, asset)| async move {
let hash = &asset.hash;
let resource_path = st.directories.object_dir(hash).await;
let url = format!(
"https://resources.download.minecraft.net/{sub_hash}/{hash}",
sub_hash = &hash[..2]
);
let fetch_cell = OnceCell::<bytes::Bytes>::new();
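// The OnceCell ensures the asset bytes are fetched at most once, even when both the
// object-store copy and the legacy copy below need to be written.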
tokio::try_join! {
async {
if !resource_path.exists() || force {
let resource = fetch_cell
.get_or_try_init(|| fetch(&url, Some(hash), &st.fetch_semaphore, &CredentialsStore(None)))
.await?;
write(&resource_path, resource, &st.io_semaphore).await?;
tracing::trace!("Fetched asset with hash {hash}");
}
Ok::<_, crate::Error>(())
},
async {
let resource_path = st.directories.legacy_assets_dir().await.join(
name.replace('/', &String::from(std::path::MAIN_SEPARATOR))
);
if with_legacy && (!resource_path.exists() || force) {
let resource = fetch_cell
.get_or_try_init(|| fetch(&url, Some(hash), &st.fetch_semaphore, &CredentialsStore(None)))
.await?;
write(&resource_path, resource, &st.io_semaphore).await?;
tracing::trace!("Fetched legacy asset with hash {hash}");
}
Ok::<_, crate::Error>(())
},
}?;
tracing::trace!("Loaded asset with hash {hash}");
Ok(())
}).await?;
tracing::debug!("Done loading assets!");
Ok(())
}
#[tracing::instrument(skip(st, libraries))]
#[theseus_macros::debug_pin]
#[allow(clippy::too_many_arguments)]
pub async fn download_libraries(
st: &State,
libraries: &[Library],
version: &str,
loading_bar: Option<&LoadingBarId>,
loading_amount: f64,
java_arch: &str,
force: bool,
minecraft_updated: bool,
) -> crate::Result<()> {
tracing::debug!("Loading libraries");
tokio::try_join! {
io::create_dir_all(st.directories.libraries_dir().await),
io::create_dir_all(st.directories.version_natives_dir(version).await)
}?;
let num_files = libraries.len();
loading_try_for_each_concurrent(
stream::iter(libraries.iter())
.map(Ok::<&Library, crate::Error>), None, loading_bar,loading_amount,num_files, None,|library| async move {
if let Some(rules) = &library.rules {
if !parse_rules(rules, java_arch, minecraft_updated) {
tracing::trace!("Skipped library {}", &library.name);
return Ok(());
}
}
tokio::try_join! {
async {
let artifact_path = d::get_path_from_artifact(&library.name)?;
let path = st.directories.libraries_dir().await.join(&artifact_path);
match library.downloads {
_ if path.exists() && !force => Ok(()),
Some(d::minecraft::LibraryDownloads {
artifact: Some(ref artifact),
..
}) => {
let bytes = fetch(&artifact.url, Some(&artifact.sha1), &st.fetch_semaphore, &CredentialsStore(None))
.await?;
write(&path, &bytes, &st.io_semaphore).await?;
tracing::trace!("Fetched library {} to path {:?}", &library.name, &path);
Ok::<_, crate::Error>(())
}
_ => {
let url = [
library
.url
.as_deref()
.unwrap_or("https://libraries.minecraft.net/"),
&artifact_path
].concat();
let bytes = fetch(&url, None, &st.fetch_semaphore, &CredentialsStore(None)).await?;
write(&path, &bytes, &st.io_semaphore).await?;
tracing::trace!("Fetched library {} to path {:?}", &library.name, &path);
Ok::<_, crate::Error>(())
}
}
},
async {
// HACK: pseudo try block using or else
if let Some((os_key, classifiers)) = None.or_else(|| Some((
library
.natives
.as_ref()?
.get(&Os::native_arch(java_arch))?,
library
.downloads
.as_ref()?
.classifiers
.as_ref()?
))) {
let parsed_key = os_key.replace(
"${arch}",
crate::util::platform::ARCH_WIDTH,
);
if let Some(native) = classifiers.get(&parsed_key) {
let data = fetch(&native.url, Some(&native.sha1), &st.fetch_semaphore, &CredentialsStore(None)).await?;
let reader = std::io::Cursor::new(&data);
if let Ok(mut archive) = zip::ZipArchive::new(reader) {
match archive.extract(st.directories.version_natives_dir(version).await) {
Ok(_) => tracing::debug!("Fetched native {}", &library.name),
Err(err) => tracing::error!("Failed extracting native {}. err: {}", &library.name, err)
}
} else {
tracing::error!("Failed extracting native {}", &library.name)
}
}
}
Ok(())
}
}?;
tracing::debug!("Loaded library {}", library.name);
Ok(())
}
).await?;
tracing::debug!("Done loading libraries!");
Ok(())
}

View File

@@ -0,0 +1,633 @@
//! Logic for launching Minecraft
use crate::event::emit::{emit_loading, init_or_edit_loading};
use crate::event::{LoadingBarId, LoadingBarType};
use crate::launcher::io::IOError;
use crate::prelude::JavaVersion;
use crate::state::{Credentials, ProfileInstallStage};
use crate::util::io;
use crate::{
process,
state::{self as st, MinecraftChild},
State,
};
use chrono::Utc;
use daedalus as d;
use daedalus::minecraft::{RuleAction, VersionInfo};
use st::Profile;
use std::collections::HashMap;
use std::sync::Arc;
use tokio::process::Command;
use uuid::Uuid;
mod args;
pub mod download;
// All nones -> disallowed
// 1+ true -> allowed
// 1+ false -> disallowed
#[tracing::instrument]
pub fn parse_rules(
rules: &[d::minecraft::Rule],
java_version: &str,
minecraft_updated: bool,
) -> bool {
let mut x = rules
.iter()
.map(|x| parse_rule(x, java_version, minecraft_updated))
.collect::<Vec<Option<bool>>>();
if rules
.iter()
.all(|x| matches!(x.action, RuleAction::Disallow))
{
x.push(Some(true))
}
!(x.iter().any(|x| x == &Some(false)) || x.iter().all(|x| x.is_none()))
}
// if anything is disallowed, it should NOT be included
// if anything is not disallowed, it shouldn't factor in final result
// if anything is not allowed, it shouldn't factor in final result
// if anything is allowed, it should be included
#[tracing::instrument]
pub fn parse_rule(
rule: &d::minecraft::Rule,
java_version: &str,
minecraft_updated: bool,
) -> Option<bool> {
use d::minecraft::{Rule, RuleAction};
let res = match rule {
Rule {
os: Some(ref os), ..
} => {
crate::util::platform::os_rule(os, java_version, minecraft_updated)
}
Rule {
features: Some(ref features),
..
} => {
!features.is_demo_user.unwrap_or(true)
|| features.has_custom_resolution.unwrap_or(false)
|| !features.has_quick_plays_support.unwrap_or(true)
|| !features.is_quick_play_multiplayer.unwrap_or(true)
|| !features.is_quick_play_realms.unwrap_or(true)
|| !features.is_quick_play_singleplayer.unwrap_or(true)
}
_ => return Some(true),
};
match rule.action {
RuleAction::Allow => {
if res {
Some(true)
} else {
None
}
}
RuleAction::Disallow => {
if res {
Some(false)
} else {
None
}
}
}
}
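// Illustrative note (not part of the original file): with the semantics above, a single rule
// like { action: Allow, os: "osx" } yields Some(true) on macOS and None elsewhere, so
// parse_rules returns true only on macOS (an all-None result collapses to disallowed).
// The all-Disallow special case in parse_rules keeps a list of pure Disallow rules allowed
// unless one of them actually matches.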
macro_rules! processor_rules {
($dest:expr; $($name:literal : client => $client:expr, server => $server:expr;)+) => {
$(std::collections::HashMap::insert(
$dest,
String::from($name),
daedalus::modded::SidedDataEntry {
client: String::from($client),
server: String::from($server),
},
);)+
}
}
pub async fn get_java_version_from_profile(
profile: &Profile,
version_info: &VersionInfo,
) -> crate::Result<Option<JavaVersion>> {
if let Some(java) = profile.java.clone().and_then(|x| x.override_version) {
Ok(Some(java))
} else {
let key = version_info
.java_version
.as_ref()
.map(|it| it.major_version)
.unwrap_or(8);
let state = State::get().await?;
let settings = state.settings.read().await;
if let Some(java) = settings.java_globals.get(&format!("JAVA_{key}")) {
return Ok(Some(java.clone()));
}
Ok(None)
}
}
#[tracing::instrument(skip(profile))]
#[theseus_macros::debug_pin]
pub async fn install_minecraft(
profile: &Profile,
existing_loading_bar: Option<LoadingBarId>,
repairing: bool,
) -> crate::Result<()> {
let sync_projects = existing_loading_bar.is_some();
let loading_bar = init_or_edit_loading(
existing_loading_bar,
LoadingBarType::MinecraftDownload {
// If we are downloading minecraft for a profile, provide its name and uuid
profile_name: profile.metadata.name.clone(),
profile_path: profile.get_profile_full_path().await?,
},
100.0,
"Downloading Minecraft",
)
.await?;
crate::api::profile::edit(&profile.profile_id(), |prof| {
prof.install_stage = ProfileInstallStage::Installing;
async { Ok(()) }
})
.await?;
State::sync().await?;
if sync_projects {
Profile::sync_projects_task(profile.profile_id(), true);
}
let state = State::get().await?;
let instance_path =
&io::canonicalize(profile.get_profile_full_path().await?)?;
let metadata = state.metadata.read().await;
let version_index = metadata
.minecraft
.versions
.iter()
.position(|it| it.id == profile.metadata.game_version)
.ok_or(crate::ErrorKind::LauncherError(format!(
"Invalid game version: {}",
profile.metadata.game_version
)))?;
let version = &metadata.minecraft.versions[version_index];
let minecraft_updated = version_index
<= metadata
.minecraft
.versions
.iter()
.position(|x| x.id == "22w16a")
.unwrap_or(0);
let version_jar = profile
.metadata
.loader_version
.as_ref()
.map_or(version.id.clone(), |it| {
format!("{}-{}", version.id.clone(), it.id.clone())
});
// Download version info (5)
let mut version_info = download::download_version_info(
&state,
version,
profile.metadata.loader_version.as_ref(),
Some(repairing),
Some(&loading_bar),
)
.await?;
// TODO: check if Java exists; if not, install it and add that to the install step
let key = version_info
.java_version
.as_ref()
.map(|it| it.major_version)
.unwrap_or(8);
let (java_version, set_java) = if let Some(java_version) =
get_java_version_from_profile(profile, &version_info).await?
{
(std::path::PathBuf::from(java_version.path), false)
} else {
let path = crate::api::jre::auto_install_java(key).await?;
(path, true)
};
// Test jre version
let java_version = crate::api::jre::check_jre(java_version.clone())
.await?
.ok_or_else(|| {
crate::ErrorKind::LauncherError(format!(
"Java path invalid or non-functional: {:?}",
java_version
))
})?;
if set_java {
{
let mut settings = state.settings.write().await;
settings
.java_globals
.insert(format!("JAVA_{key}"), java_version.clone());
}
State::sync().await?;
}
// Download minecraft (5-90)
download::download_minecraft(
&state,
&version_info,
&loading_bar,
&java_version.architecture,
repairing,
minecraft_updated,
)
.await?;
if let Some(processors) = &version_info.processors {
let client_path = state
.directories
.version_dir(&version_jar)
.await
.join(format!("{version_jar}.jar"));
let libraries_dir = state.directories.libraries_dir().await;
if let Some(ref mut data) = version_info.data {
processor_rules! {
data;
"SIDE":
client => "client",
server => "";
"MINECRAFT_JAR" :
client => client_path.to_string_lossy(),
server => "";
"MINECRAFT_VERSION":
client => profile.metadata.game_version.clone(),
server => "";
"ROOT":
client => instance_path.to_string_lossy(),
server => "";
"LIBRARY_DIR":
client => libraries_dir.to_string_lossy(),
server => "";
}
emit_loading(&loading_bar, 0.0, Some("Running forge processors"))
.await?;
let total_length = processors.len();
// Forge processors (90-100)
for (index, processor) in processors.iter().enumerate() {
if let Some(sides) = &processor.sides {
if !sides.contains(&String::from("client")) {
continue;
}
}
let cp = wrap_ref_builder!(cp = processor.classpath.clone() => {
cp.push(processor.jar.clone())
});
let child = Command::new(&java_version.path)
.arg("-cp")
.arg(args::get_class_paths_jar(
&libraries_dir,
&cp,
&java_version.architecture,
)?)
.arg(
args::get_processor_main_class(args::get_lib_path(
&libraries_dir,
&processor.jar,
false,
)?)
.await?
.ok_or_else(|| {
crate::ErrorKind::LauncherError(format!(
"Could not find processor main class for {}",
processor.jar
))
})?,
)
.args(args::get_processor_arguments(
&libraries_dir,
&processor.args,
data,
)?)
.output()
.await
.map_err(|e| IOError::with_path(e, &java_version.path))
.map_err(|err| {
crate::ErrorKind::LauncherError(format!(
"Error running processor: {err}",
))
})?;
if !child.status.success() {
return Err(crate::ErrorKind::LauncherError(format!(
"Processor error: {}",
String::from_utf8_lossy(&child.stderr)
))
.as_error());
}
emit_loading(
&loading_bar,
30.0 / total_length as f64,
Some(&format!(
"Running forge processor {}/{}",
index, total_length
)),
)
.await?;
}
}
}
crate::api::profile::edit(&profile.profile_id(), |prof| {
prof.install_stage = ProfileInstallStage::Installed;
async { Ok(()) }
})
.await?;
State::sync().await?;
emit_loading(&loading_bar, 1.0, Some("Finished installing")).await?;
Ok(())
}
#[tracing::instrument(skip_all)]
#[theseus_macros::debug_pin]
#[allow(clippy::too_many_arguments)]
pub async fn launch_minecraft(
java_args: &[String],
env_args: &[(String, String)],
mc_set_options: &[(String, String)],
wrapper: &Option<String>,
memory: &st::MemorySettings,
resolution: &st::WindowSize,
credentials: &Credentials,
post_exit_hook: Option<String>,
profile: &Profile,
) -> crate::Result<Arc<tokio::sync::RwLock<MinecraftChild>>> {
if profile.install_stage == ProfileInstallStage::PackInstalling
|| profile.install_stage == ProfileInstallStage::Installing
{
return Err(crate::ErrorKind::LauncherError(
"Profile is still installing".to_string(),
)
.into());
}
if profile.install_stage != ProfileInstallStage::Installed {
install_minecraft(profile, None, false).await?;
}
let state = State::get().await?;
let metadata = state.metadata.read().await;
let instance_path = profile.get_profile_full_path().await?;
let instance_path = &io::canonicalize(instance_path)?;
let version_index = metadata
.minecraft
.versions
.iter()
.position(|it| it.id == profile.metadata.game_version)
.ok_or(crate::ErrorKind::LauncherError(format!(
"Invalid game version: {}",
profile.metadata.game_version
)))?;
let version = &metadata.minecraft.versions[version_index];
let minecraft_updated = version_index
<= metadata
.minecraft
.versions
.iter()
.position(|x| x.id == "22w16a")
.unwrap_or(0);
let version_jar = profile
.metadata
.loader_version
.as_ref()
.map_or(version.id.clone(), |it| {
format!("{}-{}", version.id.clone(), it.id.clone())
});
let version_info = download::download_version_info(
&state,
version,
profile.metadata.loader_version.as_ref(),
None,
None,
)
.await?;
let java_version = get_java_version_from_profile(profile, &version_info)
.await?
.ok_or_else(|| {
crate::ErrorKind::LauncherError(
"Missing correct java installation".to_string(),
)
})?;
// Test jre version
let java_version =
crate::api::jre::check_jre(java_version.path.clone().into())
.await?
.ok_or_else(|| {
crate::ErrorKind::LauncherError(format!(
"Java path invalid or non-functional: {}",
java_version.path
))
})?;
let client_path = state
.directories
.version_dir(&version_jar)
.await
.join(format!("{version_jar}.jar"));
let args = version_info.arguments.clone().unwrap_or_default();
let mut command = match wrapper {
Some(hook) => {
wrap_ref_builder!(it = Command::new(hook) => {it.arg(&java_version.path)})
}
None => Command::new(&java_version.path),
};
let env_args = Vec::from(env_args);
// Check if profile has a running profile, and reject running the command if it does
// Done late so a quick double call doesn't launch two instances
let existing_processes =
process::get_uuids_by_profile_path(profile.profile_id()).await?;
if let Some(uuid) = existing_processes.first() {
return Err(crate::ErrorKind::LauncherError(format!(
"Profile {} is already running at UUID: {uuid}",
profile.profile_id()
))
.as_error());
}
command
.args(
args::get_jvm_arguments(
args.get(&d::minecraft::ArgumentType::Jvm)
.map(|x| x.as_slice()),
&state.directories.version_natives_dir(&version_jar).await,
&state.directories.libraries_dir().await,
&args::get_class_paths(
&state.directories.libraries_dir().await,
version_info.libraries.as_slice(),
&client_path,
&java_version.architecture,
minecraft_updated,
)?,
&version_jar,
*memory,
Vec::from(java_args),
&java_version.architecture,
)?
.into_iter()
.collect::<Vec<_>>(),
)
.arg(version_info.main_class.clone())
.args(
args::get_minecraft_arguments(
args.get(&d::minecraft::ArgumentType::Game)
.map(|x| x.as_slice()),
version_info.minecraft_arguments.as_deref(),
credentials,
&version.id,
&version_info.asset_index.id,
instance_path,
&state.directories.assets_dir().await,
&version.type_,
*resolution,
&java_version.architecture,
)?
.into_iter()
.collect::<Vec<_>>(),
)
.current_dir(instance_path.clone());
// The CARGO-set DYLD_FALLBACK_LIBRARY_PATH breaks Minecraft on macOS during testing on playground
#[cfg(target_os = "macos")]
if std::env::var("CARGO").is_ok() {
command.env_remove("DYLD_FALLBACK_LIBRARY_PATH");
}
// Java options should be set in instance options (the existence of _JAVA_OPTIONS overwrites them)
command.env_remove("_JAVA_OPTIONS");
command.envs(env_args);
// Overwrites entries in the Minecraft options.txt file with the settings from the profile
// Uses 'key:value' syntax, which is not quite YAML
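// e.g. a pair ("fullscreen", "true") becomes the line "fullscreen:true", replacing any existing
// "fullscreen:..." line (illustrative example, not from the original file)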
use regex::Regex;
if !mc_set_options.is_empty() {
let options_path = instance_path.join("options.txt");
let mut options_string = String::new();
if options_path.exists() {
options_string = io::read_to_string(&options_path).await?;
}
for (key, value) in mc_set_options {
let re = Regex::new(&format!(r"(?m)^{}:.*$", regex::escape(key)))?;
// check if the regex exists in the file
if !re.is_match(&options_string) {
// The key was not found in the file, so append it
options_string.push_str(&format!("\n{}:{}", key, value));
} else {
let replaced_string = re
.replace_all(&options_string, &format!("{}:{}", key, value))
.to_string();
options_string = replaced_string;
}
}
io::write(&options_path, options_string).await?;
}
crate::api::profile::edit(&profile.profile_id(), |prof| {
prof.metadata.last_played = Some(Utc::now());
async { Ok(()) }
})
.await?;
State::sync().await?;
let mut censor_strings = HashMap::new();
let username = whoami::username();
censor_strings.insert(
format!("/{}/", username),
"/{COMPUTER_USERNAME}/".to_string(),
);
censor_strings.insert(
format!("\\{}\\", username),
"\\{COMPUTER_USERNAME}\\".to_string(),
);
censor_strings.insert(
credentials.access_token.clone(),
"{MINECRAFT_ACCESS_TOKEN}".to_string(),
);
censor_strings.insert(
credentials.username.clone(),
"{MINECRAFT_USERNAME}".to_string(),
);
censor_strings.insert(
credentials.id.as_simple().to_string(),
"{MINECRAFT_UUID}".to_string(),
);
censor_strings.insert(
credentials.id.as_hyphenated().to_string(),
"{MINECRAFT_UUID}".to_string(),
);
// If in tauri, and the 'minimize on launch' setting is enabled, minimize the window
#[cfg(feature = "tauri")]
{
use crate::EventState;
let window = EventState::get_main_window().await?;
if let Some(window) = window {
let settings = state.settings.read().await;
if settings.hide_on_process {
window.minimize()?;
}
}
}
if !*state.offline.read().await {
// Add game played to discord rich presence
let _ = state
.discord_rpc
.set_activity(&format!("Playing {}", profile.metadata.name), true)
.await;
}
// Create Minecraft child by inserting it into the state
// This also spawns the process and prepares the subsequent processes
let mut state_children = state.children.write().await;
state_children
.insert_new_process(
Uuid::new_v4(),
profile.profile_id(),
command,
post_exit_hook,
censor_strings,
)
.await
}

View File

@@ -0,0 +1,26 @@
/*!
# Theseus
Theseus is a library which provides utilities for launching Minecraft, creating Modrinth modpacks,
and launching Modrinth modpacks
*/
#![warn(unused_import_braces)]
#![deny(unused_must_use)]
#[macro_use]
mod util;
mod api;
mod config;
mod error;
mod event;
mod launcher;
mod logger;
mod state;
pub use api::*;
pub use error::*;
pub use event::{EventState, LoadingBar, LoadingBarType};
pub use logger::start_logger;
pub use state::InnerProjectPathUnix;
pub use state::State;

View File

@@ -0,0 +1,77 @@
/*
tracing is set based on the environment variable RUST_LOG=xxx, which controls how many logs to show
ERROR > WARN > INFO > DEBUG > TRACE
eg. RUST_LOG=info will show info, warn, and error logs
RUST_LOG="theseus=trace" will show *all* messages, but from theseus only (and not dependencies using similar crates)
Error messages returned to Tauri are displayed as traced error logs.
This will also include an attached span trace if the error is from a tracing error and the level is set to info, debug, or trace
on unix:
RUST_LOG="theseus=trace" {run command}
The default filter is "theseus=info" (plus "theseus_gui=info" in development builds), meaning only logs from theseus will be displayed, and at the info level or higher.
*/
use tracing_appender::non_blocking::WorkerGuard;
// Handling for the live development logging
// This will log to the console, and will not log to a file
#[cfg(debug_assertions)]
pub fn start_logger() -> Option<WorkerGuard> {
use tracing_subscriber::prelude::*;
let filter = tracing_subscriber::EnvFilter::try_from_default_env()
.unwrap_or_else(|_| {
tracing_subscriber::EnvFilter::new("theseus=info,theseus_gui=info")
});
let subscriber = tracing_subscriber::registry()
.with(tracing_subscriber::fmt::layer())
.with(filter)
.with(tracing_error::ErrorLayer::default());
tracing::subscriber::set_global_default(subscriber)
.expect("setting default subscriber failed");
None
}
// Handling for the live production logging
// This will log to a file in the logs directory, and will not show any logs in the console
#[cfg(not(debug_assertions))]
pub fn start_logger() -> Option<WorkerGuard> {
use crate::prelude::DirectoryInfo;
use tracing_appender::rolling::{RollingFileAppender, Rotation};
use tracing_subscriber::fmt::time::ChronoLocal;
use tracing_subscriber::prelude::*;
// Initialize and get logs directory path
let logs_dir = if let Some(d) = DirectoryInfo::launcher_logs_dir() {
d
} else {
eprintln!("Could not start logger");
return None;
};
let filter = tracing_subscriber::EnvFilter::try_from_default_env()
.unwrap_or_else(|_| tracing_subscriber::EnvFilter::new("theseus=info"));
let file_appender =
RollingFileAppender::new(Rotation::DAILY, logs_dir, "theseus.log");
let (non_blocking, guard) = tracing_appender::non_blocking(file_appender);
let subscriber = tracing_subscriber::registry()
.with(
tracing_subscriber::fmt::layer()
.with_writer(non_blocking)
.with_ansi(false) // disable ANSI escape codes
.with_timer(ChronoLocal::rfc_3339()),
)
.with(filter)
.with(tracing_error::ErrorLayer::default());
tracing::subscriber::set_global_default(subscriber)
.expect("Setting default subscriber failed");
Some(guard)
}

View File

@@ -0,0 +1,728 @@
use super::{Profile, ProfilePathId};
use chrono::{DateTime, Utc};
use serde::Deserialize;
use serde::Serialize;
use std::{collections::HashMap, sync::Arc};
use tokio::process::Child;
use tokio::process::Command;
use tokio::sync::RwLock;
use crate::event::emit::emit_process;
use crate::event::ProcessPayloadType;
use crate::util::fetch::read_json;
use crate::util::io::IOError;
use crate::{profile, ErrorKind};
use tokio::task::JoinHandle;
use uuid::Uuid;
const PROCESSES_JSON: &str = "processes.json";
// Child processes (instances of Minecraft)
// A wrapper over a Hashmap connecting PID -> MinecraftChild
pub struct Children(HashMap<Uuid, Arc<RwLock<MinecraftChild>>>);
#[derive(Debug)]
pub enum ChildType {
// A child process that is being managed by tokio
TokioChild(Child),
// A child process that was rescued from a cache (e.g. a process that was launched by theseus before the launcher was restarted)
// This may not have all the same functionality as a TokioChild
RescuedPID(u32),
}
#[derive(Serialize, Deserialize, Debug)]
pub struct ProcessCache {
pub pid: u32,
pub uuid: Uuid,
pub start_time: u64,
pub name: String,
pub exe: String,
pub profile_relative_path: ProfilePathId,
pub post_command: Option<String>,
}
impl ChildType {
pub async fn try_wait(&mut self) -> crate::Result<Option<i32>> {
match self {
ChildType::TokioChild(child) => Ok(child
.try_wait()
.map_err(IOError::from)?
.map(|x| x.code().unwrap_or(0))),
ChildType::RescuedPID(pid) => {
let mut system = sysinfo::System::new();
if !system.refresh_process(sysinfo::Pid::from_u32(*pid)) {
return Ok(Some(0));
}
let process = system.process(sysinfo::Pid::from_u32(*pid));
if let Some(process) = process {
if process.status() == sysinfo::ProcessStatus::Run {
Ok(None)
} else {
Ok(Some(0))
}
} else {
Ok(Some(0))
}
}
}
}
pub async fn kill(&mut self) -> crate::Result<()> {
match self {
ChildType::TokioChild(child) => {
Ok(child.kill().await.map_err(IOError::from)?)
}
ChildType::RescuedPID(pid) => {
let mut system = sysinfo::System::new();
if system.refresh_process(sysinfo::Pid::from_u32(*pid)) {
let process = system.process(sysinfo::Pid::from_u32(*pid));
if let Some(process) = process {
process.kill();
}
}
Ok(())
}
}
}
pub fn id(&self) -> Option<u32> {
match self {
ChildType::TokioChild(child) => child.id(),
ChildType::RescuedPID(pid) => Some(*pid),
}
}
// Caches the process so that it can be restored if the launcher is restarted
// Stored in the caches/metadata/processes.json file
pub async fn cache_process(
&self,
uuid: uuid::Uuid,
profile_path_id: ProfilePathId,
post_command: Option<String>,
) -> crate::Result<()> {
let pid = match self {
ChildType::TokioChild(child) => child.id().unwrap_or(0),
ChildType::RescuedPID(pid) => *pid,
};
let state = crate::State::get().await?;
let mut system = sysinfo::System::new();
system.refresh_processes();
let process =
system.process(sysinfo::Pid::from_u32(pid)).ok_or_else(|| {
crate::ErrorKind::LauncherError(format!(
"Could not find process {}",
pid
))
})?;
let start_time = process.start_time();
let name = process.name().to_string();
let Some(path) = process.exe() else {
return Err(ErrorKind::LauncherError(format!(
"Cached process {} has no accessable path",
pid
))
.into());
};
let exe = path.to_string_lossy().to_string();
let cached_process = ProcessCache {
pid,
start_time,
name,
exe,
post_command,
uuid,
profile_relative_path: profile_path_id,
};
let children_path = state
.directories
.caches_meta_dir()
.await
.join(PROCESSES_JSON);
let mut children_caches = if let Ok(children_json) =
read_json::<HashMap<uuid::Uuid, ProcessCache>>(
&children_path,
&state.io_semaphore,
)
.await
{
children_json
} else {
HashMap::new()
};
children_caches.insert(uuid, cached_process);
crate::util::fetch::write(
&children_path,
&serde_json::to_vec(&children_caches)?,
&state.io_semaphore,
)
.await?;
Ok(())
}
// Removes the process from the cache (ie: on process exit)
pub async fn remove_cache(&self, uuid: uuid::Uuid) -> crate::Result<()> {
let state = crate::State::get().await?;
let children_path = state
.directories
.caches_meta_dir()
.await
.join(PROCESSES_JSON);
let mut children_caches = if let Ok(children_json) =
read_json::<HashMap<uuid::Uuid, ProcessCache>>(
&children_path,
&state.io_semaphore,
)
.await
{
children_json
} else {
HashMap::new()
};
children_caches.remove(&uuid);
crate::util::fetch::write(
&children_path,
&serde_json::to_vec(&children_caches)?,
&state.io_semaphore,
)
.await?;
Ok(())
}
}
// Minecraft Child, bundles together the PID, the actual Child, and the easily queryable stdout and stderr streams (if needed)
#[derive(Debug)]
pub struct MinecraftChild {
pub uuid: Uuid,
pub profile_relative_path: ProfilePathId,
pub manager: Option<JoinHandle<crate::Result<i32>>>, // None when future has completed and been handled
pub current_child: Arc<RwLock<ChildType>>,
pub last_updated_playtime: DateTime<Utc>, // The last time we updated the playtime for the associated profile
}
impl Children {
pub fn new() -> Self {
Children(HashMap::new())
}
// Loads cached processes from the caches/metadata/processes.json file, re-inserts them into the hashmap, and removes them from the file
// This will only be called once, on startup. Only processes that match a cached process (name, time started, pid, etc) will be re-inserted
pub async fn rescue_cache(&mut self) -> crate::Result<()> {
let state = crate::State::get().await?;
let children_path = state
.directories
.caches_meta_dir()
.await
.join(PROCESSES_JSON);
let mut children_caches = if let Ok(children_json) =
read_json::<HashMap<uuid::Uuid, ProcessCache>>(
&children_path,
&state.io_semaphore,
)
.await
{
// Overwrite the file with an empty hashmap- we will re-insert the cached processes
let empty = HashMap::<uuid::Uuid, ProcessCache>::new();
crate::util::fetch::write(
&children_path,
&serde_json::to_vec(&empty)?,
&state.io_semaphore,
)
.await?;
// Return the cached processes
children_json
} else {
HashMap::new()
};
for (_, cache) in children_caches.drain() {
let uuid = cache.uuid;
match self.insert_cached_process(cache).await {
Ok(child) => {
self.0.insert(uuid, child);
}
Err(e) => tracing::warn!(
"Failed to rescue cached process {}: {}",
uuid,
e
),
}
}
Ok(())
}
// Runs the command as a new process, inserts a child to keep track of it, and returns a reference to the container struct MinecraftChild
// The manager task that waits on the process (and any post-exit command) is spawned here
// Unlike a HashMap's 'insert', this directly returns the reference to the MinecraftChild rather than any previously stored MinecraftChild that may exist
#[tracing::instrument(skip(
self,
uuid,
mc_command,
post_command,
censor_strings
))]
#[tracing::instrument(level = "trace", skip(self))]
#[theseus_macros::debug_pin]
pub async fn insert_new_process(
&mut self,
uuid: Uuid,
profile_relative_path: ProfilePathId,
mut mc_command: Command,
post_command: Option<String>, // Command to run after minecraft.
censor_strings: HashMap<String, String>,
) -> crate::Result<Arc<RwLock<MinecraftChild>>> {
// Spawns the Minecraft command as a child process
let mc_proc = mc_command.spawn().map_err(IOError::from)?;
let child = ChildType::TokioChild(mc_proc);
// Slots child into manager
let pid = child.id().ok_or_else(|| {
crate::ErrorKind::LauncherError(
"Process immediately failed, could not get PID".to_string(),
)
})?;
// Caches process so that it can be restored if the launcher is restarted
child
.cache_process(
uuid,
profile_relative_path.clone(),
post_command.clone(),
)
.await?;
let current_child = Arc::new(RwLock::new(child));
let manager = Some(tokio::spawn(Self::sequential_process_manager(
uuid,
post_command,
pid,
current_child.clone(),
profile_relative_path.clone(),
)));
emit_process(
uuid,
pid,
ProcessPayloadType::Launched,
"Launched Minecraft",
)
.await?;
let last_updated_playtime = Utc::now();
// Create MinecraftChild
let mchild = MinecraftChild {
uuid,
profile_relative_path,
current_child,
manager,
last_updated_playtime,
};
let mchild = Arc::new(RwLock::new(mchild));
self.0.insert(uuid, mchild.clone());
Ok(mchild)
}
// Rescues a cached process, inserts a child process to keep track of, and returns a reference to the container struct MinecraftChild
// Essentially 'reconnects' to a process that was launched by theseus before the launcher was restarted
// However, this may not have all the same functionality as a TokioChild, as we only have the PID and not the actual Child
// Only processes that match a cached process (name, time started, pid, etc) will be re-inserted. The function fails with an error if the process is notably different.
#[tracing::instrument(skip(self, cached_process,))]
#[tracing::instrument(level = "trace", skip(self))]
#[theseus_macros::debug_pin]
pub async fn insert_cached_process(
&mut self,
cached_process: ProcessCache,
) -> crate::Result<Arc<RwLock<MinecraftChild>>> {
let _state = crate::State::get().await?;
// Checks running processes, and compares the cached process to the actual process
// Fails if notably different (meaning that the PID was reused, and we shouldn't reconnect to it)
{
let mut system = sysinfo::System::new();
system.refresh_processes();
let process = system
.process(sysinfo::Pid::from_u32(cached_process.pid))
.ok_or_else(|| {
crate::ErrorKind::LauncherError(format!(
"Could not find process {}",
cached_process.pid
))
})?;
if cached_process.start_time != process.start_time() {
return Err(ErrorKind::LauncherError(format!("Cached process {} has different start time than actual process {}", cached_process.pid, process.start_time())).into());
}
if cached_process.name != process.name() {
return Err(ErrorKind::LauncherError(format!("Cached process {} has different name than actual process {}", cached_process.pid, process.name())).into());
}
if let Some(path) = process.exe() {
if cached_process.exe != path.to_string_lossy() {
return Err(ErrorKind::LauncherError(format!("Cached process {} has different exe than actual process {}", cached_process.pid, path.to_string_lossy())).into());
}
} else {
return Err(ErrorKind::LauncherError(format!(
"Cached process {} has no accessable path",
cached_process.pid
))
.into());
}
}
let child = ChildType::RescuedPID(cached_process.pid);
// Slots child into manager
let pid = child.id().ok_or_else(|| {
crate::ErrorKind::LauncherError(
"Process immediately failed, could not get PID".to_string(),
)
})?;
// Re-caches process so that it can be restored if the launcher is restarted
child
.cache_process(
cached_process.uuid,
cached_process.profile_relative_path.clone(),
cached_process.post_command.clone(),
)
.await?;
let current_child = Arc::new(RwLock::new(child));
let manager = Some(tokio::spawn(Self::sequential_process_manager(
cached_process.uuid,
cached_process.post_command,
pid,
current_child.clone(),
cached_process.profile_relative_path.clone(),
)));
emit_process(
cached_process.uuid,
pid,
ProcessPayloadType::Launched,
"Launched Minecraft",
)
.await?;
let last_updated_playtime = Utc::now();
// Create MinecraftChild
let mchild = MinecraftChild {
uuid: cached_process.uuid,
profile_relative_path: cached_process.profile_relative_path,
current_child,
manager,
last_updated_playtime,
};
let mchild = Arc::new(RwLock::new(mchild));
self.0.insert(cached_process.uuid, mchild.clone());
Ok(mchild)
}
// Waits on the current child process, updating playtime while it runs
// As the process ends, it spawns the follow-up (post) command if one exists
// By convention, the returned exit status is the last command's, and we exit on the first non-zero exit status
#[tracing::instrument(skip(current_child))]
#[theseus_macros::debug_pin]
async fn sequential_process_manager(
uuid: Uuid,
post_command: Option<String>,
mut current_pid: u32,
current_child: Arc<RwLock<ChildType>>,
associated_profile: ProfilePathId,
) -> crate::Result<i32> {
let current_child = current_child.clone();
// Wait on current Minecraft Child
let mut mc_exit_status;
let mut last_updated_playtime = Utc::now();
loop {
if let Some(t) = current_child.write().await.try_wait().await? {
mc_exit_status = t;
break;
}
// sleep for 50ms between polls
tokio::time::sleep(tokio::time::Duration::from_millis(50)).await;
// Auto-update playtime every minute
let diff = Utc::now()
.signed_duration_since(last_updated_playtime)
.num_seconds();
if diff >= 60 {
if let Err(e) = profile::edit(&associated_profile, |prof| {
prof.metadata.recent_time_played += diff as u64;
async { Ok(()) }
})
.await
{
tracing::warn!(
"Failed to update playtime for profile {}: {}",
&associated_profile,
e
);
}
last_updated_playtime = Utc::now();
}
}
// Now fully complete- update playtime one last time
let diff = Utc::now()
.signed_duration_since(last_updated_playtime)
.num_seconds();
if let Err(e) = profile::edit(&associated_profile, |prof| {
prof.metadata.recent_time_played += diff as u64;
async { Ok(()) }
})
.await
{
tracing::warn!(
"Failed to update playtime for profile {}: {}",
&associated_profile,
e
);
}
// Publish play time update
// Allow failure, it will be stored locally and sent next time
// Sent in another thread as first call may take a couple seconds and hold up process ending
let associated_profile_clone = associated_profile.clone();
tokio::spawn(async move {
if let Err(e) =
profile::try_update_playtime(&associated_profile_clone.clone())
.await
{
tracing::warn!(
"Failed to update playtime for profile {}: {}",
&associated_profile_clone,
e
);
}
});
{
// Clear game played for Discord RPC
// May have other active processes, so we clear to the next running process
let state = crate::State::get().await?;
let _ = state.discord_rpc.clear_to_default(true).await;
}
// If in tauri, the window should show itself again after the process exits if it was hidden
#[cfg(feature = "tauri")]
{
let window = crate::EventState::get_main_window().await?;
if let Some(window) = window {
window.unminimize()?;
}
}
{
let current_child = current_child.write().await;
current_child.remove_cache(uuid).await?;
}
if mc_exit_status != 0 {
emit_process(
uuid,
current_pid,
ProcessPayloadType::Finished,
"Exited process",
)
.await?;
return Ok(mc_exit_status); // Err for a non-zero exit is handled in helper
}
// If a post-command exists, switch to it and wait on it
// First, create the command by splitting arguments
let post_command = if let Some(hook) = post_command {
let mut cmd = hook.split(' ');
if let Some(command) = cmd.next() {
let mut command = Command::new(command);
command
.args(&cmd.collect::<Vec<&str>>())
.current_dir(associated_profile.get_full_path().await?);
Some(command)
} else {
None
}
} else {
None
};
if let Some(mut m_command) = post_command {
{
let mut current_child: tokio::sync::RwLockWriteGuard<
'_,
ChildType,
> = current_child.write().await;
let new_child = m_command.spawn().map_err(IOError::from)?;
current_pid = new_child.id().ok_or_else(|| {
crate::ErrorKind::LauncherError(
"Process immediately failed, could not get PID"
.to_string(),
)
})?;
*current_child = ChildType::TokioChild(new_child);
}
emit_process(
uuid,
current_pid,
ProcessPayloadType::Updated,
"Completed Minecraft, switching to post-commands",
)
.await?;
loop {
if let Some(t) = current_child.write().await.try_wait().await? {
mc_exit_status = t;
break;
}
// sleep for 10ms
tokio::time::sleep(tokio::time::Duration::from_millis(10))
.await;
}
}
emit_process(
uuid,
current_pid,
ProcessPayloadType::Finished,
"Exited process",
)
.await?;
Ok(mc_exit_status)
}
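// A note on the post-command hook handled above: the stored `post_command` string is split
// on spaces and run from the profile's directory once Minecraft exits with code 0. For
// example (the hook value here is illustrative only), a stored hook of
// "python backup.py --quick" would run `python` with the arguments ["backup.py", "--quick"].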
// Returns a ref to the child
pub fn get(&self, uuid: Uuid) -> Option<Arc<RwLock<MinecraftChild>>> {
self.0.get(&uuid).cloned()
}
// Gets all UUID keys
pub fn keys(&self) -> Vec<Uuid> {
self.0.keys().cloned().collect()
}
// Get exit status of a child by UUID
// Returns None if the child is still running
pub async fn exit_status(&self, uuid: Uuid) -> crate::Result<Option<i32>> {
if let Some(child) = self.get(uuid) {
let child = child.write().await;
let status = child.current_child.write().await.try_wait().await?;
Ok(status)
} else {
Ok(None)
}
}
// Gets all UUID keys of running children
pub async fn running_keys(&self) -> crate::Result<Vec<Uuid>> {
let mut keys = Vec::new();
for key in self.keys() {
if let Some(child) = self.get(key) {
let child = child.clone();
let child = child.write().await;
if child
.current_child
.write()
.await
.try_wait()
.await?
.is_none()
{
keys.push(key);
}
}
}
Ok(keys)
}
// Gets all UUID keys of running children with a given profile path
pub async fn running_keys_with_profile(
&self,
profile_path: ProfilePathId,
) -> crate::Result<Vec<Uuid>> {
let running_keys = self.running_keys().await?;
let mut keys = Vec::new();
for key in running_keys {
if let Some(child) = self.get(key) {
let child = child.clone();
let child = child.read().await;
if child.profile_relative_path == profile_path {
keys.push(key);
}
}
}
Ok(keys)
}
// Gets all profiles of running children
pub async fn running_profile_paths(
&self,
) -> crate::Result<Vec<ProfilePathId>> {
let mut profiles = Vec::new();
for key in self.keys() {
if let Some(child) = self.get(key) {
let child = child.clone();
let child = child.write().await;
if child
.current_child
.write()
.await
.try_wait()
.await?
.is_none()
{
profiles.push(child.profile_relative_path.clone());
}
}
}
Ok(profiles)
}
// Gets all profiles of running children
// Returns clones because it would be serialized anyway
pub async fn running_profiles(&self) -> crate::Result<Vec<Profile>> {
let mut profiles = Vec::new();
for key in self.keys() {
if let Some(child) = self.get(key) {
let child = child.clone();
let child = child.write().await;
if child
.current_child
.write()
.await
.try_wait()
.await?
.is_none()
{
if let Some(prof) = crate::api::profile::get(
&child.profile_relative_path.clone(),
None,
)
.await?
{
profiles.push(prof);
}
}
}
}
Ok(profiles)
}
}
impl Default for Children {
fn default() -> Self {
Self::new()
}
}
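// Illustrative usage (a hedged sketch, not part of this module): callers reach the process
// registry above through the global launcher state, e.g.:
//
//     let state = crate::State::get().await?;
//     let children = state.children.read().await;
//     let running_uuids = children.running_keys().await?;
//     let running_profiles = children.running_profile_paths().await?;
//
// `State::get` and the `children: RwLock<Children>` field are defined in the state module.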

View File

@@ -0,0 +1,211 @@
//! Theseus directory information
use std::fs;
use std::path::PathBuf;
use tokio::sync::RwLock;
use super::{ProfilePathId, Settings};
pub const SETTINGS_FILE_NAME: &str = "settings.json";
pub const CACHES_FOLDER_NAME: &str = "caches";
pub const LAUNCHER_LOGS_FOLDER_NAME: &str = "launcher_logs";
pub const PROFILES_FOLDER_NAME: &str = "profiles";
pub const METADATA_FOLDER_NAME: &str = "meta";
#[derive(Debug)]
pub struct DirectoryInfo {
pub settings_dir: PathBuf, // Base settings directory- settings.json and icon cache.
pub config_dir: RwLock<PathBuf>, // Base config directory- instances, minecraft downloads, etc. Changeable as a setting.
pub working_dir: PathBuf,
}
impl DirectoryInfo {
// Get the settings directory
// init() is not needed for this function
pub fn get_initial_settings_dir() -> Option<PathBuf> {
Self::env_path("THESEUS_CONFIG_DIR")
.or_else(|| Some(dirs::config_dir()?.join("com.modrinth.theseus")))
}
#[inline]
pub fn get_initial_settings_file() -> crate::Result<PathBuf> {
let settings_dir = Self::get_initial_settings_dir().ok_or(
crate::ErrorKind::FSError(
"Could not find valid config dir".to_string(),
),
)?;
Ok(settings_dir.join("settings.json"))
}
/// Get all paths needed for Theseus to operate properly
#[tracing::instrument]
pub fn init(settings: &Settings) -> crate::Result<Self> {
// Working directory
let working_dir = std::env::current_dir().map_err(|err| {
crate::ErrorKind::FSError(format!(
"Could not open working directory: {err}"
))
})?;
let settings_dir = Self::get_initial_settings_dir().ok_or(
crate::ErrorKind::FSError(
"Could not find valid settings dir".to_string(),
),
)?;
fs::create_dir_all(&settings_dir).map_err(|err| {
crate::ErrorKind::FSError(format!(
"Error creating Theseus config directory: {err}"
))
})?;
// config directory (for instances, etc.)
// by default this is the same as the settings directory
let config_dir = settings.loaded_config_dir.clone().ok_or(
crate::ErrorKind::FSError(
"Could not find valid config dir".to_string(),
),
)?;
Ok(Self {
settings_dir,
config_dir: RwLock::new(config_dir),
working_dir,
})
}
/// Get the Minecraft instance metadata directory
#[inline]
pub async fn metadata_dir(&self) -> PathBuf {
self.config_dir.read().await.join(METADATA_FOLDER_NAME)
}
/// Get the Minecraft java versions metadata directory
#[inline]
pub async fn java_versions_dir(&self) -> PathBuf {
self.metadata_dir().await.join("java_versions")
}
/// Get the Minecraft versions metadata directory
#[inline]
pub async fn versions_dir(&self) -> PathBuf {
self.metadata_dir().await.join("versions")
}
/// Get the metadata directory for a given version
#[inline]
pub async fn version_dir(&self, version: &str) -> PathBuf {
self.versions_dir().await.join(version)
}
/// Get the Minecraft libraries metadata directory
#[inline]
pub async fn libraries_dir(&self) -> PathBuf {
self.metadata_dir().await.join("libraries")
}
/// Get the Minecraft assets metadata directory
#[inline]
pub async fn assets_dir(&self) -> PathBuf {
self.metadata_dir().await.join("assets")
}
/// Get the assets index directory
#[inline]
pub async fn assets_index_dir(&self) -> PathBuf {
self.assets_dir().await.join("indexes")
}
/// Get the assets objects directory
#[inline]
pub async fn objects_dir(&self) -> PathBuf {
self.assets_dir().await.join("objects")
}
/// Get the directory for a specific object
#[inline]
pub async fn object_dir(&self, hash: &str) -> PathBuf {
self.objects_dir().await.join(&hash[..2]).join(hash)
}
/// Get the Minecraft legacy assets metadata directory
#[inline]
pub async fn legacy_assets_dir(&self) -> PathBuf {
self.metadata_dir().await.join("resources")
}
/// Get the Minecraft natives metadata directory
#[inline]
pub async fn natives_dir(&self) -> PathBuf {
self.metadata_dir().await.join("natives")
}
/// Get the natives directory for a version of Minecraft
#[inline]
pub async fn version_natives_dir(&self, version: &str) -> PathBuf {
self.natives_dir().await.join(version)
}
/// Get the directory containing instance icons
#[inline]
pub async fn icon_dir(&self) -> PathBuf {
self.config_dir.read().await.join("icons")
}
/// Get the profiles directory for created profiles
#[inline]
pub async fn profiles_dir(&self) -> PathBuf {
self.config_dir.read().await.join(PROFILES_FOLDER_NAME)
}
/// Gets the logs dir for a given profile
#[inline]
pub async fn profile_logs_dir(
profile_id: &ProfilePathId,
) -> crate::Result<PathBuf> {
Ok(profile_id.get_full_path().await?.join("logs"))
}
/// Gets the crash reports dir for a given profile
#[inline]
pub async fn crash_reports_dir(
profile_id: &ProfilePathId,
) -> crate::Result<PathBuf> {
Ok(profile_id.get_full_path().await?.join("crash-reports"))
}
#[inline]
pub fn launcher_logs_dir() -> Option<PathBuf> {
Self::get_initial_settings_dir()
.map(|d| d.join(LAUNCHER_LOGS_FOLDER_NAME))
}
/// Get the file containing the global database
#[inline]
pub async fn database_file(&self) -> PathBuf {
self.config_dir.read().await.join("data.bin")
}
/// Get the settings file for Theseus
#[inline]
pub fn settings_file(&self) -> PathBuf {
self.settings_dir.join(SETTINGS_FILE_NAME)
}
/// Get the cache directory for Theseus
#[inline]
pub fn caches_dir(&self) -> PathBuf {
self.settings_dir.join(CACHES_FOLDER_NAME)
}
#[inline]
pub async fn caches_meta_dir(&self) -> PathBuf {
self.caches_dir().join("metadata")
}
/// Get path from environment variable
#[inline]
fn env_path(name: &str) -> Option<PathBuf> {
std::env::var_os(name).map(PathBuf::from)
}
}
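// Illustrative sketch (for a test or setup context, not part of this file): the settings
// directory can be redirected through the THESEUS_CONFIG_DIR variable read by `env_path`
// above, before falling back to the OS config dir:
//
//     std::env::set_var("THESEUS_CONFIG_DIR", "/tmp/theseus-test");
//     assert_eq!(
//         DirectoryInfo::get_initial_settings_dir(),
//         Some(std::path::PathBuf::from("/tmp/theseus-test"))
//     );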

View File

@@ -0,0 +1,216 @@
use std::sync::{atomic::AtomicBool, Arc};
use discord_rich_presence::{
activity::{Activity, Assets},
DiscordIpc, DiscordIpcClient,
};
use tokio::sync::RwLock;
use crate::State;
pub struct DiscordGuard {
client: Arc<RwLock<DiscordIpcClient>>,
connected: Arc<AtomicBool>,
}
impl DiscordGuard {
/// Initialize discord IPC client, and attempt to connect to it
/// If it fails, it will still return a DiscordGuard, but the client will be unconnected
pub async fn init(is_offline: bool) -> crate::Result<DiscordGuard> {
let mut dipc =
DiscordIpcClient::new("1123683254248148992").map_err(|e| {
crate::ErrorKind::OtherError(format!(
"Could not create Discord client {}",
e,
))
})?;
let connected = if !is_offline {
let res = dipc.connect(); // Do not need to connect to Discord to use app
if res.is_ok() {
Arc::new(AtomicBool::new(true))
} else {
Arc::new(AtomicBool::new(false))
}
} else {
Arc::new(AtomicBool::new(false))
};
let client = Arc::new(RwLock::new(dipc));
Ok(DiscordGuard { client, connected })
}
/// If the client failed connecting during init(), this will check for connection and attempt to reconnect
/// This MUST be called first in any client method that requires a connection, because those can PANIC if the client is not connected
/// (No connection is different than a failed connection, the latter will not panic and can be retried)
pub async fn retry_if_not_ready(&self) -> bool {
let mut client = self.client.write().await;
if !self.connected.load(std::sync::atomic::Ordering::Relaxed) {
if client.connect().is_ok() {
self.connected
.store(true, std::sync::atomic::Ordering::Relaxed);
return true;
}
return false;
}
true
}
/// Checks whether the launcher is currently online (not in offline mode)
pub async fn check_online(&self) -> bool {
let state = match State::get().await {
Ok(s) => s,
Err(_) => return false,
};
let offline = state.offline.read().await;
if *offline {
return false;
}
true
}
/// Set the activity to the given message
/// First checks if Discord is disabled, and if so, clears the activity instead
pub async fn set_activity(
&self,
msg: &str,
reconnect_if_fail: bool,
) -> crate::Result<()> {
if !self.check_online().await {
return Ok(());
}
// Check if discord is disabled, and if so, clear the activity instead
let state = State::get().await?;
let settings = state.settings.read().await;
if settings.disable_discord_rpc {
Ok(self.clear_activity(true).await?)
} else {
Ok(self.force_set_activity(msg, reconnect_if_fail).await?)
}
}
/// Sets the activity to the given message, regardless of whether Discord is disabled or offline
/// Should not be used except in the method above, or if it is already known that Discord is enabled (specifically for state initialization) and we are connected to the internet
pub async fn force_set_activity(
&self,
msg: &str,
reconnect_if_fail: bool,
) -> crate::Result<()> {
// Attempt to connect if not connected. Do not continue if it fails, as the client.set_activity can panic if it never was connected
if !self.retry_if_not_ready().await {
return Ok(());
}
let activity = Activity::new().state(msg).assets(
Assets::new()
.large_image("modrinth_simple")
.large_text("Modrinth Logo"),
);
// Attempt to set the activity
// If the existing connection fails, attempt to reconnect and try again
let mut client: tokio::sync::RwLockWriteGuard<'_, DiscordIpcClient> =
self.client.write().await;
let res = client.set_activity(activity.clone());
let could_not_set_err = |e: Box<dyn serde::ser::StdError>| {
crate::ErrorKind::OtherError(format!(
"Could not update Discord activity {}",
e,
))
};
if reconnect_if_fail {
if let Err(_e) = res {
client.reconnect().map_err(|e| {
crate::ErrorKind::OtherError(format!(
"Could not reconnect to Discord IPC {}",
e,
))
})?;
return Ok(client
.set_activity(activity)
.map_err(could_not_set_err)?); // try again, but don't reconnect if it fails again
}
} else {
res.map_err(could_not_set_err)?;
}
Ok(())
}
/// Clear the activity entirely ('disabling' the RPC until the next set_activity)
pub async fn clear_activity(
&self,
reconnect_if_fail: bool,
) -> crate::Result<()> {
// Attempt to connect if not connected. Do not continue if it fails, as the client.clear_activity can panic if it never was connected
if !self.check_online().await || !self.retry_if_not_ready().await {
return Ok(());
}
// Attempt to clear the activity
// If the existing connection fails, attempt to reconnect and try again
let mut client = self.client.write().await;
let res = client.clear_activity();
let could_not_clear_err = |e: Box<dyn serde::ser::StdError>| {
crate::ErrorKind::OtherError(format!(
"Could not clear Discord activity {}",
e,
))
};
if reconnect_if_fail {
if res.is_err() {
client.reconnect().map_err(|e| {
crate::ErrorKind::OtherError(format!(
"Could not reconnect to Discord IPC {}",
e,
))
})?;
return Ok(client
.clear_activity()
.map_err(could_not_clear_err)?); // try again, but don't reconnect if it fails again
}
} else {
res.map_err(could_not_clear_err)?;
}
Ok(())
}
/// Clear the activity, but if there is a running profile, set the activity to that instead
pub async fn clear_to_default(
&self,
reconnect_if_fail: bool,
) -> crate::Result<()> {
let state: Arc<tokio::sync::RwLockReadGuard<'_, State>> =
State::get().await?;
{
let settings = state.settings.read().await;
if settings.disable_discord_rpc {
println!("Discord is disabled, clearing activity");
return self.clear_activity(true).await;
}
}
if let Some(existing_child) = state
.children
.read()
.await
.running_profile_paths()
.await?
.first()
{
self.set_activity(
&format!("Playing {}", existing_child),
reconnect_if_fail,
)
.await?;
} else {
self.set_activity("Idling...", reconnect_if_fail).await?;
}
Ok(())
}
}
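// Illustrative usage (hedged sketch, assuming an initialized launcher state; the message
// text is made up for the example): the guard is stored on `State` as `discord_rpc`, so
// rich presence updates go through it like so:
//
//     let state = crate::State::get().await?;
//     state.discord_rpc.set_activity("Browsing mods", true).await?;
//     // ...and when nothing relevant is happening any more:
//     state.discord_rpc.clear_to_default(true).await?;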

View File

@@ -0,0 +1,66 @@
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::PathBuf;
use crate::prelude::JavaVersion;
use crate::util::jre;
// All stored Java versions, chosen by the user
// A wrapper over a Hashmap connecting key -> java version
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct JavaGlobals(HashMap<String, JavaVersion>);
impl JavaGlobals {
pub fn new() -> JavaGlobals {
JavaGlobals(HashMap::new())
}
pub fn insert(&mut self, key: String, java: JavaVersion) {
self.0.insert(key, java);
}
pub fn remove(&mut self, key: &String) {
self.0.remove(key);
}
pub fn get(&self, key: &String) -> Option<&JavaVersion> {
self.0.get(key)
}
pub fn get_mut(&mut self, key: &String) -> Option<&mut JavaVersion> {
self.0.get_mut(key)
}
pub fn count(&self) -> usize {
self.0.len()
}
pub fn keys(&self) -> Vec<String> {
self.0.keys().cloned().collect()
}
// Validates that every path here is a valid Java version and that the version matches the version stored here
// If this returns false, the user should be prompted to reselect the Java version
pub async fn is_all_valid(&self) -> bool {
for (_, java) in self.0.iter() {
let jre = jre::check_java_at_filepath(
PathBuf::from(&java.path).as_path(),
)
.await;
if let Some(jre) = jre {
if jre.version != java.version {
return false;
}
} else {
return false;
}
}
true
}
}
impl Default for JavaGlobals {
fn default() -> Self {
Self::new()
}
}
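// Illustrative sketch (hedged; the "JAVA_17" key and the prebuilt `java_17_version` value
// are assumptions based on how entries are used in `is_all_valid` above): a detected JRE
// is stored under a string key and later re-validated against the path on disk:
//
//     let mut globals = JavaGlobals::new();
//     globals.insert("JAVA_17".to_string(), java_17_version);
//     assert!(globals.is_all_valid().await);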

View File

@@ -0,0 +1,173 @@
//! Theseus metadata
use crate::data::DirectoryInfo;
use crate::util::fetch::{read_json, write, IoSemaphore};
use crate::State;
use daedalus::{
minecraft::{fetch_version_manifest, VersionManifest as MinecraftManifest},
modded::{
fetch_manifest as fetch_loader_manifest, Manifest as LoaderManifest,
},
};
use serde::{Deserialize, Serialize};
const METADATA_URL: &str = "https://meta.modrinth.com";
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Metadata {
pub minecraft: MinecraftManifest,
pub forge: LoaderManifest,
pub fabric: LoaderManifest,
pub quilt: LoaderManifest,
pub neoforge: LoaderManifest,
}
impl Metadata {
fn get_manifest(name: &str) -> String {
format!("{METADATA_URL}/{name}/v0/manifest.json")
}
pub async fn fetch() -> crate::Result<Self> {
let (minecraft, forge, fabric, quilt, neoforge) = tokio::try_join! {
async {
let url = Self::get_manifest("minecraft");
fetch_version_manifest(Some(&url)).await
},
async {
let url = Self::get_manifest("forge");
fetch_loader_manifest(&url).await
},
async {
let url = Self::get_manifest("fabric");
fetch_loader_manifest(&url).await
},
async {
let url = Self::get_manifest("quilt");
fetch_loader_manifest(&url).await
},
async {
let url = Self::get_manifest("neo");
fetch_loader_manifest(&url).await
}
}?;
Ok(Self {
minecraft,
forge,
fabric,
quilt,
neoforge,
})
}
// Attempt to fetch metadata and store in sled DB
#[tracing::instrument(skip(io_semaphore))]
#[theseus_macros::debug_pin]
pub async fn init(
dirs: &DirectoryInfo,
fetch_online: bool,
io_semaphore: &IoSemaphore,
) -> crate::Result<Self> {
let mut metadata = None;
let metadata_path = dirs.caches_meta_dir().await.join("metadata.json");
let metadata_backup_path =
dirs.caches_meta_dir().await.join("metadata.json.bak");
if let Ok(metadata_json) =
read_json::<Metadata>(&metadata_path, io_semaphore).await
{
metadata = Some(metadata_json);
} else if fetch_online {
let res = async {
let metadata_fetch = Self::fetch().await?;
write(
&metadata_path,
&serde_json::to_vec(&metadata_fetch).unwrap_or_default(),
io_semaphore,
)
.await?;
write(
&metadata_backup_path,
&serde_json::to_vec(&metadata_fetch).unwrap_or_default(),
io_semaphore,
)
.await?;
metadata = Some(metadata_fetch);
Ok::<(), crate::Error>(())
}
.await;
match res {
Ok(()) => {}
Err(err) => {
tracing::warn!("Unable to fetch launcher metadata: {err}")
}
}
} else if let Ok(metadata_json) =
read_json::<Metadata>(&metadata_backup_path, io_semaphore).await
{
metadata = Some(metadata_json);
std::fs::copy(&metadata_backup_path, &metadata_path).map_err(
|err| {
crate::ErrorKind::FSError(format!(
"Error restoring metadata backup: {err}"
))
.as_error()
},
)?;
}
if let Some(meta) = metadata {
Ok(meta)
} else {
Err(
crate::ErrorKind::NoValueFor(String::from("launcher metadata"))
.as_error(),
)
}
}
pub async fn update() {
let res = async {
let metadata_fetch = Metadata::fetch().await?;
let state = State::get().await?;
let metadata_path = state
.directories
.caches_meta_dir()
.await
.join("metadata.json");
let metadata_backup_path = state
.directories
.caches_meta_dir()
.await
.join("metadata.json.bak");
if metadata_path.exists() {
std::fs::copy(&metadata_path, &metadata_backup_path)?;
}
write(
&metadata_path,
&serde_json::to_vec(&metadata_fetch)?,
&state.io_semaphore,
)
.await?;
let mut old_metadata = state.metadata.write().await;
*old_metadata = metadata_fetch;
Ok::<(), crate::Error>(())
}
.await;
match res {
Ok(()) => {}
Err(err) => {
tracing::warn!("Unable to update launcher metadata: {err}")
}
};
}
}
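// Worked example of the URL scheme used by `get_manifest` above (no new behaviour, just
// illustrating the format string):
//
//     assert_eq!(
//         Metadata::get_manifest("fabric"),
//         "https://meta.modrinth.com/fabric/v0/manifest.json"
//     );
//
// Note that the NeoForge manifest is requested under the "neo" path segment in `fetch`.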

File diff suppressed because it is too large

View File

@@ -0,0 +1,410 @@
//! Theseus state management system
use crate::event::emit::{emit_loading, emit_offline, init_loading_unsafe};
use std::path::PathBuf;
use crate::event::LoadingBarType;
use crate::loading_join;
use crate::util::fetch::{self, FetchSemaphore, IoSemaphore};
use notify::RecommendedWatcher;
use notify_debouncer_mini::{new_debouncer, DebounceEventResult, Debouncer};
use std::sync::Arc;
use std::time::Duration;
use tokio::join;
use tokio::sync::{OnceCell, RwLock, Semaphore};
use futures::{channel::mpsc::channel, SinkExt, StreamExt};
// Submodules
mod dirs;
pub use self::dirs::*;
mod metadata;
pub use self::metadata::*;
mod profiles;
pub use self::profiles::*;
mod settings;
pub use self::settings::*;
mod projects;
pub use self::projects::*;
mod children;
pub use self::children::*;
mod tags;
pub use self::tags::*;
mod java_globals;
pub use self::java_globals::*;
mod safe_processes;
pub use self::safe_processes::*;
mod discord;
pub use self::discord::*;
mod minecraft_auth;
pub use self::minecraft_auth::*;
mod mr_auth;
pub use self::mr_auth::*;
// Global state
// RwLock on state only has concurrent reads, except for config dir change which takes control of the State
static LAUNCHER_STATE: OnceCell<RwLock<State>> = OnceCell::const_new();
pub struct State {
/// Whether or not the launcher is currently operating in 'offline mode'
pub offline: RwLock<bool>,
/// Information on the location of files used in the launcher
pub directories: DirectoryInfo,
/// Semaphore used to limit concurrent network requests and avoid errors
pub fetch_semaphore: FetchSemaphore,
/// Stored maximum number of permits for the current fetch_semaphore
pub fetch_semaphore_max: RwLock<u32>,
/// Semaphore used to limit concurrent I/O and avoid errors
pub io_semaphore: IoSemaphore,
/// Stored maximum number of permits for the current io_semaphore
pub io_semaphore_max: RwLock<u32>,
/// Launcher metadata
pub metadata: RwLock<Metadata>,
/// Launcher configuration
pub settings: RwLock<Settings>,
/// Reference to minecraft process children
pub children: RwLock<Children>,
/// Launcher profile metadata
pub(crate) profiles: RwLock<Profiles>,
/// Launcher tags
pub(crate) tags: RwLock<Tags>,
/// Launcher processes that should be safely exited on shutdown
pub(crate) safety_processes: RwLock<SafeProcesses>,
/// Launcher user account info
pub(crate) users: RwLock<MinecraftAuthStore>,
/// Modrinth Credentials Store
pub credentials: RwLock<CredentialsStore>,
/// Modrinth auth flow
pub modrinth_auth_flow: RwLock<Option<ModrinthAuthFlow>>,
/// Discord RPC
pub discord_rpc: DiscordGuard,
/// File watcher debouncer
pub(crate) file_watcher: RwLock<Debouncer<RecommendedWatcher>>,
}
impl State {
/// Get the current launcher state, initializing it if needed
pub async fn get(
) -> crate::Result<Arc<tokio::sync::RwLockReadGuard<'static, Self>>> {
Ok(Arc::new(
LAUNCHER_STATE
.get_or_try_init(Self::initialize_state)
.await?
.read()
.await,
))
}
/// Get the current launcher state, initializing it if needed
/// Takes writing control of the state, blocking all other uses of it
/// Only used for state change such as changing the config directory
pub async fn get_write(
) -> crate::Result<tokio::sync::RwLockWriteGuard<'static, Self>> {
Ok(LAUNCHER_STATE
.get_or_try_init(Self::initialize_state)
.await?
.write()
.await)
}
pub fn initialized() -> bool {
LAUNCHER_STATE.initialized()
}
#[tracing::instrument]
#[theseus_macros::debug_pin]
async fn initialize_state() -> crate::Result<RwLock<State>> {
let loading_bar = init_loading_unsafe(
LoadingBarType::StateInit,
100.0,
"Initializing launcher",
)
.await?;
// Settings
let settings =
Settings::init(&DirectoryInfo::get_initial_settings_file()?)
.await?;
let directories = DirectoryInfo::init(&settings)?;
emit_loading(&loading_bar, 10.0, None).await?;
let mut file_watcher = init_watcher().await?;
let fetch_semaphore = FetchSemaphore(RwLock::new(Semaphore::new(
settings.max_concurrent_downloads,
)));
let io_semaphore = IoSemaphore(RwLock::new(Semaphore::new(
settings.max_concurrent_writes,
)));
emit_loading(&loading_bar, 10.0, None).await?;
let is_offline = !fetch::check_internet(3).await;
let metadata_fut =
Metadata::init(&directories, !is_offline, &io_semaphore);
let profiles_fut = Profiles::init(&directories, &mut file_watcher);
let tags_fut = Tags::init(
&directories,
!is_offline,
&io_semaphore,
&fetch_semaphore,
&CredentialsStore(None),
);
let users_fut = MinecraftAuthStore::init(&directories, &io_semaphore);
let creds_fut = CredentialsStore::init(&directories, &io_semaphore);
// Launcher data
let (metadata, profiles, tags, users, creds) = loading_join! {
Some(&loading_bar), 70.0, Some("Loading metadata");
metadata_fut,
profiles_fut,
tags_fut,
users_fut,
creds_fut,
}?;
let safety_processes = SafeProcesses::new();
let discord_rpc = DiscordGuard::init(is_offline).await?;
if !settings.disable_discord_rpc && !is_offline {
// Add default Idling to discord rich presence
// Force add to avoid recursion
let _ = discord_rpc.force_set_activity("Idling...", true).await;
}
let children = Children::new();
// Starts a loop of checking if we are online, and updating
Self::offline_check_loop();
emit_loading(&loading_bar, 10.0, None).await?;
Ok::<RwLock<Self>, crate::Error>(RwLock::new(Self {
offline: RwLock::new(is_offline),
directories,
fetch_semaphore,
fetch_semaphore_max: RwLock::new(
settings.max_concurrent_downloads as u32,
),
io_semaphore,
io_semaphore_max: RwLock::new(
settings.max_concurrent_writes as u32,
),
metadata: RwLock::new(metadata),
settings: RwLock::new(settings),
profiles: RwLock::new(profiles),
users: RwLock::new(users),
children: RwLock::new(children),
credentials: RwLock::new(creds),
tags: RwLock::new(tags),
discord_rpc,
safety_processes: RwLock::new(safety_processes),
file_watcher: RwLock::new(file_watcher),
modrinth_auth_flow: RwLock::new(None),
}))
}
/// Starts a loop of checking if we are online, and updating
pub fn offline_check_loop() {
tokio::task::spawn(async {
loop {
let state = Self::get().await;
if let Ok(state) = state {
let _ = state.refresh_offline().await;
}
// Wait 5 seconds
tokio::time::sleep(Duration::from_secs(5)).await;
}
});
}
/// Updates state with data from the web, if we are online
pub fn update() {
tokio::task::spawn(async {
if let Ok(state) = crate::State::get().await {
if !*state.offline.read().await {
let res1 = Profiles::update_modrinth_versions();
let res2 = Tags::update();
let res3 = Metadata::update();
let res4 = Profiles::update_projects();
let res6 = CredentialsStore::update_creds();
let _ = join!(res1, res2, res3, res4, res6);
}
}
});
}
#[tracing::instrument]
#[theseus_macros::debug_pin]
/// Synchronize in-memory state with persistent state
pub async fn sync() -> crate::Result<()> {
let state = Self::get().await?;
let sync_settings = async {
let state = Arc::clone(&state);
tokio::spawn(async move {
let reader = state.settings.read().await;
reader.sync(&state.directories.settings_file()).await?;
Ok::<_, crate::Error>(())
})
.await?
};
let sync_profiles = async {
let state = Arc::clone(&state);
tokio::spawn(async move {
let profiles = state.profiles.read().await;
profiles.sync().await?;
Ok::<_, crate::Error>(())
})
.await?
};
tokio::try_join!(sync_settings, sync_profiles)?;
Ok(())
}
/// Reset IO semaphore to default values
/// This will block until all uses of the semaphore are complete, so it should only be called
/// when we are not in the middle of downloading something (ie: changing the settings!)
pub async fn reset_io_semaphore(&self) {
let settings = self.settings.read().await;
let mut io_semaphore = self.io_semaphore.0.write().await;
let mut total_permits = self.io_semaphore_max.write().await;
// Wait to get all permits back
let _ = io_semaphore.acquire_many(*total_permits).await;
// Reset the semaphore
io_semaphore.close();
*total_permits = settings.max_concurrent_writes as u32;
*io_semaphore = Semaphore::new(settings.max_concurrent_writes);
}
/// Reset fetch semaphore to default values
/// This will block until all uses of the semaphore are complete, so it should only be called
/// when we are not in the middle of downloading something (ie: changing the settings!)
pub async fn reset_fetch_semaphore(&self) {
let settings = self.settings.read().await;
let mut io_semaphore = self.fetch_semaphore.0.write().await;
let mut total_permits = self.fetch_semaphore_max.write().await;
// Wait to get all permits back
let _ = io_semaphore.acquire_many(*total_permits).await;
// Reset the semaphore
io_semaphore.close();
*total_permits = settings.max_concurrent_downloads as u32;
*io_semaphore = Semaphore::new(settings.max_concurrent_downloads);
}
/// Refreshes whether the launcher should be offline, based on whether there is an internet connection
pub async fn refresh_offline(&self) -> crate::Result<()> {
let is_online = fetch::check_internet(3).await;
let mut offline = self.offline.write().await;
if *offline != is_online {
return Ok(());
}
emit_offline(!is_online).await?;
*offline = !is_online;
Ok(())
}
}
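// Illustrative usage (hedged sketch): most of the launcher reads shared data through the
// read guard returned by `State::get`, while `State::get_write` is reserved for the rare
// whole-state changes described above:
//
//     let state = State::get().await?;
//     let max_downloads = state.settings.read().await.max_concurrent_downloads;
//     let is_offline = *state.offline.read().await;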
pub async fn init_watcher() -> crate::Result<Debouncer<RecommendedWatcher>> {
let (mut tx, mut rx) = channel(1);
let file_watcher = new_debouncer(
Duration::from_secs_f32(2.0),
move |res: DebounceEventResult| {
futures::executor::block_on(async {
tx.send(res).await.unwrap();
})
},
)?;
tokio::task::spawn(async move {
let span = tracing::span!(tracing::Level::INFO, "init_watcher");
tracing::info!(parent: &span, "Initting watcher");
while let Some(res) = rx.next().await {
let _span = span.enter();
match res {
Ok(mut events) => {
let mut visited_paths = Vec::new();
// sort events by e.path
events.sort_by(|a, b| a.path.cmp(&b.path));
events.iter().for_each(|e| {
let mut new_path = PathBuf::new();
let mut components_iterator = e.path.components();
let mut found = false;
for component in components_iterator.by_ref() {
new_path.push(component);
if found {
break;
}
if component.as_os_str() == "profiles" {
found = true;
}
}
// if any remain, it's a subfile of the profile folder and not the profile folder itself
let subfile = components_iterator.next().is_some();
// At this point, new_path is the path to the profile, and subfile is whether it's a subfile of the profile or not
let profile_path_id =
ProfilePathId::new(PathBuf::from(
new_path.file_name().unwrap_or_default(),
));
if e.path
.components()
.any(|x| x.as_os_str() == "crash-reports")
&& e.path
.extension()
.map(|x| x == "txt")
.unwrap_or(false)
{
Profile::crash_task(profile_path_id);
} else if !visited_paths.contains(&new_path) {
if subfile {
Profile::sync_projects_task(
profile_path_id,
false,
);
visited_paths.push(new_path);
} else {
Profiles::sync_available_profiles_task(
profile_path_id,
);
}
}
});
}
Err(error) => tracing::warn!("Unable to watch file: {error}"),
}
}
});
Ok(file_watcher)
}

View File

@@ -0,0 +1,376 @@
use crate::config::MODRINTH_API_URL;
use crate::state::DirectoryInfo;
use crate::util::fetch::{
fetch_advanced, read_json, write, FetchSemaphore, IoSemaphore,
};
use crate::State;
use chrono::{DateTime, Duration, Utc};
use futures::TryStreamExt;
use reqwest::Method;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::collections::HashMap;
const AUTH_JSON: &str = "auth.json";
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ModrinthUser {
pub id: String,
pub username: String,
pub name: Option<String>,
pub avatar_url: Option<String>,
pub bio: Option<String>,
pub created: DateTime<Utc>,
pub role: String,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ModrinthCredentials {
pub session: String,
pub expires_at: DateTime<Utc>,
pub user: ModrinthUser,
}
#[derive(Serialize)]
#[serde(tag = "type")]
#[serde(rename_all = "snake_case")]
pub enum ModrinthCredentialsResult {
TwoFactorRequired { flow: String },
Credentials(ModrinthCredentials),
}
#[derive(Debug)]
pub struct CredentialsStore(pub Option<ModrinthCredentials>);
impl CredentialsStore {
pub async fn init(
dirs: &DirectoryInfo,
io_semaphore: &IoSemaphore,
) -> crate::Result<Self> {
let auth_path = dirs.caches_meta_dir().await.join(AUTH_JSON);
let user = read_json(&auth_path, io_semaphore).await.ok();
if let Some(user) = user {
Ok(Self(Some(user)))
} else {
Ok(Self(None))
}
}
pub async fn save(&self) -> crate::Result<()> {
let state = State::get().await?;
let auth_path =
state.directories.caches_meta_dir().await.join(AUTH_JSON);
if let Some(creds) = &self.0 {
write(&auth_path, &serde_json::to_vec(creds)?, &state.io_semaphore)
.await?;
}
Ok(())
}
pub async fn login(
&mut self,
credentials: ModrinthCredentials,
) -> crate::Result<&Self> {
self.0 = Some(credentials);
self.save().await?;
Ok(self)
}
#[tracing::instrument]
pub async fn update_creds() {
let res = async {
let state = State::get().await?;
let mut creds_write = state.credentials.write().await;
refresh_credentials(&mut creds_write, &state.fetch_semaphore)
.await?;
Ok::<(), crate::Error>(())
}
.await;
match res {
Ok(()) => {}
Err(err) => {
tracing::warn!("Unable to update credentials: {err}")
}
};
}
pub async fn logout(&mut self) -> crate::Result<&Self> {
self.0 = None;
self.save().await?;
Ok(self)
}
}
pub struct ModrinthAuthFlow {
socket: async_tungstenite::WebSocketStream<
async_tungstenite::tokio::ConnectStream,
>,
}
impl ModrinthAuthFlow {
pub async fn new(provider: &str) -> crate::Result<Self> {
let (socket, _) = async_tungstenite::tokio::connect_async(format!(
"wss://api.modrinth.com/v2/auth/ws?provider={provider}"
))
.await?;
Ok(Self { socket })
}
pub async fn prepare_login_url(&mut self) -> crate::Result<String> {
let code_resp = self
.socket
.try_next()
.await?
.ok_or(
crate::ErrorKind::WSClosedError(String::from(
"login socket URL",
))
.as_error(),
)?
.into_data();
#[derive(Deserialize)]
struct Url {
url: String,
}
let response = serde_json::from_slice::<Url>(&code_resp)?;
Ok(response.url)
}
pub async fn extract_credentials(
&mut self,
semaphore: &FetchSemaphore,
) -> crate::Result<ModrinthCredentialsResult> {
// Minecraft bearer token
let token_resp = self
.socket
.try_next()
.await?
.ok_or(
crate::ErrorKind::WSClosedError(String::from(
"login socket URL",
))
.as_error(),
)?
.into_data();
let response =
serde_json::from_slice::<HashMap<String, Value>>(&token_resp)?;
get_result_from_res("code", response, semaphore).await
}
pub async fn close(&mut self) -> crate::Result<()> {
self.socket.close(None).await?;
Ok(())
}
}
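// Illustrative login flow (hedged sketch; error handling and the browser hand-off are
// omitted, and the "github" provider name is only an assumption for illustration): open
// the websocket, send the user to the returned URL, then wait for either credentials or a
// two-factor challenge:
//
//     let mut flow = ModrinthAuthFlow::new("github").await?;
//     let url = flow.prepare_login_url().await?;
//     // open `url` in the user's browser, then:
//     match flow.extract_credentials(&state.fetch_semaphore).await? {
//         ModrinthCredentialsResult::Credentials(creds) => { /* store via CredentialsStore::login */ }
//         ModrinthCredentialsResult::TwoFactorRequired { flow: two_factor_flow } => { /* prompt for a 2FA code */ }
//     }
//     flow.close().await?;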
async fn get_result_from_res(
code_key: &str,
response: HashMap<String, Value>,
semaphore: &FetchSemaphore,
) -> crate::Result<ModrinthCredentialsResult> {
if let Some(flow) = response.get("flow").and_then(|x| x.as_str()) {
Ok(ModrinthCredentialsResult::TwoFactorRequired {
flow: flow.to_string(),
})
} else if let Some(code) = response.get(code_key).and_then(|x| x.as_str()) {
let info = fetch_info(code, semaphore).await?;
Ok(ModrinthCredentialsResult::Credentials(
ModrinthCredentials {
session: code.to_string(),
expires_at: Utc::now() + Duration::weeks(2),
user: info,
},
))
} else if let Some(error) =
response.get("description").and_then(|x| x.as_str())
{
Err(crate::ErrorKind::OtherError(format!(
"Failed to login with error {error}"
))
.as_error())
} else {
Err(crate::ErrorKind::OtherError(String::from(
"Flow/code/error not found in response!",
))
.as_error())
}
}
#[derive(Deserialize)]
struct Session {
session: String,
}
pub async fn login_password(
username: &str,
password: &str,
challenge: &str,
semaphore: &FetchSemaphore,
) -> crate::Result<ModrinthCredentialsResult> {
let resp = fetch_advanced(
Method::POST,
&format!("{MODRINTH_API_URL}auth/login"),
None,
Some(serde_json::json!({
"username": username,
"password": password,
"challenge": challenge,
})),
None,
None,
semaphore,
&CredentialsStore(None),
)
.await?;
let value = serde_json::from_slice::<HashMap<String, Value>>(&resp)?;
get_result_from_res("session", value, semaphore).await
}
async fn get_creds_from_res(
response: HashMap<String, Value>,
semaphore: &FetchSemaphore,
) -> crate::Result<ModrinthCredentials> {
if let Some(code) = response.get("session").and_then(|x| x.as_str()) {
let info = fetch_info(code, semaphore).await?;
Ok(ModrinthCredentials {
session: code.to_string(),
expires_at: Utc::now() + Duration::weeks(2),
user: info,
})
} else if let Some(error) =
response.get("description").and_then(|x| x.as_str())
{
Err(crate::ErrorKind::OtherError(format!(
"Failed to login with error {error}"
))
.as_error())
} else {
Err(crate::ErrorKind::OtherError(String::from(
"Flow/code/error not found in response!",
))
.as_error())
}
}
pub async fn login_2fa(
code: &str,
flow: &str,
semaphore: &FetchSemaphore,
) -> crate::Result<ModrinthCredentials> {
let resp = fetch_advanced(
Method::POST,
&format!("{MODRINTH_API_URL}auth/login/2fa"),
None,
Some(serde_json::json!({
"code": code,
"flow": flow,
})),
None,
None,
semaphore,
&CredentialsStore(None),
)
.await?;
let response = serde_json::from_slice::<HashMap<String, Value>>(&resp)?;
get_creds_from_res(response, semaphore).await
}
pub async fn create_account(
username: &str,
email: &str,
password: &str,
challenge: &str,
sign_up_newsletter: bool,
semaphore: &FetchSemaphore,
) -> crate::Result<ModrinthCredentials> {
let resp = fetch_advanced(
Method::POST,
&format!("{MODRINTH_API_URL}auth/create"),
None,
Some(serde_json::json!({
"username": username,
"email": email,
"password": password,
"challenge": challenge,
"sign_up_newsletter": sign_up_newsletter,
})),
None,
None,
semaphore,
&CredentialsStore(None),
)
.await?;
let response = serde_json::from_slice::<HashMap<String, Value>>(&resp)?;
get_creds_from_res(response, semaphore).await
}
pub async fn refresh_credentials(
credentials_store: &mut CredentialsStore,
semaphore: &FetchSemaphore,
) -> crate::Result<()> {
if let Some(ref mut credentials) = credentials_store.0 {
let token = &credentials.session;
let resp = fetch_advanced(
Method::POST,
&format!("{MODRINTH_API_URL}session/refresh"),
None,
None,
Some(("Authorization", token)),
None,
semaphore,
&CredentialsStore(None),
)
.await
.ok()
.and_then(|resp| serde_json::from_slice::<Session>(&resp).ok());
if let Some(value) = resp {
credentials.user = fetch_info(&value.session, semaphore).await?;
credentials.session = value.session;
credentials.expires_at = Utc::now() + Duration::weeks(2);
} else if credentials.expires_at < Utc::now() {
credentials_store.0 = None;
}
}
credentials_store.save().await?;
Ok(())
}
async fn fetch_info(
token: &str,
semaphore: &FetchSemaphore,
) -> crate::Result<ModrinthUser> {
let result = fetch_advanced(
Method::GET,
&format!("{MODRINTH_API_URL}user"),
None,
None,
Some(("Authorization", token)),
None,
semaphore,
&CredentialsStore(None),
)
.await?;
let value = serde_json::from_slice(&result)?;
Ok(value)
}

File diff suppressed because it is too large

View File

@@ -0,0 +1,807 @@
//! Project management + inference
use crate::config::MODRINTH_API_URL;
use crate::state::{CredentialsStore, ModrinthUser, Profile};
use crate::util::fetch::{
fetch_json, write_cached_icon, FetchSemaphore, IoSemaphore,
};
use crate::util::io::IOError;
use async_zip::tokio::read::fs::ZipFileReader;
use chrono::{DateTime, Utc};
use futures::StreamExt;
use reqwest::Method;
use serde::{Deserialize, Serialize};
use serde_json::json;
use sha2::Digest;
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use tokio::io::AsyncReadExt;
use super::ProjectPathId;
#[derive(Serialize, Deserialize, Clone, Debug)]
#[serde(rename_all = "lowercase")]
pub enum ProjectType {
Mod,
DataPack,
ResourcePack,
ShaderPack,
}
impl ProjectType {
pub fn get_from_loaders(loaders: Vec<String>) -> Option<Self> {
if loaders
.iter()
.any(|x| ["fabric", "forge", "quilt"].contains(&&**x))
{
Some(ProjectType::Mod)
} else if loaders.iter().any(|x| x == "datapack") {
Some(ProjectType::DataPack)
} else if loaders.iter().any(|x| ["iris", "optifine"].contains(&&**x)) {
Some(ProjectType::ShaderPack)
} else if loaders
.iter()
.any(|x| ["vanilla", "canvas", "minecraft"].contains(&&**x))
{
Some(ProjectType::ResourcePack)
} else {
None
}
}
pub fn get_from_parent_folder(path: PathBuf) -> Option<Self> {
// Get parent folder
let path = path.parent()?.file_name()?;
match path.to_str()? {
"mods" => Some(ProjectType::Mod),
"datapacks" => Some(ProjectType::DataPack),
"resourcepacks" => Some(ProjectType::ResourcePack),
"shaderpacks" => Some(ProjectType::ShaderPack),
_ => None,
}
}
pub fn get_name(&self) -> &'static str {
match self {
ProjectType::Mod => "mod",
ProjectType::DataPack => "datapack",
ProjectType::ResourcePack => "resourcepack",
ProjectType::ShaderPack => "shaderpack",
}
}
pub fn get_folder(&self) -> &'static str {
match self {
ProjectType::Mod => "mods",
ProjectType::DataPack => "datapacks",
ProjectType::ResourcePack => "resourcepacks",
ProjectType::ShaderPack => "shaderpacks",
}
}
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Project {
pub sha512: String,
pub disabled: bool,
pub metadata: ProjectMetadata,
pub file_name: String,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ModrinthProject {
pub id: String,
pub slug: Option<String>,
pub project_type: String,
pub team: String,
pub title: String,
pub description: String,
pub body: String,
pub published: DateTime<Utc>,
pub updated: DateTime<Utc>,
pub client_side: SideType,
pub server_side: SideType,
pub downloads: u32,
pub followers: u32,
pub categories: Vec<String>,
pub additional_categories: Vec<String>,
pub game_versions: Vec<String>,
pub loaders: Vec<String>,
pub versions: Vec<String>,
pub icon_url: Option<String>,
}
/// A specific version of a project
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ModrinthVersion {
pub id: String,
pub project_id: String,
pub author_id: String,
pub featured: bool,
pub name: String,
pub version_number: String,
pub changelog: String,
pub changelog_url: Option<String>,
pub date_published: DateTime<Utc>,
pub downloads: u32,
pub version_type: String,
pub files: Vec<ModrinthVersionFile>,
pub dependencies: Vec<Dependency>,
pub game_versions: Vec<String>,
pub loaders: Vec<String>,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ModrinthVersionFile {
pub hashes: HashMap<String, String>,
pub url: String,
pub filename: String,
pub primary: bool,
pub size: u32,
pub file_type: Option<FileType>,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Dependency {
pub version_id: Option<String>,
pub project_id: Option<String>,
pub file_name: Option<String>,
pub dependency_type: DependencyType,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ModrinthTeamMember {
pub team_id: String,
pub user: ModrinthUser,
pub role: String,
pub ordering: i64,
}
#[derive(Serialize, Deserialize, Copy, Clone, Debug)]
#[serde(rename_all = "lowercase")]
pub enum DependencyType {
Required,
Optional,
Incompatible,
Embedded,
}
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
#[serde(rename_all = "kebab-case")]
pub enum SideType {
Required,
Optional,
Unsupported,
Unknown,
}
#[derive(Serialize, Deserialize, Copy, Clone, Debug)]
#[serde(rename_all = "kebab-case")]
pub enum FileType {
RequiredResourcePack,
OptionalResourcePack,
Unknown,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum ProjectMetadata {
Modrinth {
project: Box<ModrinthProject>,
version: Box<ModrinthVersion>,
members: Vec<ModrinthTeamMember>,
update_version: Option<Box<ModrinthVersion>>,
incompatible: bool,
},
Inferred {
title: Option<String>,
description: Option<String>,
authors: Vec<String>,
version: Option<String>,
icon: Option<PathBuf>,
project_type: Option<String>,
},
Unknown,
}
#[tracing::instrument(skip(io_semaphore))]
#[theseus_macros::debug_pin]
async fn read_icon_from_file(
icon_path: Option<String>,
cache_dir: &Path,
path: &PathBuf,
io_semaphore: &IoSemaphore,
) -> crate::Result<Option<PathBuf>> {
if let Some(icon_path) = icon_path {
// we have to reopen the zip twice here :(
let zip_file_reader = ZipFileReader::new(path).await;
if let Ok(zip_file_reader) = zip_file_reader {
// Get index of icon file and open it
let zip_index_option =
zip_file_reader.file().entries().iter().position(|f| {
f.filename().as_str().unwrap_or_default() == icon_path
});
if let Some(zip_index) = zip_index_option {
let mut bytes = Vec::new();
if zip_file_reader
.reader_with_entry(zip_index)
.await?
.read_to_end_checked(&mut bytes)
.await
.is_ok()
{
let bytes = bytes::Bytes::from(bytes);
let path = write_cached_icon(
&icon_path,
cache_dir,
bytes,
io_semaphore,
)
.await?;
return Ok(Some(path));
}
}
}
}
Ok(None)
}
// Creates Project data from the existing files in the file system, for a given Profile
// Paths must be the full paths to the files in the FS, and not the relative paths
// eg: with get_profile_full_project_paths
#[tracing::instrument(skip(paths, profile, io_semaphore, fetch_semaphore))]
#[theseus_macros::debug_pin]
pub async fn infer_data_from_files(
profile: Profile,
paths: Vec<PathBuf>,
cache_dir: PathBuf,
io_semaphore: &IoSemaphore,
fetch_semaphore: &FetchSemaphore,
credentials: &CredentialsStore,
) -> crate::Result<HashMap<ProjectPathId, Project>> {
let mut file_path_hashes = HashMap::new();
for path in paths {
if !path.exists() {
continue;
}
if let Some(ext) = path.extension() {
// Ignore txt configuration files
if ext == "txt" {
continue;
}
}
let mut file = tokio::fs::File::open(path.clone())
.await
.map_err(|e| IOError::with_path(e, &path))?;
let mut buffer = [0u8; 4096]; // Buffer to read chunks
let mut hasher = sha2::Sha512::new(); // Hasher
loop {
let bytes_read =
file.read(&mut buffer).await.map_err(IOError::from)?;
if bytes_read == 0 {
break;
}
hasher.update(&buffer[..bytes_read]);
}
let hash = format!("{:x}", hasher.finalize());
file_path_hashes.insert(hash, path.clone());
}
let files_url = format!("{}version_files", MODRINTH_API_URL);
let updates_url = format!("{}version_files/update", MODRINTH_API_URL);
let (files, update_versions) = tokio::try_join!(
fetch_json::<HashMap<String, ModrinthVersion>>(
Method::POST,
&files_url,
None,
Some(json!({
"hashes": file_path_hashes.keys().collect::<Vec<_>>(),
"algorithm": "sha512",
})),
fetch_semaphore,
credentials,
),
fetch_json::<HashMap<String, ModrinthVersion>>(
Method::POST,
&updates_url,
None,
Some(json!({
"hashes": file_path_hashes.keys().collect::<Vec<_>>(),
"algorithm": "sha512",
"loaders": [profile.metadata.loader],
"game_versions": [profile.metadata.game_version]
})),
fetch_semaphore,
credentials,
)
)?;
let projects: Vec<ModrinthProject> = fetch_json(
Method::GET,
&format!(
"{}projects?ids={}",
MODRINTH_API_URL,
serde_json::to_string(
&files
.values()
.map(|x| x.project_id.clone())
.collect::<Vec<_>>()
)?
),
None,
None,
fetch_semaphore,
credentials,
)
.await?;
let teams: Vec<ModrinthTeamMember> = fetch_json::<
Vec<Vec<ModrinthTeamMember>>,
>(
Method::GET,
&format!(
"{}teams?ids={}",
MODRINTH_API_URL,
serde_json::to_string(
&projects.iter().map(|x| x.team.clone()).collect::<Vec<_>>()
)?
),
None,
None,
fetch_semaphore,
credentials,
)
.await?
.into_iter()
.flatten()
.collect();
let mut return_projects: Vec<(PathBuf, Project)> = Vec::new();
let mut further_analyze_projects: Vec<(String, PathBuf)> = Vec::new();
for (hash, path) in file_path_hashes {
if let Some(version) = files.get(&hash) {
if let Some(project) =
projects.iter().find(|x| version.project_id == x.id)
{
let file_name = path
.file_name()
.unwrap_or_default()
.to_string_lossy()
.to_string();
return_projects.push((
path,
Project {
disabled: file_name.ends_with(".disabled"),
metadata: ProjectMetadata::Modrinth {
project: Box::new(project.clone()),
version: Box::new(version.clone()),
members: teams
.iter()
.filter(|x| x.team_id == project.team)
.cloned()
.collect::<Vec<_>>(),
update_version: if let Some(value) =
update_versions.get(&hash)
{
if value.id != version.id {
Some(Box::new(value.clone()))
} else {
None
}
} else {
None
},
incompatible: !version.loaders.contains(
&profile
.metadata
.loader
.as_api_str()
.to_string(),
) || !version
.game_versions
.contains(&profile.metadata.game_version),
},
sha512: hash,
file_name,
},
));
continue;
}
}
further_analyze_projects.push((hash, path));
}
for (hash, path) in further_analyze_projects {
let file_name = path
.file_name()
.unwrap_or_default()
.to_string_lossy()
.to_string();
let zip_file_reader = if let Ok(zip_file_reader) =
ZipFileReader::new(path.clone()).await
{
zip_file_reader
} else {
return_projects.push((
path.clone(),
Project {
sha512: hash,
disabled: file_name.ends_with(".disabled"),
metadata: ProjectMetadata::Unknown,
file_name,
},
));
continue;
};
// Forge
let zip_index_option =
zip_file_reader.file().entries().iter().position(|f| {
f.filename().as_str().unwrap_or_default()
== "META-INF/mods.toml"
});
if let Some(index) = zip_index_option {
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct ForgeModInfo {
pub mods: Vec<ForgeMod>,
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct ForgeMod {
mod_id: String,
version: Option<String>,
display_name: Option<String>,
description: Option<String>,
logo_file: Option<String>,
authors: Option<String>,
}
let mut file_str = String::new();
if zip_file_reader
.reader_with_entry(index)
.await?
.read_to_string_checked(&mut file_str)
.await
.is_ok()
{
if let Ok(pack) = toml::from_str::<ForgeModInfo>(&file_str) {
if let Some(pack) = pack.mods.first() {
let icon = read_icon_from_file(
pack.logo_file.clone(),
&cache_dir,
&path,
io_semaphore,
)
.await?;
return_projects.push((
path.clone(),
Project {
sha512: hash,
disabled: file_name.ends_with(".disabled"),
file_name,
metadata: ProjectMetadata::Inferred {
title: Some(
pack.display_name
.clone()
.unwrap_or_else(|| {
pack.mod_id.clone()
}),
),
description: pack.description.clone(),
authors: pack
.authors
.clone()
.map(|x| vec![x])
.unwrap_or_default(),
version: pack.version.clone(),
icon,
project_type: Some("mod".to_string()),
},
},
));
continue;
}
}
}
}
// Legacy Forge (mcmod.info)
let zip_index_option =
zip_file_reader.file().entries().iter().position(|f| {
f.filename().as_str().unwrap_or_default() == "mcmod.info"
});
if let Some(index) = zip_index_option {
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct ForgeMod {
modid: String,
name: String,
description: Option<String>,
version: Option<String>,
author_list: Option<Vec<String>>,
logo_file: Option<String>,
}
let mut file_str = String::new();
if zip_file_reader
.reader_with_entry(index)
.await?
.read_to_string_checked(&mut file_str)
.await
.is_ok()
{
if let Ok(pack) = serde_json::from_str::<ForgeMod>(&file_str) {
let icon = read_icon_from_file(
pack.logo_file,
&cache_dir,
&path,
io_semaphore,
)
.await?;
return_projects.push((
path.clone(),
Project {
sha512: hash,
disabled: file_name.ends_with(".disabled"),
file_name,
metadata: ProjectMetadata::Inferred {
title: Some(if pack.name.is_empty() {
pack.modid
} else {
pack.name
}),
description: pack.description,
authors: pack.author_list.unwrap_or_default(),
version: pack.version,
icon,
project_type: Some("mod".to_string()),
},
},
));
continue;
}
}
}
// Fabric
let zip_index_option =
zip_file_reader.file().entries().iter().position(|f| {
f.filename().as_str().unwrap_or_default() == "fabric.mod.json"
});
if let Some(index) = zip_index_option {
#[derive(Deserialize)]
#[serde(untagged)]
enum FabricAuthor {
String(String),
Object { name: String },
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct FabricMod {
id: String,
version: String,
name: Option<String>,
description: Option<String>,
authors: Vec<FabricAuthor>,
icon: Option<String>,
}
let mut file_str = String::new();
if zip_file_reader
.reader_with_entry(index)
.await?
.read_to_string_checked(&mut file_str)
.await
.is_ok()
{
if let Ok(pack) = serde_json::from_str::<FabricMod>(&file_str) {
let icon = read_icon_from_file(
pack.icon,
&cache_dir,
&path,
io_semaphore,
)
.await?;
return_projects.push((
path.clone(),
Project {
sha512: hash,
disabled: file_name.ends_with(".disabled"),
file_name,
metadata: ProjectMetadata::Inferred {
title: Some(pack.name.unwrap_or(pack.id)),
description: pack.description,
authors: pack
.authors
.into_iter()
.map(|x| match x {
FabricAuthor::String(name) => name,
FabricAuthor::Object { name } => name,
})
.collect(),
version: Some(pack.version),
icon,
project_type: Some("mod".to_string()),
},
},
));
continue;
}
}
}
// Quilt
let zip_index_option =
zip_file_reader.file().entries().iter().position(|f| {
f.filename().as_str().unwrap_or_default() == "quilt.mod.json"
});
if let Some(index) = zip_index_option {
#[derive(Deserialize)]
struct QuiltMetadata {
pub name: Option<String>,
pub description: Option<String>,
pub contributors: Option<HashMap<String, String>>,
pub icon: Option<String>,
}
#[derive(Deserialize)]
struct QuiltMod {
id: String,
version: String,
metadata: Option<QuiltMetadata>,
}
let mut file_str = String::new();
if zip_file_reader
.reader_with_entry(index)
.await?
.read_to_string_checked(&mut file_str)
.await
.is_ok()
{
if let Ok(pack) = serde_json::from_str::<QuiltMod>(&file_str) {
let icon = read_icon_from_file(
pack.metadata.as_ref().and_then(|x| x.icon.clone()),
&cache_dir,
&path,
io_semaphore,
)
.await?;
return_projects.push((
path.clone(),
Project {
sha512: hash,
disabled: file_name.ends_with(".disabled"),
file_name,
metadata: ProjectMetadata::Inferred {
title: Some(
pack.metadata
.as_ref()
.and_then(|x| x.name.clone())
.unwrap_or(pack.id),
),
description: pack
.metadata
.as_ref()
.and_then(|x| x.description.clone()),
authors: pack
.metadata
.map(|x| {
x.contributors
.unwrap_or_default()
.keys()
.cloned()
.collect()
})
.unwrap_or_default(),
version: Some(pack.version),
icon,
project_type: Some("mod".to_string()),
},
},
));
continue;
}
}
}
// Other
let zip_index_option =
zip_file_reader.file().entries().iter().position(|f| {
f.filename().as_str().unwrap_or_default() == "pack.mcmeta"
});
if let Some(index) = zip_index_option {
#[derive(Deserialize)]
struct Pack {
description: Option<String>,
}
let mut file_str = String::new();
if zip_file_reader
.reader_with_entry(index)
.await?
.read_to_string_checked(&mut file_str)
.await
.is_ok()
{
if let Ok(pack) = serde_json::from_str::<Pack>(&file_str) {
let icon = read_icon_from_file(
Some("pack.png".to_string()),
&cache_dir,
&path,
io_semaphore,
)
.await?;
// Guess the project type from the filepath
let project_type =
ProjectType::get_from_parent_folder(path.clone());
return_projects.push((
path.clone(),
Project {
sha512: hash,
disabled: file_name.ends_with(".disabled"),
file_name,
metadata: ProjectMetadata::Inferred {
title: None,
description: pack.description,
authors: Vec::new(),
version: None,
icon,
project_type: project_type
.map(|x| x.get_name().to_string()),
},
},
));
continue;
}
}
}
return_projects.push((
path.clone(),
Project {
sha512: hash,
disabled: file_name.ends_with(".disabled"),
file_name,
metadata: ProjectMetadata::Unknown,
},
));
}
// Project paths should be relative
let mut corrected_hashmap = HashMap::new();
let mut stream = tokio_stream::iter(return_projects);
while let Some((h, v)) = stream.next().await {
let h = ProjectPathId::from_fs_path(&h).await?;
corrected_hashmap.insert(h, v);
}
Ok(corrected_hashmap)
}
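The untagged FabricAuthor enum above is what lets both author spellings found in fabric.mod.json files deserialize into one type. A standalone sketch of that behaviour; the type names here are illustrative rather than taken from the codebase.

use serde::Deserialize;

#[derive(Deserialize)]
#[serde(untagged)]
enum Author {
    Plain(String),
    Detailed { name: String },
}

#[derive(Deserialize)]
struct ModJson {
    authors: Vec<Author>,
}

fn main() {
    // Both a bare string and an object with a "name" field are accepted.
    let raw = r#"{ "authors": ["Alice", { "name": "Bob" }] }"#;
    let parsed: ModJson = serde_json::from_str(raw).unwrap();
    let names: Vec<String> = parsed
        .authors
        .into_iter()
        .map(|a| match a {
            Author::Plain(name) => name,
            Author::Detailed { name } => name,
        })
        .collect();
    assert_eq!(names, ["Alice", "Bob"]);
}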

View File

@@ -0,0 +1,69 @@
use uuid::Uuid;
use crate::State;
// We implement a store for safe loading bars such that we can wait for them to complete
// We create this store separately from the loading bars themselves, because this may be extended as needed
pub struct SafeProcesses {
pub loading_bars: Vec<Uuid>,
}
#[derive(Debug, Copy, Clone)]
pub enum ProcessType {
LoadingBar,
// Potentially other types of processes (ie: IO operations?)
}
impl SafeProcesses {
// init
pub fn new() -> Self {
Self {
loading_bars: Vec::new(),
}
}
// Adds a new running safe process to the list by uuid
pub async fn add_uuid(
r#type: ProcessType,
uuid: Uuid,
) -> crate::Result<Uuid> {
let state = State::get().await?;
let mut safe_processes = state.safety_processes.write().await;
match r#type {
ProcessType::LoadingBar => {
safe_processes.loading_bars.push(uuid);
}
}
Ok(uuid)
}
// Mark a safe process as finishing
pub async fn complete(
r#type: ProcessType,
uuid: Uuid,
) -> crate::Result<()> {
let state = State::get().await?;
let mut safe_processes = state.safety_processes.write().await;
match r#type {
ProcessType::LoadingBar => {
safe_processes.loading_bars.retain(|x| *x != uuid);
}
}
Ok(())
}
// Check if there are any pending safe processes of a given type
pub async fn is_complete(r#type: ProcessType) -> crate::Result<bool> {
let state = State::get().await?;
let safe_processes = state.safety_processes.read().await;
match r#type {
ProcessType::LoadingBar => {
if safe_processes.loading_bars.is_empty() {
return Ok(true);
}
}
}
Ok(false)
}
}
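A minimal usage sketch of the store above, assuming the crate's State has already been initialized; the function name is illustrative, and the fresh Uuid stands in for a real loading bar id.

use uuid::Uuid;

// Register, do the work, then release, so shutdown code can wait on
// SafeProcesses::is_complete(ProcessType::LoadingBar).
async fn run_guarded_task() -> crate::Result<()> {
    let id = SafeProcesses::add_uuid(ProcessType::LoadingBar, Uuid::new_v4()).await?;
    // ... perform the work tied to the loading bar here ...
    SafeProcesses::complete(ProcessType::LoadingBar, id).await?;
    Ok(())
}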

View File

@@ -0,0 +1,183 @@
//! Theseus settings file
use serde::{Deserialize, Serialize};
use std::path::{Path, PathBuf};
use tokio::fs;
use super::{DirectoryInfo, JavaGlobals};
// TODO: convert to semver?
const CURRENT_FORMAT_VERSION: u32 = 1;
// Types
/// Global Theseus settings
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Settings {
pub theme: Theme,
pub memory: MemorySettings,
#[serde(default)]
pub force_fullscreen: bool,
pub game_resolution: WindowSize,
pub custom_java_args: Vec<String>,
pub custom_env_args: Vec<(String, String)>,
pub java_globals: JavaGlobals,
pub hooks: Hooks,
pub max_concurrent_downloads: usize,
pub max_concurrent_writes: usize,
pub version: u32,
pub collapsed_navigation: bool,
#[serde(default)]
pub disable_discord_rpc: bool,
#[serde(default)]
pub hide_on_process: bool,
#[serde(default)]
pub native_decorations: bool,
#[serde(default)]
pub default_page: DefaultPage,
#[serde(default)]
pub developer_mode: bool,
#[serde(default)]
pub opt_out_analytics: bool,
#[serde(default)]
pub advanced_rendering: bool,
#[serde(default)]
pub fully_onboarded: bool,
#[serde(default = "DirectoryInfo::get_initial_settings_dir")]
pub loaded_config_dir: Option<PathBuf>,
}
impl Settings {
#[tracing::instrument]
pub async fn init(file: &Path) -> crate::Result<Self> {
let mut rescued = false;
let settings = if file.exists() {
let loaded_settings = fs::read(&file)
.await
.map_err(|err| {
crate::ErrorKind::FSError(format!(
"Error reading settings file: {err}"
))
.as_error()
})
.and_then(|it| {
serde_json::from_slice::<Settings>(&it)
.map_err(crate::Error::from)
});
// Settings file is corrupted; back it up and create a new one

if let Err(ref err) = loaded_settings {
tracing::error!("Failed to load settings file: {err}. ");
let backup_file = file.with_extension("json.bak");
tracing::error!("Corrupted settings file will be backed up as {}, and a new settings file will be created.", backup_file.display());
let _ = fs::rename(file, backup_file).await;
rescued = true;
}
loaded_settings.ok()
} else {
None
};
if let Some(settings) = settings {
Ok(settings)
} else {
// Create new settings file
let settings = Self {
theme: Theme::Dark,
memory: MemorySettings::default(),
force_fullscreen: false,
game_resolution: WindowSize::default(),
custom_java_args: Vec::new(),
custom_env_args: Vec::new(),
java_globals: JavaGlobals::new(),
hooks: Hooks::default(),
max_concurrent_downloads: 10,
max_concurrent_writes: 10,
version: CURRENT_FORMAT_VERSION,
collapsed_navigation: false,
disable_discord_rpc: false,
hide_on_process: false,
native_decorations: false,
default_page: DefaultPage::Home,
developer_mode: false,
opt_out_analytics: false,
advanced_rendering: true,
fully_onboarded: rescued, // If we rescued the settings file, we should consider the user fully onboarded
// By default, the config directory is the same as the settings directory
loaded_config_dir: DirectoryInfo::get_initial_settings_dir(),
};
if rescued {
settings.sync(file).await?;
}
Ok(settings)
}
}
#[tracing::instrument(skip(self))]
pub async fn sync(&self, to: &Path) -> crate::Result<()> {
fs::write(to, serde_json::to_vec(self)?)
.await
.map_err(|err| {
crate::ErrorKind::FSError(format!(
"Error saving settings to file: {err}"
))
.as_error()
})?;
Ok(())
}
}
/// Theseus theme
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum Theme {
Dark,
Light,
Oled,
}
/// Minecraft memory settings
#[derive(Serialize, Deserialize, Debug, Clone, Copy)]
pub struct MemorySettings {
pub maximum: u32,
}
impl Default for MemorySettings {
fn default() -> Self {
Self { maximum: 2048 }
}
}
/// Game window size
#[derive(Serialize, Deserialize, Debug, Clone, Copy)]
pub struct WindowSize(pub u16, pub u16);
impl Default for WindowSize {
fn default() -> Self {
Self(854, 480)
}
}
/// Game initialization hooks
#[derive(Serialize, Deserialize, Debug, Clone, Default)]
#[serde(default)]
pub struct Hooks {
#[serde(skip_serializing_if = "Option::is_none")]
pub pre_launch: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub wrapper: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub post_exit: Option<String>,
}
/// Opening window to start with
#[derive(Serialize, Deserialize, Debug, Clone, Copy)]
pub enum DefaultPage {
Home,
Library,
}
impl Default for DefaultPage {
fn default() -> Self {
Self::Home
}
}
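A small sketch of the intended call pattern for the settings type above, assuming a tokio runtime; the directory, file name, and function name are illustrative. Settings::init already handles the missing/corrupt-file cases shown above, so a caller only needs to load, mutate, and sync.

use std::path::Path;

async fn load_and_persist(config_dir: &Path) -> crate::Result<()> {
    let file = config_dir.join("settings.json"); // hypothetical file name
    let mut settings = Settings::init(&file).await?;
    settings.max_concurrent_downloads = 4;
    settings.sync(&file).await?;
    Ok(())
}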

View File

@@ -0,0 +1,261 @@
use std::path::PathBuf;
use reqwest::Method;
use serde::{Deserialize, Serialize};
use crate::config::MODRINTH_API_URL;
use crate::data::DirectoryInfo;
use crate::state::CredentialsStore;
use crate::util::fetch::{
fetch_json, read_json, write, FetchSemaphore, IoSemaphore,
};
// Serializable struct for all tags to be fetched together by the frontend
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Tags {
pub categories: Vec<Category>,
pub loaders: Vec<Loader>,
pub game_versions: Vec<GameVersion>,
pub donation_platforms: Vec<DonationPlatform>,
pub report_types: Vec<String>,
}
impl Tags {
#[tracing::instrument(skip(io_semaphore, fetch_semaphore))]
#[theseus_macros::debug_pin]
pub async fn init(
dirs: &DirectoryInfo,
fetch_online: bool,
io_semaphore: &IoSemaphore,
fetch_semaphore: &FetchSemaphore,
credentials: &CredentialsStore,
) -> crate::Result<Self> {
let mut tags = None;
let tags_path = dirs.caches_meta_dir().await.join("tags.json");
let tags_path_backup =
dirs.caches_meta_dir().await.join("tags.json.bak");
if let Ok(tags_json) = read_json::<Self>(&tags_path, io_semaphore).await
{
tags = Some(tags_json);
} else if fetch_online {
match Self::fetch(fetch_semaphore, credentials).await {
Ok(tags_fetch) => tags = Some(tags_fetch),
Err(err) => {
tracing::warn!("Unable to fetch launcher tags: {err}")
}
}
} else if let Ok(tags_json) =
read_json::<Self>(&tags_path_backup, io_semaphore).await
{
tags = Some(tags_json);
std::fs::copy(&tags_path_backup, &tags_path).map_err(|err| {
crate::ErrorKind::FSError(format!(
"Error restoring tags backup: {err}"
))
.as_error()
})?;
}
if let Some(tags_data) = tags {
write(&tags_path, &serde_json::to_vec(&tags_data)?, io_semaphore)
.await?;
write(
&tags_path_backup,
&serde_json::to_vec(&tags_data)?,
io_semaphore,
)
.await?;
Ok(tags_data)
} else {
Err(crate::ErrorKind::NoValueFor(String::from("launcher tags"))
.as_error())
}
}
#[tracing::instrument]
#[theseus_macros::debug_pin]
pub async fn update() {
let res = async {
let state = crate::State::get().await?;
let creds = state.credentials.read().await;
let tags_fetch =
Tags::fetch(&state.fetch_semaphore, &creds).await?;
drop(creds);
let tags_path =
state.directories.caches_meta_dir().await.join("tags.json");
let tags_path_backup = state
.directories
.caches_meta_dir()
.await
.join("tags.json.bak");
if tags_path.exists() {
std::fs::copy(&tags_path, &tags_path_backup).unwrap();
}
write(
&tags_path,
&serde_json::to_vec(&tags_fetch)?,
&state.io_semaphore,
)
.await?;
let mut old_tags = state.tags.write().await;
*old_tags = tags_fetch;
Ok::<(), crate::Error>(())
}
.await;
match res {
Ok(()) => {}
Err(err) => {
tracing::warn!("Unable to update launcher tags: {err}")
}
};
}
// Checks the database for categories tag, returns a Vec::new() if it doesn't exist, otherwise returns the categories
#[tracing::instrument(skip(self))]
pub fn get_categories(&self) -> Vec<Category> {
self.categories.clone()
}
// Checks the database for loaders tag, returns a Vec::new() if it doesn't exist, otherwise returns the loaders
#[tracing::instrument(skip(self))]
pub fn get_loaders(&self) -> Vec<Loader> {
self.loaders.clone()
}
// Checks the database for game_versions tag, returns a Vec::new() if it doesn't exist, otherwise returns the game_versions
#[tracing::instrument(skip(self))]
pub fn get_game_versions(&self) -> Vec<GameVersion> {
self.game_versions.clone()
}
// Checks the database for donation_platforms tag, returns a Vec::new() if it doesn't exist, otherwise returns the donation_platforms
#[tracing::instrument(skip(self))]
pub fn get_donation_platforms(&self) -> Vec<DonationPlatform> {
self.donation_platforms.clone()
}
// Checks the database for report_types tag, returns a Vec::new() if it doesn't exist, otherwise returns the report_types
#[tracing::instrument(skip(self))]
pub fn get_report_types(&self) -> Vec<String> {
self.report_types.clone()
}
// Gets all tags together as a serializable bundle
#[tracing::instrument(skip(self))]
pub fn get_tag_bundle(&self) -> Tags {
self.clone()
}
// Fetches the tags from the Modrinth API; persisting them to the cache is handled by the callers above
pub async fn fetch(
semaphore: &FetchSemaphore,
credentials: &CredentialsStore,
) -> crate::Result<Self> {
let categories = format!("{MODRINTH_API_URL}tag/category");
let loaders = format!("{MODRINTH_API_URL}tag/loader");
let game_versions = format!("{MODRINTH_API_URL}tag/game_version");
let donation_platforms =
format!("{MODRINTH_API_URL}tag/donation_platform");
let report_types = format!("{MODRINTH_API_URL}tag/report_type");
let categories_fut = fetch_json::<Vec<Category>>(
Method::GET,
&categories,
None,
None,
semaphore,
credentials,
);
let loaders_fut = fetch_json::<Vec<Loader>>(
Method::GET,
&loaders,
None,
None,
semaphore,
credentials,
);
let game_versions_fut = fetch_json::<Vec<GameVersion>>(
Method::GET,
&game_versions,
None,
None,
semaphore,
credentials,
);
let donation_platforms_fut = fetch_json::<Vec<DonationPlatform>>(
Method::GET,
&donation_platforms,
None,
None,
semaphore,
credentials,
);
let report_types_fut = fetch_json::<Vec<String>>(
Method::GET,
&report_types,
None,
None,
semaphore,
credentials,
);
let (
categories,
loaders,
game_versions,
donation_platforms,
report_types,
) = tokio::try_join!(
categories_fut,
loaders_fut,
game_versions_fut,
donation_platforms_fut,
report_types_fut
)?;
Ok(Self {
categories,
loaders,
game_versions,
donation_platforms,
report_types,
})
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Category {
pub name: String,
pub project_type: String,
pub header: String,
pub icon: PathBuf,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Loader {
pub name: String,
pub icon: PathBuf,
pub supported_project_types: Vec<String>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DonationPlatform {
pub short: String,
pub name: String,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GameVersion {
pub version: String,
pub version_type: String,
pub date: String,
pub major: bool,
}
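Tags::update above logs failures instead of returning them, so callers can refresh tags opportunistically. A minimal sketch, assuming the global State is initialized and a tokio runtime is running; the function name is illustrative.

fn refresh_tags_in_background() {
    // Errors are logged inside update(), so there is nothing to propagate here.
    tokio::spawn(async {
        Tags::update().await;
    });
}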

View File

@@ -0,0 +1,345 @@
//! Functions for fetching information from the Internet
use crate::event::emit::emit_loading;
use crate::event::LoadingBarId;
use crate::state::CredentialsStore;
use bytes::Bytes;
use lazy_static::lazy_static;
use reqwest::Method;
use serde::de::DeserializeOwned;
use std::ffi::OsStr;
use std::path::{Path, PathBuf};
use std::time::{self, Duration};
use tokio::sync::{RwLock, Semaphore};
use tokio::{fs::File, io::AsyncWriteExt};
use super::io::{self, IOError};
#[derive(Debug)]
pub struct IoSemaphore(pub RwLock<Semaphore>);
#[derive(Debug)]
pub struct FetchSemaphore(pub RwLock<Semaphore>);
lazy_static! {
pub static ref REQWEST_CLIENT: reqwest::Client = {
let mut headers = reqwest::header::HeaderMap::new();
let header = reqwest::header::HeaderValue::from_str(&format!(
"modrinth/theseus/{} (support@modrinth.com)",
env!("CARGO_PKG_VERSION")
))
.unwrap();
headers.insert(reqwest::header::USER_AGENT, header);
reqwest::Client::builder()
.tcp_keepalive(Some(time::Duration::from_secs(10)))
.default_headers(headers)
.build()
.expect("Reqwest Client Building Failed")
};
}
const FETCH_ATTEMPTS: usize = 3;
#[tracing::instrument(skip(semaphore))]
pub async fn fetch(
url: &str,
sha1: Option<&str>,
semaphore: &FetchSemaphore,
credentials: &CredentialsStore,
) -> crate::Result<Bytes> {
fetch_advanced(
Method::GET,
url,
sha1,
None,
None,
None,
semaphore,
credentials,
)
.await
}
#[tracing::instrument(skip(json_body, semaphore))]
pub async fn fetch_json<T>(
method: Method,
url: &str,
sha1: Option<&str>,
json_body: Option<serde_json::Value>,
semaphore: &FetchSemaphore,
credentials: &CredentialsStore,
) -> crate::Result<T>
where
T: DeserializeOwned,
{
let result = fetch_advanced(
method,
url,
sha1,
json_body,
None,
None,
semaphore,
credentials,
)
.await?;
let value = serde_json::from_slice(&result)?;
Ok(value)
}
/// Downloads a file with retry and checksum functionality
#[tracing::instrument(skip(json_body, semaphore))]
#[theseus_macros::debug_pin]
#[allow(clippy::too_many_arguments)]
pub async fn fetch_advanced(
method: Method,
url: &str,
sha1: Option<&str>,
json_body: Option<serde_json::Value>,
header: Option<(&str, &str)>,
loading_bar: Option<(&LoadingBarId, f64)>,
semaphore: &FetchSemaphore,
credentials: &CredentialsStore,
) -> crate::Result<Bytes> {
let io_semaphore = semaphore.0.read().await;
let _permit = io_semaphore.acquire().await?;
for attempt in 1..=(FETCH_ATTEMPTS + 1) {
let mut req = REQWEST_CLIENT.request(method.clone(), url);
if let Some(body) = json_body.clone() {
req = req.json(&body);
}
if let Some(header) = header {
req = req.header(header.0, header.1);
}
if url.starts_with("https://cdn.modrinth.com") {
if let Some(creds) = &credentials.0 {
req = req.header("Authorization", &creds.session);
}
}
let result = req.send().await;
match result {
Ok(x) => {
let bytes = if let Some((bar, total)) = &loading_bar {
let length = x.content_length();
if let Some(total_size) = length {
use futures::StreamExt;
let mut stream = x.bytes_stream();
let mut bytes = Vec::new();
while let Some(item) = stream.next().await {
let chunk = item.or(Err(
crate::error::ErrorKind::NoValueFor(
"fetch bytes".to_string(),
),
))?;
bytes.append(&mut chunk.to_vec());
emit_loading(
bar,
(chunk.len() as f64 / total_size as f64)
* total,
None,
)
.await?;
}
Ok(bytes::Bytes::from(bytes))
} else {
x.bytes().await
}
} else {
x.bytes().await
};
if let Ok(bytes) = bytes {
if let Some(sha1) = sha1 {
let hash = sha1_async(bytes.clone()).await?;
if &*hash != sha1 {
if attempt <= FETCH_ATTEMPTS {
continue;
} else {
return Err(crate::ErrorKind::HashError(
sha1.to_string(),
hash,
)
.into());
}
}
}
tracing::trace!("Done downloading URL {url}");
return Ok(bytes);
} else if attempt <= FETCH_ATTEMPTS {
continue;
} else if let Err(err) = bytes {
return Err(err.into());
}
}
Err(_) if attempt <= FETCH_ATTEMPTS => continue,
Err(err) => {
return Err(err.into());
}
}
}
unreachable!()
}
/// Downloads a file from specified mirrors
#[tracing::instrument(skip(semaphore))]
#[theseus_macros::debug_pin]
pub async fn fetch_mirrors(
mirrors: &[&str],
sha1: Option<&str>,
semaphore: &FetchSemaphore,
credentials: &CredentialsStore,
) -> crate::Result<Bytes> {
if mirrors.is_empty() {
return Err(crate::ErrorKind::InputError(
"No mirrors provided!".to_string(),
)
.into());
}
for (index, mirror) in mirrors.iter().enumerate() {
let result = fetch(mirror, sha1, semaphore, credentials).await;
if result.is_ok() || (result.is_err() && index == (mirrors.len() - 1)) {
return result;
}
}
unreachable!()
}
/// Checks whether an internet connection is available by requesting a Modrinth-hosted file, with a timeout in seconds
#[tracing::instrument]
#[theseus_macros::debug_pin]
pub async fn check_internet(timeout: u64) -> bool {
REQWEST_CLIENT
.get("https://launcher-files.modrinth.com/detect.txt")
.timeout(Duration::from_secs(timeout))
.send()
.await
.is_ok()
}
/// Posts a JSON to a URL
#[tracing::instrument(skip(json_body, semaphore))]
#[theseus_macros::debug_pin]
pub async fn post_json<T>(
url: &str,
json_body: serde_json::Value,
semaphore: &FetchSemaphore,
credentials: &CredentialsStore,
) -> crate::Result<T>
where
T: DeserializeOwned,
{
let io_semaphore = semaphore.0.read().await;
let _permit = io_semaphore.acquire().await?;
let mut req = REQWEST_CLIENT.post(url).json(&json_body);
if let Some(creds) = &credentials.0 {
req = req.header("Authorization", &creds.session);
}
let result = req.send().await?.error_for_status()?;
let value = result.json().await?;
Ok(value)
}
pub async fn read_json<T>(
path: &Path,
semaphore: &IoSemaphore,
) -> crate::Result<T>
where
T: DeserializeOwned,
{
let io_semaphore = semaphore.0.read().await;
let _permit = io_semaphore.acquire().await?;
let json = io::read(path).await?;
let json = serde_json::from_slice::<T>(&json)?;
Ok(json)
}
#[tracing::instrument(skip(bytes, semaphore))]
pub async fn write<'a>(
path: &Path,
bytes: &[u8],
semaphore: &IoSemaphore,
) -> crate::Result<()> {
let io_semaphore = semaphore.0.read().await;
let _permit = io_semaphore.acquire().await?;
if let Some(parent) = path.parent() {
io::create_dir_all(parent).await?;
}
let mut file = File::create(path)
.await
.map_err(|e| IOError::with_path(e, path))?;
file.write_all(bytes)
.await
.map_err(|e| IOError::with_path(e, path))?;
tracing::trace!("Done writing file {}", path.display());
Ok(())
}
pub async fn copy(
src: impl AsRef<std::path::Path>,
dest: impl AsRef<std::path::Path>,
semaphore: &IoSemaphore,
) -> crate::Result<()> {
let src: &Path = src.as_ref();
let dest = dest.as_ref();
let io_semaphore = semaphore.0.read().await;
let _permit = io_semaphore.acquire().await?;
if let Some(parent) = dest.parent() {
io::create_dir_all(parent).await?;
}
io::copy(src, dest).await?;
tracing::trace!(
"Done copying file {} to {}",
src.display(),
dest.display()
);
Ok(())
}
// Writes an icon to the cache and returns its absolute path within the cache directory
#[tracing::instrument(skip(bytes, semaphore))]
pub async fn write_cached_icon(
icon_path: &str,
cache_dir: &Path,
bytes: Bytes,
semaphore: &IoSemaphore,
) -> crate::Result<PathBuf> {
let extension = Path::new(&icon_path).extension().and_then(OsStr::to_str);
let hash = sha1_async(bytes.clone()).await?;
let path = cache_dir.join("icons").join(if let Some(ext) = extension {
format!("{hash}.{ext}")
} else {
hash
});
write(&path, &bytes, semaphore).await?;
let path = io::canonicalize(path)?;
Ok(path)
}
async fn sha1_async(bytes: Bytes) -> crate::Result<String> {
let hash = tokio::task::spawn_blocking(move || {
sha1_smol::Sha1::from(bytes).hexdigest()
})
.await?;
Ok(hash)
}
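A small sketch of gating online work behind the check_internet probe above; the 5-second timeout and the function name are illustrative.

async fn maybe_refresh_metadata() {
    // Skip network calls entirely when the probe fails within 5 seconds.
    if !check_internet(5).await {
        tracing::info!("Offline; skipping metadata refresh");
        return;
    }
    // ... perform fetch_json / fetch_advanced calls here ...
}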

View File

@@ -0,0 +1,194 @@
// IO error
// A wrapper around the tokio IO functions that adds the path to the error message, instead of the uninformative std::io::Error.
use std::{io::Write, path::Path};
use tempfile::NamedTempFile;
use tokio::task::spawn_blocking;
#[derive(Debug, thiserror::Error)]
pub enum IOError {
#[error("{source}, path: {path}")]
IOPathError {
#[source]
source: std::io::Error,
path: String,
},
#[error(transparent)]
IOError(#[from] std::io::Error),
}
impl IOError {
pub fn from(source: std::io::Error) -> Self {
Self::IOError(source)
}
pub fn with_path(
source: std::io::Error,
path: impl AsRef<std::path::Path>,
) -> Self {
let path = path.as_ref();
Self::IOPathError {
source,
path: path.to_string_lossy().to_string(),
}
}
}
// dunce canonicalize
pub fn canonicalize(
path: impl AsRef<std::path::Path>,
) -> Result<std::path::PathBuf, IOError> {
let path = path.as_ref();
dunce::canonicalize(path).map_err(|e| IOError::IOPathError {
source: e,
path: path.to_string_lossy().to_string(),
})
}
// read_dir
pub async fn read_dir(
path: impl AsRef<std::path::Path>,
) -> Result<tokio::fs::ReadDir, IOError> {
let path = path.as_ref();
tokio::fs::read_dir(path)
.await
.map_err(|e| IOError::IOPathError {
source: e,
path: path.to_string_lossy().to_string(),
})
}
// create_dir_all
pub async fn create_dir_all(
path: impl AsRef<std::path::Path>,
) -> Result<(), IOError> {
let path = path.as_ref();
tokio::fs::create_dir_all(path)
.await
.map_err(|e| IOError::IOPathError {
source: e,
path: path.to_string_lossy().to_string(),
})
}
// remove_dir_all
pub async fn remove_dir_all(
path: impl AsRef<std::path::Path>,
) -> Result<(), IOError> {
let path = path.as_ref();
tokio::fs::remove_dir_all(path)
.await
.map_err(|e| IOError::IOPathError {
source: e,
path: path.to_string_lossy().to_string(),
})
}
// read_to_string
pub async fn read_to_string(
path: impl AsRef<std::path::Path>,
) -> Result<String, IOError> {
let path = path.as_ref();
tokio::fs::read_to_string(path)
.await
.map_err(|e| IOError::IOPathError {
source: e,
path: path.to_string_lossy().to_string(),
})
}
// read
pub async fn read(
path: impl AsRef<std::path::Path>,
) -> Result<Vec<u8>, IOError> {
let path = path.as_ref();
tokio::fs::read(path)
.await
.map_err(|e| IOError::IOPathError {
source: e,
path: path.to_string_lossy().to_string(),
})
}
// write
pub async fn write(
path: impl AsRef<std::path::Path>,
data: impl AsRef<[u8]>,
) -> Result<(), IOError> {
let path = path.as_ref().to_owned();
let data = data.as_ref().to_owned();
spawn_blocking(move || {
let cloned_path = path.clone();
sync_write(data, path).map_err(|e| IOError::IOPathError {
source: e,
path: cloned_path.to_string_lossy().to_string(),
})
})
.await
.map_err(|_| {
std::io::Error::new(std::io::ErrorKind::Other, "background task failed")
})??;
Ok(())
}
fn sync_write(
data: impl AsRef<[u8]>,
path: impl AsRef<Path>,
) -> Result<(), std::io::Error> {
let mut tempfile =
NamedTempFile::new_in(path.as_ref().parent().ok_or_else(|| {
std::io::Error::new(
std::io::ErrorKind::Other,
"could not get parent directory for temporary file",
)
})?)?;
tempfile.write_all(data.as_ref())?;
let tmp_path = tempfile.into_temp_path();
let path = path.as_ref();
tmp_path.persist(path)?;
std::io::Result::Ok(())
}
// rename
pub async fn rename(
from: impl AsRef<std::path::Path>,
to: impl AsRef<std::path::Path>,
) -> Result<(), IOError> {
let from = from.as_ref();
let to = to.as_ref();
tokio::fs::rename(from, to)
.await
.map_err(|e| IOError::IOPathError {
source: e,
path: from.to_string_lossy().to_string(),
})
}
// copy
pub async fn copy(
from: impl AsRef<std::path::Path>,
to: impl AsRef<std::path::Path>,
) -> Result<u64, IOError> {
let from: &Path = from.as_ref();
let to = to.as_ref();
tokio::fs::copy(from, to)
.await
.map_err(|e| IOError::IOPathError {
source: e,
path: from.to_string_lossy().to_string(),
})
}
// remove file
pub async fn remove_file(
path: impl AsRef<std::path::Path>,
) -> Result<(), IOError> {
let path = path.as_ref();
tokio::fs::remove_file(path)
.await
.map_err(|e| IOError::IOPathError {
source: e,
path: path.to_string_lossy().to_string(),
})
}
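The async write above stages the bytes in a temporary file beside the destination and then persists it over the target; keeping the temp file in the same directory is what lets the final rename stay on one filesystem and remain atomic. A usage sketch from within this module; the paths and function name are hypothetical.

async fn save_profile_json(bytes: &[u8]) -> Result<(), IOError> {
    // The parent directory must exist before write() stages its temp file there.
    create_dir_all("profiles").await?;
    write("profiles/profile.json", bytes).await?;
    Ok(())
}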

View File

@@ -0,0 +1,388 @@
use super::io;
use futures::prelude::*;
use serde::{Deserialize, Serialize};
use std::env;
use std::path::PathBuf;
use std::process::Command;
use std::{collections::HashSet, path::Path};
use tokio::task::JoinError;
use crate::State;
#[cfg(target_os = "windows")]
use winreg::{
enums::{HKEY_LOCAL_MACHINE, KEY_READ, KEY_WOW64_32KEY, KEY_WOW64_64KEY},
RegKey,
};
#[derive(Debug, PartialEq, Eq, Hash, Serialize, Deserialize, Clone)]
pub struct JavaVersion {
pub path: String,
pub version: String,
pub architecture: String,
}
// Entrypoint function (Windows)
// Returns a Vec of unique JavaVersions from the PATH, Windows Registry Keys and common Java locations
#[cfg(target_os = "windows")]
#[tracing::instrument]
pub async fn get_all_jre() -> Result<Vec<JavaVersion>, JREError> {
let mut jre_paths = HashSet::new();
// Add JRES directly on PATH
jre_paths.extend(get_all_jre_path().await);
jre_paths.extend(get_all_autoinstalled_jre_path().await?);
if let Ok(java_home) = env::var("JAVA_HOME") {
jre_paths.insert(PathBuf::from(java_home));
}
// Hard paths for locations for commonly installed .exes
let java_paths = [
r"C:/Program Files/Java",
r"C:/Program Files (x86)/Java",
r"C:\Program Files\Eclipse Adoptium",
r"C:\Program Files (x86)\Eclipse Adoptium",
];
for java_path in java_paths {
let Ok(java_subpaths) = std::fs::read_dir(java_path) else {
continue;
};
for java_subpath in java_subpaths.flatten() {
let path = java_subpath.path();
jre_paths.insert(path.join("bin"));
}
}
// Windows Registry Keys
let key_paths = [
r"SOFTWARE\JavaSoft\Java Runtime Environment", // Oracle
r"SOFTWARE\JavaSoft\Java Development Kit", // Oracle
r"SOFTWARE\JavaSoft\JRE", // Oracle
r"SOFTWARE\JavaSoft\JDK", // Oracle
r"SOFTWARE\Eclipse Foundation\JDK", // Eclipse
r"SOFTWARE\Eclipse Adoptium\JRE", // Eclipse
r"SOFTWARE\Microsoft\JDK", // Microsoft
];
for key in key_paths {
if let Ok(jre_key) = RegKey::predef(HKEY_LOCAL_MACHINE)
.open_subkey_with_flags(key, KEY_READ | KEY_WOW64_32KEY)
{
jre_paths.extend(get_paths_from_jre_winregkey(jre_key));
}
if let Ok(jre_key) = RegKey::predef(HKEY_LOCAL_MACHINE)
.open_subkey_with_flags(key, KEY_READ | KEY_WOW64_64KEY)
{
jre_paths.extend(get_paths_from_jre_winregkey(jre_key));
}
}
// Get JRE versions from potential paths concurrently
let j = check_java_at_filepaths(jre_paths)
.await
.into_iter()
.collect();
Ok(j)
}
// Collects paths rather than searching directly, as RegKey handles should not be passed across async boundaries (they do not impl Send)
#[cfg(target_os = "windows")]
#[tracing::instrument]
pub fn get_paths_from_jre_winregkey(jre_key: RegKey) -> HashSet<PathBuf> {
let mut jre_paths = HashSet::new();
for subkey in jre_key.enum_keys().flatten() {
if let Ok(subkey) = jre_key.open_subkey(subkey) {
let subkey_value_names =
[r"JavaHome", r"InstallationPath", r"\\hotspot\\MSI"];
for subkey_value in subkey_value_names {
let path: Result<String, std::io::Error> =
subkey.get_value(subkey_value);
let Ok(path) = path else { continue };
jre_paths.insert(PathBuf::from(path).join("bin"));
}
}
}
jre_paths
}
// Entrypoint function (Mac)
// Returns a Vec of unique JavaVersions from the PATH, and common Java locations
#[cfg(target_os = "macos")]
#[tracing::instrument]
pub async fn get_all_jre() -> Result<Vec<JavaVersion>, JREError> {
// Use HashSet to avoid duplicates
let mut jre_paths = HashSet::new();
// Add JREs directly on PATH
jre_paths.extend(get_all_jre_path().await);
jre_paths.extend(get_all_autoinstalled_jre_path().await?);
// Hard paths for locations for commonly installed .exes
let java_paths = [
r"/Applications/Xcode.app/Contents/Applications/Application Loader.app/Contents/MacOS/itms/java",
r"/Library/Internet Plug-Ins/JavaAppletPlugin.plugin/Contents/Home",
r"/System/Library/Frameworks/JavaVM.framework/Versions/Current/Commands",
];
for path in java_paths {
jre_paths.insert(PathBuf::from(path));
}
// Iterate over JavaVirtualMachines/(something)/Contents/Home/bin
let base_path = PathBuf::from("/Library/Java/JavaVirtualMachines/");
if let Ok(dir) = std::fs::read_dir(base_path) {
for entry in dir.flatten() {
let entry = entry.path().join("Contents/Home/bin");
jre_paths.insert(entry);
}
}
// Get JRE versions from potential paths concurrently
let j = check_java_at_filepaths(jre_paths)
.await
.into_iter()
.collect();
Ok(j)
}
// Entrypoint function (Linux)
// Returns a Vec of unique JavaVersions from the PATH, and common Java locations
#[cfg(target_os = "linux")]
#[tracing::instrument]
pub async fn get_all_jre() -> Result<Vec<JavaVersion>, JREError> {
// Use HashSet to avoid duplicates
let mut jre_paths = HashSet::new();
// Add JREs directly on PATH
jre_paths.extend(get_all_jre_path().await);
jre_paths.extend(get_all_autoinstalled_jre_path().await?);
// Hard paths for locations for commonly installed locations
let java_paths = [
r"/usr",
r"/usr/java",
r"/usr/lib/jvm",
r"/usr/lib64/jvm",
r"/opt/jdk",
r"/opt/jdks",
];
for path in java_paths {
let path = PathBuf::from(path);
jre_paths.insert(PathBuf::from(&path).join("jre").join("bin"));
jre_paths.insert(PathBuf::from(&path).join("bin"));
if let Ok(dir) = std::fs::read_dir(path) {
for entry in dir.flatten() {
let entry_path = entry.path();
jre_paths.insert(entry_path.join("jre").join("bin"));
jre_paths.insert(entry_path.join("bin"));
}
}
}
// Get JRE versions from potential paths concurrently
let j = check_java_at_filepaths(jre_paths)
.await
.into_iter()
.collect();
Ok(j)
}
// Gets all JREs that the launcher auto-installed into its java_versions directory
#[tracing::instrument]
#[theseus_macros::debug_pin]
async fn get_all_autoinstalled_jre_path() -> Result<HashSet<PathBuf>, JREError>
{
Box::pin(async move {
let state = State::get().await.map_err(|_| JREError::StateError)?;
let mut jre_paths = HashSet::new();
let base_path = state.directories.java_versions_dir().await;
if base_path.is_dir() {
if let Ok(dir) = std::fs::read_dir(base_path) {
for entry in dir.flatten() {
let file_path = entry.path().join("bin");
if let Ok(contents) =
std::fs::read_to_string(file_path.clone())
{
let entry = entry.path().join(contents);
jre_paths.insert(entry);
} else {
#[cfg(not(target_os = "macos"))]
{
let file_path = file_path.join(JAVA_BIN);
jre_paths.insert(file_path);
}
}
}
}
}
Ok(jre_paths)
})
.await
}
// Gets all JREs from the PATH env variable
#[tracing::instrument]
async fn get_all_jre_path() -> HashSet<PathBuf> {
// Iterate over values in PATH variable, where accessible JREs are referenced
let paths =
env::var("PATH").map(|x| env::split_paths(&x).collect::<HashSet<_>>());
paths.unwrap_or_else(|_| HashSet::new())
}
#[cfg(target_os = "windows")]
#[allow(dead_code)]
pub const JAVA_BIN: &str = "javaw.exe";
#[cfg(not(target_os = "windows"))]
#[allow(dead_code)]
pub const JAVA_BIN: &str = "java";
// For each candidate filepath in 'paths', run check_java_at_filepath concurrently,
// returning a JavaVersion for every path that points to a valid java binary
#[tracing::instrument]
pub async fn check_java_at_filepaths(
paths: HashSet<PathBuf>,
) -> HashSet<JavaVersion> {
let jres = stream::iter(paths.into_iter())
.map(|p: PathBuf| {
tokio::task::spawn(async move { check_java_at_filepath(&p).await })
})
.buffer_unordered(64)
.collect::<Vec<_>>()
.await;
jres.into_iter().flat_map(|x| x.ok()).flatten().collect()
}
// For a candidate filepath 'path', attempt to resolve it and probe the Java version at that path.
// Returns None if the path does not exist or does not contain a valid Java binary.
#[tracing::instrument]
#[theseus_macros::debug_pin]
pub async fn check_java_at_filepath(path: &Path) -> Option<JavaVersion> {
// Attempt to canonicalize the potential java filepath
// If it fails, this path does not exist and None is returned (no Java here)
let Ok(path) = io::canonicalize(path) else {
return None;
};
// Checks for existence of Java at this filepath
// Adds JAVA_BIN to the end of the path if it is not already there
let java = if path.file_name()?.to_str()? != JAVA_BIN {
path.join(JAVA_BIN)
} else {
path
};
if !java.exists() {
return None;
};
let bytes = include_bytes!("../../library/JavaInfo.class");
let tempdir: PathBuf = tempfile::tempdir().ok()?.into_path();
if !tempdir.exists() {
return None;
}
let file_path = tempdir.join("JavaInfo.class");
io::write(&file_path, bytes).await.ok()?;
let output = Command::new(&java)
.arg("-cp")
.arg(file_path.parent().unwrap())
.arg("JavaInfo")
.output()
.ok()?;
let stdout = String::from_utf8_lossy(&output.stdout);
let mut java_version = None;
let mut java_arch = None;
for line in stdout.lines() {
let mut parts = line.split('=');
let key = parts.next().unwrap_or_default();
let value = parts.next().unwrap_or_default();
if key == "os.arch" {
java_arch = Some(value);
} else if key == "java.version" {
java_version = Some(value);
}
}
// Extract version info from it
if let Some(arch) = java_arch {
if let Some(version) = java_version {
let path = java.to_string_lossy().to_string();
return Some(JavaVersion {
path,
version: version.to_string(),
architecture: arch.to_string(),
});
}
}
None
}
/// Extracts the (major, minor) version pair from a Java version string.
/// Returns an error if the minor version cannot be parsed, and assumes a major version of 1 when it cannot be determined.
/// "1.8.0_361" -> (1, 8)
/// "20" -> (1, 20)
pub fn extract_java_majorminor_version(
version: &str,
) -> Result<(u32, u32), JREError> {
let mut split = version.split('.');
let major_opt = split.next();
let mut major;
// Try minor. If doesn't exist, in format like "20" so use major
let mut minor = if let Some(minor) = split.next() {
major = major_opt.unwrap_or("1").parse::<u32>()?;
minor.parse::<u32>()?
} else {
// Formatted like "20", only one value means that is minor version
major = 1;
major_opt
.ok_or_else(|| JREError::InvalidJREVersion(version.to_string()))?
.parse::<u32>()?
};
// The leading component should always be 1. If it is greater, the string is formatted like "17.0.1.2" and actually begins with the minor version.
if major > 1 {
minor = major;
major = 1;
}
Ok((major, minor))
}
#[derive(thiserror::Error, Debug)]
pub enum JREError {
#[error("Command error : {0}")]
IOError(#[from] std::io::Error),
#[error("Env error: {0}")]
EnvError(#[from] env::VarError),
#[error("No JRE found for required version: {0}")]
NoJREFound(String),
#[error("Invalid JRE version string: {0}")]
InvalidJREVersion(String),
#[error("Parsing error: {0}")]
ParseError(#[from] std::num::ParseIntError),
#[error("Join error: {0}")]
JoinError(#[from] JoinError),
#[error("No stored tag for Minecraft Version {0}")]
NoMinecraftVersionFound(String),
#[error("Error getting launcher sttae")]
StateError,
}
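A quick behaviour sketch of the version parser above, written as a hypothetical unit test:

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn majorminor_examples() -> Result<(), JREError> {
        // Legacy "1.x" strings keep their minor component.
        assert_eq!(extract_java_majorminor_version("1.8.0_361")?, (1, 8));
        // Modern strings start with the feature release, which is treated as the minor.
        assert_eq!(extract_java_majorminor_version("17.0.1")?, (1, 17));
        assert_eq!(extract_java_majorminor_version("20")?, (1, 20));
        Ok(())
    }
}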

View File

@@ -0,0 +1,17 @@
//! Theseus utility functions
pub mod fetch;
pub mod io;
pub mod jre;
pub mod platform;
/// Wrap a builder which uses a mut reference into one which outputs an owned value
macro_rules! wrap_ref_builder {
($id:ident = $init:expr => $transform:block) => {{
let mut it = $init;
{
let $id = &mut it;
$transform;
}
it
}};
}
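A sketch of what the macro above is for, taken from the common case of a &mut-style builder; the function and the use of std::process::Command are illustrative, and the macro must be in scope where it is used.

use std::process::Command;

fn java_version_command(java_path: &str) -> Command {
    // Configure through the &mut-based builder, but hand back the owned value.
    wrap_ref_builder!(cmd = Command::new(java_path) => {
        cmd.arg("-version");
    })
}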

View File

@@ -0,0 +1,102 @@
//! Platform-related code
use daedalus::minecraft::{Os, OsRule};
use regex::Regex;
// OS detection
pub trait OsExt {
/// Get the OS of the current system
fn native() -> Self;
/// Gets the OS + Arch of the current system
fn native_arch(java_arch: &str) -> Self;
}
impl OsExt for Os {
fn native_arch(java_arch: &str) -> Self {
if std::env::consts::OS == "windows" {
if java_arch == "aarch64" {
Os::WindowsArm64
} else {
Os::Windows
}
} else if std::env::consts::OS == "linux" {
if java_arch == "aarch64" {
Os::LinuxArm64
} else if java_arch == "arm" {
Os::LinuxArm32
} else {
Os::Linux
}
} else if std::env::consts::OS == "macos" {
if java_arch == "aarch64" {
Os::OsxArm64
} else {
Os::Osx
}
} else {
Os::Unknown
}
}
fn native() -> Self {
match std::env::consts::OS {
"windows" => Self::Windows,
"macos" => Self::Osx,
"linux" => Self::Linux,
_ => Self::Unknown,
}
}
}
// Bit width
#[cfg(target_pointer_width = "64")]
pub const ARCH_WIDTH: &str = "64";
#[cfg(target_pointer_width = "32")]
pub const ARCH_WIDTH: &str = "32";
// Platform rule handling
pub fn os_rule(
rule: &OsRule,
java_arch: &str,
// Whether the Minecraft version is newer than 1.18.2 (which supports macOS natively)
minecraft_updated: bool,
) -> bool {
let mut rule_match = true;
if let Some(ref arch) = rule.arch {
rule_match &= !matches!(arch.as_str(), "x86" | "arm");
}
if let Some(name) = &rule.name {
if minecraft_updated
&& (name != &Os::LinuxArm64 && name != &Os::LinuxArm32)
{
rule_match &=
&Os::native() == name || &Os::native_arch(java_arch) == name;
} else {
rule_match &= &Os::native_arch(java_arch) == name;
}
}
if let Some(version) = &rule.version {
if let Ok(regex) = Regex::new(version.as_str()) {
rule_match &=
regex.is_match(&sys_info::os_release().unwrap_or_default());
}
}
rule_match
}
pub fn classpath_separator(java_arch: &str) -> &'static str {
match Os::native_arch(java_arch) {
Os::Osx
| Os::OsxArm64
| Os::Linux
| Os::LinuxArm32
| Os::LinuxArm64
| Os::Unknown => ":",
Os::Windows | Os::WindowsArm64 => ";",
}
}
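A short sketch of how the separator helper above would be used when assembling a JVM classpath; the jar paths, arch string, and function name are illustrative.

fn build_classpath(jars: &[&str], java_arch: &str) -> String {
    // ";" on Windows targets, ":" everywhere else.
    jars.join(classpath_separator(java_arch))
}

// build_classpath(&["libs/a.jar", "libs/b.jar"], "x86_64")
// yields "libs/a.jar:libs/b.jar" on Linux/macOS.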

View File

@@ -0,0 +1,11 @@
[package]
name = "theseus_macros"
version = "0.1.0"
edition = "2021"
[lib]
proc-macro = true
[dependencies]
syn = { version = "2.0.58", features = ["full"] }
quote = "1.0"

View File

@@ -0,0 +1,34 @@
extern crate proc_macro;
use proc_macro::TokenStream;
use quote::quote;
use syn::{parse_macro_input, ItemFn};
#[proc_macro_attribute]
pub fn debug_pin(_attr: TokenStream, item: TokenStream) -> TokenStream {
let input = parse_macro_input!(item as ItemFn);
let attrs = &input.attrs;
let vis = &input.vis;
let sig = &input.sig;
let body = &input.block;
#[cfg(debug_assertions)]
let result = quote! {
#(#attrs)*
#vis #sig {
Box::pin(async move {
#body
}).await
}
};
#[cfg(not(debug_assertions))]
let result = quote! {
#(#attrs)*
#vis #sig {
#body
}
};
TokenStream::from(result)
}
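A sketch of the attribute above in use: in debug builds the async body is boxed and awaited, which appears intended to keep the large, unoptimized debug-build futures off the stack, while release builds compile the function unchanged. The function below is illustrative.

#[theseus_macros::debug_pin]
async fn resolve_recursively(depth: u32) -> u32 {
    // Deeply nested or recursive awaits benefit most from being boxed in debug builds.
    depth
}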

View File

@@ -0,0 +1,5 @@
module.exports = {
root: true,
extends: ['custom/library'],
ignorePatterns: ['**/*.scss', '**/*.svg', 'node_modules/', 'dist/', 'tsconfig.json'],
}

packages/assets/LICENSE
View File

@@ -0,0 +1,674 @@
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<https://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<https://www.gnu.org/licenses/why-not-lgpl.html>.

View File

@@ -0,0 +1,74 @@
<svg width="100%" height="100%" viewBox="0 0 3247 1234" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" xml:space="preserve" xmlns:serif="http://www.serif.com/" style="fill-rule:evenodd;clip-rule:evenodd;stroke-linejoin:round;stroke-miterlimit:1.5;">
<g transform="matrix(1,0,0,1,-149.406,-1192.33)">
<g transform="matrix(1,0,0,1,0,-0.0649902)">
<g transform="matrix(1,0,0,1,-190.112,0.0649902)">
<g transform="matrix(1549.76,0,0,1549.76,285.034,2355.46)">
<path d="M0.386,-0L0.526,-0L0.526,-0.129L0.619,-0.129L0.619,-0.247L0.526,-0.247L0.526,-0.705L0.318,-0.705L0.035,-0.252L0.035,-0.129L0.386,-0.129L0.386,-0ZM0.164,-0.243L0.164,-0.247L0.385,-0.598L0.389,-0.598L0.389,-0.243L0.164,-0.243Z" style="fill-rule:nonzero;" />
</g>
</g>
</g>
<g transform="matrix(1,0,0,1,0,-0.0649902)">
<g transform="matrix(1,0,0,1,2151.95,0.0649902)">
<g transform="matrix(1549.76,0,0,1549.76,285.034,2355.46)">
<path d="M0.386,-0L0.526,-0L0.526,-0.129L0.619,-0.129L0.619,-0.247L0.526,-0.247L0.526,-0.705L0.318,-0.705L0.035,-0.252L0.035,-0.129L0.386,-0.129L0.386,-0ZM0.164,-0.243L0.164,-0.247L0.385,-0.598L0.389,-0.598L0.389,-0.243L0.164,-0.243Z" style="fill-rule:nonzero;" />
</g>
</g>
</g>
<g transform="matrix(1,0,0,1,0,-0.0649902)">
<g transform="matrix(1.04827,0,0,1.04827,1374.32,1499.56)">
<path d="M309.082,335.571L283.216,266.381L336.966,211.169L404.9,196.531L424.57,220.74L393.254,252.46L365.941,261.052L346.425,281.11L355.987,307.719L375.387,328.306L402.745,321.031L422.216,299.648L464.729,286.185L477.395,314.677L433.529,368.46L360.02,391.735L309.082,335.571Z" />
</g>
<g transform="matrix(1.6241,0,0,1.6241,3325.29,75.5995)">
<path d="M-799.272,1141.68C-827.119,1200.8 -887.255,1241.76 -956.882,1241.76C-1048.32,1241.76 -1123.39,1171.12 -1130.46,1081.49" style="fill:none;stroke:currentcolor;stroke-width:33.35px;" />
</g>
<g transform="matrix(1.6241,0,0,1.6241,3325.29,75.5995)">
<path d="M-1129.95,1048.4C-1120.36,961.34 -1046.48,893.522 -956.882,893.522C-860.784,893.522 -782.765,971.541 -782.765,1067.64C-782.765,1082.32 -784.586,1096.58 -788.016,1110.21" style="fill:none;stroke:currentcolor;stroke-width:33.35px;" />
</g>
<g transform="matrix(-0.578174,2.15777,-2.15777,-0.578174,3522.29,4490.98)">
<path d="M-799.272,1141.68C-827.119,1200.8 -887.255,1241.76 -956.882,1241.76C-1048.32,1241.76 -1123.39,1171.12 -1130.46,1081.49" style="fill:none;stroke:currentcolor;stroke-width:24.25px;" />
</g>
<g transform="matrix(-0.578174,2.15777,-2.15777,-0.578174,3522.29,4490.98)">
<path d="M-1129.95,1048.4C-1120.36,961.34 -1046.48,893.522 -956.882,893.522C-860.784,893.522 -782.765,971.541 -782.765,1067.64C-782.765,1082.32 -784.586,1096.58 -788.016,1110.21" style="fill:none;stroke:currentcolor;stroke-width:24.25px;" />
</g>
<g transform="matrix(2.06597,1.89638,-1.89638,2.06597,5772.64,1417.93)">
<path d="M-799.272,1141.68C-827.119,1200.8 -887.255,1241.76 -956.882,1241.76C-1048.32,1241.76 -1123.39,1171.12 -1130.46,1081.49" style="fill:none;stroke:currentcolor;stroke-width:19.32px;" />
</g>
<g transform="matrix(2.06597,1.89638,-1.89638,2.06597,5772.64,1417.93)">
<path d="M-1129.95,1048.4C-1120.36,961.34 -1046.48,893.522 -956.882,893.522C-860.784,893.522 -782.765,971.541 -782.765,1067.64C-782.765,1082.32 -784.586,1096.58 -788.016,1110.21" style="fill:none;stroke:currentcolor;stroke-width:19.32px;" />
</g>
<g transform="matrix(-3.26901,0.875928,-0.875928,-3.26901,-420.338,6137.6)">
<path d="M-799.272,1141.68C-827.119,1200.8 -887.255,1241.76 -956.882,1241.76C-1048.32,1241.76 -1123.39,1171.12 -1130.46,1081.49" style="fill:none;stroke:currentcolor;stroke-width:16.01px;" />
</g>
<g transform="matrix(-3.26901,0.875928,-0.875928,-3.26901,-420.338,6137.6)">
<path d="M-1129.95,1048.4C-1120.36,961.34 -1046.48,893.522 -956.882,893.522C-860.784,893.522 -782.765,971.541 -782.765,1067.64C-782.765,1082.32 -784.586,1096.58 -788.016,1110.21" style="fill:none;stroke:currentcolor;stroke-width:16.01px;" />
</g>
<g transform="matrix(1,0,0,1,2728.09,741.915)">
<path d="M-904.543,901.539C-833.986,923.78 -782.765,989.775 -782.765,1067.64C-782.765,1137.58 -824.09,1197.94 -883.633,1225.62" style="fill:none;stroke:currentcolor;stroke-width:54.17px;" />
</g>
<g transform="matrix(1,0,0,1,2728.09,741.915)">
<path d="M-937.34,1240.67C-943.756,1241.39 -950.276,1241.76 -956.882,1241.76C-1052.98,1241.76 -1131,1163.74 -1131,1067.64C-1131,971.541 -1052.98,893.522 -956.882,893.522" style="fill:none;stroke:currentcolor;stroke-width:54.17px;" />
</g>
<g transform="matrix(1,0,0,1,2728.09,741.915)">
<path d="M-904.543,901.539C-833.986,923.78 -782.765,989.775 -782.765,1067.64C-782.765,1137.58 -824.09,1197.94 -883.633,1225.62" style="fill:none;stroke:currentcolor;stroke-width:54.17px;" />
</g>
<g transform="matrix(1,0,0,1,2728.09,741.915)">
<path d="M-937.34,1240.67C-943.756,1241.39 -950.276,1241.76 -956.882,1241.76C-1052.98,1241.76 -1131,1163.74 -1131,1067.64C-1131,971.541 -1052.98,893.522 -956.882,893.522" style="fill:none;stroke:currentcolor;stroke-width:54.17px;" />
</g>
<g transform="matrix(1,0,0,1,2730.59,729.536)">
<path d="M-791.26,1034.59L-686.335,1006.23" style="fill:none;stroke:currentcolor;stroke-width:54.17px;" />
</g>
<g transform="matrix(0.730214,-0.361316,0.535157,0.830095,1995.93,464.536)">
<path d="M-791.26,1034.59L-686.335,1006.23" style="fill:none;stroke:currentcolor;stroke-width:56.77px;" />
</g>
<g transform="matrix(0.588836,0.702962,-0.731171,0.673626,3318.16,1883.42)">
<path d="M-791.26,1034.59L-686.335,1006.23" style="fill:none;stroke:currentcolor;stroke-width:55.35px;" />
</g>
<g transform="matrix(0.260851,-0.884338,0.965379,0.238953,979.167,372.085)">
<path d="M-791.26,1034.59L-686.335,1006.23" style="fill:none;stroke:currentcolor;stroke-width:55.28px;" />
</g>
<g transform="matrix(1.94443,-0.569179,-0.569179,1.34303,4301.69,-217.31)">
<path d="M-1202.21,1224.92L-999.087,1102.51" style="fill:none;stroke:currentcolor;stroke-width:48.54px;" />
</g>
</g>
</g>
</svg>


View File

@@ -0,0 +1,4 @@
<svg width="512" height="514" viewBox="0 0 512 514" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M503.16 323.56C514.55 281.47 515.32 235.91 503.2 190.76C466.57 54.2299 326.04 -26.8001 189.33 9.77991C83.8101 38.0199 11.3899 128.07 0.689941 230.47H43.99C54.29 147.33 113.74 74.7298 199.75 51.7098C306.05 23.2598 415.13 80.6699 453.17 181.38L411.03 192.65C391.64 145.8 352.57 111.45 306.3 96.8198L298.56 140.66C335.09 154.13 364.72 184.5 375.56 224.91C391.36 283.8 361.94 344.14 308.56 369.17L320.09 412.16C390.25 383.21 432.4 310.3 422.43 235.14L464.41 223.91C468.91 252.62 467.35 281.16 460.55 308.07L503.16 323.56Z" fill="var(--color-brand)"/>
<path d="M321.99 504.22C185.27 540.8 44.7501 459.77 8.11011 323.24C3.84011 307.31 1.17 291.33 0 275.46H43.27C44.36 287.37 46.4699 299.35 49.6799 311.29C53.0399 323.8 57.45 335.75 62.79 347.07L101.38 323.92C98.1299 316.42 95.39 308.6 93.21 300.47C69.17 210.87 122.41 118.77 212.13 94.7601C229.13 90.2101 246.23 88.4401 262.93 89.1501L255.19 133C244.73 133.05 234.11 134.42 223.53 137.25C157.31 154.98 118.01 222.95 135.75 289.09C136.85 293.16 138.13 297.13 139.59 300.99L188.94 271.38L174.07 231.95L220.67 184.08L279.57 171.39L296.62 192.38L269.47 219.88L245.79 227.33L228.87 244.72L237.16 267.79C237.16 267.79 253.95 285.63 253.98 285.64L277.7 279.33L294.58 260.79L331.44 249.12L342.42 273.82L304.39 320.45L240.66 340.63L212.08 308.81L162.26 338.7C187.8 367.78 226.2 383.93 266.01 380.56L277.54 423.55C218.13 431.41 160.1 406.82 124.05 361.64L85.6399 384.68C136.25 451.17 223.84 484.11 309.61 461.16C371.35 444.64 419.4 402.56 445.42 349.38L488.06 364.88C457.17 431.16 398.22 483.82 321.99 504.22Z" fill="var(--color-brand)"/>
</svg>


9 packages/assets/external/apple.svg vendored Normal file
View File

@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg width="56px" height="56px" viewBox="0 0 56 56" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<!-- Generator: Sketch 61 (89581) - https://sketch.com -->
<title>Black Logo Square</title>
<desc>Created with Sketch.</desc>
<g id="Black-Logo-Square" stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
<path d="M28.2226562,20.3846154 C29.0546875,20.3846154 30.0976562,19.8048315 30.71875,19.0317864 C31.28125,18.3312142 31.6914062,17.352829 31.6914062,16.3744437 C31.6914062,16.2415766 31.6796875,16.1087095 31.65625,16 C30.7304687,16.0362365 29.6171875,16.640178 28.9492187,17.4494596 C28.421875,18.06548 27.9414062,19.0317864 27.9414062,20.0222505 C27.9414062,20.1671964 27.9648438,20.3121424 27.9765625,20.3604577 C28.0351562,20.3725366 28.1289062,20.3846154 28.2226562,20.3846154 Z M25.2929688,35 C26.4296875,35 26.9335938,34.214876 28.3515625,34.214876 C29.7929688,34.214876 30.109375,34.9758423 31.375,34.9758423 C32.6171875,34.9758423 33.4492188,33.792117 34.234375,32.6325493 C35.1132812,31.3038779 35.4765625,29.9993643 35.5,29.9389701 C35.4179688,29.9148125 33.0390625,28.9122695 33.0390625,26.0979021 C33.0390625,23.6579784 34.9140625,22.5588048 35.0195312,22.474253 C33.7773438,20.6382708 31.890625,20.5899555 31.375,20.5899555 C29.9804688,20.5899555 28.84375,21.4596313 28.1289062,21.4596313 C27.3554688,21.4596313 26.3359375,20.6382708 25.1289062,20.6382708 C22.8320312,20.6382708 20.5,22.5950413 20.5,26.2911634 C20.5,28.5861411 21.3671875,31.013986 22.4335938,32.5842339 C23.3476562,33.9129053 24.1445312,35 25.2929688,35 Z" id="" fill="currentColor" fill-rule="nonzero"/>
</g>
</svg>


28 packages/assets/external/bh.svg vendored Normal file
View File

@@ -0,0 +1,28 @@
<?xml version="1.0" encoding="UTF-8"?>
<svg id="Layer_1" data-name="Layer 1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 75.75 81.21">
<defs>
<style>
.cls-1 {
fill: #fff;
}
.cls-2 {
fill: #03ddff;
}
.cls-3 {
fill: #0d1129;
}
</style>
</defs>
<path class="cls-1" d="m37.05,81.21c-1.46,0-2.92-.36-4.21-1.03l-29.06-15.15C1.46,63.82.02,61.44.02,58.82L0,22.43c0-2.62,1.44-5,3.76-6.21L32.9,1.03c1.3-.68,2.75-1.03,4.21-1.03s2.92.36,4.21,1.03l30.66,15.98c2.32,1.21,3.76,3.59,3.76,6.2v34.73c.01,2.62-1.43,5-3.75,6.21l-30.73,16.02c-1.29.67-2.75,1.03-4.21,1.03Z"/>
<path class="cls-3" d="m37.05,78.21c-.98,0-1.96-.24-2.82-.69l-29.06-15.15c-1.32-.69-2.15-2.06-2.15-3.55l-.02-36.39c0-1.49.83-2.86,2.15-3.55L34.29,3.69c.87-.45,1.85-.69,2.82-.69s1.96.24,2.82.69l30.65,15.98c1.32.69,2.15,2.06,2.15,3.55v34.73c.01,1.49-.81,2.86-2.14,3.55l-30.73,16.02c-.87.45-1.84.69-2.82.69h0Z"/>
<path class="cls-2" d="m68.76,57.28c0,.37-.21.72-.54.89l-7.94,4.14-6.27-2.35v5.62l-10.48,5.46c-4.06,2.12-8.89,2.12-12.95,0l-23.02-12c-.34-.17-.54-.51-.54-.88l-.02-35.18c0-.37.21-.72.54-.89l23.1-12.04c4.06-2.12,8.89-2.12,12.95,0l10.46,5.45v5.62l6.27-2.35,7.88,4.11c.34.17.54.51.54.88l.02,33.52Z"/>
<path class="cls-3" d="m67.36,24.16v32.77l-6.65,3.47v-14.37c0-.29-.25-.52-.54-.5l-7.24.53c-.26.02-.46.24-.46.5v18.15l-8.47,4.41-2.7,1.41s1.45-2.08,1.63-5.9v-15.16h0s.14-5.68-4.52-8.8c4.66-3.11,4.52-8.79,4.52-8.79h0v-14.62c0-4.31-1.65-6.68-1.65-6.68l1.65.86,1.05.55,8.49,4.42v17.12c0,.26.2.48.46.5l7.24.52c.29.02.54-.21.54-.5v-13.34l6.65,3.46Z"/>
<path class="cls-3" d="m31.89,33.27v-7.96c0-.51-.48-.88-.98-.75l-3.94,1.07-3.06.82-8.63,2.33-2.22.6-.9.24v7.17l.9-.11,2.22-.26,8.63-1,3.06-.36,2.04-.23,1.67-.2c.69-.08,1.21-.67,1.21-1.36Zm-1.26-1.32l-1.62.27-2.04.33-3.06.5-8.63,1.41-2.22.36v-3.68l2.22-.53,8.63-2.08,3.06-.73,2.04-.49,1.62-.39v5.03Z"/>
<polygon class="cls-3" points="23.91 29.98 23.91 31.65 15.28 33.21 15.28 31.88 23.91 29.98"/>
<polygon class="cls-3" points="29.01 28.86 29.01 30.73 26.97 31.09 26.97 29.31 29.01 28.86"/>
<path class="cls-3" d="m30.69,46.97l-1.67-.2-2.04-.23-3.06-.36-8.63-1-2.22-.26-.9-.11v7.17l.9.24,2.22.6,8.63,2.33,3.06.82,3.94,1.07c.49.13.98-.24.98-.75v-7.96c0-.7-.52-1.28-1.21-1.36Zm-.05,7.71l-1.62-.39-2.04-.49-3.06-.73-8.63-2.08-2.22-.53v-3.68l2.22.36,8.63,1.41,3.06.5,2.04.33,1.62.27v5.03Z"/>
<polygon class="cls-3" points="23.92 51.62 23.92 49.95 15.29 48.39 15.29 49.72 23.92 51.62"/>
<polygon class="cls-3" points="29.02 52.74 29.02 50.87 26.98 50.51 26.98 52.29 29.02 52.74"/>
</svg>


16 packages/assets/external/bmac.svg vendored Normal file

File diff suppressed because one or more lines are too long


10 packages/assets/external/discord.svg vendored Normal file
View File

@@ -0,0 +1,10 @@
<svg width="71" height="55" viewBox="0 0 71 55" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0)">
<path d="M60.1045 4.8978C55.5792 2.8214 50.7265 1.2916 45.6527 0.41542C45.5603 0.39851 45.468 0.440769 45.4204 0.525289C44.7963 1.6353 44.105 3.0834 43.6209 4.2216C38.1637 3.4046 32.7345 3.4046 27.3892 4.2216C26.905 3.0581 26.1886 1.6353 25.5617 0.525289C25.5141 0.443589 25.4218 0.40133 25.3294 0.41542C20.2584 1.2888 15.4057 2.8186 10.8776 4.8978C10.8384 4.9147 10.8048 4.9429 10.7825 4.9795C1.57795 18.7309 -0.943561 32.1443 0.293408 45.3914C0.299005 45.4562 0.335386 45.5182 0.385761 45.5576C6.45866 50.0174 12.3413 52.7249 18.1147 54.5195C18.2071 54.5477 18.305 54.5139 18.3638 54.4378C19.7295 52.5728 20.9469 50.6063 21.9907 48.5383C22.0523 48.4172 21.9935 48.2735 21.8676 48.2256C19.9366 47.4931 18.0979 46.6 16.3292 45.5858C16.1893 45.5041 16.1781 45.304 16.3068 45.2082C16.679 44.9293 17.0513 44.6391 17.4067 44.3461C17.471 44.2926 17.5606 44.2813 17.6362 44.3151C29.2558 49.6202 41.8354 49.6202 53.3179 44.3151C53.3935 44.2785 53.4831 44.2898 53.5502 44.3433C53.9057 44.6363 54.2779 44.9293 54.6529 45.2082C54.7816 45.304 54.7732 45.5041 54.6333 45.5858C52.8646 46.6197 51.0259 47.4931 49.0921 48.2228C48.9662 48.2707 48.9102 48.4172 48.9718 48.5383C50.038 50.6034 51.2554 52.5699 52.5959 54.435C52.6519 54.5139 52.7526 54.5477 52.845 54.5195C58.6464 52.7249 64.529 50.0174 70.6019 45.5576C70.6551 45.5182 70.6887 45.459 70.6943 45.3942C72.1747 30.0791 68.2147 16.7757 60.1968 4.9823C60.1772 4.9429 60.1437 4.9147 60.1045 4.8978ZM23.7259 37.3253C20.2276 37.3253 17.3451 34.1136 17.3451 30.1693C17.3451 26.225 20.1717 23.0133 23.7259 23.0133C27.308 23.0133 30.1626 26.2532 30.1066 30.1693C30.1066 34.1136 27.28 37.3253 23.7259 37.3253ZM47.3178 37.3253C43.8196 37.3253 40.9371 34.1136 40.9371 30.1693C40.9371 26.225 43.7636 23.0133 47.3178 23.0133C50.9 23.0133 53.7545 26.2532 53.6986 30.1693C53.6986 34.1136 50.9 37.3253 47.3178 37.3253Z" fill="currentColor"/>
</g>
<defs>
<clipPath id="clip0">
<rect width="71" height="55" fill="white"/>
</clipPath>
</defs>
</svg>


1 packages/assets/external/kofi.svg vendored Normal file
View File

@@ -0,0 +1 @@
<svg role="img" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg"><title>Ko-fi</title><path d="M23.881 8.948c-.773-4.085-4.859-4.593-4.859-4.593H.723c-.604 0-.679.798-.679.798s-.082 7.324-.022 11.822c.164 2.424 2.586 2.672 2.586 2.672s8.267-.023 11.966-.049c2.438-.426 2.683-2.566 2.658-3.734 4.352.24 7.422-2.831 6.649-6.916zm-11.062 3.511c-1.246 1.453-4.011 3.976-4.011 3.976s-.121.119-.31.023c-.076-.057-.108-.09-.108-.09-.443-.441-3.368-3.049-4.034-3.954-.709-.965-1.041-2.7-.091-3.71.951-1.01 3.005-1.086 4.363.407 0 0 1.565-1.782 3.468-.963 1.904.82 1.832 3.011.723 4.311zm6.173.478c-.928.116-1.682.028-1.682.028V7.284h1.77s1.971.551 1.971 2.638c0 1.913-.985 2.667-2.059 3.015z" fill="currentColor" /></svg>


1 packages/assets/external/mastodon.svg vendored Normal file
View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512"><path d="M480,173.59c0-104.13-68.26-134.65-68.26-134.65C377.3,23.15,318.2,16.5,256.8,16h-1.51c-61.4.5-120.46,7.15-154.88,22.94,0,0-68.27,30.52-68.27,134.65,0,23.85-.46,52.35.29,82.59C34.91,358,51.11,458.37,145.32,483.29c43.43,11.49,80.73,13.89,110.76,12.24,54.47-3,85-19.42,85-19.42l-1.79-39.5s-38.93,12.27-82.64,10.77c-43.31-1.48-89-4.67-96-57.81a108.44,108.44,0,0,1-1-14.9,558.91,558.91,0,0,0,96.39,12.85c32.95,1.51,63.84-1.93,95.22-5.67,60.18-7.18,112.58-44.24,119.16-78.09C480.84,250.42,480,173.59,480,173.59ZM399.46,307.75h-50V185.38c0-25.8-10.86-38.89-32.58-38.89-24,0-36.06,15.53-36.06,46.24v67H231.16v-67c0-30.71-12-46.24-36.06-46.24-21.72,0-32.58,13.09-32.58,38.89V307.75h-50V181.67q0-38.65,19.75-61.39c13.6-15.15,31.4-22.92,53.51-22.92,25.58,0,44.95,9.82,57.75,29.48L256,147.69l12.45-20.85c12.81-19.66,32.17-29.48,57.75-29.48,22.11,0,39.91,7.77,53.51,22.92Q399.5,143,399.46,181.67Z"/></svg>


View File

@@ -0,0 +1 @@
<svg role="img" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg"><title>Open Collective</title><path d="M12 0C5.373 0 0 5.373 0 12s5.373 12 12 12c2.54 0 4.894-.79 6.834-2.135l-3.107-3.109a7.715 7.715 0 1 1 0-13.512l3.107-3.109A11.943 11.943 0 0 0 12 0zm9.865 5.166l-3.109 3.107A7.67 7.67 0 0 1 19.715 12a7.682 7.682 0 0 1-.959 3.727l3.109 3.107A11.943 11.943 0 0 0 24 12c0-2.54-.79-4.894-2.135-6.834z"/></svg>


1 packages/assets/external/patreon.svg vendored Normal file
View File

@@ -0,0 +1 @@
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 24 24" style="enable-background:new 0 0 24 24;" xml:space="preserve"><path fill="currentColor" d="M23,7.2c0-3.1-2.4-5.6-5.2-6.5c-3.5-1.1-8.1-1-11.4,0.6C2.4,3.2,1.1,7.4,1,11.5C1,15,1.3,23.9,6.4,24c3.8,0,4.3-4.8,6.1-7.1 c1.2-1.7,2.8-2.1,4.8-2.6C20.7,13.4,23,10.7,23,7.2z"/></svg>


1 packages/assets/external/paypal.svg vendored Normal file
View File

@@ -0,0 +1 @@
<svg role="img" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg"><title>PayPal</title><path d="M7.076 21.337H2.47a.641.641 0 0 1-.633-.74L4.944.901C5.026.382 5.474 0 5.998 0h7.46c2.57 0 4.578.543 5.69 1.81 1.01 1.15 1.304 2.42 1.012 4.287-.023.143-.047.288-.077.437-.983 5.05-4.349 6.797-8.647 6.797h-2.19c-.524 0-.968.382-1.05.9l-1.12 7.106zm14.146-14.42a3.35 3.35 0 0 0-.607-.541c-.013.076-.026.175-.041.254-.93 4.778-4.005 7.201-9.138 7.201h-2.19a.563.563 0 0 0-.556.479l-1.187 7.527h-.506l-.24 1.516a.56.56 0 0 0 .554.647h3.882c.46 0 .85-.334.922-.788.06-.26.76-4.852.816-5.09a.932.932 0 0 1 .923-.788h.58c3.76 0 6.705-1.528 7.565-5.946.36-1.847.174-3.388-.777-4.471z" fill="currentColor"/></svg>


1 packages/assets/external/reddit.svg vendored Normal file
View File

@@ -0,0 +1 @@
<svg id="Layer_1" enable-background="new 0 0 100 100" height="512" viewBox="0 0 100 100" width="512" xmlns="http://www.w3.org/2000/svg"><g id="_x33_2.Reddit"><path id="Icon_69_" d="m90 50.6c-.2-4.8-4.2-8.6-9.1-8.5-2.2.1-4.2 1-5.7 2.4-6.8-4.7-14.9-7.2-23.2-7.4l3.9-18.7 12.9 2.6c.4 3.3 3.3 5.7 6.6 5.3s5.7-3.3 5.3-6.6-3.3-5.7-6.6-5.3c-1.9.2-3.6 1.3-4.5 2.9l-14.7-2.9c-1-.2-2 .4-2.2 1.4l-4.4 20.9c-8.4.1-16.5 2.7-23.5 7.4-3.5-3.3-9.1-3.2-12.4.4-3.3 3.5-3.2 9.1.4 12.4.7.6 1.5 1.2 2.4 1.6-.1.9-.1 1.8 0 2.6 0 13.5 15.7 24.5 35.1 24.5s35.1-10.9 35.1-24.5c.1-.9.1-1.8 0-2.6 2.8-1.4 4.7-4.5 4.6-7.9zm-60.3 6.1c0-3.3 2.7-6 6-6s6 2.7 6 6-2.7 6-6 6-6-2.7-6-6zm35 16.6c-4.3 3.2-9.5 4.9-14.8 4.6-5.3.2-10.6-1.4-14.8-4.6-.6-.7-.5-1.7.2-2.3.6-.5 1.4-.5 2.1 0 3.6 2.6 8 4 12.5 3.8 4.5.2 8.9-1 12.6-3.7.7-.6 1.7-.6 2.4 0 .6.7.6 1.7 0 2.4v-.2zm-1-10.3c-3.3 0-6-2.7-6-6s2.7-6 6-6 6 2.7 6 6c.1 3.3-2.4 6.1-5.8 6.2-.1 0-.2 0-.3 0z"/></g></svg>


View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 127.14 96.36"><defs><style>.cls-1{fill:#5865f2;}</style></defs><g id="图层_2" data-name="图层 2"><g id="Discord_Logos" data-name="Discord Logos"><g id="Discord_Logo_-_Large_-_White" data-name="Discord Logo - Large - White"><path class="cls-1" d="M107.7,8.07A105.15,105.15,0,0,0,81.47,0a72.06,72.06,0,0,0-3.36,6.83A97.68,97.68,0,0,0,49,6.83,72.37,72.37,0,0,0,45.64,0,105.89,105.89,0,0,0,19.39,8.09C2.79,32.65-1.71,56.6.54,80.21h0A105.73,105.73,0,0,0,32.71,96.36,77.7,77.7,0,0,0,39.6,85.25a68.42,68.42,0,0,1-10.85-5.18c.91-.66,1.8-1.34,2.66-2a75.57,75.57,0,0,0,64.32,0c.87.71,1.76,1.39,2.66,2a68.68,68.68,0,0,1-10.87,5.19,77,77,0,0,0,6.89,11.1A105.25,105.25,0,0,0,126.6,80.22h0C129.24,52.84,122.09,29.11,107.7,8.07ZM42.45,65.69C36.18,65.69,31,60,31,53s5-12.74,11.43-12.74S54,46,53.89,53,48.84,65.69,42.45,65.69Zm42.24,0C78.41,65.69,73.25,60,73.25,53s5-12.74,11.44-12.74S96.23,46,96.12,53,91.08,65.69,84.69,65.69Z"/></g></g></g></svg>


View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 97.63 96.03" fill="currentColor"> <path fill-rule="evenodd" clip-rule="evenodd" d="M48.854 0C21.839 0 0 22 0 49.217c0 21.756 13.993 40.172 33.405 46.69 2.427.49 3.316-1.059 3.316-2.362 0-1.141-.08-5.052-.08-9.127-13.59 2.934-16.42-5.867-16.42-5.867-2.184-5.704-5.42-7.17-5.42-7.17-4.448-3.015.324-3.015.324-3.015 4.934.326 7.523 5.052 7.523 5.052 4.367 7.496 11.404 5.378 14.235 4.074.404-3.178 1.699-5.378 3.074-6.6-10.839-1.141-22.243-5.378-22.243-24.283 0-5.378 1.94-9.778 5.014-13.2-.485-1.222-2.184-6.275.486-13.038 0 0 4.125-1.304 13.426 5.052a46.97 46.97 0 0 1 12.214-1.63c4.125 0 8.33.571 12.213 1.63 9.302-6.356 13.427-5.052 13.427-5.052 2.67 6.763.97 11.816.485 13.038 3.155 3.422 5.015 7.822 5.015 13.2 0 18.905-11.404 23.06-22.324 24.283 1.78 1.548 3.316 4.481 3.316 9.126 0 6.6-.08 11.897-.08 13.526 0 1.304.89 2.853 3.316 2.364 19.412-6.52 33.405-24.935 33.405-46.691C97.707 22 75.788 0 48.854 0z"></path> </svg>


View File

@@ -0,0 +1,6 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="93.97 97.52 192.07 185">
<path fill="#e24329" d="m282.83 170.73-.27-.69-26.14-68.22a6.81 6.81 0 0 0-2.69-3.24 7 7 0 0 0-8 .43 7 7 0 0 0-2.32 3.52l-17.65 54h-71.47l-17.65-54a6.86 6.86 0 0 0-2.32-3.53 7 7 0 0 0-8-.43 6.87 6.87 0 0 0-2.69 3.24L97.44 170l-.26.69a48.54 48.54 0 0 0 16.1 56.1l.09.07.24.17 39.82 29.82 19.7 14.91 12 9.06a8.07 8.07 0 0 0 9.76 0l12-9.06 19.7-14.91 40.06-30 .1-.08a48.56 48.56 0 0 0 16.08-56.04Z" />
<path d="m282.83 170.73-.27-.69a88.3 88.3 0 0 0-35.15 15.8L190 229.25c19.55 14.79 36.57 27.64 36.57 27.64l40.06-30 .1-.08a48.56 48.56 0 0 0 16.1-56.08Z" fill="#fc6d26" />
<path fill="#fca326" d="m153.43 256.89 19.7 14.91 12 9.06a8.07 8.07 0 0 0 9.76 0l12-9.06 19.7-14.91S209.55 244 190 229.25c-19.55 14.75-36.57 27.64-36.57 27.64Z" />
<path d="M132.58 185.84A88.19 88.19 0 0 0 97.44 170l-.26.69a48.54 48.54 0 0 0 16.1 56.1l.09.07.24.17 39.82 29.82L190 229.21Z" fill="#fc6d26" />
</svg>


18 packages/assets/external/sso/google.svg vendored Normal file
View File

@@ -0,0 +1,18 @@
<svg width="100%" height="100%" viewBox="0 0 100 100" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" xml:space="preserve" xmlns:serif="http://www.serif.com/" style="fill-rule:evenodd;clip-rule:evenodd;stroke-linejoin:round;stroke-miterlimit:2;">
<circle cx="50" cy="50" r="50" style="fill:white;"/>
<g id="Google__G__Logo.svg" transform="matrix(0.0991612,0,0,0.0991612,49.3739,50)">
<g transform="matrix(1,0,0,1,-352.8,-360)">
<clipPath id="_clip1">
<rect x="0" y="0" width="705.6" height="720"/>
</clipPath>
<g clip-path="url(#_clip1)">
<g transform="matrix(1,0,0,1,4477.16,2891.98)">
<path d="M-4117.16,-2597.44L-4117.16,-2458.02L-3923.42,-2458.02C-3931.93,-2413.18 -3957.46,-2375.22 -3995.75,-2349.69L-3878.91,-2259.03C-3810.84,-2321.87 -3771.56,-2414.16 -3771.56,-2523.8C-3771.56,-2549.33 -3773.85,-2573.87 -3778.11,-2597.43L-4117.16,-2597.44Z" style="fill:rgb(66,133,244);fill-rule:nonzero;"/>
<path d="M-4318.92,-2463.46L-4345.27,-2443.29L-4438.55,-2370.64C-4379.31,-2253.15 -4257.9,-2171.98 -4117.17,-2171.98C-4019.97,-2171.98 -3938.48,-2204.05 -3878.92,-2259.03L-3995.75,-2349.69C-4027.83,-2328.09 -4068.74,-2315 -4117.17,-2315C-4210.77,-2315 -4290.3,-2378.16 -4318.77,-2463.25L-4318.92,-2463.46Z" style="fill:rgb(52,168,83);fill-rule:nonzero;"/>
<path d="M-4438.55,-2693.33C-4463.09,-2644.89 -4477.16,-2590.24 -4477.16,-2531.99C-4477.16,-2473.73 -4463.09,-2419.08 -4438.55,-2370.64C-4438.55,-2370.32 -4318.76,-2463.59 -4318.76,-2463.59C-4325.96,-2485.19 -4330.22,-2508.09 -4330.22,-2531.99C-4330.22,-2555.88 -4325.96,-2578.79 -4318.76,-2600.39L-4438.55,-2693.33Z" style="fill:rgb(251,188,5);fill-rule:nonzero;"/>
<path d="M-4117.16,-2748.64C-4064.14,-2748.64 -4017.02,-2730.31 -3979.38,-2694.97L-3876.29,-2798.06C-3938.8,-2856.31 -4019.96,-2891.99 -4117.16,-2891.99C-4257.89,-2891.99 -4379.31,-2811.15 -4438.55,-2693.33L-4318.76,-2600.38C-4290.29,-2685.47 -4210.76,-2748.64 -4117.16,-2748.64Z" style="fill:rgb(234,67,53);fill-rule:nonzero;"/>
</g>
</g>
</g>
</g>
</svg>


View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 21 21"><title>MS-SymbolLockup</title><rect x="1" y="1" width="9" height="9" fill="#f25022"/><rect x="1" y="11" width="9" height="9" fill="#00a4ef"/><rect x="11" y="1" width="9" height="9" fill="#7fba00"/><rect x="11" y="11" width="9" height="9" fill="#ffb900"/></svg>


View File

@@ -0,0 +1,4 @@
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-steam" viewBox="0 0 16 16">
<path d="M.329 10.333A8.01 8.01 0 0 0 7.99 16C12.414 16 16 12.418 16 8s-3.586-8-8.009-8A8.006 8.006 0 0 0 0 7.468l.003.006 4.304 1.769A2.198 2.198 0 0 1 5.62 8.88l1.96-2.844-.001-.04a3.046 3.046 0 0 1 3.042-3.043 3.046 3.046 0 0 1 3.042 3.043 3.047 3.047 0 0 1-3.111 3.044l-2.804 2a2.223 2.223 0 0 1-3.075 2.11 2.217 2.217 0 0 1-1.312-1.568L.33 10.333Z"/>
<path d="M4.868 12.683a1.715 1.715 0 0 0 1.318-3.165 1.705 1.705 0 0 0-1.263-.02l1.023.424a1.261 1.261 0 1 1-.97 2.33l-.99-.41a1.7 1.7 0 0 0 .882.84Zm3.726-6.687a2.03 2.03 0 0 0 2.027 2.029 2.03 2.03 0 0 0 2.027-2.029 2.03 2.03 0 0 0-2.027-2.027 2.03 2.03 0 0 0-2.027 2.027Zm2.03-1.527a1.524 1.524 0 1 1-.002 3.048 1.524 1.524 0 0 1 .002-3.048Z"/>
</svg>


1 packages/assets/external/twitter.svg vendored Normal file
View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512"><!--! Font Awesome Pro 6.4.0 by @fontawesome - https://fontawesome.com License - https://fontawesome.com/license (Commercial License) Copyright 2023 Fonticons, Inc. --><path d="M459.37 151.716c.325 4.548.325 9.097.325 13.645 0 138.72-105.583 298.558-298.558 298.558-59.452 0-114.68-17.219-161.137-47.106 8.447.974 16.568 1.299 25.34 1.299 49.055 0 94.213-16.568 130.274-44.832-46.132-.975-84.792-31.188-98.112-72.772 6.498.974 12.995 1.624 19.818 1.624 9.421 0 18.843-1.3 27.614-3.573-48.081-9.747-84.143-51.98-84.143-102.985v-1.299c13.969 7.797 30.214 12.67 47.431 13.319-28.264-18.843-46.781-51.005-46.781-87.391 0-19.492 5.197-37.36 14.294-52.954 51.655 63.675 129.3 105.258 216.365 109.807-1.624-7.797-2.599-15.918-2.599-24.04 0-57.828 46.782-104.934 104.934-104.934 30.213 0 57.502 12.67 76.67 33.137 23.715-4.548 46.456-13.32 66.599-25.34-7.798 24.366-24.366 44.833-46.132 57.827 21.117-2.273 41.584-8.122 60.426-16.243-14.292 20.791-32.161 39.308-52.628 54.253z"/></svg>


3 packages/assets/external/windows.svg vendored Normal file
View File

@@ -0,0 +1,3 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 4875 4875">
<path d="M0 0h2311v2310H0zm2564 0h2311v2310H2564zM0 2564h2311v2311H0zm2564 0h2311v2311H2564" fill="currentColor"/>
</svg>


6 packages/assets/icons.d.ts vendored Normal file
View File

@@ -0,0 +1,6 @@
declare module '*.svg?component' {
import type { FunctionalComponent, SVGAttributes } from 'vue'
const src: FunctionalComponent<SVGAttributes>
export default src
}
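For context: a module matched by the declaration above is consumed like any other Vue component. The sketch below assumes a bundler plugin such as vite-svg-loader resolves the ?component query; the icon path and component name are illustrative and not taken from this diff.
// Sketch only: assumes vite-svg-loader (or an equivalent plugin) and a hypothetical icon path.
import { defineComponent, h } from 'vue'
import SettingsIcon from './icons/settings.svg?component'

export default defineComponent({
  name: 'SettingsButton',
  setup() {
    // The import is typed as FunctionalComponent<SVGAttributes> via the declaration above,
    // so it renders like any other component; extra props land on the <svg> element as attributes.
    return () =>
      h('button', { type: 'button' }, [h(SettingsIcon, { width: 24, height: 24 })])
  },
})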

View File

@@ -0,0 +1,6 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
<line x1="21" y1="6" x2="3" y2="6"></line>
<line x1="15" y1="12" x2="3" y2="12"></line>
<line x1="17" y1="18" x2="3" y2="18"></line>
</svg>


View File

@@ -0,0 +1,6 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
<rect x="2" y="4" width="20" height="5" rx="2"></rect>
<path d="M4 9v9a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V9"></path>
<path d="M10 13h4"></path>
</svg>


View File

@@ -0,0 +1,6 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
<path d="M12 6v12"></path>
<path d="M17.196 9 6.804 15"></path>
<path d="m6.804 9 10.392 6"></path>
</svg>


View File

@@ -0,0 +1,7 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
<path d="M18 8A6 6 0 0 0 6 8c0 7-3 9-3 9h18s-3-2-3-9"></path>
<path d="M13.73 21a2 2 0 0 1-3.46 0"></path>
<path d="M2 8c0-2.2.7-4.3 2-6"></path>
<path d="M22 8a10 10 0 0 0-2-6"></path>
</svg>


View File

@@ -0,0 +1,5 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
<path d="M18 8A6 6 0 0 0 6 8c0 7-3 9-3 9h18s-3-2-3-9"></path>
<path d="M13.73 21a2 2 0 0 1-3.46 0"></path>
</svg>


View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-bold"><path d="M14 12a4 4 0 0 0 0-8H6v8"/><path d="M15 20a4 4 0 0 0 0-8H6v8Z"/></svg>


View File

@@ -0,0 +1,5 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
<path d="M4 19.5A2.5 2.5 0 0 1 6.5 17H20"></path>
<path d="M6.5 2H20v20H6.5A2.5 2.5 0 0 1 4 19.5v-15A2.5 2.5 0 0 1 6.5 2z"></path>
</svg>


View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-bookmark"><path d="m19 21-7-4-7 4V5a2 2 0 0 1 2-2h10a2 2 0 0 1 2 2v16z"/></svg>


View File

@@ -0,0 +1,6 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
<path d="M21 16V8a2 2 0 0 0-1-1.73l-7-4a2 2 0 0 0-2 0l-7 4A2 2 0 0 0 3 8v8a2 2 0 0 0 1 1.73l7 4a2 2 0 0 0 2 0l7-4A2 2 0 0 0 21 16z"></path>
<polyline points="3.29 7 12 12 20.71 7"></polyline>
<line x1="12" y1="22" x2="12" y2="12"></line>
</svg>


View File

@@ -0,0 +1,5 @@
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path
d="M8 7V3M16 7V3M7 11H17M5 21H19C20.1046 21 21 20.1046 21 19V7C21 5.89543 20.1046 5 19 5H5C3.89543 5 3 5.89543 3 7V19C3 20.1046 3.89543 21 5 21Z"
stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" />
</svg>


View File

@@ -0,0 +1,7 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
<path d="M3 3v18h18"></path>
<path d="M18 17V9"></path>
<path d="M13 17V5"></path>
<path d="M8 17v-3"></path>
</svg>


View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-check-check"><path d="M18 6 7 17l-5-5"/><path d="m22 10-7.5 7.5L13 16"/></svg>


View File

@@ -0,0 +1,4 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
<path d="M12 22c5.523 0 10-4.477 10-10S17.523 2 12 2 2 6.477 2 12s4.477 10 10 10z"></path>
<path d="m9 12 2 2 4-4"></path>
</svg>


View File

@@ -0,0 +1,4 @@
<svg fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"
viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg">
<path d="M20 6L9 17l-5-5" />
</svg>


View File

@@ -0,0 +1,4 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
<polyline points="15 18 9 12 15 6"></polyline>
</svg>


View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><polyline points="9 18 15 12 9 6"></polyline></svg>


View File

@@ -0,0 +1,5 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
<circle cx="12" cy="12" r="10"></circle>
<line x1="15" y1="9" x2="9" y2="15"></line>
<line x1="9" y1="9" x2="15" y2="15"></line>
</svg>


View File

@@ -0,0 +1,4 @@
<svg fill="none" viewBox="0 0 24 24" stroke="currentColor" xmlns="http://www.w3.org/2000/svg">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2"
d="M9.75 17L9 20l-1 1h8l-1-1-.75-3M3 13h18M5 17h14a2 2 0 002-2V5a2 2 0 00-2-2H5a2 2 0 00-2 2v10a2 2 0 002 2z" />
</svg>


View File

@@ -0,0 +1 @@
<svg fill="none" viewBox="0 0 24 24" stroke="currentColor" stroke-width="2"><path stroke-linecap="round" stroke-linejoin="round" d="M8 5H6a2 2 0 00-2 2v12a2 2 0 002 2h10a2 2 0 002-2v-1M8 5a2 2 0 002 2h2a2 2 0 002-2M8 5a2 2 0 012-2h2a2 2 0 012 2m0 0h2a2 2 0 012 2v3m2 4H10m0 0l3-3m-3 3l3 3" /></svg>


View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><polyline points="16 18 22 12 16 6"></polyline><polyline points="8 6 2 12 8 18"></polyline></svg>


View File

@@ -0,0 +1,7 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
<circle cx="8" cy="8" r="6"></circle>
<path d="M18.09 10.37A6 6 0 1 1 10.34 18"></path>
<path d="M7 6h1v4"></path>
<path d="m16.71 13.88.7.71-2.82 2.82"></path>
</svg>


View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" style="enable-background:new 0 0 24 24" xml:space="preserve"><path d="M9 5v4m0 0H5m4 0L4 4m11 1v4m0 0h4m-4 0 5-5M9 19v-4m0 0H5m4 0-5 5m11-5 5 5m-5-5v4m0-4h4" style="fill:none;stroke:currentColor;stroke-width:2;stroke-linecap:round;stroke-linejoin:round"/></svg>


View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-copy"><rect width="14" height="14" x="8" y="8" rx="2" ry="2"/><path d="M4 16c-1.1 0-2-.9-2-2V4c0-1.1.9-2 2-2h10c1.1 0 2 .9 2 2"/></svg>


Some files were not shown because too many files have changed in this diff.