You've already forked AstralRinth
forked from didirus/AstralRinth
Move files in preparation for monorepo migration
This commit is contained in:
728
libs/theseus/src/state/children.rs
Normal file
728
libs/theseus/src/state/children.rs
Normal file
@@ -0,0 +1,728 @@
|
||||
use super::{Profile, ProfilePathId};
|
||||
use chrono::{DateTime, Utc};
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use std::{collections::HashMap, sync::Arc};
|
||||
use tokio::process::Child;
|
||||
use tokio::process::Command;
|
||||
use tokio::sync::RwLock;
|
||||
|
||||
use crate::event::emit::emit_process;
|
||||
use crate::event::ProcessPayloadType;
|
||||
use crate::util::fetch::read_json;
|
||||
use crate::util::io::IOError;
|
||||
use crate::{profile, ErrorKind};
|
||||
|
||||
use tokio::task::JoinHandle;
|
||||
use uuid::Uuid;
|
||||
|
||||
const PROCESSES_JSON: &str = "processes.json";
|
||||
|
||||
// Child processes (instances of Minecraft)
// A wrapper over a Hashmap connecting PID -> MinecraftChild
// (keyed by the launcher-assigned UUID, not the OS PID)
pub struct Children(HashMap<Uuid, Arc<RwLock<MinecraftChild>>>);
|
||||
|
||||
#[derive(Debug)]
/// The handle we hold on a running game process.
pub enum ChildType {
    // A child process that is being managed by tokio
    TokioChild(Child),
    // A child process that was rescued from a cache (e.g. a process that was launched by theseus before the launcher was restarted)
    // This may not have all the same functionality as a TokioChild
    RescuedPID(u32),
}
|
||||
#[derive(Serialize, Deserialize, Debug)]
/// On-disk snapshot of a launched process (stored in caches/metadata/processes.json),
/// used to re-attach to still-running games after a launcher restart.
pub struct ProcessCache {
    pub pid: u32,
    pub uuid: Uuid,
    // OS-reported start time; compared on rescue to detect PID reuse
    pub start_time: u64,
    // Process name; compared on rescue
    pub name: String,
    // Executable path; compared on rescue
    pub exe: String,
    pub profile_relative_path: ProfilePathId,
    // Optional command to run after the game exits
    pub post_command: Option<String>,
}
|
||||
impl ChildType {
|
||||
    /// Non-blocking check whether the process has exited.
    ///
    /// Returns `Ok(None)` while the process is still running, `Ok(Some(code))`
    /// once it has exited. A `TokioChild` reports its real exit code (0 if
    /// unavailable); a `RescuedPID` only has liveness info via sysinfo, so any
    /// exited (or unfindable) process is reported as exit code 0.
    pub async fn try_wait(&mut self) -> crate::Result<Option<i32>> {
        match self {
            ChildType::TokioChild(child) => Ok(child
                .try_wait()
                .map_err(IOError::from)?
                .map(|x| x.code().unwrap_or(0))),
            ChildType::RescuedPID(pid) => {
                let mut system = sysinfo::System::new();
                // refresh_process returns false when the PID no longer exists
                if !system.refresh_process(sysinfo::Pid::from_u32(*pid)) {
                    return Ok(Some(0));
                }
                let process = system.process(sysinfo::Pid::from_u32(*pid));
                if let Some(process) = process {
                    if process.status() == sysinfo::ProcessStatus::Run {
                        Ok(None)
                    } else {
                        Ok(Some(0))
                    }
                } else {
                    Ok(Some(0))
                }
            }
        }
    }
|
||||
pub async fn kill(&mut self) -> crate::Result<()> {
|
||||
match self {
|
||||
ChildType::TokioChild(child) => {
|
||||
Ok(child.kill().await.map_err(IOError::from)?)
|
||||
}
|
||||
ChildType::RescuedPID(pid) => {
|
||||
let mut system = sysinfo::System::new();
|
||||
if system.refresh_process(sysinfo::Pid::from_u32(*pid)) {
|
||||
let process = system.process(sysinfo::Pid::from_u32(*pid));
|
||||
if let Some(process) = process {
|
||||
process.kill();
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
pub fn id(&self) -> Option<u32> {
|
||||
match self {
|
||||
ChildType::TokioChild(child) => child.id(),
|
||||
ChildType::RescuedPID(pid) => Some(*pid),
|
||||
}
|
||||
}
|
||||
|
||||
    // Caches the process so that it can be restored if the launcher is restarted
    // Stored in the caches/metadata/processes.json file
    pub async fn cache_process(
        &self,
        uuid: uuid::Uuid,
        profile_path_id: ProfilePathId,
        post_command: Option<String>,
    ) -> crate::Result<()> {
        // PID 0 is a sentinel when a tokio child has no id (already exited)
        let pid = match self {
            ChildType::TokioChild(child) => child.id().unwrap_or(0),
            ChildType::RescuedPID(pid) => *pid,
        };

        let state = crate::State::get().await?;

        let mut system = sysinfo::System::new();
        system.refresh_processes();
        let process =
            system.process(sysinfo::Pid::from_u32(pid)).ok_or_else(|| {
                crate::ErrorKind::LauncherError(format!(
                    "Could not find process {}",
                    pid
                ))
            })?;
        // Identity fields checked later (insert_cached_process) to detect PID reuse
        let start_time = process.start_time();
        let name = process.name().to_string();

        let Some(path) = process.exe() else {
            return Err(ErrorKind::LauncherError(format!(
                "Cached process {} has no accessable path",
                pid
            ))
            .into());
        };

        let exe = path.to_string_lossy().to_string();

        let cached_process = ProcessCache {
            pid,
            start_time,
            name,
            exe,
            post_command,
            uuid,
            profile_relative_path: profile_path_id,
        };

        let children_path = state
            .directories
            .caches_meta_dir()
            .await
            .join(PROCESSES_JSON);
        // Read-modify-write of the whole JSON map; a missing/unreadable file
        // is treated as empty rather than an error
        let mut children_caches = if let Ok(children_json) =
            read_json::<HashMap<uuid::Uuid, ProcessCache>>(
                &children_path,
                &state.io_semaphore,
            )
            .await
        {
            children_json
        } else {
            HashMap::new()
        };
        children_caches.insert(uuid, cached_process);
        crate::util::fetch::write(
            &children_path,
            &serde_json::to_vec(&children_caches)?,
            &state.io_semaphore,
        )
        .await?;

        Ok(())
    }
|
||||
|
||||
// Removes the process from the cache (ie: on process exit)
|
||||
pub async fn remove_cache(&self, uuid: uuid::Uuid) -> crate::Result<()> {
|
||||
let state = crate::State::get().await?;
|
||||
let children_path = state
|
||||
.directories
|
||||
.caches_meta_dir()
|
||||
.await
|
||||
.join(PROCESSES_JSON);
|
||||
let mut children_caches = if let Ok(children_json) =
|
||||
read_json::<HashMap<uuid::Uuid, ProcessCache>>(
|
||||
&children_path,
|
||||
&state.io_semaphore,
|
||||
)
|
||||
.await
|
||||
{
|
||||
children_json
|
||||
} else {
|
||||
HashMap::new()
|
||||
};
|
||||
children_caches.remove(&uuid);
|
||||
crate::util::fetch::write(
|
||||
&children_path,
|
||||
&serde_json::to_vec(&children_caches)?,
|
||||
&state.io_semaphore,
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
// Minecraft Child, bundles together the PID, the actual Child, and the easily queryable stdout and stderr streams (if needed)
#[derive(Debug)]
pub struct MinecraftChild {
    pub uuid: Uuid,
    pub profile_relative_path: ProfilePathId,
    pub manager: Option<JoinHandle<crate::Result<i32>>>, // None when future has completed and been handled
    // Shared with the manager task, which may swap in the post-command child
    pub current_child: Arc<RwLock<ChildType>>,
    pub last_updated_playtime: DateTime<Utc>, // The last time we updated the playtime for the associated profile
}
|
||||
|
||||
impl Children {
|
||||
pub fn new() -> Self {
|
||||
Children(HashMap::new())
|
||||
}
|
||||
|
||||
    // Loads cached processes from the caches/metadata/processes.json file, re-inserts them into the hashmap, and removes them from the file
    // This will only be called once, on startup. Only processes who match a cached process (name, time started, pid, etc) will be re-inserted
    pub async fn rescue_cache(&mut self) -> crate::Result<()> {
        let state = crate::State::get().await?;
        let children_path = state
            .directories
            .caches_meta_dir()
            .await
            .join(PROCESSES_JSON);

        let mut children_caches = if let Ok(children_json) =
            read_json::<HashMap<uuid::Uuid, ProcessCache>>(
                &children_path,
                &state.io_semaphore,
            )
            .await
        {
            // Overwrite the file with an empty hashmap- we will re-insert the cached processes
            let empty = HashMap::<uuid::Uuid, ProcessCache>::new();
            crate::util::fetch::write(
                &children_path,
                &serde_json::to_vec(&empty)?,
                &state.io_semaphore,
            )
            .await?;

            // Return the cached processes
            children_json
        } else {
            HashMap::new()
        };

        for (_, cache) in children_caches.drain() {
            let uuid = cache.uuid;
            // A failed rescue (e.g. PID was reused) is logged and skipped, not fatal
            match self.insert_cached_process(cache).await {
                Ok(child) => {
                    // NOTE(review): insert_cached_process already inserts into
                    // self.0 under this uuid; this insert looks redundant but
                    // harmless — confirm before removing
                    self.0.insert(uuid, child);
                }
                Err(e) => tracing::warn!(
                    "Failed to rescue cached process {}: {}",
                    uuid,
                    e
                ),
            }
        }
        Ok(())
    }
|
||||
|
||||
    // Runs the command in process, inserts a child process to keep track of, and returns a reference to the container struct MinecraftChild
    // The threads for stdout and stderr are spawned here
    // Unlike a Hashmap's 'insert', this directly returns the reference to the MinecraftChild rather than any previously stored MinecraftChild that may exist
    #[tracing::instrument(skip(
        self,
        uuid,
        mc_command,
        post_command,
        censor_strings
    ))]
    #[tracing::instrument(level = "trace", skip(self))]
    #[theseus_macros::debug_pin]
    pub async fn insert_new_process(
        &mut self,
        uuid: Uuid,
        profile_relative_path: ProfilePathId,
        mut mc_command: Command,
        post_command: Option<String>, // Command to run after minecraft.
        // NOTE(review): censor_strings is not used anywhere in this body —
        // presumably intended for log censoring; confirm before removing
        censor_strings: HashMap<String, String>,
    ) -> crate::Result<Arc<RwLock<MinecraftChild>>> {
        // Takes the first element of the commands vector and spawns it
        let mc_proc = mc_command.spawn().map_err(IOError::from)?;

        let child = ChildType::TokioChild(mc_proc);

        // Slots child into manager
        let pid = child.id().ok_or_else(|| {
            crate::ErrorKind::LauncherError(
                "Process immediately failed, could not get PID".to_string(),
            )
        })?;

        // Caches process so that it can be restored if the launcher is restarted
        child
            .cache_process(
                uuid,
                profile_relative_path.clone(),
                post_command.clone(),
            )
            .await?;
        let current_child = Arc::new(RwLock::new(child));
        // Background task: waits for exit, updates playtime, runs the post-command
        let manager = Some(tokio::spawn(Self::sequential_process_manager(
            uuid,
            post_command,
            pid,
            current_child.clone(),
            profile_relative_path.clone(),
        )));

        emit_process(
            uuid,
            pid,
            ProcessPayloadType::Launched,
            "Launched Minecraft",
        )
        .await?;

        let last_updated_playtime = Utc::now();

        // Create MinecraftChild
        let mchild = MinecraftChild {
            uuid,
            profile_relative_path,
            current_child,
            manager,
            last_updated_playtime,
        };

        let mchild = Arc::new(RwLock::new(mchild));
        self.0.insert(uuid, mchild.clone());
        Ok(mchild)
    }
|
||||
|
||||
    // Rescues a cached process, inserts a child process to keep track of, and returns a reference to the container struct MinecraftChild
    // Essentially 'reconnects' to a process that was launched by theseus before the launcher was restarted
    // However, this may not have all the same functionality as a TokioChild, as we only have the PID and not the actual Child
    // Only processes who match a cached process (name, time started, pid, etc) will be re-inserted. The function fails with an error if the process is notably different.
    #[tracing::instrument(skip(self, cached_process,))]
    #[tracing::instrument(level = "trace", skip(self))]
    #[theseus_macros::debug_pin]
    pub async fn insert_cached_process(
        &mut self,
        cached_process: ProcessCache,
    ) -> crate::Result<Arc<RwLock<MinecraftChild>>> {
        let _state = crate::State::get().await?;

        // Checks processes, compares cached process to actual process
        // Fails if notably different (meaning that the PID was reused, and we shouldn't reconnect to it)
        {
            let mut system = sysinfo::System::new();
            system.refresh_processes();
            let process = system
                .process(sysinfo::Pid::from_u32(cached_process.pid))
                .ok_or_else(|| {
                    crate::ErrorKind::LauncherError(format!(
                        "Could not find process {}",
                        cached_process.pid
                    ))
                })?;

            // Start time is the strongest signal of PID reuse
            if cached_process.start_time != process.start_time() {
                return Err(ErrorKind::LauncherError(format!("Cached process {} has different start time than actual process {}", cached_process.pid, process.start_time())).into());
            }
            if cached_process.name != process.name() {
                return Err(ErrorKind::LauncherError(format!("Cached process {} has different name than actual process {}", cached_process.pid, process.name())).into());
            }
            if let Some(path) = process.exe() {
                if cached_process.exe != path.to_string_lossy() {
                    return Err(ErrorKind::LauncherError(format!("Cached process {} has different exe than actual process {}", cached_process.pid, path.to_string_lossy())).into());
                }
            } else {
                return Err(ErrorKind::LauncherError(format!(
                    "Cached process {} has no accessable path",
                    cached_process.pid
                ))
                .into());
            }
        }

        let child = ChildType::RescuedPID(cached_process.pid);

        // Slots child into manager
        let pid = child.id().ok_or_else(|| {
            crate::ErrorKind::LauncherError(
                "Process immediately failed, could not get PID".to_string(),
            )
        })?;

        // Re-caches process so that it can be restored if the launcher is restarted
        child
            .cache_process(
                cached_process.uuid,
                cached_process.profile_relative_path.clone(),
                cached_process.post_command.clone(),
            )
            .await?;

        let current_child = Arc::new(RwLock::new(child));
        let manager = Some(tokio::spawn(Self::sequential_process_manager(
            cached_process.uuid,
            cached_process.post_command,
            pid,
            current_child.clone(),
            cached_process.profile_relative_path.clone(),
        )));

        emit_process(
            cached_process.uuid,
            pid,
            ProcessPayloadType::Launched,
            "Launched Minecraft",
        )
        .await?;

        let last_updated_playtime = Utc::now();

        // Create MinecraftChild
        let mchild = MinecraftChild {
            uuid: cached_process.uuid,
            profile_relative_path: cached_process.profile_relative_path,
            current_child,
            manager,
            last_updated_playtime,
        };

        let mchild = Arc::new(RwLock::new(mchild));
        self.0.insert(cached_process.uuid, mchild.clone());
        Ok(mchild)
    }
|
||||
|
||||
// Spawns a new child process and inserts it into the hashmap
|
||||
// Also, as the process ends, it spawns the follow-up process if it exists
|
||||
// By convention, ExitStatus is last command's exit status, and we exit on the first non-zero exit status
|
||||
#[tracing::instrument(skip(current_child))]
|
||||
#[theseus_macros::debug_pin]
|
||||
async fn sequential_process_manager(
|
||||
uuid: Uuid,
|
||||
post_command: Option<String>,
|
||||
mut current_pid: u32,
|
||||
current_child: Arc<RwLock<ChildType>>,
|
||||
associated_profile: ProfilePathId,
|
||||
) -> crate::Result<i32> {
|
||||
let current_child = current_child.clone();
|
||||
|
||||
// Wait on current Minecraft Child
|
||||
let mut mc_exit_status;
|
||||
let mut last_updated_playtime = Utc::now();
|
||||
loop {
|
||||
if let Some(t) = current_child.write().await.try_wait().await? {
|
||||
mc_exit_status = t;
|
||||
break;
|
||||
}
|
||||
// sleep for 10ms
|
||||
tokio::time::sleep(tokio::time::Duration::from_millis(50)).await;
|
||||
|
||||
// Auto-update playtime every minute
|
||||
let diff = Utc::now()
|
||||
.signed_duration_since(last_updated_playtime)
|
||||
.num_seconds();
|
||||
if diff >= 60 {
|
||||
if let Err(e) = profile::edit(&associated_profile, |prof| {
|
||||
prof.metadata.recent_time_played += diff as u64;
|
||||
async { Ok(()) }
|
||||
})
|
||||
.await
|
||||
{
|
||||
tracing::warn!(
|
||||
"Failed to update playtime for profile {}: {}",
|
||||
&associated_profile,
|
||||
e
|
||||
);
|
||||
}
|
||||
last_updated_playtime = Utc::now();
|
||||
}
|
||||
}
|
||||
|
||||
// Now fully complete- update playtime one last time
|
||||
let diff = Utc::now()
|
||||
.signed_duration_since(last_updated_playtime)
|
||||
.num_seconds();
|
||||
if let Err(e) = profile::edit(&associated_profile, |prof| {
|
||||
prof.metadata.recent_time_played += diff as u64;
|
||||
async { Ok(()) }
|
||||
})
|
||||
.await
|
||||
{
|
||||
tracing::warn!(
|
||||
"Failed to update playtime for profile {}: {}",
|
||||
&associated_profile,
|
||||
e
|
||||
);
|
||||
}
|
||||
|
||||
// Publish play time update
|
||||
// Allow failure, it will be stored locally and sent next time
|
||||
// Sent in another thread as first call may take a couple seconds and hold up process ending
|
||||
let associated_profile_clone = associated_profile.clone();
|
||||
tokio::spawn(async move {
|
||||
if let Err(e) =
|
||||
profile::try_update_playtime(&associated_profile_clone.clone())
|
||||
.await
|
||||
{
|
||||
tracing::warn!(
|
||||
"Failed to update playtime for profile {}: {}",
|
||||
&associated_profile_clone,
|
||||
e
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
{
|
||||
// Clear game played for Discord RPC
|
||||
// May have other active processes, so we clear to the next running process
|
||||
let state = crate::State::get().await?;
|
||||
let _ = state.discord_rpc.clear_to_default(true).await;
|
||||
}
|
||||
|
||||
// If in tauri, window should show itself again after process exists if it was hidden
|
||||
#[cfg(feature = "tauri")]
|
||||
{
|
||||
let window = crate::EventState::get_main_window().await?;
|
||||
if let Some(window) = window {
|
||||
window.unminimize()?;
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
let current_child = current_child.write().await;
|
||||
current_child.remove_cache(uuid).await?;
|
||||
}
|
||||
|
||||
if !mc_exit_status == 0 {
|
||||
emit_process(
|
||||
uuid,
|
||||
current_pid,
|
||||
ProcessPayloadType::Finished,
|
||||
"Exited process",
|
||||
)
|
||||
.await?;
|
||||
|
||||
return Ok(mc_exit_status); // Err for a non-zero exit is handled in helper
|
||||
}
|
||||
|
||||
// If a post-command exist, switch to it and wait on it
|
||||
// First, create the command by splitting arguments
|
||||
let post_command = if let Some(hook) = post_command {
|
||||
let mut cmd = hook.split(' ');
|
||||
if let Some(command) = cmd.next() {
|
||||
let mut command = Command::new(command);
|
||||
command
|
||||
.args(&cmd.collect::<Vec<&str>>())
|
||||
.current_dir(associated_profile.get_full_path().await?);
|
||||
Some(command)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
if let Some(mut m_command) = post_command {
|
||||
{
|
||||
let mut current_child: tokio::sync::RwLockWriteGuard<
|
||||
'_,
|
||||
ChildType,
|
||||
> = current_child.write().await;
|
||||
let new_child = m_command.spawn().map_err(IOError::from)?;
|
||||
current_pid = new_child.id().ok_or_else(|| {
|
||||
crate::ErrorKind::LauncherError(
|
||||
"Process immediately failed, could not get PID"
|
||||
.to_string(),
|
||||
)
|
||||
})?;
|
||||
*current_child = ChildType::TokioChild(new_child);
|
||||
}
|
||||
emit_process(
|
||||
uuid,
|
||||
current_pid,
|
||||
ProcessPayloadType::Updated,
|
||||
"Completed Minecraft, switching to post-commands",
|
||||
)
|
||||
.await?;
|
||||
|
||||
loop {
|
||||
if let Some(t) = current_child.write().await.try_wait().await? {
|
||||
mc_exit_status = t;
|
||||
break;
|
||||
}
|
||||
// sleep for 10ms
|
||||
tokio::time::sleep(tokio::time::Duration::from_millis(10))
|
||||
.await;
|
||||
}
|
||||
}
|
||||
|
||||
emit_process(
|
||||
uuid,
|
||||
current_pid,
|
||||
ProcessPayloadType::Finished,
|
||||
"Exited process",
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(mc_exit_status)
|
||||
}
|
||||
|
||||
// Returns a ref to the child
|
||||
pub fn get(&self, uuid: Uuid) -> Option<Arc<RwLock<MinecraftChild>>> {
|
||||
self.0.get(&uuid).cloned()
|
||||
}
|
||||
|
||||
// Gets all PID keys
|
||||
pub fn keys(&self) -> Vec<Uuid> {
|
||||
self.0.keys().cloned().collect()
|
||||
}
|
||||
|
||||
// Get exit status of a child by PID
|
||||
// Returns None if the child is still running
|
||||
pub async fn exit_status(&self, uuid: Uuid) -> crate::Result<Option<i32>> {
|
||||
if let Some(child) = self.get(uuid) {
|
||||
let child = child.write().await;
|
||||
let status = child.current_child.write().await.try_wait().await?;
|
||||
Ok(status)
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
// Gets all PID keys of running children
|
||||
pub async fn running_keys(&self) -> crate::Result<Vec<Uuid>> {
|
||||
let mut keys = Vec::new();
|
||||
for key in self.keys() {
|
||||
if let Some(child) = self.get(key) {
|
||||
let child = child.clone();
|
||||
let child = child.write().await;
|
||||
if child
|
||||
.current_child
|
||||
.write()
|
||||
.await
|
||||
.try_wait()
|
||||
.await?
|
||||
.is_none()
|
||||
{
|
||||
keys.push(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(keys)
|
||||
}
|
||||
|
||||
// Gets all PID keys of running children with a given profile path
|
||||
pub async fn running_keys_with_profile(
|
||||
&self,
|
||||
profile_path: ProfilePathId,
|
||||
) -> crate::Result<Vec<Uuid>> {
|
||||
let running_keys = self.running_keys().await?;
|
||||
let mut keys = Vec::new();
|
||||
for key in running_keys {
|
||||
if let Some(child) = self.get(key) {
|
||||
let child = child.clone();
|
||||
let child = child.read().await;
|
||||
if child.profile_relative_path == profile_path {
|
||||
keys.push(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(keys)
|
||||
}
|
||||
|
||||
// Gets all profiles of running children
|
||||
pub async fn running_profile_paths(
|
||||
&self,
|
||||
) -> crate::Result<Vec<ProfilePathId>> {
|
||||
let mut profiles = Vec::new();
|
||||
for key in self.keys() {
|
||||
if let Some(child) = self.get(key) {
|
||||
let child = child.clone();
|
||||
let child = child.write().await;
|
||||
if child
|
||||
.current_child
|
||||
.write()
|
||||
.await
|
||||
.try_wait()
|
||||
.await?
|
||||
.is_none()
|
||||
{
|
||||
profiles.push(child.profile_relative_path.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(profiles)
|
||||
}
|
||||
|
||||
// Gets all profiles of running children
|
||||
// Returns clones because it would be serialized anyway
|
||||
pub async fn running_profiles(&self) -> crate::Result<Vec<Profile>> {
|
||||
let mut profiles = Vec::new();
|
||||
for key in self.keys() {
|
||||
if let Some(child) = self.get(key) {
|
||||
let child = child.clone();
|
||||
let child = child.write().await;
|
||||
if child
|
||||
.current_child
|
||||
.write()
|
||||
.await
|
||||
.try_wait()
|
||||
.await?
|
||||
.is_none()
|
||||
{
|
||||
if let Some(prof) = crate::api::profile::get(
|
||||
&child.profile_relative_path.clone(),
|
||||
None,
|
||||
)
|
||||
.await?
|
||||
{
|
||||
profiles.push(prof);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(profiles)
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for Children {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
211
libs/theseus/src/state/dirs.rs
Normal file
211
libs/theseus/src/state/dirs.rs
Normal file
@@ -0,0 +1,211 @@
|
||||
//! Theseus directory information
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use tokio::sync::RwLock;
|
||||
|
||||
use super::{ProfilePathId, Settings};
|
||||
|
||||
// File / folder names used under the settings and config directories
pub const SETTINGS_FILE_NAME: &str = "settings.json";
pub const CACHES_FOLDER_NAME: &str = "caches";
pub const LAUNCHER_LOGS_FOLDER_NAME: &str = "launcher_logs";
pub const PROFILES_FOLDER_NAME: &str = "profiles";
pub const METADATA_FOLDER_NAME: &str = "meta";
|
||||
|
||||
#[derive(Debug)]
/// The set of base directories Theseus operates from.
pub struct DirectoryInfo {
    pub settings_dir: PathBuf, // Base settings directory- settings.json and icon cache.
    pub config_dir: RwLock<PathBuf>, // Base config directory- instances, minecraft downloads, etc. Changeable as a setting.
    // Process working directory captured at init()
    pub working_dir: PathBuf,
}
|
||||
|
||||
impl DirectoryInfo {
|
||||
    // Get the settings directory
    // init() is not needed for this function
    pub fn get_initial_settings_dir() -> Option<PathBuf> {
        // The THESEUS_CONFIG_DIR env var takes precedence over the platform config dir
        Self::env_path("THESEUS_CONFIG_DIR")
            .or_else(|| Some(dirs::config_dir()?.join("com.modrinth.theseus")))
    }
|
||||
|
||||
#[inline]
|
||||
pub fn get_initial_settings_file() -> crate::Result<PathBuf> {
|
||||
let settings_dir = Self::get_initial_settings_dir().ok_or(
|
||||
crate::ErrorKind::FSError(
|
||||
"Could not find valid config dir".to_string(),
|
||||
),
|
||||
)?;
|
||||
Ok(settings_dir.join("settings.json"))
|
||||
}
|
||||
|
||||
    /// Get all paths needed for Theseus to operate properly
    #[tracing::instrument]
    pub fn init(settings: &Settings) -> crate::Result<Self> {
        // Working directory
        let working_dir = std::env::current_dir().map_err(|err| {
            crate::ErrorKind::FSError(format!(
                "Could not open working directory: {err}"
            ))
        })?;

        let settings_dir = Self::get_initial_settings_dir().ok_or(
            crate::ErrorKind::FSError(
                "Could not find valid settings dir".to_string(),
            ),
        )?;

        // Ensure the settings dir exists before anything tries to write into it
        fs::create_dir_all(&settings_dir).map_err(|err| {
            crate::ErrorKind::FSError(format!(
                "Error creating Theseus config directory: {err}"
            ))
        })?;

        // config directory (for instances, etc.)
        // by default this is the same as the settings directory
        let config_dir = settings.loaded_config_dir.clone().ok_or(
            crate::ErrorKind::FSError(
                "Could not find valid config dir".to_string(),
            ),
        )?;

        Ok(Self {
            settings_dir,
            config_dir: RwLock::new(config_dir),
            working_dir,
        })
    }
|
||||
|
||||
    /// Get the Minecraft instance metadata directory
    #[inline]
    pub async fn metadata_dir(&self) -> PathBuf {
        self.config_dir.read().await.join(METADATA_FOLDER_NAME)
    }

    /// Get the Minecraft java versions metadata directory
    #[inline]
    pub async fn java_versions_dir(&self) -> PathBuf {
        self.metadata_dir().await.join("java_versions")
    }

    /// Get the Minecraft versions metadata directory
    #[inline]
    pub async fn versions_dir(&self) -> PathBuf {
        self.metadata_dir().await.join("versions")
    }

    /// Get the metadata directory for a given version
    #[inline]
    pub async fn version_dir(&self, version: &str) -> PathBuf {
        self.versions_dir().await.join(version)
    }

    /// Get the Minecraft libraries metadata directory
    #[inline]
    pub async fn libraries_dir(&self) -> PathBuf {
        self.metadata_dir().await.join("libraries")
    }

    /// Get the Minecraft assets metadata directory
    #[inline]
    pub async fn assets_dir(&self) -> PathBuf {
        self.metadata_dir().await.join("assets")
    }

    /// Get the assets index directory
    #[inline]
    pub async fn assets_index_dir(&self) -> PathBuf {
        self.assets_dir().await.join("indexes")
    }

    /// Get the assets objects directory
    #[inline]
    pub async fn objects_dir(&self) -> PathBuf {
        self.assets_dir().await.join("objects")
    }

    /// Get the directory for a specific object (sharded by the first two hex
    /// chars of the hash, matching Mojang's asset layout)
    #[inline]
    pub async fn object_dir(&self, hash: &str) -> PathBuf {
        self.objects_dir().await.join(&hash[..2]).join(hash)
    }

    /// Get the Minecraft legacy assets metadata directory
    #[inline]
    pub async fn legacy_assets_dir(&self) -> PathBuf {
        self.metadata_dir().await.join("resources")
    }

    /// Get the Minecraft natives metadata directory
    #[inline]
    pub async fn natives_dir(&self) -> PathBuf {
        self.metadata_dir().await.join("natives")
    }

    /// Get the natives directory for a version of Minecraft
    #[inline]
    pub async fn version_natives_dir(&self, version: &str) -> PathBuf {
        self.natives_dir().await.join(version)
    }

    /// Get the directory containing instance icons
    #[inline]
    pub async fn icon_dir(&self) -> PathBuf {
        self.config_dir.read().await.join("icons")
    }

    /// Get the profiles directory for created profiles
    #[inline]
    pub async fn profiles_dir(&self) -> PathBuf {
        self.config_dir.read().await.join(PROFILES_FOLDER_NAME)
    }

    /// Gets the logs dir for a given profile
    #[inline]
    pub async fn profile_logs_dir(
        profile_id: &ProfilePathId,
    ) -> crate::Result<PathBuf> {
        Ok(profile_id.get_full_path().await?.join("logs"))
    }

    /// Gets the crash reports dir for a given profile
    #[inline]
    pub async fn crash_reports_dir(
        profile_id: &ProfilePathId,
    ) -> crate::Result<PathBuf> {
        Ok(profile_id.get_full_path().await?.join("crash-reports"))
    }

    /// Get the launcher's own log directory (under the settings dir)
    #[inline]
    pub fn launcher_logs_dir() -> Option<PathBuf> {
        Self::get_initial_settings_dir()
            .map(|d| d.join(LAUNCHER_LOGS_FOLDER_NAME))
    }

    /// Get the file containing the global database
    #[inline]
    pub async fn database_file(&self) -> PathBuf {
        self.config_dir.read().await.join("data.bin")
    }

    /// Get the settings file for Theseus
    #[inline]
    pub fn settings_file(&self) -> PathBuf {
        self.settings_dir.join(SETTINGS_FILE_NAME)
    }

    /// Get the cache directory for Theseus
    #[inline]
    pub fn caches_dir(&self) -> PathBuf {
        self.settings_dir.join(CACHES_FOLDER_NAME)
    }

    /// Get the metadata cache directory (e.g. processes.json lives here)
    #[inline]
    pub async fn caches_meta_dir(&self) -> PathBuf {
        self.caches_dir().join("metadata")
    }

    /// Get path from environment variable
    #[inline]
    fn env_path(name: &str) -> Option<PathBuf> {
        std::env::var_os(name).map(PathBuf::from)
    }
|
||||
}
|
||||
216
libs/theseus/src/state/discord.rs
Normal file
216
libs/theseus/src/state/discord.rs
Normal file
@@ -0,0 +1,216 @@
|
||||
use std::sync::{atomic::AtomicBool, Arc};
|
||||
|
||||
use discord_rich_presence::{
|
||||
activity::{Activity, Assets},
|
||||
DiscordIpc, DiscordIpcClient,
|
||||
};
|
||||
use tokio::sync::RwLock;
|
||||
|
||||
use crate::State;
|
||||
|
||||
/// Guard around the Discord IPC client plus a shared connected flag,
/// so reconnects can be attempted lazily from any caller.
pub struct DiscordGuard {
    client: Arc<RwLock<DiscordIpcClient>>,
    // True once an IPC handshake has succeeded; checked before client calls
    connected: Arc<AtomicBool>,
}
|
||||
|
||||
impl DiscordGuard {
|
||||
    /// Initialize discord IPC client, and attempt to connect to it
    /// If it fails, it will still return a DiscordGuard, but the client will be unconnected
    pub async fn init(is_offline: bool) -> crate::Result<DiscordGuard> {
        let mut dipc =
            DiscordIpcClient::new("1123683254248148992").map_err(|e| {
                crate::ErrorKind::OtherError(format!(
                    "Could not create Discord client {}",
                    e,
                ))
            })?;

        // Only attempt the IPC handshake when online; a failed or skipped
        // connect just leaves `connected` false (retried lazily later)
        let connected = if !is_offline {
            let res = dipc.connect(); // Do not need to connect to Discord to use app
            if res.is_ok() {
                Arc::new(AtomicBool::new(true))
            } else {
                Arc::new(AtomicBool::new(false))
            }
        } else {
            Arc::new(AtomicBool::new(false))
        };

        let client = Arc::new(RwLock::new(dipc));
        Ok(DiscordGuard { client, connected })
    }
|
||||
|
||||
    /// If the client failed connecting during init(), this will check for connection and attempt to reconnect
    /// This MUST be called first in any client method that requires a connection, because those can PANIC if the client is not connected
    /// (No connection is different than a failed connection, the latter will not panic and can be retried)
    pub async fn retry_if_not_ready(&self) -> bool {
        // Take the write lock up front so a concurrent caller can't race the reconnect
        let mut client = self.client.write().await;
        if !self.connected.load(std::sync::atomic::Ordering::Relaxed) {
            if client.connect().is_ok() {
                self.connected
                    .store(true, std::sync::atomic::Ordering::Relaxed);
                return true;
            }
            // Still not connected; caller should skip the client call
            return false;
        }
        true
    }
|
||||
|
||||
// check online
|
||||
pub async fn check_online(&self) -> bool {
|
||||
let state = match State::get().await {
|
||||
Ok(s) => s,
|
||||
Err(_) => return false,
|
||||
};
|
||||
let offline = state.offline.read().await;
|
||||
if *offline {
|
||||
return false;
|
||||
}
|
||||
true
|
||||
}
|
||||
|
||||
/// Set the activity to the given message
|
||||
/// First checks if discord is disabled, and if so, clear the activity instead
|
||||
pub async fn set_activity(
|
||||
&self,
|
||||
msg: &str,
|
||||
reconnect_if_fail: bool,
|
||||
) -> crate::Result<()> {
|
||||
if !self.check_online().await {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Check if discord is disabled, and if so, clear the activity instead
|
||||
let state = State::get().await?;
|
||||
let settings = state.settings.read().await;
|
||||
if settings.disable_discord_rpc {
|
||||
Ok(self.clear_activity(true).await?)
|
||||
} else {
|
||||
Ok(self.force_set_activity(msg, reconnect_if_fail).await?)
|
||||
}
|
||||
}
|
||||
|
||||
/// Sets the activity to the given message, regardless of if discord is disabled or offline
|
||||
/// Should not be used except for in the above method, or if it is already known that discord is enabled (specifically for state initialization) and we are connected to the internet
|
||||
pub async fn force_set_activity(
|
||||
&self,
|
||||
msg: &str,
|
||||
reconnect_if_fail: bool,
|
||||
) -> crate::Result<()> {
|
||||
// Attempt to connect if not connected. Do not continue if it fails, as the client.set_activity can panic if it never was connected
|
||||
if !self.retry_if_not_ready().await {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let activity = Activity::new().state(msg).assets(
|
||||
Assets::new()
|
||||
.large_image("modrinth_simple")
|
||||
.large_text("Modrinth Logo"),
|
||||
);
|
||||
|
||||
// Attempt to set the activity
|
||||
// If the existing connection fails, attempt to reconnect and try again
|
||||
let mut client: tokio::sync::RwLockWriteGuard<'_, DiscordIpcClient> =
|
||||
self.client.write().await;
|
||||
let res = client.set_activity(activity.clone());
|
||||
let could_not_set_err = |e: Box<dyn serde::ser::StdError>| {
|
||||
crate::ErrorKind::OtherError(format!(
|
||||
"Could not update Discord activity {}",
|
||||
e,
|
||||
))
|
||||
};
|
||||
|
||||
if reconnect_if_fail {
|
||||
if let Err(_e) = res {
|
||||
client.reconnect().map_err(|e| {
|
||||
crate::ErrorKind::OtherError(format!(
|
||||
"Could not reconnect to Discord IPC {}",
|
||||
e,
|
||||
))
|
||||
})?;
|
||||
return Ok(client
|
||||
.set_activity(activity)
|
||||
.map_err(could_not_set_err)?); // try again, but don't reconnect if it fails again
|
||||
}
|
||||
} else {
|
||||
res.map_err(could_not_set_err)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Clear the activity entirely ('disabling' the RPC until the next set_activity)
|
||||
pub async fn clear_activity(
|
||||
&self,
|
||||
reconnect_if_fail: bool,
|
||||
) -> crate::Result<()> {
|
||||
// Attempt to connect if not connected. Do not continue if it fails, as the client.clear_activity can panic if it never was connected
|
||||
if !self.check_online().await || !self.retry_if_not_ready().await {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Attempt to clear the activity
|
||||
// If the existing connection fails, attempt to reconnect and try again
|
||||
let mut client = self.client.write().await;
|
||||
let res = client.clear_activity();
|
||||
|
||||
let could_not_clear_err = |e: Box<dyn serde::ser::StdError>| {
|
||||
crate::ErrorKind::OtherError(format!(
|
||||
"Could not clear Discord activity {}",
|
||||
e,
|
||||
))
|
||||
};
|
||||
|
||||
if reconnect_if_fail {
|
||||
if res.is_err() {
|
||||
client.reconnect().map_err(|e| {
|
||||
crate::ErrorKind::OtherError(format!(
|
||||
"Could not reconnect to Discord IPC {}",
|
||||
e,
|
||||
))
|
||||
})?;
|
||||
return Ok(client
|
||||
.clear_activity()
|
||||
.map_err(could_not_clear_err)?); // try again, but don't reconnect if it fails again
|
||||
}
|
||||
} else {
|
||||
res.map_err(could_not_clear_err)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Clear the activity, but if there is a running profile, set the activity to that instead
|
||||
pub async fn clear_to_default(
|
||||
&self,
|
||||
reconnect_if_fail: bool,
|
||||
) -> crate::Result<()> {
|
||||
let state: Arc<tokio::sync::RwLockReadGuard<'_, State>> =
|
||||
State::get().await?;
|
||||
|
||||
{
|
||||
let settings = state.settings.read().await;
|
||||
if settings.disable_discord_rpc {
|
||||
println!("Discord is disabled, clearing activity");
|
||||
return self.clear_activity(true).await;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(existing_child) = state
|
||||
.children
|
||||
.read()
|
||||
.await
|
||||
.running_profile_paths()
|
||||
.await?
|
||||
.first()
|
||||
{
|
||||
self.set_activity(
|
||||
&format!("Playing {}", existing_child),
|
||||
reconnect_if_fail,
|
||||
)
|
||||
.await?;
|
||||
} else {
|
||||
self.set_activity("Idling...", reconnect_if_fail).await?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
66
libs/theseus/src/state/java_globals.rs
Normal file
66
libs/theseus/src/state/java_globals.rs
Normal file
@@ -0,0 +1,66 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::prelude::JavaVersion;
|
||||
use crate::util::jre;
|
||||
|
||||
// All stored Java versions, chosen by the user
|
||||
// A wrapper over a Hashmap connecting key -> java version
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct JavaGlobals(HashMap<String, JavaVersion>);
|
||||
|
||||
impl JavaGlobals {
|
||||
pub fn new() -> JavaGlobals {
|
||||
JavaGlobals(HashMap::new())
|
||||
}
|
||||
|
||||
pub fn insert(&mut self, key: String, java: JavaVersion) {
|
||||
self.0.insert(key, java);
|
||||
}
|
||||
|
||||
pub fn remove(&mut self, key: &String) {
|
||||
self.0.remove(key);
|
||||
}
|
||||
|
||||
pub fn get(&self, key: &String) -> Option<&JavaVersion> {
|
||||
self.0.get(key)
|
||||
}
|
||||
|
||||
pub fn get_mut(&mut self, key: &String) -> Option<&mut JavaVersion> {
|
||||
self.0.get_mut(key)
|
||||
}
|
||||
|
||||
pub fn count(&self) -> usize {
|
||||
self.0.len()
|
||||
}
|
||||
|
||||
pub fn keys(&self) -> Vec<String> {
|
||||
self.0.keys().cloned().collect()
|
||||
}
|
||||
|
||||
// Validates that every path here is a valid Java version and that the version matches the version stored here
|
||||
// If false, when checked, the user should be prompted to reselect the Java version
|
||||
pub async fn is_all_valid(&self) -> bool {
|
||||
for (_, java) in self.0.iter() {
|
||||
let jre = jre::check_java_at_filepath(
|
||||
PathBuf::from(&java.path).as_path(),
|
||||
)
|
||||
.await;
|
||||
if let Some(jre) = jre {
|
||||
if jre.version != java.version {
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for JavaGlobals {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
173
libs/theseus/src/state/metadata.rs
Normal file
173
libs/theseus/src/state/metadata.rs
Normal file
@@ -0,0 +1,173 @@
|
||||
//! Theseus metadata
|
||||
use crate::data::DirectoryInfo;
|
||||
use crate::util::fetch::{read_json, write, IoSemaphore};
|
||||
use crate::State;
|
||||
use daedalus::{
|
||||
minecraft::{fetch_version_manifest, VersionManifest as MinecraftManifest},
|
||||
modded::{
|
||||
fetch_manifest as fetch_loader_manifest, Manifest as LoaderManifest,
|
||||
},
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Base URL of the Modrinth metadata (daedalus manifest) server.
const METADATA_URL: &str = "https://meta.modrinth.com";
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
pub struct Metadata {
|
||||
pub minecraft: MinecraftManifest,
|
||||
pub forge: LoaderManifest,
|
||||
pub fabric: LoaderManifest,
|
||||
pub quilt: LoaderManifest,
|
||||
pub neoforge: LoaderManifest,
|
||||
}
|
||||
|
||||
impl Metadata {
|
||||
fn get_manifest(name: &str) -> String {
|
||||
format!("{METADATA_URL}/{name}/v0/manifest.json")
|
||||
}
|
||||
|
||||
pub async fn fetch() -> crate::Result<Self> {
|
||||
let (minecraft, forge, fabric, quilt, neoforge) = tokio::try_join! {
|
||||
async {
|
||||
let url = Self::get_manifest("minecraft");
|
||||
fetch_version_manifest(Some(&url)).await
|
||||
},
|
||||
async {
|
||||
let url = Self::get_manifest("forge");
|
||||
fetch_loader_manifest(&url).await
|
||||
},
|
||||
async {
|
||||
let url = Self::get_manifest("fabric");
|
||||
fetch_loader_manifest(&url).await
|
||||
},
|
||||
async {
|
||||
let url = Self::get_manifest("quilt");
|
||||
fetch_loader_manifest(&url).await
|
||||
},
|
||||
async {
|
||||
let url = Self::get_manifest("neo");
|
||||
fetch_loader_manifest(&url).await
|
||||
}
|
||||
}?;
|
||||
|
||||
Ok(Self {
|
||||
minecraft,
|
||||
forge,
|
||||
fabric,
|
||||
quilt,
|
||||
neoforge,
|
||||
})
|
||||
}
|
||||
|
||||
// Attempt to fetch metadata and store in sled DB
|
||||
#[tracing::instrument(skip(io_semaphore))]
|
||||
#[theseus_macros::debug_pin]
|
||||
pub async fn init(
|
||||
dirs: &DirectoryInfo,
|
||||
fetch_online: bool,
|
||||
io_semaphore: &IoSemaphore,
|
||||
) -> crate::Result<Self> {
|
||||
let mut metadata = None;
|
||||
let metadata_path = dirs.caches_meta_dir().await.join("metadata.json");
|
||||
let metadata_backup_path =
|
||||
dirs.caches_meta_dir().await.join("metadata.json.bak");
|
||||
|
||||
if let Ok(metadata_json) =
|
||||
read_json::<Metadata>(&metadata_path, io_semaphore).await
|
||||
{
|
||||
metadata = Some(metadata_json);
|
||||
} else if fetch_online {
|
||||
let res = async {
|
||||
let metadata_fetch = Self::fetch().await?;
|
||||
|
||||
write(
|
||||
&metadata_path,
|
||||
&serde_json::to_vec(&metadata_fetch).unwrap_or_default(),
|
||||
io_semaphore,
|
||||
)
|
||||
.await?;
|
||||
|
||||
write(
|
||||
&metadata_backup_path,
|
||||
&serde_json::to_vec(&metadata_fetch).unwrap_or_default(),
|
||||
io_semaphore,
|
||||
)
|
||||
.await?;
|
||||
|
||||
metadata = Some(metadata_fetch);
|
||||
Ok::<(), crate::Error>(())
|
||||
}
|
||||
.await;
|
||||
|
||||
match res {
|
||||
Ok(()) => {}
|
||||
Err(err) => {
|
||||
tracing::warn!("Unable to fetch launcher metadata: {err}")
|
||||
}
|
||||
}
|
||||
} else if let Ok(metadata_json) =
|
||||
read_json::<Metadata>(&metadata_backup_path, io_semaphore).await
|
||||
{
|
||||
metadata = Some(metadata_json);
|
||||
std::fs::copy(&metadata_backup_path, &metadata_path).map_err(
|
||||
|err| {
|
||||
crate::ErrorKind::FSError(format!(
|
||||
"Error restoring metadata backup: {err}"
|
||||
))
|
||||
.as_error()
|
||||
},
|
||||
)?;
|
||||
}
|
||||
|
||||
if let Some(meta) = metadata {
|
||||
Ok(meta)
|
||||
} else {
|
||||
Err(
|
||||
crate::ErrorKind::NoValueFor(String::from("launcher metadata"))
|
||||
.as_error(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn update() {
|
||||
let res = async {
|
||||
let metadata_fetch = Metadata::fetch().await?;
|
||||
let state = State::get().await?;
|
||||
|
||||
let metadata_path = state
|
||||
.directories
|
||||
.caches_meta_dir()
|
||||
.await
|
||||
.join("metadata.json");
|
||||
let metadata_backup_path = state
|
||||
.directories
|
||||
.caches_meta_dir()
|
||||
.await
|
||||
.join("metadata.json.bak");
|
||||
|
||||
if metadata_path.exists() {
|
||||
std::fs::copy(&metadata_path, &metadata_backup_path)?;
|
||||
}
|
||||
|
||||
write(
|
||||
&metadata_path,
|
||||
&serde_json::to_vec(&metadata_fetch)?,
|
||||
&state.io_semaphore,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let mut old_metadata = state.metadata.write().await;
|
||||
*old_metadata = metadata_fetch;
|
||||
|
||||
Ok::<(), crate::Error>(())
|
||||
}
|
||||
.await;
|
||||
|
||||
match res {
|
||||
Ok(()) => {}
|
||||
Err(err) => {
|
||||
tracing::warn!("Unable to update launcher metadata: {err}")
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
1078
libs/theseus/src/state/minecraft_auth.rs
Normal file
1078
libs/theseus/src/state/minecraft_auth.rs
Normal file
File diff suppressed because it is too large
Load Diff
410
libs/theseus/src/state/mod.rs
Normal file
410
libs/theseus/src/state/mod.rs
Normal file
@@ -0,0 +1,410 @@
|
||||
//! Theseus state management system
|
||||
use crate::event::emit::{emit_loading, emit_offline, init_loading_unsafe};
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::event::LoadingBarType;
|
||||
use crate::loading_join;
|
||||
|
||||
use crate::util::fetch::{self, FetchSemaphore, IoSemaphore};
|
||||
use notify::RecommendedWatcher;
|
||||
use notify_debouncer_mini::{new_debouncer, DebounceEventResult, Debouncer};
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
use tokio::join;
|
||||
use tokio::sync::{OnceCell, RwLock, Semaphore};
|
||||
|
||||
use futures::{channel::mpsc::channel, SinkExt, StreamExt};
|
||||
|
||||
// Submodules
|
||||
mod dirs;
|
||||
pub use self::dirs::*;
|
||||
|
||||
mod metadata;
|
||||
pub use self::metadata::*;
|
||||
|
||||
mod profiles;
|
||||
pub use self::profiles::*;
|
||||
|
||||
mod settings;
|
||||
pub use self::settings::*;
|
||||
|
||||
mod projects;
|
||||
pub use self::projects::*;
|
||||
|
||||
mod children;
|
||||
pub use self::children::*;
|
||||
|
||||
mod tags;
|
||||
pub use self::tags::*;
|
||||
|
||||
mod java_globals;
|
||||
pub use self::java_globals::*;
|
||||
|
||||
mod safe_processes;
|
||||
pub use self::safe_processes::*;
|
||||
|
||||
mod discord;
|
||||
pub use self::discord::*;
|
||||
|
||||
mod minecraft_auth;
|
||||
pub use self::minecraft_auth::*;
|
||||
|
||||
mod mr_auth;
|
||||
pub use self::mr_auth::*;
|
||||
|
||||
// Global state
|
||||
// RwLock on state only has concurrent reads, except for config dir change which takes control of the State
|
||||
/// Lazily-initialized global launcher state; access via `State::get` / `State::get_write`.
static LAUNCHER_STATE: OnceCell<RwLock<State>> = OnceCell::const_new();
|
||||
pub struct State {
|
||||
/// Whether or not the launcher is currently operating in 'offline mode'
|
||||
pub offline: RwLock<bool>,
|
||||
|
||||
/// Information on the location of files used in the launcher
|
||||
pub directories: DirectoryInfo,
|
||||
|
||||
/// Semaphore used to limit concurrent network requests and avoid errors
|
||||
pub fetch_semaphore: FetchSemaphore,
|
||||
/// Stored maximum number of sempahores of current fetch_semaphore
|
||||
pub fetch_semaphore_max: RwLock<u32>,
|
||||
/// Semaphore used to limit concurrent I/O and avoid errors
|
||||
pub io_semaphore: IoSemaphore,
|
||||
/// Stored maximum number of sempahores of current io_semaphore
|
||||
pub io_semaphore_max: RwLock<u32>,
|
||||
|
||||
/// Launcher metadata
|
||||
pub metadata: RwLock<Metadata>,
|
||||
/// Launcher configuration
|
||||
pub settings: RwLock<Settings>,
|
||||
/// Reference to minecraft process children
|
||||
pub children: RwLock<Children>,
|
||||
/// Launcher profile metadata
|
||||
pub(crate) profiles: RwLock<Profiles>,
|
||||
/// Launcher tags
|
||||
pub(crate) tags: RwLock<Tags>,
|
||||
/// Launcher processes that should be safely exited on shutdown
|
||||
pub(crate) safety_processes: RwLock<SafeProcesses>,
|
||||
/// Launcher user account info
|
||||
pub(crate) users: RwLock<MinecraftAuthStore>,
|
||||
/// Modrinth Credentials Store
|
||||
pub credentials: RwLock<CredentialsStore>,
|
||||
/// Modrinth auth flow
|
||||
pub modrinth_auth_flow: RwLock<Option<ModrinthAuthFlow>>,
|
||||
|
||||
/// Discord RPC
|
||||
pub discord_rpc: DiscordGuard,
|
||||
|
||||
/// File watcher debouncer
|
||||
pub(crate) file_watcher: RwLock<Debouncer<RecommendedWatcher>>,
|
||||
}
|
||||
|
||||
impl State {
|
||||
/// Get the current launcher state, initializing it if needed
|
||||
pub async fn get(
|
||||
) -> crate::Result<Arc<tokio::sync::RwLockReadGuard<'static, Self>>> {
|
||||
Ok(Arc::new(
|
||||
LAUNCHER_STATE
|
||||
.get_or_try_init(Self::initialize_state)
|
||||
.await?
|
||||
.read()
|
||||
.await,
|
||||
))
|
||||
}
|
||||
|
||||
/// Get the current launcher state, initializing it if needed
|
||||
/// Takes writing control of the state, blocking all other uses of it
|
||||
/// Only used for state change such as changing the config directory
|
||||
pub async fn get_write(
|
||||
) -> crate::Result<tokio::sync::RwLockWriteGuard<'static, Self>> {
|
||||
Ok(LAUNCHER_STATE
|
||||
.get_or_try_init(Self::initialize_state)
|
||||
.await?
|
||||
.write()
|
||||
.await)
|
||||
}
|
||||
|
||||
pub fn initialized() -> bool {
|
||||
LAUNCHER_STATE.initialized()
|
||||
}
|
||||
|
||||
#[tracing::instrument]
|
||||
#[theseus_macros::debug_pin]
|
||||
async fn initialize_state() -> crate::Result<RwLock<State>> {
|
||||
let loading_bar = init_loading_unsafe(
|
||||
LoadingBarType::StateInit,
|
||||
100.0,
|
||||
"Initializing launcher",
|
||||
)
|
||||
.await?;
|
||||
|
||||
// Settings
|
||||
let settings =
|
||||
Settings::init(&DirectoryInfo::get_initial_settings_file()?)
|
||||
.await?;
|
||||
|
||||
let directories = DirectoryInfo::init(&settings)?;
|
||||
|
||||
emit_loading(&loading_bar, 10.0, None).await?;
|
||||
|
||||
let mut file_watcher = init_watcher().await?;
|
||||
|
||||
let fetch_semaphore = FetchSemaphore(RwLock::new(Semaphore::new(
|
||||
settings.max_concurrent_downloads,
|
||||
)));
|
||||
let io_semaphore = IoSemaphore(RwLock::new(Semaphore::new(
|
||||
settings.max_concurrent_writes,
|
||||
)));
|
||||
emit_loading(&loading_bar, 10.0, None).await?;
|
||||
|
||||
let is_offline = !fetch::check_internet(3).await;
|
||||
|
||||
let metadata_fut =
|
||||
Metadata::init(&directories, !is_offline, &io_semaphore);
|
||||
let profiles_fut = Profiles::init(&directories, &mut file_watcher);
|
||||
let tags_fut = Tags::init(
|
||||
&directories,
|
||||
!is_offline,
|
||||
&io_semaphore,
|
||||
&fetch_semaphore,
|
||||
&CredentialsStore(None),
|
||||
);
|
||||
let users_fut = MinecraftAuthStore::init(&directories, &io_semaphore);
|
||||
let creds_fut = CredentialsStore::init(&directories, &io_semaphore);
|
||||
// Launcher data
|
||||
let (metadata, profiles, tags, users, creds) = loading_join! {
|
||||
Some(&loading_bar), 70.0, Some("Loading metadata");
|
||||
metadata_fut,
|
||||
profiles_fut,
|
||||
tags_fut,
|
||||
users_fut,
|
||||
creds_fut,
|
||||
}?;
|
||||
|
||||
let safety_processes = SafeProcesses::new();
|
||||
|
||||
let discord_rpc = DiscordGuard::init(is_offline).await?;
|
||||
if !settings.disable_discord_rpc && !is_offline {
|
||||
// Add default Idling to discord rich presence
|
||||
// Force add to avoid recursion
|
||||
let _ = discord_rpc.force_set_activity("Idling...", true).await;
|
||||
}
|
||||
|
||||
let children = Children::new();
|
||||
|
||||
// Starts a loop of checking if we are online, and updating
|
||||
Self::offine_check_loop();
|
||||
|
||||
emit_loading(&loading_bar, 10.0, None).await?;
|
||||
|
||||
Ok::<RwLock<Self>, crate::Error>(RwLock::new(Self {
|
||||
offline: RwLock::new(is_offline),
|
||||
directories,
|
||||
fetch_semaphore,
|
||||
fetch_semaphore_max: RwLock::new(
|
||||
settings.max_concurrent_downloads as u32,
|
||||
),
|
||||
io_semaphore,
|
||||
io_semaphore_max: RwLock::new(
|
||||
settings.max_concurrent_writes as u32,
|
||||
),
|
||||
metadata: RwLock::new(metadata),
|
||||
settings: RwLock::new(settings),
|
||||
profiles: RwLock::new(profiles),
|
||||
users: RwLock::new(users),
|
||||
children: RwLock::new(children),
|
||||
credentials: RwLock::new(creds),
|
||||
tags: RwLock::new(tags),
|
||||
discord_rpc,
|
||||
safety_processes: RwLock::new(safety_processes),
|
||||
file_watcher: RwLock::new(file_watcher),
|
||||
modrinth_auth_flow: RwLock::new(None),
|
||||
}))
|
||||
}
|
||||
|
||||
/// Starts a loop of checking if we are online, and updating
|
||||
pub fn offine_check_loop() {
|
||||
tokio::task::spawn(async {
|
||||
loop {
|
||||
let state = Self::get().await;
|
||||
if let Ok(state) = state {
|
||||
let _ = state.refresh_offline().await;
|
||||
}
|
||||
|
||||
// Wait 5 seconds
|
||||
tokio::time::sleep(Duration::from_secs(5)).await;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/// Updates state with data from the web, if we are online
|
||||
pub fn update() {
|
||||
tokio::task::spawn(async {
|
||||
if let Ok(state) = crate::State::get().await {
|
||||
if !*state.offline.read().await {
|
||||
let res1 = Profiles::update_modrinth_versions();
|
||||
let res2 = Tags::update();
|
||||
let res3 = Metadata::update();
|
||||
let res4 = Profiles::update_projects();
|
||||
let res6 = CredentialsStore::update_creds();
|
||||
|
||||
let _ = join!(res1, res2, res3, res4, res6);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
#[tracing::instrument]
|
||||
#[theseus_macros::debug_pin]
|
||||
/// Synchronize in-memory state with persistent state
|
||||
pub async fn sync() -> crate::Result<()> {
|
||||
let state = Self::get().await?;
|
||||
let sync_settings = async {
|
||||
let state = Arc::clone(&state);
|
||||
|
||||
tokio::spawn(async move {
|
||||
let reader = state.settings.read().await;
|
||||
reader.sync(&state.directories.settings_file()).await?;
|
||||
Ok::<_, crate::Error>(())
|
||||
})
|
||||
.await?
|
||||
};
|
||||
|
||||
let sync_profiles = async {
|
||||
let state = Arc::clone(&state);
|
||||
|
||||
tokio::spawn(async move {
|
||||
let profiles = state.profiles.read().await;
|
||||
|
||||
profiles.sync().await?;
|
||||
Ok::<_, crate::Error>(())
|
||||
})
|
||||
.await?
|
||||
};
|
||||
|
||||
tokio::try_join!(sync_settings, sync_profiles)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Reset IO semaphore to default values
|
||||
/// This will block until all uses of the semaphore are complete, so it should only be called
|
||||
/// when we are not in the middle of downloading something (ie: changing the settings!)
|
||||
pub async fn reset_io_semaphore(&self) {
|
||||
let settings = self.settings.read().await;
|
||||
let mut io_semaphore = self.io_semaphore.0.write().await;
|
||||
let mut total_permits = self.io_semaphore_max.write().await;
|
||||
|
||||
// Wait to get all permits back
|
||||
let _ = io_semaphore.acquire_many(*total_permits).await;
|
||||
|
||||
// Reset the semaphore
|
||||
io_semaphore.close();
|
||||
*total_permits = settings.max_concurrent_writes as u32;
|
||||
*io_semaphore = Semaphore::new(settings.max_concurrent_writes);
|
||||
}
|
||||
|
||||
/// Reset IO semaphore to default values
|
||||
/// This will block until all uses of the semaphore are complete, so it should only be called
|
||||
/// when we are not in the middle of downloading something (ie: changing the settings!)
|
||||
pub async fn reset_fetch_semaphore(&self) {
|
||||
let settings = self.settings.read().await;
|
||||
let mut io_semaphore = self.fetch_semaphore.0.write().await;
|
||||
let mut total_permits = self.fetch_semaphore_max.write().await;
|
||||
|
||||
// Wait to get all permits back
|
||||
let _ = io_semaphore.acquire_many(*total_permits).await;
|
||||
|
||||
// Reset the semaphore
|
||||
io_semaphore.close();
|
||||
*total_permits = settings.max_concurrent_downloads as u32;
|
||||
*io_semaphore = Semaphore::new(settings.max_concurrent_downloads);
|
||||
}
|
||||
|
||||
/// Refreshes whether or not the launcher should be offline, by whether or not there is an internet connection
|
||||
pub async fn refresh_offline(&self) -> crate::Result<()> {
|
||||
let is_online = fetch::check_internet(3).await;
|
||||
|
||||
let mut offline = self.offline.write().await;
|
||||
|
||||
if *offline != is_online {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
emit_offline(!is_online).await?;
|
||||
*offline = !is_online;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn init_watcher() -> crate::Result<Debouncer<RecommendedWatcher>> {
|
||||
let (mut tx, mut rx) = channel(1);
|
||||
|
||||
let file_watcher = new_debouncer(
|
||||
Duration::from_secs_f32(2.0),
|
||||
move |res: DebounceEventResult| {
|
||||
futures::executor::block_on(async {
|
||||
tx.send(res).await.unwrap();
|
||||
})
|
||||
},
|
||||
)?;
|
||||
tokio::task::spawn(async move {
|
||||
let span = tracing::span!(tracing::Level::INFO, "init_watcher");
|
||||
tracing::info!(parent: &span, "Initting watcher");
|
||||
while let Some(res) = rx.next().await {
|
||||
let _span = span.enter();
|
||||
match res {
|
||||
Ok(mut events) => {
|
||||
let mut visited_paths = Vec::new();
|
||||
// sort events by e.path
|
||||
events.sort_by(|a, b| a.path.cmp(&b.path));
|
||||
events.iter().for_each(|e| {
|
||||
let mut new_path = PathBuf::new();
|
||||
let mut components_iterator = e.path.components();
|
||||
let mut found = false;
|
||||
for component in components_iterator.by_ref() {
|
||||
new_path.push(component);
|
||||
if found {
|
||||
break;
|
||||
}
|
||||
if component.as_os_str() == "profiles" {
|
||||
found = true;
|
||||
}
|
||||
}
|
||||
// if any remain, it's a subfile of the profile folder and not the profile folder itself
|
||||
let subfile = components_iterator.next().is_some();
|
||||
|
||||
// At this point, new_path is the path to the profile, and subfile is whether it's a subfile of the profile or not
|
||||
let profile_path_id =
|
||||
ProfilePathId::new(PathBuf::from(
|
||||
new_path.file_name().unwrap_or_default(),
|
||||
));
|
||||
|
||||
if e.path
|
||||
.components()
|
||||
.any(|x| x.as_os_str() == "crash-reports")
|
||||
&& e.path
|
||||
.extension()
|
||||
.map(|x| x == "txt")
|
||||
.unwrap_or(false)
|
||||
{
|
||||
Profile::crash_task(profile_path_id);
|
||||
} else if !visited_paths.contains(&new_path) {
|
||||
if subfile {
|
||||
Profile::sync_projects_task(
|
||||
profile_path_id,
|
||||
false,
|
||||
);
|
||||
visited_paths.push(new_path);
|
||||
} else {
|
||||
Profiles::sync_available_profiles_task(
|
||||
profile_path_id,
|
||||
);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
Err(error) => tracing::warn!("Unable to watch file: {error}"),
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
Ok(file_watcher)
|
||||
}
|
||||
376
libs/theseus/src/state/mr_auth.rs
Normal file
376
libs/theseus/src/state/mr_auth.rs
Normal file
@@ -0,0 +1,376 @@
|
||||
use crate::config::MODRINTH_API_URL;
|
||||
use crate::state::DirectoryInfo;
|
||||
use crate::util::fetch::{
|
||||
fetch_advanced, read_json, write, FetchSemaphore, IoSemaphore,
|
||||
};
|
||||
use crate::State;
|
||||
use chrono::{DateTime, Duration, Utc};
|
||||
use futures::TryStreamExt;
|
||||
use reqwest::Method;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
use std::collections::HashMap;
|
||||
|
||||
/// File name (within the caches meta dir) that stores persisted Modrinth credentials.
const AUTH_JSON: &str = "auth.json";
|
||||
|
||||
#[derive(Serialize, Deserialize, Clone, Debug)]
|
||||
pub struct ModrinthUser {
|
||||
pub id: String,
|
||||
pub username: String,
|
||||
pub name: Option<String>,
|
||||
pub avatar_url: Option<String>,
|
||||
pub bio: Option<String>,
|
||||
pub created: DateTime<Utc>,
|
||||
pub role: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Clone, Debug)]
|
||||
pub struct ModrinthCredentials {
|
||||
pub session: String,
|
||||
pub expires_at: DateTime<Utc>,
|
||||
pub user: ModrinthUser,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
#[serde(tag = "type")]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum ModrinthCredentialsResult {
|
||||
TwoFactorRequired { flow: String },
|
||||
Credentials(ModrinthCredentials),
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct CredentialsStore(pub Option<ModrinthCredentials>);
|
||||
|
||||
impl CredentialsStore {
|
||||
pub async fn init(
|
||||
dirs: &DirectoryInfo,
|
||||
io_semaphore: &IoSemaphore,
|
||||
) -> crate::Result<Self> {
|
||||
let auth_path = dirs.caches_meta_dir().await.join(AUTH_JSON);
|
||||
let user = read_json(&auth_path, io_semaphore).await.ok();
|
||||
|
||||
if let Some(user) = user {
|
||||
Ok(Self(Some(user)))
|
||||
} else {
|
||||
Ok(Self(None))
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn save(&self) -> crate::Result<()> {
|
||||
let state = State::get().await?;
|
||||
let auth_path =
|
||||
state.directories.caches_meta_dir().await.join(AUTH_JSON);
|
||||
|
||||
if let Some(creds) = &self.0 {
|
||||
write(&auth_path, &serde_json::to_vec(creds)?, &state.io_semaphore)
|
||||
.await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn login(
|
||||
&mut self,
|
||||
credentials: ModrinthCredentials,
|
||||
) -> crate::Result<&Self> {
|
||||
self.0 = Some(credentials);
|
||||
self.save().await?;
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
#[tracing::instrument]
|
||||
pub async fn update_creds() {
|
||||
let res = async {
|
||||
let state = State::get().await?;
|
||||
let mut creds_write = state.credentials.write().await;
|
||||
|
||||
refresh_credentials(&mut creds_write, &state.fetch_semaphore)
|
||||
.await?;
|
||||
|
||||
Ok::<(), crate::Error>(())
|
||||
}
|
||||
.await;
|
||||
|
||||
match res {
|
||||
Ok(()) => {}
|
||||
Err(err) => {
|
||||
tracing::warn!("Unable to update credentials: {err}")
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
pub async fn logout(&mut self) -> crate::Result<&Self> {
|
||||
self.0 = None;
|
||||
self.save().await?;
|
||||
Ok(self)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ModrinthAuthFlow {
|
||||
socket: async_tungstenite::WebSocketStream<
|
||||
async_tungstenite::tokio::ConnectStream,
|
||||
>,
|
||||
}
|
||||
|
||||
impl ModrinthAuthFlow {
|
||||
pub async fn new(provider: &str) -> crate::Result<Self> {
|
||||
let (socket, _) = async_tungstenite::tokio::connect_async(format!(
|
||||
"wss://api.modrinth.com/v2/auth/ws?provider={provider}"
|
||||
))
|
||||
.await?;
|
||||
Ok(Self { socket })
|
||||
}
|
||||
|
||||
pub async fn prepare_login_url(&mut self) -> crate::Result<String> {
|
||||
let code_resp = self
|
||||
.socket
|
||||
.try_next()
|
||||
.await?
|
||||
.ok_or(
|
||||
crate::ErrorKind::WSClosedError(String::from(
|
||||
"login socket URL",
|
||||
))
|
||||
.as_error(),
|
||||
)?
|
||||
.into_data();
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct Url {
|
||||
url: String,
|
||||
}
|
||||
|
||||
let response = serde_json::from_slice::<Url>(&code_resp)?;
|
||||
|
||||
Ok(response.url)
|
||||
}
|
||||
|
||||
pub async fn extract_credentials(
|
||||
&mut self,
|
||||
semaphore: &FetchSemaphore,
|
||||
) -> crate::Result<ModrinthCredentialsResult> {
|
||||
// Minecraft bearer token
|
||||
let token_resp = self
|
||||
.socket
|
||||
.try_next()
|
||||
.await?
|
||||
.ok_or(
|
||||
crate::ErrorKind::WSClosedError(String::from(
|
||||
"login socket URL",
|
||||
))
|
||||
.as_error(),
|
||||
)?
|
||||
.into_data();
|
||||
|
||||
let response =
|
||||
serde_json::from_slice::<HashMap<String, Value>>(&token_resp)?;
|
||||
|
||||
get_result_from_res("code", response, semaphore).await
|
||||
}
|
||||
|
||||
pub async fn close(&mut self) -> crate::Result<()> {
|
||||
self.socket.close(None).await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
async fn get_result_from_res(
|
||||
code_key: &str,
|
||||
response: HashMap<String, Value>,
|
||||
semaphore: &FetchSemaphore,
|
||||
) -> crate::Result<ModrinthCredentialsResult> {
|
||||
if let Some(flow) = response.get("flow").and_then(|x| x.as_str()) {
|
||||
Ok(ModrinthCredentialsResult::TwoFactorRequired {
|
||||
flow: flow.to_string(),
|
||||
})
|
||||
} else if let Some(code) = response.get(code_key).and_then(|x| x.as_str()) {
|
||||
let info = fetch_info(code, semaphore).await?;
|
||||
|
||||
Ok(ModrinthCredentialsResult::Credentials(
|
||||
ModrinthCredentials {
|
||||
session: code.to_string(),
|
||||
expires_at: Utc::now() + Duration::weeks(2),
|
||||
user: info,
|
||||
},
|
||||
))
|
||||
} else if let Some(error) =
|
||||
response.get("description").and_then(|x| x.as_str())
|
||||
{
|
||||
Err(crate::ErrorKind::OtherError(format!(
|
||||
"Failed to login with error {error}"
|
||||
))
|
||||
.as_error())
|
||||
} else {
|
||||
Err(crate::ErrorKind::OtherError(String::from(
|
||||
"Flow/code/error not found in response!",
|
||||
))
|
||||
.as_error())
|
||||
}
|
||||
}
|
||||
|
||||
/// Minimal shape of the `session/refresh` endpoint response — only the new
/// session token is consumed; other fields are ignored.
#[derive(Deserialize)]
struct Session {
    // Fresh session token returned by the Modrinth API.
    session: String,
}
|
||||
|
||||
/// Logs into Modrinth with a username (or email) and password.
///
/// `challenge` is the captcha challenge token required by the `auth/login`
/// endpoint. The response is interpreted by `get_result_from_res` with the
/// `"session"` key, so the result may be full credentials or a two-factor
/// prompt.
pub async fn login_password(
    username: &str,
    password: &str,
    challenge: &str,
    semaphore: &FetchSemaphore,
) -> crate::Result<ModrinthCredentialsResult> {
    let resp = fetch_advanced(
        Method::POST,
        &format!("{MODRINTH_API_URL}auth/login"),
        None,
        Some(serde_json::json!({
            "username": username,
            "password": password,
            "challenge": challenge,
        })),
        None,
        None,
        semaphore,
        // Unauthenticated request — no stored credentials attached.
        &CredentialsStore(None),
    )
    .await?;
    let value = serde_json::from_slice::<HashMap<String, Value>>(&resp)?;

    get_result_from_res("session", value, semaphore).await
}
|
||||
|
||||
async fn get_creds_from_res(
|
||||
response: HashMap<String, Value>,
|
||||
semaphore: &FetchSemaphore,
|
||||
) -> crate::Result<ModrinthCredentials> {
|
||||
if let Some(code) = response.get("session").and_then(|x| x.as_str()) {
|
||||
let info = fetch_info(code, semaphore).await?;
|
||||
|
||||
Ok(ModrinthCredentials {
|
||||
session: code.to_string(),
|
||||
expires_at: Utc::now() + Duration::weeks(2),
|
||||
user: info,
|
||||
})
|
||||
} else if let Some(error) =
|
||||
response.get("description").and_then(|x| x.as_str())
|
||||
{
|
||||
Err(crate::ErrorKind::OtherError(format!(
|
||||
"Failed to login with error {error}"
|
||||
))
|
||||
.as_error())
|
||||
} else {
|
||||
Err(crate::ErrorKind::OtherError(String::from(
|
||||
"Flow/code/error not found in response!",
|
||||
))
|
||||
.as_error())
|
||||
}
|
||||
}
|
||||
|
||||
/// Completes a pending two-factor login flow.
///
/// `code` is the user-supplied 2FA code, `flow` is the flow token previously
/// returned in a `TwoFactorRequired` result. Unlike the initial login, this
/// endpoint can only succeed or fail — it never triggers another 2FA prompt.
pub async fn login_2fa(
    code: &str,
    flow: &str,
    semaphore: &FetchSemaphore,
) -> crate::Result<ModrinthCredentials> {
    let resp = fetch_advanced(
        Method::POST,
        &format!("{MODRINTH_API_URL}auth/login/2fa"),
        None,
        Some(serde_json::json!({
            "code": code,
            "flow": flow,
        })),
        None,
        None,
        semaphore,
        // Unauthenticated request — no stored credentials attached.
        &CredentialsStore(None),
    )
    .await?;

    let response = serde_json::from_slice::<HashMap<String, Value>>(&resp)?;

    get_creds_from_res(response, semaphore).await
}
|
||||
|
||||
/// Registers a new Modrinth account and logs in as it.
///
/// `challenge` is the captcha challenge token required by `auth/create`;
/// `sign_up_newsletter` opts the new account into the newsletter. On success
/// the response is converted directly into credentials (no 2FA possible for a
/// brand-new account).
pub async fn create_account(
    username: &str,
    email: &str,
    password: &str,
    challenge: &str,
    sign_up_newsletter: bool,
    semaphore: &FetchSemaphore,
) -> crate::Result<ModrinthCredentials> {
    let resp = fetch_advanced(
        Method::POST,
        &format!("{MODRINTH_API_URL}auth/create"),
        None,
        Some(serde_json::json!({
            "username": username,
            "email": email,
            "password": password,
            "challenge": challenge,
            "sign_up_newsletter": sign_up_newsletter,
        })),
        None,
        None,
        semaphore,
        // Unauthenticated request — no stored credentials attached.
        &CredentialsStore(None),
    )
    .await?;
    let response = serde_json::from_slice::<HashMap<String, Value>>(&resp)?;

    get_creds_from_res(response, semaphore).await
}
|
||||
|
||||
/// Refreshes the stored Modrinth session, best-effort.
///
/// If a session is stored, `session/refresh` is attempted; on success the
/// token, user info, and expiry are updated. A failed refresh is tolerated
/// (network may be down) — credentials are only dropped when the refresh
/// failed AND the stored session has already expired. The (possibly updated)
/// store is always persisted before returning.
pub async fn refresh_credentials(
    credentials_store: &mut CredentialsStore,
    semaphore: &FetchSemaphore,
) -> crate::Result<()> {
    if let Some(ref mut credentials) = credentials_store.0 {
        let token = &credentials.session;
        // Errors (transport or parse) are collapsed into `None` on purpose:
        // refresh is opportunistic, not mandatory.
        let resp = fetch_advanced(
            Method::POST,
            &format!("{MODRINTH_API_URL}session/refresh"),
            None,
            None,
            Some(("Authorization", token)),
            None,
            semaphore,
            &CredentialsStore(None),
        )
        .await
        .ok()
        .and_then(|resp| serde_json::from_slice::<Session>(&resp).ok());

        if let Some(value) = resp {
            // Refresh succeeded: re-fetch the user and extend the expiry.
            credentials.user = fetch_info(&value.session, semaphore).await?;
            credentials.session = value.session;
            credentials.expires_at = Utc::now() + Duration::weeks(2);
        } else if credentials.expires_at < Utc::now() {
            // Refresh failed and the old session is expired — log out.
            credentials_store.0 = None;
        }
    }

    credentials_store.save().await?;
    Ok(())
}
|
||||
|
||||
/// Fetches the Modrinth user profile associated with a session token by
/// calling the authenticated `GET /user` endpoint.
async fn fetch_info(
    token: &str,
    semaphore: &FetchSemaphore,
) -> crate::Result<ModrinthUser> {
    let result = fetch_advanced(
        Method::GET,
        &format!("{MODRINTH_API_URL}user"),
        None,
        None,
        // The session token itself authenticates the request.
        Some(("Authorization", token)),
        None,
        semaphore,
        &CredentialsStore(None),
    )
    .await?;
    let value = serde_json::from_slice(&result)?;

    Ok(value)
}
|
||||
1097
libs/theseus/src/state/profiles.rs
Normal file
1097
libs/theseus/src/state/profiles.rs
Normal file
File diff suppressed because it is too large
Load Diff
807
libs/theseus/src/state/projects.rs
Normal file
807
libs/theseus/src/state/projects.rs
Normal file
@@ -0,0 +1,807 @@
|
||||
//! Project management + inference
|
||||
|
||||
use crate::config::MODRINTH_API_URL;
|
||||
use crate::state::{CredentialsStore, ModrinthUser, Profile};
|
||||
use crate::util::fetch::{
|
||||
fetch_json, write_cached_icon, FetchSemaphore, IoSemaphore,
|
||||
};
|
||||
use crate::util::io::IOError;
|
||||
|
||||
use async_zip::tokio::read::fs::ZipFileReader;
|
||||
use chrono::{DateTime, Utc};
|
||||
use futures::StreamExt;
|
||||
use reqwest::Method;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::json;
|
||||
use sha2::Digest;
|
||||
use std::collections::HashMap;
|
||||
use std::path::{Path, PathBuf};
|
||||
use tokio::io::AsyncReadExt;
|
||||
|
||||
use super::ProjectPathId;
|
||||
|
||||
#[derive(Serialize, Deserialize, Clone, Debug)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum ProjectType {
|
||||
Mod,
|
||||
DataPack,
|
||||
ResourcePack,
|
||||
ShaderPack,
|
||||
}
|
||||
|
||||
impl ProjectType {
|
||||
pub fn get_from_loaders(loaders: Vec<String>) -> Option<Self> {
|
||||
if loaders
|
||||
.iter()
|
||||
.any(|x| ["fabric", "forge", "quilt"].contains(&&**x))
|
||||
{
|
||||
Some(ProjectType::Mod)
|
||||
} else if loaders.iter().any(|x| x == "datapack") {
|
||||
Some(ProjectType::DataPack)
|
||||
} else if loaders.iter().any(|x| ["iris", "optifine"].contains(&&**x)) {
|
||||
Some(ProjectType::ShaderPack)
|
||||
} else if loaders
|
||||
.iter()
|
||||
.any(|x| ["vanilla", "canvas", "minecraft"].contains(&&**x))
|
||||
{
|
||||
Some(ProjectType::ResourcePack)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_from_parent_folder(path: PathBuf) -> Option<Self> {
|
||||
// Get parent folder
|
||||
let path = path.parent()?.file_name()?;
|
||||
match path.to_str()? {
|
||||
"mods" => Some(ProjectType::Mod),
|
||||
"datapacks" => Some(ProjectType::DataPack),
|
||||
"resourcepacks" => Some(ProjectType::ResourcePack),
|
||||
"shaderpacks" => Some(ProjectType::ShaderPack),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_name(&self) -> &'static str {
|
||||
match self {
|
||||
ProjectType::Mod => "mod",
|
||||
ProjectType::DataPack => "datapack",
|
||||
ProjectType::ResourcePack => "resourcepack",
|
||||
ProjectType::ShaderPack => "shaderpack",
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_folder(&self) -> &'static str {
|
||||
match self {
|
||||
ProjectType::Mod => "mods",
|
||||
ProjectType::DataPack => "datapacks",
|
||||
ProjectType::ResourcePack => "resourcepacks",
|
||||
ProjectType::ShaderPack => "shaderpacks",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A project file installed in a profile, keyed by its SHA-512 hash, plus
/// whatever metadata could be resolved or inferred for it.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Project {
    // SHA-512 of the file contents, used to match against the Modrinth API.
    pub sha512: String,
    // True when the file carries the launcher's ".disabled" suffix.
    pub disabled: bool,
    pub metadata: ProjectMetadata,
    pub file_name: String,
}
|
||||
|
||||
/// A project as returned by the Modrinth `projects` API.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ModrinthProject {
    pub id: String,
    pub slug: Option<String>,
    pub project_type: String,
    // Team id; resolved to members via the `teams` endpoint.
    pub team: String,
    pub title: String,
    pub description: String,
    pub body: String,

    pub published: DateTime<Utc>,
    pub updated: DateTime<Utc>,

    pub client_side: SideType,
    pub server_side: SideType,

    pub downloads: u32,
    pub followers: u32,

    pub categories: Vec<String>,
    pub additional_categories: Vec<String>,
    pub game_versions: Vec<String>,
    pub loaders: Vec<String>,

    // IDs of this project's versions.
    pub versions: Vec<String>,

    pub icon_url: Option<String>,
}
|
||||
|
||||
/// A specific version of a project
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ModrinthVersion {
    pub id: String,
    pub project_id: String,
    pub author_id: String,

    pub featured: bool,

    pub name: String,
    pub version_number: String,
    pub changelog: String,
    pub changelog_url: Option<String>,

    pub date_published: DateTime<Utc>,
    pub downloads: u32,
    // e.g. "release" / "beta" / "alpha".
    pub version_type: String,

    pub files: Vec<ModrinthVersionFile>,
    pub dependencies: Vec<Dependency>,
    pub game_versions: Vec<String>,
    pub loaders: Vec<String>,
}
|
||||
|
||||
/// A single downloadable file belonging to a [`ModrinthVersion`].
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ModrinthVersionFile {
    // Algorithm name -> hex digest (e.g. "sha512" -> ...).
    pub hashes: HashMap<String, String>,
    pub url: String,
    pub filename: String,
    // True for the version's primary file.
    pub primary: bool,
    pub size: u32,
    pub file_type: Option<FileType>,
}
|
||||
|
||||
/// A dependency edge on a version; identified by version id, project id,
/// or bare file name — the API may populate any subset of the three.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Dependency {
    pub version_id: Option<String>,
    pub project_id: Option<String>,
    pub file_name: Option<String>,
    pub dependency_type: DependencyType,
}
|
||||
|
||||
/// A member of a project team, as returned by the `teams` endpoint.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ModrinthTeamMember {
    pub team_id: String,
    pub user: ModrinthUser,
    pub role: String,
    // Display ordering within the team.
    pub ordering: i64,
}
|
||||
|
||||
/// How a dependency relates to the depending version; serialized lowercase.
#[derive(Serialize, Deserialize, Copy, Clone, Debug)]
#[serde(rename_all = "lowercase")]
pub enum DependencyType {
    Required,
    Optional,
    Incompatible,
    // Bundled inside the depending file itself.
    Embedded,
}
|
||||
|
||||
/// Whether a project is needed on the client and/or server side;
/// serialized kebab-case to match the Modrinth API.
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
#[serde(rename_all = "kebab-case")]
pub enum SideType {
    Required,
    Optional,
    Unsupported,
    Unknown,
}
|
||||
|
||||
/// Special role of a version file (resource packs attached to a mod version);
/// serialized kebab-case to match the Modrinth API.
#[derive(Serialize, Deserialize, Copy, Clone, Debug)]
#[serde(rename_all = "kebab-case")]
pub enum FileType {
    RequiredResourcePack,
    OptionalResourcePack,
    Unknown,
}
|
||||
|
||||
/// Where a local project file's metadata came from: resolved from the
/// Modrinth API, inferred from in-archive manifests, or unknown.
/// Internally tagged (`"type"`) in snake_case.
#[derive(Serialize, Deserialize, Clone, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum ProjectMetadata {
    Modrinth {
        project: Box<ModrinthProject>,
        version: Box<ModrinthVersion>,
        members: Vec<ModrinthTeamMember>,
        // Newer matching version, if one exists.
        update_version: Option<Box<ModrinthVersion>>,
        // True when this version does not fit the profile's loader/game version.
        incompatible: bool,
    },
    // Metadata parsed out of the file itself (mods.toml, fabric.mod.json, ...).
    Inferred {
        title: Option<String>,
        description: Option<String>,
        authors: Vec<String>,
        version: Option<String>,
        icon: Option<PathBuf>,
        project_type: Option<String>,
    },
    Unknown,
}
|
||||
|
||||
/// Extracts an icon at `icon_path` from the zip archive at `path` and writes
/// it into the launcher's icon cache, returning the cached file's path.
/// Any failure (unreadable zip, missing entry, read error) yields `Ok(None)`
/// — a missing icon is never fatal.
#[tracing::instrument(skip(io_semaphore))]
#[theseus_macros::debug_pin]
async fn read_icon_from_file(
    icon_path: Option<String>,
    cache_dir: &Path,
    path: &PathBuf,
    io_semaphore: &IoSemaphore,
) -> crate::Result<Option<PathBuf>> {
    if let Some(icon_path) = icon_path {
        // we have to reopen the zip twice here :(
        let zip_file_reader = ZipFileReader::new(path).await;
        if let Ok(zip_file_reader) = zip_file_reader {
            // Get index of icon file and open it
            let zip_index_option =
                zip_file_reader.file().entries().iter().position(|f| {
                    f.filename().as_str().unwrap_or_default() == icon_path
                });
            if let Some(zip_index) = zip_index_option {
                let mut bytes = Vec::new();
                if zip_file_reader
                    .reader_with_entry(zip_index)
                    .await?
                    .read_to_end_checked(&mut bytes)
                    .await
                    .is_ok()
                {
                    let bytes = bytes::Bytes::from(bytes);
                    let path = write_cached_icon(
                        &icon_path,
                        cache_dir,
                        bytes,
                        io_semaphore,
                    )
                    .await?;

                    return Ok(Some(path));
                }
            }
        }
    }

    Ok(None)
}
|
||||
|
||||
// Creates Project data from the existing files in the file system, for a given Profile
// Paths must be the full paths to the files in the FS, and not the relative paths
// eg: with get_profile_full_project_paths
//
// Pipeline: hash every file (SHA-512), batch-resolve the hashes against the
// Modrinth API, then fall back to reading in-archive manifests for anything
// unmatched. Returns a map keyed by profile-relative project path.
#[tracing::instrument(skip(paths, profile, io_semaphore, fetch_semaphore))]
#[theseus_macros::debug_pin]
pub async fn infer_data_from_files(
    profile: Profile,
    paths: Vec<PathBuf>,
    cache_dir: PathBuf,
    io_semaphore: &IoSemaphore,
    fetch_semaphore: &FetchSemaphore,
    credentials: &CredentialsStore,
) -> crate::Result<HashMap<ProjectPathId, Project>> {
    // SHA-512 hex digest -> absolute file path.
    let mut file_path_hashes = HashMap::new();

    for path in paths {
        if !path.exists() {
            continue;
        }
        if let Some(ext) = path.extension() {
            // Ignore txt configuration files
            if ext == "txt" {
                continue;
            }
        }

        let mut file = tokio::fs::File::open(path.clone())
            .await
            .map_err(|e| IOError::with_path(e, &path))?;

        // Stream the file through the hasher in 4 KiB chunks rather than
        // loading whole (potentially large) jars into memory.
        let mut buffer = [0u8; 4096]; // Buffer to read chunks
        let mut hasher = sha2::Sha512::new(); // Hasher

        loop {
            let bytes_read =
                file.read(&mut buffer).await.map_err(IOError::from)?;
            if bytes_read == 0 {
                break;
            }
            hasher.update(&buffer[..bytes_read]);
        }

        let hash = format!("{:x}", hasher.finalize());
        file_path_hashes.insert(hash, path.clone());
    }
|
||||
|
||||
    // Resolve all hashes against Modrinth in two concurrent batched calls:
    // `version_files` maps each hash to its published version, and
    // `version_files/update` maps each hash to the newest version available
    // for this profile's loader + game version.
    let files_url = format!("{}version_files", MODRINTH_API_URL);
    let updates_url = format!("{}version_files/update", MODRINTH_API_URL);
    let (files, update_versions) = tokio::try_join!(
        fetch_json::<HashMap<String, ModrinthVersion>>(
            Method::POST,
            &files_url,
            None,
            Some(json!({
                "hashes": file_path_hashes.keys().collect::<Vec<_>>(),
                "algorithm": "sha512",
            })),
            fetch_semaphore,
            credentials,
        ),
        fetch_json::<HashMap<String, ModrinthVersion>>(
            Method::POST,
            &updates_url,
            None,
            Some(json!({
                "hashes": file_path_hashes.keys().collect::<Vec<_>>(),
                "algorithm": "sha512",
                "loaders": [profile.metadata.loader],
                "game_versions": [profile.metadata.game_version]
            })),
            fetch_semaphore,
            credentials,
        )
    )?;
|
||||
|
||||
let projects: Vec<ModrinthProject> = fetch_json(
|
||||
Method::GET,
|
||||
&format!(
|
||||
"{}projects?ids={}",
|
||||
MODRINTH_API_URL,
|
||||
serde_json::to_string(
|
||||
&files
|
||||
.values()
|
||||
.map(|x| x.project_id.clone())
|
||||
.collect::<Vec<_>>()
|
||||
)?
|
||||
),
|
||||
None,
|
||||
None,
|
||||
fetch_semaphore,
|
||||
credentials,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let teams: Vec<ModrinthTeamMember> = fetch_json::<
|
||||
Vec<Vec<ModrinthTeamMember>>,
|
||||
>(
|
||||
Method::GET,
|
||||
&format!(
|
||||
"{}teams?ids={}",
|
||||
MODRINTH_API_URL,
|
||||
serde_json::to_string(
|
||||
&projects.iter().map(|x| x.team.clone()).collect::<Vec<_>>()
|
||||
)?
|
||||
),
|
||||
None,
|
||||
None,
|
||||
fetch_semaphore,
|
||||
credentials,
|
||||
)
|
||||
.await?
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.collect();
|
||||
|
||||
    // Files whose hash was recognized by Modrinth become full Modrinth
    // metadata entries; everything else is queued for manifest inference.
    let mut return_projects: Vec<(PathBuf, Project)> = Vec::new();
    let mut further_analyze_projects: Vec<(String, PathBuf)> = Vec::new();

    for (hash, path) in file_path_hashes {
        if let Some(version) = files.get(&hash) {
            if let Some(project) =
                projects.iter().find(|x| version.project_id == x.id)
            {
                let file_name = path
                    .file_name()
                    .unwrap_or_default()
                    .to_string_lossy()
                    .to_string();

                return_projects.push((
                    path,
                    Project {
                        disabled: file_name.ends_with(".disabled"),
                        metadata: ProjectMetadata::Modrinth {
                            project: Box::new(project.clone()),
                            version: Box::new(version.clone()),
                            members: teams
                                .iter()
                                .filter(|x| x.team_id == project.team)
                                .cloned()
                                .collect::<Vec<_>>(),
                            // Only report an update when the newest matching
                            // version differs from the one installed.
                            update_version: if let Some(value) =
                                update_versions.get(&hash)
                            {
                                if value.id != version.id {
                                    Some(Box::new(value.clone()))
                                } else {
                                    None
                                }
                            } else {
                                None
                            },
                            // NOTE(review): the second operand is NOT negated,
                            // so a version whose game_versions DOES include
                            // the profile's game version is flagged
                            // incompatible. This looks inverted — expected
                            // `|| !version.game_versions.contains(...)`.
                            // Confirm intent before relying on this flag.
                            incompatible: !version.loaders.contains(
                                &profile
                                    .metadata
                                    .loader
                                    .as_api_str()
                                    .to_string(),
                            ) || version
                                .game_versions
                                .contains(&profile.metadata.game_version),
                        },
                        sha512: hash,
                        file_name,
                    },
                ));
                continue;
            }
        }

        further_analyze_projects.push((hash, path));
    }
|
||||
|
||||
for (hash, path) in further_analyze_projects {
|
||||
let file_name = path
|
||||
.file_name()
|
||||
.unwrap_or_default()
|
||||
.to_string_lossy()
|
||||
.to_string();
|
||||
|
||||
let zip_file_reader = if let Ok(zip_file_reader) =
|
||||
ZipFileReader::new(path.clone()).await
|
||||
{
|
||||
zip_file_reader
|
||||
} else {
|
||||
return_projects.push((
|
||||
path.clone(),
|
||||
Project {
|
||||
sha512: hash,
|
||||
disabled: file_name.ends_with(".disabled"),
|
||||
metadata: ProjectMetadata::Unknown,
|
||||
file_name,
|
||||
},
|
||||
));
|
||||
continue;
|
||||
};
|
||||
|
||||
// Forge
|
||||
let zip_index_option =
|
||||
zip_file_reader.file().entries().iter().position(|f| {
|
||||
f.filename().as_str().unwrap_or_default()
|
||||
== "META-INF/mods.toml"
|
||||
});
|
||||
if let Some(index) = zip_index_option {
|
||||
#[derive(Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct ForgeModInfo {
|
||||
pub mods: Vec<ForgeMod>,
|
||||
}
|
||||
#[derive(Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct ForgeMod {
|
||||
mod_id: String,
|
||||
version: Option<String>,
|
||||
display_name: Option<String>,
|
||||
description: Option<String>,
|
||||
logo_file: Option<String>,
|
||||
authors: Option<String>,
|
||||
}
|
||||
|
||||
let mut file_str = String::new();
|
||||
if zip_file_reader
|
||||
.reader_with_entry(index)
|
||||
.await?
|
||||
.read_to_string_checked(&mut file_str)
|
||||
.await
|
||||
.is_ok()
|
||||
{
|
||||
if let Ok(pack) = toml::from_str::<ForgeModInfo>(&file_str) {
|
||||
if let Some(pack) = pack.mods.first() {
|
||||
let icon = read_icon_from_file(
|
||||
pack.logo_file.clone(),
|
||||
&cache_dir,
|
||||
&path,
|
||||
io_semaphore,
|
||||
)
|
||||
.await?;
|
||||
|
||||
return_projects.push((
|
||||
path.clone(),
|
||||
Project {
|
||||
sha512: hash,
|
||||
disabled: file_name.ends_with(".disabled"),
|
||||
file_name,
|
||||
metadata: ProjectMetadata::Inferred {
|
||||
title: Some(
|
||||
pack.display_name
|
||||
.clone()
|
||||
.unwrap_or_else(|| {
|
||||
pack.mod_id.clone()
|
||||
}),
|
||||
),
|
||||
description: pack.description.clone(),
|
||||
authors: pack
|
||||
.authors
|
||||
.clone()
|
||||
.map(|x| vec![x])
|
||||
.unwrap_or_default(),
|
||||
version: pack.version.clone(),
|
||||
icon,
|
||||
project_type: Some("mod".to_string()),
|
||||
},
|
||||
},
|
||||
));
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
        // Legacy Forge (pre-1.13): metadata lives in mcmod.info, not
        // META-INF/mods.toml. (Previous comment just said "Forge",
        // duplicating the label of the modern-Forge branch above.)
        let zip_index_option =
            zip_file_reader.file().entries().iter().position(|f| {
                f.filename().as_str().unwrap_or_default() == "mcmod.info"
            });
|
||||
if let Some(index) = zip_index_option {
|
||||
#[derive(Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct ForgeMod {
|
||||
modid: String,
|
||||
name: String,
|
||||
description: Option<String>,
|
||||
version: Option<String>,
|
||||
author_list: Option<Vec<String>>,
|
||||
logo_file: Option<String>,
|
||||
}
|
||||
|
||||
let mut file_str = String::new();
|
||||
if zip_file_reader
|
||||
.reader_with_entry(index)
|
||||
.await?
|
||||
.read_to_string_checked(&mut file_str)
|
||||
.await
|
||||
.is_ok()
|
||||
{
|
||||
if let Ok(pack) = serde_json::from_str::<ForgeMod>(&file_str) {
|
||||
let icon = read_icon_from_file(
|
||||
pack.logo_file,
|
||||
&cache_dir,
|
||||
&path,
|
||||
io_semaphore,
|
||||
)
|
||||
.await?;
|
||||
|
||||
return_projects.push((
|
||||
path.clone(),
|
||||
Project {
|
||||
sha512: hash,
|
||||
disabled: file_name.ends_with(".disabled"),
|
||||
file_name,
|
||||
metadata: ProjectMetadata::Inferred {
|
||||
title: Some(if pack.name.is_empty() {
|
||||
pack.modid
|
||||
} else {
|
||||
pack.name
|
||||
}),
|
||||
description: pack.description,
|
||||
authors: pack.author_list.unwrap_or_default(),
|
||||
version: pack.version,
|
||||
icon,
|
||||
project_type: Some("mod".to_string()),
|
||||
},
|
||||
},
|
||||
));
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Fabric
|
||||
let zip_index_option =
|
||||
zip_file_reader.file().entries().iter().position(|f| {
|
||||
f.filename().as_str().unwrap_or_default() == "fabric.mod.json"
|
||||
});
|
||||
if let Some(index) = zip_index_option {
|
||||
#[derive(Deserialize)]
|
||||
#[serde(untagged)]
|
||||
enum FabricAuthor {
|
||||
String(String),
|
||||
Object { name: String },
|
||||
}
|
||||
#[derive(Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct FabricMod {
|
||||
id: String,
|
||||
version: String,
|
||||
name: Option<String>,
|
||||
description: Option<String>,
|
||||
authors: Vec<FabricAuthor>,
|
||||
icon: Option<String>,
|
||||
}
|
||||
|
||||
let mut file_str = String::new();
|
||||
if zip_file_reader
|
||||
.reader_with_entry(index)
|
||||
.await?
|
||||
.read_to_string_checked(&mut file_str)
|
||||
.await
|
||||
.is_ok()
|
||||
{
|
||||
if let Ok(pack) = serde_json::from_str::<FabricMod>(&file_str) {
|
||||
let icon = read_icon_from_file(
|
||||
pack.icon,
|
||||
&cache_dir,
|
||||
&path,
|
||||
io_semaphore,
|
||||
)
|
||||
.await?;
|
||||
|
||||
return_projects.push((
|
||||
path.clone(),
|
||||
Project {
|
||||
sha512: hash,
|
||||
disabled: file_name.ends_with(".disabled"),
|
||||
file_name,
|
||||
metadata: ProjectMetadata::Inferred {
|
||||
title: Some(pack.name.unwrap_or(pack.id)),
|
||||
description: pack.description,
|
||||
authors: pack
|
||||
.authors
|
||||
.into_iter()
|
||||
.map(|x| match x {
|
||||
FabricAuthor::String(name) => name,
|
||||
FabricAuthor::Object { name } => name,
|
||||
})
|
||||
.collect(),
|
||||
version: Some(pack.version),
|
||||
icon,
|
||||
project_type: Some("mod".to_string()),
|
||||
},
|
||||
},
|
||||
));
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Quilt
|
||||
let zip_index_option =
|
||||
zip_file_reader.file().entries().iter().position(|f| {
|
||||
f.filename().as_str().unwrap_or_default() == "quilt.mod.json"
|
||||
});
|
||||
if let Some(index) = zip_index_option {
|
||||
#[derive(Deserialize)]
|
||||
struct QuiltMetadata {
|
||||
pub name: Option<String>,
|
||||
pub description: Option<String>,
|
||||
pub contributors: Option<HashMap<String, String>>,
|
||||
pub icon: Option<String>,
|
||||
}
|
||||
#[derive(Deserialize)]
|
||||
struct QuiltMod {
|
||||
id: String,
|
||||
version: String,
|
||||
metadata: Option<QuiltMetadata>,
|
||||
}
|
||||
|
||||
let mut file_str = String::new();
|
||||
if zip_file_reader
|
||||
.reader_with_entry(index)
|
||||
.await?
|
||||
.read_to_string_checked(&mut file_str)
|
||||
.await
|
||||
.is_ok()
|
||||
{
|
||||
if let Ok(pack) = serde_json::from_str::<QuiltMod>(&file_str) {
|
||||
let icon = read_icon_from_file(
|
||||
pack.metadata.as_ref().and_then(|x| x.icon.clone()),
|
||||
&cache_dir,
|
||||
&path,
|
||||
io_semaphore,
|
||||
)
|
||||
.await?;
|
||||
|
||||
return_projects.push((
|
||||
path.clone(),
|
||||
Project {
|
||||
sha512: hash,
|
||||
disabled: file_name.ends_with(".disabled"),
|
||||
file_name,
|
||||
metadata: ProjectMetadata::Inferred {
|
||||
title: Some(
|
||||
pack.metadata
|
||||
.as_ref()
|
||||
.and_then(|x| x.name.clone())
|
||||
.unwrap_or(pack.id),
|
||||
),
|
||||
description: pack
|
||||
.metadata
|
||||
.as_ref()
|
||||
.and_then(|x| x.description.clone()),
|
||||
authors: pack
|
||||
.metadata
|
||||
.map(|x| {
|
||||
x.contributors
|
||||
.unwrap_or_default()
|
||||
.keys()
|
||||
.cloned()
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default(),
|
||||
version: Some(pack.version),
|
||||
icon,
|
||||
project_type: Some("mod".to_string()),
|
||||
},
|
||||
},
|
||||
));
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Other
|
||||
let zip_index_option =
|
||||
zip_file_reader.file().entries().iter().position(|f| {
|
||||
f.filename().as_str().unwrap_or_default() == "pack.mcmeta"
|
||||
});
|
||||
if let Some(index) = zip_index_option {
|
||||
#[derive(Deserialize)]
|
||||
struct Pack {
|
||||
description: Option<String>,
|
||||
}
|
||||
|
||||
let mut file_str = String::new();
|
||||
if zip_file_reader
|
||||
.reader_with_entry(index)
|
||||
.await?
|
||||
.read_to_string_checked(&mut file_str)
|
||||
.await
|
||||
.is_ok()
|
||||
{
|
||||
if let Ok(pack) = serde_json::from_str::<Pack>(&file_str) {
|
||||
let icon = read_icon_from_file(
|
||||
Some("pack.png".to_string()),
|
||||
&cache_dir,
|
||||
&path,
|
||||
io_semaphore,
|
||||
)
|
||||
.await?;
|
||||
|
||||
// Guess the project type from the filepath
|
||||
let project_type =
|
||||
ProjectType::get_from_parent_folder(path.clone());
|
||||
return_projects.push((
|
||||
path.clone(),
|
||||
Project {
|
||||
sha512: hash,
|
||||
disabled: file_name.ends_with(".disabled"),
|
||||
file_name,
|
||||
metadata: ProjectMetadata::Inferred {
|
||||
title: None,
|
||||
description: pack.description,
|
||||
authors: Vec::new(),
|
||||
version: None,
|
||||
icon,
|
||||
project_type: project_type
|
||||
.map(|x| x.get_name().to_string()),
|
||||
},
|
||||
},
|
||||
));
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return_projects.push((
|
||||
path.clone(),
|
||||
Project {
|
||||
sha512: hash,
|
||||
disabled: file_name.ends_with(".disabled"),
|
||||
file_name,
|
||||
metadata: ProjectMetadata::Unknown,
|
||||
},
|
||||
));
|
||||
}
|
||||
|
||||
// Project paths should be relative
|
||||
let mut corrected_hashmap = HashMap::new();
|
||||
let mut stream = tokio_stream::iter(return_projects);
|
||||
while let Some((h, v)) = stream.next().await {
|
||||
let h = ProjectPathId::from_fs_path(&h).await?;
|
||||
corrected_hashmap.insert(h, v);
|
||||
}
|
||||
|
||||
Ok(corrected_hashmap)
|
||||
}
|
||||
69
libs/theseus/src/state/safe_processes.rs
Normal file
69
libs/theseus/src/state/safe_processes.rs
Normal file
@@ -0,0 +1,69 @@
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::State;
|
||||
|
||||
// We implement a store for safe loading bars such that we can wait for them to complete
// We create this store separately from the loading bars themselves, because this may be extended as needed
pub struct SafeProcesses {
    // UUIDs of loading bars that are still in flight.
    pub loading_bars: Vec<Uuid>,
}
|
||||
|
||||
/// Category of safe process tracked by [`SafeProcesses`].
#[derive(Debug, Copy, Clone)]
pub enum ProcessType {
    LoadingBar,
    // Potentially other types of processes (ie: IO operations?)
}
|
||||
|
||||
impl SafeProcesses {
|
||||
// init
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
loading_bars: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
// Adds a new running safe process to the list by uuid
|
||||
pub async fn add_uuid(
|
||||
r#type: ProcessType,
|
||||
uuid: Uuid,
|
||||
) -> crate::Result<Uuid> {
|
||||
let state = State::get().await?;
|
||||
let mut safe_processes = state.safety_processes.write().await;
|
||||
match r#type {
|
||||
ProcessType::LoadingBar => {
|
||||
safe_processes.loading_bars.push(uuid);
|
||||
}
|
||||
}
|
||||
Ok(uuid)
|
||||
}
|
||||
|
||||
// Mark a safe process as finishing
|
||||
pub async fn complete(
|
||||
r#type: ProcessType,
|
||||
uuid: Uuid,
|
||||
) -> crate::Result<()> {
|
||||
let state = State::get().await?;
|
||||
let mut safe_processes = state.safety_processes.write().await;
|
||||
|
||||
match r#type {
|
||||
ProcessType::LoadingBar => {
|
||||
safe_processes.loading_bars.retain(|x| *x != uuid);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Check if there are any pending safe processes of a given type
|
||||
pub async fn is_complete(r#type: ProcessType) -> crate::Result<bool> {
|
||||
let state = State::get().await?;
|
||||
let safe_processes = state.safety_processes.read().await;
|
||||
match r#type {
|
||||
ProcessType::LoadingBar => {
|
||||
if safe_processes.loading_bars.is_empty() {
|
||||
return Ok(true);
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(false)
|
||||
}
|
||||
}
|
||||
183
libs/theseus/src/state/settings.rs
Normal file
183
libs/theseus/src/state/settings.rs
Normal file
@@ -0,0 +1,183 @@
|
||||
//! Theseus settings file
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::path::{Path, PathBuf};
|
||||
use tokio::fs;
|
||||
|
||||
use super::{DirectoryInfo, JavaGlobals};
|
||||
|
||||
// TODO: convert to semver?
|
||||
const CURRENT_FORMAT_VERSION: u32 = 1;
|
||||
|
||||
// Types
|
||||
/// Global Theseus settings
///
/// Fields marked `#[serde(default)]` were added after the initial release;
/// the default keeps older settings files deserializing cleanly.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Settings {
    pub theme: Theme,
    pub memory: MemorySettings,
    #[serde(default)]
    pub force_fullscreen: bool,
    pub game_resolution: WindowSize,
    // Extra JVM arguments appended to every launch.
    pub custom_java_args: Vec<String>,
    // Extra (key, value) environment variables set for launched games.
    pub custom_env_args: Vec<(String, String)>,
    pub java_globals: JavaGlobals,
    pub hooks: Hooks,
    pub max_concurrent_downloads: usize,
    pub max_concurrent_writes: usize,
    // Settings-format version (see CURRENT_FORMAT_VERSION).
    pub version: u32,
    pub collapsed_navigation: bool,
    #[serde(default)]
    pub disable_discord_rpc: bool,
    #[serde(default)]
    pub hide_on_process: bool,
    #[serde(default)]
    pub native_decorations: bool,
    #[serde(default)]
    pub default_page: DefaultPage,
    #[serde(default)]
    pub developer_mode: bool,
    #[serde(default)]
    pub opt_out_analytics: bool,
    #[serde(default)]
    pub advanced_rendering: bool,
    #[serde(default)]
    pub fully_onboarded: bool,
    // Directory this config was loaded from; defaults to the initial
    // settings directory when absent from the file.
    #[serde(default = "DirectoryInfo::get_initial_settings_dir")]
    pub loaded_config_dir: Option<PathBuf>,
}
|
||||
|
||||
impl Settings {
    /// Loads settings from `file`, or creates defaults.
    ///
    /// A settings file that exists but fails to parse is treated as
    /// corrupted: it is renamed to `*.json.bak`, a fresh default settings
    /// struct is created and immediately synced back to disk, and the user
    /// is considered already onboarded (so the rescue doesn't re-trigger
    /// first-run onboarding).
    #[tracing::instrument]
    pub async fn init(file: &Path) -> crate::Result<Self> {
        let mut rescued = false;

        let settings = if file.exists() {
            let loaded_settings = fs::read(&file)
                .await
                .map_err(|err| {
                    crate::ErrorKind::FSError(format!(
                        "Error reading settings file: {err}"
                    ))
                    .as_error()
                })
                .and_then(|it| {
                    serde_json::from_slice::<Settings>(&it)
                        .map_err(crate::Error::from)
                });
            // settings is corrupted. Back up the file and create a new one
            if let Err(ref err) = loaded_settings {
                tracing::error!("Failed to load settings file: {err}. ");
                let backup_file = file.with_extension("json.bak");
                tracing::error!("Corrupted settings file will be backed up as {}, and a new settings file will be created.", backup_file.display());
                // Rename failure is tolerated: worst case the corrupted file
                // is overwritten by the sync below.
                let _ = fs::rename(file, backup_file).await;
                rescued = true;
            }
            loaded_settings.ok()
        } else {
            None
        };

        if let Some(settings) = settings {
            Ok(settings)
        } else {
            // Create new settings file
            let settings = Self {
                theme: Theme::Dark,
                memory: MemorySettings::default(),
                force_fullscreen: false,
                game_resolution: WindowSize::default(),
                custom_java_args: Vec::new(),
                custom_env_args: Vec::new(),
                java_globals: JavaGlobals::new(),
                hooks: Hooks::default(),
                max_concurrent_downloads: 10,
                max_concurrent_writes: 10,
                version: CURRENT_FORMAT_VERSION,
                collapsed_navigation: false,
                disable_discord_rpc: false,
                hide_on_process: false,
                native_decorations: false,
                default_page: DefaultPage::Home,
                developer_mode: false,
                opt_out_analytics: false,
                advanced_rendering: true,
                fully_onboarded: rescued, // If we rescued the settings file, we should consider the user fully onboarded

                // By default, the config directory is the same as the settings directory
                loaded_config_dir: DirectoryInfo::get_initial_settings_dir(),
            };
            if rescued {
                // Persist immediately so the rescued state survives a crash.
                settings.sync(file).await?;
            }
            Ok(settings)
        }
    }

    /// Serializes the settings as JSON and writes them to `to`.
    #[tracing::instrument(skip(self))]
    pub async fn sync(&self, to: &Path) -> crate::Result<()> {
        fs::write(to, serde_json::to_vec(self)?)
            .await
            .map_err(|err| {
                crate::ErrorKind::FSError(format!(
                    "Error saving settings to file: {err}"
                ))
                .as_error()
            })?;
        Ok(())
    }
}
|
||||
|
||||
/// Theseus theme
///
/// Color scheme used by the launcher UI; serialized in snake_case
/// (`"dark"`, `"light"`, `"oled"`).
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum Theme {
    Dark,
    Light,
    Oled,
}
|
||||
|
||||
/// Minecraft memory settings
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, Copy)]
|
||||
pub struct MemorySettings {
|
||||
pub maximum: u32,
|
||||
}
|
||||
|
||||
impl Default for MemorySettings {
|
||||
fn default() -> Self {
|
||||
Self { maximum: 2048 }
|
||||
}
|
||||
}
|
||||
|
||||
/// Game window size
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, Copy)]
|
||||
pub struct WindowSize(pub u16, pub u16);
|
||||
|
||||
impl Default for WindowSize {
|
||||
fn default() -> Self {
|
||||
Self(854, 480)
|
||||
}
|
||||
}
|
||||
|
||||
/// Game initialization hooks
///
/// Optional user-supplied hook commands around game launch (per the field
/// names; usage is outside this file). `None` fields are omitted from the
/// serialized form, and missing fields deserialize to `None`.
#[derive(Serialize, Deserialize, Debug, Clone, Default)]
#[serde(default)]
pub struct Hooks {
    /// Hook run before launch — presumably a shell command; confirm at the
    /// call site.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub pre_launch: Option<String>,
    /// Wrapper for the game process (per the name; verify against launcher).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub wrapper: Option<String>,
    /// Hook run after the game exits.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub post_exit: Option<String>,
}
|
||||
|
||||
/// Opening window to start with
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, Copy)]
|
||||
pub enum DefaultPage {
|
||||
Home,
|
||||
Library,
|
||||
}
|
||||
|
||||
impl Default for DefaultPage {
|
||||
fn default() -> Self {
|
||||
Self::Home
|
||||
}
|
||||
}
|
||||
261
libs/theseus/src/state/tags.rs
Normal file
261
libs/theseus/src/state/tags.rs
Normal file
@@ -0,0 +1,261 @@
|
||||
use std::path::PathBuf;
|
||||
|
||||
use reqwest::Method;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::config::MODRINTH_API_URL;
|
||||
use crate::data::DirectoryInfo;
|
||||
use crate::state::CredentialsStore;
|
||||
use crate::util::fetch::{
|
||||
fetch_json, read_json, write, FetchSemaphore, IoSemaphore,
|
||||
};
|
||||
|
||||
// Serializable struct for all tags to be fetched together by the frontend
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Tags {
    pub categories: Vec<Category>,
    pub loaders: Vec<Loader>,
    pub game_versions: Vec<GameVersion>,
    pub donation_platforms: Vec<DonationPlatform>,
    // Free-form report type identifiers (plain strings from the API)
    pub report_types: Vec<String>,
}
|
||||
|
||||
impl Tags {
|
||||
#[tracing::instrument(skip(io_semaphore, fetch_semaphore))]
|
||||
#[theseus_macros::debug_pin]
|
||||
pub async fn init(
|
||||
dirs: &DirectoryInfo,
|
||||
fetch_online: bool,
|
||||
io_semaphore: &IoSemaphore,
|
||||
fetch_semaphore: &FetchSemaphore,
|
||||
credentials: &CredentialsStore,
|
||||
) -> crate::Result<Self> {
|
||||
let mut tags = None;
|
||||
let tags_path = dirs.caches_meta_dir().await.join("tags.json");
|
||||
let tags_path_backup =
|
||||
dirs.caches_meta_dir().await.join("tags.json.bak");
|
||||
|
||||
if let Ok(tags_json) = read_json::<Self>(&tags_path, io_semaphore).await
|
||||
{
|
||||
tags = Some(tags_json);
|
||||
} else if fetch_online {
|
||||
match Self::fetch(fetch_semaphore, credentials).await {
|
||||
Ok(tags_fetch) => tags = Some(tags_fetch),
|
||||
Err(err) => {
|
||||
tracing::warn!("Unable to fetch launcher tags: {err}")
|
||||
}
|
||||
}
|
||||
} else if let Ok(tags_json) =
|
||||
read_json::<Self>(&tags_path_backup, io_semaphore).await
|
||||
{
|
||||
tags = Some(tags_json);
|
||||
std::fs::copy(&tags_path_backup, &tags_path).map_err(|err| {
|
||||
crate::ErrorKind::FSError(format!(
|
||||
"Error restoring tags backup: {err}"
|
||||
))
|
||||
.as_error()
|
||||
})?;
|
||||
}
|
||||
|
||||
if let Some(tags_data) = tags {
|
||||
write(&tags_path, &serde_json::to_vec(&tags_data)?, io_semaphore)
|
||||
.await?;
|
||||
write(
|
||||
&tags_path_backup,
|
||||
&serde_json::to_vec(&tags_data)?,
|
||||
io_semaphore,
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(tags_data)
|
||||
} else {
|
||||
Err(crate::ErrorKind::NoValueFor(String::from("launcher tags"))
|
||||
.as_error())
|
||||
}
|
||||
}
|
||||
|
||||
#[tracing::instrument]
|
||||
#[theseus_macros::debug_pin]
|
||||
pub async fn update() {
|
||||
let res = async {
|
||||
let state = crate::State::get().await?;
|
||||
|
||||
let creds = state.credentials.read().await;
|
||||
let tags_fetch =
|
||||
Tags::fetch(&state.fetch_semaphore, &creds).await?;
|
||||
drop(creds);
|
||||
|
||||
let tags_path =
|
||||
state.directories.caches_meta_dir().await.join("tags.json");
|
||||
let tags_path_backup = state
|
||||
.directories
|
||||
.caches_meta_dir()
|
||||
.await
|
||||
.join("tags.json.bak");
|
||||
if tags_path.exists() {
|
||||
std::fs::copy(&tags_path, &tags_path_backup).unwrap();
|
||||
}
|
||||
|
||||
write(
|
||||
&tags_path,
|
||||
&serde_json::to_vec(&tags_fetch)?,
|
||||
&state.io_semaphore,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let mut old_tags = state.tags.write().await;
|
||||
*old_tags = tags_fetch;
|
||||
|
||||
Ok::<(), crate::Error>(())
|
||||
}
|
||||
.await;
|
||||
|
||||
match res {
|
||||
Ok(()) => {}
|
||||
Err(err) => {
|
||||
tracing::warn!("Unable to update launcher tags: {err}")
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
// Checks the database for categories tag, returns a Vec::new() if it doesnt exist, otherwise returns the categories
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn get_categories(&self) -> Vec<Category> {
|
||||
self.categories.clone()
|
||||
}
|
||||
|
||||
// Checks the database for loaders tag, returns a Vec::new() if it doesnt exist, otherwise returns the loaders
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn get_loaders(&self) -> Vec<Loader> {
|
||||
self.loaders.clone()
|
||||
}
|
||||
|
||||
// Checks the database for game_versions tag, returns a Vec::new() if it doesnt exist, otherwise returns the game_versions
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn get_game_versions(&self) -> Vec<GameVersion> {
|
||||
self.game_versions.clone()
|
||||
}
|
||||
|
||||
// Checks the database for donation_platforms tag, returns a Vec::new() if it doesnt exist, otherwise returns the donation_platforms
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn get_donation_platforms(&self) -> Vec<DonationPlatform> {
|
||||
self.donation_platforms.clone()
|
||||
}
|
||||
|
||||
// Checks the database for report_types tag, returns a Vec::new() if it doesnt exist, otherwise returns the report_types
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn get_report_types(&self) -> Vec<String> {
|
||||
self.report_types.clone()
|
||||
}
|
||||
|
||||
// Gets all tags together as a serializable bundle
|
||||
#[tracing::instrument(skip(self))]
|
||||
pub fn get_tag_bundle(&self) -> Tags {
|
||||
self.clone()
|
||||
}
|
||||
|
||||
// Fetches the tags from the Modrinth API and stores them in the database
|
||||
pub async fn fetch(
|
||||
semaphore: &FetchSemaphore,
|
||||
credentials: &CredentialsStore,
|
||||
) -> crate::Result<Self> {
|
||||
let categories = format!("{MODRINTH_API_URL}tag/category");
|
||||
let loaders = format!("{MODRINTH_API_URL}tag/loader");
|
||||
let game_versions = format!("{MODRINTH_API_URL}tag/game_version");
|
||||
let donation_platforms =
|
||||
format!("{MODRINTH_API_URL}tag/donation_platform");
|
||||
let report_types = format!("{MODRINTH_API_URL}tag/report_type");
|
||||
|
||||
let categories_fut = fetch_json::<Vec<Category>>(
|
||||
Method::GET,
|
||||
&categories,
|
||||
None,
|
||||
None,
|
||||
semaphore,
|
||||
credentials,
|
||||
);
|
||||
let loaders_fut = fetch_json::<Vec<Loader>>(
|
||||
Method::GET,
|
||||
&loaders,
|
||||
None,
|
||||
None,
|
||||
semaphore,
|
||||
credentials,
|
||||
);
|
||||
let game_versions_fut = fetch_json::<Vec<GameVersion>>(
|
||||
Method::GET,
|
||||
&game_versions,
|
||||
None,
|
||||
None,
|
||||
semaphore,
|
||||
credentials,
|
||||
);
|
||||
let donation_platforms_fut = fetch_json::<Vec<DonationPlatform>>(
|
||||
Method::GET,
|
||||
&donation_platforms,
|
||||
None,
|
||||
None,
|
||||
semaphore,
|
||||
credentials,
|
||||
);
|
||||
let report_types_fut = fetch_json::<Vec<String>>(
|
||||
Method::GET,
|
||||
&report_types,
|
||||
None,
|
||||
None,
|
||||
semaphore,
|
||||
credentials,
|
||||
);
|
||||
|
||||
let (
|
||||
categories,
|
||||
loaders,
|
||||
game_versions,
|
||||
donation_platforms,
|
||||
report_types,
|
||||
) = tokio::try_join!(
|
||||
categories_fut,
|
||||
loaders_fut,
|
||||
game_versions_fut,
|
||||
donation_platforms_fut,
|
||||
report_types_fut
|
||||
)?;
|
||||
|
||||
Ok(Self {
|
||||
categories,
|
||||
loaders,
|
||||
game_versions,
|
||||
donation_platforms,
|
||||
report_types,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// A project category tag as returned by the Modrinth API.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Category {
    pub name: String,
    /// Project type this category applies to (e.g. mod vs. modpack —
    /// confirm against the API response).
    pub project_type: String,
    pub header: String,
    /// Path to the category's icon.
    pub icon: PathBuf,
}
|
||||
|
||||
/// A mod-loader tag as returned by the Modrinth API.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Loader {
    pub name: String,
    /// Path to the loader's icon.
    pub icon: PathBuf,
    /// Project types this loader supports, as API identifier strings.
    pub supported_project_types: Vec<String>,
}
|
||||
|
||||
/// A donation platform tag as returned by the Modrinth API.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DonationPlatform {
    /// Short identifier for the platform (per the name; confirm against
    /// the API).
    pub short: String,
    pub name: String,
}
|
||||
|
||||
/// A Minecraft game-version tag as returned by the Modrinth API.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GameVersion {
    pub version: String,
    /// Release channel (e.g. release/snapshot — confirm against the API).
    pub version_type: String,
    /// Release date kept as the raw API string (format not parsed here).
    pub date: String,
    pub major: bool,
}
|
||||
Reference in New Issue
Block a user