Migrate to SQLite for Internal Launcher Data (#1300)

* initial migration

* barebones profiles

* Finish profiles

* Add back file watcher

* UI support progress

* Finish most of cache

* Fix options page

* Fix forge, finish modrinth auth

* Accounts, process cache

* Run SQLX prepare

* Finish

* Run lint + actions

* Fix version to be compatible with Windows

* fix lint

* actually fix lint

* actually fix lint again
This commit is contained in:
Geometrically
2024-07-24 11:03:19 -07:00
committed by GitHub
parent 90f74427d9
commit 49a20a303a
156 changed files with 9208 additions and 8547 deletions

File diff suppressed because it is too large Load Diff

View File

@@ -1,728 +0,0 @@
use super::{Profile, ProfilePathId};
use chrono::{DateTime, Utc};
use serde::Deserialize;
use serde::Serialize;
use std::{collections::HashMap, sync::Arc};
use tokio::process::Child;
use tokio::process::Command;
use tokio::sync::RwLock;
use crate::event::emit::emit_process;
use crate::event::ProcessPayloadType;
use crate::util::fetch::read_json;
use crate::util::io::IOError;
use crate::{profile, ErrorKind};
use tokio::task::JoinHandle;
use uuid::Uuid;
// File (under caches/metadata) where running-process state is persisted so
// launched games can be rescued after a launcher restart.
const PROCESSES_JSON: &str = "processes.json";
// Child processes (instances of Minecraft)
// A wrapper over a Hashmap connecting launch UUID -> MinecraftChild
pub struct Children(HashMap<Uuid, Arc<RwLock<MinecraftChild>>>);
// The two ways we can hold on to a running game process.
#[derive(Debug)]
pub enum ChildType {
    // A child process that is being managed by tokio
    TokioChild(Child),
    // A child process that was rescued from a cache (e.g. a process that was launched by theseus before the launcher was restarted)
    // This may not have all the same functionality as a TokioChild
    // (only the raw OS PID is known; exit codes are unavailable)
    RescuedPID(u32),
}
// Serialized snapshot of a running process, written to processes.json so the
// process can be re-attached ("rescued") after a launcher restart. The
// start_time/name/exe triple is used to verify the PID was not reused.
#[derive(Serialize, Deserialize, Debug)]
pub struct ProcessCache {
    // OS process id at the time of caching
    pub pid: u32,
    // Launcher-side identifier for this launch (key in `Children`)
    pub uuid: Uuid,
    // Process start time as reported by sysinfo, used to detect PID reuse
    pub start_time: u64,
    // Process name as reported by sysinfo
    pub name: String,
    // Executable path (lossy string form) as reported by sysinfo
    pub exe: String,
    // Profile the process was launched from
    pub profile_relative_path: ProfilePathId,
    // Optional command to run after the game exits successfully
    pub post_command: Option<String>,
}
impl ChildType {
    /// Non-blocking poll for process exit.
    ///
    /// Returns `Ok(None)` while the process is still running and
    /// `Ok(Some(code))` once it has exited. For a `RescuedPID` we only hold a
    /// raw PID, so the real exit code is unknowable and reported as 0.
    pub async fn try_wait(&mut self) -> crate::Result<Option<i32>> {
        match self {
            // Tokio-managed child: delegate to tokio's non-blocking try_wait.
            // A signal-terminated process has no exit code; default to 0.
            ChildType::TokioChild(child) => Ok(child
                .try_wait()
                .map_err(IOError::from)?
                .map(|x| x.code().unwrap_or(0))),
            ChildType::RescuedPID(pid) => {
                let mut system = sysinfo::System::new();
                // refresh_process returns false when no process with this PID
                // exists anymore -> treat as exited (code unknown, report 0)
                if !system.refresh_process(sysinfo::Pid::from_u32(*pid)) {
                    return Ok(Some(0));
                }
                let process = system.process(sysinfo::Pid::from_u32(*pid));
                if let Some(process) = process {
                    // Any status other than Run is treated as "exited"
                    if process.status() == sysinfo::ProcessStatus::Run {
                        Ok(None)
                    } else {
                        Ok(Some(0))
                    }
                } else {
                    Ok(Some(0))
                }
            }
        }
    }

    /// Kills the process. For a tokio child this awaits delivery of the kill;
    /// for a rescued PID the signal is sent via sysinfo only if the process
    /// still exists (an already-gone process is treated as success).
    pub async fn kill(&mut self) -> crate::Result<()> {
        match self {
            ChildType::TokioChild(child) => {
                Ok(child.kill().await.map_err(IOError::from)?)
            }
            ChildType::RescuedPID(pid) => {
                let mut system = sysinfo::System::new();
                if system.refresh_process(sysinfo::Pid::from_u32(*pid)) {
                    let process = system.process(sysinfo::Pid::from_u32(*pid));
                    if let Some(process) = process {
                        process.kill();
                    }
                }
                Ok(())
            }
        }
    }

    /// Returns the OS PID, or `None` if a tokio child has already exited.
    pub fn id(&self) -> Option<u32> {
        match self {
            ChildType::TokioChild(child) => child.id(),
            ChildType::RescuedPID(pid) => Some(*pid),
        }
    }

    // Caches the process so that it can be restored if the launcher is restarted
    // Stored in the caches/metadata/processes.json file
    pub async fn cache_process(
        &self,
        uuid: uuid::Uuid,
        profile_path_id: ProfilePathId,
        post_command: Option<String>,
    ) -> crate::Result<()> {
        // Fall back to PID 0 if the tokio child already exited; the sysinfo
        // lookup below will then fail with a LauncherError
        let pid = match self {
            ChildType::TokioChild(child) => child.id().unwrap_or(0),
            ChildType::RescuedPID(pid) => *pid,
        };
        let state = crate::State::get().await?;
        let mut system = sysinfo::System::new();
        system.refresh_processes();
        let process =
            system.process(sysinfo::Pid::from_u32(pid)).ok_or_else(|| {
                crate::ErrorKind::LauncherError(format!(
                    "Could not find process {}",
                    pid
                ))
            })?;
        // Snapshot identifying info (start time, name, exe) so a later rescue
        // can verify the PID was not reused by an unrelated process
        let start_time = process.start_time();
        let name = process.name().to_string();
        let Some(path) = process.exe() else {
            return Err(ErrorKind::LauncherError(format!(
                "Cached process {} has no accessable path",
                pid
            ))
            .into());
        };
        let exe = path.to_string_lossy().to_string();
        let cached_process = ProcessCache {
            pid,
            start_time,
            name,
            exe,
            post_command,
            uuid,
            profile_relative_path: profile_path_id,
        };
        let children_path = state
            .directories
            .caches_meta_dir()
            .await
            .join(PROCESSES_JSON);
        // Read-modify-write of the whole cache file; a missing or unreadable
        // file is treated as an empty cache
        let mut children_caches = if let Ok(children_json) =
            read_json::<HashMap<uuid::Uuid, ProcessCache>>(
                &children_path,
                &state.io_semaphore,
            )
            .await
        {
            children_json
        } else {
            HashMap::new()
        };
        children_caches.insert(uuid, cached_process);
        crate::util::fetch::write(
            &children_path,
            &serde_json::to_vec(&children_caches)?,
            &state.io_semaphore,
        )
        .await?;
        Ok(())
    }

    // Removes the process from the cache (ie: on process exit)
    pub async fn remove_cache(&self, uuid: uuid::Uuid) -> crate::Result<()> {
        let state = crate::State::get().await?;
        let children_path = state
            .directories
            .caches_meta_dir()
            .await
            .join(PROCESSES_JSON);
        // Same read-modify-write pattern as cache_process: missing or
        // unreadable file is treated as an empty cache
        let mut children_caches = if let Ok(children_json) =
            read_json::<HashMap<uuid::Uuid, ProcessCache>>(
                &children_path,
                &state.io_semaphore,
            )
            .await
        {
            children_json
        } else {
            HashMap::new()
        };
        children_caches.remove(&uuid);
        crate::util::fetch::write(
            &children_path,
            &serde_json::to_vec(&children_caches)?,
            &state.io_semaphore,
        )
        .await?;
        Ok(())
    }
}
// Minecraft Child, bundles together the PID, the actual Child, and the easily queryable stdout and stderr streams (if needed)
#[derive(Debug)]
pub struct MinecraftChild {
    // Launcher-side identifier for this launch (key in `Children`)
    pub uuid: Uuid,
    // Profile this process was launched from
    pub profile_relative_path: ProfilePathId,
    pub manager: Option<JoinHandle<crate::Result<i32>>>, // None when future has completed and been handled
    // The live process; swapped in place when a post-command replaces Minecraft
    pub current_child: Arc<RwLock<ChildType>>,
    pub last_updated_playtime: DateTime<Utc>, // The last time we updated the playtime for the associated profile
}
impl Children {
    /// Creates an empty registry of managed Minecraft processes.
    pub fn new() -> Self {
        Children(HashMap::new())
    }

    // Loads cached processes from the caches/metadata/processes.json file, re-inserts them into the hashmap, and removes them from the file
    // This will only be called once, on startup. Only processes who match a cached process (name, time started, pid, etc) will be re-inserted
    pub async fn rescue_cache(&mut self) -> crate::Result<()> {
        let state = crate::State::get().await?;
        let children_path = state
            .directories
            .caches_meta_dir()
            .await
            .join(PROCESSES_JSON);
        let mut children_caches = if let Ok(children_json) =
            read_json::<HashMap<uuid::Uuid, ProcessCache>>(
                &children_path,
                &state.io_semaphore,
            )
            .await
        {
            // Overwrite the file with an empty hashmap- we will re-insert the cached processes
            let empty = HashMap::<uuid::Uuid, ProcessCache>::new();
            crate::util::fetch::write(
                &children_path,
                &serde_json::to_vec(&empty)?,
                &state.io_semaphore,
            )
            .await?;
            // Return the cached processes
            children_json
        } else {
            HashMap::new()
        };
        for (_, cache) in children_caches.drain() {
            let uuid = cache.uuid;
            // A failed rescue (PID reused, process gone, ...) is logged but
            // does not abort rescuing the remaining processes
            match self.insert_cached_process(cache).await {
                Ok(child) => {
                    self.0.insert(uuid, child);
                }
                Err(e) => tracing::warn!(
                    "Failed to rescue cached process {}: {}",
                    uuid,
                    e
                ),
            }
        }
        Ok(())
    }

    // Runs the command in process, inserts a child process to keep track of, and returns a reference to the container struct MinecraftChild
    // The threads for stdout and stderr are spawned here
    // Unlike a Hashmap's 'insert', this directly returns the reference to the MinecraftChild rather than any previously stored MinecraftChild that may exist
    // NOTE: the duplicate `#[tracing::instrument(level = "trace", skip(self))]`
    // attribute was removed — it double-wrapped the function in nested spans.
    #[tracing::instrument(skip(
        self,
        uuid,
        mc_command,
        post_command,
        censor_strings
    ))]
    #[theseus_macros::debug_pin]
    pub async fn insert_new_process(
        &mut self,
        uuid: Uuid,
        profile_relative_path: ProfilePathId,
        mut mc_command: Command,
        post_command: Option<String>, // Command to run after minecraft.
        censor_strings: HashMap<String, String>,
    ) -> crate::Result<Arc<RwLock<MinecraftChild>>> {
        // Takes the first element of the commands vector and spawns it
        let mc_proc = mc_command.spawn().map_err(IOError::from)?;
        let child = ChildType::TokioChild(mc_proc);
        // Slots child into manager; a None id means the process died instantly
        let pid = child.id().ok_or_else(|| {
            crate::ErrorKind::LauncherError(
                "Process immediately failed, could not get PID".to_string(),
            )
        })?;
        // Caches process so that it can be restored if the launcher is restarted
        child
            .cache_process(
                uuid,
                profile_relative_path.clone(),
                post_command.clone(),
            )
            .await?;
        let current_child = Arc::new(RwLock::new(child));
        // Background task: waits on the process, tracks playtime, and runs
        // the post-command when Minecraft exits cleanly
        let manager = Some(tokio::spawn(Self::sequential_process_manager(
            uuid,
            post_command,
            pid,
            current_child.clone(),
            profile_relative_path.clone(),
        )));
        emit_process(
            uuid,
            pid,
            ProcessPayloadType::Launched,
            "Launched Minecraft",
        )
        .await?;
        let last_updated_playtime = Utc::now();
        // Create MinecraftChild
        let mchild = MinecraftChild {
            uuid,
            profile_relative_path,
            current_child,
            manager,
            last_updated_playtime,
        };
        let mchild = Arc::new(RwLock::new(mchild));
        self.0.insert(uuid, mchild.clone());
        Ok(mchild)
    }

    // Rescues a cached process, inserts a child process to keep track of, and returns a reference to the container struct MinecraftChild
    // Essentially 'reconnects' to a process that was launched by theseus before the launcher was restarted
    // However, this may not have all the same functionality as a TokioChild, as we only have the PID and not the actual Child
    // Only processes who match a cached process (name, time started, pid, etc) will be re-inserted. The function fails with an error if the process is notably different.
    #[tracing::instrument(skip(self, cached_process))]
    #[theseus_macros::debug_pin]
    pub async fn insert_cached_process(
        &mut self,
        cached_process: ProcessCache,
    ) -> crate::Result<Arc<RwLock<MinecraftChild>>> {
        let _state = crate::State::get().await?;
        // Checks processes, compares cached process to actual process
        // Fails if notably different (meaning that the PID was reused, and we shouldn't reconnect to it)
        {
            let mut system = sysinfo::System::new();
            system.refresh_processes();
            let process = system
                .process(sysinfo::Pid::from_u32(cached_process.pid))
                .ok_or_else(|| {
                    crate::ErrorKind::LauncherError(format!(
                        "Could not find process {}",
                        cached_process.pid
                    ))
                })?;
            if cached_process.start_time != process.start_time() {
                return Err(ErrorKind::LauncherError(format!("Cached process {} has different start time than actual process {}", cached_process.pid, process.start_time())).into());
            }
            if cached_process.name != process.name() {
                return Err(ErrorKind::LauncherError(format!("Cached process {} has different name than actual process {}", cached_process.pid, process.name())).into());
            }
            if let Some(path) = process.exe() {
                if cached_process.exe != path.to_string_lossy() {
                    return Err(ErrorKind::LauncherError(format!("Cached process {} has different exe than actual process {}", cached_process.pid, path.to_string_lossy())).into());
                }
            } else {
                return Err(ErrorKind::LauncherError(format!(
                    "Cached process {} has no accessable path",
                    cached_process.pid
                ))
                .into());
            }
        }
        let child = ChildType::RescuedPID(cached_process.pid);
        // Slots child into manager
        let pid = child.id().ok_or_else(|| {
            crate::ErrorKind::LauncherError(
                "Process immediately failed, could not get PID".to_string(),
            )
        })?;
        // Re-caches process so that it can be restored if the launcher is restarted
        child
            .cache_process(
                cached_process.uuid,
                cached_process.profile_relative_path.clone(),
                cached_process.post_command.clone(),
            )
            .await?;
        let current_child = Arc::new(RwLock::new(child));
        let manager = Some(tokio::spawn(Self::sequential_process_manager(
            cached_process.uuid,
            cached_process.post_command,
            pid,
            current_child.clone(),
            cached_process.profile_relative_path.clone(),
        )));
        emit_process(
            cached_process.uuid,
            pid,
            ProcessPayloadType::Launched,
            "Launched Minecraft",
        )
        .await?;
        let last_updated_playtime = Utc::now();
        // Create MinecraftChild
        let mchild = MinecraftChild {
            uuid: cached_process.uuid,
            profile_relative_path: cached_process.profile_relative_path,
            current_child,
            manager,
            last_updated_playtime,
        };
        let mchild = Arc::new(RwLock::new(mchild));
        self.0.insert(cached_process.uuid, mchild.clone());
        Ok(mchild)
    }

    // Waits on the current child; as the process ends, it spawns the follow-up (post-command) process if it exists
    // By convention, the returned i32 is the last command's exit status, and we exit on the first non-zero exit status
    #[tracing::instrument(skip(current_child))]
    #[theseus_macros::debug_pin]
    async fn sequential_process_manager(
        uuid: Uuid,
        post_command: Option<String>,
        mut current_pid: u32,
        current_child: Arc<RwLock<ChildType>>,
        associated_profile: ProfilePathId,
    ) -> crate::Result<i32> {
        // Wait on current Minecraft Child
        let mut mc_exit_status;
        let mut last_updated_playtime = Utc::now();
        loop {
            if let Some(t) = current_child.write().await.try_wait().await? {
                mc_exit_status = t;
                break;
            }
            // Poll every 50ms so we don't busy-wait
            tokio::time::sleep(tokio::time::Duration::from_millis(50)).await;
            // Auto-update playtime every minute
            let diff = Utc::now()
                .signed_duration_since(last_updated_playtime)
                .num_seconds();
            if diff >= 60 {
                if let Err(e) = profile::edit(&associated_profile, |prof| {
                    prof.metadata.recent_time_played += diff as u64;
                    async { Ok(()) }
                })
                .await
                {
                    tracing::warn!(
                        "Failed to update playtime for profile {}: {}",
                        &associated_profile,
                        e
                    );
                }
                last_updated_playtime = Utc::now();
            }
        }
        // Now fully complete- update playtime one last time
        let diff = Utc::now()
            .signed_duration_since(last_updated_playtime)
            .num_seconds();
        if let Err(e) = profile::edit(&associated_profile, |prof| {
            prof.metadata.recent_time_played += diff as u64;
            async { Ok(()) }
        })
        .await
        {
            tracing::warn!(
                "Failed to update playtime for profile {}: {}",
                &associated_profile,
                e
            );
        }
        // Publish play time update
        // Allow failure, it will be stored locally and sent next time
        // Sent in another thread as first call may take a couple seconds and hold up process ending
        let associated_profile_clone = associated_profile.clone();
        tokio::spawn(async move {
            if let Err(e) =
                profile::try_update_playtime(&associated_profile_clone).await
            {
                tracing::warn!(
                    "Failed to update playtime for profile {}: {}",
                    &associated_profile_clone,
                    e
                );
            }
        });
        {
            // Clear game played for Discord RPC
            // May have other active processes, so we clear to the next running process
            let state = crate::State::get().await?;
            let _ = state.discord_rpc.clear_to_default(true).await;
        }
        // If in tauri, window should show itself again after process exists if it was hidden
        #[cfg(feature = "tauri")]
        {
            let window = crate::EventState::get_main_window().await?;
            if let Some(window) = window {
                window.unminimize()?;
            }
        }
        {
            let current_child = current_child.write().await;
            current_child.remove_cache(uuid).await?;
        }
        // BUGFIX: was `!mc_exit_status == 0`, where `!` bitwise-negates the
        // i32, so the guard only matched an exit status of -1. Any other
        // non-zero status incorrectly fell through and ran the post-command.
        if mc_exit_status != 0 {
            emit_process(
                uuid,
                current_pid,
                ProcessPayloadType::Finished,
                "Exited process",
            )
            .await?;
            return Ok(mc_exit_status); // Err for a non-zero exit is handled in helper
        }
        // If a post-command exist, switch to it and wait on it
        // First, create the command by splitting arguments
        let post_command = if let Some(hook) = post_command {
            let mut cmd = hook.split(' ');
            if let Some(command) = cmd.next() {
                let mut command = Command::new(command);
                command
                    .args(&cmd.collect::<Vec<&str>>())
                    .current_dir(associated_profile.get_full_path().await?);
                Some(command)
            } else {
                None
            }
        } else {
            None
        };
        if let Some(mut m_command) = post_command {
            {
                let mut current_child: tokio::sync::RwLockWriteGuard<
                    '_,
                    ChildType,
                > = current_child.write().await;
                let new_child = m_command.spawn().map_err(IOError::from)?;
                current_pid = new_child.id().ok_or_else(|| {
                    crate::ErrorKind::LauncherError(
                        "Process immediately failed, could not get PID"
                            .to_string(),
                    )
                })?;
                // Swap the post-command process into the same slot so
                // try_wait/kill callers keep working transparently
                *current_child = ChildType::TokioChild(new_child);
            }
            emit_process(
                uuid,
                current_pid,
                ProcessPayloadType::Updated,
                "Completed Minecraft, switching to post-commands",
            )
            .await?;
            loop {
                if let Some(t) = current_child.write().await.try_wait().await? {
                    mc_exit_status = t;
                    break;
                }
                // sleep for 10ms
                tokio::time::sleep(tokio::time::Duration::from_millis(10))
                    .await;
            }
        }
        emit_process(
            uuid,
            current_pid,
            ProcessPayloadType::Finished,
            "Exited process",
        )
        .await?;
        Ok(mc_exit_status)
    }

    // Returns a ref to the child
    pub fn get(&self, uuid: Uuid) -> Option<Arc<RwLock<MinecraftChild>>> {
        self.0.get(&uuid).cloned()
    }

    // Gets all UUID keys
    pub fn keys(&self) -> Vec<Uuid> {
        self.0.keys().cloned().collect()
    }

    // Get exit status of a child by UUID
    // Returns None if the child is still running
    pub async fn exit_status(&self, uuid: Uuid) -> crate::Result<Option<i32>> {
        if let Some(child) = self.get(uuid) {
            let child = child.write().await;
            let status = child.current_child.write().await.try_wait().await?;
            Ok(status)
        } else {
            Ok(None)
        }
    }

    // Gets all UUID keys of running children
    pub async fn running_keys(&self) -> crate::Result<Vec<Uuid>> {
        let mut keys = Vec::new();
        for key in self.keys() {
            if let Some(child) = self.get(key) {
                let child = child.write().await;
                if child
                    .current_child
                    .write()
                    .await
                    .try_wait()
                    .await?
                    .is_none()
                {
                    keys.push(key);
                }
            }
        }
        Ok(keys)
    }

    // Gets all UUID keys of running children with a given profile path
    pub async fn running_keys_with_profile(
        &self,
        profile_path: ProfilePathId,
    ) -> crate::Result<Vec<Uuid>> {
        let running_keys = self.running_keys().await?;
        let mut keys = Vec::new();
        for key in running_keys {
            if let Some(child) = self.get(key) {
                let child = child.read().await;
                if child.profile_relative_path == profile_path {
                    keys.push(key);
                }
            }
        }
        Ok(keys)
    }

    // Gets all profiles of running children
    pub async fn running_profile_paths(
        &self,
    ) -> crate::Result<Vec<ProfilePathId>> {
        let mut profiles = Vec::new();
        for key in self.keys() {
            if let Some(child) = self.get(key) {
                let child = child.write().await;
                if child
                    .current_child
                    .write()
                    .await
                    .try_wait()
                    .await?
                    .is_none()
                {
                    profiles.push(child.profile_relative_path.clone());
                }
            }
        }
        Ok(profiles)
    }

    // Gets all profiles of running children
    // Returns clones because it would be serialized anyway
    pub async fn running_profiles(&self) -> crate::Result<Vec<Profile>> {
        let mut profiles = Vec::new();
        for key in self.keys() {
            if let Some(child) = self.get(key) {
                let child = child.write().await;
                if child
                    .current_child
                    .write()
                    .await
                    .try_wait()
                    .await?
                    .is_none()
                {
                    if let Some(prof) = crate::api::profile::get(
                        &child.profile_relative_path.clone(),
                        None,
                    )
                    .await?
                    {
                        profiles.push(prof);
                    }
                }
            }
        }
        Ok(profiles)
    }
}
impl Default for Children {
fn default() -> Self {
Self::new()
}
}

View File

@@ -0,0 +1,31 @@
use crate::state::DirectoryInfo;
use sqlx::migrate::MigrateDatabase;
use sqlx::sqlite::SqlitePoolOptions;
use sqlx::{Pool, Sqlite};
/// Opens the launcher's SQLite database, creating the settings directory and
/// database file on first run, and returns a connection pool with all bundled
/// migrations applied.
pub(crate) async fn connect() -> crate::Result<Pool<Sqlite>> {
    // The database lives inside the (env-overridable) settings directory
    let settings_dir =
        DirectoryInfo::get_initial_settings_dir().ok_or_else(|| {
            crate::ErrorKind::FSError(
                "Could not find valid config dir".to_string(),
            )
        })?;
    if !settings_dir.exists() {
        crate::util::io::create_dir_all(&settings_dir).await?;
    }

    let db_path = settings_dir.join("app.db");
    let uri = format!("sqlite:{}", db_path.display());

    // First run: create the database file before connecting
    if !Sqlite::database_exists(&uri).await? {
        Sqlite::create_database(&uri).await?;
    }

    let pool = SqlitePoolOptions::new()
        .max_connections(100)
        .connect(&uri)
        .await?;

    // Apply any pending schema migrations embedded at compile time
    sqlx::migrate!().run(&pool).await?;

    Ok(pool)
}

View File

@@ -1,12 +1,9 @@
//! Theseus directory information
use std::fs;
use std::path::PathBuf;
use crate::state::{JavaVersion, Settings};
use crate::util::fetch::IoSemaphore;
use std::path::{Path, PathBuf};
use tokio::fs;
use tokio::sync::RwLock;
use super::{ProfilePathId, Settings};
pub const SETTINGS_FILE_NAME: &str = "settings.json";
pub const CACHES_FOLDER_NAME: &str = "caches";
pub const LAUNCHER_LOGS_FOLDER_NAME: &str = "launcher_logs";
pub const PROFILES_FOLDER_NAME: &str = "profiles";
@@ -15,8 +12,7 @@ pub const METADATA_FOLDER_NAME: &str = "meta";
#[derive(Debug)]
pub struct DirectoryInfo {
pub settings_dir: PathBuf, // Base settings directory- settings.json and icon cache.
pub config_dir: RwLock<PathBuf>, // Base config directory- instances, minecraft downloads, etc. Changeable as a setting.
pub working_dir: PathBuf,
pub config_dir: PathBuf, // Base config directory- instances, minecraft downloads, etc. Changeable as a setting.
}
impl DirectoryInfo {
@@ -24,154 +20,128 @@ impl DirectoryInfo {
// init() is not needed for this function
pub fn get_initial_settings_dir() -> Option<PathBuf> {
Self::env_path("THESEUS_CONFIG_DIR")
.or_else(|| Some(dirs::config_dir()?.join("com.modrinth.theseus")))
}
#[inline]
pub fn get_initial_settings_file() -> crate::Result<PathBuf> {
let settings_dir = Self::get_initial_settings_dir().ok_or(
crate::ErrorKind::FSError(
"Could not find valid config dir".to_string(),
),
)?;
Ok(settings_dir.join("settings.json"))
.or_else(|| Some(dirs::data_dir()?.join("ModrinthApp")))
}
/// Get all paths needed for Theseus to operate properly
#[tracing::instrument]
pub fn init(settings: &Settings) -> crate::Result<Self> {
// Working directory
let working_dir = std::env::current_dir().map_err(|err| {
crate::ErrorKind::FSError(format!(
"Could not open working directory: {err}"
))
})?;
pub async fn init(config_dir: Option<String>) -> crate::Result<Self> {
let settings_dir = Self::get_initial_settings_dir().ok_or(
crate::ErrorKind::FSError(
"Could not find valid settings dir".to_string(),
),
)?;
fs::create_dir_all(&settings_dir).map_err(|err| {
fs::create_dir_all(&settings_dir).await.map_err(|err| {
crate::ErrorKind::FSError(format!(
"Error creating Theseus config directory: {err}"
))
})?;
// config directory (for instances, etc.)
// by default this is the same as the settings directory
let config_dir = settings.loaded_config_dir.clone().ok_or(
crate::ErrorKind::FSError(
"Could not find valid config dir".to_string(),
),
)?;
let config_dir = config_dir
.map(PathBuf::from)
.unwrap_or_else(|| settings_dir.clone());
Ok(Self {
settings_dir,
config_dir: RwLock::new(config_dir),
working_dir,
config_dir,
})
}
/// Get the Minecraft instance metadata directory
#[inline]
pub async fn metadata_dir(&self) -> PathBuf {
self.config_dir.read().await.join(METADATA_FOLDER_NAME)
pub fn metadata_dir(&self) -> PathBuf {
self.config_dir.join(METADATA_FOLDER_NAME)
}
/// Get the Minecraft java versions metadata directory
#[inline]
pub async fn java_versions_dir(&self) -> PathBuf {
self.metadata_dir().await.join("java_versions")
pub fn java_versions_dir(&self) -> PathBuf {
self.metadata_dir().join("java_versions")
}
/// Get the Minecraft versions metadata directory
#[inline]
pub async fn versions_dir(&self) -> PathBuf {
self.metadata_dir().await.join("versions")
pub fn versions_dir(&self) -> PathBuf {
self.metadata_dir().join("versions")
}
/// Get the metadata directory for a given version
#[inline]
pub async fn version_dir(&self, version: &str) -> PathBuf {
self.versions_dir().await.join(version)
pub fn version_dir(&self, version: &str) -> PathBuf {
self.versions_dir().join(version)
}
/// Get the Minecraft libraries metadata directory
#[inline]
pub async fn libraries_dir(&self) -> PathBuf {
self.metadata_dir().await.join("libraries")
pub fn libraries_dir(&self) -> PathBuf {
self.metadata_dir().join("libraries")
}
/// Get the Minecraft assets metadata directory
#[inline]
pub async fn assets_dir(&self) -> PathBuf {
self.metadata_dir().await.join("assets")
pub fn assets_dir(&self) -> PathBuf {
self.metadata_dir().join("assets")
}
/// Get the assets index directory
#[inline]
pub async fn assets_index_dir(&self) -> PathBuf {
self.assets_dir().await.join("indexes")
pub fn assets_index_dir(&self) -> PathBuf {
self.assets_dir().join("indexes")
}
/// Get the assets objects directory
#[inline]
pub async fn objects_dir(&self) -> PathBuf {
self.assets_dir().await.join("objects")
pub fn objects_dir(&self) -> PathBuf {
self.assets_dir().join("objects")
}
/// Get the directory for a specific object
#[inline]
pub async fn object_dir(&self, hash: &str) -> PathBuf {
self.objects_dir().await.join(&hash[..2]).join(hash)
pub fn object_dir(&self, hash: &str) -> PathBuf {
self.objects_dir().join(&hash[..2]).join(hash)
}
/// Get the Minecraft legacy assets metadata directory
#[inline]
pub async fn legacy_assets_dir(&self) -> PathBuf {
self.metadata_dir().await.join("resources")
pub fn legacy_assets_dir(&self) -> PathBuf {
self.metadata_dir().join("resources")
}
/// Get the Minecraft legacy assets metadata directory
#[inline]
pub async fn natives_dir(&self) -> PathBuf {
self.metadata_dir().await.join("natives")
pub fn natives_dir(&self) -> PathBuf {
self.metadata_dir().join("natives")
}
/// Get the natives directory for a version of Minecraft
#[inline]
pub async fn version_natives_dir(&self, version: &str) -> PathBuf {
self.natives_dir().await.join(version)
pub fn version_natives_dir(&self, version: &str) -> PathBuf {
self.natives_dir().join(version)
}
/// Get the directory containing instance icons
#[inline]
pub async fn icon_dir(&self) -> PathBuf {
self.config_dir.read().await.join("icons")
pub fn icon_dir(&self) -> PathBuf {
self.config_dir.join("icons")
}
/// Get the profiles directory for created profiles
#[inline]
pub async fn profiles_dir(&self) -> PathBuf {
self.config_dir.read().await.join(PROFILES_FOLDER_NAME)
pub fn profiles_dir(&self) -> PathBuf {
self.config_dir.join(PROFILES_FOLDER_NAME)
}
/// Gets the logs dir for a given profile
#[inline]
pub async fn profile_logs_dir(
profile_id: &ProfilePathId,
) -> crate::Result<PathBuf> {
Ok(profile_id.get_full_path().await?.join("logs"))
pub fn profile_logs_dir(&self, profile_path: &str) -> PathBuf {
self.profiles_dir().join(profile_path).join("logs")
}
/// Gets the crash reports dir for a given profile
#[inline]
pub async fn crash_reports_dir(
profile_id: &ProfilePathId,
) -> crate::Result<PathBuf> {
Ok(profile_id.get_full_path().await?.join("crash-reports"))
pub fn crash_reports_dir(&self, profile_path: &str) -> PathBuf {
self.profiles_dir().join(profile_path).join("crash-reports")
}
#[inline]
@@ -180,32 +150,140 @@ impl DirectoryInfo {
.map(|d| d.join(LAUNCHER_LOGS_FOLDER_NAME))
}
/// Get the file containing the global database
#[inline]
pub async fn database_file(&self) -> PathBuf {
self.config_dir.read().await.join("data.bin")
}
/// Get the settings file for Theseus
#[inline]
pub fn settings_file(&self) -> PathBuf {
self.settings_dir.join(SETTINGS_FILE_NAME)
}
/// Get the cache directory for Theseus
#[inline]
pub fn caches_dir(&self) -> PathBuf {
self.settings_dir.join(CACHES_FOLDER_NAME)
}
#[inline]
pub async fn caches_meta_dir(&self) -> PathBuf {
self.caches_dir().join("metadata")
}
/// Get path from environment variable
#[inline]
fn env_path(name: &str) -> Option<PathBuf> {
std::env::var_os(name).map(PathBuf::from)
}
pub async fn move_launcher_directory<'a, E>(
settings: &mut Settings,
exec: E,
io_semaphore: &IoSemaphore,
) -> crate::Result<()>
where
E: sqlx::Executor<'a, Database = sqlx::Sqlite> + Copy,
{
if let Some(ref prev_custom_dir) = settings.prev_custom_dir {
let prev_dir = PathBuf::from(prev_custom_dir);
let app_dir = DirectoryInfo::get_initial_settings_dir().ok_or(
crate::ErrorKind::FSError(
"Could not find valid config dir".to_string(),
),
)?;
let move_dir = settings
.custom_dir
.as_ref()
.map(PathBuf::from)
.unwrap_or_else(|| app_dir.clone());
async fn is_dir_writeable(
new_config_dir: &Path,
) -> crate::Result<bool> {
let temp_path = new_config_dir.join(".tmp");
match fs::write(temp_path.clone(), "test").await {
Ok(_) => {
fs::remove_file(temp_path).await?;
Ok(true)
}
Err(e) => {
tracing::error!(
"Error writing to new config dir: {}",
e
);
Ok(false)
}
}
}
async fn move_directory(
source: &Path,
destination: &Path,
io_semaphore: &IoSemaphore,
) -> crate::Result<()> {
if !source.exists() {
crate::util::io::create_dir_all(source).await?;
}
if !destination.exists() {
crate::util::io::create_dir_all(destination).await?;
}
for entry_path in
crate::pack::import::get_all_subfiles(source).await?
{
let relative_path = entry_path.strip_prefix(source)?;
let new_path = destination.join(relative_path);
crate::util::fetch::copy(
&entry_path,
&new_path,
io_semaphore,
)
.await?;
}
Ok(())
}
let new_dir = move_dir.to_string_lossy().to_string();
if prev_dir != move_dir {
if !is_dir_writeable(&move_dir).await? {
settings.custom_dir = Some(prev_custom_dir.clone());
return Ok(());
}
move_directory(
&prev_dir.join(CACHES_FOLDER_NAME),
&app_dir.join(CACHES_FOLDER_NAME),
io_semaphore,
)
.await?;
move_directory(
&prev_dir.join(LAUNCHER_LOGS_FOLDER_NAME),
&app_dir.join(LAUNCHER_LOGS_FOLDER_NAME),
io_semaphore,
)
.await?;
move_directory(
&prev_dir.join(PROFILES_FOLDER_NAME),
&move_dir.join(PROFILES_FOLDER_NAME),
io_semaphore,
)
.await?;
move_directory(
&prev_dir.join(METADATA_FOLDER_NAME),
&move_dir.join(METADATA_FOLDER_NAME),
io_semaphore,
)
.await?;
let java_versions = JavaVersion::get_all(exec).await?;
for (_, mut java_version) in java_versions {
java_version.path = java_version.path.replace(
prev_custom_dir,
new_dir.trim_end_matches('/').trim_end_matches('\\'),
);
java_version.upsert(exec).await?;
}
}
settings.custom_dir = Some(new_dir.clone());
settings.prev_custom_dir = Some(new_dir);
settings.update(exec).await?;
}
Ok(())
}
}

View File

@@ -6,6 +6,7 @@ use discord_rich_presence::{
};
use tokio::sync::RwLock;
use crate::state::{Process, Profile};
use crate::State;
pub struct DiscordGuard {
@@ -16,7 +17,7 @@ pub struct DiscordGuard {
impl DiscordGuard {
/// Initialize discord IPC client, and attempt to connect to it
/// If it fails, it will still return a DiscordGuard, but the client will be unconnected
pub async fn init(is_offline: bool) -> crate::Result<DiscordGuard> {
pub async fn init() -> crate::Result<DiscordGuard> {
let mut dipc =
DiscordIpcClient::new("1123683254248148992").map_err(|e| {
crate::ErrorKind::OtherError(format!(
@@ -25,13 +26,9 @@ impl DiscordGuard {
))
})?;
let connected = if !is_offline {
let res = dipc.connect(); // Do not need to connect to Discord to use app
if res.is_ok() {
Arc::new(AtomicBool::new(true))
} else {
Arc::new(AtomicBool::new(false))
}
let res = dipc.connect(); // Do not need to connect to Discord to use app
let connected = if res.is_ok() {
Arc::new(AtomicBool::new(true))
} else {
Arc::new(AtomicBool::new(false))
};
@@ -56,19 +53,6 @@ impl DiscordGuard {
true
}
// check online
pub async fn check_online(&self) -> bool {
let state = match State::get().await {
Ok(s) => s,
Err(_) => return false,
};
let offline = state.offline.read().await;
if *offline {
return false;
}
true
}
/// Set the activity to the given message
/// First checks if discord is disabled, and if so, clear the activity instead
pub async fn set_activity(
@@ -76,14 +60,10 @@ impl DiscordGuard {
msg: &str,
reconnect_if_fail: bool,
) -> crate::Result<()> {
if !self.check_online().await {
return Ok(());
}
// Check if discord is disabled, and if so, clear the activity instead
let state = State::get().await?;
let settings = state.settings.read().await;
if settings.disable_discord_rpc {
let settings = crate::state::Settings::get(&state.pool).await?;
if !settings.discord_rpc {
Ok(self.clear_activity(true).await?)
} else {
Ok(self.force_set_activity(msg, reconnect_if_fail).await?)
@@ -145,7 +125,7 @@ impl DiscordGuard {
reconnect_if_fail: bool,
) -> crate::Result<()> {
// Attempt to connect if not connected. Do not continue if it fails, as the client.clear_activity can panic if it never was connected
if !self.check_online().await || !self.retry_if_not_ready().await {
if !self.retry_if_not_ready().await {
return Ok(());
}
@@ -184,30 +164,25 @@ impl DiscordGuard {
&self,
reconnect_if_fail: bool,
) -> crate::Result<()> {
let state: Arc<tokio::sync::RwLockReadGuard<'_, State>> =
State::get().await?;
let state = State::get().await?;
{
let settings = state.settings.read().await;
if settings.disable_discord_rpc {
println!("Discord is disabled, clearing activity");
return self.clear_activity(true).await;
}
let settings = crate::state::Settings::get(&state.pool).await?;
if !settings.discord_rpc {
println!("Discord is disabled, clearing activity");
return self.clear_activity(true).await;
}
if let Some(existing_child) = state
.children
.read()
.await
.running_profile_paths()
.await?
.first()
{
self.set_activity(
&format!("Playing {}", existing_child),
reconnect_if_fail,
)
.await?;
let running_profiles = Process::get_all(&state.pool).await?;
if let Some(existing_child) = running_profiles.first() {
let prof =
Profile::get(&existing_child.profile_path, &state.pool).await?;
if let Some(prof) = prof {
self.set_activity(
&format!("Playing {}", prof.name),
reconnect_if_fail,
)
.await?;
}
} else {
self.set_activity("Idling...", reconnect_if_fail).await?;
}

View File

@@ -0,0 +1,155 @@
use crate::event::emit::{emit_profile, emit_warning};
use crate::event::ProfilePayloadType;
use crate::state::{DirectoryInfo, ProfileInstallStage, ProjectType};
use futures::{channel::mpsc::channel, SinkExt, StreamExt};
use notify::{RecommendedWatcher, RecursiveMode};
use notify_debouncer_mini::{new_debouncer, DebounceEventResult, Debouncer};
use std::time::Duration;
use tokio::sync::RwLock;
/// Debounced filesystem watcher, behind a lock so new paths can be
/// registered after initialization (see `watch_profile`).
pub type FileWatcher = RwLock<Debouncer<RecommendedWatcher>>;
/// Creates the debounced file watcher and spawns a background task that
/// reacts to filesystem events under watched profile folders.
///
/// For each debounced event, the path component directly after the
/// profiles folder is taken as the profile name. A `.txt` file appearing
/// under `crash-reports` triggers a crash notification; any other change
/// emits a single `Synced` event per profile per batch.
pub async fn init_watcher() -> crate::Result<FileWatcher> {
    let (mut tx, mut rx) = channel(1);

    // Debounce bursts of FS events into batches ~1s apart; the notify
    // callback is synchronous, so the async send is driven with block_on.
    let file_watcher = new_debouncer(
        Duration::from_secs_f32(1.0),
        move |res: DebounceEventResult| {
            futures::executor::block_on(async {
                tx.send(res).await.unwrap();
            })
        },
    )?;

    tokio::task::spawn(async move {
        let span = tracing::span!(tracing::Level::INFO, "init_watcher");
        tracing::info!(parent: &span, "Initting watcher");
        while let Some(res) = rx.next().await {
            let _span = span.enter();

            match res {
                Ok(events) => {
                    // Emit at most one sync event per profile per batch.
                    let mut visited_profiles = Vec::new();
                    events.iter().for_each(|e| {
                        // The profile name is the path component that
                        // immediately follows the profiles folder.
                        let mut profile_path = None;
                        let mut found = false;
                        for component in e.path.components() {
                            if found {
                                profile_path = Some(
                                    component.as_os_str().to_string_lossy(),
                                );
                                break;
                            }

                            if component.as_os_str()
                                == crate::state::dirs::PROFILES_FOLDER_NAME
                            {
                                found = true;
                            }
                        }

                        if let Some(profile_path) = profile_path {
                            // New .txt under crash-reports => crash report.
                            if e.path
                                .components()
                                .any(|x| x.as_os_str() == "crash-reports")
                                && e.path
                                    .extension()
                                    .map(|x| x == "txt")
                                    .unwrap_or(false)
                            {
                                crash_task(profile_path.to_string());
                            } else if !visited_profiles.contains(&profile_path)
                            {
                                let path = profile_path.to_string();
                                tokio::spawn(async move {
                                    let _ = emit_profile(
                                        &path,
                                        ProfilePayloadType::Synced,
                                    )
                                    .await;
                                });
                                visited_profiles.push(profile_path);
                            }
                        }
                    });
                }
                Err(error) => tracing::warn!("Unable to watch file: {error}"),
            }
        }
    });

    Ok(RwLock::new(file_watcher))
}
/// Watches all existing profiles
/// Registers every existing profile directory with the file watcher.
/// Unreadable entries and non-UTF-8 names are skipped silently.
pub(crate) async fn watch_profiles_init(
    watcher: &FileWatcher,
    dirs: &DirectoryInfo,
) -> crate::Result<()> {
    if let Ok(entries) = std::fs::read_dir(&dirs.profiles_dir()) {
        for entry in entries.flatten() {
            let file_name = entry.file_name();
            if let Some(name) = file_name.to_str() {
                // `.DS_Store` is macOS Finder metadata, not a profile.
                if !name.starts_with(".DS_Store") {
                    watch_profile(name, watcher, dirs).await?;
                }
            }
        }
    }

    Ok(())
}
/// Watches one profile's project folders plus `crash-reports`, creating
/// any folder that does not exist yet so it can be watched.
pub(crate) async fn watch_profile(
    profile_path: &str,
    watcher: &FileWatcher,
    dirs: &DirectoryInfo,
) -> crate::Result<()> {
    let profile_root = dirs.profiles_dir().join(profile_path);

    let folders = ProjectType::iterator()
        .map(|x| x.get_folder())
        .chain(["crash-reports"]);

    for folder in folders {
        let path = profile_root.join(folder);

        // notify cannot watch a missing directory — create it first.
        if !path.exists() {
            crate::util::io::create_dir_all(&path).await?;
        }

        watcher
            .write()
            .await
            .watcher()
            .watch(&path, RecursiveMode::Recursive)?;
    }

    Ok(())
}
/// Spawns a background task that surfaces a crash warning for the given
/// profile. Failures are logged, never propagated.
fn crash_task(path: String) {
    tokio::task::spawn(async move {
        let result = async {
            if let Some(profile) = crate::api::profile::get(&path).await? {
                // Hide warning if profile is not yet installed
                if profile.install_stage == ProfileInstallStage::Installed {
                    emit_warning(&format!("Profile {} has crashed! Visit the logs page to see a crash report.", profile.name)).await?;
                }
            }

            Ok::<(), crate::Error>(())
        }
        .await;

        if let Err(err) = result {
            tracing::warn!("Unable to send crash report to frontend: {err}")
        }
    });
}

View File

@@ -1,66 +1,94 @@
use dashmap::DashMap;
use futures::TryStreamExt;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::PathBuf;
use crate::prelude::JavaVersion;
use crate::util::jre;
// All stored Java versions, chosen by the user
// A wrapper over a Hashmap connecting key -> java version
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct JavaGlobals(HashMap<String, JavaVersion>);
impl JavaGlobals {
pub fn new() -> JavaGlobals {
JavaGlobals(HashMap::new())
}
pub fn insert(&mut self, key: String, java: JavaVersion) {
self.0.insert(key, java);
}
pub fn remove(&mut self, key: &String) {
self.0.remove(key);
}
pub fn get(&self, key: &String) -> Option<&JavaVersion> {
self.0.get(key)
}
pub fn get_mut(&mut self, key: &String) -> Option<&mut JavaVersion> {
self.0.get_mut(key)
}
pub fn count(&self) -> usize {
self.0.len()
}
pub fn keys(&self) -> Vec<String> {
self.0.keys().cloned().collect()
}
// Validates that every path here is a valid Java version and that the version matches the version stored here
// If false, when checked, the user should be prompted to reselect the Java version
pub async fn is_all_valid(&self) -> bool {
for (_, java) in self.0.iter() {
let jre = jre::check_java_at_filepath(
PathBuf::from(&java.path).as_path(),
)
.await;
if let Some(jre) = jre {
if jre.version != java.version {
return false;
}
} else {
return false;
}
}
true
}
/// A Java installation known to the launcher, persisted in the
/// `java_versions` table (keyed by major version).
#[derive(Debug, PartialEq, Eq, Hash, Serialize, Deserialize, Clone)]
pub struct JavaVersion {
    // e.g. 8, 17, 21 — the table's conflict key
    pub major_version: u32,
    // full version string reported by the JRE
    pub version: String,
    pub architecture: String,
    // filesystem path to the Java executable
    pub path: String,
}
impl Default for JavaGlobals {
fn default() -> Self {
Self::new()
impl JavaVersion {
    /// Looks up the stored Java installation for the given major version,
    /// if one has been saved.
    pub async fn get(
        major_version: u32,
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
    ) -> crate::Result<Option<JavaVersion>> {
        // SQLite binds integers as signed.
        let version = major_version as i32;

        let res = sqlx::query!(
            "
            SELECT
                full_version, architecture, path
            FROM java_versions
            WHERE major_version = $1
            ",
            version
        )
        .fetch_optional(exec)
        .await?;

        Ok(res.map(|x| JavaVersion {
            major_version,
            version: x.full_version,
            architecture: x.architecture,
            path: x.path,
        }))
    }

    /// Returns every stored Java installation, keyed by major version.
    pub async fn get_all(
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
    ) -> crate::Result<DashMap<u32, Self>> {
        let res = sqlx::query!(
            "
            SELECT
                major_version, full_version, architecture, path
            FROM java_versions
            "
        )
        .fetch(exec)
        .try_fold(DashMap::new(), |acc, x| {
            acc.insert(
                x.major_version as u32,
                JavaVersion {
                    major_version: x.major_version as u32,
                    version: x.full_version,
                    architecture: x.architecture,
                    path: x.path,
                },
            );
            async move { Ok(acc) }
        })
        .await?;

        Ok(res)
    }

    /// Inserts this installation, replacing any existing row with the same
    /// major version.
    pub async fn upsert(
        &self,
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
    ) -> crate::Result<()> {
        let major_version = self.major_version as i32;

        sqlx::query!(
            "
            INSERT INTO java_versions (major_version, full_version, architecture, path)
            VALUES ($1, $2, $3, $4)
            ON CONFLICT (major_version) DO UPDATE SET
                full_version = $2,
                architecture = $3,
                path = $4
            ",
            major_version,
            self.version,
            self.architecture,
            self.path,
        )
        .execute(exec)
        .await?;

        Ok(())
    }
}

View File

@@ -0,0 +1,513 @@
use crate::data::DirectoryInfo;
use crate::jre::check_jre;
use crate::prelude::ModLoader;
use crate::state;
use crate::state::{
Credentials, DefaultPage, DeviceToken, DeviceTokenKey, DeviceTokenPair,
Hooks, LinkedData, MemorySettings, ModrinthCredentials, Profile,
ProfileInstallStage, Theme, WindowSize,
};
use crate::util::fetch::{read_json, IoSemaphore};
use chrono::{DateTime, Utc};
use p256::ecdsa::SigningKey;
use p256::pkcs8::DecodePrivateKey;
use serde::Deserialize;
use std::collections::HashMap;
use std::path::PathBuf;
use tokio::sync::Semaphore;
use uuid::Uuid;
/// One-shot migration of the legacy JSON-on-disk launcher state into the
/// SQLite database behind `exec`.
///
/// Reads, in order: `settings.json`, the cached Modrinth auth file, the
/// cached Minecraft auth file, and each profile's `profile.json`, and
/// upserts the equivalent rows. Runs at most once — the `migrated` flag in
/// settings short-circuits later calls — and no-ops when no legacy config
/// directory exists. Stored absolute paths are rewritten from the old
/// launcher root to the new one.
pub async fn migrate_legacy_data<'a, E>(exec: E) -> crate::Result<()>
where
    E: sqlx::Executor<'a, Database = sqlx::Sqlite> + Copy,
{
    let mut settings = state::Settings::get(exec).await?;

    // Already migrated on a previous launch — nothing to do.
    if settings.migrated {
        return Ok(());
    };

    // No legacy config directory on this platform: nothing to migrate.
    let old_launcher_root = if let Some(dir) = default_settings_dir() {
        dir
    } else {
        return Ok(());
    };
    let old_launcher_root_str = old_launcher_root.to_string_lossy().to_string();

    let new_launcher_root = DirectoryInfo::get_initial_settings_dir().ok_or(
        crate::ErrorKind::FSError(
            "Could not find valid config dir".to_string(),
        ),
    )?;
    // Strip any trailing separator so the string can be substituted into
    // stored paths that pointed at the old root.
    let new_launcher_root_str = new_launcher_root
        .to_string_lossy()
        .to_string()
        .trim_end_matches('/')
        .trim_end_matches('\\')
        .to_string();

    let io_semaphore = IoSemaphore(Semaphore::new(10));

    let settings_path = old_launcher_root.join("settings.json");
    if let Ok(legacy_settings) =
        read_json::<LegacySettings>(&settings_path, &io_semaphore).await
    {
        // Copy general settings across. Note the legacy opt-out flags are
        // inverted into the new opt-in flags (telemetry, discord_rpc).
        settings.max_concurrent_writes = legacy_settings.max_concurrent_writes;
        settings.max_concurrent_downloads =
            legacy_settings.max_concurrent_downloads;
        settings.theme = match legacy_settings.theme {
            LegacyTheme::Dark => Theme::Dark,
            LegacyTheme::Light => Theme::Light,
            LegacyTheme::Oled => Theme::Oled,
        };
        settings.default_page = match legacy_settings.default_page {
            LegacyDefaultPage::Home => DefaultPage::Home,
            LegacyDefaultPage::Library => DefaultPage::Library,
        };
        settings.collapsed_navigation = legacy_settings.collapsed_navigation;
        settings.advanced_rendering = legacy_settings.advanced_rendering;
        settings.native_decorations = legacy_settings.native_decorations;
        settings.telemetry = !legacy_settings.opt_out_analytics;
        settings.discord_rpc = !legacy_settings.disable_discord_rpc;
        settings.developer_mode = legacy_settings.developer_mode;
        settings.onboarded = legacy_settings.fully_onboarded;
        settings.extra_launch_args = legacy_settings.custom_java_args;
        settings.custom_env_vars = legacy_settings.custom_env_args;
        settings.memory.maximum = legacy_settings.memory.maximum;
        settings.force_fullscreen = legacy_settings.force_fullscreen;
        settings.game_resolution.0 = legacy_settings.game_resolution.0;
        settings.game_resolution.1 = legacy_settings.game_resolution.1;
        settings.hide_on_process_start = legacy_settings.hide_on_process;
        settings.hooks.pre_launch = legacy_settings.hooks.pre_launch;
        settings.hooks.wrapper = legacy_settings.hooks.wrapper;
        settings.hooks.post_exit = legacy_settings.hooks.post_exit;

        // Preserve a user-selected custom data dir, but only if it differs
        // from the default legacy root.
        if let Some(path) = legacy_settings
            .loaded_config_dir
            .clone()
            .and_then(|x| x.to_str().map(|x| x.to_string()))
        {
            if path != old_launcher_root_str {
                settings.custom_dir = Some(path);
            }
        }
        settings.prev_custom_dir = Some(old_launcher_root_str.clone());

        // Re-validate each legacy Java install before storing it; paths
        // under the old root are rewritten to the new root.
        for (_, legacy_version) in legacy_settings.java_globals.0 {
            if let Ok(Some(mut java_version)) =
                check_jre(PathBuf::from(legacy_version.path)).await
            {
                java_version.path = java_version
                    .path
                    .replace(&old_launcher_root_str, &new_launcher_root_str);

                java_version.upsert(exec).await?;
            }
        }

        // Modrinth session, if one was cached.
        let modrinth_auth_path =
            old_launcher_root.join("caches/metadata/auth.json");
        if let Ok(creds) = read_json::<LegacyModrinthCredentials>(
            &modrinth_auth_path,
            &io_semaphore,
        )
        .await
        {
            ModrinthCredentials {
                session: creds.session,
                expires: creds.expires_at,
                user_id: creds.user.id,
                active: true,
            }
            .upsert(exec)
            .await?;
        }

        // Minecraft accounts and the cached device token keypair.
        let minecraft_auth_path =
            old_launcher_root.join("caches/metadata/minecraft_auth.json");
        if let Ok(minecraft_auth) = read_json::<LegacyMinecraftAuthStore>(
            &minecraft_auth_path,
            &io_semaphore,
        )
        .await
        {
            let minecraft_users_len = minecraft_auth.users.len();
            for (uuid, credential) in minecraft_auth.users {
                Credentials {
                    id: credential.id,
                    username: credential.username,
                    access_token: credential.access_token,
                    refresh_token: credential.refresh_token,
                    expires: credential.expires,
                    // The legacy default user (or a sole account) becomes
                    // the active one.
                    active: minecraft_auth.default_user == Some(uuid)
                        || minecraft_users_len == 1,
                }
                .upsert(exec)
                .await?;
            }

            // Only carry the device token over if its private key still
            // parses and its id is a valid UUID.
            if let Some(device_token) = minecraft_auth.token {
                if let Ok(private_key) =
                    SigningKey::from_pkcs8_pem(&device_token.private_key)
                {
                    if let Ok(uuid) = Uuid::parse_str(&device_token.id) {
                        DeviceTokenPair {
                            token: DeviceToken {
                                issue_instant: device_token.token.issue_instant,
                                not_after: device_token.token.not_after,
                                token: device_token.token.token,
                                display_claims: device_token
                                    .token
                                    .display_claims,
                            },
                            key: DeviceTokenKey {
                                id: uuid,
                                key: private_key,
                                x: device_token.x,
                                y: device_token.y,
                            },
                        }
                        .upsert(exec)
                        .await?;
                    }
                }
            }
        }

        // Profiles live under the (possibly custom) config dir's
        // `profiles` folder, one subdirectory per profile.
        if let Ok(profiles_dir) = std::fs::read_dir(
            &legacy_settings
                .loaded_config_dir
                .unwrap_or(old_launcher_root)
                .join("profiles"),
        ) {
            for entry in profiles_dir.flatten() {
                if entry.path().is_dir() {
                    let profile_path = entry.path().join("profile.json");

                    if let Ok(profile) =
                        read_json::<LegacyProfile>(&profile_path, &io_semaphore)
                            .await
                    {
                        Profile {
                            path: profile.path,
                            install_stage: match profile.install_stage {
                                LegacyProfileInstallStage::Installed => {
                                    ProfileInstallStage::Installed
                                }
                                LegacyProfileInstallStage::Installing => {
                                    ProfileInstallStage::Installing
                                }
                                LegacyProfileInstallStage::PackInstalling => {
                                    ProfileInstallStage::PackInstalling
                                }
                                LegacyProfileInstallStage::NotInstalled => {
                                    ProfileInstallStage::NotInstalled
                                }
                            },
                            name: profile.metadata.name,
                            // Icon paths may be absolute under the old root.
                            icon_path: profile.metadata.icon.map(|x| {
                                x.replace(
                                    &old_launcher_root_str,
                                    &new_launcher_root_str,
                                )
                            }),
                            game_version: profile.metadata.game_version,
                            loader: match profile.metadata.loader {
                                LegacyModLoader::Vanilla => ModLoader::Vanilla,
                                LegacyModLoader::Forge => ModLoader::Forge,
                                LegacyModLoader::Fabric => ModLoader::Fabric,
                                LegacyModLoader::Quilt => ModLoader::Quilt,
                                LegacyModLoader::NeoForge => {
                                    ModLoader::NeoForge
                                }
                            },
                            loader_version: profile
                                .metadata
                                .loader_version
                                .map(|x| x.id),
                            groups: profile.metadata.groups,
                            // Only keep modpack linkage when all three
                            // fields are present.
                            linked_data: profile.metadata.linked_data.and_then(
                                |x| {
                                    if let Some(project_id) = x.project_id {
                                        if let Some(version_id) = x.version_id {
                                            if let Some(locked) = x.locked {
                                                return Some(LinkedData {
                                                    project_id,
                                                    version_id,
                                                    locked,
                                                });
                                            }
                                        }
                                    }

                                    None
                                },
                            ),
                            created: profile.metadata.date_created,
                            modified: profile.metadata.date_modified,
                            last_played: profile.metadata.last_played,
                            submitted_time_played: profile
                                .metadata
                                .submitted_time_played,
                            recent_time_played: profile
                                .metadata
                                .recent_time_played,
                            // Per-profile Java override, rewritten to the
                            // new root like the global installs above.
                            java_path: profile.java.as_ref().and_then(|x| {
                                x.override_version.clone().map(|x| {
                                    x.path.replace(
                                        &old_launcher_root_str,
                                        &new_launcher_root_str,
                                    )
                                })
                            }),
                            extra_launch_args: profile
                                .java
                                .as_ref()
                                .and_then(|x| x.extra_arguments.clone()),
                            custom_env_vars: profile
                                .java
                                .and_then(|x| x.custom_env_args),
                            memory: profile
                                .memory
                                .map(|x| MemorySettings { maximum: x.maximum }),
                            force_fullscreen: profile.fullscreen,
                            game_resolution: profile
                                .resolution
                                .map(|x| WindowSize(x.0, x.1)),
                            hooks: Hooks {
                                pre_launch: profile
                                    .hooks
                                    .as_ref()
                                    .and_then(|x| x.pre_launch.clone()),
                                wrapper: profile
                                    .hooks
                                    .as_ref()
                                    .and_then(|x| x.wrapper.clone()),
                                post_exit: profile
                                    .hooks
                                    .and_then(|x| x.post_exit),
                            },
                        }
                        .upsert(exec)
                        .await?;
                    }
                }
            }
        }

        // Mark migration complete so this never runs again.
        settings.migrated = true;
        settings.update(exec).await?;
    }

    Ok(())
}
/// Shape of the legacy `settings.json` file. Fields that were added over
/// the file format's lifetime carry `#[serde(default)]` so older files
/// still deserialize.
#[derive(Deserialize, Debug, Clone)]
struct LegacySettings {
    pub theme: LegacyTheme,
    pub memory: LegacyMemorySettings,
    #[serde(default)]
    pub force_fullscreen: bool,
    pub game_resolution: LegacyWindowSize,
    pub custom_java_args: Vec<String>,
    pub custom_env_args: Vec<(String, String)>,
    pub java_globals: LegacyJavaGlobals,
    pub hooks: LegacyHooks,
    pub max_concurrent_downloads: usize,
    pub max_concurrent_writes: usize,
    pub collapsed_navigation: bool,
    // Legacy opt-out flag; inverted into the new `discord_rpc` setting.
    #[serde(default)]
    pub disable_discord_rpc: bool,
    #[serde(default)]
    pub hide_on_process: bool,
    #[serde(default)]
    pub native_decorations: bool,
    #[serde(default)]
    pub default_page: LegacyDefaultPage,
    #[serde(default)]
    pub developer_mode: bool,
    // Legacy opt-out flag; inverted into the new `telemetry` setting.
    #[serde(default)]
    pub opt_out_analytics: bool,
    #[serde(default)]
    pub advanced_rendering: bool,
    #[serde(default)]
    pub fully_onboarded: bool,
    // Falls back to the platform default config dir when absent.
    #[serde(default = "default_settings_dir")]
    pub loaded_config_dir: Option<PathBuf>,
}
/// Location of the legacy launcher config directory, if the platform
/// exposes a user config directory at all.
fn default_settings_dir() -> Option<PathBuf> {
    dirs::config_dir().map(|base| base.join("com.modrinth.theseus"))
}
/// Legacy serialization of the UI theme (snake_case in JSON).
#[derive(Debug, Clone, Copy, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum LegacyTheme {
    Dark,
    Light,
    Oled,
}
/// Legacy serialization of the page the launcher opens on.
#[derive(Deserialize, Default, Debug, Clone, Copy)]
enum LegacyDefaultPage {
    #[default]
    Home,
    Library,
}
/// Legacy launch hook commands (global or per-profile).
#[derive(Deserialize, Debug, Clone)]
struct LegacyHooks {
    pub pre_launch: Option<String>,
    pub wrapper: Option<String>,
    pub post_exit: Option<String>,
}
/// Legacy JVM memory settings; only the maximum allocation was stored.
#[derive(Deserialize, Debug, Clone, Copy)]
struct LegacyMemorySettings {
    pub maximum: u32,
}
/// Legacy game window size as a (width, height) pair.
#[derive(Deserialize, Debug, Clone, Copy)]
struct LegacyWindowSize(pub u16, pub u16);
/// Legacy map of user-chosen Java installs, keyed by a string label.
#[derive(Debug, Deserialize, Clone)]
struct LegacyJavaGlobals(HashMap<String, LegacyJavaVersion>);
/// Legacy description of a single Java installation.
#[derive(Debug, PartialEq, Eq, Hash, Deserialize, Clone)]
struct LegacyJavaVersion {
    pub path: String,
    pub version: String,
    pub architecture: String,
}
/// Minimal slice of the legacy cached Modrinth user — only the id is
/// needed for migration.
#[derive(Deserialize, Clone, Debug)]
struct LegacyModrinthUser {
    pub id: String,
}
/// Legacy cached Modrinth session (`caches/metadata/auth.json`).
#[derive(Deserialize, Clone, Debug)]
struct LegacyModrinthCredentials {
    pub session: String,
    pub expires_at: DateTime<Utc>,
    pub user: LegacyModrinthUser,
}
/// Legacy cached Minecraft auth state
/// (`caches/metadata/minecraft_auth.json`).
#[derive(Deserialize, Debug)]
struct LegacyMinecraftAuthStore {
    pub users: HashMap<Uuid, LegacyCredentials>,
    pub token: Option<LegacySaveDeviceToken>,
    // Which of `users` was selected; migration marks it active.
    pub default_user: Option<Uuid>,
}
/// Legacy per-account Minecraft credentials.
#[derive(Deserialize, Clone, Debug)]
struct LegacyCredentials {
    pub id: Uuid,
    pub username: String,
    pub access_token: String,
    pub refresh_token: String,
    pub expires: DateTime<Utc>,
}
/// Legacy on-disk device token: the PEM-encoded private key plus the
/// issued token and key coordinates.
#[derive(Deserialize, Debug)]
struct LegacySaveDeviceToken {
    pub id: String,
    // PKCS#8 PEM; re-parsed with `SigningKey::from_pkcs8_pem` on migration
    pub private_key: String,
    pub x: String,
    pub y: String,
    pub token: LegacyDeviceToken,
}
/// Legacy device token payload (PascalCase keys in the stored JSON).
#[derive(Deserialize, Clone, Debug)]
#[serde(rename_all = "PascalCase")]
struct LegacyDeviceToken {
    pub issue_instant: DateTime<Utc>,
    pub not_after: DateTime<Utc>,
    pub token: String,
    pub display_claims: HashMap<String, serde_json::Value>,
}
/// Shape of a legacy per-profile `profile.json` file.
#[derive(Deserialize, Clone, Debug)]
struct LegacyProfile {
    #[serde(default)]
    pub install_stage: LegacyProfileInstallStage,
    #[serde(default)]
    pub path: String,
    pub metadata: LegacyProfileMetadata,
    pub java: Option<LegacyJavaSettings>,
    pub memory: Option<LegacyMemorySettings>,
    pub resolution: Option<LegacyWindowSize>,
    pub fullscreen: Option<bool>,
    pub hooks: Option<LegacyHooks>,
}
/// Legacy profile metadata: name, versions, modpack linkage and playtime
/// counters.
#[derive(Deserialize, Clone, Debug)]
struct LegacyProfileMetadata {
    pub name: String,
    pub icon: Option<String>,
    #[serde(default)]
    pub groups: Vec<String>,
    pub game_version: String,
    #[serde(default)]
    pub loader: LegacyModLoader,
    pub loader_version: Option<LegacyLoaderVersion>,
    pub linked_data: Option<LegacyLinkedData>,
    #[serde(default)]
    pub date_created: DateTime<Utc>,
    #[serde(default)]
    pub date_modified: DateTime<Utc>,
    pub last_played: Option<DateTime<Utc>>,
    #[serde(default)]
    pub submitted_time_played: u64,
    #[serde(default)]
    pub recent_time_played: u64,
}
/// Legacy serialization of the mod loader (lowercase in JSON); absent
/// means vanilla.
#[derive(Debug, Eq, PartialEq, Clone, Copy, Deserialize, Default)]
#[serde(rename_all = "lowercase")]
enum LegacyModLoader {
    #[default]
    Vanilla,
    Forge,
    Fabric,
    Quilt,
    NeoForge,
}
/// Legacy link between a profile and the Modrinth modpack it was
/// installed from. All fields optional — migration only keeps complete
/// entries.
#[derive(Deserialize, Clone, Debug)]
struct LegacyLinkedData {
    pub project_id: Option<String>,
    pub version_id: Option<String>,
    // Older files omit this; treated as locked by default.
    #[serde(default = "default_locked")]
    pub locked: Option<bool>,
}
/// Serde default for `LegacyLinkedData::locked`: legacy files without the
/// field are treated as locked to their modpack version.
fn default_locked() -> Option<bool> {
    Option::from(true)
}
/// Legacy per-profile Java overrides.
#[derive(Deserialize, Clone, Debug)]
struct LegacyJavaSettings {
    pub override_version: Option<LegacyJavaVersion>,
    pub extra_arguments: Option<Vec<String>>,
    pub custom_env_args: Option<Vec<(String, String)>>,
}
/// Minimal slice of the legacy loader version — only the id is migrated.
#[derive(Deserialize, Clone, Debug)]
struct LegacyLoaderVersion {
    pub id: String,
}
/// Legacy serialization of a profile's install progress (snake_case in
/// JSON); absent means not installed.
#[derive(Deserialize, Clone, Copy, Debug, Default, Eq, PartialEq)]
#[serde(rename_all = "snake_case")]
enum LegacyProfileInstallStage {
    Installed,
    Installing,
    PackInstalling,
    #[default]
    NotInstalled,
}

View File

@@ -1,173 +0,0 @@
//! Theseus metadata
use crate::data::DirectoryInfo;
use crate::util::fetch::{read_json, write, IoSemaphore};
use crate::State;
use daedalus::{
minecraft::{fetch_version_manifest, VersionManifest as MinecraftManifest},
modded::{
fetch_manifest as fetch_loader_manifest, Manifest as LoaderManifest,
},
};
use serde::{Deserialize, Serialize};
/// Base URL of the Modrinth metadata service.
const METADATA_URL: &str = "https://meta.modrinth.com";
/// Launcher metadata: the Minecraft version manifest plus a manifest for
/// each supported mod loader, fetched from meta.modrinth.com.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Metadata {
    pub minecraft: MinecraftManifest,
    pub forge: LoaderManifest,
    pub fabric: LoaderManifest,
    pub quilt: LoaderManifest,
    pub neoforge: LoaderManifest,
}
impl Metadata {
    /// Builds the manifest URL for a given meta.modrinth.com namespace.
    fn get_manifest(name: &str) -> String {
        format!("{METADATA_URL}/{name}/v0/manifest.json")
    }

    /// Fetches the Minecraft manifest and every loader manifest
    /// concurrently; fails if any single fetch fails.
    pub async fn fetch() -> crate::Result<Self> {
        let (minecraft, forge, fabric, quilt, neoforge) = tokio::try_join! {
            async {
                let url = Self::get_manifest("minecraft");
                fetch_version_manifest(Some(&url)).await
            },
            async {
                let url = Self::get_manifest("forge");
                fetch_loader_manifest(&url).await
            },
            async {
                let url = Self::get_manifest("fabric");
                fetch_loader_manifest(&url).await
            },
            async {
                let url = Self::get_manifest("quilt");
                fetch_loader_manifest(&url).await
            },
            async {
                // NeoForge is served under the "neo" namespace.
                let url = Self::get_manifest("neo");
                fetch_loader_manifest(&url).await
            }
        }?;

        Ok(Self {
            minecraft,
            forge,
            fabric,
            quilt,
            neoforge,
        })
    }

    // Loads metadata, preferring in order: the on-disk cache, a live fetch
    // (when `fetch_online` is set), then the on-disk backup copy.
    #[tracing::instrument(skip(io_semaphore))]
    #[theseus_macros::debug_pin]
    pub async fn init(
        dirs: &DirectoryInfo,
        fetch_online: bool,
        io_semaphore: &IoSemaphore,
    ) -> crate::Result<Self> {
        let mut metadata = None;

        let metadata_path = dirs.caches_meta_dir().await.join("metadata.json");
        let metadata_backup_path =
            dirs.caches_meta_dir().await.join("metadata.json.bak");

        if let Ok(metadata_json) =
            read_json::<Metadata>(&metadata_path, io_semaphore).await
        {
            metadata = Some(metadata_json);
        } else if fetch_online {
            let res = async {
                let metadata_fetch = Self::fetch().await?;

                // Write both the primary cache file and its backup copy.
                write(
                    &metadata_path,
                    &serde_json::to_vec(&metadata_fetch).unwrap_or_default(),
                    io_semaphore,
                )
                .await?;
                write(
                    &metadata_backup_path,
                    &serde_json::to_vec(&metadata_fetch).unwrap_or_default(),
                    io_semaphore,
                )
                .await?;

                metadata = Some(metadata_fetch);

                Ok::<(), crate::Error>(())
            }
            .await;

            // A failed online fetch is non-fatal here; the error below
            // only fires if no source produced metadata at all.
            match res {
                Ok(()) => {}
                Err(err) => {
                    tracing::warn!("Unable to fetch launcher metadata: {err}")
                }
            }
        } else if let Ok(metadata_json) =
            read_json::<Metadata>(&metadata_backup_path, io_semaphore).await
        {
            // Primary cache unreadable: use the backup and restore it as
            // the primary file.
            metadata = Some(metadata_json);

            std::fs::copy(&metadata_backup_path, &metadata_path).map_err(
                |err| {
                    crate::ErrorKind::FSError(format!(
                        "Error restoring metadata backup: {err}"
                    ))
                    .as_error()
                },
            )?;
        }

        if let Some(meta) = metadata {
            Ok(meta)
        } else {
            Err(
                crate::ErrorKind::NoValueFor(String::from("launcher metadata"))
                    .as_error(),
            )
        }
    }

    /// Re-fetches metadata, refreshes the on-disk cache (backing up the
    /// old copy first), and swaps it into the running state. Failures are
    /// logged, never propagated.
    pub async fn update() {
        let res = async {
            let metadata_fetch = Metadata::fetch().await?;
            let state = State::get().await?;

            let metadata_path = state
                .directories
                .caches_meta_dir()
                .await
                .join("metadata.json");
            let metadata_backup_path = state
                .directories
                .caches_meta_dir()
                .await
                .join("metadata.json.bak");

            if metadata_path.exists() {
                std::fs::copy(&metadata_path, &metadata_backup_path)?;
            }

            write(
                &metadata_path,
                &serde_json::to_vec(&metadata_fetch)?,
                &state.io_semaphore,
            )
            .await?;

            let mut old_metadata = state.metadata.write().await;
            *old_metadata = metadata_fetch;

            Ok::<(), crate::Error>(())
        }
        .await;

        match res {
            Ok(()) => {}
            Err(err) => {
                tracing::warn!("Unable to update launcher metadata: {err}")
            }
        };
    }
}

View File

@@ -1,10 +1,11 @@
use crate::data::DirectoryInfo;
use crate::util::fetch::{read_json, write, IoSemaphore, REQWEST_CLIENT};
use crate::{ErrorKind, State};
use crate::util::fetch::REQWEST_CLIENT;
use crate::ErrorKind;
use base64::prelude::{BASE64_STANDARD, BASE64_URL_SAFE_NO_PAD};
use base64::Engine;
use byteorder::BigEndian;
use chrono::{DateTime, Duration, Utc};
use chrono::{DateTime, Duration, TimeZone, Utc};
use dashmap::DashMap;
use futures::TryStreamExt;
use p256::ecdsa::signature::Signer;
use p256::ecdsa::{Signature, SigningKey, VerifyingKey};
use p256::pkcs8::{DecodePrivateKey, EncodePrivateKey, LineEnding};
@@ -73,17 +74,6 @@ pub enum MinecraftAuthenticationError {
NoUserHash,
}
const AUTH_JSON: &str = "minecraft_auth.json";
#[derive(Serialize, Deserialize, Debug)]
pub struct SaveDeviceToken {
pub id: String,
pub private_key: String,
pub x: String,
pub y: String,
pub token: DeviceToken,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct MinecraftLoginFlow {
pub verifier: String,
@@ -92,327 +82,119 @@ pub struct MinecraftLoginFlow {
pub redirect_uri: String,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct MinecraftAuthStore {
pub users: HashMap<Uuid, Credentials>,
pub token: Option<SaveDeviceToken>,
pub default_user: Option<Uuid>,
}
impl MinecraftAuthStore {
#[tracing::instrument]
pub async fn init(
dirs: &DirectoryInfo,
io_semaphore: &IoSemaphore,
) -> crate::Result<Self> {
let auth_path = dirs.caches_meta_dir().await.join(AUTH_JSON);
let store = read_json(&auth_path, io_semaphore).await.ok();
if let Some(store) = store {
Ok(store)
} else {
Ok(Self {
users: HashMap::new(),
token: None,
default_user: None,
})
}
}
#[tracing::instrument(skip(self))]
pub async fn save(&self) -> crate::Result<()> {
let state = State::get().await?;
let auth_path =
state.directories.caches_meta_dir().await.join(AUTH_JSON);
write(&auth_path, &serde_json::to_vec(&self)?, &state.io_semaphore)
#[tracing::instrument]
pub async fn login_begin(
exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite> + Copy,
) -> crate::Result<MinecraftLoginFlow> {
let (pair, current_date, valid_date) =
DeviceTokenPair::refresh_and_get_device_token(Utc::now(), false, exec)
.await?;
Ok(())
}
let verifier = generate_oauth_challenge();
let mut hasher = sha2::Sha256::new();
hasher.update(&verifier);
let result = hasher.finalize();
let challenge = BASE64_URL_SAFE_NO_PAD.encode(result);
#[tracing::instrument(skip(self))]
async fn refresh_and_get_device_token(
&mut self,
current_date: DateTime<Utc>,
force_generate: bool,
) -> crate::Result<(DeviceTokenKey, DeviceToken, DateTime<Utc>, bool)> {
macro_rules! generate_key {
($self:ident, $generate_key:expr, $device_token:expr, $SaveDeviceToken:path) => {{
let key = generate_key()?;
let res = device_token(&key, current_date).await?;
self.token = Some(SaveDeviceToken {
id: key.id.clone(),
private_key: key
.key
.to_pkcs8_pem(LineEnding::default())
.map_err(|err| {
MinecraftAuthenticationError::PEMSerialize(err)
})?
.to_string(),
x: key.x.clone(),
y: key.y.clone(),
token: res.value.clone(),
});
self.save().await?;
(key, res.value, res.date, true)
}};
}
let (key, token, date, valid_date) = if let Some(ref token) = self.token
{
if let Ok(private_key) =
SigningKey::from_pkcs8_pem(&token.private_key)
{
if token.token.not_after > Utc::now() && !force_generate {
(
DeviceTokenKey {
id: token.id.clone(),
key: private_key,
x: token.x.clone(),
y: token.y.clone(),
},
token.token.clone(),
current_date,
match sisu_authenticate(
&pair.token.token,
&challenge,
&pair.key,
current_date,
)
.await
{
Ok((session_id, redirect_uri)) => Ok(MinecraftLoginFlow {
verifier,
challenge,
session_id,
redirect_uri: redirect_uri.value.msa_oauth_redirect,
}),
Err(err) => {
if !valid_date {
let (pair, current_date, _) =
DeviceTokenPair::refresh_and_get_device_token(
Utc::now(),
false,
)
} else {
let key = DeviceTokenKey {
id: token.id.clone(),
key: private_key,
x: token.x.clone(),
y: token.y.clone(),
};
let res = device_token(&key, current_date).await?;
(key, res.value, res.date, true)
}
} else {
generate_key!(self, generate_key, device_token, SaveDeviceToken)
}
} else {
generate_key!(self, generate_key, device_token, SaveDeviceToken)
};
Ok((key, token, date, valid_date))
}
#[tracing::instrument(skip(self))]
pub async fn login_begin(&mut self) -> crate::Result<MinecraftLoginFlow> {
let (key, token, current_date, valid_date) =
self.refresh_and_get_device_token(Utc::now(), false).await?;
let verifier = generate_oauth_challenge();
let mut hasher = sha2::Sha256::new();
hasher.update(&verifier);
let result = hasher.finalize();
let challenge = BASE64_URL_SAFE_NO_PAD.encode(result);
match sisu_authenticate(&token.token, &challenge, &key, current_date)
.await
{
Ok((session_id, redirect_uri)) => Ok(MinecraftLoginFlow {
verifier,
challenge,
session_id,
redirect_uri: redirect_uri.value.msa_oauth_redirect,
}),
Err(err) => {
if !valid_date {
let (key, token, current_date, _) = self
.refresh_and_get_device_token(Utc::now(), false)
.await?;
let verifier = generate_oauth_challenge();
let mut hasher = sha2::Sha256::new();
hasher.update(&verifier);
let result = hasher.finalize();
let challenge = BASE64_URL_SAFE_NO_PAD.encode(result);
let (session_id, redirect_uri) = sisu_authenticate(
&token.token,
&challenge,
&key,
current_date,
exec,
)
.await?;
Ok(MinecraftLoginFlow {
verifier,
challenge,
session_id,
redirect_uri: redirect_uri.value.msa_oauth_redirect,
})
} else {
Err(crate::ErrorKind::from(err).into())
}
let verifier = generate_oauth_challenge();
let mut hasher = sha2::Sha256::new();
hasher.update(&verifier);
let result = hasher.finalize();
let challenge = BASE64_URL_SAFE_NO_PAD.encode(result);
let (session_id, redirect_uri) = sisu_authenticate(
&pair.token.token,
&challenge,
&pair.key,
current_date,
)
.await?;
Ok(MinecraftLoginFlow {
verifier,
challenge,
session_id,
redirect_uri: redirect_uri.value.msa_oauth_redirect,
})
} else {
Err(crate::ErrorKind::from(err).into())
}
}
}
}
#[tracing::instrument(skip(self))]
pub async fn login_finish(
&mut self,
code: &str,
flow: MinecraftLoginFlow,
) -> crate::Result<Credentials> {
let (key, token, _, _) =
self.refresh_and_get_device_token(Utc::now(), false).await?;
let oauth_token = oauth_token(code, &flow.verifier).await?;
let sisu_authorize = sisu_authorize(
Some(&flow.session_id),
&oauth_token.value.access_token,
&token.token,
&key,
oauth_token.date,
)
.await?;
let xbox_token = xsts_authorize(
sisu_authorize.value,
&token.token,
&key,
sisu_authorize.date,
)
.await?;
let minecraft_token = minecraft_token(xbox_token.value).await?;
minecraft_entitlements(&minecraft_token.access_token).await?;
let profile = minecraft_profile(&minecraft_token.access_token).await?;
let profile_id = profile.id.unwrap_or_default();
let credentials = Credentials {
id: profile_id,
username: profile.name,
access_token: minecraft_token.access_token,
refresh_token: oauth_token.value.refresh_token,
expires: oauth_token.date
+ Duration::seconds(oauth_token.value.expires_in as i64),
};
self.users.insert(profile_id, credentials.clone());
if self.default_user.is_none() {
self.default_user = Some(profile_id);
}
self.save().await?;
Ok(credentials)
}
async fn refresh_token(
&mut self,
creds: &Credentials,
) -> crate::Result<Option<Credentials>> {
let cred_id = creds.id;
let profile_name = creds.username.clone();
let oauth_token = oauth_refresh(&creds.refresh_token).await?;
let (key, token, current_date, _) = self
.refresh_and_get_device_token(oauth_token.date, false)
#[tracing::instrument]
pub async fn login_finish(
code: &str,
flow: MinecraftLoginFlow,
exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite> + Copy,
) -> crate::Result<Credentials> {
let (pair, _, _) =
DeviceTokenPair::refresh_and_get_device_token(Utc::now(), false, exec)
.await?;
let sisu_authorize = sisu_authorize(
None,
&oauth_token.value.access_token,
&token.token,
&key,
current_date,
)
.await?;
let oauth_token = oauth_token(code, &flow.verifier).await?;
let sisu_authorize = sisu_authorize(
Some(&flow.session_id),
&oauth_token.value.access_token,
&pair.token.token,
&pair.key,
oauth_token.date,
)
.await?;
let xbox_token = xsts_authorize(
sisu_authorize.value,
&token.token,
&key,
sisu_authorize.date,
)
.await?;
let xbox_token = xsts_authorize(
sisu_authorize.value,
&pair.token.token,
&pair.key,
sisu_authorize.date,
)
.await?;
let minecraft_token = minecraft_token(xbox_token.value).await?;
let minecraft_token = minecraft_token(xbox_token.value).await?;
minecraft_entitlements(&minecraft_token.access_token).await?;
let val = Credentials {
id: cred_id,
username: profile_name,
access_token: minecraft_token.access_token,
refresh_token: oauth_token.value.refresh_token,
expires: oauth_token.date
+ Duration::seconds(oauth_token.value.expires_in as i64),
};
let profile = minecraft_profile(&minecraft_token.access_token).await?;
self.users.insert(val.id, val.clone());
self.save().await?;
let profile_id = profile.id.unwrap_or_default();
Ok(Some(val))
}
let credentials = Credentials {
id: profile_id,
username: profile.name,
access_token: minecraft_token.access_token,
refresh_token: oauth_token.value.refresh_token,
expires: oauth_token.date
+ Duration::seconds(oauth_token.value.expires_in as i64),
active: true,
};
#[tracing::instrument(skip(self))]
pub async fn get_default_credential(
&mut self,
) -> crate::Result<Option<Credentials>> {
let credentials = if let Some(default_user) = self.default_user {
if let Some(creds) = self.users.get(&default_user) {
Some(creds)
} else {
self.users.values().next()
}
} else {
self.users.values().next()
};
credentials.upsert(exec).await?;
if let Some(creds) = credentials {
if self.default_user != Some(creds.id) {
self.default_user = Some(creds.id);
self.save().await?;
}
if creds.expires < Utc::now() {
let old_credentials = creds.clone();
let res = self.refresh_token(&old_credentials).await;
match res {
Ok(val) => Ok(val),
Err(err) => {
if let ErrorKind::MinecraftAuthenticationError(
MinecraftAuthenticationError::Request {
ref source,
..
},
) = *err.raw
{
if source.is_connect() || source.is_timeout() {
return Ok(Some(old_credentials));
}
}
Err(err)
}
}
} else {
Ok(Some(creds.clone()))
}
} else {
Ok(None)
}
}
#[tracing::instrument(skip(self))]
/// Remove the account with the given id from the in-memory user map and
/// persist the change to disk; returns the removed credentials, if any.
pub async fn remove(
    &mut self,
    id: Uuid,
) -> crate::Result<Option<Credentials>> {
    let val = self.users.remove(&id);
    // Persist immediately so the removal survives a restart.
    self.save().await?;
    Ok(val)
}
Ok(credentials)
}
#[derive(Serialize, Deserialize, Clone, Debug)]
@@ -422,6 +204,343 @@ pub struct Credentials {
pub access_token: String,
pub refresh_token: String,
pub expires: DateTime<Utc>,
pub active: bool,
}
impl Credentials {
/// Refresh this account's Minecraft session by re-running the full auth
/// chain: MSA OAuth refresh -> device token -> SISU authorize -> XSTS ->
/// Minecraft token, then persist the updated credentials.
///
/// NOTE(review): each step depends on the previous step's output, so the
/// order of these awaits must not be changed.
async fn refresh(
    &mut self,
    exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite> + Copy,
) -> crate::Result<()> {
    // Step 1: exchange the stored OAuth refresh token for a fresh access token.
    let oauth_token = oauth_refresh(&self.refresh_token).await?;
    // Step 2: fetch (or mint) the device token/key pair used to sign requests.
    let (pair, current_date, _) =
        DeviceTokenPair::refresh_and_get_device_token(
            oauth_token.date,
            false,
            exec,
        )
        .await?;

    // Step 3: SISU authorization (no session id on a refresh).
    let sisu_authorize = sisu_authorize(
        None,
        &oauth_token.value.access_token,
        &pair.token.token,
        &pair.key,
        current_date,
    )
    .await?;

    // Step 4: XSTS token from the SISU grant.
    let xbox_token = xsts_authorize(
        sisu_authorize.value,
        &pair.token.token,
        &pair.key,
        sisu_authorize.date,
    )
    .await?;

    // Step 5: exchange the Xbox token for a Minecraft access token.
    let minecraft_token = minecraft_token(xbox_token.value).await?;

    // Update in-memory fields, then persist to the minecraft_users table.
    self.access_token = minecraft_token.access_token;
    self.refresh_token = oauth_token.value.refresh_token;
    // Expiry is tracked from the OAuth token's issue date plus its lifetime.
    self.expires = oauth_token.date
        + Duration::seconds(oauth_token.value.expires_in as i64);
    self.upsert(exec).await?;

    Ok(())
}
#[tracing::instrument]
/// Return the active account's credentials, refreshing them first when
/// the session has expired. On a pure connectivity failure during the
/// refresh, the stale credentials are returned instead of an error so
/// offline play keeps working.
pub async fn get_default_credential(
    exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite> + Copy,
) -> crate::Result<Option<Credentials>> {
    // No account marked active -> nothing to return.
    let Some(mut creds) = Self::get_active(exec).await? else {
        return Ok(None);
    };

    // Session still valid: hand it back untouched.
    if creds.expires >= Utc::now() {
        return Ok(Some(creds));
    }

    // Expired: attempt a full refresh of the auth chain.
    match creds.refresh(exec).await {
        Ok(()) => Ok(Some(creds)),
        Err(err) => {
            // A network-level failure should not log the user out; fall
            // back to the stale (but locally usable) credentials.
            if let ErrorKind::MinecraftAuthenticationError(
                MinecraftAuthenticationError::Request { ref source, .. },
            ) = *err.raw
            {
                if source.is_connect() || source.is_timeout() {
                    return Ok(Some(creds));
                }
            }
            Err(err)
        }
    }
}
/// Fetch the single account currently marked active, if any.
pub async fn get_active(
    exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
) -> crate::Result<Option<Self>> {
    let res = sqlx::query!(
        "
        SELECT
            uuid, active, username, access_token, refresh_token, expires
        FROM minecraft_users
        WHERE active = TRUE
        "
    )
    .fetch_optional(exec)
    .await?;

    Ok(res.map(|x| Self {
        // NOTE(review): a malformed stored uuid silently becomes the nil
        // UUID here rather than surfacing an error — confirm intended.
        id: Uuid::parse_str(&x.uuid).unwrap_or_default(),
        username: x.username,
        access_token: x.access_token,
        refresh_token: x.refresh_token,
        // `expires` is stored as seconds; out-of-range values fall back
        // to "now" (i.e. immediately expired).
        expires: Utc
            .timestamp_opt(x.expires, 0)
            .single()
            .unwrap_or_else(Utc::now),
        active: x.active == 1,
    }))
}
/// Load every stored Minecraft account, keyed by account UUID.
pub async fn get_all(
    exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
) -> crate::Result<DashMap<Uuid, Self>> {
    let res = sqlx::query!(
        "
        SELECT
            uuid, active, username, access_token, refresh_token, expires
        FROM minecraft_users
        "
    )
    .fetch(exec)
    // Stream rows and fold them into the map, short-circuiting on error.
    .try_fold(DashMap::new(), |acc, x| {
        // NOTE(review): unparseable uuids collapse to the nil UUID, so two
        // corrupt rows would overwrite each other in the map.
        let uuid = Uuid::parse_str(&x.uuid).unwrap_or_default();

        acc.insert(
            uuid,
            Self {
                id: uuid,
                username: x.username,
                access_token: x.access_token,
                refresh_token: x.refresh_token,
                // Seconds timestamp; out-of-range falls back to "now".
                expires: Utc
                    .timestamp_opt(x.expires, 0)
                    .single()
                    .unwrap_or_else(Utc::now),
                active: x.active == 1,
            },
        );

        async move { Ok(acc) }
    })
    .await?;

    Ok(res)
}
/// Insert or update this account row. When the account is marked active,
/// all other rows are first demoted so at most one row has active = TRUE.
///
/// NOTE(review): the demote + upsert are two separate statements on `exec`,
/// not a transaction — a crash in between could leave no active account.
pub async fn upsert(
    &self,
    exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite> + Copy,
) -> crate::Result<()> {
    // Stored as a unix-seconds integer column.
    let expires = self.expires.timestamp();
    let uuid = self.id.as_hyphenated().to_string();

    if self.active {
        // Enforce the "single active account" invariant.
        sqlx::query!(
            "
            UPDATE minecraft_users
            SET active = FALSE
            ",
        )
        .execute(exec)
        .await?;
    }

    sqlx::query!(
        "
        INSERT INTO minecraft_users (uuid, active, username, access_token, refresh_token, expires)
        VALUES ($1, $2, $3, $4, $5, $6)
        ON CONFLICT (uuid) DO UPDATE SET
            active = $2,
            username = $3,
            access_token = $4,
            refresh_token = $5,
            expires = $6
        ",
        uuid,
        self.active,
        self.username,
        self.access_token,
        self.refresh_token,
        expires,
    )
    .execute(exec)
    .await?;

    Ok(())
}
/// Delete the stored account with the given UUID; a no-op if absent.
pub async fn remove(
    uuid: Uuid,
    exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
) -> crate::Result<()> {
    // Rows are keyed by the hyphenated textual form of the UUID.
    let id = uuid.as_hyphenated().to_string();

    sqlx::query!(
        "
        DELETE FROM minecraft_users WHERE uuid = $1
        ",
        id,
    )
    .execute(exec)
    .await?;

    Ok(())
}
}
/// A device token together with the signing key it was issued against,
/// used when authenticating against Xbox/Minecraft services.
pub struct DeviceTokenPair {
    // Token returned by the device-auth endpoint.
    pub token: DeviceToken,
    // Locally-generated signing key (see `generate_key`).
    pub key: DeviceTokenKey,
}
impl DeviceTokenPair {
#[tracing::instrument(skip(exec))]
/// Return a usable device token/key pair, minting or re-issuing as needed.
///
/// Returns `(pair, date, updated)` where `date` is the request date for
/// subsequent calls and `updated` is true when a network round-trip was
/// made to (re)issue the token.
async fn refresh_and_get_device_token(
    current_date: DateTime<Utc>,
    force_generate: bool,
    exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite> + Copy,
) -> crate::Result<(Self, DateTime<Utc>, bool)> {
    let pair = Self::get(exec).await?;

    if let Some(mut pair) = pair {
        if pair.token.not_after > Utc::now() && !force_generate {
            // Cached token still valid: reuse it without any network call.
            Ok((pair, current_date, false))
        } else {
            // Re-issue a token for the existing key and persist it.
            let res = device_token(&pair.key, current_date).await?;

            pair.token = res.value;
            pair.upsert(exec).await?;

            Ok((pair, res.date, true))
        }
    } else {
        // First run: generate a fresh signing key, then request a token.
        let key = generate_key()?;
        let res = device_token(&key, current_date).await?;

        let pair = Self {
            key,
            token: res.value,
        };

        pair.upsert(exec).await?;

        Ok((pair, res.date, true))
    }
}
/// Load the persisted device token/key pair, if present and decodable.
///
/// Returns `Ok(None)` (rather than an error) when the stored uuid or PEM
/// private key fails to parse, which causes a fresh key to be generated
/// by the caller.
async fn get(
    exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
) -> crate::Result<Option<Self>> {
    let res = sqlx::query!(
        r#"
        SELECT
            uuid, private_key, x, y, issue_instant, not_after, token, json(display_claims) as "display_claims!: serde_json::Value"
        FROM minecraft_device_tokens
        "#
    )
    .fetch_optional(exec)
    .await?;

    if let Some(x) = res {
        if let Ok(uuid) = Uuid::parse_str(&x.uuid) {
            // The private key is stored PEM-encoded (see `upsert`).
            if let Ok(private_key) =
                SigningKey::from_pkcs8_pem(&x.private_key)
            {
                return Ok(Some(Self {
                    token: DeviceToken {
                        // Timestamps are stored as unix seconds; fall
                        // back to "now" for out-of-range values.
                        issue_instant: Utc
                            .timestamp_opt(x.issue_instant, 0)
                            .single()
                            .unwrap_or_else(Utc::now),
                        not_after: Utc
                            .timestamp_opt(x.not_after, 0)
                            .single()
                            .unwrap_or_else(Utc::now),
                        token: x.token,
                        // Claims are stored as JSON; an undecodable blob
                        // degrades to the default claims value.
                        display_claims: serde_json::from_value(
                            x.display_claims,
                        )
                        .unwrap_or_default(),
                    },
                    key: DeviceTokenKey {
                        id: uuid,
                        key: private_key,
                        x: x.x,
                        y: x.y,
                    },
                }));
            }
        }
    }

    Ok(None)
}
/// Persist this pair into the `minecraft_device_tokens` table. The row id
/// is fixed at 0, so at most one device token is ever stored.
pub async fn upsert(
    &self,
    exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
) -> crate::Result<()> {
    let uuid = self.key.id.as_hyphenated().to_string();
    // Timestamps are stored as unix seconds.
    let issue_instant = self.token.issue_instant.timestamp();
    let not_after = self.token.not_after.timestamp();

    // The signing key is serialized to PKCS#8 PEM for storage; `get`
    // reverses this with `SigningKey::from_pkcs8_pem`.
    let key = self
        .key
        .key
        .to_pkcs8_pem(LineEnding::default())
        .map_err(MinecraftAuthenticationError::PEMSerialize)?
        .to_string();

    let display_claims = serde_json::to_string(&self.token.display_claims)?;

    sqlx::query!(
        "
        INSERT INTO minecraft_device_tokens (id, uuid, private_key, x, y, issue_instant, not_after, token, display_claims)
        VALUES (0, $1, $2, $3, $4, $5, $6, $7, $8)
        ON CONFLICT (id) DO UPDATE SET
            uuid = $1,
            private_key = $2,
            x = $3,
            y = $4,
            issue_instant = $5,
            not_after = $6,
            token = $7,
            display_claims = jsonb($8)
        ",
        uuid,
        key,
        self.key.x,
        self.key.y,
        issue_instant,
        not_after,
        self.token.token,
        display_claims,
    )
    .execute(exec)
    .await?;

    Ok(())
}
}
const MICROSOFT_CLIENT_ID: &str = "00000000402b5328";
@@ -455,7 +574,7 @@ pub async fn device_token(
json!({
"Properties": {
"AuthMethod": "ProofOfPossession",
"Id": format!("{{{}}}", key.id),
"Id": format!("{{{}}}", key.id.to_string().to_uppercase()),
"DeviceType": "Win32",
"Version": "10.16.0",
"ProofKey": {
@@ -905,7 +1024,7 @@ where
}
pub struct DeviceTokenKey {
pub id: String,
pub id: Uuid,
pub key: SigningKey,
pub x: String,
pub y: String,
@@ -913,7 +1032,7 @@ pub struct DeviceTokenKey {
#[tracing::instrument]
fn generate_key() -> Result<DeviceTokenKey, MinecraftAuthenticationError> {
let id = Uuid::new_v4().to_string().to_uppercase();
let uuid = Uuid::new_v4();
let signing_key = SigningKey::random(&mut OsRng);
let public_key = VerifyingKey::from(&signing_key);
@@ -921,7 +1040,7 @@ fn generate_key() -> Result<DeviceTokenKey, MinecraftAuthenticationError> {
let encoded_point = public_key.to_encoded_point(false);
Ok(DeviceTokenKey {
id,
id: uuid,
key: signing_key,
x: BASE64_URL_SAFE_NO_PAD.encode(
encoded_point.x().ok_or_else(|| {

View File

@@ -1,125 +1,91 @@
//! Theseus state management system
use crate::event::emit::{emit_loading, emit_offline, init_loading_unsafe};
use std::path::PathBuf;
use crate::event::emit::{emit_loading, init_loading_unsafe};
use crate::event::LoadingBarType;
use crate::loading_join;
use crate::util::fetch::{self, FetchSemaphore, IoSemaphore};
use notify::RecommendedWatcher;
use notify_debouncer_mini::{new_debouncer, DebounceEventResult, Debouncer};
use crate::util::fetch::{FetchSemaphore, IoSemaphore};
use std::sync::Arc;
use std::time::Duration;
use tokio::join;
use tokio::sync::{OnceCell, RwLock, Semaphore};
use tokio::sync::{OnceCell, Semaphore};
use futures::{channel::mpsc::channel, SinkExt, StreamExt};
use crate::state::fs_watcher::FileWatcher;
use sqlx::SqlitePool;
// Submodules
mod dirs;
pub use self::dirs::*;
mod metadata;
pub use self::metadata::*;
mod profiles;
pub use self::profiles::*;
mod settings;
pub use self::settings::*;
mod projects;
pub use self::projects::*;
mod children;
pub use self::children::*;
mod tags;
pub use self::tags::*;
mod process;
pub use self::process::*;
mod java_globals;
pub use self::java_globals::*;
mod safe_processes;
pub use self::safe_processes::*;
mod discord;
pub use self::discord::*;
mod minecraft_auth;
pub use self::minecraft_auth::*;
mod cache;
pub use self::cache::*;
mod db;
pub mod fs_watcher;
mod mr_auth;
pub use self::mr_auth::*;
mod legacy_converter;
// Global state
// RwLock on state only has concurrent reads, except for config dir change which takes control of the State
static LAUNCHER_STATE: OnceCell<RwLock<State>> = OnceCell::const_new();
static LAUNCHER_STATE: OnceCell<Arc<State>> = OnceCell::const_new();
pub struct State {
/// Whether or not the launcher is currently operating in 'offline mode'
pub offline: RwLock<bool>,
/// Information on the location of files used in the launcher
pub directories: DirectoryInfo,
/// Semaphore used to limit concurrent network requests and avoid errors
pub fetch_semaphore: FetchSemaphore,
/// Stored maximum number of sempahores of current fetch_semaphore
pub fetch_semaphore_max: RwLock<u32>,
/// Semaphore used to limit concurrent I/O and avoid errors
pub io_semaphore: IoSemaphore,
/// Stored maximum number of sempahores of current io_semaphore
pub io_semaphore_max: RwLock<u32>,
/// Launcher metadata
pub metadata: RwLock<Metadata>,
/// Launcher configuration
pub settings: RwLock<Settings>,
/// Reference to minecraft process children
pub children: RwLock<Children>,
/// Launcher profile metadata
pub(crate) profiles: RwLock<Profiles>,
/// Launcher tags
pub(crate) tags: RwLock<Tags>,
/// Launcher processes that should be safely exited on shutdown
pub(crate) safety_processes: RwLock<SafeProcesses>,
/// Launcher user account info
pub(crate) users: RwLock<MinecraftAuthStore>,
/// Modrinth Credentials Store
pub credentials: RwLock<CredentialsStore>,
/// Modrinth auth flow
pub modrinth_auth_flow: RwLock<Option<ModrinthAuthFlow>>,
/// Semaphore to limit concurrent API requests. This is separate from the fetch semaphore
/// to keep API functionality while the app is performing intensive tasks.
pub api_semaphore: FetchSemaphore,
/// Discord RPC
pub discord_rpc: DiscordGuard,
/// File watcher debouncer
pub(crate) file_watcher: RwLock<Debouncer<RecommendedWatcher>>,
pub(crate) pool: SqlitePool,
pub(crate) file_watcher: FileWatcher,
}
impl State {
/// Get the current launcher state, initializing it if needed
pub async fn get(
) -> crate::Result<Arc<tokio::sync::RwLockReadGuard<'static, Self>>> {
Ok(Arc::new(
LAUNCHER_STATE
.get_or_try_init(Self::initialize_state)
.await?
.read()
.await,
))
/// Initialize the global launcher state (only the first call does real
/// work), then clean up stale cached process entries from previous runs.
pub async fn init() -> crate::Result<()> {
    let state = LAUNCHER_STATE
        .get_or_try_init(Self::initialize_state)
        .await?;

    // Reconcile the processes table with what is actually still running.
    Process::garbage_collect(&state.pool).await?;

    Ok(())
}
/// Get the current launcher state, initializing it if needed
/// Takes writing control of the state, blocking all other uses of it
/// Only used for state change such as changing the config directory
pub async fn get_write(
) -> crate::Result<tokio::sync::RwLockWriteGuard<'static, Self>> {
Ok(LAUNCHER_STATE
.get_or_try_init(Self::initialize_state)
.await?
.write()
.await)
/// Get the current launcher state, waiting for initialization
pub async fn get() -> crate::Result<Arc<Self>> {
if !LAUNCHER_STATE.initialized() {
while !LAUNCHER_STATE.initialized() {}
}
Ok(Arc::clone(
LAUNCHER_STATE.get().expect("State is not initialized!"),
))
}
pub fn initialized() -> bool {
@@ -127,8 +93,7 @@ impl State {
}
#[tracing::instrument]
#[theseus_macros::debug_pin]
async fn initialize_state() -> crate::Result<RwLock<State>> {
async fn initialize_state() -> crate::Result<Arc<Self>> {
let loading_bar = init_loading_unsafe(
LoadingBarType::StateInit,
100.0,
@@ -136,275 +101,50 @@ impl State {
)
.await?;
// Settings
let settings =
Settings::init(&DirectoryInfo::get_initial_settings_file()?)
.await?;
let pool = db::connect().await?;
let directories = DirectoryInfo::init(&settings)?;
legacy_converter::migrate_legacy_data(&pool).await?;
emit_loading(&loading_bar, 10.0, None).await?;
let mut settings = Settings::get(&pool).await?;
let mut file_watcher = init_watcher().await?;
let fetch_semaphore =
FetchSemaphore(Semaphore::new(settings.max_concurrent_downloads));
let io_semaphore =
IoSemaphore(Semaphore::new(settings.max_concurrent_writes));
let api_semaphore =
FetchSemaphore(Semaphore::new(settings.max_concurrent_downloads));
let fetch_semaphore = FetchSemaphore(RwLock::new(Semaphore::new(
settings.max_concurrent_downloads,
)));
let io_semaphore = IoSemaphore(RwLock::new(Semaphore::new(
settings.max_concurrent_writes,
)));
emit_loading(&loading_bar, 10.0, None).await?;
let is_offline = !fetch::check_internet(3).await;
let metadata_fut =
Metadata::init(&directories, !is_offline, &io_semaphore);
let profiles_fut = Profiles::init(&directories, &mut file_watcher);
let tags_fut = Tags::init(
&directories,
!is_offline,
DirectoryInfo::move_launcher_directory(
&mut settings,
&pool,
&io_semaphore,
&fetch_semaphore,
&CredentialsStore(None),
);
let users_fut = MinecraftAuthStore::init(&directories, &io_semaphore);
let creds_fut = CredentialsStore::init(&directories, &io_semaphore);
// Launcher data
let (metadata, profiles, tags, users, creds) = loading_join! {
Some(&loading_bar), 70.0, Some("Loading metadata");
metadata_fut,
profiles_fut,
tags_fut,
users_fut,
creds_fut,
}?;
)
.await?;
let safety_processes = SafeProcesses::new();
let directories = DirectoryInfo::init(settings.custom_dir).await?;
let discord_rpc = DiscordGuard::init(is_offline).await?;
if !settings.disable_discord_rpc && !is_offline {
emit_loading(&loading_bar, 10.0, None).await?;
let discord_rpc = DiscordGuard::init().await?;
if settings.discord_rpc {
// Add default Idling to discord rich presence
// Force add to avoid recursion
let _ = discord_rpc.force_set_activity("Idling...", true).await;
}
let children = Children::new();
// Starts a loop of checking if we are online, and updating
Self::offine_check_loop();
let file_watcher = fs_watcher::init_watcher().await?;
fs_watcher::watch_profiles_init(&file_watcher, &directories).await?;
emit_loading(&loading_bar, 10.0, None).await?;
Ok::<RwLock<Self>, crate::Error>(RwLock::new(Self {
offline: RwLock::new(is_offline),
Ok(Arc::new(Self {
directories,
fetch_semaphore,
fetch_semaphore_max: RwLock::new(
settings.max_concurrent_downloads as u32,
),
io_semaphore,
io_semaphore_max: RwLock::new(
settings.max_concurrent_writes as u32,
),
metadata: RwLock::new(metadata),
settings: RwLock::new(settings),
profiles: RwLock::new(profiles),
users: RwLock::new(users),
children: RwLock::new(children),
credentials: RwLock::new(creds),
tags: RwLock::new(tags),
api_semaphore,
discord_rpc,
safety_processes: RwLock::new(safety_processes),
file_watcher: RwLock::new(file_watcher),
modrinth_auth_flow: RwLock::new(None),
pool,
file_watcher,
}))
}
/// Starts a loop of checking if we are online, and updating
// NOTE(review): "offine" is a typo for "offline", but the name is part of
// the public interface so it is left unchanged here.
pub fn offine_check_loop() {
    tokio::task::spawn(async {
        loop {
            // Re-fetch state each iteration; errors are ignored so the
            // background loop survives transient failures.
            let state = Self::get().await;
            if let Ok(state) = state {
                let _ = state.refresh_offline().await;
            }

            // Wait 5 seconds
            tokio::time::sleep(Duration::from_secs(5)).await;
        }
    });
}
/// Updates state with data from the web, if we are online
pub fn update() {
    tokio::task::spawn(async {
        if let Ok(state) = crate::State::get().await {
            // Only hit the network when not in offline mode.
            if !*state.offline.read().await {
                // Run all remote refreshes concurrently; results are
                // intentionally ignored — these are best-effort updates.
                let res1 = Profiles::update_modrinth_versions();
                let res2 = Tags::update();
                let res3 = Metadata::update();
                let res4 = Profiles::update_projects();
                let res6 = CredentialsStore::update_creds();

                let _ = join!(res1, res2, res3, res4, res6);
            }
        }
    });
}
#[tracing::instrument]
#[theseus_macros::debug_pin]
/// Synchronize in-memory state with persistent state
pub async fn sync() -> crate::Result<()> {
    let state = Self::get().await?;

    // Settings and profiles are flushed on separate spawned tasks so the
    // two writes proceed concurrently.
    let sync_settings = async {
        let state = Arc::clone(&state);

        tokio::spawn(async move {
            let reader = state.settings.read().await;
            reader.sync(&state.directories.settings_file()).await?;
            Ok::<_, crate::Error>(())
        })
        .await?
    };

    let sync_profiles = async {
        let state = Arc::clone(&state);

        tokio::spawn(async move {
            let profiles = state.profiles.read().await;
            profiles.sync().await?;
            Ok::<_, crate::Error>(())
        })
        .await?
    };

    // Propagate the first error from either task.
    tokio::try_join!(sync_settings, sync_profiles)?;
    Ok(())
}
/// Reset IO semaphore to default values
/// This will block until all uses of the semaphore are complete, so it should only be called
/// when we are not in the middle of downloading something (ie: changing the settings!)
pub async fn reset_io_semaphore(&self) {
    let settings = self.settings.read().await;

    let mut io_semaphore = self.io_semaphore.0.write().await;
    let mut total_permits = self.io_semaphore_max.write().await;

    // Wait to get all permits back
    let _ = io_semaphore.acquire_many(*total_permits).await;

    // Reset the semaphore
    io_semaphore.close();

    // Re-create with the (possibly changed) configured limit and record
    // the new permit count for the next reset.
    *total_permits = settings.max_concurrent_writes as u32;
    *io_semaphore = Semaphore::new(settings.max_concurrent_writes);
}
/// Reset IO semaphore to default values
/// This will block until all uses of the semaphore are complete, so it should only be called
/// when we are not in the middle of downloading something (ie: changing the settings!)
// NOTE(review): despite the doc text and local name, this resets the
// *fetch* semaphore (network downloads), mirroring reset_io_semaphore.
pub async fn reset_fetch_semaphore(&self) {
    let settings = self.settings.read().await;

    let mut io_semaphore = self.fetch_semaphore.0.write().await;
    let mut total_permits = self.fetch_semaphore_max.write().await;

    // Wait to get all permits back
    let _ = io_semaphore.acquire_many(*total_permits).await;

    // Reset the semaphore
    io_semaphore.close();

    *total_permits = settings.max_concurrent_downloads as u32;
    *io_semaphore = Semaphore::new(settings.max_concurrent_downloads);
}
/// Refreshes whether or not the launcher should be offline, by whether or not there is an internet connection
pub async fn refresh_offline(&self) -> crate::Result<()> {
    let is_online = fetch::check_internet(3).await;

    let mut offline = self.offline.write().await;

    // The invariant is `offline == !is_online`. If the two already
    // *differ* the stored flag matches reality, so nothing needs doing;
    // only when they are equal has the connectivity state flipped.
    if *offline != is_online {
        return Ok(());
    }

    // State changed: notify listeners, then update the flag.
    emit_offline(!is_online).await?;
    *offline = !is_online;
    Ok(())
}
}
/// Create the debounced filesystem watcher used to keep profile state in
/// sync with on-disk changes, and spawn the task that reacts to events.
///
/// Events are debounced for 2 seconds, forwarded over a channel from the
/// watcher's callback thread, and handled on a tokio task.
pub async fn init_watcher() -> crate::Result<Debouncer<RecommendedWatcher>> {
    let (mut tx, mut rx) = channel(1);

    let file_watcher = new_debouncer(
        Duration::from_secs_f32(2.0),
        move |res: DebounceEventResult| {
            // The debouncer callback runs on a non-async thread, so
            // block on the channel send to hand events to the tokio side.
            futures::executor::block_on(async {
                tx.send(res).await.unwrap();
            })
        },
    )?;

    tokio::task::spawn(async move {
        let span = tracing::span!(tracing::Level::INFO, "init_watcher");
        tracing::info!(parent: &span, "Initting watcher");
        while let Some(res) = rx.next().await {
            let _span = span.enter();

            match res {
                Ok(mut events) => {
                    // Tracks profile dirs already synced in this batch so
                    // each profile is only synced once per event burst.
                    let mut visited_paths = Vec::new();
                    // sort events by e.path
                    events.sort_by(|a, b| a.path.cmp(&b.path));
                    events.iter().for_each(|e| {
                        // Walk the path components up to and including the
                        // directory immediately under "profiles" — that
                        // directory is the profile root.
                        let mut new_path = PathBuf::new();
                        let mut components_iterator = e.path.components();
                        let mut found = false;
                        for component in components_iterator.by_ref() {
                            new_path.push(component);
                            if found {
                                break;
                            }
                            if component.as_os_str() == "profiles" {
                                found = true;
                            }
                        }
                        // if any remain, it's a subfile of the profile folder and not the profile folder itself
                        let subfile = components_iterator.next().is_some();

                        // At this point, new_path is the path to the profile, and subfile is whether it's a subfile of the profile or not
                        let profile_path_id =
                            ProfilePathId::new(PathBuf::from(
                                new_path.file_name().unwrap_or_default(),
                            ));

                        // A .txt file under crash-reports triggers crash
                        // handling rather than a project sync.
                        if e.path
                            .components()
                            .any(|x| x.as_os_str() == "crash-reports")
                            && e.path
                                .extension()
                                .map(|x| x == "txt")
                                .unwrap_or(false)
                        {
                            Profile::crash_task(profile_path_id);
                        } else if !visited_paths.contains(&new_path) {
                            if subfile {
                                // Change inside a profile: re-sync its projects.
                                Profile::sync_projects_task(
                                    profile_path_id,
                                    false,
                                );
                                visited_paths.push(new_path);
                            } else {
                                // Change at the profile dir itself: re-scan
                                // which profiles exist.
                                Profiles::sync_available_profiles_task(
                                    profile_path_id,
                                );
                            }
                        }
                    });
                }
                Err(error) => tracing::warn!("Unable to watch file: {error}"),
            }
        }
    });

    Ok(file_watcher)
}

View File

@@ -1,37 +1,180 @@
use crate::config::MODRINTH_API_URL;
use crate::state::DirectoryInfo;
use crate::util::fetch::{
fetch_advanced, read_json, write, FetchSemaphore, IoSemaphore,
};
use crate::State;
use chrono::{DateTime, Duration, Utc};
use crate::util::fetch::{fetch_advanced, FetchSemaphore};
use chrono::{DateTime, Duration, TimeZone, Utc};
use dashmap::DashMap;
use futures::TryStreamExt;
use reqwest::Method;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::collections::HashMap;
const AUTH_JSON: &str = "auth.json";
/// A Modrinth account profile as returned by the `user` API endpoint.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ModrinthUser {
    pub id: String,
    pub username: String,
    // Display name, distinct from the login username.
    pub name: Option<String>,
    pub avatar_url: Option<String>,
    pub bio: Option<String>,
    // Account creation time.
    pub created: DateTime<Utc>,
    pub role: String,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ModrinthCredentials {
pub session: String,
pub expires_at: DateTime<Utc>,
pub user: ModrinthUser,
pub expires: DateTime<Utc>,
pub user_id: String,
pub active: bool,
}
#[derive(Serialize)]
impl ModrinthCredentials {
    /// Return the active Modrinth session, refreshing it with the API if
    /// it has expired.
    ///
    /// NOTE(review): any refresh failure — including a network failure,
    /// since the fetch result is discarded with `.ok()` — deletes the
    /// stored user and returns `None`, effectively logging the user out
    /// while offline. Confirm this is intended.
    pub async fn get_and_refresh(
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite> + Copy,
        semaphore: &FetchSemaphore,
    ) -> crate::Result<Option<Self>> {
        let creds = Self::get_active(exec).await?;

        if let Some(mut creds) = creds {
            if creds.expires < Utc::now() {
                // Only the session token is needed from the response.
                #[derive(Deserialize)]
                struct Session {
                    session: String,
                }

                let resp = fetch_advanced(
                    Method::POST,
                    &format!("{MODRINTH_API_URL}session/refresh"),
                    None,
                    None,
                    Some(("Authorization", &*creds.session)),
                    None,
                    semaphore,
                    exec,
                )
                .await
                .ok()
                .and_then(|resp| serde_json::from_slice::<Session>(&resp).ok());

                if let Some(value) = resp {
                    // Refresh succeeded: store the new session with a
                    // two-week expiry.
                    creds.session = value.session;
                    creds.expires = Utc::now() + Duration::weeks(2);
                    creds.upsert(exec).await?;

                    Ok(Some(creds))
                } else {
                    // Refresh failed: drop the stored user.
                    Self::remove(&creds.user_id, exec).await?;

                    Ok(None)
                }
            } else {
                Ok(Some(creds))
            }
        } else {
            Ok(None)
        }
    }

    /// Fetch the session currently marked active, if any.
    pub async fn get_active(
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
    ) -> crate::Result<Option<Self>> {
        let res = sqlx::query!(
            "
            SELECT
                id, active, session_id, expires
            FROM modrinth_users
            WHERE active = TRUE
            "
        )
        .fetch_optional(exec)
        .await?;

        Ok(res.map(|x| Self {
            session: x.session_id,
            // Seconds timestamp; out-of-range falls back to "now".
            expires: Utc
                .timestamp_opt(x.expires, 0)
                .single()
                .unwrap_or_else(Utc::now),
            user_id: x.id,
            active: x.active == 1,
        }))
    }

    /// Load all stored Modrinth sessions, keyed by user id.
    pub async fn get_all(
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
    ) -> crate::Result<DashMap<String, Self>> {
        let res = sqlx::query!(
            "
            SELECT
                id, active, session_id, expires
            FROM modrinth_users
            "
        )
        .fetch(exec)
        // Stream rows into the map, short-circuiting on error.
        .try_fold(DashMap::new(), |acc, x| {
            acc.insert(
                x.id.clone(),
                Self {
                    session: x.session_id,
                    expires: Utc
                        .timestamp_opt(x.expires, 0)
                        .single()
                        .unwrap_or_else(Utc::now),
                    user_id: x.id,
                    active: x.active == 1,
                },
            );

            async move { Ok(acc) }
        })
        .await?;

        Ok(res)
    }

    /// Insert or update this session row. When marked active, all other
    /// rows are first demoted so at most one session is active.
    ///
    /// NOTE(review): like Credentials::upsert, the demote + upsert are
    /// not wrapped in a transaction.
    pub async fn upsert(
        &self,
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite> + Copy,
    ) -> crate::Result<()> {
        let expires = self.expires.timestamp();

        if self.active {
            sqlx::query!(
                "
                UPDATE modrinth_users
                SET active = FALSE
                "
            )
            .execute(exec)
            .await?;
        }

        sqlx::query!(
            "
            INSERT INTO modrinth_users (id, active, session_id, expires)
            VALUES ($1, $2, $3, $4)
            ON CONFLICT (id) DO UPDATE SET
                active = $2,
                session_id = $3,
                expires = $4
            ",
            self.user_id,
            self.active,
            self.session,
            expires,
        )
        .execute(exec)
        .await?;

        Ok(())
    }

    /// Delete the stored session for the given user id; no-op if absent.
    pub async fn remove(
        user_id: &str,
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
    ) -> crate::Result<()> {
        sqlx::query!(
            "
            DELETE FROM modrinth_users WHERE id = $1
            ",
            user_id,
        )
        .execute(exec)
        .await?;

        Ok(())
    }
}
#[derive(Serialize, Debug)]
#[serde(tag = "type")]
#[serde(rename_all = "snake_case")]
pub enum ModrinthCredentialsResult {
@@ -39,159 +182,25 @@ pub enum ModrinthCredentialsResult {
Credentials(ModrinthCredentials),
}
// Legacy JSON-file-backed store holding at most one logged-in Modrinth user.
#[derive(Debug)]
pub struct CredentialsStore(pub Option<ModrinthCredentials>);
impl CredentialsStore {
    /// Load cached Modrinth credentials from `auth.json`, if readable;
    /// any read/parse failure yields an empty (logged-out) store.
    pub async fn init(
        dirs: &DirectoryInfo,
        io_semaphore: &IoSemaphore,
    ) -> crate::Result<Self> {
        let auth_path = dirs.caches_meta_dir().await.join(AUTH_JSON);
        let user = read_json(&auth_path, io_semaphore).await.ok();

        if let Some(user) = user {
            Ok(Self(Some(user)))
        } else {
            Ok(Self(None))
        }
    }

    /// Write the current credentials (if any) back to `auth.json`.
    // NOTE(review): when the store is empty nothing is written, so the
    // old file is not deleted on logout — confirm this is intended.
    pub async fn save(&self) -> crate::Result<()> {
        let state = State::get().await?;
        let auth_path =
            state.directories.caches_meta_dir().await.join(AUTH_JSON);

        if let Some(creds) = &self.0 {
            write(&auth_path, &serde_json::to_vec(creds)?, &state.io_semaphore)
                .await?;
        }

        Ok(())
    }

    /// Replace the stored credentials and persist them.
    pub async fn login(
        &mut self,
        credentials: ModrinthCredentials,
    ) -> crate::Result<&Self> {
        self.0 = Some(credentials);
        self.save().await?;

        Ok(self)
    }

    #[tracing::instrument]
    /// Background refresh of the stored session; failures are logged as
    /// warnings rather than propagated.
    pub async fn update_creds() {
        let res = async {
            let state = State::get().await?;
            let mut creds_write = state.credentials.write().await;

            refresh_credentials(&mut creds_write, &state.fetch_semaphore)
                .await?;

            Ok::<(), crate::Error>(())
        }
        .await;

        match res {
            Ok(()) => {}
            Err(err) => {
                tracing::warn!("Unable to update credentials: {err}")
            }
        };
    }

    /// Clear the stored credentials and persist the (now empty) store.
    pub async fn logout(&mut self) -> crate::Result<&Self> {
        self.0 = None;
        self.save().await?;

        Ok(self)
    }
}
/// An in-progress browser-based Modrinth login, backed by a websocket to
/// the Modrinth auth service.
pub struct ModrinthAuthFlow {
    socket: async_tungstenite::WebSocketStream<
        async_tungstenite::tokio::ConnectStream,
    >,
}
impl ModrinthAuthFlow {
    /// Open the auth websocket for the given OAuth provider.
    pub async fn new(provider: &str) -> crate::Result<Self> {
        let (socket, _) = async_tungstenite::tokio::connect_async(format!(
            "wss://api.modrinth.com/v2/auth/ws?provider={provider}"
        ))
        .await?;
        Ok(Self { socket })
    }

    /// Wait for the first websocket message, which carries the URL the
    /// user must open in a browser to log in.
    pub async fn prepare_login_url(&mut self) -> crate::Result<String> {
        let code_resp = self
            .socket
            .try_next()
            .await?
            .ok_or(
                crate::ErrorKind::WSClosedError(String::from(
                    "login socket URL",
                ))
                .as_error(),
            )?
            .into_data();

        // Only the `url` field of the message is needed.
        #[derive(Deserialize)]
        struct Url {
            url: String,
        }

        let response = serde_json::from_slice::<Url>(&code_resp)?;
        Ok(response.url)
    }

    /// Wait for the completion message after the user finishes the
    /// browser login and convert it into credentials (or a 2FA prompt).
    pub async fn extract_credentials(
        &mut self,
        semaphore: &FetchSemaphore,
    ) -> crate::Result<ModrinthCredentialsResult> {
        // Minecraft bearer token
        let token_resp = self
            .socket
            .try_next()
            .await?
            .ok_or(
                crate::ErrorKind::WSClosedError(String::from(
                    "login socket URL",
                ))
                .as_error(),
            )?
            .into_data();

        let response =
            serde_json::from_slice::<HashMap<String, Value>>(&token_resp)?;

        get_result_from_res("code", response, semaphore).await
    }

    /// Close the underlying websocket cleanly.
    pub async fn close(&mut self) -> crate::Result<()> {
        self.socket.close(None).await?;

        Ok(())
    }
}
async fn get_result_from_res(
code_key: &str,
response: HashMap<String, Value>,
semaphore: &FetchSemaphore,
exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
) -> crate::Result<ModrinthCredentialsResult> {
if let Some(flow) = response.get("flow").and_then(|x| x.as_str()) {
Ok(ModrinthCredentialsResult::TwoFactorRequired {
flow: flow.to_string(),
})
} else if let Some(code) = response.get(code_key).and_then(|x| x.as_str()) {
let info = fetch_info(code, semaphore).await?;
let info = fetch_info(code, semaphore, exec).await?;
Ok(ModrinthCredentialsResult::Credentials(
ModrinthCredentials {
session: code.to_string(),
expires_at: Utc::now() + Duration::weeks(2),
user: info,
expires: Utc::now() + Duration::weeks(2),
user_id: info.id,
active: true,
},
))
} else if let Some(error) =
@@ -209,48 +218,19 @@ async fn get_result_from_res(
}
}
// Shape of the `session/refresh` endpoint response; only the session
// token is used.
#[derive(Deserialize)]
struct Session {
    session: String,
}
/// Log into Modrinth with username/password plus a captcha challenge.
/// Returns either credentials or a two-factor-required flow id.
pub async fn login_password(
    username: &str,
    password: &str,
    challenge: &str,
    semaphore: &FetchSemaphore,
) -> crate::Result<ModrinthCredentialsResult> {
    let resp = fetch_advanced(
        Method::POST,
        &format!("{MODRINTH_API_URL}auth/login"),
        None,
        Some(serde_json::json!({
            "username": username,
            "password": password,
            "challenge": challenge,
        })),
        None,
        None,
        semaphore,
        // Unauthenticated request: no stored credentials are attached.
        &CredentialsStore(None),
    )
    .await?;

    let value = serde_json::from_slice::<HashMap<String, Value>>(&resp)?;
    // "session" is the key carrying the token in the login response.
    get_result_from_res("session", value, semaphore).await
}
async fn get_creds_from_res(
response: HashMap<String, Value>,
semaphore: &FetchSemaphore,
exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
) -> crate::Result<ModrinthCredentials> {
if let Some(code) = response.get("session").and_then(|x| x.as_str()) {
let info = fetch_info(code, semaphore).await?;
let info = fetch_info(code, semaphore, exec).await?;
Ok(ModrinthCredentials {
session: code.to_string(),
expires_at: Utc::now() + Duration::weeks(2),
user: info,
expires: Utc::now() + Duration::weeks(2),
user_id: info.id,
active: true,
})
} else if let Some(error) =
response.get("description").and_then(|x| x.as_str())
@@ -267,10 +247,53 @@ async fn get_creds_from_res(
}
}
/// Build the browser URL that starts a Modrinth OAuth login with the
/// given provider, redirecting back to the launcher's detection page.
pub fn get_login_url(provider: &str) -> String {
    let redirect =
        urlencoding::encode("https://launcher-files.modrinth.com/detect.txt");
    format!("{MODRINTH_API_URL}auth/init?url={redirect}&provider={provider}")
}
/// Complete a browser-based login: convert the websocket completion
/// message into credentials or a two-factor-required result.
pub async fn finish_login_flow(
    response: HashMap<String, Value>,
    semaphore: &FetchSemaphore,
    exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
) -> crate::Result<ModrinthCredentialsResult> {
    // "code" is the key carrying the token in the websocket message.
    get_result_from_res("code", response, semaphore, exec).await
}
/// Log into Modrinth with username/password plus a captcha challenge.
/// Returns either credentials or a two-factor-required flow id.
pub async fn login_password(
    username: &str,
    password: &str,
    challenge: &str,
    semaphore: &FetchSemaphore,
    exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite> + Copy,
) -> crate::Result<ModrinthCredentialsResult> {
    let resp = fetch_advanced(
        Method::POST,
        &format!("{MODRINTH_API_URL}auth/login"),
        None,
        Some(serde_json::json!({
            "username": username,
            "password": password,
            "challenge": challenge,
        })),
        None,
        None,
        semaphore,
        exec,
    )
    .await?;

    let value = serde_json::from_slice::<HashMap<String, Value>>(&resp)?;
    // "session" is the key carrying the token in the login response.
    get_result_from_res("session", value, semaphore, exec).await
}
pub async fn login_2fa(
code: &str,
flow: &str,
semaphore: &FetchSemaphore,
exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite> + Copy,
) -> crate::Result<ModrinthCredentials> {
let resp = fetch_advanced(
Method::POST,
@@ -283,13 +306,13 @@ pub async fn login_2fa(
None,
None,
semaphore,
&CredentialsStore(None),
exec,
)
.await?;
let response = serde_json::from_slice::<HashMap<String, Value>>(&resp)?;
get_creds_from_res(response, semaphore).await
get_creds_from_res(response, semaphore, exec).await
}
pub async fn create_account(
@@ -299,6 +322,7 @@ pub async fn create_account(
challenge: &str,
sign_up_newsletter: bool,
semaphore: &FetchSemaphore,
exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite> + Copy,
) -> crate::Result<ModrinthCredentials> {
let resp = fetch_advanced(
Method::POST,
@@ -314,51 +338,19 @@ pub async fn create_account(
None,
None,
semaphore,
&CredentialsStore(None),
exec,
)
.await?;
let response = serde_json::from_slice::<HashMap<String, Value>>(&resp)?;
get_creds_from_res(response, semaphore).await
}
pub async fn refresh_credentials(
credentials_store: &mut CredentialsStore,
semaphore: &FetchSemaphore,
) -> crate::Result<()> {
if let Some(ref mut credentials) = credentials_store.0 {
let token = &credentials.session;
let resp = fetch_advanced(
Method::POST,
&format!("{MODRINTH_API_URL}session/refresh"),
None,
None,
Some(("Authorization", token)),
None,
semaphore,
&CredentialsStore(None),
)
.await
.ok()
.and_then(|resp| serde_json::from_slice::<Session>(&resp).ok());
if let Some(value) = resp {
credentials.user = fetch_info(&value.session, semaphore).await?;
credentials.session = value.session;
credentials.expires_at = Utc::now() + Duration::weeks(2);
} else if credentials.expires_at < Utc::now() {
credentials_store.0 = None;
}
}
credentials_store.save().await?;
Ok(())
get_creds_from_res(response, semaphore, exec).await
}
async fn fetch_info(
token: &str,
semaphore: &FetchSemaphore,
) -> crate::Result<ModrinthUser> {
exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
) -> crate::Result<crate::state::cache::User> {
let result = fetch_advanced(
Method::GET,
&format!("{MODRINTH_API_URL}user"),
@@ -367,7 +359,7 @@ async fn fetch_info(
Some(("Authorization", token)),
None,
semaphore,
&CredentialsStore(None),
exec,
)
.await?;
let value = serde_json::from_slice(&result)?;

View File

@@ -0,0 +1,366 @@
use chrono::{DateTime, Utc};
use serde::Deserialize;
use serde::Serialize;
use tokio::process::Command;
use crate::event::emit::emit_process;
use crate::event::ProcessPayloadType;
use crate::util::io::IOError;
use crate::{profile, ErrorKind};
/// A launched (or rescued) Minecraft instance process, persisted in the
/// SQLite `processes` table so it can be re-attached after a launcher restart.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Process {
    // OS process id (stored signed for SQLite column compatibility)
    pub pid: i64,
    // OS-reported process start time; combined with `name` and `executable`
    // to detect PID reuse when rescuing cached processes on startup
    pub start_time: i64,
    // OS-reported process name
    pub name: String,
    // Full path of the executable backing the process
    pub executable: String,
    // Profile this instance was launched from
    pub profile_path: String,
    // Optional command line to spawn after Minecraft exits successfully
    pub post_exit_command: Option<String>,
}
// Expands to a compile-time-checked `sqlx::query_as!` for `Process`, with the
// given SQL predicate (e.g. "WHERE pid = $1") appended to the shared SELECT.
// A macro is used instead of a function so that each concrete query string is
// a literal `sqlx::query_as!` can verify against the schema at compile time.
macro_rules! select_process_with_predicate {
    ($predicate:tt, $param:ident) => {
        sqlx::query_as!(
            Process,
            r#"
            SELECT
                pid, start_time, name, executable, profile_path, post_exit_command
            FROM processes
            "#
                + $predicate,
            $param
        )
    };
}
impl Process {
    /// Runs on launcher startup: reconciles the `processes` table with the
    /// live system. Cached entries whose OS process is still running (matched
    /// by PID *and* start time, name and executable, to guard against PID
    /// reuse) are "rescued" by spawning a new manager task for them; every
    /// other entry is stale and is deleted.
    pub async fn garbage_collect(
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite> + Copy,
    ) -> crate::Result<()> {
        let processes = Self::get_all(exec).await?;

        let mut system = sysinfo::System::new();
        system.refresh_processes();

        for cached_process in processes {
            let process = system
                .process(sysinfo::Pid::from_u32(cached_process.pid as u32));

            if let Some(process) = process {
                // A matching PID alone is not enough — the OS may have
                // recycled it for an unrelated process, so also require the
                // start time, name and executable to match the cached row.
                if cached_process.start_time as u64 == process.start_time()
                    && cached_process.name == process.name()
                    && cached_process.executable
                        == process
                            .exe()
                            .map(|x| x.to_string_lossy())
                            .unwrap_or_default()
                {
                    // Still alive: re-attach a manager task and keep the row.
                    tokio::spawn(cached_process.sequential_process_manager());
                    // Fix: `continue` rather than `break`, so the remaining
                    // cached processes are still checked and their stale rows
                    // still removed (previously collection stopped after the
                    // first rescued process).
                    continue;
                }
            }

            Self::remove(cached_process.pid as u32, exec).await?;
        }

        Ok(())
    }

    /// Spawns `mc_command`, records the resulting process in the `processes`
    /// table, starts a background manager task for it, and emits a
    /// `Launched` event for the profile.
    ///
    /// `post_exit_command` is an optional command line run after Minecraft
    /// exits with status 0 (see `sequential_process_manager`).
    ///
    /// # Errors
    /// Fails if the child cannot be spawned, exits before a PID can be read,
    /// or cannot be inspected via `sysinfo` afterwards.
    pub async fn insert_new_process(
        profile_path: &str,
        mut mc_command: Command,
        post_exit_command: Option<String>, // Command to run after minecraft.
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
    ) -> crate::Result<Self> {
        let mc_proc = mc_command.spawn().map_err(IOError::from)?;

        let pid = mc_proc.id().ok_or_else(|| {
            crate::ErrorKind::LauncherError(
                "Process immediately failed, could not get PID".to_string(),
            )
        })?;

        // Snapshot start time / name / executable so the process can later be
        // re-identified by `garbage_collect` across launcher restarts.
        let mut system = sysinfo::System::new();
        system.refresh_processes();
        let process =
            system.process(sysinfo::Pid::from_u32(pid)).ok_or_else(|| {
                crate::ErrorKind::LauncherError(format!(
                    "Could not find process {}",
                    pid
                ))
            })?;

        let start_time = process.start_time();
        let name = process.name().to_string();
        let Some(path) = process.exe() else {
            return Err(ErrorKind::LauncherError(format!(
                "Cached process {} has no accessible path",
                pid
            ))
            .into());
        };
        let executable = path.to_string_lossy().to_string();

        let process = Self {
            pid: pid as i64,
            start_time: start_time as i64,
            name,
            executable,
            profile_path: profile_path.to_string(),
            post_exit_command,
        };

        process.upsert(exec).await?;

        tokio::spawn(process.clone().sequential_process_manager());

        emit_process(
            profile_path,
            pid,
            ProcessPayloadType::Launched,
            "Launched Minecraft",
        )
        .await?;

        Ok(process)
    }

    // Watches the process until it exits, periodically accruing playtime to
    // the profile. On exit it removes the cached row, restores the window
    // (tauri builds), runs the post-exit command if the exit status was 0,
    // and emits a `Finished` event.
    // Returns the Minecraft exit status (see `try_wait` for its limits).
    async fn sequential_process_manager(self) -> crate::Result<i32> {
        // Accrues elapsed play time into the profile, at most once per
        // minute unless `force_update` is set. Failures are logged, not fatal.
        async fn update_playtime(
            last_updated_playtime: &mut DateTime<Utc>,
            profile_path: &str,
            force_update: bool,
        ) {
            let diff = Utc::now()
                .signed_duration_since(*last_updated_playtime)
                .num_seconds();
            if diff >= 60 || force_update {
                if let Err(e) = profile::edit(profile_path, |prof| {
                    prof.recent_time_played += diff as u64;
                    async { Ok(()) }
                })
                .await
                {
                    tracing::warn!(
                        "Failed to update playtime for profile {}: {}",
                        &profile_path,
                        e
                    );
                }
                *last_updated_playtime = Utc::now();
            }
        }

        // Wait on current Minecraft Child
        let mc_exit_status;
        let mut last_updated_playtime = Utc::now();

        loop {
            if let Some(t) = self.try_wait().await? {
                mc_exit_status = t;
                break;
            }
            // Poll every 50ms
            tokio::time::sleep(tokio::time::Duration::from_millis(50)).await;

            // Auto-update playtime every minute
            update_playtime(
                &mut last_updated_playtime,
                &self.profile_path,
                false,
            )
            .await;
        }

        // Now fully complete- update playtime one last time
        update_playtime(&mut last_updated_playtime, &self.profile_path, true)
            .await;

        // Publish play time update
        // Allow failure, it will be stored locally and sent next time
        // Sent in another thread as first call may take a couple seconds and hold up process ending
        let profile_path = self.profile_path.clone();
        tokio::spawn(async move {
            if let Err(e) =
                profile::try_update_playtime(&profile_path.clone()).await
            {
                tracing::warn!(
                    "Failed to update playtime for profile {}: {}",
                    &profile_path,
                    e
                );
            }
        });

        let state = crate::State::get().await?;
        let _ = state.discord_rpc.clear_to_default(true).await;

        Self::remove(self.pid as u32, &state.pool).await?;

        // If in tauri, the window should restore itself again after the
        // process exits if it was minimized
        #[cfg(feature = "tauri")]
        {
            let window = crate::EventState::get_main_window().await?;
            if let Some(window) = window {
                window.unminimize()?;
            }
        }

        if mc_exit_status == 0 {
            // We do not wait on the post-exit command to finish running! We let it spawn + run on its own.
            // This behaviour may be changed in the future
            if let Some(hook) = self.post_exit_command {
                // Naive whitespace split: arguments with embedded spaces are
                // not supported here.
                let mut cmd = hook.split(' ');
                if let Some(command) = cmd.next() {
                    let mut command = Command::new(command);
                    command.args(&cmd.collect::<Vec<&str>>()).current_dir(
                        crate::api::profile::get_full_path(&self.profile_path)
                            .await?,
                    );
                    command.spawn().map_err(IOError::from)?;
                }
            }
        }

        emit_process(
            &self.profile_path,
            self.pid as u32,
            ProcessPayloadType::Finished,
            "Exited process",
        )
        .await?;

        Ok(mc_exit_status)
    }

    // Non-blocking liveness check: `Ok(None)` while the process is running,
    // `Ok(Some(0))` once it is gone.
    //
    // NOTE(review): sysinfo cannot recover the real exit code of a non-child
    // process, so 0 is always reported on exit.
    // NOTE(review): relying on `status() == ProcessStatus::Run` may behave
    // differently per platform (an alive-but-sleeping process may not report
    // `Run` everywhere) — TODO confirm live processes are not misreported as
    // exited on Unix.
    async fn try_wait(&self) -> crate::Result<Option<i32>> {
        let mut system = sysinfo::System::new();
        if !system.refresh_process(sysinfo::Pid::from_u32(self.pid as u32)) {
            return Ok(Some(0));
        }
        let process = system.process(sysinfo::Pid::from_u32(self.pid as u32));

        if let Some(process) = process {
            if process.status() == sysinfo::ProcessStatus::Run {
                Ok(None)
            } else {
                Ok(Some(0))
            }
        } else {
            Ok(Some(0))
        }
    }

    /// Waits (asynchronously) until the process exits, polling every 50ms.
    pub async fn wait_for(&self) -> crate::Result<()> {
        loop {
            if self.try_wait().await?.is_some() {
                break;
            }
            // Poll every 50ms
            tokio::time::sleep(tokio::time::Duration::from_millis(50)).await;
        }
        Ok(())
    }

    /// Kills the OS process if it is still running; a no-op if it has
    /// already exited.
    pub async fn kill(&self) -> crate::Result<()> {
        let mut system = sysinfo::System::new();
        if system.refresh_process(sysinfo::Pid::from_u32(self.pid as u32)) {
            let process =
                system.process(sysinfo::Pid::from_u32(self.pid as u32));
            if let Some(process) = process {
                process.kill();
            }
        }
        Ok(())
    }

    /// Fetches the cached process with the given PID, if any.
    pub async fn get(
        pid: i32,
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
    ) -> crate::Result<Option<Self>> {
        let res = select_process_with_predicate!("WHERE pid = $1", pid)
            .fetch_optional(exec)
            .await?;

        Ok(res)
    }

    /// Fetches all cached processes launched from the given profile.
    pub async fn get_from_profile(
        profile_path: &str,
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
    ) -> crate::Result<Vec<Self>> {
        let results = select_process_with_predicate!(
            "WHERE profile_path = $1",
            profile_path
        )
        .fetch_all(exec)
        .await?;

        Ok(results)
    }

    /// Fetches every cached process.
    pub async fn get_all(
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
    ) -> crate::Result<Vec<Self>> {
        // The macro always binds one parameter, so use an always-true
        // predicate with a dummy value.
        let true_val = 1;
        let results = select_process_with_predicate!("WHERE 1=$1", true_val)
            .fetch_all(exec)
            .await?;

        Ok(results)
    }

    /// Inserts this process, or updates the existing row with the same PID.
    pub async fn upsert(
        &self,
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
    ) -> crate::Result<()> {
        sqlx::query!(
            "
            INSERT INTO processes (pid, start_time, name, executable, profile_path, post_exit_command)
            VALUES ($1, $2, $3, $4, $5, $6)
            ON CONFLICT (pid) DO UPDATE SET
                start_time = $2,
                name = $3,
                executable = $4,
                profile_path = $5,
                post_exit_command = $6
            ",
            self.pid,
            self.start_time,
            self.name,
            self.executable,
            self.profile_path,
            self.post_exit_command
        )
        .execute(exec)
        .await?;

        Ok(())
    }

    /// Deletes the cached row for the given PID (no-op if absent).
    pub async fn remove(
        pid: u32,
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
    ) -> crate::Result<()> {
        let pid = pid as i32;
        sqlx::query!(
            "
            DELETE FROM processes WHERE pid = $1
            ",
            pid,
        )
        .execute(exec)
        .await?;

        Ok(())
    }
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,807 +0,0 @@
//! Project management + inference
use crate::config::MODRINTH_API_URL;
use crate::state::{CredentialsStore, ModrinthUser, Profile};
use crate::util::fetch::{
fetch_json, write_cached_icon, FetchSemaphore, IoSemaphore,
};
use crate::util::io::IOError;
use async_zip::tokio::read::fs::ZipFileReader;
use chrono::{DateTime, Utc};
use futures::StreamExt;
use reqwest::Method;
use serde::{Deserialize, Serialize};
use serde_json::json;
use sha2::Digest;
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use tokio::io::AsyncReadExt;
use super::ProjectPathId;
#[derive(Serialize, Deserialize, Clone, Debug)]
#[serde(rename_all = "lowercase")]
pub enum ProjectType {
Mod,
DataPack,
ResourcePack,
ShaderPack,
}
impl ProjectType {
pub fn get_from_loaders(loaders: Vec<String>) -> Option<Self> {
if loaders
.iter()
.any(|x| ["fabric", "forge", "quilt", "neoforge"].contains(&&**x))
{
Some(ProjectType::Mod)
} else if loaders.iter().any(|x| x == "datapack") {
Some(ProjectType::DataPack)
} else if loaders.iter().any(|x| ["iris", "optifine"].contains(&&**x)) {
Some(ProjectType::ShaderPack)
} else if loaders
.iter()
.any(|x| ["vanilla", "canvas", "minecraft"].contains(&&**x))
{
Some(ProjectType::ResourcePack)
} else {
None
}
}
pub fn get_from_parent_folder(path: PathBuf) -> Option<Self> {
// Get parent folder
let path = path.parent()?.file_name()?;
match path.to_str()? {
"mods" => Some(ProjectType::Mod),
"datapacks" => Some(ProjectType::DataPack),
"resourcepacks" => Some(ProjectType::ResourcePack),
"shaderpacks" => Some(ProjectType::ShaderPack),
_ => None,
}
}
pub fn get_name(&self) -> &'static str {
match self {
ProjectType::Mod => "mod",
ProjectType::DataPack => "datapack",
ProjectType::ResourcePack => "resourcepack",
ProjectType::ShaderPack => "shaderpack",
}
}
pub fn get_folder(&self) -> &'static str {
match self {
ProjectType::Mod => "mods",
ProjectType::DataPack => "datapacks",
ProjectType::ResourcePack => "resourcepacks",
ProjectType::ShaderPack => "shaderpacks",
}
}
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Project {
pub sha512: String,
pub disabled: bool,
pub metadata: ProjectMetadata,
pub file_name: String,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ModrinthProject {
pub id: String,
pub slug: Option<String>,
pub project_type: String,
pub team: String,
pub title: String,
pub description: String,
pub body: String,
pub published: DateTime<Utc>,
pub updated: DateTime<Utc>,
pub client_side: SideType,
pub server_side: SideType,
pub downloads: u32,
pub followers: u32,
pub categories: Vec<String>,
pub additional_categories: Vec<String>,
pub game_versions: Vec<String>,
pub loaders: Vec<String>,
pub versions: Vec<String>,
pub icon_url: Option<String>,
}
/// A specific version of a project
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ModrinthVersion {
pub id: String,
pub project_id: String,
pub author_id: String,
pub featured: bool,
pub name: String,
pub version_number: String,
pub changelog: String,
pub changelog_url: Option<String>,
pub date_published: DateTime<Utc>,
pub downloads: u32,
pub version_type: String,
pub files: Vec<ModrinthVersionFile>,
pub dependencies: Vec<Dependency>,
pub game_versions: Vec<String>,
pub loaders: Vec<String>,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ModrinthVersionFile {
pub hashes: HashMap<String, String>,
pub url: String,
pub filename: String,
pub primary: bool,
pub size: u32,
pub file_type: Option<FileType>,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Dependency {
pub version_id: Option<String>,
pub project_id: Option<String>,
pub file_name: Option<String>,
pub dependency_type: DependencyType,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ModrinthTeamMember {
pub team_id: String,
pub user: ModrinthUser,
pub role: String,
pub ordering: i64,
}
#[derive(Serialize, Deserialize, Copy, Clone, Debug)]
#[serde(rename_all = "lowercase")]
pub enum DependencyType {
Required,
Optional,
Incompatible,
Embedded,
}
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
#[serde(rename_all = "kebab-case")]
pub enum SideType {
Required,
Optional,
Unsupported,
Unknown,
}
#[derive(Serialize, Deserialize, Copy, Clone, Debug)]
#[serde(rename_all = "kebab-case")]
pub enum FileType {
RequiredResourcePack,
OptionalResourcePack,
Unknown,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum ProjectMetadata {
Modrinth {
project: Box<ModrinthProject>,
version: Box<ModrinthVersion>,
members: Vec<ModrinthTeamMember>,
update_version: Option<Box<ModrinthVersion>>,
incompatible: bool,
},
Inferred {
title: Option<String>,
description: Option<String>,
authors: Vec<String>,
version: Option<String>,
icon: Option<PathBuf>,
project_type: Option<String>,
},
Unknown,
}
#[tracing::instrument(skip(io_semaphore))]
#[theseus_macros::debug_pin]
async fn read_icon_from_file(
icon_path: Option<String>,
cache_dir: &Path,
path: &PathBuf,
io_semaphore: &IoSemaphore,
) -> crate::Result<Option<PathBuf>> {
if let Some(icon_path) = icon_path {
// we have to repoen the zip twice here :(
let zip_file_reader = ZipFileReader::new(path).await;
if let Ok(zip_file_reader) = zip_file_reader {
// Get index of icon file and open it
let zip_index_option =
zip_file_reader.file().entries().iter().position(|f| {
f.filename().as_str().unwrap_or_default() == icon_path
});
if let Some(zip_index) = zip_index_option {
let mut bytes = Vec::new();
if zip_file_reader
.reader_with_entry(zip_index)
.await?
.read_to_end_checked(&mut bytes)
.await
.is_ok()
{
let bytes = bytes::Bytes::from(bytes);
let path = write_cached_icon(
&icon_path,
cache_dir,
bytes,
io_semaphore,
)
.await?;
return Ok(Some(path));
}
}
}
}
Ok(None)
}
// Creates Project data from the existing files in the file system, for a given Profile
// Paths must be the full paths to the files in the FS, and not the relative paths
// eg: with get_profile_full_project_paths
#[tracing::instrument(skip(paths, profile, io_semaphore, fetch_semaphore))]
#[theseus_macros::debug_pin]
pub async fn infer_data_from_files(
profile: Profile,
paths: Vec<PathBuf>,
cache_dir: PathBuf,
io_semaphore: &IoSemaphore,
fetch_semaphore: &FetchSemaphore,
credentials: &CredentialsStore,
) -> crate::Result<HashMap<ProjectPathId, Project>> {
let mut file_path_hashes = HashMap::new();
for path in paths {
if !path.exists() {
continue;
}
if let Some(ext) = path.extension() {
// Ignore txt configuration files
if ext == "txt" {
continue;
}
}
let mut file = tokio::fs::File::open(path.clone())
.await
.map_err(|e| IOError::with_path(e, &path))?;
let mut buffer = [0u8; 4096]; // Buffer to read chunks
let mut hasher = sha2::Sha512::new(); // Hasher
loop {
let bytes_read =
file.read(&mut buffer).await.map_err(IOError::from)?;
if bytes_read == 0 {
break;
}
hasher.update(&buffer[..bytes_read]);
}
let hash = format!("{:x}", hasher.finalize());
file_path_hashes.insert(hash, path.clone());
}
let files_url = format!("{}version_files", MODRINTH_API_URL);
let updates_url = format!("{}version_files/update", MODRINTH_API_URL);
let (files, update_versions) = tokio::try_join!(
fetch_json::<HashMap<String, ModrinthVersion>>(
Method::POST,
&files_url,
None,
Some(json!({
"hashes": file_path_hashes.keys().collect::<Vec<_>>(),
"algorithm": "sha512",
})),
fetch_semaphore,
credentials,
),
fetch_json::<HashMap<String, ModrinthVersion>>(
Method::POST,
&updates_url,
None,
Some(json!({
"hashes": file_path_hashes.keys().collect::<Vec<_>>(),
"algorithm": "sha512",
"loaders": [profile.metadata.loader],
"game_versions": [profile.metadata.game_version]
})),
fetch_semaphore,
credentials,
)
)?;
let projects: Vec<ModrinthProject> = fetch_json(
Method::GET,
&format!(
"{}projects?ids={}",
MODRINTH_API_URL,
serde_json::to_string(
&files
.values()
.map(|x| x.project_id.clone())
.collect::<Vec<_>>()
)?
),
None,
None,
fetch_semaphore,
credentials,
)
.await?;
let teams: Vec<ModrinthTeamMember> = fetch_json::<
Vec<Vec<ModrinthTeamMember>>,
>(
Method::GET,
&format!(
"{}teams?ids={}",
MODRINTH_API_URL,
serde_json::to_string(
&projects.iter().map(|x| x.team.clone()).collect::<Vec<_>>()
)?
),
None,
None,
fetch_semaphore,
credentials,
)
.await?
.into_iter()
.flatten()
.collect();
let mut return_projects: Vec<(PathBuf, Project)> = Vec::new();
let mut further_analyze_projects: Vec<(String, PathBuf)> = Vec::new();
for (hash, path) in file_path_hashes {
if let Some(version) = files.get(&hash) {
if let Some(project) =
projects.iter().find(|x| version.project_id == x.id)
{
let file_name = path
.file_name()
.unwrap_or_default()
.to_string_lossy()
.to_string();
return_projects.push((
path,
Project {
disabled: file_name.ends_with(".disabled"),
metadata: ProjectMetadata::Modrinth {
project: Box::new(project.clone()),
version: Box::new(version.clone()),
members: teams
.iter()
.filter(|x| x.team_id == project.team)
.cloned()
.collect::<Vec<_>>(),
update_version: if let Some(value) =
update_versions.get(&hash)
{
if value.id != version.id {
Some(Box::new(value.clone()))
} else {
None
}
} else {
None
},
incompatible: !version.loaders.contains(
&profile
.metadata
.loader
.as_api_str()
.to_string(),
) || version
.game_versions
.contains(&profile.metadata.game_version),
},
sha512: hash,
file_name,
},
));
continue;
}
}
further_analyze_projects.push((hash, path));
}
for (hash, path) in further_analyze_projects {
let file_name = path
.file_name()
.unwrap_or_default()
.to_string_lossy()
.to_string();
let zip_file_reader = if let Ok(zip_file_reader) =
ZipFileReader::new(path.clone()).await
{
zip_file_reader
} else {
return_projects.push((
path.clone(),
Project {
sha512: hash,
disabled: file_name.ends_with(".disabled"),
metadata: ProjectMetadata::Unknown,
file_name,
},
));
continue;
};
// Forge
let zip_index_option =
zip_file_reader.file().entries().iter().position(|f| {
f.filename().as_str().unwrap_or_default()
== "META-INF/mods.toml"
});
if let Some(index) = zip_index_option {
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct ForgeModInfo {
pub mods: Vec<ForgeMod>,
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct ForgeMod {
mod_id: String,
version: Option<String>,
display_name: Option<String>,
description: Option<String>,
logo_file: Option<String>,
authors: Option<String>,
}
let mut file_str = String::new();
if zip_file_reader
.reader_with_entry(index)
.await?
.read_to_string_checked(&mut file_str)
.await
.is_ok()
{
if let Ok(pack) = toml::from_str::<ForgeModInfo>(&file_str) {
if let Some(pack) = pack.mods.first() {
let icon = read_icon_from_file(
pack.logo_file.clone(),
&cache_dir,
&path,
io_semaphore,
)
.await?;
return_projects.push((
path.clone(),
Project {
sha512: hash,
disabled: file_name.ends_with(".disabled"),
file_name,
metadata: ProjectMetadata::Inferred {
title: Some(
pack.display_name
.clone()
.unwrap_or_else(|| {
pack.mod_id.clone()
}),
),
description: pack.description.clone(),
authors: pack
.authors
.clone()
.map(|x| vec![x])
.unwrap_or_default(),
version: pack.version.clone(),
icon,
project_type: Some("mod".to_string()),
},
},
));
continue;
}
}
}
}
// Forge
let zip_index_option =
zip_file_reader.file().entries().iter().position(|f| {
f.filename().as_str().unwrap_or_default() == "mcmod.info"
});
if let Some(index) = zip_index_option {
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct ForgeMod {
modid: String,
name: String,
description: Option<String>,
version: Option<String>,
author_list: Option<Vec<String>>,
logo_file: Option<String>,
}
let mut file_str = String::new();
if zip_file_reader
.reader_with_entry(index)
.await?
.read_to_string_checked(&mut file_str)
.await
.is_ok()
{
if let Ok(pack) = serde_json::from_str::<ForgeMod>(&file_str) {
let icon = read_icon_from_file(
pack.logo_file,
&cache_dir,
&path,
io_semaphore,
)
.await?;
return_projects.push((
path.clone(),
Project {
sha512: hash,
disabled: file_name.ends_with(".disabled"),
file_name,
metadata: ProjectMetadata::Inferred {
title: Some(if pack.name.is_empty() {
pack.modid
} else {
pack.name
}),
description: pack.description,
authors: pack.author_list.unwrap_or_default(),
version: pack.version,
icon,
project_type: Some("mod".to_string()),
},
},
));
continue;
}
}
}
// Fabric
let zip_index_option =
zip_file_reader.file().entries().iter().position(|f| {
f.filename().as_str().unwrap_or_default() == "fabric.mod.json"
});
if let Some(index) = zip_index_option {
#[derive(Deserialize)]
#[serde(untagged)]
enum FabricAuthor {
String(String),
Object { name: String },
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct FabricMod {
id: String,
version: String,
name: Option<String>,
description: Option<String>,
authors: Vec<FabricAuthor>,
icon: Option<String>,
}
let mut file_str = String::new();
if zip_file_reader
.reader_with_entry(index)
.await?
.read_to_string_checked(&mut file_str)
.await
.is_ok()
{
if let Ok(pack) = serde_json::from_str::<FabricMod>(&file_str) {
let icon = read_icon_from_file(
pack.icon,
&cache_dir,
&path,
io_semaphore,
)
.await?;
return_projects.push((
path.clone(),
Project {
sha512: hash,
disabled: file_name.ends_with(".disabled"),
file_name,
metadata: ProjectMetadata::Inferred {
title: Some(pack.name.unwrap_or(pack.id)),
description: pack.description,
authors: pack
.authors
.into_iter()
.map(|x| match x {
FabricAuthor::String(name) => name,
FabricAuthor::Object { name } => name,
})
.collect(),
version: Some(pack.version),
icon,
project_type: Some("mod".to_string()),
},
},
));
continue;
}
}
}
// Quilt
let zip_index_option =
zip_file_reader.file().entries().iter().position(|f| {
f.filename().as_str().unwrap_or_default() == "quilt.mod.json"
});
if let Some(index) = zip_index_option {
#[derive(Deserialize)]
struct QuiltMetadata {
pub name: Option<String>,
pub description: Option<String>,
pub contributors: Option<HashMap<String, String>>,
pub icon: Option<String>,
}
#[derive(Deserialize)]
struct QuiltMod {
id: String,
version: String,
metadata: Option<QuiltMetadata>,
}
let mut file_str = String::new();
if zip_file_reader
.reader_with_entry(index)
.await?
.read_to_string_checked(&mut file_str)
.await
.is_ok()
{
if let Ok(pack) = serde_json::from_str::<QuiltMod>(&file_str) {
let icon = read_icon_from_file(
pack.metadata.as_ref().and_then(|x| x.icon.clone()),
&cache_dir,
&path,
io_semaphore,
)
.await?;
return_projects.push((
path.clone(),
Project {
sha512: hash,
disabled: file_name.ends_with(".disabled"),
file_name,
metadata: ProjectMetadata::Inferred {
title: Some(
pack.metadata
.as_ref()
.and_then(|x| x.name.clone())
.unwrap_or(pack.id),
),
description: pack
.metadata
.as_ref()
.and_then(|x| x.description.clone()),
authors: pack
.metadata
.map(|x| {
x.contributors
.unwrap_or_default()
.keys()
.cloned()
.collect()
})
.unwrap_or_default(),
version: Some(pack.version),
icon,
project_type: Some("mod".to_string()),
},
},
));
continue;
}
}
}
// Other
let zip_index_option =
zip_file_reader.file().entries().iter().position(|f| {
f.filename().as_str().unwrap_or_default() == "pack.mcmeta"
});
if let Some(index) = zip_index_option {
#[derive(Deserialize)]
struct Pack {
description: Option<String>,
}
let mut file_str = String::new();
if zip_file_reader
.reader_with_entry(index)
.await?
.read_to_string_checked(&mut file_str)
.await
.is_ok()
{
if let Ok(pack) = serde_json::from_str::<Pack>(&file_str) {
let icon = read_icon_from_file(
Some("pack.png".to_string()),
&cache_dir,
&path,
io_semaphore,
)
.await?;
// Guess the project type from the filepath
let project_type =
ProjectType::get_from_parent_folder(path.clone());
return_projects.push((
path.clone(),
Project {
sha512: hash,
disabled: file_name.ends_with(".disabled"),
file_name,
metadata: ProjectMetadata::Inferred {
title: None,
description: pack.description,
authors: Vec::new(),
version: None,
icon,
project_type: project_type
.map(|x| x.get_name().to_string()),
},
},
));
continue;
}
}
}
return_projects.push((
path.clone(),
Project {
sha512: hash,
disabled: file_name.ends_with(".disabled"),
file_name,
metadata: ProjectMetadata::Unknown,
},
));
}
// Project paths should be relative
let mut corrected_hashmap = HashMap::new();
let mut stream = tokio_stream::iter(return_projects);
while let Some((h, v)) = stream.next().await {
let h = ProjectPathId::from_fs_path(&h).await?;
corrected_hashmap.insert(h, v);
}
Ok(corrected_hashmap)
}

View File

@@ -1,69 +0,0 @@
use uuid::Uuid;
use crate::State;
// We implement a store for safe loading bars such that we can wait for them to complete
// We create this store separately from the loading bars themselves, because this may be extended as needed
pub struct SafeProcesses {
pub loading_bars: Vec<Uuid>,
}
#[derive(Debug, Copy, Clone)]
pub enum ProcessType {
LoadingBar,
// Potentially other types of processes (ie: IO operations?)
}
impl SafeProcesses {
// init
pub fn new() -> Self {
Self {
loading_bars: Vec::new(),
}
}
// Adds a new running safe process to the list by uuid
pub async fn add_uuid(
r#type: ProcessType,
uuid: Uuid,
) -> crate::Result<Uuid> {
let state = State::get().await?;
let mut safe_processes = state.safety_processes.write().await;
match r#type {
ProcessType::LoadingBar => {
safe_processes.loading_bars.push(uuid);
}
}
Ok(uuid)
}
// Mark a safe process as finishing
pub async fn complete(
r#type: ProcessType,
uuid: Uuid,
) -> crate::Result<()> {
let state = State::get().await?;
let mut safe_processes = state.safety_processes.write().await;
match r#type {
ProcessType::LoadingBar => {
safe_processes.loading_bars.retain(|x| *x != uuid);
}
}
Ok(())
}
// Check if there are any pending safe processes of a given type
pub async fn is_complete(r#type: ProcessType) -> crate::Result<bool> {
let state = State::get().await?;
let safe_processes = state.safety_processes.read().await;
match r#type {
ProcessType::LoadingBar => {
if safe_processes.loading_bars.is_empty() {
return Ok(true);
}
}
}
Ok(false)
}
}

View File

@@ -1,127 +1,173 @@
//! Theseus settings file
use serde::{Deserialize, Serialize};
use std::path::{Path, PathBuf};
use tokio::fs;
use super::{DirectoryInfo, JavaGlobals};
// TODO: convert to semver?
const CURRENT_FORMAT_VERSION: u32 = 1;
// Types
/// Global Theseus settings
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Settings {
pub theme: Theme,
pub memory: MemorySettings,
#[serde(default)]
pub force_fullscreen: bool,
pub game_resolution: WindowSize,
pub custom_java_args: Vec<String>,
pub custom_env_args: Vec<(String, String)>,
pub java_globals: JavaGlobals,
pub hooks: Hooks,
pub max_concurrent_downloads: usize,
pub max_concurrent_writes: usize,
pub version: u32,
pub collapsed_navigation: bool,
#[serde(default)]
pub disable_discord_rpc: bool,
#[serde(default)]
pub hide_on_process: bool,
#[serde(default)]
pub native_decorations: bool,
#[serde(default)]
pub theme: Theme,
pub default_page: DefaultPage,
#[serde(default)]
pub developer_mode: bool,
#[serde(default)]
pub opt_out_analytics: bool,
#[serde(default)]
pub collapsed_navigation: bool,
pub advanced_rendering: bool,
#[serde(default)]
pub fully_onboarded: bool,
#[serde(default = "DirectoryInfo::get_initial_settings_dir")]
pub loaded_config_dir: Option<PathBuf>,
pub native_decorations: bool,
pub telemetry: bool,
pub discord_rpc: bool,
pub developer_mode: bool,
pub onboarded: bool,
pub extra_launch_args: Vec<String>,
pub custom_env_vars: Vec<(String, String)>,
pub memory: MemorySettings,
pub force_fullscreen: bool,
pub game_resolution: WindowSize,
pub hide_on_process_start: bool,
pub hooks: Hooks,
pub custom_dir: Option<String>,
pub prev_custom_dir: Option<String>,
pub migrated: bool,
}
impl Settings {
#[tracing::instrument]
pub async fn init(file: &Path) -> crate::Result<Self> {
let mut rescued = false;
pub async fn get(
exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
) -> crate::Result<Self> {
let res = sqlx::query!(
"
SELECT
max_concurrent_writes, max_concurrent_downloads,
theme, default_page, collapsed_navigation, advanced_rendering, native_decorations,
discord_rpc, developer_mode, telemetry,
onboarded,
json(extra_launch_args) extra_launch_args, json(custom_env_vars) custom_env_vars,
mc_memory_max, mc_force_fullscreen, mc_game_resolution_x, mc_game_resolution_y, hide_on_process_start,
hook_pre_launch, hook_wrapper, hook_post_exit,
custom_dir, prev_custom_dir, migrated
FROM settings
"
)
.fetch_one(exec)
.await?;
let settings = if file.exists() {
let loaded_settings = fs::read(&file)
.await
.map_err(|err| {
crate::ErrorKind::FSError(format!(
"Error reading settings file: {err}"
))
.as_error()
})
.and_then(|it| {
serde_json::from_slice::<Settings>(&it)
.map_err(crate::Error::from)
});
// settings is corrupted. Back up the file and create a new one
if let Err(ref err) = loaded_settings {
tracing::error!("Failed to load settings file: {err}. ");
let backup_file = file.with_extension("json.bak");
tracing::error!("Corrupted settings file will be backed up as {}, and a new settings file will be created.", backup_file.display());
let _ = fs::rename(file, backup_file).await;
rescued = true;
}
loaded_settings.ok()
} else {
None
};
if let Some(settings) = settings {
Ok(settings)
} else {
// Create new settings file
let settings = Self {
theme: Theme::Dark,
memory: MemorySettings::default(),
force_fullscreen: false,
game_resolution: WindowSize::default(),
custom_java_args: Vec::new(),
custom_env_args: Vec::new(),
java_globals: JavaGlobals::new(),
hooks: Hooks::default(),
max_concurrent_downloads: 10,
max_concurrent_writes: 10,
version: CURRENT_FORMAT_VERSION,
collapsed_navigation: false,
disable_discord_rpc: false,
hide_on_process: false,
native_decorations: false,
default_page: DefaultPage::Home,
developer_mode: false,
opt_out_analytics: false,
advanced_rendering: true,
fully_onboarded: rescued, // If we rescued the settings file, we should consider the user fully onboarded
// By default, the config directory is the same as the settings directory
loaded_config_dir: DirectoryInfo::get_initial_settings_dir(),
};
if rescued {
settings.sync(file).await?;
}
Ok(settings)
}
Ok(Self {
max_concurrent_downloads: res.max_concurrent_downloads as usize,
max_concurrent_writes: res.max_concurrent_writes as usize,
theme: Theme::from_string(&res.theme),
default_page: DefaultPage::from_string(&res.default_page),
collapsed_navigation: res.collapsed_navigation == 1,
advanced_rendering: res.advanced_rendering == 1,
native_decorations: res.native_decorations == 1,
telemetry: res.telemetry == 1,
discord_rpc: res.discord_rpc == 1,
developer_mode: res.developer_mode == 1,
onboarded: res.onboarded == 1,
extra_launch_args: res
.extra_launch_args
.and_then(|x| serde_json::from_str(&x).ok())
.unwrap_or_default(),
custom_env_vars: res
.custom_env_vars
.and_then(|x| serde_json::from_str(&x).ok())
.unwrap_or_default(),
memory: MemorySettings {
maximum: res.mc_memory_max as u32,
},
force_fullscreen: res.mc_force_fullscreen == 1,
game_resolution: WindowSize(
res.mc_game_resolution_x as u16,
res.mc_game_resolution_y as u16,
),
hide_on_process_start: res.hide_on_process_start == 1,
hooks: Hooks {
pre_launch: res.hook_pre_launch,
wrapper: res.hook_wrapper,
post_exit: res.hook_post_exit,
},
custom_dir: res.custom_dir,
prev_custom_dir: res.prev_custom_dir,
migrated: res.migrated == 1,
})
}
#[tracing::instrument(skip(self))]
pub async fn sync(&self, to: &Path) -> crate::Result<()> {
fs::write(to, serde_json::to_vec(self)?)
.await
.map_err(|err| {
crate::ErrorKind::FSError(format!(
"Error saving settings to file: {err}"
))
.as_error()
})?;
    /// Persists this settings snapshot into the single-row `settings` table.
    ///
    /// Scalar fields are bound directly; `extra_launch_args` and
    /// `custom_env_vars` are serialized to JSON strings and stored via
    /// SQLite's `jsonb(...)`.
    pub async fn update(
        &self,
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
    ) -> crate::Result<()> {
        // Pre-convert values the query macro cannot bind directly
        let max_concurrent_writes = self.max_concurrent_writes as i32;
        let max_concurrent_downloads = self.max_concurrent_downloads as i32;
        let theme = self.theme.as_str();
        let default_page = self.default_page.as_str();
        let extra_launch_args = serde_json::to_string(&self.extra_launch_args)?;
        let custom_env_vars = serde_json::to_string(&self.custom_env_vars)?;

        sqlx::query!(
            "
            UPDATE settings
            SET
                max_concurrent_writes = $1,
                max_concurrent_downloads = $2,

                theme = $3,
                default_page = $4,
                collapsed_navigation = $5,
                advanced_rendering = $6,
                native_decorations = $7,

                discord_rpc = $8,
                developer_mode = $9,
                telemetry = $10,

                onboarded = $11,

                extra_launch_args = jsonb($12),
                custom_env_vars = jsonb($13),
                mc_memory_max = $14,
                mc_force_fullscreen = $15,
                mc_game_resolution_x = $16,
                mc_game_resolution_y = $17,
                hide_on_process_start = $18,

                hook_pre_launch = $19,
                hook_wrapper = $20,
                hook_post_exit = $21,

                custom_dir = $22,
                prev_custom_dir = $23,
                migrated = $24
            ",
            max_concurrent_writes,
            max_concurrent_downloads,
            theme,
            default_page,
            self.collapsed_navigation,
            self.advanced_rendering,
            self.native_decorations,
            self.discord_rpc,
            self.developer_mode,
            self.telemetry,
            self.onboarded,
            extra_launch_args,
            custom_env_vars,
            self.memory.maximum,
            self.force_fullscreen,
            self.game_resolution.0,
            self.game_resolution.1,
            self.hide_on_process_start,
            self.hooks.pre_launch,
            self.hooks.wrapper,
            self.hooks.post_exit,
            self.custom_dir,
            self.prev_custom_dir,
            self.migrated
        )
        .execute(exec)
        .await?;

        Ok(())
    }
}
@@ -135,37 +181,40 @@ pub enum Theme {
Oled,
}
impl Theme {
    /// The canonical lowercase identifier for this theme, suitable for
    /// persisting in the settings store.
    pub fn as_str(&self) -> &'static str {
        match *self {
            Theme::Light => "light",
            Theme::Oled => "oled",
            Theme::Dark => "dark",
        }
    }

    /// Parses a stored identifier back into a `Theme`; any unrecognized
    /// value falls back to the dark theme.
    pub fn from_string(id: &str) -> Theme {
        match id {
            "light" => Theme::Light,
            "oled" => Theme::Oled,
            // Covers "dark" and any unknown/corrupted value.
            _ => Theme::Dark,
        }
    }
}
/// Minecraft memory settings
#[derive(Serialize, Deserialize, Debug, Clone, Copy)]
pub struct MemorySettings {
    // Maximum game memory allocation; stored as `mc_memory_max` in the
    // settings table. Presumably megabytes — TODO confirm against the JVM
    // argument builder.
    pub maximum: u32,
}

impl Default for MemorySettings {
    fn default() -> Self {
        // Default maximum allocation of 2048.
        Self { maximum: 2048 }
    }
}
/// Game window size as a (width, height) pair; persisted as the
/// `mc_game_resolution_x`/`mc_game_resolution_y` settings columns.
#[derive(Serialize, Deserialize, Debug, Clone, Copy)]
pub struct WindowSize(pub u16, pub u16);

impl Default for WindowSize {
    fn default() -> Self {
        // 854x480 — Minecraft's standard default window resolution.
        Self(854, 480)
    }
}
/// Game initialization hooks
///
/// Optional commands run around an instance's lifecycle (persisted as the
/// `hook_pre_launch`/`hook_wrapper`/`hook_post_exit` settings columns).
/// `None` fields are omitted from serialized output entirely.
///
/// Fix: the struct previously carried two `#[derive]` attributes deriving
/// the same traits (`Serialize`, `Deserialize`, `Debug`, `Clone`) twice,
/// which produces conflicting-implementation compile errors; merged into a
/// single derive, keeping `Default` alongside `#[serde(default)]` so
/// missing fields deserialize to `None`.
#[derive(Serialize, Deserialize, Debug, Clone, Default)]
#[serde(default)]
pub struct Hooks {
    // Command run before the game launches — presumably a shell command
    // line; confirm against the launch pipeline.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub pre_launch: Option<String>,
    // Command the game process is wrapped in at launch.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub wrapper: Option<String>,
    // Command run after the game process exits.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub post_exit: Option<String>,
}
@@ -176,8 +225,19 @@ pub enum DefaultPage {
Library,
}
impl Default for DefaultPage {
fn default() -> Self {
Self::Home
impl DefaultPage {
pub fn as_str(&self) -> &'static str {
match self {
DefaultPage::Home => "home",
DefaultPage::Library => "library",
}
}
pub fn from_string(string: &str) -> Self {
match string {
"home" => Self::Home,
"library" => Self::Library,
_ => Self::Home,
}
}
}

View File

@@ -1,261 +0,0 @@
use std::path::PathBuf;
use reqwest::Method;
use serde::{Deserialize, Serialize};
use crate::config::MODRINTH_API_URL;
use crate::data::DirectoryInfo;
use crate::state::CredentialsStore;
use crate::util::fetch::{
fetch_json, read_json, write, FetchSemaphore, IoSemaphore,
};
// Serializable bundle of all Modrinth tag lists, fetched together so the
// frontend can consume them in one payload. Each field corresponds to one
// `tag/*` API endpoint (see `Tags::fetch`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Tags {
    // From `tag/category`
    pub categories: Vec<Category>,
    // From `tag/loader`
    pub loaders: Vec<Loader>,
    // From `tag/game_version`
    pub game_versions: Vec<GameVersion>,
    // From `tag/donation_platform`
    pub donation_platforms: Vec<DonationPlatform>,
    // From `tag/report_type` (plain strings)
    pub report_types: Vec<String>,
}
impl Tags {
    /// Loads launcher tags, trying sources in order:
    /// 1. the on-disk cache `<caches_meta_dir>/tags.json`;
    /// 2. if `fetch_online`, the live Modrinth API;
    /// 3. otherwise, the backup file `tags.json.bak` (restoring it over the
    ///    primary cache on success).
    ///
    /// Whichever source succeeds is re-written to both the cache and its
    /// backup before being returned. Errors with `NoValueFor("launcher
    /// tags")` when no source yields data.
    ///
    /// NOTE(review): the backup is only consulted when `fetch_online` is
    /// false — a failed online fetch does not fall back to the backup.
    /// Confirm this ordering is intentional.
    #[tracing::instrument(skip(io_semaphore, fetch_semaphore))]
    #[theseus_macros::debug_pin]
    pub async fn init(
        dirs: &DirectoryInfo,
        fetch_online: bool,
        io_semaphore: &IoSemaphore,
        fetch_semaphore: &FetchSemaphore,
        credentials: &CredentialsStore,
    ) -> crate::Result<Self> {
        let mut tags = None;
        let tags_path = dirs.caches_meta_dir().await.join("tags.json");
        let tags_path_backup =
            dirs.caches_meta_dir().await.join("tags.json.bak");
        // Fast path: the primary cache file exists and parses cleanly.
        if let Ok(tags_json) = read_json::<Self>(&tags_path, io_semaphore).await
        {
            tags = Some(tags_json);
        } else if fetch_online {
            // No usable cache; try the live API. A failure here is logged
            // only — the NoValueFor error below covers the overall miss.
            match Self::fetch(fetch_semaphore, credentials).await {
                Ok(tags_fetch) => tags = Some(tags_fetch),
                Err(err) => {
                    tracing::warn!("Unable to fetch launcher tags: {err}")
                }
            }
        } else if let Ok(tags_json) =
            read_json::<Self>(&tags_path_backup, io_semaphore).await
        {
            tags = Some(tags_json);
            // Restore the primary cache file from the backup copy.
            std::fs::copy(&tags_path_backup, &tags_path).map_err(|err| {
                crate::ErrorKind::FSError(format!(
                    "Error restoring tags backup: {err}"
                ))
                .as_error()
            })?;
        }
        if let Some(tags_data) = tags {
            // Persist to both the cache and the backup so a future run can
            // recover from a corrupted primary file.
            write(&tags_path, &serde_json::to_vec(&tags_data)?, io_semaphore)
                .await?;
            write(
                &tags_path_backup,
                &serde_json::to_vec(&tags_data)?,
                io_semaphore,
            )
            .await?;
            Ok(tags_data)
        } else {
            Err(crate::ErrorKind::NoValueFor(String::from("launcher tags"))
                .as_error())
        }
    }

    /// Best-effort background refresh: re-fetches tags from the API, backs
    /// up the existing cache file, writes the fresh data, and swaps it into
    /// the global `State`. All errors are swallowed after a
    /// `tracing::warn!`.
    #[tracing::instrument]
    #[theseus_macros::debug_pin]
    pub async fn update() {
        let res = async {
            let state = crate::State::get().await?;
            let creds = state.credentials.read().await;
            let tags_fetch =
                Tags::fetch(&state.fetch_semaphore, &creds).await?;
            // Release the credentials read lock before doing file I/O.
            drop(creds);
            let tags_path =
                state.directories.caches_meta_dir().await.join("tags.json");
            let tags_path_backup = state
                .directories
                .caches_meta_dir()
                .await
                .join("tags.json.bak");
            if tags_path.exists() {
                // NOTE(review): this unwrap panics the task if the copy
                // fails (e.g. permissions); consider propagating with `?`
                // like the surrounding operations.
                std::fs::copy(&tags_path, &tags_path_backup).unwrap();
            }
            write(
                &tags_path,
                &serde_json::to_vec(&tags_fetch)?,
                &state.io_semaphore,
            )
            .await?;
            // Publish the fresh tags to the shared state under the write
            // lock.
            let mut old_tags = state.tags.write().await;
            *old_tags = tags_fetch;
            Ok::<(), crate::Error>(())
        }
        .await;
        match res {
            Ok(()) => {}
            Err(err) => {
                tracing::warn!("Unable to update launcher tags: {err}")
            }
        };
    }

    // Returns a clone of the cached category tags
    #[tracing::instrument(skip(self))]
    pub fn get_categories(&self) -> Vec<Category> {
        self.categories.clone()
    }

    // Returns a clone of the cached loader tags
    #[tracing::instrument(skip(self))]
    pub fn get_loaders(&self) -> Vec<Loader> {
        self.loaders.clone()
    }

    // Returns a clone of the cached game version tags
    #[tracing::instrument(skip(self))]
    pub fn get_game_versions(&self) -> Vec<GameVersion> {
        self.game_versions.clone()
    }

    // Returns a clone of the cached donation platform tags
    #[tracing::instrument(skip(self))]
    pub fn get_donation_platforms(&self) -> Vec<DonationPlatform> {
        self.donation_platforms.clone()
    }

    // Returns a clone of the cached report type tags
    #[tracing::instrument(skip(self))]
    pub fn get_report_types(&self) -> Vec<String> {
        self.report_types.clone()
    }

    // Clones all tag lists together as one serializable bundle
    #[tracing::instrument(skip(self))]
    pub fn get_tag_bundle(&self) -> Tags {
        self.clone()
    }

    // Fetches all five tag lists from the Modrinth API concurrently.
    // `try_join!` short-circuits: one failed request fails the whole fetch.
    pub async fn fetch(
        semaphore: &FetchSemaphore,
        credentials: &CredentialsStore,
    ) -> crate::Result<Self> {
        let categories = format!("{MODRINTH_API_URL}tag/category");
        let loaders = format!("{MODRINTH_API_URL}tag/loader");
        let game_versions = format!("{MODRINTH_API_URL}tag/game_version");
        let donation_platforms =
            format!("{MODRINTH_API_URL}tag/donation_platform");
        let report_types = format!("{MODRINTH_API_URL}tag/report_type");
        let categories_fut = fetch_json::<Vec<Category>>(
            Method::GET,
            &categories,
            None,
            None,
            semaphore,
            credentials,
        );
        let loaders_fut = fetch_json::<Vec<Loader>>(
            Method::GET,
            &loaders,
            None,
            None,
            semaphore,
            credentials,
        );
        let game_versions_fut = fetch_json::<Vec<GameVersion>>(
            Method::GET,
            &game_versions,
            None,
            None,
            semaphore,
            credentials,
        );
        let donation_platforms_fut = fetch_json::<Vec<DonationPlatform>>(
            Method::GET,
            &donation_platforms,
            None,
            None,
            semaphore,
            credentials,
        );
        let report_types_fut = fetch_json::<Vec<String>>(
            Method::GET,
            &report_types,
            None,
            None,
            semaphore,
            credentials,
        );
        // Run all five requests concurrently (bounded by `semaphore`).
        let (
            categories,
            loaders,
            game_versions,
            donation_platforms,
            report_types,
        ) = tokio::try_join!(
            categories_fut,
            loaders_fut,
            game_versions_fut,
            donation_platforms_fut,
            report_types_fut
        )?;
        Ok(Self {
            categories,
            loaders,
            game_versions,
            donation_platforms,
            report_types,
        })
    }
}
/// A project category tag from the Modrinth `tag/category` endpoint.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Category {
    pub name: String,
    // Project type this category applies to — values come from the API;
    // confirm the set against the endpoint.
    pub project_type: String,
    pub header: String,
    // Icon path as provided by the API.
    pub icon: PathBuf,
}
/// A mod loader tag from the Modrinth `tag/loader` endpoint.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Loader {
    pub name: String,
    // Icon path as provided by the API.
    pub icon: PathBuf,
    // Project types this loader can be used with.
    pub supported_project_types: Vec<String>,
}
/// A donation platform tag from the Modrinth `tag/donation_platform`
/// endpoint.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DonationPlatform {
    // Short identifier for the platform.
    pub short: String,
    // Human-readable platform name.
    pub name: String,
}
/// A Minecraft version tag from the Modrinth `tag/game_version` endpoint.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GameVersion {
    pub version: String,
    // e.g. release/snapshot — exact values come from the API; confirm
    // against the endpoint.
    pub version_type: String,
    // Release date string as provided by the API.
    pub date: String,
    // Whether this is a major version.
    pub major: bool,
}