MR App 0.9.5 - Big bugfix update (#3585)

* Add launcher_feature_version to Profile

* Misc fixes

- Add typing to theme and settings stuff
- Push instance route on creation from installing a modpack
- Fixed servers not reloading properly when first added

* Make old instances scan the logs folder for joined servers on launcher startup

* Create AttachedWorldData

* Change AttachedWorldData interface

* Rename WorldType::World to WorldType::Singleplayer

* Implement world display status system

* Fix Minecraft font

* Fix set_world_display_status Tauri error

* Add 'Play instance' option

* Add option to disable worlds showing in Home

* Fixes

- Fix available server filter only showing if there are some available
- Fixed server and singleplayer filters sometimes showing when there are only servers or singleplayer worlds
- Fixed new worlds not being automatically added when detected
- Rephrased Jump back into worlds option description

* Fixed more than 6 items sometimes showing up in Jump back in

* Fix servers.dat issue with instances you haven't played before

* Fix overly large bulk requests being made; limit the maximum to 800 (#3430)

* Add hiding from home page, add types to Mods.vue

* Make recent worlds go into grid when display is huge

* Fix lint

* Remove redundant media query

* Fix protocol version on home page, and home page being blocked by pinging servers

* Clippy fix

* More Clippy fixes

* Fix Prettier lints

* Undo `from_string` changes

---------

Co-authored-by: Josiah Glosson <soujournme@gmail.com>
Co-authored-by: Alejandro González <me@alegon.dev>
This commit is contained in:
Prospector
2025-05-01 16:13:13 -07:00
committed by GitHub
parent 4a2605bc1e
commit 3dad6b317f
123 changed files with 1622 additions and 744 deletions

View File

@@ -0,0 +1,99 @@
use crate::worlds::{DisplayStatus, WorldType};
use paste::paste;
use std::collections::HashMap;
/// Launcher-side metadata attached to a single Minecraft world
/// (singleplayer save or server), persisted in the `attached_world_data`
/// SQLite table keyed by (profile_path, world_type, world_id).
#[derive(Debug, Clone, Default)]
pub struct AttachedWorldData {
    // How the world is surfaced in the UI; presumably controls hiding
    // worlds from the Home page per this update's changelog — confirm
    // against the DisplayStatus definition.
    pub display_status: DisplayStatus,
}
impl AttachedWorldData {
    /// Loads the attached data for one specific world of an instance.
    ///
    /// Returns `Ok(None)` when no row exists for the
    /// (instance, world_type, world_id) triple.
    pub async fn get_for_world(
        instance: &str,
        world_type: WorldType,
        world_id: &str,
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
    ) -> crate::Result<Option<Self>> {
        // The table stores the world type as TEXT, so bind its string form.
        let world_type = world_type.as_str();
        let attached_data = sqlx::query!(
            "
SELECT display_status
FROM attached_world_data
WHERE profile_path = $1 and world_type = $2 and world_id = $3
",
            instance,
            world_type,
            world_id
        )
        .fetch_optional(exec)
        .await?;
        // Decode the TEXT column back into the enum; absent row => None.
        Ok(attached_data.map(|x| AttachedWorldData {
            display_status: DisplayStatus::from_string(&x.display_status),
        }))
    }

    /// Loads the attached data for every world of an instance at once,
    /// keyed by (world type, world id) for cheap lookup by the caller.
    pub async fn get_all_for_instance(
        instance: &str,
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
    ) -> crate::Result<HashMap<(WorldType, String), Self>> {
        let attached_data = sqlx::query!(
            "
SELECT world_type, world_id, display_status
FROM attached_world_data
WHERE profile_path = $1
",
            instance
        )
        .fetch_all(exec)
        .await?;
        // Rehydrate both TEXT discriminators into their enum forms and
        // collect the rows into the lookup map.
        Ok(attached_data
            .into_iter()
            .map(|x| {
                let world_type = WorldType::from_string(&x.world_type);
                let display_status =
                    DisplayStatus::from_string(&x.display_status);
                (
                    (world_type, x.world_id),
                    AttachedWorldData { display_status },
                )
            })
            .collect())
    }
}
/// Generates a free async `set_<field>` function that upserts a single
/// column of `attached_world_data` for one world.
///
/// `$parameter`/`$parameter_type` name the value being stored, `$column`
/// is the SQL column literal (concatenated into the query string at
/// compile time), and the optional `=> $adapter` expression converts the
/// value into a sqlx-bindable form (e.g. enum -> &str) before binding.
macro_rules! attached_data_setter {
    ($parameter:ident: $parameter_type:ty, $column:expr $(=> $adapter:expr)?) => {
        paste! {
            // `paste!` splices the field name into the generated fn name,
            // e.g. `display_status` -> `set_display_status`.
            pub async fn [<set_ $parameter>](
                instance: &str,
                world_type: WorldType,
                world_id: &str,
                $parameter: $parameter_type,
                exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
            ) -> crate::Result<()> {
                let world_type = world_type.as_str();
                // Apply the optional adapter, shadowing the raw value.
                $(let $parameter = $adapter;)?
                // sqlx::query! supports compile-time `+` concatenation of
                // string literals, letting $column appear in the SQL text.
                sqlx::query!(
                    "INSERT INTO attached_world_data (profile_path, world_type, world_id, " + $column + ")\n" +
                    "VALUES ($1, $2, $3, $4)\n" +
                    "ON CONFLICT (profile_path, world_type, world_id) DO UPDATE\n" +
                    " SET " + $column + " = $4",
                    instance,
                    world_type,
                    world_id,
                    $parameter
                )
                .execute(exec)
                .await?;
                Ok(())
            }
        }
    }
}
// Generates `set_display_status`, storing the enum as its string form.
attached_data_setter!(display_status: DisplayStatus, "display_status" => display_status.as_str());

View File

@@ -843,7 +843,7 @@ impl CachedEntry {
fetch_semaphore: &FetchSemaphore,
pool: &SqlitePool,
) -> crate::Result<Vec<T>> {
const MAX_REQUEST_SIZE: usize = 1000;
const MAX_REQUEST_SIZE: usize = 800;
let urls = keys
.iter()
@@ -1072,7 +1072,7 @@ impl CachedEntry {
CacheValueType::File => {
let mut versions = fetch_json::<HashMap<String, Version>>(
Method::POST,
&format!("{}version_files", MODRINTH_API_URL),
&format!("{MODRINTH_API_URL}version_files"),
None,
Some(serde_json::json!({
"algorithm": "sha1",
@@ -1307,7 +1307,7 @@ impl CachedEntry {
});
let version_update_url =
format!("{}version_files/update", MODRINTH_API_URL);
format!("{MODRINTH_API_URL}version_files/update");
let variations =
futures::future::try_join_all(filtered_keys.iter().map(
|((loaders_key, game_version), hashes)| {
@@ -1481,7 +1481,7 @@ pub async fn cache_file_hash(
CachedEntry::upsert_many(
&[CacheValue::FileHash(CachedFileHash {
path: format!("{}/{}", profile_path, path),
path: format!("{profile_path}/{path}"),
size: size as u64,
hash,
project_type,

View File

@@ -21,8 +21,7 @@ impl DiscordGuard {
let dipc =
DiscordIpcClient::new("1123683254248148992").map_err(|e| {
crate::ErrorKind::OtherError(format!(
"Could not create Discord client {}",
e,
"Could not create Discord client {e}",
))
})?;
@@ -90,8 +89,7 @@ impl DiscordGuard {
let res = client.set_activity(activity.clone());
let could_not_set_err = |e: Box<dyn serde::ser::StdError>| {
crate::ErrorKind::OtherError(format!(
"Could not update Discord activity {}",
e,
"Could not update Discord activity {e}",
))
};
@@ -99,8 +97,7 @@ impl DiscordGuard {
if let Err(_e) = res {
client.reconnect().map_err(|e| {
crate::ErrorKind::OtherError(format!(
"Could not reconnect to Discord IPC {}",
e,
"Could not reconnect to Discord IPC {e}",
))
})?;
return Ok(client
@@ -131,8 +128,7 @@ impl DiscordGuard {
let could_not_clear_err = |e: Box<dyn serde::ser::StdError>| {
crate::ErrorKind::OtherError(format!(
"Could not clear Discord activity {}",
e,
"Could not clear Discord activity {e}",
))
};
@@ -140,8 +136,7 @@ impl DiscordGuard {
if res.is_err() {
client.reconnect().map_err(|e| {
crate::ErrorKind::OtherError(format!(
"Could not reconnect to Discord IPC {}",
e,
"Could not reconnect to Discord IPC {e}",
))
})?;
return Ok(client

View File

@@ -155,28 +155,47 @@ pub(crate) async fn watch_profile(
let profile_path = dirs.profiles_dir().join(profile_path);
if profile_path.exists() && profile_path.is_dir() {
for folder in ProjectType::iterator().map(|x| x.get_folder()).chain([
for sub_path in ProjectType::iterator().map(|x| x.get_folder()).chain([
"crash-reports",
"saves",
"servers.dat",
]) {
let path = profile_path.join(folder);
let full_path = profile_path.join(sub_path);
if !path.exists() && !path.is_symlink() && !folder.contains(".") {
if let Err(e) = crate::util::io::create_dir_all(&path).await {
tracing::error!(
"Failed to create directory for watcher {path:?}: {e}"
);
return;
if !full_path.exists() && !full_path.is_symlink() {
if !sub_path.contains(".") {
if let Err(e) =
crate::util::io::create_dir_all(&full_path).await
{
tracing::error!(
"Failed to create directory for watcher {full_path:?}: {e}"
);
return;
}
} else if sub_path == "servers.dat" {
const EMPTY_NBT: &[u8] = &[
10, // Compound tag
0, 0, // Empty name
0, // End of compound tag
];
if let Err(e) =
crate::util::io::write(&full_path, EMPTY_NBT).await
{
tracing::error!(
"Failed to create file for watcher {full_path:?}: {e}"
);
return;
}
}
}
let mut watcher = watcher.write().await;
if let Err(e) =
watcher.watcher().watch(&path, RecursiveMode::Recursive)
if let Err(e) = watcher
.watcher()
.watch(&full_path, RecursiveMode::Recursive)
{
tracing::error!(
"Failed to watch directory for watcher {path:?}: {e}"
"Failed to watch directory for watcher {full_path:?}: {e}"
);
return;
}

View File

@@ -5,9 +5,9 @@ use crate::state;
use crate::state::{
CacheValue, CachedEntry, CachedFile, CachedFileHash, CachedFileUpdate,
Credentials, DefaultPage, DependencyType, DeviceToken, DeviceTokenKey,
DeviceTokenPair, FileType, Hooks, LinkedData, MemorySettings,
ModrinthCredentials, Profile, ProfileInstallStage, TeamMember, Theme,
VersionFile, WindowSize,
DeviceTokenPair, FileType, Hooks, LauncherFeatureVersion, LinkedData,
MemorySettings, ModrinthCredentials, Profile, ProfileInstallStage,
TeamMember, Theme, VersionFile, WindowSize,
};
use crate::util::fetch::{read_json, IoSemaphore};
use chrono::{DateTime, Utc};
@@ -317,6 +317,7 @@ where
ProfileInstallStage::NotInstalled
}
},
launcher_feature_version: LauncherFeatureVersion::None,
name: profile.metadata.name,
icon_path: profile.metadata.icon,
game_version: profile.metadata.game_version,

View File

@@ -1202,5 +1202,5 @@ fn generate_oauth_challenge() -> String {
let mut rng = rand::thread_rng();
let bytes: Vec<u8> = (0..64).map(|_| rng.gen::<u8>()).collect();
bytes.iter().map(|byte| format!("{:02x}", byte)).collect()
bytes.iter().map(|byte| format!("{byte:02x}")).collect()
}

View File

@@ -45,6 +45,7 @@ pub use self::mr_auth::*;
mod legacy_converter;
pub mod attached_world_data;
pub mod server_join_log;
// Global state

View File

@@ -86,7 +86,7 @@ impl ProcessManager {
now.format("%Y-%m-%d %H:%M:%S")
)
.map_err(|e| IOError::with_path(e, &log_path))?;
writeln!(log_file, "# Profile: {} \n", profile_path)
writeln!(log_file, "# Profile: {profile_path} \n")
.map_err(|e| IOError::with_path(e, &log_path))?;
writeln!(log_file).map_err(|e| IOError::with_path(e, &log_path))?;
}
@@ -318,7 +318,7 @@ impl Process {
formatted_time,
thread,
if !logger.is_empty() {
format!("{}/", logger)
format!("{logger}/")
} else {
String::new()
},
@@ -383,7 +383,7 @@ impl Process {
formatted_time,
thread,
if !logger.is_empty() {
format!("{}/", logger)
format!("{logger}/")
} else {
String::new()
},
@@ -659,10 +659,7 @@ impl Process {
if log_path.exists() {
if let Err(e) = Process::append_to_log_file(
&log_path,
&format!(
"\n# Process exited with status: {}\n",
mc_exit_status
),
&format!("\n# Process exited with status: {mc_exit_status}\n"),
) {
tracing::warn!(
"Failed to write exit status to log file: {}",

View File

@@ -1,23 +1,32 @@
use super::settings::{Hooks, MemorySettings, WindowSize};
use crate::profile::get_full_path;
use crate::state::server_join_log::JoinLogEntry;
use crate::state::{
cache_file_hash, CacheBehaviour, CachedEntry, CachedFileHash,
};
use crate::util;
use crate::util::fetch::{write_cached_icon, FetchSemaphore, IoSemaphore};
use crate::util::io::{self};
use chrono::{DateTime, TimeZone, Utc};
use chrono::{DateTime, TimeDelta, TimeZone, Utc};
use dashmap::DashMap;
use lazy_static::lazy_static;
use regex::Regex;
use serde::{Deserialize, Serialize};
use sqlx::SqlitePool;
use std::collections::HashSet;
use std::convert::TryFrom;
use std::convert::TryInto;
use std::path::Path;
use tokio::fs::DirEntry;
use tokio::io::{AsyncBufReadExt, AsyncRead};
use tokio::task::JoinSet;
// Represent a Minecraft instance.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Profile {
pub path: String,
pub install_stage: ProfileInstallStage,
pub launcher_feature_version: LauncherFeatureVersion,
pub name: String,
pub icon_path: Option<String>,
@@ -87,6 +96,38 @@ impl ProfileInstallStage {
}
}
/// Tracks which one-shot launcher feature migrations have already been
/// applied to a profile. Variants are ordered, so `Ord` comparison against
/// [`Self::MOST_RECENT`] tells whether any migration is still pending.
#[derive(
    Serialize, Deserialize, Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd,
)]
#[serde(rename_all = "snake_case")]
pub enum LauncherFeatureVersion {
    None,
    MigratedServerLastPlayTime,
}
impl LauncherFeatureVersion {
    /// The newest feature version; bump this whenever a variant is added.
    pub const MOST_RECENT: Self = Self::MigratedServerLastPlayTime;

    /// The stable string form stored in the database for this version.
    pub fn as_str(&self) -> &'static str {
        match self {
            Self::None => "none",
            Self::MigratedServerLastPlayTime => {
                "migrated_server_last_play_time"
            }
        }
    }

    /// Parses the stored string form; anything unrecognized (including
    /// "none" itself) falls back to the baseline `None` version.
    pub fn from_str(val: &str) -> Self {
        if val == "migrated_server_last_play_time" {
            Self::MigratedServerLastPlayTime
        } else {
            Self::None
        }
    }
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct LinkedData {
pub project_id: String,
@@ -263,6 +304,7 @@ struct ProfileQueryResult {
override_hook_wrapper: Option<String>,
override_hook_post_exit: Option<String>,
protocol_version: Option<i64>,
launcher_feature_version: String,
}
impl TryFrom<ProfileQueryResult> for Profile {
@@ -272,6 +314,9 @@ impl TryFrom<ProfileQueryResult> for Profile {
Ok(Profile {
path: x.path,
install_stage: ProfileInstallStage::from_str(&x.install_stage),
launcher_feature_version: LauncherFeatureVersion::from_str(
&x.launcher_feature_version,
),
name: x.name,
icon_path: x.icon_path,
game_version: x.game_version,
@@ -339,7 +384,7 @@ macro_rules! select_profiles_with_predicate {
ProfileQueryResult,
r#"
SELECT
path, install_stage, name, icon_path,
path, install_stage, launcher_feature_version, name, icon_path,
game_version, protocol_version, mod_loader, mod_loader_version,
json(groups) as "groups!: serde_json::Value",
linked_project_id, linked_version_id, locked,
@@ -402,6 +447,8 @@ impl Profile {
exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
) -> crate::Result<()> {
let install_stage = self.install_stage.as_str();
let launcher_feature_version = self.launcher_feature_version.as_str();
let mod_loader = self.loader.as_str();
let groups = serde_json::to_string(&self.groups)?;
@@ -439,7 +486,7 @@ impl Profile {
override_java_path, override_extra_launch_args, override_custom_env_vars,
override_mc_memory_max, override_mc_force_fullscreen, override_mc_game_resolution_x, override_mc_game_resolution_y,
override_hook_pre_launch, override_hook_wrapper, override_hook_post_exit,
protocol_version
protocol_version, launcher_feature_version
)
VALUES (
$1, $2, $3, $4,
@@ -451,7 +498,7 @@ impl Profile {
$17, jsonb($18), jsonb($19),
$20, $21, $22, $23,
$24, $25, $26,
$27
$27, $28
)
ON CONFLICT (path) DO UPDATE SET
install_stage = $2,
@@ -487,7 +534,8 @@ impl Profile {
override_hook_wrapper = $25,
override_hook_post_exit = $26,
protocol_version = $27
protocol_version = $27,
launcher_feature_version = $28
",
self.path,
install_stage,
@@ -516,6 +564,7 @@ impl Profile {
self.hooks.wrapper,
self.hooks.post_exit,
self.protocol_version,
launcher_feature_version
)
.execute(exec)
.await?;
@@ -565,10 +614,10 @@ impl Profile {
let mut all = Self::get_all(&state.pool).await?;
let mut keys = vec![];
let mut migrations = JoinSet::new();
for profile in &mut all {
let path =
crate::api::profile::get_full_path(&profile.path).await?;
let path = get_full_path(&profile.path).await?;
for project_type in ProjectType::iterator() {
let folder = project_type.get_folder();
@@ -610,7 +659,42 @@ impl Profile {
profile.install_stage = ProfileInstallStage::NotInstalled;
profile.upsert(&state.pool).await?;
}
if profile.launcher_feature_version
< LauncherFeatureVersion::MOST_RECENT
{
let state = state.clone();
let profile_path = profile.path.clone();
migrations.spawn(async move {
let Ok(Some(mut profile)) = Self::get(&profile_path, &state.pool).await else {
tracing::error!("Failed to find instance '{}' for migration", profile_path);
return;
};
drop(profile_path);
tracing::info!(
"Migrating profile '{}' from launcher feature version {:?} to {:?}",
profile.path, profile.launcher_feature_version, LauncherFeatureVersion::MOST_RECENT
);
loop {
let result = profile.perform_launcher_feature_migration(&state).await;
if result.is_err() || profile.launcher_feature_version == LauncherFeatureVersion::MOST_RECENT {
if let Err(err) = result {
tracing::error!("Failed to migrate instance '{}': {}", profile.path, err);
return;
}
if let Err(err) = profile.upsert(&state.pool).await {
tracing::error!("Failed to update instance '{}' migration state: {}", profile.path, err);
return;
}
break;
}
}
tracing::info!("Finished migration for profile '{}'", profile.path);
});
}
}
migrations.join_all().await;
let file_hashes = CachedEntry::get_file_hash_many(
&keys.iter().map(|s| &**s).collect::<Vec<_>>(),
@@ -651,6 +735,144 @@ impl Profile {
Ok(())
}
    /// Performs exactly one launcher feature migration step, advancing
    /// `self.launcher_feature_version` by one version on success. The caller
    /// loops this until `MOST_RECENT` is reached (and persists the profile).
    ///
    /// The `None -> MigratedServerLastPlayTime` step back-fills the server
    /// join log by scanning the instance's existing log files for
    /// "Connecting to ..." lines, skipping servers already recorded.
    async fn perform_launcher_feature_migration(
        &mut self,
        state: &crate::State,
    ) -> crate::Result<()> {
        match self.launcher_feature_version {
            LauncherFeatureVersion::None => {
                // Never-played instances have no logs worth scanning;
                // mark the migration done immediately.
                if self.last_played.is_none() {
                    self.launcher_feature_version =
                        LauncherFeatureVersion::MigratedServerLastPlayTime;
                    return Ok(());
                }
                // Reused across log files; parse_log_file fills in
                // host/port/join_time per matched line before upserting.
                let mut join_log_entry = JoinLogEntry {
                    profile_path: self.path.clone(),
                    ..Default::default()
                };
                let logs_path = state.directories.profile_logs_dir(&self.path);
                // No readable logs directory => nothing to migrate.
                let Ok(mut directory) = io::read_dir(&logs_path).await else {
                    self.launcher_feature_version =
                        LauncherFeatureVersion::MigratedServerLastPlayTime;
                    return Ok(());
                };
                // Build a (host, port) set of joins already in the DB so the
                // scan does not duplicate them.
                let existing_joins_map =
                    super::server_join_log::get_joins(&self.path, &state.pool)
                        .await?;
                let existing_joins = existing_joins_map
                    .keys()
                    .map(|x| (&x.0 as &str, x.1))
                    .collect::<HashSet<_>>();
                while let Some(log_file) = directory.next_entry().await? {
                    // Best effort: a single unparsable log file is logged and
                    // skipped rather than failing the whole migration.
                    if let Err(err) = Self::parse_log_file(
                        &log_file,
                        |host, port| existing_joins.contains(&(host, port)),
                        state,
                        &mut join_log_entry,
                    )
                    .await
                    {
                        tracing::error!(
                            "Failed to parse log file '{}': {}",
                            log_file.path().display(),
                            err
                        );
                    }
                }
                self.launcher_feature_version =
                    LauncherFeatureVersion::MigratedServerLastPlayTime;
            }
            // MOST_RECENT aliases the newest variant; reaching this arm means
            // the caller looped past completion without bumping MOST_RECENT.
            LauncherFeatureVersion::MOST_RECENT => unreachable!(
                "LauncherFeatureVersion::MOST_RECENT was not updated"
            ),
        }
        Ok(())
    }
    /// Parses a log file on a best-effort basis, using the log file's
    /// filesystem creation time rather than the actual times mentioned in
    /// the log file, which are missing date information.
    ///
    /// Only `latest.log` (plain text) and rotated `*.log.gz` (gzip) files
    /// are handled; anything else is silently ignored, as are files whose
    /// names are not valid UTF-8.
    async fn parse_log_file(
        log_file: &DirEntry,
        should_skip: impl Fn(&str, u16) -> bool,
        state: &crate::State,
        join_entry: &mut JoinLogEntry,
    ) -> crate::Result<()> {
        let file_name = log_file.file_name();
        let Some(file_name) = file_name.to_str() else {
            return Ok(());
        };
        // NOTE(review): `created()` is not supported on all filesystems;
        // such errors propagate via `?` and are caught by the caller's
        // best-effort error logging.
        let log_time = io::metadata(&log_file.path()).await?.created()?.into();
        if file_name == "latest.log" {
            let file = io::open_file(&log_file.path()).await?;
            Self::parse_open_log_file(
                file,
                should_skip,
                log_time,
                state,
                join_entry,
            )
            .await
        } else if file_name.ends_with(".log.gz") {
            // Rotated logs are gzip-compressed; decode them on the fly.
            let file = io::open_file(&log_file.path()).await?;
            let file = tokio::io::BufReader::new(file);
            let file =
                async_compression::tokio::bufread::GzipDecoder::new(file);
            Self::parse_open_log_file(
                file,
                should_skip,
                log_time,
                state,
                join_entry,
            )
            .await
        } else {
            Ok(())
        }
    }
    /// Scans an already-opened log stream line by line for server
    /// connection messages and upserts a join-log entry for each one not
    /// filtered out by `should_skip`.
    ///
    /// `log_time` seeds the recorded join time (taken from the file's
    /// creation time by the caller); it is advanced by one second per
    /// recorded join — presumably to keep successive entries in the same
    /// file distinct and ordered, since the log lines themselves carry no
    /// date. TODO confirm against how join_time is consumed.
    async fn parse_open_log_file(
        reader: impl AsyncRead + Unpin,
        should_skip: impl Fn(&str, u16) -> bool,
        mut log_time: DateTime<Utc>,
        state: &crate::State,
        join_entry: &mut JoinLogEntry,
    ) -> crate::Result<()> {
        lazy_static! {
            // Matches lines like "[12:34:56] [Render thread/INFO]: Connecting
            // to <host>, <port>" — assumed to be the vanilla client's connect
            // message format; verify against target Minecraft versions.
            static ref LOG_LINE_REGEX: Regex = Regex::new(r"^\[[0-9]{2}(?::[0-9]{2}){2}] \[.+?/[A-Z]+?]: Connecting to (.+?), ([1-9][0-9]{0,4})$").unwrap();
        }
        let reader = tokio::io::BufReader::new(reader);
        let mut lines = reader.lines();
        while let Some(log_line) = lines.next_line().await? {
            let Some(log_line) = LOG_LINE_REGEX.captures(&log_line) else {
                continue;
            };
            let Some(host) = log_line.get(1) else {
                continue;
            };
            let host = host.as_str();
            let Some(port) = log_line.get(2) else {
                continue;
            };
            // The regex caps the port at 5 digits but not at 65535, so a
            // value like 99999 is rejected here by the u16 parse instead.
            let Ok(port) = port.as_str().parse::<u16>() else {
                continue;
            };
            if should_skip(host, port) {
                continue;
            }
            // Reuse the caller's entry buffer: overwrite the per-join fields
            // and persist; profile_path was set once by the caller.
            join_entry.host = host.to_string();
            join_entry.port = port;
            join_entry.join_time = log_time;
            join_entry.upsert(&state.pool).await?;
            log_time += TimeDelta::seconds(1);
        }
        Ok(())
    }
pub async fn get_projects(
&self,
cache_behaviour: Option<CacheBehaviour>,

View File

@@ -2,6 +2,7 @@ use std::collections::HashMap;
use chrono::{DateTime, TimeZone, Utc};
#[derive(Default)]
pub struct JoinLogEntry {
pub profile_path: String,
pub host: String,

View File

@@ -44,6 +44,8 @@ pub struct Settings {
pub enum FeatureFlag {
    PagePath,
    ProjectBackground,
    // NOTE(review): added in this update alongside the worlds UI; presumably
    // gates the per-instance Worlds tab — confirm against frontend usage.
    WorldsTab,
    // Presumably gates showing recent worlds on the Home page ("Jump back
    // into worlds" option mentioned in the changelog) — confirm.
    WorldsInHome,
}
impl Settings {