Merge beta into release #21

Merged
didirus merged 276 commits from beta into release 2025-11-01 13:04:25 +00:00
16 changed files with 235 additions and 78 deletions
Showing only changes of commit ab6e9dd5d7 - Show all commits

19
Cargo.lock generated
View File

@@ -4436,6 +4436,7 @@ dependencies = [
"meilisearch-sdk",
"murmur2",
"paste",
"path-util",
"prometheus",
"rand 0.8.5",
"rand_chacha 0.3.1",
@@ -5769,6 +5770,16 @@ version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"
[[package]]
name = "path-util"
version = "0.0.0"
dependencies = [
"derive_more 2.0.1",
"itertools 0.14.0",
"serde",
"typed-path",
]
[[package]]
name = "pathdiff"
version = "0.2.3"
@@ -9117,6 +9128,7 @@ dependencies = [
"notify-debouncer-mini",
"p256",
"paste",
"path-util",
"phf 0.12.1",
"png",
"quartz_nbt",
@@ -9164,6 +9176,7 @@ dependencies = [
"hyper-util",
"native-dialog",
"paste",
"path-util",
"serde",
"serde_json",
"serde_with",
@@ -9790,6 +9803,12 @@ dependencies = [
"utf-8",
]
[[package]]
name = "typed-path"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c462d18470a2857aa657d338af5fa67170bb48bcc80a296710ce3b0802a32566"
[[package]]
name = "typeid"
version = "1.0.3"

View File

@@ -8,6 +8,7 @@ members = [
"packages/app-lib",
"packages/ariadne",
"packages/daedalus",
"packages/path-util",
]
[workspace.package]
@@ -37,6 +38,7 @@ base64 = "0.22.1"
bitflags = "2.9.1"
bytemuck = "1.23.1"
bytes = "1.10.1"
typed-path = "0.11.0"
censor = "0.3.0"
chardetng = "0.1.17"
chrono = "0.4.41"
@@ -46,6 +48,7 @@ clickhouse = "0.13.3"
color-thief = "0.2.2"
console-subscriber = "0.4.1"
daedalus = { path = "packages/daedalus" }
path-util = { path = "packages/path-util" }
dashmap = "6.1.0"
data-url = "0.3.1"
deadpool-redis = "0.22.0"
@@ -239,7 +242,7 @@ codegen-units = 1 # Compile crates one after another so the compiler can optimiz
# Specific profile for labrinth production builds
[profile.release-labrinth]
inherits = "release"
panic = "unwind" # Don't exit the whole app on panic in production
panic = "unwind" # Don't exit the whole app on panic in production
[profile.dev.package.sqlx-macros]
opt-level = 3

View File

@@ -11,6 +11,7 @@ tauri-build = { workspace = true, features = ["codegen"] }
[dependencies]
theseus = { workspace = true, features = ["tauri"] }
path-util.workspace = true
serde_json.workspace = true
serde = { workspace = true, features = ["derive"] }

View File

@@ -1,5 +1,6 @@
use crate::api::Result;
use dashmap::DashMap;
use path_util::SafeRelativeUtf8UnixPathBuf;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::{Path, PathBuf};
@@ -239,7 +240,7 @@ pub async fn profile_export_mrpack(
#[tauri::command]
pub async fn profile_get_pack_export_candidates(
profile_path: &str,
) -> Result<Vec<String>> {
) -> Result<Vec<SafeRelativeUtf8UnixPathBuf>> {
let candidates = profile::get_pack_export_candidates(profile_path).await?;
Ok(candidates)
}

View File

@@ -133,6 +133,7 @@ rusty-money.workspace = true
json-patch.workspace = true
ariadne.workspace = true
path-util.workspace = true
clap = { workspace = true, features = ["derive"] }

View File

@@ -1,6 +1,7 @@
use crate::{
models::v2::projects::LegacySideType, util::env::parse_strings_from_var,
};
use path_util::SafeRelativeUtf8UnixPathBuf;
use serde::{Deserialize, Serialize};
use validator::Validate;
@@ -23,7 +24,7 @@ pub struct PackFormat {
#[derive(Serialize, Deserialize, Validate, Eq, PartialEq, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct PackFile {
pub path: String,
pub path: SafeRelativeUtf8UnixPathBuf,
pub hashes: std::collections::HashMap<PackFileHash, String>,
pub env: Option<std::collections::HashMap<EnvType, LegacySideType>>, // TODO: Should this use LegacySideType? Will probably require a overhaul of mrpack format to change this
#[validate(custom(function = "validate_download_url"))]

View File

@@ -304,7 +304,7 @@ impl AutomatedModerationQueue {
let hash = x.hashes.get(&PackFileHash::Sha1);
if let Some(hash) = hash {
let path = x.path.clone();
let path = x.path.to_string();
Some((hash.clone(), Some(x), path, None))
} else {
None

View File

@@ -4,7 +4,6 @@ use crate::validate::{
SupportedGameVersions, ValidationError, ValidationResult,
};
use std::io::{Cursor, Read};
use std::path::Component;
use validator::Validate;
use zip::ZipArchive;
@@ -72,24 +71,6 @@ impl super::Validator for ModpackValidator {
"All pack files must provide a SHA512 hash!".into(),
));
}
let path = std::path::Path::new(&file.path)
.components()
.next()
.ok_or_else(|| {
ValidationError::InvalidInput(
"Invalid pack file path!".into(),
)
})?;
match path {
Component::CurDir | Component::Normal(_) => {}
_ => {
return Err(ValidationError::InvalidInput(
"Invalid pack file path!".into(),
));
}
};
}
Ok(ValidationResult::PassWithPackDataAndFiles {

View File

@@ -114,6 +114,7 @@ hickory-resolver.workspace = true
zbus.workspace = true
ariadne.workspace = true
path-util.workspace = true
[target.'cfg(windows)'.dependencies]
winreg.workspace = true

View File

@@ -6,6 +6,7 @@ use crate::state::{CachedEntry, LinkedData, ProfileInstallStage, SideType};
use crate::util::fetch::{fetch, fetch_advanced, write_cached_icon};
use crate::util::io;
use path_util::SafeRelativeUtf8UnixPathBuf;
use reqwest::Method;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
@@ -27,7 +28,7 @@ pub struct PackFormat {
#[derive(Serialize, Deserialize, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct PackFile {
pub path: String,
pub path: SafeRelativeUtf8UnixPathBuf,
pub hashes: HashMap<PackFileHash, String>,
pub env: Option<HashMap<EnvType, SideType>>,
pub downloads: Vec<String>,

View File

@@ -18,8 +18,8 @@ use super::install_from::{
generate_pack_from_version_id,
};
use crate::data::ProjectType;
use std::io::Cursor;
use std::path::{Component, PathBuf};
use std::io::{Cursor, ErrorKind};
use std::path::PathBuf;
/// Install a pack
/// Wrapper around install_pack_files that generates a pack creation description, and
@@ -169,31 +169,22 @@ pub async fn install_zipped_mrpack_files(
)
.await?;
let project_path = project.path.to_string();
let path = profile::get_full_path(&profile_path)
.await?
.join(project.path.as_str());
let path =
std::path::Path::new(&project_path).components().next();
if let Some(Component::CurDir | Component::Normal(_)) = path
{
let path = profile::get_full_path(&profile_path)
.await?
.join(&project_path);
cache_file_hash(
file.clone(),
&profile_path,
project.path.as_str(),
project.hashes.get(&PackFileHash::Sha1).map(|x| &**x),
ProjectType::get_from_parent_folder(&path),
&state.pool,
)
.await?;
cache_file_hash(
file.clone(),
&profile_path,
&project_path,
project
.hashes
.get(&PackFileHash::Sha1)
.map(|x| &**x),
ProjectType::get_from_parent_folder(&path),
&state.pool,
)
.await?;
write(&path, &file, &state.io_semaphore).await?;
write(&path, &file, &state.io_semaphore).await?;
}
Ok(())
}
},
@@ -377,9 +368,10 @@ pub async fn remove_all_related_files(
if let Some(metadata) = &project.metadata
&& to_remove.contains(&metadata.project_id)
{
let path = profile_full_path.join(file_path);
if path.exists() {
io::remove_file(&path).await?;
match io::remove_file(profile_full_path.join(file_path)).await {
Ok(_) => (),
Err(err) if err.kind() == ErrorKind::NotFound => (),
Err(err) => return Err(err.into()),
}
}
}
@@ -387,9 +379,12 @@ pub async fn remove_all_related_files(
// Iterate over all Modrinth project file paths in the json, and remove them
// (There should be few, but this removes any files the .mrpack intended as Modrinth projects but were unrecognized)
for file in pack.files {
let path: PathBuf = profile_full_path.join(file.path);
if path.exists() {
io::remove_file(&path).await?;
match io::remove_file(profile_full_path.join(file.path.as_str()))
.await
{
Ok(_) => (),
Err(err) if err.kind() == ErrorKind::NotFound => (),
Err(err) => return Err(err.into()),
}
}
@@ -412,11 +407,16 @@ pub async fn remove_all_related_files(
}
// Remove this file if a corresponding one exists in the filesystem
let existing_file = profile::get_full_path(&profile_path)
.await?
.join(&new_path);
if existing_file.exists() {
io::remove_file(&existing_file).await?;
match io::remove_file(
profile::get_full_path(&profile_path)
.await?
.join(&new_path),
)
.await
{
Ok(_) => (),
Err(err) if err.kind() == ErrorKind::NotFound => (),
Err(err) => return Err(err.into()),
}
}
}

View File

@@ -18,6 +18,7 @@ use crate::util::io::{self, IOError};
pub use crate::{State, state::Profile};
use async_zip::tokio::write::ZipFileWriter;
use async_zip::{Compression, ZipEntryBuilder};
use path_util::SafeRelativeUtf8UnixPathBuf;
use serde_json::json;
use std::collections::{HashMap, HashSet};
@@ -497,11 +498,12 @@ pub async fn export_mrpack(
let version_id = version_id.unwrap_or("1.0.0".to_string());
let mut packfile =
create_mrpack_json(&profile, version_id, description).await?;
let included_candidates_set =
HashSet::<_>::from_iter(included_export_candidates.iter());
let included_candidates_set = HashSet::<_>::from_iter(
included_export_candidates.iter().map(|x| x.as_str()),
);
packfile
.files
.retain(|f| included_candidates_set.contains(&f.path));
.retain(|f| included_candidates_set.contains(f.path.as_str()));
// Build vec of all files in the folder
let mut path_list = Vec::new();
@@ -575,8 +577,8 @@ pub async fn export_mrpack(
#[tracing::instrument]
pub async fn get_pack_export_candidates(
profile_path: &str,
) -> crate::Result<Vec<String>> {
let mut path_list: Vec<String> = Vec::new();
) -> crate::Result<Vec<SafeRelativeUtf8UnixPathBuf>> {
let mut path_list = Vec::new();
let profile_base_dir = get_full_path(profile_path).await?;
let mut read_dir = io::read_dir(&profile_base_dir).await?;
@@ -610,18 +612,19 @@ pub async fn get_pack_export_candidates(
fn pack_get_relative_path(
profile_path: &PathBuf,
path: &PathBuf,
) -> crate::Result<String> {
Ok(path
.strip_prefix(profile_path)
.map_err(|_| {
crate::ErrorKind::FSError(format!(
"Path {path:?} does not correspond to a profile"
))
})?
.components()
.map(|c| c.as_os_str().to_string_lossy().to_string())
.collect::<Vec<_>>()
.join("/"))
) -> crate::Result<SafeRelativeUtf8UnixPathBuf> {
Ok(SafeRelativeUtf8UnixPathBuf::try_from(
path.strip_prefix(profile_path)
.map_err(|_| {
crate::ErrorKind::FSError(format!(
"Path {path:?} does not correspond to a profile"
))
})?
.components()
.map(|c| c.as_os_str().to_string_lossy())
.collect::<Vec<_>>()
.join("/"),
)?)
}
/// Run Minecraft using a profile and the default credentials, logged in credentials,
@@ -896,7 +899,15 @@ pub async fn create_mrpack_json(
.collect();
Some(Ok(PackFile {
path,
path: match path.try_into() {
Ok(path) => path,
Err(_) => {
return Some(Err(crate::ErrorKind::OtherError(
"Invalid file path in project".into(),
)
.as_error()));
}
},
hashes,
env: Some(env),
downloads,

View File

@@ -173,6 +173,9 @@ pub enum ErrorKind {
#[error("zbus error: {0}")]
ZbusError(#[from] zbus::Error),
#[error("Deserialization error: {0}")]
DeserializationError(#[from] serde::de::value::Error),
}
#[derive(Debug)]

View File

@@ -1,7 +1,10 @@
// IO error
// A wrapper around the tokio IO functions that adds the path to the error message, instead of the uninformative std::io::Error.
use std::{io::Write, path::Path};
use std::{
io::{ErrorKind, Write},
path::Path,
};
use tempfile::NamedTempFile;
use tokio::task::spawn_blocking;
@@ -32,6 +35,13 @@ impl IOError {
path: path.to_string_lossy().to_string(),
}
}
/// Returns the [`ErrorKind`] of the underlying `std::io::Error`,
/// regardless of whether this wrapper also carries a path.
pub fn kind(&self) -> ErrorKind {
    // Both variants wrap a std::io::Error; extract it once, then delegate.
    let source = match self {
        IOError::IOPathError { source, .. } => source,
        IOError::IOError(source) => source,
    };
    source.kind()
}
}
pub fn canonicalize(

View File

@@ -0,0 +1,12 @@
[package]
name = "path-util"
edition.workspace = true
[dependencies]
typed-path.workspace = true
serde = { workspace = true, features = ["derive"] }
derive_more = { workspace = true, features = ["display", "deref"] }
itertools.workspace = true
[lints]
workspace = true

View File

@@ -0,0 +1,112 @@
use itertools::Itertools;
use serde::{
Deserialize, Deserializer, Serialize, Serializer,
de::value::StringDeserializer,
};
use typed_path::{Utf8Component, Utf8TypedPathBuf, Utf8UnixPathBuf};
/// A Unix-style, UTF-8, relative path that is safe to join onto a base
/// directory on any platform.
///
/// Values can only be obtained through the validating `Deserialize` impl (or
/// `TryFrom<String>`, which reuses it), which rejects non-Unix paths, paths
/// containing backslashes, empty paths, any special component (`/`, `.`,
/// `..`, or a prefix), and reserved Windows device names such as `CON` or
/// `NUL` — so a value of this type cannot escape its base directory.
#[derive(
Eq, PartialEq, Hash, Debug, Clone, derive_more::Display, derive_more::Deref,
)]
#[repr(transparent)]
pub struct SafeRelativeUtf8UnixPathBuf(Utf8UnixPathBuf);
impl<'de> Deserialize<'de> for SafeRelativeUtf8UnixPathBuf {
    /// Deserializes a string into a validated safe relative Unix path,
    /// rejecting anything that could escape a base directory or behave
    /// inconsistently across platforms.
    fn deserialize<D: Deserializer<'de>>(
        deserializer: D,
    ) -> Result<Self, D::Error> {
        // When parsed successfully, the path is guaranteed to be free from leading backslashes
        // and Windows prefixes (e.g., `C:`)
        let Utf8TypedPathBuf::Unix(path) =
            Utf8TypedPathBuf::from(String::deserialize(deserializer)?)
        else {
            return Err(serde::de::Error::custom(
                "File path must be a Unix-style relative path",
            ));
        };

        // At this point, we may have a pseudo-Unix path like `my\directory`, which we should reject
        // to guarantee consistent cross-platform behavior when interpreting component separators
        if path.as_str().contains('\\') {
            return Err(serde::de::Error::custom(
                "File path must not contain backslashes",
            ));
        }

        // Each validation below deliberately iterates over a *fresh*
        // `components()` iterator. The previous implementation shared one
        // iterator across checks, but `Iterator::any` consumes elements: once
        // every component passed the `is_normal` check the iterator was
        // exhausted, so the reserved-device-name check below ran over nothing
        // and paths like `CON` or `mods/NUL.txt` were accepted.
        if path.components().next().is_none() {
            return Err(serde::de::Error::custom("File path cannot be empty"));
        }

        // All components should be normal: a file or directory name, not `/`, `.`, or `..`
        if path.components().any(|component| !component.is_normal()) {
            return Err(serde::de::Error::custom(
                "File path cannot contain any special component or prefix",
            ));
        }

        if path.components().any(|component| {
            let file_name = component.as_str().to_ascii_uppercase();

            // Windows reserves some special DOS device names in every directory, which may be optionally
            // followed by an extension or alternate data stream name and be case insensitive. Trying to
            // write, read, or delete these files is usually not that useful even for malware, since they
            // mostly refer to console and printer devices, but it's best to avoid them entirely anyway.
            // References:
            // https://learn.microsoft.com/en-us/windows/win32/fileio/naming-a-file#naming-conventions
            // https://devblogs.microsoft.com/oldnewthing/20031022-00/?p=42073
            // https://github.com/wine-mirror/wine/blob/01269452e0fbb1f081d506bd64996590a553e2b9/dlls/ntdll/path.c#L66
            const RESERVED_WINDOWS_DEVICE_NAMES: &[&str] = &[
                "CON", "PRN", "AUX", "NUL", "COM1", "COM2", "COM3", "COM4",
                "COM5", "COM6", "COM7", "COM8", "COM9", "COM¹", "COM²", "COM³",
                "LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8",
                "LPT9", "LPT¹", "LPT²", "LPT³", "CONIN$", "CONOUT$",
            ];

            RESERVED_WINDOWS_DEVICE_NAMES.iter().any(|name| {
                file_name == *name
                    || file_name.starts_with(&format!("{name}."))
                    || file_name.starts_with(&format!("{name}:"))
            })
        }) {
            return Err(serde::de::Error::custom(
                "File path contains a reserved Windows device name",
            ));
        }

        Ok(Self(path))
    }
}
impl Serialize for SafeRelativeUtf8UnixPathBuf {
    /// Serializes the path as a normalized, `/`-separated string, refusing to
    /// emit anything the strict deserializer would not accept back.
    fn serialize<S: Serializer>(
        &self,
        serializer: S,
    ) -> Result<S::Ok, S::Error> {
        let components: Vec<_> = self.0.components().collect();

        if components.is_empty() {
            return Err(serde::ser::Error::custom("File path cannot be empty"));
        }

        if !components.iter().all(|component| component.is_normal()) {
            return Err(serde::ser::Error::custom(
                "File path cannot contain any special component or prefix",
            ));
        }

        // Iterating over components does basic normalization by e.g. removing redundant
        // slashes and collapsing `.` components, so do that to produce a cleaner output
        // friendlier to the strict deserialization algorithm above
        components
            .iter()
            .map(|component| component.as_str())
            .join("/")
            .serialize(serializer)
    }
}
impl TryFrom<String> for SafeRelativeUtf8UnixPathBuf {
    type Error = serde::de::value::Error;

    /// Validates a plain string by funneling it through the strict
    /// `Deserialize` impl, so `TryFrom` and deserialization can never
    /// disagree about what counts as a safe path.
    fn try_from(s: String) -> Result<Self, Self::Error> {
        Self::deserialize(StringDeserializer::new(s))
    }
}