Update Rust dependencies (#4139)

* Update Rust version

* Update async-compression 0.4.25 -> 0.4.27

* Update async-tungstenite 0.29.1 -> 0.30.0

* Update bytemuck 1.23.0 -> 1.23.1

* Update clap 4.5.40 -> 4.5.43

* Update deadpool-redis 0.21.1 -> 0.22.0 and redis 0.31.0 -> 0.32.4

* Update enumset 1.1.6 -> 1.1.7

* Update hyper-util 0.1.14 -> 0.1.16

* Update indexmap 2.9.0 -> 2.10.0

* Update indicatif 0.17.11 -> 0.18.0

* Update jemalloc_pprof 0.7.0 -> 0.8.1

* Update lettre 0.11.17 -> 0.11.18

* Update meilisearch-sdk 0.28.0 -> 0.29.1

* Update notify 8.0.0 -> 8.2.0 and notify-debouncer-mini 0.6.0 -> 0.7.0

* Update quick-xml 0.37.5 -> 0.38.1

* Fix theseus lint

* Update reqwest 0.12.20 -> 0.12.22

* Cargo fmt in theseus

* Update rgb 0.8.50 -> 0.8.52

* Update sentry 0.41.0 -> 0.42.0 and sentry-actix 0.41.0 -> 0.42.0

* Update serde_json 1.0.140 -> 1.0.142

* Update serde_with 3.13.0 -> 3.14.0

* Update spdx 0.10.8 -> 0.10.9

* Update sysinfo 0.35.2 -> 0.36.1

* Update tauri suite

* Fix build by updating mappings

* Update tokio 1.45.1 -> 1.47.1 and tokio-util 0.7.15 -> 0.7.16

* Update tracing-actix-web 0.7.18 -> 0.7.19

* Update zip 4.2.0 -> 4.3.0

* Misc Cargo.lock updates

* Update Dockerfiles
This commit is contained in:
Josiah Glosson
2025-08-08 15:50:44 -07:00
committed by GitHub
parent ca0c16b1fe
commit cf190d86d5
67 changed files with 1936 additions and 1890 deletions

Cargo.lock (generated, 1476 lines changed)
File diff suppressed because it is too large Load Diff

View File

@@ -25,31 +25,31 @@ actix-ws = "0.3.0"
argon2 = { version = "0.5.3", features = ["std"] } argon2 = { version = "0.5.3", features = ["std"] }
ariadne = { path = "packages/ariadne" } ariadne = { path = "packages/ariadne" }
async_zip = "0.0.17" async_zip = "0.0.17"
async-compression = { version = "0.4.25", default-features = false } async-compression = { version = "0.4.27", default-features = false }
async-recursion = "1.1.1" async-recursion = "1.1.1"
async-stripe = { version = "0.41.0", default-features = false, features = [ async-stripe = { version = "0.41.0", default-features = false, features = [
"runtime-tokio-hyper-rustls", "runtime-tokio-hyper-rustls",
] } ] }
async-trait = "0.1.88" async-trait = "0.1.88"
async-tungstenite = { version = "0.29.1", default-features = false, features = [ async-tungstenite = { version = "0.30.0", default-features = false, features = [
"futures-03-sink", "futures-03-sink",
] } ] }
async-walkdir = "2.1.0" async-walkdir = "2.1.0"
base64 = "0.22.1" base64 = "0.22.1"
bitflags = "2.9.1" bitflags = "2.9.1"
bytemuck = "1.23.0" bytemuck = "1.23.1"
bytes = "1.10.1" bytes = "1.10.1"
censor = "0.3.0" censor = "0.3.0"
chardetng = "0.1.17" chardetng = "0.1.17"
chrono = "0.4.41" chrono = "0.4.41"
clap = "4.5.40" clap = "4.5.43"
clickhouse = "0.13.3" clickhouse = "0.13.3"
color-thief = "0.2.2" color-thief = "0.2.2"
console-subscriber = "0.4.1" console-subscriber = "0.4.1"
daedalus = { path = "packages/daedalus" } daedalus = { path = "packages/daedalus" }
dashmap = "6.1.0" dashmap = "6.1.0"
data-url = "0.3.1" data-url = "0.3.1"
deadpool-redis = "0.21.1" deadpool-redis = "0.22.0"
dirs = "6.0.0" dirs = "6.0.0"
discord-rich-presence = "0.2.5" discord-rich-presence = "0.2.5"
dotenv-build = "0.1.1" dotenv-build = "0.1.1"
@@ -57,7 +57,7 @@ dotenvy = "0.15.7"
dunce = "1.0.5" dunce = "1.0.5"
either = "1.15.0" either = "1.15.0"
encoding_rs = "0.8.35" encoding_rs = "0.8.35"
enumset = "1.1.6" enumset = "1.1.7"
flate2 = "1.1.2" flate2 = "1.1.2"
fs4 = { version = "0.13.1", default-features = false } fs4 = { version = "0.13.1", default-features = false }
futures = { version = "0.3.31", default-features = false } futures = { version = "0.3.31", default-features = false }
@@ -74,15 +74,15 @@ hyper-rustls = { version = "0.27.7", default-features = false, features = [
"ring", "ring",
"tls12", "tls12",
] } ] }
hyper-util = "0.1.14" hyper-util = "0.1.16"
iana-time-zone = "0.1.63" iana-time-zone = "0.1.63"
image = { version = "0.25.6", default-features = false, features = ["rayon"] } image = { version = "0.25.6", default-features = false, features = ["rayon"] }
indexmap = "2.9.0" indexmap = "2.10.0"
indicatif = "0.17.11" indicatif = "0.18.0"
itertools = "0.14.0" itertools = "0.14.0"
jemalloc_pprof = "0.7.0" jemalloc_pprof = "0.8.1"
json-patch = { version = "4.0.0", default-features = false } json-patch = { version = "4.0.0", default-features = false }
lettre = { version = "0.11.17", default-features = false, features = [ lettre = { version = "0.11.18", default-features = false, features = [
"builder", "builder",
"hostname", "hostname",
"pool", "pool",
@@ -92,24 +92,24 @@ lettre = { version = "0.11.17", default-features = false, features = [
"smtp-transport", "smtp-transport",
] } ] }
maxminddb = "0.26.0" maxminddb = "0.26.0"
meilisearch-sdk = { version = "0.28.0", default-features = false } meilisearch-sdk = { version = "0.29.1", default-features = false }
murmur2 = "0.1.0" murmur2 = "0.1.0"
native-dialog = "0.9.0" native-dialog = "0.9.0"
notify = { version = "8.0.0", default-features = false } notify = { version = "8.2.0", default-features = false }
notify-debouncer-mini = { version = "0.6.0", default-features = false } notify-debouncer-mini = { version = "0.7.0", default-features = false }
p256 = "0.13.2" p256 = "0.13.2"
paste = "1.0.15" paste = "1.0.15"
phf = { version = "0.12.1", features = ["macros"] } phf = { version = "0.12.1", features = ["macros"] }
png = "0.17.16" png = "0.17.16"
prometheus = "0.14.0" prometheus = "0.14.0"
quartz_nbt = "0.2.9" quartz_nbt = "0.2.9"
quick-xml = "0.37.5" quick-xml = "0.38.1"
rand = "=0.8.5" # Locked on 0.8 until argon2 and p256 update to 0.9 rand = "=0.8.5" # Locked on 0.8 until argon2 and p256 update to 0.9
rand_chacha = "=0.3.1" # Locked on 0.3 until we can update rand to 0.9 rand_chacha = "=0.3.1" # Locked on 0.3 until we can update rand to 0.9
redis = "=0.31.0" # Locked on 0.31 until deadpool-redis updates to 0.32 redis = "0.32.4"
regex = "1.11.1" regex = "1.11.1"
reqwest = { version = "0.12.20", default-features = false } reqwest = { version = "0.12.22", default-features = false }
rgb = "0.8.50" rgb = "0.8.52"
rust_decimal = { version = "1.37.2", features = [ rust_decimal = { version = "1.37.2", features = [
"serde-with-float", "serde-with-float",
"serde-with-str", "serde-with-str",
@@ -121,7 +121,7 @@ rust-s3 = { version = "0.35.1", default-features = false, features = [
"tokio-rustls-tls", "tokio-rustls-tls",
] } ] }
rusty-money = "0.4.1" rusty-money = "0.4.1"
sentry = { version = "0.41.0", default-features = false, features = [ sentry = { version = "0.42.0", default-features = false, features = [
"backtrace", "backtrace",
"contexts", "contexts",
"debug-images", "debug-images",
@@ -129,45 +129,45 @@ sentry = { version = "0.41.0", default-features = false, features = [
"reqwest", "reqwest",
"rustls", "rustls",
] } ] }
sentry-actix = "0.41.0" sentry-actix = "0.42.0"
serde = "1.0.219" serde = "1.0.219"
serde_bytes = "0.11.17" serde_bytes = "0.11.17"
serde_cbor = "0.11.2" serde_cbor = "0.11.2"
serde_ini = "0.2.0" serde_ini = "0.2.0"
serde_json = "1.0.140" serde_json = "1.0.142"
serde_with = "3.13.0" serde_with = "3.14.0"
serde-xml-rs = "0.8.1" # Also an XML (de)serializer, consider dropping yaserde in favor of this serde-xml-rs = "0.8.1" # Also an XML (de)serializer, consider dropping yaserde in favor of this
sha1 = "0.10.6" sha1 = "0.10.6"
sha1_smol = { version = "1.0.1", features = ["std"] } sha1_smol = { version = "1.0.1", features = ["std"] }
sha2 = "0.10.9" sha2 = "0.10.9"
spdx = "0.10.8" spdx = "0.10.9"
sqlx = { version = "0.8.6", default-features = false } sqlx = { version = "0.8.6", default-features = false }
sysinfo = { version = "0.35.2", default-features = false } sysinfo = { version = "0.36.1", default-features = false }
tar = "0.4.44" tar = "0.4.44"
tauri = "2.6.1" tauri = "2.7.0"
tauri-build = "2.3.0" tauri-build = "2.3.1"
tauri-plugin-deep-link = "2.4.0" tauri-plugin-deep-link = "2.4.1"
tauri-plugin-dialog = "2.3.0" tauri-plugin-dialog = "2.3.2"
tauri-plugin-http = "2.5.0" tauri-plugin-http = "2.5.1"
tauri-plugin-opener = "2.4.0" tauri-plugin-opener = "2.4.0"
tauri-plugin-os = "2.3.0" tauri-plugin-os = "2.3.0"
tauri-plugin-single-instance = "2.3.0" tauri-plugin-single-instance = "2.3.2"
tauri-plugin-updater = { version = "2.9.0", default-features = false, features = [ tauri-plugin-updater = { version = "2.9.0", default-features = false, features = [
"rustls-tls", "rustls-tls",
"zip", "zip",
] } ] }
tauri-plugin-window-state = "2.3.0" tauri-plugin-window-state = "2.4.0"
tempfile = "3.20.0" tempfile = "3.20.0"
theseus = { path = "packages/app-lib" } theseus = { path = "packages/app-lib" }
thiserror = "2.0.12" thiserror = "2.0.12"
tikv-jemalloc-ctl = "0.6.0" tikv-jemalloc-ctl = "0.6.0"
tikv-jemallocator = "0.6.0" tikv-jemallocator = "0.6.0"
tokio = "1.45.1" tokio = "1.47.1"
tokio-stream = "0.1.17" tokio-stream = "0.1.17"
tokio-util = "0.7.15" tokio-util = "0.7.16"
totp-rs = "5.7.0" totp-rs = "5.7.0"
tracing = "0.1.41" tracing = "0.1.41"
tracing-actix-web = "0.7.18" tracing-actix-web = "0.7.19"
tracing-error = "0.2.1" tracing-error = "0.2.1"
tracing-subscriber = "0.3.19" tracing-subscriber = "0.3.19"
url = "2.5.4" url = "2.5.4"
@@ -179,7 +179,7 @@ whoami = "1.6.0"
winreg = "0.55.0" winreg = "0.55.0"
woothee = "0.13.0" woothee = "0.13.0"
yaserde = "0.12.0" yaserde = "0.12.0"
zip = { version = "4.2.0", default-features = false, features = [ zip = { version = "4.3.0", default-features = false, features = [
"bzip2", "bzip2",
"deflate", "deflate",
"deflate64", "deflate64",
@@ -226,7 +226,7 @@ wildcard_dependencies = "warn"
warnings = "deny" warnings = "deny"
[patch.crates-io] [patch.crates-io]
wry = { git = "https://github.com/modrinth/wry", rev = "21db186" } wry = { git = "https://github.com/modrinth/wry", rev = "f2ce0b0" }
# Optimize for speed and reduce size on release builds # Optimize for speed and reduce size on release builds
[profile.release] [profile.release]

View File

@@ -197,16 +197,14 @@ pub async fn open_link<R: Runtime>(
if url::Url::parse(&path).is_ok() if url::Url::parse(&path).is_ok()
&& !state.malicious_origins.contains(&origin) && !state.malicious_origins.contains(&origin)
&& let Some(last_click) = state.last_click
&& last_click.elapsed() < Duration::from_millis(100)
{ {
if let Some(last_click) = state.last_click {
if last_click.elapsed() < Duration::from_millis(100) {
let _ = app.opener().open_url(&path, None::<String>); let _ = app.opener().open_url(&path, None::<String>);
state.last_click = None; state.last_click = None;
return Ok(()); return Ok(());
} }
}
}
tracing::info!("Malicious click: {path} origin {origin}"); tracing::info!("Malicious click: {path} origin {origin}");
state.malicious_origins.insert(origin); state.malicious_origins.insert(origin);

View File

@@ -59,17 +59,14 @@ pub async fn login<R: Runtime>(
.url()? .url()?
.as_str() .as_str()
.starts_with("https://login.live.com/oauth20_desktop.srf") .starts_with("https://login.live.com/oauth20_desktop.srf")
{ && let Some((_, code)) =
if let Some((_, code)) =
window.url()?.query_pairs().find(|x| x.0 == "code") window.url()?.query_pairs().find(|x| x.0 == "code")
{ {
window.close()?; window.close()?;
let val = let val = minecraft_auth::finish_login(&code.clone(), flow).await?;
minecraft_auth::finish_login(&code.clone(), flow).await?;
return Ok(Some(val)); return Ok(Some(val));
} }
}
tokio::time::sleep(std::time::Duration::from_millis(50)).await; tokio::time::sleep(std::time::Duration::from_millis(50)).await;
} }

View File

@@ -63,12 +63,12 @@ pub async fn should_disable_mouseover() -> bool {
// We try to match version to 12.2 or higher. If unrecognizable to pattern or lower, we default to the css with disabled mouseover for safety // We try to match version to 12.2 or higher. If unrecognizable to pattern or lower, we default to the css with disabled mouseover for safety
if let tauri_plugin_os::Version::Semantic(major, minor, _) = if let tauri_plugin_os::Version::Semantic(major, minor, _) =
tauri_plugin_os::version() tauri_plugin_os::version()
&& major >= 12
&& minor >= 3
{ {
if major >= 12 && minor >= 3 {
// Mac os version is 12.3 or higher, we allow mouseover // Mac os version is 12.3 or higher, we allow mouseover
return false; return false;
} }
}
true true
} else { } else {
// Not macos, we allow mouseover // Not macos, we allow mouseover

View File

@@ -233,11 +233,11 @@ fn main() {
}); });
#[cfg(not(target_os = "linux"))] #[cfg(not(target_os = "linux"))]
if let Some(window) = app.get_window("main") { if let Some(window) = app.get_window("main")
if let Err(e) = window.set_shadow(true) { && let Err(e) = window.set_shadow(true)
{
tracing::warn!("Failed to set window shadow: {e}"); tracing::warn!("Failed to set window shadow: {e}");
} }
}
Ok(()) Ok(())
}); });

View File

@@ -1,6 +1,6 @@
# syntax=docker/dockerfile:1 # syntax=docker/dockerfile:1
FROM rust:1.88.0 AS build FROM rust:1.89.0 AS build
WORKDIR /usr/src/daedalus WORKDIR /usr/src/daedalus
COPY . . COPY . .

View File

@@ -506,17 +506,16 @@ async fn fetch(
return Ok(lib); return Ok(lib);
} }
} else if let Some(url) = &lib.url { } else if let Some(url) = &lib.url
if !url.is_empty() { && !url.is_empty()
{
insert_mirrored_artifact( insert_mirrored_artifact(
&lib.name, &lib.name,
None, None,
vec![ vec![
url.clone(), url.clone(),
"https://libraries.minecraft.net/" "https://libraries.minecraft.net/".to_string(),
.to_string(), "https://maven.creeperhost.net/".to_string(),
"https://maven.creeperhost.net/"
.to_string(),
maven_url.to_string(), maven_url.to_string(),
], ],
false, false,
@@ -527,7 +526,6 @@ async fn fetch(
return Ok(lib); return Ok(lib);
} }
}
// Other libraries are generally available in the "maven" directory of the installer. If they are // Other libraries are generally available in the "maven" directory of the installer. If they are
// not present here, they will be generated by Forge processors. // not present here, they will be generated by Forge processors.

View File

@@ -93,9 +93,9 @@ async fn main() -> Result<()> {
.ok() .ok()
.and_then(|x| x.parse::<bool>().ok()) .and_then(|x| x.parse::<bool>().ok())
.unwrap_or(false) .unwrap_or(false)
&& let Ok(token) = dotenvy::var("CLOUDFLARE_TOKEN")
&& let Ok(zone_id) = dotenvy::var("CLOUDFLARE_ZONE_ID")
{ {
if let Ok(token) = dotenvy::var("CLOUDFLARE_TOKEN") {
if let Ok(zone_id) = dotenvy::var("CLOUDFLARE_ZONE_ID") {
let cache_clears = upload_files let cache_clears = upload_files
.into_iter() .into_iter()
.map(|x| format_url(&x.0)) .map(|x| format_url(&x.0))
@@ -130,8 +130,6 @@ async fn main() -> Result<()> {
})?; })?;
} }
} }
}
}
Ok(()) Ok(())
} }

View File

@@ -167,20 +167,18 @@ pub async fn download_file(
let bytes = x.bytes().await; let bytes = x.bytes().await;
if let Ok(bytes) = bytes { if let Ok(bytes) = bytes {
if let Some(sha1) = sha1 { if let Some(sha1) = sha1
if &*sha1_async(bytes.clone()).await? != sha1 { && &*sha1_async(bytes.clone()).await? != sha1
{
if attempt <= 3 { if attempt <= 3 {
continue; continue;
} else { } else {
return Err( return Err(crate::ErrorKind::ChecksumFailure {
crate::ErrorKind::ChecksumFailure {
hash: sha1.to_string(), hash: sha1.to_string(),
url: url.to_string(), url: url.to_string(),
tries: attempt, tries: attempt,
} }
.into(), .into());
);
}
} }
} }

View File

@@ -1,6 +1,6 @@
# syntax=docker/dockerfile:1 # syntax=docker/dockerfile:1
FROM rust:1.88.0 AS build FROM rust:1.89.0 AS build
WORKDIR /usr/src/labrinth WORKDIR /usr/src/labrinth
COPY . . COPY . .

View File

@@ -322,13 +322,12 @@ pub async fn is_visible_collection(
} else { } else {
!collection_data.status.is_hidden() !collection_data.status.is_hidden()
}) && !collection_data.projects.is_empty(); }) && !collection_data.projects.is_empty();
if let Some(user) = &user_option { if let Some(user) = &user_option
if !authorized && !authorized
&& (user.role.is_mod() || user.id == collection_data.user_id.into()) && (user.role.is_mod() || user.id == collection_data.user_id.into())
{ {
authorized = true; authorized = true;
} }
}
Ok(authorized) Ok(authorized)
} }
@@ -356,12 +355,12 @@ pub async fn filter_visible_collections(
for collection in check_collections { for collection in check_collections {
// Collections are simple- if we are the owner or a mod, we can see it // Collections are simple- if we are the owner or a mod, we can see it
if let Some(user) = user_option { if let Some(user) = user_option
if user.role.is_mod() || user.id == collection.user_id.into() { && (user.role.is_mod() || user.id == collection.user_id.into())
{
return_collections.push(collection.into()); return_collections.push(collection.into());
} }
} }
}
Ok(return_collections) Ok(return_collections)
} }

View File

@@ -95,11 +95,11 @@ impl DBFlow {
redis: &RedisPool, redis: &RedisPool,
) -> Result<Option<DBFlow>, DatabaseError> { ) -> Result<Option<DBFlow>, DatabaseError> {
let flow = Self::get(id, redis).await?; let flow = Self::get(id, redis).await?;
if let Some(flow) = flow.as_ref() { if let Some(flow) = flow.as_ref()
if predicate(flow) { && predicate(flow)
{
Self::remove(id, redis).await?; Self::remove(id, redis).await?;
} }
}
Ok(flow) Ok(flow)
} }

View File

@@ -801,18 +801,19 @@ impl VersionField {
}; };
if let Some(count) = countable { if let Some(count) = countable {
if let Some(min) = loader_field.min_val { if let Some(min) = loader_field.min_val
if count < min { && count < min
{
return Err(format!( return Err(format!(
"Provided value '{v}' for {field_name} is less than the minimum of {min}", "Provided value '{v}' for {field_name} is less than the minimum of {min}",
v = serde_json::to_string(&value).unwrap_or_default(), v = serde_json::to_string(&value).unwrap_or_default(),
field_name = loader_field.field, field_name = loader_field.field,
)); ));
} }
}
if let Some(max) = loader_field.max_val { if let Some(max) = loader_field.max_val
if count > max { && count > max
{
return Err(format!( return Err(format!(
"Provided value '{v}' for {field_name} is greater than the maximum of {max}", "Provided value '{v}' for {field_name} is greater than the maximum of {max}",
v = serde_json::to_string(&value).unwrap_or_default(), v = serde_json::to_string(&value).unwrap_or_default(),
@@ -820,7 +821,6 @@ impl VersionField {
)); ));
} }
} }
}
Ok(VersionField { Ok(VersionField {
version_id, version_id,

View File

@@ -483,8 +483,9 @@ impl DBTeamMember {
.await?; .await?;
} }
if let Some(accepted) = new_accepted { if let Some(accepted) = new_accepted
if accepted { && accepted
{
sqlx::query!( sqlx::query!(
" "
UPDATE team_members UPDATE team_members
@@ -497,7 +498,6 @@ impl DBTeamMember {
.execute(&mut **transaction) .execute(&mut **transaction)
.await?; .await?;
} }
}
if let Some(payouts_split) = new_payouts_split { if let Some(payouts_split) = new_payouts_split {
sqlx::query!( sqlx::query!(

View File

@@ -353,11 +353,11 @@ impl RedisPool {
}; };
for (idx, key) in fetch_ids.into_iter().enumerate() { for (idx, key) in fetch_ids.into_iter().enumerate() {
if let Some(locked) = results.get(idx) { if let Some(locked) = results.get(idx)
if locked.is_none() { && locked.is_none()
{
continue; continue;
} }
}
if let Some((key, raw_key)) = ids.remove(&key) { if let Some((key, raw_key)) = ids.remove(&key) {
if let Some(val) = expired_values.remove(&key) { if let Some(val) = expired_values.remove(&key) {

View File

@@ -334,19 +334,15 @@ impl From<Version> for LegacyVersion {
// the v2 loaders are whatever the corresponding loader fields are // the v2 loaders are whatever the corresponding loader fields are
let mut loaders = let mut loaders =
data.loaders.into_iter().map(|l| l.0).collect::<Vec<_>>(); data.loaders.into_iter().map(|l| l.0).collect::<Vec<_>>();
if loaders.contains(&"mrpack".to_string()) { if loaders.contains(&"mrpack".to_string())
if let Some((_, mrpack_loaders)) = data && let Some((_, mrpack_loaders)) = data
.fields .fields
.into_iter() .into_iter()
.find(|(key, _)| key == "mrpack_loaders") .find(|(key, _)| key == "mrpack_loaders")
{ && let Ok(mrpack_loaders) = serde_json::from_value(mrpack_loaders)
if let Ok(mrpack_loaders) =
serde_json::from_value(mrpack_loaders)
{ {
loaders = mrpack_loaders; loaders = mrpack_loaders;
} }
}
}
let loaders = loaders.into_iter().map(Loader).collect::<Vec<_>>(); let loaders = loaders.into_iter().map(Loader).collect::<Vec<_>>();
Self { Self {

View File

@@ -43,8 +43,8 @@ impl LegacyResultSearchProject {
pub fn from(result_search_project: ResultSearchProject) -> Self { pub fn from(result_search_project: ResultSearchProject) -> Self {
let mut categories = result_search_project.categories; let mut categories = result_search_project.categories;
categories.extend(result_search_project.loaders.clone()); categories.extend(result_search_project.loaders.clone());
if categories.contains(&"mrpack".to_string()) { if categories.contains(&"mrpack".to_string())
if let Some(mrpack_loaders) = result_search_project && let Some(mrpack_loaders) = result_search_project
.project_loader_fields .project_loader_fields
.get("mrpack_loaders") .get("mrpack_loaders")
{ {
@@ -56,11 +56,10 @@ impl LegacyResultSearchProject {
); );
categories.retain(|c| c != "mrpack"); categories.retain(|c| c != "mrpack");
} }
}
let mut display_categories = result_search_project.display_categories; let mut display_categories = result_search_project.display_categories;
display_categories.extend(result_search_project.loaders); display_categories.extend(result_search_project.loaders);
if display_categories.contains(&"mrpack".to_string()) { if display_categories.contains(&"mrpack".to_string())
if let Some(mrpack_loaders) = result_search_project && let Some(mrpack_loaders) = result_search_project
.project_loader_fields .project_loader_fields
.get("mrpack_loaders") .get("mrpack_loaders")
{ {
@@ -72,7 +71,6 @@ impl LegacyResultSearchProject {
); );
display_categories.retain(|c| c != "mrpack"); display_categories.retain(|c| c != "mrpack");
} }
}
// Sort then remove duplicates // Sort then remove duplicates
categories.sort(); categories.sort();

View File

@@ -166,12 +166,12 @@ impl From<ProjectQueryResult> for Project {
Ok(spdx_expr) => { Ok(spdx_expr) => {
let mut vec: Vec<&str> = Vec::new(); let mut vec: Vec<&str> = Vec::new();
for node in spdx_expr.iter() { for node in spdx_expr.iter() {
if let spdx::expression::ExprNode::Req(req) = node { if let spdx::expression::ExprNode::Req(req) = node
if let Some(id) = req.req.license.id() { && let Some(id) = req.req.license.id()
{
vec.push(id.full_name); vec.push(id.full_name);
} }
} }
}
// spdx crate returns AND/OR operations in postfix order // spdx crate returns AND/OR operations in postfix order
// and it would be a lot more effort to make it actually in order // and it would be a lot more effort to make it actually in order
// so let's just ignore that and make them comma-separated // so let's just ignore that and make them comma-separated

View File

@@ -51,17 +51,17 @@ impl ProjectPermissions {
return Some(ProjectPermissions::all()); return Some(ProjectPermissions::all());
} }
if let Some(member) = project_team_member { if let Some(member) = project_team_member
if member.accepted { && member.accepted
{
return Some(member.permissions); return Some(member.permissions);
} }
}
if let Some(member) = organization_team_member { if let Some(member) = organization_team_member
if member.accepted { && member.accepted
{
return Some(member.permissions); return Some(member.permissions);
} }
}
if role.is_mod() { if role.is_mod() {
Some( Some(
@@ -107,11 +107,11 @@ impl OrganizationPermissions {
return Some(OrganizationPermissions::all()); return Some(OrganizationPermissions::all());
} }
if let Some(member) = team_member { if let Some(member) = team_member
if member.accepted { && member.accepted
{
return member.organization_permissions; return member.organization_permissions;
} }
}
if role.is_mod() { if role.is_mod() {
return Some( return Some(
OrganizationPermissions::EDIT_DETAILS OrganizationPermissions::EDIT_DETAILS

View File

@@ -45,20 +45,18 @@ impl MaxMindIndexer {
if let Ok(entries) = archive.entries() { if let Ok(entries) = archive.entries() {
for mut file in entries.flatten() { for mut file in entries.flatten() {
if let Ok(path) = file.header().path() { if let Ok(path) = file.header().path()
if path.extension().and_then(|x| x.to_str()) == Some("mmdb") && path.extension().and_then(|x| x.to_str()) == Some("mmdb")
{ {
let mut buf = Vec::new(); let mut buf = Vec::new();
file.read_to_end(&mut buf).unwrap(); file.read_to_end(&mut buf).unwrap();
let reader = let reader = maxminddb::Reader::from_source(buf).unwrap();
maxminddb::Reader::from_source(buf).unwrap();
return Ok(Some(reader)); return Ok(Some(reader));
} }
} }
} }
}
if should_panic { if should_panic {
panic!( panic!(

View File

@@ -371,8 +371,8 @@ impl AutomatedModerationQueue {
for file in for file in
files.iter().filter(|x| x.version_id == version.id.into()) files.iter().filter(|x| x.version_id == version.id.into())
{ {
if let Some(hash) = file.hashes.get("sha1") { if let Some(hash) = file.hashes.get("sha1")
if let Some((index, (sha1, _, file_name, _))) = hashes && let Some((index, (sha1, _, file_name, _))) = hashes
.iter() .iter()
.enumerate() .enumerate()
.find(|(_, (value, _, _, _))| value == hash) .find(|(_, (value, _, _, _))| value == hash)
@@ -384,7 +384,6 @@ impl AutomatedModerationQueue {
} }
} }
} }
}
// All files are on Modrinth, so we don't send any messages // All files are on Modrinth, so we don't send any messages
if hashes.is_empty() { if hashes.is_empty() {
@@ -420,13 +419,12 @@ impl AutomatedModerationQueue {
.await?; .await?;
for row in rows { for row in rows {
if let Some(sha1) = row.sha1 { if let Some(sha1) = row.sha1
if let Some((index, (sha1, _, file_name, _))) = hashes.iter().enumerate().find(|(_, (value, _, _, _))| value == &sha1) { && let Some((index, (sha1, _, file_name, _))) = hashes.iter().enumerate().find(|(_, (value, _, _, _))| value == &sha1) {
final_hashes.insert(sha1.clone(), IdentifiedFile { file_name: file_name.clone(), status: ApprovalType::from_string(&row.status).unwrap_or(ApprovalType::Unidentified) }); final_hashes.insert(sha1.clone(), IdentifiedFile { file_name: file_name.clone(), status: ApprovalType::from_string(&row.status).unwrap_or(ApprovalType::Unidentified) });
hashes.remove(index); hashes.remove(index);
} }
} }
}
if hashes.is_empty() { if hashes.is_empty() {
let metadata = MissingMetadata { let metadata = MissingMetadata {
@@ -499,8 +497,8 @@ impl AutomatedModerationQueue {
let mut insert_ids = Vec::new(); let mut insert_ids = Vec::new();
for row in rows { for row in rows {
if let Some((curse_index, (hash, _flame_id))) = flame_files.iter().enumerate().find(|(_, x)| Some(x.1 as i32) == row.flame_project_id) { if let Some((curse_index, (hash, _flame_id))) = flame_files.iter().enumerate().find(|(_, x)| Some(x.1 as i32) == row.flame_project_id)
if let Some((index, (sha1, _, file_name, _))) = hashes.iter().enumerate().find(|(_, (value, _, _, _))| value == hash) { && let Some((index, (sha1, _, file_name, _))) = hashes.iter().enumerate().find(|(_, (value, _, _, _))| value == hash) {
final_hashes.insert(sha1.clone(), IdentifiedFile { final_hashes.insert(sha1.clone(), IdentifiedFile {
file_name: file_name.clone(), file_name: file_name.clone(),
status: ApprovalType::from_string(&row.status).unwrap_or(ApprovalType::Unidentified), status: ApprovalType::from_string(&row.status).unwrap_or(ApprovalType::Unidentified),
@@ -513,7 +511,6 @@ impl AutomatedModerationQueue {
flame_files.remove(curse_index); flame_files.remove(curse_index);
} }
} }
}
if !insert_ids.is_empty() && !insert_hashes.is_empty() { if !insert_ids.is_empty() && !insert_hashes.is_empty() {
sqlx::query!( sqlx::query!(
@@ -581,8 +578,8 @@ impl AutomatedModerationQueue {
for (sha1, _pack_file, file_name, _mumur2) in hashes { for (sha1, _pack_file, file_name, _mumur2) in hashes {
let flame_file = flame_files.iter().find(|x| x.0 == sha1); let flame_file = flame_files.iter().find(|x| x.0 == sha1);
if let Some((_, flame_project_id)) = flame_file { if let Some((_, flame_project_id)) = flame_file
if let Some(project) = flame_projects.iter().find(|x| &x.id == flame_project_id) { && let Some(project) = flame_projects.iter().find(|x| &x.id == flame_project_id) {
missing_metadata.flame_files.insert(sha1, MissingMetadataFlame { missing_metadata.flame_files.insert(sha1, MissingMetadataFlame {
title: project.name.clone(), title: project.name.clone(),
file_name, file_name,
@@ -592,7 +589,6 @@ impl AutomatedModerationQueue {
continue; continue;
} }
}
missing_metadata.unknown_files.insert(sha1, file_name); missing_metadata.unknown_files.insert(sha1, file_name);
} }

View File

@@ -257,17 +257,17 @@ impl PayoutsQueue {
) )
})?; })?;
if !status.is_success() { if !status.is_success()
if let Some(obj) = value.as_object() { && let Some(obj) = value.as_object()
{
if let Some(array) = obj.get("errors") { if let Some(array) = obj.get("errors") {
#[derive(Deserialize)] #[derive(Deserialize)]
struct TremendousError { struct TremendousError {
message: String, message: String,
} }
let err = serde_json::from_value::<TremendousError>( let err =
array.clone(), serde_json::from_value::<TremendousError>(array.clone())
)
.map_err(|_| { .map_err(|_| {
ApiError::Payments( ApiError::Payments(
"could not retrieve Tremendous error json body" "could not retrieve Tremendous error json body"
@@ -282,7 +282,6 @@ impl PayoutsQueue {
"could not retrieve Tremendous error body".to_string(), "could not retrieve Tremendous error body".to_string(),
)); ));
} }
}
Ok(serde_json::from_value(value)?) Ok(serde_json::from_value(value)?)
} }
@@ -449,11 +448,11 @@ impl PayoutsQueue {
}; };
// we do not support interval gift cards with non US based currencies since we cannot do currency conversions properly // we do not support interval gift cards with non US based currencies since we cannot do currency conversions properly
if let PayoutInterval::Fixed { .. } = method.interval { if let PayoutInterval::Fixed { .. } = method.interval
if !product.currency_codes.contains(&"USD".to_string()) { && !product.currency_codes.contains(&"USD".to_string())
{
continue; continue;
} }
}
methods.push(method); methods.push(method);
} }

View File

@@ -286,8 +286,9 @@ pub async fn refund_charge(
.upsert(&mut transaction) .upsert(&mut transaction)
.await?; .await?;
if body.0.unprovision.unwrap_or(false) { if body.0.unprovision.unwrap_or(false)
if let Some(subscription_id) = charge.subscription_id { && let Some(subscription_id) = charge.subscription_id
{
let open_charge = let open_charge =
DBCharge::get_open_subscription(subscription_id, &**pool) DBCharge::get_open_subscription(subscription_id, &**pool)
.await?; .await?;
@@ -298,7 +299,6 @@ pub async fn refund_charge(
open_charge.upsert(&mut transaction).await?; open_charge.upsert(&mut transaction).await?;
} }
} }
}
transaction.commit().await?; transaction.commit().await?;
} }
@@ -392,19 +392,18 @@ pub async fn edit_subscription(
} }
} }
if let Some(interval) = &edit_subscription.interval { if let Some(interval) = &edit_subscription.interval
if let Price::Recurring { intervals } = &current_price.prices { && let Price::Recurring { intervals } = &current_price.prices
{
if let Some(price) = intervals.get(interval) { if let Some(price) = intervals.get(interval) {
open_charge.subscription_interval = Some(*interval); open_charge.subscription_interval = Some(*interval);
open_charge.amount = *price as i64; open_charge.amount = *price as i64;
} else { } else {
return Err(ApiError::InvalidInput( return Err(ApiError::InvalidInput(
"Interval is not valid for this subscription!" "Interval is not valid for this subscription!".to_string(),
.to_string(),
)); ));
} }
} }
}
let intent = if let Some(product_id) = &edit_subscription.product { let intent = if let Some(product_id) = &edit_subscription.product {
let product_price = let product_price =
@@ -1225,8 +1224,9 @@ pub async fn initiate_payment(
} }
}; };
if let Price::Recurring { .. } = price_item.prices { if let Price::Recurring { .. } = price_item.prices
if product.unitary { && product.unitary
{
let user_subscriptions = let user_subscriptions =
user_subscription_item::DBUserSubscription::get_all_user( user_subscription_item::DBUserSubscription::get_all_user(
user.id.into(), user.id.into(),
@@ -1234,13 +1234,11 @@ pub async fn initiate_payment(
) )
.await?; .await?;
let user_products = let user_products = product_item::DBProductPrice::get_many(
product_item::DBProductPrice::get_many(
&user_subscriptions &user_subscriptions
.iter() .iter()
.filter(|x| { .filter(|x| {
x.status x.status == SubscriptionStatus::Provisioned
== SubscriptionStatus::Provisioned
}) })
.map(|x| x.price_id) .map(|x| x.price_id)
.collect::<Vec<_>>(), .collect::<Vec<_>>(),
@@ -1258,7 +1256,6 @@ pub async fn initiate_payment(
)); ));
} }
} }
}
( (
price as i64, price as i64,
@@ -2004,8 +2001,7 @@ pub async fn stripe_webhook(
EventType::PaymentMethodAttached => { EventType::PaymentMethodAttached => {
if let EventObject::PaymentMethod(payment_method) = if let EventObject::PaymentMethod(payment_method) =
event.data.object event.data.object
{ && let Some(customer_id) =
if let Some(customer_id) =
payment_method.customer.map(|x| x.id()) payment_method.customer.map(|x| x.id())
{ {
let customer = stripe::Customer::retrieve( let customer = stripe::Customer::retrieve(
@@ -2038,7 +2034,6 @@ pub async fn stripe_webhook(
} }
} }
} }
}
_ => {} _ => {}
} }
} else { } else {

View File

@@ -79,14 +79,13 @@ impl TempUser {
file_host: &Arc<dyn FileHost + Send + Sync>, file_host: &Arc<dyn FileHost + Send + Sync>,
redis: &RedisPool, redis: &RedisPool,
) -> Result<crate::database::models::DBUserId, AuthenticationError> { ) -> Result<crate::database::models::DBUserId, AuthenticationError> {
if let Some(email) = &self.email { if let Some(email) = &self.email
if crate::database::models::DBUser::get_by_email(email, client) && crate::database::models::DBUser::get_by_email(email, client)
.await? .await?
.is_some() .is_some()
{ {
return Err(AuthenticationError::DuplicateUser); return Err(AuthenticationError::DuplicateUser);
} }
}
let user_id = let user_id =
crate::database::models::generate_user_id(transaction).await?; crate::database::models::generate_user_id(transaction).await?;
@@ -1269,8 +1268,9 @@ pub async fn delete_auth_provider(
.update_user_id(user.id.into(), None, &mut transaction) .update_user_id(user.id.into(), None, &mut transaction)
.await?; .await?;
if delete_provider.provider != AuthProvider::PayPal { if delete_provider.provider != AuthProvider::PayPal
if let Some(email) = user.email { && let Some(email) = user.email
{
send_email( send_email(
email, email,
"Authentication method removed", "Authentication method removed",
@@ -1282,7 +1282,6 @@ pub async fn delete_auth_provider(
None, None,
)?; )?;
} }
}
transaction.commit().await?; transaction.commit().await?;
crate::database::models::DBUser::clear_caches( crate::database::models::DBUser::clear_caches(

View File

@@ -189,8 +189,8 @@ pub async fn get_project_meta(
.iter() .iter()
.find(|x| Some(x.1.id as i32) == row.flame_project_id) .find(|x| Some(x.1.id as i32) == row.flame_project_id)
.map(|x| x.0.clone()) .map(|x| x.0.clone())
&& let Some(val) = merged.flame_files.remove(&sha1)
{ {
if let Some(val) = merged.flame_files.remove(&sha1) {
merged.identified.insert( merged.identified.insert(
sha1, sha1,
IdentifiedFile { IdentifiedFile {
@@ -201,7 +201,6 @@ pub async fn get_project_meta(
); );
} }
} }
}
Ok(HttpResponse::Ok().json(merged)) Ok(HttpResponse::Ok().json(merged))
} else { } else {

View File

@@ -185,8 +185,9 @@ pub async fn edit_pat(
) )
.await?; .await?;
if let Some(pat) = pat { if let Some(pat) = pat
if pat.user_id == user.id.into() { && pat.user_id == user.id.into()
{
let mut transaction = pool.begin().await?; let mut transaction = pool.begin().await?;
if let Some(scopes) = &info.scopes { if let Some(scopes) = &info.scopes {
@@ -248,7 +249,6 @@ pub async fn edit_pat(
) )
.await?; .await?;
} }
}
Ok(HttpResponse::NoContent().finish()) Ok(HttpResponse::NoContent().finish())
} }
@@ -276,8 +276,9 @@ pub async fn delete_pat(
) )
.await?; .await?;
if let Some(pat) = pat { if let Some(pat) = pat
if pat.user_id == user.id.into() { && pat.user_id == user.id.into()
{
let mut transaction = pool.begin().await?; let mut transaction = pool.begin().await?;
database::models::pat_item::DBPersonalAccessToken::remove( database::models::pat_item::DBPersonalAccessToken::remove(
pat.id, pat.id,
@@ -291,7 +292,6 @@ pub async fn delete_pat(
) )
.await?; .await?;
} }
}
Ok(HttpResponse::NoContent().finish()) Ok(HttpResponse::NoContent().finish())
} }

View File

@@ -185,8 +185,9 @@ pub async fn delete(
let session = DBSession::get(info.into_inner().0, &**pool, &redis).await?; let session = DBSession::get(info.into_inner().0, &**pool, &redis).await?;
if let Some(session) = session { if let Some(session) = session
if session.user_id == current_user.id.into() { && session.user_id == current_user.id.into()
{
let mut transaction = pool.begin().await?; let mut transaction = pool.begin().await?;
DBSession::remove(session.id, &mut transaction).await?; DBSession::remove(session.id, &mut transaction).await?;
transaction.commit().await?; transaction.commit().await?;
@@ -200,7 +201,6 @@ pub async fn delete(
) )
.await?; .await?;
} }
}
Ok(HttpResponse::NoContent().body("")) Ok(HttpResponse::NoContent().body(""))
} }

View File

@@ -401,8 +401,8 @@ async fn broadcast_to_known_local_friends(
friend.user_id friend.user_id
}; };
if friend.accepted { if friend.accepted
if let Some(socket_ids) = && let Some(socket_ids) =
sockets.sockets_by_user_id.get(&friend_id.into()) sockets.sockets_by_user_id.get(&friend_id.into())
{ {
for socket_id in socket_ids.iter() { for socket_id in socket_ids.iter() {
@@ -412,7 +412,6 @@ async fn broadcast_to_known_local_friends(
} }
} }
} }
}
Ok(()) Ok(())
} }

View File

@@ -387,9 +387,10 @@ pub async fn revenue_get(
.map(|x| (x.to_string(), HashMap::new())) .map(|x| (x.to_string(), HashMap::new()))
.collect::<HashMap<_, _>>(); .collect::<HashMap<_, _>>();
for value in payouts_values { for value in payouts_values {
if let Some(mod_id) = value.mod_id { if let Some(mod_id) = value.mod_id
if let Some(amount) = value.amount_sum { && let Some(amount) = value.amount_sum
if let Some(interval_start) = value.interval_start { && let Some(interval_start) = value.interval_start
{
let id_string = to_base62(mod_id as u64); let id_string = to_base62(mod_id as u64);
if !hm.contains_key(&id_string) { if !hm.contains_key(&id_string) {
hm.insert(id_string.clone(), HashMap::new()); hm.insert(id_string.clone(), HashMap::new());
@@ -399,8 +400,6 @@ pub async fn revenue_get(
} }
} }
} }
}
}
Ok(HttpResponse::Ok().json(hm)) Ok(HttpResponse::Ok().json(hm))
} }

View File

@@ -192,11 +192,11 @@ pub async fn collection_get(
.map(|x| x.1) .map(|x| x.1)
.ok(); .ok();
if let Some(data) = collection_data { if let Some(data) = collection_data
if is_visible_collection(&data, &user_option, false).await? { && is_visible_collection(&data, &user_option, false).await?
{
return Ok(HttpResponse::Ok().json(Collection::from(data))); return Ok(HttpResponse::Ok().json(Collection::from(data)));
} }
}
Err(ApiError::NotFound) Err(ApiError::NotFound)
} }

View File

@@ -536,11 +536,9 @@ pub async fn create_payout(
Some(true), Some(true),
) )
.await .await
&& let Some(data) = res.items.first()
{ {
if let Some(data) = res.items.first() { payout_item.platform_id = Some(data.payout_item_id.clone());
payout_item.platform_id =
Some(data.payout_item_id.clone());
}
} }
} }

View File

@@ -182,11 +182,11 @@ pub async fn project_get(
.map(|x| x.1) .map(|x| x.1)
.ok(); .ok();
if let Some(data) = project_data { if let Some(data) = project_data
if is_visible_project(&data.inner, &user_option, &pool, false).await? { && is_visible_project(&data.inner, &user_option, &pool, false).await?
{
return Ok(HttpResponse::Ok().json(Project::from(data))); return Ok(HttpResponse::Ok().json(Project::from(data)));
} }
}
Err(ApiError::NotFound) Err(ApiError::NotFound)
} }
@@ -405,8 +405,10 @@ pub async fn project_edit(
.await?; .await?;
} }
if status.is_searchable() && !project_item.inner.webhook_sent { if status.is_searchable()
if let Ok(webhook_url) = dotenvy::var("PUBLIC_DISCORD_WEBHOOK") { && !project_item.inner.webhook_sent
&& let Ok(webhook_url) = dotenvy::var("PUBLIC_DISCORD_WEBHOOK")
{
crate::util::webhook::send_discord_webhook( crate::util::webhook::send_discord_webhook(
project_item.inner.id.into(), project_item.inner.id.into(),
&pool, &pool,
@@ -428,10 +430,10 @@ pub async fn project_edit(
.execute(&mut *transaction) .execute(&mut *transaction)
.await?; .await?;
} }
}
if user.role.is_mod() { if user.role.is_mod()
if let Ok(webhook_url) = dotenvy::var("MODERATION_SLACK_WEBHOOK") { && let Ok(webhook_url) = dotenvy::var("MODERATION_SLACK_WEBHOOK")
{
crate::util::webhook::send_slack_webhook( crate::util::webhook::send_slack_webhook(
project_item.inner.id.into(), project_item.inner.id.into(),
&pool, &pool,
@@ -452,7 +454,6 @@ pub async fn project_edit(
.await .await
.ok(); .ok();
} }
}
if team_member.is_none_or(|x| !x.accepted) { if team_member.is_none_or(|x| !x.accepted) {
let notified_members = sqlx::query!( let notified_members = sqlx::query!(
@@ -694,8 +695,9 @@ pub async fn project_edit(
.await?; .await?;
} }
if let Some(links) = &new_project.link_urls { if let Some(links) = &new_project.link_urls
if !links.is_empty() { && !links.is_empty()
{
if !perms.contains(ProjectPermissions::EDIT_DETAILS) { if !perms.contains(ProjectPermissions::EDIT_DETAILS) {
return Err(ApiError::CustomAuthentication( return Err(ApiError::CustomAuthentication(
"You do not have the permissions to edit the links of this project!" "You do not have the permissions to edit the links of this project!"
@@ -720,8 +722,7 @@ pub async fn project_edit(
for (platform, url) in links { for (platform, url) in links {
if let Some(url) = url { if let Some(url) = url {
let platform_id = let platform_id = db_models::categories::LinkPlatform::get_id(
db_models::categories::LinkPlatform::get_id(
platform, platform,
&mut *transaction, &mut *transaction,
) )
@@ -746,7 +747,6 @@ pub async fn project_edit(
} }
} }
} }
}
if let Some(moderation_message) = &new_project.moderation_message { if let Some(moderation_message) = &new_project.moderation_message {
if !user.role.is_mod() if !user.role.is_mod()
&& (!project_item.inner.status.is_approved() && (!project_item.inner.status.is_approved()
@@ -2455,7 +2455,7 @@ pub async fn project_get_organization(
organization, organization,
team_members, team_members,
); );
return Ok(HttpResponse::Ok().json(organization)); Ok(HttpResponse::Ok().json(organization))
} else { } else {
Err(ApiError::NotFound) Err(ApiError::NotFound)
} }

View File

@@ -767,13 +767,14 @@ pub async fn edit_team_member(
)); ));
} }
if let Some(new_permissions) = edit_member.permissions { if let Some(new_permissions) = edit_member.permissions
if !permissions.contains(new_permissions) { && !permissions.contains(new_permissions)
{
return Err(ApiError::InvalidInput( return Err(ApiError::InvalidInput(
"The new permissions have permissions that you don't have".to_string(), "The new permissions have permissions that you don't have"
.to_string(),
)); ));
} }
}
if edit_member.organization_permissions.is_some() { if edit_member.organization_permissions.is_some() {
return Err(ApiError::InvalidInput( return Err(ApiError::InvalidInput(
@@ -800,14 +801,13 @@ pub async fn edit_team_member(
} }
if let Some(new_permissions) = edit_member.organization_permissions if let Some(new_permissions) = edit_member.organization_permissions
&& !organization_permissions.contains(new_permissions)
{ {
if !organization_permissions.contains(new_permissions) {
return Err(ApiError::InvalidInput( return Err(ApiError::InvalidInput(
"The new organization permissions have permissions that you don't have" "The new organization permissions have permissions that you don't have"
.to_string(), .to_string(),
)); ));
} }
}
if edit_member.permissions.is_some() if edit_member.permissions.is_some()
&& !organization_permissions.contains( && !organization_permissions.contains(
@@ -822,14 +822,14 @@ pub async fn edit_team_member(
} }
} }
if let Some(payouts_split) = edit_member.payouts_split { if let Some(payouts_split) = edit_member.payouts_split
if payouts_split < Decimal::ZERO || payouts_split > Decimal::from(5000) && (payouts_split < Decimal::ZERO
|| payouts_split > Decimal::from(5000))
{ {
return Err(ApiError::InvalidInput( return Err(ApiError::InvalidInput(
"Payouts split must be between 0 and 5000!".to_string(), "Payouts split must be between 0 and 5000!".to_string(),
)); ));
} }
}
DBTeamMember::edit_team_member( DBTeamMember::edit_team_member(
id, id,
@@ -883,15 +883,15 @@ pub async fn transfer_ownership(
DBTeam::get_association(id.into(), &**pool).await?; DBTeam::get_association(id.into(), &**pool).await?;
if let Some(TeamAssociationId::Project(pid)) = team_association_id { if let Some(TeamAssociationId::Project(pid)) = team_association_id {
let result = DBProject::get_id(pid, &**pool, &redis).await?; let result = DBProject::get_id(pid, &**pool, &redis).await?;
if let Some(project_item) = result { if let Some(project_item) = result
if project_item.inner.organization_id.is_some() { && project_item.inner.organization_id.is_some()
{
return Err(ApiError::InvalidInput( return Err(ApiError::InvalidInput(
"You cannot transfer ownership of a project team that is owend by an organization" "You cannot transfer ownership of a project team that is owend by an organization"
.to_string(), .to_string(),
)); ));
} }
} }
}
if !current_user.role.is_admin() { if !current_user.role.is_admin() {
let member = DBTeamMember::get_from_user_id( let member = DBTeamMember::get_from_user_id(

View File

@@ -289,8 +289,9 @@ pub async fn thread_get(
.await? .await?
.1; .1;
if let Some(mut data) = thread_data { if let Some(mut data) = thread_data
if is_authorized_thread(&data, &user, &pool).await? { && is_authorized_thread(&data, &user, &pool).await?
{
let authors = &mut data.members; let authors = &mut data.members;
authors.append( authors.append(
@@ -307,18 +308,14 @@ pub async fn thread_get(
.collect::<Vec<_>>(), .collect::<Vec<_>>(),
); );
let users: Vec<User> = database::models::DBUser::get_many_ids( let users: Vec<User> =
authors, &**pool, &redis, database::models::DBUser::get_many_ids(authors, &**pool, &redis)
)
.await? .await?
.into_iter() .into_iter()
.map(From::from) .map(From::from)
.collect(); .collect();
return Ok( return Ok(HttpResponse::Ok().json(Thread::from(data, users, &user)));
HttpResponse::Ok().json(Thread::from(data, users, &user))
);
}
} }
Err(ApiError::NotFound) Err(ApiError::NotFound)
} }
@@ -454,8 +451,8 @@ pub async fn thread_send_message(
) )
.await?; .await?;
if let Some(project) = project { if let Some(project) = project
if project.inner.status != ProjectStatus::Processing && project.inner.status != ProjectStatus::Processing
&& user.role.is_mod() && user.role.is_mod()
{ {
let members = let members =
@@ -481,7 +478,6 @@ pub async fn thread_send_message(
) )
.await?; .await?;
} }
}
} else if let Some(report_id) = thread.report_id { } else if let Some(report_id) = thread.report_id {
let report = database::models::report_item::DBReport::get( let report = database::models::report_item::DBReport::get(
report_id, &**pool, report_id, &**pool,

View File

@@ -522,11 +522,11 @@ async fn version_create_inner(
.fetch_optional(pool) .fetch_optional(pool)
.await?; .await?;
if let Some(project_status) = project_status { if let Some(project_status) = project_status
if project_status.status == ProjectStatus::Processing.as_str() { && project_status.status == ProjectStatus::Processing.as_str()
{
moderation_queue.projects.insert(project_id.into()); moderation_queue.projects.insert(project_id.into());
} }
}
Ok(HttpResponse::Ok().json(response)) Ok(HttpResponse::Ok().json(response))
} }
@@ -871,8 +871,8 @@ pub async fn upload_file(
ref format, ref format,
ref files, ref files,
} = validation_result } = validation_result
&& dependencies.is_empty()
{ {
if dependencies.is_empty() {
let hashes: Vec<Vec<u8>> = format let hashes: Vec<Vec<u8>> = format
.files .files
.iter() .iter()
@@ -933,7 +933,6 @@ pub async fn upload_file(
} }
} }
} }
}
let data = data.freeze(); let data = data.freeze();
let primary = (validation_result.is_passed() let primary = (validation_result.is_passed()
@@ -974,11 +973,11 @@ pub async fn upload_file(
)); ));
} }
if let ValidationResult::Warning(msg) = validation_result { if let ValidationResult::Warning(msg) = validation_result
if primary { && primary
{
return Err(CreateError::InvalidInput(msg.to_string())); return Err(CreateError::InvalidInput(msg.to_string()));
} }
}
let url = format!("{cdn_url}/{file_path_encode}"); let url = format!("{cdn_url}/{file_path_encode}");

View File

@@ -148,8 +148,7 @@ pub async fn get_update_from_hash(
&redis, &redis,
) )
.await? .await?
{ && let Some(project) = database::models::DBProject::get_id(
if let Some(project) = database::models::DBProject::get_id(
file.project_id, file.project_id,
&**pool, &**pool,
&redis, &redis,
@@ -175,14 +174,10 @@ pub async fn get_update_from_hash(
} }
if let Some(loader_fields) = &update_data.loader_fields { if let Some(loader_fields) = &update_data.loader_fields {
for (key, values) in loader_fields { for (key, values) in loader_fields {
bool &= if let Some(x_vf) = x bool &= if let Some(x_vf) =
.version_fields x.version_fields.iter().find(|y| y.field_name == *key)
.iter()
.find(|y| y.field_name == *key)
{ {
values values.iter().any(|v| x_vf.value.contains_json_value(v))
.iter()
.any(|v| x_vf.value.contains_json_value(v))
} else { } else {
true true
}; };
@@ -193,20 +188,15 @@ pub async fn get_update_from_hash(
.sorted(); .sorted();
if let Some(first) = versions.next_back() { if let Some(first) = versions.next_back() {
if !is_visible_version( if !is_visible_version(&first.inner, &user_option, &pool, &redis)
&first.inner,
&user_option,
&pool,
&redis,
)
.await? .await?
{ {
return Err(ApiError::NotFound); return Err(ApiError::NotFound);
} }
return Ok(HttpResponse::Ok() return Ok(
.json(models::projects::Version::from(first))); HttpResponse::Ok().json(models::projects::Version::from(first))
} );
} }
} }
Err(ApiError::NotFound) Err(ApiError::NotFound)
@@ -398,15 +388,14 @@ pub async fn update_files(
if let Some(version) = versions if let Some(version) = versions
.iter() .iter()
.find(|x| x.inner.project_id == file.project_id) .find(|x| x.inner.project_id == file.project_id)
&& let Some(hash) = file.hashes.get(&algorithm)
{ {
if let Some(hash) = file.hashes.get(&algorithm) {
response.insert( response.insert(
hash.clone(), hash.clone(),
models::projects::Version::from(version.clone()), models::projects::Version::from(version.clone()),
); );
} }
} }
}
Ok(HttpResponse::Ok().json(response)) Ok(HttpResponse::Ok().json(response))
} }
@@ -484,8 +473,8 @@ pub async fn update_individual_files(
for project in projects { for project in projects {
for file in files.iter().filter(|x| x.project_id == project.inner.id) { for file in files.iter().filter(|x| x.project_id == project.inner.id) {
if let Some(hash) = file.hashes.get(&algorithm) { if let Some(hash) = file.hashes.get(&algorithm)
if let Some(query_file) = && let Some(query_file) =
update_data.hashes.iter().find(|x| &x.hash == hash) update_data.hashes.iter().find(|x| &x.hash == hash)
{ {
let version = all_versions let version = all_versions
@@ -494,23 +483,17 @@ pub async fn update_individual_files(
.filter(|x| { .filter(|x| {
let mut bool = true; let mut bool = true;
if let Some(version_types) = if let Some(version_types) = &query_file.version_types {
&query_file.version_types bool &= version_types
{ .iter()
bool &= version_types.iter().any(|y| { .any(|y| y.as_str() == x.inner.version_type);
y.as_str() == x.inner.version_type
});
} }
if let Some(loaders) = &query_file.loaders { if let Some(loaders) = &query_file.loaders {
bool &= x bool &=
.loaders x.loaders.iter().any(|y| loaders.contains(y));
.iter()
.any(|y| loaders.contains(y));
} }
if let Some(loader_fields) = if let Some(loader_fields) = &query_file.loader_fields {
&query_file.loader_fields
{
for (key, values) in loader_fields { for (key, values) in loader_fields {
bool &= if let Some(x_vf) = x bool &= if let Some(x_vf) = x
.version_fields .version_fields
@@ -530,8 +513,8 @@ pub async fn update_individual_files(
.sorted() .sorted()
.next_back(); .next_back();
if let Some(version) = version { if let Some(version) = version
if is_visible_version( && is_visible_version(
&version.inner, &version.inner,
&user_option, &user_option,
&pool, &pool,
@@ -541,16 +524,12 @@ pub async fn update_individual_files(
{ {
response.insert( response.insert(
hash.clone(), hash.clone(),
models::projects::Version::from( models::projects::Version::from(version.clone()),
version.clone(),
),
); );
} }
} }
} }
} }
}
}
Ok(HttpResponse::Ok().json(response)) Ok(HttpResponse::Ok().json(response))
} }

View File

@@ -106,15 +106,14 @@ pub async fn version_project_get_helper(
|| x.inner.version_number == id.1 || x.inner.version_number == id.1
}); });
if let Some(version) = version { if let Some(version) = version
if is_visible_version(&version.inner, &user_option, &pool, &redis) && is_visible_version(&version.inner, &user_option, &pool, &redis)
.await? .await?
{ {
return Ok(HttpResponse::Ok() return Ok(HttpResponse::Ok()
.json(models::projects::Version::from(version))); .json(models::projects::Version::from(version)));
} }
} }
}
Err(ApiError::NotFound) Err(ApiError::NotFound)
} }
@@ -190,13 +189,13 @@ pub async fn version_get_helper(
.map(|x| x.1) .map(|x| x.1)
.ok(); .ok();
if let Some(data) = version_data { if let Some(data) = version_data
if is_visible_version(&data.inner, &user_option, &pool, &redis).await? { && is_visible_version(&data.inner, &user_option, &pool, &redis).await?
{
return Ok( return Ok(
HttpResponse::Ok().json(models::projects::Version::from(data)) HttpResponse::Ok().json(models::projects::Version::from(data))
); );
} }
}
Err(ApiError::NotFound) Err(ApiError::NotFound)
} }

View File

@@ -15,15 +15,13 @@ pub async fn get_user_status(
return Some(friend_status); return Some(friend_status);
} }
if let Ok(mut conn) = redis.pool.get().await { if let Ok(mut conn) = redis.pool.get().await
if let Ok(mut statuses) = && let Ok(mut statuses) =
conn.sscan::<_, String>(get_field_name(user)).await conn.sscan::<_, String>(get_field_name(user)).await
&& let Some(status_json) = statuses.next_item().await
{ {
if let Some(status_json) = statuses.next_item().await {
return serde_json::from_str::<UserStatus>(&status_json).ok(); return serde_json::from_str::<UserStatus>(&status_json).ok();
} }
}
}
None None
} }

View File

@@ -138,13 +138,12 @@ fn process_image(
let (orig_width, orig_height) = img.dimensions(); let (orig_width, orig_height) = img.dimensions();
let aspect_ratio = orig_width as f32 / orig_height as f32; let aspect_ratio = orig_width as f32 / orig_height as f32;
if let Some(target_width) = target_width { if let Some(target_width) = target_width
if img.width() > target_width { && img.width() > target_width
let new_height = {
(target_width as f32 / aspect_ratio).round() as u32; let new_height = (target_width as f32 / aspect_ratio).round() as u32;
img = img.resize(target_width, new_height, FilterType::Lanczos3); img = img.resize(target_width, new_height, FilterType::Lanczos3);
} }
}
if let Some(min_aspect_ratio) = min_aspect_ratio { if let Some(min_aspect_ratio) = min_aspect_ratio {
// Crop if necessary // Crop if necessary

View File

@@ -133,13 +133,12 @@ pub async fn rate_limit_middleware(
.expect("Rate limiter not configured properly") .expect("Rate limiter not configured properly")
.clone(); .clone();
if let Some(key) = req.headers().get("x-ratelimit-key") { if let Some(key) = req.headers().get("x-ratelimit-key")
if key.to_str().ok() && key.to_str().ok()
== dotenvy::var("RATE_LIMIT_IGNORE_KEY").ok().as_deref() == dotenvy::var("RATE_LIMIT_IGNORE_KEY").ok().as_deref()
{ {
return Ok(next.call(req).await?.map_into_left_body()); return Ok(next.call(req).await?.map_into_left_body());
} }
}
let conn_info = req.connection_info().clone(); let conn_info = req.connection_info().clone();
let ip = if parse_var("CLOUDFLARE_INTEGRATION").unwrap_or(false) { let ip = if parse_var("CLOUDFLARE_INTEGRATION").unwrap_or(false) {

View File

@@ -22,8 +22,9 @@ pub fn validation_errors_to_string(
let key_option = map.keys().next(); let key_option = map.keys().next();
if let Some(field) = key_option { if let Some(field) = key_option
if let Some(error) = map.get(field) { && let Some(error) = map.get(field)
{
return match error { return match error {
ValidationErrorsKind::Struct(errors) => { ValidationErrorsKind::Struct(errors) => {
validation_errors_to_string( validation_errors_to_string(
@@ -54,7 +55,8 @@ pub fn validation_errors_to_string(
&mut output, &mut output,
"Field {field} failed validation with error: {}", "Field {field} failed validation with error: {}",
error.code error.code
).unwrap(); )
.unwrap();
} }
} }
@@ -62,7 +64,6 @@ pub fn validation_errors_to_string(
} }
}; };
} }
}
String::new() String::new()
} }

View File

@@ -238,8 +238,9 @@ pub async fn send_slack_webhook(
} }
}); });
if let Some(icon_url) = metadata.project_icon_url { if let Some(icon_url) = metadata.project_icon_url
if let Some(project_block) = project_block.as_object_mut() { && let Some(project_block) = project_block.as_object_mut()
{
project_block.insert( project_block.insert(
"accessory".to_string(), "accessory".to_string(),
serde_json::json!({ serde_json::json!({
@@ -249,7 +250,6 @@ pub async fn send_slack_webhook(
}), }),
); );
} }
}
blocks.push(project_block); blocks.push(project_block);

View File

@@ -222,11 +222,11 @@ impl<'a, A: Api> PermissionsTest<'a, A> {
resp.status().as_u16() resp.status().as_u16()
)); ));
} }
if resp.status() == StatusCode::OK { if resp.status() == StatusCode::OK
if let Some(failure_json_check) = &self.failure_json_check { && let Some(failure_json_check) = &self.failure_json_check
{
failure_json_check(&test::read_body_json(resp).await); failure_json_check(&test::read_body_json(resp).await);
} }
}
// Failure test- logged in on a non-team user // Failure test- logged in on a non-team user
let resp = req_gen(PermissionsTestContext { let resp = req_gen(PermissionsTestContext {
@@ -246,11 +246,11 @@ impl<'a, A: Api> PermissionsTest<'a, A> {
resp.status().as_u16() resp.status().as_u16()
)); ));
} }
if resp.status() == StatusCode::OK { if resp.status() == StatusCode::OK
if let Some(failure_json_check) = &self.failure_json_check { && let Some(failure_json_check) = &self.failure_json_check
{
failure_json_check(&test::read_body_json(resp).await); failure_json_check(&test::read_body_json(resp).await);
} }
}
// Failure test- logged in with EVERY non-relevant permission // Failure test- logged in with EVERY non-relevant permission
let resp: ServiceResponse = req_gen(PermissionsTestContext { let resp: ServiceResponse = req_gen(PermissionsTestContext {
@@ -270,11 +270,11 @@ impl<'a, A: Api> PermissionsTest<'a, A> {
resp.status().as_u16() resp.status().as_u16()
)); ));
} }
if resp.status() == StatusCode::OK { if resp.status() == StatusCode::OK
if let Some(failure_json_check) = &self.failure_json_check { && let Some(failure_json_check) = &self.failure_json_check
{
failure_json_check(&test::read_body_json(resp).await); failure_json_check(&test::read_body_json(resp).await);
} }
}
// Patch user's permissions to success permissions // Patch user's permissions to success permissions
modify_user_team_permissions( modify_user_team_permissions(
@@ -300,11 +300,11 @@ impl<'a, A: Api> PermissionsTest<'a, A> {
resp.status().as_u16() resp.status().as_u16()
)); ));
} }
if resp.status() == StatusCode::OK { if resp.status() == StatusCode::OK
if let Some(success_json_check) = &self.success_json_check { && let Some(success_json_check) = &self.success_json_check
{
success_json_check(&test::read_body_json(resp).await); success_json_check(&test::read_body_json(resp).await);
} }
}
// If the remove_user flag is set, remove the user from the project // If the remove_user flag is set, remove the user from the project
// Relevant for existing projects/users // Relevant for existing projects/users

View File

@@ -1,2 +1,2 @@
allow-dbg-in-tests = true allow-dbg-in-tests = true
msrv = "1.88.0" msrv = "1.89.0"

View File

@@ -50,11 +50,11 @@ pub async fn parse_command(
// We assume anything else is a filepath to an .mrpack file // We assume anything else is a filepath to an .mrpack file
let path = PathBuf::from(command_string); let path = PathBuf::from(command_string);
let path = io::canonicalize(path)?; let path = io::canonicalize(path)?;
if let Some(ext) = path.extension() { if let Some(ext) = path.extension()
if ext == "mrpack" { && ext == "mrpack"
{
return Ok(CommandPayload::RunMRPack { path }); return Ok(CommandPayload::RunMRPack { path });
} }
}
emit_warning(&format!( emit_warning(&format!(
"Invalid command, unrecognized filetype: {}", "Invalid command, unrecognized filetype: {}",
path.display() path.display()

View File

@@ -106,15 +106,15 @@ pub async fn auto_install_java(java_version: u32) -> crate::Result<PathBuf> {
})?; })?;
// removes the old installation of java // removes the old installation of java
if let Some(file) = archive.file_names().next() { if let Some(file) = archive.file_names().next()
if let Some(dir) = file.split('/').next() { && let Some(dir) = file.split('/').next()
{
let path = path.join(dir); let path = path.join(dir);
if path.exists() { if path.exists() {
io::remove_dir_all(path).await?; io::remove_dir_all(path).await?;
} }
} }
}
emit_loading(&loading_bar, 0.0, Some("Extracting java"))?; emit_loading(&loading_bar, 0.0, Some("Extracting java"))?;
archive.extract(&path).map_err(|_| { archive.extract(&path).map_err(|_| {

View File

@@ -54,13 +54,13 @@ pub async fn remove_user(uuid: uuid::Uuid) -> crate::Result<()> {
if let Some((uuid, user)) = users.remove(&uuid) { if let Some((uuid, user)) = users.remove(&uuid) {
Credentials::remove(uuid, &state.pool).await?; Credentials::remove(uuid, &state.pool).await?;
if user.active { if user.active
if let Some((_, mut user)) = users.into_iter().next() { && let Some((_, mut user)) = users.into_iter().next()
{
user.active = true; user.active = true;
user.upsert(&state.pool).await?; user.upsert(&state.pool).await?;
} }
} }
}
Ok(()) Ok(())
} }

View File

@@ -221,15 +221,15 @@ async fn import_atlauncher_unmanaged(
.unwrap_or_else(|| backup_name.to_string()); .unwrap_or_else(|| backup_name.to_string());
prof.install_stage = ProfileInstallStage::PackInstalling; prof.install_stage = ProfileInstallStage::PackInstalling;
if let Some(ref project_id) = description.project_id { if let Some(ref project_id) = description.project_id
if let Some(ref version_id) = description.version_id { && let Some(ref version_id) = description.version_id
{
prof.linked_data = Some(LinkedData { prof.linked_data = Some(LinkedData {
project_id: project_id.clone(), project_id: project_id.clone(),
version_id: version_id.clone(), version_id: version_id.clone(),
locked: true, locked: true,
}) })
} }
}
prof.icon_path = description prof.icon_path = description
.icon .icon

View File

@@ -383,8 +383,9 @@ pub async fn set_profile_information(
.unwrap_or_else(|| backup_name.to_string()); .unwrap_or_else(|| backup_name.to_string());
prof.install_stage = ProfileInstallStage::PackInstalling; prof.install_stage = ProfileInstallStage::PackInstalling;
if let Some(ref project_id) = description.project_id { if let Some(ref project_id) = description.project_id
if let Some(ref version_id) = description.version_id { && let Some(ref version_id) = description.version_id
{
prof.linked_data = Some(LinkedData { prof.linked_data = Some(LinkedData {
project_id: project_id.clone(), project_id: project_id.clone(),
version_id: version_id.clone(), version_id: version_id.clone(),
@@ -395,7 +396,6 @@ pub async fn set_profile_information(
}, },
}) })
} }
}
prof.icon_path = description prof.icon_path = description
.icon .icon

View File

@@ -149,14 +149,13 @@ pub async fn install_zipped_mrpack_files(
let profile_path = profile_path.clone(); let profile_path = profile_path.clone();
async move { async move {
//TODO: Future update: prompt user for optional files in a modpack //TODO: Future update: prompt user for optional files in a modpack
if let Some(env) = project.env { if let Some(env) = project.env
if env && env
.get(&EnvType::Client) .get(&EnvType::Client)
.is_some_and(|x| x == &SideType::Unsupported) .is_some_and(|x| x == &SideType::Unsupported)
{ {
return Ok(()); return Ok(());
} }
}
let file = fetch_mirrors( let file = fetch_mirrors(
&project &project
@@ -375,15 +374,15 @@ pub async fn remove_all_related_files(
) )
.await? .await?
{ {
if let Some(metadata) = &project.metadata { if let Some(metadata) = &project.metadata
if to_remove.contains(&metadata.project_id) { && to_remove.contains(&metadata.project_id)
{
let path = profile_full_path.join(file_path); let path = profile_full_path.join(file_path);
if path.exists() { if path.exists() {
io::remove_file(&path).await?; io::remove_file(&path).await?;
} }
} }
} }
}
// Iterate over all Modrinth project file paths in the json, and remove them // Iterate over all Modrinth project file paths in the json, and remove them
// (There should be few, but this removes any files the .mrpack intended as Modrinth projects but were unrecognized) // (There should be few, but this removes any files the .mrpack intended as Modrinth projects but were unrecognized)

View File

@@ -337,8 +337,8 @@ pub async fn update_project(
) )
.await? .await?
.remove(project_path) .remove(project_path)
&& let Some(update_version) = &file.update_version_id
{ {
if let Some(update_version) = &file.update_version_id {
let path = Profile::add_project_version( let path = Profile::add_project_version(
profile_path, profile_path,
update_version, update_version,
@@ -353,13 +353,11 @@ pub async fn update_project(
} }
if !skip_send_event.unwrap_or(false) { if !skip_send_event.unwrap_or(false) {
emit_profile(profile_path, ProfilePayloadType::Edited) emit_profile(profile_path, ProfilePayloadType::Edited).await?;
.await?;
} }
return Ok(path); return Ok(path);
} }
}
Err(crate::ErrorKind::InputError( Err(crate::ErrorKind::InputError(
"This project cannot be updated!".to_string(), "This project cannot be updated!".to_string(),
@@ -479,11 +477,11 @@ pub async fn export_mrpack(
let included_export_candidates = included_export_candidates let included_export_candidates = included_export_candidates
.into_iter() .into_iter()
.filter(|x| { .filter(|x| {
if let Some(f) = PathBuf::from(x).file_name() { if let Some(f) = PathBuf::from(x).file_name()
if f.to_string_lossy().starts_with(".DS_Store") { && f.to_string_lossy().starts_with(".DS_Store")
{
return false; return false;
} }
}
true true
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();

View File

@@ -184,6 +184,7 @@ pub enum LoadingBarType {
} }
#[derive(Serialize, Clone)] #[derive(Serialize, Clone)]
#[cfg(feature = "tauri")]
pub struct LoadingPayload { pub struct LoadingPayload {
pub event: LoadingBarType, pub event: LoadingBarType,
pub loader_uuid: Uuid, pub loader_uuid: Uuid,
@@ -192,11 +193,7 @@ pub struct LoadingPayload {
} }
#[derive(Serialize, Clone)] #[derive(Serialize, Clone)]
pub struct OfflinePayload { #[cfg(feature = "tauri")]
pub offline: bool,
}
#[derive(Serialize, Clone)]
pub struct WarningPayload { pub struct WarningPayload {
pub message: String, pub message: String,
} }
@@ -220,12 +217,14 @@ pub enum CommandPayload {
} }
#[derive(Serialize, Clone)] #[derive(Serialize, Clone)]
#[cfg(feature = "tauri")]
pub struct ProcessPayload { pub struct ProcessPayload {
pub profile_path_id: String, pub profile_path_id: String,
pub uuid: Uuid, pub uuid: Uuid,
pub event: ProcessPayloadType, pub event: ProcessPayloadType,
pub message: String, pub message: String,
} }
#[derive(Serialize, Clone, Debug)] #[derive(Serialize, Clone, Debug)]
#[serde(rename_all = "snake_case")] #[serde(rename_all = "snake_case")]
pub enum ProcessPayloadType { pub enum ProcessPayloadType {
@@ -234,11 +233,13 @@ pub enum ProcessPayloadType {
} }
#[derive(Serialize, Clone)] #[derive(Serialize, Clone)]
#[cfg(feature = "tauri")]
pub struct ProfilePayload { pub struct ProfilePayload {
pub profile_path_id: String, pub profile_path_id: String,
#[serde(flatten)] #[serde(flatten)]
pub event: ProfilePayloadType, pub event: ProfilePayloadType,
} }
#[derive(Serialize, Clone)] #[derive(Serialize, Clone)]
#[serde(tag = "event", rename_all = "snake_case")] #[serde(tag = "event", rename_all = "snake_case")]
pub enum ProfilePayloadType { pub enum ProfilePayloadType {
@@ -257,6 +258,16 @@ pub enum ProfilePayloadType {
Removed, Removed,
} }
#[derive(Serialize, Clone)]
#[serde(rename_all = "snake_case")]
#[serde(tag = "event")]
pub enum FriendPayload {
FriendRequest { from: UserId },
UserOffline { id: UserId },
StatusUpdate { user_status: UserStatus },
StatusSync,
}
#[derive(Debug, thiserror::Error)] #[derive(Debug, thiserror::Error)]
pub enum EventError { pub enum EventError {
#[error("Event state was not properly initialized")] #[error("Event state was not properly initialized")]
@@ -269,13 +280,3 @@ pub enum EventError {
#[error("Tauri error: {0}")] #[error("Tauri error: {0}")]
TauriError(#[from] tauri::Error), TauriError(#[from] tauri::Error),
} }
#[derive(Serialize, Clone)]
#[serde(rename_all = "snake_case")]
#[serde(tag = "event")]
pub enum FriendPayload {
FriendRequest { from: UserId },
UserOffline { id: UserId },
StatusUpdate { user_status: UserStatus },
StatusSync,
}

View File

@@ -32,16 +32,16 @@ pub fn get_class_paths(
let mut cps = libraries let mut cps = libraries
.iter() .iter()
.filter_map(|library| { .filter_map(|library| {
if let Some(rules) = &library.rules { if let Some(rules) = &library.rules
if !parse_rules( && !parse_rules(
rules, rules,
java_arch, java_arch,
&QuickPlayType::None, &QuickPlayType::None,
minecraft_updated, minecraft_updated,
) { )
{
return None; return None;
} }
}
if !library.include_in_classpath { if !library.include_in_classpath {
return None; return None;
@@ -504,12 +504,12 @@ pub async fn get_processor_main_class(
let mut line = line.map_err(IOError::from)?; let mut line = line.map_err(IOError::from)?;
line.retain(|c| !c.is_whitespace()); line.retain(|c| !c.is_whitespace());
if line.starts_with("Main-Class:") { if line.starts_with("Main-Class:")
if let Some(class) = line.split(':').nth(1) { && let Some(class) = line.split(':').nth(1)
{
return Ok(Some(class.to_string())); return Ok(Some(class.to_string()));
} }
} }
}
Ok::<Option<String>, crate::Error>(None) Ok::<Option<String>, crate::Error>(None)
}) })

View File

@@ -290,12 +290,11 @@ pub async fn download_libraries(
loading_try_for_each_concurrent( loading_try_for_each_concurrent(
stream::iter(libraries.iter()) stream::iter(libraries.iter())
.map(Ok::<&Library, crate::Error>), None, loading_bar,loading_amount,num_files, None,|library| async move { .map(Ok::<&Library, crate::Error>), None, loading_bar,loading_amount,num_files, None,|library| async move {
if let Some(rules) = &library.rules { if let Some(rules) = &library.rules
if !parse_rules(rules, java_arch, &QuickPlayType::None, minecraft_updated) { && !parse_rules(rules, java_arch, &QuickPlayType::None, minecraft_updated) {
tracing::trace!("Skipped library {}", &library.name); tracing::trace!("Skipped library {}", &library.name);
return Ok(()); return Ok(());
} }
}
if !library.downloadable { if !library.downloadable {
tracing::trace!("Skipped non-downloadable library {}", &library.name); tracing::trace!("Skipped non-downloadable library {}", &library.name);
@@ -311,15 +310,14 @@ pub async fn download_libraries(
return Ok(()); return Ok(());
} }
if let Some(d::minecraft::LibraryDownloads { artifact: Some(ref artifact), ..}) = library.downloads { if let Some(d::minecraft::LibraryDownloads { artifact: Some(ref artifact), ..}) = library.downloads
if !artifact.url.is_empty(){ && !artifact.url.is_empty(){
let bytes = fetch(&artifact.url, Some(&artifact.sha1), &st.fetch_semaphore, &st.pool) let bytes = fetch(&artifact.url, Some(&artifact.sha1), &st.fetch_semaphore, &st.pool)
.await?; .await?;
write(&path, &bytes, &st.io_semaphore).await?; write(&path, &bytes, &st.io_semaphore).await?;
tracing::trace!("Fetched library {} to path {:?}", &library.name, &path); tracing::trace!("Fetched library {} to path {:?}", &library.name, &path);
return Ok::<_, crate::Error>(()); return Ok::<_, crate::Error>(());
} }
}
let url = [ let url = [
library library

View File

@@ -341,11 +341,11 @@ pub async fn install_minecraft(
// Forge processors (90-100) // Forge processors (90-100)
for (index, processor) in processors.iter().enumerate() { for (index, processor) in processors.iter().enumerate() {
if let Some(sides) = &processor.sides { if let Some(sides) = &processor.sides
if !sides.contains(&String::from("client")) { && !sides.contains(&String::from("client"))
{
continue; continue;
} }
}
let cp = { let cp = {
let mut cp = processor.classpath.clone(); let mut cp = processor.classpath.clone();

View File

@@ -385,11 +385,11 @@ impl DirectoryInfo {
return Err(e); return Err(e);
} }
} else { } else {
if let Some(disk_usage) = get_disk_usage(&move_dir)? { if let Some(disk_usage) = get_disk_usage(&move_dir)?
if total_size > disk_usage { && total_size > disk_usage
{
return Err(crate::ErrorKind::DirectoryMoveError(format!("Not enough space to move directory to {}: only {} bytes available", app_dir.display(), disk_usage)).into()); return Err(crate::ErrorKind::DirectoryMoveError(format!("Not enough space to move directory to {}: only {} bytes available", app_dir.display(), disk_usage)).into());
} }
}
let loader_bar_id = Arc::new(&loader_bar_id); let loader_bar_id = Arc::new(&loader_bar_id);
futures::future::try_join_all(paths.iter().map(|x| { futures::future::try_join_all(paths.iter().map(|x| {

View File

@@ -9,7 +9,7 @@ use ariadne::networking::message::{
ClientToServerMessage, ServerToClientMessage, ClientToServerMessage, ServerToClientMessage,
}; };
use ariadne::users::UserStatus; use ariadne::users::UserStatus;
use async_tungstenite::WebSocketStream; use async_tungstenite::WebSocketSender;
use async_tungstenite::tokio::{ConnectStream, connect_async}; use async_tungstenite::tokio::{ConnectStream, connect_async};
use async_tungstenite::tungstenite::Message; use async_tungstenite::tungstenite::Message;
use async_tungstenite::tungstenite::client::IntoClientRequest; use async_tungstenite::tungstenite::client::IntoClientRequest;
@@ -17,7 +17,6 @@ use bytes::Bytes;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use dashmap::DashMap; use dashmap::DashMap;
use either::Either; use either::Either;
use futures::stream::SplitSink;
use futures::{SinkExt, StreamExt}; use futures::{SinkExt, StreamExt};
use reqwest::Method; use reqwest::Method;
use reqwest::header::HeaderValue; use reqwest::header::HeaderValue;
@@ -32,7 +31,7 @@ use tokio::sync::{Mutex, RwLock};
use uuid::Uuid; use uuid::Uuid;
pub(super) type WriteSocket = pub(super) type WriteSocket =
Arc<RwLock<Option<SplitSink<WebSocketStream<ConnectStream>, Message>>>>; Arc<RwLock<Option<WebSocketSender<ConnectStream>>>>;
pub(super) type TunnelSockets = Arc<DashMap<Uuid, Arc<InternalTunnelSocket>>>; pub(super) type TunnelSockets = Arc<DashMap<Uuid, Arc<InternalTunnelSocket>>>;
pub struct FriendsSocket { pub struct FriendsSocket {
@@ -180,27 +179,24 @@ impl FriendsSocket {
ServerToClientMessage::FriendSocketStoppedListening { .. } => {}, // TODO ServerToClientMessage::FriendSocketStoppedListening { .. } => {}, // TODO
ServerToClientMessage::SocketConnected { to_socket, new_socket } => { ServerToClientMessage::SocketConnected { to_socket, new_socket } => {
if let Some(connected_to) = sockets.get(&to_socket) { if let Some(connected_to) = sockets.get(&to_socket)
if let InternalTunnelSocket::Listening(local_addr) = *connected_to.value().clone() { && let InternalTunnelSocket::Listening(local_addr) = *connected_to.value().clone()
if let Ok(new_stream) = TcpStream::connect(local_addr).await { && let Ok(new_stream) = TcpStream::connect(local_addr).await {
let (read, write) = new_stream.into_split(); let (read, write) = new_stream.into_split();
sockets.insert(new_socket, Arc::new(InternalTunnelSocket::Connected(Mutex::new(write)))); sockets.insert(new_socket, Arc::new(InternalTunnelSocket::Connected(Mutex::new(write))));
Self::socket_read_loop(write_handle.clone(), read, new_socket); Self::socket_read_loop(write_handle.clone(), read, new_socket);
continue; continue;
} }
}
}
let _ = Self::send_message(&write_handle, ClientToServerMessage::SocketClose { socket: new_socket }).await; let _ = Self::send_message(&write_handle, ClientToServerMessage::SocketClose { socket: new_socket }).await;
}, },
ServerToClientMessage::SocketClosed { socket } => { ServerToClientMessage::SocketClosed { socket } => {
sockets.remove_if(&socket, |_, x| matches!(*x.clone(), InternalTunnelSocket::Connected(_))); sockets.remove_if(&socket, |_, x| matches!(*x.clone(), InternalTunnelSocket::Connected(_)));
}, },
ServerToClientMessage::SocketData { socket, data } => { ServerToClientMessage::SocketData { socket, data } => {
if let Some(mut socket) = sockets.get_mut(&socket) { if let Some(mut socket) = sockets.get_mut(&socket)
if let InternalTunnelSocket::Connected(ref stream) = *socket.value_mut().clone() { && let InternalTunnelSocket::Connected(ref stream) = *socket.value_mut().clone() {
let _ = stream.lock().await.write_all(&data).await; let _ = stream.lock().await.write_all(&data).await;
} }
}
}, },
} }
} }

View File

@@ -100,8 +100,8 @@ pub async fn init_watcher() -> crate::Result<FileWatcher> {
let profile_path_str = profile_path_str.clone(); let profile_path_str = profile_path_str.clone();
let world = world.clone(); let world = world.clone();
tokio::spawn(async move { tokio::spawn(async move {
if let Ok(state) = State::get().await { if let Ok(state) = State::get().await
if let Err(e) = attached_world_data::AttachedWorldData::remove_for_world( && let Err(e) = attached_world_data::AttachedWorldData::remove_for_world(
&profile_path_str, &profile_path_str,
WorldType::Singleplayer, WorldType::Singleplayer,
&world, &world,
@@ -109,7 +109,6 @@ pub async fn init_watcher() -> crate::Result<FileWatcher> {
).await { ).await {
tracing::warn!("Failed to remove AttachedWorldData for '{world}': {e}") tracing::warn!("Failed to remove AttachedWorldData for '{world}': {e}")
} }
}
}); });
} }
Some(ProfilePayloadType::WorldUpdated { world }) Some(ProfilePayloadType::WorldUpdated { world })
@@ -150,8 +149,9 @@ pub(crate) async fn watch_profiles_init(
) { ) {
if let Ok(profiles_dir) = std::fs::read_dir(dirs.profiles_dir()) { if let Ok(profiles_dir) = std::fs::read_dir(dirs.profiles_dir()) {
for profile_dir in profiles_dir { for profile_dir in profiles_dir {
if let Ok(file_name) = profile_dir.map(|x| x.file_name()) { if let Ok(file_name) = profile_dir.map(|x| x.file_name())
if let Some(file_name) = file_name.to_str() { && let Some(file_name) = file_name.to_str()
{
if file_name.starts_with(".DS_Store") { if file_name.starts_with(".DS_Store") {
continue; continue;
}; };
@@ -161,7 +161,6 @@ pub(crate) async fn watch_profiles_init(
} }
} }
} }
}
pub(crate) async fn watch_profile( pub(crate) async fn watch_profile(
profile_path: &str, profile_path: &str,

View File

@@ -76,11 +76,10 @@ where
.loaded_config_dir .loaded_config_dir
.clone() .clone()
.and_then(|x| x.to_str().map(|x| x.to_string())) .and_then(|x| x.to_str().map(|x| x.to_string()))
&& path != old_launcher_root_str
{ {
if path != old_launcher_root_str {
settings.custom_dir = Some(path); settings.custom_dir = Some(path);
} }
}
settings.prev_custom_dir = Some(old_launcher_root_str.clone()); settings.prev_custom_dir = Some(old_launcher_root_str.clone());
@@ -136,19 +135,17 @@ where
.await?; .await?;
} }
if let Some(device_token) = minecraft_auth.token { if let Some(device_token) = minecraft_auth.token
if let Ok(private_key) = && let Ok(private_key) =
SigningKey::from_pkcs8_pem(&device_token.private_key) SigningKey::from_pkcs8_pem(&device_token.private_key)
&& let Ok(uuid) = Uuid::parse_str(&device_token.id)
{ {
if let Ok(uuid) = Uuid::parse_str(&device_token.id) {
DeviceTokenPair { DeviceTokenPair {
token: DeviceToken { token: DeviceToken {
issue_instant: device_token.token.issue_instant, issue_instant: device_token.token.issue_instant,
not_after: device_token.token.not_after, not_after: device_token.token.not_after,
token: device_token.token.token, token: device_token.token.token,
display_claims: device_token display_claims: device_token.token.display_claims,
.token
.display_claims,
}, },
key: DeviceTokenKey { key: DeviceTokenKey {
id: uuid, id: uuid,
@@ -161,8 +158,6 @@ where
.await?; .await?;
} }
} }
}
}
let mut cached_entries = vec![]; let mut cached_entries = vec![];
@@ -207,13 +202,12 @@ where
update_version, update_version,
.. ..
} = project.metadata } = project.metadata
{ && let Some(file) = version
if let Some(file) = version
.files .files
.iter() .iter()
.find(|x| x.hashes.get("sha512") == Some(&sha512)) .find(|x| x.hashes.get("sha512") == Some(&sha512))
&& let Some(sha1) = file.hashes.get("sha1")
{ {
if let Some(sha1) = file.hashes.get("sha1") {
if let Ok(metadata) = full_path.metadata() { if let Ok(metadata) = full_path.metadata() {
let file_name = format!( let file_name = format!(
"{}/{}", "{}/{}",
@@ -227,24 +221,24 @@ where
path: file_name, path: file_name,
size: metadata.len(), size: metadata.len(),
hash: sha1.clone(), hash: sha1.clone(),
project_type: ProjectType::get_from_parent_folder(&full_path), project_type:
ProjectType::get_from_parent_folder(
&full_path,
),
}, },
)); ));
} }
cached_entries.push(CacheValue::File( cached_entries.push(CacheValue::File(CachedFile {
CachedFile {
hash: sha1.clone(), hash: sha1.clone(),
project_id: version.project_id.clone(), project_id: version.project_id.clone(),
version_id: version.id.clone(), version_id: version.id.clone(),
}, }));
));
if let Some(update_version) = update_version { if let Some(update_version) = update_version {
let mod_loader: ModLoader = let mod_loader: ModLoader =
profile.metadata.loader.into(); profile.metadata.loader.into();
cached_entries.push( cached_entries.push(CacheValue::FileUpdate(
CacheValue::FileUpdate(
CachedFileUpdate { CachedFileUpdate {
hash: sha1.clone(), hash: sha1.clone(),
game_version: profile game_version: profile
@@ -252,15 +246,13 @@ where
.game_version .game_version
.clone(), .clone(),
loaders: vec![ loaders: vec![
mod_loader mod_loader.as_str().to_string(),
.as_str()
.to_string(),
], ],
update_version_id: update_version_id: update_version
update_version.id.clone(), .id
.clone(),
}, },
), ));
);
cached_entries.push(CacheValue::Version( cached_entries.push(CacheValue::Version(
(*update_version).into(), (*update_version).into(),
@@ -280,9 +272,8 @@ where
badges: 0, badges: 0,
}; };
cached_entries.push(CacheValue::User( cached_entries
user.clone(), .push(CacheValue::User(user.clone()));
));
TeamMember { TeamMember {
team_id: x.team_id, team_id: x.team_id,
@@ -296,11 +287,8 @@ where
cached_entries.push(CacheValue::Team(members)); cached_entries.push(CacheValue::Team(members));
cached_entries.push(CacheValue::Version( cached_entries
(*version).into(), .push(CacheValue::Version((*version).into()));
));
}
}
} }
} }
@@ -332,17 +320,16 @@ where
.map(|x| x.id), .map(|x| x.id),
groups: profile.metadata.groups, groups: profile.metadata.groups,
linked_data: profile.metadata.linked_data.and_then(|x| { linked_data: profile.metadata.linked_data.and_then(|x| {
if let Some(project_id) = x.project_id { if let Some(project_id) = x.project_id
if let Some(version_id) = x.version_id { && let Some(version_id) = x.version_id
if let Some(locked) = x.locked { && let Some(locked) = x.locked
{
return Some(LinkedData { return Some(LinkedData {
project_id, project_id,
version_id, version_id,
locked, locked,
}); });
} }
}
}
None None
}), }),

View File

@@ -393,11 +393,10 @@ impl Credentials {
.. ..
}, },
) = *err.raw ) = *err.raw
&& (source.is_connect() || source.is_timeout())
{ {
if source.is_connect() || source.is_timeout() {
return Ok(Some(creds)); return Ok(Some(creds));
} }
}
Err(err) Err(err)
} }
@@ -640,10 +639,9 @@ impl DeviceTokenPair {
.fetch_optional(exec) .fetch_optional(exec)
.await?; .await?;
if let Some(x) = res { if let Some(x) = res
if let Ok(uuid) = Uuid::parse_str(&x.uuid) { && let Ok(uuid) = Uuid::parse_str(&x.uuid)
if let Ok(private_key) = && let Ok(private_key) = SigningKey::from_pkcs8_pem(&x.private_key)
SigningKey::from_pkcs8_pem(&x.private_key)
{ {
return Ok(Some(Self { return Ok(Some(Self {
token: DeviceToken { token: DeviceToken {
@@ -656,9 +654,7 @@ impl DeviceTokenPair {
.single() .single()
.unwrap_or_else(Utc::now), .unwrap_or_else(Utc::now),
token: x.token, token: x.token,
display_claims: serde_json::from_value( display_claims: serde_json::from_value(x.display_claims)
x.display_claims,
)
.unwrap_or_default(), .unwrap_or_default(),
}, },
key: DeviceTokenKey { key: DeviceTokenKey {
@@ -669,8 +665,6 @@ impl DeviceTokenPair {
}, },
})); }));
} }
}
}
Ok(None) Ok(None)
} }
@@ -724,7 +718,7 @@ const MICROSOFT_CLIENT_ID: &str = "00000000402b5328";
const AUTH_REPLY_URL: &str = "https://login.live.com/oauth20_desktop.srf"; const AUTH_REPLY_URL: &str = "https://login.live.com/oauth20_desktop.srf";
const REQUESTED_SCOPE: &str = "service::user.auth.xboxlive.com::MBI_SSL"; const REQUESTED_SCOPE: &str = "service::user.auth.xboxlive.com::MBI_SSL";
struct RequestWithDate<T> { pub struct RequestWithDate<T> {
pub date: DateTime<Utc>, pub date: DateTime<Utc>,
pub value: T, pub value: T,
} }

View File

@@ -360,8 +360,8 @@ impl Process {
} }
// Write the throwable if present // Write the throwable if present
if !current_content.is_empty() { if !current_content.is_empty()
if let Err(e) = && let Err(e) =
Process::append_to_log_file( Process::append_to_log_file(
&log_path, &log_path,
&current_content, &current_content,
@@ -374,7 +374,6 @@ impl Process {
} }
} }
} }
}
b"log4j:Event" => { b"log4j:Event" => {
in_event = false; in_event = false;
// If no throwable was present, write the log entry at the end of the event // If no throwable was present, write the log entry at the end of the event
@@ -429,8 +428,7 @@ impl Process {
if let Some(timestamp) = if let Some(timestamp) =
current_event.timestamp.as_deref() current_event.timestamp.as_deref()
{ && let Err(e) = Self::maybe_handle_server_join_logging(
if let Err(e) = Self::maybe_handle_server_join_logging(
profile_path, profile_path,
timestamp, timestamp,
message message
@@ -439,43 +437,36 @@ impl Process {
} }
} }
} }
}
_ => {} _ => {}
} }
} }
Ok(Event::Text(mut e)) => { Ok(Event::Text(mut e)) => {
if in_message || in_throwable { if in_message || in_throwable {
if let Ok(text) = e.unescape() { if let Ok(text) = e.xml_content() {
current_content.push_str(&text); current_content.push_str(&text);
} }
} else if !in_event } else if !in_event
&& !e.inplace_trim_end() && !e.inplace_trim_end()
&& !e.inplace_trim_start() && !e.inplace_trim_start()
{ && let Ok(text) = e.xml_content()
if let Ok(text) = e.unescape() { && let Err(e) = Process::append_to_log_file(
if let Err(e) = Process::append_to_log_file(
&log_path, &log_path,
&format!("{text}\n"), &format!("{text}\n"),
) { )
{
tracing::error!( tracing::error!(
"Failed to write to log file: {}", "Failed to write to log file: {}",
e e
); );
} }
} }
}
}
Ok(Event::CData(e)) => { Ok(Event::CData(e)) => {
if in_message || in_throwable { if (in_message || in_throwable)
if let Ok(text) = e && let Ok(text) = e.xml_content()
.escape()
.map_err(|x| x.into())
.and_then(|x| x.unescape())
{ {
current_content.push_str(&text); current_content.push_str(&text);
} }
} }
}
_ => (), _ => (),
} }
@@ -720,16 +711,13 @@ impl Process {
let logs_folder = state.directories.profile_logs_dir(&profile_path); let logs_folder = state.directories.profile_logs_dir(&profile_path);
let log_path = logs_folder.join(LAUNCHER_LOG_PATH); let log_path = logs_folder.join(LAUNCHER_LOG_PATH);
if log_path.exists() { if log_path.exists()
if let Err(e) = Process::append_to_log_file( && let Err(e) = Process::append_to_log_file(
&log_path, &log_path,
&format!("\n# Process exited with status: {mc_exit_status}\n"), &format!("\n# Process exited with status: {mc_exit_status}\n"),
) { )
tracing::warn!( {
"Failed to write exit status to log file: {}", tracing::warn!("Failed to write exit status to log file: {}", e);
e
);
}
} }
let _ = state.discord_rpc.clear_to_default(true).await; let _ = state.discord_rpc.clear_to_default(true).await;

View File

@@ -595,8 +595,8 @@ impl Profile {
} }
#[tracing::instrument(skip(self, semaphore, icon))] #[tracing::instrument(skip(self, semaphore, icon))]
pub async fn set_icon<'a>( pub async fn set_icon(
&'a mut self, &mut self,
cache_dir: &Path, cache_dir: &Path,
semaphore: &IoSemaphore, semaphore: &IoSemaphore,
icon: bytes::Bytes, icon: bytes::Bytes,
@@ -629,8 +629,8 @@ impl Profile {
{ {
let subdirectory = let subdirectory =
subdirectory.map_err(io::IOError::from)?.path(); subdirectory.map_err(io::IOError::from)?.path();
if subdirectory.is_file() { if subdirectory.is_file()
if let Some(file_name) = subdirectory && let Some(file_name) = subdirectory
.file_name() .file_name()
.and_then(|x| x.to_str()) .and_then(|x| x.to_str())
{ {
@@ -647,7 +647,6 @@ impl Profile {
} }
} }
} }
}
if profile.install_stage == ProfileInstallStage::MinecraftInstalling if profile.install_stage == ProfileInstallStage::MinecraftInstalling
{ {
@@ -901,8 +900,8 @@ impl Profile {
{ {
let subdirectory = let subdirectory =
subdirectory.map_err(io::IOError::from)?.path(); subdirectory.map_err(io::IOError::from)?.path();
if subdirectory.is_file() { if subdirectory.is_file()
if let Some(file_name) = && let Some(file_name) =
subdirectory.file_name().and_then(|x| x.to_str()) subdirectory.file_name().and_then(|x| x.to_str())
{ {
let file_size = subdirectory let file_size = subdirectory
@@ -928,7 +927,6 @@ impl Profile {
} }
} }
} }
}
let file_hashes = CachedEntry::get_file_hash_many( let file_hashes = CachedEntry::get_file_hash_many(
&keys.iter().map(|s| &*s.cache_key).collect::<Vec<_>>(), &keys.iter().map(|s| &*s.cache_key).collect::<Vec<_>>(),

View File

@@ -254,7 +254,7 @@ where
} }
#[tracing::instrument(skip(bytes, semaphore))] #[tracing::instrument(skip(bytes, semaphore))]
pub async fn write<'a>( pub async fn write(
path: &Path, path: &Path,
bytes: &[u8], bytes: &[u8],
semaphore: &IoSemaphore, semaphore: &IoSemaphore,

View File

@@ -191,13 +191,13 @@ async fn get_all_autoinstalled_jre_path() -> Result<HashSet<PathBuf>, JREError>
let mut jre_paths = HashSet::new(); let mut jre_paths = HashSet::new();
let base_path = state.directories.java_versions_dir(); let base_path = state.directories.java_versions_dir();
if base_path.is_dir() { if base_path.is_dir()
if let Ok(dir) = std::fs::read_dir(base_path) { && let Ok(dir) = std::fs::read_dir(base_path)
{
for entry in dir.flatten() { for entry in dir.flatten() {
let file_path = entry.path().join("bin"); let file_path = entry.path().join("bin");
if let Ok(contents) = if let Ok(contents) = std::fs::read_to_string(file_path.clone())
std::fs::read_to_string(file_path.clone())
{ {
let entry = entry.path().join(contents); let entry = entry.path().join(contents);
jre_paths.insert(entry); jre_paths.insert(entry);
@@ -210,7 +210,6 @@ async fn get_all_autoinstalled_jre_path() -> Result<HashSet<PathBuf>, JREError>
} }
} }
} }
}
Ok(jre_paths) Ok(jre_paths)
}) })
@@ -300,8 +299,9 @@ pub async fn check_java_at_filepath(path: &Path) -> crate::Result<JavaVersion> {
} }
// Extract version info from it // Extract version info from it
if let Some(arch) = java_arch { if let Some(arch) = java_arch
if let Some(version) = java_version { && let Some(version) = java_version
{
if let Ok(version) = extract_java_version(version) { if let Ok(version) = extract_java_version(version) {
let path = java.to_string_lossy().to_string(); let path = java.to_string_lossy().to_string();
return Ok(JavaVersion { return Ok(JavaVersion {
@@ -314,7 +314,6 @@ pub async fn check_java_at_filepath(path: &Path) -> crate::Result<JavaVersion> {
return Err(JREError::InvalidJREVersion(version.to_owned()).into()); return Err(JREError::InvalidJREVersion(version.to_owned()).into());
} }
}
Err(JREError::FailedJavaCheck(java).into()) Err(JREError::FailedJavaCheck(java).into())
} }

View File

@@ -33,13 +33,12 @@ pub fn is_feature_supported_in(
if part_version == part_first_release { if part_version == part_first_release {
continue; continue;
} }
if let Ok(part_version) = part_version.parse::<u32>() { if let Ok(part_version) = part_version.parse::<u32>()
if let Ok(part_first_release) = part_first_release.parse::<u32>() { && let Ok(part_first_release) = part_first_release.parse::<u32>()
if part_version > part_first_release { && part_version > part_first_release
{
return true; return true;
} }
} }
}
}
false false
} }

View File

@@ -1,2 +1,2 @@
[toolchain] [toolchain]
channel = "1.88.0" channel = "1.89.0"