Update Rust dependencies (#4139)

* Update Rust version

* Update async-compression 0.4.25 -> 0.4.27

* Update async-tungstenite 0.29.1 -> 0.30.0

* Update bytemuck 1.23.0 -> 1.23.1

* Update clap 4.5.40 -> 4.5.43

* Update deadpool-redis 0.21.1 -> 0.22.0 and redis 0.31.0 -> 0.32.4

* Update enumset 1.1.6 -> 1.1.7

* Update hyper-util 0.1.14 -> 0.1.16

* Update indexmap 2.9.0 -> 2.10.0

* Update indicatif 0.17.11 -> 0.18.0

* Update jemalloc_pprof 0.7.0 -> 0.8.1

* Update lettre 0.11.17 -> 0.11.18

* Update meilisearch-sdk 0.28.0 -> 0.29.1

* Update notify 8.0.0 -> 8.2.0 and notify-debouncer-mini 0.6.0 -> 0.7.0

* Update quick-xml 0.37.5 -> 0.38.1

* Fix theseus lint

* Update reqwest 0.12.20 -> 0.12.22

* Cargo fmt in theseus

* Update rgb 0.8.50 -> 0.8.52

* Update sentry 0.41.0 -> 0.42.0 and sentry-actix 0.41.0 -> 0.42.0

* Update serde_json 1.0.140 -> 1.0.142

* Update serde_with 3.13.0 -> 3.14.0

* Update spdx 0.10.8 -> 0.10.9

* Update sysinfo 0.35.2 -> 0.36.1

* Update tauri suite

* Fix build by updating mappings

* Update tokio 1.45.1 -> 1.47.1 and tokio-util 0.7.15 -> 0.7.16

* Update tracing-actix-web 0.7.18 -> 0.7.19

* Update zip 4.2.0 -> 4.3.0

* Misc Cargo.lock updates

* Update Dockerfiles
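Most of the code changes below are one mechanical rewrite applied across the workspace: with the toolchain bumped to Rust 1.89.0 (see the Dockerfiles and the clippy.toml msrv below), nested `if let` blocks are collapsed into let chains, a feature stabilized in Rust 1.88 on the 2024 edition. A minimal sketch of the before/after shape, using hypothetical names rather than code from this diff:

struct Member {
    accepted: bool,
}

// Before: the binding and the guard need two nested blocks.
fn permissions_nested(member: Option<&Member>) -> Option<&'static str> {
    if let Some(member) = member {
        if member.accepted {
            return Some("read-write");
        }
    }
    None
}

// After: a let chain binds and tests in a single `if`, dropping one
// indentation level (requires edition 2024, Rust 1.88 or newer).
fn permissions_chained(member: Option<&Member>) -> Option<&'static str> {
    if let Some(member) = member
        && member.accepted
    {
        return Some("read-write");
    }
    None
}

Both functions behave identically; the hunks below apply the same transformation to real call sites.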
Josiah Glosson authored on 2025-08-08 15:50:44 -07:00, committed by GitHub
parent ca0c16b1fe
commit cf190d86d5
67 changed files with 1936 additions and 1890 deletions

Cargo.lock (generated): 1476 lines changed; file diff suppressed because it is too large.

@@ -25,31 +25,31 @@ actix-ws = "0.3.0"
 argon2 = { version = "0.5.3", features = ["std"] }
 ariadne = { path = "packages/ariadne" }
 async_zip = "0.0.17"
-async-compression = { version = "0.4.25", default-features = false }
+async-compression = { version = "0.4.27", default-features = false }
 async-recursion = "1.1.1"
 async-stripe = { version = "0.41.0", default-features = false, features = [
     "runtime-tokio-hyper-rustls",
 ] }
 async-trait = "0.1.88"
-async-tungstenite = { version = "0.29.1", default-features = false, features = [
+async-tungstenite = { version = "0.30.0", default-features = false, features = [
     "futures-03-sink",
 ] }
 async-walkdir = "2.1.0"
 base64 = "0.22.1"
 bitflags = "2.9.1"
-bytemuck = "1.23.0"
+bytemuck = "1.23.1"
 bytes = "1.10.1"
 censor = "0.3.0"
 chardetng = "0.1.17"
 chrono = "0.4.41"
-clap = "4.5.40"
+clap = "4.5.43"
 clickhouse = "0.13.3"
 color-thief = "0.2.2"
 console-subscriber = "0.4.1"
 daedalus = { path = "packages/daedalus" }
 dashmap = "6.1.0"
 data-url = "0.3.1"
-deadpool-redis = "0.21.1"
+deadpool-redis = "0.22.0"
 dirs = "6.0.0"
 discord-rich-presence = "0.2.5"
 dotenv-build = "0.1.1"
@@ -57,7 +57,7 @@ dotenvy = "0.15.7"
 dunce = "1.0.5"
 either = "1.15.0"
 encoding_rs = "0.8.35"
-enumset = "1.1.6"
+enumset = "1.1.7"
 flate2 = "1.1.2"
 fs4 = { version = "0.13.1", default-features = false }
 futures = { version = "0.3.31", default-features = false }
@@ -74,15 +74,15 @@ hyper-rustls = { version = "0.27.7", default-features = false, features = [
     "ring",
     "tls12",
 ] }
-hyper-util = "0.1.14"
+hyper-util = "0.1.16"
 iana-time-zone = "0.1.63"
 image = { version = "0.25.6", default-features = false, features = ["rayon"] }
-indexmap = "2.9.0"
-indicatif = "0.17.11"
+indexmap = "2.10.0"
+indicatif = "0.18.0"
 itertools = "0.14.0"
-jemalloc_pprof = "0.7.0"
+jemalloc_pprof = "0.8.1"
 json-patch = { version = "4.0.0", default-features = false }
-lettre = { version = "0.11.17", default-features = false, features = [
+lettre = { version = "0.11.18", default-features = false, features = [
     "builder",
     "hostname",
     "pool",
@@ -92,24 +92,24 @@ lettre = { version = "0.11.17", default-features = false, features = [
     "smtp-transport",
 ] }
 maxminddb = "0.26.0"
-meilisearch-sdk = { version = "0.28.0", default-features = false }
+meilisearch-sdk = { version = "0.29.1", default-features = false }
 murmur2 = "0.1.0"
 native-dialog = "0.9.0"
-notify = { version = "8.0.0", default-features = false }
-notify-debouncer-mini = { version = "0.6.0", default-features = false }
+notify = { version = "8.2.0", default-features = false }
+notify-debouncer-mini = { version = "0.7.0", default-features = false }
 p256 = "0.13.2"
 paste = "1.0.15"
 phf = { version = "0.12.1", features = ["macros"] }
 png = "0.17.16"
 prometheus = "0.14.0"
 quartz_nbt = "0.2.9"
-quick-xml = "0.37.5"
+quick-xml = "0.38.1"
 rand = "=0.8.5" # Locked on 0.8 until argon2 and p256 update to 0.9
 rand_chacha = "=0.3.1" # Locked on 0.3 until we can update rand to 0.9
-redis = "=0.31.0" # Locked on 0.31 until deadpool-redis updates to 0.32
+redis = "0.32.4"
 regex = "1.11.1"
-reqwest = { version = "0.12.20", default-features = false }
-rgb = "0.8.50"
+reqwest = { version = "0.12.22", default-features = false }
+rgb = "0.8.52"
 rust_decimal = { version = "1.37.2", features = [
     "serde-with-float",
     "serde-with-str",
@@ -121,7 +121,7 @@ rust-s3 = { version = "0.35.1", default-features = false, features = [
     "tokio-rustls-tls",
 ] }
 rusty-money = "0.4.1"
-sentry = { version = "0.41.0", default-features = false, features = [
+sentry = { version = "0.42.0", default-features = false, features = [
     "backtrace",
     "contexts",
     "debug-images",
@@ -129,45 +129,45 @@ sentry = { version = "0.41.0", default-features = false, features = [
     "reqwest",
     "rustls",
 ] }
-sentry-actix = "0.41.0"
+sentry-actix = "0.42.0"
 serde = "1.0.219"
 serde_bytes = "0.11.17"
 serde_cbor = "0.11.2"
 serde_ini = "0.2.0"
-serde_json = "1.0.140"
-serde_with = "3.13.0"
+serde_json = "1.0.142"
+serde_with = "3.14.0"
 serde-xml-rs = "0.8.1" # Also an XML (de)serializer, consider dropping yaserde in favor of this
 sha1 = "0.10.6"
 sha1_smol = { version = "1.0.1", features = ["std"] }
 sha2 = "0.10.9"
-spdx = "0.10.8"
+spdx = "0.10.9"
 sqlx = { version = "0.8.6", default-features = false }
-sysinfo = { version = "0.35.2", default-features = false }
+sysinfo = { version = "0.36.1", default-features = false }
 tar = "0.4.44"
-tauri = "2.6.1"
-tauri-build = "2.3.0"
-tauri-plugin-deep-link = "2.4.0"
-tauri-plugin-dialog = "2.3.0"
-tauri-plugin-http = "2.5.0"
+tauri = "2.7.0"
+tauri-build = "2.3.1"
+tauri-plugin-deep-link = "2.4.1"
+tauri-plugin-dialog = "2.3.2"
+tauri-plugin-http = "2.5.1"
 tauri-plugin-opener = "2.4.0"
 tauri-plugin-os = "2.3.0"
-tauri-plugin-single-instance = "2.3.0"
+tauri-plugin-single-instance = "2.3.2"
 tauri-plugin-updater = { version = "2.9.0", default-features = false, features = [
     "rustls-tls",
     "zip",
 ] }
-tauri-plugin-window-state = "2.3.0"
+tauri-plugin-window-state = "2.4.0"
 tempfile = "3.20.0"
 theseus = { path = "packages/app-lib" }
 thiserror = "2.0.12"
 tikv-jemalloc-ctl = "0.6.0"
 tikv-jemallocator = "0.6.0"
-tokio = "1.45.1"
+tokio = "1.47.1"
 tokio-stream = "0.1.17"
-tokio-util = "0.7.15"
+tokio-util = "0.7.16"
 totp-rs = "5.7.0"
 tracing = "0.1.41"
-tracing-actix-web = "0.7.18"
+tracing-actix-web = "0.7.19"
 tracing-error = "0.2.1"
 tracing-subscriber = "0.3.19"
 url = "2.5.4"
@@ -179,7 +179,7 @@ whoami = "1.6.0"
 winreg = "0.55.0"
 woothee = "0.13.0"
 yaserde = "0.12.0"
-zip = { version = "4.2.0", default-features = false, features = [
+zip = { version = "4.3.0", default-features = false, features = [
     "bzip2",
     "deflate",
     "deflate64",
@@ -226,7 +226,7 @@ wildcard_dependencies = "warn"
 warnings = "deny"
 [patch.crates-io]
-wry = { git = "https://github.com/modrinth/wry", rev = "21db186" }
+wry = { git = "https://github.com/modrinth/wry", rev = "f2ce0b0" }
 # Optimize for speed and reduce size on release builds
 [profile.release]

@@ -197,15 +197,13 @@ pub async fn open_link<R: Runtime>(
     if url::Url::parse(&path).is_ok()
         && !state.malicious_origins.contains(&origin)
+        && let Some(last_click) = state.last_click
+        && last_click.elapsed() < Duration::from_millis(100)
     {
-        if let Some(last_click) = state.last_click {
-            if last_click.elapsed() < Duration::from_millis(100) {
-                let _ = app.opener().open_url(&path, None::<String>);
-                state.last_click = None;
-            }
-        }
-        return Ok(());
+        let _ = app.opener().open_url(&path, None::<String>);
+        state.last_click = None;
+        return Ok(());
     }
     tracing::info!("Malicious click: {path} origin {origin}");

@@ -59,16 +59,13 @@ pub async fn login<R: Runtime>(
             .url()?
             .as_str()
             .starts_with("https://login.live.com/oauth20_desktop.srf")
-        {
-            if let Some((_, code)) =
+            && let Some((_, code)) =
                 window.url()?.query_pairs().find(|x| x.0 == "code")
-            {
-                window.close()?;
-                let val =
-                    minecraft_auth::finish_login(&code.clone(), flow).await?;
-                return Ok(Some(val));
-            }
+        {
+            window.close()?;
+            let val = minecraft_auth::finish_login(&code.clone(), flow).await?;
+            return Ok(Some(val));
         }
         tokio::time::sleep(std::time::Duration::from_millis(50)).await;

@@ -63,11 +63,11 @@ pub async fn should_disable_mouseover() -> bool {
         // We try to match version to 12.2 or higher. If unrecognizable to pattern or lower, we default to the css with disabled mouseover for safety
         if let tauri_plugin_os::Version::Semantic(major, minor, _) =
             tauri_plugin_os::version()
+            && major >= 12
+            && minor >= 3
         {
-            if major >= 12 && minor >= 3 {
-                // Mac os version is 12.3 or higher, we allow mouseover
-                return false;
-            }
+            // Mac os version is 12.3 or higher, we allow mouseover
+            return false;
         }
         true
     } else {

@@ -233,10 +233,10 @@ fn main() {
             });
             #[cfg(not(target_os = "linux"))]
-            if let Some(window) = app.get_window("main") {
-                if let Err(e) = window.set_shadow(true) {
-                    tracing::warn!("Failed to set window shadow: {e}");
-                }
+            if let Some(window) = app.get_window("main")
+                && let Err(e) = window.set_shadow(true)
+            {
+                tracing::warn!("Failed to set window shadow: {e}");
             }
             Ok(())

@@ -1,6 +1,6 @@
 # syntax=docker/dockerfile:1
-FROM rust:1.88.0 AS build
+FROM rust:1.89.0 AS build
 WORKDIR /usr/src/daedalus
 COPY . .

@@ -506,27 +506,25 @@ async fn fetch(
                 return Ok(lib);
             }
-        } else if let Some(url) = &lib.url {
-            if !url.is_empty() {
-                insert_mirrored_artifact(
-                    &lib.name,
-                    None,
-                    vec![
-                        url.clone(),
-                        "https://libraries.minecraft.net/"
-                            .to_string(),
-                        "https://maven.creeperhost.net/"
-                            .to_string(),
-                        maven_url.to_string(),
-                    ],
-                    false,
-                    mirror_artifacts,
-                )?;
-                lib.url = Some(format_url("maven/"));
-            }
-            return Ok(lib);
-        }
+        } else if let Some(url) = &lib.url
+            && !url.is_empty()
+        {
+            insert_mirrored_artifact(
+                &lib.name,
+                None,
+                vec![
+                    url.clone(),
+                    "https://libraries.minecraft.net/".to_string(),
+                    "https://maven.creeperhost.net/".to_string(),
+                    maven_url.to_string(),
+                ],
+                false,
+                mirror_artifacts,
+            )?;
+            lib.url = Some(format_url("maven/"));
+            return Ok(lib);
+        }
         // Other libraries are generally available in the "maven" directory of the installer. If they are

@@ -93,22 +93,22 @@ async fn main() -> Result<()> {
         .ok()
         .and_then(|x| x.parse::<bool>().ok())
         .unwrap_or(false)
+        && let Ok(token) = dotenvy::var("CLOUDFLARE_TOKEN")
+        && let Ok(zone_id) = dotenvy::var("CLOUDFLARE_ZONE_ID")
     {
-        if let Ok(token) = dotenvy::var("CLOUDFLARE_TOKEN") {
-            if let Ok(zone_id) = dotenvy::var("CLOUDFLARE_ZONE_ID") {
-                let cache_clears = upload_files
-                    .into_iter()
-                    .map(|x| format_url(&x.0))
-                    .chain(
-                        mirror_artifacts
-                            .into_iter()
-                            .map(|x| format_url(&format!("maven/{}", x.0))),
-                    )
-                    .collect::<Vec<_>>();
-                // Cloudflare ratelimits cache clears to 500 files per request
-                for chunk in cache_clears.chunks(500) {
-                    REQWEST_CLIENT.post(format!("https://api.cloudflare.com/client/v4/zones/{zone_id}/purge_cache"))
+        let cache_clears = upload_files
+            .into_iter()
+            .map(|x| format_url(&x.0))
+            .chain(
+                mirror_artifacts
+                    .into_iter()
+                    .map(|x| format_url(&format!("maven/{}", x.0))),
+            )
+            .collect::<Vec<_>>();
+        // Cloudflare ratelimits cache clears to 500 files per request
+        for chunk in cache_clears.chunks(500) {
+            REQWEST_CLIENT.post(format!("https://api.cloudflare.com/client/v4/zones/{zone_id}/purge_cache"))
         .bearer_auth(&token)
         .json(&serde_json::json!({
             "files": chunk
@@ -128,8 +128,6 @@ async fn main() -> Result<()> {
                     item: "cloudflare clear cache".to_string(),
                 }
             })?;
         }
-        }
-    }
     }
 }

@@ -167,20 +167,18 @@ pub async fn download_file(
         let bytes = x.bytes().await;
         if let Ok(bytes) = bytes {
-            if let Some(sha1) = sha1 {
-                if &*sha1_async(bytes.clone()).await? != sha1 {
-                    if attempt <= 3 {
-                        continue;
-                    } else {
-                        return Err(
-                            crate::ErrorKind::ChecksumFailure {
-                                hash: sha1.to_string(),
-                                url: url.to_string(),
-                                tries: attempt,
-                            }
-                            .into(),
-                        );
-                    }
-                }
+            if let Some(sha1) = sha1
+                && &*sha1_async(bytes.clone()).await? != sha1
+            {
+                if attempt <= 3 {
+                    continue;
+                } else {
+                    return Err(crate::ErrorKind::ChecksumFailure {
+                        hash: sha1.to_string(),
+                        url: url.to_string(),
+                        tries: attempt,
+                    }
+                    .into());
+                }
             }

@@ -1,6 +1,6 @@
 # syntax=docker/dockerfile:1
-FROM rust:1.88.0 AS build
+FROM rust:1.89.0 AS build
 WORKDIR /usr/src/labrinth
 COPY . .

@@ -322,12 +322,11 @@ pub async fn is_visible_collection(
     } else {
         !collection_data.status.is_hidden()
     }) && !collection_data.projects.is_empty();
-    if let Some(user) = &user_option {
-        if !authorized
-            && (user.role.is_mod() || user.id == collection_data.user_id.into())
-        {
-            authorized = true;
-        }
+    if let Some(user) = &user_option
+        && !authorized
+        && (user.role.is_mod() || user.id == collection_data.user_id.into())
+    {
+        authorized = true;
     }
     Ok(authorized)
 }
@@ -356,10 +355,10 @@ pub async fn filter_visible_collections(
     for collection in check_collections {
         // Collections are simple- if we are the owner or a mod, we can see it
-        if let Some(user) = user_option {
-            if user.role.is_mod() || user.id == collection.user_id.into() {
-                return_collections.push(collection.into());
-            }
+        if let Some(user) = user_option
+            && (user.role.is_mod() || user.id == collection.user_id.into())
+        {
+            return_collections.push(collection.into());
         }
     }

@@ -95,10 +95,10 @@ impl DBFlow {
         redis: &RedisPool,
     ) -> Result<Option<DBFlow>, DatabaseError> {
         let flow = Self::get(id, redis).await?;
-        if let Some(flow) = flow.as_ref() {
-            if predicate(flow) {
-                Self::remove(id, redis).await?;
-            }
+        if let Some(flow) = flow.as_ref()
+            && predicate(flow)
+        {
+            Self::remove(id, redis).await?;
         }
         Ok(flow)
     }

@@ -801,24 +801,24 @@ impl VersionField {
         };
         if let Some(count) = countable {
-            if let Some(min) = loader_field.min_val {
-                if count < min {
-                    return Err(format!(
-                        "Provided value '{v}' for {field_name} is less than the minimum of {min}",
-                        v = serde_json::to_string(&value).unwrap_or_default(),
-                        field_name = loader_field.field,
-                    ));
-                }
+            if let Some(min) = loader_field.min_val
+                && count < min
+            {
+                return Err(format!(
+                    "Provided value '{v}' for {field_name} is less than the minimum of {min}",
+                    v = serde_json::to_string(&value).unwrap_or_default(),
+                    field_name = loader_field.field,
+                ));
             }
-            if let Some(max) = loader_field.max_val {
-                if count > max {
-                    return Err(format!(
-                        "Provided value '{v}' for {field_name} is greater than the maximum of {max}",
-                        v = serde_json::to_string(&value).unwrap_or_default(),
-                        field_name = loader_field.field,
-                    ));
-                }
+            if let Some(max) = loader_field.max_val
+                && count > max
+            {
+                return Err(format!(
+                    "Provided value '{v}' for {field_name} is greater than the maximum of {max}",
+                    v = serde_json::to_string(&value).unwrap_or_default(),
+                    field_name = loader_field.field,
+                ));
             }
         }

@@ -483,20 +483,20 @@ impl DBTeamMember {
             .await?;
         }
-        if let Some(accepted) = new_accepted {
-            if accepted {
-                sqlx::query!(
-                    "
+        if let Some(accepted) = new_accepted
+            && accepted
+        {
+            sqlx::query!(
+                "
                 UPDATE team_members
                 SET accepted = TRUE
                 WHERE (team_id = $1 AND user_id = $2)
                 ",
-                    id as DBTeamId,
-                    user_id as DBUserId,
-                )
-                .execute(&mut **transaction)
-                .await?;
-            }
+                id as DBTeamId,
+                user_id as DBUserId,
+            )
+            .execute(&mut **transaction)
+            .await?;
         }
         if let Some(payouts_split) = new_payouts_split {

@@ -353,10 +353,10 @@ impl RedisPool {
         };
         for (idx, key) in fetch_ids.into_iter().enumerate() {
-            if let Some(locked) = results.get(idx) {
-                if locked.is_none() {
-                    continue;
-                }
+            if let Some(locked) = results.get(idx)
+                && locked.is_none()
+            {
+                continue;
             }
             if let Some((key, raw_key)) = ids.remove(&key) {

@@ -334,18 +334,14 @@ impl From<Version> for LegacyVersion {
         // the v2 loaders are whatever the corresponding loader fields are
         let mut loaders =
             data.loaders.into_iter().map(|l| l.0).collect::<Vec<_>>();
-        if loaders.contains(&"mrpack".to_string()) {
-            if let Some((_, mrpack_loaders)) = data
+        if loaders.contains(&"mrpack".to_string())
+            && let Some((_, mrpack_loaders)) = data
                 .fields
                 .into_iter()
                 .find(|(key, _)| key == "mrpack_loaders")
-            {
-                if let Ok(mrpack_loaders) =
-                    serde_json::from_value(mrpack_loaders)
-                {
-                    loaders = mrpack_loaders;
-                }
-            }
+            && let Ok(mrpack_loaders) = serde_json::from_value(mrpack_loaders)
+        {
+            loaders = mrpack_loaders;
         }
         let loaders = loaders.into_iter().map(Loader).collect::<Vec<_>>();

@@ -43,35 +43,33 @@ impl LegacyResultSearchProject {
     pub fn from(result_search_project: ResultSearchProject) -> Self {
         let mut categories = result_search_project.categories;
         categories.extend(result_search_project.loaders.clone());
-        if categories.contains(&"mrpack".to_string()) {
-            if let Some(mrpack_loaders) = result_search_project
+        if categories.contains(&"mrpack".to_string())
+            && let Some(mrpack_loaders) = result_search_project
                 .project_loader_fields
                 .get("mrpack_loaders")
-            {
-                categories.extend(
-                    mrpack_loaders
-                        .iter()
-                        .filter_map(|c| c.as_str())
-                        .map(String::from),
-                );
-                categories.retain(|c| c != "mrpack");
-            }
+        {
+            categories.extend(
+                mrpack_loaders
+                    .iter()
+                    .filter_map(|c| c.as_str())
+                    .map(String::from),
+            );
+            categories.retain(|c| c != "mrpack");
         }
         let mut display_categories = result_search_project.display_categories;
         display_categories.extend(result_search_project.loaders);
-        if display_categories.contains(&"mrpack".to_string()) {
-            if let Some(mrpack_loaders) = result_search_project
+        if display_categories.contains(&"mrpack".to_string())
+            && let Some(mrpack_loaders) = result_search_project
                 .project_loader_fields
                 .get("mrpack_loaders")
-            {
-                categories.extend(
-                    mrpack_loaders
-                        .iter()
-                        .filter_map(|c| c.as_str())
-                        .map(String::from),
-                );
-                display_categories.retain(|c| c != "mrpack");
-            }
+        {
+            categories.extend(
+                mrpack_loaders
+                    .iter()
+                    .filter_map(|c| c.as_str())
+                    .map(String::from),
+            );
+            display_categories.retain(|c| c != "mrpack");
         }
         // Sort then remove duplicates

@@ -166,10 +166,10 @@ impl From<ProjectQueryResult> for Project {
             Ok(spdx_expr) => {
                 let mut vec: Vec<&str> = Vec::new();
                 for node in spdx_expr.iter() {
-                    if let spdx::expression::ExprNode::Req(req) = node {
-                        if let Some(id) = req.req.license.id() {
-                            vec.push(id.full_name);
-                        }
+                    if let spdx::expression::ExprNode::Req(req) = node
+                        && let Some(id) = req.req.license.id()
+                    {
+                        vec.push(id.full_name);
                     }
                 }
                 // spdx crate returns AND/OR operations in postfix order

@@ -51,16 +51,16 @@ impl ProjectPermissions {
             return Some(ProjectPermissions::all());
         }
-        if let Some(member) = project_team_member {
-            if member.accepted {
-                return Some(member.permissions);
-            }
+        if let Some(member) = project_team_member
+            && member.accepted
+        {
+            return Some(member.permissions);
         }
-        if let Some(member) = organization_team_member {
-            if member.accepted {
-                return Some(member.permissions);
-            }
+        if let Some(member) = organization_team_member
+            && member.accepted
+        {
+            return Some(member.permissions);
         }
         if role.is_mod() {
@@ -107,10 +107,10 @@ impl OrganizationPermissions {
             return Some(OrganizationPermissions::all());
         }
-        if let Some(member) = team_member {
-            if member.accepted {
-                return member.organization_permissions;
-            }
+        if let Some(member) = team_member
+            && member.accepted
+        {
+            return member.organization_permissions;
         }
         if role.is_mod() {
             return Some(

@@ -45,17 +45,15 @@ impl MaxMindIndexer {
         if let Ok(entries) = archive.entries() {
             for mut file in entries.flatten() {
-                if let Ok(path) = file.header().path() {
-                    if path.extension().and_then(|x| x.to_str()) == Some("mmdb")
-                    {
-                        let mut buf = Vec::new();
-                        file.read_to_end(&mut buf).unwrap();
-                        let reader =
-                            maxminddb::Reader::from_source(buf).unwrap();
-                        return Ok(Some(reader));
-                    }
+                if let Ok(path) = file.header().path()
+                    && path.extension().and_then(|x| x.to_str()) == Some("mmdb")
+                {
+                    let mut buf = Vec::new();
+                    file.read_to_end(&mut buf).unwrap();
+                    let reader = maxminddb::Reader::from_source(buf).unwrap();
+                    return Ok(Some(reader));
                 }
             }
         }

@@ -371,8 +371,8 @@ impl AutomatedModerationQueue {
                         for file in
                             files.iter().filter(|x| x.version_id == version.id.into())
                         {
-                            if let Some(hash) = file.hashes.get("sha1") {
-                                if let Some((index, (sha1, _, file_name, _))) = hashes
+                            if let Some(hash) = file.hashes.get("sha1")
+                                && let Some((index, (sha1, _, file_name, _))) = hashes
                                 .iter()
                                 .enumerate()
                                 .find(|(_, (value, _, _, _))| value == hash)
@@ -382,7 +382,6 @@ impl AutomatedModerationQueue {
                                 hashes.remove(index);
                             }
-                            }
                         }
                     }
@@ -420,12 +419,11 @@ impl AutomatedModerationQueue {
                         .await?;
                         for row in rows {
-                            if let Some(sha1) = row.sha1 {
-                                if let Some((index, (sha1, _, file_name, _))) = hashes.iter().enumerate().find(|(_, (value, _, _, _))| value == &sha1) {
+                            if let Some(sha1) = row.sha1
+                                && let Some((index, (sha1, _, file_name, _))) = hashes.iter().enumerate().find(|(_, (value, _, _, _))| value == &sha1) {
                                 final_hashes.insert(sha1.clone(), IdentifiedFile { file_name: file_name.clone(), status: ApprovalType::from_string(&row.status).unwrap_or(ApprovalType::Unidentified) });
                                 hashes.remove(index);
                             }
-                            }
                         }
                         if hashes.is_empty() {
@@ -499,8 +497,8 @@ impl AutomatedModerationQueue {
                         let mut insert_ids = Vec::new();
                         for row in rows {
-                            if let Some((curse_index, (hash, _flame_id))) = flame_files.iter().enumerate().find(|(_, x)| Some(x.1 as i32) == row.flame_project_id) {
-                                if let Some((index, (sha1, _, file_name, _))) = hashes.iter().enumerate().find(|(_, (value, _, _, _))| value == hash) {
+                            if let Some((curse_index, (hash, _flame_id))) = flame_files.iter().enumerate().find(|(_, x)| Some(x.1 as i32) == row.flame_project_id)
+                                && let Some((index, (sha1, _, file_name, _))) = hashes.iter().enumerate().find(|(_, (value, _, _, _))| value == hash) {
                                 final_hashes.insert(sha1.clone(), IdentifiedFile {
                                     file_name: file_name.clone(),
                                     status: ApprovalType::from_string(&row.status).unwrap_or(ApprovalType::Unidentified),
@@ -512,7 +510,6 @@ impl AutomatedModerationQueue {
                                 hashes.remove(index);
                                 flame_files.remove(curse_index);
                            }
-                            }
                        }
                        if !insert_ids.is_empty() && !insert_hashes.is_empty() {
@@ -581,8 +578,8 @@ impl AutomatedModerationQueue {
                         for (sha1, _pack_file, file_name, _mumur2) in hashes {
                             let flame_file = flame_files.iter().find(|x| x.0 == sha1);
-                            if let Some((_, flame_project_id)) = flame_file {
-                                if let Some(project) = flame_projects.iter().find(|x| &x.id == flame_project_id) {
+                            if let Some((_, flame_project_id)) = flame_file
+                                && let Some(project) = flame_projects.iter().find(|x| &x.id == flame_project_id) {
                                 missing_metadata.flame_files.insert(sha1, MissingMetadataFlame {
                                     title: project.name.clone(),
                                     file_name,
@@ -592,7 +589,6 @@ impl AutomatedModerationQueue {
                                 continue;
                             }
-                            }
                             missing_metadata.unknown_files.insert(sha1, file_name);
                         }

@@ -257,31 +257,30 @@ impl PayoutsQueue {
                 )
             })?;
-        if !status.is_success() {
-            if let Some(obj) = value.as_object() {
-                if let Some(array) = obj.get("errors") {
-                    #[derive(Deserialize)]
-                    struct TremendousError {
-                        message: String,
-                    }
-                    let err = serde_json::from_value::<TremendousError>(
-                        array.clone(),
-                    )
-                    .map_err(|_| {
-                        ApiError::Payments(
-                            "could not retrieve Tremendous error json body"
-                                .to_string(),
-                        )
-                    })?;
-                    return Err(ApiError::Payments(err.message));
-                }
-                return Err(ApiError::Payments(
-                    "could not retrieve Tremendous error body".to_string(),
-                ));
-            }
-        }
+        if !status.is_success()
+            && let Some(obj) = value.as_object()
+        {
+            if let Some(array) = obj.get("errors") {
+                #[derive(Deserialize)]
+                struct TremendousError {
+                    message: String,
+                }
+                let err =
+                    serde_json::from_value::<TremendousError>(array.clone())
+                        .map_err(|_| {
+                            ApiError::Payments(
+                                "could not retrieve Tremendous error json body"
+                                    .to_string(),
+                            )
+                        })?;
+                return Err(ApiError::Payments(err.message));
+            }
+            return Err(ApiError::Payments(
+                "could not retrieve Tremendous error body".to_string(),
+            ));
+        }
         Ok(serde_json::from_value(value)?)
@@ -449,10 +448,10 @@ impl PayoutsQueue {
             };
             // we do not support interval gift cards with non US based currencies since we cannot do currency conversions properly
-            if let PayoutInterval::Fixed { .. } = method.interval {
-                if !product.currency_codes.contains(&"USD".to_string()) {
-                    continue;
-                }
+            if let PayoutInterval::Fixed { .. } = method.interval
+                && !product.currency_codes.contains(&"USD".to_string())
+            {
+                continue;
             }
             methods.push(method);

@@ -286,17 +286,17 @@ pub async fn refund_charge(
         .upsert(&mut transaction)
         .await?;
-    if body.0.unprovision.unwrap_or(false) {
-        if let Some(subscription_id) = charge.subscription_id {
-            let open_charge =
-                DBCharge::get_open_subscription(subscription_id, &**pool)
-                    .await?;
-            if let Some(mut open_charge) = open_charge {
-                open_charge.status = ChargeStatus::Cancelled;
-                open_charge.due = Utc::now();
-                open_charge.upsert(&mut transaction).await?;
-            }
-        }
+    if body.0.unprovision.unwrap_or(false)
+        && let Some(subscription_id) = charge.subscription_id
+    {
+        let open_charge =
+            DBCharge::get_open_subscription(subscription_id, &**pool)
+                .await?;
+        if let Some(mut open_charge) = open_charge {
+            open_charge.status = ChargeStatus::Cancelled;
+            open_charge.due = Utc::now();
+            open_charge.upsert(&mut transaction).await?;
+        }
     }
@@ -392,17 +392,16 @@ pub async fn edit_subscription(
         }
     }
-    if let Some(interval) = &edit_subscription.interval {
-        if let Price::Recurring { intervals } = &current_price.prices {
-            if let Some(price) = intervals.get(interval) {
-                open_charge.subscription_interval = Some(*interval);
-                open_charge.amount = *price as i64;
-            } else {
-                return Err(ApiError::InvalidInput(
-                    "Interval is not valid for this subscription!"
-                        .to_string(),
-                ));
-            }
-        }
+    if let Some(interval) = &edit_subscription.interval
+        && let Price::Recurring { intervals } = &current_price.prices
+    {
+        if let Some(price) = intervals.get(interval) {
+            open_charge.subscription_interval = Some(*interval);
+            open_charge.amount = *price as i64;
+        } else {
+            return Err(ApiError::InvalidInput(
+                "Interval is not valid for this subscription!".to_string(),
+            ));
+        }
     }
@@ -1225,38 +1224,36 @@ pub async fn initiate_payment(
             }
         };
-        if let Price::Recurring { .. } = price_item.prices {
-            if product.unitary {
-                let user_subscriptions =
-                    user_subscription_item::DBUserSubscription::get_all_user(
-                        user.id.into(),
-                        &**pool,
-                    )
-                    .await?;
-                let user_products =
-                    product_item::DBProductPrice::get_many(
-                        &user_subscriptions
-                            .iter()
-                            .filter(|x| {
-                                x.status
-                                    == SubscriptionStatus::Provisioned
-                            })
-                            .map(|x| x.price_id)
-                            .collect::<Vec<_>>(),
-                        &**pool,
-                    )
-                    .await?;
-                if user_products
-                    .into_iter()
-                    .any(|x| x.product_id == product.id)
-                {
-                    return Err(ApiError::InvalidInput(
-                        "You are already subscribed to this product!"
-                            .to_string(),
-                    ));
-                }
-            }
-        }
+        if let Price::Recurring { .. } = price_item.prices
+            && product.unitary
+        {
+            let user_subscriptions =
+                user_subscription_item::DBUserSubscription::get_all_user(
+                    user.id.into(),
+                    &**pool,
+                )
+                .await?;
+            let user_products = product_item::DBProductPrice::get_many(
+                &user_subscriptions
+                    .iter()
+                    .filter(|x| {
+                        x.status == SubscriptionStatus::Provisioned
+                    })
+                    .map(|x| x.price_id)
+                    .collect::<Vec<_>>(),
+                &**pool,
+            )
+            .await?;
+            if user_products
+                .into_iter()
+                .any(|x| x.product_id == product.id)
+            {
+                return Err(ApiError::InvalidInput(
+                    "You are already subscribed to this product!"
+                        .to_string(),
+                ));
+            }
+        }
@@ -2004,38 +2001,36 @@ pub async fn stripe_webhook(
             EventType::PaymentMethodAttached => {
                 if let EventObject::PaymentMethod(payment_method) =
                     event.data.object
-                {
-                    if let Some(customer_id) =
+                    && let Some(customer_id) =
                         payment_method.customer.map(|x| x.id())
-                    {
-                        let customer = stripe::Customer::retrieve(
-                            &stripe_client,
-                            &customer_id,
-                            &[],
-                        )
-                        .await?;
-                        if customer
-                            .invoice_settings
-                            .is_none_or(|x| x.default_payment_method.is_none())
-                        {
-                            stripe::Customer::update(
-                                &stripe_client,
-                                &customer_id,
-                                UpdateCustomer {
-                                    invoice_settings: Some(
-                                        CustomerInvoiceSettings {
-                                            default_payment_method: Some(
-                                                payment_method.id.to_string(),
-                                            ),
-                                            ..Default::default()
-                                        },
-                                    ),
-                                    ..Default::default()
-                                },
-                            )
-                            .await?;
-                        }
-                    }
-                }
+                {
+                    let customer = stripe::Customer::retrieve(
+                        &stripe_client,
+                        &customer_id,
+                        &[],
+                    )
+                    .await?;
+                    if customer
+                        .invoice_settings
+                        .is_none_or(|x| x.default_payment_method.is_none())
+                    {
+                        stripe::Customer::update(
+                            &stripe_client,
+                            &customer_id,
+                            UpdateCustomer {
+                                invoice_settings: Some(
+                                    CustomerInvoiceSettings {
+                                        default_payment_method: Some(
+                                            payment_method.id.to_string(),
+                                        ),
+                                        ..Default::default()
+                                    },
+                                ),
+                                ..Default::default()
+                            },
+                        )
+                        .await?;
+                    }
+                }
             }

@@ -79,13 +79,12 @@ impl TempUser {
         file_host: &Arc<dyn FileHost + Send + Sync>,
         redis: &RedisPool,
     ) -> Result<crate::database::models::DBUserId, AuthenticationError> {
-        if let Some(email) = &self.email {
-            if crate::database::models::DBUser::get_by_email(email, client)
+        if let Some(email) = &self.email
+            && crate::database::models::DBUser::get_by_email(email, client)
                 .await?
                 .is_some()
-            {
-                return Err(AuthenticationError::DuplicateUser);
-            }
+        {
+            return Err(AuthenticationError::DuplicateUser);
         }
         let user_id =
@@ -1269,19 +1268,19 @@ pub async fn delete_auth_provider(
         .update_user_id(user.id.into(), None, &mut transaction)
         .await?;
-    if delete_provider.provider != AuthProvider::PayPal {
-        if let Some(email) = user.email {
-            send_email(
-                email,
-                "Authentication method removed",
-                &format!(
-                    "When logging into Modrinth, you can no longer log in using the {} authentication provider.",
-                    delete_provider.provider.as_str()
-                ),
-                "If you did not make this change, please contact us immediately through our support channels on Discord or via email (support@modrinth.com).",
-                None,
-            )?;
-        }
+    if delete_provider.provider != AuthProvider::PayPal
+        && let Some(email) = user.email
+    {
+        send_email(
+            email,
+            "Authentication method removed",
+            &format!(
+                "When logging into Modrinth, you can no longer log in using the {} authentication provider.",
+                delete_provider.provider.as_str()
+            ),
+            "If you did not make this change, please contact us immediately through our support channels on Discord or via email (support@modrinth.com).",
+            None,
+        )?;
     }
     transaction.commit().await?;

@@ -189,17 +189,16 @@ pub async fn get_project_meta(
                 .iter()
                 .find(|x| Some(x.1.id as i32) == row.flame_project_id)
                 .map(|x| x.0.clone())
+                && let Some(val) = merged.flame_files.remove(&sha1)
             {
-                if let Some(val) = merged.flame_files.remove(&sha1) {
-                    merged.identified.insert(
-                        sha1,
-                        IdentifiedFile {
-                            file_name: val.file_name.clone(),
-                            status: ApprovalType::from_string(&row.status)
-                                .unwrap_or(ApprovalType::Unidentified),
-                        },
-                    );
-                }
+                merged.identified.insert(
+                    sha1,
+                    IdentifiedFile {
+                        file_name: val.file_name.clone(),
+                        status: ApprovalType::from_string(&row.status)
+                            .unwrap_or(ApprovalType::Unidentified),
+                    },
+                );
             }
         }

@@ -185,69 +185,69 @@ pub async fn edit_pat(
     )
     .await?;
-    if let Some(pat) = pat {
-        if pat.user_id == user.id.into() {
-            let mut transaction = pool.begin().await?;
-            if let Some(scopes) = &info.scopes {
-                if scopes.is_restricted() {
-                    return Err(ApiError::InvalidInput(
-                        "Invalid scopes requested!".to_string(),
-                    ));
-                }
-                sqlx::query!(
-                    "
-                    UPDATE pats
-                    SET scopes = $1
-                    WHERE id = $2
-                    ",
-                    scopes.bits() as i64,
-                    pat.id.0
-                )
-                .execute(&mut *transaction)
-                .await?;
-            }
-            if let Some(name) = &info.name {
-                sqlx::query!(
-                    "
-                    UPDATE pats
-                    SET name = $1
-                    WHERE id = $2
-                    ",
-                    name,
-                    pat.id.0
-                )
-                .execute(&mut *transaction)
-                .await?;
-            }
-            if let Some(expires) = &info.expires {
-                if expires < &Utc::now() {
-                    return Err(ApiError::InvalidInput(
-                        "Expire date must be in the future!".to_string(),
-                    ));
-                }
-                sqlx::query!(
-                    "
-                    UPDATE pats
-                    SET expires = $1
-                    WHERE id = $2
-                    ",
-                    expires,
-                    pat.id.0
-                )
-                .execute(&mut *transaction)
-                .await?;
-            }
-            transaction.commit().await?;
-            database::models::pat_item::DBPersonalAccessToken::clear_cache(
-                vec![(Some(pat.id), Some(pat.access_token), Some(pat.user_id))],
-                &redis,
-            )
-            .await?;
-        }
-    }
+    if let Some(pat) = pat
+        && pat.user_id == user.id.into()
+    {
+        let mut transaction = pool.begin().await?;
+        if let Some(scopes) = &info.scopes {
+            if scopes.is_restricted() {
+                return Err(ApiError::InvalidInput(
+                    "Invalid scopes requested!".to_string(),
+                ));
+            }
+            sqlx::query!(
+                "
+                UPDATE pats
+                SET scopes = $1
+                WHERE id = $2
+                ",
+                scopes.bits() as i64,
+                pat.id.0
+            )
+            .execute(&mut *transaction)
+            .await?;
+        }
+        if let Some(name) = &info.name {
+            sqlx::query!(
+                "
+                UPDATE pats
+                SET name = $1
+                WHERE id = $2
+                ",
+                name,
+                pat.id.0
+            )
+            .execute(&mut *transaction)
+            .await?;
+        }
+        if let Some(expires) = &info.expires {
+            if expires < &Utc::now() {
+                return Err(ApiError::InvalidInput(
+                    "Expire date must be in the future!".to_string(),
+                ));
+            }
+            sqlx::query!(
+                "
+                UPDATE pats
+                SET expires = $1
+                WHERE id = $2
+                ",
+                expires,
+                pat.id.0
+            )
+            .execute(&mut *transaction)
+            .await?;
+        }
+        transaction.commit().await?;
+        database::models::pat_item::DBPersonalAccessToken::clear_cache(
+            vec![(Some(pat.id), Some(pat.access_token), Some(pat.user_id))],
+            &redis,
+        )
+        .await?;
+    }
     Ok(HttpResponse::NoContent().finish())
@@ -276,21 +276,21 @@ pub async fn delete_pat(
     )
     .await?;
-    if let Some(pat) = pat {
-        if pat.user_id == user.id.into() {
-            let mut transaction = pool.begin().await?;
-            database::models::pat_item::DBPersonalAccessToken::remove(
-                pat.id,
-                &mut transaction,
-            )
-            .await?;
-            transaction.commit().await?;
-            database::models::pat_item::DBPersonalAccessToken::clear_cache(
-                vec![(Some(pat.id), Some(pat.access_token), Some(pat.user_id))],
-                &redis,
-            )
-            .await?;
-        }
-    }
+    if let Some(pat) = pat
+        && pat.user_id == user.id.into()
+    {
+        let mut transaction = pool.begin().await?;
+        database::models::pat_item::DBPersonalAccessToken::remove(
+            pat.id,
+            &mut transaction,
+        )
+        .await?;
+        transaction.commit().await?;
+        database::models::pat_item::DBPersonalAccessToken::clear_cache(
+            vec![(Some(pat.id), Some(pat.access_token), Some(pat.user_id))],
+            &redis,
+        )
+        .await?;
+    }
     Ok(HttpResponse::NoContent().finish())

@@ -185,21 +185,21 @@ pub async fn delete(
     let session = DBSession::get(info.into_inner().0, &**pool, &redis).await?;
-    if let Some(session) = session {
-        if session.user_id == current_user.id.into() {
-            let mut transaction = pool.begin().await?;
-            DBSession::remove(session.id, &mut transaction).await?;
-            transaction.commit().await?;
-            DBSession::clear_cache(
-                vec![(
-                    Some(session.id),
-                    Some(session.session),
-                    Some(session.user_id),
-                )],
-                &redis,
-            )
-            .await?;
-        }
-    }
+    if let Some(session) = session
+        && session.user_id == current_user.id.into()
+    {
+        let mut transaction = pool.begin().await?;
+        DBSession::remove(session.id, &mut transaction).await?;
+        transaction.commit().await?;
+        DBSession::clear_cache(
+            vec![(
+                Some(session.id),
+                Some(session.session),
+                Some(session.user_id),
+            )],
+            &redis,
+        )
+        .await?;
+    }
     Ok(HttpResponse::NoContent().body(""))

@@ -401,14 +401,13 @@ async fn broadcast_to_known_local_friends(
             friend.user_id
         };
-        if friend.accepted {
-            if let Some(socket_ids) =
-                sockets.sockets_by_user_id.get(&friend_id.into())
-            {
-                for socket_id in socket_ids.iter() {
-                    if let Some(socket) = sockets.sockets.get(&socket_id) {
-                        let _ = send_message(socket.value(), &message).await;
-                    }
-                }
-            }
-        }
+        if friend.accepted
+            && let Some(socket_ids) =
+                sockets.sockets_by_user_id.get(&friend_id.into())
+        {
+            for socket_id in socket_ids.iter() {
+                if let Some(socket) = sockets.sockets.get(&socket_id) {
+                    let _ = send_message(socket.value(), &message).await;
+                }
+            }
+        }

@@ -387,17 +387,16 @@ pub async fn revenue_get(
         .map(|x| (x.to_string(), HashMap::new()))
         .collect::<HashMap<_, _>>();
     for value in payouts_values {
-        if let Some(mod_id) = value.mod_id {
-            if let Some(amount) = value.amount_sum {
-                if let Some(interval_start) = value.interval_start {
-                    let id_string = to_base62(mod_id as u64);
-                    if !hm.contains_key(&id_string) {
-                        hm.insert(id_string.clone(), HashMap::new());
-                    }
-                    if let Some(hm) = hm.get_mut(&id_string) {
-                        hm.insert(interval_start.timestamp(), amount);
-                    }
-                }
-            }
-        }
+        if let Some(mod_id) = value.mod_id
+            && let Some(amount) = value.amount_sum
+            && let Some(interval_start) = value.interval_start
+        {
+            let id_string = to_base62(mod_id as u64);
+            if !hm.contains_key(&id_string) {
+                hm.insert(id_string.clone(), HashMap::new());
+            }
+            if let Some(hm) = hm.get_mut(&id_string) {
+                hm.insert(interval_start.timestamp(), amount);
+            }
+        }
     }

@@ -192,10 +192,10 @@ pub async fn collection_get(
         .map(|x| x.1)
         .ok();
-    if let Some(data) = collection_data {
-        if is_visible_collection(&data, &user_option, false).await? {
-            return Ok(HttpResponse::Ok().json(Collection::from(data)));
-        }
+    if let Some(data) = collection_data
+        && is_visible_collection(&data, &user_option, false).await?
+    {
+        return Ok(HttpResponse::Ok().json(Collection::from(data)));
     }
     Err(ApiError::NotFound)
 }

@@ -536,11 +536,9 @@ pub async fn create_payout(
                     Some(true),
                 )
                 .await
+                && let Some(data) = res.items.first()
             {
-                if let Some(data) = res.items.first() {
-                    payout_item.platform_id =
-                        Some(data.payout_item_id.clone());
-                }
+                payout_item.platform_id = Some(data.payout_item_id.clone());
             }
         }

@@ -182,10 +182,10 @@ pub async fn project_get(
         .map(|x| x.1)
         .ok();
-    if let Some(data) = project_data {
-        if is_visible_project(&data.inner, &user_option, &pool, false).await? {
-            return Ok(HttpResponse::Ok().json(Project::from(data)));
-        }
+    if let Some(data) = project_data
+        && is_visible_project(&data.inner, &user_option, &pool, false).await?
+    {
+        return Ok(HttpResponse::Ok().json(Project::from(data)));
     }
     Err(ApiError::NotFound)
 }
@@ -405,34 +405,36 @@ pub async fn project_edit(
                         .await?;
                     }
-                    if status.is_searchable() && !project_item.inner.webhook_sent {
-                        if let Ok(webhook_url) = dotenvy::var("PUBLIC_DISCORD_WEBHOOK") {
-                            crate::util::webhook::send_discord_webhook(
-                                project_item.inner.id.into(),
-                                &pool,
-                                &redis,
-                                webhook_url,
-                                None,
-                            )
-                            .await
-                            .ok();
-                            sqlx::query!(
-                                "
+                    if status.is_searchable()
+                        && !project_item.inner.webhook_sent
+                        && let Ok(webhook_url) = dotenvy::var("PUBLIC_DISCORD_WEBHOOK")
+                    {
+                        crate::util::webhook::send_discord_webhook(
+                            project_item.inner.id.into(),
+                            &pool,
+                            &redis,
+                            webhook_url,
+                            None,
+                        )
+                        .await
+                        .ok();
+                        sqlx::query!(
+                            "
                             UPDATE mods
                             SET webhook_sent = TRUE
                             WHERE id = $1
                             ",
-                                id as db_ids::DBProjectId,
-                            )
-                            .execute(&mut *transaction)
-                            .await?;
-                        }
+                            id as db_ids::DBProjectId,
+                        )
+                        .execute(&mut *transaction)
+                        .await?;
                     }
-                    if user.role.is_mod() {
-                        if let Ok(webhook_url) = dotenvy::var("MODERATION_SLACK_WEBHOOK") {
-                            crate::util::webhook::send_slack_webhook(
+                    if user.role.is_mod()
+                        && let Ok(webhook_url) = dotenvy::var("MODERATION_SLACK_WEBHOOK")
+                    {
+                        crate::util::webhook::send_slack_webhook(
                             project_item.inner.id.into(),
                             &pool,
                             &redis,
@@ -451,7 +453,6 @@ pub async fn project_edit(
                         )
                         .await
                         .ok();
-                        }
                     }
                     if team_member.is_none_or(|x| !x.accepted) {
@@ -694,45 +695,45 @@ pub async fn project_edit(
                 .await?;
             }
-            if let Some(links) = &new_project.link_urls {
-                if !links.is_empty() {
-                    if !perms.contains(ProjectPermissions::EDIT_DETAILS) {
-                        return Err(ApiError::CustomAuthentication(
+            if let Some(links) = &new_project.link_urls
+                && !links.is_empty()
+            {
+                if !perms.contains(ProjectPermissions::EDIT_DETAILS) {
+                    return Err(ApiError::CustomAuthentication(
                         "You do not have the permissions to edit the links of this project!"
                             .to_string(),
                     ));
                 }
-                    let ids_to_delete = links.keys().cloned().collect::<Vec<String>>();
-                    // Deletes all links from hashmap- either will be deleted or be replaced
-                    sqlx::query!(
-                        "
+                let ids_to_delete = links.keys().cloned().collect::<Vec<String>>();
+                // Deletes all links from hashmap- either will be deleted or be replaced
+                sqlx::query!(
+                    "
                     DELETE FROM mods_links
                     WHERE joining_mod_id = $1 AND joining_platform_id IN (
                         SELECT id FROM link_platforms WHERE name = ANY($2)
                     )
                     ",
-                        id as db_ids::DBProjectId,
-                        &ids_to_delete
-                    )
-                    .execute(&mut *transaction)
-                    .await?;
-                    for (platform, url) in links {
-                        if let Some(url) = url {
-                            let platform_id =
-                                db_models::categories::LinkPlatform::get_id(
-                                    platform,
-                                    &mut *transaction,
-                                )
-                                .await?
-                                .ok_or_else(|| {
-                                    ApiError::InvalidInput(format!(
-                                        "Platform {} does not exist.",
-                                        platform.clone()
-                                    ))
-                                })?;
-                            sqlx::query!(
+                    id as db_ids::DBProjectId,
+                    &ids_to_delete
+                )
+                .execute(&mut *transaction)
+                .await?;
+                for (platform, url) in links {
+                    if let Some(url) = url {
+                        let platform_id = db_models::categories::LinkPlatform::get_id(
+                            platform,
+                            &mut *transaction,
+                        )
+                        .await?
+                        .ok_or_else(|| {
+                            ApiError::InvalidInput(format!(
+                                "Platform {} does not exist.",
+                                platform.clone()
+                            ))
+                        })?;
+                        sqlx::query!(
                         "
                         INSERT INTO mods_links (joining_mod_id, joining_platform_id, url)
                         VALUES ($1, $2, $3)
@@ -743,7 +744,6 @@ pub async fn project_edit(
                         )
                         .execute(&mut *transaction)
                         .await?;
-                        }
                     }
                 }
             }
@@ -2455,7 +2455,7 @@ pub async fn project_get_organization(
             organization,
             team_members,
         );
-        return Ok(HttpResponse::Ok().json(organization));
+        Ok(HttpResponse::Ok().json(organization))
     } else {
         Err(ApiError::NotFound)
     }

@@ -767,12 +767,13 @@ pub async fn edit_team_member(
             ));
         }
-        if let Some(new_permissions) = edit_member.permissions {
-            if !permissions.contains(new_permissions) {
-                return Err(ApiError::InvalidInput(
-                    "The new permissions have permissions that you don't have".to_string(),
-                ));
-            }
+        if let Some(new_permissions) = edit_member.permissions
+            && !permissions.contains(new_permissions)
+        {
+            return Err(ApiError::InvalidInput(
+                "The new permissions have permissions that you don't have"
+                    .to_string(),
+            ));
         }
         if edit_member.organization_permissions.is_some() {
@@ -800,13 +801,12 @@ pub async fn edit_team_member(
         }
         if let Some(new_permissions) = edit_member.organization_permissions
+            && !organization_permissions.contains(new_permissions)
         {
-            if !organization_permissions.contains(new_permissions) {
-                return Err(ApiError::InvalidInput(
+            return Err(ApiError::InvalidInput(
                 "The new organization permissions have permissions that you don't have"
                     .to_string(),
             ));
-            }
         }
     if edit_member.permissions.is_some()
@@ -822,13 +822,13 @@ pub async fn edit_team_member(
         }
     }
-    if let Some(payouts_split) = edit_member.payouts_split {
-        if payouts_split < Decimal::ZERO || payouts_split > Decimal::from(5000)
-        {
-            return Err(ApiError::InvalidInput(
-                "Payouts split must be between 0 and 5000!".to_string(),
-            ));
-        }
+    if let Some(payouts_split) = edit_member.payouts_split
+        && (payouts_split < Decimal::ZERO
+            || payouts_split > Decimal::from(5000))
+    {
+        return Err(ApiError::InvalidInput(
+            "Payouts split must be between 0 and 5000!".to_string(),
+        ));
     }
     DBTeamMember::edit_team_member(
@@ -883,13 +883,13 @@ pub async fn transfer_ownership(
         DBTeam::get_association(id.into(), &**pool).await?;
     if let Some(TeamAssociationId::Project(pid)) = team_association_id {
         let result = DBProject::get_id(pid, &**pool, &redis).await?;
-        if let Some(project_item) = result {
-            if project_item.inner.organization_id.is_some() {
-                return Err(ApiError::InvalidInput(
-                    "You cannot transfer ownership of a project team that is owend by an organization"
-                        .to_string(),
-                ));
-            }
-        }
+        if let Some(project_item) = result
+            && project_item.inner.organization_id.is_some()
+        {
+            return Err(ApiError::InvalidInput(
+                "You cannot transfer ownership of a project team that is owend by an organization"
+                    .to_string(),
+            ));
+        }
     }

@@ -289,36 +289,33 @@ pub async fn thread_get(
         .await?
         .1;
-    if let Some(mut data) = thread_data {
-        if is_authorized_thread(&data, &user, &pool).await? {
-            let authors = &mut data.members;
-            authors.append(
-                &mut data
-                    .messages
-                    .iter()
-                    .filter_map(|x| {
-                        if x.hide_identity && !user.role.is_mod() {
-                            None
-                        } else {
-                            x.author_id
-                        }
-                    })
-                    .collect::<Vec<_>>(),
-            );
-            let users: Vec<User> = database::models::DBUser::get_many_ids(
-                authors, &**pool, &redis,
-            )
-            .await?
-            .into_iter()
-            .map(From::from)
-            .collect();
-            return Ok(
-                HttpResponse::Ok().json(Thread::from(data, users, &user))
-            );
-        }
-    }
+    if let Some(mut data) = thread_data
+        && is_authorized_thread(&data, &user, &pool).await?
+    {
+        let authors = &mut data.members;
+        authors.append(
+            &mut data
+                .messages
+                .iter()
+                .filter_map(|x| {
+                    if x.hide_identity && !user.role.is_mod() {
+                        None
+                    } else {
+                        x.author_id
+                    }
+                })
+                .collect::<Vec<_>>(),
+        );
+        let users: Vec<User> =
+            database::models::DBUser::get_many_ids(authors, &**pool, &redis)
+                .await?
+                .into_iter()
+                .map(From::from)
+                .collect();
+        return Ok(HttpResponse::Ok().json(Thread::from(data, users, &user)));
+    }
    Err(ApiError::NotFound)
 }
@@ -454,33 +451,32 @@ pub async fn thread_send_message(
             )
             .await?;
-            if let Some(project) = project {
-                if project.inner.status != ProjectStatus::Processing
-                    && user.role.is_mod()
-                {
-                    let members =
-                        database::models::DBTeamMember::get_from_team_full(
-                            project.inner.team_id,
-                            &**pool,
-                            &redis,
-                        )
-                        .await?;
-                    NotificationBuilder {
-                        body: NotificationBody::ModeratorMessage {
-                            thread_id: thread.id.into(),
-                            message_id: id.into(),
-                            project_id: Some(project.inner.id.into()),
-                            report_id: None,
-                        },
-                    }
-                    .insert_many(
-                        members.into_iter().map(|x| x.user_id).collect(),
-                        &mut transaction,
-                        &redis,
-                    )
-                    .await?;
-                }
-            }
+            if let Some(project) = project
+                && project.inner.status != ProjectStatus::Processing
+                && user.role.is_mod()
+            {
+                let members =
+                    database::models::DBTeamMember::get_from_team_full(
+                        project.inner.team_id,
+                        &**pool,
+                        &redis,
+                    )
+                    .await?;
+                NotificationBuilder {
+                    body: NotificationBody::ModeratorMessage {
+                        thread_id: thread.id.into(),
+                        message_id: id.into(),
+                        project_id: Some(project.inner.id.into()),
+                        report_id: None,
+                    },
+                }
+                .insert_many(
+                    members.into_iter().map(|x| x.user_id).collect(),
+                    &mut transaction,
+                    &redis,
+                )
+                .await?;
+            }
         } else if let Some(report_id) = thread.report_id {
             let report = database::models::report_item::DBReport::get(
@@ -522,10 +522,10 @@ async fn version_create_inner(
         .fetch_optional(pool)
         .await?;
-        if let Some(project_status) = project_status {
-            if project_status.status == ProjectStatus::Processing.as_str() {
-                moderation_queue.projects.insert(project_id.into());
-            }
+        if let Some(project_status) = project_status
+            && project_status.status == ProjectStatus::Processing.as_str()
+        {
+            moderation_queue.projects.insert(project_id.into());
         }
         Ok(HttpResponse::Ok().json(response))
@@ -871,16 +871,16 @@ pub async fn upload_file(
             ref format,
             ref files,
         } = validation_result
+            && dependencies.is_empty()
         {
-            if dependencies.is_empty() {
-                let hashes: Vec<Vec<u8>> = format
-                    .files
-                    .iter()
-                    .filter_map(|x| x.hashes.get(&PackFileHash::Sha1))
-                    .map(|x| x.as_bytes().to_vec())
-                    .collect();
-                let res = sqlx::query!(
+            let hashes: Vec<Vec<u8>> = format
+                .files
+                .iter()
+                .filter_map(|x| x.hashes.get(&PackFileHash::Sha1))
+                .map(|x| x.as_bytes().to_vec())
+                .collect();
+            let res = sqlx::query!(
                 "
                 SELECT v.id version_id, v.mod_id project_id, h.hash hash FROM hashes h
                 INNER JOIN files f on h.file_id = f.id
@@ -892,45 +892,44 @@ pub async fn upload_file(
             .fetch_all(&mut **transaction)
             .await?;
-                for file in &format.files {
-                    if let Some(dep) = res.iter().find(|x| {
-                        Some(&*x.hash)
-                            == file
-                                .hashes
-                                .get(&PackFileHash::Sha1)
-                                .map(|x| x.as_bytes())
-                    }) {
-                        dependencies.push(DependencyBuilder {
-                            project_id: Some(models::DBProjectId(dep.project_id)),
-                            version_id: Some(models::DBVersionId(dep.version_id)),
-                            file_name: None,
-                            dependency_type: DependencyType::Embedded.to_string(),
-                        });
-                    } else if let Some(first_download) = file.downloads.first() {
-                        dependencies.push(DependencyBuilder {
-                            project_id: None,
-                            version_id: None,
-                            file_name: Some(
-                                first_download
-                                    .rsplit('/')
-                                    .next()
-                                    .unwrap_or(first_download)
-                                    .to_string(),
-                            ),
-                            dependency_type: DependencyType::Embedded.to_string(),
-                        });
-                    }
-                }
+            for file in &format.files {
+                if let Some(dep) = res.iter().find(|x| {
+                    Some(&*x.hash)
+                        == file
+                            .hashes
+                            .get(&PackFileHash::Sha1)
+                            .map(|x| x.as_bytes())
+                }) {
+                    dependencies.push(DependencyBuilder {
+                        project_id: Some(models::DBProjectId(dep.project_id)),
+                        version_id: Some(models::DBVersionId(dep.version_id)),
+                        file_name: None,
+                        dependency_type: DependencyType::Embedded.to_string(),
+                    });
+                } else if let Some(first_download) = file.downloads.first() {
+                    dependencies.push(DependencyBuilder {
+                        project_id: None,
+                        version_id: None,
+                        file_name: Some(
+                            first_download
+                                .rsplit('/')
+                                .next()
+                                .unwrap_or(first_download)
+                                .to_string(),
+                        ),
+                        dependency_type: DependencyType::Embedded.to_string(),
+                    });
+                }
+            }
-                for file in files {
-                    if !file.is_empty() {
-                        dependencies.push(DependencyBuilder {
-                            project_id: None,
-                            version_id: None,
-                            file_name: Some(file.to_string()),
-                            dependency_type: DependencyType::Embedded.to_string(),
-                        });
-                    }
-                }
+            for file in files {
+                if !file.is_empty() {
+                    dependencies.push(DependencyBuilder {
+                        project_id: None,
+                        version_id: None,
+                        file_name: Some(file.to_string()),
+                        dependency_type: DependencyType::Embedded.to_string(),
+                    });
+                }
+            }
         }
@@ -974,10 +973,10 @@ pub async fn upload_file(
             ));
         }
-        if let ValidationResult::Warning(msg) = validation_result {
-            if primary {
-                return Err(CreateError::InvalidInput(msg.to_string()));
-            }
+        if let ValidationResult::Warning(msg) = validation_result
+            && primary
+        {
+            return Err(CreateError::InvalidInput(msg.to_string()));
         }
         let url = format!("{cdn_url}/{file_path_encode}");

@@ -148,65 +148,55 @@ pub async fn get_update_from_hash(
         &redis,
     )
    .await?
-    {
-        if let Some(project) = database::models::DBProject::get_id(
+        && let Some(project) = database::models::DBProject::get_id(
             file.project_id,
             &**pool,
             &redis,
         )
        .await?
-        {
-            let mut versions = database::models::DBVersion::get_many(
-                &project.versions,
-                &**pool,
-                &redis,
-            )
-            .await?
-            .into_iter()
-            .filter(|x| {
-                let mut bool = true;
-                if let Some(version_types) = &update_data.version_types {
-                    bool &= version_types
-                        .iter()
-                        .any(|y| y.as_str() == x.inner.version_type);
-                }
-                if let Some(loaders) = &update_data.loaders {
-                    bool &= x.loaders.iter().any(|y| loaders.contains(y));
-                }
-                if let Some(loader_fields) = &update_data.loader_fields {
-                    for (key, values) in loader_fields {
-                        bool &= if let Some(x_vf) = x
-                            .version_fields
-                            .iter()
-                            .find(|y| y.field_name == *key)
-                        {
-                            values
-                                .iter()
-                                .any(|v| x_vf.value.contains_json_value(v))
-                        } else {
-                            true
-                        };
-                    }
-                }
-                bool
-            })
-            .sorted();
-            if let Some(first) = versions.next_back() {
-                if !is_visible_version(
-                    &first.inner,
-                    &user_option,
-                    &pool,
-                    &redis,
-                )
-                .await?
-                {
-                    return Err(ApiError::NotFound);
-                }
-                return Ok(HttpResponse::Ok()
-                    .json(models::projects::Version::from(first)));
-            }
-        }
-    }
+    {
+        let mut versions = database::models::DBVersion::get_many(
+            &project.versions,
+            &**pool,
+            &redis,
+        )
+        .await?
+        .into_iter()
+        .filter(|x| {
+            let mut bool = true;
+            if let Some(version_types) = &update_data.version_types {
+                bool &= version_types
+                    .iter()
+                    .any(|y| y.as_str() == x.inner.version_type);
+            }
+            if let Some(loaders) = &update_data.loaders {
+                bool &= x.loaders.iter().any(|y| loaders.contains(y));
+            }
+            if let Some(loader_fields) = &update_data.loader_fields {
+                for (key, values) in loader_fields {
+                    bool &= if let Some(x_vf) =
+                        x.version_fields.iter().find(|y| y.field_name == *key)
+                    {
+                        values.iter().any(|v| x_vf.value.contains_json_value(v))
+                    } else {
+                        true
+                    };
+                }
+            }
+            bool
+        })
+        .sorted();
+        if let Some(first) = versions.next_back() {
+            if !is_visible_version(&first.inner, &user_option, &pool, &redis)
+                .await?
+            {
+                return Err(ApiError::NotFound);
+            }
+            return Ok(
+                HttpResponse::Ok().json(models::projects::Version::from(first))
+            );
+        }
+    }
    Err(ApiError::NotFound)
@@ -398,13 +388,12 @@ pub async fn update_files(
         if let Some(version) = versions
             .iter()
             .find(|x| x.inner.project_id == file.project_id)
+            && let Some(hash) = file.hashes.get(&algorithm)
         {
-            if let Some(hash) = file.hashes.get(&algorithm) {
-                response.insert(
-                    hash.clone(),
-                    models::projects::Version::from(version.clone()),
-                );
-            }
+            response.insert(
+                hash.clone(),
+                models::projects::Version::from(version.clone()),
+            );
         }
     }
@@ -484,69 +473,59 @@ pub async fn update_individual_files(
     for project in projects {
         for file in files.iter().filter(|x| x.project_id == project.inner.id) {
-            if let Some(hash) = file.hashes.get(&algorithm) {
-                if let Some(query_file) =
+            if let Some(hash) = file.hashes.get(&algorithm)
+                && let Some(query_file) =
                     update_data.hashes.iter().find(|x| &x.hash == hash)
-                {
-                    let version = all_versions
-                        .iter()
-                        .filter(|x| x.inner.project_id == file.project_id)
-                        .filter(|x| {
-                            let mut bool = true;
-                            if let Some(version_types) =
-                                &query_file.version_types
-                            {
-                                bool &= version_types.iter().any(|y| {
-                                    y.as_str() == x.inner.version_type
-                                });
-                            }
-                            if let Some(loaders) = &query_file.loaders {
-                                bool &= x
-                                    .loaders
-                                    .iter()
-                                    .any(|y| loaders.contains(y));
-                            }
-                            if let Some(loader_fields) =
-                                &query_file.loader_fields
-                            {
-                                for (key, values) in loader_fields {
-                                    bool &= if let Some(x_vf) = x
-                                        .version_fields
-                                        .iter()
-                                        .find(|y| y.field_name == *key)
-                                    {
-                                        values.iter().any(|v| {
-                                            x_vf.value.contains_json_value(v)
-                                        })
-                                    } else {
-                                        true
-                                    };
-                                }
-                            }
-                            bool
-                        })
-                        .sorted()
-                        .next_back();
-                    if let Some(version) = version {
-                        if is_visible_version(
-                            &version.inner,
-                            &user_option,
-                            &pool,
-                            &redis,
-                        )
-                        .await?
-                        {
-                            response.insert(
-                                hash.clone(),
-                                models::projects::Version::from(
-                                    version.clone(),
-                                ),
-                            );
-                        }
-                    }
-                }
-            }
+            {
+                let version = all_versions
+                    .iter()
+                    .filter(|x| x.inner.project_id == file.project_id)
+                    .filter(|x| {
+                        let mut bool = true;
+                        if let Some(version_types) = &query_file.version_types {
+                            bool &= version_types
+                                .iter()
+                                .any(|y| y.as_str() == x.inner.version_type);
+                        }
+                        if let Some(loaders) = &query_file.loaders {
+                            bool &=
+                                x.loaders.iter().any(|y| loaders.contains(y));
+                        }
+                        if let Some(loader_fields) = &query_file.loader_fields {
+                            for (key, values) in loader_fields {
+                                bool &= if let Some(x_vf) = x
+                                    .version_fields
+                                    .iter()
+                                    .find(|y| y.field_name == *key)
+                                {
+                                    values.iter().any(|v| {
+                                        x_vf.value.contains_json_value(v)
+                                    })
+                                } else {
+                                    true
+                                };
+                            }
+                        }
+                        bool
+                    })
+                    .sorted()
+                    .next_back();
+                if let Some(version) = version
+                    && is_visible_version(
+                        &version.inner,
+                        &user_option,
+                        &pool,
+                        &redis,
+                    )
+                    .await?
+                {
+                    response.insert(
+                        hash.clone(),
+                        models::projects::Version::from(version.clone()),
+                    );
+                }
+            }

@@ -106,13 +106,12 @@ pub async fn version_project_get_helper(
             || x.inner.version_number == id.1
     });
-    if let Some(version) = version {
-        if is_visible_version(&version.inner, &user_option, &pool, &redis)
-            .await?
-        {
-            return Ok(HttpResponse::Ok()
-                .json(models::projects::Version::from(version)));
-        }
-    }
+    if let Some(version) = version
+        && is_visible_version(&version.inner, &user_option, &pool, &redis)
+            .await?
+    {
+        return Ok(HttpResponse::Ok()
+            .json(models::projects::Version::from(version)));
+    }
@@ -190,12 +189,12 @@ pub async fn version_get_helper(
         .map(|x| x.1)
         .ok();
-    if let Some(data) = version_data {
-        if is_visible_version(&data.inner, &user_option, &pool, &redis).await? {
-            return Ok(
-                HttpResponse::Ok().json(models::projects::Version::from(data))
-            );
-        }
-    }
+    if let Some(data) = version_data
+        && is_visible_version(&data.inner, &user_option, &pool, &redis).await?
+    {
+        return Ok(
+            HttpResponse::Ok().json(models::projects::Version::from(data))
+        );
+    }
     Err(ApiError::NotFound)

@@ -15,14 +15,12 @@ pub async fn get_user_status(
         return Some(friend_status);
     }
-    if let Ok(mut conn) = redis.pool.get().await {
-        if let Ok(mut statuses) =
+    if let Ok(mut conn) = redis.pool.get().await
+        && let Ok(mut statuses) =
             conn.sscan::<_, String>(get_field_name(user)).await
-        {
-            if let Some(status_json) = statuses.next_item().await {
-                return serde_json::from_str::<UserStatus>(&status_json).ok();
-            }
-        }
+        && let Some(status_json) = statuses.next_item().await
+    {
+        return serde_json::from_str::<UserStatus>(&status_json).ok();
    }
    None

@@ -138,12 +138,11 @@ fn process_image(
     let (orig_width, orig_height) = img.dimensions();
     let aspect_ratio = orig_width as f32 / orig_height as f32;
-    if let Some(target_width) = target_width {
-        if img.width() > target_width {
-            let new_height =
-                (target_width as f32 / aspect_ratio).round() as u32;
-            img = img.resize(target_width, new_height, FilterType::Lanczos3);
-        }
+    if let Some(target_width) = target_width
+        && img.width() > target_width
+    {
+        let new_height = (target_width as f32 / aspect_ratio).round() as u32;
+        img = img.resize(target_width, new_height, FilterType::Lanczos3);
     }
     if let Some(min_aspect_ratio) = min_aspect_ratio {

@@ -133,12 +133,11 @@ pub async fn rate_limit_middleware(
         .expect("Rate limiter not configured properly")
         .clone();
-    if let Some(key) = req.headers().get("x-ratelimit-key") {
-        if key.to_str().ok()
+    if let Some(key) = req.headers().get("x-ratelimit-key")
+        && key.to_str().ok()
             == dotenvy::var("RATE_LIMIT_IGNORE_KEY").ok().as_deref()
-        {
-            return Ok(next.call(req).await?.map_into_left_body());
-        }
+    {
+        return Ok(next.call(req).await?.map_into_left_body());
     }
     let conn_info = req.connection_info().clone();

@@ -22,46 +22,47 @@ pub fn validation_errors_to_string(
     let key_option = map.keys().next();
-    if let Some(field) = key_option {
-        if let Some(error) = map.get(field) {
-            return match error {
-                ValidationErrorsKind::Struct(errors) => {
-                    validation_errors_to_string(
-                        *errors.clone(),
-                        Some(format!("of item {field}")),
-                    )
-                }
-                ValidationErrorsKind::List(list) => {
-                    if let Some((index, errors)) = list.iter().next() {
-                        output.push_str(&validation_errors_to_string(
-                            *errors.clone(),
-                            Some(format!("of list {field} with index {index}")),
-                        ));
-                    }
-                    output
-                }
-                ValidationErrorsKind::Field(errors) => {
-                    if let Some(error) = errors.first() {
-                        if let Some(adder) = adder {
-                            write!(
-                                &mut output,
-                                "Field {field} {adder} failed validation with error: {}",
-                                error.code
-                            ).unwrap();
-                        } else {
-                            write!(
-                                &mut output,
-                                "Field {field} failed validation with error: {}",
-                                error.code
-                            ).unwrap();
-                        }
-                    }
-                    output
-                }
-            };
-        }
-    }
+    if let Some(field) = key_option
+        && let Some(error) = map.get(field)
+    {
+        return match error {
+            ValidationErrorsKind::Struct(errors) => {
+                validation_errors_to_string(
+                    *errors.clone(),
+                    Some(format!("of item {field}")),
+                )
+            }
+            ValidationErrorsKind::List(list) => {
+                if let Some((index, errors)) = list.iter().next() {
+                    output.push_str(&validation_errors_to_string(
+                        *errors.clone(),
+                        Some(format!("of list {field} with index {index}")),
+                    ));
+                }
+                output
+            }
+            ValidationErrorsKind::Field(errors) => {
+                if let Some(error) = errors.first() {
+                    if let Some(adder) = adder {
+                        write!(
+                            &mut output,
+                            "Field {field} {adder} failed validation with error: {}",
+                            error.code
+                        ).unwrap();
+                    } else {
+                        write!(
+                            &mut output,
+                            "Field {field} failed validation with error: {}",
+                            error.code
+                        )
+                        .unwrap();
+                    }
+                }
+                output
+            }
+        };
+    }
     String::new()

@@ -238,17 +238,17 @@ pub async fn send_slack_webhook(
         }
     });
-    if let Some(icon_url) = metadata.project_icon_url {
-        if let Some(project_block) = project_block.as_object_mut() {
-            project_block.insert(
-                "accessory".to_string(),
-                serde_json::json!({
-                    "type": "image",
-                    "image_url": icon_url,
-                    "alt_text": metadata.project_title
-                }),
-            );
-        }
+    if let Some(icon_url) = metadata.project_icon_url
+        && let Some(project_block) = project_block.as_object_mut()
+    {
+        project_block.insert(
+            "accessory".to_string(),
+            serde_json::json!({
+                "type": "image",
+                "image_url": icon_url,
+                "alt_text": metadata.project_title
+            }),
+        );
     }
     blocks.push(project_block);

@@ -222,10 +222,10 @@ impl<'a, A: Api> PermissionsTest<'a, A> {
                 resp.status().as_u16()
             ));
         }
-        if resp.status() == StatusCode::OK {
-            if let Some(failure_json_check) = &self.failure_json_check {
-                failure_json_check(&test::read_body_json(resp).await);
-            }
+        if resp.status() == StatusCode::OK
+            && let Some(failure_json_check) = &self.failure_json_check
+        {
+            failure_json_check(&test::read_body_json(resp).await);
         }
         // Failure test- logged in on a non-team user
@@ -246,10 +246,10 @@ impl<'a, A: Api> PermissionsTest<'a, A> {
                 resp.status().as_u16()
             ));
         }
-        if resp.status() == StatusCode::OK {
-            if let Some(failure_json_check) = &self.failure_json_check {
-                failure_json_check(&test::read_body_json(resp).await);
-            }
+        if resp.status() == StatusCode::OK
+            && let Some(failure_json_check) = &self.failure_json_check
+        {
+            failure_json_check(&test::read_body_json(resp).await);
         }
         // Failure test- logged in with EVERY non-relevant permission
@@ -270,10 +270,10 @@ impl<'a, A: Api> PermissionsTest<'a, A> {
                 resp.status().as_u16()
            ));
        }
-        if resp.status() == StatusCode::OK {
-            if let Some(failure_json_check) = &self.failure_json_check {
-                failure_json_check(&test::read_body_json(resp).await);
-            }
+        if resp.status() == StatusCode::OK
+            && let Some(failure_json_check) = &self.failure_json_check
+        {
+            failure_json_check(&test::read_body_json(resp).await);
        }
        // Patch user's permissions to success permissions
@@ -300,10 +300,10 @@ impl<'a, A: Api> PermissionsTest<'a, A> {
                 resp.status().as_u16()
             ));
         }
-        if resp.status() == StatusCode::OK {
-            if let Some(success_json_check) = &self.success_json_check {
-                success_json_check(&test::read_body_json(resp).await);
-            }
+        if resp.status() == StatusCode::OK
+            && let Some(success_json_check) = &self.success_json_check
+        {
+            success_json_check(&test::read_body_json(resp).await);
         }
         // If the remove_user flag is set, remove the user from the project


@@ -1,2 +1,2 @@
 allow-dbg-in-tests = true
-msrv = "1.88.0"
+msrv = "1.89.0"


@@ -50,10 +50,10 @@ pub async fn parse_command(
     // We assume anything else is a filepath to an .mrpack file
     let path = PathBuf::from(command_string);
     let path = io::canonicalize(path)?;
-    if let Some(ext) = path.extension() {
-        if ext == "mrpack" {
-            return Ok(CommandPayload::RunMRPack { path });
-        }
-    }
+    if let Some(ext) = path.extension()
+        && ext == "mrpack"
+    {
+        return Ok(CommandPayload::RunMRPack { path });
+    }
     emit_warning(&format!(
         "Invalid command, unrecognized filetype: {}",


@@ -106,13 +106,13 @@ pub async fn auto_install_java(java_version: u32) -> crate::Result<PathBuf> {
     })?;
     // removes the old installation of java
-    if let Some(file) = archive.file_names().next() {
-        if let Some(dir) = file.split('/').next() {
-            let path = path.join(dir);
-
-            if path.exists() {
-                io::remove_dir_all(path).await?;
-            }
-        }
-    }
+    if let Some(file) = archive.file_names().next()
+        && let Some(dir) = file.split('/').next()
+    {
+        let path = path.join(dir);
+
+        if path.exists() {
+            io::remove_dir_all(path).await?;
+        }
+    }


@@ -54,11 +54,11 @@ pub async fn remove_user(uuid: uuid::Uuid) -> crate::Result<()> {
     if let Some((uuid, user)) = users.remove(&uuid) {
         Credentials::remove(uuid, &state.pool).await?;
-        if user.active {
-            if let Some((_, mut user)) = users.into_iter().next() {
-                user.active = true;
-                user.upsert(&state.pool).await?;
-            }
-        }
+        if user.active
+            && let Some((_, mut user)) = users.into_iter().next()
+        {
+            user.active = true;
+            user.upsert(&state.pool).await?;
+        }
     }


@@ -221,14 +221,14 @@ async fn import_atlauncher_unmanaged(
         .unwrap_or_else(|| backup_name.to_string());
     prof.install_stage = ProfileInstallStage::PackInstalling;
-    if let Some(ref project_id) = description.project_id {
-        if let Some(ref version_id) = description.version_id {
-            prof.linked_data = Some(LinkedData {
-                project_id: project_id.clone(),
-                version_id: version_id.clone(),
-                locked: true,
-            })
-        }
-    }
+    if let Some(ref project_id) = description.project_id
+        && let Some(ref version_id) = description.version_id
+    {
+        prof.linked_data = Some(LinkedData {
+            project_id: project_id.clone(),
+            version_id: version_id.clone(),
+            locked: true,
+        })
+    }
     prof.icon_path = description


@@ -383,18 +383,18 @@ pub async fn set_profile_information(
         .unwrap_or_else(|| backup_name.to_string());
     prof.install_stage = ProfileInstallStage::PackInstalling;
-    if let Some(ref project_id) = description.project_id {
-        if let Some(ref version_id) = description.version_id {
-            prof.linked_data = Some(LinkedData {
-                project_id: project_id.clone(),
-                version_id: version_id.clone(),
-                locked: if !ignore_lock {
-                    true
-                } else {
-                    prof.linked_data.as_ref().is_none_or(|x| x.locked)
-                },
-            })
-        }
-    }
+    if let Some(ref project_id) = description.project_id
+        && let Some(ref version_id) = description.version_id
+    {
+        prof.linked_data = Some(LinkedData {
+            project_id: project_id.clone(),
+            version_id: version_id.clone(),
+            locked: if !ignore_lock {
+                true
+            } else {
+                prof.linked_data.as_ref().is_none_or(|x| x.locked)
+            },
+        })
+    }
     prof.icon_path = description
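
The `locked` fallback above leans on `Option::is_none_or`, stable since Rust 1.82: when `ignore_lock` is set, the previous lock state carries over (defaulting to locked when there is no prior linked data); otherwise the profile is simply locked. A tiny sketch of the semantics:

    fn carry_lock(previous: Option<bool>) -> bool {
        // None (no prior linked data) => true; Some(locked) => locked.
        previous.is_none_or(|locked| locked)
    }

    fn main() {
        assert!(carry_lock(None));
        assert!(carry_lock(Some(true)));
        assert!(!carry_lock(Some(false)));
    }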


@@ -149,13 +149,12 @@ pub async fn install_zipped_mrpack_files(
             let profile_path = profile_path.clone();
             async move {
                 //TODO: Future update: prompt user for optional files in a modpack
-                if let Some(env) = project.env {
-                    if env
-                        .get(&EnvType::Client)
-                        .is_some_and(|x| x == &SideType::Unsupported)
-                    {
-                        return Ok(());
-                    }
-                }
+                if let Some(env) = project.env
+                    && env
+                        .get(&EnvType::Client)
+                        .is_some_and(|x| x == &SideType::Unsupported)
+                {
+                    return Ok(());
+                }
                 let file = fetch_mirrors(
@@ -375,12 +374,12 @@ pub async fn remove_all_related_files(
         )
         .await?
     {
-        if let Some(metadata) = &project.metadata {
-            if to_remove.contains(&metadata.project_id) {
-                let path = profile_full_path.join(file_path);
-
-                if path.exists() {
-                    io::remove_file(&path).await?;
-                }
-            }
-        }
+        if let Some(metadata) = &project.metadata
+            && to_remove.contains(&metadata.project_id)
+        {
+            let path = profile_full_path.join(file_path);
+
+            if path.exists() {
+                io::remove_file(&path).await?;
+            }
+        }
     }


@@ -337,28 +337,26 @@ pub async fn update_project(
         )
         .await?
         .remove(project_path)
+        && let Some(update_version) = &file.update_version_id
     {
-        if let Some(update_version) = &file.update_version_id {
-            let path = Profile::add_project_version(
-                profile_path,
-                update_version,
-                &state.pool,
-                &state.fetch_semaphore,
-                &state.io_semaphore,
-            )
-            .await?;
+        let path = Profile::add_project_version(
+            profile_path,
+            update_version,
+            &state.pool,
+            &state.fetch_semaphore,
+            &state.io_semaphore,
+        )
+        .await?;
-            if path != project_path {
-                Profile::remove_project(profile_path, project_path).await?;
-            }
+        if path != project_path {
+            Profile::remove_project(profile_path, project_path).await?;
+        }
-            if !skip_send_event.unwrap_or(false) {
-                emit_profile(profile_path, ProfilePayloadType::Edited)
-                    .await?;
-            }
+        if !skip_send_event.unwrap_or(false) {
+            emit_profile(profile_path, ProfilePayloadType::Edited).await?;
+        }
-            return Ok(path);
-        }
+        return Ok(path);
     }
     Err(crate::ErrorKind::InputError(
@@ -479,10 +477,10 @@ pub async fn export_mrpack(
     let included_export_candidates = included_export_candidates
         .into_iter()
         .filter(|x| {
-            if let Some(f) = PathBuf::from(x).file_name() {
-                if f.to_string_lossy().starts_with(".DS_Store") {
-                    return false;
-                }
-            }
+            if let Some(f) = PathBuf::from(x).file_name()
+                && f.to_string_lossy().starts_with(".DS_Store")
+            {
+                return false;
+            }
             true
         })


@@ -184,6 +184,7 @@ pub enum LoadingBarType {
 }
 #[derive(Serialize, Clone)]
+#[cfg(feature = "tauri")]
 pub struct LoadingPayload {
     pub event: LoadingBarType,
     pub loader_uuid: Uuid,
@@ -192,11 +193,7 @@ pub struct LoadingPayload {
 }
 #[derive(Serialize, Clone)]
-pub struct OfflinePayload {
-    pub offline: bool,
-}
-
-#[derive(Serialize, Clone)]
+#[cfg(feature = "tauri")]
 pub struct WarningPayload {
     pub message: String,
 }
@@ -220,12 +217,14 @@ pub enum CommandPayload {
 }
 #[derive(Serialize, Clone)]
+#[cfg(feature = "tauri")]
 pub struct ProcessPayload {
     pub profile_path_id: String,
     pub uuid: Uuid,
     pub event: ProcessPayloadType,
     pub message: String,
 }
+
 #[derive(Serialize, Clone, Debug)]
 #[serde(rename_all = "snake_case")]
 pub enum ProcessPayloadType {
@@ -234,11 +233,13 @@ pub enum ProcessPayloadType {
 }
 #[derive(Serialize, Clone)]
+#[cfg(feature = "tauri")]
 pub struct ProfilePayload {
     pub profile_path_id: String,
     #[serde(flatten)]
     pub event: ProfilePayloadType,
 }
+
 #[derive(Serialize, Clone)]
 #[serde(tag = "event", rename_all = "snake_case")]
 pub enum ProfilePayloadType {
@@ -257,6 +258,16 @@ pub enum ProfilePayloadType {
     Removed,
 }
+#[derive(Serialize, Clone)]
+#[serde(rename_all = "snake_case")]
+#[serde(tag = "event")]
+pub enum FriendPayload {
+    FriendRequest { from: UserId },
+    UserOffline { id: UserId },
+    StatusUpdate { user_status: UserStatus },
+    StatusSync,
+}
+
 #[derive(Debug, thiserror::Error)]
 pub enum EventError {
     #[error("Event state was not properly initialized")]
@@ -269,13 +280,3 @@ pub enum EventError {
     #[error("Tauri error: {0}")]
     TauriError(#[from] tauri::Error),
 }
-
-#[derive(Serialize, Clone)]
-#[serde(rename_all = "snake_case")]
-#[serde(tag = "event")]
-pub enum FriendPayload {
-    FriendRequest { from: UserId },
-    UserOffline { id: UserId },
-    StatusUpdate { user_status: UserStatus },
-    StatusSync,
-}
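
Restating the gating pattern above as a hedged sketch (type and function names hypothetical): payload types that only ever cross the Tauri event bridge are compiled out, together with the code that emits them, when the `tauri` feature is off, so non-Tauri builds stop carrying dead serialization types:

    use serde::Serialize;

    #[derive(Serialize, Clone)]
    #[cfg(feature = "tauri")]
    pub struct ExamplePayload {
        pub message: String,
    }

    #[cfg(feature = "tauri")]
    fn emit_example(message: &str) {
        // Would hand an ExamplePayload to the Tauri event system here.
        let _ = ExamplePayload { message: message.to_string() };
    }

    #[cfg(not(feature = "tauri"))]
    fn emit_example(_message: &str) {
        // No-op when no frontend is attached.
    }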


@@ -32,15 +32,15 @@ pub fn get_class_paths(
     let mut cps = libraries
         .iter()
         .filter_map(|library| {
-            if let Some(rules) = &library.rules {
-                if !parse_rules(
-                    rules,
-                    java_arch,
-                    &QuickPlayType::None,
-                    minecraft_updated,
-                ) {
-                    return None;
-                }
-            }
+            if let Some(rules) = &library.rules
+                && !parse_rules(
+                    rules,
+                    java_arch,
+                    &QuickPlayType::None,
+                    minecraft_updated,
+                )
+            {
+                return None;
+            }
             if !library.include_in_classpath {
@@ -504,10 +504,10 @@ pub async fn get_processor_main_class(
         let mut line = line.map_err(IOError::from)?;
         line.retain(|c| !c.is_whitespace());
-        if line.starts_with("Main-Class:") {
-            if let Some(class) = line.split(':').nth(1) {
-                return Ok(Some(class.to_string()));
-            }
-        }
+        if line.starts_with("Main-Class:")
+            && let Some(class) = line.split(':').nth(1)
+        {
+            return Ok(Some(class.to_string()));
+        }
     }


@@ -290,12 +290,11 @@ pub async fn download_libraries(
     loading_try_for_each_concurrent(
         stream::iter(libraries.iter())
            .map(Ok::<&Library, crate::Error>), None, loading_bar,loading_amount,num_files, None,|library| async move {
-            if let Some(rules) = &library.rules {
-                if !parse_rules(rules, java_arch, &QuickPlayType::None, minecraft_updated) {
-                    tracing::trace!("Skipped library {}", &library.name);
-                    return Ok(());
-                }
-            }
+            if let Some(rules) = &library.rules
+                && !parse_rules(rules, java_arch, &QuickPlayType::None, minecraft_updated) {
+                tracing::trace!("Skipped library {}", &library.name);
+                return Ok(());
+            }
             if !library.downloadable {
                 tracing::trace!("Skipped non-downloadable library {}", &library.name);
@@ -311,15 +310,14 @@ pub async fn download_libraries(
                 return Ok(());
             }
-            if let Some(d::minecraft::LibraryDownloads { artifact: Some(ref artifact), ..}) = library.downloads {
-                if !artifact.url.is_empty(){
-                    let bytes = fetch(&artifact.url, Some(&artifact.sha1), &st.fetch_semaphore, &st.pool)
-                        .await?;
-                    write(&path, &bytes, &st.io_semaphore).await?;
-                    tracing::trace!("Fetched library {} to path {:?}", &library.name, &path);
-                    return Ok::<_, crate::Error>(());
-                }
-            }
+            if let Some(d::minecraft::LibraryDownloads { artifact: Some(ref artifact), ..}) = library.downloads
+                && !artifact.url.is_empty(){
+                let bytes = fetch(&artifact.url, Some(&artifact.sha1), &st.fetch_semaphore, &st.pool)
+                    .await?;
+                write(&path, &bytes, &st.io_semaphore).await?;
+                tracing::trace!("Fetched library {} to path {:?}", &library.name, &path);
+                return Ok::<_, crate::Error>(());
+            }
             let url = [
                 library


@@ -341,10 +341,10 @@ pub async fn install_minecraft(
     // Forge processors (90-100)
     for (index, processor) in processors.iter().enumerate() {
-        if let Some(sides) = &processor.sides {
-            if !sides.contains(&String::from("client")) {
-                continue;
-            }
-        }
+        if let Some(sides) = &processor.sides
+            && !sides.contains(&String::from("client"))
+        {
+            continue;
+        }
         let cp = {


@@ -385,10 +385,10 @@ impl DirectoryInfo {
                 return Err(e);
             }
         } else {
-            if let Some(disk_usage) = get_disk_usage(&move_dir)? {
-                if total_size > disk_usage {
-                    return Err(crate::ErrorKind::DirectoryMoveError(format!("Not enough space to move directory to {}: only {} bytes available", app_dir.display(), disk_usage)).into());
-                }
-            }
+            if let Some(disk_usage) = get_disk_usage(&move_dir)?
+                && total_size > disk_usage
+            {
+                return Err(crate::ErrorKind::DirectoryMoveError(format!("Not enough space to move directory to {}: only {} bytes available", app_dir.display(), disk_usage)).into());
+            }
             let loader_bar_id = Arc::new(&loader_bar_id);


@@ -9,7 +9,7 @@ use ariadne::networking::message::{
     ClientToServerMessage, ServerToClientMessage,
 };
 use ariadne::users::UserStatus;
-use async_tungstenite::WebSocketStream;
+use async_tungstenite::WebSocketSender;
 use async_tungstenite::tokio::{ConnectStream, connect_async};
 use async_tungstenite::tungstenite::Message;
 use async_tungstenite::tungstenite::client::IntoClientRequest;
@@ -17,7 +17,6 @@ use bytes::Bytes;
 use chrono::{DateTime, Utc};
 use dashmap::DashMap;
 use either::Either;
-use futures::stream::SplitSink;
 use futures::{SinkExt, StreamExt};
 use reqwest::Method;
 use reqwest::header::HeaderValue;
@@ -32,7 +31,7 @@ use tokio::sync::{Mutex, RwLock};
 use uuid::Uuid;
 pub(super) type WriteSocket =
-    Arc<RwLock<Option<SplitSink<WebSocketStream<ConnectStream>, Message>>>>;
+    Arc<RwLock<Option<WebSocketSender<ConnectStream>>>>;
 pub(super) type TunnelSockets = Arc<DashMap<Uuid, Arc<InternalTunnelSocket>>>;
 pub struct FriendsSocket {
@@ -180,27 +179,24 @@ impl FriendsSocket {
                             ServerToClientMessage::FriendSocketStoppedListening { .. } => {}, // TODO
                             ServerToClientMessage::SocketConnected { to_socket, new_socket } => {
-                                if let Some(connected_to) = sockets.get(&to_socket) {
-                                    if let InternalTunnelSocket::Listening(local_addr) = *connected_to.value().clone() {
-                                        if let Ok(new_stream) = TcpStream::connect(local_addr).await {
-                                            let (read, write) = new_stream.into_split();
-                                            sockets.insert(new_socket, Arc::new(InternalTunnelSocket::Connected(Mutex::new(write))));
-                                            Self::socket_read_loop(write_handle.clone(), read, new_socket);
-                                            continue;
-                                        }
-                                    }
-                                }
+                                if let Some(connected_to) = sockets.get(&to_socket)
+                                    && let InternalTunnelSocket::Listening(local_addr) = *connected_to.value().clone()
+                                    && let Ok(new_stream) = TcpStream::connect(local_addr).await {
+                                    let (read, write) = new_stream.into_split();
+                                    sockets.insert(new_socket, Arc::new(InternalTunnelSocket::Connected(Mutex::new(write))));
+                                    Self::socket_read_loop(write_handle.clone(), read, new_socket);
+                                    continue;
+                                }
                                 let _ = Self::send_message(&write_handle, ClientToServerMessage::SocketClose { socket: new_socket }).await;
                             },
                             ServerToClientMessage::SocketClosed { socket } => {
                                 sockets.remove_if(&socket, |_, x| matches!(*x.clone(), InternalTunnelSocket::Connected(_)));
                             },
                             ServerToClientMessage::SocketData { socket, data } => {
-                                if let Some(mut socket) = sockets.get_mut(&socket) {
-                                    if let InternalTunnelSocket::Connected(ref stream) = *socket.value_mut().clone() {
-                                        let _ = stream.lock().await.write_all(&data).await;
-                                    }
-                                }
+                                if let Some(mut socket) = sockets.get_mut(&socket)
+                                    && let InternalTunnelSocket::Connected(ref stream) = *socket.value_mut().clone() {
+                                    let _ = stream.lock().await.write_all(&data).await;
+                                }
                             },
                         }
                     }
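
async-tungstenite 0.30 (updated in this commit) provides its own `WebSocketSender` handle, replacing the `futures` `SplitSink` half stored before, but the surrounding ownership pattern is unchanged. A generic, hedged sketch of that pattern with the socket type abstracted to any `Sink`: keep the write half behind `Arc<RwLock<Option<...>>>` so a reconnect can swap it while concurrent senders fail soft:

    use std::sync::Arc;

    use futures::{Sink, SinkExt};
    use tokio::sync::RwLock;

    type WriteHandle<S> = Arc<RwLock<Option<S>>>;

    // Send if a connection is currently established; otherwise drop the message.
    async fn send_or_drop<S, M>(handle: &WriteHandle<S>, msg: M)
    where
        S: Sink<M> + Unpin,
    {
        if let Some(sink) = handle.write().await.as_mut() {
            let _ = sink.send(msg).await;
        }
    }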


@@ -100,8 +100,8 @@ pub async fn init_watcher() -> crate::Result<FileWatcher> {
                         let profile_path_str = profile_path_str.clone();
                         let world = world.clone();
                         tokio::spawn(async move {
-                            if let Ok(state) = State::get().await {
-                                if let Err(e) = attached_world_data::AttachedWorldData::remove_for_world(
+                            if let Ok(state) = State::get().await
+                                && let Err(e) = attached_world_data::AttachedWorldData::remove_for_world(
                                     &profile_path_str,
                                     WorldType::Singleplayer,
                                     &world,
@@ -109,7 +109,6 @@ pub async fn init_watcher() -> crate::Result<FileWatcher> {
                                 ).await {
                                     tracing::warn!("Failed to remove AttachedWorldData for '{world}': {e}")
                                 }
-                            }
                         });
                     }
                     Some(ProfilePayloadType::WorldUpdated { world })
@@ -150,14 +149,14 @@ pub(crate) async fn watch_profiles_init(
 ) {
     if let Ok(profiles_dir) = std::fs::read_dir(dirs.profiles_dir()) {
         for profile_dir in profiles_dir {
-            if let Ok(file_name) = profile_dir.map(|x| x.file_name()) {
-                if let Some(file_name) = file_name.to_str() {
-                    if file_name.starts_with(".DS_Store") {
-                        continue;
-                    };
-                    watch_profile(file_name, watcher, dirs).await;
-                }
-            }
+            if let Ok(file_name) = profile_dir.map(|x| x.file_name())
+                && let Some(file_name) = file_name.to_str()
+            {
+                if file_name.starts_with(".DS_Store") {
+                    continue;
+                };
+                watch_profile(file_name, watcher, dirs).await;
+            }
         }
     }


@@ -76,10 +76,9 @@ where
         .loaded_config_dir
         .clone()
         .and_then(|x| x.to_str().map(|x| x.to_string()))
+        && path != old_launcher_root_str
     {
-        if path != old_launcher_root_str {
-            settings.custom_dir = Some(path);
-        }
+        settings.custom_dir = Some(path);
     }
     settings.prev_custom_dir = Some(old_launcher_root_str.clone());
@@ -136,31 +135,27 @@
             .await?;
     }
-    if let Some(device_token) = minecraft_auth.token {
-        if let Ok(private_key) =
-            SigningKey::from_pkcs8_pem(&device_token.private_key)
-        {
-            if let Ok(uuid) = Uuid::parse_str(&device_token.id) {
-                DeviceTokenPair {
-                    token: DeviceToken {
-                        issue_instant: device_token.token.issue_instant,
-                        not_after: device_token.token.not_after,
-                        token: device_token.token.token,
-                        display_claims: device_token
-                            .token
-                            .display_claims,
-                    },
-                    key: DeviceTokenKey {
-                        id: uuid,
-                        key: private_key,
-                        x: device_token.x,
-                        y: device_token.y,
-                    },
-                }
-                .upsert(exec)
-                .await?;
-            }
-        }
-    }
+    if let Some(device_token) = minecraft_auth.token
+        && let Ok(private_key) =
+            SigningKey::from_pkcs8_pem(&device_token.private_key)
+        && let Ok(uuid) = Uuid::parse_str(&device_token.id)
+    {
+        DeviceTokenPair {
+            token: DeviceToken {
+                issue_instant: device_token.token.issue_instant,
+                not_after: device_token.token.not_after,
+                token: device_token.token.token,
+                display_claims: device_token.token.display_claims,
+            },
+            key: DeviceTokenKey {
+                id: uuid,
+                key: private_key,
+                x: device_token.x,
+                y: device_token.y,
+            },
+        }
+        .upsert(exec)
+        .await?;
+    }
@@ -207,100 +202,93 @@
                         update_version,
                         ..
                     } = project.metadata
-                    {
-                        if let Some(file) = version
-                            .files
-                            .iter()
-                            .find(|x| x.hashes.get("sha512") == Some(&sha512))
-                        {
-                            if let Some(sha1) = file.hashes.get("sha1") {
-                                if let Ok(metadata) = full_path.metadata() {
-                                    let file_name = format!(
-                                        "{}/{}",
-                                        profile.path,
-                                        path.replace('\\', "/")
-                                            .replace(".disabled", "")
-                                    );
-                                    cached_entries.push(CacheValue::FileHash(
-                                        CachedFileHash {
-                                            path: file_name,
-                                            size: metadata.len(),
-                                            hash: sha1.clone(),
-                                            project_type: ProjectType::get_from_parent_folder(&full_path),
-                                        },
-                                    ));
-                                }
-                                cached_entries.push(CacheValue::File(
-                                    CachedFile {
-                                        hash: sha1.clone(),
-                                        project_id: version.project_id.clone(),
-                                        version_id: version.id.clone(),
-                                    },
-                                ));
-                                if let Some(update_version) = update_version {
-                                    let mod_loader: ModLoader =
-                                        profile.metadata.loader.into();
-                                    cached_entries.push(
-                                        CacheValue::FileUpdate(
-                                            CachedFileUpdate {
-                                                hash: sha1.clone(),
-                                                game_version: profile
-                                                    .metadata
-                                                    .game_version
-                                                    .clone(),
-                                                loaders: vec![
-                                                    mod_loader
-                                                        .as_str()
-                                                        .to_string(),
-                                                ],
-                                                update_version_id:
-                                                    update_version.id.clone(),
-                                            },
-                                        ),
-                                    );
-                                    cached_entries.push(CacheValue::Version(
-                                        (*update_version).into(),
-                                    ));
-                                }
-                                let members = members
-                                    .into_iter()
-                                    .map(|x| {
-                                        let user = User {
-                                            id: x.user.id,
-                                            username: x.user.username,
-                                            avatar_url: x.user.avatar_url,
-                                            bio: x.user.bio,
-                                            created: x.user.created,
-                                            role: x.user.role,
-                                            badges: 0,
-                                        };
-                                        cached_entries.push(CacheValue::User(
-                                            user.clone(),
-                                        ));
-                                        TeamMember {
-                                            team_id: x.team_id,
-                                            user,
-                                            is_owner: x.role == "Owner",
-                                            role: x.role,
-                                            ordering: x.ordering,
-                                        }
-                                    })
-                                    .collect::<Vec<_>>();
-                                cached_entries.push(CacheValue::Team(members));
-                                cached_entries.push(CacheValue::Version(
-                                    (*version).into(),
-                                ));
-                            }
-                        }
-                    }
+                        && let Some(file) = version
+                            .files
+                            .iter()
+                            .find(|x| x.hashes.get("sha512") == Some(&sha512))
+                        && let Some(sha1) = file.hashes.get("sha1")
+                    {
+                        if let Ok(metadata) = full_path.metadata() {
+                            let file_name = format!(
+                                "{}/{}",
+                                profile.path,
+                                path.replace('\\', "/")
+                                    .replace(".disabled", "")
+                            );
+                            cached_entries.push(CacheValue::FileHash(
+                                CachedFileHash {
+                                    path: file_name,
+                                    size: metadata.len(),
+                                    hash: sha1.clone(),
+                                    project_type:
+                                        ProjectType::get_from_parent_folder(
+                                            &full_path,
+                                        ),
+                                },
+                            ));
+                        }
+                        cached_entries.push(CacheValue::File(CachedFile {
+                            hash: sha1.clone(),
+                            project_id: version.project_id.clone(),
+                            version_id: version.id.clone(),
+                        }));
+                        if let Some(update_version) = update_version {
+                            let mod_loader: ModLoader =
+                                profile.metadata.loader.into();
+                            cached_entries.push(CacheValue::FileUpdate(
+                                CachedFileUpdate {
+                                    hash: sha1.clone(),
+                                    game_version: profile
+                                        .metadata
+                                        .game_version
+                                        .clone(),
+                                    loaders: vec![
+                                        mod_loader.as_str().to_string(),
+                                    ],
+                                    update_version_id: update_version
+                                        .id
+                                        .clone(),
+                                },
+                            ));
+                            cached_entries.push(CacheValue::Version(
+                                (*update_version).into(),
+                            ));
+                        }
+                        let members = members
+                            .into_iter()
+                            .map(|x| {
+                                let user = User {
+                                    id: x.user.id,
+                                    username: x.user.username,
+                                    avatar_url: x.user.avatar_url,
+                                    bio: x.user.bio,
+                                    created: x.user.created,
+                                    role: x.user.role,
+                                    badges: 0,
+                                };
+                                cached_entries
+                                    .push(CacheValue::User(user.clone()));
+                                TeamMember {
+                                    team_id: x.team_id,
+                                    user,
+                                    is_owner: x.role == "Owner",
+                                    role: x.role,
+                                    ordering: x.ordering,
+                                }
+                            })
+                            .collect::<Vec<_>>();
+                        cached_entries.push(CacheValue::Team(members));
+                        cached_entries
+                            .push(CacheValue::Version((*version).into()));
+                    }
@@ -332,16 +320,15 @@
             .map(|x| x.id),
         groups: profile.metadata.groups,
         linked_data: profile.metadata.linked_data.and_then(|x| {
-            if let Some(project_id) = x.project_id {
-                if let Some(version_id) = x.version_id {
-                    if let Some(locked) = x.locked {
-                        return Some(LinkedData {
-                            project_id,
-                            version_id,
-                            locked,
-                        });
-                    }
-                }
-            }
+            if let Some(project_id) = x.project_id
+                && let Some(version_id) = x.version_id
+                && let Some(locked) = x.locked
+            {
+                return Some(LinkedData {
+                    project_id,
+                    version_id,
+                    locked,
+                });
+            }
             None


@@ -393,10 +393,9 @@
                 ..
             },
         ) = *err.raw
+            && (source.is_connect() || source.is_timeout())
         {
-            if source.is_connect() || source.is_timeout() {
-                return Ok(Some(creds));
-            }
+            return Ok(Some(creds));
         }
         Err(err)
@@ -640,36 +639,31 @@ impl DeviceTokenPair {
         .fetch_optional(exec)
         .await?;
-        if let Some(x) = res {
-            if let Ok(uuid) = Uuid::parse_str(&x.uuid) {
-                if let Ok(private_key) =
-                    SigningKey::from_pkcs8_pem(&x.private_key)
-                {
-                    return Ok(Some(Self {
-                        token: DeviceToken {
-                            issue_instant: Utc
-                                .timestamp_opt(x.issue_instant, 0)
-                                .single()
-                                .unwrap_or_else(Utc::now),
-                            not_after: Utc
-                                .timestamp_opt(x.not_after, 0)
-                                .single()
-                                .unwrap_or_else(Utc::now),
-                            token: x.token,
-                            display_claims: serde_json::from_value(
-                                x.display_claims,
-                            )
-                            .unwrap_or_default(),
-                        },
-                        key: DeviceTokenKey {
-                            id: uuid,
-                            key: private_key,
-                            x: x.x,
-                            y: x.y,
-                        },
-                    }));
-                }
-            }
-        }
+        if let Some(x) = res
+            && let Ok(uuid) = Uuid::parse_str(&x.uuid)
+            && let Ok(private_key) = SigningKey::from_pkcs8_pem(&x.private_key)
+        {
+            return Ok(Some(Self {
+                token: DeviceToken {
+                    issue_instant: Utc
+                        .timestamp_opt(x.issue_instant, 0)
+                        .single()
+                        .unwrap_or_else(Utc::now),
+                    not_after: Utc
+                        .timestamp_opt(x.not_after, 0)
+                        .single()
+                        .unwrap_or_else(Utc::now),
+                    token: x.token,
+                    display_claims: serde_json::from_value(x.display_claims)
+                        .unwrap_or_default(),
+                },
+                key: DeviceTokenKey {
+                    id: uuid,
+                    key: private_key,
+                    x: x.x,
+                    y: x.y,
+                },
+            }));
+        }
         Ok(None)
@@ -724,7 +718,7 @@ const MICROSOFT_CLIENT_ID: &str = "00000000402b5328";
 const AUTH_REPLY_URL: &str = "https://login.live.com/oauth20_desktop.srf";
 const REQUESTED_SCOPE: &str = "service::user.auth.xboxlive.com::MBI_SSL";
-struct RequestWithDate<T> {
+pub struct RequestWithDate<T> {
     pub date: DateTime<Utc>,
     pub value: T,
 }


@@ -360,18 +360,17 @@
                         }
                         // Write the throwable if present
-                        if !current_content.is_empty() {
-                            if let Err(e) =
+                        if !current_content.is_empty()
+                            && let Err(e) =
                                 Process::append_to_log_file(
                                     &log_path,
                                     &current_content,
                                 )
-                            {
-                                tracing::error!(
-                                    "Failed to write throwable to log file: {}",
-                                    e
-                                );
-                            }
+                        {
+                            tracing::error!(
+                                "Failed to write throwable to log file: {}",
+                                e
+                            );
                         }
                     }
@@ -429,15 +428,13 @@
                             if let Some(timestamp) =
                                 current_event.timestamp.as_deref()
-                            {
-                                if let Err(e) = Self::maybe_handle_server_join_logging(
+                                && let Err(e) = Self::maybe_handle_server_join_logging(
                                     profile_path,
                                     timestamp,
                                     message
                                 ).await {
                                     tracing::error!("Failed to handle server join logging: {e}");
                                 }
-                            }
                         }
                     }
                     _ => {}
@@ -445,35 +442,29 @@
                     Ok(Event::Text(mut e)) => {
                         if in_message || in_throwable {
-                            if let Ok(text) = e.unescape() {
+                            if let Ok(text) = e.xml_content() {
                                 current_content.push_str(&text);
                             }
                         } else if !in_event
                             && !e.inplace_trim_end()
                             && !e.inplace_trim_start()
+                            && let Ok(text) = e.xml_content()
+                            && let Err(e) = Process::append_to_log_file(
+                                &log_path,
+                                &format!("{text}\n"),
+                            )
                         {
-                            if let Ok(text) = e.unescape() {
-                                if let Err(e) = Process::append_to_log_file(
-                                    &log_path,
-                                    &format!("{text}\n"),
-                                ) {
-                                    tracing::error!(
-                                        "Failed to write to log file: {}",
-                                        e
-                                    );
-                                }
-                            }
+                            tracing::error!(
+                                "Failed to write to log file: {}",
+                                e
+                            );
                         }
                     }
                     Ok(Event::CData(e)) => {
-                        if in_message || in_throwable {
-                            if let Ok(text) = e
-                                .escape()
-                                .map_err(|x| x.into())
-                                .and_then(|x| x.unescape())
-                            {
-                                current_content.push_str(&text);
-                            }
-                        }
+                        if (in_message || in_throwable)
+                            && let Ok(text) = e.xml_content()
+                        {
+                            current_content.push_str(&text);
+                        }
                     }
                     _ => (),
@@ -720,16 +711,13 @@
         let logs_folder = state.directories.profile_logs_dir(&profile_path);
         let log_path = logs_folder.join(LAUNCHER_LOG_PATH);
-        if log_path.exists() {
-            if let Err(e) = Process::append_to_log_file(
-                &log_path,
-                &format!("\n# Process exited with status: {mc_exit_status}\n"),
-            ) {
-                tracing::warn!(
-                    "Failed to write exit status to log file: {}",
-                    e
-                );
-            }
-        }
+        if log_path.exists()
+            && let Err(e) = Process::append_to_log_file(
+                &log_path,
+                &format!("\n# Process exited with status: {mc_exit_status}\n"),
+            )
+        {
+            tracing::warn!("Failed to write exit status to log file: {}", e);
+        }
         let _ = state.discord_rpc.clear_to_default(true).await;
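
The `unescape()` and `escape().and_then(...)` calls disappear above because quick-xml 0.38 (updated in this commit) exposes the decoded text of both `Text` and `CData` events through the `xml_content()` accessor the new code calls. A hedged sketch of that read loop in isolation (function name hypothetical):

    use quick_xml::Reader;
    use quick_xml::events::Event;

    fn collect_text(xml: &str) -> String {
        let mut reader = Reader::from_str(xml);
        let mut out = String::new();
        loop {
            match reader.read_event() {
                Ok(Event::Text(e)) => {
                    // Decoded character data of a text node.
                    if let Ok(text) = e.xml_content() {
                        out.push_str(&text);
                    }
                }
                Ok(Event::CData(e)) => {
                    // CDATA sections now go through the same accessor.
                    if let Ok(text) = e.xml_content() {
                        out.push_str(&text);
                    }
                }
                Ok(Event::Eof) | Err(_) => break,
                _ => {}
            }
        }
        out
    }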


@@ -595,8 +595,8 @@ impl Profile {
     }
     #[tracing::instrument(skip(self, semaphore, icon))]
-    pub async fn set_icon<'a>(
-        &'a mut self,
+    pub async fn set_icon(
+        &mut self,
         cache_dir: &Path,
         semaphore: &IoSemaphore,
         icon: bytes::Bytes,
@@ -629,21 +629,20 @@
             {
                 let subdirectory =
                     subdirectory.map_err(io::IOError::from)?.path();
-                if subdirectory.is_file() {
-                    if let Some(file_name) = subdirectory
-                        .file_name()
-                        .and_then(|x| x.to_str())
-                    {
-                        let file_size = subdirectory
-                            .metadata()
-                            .map_err(io::IOError::from)?
-                            .len();
-
-                        keys.push(format!(
-                            "{file_size}-{}/{folder}/{file_name}",
-                            profile.path
-                        ));
-                    }
-                }
+                if subdirectory.is_file()
+                    && let Some(file_name) = subdirectory
+                        .file_name()
+                        .and_then(|x| x.to_str())
+                {
+                    let file_size = subdirectory
+                        .metadata()
+                        .map_err(io::IOError::from)?
+                        .len();
+
+                    keys.push(format!(
+                        "{file_size}-{}/{folder}/{file_name}",
+                        profile.path
+                    ));
+                }
             }
@@ -901,30 +900,29 @@
             {
                 let subdirectory =
                     subdirectory.map_err(io::IOError::from)?.path();
-                if subdirectory.is_file() {
-                    if let Some(file_name) =
-                        subdirectory.file_name().and_then(|x| x.to_str())
-                    {
-                        let file_size = subdirectory
-                            .metadata()
-                            .map_err(io::IOError::from)?
-                            .len();
-
-                        keys.push(InitialScanFile {
-                            path: format!(
-                                "{}/{folder}/{}",
-                                self.path,
-                                file_name.trim_end_matches(".disabled")
-                            ),
-                            file_name: file_name.to_string(),
-                            project_type,
-                            size: file_size,
-                            cache_key: format!(
-                                "{file_size}-{}/{folder}/{file_name}",
-                                self.path
-                            ),
-                        });
-                    }
-                }
+                if subdirectory.is_file()
+                    && let Some(file_name) =
+                        subdirectory.file_name().and_then(|x| x.to_str())
+                {
+                    let file_size = subdirectory
+                        .metadata()
+                        .map_err(io::IOError::from)?
+                        .len();
+
+                    keys.push(InitialScanFile {
+                        path: format!(
+                            "{}/{folder}/{}",
+                            self.path,
+                            file_name.trim_end_matches(".disabled")
+                        ),
+                        file_name: file_name.to_string(),
+                        project_type,
+                        size: file_size,
+                        cache_key: format!(
+                            "{file_size}-{}/{folder}/{file_name}",
+                            self.path
+                        ),
+                    });
+                }
             }


@@ -254,7 +254,7 @@ where
 }
 #[tracing::instrument(skip(bytes, semaphore))]
-pub async fn write<'a>(
+pub async fn write(
     path: &Path,
     bytes: &[u8],
     semaphore: &IoSemaphore,
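
Both signature changes above (`set_icon` earlier and `write` here) drop a named lifetime that bound nothing beyond what elision already infers, which is the cleanup clippy's `needless_lifetimes` lint points at. A hypothetical before/after:

    struct IconStore;

    impl IconStore {
        // Before: pub async fn store<'a>(&'a mut self, data: &[u8]) {}
        // After: identical meaning, less noise.
        pub async fn store(&mut self, data: &[u8]) {
            let _ = data;
        }
    }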


@@ -191,22 +191,21 @@ async fn get_all_autoinstalled_jre_path() -> Result<HashSet<PathBuf>, JREError>
     let mut jre_paths = HashSet::new();
     let base_path = state.directories.java_versions_dir();
-    if base_path.is_dir() {
-        if let Ok(dir) = std::fs::read_dir(base_path) {
-            for entry in dir.flatten() {
-                let file_path = entry.path().join("bin");
-
-                if let Ok(contents) =
-                    std::fs::read_to_string(file_path.clone())
-                {
-                    let entry = entry.path().join(contents);
-                    jre_paths.insert(entry);
-                } else {
-                    #[cfg(not(target_os = "macos"))]
-                    {
-                        let file_path = file_path.join(JAVA_BIN);
-                        jre_paths.insert(file_path);
-                    }
-                }
-            }
-        }
-    }
+    if base_path.is_dir()
+        && let Ok(dir) = std::fs::read_dir(base_path)
+    {
+        for entry in dir.flatten() {
+            let file_path = entry.path().join("bin");
+
+            if let Ok(contents) = std::fs::read_to_string(file_path.clone())
+            {
+                let entry = entry.path().join(contents);
+                jre_paths.insert(entry);
+            } else {
+                #[cfg(not(target_os = "macos"))]
+                {
+                    let file_path = file_path.join(JAVA_BIN);
+                    jre_paths.insert(file_path);
+                }
+            }
+        }
+    }
@@ -300,20 +299,20 @@ pub async fn check_java_at_filepath(path: &Path) -> crate::Result<JavaVersion> {
     }
     // Extract version info from it
-    if let Some(arch) = java_arch {
-        if let Some(version) = java_version {
-            if let Ok(version) = extract_java_version(version) {
-                let path = java.to_string_lossy().to_string();
-                return Ok(JavaVersion {
-                    parsed_version: version,
-                    path,
-                    version: version.to_string(),
-                    architecture: arch.to_string(),
-                });
-            }
-            return Err(JREError::InvalidJREVersion(version.to_owned()).into());
-        }
-    }
+    if let Some(arch) = java_arch
+        && let Some(version) = java_version
+    {
+        if let Ok(version) = extract_java_version(version) {
+            let path = java.to_string_lossy().to_string();
+            return Ok(JavaVersion {
+                parsed_version: version,
+                path,
+                version: version.to_string(),
+                architecture: arch.to_string(),
+            });
+        }
+        return Err(JREError::InvalidJREVersion(version.to_owned()).into());
+    }
     Err(JREError::FailedJavaCheck(java).into())


@@ -33,12 +33,11 @@ pub fn is_feature_supported_in(
         if part_version == part_first_release {
             continue;
         }
-        if let Ok(part_version) = part_version.parse::<u32>() {
-            if let Ok(part_first_release) = part_first_release.parse::<u32>() {
-                if part_version > part_first_release {
-                    return true;
-                }
-            }
-        }
+        if let Ok(part_version) = part_version.parse::<u32>()
+            && let Ok(part_first_release) = part_first_release.parse::<u32>()
+            && part_version > part_first_release
+        {
+            return true;
+        }
     }
     false


@@ -1,2 +1,2 @@
 [toolchain]
-channel = "1.88.0"
+channel = "1.89.0"