Merge commit 'df1499047ccc8f39d756d5beba60651237aca1c0' into beta

2025-08-10 19:54:24 +03:00
91 changed files with 9957 additions and 7832 deletions

View File

@@ -4,15 +4,16 @@ root = true
[*]
charset = utf-8
indent_style = space
indent_size = 2
indent_size = 4
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
max_line_length = 100
[*.md]
indent_size = 2
max_line_length = off
trim_trailing_whitespace = false
[*.{rs,java,kts}]
indent_size = 4
[*.{json,yml,yaml,ts,vue,scss,css,html,js,cjs,mjs,gltf,prettierrc}]
indent_size = 2

Cargo.lock (generated, 377 lines changed): file diff suppressed because it is too large

View File

@@ -25,31 +25,31 @@ actix-ws = "0.3.0"
argon2 = { version = "0.5.3", features = ["std"] }
ariadne = { path = "packages/ariadne" }
async_zip = "0.0.17"
async-compression = { version = "0.4.25", default-features = false }
async-compression = { version = "0.4.27", default-features = false }
async-recursion = "1.1.1"
async-stripe = { version = "0.41.0", default-features = false, features = [
"runtime-tokio-hyper-rustls",
] }
async-trait = "0.1.88"
async-tungstenite = { version = "0.29.1", default-features = false, features = [
async-tungstenite = { version = "0.30.0", default-features = false, features = [
"futures-03-sink",
] }
async-walkdir = "2.1.0"
base64 = "0.22.1"
bitflags = "2.9.1"
bytemuck = "1.23.0"
bytemuck = "1.23.1"
bytes = "1.10.1"
censor = "0.3.0"
chardetng = "0.1.17"
chrono = "0.4.41"
clap = "4.5.40"
clap = "4.5.43"
clickhouse = "0.13.3"
color-thief = "0.2.2"
console-subscriber = "0.4.1"
daedalus = { path = "packages/daedalus" }
dashmap = "6.1.0"
data-url = "0.3.1"
deadpool-redis = "0.21.1"
deadpool-redis = "0.22.0"
dirs = "6.0.0"
discord-rich-presence = "0.2.5"
dotenv-build = "0.1.1"
@@ -57,7 +57,7 @@ dotenvy = "0.15.7"
dunce = "1.0.5"
either = "1.15.0"
encoding_rs = "0.8.35"
enumset = "1.1.6"
enumset = "1.1.7"
flate2 = "1.1.2"
fs4 = { version = "0.13.1", default-features = false }
futures = { version = "0.3.31", default-features = false }
@@ -74,15 +74,15 @@ hyper-rustls = { version = "0.27.7", default-features = false, features = [
"ring",
"tls12",
] }
hyper-util = "0.1.14"
hyper-util = "0.1.16"
iana-time-zone = "0.1.63"
image = { version = "0.25.6", default-features = false, features = ["rayon"] }
indexmap = "2.9.0"
indicatif = "0.17.11"
indexmap = "2.10.0"
indicatif = "0.18.0"
itertools = "0.14.0"
jemalloc_pprof = "0.7.0"
jemalloc_pprof = "0.8.1"
json-patch = { version = "4.0.0", default-features = false }
lettre = { version = "0.11.17", default-features = false, features = [
lettre = { version = "0.11.18", default-features = false, features = [
"builder",
"hostname",
"pool",
@@ -92,24 +92,24 @@ lettre = { version = "0.11.17", default-features = false, features = [
"smtp-transport",
] }
maxminddb = "0.26.0"
meilisearch-sdk = { version = "0.28.0", default-features = false }
meilisearch-sdk = { version = "0.29.1", default-features = false }
murmur2 = "0.1.0"
native-dialog = "0.9.0"
notify = { version = "8.0.0", default-features = false }
notify-debouncer-mini = { version = "0.6.0", default-features = false }
notify = { version = "8.2.0", default-features = false }
notify-debouncer-mini = { version = "0.7.0", default-features = false }
p256 = "0.13.2"
paste = "1.0.15"
phf = { version = "0.12.1", features = ["macros"] }
png = "0.17.16"
prometheus = "0.14.0"
quartz_nbt = "0.2.9"
quick-xml = "0.37.5"
quick-xml = "0.38.1"
rand = "=0.8.5" # Locked on 0.8 until argon2 and p256 update to 0.9
rand_chacha = "=0.3.1" # Locked on 0.3 until we can update rand to 0.9
redis = "=0.31.0" # Locked on 0.31 until deadpool-redis updates to 0.32
redis = "0.32.4"
regex = "1.11.1"
reqwest = { version = "0.12.20", default-features = false }
rgb = "0.8.50"
reqwest = { version = "0.12.22", default-features = false }
rgb = "0.8.52"
rust_decimal = { version = "1.37.2", features = [
"serde-with-float",
"serde-with-str",
@@ -121,7 +121,7 @@ rust-s3 = { version = "0.35.1", default-features = false, features = [
"tokio-rustls-tls",
] }
rusty-money = "0.4.1"
sentry = { version = "0.41.0", default-features = false, features = [
sentry = { version = "0.42.0", default-features = false, features = [
"backtrace",
"contexts",
"debug-images",
@@ -129,45 +129,45 @@ sentry = { version = "0.41.0", default-features = false, features = [
"reqwest",
"rustls",
] }
sentry-actix = "0.41.0"
sentry-actix = "0.42.0"
serde = "1.0.219"
serde_bytes = "0.11.17"
serde_cbor = "0.11.2"
serde_ini = "0.2.0"
serde_json = "1.0.140"
serde_with = "3.13.0"
serde_json = "1.0.142"
serde_with = "3.14.0"
serde-xml-rs = "0.8.1" # Also an XML (de)serializer, consider dropping yaserde in favor of this
sha1 = "0.10.6"
sha1_smol = { version = "1.0.1", features = ["std"] }
sha2 = "0.10.9"
spdx = "0.10.8"
spdx = "0.10.9"
sqlx = { version = "0.8.6", default-features = false }
sysinfo = { version = "0.35.2", default-features = false }
sysinfo = { version = "0.36.1", default-features = false }
tar = "0.4.44"
tauri = "2.6.1"
tauri-build = "2.3.0"
tauri-plugin-deep-link = "2.4.0"
tauri-plugin-dialog = "2.3.0"
tauri-plugin-http = "2.5.0"
tauri = "2.7.0"
tauri-build = "2.3.1"
tauri-plugin-deep-link = "2.4.1"
tauri-plugin-dialog = "2.3.2"
tauri-plugin-http = "2.5.1"
tauri-plugin-opener = "2.4.0"
tauri-plugin-os = "2.3.0"
tauri-plugin-single-instance = "2.3.0"
tauri-plugin-single-instance = "2.3.2"
tauri-plugin-updater = { version = "2.9.0", default-features = false, features = [
"rustls-tls",
"zip",
] }
tauri-plugin-window-state = "2.3.0"
tauri-plugin-window-state = "2.4.0"
tempfile = "3.20.0"
theseus = { path = "packages/app-lib" }
thiserror = "2.0.12"
tikv-jemalloc-ctl = "0.6.0"
tikv-jemallocator = "0.6.0"
tokio = "1.45.1"
tokio = "1.47.1"
tokio-stream = "0.1.17"
tokio-util = "0.7.15"
tokio-util = "0.7.16"
totp-rs = "5.7.0"
tracing = "0.1.41"
tracing-actix-web = "0.7.18"
tracing-actix-web = "0.7.19"
tracing-error = "0.2.1"
tracing-subscriber = "0.3.19"
url = "2.5.4"
@@ -179,7 +179,7 @@ whoami = "1.6.0"
winreg = "0.55.0"
woothee = "0.13.0"
yaserde = "0.12.0"
zip = { version = "4.2.0", default-features = false, features = [
zip = { version = "4.3.0", default-features = false, features = [
"bzip2",
"deflate",
"deflate64",
@@ -226,7 +226,7 @@ wildcard_dependencies = "warn"
warnings = "deny"
[patch.crates-io]
wry = { git = "https://github.com/modrinth/wry", rev = "21db186" }
wry = { git = "https://github.com/modrinth/wry", rev = "f2ce0b0" }
# Optimize for speed and reduce size on release builds
[profile.release]

View File

@@ -1,41 +1,67 @@
!macro NSIS_HOOK_POSTINSTALL
SetShellVarContext current
IfFileExists "$LOCALAPPDATA${PRODUCTNAME}\theseus_gui.exe" file_found file_not_found
file_found:
Delete "$LOCALAPPDATA${PRODUCTNAME}\theseus_gui.exe"
Delete "$LOCALAPPDATA${PRODUCTNAME}\uninstall.exe"
RMDir "$LOCALAPPDATA${PRODUCTNAME}"
!insertmacro DeleteAppUserModelId
; Remove start menu shortcut
!insertmacro MUI_STARTMENU_GETFOLDER Application $AppStartMenuFolder
!insertmacro IsShortcutTarget "$SMPROGRAMS$AppStartMenuFolder${PRODUCTNAME}.lnk" "$LOCALAPPDATA${PRODUCTNAME}\theseus_gui.exe"
Pop $0
${If} $0 = 1
!insertmacro UnpinShortcut "$SMPROGRAMS$AppStartMenuFolder${PRODUCTNAME}.lnk"
Delete "$SMPROGRAMS$AppStartMenuFolder${PRODUCTNAME}.lnk"
RMDir "$SMPROGRAMS$AppStartMenuFolder"
${EndIf}
!insertmacro IsShortcutTarget "$SMPROGRAMS${PRODUCTNAME}.lnk" "$LOCALAPPDATA${PRODUCTNAME}\theseus_gui.exe"
Pop $0
${If} $0 = 1
!insertmacro UnpinShortcut "$SMPROGRAMS${PRODUCTNAME}.lnk"
Delete "$SMPROGRAMS${PRODUCTNAME}.lnk"
${EndIf}
!insertmacro IsShortcutTarget "$DESKTOP${PRODUCTNAME}.lnk" "$LOCALAPPDATA${PRODUCTNAME}\theseus_gui.exe"
Pop $0
${If} $0 = 1
!insertmacro UnpinShortcut "$DESKTOP${PRODUCTNAME}.lnk"
Delete "$DESKTOP${PRODUCTNAME}.lnk"
${EndIf}
DeleteRegKey HKCU "${UNINSTKEY}"
goto end_of_test ;<== important for not continuing on the else branch
file_not_found:
end_of_test:
; https://nsis.sourceforge.io/ShellExecWait
!macro ShellExecWait verb app param workdir show exitoutvar ;only app and show must be != "", every thing else is optional
#define SEE_MASK_NOCLOSEPROCESS 0x40
System::Store S
!if "${NSIS_PTR_SIZE}" > 4
!define /ReDef /math SYSSIZEOF_SHELLEXECUTEINFO 14 * ${NSIS_PTR_SIZE}
!else ifndef SYSSIZEOF_SHELLEXECUTEINFO
!define SYSSIZEOF_SHELLEXECUTEINFO 60
!endif
System::Call '*(&i${SYSSIZEOF_SHELLEXECUTEINFO})i.r0'
System::Call '*$0(i ${SYSSIZEOF_SHELLEXECUTEINFO},i 0x40,p $hwndparent,t "${verb}",t $\'${app}$\',t $\'${param}$\',t "${workdir}",i ${show})p.r0'
System::Call 'shell32::ShellExecuteEx(t)(pr0)i.r1 ?e' ; (t) to trigger A/W selection
${If} $1 <> 0
System::Call '*$0(is,i,p,p,p,p,p,p,p,p,p,p,p,p,p.r1)' ;stack value not really used, just a fancy pop ;)
System::Call 'kernel32::WaitForSingleObject(pr1,i-1)'
System::Call 'kernel32::GetExitCodeProcess(pr1,*i.s)'
System::Call 'kernel32::CloseHandle(pr1)'
${EndIf}
System::Free $0
!if "${exitoutvar}" == ""
pop $0
!endif
System::Store L
!if "${exitoutvar}" != ""
pop ${exitoutvar}
!endif
!macroend
; --------------------------------------------------------------------------------
Var /GLOBAL OldInstallDir
!macro NSIS_HOOK_PREINSTALL
SetShellVarContext all
${If} ${FileExists} "$SMPROGRAMS\${PRODUCTNAME}.lnk"
UserInfo::GetAccountType
Pop $0
${If} $0 != "Admin"
MessageBox MB_ICONINFORMATION|MB_OK "An old installation of the Modrinth App was detected that requires administrator permission to update from. You will be prompted with an admin prompt shortly."
${EndIf}
ReadRegStr $4 SHCTX "${MANUPRODUCTKEY}" ""
ReadRegStr $R1 SHCTX "${UNINSTKEY}" "UninstallString"
ReadRegStr $OldInstallDir SHCTX "${UNINSTKEY}" "InstallLocation"
StrCpy $OldInstallDir $OldInstallDir "" 1
StrCpy $OldInstallDir $OldInstallDir -1 ""
DetailPrint "Executing $R1"
!insertmacro ShellExecWait "runas" '$R1' '/P _?=$4' "" ${SW_SHOW} $3
${If} $3 <> 0
SetErrorLevel $3
MessageBox MB_ICONEXCLAMATION|MB_OK "Failed to uninstall old global installation"
Abort
${EndIf}
${EndIf}
SetShellVarContext current
!macroend
!macro NSIS_HOOK_POSTINSTALL
!insertmacro IsShortcutTarget "$DESKTOP\${PRODUCTNAME}.lnk" "$OldInstallDir\${MAINBINARYNAME}.exe"
Pop $0
${If} $0 = 1
!insertmacro SetShortcutTarget "$DESKTOP\${PRODUCTNAME}.lnk" "$INSTDIR\${MAINBINARYNAME}.exe"
Return
${EndIf}
!macroend

View File

@@ -122,16 +122,13 @@ pub async fn login<R: Runtime>(
.url()?
.as_str()
.starts_with("https://login.live.com/oauth20_desktop.srf")
{
if let Some((_, code)) =
&& let Some((_, code)) =
window.url()?.query_pairs().find(|x| x.0 == "code")
{
window.close()?;
let val =
minecraft_auth::finish_login(&code.clone(), flow).await?;
{
window.close()?;
let val = minecraft_auth::finish_login(&code.clone(), flow).await?;
return Ok(Some(val));
}
return Ok(Some(val));
}
tokio::time::sleep(std::time::Duration::from_millis(50)).await;

View File

@@ -103,11 +103,11 @@ pub async fn should_disable_mouseover() -> bool {
// We try to match the version to 12.3 or higher. If it doesn't match the pattern or is lower, we default to the css with disabled mouseover for safety
if let tauri_plugin_os::Version::Semantic(major, minor, _) =
tauri_plugin_os::version()
&& major >= 12
&& minor >= 3
{
if major >= 12 && minor >= 3 {
// Mac os version is 12.3 or higher, we allow mouseover
return false;
}
// Mac os version is 12.3 or higher, we allow mouseover
return false;
}
true
} else {

View File

@@ -243,10 +243,10 @@ fn main() {
});
#[cfg(not(target_os = "linux"))]
if let Some(window) = app.get_window("main") {
if let Err(e) = window.set_shadow(true) {
tracing::warn!("Failed to set window shadow: {e}");
}
if let Some(window) = app.get_window("main")
&& let Err(e) = window.set_shadow(true)
{
tracing::warn!("Failed to set window shadow: {e}");
}
Ok(())

View File

@@ -15,7 +15,7 @@
"icon": ["icons/128x128.png", "icons/128x128@2x.png", "icons/icon.icns", "icons/icon.ico"],
"windows": {
"nsis": {
"installMode": "perMachine",
"installMode": "currentUser",
"installerHooks": "./nsis/hooks.nsi"
}
},

View File

@@ -1,6 +1,6 @@
# syntax=docker/dockerfile:1
FROM rust:1.88.0 AS build
FROM rust:1.89.0 AS build
WORKDIR /usr/src/daedalus
COPY . .

View File

@@ -506,27 +506,25 @@ async fn fetch(
return Ok(lib);
}
} else if let Some(url) = &lib.url {
if !url.is_empty() {
insert_mirrored_artifact(
&lib.name,
None,
vec![
url.clone(),
"https://libraries.minecraft.net/"
.to_string(),
"https://maven.creeperhost.net/"
.to_string(),
maven_url.to_string(),
],
false,
mirror_artifacts,
)?;
} else if let Some(url) = &lib.url
&& !url.is_empty()
{
insert_mirrored_artifact(
&lib.name,
None,
vec![
url.clone(),
"https://libraries.minecraft.net/".to_string(),
"https://maven.creeperhost.net/".to_string(),
maven_url.to_string(),
],
false,
mirror_artifacts,
)?;
lib.url = Some(format_url("maven/"));
lib.url = Some(format_url("maven/"));
return Ok(lib);
}
return Ok(lib);
}
// Other libraries are generally available in the "maven" directory of the installer. If they are

View File

@@ -93,22 +93,22 @@ async fn main() -> Result<()> {
.ok()
.and_then(|x| x.parse::<bool>().ok())
.unwrap_or(false)
&& let Ok(token) = dotenvy::var("CLOUDFLARE_TOKEN")
&& let Ok(zone_id) = dotenvy::var("CLOUDFLARE_ZONE_ID")
{
if let Ok(token) = dotenvy::var("CLOUDFLARE_TOKEN") {
if let Ok(zone_id) = dotenvy::var("CLOUDFLARE_ZONE_ID") {
let cache_clears = upload_files
let cache_clears = upload_files
.into_iter()
.map(|x| format_url(&x.0))
.chain(
mirror_artifacts
.into_iter()
.map(|x| format_url(&x.0))
.chain(
mirror_artifacts
.into_iter()
.map(|x| format_url(&format!("maven/{}", x.0))),
)
.collect::<Vec<_>>();
.map(|x| format_url(&format!("maven/{}", x.0))),
)
.collect::<Vec<_>>();
// Cloudflare ratelimits cache clears to 500 files per request
for chunk in cache_clears.chunks(500) {
REQWEST_CLIENT.post(format!("https://api.cloudflare.com/client/v4/zones/{zone_id}/purge_cache"))
// Cloudflare ratelimits cache clears to 500 files per request
for chunk in cache_clears.chunks(500) {
REQWEST_CLIENT.post(format!("https://api.cloudflare.com/client/v4/zones/{zone_id}/purge_cache"))
.bearer_auth(&token)
.json(&serde_json::json!({
"files": chunk
@@ -128,8 +128,6 @@ async fn main() -> Result<()> {
item: "cloudflare clear cache".to_string(),
}
})?;
}
}
}
}

View File

@@ -167,20 +167,18 @@ pub async fn download_file(
let bytes = x.bytes().await;
if let Ok(bytes) = bytes {
if let Some(sha1) = sha1 {
if &*sha1_async(bytes.clone()).await? != sha1 {
if attempt <= 3 {
continue;
} else {
return Err(
crate::ErrorKind::ChecksumFailure {
hash: sha1.to_string(),
url: url.to_string(),
tries: attempt,
}
.into(),
);
if let Some(sha1) = sha1
&& &*sha1_async(bytes.clone()).await? != sha1
{
if attempt <= 3 {
continue;
} else {
return Err(crate::ErrorKind::ChecksumFailure {
hash: sha1.to_string(),
url: url.to_string(),
tries: attempt,
}
.into());
}
}

View File

@@ -3,77 +3,61 @@ title: Labrinth (API)
description: Guide for contributing to Modrinth's backend
---
This project is part of our [monorepo](https://github.com/modrinth/code). You can find it in the `apps/labrinth` directory.
This project is part of our [monorepo](https://github.com/modrinth/code). You can find it in the `apps/labrinth` directory. The instructions below assume that you have switched your working directory to the `apps/labrinth` subdirectory.
[labrinth] is the Rust-based backend serving Modrinth's API with the help of the [Actix](https://actix.rs) framework. To get started with a labrinth instance, install docker, docker-compose (which comes with Docker), and [Rust]. The initial startup can be done simply with the command `docker-compose up`, or with `docker compose up` (Compose V2 and later). That will deploy a PostgreSQL database on port 5432 and a MeiliSearch instance on port 7700. To run the API itself, you'll need to use the `cargo run` command, this will deploy the API on port 8000.
[labrinth] is the Rust-based backend serving Modrinth's API with the help of the [Actix](https://actix.rs) framework. To get started with a labrinth instance, install Docker, docker-compose (which comes with Docker), and [Rust]. The initial startup can be done simply with the command `docker-compose up`, or with `docker compose up` (Compose V2 and later). That will deploy a PostgreSQL database on port 5432, a MeiliSearch instance on port 7700, and a [Mailpit](https://mailpit.axllent.org/) SMTP server on port 1025, with a web UI to inspect sent emails on port 8025. To run the API itself, you'll need to use the `cargo run` command; this will deploy the API on port 8000.
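As a quick reference, the startup described above amounts to two commands, run from `apps/labrinth` (a sketch assuming Compose V2; substitute `docker-compose` for V1):
```sh
# Deploy PostgreSQL (5432), MeiliSearch (7700), and Mailpit (SMTP 1025, web UI 8025)
docker compose up

# In a second terminal: build and run the API itself on port 8000
cargo run
```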
To get a basic configuration, copy the `.env.local` file to `.env`. Now, you'll have to install the sqlx CLI, which can be done with cargo:
```bash
cargo install --git https://github.com/launchbadge/sqlx sqlx-cli --no-default-features --features postgres,rustls
```sh
cargo install sqlx-cli --no-default-features --features mysql,sqlite,postgres,rustls,completions
```
From there, you can create the database and perform all database migrations with one simple command:
From there, you can create the database and set up its schema with one simple command:
```bash
sqlx database setup
```sh
cargo sqlx database setup
```
To enable labrinth to create a project, you need to add two things.
To enable labrinth to create projects and serve useful metadata to the frontend build scripts, you'll need to seed the database with several key entities:
1. An entry in the `loaders` table.
2. An entry in the `loaders_project_types` table.
1. Categories, in the `categories` table.
2. Loaders and their fields, in the `loaders`, `loader_fields`, `loader_field_enums`, `loader_field_enum_values`, and `loader_fields_loaders` tables.
3. Project types and their allowed loaders and games, in the `project_types`, `loaders_project_types`, and `loaders_project_types_games` tables.
4. Optionally, to moderate projects from the frontend, an admin user, in the `users` table.
A minimal setup can be done from the command line with [psql](https://www.postgresql.org/docs/current/app-psql.html):
The most convenient way to do this seeding is with the [psql](https://www.postgresql.org/docs/current/app-psql.html) command line tool and the pre-existing seed data fixture. This fixture was generated by dumping the official staging environment database at a specific point in time, and defines an admin user with email `admin@modrinth.invalid` and password `admin`:
```bash
psql --host=localhost --port=5432 -U <username, default is labrinth> -W
```sh
source .env
psql "$DATABASE_URL" < fixtures/labrinth-seed-data-202508052143.sql
```
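To sanity-check that the fixture loaded, one illustrative query (the `loaders` table comes from the seeding list above):
```sh
# Should report a non-zero count of seeded loaders
psql "$DATABASE_URL" -c 'SELECT count(*) FROM loaders;'
```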
The default password for the database is `labrinth`. Once you've connected, run
```sql
INSERT INTO loaders VALUES (0, 'placeholder_loader');
INSERT INTO loaders_project_types VALUES (0, 1); -- modloader id, supported type id
INSERT INTO categories VALUES (0, 'placeholder_category', 1); -- category id, category, project type id
```
This will initialize your database with a modloader called 'placeholder_loader', with id 0, and marked as supporting mods only. It will also create a category called 'placeholder_category' that is marked as supporting mods only
If you would like 'placeholder_loader' to be marked as supporting modpacks too, run
```sql
INSERT INTO loaders_project_types VALUES (0, 2); -- modloader id, supported type id
```
If you would like 'placeholder_category' to be marked as supporting modpacks too, run
```sql
INSERT INTO categories VALUES (0, 'placeholder_category', 2); -- modloader id, supported type id
```
You can find more example SQL statements for seeding the database in the `apps/labrinth/tests/files/dummy_data.sql` file.
You can find more example SQL statements for seeding the database in the `tests/files/dummy_data.sql` file.
The majority of configuration is done at runtime using [dotenvy](https://crates.io/crates/dotenvy) and the `.env` file. Each of the variables and what they do can be found in the dropdown below. Additionally, there are three command line options that control how the MeiliSearch search indices are managed.
During development, you might notice that changes made directly to entities in the PostgreSQL database do not seem to take effect. This is often because the Redis cache still holds outdated data. To ensure your updates are reflected, clear the cache by e.g. running `redis-cli FLUSHALL`, which will force Labrinth to fetch the latest data from the database the next time it is needed.
During development, you might notice that changes made directly to entities in the PostgreSQL database do not seem to take effect. This is often because the Redis cache still holds outdated data. To ensure your updates are reflected, clear the cache by e.g. running `redis-cli FLUSHALL`, which will force labrinth to fetch the latest data from the database the next time it is needed.
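For example (assuming the Redis container from the Compose setup is reachable at the default `localhost:6379`):
```sh
# Drop every cached key; labrinth repopulates them from PostgreSQL on demand
redis-cli FLUSHALL
```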
You can also start labrinth and its backing services at once using `docker compose --profile with-labrinth up`, which will build and start labrinth through its Docker image as if it was yet another service container. To have that container be automatically rebuilt during development as changes to the source code are made, add the `--watch` flag, which enables [Compose Watch](https://docs.docker.com/compose/how-tos/file-watch/). Keep in mind, however, that Compose Watch is bound to be slower than other similar solutions that work outside of a container, particularly on Windows or macOS, where Docker runs in a virtual machine.
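The two Compose invocations mentioned above, side by side (a sketch; the profile name is the one from the paragraph above):
```sh
# Run labrinth itself as yet another service container
docker compose --profile with-labrinth up

# Same, but with Compose Watch: rebuild the labrinth container as sources change
docker compose --profile with-labrinth up --watch
```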
<details>
<summary>.env variables & command line options</summary>
#### Basic configuration
`DEBUG`: Whether debugging tools should be enabled
`RUST_LOG`: Specifies what information to log, from rust's [`env-logger`](https://github.com/env-logger-rs/env_logger); a reasonable default is `info,sqlx::query=warn`
`SITE_URL`: The main URL to be used for CORS
`CDN_URL`: The publicly accessible base URL for files uploaded to the CDN
`MODERATION_DISCORD_WEBHOOK`: The URL for a Discord webhook where projects pending approval will be sent
`CLOUDFLARE_INTEGRATION`: Whether labrinth should integrate with Cloudflare's spam protection
`DATABASE_URL`: The URL for the PostgreSQL database
`DATABASE_MIN_CONNECTIONS`: The minimum number of concurrent connections allowed to the database at the same time
`DATABASE_MAX_CONNECTIONS`: The maximum number of concurrent connections allowed to the database at the same time
`MEILISEARCH_ADDR`: The URL for the MeiliSearch instance used for search
`MEILISEARCH_KEY`: The name that MeiliSearch is given
`BIND_ADDR`: The bind address for the server. Supports both IPv4 and IPv6
`DEBUG`: Whether debugging tools should be enabled
`RUST_LOG`: Specifies what information to log, from rust's [`env-logger`](https://github.com/env-logger-rs/env_logger); a reasonable default is `info,sqlx::query=warn`
`SITE_URL`: The main URL to be used for CORS
`CDN_URL`: The publicly accessible base URL for files uploaded to the CDN
`MODERATION_DISCORD_WEBHOOK`: The URL for a Discord webhook where projects pending approval will be sent
`CLOUDFLARE_INTEGRATION`: Whether labrinth should integrate with Cloudflare's spam protection
`DATABASE_URL`: The URL for the PostgreSQL database, including its username, password, host, port, and database name
`DATABASE_MIN_CONNECTIONS`: The minimum number of concurrent connections allowed to the database at the same time
`DATABASE_MAX_CONNECTIONS`: The maximum number of concurrent connections allowed to the database at the same time
`MEILISEARCH_ADDR`: The URL for the MeiliSearch instance used for search
`MEILISEARCH_KEY`: The name that MeiliSearch is given
`BIND_ADDR`: The bind address for the server. Supports both IPv4 and IPv6
`MOCK_FILE_PATH`: The path used to store uploaded files; this has no default value and will panic if unspecified
`SMTP_USERNAME`: The username used to authenticate with the SMTP server
`SMTP_PASSWORD`: The password associated with the `SMTP_USERNAME` for SMTP authentication
@@ -90,7 +74,7 @@ The S3 configuration options are fairly self-explanatory in name, so here's simp
#### Search, OAuth, and miscellaneous options
`LOCAL_INDEX_INTERVAL`: The interval, in seconds, at which the local database is reindexed for searching. Defaults to `3600` seconds (1 hour).
`LOCAL_INDEX_INTERVAL`: The interval, in seconds, at which the local database is reindexed for searching. Defaults to `3600` seconds (1 hour).
`VERSION_INDEX_INTERVAL`: The interval, in seconds, at which versions are reindexed for searching. Defaults to `1800` seconds (30 minutes).
The OAuth configuration options are fairly self-explanatory. For help setting up authentication, please contact us on [Discord].
@@ -99,8 +83,8 @@ The OAuth configuration options are fairly self-explanatory. For help setting up
#### Command line options
`--skip-first-index`: Skips indexing the local database on startup. This is useful to prevent doing unnecessary work when frequently restarting.
`--reconfigure-indices`: Resets the MeiliSearch settings for the search indices and exits.
`--skip-first-index`: Skips indexing the local database on startup. This is useful to prevent doing unnecessary work when frequently restarting.
`--reconfigure-indices`: Resets the MeiliSearch settings for the search indices and exits.
`--reset-indices`: Resets the MeiliSearch indices and exits; this clears all previously indexed mods.
</details>
@@ -109,14 +93,13 @@ The OAuth configuration options are fairly self-explanatory. For help setting up
If you're prepared to contribute by submitting a pull request, ensure you have met the following criteria:
- `cargo fmt` has been run.
- `cargo clippy` has been run.
- `cargo check` has been run.
- `cargo fmt --all` has been run.
- `cargo clippy --all-targets` has been run.
- `cargo sqlx prepare` has been run.
> Note: If you encounter issues with `sqlx` saying 'no queries found' after running `cargo sqlx prepare`, you may need to ensure the installed version of `sqlx-cli` matches the current version of `sqlx` used [in labrinth](https://github.com/modrinth/labrinth/blob/master/Cargo.toml).
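A quick, illustrative way to compare the two versions (assuming the workspace root two directories up pins `sqlx`, as this repository's root `Cargo.toml` does):
```sh
# Version of the installed sqlx-cli binary
cargo sqlx --version

# Version of the sqlx dependency pinned in the workspace root
grep -m1 '^sqlx ' ../../Cargo.toml
```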
[Discord]: https://discord.modrinth.com
[GitHub]: https://github.com/modrinth
[labrinth]: https://github.com/modrinth/labrinth
[labrinth]: https://github.com/modrinth/code/tree/main/apps/labrinth
[Rust]: https://www.rust-lang.org/tools/install

View File

@@ -50,13 +50,35 @@
</div>
<div v-else-if="generatedMessage">
<div>
<ButtonStyled>
<button class="mb-2" @click="useSimpleEditor = !useSimpleEditor">
<template v-if="!useSimpleEditor">
<ToggleLeftIcon aria-hidden="true" />
Use simple mode
</template>
<template v-else>
<ToggleRightIcon aria-hidden="true" />
Use advanced mode
</template>
</button>
</ButtonStyled>
<MarkdownEditor
v-if="!useSimpleEditor"
v-model="message"
:max-height="400"
placeholder="No message generated."
:disabled="false"
:heading-buttons="false"
/>
<textarea
v-else
v-model="message"
type="text"
class="bg-bg-input h-[400px] w-full rounded-lg border border-solid border-divider px-3 py-2 font-mono text-base"
placeholder="No message generated."
autocomplete="off"
@input="persistState"
/>
</div>
</div>
<div v-else-if="isModpackPermissionsStage">
@@ -324,6 +346,8 @@ import {
CheckIcon,
KeyboardIcon,
EyeOffIcon,
ToggleLeftIcon,
ToggleRightIcon,
} from "@modrinth/assets";
import {
checklist,
@@ -368,7 +392,6 @@ import {
type Stage,
finalPermissionMessages,
} from "@modrinth/moderation";
import * as prettier from "prettier";
import ModpackPermissionsFlow from "./ModpackPermissionsFlow.vue";
import KeybindsModal from "./ChecklistKeybindsModal.vue";
import { useModerationStore } from "~/store/moderation.ts";
@@ -392,6 +415,7 @@ const isModpackPermissionsStage = computed(() => {
return currentStageObj.value.id === "modpack-permissions";
});
const useSimpleEditor = ref(false);
const message = ref("");
const generatedMessage = ref(false);
const loadingMessage = ref(false);
@@ -1118,19 +1142,7 @@ async function generateMessage() {
}
}
try {
const formattedMessage = await prettier.format(fullMessage, {
parser: "markdown",
printWidth: 80,
proseWrap: "always",
tabWidth: 2,
useTabs: false,
});
message.value = formattedMessage;
} catch (formattingError) {
console.warn("Failed to format markdown, using original:", formattingError);
message.value = fullMessage;
}
message.value = fullMessage;
generatedMessage.value = true;
} catch (error) {

View File

@@ -100,7 +100,6 @@ import {
ScaleIcon,
} from "@modrinth/assets";
import { defineMessages, useVIntl } from "@vintl/vintl";
import { useLocalStorage } from "@vueuse/core";
import ConfettiExplosion from "vue-confetti-explosion";
import Fuse from "fuse.js";
import ModerationQueueCard from "~/components/ui/moderation/ModerationQueueCard.vue";
@@ -215,7 +214,7 @@ watch(
},
);
const currentFilterType = useLocalStorage("moderation-current-filter-type", () => "All projects");
const currentFilterType = ref("All projects");
const filterTypes: readonly string[] = readonly([
"All projects",
"Modpacks",
@@ -226,7 +225,7 @@ const filterTypes: readonly string[] = readonly([
"Shaders",
]);
const currentSortType = useLocalStorage("moderation-current-sort-type", () => "Oldest");
const currentSortType = ref("Oldest");
const sortTypes: readonly string[] = readonly(["Oldest", "Newest"]);
const currentPage = ref(1);
@@ -287,8 +286,10 @@ const typeFiltered = computed(() => {
const projectType = filterMap[currentFilterType.value];
if (!projectType) return baseFiltered.value;
return baseFiltered.value.filter((queueItem) =>
queueItem.project.project_types.includes(projectType),
return baseFiltered.value.filter(
(queueItem) =>
queueItem.project.project_types.length > 0 &&
queueItem.project.project_types[0] === projectType,
);
});

View File

@@ -72,7 +72,6 @@
import { DropdownSelect, Button, Pagination } from "@modrinth/ui";
import { XIcon, SearchIcon, SortAscIcon, SortDescIcon, FilterIcon } from "@modrinth/assets";
import { defineMessages, useVIntl } from "@vintl/vintl";
import { useLocalStorage } from "@vueuse/core";
import type { Report } from "@modrinth/utils";
import Fuse from "fuse.js";
import type { ExtendedReport } from "@modrinth/moderation";
@@ -170,10 +169,10 @@ watch(
},
);
const currentFilterType = useLocalStorage("moderation-reports-filter-type", () => "All");
const currentFilterType = ref("All");
const filterTypes: readonly string[] = readonly(["All", "Unread", "Read"]);
const currentSortType = useLocalStorage("moderation-reports-sort-type", () => "Oldest");
const currentSortType = ref("Oldest");
const sortTypes: readonly string[] = readonly(["Oldest", "Newest"]);
const currentPage = ref(1);

View File

@@ -0,0 +1,129 @@
DEBUG=true
RUST_LOG=info,sqlx::query=warn
SENTRY_DSN=none
SITE_URL=http://localhost:3000
# This CDN URL matches the local storage backend set below, which uses MOCK_FILE_PATH
CDN_URL=file:///tmp/modrinth
LABRINTH_ADMIN_KEY=feedbeef
RATE_LIMIT_IGNORE_KEY=feedbeef
DATABASE_URL=postgresql://labrinth:labrinth@labrinth-postgres/labrinth
DATABASE_MIN_CONNECTIONS=0
DATABASE_MAX_CONNECTIONS=16
MEILISEARCH_ADDR=http://labrinth-meilisearch:7700
MEILISEARCH_KEY=modrinth
REDIS_URL=redis://labrinth-redis
REDIS_MAX_CONNECTIONS=10000
BIND_ADDR=0.0.0.0:8000
SELF_ADDR=http://labrinth:8000
MODERATION_SLACK_WEBHOOK=
PUBLIC_DISCORD_WEBHOOK=
CLOUDFLARE_INTEGRATION=false
STORAGE_BACKEND=local
MOCK_FILE_PATH=/tmp/modrinth
S3_PUBLIC_BUCKET_NAME=none
S3_PUBLIC_USES_PATH_STYLE_BUCKET=false
S3_PUBLIC_REGION=none
S3_PUBLIC_URL=none
S3_PUBLIC_ACCESS_TOKEN=none
S3_PUBLIC_SECRET=none
S3_PRIVATE_BUCKET_NAME=none
S3_PRIVATE_USES_PATH_STYLE_BUCKET=false
S3_PRIVATE_REGION=none
S3_PRIVATE_URL=none
S3_PRIVATE_ACCESS_TOKEN=none
S3_PRIVATE_SECRET=none
# 1 hour
LOCAL_INDEX_INTERVAL=3600
# 30 minutes
VERSION_INDEX_INTERVAL=1800
RATE_LIMIT_IGNORE_IPS='["127.0.0.1"]'
WHITELISTED_MODPACK_DOMAINS='["cdn.modrinth.com", "github.com", "raw.githubusercontent.com"]'
ALLOWED_CALLBACK_URLS='["localhost", ".modrinth.com", "127.0.0.1"]'
GITHUB_CLIENT_ID=none
GITHUB_CLIENT_SECRET=none
GITLAB_CLIENT_ID=none
GITLAB_CLIENT_SECRET=none
DISCORD_CLIENT_ID=none
DISCORD_CLIENT_SECRET=none
MICROSOFT_CLIENT_ID=none
MICROSOFT_CLIENT_SECRET=none
GOOGLE_CLIENT_ID=none
GOOGLE_CLIENT_SECRET=none
PAYPAL_API_URL=https://api-m.sandbox.paypal.com/v1/
PAYPAL_WEBHOOK_ID=none
PAYPAL_CLIENT_ID=none
PAYPAL_CLIENT_SECRET=none
PAYPAL_NVP_USERNAME=none
PAYPAL_NVP_PASSWORD=none
PAYPAL_NVP_SIGNATURE=none
STEAM_API_KEY=none
TREMENDOUS_API_URL=https://testflight.tremendous.com/api/v2/
TREMENDOUS_API_KEY=none
TREMENDOUS_PRIVATE_KEY=none
TREMENDOUS_CAMPAIGN_ID=none
HCAPTCHA_SECRET=none
SMTP_FROM_NAME=Modrinth
SMTP_FROM_ADDRESS=no-reply@mail.modrinth.com
SMTP_USERNAME=
SMTP_PASSWORD=
SMTP_HOST=labrinth-mail
SMTP_PORT=1025
SMTP_TLS=none
SITE_VERIFY_EMAIL_PATH=auth/verify-email
SITE_RESET_PASSWORD_PATH=auth/reset-password
SITE_BILLING_PATH=none
SENDY_URL=none
SENDY_LIST_ID=none
SENDY_API_KEY=none
ANALYTICS_ALLOWED_ORIGINS='["http://127.0.0.1:3000", "http://localhost:3000", "https://modrinth.com", "https://www.modrinth.com", "*"]'
CLICKHOUSE_REPLICATED=false
CLICKHOUSE_URL=http://labrinth-clickhouse:8123
CLICKHOUSE_USER=default
CLICKHOUSE_PASSWORD=default
CLICKHOUSE_DATABASE=staging_ariadne
MAXMIND_LICENSE_KEY=none
FLAME_ANVIL_URL=none
STRIPE_API_KEY=none
STRIPE_WEBHOOK_SECRET=none
ADITUDE_API_KEY=none
PYRO_API_KEY=none
BREX_API_URL=https://platform.brexapis.com/v2/
BREX_API_KEY=none
DELPHI_URL=none
DELPHI_SLACK_WEBHOOK=none
ARCHON_URL=none

View File

@@ -87,11 +87,11 @@ HCAPTCHA_SECRET=none
SMTP_FROM_NAME=Modrinth
SMTP_FROM_ADDRESS=no-reply@mail.modrinth.com
SMTP_USERNAME=none
SMTP_PASSWORD=none
SMTP_HOST=none
SMTP_PORT=465
SMTP_TLS=tls
SMTP_USERNAME=
SMTP_PASSWORD=
SMTP_HOST=localhost
SMTP_PORT=1025
SMTP_TLS=none
SITE_VERIFY_EMAIL_PATH=auth/verify-email
SITE_RESET_PASSWORD_PATH=auth/reset-password

View File

@@ -1,6 +1,6 @@
# syntax=docker/dockerfile:1
FROM rust:1.88.0 AS build
FROM rust:1.89.0 AS build
WORKDIR /usr/src/labrinth
COPY . .

View File

@@ -14,7 +14,7 @@ fn main() {
let git_hash = String::from_utf8(output.stdout)
.expect("valid UTF-8 output from `git` invocation");
println!("cargo::rerun-if-changed=.git/HEAD");
println!("cargo::rerun-if-changed=../../.git/HEAD");
println!("cargo::rustc-env=GIT_HASH={}", git_hash.trim());
let timedate_fmt = Local::now().format("%F @ %I:%M %p");

File diff suppressed because it is too large

View File

@@ -5,7 +5,7 @@
"lint": "cargo fmt --check && cargo clippy --all-targets",
"fix": "cargo clippy --all-targets --fix --allow-dirty && cargo fmt",
"dev": "cargo run",
"//": "Labrinth integration tests require a lot of disk space, so in the standard GitHub Actions",
"//": "labrinth integration tests require a lot of disk space, so in the standard GitHub Actions",
"//": "runners we must remove useless development tools from the base image, which frees up ~20 GiB.",
"//": "The command commented out below can be used in CI to debug what is taking up space:",
"//": "sudo du -xh --max-depth=4 / | sort -rh | curl -X POST --data-urlencode content@/dev/fd/0 https://api.mclo.gs/1/log",

View File

@@ -322,12 +322,11 @@ pub async fn is_visible_collection(
} else {
!collection_data.status.is_hidden()
}) && !collection_data.projects.is_empty();
if let Some(user) = &user_option {
if !authorized
&& (user.role.is_mod() || user.id == collection_data.user_id.into())
{
authorized = true;
}
if let Some(user) = &user_option
&& !authorized
&& (user.role.is_mod() || user.id == collection_data.user_id.into())
{
authorized = true;
}
Ok(authorized)
}
@@ -356,10 +355,10 @@ pub async fn filter_visible_collections(
for collection in check_collections {
// Collections are simple- if we are the owner or a mod, we can see it
if let Some(user) = user_option {
if user.role.is_mod() || user.id == collection.user_id.into() {
return_collections.push(collection.into());
}
if let Some(user) = user_option
&& (user.role.is_mod() || user.id == collection.user_id.into())
{
return_collections.push(collection.into());
}
}

View File

@@ -39,7 +39,8 @@ pub fn send_email_raw(
let password = dotenvy::var("SMTP_PASSWORD")?;
let host = dotenvy::var("SMTP_HOST")?;
let port = dotenvy::var("SMTP_PORT")?.parse::<u16>().unwrap_or(465);
let creds = Credentials::new(username, password);
let creds =
(!username.is_empty()).then(|| Credentials::new(username, password));
let tls_setting = match dotenvy::var("SMTP_TLS")?.as_str() {
"none" => Tls::None,
"opportunistic_start_tls" => {
@@ -55,13 +56,12 @@ pub fn send_email_raw(
}
};
let mailer = SmtpTransport::relay(&host)?
.port(port)
.tls(tls_setting)
.credentials(creds)
.build();
let mut mailer = SmtpTransport::relay(&host)?.port(port).tls(tls_setting);
if let Some(creds) = creds {
mailer = mailer.credentials(creds);
}
mailer.send(&email)?;
mailer.build().send(&email)?;
Ok(())
}

View File

@@ -95,10 +95,10 @@ impl DBFlow {
redis: &RedisPool,
) -> Result<Option<DBFlow>, DatabaseError> {
let flow = Self::get(id, redis).await?;
if let Some(flow) = flow.as_ref() {
if predicate(flow) {
Self::remove(id, redis).await?;
}
if let Some(flow) = flow.as_ref()
&& predicate(flow)
{
Self::remove(id, redis).await?;
}
Ok(flow)
}

View File

@@ -801,24 +801,24 @@ impl VersionField {
};
if let Some(count) = countable {
if let Some(min) = loader_field.min_val {
if count < min {
return Err(format!(
"Provided value '{v}' for {field_name} is less than the minimum of {min}",
v = serde_json::to_string(&value).unwrap_or_default(),
field_name = loader_field.field,
));
}
if let Some(min) = loader_field.min_val
&& count < min
{
return Err(format!(
"Provided value '{v}' for {field_name} is less than the minimum of {min}",
v = serde_json::to_string(&value).unwrap_or_default(),
field_name = loader_field.field,
));
}
if let Some(max) = loader_field.max_val {
if count > max {
return Err(format!(
"Provided value '{v}' for {field_name} is greater than the maximum of {max}",
v = serde_json::to_string(&value).unwrap_or_default(),
field_name = loader_field.field,
));
}
if let Some(max) = loader_field.max_val
&& count > max
{
return Err(format!(
"Provided value '{v}' for {field_name} is greater than the maximum of {max}",
v = serde_json::to_string(&value).unwrap_or_default(),
field_name = loader_field.field,
));
}
}

View File

@@ -483,20 +483,20 @@ impl DBTeamMember {
.await?;
}
if let Some(accepted) = new_accepted {
if accepted {
sqlx::query!(
"
if let Some(accepted) = new_accepted
&& accepted
{
sqlx::query!(
"
UPDATE team_members
SET accepted = TRUE
WHERE (team_id = $1 AND user_id = $2)
",
id as DBTeamId,
user_id as DBUserId,
)
.execute(&mut **transaction)
.await?;
}
id as DBTeamId,
user_id as DBUserId,
)
.execute(&mut **transaction)
.await?;
}
if let Some(payouts_split) = new_payouts_split {

View File

@@ -353,10 +353,10 @@ impl RedisPool {
};
for (idx, key) in fetch_ids.into_iter().enumerate() {
if let Some(locked) = results.get(idx) {
if locked.is_none() {
continue;
}
if let Some(locked) = results.get(idx)
&& locked.is_none()
{
continue;
}
if let Some((key, raw_key)) = ids.remove(&key) {

View File

@@ -71,7 +71,7 @@ pub fn app_setup(
enable_background_tasks: bool,
) -> LabrinthConfig {
info!(
"Starting Labrinth on {}",
"Starting labrinth on {}",
dotenvy::var("BIND_ADDR").unwrap()
);

View File

@@ -71,7 +71,7 @@ async fn main() -> std::io::Result<()> {
if args.run_background_task.is_none() {
info!(
"Starting Labrinth on {}",
"Starting labrinth on {}",
dotenvy::var("BIND_ADDR").unwrap()
);

View File

@@ -334,18 +334,14 @@ impl From<Version> for LegacyVersion {
// the v2 loaders are whatever the corresponding loader fields are
let mut loaders =
data.loaders.into_iter().map(|l| l.0).collect::<Vec<_>>();
if loaders.contains(&"mrpack".to_string()) {
if let Some((_, mrpack_loaders)) = data
if loaders.contains(&"mrpack".to_string())
&& let Some((_, mrpack_loaders)) = data
.fields
.into_iter()
.find(|(key, _)| key == "mrpack_loaders")
{
if let Ok(mrpack_loaders) =
serde_json::from_value(mrpack_loaders)
{
loaders = mrpack_loaders;
}
}
&& let Ok(mrpack_loaders) = serde_json::from_value(mrpack_loaders)
{
loaders = mrpack_loaders;
}
let loaders = loaders.into_iter().map(Loader).collect::<Vec<_>>();

View File

@@ -43,35 +43,33 @@ impl LegacyResultSearchProject {
pub fn from(result_search_project: ResultSearchProject) -> Self {
let mut categories = result_search_project.categories;
categories.extend(result_search_project.loaders.clone());
if categories.contains(&"mrpack".to_string()) {
if let Some(mrpack_loaders) = result_search_project
if categories.contains(&"mrpack".to_string())
&& let Some(mrpack_loaders) = result_search_project
.project_loader_fields
.get("mrpack_loaders")
{
categories.extend(
mrpack_loaders
.iter()
.filter_map(|c| c.as_str())
.map(String::from),
);
categories.retain(|c| c != "mrpack");
}
{
categories.extend(
mrpack_loaders
.iter()
.filter_map(|c| c.as_str())
.map(String::from),
);
categories.retain(|c| c != "mrpack");
}
let mut display_categories = result_search_project.display_categories;
display_categories.extend(result_search_project.loaders);
if display_categories.contains(&"mrpack".to_string()) {
if let Some(mrpack_loaders) = result_search_project
if display_categories.contains(&"mrpack".to_string())
&& let Some(mrpack_loaders) = result_search_project
.project_loader_fields
.get("mrpack_loaders")
{
categories.extend(
mrpack_loaders
.iter()
.filter_map(|c| c.as_str())
.map(String::from),
);
display_categories.retain(|c| c != "mrpack");
}
{
categories.extend(
mrpack_loaders
.iter()
.filter_map(|c| c.as_str())
.map(String::from),
);
display_categories.retain(|c| c != "mrpack");
}
// Sort then remove duplicates

View File

@@ -166,10 +166,10 @@ impl From<ProjectQueryResult> for Project {
Ok(spdx_expr) => {
let mut vec: Vec<&str> = Vec::new();
for node in spdx_expr.iter() {
if let spdx::expression::ExprNode::Req(req) = node {
if let Some(id) = req.req.license.id() {
vec.push(id.full_name);
}
if let spdx::expression::ExprNode::Req(req) = node
&& let Some(id) = req.req.license.id()
{
vec.push(id.full_name);
}
}
// spdx crate returns AND/OR operations in postfix order

View File

@@ -51,16 +51,16 @@ impl ProjectPermissions {
return Some(ProjectPermissions::all());
}
if let Some(member) = project_team_member {
if member.accepted {
return Some(member.permissions);
}
if let Some(member) = project_team_member
&& member.accepted
{
return Some(member.permissions);
}
if let Some(member) = organization_team_member {
if member.accepted {
return Some(member.permissions);
}
if let Some(member) = organization_team_member
&& member.accepted
{
return Some(member.permissions);
}
if role.is_mod() {
@@ -107,10 +107,10 @@ impl OrganizationPermissions {
return Some(OrganizationPermissions::all());
}
if let Some(member) = team_member {
if member.accepted {
return member.organization_permissions;
}
if let Some(member) = team_member
&& member.accepted
{
return member.organization_permissions;
}
if role.is_mod() {
return Some(

View File

@@ -45,17 +45,15 @@ impl MaxMindIndexer {
if let Ok(entries) = archive.entries() {
for mut file in entries.flatten() {
if let Ok(path) = file.header().path() {
if path.extension().and_then(|x| x.to_str()) == Some("mmdb")
{
let mut buf = Vec::new();
file.read_to_end(&mut buf).unwrap();
if let Ok(path) = file.header().path()
&& path.extension().and_then(|x| x.to_str()) == Some("mmdb")
{
let mut buf = Vec::new();
file.read_to_end(&mut buf).unwrap();
let reader =
maxminddb::Reader::from_source(buf).unwrap();
let reader = maxminddb::Reader::from_source(buf).unwrap();
return Ok(Some(reader));
}
return Ok(Some(reader));
}
}
}

View File

@@ -371,8 +371,8 @@ impl AutomatedModerationQueue {
for file in
files.iter().filter(|x| x.version_id == version.id.into())
{
if let Some(hash) = file.hashes.get("sha1") {
if let Some((index, (sha1, _, file_name, _))) = hashes
if let Some(hash) = file.hashes.get("sha1")
&& let Some((index, (sha1, _, file_name, _))) = hashes
.iter()
.enumerate()
.find(|(_, (value, _, _, _))| value == hash)
@@ -382,7 +382,6 @@ impl AutomatedModerationQueue {
hashes.remove(index);
}
}
}
}
@@ -420,12 +419,11 @@ impl AutomatedModerationQueue {
.await?;
for row in rows {
if let Some(sha1) = row.sha1 {
if let Some((index, (sha1, _, file_name, _))) = hashes.iter().enumerate().find(|(_, (value, _, _, _))| value == &sha1) {
if let Some(sha1) = row.sha1
&& let Some((index, (sha1, _, file_name, _))) = hashes.iter().enumerate().find(|(_, (value, _, _, _))| value == &sha1) {
final_hashes.insert(sha1.clone(), IdentifiedFile { file_name: file_name.clone(), status: ApprovalType::from_string(&row.status).unwrap_or(ApprovalType::Unidentified) });
hashes.remove(index);
}
}
}
if hashes.is_empty() {
@@ -499,8 +497,8 @@ impl AutomatedModerationQueue {
let mut insert_ids = Vec::new();
for row in rows {
if let Some((curse_index, (hash, _flame_id))) = flame_files.iter().enumerate().find(|(_, x)| Some(x.1 as i32) == row.flame_project_id) {
if let Some((index, (sha1, _, file_name, _))) = hashes.iter().enumerate().find(|(_, (value, _, _, _))| value == hash) {
if let Some((curse_index, (hash, _flame_id))) = flame_files.iter().enumerate().find(|(_, x)| Some(x.1 as i32) == row.flame_project_id)
&& let Some((index, (sha1, _, file_name, _))) = hashes.iter().enumerate().find(|(_, (value, _, _, _))| value == hash) {
final_hashes.insert(sha1.clone(), IdentifiedFile {
file_name: file_name.clone(),
status: ApprovalType::from_string(&row.status).unwrap_or(ApprovalType::Unidentified),
@@ -512,7 +510,6 @@ impl AutomatedModerationQueue {
hashes.remove(index);
flame_files.remove(curse_index);
}
}
}
if !insert_ids.is_empty() && !insert_hashes.is_empty() {
@@ -581,8 +578,8 @@ impl AutomatedModerationQueue {
for (sha1, _pack_file, file_name, _mumur2) in hashes {
let flame_file = flame_files.iter().find(|x| x.0 == sha1);
if let Some((_, flame_project_id)) = flame_file {
if let Some(project) = flame_projects.iter().find(|x| &x.id == flame_project_id) {
if let Some((_, flame_project_id)) = flame_file
&& let Some(project) = flame_projects.iter().find(|x| &x.id == flame_project_id) {
missing_metadata.flame_files.insert(sha1, MissingMetadataFlame {
title: project.name.clone(),
file_name,
@@ -592,7 +589,6 @@ impl AutomatedModerationQueue {
continue;
}
}
missing_metadata.unknown_files.insert(sha1, file_name);
}

View File

@@ -257,31 +257,30 @@ impl PayoutsQueue {
)
})?;
if !status.is_success() {
if let Some(obj) = value.as_object() {
if let Some(array) = obj.get("errors") {
#[derive(Deserialize)]
struct TremendousError {
message: String,
}
let err = serde_json::from_value::<TremendousError>(
array.clone(),
)
.map_err(|_| {
ApiError::Payments(
"could not retrieve Tremendous error json body"
.to_string(),
)
})?;
return Err(ApiError::Payments(err.message));
if !status.is_success()
&& let Some(obj) = value.as_object()
{
if let Some(array) = obj.get("errors") {
#[derive(Deserialize)]
struct TremendousError {
message: String,
}
return Err(ApiError::Payments(
"could not retrieve Tremendous error body".to_string(),
));
let err =
serde_json::from_value::<TremendousError>(array.clone())
.map_err(|_| {
ApiError::Payments(
"could not retrieve Tremendous error json body"
.to_string(),
)
})?;
return Err(ApiError::Payments(err.message));
}
return Err(ApiError::Payments(
"could not retrieve Tremendous error body".to_string(),
));
}
Ok(serde_json::from_value(value)?)
@@ -449,10 +448,10 @@ impl PayoutsQueue {
};
// we do not support interval gift cards with non US based currencies since we cannot do currency conversions properly
if let PayoutInterval::Fixed { .. } = method.interval {
if !product.currency_codes.contains(&"USD".to_string()) {
continue;
}
if let PayoutInterval::Fixed { .. } = method.interval
&& !product.currency_codes.contains(&"USD".to_string())
{
continue;
}
methods.push(method);

View File

@@ -55,10 +55,10 @@ pub fn jemalloc_memory_stats(
) -> Result<(), prometheus::Error> {
let allocated_mem = IntGauge::new(
"labrinth_memory_allocated",
"Labrinth allocated memory",
"labrinth allocated memory",
)?;
let resident_mem =
IntGauge::new("labrinth_resident_memory", "Labrinth resident memory")?;
IntGauge::new("labrinth_resident_memory", "labrinth resident memory")?;
registry.register(Box::new(allocated_mem.clone()))?;
registry.register(Box::new(resident_mem.clone()))?;

View File

@@ -286,17 +286,17 @@ pub async fn refund_charge(
.upsert(&mut transaction)
.await?;
if body.0.unprovision.unwrap_or(false) {
if let Some(subscription_id) = charge.subscription_id {
let open_charge =
DBCharge::get_open_subscription(subscription_id, &**pool)
.await?;
if let Some(mut open_charge) = open_charge {
open_charge.status = ChargeStatus::Cancelled;
open_charge.due = Utc::now();
if body.0.unprovision.unwrap_or(false)
&& let Some(subscription_id) = charge.subscription_id
{
let open_charge =
DBCharge::get_open_subscription(subscription_id, &**pool)
.await?;
if let Some(mut open_charge) = open_charge {
open_charge.status = ChargeStatus::Cancelled;
open_charge.due = Utc::now();
open_charge.upsert(&mut transaction).await?;
}
open_charge.upsert(&mut transaction).await?;
}
}
@@ -392,17 +392,16 @@ pub async fn edit_subscription(
}
}
if let Some(interval) = &edit_subscription.interval {
if let Price::Recurring { intervals } = &current_price.prices {
if let Some(price) = intervals.get(interval) {
open_charge.subscription_interval = Some(*interval);
open_charge.amount = *price as i64;
} else {
return Err(ApiError::InvalidInput(
"Interval is not valid for this subscription!"
.to_string(),
));
}
if let Some(interval) = &edit_subscription.interval
&& let Price::Recurring { intervals } = &current_price.prices
{
if let Some(price) = intervals.get(interval) {
open_charge.subscription_interval = Some(*interval);
open_charge.amount = *price as i64;
} else {
return Err(ApiError::InvalidInput(
"Interval is not valid for this subscription!".to_string(),
));
}
}
@@ -1225,38 +1224,36 @@ pub async fn initiate_payment(
}
};
if let Price::Recurring { .. } = price_item.prices {
if product.unitary {
let user_subscriptions =
if let Price::Recurring { .. } = price_item.prices
&& product.unitary
{
let user_subscriptions =
user_subscription_item::DBUserSubscription::get_all_user(
user.id.into(),
&**pool,
)
.await?;
let user_products =
product_item::DBProductPrice::get_many(
&user_subscriptions
.iter()
.filter(|x| {
x.status
== SubscriptionStatus::Provisioned
})
.map(|x| x.price_id)
.collect::<Vec<_>>(),
&**pool,
)
.await?;
let user_products = product_item::DBProductPrice::get_many(
&user_subscriptions
.iter()
.filter(|x| {
x.status == SubscriptionStatus::Provisioned
})
.map(|x| x.price_id)
.collect::<Vec<_>>(),
&**pool,
)
.await?;
if user_products
.into_iter()
.any(|x| x.product_id == product.id)
{
return Err(ApiError::InvalidInput(
"You are already subscribed to this product!"
.to_string(),
));
}
if user_products
.into_iter()
.any(|x| x.product_id == product.id)
{
return Err(ApiError::InvalidInput(
"You are already subscribed to this product!"
.to_string(),
));
}
}
@@ -2004,38 +2001,36 @@ pub async fn stripe_webhook(
EventType::PaymentMethodAttached => {
if let EventObject::PaymentMethod(payment_method) =
event.data.object
{
if let Some(customer_id) =
&& let Some(customer_id) =
payment_method.customer.map(|x| x.id())
{
let customer = stripe::Customer::retrieve(
&stripe_client,
&customer_id,
&[],
)
.await?;
if customer
.invoice_settings
.is_none_or(|x| x.default_payment_method.is_none())
{
let customer = stripe::Customer::retrieve(
stripe::Customer::update(
&stripe_client,
&customer_id,
&[],
UpdateCustomer {
invoice_settings: Some(
CustomerInvoiceSettings {
default_payment_method: Some(
payment_method.id.to_string(),
),
..Default::default()
},
),
..Default::default()
},
)
.await?;
if customer
.invoice_settings
.is_none_or(|x| x.default_payment_method.is_none())
{
stripe::Customer::update(
&stripe_client,
&customer_id,
UpdateCustomer {
invoice_settings: Some(
CustomerInvoiceSettings {
default_payment_method: Some(
payment_method.id.to_string(),
),
..Default::default()
},
),
..Default::default()
},
)
.await?;
}
}
}
}

View File

@@ -79,13 +79,12 @@ impl TempUser {
file_host: &Arc<dyn FileHost + Send + Sync>,
redis: &RedisPool,
) -> Result<crate::database::models::DBUserId, AuthenticationError> {
if let Some(email) = &self.email {
if crate::database::models::DBUser::get_by_email(email, client)
if let Some(email) = &self.email
&& crate::database::models::DBUser::get_by_email(email, client)
.await?
.is_some()
{
return Err(AuthenticationError::DuplicateUser);
}
{
return Err(AuthenticationError::DuplicateUser);
}
let user_id =
@@ -1269,19 +1268,19 @@ pub async fn delete_auth_provider(
.update_user_id(user.id.into(), None, &mut transaction)
.await?;
if delete_provider.provider != AuthProvider::PayPal {
if let Some(email) = user.email {
send_email(
email,
"Authentication method removed",
&format!(
"When logging into Modrinth, you can no longer log in using the {} authentication provider.",
delete_provider.provider.as_str()
),
"If you did not make this change, please contact us immediately through our support channels on Discord or via email (support@modrinth.com).",
None,
)?;
}
if delete_provider.provider != AuthProvider::PayPal
&& let Some(email) = user.email
{
send_email(
email,
"Authentication method removed",
&format!(
"When logging into Modrinth, you can no longer log in using the {} authentication provider.",
delete_provider.provider.as_str()
),
"If you did not make this change, please contact us immediately through our support channels on Discord or via email (support@modrinth.com).",
None,
)?;
}
transaction.commit().await?;

View File

@@ -189,17 +189,16 @@ pub async fn get_project_meta(
.iter()
.find(|x| Some(x.1.id as i32) == row.flame_project_id)
.map(|x| x.0.clone())
&& let Some(val) = merged.flame_files.remove(&sha1)
{
if let Some(val) = merged.flame_files.remove(&sha1) {
merged.identified.insert(
sha1,
IdentifiedFile {
file_name: val.file_name.clone(),
status: ApprovalType::from_string(&row.status)
.unwrap_or(ApprovalType::Unidentified),
},
);
}
merged.identified.insert(
sha1,
IdentifiedFile {
file_name: val.file_name.clone(),
status: ApprovalType::from_string(&row.status)
.unwrap_or(ApprovalType::Unidentified),
},
);
}
}

View File

@@ -185,69 +185,69 @@ pub async fn edit_pat(
)
.await?;
if let Some(pat) = pat {
if pat.user_id == user.id.into() {
let mut transaction = pool.begin().await?;
if let Some(pat) = pat
&& pat.user_id == user.id.into()
{
let mut transaction = pool.begin().await?;
if let Some(scopes) = &info.scopes {
if scopes.is_restricted() {
return Err(ApiError::InvalidInput(
"Invalid scopes requested!".to_string(),
));
}
if let Some(scopes) = &info.scopes {
if scopes.is_restricted() {
return Err(ApiError::InvalidInput(
"Invalid scopes requested!".to_string(),
));
}
sqlx::query!(
"
sqlx::query!(
"
UPDATE pats
SET scopes = $1
WHERE id = $2
",
scopes.bits() as i64,
pat.id.0
)
.execute(&mut *transaction)
.await?;
}
if let Some(name) = &info.name {
sqlx::query!(
"
scopes.bits() as i64,
pat.id.0
)
.execute(&mut *transaction)
.await?;
}
if let Some(name) = &info.name {
sqlx::query!(
"
UPDATE pats
SET name = $1
WHERE id = $2
",
name,
pat.id.0
)
.execute(&mut *transaction)
.await?;
name,
pat.id.0
)
.execute(&mut *transaction)
.await?;
}
if let Some(expires) = &info.expires {
if expires < &Utc::now() {
return Err(ApiError::InvalidInput(
"Expire date must be in the future!".to_string(),
));
}
if let Some(expires) = &info.expires {
if expires < &Utc::now() {
return Err(ApiError::InvalidInput(
"Expire date must be in the future!".to_string(),
));
}
sqlx::query!(
"
sqlx::query!(
"
UPDATE pats
SET expires = $1
WHERE id = $2
",
expires,
pat.id.0
)
.execute(&mut *transaction)
.await?;
}
transaction.commit().await?;
database::models::pat_item::DBPersonalAccessToken::clear_cache(
vec![(Some(pat.id), Some(pat.access_token), Some(pat.user_id))],
&redis,
expires,
pat.id.0
)
.execute(&mut *transaction)
.await?;
}
transaction.commit().await?;
database::models::pat_item::DBPersonalAccessToken::clear_cache(
vec![(Some(pat.id), Some(pat.access_token), Some(pat.user_id))],
&redis,
)
.await?;
}
Ok(HttpResponse::NoContent().finish())
@@ -276,21 +276,21 @@ pub async fn delete_pat(
)
.await?;
if let Some(pat) = pat {
if pat.user_id == user.id.into() {
let mut transaction = pool.begin().await?;
database::models::pat_item::DBPersonalAccessToken::remove(
pat.id,
&mut transaction,
)
.await?;
transaction.commit().await?;
database::models::pat_item::DBPersonalAccessToken::clear_cache(
vec![(Some(pat.id), Some(pat.access_token), Some(pat.user_id))],
&redis,
)
.await?;
}
if let Some(pat) = pat
&& pat.user_id == user.id.into()
{
let mut transaction = pool.begin().await?;
database::models::pat_item::DBPersonalAccessToken::remove(
pat.id,
&mut transaction,
)
.await?;
transaction.commit().await?;
database::models::pat_item::DBPersonalAccessToken::clear_cache(
vec![(Some(pat.id), Some(pat.access_token), Some(pat.user_id))],
&redis,
)
.await?;
}
Ok(HttpResponse::NoContent().finish())

View File

@@ -185,21 +185,21 @@ pub async fn delete(
let session = DBSession::get(info.into_inner().0, &**pool, &redis).await?;
if let Some(session) = session {
if session.user_id == current_user.id.into() {
let mut transaction = pool.begin().await?;
DBSession::remove(session.id, &mut transaction).await?;
transaction.commit().await?;
DBSession::clear_cache(
vec![(
Some(session.id),
Some(session.session),
Some(session.user_id),
)],
&redis,
)
.await?;
}
if let Some(session) = session
&& session.user_id == current_user.id.into()
{
let mut transaction = pool.begin().await?;
DBSession::remove(session.id, &mut transaction).await?;
transaction.commit().await?;
DBSession::clear_cache(
vec![(
Some(session.id),
Some(session.session),
Some(session.user_id),
)],
&redis,
)
.await?;
}
Ok(HttpResponse::NoContent().body(""))

View File

@@ -401,14 +401,13 @@ async fn broadcast_to_known_local_friends(
friend.user_id
};
if friend.accepted {
if let Some(socket_ids) =
if friend.accepted
&& let Some(socket_ids) =
sockets.sockets_by_user_id.get(&friend_id.into())
{
for socket_id in socket_ids.iter() {
if let Some(socket) = sockets.sockets.get(&socket_id) {
let _ = send_message(socket.value(), &message).await;
}
{
for socket_id in socket_ids.iter() {
if let Some(socket) = sockets.sockets.get(&socket_id) {
let _ = send_message(socket.value(), &message).await;
}
}
}

View File

@@ -512,6 +512,7 @@ pub async fn project_edit(
moderation_message_body: v2_new_project.moderation_message_body,
monetization_status: v2_new_project.monetization_status,
side_types_migration_review_status: None, // Not to be exposed in v2
loader_fields: HashMap::new(), // Loader fields are not a thing in v2
};
// This returns 204 or failure so we don't need to do anything with it

View File

@@ -387,17 +387,16 @@ pub async fn revenue_get(
.map(|x| (x.to_string(), HashMap::new()))
.collect::<HashMap<_, _>>();
for value in payouts_values {
if let Some(mod_id) = value.mod_id {
if let Some(amount) = value.amount_sum {
if let Some(interval_start) = value.interval_start {
let id_string = to_base62(mod_id as u64);
if !hm.contains_key(&id_string) {
hm.insert(id_string.clone(), HashMap::new());
}
if let Some(hm) = hm.get_mut(&id_string) {
hm.insert(interval_start.timestamp(), amount);
}
}
if let Some(mod_id) = value.mod_id
&& let Some(amount) = value.amount_sum
&& let Some(interval_start) = value.interval_start
{
let id_string = to_base62(mod_id as u64);
if !hm.contains_key(&id_string) {
hm.insert(id_string.clone(), HashMap::new());
}
if let Some(hm) = hm.get_mut(&id_string) {
hm.insert(interval_start.timestamp(), amount);
}
}
}

View File

@@ -192,10 +192,10 @@ pub async fn collection_get(
.map(|x| x.1)
.ok();
if let Some(data) = collection_data {
if is_visible_collection(&data, &user_option, false).await? {
return Ok(HttpResponse::Ok().json(Collection::from(data)));
}
if let Some(data) = collection_data
&& is_visible_collection(&data, &user_option, false).await?
{
return Ok(HttpResponse::Ok().json(Collection::from(data)));
}
Err(ApiError::NotFound)
}

View File

@@ -536,11 +536,9 @@ pub async fn create_payout(
Some(true),
)
.await
&& let Some(data) = res.items.first()
{
if let Some(data) = res.items.first() {
payout_item.platform_id =
Some(data.payout_item_id.clone());
}
payout_item.platform_id = Some(data.payout_item_id.clone());
}
}

View File

@@ -11,7 +11,7 @@ use crate::database::redis::RedisPool;
use crate::database::{self, models as db_models};
use crate::file_hosting::{FileHost, FileHostPublicity};
use crate::models;
use crate::models::ids::ProjectId;
use crate::models::ids::{ProjectId, VersionId};
use crate::models::images::ImageContext;
use crate::models::notifications::NotificationBody;
use crate::models::pats::Scopes;
@@ -182,10 +182,10 @@ pub async fn project_get(
.map(|x| x.1)
.ok();
if let Some(data) = project_data {
if is_visible_project(&data.inner, &user_option, &pool, false).await? {
return Ok(HttpResponse::Ok().json(Project::from(data)));
}
if let Some(data) = project_data
&& is_visible_project(&data.inner, &user_option, &pool, false).await?
{
return Ok(HttpResponse::Ok().json(Project::from(data)));
}
Err(ApiError::NotFound)
}
@@ -250,6 +250,8 @@ pub struct EditProject {
pub monetization_status: Option<MonetizationStatus>,
pub side_types_migration_review_status:
Option<SideTypesMigrationReviewStatus>,
#[serde(flatten)]
pub loader_fields: HashMap<String, serde_json::Value>,
}
#[allow(clippy::too_many_arguments)]
@@ -403,34 +405,36 @@ pub async fn project_edit(
.await?;
}
if status.is_searchable() && !project_item.inner.webhook_sent {
if let Ok(webhook_url) = dotenvy::var("PUBLIC_DISCORD_WEBHOOK") {
crate::util::webhook::send_discord_webhook(
project_item.inner.id.into(),
&pool,
&redis,
webhook_url,
None,
)
.await
.ok();
if status.is_searchable()
&& !project_item.inner.webhook_sent
&& let Ok(webhook_url) = dotenvy::var("PUBLIC_DISCORD_WEBHOOK")
{
crate::util::webhook::send_discord_webhook(
project_item.inner.id.into(),
&pool,
&redis,
webhook_url,
None,
)
.await
.ok();
sqlx::query!(
"
sqlx::query!(
"
UPDATE mods
SET webhook_sent = TRUE
WHERE id = $1
",
id as db_ids::DBProjectId,
)
.execute(&mut *transaction)
.await?;
}
id as db_ids::DBProjectId,
)
.execute(&mut *transaction)
.await?;
}
if user.role.is_mod() {
if let Ok(webhook_url) = dotenvy::var("MODERATION_SLACK_WEBHOOK") {
crate::util::webhook::send_slack_webhook(
if user.role.is_mod()
&& let Ok(webhook_url) = dotenvy::var("MODERATION_SLACK_WEBHOOK")
{
crate::util::webhook::send_slack_webhook(
project_item.inner.id.into(),
&pool,
&redis,
@@ -449,7 +453,6 @@ pub async fn project_edit(
)
.await
.ok();
}
}
if team_member.is_none_or(|x| !x.accepted) {
@@ -692,45 +695,45 @@ pub async fn project_edit(
.await?;
}
if let Some(links) = &new_project.link_urls {
if !links.is_empty() {
if !perms.contains(ProjectPermissions::EDIT_DETAILS) {
return Err(ApiError::CustomAuthentication(
if let Some(links) = &new_project.link_urls
&& !links.is_empty()
{
if !perms.contains(ProjectPermissions::EDIT_DETAILS) {
return Err(ApiError::CustomAuthentication(
"You do not have the permissions to edit the links of this project!"
.to_string(),
));
}
}
let ids_to_delete = links.keys().cloned().collect::<Vec<String>>();
// Delete all links present in the hashmap; each will either be removed or replaced
sqlx::query!(
"
let ids_to_delete = links.keys().cloned().collect::<Vec<String>>();
// Delete all links present in the hashmap; each will either be removed or replaced
sqlx::query!(
"
DELETE FROM mods_links
WHERE joining_mod_id = $1 AND joining_platform_id IN (
SELECT id FROM link_platforms WHERE name = ANY($2)
)
",
id as db_ids::DBProjectId,
&ids_to_delete
)
.execute(&mut *transaction)
.await?;
id as db_ids::DBProjectId,
&ids_to_delete
)
.execute(&mut *transaction)
.await?;
for (platform, url) in links {
if let Some(url) = url {
let platform_id =
db_models::categories::LinkPlatform::get_id(
platform,
&mut *transaction,
)
.await?
.ok_or_else(|| {
ApiError::InvalidInput(format!(
"Platform {} does not exist.",
platform.clone()
))
})?;
sqlx::query!(
for (platform, url) in links {
if let Some(url) = url {
let platform_id = db_models::categories::LinkPlatform::get_id(
platform,
&mut *transaction,
)
.await?
.ok_or_else(|| {
ApiError::InvalidInput(format!(
"Platform {} does not exist.",
platform.clone()
))
})?;
sqlx::query!(
"
INSERT INTO mods_links (joining_mod_id, joining_platform_id, url)
VALUES ($1, $2, $3)
@@ -741,7 +744,6 @@ pub async fn project_edit(
)
.execute(&mut *transaction)
.await?;
}
}
}
}
@@ -870,6 +872,29 @@ pub async fn project_edit(
.await?;
}
if !new_project.loader_fields.is_empty() {
for version in db_models::DBVersion::get_many(
&project_item.versions,
&**pool,
&redis,
)
.await?
{
super::versions::version_edit_helper(
req.clone(),
(VersionId::from(version.inner.id),),
pool.clone(),
redis.clone(),
super::versions::EditVersion {
fields: new_project.loader_fields.clone(),
..Default::default()
},
session_queue.clone(),
)
.await?;
}
}
// check new description and body for links to associated images
// if they no longer exist in the description or body, delete them
let checkable_strings: Vec<&str> =
@@ -2430,7 +2455,7 @@ pub async fn project_get_organization(
organization,
team_members,
);
return Ok(HttpResponse::Ok().json(organization));
Ok(HttpResponse::Ok().json(organization))
} else {
Err(ApiError::NotFound)
}
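
For context on the EditProject change above: #[serde(flatten)] on a HashMap<String, serde_json::Value> makes serde collect every request key that does not match a named struct field into the map, which is how arbitrary loader fields pass through the v2/v3 edit body and reach version_edit_helper. A minimal, self-contained sketch of that catch-all pattern (the EditBody type and field names here are illustrative, not from the codebase; assumes serde with the derive feature plus serde_json):

use std::collections::HashMap;
use serde::Deserialize;

#[derive(Deserialize)]
struct EditBody {
    title: Option<String>,
    // Any keys not matched by a named field are gathered here.
    #[serde(flatten)]
    loader_fields: HashMap<String, serde_json::Value>,
}

fn main() {
    let body: EditBody = serde_json::from_str(
        r#"{ "title": "My Mod", "game_versions": ["1.21"] }"#,
    )
    .unwrap();
    assert_eq!(body.title.as_deref(), Some("My Mod"));
    // "game_versions" is not a named field, so it lands in loader_fields.
    assert!(body.loader_fields.contains_key("game_versions"));
}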

View File

@@ -767,12 +767,13 @@ pub async fn edit_team_member(
));
}
if let Some(new_permissions) = edit_member.permissions {
if !permissions.contains(new_permissions) {
return Err(ApiError::InvalidInput(
"The new permissions have permissions that you don't have".to_string(),
));
}
if let Some(new_permissions) = edit_member.permissions
&& !permissions.contains(new_permissions)
{
return Err(ApiError::InvalidInput(
"The new permissions have permissions that you don't have"
.to_string(),
));
}
if edit_member.organization_permissions.is_some() {
@@ -800,13 +801,12 @@ pub async fn edit_team_member(
}
if let Some(new_permissions) = edit_member.organization_permissions
&& !organization_permissions.contains(new_permissions)
{
if !organization_permissions.contains(new_permissions) {
return Err(ApiError::InvalidInput(
return Err(ApiError::InvalidInput(
"The new organization permissions have permissions that you don't have"
.to_string(),
));
}
}
if edit_member.permissions.is_some()
@@ -822,13 +822,13 @@ pub async fn edit_team_member(
}
}
if let Some(payouts_split) = edit_member.payouts_split {
if payouts_split < Decimal::ZERO || payouts_split > Decimal::from(5000)
{
return Err(ApiError::InvalidInput(
"Payouts split must be between 0 and 5000!".to_string(),
));
}
if let Some(payouts_split) = edit_member.payouts_split
&& (payouts_split < Decimal::ZERO
|| payouts_split > Decimal::from(5000))
{
return Err(ApiError::InvalidInput(
"Payouts split must be between 0 and 5000!".to_string(),
));
}
DBTeamMember::edit_team_member(
@@ -883,13 +883,13 @@ pub async fn transfer_ownership(
DBTeam::get_association(id.into(), &**pool).await?;
if let Some(TeamAssociationId::Project(pid)) = team_association_id {
let result = DBProject::get_id(pid, &**pool, &redis).await?;
if let Some(project_item) = result {
if project_item.inner.organization_id.is_some() {
return Err(ApiError::InvalidInput(
if let Some(project_item) = result
&& project_item.inner.organization_id.is_some()
{
return Err(ApiError::InvalidInput(
"You cannot transfer ownership of a project team that is owend by an organization"
.to_string(),
));
}
}
}

View File

@@ -289,36 +289,33 @@ pub async fn thread_get(
.await?
.1;
if let Some(mut data) = thread_data {
if is_authorized_thread(&data, &user, &pool).await? {
let authors = &mut data.members;
if let Some(mut data) = thread_data
&& is_authorized_thread(&data, &user, &pool).await?
{
let authors = &mut data.members;
authors.append(
&mut data
.messages
.iter()
.filter_map(|x| {
if x.hide_identity && !user.role.is_mod() {
None
} else {
x.author_id
}
})
.collect::<Vec<_>>(),
);
authors.append(
&mut data
.messages
.iter()
.filter_map(|x| {
if x.hide_identity && !user.role.is_mod() {
None
} else {
x.author_id
}
})
.collect::<Vec<_>>(),
);
let users: Vec<User> = database::models::DBUser::get_many_ids(
authors, &**pool, &redis,
)
.await?
.into_iter()
.map(From::from)
.collect();
let users: Vec<User> =
database::models::DBUser::get_many_ids(authors, &**pool, &redis)
.await?
.into_iter()
.map(From::from)
.collect();
return Ok(
HttpResponse::Ok().json(Thread::from(data, users, &user))
);
}
return Ok(HttpResponse::Ok().json(Thread::from(data, users, &user)));
}
Err(ApiError::NotFound)
}
@@ -454,33 +451,32 @@ pub async fn thread_send_message(
)
.await?;
if let Some(project) = project {
if project.inner.status != ProjectStatus::Processing
&& user.role.is_mod()
{
let members =
database::models::DBTeamMember::get_from_team_full(
project.inner.team_id,
&**pool,
&redis,
)
.await?;
NotificationBuilder {
body: NotificationBody::ModeratorMessage {
thread_id: thread.id.into(),
message_id: id.into(),
project_id: Some(project.inner.id.into()),
report_id: None,
},
}
.insert_many(
members.into_iter().map(|x| x.user_id).collect(),
&mut transaction,
if let Some(project) = project
&& project.inner.status != ProjectStatus::Processing
&& user.role.is_mod()
{
let members =
database::models::DBTeamMember::get_from_team_full(
project.inner.team_id,
&**pool,
&redis,
)
.await?;
NotificationBuilder {
body: NotificationBody::ModeratorMessage {
thread_id: thread.id.into(),
message_id: id.into(),
project_id: Some(project.inner.id.into()),
report_id: None,
},
}
.insert_many(
members.into_iter().map(|x| x.user_id).collect(),
&mut transaction,
&redis,
)
.await?;
}
} else if let Some(report_id) = thread.report_id {
let report = database::models::report_item::DBReport::get(

View File

@@ -522,10 +522,10 @@ async fn version_create_inner(
.fetch_optional(pool)
.await?;
if let Some(project_status) = project_status {
if project_status.status == ProjectStatus::Processing.as_str() {
moderation_queue.projects.insert(project_id.into());
}
if let Some(project_status) = project_status
&& project_status.status == ProjectStatus::Processing.as_str()
{
moderation_queue.projects.insert(project_id.into());
}
Ok(HttpResponse::Ok().json(response))
@@ -871,16 +871,16 @@ pub async fn upload_file(
ref format,
ref files,
} = validation_result
&& dependencies.is_empty()
{
if dependencies.is_empty() {
let hashes: Vec<Vec<u8>> = format
.files
.iter()
.filter_map(|x| x.hashes.get(&PackFileHash::Sha1))
.map(|x| x.as_bytes().to_vec())
.collect();
let hashes: Vec<Vec<u8>> = format
.files
.iter()
.filter_map(|x| x.hashes.get(&PackFileHash::Sha1))
.map(|x| x.as_bytes().to_vec())
.collect();
let res = sqlx::query!(
let res = sqlx::query!(
"
SELECT v.id version_id, v.mod_id project_id, h.hash hash FROM hashes h
INNER JOIN files f on h.file_id = f.id
@@ -892,45 +892,44 @@ pub async fn upload_file(
.fetch_all(&mut **transaction)
.await?;
for file in &format.files {
if let Some(dep) = res.iter().find(|x| {
Some(&*x.hash)
== file
.hashes
.get(&PackFileHash::Sha1)
.map(|x| x.as_bytes())
}) {
dependencies.push(DependencyBuilder {
project_id: Some(models::DBProjectId(dep.project_id)),
version_id: Some(models::DBVersionId(dep.version_id)),
file_name: None,
dependency_type: DependencyType::Embedded.to_string(),
});
} else if let Some(first_download) = file.downloads.first() {
dependencies.push(DependencyBuilder {
project_id: None,
version_id: None,
file_name: Some(
first_download
.rsplit('/')
.next()
.unwrap_or(first_download)
.to_string(),
),
dependency_type: DependencyType::Embedded.to_string(),
});
}
for file in &format.files {
if let Some(dep) = res.iter().find(|x| {
Some(&*x.hash)
== file
.hashes
.get(&PackFileHash::Sha1)
.map(|x| x.as_bytes())
}) {
dependencies.push(DependencyBuilder {
project_id: Some(models::DBProjectId(dep.project_id)),
version_id: Some(models::DBVersionId(dep.version_id)),
file_name: None,
dependency_type: DependencyType::Embedded.to_string(),
});
} else if let Some(first_download) = file.downloads.first() {
dependencies.push(DependencyBuilder {
project_id: None,
version_id: None,
file_name: Some(
first_download
.rsplit('/')
.next()
.unwrap_or(first_download)
.to_string(),
),
dependency_type: DependencyType::Embedded.to_string(),
});
}
}
for file in files {
if !file.is_empty() {
dependencies.push(DependencyBuilder {
project_id: None,
version_id: None,
file_name: Some(file.to_string()),
dependency_type: DependencyType::Embedded.to_string(),
});
}
for file in files {
if !file.is_empty() {
dependencies.push(DependencyBuilder {
project_id: None,
version_id: None,
file_name: Some(file.to_string()),
dependency_type: DependencyType::Embedded.to_string(),
});
}
}
}
@@ -974,10 +973,10 @@ pub async fn upload_file(
));
}
if let ValidationResult::Warning(msg) = validation_result {
if primary {
return Err(CreateError::InvalidInput(msg.to_string()));
}
if let ValidationResult::Warning(msg) = validation_result
&& primary
{
return Err(CreateError::InvalidInput(msg.to_string()));
}
let url = format!("{cdn_url}/{file_path_encode}");

View File

@@ -148,65 +148,55 @@ pub async fn get_update_from_hash(
&redis,
)
.await?
{
if let Some(project) = database::models::DBProject::get_id(
&& let Some(project) = database::models::DBProject::get_id(
file.project_id,
&**pool,
&redis,
)
.await?
{
let mut versions = database::models::DBVersion::get_many(
&project.versions,
&**pool,
&redis,
)
.await?
.into_iter()
.filter(|x| {
let mut bool = true;
if let Some(version_types) = &update_data.version_types {
bool &= version_types
.iter()
.any(|y| y.as_str() == x.inner.version_type);
}
if let Some(loaders) = &update_data.loaders {
bool &= x.loaders.iter().any(|y| loaders.contains(y));
}
if let Some(loader_fields) = &update_data.loader_fields {
for (key, values) in loader_fields {
bool &= if let Some(x_vf) = x
.version_fields
.iter()
.find(|y| y.field_name == *key)
{
values
.iter()
.any(|v| x_vf.value.contains_json_value(v))
} else {
true
};
}
}
bool
})
.sorted();
if let Some(first) = versions.next_back() {
if !is_visible_version(
&first.inner,
&user_option,
&pool,
&redis,
)
.await?
{
return Err(ApiError::NotFound);
}
return Ok(HttpResponse::Ok()
.json(models::projects::Version::from(first)));
{
let mut versions = database::models::DBVersion::get_many(
&project.versions,
&**pool,
&redis,
)
.await?
.into_iter()
.filter(|x| {
let mut bool = true;
if let Some(version_types) = &update_data.version_types {
bool &= version_types
.iter()
.any(|y| y.as_str() == x.inner.version_type);
}
if let Some(loaders) = &update_data.loaders {
bool &= x.loaders.iter().any(|y| loaders.contains(y));
}
if let Some(loader_fields) = &update_data.loader_fields {
for (key, values) in loader_fields {
bool &= if let Some(x_vf) =
x.version_fields.iter().find(|y| y.field_name == *key)
{
values.iter().any(|v| x_vf.value.contains_json_value(v))
} else {
true
};
}
}
bool
})
.sorted();
if let Some(first) = versions.next_back() {
if !is_visible_version(&first.inner, &user_option, &pool, &redis)
.await?
{
return Err(ApiError::NotFound);
}
return Ok(
HttpResponse::Ok().json(models::projects::Version::from(first))
);
}
}
Err(ApiError::NotFound)
@@ -398,13 +388,12 @@ pub async fn update_files(
if let Some(version) = versions
.iter()
.find(|x| x.inner.project_id == file.project_id)
&& let Some(hash) = file.hashes.get(&algorithm)
{
if let Some(hash) = file.hashes.get(&algorithm) {
response.insert(
hash.clone(),
models::projects::Version::from(version.clone()),
);
}
response.insert(
hash.clone(),
models::projects::Version::from(version.clone()),
);
}
}
@@ -484,69 +473,59 @@ pub async fn update_individual_files(
for project in projects {
for file in files.iter().filter(|x| x.project_id == project.inner.id) {
if let Some(hash) = file.hashes.get(&algorithm) {
if let Some(query_file) =
if let Some(hash) = file.hashes.get(&algorithm)
&& let Some(query_file) =
update_data.hashes.iter().find(|x| &x.hash == hash)
{
let version = all_versions
.iter()
.filter(|x| x.inner.project_id == file.project_id)
.filter(|x| {
let mut bool = true;
{
let version = all_versions
.iter()
.filter(|x| x.inner.project_id == file.project_id)
.filter(|x| {
let mut bool = true;
if let Some(version_types) =
&query_file.version_types
{
bool &= version_types.iter().any(|y| {
y.as_str() == x.inner.version_type
});
}
if let Some(loaders) = &query_file.loaders {
bool &= x
.loaders
.iter()
.any(|y| loaders.contains(y));
}
if let Some(loader_fields) =
&query_file.loader_fields
{
for (key, values) in loader_fields {
bool &= if let Some(x_vf) = x
.version_fields
.iter()
.find(|y| y.field_name == *key)
{
values.iter().any(|v| {
x_vf.value.contains_json_value(v)
})
} else {
true
};
}
}
bool
})
.sorted()
.next_back();
if let Some(version) = version {
if is_visible_version(
&version.inner,
&user_option,
&pool,
&redis,
)
.await?
{
response.insert(
hash.clone(),
models::projects::Version::from(
version.clone(),
),
);
if let Some(version_types) = &query_file.version_types {
bool &= version_types
.iter()
.any(|y| y.as_str() == x.inner.version_type);
}
}
if let Some(loaders) = &query_file.loaders {
bool &=
x.loaders.iter().any(|y| loaders.contains(y));
}
if let Some(loader_fields) = &query_file.loader_fields {
for (key, values) in loader_fields {
bool &= if let Some(x_vf) = x
.version_fields
.iter()
.find(|y| y.field_name == *key)
{
values.iter().any(|v| {
x_vf.value.contains_json_value(v)
})
} else {
true
};
}
}
bool
})
.sorted()
.next_back();
if let Some(version) = version
&& is_visible_version(
&version.inner,
&user_option,
&pool,
&redis,
)
.await?
{
response.insert(
hash.clone(),
models::projects::Version::from(version.clone()),
);
}
}
}

View File

@@ -106,13 +106,12 @@ pub async fn version_project_get_helper(
|| x.inner.version_number == id.1
});
if let Some(version) = version {
if is_visible_version(&version.inner, &user_option, &pool, &redis)
if let Some(version) = version
&& is_visible_version(&version.inner, &user_option, &pool, &redis)
.await?
{
return Ok(HttpResponse::Ok()
.json(models::projects::Version::from(version)));
}
{
return Ok(HttpResponse::Ok()
.json(models::projects::Version::from(version)));
}
}
@@ -190,12 +189,12 @@ pub async fn version_get_helper(
.map(|x| x.1)
.ok();
if let Some(data) = version_data {
if is_visible_version(&data.inner, &user_option, &pool, &redis).await? {
return Ok(
HttpResponse::Ok().json(models::projects::Version::from(data))
);
}
if let Some(data) = version_data
&& is_visible_version(&data.inner, &user_option, &pool, &redis).await?
{
return Ok(
HttpResponse::Ok().json(models::projects::Version::from(data))
);
}
Err(ApiError::NotFound)

View File

@@ -15,14 +15,12 @@ pub async fn get_user_status(
return Some(friend_status);
}
if let Ok(mut conn) = redis.pool.get().await {
if let Ok(mut statuses) =
if let Ok(mut conn) = redis.pool.get().await
&& let Ok(mut statuses) =
conn.sscan::<_, String>(get_field_name(user)).await
{
if let Some(status_json) = statuses.next_item().await {
return serde_json::from_str::<UserStatus>(&status_json).ok();
}
}
&& let Some(status_json) = statuses.next_item().await
{
return serde_json::from_str::<UserStatus>(&status_json).ok();
}
None

View File

@@ -138,12 +138,11 @@ fn process_image(
let (orig_width, orig_height) = img.dimensions();
let aspect_ratio = orig_width as f32 / orig_height as f32;
if let Some(target_width) = target_width {
if img.width() > target_width {
let new_height =
(target_width as f32 / aspect_ratio).round() as u32;
img = img.resize(target_width, new_height, FilterType::Lanczos3);
}
if let Some(target_width) = target_width
&& img.width() > target_width
{
let new_height = (target_width as f32 / aspect_ratio).round() as u32;
img = img.resize(target_width, new_height, FilterType::Lanczos3);
}
if let Some(min_aspect_ratio) = min_aspect_ratio {

View File

@@ -133,12 +133,11 @@ pub async fn rate_limit_middleware(
.expect("Rate limiter not configured properly")
.clone();
if let Some(key) = req.headers().get("x-ratelimit-key") {
if key.to_str().ok()
if let Some(key) = req.headers().get("x-ratelimit-key")
&& key.to_str().ok()
== dotenvy::var("RATE_LIMIT_IGNORE_KEY").ok().as_deref()
{
return Ok(next.call(req).await?.map_into_left_body());
}
{
return Ok(next.call(req).await?.map_into_left_body());
}
let conn_info = req.connection_info().clone();

View File

@@ -22,46 +22,47 @@ pub fn validation_errors_to_string(
let key_option = map.keys().next();
if let Some(field) = key_option {
if let Some(error) = map.get(field) {
return match error {
ValidationErrorsKind::Struct(errors) => {
validation_errors_to_string(
if let Some(field) = key_option
&& let Some(error) = map.get(field)
{
return match error {
ValidationErrorsKind::Struct(errors) => {
validation_errors_to_string(
*errors.clone(),
Some(format!("of item {field}")),
)
}
ValidationErrorsKind::List(list) => {
if let Some((index, errors)) = list.iter().next() {
output.push_str(&validation_errors_to_string(
*errors.clone(),
Some(format!("of item {field}")),
)
Some(format!("of list {field} with index {index}")),
));
}
ValidationErrorsKind::List(list) => {
if let Some((index, errors)) = list.iter().next() {
output.push_str(&validation_errors_to_string(
*errors.clone(),
Some(format!("of list {field} with index {index}")),
));
}
output
}
ValidationErrorsKind::Field(errors) => {
if let Some(error) = errors.first() {
if let Some(adder) = adder {
write!(
output
}
ValidationErrorsKind::Field(errors) => {
if let Some(error) = errors.first() {
if let Some(adder) = adder {
write!(
&mut output,
"Field {field} {adder} failed validation with error: {}",
error.code
).unwrap();
} else {
write!(
&mut output,
"Field {field} failed validation with error: {}",
error.code
).unwrap();
}
} else {
write!(
&mut output,
"Field {field} failed validation with error: {}",
error.code
)
.unwrap();
}
output
}
};
}
output
}
};
}
String::new()

View File

@@ -238,17 +238,17 @@ pub async fn send_slack_webhook(
}
});
if let Some(icon_url) = metadata.project_icon_url {
if let Some(project_block) = project_block.as_object_mut() {
project_block.insert(
"accessory".to_string(),
serde_json::json!({
"type": "image",
"image_url": icon_url,
"alt_text": metadata.project_title
}),
);
}
if let Some(icon_url) = metadata.project_icon_url
&& let Some(project_block) = project_block.as_object_mut()
{
project_block.insert(
"accessory".to_string(),
serde_json::json!({
"type": "image",
"image_url": icon_url,
"alt_text": metadata.project_title
}),
);
}
blocks.push(project_block);

View File

@@ -292,7 +292,7 @@ pub async fn add_dummy_data(api: &ApiV3, db: TemporaryDatabase) -> DummyData {
let pool = &db.pool.clone();
pool.execute(
include_str!("../files/dummy_data.sql")
include_str!("../fixtures/dummy_data.sql")
.replace("$1", &Scopes::all().bits().to_string())
.as_str(),
)

View File

@@ -222,10 +222,10 @@ impl<'a, A: Api> PermissionsTest<'a, A> {
resp.status().as_u16()
));
}
if resp.status() == StatusCode::OK {
if let Some(failure_json_check) = &self.failure_json_check {
failure_json_check(&test::read_body_json(resp).await);
}
if resp.status() == StatusCode::OK
&& let Some(failure_json_check) = &self.failure_json_check
{
failure_json_check(&test::read_body_json(resp).await);
}
// Failure test- logged in on a non-team user
@@ -246,10 +246,10 @@ impl<'a, A: Api> PermissionsTest<'a, A> {
resp.status().as_u16()
));
}
if resp.status() == StatusCode::OK {
if let Some(failure_json_check) = &self.failure_json_check {
failure_json_check(&test::read_body_json(resp).await);
}
if resp.status() == StatusCode::OK
&& let Some(failure_json_check) = &self.failure_json_check
{
failure_json_check(&test::read_body_json(resp).await);
}
// Failure test- logged in with EVERY non-relevant permission
@@ -270,10 +270,10 @@ impl<'a, A: Api> PermissionsTest<'a, A> {
resp.status().as_u16()
));
}
if resp.status() == StatusCode::OK {
if let Some(failure_json_check) = &self.failure_json_check {
failure_json_check(&test::read_body_json(resp).await);
}
if resp.status() == StatusCode::OK
&& let Some(failure_json_check) = &self.failure_json_check
{
failure_json_check(&test::read_body_json(resp).await);
}
// Patch user's permissions to success permissions
@@ -300,10 +300,10 @@ impl<'a, A: Api> PermissionsTest<'a, A> {
resp.status().as_u16()
));
}
if resp.status() == StatusCode::OK {
if let Some(success_json_check) = &self.success_json_check {
success_json_check(&test::read_body_json(resp).await);
}
if resp.status() == StatusCode::OK
&& let Some(success_json_check) = &self.success_json_check
{
success_json_check(&test::read_body_json(resp).await);
}
// If the remove_user flag is set, remove the user from the project

View File

@@ -1,2 +1,2 @@
allow-dbg-in-tests = true
msrv = "1.88.0"
msrv = "1.89.0"

View File

@@ -1,6 +1,8 @@
name: labrinth
services:
postgres_db:
image: postgres:alpine
container_name: labrinth-postgres
volumes:
- db-data:/var/lib/postgresql/data
ports:
@@ -16,6 +18,7 @@ services:
retries: 3
meilisearch:
image: getmeili/meilisearch:v1.12.0
container_name: labrinth-meilisearch
restart: on-failure
ports:
- '7700:7700'
@@ -31,6 +34,7 @@ services:
retries: 3
redis:
image: redis:alpine
container_name: labrinth-redis
restart: on-failure
ports:
- '6379:6379'
@@ -43,17 +47,59 @@ services:
retries: 3
clickhouse:
image: clickhouse/clickhouse-server
container_name: labrinth-clickhouse
ports:
- '8123:8123'
environment:
CLICKHOUSE_USER: default
CLICKHOUSE_PASSWORD: default
healthcheck:
test: ['CMD', 'clickhouse-client', '--query', 'SELECT 1']
test: ['CMD-SHELL', 'clickhouse-client --query "SELECT 1"']
interval: 3s
timeout: 5s
retries: 3
mail:
image: axllent/mailpit:v1.27
container_name: labrinth-mail
ports:
- '1025:1025'
- '8025:8025'
environment:
MP_ENABLE_SPAMASSASSIN: postmark
healthcheck:
test: ['CMD', 'wget', '-q', '-O/dev/null', 'http://localhost:8025/api/v1/info']
interval: 3s
timeout: 5s
retries: 3
labrinth:
profiles:
- with-labrinth
build:
context: .
dockerfile: ./apps/labrinth/Dockerfile
container_name: labrinth
ports:
- '8000:8000'
env_file: ./apps/labrinth/.env.docker-compose
volumes:
- labrinth-cdn-data:/tmp/modrinth
depends_on:
postgres_db:
condition: service_healthy
meilisearch:
condition: service_healthy
redis:
condition: service_healthy
clickhouse:
condition: service_healthy
mail:
condition: service_healthy
develop:
watch:
- path: ./apps/labrinth
action: rebuild
volumes:
meilisearch-data:
db-data:
redis-data:
labrinth-cdn-data:
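
The new labrinth service sits behind the with-labrinth Compose profile, so a plain docker compose up still starts only the backing services (Postgres, Meilisearch, Redis, ClickHouse, Mailpit); the API container is opt-in via docker compose --profile with-labrinth up --build. The develop.watch block rebuilds the image when files under ./apps/labrinth change, but only when run in watch mode (docker compose watch), which is standard Compose behavior.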

View File

@@ -50,10 +50,10 @@ pub async fn parse_command(
// We assume anything else is a filepath to an .mrpack file
let path = PathBuf::from(command_string);
let path = io::canonicalize(path)?;
if let Some(ext) = path.extension() {
if ext == "mrpack" {
return Ok(CommandPayload::RunMRPack { path });
}
if let Some(ext) = path.extension()
&& ext == "mrpack"
{
return Ok(CommandPayload::RunMRPack { path });
}
emit_warning(&format!(
"Invalid command, unrecognized filetype: {}",

View File

@@ -106,13 +106,13 @@ pub async fn auto_install_java(java_version: u32) -> crate::Result<PathBuf> {
})?;
// removes the old installation of java
if let Some(file) = archive.file_names().next() {
if let Some(dir) = file.split('/').next() {
let path = path.join(dir);
if let Some(file) = archive.file_names().next()
&& let Some(dir) = file.split('/').next()
{
let path = path.join(dir);
if path.exists() {
io::remove_dir_all(path).await?;
}
if path.exists() {
io::remove_dir_all(path).await?;
}
}

View File

@@ -72,11 +72,11 @@ pub async fn remove_user(uuid: uuid::Uuid) -> crate::Result<()> {
if let Some((uuid, user)) = users.remove(&uuid) {
Credentials::remove(uuid, &state.pool).await?;
if user.active {
if let Some((_, mut user)) = users.into_iter().next() {
user.active = true;
user.upsert(&state.pool).await?;
}
if user.active
&& let Some((_, mut user)) = users.into_iter().next()
{
user.active = true;
user.upsert(&state.pool).await?;
}
}

View File

@@ -221,14 +221,14 @@ async fn import_atlauncher_unmanaged(
.unwrap_or_else(|| backup_name.to_string());
prof.install_stage = ProfileInstallStage::PackInstalling;
if let Some(ref project_id) = description.project_id {
if let Some(ref version_id) = description.version_id {
prof.linked_data = Some(LinkedData {
project_id: project_id.clone(),
version_id: version_id.clone(),
locked: true,
})
}
if let Some(ref project_id) = description.project_id
&& let Some(ref version_id) = description.version_id
{
prof.linked_data = Some(LinkedData {
project_id: project_id.clone(),
version_id: version_id.clone(),
locked: true,
})
}
prof.icon_path = description

View File

@@ -383,18 +383,18 @@ pub async fn set_profile_information(
.unwrap_or_else(|| backup_name.to_string());
prof.install_stage = ProfileInstallStage::PackInstalling;
if let Some(ref project_id) = description.project_id {
if let Some(ref version_id) = description.version_id {
prof.linked_data = Some(LinkedData {
project_id: project_id.clone(),
version_id: version_id.clone(),
locked: if !ignore_lock {
true
} else {
prof.linked_data.as_ref().is_none_or(|x| x.locked)
},
})
}
if let Some(ref project_id) = description.project_id
&& let Some(ref version_id) = description.version_id
{
prof.linked_data = Some(LinkedData {
project_id: project_id.clone(),
version_id: version_id.clone(),
locked: if !ignore_lock {
true
} else {
prof.linked_data.as_ref().is_none_or(|x| x.locked)
},
})
}
prof.icon_path = description

View File

@@ -149,13 +149,12 @@ pub async fn install_zipped_mrpack_files(
let profile_path = profile_path.clone();
async move {
//TODO: Future update: prompt user for optional files in a modpack
if let Some(env) = project.env {
if env
if let Some(env) = project.env
&& env
.get(&EnvType::Client)
.is_some_and(|x| x == &SideType::Unsupported)
{
return Ok(());
}
{
return Ok(());
}
let file = fetch_mirrors(
@@ -375,12 +374,12 @@ pub async fn remove_all_related_files(
)
.await?
{
if let Some(metadata) = &project.metadata {
if to_remove.contains(&metadata.project_id) {
let path = profile_full_path.join(file_path);
if path.exists() {
io::remove_file(&path).await?;
}
if let Some(metadata) = &project.metadata
&& to_remove.contains(&metadata.project_id)
{
let path = profile_full_path.join(file_path);
if path.exists() {
io::remove_file(&path).await?;
}
}
}

View File

@@ -337,28 +337,26 @@ pub async fn update_project(
)
.await?
.remove(project_path)
&& let Some(update_version) = &file.update_version_id
{
if let Some(update_version) = &file.update_version_id {
let path = Profile::add_project_version(
profile_path,
update_version,
&state.pool,
&state.fetch_semaphore,
&state.io_semaphore,
)
.await?;
let path = Profile::add_project_version(
profile_path,
update_version,
&state.pool,
&state.fetch_semaphore,
&state.io_semaphore,
)
.await?;
if path != project_path {
Profile::remove_project(profile_path, project_path).await?;
}
if !skip_send_event.unwrap_or(false) {
emit_profile(profile_path, ProfilePayloadType::Edited)
.await?;
}
return Ok(path);
if path != project_path {
Profile::remove_project(profile_path, project_path).await?;
}
if !skip_send_event.unwrap_or(false) {
emit_profile(profile_path, ProfilePayloadType::Edited).await?;
}
return Ok(path);
}
Err(crate::ErrorKind::InputError(
@@ -479,10 +477,10 @@ pub async fn export_mrpack(
let included_export_candidates = included_export_candidates
.into_iter()
.filter(|x| {
if let Some(f) = PathBuf::from(x).file_name() {
if f.to_string_lossy().starts_with(".DS_Store") {
return false;
}
if let Some(f) = PathBuf::from(x).file_name()
&& f.to_string_lossy().starts_with(".DS_Store")
{
return false;
}
true
})
@@ -765,7 +763,7 @@ pub async fn try_update_playtime(path: &str) -> crate::Result<()> {
let updated_recent_playtime = profile.recent_time_played;
let res = if updated_recent_playtime > 0 {
// Create update struct to send to Labrinth
// Create update struct to send to labrinth
let modrinth_pack_version_id =
profile.linked_data.as_ref().map(|l| l.version_id.clone());
let playtime_update_json = json!({

View File

@@ -187,6 +187,7 @@ pub enum LoadingBarType {
}
#[derive(Serialize, Clone)]
#[cfg(feature = "tauri")]
pub struct LoadingPayload {
pub event: LoadingBarType,
pub loader_uuid: Uuid,
@@ -195,11 +196,7 @@ pub struct LoadingPayload {
}
#[derive(Serialize, Clone)]
pub struct OfflinePayload {
pub offline: bool,
}
#[derive(Serialize, Clone)]
#[cfg(feature = "tauri")]
pub struct WarningPayload {
pub message: String,
}
@@ -223,12 +220,14 @@ pub enum CommandPayload {
}
#[derive(Serialize, Clone)]
#[cfg(feature = "tauri")]
pub struct ProcessPayload {
pub profile_path_id: String,
pub uuid: Uuid,
pub event: ProcessPayloadType,
pub message: String,
}
#[derive(Serialize, Clone, Debug)]
#[serde(rename_all = "snake_case")]
pub enum ProcessPayloadType {
@@ -237,11 +236,13 @@ pub enum ProcessPayloadType {
}
#[derive(Serialize, Clone)]
#[cfg(feature = "tauri")]
pub struct ProfilePayload {
pub profile_path_id: String,
#[serde(flatten)]
pub event: ProfilePayloadType,
}
#[derive(Serialize, Clone)]
#[serde(tag = "event", rename_all = "snake_case")]
pub enum ProfilePayloadType {
@@ -260,6 +261,16 @@ pub enum ProfilePayloadType {
Removed,
}
#[derive(Serialize, Clone)]
#[serde(rename_all = "snake_case")]
#[serde(tag = "event")]
pub enum FriendPayload {
FriendRequest { from: UserId },
UserOffline { id: UserId },
StatusUpdate { user_status: UserStatus },
StatusSync,
}
#[derive(Debug, thiserror::Error)]
pub enum EventError {
#[error("Event state was not properly initialized")]
@@ -272,13 +283,3 @@ pub enum EventError {
#[error("Tauri error: {0}")]
TauriError(#[from] tauri::Error),
}
#[derive(Serialize, Clone)]
#[serde(rename_all = "snake_case")]
#[serde(tag = "event")]
pub enum FriendPayload {
FriendRequest { from: UserId },
UserOffline { id: UserId },
StatusUpdate { user_status: UserStatus },
StatusSync,
}

View File

@@ -32,15 +32,15 @@ pub fn get_class_paths(
let mut cps = libraries
.iter()
.filter_map(|library| {
if let Some(rules) = &library.rules {
if !parse_rules(
if let Some(rules) = &library.rules
&& !parse_rules(
rules,
java_arch,
&QuickPlayType::None,
minecraft_updated,
) {
return None;
}
)
{
return None;
}
if !library.include_in_classpath {
@@ -504,10 +504,10 @@ pub async fn get_processor_main_class(
let mut line = line.map_err(IOError::from)?;
line.retain(|c| !c.is_whitespace());
if line.starts_with("Main-Class:") {
if let Some(class) = line.split(':').nth(1) {
return Ok(Some(class.to_string()));
}
if line.starts_with("Main-Class:")
&& let Some(class) = line.split(':').nth(1)
{
return Ok(Some(class.to_string()));
}
}

View File

@@ -290,12 +290,11 @@ pub async fn download_libraries(
loading_try_for_each_concurrent(
stream::iter(libraries.iter())
.map(Ok::<&Library, crate::Error>), None, loading_bar,loading_amount,num_files, None,|library| async move {
if let Some(rules) = &library.rules {
if !parse_rules(rules, java_arch, &QuickPlayType::None, minecraft_updated) {
if let Some(rules) = &library.rules
&& !parse_rules(rules, java_arch, &QuickPlayType::None, minecraft_updated) {
tracing::trace!("Skipped library {}", &library.name);
return Ok(());
}
}
if !library.downloadable {
tracing::trace!("Skipped non-downloadable library {}", &library.name);
@@ -311,15 +310,14 @@ pub async fn download_libraries(
return Ok(());
}
if let Some(d::minecraft::LibraryDownloads { artifact: Some(ref artifact), ..}) = library.downloads {
if !artifact.url.is_empty(){
if let Some(d::minecraft::LibraryDownloads { artifact: Some(ref artifact), ..}) = library.downloads
&& !artifact.url.is_empty(){
let bytes = fetch(&artifact.url, Some(&artifact.sha1), &st.fetch_semaphore, &st.pool)
.await?;
write(&path, &bytes, &st.io_semaphore).await?;
tracing::trace!("Fetched library {} to path {:?}", &library.name, &path);
return Ok::<_, crate::Error>(());
}
}
let url = [
library

View File

@@ -344,10 +344,10 @@ pub async fn install_minecraft(
// Forge processors (90-100)
for (index, processor) in processors.iter().enumerate() {
if let Some(sides) = &processor.sides {
if !sides.contains(&String::from("client")) {
continue;
}
if let Some(sides) = &processor.sides
&& !sides.contains(&String::from("client"))
{
continue;
}
let cp = {

View File

@@ -391,10 +391,10 @@ impl DirectoryInfo {
return Err(e);
}
} else {
if let Some(disk_usage) = get_disk_usage(&move_dir)? {
if total_size > disk_usage {
return Err(crate::ErrorKind::DirectoryMoveError(format!("Not enough space to move directory to {}: only {} bytes available", app_dir.display(), disk_usage)).into());
}
if let Some(disk_usage) = get_disk_usage(&move_dir)?
&& total_size > disk_usage
{
return Err(crate::ErrorKind::DirectoryMoveError(format!("Not enough space to move directory to {}: only {} bytes available", app_dir.display(), disk_usage)).into());
}
let loader_bar_id = Arc::new(&loader_bar_id);

View File

@@ -9,7 +9,7 @@ use ariadne::networking::message::{
ClientToServerMessage, ServerToClientMessage,
};
use ariadne::users::UserStatus;
use async_tungstenite::WebSocketStream;
use async_tungstenite::WebSocketSender;
use async_tungstenite::tokio::{ConnectStream, connect_async};
use async_tungstenite::tungstenite::Message;
use async_tungstenite::tungstenite::client::IntoClientRequest;
@@ -17,7 +17,6 @@ use bytes::Bytes;
use chrono::{DateTime, Utc};
use dashmap::DashMap;
use either::Either;
use futures::stream::SplitSink;
use futures::{SinkExt, StreamExt};
use reqwest::Method;
use reqwest::header::HeaderValue;
@@ -32,7 +31,7 @@ use tokio::sync::{Mutex, RwLock};
use uuid::Uuid;
pub(super) type WriteSocket =
Arc<RwLock<Option<SplitSink<WebSocketStream<ConnectStream>, Message>>>>;
Arc<RwLock<Option<WebSocketSender<ConnectStream>>>>;
pub(super) type TunnelSockets = Arc<DashMap<Uuid, Arc<InternalTunnelSocket>>>;
pub struct FriendsSocket {
@@ -180,27 +179,24 @@ impl FriendsSocket {
ServerToClientMessage::FriendSocketStoppedListening { .. } => {}, // TODO
ServerToClientMessage::SocketConnected { to_socket, new_socket } => {
if let Some(connected_to) = sockets.get(&to_socket) {
if let InternalTunnelSocket::Listening(local_addr) = *connected_to.value().clone() {
if let Ok(new_stream) = TcpStream::connect(local_addr).await {
if let Some(connected_to) = sockets.get(&to_socket)
&& let InternalTunnelSocket::Listening(local_addr) = *connected_to.value().clone()
&& let Ok(new_stream) = TcpStream::connect(local_addr).await {
let (read, write) = new_stream.into_split();
sockets.insert(new_socket, Arc::new(InternalTunnelSocket::Connected(Mutex::new(write))));
Self::socket_read_loop(write_handle.clone(), read, new_socket);
continue;
}
}
}
let _ = Self::send_message(&write_handle, ClientToServerMessage::SocketClose { socket: new_socket }).await;
},
ServerToClientMessage::SocketClosed { socket } => {
sockets.remove_if(&socket, |_, x| matches!(*x.clone(), InternalTunnelSocket::Connected(_)));
},
ServerToClientMessage::SocketData { socket, data } => {
if let Some(mut socket) = sockets.get_mut(&socket) {
if let InternalTunnelSocket::Connected(ref stream) = *socket.value_mut().clone() {
if let Some(mut socket) = sockets.get_mut(&socket)
&& let InternalTunnelSocket::Connected(ref stream) = *socket.value_mut().clone() {
let _ = stream.lock().await.write_all(&data).await;
}
}
},
}
}

View File

@@ -100,8 +100,8 @@ pub async fn init_watcher() -> crate::Result<FileWatcher> {
let profile_path_str = profile_path_str.clone();
let world = world.clone();
tokio::spawn(async move {
if let Ok(state) = State::get().await {
if let Err(e) = attached_world_data::AttachedWorldData::remove_for_world(
if let Ok(state) = State::get().await
&& let Err(e) = attached_world_data::AttachedWorldData::remove_for_world(
&profile_path_str,
WorldType::Singleplayer,
&world,
@@ -109,7 +109,6 @@ pub async fn init_watcher() -> crate::Result<FileWatcher> {
).await {
tracing::warn!("Failed to remove AttachedWorldData for '{world}': {e}")
}
}
});
}
Some(ProfilePayloadType::WorldUpdated { world })
@@ -150,14 +149,14 @@ pub(crate) async fn watch_profiles_init(
) {
if let Ok(profiles_dir) = std::fs::read_dir(dirs.profiles_dir()) {
for profile_dir in profiles_dir {
if let Ok(file_name) = profile_dir.map(|x| x.file_name()) {
if let Some(file_name) = file_name.to_str() {
if file_name.starts_with(".DS_Store") {
continue;
};
if let Ok(file_name) = profile_dir.map(|x| x.file_name())
&& let Some(file_name) = file_name.to_str()
{
if file_name.starts_with(".DS_Store") {
continue;
};
watch_profile(file_name, watcher, dirs).await;
}
watch_profile(file_name, watcher, dirs).await;
}
}
}

View File

@@ -76,10 +76,9 @@ where
.loaded_config_dir
.clone()
.and_then(|x| x.to_str().map(|x| x.to_string()))
&& path != old_launcher_root_str
{
if path != old_launcher_root_str {
settings.custom_dir = Some(path);
}
settings.custom_dir = Some(path);
}
settings.prev_custom_dir = Some(old_launcher_root_str.clone());
@@ -137,31 +136,27 @@ where
.await?;
}
if let Some(device_token) = minecraft_auth.token {
if let Ok(private_key) =
if let Some(device_token) = minecraft_auth.token
&& let Ok(private_key) =
SigningKey::from_pkcs8_pem(&device_token.private_key)
{
if let Ok(uuid) = Uuid::parse_str(&device_token.id) {
DeviceTokenPair {
token: DeviceToken {
issue_instant: device_token.token.issue_instant,
not_after: device_token.token.not_after,
token: device_token.token.token,
display_claims: device_token
.token
.display_claims,
},
key: DeviceTokenKey {
id: uuid,
key: private_key,
x: device_token.x,
y: device_token.y,
},
}
.upsert(exec)
.await?;
}
&& let Ok(uuid) = Uuid::parse_str(&device_token.id)
{
DeviceTokenPair {
token: DeviceToken {
issue_instant: device_token.token.issue_instant,
not_after: device_token.token.not_after,
token: device_token.token.token,
display_claims: device_token.token.display_claims,
},
key: DeviceTokenKey {
id: uuid,
key: private_key,
x: device_token.x,
y: device_token.y,
},
}
.upsert(exec)
.await?;
}
}
@@ -208,100 +203,93 @@ where
update_version,
..
} = project.metadata
{
if let Some(file) = version
&& let Some(file) = version
.files
.iter()
.find(|x| x.hashes.get("sha512") == Some(&sha512))
{
if let Some(sha1) = file.hashes.get("sha1") {
if let Ok(metadata) = full_path.metadata() {
let file_name = format!(
"{}/{}",
profile.path,
path.replace('\\', "/")
.replace(".disabled", "")
);
&& let Some(sha1) = file.hashes.get("sha1")
{
if let Ok(metadata) = full_path.metadata() {
let file_name = format!(
"{}/{}",
profile.path,
path.replace('\\', "/")
.replace(".disabled", "")
);
cached_entries.push(CacheValue::FileHash(
CachedFileHash {
path: file_name,
size: metadata.len(),
hash: sha1.clone(),
project_type: ProjectType::get_from_parent_folder(&full_path),
},
));
}
cached_entries.push(CacheValue::File(
CachedFile {
hash: sha1.clone(),
project_id: version.project_id.clone(),
version_id: version.id.clone(),
},
));
if let Some(update_version) = update_version {
let mod_loader: ModLoader =
profile.metadata.loader.into();
cached_entries.push(
CacheValue::FileUpdate(
CachedFileUpdate {
hash: sha1.clone(),
game_version: profile
.metadata
.game_version
.clone(),
loaders: vec![
mod_loader
.as_str()
.to_string(),
],
update_version_id:
update_version.id.clone(),
},
cached_entries.push(CacheValue::FileHash(
CachedFileHash {
path: file_name,
size: metadata.len(),
hash: sha1.clone(),
project_type:
ProjectType::get_from_parent_folder(
&full_path,
),
);
cached_entries.push(CacheValue::Version(
(*update_version).into(),
));
}
let members = members
.into_iter()
.map(|x| {
let user = User {
id: x.user.id,
username: x.user.username,
avatar_url: x.user.avatar_url,
bio: x.user.bio,
created: x.user.created,
role: x.user.role,
badges: 0,
};
cached_entries.push(CacheValue::User(
user.clone(),
));
TeamMember {
team_id: x.team_id,
user,
is_owner: x.role == "Owner",
role: x.role,
ordering: x.ordering,
}
})
.collect::<Vec<_>>();
cached_entries.push(CacheValue::Team(members));
cached_entries.push(CacheValue::Version(
(*version).into(),
));
}
},
));
}
cached_entries.push(CacheValue::File(CachedFile {
hash: sha1.clone(),
project_id: version.project_id.clone(),
version_id: version.id.clone(),
}));
if let Some(update_version) = update_version {
let mod_loader: ModLoader =
profile.metadata.loader.into();
cached_entries.push(CacheValue::FileUpdate(
CachedFileUpdate {
hash: sha1.clone(),
game_version: profile
.metadata
.game_version
.clone(),
loaders: vec![
mod_loader.as_str().to_string(),
],
update_version_id: update_version
.id
.clone(),
},
));
cached_entries.push(CacheValue::Version(
(*update_version).into(),
));
}
let members = members
.into_iter()
.map(|x| {
let user = User {
id: x.user.id,
username: x.user.username,
avatar_url: x.user.avatar_url,
bio: x.user.bio,
created: x.user.created,
role: x.user.role,
badges: 0,
};
cached_entries
.push(CacheValue::User(user.clone()));
TeamMember {
team_id: x.team_id,
user,
is_owner: x.role == "Owner",
role: x.role,
ordering: x.ordering,
}
})
.collect::<Vec<_>>();
cached_entries.push(CacheValue::Team(members));
cached_entries
.push(CacheValue::Version((*version).into()));
}
}
@@ -333,16 +321,15 @@ where
.map(|x| x.id),
groups: profile.metadata.groups,
linked_data: profile.metadata.linked_data.and_then(|x| {
if let Some(project_id) = x.project_id {
if let Some(version_id) = x.version_id {
if let Some(locked) = x.locked {
return Some(LinkedData {
project_id,
version_id,
locked,
});
}
}
if let Some(project_id) = x.project_id
&& let Some(version_id) = x.version_id
&& let Some(locked) = x.locked
{
return Some(LinkedData {
project_id,
version_id,
locked,
});
}
None

View File

@@ -477,10 +477,9 @@ impl Credentials {
..
},
) = *err.raw
&& (source.is_connect() || source.is_timeout())
{
if source.is_connect() || source.is_timeout() {
return Ok(Some(creds));
}
return Ok(Some(creds));
}
Err(err)
@@ -729,36 +728,31 @@ impl DeviceTokenPair {
.fetch_optional(exec)
.await?;
if let Some(x) = res {
if let Ok(uuid) = Uuid::parse_str(&x.uuid) {
if let Ok(private_key) =
SigningKey::from_pkcs8_pem(&x.private_key)
{
return Ok(Some(Self {
token: DeviceToken {
issue_instant: Utc
.timestamp_opt(x.issue_instant, 0)
.single()
.unwrap_or_else(Utc::now),
not_after: Utc
.timestamp_opt(x.not_after, 0)
.single()
.unwrap_or_else(Utc::now),
token: x.token,
display_claims: serde_json::from_value(
x.display_claims,
)
.unwrap_or_default(),
},
key: DeviceTokenKey {
id: uuid,
key: private_key,
x: x.x,
y: x.y,
},
}));
}
}
if let Some(x) = res
&& let Ok(uuid) = Uuid::parse_str(&x.uuid)
&& let Ok(private_key) = SigningKey::from_pkcs8_pem(&x.private_key)
{
return Ok(Some(Self {
token: DeviceToken {
issue_instant: Utc
.timestamp_opt(x.issue_instant, 0)
.single()
.unwrap_or_else(Utc::now),
not_after: Utc
.timestamp_opt(x.not_after, 0)
.single()
.unwrap_or_else(Utc::now),
token: x.token,
display_claims: serde_json::from_value(x.display_claims)
.unwrap_or_default(),
},
key: DeviceTokenKey {
id: uuid,
key: private_key,
x: x.x,
y: x.y,
},
}));
}
Ok(None)
@@ -813,11 +807,7 @@ const MICROSOFT_CLIENT_ID: &str = "00000000402b5328";
const AUTH_REPLY_URL: &str = "https://login.live.com/oauth20_desktop.srf";
const REQUESTED_SCOPE: &str = "service::user.auth.xboxlive.com::MBI_SSL";
/* [AR] Fix
* Weird visibility issue that didn't reproduce before
* Had to make DeviceToken and RequestWithDate pub(crate) to fix compilation error
*/
pub(crate) struct RequestWithDate<T> {
pub struct RequestWithDate<T> {
pub date: DateTime<Utc>,
pub value: T,
}

View File

@@ -198,7 +198,7 @@ pub async fn finish_login_flow(
semaphore: &FetchSemaphore,
exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
) -> crate::Result<ModrinthCredentials> {
// The authorization code actually is the access token, since Labrinth doesn't
// The authorization code actually is the access token, since labrinth doesn't
// issue separate authorization codes. Therefore, this is equivalent to an
// implicit OAuth grant flow, and no additional exchanging or finalization is
// needed. TODO not do this for the reasons outlined at

View File

@@ -360,18 +360,17 @@ impl Process {
}
// Write the throwable if present
if !current_content.is_empty() {
if let Err(e) =
if !current_content.is_empty()
&& let Err(e) =
Process::append_to_log_file(
&log_path,
&current_content,
)
{
tracing::error!(
"Failed to write throwable to log file: {}",
e
);
}
{
tracing::error!(
"Failed to write throwable to log file: {}",
e
);
}
}
}
@@ -429,15 +428,13 @@ impl Process {
if let Some(timestamp) =
current_event.timestamp.as_deref()
{
if let Err(e) = Self::maybe_handle_server_join_logging(
&& let Err(e) = Self::maybe_handle_server_join_logging(
profile_path,
timestamp,
message
).await {
tracing::error!("Failed to handle server join logging: {e}");
}
}
}
}
_ => {}
@@ -445,35 +442,29 @@ impl Process {
}
Ok(Event::Text(mut e)) => {
if in_message || in_throwable {
if let Ok(text) = e.unescape() {
if let Ok(text) = e.xml_content() {
current_content.push_str(&text);
}
} else if !in_event
&& !e.inplace_trim_end()
&& !e.inplace_trim_start()
&& let Ok(text) = e.xml_content()
&& let Err(e) = Process::append_to_log_file(
&log_path,
&format!("{text}\n"),
)
{
if let Ok(text) = e.unescape() {
if let Err(e) = Process::append_to_log_file(
&log_path,
&format!("{text}\n"),
) {
tracing::error!(
"Failed to write to log file: {}",
e
);
}
}
tracing::error!(
"Failed to write to log file: {}",
e
);
}
}
Ok(Event::CData(e)) => {
if in_message || in_throwable {
if let Ok(text) = e
.escape()
.map_err(|x| x.into())
.and_then(|x| x.unescape())
{
current_content.push_str(&text);
}
if (in_message || in_throwable)
&& let Ok(text) = e.xml_content()
{
current_content.push_str(&text);
}
}
_ => (),
@@ -720,16 +711,13 @@ impl Process {
let logs_folder = state.directories.profile_logs_dir(&profile_path);
let log_path = logs_folder.join(LAUNCHER_LOG_PATH);
if log_path.exists() {
if let Err(e) = Process::append_to_log_file(
if log_path.exists()
&& let Err(e) = Process::append_to_log_file(
&log_path,
&format!("\n# Process exited with status: {mc_exit_status}\n"),
) {
tracing::warn!(
"Failed to write exit status to log file: {}",
e
);
}
)
{
tracing::warn!("Failed to write exit status to log file: {}", e);
}
let _ = state.discord_rpc.clear_to_default(true).await;

View File

@@ -595,8 +595,8 @@ impl Profile {
}
#[tracing::instrument(skip(self, semaphore, icon))]
pub async fn set_icon<'a>(
&'a mut self,
pub async fn set_icon(
&mut self,
cache_dir: &Path,
semaphore: &IoSemaphore,
icon: bytes::Bytes,
@@ -629,21 +629,20 @@ impl Profile {
{
let subdirectory =
subdirectory.map_err(io::IOError::from)?.path();
if subdirectory.is_file() {
if let Some(file_name) = subdirectory
if subdirectory.is_file()
&& let Some(file_name) = subdirectory
.file_name()
.and_then(|x| x.to_str())
{
let file_size = subdirectory
.metadata()
.map_err(io::IOError::from)?
.len();
{
let file_size = subdirectory
.metadata()
.map_err(io::IOError::from)?
.len();
keys.push(format!(
"{file_size}-{}/{folder}/{file_name}",
profile.path
));
}
keys.push(format!(
"{file_size}-{}/{folder}/{file_name}",
profile.path
));
}
}
}
@@ -901,30 +900,29 @@ impl Profile {
{
let subdirectory =
subdirectory.map_err(io::IOError::from)?.path();
if subdirectory.is_file() {
if let Some(file_name) =
if subdirectory.is_file()
&& let Some(file_name) =
subdirectory.file_name().and_then(|x| x.to_str())
{
let file_size = subdirectory
.metadata()
.map_err(io::IOError::from)?
.len();
{
let file_size = subdirectory
.metadata()
.map_err(io::IOError::from)?
.len();
keys.push(InitialScanFile {
path: format!(
"{}/{folder}/{}",
self.path,
file_name.trim_end_matches(".disabled")
),
file_name: file_name.to_string(),
project_type,
size: file_size,
cache_key: format!(
"{file_size}-{}/{folder}/{file_name}",
self.path
),
});
}
keys.push(InitialScanFile {
path: format!(
"{}/{folder}/{}",
self.path,
file_name.trim_end_matches(".disabled")
),
file_name: file_name.to_string(),
project_type,
size: file_size,
cache_key: format!(
"{file_size}-{}/{folder}/{file_name}",
self.path
),
});
}
}
}

View File

@@ -254,7 +254,7 @@ where
}
#[tracing::instrument(skip(bytes, semaphore))]
pub async fn write<'a>(
pub async fn write(
path: &Path,
bytes: &[u8],
semaphore: &IoSemaphore,

View File

@@ -191,22 +191,21 @@ async fn get_all_autoinstalled_jre_path() -> Result<HashSet<PathBuf>, JREError>
let mut jre_paths = HashSet::new();
let base_path = state.directories.java_versions_dir();
if base_path.is_dir() {
if let Ok(dir) = std::fs::read_dir(base_path) {
for entry in dir.flatten() {
let file_path = entry.path().join("bin");
if base_path.is_dir()
&& let Ok(dir) = std::fs::read_dir(base_path)
{
for entry in dir.flatten() {
let file_path = entry.path().join("bin");
if let Ok(contents) =
std::fs::read_to_string(file_path.clone())
if let Ok(contents) = std::fs::read_to_string(file_path.clone())
{
let entry = entry.path().join(contents);
jre_paths.insert(entry);
} else {
#[cfg(not(target_os = "macos"))]
{
let entry = entry.path().join(contents);
jre_paths.insert(entry);
} else {
#[cfg(not(target_os = "macos"))]
{
let file_path = file_path.join(JAVA_BIN);
jre_paths.insert(file_path);
}
let file_path = file_path.join(JAVA_BIN);
jre_paths.insert(file_path);
}
}
}
@@ -300,20 +299,20 @@ pub async fn check_java_at_filepath(path: &Path) -> crate::Result<JavaVersion> {
}
// Extract version info from it
if let Some(arch) = java_arch {
if let Some(version) = java_version {
if let Ok(version) = extract_java_version(version) {
let path = java.to_string_lossy().to_string();
return Ok(JavaVersion {
parsed_version: version,
path,
version: version.to_string(),
architecture: arch.to_string(),
});
}
return Err(JREError::InvalidJREVersion(version.to_owned()).into());
if let Some(arch) = java_arch
&& let Some(version) = java_version
{
if let Ok(version) = extract_java_version(version) {
let path = java.to_string_lossy().to_string();
return Ok(JavaVersion {
parsed_version: version,
path,
version: version.to_string(),
architecture: arch.to_string(),
});
}
return Err(JREError::InvalidJREVersion(version.to_owned()).into());
}
Err(JREError::FailedJavaCheck(java).into())

View File

@@ -33,12 +33,11 @@ pub fn is_feature_supported_in(
if part_version == part_first_release {
continue;
}
if let Ok(part_version) = part_version.parse::<u32>() {
if let Ok(part_first_release) = part_first_release.parse::<u32>() {
if part_version > part_first_release {
return true;
}
}
if let Ok(part_version) = part_version.parse::<u32>()
&& let Ok(part_first_release) = part_first_release.parse::<u32>()
&& part_version > part_first_release
{
return true;
}
}
false

View File

@@ -169,6 +169,8 @@ import _TerminalSquareIcon from './icons/terminal-square.svg?component'
import _TestIcon from './icons/test.svg?component'
import _TextQuoteIcon from './icons/text-quote.svg?component'
import _TimerIcon from './icons/timer.svg?component'
import _ToggleLeftIcon from './icons/toggle-left.svg?component'
import _ToggleRightIcon from './icons/toggle-right.svg?component'
import _TransferIcon from './icons/transfer.svg?component'
import _TrashIcon from './icons/trash.svg?component'
import _TriangleAlertIcon from './icons/triangle-alert.svg?component'
@@ -362,6 +364,8 @@ export const TerminalSquareIcon = _TerminalSquareIcon
export const TestIcon = _TestIcon
export const TextQuoteIcon = _TextQuoteIcon
export const TimerIcon = _TimerIcon
export const ToggleLeftIcon = _ToggleLeftIcon
export const ToggleRightIcon = _ToggleRightIcon
export const TransferIcon = _TransferIcon
export const TrashIcon = _TrashIcon
export const TriangleAlertIcon = _TriangleAlertIcon

View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-toggle-left-icon lucide-toggle-left"><circle cx="9" cy="12" r="3"/><rect width="20" height="14" x="2" y="5" rx="7"/></svg>


View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-toggle-right-icon lucide-toggle-right"><circle cx="15" cy="12" r="3"/><rect width="20" height="14" x="2" y="5" rx="7"/></svg>


13445
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,2 +1,2 @@
[toolchain]
channel = "1.88.0"
channel = "1.89.0"