Merge commit 'df1499047ccc8f39d756d5beba60651237aca1c0' into beta
@@ -1,41 +1,67 @@
|
||||
!macro NSIS_HOOK_POSTINSTALL
|
||||
SetShellVarContext current
|
||||
|
||||
IfFileExists "$LOCALAPPDATA\${PRODUCTNAME}\theseus_gui.exe" file_found file_not_found
file_found:
Delete "$LOCALAPPDATA\${PRODUCTNAME}\theseus_gui.exe"

Delete "$LOCALAPPDATA\${PRODUCTNAME}\uninstall.exe"
RMDir "$LOCALAPPDATA\${PRODUCTNAME}"

!insertmacro DeleteAppUserModelId

; Remove start menu shortcut
!insertmacro MUI_STARTMENU_GETFOLDER Application $AppStartMenuFolder
!insertmacro IsShortcutTarget "$SMPROGRAMS\$AppStartMenuFolder\${PRODUCTNAME}.lnk" "$LOCALAPPDATA\${PRODUCTNAME}\theseus_gui.exe"
Pop $0
${If} $0 = 1
!insertmacro UnpinShortcut "$SMPROGRAMS\$AppStartMenuFolder\${PRODUCTNAME}.lnk"
Delete "$SMPROGRAMS\$AppStartMenuFolder\${PRODUCTNAME}.lnk"
RMDir "$SMPROGRAMS\$AppStartMenuFolder"
${EndIf}
!insertmacro IsShortcutTarget "$SMPROGRAMS\${PRODUCTNAME}.lnk" "$LOCALAPPDATA\${PRODUCTNAME}\theseus_gui.exe"
Pop $0
${If} $0 = 1
!insertmacro UnpinShortcut "$SMPROGRAMS\${PRODUCTNAME}.lnk"
Delete "$SMPROGRAMS\${PRODUCTNAME}.lnk"
${EndIf}

!insertmacro IsShortcutTarget "$DESKTOP\${PRODUCTNAME}.lnk" "$LOCALAPPDATA\${PRODUCTNAME}\theseus_gui.exe"
Pop $0
${If} $0 = 1
!insertmacro UnpinShortcut "$DESKTOP\${PRODUCTNAME}.lnk"
Delete "$DESKTOP\${PRODUCTNAME}.lnk"
${EndIf}
|
||||
|
||||
DeleteRegKey HKCU "${UNINSTKEY}"
|
||||
|
||||
goto end_of_test ;<== important for not continuing on the else branch
|
||||
file_not_found:
|
||||
end_of_test:
|
||||
; https://nsis.sourceforge.io/ShellExecWait
|
||||
!macro ShellExecWait verb app param workdir show exitoutvar ;only app and show must be != "", every thing else is optional
|
||||
#define SEE_MASK_NOCLOSEPROCESS 0x40
|
||||
System::Store S
|
||||
!if "${NSIS_PTR_SIZE}" > 4
|
||||
!define /ReDef /math SYSSIZEOF_SHELLEXECUTEINFO 14 * ${NSIS_PTR_SIZE}
|
||||
!else ifndef SYSSIZEOF_SHELLEXECUTEINFO
|
||||
!define SYSSIZEOF_SHELLEXECUTEINFO 60
|
||||
!endif
|
||||
System::Call '*(&i${SYSSIZEOF_SHELLEXECUTEINFO})i.r0'
|
||||
System::Call '*$0(i ${SYSSIZEOF_SHELLEXECUTEINFO},i 0x40,p $hwndparent,t "${verb}",t $\'${app}$\',t $\'${param}$\',t "${workdir}",i ${show})p.r0'
|
||||
System::Call 'shell32::ShellExecuteEx(t)(pr0)i.r1 ?e' ; (t) to trigger A/W selection
|
||||
${If} $1 <> 0
|
||||
System::Call '*$0(is,i,p,p,p,p,p,p,p,p,p,p,p,p,p.r1)' ;stack value not really used, just a fancy pop ;)
|
||||
System::Call 'kernel32::WaitForSingleObject(pr1,i-1)'
|
||||
System::Call 'kernel32::GetExitCodeProcess(pr1,*i.s)'
|
||||
System::Call 'kernel32::CloseHandle(pr1)'
|
||||
${EndIf}
|
||||
System::Free $0
|
||||
!if "${exitoutvar}" == ""
|
||||
pop $0
|
||||
!endif
|
||||
System::Store L
|
||||
!if "${exitoutvar}" != ""
|
||||
pop ${exitoutvar}
|
||||
!endif
|
||||
!macroend
|
||||
|
||||
; --------------------------------------------------------------------------------
|
||||
|
||||
Var /GLOBAL OldInstallDir
|
||||
|
||||
!macro NSIS_HOOK_PREINSTALL
|
||||
SetShellVarContext all
|
||||
${If} ${FileExists} "$SMPROGRAMS\${PRODUCTNAME}.lnk"
|
||||
UserInfo::GetAccountType
|
||||
Pop $0
|
||||
${If} $0 != "Admin"
|
||||
MessageBox MB_ICONINFORMATION|MB_OK "An old installation of the Modrinth App was detected that requires administrator permission to update. An administrator prompt will appear shortly."
|
||||
${EndIf}
|
||||
|
||||
ReadRegStr $4 SHCTX "${MANUPRODUCTKEY}" ""
|
||||
ReadRegStr $R1 SHCTX "${UNINSTKEY}" "UninstallString"
|
||||
|
||||
ReadRegStr $OldInstallDir SHCTX "${UNINSTKEY}" "InstallLocation"
|
||||
StrCpy $OldInstallDir $OldInstallDir "" 1
|
||||
StrCpy $OldInstallDir $OldInstallDir -1 ""
|
||||
|
||||
DetailPrint "Executing $R1"
|
||||
!insertmacro ShellExecWait "runas" '$R1' '/P _?=$4' "" ${SW_SHOW} $3
|
||||
${If} $3 <> 0
|
||||
SetErrorLevel $3
|
||||
MessageBox MB_ICONEXCLAMATION|MB_OK "Failed to uninstall old global installation"
|
||||
Abort
|
||||
${EndIf}
|
||||
${EndIf}
|
||||
SetShellVarContext current
|
||||
!macroend
|
||||
|
||||
!macro NSIS_HOOK_POSTINSTALL
|
||||
!insertmacro IsShortcutTarget "$DESKTOP\${PRODUCTNAME}.lnk" "$OldInstallDir\${MAINBINARYNAME}.exe"
|
||||
Pop $0
|
||||
${If} $0 = 1
|
||||
!insertmacro SetShortcutTarget "$DESKTOP\${PRODUCTNAME}.lnk" "$INSTDIR\${MAINBINARYNAME}.exe"
|
||||
Return
|
||||
${EndIf}
|
||||
!macroend
|
||||
|
||||
@@ -122,16 +122,13 @@ pub async fn login<R: Runtime>(
|
||||
.url()?
|
||||
.as_str()
|
||||
.starts_with("https://login.live.com/oauth20_desktop.srf")
|
||||
{
|
||||
if let Some((_, code)) =
|
||||
&& let Some((_, code)) =
|
||||
window.url()?.query_pairs().find(|x| x.0 == "code")
|
||||
{
|
||||
window.close()?;
|
||||
let val =
|
||||
minecraft_auth::finish_login(&code.clone(), flow).await?;
|
||||
{
|
||||
window.close()?;
|
||||
let val = minecraft_auth::finish_login(&code.clone(), flow).await?;
|
||||
|
||||
return Ok(Some(val));
|
||||
}
|
||||
return Ok(Some(val));
|
||||
}
|
||||
|
||||
tokio::time::sleep(std::time::Duration::from_millis(50)).await;
|
||||
|
||||
@@ -103,11 +103,11 @@ pub async fn should_disable_mouseover() -> bool {
|
||||
// We try to match the version to 12.2 or higher. If it doesn't match the pattern, or is lower, we default to the CSS with mouseover disabled for safety
|
||||
if let tauri_plugin_os::Version::Semantic(major, minor, _) =
|
||||
tauri_plugin_os::version()
|
||||
&& major >= 12
|
||||
&& minor >= 3
|
||||
{
|
||||
if major >= 12 && minor >= 3 {
|
||||
// macOS version is 12.3 or higher, so we allow mouseover
|
||||
return false;
|
||||
}
|
||||
// macOS version is 12.3 or higher, so we allow mouseover
|
||||
return false;
|
||||
}
|
||||
true
|
||||
} else {
|
||||
|
||||
@@ -243,10 +243,10 @@ fn main() {
|
||||
});
|
||||
|
||||
#[cfg(not(target_os = "linux"))]
|
||||
if let Some(window) = app.get_window("main") {
|
||||
if let Err(e) = window.set_shadow(true) {
|
||||
tracing::warn!("Failed to set window shadow: {e}");
|
||||
}
|
||||
if let Some(window) = app.get_window("main")
|
||||
&& let Err(e) = window.set_shadow(true)
|
||||
{
|
||||
tracing::warn!("Failed to set window shadow: {e}");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
|
||||
@@ -15,7 +15,7 @@
|
||||
"icon": ["icons/128x128.png", "icons/128x128@2x.png", "icons/icon.icns", "icons/icon.ico"],
|
||||
"windows": {
|
||||
"nsis": {
|
||||
"installMode": "perMachine",
|
||||
"installMode": "currentUser",
|
||||
"installerHooks": "./nsis/hooks.nsi"
|
||||
}
|
||||
},
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# syntax=docker/dockerfile:1
|
||||
|
||||
FROM rust:1.88.0 AS build
|
||||
FROM rust:1.89.0 AS build
|
||||
|
||||
WORKDIR /usr/src/daedalus
|
||||
COPY . .
|
||||
|
||||
@@ -506,27 +506,25 @@ async fn fetch(
|
||||
|
||||
return Ok(lib);
|
||||
}
|
||||
} else if let Some(url) = &lib.url {
|
||||
if !url.is_empty() {
|
||||
insert_mirrored_artifact(
|
||||
&lib.name,
|
||||
None,
|
||||
vec![
|
||||
url.clone(),
|
||||
"https://libraries.minecraft.net/"
|
||||
.to_string(),
|
||||
"https://maven.creeperhost.net/"
|
||||
.to_string(),
|
||||
maven_url.to_string(),
|
||||
],
|
||||
false,
|
||||
mirror_artifacts,
|
||||
)?;
|
||||
} else if let Some(url) = &lib.url
|
||||
&& !url.is_empty()
|
||||
{
|
||||
insert_mirrored_artifact(
|
||||
&lib.name,
|
||||
None,
|
||||
vec![
|
||||
url.clone(),
|
||||
"https://libraries.minecraft.net/".to_string(),
|
||||
"https://maven.creeperhost.net/".to_string(),
|
||||
maven_url.to_string(),
|
||||
],
|
||||
false,
|
||||
mirror_artifacts,
|
||||
)?;
|
||||
|
||||
lib.url = Some(format_url("maven/"));
|
||||
lib.url = Some(format_url("maven/"));
|
||||
|
||||
return Ok(lib);
|
||||
}
|
||||
return Ok(lib);
|
||||
}
|
||||
|
||||
// Other libraries are generally available in the "maven" directory of the installer. If they are
|
||||
|
||||
@@ -93,22 +93,22 @@ async fn main() -> Result<()> {
|
||||
.ok()
|
||||
.and_then(|x| x.parse::<bool>().ok())
|
||||
.unwrap_or(false)
|
||||
&& let Ok(token) = dotenvy::var("CLOUDFLARE_TOKEN")
|
||||
&& let Ok(zone_id) = dotenvy::var("CLOUDFLARE_ZONE_ID")
|
||||
{
|
||||
if let Ok(token) = dotenvy::var("CLOUDFLARE_TOKEN") {
|
||||
if let Ok(zone_id) = dotenvy::var("CLOUDFLARE_ZONE_ID") {
|
||||
let cache_clears = upload_files
|
||||
let cache_clears = upload_files
|
||||
.into_iter()
|
||||
.map(|x| format_url(&x.0))
|
||||
.chain(
|
||||
mirror_artifacts
|
||||
.into_iter()
|
||||
.map(|x| format_url(&x.0))
|
||||
.chain(
|
||||
mirror_artifacts
|
||||
.into_iter()
|
||||
.map(|x| format_url(&format!("maven/{}", x.0))),
|
||||
)
|
||||
.collect::<Vec<_>>();
|
||||
.map(|x| format_url(&format!("maven/{}", x.0))),
|
||||
)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
// Cloudflare ratelimits cache clears to 500 files per request
|
||||
for chunk in cache_clears.chunks(500) {
|
||||
REQWEST_CLIENT.post(format!("https://api.cloudflare.com/client/v4/zones/{zone_id}/purge_cache"))
|
||||
// Cloudflare ratelimits cache clears to 500 files per request
|
||||
for chunk in cache_clears.chunks(500) {
|
||||
REQWEST_CLIENT.post(format!("https://api.cloudflare.com/client/v4/zones/{zone_id}/purge_cache"))
|
||||
.bearer_auth(&token)
|
||||
.json(&serde_json::json!({
|
||||
"files": chunk
|
||||
@@ -128,8 +128,6 @@ async fn main() -> Result<()> {
|
||||
item: "cloudflare clear cache".to_string(),
|
||||
}
|
||||
})?;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -167,20 +167,18 @@ pub async fn download_file(
|
||||
let bytes = x.bytes().await;
|
||||
|
||||
if let Ok(bytes) = bytes {
|
||||
if let Some(sha1) = sha1 {
|
||||
if &*sha1_async(bytes.clone()).await? != sha1 {
|
||||
if attempt <= 3 {
|
||||
continue;
|
||||
} else {
|
||||
return Err(
|
||||
crate::ErrorKind::ChecksumFailure {
|
||||
hash: sha1.to_string(),
|
||||
url: url.to_string(),
|
||||
tries: attempt,
|
||||
}
|
||||
.into(),
|
||||
);
|
||||
if let Some(sha1) = sha1
|
||||
&& &*sha1_async(bytes.clone()).await? != sha1
|
||||
{
|
||||
if attempt <= 3 {
|
||||
continue;
|
||||
} else {
|
||||
return Err(crate::ErrorKind::ChecksumFailure {
|
||||
hash: sha1.to_string(),
|
||||
url: url.to_string(),
|
||||
tries: attempt,
|
||||
}
|
||||
.into());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -3,77 +3,61 @@ title: Labrinth (API)
|
||||
description: Guide for contributing to Modrinth's backend
|
||||
---
|
||||
|
||||
This project is part of our [monorepo](https://github.com/modrinth/code). You can find it in the `apps/labrinth` directory.
|
||||
This project is part of our [monorepo](https://github.com/modrinth/code). You can find it in the `apps/labrinth` directory. The instructions below assume that you have switched your working directory to the `apps/labrinth` subdirectory.
|
||||
|
||||
[labrinth] is the Rust-based backend serving Modrinth's API with the help of the [Actix](https://actix.rs) framework. To get started with a labrinth instance, install docker, docker-compose (which comes with Docker), and [Rust]. The initial startup can be done simply with the command `docker-compose up`, or with `docker compose up` (Compose V2 and later). That will deploy a PostgreSQL database on port 5432 and a MeiliSearch instance on port 7700. To run the API itself, you'll need to use the `cargo run` command, this will deploy the API on port 8000.
|
||||
[labrinth] is the Rust-based backend serving Modrinth's API with the help of the [Actix](https://actix.rs) framework. To get started with a labrinth instance, install Docker, docker-compose (which comes with Docker), and [Rust]. The initial startup can be done simply with the command `docker-compose up`, or with `docker compose up` (Compose V2 and later). That will deploy a PostgreSQL database on port 5432, a MeiliSearch instance on port 7700, and a [Mailpit](https://mailpit.axllent.org/) SMTP server on port 1025, with a web UI for inspecting sent emails on port 8025. To run the API itself, use the `cargo run` command, which will serve the API on port 8000.
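As a quick sketch, a first run on a fresh checkout could therefore look like this (assuming your working directory is `apps/labrinth` and the default ports above are free):

```sh
# Terminal 1: start PostgreSQL (5432), MeiliSearch (7700), and Mailpit (1025/8025)
docker compose up

# Terminal 2: build and serve the API on port 8000
cargo run
```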
|
||||
|
||||
To get a basic configuration, copy the `.env.local` file to `.env`. Now, you'll have to install the sqlx CLI, which can be done with cargo:
|
||||
|
||||
```bash
|
||||
cargo install --git https://github.com/launchbadge/sqlx sqlx-cli --no-default-features --features postgres,rustls
|
||||
```sh
|
||||
cargo install sqlx-cli --no-default-features --features mysql,sqlite,postgres,rustls,completions
|
||||
```
|
||||
|
||||
From there, you can create the database and perform all database migrations with one simple command:
|
||||
From there, you can create the database and set up its schema with one simple command:
|
||||
|
||||
```bash
|
||||
sqlx database setup
|
||||
```sh
|
||||
cargo sqlx database setup
|
||||
```
|
||||
|
||||
To enable labrinth to create a project, you need to add two things.
|
||||
To enable labrinth to create projects and serve useful metadata to the frontend build scripts, you'll need to seed the database with several key entities:
|
||||
|
||||
1. An entry in the `loaders` table.
|
||||
2. An entry in the `loaders_project_types` table.
|
||||
1. Categories, in the `categories` table.
|
||||
2. Loaders and their fields, in the `loaders`, `loader_fields`, `loader_field_enums`, `loader_field_enum_values`, and `loader_fields_loaders` tables.
|
||||
3. Project types and their allowed loaders and games, in the `project_types`, `loaders_project_types`, and `loaders_project_types_games` tables.
|
||||
4. Optionally, to moderate projects from the frontend, an admin user, in the `users` table.
|
||||
|
||||
A minimal setup can be done from the command line with [psql](https://www.postgresql.org/docs/current/app-psql.html):
|
||||
The most convenient way to do this seeding is with the [psql](https://www.postgresql.org/docs/current/app-psql.html) command line tool and the pre-existing seed data fixture. This fixture was generated by dumping the official staging environment database at a specific point in time, and defines an admin user with email `admin@modrinth.invalid` and password `admin`:
|
||||
|
||||
```bash
|
||||
psql --host=localhost --port=5432 -U <username, default is labrinth> -W
|
||||
```sh
|
||||
source .env
|
||||
psql "$DATABASE_URL" < fixtures/labrinth-seed-data-202508052143.sql
|
||||
```
|
||||
|
||||
The default password for the database is `labrinth`. Once you've connected, run
|
||||
|
||||
```sql
|
||||
INSERT INTO loaders VALUES (0, 'placeholder_loader');
|
||||
INSERT INTO loaders_project_types VALUES (0, 1); -- modloader id, supported type id
|
||||
INSERT INTO categories VALUES (0, 'placeholder_category', 1); -- category id, category, project type id
|
||||
```
|
||||
|
||||
This will initialize your database with a modloader called 'placeholder_loader', with id 0, marked as supporting mods only. It will also create a category called 'placeholder_category' that is marked as supporting mods only.
|
||||
If you would like 'placeholder_loader' to be marked as supporting modpacks too, run
|
||||
|
||||
```sql
|
||||
INSERT INTO loaders_project_types VALUES (0, 2); -- modloader id, supported type id
|
||||
```
|
||||
|
||||
If you would like 'placeholder_category' to be marked as supporting modpacks too, run
|
||||
|
||||
```sql
|
||||
INSERT INTO categories VALUES (0, 'placeholder_category', 2); -- category id, category, project type id
|
||||
```
|
||||
|
||||
You can find more example SQL statements for seeding the database in the `apps/labrinth/tests/files/dummy_data.sql` file.
|
||||
You can find more example SQL statements for seeding the database in the `tests/files/dummy_data.sql` file.
|
||||
|
||||
The majority of configuration is done at runtime using [dotenvy](https://crates.io/crates/dotenvy) and the `.env` file. Each of the variables and what they do can be found in the dropdown below. Additionally, there are three command line options that control how labrinth manages its MeiliSearch search indices.
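These options are passed through Cargo after a `--` separator. For example, a usage sketch of the `--skip-first-index` flag documented below, which avoids redundant indexing while iterating locally:

```sh
cargo run -- --skip-first-index
```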
|
||||
|
||||
During development, you might notice that changes made directly to entities in the PostgreSQL database do not seem to take effect. This is often because the Redis cache still holds outdated data. To ensure your updates are reflected, clear the cache by e.g. running `redis-cli FLUSHALL`, which will force Labrinth to fetch the latest data from the database the next time it is needed.
|
||||
During development, you might notice that changes made directly to entities in the PostgreSQL database do not seem to take effect. This is often because the Redis cache still holds outdated data. To ensure your updates are reflected, clear the cache by e.g. running `redis-cli FLUSHALL`, which will force labrinth to fetch the latest data from the database the next time it is needed.
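For instance, when Redis runs through the Compose setup (the service name is assumed to be `labrinth-redis`, matching the provided `REDIS_URL`, and the standard Redis image ships `redis-cli`), the cache can be flushed with:

```sh
docker compose exec labrinth-redis redis-cli FLUSHALL
```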
|
||||
|
||||
You can also start labrinth and its backing services at once using `docker compose --profile with-labrinth up`, which will build and start labrinth through its Docker image as if it was yet another service container. To have that container be automatically rebuilt during development as changes to the source code are made, add the `--watch` flag, which enables [Compose Watch](https://docs.docker.com/compose/how-tos/file-watch/). Keep in mind, however, that Compose Watch is bound to be slower than other similar solutions that work outside of a container, particularly on Windows or macOS, where Docker runs in a virtual machine.
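Putting those flags together, the watch-enabled invocation is simply:

```sh
docker compose --profile with-labrinth up --watch
```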
|
||||
|
||||
<details>
|
||||
<summary>.env variables & command line options</summary>
|
||||
|
||||
#### Basic configuration
|
||||
|
||||
`DEBUG`: Whether debugging tools should be enabled
|
||||
`RUST_LOG`: Specifies what information to log, from rust's [`env-logger`](https://github.com/env-logger-rs/env_logger); a reasonable default is `info,sqlx::query=warn`
|
||||
`SITE_URL`: The main URL to be used for CORS
|
||||
`CDN_URL`: The publicly accessible base URL for files uploaded to the CDN
|
||||
`MODERATION_DISCORD_WEBHOOK`: The URL for a Discord webhook where projects pending approval will be sent
|
||||
`CLOUDFLARE_INTEGRATION`: Whether labrinth should integrate with Cloudflare's spam protection
|
||||
`DATABASE_URL`: The URL for the PostgreSQL database
|
||||
`DATABASE_MIN_CONNECTIONS`: The minimum number of concurrent connections allowed to the database at the same time
|
||||
`DATABASE_MAX_CONNECTIONS`: The maximum number of concurrent connections allowed to the database at the same time
|
||||
`MEILISEARCH_ADDR`: The URL for the MeiliSearch instance used for search
|
||||
`MEILISEARCH_KEY`: The name that MeiliSearch is given
|
||||
`BIND_ADDR`: The bind address for the server. Supports both IPv4 and IPv6
|
||||
`DEBUG`: Whether debugging tools should be enabled
|
||||
`RUST_LOG`: Specifies what information to log, from rust's [`env-logger`](https://github.com/env-logger-rs/env_logger); a reasonable default is `info,sqlx::query=warn`
|
||||
`SITE_URL`: The main URL to be used for CORS
|
||||
`CDN_URL`: The publicly accessible base URL for files uploaded to the CDN
|
||||
`MODERATION_DISCORD_WEBHOOK`: The URL for a Discord webhook where projects pending approval will be sent
|
||||
`CLOUDFLARE_INTEGRATION`: Whether labrinth should integrate with Cloudflare's spam protection
|
||||
`DATABASE_URL`: The URL for the PostgreSQL database, including its username, password, host, port, and database name
|
||||
`DATABASE_MIN_CONNECTIONS`: The minimum number of concurrent connections allowed to the database at the same time
|
||||
`DATABASE_MAX_CONNECTIONS`: The maximum number of concurrent connections allowed to the database at the same time
|
||||
`MEILISEARCH_ADDR`: The URL for the MeiliSearch instance used for search
|
||||
`MEILISEARCH_KEY`: The master key used to authenticate against the MeiliSearch instance
|
||||
`BIND_ADDR`: The bind address for the server. Supports both IPv4 and IPv6
|
||||
`MOCK_FILE_PATH`: The path used to store uploaded files; this has no default value and will panic if unspecified
|
||||
`SMTP_USERNAME`: The username used to authenticate with the SMTP server
|
||||
`SMTP_PASSWORD`: The password associated with the `SMTP_USERNAME` for SMTP authentication
|
||||
@@ -90,7 +74,7 @@ The S3 configuration options are fairly self-explanatory in name, so here's simp
|
||||
|
||||
#### Search, OAuth, and miscellaneous options
|
||||
|
||||
`LOCAL_INDEX_INTERVAL`: The interval, in seconds, at which the local database is reindexed for searching. Defaults to `3600` seconds (1 hour).
|
||||
`LOCAL_INDEX_INTERVAL`: The interval, in seconds, at which the local database is reindexed for searching. Defaults to `3600` seconds (1 hour).
|
||||
`VERSION_INDEX_INTERVAL`: The interval, in seconds, at which versions are reindexed for searching. Defaults to `1800` seconds (30 minutes).
|
||||
|
||||
The OAuth configuration options are fairly self-explanatory. For help setting up authentication, please contact us on [Discord].
|
||||
@@ -99,8 +83,8 @@ The OAuth configuration options are fairly self-explanatory. For help setting up
|
||||
|
||||
#### Command line options
|
||||
|
||||
`--skip-first-index`: Skips indexing the local database on startup. This is useful to prevent doing unnecessary work when frequently restarting.
|
||||
`--reconfigure-indices`: Resets the MeiliSearch settings for the search indices and exits.
|
||||
`--skip-first-index`: Skips indexing the local database on startup. This is useful to prevent doing unnecessary work when frequently restarting.
|
||||
`--reconfigure-indices`: Resets the MeiliSearch settings for the search indices and exits.
|
||||
`--reset-indices`: Resets the MeiliSearch indices and exits; this clears all previously indexed mods.
|
||||
|
||||
</details>
|
||||
@@ -109,14 +93,13 @@ The OAuth configuration options are fairly self-explanatory. For help setting up
|
||||
|
||||
If you're prepared to contribute by submitting a pull request, ensure you have met the following criteria:
|
||||
|
||||
- `cargo fmt` has been run.
|
||||
- `cargo clippy` has been run.
|
||||
- `cargo check` has been run.
|
||||
- `cargo fmt --all` has been run.
|
||||
- `cargo clippy --all-targets` has been run.
|
||||
- `cargo sqlx prepare` has been run.
|
||||
|
||||
> Note: If you encounter issues with `sqlx` saying 'no queries found' after running `cargo sqlx prepare`, you may need to ensure the installed version of `sqlx-cli` matches the current version of `sqlx` used [in labrinth](https://github.com/modrinth/labrinth/blob/master/Cargo.toml).
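In that situation, one fix (a sketch; substitute the `sqlx` version actually pinned in the workspace `Cargo.toml`) is to reinstall the CLI at the matching version:

```sh
cargo install sqlx-cli --version <sqlx version from Cargo.toml> --no-default-features --features postgres,rustls
```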
|
||||
|
||||
[Discord]: https://discord.modrinth.com
|
||||
[GitHub]: https://github.com/modrinth
|
||||
[labrinth]: https://github.com/modrinth/labrinth
|
||||
[labrinth]: https://github.com/modrinth/code/tree/main/apps/labrinth
|
||||
[Rust]: https://www.rust-lang.org/tools/install
|
||||
|
||||
@@ -50,13 +50,35 @@
|
||||
</div>
|
||||
<div v-else-if="generatedMessage">
|
||||
<div>
|
||||
<ButtonStyled>
|
||||
<button class="mb-2" @click="useSimpleEditor = !useSimpleEditor">
|
||||
<template v-if="!useSimpleEditor">
|
||||
<ToggleLeftIcon aria-hidden="true" />
|
||||
Use simple mode
|
||||
</template>
|
||||
<template v-else>
|
||||
<ToggleRightIcon aria-hidden="true" />
|
||||
Use advanced mode
|
||||
</template>
|
||||
</button>
|
||||
</ButtonStyled>
|
||||
<MarkdownEditor
|
||||
v-if="!useSimpleEditor"
|
||||
v-model="message"
|
||||
:max-height="400"
|
||||
placeholder="No message generated."
|
||||
:disabled="false"
|
||||
:heading-buttons="false"
|
||||
/>
|
||||
<textarea
|
||||
v-else
|
||||
v-model="message"
|
||||
type="text"
|
||||
class="bg-bg-input h-[400px] w-full rounded-lg border border-solid border-divider px-3 py-2 font-mono text-base"
|
||||
placeholder="No message generated."
|
||||
autocomplete="off"
|
||||
@input="persistState"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
<div v-else-if="isModpackPermissionsStage">
|
||||
@@ -324,6 +346,8 @@ import {
|
||||
CheckIcon,
|
||||
KeyboardIcon,
|
||||
EyeOffIcon,
|
||||
ToggleLeftIcon,
|
||||
ToggleRightIcon,
|
||||
} from "@modrinth/assets";
|
||||
import {
|
||||
checklist,
|
||||
@@ -368,7 +392,6 @@ import {
|
||||
type Stage,
|
||||
finalPermissionMessages,
|
||||
} from "@modrinth/moderation";
|
||||
import * as prettier from "prettier";
|
||||
import ModpackPermissionsFlow from "./ModpackPermissionsFlow.vue";
|
||||
import KeybindsModal from "./ChecklistKeybindsModal.vue";
|
||||
import { useModerationStore } from "~/store/moderation.ts";
|
||||
@@ -392,6 +415,7 @@ const isModpackPermissionsStage = computed(() => {
|
||||
return currentStageObj.value.id === "modpack-permissions";
|
||||
});
|
||||
|
||||
const useSimpleEditor = ref(false);
|
||||
const message = ref("");
|
||||
const generatedMessage = ref(false);
|
||||
const loadingMessage = ref(false);
|
||||
@@ -1118,19 +1142,7 @@ async function generateMessage() {
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
const formattedMessage = await prettier.format(fullMessage, {
|
||||
parser: "markdown",
|
||||
printWidth: 80,
|
||||
proseWrap: "always",
|
||||
tabWidth: 2,
|
||||
useTabs: false,
|
||||
});
|
||||
message.value = formattedMessage;
|
||||
} catch (formattingError) {
|
||||
console.warn("Failed to format markdown, using original:", formattingError);
|
||||
message.value = fullMessage;
|
||||
}
|
||||
message.value = fullMessage;
|
||||
|
||||
generatedMessage.value = true;
|
||||
} catch (error) {
|
||||
|
||||
@@ -100,7 +100,6 @@ import {
|
||||
ScaleIcon,
|
||||
} from "@modrinth/assets";
|
||||
import { defineMessages, useVIntl } from "@vintl/vintl";
|
||||
import { useLocalStorage } from "@vueuse/core";
|
||||
import ConfettiExplosion from "vue-confetti-explosion";
|
||||
import Fuse from "fuse.js";
|
||||
import ModerationQueueCard from "~/components/ui/moderation/ModerationQueueCard.vue";
|
||||
@@ -215,7 +214,7 @@ watch(
|
||||
},
|
||||
);
|
||||
|
||||
const currentFilterType = useLocalStorage("moderation-current-filter-type", () => "All projects");
|
||||
const currentFilterType = ref("All projects");
|
||||
const filterTypes: readonly string[] = readonly([
|
||||
"All projects",
|
||||
"Modpacks",
|
||||
@@ -226,7 +225,7 @@ const filterTypes: readonly string[] = readonly([
|
||||
"Shaders",
|
||||
]);
|
||||
|
||||
const currentSortType = useLocalStorage("moderation-current-sort-type", () => "Oldest");
|
||||
const currentSortType = ref("Oldest");
|
||||
const sortTypes: readonly string[] = readonly(["Oldest", "Newest"]);
|
||||
|
||||
const currentPage = ref(1);
|
||||
@@ -287,8 +286,10 @@ const typeFiltered = computed(() => {
|
||||
const projectType = filterMap[currentFilterType.value];
|
||||
if (!projectType) return baseFiltered.value;
|
||||
|
||||
return baseFiltered.value.filter((queueItem) =>
|
||||
queueItem.project.project_types.includes(projectType),
|
||||
return baseFiltered.value.filter(
|
||||
(queueItem) =>
|
||||
queueItem.project.project_types.length > 0 &&
|
||||
queueItem.project.project_types[0] === projectType,
|
||||
);
|
||||
});
|
||||
|
||||
|
||||
@@ -72,7 +72,6 @@
|
||||
import { DropdownSelect, Button, Pagination } from "@modrinth/ui";
|
||||
import { XIcon, SearchIcon, SortAscIcon, SortDescIcon, FilterIcon } from "@modrinth/assets";
|
||||
import { defineMessages, useVIntl } from "@vintl/vintl";
|
||||
import { useLocalStorage } from "@vueuse/core";
|
||||
import type { Report } from "@modrinth/utils";
|
||||
import Fuse from "fuse.js";
|
||||
import type { ExtendedReport } from "@modrinth/moderation";
|
||||
@@ -170,10 +169,10 @@ watch(
|
||||
},
|
||||
);
|
||||
|
||||
const currentFilterType = useLocalStorage("moderation-reports-filter-type", () => "All");
|
||||
const currentFilterType = ref("All");
|
||||
const filterTypes: readonly string[] = readonly(["All", "Unread", "Read"]);
|
||||
|
||||
const currentSortType = useLocalStorage("moderation-reports-sort-type", () => "Oldest");
|
||||
const currentSortType = ref("Oldest");
|
||||
const sortTypes: readonly string[] = readonly(["Oldest", "Newest"]);
|
||||
|
||||
const currentPage = ref(1);
|
||||
|
||||
129
apps/labrinth/.env.docker-compose
Normal file
@@ -0,0 +1,129 @@
|
||||
DEBUG=true
|
||||
RUST_LOG=info,sqlx::query=warn
|
||||
SENTRY_DSN=none
|
||||
|
||||
SITE_URL=http://localhost:3000
|
||||
# This CDN URL matches the local storage backend set below, which uses MOCK_FILE_PATH
|
||||
CDN_URL=file:///tmp/modrinth
|
||||
LABRINTH_ADMIN_KEY=feedbeef
|
||||
RATE_LIMIT_IGNORE_KEY=feedbeef
|
||||
|
||||
DATABASE_URL=postgresql://labrinth:labrinth@labrinth-postgres/labrinth
|
||||
DATABASE_MIN_CONNECTIONS=0
|
||||
DATABASE_MAX_CONNECTIONS=16
|
||||
|
||||
MEILISEARCH_ADDR=http://labrinth-meilisearch:7700
|
||||
MEILISEARCH_KEY=modrinth
|
||||
|
||||
REDIS_URL=redis://labrinth-redis
|
||||
REDIS_MAX_CONNECTIONS=10000
|
||||
|
||||
BIND_ADDR=0.0.0.0:8000
|
||||
SELF_ADDR=http://labrinth:8000
|
||||
|
||||
MODERATION_SLACK_WEBHOOK=
|
||||
PUBLIC_DISCORD_WEBHOOK=
|
||||
CLOUDFLARE_INTEGRATION=false
|
||||
|
||||
STORAGE_BACKEND=local
|
||||
MOCK_FILE_PATH=/tmp/modrinth
|
||||
|
||||
S3_PUBLIC_BUCKET_NAME=none
|
||||
S3_PUBLIC_USES_PATH_STYLE_BUCKET=false
|
||||
S3_PUBLIC_REGION=none
|
||||
S3_PUBLIC_URL=none
|
||||
S3_PUBLIC_ACCESS_TOKEN=none
|
||||
S3_PUBLIC_SECRET=none
|
||||
|
||||
S3_PRIVATE_BUCKET_NAME=none
|
||||
S3_PRIVATE_USES_PATH_STYLE_BUCKET=false
|
||||
S3_PRIVATE_REGION=none
|
||||
S3_PRIVATE_URL=none
|
||||
S3_PRIVATE_ACCESS_TOKEN=none
|
||||
S3_PRIVATE_SECRET=none
|
||||
|
||||
# 1 hour
|
||||
LOCAL_INDEX_INTERVAL=3600
|
||||
# 30 minutes
|
||||
VERSION_INDEX_INTERVAL=1800
|
||||
|
||||
RATE_LIMIT_IGNORE_IPS='["127.0.0.1"]'
|
||||
|
||||
WHITELISTED_MODPACK_DOMAINS='["cdn.modrinth.com", "github.com", "raw.githubusercontent.com"]'
|
||||
|
||||
ALLOWED_CALLBACK_URLS='["localhost", ".modrinth.com", "127.0.0.1"]'
|
||||
|
||||
GITHUB_CLIENT_ID=none
|
||||
GITHUB_CLIENT_SECRET=none
|
||||
|
||||
GITLAB_CLIENT_ID=none
|
||||
GITLAB_CLIENT_SECRET=none
|
||||
|
||||
DISCORD_CLIENT_ID=none
|
||||
DISCORD_CLIENT_SECRET=none
|
||||
|
||||
MICROSOFT_CLIENT_ID=none
|
||||
MICROSOFT_CLIENT_SECRET=none
|
||||
|
||||
GOOGLE_CLIENT_ID=none
|
||||
GOOGLE_CLIENT_SECRET=none
|
||||
|
||||
PAYPAL_API_URL=https://api-m.sandbox.paypal.com/v1/
|
||||
PAYPAL_WEBHOOK_ID=none
|
||||
PAYPAL_CLIENT_ID=none
|
||||
PAYPAL_CLIENT_SECRET=none
|
||||
PAYPAL_NVP_USERNAME=none
|
||||
PAYPAL_NVP_PASSWORD=none
|
||||
PAYPAL_NVP_SIGNATURE=none
|
||||
|
||||
STEAM_API_KEY=none
|
||||
|
||||
TREMENDOUS_API_URL=https://testflight.tremendous.com/api/v2/
|
||||
TREMENDOUS_API_KEY=none
|
||||
TREMENDOUS_PRIVATE_KEY=none
|
||||
TREMENDOUS_CAMPAIGN_ID=none
|
||||
|
||||
HCAPTCHA_SECRET=none
|
||||
|
||||
SMTP_FROM_NAME=Modrinth
|
||||
SMTP_FROM_ADDRESS=no-reply@mail.modrinth.com
|
||||
SMTP_USERNAME=
|
||||
SMTP_PASSWORD=
|
||||
SMTP_HOST=labrinth-mail
|
||||
SMTP_PORT=1025
|
||||
SMTP_TLS=none
|
||||
|
||||
SITE_VERIFY_EMAIL_PATH=auth/verify-email
|
||||
SITE_RESET_PASSWORD_PATH=auth/reset-password
|
||||
SITE_BILLING_PATH=none
|
||||
|
||||
SENDY_URL=none
|
||||
SENDY_LIST_ID=none
|
||||
SENDY_API_KEY=none
|
||||
|
||||
ANALYTICS_ALLOWED_ORIGINS='["http://127.0.0.1:3000", "http://localhost:3000", "https://modrinth.com", "https://www.modrinth.com", "*"]'
|
||||
|
||||
CLICKHOUSE_REPLICATED=false
|
||||
CLICKHOUSE_URL=http://labrinth-clickhouse:8123
|
||||
CLICKHOUSE_USER=default
|
||||
CLICKHOUSE_PASSWORD=default
|
||||
CLICKHOUSE_DATABASE=staging_ariadne
|
||||
|
||||
MAXMIND_LICENSE_KEY=none
|
||||
|
||||
FLAME_ANVIL_URL=none
|
||||
|
||||
STRIPE_API_KEY=none
|
||||
STRIPE_WEBHOOK_SECRET=none
|
||||
|
||||
ADITUDE_API_KEY=none
|
||||
|
||||
PYRO_API_KEY=none
|
||||
|
||||
BREX_API_URL=https://platform.brexapis.com/v2/
|
||||
BREX_API_KEY=none
|
||||
|
||||
DELPHI_URL=none
|
||||
DELPHI_SLACK_WEBHOOK=none
|
||||
|
||||
ARCHON_URL=none
|
||||
@@ -87,11 +87,11 @@ HCAPTCHA_SECRET=none
|
||||
|
||||
SMTP_FROM_NAME=Modrinth
|
||||
SMTP_FROM_ADDRESS=no-reply@mail.modrinth.com
|
||||
SMTP_USERNAME=none
|
||||
SMTP_PASSWORD=none
|
||||
SMTP_HOST=none
|
||||
SMTP_PORT=465
|
||||
SMTP_TLS=tls
|
||||
SMTP_USERNAME=
|
||||
SMTP_PASSWORD=
|
||||
SMTP_HOST=localhost
|
||||
SMTP_PORT=1025
|
||||
SMTP_TLS=none
|
||||
|
||||
SITE_VERIFY_EMAIL_PATH=auth/verify-email
|
||||
SITE_RESET_PASSWORD_PATH=auth/reset-password
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# syntax=docker/dockerfile:1
|
||||
|
||||
FROM rust:1.88.0 AS build
|
||||
FROM rust:1.89.0 AS build
|
||||
|
||||
WORKDIR /usr/src/labrinth
|
||||
COPY . .
|
||||
|
||||
@@ -14,7 +14,7 @@ fn main() {
|
||||
let git_hash = String::from_utf8(output.stdout)
|
||||
.expect("valid UTF-8 output from `git` invocation");
|
||||
|
||||
println!("cargo::rerun-if-changed=.git/HEAD");
|
||||
println!("cargo::rerun-if-changed=../../.git/HEAD");
|
||||
println!("cargo::rustc-env=GIT_HASH={}", git_hash.trim());
|
||||
|
||||
let timedate_fmt = Local::now().format("%F @ %I:%M %p");
|
||||
|
||||
1108
apps/labrinth/fixtures/labrinth-seed-data-202508052143.sql
Normal file
File diff suppressed because it is too large
@@ -5,7 +5,7 @@
|
||||
"lint": "cargo fmt --check && cargo clippy --all-targets",
|
||||
"fix": "cargo clippy --all-targets --fix --allow-dirty && cargo fmt",
|
||||
"dev": "cargo run",
|
||||
"//": "Labrinth integration tests require a lot of disk space, so in the standard GitHub Actions",
|
||||
"//": "labrinth integration tests require a lot of disk space, so in the standard GitHub Actions",
|
||||
"//": "runners we must remove useless development tools from the base image, which frees up ~20 GiB.",
|
||||
"//": "The command commented out below can be used in CI to debug what is taking up space:",
|
||||
"//": "sudo du -xh --max-depth=4 / | sort -rh | curl -X POST --data-urlencode content@/dev/fd/0 https://api.mclo.gs/1/log",
|
||||
|
||||
@@ -322,12 +322,11 @@ pub async fn is_visible_collection(
|
||||
} else {
|
||||
!collection_data.status.is_hidden()
|
||||
}) && !collection_data.projects.is_empty();
|
||||
if let Some(user) = &user_option {
|
||||
if !authorized
|
||||
&& (user.role.is_mod() || user.id == collection_data.user_id.into())
|
||||
{
|
||||
authorized = true;
|
||||
}
|
||||
if let Some(user) = &user_option
|
||||
&& !authorized
|
||||
&& (user.role.is_mod() || user.id == collection_data.user_id.into())
|
||||
{
|
||||
authorized = true;
|
||||
}
|
||||
Ok(authorized)
|
||||
}
|
||||
@@ -356,10 +355,10 @@ pub async fn filter_visible_collections(
|
||||
|
||||
for collection in check_collections {
|
||||
// Collections are simple: if we are the owner or a mod, we can see it
|
||||
if let Some(user) = user_option {
|
||||
if user.role.is_mod() || user.id == collection.user_id.into() {
|
||||
return_collections.push(collection.into());
|
||||
}
|
||||
if let Some(user) = user_option
|
||||
&& (user.role.is_mod() || user.id == collection.user_id.into())
|
||||
{
|
||||
return_collections.push(collection.into());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -39,7 +39,8 @@ pub fn send_email_raw(
|
||||
let password = dotenvy::var("SMTP_PASSWORD")?;
|
||||
let host = dotenvy::var("SMTP_HOST")?;
|
||||
let port = dotenvy::var("SMTP_PORT")?.parse::<u16>().unwrap_or(465);
|
||||
let creds = Credentials::new(username, password);
|
||||
let creds =
|
||||
(!username.is_empty()).then(|| Credentials::new(username, password));
|
||||
let tls_setting = match dotenvy::var("SMTP_TLS")?.as_str() {
|
||||
"none" => Tls::None,
|
||||
"opportunistic_start_tls" => {
|
||||
@@ -55,13 +56,12 @@ pub fn send_email_raw(
|
||||
}
|
||||
};
|
||||
|
||||
let mailer = SmtpTransport::relay(&host)?
|
||||
.port(port)
|
||||
.tls(tls_setting)
|
||||
.credentials(creds)
|
||||
.build();
|
||||
let mut mailer = SmtpTransport::relay(&host)?.port(port).tls(tls_setting);
|
||||
if let Some(creds) = creds {
|
||||
mailer = mailer.credentials(creds);
|
||||
}
|
||||
|
||||
mailer.send(&email)?;
|
||||
mailer.build().send(&email)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -95,10 +95,10 @@ impl DBFlow {
|
||||
redis: &RedisPool,
|
||||
) -> Result<Option<DBFlow>, DatabaseError> {
|
||||
let flow = Self::get(id, redis).await?;
|
||||
if let Some(flow) = flow.as_ref() {
|
||||
if predicate(flow) {
|
||||
Self::remove(id, redis).await?;
|
||||
}
|
||||
if let Some(flow) = flow.as_ref()
|
||||
&& predicate(flow)
|
||||
{
|
||||
Self::remove(id, redis).await?;
|
||||
}
|
||||
Ok(flow)
|
||||
}
|
||||
|
||||
@@ -801,24 +801,24 @@ impl VersionField {
|
||||
};
|
||||
|
||||
if let Some(count) = countable {
|
||||
if let Some(min) = loader_field.min_val {
|
||||
if count < min {
|
||||
return Err(format!(
|
||||
"Provided value '{v}' for {field_name} is less than the minimum of {min}",
|
||||
v = serde_json::to_string(&value).unwrap_or_default(),
|
||||
field_name = loader_field.field,
|
||||
));
|
||||
}
|
||||
if let Some(min) = loader_field.min_val
|
||||
&& count < min
|
||||
{
|
||||
return Err(format!(
|
||||
"Provided value '{v}' for {field_name} is less than the minimum of {min}",
|
||||
v = serde_json::to_string(&value).unwrap_or_default(),
|
||||
field_name = loader_field.field,
|
||||
));
|
||||
}
|
||||
|
||||
if let Some(max) = loader_field.max_val {
|
||||
if count > max {
|
||||
return Err(format!(
|
||||
"Provided value '{v}' for {field_name} is greater than the maximum of {max}",
|
||||
v = serde_json::to_string(&value).unwrap_or_default(),
|
||||
field_name = loader_field.field,
|
||||
));
|
||||
}
|
||||
if let Some(max) = loader_field.max_val
|
||||
&& count > max
|
||||
{
|
||||
return Err(format!(
|
||||
"Provided value '{v}' for {field_name} is greater than the maximum of {max}",
|
||||
v = serde_json::to_string(&value).unwrap_or_default(),
|
||||
field_name = loader_field.field,
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -483,20 +483,20 @@ impl DBTeamMember {
|
||||
.await?;
|
||||
}
|
||||
|
||||
if let Some(accepted) = new_accepted {
|
||||
if accepted {
|
||||
sqlx::query!(
|
||||
"
|
||||
if let Some(accepted) = new_accepted
|
||||
&& accepted
|
||||
{
|
||||
sqlx::query!(
|
||||
"
|
||||
UPDATE team_members
|
||||
SET accepted = TRUE
|
||||
WHERE (team_id = $1 AND user_id = $2)
|
||||
",
|
||||
id as DBTeamId,
|
||||
user_id as DBUserId,
|
||||
)
|
||||
.execute(&mut **transaction)
|
||||
.await?;
|
||||
}
|
||||
id as DBTeamId,
|
||||
user_id as DBUserId,
|
||||
)
|
||||
.execute(&mut **transaction)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if let Some(payouts_split) = new_payouts_split {
|
||||
|
||||
@@ -353,10 +353,10 @@ impl RedisPool {
|
||||
};
|
||||
|
||||
for (idx, key) in fetch_ids.into_iter().enumerate() {
|
||||
if let Some(locked) = results.get(idx) {
|
||||
if locked.is_none() {
|
||||
continue;
|
||||
}
|
||||
if let Some(locked) = results.get(idx)
|
||||
&& locked.is_none()
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some((key, raw_key)) = ids.remove(&key) {
|
||||
|
||||
@@ -71,7 +71,7 @@ pub fn app_setup(
|
||||
enable_background_tasks: bool,
|
||||
) -> LabrinthConfig {
|
||||
info!(
|
||||
"Starting Labrinth on {}",
|
||||
"Starting labrinth on {}",
|
||||
dotenvy::var("BIND_ADDR").unwrap()
|
||||
);
|
||||
|
||||
|
||||
@@ -71,7 +71,7 @@ async fn main() -> std::io::Result<()> {
|
||||
|
||||
if args.run_background_task.is_none() {
|
||||
info!(
|
||||
"Starting Labrinth on {}",
|
||||
"Starting labrinth on {}",
|
||||
dotenvy::var("BIND_ADDR").unwrap()
|
||||
);
|
||||
|
||||
|
||||
@@ -334,18 +334,14 @@ impl From<Version> for LegacyVersion {
|
||||
// the v2 loaders are whatever the corresponding loader fields are
|
||||
let mut loaders =
|
||||
data.loaders.into_iter().map(|l| l.0).collect::<Vec<_>>();
|
||||
if loaders.contains(&"mrpack".to_string()) {
|
||||
if let Some((_, mrpack_loaders)) = data
|
||||
if loaders.contains(&"mrpack".to_string())
|
||||
&& let Some((_, mrpack_loaders)) = data
|
||||
.fields
|
||||
.into_iter()
|
||||
.find(|(key, _)| key == "mrpack_loaders")
|
||||
{
|
||||
if let Ok(mrpack_loaders) =
|
||||
serde_json::from_value(mrpack_loaders)
|
||||
{
|
||||
loaders = mrpack_loaders;
|
||||
}
|
||||
}
|
||||
&& let Ok(mrpack_loaders) = serde_json::from_value(mrpack_loaders)
|
||||
{
|
||||
loaders = mrpack_loaders;
|
||||
}
|
||||
let loaders = loaders.into_iter().map(Loader).collect::<Vec<_>>();
|
||||
|
||||
|
||||
@@ -43,35 +43,33 @@ impl LegacyResultSearchProject {
|
||||
pub fn from(result_search_project: ResultSearchProject) -> Self {
|
||||
let mut categories = result_search_project.categories;
|
||||
categories.extend(result_search_project.loaders.clone());
|
||||
if categories.contains(&"mrpack".to_string()) {
|
||||
if let Some(mrpack_loaders) = result_search_project
|
||||
if categories.contains(&"mrpack".to_string())
|
||||
&& let Some(mrpack_loaders) = result_search_project
|
||||
.project_loader_fields
|
||||
.get("mrpack_loaders")
|
||||
{
|
||||
categories.extend(
|
||||
mrpack_loaders
|
||||
.iter()
|
||||
.filter_map(|c| c.as_str())
|
||||
.map(String::from),
|
||||
);
|
||||
categories.retain(|c| c != "mrpack");
|
||||
}
|
||||
{
|
||||
categories.extend(
|
||||
mrpack_loaders
|
||||
.iter()
|
||||
.filter_map(|c| c.as_str())
|
||||
.map(String::from),
|
||||
);
|
||||
categories.retain(|c| c != "mrpack");
|
||||
}
|
||||
let mut display_categories = result_search_project.display_categories;
|
||||
display_categories.extend(result_search_project.loaders);
|
||||
if display_categories.contains(&"mrpack".to_string()) {
|
||||
if let Some(mrpack_loaders) = result_search_project
|
||||
if display_categories.contains(&"mrpack".to_string())
|
||||
&& let Some(mrpack_loaders) = result_search_project
|
||||
.project_loader_fields
|
||||
.get("mrpack_loaders")
|
||||
{
|
||||
categories.extend(
|
||||
mrpack_loaders
|
||||
.iter()
|
||||
.filter_map(|c| c.as_str())
|
||||
.map(String::from),
|
||||
);
|
||||
display_categories.retain(|c| c != "mrpack");
|
||||
}
|
||||
{
|
||||
categories.extend(
|
||||
mrpack_loaders
|
||||
.iter()
|
||||
.filter_map(|c| c.as_str())
|
||||
.map(String::from),
|
||||
);
|
||||
display_categories.retain(|c| c != "mrpack");
|
||||
}
|
||||
|
||||
// Sort then remove duplicates
|
||||
|
||||
@@ -166,10 +166,10 @@ impl From<ProjectQueryResult> for Project {
|
||||
Ok(spdx_expr) => {
|
||||
let mut vec: Vec<&str> = Vec::new();
|
||||
for node in spdx_expr.iter() {
|
||||
if let spdx::expression::ExprNode::Req(req) = node {
|
||||
if let Some(id) = req.req.license.id() {
|
||||
vec.push(id.full_name);
|
||||
}
|
||||
if let spdx::expression::ExprNode::Req(req) = node
|
||||
&& let Some(id) = req.req.license.id()
|
||||
{
|
||||
vec.push(id.full_name);
|
||||
}
|
||||
}
|
||||
// spdx crate returns AND/OR operations in postfix order
|
||||
|
||||
@@ -51,16 +51,16 @@ impl ProjectPermissions {
|
||||
return Some(ProjectPermissions::all());
|
||||
}
|
||||
|
||||
if let Some(member) = project_team_member {
|
||||
if member.accepted {
|
||||
return Some(member.permissions);
|
||||
}
|
||||
if let Some(member) = project_team_member
|
||||
&& member.accepted
|
||||
{
|
||||
return Some(member.permissions);
|
||||
}
|
||||
|
||||
if let Some(member) = organization_team_member {
|
||||
if member.accepted {
|
||||
return Some(member.permissions);
|
||||
}
|
||||
if let Some(member) = organization_team_member
|
||||
&& member.accepted
|
||||
{
|
||||
return Some(member.permissions);
|
||||
}
|
||||
|
||||
if role.is_mod() {
|
||||
@@ -107,10 +107,10 @@ impl OrganizationPermissions {
|
||||
return Some(OrganizationPermissions::all());
|
||||
}
|
||||
|
||||
if let Some(member) = team_member {
|
||||
if member.accepted {
|
||||
return member.organization_permissions;
|
||||
}
|
||||
if let Some(member) = team_member
|
||||
&& member.accepted
|
||||
{
|
||||
return member.organization_permissions;
|
||||
}
|
||||
if role.is_mod() {
|
||||
return Some(
|
||||
|
||||
@@ -45,17 +45,15 @@ impl MaxMindIndexer {
|
||||
|
||||
if let Ok(entries) = archive.entries() {
|
||||
for mut file in entries.flatten() {
|
||||
if let Ok(path) = file.header().path() {
|
||||
if path.extension().and_then(|x| x.to_str()) == Some("mmdb")
|
||||
{
|
||||
let mut buf = Vec::new();
|
||||
file.read_to_end(&mut buf).unwrap();
|
||||
if let Ok(path) = file.header().path()
|
||||
&& path.extension().and_then(|x| x.to_str()) == Some("mmdb")
|
||||
{
|
||||
let mut buf = Vec::new();
|
||||
file.read_to_end(&mut buf).unwrap();
|
||||
|
||||
let reader =
|
||||
maxminddb::Reader::from_source(buf).unwrap();
|
||||
let reader = maxminddb::Reader::from_source(buf).unwrap();
|
||||
|
||||
return Ok(Some(reader));
|
||||
}
|
||||
return Ok(Some(reader));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -371,8 +371,8 @@ impl AutomatedModerationQueue {
|
||||
for file in
|
||||
files.iter().filter(|x| x.version_id == version.id.into())
|
||||
{
|
||||
if let Some(hash) = file.hashes.get("sha1") {
|
||||
if let Some((index, (sha1, _, file_name, _))) = hashes
|
||||
if let Some(hash) = file.hashes.get("sha1")
|
||||
&& let Some((index, (sha1, _, file_name, _))) = hashes
|
||||
.iter()
|
||||
.enumerate()
|
||||
.find(|(_, (value, _, _, _))| value == hash)
|
||||
@@ -382,7 +382,6 @@ impl AutomatedModerationQueue {
|
||||
|
||||
hashes.remove(index);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -420,12 +419,11 @@ impl AutomatedModerationQueue {
|
||||
.await?;
|
||||
|
||||
for row in rows {
|
||||
if let Some(sha1) = row.sha1 {
|
||||
if let Some((index, (sha1, _, file_name, _))) = hashes.iter().enumerate().find(|(_, (value, _, _, _))| value == &sha1) {
|
||||
if let Some(sha1) = row.sha1
|
||||
&& let Some((index, (sha1, _, file_name, _))) = hashes.iter().enumerate().find(|(_, (value, _, _, _))| value == &sha1) {
|
||||
final_hashes.insert(sha1.clone(), IdentifiedFile { file_name: file_name.clone(), status: ApprovalType::from_string(&row.status).unwrap_or(ApprovalType::Unidentified) });
|
||||
hashes.remove(index);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if hashes.is_empty() {
|
||||
@@ -499,8 +497,8 @@ impl AutomatedModerationQueue {
|
||||
let mut insert_ids = Vec::new();
|
||||
|
||||
for row in rows {
|
||||
if let Some((curse_index, (hash, _flame_id))) = flame_files.iter().enumerate().find(|(_, x)| Some(x.1 as i32) == row.flame_project_id) {
|
||||
if let Some((index, (sha1, _, file_name, _))) = hashes.iter().enumerate().find(|(_, (value, _, _, _))| value == hash) {
|
||||
if let Some((curse_index, (hash, _flame_id))) = flame_files.iter().enumerate().find(|(_, x)| Some(x.1 as i32) == row.flame_project_id)
|
||||
&& let Some((index, (sha1, _, file_name, _))) = hashes.iter().enumerate().find(|(_, (value, _, _, _))| value == hash) {
|
||||
final_hashes.insert(sha1.clone(), IdentifiedFile {
|
||||
file_name: file_name.clone(),
|
||||
status: ApprovalType::from_string(&row.status).unwrap_or(ApprovalType::Unidentified),
|
||||
@@ -512,7 +510,6 @@ impl AutomatedModerationQueue {
|
||||
hashes.remove(index);
|
||||
flame_files.remove(curse_index);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !insert_ids.is_empty() && !insert_hashes.is_empty() {
|
||||
@@ -581,8 +578,8 @@ impl AutomatedModerationQueue {
|
||||
for (sha1, _pack_file, file_name, _mumur2) in hashes {
|
||||
let flame_file = flame_files.iter().find(|x| x.0 == sha1);
|
||||
|
||||
if let Some((_, flame_project_id)) = flame_file {
|
||||
if let Some(project) = flame_projects.iter().find(|x| &x.id == flame_project_id) {
|
||||
if let Some((_, flame_project_id)) = flame_file
|
||||
&& let Some(project) = flame_projects.iter().find(|x| &x.id == flame_project_id) {
|
||||
missing_metadata.flame_files.insert(sha1, MissingMetadataFlame {
|
||||
title: project.name.clone(),
|
||||
file_name,
|
||||
@@ -592,7 +589,6 @@ impl AutomatedModerationQueue {
|
||||
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
missing_metadata.unknown_files.insert(sha1, file_name);
|
||||
}
|
||||
|
||||
@@ -257,31 +257,30 @@ impl PayoutsQueue {
|
||||
)
|
||||
})?;
|
||||
|
||||
if !status.is_success() {
|
||||
if let Some(obj) = value.as_object() {
|
||||
if let Some(array) = obj.get("errors") {
|
||||
#[derive(Deserialize)]
|
||||
struct TremendousError {
|
||||
message: String,
|
||||
}
|
||||
|
||||
let err = serde_json::from_value::<TremendousError>(
|
||||
array.clone(),
|
||||
)
|
||||
.map_err(|_| {
|
||||
ApiError::Payments(
|
||||
"could not retrieve Tremendous error json body"
|
||||
.to_string(),
|
||||
)
|
||||
})?;
|
||||
|
||||
return Err(ApiError::Payments(err.message));
|
||||
if !status.is_success()
|
||||
&& let Some(obj) = value.as_object()
|
||||
{
|
||||
if let Some(array) = obj.get("errors") {
|
||||
#[derive(Deserialize)]
|
||||
struct TremendousError {
|
||||
message: String,
|
||||
}
|
||||
|
||||
return Err(ApiError::Payments(
|
||||
"could not retrieve Tremendous error body".to_string(),
|
||||
));
|
||||
let err =
|
||||
serde_json::from_value::<TremendousError>(array.clone())
|
||||
.map_err(|_| {
|
||||
ApiError::Payments(
|
||||
"could not retrieve Tremendous error json body"
|
||||
.to_string(),
|
||||
)
|
||||
})?;
|
||||
|
||||
return Err(ApiError::Payments(err.message));
|
||||
}
|
||||
|
||||
return Err(ApiError::Payments(
|
||||
"could not retrieve Tremendous error body".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
Ok(serde_json::from_value(value)?)
|
||||
@@ -449,10 +448,10 @@ impl PayoutsQueue {
|
||||
};
|
||||
|
||||
// we do not support interval gift cards with non US based currencies since we cannot do currency conversions properly
|
||||
if let PayoutInterval::Fixed { .. } = method.interval {
|
||||
if !product.currency_codes.contains(&"USD".to_string()) {
|
||||
continue;
|
||||
}
|
||||
if let PayoutInterval::Fixed { .. } = method.interval
|
||||
&& !product.currency_codes.contains(&"USD".to_string())
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
methods.push(method);
|
||||
|
||||
@@ -55,10 +55,10 @@ pub fn jemalloc_memory_stats(
|
||||
) -> Result<(), prometheus::Error> {
|
||||
let allocated_mem = IntGauge::new(
|
||||
"labrinth_memory_allocated",
|
||||
"Labrinth allocated memory",
|
||||
"labrinth allocated memory",
|
||||
)?;
|
||||
let resident_mem =
|
||||
IntGauge::new("labrinth_resident_memory", "Labrinth resident memory")?;
|
||||
IntGauge::new("labrinth_resident_memory", "labrinth resident memory")?;
|
||||
|
||||
registry.register(Box::new(allocated_mem.clone()))?;
|
||||
registry.register(Box::new(resident_mem.clone()))?;
|
||||
|
||||
@@ -286,17 +286,17 @@ pub async fn refund_charge(
|
||||
.upsert(&mut transaction)
|
||||
.await?;
|
||||
|
||||
if body.0.unprovision.unwrap_or(false) {
|
||||
if let Some(subscription_id) = charge.subscription_id {
|
||||
let open_charge =
|
||||
DBCharge::get_open_subscription(subscription_id, &**pool)
|
||||
.await?;
|
||||
if let Some(mut open_charge) = open_charge {
|
||||
open_charge.status = ChargeStatus::Cancelled;
|
||||
open_charge.due = Utc::now();
|
||||
if body.0.unprovision.unwrap_or(false)
|
||||
&& let Some(subscription_id) = charge.subscription_id
|
||||
{
|
||||
let open_charge =
|
||||
DBCharge::get_open_subscription(subscription_id, &**pool)
|
||||
.await?;
|
||||
if let Some(mut open_charge) = open_charge {
|
||||
open_charge.status = ChargeStatus::Cancelled;
|
||||
open_charge.due = Utc::now();
|
||||
|
||||
open_charge.upsert(&mut transaction).await?;
|
||||
}
|
||||
open_charge.upsert(&mut transaction).await?;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -392,17 +392,16 @@ pub async fn edit_subscription(
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(interval) = &edit_subscription.interval {
|
||||
if let Price::Recurring { intervals } = ¤t_price.prices {
|
||||
if let Some(price) = intervals.get(interval) {
|
||||
open_charge.subscription_interval = Some(*interval);
|
||||
open_charge.amount = *price as i64;
|
||||
} else {
|
||||
return Err(ApiError::InvalidInput(
|
||||
"Interval is not valid for this subscription!"
|
||||
.to_string(),
|
||||
));
|
||||
}
|
||||
if let Some(interval) = &edit_subscription.interval
|
||||
&& let Price::Recurring { intervals } = ¤t_price.prices
|
||||
{
|
||||
if let Some(price) = intervals.get(interval) {
|
||||
open_charge.subscription_interval = Some(*interval);
|
||||
open_charge.amount = *price as i64;
|
||||
} else {
|
||||
return Err(ApiError::InvalidInput(
|
||||
"Interval is not valid for this subscription!".to_string(),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1225,38 +1224,36 @@ pub async fn initiate_payment(
|
||||
}
|
||||
};
|
||||
|
||||
if let Price::Recurring { .. } = price_item.prices {
|
||||
if product.unitary {
|
||||
let user_subscriptions =
|
||||
if let Price::Recurring { .. } = price_item.prices
|
||||
&& product.unitary
|
||||
{
|
||||
let user_subscriptions =
|
||||
user_subscription_item::DBUserSubscription::get_all_user(
|
||||
user.id.into(),
|
||||
&**pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let user_products =
|
||||
product_item::DBProductPrice::get_many(
|
||||
&user_subscriptions
|
||||
.iter()
|
||||
.filter(|x| {
|
||||
x.status
|
||||
== SubscriptionStatus::Provisioned
|
||||
})
|
||||
.map(|x| x.price_id)
|
||||
.collect::<Vec<_>>(),
|
||||
&**pool,
|
||||
)
|
||||
.await?;
|
||||
let user_products = product_item::DBProductPrice::get_many(
|
||||
&user_subscriptions
|
||||
.iter()
|
||||
.filter(|x| {
|
||||
x.status == SubscriptionStatus::Provisioned
|
||||
})
|
||||
.map(|x| x.price_id)
|
||||
.collect::<Vec<_>>(),
|
||||
&**pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
if user_products
|
||||
.into_iter()
|
||||
.any(|x| x.product_id == product.id)
|
||||
{
|
||||
return Err(ApiError::InvalidInput(
|
||||
"You are already subscribed to this product!"
|
||||
.to_string(),
|
||||
));
|
||||
}
|
||||
if user_products
|
||||
.into_iter()
|
||||
.any(|x| x.product_id == product.id)
|
||||
{
|
||||
return Err(ApiError::InvalidInput(
|
||||
"You are already subscribed to this product!"
|
||||
.to_string(),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2004,38 +2001,36 @@ pub async fn stripe_webhook(
|
||||
EventType::PaymentMethodAttached => {
|
||||
if let EventObject::PaymentMethod(payment_method) =
|
||||
event.data.object
|
||||
{
|
||||
if let Some(customer_id) =
|
||||
&& let Some(customer_id) =
|
||||
payment_method.customer.map(|x| x.id())
|
||||
{
|
||||
let customer = stripe::Customer::retrieve(
|
||||
&stripe_client,
|
||||
&customer_id,
|
||||
&[],
|
||||
)
|
||||
.await?;
|
||||
|
||||
if customer
|
||||
.invoice_settings
|
||||
.is_none_or(|x| x.default_payment_method.is_none())
|
||||
{
|
||||
let customer = stripe::Customer::retrieve(
|
||||
stripe::Customer::update(
|
||||
&stripe_client,
|
||||
&customer_id,
|
||||
&[],
|
||||
UpdateCustomer {
|
||||
invoice_settings: Some(
|
||||
CustomerInvoiceSettings {
|
||||
default_payment_method: Some(
|
||||
payment_method.id.to_string(),
|
||||
),
|
||||
..Default::default()
|
||||
},
|
||||
),
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
if customer
|
||||
.invoice_settings
|
||||
.is_none_or(|x| x.default_payment_method.is_none())
|
||||
{
|
||||
stripe::Customer::update(
|
||||
&stripe_client,
|
||||
&customer_id,
|
||||
UpdateCustomer {
|
||||
invoice_settings: Some(
|
||||
CustomerInvoiceSettings {
|
||||
default_payment_method: Some(
|
||||
payment_method.id.to_string(),
|
||||
),
|
||||
..Default::default()
|
||||
},
|
||||
),
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -79,13 +79,12 @@ impl TempUser {
|
||||
file_host: &Arc<dyn FileHost + Send + Sync>,
|
||||
redis: &RedisPool,
|
||||
) -> Result<crate::database::models::DBUserId, AuthenticationError> {
|
||||
if let Some(email) = &self.email {
|
||||
if crate::database::models::DBUser::get_by_email(email, client)
|
||||
if let Some(email) = &self.email
|
||||
&& crate::database::models::DBUser::get_by_email(email, client)
|
||||
.await?
|
||||
.is_some()
|
||||
{
|
||||
return Err(AuthenticationError::DuplicateUser);
|
||||
}
|
||||
{
|
||||
return Err(AuthenticationError::DuplicateUser);
|
||||
}
|
||||
|
||||
let user_id =
|
||||
@@ -1269,19 +1268,19 @@ pub async fn delete_auth_provider(
.update_user_id(user.id.into(), None, &mut transaction)
.await?;

if delete_provider.provider != AuthProvider::PayPal {
if let Some(email) = user.email {
send_email(
email,
"Authentication method removed",
&format!(
"When logging into Modrinth, you can no longer log in using the {} authentication provider.",
delete_provider.provider.as_str()
),
"If you did not make this change, please contact us immediately through our support channels on Discord or via email (support@modrinth.com).",
None,
)?;
}
if delete_provider.provider != AuthProvider::PayPal
&& let Some(email) = user.email
{
send_email(
email,
"Authentication method removed",
&format!(
"When logging into Modrinth, you can no longer log in using the {} authentication provider.",
delete_provider.provider.as_str()
),
"If you did not make this change, please contact us immediately through our support channels on Discord or via email (support@modrinth.com).",
None,
)?;
}

transaction.commit().await?;

@@ -189,17 +189,16 @@ pub async fn get_project_meta(
.iter()
.find(|x| Some(x.1.id as i32) == row.flame_project_id)
.map(|x| x.0.clone())
&& let Some(val) = merged.flame_files.remove(&sha1)
{
if let Some(val) = merged.flame_files.remove(&sha1) {
merged.identified.insert(
sha1,
IdentifiedFile {
file_name: val.file_name.clone(),
status: ApprovalType::from_string(&row.status)
.unwrap_or(ApprovalType::Unidentified),
},
);
}
merged.identified.insert(
sha1,
IdentifiedFile {
file_name: val.file_name.clone(),
status: ApprovalType::from_string(&row.status)
.unwrap_or(ApprovalType::Unidentified),
},
);
}
}


@@ -185,69 +185,69 @@ pub async fn edit_pat(
|
||||
)
|
||||
.await?;
|
||||
|
||||
if let Some(pat) = pat {
|
||||
if pat.user_id == user.id.into() {
|
||||
let mut transaction = pool.begin().await?;
|
||||
if let Some(pat) = pat
|
||||
&& pat.user_id == user.id.into()
|
||||
{
|
||||
let mut transaction = pool.begin().await?;
|
||||
|
||||
if let Some(scopes) = &info.scopes {
|
||||
if scopes.is_restricted() {
|
||||
return Err(ApiError::InvalidInput(
|
||||
"Invalid scopes requested!".to_string(),
|
||||
));
|
||||
}
|
||||
if let Some(scopes) = &info.scopes {
|
||||
if scopes.is_restricted() {
|
||||
return Err(ApiError::InvalidInput(
|
||||
"Invalid scopes requested!".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
sqlx::query!(
|
||||
"
|
||||
UPDATE pats
|
||||
SET scopes = $1
|
||||
WHERE id = $2
|
||||
",
|
||||
scopes.bits() as i64,
|
||||
pat.id.0
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
}
|
||||
if let Some(name) = &info.name {
|
||||
sqlx::query!(
|
||||
"
|
||||
scopes.bits() as i64,
|
||||
pat.id.0
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
}
|
||||
if let Some(name) = &info.name {
|
||||
sqlx::query!(
|
||||
"
|
||||
UPDATE pats
|
||||
SET name = $1
|
||||
WHERE id = $2
|
||||
",
|
||||
name,
|
||||
pat.id.0
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
name,
|
||||
pat.id.0
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
}
|
||||
if let Some(expires) = &info.expires {
|
||||
if expires < &Utc::now() {
|
||||
return Err(ApiError::InvalidInput(
|
||||
"Expire date must be in the future!".to_string(),
|
||||
));
|
||||
}
|
||||
if let Some(expires) = &info.expires {
|
||||
if expires < &Utc::now() {
|
||||
return Err(ApiError::InvalidInput(
|
||||
"Expire date must be in the future!".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
sqlx::query!(
|
||||
"
|
||||
UPDATE pats
|
||||
SET expires = $1
|
||||
WHERE id = $2
|
||||
",
|
||||
expires,
|
||||
pat.id.0
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
}
|
||||
|
||||
transaction.commit().await?;
|
||||
database::models::pat_item::DBPersonalAccessToken::clear_cache(
|
||||
vec![(Some(pat.id), Some(pat.access_token), Some(pat.user_id))],
|
||||
&redis,
|
||||
expires,
|
||||
pat.id.0
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
}
|
||||
|
||||
transaction.commit().await?;
|
||||
database::models::pat_item::DBPersonalAccessToken::clear_cache(
|
||||
vec![(Some(pat.id), Some(pat.access_token), Some(pat.user_id))],
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
Ok(HttpResponse::NoContent().finish())
|
||||
@@ -276,21 +276,21 @@ pub async fn delete_pat(
)
.await?;

if let Some(pat) = pat {
if pat.user_id == user.id.into() {
let mut transaction = pool.begin().await?;
database::models::pat_item::DBPersonalAccessToken::remove(
pat.id,
&mut transaction,
)
.await?;
transaction.commit().await?;
database::models::pat_item::DBPersonalAccessToken::clear_cache(
vec![(Some(pat.id), Some(pat.access_token), Some(pat.user_id))],
&redis,
)
.await?;
}
if let Some(pat) = pat
&& pat.user_id == user.id.into()
{
let mut transaction = pool.begin().await?;
database::models::pat_item::DBPersonalAccessToken::remove(
pat.id,
&mut transaction,
)
.await?;
transaction.commit().await?;
database::models::pat_item::DBPersonalAccessToken::clear_cache(
vec![(Some(pat.id), Some(pat.access_token), Some(pat.user_id))],
&redis,
)
.await?;
}

Ok(HttpResponse::NoContent().finish())

@@ -185,21 +185,21 @@ pub async fn delete(

let session = DBSession::get(info.into_inner().0, &**pool, &redis).await?;

if let Some(session) = session {
if session.user_id == current_user.id.into() {
let mut transaction = pool.begin().await?;
DBSession::remove(session.id, &mut transaction).await?;
transaction.commit().await?;
DBSession::clear_cache(
vec![(
Some(session.id),
Some(session.session),
Some(session.user_id),
)],
&redis,
)
.await?;
}
if let Some(session) = session
&& session.user_id == current_user.id.into()
{
let mut transaction = pool.begin().await?;
DBSession::remove(session.id, &mut transaction).await?;
transaction.commit().await?;
DBSession::clear_cache(
vec![(
Some(session.id),
Some(session.session),
Some(session.user_id),
)],
&redis,
)
.await?;
}

Ok(HttpResponse::NoContent().body(""))

@@ -401,14 +401,13 @@ async fn broadcast_to_known_local_friends(
friend.user_id
};

if friend.accepted {
if let Some(socket_ids) =
if friend.accepted
&& let Some(socket_ids) =
sockets.sockets_by_user_id.get(&friend_id.into())
{
for socket_id in socket_ids.iter() {
if let Some(socket) = sockets.sockets.get(&socket_id) {
let _ = send_message(socket.value(), &message).await;
}
{
for socket_id in socket_ids.iter() {
if let Some(socket) = sockets.sockets.get(&socket_id) {
let _ = send_message(socket.value(), &message).await;
}
}
}

@@ -512,6 +512,7 @@ pub async fn project_edit(
moderation_message_body: v2_new_project.moderation_message_body,
monetization_status: v2_new_project.monetization_status,
side_types_migration_review_status: None, // Not to be exposed in v2
loader_fields: HashMap::new(), // Loader fields are not a thing in v2
};

// This returns 204 or failure so we don't need to do anything with it

@@ -387,17 +387,16 @@ pub async fn revenue_get(
.map(|x| (x.to_string(), HashMap::new()))
.collect::<HashMap<_, _>>();
for value in payouts_values {
if let Some(mod_id) = value.mod_id {
if let Some(amount) = value.amount_sum {
if let Some(interval_start) = value.interval_start {
let id_string = to_base62(mod_id as u64);
if !hm.contains_key(&id_string) {
hm.insert(id_string.clone(), HashMap::new());
}
if let Some(hm) = hm.get_mut(&id_string) {
hm.insert(interval_start.timestamp(), amount);
}
}
if let Some(mod_id) = value.mod_id
&& let Some(amount) = value.amount_sum
&& let Some(interval_start) = value.interval_start
{
let id_string = to_base62(mod_id as u64);
if !hm.contains_key(&id_string) {
hm.insert(id_string.clone(), HashMap::new());
}
if let Some(hm) = hm.get_mut(&id_string) {
hm.insert(interval_start.timestamp(), amount);
}
}
}

@@ -192,10 +192,10 @@ pub async fn collection_get(
.map(|x| x.1)
.ok();

if let Some(data) = collection_data {
if is_visible_collection(&data, &user_option, false).await? {
return Ok(HttpResponse::Ok().json(Collection::from(data)));
}
if let Some(data) = collection_data
&& is_visible_collection(&data, &user_option, false).await?
{
return Ok(HttpResponse::Ok().json(Collection::from(data)));
}
Err(ApiError::NotFound)
}

@@ -536,11 +536,9 @@ pub async fn create_payout(
Some(true),
)
.await
&& let Some(data) = res.items.first()
{
if let Some(data) = res.items.first() {
payout_item.platform_id =
Some(data.payout_item_id.clone());
}
payout_item.platform_id = Some(data.payout_item_id.clone());
}
}


@@ -11,7 +11,7 @@ use crate::database::redis::RedisPool;
use crate::database::{self, models as db_models};
use crate::file_hosting::{FileHost, FileHostPublicity};
use crate::models;
use crate::models::ids::ProjectId;
use crate::models::ids::{ProjectId, VersionId};
use crate::models::images::ImageContext;
use crate::models::notifications::NotificationBody;
use crate::models::pats::Scopes;
@@ -182,10 +182,10 @@ pub async fn project_get(
.map(|x| x.1)
.ok();

if let Some(data) = project_data {
if is_visible_project(&data.inner, &user_option, &pool, false).await? {
return Ok(HttpResponse::Ok().json(Project::from(data)));
}
if let Some(data) = project_data
&& is_visible_project(&data.inner, &user_option, &pool, false).await?
{
return Ok(HttpResponse::Ok().json(Project::from(data)));
}
Err(ApiError::NotFound)
}
@@ -250,6 +250,8 @@ pub struct EditProject {
pub monetization_status: Option<MonetizationStatus>,
pub side_types_migration_review_status:
Option<SideTypesMigrationReviewStatus>,
#[serde(flatten)]
pub loader_fields: HashMap<String, serde_json::Value>,
}

#[allow(clippy::too_many_arguments)]
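The new `#[serde(flatten)]` map on `EditProject` is what lets arbitrary loader-specific fields ride along in the same PATCH body as the named fields. A small sketch of how that flattening behaves (the JSON field names are hypothetical; assumes `serde` with the derive feature and `serde_json` as dependencies):

```rust
use std::collections::HashMap;

use serde::Deserialize;

// Trimmed-down stand-in for the EditProject shape above.
#[derive(Deserialize, Debug)]
struct EditProjectSketch {
    name: Option<String>,
    #[serde(flatten)]
    loader_fields: HashMap<String, serde_json::Value>,
}

fn main() {
    // "game_versions" and "client_side" are not named struct fields,
    // so they land in the flattened map instead of being rejected.
    let body = r#"{"name": "My Mod", "game_versions": ["1.21"], "client_side": "required"}"#;
    let parsed: EditProjectSketch = serde_json::from_str(body).unwrap();
    assert_eq!(parsed.name.as_deref(), Some("My Mod"));
    assert_eq!(parsed.loader_fields.len(), 2);
    println!("{parsed:?}");
}
```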
@@ -403,34 +405,36 @@ pub async fn project_edit(
|
||||
.await?;
|
||||
}
|
||||
|
||||
if status.is_searchable() && !project_item.inner.webhook_sent {
|
||||
if let Ok(webhook_url) = dotenvy::var("PUBLIC_DISCORD_WEBHOOK") {
|
||||
crate::util::webhook::send_discord_webhook(
|
||||
project_item.inner.id.into(),
|
||||
&pool,
|
||||
&redis,
|
||||
webhook_url,
|
||||
None,
|
||||
)
|
||||
.await
|
||||
.ok();
|
||||
if status.is_searchable()
|
||||
&& !project_item.inner.webhook_sent
|
||||
&& let Ok(webhook_url) = dotenvy::var("PUBLIC_DISCORD_WEBHOOK")
|
||||
{
|
||||
crate::util::webhook::send_discord_webhook(
|
||||
project_item.inner.id.into(),
|
||||
&pool,
|
||||
&redis,
|
||||
webhook_url,
|
||||
None,
|
||||
)
|
||||
.await
|
||||
.ok();
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
sqlx::query!(
|
||||
"
|
||||
UPDATE mods
|
||||
SET webhook_sent = TRUE
|
||||
WHERE id = $1
|
||||
",
|
||||
id as db_ids::DBProjectId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
}
|
||||
id as db_ids::DBProjectId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if user.role.is_mod() {
|
||||
if let Ok(webhook_url) = dotenvy::var("MODERATION_SLACK_WEBHOOK") {
|
||||
crate::util::webhook::send_slack_webhook(
|
||||
if user.role.is_mod()
|
||||
&& let Ok(webhook_url) = dotenvy::var("MODERATION_SLACK_WEBHOOK")
|
||||
{
|
||||
crate::util::webhook::send_slack_webhook(
|
||||
project_item.inner.id.into(),
|
||||
&pool,
|
||||
&redis,
|
||||
@@ -449,7 +453,6 @@ pub async fn project_edit(
|
||||
)
|
||||
.await
|
||||
.ok();
|
||||
}
|
||||
}
|
||||
|
||||
if team_member.is_none_or(|x| !x.accepted) {
|
||||
@@ -692,45 +695,45 @@ pub async fn project_edit(
|
||||
.await?;
|
||||
}
|
||||
|
||||
if let Some(links) = &new_project.link_urls {
|
||||
if !links.is_empty() {
|
||||
if !perms.contains(ProjectPermissions::EDIT_DETAILS) {
|
||||
return Err(ApiError::CustomAuthentication(
|
||||
if let Some(links) = &new_project.link_urls
|
||||
&& !links.is_empty()
|
||||
{
|
||||
if !perms.contains(ProjectPermissions::EDIT_DETAILS) {
|
||||
return Err(ApiError::CustomAuthentication(
|
||||
"You do not have the permissions to edit the links of this project!"
|
||||
.to_string(),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
let ids_to_delete = links.keys().cloned().collect::<Vec<String>>();
|
||||
// Deletes all links from hashmap- either will be deleted or be replaced
|
||||
sqlx::query!(
|
||||
"
|
||||
let ids_to_delete = links.keys().cloned().collect::<Vec<String>>();
|
||||
// Deletes all links from hashmap- either will be deleted or be replaced
|
||||
sqlx::query!(
|
||||
"
|
||||
DELETE FROM mods_links
|
||||
WHERE joining_mod_id = $1 AND joining_platform_id IN (
|
||||
SELECT id FROM link_platforms WHERE name = ANY($2)
|
||||
)
|
||||
",
|
||||
id as db_ids::DBProjectId,
|
||||
&ids_to_delete
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
id as db_ids::DBProjectId,
|
||||
&ids_to_delete
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
for (platform, url) in links {
|
||||
if let Some(url) = url {
|
||||
let platform_id =
|
||||
db_models::categories::LinkPlatform::get_id(
|
||||
platform,
|
||||
&mut *transaction,
|
||||
)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(format!(
|
||||
"Platform {} does not exist.",
|
||||
platform.clone()
|
||||
))
|
||||
})?;
|
||||
sqlx::query!(
|
||||
for (platform, url) in links {
|
||||
if let Some(url) = url {
|
||||
let platform_id = db_models::categories::LinkPlatform::get_id(
|
||||
platform,
|
||||
&mut *transaction,
|
||||
)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(format!(
|
||||
"Platform {} does not exist.",
|
||||
platform.clone()
|
||||
))
|
||||
})?;
|
||||
sqlx::query!(
|
||||
"
|
||||
INSERT INTO mods_links (joining_mod_id, joining_platform_id, url)
|
||||
VALUES ($1, $2, $3)
|
||||
@@ -741,7 +744,6 @@ pub async fn project_edit(
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -870,6 +872,29 @@ pub async fn project_edit(
|
||||
.await?;
|
||||
}
|
||||
|
||||
if !new_project.loader_fields.is_empty() {
|
||||
for version in db_models::DBVersion::get_many(
|
||||
&project_item.versions,
|
||||
&**pool,
|
||||
&redis,
|
||||
)
|
||||
.await?
|
||||
{
|
||||
super::versions::version_edit_helper(
|
||||
req.clone(),
|
||||
(VersionId::from(version.inner.id),),
|
||||
pool.clone(),
|
||||
redis.clone(),
|
||||
super::versions::EditVersion {
|
||||
fields: new_project.loader_fields.clone(),
|
||||
..Default::default()
|
||||
},
|
||||
session_queue.clone(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
}
|
||||
|
||||
// check new description and body for links to associated images
|
||||
// if they no longer exist in the description or body, delete them
|
||||
let checkable_strings: Vec<&str> =
|
||||
@@ -2430,7 +2455,7 @@ pub async fn project_get_organization(
|
||||
organization,
|
||||
team_members,
|
||||
);
|
||||
return Ok(HttpResponse::Ok().json(organization));
|
||||
Ok(HttpResponse::Ok().json(organization))
|
||||
} else {
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
|
||||
@@ -767,12 +767,13 @@ pub async fn edit_team_member(
|
||||
));
|
||||
}
|
||||
|
||||
if let Some(new_permissions) = edit_member.permissions {
|
||||
if !permissions.contains(new_permissions) {
|
||||
return Err(ApiError::InvalidInput(
|
||||
"The new permissions have permissions that you don't have".to_string(),
|
||||
));
|
||||
}
|
||||
if let Some(new_permissions) = edit_member.permissions
|
||||
&& !permissions.contains(new_permissions)
|
||||
{
|
||||
return Err(ApiError::InvalidInput(
|
||||
"The new permissions have permissions that you don't have"
|
||||
.to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
if edit_member.organization_permissions.is_some() {
|
||||
@@ -800,13 +801,12 @@ pub async fn edit_team_member(
|
||||
}
|
||||
|
||||
if let Some(new_permissions) = edit_member.organization_permissions
|
||||
&& !organization_permissions.contains(new_permissions)
|
||||
{
|
||||
if !organization_permissions.contains(new_permissions) {
|
||||
return Err(ApiError::InvalidInput(
|
||||
return Err(ApiError::InvalidInput(
|
||||
"The new organization permissions have permissions that you don't have"
|
||||
.to_string(),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
if edit_member.permissions.is_some()
|
||||
@@ -822,13 +822,13 @@ pub async fn edit_team_member(
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(payouts_split) = edit_member.payouts_split {
|
||||
if payouts_split < Decimal::ZERO || payouts_split > Decimal::from(5000)
|
||||
{
|
||||
return Err(ApiError::InvalidInput(
|
||||
"Payouts split must be between 0 and 5000!".to_string(),
|
||||
));
|
||||
}
|
||||
if let Some(payouts_split) = edit_member.payouts_split
|
||||
&& (payouts_split < Decimal::ZERO
|
||||
|| payouts_split > Decimal::from(5000))
|
||||
{
|
||||
return Err(ApiError::InvalidInput(
|
||||
"Payouts split must be between 0 and 5000!".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
DBTeamMember::edit_team_member(
|
||||
@@ -883,13 +883,13 @@ pub async fn transfer_ownership(
|
||||
DBTeam::get_association(id.into(), &**pool).await?;
|
||||
if let Some(TeamAssociationId::Project(pid)) = team_association_id {
|
||||
let result = DBProject::get_id(pid, &**pool, &redis).await?;
|
||||
if let Some(project_item) = result {
|
||||
if project_item.inner.organization_id.is_some() {
|
||||
return Err(ApiError::InvalidInput(
|
||||
if let Some(project_item) = result
|
||||
&& project_item.inner.organization_id.is_some()
|
||||
{
|
||||
return Err(ApiError::InvalidInput(
|
||||
"You cannot transfer ownership of a project team that is owend by an organization"
|
||||
.to_string(),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -289,36 +289,33 @@ pub async fn thread_get(
|
||||
.await?
|
||||
.1;
|
||||
|
||||
if let Some(mut data) = thread_data {
|
||||
if is_authorized_thread(&data, &user, &pool).await? {
|
||||
let authors = &mut data.members;
|
||||
if let Some(mut data) = thread_data
|
||||
&& is_authorized_thread(&data, &user, &pool).await?
|
||||
{
|
||||
let authors = &mut data.members;
|
||||
|
||||
authors.append(
|
||||
&mut data
|
||||
.messages
|
||||
.iter()
|
||||
.filter_map(|x| {
|
||||
if x.hide_identity && !user.role.is_mod() {
|
||||
None
|
||||
} else {
|
||||
x.author_id
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>(),
|
||||
);
|
||||
authors.append(
|
||||
&mut data
|
||||
.messages
|
||||
.iter()
|
||||
.filter_map(|x| {
|
||||
if x.hide_identity && !user.role.is_mod() {
|
||||
None
|
||||
} else {
|
||||
x.author_id
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>(),
|
||||
);
|
||||
|
||||
let users: Vec<User> = database::models::DBUser::get_many_ids(
|
||||
authors, &**pool, &redis,
|
||||
)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(From::from)
|
||||
.collect();
|
||||
let users: Vec<User> =
|
||||
database::models::DBUser::get_many_ids(authors, &**pool, &redis)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(From::from)
|
||||
.collect();
|
||||
|
||||
return Ok(
|
||||
HttpResponse::Ok().json(Thread::from(data, users, &user))
|
||||
);
|
||||
}
|
||||
return Ok(HttpResponse::Ok().json(Thread::from(data, users, &user)));
|
||||
}
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
@@ -454,33 +451,32 @@ pub async fn thread_send_message(
|
||||
)
|
||||
.await?;
|
||||
|
||||
if let Some(project) = project {
|
||||
if project.inner.status != ProjectStatus::Processing
|
||||
&& user.role.is_mod()
|
||||
{
|
||||
let members =
|
||||
database::models::DBTeamMember::get_from_team_full(
|
||||
project.inner.team_id,
|
||||
&**pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
|
||||
NotificationBuilder {
|
||||
body: NotificationBody::ModeratorMessage {
|
||||
thread_id: thread.id.into(),
|
||||
message_id: id.into(),
|
||||
project_id: Some(project.inner.id.into()),
|
||||
report_id: None,
|
||||
},
|
||||
}
|
||||
.insert_many(
|
||||
members.into_iter().map(|x| x.user_id).collect(),
|
||||
&mut transaction,
|
||||
if let Some(project) = project
|
||||
&& project.inner.status != ProjectStatus::Processing
|
||||
&& user.role.is_mod()
|
||||
{
|
||||
let members =
|
||||
database::models::DBTeamMember::get_from_team_full(
|
||||
project.inner.team_id,
|
||||
&**pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
|
||||
NotificationBuilder {
|
||||
body: NotificationBody::ModeratorMessage {
|
||||
thread_id: thread.id.into(),
|
||||
message_id: id.into(),
|
||||
project_id: Some(project.inner.id.into()),
|
||||
report_id: None,
|
||||
},
|
||||
}
|
||||
.insert_many(
|
||||
members.into_iter().map(|x| x.user_id).collect(),
|
||||
&mut transaction,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
} else if let Some(report_id) = thread.report_id {
|
||||
let report = database::models::report_item::DBReport::get(
|
||||
|
||||
@@ -522,10 +522,10 @@ async fn version_create_inner(
|
||||
.fetch_optional(pool)
|
||||
.await?;
|
||||
|
||||
if let Some(project_status) = project_status {
|
||||
if project_status.status == ProjectStatus::Processing.as_str() {
|
||||
moderation_queue.projects.insert(project_id.into());
|
||||
}
|
||||
if let Some(project_status) = project_status
|
||||
&& project_status.status == ProjectStatus::Processing.as_str()
|
||||
{
|
||||
moderation_queue.projects.insert(project_id.into());
|
||||
}
|
||||
|
||||
Ok(HttpResponse::Ok().json(response))
|
||||
@@ -871,16 +871,16 @@ pub async fn upload_file(
|
||||
ref format,
|
||||
ref files,
|
||||
} = validation_result
|
||||
&& dependencies.is_empty()
|
||||
{
|
||||
if dependencies.is_empty() {
|
||||
let hashes: Vec<Vec<u8>> = format
|
||||
.files
|
||||
.iter()
|
||||
.filter_map(|x| x.hashes.get(&PackFileHash::Sha1))
|
||||
.map(|x| x.as_bytes().to_vec())
|
||||
.collect();
|
||||
let hashes: Vec<Vec<u8>> = format
|
||||
.files
|
||||
.iter()
|
||||
.filter_map(|x| x.hashes.get(&PackFileHash::Sha1))
|
||||
.map(|x| x.as_bytes().to_vec())
|
||||
.collect();
|
||||
|
||||
let res = sqlx::query!(
|
||||
let res = sqlx::query!(
|
||||
"
|
||||
SELECT v.id version_id, v.mod_id project_id, h.hash hash FROM hashes h
|
||||
INNER JOIN files f on h.file_id = f.id
|
||||
@@ -892,45 +892,44 @@ pub async fn upload_file(
|
||||
.fetch_all(&mut **transaction)
|
||||
.await?;
|
||||
|
||||
for file in &format.files {
|
||||
if let Some(dep) = res.iter().find(|x| {
|
||||
Some(&*x.hash)
|
||||
== file
|
||||
.hashes
|
||||
.get(&PackFileHash::Sha1)
|
||||
.map(|x| x.as_bytes())
|
||||
}) {
|
||||
dependencies.push(DependencyBuilder {
|
||||
project_id: Some(models::DBProjectId(dep.project_id)),
|
||||
version_id: Some(models::DBVersionId(dep.version_id)),
|
||||
file_name: None,
|
||||
dependency_type: DependencyType::Embedded.to_string(),
|
||||
});
|
||||
} else if let Some(first_download) = file.downloads.first() {
|
||||
dependencies.push(DependencyBuilder {
|
||||
project_id: None,
|
||||
version_id: None,
|
||||
file_name: Some(
|
||||
first_download
|
||||
.rsplit('/')
|
||||
.next()
|
||||
.unwrap_or(first_download)
|
||||
.to_string(),
|
||||
),
|
||||
dependency_type: DependencyType::Embedded.to_string(),
|
||||
});
|
||||
}
|
||||
for file in &format.files {
|
||||
if let Some(dep) = res.iter().find(|x| {
|
||||
Some(&*x.hash)
|
||||
== file
|
||||
.hashes
|
||||
.get(&PackFileHash::Sha1)
|
||||
.map(|x| x.as_bytes())
|
||||
}) {
|
||||
dependencies.push(DependencyBuilder {
|
||||
project_id: Some(models::DBProjectId(dep.project_id)),
|
||||
version_id: Some(models::DBVersionId(dep.version_id)),
|
||||
file_name: None,
|
||||
dependency_type: DependencyType::Embedded.to_string(),
|
||||
});
|
||||
} else if let Some(first_download) = file.downloads.first() {
|
||||
dependencies.push(DependencyBuilder {
|
||||
project_id: None,
|
||||
version_id: None,
|
||||
file_name: Some(
|
||||
first_download
|
||||
.rsplit('/')
|
||||
.next()
|
||||
.unwrap_or(first_download)
|
||||
.to_string(),
|
||||
),
|
||||
dependency_type: DependencyType::Embedded.to_string(),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
for file in files {
|
||||
if !file.is_empty() {
|
||||
dependencies.push(DependencyBuilder {
|
||||
project_id: None,
|
||||
version_id: None,
|
||||
file_name: Some(file.to_string()),
|
||||
dependency_type: DependencyType::Embedded.to_string(),
|
||||
});
|
||||
}
|
||||
for file in files {
|
||||
if !file.is_empty() {
|
||||
dependencies.push(DependencyBuilder {
|
||||
project_id: None,
|
||||
version_id: None,
|
||||
file_name: Some(file.to_string()),
|
||||
dependency_type: DependencyType::Embedded.to_string(),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -974,10 +973,10 @@ pub async fn upload_file(
|
||||
));
|
||||
}
|
||||
|
||||
if let ValidationResult::Warning(msg) = validation_result {
|
||||
if primary {
|
||||
return Err(CreateError::InvalidInput(msg.to_string()));
|
||||
}
|
||||
if let ValidationResult::Warning(msg) = validation_result
|
||||
&& primary
|
||||
{
|
||||
return Err(CreateError::InvalidInput(msg.to_string()));
|
||||
}
|
||||
|
||||
let url = format!("{cdn_url}/{file_path_encode}");
|
||||
|
||||
@@ -148,65 +148,55 @@ pub async fn get_update_from_hash(
|
||||
&redis,
|
||||
)
|
||||
.await?
|
||||
{
|
||||
if let Some(project) = database::models::DBProject::get_id(
|
||||
&& let Some(project) = database::models::DBProject::get_id(
|
||||
file.project_id,
|
||||
&**pool,
|
||||
&redis,
|
||||
)
|
||||
.await?
|
||||
{
|
||||
let mut versions = database::models::DBVersion::get_many(
|
||||
&project.versions,
|
||||
&**pool,
|
||||
&redis,
|
||||
)
|
||||
.await?
|
||||
.into_iter()
|
||||
.filter(|x| {
|
||||
let mut bool = true;
|
||||
if let Some(version_types) = &update_data.version_types {
|
||||
bool &= version_types
|
||||
.iter()
|
||||
.any(|y| y.as_str() == x.inner.version_type);
|
||||
}
|
||||
if let Some(loaders) = &update_data.loaders {
|
||||
bool &= x.loaders.iter().any(|y| loaders.contains(y));
|
||||
}
|
||||
if let Some(loader_fields) = &update_data.loader_fields {
|
||||
for (key, values) in loader_fields {
|
||||
bool &= if let Some(x_vf) = x
|
||||
.version_fields
|
||||
.iter()
|
||||
.find(|y| y.field_name == *key)
|
||||
{
|
||||
values
|
||||
.iter()
|
||||
.any(|v| x_vf.value.contains_json_value(v))
|
||||
} else {
|
||||
true
|
||||
};
|
||||
}
|
||||
}
|
||||
bool
|
||||
})
|
||||
.sorted();
|
||||
|
||||
if let Some(first) = versions.next_back() {
|
||||
if !is_visible_version(
|
||||
&first.inner,
|
||||
&user_option,
|
||||
&pool,
|
||||
&redis,
|
||||
)
|
||||
.await?
|
||||
{
|
||||
return Err(ApiError::NotFound);
|
||||
}
|
||||
|
||||
return Ok(HttpResponse::Ok()
|
||||
.json(models::projects::Version::from(first)));
|
||||
{
|
||||
let mut versions = database::models::DBVersion::get_many(
|
||||
&project.versions,
|
||||
&**pool,
|
||||
&redis,
|
||||
)
|
||||
.await?
|
||||
.into_iter()
|
||||
.filter(|x| {
|
||||
let mut bool = true;
|
||||
if let Some(version_types) = &update_data.version_types {
|
||||
bool &= version_types
|
||||
.iter()
|
||||
.any(|y| y.as_str() == x.inner.version_type);
|
||||
}
|
||||
if let Some(loaders) = &update_data.loaders {
|
||||
bool &= x.loaders.iter().any(|y| loaders.contains(y));
|
||||
}
|
||||
if let Some(loader_fields) = &update_data.loader_fields {
|
||||
for (key, values) in loader_fields {
|
||||
bool &= if let Some(x_vf) =
|
||||
x.version_fields.iter().find(|y| y.field_name == *key)
|
||||
{
|
||||
values.iter().any(|v| x_vf.value.contains_json_value(v))
|
||||
} else {
|
||||
true
|
||||
};
|
||||
}
|
||||
}
|
||||
bool
|
||||
})
|
||||
.sorted();
|
||||
|
||||
if let Some(first) = versions.next_back() {
|
||||
if !is_visible_version(&first.inner, &user_option, &pool, &redis)
|
||||
.await?
|
||||
{
|
||||
return Err(ApiError::NotFound);
|
||||
}
|
||||
|
||||
return Ok(
|
||||
HttpResponse::Ok().json(models::projects::Version::from(first))
|
||||
);
|
||||
}
|
||||
}
|
||||
Err(ApiError::NotFound)
|
||||
@@ -398,13 +388,12 @@ pub async fn update_files(
|
||||
if let Some(version) = versions
|
||||
.iter()
|
||||
.find(|x| x.inner.project_id == file.project_id)
|
||||
&& let Some(hash) = file.hashes.get(&algorithm)
|
||||
{
|
||||
if let Some(hash) = file.hashes.get(&algorithm) {
|
||||
response.insert(
|
||||
hash.clone(),
|
||||
models::projects::Version::from(version.clone()),
|
||||
);
|
||||
}
|
||||
response.insert(
|
||||
hash.clone(),
|
||||
models::projects::Version::from(version.clone()),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -484,69 +473,59 @@ pub async fn update_individual_files(
|
||||
|
||||
for project in projects {
|
||||
for file in files.iter().filter(|x| x.project_id == project.inner.id) {
|
||||
if let Some(hash) = file.hashes.get(&algorithm) {
|
||||
if let Some(query_file) =
|
||||
if let Some(hash) = file.hashes.get(&algorithm)
|
||||
&& let Some(query_file) =
|
||||
update_data.hashes.iter().find(|x| &x.hash == hash)
|
||||
{
|
||||
let version = all_versions
|
||||
.iter()
|
||||
.filter(|x| x.inner.project_id == file.project_id)
|
||||
.filter(|x| {
|
||||
let mut bool = true;
|
||||
{
|
||||
let version = all_versions
|
||||
.iter()
|
||||
.filter(|x| x.inner.project_id == file.project_id)
|
||||
.filter(|x| {
|
||||
let mut bool = true;
|
||||
|
||||
if let Some(version_types) =
|
||||
&query_file.version_types
|
||||
{
|
||||
bool &= version_types.iter().any(|y| {
|
||||
y.as_str() == x.inner.version_type
|
||||
});
|
||||
}
|
||||
if let Some(loaders) = &query_file.loaders {
|
||||
bool &= x
|
||||
.loaders
|
||||
.iter()
|
||||
.any(|y| loaders.contains(y));
|
||||
}
|
||||
|
||||
if let Some(loader_fields) =
|
||||
&query_file.loader_fields
|
||||
{
|
||||
for (key, values) in loader_fields {
|
||||
bool &= if let Some(x_vf) = x
|
||||
.version_fields
|
||||
.iter()
|
||||
.find(|y| y.field_name == *key)
|
||||
{
|
||||
values.iter().any(|v| {
|
||||
x_vf.value.contains_json_value(v)
|
||||
})
|
||||
} else {
|
||||
true
|
||||
};
|
||||
}
|
||||
}
|
||||
bool
|
||||
})
|
||||
.sorted()
|
||||
.next_back();
|
||||
|
||||
if let Some(version) = version {
|
||||
if is_visible_version(
|
||||
&version.inner,
|
||||
&user_option,
|
||||
&pool,
|
||||
&redis,
|
||||
)
|
||||
.await?
|
||||
{
|
||||
response.insert(
|
||||
hash.clone(),
|
||||
models::projects::Version::from(
|
||||
version.clone(),
|
||||
),
|
||||
);
|
||||
if let Some(version_types) = &query_file.version_types {
|
||||
bool &= version_types
|
||||
.iter()
|
||||
.any(|y| y.as_str() == x.inner.version_type);
|
||||
}
|
||||
}
|
||||
if let Some(loaders) = &query_file.loaders {
|
||||
bool &=
|
||||
x.loaders.iter().any(|y| loaders.contains(y));
|
||||
}
|
||||
|
||||
if let Some(loader_fields) = &query_file.loader_fields {
|
||||
for (key, values) in loader_fields {
|
||||
bool &= if let Some(x_vf) = x
|
||||
.version_fields
|
||||
.iter()
|
||||
.find(|y| y.field_name == *key)
|
||||
{
|
||||
values.iter().any(|v| {
|
||||
x_vf.value.contains_json_value(v)
|
||||
})
|
||||
} else {
|
||||
true
|
||||
};
|
||||
}
|
||||
}
|
||||
bool
|
||||
})
|
||||
.sorted()
|
||||
.next_back();
|
||||
|
||||
if let Some(version) = version
|
||||
&& is_visible_version(
|
||||
&version.inner,
|
||||
&user_option,
|
||||
&pool,
|
||||
&redis,
|
||||
)
|
||||
.await?
|
||||
{
|
||||
response.insert(
|
||||
hash.clone(),
|
||||
models::projects::Version::from(version.clone()),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -106,13 +106,12 @@ pub async fn version_project_get_helper(
|
||||
|| x.inner.version_number == id.1
|
||||
});
|
||||
|
||||
if let Some(version) = version {
|
||||
if is_visible_version(&version.inner, &user_option, &pool, &redis)
|
||||
if let Some(version) = version
|
||||
&& is_visible_version(&version.inner, &user_option, &pool, &redis)
|
||||
.await?
|
||||
{
|
||||
return Ok(HttpResponse::Ok()
|
||||
.json(models::projects::Version::from(version)));
|
||||
}
|
||||
{
|
||||
return Ok(HttpResponse::Ok()
|
||||
.json(models::projects::Version::from(version)));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -190,12 +189,12 @@ pub async fn version_get_helper(
|
||||
.map(|x| x.1)
|
||||
.ok();
|
||||
|
||||
if let Some(data) = version_data {
|
||||
if is_visible_version(&data.inner, &user_option, &pool, &redis).await? {
|
||||
return Ok(
|
||||
HttpResponse::Ok().json(models::projects::Version::from(data))
|
||||
);
|
||||
}
|
||||
if let Some(data) = version_data
|
||||
&& is_visible_version(&data.inner, &user_option, &pool, &redis).await?
|
||||
{
|
||||
return Ok(
|
||||
HttpResponse::Ok().json(models::projects::Version::from(data))
|
||||
);
|
||||
}
|
||||
|
||||
Err(ApiError::NotFound)
|
||||
|
||||
@@ -15,14 +15,12 @@ pub async fn get_user_status(
|
||||
return Some(friend_status);
|
||||
}
|
||||
|
||||
if let Ok(mut conn) = redis.pool.get().await {
|
||||
if let Ok(mut statuses) =
|
||||
if let Ok(mut conn) = redis.pool.get().await
|
||||
&& let Ok(mut statuses) =
|
||||
conn.sscan::<_, String>(get_field_name(user)).await
|
||||
{
|
||||
if let Some(status_json) = statuses.next_item().await {
|
||||
return serde_json::from_str::<UserStatus>(&status_json).ok();
|
||||
}
|
||||
}
|
||||
&& let Some(status_json) = statuses.next_item().await
|
||||
{
|
||||
return serde_json::from_str::<UserStatus>(&status_json).ok();
|
||||
}
|
||||
|
||||
None
|
||||
|
||||
@@ -138,12 +138,11 @@ fn process_image(
let (orig_width, orig_height) = img.dimensions();
let aspect_ratio = orig_width as f32 / orig_height as f32;

if let Some(target_width) = target_width {
if img.width() > target_width {
let new_height =
(target_width as f32 / aspect_ratio).round() as u32;
img = img.resize(target_width, new_height, FilterType::Lanczos3);
}
if let Some(target_width) = target_width
&& img.width() > target_width
{
let new_height = (target_width as f32 / aspect_ratio).round() as u32;
img = img.resize(target_width, new_height, FilterType::Lanczos3);
}

if let Some(min_aspect_ratio) = min_aspect_ratio {

@@ -133,12 +133,11 @@ pub async fn rate_limit_middleware(
.expect("Rate limiter not configured properly")
.clone();

if let Some(key) = req.headers().get("x-ratelimit-key") {
if key.to_str().ok()
if let Some(key) = req.headers().get("x-ratelimit-key")
&& key.to_str().ok()
== dotenvy::var("RATE_LIMIT_IGNORE_KEY").ok().as_deref()
{
return Ok(next.call(req).await?.map_into_left_body());
}
{
return Ok(next.call(req).await?.map_into_left_body());
}

let conn_info = req.connection_info().clone();

@@ -22,46 +22,47 @@ pub fn validation_errors_to_string(
|
||||
|
||||
let key_option = map.keys().next();
|
||||
|
||||
if let Some(field) = key_option {
|
||||
if let Some(error) = map.get(field) {
|
||||
return match error {
|
||||
ValidationErrorsKind::Struct(errors) => {
|
||||
validation_errors_to_string(
|
||||
if let Some(field) = key_option
|
||||
&& let Some(error) = map.get(field)
|
||||
{
|
||||
return match error {
|
||||
ValidationErrorsKind::Struct(errors) => {
|
||||
validation_errors_to_string(
|
||||
*errors.clone(),
|
||||
Some(format!("of item {field}")),
|
||||
)
|
||||
}
|
||||
ValidationErrorsKind::List(list) => {
|
||||
if let Some((index, errors)) = list.iter().next() {
|
||||
output.push_str(&validation_errors_to_string(
|
||||
*errors.clone(),
|
||||
Some(format!("of item {field}")),
|
||||
)
|
||||
Some(format!("of list {field} with index {index}")),
|
||||
));
|
||||
}
|
||||
ValidationErrorsKind::List(list) => {
|
||||
if let Some((index, errors)) = list.iter().next() {
|
||||
output.push_str(&validation_errors_to_string(
|
||||
*errors.clone(),
|
||||
Some(format!("of list {field} with index {index}")),
|
||||
));
|
||||
}
|
||||
|
||||
output
|
||||
}
|
||||
ValidationErrorsKind::Field(errors) => {
|
||||
if let Some(error) = errors.first() {
|
||||
if let Some(adder) = adder {
|
||||
write!(
|
||||
output
|
||||
}
|
||||
ValidationErrorsKind::Field(errors) => {
|
||||
if let Some(error) = errors.first() {
|
||||
if let Some(adder) = adder {
|
||||
write!(
|
||||
&mut output,
|
||||
"Field {field} {adder} failed validation with error: {}",
|
||||
error.code
|
||||
).unwrap();
|
||||
} else {
|
||||
write!(
|
||||
&mut output,
|
||||
"Field {field} failed validation with error: {}",
|
||||
error.code
|
||||
).unwrap();
|
||||
}
|
||||
} else {
|
||||
write!(
|
||||
&mut output,
|
||||
"Field {field} failed validation with error: {}",
|
||||
error.code
|
||||
)
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
output
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
output
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
String::new()
|
||||
|
||||
@@ -238,17 +238,17 @@ pub async fn send_slack_webhook(
}
});

if let Some(icon_url) = metadata.project_icon_url {
if let Some(project_block) = project_block.as_object_mut() {
project_block.insert(
"accessory".to_string(),
serde_json::json!({
"type": "image",
"image_url": icon_url,
"alt_text": metadata.project_title
}),
);
}
if let Some(icon_url) = metadata.project_icon_url
&& let Some(project_block) = project_block.as_object_mut()
{
project_block.insert(
"accessory".to_string(),
serde_json::json!({
"type": "image",
"image_url": icon_url,
"alt_text": metadata.project_title
}),
);
}

blocks.push(project_block);

@@ -292,7 +292,7 @@ pub async fn add_dummy_data(api: &ApiV3, db: TemporaryDatabase) -> DummyData {
let pool = &db.pool.clone();

pool.execute(
include_str!("../files/dummy_data.sql")
include_str!("../fixtures/dummy_data.sql")
.replace("$1", &Scopes::all().bits().to_string())
.as_str(),
)

@@ -222,10 +222,10 @@ impl<'a, A: Api> PermissionsTest<'a, A> {
resp.status().as_u16()
));
}
if resp.status() == StatusCode::OK {
if let Some(failure_json_check) = &self.failure_json_check {
failure_json_check(&test::read_body_json(resp).await);
}
if resp.status() == StatusCode::OK
&& let Some(failure_json_check) = &self.failure_json_check
{
failure_json_check(&test::read_body_json(resp).await);
}

// Failure test- logged in on a non-team user
@@ -246,10 +246,10 @@ impl<'a, A: Api> PermissionsTest<'a, A> {
resp.status().as_u16()
));
}
if resp.status() == StatusCode::OK {
if let Some(failure_json_check) = &self.failure_json_check {
failure_json_check(&test::read_body_json(resp).await);
}
if resp.status() == StatusCode::OK
&& let Some(failure_json_check) = &self.failure_json_check
{
failure_json_check(&test::read_body_json(resp).await);
}

// Failure test- logged in with EVERY non-relevant permission
@@ -270,10 +270,10 @@ impl<'a, A: Api> PermissionsTest<'a, A> {
resp.status().as_u16()
));
}
if resp.status() == StatusCode::OK {
if let Some(failure_json_check) = &self.failure_json_check {
failure_json_check(&test::read_body_json(resp).await);
}
if resp.status() == StatusCode::OK
&& let Some(failure_json_check) = &self.failure_json_check
{
failure_json_check(&test::read_body_json(resp).await);
}

// Patch user's permissions to success permissions
@@ -300,10 +300,10 @@ impl<'a, A: Api> PermissionsTest<'a, A> {
resp.status().as_u16()
));
}
if resp.status() == StatusCode::OK {
if let Some(success_json_check) = &self.success_json_check {
success_json_check(&test::read_body_json(resp).await);
}
if resp.status() == StatusCode::OK
&& let Some(success_json_check) = &self.success_json_check
{
success_json_check(&test::read_body_json(resp).await);
}

// If the remove_user flag is set, remove the user from the project