Initial Auth Impl + More Caching (#647)
* Port redis to staging
* redis cache on staging
* add back legacy auth callback
* Begin work on new auth flows
* Finish all auth flows
* Finish base session authentication
* run prep + fix clippy
* make compilation work
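As a rough sketch of how a client consumes the flow this commit introduces (based on the new src/auth/flows.rs and src/auth/validate.rs further down; the exact public mount point is an assumption, since the in-code comment uses /auth/init while the provider redirect URIs use /v2/auth/callback):

    // Hypothetical end-to-end walk-through; not part of the commit itself.
    use reqwest::header::AUTHORIZATION;

    #[tokio::main]
    async fn main() -> Result<(), Box<dyn std::error::Error>> {
        let client = reqwest::Client::builder()
            .redirect(reqwest::redirect::Policy::none()) // init and callback reply with 307 redirects
            .build()?;

        // 1. Ask the backend where to send the user; the response body also carries the URL.
        let init: serde_json::Value = client
            .get("http://localhost:8000/v2/auth/init?url=https://modrinth.com&provider=github")
            .send()
            .await?
            .json()
            .await?;
        println!("send the user to {}", init["url"]);

        // 2. After the provider redirects to the callback, the backend appends
        //    `?code=mra_...` to the original `url`; that code is the session token.
        let session_token = "mra_<60 alphanumeric characters>"; // placeholder, issued by issue_session

        // 3. The token is then sent as a Bearer token on later requests (see src/auth/validate.rs).
        let resp = client
            .get("http://localhost:8000/v2/user") // any authenticated route; this path is an assumption
            .header(AUTHORIZATION, format!("Bearer {session_token}"))
            .send()
            .await?;
        println!("authenticated request returned {}", resp.status());
        Ok(())
    }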
.env (31)
@@ -15,17 +15,16 @@ CLOUDFLARE_INTEGRATION=false
DATABASE_URL=postgresql://labrinth:labrinth@localhost/labrinth
DATABASE_MIN_CONNECTIONS=0
DATABASE_MAX_CONNECTIONS=16

REDIS_URL=redis://localhost

MEILISEARCH_ADDR=http://localhost:7700
MEILISEARCH_KEY=modrinth

BIND_ADDR=127.0.0.1:8000
SELF_ADDR=http://127.0.0.1:8000
SELF_ADDR=http://localhost:8000
MOCK_FILE_PATH=/tmp/modrinth

MINOS_URL=http://127.0.0.1:4000
KRATOS_URL=http://127.0.0.1:4433
ORY_AUTH_BEARER=none

STORAGE_BACKEND=local

BACKBLAZE_KEY_ID=none
@@ -43,8 +42,6 @@ LOCAL_INDEX_INTERVAL=3600
# 30 minutes
VERSION_INDEX_INTERVAL=1800

GITHUB_CLIENT_ID=none

RATE_LIMIT_IGNORE_IPS='["127.0.0.1"]'

WHITELISTED_MODPACK_DOMAINS='["cdn.modrinth.com", "edge.forgecdn.net", "github.com", "raw.githubusercontent.com"]'
@@ -54,9 +51,23 @@ ALLOWED_CALLBACK_URLS='["localhost", ".modrinth.com", "127.0.0.1"]'
ARIADNE_ADMIN_KEY=feedbeef
ARIADNE_URL=https://staging-ariadne.modrinth.com/v1/

STRIPE_TOKEN=none
STRIPE_WEBHOOK_SECRET=none

PAYPAL_API_URL=https://api-m.sandbox.paypal.com/v1/
PAYPAL_CLIENT_ID=none
PAYPAL_CLIENT_SECRET=none

GITHUB_CLIENT_ID=none
GITHUB_CLIENT_SECRET=none

GITLAB_CLIENT_ID=none
GITLAB_CLIENT_SECRET=none

DISCORD_CLIENT_ID=none
DISCORD_CLIENT_SECRET=none

MICROSOFT_CLIENT_ID=none
MICROSOFT_CLIENT_SECRET=none

GOOGLE_CLIENT_ID=none
GOOGLE_CLIENT_SECRET=none

STEAM_API_KEY=none
Cargo.lock (1480, generated): file diff suppressed because it is too large
Cargo.toml (55)
@@ -12,63 +12,68 @@ path = "src/main.rs"

[dependencies]
actix = "0.13.0"
actix-web = "4.3.0"
actix-web = "4.3.1"
actix-rt = "2.8.0"
actix-multipart = "0.6.0"
actix-cors = "0.6.4"

tokio = { version = "1.25.0", features = ["sync"] }
tokio-stream = "0.1.11"
tokio = { version = "1.29.1", features = ["sync"] }
tokio-stream = "0.1.14"

futures = "0.3.26"
futures = "0.3.28"
futures-timer = "3.0.2"
async-trait = "0.1.64"
async-trait = "0.1.70"
dashmap = "5.4.0"
lazy_static = "1.4.0"

meilisearch-sdk = "0.22.0"
rust-s3 = "0.32.3"
reqwest = { version = "0.11.14", features = ["json", "multipart"] }
rust-s3 = "0.33.0"
reqwest = { version = "0.11.18", features = ["json", "multipart"] }

serde_json = "1.0"
serde = { version = "1.0", features = ["derive"] }
serde_with = "2.2.0"
chrono = { version = "0.4.23", features = ["serde"]}
serde_with = "3.0.0"
chrono = { version = "0.4.26", features = ["serde"]}
yaserde = "0.8.0"
yaserde_derive = "0.8.0"
xml-rs = "0.8.4"
xml-rs = "0.8.15"

rand = "0.8.5"
rand_chacha = "0.3.1"
bytes = "1.4.0"
base64 = "0.21.0"
base64 = "0.21.2"
sha1 = { version = "0.6.1", features = ["std"] }
sha2 = "0.9.9"
hmac = "0.11.0"
bitflags = "1.3.2"
hex = "0.4.3"

url = "2.3.1"
url = "2.4.0"
urlencoding = "2.1.2"

zip = "0.6.4"
zip = "0.6.6"

itertools = "0.10.5"
itertools = "0.11.0"

validator = { version = "0.16.0", features = ["derive", "phone"] }
regex = "1.7.1"
validator = { version = "0.16.1", features = ["derive", "phone"] }
regex = "1.8.4"
censor = "0.3.0"
spdx = { version = "0.10.0", features = ["text"] }
spdx = { version = "0.10.1", features = ["text"] }

dotenvy = "0.15.6"
log = "0.4.17"
dotenvy = "0.15.7"
log = "0.4.19"
env_logger = "0.10.0"
thiserror = "1.0.38"
thiserror = "1.0.41"

sqlx = { version = "0.6.2", features = ["runtime-actix-rustls", "postgres", "chrono", "offline", "macros", "migrate", "decimal", "json"] }
rust_decimal = { version = "1.28.1", features = ["serde-with-float", "serde-with-str"] }
sqlx = { version = "0.6.3", features = ["offline", "runtime-tokio-rustls", "postgres", "chrono", "macros", "migrate", "decimal", "json"] }
rust_decimal = { version = "1.30.0", features = ["serde-with-float", "serde-with-str"] }
redis = { version = "0.23.0", features = ["tokio-comp", "ahash", "r2d2"]}
deadpool-redis = "0.12.0"

sentry = { version = "0.30.0", features = ["profiling"] }
sentry-actix = "0.30.0"
sentry = { version = "0.31.5", features = ["profiling"] }
sentry-actix = "0.31.5"

image = "0.24.5"
image = "0.24.6"
color-thief = "0.2.2"

woothee = "0.13.0"
@@ -20,6 +20,14 @@ services:
      - meilisearch-data:/meili_data
    environment:
      MEILI_MASTER_KEY: modrinth
  redis:
    image: redis:alpine
    restart: on-failure
    ports:
      - '6379:6379'
    volumes:
      - redis-data:/data
volumes:
  meilisearch-data:
  db-data:
  redis-data:
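The redis service added above pairs with the new deadpool-redis dependency in Cargo.toml and the REDIS_URL variable in .env. A minimal sketch, assuming only those two pieces, of how a pool for it can be created and exercised (the actual wiring in labrinth's main.rs is not shown in this diff):

    use deadpool_redis::{Config, Runtime};

    #[tokio::main]
    async fn main() -> Result<(), Box<dyn std::error::Error>> {
        // Build a connection pool against the compose service above.
        let redis_url = dotenvy::var("REDIS_URL")?; // e.g. redis://localhost
        let pool = Config::from_url(redis_url).create_pool(Some(Runtime::Tokio1))?;

        // Round-trip one value to confirm the cache is reachable.
        let mut conn = pool.get().await?;
        redis::cmd("SET")
            .arg("healthcheck")
            .arg("ok")
            .query_async::<_, ()>(&mut conn)
            .await?;
        let value: String = redis::cmd("GET")
            .arg("healthcheck")
            .query_async(&mut conn)
            .await?;
        println!("redis replied with {value}");
        Ok(())
    }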
migrations/20230628180115_kill-ory.sql (48, new file)
@@ -0,0 +1,48 @@
ALTER TABLE users DROP COLUMN kratos_id;

ALTER TABLE states ADD COLUMN provider varchar(64) NOT NULL default 'github';

ALTER TABLE users ADD COLUMN discord_id bigint;
ALTER TABLE users ADD COLUMN gitlab_id bigint;
ALTER TABLE users ADD COLUMN google_id varchar(256);
ALTER TABLE users ADD COLUMN steam_id bigint;
ALTER TABLE users ADD COLUMN microsoft_id varchar(256);

CREATE INDEX users_discord_id
    ON users (discord_id);
CREATE INDEX users_gitlab_id
    ON users (gitlab_id);
CREATE INDEX users_google_id
    ON users (google_id);
CREATE INDEX users_steam_id
    ON users (steam_id);
CREATE INDEX users_microsoft_id
    ON users (microsoft_id);

ALTER TABLE users ALTER COLUMN avatar_url TYPE varchar(1024);
ALTER TABLE users ADD COLUMN password TEXT NULL;
ALTER TABLE users ADD COLUMN email_verified BOOLEAN NOT NULL DEFAULT FALSE;

CREATE TABLE sessions (
    id bigint NOT NULL PRIMARY KEY,
    session varchar(64) NOT NULL UNIQUE,
    user_id BIGINT NOT NULL REFERENCES users(id),
    created timestamptz DEFAULT CURRENT_TIMESTAMP NOT NULL,
    last_login timestamptz DEFAULT CURRENT_TIMESTAMP NOT NULL,
    expires timestamptz DEFAULT CURRENT_TIMESTAMP + interval '14 days' NOT NULL,
    refresh_expires timestamptz DEFAULT CURRENT_TIMESTAMP + interval '60 days' NOT NULL,

    city varchar(256) NULL,
    country varchar(256) NULL,
    ip varchar(512) NOT NULL,

    os varchar(256) NULL,
    platform varchar(256) NULL,
    user_agent varchar(1024) NOT NULL
);

CREATE INDEX sessions_user_id
    ON sessions (user_id);

ALTER TABLE mods DROP COLUMN game_versions;
ALTER TABLE mods DROP COLUMN loaders;
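The expires and refresh_expires columns give each session a 14-day login window and a 60-day refresh window. As an illustration only (this query is not part of the commit), stale rows could be pruned with a helper like:

    use sqlx::PgPool;

    // Illustrative clean-up: remove sessions whose refresh window has fully lapsed.
    async fn prune_expired_sessions(pool: &PgPool) -> Result<u64, sqlx::Error> {
        let result = sqlx::query("DELETE FROM sessions WHERE refresh_expires < CURRENT_TIMESTAMP")
            .execute(pool)
            .await?;
        Ok(result.rows_affected())
    }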
sqlx-data.json (3449): file diff suppressed because it is too large
src/auth/checks.rs (194, new file)
@@ -0,0 +1,194 @@
use crate::database;
use crate::database::models::project_item::QueryProject;
use crate::database::models::version_item::QueryVersion;
use crate::database::{models, Project, Version};
use crate::models::users::User;
use crate::routes::ApiError;
use actix_web::web;
use sqlx::PgPool;

pub async fn is_authorized(
    project_data: &Project,
    user_option: &Option<User>,
    pool: &web::Data<PgPool>,
) -> Result<bool, ApiError> {
    let mut authorized = !project_data.status.is_hidden();

    if let Some(user) = &user_option {
        if !authorized {
            if user.role.is_mod() {
                authorized = true;
            } else {
                let user_id: models::ids::UserId = user.id.into();

                let project_exists = sqlx::query!(
                    "SELECT EXISTS(SELECT 1 FROM team_members WHERE team_id = $1 AND user_id = $2)",
                    project_data.team_id as database::models::ids::TeamId,
                    user_id as database::models::ids::UserId,
                )
                .fetch_one(&***pool)
                .await?
                .exists;

                authorized = project_exists.unwrap_or(false);
            }
        }
    }

    Ok(authorized)
}

pub async fn filter_authorized_projects(
    projects: Vec<QueryProject>,
    user_option: &Option<User>,
    pool: &web::Data<PgPool>,
) -> Result<Vec<crate::models::projects::Project>, ApiError> {
    let mut return_projects = Vec::new();
    let mut check_projects = Vec::new();

    for project in projects {
        if !project.inner.status.is_hidden()
            || user_option
                .as_ref()
                .map(|x| x.role.is_mod())
                .unwrap_or(false)
        {
            return_projects.push(project.into());
        } else if user_option.is_some() {
            check_projects.push(project);
        }
    }

    if !check_projects.is_empty() {
        if let Some(user) = user_option {
            let user_id: models::ids::UserId = user.id.into();

            use futures::TryStreamExt;

            sqlx::query!(
                "
                SELECT m.id id, m.team_id team_id FROM team_members tm
                INNER JOIN mods m ON m.team_id = tm.team_id
                WHERE tm.team_id = ANY($1) AND tm.user_id = $2
                ",
                &check_projects
                    .iter()
                    .map(|x| x.inner.team_id.0)
                    .collect::<Vec<_>>(),
                user_id as database::models::ids::UserId,
            )
            .fetch_many(&***pool)
            .try_for_each(|e| {
                if let Some(row) = e.right() {
                    check_projects.retain(|x| {
                        let bool = x.inner.id.0 == row.id && x.inner.team_id.0 == row.team_id;

                        if bool {
                            return_projects.push(x.clone().into());
                        }

                        !bool
                    });
                }

                futures::future::ready(Ok(()))
            })
            .await?;
        }
    }

    Ok(return_projects)
}

pub async fn is_authorized_version(
    version_data: &Version,
    user_option: &Option<User>,
    pool: &web::Data<PgPool>,
) -> Result<bool, ApiError> {
    let mut authorized = !version_data.status.is_hidden();

    if let Some(user) = &user_option {
        if !authorized {
            if user.role.is_mod() {
                authorized = true;
            } else {
                let user_id: models::ids::UserId = user.id.into();

                let version_exists = sqlx::query!(
                    "SELECT EXISTS(SELECT 1 FROM mods m INNER JOIN team_members tm ON tm.team_id = m.team_id AND user_id = $2 WHERE m.id = $1)",
                    version_data.project_id as database::models::ids::ProjectId,
                    user_id as database::models::ids::UserId,
                )
                .fetch_one(&***pool)
                .await?
                .exists;

                authorized = version_exists.unwrap_or(false);
            }
        }
    }

    Ok(authorized)
}

pub async fn filter_authorized_versions(
    versions: Vec<QueryVersion>,
    user_option: &Option<User>,
    pool: &web::Data<PgPool>,
) -> Result<Vec<crate::models::projects::Version>, ApiError> {
    let mut return_versions = Vec::new();
    let mut check_versions = Vec::new();

    for version in versions {
        if !version.inner.status.is_hidden()
            || user_option
                .as_ref()
                .map(|x| x.role.is_mod())
                .unwrap_or(false)
        {
            return_versions.push(version.into());
        } else if user_option.is_some() {
            check_versions.push(version);
        }
    }

    if !check_versions.is_empty() {
        if let Some(user) = user_option {
            let user_id: models::ids::UserId = user.id.into();

            use futures::TryStreamExt;

            sqlx::query!(
                "
                SELECT m.id FROM mods m
                INNER JOIN team_members tm ON tm.team_id = m.team_id AND user_id = $2
                WHERE m.id = ANY($1)
                ",
                &check_versions
                    .iter()
                    .map(|x| x.inner.project_id.0)
                    .collect::<Vec<_>>(),
                user_id as database::models::ids::UserId,
            )
            .fetch_many(&***pool)
            .try_for_each(|e| {
                if let Some(row) = e.right() {
                    check_versions.retain(|x| {
                        let bool = x.inner.project_id.0 == row.id;

                        if bool {
                            return_versions.push(x.clone().into());
                        }

                        !bool
                    });
                }

                futures::future::ready(Ok(()))
            })
            .await?;
        }
    }

    Ok(return_versions)
}
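A hedged sketch of how a route would typically consume is_authorized; the surrounding helper and its name are stand-ins, only the call itself mirrors the signature defined above:

    use actix_web::{web, HttpResponse};

    // Illustrative only: gate a project response on the new visibility check.
    async fn respond_if_visible(
        project: &crate::database::Project,
        user: &Option<crate::models::users::User>,
        pool: &web::Data<sqlx::PgPool>,
    ) -> Result<HttpResponse, crate::routes::ApiError> {
        if crate::auth::is_authorized(project, user, pool).await? {
            Ok(HttpResponse::Ok().finish())
        } else {
            // Hidden projects are reported as missing rather than forbidden.
            Ok(HttpResponse::NotFound().finish())
        }
    }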
src/auth/flows.rs (889, new file)
@@ -0,0 +1,889 @@
|
||||
use crate::database::models::{generate_state_id, StateId};
|
||||
use crate::models::ids::base62_impl::{parse_base62, to_base62};
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::parse_strings_from_var;
|
||||
|
||||
use actix_web::web::{scope, Data, Query, ServiceConfig};
|
||||
use actix_web::{get, HttpRequest, HttpResponse};
|
||||
use chrono::Utc;
|
||||
use reqwest::header::AUTHORIZATION;
|
||||
use rust_decimal::Decimal;
|
||||
|
||||
use crate::auth::session::issue_session;
|
||||
use crate::auth::AuthenticationError;
|
||||
use crate::file_hosting::FileHost;
|
||||
use crate::models::users::{Badges, Role};
|
||||
use crate::util::ext::{get_image_content_type, get_image_ext};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::postgres::PgPool;
|
||||
|
||||
pub fn config(cfg: &mut ServiceConfig) {
|
||||
cfg.service(scope("auth").service(auth_callback).service(init));
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Default, Eq, PartialEq)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum AuthProvider {
|
||||
#[default]
|
||||
GitHub,
|
||||
Discord,
|
||||
Microsoft,
|
||||
GitLab,
|
||||
Google,
|
||||
Steam,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct TempUser {
|
||||
pub id: String,
|
||||
pub username: String,
|
||||
pub email: Option<String>,
|
||||
|
||||
pub avatar_url: Option<String>,
|
||||
pub bio: Option<String>,
|
||||
pub name: Option<String>,
|
||||
}
|
||||
|
||||
impl AuthProvider {
|
||||
pub fn get_redirect_url(&self, state: StateId) -> Result<String, AuthenticationError> {
|
||||
let state = to_base62(state.0 as u64);
|
||||
let self_addr = dotenvy::var("SELF_ADDR")?;
|
||||
let raw_redirect_uri = format!("{}/v2/auth/callback", self_addr);
|
||||
let redirect_uri = urlencoding::encode(&raw_redirect_uri);
|
||||
|
||||
Ok(match self {
|
||||
AuthProvider::GitHub => {
|
||||
let client_id = dotenvy::var("GITHUB_CLIENT_ID")?;
|
||||
|
||||
format!(
|
||||
"https://github.com/login/oauth/authorize?client_id={}&state={}&scope=read%3Auser%20user%3Aemail&redirect_uri={}",
|
||||
client_id,
|
||||
state,
|
||||
redirect_uri,
|
||||
)
|
||||
}
|
||||
AuthProvider::Discord => {
|
||||
let client_id = dotenvy::var("DISCORD_CLIENT_ID")?;
|
||||
|
||||
format!("https://discord.com/api/oauth2/authorize?client_id={}&state={}&response_type=code&scope=identify%20email&redirect_uri={}", client_id, state, redirect_uri)
|
||||
}
|
||||
AuthProvider::Microsoft => {
|
||||
let client_id = dotenvy::var("MICROSOFT_CLIENT_ID")?;
|
||||
|
||||
format!("https://login.live.com/oauth20_authorize.srf?client_id={}&response_type=code&scope=user.read&state={}&prompt=select_account&redirect_uri={}", client_id, state, redirect_uri)
|
||||
}
|
||||
AuthProvider::GitLab => {
|
||||
let client_id = dotenvy::var("GITLAB_CLIENT_ID")?;
|
||||
|
||||
format!(
|
||||
"https://gitlab.com/oauth/authorize?client_id={}&state={}&scope=read_user+profile+email&response_type=code&redirect_uri={}",
|
||||
client_id,
|
||||
state,
|
||||
redirect_uri,
|
||||
)
|
||||
}
|
||||
AuthProvider::Google => {
|
||||
let client_id = dotenvy::var("GOOGLE_CLIENT_ID")?;
|
||||
|
||||
format!(
|
||||
"https://accounts.google.com/o/oauth2/v2/auth?client_id={}&state={}&scope={}&response_type=code&redirect_uri={}",
|
||||
client_id,
|
||||
state,
|
||||
urlencoding::encode("https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile"),
|
||||
redirect_uri,
|
||||
)
|
||||
}
|
||||
AuthProvider::Steam => {
|
||||
format!(
|
||||
"https://steamcommunity.com/openid/login?openid.ns={}&openid.mode={}&openid.return_to={}{}{}&openid.realm={}&openid.identity={}&openid.claimed_id={}",
|
||||
urlencoding::encode("http://specs.openid.net/auth/2.0"),
|
||||
"checkid_setup",
|
||||
redirect_uri, urlencoding::encode("?state="), state,
|
||||
self_addr,
|
||||
"http://specs.openid.net/auth/2.0/identifier_select",
|
||||
"http://specs.openid.net/auth/2.0/identifier_select",
|
||||
)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn get_token(
|
||||
&self,
|
||||
query: HashMap<String, String>,
|
||||
) -> Result<String, AuthenticationError> {
|
||||
let redirect_uri = format!("{}/v2/auth/callback", dotenvy::var("SELF_ADDR")?);
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct AccessToken {
|
||||
pub access_token: String,
|
||||
}
|
||||
|
||||
let res = match self {
|
||||
AuthProvider::GitHub => {
|
||||
let code = query
|
||||
.get("code")
|
||||
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
|
||||
let client_id = dotenvy::var("GITHUB_CLIENT_ID")?;
|
||||
let client_secret = dotenvy::var("GITHUB_CLIENT_SECRET")?;
|
||||
|
||||
let url = format!(
|
||||
"https://github.com/login/oauth/access_token?client_id={}&client_secret={}&code={}&redirect_uri={}",
|
||||
client_id, client_secret, code, redirect_uri
|
||||
);
|
||||
|
||||
let token: AccessToken = reqwest::Client::new()
|
||||
.post(&url)
|
||||
.header(reqwest::header::ACCEPT, "application/json")
|
||||
.send()
|
||||
.await?
|
||||
.json()
|
||||
.await?;
|
||||
|
||||
token.access_token
|
||||
}
|
||||
AuthProvider::Discord => {
|
||||
let code = query
|
||||
.get("code")
|
||||
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
|
||||
let client_id = dotenvy::var("DISCORD_CLIENT_ID")?;
|
||||
let client_secret = dotenvy::var("DISCORD_CLIENT_SECRET")?;
|
||||
|
||||
let mut map = HashMap::new();
|
||||
map.insert("client_id", &*client_id);
|
||||
map.insert("client_secret", &*client_secret);
|
||||
map.insert("code", code);
|
||||
map.insert("grant_type", "authorization_code");
|
||||
map.insert("redirect_uri", &redirect_uri);
|
||||
|
||||
let token: AccessToken = reqwest::Client::new()
|
||||
.post("https://discord.com/api/v10/oauth2/token")
|
||||
.header(reqwest::header::ACCEPT, "application/json")
|
||||
.form(&map)
|
||||
.send()
|
||||
.await?
|
||||
.json()
|
||||
.await?;
|
||||
|
||||
token.access_token
|
||||
}
|
||||
AuthProvider::Microsoft => {
|
||||
let code = query
|
||||
.get("code")
|
||||
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
|
||||
let client_id = dotenvy::var("MICROSOFT_CLIENT_ID")?;
|
||||
let client_secret = dotenvy::var("MICROSOFT_CLIENT_SECRET")?;
|
||||
|
||||
let mut map = HashMap::new();
|
||||
map.insert("client_id", &*client_id);
|
||||
map.insert("client_secret", &*client_secret);
|
||||
map.insert("code", code);
|
||||
map.insert("grant_type", "authorization_code");
|
||||
map.insert("redirect_uri", &redirect_uri);
|
||||
|
||||
let token: AccessToken = reqwest::Client::new()
|
||||
.post("https://login.live.com/oauth20_token.srf")
|
||||
.header(reqwest::header::ACCEPT, "application/json")
|
||||
.form(&map)
|
||||
.send()
|
||||
.await?
|
||||
.json()
|
||||
.await?;
|
||||
|
||||
token.access_token
|
||||
}
|
||||
AuthProvider::GitLab => {
|
||||
let code = query
|
||||
.get("code")
|
||||
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
|
||||
let client_id = dotenvy::var("GITLAB_CLIENT_ID")?;
|
||||
let client_secret = dotenvy::var("GITLAB_CLIENT_SECRET")?;
|
||||
|
||||
let mut map = HashMap::new();
|
||||
map.insert("client_id", &*client_id);
|
||||
map.insert("client_secret", &*client_secret);
|
||||
map.insert("code", code);
|
||||
map.insert("grant_type", "authorization_code");
|
||||
map.insert("redirect_uri", &redirect_uri);
|
||||
|
||||
let token: AccessToken = reqwest::Client::new()
|
||||
.post("https://gitlab.com/oauth/token")
|
||||
.header(reqwest::header::ACCEPT, "application/json")
|
||||
.form(&map)
|
||||
.send()
|
||||
.await?
|
||||
.json()
|
||||
.await?;
|
||||
|
||||
token.access_token
|
||||
}
|
||||
AuthProvider::Google => {
|
||||
let code = query
|
||||
.get("code")
|
||||
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
|
||||
let client_id = dotenvy::var("GOOGLE_CLIENT_ID")?;
|
||||
let client_secret = dotenvy::var("GOOGLE_CLIENT_SECRET")?;
|
||||
|
||||
let mut map = HashMap::new();
|
||||
map.insert("client_id", &*client_id);
|
||||
map.insert("client_secret", &*client_secret);
|
||||
map.insert("code", code);
|
||||
map.insert("grant_type", "authorization_code");
|
||||
map.insert("redirect_uri", &redirect_uri);
|
||||
|
||||
let token: AccessToken = reqwest::Client::new()
|
||||
.post("https://oauth2.googleapis.com/token")
|
||||
.header(reqwest::header::ACCEPT, "application/json")
|
||||
.form(&map)
|
||||
.send()
|
||||
.await?
|
||||
.json()
|
||||
.await?;
|
||||
|
||||
token.access_token
|
||||
}
|
||||
AuthProvider::Steam => {
|
||||
let mut form = HashMap::new();
|
||||
|
||||
let signed = query
|
||||
.get("openid.signed")
|
||||
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
|
||||
form.insert(
|
||||
"openid.assoc_handle".to_string(),
|
||||
&**query
|
||||
.get("openid.assoc_handle")
|
||||
.ok_or_else(|| AuthenticationError::InvalidCredentials)?,
|
||||
);
|
||||
form.insert("openid.signed".to_string(), &**signed);
|
||||
form.insert(
|
||||
"openid.sig".to_string(),
|
||||
&**query
|
||||
.get("openid.sig")
|
||||
.ok_or_else(|| AuthenticationError::InvalidCredentials)?,
|
||||
);
|
||||
form.insert("openid.ns".to_string(), "http://specs.openid.net/auth/2.0");
|
||||
form.insert("openid.mode".to_string(), "check_authentication");
|
||||
|
||||
for val in signed.split(',') {
|
||||
if let Some(arr_val) = query.get(&format!("openid.{}", val)) {
|
||||
form.insert(format!("openid.{}", val), &**arr_val);
|
||||
}
|
||||
}
|
||||
|
||||
let res = reqwest::Client::new()
|
||||
.post("https://steamcommunity.com/openid/login")
|
||||
.header("Accept-language", "en")
|
||||
.form(&form)
|
||||
.send()
|
||||
.await?
|
||||
.text()
|
||||
.await?;
|
||||
|
||||
if res.contains("is_valid:true") {
|
||||
let identity = query
|
||||
.get("openid.identity")
|
||||
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
|
||||
|
||||
identity
|
||||
.rsplit('/')
|
||||
.next()
|
||||
.ok_or_else(|| AuthenticationError::InvalidCredentials)?
|
||||
.to_string()
|
||||
} else {
|
||||
return Err(AuthenticationError::InvalidCredentials);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
Ok(res)
|
||||
}
|
||||
|
||||
pub async fn get_user(&self, token: &str) -> Result<TempUser, AuthenticationError> {
|
||||
let res = match self {
|
||||
AuthProvider::GitHub => {
|
||||
let response = reqwest::Client::new()
|
||||
.get("https://api.github.com/user")
|
||||
.header(reqwest::header::USER_AGENT, "Modrinth")
|
||||
.header(AUTHORIZATION, format!("token {token}"))
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
if token.starts_with("gho_") {
|
||||
let client_id = response
|
||||
.headers()
|
||||
.get("x-oauth-client-id")
|
||||
.and_then(|x| x.to_str().ok());
|
||||
|
||||
if client_id != Some(&*dotenvy::var("GITHUB_CLIENT_ID").unwrap()) {
|
||||
return Err(AuthenticationError::InvalidClientId);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct GitHubUser {
|
||||
pub login: String,
|
||||
pub id: u64,
|
||||
pub avatar_url: String,
|
||||
pub name: Option<String>,
|
||||
pub email: Option<String>,
|
||||
pub bio: Option<String>,
|
||||
}
|
||||
|
||||
let github_user: GitHubUser = response.json().await?;
|
||||
|
||||
TempUser {
|
||||
id: github_user.id.to_string(),
|
||||
username: github_user.login,
|
||||
email: github_user.email,
|
||||
avatar_url: Some(github_user.avatar_url),
|
||||
bio: github_user.bio,
|
||||
name: github_user.name,
|
||||
}
|
||||
}
|
||||
AuthProvider::Discord => {
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct DiscordUser {
|
||||
pub username: String,
|
||||
pub id: String,
|
||||
pub avatar: Option<String>,
|
||||
pub global_name: Option<String>,
|
||||
pub email: Option<String>,
|
||||
}
|
||||
|
||||
let discord_user: DiscordUser = reqwest::Client::new()
|
||||
.get("https://discord.com/api/v10/users/@me")
|
||||
.header(reqwest::header::USER_AGENT, "Modrinth")
|
||||
.header(AUTHORIZATION, format!("Bearer {token}"))
|
||||
.send()
|
||||
.await?
|
||||
.json()
|
||||
.await?;
|
||||
|
||||
let id = discord_user.id.clone();
|
||||
TempUser {
|
||||
id: discord_user.id,
|
||||
username: discord_user.username,
|
||||
email: discord_user.email,
|
||||
avatar_url: discord_user
|
||||
.avatar
|
||||
.map(|x| format!("https://cdn.discordapp.com/avatars/{}/{}.webp", id, x)),
|
||||
bio: None,
|
||||
name: discord_user.global_name,
|
||||
}
|
||||
}
|
||||
AuthProvider::Microsoft => {
|
||||
#[derive(Deserialize, Debug)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct MicrosoftUser {
|
||||
pub id: String,
|
||||
pub display_name: Option<String>,
|
||||
pub mail: Option<String>,
|
||||
pub user_principal_name: String,
|
||||
}
|
||||
|
||||
let microsoft_user: MicrosoftUser = reqwest::Client::new()
|
||||
.get("https://graph.microsoft.com/v1.0/me?$select=id,displayName,mail,userPrincipalName")
|
||||
.header(reqwest::header::USER_AGENT, "Modrinth")
|
||||
.header(AUTHORIZATION, format!("Bearer {token}"))
|
||||
.send()
|
||||
.await?.json().await?;
|
||||
|
||||
TempUser {
|
||||
id: microsoft_user.id,
|
||||
username: microsoft_user
|
||||
.user_principal_name
|
||||
.split('@')
|
||||
.next()
|
||||
.unwrap_or_default()
|
||||
.to_string(),
|
||||
email: microsoft_user.mail,
|
||||
avatar_url: None,
|
||||
bio: None,
|
||||
name: microsoft_user.display_name,
|
||||
}
|
||||
}
|
||||
AuthProvider::GitLab => {
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct GitLabUser {
|
||||
pub id: i32,
|
||||
pub username: String,
|
||||
pub email: Option<String>,
|
||||
pub avatar_url: Option<String>,
|
||||
pub name: Option<String>,
|
||||
pub bio: Option<String>,
|
||||
}
|
||||
|
||||
let gitlab_user: GitLabUser = reqwest::Client::new()
|
||||
.get("https://gitlab.com/api/v4/user")
|
||||
.header(reqwest::header::USER_AGENT, "Modrinth")
|
||||
.header(AUTHORIZATION, format!("Bearer {token}"))
|
||||
.send()
|
||||
.await?
|
||||
.json()
|
||||
.await?;
|
||||
|
||||
TempUser {
|
||||
id: gitlab_user.id.to_string(),
|
||||
username: gitlab_user.username,
|
||||
email: gitlab_user.email,
|
||||
avatar_url: gitlab_user.avatar_url,
|
||||
bio: gitlab_user.bio,
|
||||
name: gitlab_user.name,
|
||||
}
|
||||
}
|
||||
AuthProvider::Google => {
|
||||
#[derive(Deserialize, Debug)]
|
||||
pub struct GoogleUser {
|
||||
pub id: String,
|
||||
pub email: String,
|
||||
pub name: Option<String>,
|
||||
pub bio: Option<String>,
|
||||
pub picture: Option<String>,
|
||||
}
|
||||
|
||||
let google_user: GoogleUser = reqwest::Client::new()
|
||||
.get("https://www.googleapis.com/userinfo/v2/me")
|
||||
.header(reqwest::header::USER_AGENT, "Modrinth")
|
||||
.header(AUTHORIZATION, format!("Bearer {token}"))
|
||||
.send()
|
||||
.await?
|
||||
.json()
|
||||
.await?;
|
||||
|
||||
TempUser {
|
||||
id: google_user.id,
|
||||
username: google_user
|
||||
.email
|
||||
.split('@')
|
||||
.next()
|
||||
.unwrap_or_default()
|
||||
.to_string(),
|
||||
email: Some(google_user.email),
|
||||
avatar_url: google_user.picture,
|
||||
bio: None,
|
||||
name: google_user.name,
|
||||
}
|
||||
}
|
||||
AuthProvider::Steam => {
|
||||
let api_key = dotenvy::var("STEAM_API_KEY")?;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct SteamResponse {
|
||||
response: Players,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct Players {
|
||||
players: Vec<Player>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct Player {
|
||||
steamid: String,
|
||||
personaname: String,
|
||||
profileurl: String,
|
||||
avatar: Option<String>,
|
||||
}
|
||||
|
||||
let response: String = reqwest::get(
|
||||
&format!(
|
||||
"https://api.steampowered.com/ISteamUser/GetPlayerSummaries/v0002/?key={}&steamids={}",
|
||||
api_key,
|
||||
token
|
||||
)
|
||||
)
|
||||
.await?
|
||||
.text()
|
||||
.await?;
|
||||
|
||||
let mut response: SteamResponse = serde_json::from_str(&response)?;
|
||||
|
||||
if let Some(player) = response.response.players.pop() {
|
||||
let username = player
|
||||
.profileurl
|
||||
.trim_matches('/')
|
||||
.rsplit('/')
|
||||
.next()
|
||||
.unwrap_or(&player.steamid)
|
||||
.to_string();
|
||||
TempUser {
|
||||
id: player.steamid,
|
||||
username,
|
||||
email: None,
|
||||
avatar_url: player.avatar,
|
||||
bio: None,
|
||||
name: Some(player.personaname),
|
||||
}
|
||||
} else {
|
||||
return Err(AuthenticationError::InvalidCredentials);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
Ok(res)
|
||||
}
|
||||
|
||||
pub async fn get_user_id<'a, 'b, E>(
|
||||
&self,
|
||||
id: &str,
|
||||
executor: E,
|
||||
) -> Result<Option<crate::database::models::UserId>, AuthenticationError>
|
||||
where
|
||||
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
|
||||
{
|
||||
Ok(match self {
|
||||
AuthProvider::GitHub => {
|
||||
let value = sqlx::query!(
|
||||
"SELECT id FROM users WHERE github_id = $1",
|
||||
id.parse::<i64>()
|
||||
.map_err(|_| AuthenticationError::InvalidCredentials)?
|
||||
)
|
||||
.fetch_optional(executor)
|
||||
.await?;
|
||||
|
||||
value.map(|x| crate::database::models::UserId(x.id))
|
||||
}
|
||||
AuthProvider::Discord => {
|
||||
let value = sqlx::query!(
|
||||
"SELECT id FROM users WHERE discord_id = $1",
|
||||
id.parse::<i64>()
|
||||
.map_err(|_| AuthenticationError::InvalidCredentials)?
|
||||
)
|
||||
.fetch_optional(executor)
|
||||
.await?;
|
||||
|
||||
value.map(|x| crate::database::models::UserId(x.id))
|
||||
}
|
||||
AuthProvider::Microsoft => {
|
||||
let value = sqlx::query!("SELECT id FROM users WHERE microsoft_id = $1", id)
|
||||
.fetch_optional(executor)
|
||||
.await?;
|
||||
|
||||
value.map(|x| crate::database::models::UserId(x.id))
|
||||
}
|
||||
AuthProvider::GitLab => {
|
||||
let value = sqlx::query!(
|
||||
"SELECT id FROM users WHERE gitlab_id = $1",
|
||||
id.parse::<i64>()
|
||||
.map_err(|_| AuthenticationError::InvalidCredentials)?
|
||||
)
|
||||
.fetch_optional(executor)
|
||||
.await?;
|
||||
|
||||
value.map(|x| crate::database::models::UserId(x.id))
|
||||
}
|
||||
AuthProvider::Google => {
|
||||
let value = sqlx::query!("SELECT id FROM users WHERE google_id = $1", id)
|
||||
.fetch_optional(executor)
|
||||
.await?;
|
||||
|
||||
value.map(|x| crate::database::models::UserId(x.id))
|
||||
}
|
||||
AuthProvider::Steam => {
|
||||
let value = sqlx::query!(
|
||||
"SELECT id FROM users WHERE steam_id = $1",
|
||||
id.parse::<i64>()
|
||||
.map_err(|_| AuthenticationError::InvalidCredentials)?
|
||||
)
|
||||
.fetch_optional(executor)
|
||||
.await?;
|
||||
|
||||
value.map(|x| crate::database::models::UserId(x.id))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn as_str(&self) -> &'static str {
|
||||
match self {
|
||||
AuthProvider::GitHub => "github",
|
||||
AuthProvider::Discord => "discord",
|
||||
AuthProvider::Microsoft => "microsoft",
|
||||
AuthProvider::GitLab => "gitlab",
|
||||
AuthProvider::Google => "google",
|
||||
AuthProvider::Steam => "steam",
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_str(string: &str) -> AuthProvider {
|
||||
match string {
|
||||
"github" => AuthProvider::GitHub,
|
||||
"discord" => AuthProvider::Discord,
|
||||
"microsoft" => AuthProvider::Microsoft,
|
||||
"gitlab" => AuthProvider::GitLab,
|
||||
"google" => AuthProvider::Google,
|
||||
"steam" => AuthProvider::Steam,
|
||||
_ => AuthProvider::GitHub,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for AuthProvider {
|
||||
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
fmt.write_str(self.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct AuthorizationInit {
|
||||
pub url: String,
|
||||
#[serde(default)]
|
||||
pub provider: AuthProvider,
|
||||
}
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct Authorization {
|
||||
pub code: String,
|
||||
pub state: String,
|
||||
}
|
||||
|
||||
// Init link takes us to GitHub API and calls back to callback endpoint with a code and state
|
||||
// http://localhost:8000/auth/init?url=https://modrinth.com
|
||||
#[get("init")]
|
||||
pub async fn init(
|
||||
Query(info): Query<AuthorizationInit>, // callback url
|
||||
client: Data<PgPool>,
|
||||
) -> Result<HttpResponse, AuthenticationError> {
|
||||
let url = url::Url::parse(&info.url).map_err(|_| AuthenticationError::Url)?;
|
||||
|
||||
let allowed_callback_urls = parse_strings_from_var("ALLOWED_CALLBACK_URLS").unwrap_or_default();
|
||||
let domain = url.host_str().ok_or(AuthenticationError::Url)?;
|
||||
if !allowed_callback_urls.iter().any(|x| domain.ends_with(x)) && domain != "modrinth.com" {
|
||||
return Err(AuthenticationError::Url);
|
||||
}
|
||||
|
||||
let mut transaction = client.begin().await?;
|
||||
|
||||
let state = generate_state_id(&mut transaction).await?;
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
INSERT INTO states (id, url, provider)
|
||||
VALUES ($1, $2, $3)
|
||||
",
|
||||
state.0,
|
||||
info.url,
|
||||
info.provider.to_string()
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
transaction.commit().await?;
|
||||
|
||||
let url = info.provider.get_redirect_url(state)?;
|
||||
Ok(HttpResponse::TemporaryRedirect()
|
||||
.append_header(("Location", &*url))
|
||||
.json(serde_json::json!({ "url": url })))
|
||||
}
|
||||
|
||||
#[get("callback")]
|
||||
pub async fn auth_callback(
|
||||
req: HttpRequest,
|
||||
Query(query): Query<HashMap<String, String>>,
|
||||
client: Data<PgPool>,
|
||||
file_host: Data<Arc<dyn FileHost + Send + Sync>>,
|
||||
redis: Data<deadpool_redis::Pool>,
|
||||
) -> Result<HttpResponse, AuthenticationError> {
|
||||
let mut transaction = client.begin().await?;
|
||||
|
||||
let state = query
|
||||
.get("state")
|
||||
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
|
||||
let state_id: u64 = parse_base62(state)?;
|
||||
|
||||
let result_option = sqlx::query!(
|
||||
"
|
||||
SELECT url, expires, provider FROM states
|
||||
WHERE id = $1
|
||||
",
|
||||
state_id as i64
|
||||
)
|
||||
.fetch_optional(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
// Extract cookie header from request
|
||||
if let Some(result) = result_option {
|
||||
// Extract cookie header to get authenticated user from Minos
|
||||
let duration: chrono::Duration = result.expires - Utc::now();
|
||||
if duration.num_seconds() < 0 {
|
||||
return Err(AuthenticationError::InvalidCredentials);
|
||||
}
|
||||
sqlx::query!(
|
||||
"
|
||||
DELETE FROM states
|
||||
WHERE id = $1
|
||||
",
|
||||
state_id as i64
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
let provider = AuthProvider::from_str(&result.provider);
|
||||
|
||||
let token = provider.get_token(query).await?;
|
||||
let oauth_user = provider.get_user(&token).await?;
|
||||
let user_id = if let Some(user_id) = provider
|
||||
.get_user_id(&oauth_user.id, &mut *transaction)
|
||||
.await?
|
||||
{
|
||||
user_id
|
||||
} else {
|
||||
let user_id = crate::database::models::generate_user_id(&mut transaction).await?;
|
||||
|
||||
let mut username_increment: i32 = 0;
|
||||
let mut username = None;
|
||||
|
||||
while username.is_none() {
|
||||
let test_username = format!(
|
||||
"{}{}",
|
||||
oauth_user.username,
|
||||
if username_increment > 0 {
|
||||
username_increment.to_string()
|
||||
} else {
|
||||
"".to_string()
|
||||
}
|
||||
);
|
||||
|
||||
let new_id =
|
||||
crate::database::models::User::get(&test_username, &**client, &redis).await?;
|
||||
|
||||
if new_id.is_none() {
|
||||
username = Some(test_username);
|
||||
} else {
|
||||
username_increment += 1;
|
||||
}
|
||||
}
|
||||
|
||||
let avatar_url = if let Some(avatar_url) = oauth_user.avatar_url {
|
||||
let cdn_url = dotenvy::var("CDN_URL")?;
|
||||
|
||||
let res = reqwest::get(&avatar_url).await?;
|
||||
let headers = res.headers().clone();
|
||||
|
||||
let img_data = if let Some(content_type) = headers
|
||||
.get(reqwest::header::CONTENT_TYPE)
|
||||
.and_then(|ct| ct.to_str().ok())
|
||||
{
|
||||
get_image_ext(content_type).map(|ext| (ext, content_type))
|
||||
} else if let Some(ext) = avatar_url.rsplit('.').next() {
|
||||
get_image_content_type(ext).map(|content_type| (ext, content_type))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
if let Some((ext, content_type)) = img_data {
|
||||
let bytes = res.bytes().await?;
|
||||
let hash = sha1::Sha1::from(&bytes).hexdigest();
|
||||
|
||||
let upload_data = file_host
|
||||
.upload_file(
|
||||
content_type,
|
||||
&format!(
|
||||
"user/{}/{}.{}",
|
||||
crate::models::users::UserId::from(user_id),
|
||||
hash,
|
||||
ext
|
||||
),
|
||||
bytes,
|
||||
)
|
||||
.await?;
|
||||
|
||||
Some(format!("{}/{}", cdn_url, upload_data.file_name))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
if let Some(username) = username {
|
||||
crate::database::models::User {
|
||||
id: user_id,
|
||||
github_id: if provider == AuthProvider::GitHub {
|
||||
Some(
|
||||
oauth_user
|
||||
.id
|
||||
.clone()
|
||||
.parse()
|
||||
.map_err(|_| AuthenticationError::InvalidCredentials)?,
|
||||
)
|
||||
} else {
|
||||
None
|
||||
},
|
||||
discord_id: if provider == AuthProvider::Discord {
|
||||
Some(
|
||||
oauth_user
|
||||
.id
|
||||
.parse()
|
||||
.map_err(|_| AuthenticationError::InvalidCredentials)?,
|
||||
)
|
||||
} else {
|
||||
None
|
||||
},
|
||||
gitlab_id: if provider == AuthProvider::GitLab {
|
||||
Some(
|
||||
oauth_user
|
||||
.id
|
||||
.parse()
|
||||
.map_err(|_| AuthenticationError::InvalidCredentials)?,
|
||||
)
|
||||
} else {
|
||||
None
|
||||
},
|
||||
google_id: if provider == AuthProvider::Google {
|
||||
Some(oauth_user.id.clone())
|
||||
} else {
|
||||
None
|
||||
},
|
||||
steam_id: if provider == AuthProvider::Steam {
|
||||
Some(
|
||||
oauth_user
|
||||
.id
|
||||
.parse()
|
||||
.map_err(|_| AuthenticationError::InvalidCredentials)?,
|
||||
)
|
||||
} else {
|
||||
None
|
||||
},
|
||||
microsoft_id: if provider == AuthProvider::Microsoft {
|
||||
Some(oauth_user.id)
|
||||
} else {
|
||||
None
|
||||
},
|
||||
username,
|
||||
name: oauth_user.name,
|
||||
email: oauth_user.email,
|
||||
avatar_url,
|
||||
bio: oauth_user.bio,
|
||||
created: Utc::now(),
|
||||
role: Role::Developer.to_string(),
|
||||
badges: Badges::default(),
|
||||
balance: Decimal::ZERO,
|
||||
payout_wallet: None,
|
||||
payout_wallet_type: None,
|
||||
payout_address: None,
|
||||
}
|
||||
.insert(&mut transaction)
|
||||
.await?;
|
||||
|
||||
user_id
|
||||
} else {
|
||||
return Err(AuthenticationError::InvalidCredentials);
|
||||
}
|
||||
};
|
||||
|
||||
let session = issue_session(req, user_id, &mut transaction, &redis).await?;
|
||||
transaction.commit().await?;
|
||||
|
||||
let redirect_url = if result.url.contains('?') {
|
||||
format!("{}&code={}", result.url, session.session)
|
||||
} else {
|
||||
format!("{}?code={}", result.url, session.session)
|
||||
};
|
||||
|
||||
Ok(HttpResponse::TemporaryRedirect()
|
||||
.append_header(("Location", &*redirect_url))
|
||||
.json(serde_json::json!({ "url": redirect_url })))
|
||||
} else {
|
||||
Err(AuthenticationError::InvalidCredentials)
|
||||
}
|
||||
}
|
||||
src/auth/mod.rs (81, new file)
@@ -0,0 +1,81 @@
pub mod checks;
pub mod flows;
pub mod pat;
mod session;
pub mod validate;

pub use checks::{
    filter_authorized_projects, filter_authorized_versions, is_authorized, is_authorized_version,
};
pub use flows::config;
pub use pat::{generate_pat, get_user_from_pat, PersonalAccessToken};
pub use validate::{check_is_moderator_from_headers, get_user_from_headers};

use crate::file_hosting::FileHostingError;
use crate::models::error::ApiError;
use actix_web::http::StatusCode;
use actix_web::HttpResponse;
use thiserror::Error;

#[derive(Error, Debug)]
pub enum AuthenticationError {
    #[error("Environment Error")]
    Env(#[from] dotenvy::Error),
    #[error("An unknown database error occurred: {0}")]
    Sqlx(#[from] sqlx::Error),
    #[error("Database Error: {0}")]
    Database(#[from] crate::database::models::DatabaseError),
    #[error("Error while parsing JSON: {0}")]
    SerDe(#[from] serde_json::Error),
    #[error("Error while communicating to external oauth provider")]
    Reqwest(#[from] reqwest::Error),
    #[error("Error while decoding PAT: {0}")]
    Decoding(#[from] crate::models::ids::DecodingError),
    #[error("Invalid Authentication Credentials")]
    InvalidCredentials,
    #[error("Authentication method was not valid")]
    InvalidAuthMethod,
    #[error("GitHub Token from incorrect Client ID")]
    InvalidClientId,
    #[error("Invalid callback URL specified")]
    Url,
    #[error("Error uploading user profile picture")]
    FileHosting(#[from] FileHostingError),
}

impl actix_web::ResponseError for AuthenticationError {
    fn status_code(&self) -> StatusCode {
        match self {
            AuthenticationError::Env(..) => StatusCode::INTERNAL_SERVER_ERROR,
            AuthenticationError::Sqlx(..) => StatusCode::INTERNAL_SERVER_ERROR,
            AuthenticationError::Database(..) => StatusCode::INTERNAL_SERVER_ERROR,
            AuthenticationError::SerDe(..) => StatusCode::BAD_REQUEST,
            AuthenticationError::Reqwest(..) => StatusCode::INTERNAL_SERVER_ERROR,
            AuthenticationError::InvalidCredentials => StatusCode::UNAUTHORIZED,
            AuthenticationError::Decoding(..) => StatusCode::BAD_REQUEST,
            AuthenticationError::InvalidAuthMethod => StatusCode::UNAUTHORIZED,
            AuthenticationError::InvalidClientId => StatusCode::UNAUTHORIZED,
            AuthenticationError::Url => StatusCode::BAD_REQUEST,
            AuthenticationError::FileHosting(..) => StatusCode::INTERNAL_SERVER_ERROR,
        }
    }

    fn error_response(&self) -> HttpResponse {
        HttpResponse::build(self.status_code()).json(ApiError {
            error: match self {
                AuthenticationError::Env(..) => "environment_error",
                AuthenticationError::Sqlx(..) => "database_error",
                AuthenticationError::Database(..) => "database_error",
                AuthenticationError::SerDe(..) => "invalid_input",
                AuthenticationError::Reqwest(..) => "network_error",
                AuthenticationError::InvalidCredentials => "invalid_credentials",
                AuthenticationError::Decoding(..) => "decoding_error",
                AuthenticationError::InvalidAuthMethod => "invalid_auth_method",
                AuthenticationError::InvalidClientId => "invalid_client_id",
                AuthenticationError::Url => "url_error",
                AuthenticationError::FileHosting(..) => "file_hosting",
            },
            description: &self.to_string(),
        })
    }
}
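Because AuthenticationError implements ResponseError, a handler can return it directly and actix-web will emit the status code and JSON body mapped above. A minimal sketch with a hypothetical route:

    use actix_web::{get, web, HttpRequest, HttpResponse};

    // Hypothetical endpoint that only answers for moderators.
    #[get("moderation-ping")]
    async fn moderation_ping(
        req: HttpRequest,
        pool: web::Data<sqlx::PgPool>,
        redis: web::Data<deadpool_redis::Pool>,
    ) -> Result<HttpResponse, crate::auth::AuthenticationError> {
        // Any failure here becomes the JSON error body built in error_response().
        let user =
            crate::auth::check_is_moderator_from_headers(req.headers(), &**pool, &redis).await?;
        Ok(HttpResponse::Ok().json(serde_json::json!({ "moderator": user.username })))
    }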
@@ -1,11 +1,4 @@
/*!
Current edition of Ory kratos does not support PAT access of data, so this module is how we allow for PAT authentication.

Just as a summary: Don't implement this flow in your application!
*/

use super::auth::AuthenticationError;
use crate::auth::AuthenticationError;
use crate::database;
use crate::database::models::{DatabaseError, UserId};
use crate::models::users::{self, Badges, RecipientType, RecipientWallet};
@@ -35,11 +28,11 @@ where
    let row = sqlx::query!(
        "
        SELECT pats.expires_at,
        u.id, u.name, u.kratos_id, u.email, u.github_id,
        u.id, u.name, u.email,
        u.avatar_url, u.username, u.bio,
        u.created, u.role, u.badges,
        u.balance, u.payout_wallet, u.payout_wallet_type,
        u.payout_address
        u.balance, u.payout_wallet, u.payout_wallet_type, u.payout_address,
        github_id, discord_id, gitlab_id, google_id, steam_id, microsoft_id
        FROM pats LEFT OUTER JOIN users u ON pats.user_id = u.id
        WHERE access_token = $1
        ",
@@ -54,9 +47,13 @@ where

        return Ok(Some(database::models::User {
            id: UserId(row.id),
            kratos_id: row.kratos_id,
            name: row.name,
            github_id: row.github_id,
            discord_id: row.discord_id,
            gitlab_id: row.gitlab_id,
            google_id: row.google_id,
            steam_id: row.steam_id,
            microsoft_id: row.microsoft_id,
            email: row.email,
            avatar_url: row.avatar_url,
            username: row.username,
src/auth/session.rs (80, new file)
@@ -0,0 +1,80 @@
use crate::auth::AuthenticationError;
use crate::database::models::session_item::{Session, SessionBuilder};
use crate::database::models::UserId;
use crate::util::env::parse_var;
use actix_web::HttpRequest;
use rand::distributions::Alphanumeric;
use rand::{Rng, SeedableRng};
use rand_chacha::ChaCha20Rng;
use woothee::parser::Parser;

pub async fn issue_session(
    req: HttpRequest,
    user_id: UserId,
    transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
    redis: &deadpool_redis::Pool,
) -> Result<Session, AuthenticationError> {
    let conn_info = req.connection_info().clone();
    let ip_addr = if parse_var("CLOUDFLARE_INTEGRATION").unwrap_or(false) {
        if let Some(header) = req.headers().get("CF-Connecting-IP") {
            header.to_str().ok()
        } else {
            conn_info.peer_addr()
        }
    } else {
        conn_info.peer_addr()
    };

    let country = req
        .headers()
        .get("cf-ipcountry")
        .and_then(|x| x.to_str().ok());
    let city = req.headers().get("cf-ipcity").and_then(|x| x.to_str().ok());

    let user_agent = req
        .headers()
        .get("user-agent")
        .and_then(|x| x.to_str().ok())
        .ok_or_else(|| AuthenticationError::InvalidCredentials)?;

    let parser = Parser::new();
    let info = parser.parse(user_agent);
    let os = if let Some(info) = info {
        Some((info.os, info.name))
    } else {
        None
    };

    let session = ChaCha20Rng::from_entropy()
        .sample_iter(&Alphanumeric)
        .take(60)
        .map(char::from)
        .collect::<String>();

    let session = format!("mra_{session}");

    let id = SessionBuilder {
        session,
        user_id,
        os: os.map(|x| x.0.to_string()),
        platform: os.map(|x| x.1.to_string()),
        city: city.map(|x| x.to_string()),
        country: country.map(|x| x.to_string()),
        ip: ip_addr
            .ok_or_else(|| AuthenticationError::InvalidCredentials)?
            .to_string(),
        user_agent: user_agent.to_string(),
    }
    .insert(transaction)
    .await?;

    let session = Session::get_id(id, &mut *transaction, redis)
        .await?
        .ok_or_else(|| AuthenticationError::InvalidCredentials)?;

    Ok(session)
}

// TODO: List user sessions route
// TODO: Delete User Session Route / logout
// TODO: Refresh session route
src/auth/validate.rs (108, new file)
@@ -0,0 +1,108 @@
use crate::auth::flows::AuthProvider;
use crate::auth::get_user_from_pat;
use crate::auth::AuthenticationError;
use crate::database::models::user_item;
use crate::models::users::{Role, User, UserId, UserPayoutData};
use actix_web::http::header::HeaderMap;
use reqwest::header::{HeaderValue, AUTHORIZATION};

pub async fn get_user_from_headers<'a, E>(
    headers: &HeaderMap,
    executor: E,
    redis: &deadpool_redis::Pool,
) -> Result<User, AuthenticationError>
where
    E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
    let token: Option<&HeaderValue> = headers.get(AUTHORIZATION);

    // Fetch DB user record and minos user from headers
    let db_user = get_user_record_from_bearer_token(
        token
            .ok_or_else(|| AuthenticationError::InvalidAuthMethod)?
            .to_str()
            .map_err(|_| AuthenticationError::InvalidCredentials)?,
        executor,
        redis,
    )
    .await?
    .ok_or_else(|| AuthenticationError::InvalidCredentials)?;

    let user = User {
        id: UserId::from(db_user.id),
        github_id: db_user.github_id.map(|x| x as u64),
        // discord_id: minos_user.discord_id,
        // google_id: minos_user.google_id,
        // microsoft_id: minos_user.microsoft_id,
        // apple_id: minos_user.apple_id,
        // gitlab_id: minos_user.gitlab_id,
        username: db_user.username,
        name: db_user.name,
        email: db_user.email,
        avatar_url: db_user.avatar_url,
        bio: db_user.bio,
        created: db_user.created,
        role: Role::from_string(&db_user.role),
        badges: db_user.badges,
        payout_data: Some(UserPayoutData {
            balance: db_user.balance,
            payout_wallet: db_user.payout_wallet,
            payout_wallet_type: db_user.payout_wallet_type,
            payout_address: db_user.payout_address,
        }),
    };
    Ok(user)
}

pub async fn get_user_record_from_bearer_token<'a, 'b, E>(
    token: &str,
    executor: E,
    redis: &deadpool_redis::Pool,
) -> Result<Option<user_item::User>, AuthenticationError>
where
    E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
    let token: &str = token.trim_start_matches("Bearer ");

    let possible_user = match token.split_once('_') {
        Some(("modrinth", _)) => get_user_from_pat(token, executor).await?,
        Some(("mra", _)) => {
            let session =
                crate::database::models::session_item::Session::get(token, executor, redis)
                    .await?
                    .ok_or_else(|| AuthenticationError::InvalidCredentials)?;

            user_item::User::get_id(session.user_id, executor, redis).await?
        }
        Some(("github", _)) | Some(("gho", _)) | Some(("ghp", _)) => {
            let user = AuthProvider::GitHub.get_user(token).await?;
            let id = AuthProvider::GitHub.get_user_id(&user.id, executor).await?;

            user_item::User::get_id(
                id.ok_or_else(|| AuthenticationError::InvalidCredentials)?,
                executor,
                redis,
            )
            .await?
        }
        _ => return Err(AuthenticationError::InvalidAuthMethod),
    };
    Ok(possible_user)
}

pub async fn check_is_moderator_from_headers<'a, 'b, E>(
    headers: &HeaderMap,
    executor: E,
    redis: &deadpool_redis::Pool,
) -> Result<User, AuthenticationError>
where
    E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
    let user = get_user_from_headers(headers, executor, redis).await?;

    if user.role.is_mod() {
        Ok(user)
    } else {
        Err(AuthenticationError::InvalidCredentials)
    }
}
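The prefix dispatch above is the whole contract for callers: the segment before the first underscore decides how a bearer token is resolved. A dependency-free illustration (not code from this commit):

    // Mirrors the match in get_user_record_from_bearer_token, without touching the database.
    fn classify(token: &str) -> &'static str {
        match token.trim_start_matches("Bearer ").split_once('_') {
            Some(("modrinth", _)) => "personal access token",
            Some(("mra", _)) => "session issued by the auth callback",
            Some(("github", _)) | Some(("gho", _)) | Some(("ghp", _)) => "legacy GitHub token",
            _ => "rejected (InvalidAuthMethod)",
        }
    }

    fn main() {
        assert_eq!(classify("Bearer mra_abc123"), "session issued by the auth callback");
        assert_eq!(classify("ghp_xyz"), "legacy GitHub token");
        println!("all token prefixes classified");
    }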
@@ -2,7 +2,7 @@ use super::DatabaseError;
use crate::models::ids::base62_impl::to_base62;
use crate::models::ids::random_base62_rng;
use censor::Censor;
use serde::Deserialize;
use serde::{Deserialize, Serialize};
use sqlx::sqlx_macros::Type;

const ID_RETRY_COUNT: usize = 20;
@@ -129,35 +129,43 @@ generate_ids!(
    ThreadMessageId
);

#[derive(Copy, Clone, Debug, PartialEq, Eq, Type, Deserialize)]
generate_ids!(
    pub generate_session_id,
    SessionId,
    8,
    "SELECT EXISTS(SELECT 1 FROM sessions WHERE id=$1)",
    SessionId
);

#[derive(Copy, Clone, Debug, PartialEq, Eq, Type, Serialize, Deserialize)]
#[sqlx(transparent)]
pub struct UserId(pub i64);

#[derive(Copy, Clone, Debug, Type, Eq, PartialEq)]
#[derive(Copy, Clone, Debug, Type, Eq, PartialEq, Serialize, Deserialize)]
#[sqlx(transparent)]
pub struct TeamId(pub i64);
#[derive(Copy, Clone, Debug, Type)]
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize)]
#[sqlx(transparent)]
pub struct TeamMemberId(pub i64);

#[derive(Copy, Clone, Debug, Type, PartialEq, Eq, Deserialize, Hash)]
#[derive(Copy, Clone, Debug, Type, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[sqlx(transparent)]
pub struct ProjectId(pub i64);
#[derive(Copy, Clone, Debug, Type)]
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize)]
#[sqlx(transparent)]
pub struct ProjectTypeId(pub i32);

#[derive(Copy, Clone, Debug, Type)]
#[sqlx(transparent)]
pub struct StatusId(pub i32);
#[derive(Copy, Clone, Debug, Type)]
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize)]
#[sqlx(transparent)]
pub struct SideTypeId(pub i32);
#[derive(Copy, Clone, Debug, Type, Deserialize)]
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize)]
#[sqlx(transparent)]
pub struct DonationPlatformId(pub i32);

#[derive(Copy, Clone, Debug, Type, PartialEq, Eq, Hash, Deserialize)]
#[derive(Copy, Clone, Debug, Type, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[sqlx(transparent)]
pub struct VersionId(pub i64);
#[derive(Copy, Clone, Debug, Type, Deserialize)]
@@ -177,7 +185,7 @@ pub struct ReportId(pub i64);
#[sqlx(transparent)]
pub struct ReportTypeId(pub i32);

#[derive(Copy, Clone, Debug, Type, Hash, Eq, PartialEq, Deserialize)]
#[derive(Copy, Clone, Debug, Type, Hash, Eq, PartialEq, Deserialize, Serialize)]
#[sqlx(transparent)]
pub struct FileId(pub i64);
@@ -196,13 +204,17 @@ pub struct NotificationId(pub i64);
#[sqlx(transparent)]
pub struct NotificationActionId(pub i32);

#[derive(Copy, Clone, Debug, Type, Deserialize, Eq, PartialEq)]
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq)]
#[sqlx(transparent)]
pub struct ThreadId(pub i64);
#[derive(Copy, Clone, Debug, Type, Deserialize)]
#[sqlx(transparent)]
pub struct ThreadMessageId(pub i64);

#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize)]
#[sqlx(transparent)]
pub struct SessionId(pub i64);

use crate::models::ids;

impl From<ids::ProjectId> for ProjectId {
@@ -285,3 +297,8 @@ impl From<ThreadMessageId> for ids::ThreadMessageId {
        ids::ThreadMessageId(id.0 as u64)
    }
}
impl From<SessionId> for ids::SessionId {
    fn from(id: SessionId) -> Self {
        ids::SessionId(id.0 as u64)
    }
}

@@ -5,6 +5,7 @@ pub mod ids;
pub mod notification_item;
pub mod project_item;
pub mod report_item;
pub mod session_item;
pub mod team_item;
pub mod thread_item;
pub mod user_item;
@@ -21,11 +22,13 @@ pub use version_item::Version;
|
||||
#[derive(Error, Debug)]
|
||||
pub enum DatabaseError {
|
||||
#[error("Error while interacting with the database: {0}")]
|
||||
Database(#[from] sqlx::error::Error),
|
||||
Database(#[from] sqlx::Error),
|
||||
#[error("Error while trying to generate random ID")]
|
||||
RandomId,
|
||||
#[error("A database request failed")]
|
||||
Other(String),
|
||||
#[error("Error while parsing JSON: {0}")]
|
||||
Json(#[from] serde_json::Error),
|
||||
#[error("Error while interacting with the cache: {0}")]
|
||||
CacheError(#[from] redis::RedisError),
|
||||
#[error("Redis Pool Error: {0}")]
|
||||
RedisPool(#[from] deadpool_redis::PoolError),
|
||||
#[error("Error while serializing with the cache: {0}")]
|
||||
SerdeCacheError(#[from] serde_json::Error),
|
||||
}
|
||||
|
||||
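The `#[from]` conversions on the cache-related variants are what let the new Redis code below propagate failures with `?` alone. A minimal sketch, assuming the variants shown above; the helper name and its arguments are hypothetical and not part of this diff:

// Hypothetical helper, for illustration only: each `?` relies on a
// #[from] conversion declared on DatabaseError above.
async fn touch_cache_key(
    pool: &sqlx::PgPool,
    redis: &deadpool_redis::Pool,
    key: &str,
) -> Result<(), DatabaseError> {
    // deadpool_redis::PoolError -> DatabaseError::RedisPool
    let mut conn = redis.get().await?;
    // redis::RedisError -> DatabaseError::CacheError
    redis::cmd("EXPIRE")
        .arg(key)
        .arg(1800)
        .query_async::<_, ()>(&mut conn)
        .await?;
    // sqlx::Error -> DatabaseError::Database
    sqlx::query("SELECT 1").execute(pool).await?;
    Ok(())
}
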
File diff suppressed because it is too large
Load Diff
312
src/database/models/session_item.rs
Normal file
@@ -0,0 +1,312 @@
use super::ids::*;
use crate::database::models::DatabaseError;
use crate::models::ids::base62_impl::{parse_base62, to_base62};
use chrono::{DateTime, Utc};
use redis::cmd;
use serde::{Deserialize, Serialize};

const SESSIONS_NAMESPACE: &str = "sessions";
const SESSIONS_IDS_NAMESPACE: &str = "sessions_ids";
const SESSIONS_USERS_NAMESPACE: &str = "sessions_users";
const DEFAULT_EXPIRY: i64 = 1800; // 30 minutes

// TODO: Manage sessions cache + clear cache when needed

pub struct SessionBuilder {
    pub session: String,
    pub user_id: UserId,

    pub os: Option<String>,
    pub platform: Option<String>,

    pub city: Option<String>,
    pub country: Option<String>,

    pub ip: String,
    pub user_agent: String,
}

impl SessionBuilder {
    pub async fn insert(
        &self,
        transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
    ) -> Result<SessionId, DatabaseError> {
        let id = generate_session_id(&mut *transaction).await?;

        sqlx::query!(
            "
            INSERT INTO sessions (
                id, session, user_id, os, platform,
                city, country, ip, user_agent
            )
            VALUES (
                $1, $2, $3, $4, $5,
                $6, $7, $8, $9
            )
            ",
            id as SessionId,
            self.session,
            self.user_id as UserId,
            self.os,
            self.platform,
            self.city,
            self.country,
            self.ip,
            self.user_agent,
        )
        .execute(&mut *transaction)
        .await?;

        Ok(id)
    }
}

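A hedged sketch of how a caller might use SessionBuilder::insert inside a transaction; the surrounding handler, pool handle, and request metadata are placeholders, not part of this diff:

// Illustrative only: persist a freshly issued session token for a user.
async fn store_session(
    pool: &sqlx::PgPool,
    user_id: UserId,
    token: String,
) -> Result<SessionId, DatabaseError> {
    let mut transaction = pool.begin().await?;
    let session_id = SessionBuilder {
        session: token,
        user_id,
        os: None,
        platform: None,
        city: None,
        country: None,
        ip: "127.0.0.1".to_string(),       // placeholder request metadata
        user_agent: "unknown".to_string(), // placeholder request metadata
    }
    .insert(&mut transaction)
    .await?;
    transaction.commit().await?;
    Ok(session_id)
}
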
#[derive(Deserialize, Serialize)]
pub struct Session {
    pub id: SessionId,
    pub session: String,
    pub user_id: UserId,

    pub created: DateTime<Utc>,
    pub last_login: DateTime<Utc>,
    pub expires: DateTime<Utc>,
    pub refresh_expires: DateTime<Utc>,

    pub os: Option<String>,
    pub platform: Option<String>,
    pub user_agent: String,

    pub city: Option<String>,
    pub country: Option<String>,
    pub ip: String,
}

impl Session {
|
||||
pub async fn get<'a, E, T: ToString>(
|
||||
id: T,
|
||||
exec: E,
|
||||
redis: &deadpool_redis::Pool,
|
||||
) -> Result<Option<Session>, DatabaseError>
|
||||
where
|
||||
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
|
||||
{
|
||||
Self::get_many(&[id], exec, redis)
|
||||
.await
|
||||
.map(|x| x.into_iter().next())
|
||||
}
|
||||
|
||||
pub async fn get_id<'a, 'b, E>(
|
||||
id: SessionId,
|
||||
executor: E,
|
||||
redis: &deadpool_redis::Pool,
|
||||
) -> Result<Option<Session>, DatabaseError>
|
||||
where
|
||||
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
|
||||
{
|
||||
Session::get_many(&[crate::models::ids::SessionId::from(id)], executor, redis)
|
||||
.await
|
||||
.map(|x| x.into_iter().next())
|
||||
}
|
||||
|
||||
pub async fn get_many_ids<'a, E>(
|
||||
user_ids: &[SessionId],
|
||||
exec: E,
|
||||
redis: &deadpool_redis::Pool,
|
||||
) -> Result<Vec<Session>, DatabaseError>
|
||||
where
|
||||
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
|
||||
{
|
||||
let ids = user_ids
|
||||
.iter()
|
||||
.map(|x| crate::models::ids::SessionId::from(*x))
|
||||
.collect::<Vec<_>>();
|
||||
Session::get_many(&ids, exec, redis).await
|
||||
}
|
||||
|
||||
pub async fn get_many<'a, E, T: ToString>(
|
||||
session_strings: &[T],
|
||||
exec: E,
|
||||
redis: &deadpool_redis::Pool,
|
||||
) -> Result<Vec<Session>, DatabaseError>
|
||||
where
|
||||
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
|
||||
{
|
||||
use futures::TryStreamExt;
|
||||
|
||||
if session_strings.is_empty() {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
|
||||
let mut redis = redis.get().await?;
|
||||
|
||||
let mut found_sessions = Vec::new();
|
||||
let mut remaining_strings = session_strings
|
||||
.iter()
|
||||
.map(|x| x.to_string())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let mut session_ids = session_strings
|
||||
.iter()
|
||||
.flat_map(|x| parse_base62(&x.to_string()).map(|x| x as i64))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
session_ids.append(
|
||||
&mut cmd("MGET")
|
||||
.arg(
|
||||
session_strings
|
||||
.iter()
|
||||
.map(|x| format!("{}:{}", SESSIONS_IDS_NAMESPACE, x.to_string()))
|
||||
.collect::<Vec<_>>(),
|
||||
)
|
||||
.query_async::<_, Vec<Option<i64>>>(&mut redis)
|
||||
.await?
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.collect(),
|
||||
);
|
||||
|
||||
if !session_ids.is_empty() {
|
||||
let sessions = cmd("MGET")
|
||||
.arg(
|
||||
session_ids
|
||||
.iter()
|
||||
.map(|x| format!("{}:{}", SESSIONS_NAMESPACE, x))
|
||||
.collect::<Vec<_>>(),
|
||||
)
|
||||
.query_async::<_, Vec<Option<String>>>(&mut redis)
|
||||
.await?;
|
||||
|
||||
for session in sessions {
|
||||
if let Some(session) =
|
||||
session.and_then(|x| serde_json::from_str::<Session>(&x).ok())
|
||||
{
|
||||
remaining_strings
|
||||
.retain(|x| &to_base62(session.id.0 as u64) != x && &session.session != x);
|
||||
found_sessions.push(session);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !remaining_strings.is_empty() {
|
||||
let session_ids_parsed: Vec<i64> = session_strings
|
||||
.iter()
|
||||
.flat_map(|x| parse_base62(&x.to_string()).ok())
|
||||
.map(|x| x as i64)
|
||||
.collect();
|
||||
let db_sessions: Vec<Session> = sqlx::query!(
|
||||
"
|
||||
SELECT id, user_id, session, created, last_login, expires, refresh_expires, os, platform,
|
||||
city, country, ip, user_agent
|
||||
FROM sessions
|
||||
WHERE id = ANY($1) OR session = ANY($2)
|
||||
ORDER BY created DESC
|
||||
",
|
||||
&session_ids_parsed,
|
||||
&session_strings.into_iter().map(|x| x.to_string()).collect::<Vec<_>>(),
|
||||
)
|
||||
.fetch_many(exec)
|
||||
.try_filter_map(|e| async {
|
||||
Ok(e.right().map(|x| Session {
|
||||
id: SessionId(x.id),
|
||||
session: x.session,
|
||||
user_id: UserId(x.user_id),
|
||||
created: x.created,
|
||||
last_login: x.last_login,
|
||||
expires: x.expires,
|
||||
refresh_expires: x.refresh_expires,
|
||||
os: x.os,
|
||||
platform: x.platform,
|
||||
city: x.city,
|
||||
country: x.country,
|
||||
ip: x.ip,
|
||||
user_agent: x.user_agent,
|
||||
}))
|
||||
})
|
||||
.try_collect::<Vec<Session>>()
|
||||
.await?;
|
||||
|
||||
for session in db_sessions {
|
||||
cmd("SET")
|
||||
.arg(format!("{}:{}", SESSIONS_NAMESPACE, session.id.0))
|
||||
.arg(serde_json::to_string(&session)?)
|
||||
.arg("EX")
|
||||
.arg(DEFAULT_EXPIRY)
|
||||
.query_async::<_, ()>(&mut redis)
|
||||
.await?;
|
||||
|
||||
cmd("SET")
|
||||
.arg(format!("{}:{}", SESSIONS_IDS_NAMESPACE, session.session))
|
||||
.arg(session.id.0)
|
||||
.arg("EX")
|
||||
.arg(DEFAULT_EXPIRY)
|
||||
.query_async::<_, ()>(&mut redis)
|
||||
.await?;
|
||||
found_sessions.push(session);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(found_sessions)
|
||||
}
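
get_many above batches its lookups with MGET; the sketch below reduces the same cache-aside shape to a single key, which may be easier to follow. The key layout and DEFAULT_EXPIRY come from the constants at the top of this file, while fetch_session_from_db stands in for the SQL fallback and is hypothetical:

// Illustrative only: read-through cache for one session row.
async fn cached_session(
    session_id: i64,
    pool: &sqlx::PgPool,
    redis: &deadpool_redis::Pool,
) -> Result<Option<Session>, DatabaseError> {
    let mut conn = redis.get().await?;

    // 1. Cache hit: rows are stored as JSON under "sessions:<id>".
    let cached: Option<String> = cmd("GET")
        .arg(format!("{}:{}", SESSIONS_NAMESPACE, session_id))
        .query_async(&mut conn)
        .await?;
    if let Some(session) = cached.and_then(|x| serde_json::from_str::<Session>(&x).ok()) {
        return Ok(Some(session));
    }

    // 2. Cache miss: fall back to Postgres (hypothetical helper, not shown).
    let session = match fetch_session_from_db(session_id, pool).await? {
        Some(x) => x,
        None => return Ok(None),
    };

    // 3. Repopulate the cache with the same 30-minute TTL used above.
    cmd("SET")
        .arg(format!("{}:{}", SESSIONS_NAMESPACE, session_id))
        .arg(serde_json::to_string(&session)?)
        .arg("EX")
        .arg(DEFAULT_EXPIRY)
        .query_async::<_, ()>(&mut conn)
        .await?;

    Ok(Some(session))
}
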
pub async fn get_user_sessions<'a, E>(
|
||||
user_id: UserId,
|
||||
exec: E,
|
||||
redis: &deadpool_redis::Pool,
|
||||
) -> Result<Vec<SessionId>, DatabaseError>
|
||||
where
|
||||
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
|
||||
{
|
||||
let mut redis = redis.get().await?;
|
||||
let res = cmd("GET")
|
||||
.arg(format!("{}:{}", SESSIONS_USERS_NAMESPACE, user_id.0))
|
||||
.query_async::<_, Option<Vec<i64>>>(&mut redis)
|
||||
.await?;
|
||||
|
||||
if let Some(res) = res {
|
||||
return Ok(res.into_iter().map(SessionId).collect());
|
||||
}
|
||||
|
||||
use futures::TryStreamExt;
|
||||
let db_sessions: Vec<SessionId> = sqlx::query!(
|
||||
"
|
||||
SELECT id
|
||||
FROM sessions
|
||||
WHERE user_id = $1
|
||||
ORDER BY created DESC
|
||||
",
|
||||
user_id.0,
|
||||
)
|
||||
.fetch_many(exec)
|
||||
.try_filter_map(|e| async { Ok(e.right().map(|x| SessionId(x.id))) })
|
||||
.try_collect::<Vec<SessionId>>()
|
||||
.await?;
|
||||
|
||||
cmd("SET")
|
||||
.arg(format!("{}:{}", SESSIONS_USERS_NAMESPACE, user_id.0))
|
||||
.arg(serde_json::to_string(&db_sessions)?)
|
||||
.arg("EX")
|
||||
.arg(DEFAULT_EXPIRY)
|
||||
.query_async::<_, ()>(&mut redis)
|
||||
.await?;
|
||||
|
||||
Ok(db_sessions)
|
||||
}
|
||||
|
||||
pub async fn remove(
|
||||
id: SessionId,
|
||||
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
|
||||
// redis: &deadpool_redis::Pool,
|
||||
) -> Result<Option<()>, sqlx::error::Error> {
|
||||
sqlx::query!(
|
||||
"
|
||||
DELETE FROM sessions WHERE id = $1
|
||||
",
|
||||
id as SessionId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
Ok(Some(()))
|
||||
}
|
||||
}
|
||||
@@ -1,8 +1,14 @@
use super::ids::*;
use crate::database::models::User;
use crate::models::teams::Permissions;
use crate::models::users::{Badges, RecipientType, RecipientWallet};
use crate::models::users::Badges;
use itertools::Itertools;
use redis::cmd;
use rust_decimal::Decimal;
use serde::{Deserialize, Serialize};

const TEAMS_NAMESPACE: &str = "teams";
const DEFAULT_EXPIRY: i64 = 1800;

pub struct TeamBuilder {
|
||||
pub members: Vec<TeamMemberBuilder>,
|
||||
@@ -90,6 +96,7 @@ pub struct TeamMember {
|
||||
}
|
||||
|
||||
/// A member of a team
|
||||
#[derive(Deserialize, Serialize)]
|
||||
pub struct QueryTeamMember {
|
||||
pub id: TeamMemberId,
|
||||
pub team_id: TeamId,
|
||||
@@ -107,81 +114,139 @@ impl TeamMember {
|
||||
pub async fn get_from_team_full<'a, 'b, E>(
|
||||
id: TeamId,
|
||||
executor: E,
|
||||
redis: &deadpool_redis::Pool,
|
||||
) -> Result<Vec<QueryTeamMember>, super::DatabaseError>
|
||||
where
|
||||
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
|
||||
{
|
||||
Self::get_from_team_full_many(&[id], executor).await
|
||||
Self::get_from_team_full_many(&[id], executor, redis).await
|
||||
}
|
||||
|
||||
pub async fn get_from_team_full_many<'a, E>(
|
||||
team_ids: &[TeamId],
|
||||
exec: E,
|
||||
redis: &deadpool_redis::Pool,
|
||||
) -> Result<Vec<QueryTeamMember>, super::DatabaseError>
|
||||
where
|
||||
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
|
||||
{
|
||||
if team_ids.is_empty() {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
|
||||
use futures::stream::TryStreamExt;
|
||||
|
||||
let team_ids_parsed: Vec<i64> = team_ids.iter().map(|x| x.0).collect();
|
||||
let mut team_ids_parsed: Vec<i64> = team_ids.iter().map(|x| x.0).collect();
|
||||
|
||||
let teams = sqlx::query!(
|
||||
"
|
||||
SELECT tm.id id, tm.team_id team_id, tm.role member_role, tm.permissions permissions, tm.accepted accepted, tm.payouts_split payouts_split, tm.ordering,
|
||||
u.id user_id, u.name user_name, u.email email, u.kratos_id kratos_id, u.github_id github_id,
|
||||
u.avatar_url avatar_url, u.username username, u.bio bio,
|
||||
u.created created, u.role user_role, u.badges badges, u.balance balance,
|
||||
u.payout_wallet payout_wallet, u.payout_wallet_type payout_wallet_type,
|
||||
u.payout_address payout_address
|
||||
FROM team_members tm
|
||||
INNER JOIN users u ON u.id = tm.user_id
|
||||
WHERE tm.team_id = ANY($1)
|
||||
ORDER BY tm.team_id, tm.ordering
|
||||
",
|
||||
&team_ids_parsed
|
||||
)
|
||||
.fetch_many(exec)
|
||||
.try_filter_map(|e| async {
|
||||
if let Some(m) = e.right() {
|
||||
let mut redis = redis.get().await?;
|
||||
|
||||
Ok(Some(Ok(QueryTeamMember {
|
||||
id: TeamMemberId(m.id),
|
||||
team_id: TeamId(m.team_id),
|
||||
role: m.member_role,
|
||||
permissions: Permissions::from_bits(m.permissions as u64).unwrap_or_default(),
|
||||
accepted: m.accepted,
|
||||
user: User {
|
||||
id: UserId(m.user_id),
|
||||
github_id: m.github_id,
|
||||
kratos_id: m.kratos_id,
|
||||
name: m.user_name,
|
||||
email: m.email,
|
||||
avatar_url: m.avatar_url,
|
||||
username: m.username,
|
||||
bio: m.bio,
|
||||
created: m.created,
|
||||
role: m.user_role,
|
||||
badges: Badges::from_bits(m.badges as u64).unwrap_or_default(),
|
||||
balance: m.balance,
|
||||
payout_wallet: m.payout_wallet.map(|x| RecipientWallet::from_string(&x)),
|
||||
payout_wallet_type: m.payout_wallet_type.map(|x| RecipientType::from_string(&x)),
|
||||
payout_address: m.payout_address,
|
||||
},
|
||||
payouts_split: m.payouts_split,
|
||||
ordering: m.ordering,
|
||||
})))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
})
|
||||
.try_collect::<Vec<Result<QueryTeamMember, super::DatabaseError>>>()
|
||||
.await?;
|
||||
let mut found_teams = Vec::new();
|
||||
|
||||
let team_members = teams
|
||||
.into_iter()
|
||||
.collect::<Result<Vec<QueryTeamMember>, super::DatabaseError>>()?;
|
||||
let teams = cmd("MGET")
|
||||
.arg(
|
||||
team_ids_parsed
|
||||
.iter()
|
||||
.map(|x| format!("{}:{}", TEAMS_NAMESPACE, x))
|
||||
.collect::<Vec<_>>(),
|
||||
)
|
||||
.query_async::<_, Vec<Option<String>>>(&mut redis)
|
||||
.await?;
|
||||
|
||||
Ok(team_members)
|
||||
for team_raw in teams {
|
||||
if let Some(mut team) = team_raw
|
||||
.clone()
|
||||
.and_then(|x| serde_json::from_str::<Vec<QueryTeamMember>>(&x).ok())
|
||||
{
|
||||
if let Some(team_id) = team.first().map(|x| x.team_id) {
|
||||
team_ids_parsed.retain(|x| &team_id.0 != x);
|
||||
}
|
||||
|
||||
found_teams.append(&mut team);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
if !team_ids_parsed.is_empty() {
|
||||
let teams: Vec<QueryTeamMember> = sqlx::query!(
|
||||
"
|
||||
SELECT tm.id id, tm.team_id team_id, tm.role member_role, tm.permissions permissions, tm.accepted accepted, tm.payouts_split payouts_split, tm.ordering,
|
||||
u.id user_id, u.name user_name,
|
||||
u.avatar_url avatar_url, u.username username, u.bio bio,
|
||||
u.created created, u.role user_role, u.badges badges
|
||||
FROM team_members tm
|
||||
INNER JOIN users u ON u.id = tm.user_id
|
||||
WHERE tm.team_id = ANY($1)
|
||||
ORDER BY tm.team_id, tm.ordering
|
||||
",
|
||||
&team_ids_parsed
|
||||
)
|
||||
.fetch_many(exec)
|
||||
.try_filter_map(|e| async {
|
||||
Ok(e.right().map(|m|
|
||||
QueryTeamMember {
|
||||
id: TeamMemberId(m.id),
|
||||
team_id: TeamId(m.team_id),
|
||||
role: m.member_role,
|
||||
permissions: Permissions::from_bits(m.permissions as u64).unwrap_or_default(),
|
||||
accepted: m.accepted,
|
||||
user: User {
|
||||
id: UserId(m.user_id),
|
||||
github_id: None,
|
||||
discord_id: None,
|
||||
gitlab_id: None,
|
||||
google_id: None,
|
||||
steam_id: None,
|
||||
name: m.user_name,
|
||||
email: None,
|
||||
avatar_url: m.avatar_url,
|
||||
username: m.username,
|
||||
bio: m.bio,
|
||||
created: m.created,
|
||||
role: m.user_role,
|
||||
badges: Badges::from_bits(m.badges as u64).unwrap_or_default(),
|
||||
balance: Decimal::ZERO,
|
||||
payout_wallet: None,
|
||||
payout_wallet_type: None,
|
||||
payout_address: None,
|
||||
microsoft_id: None,
|
||||
},
|
||||
payouts_split: m.payouts_split,
|
||||
ordering: m.ordering,
|
||||
}
|
||||
))
|
||||
})
|
||||
.try_collect::<Vec<QueryTeamMember>>()
|
||||
.await?;
|
||||
|
||||
for (id, members) in &teams.into_iter().group_by(|x| x.team_id) {
|
||||
let mut members = members.collect::<Vec<_>>();
|
||||
|
||||
cmd("SET")
|
||||
.arg(format!("{}:{}", TEAMS_NAMESPACE, id.0))
|
||||
.arg(serde_json::to_string(&members)?)
|
||||
.arg("EX")
|
||||
.arg(DEFAULT_EXPIRY)
|
||||
.query_async::<_, ()>(&mut redis)
|
||||
.await?;
|
||||
|
||||
found_teams.append(&mut members);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(found_teams)
|
||||
}
|
||||
|
||||
pub async fn clear_cache(
|
||||
id: TeamId,
|
||||
redis: &deadpool_redis::Pool,
|
||||
) -> Result<(), super::DatabaseError> {
|
||||
let mut redis = redis.get().await?;
|
||||
cmd("DEL")
|
||||
.arg(format!("{}:{}", TEAMS_NAMESPACE, id.0))
|
||||
.query_async::<_, ()>(&mut redis)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Gets a team member from a user id and team id. Does not return pending members.
|
||||
|
||||
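clear_cache above is the invalidation half of the team cache; a minimal sketch of a write path that might use it, assuming the accepted flag shown in the SELECT above (the UPDATE itself is a hypothetical example of such a mutation, not code from this diff):

// Illustrative only: mutate team membership, then drop the cached member
// list so the next get_from_team_full call repopulates it from Postgres.
async fn accept_team_invite(
    team_id: TeamId,
    user_id: UserId,
    pool: &sqlx::PgPool,
    redis: &deadpool_redis::Pool,
) -> Result<(), super::DatabaseError> {
    let mut transaction = pool.begin().await?;
    sqlx::query("UPDATE team_members SET accepted = TRUE WHERE team_id = $1 AND user_id = $2")
        .bind(team_id.0)
        .bind(user_id.0)
        .execute(&mut *transaction)
        .await?;
    transaction.commit().await?;

    // Invalidate after the commit so a racing reader cannot re-cache stale data.
    TeamMember::clear_cache(team_id, redis).await?;
    Ok(())
}
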
@@ -1,12 +1,28 @@
use super::ids::{ProjectId, UserId};
use crate::database::models::DatabaseError;
use crate::models::ids::base62_impl::{parse_base62, to_base62};
use crate::models::users::{Badges, RecipientType, RecipientWallet};
use chrono::{DateTime, Utc};
use redis::cmd;
use rust_decimal::Decimal;
use serde::{Deserialize, Serialize};

const USERS_NAMESPACE: &str = "users";
const USER_USERNAMES_NAMESPACE: &str = "users_usernames";
// const USERS_PROJECTS_NAMESPACE: &str = "users_projects";
const DEFAULT_EXPIRY: i64 = 1800; // 30 minutes

#[derive(Deserialize, Serialize)]
pub struct User {
    pub id: UserId,
    pub kratos_id: Option<String>, // None if legacy user unconnected to Minos/Kratos

    pub github_id: Option<i64>,
    pub discord_id: Option<i64>,
    pub gitlab_id: Option<i64>,
    pub google_id: Option<String>,
    pub steam_id: Option<i64>,
    pub microsoft_id: Option<String>,

    pub username: String,
    pub name: Option<String>,
    pub email: Option<String>,
@@ -29,22 +45,29 @@ impl User {
|
||||
sqlx::query!(
|
||||
"
|
||||
INSERT INTO users (
|
||||
id, kratos_id, username, name, email,
|
||||
avatar_url, bio, created
|
||||
id, username, name, email,
|
||||
avatar_url, bio, created,
|
||||
github_id, discord_id, gitlab_id, google_id, steam_id, microsoft_id
|
||||
)
|
||||
VALUES (
|
||||
$1, $2, $3, $4, $5,
|
||||
$6, $7, $8
|
||||
$6, $7,
|
||||
$8, $9, $10, $11, $12, $13
|
||||
)
|
||||
",
|
||||
self.id as UserId,
|
||||
self.kratos_id,
|
||||
&self.username,
|
||||
self.name.as_ref(),
|
||||
self.email.as_ref(),
|
||||
self.avatar_url.as_ref(),
|
||||
self.bio.as_ref(),
|
||||
self.created,
|
||||
self.github_id,
|
||||
self.discord_id,
|
||||
self.gitlab_id,
|
||||
self.google_id,
|
||||
self.steam_id,
|
||||
self.microsoft_id,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
@@ -52,199 +75,192 @@ impl User {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn get<'a, 'b, E>(id: UserId, executor: E) -> Result<Option<Self>, sqlx::error::Error>
|
||||
pub async fn get<'a, 'b, E>(
|
||||
string: &str,
|
||||
executor: E,
|
||||
redis: &deadpool_redis::Pool,
|
||||
) -> Result<Option<User>, DatabaseError>
|
||||
where
|
||||
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
|
||||
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
|
||||
{
|
||||
Self::get_many(&[id], executor)
|
||||
User::get_many(&[string], executor, redis)
|
||||
.await
|
||||
.map(|x| x.into_iter().next())
|
||||
}
|
||||
|
||||
pub async fn get_from_github_id<'a, 'b, E>(
|
||||
github_id: u64,
|
||||
pub async fn get_id<'a, 'b, E>(
|
||||
id: UserId,
|
||||
executor: E,
|
||||
) -> Result<Option<Self>, sqlx::error::Error>
|
||||
redis: &deadpool_redis::Pool,
|
||||
) -> Result<Option<User>, DatabaseError>
|
||||
where
|
||||
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
|
||||
{
|
||||
let result = sqlx::query!(
|
||||
"
|
||||
SELECT u.id, u.name, u.email, u.kratos_id,
|
||||
u.avatar_url, u.username, u.bio,
|
||||
u.created, u.role, u.badges,
|
||||
u.balance, u.payout_wallet, u.payout_wallet_type,
|
||||
u.payout_address
|
||||
FROM users u
|
||||
WHERE u.github_id = $1
|
||||
",
|
||||
github_id as i64,
|
||||
)
|
||||
.fetch_optional(executor)
|
||||
.await?;
|
||||
|
||||
if let Some(row) = result {
|
||||
Ok(Some(User {
|
||||
id: UserId(row.id),
|
||||
github_id: Some(github_id as i64),
|
||||
name: row.name,
|
||||
email: row.email,
|
||||
kratos_id: row.kratos_id,
|
||||
avatar_url: row.avatar_url,
|
||||
username: row.username,
|
||||
bio: row.bio,
|
||||
created: row.created,
|
||||
role: row.role,
|
||||
badges: Badges::from_bits(row.badges as u64).unwrap_or_default(),
|
||||
balance: row.balance,
|
||||
payout_wallet: row.payout_wallet.map(|x| RecipientWallet::from_string(&x)),
|
||||
payout_wallet_type: row
|
||||
.payout_wallet_type
|
||||
.map(|x| RecipientType::from_string(&x)),
|
||||
payout_address: row.payout_address,
|
||||
}))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
User::get_many(&[crate::models::ids::UserId::from(id)], executor, redis)
|
||||
.await
|
||||
.map(|x| x.into_iter().next())
|
||||
}
|
||||
|
||||
pub async fn get_from_minos_kratos_id<'a, 'b, E>(
|
||||
kratos_id: String,
|
||||
executor: E,
|
||||
) -> Result<Option<Self>, sqlx::error::Error>
|
||||
pub async fn get_many_ids<'a, E>(
|
||||
user_ids: &[UserId],
|
||||
exec: E,
|
||||
redis: &deadpool_redis::Pool,
|
||||
) -> Result<Vec<User>, DatabaseError>
|
||||
where
|
||||
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
|
||||
{
|
||||
let result = sqlx::query!(
|
||||
"
|
||||
SELECT u.id, u.name, u.kratos_id, u.email, u.github_id,
|
||||
u.avatar_url, u.username, u.bio,
|
||||
u.created, u.role, u.badges,
|
||||
u.balance, u.payout_wallet, u.payout_wallet_type,
|
||||
u.payout_address
|
||||
FROM users u
|
||||
WHERE u.kratos_id = $1
|
||||
",
|
||||
kratos_id as String,
|
||||
)
|
||||
.fetch_optional(executor)
|
||||
.await?;
|
||||
|
||||
if let Some(row) = result {
|
||||
Ok(Some(User {
|
||||
id: UserId(row.id),
|
||||
kratos_id: row.kratos_id,
|
||||
github_id: row.github_id,
|
||||
name: row.name,
|
||||
email: row.email,
|
||||
avatar_url: row.avatar_url,
|
||||
username: row.username,
|
||||
bio: row.bio,
|
||||
created: row.created,
|
||||
role: row.role,
|
||||
badges: Badges::from_bits(row.badges as u64).unwrap_or_default(),
|
||||
balance: row.balance,
|
||||
payout_wallet: row.payout_wallet.map(|x| RecipientWallet::from_string(&x)),
|
||||
payout_wallet_type: row
|
||||
.payout_wallet_type
|
||||
.map(|x| RecipientType::from_string(&x)),
|
||||
payout_address: row.payout_address,
|
||||
}))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
let ids = user_ids
|
||||
.iter()
|
||||
.map(|x| crate::models::ids::UserId::from(*x))
|
||||
.collect::<Vec<_>>();
|
||||
User::get_many(&ids, exec, redis).await
|
||||
}
|
||||
|
||||
pub async fn get_from_username<'a, 'b, E>(
|
||||
username: String,
|
||||
executor: E,
|
||||
) -> Result<Option<Self>, sqlx::error::Error>
|
||||
pub async fn get_many<'a, E, T: ToString>(
|
||||
users_strings: &[T],
|
||||
exec: E,
|
||||
redis: &deadpool_redis::Pool,
|
||||
) -> Result<Vec<User>, DatabaseError>
|
||||
where
|
||||
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
|
||||
{
|
||||
let result = sqlx::query!(
|
||||
"
|
||||
SELECT u.id, u.kratos_id, u.name, u.email, u.github_id,
|
||||
u.avatar_url, u.username, u.bio,
|
||||
u.created, u.role, u.badges,
|
||||
u.balance, u.payout_wallet, u.payout_wallet_type,
|
||||
u.payout_address
|
||||
FROM users u
|
||||
WHERE LOWER(u.username) = LOWER($1)
|
||||
",
|
||||
username
|
||||
)
|
||||
.fetch_optional(executor)
|
||||
.await?;
|
||||
use futures::TryStreamExt;
|
||||
|
||||
if let Some(row) = result {
|
||||
Ok(Some(User {
|
||||
id: UserId(row.id),
|
||||
kratos_id: row.kratos_id,
|
||||
github_id: row.github_id,
|
||||
name: row.name,
|
||||
email: row.email,
|
||||
avatar_url: row.avatar_url,
|
||||
username: row.username,
|
||||
bio: row.bio,
|
||||
created: row.created,
|
||||
role: row.role,
|
||||
badges: Badges::from_bits(row.badges as u64).unwrap_or_default(),
|
||||
balance: row.balance,
|
||||
payout_wallet: row.payout_wallet.map(|x| RecipientWallet::from_string(&x)),
|
||||
payout_wallet_type: row
|
||||
.payout_wallet_type
|
||||
.map(|x| RecipientType::from_string(&x)),
|
||||
payout_address: row.payout_address,
|
||||
}))
|
||||
} else {
|
||||
Ok(None)
|
||||
if users_strings.is_empty() {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_many<'a, E>(user_ids: &[UserId], exec: E) -> Result<Vec<User>, sqlx::Error>
|
||||
where
|
||||
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
|
||||
{
|
||||
use futures::stream::TryStreamExt;
|
||||
let mut redis = redis.get().await?;
|
||||
|
||||
let user_ids_parsed: Vec<i64> = user_ids.iter().map(|x| x.0).collect();
|
||||
let users = sqlx::query!(
|
||||
"
|
||||
SELECT u.id, u.kratos_id, u.name, u.email, u.github_id,
|
||||
u.avatar_url, u.username, u.bio,
|
||||
u.created, u.role, u.badges,
|
||||
u.balance, u.payout_wallet, u.payout_wallet_type,
|
||||
u.payout_address
|
||||
FROM users u
|
||||
WHERE u.id = ANY($1)
|
||||
",
|
||||
&user_ids_parsed
|
||||
)
|
||||
.fetch_many(exec)
|
||||
.try_filter_map(|e| async {
|
||||
Ok(e.right().map(|u| User {
|
||||
id: UserId(u.id),
|
||||
kratos_id: u.kratos_id,
|
||||
github_id: u.github_id,
|
||||
name: u.name,
|
||||
email: u.email,
|
||||
avatar_url: u.avatar_url,
|
||||
username: u.username,
|
||||
bio: u.bio,
|
||||
created: u.created,
|
||||
role: u.role,
|
||||
badges: Badges::from_bits(u.badges as u64).unwrap_or_default(),
|
||||
balance: u.balance,
|
||||
payout_wallet: u.payout_wallet.map(|x| RecipientWallet::from_string(&x)),
|
||||
payout_wallet_type: u.payout_wallet_type.map(|x| RecipientType::from_string(&x)),
|
||||
payout_address: u.payout_address,
|
||||
}))
|
||||
})
|
||||
.try_collect::<Vec<User>>()
|
||||
.await?;
|
||||
let mut found_users = Vec::new();
|
||||
let mut remaining_strings = users_strings
|
||||
.iter()
|
||||
.map(|x| x.to_string())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
Ok(users)
|
||||
let mut user_ids = users_strings
|
||||
.iter()
|
||||
.flat_map(|x| parse_base62(&x.to_string()).map(|x| x as i64))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
user_ids.append(
|
||||
&mut cmd("MGET")
|
||||
.arg(
|
||||
users_strings
|
||||
.iter()
|
||||
.map(|x| {
|
||||
format!(
|
||||
"{}:{}",
|
||||
USER_USERNAMES_NAMESPACE,
|
||||
x.to_string().to_lowercase()
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>(),
|
||||
)
|
||||
.query_async::<_, Vec<Option<i64>>>(&mut redis)
|
||||
.await?
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.collect(),
|
||||
);
|
||||
|
||||
if !user_ids.is_empty() {
|
||||
let users = cmd("MGET")
|
||||
.arg(
|
||||
user_ids
|
||||
.iter()
|
||||
.map(|x| format!("{}:{}", USERS_NAMESPACE, x))
|
||||
.collect::<Vec<_>>(),
|
||||
)
|
||||
.query_async::<_, Vec<Option<String>>>(&mut redis)
|
||||
.await?;
|
||||
|
||||
for user in users {
|
||||
if let Some(user) = user.and_then(|x| serde_json::from_str::<User>(&x).ok()) {
|
||||
remaining_strings
|
||||
.retain(|x| &to_base62(user.id.0 as u64) != x && &user.username != x);
|
||||
found_users.push(user);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !remaining_strings.is_empty() {
|
||||
let user_ids_parsed: Vec<i64> = remaining_strings
|
||||
.iter()
|
||||
.flat_map(|x| parse_base62(&x.to_string()).ok())
|
||||
.map(|x| x as i64)
|
||||
.collect();
|
||||
let db_users: Vec<User> = sqlx::query!(
|
||||
"
|
||||
SELECT id, name, email,
|
||||
avatar_url, username, bio,
|
||||
created, role, badges,
|
||||
balance, payout_wallet, payout_wallet_type, payout_address,
|
||||
github_id, discord_id, gitlab_id, google_id, steam_id, microsoft_id
|
||||
FROM users
|
||||
WHERE id = ANY($1) OR username = ANY($2)
|
||||
",
|
||||
&user_ids_parsed,
|
||||
&remaining_strings
|
||||
.into_iter()
|
||||
.map(|x| x.to_string().to_lowercase())
|
||||
.collect::<Vec<_>>(),
|
||||
)
|
||||
.fetch_many(exec)
|
||||
.try_filter_map(|e| async {
|
||||
Ok(e.right().map(|u| User {
|
||||
id: UserId(u.id),
|
||||
github_id: u.github_id,
|
||||
discord_id: u.discord_id,
|
||||
gitlab_id: u.gitlab_id,
|
||||
google_id: u.google_id,
|
||||
steam_id: u.steam_id,
|
||||
microsoft_id: u.microsoft_id,
|
||||
name: u.name,
|
||||
email: u.email,
|
||||
avatar_url: u.avatar_url,
|
||||
username: u.username,
|
||||
bio: u.bio,
|
||||
created: u.created,
|
||||
role: u.role,
|
||||
badges: Badges::from_bits(u.badges as u64).unwrap_or_default(),
|
||||
balance: u.balance,
|
||||
payout_wallet: u.payout_wallet.map(|x| RecipientWallet::from_string(&x)),
|
||||
payout_wallet_type: u
|
||||
.payout_wallet_type
|
||||
.map(|x| RecipientType::from_string(&x)),
|
||||
payout_address: u.payout_address,
|
||||
}))
|
||||
})
|
||||
.try_collect::<Vec<User>>()
|
||||
.await?;
|
||||
|
||||
for user in db_users {
|
||||
cmd("SET")
|
||||
.arg(format!("{}:{}", USERS_NAMESPACE, user.id.0))
|
||||
.arg(serde_json::to_string(&user)?)
|
||||
.arg("EX")
|
||||
.arg(DEFAULT_EXPIRY)
|
||||
.query_async::<_, ()>(&mut redis)
|
||||
.await?;
|
||||
|
||||
cmd("SET")
|
||||
.arg(format!(
|
||||
"{}:{}",
|
||||
USER_USERNAMES_NAMESPACE,
|
||||
user.username.to_lowercase()
|
||||
))
|
||||
.arg(user.id.0)
|
||||
.arg("EX")
|
||||
.arg(DEFAULT_EXPIRY)
|
||||
.query_async::<_, ()>(&mut redis)
|
||||
.await?;
|
||||
found_users.push(user);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(found_users)
|
||||
}
|
||||
|
||||
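get_many above accepts either base62 ids or usernames, so the cache uses two namespaces: users_usernames:<lowercased name> maps a username to a numeric id, and users:<id> holds the serialized row. A reduced sketch of that two-step lookup for a single username; it shows the cache-hit path only, and the Postgres fallback used above is omitted:

// Illustrative only: resolve a username through the secondary index,
// then fetch the cached User row by id.
async fn cached_user_by_username(
    username: &str,
    redis: &deadpool_redis::Pool,
) -> Result<Option<User>, DatabaseError> {
    let mut conn = redis.get().await?;

    // Step 1: username (lower-cased) -> numeric user id.
    let id: Option<i64> = cmd("GET")
        .arg(format!(
            "{}:{}",
            USER_USERNAMES_NAMESPACE,
            username.to_lowercase()
        ))
        .query_async(&mut conn)
        .await?;
    let id = match id {
        Some(x) => x,
        None => return Ok(None), // the real code falls back to Postgres here
    };

    // Step 2: user id -> JSON-serialized User row.
    let raw: Option<String> = cmd("GET")
        .arg(format!("{}:{}", USERS_NAMESPACE, id))
        .query_async(&mut conn)
        .await?;
    Ok(raw.and_then(|x| serde_json::from_str(&x).ok()))
}
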
pub async fn get_projects<'a, E>(
|
||||
@@ -273,321 +289,207 @@ impl User {
|
||||
Ok(projects)
|
||||
}
|
||||
|
||||
pub async fn remove(
|
||||
id: UserId,
|
||||
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
|
||||
) -> Result<Option<()>, sqlx::error::Error> {
|
||||
let deleted_user: UserId = crate::models::users::DELETED_USER.into();
|
||||
pub async fn clear_caches(
|
||||
user_ids: &[(UserId, Option<String>)],
|
||||
redis: &deadpool_redis::Pool,
|
||||
) -> Result<(), DatabaseError> {
|
||||
let mut redis = redis.get().await?;
|
||||
let mut cmd = cmd("DEL");
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
UPDATE team_members
|
||||
SET user_id = $1
|
||||
WHERE (user_id = $2 AND role = $3)
|
||||
",
|
||||
deleted_user as UserId,
|
||||
id as UserId,
|
||||
crate::models::teams::OWNER_ROLE
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
UPDATE versions
|
||||
SET author_id = $1
|
||||
WHERE (author_id = $2)
|
||||
",
|
||||
deleted_user as UserId,
|
||||
id as UserId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
use futures::TryStreamExt;
|
||||
let notifications: Vec<i64> = sqlx::query!(
|
||||
"
|
||||
SELECT n.id FROM notifications n
|
||||
WHERE n.user_id = $1
|
||||
",
|
||||
id as UserId,
|
||||
)
|
||||
.fetch_many(&mut *transaction)
|
||||
.try_filter_map(|e| async { Ok(e.right().map(|m| m.id)) })
|
||||
.try_collect::<Vec<i64>>()
|
||||
.await?;
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
DELETE FROM notifications
|
||||
WHERE user_id = $1
|
||||
",
|
||||
id as UserId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
DELETE FROM reports
|
||||
WHERE user_id = $1
|
||||
",
|
||||
id as UserId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
DELETE FROM mod_follows
|
||||
WHERE follower_id = $1
|
||||
",
|
||||
id as UserId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
DELETE FROM notifications_actions
|
||||
WHERE notification_id = ANY($1)
|
||||
",
|
||||
¬ifications
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
DELETE FROM team_members
|
||||
WHERE user_id = $1
|
||||
",
|
||||
id as UserId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
DELETE FROM payouts_values
|
||||
WHERE user_id = $1
|
||||
",
|
||||
id as UserId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
DELETE FROM historical_payouts
|
||||
WHERE user_id = $1
|
||||
",
|
||||
id as UserId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
DELETE FROM users
|
||||
WHERE id = $1
|
||||
",
|
||||
id as UserId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
Ok(Some(()))
|
||||
}
|
||||
|
||||
pub async fn remove_full(
|
||||
id: UserId,
|
||||
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
|
||||
) -> Result<Option<()>, sqlx::error::Error> {
|
||||
use futures::TryStreamExt;
|
||||
let projects: Vec<ProjectId> = sqlx::query!(
|
||||
"
|
||||
SELECT m.id FROM mods m
|
||||
INNER JOIN team_members tm ON tm.team_id = m.team_id
|
||||
WHERE tm.user_id = $1 AND tm.role = $2
|
||||
",
|
||||
id as UserId,
|
||||
crate::models::teams::OWNER_ROLE
|
||||
)
|
||||
.fetch_many(&mut *transaction)
|
||||
.try_filter_map(|e| async { Ok(e.right().map(|m| ProjectId(m.id))) })
|
||||
.try_collect::<Vec<ProjectId>>()
|
||||
.await?;
|
||||
|
||||
for project_id in projects {
|
||||
let _result =
|
||||
super::project_item::Project::remove_full(project_id, transaction).await?;
|
||||
}
|
||||
|
||||
let notifications: Vec<i64> = sqlx::query!(
|
||||
"
|
||||
SELECT n.id FROM notifications n
|
||||
WHERE n.user_id = $1
|
||||
",
|
||||
id as UserId,
|
||||
)
|
||||
.fetch_many(&mut *transaction)
|
||||
.try_filter_map(|e| async { Ok(e.right().map(|m| m.id)) })
|
||||
.try_collect::<Vec<i64>>()
|
||||
.await?;
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
DELETE FROM notifications
|
||||
WHERE user_id = $1
|
||||
",
|
||||
id as UserId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
DELETE FROM notifications_actions
|
||||
WHERE notification_id = ANY($1)
|
||||
",
|
||||
¬ifications
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
let deleted_user: UserId = crate::models::users::DELETED_USER.into();
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
UPDATE versions
|
||||
SET author_id = $1
|
||||
WHERE (author_id = $2)
|
||||
",
|
||||
deleted_user as UserId,
|
||||
id as UserId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
DELETE FROM team_members
|
||||
WHERE user_id = $1
|
||||
",
|
||||
id as UserId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
sqlx::query!(
|
||||
r#"
|
||||
UPDATE threads_messages
|
||||
SET body = '{"type": "deleted"}', author_id = $2
|
||||
WHERE author_id = $1
|
||||
"#,
|
||||
id as UserId,
|
||||
deleted_user as UserId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
DELETE FROM threads_members
|
||||
WHERE user_id = $1
|
||||
",
|
||||
id as UserId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
DELETE FROM users
|
||||
WHERE id = $1
|
||||
",
|
||||
id as UserId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
Ok(Some(()))
|
||||
}
|
||||
|
||||
pub async fn get_id_from_username_or_id<'a, 'b, E>(
|
||||
username_or_id: &str,
|
||||
executor: E,
|
||||
) -> Result<Option<UserId>, sqlx::error::Error>
|
||||
where
|
||||
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
|
||||
{
|
||||
let id_option = crate::models::ids::base62_impl::parse_base62(username_or_id).ok();
|
||||
|
||||
if let Some(id) = id_option {
|
||||
let id = UserId(id as i64);
|
||||
|
||||
let mut user_id = sqlx::query!(
|
||||
"
|
||||
SELECT id FROM users
|
||||
WHERE id = $1
|
||||
",
|
||||
id as UserId
|
||||
)
|
||||
.fetch_optional(executor)
|
||||
.await?
|
||||
.map(|x| UserId(x.id));
|
||||
|
||||
if user_id.is_none() {
|
||||
user_id = sqlx::query!(
|
||||
"
|
||||
SELECT id FROM users
|
||||
WHERE LOWER(username) = LOWER($1)
|
||||
",
|
||||
username_or_id
|
||||
)
|
||||
.fetch_optional(executor)
|
||||
.await?
|
||||
.map(|x| UserId(x.id));
|
||||
for (id, username) in user_ids {
|
||||
cmd.arg(format!("{}:{}", USERS_NAMESPACE, id.0));
|
||||
if let Some(username) = username {
|
||||
cmd.arg(format!(
|
||||
"{}:{}",
|
||||
USER_USERNAMES_NAMESPACE,
|
||||
username.to_lowercase()
|
||||
));
|
||||
}
|
||||
|
||||
Ok(user_id)
|
||||
} else {
|
||||
let id = sqlx::query!(
|
||||
"
|
||||
SELECT id FROM users
|
||||
WHERE LOWER(username) = LOWER($1)
|
||||
",
|
||||
username_or_id
|
||||
)
|
||||
.fetch_optional(executor)
|
||||
.await?;
|
||||
|
||||
Ok(id.map(|x| UserId(x.id)))
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn merge_minos_user<'a, 'b, E>(
|
||||
&self,
|
||||
kratos_id: &str,
|
||||
executor: E,
|
||||
) -> Result<(), sqlx::error::Error>
|
||||
where
|
||||
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
|
||||
{
|
||||
// If the user exists, link the Minos user to the existing user rather than creating a new one
|
||||
sqlx::query!(
|
||||
"
|
||||
UPDATE users
|
||||
SET kratos_id = $1
|
||||
WHERE (id = $2)
|
||||
",
|
||||
kratos_id,
|
||||
self.id.0,
|
||||
)
|
||||
.execute(executor)
|
||||
.await?;
|
||||
cmd.query_async::<_, ()>(&mut redis).await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
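clear_caches above folds every key into a single DEL, so invalidation costs one round trip regardless of how many users changed. The same batching shape in isolation, with hypothetical keys and helper name:

// Illustrative only: redis::Cmd::arg can be called repeatedly on one
// command, producing DEL key1 key2 ... keyN in a single round trip.
async fn delete_cache_keys(
    keys: &[String],
    redis: &deadpool_redis::Pool,
) -> Result<(), DatabaseError> {
    if keys.is_empty() {
        return Ok(()); // DEL with no keys is a Redis error, so bail out early
    }
    let mut conn = redis.get().await?;
    let mut del = cmd("DEL");
    for key in keys {
        del.arg(key);
    }
    del.query_async::<_, ()>(&mut conn).await?;
    Ok(())
}
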
pub async fn remove(
|
||||
id: UserId,
|
||||
full: bool,
|
||||
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
|
||||
redis: &deadpool_redis::Pool,
|
||||
) -> Result<Option<()>, DatabaseError> {
|
||||
let user = Self::get_id(id, &mut *transaction, redis).await?;
|
||||
|
||||
if let Some(delete_user) = user {
|
||||
User::clear_caches(&[(id, Some(delete_user.username))], redis).await?;
|
||||
|
||||
let deleted_user: UserId = crate::models::users::DELETED_USER.into();
|
||||
|
||||
if full {
|
||||
let projects: Vec<ProjectId> = sqlx::query!(
|
||||
"
|
||||
SELECT m.id FROM mods m
|
||||
INNER JOIN team_members tm ON tm.team_id = m.team_id
|
||||
WHERE tm.user_id = $1 AND tm.role = $2
|
||||
",
|
||||
id as UserId,
|
||||
crate::models::teams::OWNER_ROLE
|
||||
)
|
||||
.fetch_many(&mut *transaction)
|
||||
.try_filter_map(|e| async { Ok(e.right().map(|m| ProjectId(m.id))) })
|
||||
.try_collect::<Vec<ProjectId>>()
|
||||
.await?;
|
||||
|
||||
for project_id in projects {
|
||||
let _result =
|
||||
super::project_item::Project::remove(project_id, transaction, redis)
|
||||
.await?;
|
||||
}
|
||||
} else {
|
||||
sqlx::query!(
|
||||
"
|
||||
UPDATE team_members
|
||||
SET user_id = $1
|
||||
WHERE (user_id = $2 AND role = $3)
|
||||
",
|
||||
deleted_user as UserId,
|
||||
id as UserId,
|
||||
crate::models::teams::OWNER_ROLE
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
}
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
UPDATE versions
|
||||
SET author_id = $1
|
||||
WHERE (author_id = $2)
|
||||
",
|
||||
deleted_user as UserId,
|
||||
id as UserId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
use futures::TryStreamExt;
|
||||
let notifications: Vec<i64> = sqlx::query!(
|
||||
"
|
||||
SELECT n.id FROM notifications n
|
||||
WHERE n.user_id = $1
|
||||
",
|
||||
id as UserId,
|
||||
)
|
||||
.fetch_many(&mut *transaction)
|
||||
.try_filter_map(|e| async { Ok(e.right().map(|m| m.id)) })
|
||||
.try_collect::<Vec<i64>>()
|
||||
.await?;
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
DELETE FROM notifications
|
||||
WHERE user_id = $1
|
||||
",
|
||||
id as UserId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
DELETE FROM notifications_actions
|
||||
WHERE notification_id = ANY($1)
|
||||
",
|
||||
¬ifications
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
DELETE FROM reports
|
||||
WHERE user_id = $1
|
||||
",
|
||||
id as UserId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
DELETE FROM mod_follows
|
||||
WHERE follower_id = $1
|
||||
",
|
||||
id as UserId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
DELETE FROM team_members
|
||||
WHERE user_id = $1
|
||||
",
|
||||
id as UserId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
DELETE FROM payouts_values
|
||||
WHERE user_id = $1
|
||||
",
|
||||
id as UserId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
DELETE FROM historical_payouts
|
||||
WHERE user_id = $1
|
||||
",
|
||||
id as UserId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
sqlx::query!(
|
||||
r#"
|
||||
UPDATE threads_messages
|
||||
SET body = '{"type": "deleted"}', author_id = $2
|
||||
WHERE author_id = $1
|
||||
"#,
|
||||
id as UserId,
|
||||
deleted_user as UserId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
DELETE FROM threads_members
|
||||
WHERE user_id = $1
|
||||
",
|
||||
id as UserId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
DELETE FROM users
|
||||
WHERE id = $1
|
||||
",
|
||||
id as UserId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
Ok(Some(()))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,12 +1,20 @@
use super::ids::*;
use super::DatabaseError;
use crate::models::ids::base62_impl::parse_base62;
use crate::models::projects::{FileType, VersionStatus, VersionType};
use crate::models::projects::{FileType, VersionStatus};
use chrono::{DateTime, Utc};
use serde::Deserialize;
use itertools::Itertools;
use redis::cmd;
use serde::{Deserialize, Serialize};
use std::cmp::Ordering;
use std::collections::HashMap;

const VERSIONS_NAMESPACE: &str = "versions";
// TODO: Cache version slugs call
// const VERSIONS_SLUGS_NAMESPACE: &str = "versions_slugs";
const VERSION_FILES_NAMESPACE: &str = "versions_files";
const DEFAULT_EXPIRY: i64 = 1800; // 30 minutes

pub struct VersionBuilder {
|
||||
pub version_id: VersionId,
|
||||
pub project_id: ProjectId,
|
||||
@@ -199,7 +207,7 @@ impl VersionBuilder {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
#[derive(Clone, Deserialize, Serialize)]
|
||||
pub struct Version {
|
||||
pub id: VersionId,
|
||||
pub project_id: ProjectId,
|
||||
@@ -254,20 +262,18 @@ impl Version {
|
||||
|
||||
pub async fn remove_full(
|
||||
id: VersionId,
|
||||
redis: &deadpool_redis::Pool,
|
||||
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
|
||||
) -> Result<Option<()>, sqlx::Error> {
|
||||
let result = sqlx::query!(
|
||||
"
|
||||
SELECT EXISTS(SELECT 1 FROM versions WHERE id = $1)
|
||||
",
|
||||
id as VersionId,
|
||||
)
|
||||
.fetch_one(&mut *transaction)
|
||||
.await?;
|
||||
) -> Result<Option<()>, DatabaseError> {
|
||||
let result = Self::get(id, &mut *transaction, redis).await?;
|
||||
|
||||
if !result.exists.unwrap_or(false) {
|
||||
let result = if let Some(result) = result {
|
||||
result
|
||||
} else {
|
||||
return Ok(None);
|
||||
}
|
||||
};
|
||||
|
||||
Version::clear_cache(&result, redis).await?;
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
@@ -374,276 +380,383 @@ impl Version {
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
crate::database::models::Project::update_game_versions(
|
||||
ProjectId(project_id.mod_id),
|
||||
&mut *transaction,
|
||||
)
|
||||
.await?;
|
||||
crate::database::models::Project::update_loaders(
|
||||
ProjectId(project_id.mod_id),
|
||||
&mut *transaction,
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(Some(()))
|
||||
}
|
||||
|
||||
pub async fn get_project_versions<'a, E>(
|
||||
project_id: ProjectId,
|
||||
game_versions: Option<Vec<String>>,
|
||||
loaders: Option<Vec<String>>,
|
||||
version_type: Option<VersionType>,
|
||||
limit: Option<u32>,
|
||||
offset: Option<u32>,
|
||||
exec: E,
|
||||
) -> Result<Vec<VersionId>, sqlx::Error>
|
||||
where
|
||||
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
|
||||
{
|
||||
use futures::stream::TryStreamExt;
|
||||
|
||||
let vec = sqlx::query!(
|
||||
"
|
||||
SELECT DISTINCT ON(v.date_published, v.id) version_id, v.date_published FROM versions v
|
||||
INNER JOIN game_versions_versions gvv ON gvv.joining_version_id = v.id
|
||||
INNER JOIN game_versions gv on gvv.game_version_id = gv.id AND (cardinality($2::varchar[]) = 0 OR gv.version = ANY($2::varchar[]))
|
||||
INNER JOIN loaders_versions lv ON lv.version_id = v.id
|
||||
INNER JOIN loaders l on lv.loader_id = l.id AND (cardinality($3::varchar[]) = 0 OR l.loader = ANY($3::varchar[]))
|
||||
WHERE v.mod_id = $1 AND ($4::varchar IS NULL OR v.version_type = $4)
|
||||
ORDER BY v.date_published DESC, v.id
|
||||
LIMIT $5 OFFSET $6
|
||||
",
|
||||
project_id as ProjectId,
|
||||
&game_versions.unwrap_or_default(),
|
||||
&loaders.unwrap_or_default(),
|
||||
version_type.map(|x| x.as_str()),
|
||||
limit.map(|x| x as i64),
|
||||
offset.map(|x| x as i64),
|
||||
)
|
||||
.fetch_many(exec)
|
||||
.try_filter_map(|e| async { Ok(e.right().map(|v| VersionId(v.version_id))) })
|
||||
.try_collect::<Vec<VersionId>>()
|
||||
.await?;
|
||||
|
||||
Ok(vec)
|
||||
}
|
||||
|
||||
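Both version-listing queries rely on `cardinality($n::varchar[]) = 0 OR column = ANY($n::varchar[])` so that an empty array disables that filter instead of matching nothing. A minimal sketch of the same trick against a hypothetical table, using the non-macro sqlx API so it stands alone:

// Illustrative only: an empty `loaders` vector returns every id,
// a non-empty one restricts rows to the listed loader values.
// `example_versions` is a hypothetical table name.
async fn filtered_version_ids(
    loaders: Vec<String>,
    pool: &sqlx::PgPool,
) -> Result<Vec<i64>, sqlx::Error> {
    sqlx::query_scalar::<_, i64>(
        "
        SELECT id FROM example_versions
        WHERE cardinality($1::varchar[]) = 0 OR loader = ANY($1::varchar[])
        ",
    )
    .bind(loaders)
    .fetch_all(pool)
    .await
}
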
pub async fn get_projects_versions<'a, E>(
|
||||
project_ids: Vec<ProjectId>,
|
||||
game_versions: Option<Vec<String>>,
|
||||
loaders: Option<Vec<String>>,
|
||||
version_type: Option<VersionType>,
|
||||
limit: Option<u32>,
|
||||
offset: Option<u32>,
|
||||
exec: E,
|
||||
) -> Result<HashMap<ProjectId, Vec<VersionId>>, sqlx::Error>
|
||||
where
|
||||
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
|
||||
{
|
||||
use futures::stream::TryStreamExt;
|
||||
|
||||
let vec = sqlx::query!(
|
||||
"
|
||||
SELECT DISTINCT ON(v.date_published, v.id) version_id, v.mod_id, v.date_published FROM versions v
|
||||
INNER JOIN game_versions_versions gvv ON gvv.joining_version_id = v.id
|
||||
INNER JOIN game_versions gv on gvv.game_version_id = gv.id AND (cardinality($2::varchar[]) = 0 OR gv.version = ANY($2::varchar[]))
|
||||
INNER JOIN loaders_versions lv ON lv.version_id = v.id
|
||||
INNER JOIN loaders l on lv.loader_id = l.id AND (cardinality($3::varchar[]) = 0 OR l.loader = ANY($3::varchar[]))
|
||||
WHERE v.mod_id = ANY($1) AND ($4::varchar IS NULL OR v.version_type = $4)
|
||||
ORDER BY v.date_published, v.id ASC
|
||||
LIMIT $5 OFFSET $6
|
||||
",
|
||||
&project_ids.into_iter().map(|x| x.0).collect::<Vec<i64>>(),
|
||||
&game_versions.unwrap_or_default(),
|
||||
&loaders.unwrap_or_default(),
|
||||
version_type.map(|x| x.as_str()),
|
||||
limit.map(|x| x as i64),
|
||||
offset.map(|x| x as i64),
|
||||
)
|
||||
.fetch_many(exec)
|
||||
.try_filter_map(|e| async { Ok(e.right().map(|v| (ProjectId(v.mod_id), VersionId(v.version_id)))) })
|
||||
.try_collect::<Vec<(ProjectId, VersionId)>>()
|
||||
.await?;
|
||||
|
||||
let mut map: HashMap<ProjectId, Vec<VersionId>> = HashMap::new();
|
||||
|
||||
for (project_id, version_id) in vec {
|
||||
if let Some(value) = map.get_mut(&project_id) {
|
||||
value.push(version_id);
|
||||
} else {
|
||||
map.insert(project_id, vec![version_id]);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(map)
|
||||
}
|
||||
|
||||
pub async fn get_full<'a, 'b, E>(
|
||||
pub async fn get<'a, 'b, E>(
|
||||
id: VersionId,
|
||||
executor: E,
|
||||
) -> Result<Option<QueryVersion>, sqlx::error::Error>
|
||||
redis: &deadpool_redis::Pool,
|
||||
) -> Result<Option<QueryVersion>, DatabaseError>
|
||||
where
|
||||
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
|
||||
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
|
||||
{
|
||||
Self::get_many_full(&[id], executor)
|
||||
Self::get_many(&[id], executor, redis)
|
||||
.await
|
||||
.map(|x| x.into_iter().next())
|
||||
}
|
||||
|
||||
pub async fn get_many_full<'a, E>(
|
||||
pub async fn get_many<'a, E>(
|
||||
version_ids: &[VersionId],
|
||||
exec: E,
|
||||
) -> Result<Vec<QueryVersion>, sqlx::Error>
|
||||
redis: &deadpool_redis::Pool,
|
||||
) -> Result<Vec<QueryVersion>, DatabaseError>
|
||||
where
|
||||
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
|
||||
{
|
||||
if version_ids.is_empty() {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
|
||||
use futures::stream::TryStreamExt;
|
||||
|
||||
let mut version_ids_parsed: Vec<i64> = version_ids.iter().map(|x| x.0).collect();
|
||||
|
||||
let mut redis = redis.get().await?;
|
||||
|
||||
let mut found_versions = Vec::new();
|
||||
|
||||
let versions = cmd("MGET")
|
||||
.arg(
|
||||
version_ids_parsed
|
||||
.iter()
|
||||
.map(|x| format!("{}:{}", VERSIONS_NAMESPACE, x))
|
||||
.collect::<Vec<_>>(),
|
||||
)
|
||||
.query_async::<_, Vec<Option<String>>>(&mut redis)
|
||||
.await?;
|
||||
|
||||
for version in versions {
|
||||
if let Some(version) =
|
||||
version.and_then(|x| serde_json::from_str::<QueryVersion>(&x).ok())
|
||||
{
|
||||
version_ids_parsed.retain(|x| &version.inner.id.0 != x);
|
||||
found_versions.push(version);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
if !version_ids_parsed.is_empty() {
|
||||
let db_versions: Vec<QueryVersion> = sqlx::query!(
|
||||
"
|
||||
SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,
|
||||
v.changelog changelog, v.date_published date_published, v.downloads downloads,
|
||||
v.version_type version_type, v.featured featured, v.status status, v.requested_status requested_status,
|
||||
JSONB_AGG(DISTINCT jsonb_build_object('version', gv.version, 'created', gv.created)) filter (where gv.version is not null) game_versions,
|
||||
ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,
|
||||
JSONB_AGG(DISTINCT jsonb_build_object('id', f.id, 'url', f.url, 'filename', f.filename, 'primary', f.is_primary, 'size', f.size, 'file_type', f.file_type)) filter (where f.id is not null) files,
|
||||
JSONB_AGG(DISTINCT jsonb_build_object('algorithm', h.algorithm, 'hash', encode(h.hash, 'escape'), 'file_id', h.file_id)) filter (where h.hash is not null) hashes,
|
||||
JSONB_AGG(DISTINCT jsonb_build_object('project_id', d.mod_dependency_id, 'version_id', d.dependency_id, 'dependency_type', d.dependency_type,'file_name', dependency_file_name)) filter (where d.dependency_type is not null) dependencies
|
||||
FROM versions v
|
||||
LEFT OUTER JOIN game_versions_versions gvv on v.id = gvv.joining_version_id
|
||||
LEFT OUTER JOIN game_versions gv on gvv.game_version_id = gv.id
|
||||
LEFT OUTER JOIN loaders_versions lv on v.id = lv.version_id
|
||||
LEFT OUTER JOIN loaders l on lv.loader_id = l.id
|
||||
LEFT OUTER JOIN files f on v.id = f.version_id
|
||||
LEFT OUTER JOIN hashes h on f.id = h.file_id
|
||||
LEFT OUTER JOIN dependencies d on v.id = d.dependent_id
|
||||
WHERE v.id = ANY($1)
|
||||
GROUP BY v.id
|
||||
ORDER BY v.date_published ASC;
|
||||
",
|
||||
&version_ids_parsed
|
||||
)
|
||||
.fetch_many(exec)
|
||||
.try_filter_map(|e| async {
|
||||
Ok(e.right().map(|v|
|
||||
QueryVersion {
|
||||
inner: Version {
|
||||
id: VersionId(v.id),
|
||||
project_id: ProjectId(v.mod_id),
|
||||
author_id: UserId(v.author_id),
|
||||
name: v.version_name,
|
||||
version_number: v.version_number,
|
||||
changelog: v.changelog,
|
||||
changelog_url: None,
|
||||
date_published: v.date_published,
|
||||
downloads: v.downloads,
|
||||
version_type: v.version_type,
|
||||
featured: v.featured,
|
||||
status: VersionStatus::from_str(&v.status),
|
||||
requested_status: v.requested_status
|
||||
.map(|x| VersionStatus::from_str(&x)),
|
||||
},
|
||||
files: {
|
||||
#[derive(Deserialize)]
|
||||
struct Hash {
|
||||
pub file_id: FileId,
|
||||
pub algorithm: String,
|
||||
pub hash: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct File {
|
||||
pub id: FileId,
|
||||
pub url: String,
|
||||
pub filename: String,
|
||||
pub primary: bool,
|
||||
pub size: u32,
|
||||
pub file_type: Option<FileType>,
|
||||
}
|
||||
|
||||
let hashes: Vec<Hash> = serde_json::from_value(
|
||||
v.hashes.unwrap_or_default(),
|
||||
)
|
||||
.ok()
|
||||
.unwrap_or_default();
|
||||
|
||||
let files: Vec<File> = serde_json::from_value(
|
||||
v.files.unwrap_or_default(),
|
||||
)
|
||||
.ok()
|
||||
.unwrap_or_default();
|
||||
|
||||
let mut files = files.into_iter().map(|x| {
|
||||
let mut file_hashes = HashMap::new();
|
||||
|
||||
for hash in &hashes {
|
||||
if hash.file_id == x.id {
|
||||
file_hashes.insert(
|
||||
hash.algorithm.clone(),
|
||||
hash.hash.clone(),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
QueryFile {
|
||||
id: x.id,
|
||||
url: x.url,
|
||||
filename: x.filename,
|
||||
hashes: file_hashes,
|
||||
primary: x.primary,
|
||||
size: x.size,
|
||||
file_type: x.file_type,
|
||||
}
|
||||
}).collect::<Vec<_>>();
|
||||
|
||||
files.sort_by(|a, b| {
|
||||
if a.primary {
|
||||
Ordering::Less
|
||||
} else if b.primary {
|
||||
Ordering::Greater
|
||||
} else {
|
||||
a.filename.cmp(&b.filename)
|
||||
}
|
||||
});
|
||||
|
||||
files
|
||||
},
|
||||
game_versions: {
|
||||
#[derive(Deserialize)]
|
||||
struct GameVersion {
|
||||
pub version: String,
|
||||
pub created: DateTime<Utc>,
|
||||
}
|
||||
|
||||
let mut game_versions: Vec<GameVersion> = serde_json::from_value(
|
||||
v.game_versions.unwrap_or_default(),
|
||||
)
|
||||
.ok()
|
||||
.unwrap_or_default();
|
||||
|
||||
game_versions.sort_by(|a, b| a.created.cmp(&b.created));
|
||||
|
||||
game_versions.into_iter().map(|x| x.version).collect()
|
||||
},
|
||||
loaders: v.loaders.unwrap_or_default(),
|
||||
dependencies: serde_json::from_value(
|
||||
v.dependencies.unwrap_or_default(),
|
||||
)
|
||||
.ok()
|
||||
.unwrap_or_default(),
|
||||
}
|
||||
))
|
||||
})
|
||||
.try_collect::<Vec<QueryVersion>>()
|
||||
.await?;
|
||||
|
||||
for version in db_versions {
|
||||
cmd("SET")
|
||||
.arg(format!("{}:{}", VERSIONS_NAMESPACE, version.inner.id.0))
|
||||
.arg(serde_json::to_string(&version)?)
|
||||
.arg("EX")
|
||||
.arg(DEFAULT_EXPIRY)
|
||||
.query_async::<_, ()>(&mut redis)
|
||||
.await?;
|
||||
|
||||
found_versions.push(version);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(found_versions)
|
||||
}
|
||||
|
||||
pub async fn get_file_from_hash<'a, 'b, E>(
|
||||
algo: String,
|
||||
hash: String,
|
||||
version_id: Option<VersionId>,
|
||||
executor: E,
|
||||
redis: &deadpool_redis::Pool,
|
||||
) -> Result<Option<SingleFile>, DatabaseError>
|
||||
where
|
||||
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
|
||||
{
|
||||
use futures::stream::TryStreamExt;
|
||||
|
||||
let version_ids_parsed: Vec<i64> = version_ids.iter().map(|x| x.0).collect();
|
||||
sqlx::query!(
|
||||
"
|
||||
SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,
|
||||
v.changelog changelog, v.date_published date_published, v.downloads downloads,
|
||||
v.version_type version_type, v.featured featured, v.status status, v.requested_status requested_status,
|
||||
JSONB_AGG(DISTINCT jsonb_build_object('version', gv.version, 'created', gv.created)) filter (where gv.version is not null) game_versions,
|
||||
ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,
|
||||
JSONB_AGG(DISTINCT jsonb_build_object('id', f.id, 'url', f.url, 'filename', f.filename, 'primary', f.is_primary, 'size', f.size, 'file_type', f.file_type)) filter (where f.id is not null) files,
|
||||
JSONB_AGG(DISTINCT jsonb_build_object('algorithm', h.algorithm, 'hash', encode(h.hash, 'escape'), 'file_id', h.file_id)) filter (where h.hash is not null) hashes,
|
||||
JSONB_AGG(DISTINCT jsonb_build_object('project_id', d.mod_dependency_id, 'version_id', d.dependency_id, 'dependency_type', d.dependency_type,'file_name', dependency_file_name)) filter (where d.dependency_type is not null) dependencies
|
||||
FROM versions v
|
||||
LEFT OUTER JOIN game_versions_versions gvv on v.id = gvv.joining_version_id
|
||||
LEFT OUTER JOIN game_versions gv on gvv.game_version_id = gv.id
|
||||
LEFT OUTER JOIN loaders_versions lv on v.id = lv.version_id
|
||||
LEFT OUTER JOIN loaders l on lv.loader_id = l.id
|
||||
LEFT OUTER JOIN files f on v.id = f.version_id
|
||||
LEFT OUTER JOIN hashes h on f.id = h.file_id
|
||||
LEFT OUTER JOIN dependencies d on v.id = d.dependent_id
|
||||
WHERE v.id = ANY($1)
|
||||
GROUP BY v.id
|
||||
ORDER BY v.date_published ASC;
|
||||
",
|
||||
&version_ids_parsed
|
||||
)
|
||||
.fetch_many(exec)
|
||||
.try_filter_map(|e| async {
|
||||
Ok(e.right().map(|v|
|
||||
QueryVersion {
|
||||
inner: Version {
|
||||
id: VersionId(v.id),
|
||||
project_id: ProjectId(v.mod_id),
|
||||
author_id: UserId(v.author_id),
|
||||
name: v.version_name,
|
||||
version_number: v.version_number,
|
||||
changelog: v.changelog,
|
||||
changelog_url: None,
|
||||
date_published: v.date_published,
|
||||
downloads: v.downloads,
|
||||
version_type: v.version_type,
|
||||
featured: v.featured,
|
||||
status: VersionStatus::from_str(&v.status),
|
||||
requested_status: v.requested_status
|
||||
.map(|x| VersionStatus::from_str(&x)),
|
||||
},
|
||||
files: {
|
||||
#[derive(Deserialize)]
|
||||
struct Hash {
|
||||
pub file_id: FileId,
|
||||
pub algorithm: String,
|
||||
pub hash: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct File {
|
||||
pub id: FileId,
|
||||
pub url: String,
|
||||
pub filename: String,
|
||||
pub primary: bool,
|
||||
pub size: u32,
|
||||
pub file_type: Option<FileType>,
|
||||
}
|
||||
|
||||
let hashes: Vec<Hash> = serde_json::from_value(
|
||||
v.hashes.unwrap_or_default(),
|
||||
)
|
||||
.ok()
|
||||
.unwrap_or_default();
|
||||
|
||||
let files: Vec<File> = serde_json::from_value(
|
||||
v.files.unwrap_or_default(),
|
||||
)
|
||||
.ok()
|
||||
.unwrap_or_default();
|
||||
|
||||
let mut files = files.into_iter().map(|x| {
|
||||
let mut file_hashes = HashMap::new();
|
||||
|
||||
for hash in &hashes {
|
||||
if hash.file_id == x.id {
|
||||
file_hashes.insert(
|
||||
hash.algorithm.clone(),
|
||||
hash.hash.clone(),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
QueryFile {
|
||||
id: x.id,
|
||||
url: x.url,
|
||||
filename: x.filename,
|
||||
hashes: file_hashes,
|
||||
primary: x.primary,
|
||||
size: x.size,
|
||||
file_type: x.file_type,
|
||||
}
|
||||
}).collect::<Vec<_>>();
|
||||
|
||||
files.sort_by(|a, b| {
|
||||
if a.primary {
|
||||
Ordering::Less
|
||||
} else if b.primary {
|
||||
Ordering::Greater
|
||||
} else {
|
||||
a.filename.cmp(&b.filename)
|
||||
}
|
||||
});
|
||||
|
||||
files
|
||||
},
|
||||
game_versions: {
|
||||
#[derive(Deserialize)]
|
||||
struct GameVersion {
|
||||
pub version: String,
|
||||
pub created: DateTime<Utc>,
|
||||
}
|
||||
|
||||
let mut game_versions: Vec<GameVersion> = serde_json::from_value(
|
||||
v.game_versions.unwrap_or_default(),
|
||||
)
|
||||
.ok()
|
||||
.unwrap_or_default();
|
||||
|
||||
game_versions.sort_by(|a, b| a.created.cmp(&b.created));
|
||||
|
||||
game_versions.into_iter().map(|x| x.version).collect()
|
||||
},
|
||||
loaders: v.loaders.unwrap_or_default(),
|
||||
dependencies: serde_json::from_value(
|
||||
v.dependencies.unwrap_or_default(),
|
||||
)
|
||||
.ok()
|
||||
.unwrap_or_default(),
|
||||
}
|
||||
))
|
||||
})
|
||||
.try_collect::<Vec<QueryVersion>>()
|
||||
Self::get_files_from_hash(algo, &[hash], executor, redis)
|
||||
.await
|
||||
.map(|x| {
|
||||
x.into_iter()
|
||||
.find_or_first(|x| Some(x.version_id) == version_id)
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn get_files_from_hash<'a, 'b, E>(
|
||||
algorithm: String,
|
||||
hashes: &[String],
|
||||
executor: E,
|
||||
redis: &deadpool_redis::Pool,
|
||||
) -> Result<Vec<SingleFile>, DatabaseError>
|
||||
where
|
||||
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
|
||||
{
|
||||
if hashes.is_empty() {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
|
||||
use futures::stream::TryStreamExt;
|
||||
|
||||
let mut file_ids_parsed = hashes.to_vec();
|
||||
|
||||
let mut redis = redis.get().await?;
|
||||
|
||||
let mut found_files = Vec::new();
|
||||
|
||||
let files = cmd("MGET")
|
||||
.arg(
|
||||
file_ids_parsed
|
||||
.iter()
|
||||
.map(|hash| format!("{}:{}_{}", VERSION_FILES_NAMESPACE, algorithm, hash))
|
||||
.collect::<Vec<_>>(),
|
||||
)
|
||||
.query_async::<_, Vec<Option<String>>>(&mut redis)
|
||||
.await?;
|
||||
|
||||
for file in files {
|
||||
if let Some(mut file) =
|
||||
file.and_then(|x| serde_json::from_str::<Vec<SingleFile>>(&x).ok())
|
||||
{
|
||||
file_ids_parsed.retain(|x| {
|
||||
!file
|
||||
.iter()
|
||||
.any(|y| y.hashes.iter().any(|z| z.0 == &algorithm && z.1 == x))
|
||||
});
|
||||
found_files.append(&mut file);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
if !file_ids_parsed.is_empty() {
|
||||
let db_files: Vec<SingleFile> = sqlx::query!(
|
||||
"
|
||||
SELECT f.id, f.version_id, v.mod_id, f.url, f.filename, f.is_primary, f.size, f.file_type,
|
||||
JSONB_AGG(DISTINCT jsonb_build_object('algorithm', h.algorithm, 'hash', encode(h.hash, 'escape'))) filter (where h.hash is not null) hashes
|
||||
FROM files f
|
||||
INNER JOIN versions v on v.id = f.version_id
|
||||
INNER JOIN hashes h on h.file_id = f.id
|
||||
WHERE h.algorithm = $1 AND h.hash = ANY($2)
|
||||
GROUP BY f.id, v.mod_id, v.date_published
|
||||
ORDER BY v.date_published
|
||||
",
|
||||
algorithm,
|
||||
&file_ids_parsed.into_iter().map(|x| x.as_bytes().to_vec()).collect::<Vec<_>>(),
|
||||
)
|
||||
.fetch_many(executor)
|
||||
.try_filter_map(|e| async {
|
||||
Ok(e.right().map(|f| {
|
||||
#[derive(Deserialize)]
|
||||
struct Hash {
|
||||
pub algorithm: String,
|
||||
pub hash: String,
|
||||
}
|
||||
|
||||
SingleFile {
|
||||
id: FileId(f.id),
|
||||
version_id: VersionId(f.version_id),
|
||||
project_id: ProjectId(f.mod_id),
|
||||
url: f.url,
|
||||
filename: f.filename,
|
||||
hashes: serde_json::from_value::<Vec<Hash>>(
|
||||
f.hashes.unwrap_or_default(),
|
||||
)
|
||||
.ok()
|
||||
.unwrap_or_default().into_iter().map(|x| (x.algorithm, x.hash)).collect(),
|
||||
primary: f.is_primary,
|
||||
size: f.size as u32,
|
||||
file_type: f.file_type.map(|x| FileType::from_str(&x)),
|
||||
}
|
||||
}
|
||||
))
|
||||
})
|
||||
.try_collect::<Vec<SingleFile>>()
|
||||
.await?;
|
||||
|
||||
let mut save_files: HashMap<String, Vec<SingleFile>> = HashMap::new();
|
||||
|
||||
for file in db_files {
|
||||
for (algo, hash) in &file.hashes {
|
||||
let key = format!("{}_{}", algo, hash);
|
||||
|
||||
if let Some(files) = save_files.get_mut(&key) {
|
||||
files.push(file.clone());
|
||||
} else {
|
||||
save_files.insert(key, vec![file.clone()]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (key, mut files) in save_files {
|
||||
cmd("SET")
|
||||
.arg(format!("{}:{}", VERSIONS_NAMESPACE, key))
|
||||
.arg(serde_json::to_string(&files)?)
|
||||
.arg("EX")
|
||||
.arg(DEFAULT_EXPIRY)
|
||||
.query_async::<_, ()>(&mut redis)
|
||||
.await?;
|
||||
|
||||
found_files.append(&mut files);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(found_files)
|
||||
}
|
||||
|
||||
pub async fn clear_cache(
|
||||
version: &QueryVersion,
|
||||
redis: &deadpool_redis::Pool,
|
||||
) -> Result<(), DatabaseError> {
|
||||
let mut redis = redis.get().await?;
|
||||
|
||||
let mut cmd = cmd("DEL");
|
||||
|
||||
cmd.arg(format!("{}:{}", VERSIONS_NAMESPACE, version.inner.id.0));
|
||||
|
||||
for file in &version.files {
|
||||
for (algo, hash) in &file.hashes {
|
||||
cmd.arg(format!("{}:{}_{}", VERSION_FILES_NAMESPACE, algo, hash));
|
||||
}
|
||||
}
|
||||
|
||||
cmd.query_async::<_, ()>(&mut redis).await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// TODO: Needs to be cached
|
||||
pub async fn get_full_from_id_slug<'a, 'b, E>(
|
||||
project_id_or_slug: &str,
|
||||
slug: &str,
|
||||
executor: E,
|
||||
) -> Result<Option<QueryVersion>, sqlx::error::Error>
|
||||
redis: &deadpool_redis::Pool,
|
||||
) -> Result<Option<QueryVersion>, DatabaseError>
|
||||
where
|
||||
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
|
||||
{
|
||||
@@ -665,14 +778,14 @@ impl Version {
|
||||
.await?;
|
||||
|
||||
if let Some(version_id) = id {
|
||||
Version::get_full(VersionId(version_id.id), executor).await
|
||||
Ok(Version::get(VersionId(version_id.id), executor, redis).await?)
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
#[derive(Clone, Deserialize, Serialize)]
|
||||
pub struct QueryVersion {
|
||||
pub inner: Version,
|
||||
|
||||
@@ -682,7 +795,7 @@ pub struct QueryVersion {
|
||||
pub dependencies: Vec<QueryDependency>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Deserialize)]
|
||||
#[derive(Clone, Deserialize, Serialize)]
|
||||
pub struct QueryDependency {
|
||||
pub project_id: Option<ProjectId>,
|
||||
pub version_id: Option<VersionId>,
|
||||
@@ -690,7 +803,7 @@ pub struct QueryDependency {
|
||||
pub dependency_type: String,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
#[derive(Clone, Deserialize, Serialize)]
|
||||
pub struct QueryFile {
|
||||
pub id: FileId,
|
||||
pub url: String,
|
||||
@@ -700,3 +813,16 @@ pub struct QueryFile {
|
||||
pub size: u32,
|
||||
pub file_type: Option<FileType>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Deserialize, Serialize)]
|
||||
pub struct SingleFile {
|
||||
pub id: FileId,
|
||||
pub version_id: VersionId,
|
||||
pub project_id: ProjectId,
|
||||
pub url: String,
|
||||
pub filename: String,
|
||||
pub hashes: HashMap<String, String>,
|
||||
pub primary: bool,
|
||||
pub size: u32,
|
||||
pub file_type: Option<FileType>,
|
||||
}
|
||||
|
||||
@@ -1,8 +0,0 @@
|
||||
pub mod status;
|
||||
|
||||
use lazy_static::lazy_static;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
|
||||
lazy_static! {
|
||||
pub static ref SEARCH_READY: AtomicBool = AtomicBool::new(false);
|
||||
}
|
||||
@@ -1,14 +0,0 @@
|
||||
use actix_web::web;
|
||||
use sqlx::PgPool;
|
||||
|
||||
pub async fn test_database(postgres: web::Data<PgPool>) -> Result<(), sqlx::Error> {
|
||||
let mut transaction = postgres.acquire().await?;
|
||||
sqlx::query(
|
||||
"
|
||||
SELECT 1
|
||||
",
|
||||
)
|
||||
.execute(&mut transaction)
|
||||
.await
|
||||
.map(|_| ())
|
||||
}
|
||||
23
src/main.rs
23
src/main.rs
@@ -8,6 +8,7 @@ use crate::util::env::{parse_strings_from_var, parse_var};
|
||||
use actix_cors::Cors;
|
||||
use actix_web::{web, App, HttpServer};
|
||||
use chrono::{DateTime, Utc};
|
||||
use deadpool_redis::{Config, Runtime};
|
||||
use env_logger::Env;
|
||||
use log::{error, info, warn};
|
||||
use search::indexing::index_projects;
|
||||
@@ -15,9 +16,9 @@ use search::indexing::IndexingSettings;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::Mutex;
|
||||
|
||||
mod auth;
|
||||
mod database;
|
||||
mod file_hosting;
|
||||
mod health;
|
||||
mod models;
|
||||
mod queue;
|
||||
mod ratelimit;
|
||||
@@ -74,6 +75,12 @@ async fn main() -> std::io::Result<()> {
|
||||
.await
|
||||
.expect("Database connection failed");
|
||||
|
||||
// Redis connector
|
||||
let redis_cfg = Config::from_url(dotenvy::var("REDIS_URL").expect("Redis URL not set"));
|
||||
let redis_pool = redis_cfg
|
||||
.create_pool(Some(Runtime::Tokio1))
|
||||
.expect("Redis connection failed");
|
||||
|
||||
let storage_backend = dotenvy::var("STORAGE_BACKEND").unwrap_or_else(|_| "local".to_string());
|
||||
|
||||
let file_host: Arc<dyn file_hosting::FileHost + Send + Sync> = match storage_backend.as_str() {
|
||||
@@ -152,6 +159,7 @@ async fn main() -> std::io::Result<()> {
|
||||
|
||||
// Changes statuses of scheduled projects/versions
|
||||
let pool_ref = pool.clone();
|
||||
// TODO: Clear cache when these are run
|
||||
scheduler.run(std::time::Duration::from_secs(60), move || {
|
||||
let pool_ref = pool_ref.clone();
|
||||
info!("Releasing scheduled versions/projects!");
|
||||
@@ -245,7 +253,7 @@ async fn main() -> std::io::Result<()> {
|
||||
}
|
||||
}
|
||||
|
||||
Ok::<(), crate::routes::ApiError>(())
|
||||
Ok::<(), routes::ApiError>(())
|
||||
};
|
||||
|
||||
if let Err(e) = do_steps.await {
|
||||
@@ -342,6 +350,7 @@ async fn main() -> std::io::Result<()> {
|
||||
routes::ApiError::Validation(err.to_string()).into()
|
||||
}),
|
||||
)
|
||||
.app_data(web::Data::new(redis_pool.clone()))
|
||||
.app_data(web::Data::new(pool.clone()))
|
||||
.app_data(web::Data::new(file_host.clone()))
|
||||
.app_data(web::Data::new(search_config.clone()))
|
||||
@@ -352,6 +361,7 @@ async fn main() -> std::io::Result<()> {
|
||||
.configure(routes::root_config)
|
||||
.configure(routes::v2::config)
|
||||
.configure(routes::v3::config)
|
||||
.configure(auth::config)
|
||||
.default_service(web::get().to(routes::not_found))
|
||||
})
|
||||
.bind(dotenvy::var("BIND_ADDR").unwrap())?
|
||||
@@ -387,9 +397,6 @@ fn check_env_vars() -> bool {
|
||||
|
||||
failed |= check_var::<String>("SITE_URL");
|
||||
failed |= check_var::<String>("CDN_URL");
|
||||
failed |= check_var::<String>("MINOS_URL");
|
||||
failed |= check_var::<String>("KRATOS_URL");
|
||||
failed |= check_var::<String>("ORY_AUTH_BEARER");
|
||||
failed |= check_var::<String>("LABRINTH_ADMIN_KEY");
|
||||
failed |= check_var::<String>("RATE_LIMIT_IGNORE_KEY");
|
||||
failed |= check_var::<String>("DATABASE_URL");
|
||||
@@ -398,6 +405,8 @@ fn check_env_vars() -> bool {
|
||||
failed |= check_var::<String>("BIND_ADDR");
|
||||
failed |= check_var::<String>("SELF_ADDR");
|
||||
|
||||
failed |= check_var::<String>("REDIS_URL");
|
||||
|
||||
failed |= check_var::<String>("STORAGE_BACKEND");
|
||||
|
||||
let storage_backend = dotenvy::var("STORAGE_BACKEND").ok();
|
||||
@@ -431,13 +440,11 @@ fn check_env_vars() -> bool {
|
||||
failed |= check_var::<usize>("VERSION_INDEX_INTERVAL");
|
||||
|
||||
failed |= check_var::<String>("GITHUB_CLIENT_ID");
|
||||
failed |= check_var::<String>("GITHUB_CLIENT_SECRET");
|
||||
|
||||
failed |= check_var::<String>("ARIADNE_ADMIN_KEY");
|
||||
failed |= check_var::<String>("ARIADNE_URL");
|
||||
|
||||
failed |= check_var::<String>("STRIPE_TOKEN");
|
||||
failed |= check_var::<String>("STRIPE_WEBHOOK_SECRET");
|
||||
|
||||
failed |= check_var::<String>("PAYPAL_API_URL");
|
||||
failed |= check_var::<String>("PAYPAL_CLIENT_ID");
|
||||
failed |= check_var::<String>("PAYPAL_CLIENT_SECRET");
|
||||
|
||||
@@ -3,6 +3,7 @@ use thiserror::Error;
|
||||
pub use super::notifications::NotificationId;
|
||||
pub use super::projects::{ProjectId, VersionId};
|
||||
pub use super::reports::ReportId;
|
||||
pub use super::sessions::SessionId;
|
||||
pub use super::teams::TeamId;
|
||||
pub use super::threads::ThreadId;
|
||||
pub use super::threads::ThreadMessageId;
|
||||
@@ -113,6 +114,7 @@ base62_id_impl!(ReportId, ReportId);
|
||||
base62_id_impl!(NotificationId, NotificationId);
|
||||
base62_id_impl!(ThreadId, ThreadId);
|
||||
base62_id_impl!(ThreadMessageId, ThreadMessageId);
|
||||
base62_id_impl!(SessionId, SessionId);
|
||||
|
||||
pub mod base62_impl {
|
||||
use serde::de::{self, Deserializer, Visitor};
|
||||
|
||||
@@ -4,6 +4,7 @@ pub mod notifications;
|
||||
pub mod pack;
|
||||
pub mod projects;
|
||||
pub mod reports;
|
||||
pub mod sessions;
|
||||
pub mod teams;
|
||||
pub mod threads;
|
||||
pub mod users;
|
||||
|
||||
@@ -165,8 +165,8 @@ impl From<QueryProject> for Project {
|
||||
followers: m.follows as u32,
|
||||
categories: data.categories,
|
||||
additional_categories: data.additional_categories,
|
||||
game_versions: m.game_versions,
|
||||
loaders: m.loaders,
|
||||
game_versions: data.game_versions,
|
||||
loaders: data.loaders,
|
||||
versions: data.versions.into_iter().map(|v| v.into()).collect(),
|
||||
icon_url: m.icon_url,
|
||||
issues_url: m.issues_url,
|
||||
@@ -449,7 +449,7 @@ impl MonetizationStatus {
|
||||
}
|
||||
|
||||
/// A specific version of a project
|
||||
#[derive(Serialize, Deserialize)]
|
||||
#[derive(Serialize, Deserialize, Clone)]
|
||||
pub struct Version {
|
||||
/// The ID of the version, encoded as a base62 string.
|
||||
pub id: VersionId,
|
||||
@@ -633,7 +633,7 @@ impl VersionStatus {
|
||||
}
|
||||
|
||||
/// A single project file, with a url for the file and the file's hash
|
||||
#[derive(Serialize, Deserialize)]
|
||||
#[derive(Serialize, Deserialize, Clone)]
|
||||
pub struct VersionFile {
|
||||
/// A map of hashes of the file. The key is the hashing algorithm
|
||||
/// and the value is the string version of the hash.
|
||||
@@ -749,6 +749,15 @@ impl FileType {
|
||||
FileType::Unknown => "unknown",
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_str(string: &str) -> FileType {
|
||||
match string {
|
||||
"required-resource-pack" => FileType::RequiredResourcePack,
|
||||
"optional-resource-pack" => FileType::OptionalResourcePack,
|
||||
"unknown" => FileType::Unknown,
|
||||
_ => FileType::Unknown,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A specific version of Minecraft
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
use super::ids::Base62Id;
|
||||
use crate::models::ids::{ThreadId, UserId};
|
||||
use crate::database::models::report_item::QueryReport as DBReport;
|
||||
use crate::models::ids::{ProjectId, ThreadId, UserId, VersionId};
|
||||
use chrono::{DateTime, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
@@ -40,3 +41,33 @@ impl ItemType {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<DBReport> for Report {
|
||||
fn from(x: DBReport) -> Self {
|
||||
let mut item_id = "".to_string();
|
||||
let mut item_type = ItemType::Unknown;
|
||||
|
||||
if let Some(project_id) = x.project_id {
|
||||
item_id = ProjectId::from(project_id).to_string();
|
||||
item_type = ItemType::Project;
|
||||
} else if let Some(version_id) = x.version_id {
|
||||
item_id = VersionId::from(version_id).to_string();
|
||||
item_type = ItemType::Version;
|
||||
} else if let Some(user_id) = x.user_id {
|
||||
item_id = UserId::from(user_id).to_string();
|
||||
item_type = ItemType::User;
|
||||
}
|
||||
|
||||
Report {
|
||||
id: x.id.into(),
|
||||
report_type: x.report_type,
|
||||
item_id,
|
||||
item_type,
|
||||
reporter: x.reporter.into(),
|
||||
body: x.body,
|
||||
created: x.created,
|
||||
closed: x.closed,
|
||||
thread_id: x.thread_id.map(|x| x.into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
29
src/models/sessions.rs
Normal file
29
src/models/sessions.rs
Normal file
@@ -0,0 +1,29 @@
|
||||
use super::ids::Base62Id;
|
||||
use crate::models::users::UserId;
|
||||
use chrono::{DateTime, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[serde(from = "Base62Id")]
|
||||
#[serde(into = "Base62Id")]
|
||||
pub struct SessionId(pub u64);
|
||||
|
||||
#[derive(Serialize, Deserialize, Clone)]
|
||||
pub struct Session {
|
||||
pub id: SessionId,
|
||||
pub session: String,
|
||||
pub user_id: UserId,
|
||||
|
||||
pub created: DateTime<Utc>,
|
||||
pub last_login: DateTime<Utc>,
|
||||
pub expires: DateTime<Utc>,
|
||||
pub refresh_expires: DateTime<Utc>,
|
||||
|
||||
pub os: Option<String>,
|
||||
pub platform: Option<String>,
|
||||
pub user_agent: String,
|
||||
|
||||
pub city: Option<String>,
|
||||
pub country: Option<String>,
|
||||
pub ip: String,
|
||||
}
|
||||
@@ -82,3 +82,44 @@ impl ThreadType {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Thread {
|
||||
pub fn from(data: crate::database::models::Thread, users: Vec<User>, user: &User) -> Self {
|
||||
let thread_type = data.type_;
|
||||
|
||||
Thread {
|
||||
id: data.id.into(),
|
||||
type_: thread_type,
|
||||
messages: data
|
||||
.messages
|
||||
.into_iter()
|
||||
.filter(|x| {
|
||||
if let MessageBody::Text { private, .. } = x.body {
|
||||
!private || user.role.is_mod()
|
||||
} else {
|
||||
true
|
||||
}
|
||||
})
|
||||
.map(|x| ThreadMessage {
|
||||
id: x.id.into(),
|
||||
author_id: if users
|
||||
.iter()
|
||||
.find(|y| x.author_id == Some(y.id.into()))
|
||||
.map(|x| x.role.is_mod() && !user.role.is_mod())
|
||||
.unwrap_or(false)
|
||||
{
|
||||
None
|
||||
} else {
|
||||
x.author_id.map(|x| x.into())
|
||||
},
|
||||
body: x.body,
|
||||
created: x.created,
|
||||
})
|
||||
.collect(),
|
||||
members: users
|
||||
.into_iter()
|
||||
.filter(|x| !x.role.is_mod() || user.role.is_mod())
|
||||
.collect(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -37,7 +37,6 @@ impl Default for Badges {
|
||||
#[derive(Serialize, Deserialize, Clone)]
|
||||
pub struct User {
|
||||
pub id: UserId,
|
||||
pub kratos_id: Option<String>, // None if legacy user unconnected to Minos/Kratos
|
||||
pub username: String,
|
||||
pub name: Option<String>,
|
||||
pub email: Option<String>,
|
||||
@@ -48,11 +47,11 @@ pub struct User {
|
||||
pub badges: Badges,
|
||||
pub payout_data: Option<UserPayoutData>,
|
||||
pub github_id: Option<u64>,
|
||||
pub discord_id: Option<u64>,
|
||||
pub google_id: Option<u128>,
|
||||
pub microsoft_id: Option<u64>,
|
||||
pub apple_id: Option<u64>,
|
||||
pub gitlab_id: Option<u64>,
|
||||
// pub discord_id: Option<u64>,
|
||||
// pub google_id: Option<u128>,
|
||||
// pub microsoft_id: Option<u64>,
|
||||
// pub apple_id: Option<u64>,
|
||||
// pub gitlab_id: Option<u64>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Clone)]
|
||||
@@ -136,7 +135,6 @@ impl From<DBUser> for User {
|
||||
fn from(data: DBUser) -> Self {
|
||||
Self {
|
||||
id: data.id.into(),
|
||||
kratos_id: data.kratos_id,
|
||||
username: data.username,
|
||||
name: data.name,
|
||||
email: None,
|
||||
@@ -147,11 +145,11 @@ impl From<DBUser> for User {
|
||||
badges: data.badges,
|
||||
payout_data: None,
|
||||
github_id: None,
|
||||
discord_id: None,
|
||||
google_id: None,
|
||||
microsoft_id: None,
|
||||
apple_id: None,
|
||||
gitlab_id: None,
|
||||
// discord_id: None,
|
||||
// google_id: None,
|
||||
// microsoft_id: None,
|
||||
// apple_id: None,
|
||||
// gitlab_id: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,31 +0,0 @@
|
||||
use crate::health::status::test_database;
|
||||
use crate::health::SEARCH_READY;
|
||||
use actix_web::web::Data;
|
||||
use actix_web::{get, HttpResponse};
|
||||
use serde_json::json;
|
||||
use sqlx::PgPool;
|
||||
use std::sync::atomic::Ordering;
|
||||
|
||||
#[get("/health")]
|
||||
pub async fn health_get(client: Data<PgPool>) -> HttpResponse {
|
||||
// Check database connection:
|
||||
let result = test_database(client).await;
|
||||
if result.is_err() {
|
||||
let data = json!({
|
||||
"ready": false,
|
||||
"reason": "Database connection error"
|
||||
});
|
||||
return HttpResponse::InternalServerError().json(data);
|
||||
}
|
||||
if !SEARCH_READY.load(Ordering::Acquire) {
|
||||
let data = json!({
|
||||
"ready": false,
|
||||
"reason": "Indexing is not finished"
|
||||
});
|
||||
return HttpResponse::InternalServerError().json(data);
|
||||
}
|
||||
HttpResponse::Ok().json(json!({
|
||||
"ready": true,
|
||||
"reason": "Everything is OK"
|
||||
}))
|
||||
}
|
||||
@@ -1,9 +1,9 @@
|
||||
use crate::auth::{get_user_from_headers, is_authorized_version};
|
||||
use crate::database::models::project_item::QueryProject;
|
||||
use crate::database::models::version_item::{QueryFile, QueryVersion};
|
||||
use crate::models::projects::{ProjectId, VersionId};
|
||||
use crate::routes::ApiError;
|
||||
use crate::util::auth::{get_user_from_headers, is_authorized_version};
|
||||
use crate::{database, util::auth::is_authorized};
|
||||
use crate::{auth::is_authorized, database};
|
||||
use actix_web::{get, route, web, HttpRequest, HttpResponse};
|
||||
use sqlx::PgPool;
|
||||
use std::collections::HashSet;
|
||||
@@ -66,10 +66,10 @@ pub async fn maven_metadata(
|
||||
req: HttpRequest,
|
||||
params: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<deadpool_redis::Pool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let project_id = params.into_inner().0;
|
||||
let project_data =
|
||||
database::models::Project::get_from_slug_or_project_id(&project_id, &**pool).await?;
|
||||
let project_data = database::models::Project::get(&project_id, &**pool, &redis).await?;
|
||||
|
||||
let data = if let Some(data) = project_data {
|
||||
data
|
||||
@@ -77,9 +77,11 @@ pub async fn maven_metadata(
|
||||
return Ok(HttpResponse::NotFound().body(""));
|
||||
};
|
||||
|
||||
let user_option = get_user_from_headers(req.headers(), &**pool).await.ok();
|
||||
let user_option = get_user_from_headers(req.headers(), &**pool, &redis)
|
||||
.await
|
||||
.ok();
|
||||
|
||||
if !is_authorized(&data, &user_option, &pool).await? {
|
||||
if !is_authorized(&data.inner, &user_option, &pool).await? {
|
||||
return Ok(HttpResponse::NotFound().body(""));
|
||||
}
|
||||
|
||||
@@ -90,7 +92,7 @@ pub async fn maven_metadata(
|
||||
WHERE mod_id = $1 AND status = ANY($2)
|
||||
ORDER BY date_published ASC
|
||||
",
|
||||
data.id as database::models::ids::ProjectId,
|
||||
data.inner.id as database::models::ids::ProjectId,
|
||||
&*crate::models::projects::VersionStatus::iterator()
|
||||
.filter(|x| x.is_listed())
|
||||
.map(|x| x.to_string())
|
||||
@@ -118,7 +120,7 @@ pub async fn maven_metadata(
|
||||
new_versions.push(value);
|
||||
}
|
||||
|
||||
let project_id: ProjectId = data.id.into();
|
||||
let project_id: ProjectId = data.inner.id.into();
|
||||
|
||||
let respdata = Metadata {
|
||||
group_id: "maven.modrinth".to_string(),
|
||||
@@ -132,7 +134,7 @@ pub async fn maven_metadata(
|
||||
versions: Versions {
|
||||
versions: new_versions,
|
||||
},
|
||||
last_updated: data.updated.format("%Y%m%d%H%M%S").to_string(),
|
||||
last_updated: data.inner.updated.format("%Y%m%d%H%M%S").to_string(),
|
||||
},
|
||||
};
|
||||
|
||||
@@ -185,10 +187,10 @@ pub async fn version_file(
|
||||
req: HttpRequest,
|
||||
params: web::Path<(String, String, String)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<deadpool_redis::Pool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let (project_id, vnum, file) = params.into_inner();
|
||||
let project_data =
|
||||
database::models::Project::get_full_from_slug_or_project_id(&project_id, &**pool).await?;
|
||||
let project_data = database::models::Project::get(&project_id, &**pool, &redis).await?;
|
||||
|
||||
let project = if let Some(data) = project_data {
|
||||
data
|
||||
@@ -196,7 +198,9 @@ pub async fn version_file(
|
||||
return Ok(HttpResponse::NotFound().body(""));
|
||||
};
|
||||
|
||||
let user_option = get_user_from_headers(req.headers(), &**pool).await.ok();
|
||||
let user_option = get_user_from_headers(req.headers(), &**pool, &redis)
|
||||
.await
|
||||
.ok();
|
||||
|
||||
if !is_authorized(&project.inner, &user_option, &pool).await? {
|
||||
return Ok(HttpResponse::NotFound().body(""));
|
||||
@@ -221,7 +225,7 @@ pub async fn version_file(
|
||||
};
|
||||
|
||||
let version = if let Some(version) =
|
||||
database::models::Version::get_full(database::models::ids::VersionId(vid.id), &**pool)
|
||||
database::models::Version::get(database::models::ids::VersionId(vid.id), &**pool, &redis)
|
||||
.await?
|
||||
{
|
||||
version
|
||||
@@ -266,10 +270,10 @@ pub async fn version_file_sha1(
|
||||
req: HttpRequest,
|
||||
params: web::Path<(String, String, String)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<deadpool_redis::Pool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let (project_id, vnum, file) = params.into_inner();
|
||||
let project_data =
|
||||
database::models::Project::get_full_from_slug_or_project_id(&project_id, &**pool).await?;
|
||||
let project_data = database::models::Project::get(&project_id, &**pool, &redis).await?;
|
||||
|
||||
let project = if let Some(data) = project_data {
|
||||
data
|
||||
@@ -277,7 +281,9 @@ pub async fn version_file_sha1(
|
||||
return Ok(HttpResponse::NotFound().body(""));
|
||||
};
|
||||
|
||||
let user_option = get_user_from_headers(req.headers(), &**pool).await.ok();
|
||||
let user_option = get_user_from_headers(req.headers(), &**pool, &redis)
|
||||
.await
|
||||
.ok();
|
||||
|
||||
if !is_authorized(&project.inner, &user_option, &pool).await? {
|
||||
return Ok(HttpResponse::NotFound().body(""));
|
||||
@@ -302,7 +308,7 @@ pub async fn version_file_sha1(
|
||||
};
|
||||
|
||||
let version = if let Some(version) =
|
||||
database::models::Version::get_full(database::models::ids::VersionId(vid.id), &**pool)
|
||||
database::models::Version::get(database::models::ids::VersionId(vid.id), &**pool, &redis)
|
||||
.await?
|
||||
{
|
||||
version
|
||||
@@ -321,10 +327,10 @@ pub async fn version_file_sha512(
|
||||
req: HttpRequest,
|
||||
params: web::Path<(String, String, String)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<deadpool_redis::Pool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let (project_id, vnum, file) = params.into_inner();
|
||||
let project_data =
|
||||
database::models::Project::get_full_from_slug_or_project_id(&project_id, &**pool).await?;
|
||||
let project_data = database::models::Project::get(&project_id, &**pool, &redis).await?;
|
||||
|
||||
let project = if let Some(data) = project_data {
|
||||
data
|
||||
@@ -332,7 +338,9 @@ pub async fn version_file_sha512(
|
||||
return Ok(HttpResponse::NotFound().body(""));
|
||||
};
|
||||
|
||||
let user_option = get_user_from_headers(req.headers(), &**pool).await.ok();
|
||||
let user_option = get_user_from_headers(req.headers(), &**pool, &redis)
|
||||
.await
|
||||
.ok();
|
||||
|
||||
if !is_authorized(&project.inner, &user_option, &pool).await? {
|
||||
return Ok(HttpResponse::NotFound().body(""));
|
||||
@@ -357,7 +365,7 @@ pub async fn version_file_sha512(
|
||||
};
|
||||
|
||||
let version = if let Some(version) =
|
||||
database::models::Version::get_full(database::models::ids::VersionId(vid.id), &**pool)
|
||||
database::models::Version::get(database::models::ids::VersionId(vid.id), &**pool, &redis)
|
||||
.await?
|
||||
{
|
||||
version
|
||||
|
||||
@@ -6,7 +6,6 @@ use futures::FutureExt;
|
||||
pub mod v2;
|
||||
pub mod v3;
|
||||
|
||||
mod health;
|
||||
mod index;
|
||||
mod maven;
|
||||
mod not_found;
|
||||
@@ -16,7 +15,6 @@ pub use self::not_found::not_found;
|
||||
|
||||
pub fn root_config(cfg: &mut web::ServiceConfig) {
|
||||
cfg.service(index::index_get);
|
||||
cfg.service(health::health_get);
|
||||
cfg.service(web::scope("maven").configure(maven::config));
|
||||
cfg.service(web::scope("updates").configure(updates::config));
|
||||
cfg.service(
|
||||
@@ -47,7 +45,7 @@ pub enum ApiError {
|
||||
#[error("Deserialization error: {0}")]
|
||||
Json(#[from] serde_json::Error),
|
||||
#[error("Authentication Error: {0}")]
|
||||
Authentication(#[from] crate::util::auth::AuthenticationError),
|
||||
Authentication(#[from] crate::auth::AuthenticationError),
|
||||
#[error("Authentication Error: {0}")]
|
||||
CustomAuthentication(String),
|
||||
#[error("Invalid Input: {0}")]
|
||||
@@ -60,8 +58,6 @@ pub enum ApiError {
|
||||
Indexing(#[from] crate::search::indexing::IndexingError),
|
||||
#[error("Ariadne Error: {0}")]
|
||||
Analytics(String),
|
||||
#[error("Crypto Error: {0}")]
|
||||
Crypto(String),
|
||||
#[error("Payments Error: {0}")]
|
||||
Payments(String),
|
||||
#[error("Discord Error: {0}")]
|
||||
@@ -88,7 +84,6 @@ impl actix_web::ResponseError for ApiError {
|
||||
ApiError::InvalidInput(..) => StatusCode::BAD_REQUEST,
|
||||
ApiError::Validation(..) => StatusCode::BAD_REQUEST,
|
||||
ApiError::Analytics(..) => StatusCode::FAILED_DEPENDENCY,
|
||||
ApiError::Crypto(..) => StatusCode::FORBIDDEN,
|
||||
ApiError::Payments(..) => StatusCode::FAILED_DEPENDENCY,
|
||||
ApiError::DiscordError(..) => StatusCode::FAILED_DEPENDENCY,
|
||||
ApiError::Decoding(..) => StatusCode::BAD_REQUEST,
|
||||
@@ -112,7 +107,6 @@ impl actix_web::ResponseError for ApiError {
|
||||
ApiError::InvalidInput(..) => "invalid_input",
|
||||
ApiError::Validation(..) => "invalid_input",
|
||||
ApiError::Analytics(..) => "analytics_error",
|
||||
ApiError::Crypto(..) => "crypto_error",
|
||||
ApiError::Payments(..) => "payments_error",
|
||||
ApiError::DiscordError(..) => "discord_error",
|
||||
ApiError::Decoding(..) => "decoding_error",
|
||||
|
||||
@@ -4,9 +4,9 @@ use actix_web::{get, web, HttpRequest, HttpResponse};
|
||||
use serde::Serialize;
|
||||
use sqlx::PgPool;
|
||||
|
||||
use crate::auth::{filter_authorized_versions, get_user_from_headers, is_authorized};
|
||||
use crate::database;
|
||||
use crate::models::projects::VersionType;
|
||||
use crate::util::auth::{filter_authorized_versions, get_user_from_headers, is_authorized};
|
||||
|
||||
use super::ApiError;
|
||||
|
||||
@@ -19,36 +19,36 @@ pub async fn forge_updates(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<deadpool_redis::Pool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
const ERROR: &str = "The specified project does not exist!";
|
||||
|
||||
let (id,) = info.into_inner();
|
||||
|
||||
let project = database::models::Project::get_from_slug_or_project_id(&id, &**pool)
|
||||
let project = database::models::Project::get(&id, &**pool, &redis)
|
||||
.await?
|
||||
.ok_or_else(|| ApiError::InvalidInput(ERROR.to_string()))?;
|
||||
|
||||
let user_option = get_user_from_headers(req.headers(), &**pool).await.ok();
|
||||
let user_option = get_user_from_headers(req.headers(), &**pool, &redis)
|
||||
.await
|
||||
.ok();
|
||||
|
||||
if !is_authorized(&project, &user_option, &pool).await? {
|
||||
if !is_authorized(&project.inner, &user_option, &pool).await? {
|
||||
return Err(ApiError::InvalidInput(ERROR.to_string()));
|
||||
}
|
||||
|
||||
let version_ids = database::models::Version::get_project_versions(
|
||||
project.id,
|
||||
None,
|
||||
Some(vec!["forge".to_string()]),
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
&**pool,
|
||||
let versions = database::models::Version::get_many(&project.versions, &**pool, &redis).await?;
|
||||
|
||||
let mut versions = filter_authorized_versions(
|
||||
versions
|
||||
.into_iter()
|
||||
.filter(|x| x.loaders.iter().any(|y| *y == "forge"))
|
||||
.collect(),
|
||||
&user_option,
|
||||
&pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let versions = database::models::Version::get_many_full(&version_ids, &**pool).await?;
|
||||
|
||||
let mut versions = filter_authorized_versions(versions, &user_option, &pool).await?;
|
||||
|
||||
versions.sort_by(|a, b| b.date_published.cmp(&a.date_published));
|
||||
|
||||
#[derive(Serialize)]
|
||||
|
||||
@@ -1,12 +1,10 @@
|
||||
use crate::database::models::user_item;
|
||||
use crate::database::models::{User, UserId};
|
||||
use crate::models::ids::ProjectId;
|
||||
use crate::models::projects::MonetizationStatus;
|
||||
use crate::models::users::User;
|
||||
use crate::routes::ApiError;
|
||||
use crate::util::auth::{link_or_insert_new_user, MinosNewUser};
|
||||
use crate::util::guards::admin_key_guard;
|
||||
use crate::DownloadQueue;
|
||||
use actix_web::{get, patch, post, web, HttpResponse};
|
||||
use actix_web::{patch, post, web, HttpResponse};
|
||||
use chrono::{DateTime, SecondsFormat, Utc};
|
||||
use rust_decimal::Decimal;
|
||||
use serde::Deserialize;
|
||||
@@ -19,110 +17,10 @@ pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
cfg.service(
|
||||
web::scope("admin")
|
||||
.service(count_download)
|
||||
.service(add_minos_user)
|
||||
.service(edit_github_id)
|
||||
.service(edit_email)
|
||||
.service(get_legacy_account)
|
||||
.service(process_payout),
|
||||
);
|
||||
}
|
||||
|
||||
// Adds a Minos user to the database
|
||||
// This is an internal endpoint, and should not be used by applications, only by the Minos backend
|
||||
#[post("_minos-user-callback", guard = "admin_key_guard")]
|
||||
pub async fn add_minos_user(
|
||||
minos_user: web::Json<MinosNewUser>, // getting directly from Kratos rather than Minos, so unparse
|
||||
client: web::Data<PgPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let minos_new_user = minos_user.into_inner();
|
||||
let mut transaction = client.begin().await?;
|
||||
link_or_insert_new_user(&mut transaction, minos_new_user).await?;
|
||||
transaction.commit().await?;
|
||||
Ok(HttpResponse::Ok().finish())
|
||||
}
|
||||
|
||||
// Add or update a user's GitHub ID by their kratos id
|
||||
// OIDC ids should be kept in Minos, but Github is duplicated in Labrinth for legacy support
|
||||
// This should not be directly useable by applications, only by the Minos backend
|
||||
// user id is passed in path, github id is passed in body
|
||||
#[derive(Deserialize)]
|
||||
pub struct EditGithubId {
|
||||
github_id: Option<String>,
|
||||
}
|
||||
#[post("_edit_github_id/{kratos_id}", guard = "admin_key_guard")]
|
||||
pub async fn edit_github_id(
|
||||
pool: web::Data<PgPool>,
|
||||
kratos_id: web::Path<String>,
|
||||
github_id: web::Json<EditGithubId>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let github_id = github_id.into_inner().github_id;
|
||||
// Parse error if github inner id not a number
|
||||
let github_id = github_id
|
||||
.as_ref()
|
||||
.map(|x| x.parse::<i64>())
|
||||
.transpose()
|
||||
.map_err(|_| ApiError::InvalidInput("Github id must be a number".to_string()))?;
|
||||
|
||||
let mut transaction = pool.begin().await?;
|
||||
sqlx::query!(
|
||||
"
|
||||
UPDATE users
|
||||
SET github_id = $1
|
||||
WHERE kratos_id = $2
|
||||
",
|
||||
github_id,
|
||||
kratos_id.into_inner()
|
||||
)
|
||||
.execute(&mut transaction)
|
||||
.await?;
|
||||
transaction.commit().await?;
|
||||
Ok(HttpResponse::Ok().finish())
|
||||
}
|
||||
|
||||
// Update a user's email ID by their kratos id
|
||||
// email ids should be kept in Minos, but email is duplicated in Labrinth for legacy support (and to avoid Minos calls)
|
||||
// This should not be directly useable by applications, only by the Minos backend
|
||||
// user id is passed in path, email is passed in body
|
||||
#[derive(Deserialize)]
|
||||
pub struct EditEmail {
|
||||
email: String,
|
||||
}
|
||||
#[post("_edit_email/{kratos_id}", guard = "admin_key_guard")]
|
||||
pub async fn edit_email(
|
||||
pool: web::Data<PgPool>,
|
||||
kratos_id: web::Path<String>,
|
||||
email: web::Json<EditEmail>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let email = email.into_inner().email;
|
||||
|
||||
let mut transaction = pool.begin().await?;
|
||||
sqlx::query!(
|
||||
"
|
||||
UPDATE users
|
||||
SET email = $1
|
||||
WHERE kratos_id = $2
|
||||
",
|
||||
email,
|
||||
kratos_id.into_inner()
|
||||
)
|
||||
.execute(&mut transaction)
|
||||
.await?;
|
||||
transaction.commit().await?;
|
||||
Ok(HttpResponse::Ok().finish())
|
||||
}
|
||||
|
||||
#[get("_legacy_account/{github_id}", guard = "admin_key_guard")]
|
||||
|
||||
pub async fn get_legacy_account(
|
||||
pool: web::Data<PgPool>,
|
||||
github_id: web::Path<i32>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let github_id = github_id.into_inner();
|
||||
let user = user_item::User::get_from_github_id(github_id as u64, &**pool).await?;
|
||||
let user: Option<User> = user.map(|u| u.into());
|
||||
Ok(HttpResponse::Ok().json(user))
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct DownloadBody {
|
||||
pub url: String,
|
||||
@@ -214,6 +112,7 @@ pub struct PayoutData {
|
||||
#[post("/_process_payout", guard = "admin_key_guard")]
|
||||
pub async fn process_payout(
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<deadpool_redis::Pool>,
|
||||
data: web::Json<PayoutData>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let start: DateTime<Utc> = DateTime::from_utc(
|
||||
@@ -409,6 +308,8 @@ pub async fn process_payout(
|
||||
let sum_splits: Decimal = project.team_members.iter().map(|x| x.1).sum();
|
||||
let sum_tm_splits: Decimal = project.split_team_members.iter().map(|x| x.1).sum();
|
||||
|
||||
let mut clear_cache_users = Vec::new();
|
||||
|
||||
if sum_splits > Decimal::ZERO {
|
||||
for (user_id, split) in project.team_members {
|
||||
let payout: Decimal = data.amount
|
||||
@@ -445,6 +346,7 @@ pub async fn process_payout(
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
clear_cache_users.push(user_id);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -481,9 +383,19 @@ pub async fn process_payout(
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
clear_cache_users.push(user_id);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
User::clear_caches(
|
||||
&clear_cache_users
|
||||
.into_iter()
|
||||
.map(|x| (UserId(x), None))
|
||||
.collect::<Vec<_>>(),
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,214 +0,0 @@
|
||||
/*!
|
||||
This auth module is how we allow for authentication within the Modrinth sphere.
|
||||
It uses a self-hosted Ory Kratos instance on the backend, powered by our Minos backend.
|
||||
|
||||
Applications interacting with the authenticated API (a very small portion - notifications, private projects, editing/creating projects
|
||||
and versions) should include the Ory authentication cookie in their requests. This cookie is set by the Ory Kratos instance and Minos provides function to access these.
|
||||
|
||||
In addition, you can use a logged-in-account to generate a PAT.
|
||||
This token can be passed in as a Bearer token in the Authorization header, as an alternative to a cookie.
|
||||
This is useful for applications that don't have a frontend, or for applications that need to access the authenticated API on behalf of a user.
|
||||
|
||||
Just as a summary: Don't implement this flow in your application!
|
||||
*/
|
||||
|
||||
use crate::database::models::{self, generate_state_id};
|
||||
use crate::models::error::ApiError;
|
||||
use crate::models::ids::base62_impl::{parse_base62, to_base62};
|
||||
use crate::models::ids::DecodingError;
|
||||
|
||||
use crate::parse_strings_from_var;
|
||||
use crate::util::auth::{get_minos_user_from_cookies, AuthenticationError};
|
||||
|
||||
use actix_web::http::StatusCode;
|
||||
use actix_web::web::{scope, Data, Query, ServiceConfig};
|
||||
use actix_web::{get, HttpRequest, HttpResponse};
|
||||
use chrono::Utc;
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::postgres::PgPool;
|
||||
use thiserror::Error;
|
||||
|
||||
pub fn config(cfg: &mut ServiceConfig) {
|
||||
cfg.service(scope("auth").service(auth_callback).service(init));
|
||||
}
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
pub enum AuthorizationError {
|
||||
#[error("Environment Error")]
|
||||
Env(#[from] dotenvy::Error),
|
||||
#[error("An unknown database error occured: {0}")]
|
||||
SqlxDatabase(#[from] sqlx::Error),
|
||||
#[error("Database Error: {0}")]
|
||||
Database(#[from] crate::database::models::DatabaseError),
|
||||
#[error("Error while parsing JSON: {0}")]
|
||||
SerDe(#[from] serde_json::Error),
|
||||
#[error("Error with communicating to Minos")]
|
||||
Minos(#[from] reqwest::Error),
|
||||
#[error("Invalid Authentication credentials")]
|
||||
InvalidCredentials,
|
||||
#[error("Authentication Error: {0}")]
|
||||
Authentication(#[from] crate::util::auth::AuthenticationError),
|
||||
#[error("Error while decoding Base62")]
|
||||
Decoding(#[from] DecodingError),
|
||||
#[error("Invalid callback URL specified")]
|
||||
Url,
|
||||
#[error("User exists in Minos but not in Labrinth")]
|
||||
DatabaseMismatch,
|
||||
}
|
||||
impl actix_web::ResponseError for AuthorizationError {
|
||||
fn status_code(&self) -> StatusCode {
|
||||
match self {
|
||||
AuthorizationError::Env(..) => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
AuthorizationError::SqlxDatabase(..) => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
AuthorizationError::Database(..) => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
AuthorizationError::SerDe(..) => StatusCode::BAD_REQUEST,
|
||||
AuthorizationError::Minos(..) => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
AuthorizationError::InvalidCredentials => StatusCode::UNAUTHORIZED,
|
||||
AuthorizationError::Decoding(..) => StatusCode::BAD_REQUEST,
|
||||
AuthorizationError::Authentication(..) => StatusCode::UNAUTHORIZED,
|
||||
AuthorizationError::Url => StatusCode::BAD_REQUEST,
|
||||
AuthorizationError::DatabaseMismatch => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
}
|
||||
}
|
||||
|
||||
fn error_response(&self) -> HttpResponse {
|
||||
HttpResponse::build(self.status_code()).json(ApiError {
|
||||
error: match self {
|
||||
AuthorizationError::Env(..) => "environment_error",
|
||||
AuthorizationError::SqlxDatabase(..) => "database_error",
|
||||
AuthorizationError::Database(..) => "database_error",
|
||||
AuthorizationError::SerDe(..) => "invalid_input",
|
||||
AuthorizationError::Minos(..) => "network_error",
|
||||
AuthorizationError::InvalidCredentials => "invalid_credentials",
|
||||
AuthorizationError::Decoding(..) => "decoding_error",
|
||||
AuthorizationError::Authentication(..) => "authentication_error",
|
||||
AuthorizationError::Url => "url_error",
|
||||
AuthorizationError::DatabaseMismatch => "database_mismatch",
|
||||
},
|
||||
description: &self.to_string(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct AuthorizationInit {
|
||||
pub url: String,
|
||||
}
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct StateResponse {
|
||||
pub state: String,
|
||||
}
|
||||
|
||||
// Init link takes us to Minos API and calls back to callback endpoint with a code and state
|
||||
//http://<URL>:8000/api/v1/auth/init?url=https%3A%2F%2Fmodrinth.com%2Fmods
|
||||
#[get("init")]
|
||||
pub async fn init(
|
||||
Query(info): Query<AuthorizationInit>, // callback url
|
||||
client: Data<PgPool>,
|
||||
) -> Result<HttpResponse, AuthorizationError> {
|
||||
let url = url::Url::parse(&info.url).map_err(|_| AuthorizationError::Url)?;
|
||||
|
||||
let allowed_callback_urls = parse_strings_from_var("ALLOWED_CALLBACK_URLS").unwrap_or_default();
|
||||
let domain = url.host_str().ok_or(AuthorizationError::Url)?; // TODO: change back to .domain() (host_str is so we can use 127.0.0.1)
|
||||
if !allowed_callback_urls.iter().any(|x| domain.ends_with(x)) && domain != "modrinth.com" {
|
||||
return Err(AuthorizationError::Url);
|
||||
}
|
||||
|
||||
let mut transaction = client.begin().await?;
|
||||
|
||||
let state = generate_state_id(&mut transaction).await?;
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
INSERT INTO states (id, url)
|
||||
VALUES ($1, $2)
|
||||
",
|
||||
state.0,
|
||||
info.url
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
transaction.commit().await?;
|
||||
|
||||
let kratos_url = dotenvy::var("KRATOS_URL")?;
|
||||
let labrinth_url = dotenvy::var("SELF_ADDR")?;
|
||||
let url = format!(
|
||||
// Callback URL of initialization is /callback below.
|
||||
"{kratos_url}/self-service/login/browser?return_to={labrinth_url}/v2/auth/callback?state={}",
|
||||
to_base62(state.0 as u64)
|
||||
);
|
||||
Ok(HttpResponse::TemporaryRedirect()
|
||||
.append_header(("Location", &*url))
|
||||
.json(AuthorizationInit { url }))
|
||||
}
|
||||
|
||||
#[get("callback")]
|
||||
pub async fn auth_callback(
|
||||
req: HttpRequest,
|
||||
Query(state): Query<StateResponse>,
|
||||
client: Data<PgPool>,
|
||||
) -> Result<HttpResponse, AuthorizationError> {
|
||||
let mut transaction = client.begin().await?;
|
||||
let state_id: u64 = parse_base62(&state.state)?;
|
||||
|
||||
let result_option = sqlx::query!(
|
||||
"
|
||||
SELECT url, expires FROM states
|
||||
WHERE id = $1
|
||||
",
|
||||
state_id as i64
|
||||
)
|
||||
.fetch_optional(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
// Extract cookie header from request
|
||||
let cookie_header = req.headers().get("Cookie");
|
||||
if let Some(result) = result_option {
|
||||
if let Some(cookie_header) = cookie_header {
|
||||
// Extract cookie header to get authenticated user from Minos
|
||||
let duration: chrono::Duration = result.expires - Utc::now();
|
||||
if duration.num_seconds() < 0 {
|
||||
return Err(AuthorizationError::InvalidCredentials);
|
||||
}
|
||||
sqlx::query!(
|
||||
"
|
||||
DELETE FROM states
|
||||
WHERE id = $1
|
||||
",
|
||||
state_id as i64
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
// Attempt to create a minos user from the cookie header- if this fails, the user is invalid
|
||||
let minos_user = get_minos_user_from_cookies(
|
||||
cookie_header
|
||||
.to_str()
|
||||
.map_err(|_| AuthenticationError::InvalidCredentials)?,
|
||||
)
|
||||
.await?;
|
||||
let user_result =
|
||||
models::User::get_from_minos_kratos_id(minos_user.id.clone(), &mut transaction)
|
||||
.await?;
|
||||
|
||||
// Cookies exist, but user does not exist in database, meaning they are invalid
|
||||
if user_result.is_none() {
|
||||
return Err(AuthorizationError::DatabaseMismatch);
|
||||
}
|
||||
transaction.commit().await?;
|
||||
|
||||
// Cookie is attached now, so redirect to the original URL
|
||||
// Do not re-append cookie header, as it is not needed,
|
||||
// because all redirects are to various modrinth.com subdomains
|
||||
Ok(HttpResponse::TemporaryRedirect()
|
||||
.append_header(("Location", &*result.url))
|
||||
.json(AuthorizationInit { url: result.url }))
|
||||
} else {
|
||||
Err(AuthorizationError::InvalidCredentials)
|
||||
}
|
||||
} else {
|
||||
Err(AuthorizationError::InvalidCredentials)
|
||||
}
|
||||
}
|
||||
@@ -1,325 +0,0 @@
|
||||
use crate::models::users::UserId;
|
||||
use crate::routes::ApiError;
|
||||
use crate::util::auth::get_user_from_headers;
|
||||
use actix_web::{post, web, HttpRequest, HttpResponse};
|
||||
use chrono::{DateTime, Duration, NaiveDateTime, Utc};
|
||||
use hmac::{Hmac, Mac, NewMac};
|
||||
use itertools::Itertools;
|
||||
use serde::Deserialize;
|
||||
use serde_json::{json, Value};
|
||||
use sqlx::PgPool;
|
||||
|
||||
pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
cfg.service(
|
||||
web::scope("midas")
|
||||
.service(init_checkout)
|
||||
.service(init_customer_portal)
|
||||
.service(handle_stripe_webhook),
|
||||
);
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct CheckoutData {
|
||||
pub price_id: String,
|
||||
}
|
||||
|
||||
#[post("/_stripe-init-checkout")]
|
||||
pub async fn init_checkout(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
data: web::Json<CheckoutData>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(req.headers(), &**pool).await?;
|
||||
|
||||
let client = reqwest::Client::new();
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct Session {
|
||||
url: Option<String>,
|
||||
}
|
||||
|
||||
let session = client
|
||||
.post("https://api.stripe.com/v1/checkout/sessions")
|
||||
.header(
|
||||
"Authorization",
|
||||
format!("Bearer {}", dotenvy::var("STRIPE_TOKEN")?),
|
||||
)
|
||||
.form(&[
|
||||
("mode", "subscription"),
|
||||
("line_items[0][price]", &*data.price_id),
|
||||
("line_items[0][quantity]", "1"),
|
||||
("success_url", "https://modrinth.com/welcome-to-midas"),
|
||||
("cancel_url", "https://modrinth.com/midas"),
|
||||
("metadata[user_id]", &user.id.to_string()),
|
||||
])
|
||||
.send()
|
||||
.await
|
||||
.map_err(|_| ApiError::Payments("Error while creating checkout session!".to_string()))?
|
||||
.json::<Session>()
|
||||
.await
|
||||
.map_err(|_| {
|
||||
ApiError::Payments("Error while deserializing checkout response!".to_string())
|
||||
})?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(json!(
|
||||
{
|
||||
"url": session.url
|
||||
}
|
||||
)))
|
||||
}
|
||||
|
||||
#[post("/_stripe-init-portal")]
|
||||
pub async fn init_customer_portal(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(req.headers(), &**pool).await?;
|
||||
|
||||
let customer_id = sqlx::query!(
|
||||
"
|
||||
SELECT u.stripe_customer_id
|
||||
FROM users u
|
||||
WHERE u.id = $1
|
||||
",
|
||||
user.id.0 as i64,
|
||||
)
|
||||
.fetch_optional(&**pool)
|
||||
.await?
|
||||
.and_then(|x| x.stripe_customer_id)
|
||||
.ok_or_else(|| ApiError::InvalidInput("User is not linked to stripe account!".to_string()))?;
|
||||
|
||||
let client = reqwest::Client::new();
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct Session {
|
||||
url: Option<String>,
|
||||
}
|
||||
|
||||
let session = client
|
||||
.post("https://api.stripe.com/v1/billing_portal/sessions")
|
||||
.header(
|
||||
"Authorization",
|
||||
format!("Bearer {}", dotenvy::var("STRIPE_TOKEN")?),
|
||||
)
|
||||
.form(&[
|
||||
("customer", &*customer_id),
|
||||
("return_url", "https://modrinth.com/settings/billing"),
|
||||
])
|
||||
.send()
|
||||
.await
|
||||
.map_err(|_| ApiError::Payments("Error while creating billing session!".to_string()))?
|
||||
.json::<Session>()
|
||||
.await
|
||||
.map_err(|_| {
|
||||
ApiError::Payments("Error while deserializing billing response!".to_string())
|
||||
})?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(json!(
|
||||
{
|
||||
"url": session.url
|
||||
}
|
||||
)))
|
||||
}
|
||||
|
||||
#[post("/_stripe-webook")]
|
||||
pub async fn handle_stripe_webhook(
|
||||
body: String,
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
if let Some(signature_raw) = req
|
||||
.headers()
|
||||
.get("Stripe-Signature")
|
||||
.and_then(|x| x.to_str().ok())
|
||||
{
|
||||
let mut timestamp = None;
|
||||
let mut signature = None;
|
||||
for val in signature_raw.split(',') {
|
||||
let key_val = val.split('=').collect_vec();
|
||||
|
||||
if key_val.len() == 2 {
|
||||
if key_val[0] == "v1" {
|
||||
signature = hex::decode(key_val[1]).ok()
|
||||
} else if key_val[0] == "t" {
|
||||
timestamp = key_val[1].parse::<i64>().ok()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(timestamp) = timestamp {
|
||||
if let Some(signature) = signature {
|
||||
type HmacSha256 = Hmac<sha2::Sha256>;
|
||||
|
||||
let mut key =
|
||||
HmacSha256::new_from_slice(dotenvy::var("STRIPE_WEBHOOK_SECRET")?.as_bytes())
|
||||
.map_err(|_| {
|
||||
ApiError::Crypto(
|
||||
"Unable to initialize HMAC instance due to invalid key length!"
|
||||
.to_string(),
|
||||
)
|
||||
})?;
|
||||
|
||||
key.update(format!("{timestamp}.{body}").as_bytes());
|
||||
|
||||
key.verify(&signature).map_err(|_| {
|
||||
ApiError::Crypto("Unable to verify webhook signature!".to_string())
|
||||
})?;
|
||||
|
||||
if timestamp < (Utc::now() - Duration::minutes(5)).timestamp()
|
||||
|| timestamp > (Utc::now() + Duration::minutes(5)).timestamp()
|
||||
{
|
||||
return Err(ApiError::Crypto("Webhook signature expired!".to_string()));
|
||||
}
|
||||
} else {
|
||||
return Err(ApiError::Crypto("Missing signature!".to_string()));
|
||||
}
|
||||
} else {
|
||||
return Err(ApiError::Crypto("Missing timestamp!".to_string()));
|
||||
}
|
||||
} else {
|
||||
return Err(ApiError::Crypto("Missing signature header!".to_string()));
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct StripeWebhookBody {
|
||||
#[serde(rename = "type")]
|
||||
type_: String,
|
||||
data: StripeWebhookObject,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct StripeWebhookObject {
|
||||
object: Value,
|
||||
}
|
||||
|
||||
let webhook: StripeWebhookBody = serde_json::from_str(&body)?;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct CheckoutSession {
|
||||
customer: String,
|
||||
metadata: SessionMetadata,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct SessionMetadata {
|
||||
user_id: UserId,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct Invoice {
|
||||
customer: String,
|
||||
// paid: bool,
|
||||
lines: InvoiceLineItems,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct InvoiceLineItems {
|
||||
pub data: Vec<InvoiceLineItem>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct InvoiceLineItem {
|
||||
period: Period,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct Period {
|
||||
// start: i64,
|
||||
end: i64,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct Subscription {
|
||||
customer: String,
|
||||
}
|
||||
|
||||
let mut transaction = pool.begin().await?;
|
||||
|
||||
// TODO: Currently hardcoded to midas-only. When we add more stuff should include price IDs
|
||||
match &*webhook.type_ {
|
||||
"checkout.session.completed" => {
|
||||
let session: CheckoutSession = serde_json::from_value(webhook.data.object)?;
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
UPDATE users
|
||||
SET stripe_customer_id = $1
|
||||
WHERE (id = $2)
|
||||
",
|
||||
session.customer,
|
||||
session.metadata.user_id.0 as i64,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
}
|
||||
"invoice.paid" => {
|
||||
let invoice: Invoice = serde_json::from_value(webhook.data.object)?;
|
||||
|
||||
if let Some(item) = invoice.lines.data.first() {
|
||||
let expires: DateTime<Utc> = DateTime::from_utc(
|
||||
NaiveDateTime::from_timestamp_opt(item.period.end, 0).unwrap_or_default(),
|
||||
Utc,
|
||||
) + Duration::days(1);
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
UPDATE users
|
||||
SET midas_expires = $1, is_overdue = FALSE
|
||||
WHERE (stripe_customer_id = $2)
|
||||
",
|
||||
expires,
|
||||
invoice.customer,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
}
|
||||
}
|
||||
"invoice.payment_failed" => {
|
||||
let invoice: Invoice = serde_json::from_value(webhook.data.object)?;
|
||||
|
||||
let customer_id = sqlx::query!(
|
||||
"
|
||||
SELECT u.id
|
||||
FROM users u
|
||||
WHERE u.stripe_customer_id = $1
|
||||
",
|
||||
invoice.customer,
|
||||
)
|
||||
.fetch_optional(&**pool)
|
||||
.await?
|
||||
.map(|x| x.id);
|
||||
|
||||
if let Some(user_id) = customer_id {
|
||||
sqlx::query!(
|
||||
"
|
||||
UPDATE users
|
||||
SET is_overdue = TRUE
|
||||
WHERE (id = $1)
|
||||
",
|
||||
user_id,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
}
|
||||
}
|
||||
"customer.subscription.deleted" => {
|
||||
let session: Subscription = serde_json::from_value(webhook.data.object)?;
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
UPDATE users
|
||||
SET stripe_customer_id = NULL, midas_expires = NULL, is_overdue = NULL
|
||||
WHERE (stripe_customer_id = $1)
|
||||
",
|
||||
session.customer,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
}
|
||||
_ => {}
|
||||
};
|
||||
|
||||
transaction.commit().await?;
|
||||
|
||||
Ok(HttpResponse::NoContent().body(""))
|
||||
}
|
||||
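For context on the `invoice.paid` arm above: the expiry written to `midas_expires` is simply the Stripe line-item `period.end` timestamp plus one day of grace. A standalone sketch of that computation, using the same chrono calls as the handler (the helper name is illustrative, not part of the codebase):

```rust
use chrono::{DateTime, Duration, NaiveDateTime, Utc};

// Mirrors the expiry math in the `invoice.paid` branch: convert the unix
// timestamp from the Stripe invoice line item and add one day of grace.
fn midas_expiry_from_period_end(period_end: i64) -> DateTime<Utc> {
    DateTime::from_utc(
        // A malformed timestamp falls back to the unix epoch instead of panicking.
        NaiveDateTime::from_timestamp_opt(period_end, 0).unwrap_or_default(),
        Utc,
    ) + Duration::days(1)
}
```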
@@ -1,6 +1,4 @@
mod admin;
mod auth;
mod midas;
mod moderation;
mod notifications;
mod pats;
@@ -22,20 +20,26 @@ pub fn config(cfg: &mut actix_web::web::ServiceConfig) {
cfg.service(
actix_web::web::scope("v2")
.configure(admin::config)
.configure(auth::config)
.configure(midas::config)
.configure(crate::auth::config)
.configure(moderation::config)
.configure(notifications::config)
.configure(pats::config)
.configure(project_creation::config)
// SHOULD CACHE
.configure(projects::config)
.configure(reports::config)
// should cache in future
.configure(statistics::config)
// should cache in future
.configure(tags::config)
// should cache
.configure(teams::config)
.configure(threads::config)
// should cache
.configure(users::config)
// should cache in future
.configure(version_file::config)
// SHOULD CACHE
.configure(versions::config),
);
}
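The `v2` scope above is built from per-module `config` functions. A minimal, self-contained sketch of that composition pattern in actix-web (module and route names here are placeholders, not the real labrinth modules):

```rust
use actix_web::{get, web, App, HttpResponse, HttpServer};

mod projects {
    use super::*;

    #[get("")]
    async fn index() -> HttpResponse {
        HttpResponse::Ok().finish()
    }

    // Each module exposes a `config` function and registers its own routes.
    pub fn config(cfg: &mut web::ServiceConfig) {
        cfg.service(web::scope("project").service(index));
    }
}

#[actix_rt::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| {
        // `Scope::configure` is what lets the `v2` scope chain many modules together.
        App::new().service(web::scope("v2").configure(projects::config))
    })
    .bind(("127.0.0.1", 8000))?
    .run()
    .await
}
```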
@@ -1,7 +1,7 @@
use super::ApiError;
use crate::auth::check_is_moderator_from_headers;
use crate::database;
use crate::models::projects::ProjectStatus;
use crate::util::auth::check_is_moderator_from_headers;
use actix_web::{get, web, HttpRequest, HttpResponse};
use serde::Deserialize;
use sqlx::PgPool;
@@ -24,9 +24,10 @@ fn default_count() -> i16 {
pub async fn get_projects(
req: HttpRequest,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
count: web::Query<ResultCount>,
) -> Result<HttpResponse, ApiError> {
check_is_moderator_from_headers(req.headers(), &**pool).await?;
check_is_moderator_from_headers(req.headers(), &**pool, &redis).await?;

use futures::stream::TryStreamExt;

@@ -45,7 +46,7 @@ pub async fn get_projects(
.try_collect::<Vec<database::models::ProjectId>>()
.await?;

let projects: Vec<_> = database::Project::get_many_full(&project_ids, &**pool)
let projects: Vec<_> = database::Project::get_many_ids(&project_ids, &**pool, &redis)
.await?
.into_iter()
.map(crate::models::projects::Project::from)
@@ -1,8 +1,8 @@
use crate::auth::get_user_from_headers;
use crate::database;
use crate::models::ids::NotificationId;
use crate::models::notifications::Notification;
use crate::routes::ApiError;
use crate::util::auth::get_user_from_headers;
use actix_web::{delete, get, patch, web, HttpRequest, HttpResponse};
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
@@ -30,8 +30,9 @@ pub async fn notifications_get(
req: HttpRequest,
web::Query(ids): web::Query<NotificationIds>,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(req.headers(), &**pool).await?;
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;

use database::models::notification_item::Notification as DBNotification;
use database::models::NotificationId as DBNotificationId;
@@ -60,8 +61,9 @@ pub async fn notification_get(
req: HttpRequest,
info: web::Path<(NotificationId,)>,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(req.headers(), &**pool).await?;
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;

let id = info.into_inner().0;

@@ -84,8 +86,9 @@ pub async fn notification_read(
req: HttpRequest,
info: web::Path<(NotificationId,)>,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(req.headers(), &**pool).await?;
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;

let id = info.into_inner().0;

@@ -117,8 +120,9 @@ pub async fn notification_delete(
req: HttpRequest,
info: web::Path<(NotificationId,)>,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(req.headers(), &**pool).await?;
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;

let id = info.into_inner().0;

@@ -150,8 +154,9 @@ pub async fn notifications_read(
req: HttpRequest,
web::Query(ids): web::Query<NotificationIds>,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(req.headers(), &**pool).await?;
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;

let notification_ids = serde_json::from_str::<Vec<NotificationId>>(&ids.ids)?
.into_iter()
@@ -185,8 +190,9 @@ pub async fn notifications_delete(
req: HttpRequest,
web::Query(ids): web::Query<NotificationIds>,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(req.headers(), &**pool).await?;
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;

let notification_ids = serde_json::from_str::<Vec<NotificationId>>(&ids.ids)?
.into_iter()
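Every handler in this diff gains a `redis: web::Data<deadpool_redis::Pool>` parameter. A hedged sketch of how such a pool is typically created and injected as actix-web app data (this is not labrinth's actual bootstrap code; the URL and route are placeholders):

```rust
use actix_web::{web, App, HttpResponse, HttpServer};
use deadpool_redis::{Config, Runtime};

async fn ping(redis: web::Data<deadpool_redis::Pool>) -> HttpResponse {
    // Handlers receive the pool through the extractor and pass `&redis`
    // down into the auth/database helpers, as in the routes above.
    match redis.get().await {
        Ok(_conn) => HttpResponse::Ok().finish(),
        Err(_) => HttpResponse::InternalServerError().finish(),
    }
}

#[actix_rt::main]
async fn main() -> std::io::Result<()> {
    let pool = Config::from_url("redis://localhost")
        .create_pool(Some(Runtime::Tokio1))
        .expect("failed to create redis pool");

    HttpServer::new(move || {
        App::new()
            .app_data(web::Data::new(pool.clone()))
            .route("/ping", web::get().to(ping))
    })
    .bind(("127.0.0.1", 8000))?
    .run()
    .await
}
```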
@@ -9,10 +9,10 @@ use crate::database;
use crate::database::models::generate_pat_id;
use crate::models::ids::base62_impl::{parse_base62, to_base62};

use crate::auth::get_user_from_headers;
use crate::auth::{generate_pat, PersonalAccessToken};
use crate::models::users::UserId;
use crate::routes::ApiError;
use crate::util::auth::get_user_from_headers;
use crate::util::pat::{generate_pat, PersonalAccessToken};

use actix_web::web::{self, Data, Query};
use actix_web::{delete, get, patch, post, HttpRequest, HttpResponse};
@@ -46,8 +46,13 @@ pub struct ModifyPersonalAccessToken {
// Get all personal access tokens for the given user. Minos/Kratos cookie must be attached for it to work.
// Does not return the actual access token, only the ID + metadata.
#[get("pat")]
pub async fn get_pats(req: HttpRequest, pool: Data<PgPool>) -> Result<HttpResponse, ApiError> {
let user: crate::models::users::User = get_user_from_headers(req.headers(), &**pool).await?;
pub async fn get_pats(
req: HttpRequest,
pool: Data<PgPool>,
redis: Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
let user: crate::models::users::User =
get_user_from_headers(req.headers(), &**pool, &redis).await?;
let db_user_id: database::models::UserId = database::models::UserId::from(user.id);

let pats = sqlx::query!(
@@ -84,8 +89,10 @@ pub async fn create_pat(
req: HttpRequest,
Query(info): Query<CreatePersonalAccessToken>, // callback url
pool: Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
let user: crate::models::users::User = get_user_from_headers(req.headers(), &**pool).await?;
let user: crate::models::users::User =
get_user_from_headers(req.headers(), &**pool, &redis).await?;
let db_user_id: database::models::UserId = database::models::UserId::from(user.id);

let mut transaction: sqlx::Transaction<sqlx::Postgres> = pool.begin().await?;
@@ -135,8 +142,10 @@ pub async fn edit_pat(
id: web::Path<String>,
Query(info): Query<ModifyPersonalAccessToken>, // callback url
pool: Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
let user: crate::models::users::User = get_user_from_headers(req.headers(), &**pool).await?;
let user: crate::models::users::User =
get_user_from_headers(req.headers(), &**pool, &redis).await?;
let pat_id = database::models::PatId(parse_base62(&id)? as i64);
let db_user_id: database::models::UserId = database::models::UserId::from(user.id);

@@ -198,8 +207,10 @@ pub async fn delete_pat(
req: HttpRequest,
id: web::Path<String>,
pool: Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
let user: crate::models::users::User = get_user_from_headers(req.headers(), &**pool).await?;
let user: crate::models::users::User =
get_user_from_headers(req.headers(), &**pool, &redis).await?;
let pat_id = database::models::PatId(parse_base62(&id)? as i64);
let db_user_id: database::models::UserId = database::models::UserId::from(user.id);
@@ -1,4 +1,5 @@
use super::version_creation::InitialVersionData;
use crate::auth::{get_user_from_headers, AuthenticationError};
use crate::database::models;
use crate::database::models::thread_item::ThreadBuilder;
use crate::file_hosting::{FileHost, FileHostingError};
@@ -10,7 +11,6 @@ use crate::models::projects::{
use crate::models::threads::ThreadType;
use crate::models::users::UserId;
use crate::search::indexing::IndexingError;
use crate::util::auth::{get_user_from_headers_transaction, AuthenticationError};
use crate::util::routes::read_from_field;
use crate::util::validate::validation_errors_to_string;
use actix_multipart::{Field, Multipart};
@@ -270,6 +270,7 @@ pub async fn project_create(
req: HttpRequest,
mut payload: Multipart,
client: Data<PgPool>,
redis: Data<deadpool_redis::Pool>,
file_host: Data<Arc<dyn FileHost + Send + Sync>>,
) -> Result<HttpResponse, CreateError> {
let mut transaction = client.begin().await?;
@@ -282,6 +283,7 @@ pub async fn project_create(
&***file_host,
&mut uploaded_files,
&client,
&redis,
)
.await;

@@ -336,12 +338,13 @@ async fn project_create_inner(
file_host: &dyn FileHost,
uploaded_files: &mut Vec<UploadedFile>,
pool: &PgPool,
redis: &deadpool_redis::Pool,
) -> Result<HttpResponse, CreateError> {
// The base URL for files uploaded to backblaze
let cdn_url = dotenvy::var("CDN_URL")?;

// The currently logged in user
let current_user = get_user_from_headers_transaction(req.headers(), &mut *transaction).await?;
let current_user = get_user_from_headers(req.headers(), pool, redis).await?;

let project_id: ProjectId = models::generate_project_id(transaction).await?.into();
@@ -1,3 +1,4 @@
use crate::auth::{filter_authorized_projects, get_user_from_headers, is_authorized};
use crate::database;
use crate::database::models::notification_item::NotificationBuilder;
use crate::database::models::thread_item::ThreadMessageBuilder;
@@ -12,7 +13,6 @@ use crate::models::teams::Permissions;
use crate::models::threads::MessageBody;
use crate::routes::ApiError;
use crate::search::{search_for_project, SearchConfig, SearchError};
use crate::util::auth::{filter_authorized_projects, get_user_from_headers, is_authorized};
use crate::util::routes::read_from_payload;
use crate::util::validate::validation_errors_to_string;
use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
@@ -74,6 +74,7 @@ pub struct RandomProjects {
pub async fn random_projects_get(
web::Query(count): web::Query<RandomProjects>,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
count
.validate()
@@ -94,7 +95,7 @@ pub async fn random_projects_get(
.try_collect::<Vec<_>>()
.await?;

let projects_data = database::models::Project::get_many_full(&project_ids, &**pool)
let projects_data = database::models::Project::get_many_ids(&project_ids, &**pool, &redis)
.await?
.into_iter()
.map(Project::from)
@@ -113,16 +114,14 @@ pub async fn projects_get(
req: HttpRequest,
web::Query(ids): web::Query<ProjectIds>,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
let project_ids: Vec<database::models::ids::ProjectId> =
serde_json::from_str::<Vec<ProjectId>>(&ids.ids)?
.into_iter()
.map(|x| x.into())
.collect();
let ids = serde_json::from_str::<Vec<&str>>(&ids.ids)?;
let projects_data = database::models::Project::get_many(&ids, &**pool, &redis).await?;

let projects_data = database::models::Project::get_many_full(&project_ids, &**pool).await?;

let user_option = get_user_from_headers(req.headers(), &**pool).await.ok();
let user_option = get_user_from_headers(req.headers(), &**pool, &redis)
.await
.ok();

let projects = filter_authorized_projects(projects_data, &user_option, &pool).await?;
@@ -134,13 +133,15 @@ pub async fn project_get(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
let string = info.into_inner().0;

let project_data =
database::models::Project::get_full_from_slug_or_project_id(&string, &**pool).await?;
let project_data = database::models::Project::get(&string, &**pool, &redis).await?;

let user_option = get_user_from_headers(req.headers(), &**pool).await.ok();
let user_option = get_user_from_headers(req.headers(), &**pool, &redis)
.await
.ok();

if let Some(data) = project_data {
if is_authorized(&data.inner, &user_option, &pool).await? {
@@ -155,52 +156,15 @@ pub async fn project_get(
pub async fn project_get_check(
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
let slug = info.into_inner().0;

let id_option = parse_base62(&slug).ok();
let project_data = database::models::Project::get(&slug, &**pool, &redis).await?;

let id = if let Some(id) = id_option {
let id = sqlx::query!(
"
SELECT id FROM mods
WHERE id = $1
",
id as i64
)
.fetch_optional(&**pool)
.await?;

if id.is_none() {
sqlx::query!(
"
SELECT id FROM mods
WHERE slug = LOWER($1)
",
&slug
)
.fetch_optional(&**pool)
.await?
.map(|x| x.id)
} else {
id.map(|x| x.id)
}
} else {
sqlx::query!(
"
SELECT id FROM mods
WHERE slug = LOWER($1)
",
&slug
)
.fetch_optional(&**pool)
.await?
.map(|x| x.id)
};

if let Some(id) = id {
if let Some(project) = project_data {
Ok(HttpResponse::Ok().json(json! ({
"id": models::ids::ProjectId(id as u64)
"id": models::ids::ProjectId::from(project.inner.id)
})))
} else {
Ok(HttpResponse::NotFound().body(""))
@@ -218,52 +182,23 @@ pub async fn dependency_list(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
let string = info.into_inner().0;

let result = database::models::Project::get_from_slug_or_project_id(&string, &**pool).await?;
let result = database::models::Project::get(&string, &**pool, &redis).await?;

let user_option = get_user_from_headers(req.headers(), &**pool).await.ok();
let user_option = get_user_from_headers(req.headers(), &**pool, &redis)
.await
.ok();

if let Some(project) = result {
if !is_authorized(&project, &user_option, &pool).await? {
if !is_authorized(&project.inner, &user_option, &pool).await? {
return Ok(HttpResponse::NotFound().body(""));
}

let id = project.id;

use futures::stream::TryStreamExt;

let dependencies = sqlx::query!(
"
SELECT d.dependency_id, COALESCE(vd.mod_id, 0) mod_id, d.mod_dependency_id
FROM versions v
INNER JOIN dependencies d ON d.dependent_id = v.id
LEFT JOIN versions vd ON d.dependency_id = vd.id
WHERE v.mod_id = $1
",
id as database::models::ProjectId
)
.fetch_many(&**pool)
.try_filter_map(|e| async {
Ok(e.right().map(|x| {
(
x.dependency_id.map(database::models::VersionId),
if x.mod_id == Some(0) {
None
} else {
x.mod_id.map(database::models::ProjectId)
},
x.mod_dependency_id.map(database::models::ProjectId),
)
}))
})
.try_collect::<Vec<(
Option<database::models::VersionId>,
Option<database::models::ProjectId>,
Option<database::models::ProjectId>,
)>>()
.await?;
let dependencies =
database::Project::get_dependencies(project.inner.id, &**pool, &redis).await?;

let project_ids = dependencies
.iter()
@@ -285,8 +220,8 @@ pub async fn dependency_list(
.filter_map(|x| x.0)
.collect::<Vec<database::models::VersionId>>();
let (projects_result, versions_result) = futures::future::try_join(
database::Project::get_many_full(&project_ids, &**pool),
database::Version::get_many_full(&dep_version_ids, &**pool),
database::Project::get_many_ids(&project_ids, &**pool, &redis),
database::Version::get_many(&dep_version_ids, &**pool, &redis),
)
.await?;
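The dependency fetch above relies on `futures::future::try_join` to run the project and version lookups concurrently and fail fast on the first error. A generic, self-contained illustration of that pattern (the two async functions are stand-ins for the database calls):

```rust
use futures::future::try_join;

async fn fetch_projects(ids: &[u64]) -> Result<Vec<String>, std::io::Error> {
    Ok(ids.iter().map(|id| format!("project-{id}")).collect())
}

async fn fetch_versions(ids: &[u64]) -> Result<Vec<String>, std::io::Error> {
    Ok(ids.iter().map(|id| format!("version-{id}")).collect())
}

async fn load_dependency_data(project_ids: &[u64], version_ids: &[u64]) -> Result<(), std::io::Error> {
    // Both futures are polled together; the first error aborts the pair.
    let (projects, versions) =
        try_join(fetch_projects(project_ids), fetch_versions(version_ids)).await?;
    println!("{} projects, {} versions", projects.len(), versions.len());
    Ok(())
}
```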
@@ -417,16 +352,16 @@ pub async fn project_edit(
pool: web::Data<PgPool>,
config: web::Data<SearchConfig>,
new_project: web::Json<EditProject>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(req.headers(), &**pool).await?;
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;

new_project
.validate()
.map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?;

let string = info.into_inner().0;
let result =
database::models::Project::get_full_from_slug_or_project_id(&string, &**pool).await?;
let result = database::models::Project::get(&string, &**pool, &redis).await?;

if let Some(project_item) = result {
let id = project_item.inner.id;
@@ -889,7 +824,7 @@ pub async fn project_edit(

// Make sure the new slug is different from the old one
// We are able to unwrap here because the slug is always set
if !slug.eq(&project_item.inner.slug.unwrap_or_default()) {
if !slug.eq(&project_item.inner.slug.clone().unwrap_or_default()) {
let results = sqlx::query!(
"
SELECT EXISTS(SELECT 1 FROM mods WHERE slug = LOWER($1))
@@ -1151,6 +1086,14 @@ pub async fn project_edit(
.await?;
}

database::models::Project::clear_cache(
project_item.inner.id,
project_item.inner.slug,
None,
&redis,
)
.await?;

transaction.commit().await?;
Ok(HttpResponse::NoContent().body(""))
} else {
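`Project::clear_cache` now follows every write so that stale Redis entries are dropped before the transaction commits. A minimal sketch of what such an invalidation helper might look like with deadpool-redis, assuming keys of the form `project:{id}` and `project_slug:{slug}` (the real key scheme lives in the database models and may differ):

```rust
use deadpool_redis::redis::cmd;

async fn clear_project_cache(
    id: u64,
    slug: Option<String>,
    redis: &deadpool_redis::Pool,
) -> Result<(), Box<dyn std::error::Error>> {
    let mut conn = redis.get().await?;

    // Delete both the id-keyed and the slug-keyed entries in one DEL.
    let mut del = cmd("DEL");
    del.arg(format!("project:{id}"));
    if let Some(slug) = slug {
        del.arg(format!("project_slug:{}", slug.to_lowercase()));
    }

    // The number of deleted keys is irrelevant; what matters is that stale
    // entries are gone before the surrounding transaction commits.
    let _: i64 = del.query_async(&mut conn).await?;
    Ok(())
}
```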
@@ -1232,8 +1175,9 @@ pub async fn projects_edit(
web::Query(ids): web::Query<ProjectIds>,
pool: web::Data<PgPool>,
bulk_edit_project: web::Json<BulkEditProject>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(req.headers(), &**pool).await?;
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;

bulk_edit_project
.validate()
@@ -1245,7 +1189,8 @@ pub async fn projects_edit(
.map(|x| x.into())
.collect();

let projects_data = database::models::Project::get_many_full(&project_ids, &**pool).await?;
let projects_data =
database::models::Project::get_many_ids(&project_ids, &**pool, &redis).await?;

if let Some(id) = project_ids
.iter()
@@ -1262,7 +1207,7 @@ pub async fn projects_edit(
.map(|x| x.inner.team_id)
.collect::<Vec<database::models::TeamId>>();
let team_members =
database::models::TeamMember::get_from_team_full_many(&team_ids, &**pool).await?;
database::models::TeamMember::get_from_team_full_many(&team_ids, &**pool, &redis).await?;

let categories = database::models::categories::Category::list(&**pool).await?;
let donation_platforms = database::models::categories::DonationPlatform::list(&**pool).await?;
@@ -1538,6 +1483,9 @@ pub async fn projects_edit(
.execute(&mut *transaction)
.await?;
}

database::models::Project::clear_cache(project.inner.id, project.inner.slug, None, &redis)
.await?;
}

transaction.commit().await?;
@@ -1556,9 +1504,10 @@ pub async fn project_schedule(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
scheduling_data: web::Json<SchedulingData>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(req.headers(), &**pool).await?;
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;

if scheduling_data.time < Utc::now() {
return Err(ApiError::InvalidInput(
@@ -1573,11 +1522,11 @@ pub async fn project_schedule(
}

let string = info.into_inner().0;
let result = database::models::Project::get_from_slug_or_project_id(&string, &**pool).await?;
let result = database::models::Project::get(&string, &**pool, &redis).await?;

if let Some(project_item) = result {
let team_member = database::models::TeamMember::get_from_user_id(
project_item.team_id,
project_item.inner.team_id,
user.id.into(),
&**pool,
)
@@ -1601,11 +1550,19 @@ pub async fn project_schedule(
",
ProjectStatus::Scheduled.as_str(),
scheduling_data.time,
project_item.id as database::models::ids::ProjectId,
project_item.inner.id as database::models::ids::ProjectId,
)
.execute(&**pool)
.await?;

database::models::Project::clear_cache(
project_item.inner.id,
project_item.inner.slug,
None,
&redis,
)
.await?;

Ok(HttpResponse::NoContent().body(""))
} else {
Ok(HttpResponse::NotFound().body(""))
@@ -1623,15 +1580,16 @@ pub async fn project_icon_edit(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
mut payload: web::Payload,
) -> Result<HttpResponse, ApiError> {
if let Some(content_type) = crate::util::ext::get_image_content_type(&ext.ext) {
let cdn_url = dotenvy::var("CDN_URL")?;
let user = get_user_from_headers(req.headers(), &**pool).await?;
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;
let string = info.into_inner().0;

let project_item = database::models::Project::get_from_slug_or_project_id(&string, &**pool)
let project_item = database::models::Project::get(&string, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified project does not exist!".to_string())
@@ -1639,7 +1597,7 @@ pub async fn project_icon_edit(

if !user.role.is_mod() {
let team_member = database::models::TeamMember::get_from_user_id(
project_item.team_id,
project_item.inner.team_id,
user.id.into(),
&**pool,
)
@@ -1656,7 +1614,7 @@ pub async fn project_icon_edit(
}
}

if let Some(icon) = project_item.icon_url {
if let Some(icon) = project_item.inner.icon_url {
let name = icon.split(&format!("{cdn_url}/")).nth(1);

if let Some(icon_path) = name {
@@ -1670,7 +1628,7 @@ pub async fn project_icon_edit(
let color = crate::util::img::get_color_from_img(&bytes)?;

let hash = sha1::Sha1::from(&bytes).hexdigest();
let project_id: ProjectId = project_item.id.into();
let project_id: ProjectId = project_item.inner.id.into();
let upload_data = file_host
.upload_file(
content_type,
@@ -1689,11 +1647,19 @@ pub async fn project_icon_edit(
",
format!("{}/{}", cdn_url, upload_data.file_name),
color.map(|x| x as i32),
project_item.id as database::models::ids::ProjectId,
project_item.inner.id as database::models::ids::ProjectId,
)
.execute(&mut *transaction)
.await?;

database::models::Project::clear_cache(
project_item.inner.id,
project_item.inner.slug,
None,
&redis,
)
.await?;

transaction.commit().await?;

Ok(HttpResponse::NoContent().body(""))
@@ -1710,12 +1676,13 @@ pub async fn delete_project_icon(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(req.headers(), &**pool).await?;
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;
let string = info.into_inner().0;

let project_item = database::models::Project::get_from_slug_or_project_id(&string, &**pool)
let project_item = database::models::Project::get(&string, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified project does not exist!".to_string())
@@ -1723,7 +1690,7 @@ pub async fn delete_project_icon(

if !user.role.is_mod() {
let team_member = database::models::TeamMember::get_from_user_id(
project_item.team_id,
project_item.inner.team_id,
user.id.into(),
&**pool,
)
@@ -1741,7 +1708,7 @@ pub async fn delete_project_icon(
}

let cdn_url = dotenvy::var("CDN_URL")?;
if let Some(icon) = project_item.icon_url {
if let Some(icon) = project_item.inner.icon_url {
let name = icon.split(&format!("{cdn_url}/")).nth(1);

if let Some(icon_path) = name {
@@ -1757,11 +1724,19 @@ pub async fn delete_project_icon(
SET icon_url = NULL, color = NULL
WHERE (id = $1)
",
project_item.id as database::models::ids::ProjectId,
project_item.inner.id as database::models::ids::ProjectId,
)
.execute(&mut *transaction)
.await?;

database::models::Project::clear_cache(
project_item.inner.id,
project_item.inner.slug,
None,
&redis,
)
.await?;

transaction.commit().await?;

Ok(HttpResponse::NoContent().body(""))
@@ -1778,12 +1753,14 @@ pub struct GalleryCreateQuery {
}

#[post("{id}/gallery")]
#[allow(clippy::too_many_arguments)]
pub async fn add_gallery_item(
web::Query(ext): web::Query<Extension>,
req: HttpRequest,
web::Query(item): web::Query<GalleryCreateQuery>,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
mut payload: web::Payload,
) -> Result<HttpResponse, ApiError> {
@@ -1792,15 +1769,14 @@ pub async fn add_gallery_item(
.map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?;

let cdn_url = dotenvy::var("CDN_URL")?;
let user = get_user_from_headers(req.headers(), &**pool).await?;
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;
let string = info.into_inner().0;

let project_item =
database::models::Project::get_full_from_slug_or_project_id(&string, &**pool)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified project does not exist!".to_string())
})?;
let project_item = database::models::Project::get(&string, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified project does not exist!".to_string())
})?;

if project_item.gallery_items.len() > 64 {
return Err(ApiError::CustomAuthentication(
@@ -1880,6 +1856,14 @@ pub async fn add_gallery_item(
.insert(project_item.inner.id, &mut transaction)
.await?;

database::models::Project::clear_cache(
project_item.inner.id,
project_item.inner.slug,
None,
&redis,
)
.await?;

transaction.commit().await?;

Ok(HttpResponse::NoContent().body(""))
@@ -1919,14 +1903,15 @@ pub async fn edit_gallery_item(
web::Query(item): web::Query<GalleryEditQuery>,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(req.headers(), &**pool).await?;
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;
let string = info.into_inner().0;

item.validate()
.map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?;

let project_item = database::models::Project::get_from_slug_or_project_id(&string, &**pool)
let project_item = database::models::Project::get(&string, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified project does not exist!".to_string())
@@ -1934,7 +1919,7 @@ pub async fn edit_gallery_item(

if !user.role.is_mod() {
let team_member = database::models::TeamMember::get_from_user_id(
project_item.team_id,
project_item.inner.team_id,
user.id.into(),
&**pool,
)
@@ -1979,7 +1964,7 @@ pub async fn edit_gallery_item(
SET featured = $2
WHERE mod_id = $1
",
project_item.id as database::models::ids::ProjectId,
project_item.inner.id as database::models::ids::ProjectId,
false,
)
.execute(&mut *transaction)
@@ -2038,6 +2023,14 @@ pub async fn edit_gallery_item(
.await?;
}

database::models::Project::clear_cache(
project_item.inner.id,
project_item.inner.slug,
None,
&redis,
)
.await?;

transaction.commit().await?;

Ok(HttpResponse::NoContent().body(""))
@@ -2054,12 +2047,13 @@ pub async fn delete_gallery_item(
web::Query(item): web::Query<GalleryDeleteQuery>,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(req.headers(), &**pool).await?;
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;
let string = info.into_inner().0;

let project_item = database::models::Project::get_from_slug_or_project_id(&string, &**pool)
let project_item = database::models::Project::get(&string, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified project does not exist!".to_string())
@@ -2067,7 +2061,7 @@ pub async fn delete_gallery_item(

if !user.role.is_mod() {
let team_member = database::models::TeamMember::get_from_user_id(
project_item.team_id,
project_item.inner.team_id,
user.id.into(),
&**pool,
)
@@ -2121,6 +2115,14 @@ pub async fn delete_gallery_item(
.execute(&mut *transaction)
.await?;

database::models::Project::clear_cache(
project_item.inner.id,
project_item.inner.slug,
None,
&redis,
)
.await?;

transaction.commit().await?;

Ok(HttpResponse::NoContent().body(""))
@@ -2131,12 +2133,13 @@ pub async fn project_delete(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
config: web::Data<SearchConfig>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(req.headers(), &**pool).await?;
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;
let string = info.into_inner().0;

let project = database::models::Project::get_from_slug_or_project_id(&string, &**pool)
let project = database::models::Project::get(&string, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified project does not exist!".to_string())
@@ -2144,7 +2147,7 @@ pub async fn project_delete(

if !user.role.is_admin() {
let team_member = database::models::TeamMember::get_from_user_id_project(
project.id,
project.inner.id,
user.id.into(),
&**pool,
)
@@ -2166,11 +2169,12 @@ pub async fn project_delete(

let mut transaction = pool.begin().await?;

let result = database::models::Project::remove_full(project.id, &mut transaction).await?;
let result =
database::models::Project::remove(project.inner.id, &mut transaction, &redis).await?;

transaction.commit().await?;

delete_from_index(project.id.into(), config).await?;
delete_from_index(project.inner.id.into(), config).await?;

if result.is_some() {
Ok(HttpResponse::NoContent().body(""))
@@ -2184,20 +2188,21 @@ pub async fn project_follow(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(req.headers(), &**pool).await?;
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;
let string = info.into_inner().0;

let result = database::models::Project::get_from_slug_or_project_id(&string, &**pool)
let result = database::models::Project::get(&string, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified project does not exist!".to_string())
})?;

let user_id: database::models::ids::UserId = user.id.into();
let project_id: database::models::ids::ProjectId = result.id;
let project_id: database::models::ids::ProjectId = result.inner.id;

if !is_authorized(&result, &Some(user), &pool).await? {
if !is_authorized(&result.inner, &Some(user), &pool).await? {
return Ok(HttpResponse::NotFound().body(""));
}

@@ -2253,18 +2258,19 @@ pub async fn project_unfollow(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(req.headers(), &**pool).await?;
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;
let string = info.into_inner().0;

let result = database::models::Project::get_from_slug_or_project_id(&string, &**pool)
let result = database::models::Project::get(&string, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified project does not exist!".to_string())
})?;

let user_id: database::models::ids::UserId = user.id.into();
let project_id = result.id;
let project_id = result.inner.id;

let following = sqlx::query!(
"
@@ -1,11 +1,9 @@
use crate::auth::{check_is_moderator_from_headers, get_user_from_headers};
use crate::database::models::thread_item::{ThreadBuilder, ThreadMessageBuilder};
use crate::models::ids::{base62_impl::parse_base62, ProjectId, UserId, VersionId};
use crate::models::reports::{ItemType, Report};
use crate::models::threads::{MessageBody, ThreadType};
use crate::routes::ApiError;
use crate::util::auth::{
check_is_moderator_from_headers, get_user_from_headers, get_user_from_headers_transaction,
};
use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
use chrono::Utc;
use futures::StreamExt;
@@ -35,10 +33,11 @@ pub async fn report_create(
req: HttpRequest,
pool: web::Data<PgPool>,
mut body: web::Payload,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
let mut transaction = pool.begin().await?;

let current_user = get_user_from_headers_transaction(req.headers(), &mut transaction).await?;
let current_user = get_user_from_headers(req.headers(), &**pool, &redis).await?;

let mut bytes = web::BytesMut::new();
while let Some(item) = body.next().await {
@@ -179,9 +178,10 @@ fn default_all() -> bool {
pub async fn reports(
req: HttpRequest,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
count: web::Query<ReportsRequestOptions>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(req.headers(), &**pool).await?;
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;

use futures::stream::TryStreamExt;

@@ -225,10 +225,10 @@ pub async fn reports(
let query_reports =
crate::database::models::report_item::Report::get_many(&report_ids, &**pool).await?;

let mut reports = Vec::new();
let mut reports: Vec<Report> = Vec::new();

for x in query_reports {
reports.push(to_report(x));
reports.push(x.into());
}

Ok(HttpResponse::Ok().json(reports))
@@ -244,6 +244,7 @@ pub async fn reports_get(
req: HttpRequest,
web::Query(ids): web::Query<ReportIds>,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
let report_ids: Vec<crate::database::models::ids::ReportId> =
serde_json::from_str::<Vec<crate::models::ids::ReportId>>(&ids.ids)?
@@ -254,12 +255,12 @@ pub async fn reports_get(
let reports_data =
crate::database::models::report_item::Report::get_many(&report_ids, &**pool).await?;

let user = get_user_from_headers(req.headers(), &**pool).await?;
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;

let all_reports = reports_data
.into_iter()
.filter(|x| user.role.is_mod() || x.reporter == user.id.into())
.map(to_report)
.map(|x| x.into())
.collect::<Vec<Report>>();

Ok(HttpResponse::Ok().json(all_reports))
@@ -269,9 +270,10 @@ pub async fn reports_get(
pub async fn report_get(
req: HttpRequest,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
info: web::Path<(crate::models::reports::ReportId,)>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(req.headers(), &**pool).await?;
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;
let id = info.into_inner().0.into();

let report = crate::database::models::report_item::Report::get(id, &**pool).await?;
@@ -281,7 +283,8 @@ pub async fn report_get(
return Ok(HttpResponse::NotFound().body(""));
}

Ok(HttpResponse::Ok().json(to_report(report)))
let report: Report = report.into();
Ok(HttpResponse::Ok().json(report))
} else {
Ok(HttpResponse::NotFound().body(""))
}
@@ -298,10 +301,11 @@ pub struct EditReport {
pub async fn report_edit(
req: HttpRequest,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
info: web::Path<(crate::models::reports::ReportId,)>,
edit_report: web::Json<EditReport>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(req.headers(), &**pool).await?;
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;
let id = info.into_inner().0.into();

let report = crate::database::models::report_item::Report::get(id, &**pool).await?;
@@ -374,8 +378,9 @@ pub async fn report_delete(
req: HttpRequest,
pool: web::Data<PgPool>,
info: web::Path<(crate::models::reports::ReportId,)>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
check_is_moderator_from_headers(req.headers(), &**pool).await?;
check_is_moderator_from_headers(req.headers(), &**pool, &redis).await?;

let mut transaction = pool.begin().await?;
let result = crate::database::models::report_item::Report::remove_full(
@@ -391,31 +396,3 @@ pub async fn report_delete(
Ok(HttpResponse::NotFound().body(""))
}
}

fn to_report(x: crate::database::models::report_item::QueryReport) -> Report {
let mut item_id = "".to_string();
let mut item_type = ItemType::Unknown;

if let Some(project_id) = x.project_id {
item_id = ProjectId::from(project_id).to_string();
item_type = ItemType::Project;
} else if let Some(version_id) = x.version_id {
item_id = VersionId::from(version_id).to_string();
item_type = ItemType::Version;
} else if let Some(user_id) = x.user_id {
item_id = UserId::from(user_id).to_string();
item_type = ItemType::User;
}

Report {
id: x.id.into(),
report_type: x.report_type,
item_id,
item_type,
reporter: x.reporter.into(),
body: x.body,
created: x.created,
closed: x.closed,
thread_id: x.thread_id.map(|x| x.into()),
}
}
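The removed `to_report` helper above is replaced by `.into()` calls, which suggests the conversion moved onto the model as a `From` impl. A sketch of what that impl presumably looks like, reusing the exact field mapping of the deleted function (the real impl lives in the models module and may differ in detail):

```rust
impl From<crate::database::models::report_item::QueryReport> for Report {
    fn from(x: crate::database::models::report_item::QueryReport) -> Report {
        let mut item_id = String::new();
        let mut item_type = ItemType::Unknown;

        // Same precedence as the old helper: project, then version, then user.
        if let Some(project_id) = x.project_id {
            item_id = ProjectId::from(project_id).to_string();
            item_type = ItemType::Project;
        } else if let Some(version_id) = x.version_id {
            item_id = VersionId::from(version_id).to_string();
            item_type = ItemType::Version;
        } else if let Some(user_id) = x.user_id {
            item_id = UserId::from(user_id).to_string();
            item_type = ItemType::User;
        }

        Report {
            id: x.id.into(),
            report_type: x.report_type,
            item_id,
            item_type,
            reporter: x.reporter.into(),
            body: x.body,
            created: x.created,
            closed: x.closed,
            thread_id: x.thread_id.map(Into::into),
        }
    }
}
```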
@@ -1,3 +1,4 @@
use crate::auth::{get_user_from_headers, is_authorized};
use crate::database::models::notification_item::NotificationBuilder;
use crate::database::models::TeamMember;
use crate::models::ids::ProjectId;
@@ -5,7 +6,6 @@ use crate::models::notifications::NotificationBody;
use crate::models::teams::{Permissions, TeamId};
use crate::models::users::UserId;
use crate::routes::ApiError;
use crate::util::auth::{get_user_from_headers, is_authorized};
use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
use rust_decimal::Decimal;
use serde::{Deserialize, Serialize};
@@ -30,25 +30,27 @@ pub async fn team_members_get_project(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
let string = info.into_inner().0;
let project_data =
crate::database::models::Project::get_from_slug_or_project_id(&string, &**pool).await?;
let project_data = crate::database::models::Project::get(&string, &**pool, &redis).await?;

if let Some(project) = project_data {
let current_user = get_user_from_headers(req.headers(), &**pool).await.ok();
let current_user = get_user_from_headers(req.headers(), &**pool, &redis)
.await
.ok();

if !is_authorized(&project, &current_user, &pool).await? {
let members_data =
TeamMember::get_from_team_full(project.inner.team_id, &**pool, &redis).await?;

if !is_authorized(&project.inner, &current_user, &pool).await? {
return Ok(HttpResponse::NotFound().body(""));
}

let members_data = TeamMember::get_from_team_full(project.team_id, &**pool).await?;

if let Some(user) = &current_user {
let team_member =
TeamMember::get_from_user_id(project.team_id, user.id.into(), &**pool)
.await
.map_err(ApiError::Database)?;
let team_member = members_data
.iter()
.find(|x| x.user.id == user.id.into() && x.accepted);

if team_member.is_some() {
let team_members: Vec<_> = members_data
@@ -83,16 +85,19 @@ pub async fn team_members_get(
req: HttpRequest,
info: web::Path<(TeamId,)>,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
let id = info.into_inner().0;
let members_data = TeamMember::get_from_team_full(id.into(), &**pool).await?;
let members_data = TeamMember::get_from_team_full(id.into(), &**pool, &redis).await?;

let current_user = get_user_from_headers(req.headers(), &**pool).await.ok();
let current_user = get_user_from_headers(req.headers(), &**pool, &redis)
.await
.ok();

if let Some(user) = &current_user {
let team_member = TeamMember::get_from_user_id(id.into(), user.id.into(), &**pool)
.await
.map_err(ApiError::Database)?;
let team_member = members_data
.iter()
.find(|x| x.user.id == user.id.into() && x.accepted);

if team_member.is_some() {
let team_members: Vec<_> = members_data
@@ -129,6 +134,7 @@ pub async fn teams_get(
req: HttpRequest,
web::Query(ids): web::Query<TeamIds>,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
use itertools::Itertools;

@@ -137,34 +143,39 @@ pub async fn teams_get(
.map(|x| x.into())
.collect::<Vec<crate::database::models::ids::TeamId>>();

let teams_data = TeamMember::get_from_team_full_many(&team_ids, &**pool).await?;
let teams_data = TeamMember::get_from_team_full_many(&team_ids, &**pool, &redis).await?;

let current_user = get_user_from_headers(req.headers(), &**pool).await.ok();
let accepted = if let Some(user) = current_user {
TeamMember::get_from_user_id_many(&team_ids, user.id.into(), &**pool)
.await?
.into_iter()
.map(|m| m.team_id.0)
.collect()
} else {
std::collections::HashSet::new()
};
let current_user = get_user_from_headers(req.headers(), &**pool, &redis)
.await
.ok();

let teams_groups = teams_data.into_iter().group_by(|data| data.team_id.0);

let mut teams: Vec<Vec<crate::models::teams::TeamMember>> = vec![];

for (id, member_data) in &teams_groups {
if accepted.contains(&id) {
let team_members =
member_data.map(|data| crate::models::teams::TeamMember::from(data, false));
for (_, member_data) in &teams_groups {
let members = member_data.collect::<Vec<_>>();

let team_member = if let Some(user) = &current_user {
members
.iter()
.find(|x| x.user.id == user.id.into() && x.accepted)
} else {
None
};

if team_member.is_some() {
let team_members = members
.into_iter()
.map(|data| crate::models::teams::TeamMember::from(data, false));

teams.push(team_members.collect());

continue;
}

let team_members = member_data
let team_members = members
.into_iter()
.filter(|x| x.accepted)
.map(|data| crate::models::teams::TeamMember::from(data, true));
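The reworked `teams_get` loop above groups members by team with itertools' `group_by` and only exposes pending members when the requester is an accepted member of that team. A standalone sketch of that visibility rule with plain data types (the `Member` struct here is illustrative, not the crate's model):

```rust
use itertools::Itertools;

struct Member {
    team_id: u64,
    user_id: u64,
    accepted: bool,
}

fn visible_members(members: Vec<Member>, current_user: Option<u64>) -> Vec<Vec<Member>> {
    // Assumes the input is already sorted by team id, as the query above guarantees.
    let groups = members.into_iter().group_by(|m| m.team_id);

    let mut teams = Vec::new();
    for (_, group) in &groups {
        let group: Vec<Member> = group.collect();
        let requester_accepted = current_user
            .map(|uid| group.iter().any(|m| m.user_id == uid && m.accepted))
            .unwrap_or(false);

        if requester_accepted {
            // Accepted members of the team see everyone, including pending invites.
            teams.push(group);
        } else {
            // Everyone else only sees accepted members.
            teams.push(group.into_iter().filter(|m| m.accepted).collect());
        }
    }
    teams
}
```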
@@ -179,9 +190,10 @@ pub async fn join_team(
req: HttpRequest,
info: web::Path<(TeamId,)>,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
let team_id = info.into_inner().0.into();
let current_user = get_user_from_headers(req.headers(), &**pool).await?;
let current_user = get_user_from_headers(req.headers(), &**pool, &redis).await?;

let member =
TeamMember::get_from_user_id_pending(team_id, current_user.id.into(), &**pool).await?;
@@ -207,6 +219,8 @@ pub async fn join_team(
)
.await?;

TeamMember::clear_cache(team_id, &redis).await?;

transaction.commit().await?;
} else {
return Err(ApiError::InvalidInput(
@@ -244,12 +258,13 @@ pub async fn add_team_member(
info: web::Path<(TeamId,)>,
pool: web::Data<PgPool>,
new_member: web::Json<NewTeamMember>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
let team_id = info.into_inner().0.into();

let mut transaction = pool.begin().await?;

let current_user = get_user_from_headers(req.headers(), &**pool).await?;
let current_user = get_user_from_headers(req.headers(), &**pool, &redis).await?;
let member = TeamMember::get_from_user_id(team_id, current_user.id.into(), &**pool)
.await?
.ok_or_else(|| {
@@ -281,12 +296,8 @@ pub async fn add_team_member(
));
}

let request = crate::database::models::team_item::TeamMember::get_from_user_id_pending(
team_id,
new_member.user_id.into(),
&**pool,
)
.await?;
let request =
TeamMember::get_from_user_id_pending(team_id, new_member.user_id.into(), &**pool).await?;

if let Some(req) = request {
if req.accepted {
@@ -300,7 +311,7 @@ pub async fn add_team_member(
}
}

crate::database::models::User::get(member.user_id, &**pool)
crate::database::models::User::get_id(member.user_id, &**pool, &redis)
.await?
.ok_or_else(|| ApiError::InvalidInput("An invalid User ID specified".to_string()))?;

@@ -340,6 +351,8 @@ pub async fn add_team_member(
.insert(new_member.user_id.into(), &mut transaction)
.await?;

TeamMember::clear_cache(team_id, &redis).await?;

transaction.commit().await?;

Ok(HttpResponse::NoContent().body(""))
@@ -359,12 +372,13 @@ pub async fn edit_team_member(
info: web::Path<(TeamId, UserId)>,
pool: web::Data<PgPool>,
edit_member: web::Json<EditTeamMember>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
let ids = info.into_inner();
let id = ids.0.into();
let user_id = ids.1.into();

let current_user = get_user_from_headers(req.headers(), &**pool).await?;
let current_user = get_user_from_headers(req.headers(), &**pool, &redis).await?;
let member = TeamMember::get_from_user_id(id, current_user.id.into(), &**pool)
.await?
.ok_or_else(|| {
@@ -430,6 +444,8 @@ pub async fn edit_team_member(
)
.await?;

TeamMember::clear_cache(id, &redis).await?;

transaction.commit().await?;

Ok(HttpResponse::NoContent().body(""))
@@ -446,10 +462,11 @@ pub async fn transfer_ownership(
info: web::Path<(TeamId,)>,
pool: web::Data<PgPool>,
new_owner: web::Json<TransferOwnership>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
let id = info.into_inner().0;

let current_user = get_user_from_headers(req.headers(), &**pool).await?;
let current_user = get_user_from_headers(req.headers(), &**pool, &redis).await?;

if !current_user.role.is_admin() {
let member = TeamMember::get_from_user_id(id.into(), current_user.id.into(), &**pool)
@@ -505,6 +522,8 @@ pub async fn transfer_ownership(
)
.await?;

TeamMember::clear_cache(id.into(), &redis).await?;

transaction.commit().await?;

Ok(HttpResponse::NoContent().body(""))
@@ -515,12 +534,13 @@ pub async fn remove_team_member(
req: HttpRequest,
info: web::Path<(TeamId, UserId)>,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
let ids = info.into_inner();
let id = ids.0.into();
let user_id = ids.1.into();

let current_user = get_user_from_headers(req.headers(), &**pool).await?;
let current_user = get_user_from_headers(req.headers(), &**pool, &redis).await?;
let member = TeamMember::get_from_user_id(id, current_user.id.into(), &**pool)
.await?
.ok_or_else(|| {
@@ -566,6 +586,8 @@ pub async fn remove_team_member(
));
}

TeamMember::clear_cache(id, &redis).await?;

transaction.commit().await?;
Ok(HttpResponse::NoContent().body(""))
} else {
@@ -1,13 +1,13 @@
use crate::auth::{check_is_moderator_from_headers, get_user_from_headers};
use crate::database;
use crate::database::models::notification_item::NotificationBuilder;
use crate::database::models::thread_item::ThreadMessageBuilder;
use crate::models::ids::ThreadMessageId;
use crate::models::notifications::NotificationBody;
use crate::models::projects::ProjectStatus;
use crate::models::threads::{MessageBody, Thread, ThreadId, ThreadMessage, ThreadType};
use crate::models::threads::{MessageBody, Thread, ThreadId, ThreadType};
use crate::models::users::User;
use crate::routes::ApiError;
use crate::util::auth::{check_is_moderator_from_headers, get_user_from_headers};
use actix_web::{delete, get, post, web, HttpRequest, HttpResponse};
use futures::TryStreamExt;
use serde::Deserialize;
@@ -68,6 +68,7 @@ pub async fn filter_authorized_threads(
threads: Vec<database::models::Thread>,
user: &User,
pool: &web::Data<PgPool>,
redis: &deadpool_redis::Pool,
) -> Result<Vec<Thread>, ApiError> {
let user_id: database::models::UserId = user.id.into();

@@ -171,7 +172,7 @@ pub async fn filter_authorized_threads(
.collect::<Vec<database::models::UserId>>(),
);

let users: Vec<User> = database::models::User::get_many(&user_ids, &***pool)
let users: Vec<User> = database::models::User::get_many_ids(&user_ids, &***pool, redis)
.await?
.into_iter()
.map(From::from)
@@ -190,7 +191,7 @@ pub async fn filter_authorized_threads(
.collect::<Vec<_>>(),
);

final_threads.push(convert_thread(
final_threads.push(Thread::from(
thread,
users
.iter()
@@ -204,56 +205,18 @@ pub async fn filter_authorized_threads(
Ok(final_threads)
}

fn convert_thread(data: database::models::Thread, users: Vec<User>, user: &User) -> Thread {
let thread_type = data.type_;

Thread {
id: data.id.into(),
type_: thread_type,
messages: data
.messages
.into_iter()
.filter(|x| {
if let MessageBody::Text { private, .. } = x.body {
!private || user.role.is_mod()
} else {
true
}
})
.map(|x| ThreadMessage {
id: x.id.into(),
author_id: if users
.iter()
.find(|y| x.author_id == Some(y.id.into()))
.map(|x| x.role.is_mod() && !user.role.is_mod())
.unwrap_or(false)
{
None
} else {
x.author_id.map(|x| x.into())
},
body: x.body,
created: x.created,
})
.collect(),
members: users
.into_iter()
.filter(|x| !x.role.is_mod() || user.role.is_mod())
.collect(),
}
}
#[get("{id}")]
|
||||
pub async fn thread_get(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(ThreadId,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<deadpool_redis::Pool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let string = info.into_inner().0.into();
|
||||
|
||||
let thread_data = database::models::Thread::get(string, &**pool).await?;
|
||||
|
||||
let user = get_user_from_headers(req.headers(), &**pool).await?;
|
||||
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;
|
||||
|
||||
if let Some(mut data) = thread_data {
|
||||
if is_authorized_thread(&data, &user, &pool).await? {
|
||||
@@ -267,13 +230,13 @@ pub async fn thread_get(
|
||||
.collect::<Vec<_>>(),
|
||||
);
|
||||
|
||||
let users: Vec<User> = database::models::User::get_many(authors, &**pool)
|
||||
let users: Vec<User> = database::models::User::get_many_ids(authors, &**pool, &redis)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(From::from)
|
||||
.collect();
|
||||
|
||||
return Ok(HttpResponse::Ok().json(convert_thread(data, users, &user)));
|
||||
return Ok(HttpResponse::Ok().json(Thread::from(data, users, &user)));
|
||||
}
|
||||
}
|
||||
Ok(HttpResponse::NotFound().body(""))
|
||||
@@ -289,8 +252,9 @@ pub async fn threads_get(
|
||||
req: HttpRequest,
|
||||
web::Query(ids): web::Query<ThreadIds>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<deadpool_redis::Pool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(req.headers(), &**pool).await?;
|
||||
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;
|
||||
|
||||
let thread_ids: Vec<database::models::ids::ThreadId> =
|
||||
serde_json::from_str::<Vec<ThreadId>>(&ids.ids)?
|
||||
@@ -300,7 +264,7 @@ pub async fn threads_get(
|
||||
|
||||
let threads_data = database::models::Thread::get_many(&thread_ids, &**pool).await?;
|
||||
|
||||
let threads = filter_authorized_threads(threads_data, &user, &pool).await?;
|
||||
let threads = filter_authorized_threads(threads_data, &user, &pool, &redis).await?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(threads))
|
||||
}
|
||||
@@ -316,8 +280,9 @@ pub async fn thread_send_message(
|
||||
info: web::Path<(ThreadId,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
new_message: web::Json<NewThreadMessage>,
|
||||
redis: web::Data<deadpool_redis::Pool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(req.headers(), &**pool).await?;
|
||||
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;
|
||||
|
||||
let string: database::models::ThreadId = info.into_inner().0.into();
|
||||
|
||||
@@ -392,6 +357,7 @@ pub async fn thread_send_message(
|
||||
let members = database::models::TeamMember::get_from_team_full(
|
||||
database::models::TeamId(record.team_id),
|
||||
&**pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@@ -475,8 +441,9 @@ pub async fn thread_send_message(
|
||||
pub async fn moderation_inbox(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<deadpool_redis::Pool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = check_is_moderator_from_headers(req.headers(), &**pool).await?;
|
||||
let user = check_is_moderator_from_headers(req.headers(), &**pool, &redis).await?;
|
||||
|
||||
let ids = sqlx::query!(
|
||||
"
|
||||
@@ -491,7 +458,7 @@ pub async fn moderation_inbox(
|
||||
.await?;
|
||||
|
||||
let threads_data = database::models::Thread::get_many(&ids, &**pool).await?;
|
||||
let threads = filter_authorized_threads(threads_data, &user, &pool).await?;
|
||||
let threads = filter_authorized_threads(threads_data, &user, &pool, &redis).await?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(threads))
|
||||
}
|
||||
@@ -501,8 +468,9 @@ pub async fn thread_read(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(ThreadId,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<deadpool_redis::Pool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
check_is_moderator_from_headers(req.headers(), &**pool).await?;
|
||||
check_is_moderator_from_headers(req.headers(), &**pool, &redis).await?;
|
||||
|
||||
let id = info.into_inner().0;
|
||||
let mut transaction = pool.begin().await?;
|
||||
@@ -528,8 +496,9 @@ pub async fn message_delete(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(ThreadMessageId,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<deadpool_redis::Pool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(req.headers(), &**pool).await?;
|
||||
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;
|
||||
|
||||
let result = database::models::ThreadMessage::get(info.into_inner().0.into(), &**pool).await?;
|
||||
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
use crate::auth::get_user_from_headers;
|
||||
use crate::database::models::User;
|
||||
use crate::file_hosting::FileHost;
|
||||
use crate::models::notifications::Notification;
|
||||
@@ -5,7 +6,6 @@ use crate::models::projects::Project;
|
||||
use crate::models::users::{Badges, RecipientType, RecipientWallet, Role, UserId};
|
||||
use crate::queue::payouts::{PayoutAmount, PayoutItem, PayoutsQueue};
|
||||
use crate::routes::ApiError;
|
||||
use crate::util::auth::get_user_from_headers;
|
||||
use crate::util::routes::read_from_payload;
|
||||
use crate::util::validate::validation_errors_to_string;
|
||||
use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
|
||||
@@ -43,8 +43,9 @@ pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
pub async fn user_auth_get(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<deadpool_redis::Pool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
Ok(HttpResponse::Ok().json(get_user_from_headers(req.headers(), &**pool).await?))
|
||||
Ok(HttpResponse::Ok().json(get_user_from_headers(req.headers(), &**pool, &redis).await?))
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
@@ -57,8 +58,9 @@ pub struct UserData {
|
||||
pub async fn user_data_get(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<deadpool_redis::Pool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(req.headers(), &**pool).await?;
|
||||
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;
|
||||
|
||||
let data = sqlx::query!(
|
||||
"
|
||||
@@ -93,13 +95,11 @@ pub struct UserIds {
|
||||
pub async fn users_get(
|
||||
web::Query(ids): web::Query<UserIds>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<deadpool_redis::Pool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user_ids = serde_json::from_str::<Vec<UserId>>(&ids.ids)?
|
||||
.into_iter()
|
||||
.map(|x| x.into())
|
||||
.collect::<Vec<crate::database::models::UserId>>();
|
||||
let user_ids = serde_json::from_str::<Vec<String>>(&ids.ids)?;
|
||||
|
||||
let users_data = User::get_many(&user_ids, &**pool).await?;
|
||||
let users_data = User::get_many(&user_ids, &**pool, &redis).await?;
|
||||
|
||||
let users: Vec<crate::models::users::User> = users_data.into_iter().map(From::from).collect();
|
||||
|
||||
@@ -110,21 +110,9 @@ pub async fn users_get(
|
||||
pub async fn user_get(
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<deadpool_redis::Pool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let string = info.into_inner().0;
|
||||
let id_option: Option<UserId> = serde_json::from_str(&format!("\"{string}\"")).ok();
|
||||
|
||||
let mut user_data;
|
||||
|
||||
if let Some(id) = id_option {
|
||||
user_data = User::get(id.into(), &**pool).await?;
|
||||
|
||||
if user_data.is_none() {
|
||||
user_data = User::get_from_username(string, &**pool).await?;
|
||||
}
|
||||
} else {
|
||||
user_data = User::get_from_username(string, &**pool).await?;
|
||||
}
|
||||
let user_data = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
||||
|
||||
if let Some(data) = user_data {
|
||||
let response: crate::models::users::User = data.into();
|
||||
@@ -139,12 +127,15 @@ pub async fn projects_list(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<deadpool_redis::Pool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(req.headers(), &**pool).await.ok();
|
||||
let user = get_user_from_headers(req.headers(), &**pool, &redis)
|
||||
.await
|
||||
.ok();
|
||||
|
||||
let id_option = User::get_id_from_username_or_id(&info.into_inner().0, &**pool).await?;
|
||||
let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
||||
|
||||
if let Some(id) = id_option {
|
||||
if let Some(id) = id_option.map(|x| x.id) {
|
||||
let user_id: UserId = id.into();
|
||||
|
||||
let can_view_private = user
|
||||
@@ -153,12 +144,13 @@ pub async fn projects_list(
|
||||
|
||||
let project_data = User::get_projects(id, &**pool).await?;
|
||||
|
||||
let response: Vec<_> = crate::database::Project::get_many_full(&project_data, &**pool)
|
||||
.await?
|
||||
.into_iter()
|
||||
.filter(|x| can_view_private || x.inner.status.is_searchable())
|
||||
.map(Project::from)
|
||||
.collect();
|
||||
let response: Vec<_> =
|
||||
crate::database::Project::get_many_ids(&project_data, &**pool, &redis)
|
||||
.await?
|
||||
.into_iter()
|
||||
.filter(|x| can_view_private || x.inner.status.is_searchable())
|
||||
.map(Project::from)
|
||||
.collect();
|
||||
|
||||
Ok(HttpResponse::Ok().json(response))
|
||||
} else {
|
||||
@@ -211,29 +203,30 @@ pub struct EditPayoutData {
|
||||
pub async fn user_edit(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
new_user: web::Json<EditUser>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<deadpool_redis::Pool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(req.headers(), &**pool).await?;
|
||||
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;
|
||||
|
||||
new_user
|
||||
.validate()
|
||||
.map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?;
|
||||
|
||||
let id_option = User::get_id_from_username_or_id(&info.into_inner().0, &**pool).await?;
|
||||
let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
||||
|
||||
if let Some(id) = id_option {
|
||||
if let Some(actual_user) = id_option {
|
||||
let id = actual_user.id;
|
||||
let user_id: UserId = id.into();
|
||||
|
||||
if user.id == user_id || user.role.is_mod() {
|
||||
let mut transaction = pool.begin().await?;
|
||||
|
||||
if let Some(username) = &new_user.username {
|
||||
let existing_user_id_option =
|
||||
User::get_id_from_username_or_id(username, &**pool).await?;
|
||||
let existing_user_id_option = User::get(username, &**pool, &redis).await?;
|
||||
|
||||
if existing_user_id_option
|
||||
.map(UserId::from)
|
||||
.map(|x| UserId::from(x.id))
|
||||
.map(|id| id == user.id)
|
||||
.unwrap_or(true)
|
||||
{
|
||||
@@ -394,6 +387,7 @@ pub async fn user_edit(
|
||||
}
|
||||
}
|
||||
|
||||
User::clear_caches(&[(id, Some(actual_user.username))], &redis).await?;
|
||||
transaction.commit().await?;
|
||||
Ok(HttpResponse::NoContent().body(""))
|
||||
} else {
|
||||
@@ -417,34 +411,24 @@ pub async fn user_icon_edit(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<deadpool_redis::Pool>,
|
||||
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
|
||||
mut payload: web::Payload,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
if let Some(content_type) = crate::util::ext::get_image_content_type(&ext.ext) {
|
||||
let cdn_url = dotenvy::var("CDN_URL")?;
|
||||
let user = get_user_from_headers(req.headers(), &**pool).await?;
|
||||
let id_option = User::get_id_from_username_or_id(&info.into_inner().0, &**pool).await?;
|
||||
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;
|
||||
let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
||||
|
||||
if let Some(id) = id_option {
|
||||
if user.id != id.into() && !user.role.is_mod() {
|
||||
if let Some(actual_user) = id_option {
|
||||
if user.id != actual_user.id.into() && !user.role.is_mod() {
|
||||
return Err(ApiError::CustomAuthentication(
|
||||
"You don't have permission to edit this user's icon.".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
let mut icon_url = user.avatar_url;
|
||||
|
||||
let user_id: UserId = id.into();
|
||||
|
||||
if user.id != user_id {
|
||||
let new_user = User::get(id, &**pool).await?;
|
||||
|
||||
if let Some(new) = new_user {
|
||||
icon_url = new.avatar_url;
|
||||
} else {
|
||||
return Ok(HttpResponse::NotFound().body(""));
|
||||
}
|
||||
}
|
||||
let icon_url = actual_user.avatar_url;
|
||||
let user_id: UserId = actual_user.id.into();
|
||||
|
||||
if let Some(icon) = icon_url {
|
||||
let name = icon.split(&format!("{cdn_url}/")).nth(1);
|
||||
@@ -473,10 +457,12 @@ pub async fn user_icon_edit(
|
||||
WHERE (id = $2)
|
||||
",
|
||||
format!("{}/{}", cdn_url, upload_data.file_name),
|
||||
id as crate::database::models::ids::UserId,
|
||||
actual_user.id as crate::database::models::ids::UserId,
|
||||
)
|
||||
.execute(&**pool)
|
||||
.await?;
|
||||
User::clear_caches(&[(actual_user.id, None)], &redis).await?;
|
||||
|
||||
Ok(HttpResponse::NoContent().body(""))
|
||||
} else {
|
||||
Ok(HttpResponse::NotFound().body(""))
|
||||
@@ -505,11 +491,12 @@ pub async fn user_delete(
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
removal_type: web::Query<RemovalType>,
|
||||
redis: web::Data<deadpool_redis::Pool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(req.headers(), &**pool).await?;
|
||||
let id_option = User::get_id_from_username_or_id(&info.into_inner().0, &**pool).await?;
|
||||
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;
|
||||
let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
||||
|
||||
if let Some(id) = id_option {
|
||||
if let Some(id) = id_option.map(|x| x.id) {
|
||||
if !user.role.is_admin() && user.id != id.into() {
|
||||
return Err(ApiError::CustomAuthentication(
|
||||
"You do not have permission to delete this user!".to_string(),
|
||||
@@ -518,11 +505,13 @@ pub async fn user_delete(
|
||||
|
||||
let mut transaction = pool.begin().await?;
|
||||
|
||||
let result = if &*removal_type.removal_type == "full" {
|
||||
User::remove_full(id, &mut transaction).await?
|
||||
} else {
|
||||
User::remove(id, &mut transaction).await?
|
||||
};
|
||||
let result = User::remove(
|
||||
id,
|
||||
removal_type.removal_type == "full",
|
||||
&mut transaction,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
|
||||
transaction.commit().await?;
|
||||
|
||||
@@ -541,11 +530,12 @@ pub async fn user_follows(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<deadpool_redis::Pool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(req.headers(), &**pool).await?;
|
||||
let id_option = User::get_id_from_username_or_id(&info.into_inner().0, &**pool).await?;
|
||||
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;
|
||||
let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
||||
|
||||
if let Some(id) = id_option {
|
||||
if let Some(id) = id_option.map(|x| x.id) {
|
||||
if !user.role.is_admin() && user.id != id.into() {
|
||||
return Err(ApiError::CustomAuthentication(
|
||||
"You do not have permission to see the projects this user follows!".to_string(),
|
||||
@@ -569,11 +559,12 @@ pub async fn user_follows(
|
||||
.try_collect::<Vec<crate::database::models::ProjectId>>()
|
||||
.await?;
|
||||
|
||||
let projects: Vec<_> = crate::database::Project::get_many_full(&project_ids, &**pool)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(Project::from)
|
||||
.collect();
|
||||
let projects: Vec<_> =
|
||||
crate::database::Project::get_many_ids(&project_ids, &**pool, &redis)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(Project::from)
|
||||
.collect();
|
||||
|
||||
Ok(HttpResponse::Ok().json(projects))
|
||||
} else {
|
||||
@@ -586,11 +577,12 @@ pub async fn user_notifications(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<deadpool_redis::Pool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(req.headers(), &**pool).await?;
|
||||
let id_option = User::get_id_from_username_or_id(&info.into_inner().0, &**pool).await?;
|
||||
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;
|
||||
let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
||||
|
||||
if let Some(id) = id_option {
|
||||
if let Some(id) = id_option.map(|x| x.id) {
|
||||
if !user.role.is_admin() && user.id != id.into() {
|
||||
return Err(ApiError::CustomAuthentication(
|
||||
"You do not have permission to see the notifications of this user!".to_string(),
|
||||
@@ -624,11 +616,12 @@ pub async fn user_payouts(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<deadpool_redis::Pool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(req.headers(), &**pool).await?;
|
||||
let id_option = User::get_id_from_username_or_id(&info.into_inner().0, &**pool).await?;
|
||||
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;
|
||||
let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
||||
|
||||
if let Some(id) = id_option {
|
||||
if let Some(id) = id_option.map(|x| x.id) {
|
||||
if !user.role.is_admin() && user.id != id.into() {
|
||||
return Err(ApiError::CustomAuthentication(
|
||||
"You do not have permission to see the payouts of this user!".to_string(),
|
||||
@@ -699,13 +692,14 @@ pub async fn user_payouts_request(
|
||||
pool: web::Data<PgPool>,
|
||||
data: web::Json<PayoutData>,
|
||||
payouts_queue: web::Data<Arc<Mutex<PayoutsQueue>>>,
|
||||
redis: web::Data<deadpool_redis::Pool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let mut payouts_queue = payouts_queue.lock().await;
|
||||
|
||||
let user = get_user_from_headers(req.headers(), &**pool).await?;
|
||||
let id_option = User::get_id_from_username_or_id(&info.into_inner().0, &**pool).await?;
|
||||
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;
|
||||
let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
||||
|
||||
if let Some(id) = id_option {
|
||||
if let Some(id) = id_option.map(|x| x.id) {
|
||||
if !user.role.is_admin() && user.id != id.into() {
|
||||
return Err(ApiError::CustomAuthentication(
|
||||
"You do not have permission to request payouts of this user!".to_string(),
|
||||
@@ -761,6 +755,7 @@ pub async fn user_payouts_request(
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
User::clear_caches(&[(id, None)], &redis).await?;
|
||||
|
||||
transaction.commit().await?;
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
use super::project_creation::{CreateError, UploadedFile};
|
||||
use crate::auth::get_user_from_headers;
|
||||
use crate::database::models;
|
||||
use crate::database::models::notification_item::NotificationBuilder;
|
||||
use crate::database::models::version_item::{
|
||||
@@ -12,7 +13,6 @@ use crate::models::projects::{
|
||||
VersionId, VersionStatus, VersionType,
|
||||
};
|
||||
use crate::models::teams::Permissions;
|
||||
use crate::util::auth::get_user_from_headers_transaction;
|
||||
use crate::util::routes::read_from_field;
|
||||
use crate::util::validate::validation_errors_to_string;
|
||||
use crate::validate::{validate_file, ValidationResult};
|
||||
@@ -82,6 +82,7 @@ pub async fn version_create(
|
||||
req: HttpRequest,
|
||||
mut payload: Multipart,
|
||||
client: Data<PgPool>,
|
||||
redis: Data<deadpool_redis::Pool>,
|
||||
file_host: Data<Arc<dyn FileHost + Send + Sync>>,
|
||||
) -> Result<HttpResponse, CreateError> {
|
||||
let mut transaction = client.begin().await?;
|
||||
@@ -91,8 +92,10 @@ pub async fn version_create(
|
||||
req,
|
||||
&mut payload,
|
||||
&mut transaction,
|
||||
&redis,
|
||||
&***file_host,
|
||||
&mut uploaded_files,
|
||||
&client,
|
||||
)
|
||||
.await;
|
||||
|
||||
@@ -116,8 +119,10 @@ async fn version_create_inner(
|
||||
req: HttpRequest,
|
||||
payload: &mut Multipart,
|
||||
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
|
||||
redis: &deadpool_redis::Pool,
|
||||
file_host: &dyn FileHost,
|
||||
uploaded_files: &mut Vec<UploadedFile>,
|
||||
pool: &PgPool,
|
||||
) -> Result<HttpResponse, CreateError> {
|
||||
let cdn_url = dotenvy::var("CDN_URL")?;
|
||||
|
||||
@@ -127,7 +132,7 @@ async fn version_create_inner(
|
||||
let all_game_versions = models::categories::GameVersion::list(&mut *transaction).await?;
|
||||
let all_loaders = models::categories::Loader::list(&mut *transaction).await?;
|
||||
|
||||
let user = get_user_from_headers_transaction(req.headers(), &mut *transaction).await?;
|
||||
let user = get_user_from_headers(req.headers(), pool, redis).await?;
|
||||
|
||||
let mut error = None;
|
||||
while let Some(item) = payload.next().await {
|
||||
@@ -417,8 +422,7 @@ async fn version_create_inner(
|
||||
let project_id = builder.project_id;
|
||||
builder.insert(transaction).await?;
|
||||
|
||||
models::Project::update_game_versions(project_id, &mut *transaction).await?;
|
||||
models::Project::update_loaders(project_id, &mut *transaction).await?;
|
||||
models::Project::clear_cache(project_id, None, Some(true), redis).await?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(response))
|
||||
}
|
||||
@@ -430,6 +434,7 @@ pub async fn upload_file_to_version(
|
||||
url_data: web::Path<(VersionId,)>,
|
||||
mut payload: Multipart,
|
||||
client: Data<PgPool>,
|
||||
redis: web::Data<deadpool_redis::Pool>,
|
||||
file_host: Data<Arc<dyn FileHost + Send + Sync>>,
|
||||
) -> Result<HttpResponse, CreateError> {
|
||||
let mut transaction = client.begin().await?;
|
||||
@@ -442,6 +447,7 @@ pub async fn upload_file_to_version(
|
||||
&mut payload,
|
||||
client,
|
||||
&mut transaction,
|
||||
redis,
|
||||
&***file_host,
|
||||
&mut uploaded_files,
|
||||
version_id,
|
||||
@@ -470,6 +476,7 @@ async fn upload_file_to_version_inner(
|
||||
payload: &mut Multipart,
|
||||
client: Data<PgPool>,
|
||||
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
|
||||
redis: Data<deadpool_redis::Pool>,
|
||||
file_host: &dyn FileHost,
|
||||
uploaded_files: &mut Vec<UploadedFile>,
|
||||
version_id: models::VersionId,
|
||||
@@ -479,9 +486,9 @@ async fn upload_file_to_version_inner(
|
||||
let mut initial_file_data: Option<InitialFileData> = None;
|
||||
let mut file_builders: Vec<VersionFileBuilder> = Vec::new();
|
||||
|
||||
let user = get_user_from_headers_transaction(req.headers(), &mut *transaction).await?;
|
||||
let user = get_user_from_headers(req.headers(), &**client, &redis).await?;
|
||||
|
||||
let result = models::Version::get_full(version_id, &**client).await?;
|
||||
let result = models::Version::get(version_id, &**client, &redis).await?;
|
||||
|
||||
let version = match result {
|
||||
Some(v) => v,
|
||||
@@ -493,8 +500,8 @@ async fn upload_file_to_version_inner(
|
||||
};
|
||||
|
||||
if !user.role.is_admin() {
|
||||
let team_member = models::TeamMember::get_from_user_id_version(
|
||||
version_id,
|
||||
let team_member = models::TeamMember::get_from_user_id_project(
|
||||
version.inner.project_id,
|
||||
user.id.into(),
|
||||
&mut *transaction,
|
||||
)
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
use super::ApiError;
|
||||
use crate::database::models::{version_item::QueryVersion, DatabaseError};
|
||||
use crate::auth::{
|
||||
filter_authorized_projects, filter_authorized_versions, get_user_from_headers,
|
||||
is_authorized_version,
|
||||
};
|
||||
use crate::models::ids::VersionId;
|
||||
use crate::models::projects::{GameVersion, Loader, Project, Version};
|
||||
use crate::models::projects::VersionType;
|
||||
use crate::models::teams::Permissions;
|
||||
use crate::util::auth::get_user_from_headers;
|
||||
use crate::util::routes::ok_or_not_found;
|
||||
use crate::{database, models};
|
||||
use actix_web::{delete, get, post, web, HttpRequest, HttpResponse};
|
||||
use futures::TryStreamExt;
|
||||
use itertools::Itertools;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::PgPool;
|
||||
@@ -25,7 +25,6 @@ pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
cfg.service(
|
||||
web::scope("version_files")
|
||||
.service(get_versions_from_hashes)
|
||||
.service(download_files)
|
||||
.service(update_files),
|
||||
);
|
||||
}
|
||||
@@ -34,8 +33,6 @@ pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
pub struct HashQuery {
|
||||
#[serde(default = "default_algorithm")]
|
||||
pub algorithm: String,
|
||||
#[serde(default = "default_multiple")]
|
||||
pub multiple: bool,
|
||||
pub version_id: Option<VersionId>,
|
||||
}
|
||||
|
||||
@@ -43,59 +40,40 @@ fn default_algorithm() -> String {
|
||||
"sha1".into()
|
||||
}
|
||||
|
||||
fn default_multiple() -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
// under /api/v1/version_file/{hash}
|
||||
#[get("{version_id}")]
|
||||
pub async fn get_version_from_hash(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<deadpool_redis::Pool>,
|
||||
hash_query: web::Query<HashQuery>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let hash = info.into_inner().0.to_lowercase();
|
||||
let user_option = get_user_from_headers(req.headers(), &**pool, &redis)
|
||||
.await
|
||||
.ok();
|
||||
|
||||
let result = sqlx::query!(
|
||||
"
|
||||
SELECT f.version_id version_id
|
||||
FROM hashes h
|
||||
INNER JOIN files f ON h.file_id = f.id
|
||||
INNER JOIN versions v on f.version_id = v.id AND v.status != ALL($1)
|
||||
INNER JOIN mods m on v.mod_id = m.id
|
||||
WHERE h.algorithm = $3 AND h.hash = $2 AND m.status != ALL($4)
|
||||
ORDER BY v.date_published ASC
|
||||
",
|
||||
&*crate::models::projects::VersionStatus::iterator()
|
||||
.filter(|x| x.is_hidden())
|
||||
.map(|x| x.to_string())
|
||||
.collect::<Vec<String>>(),
|
||||
hash.as_bytes(),
|
||||
hash_query.algorithm,
|
||||
&*crate::models::projects::ProjectStatus::iterator()
|
||||
.filter(|x| x.is_hidden())
|
||||
.map(|x| x.to_string())
|
||||
.collect::<Vec<String>>(),
|
||||
let hash = info.into_inner().0.to_lowercase();
|
||||
let file = database::models::Version::get_file_from_hash(
|
||||
hash_query.algorithm.clone(),
|
||||
hash,
|
||||
hash_query.version_id.map(|x| x.into()),
|
||||
&**pool,
|
||||
&redis,
|
||||
)
|
||||
.fetch_all(&**pool)
|
||||
.await?;
|
||||
|
||||
let version_ids = result
|
||||
.iter()
|
||||
.map(|x| database::models::VersionId(x.version_id))
|
||||
.collect::<Vec<_>>();
|
||||
let versions_data = database::models::Version::get_many_full(&version_ids, &**pool).await?;
|
||||
if let Some(file) = file {
|
||||
let version = database::models::Version::get(file.version_id, &**pool, &redis).await?;
|
||||
|
||||
if let Some(first) = versions_data.first() {
|
||||
if hash_query.multiple {
|
||||
Ok(HttpResponse::Ok().json(
|
||||
versions_data
|
||||
.into_iter()
|
||||
.map(models::projects::Version::from)
|
||||
.collect::<Vec<_>>(),
|
||||
))
|
||||
if let Some(version) = version {
|
||||
if !is_authorized_version(&version.inner, &user_option, &pool).await? {
|
||||
return Ok(HttpResponse::NotFound().body(""));
|
||||
}
|
||||
|
||||
Ok(HttpResponse::Ok().json(models::projects::Version::from(version)))
|
||||
} else {
|
||||
Ok(HttpResponse::Ok().json(models::projects::Version::from(first.clone())))
|
||||
Ok(HttpResponse::NotFound().body(""))
|
||||
}
|
||||
} else {
|
||||
Ok(HttpResponse::NotFound().body(""))
|
||||
@@ -110,42 +88,40 @@ pub struct DownloadRedirect {
|
||||
// under /api/v1/version_file/{hash}/download
|
||||
#[get("{version_id}/download")]
|
||||
pub async fn download_version(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<deadpool_redis::Pool>,
|
||||
hash_query: web::Query<HashQuery>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let hash = info.into_inner().0.to_lowercase();
|
||||
let mut transaction = pool.begin().await?;
|
||||
let user_option = get_user_from_headers(req.headers(), &**pool, &redis)
|
||||
.await
|
||||
.ok();
|
||||
|
||||
let result = sqlx::query!(
|
||||
"
|
||||
SELECT f.url url, f.id id, f.version_id version_id, v.mod_id project_id FROM hashes h
|
||||
INNER JOIN files f ON h.file_id = f.id
|
||||
INNER JOIN versions v ON v.id = f.version_id AND v.status != ALL($1)
|
||||
INNER JOIN mods m on v.mod_id = m.id
|
||||
WHERE h.algorithm = $3 AND h.hash = $2 AND m.status != ALL($4)
|
||||
ORDER BY v.date_published ASC
|
||||
",
|
||||
&*crate::models::projects::VersionStatus::iterator()
|
||||
.filter(|x| x.is_hidden())
|
||||
.map(|x| x.to_string())
|
||||
.collect::<Vec<String>>(),
|
||||
hash.as_bytes(),
|
||||
hash_query.algorithm,
|
||||
&*crate::models::projects::ProjectStatus::iterator()
|
||||
.filter(|x| x.is_hidden())
|
||||
.map(|x| x.to_string())
|
||||
.collect::<Vec<String>>(),
|
||||
let hash = info.into_inner().0.to_lowercase();
|
||||
let file = database::models::Version::get_file_from_hash(
|
||||
hash_query.algorithm.clone(),
|
||||
hash,
|
||||
hash_query.version_id.map(|x| x.into()),
|
||||
&**pool,
|
||||
&redis,
|
||||
)
|
||||
.fetch_optional(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
if let Some(id) = result {
|
||||
transaction.commit().await?;
|
||||
if let Some(file) = file {
|
||||
let version = database::models::Version::get(file.version_id, &**pool, &redis).await?;
|
||||
|
||||
Ok(HttpResponse::TemporaryRedirect()
|
||||
.append_header(("Location", &*id.url))
|
||||
.json(DownloadRedirect { url: id.url }))
|
||||
if let Some(version) = version {
|
||||
if !is_authorized_version(&version.inner, &user_option, &pool).await? {
|
||||
return Ok(HttpResponse::NotFound().body(""));
|
||||
}
|
||||
|
||||
Ok(HttpResponse::TemporaryRedirect()
|
||||
.append_header(("Location", &*file.url))
|
||||
.json(DownloadRedirect { url: file.url }))
|
||||
} else {
|
||||
Ok(HttpResponse::NotFound().body(""))
|
||||
}
|
||||
} else {
|
||||
Ok(HttpResponse::NotFound().body(""))
|
||||
}
|
||||
@@ -157,33 +133,26 @@ pub async fn delete_file(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<deadpool_redis::Pool>,
|
||||
hash_query: web::Query<HashQuery>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(req.headers(), &**pool).await?;
|
||||
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;
|
||||
|
||||
let hash = info.into_inner().0.to_lowercase();
|
||||
|
||||
let result = sqlx::query!(
|
||||
"
|
||||
SELECT f.id id, f.version_id version_id, f.filename filename, v.version_number version_number, v.mod_id project_id FROM hashes h
|
||||
INNER JOIN files f ON h.file_id = f.id
|
||||
INNER JOIN versions v ON v.id = f.version_id
|
||||
WHERE h.algorithm = $2 AND h.hash = $1
|
||||
ORDER BY v.date_published ASC
|
||||
",
|
||||
hash.as_bytes(),
|
||||
hash_query.algorithm
|
||||
let file = database::models::Version::get_file_from_hash(
|
||||
hash_query.algorithm.clone(),
|
||||
hash,
|
||||
hash_query.version_id.map(|x| x.into()),
|
||||
&**pool,
|
||||
&redis,
|
||||
)
|
||||
.fetch_all(&**pool)
|
||||
.await?;
|
||||
.await?;
|
||||
|
||||
if let Some(row) = result.iter().find_or_first(|x| {
|
||||
hash_query.version_id.is_none()
|
||||
|| Some(x.version_id) == hash_query.version_id.map(|x| x.0 as i64)
|
||||
}) {
|
||||
if let Some(row) = file {
|
||||
if !user.role.is_admin() {
|
||||
let team_member = database::models::TeamMember::get_from_user_id_version(
|
||||
database::models::ids::VersionId(row.version_id),
|
||||
row.version_id,
|
||||
user.id.into(),
|
||||
&**pool,
|
||||
)
|
||||
@@ -205,24 +174,15 @@ pub async fn delete_file(
|
||||
}
|
||||
}
|
||||
|
||||
use futures::stream::TryStreamExt;
|
||||
let version = database::models::Version::get(row.version_id, &**pool, &redis).await?;
|
||||
if let Some(version) = version {
|
||||
if version.files.len() < 2 {
|
||||
return Err(ApiError::InvalidInput(
|
||||
"Versions must have at least one file uploaded to them".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
let files = sqlx::query!(
|
||||
"
|
||||
SELECT f.id id FROM files f
|
||||
WHERE f.version_id = $1
|
||||
",
|
||||
row.version_id
|
||||
)
|
||||
.fetch_many(&**pool)
|
||||
.try_filter_map(|e| async { Ok(e.right().map(|_| ())) })
|
||||
.try_collect::<Vec<()>>()
|
||||
.await?;
|
||||
|
||||
if files.len() < 2 {
|
||||
return Err(ApiError::InvalidInput(
|
||||
"Versions must have at least one file uploaded to them".to_string(),
|
||||
));
|
||||
database::models::Version::clear_cache(&version, &redis).await?;
|
||||
}
|
||||
|
||||
let mut transaction = pool.begin().await?;
|
||||
@@ -232,7 +192,7 @@ pub async fn delete_file(
|
||||
DELETE FROM hashes
|
||||
WHERE file_id = $1
|
||||
",
|
||||
row.id
|
||||
row.id.0
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
@@ -242,7 +202,7 @@ pub async fn delete_file(
|
||||
DELETE FROM files
|
||||
WHERE files.id = $1
|
||||
",
|
||||
row.id,
|
||||
row.id.0,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
@@ -257,82 +217,72 @@ pub async fn delete_file(
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct UpdateData {
|
||||
pub loaders: Vec<Loader>,
|
||||
pub game_versions: Vec<GameVersion>,
|
||||
pub loaders: Option<Vec<String>>,
|
||||
pub game_versions: Option<Vec<String>>,
|
||||
pub version_types: Option<Vec<VersionType>>,
|
||||
}
|
||||
|
||||
#[post("{version_id}/update")]
|
||||
pub async fn get_update_from_hash(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<deadpool_redis::Pool>,
|
||||
hash_query: web::Query<HashQuery>,
|
||||
update_data: web::Json<UpdateData>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user_option = get_user_from_headers(req.headers(), &**pool, &redis)
|
||||
.await
|
||||
.ok();
|
||||
let hash = info.into_inner().0.to_lowercase();
|
||||
|
||||
// get version_id from hash
|
||||
// get mod_id from hash
|
||||
// get latest version satisfying conditions - if not found
|
||||
|
||||
let result = sqlx::query!(
|
||||
"
|
||||
SELECT v.mod_id project_id FROM hashes h
|
||||
INNER JOIN files f ON h.file_id = f.id
|
||||
INNER JOIN versions v ON v.id = f.version_id AND v.status != ALL($1)
|
||||
INNER JOIN mods m on v.mod_id = m.id
|
||||
WHERE h.algorithm = $3 AND h.hash = $2 AND m.status != ALL($4)
|
||||
ORDER BY v.date_published ASC
|
||||
",
|
||||
&*crate::models::projects::VersionStatus::iterator()
|
||||
.filter(|x| x.is_hidden())
|
||||
.map(|x| x.to_string())
|
||||
.collect::<Vec<String>>(),
|
||||
hash.as_bytes(),
|
||||
hash_query.algorithm,
|
||||
&*crate::models::projects::ProjectStatus::iterator()
|
||||
.filter(|x| x.is_hidden())
|
||||
.map(|x| x.to_string())
|
||||
.collect::<Vec<String>>(),
|
||||
if let Some(file) = database::models::Version::get_file_from_hash(
|
||||
hash_query.algorithm.clone(),
|
||||
hash,
|
||||
hash_query.version_id.map(|x| x.into()),
|
||||
&**pool,
|
||||
&redis,
|
||||
)
|
||||
.fetch_optional(&**pool)
|
||||
.await?;
|
||||
|
||||
if let Some(id) = result {
|
||||
let version_ids = database::models::Version::get_project_versions(
|
||||
database::models::ProjectId(id.project_id),
|
||||
Some(
|
||||
update_data
|
||||
.game_versions
|
||||
.clone()
|
||||
.await?
|
||||
{
|
||||
if let Some(project) =
|
||||
database::models::Project::get_id(file.project_id, &**pool, &redis).await?
|
||||
{
|
||||
let mut versions =
|
||||
database::models::Version::get_many(&project.versions, &**pool, &redis)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|x| x.0)
|
||||
.collect(),
|
||||
),
|
||||
Some(
|
||||
update_data
|
||||
.loaders
|
||||
.clone()
|
||||
.into_iter()
|
||||
.map(|x| x.0)
|
||||
.collect(),
|
||||
),
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
&**pool,
|
||||
)
|
||||
.await?;
|
||||
.filter(|x| {
|
||||
let mut bool = true;
|
||||
|
||||
if let Some(version_id) = version_ids.first() {
|
||||
let version_data = database::models::Version::get_full(*version_id, &**pool).await?;
|
||||
if let Some(version_types) = &update_data.version_types {
|
||||
bool &= version_types
|
||||
.iter()
|
||||
.any(|y| y.as_str() == x.inner.version_type);
|
||||
}
|
||||
if let Some(loaders) = &update_data.loaders {
|
||||
bool &= x.loaders.iter().any(|y| loaders.contains(y));
|
||||
}
|
||||
if let Some(game_versions) = &update_data.game_versions {
|
||||
bool &= x.game_versions.iter().any(|y| game_versions.contains(y));
|
||||
}
|
||||
|
||||
ok_or_not_found::<QueryVersion, Version>(version_data)
|
||||
} else {
|
||||
Ok(HttpResponse::NotFound().body(""))
|
||||
bool
|
||||
})
|
||||
.sorted_by(|a, b| b.inner.date_published.cmp(&a.inner.date_published))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if let Some(first) = versions.pop() {
|
||||
if !is_authorized_version(&first.inner, &user_option, &pool).await? {
|
||||
return Ok(HttpResponse::NotFound().body(""));
|
||||
}
|
||||
|
||||
return Ok(HttpResponse::Ok().json(models::projects::Version::from(first)));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
Ok(HttpResponse::NotFound().body(""))
|
||||
}
|
||||
|
||||
Ok(HttpResponse::NotFound().body(""))
|
||||
}
|
||||
|
||||
// Requests above with multiple versions below
|
||||
@@ -345,274 +295,164 @@ pub struct FileHashes {
|
||||
// under /api/v2/version_files
|
||||
#[post("")]
|
||||
pub async fn get_versions_from_hashes(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<deadpool_redis::Pool>,
|
||||
file_data: web::Json<FileHashes>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let hashes_parsed: Vec<Vec<u8>> = file_data
|
||||
.hashes
|
||||
.iter()
|
||||
.map(|x| x.to_lowercase().as_bytes().to_vec())
|
||||
.collect();
|
||||
let user_option = get_user_from_headers(req.headers(), &**pool, &redis)
|
||||
.await
|
||||
.ok();
|
||||
|
||||
let result = sqlx::query!(
|
||||
"
|
||||
SELECT h.hash hash, h.algorithm algorithm, f.version_id version_id FROM hashes h
|
||||
INNER JOIN files f ON h.file_id = f.id
|
||||
INNER JOIN versions v ON v.id = f.version_id AND v.status != ALL($1)
|
||||
INNER JOIN mods m on v.mod_id = m.id
|
||||
WHERE h.algorithm = $3 AND h.hash = ANY($2::bytea[]) AND m.status != ALL($4)
|
||||
",
|
||||
&*crate::models::projects::VersionStatus::iterator()
|
||||
.filter(|x| x.is_hidden())
|
||||
.map(|x| x.to_string())
|
||||
.collect::<Vec<String>>(),
|
||||
hashes_parsed.as_slice(),
|
||||
file_data.algorithm,
|
||||
&*crate::models::projects::ProjectStatus::iterator()
|
||||
.filter(|x| x.is_hidden())
|
||||
.map(|x| x.to_string())
|
||||
.collect::<Vec<String>>(),
|
||||
)
|
||||
.fetch_all(&**pool)
|
||||
.await?;
|
||||
|
||||
let version_ids = result
|
||||
.iter()
|
||||
.map(|x| database::models::VersionId(x.version_id))
|
||||
.collect::<Vec<_>>();
|
||||
let versions_data = database::models::Version::get_many_full(&version_ids, &**pool).await?;
|
||||
|
||||
let response: Result<HashMap<String, Version>, ApiError> = result
|
||||
.into_iter()
|
||||
.filter_map(|row| {
|
||||
versions_data
|
||||
.clone()
|
||||
.into_iter()
|
||||
.find(|x| x.inner.id.0 == row.version_id)
|
||||
.map(|v| {
|
||||
if let Ok(parsed_hash) = String::from_utf8(row.hash) {
|
||||
Ok((parsed_hash, crate::models::projects::Version::from(v)))
|
||||
} else {
|
||||
Err(ApiError::Database(DatabaseError::Other(format!(
|
||||
"Could not parse hash for version {}",
|
||||
row.version_id
|
||||
))))
|
||||
}
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
Ok(HttpResponse::Ok().json(response?))
|
||||
}
|
||||
|
||||
#[post("project")]
|
||||
pub async fn get_projects_from_hashes(
|
||||
pool: web::Data<PgPool>,
|
||||
file_data: web::Json<FileHashes>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let hashes_parsed: Vec<Vec<u8>> = file_data
|
||||
.hashes
|
||||
.iter()
|
||||
.map(|x| x.to_lowercase().as_bytes().to_vec())
|
||||
.collect();
|
||||
|
||||
let result = sqlx::query!(
|
||||
"
|
||||
SELECT h.hash hash, h.algorithm algorithm, m.id project_id FROM hashes h
|
||||
INNER JOIN files f ON h.file_id = f.id
|
||||
INNER JOIN versions v ON v.id = f.version_id AND v.status != ALL($1)
|
||||
INNER JOIN mods m on v.mod_id = m.id
|
||||
WHERE h.algorithm = $3 AND h.hash = ANY($2::bytea[]) AND m.status != ALL($4)
|
||||
",
|
||||
&*crate::models::projects::VersionStatus::iterator()
|
||||
.filter(|x| x.is_hidden())
|
||||
.map(|x| x.to_string())
|
||||
.collect::<Vec<String>>(),
|
||||
hashes_parsed.as_slice(),
|
||||
file_data.algorithm,
|
||||
&*crate::models::projects::ProjectStatus::iterator()
|
||||
.filter(|x| x.is_hidden())
|
||||
.map(|x| x.to_string())
|
||||
.collect::<Vec<String>>(),
|
||||
)
|
||||
.fetch_all(&**pool)
|
||||
.await?;
|
||||
|
||||
let project_ids = result
|
||||
.iter()
|
||||
.map(|x| database::models::ProjectId(x.project_id))
|
||||
.collect::<Vec<_>>();
|
||||
let versions_data = database::models::Project::get_many_full(&project_ids, &**pool).await?;
|
||||
|
||||
let response: Result<HashMap<String, Project>, ApiError> = result
|
||||
.into_iter()
|
||||
.filter_map(|row| {
|
||||
versions_data
|
||||
.clone()
|
||||
.into_iter()
|
||||
.find(|x| x.inner.id.0 == row.project_id)
|
||||
.map(|v| {
|
||||
if let Ok(parsed_hash) = String::from_utf8(row.hash) {
|
||||
Ok((parsed_hash, crate::models::projects::Project::from(v)))
|
||||
} else {
|
||||
Err(ApiError::Database(DatabaseError::Other(format!(
|
||||
"Could not parse hash for version {}",
|
||||
row.project_id
|
||||
))))
|
||||
}
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
Ok(HttpResponse::Ok().json(response?))
|
||||
}
|
||||
|
||||
#[post("download")]
|
||||
pub async fn download_files(
|
||||
pool: web::Data<PgPool>,
|
||||
file_data: web::Json<FileHashes>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let hashes_parsed: Vec<Vec<u8>> = file_data
|
||||
.hashes
|
||||
.iter()
|
||||
.map(|x| x.to_lowercase().as_bytes().to_vec())
|
||||
.collect();
|
||||
|
||||
let mut transaction = pool.begin().await?;
|
||||
|
||||
let result = sqlx::query!(
|
||||
"
|
||||
SELECT f.url url, h.hash hash, h.algorithm algorithm, f.version_id version_id, v.mod_id project_id FROM hashes h
|
||||
INNER JOIN files f ON h.file_id = f.id
|
||||
INNER JOIN versions v ON v.id = f.version_id AND v.status != ALL($1)
|
||||
INNER JOIN mods m on v.mod_id = m.id
|
||||
WHERE h.algorithm = $3 AND h.hash = ANY($2::bytea[]) AND m.status != ALL($4)
|
||||
",
|
||||
&*crate::models::projects::VersionStatus::iterator().filter(|x| x.is_hidden()).map(|x| x.to_string()).collect::<Vec<String>>(),
|
||||
hashes_parsed.as_slice(),
|
||||
file_data.algorithm,
|
||||
&*crate::models::projects::ProjectStatus::iterator().filter(|x| x.is_hidden()).map(|x| x.to_string()).collect::<Vec<String>>(),
|
||||
)
|
||||
.fetch_all(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
let response = result
|
||||
.into_iter()
|
||||
.map(|row| {
|
||||
if let Ok(parsed_hash) = String::from_utf8(row.hash) {
|
||||
Ok((parsed_hash, row.url))
|
||||
} else {
|
||||
Err(ApiError::Database(DatabaseError::Other(format!(
|
||||
"Could not parse hash for version {}",
|
||||
row.version_id
|
||||
))))
|
||||
}
|
||||
})
|
||||
.collect::<Result<HashMap<String, String>, ApiError>>();
|
||||
|
||||
Ok(HttpResponse::Ok().json(response?))
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct ManyUpdateData {
|
||||
pub algorithm: String,
|
||||
pub hashes: Vec<String>,
|
||||
pub loaders: Vec<Loader>,
|
||||
pub game_versions: Vec<GameVersion>,
|
||||
}
|
||||
|
||||
#[post("update")]
|
||||
pub async fn update_files(
|
||||
pool: web::Data<PgPool>,
|
||||
update_data: web::Json<ManyUpdateData>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let hashes_parsed: Vec<Vec<u8>> = update_data
|
||||
.hashes
|
||||
.iter()
|
||||
.map(|x| x.to_lowercase().as_bytes().to_vec())
|
||||
.collect();
|
||||
|
||||
let mut transaction = pool.begin().await?;
|
||||
|
||||
let result = sqlx::query!(
|
||||
"
|
||||
SELECT h.hash, v.mod_id FROM hashes h
|
||||
INNER JOIN files f ON h.file_id = f.id
|
||||
INNER JOIN versions v ON v.id = f.version_id AND v.status != ALL($1)
|
||||
INNER JOIN mods m on v.mod_id = m.id
|
||||
WHERE h.algorithm = $3 AND h.hash = ANY($2::bytea[]) AND m.status != ALL($4)
|
||||
",
|
||||
&*crate::models::projects::VersionStatus::iterator()
|
||||
.filter(|x| x.is_hidden())
|
||||
.map(|x| x.to_string())
|
||||
.collect::<Vec<String>>(),
|
||||
hashes_parsed.as_slice(),
|
||||
update_data.algorithm,
|
||||
&*crate::models::projects::ProjectStatus::iterator()
|
||||
.filter(|x| x.is_hidden())
|
||||
.map(|x| x.to_string())
|
||||
.collect::<Vec<String>>(),
|
||||
)
|
||||
.fetch_many(&mut *transaction)
|
||||
.try_filter_map(|e| async {
|
||||
Ok(e.right()
|
||||
.map(|m| (m.hash, database::models::ids::ProjectId(m.mod_id))))
|
||||
})
|
||||
.try_collect::<Vec<_>>()
|
||||
.await?;
|
||||
|
||||
let mut version_ids: HashMap<database::models::VersionId, Vec<u8>> = HashMap::new();
|
||||
|
||||
let updated_versions = database::models::Version::get_projects_versions(
|
||||
result
|
||||
.iter()
|
||||
.map(|x| x.1)
|
||||
.collect::<Vec<database::models::ProjectId>>()
|
||||
.clone(),
|
||||
Some(
|
||||
update_data
|
||||
.game_versions
|
||||
.clone()
|
||||
.iter()
|
||||
.map(|x| x.0.clone())
|
||||
.collect(),
|
||||
),
|
||||
Some(
|
||||
update_data
|
||||
.loaders
|
||||
.clone()
|
||||
.iter()
|
||||
.map(|x| x.0.clone())
|
||||
.collect(),
|
||||
),
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
let files = database::models::Version::get_files_from_hash(
|
||||
file_data.algorithm.clone(),
|
||||
&file_data.hashes,
|
||||
&**pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
|
||||
for (hash, id) in result {
|
||||
if let Some(latest_version) = updated_versions.get(&id).and_then(|x| x.last()) {
|
||||
version_ids.insert(*latest_version, hash);
|
||||
}
|
||||
}
|
||||
|
||||
let query_version_ids = version_ids.keys().copied().collect::<Vec<_>>();
|
||||
let versions = database::models::Version::get_many_full(&query_version_ids, &**pool).await?;
|
||||
let version_ids = files.iter().map(|x| x.version_id).collect::<Vec<_>>();
|
||||
let versions_data = filter_authorized_versions(
|
||||
database::models::Version::get_many(&version_ids, &**pool, &redis).await?,
|
||||
&user_option,
|
||||
&pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let mut response = HashMap::new();
|
||||
|
||||
for version in versions {
|
||||
let hash = version_ids.get(&version.inner.id);
|
||||
|
||||
if let Some(hash) = hash {
|
||||
if let Ok(parsed_hash) = String::from_utf8(hash.clone()) {
|
||||
response.insert(parsed_hash, models::projects::Version::from(version));
|
||||
} else {
|
||||
let version_id: VersionId = version.inner.id.into();
|
||||
|
||||
return Err(ApiError::Database(DatabaseError::Other(format!(
|
||||
"Could not parse hash for version {version_id}"
|
||||
))));
|
||||
for version in versions_data {
|
||||
for file in files.iter().filter(|x| x.version_id == version.id.into()) {
|
||||
if let Some(hash) = file.hashes.get(&file_data.algorithm) {
|
||||
response.insert(hash.clone(), version.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(HttpResponse::Ok().json(response))
|
||||
}
|
||||
|
||||
#[post("project")]
|
||||
pub async fn get_projects_from_hashes(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<deadpool_redis::Pool>,
|
||||
file_data: web::Json<FileHashes>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user_option = get_user_from_headers(req.headers(), &**pool, &redis)
|
||||
.await
|
||||
.ok();
|
||||
|
||||
let files = database::models::Version::get_files_from_hash(
|
||||
file_data.algorithm.clone(),
|
||||
&file_data.hashes,
|
||||
&**pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let project_ids = files.iter().map(|x| x.project_id).collect::<Vec<_>>();
|
||||
|
||||
let projects_data = filter_authorized_projects(
|
||||
database::models::Project::get_many_ids(&project_ids, &**pool, &redis).await?,
|
||||
&user_option,
|
||||
&pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let mut response = HashMap::new();
|
||||
|
||||
for project in projects_data {
|
||||
for file in files.iter().filter(|x| x.project_id == project.id.into()) {
|
||||
if let Some(hash) = file.hashes.get(&file_data.algorithm) {
|
||||
response.insert(hash.clone(), project.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(HttpResponse::Ok().json(response))
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct ManyUpdateData {
|
||||
pub algorithm: String,
|
||||
pub hashes: Vec<String>,
|
||||
pub loaders: Option<Vec<String>>,
|
||||
pub game_versions: Option<Vec<String>>,
|
||||
pub version_types: Option<Vec<VersionType>>,
|
||||
}
|
||||
|
||||
#[post("update")]
|
||||
pub async fn update_files(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<deadpool_redis::Pool>,
|
||||
update_data: web::Json<ManyUpdateData>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user_option = get_user_from_headers(req.headers(), &**pool, &redis)
|
||||
.await
|
||||
.ok();
|
||||
|
||||
let files = database::models::Version::get_files_from_hash(
|
||||
update_data.algorithm.clone(),
|
||||
&update_data.hashes,
|
||||
&**pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let projects = database::models::Project::get_many_ids(
|
||||
&files.iter().map(|x| x.project_id).collect::<Vec<_>>(),
|
||||
&**pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
let all_versions = database::models::Version::get_many(
|
||||
&projects
|
||||
.iter()
|
||||
.flat_map(|x| x.versions.clone())
|
||||
.collect::<Vec<_>>(),
|
||||
&**pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let mut response = HashMap::new();
|
||||
|
||||
for project in projects {
|
||||
for file in files.iter().filter(|x| x.project_id == project.inner.id) {
|
||||
let version = all_versions
|
||||
.iter()
|
||||
.filter(|x| {
|
||||
let mut bool = true;
|
||||
|
||||
if let Some(version_types) = &update_data.version_types {
|
||||
bool &= version_types
|
||||
.iter()
|
||||
.any(|y| y.as_str() == x.inner.version_type);
|
||||
}
|
||||
if let Some(loaders) = &update_data.loaders {
|
||||
bool &= x.loaders.iter().any(|y| loaders.contains(y));
|
||||
}
|
||||
if let Some(game_versions) = &update_data.game_versions {
|
||||
bool &= x.game_versions.iter().any(|y| game_versions.contains(y));
|
||||
}
|
||||
|
||||
bool
|
||||
})
|
||||
.sorted_by(|a, b| b.inner.date_published.cmp(&a.inner.date_published))
|
||||
.next();
|
||||
|
||||
if let Some(version) = version {
|
||||
if is_authorized_version(&version.inner, &user_option, &pool).await? {
|
||||
if let Some(hash) = file.hashes.get(&update_data.algorithm) {
|
||||
response.insert(
|
||||
hash.clone(),
|
||||
models::projects::Version::from(version.clone()),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
use super::ApiError;
|
||||
use crate::auth::{
|
||||
filter_authorized_versions, get_user_from_headers, is_authorized, is_authorized_version,
|
||||
};
|
||||
use crate::database;
|
||||
use crate::models;
|
||||
use crate::models::projects::{Dependency, FileType, VersionStatus, VersionType};
|
||||
use crate::models::teams::Permissions;
|
||||
use crate::util::auth::{
|
||||
filter_authorized_versions, get_user_from_headers, is_authorized, is_authorized_version,
|
||||
};
|
||||
use crate::util::validate::validation_errors_to_string;
|
||||
use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
|
||||
use chrono::{DateTime, Utc};
|
||||
@@ -33,8 +33,8 @@ pub struct VersionListFilters {
|
||||
pub loaders: Option<String>,
|
||||
pub featured: Option<bool>,
|
||||
pub version_type: Option<VersionType>,
|
||||
pub limit: Option<u32>,
|
||||
pub offset: Option<u32>,
|
||||
pub limit: Option<usize>,
|
||||
pub offset: Option<usize>,
|
||||
}
|
||||
|
||||
#[get("version")]
|
||||
@@ -43,38 +43,50 @@ pub async fn version_list(
|
||||
info: web::Path<(String,)>,
|
||||
web::Query(filters): web::Query<VersionListFilters>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<deadpool_redis::Pool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let string = info.into_inner().0;
|
||||
|
||||
let result = database::models::Project::get_from_slug_or_project_id(&string, &**pool).await?;
|
||||
let result = database::models::Project::get(&string, &**pool, &redis).await?;
|
||||
|
||||
let user_option = get_user_from_headers(req.headers(), &**pool).await.ok();
|
||||
let user_option = get_user_from_headers(req.headers(), &**pool, &redis)
|
||||
.await
|
||||
.ok();
|
||||
|
||||
if let Some(project) = result {
|
||||
if !is_authorized(&project, &user_option, &pool).await? {
|
||||
if !is_authorized(&project.inner, &user_option, &pool).await? {
|
||||
return Ok(HttpResponse::NotFound().body(""));
|
||||
}
|
||||
|
||||
let id = project.id;
|
||||
let version_filters = filters
|
||||
.game_versions
|
||||
.as_ref()
|
||||
.map(|x| serde_json::from_str::<Vec<String>>(x).unwrap_or_default());
|
||||
let loader_filters = filters
|
||||
.loaders
|
||||
.as_ref()
|
||||
.map(|x| serde_json::from_str::<Vec<String>>(x).unwrap_or_default());
|
||||
let mut versions = database::models::Version::get_many(&project.versions, &**pool, &redis)
|
||||
.await?
|
||||
.into_iter()
|
||||
.skip(filters.offset.unwrap_or(0))
|
||||
.take(filters.limit.unwrap_or(usize::MAX))
|
||||
.filter(|x| {
|
||||
let mut bool = true;
|
||||
|
||||
let version_ids = database::models::Version::get_project_versions(
|
||||
id,
|
||||
filters
|
||||
.game_versions
|
||||
.as_ref()
|
||||
.map(|x| serde_json::from_str(x).unwrap_or_default()),
|
||||
filters
|
||||
.loaders
|
||||
.as_ref()
|
||||
.map(|x| serde_json::from_str(x).unwrap_or_default()),
|
||||
filters.version_type,
|
||||
filters.limit,
|
||||
filters.offset,
|
||||
&**pool,
|
||||
)
|
||||
.await?;
|
||||
if let Some(version_type) = filters.version_type {
|
||||
bool &= &*x.inner.version_type == version_type.as_str();
|
||||
}
|
||||
if let Some(loaders) = &loader_filters {
|
||||
bool &= x.loaders.iter().any(|y| loaders.contains(y));
|
||||
}
|
||||
if let Some(game_versions) = &version_filters {
|
||||
bool &= x.game_versions.iter().any(|y| game_versions.contains(y));
|
||||
}
|
||||
|
||||
let mut versions = database::models::Version::get_many_full(&version_ids, &**pool).await?;
|
||||
bool
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let mut response = versions
|
||||
.iter()
|
||||
@@ -139,12 +151,15 @@ pub async fn version_project_get(
req: HttpRequest,
info: web::Path<(String, String)>,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
let id = info.into_inner();
let version_data =
database::models::Version::get_full_from_id_slug(&id.0, &id.1, &**pool).await?;
database::models::Version::get_full_from_id_slug(&id.0, &id.1, &**pool, &redis).await?;

let user_option = get_user_from_headers(req.headers(), &**pool).await.ok();
let user_option = get_user_from_headers(req.headers(), &**pool, &redis)
.await
.ok();

if let Some(data) = version_data {
if is_authorized_version(&data.inner, &user_option, &pool).await? {
@@ -165,14 +180,17 @@ pub async fn versions_get(
req: HttpRequest,
web::Query(ids): web::Query<VersionIds>,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
let version_ids = serde_json::from_str::<Vec<models::ids::VersionId>>(&ids.ids)?
.into_iter()
.map(|x| x.into())
.collect::<Vec<database::models::VersionId>>();
let versions_data = database::models::Version::get_many_full(&version_ids, &**pool).await?;
let versions_data = database::models::Version::get_many(&version_ids, &**pool, &redis).await?;

let user_option = get_user_from_headers(req.headers(), &**pool).await.ok();
let user_option = get_user_from_headers(req.headers(), &**pool, &redis)
.await
.ok();

let versions = filter_authorized_versions(versions_data, &user_option, &pool).await?;

@@ -184,11 +202,14 @@ pub async fn version_get(
req: HttpRequest,
info: web::Path<(models::ids::VersionId,)>,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
let id = info.into_inner().0;
let version_data = database::models::Version::get_full(id.into(), &**pool).await?;
let version_data = database::models::Version::get(id.into(), &**pool, &redis).await?;

let user_option = get_user_from_headers(req.headers(), &**pool).await.ok();
let user_option = get_user_from_headers(req.headers(), &**pool, &redis)
.await
.ok();

if let Some(data) = version_data {
if is_authorized_version(&data.inner, &user_option, &pool).await? {
@@ -240,9 +261,10 @@ pub async fn version_edit(
req: HttpRequest,
info: web::Path<(models::ids::VersionId,)>,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
new_version: web::Json<EditVersion>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(req.headers(), &**pool).await?;
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;

new_version
.validate()
@@ -251,14 +273,15 @@ pub async fn version_edit(
let version_id = info.into_inner().0;
let id = version_id.into();

let result = database::models::Version::get_full(id, &**pool).await?;
let result = database::models::Version::get(id, &**pool, &redis).await?;

if let Some(version_item) = result {
let project_item =
database::models::Project::get_full(version_item.inner.project_id, &**pool).await?;
database::models::Project::get_id(version_item.inner.project_id, &**pool, &redis)
.await?;

let team_member = database::models::TeamMember::get_from_user_id_version(
version_item.inner.id,
let team_member = database::models::TeamMember::get_from_user_id_project(
version_item.inner.project_id,
user.id.into(),
&**pool,
)
@@ -390,12 +413,6 @@ pub async fn version_edit(
.execute(&mut *transaction)
.await?;
}

database::models::Project::update_game_versions(
version_item.inner.project_id,
&mut transaction,
)
.await?;
}

if let Some(loaders) = &new_version.loaders {
@@ -429,12 +446,6 @@ pub async fn version_edit(
.execute(&mut *transaction)
.await?;
}

database::models::Project::update_loaders(
version_item.inner.project_id,
&mut transaction,
)
.await?;
}

if let Some(featured) = &new_version.featured {
@@ -595,6 +606,14 @@ pub async fn version_edit(
}
}

database::models::Version::clear_cache(&version_item, &redis).await?;
database::models::Project::clear_cache(
version_item.inner.project_id,
None,
Some(true),
&redis,
)
.await?;
transaction.commit().await?;
Ok(HttpResponse::NoContent().body(""))
} else {
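Worth noting in the edit path above: after the update statements run against the open transaction, the handler drops both the cached version entry and the owning project's cached data, then commits. The ordering, annotated as a sketch; the meaning of the two optional arguments to Project::clear_cache is inferred from the call site, not documented in this diff:

    // ... UPDATE statements executed against &mut *transaction above ...
    database::models::Version::clear_cache(&version_item, &redis).await?;
    database::models::Project::clear_cache(
        version_item.inner.project_id,
        None,       // presumably the project slug, unknown at this call site
        Some(true), // presumably "also drop the cached version list"
        &redis,
    )
    .await?;
    transaction.commit().await?;
    Ok(HttpResponse::NoContent().body(""))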
@@ -618,9 +637,10 @@ pub async fn version_schedule(
req: HttpRequest,
info: web::Path<(models::ids::VersionId,)>,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
scheduling_data: web::Json<SchedulingData>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(req.headers(), &**pool).await?;
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;

if scheduling_data.time < Utc::now() {
return Err(ApiError::InvalidInput(
@@ -635,11 +655,11 @@ pub async fn version_schedule(
}

let string = info.into_inner().0;
let result = database::models::Version::get_full(string.into(), &**pool).await?;
let result = database::models::Version::get(string.into(), &**pool, &redis).await?;

if let Some(version_item) = result {
let team_member = database::models::TeamMember::get_from_user_id_version(
version_item.inner.id,
let team_member = database::models::TeamMember::get_from_user_id_project(
version_item.inner.project_id,
user.id.into(),
&**pool,
)
@@ -655,6 +675,7 @@ pub async fn version_schedule(
));
}

let mut transaction = pool.begin().await?;
sqlx::query!(
"
UPDATE versions
@@ -665,9 +686,12 @@ pub async fn version_schedule(
scheduling_data.time,
version_item.inner.id as database::models::ids::VersionId,
)
.execute(&**pool)
.execute(&mut *transaction)
.await?;

database::models::Version::clear_cache(&version_item, &redis).await?;
transaction.commit().await?;

Ok(HttpResponse::NoContent().body(""))
} else {
Ok(HttpResponse::NotFound().body(""))
@@ -679,13 +703,20 @@ pub async fn version_delete(
req: HttpRequest,
info: web::Path<(models::ids::VersionId,)>,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(req.headers(), &**pool).await?;
let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;
let id = info.into_inner().0;

let version = database::models::Version::get(id.into(), &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified version does not exist!".to_string())
})?;

if !user.role.is_admin() {
let team_member = database::models::TeamMember::get_from_user_id_version(
id.into(),
let team_member = database::models::TeamMember::get_from_user_id_project(
version.inner.project_id,
user.id.into(),
&**pool,
)
@@ -709,7 +740,11 @@ pub async fn version_delete(

let mut transaction = pool.begin().await?;

let result = database::models::Version::remove_full(id.into(), &mut transaction).await?;
let result =
database::models::Version::remove_full(version.inner.id, &redis, &mut transaction).await?;

database::models::Project::clear_cache(version.inner.project_id, None, Some(true), &redis)
.await?;

transaction.commit().await?;

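Two changes recur in the schedule and delete handlers above (and in version_edit earlier): permission checks now resolve team membership through the owning project rather than through the version row, and destructive operations take the redis pool so the affected cache entries can be dropped. A sketch of the permission lookup, using the crate-internal types named in the hunks; the follow-up guard is illustrative and not copied from the diff:

    // Membership is resolved via the parent project's team, one code path for edit/schedule/delete.
    let team_member = database::models::TeamMember::get_from_user_id_project(
        version.inner.project_id,
        user.id.into(),
        &**pool,
    )
    .await?;
    // Illustrative guard: non-admins must be on the project's team to continue.
    if !user.role.is_admin() && team_member.is_none() {
        return Ok(HttpResponse::NotFound().body(""));
    }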
588
src/util/auth.rs
588
src/util/auth.rs
@@ -1,588 +0,0 @@
use crate::database;
use crate::database::models::project_item::QueryProject;
use crate::database::models::user_item;
use crate::database::models::version_item::QueryVersion;
use crate::database::{models, Project, Version};
use crate::models::users::{Badges, Role, User, UserId, UserPayoutData};
use crate::routes::ApiError;
use crate::Utc;
use actix_web::http::header::HeaderMap;
use actix_web::http::header::COOKIE;
use actix_web::web;
use reqwest::header::{HeaderValue, AUTHORIZATION};
use rust_decimal::Decimal;
use serde::{Deserialize, Serialize};
use serde_with::serde_as;
use serde_with::DisplayFromStr;
use sqlx::PgPool;
use thiserror::Error;

use super::pat::get_user_from_pat;

#[derive(Error, Debug)]
pub enum AuthenticationError {
#[error("An unknown database error occurred")]
Sqlx(#[from] sqlx::Error),
#[error("Database Error: {0}")]
Database(#[from] models::DatabaseError),
#[error("Error while parsing JSON: {0}")]
SerDe(#[from] serde_json::Error),
#[error("Error while communicating over the internet: {0}")]
Reqwest(#[from] reqwest::Error),
#[error("Error while decoding PAT: {0}")]
Decoding(#[from] crate::models::ids::DecodingError),
#[error("Invalid Authentication Credentials")]
InvalidCredentials,
#[error("Authentication method was not valid")]
InvalidAuthMethod,
#[error("GitHub Token from incorrect Client ID")]
InvalidClientId,
}

// A user as stored in the Minos database
#[derive(Serialize, Deserialize, Debug)]
pub struct MinosUser {
pub id: String, // This is the unique generated Ory name
pub username: String, // unique username
pub email: String,
pub name: Option<String>, // real name
pub github_id: Option<u64>,
pub discord_id: Option<u64>,
pub google_id: Option<u128>,
pub gitlab_id: Option<u64>,
pub microsoft_id: Option<u64>,
pub apple_id: Option<u64>,
}

// A payload marking a new user in Minos, with data to be inserted into Labrinth
#[serde_as]
#[derive(Deserialize, Debug)]
pub struct MinosNewUser {
pub id: String, // This is the unique generated Ory name
pub username: String, // unique username
pub email: String,

pub name: Option<String>, // real name
pub default_picture: Option<String>, // uri of default avatar
#[serde_as(as = "Option<DisplayFromStr>")]
pub github_id: Option<i64>, // we allow Github to be submitted to connect to an existing account
}

// Attempt to append a Minos user to an existing user, if one exists
// (combining the legacy user with the Minos user)
pub async fn link_or_insert_new_user(
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
minos_new_user: MinosNewUser,
) -> Result<(), AuthenticationError> {
// If the user with this Github ID already exists, we can just merge the two accounts
if let Some(github_id) = minos_new_user.github_id {
if let Some(existing_user) =
user_item::User::get_from_github_id(github_id as u64, &mut *transaction).await?
{
existing_user
.merge_minos_user(&minos_new_user.id, &mut *transaction)
.await?;
return Ok(());
}
}
// No user exists, so we need to create a new user
insert_new_user(transaction, minos_new_user).await?;

Ok(())
}

// Insert a new user into the database from a MinosUser
pub async fn insert_new_user(
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
minos_new_user: MinosNewUser,
) -> Result<(), AuthenticationError> {
let user_id = crate::database::models::generate_user_id(transaction).await?;

database::models::User {
id: user_id,
kratos_id: Some(minos_new_user.id),
username: minos_new_user.username,
name: minos_new_user.name,
email: Some(minos_new_user.email),
avatar_url: minos_new_user.default_picture,
bio: None,
github_id: minos_new_user.github_id,
created: Utc::now(),
role: Role::Developer.to_string(),
badges: Badges::default(),
balance: Decimal::ZERO,
payout_wallet: None,
payout_wallet_type: None,
payout_address: None,
}
.insert(transaction)
.await?;

Ok(())
}

// Gets MinosUser from Kratos ID
// This uses an administrative bearer token to access the Minos API
// Should NOT be directly accessible to users
pub async fn get_minos_user(kratos_id: &str) -> Result<MinosUser, AuthenticationError> {
let ory_auth_bearer = dotenvy::var("ORY_AUTH_BEARER").unwrap();
let req = reqwest::Client::new()
.get(format!(
"{}/admin/user/{kratos_id}",
dotenvy::var("MINOS_URL").unwrap()
))
.header(reqwest::header::USER_AGENT, "Labrinth")
.header(
reqwest::header::AUTHORIZATION,
format!("Bearer {ory_auth_bearer}"),
);
let res = req.send().await?.error_for_status()?;
let res = res.json().await?;
Ok(res)
}

// pass the cookies to Minos to get the user.
pub async fn get_minos_user_from_cookies(cookies: &str) -> Result<MinosUser, AuthenticationError> {
let req = reqwest::Client::new()
.get(dotenvy::var("MINOS_URL").unwrap() + "/user")
.header(reqwest::header::USER_AGENT, "Modrinth")
.header(reqwest::header::COOKIE, cookies);
let res = req.send().await?;

let res = match res.status() {
reqwest::StatusCode::OK => res,
reqwest::StatusCode::UNAUTHORIZED => return Err(AuthenticationError::InvalidCredentials),
_ => res.error_for_status()?,
};
Ok(res.json().await?)
}

pub async fn get_user_from_headers<'a, E>(
headers: &HeaderMap,
executor: E,
) -> Result<User, AuthenticationError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
let token: Option<&reqwest::header::HeaderValue> = headers.get(AUTHORIZATION);
let cookies_unparsed: Option<&reqwest::header::HeaderValue> = headers.get(COOKIE);

// Fetch DB user record and minos user from headers
let (db_user, minos_user) = match (token, cookies_unparsed) {
// If both, favour the bearer token first- redirect to cookie on failure
(Some(token), Some(cookies)) => {
match get_db_and_minos_user_from_bearer_token(token, executor).await {
Ok((db, minos)) => (db, minos),
Err(_) => get_db_and_minos_user_from_cookies(cookies, executor).await?,
}
}
(Some(token), _) => get_db_and_minos_user_from_bearer_token(token, executor).await?,
(_, Some(cookies)) => get_db_and_minos_user_from_cookies(cookies, executor).await?,
_ => return Err(AuthenticationError::InvalidAuthMethod), // No credentials passed
};

let user = User {
id: UserId::from(db_user.id),
kratos_id: db_user.kratos_id,
github_id: minos_user.github_id,
discord_id: minos_user.discord_id,
google_id: minos_user.google_id,
microsoft_id: minos_user.microsoft_id,
apple_id: minos_user.apple_id,
gitlab_id: minos_user.gitlab_id,
username: db_user.username,
name: db_user.name,
email: db_user.email,
avatar_url: db_user.avatar_url,
bio: db_user.bio,
created: db_user.created,
role: Role::from_string(&db_user.role),
badges: db_user.badges,
payout_data: Some(UserPayoutData {
balance: db_user.balance,
payout_wallet: db_user.payout_wallet,
payout_wallet_type: db_user.payout_wallet_type,
payout_address: db_user.payout_address,
}),
};
Ok(user)
}

pub async fn get_user_from_headers_transaction(
headers: &HeaderMap,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<User, AuthenticationError> {
let token: Option<&reqwest::header::HeaderValue> = headers.get(AUTHORIZATION);
let cookies_unparsed: Option<&reqwest::header::HeaderValue> = headers.get(COOKIE);

// Fetch DB user record and minos user from headers
let (db_user, minos_user) = match (token, cookies_unparsed) {
// If both, favour the bearer token first- redirect to cookie on failure
(Some(token), Some(cookies)) => {
match get_db_and_minos_user_from_bearer_token(token, &mut *transaction).await {
Ok((db, minos)) => (db, minos),
Err(_) => get_db_and_minos_user_from_cookies(cookies, &mut *transaction).await?,
}
}
(Some(token), _) => {
get_db_and_minos_user_from_bearer_token(token, &mut *transaction).await?
}
(_, Some(cookies)) => {
get_db_and_minos_user_from_cookies(cookies, &mut *transaction).await?
}
_ => return Err(AuthenticationError::InvalidAuthMethod), // No credentials passed
};

let user = User {
id: UserId::from(db_user.id),
kratos_id: db_user.kratos_id,
github_id: minos_user.github_id,
discord_id: minos_user.discord_id,
google_id: minos_user.google_id,
microsoft_id: minos_user.microsoft_id,
apple_id: minos_user.apple_id,
gitlab_id: minos_user.gitlab_id,
username: db_user.username,
name: db_user.name,
email: db_user.email,
avatar_url: db_user.avatar_url,
bio: db_user.bio,
created: db_user.created,
role: Role::from_string(&db_user.role),
badges: db_user.badges,
payout_data: Some(UserPayoutData {
balance: db_user.balance,
payout_wallet: db_user.payout_wallet,
payout_wallet_type: db_user.payout_wallet_type,
payout_address: db_user.payout_address,
}),
};
Ok(user)
}

pub async fn get_db_and_minos_user_from_bearer_token<'a, E>(
token: &HeaderValue,
executor: E,
) -> Result<(user_item::User, MinosUser), AuthenticationError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
let db_user = get_user_record_from_bearer_token(
token
.to_str()
.map_err(|_| AuthenticationError::InvalidCredentials)?,
executor,
)
.await?
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
let minos_user = get_minos_user(
&db_user
.kratos_id
.clone()
.ok_or_else(|| AuthenticationError::InvalidCredentials)?,
)
.await?;
Ok((db_user, minos_user))
}

pub async fn get_db_and_minos_user_from_cookies<'a, E>(
cookies: &HeaderValue,
executor: E,
) -> Result<(user_item::User, MinosUser), AuthenticationError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
let minos_user = get_minos_user_from_cookies(
cookies
.to_str()
.map_err(|_| AuthenticationError::InvalidCredentials)?,
)
.await?;
let db_user = models::User::get_from_minos_kratos_id(minos_user.id.clone(), executor)
.await?
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
Ok((db_user, minos_user))
}

pub async fn get_user_record_from_bearer_token<'a, 'b, E>(
token: &str,
executor: E,
) -> Result<Option<user_item::User>, AuthenticationError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
if token.starts_with("Bearer ") {
let token: &str = token.trim_start_matches("Bearer ");

// Tokens beginning with Ory are considered to be Kratos tokens (in reality, extracted cookies) and can be forwarded to Minos
let possible_user = match token.split_once('_') {
Some(("modrinth", _)) => get_user_from_pat(token, executor).await?,
Some(("ory", _)) => get_user_from_minos_session_token(token, executor).await?,
Some(("github", _)) | Some(("gho", _)) | Some(("ghp", _)) => {
get_user_from_github_token(token, executor).await?
}
_ => return Err(AuthenticationError::InvalidAuthMethod),
};
Ok(possible_user)
} else {
Err(AuthenticationError::InvalidAuthMethod)
}
}

pub async fn get_user_from_minos_session_token<'a, 'b, E>(
token: &str,
executor: E,
) -> Result<Option<user_item::User>, AuthenticationError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
let ory_auth_bearer = dotenvy::var("ORY_AUTH_BEARER").unwrap();
let req = reqwest::Client::new()
.get(dotenvy::var("MINOS_URL").unwrap() + "/admin/user/token?token=" + token)
.header(reqwest::header::USER_AGENT, "Labrinth")
.header(
reqwest::header::AUTHORIZATION,
format!("Bearer {ory_auth_bearer}"),
);
let res = req.send().await?.error_for_status()?;
let minos_user: MinosUser = res.json().await?;
let db_user = models::User::get_from_minos_kratos_id(minos_user.id.clone(), executor).await?;
Ok(db_user)
}

#[derive(Serialize, Deserialize, Debug)]
pub struct GitHubUser {
pub id: u64,
}
// Get a database user from a GitHub PAT
pub async fn get_user_from_github_token<'a, E>(
access_token: &str,
executor: E,
) -> Result<Option<user_item::User>, AuthenticationError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
let response = reqwest::Client::new()
.get("https://api.github.com/user")
.header(reqwest::header::USER_AGENT, "Modrinth")
.header(AUTHORIZATION, format!("token {access_token}"))
.send()
.await?;

if access_token.starts_with("gho_") {
let client_id = response
.headers()
.get("x-oauth-client-id")
.and_then(|x| x.to_str().ok());

if client_id != Some(&*dotenvy::var("GITHUB_CLIENT_ID").unwrap()) {
return Err(AuthenticationError::InvalidClientId);
}
}

let github_user: GitHubUser = response.json().await?;

Ok(user_item::User::get_from_github_id(github_user.id, executor).await?)
}

pub async fn check_is_moderator_from_headers<'a, 'b, E>(
headers: &HeaderMap,
executor: E,
) -> Result<User, AuthenticationError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
let user = get_user_from_headers(headers, executor).await?;

if user.role.is_mod() {
Ok(user)
} else {
Err(AuthenticationError::InvalidCredentials)
}
}

pub async fn is_authorized(
project_data: &Project,
user_option: &Option<User>,
pool: &web::Data<PgPool>,
) -> Result<bool, ApiError> {
let mut authorized = !project_data.status.is_hidden();

if let Some(user) = &user_option {
if !authorized {
if user.role.is_mod() {
authorized = true;
} else {
let user_id: models::ids::UserId = user.id.into();

let project_exists = sqlx::query!(
"SELECT EXISTS(SELECT 1 FROM team_members WHERE team_id = $1 AND user_id = $2)",
project_data.team_id as database::models::ids::TeamId,
user_id as database::models::ids::UserId,
)
.fetch_one(&***pool)
.await?
.exists;

authorized = project_exists.unwrap_or(false);
}
}
}

Ok(authorized)
}

pub async fn filter_authorized_projects(
projects: Vec<QueryProject>,
user_option: &Option<User>,
pool: &web::Data<PgPool>,
) -> Result<Vec<crate::models::projects::Project>, ApiError> {
let mut return_projects = Vec::new();
let mut check_projects = Vec::new();

for project in projects {
if !project.inner.status.is_hidden()
|| user_option
.as_ref()
.map(|x| x.role.is_mod())
.unwrap_or(false)
{
return_projects.push(project.into());
} else if user_option.is_some() {
check_projects.push(project);
}
}

if !check_projects.is_empty() {
if let Some(user) = user_option {
let user_id: models::ids::UserId = user.id.into();

use futures::TryStreamExt;

sqlx::query!(
"
SELECT m.id id, m.team_id team_id FROM team_members tm
INNER JOIN mods m ON m.team_id = tm.team_id
WHERE tm.team_id = ANY($1) AND tm.user_id = $2
",
&check_projects
.iter()
.map(|x| x.inner.team_id.0)
.collect::<Vec<_>>(),
user_id as database::models::ids::UserId,
)
.fetch_many(&***pool)
.try_for_each(|e| {
if let Some(row) = e.right() {
check_projects.retain(|x| {
let bool = x.inner.id.0 == row.id && x.inner.team_id.0 == row.team_id;

if bool {
return_projects.push(x.clone().into());
}

!bool
});
}

futures::future::ready(Ok(()))
})
.await?;
}
}

Ok(return_projects)
}

pub async fn is_authorized_version(
version_data: &Version,
user_option: &Option<User>,
pool: &web::Data<PgPool>,
) -> Result<bool, ApiError> {
let mut authorized = !version_data.status.is_hidden();

if let Some(user) = &user_option {
if !authorized {
if user.role.is_mod() {
authorized = true;
} else {
let user_id: models::ids::UserId = user.id.into();

let version_exists = sqlx::query!(
"SELECT EXISTS(SELECT 1 FROM mods m INNER JOIN team_members tm ON tm.team_id = m.team_id AND user_id = $2 WHERE m.id = $1)",
version_data.project_id as database::models::ids::ProjectId,
user_id as database::models::ids::UserId,
)
.fetch_one(&***pool)
.await?
.exists;

authorized = version_exists.unwrap_or(false);
}
}
}

Ok(authorized)
}

pub async fn filter_authorized_versions(
versions: Vec<QueryVersion>,
user_option: &Option<User>,
pool: &web::Data<PgPool>,
) -> Result<Vec<crate::models::projects::Version>, ApiError> {
let mut return_versions = Vec::new();
let mut check_versions = Vec::new();

for version in versions {
if !version.inner.status.is_hidden()
|| user_option
.as_ref()
.map(|x| x.role.is_mod())
.unwrap_or(false)
{
return_versions.push(version.into());
} else if user_option.is_some() {
check_versions.push(version);
}
}

if !check_versions.is_empty() {
if let Some(user) = user_option {
let user_id: models::ids::UserId = user.id.into();

use futures::TryStreamExt;

sqlx::query!(
"
SELECT m.id FROM mods m
INNER JOIN team_members tm ON tm.team_id = m.team_id AND user_id = $2
WHERE m.id = ANY($1)
",
&check_versions
.iter()
.map(|x| x.inner.project_id.0)
.collect::<Vec<_>>(),
user_id as database::models::ids::UserId,
)
.fetch_many(&***pool)
.try_for_each(|e| {
if let Some(row) = e.right() {
check_versions.retain(|x| {
let bool = x.inner.project_id.0 == row.id;

if bool {
return_versions.push(x.clone().into());
}

!bool
});
}

futures::future::ready(Ok(()))
})
.await?;
}
}

Ok(return_versions)
}
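The entire Minos/Kratos-era auth module above is removed by this commit. The route hunks earlier in the diff show the shape its replacement is called with: the header-based user lookup now also takes the redis pool, and the authorization helpers operate on the .inner of the cached query wrappers. A sketch of the new caller shape, taken from those hunks (the replacement module itself is not part of this diff):

    // Inside a handler that already has pool: web::Data<PgPool> and redis: web::Data<deadpool_redis::Pool>.
    let user = get_user_from_headers(req.headers(), &**pool, &redis).await?;
    if !is_authorized(&project.inner, &Some(user), &pool).await? {
        return Ok(HttpResponse::NotFound().body(""));
    }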
@@ -9,6 +9,17 @@ pub fn get_image_content_type(extension: &str) -> Option<&'static str> {
}
}

pub fn get_image_ext(content_type: &str) -> Option<&'static str> {
match content_type {
"image/bmp" => Some("bmp"),
"image/gif" => Some("gif"),
"image/jpeg" => Some("jpg"),
"image/png" => Some("png"),
"image/webp" => Some("webp"),
_ => None,
}
}

pub fn project_file_type(ext: &str) -> Option<&str> {
match ext {
"jar" => Some("application/java-archive"),

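The new get_image_ext helper is the inverse of get_image_content_type above: it maps a MIME type back to a canonical file extension. A usage sketch (the icon-naming scenario is illustrative, not taken from this diff):

    // e.g. deriving a storage file name from an uploaded icon's reported content type
    let ext = get_image_ext("image/jpeg").unwrap_or("png"); // -> "jpg"
    let file_name = format!("icon.{ext}");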
@@ -1,9 +1,7 @@
pub mod auth;
pub mod env;
pub mod ext;
pub mod guards;
pub mod img;
pub mod pat;
pub mod routes;
pub mod validate;
pub mod webhook;

@@ -2,10 +2,8 @@ use crate::routes::v2::project_creation::CreateError;
use crate::routes::ApiError;
use actix_multipart::Field;
use actix_web::web::Payload;
use actix_web::HttpResponse;
use bytes::BytesMut;
use futures::StreamExt;
use serde::Serialize;

pub async fn read_from_payload(
payload: &mut Payload,
@@ -40,14 +38,3 @@ pub async fn read_from_field(
}
Ok(bytes)
}

pub(crate) fn ok_or_not_found<T, U>(version_data: Option<T>) -> Result<HttpResponse, ApiError>
where
U: From<T> + Serialize,
{
if let Some(data) = version_data {
Ok(HttpResponse::Ok().json(U::from(data)))
} else {
Ok(HttpResponse::NotFound().body(""))
}
}
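The new ok_or_not_found helper folds the repeated "if let Some(...) { 200 with JSON } else { 404 }" pattern from the version routes into one call: it converts the inner value with U::from and serializes it on a hit, and returns an empty 404 otherwise. A usage sketch, with the concrete model types chosen for illustration only:

    // Respond with the public Version model if the row exists, or 404 if it does not.
    let version_data = database::models::Version::get(id.into(), &**pool, &redis).await?;
    ok_or_not_found::<_, models::projects::Version>(version_data)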