You've already forked AstralRinth
forked from didirus/AstralRinth
move to monorepo dir
This commit is contained in:
218
apps/labrinth/src/routes/analytics.rs
Normal file
218
apps/labrinth/src/routes/analytics.rs
Normal file
@@ -0,0 +1,218 @@
|
||||
use crate::auth::get_user_from_headers;
|
||||
use crate::database::redis::RedisPool;
|
||||
use crate::models::analytics::{PageView, Playtime};
|
||||
use crate::models::pats::Scopes;
|
||||
use crate::queue::analytics::AnalyticsQueue;
|
||||
use crate::queue::maxmind::MaxMindIndexer;
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::routes::ApiError;
|
||||
use crate::util::date::get_current_tenths_of_ms;
|
||||
use crate::util::env::parse_strings_from_var;
|
||||
use actix_web::{post, web};
|
||||
use actix_web::{HttpRequest, HttpResponse};
|
||||
use serde::Deserialize;
|
||||
use sqlx::PgPool;
|
||||
use std::collections::HashMap;
|
||||
use std::net::{AddrParseError, IpAddr, Ipv4Addr, Ipv6Addr};
|
||||
use std::sync::Arc;
|
||||
use url::Url;
|
||||
|
||||
/// Request-header names (all lowercase) that are stripped before a page view
/// or download's headers are persisted to analytics.
///
/// NOTE(review): callers are expected to lowercase header keys before
/// filtering against this list — `page_view_ingest` lowercases up front,
/// `count_download` lowercases inside its filter closure.
pub const FILTERED_HEADERS: &[&str] = &[
    // credentials / secrets must never be stored
    "authorization",
    "cookie",
    "modrinth-admin",
    // we already retrieve/use these elsewhere- so they are unneeded
    "user-agent",
    "cf-connecting-ip",
    "cf-ipcountry",
    "x-forwarded-for",
    "x-real-ip",
    // We don't need the information vercel provides from its headers
    "x-vercel-ip-city",
    "x-vercel-ip-timezone",
    "x-vercel-ip-longitude",
    "x-vercel-proxy-signature",
    "x-vercel-ip-country-region",
    "x-vercel-forwarded-for",
    "x-vercel-proxied-for",
    "x-vercel-proxy-signature-ts",
    "x-vercel-ip-latitude",
    "x-vercel-ip-country",
];
|
||||
|
||||
/// Normalizes a textual IP address into an [`Ipv6Addr`].
///
/// IPv4 addresses become their IPv4-mapped IPv6 form (`::ffff:a.b.c.d`);
/// IPv6 addresses are returned unchanged.
///
/// # Errors
/// Returns [`AddrParseError`] when `src` is not a valid IPv4 or IPv6 address.
pub fn convert_to_ip_v6(src: &str) -> Result<Ipv6Addr, AddrParseError> {
    src.parse::<IpAddr>().map(|addr| match addr {
        IpAddr::V4(v4) => v4.to_ipv6_mapped(),
        IpAddr::V6(v6) => v6,
    })
}
|
||||
|
||||
/// JSON body for `POST view`: the full URL of the page that was viewed.
#[derive(Deserialize)]
pub struct UrlInput {
    // Parsed and domain-validated in `page_view_ingest`.
    url: String,
}
|
||||
|
||||
// This route should be behind the Cloudflare WAF to prevent non-browsers from calling it.
/// Records a page view into the analytics queue.
///
/// Flow: optionally resolve the requesting user, validate the reported URL's
/// domain, collect (lowercased) request headers, resolve the client IP and
/// country, then attribute the view to a project when the path looks like
/// `/{project_type}/{slug-or-id}`.
///
/// Responds `204 No Content` on success.
#[post("view")]
pub async fn page_view_ingest(
    req: HttpRequest,
    maxmind: web::Data<Arc<MaxMindIndexer>>,
    analytics_queue: web::Data<Arc<AnalyticsQueue>>,
    session_queue: web::Data<AuthQueue>,
    url_input: web::Json<UrlInput>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    // Auth is optional here — anonymous views are still recorded (user_id 0).
    let user = get_user_from_headers(&req, &**pool, &redis, &session_queue, None)
        .await
        .ok();
    let conn_info = req.connection_info().peer_addr().map(|x| x.to_string());

    let url = Url::parse(&url_input.url)
        .map_err(|_| ApiError::InvalidInput("invalid page view URL specified!".to_string()))?;

    let domain = url
        .host_str()
        .ok_or_else(|| ApiError::InvalidInput("invalid page view URL specified!".to_string()))?;

    // Only accept views for modrinth.com domains, unless CORS is wide open
    // (the "*" escape hatch is presumably for local/dev setups — verify).
    let allowed_origins = parse_strings_from_var("CORS_ALLOWED_ORIGINS").unwrap_or_default();
    if !(domain.ends_with(".modrinth.com")
        || domain == "modrinth.com"
        || allowed_origins.contains(&"*".to_string()))
    {
        return Err(ApiError::InvalidInput(
            "invalid page view URL specified!".to_string(),
        ));
    }

    // Lowercase keys so lookups and FILTERED_HEADERS checks are case-insensitive.
    let headers = req
        .headers()
        .into_iter()
        .map(|(key, val)| {
            (
                key.to_string().to_lowercase(),
                val.to_str().unwrap_or_default().to_string(),
            )
        })
        .collect::<HashMap<String, String>>();

    // Prefer Cloudflare's client-IP header; fall back to the peer address,
    // and to loopback if neither parses.
    let ip = convert_to_ip_v6(if let Some(header) = headers.get("cf-connecting-ip") {
        header
    } else {
        conn_info.as_deref().unwrap_or_default()
    })
    .unwrap_or_else(|_| Ipv4Addr::new(127, 0, 0, 1).to_ipv6_mapped());

    let mut view = PageView {
        recorded: get_current_tenths_of_ms(),
        domain: domain.to_string(),
        site_path: url.path().to_string(),
        // Filled in below when the user / project can be resolved.
        user_id: 0,
        project_id: 0,
        ip,
        country: maxmind.query(ip).await.unwrap_or_default(),
        user_agent: headers.get("user-agent").cloned().unwrap_or_default(),
        headers: headers
            .into_iter()
            .filter(|x| !FILTERED_HEADERS.contains(&&*x.0))
            .collect(),
        monetized: true,
    };

    // Attribute the view to a project when the path is /{project_type}/{id}.
    if let Some(segments) = url.path_segments() {
        let segments_vec = segments.collect::<Vec<_>>();

        if segments_vec.len() >= 2 {
            const PROJECT_TYPES: &[&str] = &[
                "mod",
                "modpack",
                "plugin",
                "resourcepack",
                "shader",
                "datapack",
            ];

            if PROJECT_TYPES.contains(&segments_vec[0]) {
                let project =
                    crate::database::models::Project::get(segments_vec[1], &**pool, &redis).await?;

                if let Some(project) = project {
                    view.project_id = project.inner.id.0 as u64;
                }
            }
        }
    }

    if let Some((_, user)) = user {
        view.user_id = user.id.0;
    }

    analytics_queue.add_view(view);

    Ok(HttpResponse::NoContent().body(""))
}
|
||||
|
||||
/// Per-version playtime report submitted by the launcher.
#[derive(Deserialize, Debug)]
pub struct PlaytimeInput {
    // Seconds played since the last report; entries over 300 are discarded
    // by `playtime_ingest`.
    seconds: u16,
    loader: String,
    game_version: String,
    // Version of the enclosing modpack, if the played version was inside one.
    parent: Option<crate::models::ids::VersionId>,
}
|
||||
|
||||
/// Records playtime entries (keyed by version id) into the analytics queue.
///
/// Requires a token with `PERFORM_ANALYTICS` scope. Rejects batches of more
/// than 2000 entries; silently skips entries over 300 seconds and entries
/// whose version id does not resolve. Responds `204 No Content`.
#[post("playtime")]
pub async fn playtime_ingest(
    req: HttpRequest,
    analytics_queue: web::Data<Arc<AnalyticsQueue>>,
    session_queue: web::Data<AuthQueue>,
    playtime_input: web::Json<HashMap<crate::models::ids::VersionId, PlaytimeInput>>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    let (_, user) = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::PERFORM_ANALYTICS]),
    )
    .await?;

    let playtimes = playtime_input.0;

    if playtimes.len() > 2000 {
        return Err(ApiError::InvalidInput(
            "Too much playtime entered for version!".to_string(),
        ));
    }

    // Resolve every reported version id in one batch query.
    let versions = crate::database::models::Version::get_many(
        &playtimes.iter().map(|x| (*x.0).into()).collect::<Vec<_>>(),
        &**pool,
        &redis,
    )
    .await?;

    for (id, playtime) in playtimes {
        // Cap on a single report; larger values are dropped, not clamped.
        if playtime.seconds > 300 {
            continue;
        }

        if let Some(version) = versions.iter().find(|x| id == x.inner.id.into()) {
            analytics_queue.add_playtime(Playtime {
                recorded: get_current_tenths_of_ms(),
                seconds: playtime.seconds as u64,
                user_id: user.id.0,
                project_id: version.inner.project_id.0 as u64,
                version_id: version.inner.id.0 as u64,
                loader: playtime.loader,
                game_version: playtime.game_version,
                // 0 means "no parent modpack".
                parent: playtime.parent.map(|x| x.0).unwrap_or(0),
            });
        }
    }

    Ok(HttpResponse::NoContent().finish())
}
|
||||
14
apps/labrinth/src/routes/index.rs
Normal file
14
apps/labrinth/src/routes/index.rs
Normal file
@@ -0,0 +1,14 @@
|
||||
use actix_web::{get, HttpResponse};
|
||||
use serde_json::json;
|
||||
|
||||
#[get("/")]
|
||||
pub async fn index_get() -> HttpResponse {
|
||||
let data = json!({
|
||||
"name": "modrinth-labrinth",
|
||||
"version": env!("CARGO_PKG_VERSION"),
|
||||
"documentation": "https://docs.modrinth.com",
|
||||
"about": "Welcome traveler!"
|
||||
});
|
||||
|
||||
HttpResponse::Ok().json(data)
|
||||
}
|
||||
147
apps/labrinth/src/routes/internal/admin.rs
Normal file
147
apps/labrinth/src/routes/internal/admin.rs
Normal file
@@ -0,0 +1,147 @@
|
||||
use crate::auth::validate::get_user_record_from_bearer_token;
|
||||
use crate::database::redis::RedisPool;
|
||||
use crate::models::analytics::Download;
|
||||
use crate::models::ids::ProjectId;
|
||||
use crate::models::pats::Scopes;
|
||||
use crate::queue::analytics::AnalyticsQueue;
|
||||
use crate::queue::maxmind::MaxMindIndexer;
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::routes::ApiError;
|
||||
use crate::search::SearchConfig;
|
||||
use crate::util::date::get_current_tenths_of_ms;
|
||||
use crate::util::guards::admin_key_guard;
|
||||
use actix_web::{patch, post, web, HttpRequest, HttpResponse};
|
||||
use serde::Deserialize;
|
||||
use sqlx::PgPool;
|
||||
use std::collections::HashMap;
|
||||
use std::net::Ipv4Addr;
|
||||
use std::sync::Arc;
|
||||
|
||||
pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
cfg.service(
|
||||
web::scope("admin")
|
||||
.service(count_download)
|
||||
.service(force_reindex),
|
||||
);
|
||||
}
|
||||
|
||||
/// JSON body for the internal `/_count-download` route: enough request
/// context (forwarded from the download edge) to attribute a download.
#[derive(Deserialize)]
pub struct DownloadBody {
    // Full file URL that was downloaded.
    pub url: String,
    pub project_id: ProjectId,
    // Version number or base62 version id; resolved against the DB.
    pub version_name: String,

    // Client IP as seen by the edge (string form; parsed later).
    pub ip: String,
    // Original request headers, forwarded verbatim.
    pub headers: HashMap<String, String>,
}
|
||||
|
||||
// This is an internal route, cannot be used without key
/// Records a single download event forwarded by the download edge.
///
/// Resolves the downloaded version first by exact file URL, then by
/// version number / base62 id within the project; attributes the download
/// to the authenticated user only when their token has `PERFORM_ANALYTICS`.
/// Responds `204 No Content` on success.
#[patch("/_count-download", guard = "admin_key_guard")]
#[allow(clippy::too_many_arguments)]
pub async fn count_download(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    maxmind: web::Data<Arc<MaxMindIndexer>>,
    analytics_queue: web::Data<Arc<AnalyticsQueue>>,
    session_queue: web::Data<AuthQueue>,
    download_body: web::Json<DownloadBody>,
) -> Result<HttpResponse, ApiError> {
    // Bearer token comes from the forwarded headers, not this request's own.
    let token = download_body
        .headers
        .iter()
        .find(|x| x.0.to_lowercase() == "authorization")
        .map(|x| &**x.1);

    // Best-effort: a bad/absent token just leaves the download anonymous.
    let user = get_user_record_from_bearer_token(&req, token, &**pool, &redis, &session_queue)
        .await
        .ok()
        .flatten();

    let project_id: crate::database::models::ids::ProjectId = download_body.project_id.into();

    // `version_name` may be a base62-encoded version id instead of a number.
    let id_option = crate::models::ids::base62_impl::parse_base62(&download_body.version_name)
        .ok()
        .map(|x| x as i64);

    // First try to match the exact file URL; fall back to version
    // number / id within the given project.
    let (version_id, project_id) = if let Some(version) = sqlx::query!(
        "
        SELECT v.id id, v.mod_id mod_id FROM files f
        INNER JOIN versions v ON v.id = f.version_id
        WHERE f.url = $1
        ",
        download_body.url,
    )
    .fetch_optional(pool.as_ref())
    .await?
    {
        (version.id, version.mod_id)
    } else if let Some(version) = sqlx::query!(
        "
        SELECT id, mod_id FROM versions
        WHERE ((version_number = $1 OR id = $3) AND mod_id = $2)
        ",
        download_body.version_name,
        project_id as crate::database::models::ids::ProjectId,
        id_option
    )
    .fetch_optional(pool.as_ref())
    .await?
    {
        (version.id, version.mod_id)
    } else {
        return Err(ApiError::InvalidInput(
            "Specified version does not exist!".to_string(),
        ));
    };

    let url = url::Url::parse(&download_body.url)
        .map_err(|_| ApiError::InvalidInput("invalid download URL specified!".to_string()))?;

    // Fall back to loopback when the forwarded IP does not parse.
    let ip = crate::routes::analytics::convert_to_ip_v6(&download_body.ip)
        .unwrap_or_else(|_| Ipv4Addr::new(127, 0, 0, 1).to_ipv6_mapped());

    analytics_queue.add_download(Download {
        recorded: get_current_tenths_of_ms(),
        domain: url.host_str().unwrap_or_default().to_string(),
        site_path: url.path().to_string(),
        // Only attribute the user when their token grants analytics scope.
        user_id: user
            .and_then(|(scopes, x)| {
                if scopes.contains(Scopes::PERFORM_ANALYTICS) {
                    Some(x.id.0 as u64)
                } else {
                    None
                }
            })
            .unwrap_or(0),
        project_id: project_id as u64,
        version_id: version_id as u64,
        ip,
        country: maxmind.query(ip).await.unwrap_or_default(),
        // NOTE(review): lookup is case-sensitive here, unlike the lowercased
        // filter below — assumes the edge forwards lowercase keys; verify.
        user_agent: download_body
            .headers
            .get("user-agent")
            .cloned()
            .unwrap_or_default(),
        headers: download_body
            .headers
            .clone()
            .into_iter()
            .filter(|x| !crate::routes::analytics::FILTERED_HEADERS.contains(&&*x.0.to_lowercase()))
            .collect(),
    });

    Ok(HttpResponse::NoContent().body(""))
}
|
||||
|
||||
/// Internal (admin-key-guarded) route: rebuilds the project search index
/// from the database. Responds `204 No Content` when indexing completes.
#[post("/_force_reindex", guard = "admin_key_guard")]
pub async fn force_reindex(
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    config: web::Data<SearchConfig>,
) -> Result<HttpResponse, ApiError> {
    use crate::search::indexing::index_projects;
    let redis = redis.get_ref();
    index_projects(pool.as_ref().clone(), redis.clone(), &config).await?;
    Ok(HttpResponse::NoContent().finish())
}
|
||||
1847
apps/labrinth/src/routes/internal/billing.rs
Normal file
1847
apps/labrinth/src/routes/internal/billing.rs
Normal file
File diff suppressed because it is too large
Load Diff
2372
apps/labrinth/src/routes/internal/flows.rs
Normal file
2372
apps/labrinth/src/routes/internal/flows.rs
Normal file
File diff suppressed because it is too large
Load Diff
177
apps/labrinth/src/routes/internal/gdpr.rs
Normal file
177
apps/labrinth/src/routes/internal/gdpr.rs
Normal file
@@ -0,0 +1,177 @@
|
||||
use crate::auth::get_user_from_headers;
|
||||
use crate::database::redis::RedisPool;
|
||||
use crate::models::pats::Scopes;
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::routes::ApiError;
|
||||
use actix_web::{post, web, HttpRequest, HttpResponse};
|
||||
use sqlx::PgPool;
|
||||
|
||||
pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
cfg.service(web::scope("gdpr").service(export));
|
||||
}
|
||||
|
||||
/// GDPR data export: gathers every kind of record the authenticated user
/// owns (collections, follows, projects, orgs, notifications, OAuth data,
/// PATs, payouts, reports, non-anonymous thread messages, uploaded images,
/// subscriptions) and returns them as one JSON document.
///
/// Requires `SESSION_ACCESS` scope.
#[post("/export")]
pub async fn export(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::SESSION_ACCESS]),
    )
    .await?
    .1;

    let user_id = user.id.into();

    let collection_ids = crate::database::models::User::get_collections(user_id, &**pool).await?;
    let collections =
        crate::database::models::Collection::get_many(&collection_ids, &**pool, &redis)
            .await?
            .into_iter()
            .map(crate::models::collections::Collection::from)
            .collect::<Vec<_>>();

    let follows = crate::database::models::User::get_follows(user_id, &**pool)
        .await?
        .into_iter()
        .map(crate::models::ids::ProjectId::from)
        .collect::<Vec<_>>();

    let projects = crate::database::models::User::get_projects(user_id, &**pool, &redis)
        .await?
        .into_iter()
        .map(crate::models::ids::ProjectId::from)
        .collect::<Vec<_>>();

    let org_ids = crate::database::models::User::get_organizations(user_id, &**pool).await?;
    let orgs = crate::database::models::organization_item::Organization::get_many_ids(
        &org_ids, &**pool, &redis,
    )
    .await?
    .into_iter()
    // TODO: add team members
    .map(|x| crate::models::organizations::Organization::from(x, vec![]))
    .collect::<Vec<_>>();

    let notifs = crate::database::models::notification_item::Notification::get_many_user(
        user_id, &**pool, &redis,
    )
    .await?
    .into_iter()
    .map(crate::models::notifications::Notification::from)
    .collect::<Vec<_>>();

    let oauth_clients =
        crate::database::models::oauth_client_item::OAuthClient::get_all_user_clients(
            user_id, &**pool,
        )
        .await?
        .into_iter()
        .map(crate::models::oauth_clients::OAuthClient::from)
        .collect::<Vec<_>>();

    let oauth_authorizations = crate::database::models::oauth_client_authorization_item::OAuthClientAuthorization::get_all_for_user(
        user_id, &**pool,
    )
    .await?
    .into_iter()
    .map(crate::models::oauth_clients::OAuthClientAuthorization::from)
    .collect::<Vec<_>>();

    let pat_ids = crate::database::models::pat_item::PersonalAccessToken::get_user_pats(
        user_id, &**pool, &redis,
    )
    .await?;
    // `false` omits the raw token values from the export.
    let pats = crate::database::models::pat_item::PersonalAccessToken::get_many_ids(
        &pat_ids, &**pool, &redis,
    )
    .await?
    .into_iter()
    .map(|x| crate::models::pats::PersonalAccessToken::from(x, false))
    .collect::<Vec<_>>();

    let payout_ids =
        crate::database::models::payout_item::Payout::get_all_for_user(user_id, &**pool).await?;

    let payouts = crate::database::models::payout_item::Payout::get_many(&payout_ids, &**pool)
        .await?
        .into_iter()
        .map(crate::models::payouts::Payout::from)
        .collect::<Vec<_>>();

    let report_ids =
        crate::database::models::user_item::User::get_reports(user_id, &**pool).await?;
    let reports = crate::database::models::report_item::Report::get_many(&report_ids, &**pool)
        .await?
        .into_iter()
        .map(crate::models::reports::Report::from)
        .collect::<Vec<_>>();

    // Only messages where the author did not hide their identity.
    let message_ids = sqlx::query!(
        "
        SELECT id FROM threads_messages WHERE author_id = $1 AND hide_identity = FALSE
        ",
        user_id.0
    )
    .fetch_all(pool.as_ref())
    .await?
    .into_iter()
    .map(|x| crate::database::models::ids::ThreadMessageId(x.id))
    .collect::<Vec<_>>();

    let messages =
        crate::database::models::thread_item::ThreadMessage::get_many(&message_ids, &**pool)
            .await?
            .into_iter()
            .map(|x| crate::models::threads::ThreadMessage::from(x, &user))
            .collect::<Vec<_>>();

    let uploaded_images_ids = sqlx::query!(
        "SELECT id FROM uploaded_images WHERE owner_id = $1",
        user_id.0
    )
    .fetch_all(pool.as_ref())
    .await?
    .into_iter()
    .map(|x| crate::database::models::ids::ImageId(x.id))
    .collect::<Vec<_>>();

    let uploaded_images =
        crate::database::models::image_item::Image::get_many(&uploaded_images_ids, &**pool, &redis)
            .await?
            .into_iter()
            .map(crate::models::images::Image::from)
            .collect::<Vec<_>>();

    let subscriptions =
        crate::database::models::user_subscription_item::UserSubscriptionItem::get_all_user(
            user_id, &**pool,
        )
        .await?
        .into_iter()
        .map(crate::models::billing::UserSubscription::from)
        .collect::<Vec<_>>();

    Ok(HttpResponse::Ok().json(serde_json::json!({
        "user": user,
        "collections": collections,
        "follows": follows,
        "projects": projects,
        "orgs": orgs,
        "notifs": notifs,
        "oauth_clients": oauth_clients,
        "oauth_authorizations": oauth_authorizations,
        "pats": pats,
        "payouts": payouts,
        "reports": reports,
        "messages": messages,
        "uploaded_images": uploaded_images,
        "subscriptions": subscriptions,
    })))
}
|
||||
26
apps/labrinth/src/routes/internal/mod.rs
Normal file
26
apps/labrinth/src/routes/internal/mod.rs
Normal file
@@ -0,0 +1,26 @@
|
||||
pub(crate) mod admin;
|
||||
pub mod billing;
|
||||
pub mod flows;
|
||||
pub mod gdpr;
|
||||
pub mod moderation;
|
||||
pub mod pats;
|
||||
pub mod session;
|
||||
|
||||
use super::v3::oauth_clients;
|
||||
pub use super::ApiError;
|
||||
use crate::util::cors::default_cors;
|
||||
|
||||
pub fn config(cfg: &mut actix_web::web::ServiceConfig) {
|
||||
cfg.service(
|
||||
actix_web::web::scope("_internal")
|
||||
.wrap(default_cors())
|
||||
.configure(admin::config)
|
||||
.configure(oauth_clients::config)
|
||||
.configure(session::config)
|
||||
.configure(flows::config)
|
||||
.configure(pats::config)
|
||||
.configure(moderation::config)
|
||||
.configure(billing::config)
|
||||
.configure(gdpr::config),
|
||||
);
|
||||
}
|
||||
313
apps/labrinth/src/routes/internal/moderation.rs
Normal file
313
apps/labrinth/src/routes/internal/moderation.rs
Normal file
@@ -0,0 +1,313 @@
|
||||
use super::ApiError;
|
||||
use crate::database;
|
||||
use crate::database::redis::RedisPool;
|
||||
use crate::models::ids::random_base62;
|
||||
use crate::models::projects::ProjectStatus;
|
||||
use crate::queue::moderation::{ApprovalType, IdentifiedFile, MissingMetadata};
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::{auth::check_is_moderator_from_headers, models::pats::Scopes};
|
||||
use actix_web::{web, HttpRequest, HttpResponse};
|
||||
use serde::Deserialize;
|
||||
use sqlx::PgPool;
|
||||
use std::collections::HashMap;
|
||||
|
||||
pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
cfg.route("moderation/projects", web::get().to(get_projects));
|
||||
cfg.route("moderation/project/{id}", web::get().to(get_project_meta));
|
||||
cfg.route("moderation/project", web::post().to(set_project_meta));
|
||||
}
|
||||
|
||||
/// Query-string parameters for `GET moderation/projects`.
#[derive(Deserialize)]
pub struct ResultCount {
    // Maximum number of projects to return; defaults to 100.
    #[serde(default = "default_count")]
    pub count: i16,
}
|
||||
|
||||
/// Serde default for [`ResultCount::count`].
fn default_count() -> i16 {
    100
}
|
||||
|
||||
/// Lists projects awaiting moderation (`Processing` status), oldest queued
/// first, limited by the `count` query parameter.
///
/// Moderator-only; requires `PROJECT_READ` scope.
pub async fn get_projects(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    count: web::Query<ResultCount>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    check_is_moderator_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::PROJECT_READ]),
    )
    .await?;

    use futures::stream::TryStreamExt;

    let project_ids = sqlx::query!(
        "
        SELECT id FROM mods
        WHERE status = $1
        ORDER BY queued ASC
        LIMIT $2;
        ",
        ProjectStatus::Processing.as_str(),
        count.count as i64
    )
    .fetch(&**pool)
    .map_ok(|m| database::models::ProjectId(m.id))
    .try_collect::<Vec<database::models::ProjectId>>()
    .await?;

    let projects: Vec<_> = database::Project::get_many_ids(&project_ids, &**pool, &redis)
        .await?
        .into_iter()
        .map(crate::models::projects::Project::from)
        .collect();

    Ok(HttpResponse::Ok().json(projects))
}
|
||||
|
||||
/// Returns the merged "missing metadata" report for a project's files:
/// which files are already identified, which look like CurseForge (Flame)
/// files, and which are unknown — upgrading flame/unknown entries to
/// "identified" where a prior moderation decision exists in
/// `moderation_external_files` / `moderation_external_licenses`.
///
/// Moderator-only; requires `PROJECT_READ` scope. 404 if the project
/// does not exist.
pub async fn get_project_meta(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
    info: web::Path<(String,)>,
) -> Result<HttpResponse, ApiError> {
    check_is_moderator_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::PROJECT_READ]),
    )
    .await?;

    let project_id = info.into_inner().0;
    let project = database::models::Project::get(&project_id, &**pool, &redis).await?;

    if let Some(project) = project {
        let rows = sqlx::query!(
            "
            SELECT
            f.metadata, v.id version_id
            FROM versions v
            INNER JOIN files f ON f.version_id = v.id
            WHERE v.mod_id = $1
            ",
            project.inner.id.0
        )
        .fetch_all(&**pool)
        .await?;

        // Union of every file's MissingMetadata blob across all versions.
        let mut merged = MissingMetadata {
            identified: HashMap::new(),
            flame_files: HashMap::new(),
            unknown_files: HashMap::new(),
        };

        let mut check_hashes = Vec::new();
        let mut check_flames = Vec::new();

        for row in rows {
            if let Some(metadata) = row
                .metadata
                .and_then(|x| serde_json::from_value::<MissingMetadata>(x).ok())
            {
                merged.identified.extend(metadata.identified);
                merged.flame_files.extend(metadata.flame_files);
                merged.unknown_files.extend(metadata.unknown_files);

                // NOTE(review): these extend from the *merged* maps inside the
                // loop, so earlier keys are re-added on every iteration —
                // harmless for the ANY($1) queries below, but duplicative.
                check_hashes.extend(merged.flame_files.keys().cloned());
                check_hashes.extend(merged.unknown_files.keys().cloned());
                check_flames.extend(merged.flame_files.values().map(|x| x.id as i32));
            }
        }

        // Upgrade files with an existing moderation decision (matched by sha1).
        let rows = sqlx::query!(
            "
            SELECT encode(mef.sha1, 'escape') sha1, mel.status status
            FROM moderation_external_files mef
            INNER JOIN moderation_external_licenses mel ON mef.external_license_id = mel.id
            WHERE mef.sha1 = ANY($1)
            ",
            &check_hashes
                .iter()
                .map(|x| x.as_bytes().to_vec())
                .collect::<Vec<_>>()
        )
        .fetch_all(&**pool)
        .await?;

        for row in rows {
            if let Some(sha1) = row.sha1 {
                if let Some(val) = merged.flame_files.remove(&sha1) {
                    merged.identified.insert(
                        sha1,
                        IdentifiedFile {
                            file_name: val.file_name,
                            status: ApprovalType::from_string(&row.status)
                                .unwrap_or(ApprovalType::Unidentified),
                        },
                    );
                } else if let Some(val) = merged.unknown_files.remove(&sha1) {
                    merged.identified.insert(
                        sha1,
                        IdentifiedFile {
                            file_name: val,
                            status: ApprovalType::from_string(&row.status)
                                .unwrap_or(ApprovalType::Unidentified),
                        },
                    );
                }
            }
        }

        // Also upgrade flame files matched by CurseForge project id.
        let rows = sqlx::query!(
            "
            SELECT mel.id, mel.flame_project_id, mel.status status
            FROM moderation_external_licenses mel
            WHERE mel.flame_project_id = ANY($1)
            ",
            &check_flames,
        )
        .fetch_all(&**pool)
        .await?;

        for row in rows {
            if let Some(sha1) = merged
                .flame_files
                .iter()
                .find(|x| Some(x.1.id as i32) == row.flame_project_id)
                .map(|x| x.0.clone())
            {
                if let Some(val) = merged.flame_files.remove(&sha1) {
                    merged.identified.insert(
                        sha1,
                        IdentifiedFile {
                            file_name: val.file_name.clone(),
                            status: ApprovalType::from_string(&row.status)
                                .unwrap_or(ApprovalType::Unidentified),
                        },
                    );
                }
            }
        }

        Ok(HttpResponse::Ok().json(merged))
    } else {
        Err(ApiError::NotFound)
    }
}
|
||||
|
||||
/// A moderator's decision about one external file, keyed by sha1 in the
/// `set_project_meta` request body. Serialized with a `"type"` tag of
/// `"flame"` or `"unknown"`.
#[derive(Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum Judgement {
    // File identified as a CurseForge (Flame) project.
    Flame {
        // CurseForge project id.
        id: i32,
        status: ApprovalType,
        link: String,
        title: String,
    },
    // File of unknown provenance; all context fields optional.
    Unknown {
        status: ApprovalType,
        proof: Option<String>,
        link: Option<String>,
        title: Option<String>,
    },
}
|
||||
|
||||
/// Persists moderator judgements for external files: bulk-inserts one
/// `moderation_external_licenses` row per judgement (with a fresh random
/// base62 id) and links each judged sha1 to it in
/// `moderation_external_files`, all in one transaction.
///
/// Moderator-only; requires `PROJECT_READ` scope.
pub async fn set_project_meta(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
    judgements: web::Json<HashMap<String, Judgement>>,
) -> Result<HttpResponse, ApiError> {
    check_is_moderator_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::PROJECT_READ]),
    )
    .await?;

    let mut transaction = pool.begin().await?;

    // Parallel column vectors for the UNNEST bulk inserts below.
    let mut ids = Vec::new();
    let mut titles = Vec::new();
    let mut statuses = Vec::new();
    let mut links = Vec::new();
    let mut proofs = Vec::new();
    let mut flame_ids = Vec::new();

    let mut file_hashes = Vec::new();

    for (hash, judgement) in judgements.0 {
        // Fresh random license-row id per judgement.
        let id = random_base62(8);

        let (title, status, link, proof, flame_id) = match judgement {
            Judgement::Flame {
                id,
                status,
                link,
                title,
            } => (
                Some(title),
                status,
                Some(link),
                Some("See Flame page/license for permission".to_string()),
                Some(id),
            ),
            Judgement::Unknown {
                status,
                proof,
                link,
                title,
            } => (title, status, link, proof, None),
        };

        ids.push(id as i64);
        titles.push(title);
        statuses.push(status.as_str());
        links.push(link);
        proofs.push(proof);
        flame_ids.push(flame_id);
        file_hashes.push(hash);
    }

    sqlx::query(
        "
        INSERT INTO moderation_external_licenses (id, title, status, link, proof, flame_project_id)
        SELECT * FROM UNNEST ($1::bigint[], $2::varchar[], $3::varchar[], $4::varchar[], $5::varchar[], $6::integer[])
        "
    )
    .bind(&ids[..])
    .bind(&titles[..])
    .bind(&statuses[..])
    .bind(&links[..])
    .bind(&proofs[..])
    .bind(&flame_ids[..])
    .execute(&mut *transaction)
    .await?;

    // Link hashes to the license rows; an already-judged sha1 keeps its
    // existing decision (DO NOTHING).
    sqlx::query(
        "
        INSERT INTO moderation_external_files (sha1, external_license_id)
        SELECT * FROM UNNEST ($1::bytea[], $2::bigint[])
        ON CONFLICT (sha1)
        DO NOTHING
        ",
    )
    .bind(&file_hashes[..])
    .bind(&ids[..])
    .execute(&mut *transaction)
    .await?;

    transaction.commit().await?;

    Ok(HttpResponse::NoContent().finish())
}
|
||||
282
apps/labrinth/src/routes/internal/pats.rs
Normal file
282
apps/labrinth/src/routes/internal/pats.rs
Normal file
@@ -0,0 +1,282 @@
|
||||
use crate::database;
|
||||
use crate::database::models::generate_pat_id;
|
||||
|
||||
use crate::auth::get_user_from_headers;
|
||||
use crate::routes::ApiError;
|
||||
|
||||
use crate::database::redis::RedisPool;
|
||||
use actix_web::web::{self, Data};
|
||||
use actix_web::{delete, get, patch, post, HttpRequest, HttpResponse};
|
||||
use chrono::{DateTime, Utc};
|
||||
use rand::distributions::Alphanumeric;
|
||||
use rand::Rng;
|
||||
use rand_chacha::rand_core::SeedableRng;
|
||||
use rand_chacha::ChaCha20Rng;
|
||||
|
||||
use crate::models::pats::{PersonalAccessToken, Scopes};
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::util::validate::validation_errors_to_string;
|
||||
use serde::Deserialize;
|
||||
use sqlx::postgres::PgPool;
|
||||
use validator::Validate;
|
||||
|
||||
pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
cfg.service(get_pats);
|
||||
cfg.service(create_pat);
|
||||
cfg.service(edit_pat);
|
||||
cfg.service(delete_pat);
|
||||
}
|
||||
|
||||
/// Lists the authenticated user's personal access tokens (without the
/// raw token values). Requires `PAT_READ` scope.
#[get("pat")]
pub async fn get_pats(
    req: HttpRequest,
    pool: Data<PgPool>,
    redis: Data<RedisPool>,
    session_queue: Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::PAT_READ]),
    )
    .await?
    .1;

    let pat_ids = database::models::pat_item::PersonalAccessToken::get_user_pats(
        user.id.into(),
        &**pool,
        &redis,
    )
    .await?;
    let pats =
        database::models::pat_item::PersonalAccessToken::get_many_ids(&pat_ids, &**pool, &redis)
            .await?;

    // `false`: never echo the stored token secrets back in a listing.
    Ok(HttpResponse::Ok().json(
        pats.into_iter()
            .map(|x| PersonalAccessToken::from(x, false))
            .collect::<Vec<_>>(),
    ))
}
|
||||
|
||||
/// JSON body for `POST pat`: the requested token's scopes, display name,
/// and expiry (must be in the future; validated in `create_pat`).
#[derive(Deserialize, Validate)]
pub struct NewPersonalAccessToken {
    pub scopes: Scopes,
    #[validate(length(min = 3, max = 255))]
    pub name: String,
    pub expires: DateTime<Utc>,
}
|
||||
|
||||
/// `POST /pat` — creates a new personal access token for the calling user.
///
/// Requires the `PAT_CREATE` scope. The plaintext token is included in this
/// response only (`access_token: Some(token)`); list endpoints redact it.
#[post("pat")]
pub async fn create_pat(
    req: HttpRequest,
    info: web::Json<NewPersonalAccessToken>,
    pool: Data<PgPool>,
    redis: Data<RedisPool>,
    session_queue: Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Field-level validation (name length) before any other work.
    info.0
        .validate()
        .map_err(|err| ApiError::InvalidInput(validation_errors_to_string(err, None)))?;

    // Restricted scopes can never be granted to a PAT.
    if info.scopes.is_restricted() {
        return Err(ApiError::InvalidInput(
            "Invalid scopes requested!".to_string(),
        ));
    }
    if info.expires < Utc::now() {
        return Err(ApiError::InvalidInput(
            "Expire date must be in the future!".to_string(),
        ));
    }

    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::PAT_CREATE]),
    )
    .await?
    .1;

    let mut transaction = pool.begin().await?;

    let id = generate_pat_id(&mut transaction).await?;

    // 60 alphanumeric chars from a CSPRNG, prefixed "mrp_" to mark the token type.
    let token = ChaCha20Rng::from_entropy()
        .sample_iter(&Alphanumeric)
        .take(60)
        .map(char::from)
        .collect::<String>();
    let token = format!("mrp_{}", token);

    let name = info.name.clone();
    database::models::pat_item::PersonalAccessToken {
        id,
        name: name.clone(),
        access_token: token.clone(),
        scopes: info.scopes,
        user_id: user.id.into(),
        created: Utc::now(),
        expires: info.expires,
        last_used: None,
    }
    .insert(&mut transaction)
    .await?;

    transaction.commit().await?;
    // Invalidate the user's cached PAT list so the new token shows up immediately.
    database::models::pat_item::PersonalAccessToken::clear_cache(
        vec![(None, None, Some(user.id.into()))],
        &redis,
    )
    .await?;

    Ok(HttpResponse::Ok().json(PersonalAccessToken {
        id: id.into(),
        name,
        access_token: Some(token),
        scopes: info.scopes,
        user_id: user.id,
        created: Utc::now(),
        expires: info.expires,
        last_used: None,
    }))
}
|
||||
|
||||
/// Request body for `PATCH /pat/{id}` — all fields optional; only the fields
/// present are updated.
#[derive(Deserialize, Validate)]
pub struct ModifyPersonalAccessToken {
    // New scope set for the token; restricted scopes are rejected by the handler.
    pub scopes: Option<Scopes>,
    #[validate(length(min = 3, max = 255))]
    pub name: Option<String>,
    // New expiry; the handler rejects values in the past.
    pub expires: Option<DateTime<Utc>>,
}
|
||||
|
||||
/// `PATCH /pat/{id}` — partially updates one of the calling user's tokens.
///
/// Requires the `PAT_WRITE` scope. Each present field is applied as its own
/// UPDATE inside one transaction. Responds 204 even when the token does not
/// exist or belongs to someone else (no existence oracle).
#[patch("pat/{id}")]
pub async fn edit_pat(
    req: HttpRequest,
    id: web::Path<(String,)>,
    info: web::Json<ModifyPersonalAccessToken>,
    pool: Data<PgPool>,
    redis: Data<RedisPool>,
    session_queue: Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::PAT_WRITE]),
    )
    .await?
    .1;

    let id = id.into_inner().0;
    let pat = database::models::pat_item::PersonalAccessToken::get(&id, &**pool, &redis).await?;

    if let Some(pat) = pat {
        // Only the owner may edit the token.
        if pat.user_id == user.id.into() {
            let mut transaction = pool.begin().await?;

            if let Some(scopes) = &info.scopes {
                // Early return drops the uncommitted transaction (implicit rollback).
                if scopes.is_restricted() {
                    return Err(ApiError::InvalidInput(
                        "Invalid scopes requested!".to_string(),
                    ));
                }

                sqlx::query!(
                    "
                    UPDATE pats
                    SET scopes = $1
                    WHERE id = $2
                    ",
                    scopes.bits() as i64,
                    pat.id.0
                )
                .execute(&mut *transaction)
                .await?;
            }
            if let Some(name) = &info.name {
                sqlx::query!(
                    "
                    UPDATE pats
                    SET name = $1
                    WHERE id = $2
                    ",
                    name,
                    pat.id.0
                )
                .execute(&mut *transaction)
                .await?;
            }
            if let Some(expires) = &info.expires {
                if expires < &Utc::now() {
                    return Err(ApiError::InvalidInput(
                        "Expire date must be in the future!".to_string(),
                    ));
                }

                sqlx::query!(
                    "
                    UPDATE pats
                    SET expires = $1
                    WHERE id = $2
                    ",
                    expires,
                    pat.id.0
                )
                .execute(&mut *transaction)
                .await?;
            }

            transaction.commit().await?;
            // Drop every cache key (id, token string, owner) for this PAT.
            database::models::pat_item::PersonalAccessToken::clear_cache(
                vec![(Some(pat.id), Some(pat.access_token), Some(pat.user_id))],
                &redis,
            )
            .await?;
        }
    }

    Ok(HttpResponse::NoContent().finish())
}
|
||||
|
||||
/// `DELETE /pat/{id}` — revokes one of the calling user's tokens.
///
/// Requires the `PAT_DELETE` scope. Responds 204 even when the token does not
/// exist or belongs to someone else (no existence oracle).
#[delete("pat/{id}")]
pub async fn delete_pat(
    req: HttpRequest,
    id: web::Path<(String,)>,
    pool: Data<PgPool>,
    redis: Data<RedisPool>,
    session_queue: Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::PAT_DELETE]),
    )
    .await?
    .1;
    let id = id.into_inner().0;
    let pat = database::models::pat_item::PersonalAccessToken::get(&id, &**pool, &redis).await?;

    if let Some(pat) = pat {
        // Only the owner may delete the token.
        if pat.user_id == user.id.into() {
            let mut transaction = pool.begin().await?;
            database::models::pat_item::PersonalAccessToken::remove(pat.id, &mut transaction)
                .await?;
            transaction.commit().await?;
            // Drop every cache key (id, token string, owner) for this PAT.
            database::models::pat_item::PersonalAccessToken::clear_cache(
                vec![(Some(pat.id), Some(pat.access_token), Some(pat.user_id))],
                &redis,
            )
            .await?;
        }
    }

    Ok(HttpResponse::NoContent().finish())
}
|
||||
252
apps/labrinth/src/routes/internal/session.rs
Normal file
252
apps/labrinth/src/routes/internal/session.rs
Normal file
@@ -0,0 +1,252 @@
|
||||
use crate::auth::{get_user_from_headers, AuthenticationError};
|
||||
use crate::database::models::session_item::Session as DBSession;
|
||||
use crate::database::models::session_item::SessionBuilder;
|
||||
use crate::database::models::UserId;
|
||||
use crate::database::redis::RedisPool;
|
||||
use crate::models::pats::Scopes;
|
||||
use crate::models::sessions::Session;
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::routes::ApiError;
|
||||
use crate::util::env::parse_var;
|
||||
use actix_web::http::header::AUTHORIZATION;
|
||||
use actix_web::web::{scope, Data, ServiceConfig};
|
||||
use actix_web::{delete, get, post, web, HttpRequest, HttpResponse};
|
||||
use chrono::Utc;
|
||||
use rand::distributions::Alphanumeric;
|
||||
use rand::{Rng, SeedableRng};
|
||||
use rand_chacha::ChaCha20Rng;
|
||||
use sqlx::PgPool;
|
||||
use woothee::parser::Parser;
|
||||
|
||||
pub fn config(cfg: &mut ServiceConfig) {
|
||||
cfg.service(
|
||||
scope("session")
|
||||
.service(list)
|
||||
.service(delete)
|
||||
.service(refresh),
|
||||
);
|
||||
}
|
||||
|
||||
/// Per-session client metadata captured at session creation time.
pub struct SessionMetadata {
    // Geo data from Cloudflare headers, when present.
    pub city: Option<String>,
    pub country: Option<String>,
    // Source IP as a string (peer address, or CF-Connecting-IP behind Cloudflare).
    pub ip: String,

    // Parsed from the user agent via woothee; None when the UA is unparseable.
    pub os: Option<String>,
    pub platform: Option<String>,
    // Raw user-agent header value.
    pub user_agent: String,
}
|
||||
|
||||
pub async fn get_session_metadata(
|
||||
req: &HttpRequest,
|
||||
) -> Result<SessionMetadata, AuthenticationError> {
|
||||
let conn_info = req.connection_info().clone();
|
||||
let ip_addr = if parse_var("CLOUDFLARE_INTEGRATION").unwrap_or(false) {
|
||||
if let Some(header) = req.headers().get("CF-Connecting-IP") {
|
||||
header.to_str().ok()
|
||||
} else {
|
||||
conn_info.peer_addr()
|
||||
}
|
||||
} else {
|
||||
conn_info.peer_addr()
|
||||
};
|
||||
|
||||
let country = req
|
||||
.headers()
|
||||
.get("cf-ipcountry")
|
||||
.and_then(|x| x.to_str().ok());
|
||||
let city = req.headers().get("cf-ipcity").and_then(|x| x.to_str().ok());
|
||||
|
||||
let user_agent = req
|
||||
.headers()
|
||||
.get("user-agent")
|
||||
.and_then(|x| x.to_str().ok())
|
||||
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
|
||||
|
||||
let parser = Parser::new();
|
||||
let info = parser.parse(user_agent);
|
||||
let os = if let Some(info) = info {
|
||||
Some((info.os, info.name))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
Ok(SessionMetadata {
|
||||
os: os.map(|x| x.0.to_string()),
|
||||
platform: os.map(|x| x.1.to_string()),
|
||||
city: city.map(|x| x.to_string()),
|
||||
country: country.map(|x| x.to_string()),
|
||||
ip: ip_addr
|
||||
.ok_or_else(|| AuthenticationError::InvalidCredentials)?
|
||||
.to_string(),
|
||||
user_agent: user_agent.to_string(),
|
||||
})
|
||||
}
|
||||
|
||||
/// Creates and persists a new session for `user_id`, populated from the
/// request's client metadata, and returns the freshly stored row.
pub async fn issue_session(
    req: HttpRequest,
    user_id: UserId,
    transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
    redis: &RedisPool,
) -> Result<DBSession, AuthenticationError> {
    let metadata = get_session_metadata(&req).await?;

    // 60 alphanumeric chars from a CSPRNG, prefixed "mra_" to mark the token type.
    let session = ChaCha20Rng::from_entropy()
        .sample_iter(&Alphanumeric)
        .take(60)
        .map(char::from)
        .collect::<String>();

    let session = format!("mra_{session}");

    let id = SessionBuilder {
        session,
        user_id,
        os: metadata.os,
        platform: metadata.platform,
        city: metadata.city,
        country: metadata.country,
        ip: metadata.ip,
        user_agent: metadata.user_agent,
    }
    .insert(transaction)
    .await?;

    // Read the row back within the same transaction; absence is treated as an
    // authentication failure.
    let session = DBSession::get_id(id, &mut **transaction, redis)
        .await?
        .ok_or_else(|| AuthenticationError::InvalidCredentials)?;

    // Drop any stale cache entries keyed by id, token string, or owner.
    DBSession::clear_cache(
        vec![(
            Some(session.id),
            Some(session.session.clone()),
            Some(session.user_id),
        )],
        redis,
    )
    .await?;

    Ok(session)
}
|
||||
|
||||
/// `GET /session/list` — lists the calling user's non-expired sessions.
///
/// Requires the `SESSION_READ` scope.
#[get("list")]
pub async fn list(
    req: HttpRequest,
    pool: Data<PgPool>,
    redis: Data<RedisPool>,
    session_queue: Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let current_user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::SESSION_READ]),
    )
    .await?
    .1;

    // Raw token of the session making this request; passed to `Session::from`,
    // presumably so the response can flag the current session — confirm there.
    let session = req
        .headers()
        .get(AUTHORIZATION)
        .and_then(|x| x.to_str().ok())
        .ok_or_else(|| AuthenticationError::InvalidCredentials)?;

    let session_ids = DBSession::get_user_sessions(current_user.id.into(), &**pool, &redis).await?;
    let sessions = DBSession::get_many_ids(&session_ids, &**pool, &redis)
        .await?
        .into_iter()
        // Expired sessions are filtered out rather than surfaced.
        .filter(|x| x.expires > Utc::now())
        .map(|x| Session::from(x, false, Some(session)))
        .collect::<Vec<_>>();

    Ok(HttpResponse::Ok().json(sessions))
}
|
||||
|
||||
/// `DELETE /session/{id}` — revokes one of the calling user's sessions.
///
/// Requires the `SESSION_DELETE` scope. Always responds 204; a missing or
/// foreign session id is silently ignored (no existence oracle).
#[delete("{id}")]
pub async fn delete(
    info: web::Path<(String,)>,
    req: HttpRequest,
    pool: Data<PgPool>,
    redis: Data<RedisPool>,
    session_queue: Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let current_user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::SESSION_DELETE]),
    )
    .await?
    .1;

    let session = DBSession::get(info.into_inner().0, &**pool, &redis).await?;

    if let Some(session) = session {
        // Only the owner may revoke a session.
        if session.user_id == current_user.id.into() {
            let mut transaction = pool.begin().await?;
            DBSession::remove(session.id, &mut transaction).await?;
            transaction.commit().await?;
            // Drop cache entries keyed by id, token string, or owner.
            DBSession::clear_cache(
                vec![(
                    Some(session.id),
                    Some(session.session),
                    Some(session.user_id),
                )],
                &redis,
            )
            .await?;
        }
    }

    Ok(HttpResponse::NoContent().body(""))
}
|
||||
|
||||
/// `POST /session/refresh` — rotates the current session: deletes the old row
/// and issues a brand-new session token, returned in the response body.
#[post("refresh")]
pub async fn refresh(
    req: HttpRequest,
    pool: Data<PgPool>,
    redis: Data<RedisPool>,
    session_queue: Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // No scope restriction (`None`): any authenticated credential may attempt it.
    let current_user = get_user_from_headers(&req, &**pool, &redis, &session_queue, None)
        .await?
        .1;
    let session = req
        .headers()
        .get(AUTHORIZATION)
        .and_then(|x| x.to_str().ok())
        .ok_or_else(|| ApiError::Authentication(AuthenticationError::InvalidCredentials))?;

    let session = DBSession::get(session, &**pool, &redis).await?;

    if let Some(session) = session {
        // Refuse when the token belongs to another user or its refresh window elapsed.
        if current_user.id != session.user_id.into() || session.refresh_expires < Utc::now() {
            return Err(ApiError::Authentication(
                AuthenticationError::InvalidCredentials,
            ));
        }

        let mut transaction = pool.begin().await?;

        // Remove the old session and mint the replacement in the same transaction.
        DBSession::remove(session.id, &mut transaction).await?;
        let new_session = issue_session(req, session.user_id, &mut transaction, &redis).await?;
        transaction.commit().await?;
        DBSession::clear_cache(
            vec![(
                Some(session.id),
                Some(session.session),
                Some(session.user_id),
            )],
            &redis,
        )
        .await?;

        // `true` presumably includes the new session token in the response
        // (the list endpoint passes `false`) — confirm in `Session::from`.
        Ok(HttpResponse::Ok().json(Session::from(new_session, true, None)))
    } else {
        Err(ApiError::Authentication(
            AuthenticationError::InvalidCredentials,
        ))
    }
}
|
||||
408
apps/labrinth/src/routes/maven.rs
Normal file
408
apps/labrinth/src/routes/maven.rs
Normal file
@@ -0,0 +1,408 @@
|
||||
use crate::auth::checks::{is_visible_project, is_visible_version};
|
||||
use crate::database::models::legacy_loader_fields::MinecraftGameVersion;
|
||||
use crate::database::models::loader_fields::Loader;
|
||||
use crate::database::models::project_item::QueryProject;
|
||||
use crate::database::models::version_item::{QueryFile, QueryVersion};
|
||||
use crate::database::redis::RedisPool;
|
||||
use crate::models::pats::Scopes;
|
||||
use crate::models::projects::{ProjectId, VersionId};
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::routes::ApiError;
|
||||
use crate::{auth::get_user_from_headers, database};
|
||||
use actix_web::{get, route, web, HttpRequest, HttpResponse};
|
||||
use sqlx::PgPool;
|
||||
use std::collections::HashSet;
|
||||
use yaserde_derive::YaSerialize;
|
||||
|
||||
pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
cfg.service(maven_metadata);
|
||||
cfg.service(version_file_sha512);
|
||||
cfg.service(version_file_sha1);
|
||||
cfg.service(version_file);
|
||||
}
|
||||
|
||||
// TODO: These were modified in v3 and should be tested
|
||||
|
||||
/// Root element of the `maven-metadata.xml` document served for a project.
#[derive(Default, Debug, Clone, YaSerialize)]
#[yaserde(root = "metadata", rename = "metadata")]
pub struct Metadata {
    // Always "maven.modrinth" for this repository.
    #[yaserde(rename = "groupId")]
    group_id: String,
    // The project's public id.
    #[yaserde(rename = "artifactId")]
    artifact_id: String,
    versioning: Versioning,
}
|
||||
|
||||
/// `<versioning>` section of the maven metadata document.
#[derive(Default, Debug, Clone, YaSerialize)]
#[yaserde(rename = "versioning")]
pub struct Versioning {
    // Newest version of any type.
    latest: String,
    // Newest version with `version_type == "release"`.
    release: String,
    versions: Versions,
    // Project update time formatted as maven's `yyyyMMddHHmmss`.
    #[yaserde(rename = "lastUpdated")]
    last_updated: String,
}
|
||||
|
||||
/// `<versions>` list inside the maven metadata document; each entry serializes
/// as an individual `<version>` element.
#[derive(Default, Debug, Clone, YaSerialize)]
#[yaserde(rename = "versions")]
pub struct Versions {
    #[yaserde(rename = "version")]
    versions: Vec<String>,
}
|
||||
|
||||
/// Minimal POM document synthesized on the fly for `{project}-{version}.pom`
/// requests.
#[derive(Default, Debug, Clone, YaSerialize)]
#[yaserde(rename = "project", namespace = "http://maven.apache.org/POM/4.0.0")]
pub struct MavenPom {
    #[yaserde(rename = "xsi:schemaLocation", attribute)]
    schema_location: String,
    #[yaserde(rename = "xmlns:xsi", attribute)]
    xsi: String,
    #[yaserde(rename = "modelVersion")]
    model_version: String,
    #[yaserde(rename = "groupId")]
    group_id: String,
    #[yaserde(rename = "artifactId")]
    artifact_id: String,
    version: String,
    name: String,
    description: String,
}
|
||||
|
||||
#[get("maven/modrinth/{id}/maven-metadata.xml")]
|
||||
pub async fn maven_metadata(
|
||||
req: HttpRequest,
|
||||
params: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let project_id = params.into_inner().0;
|
||||
let Some(project) = database::models::Project::get(&project_id, &**pool, &redis).await? else {
|
||||
return Err(ApiError::NotFound);
|
||||
};
|
||||
|
||||
let user_option = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_READ]),
|
||||
)
|
||||
.await
|
||||
.map(|x| x.1)
|
||||
.ok();
|
||||
|
||||
if !is_visible_project(&project.inner, &user_option, &pool, false).await? {
|
||||
return Err(ApiError::NotFound);
|
||||
}
|
||||
|
||||
let version_names = sqlx::query!(
|
||||
"
|
||||
SELECT id, version_number, version_type
|
||||
FROM versions
|
||||
WHERE mod_id = $1 AND status = ANY($2)
|
||||
ORDER BY ordering ASC NULLS LAST, date_published ASC
|
||||
",
|
||||
project.inner.id as database::models::ids::ProjectId,
|
||||
&*crate::models::projects::VersionStatus::iterator()
|
||||
.filter(|x| x.is_listed())
|
||||
.map(|x| x.to_string())
|
||||
.collect::<Vec<String>>(),
|
||||
)
|
||||
.fetch_all(&**pool)
|
||||
.await?;
|
||||
|
||||
let mut new_versions = Vec::new();
|
||||
let mut vals = HashSet::new();
|
||||
let mut latest_release = None;
|
||||
|
||||
for row in version_names {
|
||||
let value = if vals.contains(&row.version_number) {
|
||||
format!("{}", VersionId(row.id as u64))
|
||||
} else {
|
||||
row.version_number
|
||||
};
|
||||
|
||||
vals.insert(value.clone());
|
||||
if row.version_type == "release" {
|
||||
latest_release = Some(value.clone())
|
||||
}
|
||||
|
||||
new_versions.push(value);
|
||||
}
|
||||
|
||||
let project_id: ProjectId = project.inner.id.into();
|
||||
|
||||
let respdata = Metadata {
|
||||
group_id: "maven.modrinth".to_string(),
|
||||
artifact_id: project_id.to_string(),
|
||||
versioning: Versioning {
|
||||
latest: new_versions
|
||||
.last()
|
||||
.unwrap_or(&"release".to_string())
|
||||
.to_string(),
|
||||
release: latest_release.unwrap_or_default(),
|
||||
versions: Versions {
|
||||
versions: new_versions,
|
||||
},
|
||||
last_updated: project.inner.updated.format("%Y%m%d%H%M%S").to_string(),
|
||||
},
|
||||
};
|
||||
|
||||
Ok(HttpResponse::Ok()
|
||||
.content_type("text/xml")
|
||||
.body(yaserde::ser::to_string(&respdata).map_err(ApiError::Xml)?))
|
||||
}
|
||||
|
||||
async fn find_version(
|
||||
project: &QueryProject,
|
||||
vcoords: &String,
|
||||
pool: &PgPool,
|
||||
redis: &RedisPool,
|
||||
) -> Result<Option<QueryVersion>, ApiError> {
|
||||
let id_option = crate::models::ids::base62_impl::parse_base62(vcoords)
|
||||
.ok()
|
||||
.map(|x| x as i64);
|
||||
|
||||
let all_versions = database::models::Version::get_many(&project.versions, pool, redis).await?;
|
||||
|
||||
let exact_matches = all_versions
|
||||
.iter()
|
||||
.filter(|x| &x.inner.version_number == vcoords || Some(x.inner.id.0) == id_option)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if exact_matches.len() == 1 {
|
||||
return Ok(Some(exact_matches[0].clone()));
|
||||
}
|
||||
|
||||
// Try to parse version filters from version coords.
|
||||
let Some((vnumber, filter)) = vcoords.rsplit_once('-') else {
|
||||
return Ok(exact_matches.first().map(|x| (*x).clone()));
|
||||
};
|
||||
|
||||
let db_loaders: HashSet<String> = Loader::list(pool, redis)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|x| x.loader)
|
||||
.collect();
|
||||
|
||||
let (loaders, game_versions) = filter
|
||||
.split(',')
|
||||
.map(String::from)
|
||||
.partition::<Vec<_>, _>(|el| db_loaders.contains(el));
|
||||
|
||||
let matched = all_versions
|
||||
.iter()
|
||||
.filter(|x| {
|
||||
let mut bool = x.inner.version_number == vnumber;
|
||||
|
||||
if !loaders.is_empty() {
|
||||
bool &= x.loaders.iter().any(|y| loaders.contains(y));
|
||||
}
|
||||
|
||||
// For maven in particular, we will hardcode it to use GameVersions rather than generic loader fields, as this is minecraft-java exclusive
|
||||
if !game_versions.is_empty() {
|
||||
let version_game_versions = x
|
||||
.version_fields
|
||||
.clone()
|
||||
.into_iter()
|
||||
.find_map(|v| MinecraftGameVersion::try_from_version_field(&v).ok());
|
||||
if let Some(version_game_versions) = version_game_versions {
|
||||
bool &= version_game_versions
|
||||
.iter()
|
||||
.any(|y| game_versions.contains(&y.version));
|
||||
}
|
||||
}
|
||||
|
||||
bool
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
Ok(matched
|
||||
.first()
|
||||
.or_else(|| exact_matches.first())
|
||||
.copied()
|
||||
.cloned())
|
||||
}
|
||||
|
||||
fn find_file<'a>(
|
||||
project_id: &str,
|
||||
vcoords: &str,
|
||||
version: &'a QueryVersion,
|
||||
file: &str,
|
||||
) -> Option<&'a QueryFile> {
|
||||
if let Some(selected_file) = version.files.iter().find(|x| x.filename == file) {
|
||||
return Some(selected_file);
|
||||
}
|
||||
|
||||
// Minecraft mods are not going to be both a mod and a modpack, so this minecraft-specific handling is fine
|
||||
// As there can be multiple project types, returns the first allowable match
|
||||
let mut fileexts = vec![];
|
||||
for project_type in version.project_types.iter() {
|
||||
match project_type.as_str() {
|
||||
"mod" => fileexts.push("jar"),
|
||||
"modpack" => fileexts.push("mrpack"),
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
|
||||
for fileext in fileexts {
|
||||
if file == format!("{}-{}.{}", &project_id, &vcoords, fileext) {
|
||||
return version
|
||||
.files
|
||||
.iter()
|
||||
.find(|x| x.primary)
|
||||
.or_else(|| version.files.iter().last());
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
/// `GET|HEAD /maven/modrinth/{id}/{versionnum}/{file}` — serves a synthesized
/// POM for `{project}-{version}.pom` requests, or redirects to the requested
/// artifact file on the CDN. Hidden projects/versions 404.
#[route(
    "maven/modrinth/{id}/{versionnum}/{file}",
    method = "GET",
    method = "HEAD"
)]
pub async fn version_file(
    req: HttpRequest,
    params: web::Path<(String, String, String)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let (project_id, vnum, file) = params.into_inner();
    let Some(project) = database::models::Project::get(&project_id, &**pool, &redis).await? else {
        return Err(ApiError::NotFound);
    };

    // Optional auth — only needed to see non-public projects/versions.
    let user_option = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::PROJECT_READ]),
    )
    .await
    .map(|x| x.1)
    .ok();

    if !is_visible_project(&project.inner, &user_option, &pool, false).await? {
        return Err(ApiError::NotFound);
    }

    let Some(version) = find_version(&project, &vnum, &pool, &redis).await? else {
        return Err(ApiError::NotFound);
    };

    if !is_visible_version(&version.inner, &user_option, &pool, &redis).await? {
        return Err(ApiError::NotFound);
    }

    if file == format!("{}-{}.pom", &project_id, &vnum) {
        // Synthesize a minimal POM on the fly.
        let respdata = MavenPom {
            schema_location:
                "http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
                    .to_string(),
            xsi: "http://www.w3.org/2001/XMLSchema-instance".to_string(),
            model_version: "4.0.0".to_string(),
            group_id: "maven.modrinth".to_string(),
            artifact_id: project_id,
            version: vnum,
            name: project.inner.name,
            description: project.inner.description,
        };
        return Ok(HttpResponse::Ok()
            .content_type("text/xml")
            .body(yaserde::ser::to_string(&respdata).map_err(ApiError::Xml)?));
    } else if let Some(selected_file) = find_file(&project_id, &vnum, &version, &file) {
        // Artifact bytes live on the CDN; redirect rather than proxying them.
        return Ok(HttpResponse::TemporaryRedirect()
            .append_header(("location", &*selected_file.url))
            .body(""));
    }

    Err(ApiError::NotFound)
}
|
||||
|
||||
/// `GET /maven/modrinth/{id}/{versionnum}/{file}.sha1` — serves the stored SHA-1
/// checksum for the resolved artifact file, or 404 when absent.
#[get("maven/modrinth/{id}/{versionnum}/{file}.sha1")]
pub async fn version_file_sha1(
    req: HttpRequest,
    params: web::Path<(String, String, String)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let (project_id, vnum, file) = params.into_inner();
    let Some(project) = database::models::Project::get(&project_id, &**pool, &redis).await? else {
        return Err(ApiError::NotFound);
    };

    // Optional auth — only needed to see non-public projects/versions.
    let user_option = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::PROJECT_READ]),
    )
    .await
    .map(|x| x.1)
    .ok();

    if !is_visible_project(&project.inner, &user_option, &pool, false).await? {
        return Err(ApiError::NotFound);
    }

    let Some(version) = find_version(&project, &vnum, &pool, &redis).await? else {
        return Err(ApiError::NotFound);
    };

    if !is_visible_version(&version.inner, &user_option, &pool, &redis).await? {
        return Err(ApiError::NotFound);
    }

    // Hashes are precomputed and stored per file; no hashing happens here.
    Ok(find_file(&project_id, &vnum, &version, &file)
        .and_then(|file| file.hashes.get("sha1"))
        .map(|hash_str| HttpResponse::Ok().body(hash_str.clone()))
        .unwrap_or_else(|| HttpResponse::NotFound().body("")))
}
|
||||
|
||||
/// `GET /maven/modrinth/{id}/{versionnum}/{file}.sha512` — serves the stored
/// SHA-512 checksum for the resolved artifact file, or 404 when absent.
#[get("maven/modrinth/{id}/{versionnum}/{file}.sha512")]
pub async fn version_file_sha512(
    req: HttpRequest,
    params: web::Path<(String, String, String)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let (project_id, vnum, file) = params.into_inner();
    let Some(project) = database::models::Project::get(&project_id, &**pool, &redis).await? else {
        return Err(ApiError::NotFound);
    };

    // Optional auth — only needed to see non-public projects/versions.
    let user_option = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::PROJECT_READ]),
    )
    .await
    .map(|x| x.1)
    .ok();

    if !is_visible_project(&project.inner, &user_option, &pool, false).await? {
        return Err(ApiError::NotFound);
    }

    let Some(version) = find_version(&project, &vnum, &pool, &redis).await? else {
        return Err(ApiError::NotFound);
    };

    if !is_visible_version(&version.inner, &user_option, &pool, &redis).await? {
        return Err(ApiError::NotFound);
    }

    // Hashes are precomputed and stored per file; no hashing happens here.
    Ok(find_file(&project_id, &vnum, &version, &file)
        .and_then(|file| file.hashes.get("sha512"))
        .map(|hash_str| HttpResponse::Ok().body(hash_str.clone()))
        .unwrap_or_else(|| HttpResponse::NotFound().body("")))
}
|
||||
211
apps/labrinth/src/routes/mod.rs
Normal file
211
apps/labrinth/src/routes/mod.rs
Normal file
@@ -0,0 +1,211 @@
|
||||
use crate::file_hosting::FileHostingError;
|
||||
use crate::routes::analytics::{page_view_ingest, playtime_ingest};
|
||||
use crate::util::cors::default_cors;
|
||||
use crate::util::env::parse_strings_from_var;
|
||||
use actix_cors::Cors;
|
||||
use actix_files::Files;
|
||||
use actix_web::http::StatusCode;
|
||||
use actix_web::{web, HttpResponse};
|
||||
use futures::FutureExt;
|
||||
|
||||
pub mod internal;
|
||||
pub mod v2;
|
||||
pub mod v3;
|
||||
|
||||
pub mod v2_reroute;
|
||||
|
||||
mod analytics;
|
||||
mod index;
|
||||
mod maven;
|
||||
mod not_found;
|
||||
mod updates;
|
||||
|
||||
pub use self::not_found::not_found;
|
||||
|
||||
/// Mounts all top-level route scopes. Scope order matters: the empty catch-all
/// scope must be registered last or it would shadow everything else.
pub fn root_config(cfg: &mut web::ServiceConfig) {
    cfg.service(
        web::scope("maven")
            .wrap(default_cors())
            .configure(maven::config),
    );
    cfg.service(
        web::scope("updates")
            .wrap(default_cors())
            .configure(updates::config),
    );
    // Analytics has a custom CORS policy: only origins listed in
    // ANALYTICS_ALLOWED_ORIGINS (or "*") may send ingest requests.
    cfg.service(
        web::scope("analytics")
            .wrap(
                Cors::default()
                    .allowed_origin_fn(|origin, _req_head| {
                        // NOTE(review): the env var is re-parsed on every CORS
                        // check; consider caching if this shows up in profiles.
                        let allowed_origins =
                            parse_strings_from_var("ANALYTICS_ALLOWED_ORIGINS").unwrap_or_default();

                        allowed_origins.contains(&"*".to_string())
                            || allowed_origins
                                .contains(&origin.to_str().unwrap_or_default().to_string())
                    })
                    .allowed_methods(vec!["GET", "POST"])
                    .allowed_headers(vec![
                        actix_web::http::header::AUTHORIZATION,
                        actix_web::http::header::ACCEPT,
                        actix_web::http::header::CONTENT_TYPE,
                    ])
                    .max_age(3600),
            )
            .service(page_view_ingest)
            .service(playtime_ingest),
    );
    // api/v1 is fully retired: every request receives a static 410 Gone payload.
    cfg.service(
        web::scope("api/v1")
            .wrap(default_cors())
            .wrap_fn(|req, _srv| {
                async {
                    Ok(req.into_response(
                        HttpResponse::Gone()
                            .content_type("application/json")
                            .body(r#"{"error":"api_deprecated","description":"You are using an application that uses an outdated version of Modrinth's API. Please either update it or switch to another application. For developers: https://docs.modrinth.com/docs/migrations/v1-to-v2/"}"#)
                    ))
                }.boxed_local()
            })
    );
    // Catch-all: landing page plus static assets.
    cfg.service(
        web::scope("")
            .wrap(default_cors())
            .service(index::index_get)
            .service(Files::new("/", "assets/")),
    );
}
|
||||
|
||||
/// Top-level error type for route handlers; mapped to an HTTP status via
/// `ResponseError` and to a JSON body via `as_api_error`.
///
/// NOTE(review): `Database`/`SqlxDatabase` and `Authentication`/
/// `CustomAuthentication` intentionally share the same user-facing Display
/// prefix; they are distinguished only by their `error` code and status.
#[derive(thiserror::Error, Debug)]
pub enum ApiError {
    #[error("Environment Error")]
    Env(#[from] dotenvy::Error),
    #[error("Error while uploading file: {0}")]
    FileHosting(#[from] FileHostingError),
    #[error("Database Error: {0}")]
    Database(#[from] crate::database::models::DatabaseError),
    #[error("Database Error: {0}")]
    SqlxDatabase(#[from] sqlx::Error),
    #[error("Clickhouse Error: {0}")]
    Clickhouse(#[from] clickhouse::error::Error),
    // XML serialization failure (yaserde reports errors as plain strings).
    #[error("Internal server error: {0}")]
    Xml(String),
    #[error("Deserialization error: {0}")]
    Json(#[from] serde_json::Error),
    #[error("Authentication Error: {0}")]
    Authentication(#[from] crate::auth::AuthenticationError),
    #[error("Authentication Error: {0}")]
    CustomAuthentication(String),
    #[error("Invalid Input: {0}")]
    InvalidInput(String),
    #[error("Error while validating input: {0}")]
    Validation(String),
    #[error("Search Error: {0}")]
    Search(#[from] meilisearch_sdk::errors::Error),
    #[error("Indexing Error: {0}")]
    Indexing(#[from] crate::search::indexing::IndexingError),
    #[error("Payments Error: {0}")]
    Payments(String),
    #[error("Discord Error: {0}")]
    Discord(String),
    #[error("Captcha Error. Try resubmitting the form.")]
    Turnstile,
    #[error("Error while decoding Base62: {0}")]
    Decoding(#[from] crate::models::ids::DecodingError),
    #[error("Image Parsing Error: {0}")]
    ImageParse(#[from] image::ImageError),
    #[error("Password Hashing Error: {0}")]
    PasswordHashing(#[from] argon2::password_hash::Error),
    #[error("Password strength checking error: {0}")]
    PasswordStrengthCheck(#[from] zxcvbn::ZxcvbnError),
    #[error("{0}")]
    Mail(#[from] crate::auth::email::MailError),
    #[error("Error while rerouting request: {0}")]
    Reroute(#[from] reqwest::Error),
    #[error("Unable to read Zip Archive: {0}")]
    Zip(#[from] zip::result::ZipError),
    #[error("IO Error: {0}")]
    Io(#[from] std::io::Error),
    #[error("Resource not found")]
    NotFound,
    // (wait-ms, request quota) — surfaced when the limiter rejects a request.
    #[error("You are being rate-limited. Please wait {0} milliseconds. 0/{1} remaining.")]
    RateLimitError(u128, u32),
    #[error("Error while interacting with payment processor: {0}")]
    Stripe(#[from] stripe::StripeError),
}
|
||||
|
||||
impl ApiError {
|
||||
pub fn as_api_error<'a>(&self) -> crate::models::error::ApiError<'a> {
|
||||
crate::models::error::ApiError {
|
||||
error: match self {
|
||||
ApiError::Env(..) => "environment_error",
|
||||
ApiError::SqlxDatabase(..) => "database_error",
|
||||
ApiError::Database(..) => "database_error",
|
||||
ApiError::Authentication(..) => "unauthorized",
|
||||
ApiError::CustomAuthentication(..) => "unauthorized",
|
||||
ApiError::Xml(..) => "xml_error",
|
||||
ApiError::Json(..) => "json_error",
|
||||
ApiError::Search(..) => "search_error",
|
||||
ApiError::Indexing(..) => "indexing_error",
|
||||
ApiError::FileHosting(..) => "file_hosting_error",
|
||||
ApiError::InvalidInput(..) => "invalid_input",
|
||||
ApiError::Validation(..) => "invalid_input",
|
||||
ApiError::Payments(..) => "payments_error",
|
||||
ApiError::Discord(..) => "discord_error",
|
||||
ApiError::Turnstile => "turnstile_error",
|
||||
ApiError::Decoding(..) => "decoding_error",
|
||||
ApiError::ImageParse(..) => "invalid_image",
|
||||
ApiError::PasswordHashing(..) => "password_hashing_error",
|
||||
ApiError::PasswordStrengthCheck(..) => "strength_check_error",
|
||||
ApiError::Mail(..) => "mail_error",
|
||||
ApiError::Clickhouse(..) => "clickhouse_error",
|
||||
ApiError::Reroute(..) => "reroute_error",
|
||||
ApiError::NotFound => "not_found",
|
||||
ApiError::Zip(..) => "zip_error",
|
||||
ApiError::Io(..) => "io_error",
|
||||
ApiError::RateLimitError(..) => "ratelimit_error",
|
||||
ApiError::Stripe(..) => "stripe_error",
|
||||
},
|
||||
description: self.to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl actix_web::ResponseError for ApiError {
|
||||
fn status_code(&self) -> StatusCode {
|
||||
match self {
|
||||
ApiError::Env(..) => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
ApiError::Database(..) => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
ApiError::SqlxDatabase(..) => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
ApiError::Clickhouse(..) => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
ApiError::Authentication(..) => StatusCode::UNAUTHORIZED,
|
||||
ApiError::CustomAuthentication(..) => StatusCode::UNAUTHORIZED,
|
||||
ApiError::Xml(..) => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
ApiError::Json(..) => StatusCode::BAD_REQUEST,
|
||||
ApiError::Search(..) => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
ApiError::Indexing(..) => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
ApiError::FileHosting(..) => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
ApiError::InvalidInput(..) => StatusCode::BAD_REQUEST,
|
||||
ApiError::Validation(..) => StatusCode::BAD_REQUEST,
|
||||
ApiError::Payments(..) => StatusCode::FAILED_DEPENDENCY,
|
||||
ApiError::Discord(..) => StatusCode::FAILED_DEPENDENCY,
|
||||
ApiError::Turnstile => StatusCode::BAD_REQUEST,
|
||||
ApiError::Decoding(..) => StatusCode::BAD_REQUEST,
|
||||
ApiError::ImageParse(..) => StatusCode::BAD_REQUEST,
|
||||
ApiError::PasswordHashing(..) => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
ApiError::PasswordStrengthCheck(..) => StatusCode::BAD_REQUEST,
|
||||
ApiError::Mail(..) => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
ApiError::Reroute(..) => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
ApiError::NotFound => StatusCode::NOT_FOUND,
|
||||
ApiError::Zip(..) => StatusCode::BAD_REQUEST,
|
||||
ApiError::Io(..) => StatusCode::BAD_REQUEST,
|
||||
ApiError::RateLimitError(..) => StatusCode::TOO_MANY_REQUESTS,
|
||||
ApiError::Stripe(..) => StatusCode::FAILED_DEPENDENCY,
|
||||
}
|
||||
}
|
||||
|
||||
fn error_response(&self) -> HttpResponse {
|
||||
HttpResponse::build(self.status_code()).json(self.as_api_error())
|
||||
}
|
||||
}
|
||||
11
apps/labrinth/src/routes/not_found.rs
Normal file
11
apps/labrinth/src/routes/not_found.rs
Normal file
@@ -0,0 +1,11 @@
|
||||
use crate::models::error::ApiError;
|
||||
use actix_web::{HttpResponse, Responder};
|
||||
|
||||
pub async fn not_found() -> impl Responder {
|
||||
let data = ApiError {
|
||||
error: "not_found",
|
||||
description: "the requested route does not exist".to_string(),
|
||||
};
|
||||
|
||||
HttpResponse::NotFound().json(data)
|
||||
}
|
||||
129
apps/labrinth/src/routes/updates.rs
Normal file
129
apps/labrinth/src/routes/updates.rs
Normal file
@@ -0,0 +1,129 @@
|
||||
use std::collections::HashMap;
|
||||
|
||||
use actix_web::{get, web, HttpRequest, HttpResponse};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::PgPool;
|
||||
|
||||
use crate::auth::checks::{filter_visible_versions, is_visible_project};
|
||||
use crate::auth::get_user_from_headers;
|
||||
use crate::database;
|
||||
use crate::database::models::legacy_loader_fields::MinecraftGameVersion;
|
||||
use crate::database::redis::RedisPool;
|
||||
use crate::models::pats::Scopes;
|
||||
use crate::models::projects::VersionType;
|
||||
use crate::queue::session::AuthQueue;
|
||||
|
||||
use super::ApiError;
|
||||
|
||||
pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
cfg.service(forge_updates);
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct NeoForge {
|
||||
#[serde(default = "default_neoforge")]
|
||||
pub neoforge: String,
|
||||
}
|
||||
|
||||
/// Default value for `NeoForge::neoforge` when the query parameter is absent.
fn default_neoforge() -> String {
    String::from("none")
}
|
||||
|
||||
#[get("{id}/forge_updates.json")]
|
||||
pub async fn forge_updates(
|
||||
req: HttpRequest,
|
||||
web::Query(neo): web::Query<NeoForge>,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
const ERROR: &str = "The specified project does not exist!";
|
||||
|
||||
let (id,) = info.into_inner();
|
||||
|
||||
let project = database::models::Project::get(&id, &**pool, &redis)
|
||||
.await?
|
||||
.ok_or_else(|| ApiError::InvalidInput(ERROR.to_string()))?;
|
||||
|
||||
let user_option = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_READ]),
|
||||
)
|
||||
.await
|
||||
.map(|x| x.1)
|
||||
.ok();
|
||||
|
||||
if !is_visible_project(&project.inner, &user_option, &pool, false).await? {
|
||||
return Err(ApiError::InvalidInput(ERROR.to_string()));
|
||||
}
|
||||
|
||||
let versions = database::models::Version::get_many(&project.versions, &**pool, &redis).await?;
|
||||
|
||||
let loaders = match &*neo.neoforge {
|
||||
"only" => |x: &String| *x == "neoforge",
|
||||
"include" => |x: &String| *x == "forge" || *x == "neoforge",
|
||||
_ => |x: &String| *x == "forge",
|
||||
};
|
||||
|
||||
let mut versions = filter_visible_versions(
|
||||
versions
|
||||
.into_iter()
|
||||
.filter(|x| x.loaders.iter().any(loaders))
|
||||
.collect(),
|
||||
&user_option,
|
||||
&pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
|
||||
versions.sort_by(|a, b| b.date_published.cmp(&a.date_published));
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct ForgeUpdates {
|
||||
homepage: String,
|
||||
promos: HashMap<String, String>,
|
||||
}
|
||||
|
||||
let mut response = ForgeUpdates {
|
||||
homepage: format!(
|
||||
"{}/mod/{}",
|
||||
dotenvy::var("SITE_URL").unwrap_or_default(),
|
||||
id
|
||||
),
|
||||
promos: HashMap::new(),
|
||||
};
|
||||
|
||||
for version in versions {
|
||||
// For forge in particular, we will hardcode it to use GameVersions rather than generic loader fields, as this is minecraft-java exclusive
|
||||
// Will have duplicates between game_versions (for non-forge loaders), but that's okay as
|
||||
// before v3 this was stored to the project and not the version
|
||||
let game_versions: Vec<String> = version
|
||||
.fields
|
||||
.iter()
|
||||
.find(|(key, _)| key.as_str() == MinecraftGameVersion::FIELD_NAME)
|
||||
.and_then(|(_, value)| serde_json::from_value::<Vec<String>>(value.clone()).ok())
|
||||
.unwrap_or_default();
|
||||
|
||||
if version.version_type == VersionType::Release {
|
||||
for game_version in &game_versions {
|
||||
response
|
||||
.promos
|
||||
.entry(format!("{}-recommended", game_version))
|
||||
.or_insert_with(|| version.version_number.clone());
|
||||
}
|
||||
}
|
||||
|
||||
for game_version in &game_versions {
|
||||
response
|
||||
.promos
|
||||
.entry(format!("{}-latest", game_version))
|
||||
.or_insert_with(|| version.version_number.clone());
|
||||
}
|
||||
}
|
||||
|
||||
Ok(HttpResponse::Ok().json(response))
|
||||
}
|
||||
40
apps/labrinth/src/routes/v2/mod.rs
Normal file
40
apps/labrinth/src/routes/v2/mod.rs
Normal file
@@ -0,0 +1,40 @@
|
||||
mod moderation;
|
||||
mod notifications;
|
||||
pub(crate) mod project_creation;
|
||||
mod projects;
|
||||
mod reports;
|
||||
mod statistics;
|
||||
pub mod tags;
|
||||
mod teams;
|
||||
mod threads;
|
||||
mod users;
|
||||
mod version_creation;
|
||||
pub mod version_file;
|
||||
mod versions;
|
||||
|
||||
pub use super::ApiError;
|
||||
use crate::util::cors::default_cors;
|
||||
|
||||
pub fn config(cfg: &mut actix_web::web::ServiceConfig) {
|
||||
cfg.service(
|
||||
actix_web::web::scope("v2")
|
||||
.wrap(default_cors())
|
||||
.configure(super::internal::admin::config)
|
||||
// Todo: separate these- they need to also follow v2-v3 conversion
|
||||
.configure(super::internal::session::config)
|
||||
.configure(super::internal::flows::config)
|
||||
.configure(super::internal::pats::config)
|
||||
.configure(moderation::config)
|
||||
.configure(notifications::config)
|
||||
.configure(project_creation::config)
|
||||
.configure(projects::config)
|
||||
.configure(reports::config)
|
||||
.configure(statistics::config)
|
||||
.configure(tags::config)
|
||||
.configure(teams::config)
|
||||
.configure(threads::config)
|
||||
.configure(users::config)
|
||||
.configure(version_file::config)
|
||||
.configure(versions::config),
|
||||
);
|
||||
}
|
||||
51
apps/labrinth/src/routes/v2/moderation.rs
Normal file
51
apps/labrinth/src/routes/v2/moderation.rs
Normal file
@@ -0,0 +1,51 @@
|
||||
use super::ApiError;
|
||||
use crate::models::projects::Project;
|
||||
use crate::models::v2::projects::LegacyProject;
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::routes::internal;
|
||||
use crate::{database::redis::RedisPool, routes::v2_reroute};
|
||||
use actix_web::{get, web, HttpRequest, HttpResponse};
|
||||
use serde::Deserialize;
|
||||
use sqlx::PgPool;
|
||||
|
||||
pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
cfg.service(web::scope("moderation").service(get_projects));
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct ResultCount {
|
||||
#[serde(default = "default_count")]
|
||||
pub count: i16,
|
||||
}
|
||||
|
||||
/// Default for `ResultCount::count` when the query parameter is absent.
fn default_count() -> i16 {
    100
}
|
||||
|
||||
#[get("projects")]
|
||||
pub async fn get_projects(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
count: web::Query<ResultCount>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let response = internal::moderation::get_projects(
|
||||
req,
|
||||
pool.clone(),
|
||||
redis.clone(),
|
||||
web::Query(internal::moderation::ResultCount { count: count.count }),
|
||||
session_queue,
|
||||
)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)?;
|
||||
|
||||
// Convert to V2 projects
|
||||
match v2_reroute::extract_ok_json::<Vec<Project>>(response).await {
|
||||
Ok(project) => {
|
||||
let legacy_projects = LegacyProject::from_many(project, &**pool, &redis).await?;
|
||||
Ok(HttpResponse::Ok().json(legacy_projects))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
}
|
||||
146
apps/labrinth/src/routes/v2/notifications.rs
Normal file
146
apps/labrinth/src/routes/v2/notifications.rs
Normal file
@@ -0,0 +1,146 @@
|
||||
use crate::database::redis::RedisPool;
|
||||
use crate::models::ids::NotificationId;
|
||||
use crate::models::notifications::Notification;
|
||||
use crate::models::v2::notifications::LegacyNotification;
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::routes::v2_reroute;
|
||||
use crate::routes::v3;
|
||||
use crate::routes::ApiError;
|
||||
use actix_web::{delete, get, patch, web, HttpRequest, HttpResponse};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::PgPool;
|
||||
|
||||
pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
cfg.service(notifications_get);
|
||||
cfg.service(notifications_delete);
|
||||
cfg.service(notifications_read);
|
||||
|
||||
cfg.service(
|
||||
web::scope("notification")
|
||||
.service(notification_get)
|
||||
.service(notification_read)
|
||||
.service(notification_delete),
|
||||
);
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct NotificationIds {
|
||||
pub ids: String,
|
||||
}
|
||||
|
||||
#[get("notifications")]
|
||||
pub async fn notifications_get(
|
||||
req: HttpRequest,
|
||||
web::Query(ids): web::Query<NotificationIds>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let resp = v3::notifications::notifications_get(
|
||||
req,
|
||||
web::Query(v3::notifications::NotificationIds { ids: ids.ids }),
|
||||
pool,
|
||||
redis,
|
||||
session_queue,
|
||||
)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error);
|
||||
match v2_reroute::extract_ok_json::<Vec<Notification>>(resp?).await {
|
||||
Ok(notifications) => {
|
||||
let notifications: Vec<LegacyNotification> = notifications
|
||||
.into_iter()
|
||||
.map(LegacyNotification::from)
|
||||
.collect();
|
||||
Ok(HttpResponse::Ok().json(notifications))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
}
|
||||
|
||||
#[get("{id}")]
|
||||
pub async fn notification_get(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(NotificationId,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let response = v3::notifications::notification_get(req, info, pool, redis, session_queue)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)?;
|
||||
match v2_reroute::extract_ok_json::<Notification>(response).await {
|
||||
Ok(notification) => {
|
||||
let notification = LegacyNotification::from(notification);
|
||||
Ok(HttpResponse::Ok().json(notification))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
}
|
||||
|
||||
#[patch("{id}")]
|
||||
pub async fn notification_read(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(NotificationId,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
// Returns NoContent, so no need to convert
|
||||
v3::notifications::notification_read(req, info, pool, redis, session_queue)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)
|
||||
}
|
||||
|
||||
#[delete("{id}")]
|
||||
pub async fn notification_delete(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(NotificationId,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
// Returns NoContent, so no need to convert
|
||||
v3::notifications::notification_delete(req, info, pool, redis, session_queue)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)
|
||||
}
|
||||
|
||||
#[patch("notifications")]
|
||||
pub async fn notifications_read(
|
||||
req: HttpRequest,
|
||||
web::Query(ids): web::Query<NotificationIds>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
// Returns NoContent, so no need to convert
|
||||
v3::notifications::notifications_read(
|
||||
req,
|
||||
web::Query(v3::notifications::NotificationIds { ids: ids.ids }),
|
||||
pool,
|
||||
redis,
|
||||
session_queue,
|
||||
)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)
|
||||
}
|
||||
|
||||
#[delete("notifications")]
|
||||
pub async fn notifications_delete(
|
||||
req: HttpRequest,
|
||||
web::Query(ids): web::Query<NotificationIds>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
// Returns NoContent, so no need to convert
|
||||
v3::notifications::notifications_delete(
|
||||
req,
|
||||
web::Query(v3::notifications::NotificationIds { ids: ids.ids }),
|
||||
pool,
|
||||
redis,
|
||||
session_queue,
|
||||
)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)
|
||||
}
|
||||
259
apps/labrinth/src/routes/v2/project_creation.rs
Normal file
259
apps/labrinth/src/routes/v2/project_creation.rs
Normal file
@@ -0,0 +1,259 @@
|
||||
use crate::database::models::version_item;
|
||||
use crate::database::redis::RedisPool;
|
||||
use crate::file_hosting::FileHost;
|
||||
use crate::models;
|
||||
use crate::models::ids::ImageId;
|
||||
use crate::models::projects::{Loader, Project, ProjectStatus};
|
||||
use crate::models::v2::projects::{DonationLink, LegacyProject, LegacySideType};
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::routes::v3::project_creation::default_project_type;
|
||||
use crate::routes::v3::project_creation::{CreateError, NewGalleryItem};
|
||||
use crate::routes::{v2_reroute, v3};
|
||||
use actix_multipart::Multipart;
|
||||
use actix_web::web::Data;
|
||||
use actix_web::{post, HttpRequest, HttpResponse};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::json;
|
||||
use sqlx::postgres::PgPool;
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
use validator::Validate;
|
||||
|
||||
use super::version_creation::InitialVersionData;
|
||||
|
||||
pub fn config(cfg: &mut actix_web::web::ServiceConfig) {
|
||||
cfg.service(project_create);
|
||||
}
|
||||
|
||||
pub fn default_requested_status() -> ProjectStatus {
|
||||
ProjectStatus::Approved
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Validate, Clone)]
|
||||
struct ProjectCreateData {
|
||||
#[validate(
|
||||
length(min = 3, max = 64),
|
||||
custom(function = "crate::util::validate::validate_name")
|
||||
)]
|
||||
#[serde(alias = "mod_name")]
|
||||
/// The title or name of the project.
|
||||
pub title: String,
|
||||
#[validate(length(min = 1, max = 64))]
|
||||
#[serde(default = "default_project_type")]
|
||||
/// The project type of this mod
|
||||
pub project_type: String,
|
||||
#[validate(
|
||||
length(min = 3, max = 64),
|
||||
regex = "crate::util::validate::RE_URL_SAFE"
|
||||
)]
|
||||
#[serde(alias = "mod_slug")]
|
||||
/// The slug of a project, used for vanity URLs
|
||||
pub slug: String,
|
||||
#[validate(length(min = 3, max = 255))]
|
||||
#[serde(alias = "mod_description")]
|
||||
/// A short description of the project.
|
||||
pub description: String,
|
||||
#[validate(length(max = 65536))]
|
||||
#[serde(alias = "mod_body")]
|
||||
/// A long description of the project, in markdown.
|
||||
pub body: String,
|
||||
|
||||
/// The support range for the client project
|
||||
pub client_side: LegacySideType,
|
||||
/// The support range for the server project
|
||||
pub server_side: LegacySideType,
|
||||
|
||||
#[validate(length(max = 32))]
|
||||
#[validate]
|
||||
/// A list of initial versions to upload with the created project
|
||||
pub initial_versions: Vec<InitialVersionData>,
|
||||
#[validate(length(max = 3))]
|
||||
/// A list of the categories that the project is in.
|
||||
pub categories: Vec<String>,
|
||||
#[validate(length(max = 256))]
|
||||
#[serde(default = "Vec::new")]
|
||||
/// A list of the categories that the project is in.
|
||||
pub additional_categories: Vec<String>,
|
||||
|
||||
#[validate(
|
||||
custom(function = "crate::util::validate::validate_url"),
|
||||
length(max = 2048)
|
||||
)]
|
||||
/// An optional link to where to submit bugs or issues with the project.
|
||||
pub issues_url: Option<String>,
|
||||
#[validate(
|
||||
custom(function = "crate::util::validate::validate_url"),
|
||||
length(max = 2048)
|
||||
)]
|
||||
/// An optional link to the source code for the project.
|
||||
pub source_url: Option<String>,
|
||||
#[validate(
|
||||
custom(function = "crate::util::validate::validate_url"),
|
||||
length(max = 2048)
|
||||
)]
|
||||
/// An optional link to the project's wiki page or other relevant information.
|
||||
pub wiki_url: Option<String>,
|
||||
#[validate(
|
||||
custom(function = "crate::util::validate::validate_url"),
|
||||
length(max = 2048)
|
||||
)]
|
||||
/// An optional link to the project's license page
|
||||
pub license_url: Option<String>,
|
||||
#[validate(
|
||||
custom(function = "crate::util::validate::validate_url"),
|
||||
length(max = 2048)
|
||||
)]
|
||||
/// An optional link to the project's discord.
|
||||
pub discord_url: Option<String>,
|
||||
/// An optional list of all donation links the project has\
|
||||
#[validate]
|
||||
pub donation_urls: Option<Vec<DonationLink>>,
|
||||
|
||||
/// An optional boolean. If true, the project will be created as a draft.
|
||||
pub is_draft: Option<bool>,
|
||||
|
||||
/// The license id that the project follows
|
||||
pub license_id: String,
|
||||
|
||||
#[validate(length(max = 64))]
|
||||
#[validate]
|
||||
/// The multipart names of the gallery items to upload
|
||||
pub gallery_items: Option<Vec<NewGalleryItem>>,
|
||||
#[serde(default = "default_requested_status")]
|
||||
/// The status of the mod to be set once it is approved
|
||||
pub requested_status: ProjectStatus,
|
||||
|
||||
// Associations to uploaded images in body/description
|
||||
#[validate(length(max = 10))]
|
||||
#[serde(default)]
|
||||
pub uploaded_images: Vec<ImageId>,
|
||||
|
||||
/// The id of the organization to create the project in
|
||||
pub organization_id: Option<models::ids::OrganizationId>,
|
||||
}
|
||||
|
||||
#[post("project")]
|
||||
pub async fn project_create(
|
||||
req: HttpRequest,
|
||||
payload: Multipart,
|
||||
client: Data<PgPool>,
|
||||
redis: Data<RedisPool>,
|
||||
file_host: Data<Arc<dyn FileHost + Send + Sync>>,
|
||||
session_queue: Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, CreateError> {
|
||||
// Convert V2 multipart payload to V3 multipart payload
|
||||
let payload = v2_reroute::alter_actix_multipart(
|
||||
payload,
|
||||
req.headers().clone(),
|
||||
|legacy_create: ProjectCreateData, _| async move {
|
||||
// Side types will be applied to each version
|
||||
let client_side = legacy_create.client_side;
|
||||
let server_side = legacy_create.server_side;
|
||||
|
||||
let project_type = legacy_create.project_type;
|
||||
|
||||
let initial_versions = legacy_create
|
||||
.initial_versions
|
||||
.into_iter()
|
||||
.map(|v| {
|
||||
let mut fields = HashMap::new();
|
||||
fields.extend(v2_reroute::convert_side_types_v3(client_side, server_side));
|
||||
fields.insert("game_versions".to_string(), json!(v.game_versions));
|
||||
|
||||
// Modpacks now use the "mrpack" loader, and loaders are converted to loader fields.
|
||||
// Setting of 'project_type' directly is removed, it's loader-based now.
|
||||
if project_type == "modpack" {
|
||||
fields.insert("mrpack_loaders".to_string(), json!(v.loaders));
|
||||
}
|
||||
|
||||
let loaders = if project_type == "modpack" {
|
||||
vec![Loader("mrpack".to_string())]
|
||||
} else {
|
||||
v.loaders
|
||||
};
|
||||
|
||||
v3::version_creation::InitialVersionData {
|
||||
project_id: v.project_id,
|
||||
file_parts: v.file_parts,
|
||||
version_number: v.version_number,
|
||||
version_title: v.version_title,
|
||||
version_body: v.version_body,
|
||||
dependencies: v.dependencies,
|
||||
release_channel: v.release_channel,
|
||||
loaders,
|
||||
featured: v.featured,
|
||||
primary_file: v.primary_file,
|
||||
status: v.status,
|
||||
file_types: v.file_types,
|
||||
uploaded_images: v.uploaded_images,
|
||||
ordering: v.ordering,
|
||||
fields,
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
let mut link_urls = HashMap::new();
|
||||
if let Some(issue_url) = legacy_create.issues_url {
|
||||
link_urls.insert("issues".to_string(), issue_url);
|
||||
}
|
||||
if let Some(source_url) = legacy_create.source_url {
|
||||
link_urls.insert("source".to_string(), source_url);
|
||||
}
|
||||
if let Some(wiki_url) = legacy_create.wiki_url {
|
||||
link_urls.insert("wiki".to_string(), wiki_url);
|
||||
}
|
||||
if let Some(discord_url) = legacy_create.discord_url {
|
||||
link_urls.insert("discord".to_string(), discord_url);
|
||||
}
|
||||
if let Some(donation_urls) = legacy_create.donation_urls {
|
||||
for donation_url in donation_urls {
|
||||
link_urls.insert(donation_url.platform, donation_url.url);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(v3::project_creation::ProjectCreateData {
|
||||
name: legacy_create.title,
|
||||
slug: legacy_create.slug,
|
||||
summary: legacy_create.description, // Description becomes summary
|
||||
description: legacy_create.body, // Body becomes description
|
||||
initial_versions,
|
||||
categories: legacy_create.categories,
|
||||
additional_categories: legacy_create.additional_categories,
|
||||
license_url: legacy_create.license_url,
|
||||
link_urls,
|
||||
is_draft: legacy_create.is_draft,
|
||||
license_id: legacy_create.license_id,
|
||||
gallery_items: legacy_create.gallery_items,
|
||||
requested_status: legacy_create.requested_status,
|
||||
uploaded_images: legacy_create.uploaded_images,
|
||||
organization_id: legacy_create.organization_id,
|
||||
})
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
// Call V3 project creation
|
||||
let response = v3::project_creation::project_create(
|
||||
req,
|
||||
payload,
|
||||
client.clone(),
|
||||
redis.clone(),
|
||||
file_host,
|
||||
session_queue,
|
||||
)
|
||||
.await?;
|
||||
|
||||
// Convert response to V2 format
|
||||
match v2_reroute::extract_ok_json::<Project>(response).await {
|
||||
Ok(project) => {
|
||||
let version_item = match project.versions.first() {
|
||||
Some(vid) => version_item::Version::get((*vid).into(), &**client, &redis).await?,
|
||||
None => None,
|
||||
};
|
||||
let project = LegacyProject::from(project, version_item);
|
||||
Ok(HttpResponse::Ok().json(project))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
}
|
||||
907
apps/labrinth/src/routes/v2/projects.rs
Normal file
907
apps/labrinth/src/routes/v2/projects.rs
Normal file
@@ -0,0 +1,907 @@
|
||||
use crate::database::models::categories::LinkPlatform;
|
||||
use crate::database::models::{project_item, version_item};
|
||||
use crate::database::redis::RedisPool;
|
||||
use crate::file_hosting::FileHost;
|
||||
use crate::models::projects::{
|
||||
Link, MonetizationStatus, Project, ProjectStatus, SearchRequest, Version,
|
||||
};
|
||||
use crate::models::v2::projects::{DonationLink, LegacyProject, LegacySideType, LegacyVersion};
|
||||
use crate::models::v2::search::LegacySearchResults;
|
||||
use crate::queue::moderation::AutomatedModerationQueue;
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::routes::v3::projects::ProjectIds;
|
||||
use crate::routes::{v2_reroute, v3, ApiError};
|
||||
use crate::search::{search_for_project, SearchConfig, SearchError};
|
||||
use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::PgPool;
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
use validator::Validate;
|
||||
|
||||
pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
cfg.service(project_search);
|
||||
cfg.service(projects_get);
|
||||
cfg.service(projects_edit);
|
||||
cfg.service(random_projects_get);
|
||||
|
||||
cfg.service(
|
||||
web::scope("project")
|
||||
.service(project_get)
|
||||
.service(project_get_check)
|
||||
.service(project_delete)
|
||||
.service(project_edit)
|
||||
.service(project_icon_edit)
|
||||
.service(delete_project_icon)
|
||||
.service(add_gallery_item)
|
||||
.service(edit_gallery_item)
|
||||
.service(delete_gallery_item)
|
||||
.service(project_follow)
|
||||
.service(project_unfollow)
|
||||
.service(super::teams::team_members_get_project)
|
||||
.service(
|
||||
web::scope("{project_id}")
|
||||
.service(super::versions::version_list)
|
||||
.service(super::versions::version_project_get)
|
||||
.service(dependency_list),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
#[get("search")]
|
||||
pub async fn project_search(
|
||||
web::Query(info): web::Query<SearchRequest>,
|
||||
config: web::Data<SearchConfig>,
|
||||
) -> Result<HttpResponse, SearchError> {
|
||||
// Search now uses loader_fields instead of explicit 'client_side' and 'server_side' fields
|
||||
// While the backend for this has changed, it doesnt affect much
|
||||
// in the API calls except that 'versions:x' is now 'game_versions:x'
|
||||
let facets: Option<Vec<Vec<String>>> = if let Some(facets) = info.facets {
|
||||
let facets = serde_json::from_str::<Vec<Vec<String>>>(&facets)?;
|
||||
|
||||
// These loaders specifically used to be combined with 'mod' to be a plugin, but now
|
||||
// they are their own loader type. We will convert 'mod' to 'mod' OR 'plugin'
|
||||
// as it essentially was before.
|
||||
let facets = v2_reroute::convert_plugin_loader_facets_v3(facets);
|
||||
|
||||
Some(
|
||||
facets
|
||||
.into_iter()
|
||||
.map(|facet| {
|
||||
facet
|
||||
.into_iter()
|
||||
.map(|facet| {
|
||||
if let Some((key, operator, val)) = parse_facet(&facet) {
|
||||
format!(
|
||||
"{}{}{}",
|
||||
match key.as_str() {
|
||||
"versions" => "game_versions",
|
||||
"project_type" => "project_types",
|
||||
"title" => "name",
|
||||
x => x,
|
||||
},
|
||||
operator,
|
||||
val
|
||||
)
|
||||
} else {
|
||||
facet.to_string()
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
.collect(),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let info = SearchRequest {
|
||||
facets: facets.and_then(|x| serde_json::to_string(&x).ok()),
|
||||
..info
|
||||
};
|
||||
|
||||
let results = search_for_project(&info, &config).await?;
|
||||
|
||||
let results = LegacySearchResults::from(results);
|
||||
|
||||
Ok(HttpResponse::Ok().json(results))
|
||||
}
|
||||
|
||||
/// Parses a facet string into its `(key, operator, value)` parts.
///
/// The key is every non-space character before the operator (spaces are
/// skipped). Recognized operators are `:`, `=`, `<`, `>`, `<=`, and `>=`;
/// everything after the operator — including any spaces — is the value.
/// Returns `None` when no operator is present.
fn parse_facet(facet: &str) -> Option<(String, String, String)> {
    let mut key = String::new();
    let mut chars = facet.chars();

    while let Some(c) = chars.next() {
        match c {
            // Single-character operators: the rest of the input is the value.
            ':' | '=' => return Some((key, c.to_string(), chars.collect())),
            // '<' / '>' may be followed by '=' to form a two-character operator.
            '<' | '>' => {
                let mut operator = c.to_string();
                let mut val = String::new();
                if let Some(next) = chars.next() {
                    if next == '=' {
                        operator.push(next);
                    } else {
                        val.push(next);
                    }
                }
                val.push_str(&chars.collect::<String>());
                return Some((key, operator, val));
            }
            // Spaces before the operator are ignored.
            ' ' => continue,
            other => key.push(other),
        }
    }

    None
}
|
||||
|
||||
#[derive(Deserialize, Validate)]
|
||||
pub struct RandomProjects {
|
||||
#[validate(range(min = 1, max = 100))]
|
||||
pub count: u32,
|
||||
}
|
||||
|
||||
#[get("projects_random")]
|
||||
pub async fn random_projects_get(
|
||||
web::Query(count): web::Query<RandomProjects>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let count = v3::projects::RandomProjects { count: count.count };
|
||||
|
||||
let response =
|
||||
v3::projects::random_projects_get(web::Query(count), pool.clone(), redis.clone())
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)
|
||||
.or_else(v2_reroute::flatten_404_error)?;
|
||||
// Convert response to V2 format
|
||||
match v2_reroute::extract_ok_json::<Vec<Project>>(response).await {
|
||||
Ok(project) => {
|
||||
let legacy_projects = LegacyProject::from_many(project, &**pool, &redis).await?;
|
||||
Ok(HttpResponse::Ok().json(legacy_projects))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
}
|
||||
|
||||
#[get("projects")]
|
||||
pub async fn projects_get(
|
||||
req: HttpRequest,
|
||||
web::Query(ids): web::Query<ProjectIds>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
// Call V3 project creation
|
||||
let response = v3::projects::projects_get(
|
||||
req,
|
||||
web::Query(ids),
|
||||
pool.clone(),
|
||||
redis.clone(),
|
||||
session_queue,
|
||||
)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)
|
||||
.or_else(v2_reroute::flatten_404_error)?;
|
||||
|
||||
// Convert response to V2 format
|
||||
match v2_reroute::extract_ok_json::<Vec<Project>>(response).await {
|
||||
Ok(project) => {
|
||||
let legacy_projects = LegacyProject::from_many(project, &**pool, &redis).await?;
|
||||
Ok(HttpResponse::Ok().json(legacy_projects))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
}
|
||||
|
||||
/// `GET /{id}` — fetches a single project (by id or slug) and converts the
/// V3 payload into the legacy V2 project shape.
#[get("{id}")]
pub async fn project_get(
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Delegate to the V3 project-get handler, flattening 404-style errors
    // into plain HTTP responses.
    let response = v3::projects::project_get(req, info, pool.clone(), redis.clone(), session_queue)
        .await
        .or_else(v2_reroute::flatten_404_error)?;

    // Convert response to V2 format; non-JSON/non-OK responses pass through as-is.
    match v2_reroute::extract_ok_json::<Project>(response).await {
        Ok(project) => {
            // The legacy shape needs data from one version; the first listed
            // version is used when the project has any.
            let version_item = match project.versions.first() {
                Some(vid) => version_item::Version::get((*vid).into(), &**pool, &redis).await?,
                None => None,
            };
            let project = LegacyProject::from(project, version_item);
            Ok(HttpResponse::Ok().json(project))
        }
        Err(response) => Ok(response),
    }
}
|
||||
|
||||
//checks the validity of a project id or slug
|
||||
#[get("{id}/check")]
|
||||
pub async fn project_get_check(
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
// Returns an id only, do not need to convert
|
||||
v3::projects::project_get_check(info, pool, redis)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)
|
||||
}
|
||||
|
||||
/// Response body for `GET /{id}/dependencies` in the legacy V2 shape:
/// the projects and versions this project depends on.
#[derive(Serialize)]
struct DependencyInfo {
    pub projects: Vec<LegacyProject>,
    pub versions: Vec<LegacyVersion>,
}
|
||||
|
||||
/// `GET /{id}/dependencies` — lists the projects and versions this project
/// depends on, converted from the V3 payload into the legacy V2 shape.
#[get("dependencies")]
pub async fn dependency_list(
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // TODO: tests, probably
    let response =
        v3::projects::dependency_list(req, info, pool.clone(), redis.clone(), session_queue)
            .await
            .or_else(v2_reroute::flatten_404_error)?;

    // Convert the V3 dependency payload field-by-field; non-JSON/non-OK
    // responses pass through as-is.
    match v2_reroute::extract_ok_json::<crate::routes::v3::projects::DependencyInfo>(response).await
    {
        Ok(dependency_info) => {
            let converted_projects =
                LegacyProject::from_many(dependency_info.projects, &**pool, &redis).await?;
            let converted_versions = dependency_info
                .versions
                .into_iter()
                .map(LegacyVersion::from)
                .collect();

            Ok(HttpResponse::Ok().json(DependencyInfo {
                projects: converted_projects,
                versions: converted_versions,
            }))
        }
        Err(response) => Ok(response),
    }
}
|
||||
|
||||
/// Request body for `PATCH /{id}` (legacy V2 project edit).
///
/// Fields wrapped in `Option<Option<T>>` (via `serde_with::double_option`)
/// distinguish "not provided" (outer `None`) from "explicitly cleared"
/// (inner `None`).
#[derive(Serialize, Deserialize, Validate)]
pub struct EditProject {
    // V2 `title` maps to V3 `name`.
    #[validate(
        length(min = 3, max = 64),
        custom(function = "crate::util::validate::validate_name")
    )]
    pub title: Option<String>,
    // V2 `description` (short text) maps to V3 `summary`.
    #[validate(length(min = 3, max = 256))]
    pub description: Option<String>,
    // V2 `body` (long markdown) maps to V3 `description`.
    #[validate(length(max = 65536))]
    pub body: Option<String>,
    #[validate(length(max = 3))]
    pub categories: Option<Vec<String>>,
    #[validate(length(max = 256))]
    pub additional_categories: Option<Vec<String>>,
    #[serde(
        default,
        skip_serializing_if = "Option::is_none",
        with = "::serde_with::rust::double_option"
    )]
    #[validate(
        custom(function = "crate::util::validate::validate_url"),
        length(max = 2048)
    )]
    pub issues_url: Option<Option<String>>,
    #[serde(
        default,
        skip_serializing_if = "Option::is_none",
        with = "::serde_with::rust::double_option"
    )]
    #[validate(
        custom(function = "crate::util::validate::validate_url"),
        length(max = 2048)
    )]
    pub source_url: Option<Option<String>>,
    #[serde(
        default,
        skip_serializing_if = "Option::is_none",
        with = "::serde_with::rust::double_option"
    )]
    #[validate(
        custom(function = "crate::util::validate::validate_url"),
        length(max = 2048)
    )]
    pub wiki_url: Option<Option<String>>,
    #[serde(
        default,
        skip_serializing_if = "Option::is_none",
        with = "::serde_with::rust::double_option"
    )]
    #[validate(
        custom(function = "crate::util::validate::validate_url"),
        length(max = 2048)
    )]
    pub license_url: Option<Option<String>>,
    #[serde(
        default,
        skip_serializing_if = "Option::is_none",
        with = "::serde_with::rust::double_option"
    )]
    #[validate(
        custom(function = "crate::util::validate::validate_url"),
        length(max = 2048)
    )]
    pub discord_url: Option<Option<String>>,
    // Setting this replaces ALL existing donation links (see project_edit).
    #[validate]
    pub donation_urls: Option<Vec<DonationLink>>,
    pub license_id: Option<String>,
    // Side types live on versions in V3; these trigger per-version edits.
    pub client_side: Option<LegacySideType>,
    pub server_side: Option<LegacySideType>,
    #[validate(
        length(min = 3, max = 64),
        regex = "crate::util::validate::RE_URL_SAFE"
    )]
    pub slug: Option<String>,
    pub status: Option<ProjectStatus>,
    #[serde(
        default,
        skip_serializing_if = "Option::is_none",
        with = "::serde_with::rust::double_option"
    )]
    pub requested_status: Option<Option<ProjectStatus>>,
    #[serde(
        default,
        skip_serializing_if = "Option::is_none",
        with = "::serde_with::rust::double_option"
    )]
    #[validate(length(max = 2000))]
    pub moderation_message: Option<Option<String>>,
    #[serde(
        default,
        skip_serializing_if = "Option::is_none",
        with = "::serde_with::rust::double_option"
    )]
    #[validate(length(max = 65536))]
    pub moderation_message_body: Option<Option<String>>,
    pub monetization_status: Option<MonetizationStatus>,
}
|
||||
|
||||
/// `PATCH /{id}` — edits a project via the V3 endpoint, translating the
/// legacy V2 body (separate URL fields, donation links, side types) into
/// the V3 representation (a single `link_urls` map; side types stored as
/// version fields).
#[patch("{id}")]
#[allow(clippy::too_many_arguments)]
pub async fn project_edit(
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    search_config: web::Data<SearchConfig>,
    new_project: web::Json<EditProject>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
    moderation_queue: web::Data<AutomatedModerationQueue>,
) -> Result<HttpResponse, ApiError> {
    let v2_new_project = new_project.into_inner();
    // Side types are handled separately after the main edit succeeds.
    let client_side = v2_new_project.client_side;
    let server_side = v2_new_project.server_side;
    // Kept so the project can be re-fetched by its new slug after the edit.
    let new_slug = v2_new_project.slug.clone();

    // TODO: Some kind of handling here to ensure project type is fine.
    // We expect the version uploaded to be of loader type modpack, but there might not be a way to check here for that.
    // After all, theoretically, they could be creating a genuine 'fabric' mod, and modpack no longer carries information on whether its a mod or modpack,
    // as those are out to the versions.

    // Ideally this would, if the project 'should' be a modpack:
    // - change the loaders to mrpack only
    // - add categories to the project for the corresponding loaders

    // Collapse the individual V2 URL fields into the V3 link map.
    // `Some(url)` sets the link; `None` clears it.
    let mut new_links = HashMap::new();
    if let Some(issues_url) = v2_new_project.issues_url {
        if let Some(issues_url) = issues_url {
            new_links.insert("issues".to_string(), Some(issues_url));
        } else {
            new_links.insert("issues".to_string(), None);
        }
    }

    if let Some(source_url) = v2_new_project.source_url {
        if let Some(source_url) = source_url {
            new_links.insert("source".to_string(), Some(source_url));
        } else {
            new_links.insert("source".to_string(), None);
        }
    }

    if let Some(wiki_url) = v2_new_project.wiki_url {
        if let Some(wiki_url) = wiki_url {
            new_links.insert("wiki".to_string(), Some(wiki_url));
        } else {
            new_links.insert("wiki".to_string(), None);
        }
    }

    if let Some(discord_url) = v2_new_project.discord_url {
        if let Some(discord_url) = discord_url {
            new_links.insert("discord".to_string(), Some(discord_url));
        } else {
            new_links.insert("discord".to_string(), None);
        }
    }

    // In v2, setting donation links resets all other donation links
    // (resetting to the new ones)
    if let Some(donation_urls) = v2_new_project.donation_urls {
        // Fetch current donation links from project so we know what to delete
        let fetched_example_project = project_item::Project::get(&info.0, &**pool, &redis).await?;
        let donation_links = fetched_example_project
            .map(|x| {
                x.urls
                    .into_iter()
                    .filter_map(|l| {
                        if l.donation {
                            Some(Link::from(l)) // TODO: tests
                        } else {
                            None
                        }
                    })
                    .collect::<Vec<_>>()
            })
            .unwrap_or_default();

        // Set existing donation links to None
        for old_link in donation_links {
            new_links.insert(old_link.platform, None);
        }

        // Add new donation links
        // (must run AFTER the clearing pass so re-added platforms survive)
        for donation_url in donation_urls {
            new_links.insert(donation_url.id, Some(donation_url.url));
        }
    }

    // Map the remaining V2 fields onto the V3 edit body.
    let new_project = v3::projects::EditProject {
        name: v2_new_project.title,
        summary: v2_new_project.description, // Description becomes summary
        description: v2_new_project.body,    // Body becomes description
        categories: v2_new_project.categories,
        additional_categories: v2_new_project.additional_categories,
        license_url: v2_new_project.license_url,
        link_urls: Some(new_links),
        license_id: v2_new_project.license_id,
        slug: v2_new_project.slug,
        status: v2_new_project.status,
        requested_status: v2_new_project.requested_status,
        moderation_message: v2_new_project.moderation_message,
        moderation_message_body: v2_new_project.moderation_message_body,
        monetization_status: v2_new_project.monetization_status,
    };

    // This returns 204 or failure so we don't need to do anything with it
    let project_id = info.clone().0;
    let mut response = v3::projects::project_edit(
        req.clone(),
        info,
        pool.clone(),
        search_config,
        web::Json(new_project),
        redis.clone(),
        session_queue.clone(),
        moderation_queue,
    )
    .await
    .or_else(v2_reroute::flatten_404_error)?;

    // If client and server side were set, we will call
    // the version setting route for each version to set the side types for each of them.
    if response.status().is_success() && (client_side.is_some() || server_side.is_some()) {
        // Re-fetch under the new slug if one was set, since the edit above
        // may have renamed the project.
        let project_item =
            project_item::Project::get(&new_slug.unwrap_or(project_id), &**pool, &redis).await?;
        let version_ids = project_item.map(|x| x.versions).unwrap_or_default();
        let versions = version_item::Version::get_many(&version_ids, &**pool, &redis).await?;
        for version in versions {
            let version = Version::from(version);
            let mut fields = version.fields;
            // Preserve whichever side the caller did not override.
            let (current_client_side, current_server_side) =
                v2_reroute::convert_side_types_v2(&fields, None);
            let client_side = client_side.unwrap_or(current_client_side);
            let server_side = server_side.unwrap_or(current_server_side);
            fields.extend(v2_reroute::convert_side_types_v3(client_side, server_side));

            response = v3::versions::version_edit_helper(
                req.clone(),
                (version.id,),
                pool.clone(),
                redis.clone(),
                v3::versions::EditVersion {
                    fields,
                    ..Default::default()
                },
                session_queue.clone(),
            )
            .await?;
        }
    }
    Ok(response)
}
|
||||
|
||||
/// Request body for `PATCH /projects` (legacy V2 bulk project edit).
///
/// For categories and donation links, the plain field replaces the whole
/// set, while `add_*` / `remove_*` apply incremental changes. URL fields
/// use `Option<Option<T>>` to distinguish "unset" from "explicitly cleared".
#[derive(Deserialize, Validate)]
pub struct BulkEditProject {
    #[validate(length(max = 3))]
    pub categories: Option<Vec<String>>,
    #[validate(length(max = 3))]
    pub add_categories: Option<Vec<String>>,
    pub remove_categories: Option<Vec<String>>,

    #[validate(length(max = 256))]
    pub additional_categories: Option<Vec<String>>,
    #[validate(length(max = 3))]
    pub add_additional_categories: Option<Vec<String>>,
    pub remove_additional_categories: Option<Vec<String>>,

    // Replaces ALL donation links when present (see projects_edit).
    #[validate]
    pub donation_urls: Option<Vec<DonationLink>>,
    #[validate]
    pub add_donation_urls: Option<Vec<DonationLink>>,
    #[validate]
    pub remove_donation_urls: Option<Vec<DonationLink>>,

    #[serde(
        default,
        skip_serializing_if = "Option::is_none",
        with = "::serde_with::rust::double_option"
    )]
    #[validate(
        custom(function = "crate::util::validate::validate_url"),
        length(max = 2048)
    )]
    pub issues_url: Option<Option<String>>,
    #[serde(
        default,
        skip_serializing_if = "Option::is_none",
        with = "::serde_with::rust::double_option"
    )]
    #[validate(
        custom(function = "crate::util::validate::validate_url"),
        length(max = 2048)
    )]
    pub source_url: Option<Option<String>>,
    #[serde(
        default,
        skip_serializing_if = "Option::is_none",
        with = "::serde_with::rust::double_option"
    )]
    #[validate(
        custom(function = "crate::util::validate::validate_url"),
        length(max = 2048)
    )]
    pub wiki_url: Option<Option<String>>,
    #[serde(
        default,
        skip_serializing_if = "Option::is_none",
        with = "::serde_with::rust::double_option"
    )]
    #[validate(
        custom(function = "crate::util::validate::validate_url"),
        length(max = 2048)
    )]
    pub discord_url: Option<Option<String>>,
}
|
||||
|
||||
/// `PATCH /projects` — bulk-edits projects via the V3 endpoint, translating
/// the legacy V2 link fields into the single V3 `link_urls` map.
///
/// Insertion order into `link_urls` matters: the full-replace pass for
/// donation links runs first, then removals, then additions, so later
/// passes can override earlier ones for the same platform key.
#[patch("projects")]
pub async fn projects_edit(
    req: HttpRequest,
    web::Query(ids): web::Query<ProjectIds>,
    pool: web::Data<PgPool>,
    bulk_edit_project: web::Json<BulkEditProject>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let bulk_edit_project = bulk_edit_project.into_inner();

    let mut link_urls = HashMap::new();

    // If we are *setting* donation links, we will set every possible donation link to None, as
    // setting will delete all of them then 're-add' the ones we want to keep
    if let Some(donation_url) = bulk_edit_project.donation_urls {
        let link_platforms = LinkPlatform::list(&**pool, &redis).await?;
        for link in link_platforms {
            if link.donation {
                link_urls.insert(link.name, None);
            }
        }
        // add
        for donation_url in donation_url {
            link_urls.insert(donation_url.id, Some(donation_url.url));
        }
    }

    // For every delete, we will set the link to None
    if let Some(donation_url) = bulk_edit_project.remove_donation_urls {
        for donation_url in donation_url {
            link_urls.insert(donation_url.id, None);
        }
    }

    // For every add, we will set the link to the new url
    if let Some(donation_url) = bulk_edit_project.add_donation_urls {
        for donation_url in donation_url {
            link_urls.insert(donation_url.id, Some(donation_url.url));
        }
    }

    // Collapse the individual V2 URL fields into the link map:
    // `Some(url)` sets the link, `None` clears it.
    if let Some(issue_url) = bulk_edit_project.issues_url {
        if let Some(issue_url) = issue_url {
            link_urls.insert("issues".to_string(), Some(issue_url));
        } else {
            link_urls.insert("issues".to_string(), None);
        }
    }

    if let Some(source_url) = bulk_edit_project.source_url {
        if let Some(source_url) = source_url {
            link_urls.insert("source".to_string(), Some(source_url));
        } else {
            link_urls.insert("source".to_string(), None);
        }
    }

    if let Some(wiki_url) = bulk_edit_project.wiki_url {
        if let Some(wiki_url) = wiki_url {
            link_urls.insert("wiki".to_string(), Some(wiki_url));
        } else {
            link_urls.insert("wiki".to_string(), None);
        }
    }

    if let Some(discord_url) = bulk_edit_project.discord_url {
        if let Some(discord_url) = discord_url {
            link_urls.insert("discord".to_string(), Some(discord_url));
        } else {
            link_urls.insert("discord".to_string(), None);
        }
    }

    // This returns NoContent or failure so we don't need to do anything with it
    v3::projects::projects_edit(
        req,
        web::Query(ids),
        pool.clone(),
        web::Json(v3::projects::BulkEditProject {
            categories: bulk_edit_project.categories,
            add_categories: bulk_edit_project.add_categories,
            remove_categories: bulk_edit_project.remove_categories,
            additional_categories: bulk_edit_project.additional_categories,
            add_additional_categories: bulk_edit_project.add_additional_categories,
            remove_additional_categories: bulk_edit_project.remove_additional_categories,
            link_urls: Some(link_urls),
        }),
        redis,
        session_queue,
    )
    .await
    .or_else(v2_reroute::flatten_404_error)
}
|
||||
|
||||
/// Query parameter carrying an uploaded file's extension
/// (used by the icon and gallery upload endpoints).
#[derive(Serialize, Deserialize)]
pub struct Extension {
    pub ext: String,
}
|
||||
|
||||
#[patch("{id}/icon")]
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub async fn project_icon_edit(
|
||||
web::Query(ext): web::Query<Extension>,
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
|
||||
payload: web::Payload,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
// Returns NoContent, so no need to convert
|
||||
v3::projects::project_icon_edit(
|
||||
web::Query(v3::projects::Extension { ext: ext.ext }),
|
||||
req,
|
||||
info,
|
||||
pool,
|
||||
redis,
|
||||
file_host,
|
||||
payload,
|
||||
session_queue,
|
||||
)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)
|
||||
}
|
||||
|
||||
#[delete("{id}/icon")]
|
||||
pub async fn delete_project_icon(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
// Returns NoContent, so no need to convert
|
||||
v3::projects::delete_project_icon(req, info, pool, redis, file_host, session_queue)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)
|
||||
}
|
||||
|
||||
/// Query parameters for `POST /{id}/gallery` (legacy V2 gallery upload).
#[derive(Serialize, Deserialize, Validate)]
pub struct GalleryCreateQuery {
    pub featured: bool,
    // V2 `title` maps to V3 `name`.
    #[validate(length(min = 1, max = 255))]
    pub title: Option<String>,
    #[validate(length(min = 1, max = 2048))]
    pub description: Option<String>,
    pub ordering: Option<i64>,
}
|
||||
|
||||
/// `POST /{id}/gallery` — uploads a gallery image, mapping the V2 query
/// (`title`) onto the V3 query (`name`).
#[post("{id}/gallery")]
#[allow(clippy::too_many_arguments)]
pub async fn add_gallery_item(
    web::Query(ext): web::Query<Extension>,
    req: HttpRequest,
    web::Query(item): web::Query<GalleryCreateQuery>,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
    payload: web::Payload,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Returns NoContent, so no need to convert
    v3::projects::add_gallery_item(
        web::Query(v3::projects::Extension { ext: ext.ext }),
        req,
        web::Query(v3::projects::GalleryCreateQuery {
            featured: item.featured,
            // V2 `title` becomes V3 `name`.
            name: item.title,
            description: item.description,
            ordering: item.ordering,
        }),
        info,
        pool,
        redis,
        file_host,
        payload,
        session_queue,
    )
    .await
    .or_else(v2_reroute::flatten_404_error)
}
|
||||
|
||||
/// Query parameters for `PATCH /{id}/gallery` (legacy V2 gallery edit).
///
/// `Option<Option<T>>` fields distinguish "not provided" from
/// "explicitly cleared".
#[derive(Serialize, Deserialize, Validate)]
pub struct GalleryEditQuery {
    /// The url of the gallery item to edit
    pub url: String,
    pub featured: Option<bool>,
    // V2 `title` maps to V3 `name`.
    #[serde(
        default,
        skip_serializing_if = "Option::is_none",
        with = "::serde_with::rust::double_option"
    )]
    #[validate(length(min = 1, max = 255))]
    pub title: Option<Option<String>>,
    #[serde(
        default,
        skip_serializing_if = "Option::is_none",
        with = "::serde_with::rust::double_option"
    )]
    #[validate(length(min = 1, max = 2048))]
    pub description: Option<Option<String>>,
    pub ordering: Option<i64>,
}
|
||||
|
||||
#[patch("{id}/gallery")]
|
||||
pub async fn edit_gallery_item(
|
||||
req: HttpRequest,
|
||||
web::Query(item): web::Query<GalleryEditQuery>,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
// Returns NoContent, so no need to convert
|
||||
v3::projects::edit_gallery_item(
|
||||
req,
|
||||
web::Query(v3::projects::GalleryEditQuery {
|
||||
url: item.url,
|
||||
featured: item.featured,
|
||||
name: item.title,
|
||||
description: item.description,
|
||||
ordering: item.ordering,
|
||||
}),
|
||||
info,
|
||||
pool,
|
||||
redis,
|
||||
session_queue,
|
||||
)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)
|
||||
}
|
||||
|
||||
/// Query parameter identifying the gallery item to delete by its url.
#[derive(Serialize, Deserialize)]
pub struct GalleryDeleteQuery {
    pub url: String,
}
|
||||
|
||||
#[delete("{id}/gallery")]
|
||||
pub async fn delete_gallery_item(
|
||||
req: HttpRequest,
|
||||
web::Query(item): web::Query<GalleryDeleteQuery>,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
// Returns NoContent, so no need to convert
|
||||
v3::projects::delete_gallery_item(
|
||||
req,
|
||||
web::Query(v3::projects::GalleryDeleteQuery { url: item.url }),
|
||||
info,
|
||||
pool,
|
||||
redis,
|
||||
file_host,
|
||||
session_queue,
|
||||
)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)
|
||||
}
|
||||
|
||||
#[delete("{id}")]
|
||||
pub async fn project_delete(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
search_config: web::Data<SearchConfig>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
// Returns NoContent, so no need to convert
|
||||
v3::projects::project_delete(req, info, pool, redis, search_config, session_queue)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)
|
||||
}
|
||||
|
||||
#[post("{id}/follow")]
|
||||
pub async fn project_follow(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
// Returns NoContent, so no need to convert
|
||||
v3::projects::project_follow(req, info, pool, redis, session_queue)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)
|
||||
}
|
||||
|
||||
#[delete("{id}/follow")]
|
||||
pub async fn project_unfollow(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
// Returns NoContent, so no need to convert
|
||||
v3::projects::project_unfollow(req, info, pool, redis, session_queue)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)
|
||||
}
|
||||
188
apps/labrinth/src/routes/v2/reports.rs
Normal file
188
apps/labrinth/src/routes/v2/reports.rs
Normal file
@@ -0,0 +1,188 @@
|
||||
use crate::database::redis::RedisPool;
|
||||
use crate::models::reports::Report;
|
||||
use crate::models::v2::reports::LegacyReport;
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::routes::{v2_reroute, v3, ApiError};
|
||||
use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
|
||||
use serde::Deserialize;
|
||||
use sqlx::PgPool;
|
||||
use validator::Validate;
|
||||
|
||||
pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
cfg.service(reports_get);
|
||||
cfg.service(reports);
|
||||
cfg.service(report_create);
|
||||
cfg.service(report_edit);
|
||||
cfg.service(report_delete);
|
||||
cfg.service(report_get);
|
||||
}
|
||||
|
||||
/// `POST /report` — files a new report, converting the V3 response into the
/// legacy V2 report shape.
#[post("report")]
pub async fn report_create(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    body: web::Payload,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // The raw payload is forwarded untouched; the V3 handler parses it.
    let response = v3::reports::report_create(req, pool, body, redis, session_queue)
        .await
        .or_else(v2_reroute::flatten_404_error)?;

    // Convert response to V2 format; non-JSON/non-OK responses pass through.
    match v2_reroute::extract_ok_json::<Report>(response).await {
        Ok(report) => {
            let report = LegacyReport::from(report);
            Ok(HttpResponse::Ok().json(report))
        }
        Err(response) => Ok(response),
    }
}
|
||||
|
||||
/// Query parameters for `GET /report`.
#[derive(Deserialize)]
pub struct ReportsRequestOptions {
    // Maximum number of reports to return; defaults to 100.
    #[serde(default = "default_count")]
    count: i16,
    // Whether to return all reports visible to the caller; defaults to true.
    #[serde(default = "default_all")]
    all: bool,
}
|
||||
|
||||
// Serde default for `ReportsRequestOptions::count`.
fn default_count() -> i16 {
    100
}
// Serde default for `ReportsRequestOptions::all`.
fn default_all() -> bool {
    true
}
|
||||
|
||||
/// `GET /report` — lists reports visible to the caller, converting the V3
/// payload into the legacy V2 report shape.
#[get("report")]
pub async fn reports(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    count: web::Query<ReportsRequestOptions>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Re-wrap the query in its V3 form before delegating.
    let response = v3::reports::reports(
        req,
        pool,
        redis,
        web::Query(v3::reports::ReportsRequestOptions {
            count: count.count,
            all: count.all,
        }),
        session_queue,
    )
    .await
    .or_else(v2_reroute::flatten_404_error)?;

    // Convert response to V2 format; non-JSON/non-OK responses pass through.
    match v2_reroute::extract_ok_json::<Vec<Report>>(response).await {
        Ok(reports) => {
            let reports: Vec<_> = reports.into_iter().map(LegacyReport::from).collect();
            Ok(HttpResponse::Ok().json(reports))
        }
        Err(response) => Ok(response),
    }
}
|
||||
|
||||
/// Query parameter carrying the report ids for `GET /reports`
/// (serialized list in a single string, parsed by the V3 handler).
#[derive(Deserialize)]
pub struct ReportIds {
    pub ids: String,
}
|
||||
|
||||
#[get("reports")]
|
||||
pub async fn reports_get(
|
||||
req: HttpRequest,
|
||||
web::Query(ids): web::Query<ReportIds>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let response = v3::reports::reports_get(
|
||||
req,
|
||||
web::Query(v3::reports::ReportIds { ids: ids.ids }),
|
||||
pool,
|
||||
redis,
|
||||
session_queue,
|
||||
)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)?;
|
||||
|
||||
// Convert response to V2 format
|
||||
match v2_reroute::extract_ok_json::<Vec<Report>>(response).await {
|
||||
Ok(report_list) => {
|
||||
let report_list: Vec<_> = report_list.into_iter().map(LegacyReport::from).collect();
|
||||
Ok(HttpResponse::Ok().json(report_list))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
}
|
||||
|
||||
/// `GET /report/{id}` — fetches a single report, converting the V3 payload
/// into the legacy V2 report shape.
#[get("report/{id}")]
pub async fn report_get(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    info: web::Path<(crate::models::reports::ReportId,)>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let response = v3::reports::report_get(req, pool, redis, info, session_queue)
        .await
        .or_else(v2_reroute::flatten_404_error)?;

    // Convert response to V2 format; non-JSON/non-OK responses pass through.
    match v2_reroute::extract_ok_json::<Report>(response).await {
        Ok(report) => {
            let report = LegacyReport::from(report);
            Ok(HttpResponse::Ok().json(report))
        }
        Err(response) => Ok(response),
    }
}
|
||||
|
||||
/// Request body for `PATCH /report/{id}` (legacy V2 report edit).
#[derive(Deserialize, Validate)]
pub struct EditReport {
    #[validate(length(max = 65536))]
    pub body: Option<String>,
    pub closed: Option<bool>,
}
|
||||
|
||||
#[patch("report/{id}")]
|
||||
pub async fn report_edit(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
info: web::Path<(crate::models::reports::ReportId,)>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
edit_report: web::Json<EditReport>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let edit_report = edit_report.into_inner();
|
||||
// Returns NoContent, so no need to convert
|
||||
v3::reports::report_edit(
|
||||
req,
|
||||
pool,
|
||||
redis,
|
||||
info,
|
||||
session_queue,
|
||||
web::Json(v3::reports::EditReport {
|
||||
body: edit_report.body,
|
||||
closed: edit_report.closed,
|
||||
}),
|
||||
)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)
|
||||
}
|
||||
|
||||
#[delete("report/{id}")]
|
||||
pub async fn report_delete(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
info: web::Path<(crate::models::reports::ReportId,)>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
// Returns NoContent, so no need to convert
|
||||
v3::reports::report_delete(req, pool, info, redis, session_queue)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)
|
||||
}
|
||||
39
apps/labrinth/src/routes/v2/statistics.rs
Normal file
39
apps/labrinth/src/routes/v2/statistics.rs
Normal file
@@ -0,0 +1,39 @@
|
||||
use crate::routes::{
|
||||
v2_reroute,
|
||||
v3::{self, statistics::V3Stats},
|
||||
ApiError,
|
||||
};
|
||||
use actix_web::{get, web, HttpResponse};
|
||||
use sqlx::PgPool;
|
||||
|
||||
/// Registers the v2 statistics endpoint.
pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.service(get_stats);
}
|
||||
|
||||
/// Response body for `GET /statistics` in the legacy V2 shape.
/// Fields mirror `V3Stats` one-to-one.
#[derive(serde::Serialize)]
pub struct V2Stats {
    pub projects: Option<i64>,
    pub versions: Option<i64>,
    pub authors: Option<i64>,
    pub files: Option<i64>,
}
|
||||
|
||||
#[get("statistics")]
|
||||
pub async fn get_stats(pool: web::Data<PgPool>) -> Result<HttpResponse, ApiError> {
|
||||
let response = v3::statistics::get_stats(pool)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)?;
|
||||
|
||||
match v2_reroute::extract_ok_json::<V3Stats>(response).await {
|
||||
Ok(stats) => {
|
||||
let stats = V2Stats {
|
||||
projects: stats.projects,
|
||||
versions: stats.versions,
|
||||
authors: stats.authors,
|
||||
files: stats.files,
|
||||
};
|
||||
Ok(HttpResponse::Ok().json(stats))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
}
|
||||
314
apps/labrinth/src/routes/v2/tags.rs
Normal file
314
apps/labrinth/src/routes/v2/tags.rs
Normal file
@@ -0,0 +1,314 @@
|
||||
use std::collections::HashMap;
|
||||
|
||||
use super::ApiError;
|
||||
use crate::database::models::loader_fields::LoaderFieldEnumValue;
|
||||
use crate::database::redis::RedisPool;
|
||||
use crate::models::v2::projects::LegacySideType;
|
||||
use crate::routes::v2_reroute::capitalize_first;
|
||||
use crate::routes::v3::tags::{LinkPlatformQueryData, LoaderFieldsEnumQuery};
|
||||
use crate::routes::{v2_reroute, v3};
|
||||
use actix_web::{get, web, HttpResponse};
|
||||
use chrono::{DateTime, Utc};
|
||||
use itertools::Itertools;
|
||||
use sqlx::PgPool;
|
||||
|
||||
/// Registers every v2 `tag/*` route under the `tag` scope.
pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.service(
        web::scope("tag")
            .service(category_list)
            .service(loader_list)
            .service(game_version_list)
            .service(license_list)
            .service(license_text)
            .service(donation_platform_list)
            .service(report_type_list)
            .service(project_type_list)
            .service(side_type_list),
    );
}
|
||||
|
||||
/// A project category tag in the legacy v2 shape.
#[derive(serde::Serialize, serde::Deserialize)]
pub struct CategoryData {
    // SVG icon markup / identifier for the category.
    pub icon: String,
    pub name: String,
    // Project type this category applies to.
    pub project_type: String,
    // Display grouping header the category is listed under.
    pub header: String,
}
|
||||
|
||||
#[get("category")]
|
||||
pub async fn category_list(
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let response = v3::tags::category_list(pool, redis).await?;
|
||||
|
||||
// Convert to V2 format
|
||||
match v2_reroute::extract_ok_json::<Vec<v3::tags::CategoryData>>(response).await {
|
||||
Ok(categories) => {
|
||||
let categories = categories
|
||||
.into_iter()
|
||||
.map(|c| CategoryData {
|
||||
icon: c.icon,
|
||||
name: c.name,
|
||||
project_type: c.project_type,
|
||||
header: c.header,
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
Ok(HttpResponse::Ok().json(categories))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
}
|
||||
|
||||
/// A loader tag in the legacy v2 shape.
#[derive(serde::Serialize, serde::Deserialize)]
pub struct LoaderData {
    // SVG icon markup / identifier for the loader.
    pub icon: String,
    pub name: String,
    // Project types this loader can be used with (see `loader_list` for the
    // v2-specific additions applied to the v3 data).
    pub supported_project_types: Vec<String>,
}
|
||||
|
||||
/// Lists loaders in the legacy v2 shape.
///
/// v2-specific adjustments applied to the v3 data:
/// - the internal "mrpack" loader is hidden from v2 clients;
/// - every loader gains a generic "project" type (the v2 representation of a
///   project before any versions are set);
/// - the classic mod loaders (forge/fabric/quilt/neoforge) also advertise
///   "modpack";
/// - loaders supporting "datapack" or "plugin" also advertise "mod".
#[get("loader")]
pub async fn loader_list(
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    let response = v3::tags::loader_list(pool, redis).await?;

    // Convert to V2 format; non-JSON responses are passed through untouched.
    match v2_reroute::extract_ok_json::<Vec<v3::tags::LoaderData>>(response).await {
        Ok(loaders) => {
            let loaders = loaders
                .into_iter()
                // "mrpack" is a v3 internal loader, not exposed in v2.
                .filter(|l| &*l.name != "mrpack")
                .map(|l| {
                    let mut supported_project_types = l.supported_project_types;
                    // Add generic 'project' type to all loaders, which is the v2 representation of
                    // a project type before any versions are set.
                    supported_project_types.push("project".to_string());

                    if ["forge", "fabric", "quilt", "neoforge"].contains(&&*l.name) {
                        supported_project_types.push("modpack".to_string());
                    }

                    if supported_project_types.contains(&"datapack".to_string())
                        || supported_project_types.contains(&"plugin".to_string())
                    {
                        supported_project_types.push("mod".to_string());
                    }

                    LoaderData {
                        icon: l.icon,
                        name: l.name,
                        supported_project_types,
                    }
                })
                .collect::<Vec<_>>();
            Ok(HttpResponse::Ok().json(loaders))
        }
        Err(response) => Ok(response),
    }
}
|
||||
|
||||
/// A game version tag in the legacy v2 shape.
#[derive(serde::Serialize, serde::Deserialize)]
pub struct GameVersionQueryData {
    // Version string, e.g. "1.20.4".
    pub version: String,
    // Release channel, e.g. "release"/"snapshot" (taken from loader-field
    // metadata in v3 — see `game_version_list`).
    pub version_type: String,
    // When the version entry was created.
    pub date: DateTime<Utc>,
    // Whether this is a "major" version.
    pub major: bool,
}
|
||||
|
||||
/// Query parameters accepted by the v2 `tag/game_version` endpoint.
#[derive(serde::Deserialize)]
pub struct GameVersionQuery {
    // Filter by release channel; serialized as `type` (a Rust keyword).
    #[serde(rename = "type")]
    type_: Option<String>,
    // Filter to major versions only.
    major: Option<bool>,
}
|
||||
|
||||
/// Lists game versions in the legacy v2 shape.
///
/// In v3, game versions are loader-field enum values, so this queries the
/// "game_versions" loader field (optionally filtered by `type`/`major`) and
/// reshapes each enum value's metadata into `GameVersionQueryData`.
#[get("game_version")]
pub async fn game_version_list(
    pool: web::Data<PgPool>,
    query: web::Query<GameVersionQuery>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    // Translate v2 query params into v3 loader-field metadata filters.
    let mut filters = HashMap::new();
    if let Some(type_) = &query.type_ {
        filters.insert("type".to_string(), serde_json::json!(type_));
    }
    if let Some(major) = query.major {
        filters.insert("major".to_string(), serde_json::json!(major));
    }
    let response = v3::tags::loader_fields_list(
        pool,
        web::Query(LoaderFieldsEnumQuery {
            loader_field: "game_versions".to_string(),
            filters: Some(filters),
        }),
        redis,
    )
    .await?;

    // Convert to V2 format; non-JSON responses pass through untouched.
    // Missing metadata keys fall back to defaults ("" / false).
    Ok(
        match v2_reroute::extract_ok_json::<Vec<LoaderFieldEnumValue>>(response).await {
            Ok(fields) => {
                let fields = fields
                    .into_iter()
                    .map(|f| GameVersionQueryData {
                        version: f.value,
                        version_type: f
                            .metadata
                            .get("type")
                            .and_then(|m| m.as_str())
                            .unwrap_or_default()
                            .to_string(),
                        date: f.created,
                        major: f
                            .metadata
                            .get("major")
                            .and_then(|m| m.as_bool())
                            .unwrap_or_default(),
                    })
                    .collect::<Vec<_>>();
                HttpResponse::Ok().json(fields)
            }
            Err(response) => response,
        },
    )
}
|
||||
|
||||
/// A license tag in the legacy v2 shape.
#[derive(serde::Serialize)]
pub struct License {
    // Short identifier, e.g. an SPDX id.
    pub short: String,
    // Human-readable license name.
    pub name: String,
}
|
||||
|
||||
#[get("license")]
|
||||
pub async fn license_list() -> HttpResponse {
|
||||
let response = v3::tags::license_list().await;
|
||||
|
||||
// Convert to V2 format
|
||||
match v2_reroute::extract_ok_json::<Vec<v3::tags::License>>(response).await {
|
||||
Ok(licenses) => {
|
||||
let licenses = licenses
|
||||
.into_iter()
|
||||
.map(|l| License {
|
||||
short: l.short,
|
||||
name: l.name,
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
HttpResponse::Ok().json(licenses)
|
||||
}
|
||||
Err(response) => response,
|
||||
}
|
||||
}
|
||||
|
||||
/// A license's full text in the legacy v2 shape.
#[derive(serde::Serialize)]
pub struct LicenseText {
    // License title.
    pub title: String,
    // Full license body text.
    pub body: String,
}
|
||||
|
||||
#[get("license/{id}")]
|
||||
pub async fn license_text(params: web::Path<(String,)>) -> Result<HttpResponse, ApiError> {
|
||||
let license = v3::tags::license_text(params)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)?;
|
||||
|
||||
// Convert to V2 format
|
||||
Ok(
|
||||
match v2_reroute::extract_ok_json::<v3::tags::LicenseText>(license).await {
|
||||
Ok(license) => HttpResponse::Ok().json(LicenseText {
|
||||
title: license.title,
|
||||
body: license.body,
|
||||
}),
|
||||
Err(response) => response,
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
/// A donation platform entry in the legacy v2 shape.
///
/// The difference between `name` and `short` is removed in v3: the v3 `name`
/// (formerly the short slug) becomes the identifier, and the display name is
/// reconstructed by the v2 shim (see `donation_platform_list`).
#[derive(serde::Serialize, serde::Deserialize, PartialEq, Eq, Debug)]
pub struct DonationPlatformQueryData {
    // Platform slug, e.g. "paypal" (the v3 `name`).
    pub short: String,
    // Human-readable display name recreated for v2 clients.
    pub name: String,
}
|
||||
|
||||
#[get("donation_platform")]
|
||||
pub async fn donation_platform_list(
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let response = v3::tags::link_platform_list(pool, redis).await?;
|
||||
|
||||
// Convert to V2 format
|
||||
Ok(
|
||||
match v2_reroute::extract_ok_json::<Vec<LinkPlatformQueryData>>(response).await {
|
||||
Ok(platforms) => {
|
||||
let platforms = platforms
|
||||
.into_iter()
|
||||
.filter_map(|p| {
|
||||
if p.donation {
|
||||
Some(DonationPlatformQueryData {
|
||||
// Short vs name is no longer a recognized difference in v3.
|
||||
// We capitalize to recreate the old behavior, with some special handling.
|
||||
// This may result in different behaviour for platforms added after the v3 migration.
|
||||
name: match p.name.as_str() {
|
||||
"bmac" => "Buy Me A Coffee".to_string(),
|
||||
"github" => "GitHub Sponsors".to_string(),
|
||||
"ko-fi" => "Ko-fi".to_string(),
|
||||
"paypal" => "PayPal".to_string(),
|
||||
// Otherwise, capitalize it
|
||||
_ => capitalize_first(&p.name),
|
||||
},
|
||||
short: p.name,
|
||||
})
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
HttpResponse::Ok().json(platforms)
|
||||
}
|
||||
Err(response) => response,
|
||||
},
|
||||
)
|
||||
.or_else(v2_reroute::flatten_404_error)
|
||||
}
|
||||
|
||||
/// Lists report types (v2). The v3 handler already returns a plain list of
/// strings, so no response conversion is needed.
#[get("report_type")]
pub async fn report_type_list(
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    // This returns a list of strings directly, so we don't need to convert to v2 format.
    v3::tags::report_type_list(pool, redis)
        .await
        .or_else(v2_reroute::flatten_404_error)
}
|
||||
|
||||
/// Lists project types (v2). The v3 handler already returns a plain list of
/// strings, so no response conversion is needed.
#[get("project_type")]
pub async fn project_type_list(
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    // This returns a list of strings directly, so we don't need to convert to v2 format.
    v3::tags::project_type_list(pool, redis)
        .await
        .or_else(v2_reroute::flatten_404_error)
}
|
||||
|
||||
#[get("side_type")]
|
||||
pub async fn side_type_list() -> Result<HttpResponse, ApiError> {
|
||||
// Original side types are no longer reflected in the database.
|
||||
// Therefore, we hardcode and return all the fields that are supported by our v2 conversion logic.
|
||||
let side_types = [
|
||||
LegacySideType::Required,
|
||||
LegacySideType::Optional,
|
||||
LegacySideType::Unsupported,
|
||||
LegacySideType::Unknown,
|
||||
];
|
||||
let side_types = side_types.iter().map(|s| s.to_string()).collect_vec();
|
||||
Ok(HttpResponse::Ok().json(side_types))
|
||||
}
|
||||
265
apps/labrinth/src/routes/v2/teams.rs
Normal file
265
apps/labrinth/src/routes/v2/teams.rs
Normal file
@@ -0,0 +1,265 @@
|
||||
use crate::database::redis::RedisPool;
|
||||
use crate::models::teams::{OrganizationPermissions, ProjectPermissions, TeamId, TeamMember};
|
||||
use crate::models::users::UserId;
|
||||
use crate::models::v2::teams::LegacyTeamMember;
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::routes::{v2_reroute, v3, ApiError};
|
||||
use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
|
||||
use rust_decimal::Decimal;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::PgPool;
|
||||
|
||||
/// Registers the v2 team routes.
///
/// NOTE(review): `team_members_get_project` is defined in this module but
/// not registered here — presumably it is mounted under the project routes
/// elsewhere; confirm before changing.
pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.service(teams_get);

    cfg.service(
        web::scope("team")
            .service(team_members_get)
            .service(edit_team_member)
            .service(transfer_ownership)
            .service(add_team_member)
            .service(join_team)
            .service(remove_team_member),
    );
}
|
||||
|
||||
// Returns all members of a project,
|
||||
// including the team members of the project's team, but
|
||||
// also the members of the organization's team if the project is associated with an organization
|
||||
// (Unlike team_members_get_project, which only returns the members of the project's team)
|
||||
// They can be differentiated by the "organization_permissions" field being null or not
|
||||
#[get("{id}/members")]
|
||||
pub async fn team_members_get_project(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let response = v3::teams::team_members_get_project(req, info, pool, redis, session_queue)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)?;
|
||||
// Convert response to V2 format
|
||||
match v2_reroute::extract_ok_json::<Vec<TeamMember>>(response).await {
|
||||
Ok(members) => {
|
||||
let members = members
|
||||
.into_iter()
|
||||
.map(LegacyTeamMember::from)
|
||||
.collect::<Vec<_>>();
|
||||
Ok(HttpResponse::Ok().json(members))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
}
|
||||
|
||||
// Returns all members of a team, but not necessarily those of a project-team's organization (unlike team_members_get_project)
/// Gets a team's members (v2), converting each to `LegacyTeamMember`.
#[get("{id}/members")]
pub async fn team_members_get(
    req: HttpRequest,
    info: web::Path<(TeamId,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let response = v3::teams::team_members_get(req, info, pool, redis, session_queue)
        .await
        .or_else(v2_reroute::flatten_404_error)?;
    // Convert response to V2 format; non-JSON responses pass through.
    match v2_reroute::extract_ok_json::<Vec<TeamMember>>(response).await {
        Ok(members) => {
            let members = members
                .into_iter()
                .map(LegacyTeamMember::from)
                .collect::<Vec<_>>();
            Ok(HttpResponse::Ok().json(members))
        }
        Err(response) => Ok(response),
    }
}
|
||||
|
||||
/// Query parameters for `teams_get`.
#[derive(Serialize, Deserialize)]
pub struct TeamIds {
    // Serialized list of team ids (forwarded verbatim to the v3 handler,
    // which does the parsing).
    pub ids: String,
}
|
||||
|
||||
#[get("teams")]
|
||||
pub async fn teams_get(
|
||||
req: HttpRequest,
|
||||
web::Query(ids): web::Query<TeamIds>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let response = v3::teams::teams_get(
|
||||
req,
|
||||
web::Query(v3::teams::TeamIds { ids: ids.ids }),
|
||||
pool,
|
||||
redis,
|
||||
session_queue,
|
||||
)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error);
|
||||
// Convert response to V2 format
|
||||
match v2_reroute::extract_ok_json::<Vec<Vec<TeamMember>>>(response?).await {
|
||||
Ok(members) => {
|
||||
let members = members
|
||||
.into_iter()
|
||||
.map(|members| {
|
||||
members
|
||||
.into_iter()
|
||||
.map(LegacyTeamMember::from)
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
Ok(HttpResponse::Ok().json(members))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
}
|
||||
|
||||
/// Accepts a pending team invite for the authenticated user (v2).
#[post("{id}/join")]
pub async fn join_team(
    req: HttpRequest,
    info: web::Path<(TeamId,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Returns NoContent, so we don't need to convert the response
    v3::teams::join_team(req, info, pool, redis, session_queue)
        .await
        .or_else(v2_reroute::flatten_404_error)
}
|
||||
|
||||
/// Default team-member role applied when a v2 request omits `role`.
fn default_role() -> String {
    String::from("Member")
}
|
||||
|
||||
/// Default ordering value applied when a v2 request omits `ordering`.
fn default_ordering() -> i64 {
    0_i64
}
|
||||
|
||||
/// Request body for inviting a user to a team (v2).
#[derive(Serialize, Deserialize, Clone)]
pub struct NewTeamMember {
    // User being invited.
    pub user_id: UserId,
    // Role label; defaults to "Member".
    #[serde(default = "default_role")]
    pub role: String,
    // Project-level permissions; defaults to the type's `Default`.
    #[serde(default)]
    pub permissions: ProjectPermissions,
    // Organization-level permissions, if the team belongs to an organization.
    #[serde(default)]
    pub organization_permissions: Option<OrganizationPermissions>,
    // Share of payouts; (de)serialized as a plain float.
    #[serde(default)]
    #[serde(with = "rust_decimal::serde::float")]
    pub payouts_split: Decimal,
    // Display ordering within the team; defaults to 0.
    #[serde(default = "default_ordering")]
    pub ordering: i64,
}
|
||||
|
||||
/// Invites a user to a team (v2), forwarding the payload field-by-field to
/// the v3 handler.
#[post("{id}/members")]
pub async fn add_team_member(
    req: HttpRequest,
    info: web::Path<(TeamId,)>,
    pool: web::Data<PgPool>,
    new_member: web::Json<NewTeamMember>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Returns NoContent, so we don't need to convert the response
    v3::teams::add_team_member(
        req,
        info,
        pool,
        web::Json(v3::teams::NewTeamMember {
            user_id: new_member.user_id,
            role: new_member.role.clone(),
            permissions: new_member.permissions,
            organization_permissions: new_member.organization_permissions,
            payouts_split: new_member.payouts_split,
            ordering: new_member.ordering,
        }),
        redis,
        session_queue,
    )
    .await
    .or_else(v2_reroute::flatten_404_error)
}
|
||||
|
||||
/// Request body for editing a team member (v2). All fields are optional;
/// only the provided fields are changed.
#[derive(Serialize, Deserialize, Clone)]
pub struct EditTeamMember {
    pub permissions: Option<ProjectPermissions>,
    pub organization_permissions: Option<OrganizationPermissions>,
    pub role: Option<String>,
    pub payouts_split: Option<Decimal>,
    pub ordering: Option<i64>,
}
|
||||
|
||||
/// Edits a team member's role/permissions/payout split (v2), forwarding the
/// payload field-by-field to the v3 handler.
#[patch("{id}/members/{user_id}")]
pub async fn edit_team_member(
    req: HttpRequest,
    info: web::Path<(TeamId, UserId)>,
    pool: web::Data<PgPool>,
    edit_member: web::Json<EditTeamMember>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Returns NoContent, so we don't need to convert the response
    v3::teams::edit_team_member(
        req,
        info,
        pool,
        web::Json(v3::teams::EditTeamMember {
            permissions: edit_member.permissions,
            organization_permissions: edit_member.organization_permissions,
            role: edit_member.role.clone(),
            payouts_split: edit_member.payouts_split,
            ordering: edit_member.ordering,
        }),
        redis,
        session_queue,
    )
    .await
    .or_else(v2_reroute::flatten_404_error)
}
|
||||
|
||||
/// Request body for transferring team ownership (v2).
#[derive(Deserialize)]
pub struct TransferOwnership {
    // User to become the new owner.
    pub user_id: UserId,
}
|
||||
|
||||
/// Transfers ownership of a team to another member (v2).
#[patch("{id}/owner")]
pub async fn transfer_ownership(
    req: HttpRequest,
    info: web::Path<(TeamId,)>,
    pool: web::Data<PgPool>,
    new_owner: web::Json<TransferOwnership>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Returns NoContent, so we don't need to convert the response
    v3::teams::transfer_ownership(
        req,
        info,
        pool,
        web::Json(v3::teams::TransferOwnership {
            user_id: new_owner.user_id,
        }),
        redis,
        session_queue,
    )
    .await
    .or_else(v2_reroute::flatten_404_error)
}
|
||||
|
||||
/// Removes a user from a team (v2).
#[delete("{id}/members/{user_id}")]
pub async fn remove_team_member(
    req: HttpRequest,
    info: web::Path<(TeamId, UserId)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Returns NoContent, so we don't need to convert the response
    v3::teams::remove_team_member(req, info, pool, redis, session_queue)
        .await
        .or_else(v2_reroute::flatten_404_error)
}
|
||||
116
apps/labrinth/src/routes/v2/threads.rs
Normal file
116
apps/labrinth/src/routes/v2/threads.rs
Normal file
@@ -0,0 +1,116 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::database::redis::RedisPool;
|
||||
use crate::file_hosting::FileHost;
|
||||
use crate::models::ids::ThreadMessageId;
|
||||
use crate::models::threads::{MessageBody, Thread, ThreadId};
|
||||
use crate::models::v2::threads::LegacyThread;
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::routes::{v2_reroute, v3, ApiError};
|
||||
use actix_web::{delete, get, post, web, HttpRequest, HttpResponse};
|
||||
use serde::Deserialize;
|
||||
use sqlx::PgPool;
|
||||
|
||||
/// Registers the v2 thread/message routes.
pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.service(
        web::scope("thread")
            .service(thread_get)
            .service(thread_send_message),
    );
    cfg.service(web::scope("message").service(message_delete));
    cfg.service(threads_get);
}
|
||||
|
||||
/// Gets a single thread (v2).
///
/// NOTE(review): unlike `threads_get`, this passes the v3 response through
/// without converting to `LegacyThread` — confirm that is intended.
#[get("{id}")]
pub async fn thread_get(
    req: HttpRequest,
    info: web::Path<(ThreadId,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    v3::threads::thread_get(req, info, pool, redis, session_queue)
        .await
        .or_else(v2_reroute::flatten_404_error)
}
|
||||
|
||||
/// Query parameters for `threads_get`.
#[derive(Deserialize)]
pub struct ThreadIds {
    // Serialized list of thread ids (forwarded verbatim to the v3 handler).
    pub ids: String,
}
|
||||
|
||||
#[get("threads")]
|
||||
pub async fn threads_get(
|
||||
req: HttpRequest,
|
||||
web::Query(ids): web::Query<ThreadIds>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let response = v3::threads::threads_get(
|
||||
req,
|
||||
web::Query(v3::threads::ThreadIds { ids: ids.ids }),
|
||||
pool,
|
||||
redis,
|
||||
session_queue,
|
||||
)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)?;
|
||||
|
||||
// Convert response to V2 format
|
||||
match v2_reroute::extract_ok_json::<Vec<Thread>>(response).await {
|
||||
Ok(threads) => {
|
||||
let threads = threads
|
||||
.into_iter()
|
||||
.map(LegacyThread::from)
|
||||
.collect::<Vec<_>>();
|
||||
Ok(HttpResponse::Ok().json(threads))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
}
|
||||
|
||||
/// Request body for posting a message to a thread (v2).
#[derive(Deserialize)]
pub struct NewThreadMessage {
    // Message content (text/status change/etc., per `MessageBody`).
    pub body: MessageBody,
}
|
||||
|
||||
/// Posts a message to a thread (v2), forwarding the body to the v3 handler.
#[post("{id}")]
pub async fn thread_send_message(
    req: HttpRequest,
    info: web::Path<(ThreadId,)>,
    pool: web::Data<PgPool>,
    new_message: web::Json<NewThreadMessage>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let new_message = new_message.into_inner();
    // Returns NoContent, so we don't need to convert the response
    v3::threads::thread_send_message(
        req,
        info,
        pool,
        web::Json(v3::threads::NewThreadMessage {
            body: new_message.body,
        }),
        redis,
        session_queue,
    )
    .await
    .or_else(v2_reroute::flatten_404_error)
}
|
||||
|
||||
/// Deletes a thread message (v2). The file host is needed because deleting
/// a message may also remove attached/uploaded images in the v3 handler —
/// presumably; confirm against `v3::threads::message_delete`.
#[delete("{id}")]
pub async fn message_delete(
    req: HttpRequest,
    info: web::Path<(ThreadMessageId,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
    file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
) -> Result<HttpResponse, ApiError> {
    // Returns NoContent, so we don't need to convert the response
    v3::threads::message_delete(req, info, pool, redis, session_queue, file_host)
        .await
        .or_else(v2_reroute::flatten_404_error)
}
|
||||
269
apps/labrinth/src/routes/v2/users.rs
Normal file
269
apps/labrinth/src/routes/v2/users.rs
Normal file
@@ -0,0 +1,269 @@
|
||||
use crate::database::redis::RedisPool;
|
||||
use crate::file_hosting::FileHost;
|
||||
use crate::models::notifications::Notification;
|
||||
use crate::models::projects::Project;
|
||||
use crate::models::users::{Badges, Role, User};
|
||||
use crate::models::v2::notifications::LegacyNotification;
|
||||
use crate::models::v2::projects::LegacyProject;
|
||||
use crate::models::v2::user::LegacyUser;
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::routes::{v2_reroute, v3, ApiError};
|
||||
use actix_web::{delete, get, patch, web, HttpRequest, HttpResponse};
|
||||
use lazy_static::lazy_static;
|
||||
use regex::Regex;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::PgPool;
|
||||
use std::sync::Arc;
|
||||
use validator::Validate;
|
||||
|
||||
/// Registers the v2 user routes.
pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.service(user_auth_get);
    cfg.service(users_get);

    cfg.service(
        web::scope("user")
            .service(user_get)
            .service(projects_list)
            .service(user_delete)
            .service(user_edit)
            .service(user_icon_edit)
            .service(user_notifications)
            .service(user_follows),
    );
}
|
||||
|
||||
#[get("user")]
|
||||
pub async fn user_auth_get(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let response = v3::users::user_auth_get(req, pool, redis, session_queue)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)?;
|
||||
|
||||
// Convert response to V2 format
|
||||
match v2_reroute::extract_ok_json::<User>(response).await {
|
||||
Ok(user) => {
|
||||
let user = LegacyUser::from(user);
|
||||
Ok(HttpResponse::Ok().json(user))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
}
|
||||
|
||||
/// Query parameters for `users_get`.
#[derive(Serialize, Deserialize)]
pub struct UserIds {
    // Serialized list of user ids (forwarded verbatim to the v3 handler).
    pub ids: String,
}
|
||||
|
||||
/// Gets several users by id (v2), converting each to `LegacyUser`.
#[get("users")]
pub async fn users_get(
    web::Query(ids): web::Query<UserIds>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    let response =
        v3::users::users_get(web::Query(v3::users::UserIds { ids: ids.ids }), pool, redis)
            .await
            .or_else(v2_reroute::flatten_404_error)?;

    // Convert response to V2 format; non-JSON responses pass through.
    match v2_reroute::extract_ok_json::<Vec<User>>(response).await {
        Ok(users) => {
            let legacy_users: Vec<LegacyUser> = users.into_iter().map(LegacyUser::from).collect();
            Ok(HttpResponse::Ok().json(legacy_users))
        }
        Err(response) => Ok(response),
    }
}
|
||||
|
||||
/// Gets a single user by id or username (v2), converting to `LegacyUser`.
#[get("{id}")]
pub async fn user_get(
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    let response = v3::users::user_get(info, pool, redis)
        .await
        .or_else(v2_reroute::flatten_404_error)?;

    // Convert response to V2 format; non-JSON responses pass through.
    match v2_reroute::extract_ok_json::<User>(response).await {
        Ok(user) => {
            let user = LegacyUser::from(user);
            Ok(HttpResponse::Ok().json(user))
        }
        Err(response) => Ok(response),
    }
}
|
||||
|
||||
/// Lists a user's projects (v2), converting each to `LegacyProject`.
///
/// `pool`/`redis` are cloned because `LegacyProject::from_many` needs them
/// again after the v3 call consumed the originals.
#[get("{user_id}/projects")]
pub async fn projects_list(
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let response = v3::users::projects_list(req, info, pool.clone(), redis.clone(), session_queue)
        .await
        .or_else(v2_reroute::flatten_404_error)?;

    // Convert to V2 projects; non-JSON responses pass through.
    match v2_reroute::extract_ok_json::<Vec<Project>>(response).await {
        Ok(project) => {
            let legacy_projects = LegacyProject::from_many(project, &**pool, &redis).await?;
            Ok(HttpResponse::Ok().json(legacy_projects))
        }
        Err(response) => Ok(response),
    }
}
|
||||
|
||||
// URL-safe identifier pattern used by the `validator` derive on `EditUser`.
// Kept as a `lazy_static` `static ref` because validator's `regex = "..."`
// attribute dereferences a static `Regex`.
lazy_static! {
    static ref RE_URL_SAFE: Regex = Regex::new(r"^[a-zA-Z0-9_-]*$").unwrap();
}
|
||||
|
||||
/// Request body for editing a user (v2). `Option<Option<T>>` fields use
/// serde's double-option so "absent" (no change) and "null" (clear the
/// value) can be distinguished.
///
/// NOTE(review): `name` is accepted here but not forwarded to v3 by
/// `user_edit` — confirm whether that is intentional.
#[derive(Serialize, Deserialize, Validate)]
pub struct EditUser {
    // New username; must be URL-safe and 1-39 chars.
    #[validate(length(min = 1, max = 39), regex = "RE_URL_SAFE")]
    pub username: Option<String>,
    // Display name; absent = unchanged, null = cleared.
    #[serde(
        default,
        skip_serializing_if = "Option::is_none",
        with = "::serde_with::rust::double_option"
    )]
    #[validate(length(min = 1, max = 64), regex = "RE_URL_SAFE")]
    pub name: Option<Option<String>>,
    // Profile bio; absent = unchanged, null = cleared.
    #[serde(
        default,
        skip_serializing_if = "Option::is_none",
        with = "::serde_with::rust::double_option"
    )]
    #[validate(length(max = 160))]
    pub bio: Option<Option<String>>,
    // Role/badges changes (admin-gated in the v3 handler, presumably —
    // confirm against `v3::users::user_edit`).
    pub role: Option<Role>,
    pub badges: Option<Badges>,
}
|
||||
|
||||
/// Edits a user (v2), forwarding supported fields to the v3 handler.
///
/// The v2 `name` field is NOT forwarded (the v3 payload is built without
/// it), and `venmo_handle` — which v2 never exposed — is always `None`.
#[patch("{id}")]
pub async fn user_edit(
    req: HttpRequest,
    info: web::Path<(String,)>,
    new_user: web::Json<EditUser>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let new_user = new_user.into_inner();
    // Returns NoContent, so we don't need to convert to V2
    v3::users::user_edit(
        req,
        info,
        web::Json(v3::users::EditUser {
            username: new_user.username,
            bio: new_user.bio,
            role: new_user.role,
            badges: new_user.badges,
            venmo_handle: None,
        }),
        pool,
        redis,
        session_queue,
    )
    .await
    .or_else(v2_reroute::flatten_404_error)
}
|
||||
|
||||
/// Query parameter carrying the uploaded icon's file extension.
#[derive(Serialize, Deserialize)]
pub struct Extension {
    // File extension of the uploaded image, e.g. "png".
    pub ext: String,
}
|
||||
|
||||
/// Uploads/replaces a user's avatar icon (v2), streaming the raw payload
/// through to the v3 handler.
#[patch("{id}/icon")]
#[allow(clippy::too_many_arguments)]
pub async fn user_icon_edit(
    web::Query(ext): web::Query<Extension>,
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
    payload: web::Payload,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Returns NoContent, so we don't need to convert to V2
    v3::users::user_icon_edit(
        web::Query(v3::users::Extension { ext: ext.ext }),
        req,
        info,
        pool,
        redis,
        file_host,
        payload,
        session_queue,
    )
    .await
    .or_else(v2_reroute::flatten_404_error)
}
|
||||
|
||||
/// Deletes a user account (v2).
#[delete("{id}")]
pub async fn user_delete(
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Returns NoContent, so we don't need to convert to V2
    v3::users::user_delete(req, info, pool, redis, session_queue)
        .await
        .or_else(v2_reroute::flatten_404_error)
}
|
||||
|
||||
/// Lists projects a user follows (v2), converting each to `LegacyProject`.
///
/// `pool`/`redis` are cloned because `LegacyProject::from_many` needs them
/// again after the v3 call consumed the originals.
#[get("{id}/follows")]
pub async fn user_follows(
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let response = v3::users::user_follows(req, info, pool.clone(), redis.clone(), session_queue)
        .await
        .or_else(v2_reroute::flatten_404_error)?;

    // Convert to V2 projects; non-JSON responses pass through.
    match v2_reroute::extract_ok_json::<Vec<Project>>(response).await {
        Ok(project) => {
            let legacy_projects = LegacyProject::from_many(project, &**pool, &redis).await?;
            Ok(HttpResponse::Ok().json(legacy_projects))
        }
        Err(response) => Ok(response),
    }
}
|
||||
|
||||
#[get("{id}/notifications")]
|
||||
pub async fn user_notifications(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let response = v3::users::user_notifications(req, info, pool, redis, session_queue)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)?;
|
||||
// Convert response to V2 format
|
||||
match v2_reroute::extract_ok_json::<Vec<Notification>>(response).await {
|
||||
Ok(notifications) => {
|
||||
let legacy_notifications: Vec<LegacyNotification> = notifications
|
||||
.into_iter()
|
||||
.map(LegacyNotification::from)
|
||||
.collect();
|
||||
Ok(HttpResponse::Ok().json(legacy_notifications))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
}
|
||||
298
apps/labrinth/src/routes/v2/version_creation.rs
Normal file
298
apps/labrinth/src/routes/v2/version_creation.rs
Normal file
@@ -0,0 +1,298 @@
|
||||
use crate::database::models::loader_fields::VersionField;
|
||||
use crate::database::models::{project_item, version_item};
|
||||
use crate::database::redis::RedisPool;
|
||||
use crate::file_hosting::FileHost;
|
||||
use crate::models::ids::ImageId;
|
||||
use crate::models::projects::{
|
||||
Dependency, FileType, Loader, ProjectId, Version, VersionId, VersionStatus, VersionType,
|
||||
};
|
||||
use crate::models::v2::projects::LegacyVersion;
|
||||
use crate::queue::moderation::AutomatedModerationQueue;
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::routes::v3::project_creation::CreateError;
|
||||
use crate::routes::v3::version_creation;
|
||||
use crate::routes::{v2_reroute, v3};
|
||||
use actix_multipart::Multipart;
|
||||
use actix_web::http::header::ContentDisposition;
|
||||
use actix_web::web::Data;
|
||||
use actix_web::{post, web, HttpRequest, HttpResponse};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::json;
|
||||
use sqlx::postgres::PgPool;
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
use validator::Validate;
|
||||
|
||||
/// Serde default for [`InitialVersionData::status`]: newly created versions
/// are publicly listed unless the uploader requests otherwise.
pub fn default_requested_status() -> VersionStatus {
    VersionStatus::Listed
}
|
||||
|
||||
/// v2-shaped multipart payload for creating a version. Converted into the v3
/// `InitialVersionData` inside [`version_create`] before the real handler runs.
#[derive(Serialize, Deserialize, Validate, Clone)]
pub struct InitialVersionData {
    // `mod_id` is the historical v1/v2 alias for the owning project's id.
    #[serde(alias = "mod_id")]
    pub project_id: Option<ProjectId>,
    // Names of the multipart parts carrying file data (1..=256 parts).
    #[validate(length(min = 1, max = 256))]
    pub file_parts: Vec<String>,
    // URL-safe version string, e.g. "1.2.3".
    #[validate(
        length(min = 1, max = 32),
        regex = "crate::util::validate::RE_URL_SAFE"
    )]
    pub version_number: String,
    // Display title; `name` is the alias used by newer clients.
    #[validate(
        length(min = 1, max = 64),
        custom(function = "crate::util::validate::validate_name")
    )]
    #[serde(alias = "name")]
    pub version_title: String,
    // Changelog body (alias `changelog`), capped at 64 KiB.
    #[validate(length(max = 65536))]
    #[serde(alias = "changelog")]
    pub version_body: Option<String>,
    // Declared dependencies on other projects/versions.
    #[validate(
        length(min = 0, max = 4096),
        custom(function = "crate::util::validate::validate_deps")
    )]
    pub dependencies: Vec<Dependency>,
    // Supported game versions; at least one is required.
    #[validate(length(min = 1))]
    pub game_versions: Vec<String>,
    // Release channel (release/beta/alpha); alias `version_type`.
    #[serde(alias = "version_type")]
    pub release_channel: VersionType,
    // Loaders this version supports; at least one is required.
    #[validate(length(min = 1))]
    pub loaders: Vec<Loader>,
    // Whether this version is featured on the project page.
    pub featured: bool,
    // Name of the file part to treat as the primary download, if any.
    pub primary_file: Option<String>,
    // Requested listing status; defaults to `Listed` (see
    // `default_requested_status`).
    #[serde(default = "default_requested_status")]
    pub status: VersionStatus,
    // Optional per-file type overrides, keyed by file part name.
    #[serde(default = "HashMap::new")]
    pub file_types: HashMap<String, Option<FileType>>,
    // Associations to uploaded images in changelog
    #[validate(length(max = 10))]
    #[serde(default)]
    pub uploaded_images: Vec<ImageId>,

    // The ordering relative to other versions
    pub ordering: Option<i32>,
}
|
||||
|
||||
/// Per-file sidecar data accompanying an uploaded file part.
// NOTE(review): not referenced in this view — presumably consumed by a
// multipart helper elsewhere; confirm before removing.
#[derive(Serialize, Deserialize, Clone)]
struct InitialFileData {
    // Optional file-type overrides, keyed by file part name.
    #[serde(default = "HashMap::new")]
    pub file_types: HashMap<String, Option<FileType>>,
}
|
||||
|
||||
// under `/api/v1/version`
|
||||
/// POST `/version` (v2) — create a new project version.
///
/// The multipart body is rewritten on the fly from the v2 `InitialVersionData`
/// shape into the v3 shape (game versions and side types become loader
/// fields, modpacks get the `mrpack` loader), then the v3 handler does the
/// actual creation and its JSON response is converted back to a v2
/// `LegacyVersion`.
#[post("version")]
pub async fn version_create(
    req: HttpRequest,
    payload: Multipart,
    client: Data<PgPool>,
    redis: Data<RedisPool>,
    file_host: Data<Arc<dyn FileHost + Send + Sync>>,
    session_queue: Data<AuthQueue>,
    moderation_queue: Data<AutomatedModerationQueue>,
) -> Result<HttpResponse, CreateError> {
    // Rewrite the first multipart part (the JSON metadata) while streaming;
    // the closure receives the parsed v2 payload plus the content dispositions
    // of the file parts and must return the v3 payload.
    let payload = v2_reroute::alter_actix_multipart(
        payload,
        req.headers().clone(),
        |legacy_create: InitialVersionData, content_dispositions: Vec<ContentDisposition>| {
            let client = client.clone();
            let redis = redis.clone();
            async move {
                // Convert input data to V3 format
                let mut fields = HashMap::new();
                fields.insert(
                    "game_versions".to_string(),
                    json!(legacy_create.game_versions),
                );

                // Get all possible side-types for loaders given- we will use these to check if we need to convert/apply singleplayer, etc.
                let loaders = match v3::tags::loader_list(client.clone(), redis.clone()).await {
                    Ok(loader_response) => {
                        (v2_reroute::extract_ok_json::<Vec<v3::tags::LoaderData>>(loader_response)
                            .await)
                            .unwrap_or_default()
                    }
                    // A failed tag lookup degrades to "no known loader fields"
                    // rather than failing the whole upload.
                    Err(_) => vec![],
                };

                // All loader fields supported by any of the loaders this
                // version declares.
                let loader_fields_aggregate = loaders
                    .into_iter()
                    .filter_map(|loader| {
                        if legacy_create.loaders.contains(&Loader(loader.name.clone())) {
                            Some(loader.supported_fields)
                        } else {
                            None
                        }
                    })
                    .flatten()
                    .collect::<Vec<_>>();

                // Copies side types of another version of the project.
                // If no version exists, defaults to all false.
                // This is inherently lossy, but not much can be done about it, as side types are no longer associated with projects,
                // so the 'missing' ones can't be easily accessed, and versions do need to have these fields explicitly set.
                let side_type_loader_field_names = [
                    "singleplayer",
                    "client_and_server",
                    "client_only",
                    "server_only",
                ];

                // Check if loader_fields_aggregate contains any of these side types
                // We assume these four fields are linked together.
                if loader_fields_aggregate
                    .iter()
                    .any(|f| side_type_loader_field_names.contains(&f.as_str()))
                {
                    // If so, we get the fields of the example version of the project, and set the side types to match.
                    fields.extend(
                        side_type_loader_field_names
                            .iter()
                            .map(|f| (f.to_string(), json!(false))),
                    );
                    if let Some(example_version_fields) =
                        get_example_version_fields(legacy_create.project_id, client, &redis).await?
                    {
                        // Overwrite the `false` defaults with whatever the
                        // example version actually declares.
                        fields.extend(example_version_fields.into_iter().filter_map(|f| {
                            if side_type_loader_field_names.contains(&f.field_name.as_str()) {
                                Some((f.field_name, f.value.serialize_internal()))
                            } else {
                                None
                            }
                        }));
                    }
                }
                // Handle project type via file extension prediction
                let mut project_type = None;
                for file_part in &legacy_create.file_parts {
                    if let Some(ext) = file_part.split('.').last() {
                        match ext {
                            "mrpack" | "mrpack-primary" => {
                                project_type = Some("modpack");
                                break;
                            }
                            // No other type matters
                            _ => {}
                        }
                        // NOTE(review): this unconditional break means only the
                        // FIRST file part's extension is ever inspected —
                        // confirm that is intentional (the content-disposition
                        // loop below acts as the fallback).
                        break;
                    }
                }

                // Similarly, check actual content disposition for mrpacks, in case file_parts is wrong
                for content_disposition in content_dispositions {
                    // Uses version_create functions to get the file name and extension
                    let (_, file_extension) = version_creation::get_name_ext(&content_disposition)?;
                    // Reject unknown file types up front.
                    crate::util::ext::project_file_type(file_extension)
                        .ok_or_else(|| CreateError::InvalidFileType(file_extension.to_string()))?;

                    if file_extension == "mrpack" {
                        project_type = Some("modpack");
                        break;
                    }
                }

                // Modpacks now use the "mrpack" loader, and loaders are converted to loader fields.
                // Setting of 'project_type' directly is removed, it's loader-based now.
                if project_type == Some("modpack") {
                    fields.insert("mrpack_loaders".to_string(), json!(legacy_create.loaders));
                }

                let loaders = if project_type == Some("modpack") {
                    vec![Loader("mrpack".to_string())]
                } else {
                    legacy_create.loaders
                };

                Ok(v3::version_creation::InitialVersionData {
                    project_id: legacy_create.project_id,
                    file_parts: legacy_create.file_parts,
                    version_number: legacy_create.version_number,
                    version_title: legacy_create.version_title,
                    version_body: legacy_create.version_body,
                    dependencies: legacy_create.dependencies,
                    release_channel: legacy_create.release_channel,
                    loaders,
                    featured: legacy_create.featured,
                    primary_file: legacy_create.primary_file,
                    status: legacy_create.status,
                    file_types: legacy_create.file_types,
                    uploaded_images: legacy_create.uploaded_images,
                    ordering: legacy_create.ordering,
                    fields,
                })
            }
        },
    )
    .await?;

    // Call V3 project creation
    let response = v3::version_creation::version_create(
        req,
        payload,
        client.clone(),
        redis.clone(),
        file_host,
        session_queue,
        moderation_queue,
    )
    .await?;

    // Convert response to V2 format
    match v2_reroute::extract_ok_json::<Version>(response).await {
        Ok(version) => {
            let v2_version = LegacyVersion::from(version);
            Ok(HttpResponse::Ok().json(v2_version))
        }
        Err(response) => Ok(response),
    }
}
|
||||
|
||||
// Gets version fields of an example version of a project, if one exists.
|
||||
async fn get_example_version_fields(
|
||||
project_id: Option<ProjectId>,
|
||||
pool: Data<PgPool>,
|
||||
redis: &RedisPool,
|
||||
) -> Result<Option<Vec<VersionField>>, CreateError> {
|
||||
let project_id = match project_id {
|
||||
Some(project_id) => project_id,
|
||||
None => return Ok(None),
|
||||
};
|
||||
|
||||
let vid = match project_item::Project::get_id(project_id.into(), &**pool, redis)
|
||||
.await?
|
||||
.and_then(|p| p.versions.first().cloned())
|
||||
{
|
||||
Some(vid) => vid,
|
||||
None => return Ok(None),
|
||||
};
|
||||
|
||||
let example_version = match version_item::Version::get(vid, &**pool, redis).await? {
|
||||
Some(version) => version,
|
||||
None => return Ok(None),
|
||||
};
|
||||
Ok(Some(example_version.version_fields))
|
||||
}
|
||||
|
||||
// under /api/v1/version/{version_id}
|
||||
#[post("{version_id}/file")]
|
||||
pub async fn upload_file_to_version(
|
||||
req: HttpRequest,
|
||||
url_data: web::Path<(VersionId,)>,
|
||||
payload: Multipart,
|
||||
client: Data<PgPool>,
|
||||
redis: Data<RedisPool>,
|
||||
file_host: Data<Arc<dyn FileHost + Send + Sync>>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, CreateError> {
|
||||
// Returns NoContent, so no need to convert to V2
|
||||
let response = v3::version_creation::upload_file_to_version(
|
||||
req,
|
||||
url_data,
|
||||
payload,
|
||||
client.clone(),
|
||||
redis.clone(),
|
||||
file_host,
|
||||
session_queue,
|
||||
)
|
||||
.await?;
|
||||
Ok(response)
|
||||
}
|
||||
355
apps/labrinth/src/routes/v2/version_file.rs
Normal file
355
apps/labrinth/src/routes/v2/version_file.rs
Normal file
@@ -0,0 +1,355 @@
|
||||
use super::ApiError;
|
||||
use crate::database::redis::RedisPool;
|
||||
use crate::models::projects::{Project, Version, VersionType};
|
||||
use crate::models::v2::projects::{LegacyProject, LegacyVersion};
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::routes::v3::version_file::HashQuery;
|
||||
use crate::routes::{v2_reroute, v3};
|
||||
use actix_web::{delete, get, post, web, HttpRequest, HttpResponse};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::PgPool;
|
||||
use std::collections::HashMap;
|
||||
|
||||
/// Registers the v2 `version_file` (single-hash) and `version_files`
/// (batch) routes on the actix service config.
pub fn config(cfg: &mut web::ServiceConfig) {
    // Single-hash operations: /version_file/{hash}[/download|/update]
    cfg.service(
        web::scope("version_file")
            .service(delete_file)
            .service(get_version_from_hash)
            .service(download_version)
            .service(get_update_from_hash)
            .service(get_projects_from_hashes),
    );

    // Batch operations: /version_files[/update|/update_individual]
    cfg.service(
        web::scope("version_files")
            .service(get_versions_from_hashes)
            .service(update_files)
            .service(update_individual_files),
    );
}
|
||||
|
||||
// under /api/v1/version_file/{hash}
|
||||
#[get("{version_id}")]
|
||||
pub async fn get_version_from_hash(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
hash_query: web::Query<HashQuery>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let response =
|
||||
v3::version_file::get_version_from_hash(req, info, pool, redis, hash_query, session_queue)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)?;
|
||||
|
||||
// Convert response to V2 format
|
||||
match v2_reroute::extract_ok_json::<Version>(response).await {
|
||||
Ok(version) => {
|
||||
let v2_version = LegacyVersion::from(version);
|
||||
Ok(HttpResponse::Ok().json(v2_version))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
}
|
||||
|
||||
// under /api/v1/version_file/{hash}/download
|
||||
#[get("{version_id}/download")]
|
||||
pub async fn download_version(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
hash_query: web::Query<HashQuery>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
// Returns TemporaryRedirect, so no need to convert to V2
|
||||
v3::version_file::download_version(req, info, pool, redis, hash_query, session_queue)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)
|
||||
}
|
||||
|
||||
// under /api/v1/version_file/{hash}
|
||||
#[delete("{version_id}")]
|
||||
pub async fn delete_file(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
hash_query: web::Query<HashQuery>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
// Returns NoContent, so no need to convert to V2
|
||||
v3::version_file::delete_file(req, info, pool, redis, hash_query, session_queue)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)
|
||||
}
|
||||
|
||||
/// v2 request body for `/version_file/{hash}/update`: filters restricting
/// which newer version may be offered as the update target.
#[derive(Serialize, Deserialize)]
pub struct UpdateData {
    // Restrict candidates to these loaders (None = any).
    pub loaders: Option<Vec<String>>,
    // Restrict candidates to these game versions (None = any).
    pub game_versions: Option<Vec<String>>,
    // Restrict candidates to these release channels (None = any).
    pub version_types: Option<Vec<VersionType>>,
}
|
||||
|
||||
#[post("{version_id}/update")]
|
||||
pub async fn get_update_from_hash(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
hash_query: web::Query<HashQuery>,
|
||||
update_data: web::Json<UpdateData>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let update_data = update_data.into_inner();
|
||||
let mut loader_fields = HashMap::new();
|
||||
let mut game_versions = vec![];
|
||||
for gv in update_data.game_versions.into_iter().flatten() {
|
||||
game_versions.push(serde_json::json!(gv.clone()));
|
||||
}
|
||||
if !game_versions.is_empty() {
|
||||
loader_fields.insert("game_versions".to_string(), game_versions);
|
||||
}
|
||||
let update_data = v3::version_file::UpdateData {
|
||||
loaders: update_data.loaders.clone(),
|
||||
version_types: update_data.version_types.clone(),
|
||||
loader_fields: Some(loader_fields),
|
||||
};
|
||||
|
||||
let response = v3::version_file::get_update_from_hash(
|
||||
req,
|
||||
info,
|
||||
pool,
|
||||
redis,
|
||||
hash_query,
|
||||
web::Json(update_data),
|
||||
session_queue,
|
||||
)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)?;
|
||||
|
||||
// Convert response to V2 format
|
||||
match v2_reroute::extract_ok_json::<Version>(response).await {
|
||||
Ok(version) => {
|
||||
let v2_version = LegacyVersion::from(version);
|
||||
Ok(HttpResponse::Ok().json(v2_version))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
}
|
||||
|
||||
// Requests above with multiple versions below
|
||||
/// Batch request body: a set of file hashes plus the hash algorithm they were
/// produced with.
#[derive(Deserialize)]
pub struct FileHashes {
    // Hash algorithm name; None lets the server infer it from hash length.
    pub algorithm: Option<String>,
    // The file hashes to look up.
    pub hashes: Vec<String>,
}
|
||||
|
||||
// under /api/v2/version_files
|
||||
#[post("")]
|
||||
pub async fn get_versions_from_hashes(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
file_data: web::Json<FileHashes>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let file_data = file_data.into_inner();
|
||||
let file_data = v3::version_file::FileHashes {
|
||||
algorithm: file_data.algorithm,
|
||||
hashes: file_data.hashes,
|
||||
};
|
||||
let response = v3::version_file::get_versions_from_hashes(
|
||||
req,
|
||||
pool,
|
||||
redis,
|
||||
web::Json(file_data),
|
||||
session_queue,
|
||||
)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)?;
|
||||
|
||||
// Convert to V2
|
||||
match v2_reroute::extract_ok_json::<HashMap<String, Version>>(response).await {
|
||||
Ok(versions) => {
|
||||
let v2_versions = versions
|
||||
.into_iter()
|
||||
.map(|(hash, version)| {
|
||||
let v2_version = LegacyVersion::from(version);
|
||||
(hash, v2_version)
|
||||
})
|
||||
.collect::<HashMap<_, _>>();
|
||||
Ok(HttpResponse::Ok().json(v2_versions))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
}
|
||||
|
||||
#[post("project")]
|
||||
pub async fn get_projects_from_hashes(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
file_data: web::Json<FileHashes>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let file_data = file_data.into_inner();
|
||||
let file_data = v3::version_file::FileHashes {
|
||||
algorithm: file_data.algorithm,
|
||||
hashes: file_data.hashes,
|
||||
};
|
||||
let response = v3::version_file::get_projects_from_hashes(
|
||||
req,
|
||||
pool.clone(),
|
||||
redis.clone(),
|
||||
web::Json(file_data),
|
||||
session_queue,
|
||||
)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)?;
|
||||
|
||||
// Convert to V2
|
||||
match v2_reroute::extract_ok_json::<HashMap<String, Project>>(response).await {
|
||||
Ok(projects_hashes) => {
|
||||
let hash_to_project_id = projects_hashes
|
||||
.iter()
|
||||
.map(|(hash, project)| {
|
||||
let project_id = project.id;
|
||||
(hash.clone(), project_id)
|
||||
})
|
||||
.collect::<HashMap<_, _>>();
|
||||
let legacy_projects =
|
||||
LegacyProject::from_many(projects_hashes.into_values().collect(), &**pool, &redis)
|
||||
.await?;
|
||||
let legacy_projects_hashes = hash_to_project_id
|
||||
.into_iter()
|
||||
.filter_map(|(hash, project_id)| {
|
||||
let legacy_project =
|
||||
legacy_projects.iter().find(|x| x.id == project_id)?.clone();
|
||||
Some((hash, legacy_project))
|
||||
})
|
||||
.collect::<HashMap<_, _>>();
|
||||
|
||||
Ok(HttpResponse::Ok().json(legacy_projects_hashes))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
}
|
||||
|
||||
/// v2 request body for `/version_files/update`: one shared set of filters
/// applied to every hash in the batch.
#[derive(Deserialize)]
pub struct ManyUpdateData {
    pub algorithm: Option<String>, // Defaults to calculation based on size of hash
    // File hashes whose projects should be checked for updates.
    pub hashes: Vec<String>,
    // Restrict update candidates to these loaders (None = any).
    pub loaders: Option<Vec<String>>,
    // Restrict update candidates to these game versions (None = any).
    pub game_versions: Option<Vec<String>>,
    // Restrict update candidates to these release channels (None = any).
    pub version_types: Option<Vec<VersionType>>,
}
|
||||
|
||||
#[post("update")]
|
||||
pub async fn update_files(
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
update_data: web::Json<ManyUpdateData>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let update_data = update_data.into_inner();
|
||||
let update_data = v3::version_file::ManyUpdateData {
|
||||
loaders: update_data.loaders.clone(),
|
||||
version_types: update_data.version_types.clone(),
|
||||
game_versions: update_data.game_versions.clone(),
|
||||
algorithm: update_data.algorithm,
|
||||
hashes: update_data.hashes,
|
||||
};
|
||||
|
||||
let response = v3::version_file::update_files(pool, redis, web::Json(update_data))
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)?;
|
||||
|
||||
// Convert response to V2 format
|
||||
match v2_reroute::extract_ok_json::<HashMap<String, Version>>(response).await {
|
||||
Ok(returned_versions) => {
|
||||
let v3_versions = returned_versions
|
||||
.into_iter()
|
||||
.map(|(hash, version)| {
|
||||
let v2_version = LegacyVersion::from(version);
|
||||
(hash, v2_version)
|
||||
})
|
||||
.collect::<HashMap<_, _>>();
|
||||
Ok(HttpResponse::Ok().json(v3_versions))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
}
|
||||
|
||||
/// Per-hash filter set for `/version_files/update_individual`: each file hash
/// carries its own update constraints.
#[derive(Serialize, Deserialize)]
pub struct FileUpdateData {
    // The file hash this filter set applies to.
    pub hash: String,
    // Restrict update candidates to these loaders (None = any).
    pub loaders: Option<Vec<String>>,
    // Restrict update candidates to these game versions (None = any).
    pub game_versions: Option<Vec<String>>,
    // Restrict update candidates to these release channels (None = any).
    pub version_types: Option<Vec<VersionType>>,
}
|
||||
|
||||
/// v2 request body for `/version_files/update_individual`: a batch of
/// [`FileUpdateData`] entries sharing one hash algorithm.
#[derive(Deserialize)]
pub struct ManyFileUpdateData {
    pub algorithm: Option<String>, // Defaults to calculation based on size of hash
    // One filter set per file hash.
    pub hashes: Vec<FileUpdateData>,
}
|
||||
|
||||
#[post("update_individual")]
|
||||
pub async fn update_individual_files(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
update_data: web::Json<ManyFileUpdateData>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let update_data = update_data.into_inner();
|
||||
let update_data = v3::version_file::ManyFileUpdateData {
|
||||
algorithm: update_data.algorithm,
|
||||
hashes: update_data
|
||||
.hashes
|
||||
.into_iter()
|
||||
.map(|x| {
|
||||
let mut loader_fields = HashMap::new();
|
||||
let mut game_versions = vec![];
|
||||
for gv in x.game_versions.into_iter().flatten() {
|
||||
game_versions.push(serde_json::json!(gv.clone()));
|
||||
}
|
||||
if !game_versions.is_empty() {
|
||||
loader_fields.insert("game_versions".to_string(), game_versions);
|
||||
}
|
||||
v3::version_file::FileUpdateData {
|
||||
hash: x.hash.clone(),
|
||||
loaders: x.loaders.clone(),
|
||||
loader_fields: Some(loader_fields),
|
||||
version_types: x.version_types,
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
};
|
||||
|
||||
let response = v3::version_file::update_individual_files(
|
||||
req,
|
||||
pool,
|
||||
redis,
|
||||
web::Json(update_data),
|
||||
session_queue,
|
||||
)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)?;
|
||||
|
||||
// Convert response to V2 format
|
||||
match v2_reroute::extract_ok_json::<HashMap<String, Version>>(response).await {
|
||||
Ok(returned_versions) => {
|
||||
let v3_versions = returned_versions
|
||||
.into_iter()
|
||||
.map(|(hash, version)| {
|
||||
let v2_version = LegacyVersion::from(version);
|
||||
(hash, v2_version)
|
||||
})
|
||||
.collect::<HashMap<_, _>>();
|
||||
Ok(HttpResponse::Ok().json(v3_versions))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
}
|
||||
321
apps/labrinth/src/routes/v2/versions.rs
Normal file
321
apps/labrinth/src/routes/v2/versions.rs
Normal file
@@ -0,0 +1,321 @@
|
||||
use std::collections::HashMap;
|
||||
|
||||
use super::ApiError;
|
||||
use crate::database::redis::RedisPool;
|
||||
use crate::models;
|
||||
use crate::models::ids::VersionId;
|
||||
use crate::models::projects::{Dependency, FileType, Version, VersionStatus, VersionType};
|
||||
use crate::models::v2::projects::LegacyVersion;
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::routes::{v2_reroute, v3};
|
||||
use crate::search::SearchConfig;
|
||||
use actix_web::{delete, get, patch, web, HttpRequest, HttpResponse};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::PgPool;
|
||||
use validator::Validate;
|
||||
|
||||
/// Registers the v2 version routes on the actix service config.
// NOTE(review): `version_list` and `version_project_get` are not registered
// here — presumably they are mounted under the project scope elsewhere;
// confirm against the projects router.
pub fn config(cfg: &mut web::ServiceConfig) {
    // GET /versions (batch lookup) and POST /version (creation).
    cfg.service(versions_get);
    cfg.service(super::version_creation::version_create);

    // Single-version operations under /version/{id}.
    cfg.service(
        web::scope("version")
            .service(version_get)
            .service(version_delete)
            .service(version_edit)
            .service(super::version_creation::upload_file_to_version),
    );
}
|
||||
|
||||
/// Query-string filters for listing a project's versions (v2 shape).
#[derive(Serialize, Deserialize, Clone)]
pub struct VersionListFilters {
    // JSON-encoded array of game version strings, e.g. `["1.20.1"]`.
    pub game_versions: Option<String>,
    // JSON-encoded array of loader names, e.g. `["fabric"]`.
    pub loaders: Option<String>,
    // Only return featured / non-featured versions when set.
    pub featured: Option<bool>,
    // Only return versions on this release channel when set.
    pub version_type: Option<VersionType>,
    // Pagination: page size and starting offset.
    pub limit: Option<usize>,
    pub offset: Option<usize>,
}
|
||||
|
||||
/// GET `.../version` — list a project's versions with optional filters,
/// returned in legacy v2 shape.
///
/// v2 filters arrive as JSON-encoded strings in the query; they are decoded,
/// translated into v3 loader-field filters, re-encoded, and handed to the v3
/// handler.
#[get("version")]
pub async fn version_list(
    req: HttpRequest,
    info: web::Path<(String,)>,
    web::Query(filters): web::Query<VersionListFilters>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Decode the JSON-encoded loader filter. "mrpack" is always appended so
    // modpack versions (which use the mrpack loader in v3) still match v2
    // loader queries. Malformed JSON silently disables the filter.
    let loaders = if let Some(loaders) = filters.loaders {
        if let Ok(mut loaders) = serde_json::from_str::<Vec<String>>(&loaders) {
            loaders.push("mrpack".to_string());
            Some(loaders)
        } else {
            None
        }
    } else {
        None
    };

    // Translate game-version (and, when present, loader) filters into the v3
    // `loader_fields` query parameter, itself a JSON-encoded map.
    let loader_fields = if let Some(game_versions) = filters.game_versions {
        // TODO: extract this logic which is similar to the other v2->v3 version_file functions
        let mut loader_fields = HashMap::new();
        serde_json::from_str::<Vec<String>>(&game_versions)
            .ok()
            .and_then(|versions| {
                let mut game_versions: Vec<serde_json::Value> = vec![];
                for gv in versions {
                    game_versions.push(serde_json::json!(gv.clone()));
                }
                loader_fields.insert("game_versions".to_string(), game_versions);

                if let Some(ref loaders) = loaders {
                    loader_fields.insert(
                        "loaders".to_string(),
                        loaders
                            .iter()
                            .map(|x| serde_json::json!(x.clone()))
                            .collect(),
                    );
                }

                serde_json::to_string(&loader_fields).ok()
            })
    } else {
        None
    };

    let filters = v3::versions::VersionListFilters {
        loader_fields,
        // The loader list also travels separately, re-encoded as JSON.
        loaders: loaders.and_then(|x| serde_json::to_string(&x).ok()),
        featured: filters.featured,
        version_type: filters.version_type,
        limit: filters.limit,
        offset: filters.offset,
    };

    let response =
        v3::versions::version_list(req, info, web::Query(filters), pool, redis, session_queue)
            .await
            .or_else(v2_reroute::flatten_404_error)?;

    // Convert response to V2 format
    match v2_reroute::extract_ok_json::<Vec<Version>>(response).await {
        Ok(versions) => {
            let v2_versions = versions
                .into_iter()
                .map(LegacyVersion::from)
                .collect::<Vec<_>>();
            Ok(HttpResponse::Ok().json(v2_versions))
        }
        Err(response) => Ok(response),
    }
}
|
||||
|
||||
// Given a project ID/slug and a version slug
|
||||
#[get("version/{slug}")]
|
||||
pub async fn version_project_get(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String, String)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let id = info.into_inner();
|
||||
let response = v3::versions::version_project_get_helper(req, id, pool, redis, session_queue)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)?;
|
||||
// Convert response to V2 format
|
||||
match v2_reroute::extract_ok_json::<Version>(response).await {
|
||||
Ok(version) => {
|
||||
let v2_version = LegacyVersion::from(version);
|
||||
Ok(HttpResponse::Ok().json(v2_version))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
}
|
||||
|
||||
/// Query parameter for `/versions`: a JSON-encoded array of version ids.
#[derive(Serialize, Deserialize)]
pub struct VersionIds {
    // JSON-encoded array of base62 version ids, e.g. `["AbCd1234"]`.
    pub ids: String,
}
|
||||
|
||||
#[get("versions")]
|
||||
pub async fn versions_get(
|
||||
req: HttpRequest,
|
||||
web::Query(ids): web::Query<VersionIds>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let ids = v3::versions::VersionIds { ids: ids.ids };
|
||||
let response = v3::versions::versions_get(req, web::Query(ids), pool, redis, session_queue)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)?;
|
||||
|
||||
// Convert response to V2 format
|
||||
match v2_reroute::extract_ok_json::<Vec<Version>>(response).await {
|
||||
Ok(versions) => {
|
||||
let v2_versions = versions
|
||||
.into_iter()
|
||||
.map(LegacyVersion::from)
|
||||
.collect::<Vec<_>>();
|
||||
Ok(HttpResponse::Ok().json(v2_versions))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
}
|
||||
|
||||
#[get("{version_id}")]
|
||||
pub async fn version_get(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(models::ids::VersionId,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let id = info.into_inner().0;
|
||||
let response = v3::versions::version_get_helper(req, id, pool, redis, session_queue)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)?;
|
||||
// Convert response to V2 format
|
||||
match v2_reroute::extract_ok_json::<Version>(response).await {
|
||||
Ok(version) => {
|
||||
let v2_version = LegacyVersion::from(version);
|
||||
Ok(HttpResponse::Ok().json(v2_version))
|
||||
}
|
||||
Err(response) => Ok(response),
|
||||
}
|
||||
}
|
||||
|
||||
/// v2 PATCH body for editing a version; every field is optional and only
/// present fields are applied.
#[derive(Serialize, Deserialize, Validate)]
pub struct EditVersion {
    // New display name.
    #[validate(
        length(min = 1, max = 64),
        custom(function = "crate::util::validate::validate_name")
    )]
    pub name: Option<String>,
    // New URL-safe version number.
    #[validate(
        length(min = 1, max = 32),
        regex = "crate::util::validate::RE_URL_SAFE"
    )]
    pub version_number: Option<String>,
    // New changelog (64 KiB cap).
    #[validate(length(max = 65536))]
    pub changelog: Option<String>,
    // New release channel.
    pub version_type: Option<models::projects::VersionType>,
    // Replacement dependency list.
    #[validate(
        length(min = 0, max = 4096),
        custom(function = "crate::util::validate::validate_deps")
    )]
    pub dependencies: Option<Vec<Dependency>>,
    // Replacement game-version list (becomes a v3 loader field).
    pub game_versions: Option<Vec<String>>,
    // Replacement loader list (may be rerouted to `mrpack_loaders` — see
    // `version_edit`).
    pub loaders: Option<Vec<models::projects::Loader>>,
    // Toggle featured status.
    pub featured: Option<bool>,
    // Override the download counter.
    pub downloads: Option<u32>,
    // New listing status.
    pub status: Option<VersionStatus>,
    // Per-file type overrides, addressed by hash.
    pub file_types: Option<Vec<EditVersionFileType>>,
}
|
||||
|
||||
/// Addresses one file of a version (by hash) and sets or clears its file type.
#[derive(Serialize, Deserialize)]
pub struct EditVersionFileType {
    // Algorithm the hash below was produced with.
    pub algorithm: String,
    // Hash identifying the file to modify.
    pub hash: String,
    // New file type; None clears it.
    pub file_type: Option<FileType>,
}
|
||||
|
||||
/// PATCH `/version/{id}` — edit a version (v2 shim).
///
/// Translates the v2 body into the v3 `EditVersion`: `game_versions` becomes a
/// loader field, and for modpacks an edit of `loaders` is rerouted to the
/// `mrpack_loaders` loader field. The current version is fetched first to
/// decide whether the modpack rerouting applies.
#[patch("{id}")]
pub async fn version_edit(
    req: HttpRequest,
    info: web::Path<(VersionId,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    new_version: web::Json<EditVersion>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let new_version = new_version.into_inner();

    // Loader fields to set on the v3 side; game versions go here rather than
    // on the version itself.
    let mut fields = HashMap::new();
    if new_version.game_versions.is_some() {
        fields.insert(
            "game_versions".to_string(),
            serde_json::json!(new_version.game_versions),
        );
    }

    // Get the older version to get info from
    let old_version = v3::versions::version_get_helper(
        req.clone(),
        (*info).0,
        pool.clone(),
        redis.clone(),
        session_queue.clone(),
    )
    .await
    .or_else(v2_reroute::flatten_404_error)?;
    // A non-JSON (e.g. error) response from the helper is returned verbatim.
    let old_version = match v2_reroute::extract_ok_json::<Version>(old_version).await {
        Ok(version) => version,
        Err(response) => return Ok(response),
    };

    // If this has 'mrpack_loaders' as a loader field previously, this is a modpack.
    // Therefore, if we are modifying the 'loader' field in this case,
    // we are actually modifying the 'mrpack_loaders' loader field
    let mut loaders = new_version.loaders.clone();
    if old_version.fields.contains_key("mrpack_loaders") && new_version.loaders.is_some() {
        fields.insert(
            "mrpack_loaders".to_string(),
            serde_json::json!(new_version.loaders),
        );
        // The real loader of a modpack stays "mrpack"; suppress the direct
        // loader edit.
        loaders = None;
    }

    let new_version = v3::versions::EditVersion {
        name: new_version.name,
        version_number: new_version.version_number,
        changelog: new_version.changelog,
        version_type: new_version.version_type,
        dependencies: new_version.dependencies,
        loaders,
        featured: new_version.featured,
        downloads: new_version.downloads,
        status: new_version.status,
        file_types: new_version.file_types.map(|v| {
            v.into_iter()
                .map(|evft| v3::versions::EditVersionFileType {
                    algorithm: evft.algorithm,
                    hash: evft.hash,
                    file_type: evft.file_type,
                })
                .collect::<Vec<_>>()
        }),
        // v2 has no ordering edits; leave untouched.
        ordering: None,
        fields,
    };

    // The v3 endpoint takes raw JSON, so the struct is serialized here.
    let response = v3::versions::version_edit(
        req,
        info,
        pool,
        redis,
        web::Json(serde_json::to_value(new_version)?),
        session_queue,
    )
    .await
    .or_else(v2_reroute::flatten_404_error)?;
    Ok(response)
}
|
||||
|
||||
#[delete("{version_id}")]
|
||||
pub async fn version_delete(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(VersionId,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
search_config: web::Data<SearchConfig>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
// Returns NoContent, so we don't need to convert the response
|
||||
v3::versions::version_delete(req, info, pool, redis, session_queue, search_config)
|
||||
.await
|
||||
.or_else(v2_reroute::flatten_404_error)
|
||||
}
|
||||
310
apps/labrinth/src/routes/v2_reroute.rs
Normal file
310
apps/labrinth/src/routes/v2_reroute.rs
Normal file
@@ -0,0 +1,310 @@
|
||||
use std::collections::HashMap;
|
||||
|
||||
use super::v3::project_creation::CreateError;
|
||||
use super::ApiError;
|
||||
use crate::models::v2::projects::LegacySideType;
|
||||
use crate::util::actix::{generate_multipart, MultipartSegment, MultipartSegmentData};
|
||||
use actix_multipart::Multipart;
|
||||
use actix_web::http::header::{ContentDisposition, HeaderMap, TryIntoHeaderPair};
|
||||
use actix_web::HttpResponse;
|
||||
use futures::{stream, Future, StreamExt};
|
||||
use serde_json::{json, Value};
|
||||
|
||||
/// Consumes an `HttpResponse` and, if its status is `200 OK`, deserializes
/// its body into `T`.
///
/// Any non-OK response is handed back unchanged in the `Err` variant so the
/// caller can forward it to the client as-is. Note that the response is
/// consumed either way — on the `Ok` path only the parsed value survives.
pub async fn extract_ok_json<T>(response: HttpResponse) -> Result<T, HttpResponse>
where
    T: serde::de::DeserializeOwned,
{
    // If the response is StatusCode::OK, parse the json and return it
    if response.status() == actix_web::http::StatusCode::OK {
        // Error response used when the body cannot be read or parsed; built
        // lazily since the happy path never needs it.
        let failure_http_response = || {
            HttpResponse::InternalServerError().json(json!({
                "error": "reroute_error",
                "description": "Could not parse response from V2 redirection of route."
            }))
        };
        // Takes json out of HttpResponse, mutates it, then regenerates the HttpResponse
        let body = response.into_body();
        let bytes = actix_web::body::to_bytes(body)
            .await
            .map_err(|_| failure_http_response())?;
        let json_value: T = serde_json::from_slice(&bytes).map_err(|_| failure_http_response())?;
        Ok(json_value)
    } else {
        Err(response)
    }
}
|
||||
|
||||
// This only removes the body of 404 responses
|
||||
// This should not be used on the fallback no-route-found handler
|
||||
pub fn flatten_404_error(res: ApiError) -> Result<HttpResponse, ApiError> {
|
||||
match res {
|
||||
ApiError::NotFound => Ok(HttpResponse::NotFound().body("")),
|
||||
_ => Err(res),
|
||||
}
|
||||
}
|
||||
|
||||
// Allows internal modification of an actix multipart file
|
||||
// Expected:
|
||||
// 1. A json segment
|
||||
// 2. Any number of other binary segments
|
||||
// 'closure' is called with the json value, and the content disposition of the other segments
|
||||
pub async fn alter_actix_multipart<T, U, Fut>(
|
||||
mut multipart: Multipart,
|
||||
mut headers: HeaderMap,
|
||||
mut closure: impl FnMut(T, Vec<ContentDisposition>) -> Fut,
|
||||
) -> Result<Multipart, CreateError>
|
||||
where
|
||||
T: serde::de::DeserializeOwned,
|
||||
U: serde::Serialize,
|
||||
Fut: Future<Output = Result<U, CreateError>>,
|
||||
{
|
||||
let mut segments: Vec<MultipartSegment> = Vec::new();
|
||||
|
||||
let mut json = None;
|
||||
let mut json_segment = None;
|
||||
let mut content_dispositions = Vec::new();
|
||||
|
||||
if let Some(field) = multipart.next().await {
|
||||
let mut field = field?;
|
||||
let content_disposition = field.content_disposition().clone();
|
||||
let field_name = content_disposition.get_name().unwrap_or("");
|
||||
let field_filename = content_disposition.get_filename();
|
||||
let field_content_type = field.content_type();
|
||||
let field_content_type = field_content_type.map(|ct| ct.to_string());
|
||||
|
||||
let mut buffer = Vec::new();
|
||||
while let Some(chunk) = field.next().await {
|
||||
let data = chunk?;
|
||||
buffer.extend_from_slice(&data);
|
||||
}
|
||||
|
||||
{
|
||||
let json_value: T = serde_json::from_slice(&buffer)?;
|
||||
json = Some(json_value);
|
||||
}
|
||||
|
||||
json_segment = Some(MultipartSegment {
|
||||
name: field_name.to_string(),
|
||||
filename: field_filename.map(|s| s.to_string()),
|
||||
content_type: field_content_type,
|
||||
data: MultipartSegmentData::Binary(vec![]), // Initialize to empty, will be finished after
|
||||
});
|
||||
}
|
||||
|
||||
while let Some(field) = multipart.next().await {
|
||||
let mut field = field?;
|
||||
let content_disposition = field.content_disposition().clone();
|
||||
let field_name = content_disposition.get_name().unwrap_or("");
|
||||
let field_filename = content_disposition.get_filename();
|
||||
let field_content_type = field.content_type();
|
||||
let field_content_type = field_content_type.map(|ct| ct.to_string());
|
||||
|
||||
let mut buffer = Vec::new();
|
||||
while let Some(chunk) = field.next().await {
|
||||
let data = chunk?;
|
||||
buffer.extend_from_slice(&data);
|
||||
}
|
||||
|
||||
content_dispositions.push(content_disposition.clone());
|
||||
segments.push(MultipartSegment {
|
||||
name: field_name.to_string(),
|
||||
filename: field_filename.map(|s| s.to_string()),
|
||||
content_type: field_content_type,
|
||||
data: MultipartSegmentData::Binary(buffer),
|
||||
})
|
||||
}
|
||||
|
||||
// Finishes the json segment, with aggregated content dispositions
|
||||
{
|
||||
let json_value = json.ok_or(CreateError::InvalidInput(
|
||||
"No json segment found in multipart.".to_string(),
|
||||
))?;
|
||||
let mut json_segment = json_segment.ok_or(CreateError::InvalidInput(
|
||||
"No json segment found in multipart.".to_string(),
|
||||
))?;
|
||||
|
||||
// Call closure, with the json value and names of the other segments
|
||||
let json_value: U = closure(json_value, content_dispositions).await?;
|
||||
let buffer = serde_json::to_vec(&json_value)?;
|
||||
json_segment.data = MultipartSegmentData::Binary(buffer);
|
||||
|
||||
// Insert the json segment at the beginning
|
||||
segments.insert(0, json_segment);
|
||||
}
|
||||
|
||||
let (boundary, payload) = generate_multipart(segments);
|
||||
|
||||
match (
|
||||
"Content-Type",
|
||||
format!("multipart/form-data; boundary={}", boundary).as_str(),
|
||||
)
|
||||
.try_into_pair()
|
||||
{
|
||||
Ok((key, value)) => {
|
||||
headers.insert(key, value);
|
||||
}
|
||||
Err(err) => {
|
||||
CreateError::InvalidInput(format!("Error inserting test header: {:?}.", err));
|
||||
}
|
||||
};
|
||||
|
||||
let new_multipart = Multipart::new(&headers, stream::once(async { Ok(payload) }));
|
||||
|
||||
Ok(new_multipart)
|
||||
}
|
||||
|
||||
// Converts a "client_side" and "server_side" pair into the new v3 corresponding fields
|
||||
pub fn convert_side_types_v3(
|
||||
client_side: LegacySideType,
|
||||
server_side: LegacySideType,
|
||||
) -> HashMap<String, Value> {
|
||||
use LegacySideType::{Optional, Required};
|
||||
|
||||
let singleplayer = client_side == Required
|
||||
|| client_side == Optional
|
||||
|| server_side == Required
|
||||
|| server_side == Optional;
|
||||
let client_and_server = singleplayer;
|
||||
let client_only =
|
||||
(client_side == Required || client_side == Optional) && server_side != Required;
|
||||
let server_only =
|
||||
(server_side == Required || server_side == Optional) && client_side != Required;
|
||||
|
||||
let mut fields = HashMap::new();
|
||||
fields.insert("singleplayer".to_string(), json!(singleplayer));
|
||||
fields.insert("client_and_server".to_string(), json!(client_and_server));
|
||||
fields.insert("client_only".to_string(), json!(client_only));
|
||||
fields.insert("server_only".to_string(), json!(server_only));
|
||||
fields
|
||||
}
|
||||
|
||||
// Converts plugin loaders from v2 to v3, for search facets
// Within every 1st and 2nd level (the ones allowed in v2), we convert every instance of:
// "project_type:mod" to "project_type:plugin" OR "project_type:datapack" OR "project_type:mod"
pub fn convert_plugin_loader_facets_v3(facets: Vec<Vec<String>>) -> Vec<Vec<String>> {
    let mut converted = Vec::with_capacity(facets.len());
    for inner_facets in facets {
        // A facet group consisting of exactly ["project_type:mod"] is widened
        // to also match plugins and datapacks; anything else passes through.
        if inner_facets == ["project_type:mod"] {
            converted.push(vec![
                "project_type:plugin".to_string(),
                "project_type:datapack".to_string(),
                "project_type:mod".to_string(),
            ]);
        } else {
            converted.push(inner_facets);
        }
    }
    converted
}
|
||||
|
||||
// Convert search facets from V3 back to v2
|
||||
// this is not lossless. (See tests)
|
||||
pub fn convert_side_types_v2(
|
||||
side_types: &HashMap<String, Value>,
|
||||
project_type: Option<&str>,
|
||||
) -> (LegacySideType, LegacySideType) {
|
||||
let client_and_server = side_types
|
||||
.get("client_and_server")
|
||||
.and_then(|x| x.as_bool())
|
||||
.unwrap_or(false);
|
||||
let singleplayer = side_types
|
||||
.get("singleplayer")
|
||||
.and_then(|x| x.as_bool())
|
||||
.unwrap_or(client_and_server);
|
||||
let client_only = side_types
|
||||
.get("client_only")
|
||||
.and_then(|x| x.as_bool())
|
||||
.unwrap_or(false);
|
||||
let server_only = side_types
|
||||
.get("server_only")
|
||||
.and_then(|x| x.as_bool())
|
||||
.unwrap_or(false);
|
||||
|
||||
convert_side_types_v2_bools(
|
||||
Some(singleplayer),
|
||||
client_only,
|
||||
server_only,
|
||||
Some(client_and_server),
|
||||
project_type,
|
||||
)
|
||||
}
|
||||
|
||||
// Client side, server side
// Maps the v3 environment booleans back onto the legacy (client_side,
// server_side) pair. Certain project types have fixed side semantics and
// short-circuit the boolean table entirely.
pub fn convert_side_types_v2_bools(
    singleplayer: Option<bool>,
    client_only: bool,
    server_only: bool,
    client_and_server: Option<bool>,
    project_type: Option<&str>,
) -> (LegacySideType, LegacySideType) {
    use LegacySideType::{Optional, Required, Unknown, Unsupported};

    match project_type {
        // Fixed mappings for project types whose sides never vary.
        Some("plugin") => (Unsupported, Required),
        Some("datapack") => (Optional, Required),
        Some("shader") => (Required, Unsupported),
        Some("resourcepack") => (Required, Unsupported),
        _ => {
            // `client_and_server` acts as a fallback for a missing
            // `singleplayer` flag.
            let singleplayer = singleplayer.or(client_and_server).unwrap_or(false);

            match (singleplayer, client_only, server_only) {
                // Only singleplayer
                (true, false, false) => (Required, Required),

                // Client only and not server only
                (false, true, false) => (Required, Unsupported),
                (true, true, false) => (Required, Unsupported),

                // Server only and not client only
                (false, false, true) => (Unsupported, Required),
                (true, false, true) => (Unsupported, Required),

                // Both server only and client only
                (true, true, true) => (Optional, Optional),
                (false, true, true) => (Optional, Optional),

                // Bad type
                (false, false, false) => (Unknown, Unknown),
            }
        }
    }
}
|
||||
|
||||
/// Returns `input` with its first character ASCII-uppercased.
///
/// Non-ASCII first characters are left untouched (matching the previous
/// byte-slice behavior, where a multi-byte leading char was skipped).
pub fn capitalize_first(input: &str) -> String {
    let mut chars = input.chars();
    match chars.next() {
        Some(first) => {
            let mut out = String::with_capacity(input.len());
            out.push(first.to_ascii_uppercase());
            out.push_str(chars.as_str());
            out
        }
        None => String::new(),
    }
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::models::v2::projects::LegacySideType::{Optional, Required, Unsupported};

    // Round-trips every v2 side-type pair through the v3 representation and
    // back, asserting the conversion is lossless except for the listed pairs.
    #[test]
    fn convert_types() {
        // Converting types from V2 to V3 and back should be idempotent- for certain pairs
        // These pairs collapse onto other pairs in the v3 boolean encoding,
        // so they cannot survive the round trip and are skipped.
        let lossy_pairs = [
            (Optional, Unsupported),
            (Unsupported, Optional),
            (Required, Optional),
            (Optional, Required),
            (Unsupported, Unsupported),
        ];

        for client_side in [Required, Optional, Unsupported] {
            for server_side in [Required, Optional, Unsupported] {
                if lossy_pairs.contains(&(client_side, server_side)) {
                    continue;
                }
                let side_types = convert_side_types_v3(client_side, server_side);
                let (client_side2, server_side2) = convert_side_types_v2(&side_types, None);
                assert_eq!(client_side, client_side2);
                assert_eq!(server_side, server_side2);
            }
        }
    }
}
|
||||
635
apps/labrinth/src/routes/v3/analytics_get.rs
Normal file
635
apps/labrinth/src/routes/v3/analytics_get.rs
Normal file
@@ -0,0 +1,635 @@
|
||||
use super::ApiError;
|
||||
use crate::database;
|
||||
use crate::database::redis::RedisPool;
|
||||
use crate::models::teams::ProjectPermissions;
|
||||
use crate::{
|
||||
auth::get_user_from_headers,
|
||||
database::models::user_item,
|
||||
models::{
|
||||
ids::{base62_impl::to_base62, ProjectId, VersionId},
|
||||
pats::Scopes,
|
||||
},
|
||||
queue::session::AuthQueue,
|
||||
};
|
||||
use actix_web::{web, HttpRequest, HttpResponse};
|
||||
use chrono::{DateTime, Duration, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::postgres::types::PgInterval;
|
||||
use sqlx::PgPool;
|
||||
use std::collections::HashMap;
|
||||
use std::convert::TryInto;
|
||||
|
||||
/// Registers all analytics routes under the `analytics` scope.
pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.service(
        web::scope("analytics")
            .route("playtime", web::get().to(playtimes_get))
            .route("views", web::get().to(views_get))
            .route("downloads", web::get().to(downloads_get))
            .route("revenue", web::get().to(revenue_get))
            .route(
                "countries/downloads",
                web::get().to(countries_downloads_get),
            )
            .route("countries/views", web::get().to(countries_views_get)),
    );
}
|
||||
|
||||
/// The json data to be passed to fetch analytic data
/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out.
/// start_date and end_date are optional, and default to two weeks ago, and the maximum date respectively.
/// resolution_minutes is optional. This refers to the window by which we are looking (every day, every minute, etc) and defaults to 1440 (1 day)
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct GetData {
    // only one of project_ids or version_ids should be used
    // if neither are provided, all projects the user has access to will be used
    // NOTE: encoded as a JSON array of id strings inside a single query parameter
    pub project_ids: Option<String>,

    pub start_date: Option<DateTime<Utc>>, // defaults to 2 weeks ago
    pub end_date: Option<DateTime<Utc>>,   // defaults to now

    pub resolution_minutes: Option<u32>, // defaults to 1 day. Ignored in routes that do not aggregate over a resolution (eg: /countries)
}
|
||||
|
||||
/// Get playtime data for a set of projects or versions
/// Data is returned as a hashmap of project/version ids to a hashmap of days to playtime data
/// eg:
/// {
///     "4N1tEhnO": {
///         "20230824": 23
///     }
///}
/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out.
// Playtime record shape as fetched from the analytics store, bucketed per
// time window and broken down by loader / game version / parent version.
#[derive(Serialize, Deserialize, Clone)]
pub struct FetchedPlaytime {
    pub time: u64,
    pub total_seconds: u64,
    pub loader_seconds: HashMap<String, u64>,
    pub game_version_seconds: HashMap<String, u64>,
    pub parent_seconds: HashMap<VersionId, u64>,
}
|
||||
pub async fn playtimes_get(
|
||||
req: HttpRequest,
|
||||
clickhouse: web::Data<clickhouse::Client>,
|
||||
data: web::Query<GetData>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::ANALYTICS]),
|
||||
)
|
||||
.await
|
||||
.map(|x| x.1)?;
|
||||
|
||||
let project_ids = data
|
||||
.project_ids
|
||||
.as_ref()
|
||||
.map(|ids| serde_json::from_str::<Vec<String>>(ids))
|
||||
.transpose()?;
|
||||
|
||||
let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2));
|
||||
let end_date = data.end_date.unwrap_or(Utc::now());
|
||||
let resolution_minutes = data.resolution_minutes.unwrap_or(60 * 24);
|
||||
|
||||
// Convert String list to list of ProjectIds or VersionIds
|
||||
// - Filter out unauthorized projects/versions
|
||||
// - If no project_ids or version_ids are provided, we default to all projects the user has access to
|
||||
let project_ids = filter_allowed_ids(project_ids, user, &pool, &redis, None).await?;
|
||||
|
||||
// Get the views
|
||||
let playtimes = crate::clickhouse::fetch_playtimes(
|
||||
project_ids.unwrap_or_default(),
|
||||
start_date,
|
||||
end_date,
|
||||
resolution_minutes,
|
||||
clickhouse.into_inner(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
let mut hm = HashMap::new();
|
||||
for playtime in playtimes {
|
||||
let id_string = to_base62(playtime.id);
|
||||
if !hm.contains_key(&id_string) {
|
||||
hm.insert(id_string.clone(), HashMap::new());
|
||||
}
|
||||
if let Some(hm) = hm.get_mut(&id_string) {
|
||||
hm.insert(playtime.time, playtime.total);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(HttpResponse::Ok().json(hm))
|
||||
}
|
||||
|
||||
/// Get view data for a set of projects or versions
|
||||
/// Data is returned as a hashmap of project/version ids to a hashmap of days to views
|
||||
/// eg:
|
||||
/// {
|
||||
/// "4N1tEhnO": {
|
||||
/// "20230824": 1090
|
||||
/// }
|
||||
///}
|
||||
/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out.
|
||||
pub async fn views_get(
|
||||
req: HttpRequest,
|
||||
clickhouse: web::Data<clickhouse::Client>,
|
||||
data: web::Query<GetData>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::ANALYTICS]),
|
||||
)
|
||||
.await
|
||||
.map(|x| x.1)?;
|
||||
|
||||
let project_ids = data
|
||||
.project_ids
|
||||
.as_ref()
|
||||
.map(|ids| serde_json::from_str::<Vec<String>>(ids))
|
||||
.transpose()?;
|
||||
|
||||
let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2));
|
||||
let end_date = data.end_date.unwrap_or(Utc::now());
|
||||
let resolution_minutes = data.resolution_minutes.unwrap_or(60 * 24);
|
||||
|
||||
// Convert String list to list of ProjectIds or VersionIds
|
||||
// - Filter out unauthorized projects/versions
|
||||
// - If no project_ids or version_ids are provided, we default to all projects the user has access to
|
||||
let project_ids = filter_allowed_ids(project_ids, user, &pool, &redis, None).await?;
|
||||
|
||||
// Get the views
|
||||
let views = crate::clickhouse::fetch_views(
|
||||
project_ids.unwrap_or_default(),
|
||||
start_date,
|
||||
end_date,
|
||||
resolution_minutes,
|
||||
clickhouse.into_inner(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
let mut hm = HashMap::new();
|
||||
for views in views {
|
||||
let id_string = to_base62(views.id);
|
||||
if !hm.contains_key(&id_string) {
|
||||
hm.insert(id_string.clone(), HashMap::new());
|
||||
}
|
||||
if let Some(hm) = hm.get_mut(&id_string) {
|
||||
hm.insert(views.time, views.total);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(HttpResponse::Ok().json(hm))
|
||||
}
|
||||
|
||||
/// Get download data for a set of projects or versions
|
||||
/// Data is returned as a hashmap of project/version ids to a hashmap of days to downloads
|
||||
/// eg:
|
||||
/// {
|
||||
/// "4N1tEhnO": {
|
||||
/// "20230824": 32
|
||||
/// }
|
||||
///}
|
||||
/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out.
|
||||
pub async fn downloads_get(
|
||||
req: HttpRequest,
|
||||
clickhouse: web::Data<clickhouse::Client>,
|
||||
data: web::Query<GetData>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user_option = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::ANALYTICS]),
|
||||
)
|
||||
.await
|
||||
.map(|x| x.1)?;
|
||||
|
||||
let project_ids = data
|
||||
.project_ids
|
||||
.as_ref()
|
||||
.map(|ids| serde_json::from_str::<Vec<String>>(ids))
|
||||
.transpose()?;
|
||||
|
||||
let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2));
|
||||
let end_date = data.end_date.unwrap_or(Utc::now());
|
||||
let resolution_minutes = data.resolution_minutes.unwrap_or(60 * 24);
|
||||
|
||||
// Convert String list to list of ProjectIds or VersionIds
|
||||
// - Filter out unauthorized projects/versions
|
||||
// - If no project_ids or version_ids are provided, we default to all projects the user has access to
|
||||
let project_ids = filter_allowed_ids(project_ids, user_option, &pool, &redis, None).await?;
|
||||
|
||||
// Get the downloads
|
||||
let downloads = crate::clickhouse::fetch_downloads(
|
||||
project_ids.unwrap_or_default(),
|
||||
start_date,
|
||||
end_date,
|
||||
resolution_minutes,
|
||||
clickhouse.into_inner(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
let mut hm = HashMap::new();
|
||||
for downloads in downloads {
|
||||
let id_string = to_base62(downloads.id);
|
||||
if !hm.contains_key(&id_string) {
|
||||
hm.insert(id_string.clone(), HashMap::new());
|
||||
}
|
||||
if let Some(hm) = hm.get_mut(&id_string) {
|
||||
hm.insert(downloads.time, downloads.total);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(HttpResponse::Ok().json(hm))
|
||||
}
|
||||
|
||||
/// Get payout data for a set of projects
/// Data is returned as a hashmap of project ids to a hashmap of days to amount earned per day
/// eg:
/// {
///     "4N1tEhnO": {
///         "20230824": 0.001
///     }
///}
/// ONLY project IDs can be used. Unauthorized projects will be filtered out.
pub async fn revenue_get(
    req: HttpRequest,
    data: web::Query<GetData>,
    session_queue: web::Data<AuthQueue>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    // Revenue requires the PAYOUTS_READ scope (not ANALYTICS).
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::PAYOUTS_READ]),
    )
    .await
    .map(|x| x.1)?;

    // `project_ids` arrives as a JSON-encoded array of id strings.
    let project_ids = data
        .project_ids
        .as_ref()
        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
        .transpose()?;

    let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2));
    let end_date = data.end_date.unwrap_or(Utc::now());
    let resolution_minutes = data.resolution_minutes.unwrap_or(60 * 24);

    // Round up/down to nearest duration as we are using pgadmin, does not have rounding in the fetch command
    // Round start_date down to nearest resolution
    let diff = start_date.timestamp() % (resolution_minutes as i64 * 60);
    let start_date = start_date - Duration::seconds(diff);

    // Round end_date up to nearest resolution
    let diff = end_date.timestamp() % (resolution_minutes as i64 * 60);
    let end_date = end_date + Duration::seconds((resolution_minutes as i64 * 60) - diff);

    // Convert String list to list of ProjectIds or VersionIds
    // - Filter out unauthorized projects/versions
    // - If no project_ids or version_ids are provided, we default to all projects the user has access to
    // NOTE: `remove_defaults = Some(true)` — with no ids provided, revenue is
    // queried by user_id below rather than by project list.
    let project_ids =
        filter_allowed_ids(project_ids, user.clone(), &pool, &redis, Some(true)).await?;

    // Bucket width passed to DATE_BIN in the queries below.
    let duration: PgInterval = Duration::minutes(resolution_minutes as i64)
        .try_into()
        .map_err(|_| ApiError::InvalidInput("Invalid resolution_minutes".to_string()))?;
    // Get the revenue data
    let project_ids = project_ids.unwrap_or_default();

    // Row shape shared by both query branches below.
    struct PayoutValue {
        mod_id: Option<i64>,
        amount_sum: Option<rust_decimal::Decimal>,
        interval_start: Option<DateTime<Utc>>,
    }

    // With no explicit project list, aggregate all of the user's payouts;
    // otherwise restrict to the requested (already authorization-filtered) ids.
    let payouts_values = if project_ids.is_empty() {
        sqlx::query!(
            "
            SELECT mod_id, SUM(amount) amount_sum, DATE_BIN($4::interval, created, TIMESTAMP '2001-01-01') AS interval_start
            FROM payouts_values
            WHERE user_id = $1 AND created BETWEEN $2 AND $3
            GROUP by mod_id, interval_start ORDER BY interval_start
            ",
            user.id.0 as i64,
            start_date,
            end_date,
            duration,
        )
        .fetch_all(&**pool)
        .await?.into_iter().map(|x| PayoutValue {
            mod_id: x.mod_id,
            amount_sum: x.amount_sum,
            interval_start: x.interval_start,
        }).collect::<Vec<_>>()
    } else {
        sqlx::query!(
            "
            SELECT mod_id, SUM(amount) amount_sum, DATE_BIN($4::interval, created, TIMESTAMP '2001-01-01') AS interval_start
            FROM payouts_values
            WHERE mod_id = ANY($1) AND created BETWEEN $2 AND $3
            GROUP by mod_id, interval_start ORDER BY interval_start
            ",
            &project_ids.iter().map(|x| x.0 as i64).collect::<Vec<_>>(),
            start_date,
            end_date,
            duration,
        )
        .fetch_all(&**pool)
        .await?.into_iter().map(|x| PayoutValue {
            mod_id: x.mod_id,
            amount_sum: x.amount_sum,
            interval_start: x.interval_start,
        }).collect::<Vec<_>>()
    };

    // Pre-seed every requested project with an empty map so projects with no
    // revenue in the range still appear in the response.
    let mut hm: HashMap<_, _> = project_ids
        .into_iter()
        .map(|x| (x.to_string(), HashMap::new()))
        .collect::<HashMap<_, _>>();
    for value in payouts_values {
        if let Some(mod_id) = value.mod_id {
            if let Some(amount) = value.amount_sum {
                if let Some(interval_start) = value.interval_start {
                    let id_string = to_base62(mod_id as u64);
                    if !hm.contains_key(&id_string) {
                        hm.insert(id_string.clone(), HashMap::new());
                    }
                    if let Some(hm) = hm.get_mut(&id_string) {
                        hm.insert(interval_start.timestamp(), amount);
                    }
                }
            }
        }
    }

    Ok(HttpResponse::Ok().json(hm))
}
|
||||
|
||||
/// Get country data for a set of projects or versions
|
||||
/// Data is returned as a hashmap of project/version ids to a hashmap of coutnry to downloads.
|
||||
/// Unknown countries are labeled "".
|
||||
/// This is usuable to see significant performing countries per project
|
||||
/// eg:
|
||||
/// {
|
||||
/// "4N1tEhnO": {
|
||||
/// "CAN": 22
|
||||
/// }
|
||||
///}
|
||||
/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out.
|
||||
/// For this endpoint, provided dates are a range to aggregate over, not specific days to fetch
|
||||
pub async fn countries_downloads_get(
|
||||
req: HttpRequest,
|
||||
clickhouse: web::Data<clickhouse::Client>,
|
||||
data: web::Query<GetData>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::ANALYTICS]),
|
||||
)
|
||||
.await
|
||||
.map(|x| x.1)?;
|
||||
|
||||
let project_ids = data
|
||||
.project_ids
|
||||
.as_ref()
|
||||
.map(|ids| serde_json::from_str::<Vec<String>>(ids))
|
||||
.transpose()?;
|
||||
|
||||
let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2));
|
||||
let end_date = data.end_date.unwrap_or(Utc::now());
|
||||
|
||||
// Convert String list to list of ProjectIds or VersionIds
|
||||
// - Filter out unauthorized projects/versions
|
||||
// - If no project_ids or version_ids are provided, we default to all projects the user has access to
|
||||
let project_ids = filter_allowed_ids(project_ids, user, &pool, &redis, None).await?;
|
||||
|
||||
// Get the countries
|
||||
let countries = crate::clickhouse::fetch_countries_downloads(
|
||||
project_ids.unwrap_or_default(),
|
||||
start_date,
|
||||
end_date,
|
||||
clickhouse.into_inner(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
let mut hm = HashMap::new();
|
||||
for views in countries {
|
||||
let id_string = to_base62(views.id);
|
||||
if !hm.contains_key(&id_string) {
|
||||
hm.insert(id_string.clone(), HashMap::new());
|
||||
}
|
||||
if let Some(hm) = hm.get_mut(&id_string) {
|
||||
hm.insert(views.country, views.total);
|
||||
}
|
||||
}
|
||||
|
||||
let hm: HashMap<String, HashMap<String, u64>> = hm
|
||||
.into_iter()
|
||||
.map(|(key, value)| (key, condense_countries(value)))
|
||||
.collect();
|
||||
|
||||
Ok(HttpResponse::Ok().json(hm))
|
||||
}
|
||||
|
||||
/// Get country data for a set of projects or versions
|
||||
/// Data is returned as a hashmap of project/version ids to a hashmap of coutnry to views.
|
||||
/// Unknown countries are labeled "".
|
||||
/// This is usuable to see significant performing countries per project
|
||||
/// eg:
|
||||
/// {
|
||||
/// "4N1tEhnO": {
|
||||
/// "CAN": 56165
|
||||
/// }
|
||||
///}
|
||||
/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out.
|
||||
/// For this endpoint, provided dates are a range to aggregate over, not specific days to fetch
|
||||
pub async fn countries_views_get(
|
||||
req: HttpRequest,
|
||||
clickhouse: web::Data<clickhouse::Client>,
|
||||
data: web::Query<GetData>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::ANALYTICS]),
|
||||
)
|
||||
.await
|
||||
.map(|x| x.1)?;
|
||||
|
||||
let project_ids = data
|
||||
.project_ids
|
||||
.as_ref()
|
||||
.map(|ids| serde_json::from_str::<Vec<String>>(ids))
|
||||
.transpose()?;
|
||||
|
||||
let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2));
|
||||
let end_date = data.end_date.unwrap_or(Utc::now());
|
||||
|
||||
// Convert String list to list of ProjectIds or VersionIds
|
||||
// - Filter out unauthorized projects/versions
|
||||
// - If no project_ids or version_ids are provided, we default to all projects the user has access to
|
||||
let project_ids = filter_allowed_ids(project_ids, user, &pool, &redis, None).await?;
|
||||
|
||||
// Get the countries
|
||||
let countries = crate::clickhouse::fetch_countries_views(
|
||||
project_ids.unwrap_or_default(),
|
||||
start_date,
|
||||
end_date,
|
||||
clickhouse.into_inner(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
let mut hm = HashMap::new();
|
||||
for views in countries {
|
||||
let id_string = to_base62(views.id);
|
||||
if !hm.contains_key(&id_string) {
|
||||
hm.insert(id_string.clone(), HashMap::new());
|
||||
}
|
||||
if let Some(hm) = hm.get_mut(&id_string) {
|
||||
hm.insert(views.country, views.total);
|
||||
}
|
||||
}
|
||||
|
||||
let hm: HashMap<String, HashMap<String, u64>> = hm
|
||||
.into_iter()
|
||||
.map(|(key, value)| (key, condense_countries(value)))
|
||||
.collect();
|
||||
|
||||
Ok(HttpResponse::Ok().json(hm))
|
||||
}
|
||||
|
||||
/// Condense a per-country count map for privacy: every country whose count
/// (views or downloads) is below `MIN_COUNT` is merged into the anonymous
/// "XX" bucket so small samples cannot identify individual users.
///
/// NOTE: the previous comment here claimed a threshold of 15 while the code
/// used 50; the constant below is the single source of truth.
fn condense_countries(countries: HashMap<String, u64>) -> HashMap<String, u64> {
    /// Minimum per-country count before it is reported individually.
    const MIN_COUNT: u64 = 50;

    let mut condensed: HashMap<String, u64> = HashMap::new();
    for (country, count) in countries {
        // Fold low-volume countries into "XX".
        let key = if count < MIN_COUNT {
            "XX".to_string()
        } else {
            country
        };
        // Accumulate with a single lookup; several inputs may collapse into
        // the same "XX" key.
        *condensed.entry(key).or_insert(0) += count;
    }
    condensed
}
|
||||
|
||||
/// Resolve a user-supplied list of base62 project id strings into the set of
/// `ProjectId`s the given user may view analytics for.
///
/// - When `project_ids` is `None` and `remove_defaults` is not `Some(true)`,
///   defaults to the user's own projects.
/// - Projects for which the user lacks `VIEW_ANALYTICS` permission (either via
///   the project's team or via the owning organization's team) are dropped.
///
/// Returns `Ok(None)` only when `project_ids` was `None` and defaulting was
/// suppressed via `remove_defaults`.
async fn filter_allowed_ids(
    mut project_ids: Option<Vec<String>>,
    user: crate::models::users::User,
    pool: &web::Data<PgPool>,
    redis: &RedisPool,
    remove_defaults: Option<bool>,
) -> Result<Option<Vec<ProjectId>>, ApiError> {
    // If no project_ids or version_ids are provided, we default to all projects the user has *public* access to
    if project_ids.is_none() && !remove_defaults.unwrap_or(false) {
        project_ids = Some(
            user_item::User::get_projects(user.id.into(), &***pool, redis)
                .await?
                .into_iter()
                .map(|x| ProjectId::from(x).to_string())
                .collect(),
        );
    }

    // Convert String list to list of ProjectIds or VersionIds
    // - Filter out unauthorized projects/versions
    let project_ids = if let Some(project_strings) = project_ids {
        let projects_data =
            database::models::Project::get_many(&project_strings, &***pool, redis).await?;

        // Bulk-fetch every project team so memberships can be resolved
        // without a per-project query.
        let team_ids = projects_data
            .iter()
            .map(|x| x.inner.team_id)
            .collect::<Vec<database::models::TeamId>>();
        let team_members =
            database::models::TeamMember::get_from_team_full_many(&team_ids, &***pool, redis)
                .await?;

        // Likewise bulk-fetch owning organizations (if any) and their teams.
        let organization_ids = projects_data
            .iter()
            .filter_map(|x| x.inner.organization_id)
            .collect::<Vec<database::models::OrganizationId>>();
        let organizations =
            database::models::Organization::get_many_ids(&organization_ids, &***pool, redis)
                .await?;

        let organization_team_ids = organizations
            .iter()
            .map(|x| x.team_id)
            .collect::<Vec<database::models::TeamId>>();
        let organization_team_members = database::models::TeamMember::get_from_team_full_many(
            &organization_team_ids,
            &***pool,
            redis,
        )
        .await?;

        // Keep only projects where the user's effective permissions
        // (direct team membership, organization team membership, or global
        // role) include VIEW_ANALYTICS.
        let ids = projects_data
            .into_iter()
            .filter(|project| {
                let team_member = team_members
                    .iter()
                    .find(|x| x.team_id == project.inner.team_id && x.user_id == user.id.into());

                let organization = project
                    .inner
                    .organization_id
                    .and_then(|oid| organizations.iter().find(|x| x.id == oid));

                let organization_team_member = if let Some(organization) = organization {
                    organization_team_members
                        .iter()
                        .find(|x| x.team_id == organization.team_id && x.user_id == user.id.into())
                } else {
                    None
                };

                let permissions = ProjectPermissions::get_permissions_by_role(
                    &user.role,
                    &team_member.cloned(),
                    &organization_team_member.cloned(),
                )
                .unwrap_or_default();

                permissions.contains(ProjectPermissions::VIEW_ANALYTICS)
            })
            .map(|x| x.inner.id.into())
            .collect::<Vec<_>>();

        Some(ids)
    } else {
        None
    };
    // Only one of project_ids or version_ids will be Some
    Ok(project_ids)
}
|
||||
541
apps/labrinth/src/routes/v3/collections.rs
Normal file
541
apps/labrinth/src/routes/v3/collections.rs
Normal file
@@ -0,0 +1,541 @@
|
||||
use crate::auth::checks::is_visible_collection;
|
||||
use crate::auth::{filter_visible_collections, get_user_from_headers};
|
||||
use crate::database::models::{collection_item, generate_collection_id, project_item};
|
||||
use crate::database::redis::RedisPool;
|
||||
use crate::file_hosting::FileHost;
|
||||
use crate::models::collections::{Collection, CollectionStatus};
|
||||
use crate::models::ids::base62_impl::parse_base62;
|
||||
use crate::models::ids::{CollectionId, ProjectId};
|
||||
use crate::models::pats::Scopes;
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::routes::v3::project_creation::CreateError;
|
||||
use crate::routes::ApiError;
|
||||
use crate::util::img::delete_old_images;
|
||||
use crate::util::routes::read_from_payload;
|
||||
use crate::util::validate::validation_errors_to_string;
|
||||
use crate::{database, models};
|
||||
use actix_web::web::Data;
|
||||
use actix_web::{web, HttpRequest, HttpResponse};
|
||||
use chrono::Utc;
|
||||
use itertools::Itertools;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::PgPool;
|
||||
use std::sync::Arc;
|
||||
use validator::Validate;
|
||||
|
||||
pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
cfg.route("collections", web::get().to(collections_get));
|
||||
cfg.route("collection", web::post().to(collection_create));
|
||||
|
||||
cfg.service(
|
||||
web::scope("collection")
|
||||
.route("{id}", web::get().to(collection_get))
|
||||
.route("{id}", web::delete().to(collection_delete))
|
||||
.route("{id}", web::patch().to(collection_edit))
|
||||
.route("{id}/icon", web::patch().to(collection_icon_edit))
|
||||
.route("{id}/icon", web::delete().to(delete_collection_icon)),
|
||||
);
|
||||
}
|
||||
|
||||
/// Request payload for creating a new collection.
#[derive(Serialize, Deserialize, Validate, Clone)]
pub struct CollectionCreateData {
    #[validate(
        length(min = 3, max = 64),
        custom(function = "crate::util::validate::validate_name")
    )]
    /// The title or name of the collection.
    pub name: String,
    #[validate(length(min = 3, max = 255))]
    /// A short description of the collection.
    pub description: Option<String>,
    #[validate(length(max = 32))]
    #[serde(default = "Vec::new")]
    /// A list of initial projects to use with the created collection
    pub projects: Vec<String>,
}
|
||||
|
||||
/// Create a new collection owned by the authenticated user.
///
/// Requires the `COLLECTION_CREATE` scope. The supplied project list is
/// resolved against the database; ids that do not match an existing project
/// are silently dropped by `Project::get_many`. Responds with the created
/// `Collection` model on success.
pub async fn collection_create(
    req: HttpRequest,
    collection_create_data: web::Json<CollectionCreateData>,
    client: Data<PgPool>,
    redis: Data<RedisPool>,
    session_queue: Data<AuthQueue>,
) -> Result<HttpResponse, CreateError> {
    let collection_create_data = collection_create_data.into_inner();

    // The currently logged in user
    let current_user = get_user_from_headers(
        &req,
        &**client,
        &redis,
        &session_queue,
        Some(&[Scopes::COLLECTION_CREATE]),
    )
    .await?
    .1;

    // Enforce the field constraints declared on CollectionCreateData.
    collection_create_data
        .validate()
        .map_err(|err| CreateError::InvalidInput(validation_errors_to_string(err, None)))?;

    let mut transaction = client.begin().await?;

    let collection_id: CollectionId = generate_collection_id(&mut transaction).await?.into();

    // Resolve the requested initial projects to real project ids.
    let initial_project_ids = project_item::Project::get_many(
        &collection_create_data.projects,
        &mut *transaction,
        &redis,
    )
    .await?
    .into_iter()
    .map(|x| x.inner.id.into())
    .collect::<Vec<ProjectId>>();

    let collection_builder_actual = collection_item::CollectionBuilder {
        collection_id: collection_id.into(),
        user_id: current_user.id.into(),
        name: collection_create_data.name,
        description: collection_create_data.description,
        status: CollectionStatus::Listed,
        projects: initial_project_ids
            .iter()
            .copied()
            .map(|x| x.into())
            .collect(),
    };
    // Clone kept so the response can be built after `insert` consumes the
    // builder.
    let collection_builder = collection_builder_actual.clone();

    let now = Utc::now();
    collection_builder_actual.insert(&mut transaction).await?;

    // Build the API response from the builder; icon/color start empty.
    let response = crate::models::collections::Collection {
        id: collection_id,
        user: collection_builder.user_id.into(),
        name: collection_builder.name.clone(),
        description: collection_builder.description.clone(),
        created: now,
        updated: now,
        icon_url: None,
        color: None,
        status: collection_builder.status,
        projects: initial_project_ids,
    };
    transaction.commit().await?;

    Ok(HttpResponse::Ok().json(response))
}
|
||||
|
||||
/// Query parameters for the bulk collections endpoint.
#[derive(Serialize, Deserialize)]
pub struct CollectionIds {
    // JSON-encoded array of base62 collection id strings.
    pub ids: String,
}
|
||||
/// Fetch multiple collections by id, filtered down to those visible to the
/// (optionally authenticated) requesting user.
pub async fn collections_get(
    req: HttpRequest,
    web::Query(ids): web::Query<CollectionIds>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // `ids` is a JSON array of base62 strings; any unparsable id fails the
    // whole request.
    let ids = serde_json::from_str::<Vec<&str>>(&ids.ids)?;
    let ids = ids
        .into_iter()
        .map(|x| parse_base62(x).map(|x| database::models::CollectionId(x as i64)))
        .collect::<Result<Vec<_>, _>>()?;

    let collections_data = database::models::Collection::get_many(&ids, &**pool, &redis).await?;

    // Authentication is optional: anonymous users can still see public
    // collections, so auth failures become `None` rather than an error.
    let user_option = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::COLLECTION_READ]),
    )
    .await
    .map(|x| x.1)
    .ok();

    let collections = filter_visible_collections(collections_data, &user_option).await?;

    Ok(HttpResponse::Ok().json(collections))
}
|
||||
|
||||
/// Fetch a single collection by its base62 id.
///
/// A collection that exists but is not visible to the requester is reported
/// as `NotFound`, so visibility cannot be probed.
pub async fn collection_get(
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let string = info.into_inner().0;

    let id = database::models::CollectionId(parse_base62(&string)? as i64);
    let collection_data = database::models::Collection::get(id, &**pool, &redis).await?;
    // Authentication is optional; failures degrade to an anonymous view.
    let user_option = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::COLLECTION_READ]),
    )
    .await
    .map(|x| x.1)
    .ok();

    if let Some(data) = collection_data {
        if is_visible_collection(&data, &user_option).await? {
            return Ok(HttpResponse::Ok().json(Collection::from(data)));
        }
    }
    Err(ApiError::NotFound)
}
|
||||
|
||||
/// Request payload for editing a collection; all fields are optional and
/// only the present ones are applied.
#[derive(Deserialize, Validate)]
pub struct EditCollection {
    #[validate(
        length(min = 3, max = 64),
        custom(function = "crate::util::validate::validate_name")
    )]
    /// New name for the collection.
    pub name: Option<String>,
    #[validate(length(min = 3, max = 256))]
    #[serde(
        default,
        skip_serializing_if = "Option::is_none",
        with = "::serde_with::rust::double_option"
    )]
    // Double option distinguishes "field absent" (outer None) from an
    // explicit null (inner None) that clears the description.
    pub description: Option<Option<String>>,
    /// New moderation/visibility status, subject to permission checks.
    pub status: Option<CollectionStatus>,
    #[validate(length(max = 1024))]
    /// Replacement project list; when present, existing entries are replaced.
    pub new_projects: Option<Vec<String>>,
}
|
||||
|
||||
/// Apply a partial edit to a collection.
///
/// Requires the `COLLECTION_WRITE` scope and ownership of the collection (or
/// a moderator role). Each supplied field is applied in its own statement
/// inside one transaction; `new_projects`, when present, fully replaces the
/// collection's project list.
pub async fn collection_edit(
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    new_collection: web::Json<EditCollection>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::COLLECTION_WRITE]),
    )
    .await?
    .1;

    new_collection
        .validate()
        .map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?;

    let string = info.into_inner().0;
    let id = database::models::CollectionId(parse_base62(&string)? as i64);
    let result = database::models::Collection::get(id, &**pool, &redis).await?;

    if let Some(collection_item) = result {
        if !can_modify_collection(&collection_item, &user) {
            return Ok(HttpResponse::Unauthorized().body(""));
        }

        let id = collection_item.id;

        let mut transaction = pool.begin().await?;

        if let Some(name) = &new_collection.name {
            sqlx::query!(
                "
                UPDATE collections
                SET name = $1
                WHERE (id = $2)
                ",
                name.trim(),
                id as database::models::ids::CollectionId,
            )
            .execute(&mut *transaction)
            .await?;
        }

        if let Some(description) = &new_collection.description {
            // Inner Option: an explicit null clears the description.
            sqlx::query!(
                "
                UPDATE collections
                SET description = $1
                WHERE (id = $2)
                ",
                description.as_ref(),
                id as database::models::ids::CollectionId,
            )
            .execute(&mut *transaction)
            .await?;
        }

        if let Some(status) = &new_collection.status {
            // Non-moderators may only request statuses that are allowed for
            // approved collections.
            if !(user.role.is_mod()
                || collection_item.status.is_approved() && status.can_be_requested())
            {
                return Err(ApiError::CustomAuthentication(
                    "You don't have permission to set this status!".to_string(),
                ));
            }

            sqlx::query!(
                "
                UPDATE collections
                SET status = $1
                WHERE (id = $2)
                ",
                status.to_string(),
                id as database::models::ids::CollectionId,
            )
            .execute(&mut *transaction)
            .await?;
        }

        if let Some(new_project_ids) = &new_collection.new_projects {
            // Delete all existing projects
            sqlx::query!(
                "
                DELETE FROM collections_mods
                WHERE collection_id = $1
                ",
                collection_item.id as database::models::ids::CollectionId,
            )
            .execute(&mut *transaction)
            .await?;

            // One copy of the collection id per new project, for the bulk
            // UNNEST insert below.
            let collection_item_ids = new_project_ids
                .iter()
                .map(|_| collection_item.id.0)
                .collect_vec();
            // Resolve every supplied project id/slug; any unknown project
            // aborts the edit (and rolls back the transaction on drop).
            let mut validated_project_ids = Vec::new();
            for project_id in new_project_ids {
                let project = database::models::Project::get(project_id, &**pool, &redis)
                    .await?
                    .ok_or_else(|| {
                        ApiError::InvalidInput(format!(
                            "The specified project {project_id} does not exist!"
                        ))
                    })?;
                validated_project_ids.push(project.inner.id.0);
            }
            // Insert- don't throw an error if it already exists
            sqlx::query!(
                "
                INSERT INTO collections_mods (collection_id, mod_id)
                SELECT * FROM UNNEST ($1::int8[], $2::int8[])
                ON CONFLICT DO NOTHING
                ",
                &collection_item_ids[..],
                &validated_project_ids[..],
            )
            .execute(&mut *transaction)
            .await?;

            // Bump the collection's updated timestamp alongside the project
            // list change.
            sqlx::query!(
                "
                UPDATE collections
                SET updated = NOW()
                WHERE id = $1
                ",
                collection_item.id as database::models::ids::CollectionId,
            )
            .execute(&mut *transaction)
            .await?;
        }

        transaction.commit().await?;
        // Invalidate the cached copy so subsequent reads see the edit.
        database::models::Collection::clear_cache(collection_item.id, &redis).await?;

        Ok(HttpResponse::NoContent().body(""))
    } else {
        Err(ApiError::NotFound)
    }
}
|
||||
|
||||
/// Query parameter carrying the file extension of an uploaded icon.
#[derive(Serialize, Deserialize)]
pub struct Extension {
    pub ext: String,
}
|
||||
|
||||
/// Replace a collection's icon with an uploaded image.
///
/// Requires the `COLLECTION_WRITE` scope plus ownership or a moderator role.
/// Old icon files are deleted, the payload (max 256 KiB) is re-encoded via
/// the image optimizer, and the new URLs/color are stored.
#[allow(clippy::too_many_arguments)]
pub async fn collection_icon_edit(
    web::Query(ext): web::Query<Extension>,
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
    mut payload: web::Payload,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::COLLECTION_WRITE]),
    )
    .await?
    .1;

    let string = info.into_inner().0;
    let id = database::models::CollectionId(parse_base62(&string)? as i64);
    let collection_item = database::models::Collection::get(id, &**pool, &redis)
        .await?
        .ok_or_else(|| {
            ApiError::InvalidInput("The specified collection does not exist!".to_string())
        })?;

    if !can_modify_collection(&collection_item, &user) {
        return Ok(HttpResponse::Unauthorized().body(""));
    }

    // Remove the previous icon files from the file host before uploading
    // the replacement.
    delete_old_images(
        collection_item.icon_url,
        collection_item.raw_icon_url,
        &***file_host,
    )
    .await?;

    let bytes =
        read_from_payload(&mut payload, 262144, "Icons must be smaller than 256KiB").await?;

    let collection_id: CollectionId = collection_item.id.into();
    // Re-encode/resize the icon (96px target) and upload both processed and
    // raw variants.
    let upload_result = crate::util::img::upload_image_optimized(
        &format!("data/{}", collection_id),
        bytes.freeze(),
        &ext.ext,
        Some(96),
        Some(1.0),
        &***file_host,
    )
    .await?;

    let mut transaction = pool.begin().await?;

    sqlx::query!(
        "
        UPDATE collections
        SET icon_url = $1, raw_icon_url = $2, color = $3
        WHERE (id = $4)
        ",
        upload_result.url,
        upload_result.raw_url,
        upload_result.color.map(|x| x as i32),
        collection_item.id as database::models::ids::CollectionId,
    )
    .execute(&mut *transaction)
    .await?;

    transaction.commit().await?;
    // Invalidate the cached copy so subsequent reads see the new icon.
    database::models::Collection::clear_cache(collection_item.id, &redis).await?;

    Ok(HttpResponse::NoContent().body(""))
}
|
||||
|
||||
/// Remove a collection's icon.
///
/// Requires the `COLLECTION_WRITE` scope plus ownership or a moderator role.
/// Deletes the stored image files and nulls out the icon columns.
pub async fn delete_collection_icon(
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::COLLECTION_WRITE]),
    )
    .await?
    .1;

    let string = info.into_inner().0;
    let id = database::models::CollectionId(parse_base62(&string)? as i64);
    let collection_item = database::models::Collection::get(id, &**pool, &redis)
        .await?
        .ok_or_else(|| {
            ApiError::InvalidInput("The specified collection does not exist!".to_string())
        })?;
    if !can_modify_collection(&collection_item, &user) {
        return Ok(HttpResponse::Unauthorized().body(""));
    }

    // Delete the icon files from the file host before clearing the DB row.
    delete_old_images(
        collection_item.icon_url,
        collection_item.raw_icon_url,
        &***file_host,
    )
    .await?;
    let mut transaction = pool.begin().await?;

    sqlx::query!(
        "
        UPDATE collections
        SET icon_url = NULL, raw_icon_url = NULL, color = NULL
        WHERE (id = $1)
        ",
        collection_item.id as database::models::ids::CollectionId,
    )
    .execute(&mut *transaction)
    .await?;

    transaction.commit().await?;
    // Invalidate the cached copy so subsequent reads see the removal.
    database::models::Collection::clear_cache(collection_item.id, &redis).await?;

    Ok(HttpResponse::NoContent().body(""))
}
|
||||
|
||||
/// Permanently delete a collection.
///
/// Requires the `COLLECTION_DELETE` scope plus ownership or a moderator
/// role. Responds 204 on success, `NotFound` if the removal affected no row.
pub async fn collection_delete(
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::COLLECTION_DELETE]),
    )
    .await?
    .1;

    let string = info.into_inner().0;
    let id = database::models::CollectionId(parse_base62(&string)? as i64);
    let collection = database::models::Collection::get(id, &**pool, &redis)
        .await?
        .ok_or_else(|| {
            ApiError::InvalidInput("The specified collection does not exist!".to_string())
        })?;
    if !can_modify_collection(&collection, &user) {
        return Ok(HttpResponse::Unauthorized().body(""));
    }
    let mut transaction = pool.begin().await?;

    let result =
        database::models::Collection::remove(collection.id, &mut transaction, &redis).await?;

    transaction.commit().await?;
    // Drop the cached copy regardless of whether a row was removed.
    database::models::Collection::clear_cache(collection.id, &redis).await?;

    if result.is_some() {
        Ok(HttpResponse::NoContent().body(""))
    } else {
        Err(ApiError::NotFound)
    }
}
|
||||
|
||||
fn can_modify_collection(
|
||||
collection: &database::models::Collection,
|
||||
user: &models::users::User,
|
||||
) -> bool {
|
||||
collection.user_id == user.id.into() || user.role.is_mod()
|
||||
}
|
||||
227
apps/labrinth/src/routes/v3/images.rs
Normal file
227
apps/labrinth/src/routes/v3/images.rs
Normal file
@@ -0,0 +1,227 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use super::threads::is_authorized_thread;
|
||||
use crate::auth::checks::{is_team_member_project, is_team_member_version};
|
||||
use crate::auth::get_user_from_headers;
|
||||
use crate::database;
|
||||
use crate::database::models::{project_item, report_item, thread_item, version_item};
|
||||
use crate::database::redis::RedisPool;
|
||||
use crate::file_hosting::FileHost;
|
||||
use crate::models::ids::{ThreadMessageId, VersionId};
|
||||
use crate::models::images::{Image, ImageContext};
|
||||
use crate::models::reports::ReportId;
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::routes::ApiError;
|
||||
use crate::util::img::upload_image_optimized;
|
||||
use crate::util::routes::read_from_payload;
|
||||
use actix_web::{web, HttpRequest, HttpResponse};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::PgPool;
|
||||
|
||||
/// Register image routes: POST /image uploads an image for an allowed context.
pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.route("image", web::post().to(images_add));
}
|
||||
|
||||
/// Query parameters for the image upload endpoint.
#[derive(Serialize, Deserialize)]
pub struct ImageUpload {
    /// File extension of the uploaded image.
    pub ext: String,

    // Context must be an allowed context
    // currently: project, version, thread_message, report
    pub context: String,

    // Optional context id to associate with; at most the one matching
    // `context` is consulted.
    pub project_id: Option<String>, // allow slug or id
    pub version_id: Option<VersionId>,
    pub thread_message_id: Option<ThreadMessageId>,
    pub report_id: Option<ReportId>,
}
|
||||
|
||||
pub async fn images_add(
|
||||
req: HttpRequest,
|
||||
web::Query(data): web::Query<ImageUpload>,
|
||||
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
|
||||
mut payload: web::Payload,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let mut context = ImageContext::from_str(&data.context, None);
|
||||
|
||||
let scopes = vec![context.relevant_scope()];
|
||||
|
||||
let user = get_user_from_headers(&req, &**pool, &redis, &session_queue, Some(&scopes))
|
||||
.await?
|
||||
.1;
|
||||
|
||||
// Attempt to associated a supplied id with the context
|
||||
// If the context cannot be found, or the user is not authorized to upload images for the context, return an error
|
||||
match &mut context {
|
||||
ImageContext::Project { project_id } => {
|
||||
if let Some(id) = data.project_id {
|
||||
let project = project_item::Project::get(&id, &**pool, &redis).await?;
|
||||
if let Some(project) = project {
|
||||
if is_team_member_project(&project.inner, &Some(user.clone()), &pool).await? {
|
||||
*project_id = Some(project.inner.id.into());
|
||||
} else {
|
||||
return Err(ApiError::CustomAuthentication(
|
||||
"You are not authorized to upload images for this project".to_string(),
|
||||
));
|
||||
}
|
||||
} else {
|
||||
return Err(ApiError::InvalidInput(
|
||||
"The project could not be found.".to_string(),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
ImageContext::Version { version_id } => {
|
||||
if let Some(id) = data.version_id {
|
||||
let version = version_item::Version::get(id.into(), &**pool, &redis).await?;
|
||||
if let Some(version) = version {
|
||||
if is_team_member_version(&version.inner, &Some(user.clone()), &pool, &redis)
|
||||
.await?
|
||||
{
|
||||
*version_id = Some(version.inner.id.into());
|
||||
} else {
|
||||
return Err(ApiError::CustomAuthentication(
|
||||
"You are not authorized to upload images for this version".to_string(),
|
||||
));
|
||||
}
|
||||
} else {
|
||||
return Err(ApiError::InvalidInput(
|
||||
"The version could not be found.".to_string(),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
ImageContext::ThreadMessage { thread_message_id } => {
|
||||
if let Some(id) = data.thread_message_id {
|
||||
let thread_message = thread_item::ThreadMessage::get(id.into(), &**pool)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput("The thread message could not found.".to_string())
|
||||
})?;
|
||||
let thread = thread_item::Thread::get(thread_message.thread_id, &**pool)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(
|
||||
"The thread associated with the thread message could not be found"
|
||||
.to_string(),
|
||||
)
|
||||
})?;
|
||||
if is_authorized_thread(&thread, &user, &pool).await? {
|
||||
*thread_message_id = Some(thread_message.id.into());
|
||||
} else {
|
||||
return Err(ApiError::CustomAuthentication(
|
||||
"You are not authorized to upload images for this thread message"
|
||||
.to_string(),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
ImageContext::Report { report_id } => {
|
||||
if let Some(id) = data.report_id {
|
||||
let report = report_item::Report::get(id.into(), &**pool)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput("The report could not be found.".to_string())
|
||||
})?;
|
||||
let thread = thread_item::Thread::get(report.thread_id, &**pool)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(
|
||||
"The thread associated with the report could not be found.".to_string(),
|
||||
)
|
||||
})?;
|
||||
if is_authorized_thread(&thread, &user, &pool).await? {
|
||||
*report_id = Some(report.id.into());
|
||||
} else {
|
||||
return Err(ApiError::CustomAuthentication(
|
||||
"You are not authorized to upload images for this report".to_string(),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
ImageContext::Unknown => {
|
||||
return Err(ApiError::InvalidInput(
|
||||
"Context must be one of: project, version, thread_message, report".to_string(),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
// Upload the image to the file host
|
||||
let bytes =
|
||||
read_from_payload(&mut payload, 1_048_576, "Icons must be smaller than 1MiB").await?;
|
||||
|
||||
let content_length = bytes.len();
|
||||
let upload_result = upload_image_optimized(
|
||||
"data/cached_images",
|
||||
bytes.freeze(),
|
||||
&data.ext,
|
||||
None,
|
||||
None,
|
||||
&***file_host,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let mut transaction = pool.begin().await?;
|
||||
|
||||
let db_image: database::models::Image = database::models::Image {
|
||||
id: database::models::generate_image_id(&mut transaction).await?,
|
||||
url: upload_result.url,
|
||||
raw_url: upload_result.raw_url,
|
||||
size: content_length as u64,
|
||||
created: chrono::Utc::now(),
|
||||
owner_id: database::models::UserId::from(user.id),
|
||||
context: context.context_as_str().to_string(),
|
||||
project_id: if let ImageContext::Project {
|
||||
project_id: Some(id),
|
||||
} = context
|
||||
{
|
||||
Some(crate::database::models::ProjectId::from(id))
|
||||
} else {
|
||||
None
|
||||
},
|
||||
version_id: if let ImageContext::Version {
|
||||
version_id: Some(id),
|
||||
} = context
|
||||
{
|
||||
Some(database::models::VersionId::from(id))
|
||||
} else {
|
||||
None
|
||||
},
|
||||
thread_message_id: if let ImageContext::ThreadMessage {
|
||||
thread_message_id: Some(id),
|
||||
} = context
|
||||
{
|
||||
Some(database::models::ThreadMessageId::from(id))
|
||||
} else {
|
||||
None
|
||||
},
|
||||
report_id: if let ImageContext::Report {
|
||||
report_id: Some(id),
|
||||
} = context
|
||||
{
|
||||
Some(database::models::ReportId::from(id))
|
||||
} else {
|
||||
None
|
||||
},
|
||||
};
|
||||
|
||||
// Insert
|
||||
db_image.insert(&mut transaction).await?;
|
||||
|
||||
let image = Image {
|
||||
id: db_image.id.into(),
|
||||
url: db_image.url,
|
||||
size: db_image.size,
|
||||
created: db_image.created,
|
||||
owner_id: db_image.owner_id.into(),
|
||||
context,
|
||||
};
|
||||
|
||||
transaction.commit().await?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(image))
|
||||
}
|
||||
53
apps/labrinth/src/routes/v3/mod.rs
Normal file
53
apps/labrinth/src/routes/v3/mod.rs
Normal file
@@ -0,0 +1,53 @@
|
||||
pub use super::ApiError;
|
||||
use crate::util::cors::default_cors;
|
||||
use actix_web::{web, HttpResponse};
|
||||
use serde_json::json;
|
||||
|
||||
pub mod analytics_get;
|
||||
pub mod collections;
|
||||
pub mod images;
|
||||
pub mod notifications;
|
||||
pub mod organizations;
|
||||
pub mod payouts;
|
||||
pub mod project_creation;
|
||||
pub mod projects;
|
||||
pub mod reports;
|
||||
pub mod statistics;
|
||||
pub mod tags;
|
||||
pub mod teams;
|
||||
pub mod threads;
|
||||
pub mod users;
|
||||
pub mod version_creation;
|
||||
pub mod version_file;
|
||||
pub mod versions;
|
||||
|
||||
pub mod oauth_clients;
|
||||
|
||||
pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
cfg.service(
|
||||
web::scope("v3")
|
||||
.wrap(default_cors())
|
||||
.configure(analytics_get::config)
|
||||
.configure(collections::config)
|
||||
.configure(images::config)
|
||||
.configure(notifications::config)
|
||||
.configure(organizations::config)
|
||||
.configure(project_creation::config)
|
||||
.configure(projects::config)
|
||||
.configure(reports::config)
|
||||
.configure(statistics::config)
|
||||
.configure(tags::config)
|
||||
.configure(teams::config)
|
||||
.configure(threads::config)
|
||||
.configure(users::config)
|
||||
.configure(version_file::config)
|
||||
.configure(payouts::config)
|
||||
.configure(versions::config),
|
||||
);
|
||||
}
|
||||
|
||||
pub async fn hello_world() -> Result<HttpResponse, ApiError> {
|
||||
Ok(HttpResponse::Ok().json(json!({
|
||||
"hello": "world",
|
||||
})))
|
||||
}
|
||||
289
apps/labrinth/src/routes/v3/notifications.rs
Normal file
289
apps/labrinth/src/routes/v3/notifications.rs
Normal file
@@ -0,0 +1,289 @@
|
||||
use crate::auth::get_user_from_headers;
|
||||
use crate::database;
|
||||
use crate::database::redis::RedisPool;
|
||||
use crate::models::ids::NotificationId;
|
||||
use crate::models::notifications::Notification;
|
||||
use crate::models::pats::Scopes;
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::routes::ApiError;
|
||||
use actix_web::{web, HttpRequest, HttpResponse};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::PgPool;
|
||||
|
||||
pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
cfg.route("notifications", web::get().to(notifications_get));
|
||||
cfg.route("notifications", web::patch().to(notifications_read));
|
||||
cfg.route("notifications", web::delete().to(notifications_delete));
|
||||
|
||||
cfg.service(
|
||||
web::scope("notification")
|
||||
.route("{id}", web::get().to(notification_get))
|
||||
.route("{id}", web::patch().to(notification_read))
|
||||
.route("{id}", web::delete().to(notification_delete)),
|
||||
);
|
||||
}
|
||||
|
||||
/// Query parameters for the bulk notification endpoints.
#[derive(Serialize, Deserialize)]
pub struct NotificationIds {
    /// A JSON-encoded array of notification IDs; the handlers parse it with
    /// `serde_json::from_str::<Vec<NotificationId>>`.
    pub ids: String,
}
|
||||
|
||||
/// Fetch a batch of notifications by ID.
///
/// Requires the `NOTIFICATION_READ` scope. Notifications that do not belong
/// to the caller are silently filtered out of the response (admins see all
/// requested notifications); missing IDs are likewise simply absent.
pub async fn notifications_get(
    req: HttpRequest,
    web::Query(ids): web::Query<NotificationIds>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Authenticate the request; the tuple's second element is the user record.
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::NOTIFICATION_READ]),
    )
    .await?
    .1;

    use database::models::notification_item::Notification as DBNotification;
    use database::models::NotificationId as DBNotificationId;

    // `ids.ids` is a JSON array string; convert API IDs to database IDs.
    let notification_ids: Vec<DBNotificationId> =
        serde_json::from_str::<Vec<NotificationId>>(ids.ids.as_str())?
            .into_iter()
            .map(DBNotificationId::from)
            .collect();

    let notifications_data: Vec<DBNotification> =
        database::models::notification_item::Notification::get_many(&notification_ids, &**pool)
            .await?;

    // Drop rows the caller may not see, then map to the public API model.
    let notifications: Vec<Notification> = notifications_data
        .into_iter()
        .filter(|n| n.user_id == user.id.into() || user.role.is_admin())
        .map(Notification::from)
        .collect();

    Ok(HttpResponse::Ok().json(notifications))
}
|
||||
|
||||
/// Fetch a single notification by ID.
///
/// Requires the `NOTIFICATION_READ` scope. Returns 404 both when the
/// notification does not exist and when it belongs to another (non-admin)
/// caller, so existence is not leaked to unauthorized users.
pub async fn notification_get(
    req: HttpRequest,
    info: web::Path<(NotificationId,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Authenticate the request; the tuple's second element is the user record.
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::NOTIFICATION_READ]),
    )
    .await?
    .1;

    let id = info.into_inner().0;

    let notification_data =
        database::models::notification_item::Notification::get(id.into(), &**pool).await?;

    if let Some(data) = notification_data {
        // Owners and admins may view; everyone else gets the same 404 as "missing".
        if user.id == data.user_id.into() || user.role.is_admin() {
            Ok(HttpResponse::Ok().json(Notification::from(data)))
        } else {
            Err(ApiError::NotFound)
        }
    } else {
        Err(ApiError::NotFound)
    }
}
|
||||
|
||||
/// Mark a single notification as read.
///
/// Requires the `NOTIFICATION_WRITE` scope. Only the owner or an admin may
/// mark it; unlike the GET handler, an unauthorized caller receives an
/// explicit authentication error rather than a 404.
pub async fn notification_read(
    req: HttpRequest,
    info: web::Path<(NotificationId,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Authenticate the request; the tuple's second element is the user record.
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::NOTIFICATION_WRITE]),
    )
    .await?
    .1;

    let id = info.into_inner().0;

    let notification_data =
        database::models::notification_item::Notification::get(id.into(), &**pool).await?;

    if let Some(data) = notification_data {
        if data.user_id == user.id.into() || user.role.is_admin() {
            // Apply the read update inside a transaction; the redis handle is
            // forwarded so cached state can be refreshed by the model layer.
            let mut transaction = pool.begin().await?;

            database::models::notification_item::Notification::read(
                id.into(),
                &mut transaction,
                &redis,
            )
            .await?;

            transaction.commit().await?;

            Ok(HttpResponse::NoContent().body(""))
        } else {
            Err(ApiError::CustomAuthentication(
                "You are not authorized to read this notification!".to_string(),
            ))
        }
    } else {
        Err(ApiError::NotFound)
    }
}
|
||||
|
||||
/// Delete a single notification.
///
/// Requires the `NOTIFICATION_WRITE` scope. Only the owner or an admin may
/// delete; an unauthorized caller receives an explicit authentication error,
/// and a missing notification yields 404.
pub async fn notification_delete(
    req: HttpRequest,
    info: web::Path<(NotificationId,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Authenticate the request; the tuple's second element is the user record.
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::NOTIFICATION_WRITE]),
    )
    .await?
    .1;

    let id = info.into_inner().0;

    let notification_data =
        database::models::notification_item::Notification::get(id.into(), &**pool).await?;

    if let Some(data) = notification_data {
        if data.user_id == user.id.into() || user.role.is_admin() {
            // Removal runs inside a transaction; redis is forwarded for cache
            // maintenance by the model layer.
            let mut transaction = pool.begin().await?;

            database::models::notification_item::Notification::remove(
                id.into(),
                &mut transaction,
                &redis,
            )
            .await?;

            transaction.commit().await?;

            Ok(HttpResponse::NoContent().body(""))
        } else {
            Err(ApiError::CustomAuthentication(
                "You are not authorized to delete this notification!".to_string(),
            ))
        }
    } else {
        Err(ApiError::NotFound)
    }
}
|
||||
|
||||
/// Mark a batch of notifications as read.
///
/// Requires the `NOTIFICATION_WRITE` scope. IDs that do not exist or do not
/// belong to the caller (unless admin) are silently skipped rather than
/// producing an error; the remainder are updated atomically.
pub async fn notifications_read(
    req: HttpRequest,
    web::Query(ids): web::Query<NotificationIds>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Authenticate the request; the tuple's second element is the user record.
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::NOTIFICATION_WRITE]),
    )
    .await?
    .1;

    // `ids.ids` is a JSON array string of API notification IDs.
    let notification_ids = serde_json::from_str::<Vec<NotificationId>>(&ids.ids)?
        .into_iter()
        .map(|x| x.into())
        .collect::<Vec<_>>();

    let mut transaction = pool.begin().await?;

    let notifications_data =
        database::models::notification_item::Notification::get_many(&notification_ids, &**pool)
            .await?;

    // Keep only the IDs the caller is allowed to modify.
    let mut notifications: Vec<database::models::ids::NotificationId> = Vec::new();

    for notification in notifications_data {
        if notification.user_id == user.id.into() || user.role.is_admin() {
            notifications.push(notification.id);
        }
    }

    database::models::notification_item::Notification::read_many(
        &notifications,
        &mut transaction,
        &redis,
    )
    .await?;

    transaction.commit().await?;

    Ok(HttpResponse::NoContent().body(""))
}
|
||||
|
||||
/// Delete a batch of notifications.
///
/// Requires the `NOTIFICATION_WRITE` scope. Mirrors `notifications_read`:
/// IDs that are missing or not owned by the caller (unless admin) are
/// silently skipped; the remainder are removed atomically.
pub async fn notifications_delete(
    req: HttpRequest,
    web::Query(ids): web::Query<NotificationIds>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Authenticate the request; the tuple's second element is the user record.
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::NOTIFICATION_WRITE]),
    )
    .await?
    .1;

    // `ids.ids` is a JSON array string of API notification IDs.
    let notification_ids = serde_json::from_str::<Vec<NotificationId>>(&ids.ids)?
        .into_iter()
        .map(|x| x.into())
        .collect::<Vec<_>>();

    let mut transaction = pool.begin().await?;

    let notifications_data =
        database::models::notification_item::Notification::get_many(&notification_ids, &**pool)
            .await?;

    // Keep only the IDs the caller is allowed to delete.
    let mut notifications: Vec<database::models::ids::NotificationId> = Vec::new();

    for notification in notifications_data {
        if notification.user_id == user.id.into() || user.role.is_admin() {
            notifications.push(notification.id);
        }
    }

    database::models::notification_item::Notification::remove_many(
        &notifications,
        &mut transaction,
        &redis,
    )
    .await?;

    transaction.commit().await?;

    Ok(HttpResponse::NoContent().body(""))
}
|
||||
559
apps/labrinth/src/routes/v3/oauth_clients.rs
Normal file
559
apps/labrinth/src/routes/v3/oauth_clients.rs
Normal file
@@ -0,0 +1,559 @@
|
||||
use std::{collections::HashSet, fmt::Display, sync::Arc};
|
||||
|
||||
use actix_web::{
|
||||
delete, get, patch, post,
|
||||
web::{self, scope},
|
||||
HttpRequest, HttpResponse,
|
||||
};
|
||||
use chrono::Utc;
|
||||
use itertools::Itertools;
|
||||
use rand::{distributions::Alphanumeric, Rng, SeedableRng};
|
||||
use rand_chacha::ChaCha20Rng;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::PgPool;
|
||||
use validator::Validate;
|
||||
|
||||
use super::ApiError;
|
||||
use crate::{
|
||||
auth::{checks::ValidateAuthorized, get_user_from_headers},
|
||||
database::{
|
||||
models::{
|
||||
generate_oauth_client_id, generate_oauth_redirect_id,
|
||||
oauth_client_authorization_item::OAuthClientAuthorization,
|
||||
oauth_client_item::{OAuthClient, OAuthRedirectUri},
|
||||
DatabaseError, OAuthClientId, User,
|
||||
},
|
||||
redis::RedisPool,
|
||||
},
|
||||
models::{
|
||||
self,
|
||||
oauth_clients::{GetOAuthClientsRequest, OAuthClientCreationResult},
|
||||
pats::Scopes,
|
||||
},
|
||||
queue::session::AuthQueue,
|
||||
routes::v3::project_creation::CreateError,
|
||||
util::validate::validation_errors_to_string,
|
||||
};
|
||||
use crate::{
|
||||
file_hosting::FileHost,
|
||||
models::{ids::base62_impl::parse_base62, oauth_clients::DeleteOAuthClientQueryParam},
|
||||
util::routes::read_from_payload,
|
||||
};
|
||||
|
||||
use crate::database::models::oauth_client_item::OAuthClient as DBOAuthClient;
|
||||
use crate::models::ids::OAuthClientId as ApiOAuthClientId;
|
||||
use crate::util::img::{delete_old_images, upload_image_optimized};
|
||||
|
||||
pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
cfg.service(
|
||||
scope("oauth")
|
||||
.configure(crate::auth::oauth::config)
|
||||
.service(revoke_oauth_authorization)
|
||||
.service(oauth_client_create)
|
||||
.service(oauth_client_edit)
|
||||
.service(oauth_client_delete)
|
||||
.service(oauth_client_icon_edit)
|
||||
.service(oauth_client_icon_delete)
|
||||
.service(get_client)
|
||||
.service(get_clients)
|
||||
.service(get_user_oauth_authorizations),
|
||||
);
|
||||
}
|
||||
|
||||
/// List the OAuth clients owned by a given user.
///
/// `info` is the target user's identifier string. Requires a session token
/// (`SESSION_ACCESS`); only the user themselves or an admin may list a
/// user's clients. Returns 404 if the target user does not exist.
pub async fn get_user_clients(
    req: HttpRequest,
    info: web::Path<String>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Authenticate the request; the tuple's second element is the user record.
    let current_user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::SESSION_ACCESS]),
    )
    .await?
    .1;

    let target_user = User::get(&info.into_inner(), &**pool, &redis).await?;

    if let Some(target_user) = target_user {
        // Ownership check: listing someone else's clients requires admin.
        if target_user.id != current_user.id.into() && !current_user.role.is_admin() {
            return Err(ApiError::CustomAuthentication(
                "You do not have permission to see the OAuth clients of this user!".to_string(),
            ));
        }

        let clients = OAuthClient::get_all_user_clients(target_user.id, &**pool).await?;

        // Convert DB rows to the public API representation.
        let response = clients
            .into_iter()
            .map(models::oauth_clients::OAuthClient::from)
            .collect_vec();

        Ok(HttpResponse::Ok().json(response))
    } else {
        Err(ApiError::NotFound)
    }
}
|
||||
|
||||
#[get("app/{id}")]
|
||||
pub async fn get_client(
|
||||
id: web::Path<ApiOAuthClientId>,
|
||||
pool: web::Data<PgPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let clients = get_clients_inner(&[id.into_inner()], pool).await?;
|
||||
if let Some(client) = clients.into_iter().next() {
|
||||
Ok(HttpResponse::Ok().json(client))
|
||||
} else {
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
}
|
||||
|
||||
#[get("apps")]
|
||||
pub async fn get_clients(
|
||||
info: web::Query<GetOAuthClientsRequest>,
|
||||
pool: web::Data<PgPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let ids: Vec<_> = info
|
||||
.ids
|
||||
.iter()
|
||||
.map(|id| parse_base62(id).map(ApiOAuthClientId))
|
||||
.collect::<Result<_, _>>()?;
|
||||
|
||||
let clients = get_clients_inner(&ids, pool).await?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(clients))
|
||||
}
|
||||
|
||||
/// Request body for creating a new OAuth application.
#[derive(Deserialize, Validate)]
pub struct NewOAuthApp {
    /// Display name of the client (3–255 chars, project naming rules).
    #[validate(
        custom(function = "crate::util::validate::validate_name"),
        length(min = 3, max = 255)
    )]
    pub name: String,

    /// Maximum scopes the client may ever request; restricted scopes are rejected.
    #[validate(custom(function = "crate::util::validate::validate_no_restricted_scopes"))]
    pub max_scopes: Scopes,

    /// Redirect URIs allowed during the authorization flow.
    pub redirect_uris: Vec<String>,

    /// Optional homepage URL for the client (validated, max 255 chars).
    #[validate(
        custom(function = "crate::util::validate::validate_url"),
        length(max = 255)
    )]
    pub url: Option<String>,

    /// Optional human-readable description (max 255 chars).
    #[validate(length(max = 255))]
    pub description: Option<String>,
}
|
||||
|
||||
/// Create a new OAuth application owned by the calling user.
///
/// Requires a session token (`SESSION_ACCESS`). The plaintext client secret
/// is generated here and returned to the caller exactly once; only its hash
/// is persisted.
#[post("app")]
pub async fn oauth_client_create<'a>(
    req: HttpRequest,
    new_oauth_app: web::Json<NewOAuthApp>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, CreateError> {
    // Authenticate the request; the tuple's second element is the user record.
    let current_user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::SESSION_ACCESS]),
    )
    .await?
    .1;

    // Run the derive(Validate) checks declared on NewOAuthApp.
    new_oauth_app
        .validate()
        .map_err(|e| CreateError::ValidationError(validation_errors_to_string(e, None)))?;

    let mut transaction = pool.begin().await?;

    let client_id = generate_oauth_client_id(&mut transaction).await?;

    // Only the hash is stored; the plaintext secret is returned once below.
    let client_secret = generate_oauth_client_secret();
    let client_secret_hash = DBOAuthClient::hash_secret(&client_secret);

    let redirect_uris =
        create_redirect_uris(&new_oauth_app.redirect_uris, client_id, &mut transaction).await?;

    let client = OAuthClient {
        id: client_id,
        icon_url: None,
        raw_icon_url: None,
        max_scopes: new_oauth_app.max_scopes,
        name: new_oauth_app.name.clone(),
        redirect_uris,
        created: Utc::now(),
        created_by: current_user.id.into(),
        url: new_oauth_app.url.clone(),
        description: new_oauth_app.description.clone(),
        secret_hash: client_secret_hash,
    };
    // Clone so the original can still be converted for the response below.
    client.clone().insert(&mut transaction).await?;

    transaction.commit().await?;

    let client = models::oauth_clients::OAuthClient::from(client);

    Ok(HttpResponse::Ok().json(OAuthClientCreationResult {
        client,
        client_secret,
    }))
}
|
||||
|
||||
/// Delete an OAuth application.
///
/// Requires a session token (`SESSION_ACCESS`); the caller must be
/// authorized for the client (ownership check via `validate_authorized`).
/// Returns 404 if the client does not exist.
#[delete("app/{id}")]
pub async fn oauth_client_delete<'a>(
    req: HttpRequest,
    client_id: web::Path<ApiOAuthClientId>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Authenticate the request; the tuple's second element is the user record.
    let current_user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::SESSION_ACCESS]),
    )
    .await?
    .1;

    let client = OAuthClient::get(client_id.into_inner().into(), &**pool).await?;
    if let Some(client) = client {
        // Rejects callers who are not authorized to manage this client.
        client.validate_authorized(Some(&current_user))?;
        OAuthClient::remove(client.id, &**pool).await?;

        Ok(HttpResponse::NoContent().body(""))
    } else {
        Err(ApiError::NotFound)
    }
}
|
||||
|
||||
/// Partial-update request body for editing an OAuth application.
///
/// Each field is optional: `None` means "leave unchanged". For `url` and
/// `description` the nested `Option` distinguishes "unchanged" (outer `None`)
/// from "clear the value" (`Some(None)`).
#[derive(Serialize, Deserialize, Validate)]
pub struct OAuthClientEdit {
    /// New display name (3–255 chars, project naming rules).
    #[validate(
        custom(function = "crate::util::validate::validate_name"),
        length(min = 3, max = 255)
    )]
    pub name: Option<String>,

    /// New maximum scope set; restricted scopes are rejected.
    #[validate(custom(function = "crate::util::validate::validate_no_restricted_scopes"))]
    pub max_scopes: Option<Scopes>,

    /// Full replacement list of redirect URIs (must be non-empty if present).
    #[validate(length(min = 1))]
    pub redirect_uris: Option<Vec<String>>,

    /// New homepage URL, or `Some(None)` to remove it.
    #[validate(
        custom(function = "crate::util::validate::validate_url"),
        length(max = 255)
    )]
    pub url: Option<Option<String>>,

    /// New description, or `Some(None)` to remove it.
    #[validate(length(max = 255))]
    pub description: Option<Option<String>>,
}
|
||||
|
||||
/// Apply a partial update to an OAuth application.
///
/// Requires a session token (`SESSION_ACCESS`) and authorization for the
/// client. Scalar fields and the redirect-URI set are updated in a single
/// transaction; absent fields are left untouched. Returns 404 if the client
/// does not exist.
#[patch("app/{id}")]
pub async fn oauth_client_edit(
    req: HttpRequest,
    client_id: web::Path<ApiOAuthClientId>,
    client_updates: web::Json<OAuthClientEdit>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Authenticate the request; the tuple's second element is the user record.
    let current_user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::SESSION_ACCESS]),
    )
    .await?
    .1;

    // Run the derive(Validate) checks declared on OAuthClientEdit.
    client_updates
        .validate()
        .map_err(|e| ApiError::Validation(validation_errors_to_string(e, None)))?;

    if let Some(existing_client) = OAuthClient::get(client_id.into_inner().into(), &**pool).await? {
        existing_client.validate_authorized(Some(&current_user))?;

        // Copy the row, then overlay every provided field.
        let mut updated_client = existing_client.clone();
        let OAuthClientEdit {
            name,
            max_scopes,
            redirect_uris,
            url,
            description,
        } = client_updates.into_inner();
        if let Some(name) = name {
            updated_client.name = name;
        }

        if let Some(max_scopes) = max_scopes {
            updated_client.max_scopes = max_scopes;
        }

        // `url`/`description` are Option<Option<_>>: Some(None) clears them.
        if let Some(url) = url {
            updated_client.url = url;
        }

        if let Some(description) = description {
            updated_client.description = description;
        }

        let mut transaction = pool.begin().await?;
        updated_client
            .update_editable_fields(&mut *transaction)
            .await?;

        // Redirect URIs are diffed against the stored set, not blindly replaced.
        if let Some(redirects) = redirect_uris {
            edit_redirects(redirects, &existing_client, &mut transaction).await?;
        }

        transaction.commit().await?;

        Ok(HttpResponse::Ok().body(""))
    } else {
        Err(ApiError::NotFound)
    }
}
|
||||
|
||||
/// Query parameter carrying the file extension of an uploaded icon
/// (e.g. `?ext=png`), used to pick the image format during processing.
#[derive(Serialize, Deserialize)]
pub struct Extension {
    pub ext: String,
}
|
||||
|
||||
/// Upload (or replace) an OAuth client's icon.
///
/// Requires a session token (`SESSION_ACCESS`) and authorization for the
/// client. The payload is the raw image body, capped at 256 KiB; the image
/// is resized/optimized before storage and both the optimized and raw URLs
/// are persisted.
///
/// NOTE(review): the old images are deleted *before* the new upload is read
/// and stored — if the upload fails partway, the client is left with no
/// icon. Consider uploading first and deleting afterwards; confirm intent.
#[patch("app/{id}/icon")]
#[allow(clippy::too_many_arguments)]
pub async fn oauth_client_icon_edit(
    web::Query(ext): web::Query<Extension>,
    req: HttpRequest,
    client_id: web::Path<ApiOAuthClientId>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
    mut payload: web::Payload,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Authenticate the request; the tuple's second element is the user record.
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::SESSION_ACCESS]),
    )
    .await?
    .1;

    let client = OAuthClient::get((*client_id).into(), &**pool)
        .await?
        .ok_or_else(|| {
            ApiError::InvalidInput("The specified client does not exist!".to_string())
        })?;

    client.validate_authorized(Some(&user))?;

    // Remove any previously stored icon variants from the file host.
    delete_old_images(
        client.icon_url.clone(),
        client.raw_icon_url.clone(),
        &***file_host,
    )
    .await?;

    // 262144 bytes = 256 KiB upload cap.
    let bytes =
        read_from_payload(&mut payload, 262144, "Icons must be smaller than 256KiB").await?;
    let upload_result = upload_image_optimized(
        &format!("data/{}", client_id),
        bytes.freeze(),
        &ext.ext,
        Some(96),
        Some(1.0),
        &***file_host,
    )
    .await?;

    let mut transaction = pool.begin().await?;

    // Persist both the optimized and the raw image URLs on the client row.
    let mut editable_client = client.clone();
    editable_client.icon_url = Some(upload_result.url);
    editable_client.raw_icon_url = Some(upload_result.raw_url);

    editable_client
        .update_editable_fields(&mut *transaction)
        .await?;

    transaction.commit().await?;

    Ok(HttpResponse::NoContent().body(""))
}
|
||||
|
||||
/// Delete an OAuth client's icon.
///
/// Requires a session token (`SESSION_ACCESS`) and authorization for the
/// client. Removes the stored image files from the file host, then clears
/// both icon URL columns on the client row.
#[delete("app/{id}/icon")]
pub async fn oauth_client_icon_delete(
    req: HttpRequest,
    client_id: web::Path<ApiOAuthClientId>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Authenticate the request; the tuple's second element is the user record.
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::SESSION_ACCESS]),
    )
    .await?
    .1;

    let client = OAuthClient::get((*client_id).into(), &**pool)
        .await?
        .ok_or_else(|| {
            ApiError::InvalidInput("The specified client does not exist!".to_string())
        })?;
    client.validate_authorized(Some(&user))?;

    // Remove the stored image variants from the file host.
    delete_old_images(
        client.icon_url.clone(),
        client.raw_icon_url.clone(),
        &***file_host,
    )
    .await?;

    let mut transaction = pool.begin().await?;

    // Clear both URL columns so the client renders without an icon.
    let mut editable_client = client.clone();
    editable_client.icon_url = None;
    editable_client.raw_icon_url = None;

    editable_client
        .update_editable_fields(&mut *transaction)
        .await?;
    transaction.commit().await?;

    Ok(HttpResponse::NoContent().body(""))
}
|
||||
|
||||
#[get("authorizations")]
|
||||
pub async fn get_user_oauth_authorizations(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let current_user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::SESSION_ACCESS]),
|
||||
)
|
||||
.await?
|
||||
.1;
|
||||
|
||||
let authorizations =
|
||||
OAuthClientAuthorization::get_all_for_user(current_user.id.into(), &**pool).await?;
|
||||
|
||||
let mapped: Vec<models::oauth_clients::OAuthClientAuthorization> =
|
||||
authorizations.into_iter().map(|a| a.into()).collect_vec();
|
||||
|
||||
Ok(HttpResponse::Ok().json(mapped))
|
||||
}
|
||||
|
||||
/// Revoke the calling user's authorization for a given OAuth client.
///
/// Requires a session token (`SESSION_ACCESS`). The client to revoke comes
/// from the `client_id` query parameter; the removal is scoped to the
/// caller's own authorization row.
#[delete("authorizations")]
pub async fn revoke_oauth_authorization(
    req: HttpRequest,
    info: web::Query<DeleteOAuthClientQueryParam>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Authenticate the request; the tuple's second element is the user record.
    let current_user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::SESSION_ACCESS]),
    )
    .await?
    .1;

    OAuthClientAuthorization::remove(info.client_id.into(), current_user.id.into(), &**pool)
        .await?;

    Ok(HttpResponse::Ok().body(""))
}
|
||||
|
||||
fn generate_oauth_client_secret() -> String {
|
||||
ChaCha20Rng::from_entropy()
|
||||
.sample_iter(&Alphanumeric)
|
||||
.take(32)
|
||||
.map(char::from)
|
||||
.collect::<String>()
|
||||
}
|
||||
|
||||
/// Build `OAuthRedirectUri` rows for each URI string, allocating a fresh
/// database ID per row within the given transaction.
///
/// Accepts anything displayable so callers can pass `&String`s or set
/// iterators directly. The rows are returned for insertion by the caller.
async fn create_redirect_uris(
    uri_strings: impl IntoIterator<Item = impl Display>,
    client_id: OAuthClientId,
    transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<Vec<OAuthRedirectUri>, DatabaseError> {
    let mut redirect_uris = vec![];
    for uri in uri_strings.into_iter() {
        // IDs are generated sequentially inside the caller's transaction.
        let id = generate_oauth_redirect_id(transaction).await?;
        redirect_uris.push(OAuthRedirectUri {
            id,
            client_id,
            uri: uri.to_string(),
        });
    }

    Ok(redirect_uris)
}
|
||||
|
||||
/// Reconcile a client's stored redirect URIs with a new full list.
///
/// Computes the set difference in both directions: URIs present only in
/// `redirects` are inserted (with fresh IDs), URIs present only in the
/// stored set are removed. URIs in both sets keep their existing rows.
async fn edit_redirects(
    redirects: Vec<String>,
    existing_client: &OAuthClient,
    transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), DatabaseError> {
    let updated_redirects: HashSet<String> = redirects.into_iter().collect();
    let original_redirects: HashSet<String> = existing_client
        .redirect_uris
        .iter()
        .map(|r| r.uri.to_string())
        .collect();

    // Insert URIs that are new relative to the stored set.
    let redirects_to_add = create_redirect_uris(
        updated_redirects.difference(&original_redirects),
        existing_client.id,
        &mut *transaction,
    )
    .await?;
    OAuthClient::insert_redirect_uris(&redirects_to_add, &mut **transaction).await?;

    // Remove stored URIs that are absent from the updated set.
    let mut redirects_to_remove = existing_client.redirect_uris.clone();
    redirects_to_remove.retain(|r| !updated_redirects.contains(&r.uri));
    OAuthClient::remove_redirect_uris(redirects_to_remove.iter().map(|r| r.id), &mut **transaction)
        .await?;

    Ok(())
}
|
||||
|
||||
pub async fn get_clients_inner(
|
||||
ids: &[ApiOAuthClientId],
|
||||
pool: web::Data<PgPool>,
|
||||
) -> Result<Vec<models::oauth_clients::OAuthClient>, ApiError> {
|
||||
let ids: Vec<OAuthClientId> = ids.iter().map(|i| (*i).into()).collect();
|
||||
let clients = OAuthClient::get_many(&ids, &**pool).await?;
|
||||
|
||||
Ok(clients.into_iter().map(|c| c.into()).collect_vec())
|
||||
}
|
||||
1091
apps/labrinth/src/routes/v3/organizations.rs
Normal file
1091
apps/labrinth/src/routes/v3/organizations.rs
Normal file
File diff suppressed because it is too large
Load Diff
934
apps/labrinth/src/routes/v3/payouts.rs
Normal file
934
apps/labrinth/src/routes/v3/payouts.rs
Normal file
@@ -0,0 +1,934 @@
|
||||
use crate::auth::validate::get_user_record_from_bearer_token;
|
||||
use crate::auth::{get_user_from_headers, AuthenticationError};
|
||||
use crate::database::models::generate_payout_id;
|
||||
use crate::database::redis::RedisPool;
|
||||
use crate::models::ids::PayoutId;
|
||||
use crate::models::pats::Scopes;
|
||||
use crate::models::payouts::{PayoutMethodType, PayoutStatus};
|
||||
use crate::queue::payouts::{make_aditude_request, PayoutsQueue};
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::routes::ApiError;
|
||||
use actix_web::{delete, get, post, web, HttpRequest, HttpResponse};
|
||||
use chrono::{Datelike, Duration, TimeZone, Utc, Weekday};
|
||||
use hex::ToHex;
|
||||
use hmac::{Hmac, Mac, NewMac};
|
||||
use reqwest::Method;
|
||||
use rust_decimal::Decimal;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::json;
|
||||
use sha2::Sha256;
|
||||
use sqlx::PgPool;
|
||||
use std::collections::HashMap;
|
||||
|
||||
pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
cfg.service(
|
||||
web::scope("payout")
|
||||
.service(paypal_webhook)
|
||||
.service(tremendous_webhook)
|
||||
.service(user_payouts)
|
||||
.service(create_payout)
|
||||
.service(cancel_payout)
|
||||
.service(payment_methods)
|
||||
.service(get_balance)
|
||||
.service(platform_revenue),
|
||||
);
|
||||
}
|
||||
|
||||
/// Inbound PayPal payouts webhook.
///
/// Verifies the webhook signature by forwarding the transmission headers and
/// the *raw* request body to PayPal's `verify-webhook-signature` API, then
/// updates the matching `payouts` row:
/// - blocked/denied/refunded/returned events mark the payout `Failed`
///   (`Cancelled` for CANCELED) — but only if it is still `InTransit` —
///   and clear the owner's user cache;
/// - SUCCEEDED marks the payout `Success`;
/// - all other event types are ignored.
#[post("_paypal")]
pub async fn paypal_webhook(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    payouts: web::Data<PayoutsQueue>,
    body: String,
) -> Result<HttpResponse, ApiError> {
    // All five transmission headers are required for signature verification.
    let auth_algo = req
        .headers()
        .get("PAYPAL-AUTH-ALGO")
        .and_then(|x| x.to_str().ok())
        .ok_or_else(|| ApiError::InvalidInput("missing auth algo".to_string()))?;
    let cert_url = req
        .headers()
        .get("PAYPAL-CERT-URL")
        .and_then(|x| x.to_str().ok())
        .ok_or_else(|| ApiError::InvalidInput("missing cert url".to_string()))?;
    let transmission_id = req
        .headers()
        .get("PAYPAL-TRANSMISSION-ID")
        .and_then(|x| x.to_str().ok())
        .ok_or_else(|| ApiError::InvalidInput("missing transmission ID".to_string()))?;
    let transmission_sig = req
        .headers()
        .get("PAYPAL-TRANSMISSION-SIG")
        .and_then(|x| x.to_str().ok())
        .ok_or_else(|| ApiError::InvalidInput("missing transmission sig".to_string()))?;
    let transmission_time = req
        .headers()
        .get("PAYPAL-TRANSMISSION-TIME")
        .and_then(|x| x.to_str().ok())
        .ok_or_else(|| ApiError::InvalidInput("missing transmission time".to_string()))?;

    #[derive(Deserialize)]
    struct WebHookResponse {
        verification_status: String,
    }

    let webhook_res = payouts
        .make_paypal_request::<(), WebHookResponse>(
            Method::POST,
            "notifications/verify-webhook-signature",
            None,
            // This is needed as serde re-orders fields, which causes the validation to fail for PayPal.
            Some(format!(
                "{{
                    \"auth_algo\": \"{auth_algo}\",
                    \"cert_url\": \"{cert_url}\",
                    \"transmission_id\": \"{transmission_id}\",
                    \"transmission_sig\": \"{transmission_sig}\",
                    \"transmission_time\": \"{transmission_time}\",
                    \"webhook_id\": \"{}\",
                    \"webhook_event\": {body}
                }}",
                dotenvy::var("PAYPAL_WEBHOOK_ID")?
            )),
            None,
        )
        .await?;

    // PayPal answers "SUCCESS" only for an authentic, untampered event.
    if &webhook_res.verification_status != "SUCCESS" {
        return Err(ApiError::InvalidInput(
            "Invalid webhook signature".to_string(),
        ));
    }

    #[derive(Deserialize)]
    struct PayPalResource {
        pub payout_item_id: String,
    }

    #[derive(Deserialize)]
    struct PayPalWebhook {
        pub event_type: String,
        pub resource: PayPalResource,
    }

    // Re-parse the raw body only after the signature check passed.
    let webhook = serde_json::from_str::<PayPalWebhook>(&body)?;

    match &*webhook.event_type {
        "PAYMENT.PAYOUTS-ITEM.BLOCKED"
        | "PAYMENT.PAYOUTS-ITEM.DENIED"
        | "PAYMENT.PAYOUTS-ITEM.REFUNDED"
        | "PAYMENT.PAYOUTS-ITEM.RETURNED"
        | "PAYMENT.PAYOUTS-ITEM.CANCELED" => {
            let mut transaction = pool.begin().await?;

            // Only act on payouts still marked in-transit; terminal rows are untouched.
            let result = sqlx::query!(
                "SELECT user_id, amount, fee FROM payouts WHERE platform_id = $1 AND status = $2",
                webhook.resource.payout_item_id,
                PayoutStatus::InTransit.as_str()
            )
            .fetch_optional(&mut *transaction)
            .await?;

            if let Some(result) = result {
                sqlx::query!(
                    "
                    UPDATE payouts
                    SET status = $1
                    WHERE platform_id = $2
                    ",
                    // CANCELED maps to Cancelled; the other failure events map to Failed.
                    if &*webhook.event_type == "PAYMENT.PAYOUTS-ITEM.CANCELED" {
                        PayoutStatus::Cancelled
                    } else {
                        PayoutStatus::Failed
                    }
                    .as_str(),
                    webhook.resource.payout_item_id
                )
                .execute(&mut *transaction)
                .await?;

                transaction.commit().await?;

                // Invalidate the owner's cached user record so balances refresh.
                crate::database::models::user_item::User::clear_caches(
                    &[(crate::database::models::UserId(result.user_id), None)],
                    &redis,
                )
                .await?;
            }
        }
        "PAYMENT.PAYOUTS-ITEM.SUCCEEDED" => {
            let mut transaction = pool.begin().await?;
            sqlx::query!(
                "
                UPDATE payouts
                SET status = $1
                WHERE platform_id = $2
                ",
                PayoutStatus::Success.as_str(),
                webhook.resource.payout_item_id
            )
            .execute(&mut *transaction)
            .await?;
            transaction.commit().await?;
        }
        // Unrecognized event types are acknowledged without action.
        _ => {}
    }

    Ok(HttpResponse::NoContent().finish())
}
|
||||
|
||||
/// Webhook endpoint hit by Tremendous whenever one of our rewards changes
/// state.
///
/// Verifies the `Tremendous-Webhook-Signature` header (an HMAC-SHA256 of the
/// raw request body, keyed with `TREMENDOUS_PRIVATE_KEY`) before acting, then
/// transitions the matching payout row:
/// * `REWARDS.CANCELED` / `REWARDS.DELIVERY.FAILED` → `cancelled` / `failed`
///   (only for payouts still `in-transit`), and clears the affected user's
///   cache so their balance is recomputed.
/// * `REWARDS.DELIVERY.SUCCEEDED` → `success`.
///
/// Unknown events are ignored; success responds `204 No Content`.
#[post("_tremendous")]
pub async fn tremendous_webhook(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    body: String,
) -> Result<HttpResponse, ApiError> {
    // The header is of the form "sha256=<hex digest>"; keep only the digest.
    let signature = req
        .headers()
        .get("Tremendous-Webhook-Signature")
        .and_then(|x| x.to_str().ok())
        .and_then(|x| x.split('=').next_back())
        .ok_or_else(|| ApiError::InvalidInput("missing webhook signature".to_string()))?;

    // Recompute the HMAC over the exact raw body we received.
    let mut mac: Hmac<Sha256> =
        Hmac::new_from_slice(dotenvy::var("TREMENDOUS_PRIVATE_KEY")?.as_bytes())
            .map_err(|_| ApiError::Payments("error initializing HMAC".to_string()))?;
    mac.update(body.as_bytes());
    let request_signature = mac.finalize().into_bytes().encode_hex::<String>();

    // NOTE(review): this is a plain, non-constant-time string comparison of
    // the two hex digests; a constant-time compare (e.g. Hmac::verify_slice)
    // would rule out timing side channels — TODO confirm whether that matters
    // for this endpoint.
    if &*request_signature != signature {
        return Err(ApiError::InvalidInput(
            "Invalid webhook signature".to_string(),
        ));
    }

    // Only the fields of Tremendous' payload that we actually use.
    #[derive(Deserialize)]
    pub struct TremendousResource {
        pub id: String,
    }

    #[derive(Deserialize)]
    struct TremendousPayload {
        pub resource: TremendousResource,
    }

    #[derive(Deserialize)]
    struct TremendousWebhook {
        pub event: String,
        pub payload: TremendousPayload,
    }

    let webhook = serde_json::from_str::<TremendousWebhook>(&body)?;

    match &*webhook.event {
        "REWARDS.CANCELED" | "REWARDS.DELIVERY.FAILED" => {
            let mut transaction = pool.begin().await?;

            // Only act on payouts still in transit; anything already settled
            // is left untouched. (If no row matches, the transaction is
            // simply dropped and rolled back.)
            let result = sqlx::query!(
                "SELECT user_id, amount, fee FROM payouts WHERE platform_id = $1 AND status = $2",
                webhook.payload.resource.id,
                PayoutStatus::InTransit.as_str()
            )
            .fetch_optional(&mut *transaction)
            .await?;

            if let Some(result) = result {
                sqlx::query!(
                    "
                    UPDATE payouts
                    SET status = $1
                    WHERE platform_id = $2
                    ",
                    // CANCELED and DELIVERY.FAILED map to distinct statuses.
                    if &*webhook.event == "REWARDS.CANCELED" {
                        PayoutStatus::Cancelled
                    } else {
                        PayoutStatus::Failed
                    }
                    .as_str(),
                    webhook.payload.resource.id
                )
                .execute(&mut *transaction)
                .await?;

                transaction.commit().await?;

                // The payout no longer counts as withdrawn, so invalidate the
                // user's cached data (their balance changed).
                crate::database::models::user_item::User::clear_caches(
                    &[(crate::database::models::UserId(result.user_id), None)],
                    &redis,
                )
                .await?;
            }
        }
        "REWARDS.DELIVERY.SUCCEEDED" => {
            let mut transaction = pool.begin().await?;
            sqlx::query!(
                "
                UPDATE payouts
                SET status = $1
                WHERE platform_id = $2
                ",
                PayoutStatus::Success.as_str(),
                webhook.payload.resource.id
            )
            .execute(&mut *transaction)
            .await?;
            transaction.commit().await?;
        }
        // Events we don't track are acknowledged but ignored.
        _ => {}
    }

    Ok(HttpResponse::NoContent().finish())
}
|
||||
|
||||
#[get("")]
|
||||
pub async fn user_payouts(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PAYOUTS_READ]),
|
||||
)
|
||||
.await?
|
||||
.1;
|
||||
|
||||
let payout_ids =
|
||||
crate::database::models::payout_item::Payout::get_all_for_user(user.id.into(), &**pool)
|
||||
.await?;
|
||||
let payouts =
|
||||
crate::database::models::payout_item::Payout::get_many(&payout_ids, &**pool).await?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(
|
||||
payouts
|
||||
.into_iter()
|
||||
.map(crate::models::payouts::Payout::from)
|
||||
.collect::<Vec<_>>(),
|
||||
))
|
||||
}
|
||||
|
||||
/// Request body for creating a payout: withdraw `amount` (USD) from the
/// user's balance via the chosen payout provider.
#[derive(Deserialize)]
pub struct Withdrawal {
    // Serialized as a JSON float; parsed into an exact Decimal.
    #[serde(with = "rust_decimal::serde::float")]
    amount: Decimal,
    // Which provider to pay through (PayPal, Venmo, Tremendous, ...).
    method: PayoutMethodType,
    // Provider-specific method/product id (e.g. "paypal_us" or a Tremendous
    // product id); validated against the methods the queue advertises.
    method_id: String,
}
|
||||
|
||||
/// Creates a payout (withdrawal) for the authenticated user.
///
/// Flow: authenticate with the `PAYOUTS_WRITE` scope, lock the user's row,
/// check the available balance covers the requested amount, compute the
/// provider fee, then dispatch to the chosen provider (PayPal/Venmo via the
/// PayPal Payouts API, or Tremendous). On success the payout row is inserted
/// as `in-transit` and the user's cache is cleared. Responds `204`.
///
/// NOTE(review): the external provider call happens before the payout row is
/// committed — if the insert/commit fails afterwards, money may have been
/// sent without a recorded payout. TODO confirm this is acceptable/handled
/// operationally.
#[post("")]
pub async fn create_payout(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    body: web::Json<Withdrawal>,
    session_queue: web::Data<AuthQueue>,
    payouts_queue: web::Data<PayoutsQueue>,
) -> Result<HttpResponse, ApiError> {
    let (scopes, user) =
        get_user_record_from_bearer_token(&req, None, &**pool, &redis, &session_queue)
            .await?
            .ok_or_else(|| ApiError::Authentication(AuthenticationError::InvalidCredentials))?;

    if !scopes.contains(Scopes::PAYOUTS_WRITE) {
        return Err(ApiError::Authentication(
            AuthenticationError::InvalidCredentials,
        ));
    }

    let mut transaction = pool.begin().await?;

    // Row lock on the user for the duration of the transaction, so two
    // concurrent withdrawals can't both pass the balance check below.
    // (The selected value itself is intentionally discarded.)
    sqlx::query!(
        "
        SELECT balance FROM users WHERE id = $1 FOR UPDATE
        ",
        user.id.0
    )
    .fetch_optional(&mut *transaction)
    .await?;

    let balance = get_user_balance(user.id, &pool).await?;
    // Rejects both insufficient funds and negative requests.
    if balance.available < body.amount || body.amount < Decimal::ZERO {
        return Err(ApiError::InvalidInput(
            "You do not have enough funds to make this payout!".to_string(),
        ));
    }

    let payout_method = payouts_queue
        .get_payout_methods()
        .await?
        .into_iter()
        .find(|x| x.id == body.method_id)
        .ok_or_else(|| ApiError::InvalidInput("Invalid payment method specified!".to_string()))?;

    // fee = clamp(percentage * amount, min, max); a missing max means
    // effectively unbounded.
    let fee = std::cmp::min(
        std::cmp::max(
            payout_method.fee.min,
            payout_method.fee.percentage * body.amount,
        ),
        payout_method.fee.max.unwrap_or(Decimal::MAX),
    );

    // Amount actually sent to the user, rounded to cents.
    let transfer = (body.amount - fee).round_dp(2);
    if transfer <= Decimal::ZERO {
        return Err(ApiError::InvalidInput(
            "You need to withdraw more to cover the fee!".to_string(),
        ));
    }

    let payout_id = generate_payout_id(&mut transaction).await?;

    let payout_item = match body.method {
        PayoutMethodType::Venmo | PayoutMethodType::PayPal => {
            // Both Venmo and PayPal go through the PayPal Payouts API; they
            // differ only in the wallet/recipient fields sent.
            let (wallet, wallet_type, address, display_address) =
                if body.method == PayoutMethodType::Venmo {
                    if let Some(venmo) = user.venmo_handle {
                        ("Venmo", "user_handle", venmo.clone(), venmo)
                    } else {
                        return Err(ApiError::InvalidInput(
                            "Venmo address has not been set for account!".to_string(),
                        ));
                    }
                } else if let Some(paypal_id) = user.paypal_id {
                    if let Some(paypal_country) = user.paypal_country {
                        // US-registered PayPal accounts must use the US
                        // method and vice versa (different fee schedules).
                        if &*paypal_country == "US" && &*body.method_id != "paypal_us" {
                            return Err(ApiError::InvalidInput(
                                "Please use the US PayPal transfer option!".to_string(),
                            ));
                        } else if &*paypal_country != "US" && &*body.method_id == "paypal_us" {
                            return Err(ApiError::InvalidInput(
                                "Please use the International PayPal transfer option!".to_string(),
                            ));
                        }

                        (
                            "PayPal",
                            "paypal_id",
                            paypal_id.clone(),
                            // Prefer a human-readable email for display.
                            user.paypal_email.unwrap_or(paypal_id),
                        )
                    } else {
                        return Err(ApiError::InvalidInput(
                            "Please re-link your PayPal account!".to_string(),
                        ));
                    }
                } else {
                    return Err(ApiError::InvalidInput(
                        "You have not linked a PayPal account!".to_string(),
                    ));
                };

            // Only the response fields we need from PayPal.
            #[derive(Deserialize)]
            struct PayPalLink {
                href: String,
            }

            #[derive(Deserialize)]
            struct PayoutsResponse {
                pub links: Vec<PayPalLink>,
            }

            let mut payout_item = crate::database::models::payout_item::Payout {
                id: payout_id,
                user_id: user.id,
                created: Utc::now(),
                status: PayoutStatus::InTransit,
                amount: transfer,
                fee: Some(fee),
                method: Some(body.method),
                method_address: Some(display_address),
                platform_id: None,
            };

            let res: PayoutsResponse = payouts_queue.make_paypal_request(
                Method::POST,
                "payments/payouts",
                Some(
                    json! ({
                        "sender_batch_header": {
                            "sender_batch_id": format!("{}-payouts", Utc::now().to_rfc3339()),
                            "email_subject": "You have received a payment from Modrinth!",
                            "email_message": "Thank you for creating projects on Modrinth. Please claim this payment within 30 days.",
                        },
                        "items": [{
                            "amount": {
                                "currency": "USD",
                                "value": transfer.to_string()
                            },
                            "receiver": address,
                            "note": "Payment from Modrinth creator monetization program",
                            "recipient_type": wallet_type,
                            "recipient_wallet": wallet,
                            "sender_item_id": crate::models::ids::PayoutId::from(payout_id),
                        }]
                    })
                ),
                None,
                None
            ).await?;

            // Follow the batch link to recover the per-item platform id so
            // webhooks/cancellation can find this payout later. Best-effort:
            // a failure here leaves platform_id as None.
            if let Some(link) = res.links.first() {
                #[derive(Deserialize)]
                struct PayoutItem {
                    pub payout_item_id: String,
                }

                #[derive(Deserialize)]
                struct PayoutData {
                    pub items: Vec<PayoutItem>,
                }

                if let Ok(res) = payouts_queue
                    .make_paypal_request::<(), PayoutData>(
                        Method::GET,
                        &link.href,
                        None,
                        None,
                        Some(true),
                    )
                    .await
                {
                    if let Some(data) = res.items.first() {
                        payout_item.platform_id = Some(data.payout_item_id.clone());
                    }
                }
            }

            payout_item
        }
        PayoutMethodType::Tremendous => {
            // Tremendous delivers rewards by email, so a verified email is
            // required before we can send anything.
            if let Some(email) = user.email {
                if user.email_verified {
                    let mut payout_item = crate::database::models::payout_item::Payout {
                        id: payout_id,
                        user_id: user.id,
                        created: Utc::now(),
                        status: PayoutStatus::InTransit,
                        amount: transfer,
                        fee: Some(fee),
                        method: Some(PayoutMethodType::Tremendous),
                        method_address: Some(email.clone()),
                        platform_id: None,
                    };

                    // Only the response fields we need from Tremendous.
                    #[derive(Deserialize)]
                    struct Reward {
                        pub id: String,
                    }

                    #[derive(Deserialize)]
                    struct Order {
                        pub rewards: Vec<Reward>,
                    }

                    #[derive(Deserialize)]
                    struct TremendousResponse {
                        pub order: Order,
                    }

                    let res: TremendousResponse = payouts_queue
                        .make_tremendous_request(
                            Method::POST,
                            "orders",
                            Some(json! ({
                                "payment": {
                                    "funding_source_id": "BALANCE",
                                },
                                "rewards": [{
                                    "value": {
                                        "denomination": transfer
                                    },
                                    "delivery": {
                                        "method": "EMAIL"
                                    },
                                    "recipient": {
                                        "name": user.username,
                                        "email": email
                                    },
                                    "products": [
                                        &body.method_id,
                                    ],
                                    "campaign_id": dotenvy::var("TREMENDOUS_CAMPAIGN_ID")?,
                                }]
                            })),
                        )
                        .await?;

                    // Record the reward id so webhooks/cancellation can find
                    // this payout later.
                    if let Some(reward) = res.order.rewards.first() {
                        payout_item.platform_id = Some(reward.id.clone())
                    }

                    payout_item
                } else {
                    return Err(ApiError::InvalidInput(
                        "You must verify your account email to proceed!".to_string(),
                    ));
                }
            } else {
                return Err(ApiError::InvalidInput(
                    "You must add an email to your account to proceed!".to_string(),
                ));
            }
        }
        PayoutMethodType::Unknown => {
            return Err(ApiError::Payments(
                "Invalid payment method specified!".to_string(),
            ))
        }
    };

    payout_item.insert(&mut transaction).await?;

    transaction.commit().await?;
    // Balance changed — invalidate the user's cached data.
    crate::database::models::User::clear_caches(&[(user.id, None)], &redis).await?;

    Ok(HttpResponse::NoContent().finish())
}
|
||||
|
||||
/// Cancels an in-transit payout.
///
/// Only the payout's owner (or an admin) may cancel it; non-owners get a
/// `404` rather than an error, so payout ids are not leaked. The provider is
/// asked to cancel first, then the row is marked `cancelling` (the final
/// `cancelled` state arrives via the provider's webhook). Payouts with no
/// recorded platform id/method, or not in `in-transit` state, cannot be
/// cancelled.
#[delete("{id}")]
pub async fn cancel_payout(
    info: web::Path<(PayoutId,)>,
    req: HttpRequest,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    payouts: web::Data<PayoutsQueue>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::PAYOUTS_WRITE]),
    )
    .await?
    .1;

    let id = info.into_inner().0;
    let payout = crate::database::models::payout_item::Payout::get(id.into(), &**pool).await?;

    if let Some(payout) = payout {
        // Hide other users' payouts behind a 404 (not a 403).
        if payout.user_id != user.id.into() && !user.role.is_admin() {
            return Ok(HttpResponse::NotFound().finish());
        }

        if let Some(platform_id) = payout.platform_id {
            if let Some(method) = payout.method {
                // Once a payout has settled (success/failed/cancelled) it can
                // no longer be cancelled.
                if payout.status != PayoutStatus::InTransit {
                    return Err(ApiError::InvalidInput(
                        "Payout cannot be cancelled!".to_string(),
                    ));
                }

                // Ask the provider to cancel before touching our own state;
                // if this call fails we leave the row as-is.
                match method {
                    PayoutMethodType::Venmo | PayoutMethodType::PayPal => {
                        payouts
                            .make_paypal_request::<(), ()>(
                                Method::POST,
                                &format!("payments/payouts-item/{}/cancel", platform_id),
                                None,
                                None,
                                None,
                            )
                            .await?;
                    }
                    PayoutMethodType::Tremendous => {
                        payouts
                            .make_tremendous_request::<(), ()>(
                                Method::POST,
                                &format!("rewards/{}/cancel", platform_id),
                                None,
                            )
                            .await?;
                    }
                    PayoutMethodType::Unknown => {
                        return Err(ApiError::InvalidInput(
                            "Payout cannot be cancelled!".to_string(),
                        ))
                    }
                }

                // Provider accepted the cancellation: mark the payout as
                // cancelling until the webhook confirms it.
                let mut transaction = pool.begin().await?;
                sqlx::query!(
                    "
                    UPDATE payouts
                    SET status = $1
                    WHERE platform_id = $2
                    ",
                    PayoutStatus::Cancelling.as_str(),
                    platform_id
                )
                .execute(&mut *transaction)
                .await?;
                transaction.commit().await?;

                Ok(HttpResponse::NoContent().finish())
            } else {
                // No recorded method — nothing we can tell the provider.
                Err(ApiError::InvalidInput(
                    "Payout cannot be cancelled!".to_string(),
                ))
            }
        } else {
            // No platform id — the provider-side item was never recorded.
            Err(ApiError::InvalidInput(
                "Payout cannot be cancelled!".to_string(),
            ))
        }
    } else {
        Ok(HttpResponse::NotFound().finish())
    }
}
|
||||
|
||||
/// Query parameters for `GET methods`.
#[derive(Deserialize)]
pub struct MethodFilter {
    // When present, only methods supporting this country are returned.
    pub country: Option<String>,
}
|
||||
|
||||
#[get("methods")]
|
||||
pub async fn payment_methods(
|
||||
payouts_queue: web::Data<PayoutsQueue>,
|
||||
filter: web::Query<MethodFilter>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let methods = payouts_queue
|
||||
.get_payout_methods()
|
||||
.await?
|
||||
.into_iter()
|
||||
.filter(|x| {
|
||||
let mut val = true;
|
||||
|
||||
if let Some(country) = &filter.country {
|
||||
val &= x.supported_countries.contains(country);
|
||||
}
|
||||
|
||||
val
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
Ok(HttpResponse::Ok().json(methods))
|
||||
}
|
||||
|
||||
/// A user's payout balance, as returned by `GET balance`.
#[derive(Serialize)]
pub struct UserBalance {
    // Funds the user can withdraw right now (earned minus withdrawn/fees).
    pub available: Decimal,
    // Funds earned but whose availability date has not yet passed.
    pub pending: Decimal,
}
|
||||
|
||||
#[get("balance")]
|
||||
pub async fn get_balance(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PAYOUTS_READ]),
|
||||
)
|
||||
.await?
|
||||
.1;
|
||||
|
||||
let balance = get_user_balance(user.id.into(), &pool).await?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(balance))
|
||||
}
|
||||
|
||||
/// Computes a user's payout balance from the ledger tables.
///
/// * `available` — sum of `payouts_values` rows whose `date_available` has
///   passed, minus everything already withdrawn (payouts in `success` or
///   `in-transit` state) and the fees charged for those withdrawals.
/// * `pending` — sum of `payouts_values` rows not yet available.
async fn get_user_balance(
    user_id: crate::database::models::ids::UserId,
    pool: &PgPool,
) -> Result<UserBalance, sqlx::Error> {
    let available = sqlx::query!(
        "
        SELECT SUM(amount)
        FROM payouts_values
        WHERE user_id = $1 AND date_available <= NOW()
        ",
        user_id.0
    )
    .fetch_optional(pool)
    .await?;

    let pending = sqlx::query!(
        "
        SELECT SUM(amount)
        FROM payouts_values
        WHERE user_id = $1 AND date_available > NOW()
        ",
        user_id.0
    )
    .fetch_optional(pool)
    .await?;

    let withdrawn = sqlx::query!(
        "
        SELECT SUM(amount) amount, SUM(fee) fee
        FROM payouts
        WHERE user_id = $1 AND (status = 'success' OR status = 'in-transit')
        ",
        user_id.0
    )
    .fetch_optional(pool)
    .await?;

    // SQL SUM over zero rows yields NULL, so both the row and the summed
    // column are optional — treat either as zero.
    let available = available
        .map(|x| x.sum.unwrap_or(Decimal::ZERO))
        .unwrap_or(Decimal::ZERO);
    let pending = pending
        .map(|x| x.sum.unwrap_or(Decimal::ZERO))
        .unwrap_or(Decimal::ZERO);
    let (withdrawn, fees) = withdrawn
        .map(|x| {
            (
                x.amount.unwrap_or(Decimal::ZERO),
                x.fee.unwrap_or(Decimal::ZERO),
            )
        })
        .unwrap_or((Decimal::ZERO, Decimal::ZERO));

    Ok(UserBalance {
        // Each term is rounded to 16 decimal places before subtracting so
        // the result is stable regardless of stored precision.
        available: available.round_dp(16) - withdrawn.round_dp(16) - fees.round_dp(16),
        pending,
    })
}
|
||||
|
||||
/// Response body of `GET platform_revenue` (also the shape cached in Redis).
#[derive(Serialize, Deserialize)]
pub struct RevenueResponse {
    // Sum of every payout value ever recorded.
    pub all_time: Decimal,
    // One entry per day over the last 30 days.
    pub data: Vec<RevenueData>,
}
|
||||
|
||||
/// A single day of platform revenue.
#[derive(Serialize, Deserialize)]
pub struct RevenueData {
    // Unix timestamp (seconds) of the day's midnight UTC.
    pub time: u64,
    // Net ad revenue for the day.
    pub revenue: Decimal,
    // The share of that revenue paid out to creators.
    pub creator_revenue: Decimal,
}
|
||||
|
||||
/// Public endpoint reporting platform ad revenue: the all-time payouts total
/// plus a daily breakdown for the last 30 days.
///
/// Daily numbers come from Aditude revenue/impression metrics; days before
/// the 9/5/24 cutover (or days with no metric data) fall back to synthetic
/// legacy data points. The full response is cached in Redis for one hour.
#[get("platform_revenue")]
pub async fn platform_revenue(
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    let mut redis = redis.connect().await?;

    const PLATFORM_REVENUE_NAMESPACE: &str = "platform_revenue";

    // Serve the cached response if one exists (single cache slot, key "0").
    let res: Option<RevenueResponse> = redis
        .get_deserialized_from_json(PLATFORM_REVENUE_NAMESPACE, "0")
        .await?;

    if let Some(res) = res {
        return Ok(HttpResponse::Ok().json(res));
    }

    let all_time_payouts = sqlx::query!(
        "
        SELECT SUM(amount) from payouts_values
        ",
    )
    .fetch_optional(&**pool)
    .await?
    .and_then(|x| x.sum)
    .unwrap_or(Decimal::ZERO);

    let points =
        make_aditude_request(&["METRIC_REVENUE", "METRIC_IMPRESSIONS"], "30d", "1d").await?;

    // Merge the two metric series into one map:
    // timestamp -> (revenue, impressions).
    let mut points_map = HashMap::new();

    for point in points {
        for point in point.points_list {
            let entry = points_map.entry(point.time.seconds).or_insert((None, None));

            if let Some(revenue) = point.metric.revenue {
                entry.0 = Some(revenue);
            }

            if let Some(impressions) = point.metric.impressions {
                entry.1 = Some(impressions);
            }
        }
    }

    let mut revenue_data = Vec::new();
    let now = Utc::now();

    // Build one data point per day, most recent first.
    for i in 1..=30 {
        let time = now - Duration::days(i);
        // Midnight UTC of that day, matching the metric bucket timestamps.
        let start = time
            .date_naive()
            .and_hms_opt(0, 0, 0)
            .unwrap()
            .and_utc()
            .timestamp();

        if let Some((revenue, impressions)) = points_map.remove(&(start as u64)) {
            // Before 9/5/24, when legacy payouts were in effect.
            if start >= 1725494400 {
                let revenue = revenue.unwrap_or(Decimal::ZERO);
                let impressions = impressions.unwrap_or(0);

                // Modrinth's share of ad revenue
                let modrinth_cut = Decimal::from(1) / Decimal::from(4);
                // Clean.io fee (ad antimalware). Per 1000 impressions.
                let clean_io_fee = Decimal::from(8) / Decimal::from(1000);

                // NOTE(review): impressions are divided by 1000 on top of the
                // per-mille fee above, i.e. 8/1,000,000 per impression —
                // TODO confirm this double division is intentional.
                let net_revenue =
                    revenue - (clean_io_fee * Decimal::from(impressions) / Decimal::from(1000));

                let payout = net_revenue * (Decimal::from(1) - modrinth_cut);

                revenue_data.push(RevenueData {
                    time: start as u64,
                    revenue: net_revenue,
                    creator_revenue: payout,
                });

                continue;
            }
        }

        // No metrics for this day (or pre-cutover): synthesize a legacy point.
        revenue_data.push(get_legacy_data_point(start as u64));
    }

    let res = RevenueResponse {
        all_time: all_time_payouts,
        data: revenue_data,
    };

    // Cache for an hour.
    redis
        .set_serialized_to_json(PLATFORM_REVENUE_NAMESPACE, 0, &res, Some(60 * 60))
        .await?;

    Ok(HttpResponse::Ok().json(res))
}
|
||||
|
||||
fn get_legacy_data_point(timestamp: u64) -> RevenueData {
|
||||
let start = Utc.timestamp_opt(timestamp as i64, 0).unwrap();
|
||||
|
||||
let old_payouts_budget = Decimal::from(10_000);
|
||||
|
||||
let days = Decimal::from(28);
|
||||
let weekdays = Decimal::from(20);
|
||||
let weekend_bonus = Decimal::from(5) / Decimal::from(4);
|
||||
|
||||
let weekday_amount = old_payouts_budget / (weekdays + (weekend_bonus) * (days - weekdays));
|
||||
let weekend_amount = weekday_amount * weekend_bonus;
|
||||
|
||||
let payout = match start.weekday() {
|
||||
Weekday::Sat | Weekday::Sun => weekend_amount,
|
||||
_ => weekday_amount,
|
||||
};
|
||||
|
||||
RevenueData {
|
||||
time: timestamp,
|
||||
revenue: payout,
|
||||
creator_revenue: payout * (Decimal::from(9) / Decimal::from(10)),
|
||||
}
|
||||
}
|
||||
984
apps/labrinth/src/routes/v3/project_creation.rs
Normal file
984
apps/labrinth/src/routes/v3/project_creation.rs
Normal file
@@ -0,0 +1,984 @@
|
||||
use super::version_creation::{try_create_version_fields, InitialVersionData};
|
||||
use crate::auth::{get_user_from_headers, AuthenticationError};
|
||||
use crate::database::models::loader_fields::{Loader, LoaderField, LoaderFieldEnumValue};
|
||||
use crate::database::models::thread_item::ThreadBuilder;
|
||||
use crate::database::models::{self, image_item, User};
|
||||
use crate::database::redis::RedisPool;
|
||||
use crate::file_hosting::{FileHost, FileHostingError};
|
||||
use crate::models::error::ApiError;
|
||||
use crate::models::ids::base62_impl::to_base62;
|
||||
use crate::models::ids::{ImageId, OrganizationId};
|
||||
use crate::models::images::{Image, ImageContext};
|
||||
use crate::models::pats::Scopes;
|
||||
use crate::models::projects::{
|
||||
License, Link, MonetizationStatus, ProjectId, ProjectStatus, VersionId, VersionStatus,
|
||||
};
|
||||
use crate::models::teams::{OrganizationPermissions, ProjectPermissions};
|
||||
use crate::models::threads::ThreadType;
|
||||
use crate::models::users::UserId;
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::search::indexing::IndexingError;
|
||||
use crate::util::img::upload_image_optimized;
|
||||
use crate::util::routes::read_from_field;
|
||||
use crate::util::validate::validation_errors_to_string;
|
||||
use actix_multipart::{Field, Multipart};
|
||||
use actix_web::http::StatusCode;
|
||||
use actix_web::web::{self, Data};
|
||||
use actix_web::{HttpRequest, HttpResponse};
|
||||
use chrono::Utc;
|
||||
use futures::stream::StreamExt;
|
||||
use image::ImageError;
|
||||
use itertools::Itertools;
|
||||
use rust_decimal::Decimal;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::postgres::PgPool;
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
use thiserror::Error;
|
||||
use validator::Validate;
|
||||
|
||||
/// Registers the project-creation route (`POST project`) on the given
/// actix-web service config.
pub fn config(cfg: &mut actix_web::web::ServiceConfig) {
    cfg.route("project", web::post().to(project_create));
}
|
||||
|
||||
/// Every error that can arise while creating a project and its initial
/// versions. The HTTP status and machine-readable error code for each
/// variant are defined by the `ResponseError` impl below.
#[derive(Error, Debug)]
pub enum CreateError {
    // Infrastructure failures (5xx).
    #[error("Environment Error")]
    EnvError(#[from] dotenvy::Error),
    #[error("An unknown database error occurred")]
    SqlxDatabaseError(#[from] sqlx::Error),
    #[error("Database Error: {0}")]
    DatabaseError(#[from] models::DatabaseError),
    #[error("Indexing Error: {0}")]
    IndexingError(#[from] IndexingError),
    // Malformed or invalid client input (4xx).
    #[error("Error while parsing multipart payload: {0}")]
    MultipartError(#[from] actix_multipart::MultipartError),
    #[error("Error while parsing JSON: {0}")]
    SerDeError(#[from] serde_json::Error),
    #[error("Error while validating input: {0}")]
    ValidationError(String),
    #[error("Error while uploading file: {0}")]
    FileHostingError(#[from] FileHostingError),
    #[error("Error while validating uploaded file: {0}")]
    FileValidationError(#[from] crate::validate::ValidationError),
    #[error("{}", .0)]
    MissingValueError(String),
    #[error("Invalid format for image: {0}")]
    InvalidIconFormat(String),
    #[error("Error with multipart data: {0}")]
    InvalidInput(String),
    #[error("Invalid game version: {0}")]
    InvalidGameVersion(String),
    #[error("Invalid loader: {0}")]
    InvalidLoader(String),
    #[error("Invalid category: {0}")]
    InvalidCategory(String),
    #[error("Invalid file type for version file: {0}")]
    InvalidFileType(String),
    #[error("Slug is already taken!")]
    SlugCollision,
    // Authentication failures (401).
    #[error("Authentication Error: {0}")]
    Unauthorized(#[from] AuthenticationError),
    #[error("Authentication Error: {0}")]
    CustomAuthenticationError(String),
    #[error("Image Parsing Error: {0}")]
    ImageError(#[from] ImageError),
    #[error("Reroute Error: {0}")]
    RerouteError(#[from] reqwest::Error),
}
|
||||
|
||||
impl actix_web::ResponseError for CreateError {
|
||||
fn status_code(&self) -> StatusCode {
|
||||
match self {
|
||||
CreateError::EnvError(..) => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
CreateError::SqlxDatabaseError(..) => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
CreateError::DatabaseError(..) => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
CreateError::IndexingError(..) => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
CreateError::FileHostingError(..) => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
CreateError::SerDeError(..) => StatusCode::BAD_REQUEST,
|
||||
CreateError::MultipartError(..) => StatusCode::BAD_REQUEST,
|
||||
CreateError::MissingValueError(..) => StatusCode::BAD_REQUEST,
|
||||
CreateError::InvalidIconFormat(..) => StatusCode::BAD_REQUEST,
|
||||
CreateError::InvalidInput(..) => StatusCode::BAD_REQUEST,
|
||||
CreateError::InvalidGameVersion(..) => StatusCode::BAD_REQUEST,
|
||||
CreateError::InvalidLoader(..) => StatusCode::BAD_REQUEST,
|
||||
CreateError::InvalidCategory(..) => StatusCode::BAD_REQUEST,
|
||||
CreateError::InvalidFileType(..) => StatusCode::BAD_REQUEST,
|
||||
CreateError::Unauthorized(..) => StatusCode::UNAUTHORIZED,
|
||||
CreateError::CustomAuthenticationError(..) => StatusCode::UNAUTHORIZED,
|
||||
CreateError::SlugCollision => StatusCode::BAD_REQUEST,
|
||||
CreateError::ValidationError(..) => StatusCode::BAD_REQUEST,
|
||||
CreateError::FileValidationError(..) => StatusCode::BAD_REQUEST,
|
||||
CreateError::ImageError(..) => StatusCode::BAD_REQUEST,
|
||||
CreateError::RerouteError(..) => StatusCode::INTERNAL_SERVER_ERROR,
|
||||
}
|
||||
}
|
||||
|
||||
fn error_response(&self) -> HttpResponse {
|
||||
HttpResponse::build(self.status_code()).json(ApiError {
|
||||
error: match self {
|
||||
CreateError::EnvError(..) => "environment_error",
|
||||
CreateError::SqlxDatabaseError(..) => "database_error",
|
||||
CreateError::DatabaseError(..) => "database_error",
|
||||
CreateError::IndexingError(..) => "indexing_error",
|
||||
CreateError::FileHostingError(..) => "file_hosting_error",
|
||||
CreateError::SerDeError(..) => "invalid_input",
|
||||
CreateError::MultipartError(..) => "invalid_input",
|
||||
CreateError::MissingValueError(..) => "invalid_input",
|
||||
CreateError::InvalidIconFormat(..) => "invalid_input",
|
||||
CreateError::InvalidInput(..) => "invalid_input",
|
||||
CreateError::InvalidGameVersion(..) => "invalid_input",
|
||||
CreateError::InvalidLoader(..) => "invalid_input",
|
||||
CreateError::InvalidCategory(..) => "invalid_input",
|
||||
CreateError::InvalidFileType(..) => "invalid_input",
|
||||
CreateError::Unauthorized(..) => "unauthorized",
|
||||
CreateError::CustomAuthenticationError(..) => "unauthorized",
|
||||
CreateError::SlugCollision => "invalid_input",
|
||||
CreateError::ValidationError(..) => "invalid_input",
|
||||
CreateError::FileValidationError(..) => "invalid_input",
|
||||
CreateError::ImageError(..) => "invalid_image",
|
||||
CreateError::RerouteError(..) => "reroute_error",
|
||||
},
|
||||
description: self.to_string(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Default `project_type` used when a creation payload omits one.
pub fn default_project_type() -> String {
    String::from("mod")
}
|
||||
|
||||
/// Default `requested_status` for new projects: request public approval.
fn default_requested_status() -> ProjectStatus {
    ProjectStatus::Approved
}
|
||||
|
||||
/// The JSON `data` field of the project-creation multipart payload.
///
/// `mod_*` aliases are accepted for backwards compatibility with older
/// clients.
#[derive(Serialize, Deserialize, Validate, Clone)]
pub struct ProjectCreateData {
    #[validate(
        length(min = 3, max = 64),
        custom(function = "crate::util::validate::validate_name")
    )]
    #[serde(alias = "mod_name")]
    /// The title or name of the project.
    pub name: String,
    #[validate(
        length(min = 3, max = 64),
        regex = "crate::util::validate::RE_URL_SAFE"
    )]
    #[serde(alias = "mod_slug")]
    /// The slug of a project, used for vanity URLs
    pub slug: String,
    #[validate(length(min = 3, max = 255))]
    #[serde(alias = "mod_description")]
    /// A short description of the project.
    pub summary: String,
    #[validate(length(max = 65536))]
    #[serde(alias = "mod_body")]
    /// A long description of the project, in markdown.
    pub description: String,

    #[validate(length(max = 32))]
    #[validate]
    /// A list of initial versions to upload with the created project
    pub initial_versions: Vec<InitialVersionData>,
    #[validate(length(max = 3))]
    /// A list of the primary categories that the project is in (at most 3).
    pub categories: Vec<String>,
    #[validate(length(max = 256))]
    #[serde(default = "Vec::new")]
    /// A list of additional, secondary categories that the project is in.
    pub additional_categories: Vec<String>,

    /// An optional link to the project's license page
    pub license_url: Option<String>,
    /// An optional list of all donation links the project has
    #[validate(custom(function = "crate::util::validate::validate_url_hashmap_values"))]
    #[serde(default)]
    pub link_urls: HashMap<String, String>,

    /// An optional boolean. If true, the project will be created as a draft.
    pub is_draft: Option<bool>,

    /// The license id that the project follows
    pub license_id: String,

    #[validate(length(max = 64))]
    #[validate]
    /// The multipart names of the gallery items to upload
    pub gallery_items: Option<Vec<NewGalleryItem>>,
    #[serde(default = "default_requested_status")]
    /// The status of the mod to be set once it is approved
    pub requested_status: ProjectStatus,

    // Associations to uploaded images in body/description
    #[validate(length(max = 10))]
    #[serde(default)]
    pub uploaded_images: Vec<ImageId>,

    /// The id of the organization to create the project in
    pub organization_id: Option<OrganizationId>,
}
|
||||
|
||||
/// One gallery image to create along with the project; the actual bytes are
/// carried in a separate multipart field named by `item`.
#[derive(Serialize, Deserialize, Validate, Clone)]
pub struct NewGalleryItem {
    /// The name of the multipart item where the gallery media is located
    pub item: String,
    /// Whether the gallery item should show in search or not
    pub featured: bool,
    #[validate(length(min = 1, max = 2048))]
    /// The title of the gallery item
    pub name: Option<String>,
    #[validate(length(min = 1, max = 2048))]
    /// The description of the gallery item
    pub description: Option<String>,
    // Sort key for the gallery (presumably ascending — TODO confirm against
    // the gallery rendering code).
    pub ordering: i64,
}
|
||||
|
||||
/// Handle to a file already pushed to the file host, kept so it can be
/// deleted again (see [`undo_uploads`]) if project creation fails partway.
pub struct UploadedFile {
    // File-host version/file id, as required by `delete_file_version`.
    pub file_id: String,
    // File-host path/name of the uploaded file.
    pub file_name: String,
}
|
||||
|
||||
pub async fn undo_uploads(
|
||||
file_host: &dyn FileHost,
|
||||
uploaded_files: &[UploadedFile],
|
||||
) -> Result<(), CreateError> {
|
||||
for file in uploaded_files {
|
||||
file_host
|
||||
.delete_file_version(&file.file_id, &file.file_name)
|
||||
.await?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// HTTP handler for creating a new project from a multipart upload.
///
/// Wraps `project_create_inner` in a database transaction: on success the
/// transaction is committed; on failure every file already uploaded to the
/// CDN is deleted and the transaction is rolled back.
pub async fn project_create(
    req: HttpRequest,
    mut payload: Multipart,
    client: Data<PgPool>,
    redis: Data<RedisPool>,
    file_host: Data<Arc<dyn FileHost + Send + Sync>>,
    session_queue: Data<AuthQueue>,
) -> Result<HttpResponse, CreateError> {
    let mut transaction = client.begin().await?;
    // Files written to the CDN so far; populated by the inner call so they
    // can be cleaned up on failure.
    let mut uploaded_files = Vec::new();

    let result = project_create_inner(
        req,
        &mut payload,
        &mut transaction,
        &***file_host,
        &mut uploaded_files,
        &client,
        &redis,
        &session_queue,
    )
    .await;

    if result.is_err() {
        // Attempt the rollback even if the CDN cleanup fails, before
        // deciding which error to surface.
        let undo_result = undo_uploads(&***file_host, &uploaded_files).await;
        let rollback_result = transaction.rollback().await;

        // NOTE(review): if the CDN cleanup itself fails, its error is
        // returned here and the original creation error from `result` is
        // lost — confirm this masking is intentional.
        undo_result?;
        if let Err(e) = rollback_result {
            return Err(e.into());
        }
    } else {
        transaction.commit().await?;
    }

    result
}
|
||||
/*
|
||||
|
||||
Project Creation Steps:
|
||||
Get logged in user
|
||||
Must match the author in the version creation
|
||||
|
||||
1. Data
|
||||
- Gets "data" field from multipart form; must be first
|
||||
- Verification: string lengths
|
||||
- Create versions
|
||||
- Some shared logic with version creation
|
||||
- Create list of VersionBuilders
|
||||
- Create ProjectBuilder
|
||||
|
||||
2. Upload
|
||||
- Icon: check file format & size
|
||||
- Upload to backblaze & record URL
|
||||
- Project files
|
||||
- Check for matching version
|
||||
- File size limits?
|
||||
- Check file type
|
||||
- Eventually, malware scan
|
||||
- Upload to backblaze & create VersionFileBuilder
|
||||
-
|
||||
|
||||
3. Creation
|
||||
- Database stuff
|
||||
- Add project data to indexing queue
|
||||
*/
|
||||
|
||||
/// Performs the actual project creation inside the caller's transaction.
///
/// Consumes the multipart `payload` in order: first a `data` field holding
/// a JSON `ProjectCreateData`, then the icon, gallery images, and version
/// files. Uploads are recorded in `uploaded_files` so the caller can undo
/// them on failure. Returns the freshly created project as JSON.
#[allow(clippy::too_many_arguments)]
async fn project_create_inner(
    req: HttpRequest,
    payload: &mut Multipart,
    transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
    file_host: &dyn FileHost,
    uploaded_files: &mut Vec<UploadedFile>,
    pool: &PgPool,
    redis: &RedisPool,
    session_queue: &AuthQueue,
) -> Result<HttpResponse, CreateError> {
    // The base URL for files uploaded to backblaze
    let cdn_url = dotenvy::var("CDN_URL")?;

    // The currently logged in user
    let current_user = get_user_from_headers(
        &req,
        pool,
        redis,
        session_queue,
        Some(&[Scopes::PROJECT_CREATE]),
    )
    .await?
    .1;

    let project_id: ProjectId = models::generate_project_id(transaction).await?.into();
    let all_loaders = models::loader_fields::Loader::list(&mut **transaction, redis).await?;

    let project_create_data: ProjectCreateData;
    let mut versions;
    // Maps multipart field names to indices into `versions`.
    let mut versions_map = std::collections::HashMap::new();
    let mut gallery_urls = Vec::new();
    {
        // The first multipart field must be named "data" and contain a
        // JSON `ProjectCreateData` object.

        let mut field = payload
            .next()
            .await
            .map(|m| m.map_err(CreateError::MultipartError))
            .unwrap_or_else(|| {
                Err(CreateError::MissingValueError(String::from(
                    "No `data` field in multipart upload",
                )))
            })?;

        let content_disposition = field.content_disposition();
        let name = content_disposition
            .get_name()
            .ok_or_else(|| CreateError::MissingValueError(String::from("Missing content name")))?;

        if name != "data" {
            return Err(CreateError::InvalidInput(String::from(
                "`data` field must come before file fields",
            )));
        }

        // Buffer the whole `data` field before deserializing.
        let mut data = Vec::new();
        while let Some(chunk) = field.next().await {
            data.extend_from_slice(&chunk.map_err(CreateError::MultipartError)?);
        }
        let create_data: ProjectCreateData = serde_json::from_slice(&data)?;

        create_data
            .validate()
            .map_err(|err| CreateError::InvalidInput(validation_errors_to_string(err, None)))?;

        // A slug that parses as a base62 project id could shadow an
        // existing project's id-based URL, so reject that case too.
        let slug_project_id_option: Option<ProjectId> =
            serde_json::from_str(&format!("\"{}\"", create_data.slug)).ok();

        if let Some(slug_project_id) = slug_project_id_option {
            let slug_project_id: models::ids::ProjectId = slug_project_id.into();
            let results = sqlx::query!(
                "
                SELECT EXISTS(SELECT 1 FROM mods WHERE id=$1)
                ",
                slug_project_id as models::ids::ProjectId
            )
            .fetch_one(&mut **transaction)
            .await
            .map_err(|e| CreateError::DatabaseError(e.into()))?;

            if results.exists.unwrap_or(false) {
                return Err(CreateError::SlugCollision);
            }
        }

        {
            // Reject slugs already taken (case-insensitively) by another project.
            let results = sqlx::query!(
                "
                SELECT EXISTS(SELECT 1 FROM mods WHERE slug = LOWER($1))
                ",
                create_data.slug
            )
            .fetch_one(&mut **transaction)
            .await
            .map_err(|e| CreateError::DatabaseError(e.into()))?;

            if results.exists.unwrap_or(false) {
                return Err(CreateError::SlugCollision);
            }
        }

        // Create VersionBuilders for the versions specified in `initial_versions`
        versions = Vec::with_capacity(create_data.initial_versions.len());
        for (i, data) in create_data.initial_versions.iter().enumerate() {
            // Create a map of multipart field names to version indices
            for name in &data.file_parts {
                if versions_map.insert(name.to_owned(), i).is_some() {
                    // If the name is already used
                    return Err(CreateError::InvalidInput(String::from(
                        "Duplicate multipart field name",
                    )));
                }
            }
            versions.push(
                create_initial_version(
                    data,
                    project_id,
                    current_user.id,
                    &all_loaders,
                    transaction,
                    redis,
                )
                .await?,
            );
        }

        project_create_data = create_data;
    }

    // Set once the "icon" multipart field has been processed:
    // (cdn url, raw cdn url, dominant color).
    let mut icon_data = None;

    // First error hit while processing file fields; remaining fields are
    // still drained (see `continue` below) so the multipart stream is
    // fully consumed before the error is returned.
    let mut error = None;
    while let Some(item) = payload.next().await {
        let mut field: Field = item?;

        if error.is_some() {
            continue;
        }

        let result = async {
            let content_disposition = field.content_disposition().clone();

            let name = content_disposition.get_name().ok_or_else(|| {
                CreateError::MissingValueError("Missing content name".to_string())
            })?;

            let (file_name, file_extension) =
                super::version_creation::get_name_ext(&content_disposition)?;

            if name == "icon" {
                if icon_data.is_some() {
                    return Err(CreateError::InvalidInput(String::from(
                        "Projects can only have one icon",
                    )));
                }
                // Upload the icon to the cdn
                icon_data = Some(
                    process_icon_upload(
                        uploaded_files,
                        project_id.0,
                        file_extension,
                        file_host,
                        field,
                    )
                    .await?,
                );
                return Ok(());
            }

            if let Some(gallery_items) = &project_create_data.gallery_items {
                if gallery_items.iter().filter(|a| a.featured).count() > 1 {
                    return Err(CreateError::InvalidInput(String::from(
                        "Only one gallery image can be featured.",
                    )));
                }
                if let Some(item) = gallery_items.iter().find(|x| x.item == name) {
                    let data = read_from_field(
                        &mut field,
                        2 * (1 << 20),
                        "Gallery image exceeds the maximum of 2MiB.",
                    )
                    .await?;

                    let (_, file_extension) =
                        super::version_creation::get_name_ext(&content_disposition)?;

                    let url = format!("data/{project_id}/images");
                    let upload_result = upload_image_optimized(
                        &url,
                        data.freeze(),
                        file_extension,
                        Some(350),
                        Some(1.0),
                        file_host,
                    )
                    .await
                    .map_err(|e| CreateError::InvalidIconFormat(e.to_string()))?;

                    uploaded_files.push(UploadedFile {
                        file_id: upload_result.raw_url_path.clone(),
                        file_name: upload_result.raw_url_path,
                    });
                    gallery_urls.push(crate::models::projects::GalleryItem {
                        url: upload_result.url,
                        raw_url: upload_result.raw_url,
                        featured: item.featured,
                        name: item.name.clone(),
                        description: item.description.clone(),
                        created: Utc::now(),
                        ordering: item.ordering,
                    });

                    return Ok(());
                }
            }

            // Any other field must be a version file declared in `file_parts`.
            let index = if let Some(i) = versions_map.get(name) {
                *i
            } else {
                return Err(CreateError::InvalidInput(format!(
                    "File `{file_name}` (field {name}) isn't specified in the versions data"
                )));
            };
            // `index` is always valid for these lists
            let created_version = versions.get_mut(index).unwrap();
            let version_data = project_create_data.initial_versions.get(index).unwrap();
            // TODO: maybe redundant is this calculation done elsewhere?

            let existing_file_names = created_version
                .files
                .iter()
                .map(|x| x.filename.clone())
                .collect();
            // Upload the new jar file
            super::version_creation::upload_file(
                &mut field,
                file_host,
                version_data.file_parts.len(),
                uploaded_files,
                &mut created_version.files,
                &mut created_version.dependencies,
                &cdn_url,
                &content_disposition,
                project_id,
                created_version.version_id.into(),
                &created_version.version_fields,
                version_data.loaders.clone(),
                version_data.primary_file.is_some(),
                version_data.primary_file.as_deref() == Some(name),
                None,
                existing_file_names,
                transaction,
                redis,
            )
            .await?;

            Ok(())
        }
        .await;

        if result.is_err() {
            error = result.err();
        }
    }

    if let Some(error) = error {
        return Err(error);
    }

    {
        // Check to make sure that all specified files were uploaded
        for (version_data, builder) in project_create_data
            .initial_versions
            .iter()
            .zip(versions.iter())
        {
            if version_data.file_parts.len() != builder.files.len() {
                return Err(CreateError::InvalidInput(String::from(
                    "Some files were specified in initial_versions but not uploaded",
                )));
            }
        }

        // Convert the list of category names to actual categories
        let mut categories = Vec::with_capacity(project_create_data.categories.len());
        for category in &project_create_data.categories {
            let ids = models::categories::Category::get_ids(category, &mut **transaction).await?;
            if ids.is_empty() {
                return Err(CreateError::InvalidCategory(category.clone()));
            }

            // TODO: We should filter out categories that don't match the project type of any of the versions
            // ie: if mod and modpack both share a name this should only have modpack if it only has a modpack as a version
            categories.extend(ids.values());
        }

        let mut additional_categories =
            Vec::with_capacity(project_create_data.additional_categories.len());
        for category in &project_create_data.additional_categories {
            let ids = models::categories::Category::get_ids(category, &mut **transaction).await?;
            if ids.is_empty() {
                return Err(CreateError::InvalidCategory(category.clone()));
            }
            // TODO: We should filter out categories that don't match the project type of any of the versions
            // ie: if mod and modpack both share a name this should only have modpack if it only has a modpack as a version
            additional_categories.extend(ids.values());
        }

        let mut members = vec![];

        if let Some(organization_id) = project_create_data.organization_id {
            // Organization-owned project: the creator only needs the
            // ADD_PROJECT permission, and no team member row is created here.
            let org = models::Organization::get_id(organization_id.into(), pool, redis)
                .await?
                .ok_or_else(|| {
                    CreateError::InvalidInput("Invalid organization ID specified!".to_string())
                })?;

            let team_member =
                models::TeamMember::get_from_user_id(org.team_id, current_user.id.into(), pool)
                    .await?;

            let perms =
                OrganizationPermissions::get_permissions_by_role(&current_user.role, &team_member);

            if !perms
                .map(|x| x.contains(OrganizationPermissions::ADD_PROJECT))
                .unwrap_or(false)
            {
                return Err(CreateError::CustomAuthenticationError(
                    "You do not have the permissions to create projects in this organization!"
                        .to_string(),
                ));
            }
        } else {
            // Personal project: the creator becomes the accepted owner with
            // full permissions and the whole payout split.
            members.push(models::team_item::TeamMemberBuilder {
                user_id: current_user.id.into(),
                role: crate::models::teams::DEFAULT_ROLE.to_owned(),
                is_owner: true,
                permissions: ProjectPermissions::all(),
                organization_permissions: None,
                accepted: true,
                payouts_split: Decimal::ONE_HUNDRED,
                ordering: 0,
            })
        }
        let team = models::team_item::TeamBuilder { members };

        let team_id = team.insert(&mut *transaction).await?;

        let status;
        if project_create_data.is_draft.unwrap_or(false) {
            status = ProjectStatus::Draft;
        } else {
            // Submitting for review requires at least one version.
            status = ProjectStatus::Processing;
            if project_create_data.initial_versions.is_empty() {
                return Err(CreateError::InvalidInput(String::from(
                    "Project submitted for review with no initial versions",
                )));
            }
        }

        // Normalize/validate the license as an SPDX expression.
        let license_id =
            spdx::Expression::parse(&project_create_data.license_id).map_err(|err| {
                CreateError::InvalidInput(format!("Invalid SPDX license identifier: {err}"))
            })?;

        let mut link_urls = vec![];

        let link_platforms =
            models::categories::LinkPlatform::list(&mut **transaction, redis).await?;
        for (platform, url) in &project_create_data.link_urls {
            let platform_id =
                models::categories::LinkPlatform::get_id(platform, &mut **transaction)
                    .await?
                    .ok_or_else(|| {
                        CreateError::InvalidInput(format!(
                            "Link platform {} does not exist.",
                            platform.clone()
                        ))
                    })?;
            let link_platform = link_platforms
                .iter()
                .find(|x| x.id == platform_id)
                .ok_or_else(|| {
                    CreateError::InvalidInput(format!(
                        "Link platform {} does not exist.",
                        platform.clone()
                    ))
                })?;
            link_urls.push(models::project_item::LinkUrl {
                platform_id,
                platform_name: link_platform.name.clone(),
                url: url.clone(),
                donation: link_platform.donation,
            })
        }

        let project_builder_actual = models::project_item::ProjectBuilder {
            project_id: project_id.into(),
            team_id,
            organization_id: project_create_data.organization_id.map(|x| x.into()),
            name: project_create_data.name,
            summary: project_create_data.summary,
            description: project_create_data.description,
            icon_url: icon_data.clone().map(|x| x.0),
            raw_icon_url: icon_data.clone().map(|x| x.1),

            license_url: project_create_data.license_url,
            categories,
            additional_categories,
            initial_versions: versions,
            status,
            requested_status: Some(project_create_data.requested_status),
            license: license_id.to_string(),
            slug: Some(project_create_data.slug),
            link_urls,
            gallery_items: gallery_urls
                .iter()
                .map(|x| models::project_item::GalleryItem {
                    image_url: x.url.clone(),
                    raw_image_url: x.raw_url.clone(),
                    featured: x.featured,
                    name: x.name.clone(),
                    description: x.description.clone(),
                    created: x.created,
                    ordering: x.ordering,
                })
                .collect(),
            color: icon_data.and_then(|x| x.2),
            monetization_status: MonetizationStatus::Monetized,
        };
        // Clone kept for building the response after the insert consumes
        // the builder.
        let project_builder = project_builder_actual.clone();

        let now = Utc::now();

        let id = project_builder_actual.insert(&mut *transaction).await?;
        User::clear_project_cache(&[current_user.id.into()], redis).await?;

        // Claim the images referenced in the description for this project.
        for image_id in project_create_data.uploaded_images {
            if let Some(db_image) =
                image_item::Image::get(image_id.into(), &mut **transaction, redis).await?
            {
                let image: Image = db_image.into();
                if !matches!(image.context, ImageContext::Project { .. })
                    || image.context.inner_id().is_some()
                {
                    return Err(CreateError::InvalidInput(format!(
                        "Image {} is not unused and in the 'project' context",
                        image_id
                    )));
                }

                sqlx::query!(
                    "
                    UPDATE uploaded_images
                    SET mod_id = $1
                    WHERE id = $2
                    ",
                    id as models::ids::ProjectId,
                    image_id.0 as i64
                )
                .execute(&mut **transaction)
                .await?;

                image_item::Image::clear_cache(image.id.into(), redis).await?;
            } else {
                return Err(CreateError::InvalidInput(format!(
                    "Image {} does not exist",
                    image_id
                )));
            }
        }

        // Every project gets a moderation thread.
        let thread_id = ThreadBuilder {
            type_: ThreadType::Project,
            members: vec![],
            project_id: Some(id),
            report_id: None,
        }
        .insert(&mut *transaction)
        .await?;

        // Derive project types and games from the loaders used by the
        // initial versions.
        let loaders = project_builder
            .initial_versions
            .iter()
            .flat_map(|v| v.loaders.clone())
            .unique()
            .collect::<Vec<_>>();
        let (project_types, games) = Loader::list(&mut **transaction, redis)
            .await?
            .into_iter()
            .fold(
                (Vec::new(), Vec::new()),
                |(mut project_types, mut games), loader| {
                    if loaders.contains(&loader.id) {
                        project_types.extend(loader.supported_project_types);
                        games.extend(loader.supported_games);
                    }
                    (project_types, games)
                },
            );

        let response = crate::models::projects::Project {
            id: project_id,
            slug: project_builder.slug.clone(),
            project_types,
            games,
            team_id: team_id.into(),
            organization: project_create_data.organization_id,
            name: project_builder.name.clone(),
            summary: project_builder.summary.clone(),
            description: project_builder.description.clone(),
            published: now,
            updated: now,
            approved: None,
            queued: None,
            status,
            requested_status: project_builder.requested_status,
            moderator_message: None,
            license: License {
                id: project_create_data.license_id.clone(),
                name: "".to_string(),
                url: project_builder.license_url.clone(),
            },
            downloads: 0,
            followers: 0,
            categories: project_create_data.categories,
            additional_categories: project_create_data.additional_categories,
            loaders: vec![],
            versions: project_builder
                .initial_versions
                .iter()
                .map(|v| v.version_id.into())
                .collect::<Vec<_>>(),
            icon_url: project_builder.icon_url.clone(),
            link_urls: project_builder
                .link_urls
                .clone()
                .into_iter()
                .map(|x| (x.platform_name.clone(), Link::from(x)))
                .collect(),
            gallery: gallery_urls,
            color: project_builder.color,
            thread_id: thread_id.into(),
            monetization_status: MonetizationStatus::Monetized,
            fields: HashMap::new(), // Fields instantiate to empty
        };

        Ok(HttpResponse::Ok().json(response))
    }
}
|
||||
|
||||
/// Builds a `VersionBuilder` for one entry of `initial_versions`.
///
/// Validates the version data, resolves loader names to loader ids,
/// resolves loader fields, and generates a fresh version id. File entries
/// start empty; they are filled in later as the multipart file fields are
/// processed.
async fn create_initial_version(
    version_data: &InitialVersionData,
    project_id: ProjectId,
    author: UserId,
    all_loaders: &[models::loader_fields::Loader],
    transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
    redis: &RedisPool,
) -> Result<models::version_item::VersionBuilder, CreateError> {
    // The project does not exist yet, so the client must not pre-set an id.
    if version_data.project_id.is_some() {
        return Err(CreateError::InvalidInput(String::from(
            "Found project id in initial version for new project",
        )));
    }

    version_data
        .validate()
        .map_err(|err| CreateError::ValidationError(validation_errors_to_string(err, None)))?;

    // Randomly generate a new id to be used for the version
    let version_id: VersionId = models::generate_version_id(transaction).await?.into();

    // Resolve each requested loader name against the known loaders.
    let loaders = version_data
        .loaders
        .iter()
        .map(|x| {
            all_loaders
                .iter()
                .find(|y| y.loader == x.0)
                .ok_or_else(|| CreateError::InvalidLoader(x.0.clone()))
                .map(|y| y.id)
        })
        .collect::<Result<Vec<models::LoaderId>, CreateError>>()?;

    let loader_fields = LoaderField::get_fields(&loaders, &mut **transaction, redis).await?;
    let mut loader_field_enum_values =
        LoaderFieldEnumValue::list_many_loader_fields(&loader_fields, &mut **transaction, redis)
            .await?;

    let version_fields = try_create_version_fields(
        version_id,
        &version_data.fields,
        &loader_fields,
        &mut loader_field_enum_values,
    )?;

    let dependencies = version_data
        .dependencies
        .iter()
        .map(|d| models::version_item::DependencyBuilder {
            version_id: d.version_id.map(|x| x.into()),
            project_id: d.project_id.map(|x| x.into()),
            dependency_type: d.dependency_type.to_string(),
            file_name: None,
        })
        .collect::<Vec<_>>();

    let version = models::version_item::VersionBuilder {
        version_id: version_id.into(),
        project_id: project_id.into(),
        author_id: author.into(),
        name: version_data.version_title.clone(),
        version_number: version_data.version_number.clone(),
        changelog: version_data.version_body.clone().unwrap_or_default(),
        // Populated later from the multipart file fields.
        files: Vec::new(),
        dependencies,
        loaders,
        version_fields,
        featured: version_data.featured,
        status: VersionStatus::Listed,
        version_type: version_data.release_channel.to_string(),
        requested_status: None,
        ordering: version_data.ordering,
    };

    Ok(version)
}
|
||||
|
||||
/// Reads the icon bytes from the multipart `field`, uploads an optimized
/// copy (max 96px) plus the raw original to the file host, and records
/// both uploads in `uploaded_files` for potential rollback.
///
/// Returns `(optimized url, raw url, dominant color)`.
async fn process_icon_upload(
    uploaded_files: &mut Vec<UploadedFile>,
    id: u64,
    file_extension: &str,
    file_host: &dyn FileHost,
    mut field: Field,
) -> Result<(String, String, Option<u32>), CreateError> {
    // 262144 bytes = 256 KiB icon size limit.
    let data = read_from_field(&mut field, 262144, "Icons must be smaller than 256KiB").await?;
    let upload_result = crate::util::img::upload_image_optimized(
        &format!("data/{}", to_base62(id)),
        data.freeze(),
        file_extension,
        Some(96),
        Some(1.0),
        file_host,
    )
    .await
    .map_err(|e| CreateError::InvalidIconFormat(e.to_string()))?;

    // Track both the raw and the optimized upload so `undo_uploads` can
    // delete them if project creation later fails.
    uploaded_files.push(UploadedFile {
        file_id: upload_result.raw_url_path.clone(),
        file_name: upload_result.raw_url_path,
    });

    uploaded_files.push(UploadedFile {
        file_id: upload_result.url_path.clone(),
        file_name: upload_result.url_path,
    });

    Ok((
        upload_result.url,
        upload_result.raw_url,
        upload_result.color,
    ))
}
|
||||
2234
apps/labrinth/src/routes/v3/projects.rs
Normal file
2234
apps/labrinth/src/routes/v3/projects.rs
Normal file
File diff suppressed because it is too large
Load Diff
507
apps/labrinth/src/routes/v3/reports.rs
Normal file
507
apps/labrinth/src/routes/v3/reports.rs
Normal file
@@ -0,0 +1,507 @@
|
||||
use crate::auth::{check_is_moderator_from_headers, get_user_from_headers};
|
||||
use crate::database;
|
||||
use crate::database::models::image_item;
|
||||
use crate::database::models::thread_item::{ThreadBuilder, ThreadMessageBuilder};
|
||||
use crate::database::redis::RedisPool;
|
||||
use crate::models::ids::ImageId;
|
||||
use crate::models::ids::{base62_impl::parse_base62, ProjectId, UserId, VersionId};
|
||||
use crate::models::images::{Image, ImageContext};
|
||||
use crate::models::pats::Scopes;
|
||||
use crate::models::reports::{ItemType, Report};
|
||||
use crate::models::threads::{MessageBody, ThreadType};
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::routes::ApiError;
|
||||
use crate::util::img;
|
||||
use actix_web::{web, HttpRequest, HttpResponse};
|
||||
use chrono::Utc;
|
||||
use futures::StreamExt;
|
||||
use serde::Deserialize;
|
||||
use sqlx::PgPool;
|
||||
use validator::Validate;
|
||||
|
||||
/// Registers the report routes on the actix service config.
pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.route("report", web::post().to(report_create));
    cfg.route("report", web::get().to(reports));
    cfg.route("reports", web::get().to(reports_get));
    cfg.route("report/{id}", web::get().to(report_get));
    cfg.route("report/{id}", web::patch().to(report_edit));
    cfg.route("report/{id}", web::delete().to(report_delete));
}
|
||||
|
||||
/// Request body for `POST /report`.
#[derive(Deserialize, Validate)]
pub struct CreateReport {
    // Report category name; must match a row in the report types table.
    pub report_type: String,
    // Base62 id of the reported item.
    pub item_id: String,
    // Which kind of item `item_id` refers to (project, version, or user).
    pub item_type: ItemType,
    // Free-form report text.
    pub body: String,
    // Associations to uploaded images
    #[validate(length(max = 10))]
    #[serde(default)]
    pub uploaded_images: Vec<ImageId>,
}
|
||||
|
||||
/// Creates a new report against a project, version, or user.
///
/// Requires the `REPORT_CREATE` scope. Verifies the reported item exists,
/// claims any referenced uploaded images for the report, opens a
/// moderation thread, and returns the created report as JSON.
pub async fn report_create(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    mut body: web::Payload,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let mut transaction = pool.begin().await?;

    let current_user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::REPORT_CREATE]),
    )
    .await?
    .1;

    // Buffer the raw payload before deserializing.
    let mut bytes = web::BytesMut::new();
    while let Some(item) = body.next().await {
        bytes.extend_from_slice(&item.map_err(|_| {
            ApiError::InvalidInput("Error while parsing request payload!".to_string())
        })?);
    }
    let new_report: CreateReport = serde_json::from_slice(bytes.as_ref())?;

    let id = crate::database::models::generate_report_id(&mut transaction).await?;
    let report_type = crate::database::models::categories::ReportType::get_id(
        &new_report.report_type,
        &mut *transaction,
    )
    .await?
    .ok_or_else(|| {
        ApiError::InvalidInput(format!("Invalid report type: {}", new_report.report_type))
    })?;

    // Target columns start as None; exactly one is filled in below
    // depending on `item_type`.
    let mut report = crate::database::models::report_item::Report {
        id,
        report_type_id: report_type,
        project_id: None,
        version_id: None,
        user_id: None,
        body: new_report.body.clone(),
        reporter: current_user.id.into(),
        created: Utc::now(),
        closed: false,
    };

    // Verify the reported item exists, then record its id on the report.
    match new_report.item_type {
        ItemType::Project => {
            let project_id = ProjectId(parse_base62(new_report.item_id.as_str())?);

            let result = sqlx::query!(
                "SELECT EXISTS(SELECT 1 FROM mods WHERE id = $1)",
                project_id.0 as i64
            )
            .fetch_one(&mut *transaction)
            .await?;

            if !result.exists.unwrap_or(false) {
                return Err(ApiError::InvalidInput(format!(
                    "Project could not be found: {}",
                    new_report.item_id
                )));
            }

            report.project_id = Some(project_id.into())
        }
        ItemType::Version => {
            let version_id = VersionId(parse_base62(new_report.item_id.as_str())?);

            let result = sqlx::query!(
                "SELECT EXISTS(SELECT 1 FROM versions WHERE id = $1)",
                version_id.0 as i64
            )
            .fetch_one(&mut *transaction)
            .await?;

            if !result.exists.unwrap_or(false) {
                return Err(ApiError::InvalidInput(format!(
                    "Version could not be found: {}",
                    new_report.item_id
                )));
            }

            report.version_id = Some(version_id.into())
        }
        ItemType::User => {
            let user_id = UserId(parse_base62(new_report.item_id.as_str())?);

            let result = sqlx::query!(
                "SELECT EXISTS(SELECT 1 FROM users WHERE id = $1)",
                user_id.0 as i64
            )
            .fetch_one(&mut *transaction)
            .await?;

            if !result.exists.unwrap_or(false) {
                return Err(ApiError::InvalidInput(format!(
                    "User could not be found: {}",
                    new_report.item_id
                )));
            }

            report.user_id = Some(user_id.into())
        }
        ItemType::Unknown => {
            return Err(ApiError::InvalidInput(format!(
                "Invalid report item type: {}",
                new_report.item_type.as_str()
            )))
        }
    }

    report.insert(&mut transaction).await?;

    // Claim the referenced uploaded images for this report. Each image
    // must be unused and in the 'report' context.
    for image_id in new_report.uploaded_images {
        if let Some(db_image) =
            image_item::Image::get(image_id.into(), &mut *transaction, &redis).await?
        {
            let image: Image = db_image.into();
            if !matches!(image.context, ImageContext::Report { .. })
                || image.context.inner_id().is_some()
            {
                return Err(ApiError::InvalidInput(format!(
                    "Image {} is not unused and in the 'report' context",
                    image_id
                )));
            }

            sqlx::query!(
                "
                UPDATE uploaded_images
                SET report_id = $1
                WHERE id = $2
                ",
                id.0 as i64,
                image_id.0 as i64
            )
            .execute(&mut *transaction)
            .await?;

            image_item::Image::clear_cache(image.id.into(), &redis).await?;
        } else {
            return Err(ApiError::InvalidInput(format!(
                "Image {} could not be found",
                image_id
            )));
        }
    }

    // Every report gets a moderation thread.
    let thread_id = ThreadBuilder {
        type_: ThreadType::Report,
        members: vec![],
        project_id: None,
        report_id: Some(report.id),
    }
    .insert(&mut transaction)
    .await?;

    transaction.commit().await?;

    Ok(HttpResponse::Ok().json(Report {
        id: id.into(),
        report_type: new_report.report_type.clone(),
        item_id: new_report.item_id.clone(),
        item_type: new_report.item_type.clone(),
        reporter: current_user.id,
        body: new_report.body.clone(),
        created: Utc::now(),
        closed: false,
        thread_id: thread_id.into(),
    }))
}
|
||||
|
||||
/// Query parameters for `GET /report`.
#[derive(Deserialize)]
pub struct ReportsRequestOptions {
    // Maximum number of reports to return (default 100).
    #[serde(default = "default_count")]
    pub count: i16,
    // When true and the caller is a moderator, list all users' open
    // reports instead of only the caller's own (default true).
    #[serde(default = "default_all")]
    pub all: bool,
}
|
||||
|
||||
// Serde default for `ReportsRequestOptions::count`.
fn default_count() -> i16 {
    100
}
|
||||
// Serde default for `ReportsRequestOptions::all`.
fn default_all() -> bool {
    true
}
|
||||
|
||||
/// Lists open reports, oldest first, limited by `count`.
///
/// Requires the `REPORT_READ` scope. Moderators with `all=true` see every
/// open report; everyone else sees only reports they filed themselves.
pub async fn reports(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    count: web::Query<ReportsRequestOptions>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::REPORT_READ]),
    )
    .await?
    .1;

    use futures::stream::TryStreamExt;

    let report_ids = if user.role.is_mod() && count.all {
        // Moderator view: all open reports.
        sqlx::query!(
            "
            SELECT id FROM reports
            WHERE closed = FALSE
            ORDER BY created ASC
            LIMIT $1;
            ",
            count.count as i64
        )
        .fetch(&**pool)
        .map_ok(|m| crate::database::models::ids::ReportId(m.id))
        .try_collect::<Vec<crate::database::models::ids::ReportId>>()
        .await?
    } else {
        // Regular view: only the caller's own open reports.
        sqlx::query!(
            "
            SELECT id FROM reports
            WHERE closed = FALSE AND reporter = $1
            ORDER BY created ASC
            LIMIT $2;
            ",
            user.id.0 as i64,
            count.count as i64
        )
        .fetch(&**pool)
        .map_ok(|m| crate::database::models::ids::ReportId(m.id))
        .try_collect::<Vec<crate::database::models::ids::ReportId>>()
        .await?
    };

    let query_reports =
        crate::database::models::report_item::Report::get_many(&report_ids, &**pool).await?;

    let mut reports: Vec<Report> = Vec::new();

    for x in query_reports {
        reports.push(x.into());
    }

    Ok(HttpResponse::Ok().json(reports))
}
|
||||
|
||||
/// Query parameter for bulk report lookup.
#[derive(Deserialize)]
pub struct ReportIds {
    // JSON-encoded array of report ids, e.g. `["AbCdEf", ...]`.
    pub ids: String,
}
|
||||
|
||||
pub async fn reports_get(
|
||||
req: HttpRequest,
|
||||
web::Query(ids): web::Query<ReportIds>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let report_ids: Vec<crate::database::models::ids::ReportId> =
|
||||
serde_json::from_str::<Vec<crate::models::ids::ReportId>>(&ids.ids)?
|
||||
.into_iter()
|
||||
.map(|x| x.into())
|
||||
.collect();
|
||||
|
||||
let reports_data =
|
||||
crate::database::models::report_item::Report::get_many(&report_ids, &**pool).await?;
|
||||
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::REPORT_READ]),
|
||||
)
|
||||
.await?
|
||||
.1;
|
||||
|
||||
let all_reports = reports_data
|
||||
.into_iter()
|
||||
.filter(|x| user.role.is_mod() || x.reporter == user.id.into())
|
||||
.map(|x| x.into())
|
||||
.collect::<Vec<Report>>();
|
||||
|
||||
Ok(HttpResponse::Ok().json(all_reports))
|
||||
}
|
||||
|
||||
pub async fn report_get(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
info: web::Path<(crate::models::reports::ReportId,)>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::REPORT_READ]),
|
||||
)
|
||||
.await?
|
||||
.1;
|
||||
let id = info.into_inner().0.into();
|
||||
|
||||
let report = crate::database::models::report_item::Report::get(id, &**pool).await?;
|
||||
|
||||
if let Some(report) = report {
|
||||
if !user.role.is_mod() && report.reporter != user.id.into() {
|
||||
return Err(ApiError::NotFound);
|
||||
}
|
||||
|
||||
let report: Report = report.into();
|
||||
Ok(HttpResponse::Ok().json(report))
|
||||
} else {
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
}
|
||||
|
||||
/// JSON body for editing a report; both fields are optional (patch semantics).
#[derive(Deserialize, Validate)]
pub struct EditReport {
    // Replacement report body; capped at 64 KiB.
    #[validate(length(max = 65536))]
    pub body: Option<String>,
    // New open/closed state — moderator-only (enforced in the handler).
    pub closed: Option<bool>,
}
|
||||
|
||||
/// Edits a report's body and/or closed state.
///
/// The reporter may edit the body; only moderators may change `closed`.
/// All writes run in a single transaction together with thread bookkeeping
/// and cleanup of images no longer referenced by the body. Requires the
/// `REPORT_WRITE` scope. Returns `204` on success, `404` when the report is
/// missing or not visible to the caller.
pub async fn report_edit(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    info: web::Path<(crate::models::reports::ReportId,)>,
    session_queue: web::Data<AuthQueue>,
    edit_report: web::Json<EditReport>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::REPORT_WRITE]),
    )
    .await?
    .1;
    let id = info.into_inner().0.into();

    let report = crate::database::models::report_item::Report::get(id, &**pool).await?;

    if let Some(report) = report {
        // Non-mods may only touch their own reports; hide existence otherwise.
        if !user.role.is_mod() && report.reporter != user.id.into() {
            return Err(ApiError::NotFound);
        }

        let mut transaction = pool.begin().await?;

        if let Some(edit_body) = &edit_report.body {
            sqlx::query!(
                "
                UPDATE reports
                SET body = $1
                WHERE (id = $2)
                ",
                edit_body,
                id as crate::database::models::ids::ReportId,
            )
            .execute(&mut *transaction)
            .await?;
        }

        if let Some(edit_closed) = edit_report.closed {
            // Any open/closed change is moderator-only. NOTE(review): the
            // error text only mentions "reopen" but this also blocks a
            // reporter from closing their own report — confirm intended.
            if !user.role.is_mod() {
                return Err(ApiError::InvalidInput(
                    "You cannot reopen a report!".to_string(),
                ));
            }

            // Record the state change as a message in the report's thread.
            ThreadMessageBuilder {
                author_id: Some(user.id.into()),
                body: if !edit_closed && report.closed {
                    MessageBody::ThreadReopen
                } else {
                    MessageBody::ThreadClosure
                },
                thread_id: report.thread_id,
                hide_identity: user.role.is_mod(),
            }
            .insert(&mut transaction)
            .await?;

            sqlx::query!(
                "
                UPDATE reports
                SET closed = $1
                WHERE (id = $2)
                ",
                edit_closed,
                id as crate::database::models::ids::ReportId,
            )
            .execute(&mut *transaction)
            .await?;
        }

        // delete any images no longer in the body
        let checkable_strings: Vec<&str> = vec![&edit_report.body]
            .into_iter()
            .filter_map(|x: &Option<String>| x.as_ref().map(|y| y.as_str()))
            .collect();
        let image_context = ImageContext::Report {
            report_id: Some(id.into()),
        };
        img::delete_unused_images(image_context, checkable_strings, &mut transaction, &redis)
            .await?;

        transaction.commit().await?;

        Ok(HttpResponse::NoContent().body(""))
    } else {
        Err(ApiError::NotFound)
    }
}
|
||||
|
||||
/// Permanently deletes a report and its attached images (moderator-only).
///
/// Requires the `REPORT_DELETE` scope. Image removal and the report delete
/// share one transaction so a failure leaves everything intact. Returns
/// `204` on success, `404` when the report does not exist.
pub async fn report_delete(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    info: web::Path<(crate::models::reports::ReportId,)>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Rejects non-moderators outright.
    check_is_moderator_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::REPORT_DELETE]),
    )
    .await?;

    let mut transaction = pool.begin().await?;

    let id = info.into_inner().0;
    // Remove every uploaded image attached to this report before the report
    // row itself goes away.
    let context = ImageContext::Report {
        report_id: Some(id),
    };
    let uploaded_images =
        database::models::Image::get_many_contexted(context, &mut transaction).await?;
    for image in uploaded_images {
        image_item::Image::remove(image.id, &mut transaction, &redis).await?;
    }

    // `remove_full` returns None when there was no such report.
    let result =
        crate::database::models::report_item::Report::remove_full(id.into(), &mut transaction)
            .await?;
    transaction.commit().await?;

    if result.is_some() {
        Ok(HttpResponse::NoContent().body(""))
    } else {
        Err(ApiError::NotFound)
    }
}
|
||||
92
apps/labrinth/src/routes/v3/statistics.rs
Normal file
92
apps/labrinth/src/routes/v3/statistics.rs
Normal file
@@ -0,0 +1,92 @@
|
||||
use crate::routes::ApiError;
|
||||
use actix_web::{web, HttpResponse};
|
||||
use sqlx::PgPool;
|
||||
|
||||
/// Registers the statistics route: `GET /statistics`.
pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.route("statistics", web::get().to(get_stats));
}
|
||||
|
||||
/// Site-wide count statistics returned by `GET /statistics`.
/// Each field is `Option` because SQL `COUNT` comes back nullable from sqlx.
#[derive(serde::Serialize, serde::Deserialize)]
pub struct V3Stats {
    // Number of searchable projects.
    pub projects: Option<i64>,
    // Number of listed versions belonging to searchable projects.
    pub versions: Option<i64>,
    // Number of distinct accepted team members of searchable projects.
    pub authors: Option<i64>,
    // Number of files attached to listed versions of searchable projects.
    pub files: Option<i64>,
}
|
||||
|
||||
pub async fn get_stats(pool: web::Data<PgPool>) -> Result<HttpResponse, ApiError> {
|
||||
let projects = sqlx::query!(
|
||||
"
|
||||
SELECT COUNT(id)
|
||||
FROM mods
|
||||
WHERE status = ANY($1)
|
||||
",
|
||||
&*crate::models::projects::ProjectStatus::iterator()
|
||||
.filter(|x| x.is_searchable())
|
||||
.map(|x| x.to_string())
|
||||
.collect::<Vec<String>>(),
|
||||
)
|
||||
.fetch_one(&**pool)
|
||||
.await?;
|
||||
|
||||
let versions = sqlx::query!(
|
||||
"
|
||||
SELECT COUNT(v.id)
|
||||
FROM versions v
|
||||
INNER JOIN mods m on v.mod_id = m.id AND m.status = ANY($1)
|
||||
WHERE v.status = ANY($2)
|
||||
",
|
||||
&*crate::models::projects::ProjectStatus::iterator()
|
||||
.filter(|x| x.is_searchable())
|
||||
.map(|x| x.to_string())
|
||||
.collect::<Vec<String>>(),
|
||||
&*crate::models::projects::VersionStatus::iterator()
|
||||
.filter(|x| x.is_listed())
|
||||
.map(|x| x.to_string())
|
||||
.collect::<Vec<String>>(),
|
||||
)
|
||||
.fetch_one(&**pool)
|
||||
.await?;
|
||||
|
||||
let authors = sqlx::query!(
|
||||
"
|
||||
SELECT COUNT(DISTINCT u.id)
|
||||
FROM users u
|
||||
INNER JOIN team_members tm on u.id = tm.user_id AND tm.accepted = TRUE
|
||||
INNER JOIN mods m on tm.team_id = m.team_id AND m.status = ANY($1)
|
||||
",
|
||||
&*crate::models::projects::ProjectStatus::iterator()
|
||||
.filter(|x| x.is_searchable())
|
||||
.map(|x| x.to_string())
|
||||
.collect::<Vec<String>>(),
|
||||
)
|
||||
.fetch_one(&**pool)
|
||||
.await?;
|
||||
|
||||
let files = sqlx::query!(
|
||||
"
|
||||
SELECT COUNT(f.id) FROM files f
|
||||
INNER JOIN versions v on f.version_id = v.id AND v.status = ANY($2)
|
||||
INNER JOIN mods m on v.mod_id = m.id AND m.status = ANY($1)
|
||||
",
|
||||
&*crate::models::projects::ProjectStatus::iterator()
|
||||
.filter(|x| x.is_searchable())
|
||||
.map(|x| x.to_string())
|
||||
.collect::<Vec<String>>(),
|
||||
&*crate::models::projects::VersionStatus::iterator()
|
||||
.filter(|x| x.is_listed())
|
||||
.map(|x| x.to_string())
|
||||
.collect::<Vec<String>>(),
|
||||
)
|
||||
.fetch_one(&**pool)
|
||||
.await?;
|
||||
|
||||
let v3_stats = V3Stats {
|
||||
projects: projects.count,
|
||||
versions: versions.count,
|
||||
authors: authors.count,
|
||||
files: files.count,
|
||||
};
|
||||
|
||||
Ok(HttpResponse::Ok().json(v3_stats))
|
||||
}
|
||||
252
apps/labrinth/src/routes/v3/tags.rs
Normal file
252
apps/labrinth/src/routes/v3/tags.rs
Normal file
@@ -0,0 +1,252 @@
|
||||
use std::collections::HashMap;
|
||||
|
||||
use super::ApiError;
|
||||
use crate::database::models::categories::{Category, LinkPlatform, ProjectType, ReportType};
|
||||
use crate::database::models::loader_fields::{
|
||||
Game, Loader, LoaderField, LoaderFieldEnumValue, LoaderFieldType,
|
||||
};
|
||||
use crate::database::redis::RedisPool;
|
||||
use actix_web::{web, HttpResponse};
|
||||
|
||||
use itertools::Itertools;
|
||||
use serde_json::Value;
|
||||
use sqlx::PgPool;
|
||||
|
||||
/// Registers the tag-related routes.
///
/// `category` and `loader` live under the `tag` scope; the remaining routes
/// are registered at the top level of this config.
pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.service(
        web::scope("tag")
            .route("category", web::get().to(category_list))
            .route("loader", web::get().to(loader_list)),
    )
    .route("games", web::get().to(games_list))
    .route("loader_field", web::get().to(loader_fields_list))
    .route("license", web::get().to(license_list))
    .route("license/{id}", web::get().to(license_text))
    .route("link_platform", web::get().to(link_platform_list))
    .route("report_type", web::get().to(report_type_list))
    .route("project_type", web::get().to(project_type_list));
}
|
||||
|
||||
/// API shape of a supported game, as returned by `GET /games`.
#[derive(serde::Serialize, serde::Deserialize)]
pub struct GameData {
    // URL-safe identifier of the game.
    pub slug: String,
    // Display name.
    pub name: String,
    // Icon URL, if one is set.
    pub icon: Option<String>,
    // Banner image URL, if one is set.
    pub banner: Option<String>,
}
|
||||
|
||||
pub async fn games_list(
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let results = Game::list(&**pool, &redis)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|x| GameData {
|
||||
slug: x.slug,
|
||||
name: x.name,
|
||||
icon: x.icon_url,
|
||||
banner: x.banner_url,
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
Ok(HttpResponse::Ok().json(results))
|
||||
}
|
||||
|
||||
/// API shape of a project category, as returned by `GET /tag/category`.
#[derive(serde::Serialize, serde::Deserialize)]
pub struct CategoryData {
    // SVG/icon string for the category.
    pub icon: String,
    // Category identifier.
    pub name: String,
    // Project type this category applies to.
    pub project_type: String,
    // Grouping header the category is displayed under.
    pub header: String,
}
|
||||
|
||||
pub async fn category_list(
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let results = Category::list(&**pool, &redis)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|x| CategoryData {
|
||||
icon: x.icon,
|
||||
name: x.category,
|
||||
project_type: x.project_type,
|
||||
header: x.header,
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
Ok(HttpResponse::Ok().json(results))
|
||||
}
|
||||
|
||||
/// API shape of a loader, as returned by `GET /tag/loader`.
#[derive(serde::Serialize, serde::Deserialize)]
pub struct LoaderData {
    // Icon string for the loader.
    pub icon: String,
    // Loader identifier (e.g. its name).
    pub name: String,
    // Project types this loader can be used with.
    pub supported_project_types: Vec<String>,
    // Games this loader supports.
    pub supported_games: Vec<String>,
    pub supported_fields: Vec<String>, // Available loader fields for this loader
    // Arbitrary extra metadata attached to the loader.
    pub metadata: Value,
}
|
||||
|
||||
pub async fn loader_list(
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let loaders = Loader::list(&**pool, &redis).await?;
|
||||
|
||||
let loader_fields = LoaderField::get_fields_per_loader(
|
||||
&loaders.iter().map(|x| x.id).collect_vec(),
|
||||
&**pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let mut results = loaders
|
||||
.into_iter()
|
||||
.map(|x| LoaderData {
|
||||
icon: x.icon,
|
||||
name: x.loader,
|
||||
supported_project_types: x.supported_project_types,
|
||||
supported_games: x.supported_games,
|
||||
supported_fields: loader_fields
|
||||
.get(&x.id)
|
||||
.map(|x| x.iter().map(|x| x.field.clone()).collect_vec())
|
||||
.unwrap_or_default(),
|
||||
metadata: x.metadata,
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
results.sort_by(|a, b| a.name.to_lowercase().cmp(&b.name.to_lowercase()));
|
||||
|
||||
Ok(HttpResponse::Ok().json(results))
|
||||
}
|
||||
|
||||
/// Query parameters for `GET /loader_field`.
#[derive(serde::Deserialize, serde::Serialize)]
pub struct LoaderFieldsEnumQuery {
    // Name of the enumerable loader field to list variants for.
    pub loader_field: String,
    pub filters: Option<HashMap<String, Value>>, // For metadata
}
|
||||
|
||||
// Provides the variants for any enumerable loader field.
|
||||
pub async fn loader_fields_list(
|
||||
pool: web::Data<PgPool>,
|
||||
query: web::Query<LoaderFieldsEnumQuery>,
|
||||
redis: web::Data<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let query = query.into_inner();
|
||||
let loader_field = LoaderField::get_fields_all(&**pool, &redis)
|
||||
.await?
|
||||
.into_iter()
|
||||
.find(|x| x.field == query.loader_field)
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(format!(
|
||||
"'{}' was not a valid loader field.",
|
||||
query.loader_field
|
||||
))
|
||||
})?;
|
||||
|
||||
let loader_field_enum_id = match loader_field.field_type {
|
||||
LoaderFieldType::Enum(enum_id) | LoaderFieldType::ArrayEnum(enum_id) => enum_id,
|
||||
_ => {
|
||||
return Err(ApiError::InvalidInput(format!(
|
||||
"'{}' is not an enumerable field, but an '{}' field.",
|
||||
query.loader_field,
|
||||
loader_field.field_type.to_str()
|
||||
)))
|
||||
}
|
||||
};
|
||||
|
||||
let results: Vec<_> = if let Some(filters) = query.filters {
|
||||
LoaderFieldEnumValue::list_filter(loader_field_enum_id, filters, &**pool, &redis).await?
|
||||
} else {
|
||||
LoaderFieldEnumValue::list(loader_field_enum_id, &**pool, &redis).await?
|
||||
};
|
||||
|
||||
Ok(HttpResponse::Ok().json(results))
|
||||
}
|
||||
|
||||
/// A license summary: SPDX identifier plus its full name.
#[derive(serde::Serialize, serde::Deserialize)]
pub struct License {
    // SPDX short identifier (e.g. "MIT").
    pub short: String,
    // Full license name.
    pub name: String,
}
|
||||
|
||||
pub async fn license_list() -> HttpResponse {
|
||||
let licenses = spdx::identifiers::LICENSES;
|
||||
let mut results: Vec<License> = Vec::with_capacity(licenses.len());
|
||||
|
||||
for (short, name, _) in licenses {
|
||||
results.push(License {
|
||||
short: short.to_string(),
|
||||
name: name.to_string(),
|
||||
});
|
||||
}
|
||||
|
||||
HttpResponse::Ok().json(results)
|
||||
}
|
||||
|
||||
/// Full text of a single license, as returned by `GET /license/{id}`.
#[derive(serde::Serialize, serde::Deserialize)]
pub struct LicenseText {
    // License title (full name).
    pub title: String,
    // Complete license body text.
    pub body: String,
}
|
||||
|
||||
pub async fn license_text(params: web::Path<(String,)>) -> Result<HttpResponse, ApiError> {
|
||||
let license_id = params.into_inner().0;
|
||||
|
||||
if license_id == *crate::models::projects::DEFAULT_LICENSE_ID {
|
||||
return Ok(HttpResponse::Ok().json(LicenseText {
|
||||
title: "All Rights Reserved".to_string(),
|
||||
body: "All rights reserved unless explicitly stated.".to_string(),
|
||||
}));
|
||||
}
|
||||
|
||||
if let Some(license) = spdx::license_id(&license_id) {
|
||||
return Ok(HttpResponse::Ok().json(LicenseText {
|
||||
title: license.full_name.to_string(),
|
||||
body: license.text().to_string(),
|
||||
}));
|
||||
}
|
||||
|
||||
Err(ApiError::InvalidInput(
|
||||
"Invalid SPDX identifier specified".to_string(),
|
||||
))
|
||||
}
|
||||
|
||||
/// API shape of a link platform, as returned by `GET /link_platform`.
#[derive(serde::Serialize, serde::Deserialize)]
pub struct LinkPlatformQueryData {
    // Platform identifier.
    pub name: String,
    // Whether this platform is a donation link target.
    pub donation: bool,
}
|
||||
|
||||
pub async fn link_platform_list(
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let results: Vec<LinkPlatformQueryData> = LinkPlatform::list(&**pool, &redis)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|x| LinkPlatformQueryData {
|
||||
name: x.name,
|
||||
donation: x.donation,
|
||||
})
|
||||
.collect();
|
||||
Ok(HttpResponse::Ok().json(results))
|
||||
}
|
||||
|
||||
pub async fn report_type_list(
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let results = ReportType::list(&**pool, &redis).await?;
|
||||
Ok(HttpResponse::Ok().json(results))
|
||||
}
|
||||
|
||||
pub async fn project_type_list(
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let results = ProjectType::list(&**pool, &redis).await?;
|
||||
Ok(HttpResponse::Ok().json(results))
|
||||
}
|
||||
1022
apps/labrinth/src/routes/v3/teams.rs
Normal file
1022
apps/labrinth/src/routes/v3/teams.rs
Normal file
File diff suppressed because it is too large
Load Diff
596
apps/labrinth/src/routes/v3/threads.rs
Normal file
596
apps/labrinth/src/routes/v3/threads.rs
Normal file
@@ -0,0 +1,596 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::auth::get_user_from_headers;
|
||||
use crate::database;
|
||||
use crate::database::models::image_item;
|
||||
use crate::database::models::notification_item::NotificationBuilder;
|
||||
use crate::database::models::thread_item::ThreadMessageBuilder;
|
||||
use crate::database::redis::RedisPool;
|
||||
use crate::file_hosting::FileHost;
|
||||
use crate::models::ids::ThreadMessageId;
|
||||
use crate::models::images::{Image, ImageContext};
|
||||
use crate::models::notifications::NotificationBody;
|
||||
use crate::models::pats::Scopes;
|
||||
use crate::models::projects::ProjectStatus;
|
||||
use crate::models::threads::{MessageBody, Thread, ThreadId, ThreadType};
|
||||
use crate::models::users::User;
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::routes::ApiError;
|
||||
use actix_web::{web, HttpRequest, HttpResponse};
|
||||
use futures::TryStreamExt;
|
||||
use serde::Deserialize;
|
||||
use sqlx::PgPool;
|
||||
|
||||
/// Registers the thread routes:
/// `GET/POST /thread/{id}`, `DELETE /message/{id}`, and `GET /threads`.
pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.service(
        web::scope("thread")
            .route("{id}", web::get().to(thread_get))
            .route("{id}", web::post().to(thread_send_message)),
    );
    cfg.service(web::scope("message").route("{id}", web::delete().to(message_delete)));
    cfg.route("threads", web::get().to(threads_get));
}
|
||||
|
||||
/// Decides whether `user` may access `thread`.
///
/// Moderators can access everything. Otherwise access depends on the thread
/// type: report threads require the user to be the reporter; project threads
/// require team membership (directly, or via the project's organization team);
/// direct-message threads require membership in the thread itself.
pub async fn is_authorized_thread(
    thread: &database::models::Thread,
    user: &User,
    pool: &PgPool,
) -> Result<bool, ApiError> {
    // Moderators bypass all per-thread checks.
    if user.role.is_mod() {
        return Ok(true);
    }

    let user_id: database::models::UserId = user.id.into();
    Ok(match thread.type_ {
        ThreadType::Report => {
            if let Some(report_id) = thread.report_id {
                // Accessible only to the user who filed the report.
                let report_exists = sqlx::query!(
                    "SELECT EXISTS(SELECT 1 FROM reports WHERE id = $1 AND reporter = $2)",
                    report_id as database::models::ids::ReportId,
                    user_id as database::models::ids::UserId,
                )
                .fetch_one(pool)
                .await?
                .exists;

                report_exists.unwrap_or(false)
            } else {
                // Report thread with no report attached: deny.
                false
            }
        }
        ThreadType::Project => {
            if let Some(project_id) = thread.project_id {
                // First check direct project-team membership.
                let project_exists = sqlx::query!(
                    "SELECT EXISTS(SELECT 1 FROM mods m INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.user_id = $2 WHERE m.id = $1)",
                    project_id as database::models::ids::ProjectId,
                    user_id as database::models::ids::UserId,
                )
                .fetch_one(pool)
                .await?
                .exists;

                if !project_exists.unwrap_or(false) {
                    // Fall back to membership in the owning organization's team.
                    let org_exists = sqlx::query!(
                        "SELECT EXISTS(SELECT 1 FROM mods m INNER JOIN organizations o ON m.organization_id = o.id INNER JOIN team_members tm ON tm.team_id = o.team_id AND tm.user_id = $2 WHERE m.id = $1)",
                        project_id as database::models::ids::ProjectId,
                        user_id as database::models::ids::UserId,
                    )
                    .fetch_one(pool)
                    .await?
                    .exists;

                    org_exists.unwrap_or(false)
                } else {
                    true
                }
            } else {
                // Project thread with no project attached: deny.
                false
            }
        }
        // DMs: the user must be an explicit member of the thread.
        ThreadType::DirectMessage => thread.members.contains(&user_id),
    })
}
|
||||
|
||||
pub async fn filter_authorized_threads(
|
||||
threads: Vec<database::models::Thread>,
|
||||
user: &User,
|
||||
pool: &web::Data<PgPool>,
|
||||
redis: &RedisPool,
|
||||
) -> Result<Vec<Thread>, ApiError> {
|
||||
let user_id: database::models::UserId = user.id.into();
|
||||
|
||||
let mut return_threads = Vec::new();
|
||||
let mut check_threads = Vec::new();
|
||||
|
||||
for thread in threads {
|
||||
if user.role.is_mod()
|
||||
|| (thread.type_ == ThreadType::DirectMessage && thread.members.contains(&user_id))
|
||||
{
|
||||
return_threads.push(thread);
|
||||
} else {
|
||||
check_threads.push(thread);
|
||||
}
|
||||
}
|
||||
|
||||
if !check_threads.is_empty() {
|
||||
let project_thread_ids = check_threads
|
||||
.iter()
|
||||
.filter(|x| x.type_ == ThreadType::Project)
|
||||
.flat_map(|x| x.project_id.map(|x| x.0))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if !project_thread_ids.is_empty() {
|
||||
sqlx::query!(
|
||||
"
|
||||
SELECT m.id FROM mods m
|
||||
INNER JOIN team_members tm ON tm.team_id = m.team_id AND user_id = $2
|
||||
WHERE m.id = ANY($1)
|
||||
",
|
||||
&*project_thread_ids,
|
||||
user_id as database::models::ids::UserId,
|
||||
)
|
||||
.fetch(&***pool)
|
||||
.map_ok(|row| {
|
||||
check_threads.retain(|x| {
|
||||
let bool = x.project_id.map(|x| x.0) == Some(row.id);
|
||||
|
||||
if bool {
|
||||
return_threads.push(x.clone());
|
||||
}
|
||||
|
||||
!bool
|
||||
});
|
||||
})
|
||||
.try_collect::<Vec<()>>()
|
||||
.await?;
|
||||
}
|
||||
|
||||
let org_project_thread_ids = check_threads
|
||||
.iter()
|
||||
.filter(|x| x.type_ == ThreadType::Project)
|
||||
.flat_map(|x| x.project_id.map(|x| x.0))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if !org_project_thread_ids.is_empty() {
|
||||
sqlx::query!(
|
||||
"
|
||||
SELECT m.id FROM mods m
|
||||
INNER JOIN organizations o ON o.id = m.organization_id
|
||||
INNER JOIN team_members tm ON tm.team_id = o.team_id AND user_id = $2
|
||||
WHERE m.id = ANY($1)
|
||||
",
|
||||
&*project_thread_ids,
|
||||
user_id as database::models::ids::UserId,
|
||||
)
|
||||
.fetch(&***pool)
|
||||
.map_ok(|row| {
|
||||
check_threads.retain(|x| {
|
||||
let bool = x.project_id.map(|x| x.0) == Some(row.id);
|
||||
|
||||
if bool {
|
||||
return_threads.push(x.clone());
|
||||
}
|
||||
|
||||
!bool
|
||||
});
|
||||
})
|
||||
.try_collect::<Vec<()>>()
|
||||
.await?;
|
||||
}
|
||||
|
||||
let report_thread_ids = check_threads
|
||||
.iter()
|
||||
.filter(|x| x.type_ == ThreadType::Report)
|
||||
.flat_map(|x| x.report_id.map(|x| x.0))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if !report_thread_ids.is_empty() {
|
||||
sqlx::query!(
|
||||
"
|
||||
SELECT id FROM reports
|
||||
WHERE id = ANY($1) AND reporter = $2
|
||||
",
|
||||
&*report_thread_ids,
|
||||
user_id as database::models::ids::UserId,
|
||||
)
|
||||
.fetch(&***pool)
|
||||
.map_ok(|row| {
|
||||
check_threads.retain(|x| {
|
||||
let bool = x.report_id.map(|x| x.0) == Some(row.id);
|
||||
|
||||
if bool {
|
||||
return_threads.push(x.clone());
|
||||
}
|
||||
|
||||
!bool
|
||||
});
|
||||
})
|
||||
.try_collect::<Vec<()>>()
|
||||
.await?;
|
||||
}
|
||||
}
|
||||
|
||||
let mut user_ids = return_threads
|
||||
.iter()
|
||||
.flat_map(|x| x.members.clone())
|
||||
.collect::<Vec<database::models::UserId>>();
|
||||
user_ids.append(
|
||||
&mut return_threads
|
||||
.iter()
|
||||
.flat_map(|x| {
|
||||
x.messages
|
||||
.iter()
|
||||
.filter_map(|x| x.author_id)
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
.collect::<Vec<database::models::UserId>>(),
|
||||
);
|
||||
|
||||
let users: Vec<User> = database::models::User::get_many_ids(&user_ids, &***pool, redis)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(From::from)
|
||||
.collect();
|
||||
|
||||
let mut final_threads = Vec::new();
|
||||
|
||||
for thread in return_threads {
|
||||
let mut authors = thread.members.clone();
|
||||
|
||||
authors.append(
|
||||
&mut thread
|
||||
.messages
|
||||
.iter()
|
||||
.filter_map(|x| {
|
||||
if x.hide_identity && !user.role.is_mod() {
|
||||
None
|
||||
} else {
|
||||
x.author_id
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>(),
|
||||
);
|
||||
|
||||
final_threads.push(Thread::from(
|
||||
thread,
|
||||
users
|
||||
.iter()
|
||||
.filter(|x| authors.contains(&x.id.into()))
|
||||
.cloned()
|
||||
.collect(),
|
||||
user,
|
||||
));
|
||||
}
|
||||
|
||||
Ok(final_threads)
|
||||
}
|
||||
|
||||
/// Fetches a single thread by id, hydrating member and author users.
///
/// Returns `404` both for nonexistent threads and for threads the caller is
/// not authorized to view. Requires the `THREAD_READ` scope. Authors of
/// identity-hidden messages are only included for moderators.
pub async fn thread_get(
    req: HttpRequest,
    info: web::Path<(ThreadId,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let string = info.into_inner().0.into();

    let thread_data = database::models::Thread::get(string, &**pool).await?;

    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::THREAD_READ]),
    )
    .await?
    .1;

    if let Some(mut data) = thread_data {
        if is_authorized_thread(&data, &user, &pool).await? {
            // Gather everyone whose profile should accompany the thread:
            // thread members plus message authors (hidden authors only
            // revealed to mods).
            let authors = &mut data.members;

            authors.append(
                &mut data
                    .messages
                    .iter()
                    .filter_map(|x| {
                        if x.hide_identity && !user.role.is_mod() {
                            None
                        } else {
                            x.author_id
                        }
                    })
                    .collect::<Vec<_>>(),
            );

            let users: Vec<User> = database::models::User::get_many_ids(authors, &**pool, &redis)
                .await?
                .into_iter()
                .map(From::from)
                .collect();

            return Ok(HttpResponse::Ok().json(Thread::from(data, users, &user)));
        }
    }
    // Missing and unauthorized look identical to the caller.
    Err(ApiError::NotFound)
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct ThreadIds {
|
||||
pub ids: String,
|
||||
}
|
||||
|
||||
pub async fn threads_get(
|
||||
req: HttpRequest,
|
||||
web::Query(ids): web::Query<ThreadIds>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::THREAD_READ]),
|
||||
)
|
||||
.await?
|
||||
.1;
|
||||
|
||||
let thread_ids: Vec<database::models::ids::ThreadId> =
|
||||
serde_json::from_str::<Vec<ThreadId>>(&ids.ids)?
|
||||
.into_iter()
|
||||
.map(|x| x.into())
|
||||
.collect();
|
||||
|
||||
let threads_data = database::models::Thread::get_many(&thread_ids, &**pool).await?;
|
||||
|
||||
let threads = filter_authorized_threads(threads_data, &user, &pool, &redis).await?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(threads))
|
||||
}
|
||||
|
||||
/// JSON body for posting a message to a thread.
#[derive(Deserialize)]
pub struct NewThreadMessage {
    // The message content; this route only accepts `MessageBody::Text`.
    pub body: MessageBody,
}
|
||||
|
||||
pub async fn thread_send_message(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(ThreadId,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
new_message: web::Json<NewThreadMessage>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::THREAD_WRITE]),
|
||||
)
|
||||
.await?
|
||||
.1;
|
||||
|
||||
let string: database::models::ThreadId = info.into_inner().0.into();
|
||||
|
||||
if let MessageBody::Text {
|
||||
body,
|
||||
replying_to,
|
||||
private,
|
||||
..
|
||||
} = &new_message.body
|
||||
{
|
||||
if body.len() > 65536 {
|
||||
return Err(ApiError::InvalidInput(
|
||||
"Input body is too long!".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
if *private && !user.role.is_mod() {
|
||||
return Err(ApiError::InvalidInput(
|
||||
"You are not allowed to send private messages!".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
if let Some(replying_to) = replying_to {
|
||||
let thread_message =
|
||||
database::models::ThreadMessage::get((*replying_to).into(), &**pool).await?;
|
||||
|
||||
if let Some(thread_message) = thread_message {
|
||||
if thread_message.thread_id != string {
|
||||
return Err(ApiError::InvalidInput(
|
||||
"Message replied to is from another thread!".to_string(),
|
||||
));
|
||||
}
|
||||
} else {
|
||||
return Err(ApiError::InvalidInput(
|
||||
"Message replied to does not exist!".to_string(),
|
||||
));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
return Err(ApiError::InvalidInput(
|
||||
"You may only send text messages through this route!".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
let result = database::models::Thread::get(string, &**pool).await?;
|
||||
|
||||
if let Some(thread) = result {
|
||||
if !is_authorized_thread(&thread, &user, &pool).await? {
|
||||
return Err(ApiError::NotFound);
|
||||
}
|
||||
|
||||
let mut transaction = pool.begin().await?;
|
||||
|
||||
let id = ThreadMessageBuilder {
|
||||
author_id: Some(user.id.into()),
|
||||
body: new_message.body.clone(),
|
||||
thread_id: thread.id,
|
||||
hide_identity: user.role.is_mod(),
|
||||
}
|
||||
.insert(&mut transaction)
|
||||
.await?;
|
||||
|
||||
if let Some(project_id) = thread.project_id {
|
||||
let project = database::models::Project::get_id(project_id, &**pool, &redis).await?;
|
||||
|
||||
if let Some(project) = project {
|
||||
if project.inner.status != ProjectStatus::Processing && user.role.is_mod() {
|
||||
let members = database::models::TeamMember::get_from_team_full(
|
||||
project.inner.team_id,
|
||||
&**pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
|
||||
NotificationBuilder {
|
||||
body: NotificationBody::ModeratorMessage {
|
||||
thread_id: thread.id.into(),
|
||||
message_id: id.into(),
|
||||
project_id: Some(project.inner.id.into()),
|
||||
report_id: None,
|
||||
},
|
||||
}
|
||||
.insert_many(
|
||||
members.into_iter().map(|x| x.user_id).collect(),
|
||||
&mut transaction,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
}
|
||||
} else if let Some(report_id) = thread.report_id {
|
||||
let report = database::models::report_item::Report::get(report_id, &**pool).await?;
|
||||
|
||||
if let Some(report) = report {
|
||||
if report.closed && !user.role.is_mod() {
|
||||
return Err(ApiError::InvalidInput(
|
||||
"You may not reply to a closed report".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
if user.id != report.reporter.into() {
|
||||
NotificationBuilder {
|
||||
body: NotificationBody::ModeratorMessage {
|
||||
thread_id: thread.id.into(),
|
||||
message_id: id.into(),
|
||||
project_id: None,
|
||||
report_id: Some(report.id.into()),
|
||||
},
|
||||
}
|
||||
.insert(report.reporter, &mut transaction, &redis)
|
||||
.await?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let MessageBody::Text {
|
||||
associated_images, ..
|
||||
} = &new_message.body
|
||||
{
|
||||
for image_id in associated_images {
|
||||
if let Some(db_image) =
|
||||
image_item::Image::get((*image_id).into(), &mut *transaction, &redis).await?
|
||||
{
|
||||
let image: Image = db_image.into();
|
||||
if !matches!(image.context, ImageContext::ThreadMessage { .. })
|
||||
|| image.context.inner_id().is_some()
|
||||
{
|
||||
return Err(ApiError::InvalidInput(format!(
|
||||
"Image {} is not unused and in the 'thread_message' context",
|
||||
image_id
|
||||
)));
|
||||
}
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
UPDATE uploaded_images
|
||||
SET thread_message_id = $1
|
||||
WHERE id = $2
|
||||
",
|
||||
thread.id.0,
|
||||
image_id.0 as i64
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
image_item::Image::clear_cache(image.id.into(), &redis).await?;
|
||||
} else {
|
||||
return Err(ApiError::InvalidInput(format!(
|
||||
"Image {} does not exist",
|
||||
image_id
|
||||
)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
transaction.commit().await?;
|
||||
|
||||
Ok(HttpResponse::NoContent().body(""))
|
||||
} else {
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
}
|
||||
|
||||
/// Deletes a thread message (`DELETE`), including any images attached to it.
///
/// Only moderators or the message's author may delete a message. Images
/// uploaded in the message's context are removed from the file host and the
/// database inside the same transaction before the message is removed.
pub async fn message_delete(
    req: HttpRequest,
    info: web::Path<(ThreadMessageId,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
    file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::THREAD_WRITE]),
    )
    .await?
    .1;

    let result = database::models::ThreadMessage::get(info.into_inner().0.into(), &**pool).await?;

    if let Some(thread) = result {
        // Only mods or the author may delete. `author_id` may be None,
        // in which case a non-mod can never delete the message.
        if !user.role.is_mod() && thread.author_id != Some(user.id.into()) {
            return Err(ApiError::CustomAuthentication(
                "You cannot delete this message!".to_string(),
            ));
        }

        let mut transaction = pool.begin().await?;

        // Remove every image uploaded in the context of this message, both
        // from the CDN file host and from the database.
        let context = ImageContext::ThreadMessage {
            thread_message_id: Some(thread.id.into()),
        };
        let images = database::Image::get_many_contexted(context, &mut transaction).await?;
        let cdn_url = dotenvy::var("CDN_URL")?;
        for image in images {
            // Derive the CDN-relative path from the stored absolute URL.
            let name = image.url.split(&format!("{cdn_url}/")).nth(1);
            if let Some(icon_path) = name {
                file_host.delete_file_version("", icon_path).await?;
            }
            database::Image::remove(image.id, &mut transaction, &redis).await?;
        }

        // Preserve the message's privacy flag so removal keeps the same
        // visibility semantics; other body kinds default to public.
        let private = if let MessageBody::Text { private, .. } = thread.body {
            private
        } else if let MessageBody::Deleted { private, .. } = thread.body {
            private
        } else {
            false
        };

        database::models::ThreadMessage::remove_full(thread.id, private, &mut transaction).await?;
        transaction.commit().await?;

        Ok(HttpResponse::NoContent().body(""))
    } else {
        Err(ApiError::NotFound)
    }
}
|
||||
627
apps/labrinth/src/routes/v3/users.rs
Normal file
627
apps/labrinth/src/routes/v3/users.rs
Normal file
@@ -0,0 +1,627 @@
|
||||
use std::{collections::HashMap, sync::Arc};
|
||||
|
||||
use actix_web::{web, HttpRequest, HttpResponse};
|
||||
use lazy_static::lazy_static;
|
||||
use regex::Regex;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::PgPool;
|
||||
use validator::Validate;
|
||||
|
||||
use super::{oauth_clients::get_user_clients, ApiError};
|
||||
use crate::util::img::delete_old_images;
|
||||
use crate::{
|
||||
auth::{filter_visible_projects, get_user_from_headers},
|
||||
database::{models::User, redis::RedisPool},
|
||||
file_hosting::FileHost,
|
||||
models::{
|
||||
collections::{Collection, CollectionStatus},
|
||||
ids::UserId,
|
||||
notifications::Notification,
|
||||
pats::Scopes,
|
||||
projects::Project,
|
||||
users::{Badges, Role},
|
||||
},
|
||||
queue::session::AuthQueue,
|
||||
util::{routes::read_from_payload, validate::validation_errors_to_string},
|
||||
};
|
||||
|
||||
/// Registers the v3 user routes on the actix service config.
///
/// `user` (no scope) serves the authenticated caller; the `user` scope
/// serves lookups and edits by id/username.
pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.route("user", web::get().to(user_auth_get));
    cfg.route("users", web::get().to(users_get));

    cfg.service(
        web::scope("user")
            .route("{user_id}/projects", web::get().to(projects_list))
            .route("{id}", web::get().to(user_get))
            .route("{user_id}/collections", web::get().to(collections_list))
            .route("{user_id}/organizations", web::get().to(orgs_list))
            .route("{id}", web::patch().to(user_edit))
            .route("{id}/icon", web::patch().to(user_icon_edit))
            .route("{id}", web::delete().to(user_delete))
            .route("{id}/follows", web::get().to(user_follows))
            .route("{id}/notifications", web::get().to(user_notifications))
            .route("{id}/oauth_apps", web::get().to(get_user_clients)),
    );
}
|
||||
|
||||
/// Lists a user's projects (`GET /user/{user_id}/projects`).
///
/// Authentication is optional; `filter_visible_projects` hides projects the
/// (possibly anonymous) caller is not allowed to see.
pub async fn projects_list(
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // `.ok()` turns an auth failure into an anonymous request rather
    // than an error.
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::PROJECT_READ]),
    )
    .await
    .map(|x| x.1)
    .ok();

    let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;

    if let Some(id) = id_option.map(|x| x.id) {
        let project_data = User::get_projects(id, &**pool, &redis).await?;

        let projects: Vec<_> =
            crate::database::Project::get_many_ids(&project_data, &**pool, &redis).await?;
        let projects = filter_visible_projects(projects, &user, &pool, true).await?;
        Ok(HttpResponse::Ok().json(projects))
    } else {
        Err(ApiError::NotFound)
    }
}
|
||||
|
||||
pub async fn user_auth_get(
|
||||
req: HttpRequest,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let (scopes, mut user) = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::USER_READ]),
|
||||
)
|
||||
.await?;
|
||||
|
||||
if !scopes.contains(Scopes::USER_READ_EMAIL) {
|
||||
user.email = None;
|
||||
}
|
||||
|
||||
if !scopes.contains(Scopes::PAYOUTS_READ) {
|
||||
user.payout_data = None;
|
||||
}
|
||||
|
||||
Ok(HttpResponse::Ok().json(user))
|
||||
}
|
||||
|
||||
/// Query parameters for the bulk `users` lookup route.
#[derive(Serialize, Deserialize)]
pub struct UserIds {
    /// JSON-encoded array of user id strings (parsed in the handler).
    pub ids: String,
}
|
||||
|
||||
pub async fn users_get(
|
||||
web::Query(ids): web::Query<UserIds>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user_ids = serde_json::from_str::<Vec<String>>(&ids.ids)?;
|
||||
|
||||
let users_data = User::get_many(&user_ids, &**pool, &redis).await?;
|
||||
|
||||
let users: Vec<crate::models::users::User> = users_data.into_iter().map(From::from).collect();
|
||||
|
||||
Ok(HttpResponse::Ok().json(users))
|
||||
}
|
||||
|
||||
pub async fn user_get(
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user_data = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
||||
|
||||
if let Some(data) = user_data {
|
||||
let response: crate::models::users::User = data.into();
|
||||
Ok(HttpResponse::Ok().json(response))
|
||||
} else {
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
}
|
||||
|
||||
/// Lists a user's collections (`GET /user/{user_id}/collections`).
///
/// Moderators and the user themselves see all collections; everyone else
/// only sees `Listed` ones. Authentication is optional.
pub async fn collections_list(
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Auth failure degrades to an anonymous request.
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::COLLECTION_READ]),
    )
    .await
    .map(|x| x.1)
    .ok();

    let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;

    if let Some(id) = id_option.map(|x| x.id) {
        let user_id: UserId = id.into();

        // Private collections are visible only to mods and the owner.
        let can_view_private = user
            .map(|y| y.role.is_mod() || y.id == user_id)
            .unwrap_or(false);

        let project_data = User::get_collections(id, &**pool).await?;

        let response: Vec<_> =
            crate::database::models::Collection::get_many(&project_data, &**pool, &redis)
                .await?
                .into_iter()
                .filter(|x| can_view_private || matches!(x.status, CollectionStatus::Listed))
                .map(Collection::from)
                .collect();

        Ok(HttpResponse::Ok().json(response))
    } else {
        Err(ApiError::NotFound)
    }
}
|
||||
|
||||
pub async fn orgs_list(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_READ]),
|
||||
)
|
||||
.await
|
||||
.map(|x| x.1)
|
||||
.ok();
|
||||
|
||||
let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
||||
|
||||
if let Some(id) = id_option.map(|x| x.id) {
|
||||
let org_data = User::get_organizations(id, &**pool).await?;
|
||||
|
||||
let organizations_data =
|
||||
crate::database::models::organization_item::Organization::get_many_ids(
|
||||
&org_data, &**pool, &redis,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let team_ids = organizations_data
|
||||
.iter()
|
||||
.map(|x| x.team_id)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let teams_data = crate::database::models::TeamMember::get_from_team_full_many(
|
||||
&team_ids, &**pool, &redis,
|
||||
)
|
||||
.await?;
|
||||
let users = User::get_many_ids(
|
||||
&teams_data.iter().map(|x| x.user_id).collect::<Vec<_>>(),
|
||||
&**pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let mut organizations = vec![];
|
||||
let mut team_groups = HashMap::new();
|
||||
for item in teams_data {
|
||||
team_groups.entry(item.team_id).or_insert(vec![]).push(item);
|
||||
}
|
||||
|
||||
for data in organizations_data {
|
||||
let members_data = team_groups.remove(&data.team_id).unwrap_or(vec![]);
|
||||
let logged_in = user
|
||||
.as_ref()
|
||||
.and_then(|user| {
|
||||
members_data
|
||||
.iter()
|
||||
.find(|x| x.user_id == user.id.into() && x.accepted)
|
||||
})
|
||||
.is_some();
|
||||
|
||||
let team_members: Vec<_> = members_data
|
||||
.into_iter()
|
||||
.filter(|x| logged_in || x.accepted || id == x.user_id)
|
||||
.flat_map(|data| {
|
||||
users.iter().find(|x| x.id == data.user_id).map(|user| {
|
||||
crate::models::teams::TeamMember::from(data, user.clone(), !logged_in)
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
|
||||
let organization = crate::models::organizations::Organization::from(data, team_members);
|
||||
organizations.push(organization);
|
||||
}
|
||||
|
||||
Ok(HttpResponse::Ok().json(organizations))
|
||||
} else {
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
}
|
||||
|
||||
lazy_static! {
    // Usernames may only contain URL-safe characters (alphanumerics,
    // underscore, hyphen). Compiled once and reused by `EditUser` validation.
    static ref RE_URL_SAFE: Regex = Regex::new(r"^[a-zA-Z0-9_-]*$").unwrap();
}
|
||||
|
||||
/// Patch payload for `user_edit`; every field is optional and only the
/// supplied fields are written.
#[derive(Serialize, Deserialize, Validate)]
pub struct EditUser {
    /// New username; must be URL-safe (see `RE_URL_SAFE`), 1–39 characters.
    #[validate(length(min = 1, max = 39), regex = "RE_URL_SAFE")]
    pub username: Option<String>,
    /// Double `Option`: outer `None` leaves the bio unchanged,
    /// `Some(None)` clears it.
    #[serde(
        default,
        skip_serializing_if = "Option::is_none",
        with = "::serde_with::rust::double_option"
    )]
    #[validate(length(max = 160))]
    pub bio: Option<Option<String>>,
    /// Admin-only (enforced in the handler, not by validation).
    pub role: Option<Role>,
    /// Admin-only badge bitflags.
    pub badges: Option<Badges>,
    /// Requires the `PAYOUTS_WRITE` scope on the token.
    #[validate(length(max = 160))]
    pub venmo_handle: Option<String>,
}
|
||||
|
||||
/// Edits a user profile (`PATCH /user/{id}`).
///
/// A user may edit themselves; moderators may edit anyone. Changing `role`
/// or `badges` additionally requires the admin role, and `venmo_handle`
/// requires the `PAYOUTS_WRITE` scope. All writes happen in one transaction
/// and the user cache is cleared afterwards.
pub async fn user_edit(
    req: HttpRequest,
    info: web::Path<(String,)>,
    new_user: web::Json<EditUser>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let (scopes, user) = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::USER_WRITE]),
    )
    .await?;

    new_user
        .validate()
        .map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?;

    let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;

    if let Some(actual_user) = id_option {
        let id = actual_user.id;
        let user_id: UserId = id.into();

        if user.id == user_id || user.role.is_mod() {
            let mut transaction = pool.begin().await?;

            if let Some(username) = &new_user.username {
                // The name is free if nobody holds it, or if the holder is
                // the caller themselves (e.g. a case-only change).
                let existing_user_id_option = User::get(username, &**pool, &redis).await?;

                if existing_user_id_option
                    .map(|x| UserId::from(x.id))
                    .map(|id| id == user.id)
                    .unwrap_or(true)
                {
                    sqlx::query!(
                        "
                        UPDATE users
                        SET username = $1
                        WHERE (id = $2)
                        ",
                        username,
                        id as crate::database::models::ids::UserId,
                    )
                    .execute(&mut *transaction)
                    .await?;
                } else {
                    return Err(ApiError::InvalidInput(format!(
                        "Username {username} is taken!"
                    )));
                }
            }

            if let Some(bio) = &new_user.bio {
                // `bio` is Option<Option<String>>: Some(None) clears the
                // column (as_deref yields a NULL bind).
                sqlx::query!(
                    "
                    UPDATE users
                    SET bio = $1
                    WHERE (id = $2)
                    ",
                    bio.as_deref(),
                    id as crate::database::models::ids::UserId,
                )
                .execute(&mut *transaction)
                .await?;
            }

            if let Some(role) = &new_user.role {
                // Role changes are admin-only, even for mods editing others.
                if !user.role.is_admin() {
                    return Err(ApiError::CustomAuthentication(
                        "You do not have the permissions to edit the role of this user!"
                            .to_string(),
                    ));
                }

                let role = role.to_string();

                sqlx::query!(
                    "
                    UPDATE users
                    SET role = $1
                    WHERE (id = $2)
                    ",
                    role,
                    id as crate::database::models::ids::UserId,
                )
                .execute(&mut *transaction)
                .await?;
            }

            if let Some(badges) = &new_user.badges {
                // Badge changes are admin-only.
                if !user.role.is_admin() {
                    return Err(ApiError::CustomAuthentication(
                        "You do not have the permissions to edit the badges of this user!"
                            .to_string(),
                    ));
                }

                sqlx::query!(
                    "
                    UPDATE users
                    SET badges = $1
                    WHERE (id = $2)
                    ",
                    badges.bits() as i64,
                    id as crate::database::models::ids::UserId,
                )
                .execute(&mut *transaction)
                .await?;
            }

            if let Some(venmo_handle) = &new_user.venmo_handle {
                // Payout details require an explicitly payout-scoped token.
                if !scopes.contains(Scopes::PAYOUTS_WRITE) {
                    return Err(ApiError::CustomAuthentication(
                        "You do not have the permissions to edit the venmo handle of this user!"
                            .to_string(),
                    ));
                }

                sqlx::query!(
                    "
                    UPDATE users
                    SET venmo_handle = $1
                    WHERE (id = $2)
                    ",
                    venmo_handle,
                    id as crate::database::models::ids::UserId,
                )
                .execute(&mut *transaction)
                .await?;
            }

            transaction.commit().await?;
            // Clear by both id and the OLD username, since the name may
            // have just changed.
            User::clear_caches(&[(id, Some(actual_user.username))], &redis).await?;
            Ok(HttpResponse::NoContent().body(""))
        } else {
            Err(ApiError::CustomAuthentication(
                "You do not have permission to edit this user!".to_string(),
            ))
        }
    } else {
        Err(ApiError::NotFound)
    }
}
|
||||
|
||||
/// Query parameter carrying the file extension of an uploaded image
/// (used by icon-upload routes).
#[derive(Serialize, Deserialize)]
pub struct Extension {
    pub ext: String,
}
|
||||
|
||||
/// Replaces a user's avatar (`PATCH /user/{id}/icon`).
///
/// Only the user themselves or a moderator may change the icon. The old
/// images are deleted from the file host first, then the uploaded payload
/// (max 256 KiB) is optimized, stored, and written to the users table.
#[allow(clippy::too_many_arguments)]
pub async fn user_icon_edit(
    web::Query(ext): web::Query<Extension>,
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
    mut payload: web::Payload,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::USER_WRITE]),
    )
    .await?
    .1;
    let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;

    if let Some(actual_user) = id_option {
        if user.id != actual_user.id.into() && !user.role.is_mod() {
            return Err(ApiError::CustomAuthentication(
                "You don't have permission to edit this user's icon.".to_string(),
            ));
        }

        // Remove the previous avatar files before uploading the new ones.
        delete_old_images(
            actual_user.avatar_url,
            actual_user.raw_avatar_url,
            &***file_host,
        )
        .await?;

        // 262144 bytes = 256 KiB upload cap.
        let bytes =
            read_from_payload(&mut payload, 262144, "Icons must be smaller than 256KiB").await?;

        let user_id: UserId = actual_user.id.into();
        let upload_result = crate::util::img::upload_image_optimized(
            &format!("data/{}", user_id),
            bytes.freeze(),
            &ext.ext,
            Some(96),
            Some(1.0),
            &***file_host,
        )
        .await?;

        sqlx::query!(
            "
            UPDATE users
            SET avatar_url = $1, raw_avatar_url = $2
            WHERE (id = $3)
            ",
            upload_result.url,
            upload_result.raw_url,
            actual_user.id as crate::database::models::ids::UserId,
        )
        .execute(&**pool)
        .await?;
        User::clear_caches(&[(actual_user.id, None)], &redis).await?;

        Ok(HttpResponse::NoContent().body(""))
    } else {
        Err(ApiError::NotFound)
    }
}
|
||||
|
||||
pub async fn user_delete(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::USER_DELETE]),
|
||||
)
|
||||
.await?
|
||||
.1;
|
||||
let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
||||
|
||||
if let Some(id) = id_option.map(|x| x.id) {
|
||||
if !user.role.is_admin() && user.id != id.into() {
|
||||
return Err(ApiError::CustomAuthentication(
|
||||
"You do not have permission to delete this user!".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
let mut transaction = pool.begin().await?;
|
||||
|
||||
let result = User::remove(id, &mut transaction, &redis).await?;
|
||||
|
||||
transaction.commit().await?;
|
||||
|
||||
if result.is_some() {
|
||||
Ok(HttpResponse::NoContent().body(""))
|
||||
} else {
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
} else {
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn user_follows(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::USER_READ]),
|
||||
)
|
||||
.await?
|
||||
.1;
|
||||
let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
|
||||
|
||||
if let Some(id) = id_option.map(|x| x.id) {
|
||||
if !user.role.is_admin() && user.id != id.into() {
|
||||
return Err(ApiError::CustomAuthentication(
|
||||
"You do not have permission to see the projects this user follows!".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
let project_ids = User::get_follows(id, &**pool).await?;
|
||||
let projects: Vec<_> =
|
||||
crate::database::Project::get_many_ids(&project_ids, &**pool, &redis)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(Project::from)
|
||||
.collect();
|
||||
|
||||
Ok(HttpResponse::Ok().json(projects))
|
||||
} else {
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
}
|
||||
|
||||
/// Lists a user's notifications, newest first
/// (`GET /user/{id}/notifications`).
///
/// Visible only to admins and the user themselves.
pub async fn user_notifications(
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::NOTIFICATION_READ]),
    )
    .await?
    .1;
    let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;

    if let Some(id) = id_option.map(|x| x.id) {
        if !user.role.is_admin() && user.id != id.into() {
            return Err(ApiError::CustomAuthentication(
                "You do not have permission to see the notifications of this user!".to_string(),
            ));
        }

        let mut notifications: Vec<Notification> =
            crate::database::models::notification_item::Notification::get_many_user(
                id, &**pool, &redis,
            )
            .await?
            .into_iter()
            .map(Into::into)
            .collect();

        // Newest first.
        notifications.sort_by(|a, b| b.created.cmp(&a.created));
        Ok(HttpResponse::Ok().json(notifications))
    } else {
        Err(ApiError::NotFound)
    }
}
|
||||
1036
apps/labrinth/src/routes/v3/version_creation.rs
Normal file
1036
apps/labrinth/src/routes/v3/version_creation.rs
Normal file
File diff suppressed because it is too large
Load Diff
682
apps/labrinth/src/routes/v3/version_file.rs
Normal file
682
apps/labrinth/src/routes/v3/version_file.rs
Normal file
@@ -0,0 +1,682 @@
|
||||
use super::ApiError;
|
||||
use crate::auth::checks::{filter_visible_versions, is_visible_version};
|
||||
use crate::auth::{filter_visible_projects, get_user_from_headers};
|
||||
use crate::database::redis::RedisPool;
|
||||
use crate::models::ids::VersionId;
|
||||
use crate::models::pats::Scopes;
|
||||
use crate::models::projects::VersionType;
|
||||
use crate::models::teams::ProjectPermissions;
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::{database, models};
|
||||
use actix_web::{web, HttpRequest, HttpResponse};
|
||||
use dashmap::DashMap;
|
||||
use futures::TryStreamExt;
|
||||
use itertools::Itertools;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::PgPool;
|
||||
use std::collections::HashMap;
|
||||
|
||||
/// Registers the `version_file` / `version_files` routes.
///
/// Note: despite the `{version_id}` segment name, these routes receive a
/// file *hash* in the path (the handlers treat it as one).
pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.service(
        web::scope("version_file")
            .route("{version_id}", web::get().to(get_version_from_hash))
            .route("{version_id}/update", web::post().to(get_update_from_hash))
            .route("project", web::post().to(get_projects_from_hashes))
            .route("{version_id}", web::delete().to(delete_file))
            .route("{version_id}/download", web::get().to(download_version)),
    );
    cfg.service(
        web::scope("version_files")
            .route("update", web::post().to(update_files))
            .route("update_individual", web::post().to(update_individual_files))
            .route("", web::post().to(get_versions_from_hashes)),
    );
}
|
||||
|
||||
/// Resolves a file hash to its version (`GET /version_file/{hash}`).
///
/// Authentication is optional; hidden versions return `NotFound` so they
/// are indistinguishable from missing ones.
pub async fn get_version_from_hash(
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    hash_query: web::Query<HashQuery>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let user_option = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::VERSION_READ]),
    )
    .await
    .map(|x| x.1)
    .ok();
    // Hashes are matched lowercase.
    let hash = info.into_inner().0.to_lowercase();
    // If no algorithm was supplied, infer it from the hash length.
    let algorithm = hash_query
        .algorithm
        .clone()
        .unwrap_or_else(|| default_algorithm_from_hashes(&[hash.clone()]));
    let file = database::models::Version::get_file_from_hash(
        algorithm,
        hash,
        hash_query.version_id.map(|x| x.into()),
        &**pool,
        &redis,
    )
    .await?;
    if let Some(file) = file {
        let version = database::models::Version::get(file.version_id, &**pool, &redis).await?;
        if let Some(version) = version {
            if !is_visible_version(&version.inner, &user_option, &pool, &redis).await? {
                return Err(ApiError::NotFound);
            }

            Ok(HttpResponse::Ok().json(models::projects::Version::from(version)))
        } else {
            Err(ApiError::NotFound)
        }
    } else {
        Err(ApiError::NotFound)
    }
}
|
||||
|
||||
/// Common query parameters for single-hash lookups.
#[derive(Serialize, Deserialize)]
pub struct HashQuery {
    pub algorithm: Option<String>, // Defaults to calculation based on size of hash
    // Passed through to the lookup; presumably narrows the search when the
    // same hash exists in multiple versions — confirm against the DB layer.
    pub version_id: Option<VersionId>,
}
|
||||
|
||||
/// Picks a hash algorithm name from the length of the first supplied hash.
///
/// SHA-1 digests are 40 hex characters and SHA-512 digests are 128, so any
/// first hash of 128+ characters selects `"sha512"`; everything else —
/// including an empty slice — defaults to `"sha1"`.
pub fn default_algorithm_from_hashes(hashes: &[String]) -> String {
    // Length of the first hash, or 0 when none was supplied. `map_or`
    // avoids the needless empty-String allocation the fallback used before.
    let hash_len = hashes.first().map_or(0, |hash| hash.len());
    // Sha1 = 40 characters, Sha512 = 128 characters.
    // Favour sha1 as default, unless the hash is at least 128 characters.
    if hash_len >= 128 {
        "sha512".into()
    } else {
        "sha1".into()
    }
}
|
||||
|
||||
/// Filter criteria for picking an updated version in `get_update_from_hash`.
/// All filters are optional; an absent filter matches everything.
#[derive(Serialize, Deserialize)]
pub struct UpdateData {
    /// Only versions supporting at least one of these loaders.
    pub loaders: Option<Vec<String>>,
    /// Only versions of one of these release channels.
    pub version_types: Option<Vec<VersionType>>,
    /*
    Loader fields to filter with:
    "game_versions": ["1.16.5", "1.17"]

    Returns if it matches any of the values
    */
    pub loader_fields: Option<HashMap<String, Vec<serde_json::Value>>>,
}
|
||||
|
||||
/// Finds the newest version of a project matching the given filters,
/// starting from a file hash (`POST /version_file/{hash}/update`).
///
/// Returns `NotFound` when the hash, project, or any matching visible
/// version cannot be found.
pub async fn get_update_from_hash(
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    hash_query: web::Query<HashQuery>,
    update_data: web::Json<UpdateData>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Auth is optional; visibility is checked on the chosen version below.
    let user_option = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::VERSION_READ]),
    )
    .await
    .map(|x| x.1)
    .ok();
    let hash = info.into_inner().0.to_lowercase();
    if let Some(file) = database::models::Version::get_file_from_hash(
        hash_query
            .algorithm
            .clone()
            .unwrap_or_else(|| default_algorithm_from_hashes(&[hash.clone()])),
        hash,
        hash_query.version_id.map(|x| x.into()),
        &**pool,
        &redis,
    )
    .await?
    {
        if let Some(project) =
            database::models::Project::get_id(file.project_id, &**pool, &redis).await?
        {
            // Keep only versions that satisfy every supplied filter.
            let versions = database::models::Version::get_many(&project.versions, &**pool, &redis)
                .await?
                .into_iter()
                .filter(|x| {
                    let mut bool = true;
                    if let Some(version_types) = &update_data.version_types {
                        bool &= version_types
                            .iter()
                            .any(|y| y.as_str() == x.inner.version_type);
                    }
                    if let Some(loaders) = &update_data.loaders {
                        bool &= x.loaders.iter().any(|y| loaders.contains(y));
                    }
                    if let Some(loader_fields) = &update_data.loader_fields {
                        for (key, values) in loader_fields {
                            // NOTE(review): a version that lacks the requested
                            // field passes this filter (`else { true }`) —
                            // confirm that is intentional.
                            bool &= if let Some(x_vf) =
                                x.version_fields.iter().find(|y| y.field_name == *key)
                            {
                                values.iter().any(|v| x_vf.value.contains_json_value(v))
                            } else {
                                true
                            };
                        }
                    }
                    bool
                })
                .sorted();

            // `.sorted().last()` yields the maximum under the version's
            // ordering — i.e. the newest matching version (despite the
            // binding being named `first`).
            if let Some(first) = versions.last() {
                if !is_visible_version(&first.inner, &user_option, &pool, &redis).await? {
                    return Err(ApiError::NotFound);
                }

                return Ok(HttpResponse::Ok().json(models::projects::Version::from(first)));
            }
        }
    }
    Err(ApiError::NotFound)
}
|
||||
|
||||
// Requests above with multiple versions below
/// Body for bulk hash lookups: many hashes, one shared algorithm.
#[derive(Deserialize)]
pub struct FileHashes {
    pub algorithm: Option<String>, // Defaults to calculation based on size of hash
    pub hashes: Vec<String>,
}
|
||||
|
||||
/// Bulk-resolves file hashes to versions (`POST /version_files`).
///
/// Responds with a map of hash -> version; hashes that don't resolve to a
/// visible version are silently omitted.
pub async fn get_versions_from_hashes(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    file_data: web::Json<FileHashes>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Auth is optional; invisible versions are filtered out below.
    let user_option = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::VERSION_READ]),
    )
    .await
    .map(|x| x.1)
    .ok();

    let algorithm = file_data
        .algorithm
        .clone()
        .unwrap_or_else(|| default_algorithm_from_hashes(&file_data.hashes));

    let files = database::models::Version::get_files_from_hash(
        algorithm.clone(),
        &file_data.hashes,
        &**pool,
        &redis,
    )
    .await?;

    let version_ids = files.iter().map(|x| x.version_id).collect::<Vec<_>>();
    let versions_data = filter_visible_versions(
        database::models::Version::get_many(&version_ids, &**pool, &redis).await?,
        &user_option,
        &pool,
        &redis,
    )
    .await?;

    // Key each visible version by the hash(es) that led to it.
    let mut response = HashMap::new();

    for version in versions_data {
        for file in files.iter().filter(|x| x.version_id == version.id.into()) {
            if let Some(hash) = file.hashes.get(&algorithm) {
                response.insert(hash.clone(), version.clone());
            }
        }
    }

    Ok(HttpResponse::Ok().json(response))
}
|
||||
|
||||
/// Bulk-resolves file hashes to their projects
/// (`POST /version_file/project`).
///
/// Responds with a map of hash -> project; hashes that don't resolve to a
/// visible project are silently omitted.
pub async fn get_projects_from_hashes(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    file_data: web::Json<FileHashes>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Auth is optional; invisible projects are filtered out below.
    let user_option = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::PROJECT_READ, Scopes::VERSION_READ]),
    )
    .await
    .map(|x| x.1)
    .ok();

    let algorithm = file_data
        .algorithm
        .clone()
        .unwrap_or_else(|| default_algorithm_from_hashes(&file_data.hashes));
    let files = database::models::Version::get_files_from_hash(
        algorithm.clone(),
        &file_data.hashes,
        &**pool,
        &redis,
    )
    .await?;

    let project_ids = files.iter().map(|x| x.project_id).collect::<Vec<_>>();

    let projects_data = filter_visible_projects(
        database::models::Project::get_many_ids(&project_ids, &**pool, &redis).await?,
        &user_option,
        &pool,
        false,
    )
    .await?;

    // Key each visible project by the hash(es) that led to it.
    let mut response = HashMap::new();

    for project in projects_data {
        for file in files.iter().filter(|x| x.project_id == project.id.into()) {
            if let Some(hash) = file.hashes.get(&algorithm) {
                response.insert(hash.clone(), project.clone());
            }
        }
    }

    Ok(HttpResponse::Ok().json(response))
}
|
||||
|
||||
/// Body for the bulk "latest matching version" lookup (`update_files`):
/// many hashes plus shared filters.
#[derive(Deserialize)]
pub struct ManyUpdateData {
    pub algorithm: Option<String>, // Defaults to calculation based on size of hash
    pub hashes: Vec<String>,
    pub loaders: Option<Vec<String>>,
    pub game_versions: Option<Vec<String>>,
    pub version_types: Option<Vec<VersionType>>,
}
|
||||
/// Bulk update check: for each supplied file hash, find the newest version of
/// the owning project that matches the requested loader / game-version /
/// channel filters, and return it keyed by the original hash.
///
/// NOTE(review): this endpoint performs no authentication — presumably
/// intentional so launchers can check for updates anonymously; confirm
/// against the route registration.
pub async fn update_files(
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    update_data: web::Json<ManyUpdateData>,
) -> Result<HttpResponse, ApiError> {
    // If the caller did not name an algorithm, infer it from the hash length.
    let algorithm = update_data
        .algorithm
        .clone()
        .unwrap_or_else(|| default_algorithm_from_hashes(&update_data.hashes));
    // Resolve each hash to the file row (and thus project) it belongs to.
    let files = database::models::Version::get_files_from_hash(
        algorithm.clone(),
        &update_data.hashes,
        &**pool,
        &redis,
    )
    .await?;

    // TODO: de-hardcode this and actually use version fields system
    // Query all candidate versions of the affected projects, filtered by
    // version type ($4), game version enum value ($2, field_id 3 is the
    // hard-coded "game_versions" loader field) and loader ($3). Empty filter
    // arrays (cardinality = 0) disable that filter.
    let update_version_ids = sqlx::query!(
        "
        SELECT v.id version_id, v.mod_id mod_id
        FROM mods m
        INNER JOIN versions v ON m.id = v.mod_id AND (cardinality($4::varchar[]) = 0 OR v.version_type = ANY($4))
        INNER JOIN version_fields vf ON vf.field_id = 3 AND v.id = vf.version_id
        INNER JOIN loader_field_enum_values lfev ON vf.enum_value = lfev.id AND (cardinality($2::varchar[]) = 0 OR lfev.value = ANY($2::varchar[]))
        INNER JOIN loaders_versions lv ON lv.version_id = v.id
        INNER JOIN loaders l on lv.loader_id = l.id AND (cardinality($3::varchar[]) = 0 OR l.loader = ANY($3::varchar[]))
        WHERE m.id = ANY($1)
        ORDER BY v.date_published ASC
        ",
        &files.iter().map(|x| x.project_id.0).collect::<Vec<_>>(),
        &update_data.game_versions.clone().unwrap_or_default(),
        &update_data.loaders.clone().unwrap_or_default(),
        &update_data.version_types.clone().unwrap_or_default().iter().map(|x| x.to_string()).collect::<Vec<_>>(),
    )
    .fetch(&**pool)
    // Group matching version ids by project; rows arrive oldest-first, so the
    // last entry of each Vec is the newest matching version.
    .try_fold(DashMap::new(), |acc : DashMap<_,Vec<database::models::ids::VersionId>>, m| {
        acc.entry(database::models::ProjectId(m.mod_id))
            .or_default()
            .push(database::models::VersionId(m.version_id));
        async move { Ok(acc) }
    })
    .await?;

    // Fetch the newest matching version per project (last id in each group).
    let versions = database::models::Version::get_many(
        &update_version_ids
            .into_iter()
            .filter_map(|x| x.1.last().copied())
            .collect::<Vec<_>>(),
        &**pool,
        &redis,
    )
    .await?;

    // Map each input hash back to the newest version of its project.
    let mut response = HashMap::new();
    for file in files {
        if let Some(version) = versions
            .iter()
            .find(|x| x.inner.project_id == file.project_id)
        {
            if let Some(hash) = file.hashes.get(&algorithm) {
                response.insert(
                    hash.clone(),
                    models::projects::Version::from(version.clone()),
                );
            }
        }
    }

    Ok(HttpResponse::Ok().json(response))
}
|
||||
|
||||
/// Per-hash entry of the individual update-check endpoint
/// (`update_individual_files`): one installed file plus the filters that a
/// candidate replacement version must satisfy for THIS file specifically.
#[derive(Serialize, Deserialize)]
pub struct FileUpdateData {
    // Hash of the installed file (algorithm given at the request level).
    pub hash: String,
    // Optional loader filter for this file's update candidates.
    pub loaders: Option<Vec<String>>,
    // Optional loader-field filters (field name -> acceptable JSON values),
    // e.g. {"game_versions": ["1.20.1"]}.
    pub loader_fields: Option<HashMap<String, Vec<serde_json::Value>>>,
    // Optional release-channel filter for this file's update candidates.
    pub version_types: Option<Vec<VersionType>>,
}
|
||||
|
||||
/// Request body for `update_individual_files`: a shared hash algorithm plus a
/// list of per-file hash + filter entries.
#[derive(Serialize, Deserialize)]
pub struct ManyFileUpdateData {
    // Hash algorithm used for every entry in `hashes`.
    pub algorithm: Option<String>, // Defaults to calculation based on size of hash
    // One entry per installed file, each with its own update filters.
    pub hashes: Vec<FileUpdateData>,
}
|
||||
|
||||
/// Per-file update check: like `update_files`, but each submitted hash
/// carries its own loader / loader-field / channel filters. For every hash,
/// returns the newest visible version of the owning project that satisfies
/// that hash's filters, keyed by the hash.
pub async fn update_individual_files(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    update_data: web::Json<ManyFileUpdateData>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Optional auth: anonymous callers only see publicly visible versions.
    let user_option = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::VERSION_READ]),
    )
    .await
    .map(|x| x.1)
    .ok();

    // Infer the hash algorithm from hash length when not supplied.
    let algorithm = update_data.algorithm.clone().unwrap_or_else(|| {
        default_algorithm_from_hashes(
            &update_data
                .hashes
                .iter()
                .map(|x| x.hash.clone())
                .collect::<Vec<_>>(),
        )
    });
    // Resolve every submitted hash to its file row (and owning project).
    let files = database::models::Version::get_files_from_hash(
        algorithm.clone(),
        &update_data
            .hashes
            .iter()
            .map(|x| x.hash.clone())
            .collect::<Vec<_>>(),
        &**pool,
        &redis,
    )
    .await?;

    let projects = database::models::Project::get_many_ids(
        &files.iter().map(|x| x.project_id).collect::<Vec<_>>(),
        &**pool,
        &redis,
    )
    .await?;
    // Load EVERY version of every affected project up front; filtering
    // happens in memory below.
    let all_versions = database::models::Version::get_many(
        &projects
            .iter()
            .flat_map(|x| x.versions.clone())
            .collect::<Vec<_>>(),
        &**pool,
        &redis,
    )
    .await?;

    let mut response = HashMap::new();

    for project in projects {
        for file in files.iter().filter(|x| x.project_id == project.inner.id) {
            if let Some(hash) = file.hashes.get(&algorithm) {
                // Find the request entry this file corresponds to, so we can
                // apply its per-file filters.
                if let Some(query_file) = update_data.hashes.iter().find(|x| &x.hash == hash) {
                    let version = all_versions
                        .iter()
                        .filter(|x| x.inner.project_id == file.project_id)
                        .filter(|x| {
                            // Candidate must pass every filter the caller set;
                            // unset filters always pass.
                            let mut bool = true;

                            if let Some(version_types) = &query_file.version_types {
                                bool &= version_types
                                    .iter()
                                    .any(|y| y.as_str() == x.inner.version_type);
                            }
                            if let Some(loaders) = &query_file.loaders {
                                bool &= x.loaders.iter().any(|y| loaders.contains(y));
                            }

                            if let Some(loader_fields) = &query_file.loader_fields {
                                for (key, values) in loader_fields {
                                    // A filter on a field the version does not
                                    // declare passes vacuously (`else true`).
                                    bool &= if let Some(x_vf) =
                                        x.version_fields.iter().find(|y| y.field_name == *key)
                                    {
                                        values.iter().any(|v| x_vf.value.contains_json_value(v))
                                    } else {
                                        true
                                    };
                                }
                            }
                            bool
                        })
                        // Take the greatest surviving version under the
                        // versions' Ord impl — presumably newest; confirm
                        // against QueryVersion's Ord derivation.
                        .sorted()
                        .last();

                    if let Some(version) = version {
                        // Visibility is checked per candidate so hidden
                        // versions are never leaked through this endpoint.
                        if is_visible_version(&version.inner, &user_option, &pool, &redis).await? {
                            response.insert(
                                hash.clone(),
                                models::projects::Version::from(version.clone()),
                            );
                        }
                    }
                }
            }
        }
    }

    Ok(HttpResponse::Ok().json(response))
}
|
||||
|
||||
// under /api/v1/version_file/{hash}
|
||||
// under /api/v1/version_file/{hash}
/// Delete a single file (and its hash rows) from a version, identified by
/// file hash. Requires `VERSION_WRITE` scope and, for non-admins, the
/// `DELETE_VERSION` project permission. Refuses to delete a version's last
/// remaining file.
pub async fn delete_file(
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    hash_query: web::Query<HashQuery>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Authentication is mandatory here (no `.ok()` fallback).
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::VERSION_WRITE]),
    )
    .await?
    .1;

    // Hashes are stored lowercase; normalize before lookup.
    let hash = info.into_inner().0.to_lowercase();
    let algorithm = hash_query
        .algorithm
        .clone()
        .unwrap_or_else(|| default_algorithm_from_hashes(&[hash.clone()]));
    // `version_id` in the query disambiguates when the same hash exists on
    // multiple versions.
    let file = database::models::Version::get_file_from_hash(
        algorithm.clone(),
        hash,
        hash_query.version_id.map(|x| x.into()),
        &**pool,
        &redis,
    )
    .await?;

    if let Some(row) = file {
        // Admins bypass the team/organization permission check entirely.
        if !user.role.is_admin() {
            let team_member = database::models::TeamMember::get_from_user_id_version(
                row.version_id,
                user.id.into(),
                &**pool,
            )
            .await
            .map_err(ApiError::Database)?;

            // The project may be owned by an organization; membership there
            // can also grant permission.
            let organization =
                database::models::Organization::get_associated_organization_project_id(
                    row.project_id,
                    &**pool,
                )
                .await
                .map_err(ApiError::Database)?;

            let organization_team_member = if let Some(organization) = &organization {
                database::models::TeamMember::get_from_user_id_organization(
                    organization.id,
                    user.id.into(),
                    false,
                    &**pool,
                )
                .await
                .map_err(ApiError::Database)?
            } else {
                None
            };

            // Effective permissions from role + direct team + org team.
            let permissions = ProjectPermissions::get_permissions_by_role(
                &user.role,
                &team_member,
                &organization_team_member,
            )
            .unwrap_or_default();

            if !permissions.contains(ProjectPermissions::DELETE_VERSION) {
                return Err(ApiError::CustomAuthentication(
                    "You don't have permission to delete this file!".to_string(),
                ));
            }
        }

        let version = database::models::Version::get(row.version_id, &**pool, &redis).await?;
        if let Some(version) = version {
            // A version must always retain at least one file.
            if version.files.len() < 2 {
                return Err(ApiError::InvalidInput(
                    "Versions must have at least one file uploaded to them".to_string(),
                ));
            }

            // Invalidate the cached version before mutating its rows.
            database::models::Version::clear_cache(&version, &redis).await?;
        }

        let mut transaction = pool.begin().await?;

        // Delete hash rows first (they reference the file row).
        sqlx::query!(
            "
            DELETE FROM hashes
            WHERE file_id = $1
            ",
            row.id.0
        )
        .execute(&mut *transaction)
        .await?;

        sqlx::query!(
            "
            DELETE FROM files
            WHERE files.id = $1
            ",
            row.id.0,
        )
        .execute(&mut *transaction)
        .await?;

        transaction.commit().await?;

        Ok(HttpResponse::NoContent().body(""))
    } else {
        Err(ApiError::NotFound)
    }
}
|
||||
|
||||
/// JSON body accompanying the 307 redirect from `download_version`, carrying
/// the same CDN URL as the `Location` header for clients that prefer to read
/// the body.
#[derive(Serialize, Deserialize)]
pub struct DownloadRedirect {
    // Direct download URL for the requested file.
    pub url: String,
}
|
||||
|
||||
// under /api/v1/version_file/{hash}/download
|
||||
pub async fn download_version(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
hash_query: web::Query<HashQuery>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user_option = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::VERSION_READ]),
|
||||
)
|
||||
.await
|
||||
.map(|x| x.1)
|
||||
.ok();
|
||||
|
||||
let hash = info.into_inner().0.to_lowercase();
|
||||
let algorithm = hash_query
|
||||
.algorithm
|
||||
.clone()
|
||||
.unwrap_or_else(|| default_algorithm_from_hashes(&[hash.clone()]));
|
||||
let file = database::models::Version::get_file_from_hash(
|
||||
algorithm.clone(),
|
||||
hash,
|
||||
hash_query.version_id.map(|x| x.into()),
|
||||
&**pool,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
|
||||
if let Some(file) = file {
|
||||
let version = database::models::Version::get(file.version_id, &**pool, &redis).await?;
|
||||
|
||||
if let Some(version) = version {
|
||||
if !is_visible_version(&version.inner, &user_option, &pool, &redis).await? {
|
||||
return Err(ApiError::NotFound);
|
||||
}
|
||||
|
||||
Ok(HttpResponse::TemporaryRedirect()
|
||||
.append_header(("Location", &*file.url))
|
||||
.json(DownloadRedirect { url: file.url }))
|
||||
} else {
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
} else {
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
}
|
||||
876
apps/labrinth/src/routes/v3/versions.rs
Normal file
876
apps/labrinth/src/routes/v3/versions.rs
Normal file
@@ -0,0 +1,876 @@
|
||||
use std::collections::HashMap;
|
||||
|
||||
use super::ApiError;
|
||||
use crate::auth::checks::{filter_visible_versions, is_visible_project, is_visible_version};
|
||||
use crate::auth::get_user_from_headers;
|
||||
use crate::database;
|
||||
use crate::database::models::loader_fields::{
|
||||
self, LoaderField, LoaderFieldEnumValue, VersionField,
|
||||
};
|
||||
use crate::database::models::version_item::{DependencyBuilder, LoaderVersion};
|
||||
use crate::database::models::{image_item, Organization};
|
||||
use crate::database::redis::RedisPool;
|
||||
use crate::models;
|
||||
use crate::models::ids::base62_impl::parse_base62;
|
||||
use crate::models::ids::VersionId;
|
||||
use crate::models::images::ImageContext;
|
||||
use crate::models::pats::Scopes;
|
||||
use crate::models::projects::{skip_nulls, Loader};
|
||||
use crate::models::projects::{Dependency, FileType, VersionStatus, VersionType};
|
||||
use crate::models::teams::ProjectPermissions;
|
||||
use crate::queue::session::AuthQueue;
|
||||
use crate::search::indexing::remove_documents;
|
||||
use crate::search::SearchConfig;
|
||||
use crate::util::img;
|
||||
use crate::util::validate::validation_errors_to_string;
|
||||
use actix_web::{web, HttpRequest, HttpResponse};
|
||||
use itertools::Itertools;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::PgPool;
|
||||
use validator::Validate;
|
||||
|
||||
/// Register the v3 version routes on the actix service config.
///
/// Top level: POST /version (create), GET /versions (batch fetch).
/// Scoped under /version: GET/PATCH/DELETE /{id} and
/// POST /{version_id}/file (attach an extra file to an existing version).
pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.route(
        "version",
        web::post().to(super::version_creation::version_create),
    );
    cfg.route("versions", web::get().to(versions_get));

    cfg.service(
        web::scope("version")
            .route("{id}", web::get().to(version_get))
            .route("{id}", web::patch().to(version_edit))
            .route("{id}", web::delete().to(version_delete))
            .route(
                "{version_id}/file",
                web::post().to(super::version_creation::upload_file_to_version),
            ),
    );
}
|
||||
|
||||
// Given a project ID/slug and a version slug
|
||||
pub async fn version_project_get(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String, String)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let info = info.into_inner();
|
||||
version_project_get_helper(req, info, pool, redis, session_queue).await
|
||||
}
|
||||
pub async fn version_project_get_helper(
|
||||
req: HttpRequest,
|
||||
id: (String, String),
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let result = database::models::Project::get(&id.0, &**pool, &redis).await?;
|
||||
|
||||
let user_option = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_READ, Scopes::VERSION_READ]),
|
||||
)
|
||||
.await
|
||||
.map(|x| x.1)
|
||||
.ok();
|
||||
|
||||
if let Some(project) = result {
|
||||
if !is_visible_project(&project.inner, &user_option, &pool, false).await? {
|
||||
return Err(ApiError::NotFound);
|
||||
}
|
||||
|
||||
let versions =
|
||||
database::models::Version::get_many(&project.versions, &**pool, &redis).await?;
|
||||
|
||||
let id_opt = parse_base62(&id.1).ok();
|
||||
let version = versions
|
||||
.into_iter()
|
||||
.find(|x| Some(x.inner.id.0 as u64) == id_opt || x.inner.version_number == id.1);
|
||||
|
||||
if let Some(version) = version {
|
||||
if is_visible_version(&version.inner, &user_option, &pool, &redis).await? {
|
||||
return Ok(HttpResponse::Ok().json(models::projects::Version::from(version)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
|
||||
/// Query parameters for `versions_get`: `ids` is a JSON-encoded array of
/// base62 version IDs, passed as a single query-string value.
#[derive(Serialize, Deserialize)]
pub struct VersionIds {
    // JSON array of version IDs, e.g. `["AABBCCDD","EEFFGGHH"]`.
    pub ids: String,
}
|
||||
|
||||
pub async fn versions_get(
|
||||
req: HttpRequest,
|
||||
web::Query(ids): web::Query<VersionIds>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let version_ids = serde_json::from_str::<Vec<models::ids::VersionId>>(&ids.ids)?
|
||||
.into_iter()
|
||||
.map(|x| x.into())
|
||||
.collect::<Vec<database::models::VersionId>>();
|
||||
let versions_data = database::models::Version::get_many(&version_ids, &**pool, &redis).await?;
|
||||
|
||||
let user_option = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::VERSION_READ]),
|
||||
)
|
||||
.await
|
||||
.map(|x| x.1)
|
||||
.ok();
|
||||
|
||||
let versions = filter_visible_versions(versions_data, &user_option, &pool, &redis).await?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(versions))
|
||||
}
|
||||
|
||||
pub async fn version_get(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(models::ids::VersionId,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let id = info.into_inner().0;
|
||||
version_get_helper(req, id, pool, redis, session_queue).await
|
||||
}
|
||||
|
||||
pub async fn version_get_helper(
|
||||
req: HttpRequest,
|
||||
id: models::ids::VersionId,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let version_data = database::models::Version::get(id.into(), &**pool, &redis).await?;
|
||||
|
||||
let user_option = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::VERSION_READ]),
|
||||
)
|
||||
.await
|
||||
.map(|x| x.1)
|
||||
.ok();
|
||||
|
||||
if let Some(data) = version_data {
|
||||
if is_visible_version(&data.inner, &user_option, &pool, &redis).await? {
|
||||
return Ok(HttpResponse::Ok().json(models::projects::Version::from(data)));
|
||||
}
|
||||
}
|
||||
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
|
||||
/// PATCH body for `version_edit`. Every field is optional; only fields that
/// are present are applied. Loader-specific fields (e.g. game_versions) are
/// flattened into `fields` via serde.
#[derive(Serialize, Deserialize, Validate, Default, Debug)]
pub struct EditVersion {
    // New display name (1-64 chars, validated); trimmed before storage.
    #[validate(
        length(min = 1, max = 64),
        custom(function = "crate::util::validate::validate_name")
    )]
    pub name: Option<String>,
    // New version number (1-32 URL-safe chars).
    #[validate(
        length(min = 1, max = 32),
        regex = "crate::util::validate::RE_URL_SAFE"
    )]
    pub version_number: Option<String>,
    // New changelog body (Markdown, up to 64 KiB).
    #[validate(length(max = 65536))]
    pub changelog: Option<String>,
    // New release channel (release/beta/alpha).
    pub version_type: Option<models::projects::VersionType>,
    // Full replacement list of dependencies (existing ones are deleted first).
    #[validate(
        length(min = 0, max = 4096),
        custom(function = "crate::util::validate::validate_deps")
    )]
    pub dependencies: Option<Vec<Dependency>>,
    // Full replacement list of loaders.
    pub loaders: Option<Vec<Loader>>,
    // Whether the version is featured on the project page.
    pub featured: Option<bool>,
    // Override the download counter — moderator-only.
    pub downloads: Option<u32>,
    // New status; must pass `VersionStatus::can_be_requested`.
    pub status: Option<VersionStatus>,
    // Per-file type overrides, addressed by hash.
    pub file_types: Option<Vec<EditVersionFileType>>,
    // Manual ordering within the project's version list.
    // double_option distinguishes "absent" (no change) from explicit null
    // (clear the ordering).
    #[serde(
        default,
        skip_serializing_if = "Option::is_none",
        with = "::serde_with::rust::double_option"
    )]
    pub ordering: Option<Option<i32>>,

    // Flattened loader fields
    // All other fields are loader-specific VersionFields
    // These are flattened during serialization
    #[serde(deserialize_with = "skip_nulls")]
    #[serde(flatten)]
    pub fields: HashMap<String, serde_json::Value>,
}
|
||||
|
||||
/// One file-type override in an `EditVersion` request: the file is addressed
/// by (algorithm, hash), and `file_type` is the new value (None clears it).
#[derive(Serialize, Deserialize, Debug)]
pub struct EditVersionFileType {
    // Hash algorithm the `hash` value was computed with.
    pub algorithm: String,
    // Hash identifying the file to modify.
    pub hash: String,
    // New file type; None resets the column to NULL.
    pub file_type: Option<FileType>,
}
|
||||
|
||||
pub async fn version_edit(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(VersionId,)>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
new_version: web::Json<serde_json::Value>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let new_version: EditVersion = serde_json::from_value(new_version.into_inner())?;
|
||||
version_edit_helper(
|
||||
req,
|
||||
info.into_inner(),
|
||||
pool,
|
||||
redis,
|
||||
new_version,
|
||||
session_queue,
|
||||
)
|
||||
.await
|
||||
}
|
||||
pub async fn version_edit_helper(
|
||||
req: HttpRequest,
|
||||
info: (VersionId,),
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
new_version: EditVersion,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let user = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::VERSION_WRITE]),
|
||||
)
|
||||
.await?
|
||||
.1;
|
||||
|
||||
new_version
|
||||
.validate()
|
||||
.map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?;
|
||||
|
||||
let version_id = info.0;
|
||||
let id = version_id.into();
|
||||
|
||||
let result = database::models::Version::get(id, &**pool, &redis).await?;
|
||||
|
||||
if let Some(version_item) = result {
|
||||
let team_member = database::models::TeamMember::get_from_user_id_project(
|
||||
version_item.inner.project_id,
|
||||
user.id.into(),
|
||||
false,
|
||||
&**pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let organization = Organization::get_associated_organization_project_id(
|
||||
version_item.inner.project_id,
|
||||
&**pool,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let organization_team_member = if let Some(organization) = &organization {
|
||||
database::models::TeamMember::get_from_user_id(
|
||||
organization.team_id,
|
||||
user.id.into(),
|
||||
&**pool,
|
||||
)
|
||||
.await?
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let permissions = ProjectPermissions::get_permissions_by_role(
|
||||
&user.role,
|
||||
&team_member,
|
||||
&organization_team_member,
|
||||
);
|
||||
|
||||
if let Some(perms) = permissions {
|
||||
if !perms.contains(ProjectPermissions::UPLOAD_VERSION) {
|
||||
return Err(ApiError::CustomAuthentication(
|
||||
"You do not have the permissions to edit this version!".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
let mut transaction = pool.begin().await?;
|
||||
|
||||
if let Some(name) = &new_version.name {
|
||||
sqlx::query!(
|
||||
"
|
||||
UPDATE versions
|
||||
SET name = $1
|
||||
WHERE (id = $2)
|
||||
",
|
||||
name.trim(),
|
||||
id as database::models::ids::VersionId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if let Some(number) = &new_version.version_number {
|
||||
sqlx::query!(
|
||||
"
|
||||
UPDATE versions
|
||||
SET version_number = $1
|
||||
WHERE (id = $2)
|
||||
",
|
||||
number,
|
||||
id as database::models::ids::VersionId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if let Some(version_type) = &new_version.version_type {
|
||||
sqlx::query!(
|
||||
"
|
||||
UPDATE versions
|
||||
SET version_type = $1
|
||||
WHERE (id = $2)
|
||||
",
|
||||
version_type.as_str(),
|
||||
id as database::models::ids::VersionId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if let Some(dependencies) = &new_version.dependencies {
|
||||
sqlx::query!(
|
||||
"
|
||||
DELETE FROM dependencies WHERE dependent_id = $1
|
||||
",
|
||||
id as database::models::ids::VersionId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
let builders = dependencies
|
||||
.iter()
|
||||
.map(|x| database::models::version_item::DependencyBuilder {
|
||||
project_id: x.project_id.map(|x| x.into()),
|
||||
version_id: x.version_id.map(|x| x.into()),
|
||||
file_name: x.file_name.clone(),
|
||||
dependency_type: x.dependency_type.to_string(),
|
||||
})
|
||||
.collect::<Vec<database::models::version_item::DependencyBuilder>>();
|
||||
|
||||
DependencyBuilder::insert_many(builders, version_item.inner.id, &mut transaction)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if !new_version.fields.is_empty() {
|
||||
let version_fields_names = new_version
|
||||
.fields
|
||||
.keys()
|
||||
.map(|x| x.to_string())
|
||||
.collect::<Vec<String>>();
|
||||
|
||||
let all_loaders = loader_fields::Loader::list(&mut *transaction, &redis).await?;
|
||||
let loader_ids = version_item
|
||||
.loaders
|
||||
.iter()
|
||||
.filter_map(|x| all_loaders.iter().find(|y| &y.loader == x).map(|y| y.id))
|
||||
.collect_vec();
|
||||
|
||||
let loader_fields = LoaderField::get_fields(&loader_ids, &mut *transaction, &redis)
|
||||
.await?
|
||||
.into_iter()
|
||||
.filter(|lf| version_fields_names.contains(&lf.field))
|
||||
.collect::<Vec<LoaderField>>();
|
||||
|
||||
let loader_field_ids = loader_fields.iter().map(|lf| lf.id.0).collect::<Vec<i32>>();
|
||||
sqlx::query!(
|
||||
"
|
||||
DELETE FROM version_fields
|
||||
WHERE version_id = $1
|
||||
AND field_id = ANY($2)
|
||||
",
|
||||
id as database::models::ids::VersionId,
|
||||
&loader_field_ids
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
let mut loader_field_enum_values = LoaderFieldEnumValue::list_many_loader_fields(
|
||||
&loader_fields,
|
||||
&mut *transaction,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let mut version_fields = Vec::new();
|
||||
for (vf_name, vf_value) in new_version.fields {
|
||||
let loader_field = loader_fields
|
||||
.iter()
|
||||
.find(|lf| lf.field == vf_name)
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(format!(
|
||||
"Loader field '{vf_name}' does not exist for any loaders supplied."
|
||||
))
|
||||
})?;
|
||||
let enum_variants = loader_field_enum_values
|
||||
.remove(&loader_field.id)
|
||||
.unwrap_or_default();
|
||||
let vf: VersionField = VersionField::check_parse(
|
||||
version_id.into(),
|
||||
loader_field.clone(),
|
||||
vf_value.clone(),
|
||||
enum_variants,
|
||||
)
|
||||
.map_err(ApiError::InvalidInput)?;
|
||||
version_fields.push(vf);
|
||||
}
|
||||
VersionField::insert_many(version_fields, &mut transaction).await?;
|
||||
}
|
||||
|
||||
if let Some(loaders) = &new_version.loaders {
|
||||
sqlx::query!(
|
||||
"
|
||||
DELETE FROM loaders_versions WHERE version_id = $1
|
||||
",
|
||||
id as database::models::ids::VersionId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
let mut loader_versions = Vec::new();
|
||||
for loader in loaders {
|
||||
let loader_id = database::models::loader_fields::Loader::get_id(
|
||||
&loader.0,
|
||||
&mut *transaction,
|
||||
&redis,
|
||||
)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput("No database entry for loader provided.".to_string())
|
||||
})?;
|
||||
loader_versions.push(LoaderVersion::new(loader_id, id));
|
||||
}
|
||||
LoaderVersion::insert_many(loader_versions, &mut transaction).await?;
|
||||
|
||||
crate::database::models::Project::clear_cache(
|
||||
version_item.inner.project_id,
|
||||
None,
|
||||
None,
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if let Some(featured) = &new_version.featured {
|
||||
sqlx::query!(
|
||||
"
|
||||
UPDATE versions
|
||||
SET featured = $1
|
||||
WHERE (id = $2)
|
||||
",
|
||||
featured,
|
||||
id as database::models::ids::VersionId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if let Some(body) = &new_version.changelog {
|
||||
sqlx::query!(
|
||||
"
|
||||
UPDATE versions
|
||||
SET changelog = $1
|
||||
WHERE (id = $2)
|
||||
",
|
||||
body,
|
||||
id as database::models::ids::VersionId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if let Some(downloads) = &new_version.downloads {
|
||||
if !user.role.is_mod() {
|
||||
return Err(ApiError::CustomAuthentication(
|
||||
"You don't have permission to set the downloads of this mod".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
UPDATE versions
|
||||
SET downloads = $1
|
||||
WHERE (id = $2)
|
||||
",
|
||||
*downloads as i32,
|
||||
id as database::models::ids::VersionId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
|
||||
let diff = *downloads - (version_item.inner.downloads as u32);
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
UPDATE mods
|
||||
SET downloads = downloads + $1
|
||||
WHERE (id = $2)
|
||||
",
|
||||
diff as i32,
|
||||
version_item.inner.project_id as database::models::ids::ProjectId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if let Some(status) = &new_version.status {
|
||||
if !status.can_be_requested() {
|
||||
return Err(ApiError::InvalidInput(
|
||||
"The requested status cannot be set!".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
UPDATE versions
|
||||
SET status = $1
|
||||
WHERE (id = $2)
|
||||
",
|
||||
status.as_str(),
|
||||
id as database::models::ids::VersionId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if let Some(file_types) = &new_version.file_types {
|
||||
for file_type in file_types {
|
||||
let result = sqlx::query!(
|
||||
"
|
||||
SELECT f.id id FROM hashes h
|
||||
INNER JOIN files f ON h.file_id = f.id
|
||||
WHERE h.algorithm = $2 AND h.hash = $1
|
||||
",
|
||||
file_type.hash.as_bytes(),
|
||||
file_type.algorithm
|
||||
)
|
||||
.fetch_optional(&**pool)
|
||||
.await?
|
||||
.ok_or_else(|| {
|
||||
ApiError::InvalidInput(format!(
|
||||
"Specified file with hash {} does not exist.",
|
||||
file_type.algorithm.clone()
|
||||
))
|
||||
})?;
|
||||
|
||||
sqlx::query!(
|
||||
"
|
||||
UPDATE files
|
||||
SET file_type = $2
|
||||
WHERE (id = $1)
|
||||
",
|
||||
result.id,
|
||||
file_type.file_type.as_ref().map(|x| x.as_str()),
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(ordering) = &new_version.ordering {
|
||||
sqlx::query!(
|
||||
"
|
||||
UPDATE versions
|
||||
SET ordering = $1
|
||||
WHERE (id = $2)
|
||||
",
|
||||
ordering.to_owned() as Option<i32>,
|
||||
id as database::models::ids::VersionId,
|
||||
)
|
||||
.execute(&mut *transaction)
|
||||
.await?;
|
||||
}
|
||||
|
||||
// delete any images no longer in the changelog
|
||||
let checkable_strings: Vec<&str> = vec![&new_version.changelog]
|
||||
.into_iter()
|
||||
.filter_map(|x| x.as_ref().map(|y| y.as_str()))
|
||||
.collect();
|
||||
let context = ImageContext::Version {
|
||||
version_id: Some(version_item.inner.id.into()),
|
||||
};
|
||||
|
||||
img::delete_unused_images(context, checkable_strings, &mut transaction, &redis).await?;
|
||||
|
||||
transaction.commit().await?;
|
||||
database::models::Version::clear_cache(&version_item, &redis).await?;
|
||||
database::models::Project::clear_cache(
|
||||
version_item.inner.project_id,
|
||||
None,
|
||||
Some(true),
|
||||
&redis,
|
||||
)
|
||||
.await?;
|
||||
Ok(HttpResponse::NoContent().body(""))
|
||||
} else {
|
||||
Err(ApiError::CustomAuthentication(
|
||||
"You do not have permission to edit this version!".to_string(),
|
||||
))
|
||||
}
|
||||
} else {
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
}
|
||||
|
||||
/// Query parameters for `version_list` (GET a project's versions).
#[derive(Serialize, Deserialize)]
pub struct VersionListFilters {
    // JSON-encoded array of loader names to match, e.g. `["fabric"]`.
    pub loaders: Option<String>,
    // When set, only return versions whose `featured` flag equals this value.
    pub featured: Option<bool>,
    // Restrict to one release channel (release/beta/alpha).
    pub version_type: Option<VersionType>,
    // Pagination: maximum number of versions to return.
    pub limit: Option<usize>,
    // Pagination: number of versions to skip.
    pub offset: Option<usize>,
    /*
       Loader fields to filter with:
       "game_versions": ["1.16.5", "1.17"]

       Returns if it matches any of the values
    */
    pub loader_fields: Option<String>,
}
|
||||
|
||||
pub async fn version_list(
|
||||
req: HttpRequest,
|
||||
info: web::Path<(String,)>,
|
||||
web::Query(filters): web::Query<VersionListFilters>,
|
||||
pool: web::Data<PgPool>,
|
||||
redis: web::Data<RedisPool>,
|
||||
session_queue: web::Data<AuthQueue>,
|
||||
) -> Result<HttpResponse, ApiError> {
|
||||
let string = info.into_inner().0;
|
||||
|
||||
let result = database::models::Project::get(&string, &**pool, &redis).await?;
|
||||
|
||||
let user_option = get_user_from_headers(
|
||||
&req,
|
||||
&**pool,
|
||||
&redis,
|
||||
&session_queue,
|
||||
Some(&[Scopes::PROJECT_READ, Scopes::VERSION_READ]),
|
||||
)
|
||||
.await
|
||||
.map(|x| x.1)
|
||||
.ok();
|
||||
|
||||
if let Some(project) = result {
|
||||
if !is_visible_project(&project.inner, &user_option, &pool, false).await? {
|
||||
return Err(ApiError::NotFound);
|
||||
}
|
||||
|
||||
let loader_field_filters = filters.loader_fields.as_ref().map(|x| {
|
||||
serde_json::from_str::<HashMap<String, Vec<serde_json::Value>>>(x).unwrap_or_default()
|
||||
});
|
||||
let loader_filters = filters
|
||||
.loaders
|
||||
.as_ref()
|
||||
.map(|x| serde_json::from_str::<Vec<String>>(x).unwrap_or_default());
|
||||
let mut versions = database::models::Version::get_many(&project.versions, &**pool, &redis)
|
||||
.await?
|
||||
.into_iter()
|
||||
.skip(filters.offset.unwrap_or(0))
|
||||
.take(filters.limit.unwrap_or(usize::MAX))
|
||||
.filter(|x| {
|
||||
let mut bool = true;
|
||||
|
||||
if let Some(version_type) = filters.version_type {
|
||||
bool &= &*x.inner.version_type == version_type.as_str();
|
||||
}
|
||||
if let Some(loaders) = &loader_filters {
|
||||
bool &= x.loaders.iter().any(|y| loaders.contains(y));
|
||||
}
|
||||
if let Some(loader_fields) = &loader_field_filters {
|
||||
for (key, values) in loader_fields {
|
||||
bool &= if let Some(x_vf) =
|
||||
x.version_fields.iter().find(|y| y.field_name == *key)
|
||||
{
|
||||
values.iter().any(|v| x_vf.value.contains_json_value(v))
|
||||
} else {
|
||||
true
|
||||
};
|
||||
}
|
||||
}
|
||||
bool
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let mut response = versions
|
||||
.iter()
|
||||
.filter(|version| {
|
||||
filters
|
||||
.featured
|
||||
.map(|featured| featured == version.inner.featured)
|
||||
.unwrap_or(true)
|
||||
})
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
versions.sort_by(|a, b| b.inner.date_published.cmp(&a.inner.date_published));
|
||||
|
||||
// Attempt to populate versions with "auto featured" versions
|
||||
if response.is_empty() && !versions.is_empty() && filters.featured.unwrap_or(false) {
|
||||
// TODO: This is a bandaid fix for detecting auto-featured versions.
|
||||
// In the future, not all versions will have 'game_versions' fields, so this will need to be changed.
|
||||
let (loaders, game_versions) = futures::future::try_join(
|
||||
database::models::loader_fields::Loader::list(&**pool, &redis),
|
||||
database::models::legacy_loader_fields::MinecraftGameVersion::list(
|
||||
None,
|
||||
Some(true),
|
||||
&**pool,
|
||||
&redis,
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
|
||||
let mut joined_filters = Vec::new();
|
||||
for game_version in &game_versions {
|
||||
for loader in &loaders {
|
||||
joined_filters.push((game_version, loader))
|
||||
}
|
||||
}
|
||||
|
||||
joined_filters.into_iter().for_each(|filter| {
|
||||
versions
|
||||
.iter()
|
||||
.find(|version| {
|
||||
// TODO: This is the bandaid fix for detecting auto-featured versions.
|
||||
let game_versions = version
|
||||
.version_fields
|
||||
.iter()
|
||||
.find(|vf| vf.field_name == "game_versions")
|
||||
.map(|vf| vf.value.clone())
|
||||
.map(|v| v.as_strings())
|
||||
.unwrap_or_default();
|
||||
game_versions.contains(&filter.0.version)
|
||||
&& version.loaders.contains(&filter.1.loader)
|
||||
})
|
||||
.map(|version| response.push(version.clone()))
|
||||
.unwrap_or(());
|
||||
});
|
||||
|
||||
if response.is_empty() {
|
||||
versions
|
||||
.into_iter()
|
||||
.for_each(|version| response.push(version));
|
||||
}
|
||||
}
|
||||
|
||||
response.sort_by(|a, b| b.inner.date_published.cmp(&a.inner.date_published));
|
||||
response.dedup_by(|a, b| a.inner.id == b.inner.id);
|
||||
|
||||
let response = filter_visible_versions(response, &user_option, &pool, &redis).await?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(response))
|
||||
} else {
|
||||
Err(ApiError::NotFound)
|
||||
}
|
||||
}
|
||||
|
||||
/// DELETE handler removing a single version by id.
///
/// Requires the `VERSION_DELETE` PAT scope. Non-admin callers must also hold
/// the `DELETE_VERSION` project permission, resolved through either direct
/// team membership or membership in the project's owning organization.
///
/// On success, associated images are removed, the version is deleted inside a
/// transaction, the search index entry is dropped, and the project cache is
/// invalidated. Returns 204 No Content, or `ApiError::NotFound` if the
/// version was already gone.
pub async fn version_delete(
    req: HttpRequest,
    info: web::Path<(VersionId,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
    search_config: web::Data<SearchConfig>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::VERSION_DELETE]),
    )
    .await?
    .1;
    let id = info.into_inner().0;

    let version = database::models::Version::get(id.into(), &**pool, &redis)
        .await?
        .ok_or_else(|| {
            ApiError::InvalidInput("The specified version does not exist!".to_string())
        })?;

    // Admins bypass the per-project permission check entirely.
    if !user.role.is_admin() {
        let team_member = database::models::TeamMember::get_from_user_id_project(
            version.inner.project_id,
            user.id.into(),
            false,
            &**pool,
        )
        .await
        .map_err(ApiError::Database)?;

        // Permissions may also come from the project's owning organization,
        // so look up organization membership as a second source.
        let organization =
            Organization::get_associated_organization_project_id(version.inner.project_id, &**pool)
                .await?;

        let organization_team_member = if let Some(organization) = &organization {
            database::models::TeamMember::get_from_user_id(
                organization.team_id,
                user.id.into(),
                &**pool,
            )
            .await?
        } else {
            None
        };
        // Effective permissions = role + direct team + org team; default to
        // no permissions if the user belongs to neither.
        let permissions = ProjectPermissions::get_permissions_by_role(
            &user.role,
            &team_member,
            &organization_team_member,
        )
        .unwrap_or_default();

        if !permissions.contains(ProjectPermissions::DELETE_VERSION) {
            return Err(ApiError::CustomAuthentication(
                "You do not have permission to delete versions in this team".to_string(),
            ));
        }
    }

    // Delete the version and its attached images atomically.
    let mut transaction = pool.begin().await?;
    let context = ImageContext::Version {
        version_id: Some(version.inner.id.into()),
    };
    let uploaded_images =
        database::models::Image::get_many_contexted(context, &mut transaction).await?;
    for image in uploaded_images {
        image_item::Image::remove(image.id, &mut transaction, &redis).await?;
    }

    let result =
        database::models::Version::remove_full(version.inner.id, &redis, &mut transaction).await?;
    transaction.commit().await?;
    // Only after a successful commit: drop the search-index document and
    // invalidate the cached project (so its version list refreshes).
    remove_documents(&[version.inner.id.into()], &search_config).await?;
    database::models::Project::clear_cache(version.inner.project_id, None, Some(true), &redis)
        .await?;

    // `result` is None when the version vanished between fetch and removal.
    if result.is_some() {
        Ok(HttpResponse::NoContent().body(""))
    } else {
        Err(ApiError::NotFound)
    }
}
|
||||
Reference in New Issue
Block a user