move to monorepo dir

This commit is contained in:
Jai A
2024-10-16 14:11:42 -07:00
parent ff7975773e
commit e3a3379615
756 changed files with 0 additions and 0 deletions

View File

@@ -0,0 +1,635 @@
use super::ApiError;
use crate::database;
use crate::database::redis::RedisPool;
use crate::models::teams::ProjectPermissions;
use crate::{
auth::get_user_from_headers,
database::models::user_item,
models::{
ids::{base62_impl::to_base62, ProjectId, VersionId},
pats::Scopes,
},
queue::session::AuthQueue,
};
use actix_web::{web, HttpRequest, HttpResponse};
use chrono::{DateTime, Duration, Utc};
use serde::{Deserialize, Serialize};
use sqlx::postgres::types::PgInterval;
use sqlx::PgPool;
use std::collections::HashMap;
use std::convert::TryInto;
/// Registers all analytics routes under the `analytics` scope.
pub fn config(cfg: &mut web::ServiceConfig) {
cfg.service(
web::scope("analytics")
// Time-series endpoints, bucketed by `resolution_minutes`
.route("playtime", web::get().to(playtimes_get))
.route("views", web::get().to(views_get))
.route("downloads", web::get().to(downloads_get))
.route("revenue", web::get().to(revenue_get))
// Per-country aggregates over the whole requested date range
.route(
"countries/downloads",
web::get().to(countries_downloads_get),
)
.route("countries/views", web::get().to(countries_views_get)),
);
}
/// The json data to be passed to fetch analytic data
/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out.
/// NOTE(review): only `project_ids` exists on this struct — confirm whether version_ids queries are still supported.
/// start_date and end_date are optional, and default to two weeks ago, and the maximum date respectively.
/// resolution_minutes is optional. This refers to the window by which we are looking (every day, every minute, etc) and defaults to 1440 (1 day)
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct GetData {
// only one of project_ids or version_ids should be used
// if neither are provided, all projects the user has access to will be used
// A JSON-encoded array of base62 id strings (e.g. `["4N1tEhnO"]`), parsed with serde_json in each handler
pub project_ids: Option<String>,
pub start_date: Option<DateTime<Utc>>, // defaults to 2 weeks ago
pub end_date: Option<DateTime<Utc>>, // defaults to now
pub resolution_minutes: Option<u32>, // defaults to 1 day. Ignored in routes that do not aggregate over a resolution (eg: /countries)
}
// NOTE(review): the doc block below describes the /playtime endpoint's response
// rather than this struct; because it precedes the derive it attaches to
// `FetchedPlaytime` in rustdoc. Consider moving it onto `playtimes_get`.
/// Get playtime data for a set of projects or versions
/// Data is returned as a hashmap of project/version ids to a hashmap of days to playtime data
/// eg:
/// {
/// "4N1tEhnO": {
/// "20230824": 23
/// }
///}
/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out.
#[derive(Serialize, Deserialize, Clone)]
pub struct FetchedPlaytime {
// Time bucket for this row (unit not visible here — confirm against the ClickHouse schema)
pub time: u64,
// Total seconds of playtime within the bucket
pub total_seconds: u64,
// Seconds broken down per loader name (semantics inferred from field name — confirm)
pub loader_seconds: HashMap<String, u64>,
// Seconds broken down per game version (semantics inferred from field name — confirm)
pub game_version_seconds: HashMap<String, u64>,
// Seconds keyed by parent version id (semantics inferred from field name — confirm)
pub parent_seconds: HashMap<VersionId, u64>,
}
/// Get playtime data for a set of projects or versions.
/// Data is returned as a hashmap of project/version ids to a hashmap of days
/// to playtime data, eg:
/// {
///     "4N1tEhnO": { "20230824": 23 }
/// }
/// Unauthorized projects/versions are filtered out.
pub async fn playtimes_get(
    req: HttpRequest,
    clickhouse: web::Data<clickhouse::Client>,
    data: web::Query<GetData>,
    session_queue: web::Data<AuthQueue>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    // Requires the ANALYTICS scope.
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::ANALYTICS]),
    )
    .await
    .map(|x| x.1)?;
    // `project_ids` is a JSON-encoded array of id strings inside one query parameter.
    let project_ids = data
        .project_ids
        .as_ref()
        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
        .transpose()?;
    // Defaults: the last two weeks at a one-day resolution. The lazy forms
    // avoid evaluating `Utc::now()` when a date was supplied.
    let start_date = data
        .start_date
        .unwrap_or_else(|| Utc::now() - Duration::weeks(2));
    let end_date = data.end_date.unwrap_or_else(Utc::now);
    let resolution_minutes = data.resolution_minutes.unwrap_or(60 * 24);
    // Filter out unauthorized projects; when no ids were provided, default to
    // all projects the user has access to.
    let project_ids = filter_allowed_ids(project_ids, user, &pool, &redis, None).await?;
    // Fetch the playtimes from ClickHouse.
    let playtimes = crate::clickhouse::fetch_playtimes(
        project_ids.unwrap_or_default(),
        start_date,
        end_date,
        resolution_minutes,
        clickhouse.into_inner(),
    )
    .await?;
    // Group rows into id -> (time -> total). The entry API performs a single
    // lookup per row instead of the contains_key/insert/get_mut triple.
    let mut hm = HashMap::new();
    for playtime in playtimes {
        hm.entry(to_base62(playtime.id))
            .or_default()
            .insert(playtime.time, playtime.total);
    }
    Ok(HttpResponse::Ok().json(hm))
}
/// Get view data for a set of projects or versions.
/// Data is returned as a hashmap of project/version ids to a hashmap of days
/// to views, eg:
/// {
///     "4N1tEhnO": { "20230824": 1090 }
/// }
/// Unauthorized projects/versions are filtered out.
pub async fn views_get(
    req: HttpRequest,
    clickhouse: web::Data<clickhouse::Client>,
    data: web::Query<GetData>,
    session_queue: web::Data<AuthQueue>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    // Requires the ANALYTICS scope.
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::ANALYTICS]),
    )
    .await
    .map(|x| x.1)?;
    // `project_ids` is a JSON-encoded array of id strings inside one query parameter.
    let project_ids = data
        .project_ids
        .as_ref()
        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
        .transpose()?;
    // Defaults: the last two weeks at a one-day resolution. The lazy forms
    // avoid evaluating `Utc::now()` when a date was supplied.
    let start_date = data
        .start_date
        .unwrap_or_else(|| Utc::now() - Duration::weeks(2));
    let end_date = data.end_date.unwrap_or_else(Utc::now);
    let resolution_minutes = data.resolution_minutes.unwrap_or(60 * 24);
    // Filter out unauthorized projects; when no ids were provided, default to
    // all projects the user has access to.
    let project_ids = filter_allowed_ids(project_ids, user, &pool, &redis, None).await?;
    // Fetch the view counts from ClickHouse.
    let views = crate::clickhouse::fetch_views(
        project_ids.unwrap_or_default(),
        start_date,
        end_date,
        resolution_minutes,
        clickhouse.into_inner(),
    )
    .await?;
    // Group rows into id -> (time -> total) with a single lookup per row.
    let mut hm = HashMap::new();
    for row in views {
        hm.entry(to_base62(row.id))
            .or_default()
            .insert(row.time, row.total);
    }
    Ok(HttpResponse::Ok().json(hm))
}
/// Get download data for a set of projects or versions.
/// Data is returned as a hashmap of project/version ids to a hashmap of days
/// to downloads, eg:
/// {
///     "4N1tEhnO": { "20230824": 32 }
/// }
/// Unauthorized projects/versions are filtered out.
pub async fn downloads_get(
    req: HttpRequest,
    clickhouse: web::Data<clickhouse::Client>,
    data: web::Query<GetData>,
    session_queue: web::Data<AuthQueue>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    // Requires the ANALYTICS scope. (Named `user` for consistency with the
    // sibling handlers; authentication is mandatory here, not optional.)
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::ANALYTICS]),
    )
    .await
    .map(|x| x.1)?;
    // `project_ids` is a JSON-encoded array of id strings inside one query parameter.
    let project_ids = data
        .project_ids
        .as_ref()
        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
        .transpose()?;
    // Defaults: the last two weeks at a one-day resolution. The lazy forms
    // avoid evaluating `Utc::now()` when a date was supplied.
    let start_date = data
        .start_date
        .unwrap_or_else(|| Utc::now() - Duration::weeks(2));
    let end_date = data.end_date.unwrap_or_else(Utc::now);
    let resolution_minutes = data.resolution_minutes.unwrap_or(60 * 24);
    // Filter out unauthorized projects; when no ids were provided, default to
    // all projects the user has access to.
    let project_ids = filter_allowed_ids(project_ids, user, &pool, &redis, None).await?;
    // Fetch the download counts from ClickHouse.
    let downloads = crate::clickhouse::fetch_downloads(
        project_ids.unwrap_or_default(),
        start_date,
        end_date,
        resolution_minutes,
        clickhouse.into_inner(),
    )
    .await?;
    // Group rows into id -> (time -> total) with a single lookup per row.
    let mut hm = HashMap::new();
    for row in downloads {
        hm.entry(to_base62(row.id))
            .or_default()
            .insert(row.time, row.total);
    }
    Ok(HttpResponse::Ok().json(hm))
}
/// Get payout data for a set of projects
/// Data is returned as a hashmap of project ids to a hashmap of days to amount earned per day
/// eg:
/// {
/// "4N1tEhnO": {
/// "20230824": 0.001
/// }
///}
/// ONLY project IDs can be used. Unauthorized projects will be filtered out.
pub async fn revenue_get(
req: HttpRequest,
data: web::Query<GetData>,
session_queue: web::Data<AuthQueue>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
// Payout data requires PAYOUTS_READ, unlike the other analytics routes
// which use the ANALYTICS scope.
let user = get_user_from_headers(
&req,
&**pool,
&redis,
&session_queue,
Some(&[Scopes::PAYOUTS_READ]),
)
.await
.map(|x| x.1)?;
// `project_ids` is a JSON-encoded array of id strings inside one query parameter.
let project_ids = data
.project_ids
.as_ref()
.map(|ids| serde_json::from_str::<Vec<String>>(ids))
.transpose()?;
// Defaults: last two weeks, one-day resolution.
let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2));
let end_date = data.end_date.unwrap_or(Utc::now());
let resolution_minutes = data.resolution_minutes.unwrap_or(60 * 24);
// Round up/down to nearest duration as we are using pgadmin, does not have rounding in the fetch command
// Round start_date down to nearest resolution
let diff = start_date.timestamp() % (resolution_minutes as i64 * 60);
let start_date = start_date - Duration::seconds(diff);
// Round end_date up to nearest resolution
// NOTE(review): when end_date is already aligned (diff == 0) this still adds
// a full resolution window — confirm the extra trailing bucket is intended.
let diff = end_date.timestamp() % (resolution_minutes as i64 * 60);
let end_date = end_date + Duration::seconds((resolution_minutes as i64 * 60) - diff);
// Convert String list to list of ProjectIds or VersionIds
// - Filter out unauthorized projects/versions
// - If no project_ids or version_ids are provided, we default to all projects the user has access to
// `user` is cloned because `user.id` is still needed for the empty-ids query
// below. `Some(true)` disables the all-accessible-projects defaulting; the
// empty-ids branch queries by user_id instead.
let project_ids =
filter_allowed_ids(project_ids, user.clone(), &pool, &redis, Some(true)).await?;
// DATE_BIN needs a Postgres interval; conversion fails for out-of-range values.
let duration: PgInterval = Duration::minutes(resolution_minutes as i64)
.try_into()
.map_err(|_| ApiError::InvalidInput("Invalid resolution_minutes".to_string()))?;
// Get the revenue data
let project_ids = project_ids.unwrap_or_default();
// Row shape shared by both query branches below.
struct PayoutValue {
mod_id: Option<i64>,
amount_sum: Option<rust_decimal::Decimal>,
interval_start: Option<DateTime<Utc>>,
}
// With no explicit ids, aggregate the requesting user's own payouts;
// otherwise aggregate by the (already authorization-filtered) project ids.
let payouts_values = if project_ids.is_empty() {
sqlx::query!(
"
SELECT mod_id, SUM(amount) amount_sum, DATE_BIN($4::interval, created, TIMESTAMP '2001-01-01') AS interval_start
FROM payouts_values
WHERE user_id = $1 AND created BETWEEN $2 AND $3
GROUP by mod_id, interval_start ORDER BY interval_start
",
user.id.0 as i64,
start_date,
end_date,
duration,
)
.fetch_all(&**pool)
.await?.into_iter().map(|x| PayoutValue {
mod_id: x.mod_id,
amount_sum: x.amount_sum,
interval_start: x.interval_start,
}).collect::<Vec<_>>()
} else {
sqlx::query!(
"
SELECT mod_id, SUM(amount) amount_sum, DATE_BIN($4::interval, created, TIMESTAMP '2001-01-01') AS interval_start
FROM payouts_values
WHERE mod_id = ANY($1) AND created BETWEEN $2 AND $3
GROUP by mod_id, interval_start ORDER BY interval_start
",
&project_ids.iter().map(|x| x.0 as i64).collect::<Vec<_>>(),
start_date,
end_date,
duration,
)
.fetch_all(&**pool)
.await?.into_iter().map(|x| PayoutValue {
mod_id: x.mod_id,
amount_sum: x.amount_sum,
interval_start: x.interval_start,
}).collect::<Vec<_>>()
};
// Pre-seed the result with every requested project so ids with no payouts
// still appear (as empty maps) in the response.
let mut hm: HashMap<_, _> = project_ids
.into_iter()
.map(|x| (x.to_string(), HashMap::new()))
.collect::<HashMap<_, _>>();
for value in payouts_values {
if let Some(mod_id) = value.mod_id {
if let Some(amount) = value.amount_sum {
if let Some(interval_start) = value.interval_start {
let id_string = to_base62(mod_id as u64);
if !hm.contains_key(&id_string) {
hm.insert(id_string.clone(), HashMap::new());
}
if let Some(hm) = hm.get_mut(&id_string) {
hm.insert(interval_start.timestamp(), amount);
}
}
}
}
}
Ok(HttpResponse::Ok().json(hm))
}
/// Get country data for a set of projects or versions.
/// Data is returned as a hashmap of project/version ids to a hashmap of
/// country to downloads. Unknown countries are labeled "".
/// This is usable to see significant performing countries per project, eg:
/// {
///     "4N1tEhnO": { "CAN": 22 }
/// }
/// Unauthorized projects/versions are filtered out.
/// For this endpoint, provided dates are a range to aggregate over, not
/// specific days to fetch.
pub async fn countries_downloads_get(
    req: HttpRequest,
    clickhouse: web::Data<clickhouse::Client>,
    data: web::Query<GetData>,
    session_queue: web::Data<AuthQueue>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    // Requires the ANALYTICS scope.
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::ANALYTICS]),
    )
    .await
    .map(|x| x.1)?;
    // `project_ids` is a JSON-encoded array of id strings inside one query parameter.
    let project_ids = data
        .project_ids
        .as_ref()
        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
        .transpose()?;
    // Defaults to the last two weeks; the lazy forms avoid evaluating
    // `Utc::now()` when a date was supplied. `resolution_minutes` is ignored here.
    let start_date = data
        .start_date
        .unwrap_or_else(|| Utc::now() - Duration::weeks(2));
    let end_date = data.end_date.unwrap_or_else(Utc::now);
    // Filter out unauthorized projects; when no ids were provided, default to
    // all projects the user has access to.
    let project_ids = filter_allowed_ids(project_ids, user, &pool, &redis, None).await?;
    // Fetch per-country download counts from ClickHouse.
    let countries = crate::clickhouse::fetch_countries_downloads(
        project_ids.unwrap_or_default(),
        start_date,
        end_date,
        clickhouse.into_inner(),
    )
    .await?;
    // Group rows into id -> (country -> total) with a single lookup per row.
    let mut hm = HashMap::new();
    for row in countries {
        hm.entry(to_base62(row.id))
            .or_default()
            .insert(row.country, row.total);
    }
    // Fold low-count countries into "XX" so small populations cannot be singled out.
    let hm: HashMap<String, HashMap<String, u64>> = hm
        .into_iter()
        .map(|(key, value)| (key, condense_countries(value)))
        .collect();
    Ok(HttpResponse::Ok().json(hm))
}
/// Get country data for a set of projects or versions.
/// Data is returned as a hashmap of project/version ids to a hashmap of
/// country to views. Unknown countries are labeled "".
/// This is usable to see significant performing countries per project, eg:
/// {
///     "4N1tEhnO": { "CAN": 56165 }
/// }
/// Unauthorized projects/versions are filtered out.
/// For this endpoint, provided dates are a range to aggregate over, not
/// specific days to fetch.
pub async fn countries_views_get(
    req: HttpRequest,
    clickhouse: web::Data<clickhouse::Client>,
    data: web::Query<GetData>,
    session_queue: web::Data<AuthQueue>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    // Requires the ANALYTICS scope.
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::ANALYTICS]),
    )
    .await
    .map(|x| x.1)?;
    // `project_ids` is a JSON-encoded array of id strings inside one query parameter.
    let project_ids = data
        .project_ids
        .as_ref()
        .map(|ids| serde_json::from_str::<Vec<String>>(ids))
        .transpose()?;
    // Defaults to the last two weeks; the lazy forms avoid evaluating
    // `Utc::now()` when a date was supplied. `resolution_minutes` is ignored here.
    let start_date = data
        .start_date
        .unwrap_or_else(|| Utc::now() - Duration::weeks(2));
    let end_date = data.end_date.unwrap_or_else(Utc::now);
    // Filter out unauthorized projects; when no ids were provided, default to
    // all projects the user has access to.
    let project_ids = filter_allowed_ids(project_ids, user, &pool, &redis, None).await?;
    // Fetch per-country view counts from ClickHouse.
    let countries = crate::clickhouse::fetch_countries_views(
        project_ids.unwrap_or_default(),
        start_date,
        end_date,
        clickhouse.into_inner(),
    )
    .await?;
    // Group rows into id -> (country -> total) with a single lookup per row.
    let mut hm = HashMap::new();
    for row in countries {
        hm.entry(to_base62(row.id))
            .or_default()
            .insert(row.country, row.total);
    }
    // Fold low-count countries into "XX" so small populations cannot be singled out.
    let hm: HashMap<String, HashMap<String, u64>> = hm
        .into_iter()
        .map(|(key, value)| (key, condense_countries(value)))
        .collect();
    Ok(HttpResponse::Ok().json(hm))
}
/// Condenses per-country counts for privacy: any country whose count falls
/// below `CONDENSE_THRESHOLD` is folded into the aggregate "XX" bucket so
/// that very small populations cannot be singled out.
///
/// (The previous comment claimed a threshold of 15; the code has always used
/// 50, now named explicitly.)
fn condense_countries(countries: HashMap<String, u64>) -> HashMap<String, u64> {
    // Minimum count a country needs to be reported individually.
    const CONDENSE_THRESHOLD: u64 = 50;
    let mut hm: HashMap<String, u64> = HashMap::new();
    for (mut country, count) in countries {
        if count < CONDENSE_THRESHOLD {
            country = "XX".to_string();
        }
        // Entry API: one lookup instead of contains_key + insert + get_mut.
        *hm.entry(country).or_insert(0) += count;
    }
    hm
}
/// Resolves user-supplied id strings to the `ProjectId`s the given user may
/// view analytics for.
///
/// - `None` ids with `remove_defaults` unset/false: defaults to all projects
///   the user has access to.
/// - `remove_defaults = Some(true)`: skips that defaulting and returns `None`
///   when no ids were supplied (callers such as `revenue_get` then query by
///   user instead).
/// - Projects lacking `VIEW_ANALYTICS` permission are silently dropped rather
///   than producing an error.
async fn filter_allowed_ids(
mut project_ids: Option<Vec<String>>,
user: crate::models::users::User,
pool: &web::Data<PgPool>,
redis: &RedisPool,
remove_defaults: Option<bool>,
) -> Result<Option<Vec<ProjectId>>, ApiError> {
// If no project_ids or version_ids are provided, we default to all projects the user has *public* access to
if project_ids.is_none() && !remove_defaults.unwrap_or(false) {
project_ids = Some(
user_item::User::get_projects(user.id.into(), &***pool, redis)
.await?
.into_iter()
.map(|x| ProjectId::from(x).to_string())
.collect(),
);
}
// Convert String list to list of ProjectIds or VersionIds
// - Filter out unauthorized projects/versions
let project_ids = if let Some(project_strings) = project_ids {
let projects_data =
database::models::Project::get_many(&project_strings, &***pool, redis).await?;
// Load project-team memberships; permissions can also come from an owning
// organization's team, loaded below.
let team_ids = projects_data
.iter()
.map(|x| x.inner.team_id)
.collect::<Vec<database::models::TeamId>>();
let team_members =
database::models::TeamMember::get_from_team_full_many(&team_ids, &***pool, redis)
.await?;
let organization_ids = projects_data
.iter()
.filter_map(|x| x.inner.organization_id)
.collect::<Vec<database::models::OrganizationId>>();
let organizations =
database::models::Organization::get_many_ids(&organization_ids, &***pool, redis)
.await?;
let organization_team_ids = organizations
.iter()
.map(|x| x.team_id)
.collect::<Vec<database::models::TeamId>>();
let organization_team_members = database::models::TeamMember::get_from_team_full_many(
&organization_team_ids,
&***pool,
redis,
)
.await?;
// Keep only projects where the combined role/team/organization permissions
// include VIEW_ANALYTICS.
let ids = projects_data
.into_iter()
.filter(|project| {
let team_member = team_members
.iter()
.find(|x| x.team_id == project.inner.team_id && x.user_id == user.id.into());
let organization = project
.inner
.organization_id
.and_then(|oid| organizations.iter().find(|x| x.id == oid));
let organization_team_member = if let Some(organization) = organization {
organization_team_members
.iter()
.find(|x| x.team_id == organization.team_id && x.user_id == user.id.into())
} else {
None
};
let permissions = ProjectPermissions::get_permissions_by_role(
&user.role,
&team_member.cloned(),
&organization_team_member.cloned(),
)
.unwrap_or_default();
permissions.contains(ProjectPermissions::VIEW_ANALYTICS)
})
.map(|x| x.inner.id.into())
.collect::<Vec<_>>();
Some(ids)
} else {
None
};
// Only one of project_ids or version_ids will be Some
Ok(project_ids)
}

View File

@@ -0,0 +1,541 @@
use crate::auth::checks::is_visible_collection;
use crate::auth::{filter_visible_collections, get_user_from_headers};
use crate::database::models::{collection_item, generate_collection_id, project_item};
use crate::database::redis::RedisPool;
use crate::file_hosting::FileHost;
use crate::models::collections::{Collection, CollectionStatus};
use crate::models::ids::base62_impl::parse_base62;
use crate::models::ids::{CollectionId, ProjectId};
use crate::models::pats::Scopes;
use crate::queue::session::AuthQueue;
use crate::routes::v3::project_creation::CreateError;
use crate::routes::ApiError;
use crate::util::img::delete_old_images;
use crate::util::routes::read_from_payload;
use crate::util::validate::validation_errors_to_string;
use crate::{database, models};
use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse};
use chrono::Utc;
use itertools::Itertools;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use std::sync::Arc;
use validator::Validate;
/// Registers the collection routes: bulk fetch, creation, and per-id
/// read/update/delete plus icon management.
pub fn config(cfg: &mut web::ServiceConfig) {
cfg.route("collections", web::get().to(collections_get));
cfg.route("collection", web::post().to(collection_create));
cfg.service(
web::scope("collection")
.route("{id}", web::get().to(collection_get))
.route("{id}", web::delete().to(collection_delete))
.route("{id}", web::patch().to(collection_edit))
.route("{id}/icon", web::patch().to(collection_icon_edit))
.route("{id}/icon", web::delete().to(delete_collection_icon)),
);
}
/// Request body for creating a new collection.
#[derive(Serialize, Deserialize, Validate, Clone)]
pub struct CollectionCreateData {
#[validate(
length(min = 3, max = 64),
custom(function = "crate::util::validate::validate_name")
)]
/// The title or name of the collection.
pub name: String,
#[validate(length(min = 3, max = 255))]
/// A short description of the collection.
pub description: Option<String>,
#[validate(length(max = 32))]
#[serde(default = "Vec::new")]
/// A list of initial projects to use with the created collection
pub projects: Vec<String>,
}
/// Creates a new collection owned by the authenticated user and returns the
/// created collection as JSON. Requires the COLLECTION_CREATE scope.
pub async fn collection_create(
req: HttpRequest,
collection_create_data: web::Json<CollectionCreateData>,
client: Data<PgPool>,
redis: Data<RedisPool>,
session_queue: Data<AuthQueue>,
) -> Result<HttpResponse, CreateError> {
let collection_create_data = collection_create_data.into_inner();
// The currently logged in user
let current_user = get_user_from_headers(
&req,
&**client,
&redis,
&session_queue,
Some(&[Scopes::COLLECTION_CREATE]),
)
.await?
.1;
collection_create_data
.validate()
.map_err(|err| CreateError::InvalidInput(validation_errors_to_string(err, None)))?;
let mut transaction = client.begin().await?;
let collection_id: CollectionId = generate_collection_id(&mut transaction).await?.into();
// Resolve the requested project slugs/ids to internal ids.
// NOTE(review): ids that fail to resolve appear to be silently dropped by
// get_many rather than rejected — confirm that is intended.
let initial_project_ids = project_item::Project::get_many(
&collection_create_data.projects,
&mut *transaction,
&redis,
)
.await?
.into_iter()
.map(|x| x.inner.id.into())
.collect::<Vec<ProjectId>>();
// New collections always start in the Listed status.
let collection_builder_actual = collection_item::CollectionBuilder {
collection_id: collection_id.into(),
user_id: current_user.id.into(),
name: collection_create_data.name,
description: collection_create_data.description,
status: CollectionStatus::Listed,
projects: initial_project_ids
.iter()
.copied()
.map(|x| x.into())
.collect(),
};
// Cloned because `insert` consumes the builder while its fields are still
// needed to assemble the HTTP response below.
let collection_builder = collection_builder_actual.clone();
let now = Utc::now();
collection_builder_actual.insert(&mut transaction).await?;
let response = crate::models::collections::Collection {
id: collection_id,
user: collection_builder.user_id.into(),
name: collection_builder.name.clone(),
description: collection_builder.description.clone(),
created: now,
updated: now,
icon_url: None,
color: None,
status: collection_builder.status,
projects: initial_project_ids,
};
transaction.commit().await?;
Ok(HttpResponse::Ok().json(response))
}
/// Query parameters for the bulk collections endpoint.
#[derive(Serialize, Deserialize)]
pub struct CollectionIds {
// A JSON-encoded array of base62 collection ids, e.g. `["4N1tEhnO"]`
pub ids: String,
}
/// Fetches several collections at once by their base62 ids, returning only
/// those visible to the (optionally authenticated) caller.
pub async fn collections_get(
    req: HttpRequest,
    web::Query(ids): web::Query<CollectionIds>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // `ids` arrives as a JSON-encoded array of base62 strings; the first id
    // that fails to parse aborts the whole request.
    let raw_ids: Vec<&str> = serde_json::from_str(&ids.ids)?;
    let mut collection_ids = Vec::with_capacity(raw_ids.len());
    for raw in raw_ids {
        collection_ids.push(database::models::CollectionId(parse_base62(raw)? as i64));
    }
    let collections_data =
        database::models::Collection::get_many(&collection_ids, &**pool, &redis).await?;
    // Authentication is optional (`.ok()`): anonymous callers simply see only
    // publicly visible collections.
    let user_option = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::COLLECTION_READ]),
    )
    .await
    .map(|x| x.1)
    .ok();
    let collections = filter_visible_collections(collections_data, &user_option).await?;
    Ok(HttpResponse::Ok().json(collections))
}
/// Fetches a single collection by base62 id, returning 404 when it does not
/// exist or is not visible to the (optionally authenticated) caller.
pub async fn collection_get(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let string = info.into_inner().0;
let id = database::models::CollectionId(parse_base62(&string)? as i64);
let collection_data = database::models::Collection::get(id, &**pool, &redis).await?;
// Authentication is optional (`.ok()`): anonymous users can still view
// publicly visible collections.
let user_option = get_user_from_headers(
&req,
&**pool,
&redis,
&session_queue,
Some(&[Scopes::COLLECTION_READ]),
)
.await
.map(|x| x.1)
.ok();
if let Some(data) = collection_data {
if is_visible_collection(&data, &user_option).await? {
return Ok(HttpResponse::Ok().json(Collection::from(data)));
}
}
// Not-visible and not-found both yield 404, presumably to avoid leaking
// the existence of hidden collections.
Err(ApiError::NotFound)
}
/// Patch payload for editing a collection; every field is optional.
#[derive(Deserialize, Validate)]
pub struct EditCollection {
#[validate(
length(min = 3, max = 64),
custom(function = "crate::util::validate::validate_name")
)]
pub name: Option<String>,
// NOTE(review): max length 256 here vs 255 on CollectionCreateData — confirm
// which limit is intended.
#[validate(length(min = 3, max = 256))]
#[serde(
default,
skip_serializing_if = "Option::is_none",
with = "::serde_with::rust::double_option"
)]
// double_option distinguishes "field absent" (outer None) from an explicit
// JSON null (Some(None)), which clears the description.
pub description: Option<Option<String>>,
pub status: Option<CollectionStatus>,
#[validate(length(max = 1024))]
// When present, fully replaces the collection's project list.
pub new_projects: Option<Vec<String>>,
}
/// Edits a collection's name, description, status, and/or project list.
/// Requires the COLLECTION_WRITE scope plus ownership or moderator role.
/// Returns 204 on success, 404 when the collection does not exist.
pub async fn collection_edit(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
new_collection: web::Json<EditCollection>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(
&req,
&**pool,
&redis,
&session_queue,
Some(&[Scopes::COLLECTION_WRITE]),
)
.await?
.1;
new_collection
.validate()
.map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?;
let string = info.into_inner().0;
let id = database::models::CollectionId(parse_base62(&string)? as i64);
let result = database::models::Collection::get(id, &**pool, &redis).await?;
if let Some(collection_item) = result {
if !can_modify_collection(&collection_item, &user) {
return Ok(HttpResponse::Unauthorized().body(""));
}
let id = collection_item.id;
// All edits run inside one transaction so partial updates are never visible.
let mut transaction = pool.begin().await?;
if let Some(name) = &new_collection.name {
sqlx::query!(
"
UPDATE collections
SET name = $1
WHERE (id = $2)
",
name.trim(),
id as database::models::ids::CollectionId,
)
.execute(&mut *transaction)
.await?;
}
// Outer Some means the field was present; an inner None writes NULL and
// clears the description (see the double_option serde attribute).
if let Some(description) = &new_collection.description {
sqlx::query!(
"
UPDATE collections
SET description = $1
WHERE (id = $2)
",
description.as_ref(),
id as database::models::ids::CollectionId,
)
.execute(&mut *transaction)
.await?;
}
if let Some(status) = &new_collection.status {
// Non-moderators may only request allowed statuses from an approved state.
if !(user.role.is_mod()
|| collection_item.status.is_approved() && status.can_be_requested())
{
return Err(ApiError::CustomAuthentication(
"You don't have permission to set this status!".to_string(),
));
}
sqlx::query!(
"
UPDATE collections
SET status = $1
WHERE (id = $2)
",
status.to_string(),
id as database::models::ids::CollectionId,
)
.execute(&mut *transaction)
.await?;
}
// `new_projects` fully replaces the collection's project list.
if let Some(new_project_ids) = &new_collection.new_projects {
// Delete all existing projects
sqlx::query!(
"
DELETE FROM collections_mods
WHERE collection_id = $1
",
collection_item.id as database::models::ids::CollectionId,
)
.execute(&mut *transaction)
.await?;
// One copy of the collection id per inserted row, for UNNEST below.
let collection_item_ids = new_project_ids
.iter()
.map(|_| collection_item.id.0)
.collect_vec();
// NOTE(review): this validates projects one query at a time (N+1);
// a single get_many call would likely be cheaper.
let mut validated_project_ids = Vec::new();
for project_id in new_project_ids {
let project = database::models::Project::get(project_id, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput(format!(
"The specified project {project_id} does not exist!"
))
})?;
validated_project_ids.push(project.inner.id.0);
}
// Insert- don't throw an error if it already exists
sqlx::query!(
"
INSERT INTO collections_mods (collection_id, mod_id)
SELECT * FROM UNNEST ($1::int8[], $2::int8[])
ON CONFLICT DO NOTHING
",
&collection_item_ids[..],
&validated_project_ids[..],
)
.execute(&mut *transaction)
.await?;
sqlx::query!(
"
UPDATE collections
SET updated = NOW()
WHERE id = $1
",
collection_item.id as database::models::ids::CollectionId,
)
.execute(&mut *transaction)
.await?;
}
transaction.commit().await?;
// Invalidate the cached collection only after a successful commit.
database::models::Collection::clear_cache(collection_item.id, &redis).await?;
Ok(HttpResponse::NoContent().body(""))
} else {
Err(ApiError::NotFound)
}
}
/// Query parameter carrying the file extension of an uploaded icon image.
#[derive(Serialize, Deserialize)]
pub struct Extension {
pub ext: String,
}
/// Uploads (or replaces) a collection's icon. Raw image bytes arrive in the
/// request payload; the extension comes from the `ext` query parameter.
/// Requires COLLECTION_WRITE plus ownership or moderator role.
#[allow(clippy::too_many_arguments)]
pub async fn collection_icon_edit(
web::Query(ext): web::Query<Extension>,
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
mut payload: web::Payload,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(
&req,
&**pool,
&redis,
&session_queue,
Some(&[Scopes::COLLECTION_WRITE]),
)
.await?
.1;
let string = info.into_inner().0;
let id = database::models::CollectionId(parse_base62(&string)? as i64);
let collection_item = database::models::Collection::get(id, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified collection does not exist!".to_string())
})?;
if !can_modify_collection(&collection_item, &user) {
return Ok(HttpResponse::Unauthorized().body(""));
}
// Remove any previously uploaded icon images before storing the new one.
delete_old_images(
collection_item.icon_url,
collection_item.raw_icon_url,
&***file_host,
)
.await?;
// 262144 bytes = 256 KiB upload cap.
let bytes =
read_from_payload(&mut payload, 262144, "Icons must be smaller than 256KiB").await?;
let collection_id: CollectionId = collection_item.id.into();
// Optimize and store the image via the shared image pipeline; the 96 / 1.0
// arguments are forwarded to upload_image_optimized (see its definition for
// their exact meaning).
let upload_result = crate::util::img::upload_image_optimized(
&format!("data/{}", collection_id),
bytes.freeze(),
&ext.ext,
Some(96),
Some(1.0),
&***file_host,
)
.await?;
let mut transaction = pool.begin().await?;
sqlx::query!(
"
UPDATE collections
SET icon_url = $1, raw_icon_url = $2, color = $3
WHERE (id = $4)
",
upload_result.url,
upload_result.raw_url,
upload_result.color.map(|x| x as i32),
collection_item.id as database::models::ids::CollectionId,
)
.execute(&mut *transaction)
.await?;
transaction.commit().await?;
// Invalidate the cached collection after the commit.
database::models::Collection::clear_cache(collection_item.id, &redis).await?;
Ok(HttpResponse::NoContent().body(""))
}
/// Removes a collection's icon: deletes the stored image files and NULLs the
/// icon columns. Requires COLLECTION_WRITE plus ownership or moderator role.
pub async fn delete_collection_icon(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(
&req,
&**pool,
&redis,
&session_queue,
Some(&[Scopes::COLLECTION_WRITE]),
)
.await?
.1;
let string = info.into_inner().0;
let id = database::models::CollectionId(parse_base62(&string)? as i64);
let collection_item = database::models::Collection::get(id, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified collection does not exist!".to_string())
})?;
if !can_modify_collection(&collection_item, &user) {
return Ok(HttpResponse::Unauthorized().body(""));
}
// Delete the stored image files from the file host first.
delete_old_images(
collection_item.icon_url,
collection_item.raw_icon_url,
&***file_host,
)
.await?;
let mut transaction = pool.begin().await?;
sqlx::query!(
"
UPDATE collections
SET icon_url = NULL, raw_icon_url = NULL, color = NULL
WHERE (id = $1)
",
collection_item.id as database::models::ids::CollectionId,
)
.execute(&mut *transaction)
.await?;
transaction.commit().await?;
// Invalidate the cached collection after the commit.
database::models::Collection::clear_cache(collection_item.id, &redis).await?;
Ok(HttpResponse::NoContent().body(""))
}
/// Permanently deletes a collection. Requires the COLLECTION_DELETE scope
/// plus ownership or moderator role. Returns 204 on success, 404 when the
/// removal affected nothing.
pub async fn collection_delete(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(
&req,
&**pool,
&redis,
&session_queue,
Some(&[Scopes::COLLECTION_DELETE]),
)
.await?
.1;
let string = info.into_inner().0;
let id = database::models::CollectionId(parse_base62(&string)? as i64);
let collection = database::models::Collection::get(id, &**pool, &redis)
.await?
.ok_or_else(|| {
ApiError::InvalidInput("The specified collection does not exist!".to_string())
})?;
if !can_modify_collection(&collection, &user) {
return Ok(HttpResponse::Unauthorized().body(""));
}
let mut transaction = pool.begin().await?;
let result =
database::models::Collection::remove(collection.id, &mut transaction, &redis).await?;
transaction.commit().await?;
// Drop the cached entry regardless of whether the row existed.
database::models::Collection::clear_cache(collection.id, &redis).await?;
if result.is_some() {
Ok(HttpResponse::NoContent().body(""))
} else {
Err(ApiError::NotFound)
}
}
/// A collection may be modified by its owning user or by a moderator.
fn can_modify_collection(
    collection: &database::models::Collection,
    user: &models::users::User,
) -> bool {
    // Moderators bypass the ownership check entirely.
    if user.role.is_mod() {
        return true;
    }
    collection.user_id == user.id.into()
}

View File

@@ -0,0 +1,227 @@
use std::sync::Arc;
use super::threads::is_authorized_thread;
use crate::auth::checks::{is_team_member_project, is_team_member_version};
use crate::auth::get_user_from_headers;
use crate::database;
use crate::database::models::{project_item, report_item, thread_item, version_item};
use crate::database::redis::RedisPool;
use crate::file_hosting::FileHost;
use crate::models::ids::{ThreadMessageId, VersionId};
use crate::models::images::{Image, ImageContext};
use crate::models::reports::ReportId;
use crate::queue::session::AuthQueue;
use crate::routes::ApiError;
use crate::util::img::upload_image_optimized;
use crate::util::routes::read_from_payload;
use actix_web::{web, HttpRequest, HttpResponse};
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
/// Registers the image-upload route (`POST image`) on the service config.
pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.route("image", web::post().to(images_add));
}
/// Query parameters accepted by `images_add`.
#[derive(Serialize, Deserialize)]
pub struct ImageUpload {
    /// File extension of the uploaded image (passed to the optimizer).
    pub ext: String,
    // Context must be an allowed context
    // currently: project, version, thread_message, report
    pub context: String,
    // Optional context id to associate with
    pub project_id: Option<String>, // allow slug or id
    pub version_id: Option<VersionId>,
    pub thread_message_id: Option<ThreadMessageId>,
    pub report_id: Option<ReportId>,
}
/// Uploads an image and associates it with one of the allowed contexts
/// (`project`, `version`, `thread_message`, or `report`).
///
/// The required auth scope is derived from the context. The caller must be
/// authorized for the referenced resource (team member for projects and
/// versions, thread participant for messages and reports); otherwise an
/// authentication error is returned. On success the stored `Image` record
/// is returned as JSON.
pub async fn images_add(
    req: HttpRequest,
    web::Query(data): web::Query<ImageUpload>,
    file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
    mut payload: web::Payload,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let mut context = ImageContext::from_str(&data.context, None);

    // The scope we require depends on which context the image targets.
    let scopes = vec![context.relevant_scope()];

    let user = get_user_from_headers(&req, &**pool, &redis, &session_queue, Some(&scopes))
        .await?
        .1;

    // Attempt to associate a supplied id with the context.
    // If the context cannot be found, or the user is not authorized to upload
    // images for the context, return an error.
    match &mut context {
        ImageContext::Project { project_id } => {
            if let Some(id) = data.project_id {
                let project = project_item::Project::get(&id, &**pool, &redis).await?;
                if let Some(project) = project {
                    if is_team_member_project(&project.inner, &Some(user.clone()), &pool).await? {
                        *project_id = Some(project.inner.id.into());
                    } else {
                        return Err(ApiError::CustomAuthentication(
                            "You are not authorized to upload images for this project".to_string(),
                        ));
                    }
                } else {
                    return Err(ApiError::InvalidInput(
                        "The project could not be found.".to_string(),
                    ));
                }
            }
        }
        ImageContext::Version { version_id } => {
            if let Some(id) = data.version_id {
                let version = version_item::Version::get(id.into(), &**pool, &redis).await?;
                if let Some(version) = version {
                    if is_team_member_version(&version.inner, &Some(user.clone()), &pool, &redis)
                        .await?
                    {
                        *version_id = Some(version.inner.id.into());
                    } else {
                        return Err(ApiError::CustomAuthentication(
                            "You are not authorized to upload images for this version".to_string(),
                        ));
                    }
                } else {
                    return Err(ApiError::InvalidInput(
                        "The version could not be found.".to_string(),
                    ));
                }
            }
        }
        ImageContext::ThreadMessage { thread_message_id } => {
            if let Some(id) = data.thread_message_id {
                let thread_message = thread_item::ThreadMessage::get(id.into(), &**pool)
                    .await?
                    .ok_or_else(|| {
                        // Fixed typo: was "The thread message could not found."
                        ApiError::InvalidInput(
                            "The thread message could not be found.".to_string(),
                        )
                    })?;
                let thread = thread_item::Thread::get(thread_message.thread_id, &**pool)
                    .await?
                    .ok_or_else(|| {
                        ApiError::InvalidInput(
                            "The thread associated with the thread message could not be found"
                                .to_string(),
                        )
                    })?;
                if is_authorized_thread(&thread, &user, &pool).await? {
                    *thread_message_id = Some(thread_message.id.into());
                } else {
                    return Err(ApiError::CustomAuthentication(
                        "You are not authorized to upload images for this thread message"
                            .to_string(),
                    ));
                }
            }
        }
        ImageContext::Report { report_id } => {
            if let Some(id) = data.report_id {
                let report = report_item::Report::get(id.into(), &**pool)
                    .await?
                    .ok_or_else(|| {
                        ApiError::InvalidInput("The report could not be found.".to_string())
                    })?;
                let thread = thread_item::Thread::get(report.thread_id, &**pool)
                    .await?
                    .ok_or_else(|| {
                        ApiError::InvalidInput(
                            "The thread associated with the report could not be found.".to_string(),
                        )
                    })?;
                if is_authorized_thread(&thread, &user, &pool).await? {
                    *report_id = Some(report.id.into());
                } else {
                    return Err(ApiError::CustomAuthentication(
                        "You are not authorized to upload images for this report".to_string(),
                    ));
                }
            }
        }
        ImageContext::Unknown => {
            return Err(ApiError::InvalidInput(
                "Context must be one of: project, version, thread_message, report".to_string(),
            ));
        }
    }

    // Upload the image to the file host (payload capped at 1 MiB).
    let bytes =
        read_from_payload(&mut payload, 1_048_576, "Icons must be smaller than 1MiB").await?;

    let content_length = bytes.len();
    let upload_result = upload_image_optimized(
        "data/cached_images",
        bytes.freeze(),
        &data.ext,
        None,
        None,
        &***file_host,
    )
    .await?;

    let mut transaction = pool.begin().await?;

    let db_image: database::models::Image = database::models::Image {
        id: database::models::generate_image_id(&mut transaction).await?,
        url: upload_result.url,
        raw_url: upload_result.raw_url,
        size: content_length as u64,
        created: chrono::Utc::now(),
        owner_id: database::models::UserId::from(user.id),
        context: context.context_as_str().to_string(),
        // Only the id matching the resolved context variant is persisted;
        // the other foreign keys stay NULL.
        project_id: if let ImageContext::Project {
            project_id: Some(id),
        } = context
        {
            Some(crate::database::models::ProjectId::from(id))
        } else {
            None
        },
        version_id: if let ImageContext::Version {
            version_id: Some(id),
        } = context
        {
            Some(database::models::VersionId::from(id))
        } else {
            None
        },
        thread_message_id: if let ImageContext::ThreadMessage {
            thread_message_id: Some(id),
        } = context
        {
            Some(database::models::ThreadMessageId::from(id))
        } else {
            None
        },
        report_id: if let ImageContext::Report {
            report_id: Some(id),
        } = context
        {
            Some(database::models::ReportId::from(id))
        } else {
            None
        },
    };

    // Insert the image row, then build the API response model.
    db_image.insert(&mut transaction).await?;

    let image = Image {
        id: db_image.id.into(),
        url: db_image.url,
        size: db_image.size,
        created: db_image.created,
        owner_id: db_image.owner_id.into(),
        context,
    };

    transaction.commit().await?;

    Ok(HttpResponse::Ok().json(image))
}

View File

@@ -0,0 +1,53 @@
pub use super::ApiError;
use crate::util::cors::default_cors;
use actix_web::{web, HttpResponse};
use serde_json::json;
pub mod analytics_get;
pub mod collections;
pub mod images;
pub mod notifications;
pub mod organizations;
pub mod payouts;
pub mod project_creation;
pub mod projects;
pub mod reports;
pub mod statistics;
pub mod tags;
pub mod teams;
pub mod threads;
pub mod users;
pub mod version_creation;
pub mod version_file;
pub mod versions;
pub mod oauth_clients;
/// Mounts every v3 API route group under the `/v3` scope with default CORS.
pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.service(
        web::scope("v3")
            .wrap(default_cors())
            .configure(analytics_get::config)
            .configure(collections::config)
            .configure(images::config)
            .configure(notifications::config)
            .configure(organizations::config)
            .configure(project_creation::config)
            .configure(projects::config)
            .configure(reports::config)
            .configure(statistics::config)
            .configure(tags::config)
            .configure(teams::config)
            .configure(threads::config)
            .configure(users::config)
            .configure(version_file::config)
            .configure(payouts::config)
            // NOTE(review): `oauth_clients` and `version_creation` are declared
            // as modules above but are not configured here — presumably mounted
            // elsewhere; confirm.
            .configure(versions::config),
    );
}
/// Simple liveness endpoint returning `{"hello": "world"}`.
pub async fn hello_world() -> Result<HttpResponse, ApiError> {
    let body = json!({
        "hello": "world",
    });
    Ok(HttpResponse::Ok().json(body))
}

View File

@@ -0,0 +1,289 @@
use crate::auth::get_user_from_headers;
use crate::database;
use crate::database::redis::RedisPool;
use crate::models::ids::NotificationId;
use crate::models::notifications::Notification;
use crate::models::pats::Scopes;
use crate::queue::session::AuthQueue;
use crate::routes::ApiError;
use actix_web::{web, HttpRequest, HttpResponse};
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
/// Registers notification routes: bulk operations on `notifications` and
/// single-item operations under `notification/{id}`.
pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.route("notifications", web::get().to(notifications_get));
    cfg.route("notifications", web::patch().to(notifications_read));
    cfg.route("notifications", web::delete().to(notifications_delete));

    cfg.service(
        web::scope("notification")
            .route("{id}", web::get().to(notification_get))
            .route("{id}", web::patch().to(notification_read))
            .route("{id}", web::delete().to(notification_delete)),
    );
}
/// Query parameter carrying a JSON-encoded array of notification ids.
#[derive(Serialize, Deserialize)]
pub struct NotificationIds {
    pub ids: String,
}
/// Returns the notifications in `ids` that belong to the caller
/// (admins may fetch any). Requires the `NOTIFICATION_READ` scope.
pub async fn notifications_get(
    req: HttpRequest,
    web::Query(ids): web::Query<NotificationIds>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    use database::models::notification_item::Notification as DBNotification;
    use database::models::NotificationId as DBNotificationId;

    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::NOTIFICATION_READ]),
    )
    .await?
    .1;

    // `ids` arrives as a JSON-encoded array of API notification ids.
    let requested: Vec<DBNotificationId> = serde_json::from_str::<Vec<NotificationId>>(&ids.ids)?
        .into_iter()
        .map(DBNotificationId::from)
        .collect();

    let fetched: Vec<DBNotification> =
        database::models::notification_item::Notification::get_many(&requested, &**pool).await?;

    // Silently drop notifications the caller is not allowed to see.
    let visible: Vec<Notification> = fetched
        .into_iter()
        .filter(|n| n.user_id == user.id.into() || user.role.is_admin())
        .map(Notification::from)
        .collect();

    Ok(HttpResponse::Ok().json(visible))
}
/// Fetches a single notification. Requires `NOTIFICATION_READ`; a
/// notification that exists but belongs to someone else is reported as
/// `404` rather than `403`.
pub async fn notification_get(
    req: HttpRequest,
    info: web::Path<(NotificationId,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::NOTIFICATION_READ]),
    )
    .await?
    .1;

    let (id,) = info.into_inner();

    match database::models::notification_item::Notification::get(id.into(), &**pool).await? {
        Some(data) if user.id == data.user_id.into() || user.role.is_admin() => {
            Ok(HttpResponse::Ok().json(Notification::from(data)))
        }
        _ => Err(ApiError::NotFound),
    }
}
/// Marks a single notification as read. Requires `NOTIFICATION_WRITE`;
/// only the notification's recipient or an admin may do this.
pub async fn notification_read(
    req: HttpRequest,
    info: web::Path<(NotificationId,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::NOTIFICATION_WRITE]),
    )
    .await?
    .1;

    let (id,) = info.into_inner();

    let data = match database::models::notification_item::Notification::get(id.into(), &**pool)
        .await?
    {
        Some(data) => data,
        None => return Err(ApiError::NotFound),
    };

    // Only the recipient (or an admin) may mark it read.
    if data.user_id != user.id.into() && !user.role.is_admin() {
        return Err(ApiError::CustomAuthentication(
            "You are not authorized to read this notification!".to_string(),
        ));
    }

    let mut transaction = pool.begin().await?;
    database::models::notification_item::Notification::read(id.into(), &mut transaction, &redis)
        .await?;
    transaction.commit().await?;

    Ok(HttpResponse::NoContent().body(""))
}
/// Deletes a single notification. Requires `NOTIFICATION_WRITE`; only the
/// notification's recipient or an admin may do this.
pub async fn notification_delete(
    req: HttpRequest,
    info: web::Path<(NotificationId,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::NOTIFICATION_WRITE]),
    )
    .await?
    .1;

    let (id,) = info.into_inner();

    let data = match database::models::notification_item::Notification::get(id.into(), &**pool)
        .await?
    {
        Some(data) => data,
        None => return Err(ApiError::NotFound),
    };

    // Only the recipient (or an admin) may remove it.
    if data.user_id != user.id.into() && !user.role.is_admin() {
        return Err(ApiError::CustomAuthentication(
            "You are not authorized to delete this notification!".to_string(),
        ));
    }

    let mut transaction = pool.begin().await?;
    database::models::notification_item::Notification::remove(id.into(), &mut transaction, &redis)
        .await?;
    transaction.commit().await?;

    Ok(HttpResponse::NoContent().body(""))
}
/// Marks every notification in `ids` owned by the caller as read
/// (admins may act on any). Requires `NOTIFICATION_WRITE`.
pub async fn notifications_read(
    req: HttpRequest,
    web::Query(ids): web::Query<NotificationIds>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::NOTIFICATION_WRITE]),
    )
    .await?
    .1;

    let requested_ids = serde_json::from_str::<Vec<NotificationId>>(&ids.ids)?
        .into_iter()
        .map(|x| x.into())
        .collect::<Vec<_>>();

    let notifications_data =
        database::models::notification_item::Notification::get_many(&requested_ids, &**pool)
            .await?;

    // Retain only the notifications the caller may modify.
    let readable: Vec<database::models::ids::NotificationId> = notifications_data
        .into_iter()
        .filter(|n| n.user_id == user.id.into() || user.role.is_admin())
        .map(|n| n.id)
        .collect();

    let mut transaction = pool.begin().await?;
    database::models::notification_item::Notification::read_many(
        &readable,
        &mut transaction,
        &redis,
    )
    .await?;
    transaction.commit().await?;

    Ok(HttpResponse::NoContent().body(""))
}
/// Deletes every notification in `ids` owned by the caller
/// (admins may act on any). Requires `NOTIFICATION_WRITE`.
pub async fn notifications_delete(
    req: HttpRequest,
    web::Query(ids): web::Query<NotificationIds>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::NOTIFICATION_WRITE]),
    )
    .await?
    .1;

    let requested_ids = serde_json::from_str::<Vec<NotificationId>>(&ids.ids)?
        .into_iter()
        .map(|x| x.into())
        .collect::<Vec<_>>();

    let notifications_data =
        database::models::notification_item::Notification::get_many(&requested_ids, &**pool)
            .await?;

    // Retain only the notifications the caller may remove.
    let deletable: Vec<database::models::ids::NotificationId> = notifications_data
        .into_iter()
        .filter(|n| n.user_id == user.id.into() || user.role.is_admin())
        .map(|n| n.id)
        .collect();

    let mut transaction = pool.begin().await?;
    database::models::notification_item::Notification::remove_many(
        &deletable,
        &mut transaction,
        &redis,
    )
    .await?;
    transaction.commit().await?;

    Ok(HttpResponse::NoContent().body(""))
}

View File

@@ -0,0 +1,559 @@
use std::{collections::HashSet, fmt::Display, sync::Arc};
use actix_web::{
delete, get, patch, post,
web::{self, scope},
HttpRequest, HttpResponse,
};
use chrono::Utc;
use itertools::Itertools;
use rand::{distributions::Alphanumeric, Rng, SeedableRng};
use rand_chacha::ChaCha20Rng;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use validator::Validate;
use super::ApiError;
use crate::{
auth::{checks::ValidateAuthorized, get_user_from_headers},
database::{
models::{
generate_oauth_client_id, generate_oauth_redirect_id,
oauth_client_authorization_item::OAuthClientAuthorization,
oauth_client_item::{OAuthClient, OAuthRedirectUri},
DatabaseError, OAuthClientId, User,
},
redis::RedisPool,
},
models::{
self,
oauth_clients::{GetOAuthClientsRequest, OAuthClientCreationResult},
pats::Scopes,
},
queue::session::AuthQueue,
routes::v3::project_creation::CreateError,
util::validate::validation_errors_to_string,
};
use crate::{
file_hosting::FileHost,
models::{ids::base62_impl::parse_base62, oauth_clients::DeleteOAuthClientQueryParam},
util::routes::read_from_payload,
};
use crate::database::models::oauth_client_item::OAuthClient as DBOAuthClient;
use crate::models::ids::OAuthClientId as ApiOAuthClientId;
use crate::util::img::{delete_old_images, upload_image_optimized};
/// Mounts OAuth endpoints (the authorization flow plus client management)
/// under the `oauth` scope.
pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.service(
        scope("oauth")
            .configure(crate::auth::oauth::config)
            .service(revoke_oauth_authorization)
            .service(oauth_client_create)
            .service(oauth_client_edit)
            .service(oauth_client_delete)
            .service(oauth_client_icon_edit)
            .service(oauth_client_icon_delete)
            .service(get_client)
            .service(get_clients)
            // NOTE(review): `get_user_clients` is not registered in this scope —
            // presumably mounted from the user routes; confirm.
            .service(get_user_oauth_authorizations),
    );
}
/// Lists the OAuth clients owned by the user named in the path.
///
/// Only the user themself or an admin may view the list; other callers get
/// an authentication error. Unknown users yield `404`.
pub async fn get_user_clients(
    req: HttpRequest,
    info: web::Path<String>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let current_user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::SESSION_ACCESS]),
    )
    .await?
    .1;

    let target_user = match User::get(&info.into_inner(), &**pool, &redis).await? {
        Some(user) => user,
        None => return Err(ApiError::NotFound),
    };

    // Only the owner (or an admin) may enumerate a user's clients.
    let is_self = target_user.id == current_user.id.into();
    if !is_self && !current_user.role.is_admin() {
        return Err(ApiError::CustomAuthentication(
            "You do not have permission to see the OAuth clients of this user!".to_string(),
        ));
    }

    let clients = OAuthClient::get_all_user_clients(target_user.id, &**pool).await?;
    let response: Vec<models::oauth_clients::OAuthClient> = clients
        .into_iter()
        .map(models::oauth_clients::OAuthClient::from)
        .collect();

    Ok(HttpResponse::Ok().json(response))
}
/// Fetches a single OAuth client by id; `404` if it does not exist.
#[get("app/{id}")]
pub async fn get_client(
    id: web::Path<ApiOAuthClientId>,
    pool: web::Data<PgPool>,
) -> Result<HttpResponse, ApiError> {
    let clients = get_clients_inner(&[id.into_inner()], pool).await?;
    match clients.first() {
        Some(client) => Ok(HttpResponse::Ok().json(client)),
        None => Err(ApiError::NotFound),
    }
}
#[get("apps")]
pub async fn get_clients(
info: web::Query<GetOAuthClientsRequest>,
pool: web::Data<PgPool>,
) -> Result<HttpResponse, ApiError> {
let ids: Vec<_> = info
.ids
.iter()
.map(|id| parse_base62(id).map(ApiOAuthClientId))
.collect::<Result<_, _>>()?;
let clients = get_clients_inner(&ids, pool).await?;
Ok(HttpResponse::Ok().json(clients))
}
/// Request body for creating a new OAuth client.
#[derive(Deserialize, Validate)]
pub struct NewOAuthApp {
    /// Display name, 3-255 characters, validated by the shared name rules.
    #[validate(
        custom(function = "crate::util::validate::validate_name"),
        length(min = 3, max = 255)
    )]
    pub name: String,

    /// Maximum scopes the client may request; restricted scopes are rejected.
    #[validate(custom(function = "crate::util::validate::validate_no_restricted_scopes"))]
    pub max_scopes: Scopes,

    /// Redirect URIs permitted in the OAuth flow.
    pub redirect_uris: Vec<String>,

    /// Optional homepage URL (validated, max 255 characters).
    #[validate(
        custom(function = "crate::util::validate::validate_url"),
        length(max = 255)
    )]
    pub url: Option<String>,

    /// Optional description (max 255 characters).
    #[validate(length(max = 255))]
    pub description: Option<String>,
}
/// Creates a new OAuth client owned by the caller.
///
/// Validates the request body, generates the client id and a random
/// 32-character secret, stores only the secret's hash, and returns the
/// client together with the plaintext secret (shown this one time).
///
/// Note: the unused declared lifetime `<'a>` on the original signature has
/// been removed (clippy: `extra_unused_lifetimes`); callers are unaffected.
#[post("app")]
pub async fn oauth_client_create(
    req: HttpRequest,
    new_oauth_app: web::Json<NewOAuthApp>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, CreateError> {
    let current_user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::SESSION_ACCESS]),
    )
    .await?
    .1;

    new_oauth_app
        .validate()
        .map_err(|e| CreateError::ValidationError(validation_errors_to_string(e, None)))?;

    let mut transaction = pool.begin().await?;

    let client_id = generate_oauth_client_id(&mut transaction).await?;

    // Only the hash is persisted; the plaintext secret is returned once below.
    let client_secret = generate_oauth_client_secret();
    let client_secret_hash = DBOAuthClient::hash_secret(&client_secret);

    let redirect_uris =
        create_redirect_uris(&new_oauth_app.redirect_uris, client_id, &mut transaction).await?;

    let client = OAuthClient {
        id: client_id,
        icon_url: None,
        raw_icon_url: None,
        max_scopes: new_oauth_app.max_scopes,
        name: new_oauth_app.name.clone(),
        redirect_uris,
        created: Utc::now(),
        created_by: current_user.id.into(),
        url: new_oauth_app.url.clone(),
        description: new_oauth_app.description.clone(),
        secret_hash: client_secret_hash,
    };
    client.clone().insert(&mut transaction).await?;

    transaction.commit().await?;

    let client = models::oauth_clients::OAuthClient::from(client);

    Ok(HttpResponse::Ok().json(OAuthClientCreationResult {
        client,
        client_secret,
    }))
}
/// Deletes an OAuth client. The caller must be authorized for the client
/// (its creator); unknown ids yield `404`.
///
/// Note: the unused declared lifetime `<'a>` on the original signature has
/// been removed (clippy: `extra_unused_lifetimes`); callers are unaffected.
#[delete("app/{id}")]
pub async fn oauth_client_delete(
    req: HttpRequest,
    client_id: web::Path<ApiOAuthClientId>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let current_user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::SESSION_ACCESS]),
    )
    .await?
    .1;

    let client = OAuthClient::get(client_id.into_inner().into(), &**pool).await?;
    if let Some(client) = client {
        // Only the client's owner may delete it.
        client.validate_authorized(Some(&current_user))?;
        OAuthClient::remove(client.id, &**pool).await?;

        Ok(HttpResponse::NoContent().body(""))
    } else {
        Err(ApiError::NotFound)
    }
}
/// Partial-update body for `oauth_client_edit`.
///
/// `url` and `description` are double-`Option`s: outer `None` means
/// "leave unchanged", `Some(inner)` replaces the stored value with `inner`
/// (so `Some(None)` clears the field).
#[derive(Serialize, Deserialize, Validate)]
pub struct OAuthClientEdit {
    /// New display name, if provided (3-255 chars, shared name rules).
    #[validate(
        custom(function = "crate::util::validate::validate_name"),
        length(min = 3, max = 255)
    )]
    pub name: Option<String>,

    /// New maximum scopes, if provided; restricted scopes are rejected.
    #[validate(custom(function = "crate::util::validate::validate_no_restricted_scopes"))]
    pub max_scopes: Option<Scopes>,

    /// Full replacement set of redirect URIs (must be non-empty if provided).
    #[validate(length(min = 1))]
    pub redirect_uris: Option<Vec<String>>,

    #[validate(
        custom(function = "crate::util::validate::validate_url"),
        length(max = 255)
    )]
    pub url: Option<Option<String>>,

    #[validate(length(max = 255))]
    pub description: Option<Option<String>>,
}
/// Applies a partial update to an OAuth client.
///
/// The caller must be authorized for the client. Double-`Option` fields
/// (`url`, `description`) distinguish "leave unchanged" (outer `None`)
/// from "set/clear" (outer `Some`). Redirect URIs, when provided, fully
/// replace the stored set via `edit_redirects`.
#[patch("app/{id}")]
pub async fn oauth_client_edit(
    req: HttpRequest,
    client_id: web::Path<ApiOAuthClientId>,
    client_updates: web::Json<OAuthClientEdit>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let current_user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::SESSION_ACCESS]),
    )
    .await?
    .1;

    client_updates
        .validate()
        .map_err(|e| ApiError::Validation(validation_errors_to_string(e, None)))?;

    if let Some(existing_client) = OAuthClient::get(client_id.into_inner().into(), &**pool).await? {
        // Only the client's owner may edit it.
        existing_client.validate_authorized(Some(&current_user))?;

        // Apply only the fields the caller actually supplied.
        let mut updated_client = existing_client.clone();
        let OAuthClientEdit {
            name,
            max_scopes,
            redirect_uris,
            url,
            description,
        } = client_updates.into_inner();
        if let Some(name) = name {
            updated_client.name = name;
        }
        if let Some(max_scopes) = max_scopes {
            updated_client.max_scopes = max_scopes;
        }
        if let Some(url) = url {
            updated_client.url = url;
        }
        if let Some(description) = description {
            updated_client.description = description;
        }

        let mut transaction = pool.begin().await?;
        updated_client
            .update_editable_fields(&mut *transaction)
            .await?;

        // Redirect URIs are reconciled separately (insert new, remove stale).
        if let Some(redirects) = redirect_uris {
            edit_redirects(redirects, &existing_client, &mut transaction).await?;
        }
        transaction.commit().await?;

        Ok(HttpResponse::Ok().body(""))
    } else {
        Err(ApiError::NotFound)
    }
}
/// Query parameter carrying an uploaded file's extension.
#[derive(Serialize, Deserialize)]
pub struct Extension {
    pub ext: String,
}
/// Replaces an OAuth client's icon.
///
/// Deletes any previously stored icon files, reads the new image from the
/// request payload (max 256 KiB), uploads an optimized version, and stores
/// the resulting URLs on the client inside a transaction.
#[patch("app/{id}/icon")]
#[allow(clippy::too_many_arguments)]
pub async fn oauth_client_icon_edit(
    web::Query(ext): web::Query<Extension>,
    req: HttpRequest,
    client_id: web::Path<ApiOAuthClientId>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
    mut payload: web::Payload,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::SESSION_ACCESS]),
    )
    .await?
    .1;

    let client = OAuthClient::get((*client_id).into(), &**pool)
        .await?
        .ok_or_else(|| {
            ApiError::InvalidInput("The specified client does not exist!".to_string())
        })?;
    // Only the client's owner may change its icon.
    client.validate_authorized(Some(&user))?;

    // Remove the old icon files from the file host before uploading anew.
    delete_old_images(
        client.icon_url.clone(),
        client.raw_icon_url.clone(),
        &***file_host,
    )
    .await?;

    let bytes =
        read_from_payload(&mut payload, 262144, "Icons must be smaller than 256KiB").await?;

    let upload_result = upload_image_optimized(
        &format!("data/{}", client_id),
        bytes.freeze(),
        &ext.ext,
        Some(96),
        Some(1.0),
        &***file_host,
    )
    .await?;

    let mut transaction = pool.begin().await?;

    let mut editable_client = client.clone();
    editable_client.icon_url = Some(upload_result.url);
    editable_client.raw_icon_url = Some(upload_result.raw_url);

    editable_client
        .update_editable_fields(&mut *transaction)
        .await?;
    transaction.commit().await?;

    Ok(HttpResponse::NoContent().body(""))
}
/// Removes an OAuth client's icon: deletes the stored image files, then
/// clears both icon URLs on the client record.
#[delete("app/{id}/icon")]
pub async fn oauth_client_icon_delete(
    req: HttpRequest,
    client_id: web::Path<ApiOAuthClientId>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::SESSION_ACCESS]),
    )
    .await?
    .1;

    let client = match OAuthClient::get((*client_id).into(), &**pool).await? {
        Some(client) => client,
        None => {
            return Err(ApiError::InvalidInput(
                "The specified client does not exist!".to_string(),
            ))
        }
    };
    // Only the client's owner may remove its icon.
    client.validate_authorized(Some(&user))?;

    // Delete the stored image files first, then clear the URLs in the DB.
    delete_old_images(
        client.icon_url.clone(),
        client.raw_icon_url.clone(),
        &***file_host,
    )
    .await?;

    let mut transaction = pool.begin().await?;

    let mut updated = client.clone();
    updated.icon_url = None;
    updated.raw_icon_url = None;
    updated.update_editable_fields(&mut *transaction).await?;

    transaction.commit().await?;

    Ok(HttpResponse::NoContent().body(""))
}
/// Lists every OAuth authorization the calling user has granted.
#[get("authorizations")]
pub async fn get_user_oauth_authorizations(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let current_user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::SESSION_ACCESS]),
    )
    .await?
    .1;

    let authorizations =
        OAuthClientAuthorization::get_all_for_user(current_user.id.into(), &**pool).await?;

    // Convert DB records to the API model before serializing.
    let mapped: Vec<models::oauth_clients::OAuthClientAuthorization> = authorizations
        .into_iter()
        .map(|a| a.into())
        .collect();

    Ok(HttpResponse::Ok().json(mapped))
}
/// Revokes the calling user's authorization of a given OAuth client.
#[delete("authorizations")]
pub async fn revoke_oauth_authorization(
    req: HttpRequest,
    info: web::Query<DeleteOAuthClientQueryParam>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let current_user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::SESSION_ACCESS]),
    )
    .await?
    .1;

    let client_id = info.into_inner().client_id;
    OAuthClientAuthorization::remove(client_id.into(), current_user.id.into(), &**pool).await?;

    Ok(HttpResponse::Ok().body(""))
}
/// Generates a 32-character alphanumeric client secret from a
/// cryptographically seeded ChaCha20 RNG.
fn generate_oauth_client_secret() -> String {
    let rng = ChaCha20Rng::from_entropy();
    rng.sample_iter(&Alphanumeric)
        .take(32)
        .map(char::from)
        .collect()
}
/// Builds `OAuthRedirectUri` records for `client_id`, generating a fresh
/// database id for each URI string.
async fn create_redirect_uris(
    uri_strings: impl IntoIterator<Item = impl Display>,
    client_id: OAuthClientId,
    transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<Vec<OAuthRedirectUri>, DatabaseError> {
    let mut created = Vec::new();
    // Ids come from the transaction one at a time, so this stays a plain loop.
    for uri in uri_strings {
        let id = generate_oauth_redirect_id(transaction).await?;
        created.push(OAuthRedirectUri {
            id,
            client_id,
            uri: uri.to_string(),
        });
    }
    Ok(created)
}
/// Reconciles a client's stored redirect URIs with a new desired set.
///
/// URIs present in `redirects` but not on `existing_client` are inserted;
/// stored URIs absent from `redirects` are removed. Unchanged URIs keep
/// their existing ids. Runs entirely inside the caller's transaction.
async fn edit_redirects(
    redirects: Vec<String>,
    existing_client: &OAuthClient,
    transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), DatabaseError> {
    let updated_redirects: HashSet<String> = redirects.into_iter().collect();
    let original_redirects: HashSet<String> = existing_client
        .redirect_uris
        .iter()
        .map(|r| r.uri.to_string())
        .collect();

    // Insert only URIs that are new relative to the stored set.
    let redirects_to_add = create_redirect_uris(
        updated_redirects.difference(&original_redirects),
        existing_client.id,
        &mut *transaction,
    )
    .await?;
    OAuthClient::insert_redirect_uris(&redirects_to_add, &mut **transaction).await?;

    // Remove stored URIs no longer present in the desired set.
    let mut redirects_to_remove = existing_client.redirect_uris.clone();
    redirects_to_remove.retain(|r| !updated_redirects.contains(&r.uri));
    OAuthClient::remove_redirect_uris(redirects_to_remove.iter().map(|r| r.id), &mut **transaction)
        .await?;

    Ok(())
}
/// Looks up OAuth clients by API id and converts them to the API model.
pub async fn get_clients_inner(
    ids: &[ApiOAuthClientId],
    pool: web::Data<PgPool>,
) -> Result<Vec<models::oauth_clients::OAuthClient>, ApiError> {
    // Translate API-facing ids into database ids before querying.
    let db_ids = ids.iter().map(|i| (*i).into()).collect::<Vec<OAuthClientId>>();
    let db_clients = OAuthClient::get_many(&db_ids, &**pool).await?;
    let clients = db_clients.into_iter().map(|c| c.into()).collect();
    Ok(clients)
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,934 @@
use crate::auth::validate::get_user_record_from_bearer_token;
use crate::auth::{get_user_from_headers, AuthenticationError};
use crate::database::models::generate_payout_id;
use crate::database::redis::RedisPool;
use crate::models::ids::PayoutId;
use crate::models::pats::Scopes;
use crate::models::payouts::{PayoutMethodType, PayoutStatus};
use crate::queue::payouts::{make_aditude_request, PayoutsQueue};
use crate::queue::session::AuthQueue;
use crate::routes::ApiError;
use actix_web::{delete, get, post, web, HttpRequest, HttpResponse};
use chrono::{Datelike, Duration, TimeZone, Utc, Weekday};
use hex::ToHex;
use hmac::{Hmac, Mac, NewMac};
use reqwest::Method;
use rust_decimal::Decimal;
use serde::{Deserialize, Serialize};
use serde_json::json;
use sha2::Sha256;
use sqlx::PgPool;
use std::collections::HashMap;
/// Registers payout routes (webhooks, balance, payout lifecycle) under the
/// `payout` scope.
pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.service(
        web::scope("payout")
            .service(paypal_webhook)
            .service(tremendous_webhook)
            .service(user_payouts)
            .service(create_payout)
            .service(cancel_payout)
            .service(payment_methods)
            .service(get_balance)
            .service(platform_revenue),
    );
}
/// PayPal payouts webhook handler.
///
/// Verifies authenticity by posting the transmission headers and raw body
/// back to PayPal's `notifications/verify-webhook-signature` endpoint, then
/// updates the matching payout row's status according to the event type.
/// Responds `204` whether or not a matching payout was found.
#[post("_paypal")]
pub async fn paypal_webhook(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    payouts: web::Data<PayoutsQueue>,
    body: String,
) -> Result<HttpResponse, ApiError> {
    // All five PayPal transmission headers are required for verification.
    let auth_algo = req
        .headers()
        .get("PAYPAL-AUTH-ALGO")
        .and_then(|x| x.to_str().ok())
        .ok_or_else(|| ApiError::InvalidInput("missing auth algo".to_string()))?;

    let cert_url = req
        .headers()
        .get("PAYPAL-CERT-URL")
        .and_then(|x| x.to_str().ok())
        .ok_or_else(|| ApiError::InvalidInput("missing cert url".to_string()))?;

    let transmission_id = req
        .headers()
        .get("PAYPAL-TRANSMISSION-ID")
        .and_then(|x| x.to_str().ok())
        .ok_or_else(|| ApiError::InvalidInput("missing transmission ID".to_string()))?;

    let transmission_sig = req
        .headers()
        .get("PAYPAL-TRANSMISSION-SIG")
        .and_then(|x| x.to_str().ok())
        .ok_or_else(|| ApiError::InvalidInput("missing transmission sig".to_string()))?;

    let transmission_time = req
        .headers()
        .get("PAYPAL-TRANSMISSION-TIME")
        .and_then(|x| x.to_str().ok())
        .ok_or_else(|| ApiError::InvalidInput("missing transmission time".to_string()))?;

    #[derive(Deserialize)]
    struct WebHookResponse {
        verification_status: String,
    }

    // Ask PayPal itself to confirm the webhook's signature.
    let webhook_res = payouts
        .make_paypal_request::<(), WebHookResponse>(
            Method::POST,
            "notifications/verify-webhook-signature",
            None,
            // This is needed as serde re-orders fields, which causes the validation to fail for PayPal.
            Some(format!(
                "{{
                    \"auth_algo\": \"{auth_algo}\",
                    \"cert_url\": \"{cert_url}\",
                    \"transmission_id\": \"{transmission_id}\",
                    \"transmission_sig\": \"{transmission_sig}\",
                    \"transmission_time\": \"{transmission_time}\",
                    \"webhook_id\": \"{}\",
                    \"webhook_event\": {body}
                }}",
                dotenvy::var("PAYPAL_WEBHOOK_ID")?
            )),
            None,
        )
        .await?;

    if &webhook_res.verification_status != "SUCCESS" {
        return Err(ApiError::InvalidInput(
            "Invalid webhook signature".to_string(),
        ));
    }

    #[derive(Deserialize)]
    struct PayPalResource {
        pub payout_item_id: String,
    }

    #[derive(Deserialize)]
    struct PayPalWebhook {
        pub event_type: String,
        pub resource: PayPalResource,
    }

    let webhook = serde_json::from_str::<PayPalWebhook>(&body)?;

    match &*webhook.event_type {
        // Terminal failure states: mark the payout cancelled or failed.
        "PAYMENT.PAYOUTS-ITEM.BLOCKED"
        | "PAYMENT.PAYOUTS-ITEM.DENIED"
        | "PAYMENT.PAYOUTS-ITEM.REFUNDED"
        | "PAYMENT.PAYOUTS-ITEM.RETURNED"
        | "PAYMENT.PAYOUTS-ITEM.CANCELED" => {
            let mut transaction = pool.begin().await?;

            // Only an in-transit payout can transition to a failure state.
            // NOTE(review): amount/fee are selected but unused here — confirm intended.
            let result = sqlx::query!(
                "SELECT user_id, amount, fee FROM payouts WHERE platform_id = $1 AND status = $2",
                webhook.resource.payout_item_id,
                PayoutStatus::InTransit.as_str()
            )
            .fetch_optional(&mut *transaction)
            .await?;

            if let Some(result) = result {
                sqlx::query!(
                    "
                    UPDATE payouts
                    SET status = $1
                    WHERE platform_id = $2
                    ",
                    if &*webhook.event_type == "PAYMENT.PAYOUTS-ITEM.CANCELED" {
                        PayoutStatus::Cancelled
                    } else {
                        PayoutStatus::Failed
                    }
                    .as_str(),
                    webhook.resource.payout_item_id
                )
                .execute(&mut *transaction)
                .await?;

                transaction.commit().await?;

                // Clear the affected user's cache after the status change commits.
                crate::database::models::user_item::User::clear_caches(
                    &[(crate::database::models::UserId(result.user_id), None)],
                    &redis,
                )
                .await?;
            }
        }
        "PAYMENT.PAYOUTS-ITEM.SUCCEEDED" => {
            let mut transaction = pool.begin().await?;

            sqlx::query!(
                "
                UPDATE payouts
                SET status = $1
                WHERE platform_id = $2
                ",
                PayoutStatus::Success.as_str(),
                webhook.resource.payout_item_id
            )
            .execute(&mut *transaction)
            .await?;

            transaction.commit().await?;
        }
        // Unrecognized event types are acknowledged and ignored.
        _ => {}
    }

    Ok(HttpResponse::NoContent().finish())
}
/// Tremendous rewards webhook handler.
///
/// Verifies the HMAC-SHA256 signature over the raw request body with the
/// configured private key, then updates the matching payout's status based
/// on the event. Responds `204` whether or not a payout matched.
#[post("_tremendous")]
pub async fn tremendous_webhook(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    body: String,
) -> Result<HttpResponse, ApiError> {
    // Header format is "<algo>=<hex digest>"; take the part after '='.
    let signature = req
        .headers()
        .get("Tremendous-Webhook-Signature")
        .and_then(|x| x.to_str().ok())
        .and_then(|x| x.split('=').next_back())
        .ok_or_else(|| ApiError::InvalidInput("missing webhook signature".to_string()))?;

    let mut mac: Hmac<Sha256> =
        Hmac::new_from_slice(dotenvy::var("TREMENDOUS_PRIVATE_KEY")?.as_bytes())
            .map_err(|_| ApiError::Payments("error initializing HMAC".to_string()))?;
    mac.update(body.as_bytes());
    let request_signature = mac.finalize().into_bytes().encode_hex::<String>();

    // NOTE(review): this is a non-constant-time string comparison of an HMAC;
    // consider `Mac::verify` for timing-safe verification — confirm threat model.
    if &*request_signature != signature {
        return Err(ApiError::InvalidInput(
            "Invalid webhook signature".to_string(),
        ));
    }

    #[derive(Deserialize)]
    pub struct TremendousResource {
        pub id: String,
    }

    #[derive(Deserialize)]
    struct TremendousPayload {
        pub resource: TremendousResource,
    }

    #[derive(Deserialize)]
    struct TremendousWebhook {
        pub event: String,
        pub payload: TremendousPayload,
    }

    let webhook = serde_json::from_str::<TremendousWebhook>(&body)?;

    match &*webhook.event {
        // Failure/cancellation: flip the in-transit payout's status and clear
        // the affected user's cache.
        "REWARDS.CANCELED" | "REWARDS.DELIVERY.FAILED" => {
            let mut transaction = pool.begin().await?;

            // NOTE(review): amount/fee are selected but unused here — confirm intended.
            let result = sqlx::query!(
                "SELECT user_id, amount, fee FROM payouts WHERE platform_id = $1 AND status = $2",
                webhook.payload.resource.id,
                PayoutStatus::InTransit.as_str()
            )
            .fetch_optional(&mut *transaction)
            .await?;

            if let Some(result) = result {
                sqlx::query!(
                    "
                    UPDATE payouts
                    SET status = $1
                    WHERE platform_id = $2
                    ",
                    if &*webhook.event == "REWARDS.CANCELED" {
                        PayoutStatus::Cancelled
                    } else {
                        PayoutStatus::Failed
                    }
                    .as_str(),
                    webhook.payload.resource.id
                )
                .execute(&mut *transaction)
                .await?;

                transaction.commit().await?;

                crate::database::models::user_item::User::clear_caches(
                    &[(crate::database::models::UserId(result.user_id), None)],
                    &redis,
                )
                .await?;
            }
        }
        "REWARDS.DELIVERY.SUCCEEDED" => {
            let mut transaction = pool.begin().await?;

            sqlx::query!(
                "
                UPDATE payouts
                SET status = $1
                WHERE platform_id = $2
                ",
                PayoutStatus::Success.as_str(),
                webhook.payload.resource.id
            )
            .execute(&mut *transaction)
            .await?;

            transaction.commit().await?;
        }
        // Other events are acknowledged without action.
        _ => {}
    }

    Ok(HttpResponse::NoContent().finish())
}
/// Lists every payout belonging to the authenticated user.
#[get("")]
pub async fn user_payouts(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Only the user record is needed; the scope component of the tuple is
    // ignored beyond the PAYOUTS_READ requirement enforced here.
    let (_, user) = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::PAYOUTS_READ]),
    )
    .await?;
    // Resolve the user's payout ids, hydrate the rows, then convert each
    // database model into its public API representation.
    let ids =
        crate::database::models::payout_item::Payout::get_all_for_user(user.id.into(), &**pool)
            .await?;
    let rows = crate::database::models::payout_item::Payout::get_many(&ids, &**pool).await?;
    let response: Vec<_> = rows
        .into_iter()
        .map(crate::models::payouts::Payout::from)
        .collect();
    Ok(HttpResponse::Ok().json(response))
}
/// Request body for creating a payout (`POST` on this scope).
#[derive(Deserialize)]
pub struct Withdrawal {
    // Gross amount to withdraw; the platform fee is deducted from this.
    // Accepted as a JSON float and parsed into a Decimal.
    #[serde(with = "rust_decimal::serde::float")]
    amount: Decimal,
    // Payment platform to route through (Venmo, PayPal, Tremendous, ...).
    method: PayoutMethodType,
    // Platform-specific method id (e.g. "paypal_us" or a Tremendous product).
    method_id: String,
}
/// Creates a payout (withdrawal) for the authenticated user.
///
/// Flow:
/// 1. Authenticate via bearer token and require `PAYOUTS_WRITE`.
/// 2. Lock the user's row (`SELECT ... FOR UPDATE`) so concurrent withdrawals
///    serialize, then validate the requested amount against the balance.
/// 3. Compute the fee for the chosen method and the net transfer amount.
/// 4. Dispatch the transfer via PayPal/Venmo or Tremendous and insert a
///    `Payout` row in `InTransit` status inside the same transaction.
#[post("")]
pub async fn create_payout(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    body: web::Json<Withdrawal>,
    session_queue: web::Data<AuthQueue>,
    payouts_queue: web::Data<PayoutsQueue>,
) -> Result<HttpResponse, ApiError> {
    let (scopes, user) =
        get_user_record_from_bearer_token(&req, None, &**pool, &redis, &session_queue)
            .await?
            .ok_or_else(|| ApiError::Authentication(AuthenticationError::InvalidCredentials))?;
    if !scopes.contains(Scopes::PAYOUTS_WRITE) {
        return Err(ApiError::Authentication(
            AuthenticationError::InvalidCredentials,
        ));
    }
    let mut transaction = pool.begin().await?;
    // Row-level lock on the user for the duration of the transaction; the
    // selected balance value itself is not used here.
    sqlx::query!(
        "
        SELECT balance FROM users WHERE id = $1 FOR UPDATE
        ",
        user.id.0
    )
    .fetch_optional(&mut *transaction)
    .await?;
    // NOTE(review): the balance is computed through `pool`, outside the
    // transaction that holds the lock — confirm this still serializes as
    // intended with the FOR UPDATE above.
    let balance = get_user_balance(user.id, &pool).await?;
    // Reject overdrafts and negative amounts in one check.
    if balance.available < body.amount || body.amount < Decimal::ZERO {
        return Err(ApiError::InvalidInput(
            "You do not have enough funds to make this payout!".to_string(),
        ));
    }
    let payout_method = payouts_queue
        .get_payout_methods()
        .await?
        .into_iter()
        .find(|x| x.id == body.method_id)
        .ok_or_else(|| ApiError::InvalidInput("Invalid payment method specified!".to_string()))?;
    // Fee = clamp(percentage * amount, at least fee.min, at most fee.max).
    let fee = std::cmp::min(
        std::cmp::max(
            payout_method.fee.min,
            payout_method.fee.percentage * body.amount,
        ),
        payout_method.fee.max.unwrap_or(Decimal::MAX),
    );
    // Net amount actually sent to the user, rounded to cents.
    let transfer = (body.amount - fee).round_dp(2);
    if transfer <= Decimal::ZERO {
        return Err(ApiError::InvalidInput(
            "You need to withdraw more to cover the fee!".to_string(),
        ));
    }
    let payout_id = generate_payout_id(&mut transaction).await?;
    let payout_item = match body.method {
        // PayPal Payouts API handles both PayPal and Venmo transfers; only the
        // wallet/recipient fields differ.
        PayoutMethodType::Venmo | PayoutMethodType::PayPal => {
            let (wallet, wallet_type, address, display_address) =
                if body.method == PayoutMethodType::Venmo {
                    if let Some(venmo) = user.venmo_handle {
                        ("Venmo", "user_handle", venmo.clone(), venmo)
                    } else {
                        return Err(ApiError::InvalidInput(
                            "Venmo address has not been set for account!".to_string(),
                        ));
                    }
                } else if let Some(paypal_id) = user.paypal_id {
                    if let Some(paypal_country) = user.paypal_country {
                        // US accounts must use the US method and vice versa —
                        // the two methods carry different fee schedules.
                        if &*paypal_country == "US" && &*body.method_id != "paypal_us" {
                            return Err(ApiError::InvalidInput(
                                "Please use the US PayPal transfer option!".to_string(),
                            ));
                        } else if &*paypal_country != "US" && &*body.method_id == "paypal_us" {
                            return Err(ApiError::InvalidInput(
                                "Please use the International PayPal transfer option!".to_string(),
                            ));
                        }
                        (
                            "PayPal",
                            "paypal_id",
                            paypal_id.clone(),
                            user.paypal_email.unwrap_or(paypal_id),
                        )
                    } else {
                        return Err(ApiError::InvalidInput(
                            "Please re-link your PayPal account!".to_string(),
                        ));
                    }
                } else {
                    return Err(ApiError::InvalidInput(
                        "You have not linked a PayPal account!".to_string(),
                    ));
                };
            // Minimal views of the PayPal responses; other fields ignored.
            #[derive(Deserialize)]
            struct PayPalLink {
                href: String,
            }
            #[derive(Deserialize)]
            struct PayoutsResponse {
                pub links: Vec<PayPalLink>,
            }
            let mut payout_item = crate::database::models::payout_item::Payout {
                id: payout_id,
                user_id: user.id,
                created: Utc::now(),
                status: PayoutStatus::InTransit,
                amount: transfer,
                fee: Some(fee),
                method: Some(body.method),
                method_address: Some(display_address),
                platform_id: None,
            };
            let res: PayoutsResponse = payouts_queue.make_paypal_request(
                Method::POST,
                "payments/payouts",
                Some(
                    json!({
                        "sender_batch_header": {
                            "sender_batch_id": format!("{}-payouts", Utc::now().to_rfc3339()),
                            "email_subject": "You have received a payment from Modrinth!",
                            "email_message": "Thank you for creating projects on Modrinth. Please claim this payment within 30 days.",
                        },
                        "items": [{
                            "amount": {
                                "currency": "USD",
                                "value": transfer.to_string()
                            },
                            "receiver": address,
                            "note": "Payment from Modrinth creator monetization program",
                            "recipient_type": wallet_type,
                            "recipient_wallet": wallet,
                            "sender_item_id": crate::models::ids::PayoutId::from(payout_id),
                        }]
                    })
                ),
                None,
                None
            ).await?;
            // Follow the batch link to recover the payout item id so the
            // webhook can later match status updates to this row. Failures
            // here are deliberately ignored (best-effort).
            if let Some(link) = res.links.first() {
                #[derive(Deserialize)]
                struct PayoutItem {
                    pub payout_item_id: String,
                }
                #[derive(Deserialize)]
                struct PayoutData {
                    pub items: Vec<PayoutItem>,
                }
                if let Ok(res) = payouts_queue
                    .make_paypal_request::<(), PayoutData>(
                        Method::GET,
                        &link.href,
                        None,
                        None,
                        Some(true),
                    )
                    .await
                {
                    if let Some(data) = res.items.first() {
                        payout_item.platform_id = Some(data.payout_item_id.clone());
                    }
                }
            }
            payout_item
        }
        // Tremendous rewards are delivered by email, so a verified address is
        // required before placing the order.
        PayoutMethodType::Tremendous => {
            if let Some(email) = user.email {
                if user.email_verified {
                    let mut payout_item = crate::database::models::payout_item::Payout {
                        id: payout_id,
                        user_id: user.id,
                        created: Utc::now(),
                        status: PayoutStatus::InTransit,
                        amount: transfer,
                        fee: Some(fee),
                        method: Some(PayoutMethodType::Tremendous),
                        method_address: Some(email.clone()),
                        platform_id: None,
                    };
                    // Minimal view of the Tremendous order response.
                    #[derive(Deserialize)]
                    struct Reward {
                        pub id: String,
                    }
                    #[derive(Deserialize)]
                    struct Order {
                        pub rewards: Vec<Reward>,
                    }
                    #[derive(Deserialize)]
                    struct TremendousResponse {
                        pub order: Order,
                    }
                    let res: TremendousResponse = payouts_queue
                        .make_tremendous_request(
                            Method::POST,
                            "orders",
                            Some(json!({
                                "payment": {
                                    "funding_source_id": "BALANCE",
                                },
                                "rewards": [{
                                    "value": {
                                        "denomination": transfer
                                    },
                                    "delivery": {
                                        "method": "EMAIL"
                                    },
                                    "recipient": {
                                        "name": user.username,
                                        "email": email
                                    },
                                    "products": [
                                        &body.method_id,
                                    ],
                                    "campaign_id": dotenvy::var("TREMENDOUS_CAMPAIGN_ID")?,
                                }]
                            })),
                        )
                        .await?;
                    // Record the reward id so webhooks can match this payout.
                    if let Some(reward) = res.order.rewards.first() {
                        payout_item.platform_id = Some(reward.id.clone())
                    }
                    payout_item
                } else {
                    return Err(ApiError::InvalidInput(
                        "You must verify your account email to proceed!".to_string(),
                    ));
                }
            } else {
                return Err(ApiError::InvalidInput(
                    "You must add an email to your account to proceed!".to_string(),
                ));
            }
        }
        PayoutMethodType::Unknown => {
            return Err(ApiError::Payments(
                "Invalid payment method specified!".to_string(),
            ))
        }
    };
    payout_item.insert(&mut transaction).await?;
    transaction.commit().await?;
    // Invalidate the cached user so the reduced balance is visible.
    crate::database::models::User::clear_caches(&[(user.id, None)], &redis).await?;
    Ok(HttpResponse::NoContent().finish())
}
/// Cancels an in-transit payout, first on the payment platform, then locally.
///
/// Admins may cancel any payout; other users only their own (a foreign id
/// returns 404 rather than 403 to avoid leaking existence). The remote
/// cancellation call is made before the DB update: the local row is moved to
/// `Cancelling`, and the platform's webhook later confirms the final state.
#[delete("{id}")]
pub async fn cancel_payout(
    info: web::Path<(PayoutId,)>,
    req: HttpRequest,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    payouts: web::Data<PayoutsQueue>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::PAYOUTS_WRITE]),
    )
    .await?
    .1;
    let id = info.into_inner().0;
    let payout = crate::database::models::payout_item::Payout::get(id.into(), &**pool).await?;
    if let Some(payout) = payout {
        // Ownership check: non-admins can only touch their own payouts.
        if payout.user_id != user.id.into() && !user.role.is_admin() {
            return Ok(HttpResponse::NotFound().finish());
        }
        // A payout is only cancellable if it has both a platform id and a
        // known method, and is still in transit.
        if let Some(platform_id) = payout.platform_id {
            if let Some(method) = payout.method {
                if payout.status != PayoutStatus::InTransit {
                    return Err(ApiError::InvalidInput(
                        "Payout cannot be cancelled!".to_string(),
                    ));
                }
                match method {
                    PayoutMethodType::Venmo | PayoutMethodType::PayPal => {
                        payouts
                            .make_paypal_request::<(), ()>(
                                Method::POST,
                                &format!("payments/payouts-item/{}/cancel", platform_id),
                                None,
                                None,
                                None,
                            )
                            .await?;
                    }
                    PayoutMethodType::Tremendous => {
                        payouts
                            .make_tremendous_request::<(), ()>(
                                Method::POST,
                                &format!("rewards/{}/cancel", platform_id),
                                None,
                            )
                            .await?;
                    }
                    PayoutMethodType::Unknown => {
                        return Err(ApiError::InvalidInput(
                            "Payout cannot be cancelled!".to_string(),
                        ))
                    }
                }
                // Remote cancel succeeded — mark the row as cancelling; the
                // platform webhook will transition it to its terminal state.
                let mut transaction = pool.begin().await?;
                sqlx::query!(
                    "
                    UPDATE payouts
                    SET status = $1
                    WHERE platform_id = $2
                    ",
                    PayoutStatus::Cancelling.as_str(),
                    platform_id
                )
                .execute(&mut *transaction)
                .await?;
                transaction.commit().await?;
                Ok(HttpResponse::NoContent().finish())
            } else {
                Err(ApiError::InvalidInput(
                    "Payout cannot be cancelled!".to_string(),
                ))
            }
        } else {
            Err(ApiError::InvalidInput(
                "Payout cannot be cancelled!".to_string(),
            ))
        }
    } else {
        Ok(HttpResponse::NotFound().finish())
    }
}
/// Query parameters for filtering the payout-methods listing.
#[derive(Deserialize)]
pub struct MethodFilter {
    // When set, only methods whose supported-countries list contains this
    // value are returned; when absent, all methods are returned.
    pub country: Option<String>,
}
/// Lists available payout methods, optionally restricted to a country.
#[get("methods")]
pub async fn payment_methods(
    payouts_queue: web::Data<PayoutsQueue>,
    filter: web::Query<MethodFilter>,
) -> Result<HttpResponse, ApiError> {
    // Fetch every known method, then keep only those supported in the
    // requested country; no country filter means everything passes.
    let country = filter.country.as_ref();
    let methods: Vec<_> = payouts_queue
        .get_payout_methods()
        .await?
        .into_iter()
        .filter(|method| match country {
            Some(c) => method.supported_countries.contains(c),
            None => true,
        })
        .collect();
    Ok(HttpResponse::Ok().json(methods))
}
/// A user's creator-monetization balance, as serialized to the API.
#[derive(Serialize)]
pub struct UserBalance {
    // Funds withdrawable now: matured revenue minus amounts already paid out
    // or in transit (including their fees). Computed in `get_user_balance`.
    pub available: Decimal,
    // Revenue earned but whose availability date is still in the future.
    pub pending: Decimal,
}
/// Returns the authenticated user's available and pending payout balance.
#[get("balance")]
pub async fn get_balance(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Require read access to payouts before exposing any balance data.
    let (_, user) = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::PAYOUTS_READ]),
    )
    .await?;
    let balance = get_user_balance(user.id.into(), &pool).await?;
    Ok(HttpResponse::Ok().json(balance))
}
/// Computes a user's balance from the revenue and payout tables.
///
/// `available` = matured revenue - (withdrawn + in-transit amounts) - fees;
/// `pending` = revenue whose availability date is still in the future.
async fn get_user_balance(
    user_id: crate::database::models::ids::UserId,
    pool: &PgPool,
) -> Result<UserBalance, sqlx::Error> {
    // Revenue that has matured and could be withdrawn now.
    let matured = sqlx::query!(
        "
        SELECT SUM(amount)
        FROM payouts_values
        WHERE user_id = $1 AND date_available <= NOW()
        ",
        user_id.0
    )
    .fetch_optional(pool)
    .await?
    .and_then(|row| row.sum)
    .unwrap_or(Decimal::ZERO);
    // Revenue that has been earned but is not yet available.
    let pending = sqlx::query!(
        "
        SELECT SUM(amount)
        FROM payouts_values
        WHERE user_id = $1 AND date_available > NOW()
        ",
        user_id.0
    )
    .fetch_optional(pool)
    .await?
    .and_then(|row| row.sum)
    .unwrap_or(Decimal::ZERO);
    // Everything already paid out (or on its way out), plus the fees charged.
    let withdrawn_row = sqlx::query!(
        "
        SELECT SUM(amount) amount, SUM(fee) fee
        FROM payouts
        WHERE user_id = $1 AND (status = 'success' OR status = 'in-transit')
        ",
        user_id.0
    )
    .fetch_optional(pool)
    .await?;
    let (withdrawn, fees) = match withdrawn_row {
        Some(row) => (
            row.amount.unwrap_or(Decimal::ZERO),
            row.fee.unwrap_or(Decimal::ZERO),
        ),
        None => (Decimal::ZERO, Decimal::ZERO),
    };
    Ok(UserBalance {
        available: matured.round_dp(16) - withdrawn.round_dp(16) - fees.round_dp(16),
        pending,
    })
}
/// Response body for the platform revenue endpoint; also the shape cached in
/// Redis.
#[derive(Serialize, Deserialize)]
pub struct RevenueResponse {
    // All-time sum of creator payouts recorded in `payouts_values`.
    pub all_time: Decimal,
    // One entry per day for the last 30 days.
    pub data: Vec<RevenueData>,
}
/// A single day of platform revenue.
#[derive(Serialize, Deserialize)]
pub struct RevenueData {
    // Unix timestamp (seconds) of the day's UTC midnight.
    pub time: u64,
    // Net platform revenue for the day.
    pub revenue: Decimal,
    // Portion of that revenue allocated to creators.
    pub creator_revenue: Decimal,
}
/// Returns platform-wide revenue for the last 30 days plus the all-time
/// creator payout total. Results are cached in Redis for one hour.
#[get("platform_revenue")]
pub async fn platform_revenue(
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    let mut redis = redis.connect().await?;
    const PLATFORM_REVENUE_NAMESPACE: &str = "platform_revenue";
    // Serve from cache when possible.
    // NOTE(review): the cache is read with key "0" but written with key 0 —
    // presumably both serialize to the same Redis key; confirm.
    let res: Option<RevenueResponse> = redis
        .get_deserialized_from_json(PLATFORM_REVENUE_NAMESPACE, "0")
        .await?;
    if let Some(res) = res {
        return Ok(HttpResponse::Ok().json(res));
    }
    // All-time creator payouts recorded in the database.
    let all_time_payouts = sqlx::query!(
        "
        SELECT SUM(amount) from payouts_values
        ",
    )
    .fetch_optional(&**pool)
    .await?
    .and_then(|x| x.sum)
    .unwrap_or(Decimal::ZERO);
    // Pull 30 days of daily revenue and impression metrics from Aditude.
    let points =
        make_aditude_request(&["METRIC_REVENUE", "METRIC_IMPRESSIONS"], "30d", "1d").await?;
    // Merge the per-metric point lists into one map keyed by unix timestamp:
    // value is (revenue, impressions), either of which may be absent.
    let mut points_map = HashMap::new();
    for point in points {
        for point in point.points_list {
            let entry = points_map.entry(point.time.seconds).or_insert((None, None));
            if let Some(revenue) = point.metric.revenue {
                entry.0 = Some(revenue);
            }
            if let Some(impressions) = point.metric.impressions {
                entry.1 = Some(impressions);
            }
        }
    }
    let mut revenue_data = Vec::new();
    let now = Utc::now();
    // Build one data point per day, from yesterday back 30 days, each keyed
    // by that day's UTC midnight.
    for i in 1..=30 {
        let time = now - Duration::days(i);
        let start = time
            .date_naive()
            .and_hms_opt(0, 0, 0)
            .unwrap()
            .and_utc()
            .timestamp();
        if let Some((revenue, impressions)) = points_map.remove(&(start as u64)) {
            // Before 9/5/24, when legacy payouts were in effect.
            if start >= 1725494400 {
                let revenue = revenue.unwrap_or(Decimal::ZERO);
                let impressions = impressions.unwrap_or(0);
                // Modrinth's share of ad revenue
                let modrinth_cut = Decimal::from(1) / Decimal::from(4);
                // Clean.io fee (ad antimalware). Per 1000 impressions.
                let clean_io_fee = Decimal::from(8) / Decimal::from(1000);
                let net_revenue =
                    revenue - (clean_io_fee * Decimal::from(impressions) / Decimal::from(1000));
                let payout = net_revenue * (Decimal::from(1) - modrinth_cut);
                revenue_data.push(RevenueData {
                    time: start as u64,
                    revenue: net_revenue,
                    creator_revenue: payout,
                });
                continue;
            }
        }
        // Days without ad data, or before the cutover, use the legacy model.
        revenue_data.push(get_legacy_data_point(start as u64));
    }
    let res = RevenueResponse {
        all_time: all_time_payouts,
        data: revenue_data,
    };
    redis
        .set_serialized_to_json(PLATFORM_REVENUE_NAMESPACE, 0, &res, Some(60 * 60))
        .await?;
    Ok(HttpResponse::Ok().json(res))
}
/// Synthesizes a revenue point for days under the legacy payout model, where
/// creators were paid from a fixed monthly budget with a weekend bonus.
fn get_legacy_data_point(timestamp: u64) -> RevenueData {
    let day = Utc.timestamp_opt(timestamp as i64, 0).unwrap();
    // Fixed $10,000 budget spread over a 28-day month: 20 weekdays at the
    // base rate and 8 weekend days at 1.25x the base rate.
    let budget = Decimal::from(10_000);
    let month_days = Decimal::from(28);
    let weekday_count = Decimal::from(20);
    let bonus = Decimal::from(5) / Decimal::from(4);
    let base_rate = budget / (weekday_count + bonus * (month_days - weekday_count));
    let is_weekend = matches!(day.weekday(), Weekday::Sat | Weekday::Sun);
    let payout = if is_weekend { base_rate * bonus } else { base_rate };
    RevenueData {
        time: timestamp,
        revenue: payout,
        // Creators received 90% of the listed revenue under this model.
        creator_revenue: payout * (Decimal::from(9) / Decimal::from(10)),
    }
}

View File

@@ -0,0 +1,984 @@
use super::version_creation::{try_create_version_fields, InitialVersionData};
use crate::auth::{get_user_from_headers, AuthenticationError};
use crate::database::models::loader_fields::{Loader, LoaderField, LoaderFieldEnumValue};
use crate::database::models::thread_item::ThreadBuilder;
use crate::database::models::{self, image_item, User};
use crate::database::redis::RedisPool;
use crate::file_hosting::{FileHost, FileHostingError};
use crate::models::error::ApiError;
use crate::models::ids::base62_impl::to_base62;
use crate::models::ids::{ImageId, OrganizationId};
use crate::models::images::{Image, ImageContext};
use crate::models::pats::Scopes;
use crate::models::projects::{
License, Link, MonetizationStatus, ProjectId, ProjectStatus, VersionId, VersionStatus,
};
use crate::models::teams::{OrganizationPermissions, ProjectPermissions};
use crate::models::threads::ThreadType;
use crate::models::users::UserId;
use crate::queue::session::AuthQueue;
use crate::search::indexing::IndexingError;
use crate::util::img::upload_image_optimized;
use crate::util::routes::read_from_field;
use crate::util::validate::validation_errors_to_string;
use actix_multipart::{Field, Multipart};
use actix_web::http::StatusCode;
use actix_web::web::{self, Data};
use actix_web::{HttpRequest, HttpResponse};
use chrono::Utc;
use futures::stream::StreamExt;
use image::ImageError;
use itertools::Itertools;
use rust_decimal::Decimal;
use serde::{Deserialize, Serialize};
use sqlx::postgres::PgPool;
use std::collections::HashMap;
use std::sync::Arc;
use thiserror::Error;
use validator::Validate;
/// Registers the project-creation route on the application.
pub fn config(cfg: &mut actix_web::web::ServiceConfig) {
    // `cfg.route(path, route)` is shorthand for exactly this resource+route
    // registration: POST /project -> project_create.
    cfg.service(web::resource("project").route(web::post().to(project_create)));
}
/// Errors that can occur while creating a project (or its initial versions).
/// Each variant maps to an HTTP status and a stable error code in the
/// `ResponseError` impl below.
#[derive(Error, Debug)]
pub enum CreateError {
    #[error("Environment Error")]
    EnvError(#[from] dotenvy::Error),
    #[error("An unknown database error occurred")]
    SqlxDatabaseError(#[from] sqlx::Error),
    #[error("Database Error: {0}")]
    DatabaseError(#[from] models::DatabaseError),
    #[error("Indexing Error: {0}")]
    IndexingError(#[from] IndexingError),
    #[error("Error while parsing multipart payload: {0}")]
    MultipartError(#[from] actix_multipart::MultipartError),
    #[error("Error while parsing JSON: {0}")]
    SerDeError(#[from] serde_json::Error),
    #[error("Error while validating input: {0}")]
    ValidationError(String),
    #[error("Error while uploading file: {0}")]
    FileHostingError(#[from] FileHostingError),
    #[error("Error while validating uploaded file: {0}")]
    FileValidationError(#[from] crate::validate::ValidationError),
    // Raised when a required multipart field (e.g. `data`) is absent.
    #[error("{}", .0)]
    MissingValueError(String),
    #[error("Invalid format for image: {0}")]
    InvalidIconFormat(String),
    #[error("Error with multipart data: {0}")]
    InvalidInput(String),
    #[error("Invalid game version: {0}")]
    InvalidGameVersion(String),
    #[error("Invalid loader: {0}")]
    InvalidLoader(String),
    #[error("Invalid category: {0}")]
    InvalidCategory(String),
    #[error("Invalid file type for version file: {0}")]
    InvalidFileType(String),
    // The requested slug is taken, or collides with an existing project id.
    #[error("Slug is already taken!")]
    SlugCollision,
    #[error("Authentication Error: {0}")]
    Unauthorized(#[from] AuthenticationError),
    #[error("Authentication Error: {0}")]
    CustomAuthenticationError(String),
    #[error("Image Parsing Error: {0}")]
    ImageError(#[from] ImageError),
    #[error("Reroute Error: {0}")]
    RerouteError(#[from] reqwest::Error),
}
impl actix_web::ResponseError for CreateError {
    // Maps each error variant to an HTTP status: internal faults -> 500,
    // malformed client input -> 400, auth failures -> 401.
    fn status_code(&self) -> StatusCode {
        match self {
            CreateError::EnvError(..) => StatusCode::INTERNAL_SERVER_ERROR,
            CreateError::SqlxDatabaseError(..) => StatusCode::INTERNAL_SERVER_ERROR,
            CreateError::DatabaseError(..) => StatusCode::INTERNAL_SERVER_ERROR,
            CreateError::IndexingError(..) => StatusCode::INTERNAL_SERVER_ERROR,
            CreateError::FileHostingError(..) => StatusCode::INTERNAL_SERVER_ERROR,
            CreateError::SerDeError(..) => StatusCode::BAD_REQUEST,
            CreateError::MultipartError(..) => StatusCode::BAD_REQUEST,
            CreateError::MissingValueError(..) => StatusCode::BAD_REQUEST,
            CreateError::InvalidIconFormat(..) => StatusCode::BAD_REQUEST,
            CreateError::InvalidInput(..) => StatusCode::BAD_REQUEST,
            CreateError::InvalidGameVersion(..) => StatusCode::BAD_REQUEST,
            CreateError::InvalidLoader(..) => StatusCode::BAD_REQUEST,
            CreateError::InvalidCategory(..) => StatusCode::BAD_REQUEST,
            CreateError::InvalidFileType(..) => StatusCode::BAD_REQUEST,
            CreateError::Unauthorized(..) => StatusCode::UNAUTHORIZED,
            CreateError::CustomAuthenticationError(..) => StatusCode::UNAUTHORIZED,
            CreateError::SlugCollision => StatusCode::BAD_REQUEST,
            CreateError::ValidationError(..) => StatusCode::BAD_REQUEST,
            CreateError::FileValidationError(..) => StatusCode::BAD_REQUEST,
            CreateError::ImageError(..) => StatusCode::BAD_REQUEST,
            CreateError::RerouteError(..) => StatusCode::INTERNAL_SERVER_ERROR,
        }
    }
    // Serializes the error as the shared `ApiError` JSON body, pairing a
    // stable machine-readable code with the human-readable message.
    fn error_response(&self) -> HttpResponse {
        HttpResponse::build(self.status_code()).json(ApiError {
            error: match self {
                CreateError::EnvError(..) => "environment_error",
                CreateError::SqlxDatabaseError(..) => "database_error",
                CreateError::DatabaseError(..) => "database_error",
                CreateError::IndexingError(..) => "indexing_error",
                CreateError::FileHostingError(..) => "file_hosting_error",
                CreateError::SerDeError(..) => "invalid_input",
                CreateError::MultipartError(..) => "invalid_input",
                CreateError::MissingValueError(..) => "invalid_input",
                CreateError::InvalidIconFormat(..) => "invalid_input",
                CreateError::InvalidInput(..) => "invalid_input",
                CreateError::InvalidGameVersion(..) => "invalid_input",
                CreateError::InvalidLoader(..) => "invalid_input",
                CreateError::InvalidCategory(..) => "invalid_input",
                CreateError::InvalidFileType(..) => "invalid_input",
                CreateError::Unauthorized(..) => "unauthorized",
                CreateError::CustomAuthenticationError(..) => "unauthorized",
                CreateError::SlugCollision => "invalid_input",
                CreateError::ValidationError(..) => "invalid_input",
                CreateError::FileValidationError(..) => "invalid_input",
                CreateError::ImageError(..) => "invalid_image",
                CreateError::RerouteError(..) => "reroute_error",
            },
            description: self.to_string(),
        })
    }
}
/// Default project type used when the client does not supply one: `"mod"`.
pub fn default_project_type() -> String {
    String::from("mod")
}
// Serde default for `ProjectCreateData::requested_status`: when omitted, the
// project requests the `Approved` status once reviewed.
fn default_requested_status() -> ProjectStatus {
    ProjectStatus::Approved
}
/// The `data` multipart field of a project-creation request. The `mod_*`
/// serde aliases preserve compatibility with the older field names.
#[derive(Serialize, Deserialize, Validate, Clone)]
pub struct ProjectCreateData {
    #[validate(
        length(min = 3, max = 64),
        custom(function = "crate::util::validate::validate_name")
    )]
    #[serde(alias = "mod_name")]
    /// The title or name of the project.
    pub name: String,
    #[validate(
        length(min = 3, max = 64),
        regex = "crate::util::validate::RE_URL_SAFE"
    )]
    #[serde(alias = "mod_slug")]
    /// The slug of a project, used for vanity URLs
    pub slug: String,
    #[validate(length(min = 3, max = 255))]
    #[serde(alias = "mod_description")]
    /// A short description of the project.
    pub summary: String,
    #[validate(length(max = 65536))]
    #[serde(alias = "mod_body")]
    /// A long description of the project, in markdown.
    pub description: String,
    #[validate(length(max = 32))]
    #[validate]
    /// A list of initial versions to upload with the created project
    pub initial_versions: Vec<InitialVersionData>,
    #[validate(length(max = 3))]
    /// A list of the categories that the project is in.
    pub categories: Vec<String>,
    #[validate(length(max = 256))]
    #[serde(default = "Vec::new")]
    /// A list of additional (secondary) categories that the project is in.
    pub additional_categories: Vec<String>,
    /// An optional link to the project's license page
    pub license_url: Option<String>,
    /// An optional list of all donation links the project has
    #[validate(custom(function = "crate::util::validate::validate_url_hashmap_values"))]
    #[serde(default)]
    pub link_urls: HashMap<String, String>,
    /// An optional boolean. If true, the project will be created as a draft.
    pub is_draft: Option<bool>,
    /// The license id that the project follows
    pub license_id: String,
    #[validate(length(max = 64))]
    #[validate]
    /// The multipart names of the gallery items to upload
    pub gallery_items: Option<Vec<NewGalleryItem>>,
    #[serde(default = "default_requested_status")]
    /// The status of the mod to be set once it is approved
    pub requested_status: ProjectStatus,
    // Associations to uploaded images in body/description
    #[validate(length(max = 10))]
    #[serde(default)]
    pub uploaded_images: Vec<ImageId>,
    /// The id of the organization to create the project in
    pub organization_id: Option<OrganizationId>,
}
/// Metadata for one gallery image supplied alongside the multipart upload;
/// `item` names the multipart field that carries the image bytes.
#[derive(Serialize, Deserialize, Validate, Clone)]
pub struct NewGalleryItem {
    /// The name of the multipart item where the gallery media is located
    pub item: String,
    /// Whether the gallery item should show in search or not
    pub featured: bool,
    #[validate(length(min = 1, max = 2048))]
    /// The title of the gallery item
    pub name: Option<String>,
    #[validate(length(min = 1, max = 2048))]
    /// The description of the gallery item
    pub description: Option<String>,
    // Sort position of this item within the project's gallery.
    pub ordering: i64,
}
/// Record of a file pushed to the file host during creation, kept so the
/// upload can be deleted again if project creation fails (see `undo_uploads`).
pub struct UploadedFile {
    // Host-side file/version id passed to `delete_file_version`.
    pub file_id: String,
    // File name/path on the host, also passed to `delete_file_version`.
    pub file_name: String,
}
pub async fn undo_uploads(
file_host: &dyn FileHost,
uploaded_files: &[UploadedFile],
) -> Result<(), CreateError> {
for file in uploaded_files {
file_host
.delete_file_version(&file.file_id, &file.file_name)
.await?;
}
Ok(())
}
/// Route handler for `POST /project`: creates a project from a multipart
/// payload. All database work happens in one transaction; on any error the
/// files already uploaded to the CDN are deleted and the transaction is
/// rolled back, otherwise it is committed.
pub async fn project_create(
    req: HttpRequest,
    mut payload: Multipart,
    client: Data<PgPool>,
    redis: Data<RedisPool>,
    file_host: Data<Arc<dyn FileHost + Send + Sync>>,
    session_queue: Data<AuthQueue>,
) -> Result<HttpResponse, CreateError> {
    let mut transaction = client.begin().await?;
    // Collects every file pushed to the file host so failures can be undone.
    let mut uploaded_files = Vec::new();
    let result = project_create_inner(
        req,
        &mut payload,
        &mut transaction,
        &***file_host,
        &mut uploaded_files,
        &client,
        &redis,
        &session_queue,
    )
    .await;
    if result.is_err() {
        // Clean up uploaded files first, then roll back the transaction.
        // An undo failure takes precedence over a rollback failure, which in
        // turn takes precedence over the inner error.
        let undo_result = undo_uploads(&***file_host, &uploaded_files).await;
        let rollback_result = transaction.rollback().await;
        undo_result?;
        if let Err(e) = rollback_result {
            return Err(e.into());
        }
    } else {
        transaction.commit().await?;
    }
    result
}
/*
Project Creation Steps:
Get logged in user
Must match the author in the version creation
1. Data
- Gets "data" field from multipart form; must be first
- Verification: string lengths
- Create versions
- Some shared logic with version creation
- Create list of VersionBuilders
- Create ProjectBuilder
2. Upload
- Icon: check file format & size
- Upload to backblaze & record URL
- Project files
- Check for matching version
- File size limits?
- Check file type
- Eventually, malware scan
- Upload to backblaze & create VersionFileBuilder
-
3. Creation
- Database stuff
- Add project data to indexing queue
*/
#[allow(clippy::too_many_arguments)]
async fn project_create_inner(
req: HttpRequest,
payload: &mut Multipart,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
file_host: &dyn FileHost,
uploaded_files: &mut Vec<UploadedFile>,
pool: &PgPool,
redis: &RedisPool,
session_queue: &AuthQueue,
) -> Result<HttpResponse, CreateError> {
// The base URL for files uploaded to backblaze
let cdn_url = dotenvy::var("CDN_URL")?;
// The currently logged in user
let current_user = get_user_from_headers(
&req,
pool,
redis,
session_queue,
Some(&[Scopes::PROJECT_CREATE]),
)
.await?
.1;
let project_id: ProjectId = models::generate_project_id(transaction).await?.into();
let all_loaders = models::loader_fields::Loader::list(&mut **transaction, redis).await?;
let project_create_data: ProjectCreateData;
let mut versions;
let mut versions_map = std::collections::HashMap::new();
let mut gallery_urls = Vec::new();
{
// The first multipart field must be named "data" and contain a
// JSON `ProjectCreateData` object.
let mut field = payload
.next()
.await
.map(|m| m.map_err(CreateError::MultipartError))
.unwrap_or_else(|| {
Err(CreateError::MissingValueError(String::from(
"No `data` field in multipart upload",
)))
})?;
let content_disposition = field.content_disposition();
let name = content_disposition
.get_name()
.ok_or_else(|| CreateError::MissingValueError(String::from("Missing content name")))?;
if name != "data" {
return Err(CreateError::InvalidInput(String::from(
"`data` field must come before file fields",
)));
}
let mut data = Vec::new();
while let Some(chunk) = field.next().await {
data.extend_from_slice(&chunk.map_err(CreateError::MultipartError)?);
}
let create_data: ProjectCreateData = serde_json::from_slice(&data)?;
create_data
.validate()
.map_err(|err| CreateError::InvalidInput(validation_errors_to_string(err, None)))?;
let slug_project_id_option: Option<ProjectId> =
serde_json::from_str(&format!("\"{}\"", create_data.slug)).ok();
if let Some(slug_project_id) = slug_project_id_option {
let slug_project_id: models::ids::ProjectId = slug_project_id.into();
let results = sqlx::query!(
"
SELECT EXISTS(SELECT 1 FROM mods WHERE id=$1)
",
slug_project_id as models::ids::ProjectId
)
.fetch_one(&mut **transaction)
.await
.map_err(|e| CreateError::DatabaseError(e.into()))?;
if results.exists.unwrap_or(false) {
return Err(CreateError::SlugCollision);
}
}
{
let results = sqlx::query!(
"
SELECT EXISTS(SELECT 1 FROM mods WHERE slug = LOWER($1))
",
create_data.slug
)
.fetch_one(&mut **transaction)
.await
.map_err(|e| CreateError::DatabaseError(e.into()))?;
if results.exists.unwrap_or(false) {
return Err(CreateError::SlugCollision);
}
}
// Create VersionBuilders for the versions specified in `initial_versions`
versions = Vec::with_capacity(create_data.initial_versions.len());
for (i, data) in create_data.initial_versions.iter().enumerate() {
// Create a map of multipart field names to version indices
for name in &data.file_parts {
if versions_map.insert(name.to_owned(), i).is_some() {
// If the name is already used
return Err(CreateError::InvalidInput(String::from(
"Duplicate multipart field name",
)));
}
}
versions.push(
create_initial_version(
data,
project_id,
current_user.id,
&all_loaders,
transaction,
redis,
)
.await?,
);
}
project_create_data = create_data;
}
let mut icon_data = None;
let mut error = None;
while let Some(item) = payload.next().await {
let mut field: Field = item?;
if error.is_some() {
continue;
}
let result = async {
let content_disposition = field.content_disposition().clone();
let name = content_disposition.get_name().ok_or_else(|| {
CreateError::MissingValueError("Missing content name".to_string())
})?;
let (file_name, file_extension) =
super::version_creation::get_name_ext(&content_disposition)?;
if name == "icon" {
if icon_data.is_some() {
return Err(CreateError::InvalidInput(String::from(
"Projects can only have one icon",
)));
}
// Upload the icon to the cdn
icon_data = Some(
process_icon_upload(
uploaded_files,
project_id.0,
file_extension,
file_host,
field,
)
.await?,
);
return Ok(());
}
if let Some(gallery_items) = &project_create_data.gallery_items {
if gallery_items.iter().filter(|a| a.featured).count() > 1 {
return Err(CreateError::InvalidInput(String::from(
"Only one gallery image can be featured.",
)));
}
if let Some(item) = gallery_items.iter().find(|x| x.item == name) {
let data = read_from_field(
&mut field,
2 * (1 << 20),
"Gallery image exceeds the maximum of 2MiB.",
)
.await?;
let (_, file_extension) =
super::version_creation::get_name_ext(&content_disposition)?;
let url = format!("data/{project_id}/images");
let upload_result = upload_image_optimized(
&url,
data.freeze(),
file_extension,
Some(350),
Some(1.0),
file_host,
)
.await
.map_err(|e| CreateError::InvalidIconFormat(e.to_string()))?;
uploaded_files.push(UploadedFile {
file_id: upload_result.raw_url_path.clone(),
file_name: upload_result.raw_url_path,
});
gallery_urls.push(crate::models::projects::GalleryItem {
url: upload_result.url,
raw_url: upload_result.raw_url,
featured: item.featured,
name: item.name.clone(),
description: item.description.clone(),
created: Utc::now(),
ordering: item.ordering,
});
return Ok(());
}
}
let index = if let Some(i) = versions_map.get(name) {
*i
} else {
return Err(CreateError::InvalidInput(format!(
"File `{file_name}` (field {name}) isn't specified in the versions data"
)));
};
// `index` is always valid for these lists
let created_version = versions.get_mut(index).unwrap();
let version_data = project_create_data.initial_versions.get(index).unwrap();
// TODO: maybe redundant is this calculation done elsewhere?
let existing_file_names = created_version
.files
.iter()
.map(|x| x.filename.clone())
.collect();
// Upload the new jar file
super::version_creation::upload_file(
&mut field,
file_host,
version_data.file_parts.len(),
uploaded_files,
&mut created_version.files,
&mut created_version.dependencies,
&cdn_url,
&content_disposition,
project_id,
created_version.version_id.into(),
&created_version.version_fields,
version_data.loaders.clone(),
version_data.primary_file.is_some(),
version_data.primary_file.as_deref() == Some(name),
None,
existing_file_names,
transaction,
redis,
)
.await?;
Ok(())
}
.await;
if result.is_err() {
error = result.err();
}
}
if let Some(error) = error {
return Err(error);
}
{
// Check to make sure that all specified files were uploaded
for (version_data, builder) in project_create_data
.initial_versions
.iter()
.zip(versions.iter())
{
if version_data.file_parts.len() != builder.files.len() {
return Err(CreateError::InvalidInput(String::from(
"Some files were specified in initial_versions but not uploaded",
)));
}
}
// Convert the list of category names to actual categories
let mut categories = Vec::with_capacity(project_create_data.categories.len());
for category in &project_create_data.categories {
let ids = models::categories::Category::get_ids(category, &mut **transaction).await?;
if ids.is_empty() {
return Err(CreateError::InvalidCategory(category.clone()));
}
// TODO: We should filter out categories that don't match the project type of any of the versions
// ie: if mod and modpack both share a name this should only have modpack if it only has a modpack as a version
categories.extend(ids.values());
}
let mut additional_categories =
Vec::with_capacity(project_create_data.additional_categories.len());
for category in &project_create_data.additional_categories {
let ids = models::categories::Category::get_ids(category, &mut **transaction).await?;
if ids.is_empty() {
return Err(CreateError::InvalidCategory(category.clone()));
}
// TODO: We should filter out categories that don't match the project type of any of the versions
// ie: if mod and modpack both share a name this should only have modpack if it only has a modpack as a version
additional_categories.extend(ids.values());
}
let mut members = vec![];
if let Some(organization_id) = project_create_data.organization_id {
let org = models::Organization::get_id(organization_id.into(), pool, redis)
.await?
.ok_or_else(|| {
CreateError::InvalidInput("Invalid organization ID specified!".to_string())
})?;
let team_member =
models::TeamMember::get_from_user_id(org.team_id, current_user.id.into(), pool)
.await?;
let perms =
OrganizationPermissions::get_permissions_by_role(&current_user.role, &team_member);
if !perms
.map(|x| x.contains(OrganizationPermissions::ADD_PROJECT))
.unwrap_or(false)
{
return Err(CreateError::CustomAuthenticationError(
"You do not have the permissions to create projects in this organization!"
.to_string(),
));
}
} else {
members.push(models::team_item::TeamMemberBuilder {
user_id: current_user.id.into(),
role: crate::models::teams::DEFAULT_ROLE.to_owned(),
is_owner: true,
permissions: ProjectPermissions::all(),
organization_permissions: None,
accepted: true,
payouts_split: Decimal::ONE_HUNDRED,
ordering: 0,
})
}
let team = models::team_item::TeamBuilder { members };
let team_id = team.insert(&mut *transaction).await?;
let status;
if project_create_data.is_draft.unwrap_or(false) {
status = ProjectStatus::Draft;
} else {
status = ProjectStatus::Processing;
if project_create_data.initial_versions.is_empty() {
return Err(CreateError::InvalidInput(String::from(
"Project submitted for review with no initial versions",
)));
}
}
let license_id =
spdx::Expression::parse(&project_create_data.license_id).map_err(|err| {
CreateError::InvalidInput(format!("Invalid SPDX license identifier: {err}"))
})?;
let mut link_urls = vec![];
let link_platforms =
models::categories::LinkPlatform::list(&mut **transaction, redis).await?;
for (platform, url) in &project_create_data.link_urls {
let platform_id =
models::categories::LinkPlatform::get_id(platform, &mut **transaction)
.await?
.ok_or_else(|| {
CreateError::InvalidInput(format!(
"Link platform {} does not exist.",
platform.clone()
))
})?;
let link_platform = link_platforms
.iter()
.find(|x| x.id == platform_id)
.ok_or_else(|| {
CreateError::InvalidInput(format!(
"Link platform {} does not exist.",
platform.clone()
))
})?;
link_urls.push(models::project_item::LinkUrl {
platform_id,
platform_name: link_platform.name.clone(),
url: url.clone(),
donation: link_platform.donation,
})
}
let project_builder_actual = models::project_item::ProjectBuilder {
project_id: project_id.into(),
team_id,
organization_id: project_create_data.organization_id.map(|x| x.into()),
name: project_create_data.name,
summary: project_create_data.summary,
description: project_create_data.description,
icon_url: icon_data.clone().map(|x| x.0),
raw_icon_url: icon_data.clone().map(|x| x.1),
license_url: project_create_data.license_url,
categories,
additional_categories,
initial_versions: versions,
status,
requested_status: Some(project_create_data.requested_status),
license: license_id.to_string(),
slug: Some(project_create_data.slug),
link_urls,
gallery_items: gallery_urls
.iter()
.map(|x| models::project_item::GalleryItem {
image_url: x.url.clone(),
raw_image_url: x.raw_url.clone(),
featured: x.featured,
name: x.name.clone(),
description: x.description.clone(),
created: x.created,
ordering: x.ordering,
})
.collect(),
color: icon_data.and_then(|x| x.2),
monetization_status: MonetizationStatus::Monetized,
};
let project_builder = project_builder_actual.clone();
let now = Utc::now();
let id = project_builder_actual.insert(&mut *transaction).await?;
User::clear_project_cache(&[current_user.id.into()], redis).await?;
for image_id in project_create_data.uploaded_images {
if let Some(db_image) =
image_item::Image::get(image_id.into(), &mut **transaction, redis).await?
{
let image: Image = db_image.into();
if !matches!(image.context, ImageContext::Project { .. })
|| image.context.inner_id().is_some()
{
return Err(CreateError::InvalidInput(format!(
"Image {} is not unused and in the 'project' context",
image_id
)));
}
sqlx::query!(
"
UPDATE uploaded_images
SET mod_id = $1
WHERE id = $2
",
id as models::ids::ProjectId,
image_id.0 as i64
)
.execute(&mut **transaction)
.await?;
image_item::Image::clear_cache(image.id.into(), redis).await?;
} else {
return Err(CreateError::InvalidInput(format!(
"Image {} does not exist",
image_id
)));
}
}
let thread_id = ThreadBuilder {
type_: ThreadType::Project,
members: vec![],
project_id: Some(id),
report_id: None,
}
.insert(&mut *transaction)
.await?;
let loaders = project_builder
.initial_versions
.iter()
.flat_map(|v| v.loaders.clone())
.unique()
.collect::<Vec<_>>();
let (project_types, games) = Loader::list(&mut **transaction, redis)
.await?
.into_iter()
.fold(
(Vec::new(), Vec::new()),
|(mut project_types, mut games), loader| {
if loaders.contains(&loader.id) {
project_types.extend(loader.supported_project_types);
games.extend(loader.supported_games);
}
(project_types, games)
},
);
let response = crate::models::projects::Project {
id: project_id,
slug: project_builder.slug.clone(),
project_types,
games,
team_id: team_id.into(),
organization: project_create_data.organization_id,
name: project_builder.name.clone(),
summary: project_builder.summary.clone(),
description: project_builder.description.clone(),
published: now,
updated: now,
approved: None,
queued: None,
status,
requested_status: project_builder.requested_status,
moderator_message: None,
license: License {
id: project_create_data.license_id.clone(),
name: "".to_string(),
url: project_builder.license_url.clone(),
},
downloads: 0,
followers: 0,
categories: project_create_data.categories,
additional_categories: project_create_data.additional_categories,
loaders: vec![],
versions: project_builder
.initial_versions
.iter()
.map(|v| v.version_id.into())
.collect::<Vec<_>>(),
icon_url: project_builder.icon_url.clone(),
link_urls: project_builder
.link_urls
.clone()
.into_iter()
.map(|x| (x.platform_name.clone(), Link::from(x)))
.collect(),
gallery: gallery_urls,
color: project_builder.color,
thread_id: thread_id.into(),
monetization_status: MonetizationStatus::Monetized,
fields: HashMap::new(), // Fields instantiate to empty
};
Ok(HttpResponse::Ok().json(response))
}
}
/// Builds a `VersionBuilder` for one of a new project's initial versions.
///
/// Validates the submitted data, resolves loader names to database ids,
/// materializes the version's loader fields, and assembles the builder
/// (with an empty file list — files are attached later during upload).
async fn create_initial_version(
    version_data: &InitialVersionData,
    project_id: ProjectId,
    author: UserId,
    all_loaders: &[models::loader_fields::Loader],
    transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
    redis: &RedisPool,
) -> Result<models::version_item::VersionBuilder, CreateError> {
    // Initial versions are implicitly attached to the project being created;
    // an explicit project id in the payload is rejected.
    if version_data.project_id.is_some() {
        return Err(CreateError::InvalidInput(String::from(
            "Found project id in initial version for new project",
        )));
    }
    version_data
        .validate()
        .map_err(|err| CreateError::ValidationError(validation_errors_to_string(err, None)))?;
    // Randomly generate a new id to be used for the version
    let new_version_id: VersionId = models::generate_version_id(transaction).await?.into();
    // Resolve each submitted loader name against the known loaders,
    // failing on the first unknown one.
    let mut loader_ids = Vec::with_capacity(version_data.loaders.len());
    for submitted in &version_data.loaders {
        let matched = all_loaders
            .iter()
            .find(|known| known.loader == submitted.0)
            .ok_or_else(|| CreateError::InvalidLoader(submitted.0.clone()))?;
        loader_ids.push(matched.id);
    }
    let loader_fields = LoaderField::get_fields(&loader_ids, &mut **transaction, redis).await?;
    let mut enum_values =
        LoaderFieldEnumValue::list_many_loader_fields(&loader_fields, &mut **transaction, redis)
            .await?;
    let version_fields = try_create_version_fields(
        new_version_id,
        &version_data.fields,
        &loader_fields,
        &mut enum_values,
    )?;
    // Translate API-level dependency descriptions into builder rows.
    let mut dependencies = Vec::with_capacity(version_data.dependencies.len());
    for dep in &version_data.dependencies {
        dependencies.push(models::version_item::DependencyBuilder {
            version_id: dep.version_id.map(|x| x.into()),
            project_id: dep.project_id.map(|x| x.into()),
            dependency_type: dep.dependency_type.to_string(),
            file_name: None,
        });
    }
    Ok(models::version_item::VersionBuilder {
        version_id: new_version_id.into(),
        project_id: project_id.into(),
        author_id: author.into(),
        name: version_data.version_title.clone(),
        version_number: version_data.version_number.clone(),
        changelog: version_data.version_body.clone().unwrap_or_default(),
        // Files are uploaded and attached in a later step.
        files: Vec::new(),
        dependencies,
        loaders: loader_ids,
        version_fields,
        featured: version_data.featured,
        status: VersionStatus::Listed,
        version_type: version_data.release_channel.to_string(),
        requested_status: None,
        ordering: version_data.ordering,
    })
}
/// Uploads and optimizes a project icon (hard cap 256 KiB), recording both
/// the raw and optimized stored files for later cleanup, and returning
/// `(optimized_url, raw_url, dominant_color)`.
async fn process_icon_upload(
    uploaded_files: &mut Vec<UploadedFile>,
    id: u64,
    file_extension: &str,
    file_host: &dyn FileHost,
    mut field: Field,
) -> Result<(String, String, Option<u32>), CreateError> {
    // 262144 bytes == 256 KiB.
    let icon_bytes =
        read_from_field(&mut field, 262144, "Icons must be smaller than 256KiB").await?;
    let processed = crate::util::img::upload_image_optimized(
        &format!("data/{}", to_base62(id)),
        icon_bytes.freeze(),
        file_extension,
        Some(96),
        Some(1.0),
        file_host,
    )
    .await
    .map_err(|e| CreateError::InvalidIconFormat(e.to_string()))?;
    // Track both stored variants (raw first, then optimized) so they can be
    // deleted if project creation fails.
    for path in [processed.raw_url_path.clone(), processed.url_path.clone()] {
        uploaded_files.push(UploadedFile {
            file_id: path.clone(),
            file_name: path,
        });
    }
    Ok((processed.url, processed.raw_url, processed.color))
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,507 @@
use crate::auth::{check_is_moderator_from_headers, get_user_from_headers};
use crate::database;
use crate::database::models::image_item;
use crate::database::models::thread_item::{ThreadBuilder, ThreadMessageBuilder};
use crate::database::redis::RedisPool;
use crate::models::ids::ImageId;
use crate::models::ids::{base62_impl::parse_base62, ProjectId, UserId, VersionId};
use crate::models::images::{Image, ImageContext};
use crate::models::pats::Scopes;
use crate::models::reports::{ItemType, Report};
use crate::models::threads::{MessageBody, ThreadType};
use crate::queue::session::AuthQueue;
use crate::routes::ApiError;
use crate::util::img;
use actix_web::{web, HttpRequest, HttpResponse};
use chrono::Utc;
use futures::StreamExt;
use serde::Deserialize;
use sqlx::PgPool;
use validator::Validate;
/// Registers the report CRUD routes.
pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.route("report", web::post().to(report_create))
        .route("report", web::get().to(reports))
        .route("reports", web::get().to(reports_get))
        .route("report/{id}", web::get().to(report_get))
        .route("report/{id}", web::patch().to(report_edit))
        .route("report/{id}", web::delete().to(report_delete));
}
/// Request body for creating a new report against a project, version, or user.
#[derive(Deserialize, Validate)]
pub struct CreateReport {
    // Name of a report type; must match a row resolved by `ReportType::get_id`.
    pub report_type: String,
    // Base62 id of the reported item, interpreted according to `item_type`.
    pub item_id: String,
    pub item_type: ItemType,
    // Free-form description written by the reporter.
    pub body: String,
    // Associations to uploaded images
    #[validate(length(max = 10))]
    #[serde(default)]
    pub uploaded_images: Vec<ImageId>,
}
/// Creates a new report against a project, version, or user.
///
/// Validates the report type and the reported item, links any pre-uploaded
/// images to the report, opens a moderation thread for it, and returns the
/// created report.
pub async fn report_create(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    mut body: web::Payload,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let mut transaction = pool.begin().await?;
    // Requires the REPORT_CREATE scope.
    let current_user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::REPORT_CREATE]),
    )
    .await?
    .1;
    // Collect the raw payload before JSON parsing.
    let mut bytes = web::BytesMut::new();
    while let Some(item) = body.next().await {
        bytes.extend_from_slice(&item.map_err(|_| {
            ApiError::InvalidInput("Error while parsing request payload!".to_string())
        })?);
    }
    let new_report: CreateReport = serde_json::from_slice(bytes.as_ref())?;
    let id = crate::database::models::generate_report_id(&mut transaction).await?;
    // The report type must match a known row in the report types table.
    let report_type = crate::database::models::categories::ReportType::get_id(
        &new_report.report_type,
        &mut *transaction,
    )
    .await?
    .ok_or_else(|| {
        ApiError::InvalidInput(format!("Invalid report type: {}", new_report.report_type))
    })?;
    let mut report = crate::database::models::report_item::Report {
        id,
        report_type_id: report_type,
        project_id: None,
        version_id: None,
        user_id: None,
        body: new_report.body.clone(),
        reporter: current_user.id.into(),
        created: Utc::now(),
        closed: false,
    };
    // Resolve and verify the reported item, setting exactly one of the
    // project/version/user id fields.
    match new_report.item_type {
        ItemType::Project => {
            let project_id = ProjectId(parse_base62(new_report.item_id.as_str())?);
            let result = sqlx::query!(
                "SELECT EXISTS(SELECT 1 FROM mods WHERE id = $1)",
                project_id.0 as i64
            )
            .fetch_one(&mut *transaction)
            .await?;
            if !result.exists.unwrap_or(false) {
                return Err(ApiError::InvalidInput(format!(
                    "Project could not be found: {}",
                    new_report.item_id
                )));
            }
            report.project_id = Some(project_id.into())
        }
        ItemType::Version => {
            let version_id = VersionId(parse_base62(new_report.item_id.as_str())?);
            let result = sqlx::query!(
                "SELECT EXISTS(SELECT 1 FROM versions WHERE id = $1)",
                version_id.0 as i64
            )
            .fetch_one(&mut *transaction)
            .await?;
            if !result.exists.unwrap_or(false) {
                return Err(ApiError::InvalidInput(format!(
                    "Version could not be found: {}",
                    new_report.item_id
                )));
            }
            report.version_id = Some(version_id.into())
        }
        ItemType::User => {
            let user_id = UserId(parse_base62(new_report.item_id.as_str())?);
            let result = sqlx::query!(
                "SELECT EXISTS(SELECT 1 FROM users WHERE id = $1)",
                user_id.0 as i64
            )
            .fetch_one(&mut *transaction)
            .await?;
            if !result.exists.unwrap_or(false) {
                return Err(ApiError::InvalidInput(format!(
                    "User could not be found: {}",
                    new_report.item_id
                )));
            }
            report.user_id = Some(user_id.into())
        }
        ItemType::Unknown => {
            return Err(ApiError::InvalidInput(format!(
                "Invalid report item type: {}",
                new_report.item_type.as_str()
            )))
        }
    }
    report.insert(&mut transaction).await?;
    // Attach any pre-uploaded images: each must be unused and in the
    // 'report' context.
    for image_id in new_report.uploaded_images {
        if let Some(db_image) =
            image_item::Image::get(image_id.into(), &mut *transaction, &redis).await?
        {
            let image: Image = db_image.into();
            if !matches!(image.context, ImageContext::Report { .. })
                || image.context.inner_id().is_some()
            {
                return Err(ApiError::InvalidInput(format!(
                    "Image {} is not unused and in the 'report' context",
                    image_id
                )));
            }
            sqlx::query!(
                "
                UPDATE uploaded_images
                SET report_id = $1
                WHERE id = $2
                ",
                id.0 as i64,
                image_id.0 as i64
            )
            .execute(&mut *transaction)
            .await?;
            image_item::Image::clear_cache(image.id.into(), &redis).await?;
        } else {
            return Err(ApiError::InvalidInput(format!(
                "Image {} could not be found",
                image_id
            )));
        }
    }
    // Every report gets its own moderation thread.
    let thread_id = ThreadBuilder {
        type_: ThreadType::Report,
        members: vec![],
        project_id: None,
        report_id: Some(report.id),
    }
    .insert(&mut transaction)
    .await?;
    transaction.commit().await?;
    Ok(HttpResponse::Ok().json(Report {
        id: id.into(),
        report_type: new_report.report_type.clone(),
        item_id: new_report.item_id.clone(),
        item_type: new_report.item_type.clone(),
        reporter: current_user.id,
        body: new_report.body.clone(),
        // Fix: return the same creation timestamp that was persisted instead
        // of taking a second `Utc::now()` reading, which differed slightly.
        created: report.created,
        closed: false,
        thread_id: thread_id.into(),
    }))
}
/// Query options for the open-report listing endpoint.
#[derive(Deserialize)]
pub struct ReportsRequestOptions {
    // Maximum number of reports to return (default 100).
    #[serde(default = "default_count")]
    pub count: i16,
    // When true and the caller is a moderator, list every open report
    // instead of only the caller's own (default true).
    #[serde(default = "default_all")]
    pub all: bool,
}
/// Serde default for `ReportsRequestOptions::count`.
fn default_count() -> i16 {
    const DEFAULT_REPORT_PAGE_SIZE: i16 = 100;
    DEFAULT_REPORT_PAGE_SIZE
}
/// Serde default for `ReportsRequestOptions::all`.
fn default_all() -> bool {
    const INCLUDE_ALL_BY_DEFAULT: bool = true;
    INCLUDE_ALL_BY_DEFAULT
}
/// Lists open reports, oldest first: moderators (with `all = true`) see
/// every open report, other callers see only reports they filed themselves.
pub async fn reports(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    count: web::Query<ReportsRequestOptions>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Requires the REPORT_READ scope.
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::REPORT_READ]),
    )
    .await?
    .1;
    use futures::stream::TryStreamExt;
    // Moderators asking for `all` get every open report; everyone else is
    // restricted to reports where they are the reporter.
    let report_ids = if user.role.is_mod() && count.all {
        sqlx::query!(
            "
            SELECT id FROM reports
            WHERE closed = FALSE
            ORDER BY created ASC
            LIMIT $1;
            ",
            count.count as i64
        )
        .fetch(&**pool)
        .map_ok(|m| crate::database::models::ids::ReportId(m.id))
        .try_collect::<Vec<crate::database::models::ids::ReportId>>()
        .await?
    } else {
        sqlx::query!(
            "
            SELECT id FROM reports
            WHERE closed = FALSE AND reporter = $1
            ORDER BY created ASC
            LIMIT $2;
            ",
            user.id.0 as i64,
            count.count as i64
        )
        .fetch(&**pool)
        .map_ok(|m| crate::database::models::ids::ReportId(m.id))
        .try_collect::<Vec<crate::database::models::ids::ReportId>>()
        .await?
    };
    // Hydrate the ids into full report models for the response.
    let query_reports =
        crate::database::models::report_item::Report::get_many(&report_ids, &**pool).await?;
    let mut reports: Vec<Report> = Vec::new();
    for x in query_reports {
        reports.push(x.into());
    }
    Ok(HttpResponse::Ok().json(reports))
}
/// Query-string wrapper for batch report lookups; `ids` holds a
/// JSON-encoded array of report ids.
#[derive(Deserialize)]
pub struct ReportIds {
    pub ids: String,
}
/// Fetches a batch of reports by id. Reports the caller may not view
/// (they are not a moderator and not the reporter) are silently filtered
/// out rather than erroring.
pub async fn reports_get(
    req: HttpRequest,
    web::Query(ids): web::Query<ReportIds>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // `ids` is a JSON array embedded in the query string.
    let report_ids: Vec<crate::database::models::ids::ReportId> =
        serde_json::from_str::<Vec<crate::models::ids::ReportId>>(&ids.ids)?
            .into_iter()
            .map(|x| x.into())
            .collect();
    let reports_data =
        crate::database::models::report_item::Report::get_many(&report_ids, &**pool).await?;
    // Requires the REPORT_READ scope.
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::REPORT_READ]),
    )
    .await?
    .1;
    // Non-moderators only receive reports they filed themselves.
    let all_reports = reports_data
        .into_iter()
        .filter(|x| user.role.is_mod() || x.reporter == user.id.into())
        .map(|x| x.into())
        .collect::<Vec<Report>>();
    Ok(HttpResponse::Ok().json(all_reports))
}
/// Fetches a single report by id; responds 404 when it does not exist or
/// the caller is neither a moderator nor the reporter.
pub async fn report_get(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    info: web::Path<(crate::models::reports::ReportId,)>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Requires the REPORT_READ scope.
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::REPORT_READ]),
    )
    .await?
    .1;
    let id = info.into_inner().0.into();
    let report = crate::database::models::report_item::Report::get(id, &**pool).await?;
    if let Some(report) = report {
        // NotFound (rather than a 403) avoids leaking that the report exists.
        if !user.role.is_mod() && report.reporter != user.id.into() {
            return Err(ApiError::NotFound);
        }
        let report: Report = report.into();
        Ok(HttpResponse::Ok().json(report))
    } else {
        Err(ApiError::NotFound)
    }
}
/// Patch body for editing a report.
#[derive(Deserialize, Validate)]
pub struct EditReport {
    // Replacement report body, if provided.
    #[validate(length(max = 65536))]
    pub body: Option<String>,
    // New closed state; only moderators may change this.
    pub closed: Option<bool>,
}
/// Edits a report's body and/or closed state. The reporter (or a moderator)
/// may edit the body; only moderators may change the closed state, and each
/// closed-state change is recorded as a thread message. Responds 404 when
/// the report is missing or not visible to the caller.
pub async fn report_edit(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    info: web::Path<(crate::models::reports::ReportId,)>,
    session_queue: web::Data<AuthQueue>,
    edit_report: web::Json<EditReport>,
) -> Result<HttpResponse, ApiError> {
    // Requires the REPORT_WRITE scope.
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::REPORT_WRITE]),
    )
    .await?
    .1;
    let id = info.into_inner().0.into();
    let report = crate::database::models::report_item::Report::get(id, &**pool).await?;
    if let Some(report) = report {
        // NotFound (rather than a 403) avoids leaking that the report exists.
        if !user.role.is_mod() && report.reporter != user.id.into() {
            return Err(ApiError::NotFound);
        }
        let mut transaction = pool.begin().await?;
        if let Some(edit_body) = &edit_report.body {
            sqlx::query!(
                "
                UPDATE reports
                SET body = $1
                WHERE (id = $2)
                ",
                edit_body,
                id as crate::database::models::ids::ReportId,
            )
            .execute(&mut *transaction)
            .await?;
        }
        if let Some(edit_closed) = edit_report.closed {
            // Only moderators may close or reopen a report.
            // NOTE(review): this message mentions only reopening but fires
            // for any closed-state change by a non-moderator.
            if !user.role.is_mod() {
                return Err(ApiError::InvalidInput(
                    "You cannot reopen a report!".to_string(),
                ));
            }
            // Record the closure/reopen as a thread message before updating
            // the flag; the message kind depends on the state transition.
            ThreadMessageBuilder {
                author_id: Some(user.id.into()),
                body: if !edit_closed && report.closed {
                    MessageBody::ThreadReopen
                } else {
                    MessageBody::ThreadClosure
                },
                thread_id: report.thread_id,
                hide_identity: user.role.is_mod(),
            }
            .insert(&mut transaction)
            .await?;
            sqlx::query!(
                "
                UPDATE reports
                SET closed = $1
                WHERE (id = $2)
                ",
                edit_closed,
                id as crate::database::models::ids::ReportId,
            )
            .execute(&mut *transaction)
            .await?;
        }
        // delete any images no longer in the body
        let checkable_strings: Vec<&str> = vec![&edit_report.body]
            .into_iter()
            .filter_map(|x: &Option<String>| x.as_ref().map(|y| y.as_str()))
            .collect();
        let image_context = ImageContext::Report {
            report_id: Some(id.into()),
        };
        img::delete_unused_images(image_context, checkable_strings, &mut transaction, &redis)
            .await?;
        transaction.commit().await?;
        Ok(HttpResponse::NoContent().body(""))
    } else {
        Err(ApiError::NotFound)
    }
}
/// Deletes a report along with every image uploaded in its context.
/// Moderator-only.
pub async fn report_delete(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    info: web::Path<(crate::models::reports::ReportId,)>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Requires a moderator role plus the REPORT_DELETE scope.
    check_is_moderator_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::REPORT_DELETE]),
    )
    .await?;
    let mut transaction = pool.begin().await?;
    let id = info.into_inner().0;
    // Remove the report's context images before removing the report itself.
    let context = ImageContext::Report {
        report_id: Some(id),
    };
    let uploaded_images =
        database::models::Image::get_many_contexted(context, &mut transaction).await?;
    for image in uploaded_images {
        image_item::Image::remove(image.id, &mut transaction, &redis).await?;
    }
    let result =
        crate::database::models::report_item::Report::remove_full(id.into(), &mut transaction)
            .await?;
    transaction.commit().await?;
    // `remove_full` returns None when there was no such report.
    if result.is_some() {
        Ok(HttpResponse::NoContent().body(""))
    } else {
        Err(ApiError::NotFound)
    }
}

View File

@@ -0,0 +1,92 @@
use crate::routes::ApiError;
use actix_web::{web, HttpResponse};
use sqlx::PgPool;
/// Registers the statistics route.
pub fn config(cfg: &mut web::ServiceConfig) {
    // `cfg.route(path, r)` is shorthand for this resource registration.
    cfg.service(web::resource("statistics").route(web::get().to(get_stats)));
}
/// Counts returned by the statistics endpoint. Each field is `Option`
/// because SQL `COUNT(...)` comes back as a nullable column from sqlx.
#[derive(serde::Serialize, serde::Deserialize)]
pub struct V3Stats {
    pub projects: Option<i64>,
    pub versions: Option<i64>,
    pub authors: Option<i64>,
    pub files: Option<i64>,
}
pub async fn get_stats(pool: web::Data<PgPool>) -> Result<HttpResponse, ApiError> {
let projects = sqlx::query!(
"
SELECT COUNT(id)
FROM mods
WHERE status = ANY($1)
",
&*crate::models::projects::ProjectStatus::iterator()
.filter(|x| x.is_searchable())
.map(|x| x.to_string())
.collect::<Vec<String>>(),
)
.fetch_one(&**pool)
.await?;
let versions = sqlx::query!(
"
SELECT COUNT(v.id)
FROM versions v
INNER JOIN mods m on v.mod_id = m.id AND m.status = ANY($1)
WHERE v.status = ANY($2)
",
&*crate::models::projects::ProjectStatus::iterator()
.filter(|x| x.is_searchable())
.map(|x| x.to_string())
.collect::<Vec<String>>(),
&*crate::models::projects::VersionStatus::iterator()
.filter(|x| x.is_listed())
.map(|x| x.to_string())
.collect::<Vec<String>>(),
)
.fetch_one(&**pool)
.await?;
let authors = sqlx::query!(
"
SELECT COUNT(DISTINCT u.id)
FROM users u
INNER JOIN team_members tm on u.id = tm.user_id AND tm.accepted = TRUE
INNER JOIN mods m on tm.team_id = m.team_id AND m.status = ANY($1)
",
&*crate::models::projects::ProjectStatus::iterator()
.filter(|x| x.is_searchable())
.map(|x| x.to_string())
.collect::<Vec<String>>(),
)
.fetch_one(&**pool)
.await?;
let files = sqlx::query!(
"
SELECT COUNT(f.id) FROM files f
INNER JOIN versions v on f.version_id = v.id AND v.status = ANY($2)
INNER JOIN mods m on v.mod_id = m.id AND m.status = ANY($1)
",
&*crate::models::projects::ProjectStatus::iterator()
.filter(|x| x.is_searchable())
.map(|x| x.to_string())
.collect::<Vec<String>>(),
&*crate::models::projects::VersionStatus::iterator()
.filter(|x| x.is_listed())
.map(|x| x.to_string())
.collect::<Vec<String>>(),
)
.fetch_one(&**pool)
.await?;
let v3_stats = V3Stats {
projects: projects.count,
versions: versions.count,
authors: authors.count,
files: files.count,
};
Ok(HttpResponse::Ok().json(v3_stats))
}

View File

@@ -0,0 +1,252 @@
use std::collections::HashMap;
use super::ApiError;
use crate::database::models::categories::{Category, LinkPlatform, ProjectType, ReportType};
use crate::database::models::loader_fields::{
Game, Loader, LoaderField, LoaderFieldEnumValue, LoaderFieldType,
};
use crate::database::redis::RedisPool;
use actix_web::{web, HttpResponse};
use itertools::Itertools;
use serde_json::Value;
use sqlx::PgPool;
/// Registers the tag/metadata listing routes.
pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.service(
        web::scope("tag")
            .route("category", web::get().to(category_list))
            .route("loader", web::get().to(loader_list)),
    )
    // NOTE(review): the routes below are registered on the parent scope,
    // not inside `tag` — confirm this placement is intentional.
    .route("games", web::get().to(games_list))
    .route("loader_field", web::get().to(loader_fields_list))
    .route("license", web::get().to(license_list))
    .route("license/{id}", web::get().to(license_text))
    .route("link_platform", web::get().to(link_platform_list))
    .route("report_type", web::get().to(report_type_list))
    .route("project_type", web::get().to(project_type_list));
}
/// Serializable game entry returned by `games_list`.
#[derive(serde::Serialize, serde::Deserialize)]
pub struct GameData {
    pub slug: String,
    pub name: String,
    pub icon: Option<String>,
    pub banner: Option<String>,
}
/// Lists every supported game with its display metadata.
pub async fn games_list(
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    let games = Game::list(&**pool, &redis).await?;
    let mut results = Vec::with_capacity(games.len());
    for game in games {
        results.push(GameData {
            slug: game.slug,
            name: game.name,
            icon: game.icon_url,
            banner: game.banner_url,
        });
    }
    Ok(HttpResponse::Ok().json(results))
}
/// Serializable category entry returned by `category_list`.
#[derive(serde::Serialize, serde::Deserialize)]
pub struct CategoryData {
    pub icon: String,
    pub name: String,
    pub project_type: String,
    pub header: String,
}
/// Lists every category with its icon, owning project type, and header.
pub async fn category_list(
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    let categories = Category::list(&**pool, &redis).await?;
    let mut results = Vec::with_capacity(categories.len());
    for category in categories {
        results.push(CategoryData {
            icon: category.icon,
            name: category.category,
            project_type: category.project_type,
            header: category.header,
        });
    }
    Ok(HttpResponse::Ok().json(results))
}
/// Serializable loader entry returned by `loader_list`.
#[derive(serde::Serialize, serde::Deserialize)]
pub struct LoaderData {
    pub icon: String,
    pub name: String,
    pub supported_project_types: Vec<String>,
    pub supported_games: Vec<String>,
    pub supported_fields: Vec<String>, // Available loader fields for this loader
    pub metadata: Value,
}
/// Lists all loaders with their supported project types, games, and
/// available loader fields, sorted case-insensitively by name.
pub async fn loader_list(
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    let loaders = Loader::list(&**pool, &redis).await?;
    let loader_ids = loaders.iter().map(|loader| loader.id).collect_vec();
    let loader_fields =
        LoaderField::get_fields_per_loader(&loader_ids, &**pool, &redis).await?;
    let mut results = Vec::with_capacity(loaders.len());
    for loader in loaders {
        // A loader with no field rows simply exposes an empty field list.
        let supported_fields = match loader_fields.get(&loader.id) {
            Some(fields) => fields.iter().map(|field| field.field.clone()).collect_vec(),
            None => Vec::new(),
        };
        results.push(LoaderData {
            icon: loader.icon,
            name: loader.loader,
            supported_project_types: loader.supported_project_types,
            supported_games: loader.supported_games,
            supported_fields,
            metadata: loader.metadata,
        });
    }
    results.sort_by(|a, b| a.name.to_lowercase().cmp(&b.name.to_lowercase()));
    Ok(HttpResponse::Ok().json(results))
}
/// Query for `loader_fields_list`: the field name plus optional metadata
/// filters applied to the enum values.
#[derive(serde::Deserialize, serde::Serialize)]
pub struct LoaderFieldsEnumQuery {
    pub loader_field: String,
    pub filters: Option<HashMap<String, Value>>, // For metadata
}
// Provides the variants for any enumerable loader field.
pub async fn loader_fields_list(
pool: web::Data<PgPool>,
query: web::Query<LoaderFieldsEnumQuery>,
redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
let query = query.into_inner();
let loader_field = LoaderField::get_fields_all(&**pool, &redis)
.await?
.into_iter()
.find(|x| x.field == query.loader_field)
.ok_or_else(|| {
ApiError::InvalidInput(format!(
"'{}' was not a valid loader field.",
query.loader_field
))
})?;
let loader_field_enum_id = match loader_field.field_type {
LoaderFieldType::Enum(enum_id) | LoaderFieldType::ArrayEnum(enum_id) => enum_id,
_ => {
return Err(ApiError::InvalidInput(format!(
"'{}' is not an enumerable field, but an '{}' field.",
query.loader_field,
loader_field.field_type.to_str()
)))
}
};
let results: Vec<_> = if let Some(filters) = query.filters {
LoaderFieldEnumValue::list_filter(loader_field_enum_id, filters, &**pool, &redis).await?
} else {
LoaderFieldEnumValue::list(loader_field_enum_id, &**pool, &redis).await?
};
Ok(HttpResponse::Ok().json(results))
}
/// An SPDX license: short identifier plus full name.
#[derive(serde::Serialize, serde::Deserialize)]
pub struct License {
    pub short: String,
    pub name: String,
}
/// Lists every SPDX license known to the `spdx` crate as
/// (short identifier, full name) pairs.
pub async fn license_list() -> HttpResponse {
    let results: Vec<License> = spdx::identifiers::LICENSES
        .iter()
        .map(|(short, name, _)| License {
            short: short.to_string(),
            name: name.to_string(),
        })
        .collect();
    HttpResponse::Ok().json(results)
}
/// Full license text payload returned by `license_text`.
#[derive(serde::Serialize, serde::Deserialize)]
pub struct LicenseText {
    pub title: String,
    pub body: String,
}
/// Returns the title and full body text for an SPDX license id, with a
/// special case for the site's default "all rights reserved" pseudo-license.
pub async fn license_text(params: web::Path<(String,)>) -> Result<HttpResponse, ApiError> {
    let (license_id,) = params.into_inner();
    // The default license id is not a real SPDX identifier.
    if license_id == *crate::models::projects::DEFAULT_LICENSE_ID {
        let text = LicenseText {
            title: "All Rights Reserved".to_string(),
            body: "All rights reserved unless explicitly stated.".to_string(),
        };
        return Ok(HttpResponse::Ok().json(text));
    }
    match spdx::license_id(&license_id) {
        Some(license) => Ok(HttpResponse::Ok().json(LicenseText {
            title: license.full_name.to_string(),
            body: license.text().to_string(),
        })),
        None => Err(ApiError::InvalidInput(
            "Invalid SPDX identifier specified".to_string(),
        )),
    }
}
/// Serializable link-platform entry returned by `link_platform_list`.
#[derive(serde::Serialize, serde::Deserialize)]
pub struct LinkPlatformQueryData {
    pub name: String,
    pub donation: bool,
}
/// Lists every link platform and whether it is a donation platform.
pub async fn link_platform_list(
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    let platforms = LinkPlatform::list(&**pool, &redis).await?;
    let mut results = Vec::with_capacity(platforms.len());
    for platform in platforms {
        results.push(LinkPlatformQueryData {
            name: platform.name,
            donation: platform.donation,
        });
    }
    Ok(HttpResponse::Ok().json(results))
}
/// Lists all report types.
pub async fn report_type_list(
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    let report_types = ReportType::list(&**pool, &redis).await?;
    Ok(HttpResponse::Ok().json(report_types))
}
/// Lists all project types.
pub async fn project_type_list(
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    let project_types = ProjectType::list(&**pool, &redis).await?;
    Ok(HttpResponse::Ok().json(project_types))
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,596 @@
use std::sync::Arc;
use crate::auth::get_user_from_headers;
use crate::database;
use crate::database::models::image_item;
use crate::database::models::notification_item::NotificationBuilder;
use crate::database::models::thread_item::ThreadMessageBuilder;
use crate::database::redis::RedisPool;
use crate::file_hosting::FileHost;
use crate::models::ids::ThreadMessageId;
use crate::models::images::{Image, ImageContext};
use crate::models::notifications::NotificationBody;
use crate::models::pats::Scopes;
use crate::models::projects::ProjectStatus;
use crate::models::threads::{MessageBody, Thread, ThreadId, ThreadType};
use crate::models::users::User;
use crate::queue::session::AuthQueue;
use crate::routes::ApiError;
use actix_web::{web, HttpRequest, HttpResponse};
use futures::TryStreamExt;
use serde::Deserialize;
use sqlx::PgPool;
/// Registers the thread and thread-message routes.
pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.service(
        web::scope("thread")
            .route("{id}", web::get().to(thread_get))
            .route("{id}", web::post().to(thread_send_message)),
    );
    cfg.service(web::scope("message").route("{id}", web::delete().to(message_delete)));
    cfg.route("threads", web::get().to(threads_get));
}
/// Returns whether `user` may view `thread`.
///
/// Moderators can view everything. Report threads are visible to the user
/// who filed the report; project threads to members of the project's team
/// or of its owning organization's team; direct-message threads to their
/// members.
pub async fn is_authorized_thread(
    thread: &database::models::Thread,
    user: &User,
    pool: &PgPool,
) -> Result<bool, ApiError> {
    if user.role.is_mod() {
        return Ok(true);
    }
    let user_id: database::models::UserId = user.id.into();
    Ok(match thread.type_ {
        ThreadType::Report => {
            // Visible only if this user is the reporter of the linked report.
            if let Some(report_id) = thread.report_id {
                let report_exists = sqlx::query!(
                    "SELECT EXISTS(SELECT 1 FROM reports WHERE id = $1 AND reporter = $2)",
                    report_id as database::models::ids::ReportId,
                    user_id as database::models::ids::UserId,
                )
                .fetch_one(pool)
                .await?
                .exists;
                report_exists.unwrap_or(false)
            } else {
                false
            }
        }
        ThreadType::Project => {
            if let Some(project_id) = thread.project_id {
                // First check direct membership in the project's team...
                let project_exists = sqlx::query!(
                    "SELECT EXISTS(SELECT 1 FROM mods m INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.user_id = $2 WHERE m.id = $1)",
                    project_id as database::models::ids::ProjectId,
                    user_id as database::models::ids::UserId,
                )
                .fetch_one(pool)
                .await?
                .exists;
                if !project_exists.unwrap_or(false) {
                    // ...then fall back to membership in the owning
                    // organization's team.
                    let org_exists = sqlx::query!(
                        "SELECT EXISTS(SELECT 1 FROM mods m INNER JOIN organizations o ON m.organization_id = o.id INNER JOIN team_members tm ON tm.team_id = o.team_id AND tm.user_id = $2 WHERE m.id = $1)",
                        project_id as database::models::ids::ProjectId,
                        user_id as database::models::ids::UserId,
                    )
                    .fetch_one(pool)
                    .await?
                    .exists;
                    org_exists.unwrap_or(false)
                } else {
                    true
                }
            } else {
                false
            }
        }
        ThreadType::DirectMessage => thread.members.contains(&user_id),
    })
}
/// Filters `threads` down to those `user` is allowed to see and converts
/// them into API [`Thread`] models, resolving member/author users in bulk.
///
/// Moderators and direct-message members are admitted immediately; the
/// remaining threads are checked with three batched queries (project team
/// membership, organization team membership, report ownership) instead of
/// one query per thread.
pub async fn filter_authorized_threads(
    threads: Vec<database::models::Thread>,
    user: &User,
    pool: &web::Data<PgPool>,
    redis: &RedisPool,
) -> Result<Vec<Thread>, ApiError> {
    let user_id: database::models::UserId = user.id.into();

    // Split into threads visible without a DB check (moderator, or member of
    // a direct-message thread) and threads that still need one.
    let mut return_threads = Vec::new();
    let mut check_threads = Vec::new();
    for thread in threads {
        if user.role.is_mod()
            || (thread.type_ == ThreadType::DirectMessage && thread.members.contains(&user_id))
        {
            return_threads.push(thread);
        } else {
            check_threads.push(thread);
        }
    }

    if !check_threads.is_empty() {
        // 1) Project threads where the user is directly on the project team.
        // NOTE(review): neither this query nor the org query checks
        // `tm.accepted` — pending invitees appear to get access; confirm
        // that this is intended.
        let project_thread_ids = check_threads
            .iter()
            .filter(|x| x.type_ == ThreadType::Project)
            .flat_map(|x| x.project_id.map(|x| x.0))
            .collect::<Vec<_>>();
        if !project_thread_ids.is_empty() {
            sqlx::query!(
                "
                SELECT m.id FROM mods m
                INNER JOIN team_members tm ON tm.team_id = m.team_id AND user_id = $2
                WHERE m.id = ANY($1)
                ",
                &*project_thread_ids,
                user_id as database::models::ids::UserId,
            )
            .fetch(&***pool)
            .map_ok(|row| {
                // Move every thread whose project matched into the results.
                check_threads.retain(|x| {
                    let authorized = x.project_id.map(|x| x.0) == Some(row.id);
                    if authorized {
                        return_threads.push(x.clone());
                    }
                    !authorized
                });
            })
            .try_collect::<Vec<()>>()
            .await?;
        }

        // 2) Project threads where the user is on the owning organization's
        // team. Recomputed from what is left after step 1.
        let org_project_thread_ids = check_threads
            .iter()
            .filter(|x| x.type_ == ThreadType::Project)
            .flat_map(|x| x.project_id.map(|x| x.0))
            .collect::<Vec<_>>();
        if !org_project_thread_ids.is_empty() {
            sqlx::query!(
                "
                SELECT m.id FROM mods m
                INNER JOIN organizations o ON o.id = m.organization_id
                INNER JOIN team_members tm ON tm.team_id = o.team_id AND user_id = $2
                WHERE m.id = ANY($1)
                ",
                // Fix: bind the still-unresolved id list computed above; the
                // original bound the stale pre-filter `project_thread_ids`,
                // leaving `org_project_thread_ids` dead.
                &*org_project_thread_ids,
                user_id as database::models::ids::UserId,
            )
            .fetch(&***pool)
            .map_ok(|row| {
                check_threads.retain(|x| {
                    let authorized = x.project_id.map(|x| x.0) == Some(row.id);
                    if authorized {
                        return_threads.push(x.clone());
                    }
                    !authorized
                });
            })
            .try_collect::<Vec<()>>()
            .await?;
        }

        // 3) Report threads where the user is the reporter.
        let report_thread_ids = check_threads
            .iter()
            .filter(|x| x.type_ == ThreadType::Report)
            .flat_map(|x| x.report_id.map(|x| x.0))
            .collect::<Vec<_>>();
        if !report_thread_ids.is_empty() {
            sqlx::query!(
                "
                SELECT id FROM reports
                WHERE id = ANY($1) AND reporter = $2
                ",
                &*report_thread_ids,
                user_id as database::models::ids::UserId,
            )
            .fetch(&***pool)
            .map_ok(|row| {
                check_threads.retain(|x| {
                    let authorized = x.report_id.map(|x| x.0) == Some(row.id);
                    if authorized {
                        return_threads.push(x.clone());
                    }
                    !authorized
                });
            })
            .try_collect::<Vec<()>>()
            .await?;
        }
    }

    // Gather every user referenced as a thread member or message author so
    // they can be fetched in one batch.
    let mut user_ids = return_threads
        .iter()
        .flat_map(|x| x.members.clone())
        .collect::<Vec<database::models::UserId>>();
    user_ids.append(
        &mut return_threads
            .iter()
            .flat_map(|x| {
                x.messages
                    .iter()
                    .filter_map(|x| x.author_id)
                    .collect::<Vec<_>>()
            })
            .collect::<Vec<database::models::UserId>>(),
    );
    let users: Vec<User> = database::models::User::get_many_ids(&user_ids, &***pool, redis)
        .await?
        .into_iter()
        .map(From::from)
        .collect();

    let mut final_threads = Vec::new();
    for thread in return_threads {
        let mut authors = thread.members.clone();
        // Authors who posted with a hidden identity are omitted for
        // non-moderators.
        authors.append(
            &mut thread
                .messages
                .iter()
                .filter_map(|x| {
                    if x.hide_identity && !user.role.is_mod() {
                        None
                    } else {
                        x.author_id
                    }
                })
                .collect::<Vec<_>>(),
        );
        final_threads.push(Thread::from(
            thread,
            users
                .iter()
                .filter(|x| authors.contains(&x.id.into()))
                .cloned()
                .collect(),
            user,
        ));
    }
    Ok(final_threads)
}
/// `GET /thread/{id}` — fetches a single thread with its messages and the
/// users involved.
///
/// Requires the `THREAD_READ` scope. Returns 404 both when the thread does
/// not exist and when the caller is not authorized, so the two cases are
/// indistinguishable to the client.
pub async fn thread_get(
    req: HttpRequest,
    info: web::Path<(ThreadId,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let string = info.into_inner().0.into();
    let thread_data = database::models::Thread::get(string, &**pool).await?;
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::THREAD_READ]),
    )
    .await?
    .1;
    if let Some(mut data) = thread_data {
        if is_authorized_thread(&data, &user, &pool).await? {
            // Collect thread members plus message authors; authors who posted
            // with a hidden identity are omitted for non-moderators.
            let authors = &mut data.members;
            authors.append(
                &mut data
                    .messages
                    .iter()
                    .filter_map(|x| {
                        if x.hide_identity && !user.role.is_mod() {
                            None
                        } else {
                            x.author_id
                        }
                    })
                    .collect::<Vec<_>>(),
            );
            let users: Vec<User> = database::models::User::get_many_ids(authors, &**pool, &redis)
                .await?
                .into_iter()
                .map(From::from)
                .collect();
            return Ok(HttpResponse::Ok().json(Thread::from(data, users, &user)));
        }
    }
    Err(ApiError::NotFound)
}
/// Query parameters for [`threads_get`]: `ids` is a JSON-encoded array of
/// thread IDs.
#[derive(Deserialize)]
pub struct ThreadIds {
    pub ids: String,
}
/// `GET /threads?ids=[...]` — bulk-fetches threads by id.
///
/// Requires the `THREAD_READ` scope; threads the caller is not authorized to
/// see are silently dropped from the response.
pub async fn threads_get(
    req: HttpRequest,
    web::Query(ids): web::Query<ThreadIds>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let (_, user) = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::THREAD_READ]),
    )
    .await?;
    // The `ids` query parameter carries a JSON array of thread IDs.
    let requested_ids: Vec<database::models::ids::ThreadId> =
        serde_json::from_str::<Vec<ThreadId>>(&ids.ids)?
            .into_iter()
            .map(|x| x.into())
            .collect();
    let found = database::models::Thread::get_many(&requested_ids, &**pool).await?;
    let visible = filter_authorized_threads(found, &user, &pool, &redis).await?;
    Ok(HttpResponse::Ok().json(visible))
}
/// Request body for [`thread_send_message`].
#[derive(Deserialize)]
pub struct NewThreadMessage {
    // Only the `MessageBody::Text` variant is accepted by the route; other
    // variants are rejected with a 400.
    pub body: MessageBody,
}
/// `POST /thread/{id}` — appends a text message to a thread.
///
/// Requires the `THREAD_WRITE` scope. Validates the message body (length cap,
/// private-flag permission, reply target), inserts it inside a transaction,
/// sends notifications to the relevant project team or reporter, and links
/// any associated uploaded images to the new message. Returns 204 on success.
pub async fn thread_send_message(
    req: HttpRequest,
    info: web::Path<(ThreadId,)>,
    pool: web::Data<PgPool>,
    new_message: web::Json<NewThreadMessage>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::THREAD_WRITE]),
    )
    .await?
    .1;
    let string: database::models::ThreadId = info.into_inner().0.into();
    if let MessageBody::Text {
        body,
        replying_to,
        private,
        ..
    } = &new_message.body
    {
        // 64 KiB cap on the message body.
        if body.len() > 65536 {
            return Err(ApiError::InvalidInput(
                "Input body is too long!".to_string(),
            ));
        }
        // Only moderators may post private (staff-only) messages.
        if *private && !user.role.is_mod() {
            return Err(ApiError::InvalidInput(
                "You are not allowed to send private messages!".to_string(),
            ));
        }
        // A reply target must exist and belong to this same thread.
        if let Some(replying_to) = replying_to {
            let thread_message =
                database::models::ThreadMessage::get((*replying_to).into(), &**pool).await?;
            if let Some(thread_message) = thread_message {
                if thread_message.thread_id != string {
                    return Err(ApiError::InvalidInput(
                        "Message replied to is from another thread!".to_string(),
                    ));
                }
            } else {
                return Err(ApiError::InvalidInput(
                    "Message replied to does not exist!".to_string(),
                ));
            }
        }
    } else {
        return Err(ApiError::InvalidInput(
            "You may only send text messages through this route!".to_string(),
        ));
    }
    let result = database::models::Thread::get(string, &**pool).await?;
    if let Some(thread) = result {
        if !is_authorized_thread(&thread, &user, &pool).await? {
            // Hide the thread's existence from unauthorized callers.
            return Err(ApiError::NotFound);
        }
        let mut transaction = pool.begin().await?;
        // Moderator messages are stored with their author identity hidden.
        let id = ThreadMessageBuilder {
            author_id: Some(user.id.into()),
            body: new_message.body.clone(),
            thread_id: thread.id,
            hide_identity: user.role.is_mod(),
        }
        .insert(&mut transaction)
        .await?;
        if let Some(project_id) = thread.project_id {
            // Moderator replies on a project thread notify the project's team
            // (skipped while the project is still processing).
            let project = database::models::Project::get_id(project_id, &**pool, &redis).await?;
            if let Some(project) = project {
                if project.inner.status != ProjectStatus::Processing && user.role.is_mod() {
                    let members = database::models::TeamMember::get_from_team_full(
                        project.inner.team_id,
                        &**pool,
                        &redis,
                    )
                    .await?;
                    NotificationBuilder {
                        body: NotificationBody::ModeratorMessage {
                            thread_id: thread.id.into(),
                            message_id: id.into(),
                            project_id: Some(project.inner.id.into()),
                            report_id: None,
                        },
                    }
                    .insert_many(
                        members.into_iter().map(|x| x.user_id).collect(),
                        &mut transaction,
                        &redis,
                    )
                    .await?;
                }
            }
        } else if let Some(report_id) = thread.report_id {
            // Replies on a report thread notify the reporter (unless the
            // reporter is the one replying); closed reports only accept
            // moderator replies.
            let report = database::models::report_item::Report::get(report_id, &**pool).await?;
            if let Some(report) = report {
                if report.closed && !user.role.is_mod() {
                    return Err(ApiError::InvalidInput(
                        "You may not reply to a closed report".to_string(),
                    ));
                }
                if user.id != report.reporter.into() {
                    NotificationBuilder {
                        body: NotificationBody::ModeratorMessage {
                            thread_id: thread.id.into(),
                            message_id: id.into(),
                            project_id: None,
                            report_id: Some(report.id.into()),
                        },
                    }
                    .insert(report.reporter, &mut transaction, &redis)
                    .await?;
                }
            }
        }
        if let MessageBody::Text {
            associated_images, ..
        } = &new_message.body
        {
            // Claim each uploaded image for this message; images must be
            // unused and uploaded in the 'thread_message' context.
            for image_id in associated_images {
                if let Some(db_image) =
                    image_item::Image::get((*image_id).into(), &mut *transaction, &redis).await?
                {
                    let image: Image = db_image.into();
                    if !matches!(image.context, ImageContext::ThreadMessage { .. })
                        || image.context.inner_id().is_some()
                    {
                        return Err(ApiError::InvalidInput(format!(
                            "Image {} is not unused and in the 'thread_message' context",
                            image_id
                        )));
                    }
                    // Fix: attach the image to the newly inserted *message*
                    // id, not the thread id — `message_delete` looks images up
                    // by message-id context, so the old binding orphaned them.
                    sqlx::query!(
                        "
                        UPDATE uploaded_images
                        SET thread_message_id = $1
                        WHERE id = $2
                        ",
                        id.0,
                        image_id.0 as i64
                    )
                    .execute(&mut *transaction)
                    .await?;
                    image_item::Image::clear_cache(image.id.into(), &redis).await?;
                } else {
                    return Err(ApiError::InvalidInput(format!(
                        "Image {} does not exist",
                        image_id
                    )));
                }
            }
        }
        transaction.commit().await?;
        Ok(HttpResponse::NoContent().body(""))
    } else {
        Err(ApiError::NotFound)
    }
}
/// `DELETE /message/{id}` — deletes a thread message.
///
/// Requires the `THREAD_WRITE` scope. Only the message author or a moderator
/// may delete. Images attached to the message are removed from the CDN and
/// database first; the message is then replaced via `remove_full`, keeping
/// its private flag so staff-only messages stay hidden.
pub async fn message_delete(
    req: HttpRequest,
    info: web::Path<(ThreadMessageId,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
    file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::THREAD_WRITE]),
    )
    .await?
    .1;
    // NOTE: despite the binding name, `thread` below is the thread *message*.
    let result = database::models::ThreadMessage::get(info.into_inner().0.into(), &**pool).await?;
    if let Some(thread) = result {
        if !user.role.is_mod() && thread.author_id != Some(user.id.into()) {
            return Err(ApiError::CustomAuthentication(
                "You cannot delete this message!".to_string(),
            ));
        }
        let mut transaction = pool.begin().await?;
        // Delete any images attached to this message from file storage and
        // the database before removing the message itself.
        let context = ImageContext::ThreadMessage {
            thread_message_id: Some(thread.id.into()),
        };
        let images = database::Image::get_many_contexted(context, &mut transaction).await?;
        let cdn_url = dotenvy::var("CDN_URL")?;
        for image in images {
            // Derive the CDN file path from the stored URL.
            let name = image.url.split(&format!("{cdn_url}/")).nth(1);
            if let Some(icon_path) = name {
                file_host.delete_file_version("", icon_path).await?;
            }
            database::Image::remove(image.id, &mut transaction, &redis).await?;
        }
        // Preserve the private flag through deletion so staff-only messages
        // remain hidden after being tombstoned.
        let private = if let MessageBody::Text { private, .. } = thread.body {
            private
        } else if let MessageBody::Deleted { private, .. } = thread.body {
            private
        } else {
            false
        };
        database::models::ThreadMessage::remove_full(thread.id, private, &mut transaction).await?;
        transaction.commit().await?;
        Ok(HttpResponse::NoContent().body(""))
    } else {
        Err(ApiError::NotFound)
    }
}

View File

@@ -0,0 +1,627 @@
use std::{collections::HashMap, sync::Arc};
use actix_web::{web, HttpRequest, HttpResponse};
use lazy_static::lazy_static;
use regex::Regex;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use validator::Validate;
use super::{oauth_clients::get_user_clients, ApiError};
use crate::util::img::delete_old_images;
use crate::{
auth::{filter_visible_projects, get_user_from_headers},
database::{models::User, redis::RedisPool},
file_hosting::FileHost,
models::{
collections::{Collection, CollectionStatus},
ids::UserId,
notifications::Notification,
pats::Scopes,
projects::Project,
users::{Badges, Role},
},
queue::session::AuthQueue,
util::{routes::read_from_payload, validate::validation_errors_to_string},
};
/// Registers user routes: `GET /user` (authenticated self), `GET /users`
/// (bulk fetch), and the `/user/{id}/...` resource routes.
pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.route("user", web::get().to(user_auth_get));
    cfg.route("users", web::get().to(users_get));
    cfg.service(
        web::scope("user")
            .route("{user_id}/projects", web::get().to(projects_list))
            .route("{id}", web::get().to(user_get))
            .route("{user_id}/collections", web::get().to(collections_list))
            .route("{user_id}/organizations", web::get().to(orgs_list))
            .route("{id}", web::patch().to(user_edit))
            .route("{id}/icon", web::patch().to(user_icon_edit))
            .route("{id}", web::delete().to(user_delete))
            .route("{id}/follows", web::get().to(user_follows))
            .route("{id}/notifications", web::get().to(user_notifications))
            .route("{id}/oauth_apps", web::get().to(get_user_clients)),
    );
}
/// `GET /user/{id}/projects` — lists a user's projects.
///
/// Authentication is optional: anonymous callers only see publicly visible
/// projects (the visibility filter handles that distinction).
pub async fn projects_list(
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // `.ok()` turns an auth failure into an anonymous request.
    let maybe_user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::PROJECT_READ]),
    )
    .await
    .map(|x| x.1)
    .ok();
    match User::get(&info.into_inner().0, &**pool, &redis).await? {
        Some(target) => {
            let project_ids = User::get_projects(target.id, &**pool, &redis).await?;
            let all_projects: Vec<_> =
                crate::database::Project::get_many_ids(&project_ids, &**pool, &redis).await?;
            let visible = filter_visible_projects(all_projects, &maybe_user, &pool, true).await?;
            Ok(HttpResponse::Ok().json(visible))
        }
        None => Err(ApiError::NotFound),
    }
}
/// `GET /user` — returns the currently authenticated user.
///
/// Requires the `USER_READ` scope. The email and payout fields are redacted
/// unless the token also carries `USER_READ_EMAIL` / `PAYOUTS_READ`.
pub async fn user_auth_get(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let (scopes, user) = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::USER_READ]),
    )
    .await?;
    // Strip out fields the token is not scoped to read.
    let mut sanitized = user;
    if !scopes.contains(Scopes::USER_READ_EMAIL) {
        sanitized.email = None;
    }
    if !scopes.contains(Scopes::PAYOUTS_READ) {
        sanitized.payout_data = None;
    }
    Ok(HttpResponse::Ok().json(sanitized))
}
/// Query parameters for [`users_get`]: `ids` is a JSON-encoded array of
/// user IDs or usernames.
#[derive(Serialize, Deserialize)]
pub struct UserIds {
    pub ids: String,
}
/// `GET /users?ids=[...]` — bulk-fetches public user profiles.
///
/// No authentication required; unknown ids are simply omitted.
pub async fn users_get(
    web::Query(ids): web::Query<UserIds>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    // `ids` is a JSON array of id strings.
    let requested: Vec<String> = serde_json::from_str(&ids.ids)?;
    let users: Vec<crate::models::users::User> = User::get_many(&requested, &**pool, &redis)
        .await?
        .into_iter()
        .map(From::from)
        .collect();
    Ok(HttpResponse::Ok().json(users))
}
/// `GET /user/{id}` — fetches a single public user profile, or 404 if the
/// user does not exist.
pub async fn user_get(
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
    match User::get(&info.into_inner().0, &**pool, &redis).await? {
        Some(data) => Ok(HttpResponse::Ok().json(crate::models::users::User::from(data))),
        None => Err(ApiError::NotFound),
    }
}
/// `GET /user/{id}/collections` — lists a user's collections.
///
/// Authentication is optional: non-`Listed` collections are included only
/// for the owner or a moderator.
pub async fn collections_list(
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Optional authentication — `.ok()` makes auth failures anonymous.
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::COLLECTION_READ]),
    )
    .await
    .map(|x| x.1)
    .ok();
    let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
    if let Some(id) = id_option.map(|x| x.id) {
        let user_id: UserId = id.into();
        // Owners and moderators may also see non-listed collections.
        let can_view_private = user
            .map(|y| y.role.is_mod() || y.id == user_id)
            .unwrap_or(false);
        let project_data = User::get_collections(id, &**pool).await?;
        let response: Vec<_> =
            crate::database::models::Collection::get_many(&project_data, &**pool, &redis)
                .await?
                .into_iter()
                .filter(|x| can_view_private || matches!(x.status, CollectionStatus::Listed))
                .map(Collection::from)
                .collect();
        Ok(HttpResponse::Ok().json(response))
    } else {
        Err(ApiError::NotFound)
    }
}
/// `GET /user/{id}/organizations` — lists the organizations a user belongs
/// to, including each organization's (visibility-filtered) team members.
///
/// Authentication is optional; non-members only see accepted team members.
pub async fn orgs_list(
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Optional authentication; anonymous callers get a restricted view.
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::PROJECT_READ]),
    )
    .await
    .map(|x| x.1)
    .ok();
    let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
    if let Some(id) = id_option.map(|x| x.id) {
        let org_data = User::get_organizations(id, &**pool).await?;
        let organizations_data =
            crate::database::models::organization_item::Organization::get_many_ids(
                &org_data, &**pool, &redis,
            )
            .await?;
        // Batch-load every organization's team members and the matching users.
        let team_ids = organizations_data
            .iter()
            .map(|x| x.team_id)
            .collect::<Vec<_>>();
        let teams_data = crate::database::models::TeamMember::get_from_team_full_many(
            &team_ids, &**pool, &redis,
        )
        .await?;
        let users = User::get_many_ids(
            &teams_data.iter().map(|x| x.user_id).collect::<Vec<_>>(),
            &**pool,
            &redis,
        )
        .await?;
        let mut organizations = vec![];
        // Group team members by team so each org can pick up its own list.
        let mut team_groups = HashMap::new();
        for item in teams_data {
            team_groups.entry(item.team_id).or_insert(vec![]).push(item);
        }
        for data in organizations_data {
            let members_data = team_groups.remove(&data.team_id).unwrap_or(vec![]);
            // Whether the requester is an accepted member of this org's team.
            let logged_in = user
                .as_ref()
                .and_then(|user| {
                    members_data
                        .iter()
                        .find(|x| x.user_id == user.id.into() && x.accepted)
                })
                .is_some();
            // Non-members only see accepted members (plus the listed user);
            // the final flag hides sensitive member details from outsiders.
            let team_members: Vec<_> = members_data
                .into_iter()
                .filter(|x| logged_in || x.accepted || id == x.user_id)
                .flat_map(|data| {
                    users.iter().find(|x| x.id == data.user_id).map(|user| {
                        crate::models::teams::TeamMember::from(data, user.clone(), !logged_in)
                    })
                })
                .collect();
            let organization = crate::models::organizations::Organization::from(data, team_members);
            organizations.push(organization);
        }
        Ok(HttpResponse::Ok().json(organizations))
    } else {
        Err(ApiError::NotFound)
    }
}
lazy_static! {
    // Usernames may only contain URL-safe characters: ASCII letters, digits,
    // underscores, and hyphens.
    static ref RE_URL_SAFE: Regex = Regex::new(r"^[a-zA-Z0-9_-]*$").unwrap();
}
/// Patch body for [`user_edit`]; every field is optional and only provided
/// fields are updated.
#[derive(Serialize, Deserialize, Validate)]
pub struct EditUser {
    /// New username; 1–39 URL-safe characters, must not be taken.
    #[validate(length(min = 1, max = 39), regex = "RE_URL_SAFE")]
    pub username: Option<String>,
    /// Double option: omitted = leave unchanged, explicit `null` = clear.
    #[serde(
        default,
        skip_serializing_if = "Option::is_none",
        with = "::serde_with::rust::double_option"
    )]
    #[validate(length(max = 160))]
    pub bio: Option<Option<String>>,
    /// New role; admin-only.
    pub role: Option<Role>,
    /// New badge bitset; admin-only.
    pub badges: Option<Badges>,
    /// Venmo handle for payouts; requires the `PAYOUTS_WRITE` scope.
    #[validate(length(max = 160))]
    pub venmo_handle: Option<String>,
}
/// `PATCH /user/{id}` — edits a user's profile.
///
/// Requires the `USER_WRITE` scope; only the user themself or a moderator
/// may edit. Role/badge changes additionally require admin, and the venmo
/// handle requires the `PAYOUTS_WRITE` scope. Each provided field is updated
/// by its own statement inside a single transaction, after which the cached
/// user entry is invalidated.
pub async fn user_edit(
    req: HttpRequest,
    info: web::Path<(String,)>,
    new_user: web::Json<EditUser>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let (scopes, user) = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::USER_WRITE]),
    )
    .await?;
    new_user
        .validate()
        .map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?;
    let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
    if let Some(actual_user) = id_option {
        let id = actual_user.id;
        let user_id: UserId = id.into();
        if user.id == user_id || user.role.is_mod() {
            let mut transaction = pool.begin().await?;
            if let Some(username) = &new_user.username {
                // Allow the rename when the name is free or already owned by
                // the requester.
                // NOTE(review): the ownership check compares against the
                // *requester* (`user.id`), not the edited user — when a mod
                // renames someone else this looks off; confirm intended.
                let existing_user_id_option = User::get(username, &**pool, &redis).await?;
                if existing_user_id_option
                    .map(|x| UserId::from(x.id))
                    .map(|id| id == user.id)
                    .unwrap_or(true)
                {
                    sqlx::query!(
                        "
                        UPDATE users
                        SET username = $1
                        WHERE (id = $2)
                        ",
                        username,
                        id as crate::database::models::ids::UserId,
                    )
                    .execute(&mut *transaction)
                    .await?;
                } else {
                    return Err(ApiError::InvalidInput(format!(
                        "Username {username} is taken!"
                    )));
                }
            }
            if let Some(bio) = &new_user.bio {
                // `bio` is a double option: `Some(None)` clears the field.
                sqlx::query!(
                    "
                    UPDATE users
                    SET bio = $1
                    WHERE (id = $2)
                    ",
                    bio.as_deref(),
                    id as crate::database::models::ids::UserId,
                )
                .execute(&mut *transaction)
                .await?;
            }
            if let Some(role) = &new_user.role {
                // Role changes are admin-only.
                if !user.role.is_admin() {
                    return Err(ApiError::CustomAuthentication(
                        "You do not have the permissions to edit the role of this user!"
                            .to_string(),
                    ));
                }
                let role = role.to_string();
                sqlx::query!(
                    "
                    UPDATE users
                    SET role = $1
                    WHERE (id = $2)
                    ",
                    role,
                    id as crate::database::models::ids::UserId,
                )
                .execute(&mut *transaction)
                .await?;
            }
            if let Some(badges) = &new_user.badges {
                // Badge changes are admin-only.
                if !user.role.is_admin() {
                    return Err(ApiError::CustomAuthentication(
                        "You do not have the permissions to edit the badges of this user!"
                            .to_string(),
                    ));
                }
                sqlx::query!(
                    "
                    UPDATE users
                    SET badges = $1
                    WHERE (id = $2)
                    ",
                    badges.bits() as i64,
                    id as crate::database::models::ids::UserId,
                )
                .execute(&mut *transaction)
                .await?;
            }
            if let Some(venmo_handle) = &new_user.venmo_handle {
                // Payout details need the dedicated payout scope.
                if !scopes.contains(Scopes::PAYOUTS_WRITE) {
                    return Err(ApiError::CustomAuthentication(
                        "You do not have the permissions to edit the venmo handle of this user!"
                            .to_string(),
                    ));
                }
                sqlx::query!(
                    "
                    UPDATE users
                    SET venmo_handle = $1
                    WHERE (id = $2)
                    ",
                    venmo_handle,
                    id as crate::database::models::ids::UserId,
                )
                .execute(&mut *transaction)
                .await?;
            }
            transaction.commit().await?;
            // Invalidate both the id- and username-keyed cache entries.
            User::clear_caches(&[(id, Some(actual_user.username))], &redis).await?;
            Ok(HttpResponse::NoContent().body(""))
        } else {
            Err(ApiError::CustomAuthentication(
                "You do not have permission to edit this user!".to_string(),
            ))
        }
    } else {
        Err(ApiError::NotFound)
    }
}
/// Query parameter carrying the file extension of an uploaded icon
/// (e.g. `png`, `webp`).
#[derive(Serialize, Deserialize)]
pub struct Extension {
    pub ext: String,
}
/// `PATCH /user/{id}/icon` — replaces a user's avatar image.
///
/// Requires the `USER_WRITE` scope; only the user themself or a moderator may
/// change the icon. Old CDN images are deleted, the new payload (max 256 KiB)
/// is optimized and uploaded, then the cached user entry is invalidated.
#[allow(clippy::too_many_arguments)]
pub async fn user_icon_edit(
    web::Query(ext): web::Query<Extension>,
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
    mut payload: web::Payload,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::USER_WRITE]),
    )
    .await?
    .1;
    let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
    if let Some(actual_user) = id_option {
        if user.id != actual_user.id.into() && !user.role.is_mod() {
            return Err(ApiError::CustomAuthentication(
                "You don't have permission to edit this user's icon.".to_string(),
            ));
        }
        // Remove the previous avatar files from the CDN first.
        delete_old_images(
            actual_user.avatar_url,
            actual_user.raw_avatar_url,
            &***file_host,
        )
        .await?;
        // 262144 bytes = 256 KiB upload cap.
        let bytes =
            read_from_payload(&mut payload, 262144, "Icons must be smaller than 256KiB").await?;
        let user_id: UserId = actual_user.id.into();
        // Re-encode/resize the upload (96px target) and store both the
        // optimized and raw variants.
        let upload_result = crate::util::img::upload_image_optimized(
            &format!("data/{}", user_id),
            bytes.freeze(),
            &ext.ext,
            Some(96),
            Some(1.0),
            &***file_host,
        )
        .await?;
        sqlx::query!(
            "
            UPDATE users
            SET avatar_url = $1, raw_avatar_url = $2
            WHERE (id = $3)
            ",
            upload_result.url,
            upload_result.raw_url,
            actual_user.id as crate::database::models::ids::UserId,
        )
        .execute(&**pool)
        .await?;
        User::clear_caches(&[(actual_user.id, None)], &redis).await?;
        Ok(HttpResponse::NoContent().body(""))
    } else {
        Err(ApiError::NotFound)
    }
}
/// `DELETE /user/{id}` — permanently removes a user account.
///
/// Requires the `USER_DELETE` scope; only the user themself or an admin may
/// delete the account.
pub async fn user_delete(
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let requester = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::USER_DELETE]),
    )
    .await?
    .1;
    let target = User::get(&info.into_inner().0, &**pool, &redis).await?;
    match target.map(|x| x.id) {
        Some(id) => {
            if !requester.role.is_admin() && requester.id != id.into() {
                return Err(ApiError::CustomAuthentication(
                    "You do not have permission to delete this user!".to_string(),
                ));
            }
            // Removal runs inside a transaction; `None` from `remove` means
            // the user row was already gone.
            let mut transaction = pool.begin().await?;
            let removed = User::remove(id, &mut transaction, &redis).await?;
            transaction.commit().await?;
            if removed.is_some() {
                Ok(HttpResponse::NoContent().body(""))
            } else {
                Err(ApiError::NotFound)
            }
        }
        None => Err(ApiError::NotFound),
    }
}
/// `GET /user/{id}/follows` — lists the projects a user follows.
///
/// Requires the `USER_READ` scope; only the user themself or an admin may
/// view the follow list.
pub async fn user_follows(
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let requester = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::USER_READ]),
    )
    .await?
    .1;
    let target_id = User::get(&info.into_inner().0, &**pool, &redis)
        .await?
        .map(|x| x.id);
    match target_id {
        Some(id) => {
            if !requester.role.is_admin() && requester.id != id.into() {
                return Err(ApiError::CustomAuthentication(
                    "You do not have permission to see the projects this user follows!".to_string(),
                ));
            }
            let followed_ids = User::get_follows(id, &**pool).await?;
            let projects = crate::database::Project::get_many_ids(&followed_ids, &**pool, &redis)
                .await?
                .into_iter()
                .map(Project::from)
                .collect::<Vec<_>>();
            Ok(HttpResponse::Ok().json(projects))
        }
        None => Err(ApiError::NotFound),
    }
}
/// `GET /user/{id}/notifications` — lists a user's notifications, newest
/// first.
///
/// Requires the `NOTIFICATION_READ` scope; only the user themself or an
/// admin may view them.
pub async fn user_notifications(
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::NOTIFICATION_READ]),
    )
    .await?
    .1;
    let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
    if let Some(id) = id_option.map(|x| x.id) {
        if !user.role.is_admin() && user.id != id.into() {
            return Err(ApiError::CustomAuthentication(
                "You do not have permission to see the notifications of this user!".to_string(),
            ));
        }
        let mut notifications: Vec<Notification> =
            crate::database::models::notification_item::Notification::get_many_user(
                id, &**pool, &redis,
            )
            .await?
            .into_iter()
            .map(Into::into)
            .collect();
        // Newest notifications first.
        notifications.sort_by(|a, b| b.created.cmp(&a.created));
        Ok(HttpResponse::Ok().json(notifications))
    } else {
        Err(ApiError::NotFound)
    }
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,682 @@
use super::ApiError;
use crate::auth::checks::{filter_visible_versions, is_visible_version};
use crate::auth::{filter_visible_projects, get_user_from_headers};
use crate::database::redis::RedisPool;
use crate::models::ids::VersionId;
use crate::models::pats::Scopes;
use crate::models::projects::VersionType;
use crate::models::teams::ProjectPermissions;
use crate::queue::session::AuthQueue;
use crate::{database, models};
use actix_web::{web, HttpRequest, HttpResponse};
use dashmap::DashMap;
use futures::TryStreamExt;
use itertools::Itertools;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use std::collections::HashMap;
/// Registers the version-file routes (lookup/update/delete by file hash).
///
/// Note: the `{version_id}` path segments here actually carry file *hashes*
/// — every handler reads them as a hash string.
pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.service(
        web::scope("version_file")
            .route("{version_id}", web::get().to(get_version_from_hash))
            .route("{version_id}/update", web::post().to(get_update_from_hash))
            .route("project", web::post().to(get_projects_from_hashes))
            .route("{version_id}", web::delete().to(delete_file))
            .route("{version_id}/download", web::get().to(download_version)),
    );
    cfg.service(
        web::scope("version_files")
            .route("update", web::post().to(update_files))
            .route("update_individual", web::post().to(update_individual_files))
            .route("", web::post().to(get_versions_from_hashes)),
    );
}
/// `GET /version_file/{hash}` — looks up the version that owns a file by its
/// hash.
///
/// Authentication is optional: anonymous callers only see publicly visible
/// versions. The hash algorithm defaults based on the hash's length (see
/// [`default_algorithm_from_hashes`]).
pub async fn get_version_from_hash(
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    hash_query: web::Query<HashQuery>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Optional authentication — `.ok()` swallows auth failures.
    let user_option = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::VERSION_READ]),
    )
    .await
    .map(|x| x.1)
    .ok();
    // Hashes are matched lowercase.
    let hash = info.into_inner().0.to_lowercase();
    let algorithm = hash_query
        .algorithm
        .clone()
        .unwrap_or_else(|| default_algorithm_from_hashes(&[hash.clone()]));
    let file = database::models::Version::get_file_from_hash(
        algorithm,
        hash,
        hash_query.version_id.map(|x| x.into()),
        &**pool,
        &redis,
    )
    .await?;
    if let Some(file) = file {
        let version = database::models::Version::get(file.version_id, &**pool, &redis).await?;
        if let Some(version) = version {
            // Hidden versions 404 rather than revealing their existence.
            if !is_visible_version(&version.inner, &user_option, &pool, &redis).await? {
                return Err(ApiError::NotFound);
            }
            Ok(HttpResponse::Ok().json(models::projects::Version::from(version)))
        } else {
            Err(ApiError::NotFound)
        }
    } else {
        Err(ApiError::NotFound)
    }
}
/// Query parameters shared by the single-hash version-file routes.
#[derive(Serialize, Deserialize)]
pub struct HashQuery {
    pub algorithm: Option<String>, // Defaults to calculation based on size of hash
    /// Optionally restrict the lookup to a file belonging to this version.
    pub version_id: Option<VersionId>,
}
/// Picks a default hash algorithm name from the length of the first hash.
///
/// SHA-1 digests are 40 hex characters and SHA-512 digests are 128, so any
/// leading hash of 128+ characters selects `"sha512"`; everything else —
/// including an empty hash list — defaults to `"sha1"`.
pub fn default_algorithm_from_hashes(hashes: &[String]) -> String {
    match hashes.first() {
        Some(hash) if hash.len() >= 128 => "sha512".into(),
        _ => "sha1".into(),
    }
}
/// Filters for [`get_update_from_hash`]: restrict candidate update versions
/// by loader, release channel, and loader fields.
#[derive(Serialize, Deserialize)]
pub struct UpdateData {
    pub loaders: Option<Vec<String>>,
    pub version_types: Option<Vec<VersionType>>,
    /*
        Loader fields to filter with:
        "game_versions": ["1.16.5", "1.17"]

        Returns if it matches any of the values
    */
    pub loader_fields: Option<HashMap<String, Vec<serde_json::Value>>>,
}
/// `POST /version_file/{hash}/update` — given a file hash, finds the latest
/// version of the same project that matches the supplied filters.
///
/// Authentication is optional. Returns 404 if the hash, project, or a
/// matching visible version cannot be found.
pub async fn get_update_from_hash(
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    hash_query: web::Query<HashQuery>,
    update_data: web::Json<UpdateData>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Optional authentication — `.ok()` swallows auth failures.
    let user_option = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::VERSION_READ]),
    )
    .await
    .map(|x| x.1)
    .ok();
    let hash = info.into_inner().0.to_lowercase();
    if let Some(file) = database::models::Version::get_file_from_hash(
        hash_query
            .algorithm
            .clone()
            .unwrap_or_else(|| default_algorithm_from_hashes(&[hash.clone()])),
        hash,
        hash_query.version_id.map(|x| x.into()),
        &**pool,
        &redis,
    )
    .await?
    {
        if let Some(project) =
            database::models::Project::get_id(file.project_id, &**pool, &redis).await?
        {
            // Keep only versions that satisfy every provided filter.
            let versions = database::models::Version::get_many(&project.versions, &**pool, &redis)
                .await?
                .into_iter()
                .filter(|x| {
                    let mut bool = true;
                    if let Some(version_types) = &update_data.version_types {
                        bool &= version_types
                            .iter()
                            .any(|y| y.as_str() == x.inner.version_type);
                    }
                    if let Some(loaders) = &update_data.loaders {
                        bool &= x.loaders.iter().any(|y| loaders.contains(y));
                    }
                    if let Some(loader_fields) = &update_data.loader_fields {
                        for (key, values) in loader_fields {
                            // A version without the requested field passes the
                            // filter (treated as compatible).
                            bool &= if let Some(x_vf) =
                                x.version_fields.iter().find(|y| y.field_name == *key)
                            {
                                values.iter().any(|v| x_vf.value.contains_json_value(v))
                            } else {
                                true
                            };
                        }
                    }
                    bool
                })
                .sorted();
            // `sorted()` is ascending, so `last()` picks the greatest element
            // per the version type's `Ord` — presumably the newest release;
            // confirm against the `Ord` impl.
            if let Some(first) = versions.last() {
                if !is_visible_version(&first.inner, &user_option, &pool, &redis).await? {
                    return Err(ApiError::NotFound);
                }
                return Ok(HttpResponse::Ok().json(models::projects::Version::from(first)));
            }
        }
    }
    Err(ApiError::NotFound)
}
// Requests above with multiple versions below
/// JSON body for the bulk hash routes: a list of file hashes plus an
/// optional algorithm name.
#[derive(Deserialize)]
pub struct FileHashes {
    pub algorithm: Option<String>, // Defaults to calculation based on size of hash
    pub hashes: Vec<String>,
}
/// `POST /version_files` — bulk lookup: maps each supplied file hash to the
/// version that owns it.
///
/// Authentication is optional; versions the caller cannot see are filtered
/// out, and hashes without a visible version are simply absent from the
/// response map.
pub async fn get_versions_from_hashes(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    file_data: web::Json<FileHashes>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let maybe_user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::VERSION_READ]),
    )
    .await
    .map(|x| x.1)
    .ok();
    // Fall back to a length-based algorithm guess when none is given.
    let algorithm = match file_data.algorithm.clone() {
        Some(algo) => algo,
        None => default_algorithm_from_hashes(&file_data.hashes),
    };
    let files = database::models::Version::get_files_from_hash(
        algorithm.clone(),
        &file_data.hashes,
        &**pool,
        &redis,
    )
    .await?;
    let version_ids: Vec<_> = files.iter().map(|x| x.version_id).collect();
    let visible_versions = filter_visible_versions(
        database::models::Version::get_many(&version_ids, &**pool, &redis).await?,
        &maybe_user,
        &pool,
        &redis,
    )
    .await?;
    // Build hash -> version, keyed by the requested algorithm.
    let mut by_hash = HashMap::new();
    for version in visible_versions {
        for file in files.iter().filter(|x| x.version_id == version.id.into()) {
            if let Some(hash) = file.hashes.get(&algorithm) {
                by_hash.insert(hash.clone(), version.clone());
            }
        }
    }
    Ok(HttpResponse::Ok().json(by_hash))
}
/// Resolves a set of file hashes to the projects that own them.
///
/// Responds with a `hash -> Project` map; hashes belonging to projects the
/// requester may not view are silently omitted.
pub async fn get_projects_from_hashes(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    file_data: web::Json<FileHashes>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Optional auth: anonymous callers only see public projects.
    let user_option = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::PROJECT_READ, Scopes::VERSION_READ]),
    )
    .await
    .map(|x| x.1)
    .ok();

    let algorithm = match file_data.algorithm.clone() {
        Some(algo) => algo,
        None => default_algorithm_from_hashes(&file_data.hashes),
    };

    let files = database::models::Version::get_files_from_hash(
        algorithm.clone(),
        &file_data.hashes,
        &**pool,
        &redis,
    )
    .await?;

    let project_ids: Vec<_> = files.iter().map(|f| f.project_id).collect();
    let visible_projects = filter_visible_projects(
        database::models::Project::get_many_ids(&project_ids, &**pool, &redis).await?,
        &user_option,
        &pool,
        false,
    )
    .await?;

    // Map each matched hash back to its owning (visible) project.
    let mut response = HashMap::new();
    for project in visible_projects {
        for file in files.iter().filter(|f| f.project_id == project.id.into()) {
            if let Some(hash) = file.hashes.get(&algorithm) {
                response.insert(hash.clone(), project.clone());
            }
        }
    }

    Ok(HttpResponse::Ok().json(response))
}
/// Request body for the bulk update-check route: one shared filter set
/// applied to every supplied hash.
#[derive(Deserialize)]
pub struct ManyUpdateData {
    /// Hash algorithm of `hashes`.
    /// Defaults to a guess based on the length of the supplied hashes.
    pub algorithm: Option<String>,
    pub hashes: Vec<String>,
    pub loaders: Option<Vec<String>>,
    pub game_versions: Option<Vec<String>>,
    pub version_types: Option<Vec<VersionType>>,
}
/// Bulk "update check": for every supplied file hash, find the newest version
/// of the owning project matching the requested loaders / game versions /
/// version types, and respond with a `hash -> Version` map.
///
/// NOTE(review): this route takes no user and performs no visibility
/// filtering — confirm that is intentional for update checks.
pub async fn update_files(
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    update_data: web::Json<ManyUpdateData>,
) -> Result<HttpResponse, ApiError> {
    let algorithm = update_data
        .algorithm
        .clone()
        .unwrap_or_else(|| default_algorithm_from_hashes(&update_data.hashes));
    let files = database::models::Version::get_files_from_hash(
        algorithm.clone(),
        &update_data.hashes,
        &**pool,
        &redis,
    )
    .await?;

    // TODO: de-hardcode this and actually use version fields system
    // `vf.field_id = 3` hard-codes the "game_versions" loader field; empty
    // filter arrays (cardinality = 0) mean "match everything".
    let update_version_ids = sqlx::query!(
        "
        SELECT v.id version_id, v.mod_id mod_id
        FROM mods m
        INNER JOIN versions v ON m.id = v.mod_id AND (cardinality($4::varchar[]) = 0 OR v.version_type = ANY($4))
        INNER JOIN version_fields vf ON vf.field_id = 3 AND v.id = vf.version_id
        INNER JOIN loader_field_enum_values lfev ON vf.enum_value = lfev.id AND (cardinality($2::varchar[]) = 0 OR lfev.value = ANY($2::varchar[]))
        INNER JOIN loaders_versions lv ON lv.version_id = v.id
        INNER JOIN loaders l on lv.loader_id = l.id AND (cardinality($3::varchar[]) = 0 OR l.loader = ANY($3::varchar[]))
        WHERE m.id = ANY($1)
        ORDER BY v.date_published ASC
        ",
        &files.iter().map(|x| x.project_id.0).collect::<Vec<_>>(),
        &update_data.game_versions.clone().unwrap_or_default(),
        &update_data.loaders.clone().unwrap_or_default(),
        &update_data.version_types.clone().unwrap_or_default().iter().map(|x| x.to_string()).collect::<Vec<_>>(),
    )
    .fetch(&**pool)
    // Group matching version ids by project. Rows arrive oldest-first
    // (ORDER BY ... ASC), so the last entry per project is the newest match.
    .try_fold(DashMap::new(), |acc: DashMap<_, Vec<database::models::ids::VersionId>>, m| {
        acc.entry(database::models::ProjectId(m.mod_id))
            .or_default()
            .push(database::models::VersionId(m.version_id));
        async move { Ok(acc) }
    })
    .await?;

    // Fetch only the newest matching version per project.
    let versions = database::models::Version::get_many(
        &update_version_ids
            .into_iter()
            .filter_map(|x| x.1.last().copied())
            .collect::<Vec<_>>(),
        &**pool,
        &redis,
    )
    .await?;

    // hash -> newest matching version of the same project.
    let mut response = HashMap::new();
    for file in files {
        if let Some(version) = versions
            .iter()
            .find(|x| x.inner.project_id == file.project_id)
        {
            if let Some(hash) = file.hashes.get(&algorithm) {
                response.insert(
                    hash.clone(),
                    models::projects::Version::from(version.clone()),
                );
            }
        }
    }

    Ok(HttpResponse::Ok().json(response))
}
/// A single hash plus the filters to apply when checking for *its* update.
#[derive(Serialize, Deserialize)]
pub struct FileUpdateData {
    pub hash: String,
    pub loaders: Option<Vec<String>>,
    /// Loader-field name -> accepted values (e.g. "game_versions").
    pub loader_fields: Option<HashMap<String, Vec<serde_json::Value>>>,
    pub version_types: Option<Vec<VersionType>>,
}
/// Request body for the per-hash update-check route.
#[derive(Serialize, Deserialize)]
pub struct ManyFileUpdateData {
    /// Hash algorithm of every entry in `hashes`.
    /// Defaults to a guess based on the length of the supplied hashes.
    pub algorithm: Option<String>,
    pub hashes: Vec<FileUpdateData>,
}
/// Per-hash update check: like `update_files`, but each hash carries its own
/// loader / loader-field / version-type filters.
///
/// For each hash, picks the newest (sorted, `.last()`) version of the owning
/// project that passes that hash's filters, subject to a visibility check for
/// the optionally-authenticated user. Responds with a `hash -> Version` map.
pub async fn update_individual_files(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    update_data: web::Json<ManyFileUpdateData>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let user_option = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::VERSION_READ]),
    )
    .await
    .map(|x| x.1)
    .ok();
    let algorithm = update_data.algorithm.clone().unwrap_or_else(|| {
        default_algorithm_from_hashes(
            &update_data
                .hashes
                .iter()
                .map(|x| x.hash.clone())
                .collect::<Vec<_>>(),
        )
    });
    let files = database::models::Version::get_files_from_hash(
        algorithm.clone(),
        &update_data
            .hashes
            .iter()
            .map(|x| x.hash.clone())
            .collect::<Vec<_>>(),
        &**pool,
        &redis,
    )
    .await?;
    let projects = database::models::Project::get_many_ids(
        &files.iter().map(|x| x.project_id).collect::<Vec<_>>(),
        &**pool,
        &redis,
    )
    .await?;
    // Load every version of every affected project once; filter in memory.
    let all_versions = database::models::Version::get_many(
        &projects
            .iter()
            .flat_map(|x| x.versions.clone())
            .collect::<Vec<_>>(),
        &**pool,
        &redis,
    )
    .await?;

    let mut response = HashMap::new();

    for project in projects {
        for file in files.iter().filter(|x| x.project_id == project.inner.id) {
            if let Some(hash) = file.hashes.get(&algorithm) {
                if let Some(query_file) = update_data.hashes.iter().find(|x| &x.hash == hash) {
                    let version = all_versions
                        .iter()
                        .filter(|x| x.inner.project_id == file.project_id)
                        .filter(|x| {
                            // Every present filter must match; absent filters
                            // are treated as "match all".
                            let mut bool = true;
                            if let Some(version_types) = &query_file.version_types {
                                bool &= version_types
                                    .iter()
                                    .any(|y| y.as_str() == x.inner.version_type);
                            }
                            if let Some(loaders) = &query_file.loaders {
                                bool &= x.loaders.iter().any(|y| loaders.contains(y));
                            }
                            if let Some(loader_fields) = &query_file.loader_fields {
                                for (key, values) in loader_fields {
                                    // Versions lacking the field entirely pass
                                    // the filter (the `else` arm).
                                    bool &= if let Some(x_vf) =
                                        x.version_fields.iter().find(|y| y.field_name == *key)
                                    {
                                        values.iter().any(|v| x_vf.value.contains_json_value(v))
                                    } else {
                                        true
                                    };
                                }
                            }
                            bool
                        })
                        .sorted()
                        .last();

                    if let Some(version) = version {
                        if is_visible_version(&version.inner, &user_option, &pool, &redis).await? {
                            response.insert(
                                hash.clone(),
                                models::projects::Version::from(version.clone()),
                            );
                        }
                    }
                }
            }
        }
    }

    Ok(HttpResponse::Ok().json(response))
}
// under /api/v1/version_file/{hash}
/// Deletes a single file (and its hash rows) from a version, looked up by hash.
///
/// Requires `VERSION_WRITE` scope; non-admin callers must hold the
/// `DELETE_VERSION` project permission (via team or organization membership).
/// Deleting the last remaining file of a version is rejected.
pub async fn delete_file(
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    hash_query: web::Query<HashQuery>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::VERSION_WRITE]),
    )
    .await?
    .1;
    let hash = info.into_inner().0.to_lowercase();
    let algorithm = hash_query
        .algorithm
        .clone()
        .unwrap_or_else(|| default_algorithm_from_hashes(&[hash.clone()]));
    let file = database::models::Version::get_file_from_hash(
        algorithm.clone(),
        hash,
        hash_query.version_id.map(|x| x.into()),
        &**pool,
        &redis,
    )
    .await?;
    if let Some(row) = file {
        if !user.role.is_admin() {
            // Resolve permissions via direct team membership on the owning
            // version, then via organization membership if applicable.
            let team_member = database::models::TeamMember::get_from_user_id_version(
                row.version_id,
                user.id.into(),
                &**pool,
            )
            .await
            .map_err(ApiError::Database)?;

            let organization =
                database::models::Organization::get_associated_organization_project_id(
                    row.project_id,
                    &**pool,
                )
                .await
                .map_err(ApiError::Database)?;

            let organization_team_member = if let Some(organization) = &organization {
                database::models::TeamMember::get_from_user_id_organization(
                    organization.id,
                    user.id.into(),
                    false,
                    &**pool,
                )
                .await
                .map_err(ApiError::Database)?
            } else {
                None
            };

            let permissions = ProjectPermissions::get_permissions_by_role(
                &user.role,
                &team_member,
                &organization_team_member,
            )
            .unwrap_or_default();

            if !permissions.contains(ProjectPermissions::DELETE_VERSION) {
                return Err(ApiError::CustomAuthentication(
                    "You don't have permission to delete this file!".to_string(),
                ));
            }
        }

        let version = database::models::Version::get(row.version_id, &**pool, &redis).await?;
        if let Some(version) = version {
            // A version must always retain at least one file.
            if version.files.len() < 2 {
                return Err(ApiError::InvalidInput(
                    "Versions must have at least one file uploaded to them".to_string(),
                ));
            }

            // NOTE(review): cache is cleared before the delete commits; a
            // concurrent read could repopulate it with stale data — confirm
            // whether a post-commit clear is also needed.
            database::models::Version::clear_cache(&version, &redis).await?;
        }

        let mut transaction = pool.begin().await?;

        // Delete the hash rows first (they reference the file row).
        sqlx::query!(
            "
            DELETE FROM hashes
            WHERE file_id = $1
            ",
            row.id.0
        )
        .execute(&mut *transaction)
        .await?;

        sqlx::query!(
            "
            DELETE FROM files
            WHERE files.id = $1
            ",
            row.id.0,
        )
        .execute(&mut *transaction)
        .await?;

        transaction.commit().await?;

        Ok(HttpResponse::NoContent().body(""))
    } else {
        Err(ApiError::NotFound)
    }
}
/// JSON body accompanying the 307 redirect from the download route, for
/// clients that do not follow redirects.
#[derive(Serialize, Deserialize)]
pub struct DownloadRedirect {
    pub url: String,
}
// under /api/v1/version_file/{hash}/download
/// Looks up a file by hash and issues a temporary redirect to its CDN URL.
///
/// The target URL is also echoed in the JSON body. An unknown hash, a missing
/// version, and a version hidden from the requester all surface as 404.
pub async fn download_version(
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    hash_query: web::Query<HashQuery>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // Optional auth: visibility of the owning version is checked below.
    let user_option = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::VERSION_READ]),
    )
    .await
    .map(|x| x.1)
    .ok();

    let hash = info.into_inner().0.to_lowercase();
    let algorithm = match hash_query.algorithm.clone() {
        Some(algo) => algo,
        None => default_algorithm_from_hashes(&[hash.clone()]),
    };

    let file = database::models::Version::get_file_from_hash(
        algorithm.clone(),
        hash,
        hash_query.version_id.map(|x| x.into()),
        &**pool,
        &redis,
    )
    .await?;

    let file = match file {
        Some(file) => file,
        None => return Err(ApiError::NotFound),
    };
    let version = match database::models::Version::get(file.version_id, &**pool, &redis).await? {
        Some(version) => version,
        None => return Err(ApiError::NotFound),
    };
    if !is_visible_version(&version.inner, &user_option, &pool, &redis).await? {
        return Err(ApiError::NotFound);
    }

    Ok(HttpResponse::TemporaryRedirect()
        .append_header(("Location", &*file.url))
        .json(DownloadRedirect { url: file.url }))
}

View File

@@ -0,0 +1,876 @@
use std::collections::HashMap;
use super::ApiError;
use crate::auth::checks::{filter_visible_versions, is_visible_project, is_visible_version};
use crate::auth::get_user_from_headers;
use crate::database;
use crate::database::models::loader_fields::{
self, LoaderField, LoaderFieldEnumValue, VersionField,
};
use crate::database::models::version_item::{DependencyBuilder, LoaderVersion};
use crate::database::models::{image_item, Organization};
use crate::database::redis::RedisPool;
use crate::models;
use crate::models::ids::base62_impl::parse_base62;
use crate::models::ids::VersionId;
use crate::models::images::ImageContext;
use crate::models::pats::Scopes;
use crate::models::projects::{skip_nulls, Loader};
use crate::models::projects::{Dependency, FileType, VersionStatus, VersionType};
use crate::models::teams::ProjectPermissions;
use crate::queue::session::AuthQueue;
use crate::search::indexing::remove_documents;
use crate::search::SearchConfig;
use crate::util::img;
use crate::util::validate::validation_errors_to_string;
use actix_web::{web, HttpRequest, HttpResponse};
use itertools::Itertools;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use validator::Validate;
/// Registers the version routes (creation, bulk listing, retrieval, editing,
/// deletion, and per-version file upload) on the actix service config.
pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.route(
        "version",
        web::post().to(super::version_creation::version_create),
    );
    cfg.route("versions", web::get().to(versions_get));
    cfg.service(
        web::scope("version")
            .route("{id}", web::get().to(version_get))
            .route("{id}", web::patch().to(version_edit))
            .route("{id}", web::delete().to(version_delete))
            .route(
                "{version_id}/file",
                web::post().to(super::version_creation::upload_file_to_version),
            ),
    );
}
// Given a project ID/slug and a version slug
/// GET a version addressed as `{project}/{version}`; thin wrapper around
/// [`version_project_get_helper`].
pub async fn version_project_get(
    req: HttpRequest,
    info: web::Path<(String, String)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    version_project_get_helper(req, info.into_inner(), pool, redis, session_queue).await
}
/// Resolves a version by `(project id/slug, version id-or-number)`.
///
/// The second component may be either a base62 version id or a version
/// number string; both are tried. Hidden projects/versions yield 404.
pub async fn version_project_get_helper(
    req: HttpRequest,
    id: (String, String),
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let project_data = database::models::Project::get(&id.0, &**pool, &redis).await?;

    let user_option = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::PROJECT_READ, Scopes::VERSION_READ]),
    )
    .await
    .map(|x| x.1)
    .ok();

    if let Some(project) = project_data {
        if !is_visible_project(&project.inner, &user_option, &pool, false).await? {
            return Err(ApiError::NotFound);
        }

        let versions =
            database::models::Version::get_many(&project.versions, &**pool, &redis).await?;

        // Match by numeric id (if the slug parses as base62) or version number.
        let numeric_id = parse_base62(&id.1).ok();
        let matched = versions.into_iter().find(|candidate| {
            numeric_id == Some(candidate.inner.id.0 as u64)
                || candidate.inner.version_number == id.1
        });

        if let Some(version) = matched {
            if is_visible_version(&version.inner, &user_option, &pool, &redis).await? {
                return Ok(HttpResponse::Ok().json(models::projects::Version::from(version)));
            }
        }
    }

    Err(ApiError::NotFound)
}
/// Query string for bulk version lookup; `ids` is a JSON-encoded array of
/// version ids.
#[derive(Serialize, Deserialize)]
pub struct VersionIds {
    pub ids: String,
}
/// Bulk version lookup. Versions the requester cannot see are filtered out
/// of the response rather than producing an error.
pub async fn versions_get(
    req: HttpRequest,
    web::Query(ids): web::Query<VersionIds>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    // `ids` is a JSON-encoded array of API version ids; convert to DB ids.
    let requested_ids: Vec<database::models::VersionId> =
        serde_json::from_str::<Vec<models::ids::VersionId>>(&ids.ids)?
            .into_iter()
            .map(Into::into)
            .collect();

    let versions_data =
        database::models::Version::get_many(&requested_ids, &**pool, &redis).await?;

    let user_option = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::VERSION_READ]),
    )
    .await
    .map(|x| x.1)
    .ok();

    let versions = filter_visible_versions(versions_data, &user_option, &pool, &redis).await?;

    Ok(HttpResponse::Ok().json(versions))
}
/// GET a single version by id; thin wrapper around [`version_get_helper`].
pub async fn version_get(
    req: HttpRequest,
    info: web::Path<(models::ids::VersionId,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    version_get_helper(req, info.into_inner().0, pool, redis, session_queue).await
}
/// Fetches a version by id, returning 404 when it does not exist or is not
/// visible to the (optionally authenticated) requester.
pub async fn version_get_helper(
    req: HttpRequest,
    id: models::ids::VersionId,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let version_data = database::models::Version::get(id.into(), &**pool, &redis).await?;

    let user_option = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::VERSION_READ]),
    )
    .await
    .map(|x| x.1)
    .ok();

    // Missing and hidden versions are indistinguishable to the caller (404).
    let data = match version_data {
        Some(data) => data,
        None => return Err(ApiError::NotFound),
    };
    if !is_visible_version(&data.inner, &user_option, &pool, &redis).await? {
        return Err(ApiError::NotFound);
    }

    Ok(HttpResponse::Ok().json(models::projects::Version::from(data)))
}
/// PATCH body for editing a version. Every field is optional; only the
/// fields present in the request are updated.
#[derive(Serialize, Deserialize, Validate, Default, Debug)]
pub struct EditVersion {
    /// Display name (1-64 chars, validated).
    #[validate(
        length(min = 1, max = 64),
        custom(function = "crate::util::validate::validate_name")
    )]
    pub name: Option<String>,
    /// URL-safe version number (1-32 chars).
    #[validate(
        length(min = 1, max = 32),
        regex = "crate::util::validate::RE_URL_SAFE"
    )]
    pub version_number: Option<String>,
    #[validate(length(max = 65536))]
    pub changelog: Option<String>,
    pub version_type: Option<models::projects::VersionType>,
    /// Replaces the version's dependency list wholesale.
    #[validate(
        length(min = 0, max = 4096),
        custom(function = "crate::util::validate::validate_deps")
    )]
    pub dependencies: Option<Vec<Dependency>>,
    pub loaders: Option<Vec<Loader>>,
    pub featured: Option<bool>,
    /// Moderator-only override of the download counter.
    pub downloads: Option<u32>,
    pub status: Option<VersionStatus>,
    /// Per-file type overrides, addressed by (algorithm, hash).
    pub file_types: Option<Vec<EditVersionFileType>>,
    // `double_option` distinguishes "field absent" (None) from
    // "explicitly set to null" (Some(None)).
    #[serde(
        default,
        skip_serializing_if = "Option::is_none",
        with = "::serde_with::rust::double_option"
    )]
    pub ordering: Option<Option<i32>>,

    // Flattened loader fields
    // All other fields are loader-specific VersionFields
    // These are flattened during serialization
    #[serde(deserialize_with = "skip_nulls")]
    #[serde(flatten)]
    pub fields: HashMap<String, serde_json::Value>,
}
/// Identifies a file by hash and sets (or clears, via `None`) its file type.
#[derive(Serialize, Deserialize, Debug)]
pub struct EditVersionFileType {
    pub algorithm: String,
    pub hash: String,
    pub file_type: Option<FileType>,
}
/// PATCH handler for editing a version: deserializes the raw JSON body into
/// an [`EditVersion`] (so flattened loader fields are captured) and delegates
/// to [`version_edit_helper`].
pub async fn version_edit(
    req: HttpRequest,
    info: web::Path<(VersionId,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    new_version: web::Json<serde_json::Value>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let parsed_version: EditVersion = serde_json::from_value(new_version.into_inner())?;
    version_edit_helper(
        req,
        info.into_inner(),
        pool,
        redis,
        parsed_version,
        session_queue,
    )
    .await
}
/// Applies an [`EditVersion`] patch to an existing version.
///
/// Requires `VERSION_WRITE` scope and the `UPLOAD_VERSION` project permission
/// (resolved through team or organization membership). Each present field is
/// written inside a single transaction; loader-field edits are validated
/// against the loader-field registry, and the version/project caches are
/// cleared on success.
///
/// Fixes relative to the previous revision:
/// - the "file with hash ... does not exist" error now reports the hash
///   instead of the algorithm;
/// - the project download-count delta uses signed arithmetic, so lowering a
///   version's downloads no longer underflows (previously a debug-build panic).
pub async fn version_edit_helper(
    req: HttpRequest,
    info: (VersionId,),
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    new_version: EditVersion,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::VERSION_WRITE]),
    )
    .await?
    .1;

    new_version
        .validate()
        .map_err(|err| ApiError::Validation(validation_errors_to_string(err, None)))?;

    let version_id = info.0;
    let id = version_id.into();

    let result = database::models::Version::get(id, &**pool, &redis).await?;

    if let Some(version_item) = result {
        // Resolve the caller's project permissions through direct team
        // membership and, if the project belongs to one, the organization.
        let team_member = database::models::TeamMember::get_from_user_id_project(
            version_item.inner.project_id,
            user.id.into(),
            false,
            &**pool,
        )
        .await?;

        let organization = Organization::get_associated_organization_project_id(
            version_item.inner.project_id,
            &**pool,
        )
        .await?;

        let organization_team_member = if let Some(organization) = &organization {
            database::models::TeamMember::get_from_user_id(
                organization.team_id,
                user.id.into(),
                &**pool,
            )
            .await?
        } else {
            None
        };

        let permissions = ProjectPermissions::get_permissions_by_role(
            &user.role,
            &team_member,
            &organization_team_member,
        );

        if let Some(perms) = permissions {
            if !perms.contains(ProjectPermissions::UPLOAD_VERSION) {
                return Err(ApiError::CustomAuthentication(
                    "You do not have the permissions to edit this version!".to_string(),
                ));
            }

            let mut transaction = pool.begin().await?;

            if let Some(name) = &new_version.name {
                sqlx::query!(
                    "
                    UPDATE versions
                    SET name = $1
                    WHERE (id = $2)
                    ",
                    name.trim(),
                    id as database::models::ids::VersionId,
                )
                .execute(&mut *transaction)
                .await?;
            }

            if let Some(number) = &new_version.version_number {
                sqlx::query!(
                    "
                    UPDATE versions
                    SET version_number = $1
                    WHERE (id = $2)
                    ",
                    number,
                    id as database::models::ids::VersionId,
                )
                .execute(&mut *transaction)
                .await?;
            }

            if let Some(version_type) = &new_version.version_type {
                sqlx::query!(
                    "
                    UPDATE versions
                    SET version_type = $1
                    WHERE (id = $2)
                    ",
                    version_type.as_str(),
                    id as database::models::ids::VersionId,
                )
                .execute(&mut *transaction)
                .await?;
            }

            if let Some(dependencies) = &new_version.dependencies {
                // Dependencies are replaced wholesale: delete, then re-insert.
                sqlx::query!(
                    "
                    DELETE FROM dependencies WHERE dependent_id = $1
                    ",
                    id as database::models::ids::VersionId,
                )
                .execute(&mut *transaction)
                .await?;

                let builders = dependencies
                    .iter()
                    .map(|x| database::models::version_item::DependencyBuilder {
                        project_id: x.project_id.map(|x| x.into()),
                        version_id: x.version_id.map(|x| x.into()),
                        file_name: x.file_name.clone(),
                        dependency_type: x.dependency_type.to_string(),
                    })
                    .collect::<Vec<database::models::version_item::DependencyBuilder>>();
                DependencyBuilder::insert_many(builders, version_item.inner.id, &mut transaction)
                    .await?;
            }

            if !new_version.fields.is_empty() {
                let version_fields_names = new_version
                    .fields
                    .keys()
                    .map(|x| x.to_string())
                    .collect::<Vec<String>>();

                let all_loaders = loader_fields::Loader::list(&mut *transaction, &redis).await?;
                let loader_ids = version_item
                    .loaders
                    .iter()
                    .filter_map(|x| all_loaders.iter().find(|y| &y.loader == x).map(|y| y.id))
                    .collect_vec();

                // Only fields that exist for this version's loaders are
                // editable; everything else errors below in check_parse.
                let loader_fields = LoaderField::get_fields(&loader_ids, &mut *transaction, &redis)
                    .await?
                    .into_iter()
                    .filter(|lf| version_fields_names.contains(&lf.field))
                    .collect::<Vec<LoaderField>>();

                // Delete only the fields being rewritten, not all of them.
                let loader_field_ids =
                    loader_fields.iter().map(|lf| lf.id.0).collect::<Vec<i32>>();
                sqlx::query!(
                    "
                    DELETE FROM version_fields
                    WHERE version_id = $1
                    AND field_id = ANY($2)
                    ",
                    id as database::models::ids::VersionId,
                    &loader_field_ids
                )
                .execute(&mut *transaction)
                .await?;

                let mut loader_field_enum_values = LoaderFieldEnumValue::list_many_loader_fields(
                    &loader_fields,
                    &mut *transaction,
                    &redis,
                )
                .await?;

                let mut version_fields = Vec::new();
                for (vf_name, vf_value) in new_version.fields {
                    let loader_field = loader_fields
                        .iter()
                        .find(|lf| lf.field == vf_name)
                        .ok_or_else(|| {
                            ApiError::InvalidInput(format!(
                                "Loader field '{vf_name}' does not exist for any loaders supplied."
                            ))
                        })?;
                    let enum_variants = loader_field_enum_values
                        .remove(&loader_field.id)
                        .unwrap_or_default();
                    let vf: VersionField = VersionField::check_parse(
                        version_id.into(),
                        loader_field.clone(),
                        vf_value.clone(),
                        enum_variants,
                    )
                    .map_err(ApiError::InvalidInput)?;
                    version_fields.push(vf);
                }
                VersionField::insert_many(version_fields, &mut transaction).await?;
            }

            if let Some(loaders) = &new_version.loaders {
                // Loaders are replaced wholesale as well.
                sqlx::query!(
                    "
                    DELETE FROM loaders_versions WHERE version_id = $1
                    ",
                    id as database::models::ids::VersionId,
                )
                .execute(&mut *transaction)
                .await?;

                let mut loader_versions = Vec::new();
                for loader in loaders {
                    let loader_id = database::models::loader_fields::Loader::get_id(
                        &loader.0,
                        &mut *transaction,
                        &redis,
                    )
                    .await?
                    .ok_or_else(|| {
                        ApiError::InvalidInput("No database entry for loader provided.".to_string())
                    })?;
                    loader_versions.push(LoaderVersion::new(loader_id, id));
                }
                LoaderVersion::insert_many(loader_versions, &mut transaction).await?;

                // Loader changes affect project-level aggregates.
                crate::database::models::Project::clear_cache(
                    version_item.inner.project_id,
                    None,
                    None,
                    &redis,
                )
                .await?;
            }

            if let Some(featured) = &new_version.featured {
                sqlx::query!(
                    "
                    UPDATE versions
                    SET featured = $1
                    WHERE (id = $2)
                    ",
                    featured,
                    id as database::models::ids::VersionId,
                )
                .execute(&mut *transaction)
                .await?;
            }

            if let Some(body) = &new_version.changelog {
                sqlx::query!(
                    "
                    UPDATE versions
                    SET changelog = $1
                    WHERE (id = $2)
                    ",
                    body,
                    id as database::models::ids::VersionId,
                )
                .execute(&mut *transaction)
                .await?;
            }

            if let Some(downloads) = &new_version.downloads {
                // Only moderators may rewrite download counts.
                if !user.role.is_mod() {
                    return Err(ApiError::CustomAuthentication(
                        "You don't have permission to set the downloads of this mod".to_string(),
                    ));
                }

                sqlx::query!(
                    "
                    UPDATE versions
                    SET downloads = $1
                    WHERE (id = $2)
                    ",
                    *downloads as i32,
                    id as database::models::ids::VersionId,
                )
                .execute(&mut *transaction)
                .await?;

                // Keep the project-level counter in sync. Signed arithmetic so
                // that *lowering* the count yields a negative delta instead of
                // underflowing u32 subtraction (which panicked in debug builds;
                // `diff as i32` preserves the previous release-build result).
                let diff = (*downloads as i64) - (version_item.inner.downloads as i64);

                sqlx::query!(
                    "
                    UPDATE mods
                    SET downloads = downloads + $1
                    WHERE (id = $2)
                    ",
                    diff as i32,
                    version_item.inner.project_id as database::models::ids::ProjectId,
                )
                .execute(&mut *transaction)
                .await?;
            }

            if let Some(status) = &new_version.status {
                if !status.can_be_requested() {
                    return Err(ApiError::InvalidInput(
                        "The requested status cannot be set!".to_string(),
                    ));
                }

                sqlx::query!(
                    "
                    UPDATE versions
                    SET status = $1
                    WHERE (id = $2)
                    ",
                    status.as_str(),
                    id as database::models::ids::VersionId,
                )
                .execute(&mut *transaction)
                .await?;
            }

            if let Some(file_types) = &new_version.file_types {
                for file_type in file_types {
                    let result = sqlx::query!(
                        "
                        SELECT f.id id FROM hashes h
                        INNER JOIN files f ON h.file_id = f.id
                        WHERE h.algorithm = $2 AND h.hash = $1
                        ",
                        file_type.hash.as_bytes(),
                        file_type.algorithm
                    )
                    .fetch_optional(&**pool)
                    .await?
                    .ok_or_else(|| {
                        // Report the missing *hash* (this message previously
                        // printed the algorithm by mistake).
                        ApiError::InvalidInput(format!(
                            "Specified file with hash {} does not exist.",
                            file_type.hash
                        ))
                    })?;

                    sqlx::query!(
                        "
                        UPDATE files
                        SET file_type = $2
                        WHERE (id = $1)
                        ",
                        result.id,
                        file_type.file_type.as_ref().map(|x| x.as_str()),
                    )
                    .execute(&mut *transaction)
                    .await?;
                }
            }

            if let Some(ordering) = &new_version.ordering {
                sqlx::query!(
                    "
                    UPDATE versions
                    SET ordering = $1
                    WHERE (id = $2)
                    ",
                    ordering.to_owned() as Option<i32>,
                    id as database::models::ids::VersionId,
                )
                .execute(&mut *transaction)
                .await?;
            }

            // Delete any images no longer referenced by the changelog.
            let checkable_strings: Vec<&str> = vec![&new_version.changelog]
                .into_iter()
                .filter_map(|x| x.as_ref().map(|y| y.as_str()))
                .collect();
            let context = ImageContext::Version {
                version_id: Some(version_item.inner.id.into()),
            };
            img::delete_unused_images(context, checkable_strings, &mut transaction, &redis)
                .await?;

            transaction.commit().await?;

            database::models::Version::clear_cache(&version_item, &redis).await?;
            database::models::Project::clear_cache(
                version_item.inner.project_id,
                None,
                Some(true),
                &redis,
            )
            .await?;

            Ok(HttpResponse::NoContent().body(""))
        } else {
            Err(ApiError::CustomAuthentication(
                "You do not have permission to edit this version!".to_string(),
            ))
        }
    } else {
        Err(ApiError::NotFound)
    }
}
/// Query-string filters for `version_list`.
#[derive(Serialize, Deserialize)]
pub struct VersionListFilters {
    /// JSON-encoded array of loader names.
    pub loaders: Option<String>,
    pub featured: Option<bool>,
    pub version_type: Option<VersionType>,
    pub limit: Option<usize>,
    pub offset: Option<usize>,
    /*
       Loader fields to filter with:
       "game_versions": ["1.16.5", "1.17"]

       Returns if it matches any of the values
    */
    pub loader_fields: Option<String>,
}
/// Lists a project's versions with optional loader / version-type /
/// loader-field / featured filters and pagination.
///
/// Note: `offset`/`limit` are applied *before* the loader and type filters,
/// so a page may contain fewer than `limit` matching versions.
pub async fn version_list(
    req: HttpRequest,
    info: web::Path<(String,)>,
    web::Query(filters): web::Query<VersionListFilters>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
    let string = info.into_inner().0;
    let result = database::models::Project::get(&string, &**pool, &redis).await?;

    let user_option = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::PROJECT_READ, Scopes::VERSION_READ]),
    )
    .await
    .map(|x| x.1)
    .ok();

    if let Some(project) = result {
        if !is_visible_project(&project.inner, &user_option, &pool, false).await? {
            return Err(ApiError::NotFound);
        }

        // Filters arrive as JSON-encoded strings; malformed input silently
        // degrades to "no filter" via unwrap_or_default.
        let loader_field_filters = filters.loader_fields.as_ref().map(|x| {
            serde_json::from_str::<HashMap<String, Vec<serde_json::Value>>>(x).unwrap_or_default()
        });
        let loader_filters = filters
            .loaders
            .as_ref()
            .map(|x| serde_json::from_str::<Vec<String>>(x).unwrap_or_default());

        let mut versions = database::models::Version::get_many(&project.versions, &**pool, &redis)
            .await?
            .into_iter()
            .skip(filters.offset.unwrap_or(0))
            .take(filters.limit.unwrap_or(usize::MAX))
            .filter(|x| {
                let mut bool = true;

                if let Some(version_type) = filters.version_type {
                    bool &= &*x.inner.version_type == version_type.as_str();
                }
                if let Some(loaders) = &loader_filters {
                    bool &= x.loaders.iter().any(|y| loaders.contains(y));
                }
                if let Some(loader_fields) = &loader_field_filters {
                    for (key, values) in loader_fields {
                        // Versions lacking the field entirely pass the filter.
                        bool &= if let Some(x_vf) =
                            x.version_fields.iter().find(|y| y.field_name == *key)
                        {
                            values.iter().any(|v| x_vf.value.contains_json_value(v))
                        } else {
                            true
                        };
                    }
                }
                bool
            })
            .collect::<Vec<_>>();

        let mut response = versions
            .iter()
            .filter(|version| {
                filters
                    .featured
                    .map(|featured| featured == version.inner.featured)
                    .unwrap_or(true)
            })
            .cloned()
            .collect::<Vec<_>>();

        versions.sort_by(|a, b| b.inner.date_published.cmp(&a.inner.date_published));

        // Attempt to populate versions with "auto featured" versions
        if response.is_empty() && !versions.is_empty() && filters.featured.unwrap_or(false) {
            // TODO: This is a bandaid fix for detecting auto-featured versions.
            // In the future, not all versions will have 'game_versions' fields, so this will need to be changed.
            let (loaders, game_versions) = futures::future::try_join(
                database::models::loader_fields::Loader::list(&**pool, &redis),
                database::models::legacy_loader_fields::MinecraftGameVersion::list(
                    None,
                    Some(true),
                    &**pool,
                    &redis,
                ),
            )
            .await?;

            // For every (game version, loader) pair, feature the newest
            // version supporting that combination.
            let mut joined_filters = Vec::new();
            for game_version in &game_versions {
                for loader in &loaders {
                    joined_filters.push((game_version, loader))
                }
            }

            joined_filters.into_iter().for_each(|filter| {
                versions
                    .iter()
                    .find(|version| {
                        // TODO: This is the bandaid fix for detecting auto-featured versions.
                        let game_versions = version
                            .version_fields
                            .iter()
                            .find(|vf| vf.field_name == "game_versions")
                            .map(|vf| vf.value.clone())
                            .map(|v| v.as_strings())
                            .unwrap_or_default();
                        game_versions.contains(&filter.0.version)
                            && version.loaders.contains(&filter.1.loader)
                    })
                    .map(|version| response.push(version.clone()))
                    .unwrap_or(());
            });

            // Still nothing auto-featured: fall back to every version.
            if response.is_empty() {
                versions
                    .into_iter()
                    .for_each(|version| response.push(version));
            }
        }

        response.sort_by(|a, b| b.inner.date_published.cmp(&a.inner.date_published));
        response.dedup_by(|a, b| a.inner.id == b.inner.id);

        let response = filter_visible_versions(response, &user_option, &pool, &redis).await?;

        Ok(HttpResponse::Ok().json(response))
    } else {
        Err(ApiError::NotFound)
    }
}
/// Deletes a version and its associated images.
///
/// Requires `VERSION_DELETE` scope; non-admin callers must hold the
/// `DELETE_VERSION` project permission. Also removes the version from the
/// search index and clears the project cache.
pub async fn version_delete(
    req: HttpRequest,
    info: web::Path<(VersionId,)>,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
    search_config: web::Data<SearchConfig>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::VERSION_DELETE]),
    )
    .await?
    .1;
    let id = info.into_inner().0;

    let version = database::models::Version::get(id.into(), &**pool, &redis)
        .await?
        .ok_or_else(|| {
            ApiError::InvalidInput("The specified version does not exist!".to_string())
        })?;

    if !user.role.is_admin() {
        // Resolve permissions via team membership, then organization.
        let team_member = database::models::TeamMember::get_from_user_id_project(
            version.inner.project_id,
            user.id.into(),
            false,
            &**pool,
        )
        .await
        .map_err(ApiError::Database)?;

        let organization =
            Organization::get_associated_organization_project_id(version.inner.project_id, &**pool)
                .await?;

        let organization_team_member = if let Some(organization) = &organization {
            database::models::TeamMember::get_from_user_id(
                organization.team_id,
                user.id.into(),
                &**pool,
            )
            .await?
        } else {
            None
        };

        let permissions = ProjectPermissions::get_permissions_by_role(
            &user.role,
            &team_member,
            &organization_team_member,
        )
        .unwrap_or_default();

        if !permissions.contains(ProjectPermissions::DELETE_VERSION) {
            return Err(ApiError::CustomAuthentication(
                "You do not have permission to delete versions in this team".to_string(),
            ));
        }
    }

    let mut transaction = pool.begin().await?;

    // Remove every image uploaded in this version's context before the
    // version itself.
    let context = ImageContext::Version {
        version_id: Some(version.inner.id.into()),
    };
    let uploaded_images =
        database::models::Image::get_many_contexted(context, &mut transaction).await?;
    for image in uploaded_images {
        image_item::Image::remove(image.id, &mut transaction, &redis).await?;
    }

    let result =
        database::models::Version::remove_full(version.inner.id, &redis, &mut transaction).await?;

    transaction.commit().await?;

    remove_documents(&[version.inner.id.into()], &search_config).await?;
    database::models::Project::clear_cache(version.inner.project_id, None, Some(true), &redis)
        .await?;

    if result.is_some() {
        Ok(HttpResponse::NoContent().body(""))
    } else {
        Err(ApiError::NotFound)
    }
}