Final V2 Changes (#212)

* Redo dependencies, add rejection reasons, make notifications more readable

* Fix errors, add dependency route, finish PR

* Fix clippy errors
This commit is contained in:
Geometrically
2021-06-16 09:05:35 -07:00
committed by GitHub
parent 2a4caa856e
commit d2c2503cfa
39 changed files with 2365 additions and 1303 deletions

6
.env
View File

@@ -1,9 +1,15 @@
DEBUG=true DEBUG=true
RUST_LOG=info,sqlx::query=warn RUST_LOG=info,sqlx::query=warn
SITE_URL=https://modrinth.com
CDN_URL=https://cdn.modrinth.com CDN_URL=https://cdn.modrinth.com
MODERATION_DISCORD_WEBHOOK=
CLOUDFLARE_INTEGRATION=false
DATABASE_URL=postgresql://labrinth:labrinth@localhost/labrinth DATABASE_URL=postgresql://labrinth:labrinth@localhost/labrinth
DATABASE_MIN_CONNECTIONS=0
DATABASE_MAX_CONNECTIONS=16
MEILISEARCH_ADDR=http://localhost:7700 MEILISEARCH_ADDR=http://localhost:7700
MEILISEARCH_KEY=modrinth MEILISEARCH_KEY=modrinth

View File

@@ -0,0 +1,18 @@
INSERT INTO statuses (status) VALUES ('archived');
ALTER TABLE notifications
ADD COLUMN type varchar(256);
ALTER TABLE mods
ADD COLUMN rejection_reason varchar(2000),
ADD COLUMN rejection_body varchar(65536);
DROP TABLE dependencies;
CREATE TABLE dependencies (
id serial PRIMARY KEY,
dependent_id bigint REFERENCES versions ON UPDATE CASCADE NOT NULL,
dependency_type varchar(255) NOT NULL,
dependency_id bigint REFERENCES versions ON UPDATE CASCADE,
mod_dependency_id bigint REFERENCES mods ON UPDATE CASCADE
);

File diff suppressed because it is too large Load Diff

View File

@@ -2,7 +2,7 @@
//pub mod project_query_cache; //pub mod project_query_cache;
#[macro_export] #[macro_export]
macro_rules! generate_cache { macro_rules! generate_cache {
($name:ident,$id:ty, $val:ty, $cache_name:ident, $mod_name:ident, $getter_name:ident, $setter_name:ident) => { ($name:ident,$id:ty, $val:ty, $cache_name:ident, $mod_name:ident, $getter_name:ident, $setter_name:ident, $remover_name:ident) => {
pub mod $mod_name { pub mod $mod_name {
use cached::async_mutex::Mutex; use cached::async_mutex::Mutex;
use cached::{Cached, SizedCache}; use cached::{Cached, SizedCache};
@@ -20,6 +20,10 @@ macro_rules! generate_cache {
let mut cache = $cache_name.lock().await; let mut cache = $cache_name.lock().await;
Cached::cache_set(&mut *cache, id, val.clone()); Cached::cache_set(&mut *cache, id, val.clone());
} }
pub async fn $remover_name<'a>(id: $id) {
let mut cache = $cache_name.lock().await;
Cached::cache_remove(&mut *cache, &id);
}
} }
}; };
} }
@@ -31,7 +35,8 @@ generate_cache!(
PROJECT_CACHE, PROJECT_CACHE,
project_cache, project_cache,
get_cache_project, get_cache_project,
set_cache_project set_cache_project,
remove_cache_project
); );
generate_cache!( generate_cache!(
query_project, query_project,
@@ -40,5 +45,6 @@ generate_cache!(
QUERY_PROJECT_CACHE, QUERY_PROJECT_CACHE,
query_project_cache, query_project_cache,
get_cache_query_project, get_cache_query_project,
set_cache_query_project set_cache_query_project,
remove_cache_query_project
); );

View File

@@ -1,4 +1,4 @@
mod cache; pub mod cache;
pub mod models; pub mod models;
mod postgres_database; mod postgres_database;
pub use models::Project; pub use models::Project;

View File

@@ -113,7 +113,7 @@ pub struct TeamId(pub i64);
#[sqlx(transparent)] #[sqlx(transparent)]
pub struct TeamMemberId(pub i64); pub struct TeamMemberId(pub i64);
#[derive(Copy, Clone, Debug, Type)] #[derive(Copy, Clone, Debug, Type, PartialEq)]
#[sqlx(transparent)] #[sqlx(transparent)]
pub struct ProjectId(pub i64); pub struct ProjectId(pub i64);
#[derive(Copy, Clone, Debug, Type)] #[derive(Copy, Clone, Debug, Type)]
@@ -133,7 +133,7 @@ pub struct LicenseId(pub i32);
#[sqlx(transparent)] #[sqlx(transparent)]
pub struct DonationPlatformId(pub i32); pub struct DonationPlatformId(pub i32);
#[derive(Copy, Clone, Debug, Type)] #[derive(Copy, Clone, Debug, Type, PartialEq)]
#[sqlx(transparent)] #[sqlx(transparent)]
pub struct VersionId(pub i64); pub struct VersionId(pub i64);
#[derive(Copy, Clone, Debug, Type)] #[derive(Copy, Clone, Debug, Type)]

View File

@@ -2,6 +2,7 @@ use super::ids::*;
use crate::database::models::DatabaseError; use crate::database::models::DatabaseError;
pub struct NotificationBuilder { pub struct NotificationBuilder {
pub notification_type: Option<String>,
pub title: String, pub title: String,
pub text: String, pub text: String,
pub link: String, pub link: String,
@@ -16,6 +17,7 @@ pub struct NotificationActionBuilder {
pub struct Notification { pub struct Notification {
pub id: NotificationId, pub id: NotificationId,
pub user_id: UserId, pub user_id: UserId,
pub notification_type: Option<String>,
pub title: String, pub title: String,
pub text: String, pub text: String,
pub link: String, pub link: String,
@@ -64,6 +66,7 @@ impl NotificationBuilder {
Notification { Notification {
id, id,
user_id: user, user_id: user,
notification_type: self.notification_type.clone(),
title: self.title.clone(), title: self.title.clone(),
text: self.text.clone(), text: self.text.clone(),
link: self.link.clone(), link: self.link.clone(),
@@ -87,17 +90,18 @@ impl Notification {
sqlx::query!( sqlx::query!(
" "
INSERT INTO notifications ( INSERT INTO notifications (
id, user_id, title, text, link id, user_id, title, text, link, type
) )
VALUES ( VALUES (
$1, $2, $3, $4, $5 $1, $2, $3, $4, $5, $6
) )
", ",
self.id as NotificationId, self.id as NotificationId,
self.user_id as UserId, self.user_id as UserId,
&self.title, &self.title,
&self.text, &self.text,
&self.link &self.link,
self.notification_type
) )
.execute(&mut *transaction) .execute(&mut *transaction)
.await?; .await?;
@@ -118,7 +122,7 @@ impl Notification {
{ {
let result = sqlx::query!( let result = sqlx::query!(
" "
SELECT n.user_id, n.title, n.text, n.link, n.created, n.read, SELECT n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type,
STRING_AGG(DISTINCT na.id || ', ' || na.title || ', ' || na.action_route || ', ' || na.action_route_method, ' ,') actions STRING_AGG(DISTINCT na.id || ', ' || na.title || ', ' || na.action_route || ', ' || na.action_route_method, ' ,') actions
FROM notifications n FROM notifications n
LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id
@@ -150,6 +154,7 @@ impl Notification {
Ok(Some(Notification { Ok(Some(Notification {
id, id,
user_id: UserId(row.user_id), user_id: UserId(row.user_id),
notification_type: row.notification_type,
title: row.title, title: row.title,
text: row.text, text: row.text,
link: row.link, link: row.link,
@@ -174,7 +179,7 @@ impl Notification {
let notification_ids_parsed: Vec<i64> = notification_ids.into_iter().map(|x| x.0).collect(); let notification_ids_parsed: Vec<i64> = notification_ids.into_iter().map(|x| x.0).collect();
sqlx::query!( sqlx::query!(
" "
SELECT n.id, n.user_id, n.title, n.text, n.link, n.created, n.read, SELECT n.id, n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type,
STRING_AGG(DISTINCT na.id || ', ' || na.title || ', ' || na.action_route || ', ' || na.action_route_method, ' ,') actions STRING_AGG(DISTINCT na.id || ', ' || na.title || ', ' || na.action_route || ', ' || na.action_route_method, ' ,') actions
FROM notifications n FROM notifications n
LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id
@@ -207,6 +212,7 @@ impl Notification {
Notification { Notification {
id, id,
user_id: UserId(row.user_id), user_id: UserId(row.user_id),
notification_type: row.notification_type,
title: row.title, title: row.title,
text: row.text, text: row.text,
link: row.link, link: row.link,
@@ -231,7 +237,7 @@ impl Notification {
sqlx::query!( sqlx::query!(
" "
SELECT n.id, n.user_id, n.title, n.text, n.link, n.created, n.read, SELECT n.id, n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type,
STRING_AGG(DISTINCT na.id || ', ' || na.title || ', ' || na.action_route || ', ' || na.action_route_method, ' ,') actions STRING_AGG(DISTINCT na.id || ', ' || na.title || ', ' || na.action_route || ', ' || na.action_route_method, ' ,') actions
FROM notifications n FROM notifications n
LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id
@@ -263,6 +269,7 @@ impl Notification {
Notification { Notification {
id, id,
user_id: UserId(row.user_id), user_id: UserId(row.user_id),
notification_type: row.notification_type,
title: row.title, title: row.title,
text: row.text, text: row.text,
link: row.link, link: row.link,
@@ -276,13 +283,10 @@ impl Notification {
.await .await
} }
pub async fn remove<'a, 'b, E>( pub async fn remove(
id: NotificationId, id: NotificationId,
exec: E, transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<Option<()>, sqlx::error::Error> ) -> Result<Option<()>, sqlx::error::Error> {
where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
sqlx::query!( sqlx::query!(
" "
DELETE FROM notifications_actions DELETE FROM notifications_actions
@@ -290,7 +294,7 @@ impl Notification {
", ",
id as NotificationId, id as NotificationId,
) )
.execute(exec) .execute(&mut *transaction)
.await?; .await?;
sqlx::query!( sqlx::query!(
@@ -300,7 +304,36 @@ impl Notification {
", ",
id as NotificationId, id as NotificationId,
) )
.execute(exec) .execute(&mut *transaction)
.await?;
Ok(Some(()))
}
pub async fn remove_many(
notification_ids: Vec<NotificationId>,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<Option<()>, sqlx::error::Error> {
let notification_ids_parsed: Vec<i64> = notification_ids.into_iter().map(|x| x.0).collect();
sqlx::query!(
"
DELETE FROM notifications_actions
WHERE notification_id IN (SELECT * FROM UNNEST($1::bigint[]))
",
&notification_ids_parsed
)
.execute(&mut *transaction)
.await?;
sqlx::query!(
"
DELETE FROM notifications
WHERE id IN (SELECT * FROM UNNEST($1::bigint[]))
",
&notification_ids_parsed
)
.execute(&mut *transaction)
.await?; .await?;
Ok(Some(())) Ok(Some(()))

View File

@@ -88,6 +88,8 @@ impl ProjectBuilder {
server_side: self.server_side, server_side: self.server_side,
license: self.license, license: self.license,
slug: self.slug, slug: self.slug,
rejection_reason: None,
rejection_body: None,
}; };
project_struct.insert(&mut *transaction).await?; project_struct.insert(&mut *transaction).await?;
@@ -141,6 +143,8 @@ pub struct Project {
pub server_side: SideTypeId, pub server_side: SideTypeId,
pub license: LicenseId, pub license: LicenseId,
pub slug: Option<String>, pub slug: Option<String>,
pub rejection_reason: Option<String>,
pub rejection_body: Option<String>,
} }
impl Project { impl Project {
@@ -204,7 +208,8 @@ impl Project {
icon_url, body, body_url, published, icon_url, body, body_url, published,
updated, status, updated, status,
issues_url, source_url, wiki_url, discord_url, license_url, issues_url, source_url, wiki_url, discord_url, license_url,
team_id, client_side, server_side, license, slug team_id, client_side, server_side, license, slug,
rejection_reason, rejection_body
FROM mods FROM mods
WHERE id = $1 WHERE id = $1
", ",
@@ -237,6 +242,8 @@ impl Project {
slug: row.slug, slug: row.slug,
body: row.body, body: row.body,
follows: row.follows, follows: row.follows,
rejection_reason: row.rejection_reason,
rejection_body: row.rejection_body,
})) }))
} else { } else {
Ok(None) Ok(None)
@@ -259,7 +266,8 @@ impl Project {
icon_url, body, body_url, published, icon_url, body, body_url, published,
updated, status, updated, status,
issues_url, source_url, wiki_url, discord_url, license_url, issues_url, source_url, wiki_url, discord_url, license_url,
team_id, client_side, server_side, license, slug team_id, client_side, server_side, license, slug,
rejection_reason, rejection_body
FROM mods FROM mods
WHERE id IN (SELECT * FROM UNNEST($1::bigint[])) WHERE id IN (SELECT * FROM UNNEST($1::bigint[]))
", ",
@@ -290,6 +298,8 @@ impl Project {
slug: m.slug, slug: m.slug,
body: m.body, body: m.body,
follows: m.follows, follows: m.follows,
rejection_reason: m.rejection_reason,
rejection_body: m.rejection_body,
})) }))
}) })
.try_collect::<Vec<Project>>() .try_collect::<Vec<Project>>()
@@ -298,20 +308,17 @@ impl Project {
Ok(projects) Ok(projects)
} }
pub async fn remove_full<'a, 'b, E>( pub async fn remove_full(
id: ProjectId, id: ProjectId,
exec: E, transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<Option<()>, sqlx::error::Error> ) -> Result<Option<()>, sqlx::error::Error> {
where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
let result = sqlx::query!( let result = sqlx::query!(
" "
SELECT team_id FROM mods WHERE id = $1 SELECT team_id FROM mods WHERE id = $1
", ",
id as ProjectId, id as ProjectId,
) )
.fetch_optional(exec) .fetch_optional(&mut *transaction)
.await?; .await?;
let team_id: TeamId = if let Some(id) = result { let team_id: TeamId = if let Some(id) = result {
@@ -327,7 +334,7 @@ impl Project {
", ",
id as ProjectId id as ProjectId
) )
.execute(exec) .execute(&mut *transaction)
.await?; .await?;
sqlx::query!( sqlx::query!(
@@ -337,7 +344,7 @@ impl Project {
", ",
id as ProjectId, id as ProjectId,
) )
.execute(exec) .execute(&mut *transaction)
.await?; .await?;
sqlx::query!( sqlx::query!(
@@ -347,7 +354,7 @@ impl Project {
", ",
id as ProjectId, id as ProjectId,
) )
.execute(exec) .execute(&mut *transaction)
.await?; .await?;
sqlx::query!( sqlx::query!(
@@ -357,7 +364,7 @@ impl Project {
", ",
id as ProjectId, id as ProjectId,
) )
.execute(exec) .execute(&mut *transaction)
.await?; .await?;
sqlx::query!( sqlx::query!(
@@ -367,7 +374,7 @@ impl Project {
", ",
id as ProjectId, id as ProjectId,
) )
.execute(exec) .execute(&mut *transaction)
.await?; .await?;
use futures::TryStreamExt; use futures::TryStreamExt;
@@ -378,15 +385,24 @@ impl Project {
", ",
id as ProjectId, id as ProjectId,
) )
.fetch_many(exec) .fetch_many(&mut *transaction)
.try_filter_map(|e| async { Ok(e.right().map(|c| VersionId(c.id))) }) .try_filter_map(|e| async { Ok(e.right().map(|c| VersionId(c.id))) })
.try_collect::<Vec<VersionId>>() .try_collect::<Vec<VersionId>>()
.await?; .await?;
for version in versions { for version in versions {
super::Version::remove_full(version, exec).await?; super::Version::remove_full(version, transaction).await?;
} }
sqlx::query!(
"
DELETE FROM dependencies WHERE mod_dependency_id = $1
",
id as ProjectId,
)
.execute(&mut *transaction)
.await?;
sqlx::query!( sqlx::query!(
" "
DELETE FROM mods DELETE FROM mods
@@ -394,7 +410,7 @@ impl Project {
", ",
id as ProjectId, id as ProjectId,
) )
.execute(exec) .execute(&mut *transaction)
.await?; .await?;
sqlx::query!( sqlx::query!(
@@ -404,7 +420,7 @@ impl Project {
", ",
team_id as TeamId, team_id as TeamId,
) )
.execute(exec) .execute(&mut *transaction)
.await?; .await?;
sqlx::query!( sqlx::query!(
@@ -414,7 +430,7 @@ impl Project {
", ",
team_id as TeamId, team_id as TeamId,
) )
.execute(exec) .execute(&mut *transaction)
.await?; .await?;
Ok(Some(())) Ok(Some(()))
@@ -552,7 +568,7 @@ impl Project {
executor: E, executor: E,
) -> Result<Option<QueryProject>, sqlx::error::Error> ) -> Result<Option<QueryProject>, sqlx::error::Error>
where where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy, E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{ {
let result = sqlx::query!( let result = sqlx::query!(
" "
@@ -560,7 +576,7 @@ impl Project {
m.icon_url icon_url, m.body body, m.body_url body_url, m.published published, m.icon_url icon_url, m.body body, m.body_url body_url, m.published published,
m.updated updated, m.status status, m.updated updated, m.status status,
m.issues_url issues_url, m.source_url source_url, m.wiki_url wiki_url, m.discord_url discord_url, m.license_url license_url, m.issues_url issues_url, m.source_url source_url, m.wiki_url wiki_url, m.discord_url discord_url, m.license_url license_url,
m.team_id team_id, m.client_side client_side, m.server_side server_side, m.license license, m.slug slug, m.team_id team_id, m.client_side client_side, m.server_side server_side, m.license license, m.slug slug, m.rejection_reason rejection_reason, m.rejection_body rejection_body,
s.status status_name, cs.name client_side_type, ss.name server_side_type, l.short short, l.name license_name, pt.name project_type_name, s.status status_name, cs.name client_side_type, ss.name server_side_type, l.short short, l.name license_name, pt.name project_type_name,
STRING_AGG(DISTINCT c.category, ',') categories, STRING_AGG(DISTINCT v.id::text, ',') versions STRING_AGG(DISTINCT c.category, ',') categories, STRING_AGG(DISTINCT v.id::text, ',') versions
FROM mods m FROM mods m
@@ -605,6 +621,8 @@ impl Project {
slug: m.slug.clone(), slug: m.slug.clone(),
body: m.body.clone(), body: m.body.clone(),
follows: m.follows, follows: m.follows,
rejection_reason: m.rejection_reason,
rejection_body: m.rejection_body,
}, },
project_type: m.project_type_name, project_type: m.project_type_name,
categories: m categories: m
@@ -647,7 +665,7 @@ impl Project {
m.icon_url icon_url, m.body body, m.body_url body_url, m.published published, m.icon_url icon_url, m.body body, m.body_url body_url, m.published published,
m.updated updated, m.status status, m.updated updated, m.status status,
m.issues_url issues_url, m.source_url source_url, m.wiki_url wiki_url, m.discord_url discord_url, m.license_url license_url, m.issues_url issues_url, m.source_url source_url, m.wiki_url wiki_url, m.discord_url discord_url, m.license_url license_url,
m.team_id team_id, m.client_side client_side, m.server_side server_side, m.license license, m.slug slug, m.team_id team_id, m.client_side client_side, m.server_side server_side, m.license license, m.slug slug, m.rejection_reason rejection_reason, m.rejection_body rejection_body,
s.status status_name, cs.name client_side_type, ss.name server_side_type, l.short short, l.name license_name, pt.name project_type_name, s.status status_name, cs.name client_side_type, ss.name server_side_type, l.short short, l.name license_name, pt.name project_type_name,
STRING_AGG(DISTINCT c.category, ',') categories, STRING_AGG(DISTINCT v.id::text, ',') versions STRING_AGG(DISTINCT c.category, ',') categories, STRING_AGG(DISTINCT v.id::text, ',') versions
FROM mods m FROM mods m
@@ -689,7 +707,9 @@ impl Project {
license: LicenseId(m.license), license: LicenseId(m.license),
slug: m.slug.clone(), slug: m.slug.clone(),
body: m.body.clone(), body: m.body.clone(),
follows: m.follows follows: m.follows,
rejection_reason: m.rejection_reason,
rejection_body: m.rejection_body,
}, },
project_type: m.project_type_name, project_type: m.project_type_name,
categories: m.categories.unwrap_or_default().split(',').map(|x| x.to_string()).collect(), categories: m.categories.unwrap_or_default().split(',').map(|x| x.to_string()).collect(),

View File

@@ -238,10 +238,10 @@ impl User {
Ok(projects) Ok(projects)
} }
pub async fn remove<'a, 'b, E>(id: UserId, exec: E) -> Result<Option<()>, sqlx::error::Error> pub async fn remove(
where id: UserId,
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy, transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
{ ) -> Result<Option<()>, sqlx::error::Error> {
let deleted_user: UserId = crate::models::users::DELETED_USER.into(); let deleted_user: UserId = crate::models::users::DELETED_USER.into();
sqlx::query!( sqlx::query!(
@@ -254,7 +254,7 @@ impl User {
id as UserId, id as UserId,
crate::models::teams::OWNER_ROLE crate::models::teams::OWNER_ROLE
) )
.execute(exec) .execute(&mut *transaction)
.await?; .await?;
sqlx::query!( sqlx::query!(
@@ -266,7 +266,7 @@ impl User {
deleted_user as UserId, deleted_user as UserId,
id as UserId, id as UserId,
) )
.execute(exec) .execute(&mut *transaction)
.await?; .await?;
use futures::TryStreamExt; use futures::TryStreamExt;
@@ -277,7 +277,7 @@ impl User {
", ",
id as UserId, id as UserId,
) )
.fetch_many(exec) .fetch_many(&mut *transaction)
.try_filter_map(|e| async { Ok(e.right().map(|m| m.id as i64)) }) .try_filter_map(|e| async { Ok(e.right().map(|m| m.id as i64)) })
.try_collect::<Vec<i64>>() .try_collect::<Vec<i64>>()
.await?; .await?;
@@ -289,7 +289,7 @@ impl User {
", ",
id as UserId, id as UserId,
) )
.execute(exec) .execute(&mut *transaction)
.await?; .await?;
sqlx::query!( sqlx::query!(
@@ -299,7 +299,7 @@ impl User {
", ",
id as UserId, id as UserId,
) )
.execute(exec) .execute(&mut *transaction)
.await?; .await?;
sqlx::query!( sqlx::query!(
@@ -309,7 +309,7 @@ impl User {
", ",
id as UserId, id as UserId,
) )
.execute(exec) .execute(&mut *transaction)
.await?; .await?;
sqlx::query!( sqlx::query!(
@@ -319,7 +319,7 @@ impl User {
", ",
&notifications &notifications
) )
.execute(exec) .execute(&mut *transaction)
.await?; .await?;
sqlx::query!( sqlx::query!(
@@ -329,7 +329,7 @@ impl User {
", ",
id as UserId, id as UserId,
) )
.execute(exec) .execute(&mut *transaction)
.await?; .await?;
sqlx::query!( sqlx::query!(
@@ -339,19 +339,16 @@ impl User {
", ",
id as UserId, id as UserId,
) )
.execute(exec) .execute(&mut *transaction)
.await?; .await?;
Ok(Some(())) Ok(Some(()))
} }
pub async fn remove_full<'a, 'b, E>( pub async fn remove_full(
id: UserId, id: UserId,
exec: E, transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<Option<()>, sqlx::error::Error> ) -> Result<Option<()>, sqlx::error::Error> {
where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
use futures::TryStreamExt; use futures::TryStreamExt;
let projects: Vec<ProjectId> = sqlx::query!( let projects: Vec<ProjectId> = sqlx::query!(
" "
@@ -362,13 +359,14 @@ impl User {
id as UserId, id as UserId,
crate::models::teams::OWNER_ROLE crate::models::teams::OWNER_ROLE
) )
.fetch_many(exec) .fetch_many(&mut *transaction)
.try_filter_map(|e| async { Ok(e.right().map(|m| ProjectId(m.id))) }) .try_filter_map(|e| async { Ok(e.right().map(|m| ProjectId(m.id))) })
.try_collect::<Vec<ProjectId>>() .try_collect::<Vec<ProjectId>>()
.await?; .await?;
for project_id in projects { for project_id in projects {
let _result = super::project_item::Project::remove_full(project_id, exec).await?; let _result =
super::project_item::Project::remove_full(project_id, transaction).await?;
} }
let notifications: Vec<i64> = sqlx::query!( let notifications: Vec<i64> = sqlx::query!(
@@ -378,7 +376,7 @@ impl User {
", ",
id as UserId, id as UserId,
) )
.fetch_many(exec) .fetch_many(&mut *transaction)
.try_filter_map(|e| async { Ok(e.right().map(|m| m.id as i64)) }) .try_filter_map(|e| async { Ok(e.right().map(|m| m.id as i64)) })
.try_collect::<Vec<i64>>() .try_collect::<Vec<i64>>()
.await?; .await?;
@@ -390,7 +388,7 @@ impl User {
", ",
id as UserId, id as UserId,
) )
.execute(exec) .execute(&mut *transaction)
.await?; .await?;
sqlx::query!( sqlx::query!(
@@ -400,7 +398,7 @@ impl User {
", ",
&notifications &notifications
) )
.execute(exec) .execute(&mut *transaction)
.await?; .await?;
let deleted_user: UserId = crate::models::users::DELETED_USER.into(); let deleted_user: UserId = crate::models::users::DELETED_USER.into();
@@ -414,7 +412,7 @@ impl User {
deleted_user as UserId, deleted_user as UserId,
id as UserId, id as UserId,
) )
.execute(exec) .execute(&mut *transaction)
.await?; .await?;
sqlx::query!( sqlx::query!(
@@ -424,7 +422,7 @@ impl User {
", ",
id as UserId, id as UserId,
) )
.execute(exec) .execute(&mut *transaction)
.await?; .await?;
sqlx::query!( sqlx::query!(
@@ -434,7 +432,7 @@ impl User {
", ",
id as UserId, id as UserId,
) )
.execute(exec) .execute(&mut *transaction)
.await?; .await?;
Ok(Some(())) Ok(Some(()))

View File

@@ -10,13 +10,62 @@ pub struct VersionBuilder {
pub version_number: String, pub version_number: String,
pub changelog: String, pub changelog: String,
pub files: Vec<VersionFileBuilder>, pub files: Vec<VersionFileBuilder>,
pub dependencies: Vec<(VersionId, String)>, pub dependencies: Vec<DependencyBuilder>,
pub game_versions: Vec<GameVersionId>, pub game_versions: Vec<GameVersionId>,
pub loaders: Vec<LoaderId>, pub loaders: Vec<LoaderId>,
pub release_channel: ChannelId, pub release_channel: ChannelId,
pub featured: bool, pub featured: bool,
} }
pub struct DependencyBuilder {
pub project_id: Option<ProjectId>,
pub version_id: Option<VersionId>,
pub dependency_type: String,
}
impl DependencyBuilder {
pub async fn insert(
self,
version_id: VersionId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), DatabaseError> {
let version_dependency_id = if let Some(project_id) = self.project_id {
sqlx::query!(
"
SELECT version.id id FROM (
SELECT DISTINCT ON(v.id) v.id, v.date_published FROM versions v
INNER JOIN game_versions_versions gvv ON gvv.joining_version_id = v.id AND gvv.game_version_id IN (SELECT game_version_id FROM game_versions_versions WHERE joining_version_id = $2)
INNER JOIN loaders_versions lv ON lv.version_id = v.id AND lv.loader_id IN (SELECT loader_id FROM loaders_versions WHERE version_id = $2)
WHERE v.mod_id = $1
) AS version
ORDER BY version.date_published DESC
LIMIT 1
",
project_id as ProjectId,
version_id as VersionId,
)
.fetch_optional(&mut *transaction).await?.map(|x| VersionId(x.id))
} else {
self.version_id
};
sqlx::query!(
"
INSERT INTO dependencies (dependent_id, dependency_type, dependency_id, mod_dependency_id)
VALUES ($1, $2, $3, $4)
",
version_id as VersionId,
self.dependency_type,
version_dependency_id.map(|x| x.0),
self.project_id.map(|x| x.0),
)
.execute(&mut *transaction)
.await?;
Ok(())
}
}
pub struct VersionFileBuilder { pub struct VersionFileBuilder {
pub url: String, pub url: String,
pub filename: String, pub filename: String,
@@ -105,20 +154,10 @@ impl VersionBuilder {
} }
for dependency in self.dependencies { for dependency in self.dependencies {
sqlx::query!( dependency.insert(self.version_id, transaction).await?;
"
INSERT INTO dependencies (dependent_id, dependency_id, dependency_type)
VALUES ($1, $2, $3)
",
self.version_id as VersionId,
dependency.0 as VersionId,
dependency.1,
)
.execute(&mut *transaction)
.await?;
} }
for loader in self.loaders { for loader in self.loaders.clone() {
sqlx::query!( sqlx::query!(
" "
INSERT INTO loaders_versions (loader_id, version_id) INSERT INTO loaders_versions (loader_id, version_id)
@@ -131,7 +170,7 @@ impl VersionBuilder {
.await?; .await?;
} }
for game_version in self.game_versions { for game_version in self.game_versions.clone() {
sqlx::query!( sqlx::query!(
" "
INSERT INTO game_versions_versions (game_version_id, joining_version_id) INSERT INTO game_versions_versions (game_version_id, joining_version_id)
@@ -144,6 +183,42 @@ impl VersionBuilder {
.await?; .await?;
} }
// Sync dependencies
use futures::stream::TryStreamExt;
let dependencies = sqlx::query!(
"
SELECT d.id id
FROM versions v
INNER JOIN dependencies d ON d.dependent_id = v.id
INNER JOIN game_versions_versions gvv ON gvv.joining_version_id = v.id AND gvv.game_version_id IN (SELECT * FROM UNNEST($2::integer[]))
INNER JOIN loaders_versions lv ON lv.version_id = v.id AND lv.loader_id IN (SELECT * FROM UNNEST($3::integer[]))
WHERE v.mod_id = $1
",
self.project_id as ProjectId,
&self.game_versions.iter().map(|x| x.0).collect::<Vec<i32>>(),
&self.loaders.iter().map(|x| x.0).collect::<Vec<i32>>(),
)
.fetch_many(&mut *transaction)
.try_filter_map(|e| async {
Ok(e.right().map(|d| d.id as i64))
})
.try_collect::<Vec<i64>>()
.await?;
sqlx::query!(
"
UPDATE dependencies
SET dependency_id = $2
WHERE id IN (SELECT * FROM UNNEST($1::bigint[]))
",
&dependencies,
self.version_id as VersionId,
)
.execute(&mut *transaction)
.await?;
Ok(self.version_id) Ok(self.version_id)
} }
} }
@@ -200,17 +275,17 @@ impl Version {
} }
// TODO: someone verify this // TODO: someone verify this
pub async fn remove_full<'a, E>(id: VersionId, exec: E) -> Result<Option<()>, sqlx::Error> pub async fn remove_full(
where id: VersionId,
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy, transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
{ ) -> Result<Option<()>, sqlx::Error> {
let result = sqlx::query!( let result = sqlx::query!(
" "
SELECT EXISTS(SELECT 1 FROM versions WHERE id = $1) SELECT EXISTS(SELECT 1 FROM versions WHERE id = $1)
", ",
id as VersionId, id as VersionId,
) )
.fetch_one(exec) .fetch_one(&mut *transaction)
.await?; .await?;
if !result.exists.unwrap_or(false) { if !result.exists.unwrap_or(false) {
@@ -224,7 +299,33 @@ impl Version {
", ",
id as VersionId, id as VersionId,
) )
.execute(exec) .execute(&mut *transaction)
.await?;
use futures::TryStreamExt;
let game_versions: Vec<i32> = sqlx::query!(
"
SELECT game_version_id id FROM game_versions_versions
WHERE joining_version_id = $1
",
id as VersionId,
)
.fetch_many(&mut *transaction)
.try_filter_map(|e| async { Ok(e.right().map(|c| c.id)) })
.try_collect::<Vec<i32>>()
.await?;
let loaders: Vec<i32> = sqlx::query!(
"
SELECT loader_id id FROM loaders_versions
WHERE version_id = $1
",
id as VersionId,
)
.fetch_many(&mut *transaction)
.try_filter_map(|e| async { Ok(e.right().map(|c| c.id)) })
.try_collect::<Vec<i32>>()
.await?; .await?;
sqlx::query!( sqlx::query!(
@@ -234,7 +335,7 @@ impl Version {
", ",
id as VersionId, id as VersionId,
) )
.execute(exec) .execute(&mut *transaction)
.await?; .await?;
sqlx::query!( sqlx::query!(
@@ -244,7 +345,7 @@ impl Version {
", ",
id as VersionId, id as VersionId,
) )
.execute(exec) .execute(&mut *transaction)
.await?; .await?;
sqlx::query!( sqlx::query!(
@@ -254,11 +355,9 @@ impl Version {
", ",
id as VersionId, id as VersionId,
) )
.execute(exec) .execute(&mut *transaction)
.await?; .await?;
use futures::TryStreamExt;
let files = sqlx::query!( let files = sqlx::query!(
" "
SELECT files.id, files.url, files.filename, files.is_primary FROM files SELECT files.id, files.url, files.filename, files.is_primary FROM files
@@ -266,7 +365,7 @@ impl Version {
", ",
id as VersionId, id as VersionId,
) )
.fetch_many(exec) .fetch_many(&mut *transaction)
.try_filter_map(|e| async { .try_filter_map(|e| async {
Ok(e.right().map(|c| VersionFile { Ok(e.right().map(|c| VersionFile {
id: FileId(c.id), id: FileId(c.id),
@@ -301,7 +400,7 @@ impl Version {
", ",
id as VersionId id as VersionId
) )
.execute(exec) .execute(&mut *transaction)
.await?; .await?;
sqlx::query!( sqlx::query!(
@@ -311,54 +410,71 @@ impl Version {
", ",
id as VersionId, id as VersionId,
) )
.execute(exec) .execute(&mut *transaction)
.await?; .await?;
// Sync dependencies
let project_id = sqlx::query!(
"
SELECT mod_id FROM versions WHERE id = $1
",
id as VersionId,
)
.fetch_one(&mut *transaction)
.await?;
let new_version_id = sqlx::query!(
"
SELECT v.id id
FROM versions v
INNER JOIN game_versions_versions gvv ON gvv.joining_version_id = v.id AND gvv.game_version_id IN (SELECT * FROM UNNEST($2::integer[]))
INNER JOIN loaders_versions lv ON lv.version_id = v.id AND lv.loader_id IN (SELECT * FROM UNNEST($3::integer[]))
WHERE v.mod_id = $1
ORDER BY v.date_published DESC
LIMIT 1
",
project_id.mod_id,
&game_versions,
&loaders,
)
.fetch_optional(&mut *transaction)
.await?
.map(|x| x.id);
sqlx::query!(
"
UPDATE dependencies
SET dependency_id = $2
WHERE dependency_id = $1
",
id as VersionId,
new_version_id,
)
.execute(&mut *transaction)
.await?;
sqlx::query!(
"
DELETE FROM dependencies WHERE mod_dependency_id = NULL AND dependency_id = NULL
",
)
.execute(&mut *transaction)
.await?;
// delete version
sqlx::query!( sqlx::query!(
" "
DELETE FROM versions WHERE id = $1 DELETE FROM versions WHERE id = $1
", ",
id as VersionId, id as VersionId,
) )
.execute(exec) .execute(&mut *transaction)
.await?; .await?;
sqlx::query!(
"
DELETE FROM dependencies WHERE dependent_id = $1
",
id as VersionId,
)
.execute(exec)
.await?;
Ok(Some(())) Ok(Some(()))
} }
/// Fetches the ids of the versions that the given version depends on.
///
/// Reads the `dependencies` table for rows whose `dependent_id` matches
/// `id` and returns each row's `dependency_id` as a [`VersionId`].
pub async fn get_dependencies<'a, E>(
    id: VersionId,
    exec: E,
) -> Result<Vec<VersionId>, sqlx::Error>
where
    E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
    use futures::stream::TryStreamExt;

    let rows = sqlx::query!(
        "
        SELECT dependency_id id FROM dependencies
        WHERE dependent_id = $1
        ",
        id as VersionId,
    )
    .fetch_many(exec);

    let dependency_ids = rows
        .try_filter_map(|row| async move { Ok(row.right().map(|record| VersionId(record.id))) })
        .try_collect::<Vec<VersionId>>()
        .await?;

    Ok(dependency_ids)
}
pub async fn get_project_versions<'a, E>( pub async fn get_project_versions<'a, E>(
project_id: ProjectId, project_id: ProjectId,
game_versions: Option<Vec<String>>, game_versions: Option<Vec<String>>,
@@ -491,7 +607,7 @@ impl Version {
STRING_AGG(DISTINCT gv.version, ',') game_versions, STRING_AGG(DISTINCT l.loader, ',') loaders, STRING_AGG(DISTINCT gv.version, ',') game_versions, STRING_AGG(DISTINCT l.loader, ',') loaders,
STRING_AGG(DISTINCT f.id || ', ' || f.filename || ', ' || f.is_primary || ', ' || f.url, ' ,') files, STRING_AGG(DISTINCT f.id || ', ' || f.filename || ', ' || f.is_primary || ', ' || f.url, ' ,') files,
STRING_AGG(DISTINCT h.algorithm || ', ' || encode(h.hash, 'escape') || ', ' || h.file_id, ' ,') hashes, STRING_AGG(DISTINCT h.algorithm || ', ' || encode(h.hash, 'escape') || ', ' || h.file_id, ' ,') hashes,
STRING_AGG(DISTINCT d.dependency_id || ', ' || d.dependency_type, ' ,') dependencies STRING_AGG(DISTINCT COALESCE(d.dependency_id, 0) || ', ' || COALESCE(d.mod_dependency_id, 0) || ', ' || d.dependency_type, ' ,') dependencies
FROM versions v FROM versions v
INNER JOIN release_channels rc on v.release_channel = rc.id INNER JOIN release_channels rc on v.release_channel = rc.id
LEFT OUTER JOIN game_versions_versions gvv on v.id = gvv.joining_version_id LEFT OUTER JOIN game_versions_versions gvv on v.id = gvv.joining_version_id
@@ -557,11 +673,24 @@ impl Version {
.for_each(|f| { .for_each(|f| {
let dependency: Vec<&str> = f.split(", ").collect(); let dependency: Vec<&str> = f.split(", ").collect();
if dependency.len() >= 2 { if dependency.len() >= 3 {
dependencies.push(( dependencies.push(QueryDependency {
VersionId(dependency[0].parse().unwrap_or(0)), project_id: match &*dependency[2] {
dependency[1].to_string(), "0" => None,
)) _ => match dependency[2].parse() {
Ok(x) => Some(ProjectId(x)),
Err(_) => None,
},
},
version_id: match &*dependency[0] {
"0" => None,
_ => match dependency[0].parse() {
Ok(x) => Some(VersionId(x)),
Err(_) => None,
},
},
dependency_type: dependency[1].to_string(),
});
} }
}); });
@@ -615,7 +744,7 @@ impl Version {
STRING_AGG(DISTINCT gv.version, ',') game_versions, STRING_AGG(DISTINCT l.loader, ',') loaders, STRING_AGG(DISTINCT gv.version, ',') game_versions, STRING_AGG(DISTINCT l.loader, ',') loaders,
STRING_AGG(DISTINCT f.id || ', ' || f.filename || ', ' || f.is_primary || ', ' || f.url, ' ,') files, STRING_AGG(DISTINCT f.id || ', ' || f.filename || ', ' || f.is_primary || ', ' || f.url, ' ,') files,
STRING_AGG(DISTINCT h.algorithm || ', ' || encode(h.hash, 'escape') || ', ' || h.file_id, ' ,') hashes, STRING_AGG(DISTINCT h.algorithm || ', ' || encode(h.hash, 'escape') || ', ' || h.file_id, ' ,') hashes,
STRING_AGG(DISTINCT d.dependency_id || ', ' || d.dependency_type, ' ,') dependencies STRING_AGG(DISTINCT COALESCE(d.dependency_id, 0) || ', ' || COALESCE(d.mod_dependency_id, 0) || ', ' || d.dependency_type, ' ,') dependencies
FROM versions v FROM versions v
INNER JOIN release_channels rc on v.release_channel = rc.id INNER JOIN release_channels rc on v.release_channel = rc.id
LEFT OUTER JOIN game_versions_versions gvv on v.id = gvv.joining_version_id LEFT OUTER JOIN game_versions_versions gvv on v.id = gvv.joining_version_id
@@ -678,8 +807,28 @@ impl Version {
v.dependencies.unwrap_or_default().split(" ,").for_each(|f| { v.dependencies.unwrap_or_default().split(" ,").for_each(|f| {
let dependency: Vec<&str> = f.split(", ").collect(); let dependency: Vec<&str> = f.split(", ").collect();
if dependency.len() >= 2 { if dependency.len() >= 3 {
dependencies.push((VersionId(dependency[0].parse().unwrap_or(0)), dependency[1].to_string())) dependencies.push(QueryDependency {
project_id: match &*dependency[2] {
"0" => None,
_ => {
match dependency[2].parse() {
Ok(x) => Some(ProjectId(x)),
Err(_) => None,
}
},
},
version_id: match &*dependency[0] {
"0" => None,
_ => {
match dependency[0].parse() {
Ok(x) => Some(VersionId(x)),
Err(_) => None,
}
},
},
dependency_type: dependency[1].to_string()
});
} }
}); });
@@ -743,7 +892,14 @@ pub struct QueryVersion {
pub game_versions: Vec<String>, pub game_versions: Vec<String>,
pub loaders: Vec<String>, pub loaders: Vec<String>,
pub featured: bool, pub featured: bool,
pub dependencies: Vec<(VersionId, String)>, pub dependencies: Vec<QueryDependency>,
}
#[derive(Clone)]
pub struct QueryDependency {
pub project_id: Option<ProjectId>,
pub version_id: Option<VersionId>,
pub dependency_type: String,
} }
#[derive(Clone)] #[derive(Clone)]

View File

@@ -11,7 +11,20 @@ pub async fn connect() -> Result<PgPool, sqlx::Error> {
let database_url = dotenv::var("DATABASE_URL").expect("`DATABASE_URL` not in .env"); let database_url = dotenv::var("DATABASE_URL").expect("`DATABASE_URL` not in .env");
let pool = PgPoolOptions::new() let pool = PgPoolOptions::new()
.max_connections(20) .min_connections(
dotenv::var("DATABASE_MIN_CONNECTIONS")
.ok()
.map(|x| x.parse::<u32>().ok())
.flatten()
.unwrap_or(16),
)
.max_connections(
dotenv::var("DATABASE_MAX_CONNECTIONS")
.ok()
.map(|x| x.parse::<u32>().ok())
.flatten()
.unwrap_or(16),
)
.connect(&database_url) .connect(&database_url)
.await?; .await?;

View File

@@ -11,13 +11,13 @@ use search::indexing::index_projects;
use search::indexing::IndexingSettings; use search::indexing::IndexingSettings;
use std::sync::Arc; use std::sync::Arc;
mod auth;
mod database; mod database;
mod file_hosting; mod file_hosting;
mod models; mod models;
mod routes; mod routes;
mod scheduler; mod scheduler;
mod search; mod search;
mod util;
mod validate; mod validate;
#[derive(Debug, Options)] #[derive(Debug, Options)]
@@ -265,9 +265,23 @@ async fn main() -> std::io::Result<()> {
.with_identifier(|req| { .with_identifier(|req| {
let connection_info = req.connection_info(); let connection_info = req.connection_info();
let ip = String::from( let ip = String::from(
connection_info if dotenv::var("CLOUDFLARE_INTEGRATION")
.remote_addr() .ok()
.ok_or(ARError::IdentificationError)?, .map(|i| i.parse().unwrap())
.unwrap_or(false)
{
if let Some(header) = req.headers().get("CF-Connecting-IP") {
header.to_str().map_err(|_| ARError::IdentificationError)?
} else {
connection_info
.remote_addr()
.ok_or(ARError::IdentificationError)?
}
} else {
connection_info
.remote_addr()
.ok_or(ARError::IdentificationError)?
},
); );
let ignore_ips = dotenv::var("RATE_LIMIT_IGNORE_IPS") let ignore_ips = dotenv::var("RATE_LIMIT_IGNORE_IPS")
@@ -277,16 +291,16 @@ async fn main() -> std::io::Result<()> {
if ignore_ips.contains(&ip) { if ignore_ips.contains(&ip) {
// At an even distribution of numbers, this will allow at the most // At an even distribution of numbers, this will allow at the most
// 3000 requests per minute from the frontend, which is reasonable // 18000 requests per minute from the frontend, which is reasonable
// (50 requests per second) // (300 requests per second)
let random = rand::thread_rng().gen_range(1, 15); let random = rand::thread_rng().gen_range(1, 30);
return Ok(format!("{}-{}", ip, random)); return Ok(format!("{}-{}", ip, random));
} }
Ok(ip) Ok(ip)
}) })
.with_interval(std::time::Duration::from_secs(60)) .with_interval(std::time::Duration::from_secs(60))
.with_max_requests(200), .with_max_requests(300),
) )
.wrap(sentry_actix::Sentry::new()) .wrap(sentry_actix::Sentry::new())
.data(pool.clone()) .data(pool.clone())
@@ -335,6 +349,7 @@ fn check_env_vars() -> bool {
failed |= true; failed |= true;
} }
failed |= check_var::<String>("SITE_URL");
failed |= check_var::<String>("CDN_URL"); failed |= check_var::<String>("CDN_URL");
failed |= check_var::<String>("DATABASE_URL"); failed |= check_var::<String>("DATABASE_URL");
failed |= check_var::<String>("MEILISEARCH_ADDR"); failed |= check_var::<String>("MEILISEARCH_ADDR");

View File

@@ -12,6 +12,8 @@ pub struct NotificationId(pub u64);
pub struct Notification { pub struct Notification {
pub id: NotificationId, pub id: NotificationId,
pub user_id: UserId, pub user_id: UserId,
#[serde(rename = "type")]
pub type_: Option<String>,
pub title: String, pub title: String,
pub text: String, pub text: String,
pub link: String, pub link: String,

View File

@@ -12,13 +12,13 @@ use validator::Validate;
pub struct ProjectId(pub u64); pub struct ProjectId(pub u64);
/// The ID of a specific version of a project /// The ID of a specific version of a project
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize)] #[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Hash)]
#[serde(from = "Base62Id")] #[serde(from = "Base62Id")]
#[serde(into = "Base62Id")] #[serde(into = "Base62Id")]
pub struct VersionId(pub u64); pub struct VersionId(pub u64);
/// A project returned from the API /// A project returned from the API
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize, Clone)]
pub struct Project { pub struct Project {
/// The ID of the project, encoded as a base62 string. /// The ID of the project, encoded as a base62 string.
pub id: ProjectId, pub id: ProjectId,
@@ -40,8 +40,12 @@ pub struct Project {
pub published: DateTime<Utc>, pub published: DateTime<Utc>,
/// The date at which the project was first published. /// The date at which the project was first published.
pub updated: DateTime<Utc>, pub updated: DateTime<Utc>,
/// The status of the project /// The status of the project
pub status: ProjectStatus, pub status: ProjectStatus,
/// The rejection data of the project
pub rejection_data: Option<RejectionReason>,
/// The license of this project /// The license of this project
pub license: License, pub license: License,
@@ -73,6 +77,12 @@ pub struct Project {
pub donation_urls: Option<Vec<DonationLink>>, pub donation_urls: Option<Vec<DonationLink>>,
} }
/// Feedback attached to a project whose status is `rejected`; exposed on
/// [`Project`] as `rejection_data` (backed by the `rejection_reason` /
/// `rejection_body` columns on `mods`).
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct RejectionReason {
/// Short reason for the rejection.
pub reason: String,
/// Optional longer explanation; `None` when no body was provided.
pub body: Option<String>,
}
#[derive(Serialize, Deserialize, Clone, Debug)] #[derive(Serialize, Deserialize, Clone, Debug)]
#[serde(rename_all = "kebab-case")] #[serde(rename_all = "kebab-case")]
pub enum SideType { pub enum SideType {
@@ -134,6 +144,7 @@ pub struct DonationLink {
#[serde(rename_all = "lowercase")] #[serde(rename_all = "lowercase")]
pub enum ProjectStatus { pub enum ProjectStatus {
Approved, Approved,
Archived,
Rejected, Rejected,
Draft, Draft,
Unlisted, Unlisted,
@@ -155,6 +166,7 @@ impl ProjectStatus {
"approved" => ProjectStatus::Approved, "approved" => ProjectStatus::Approved,
"draft" => ProjectStatus::Draft, "draft" => ProjectStatus::Draft,
"unlisted" => ProjectStatus::Unlisted, "unlisted" => ProjectStatus::Unlisted,
"archived" => ProjectStatus::Archived,
_ => ProjectStatus::Unknown, _ => ProjectStatus::Unknown,
} }
} }
@@ -166,6 +178,7 @@ impl ProjectStatus {
ProjectStatus::Unlisted => "unlisted", ProjectStatus::Unlisted => "unlisted",
ProjectStatus::Processing => "processing", ProjectStatus::Processing => "processing",
ProjectStatus::Unknown => "unknown", ProjectStatus::Unknown => "unknown",
ProjectStatus::Archived => "archived",
} }
} }
@@ -177,6 +190,7 @@ impl ProjectStatus {
ProjectStatus::Unlisted => false, ProjectStatus::Unlisted => false,
ProjectStatus::Processing => true, ProjectStatus::Processing => true,
ProjectStatus::Unknown => true, ProjectStatus::Unknown => true,
ProjectStatus::Archived => false,
} }
} }
@@ -240,9 +254,11 @@ pub struct VersionFile {
/// version's functionality /// version's functionality
#[derive(Serialize, Deserialize, Clone)] #[derive(Serialize, Deserialize, Clone)]
pub struct Dependency { pub struct Dependency {
/// The filename of the file. /// The specific version id that the dependency uses
pub version_id: VersionId, pub version_id: Option<VersionId>,
/// Whether the file is the primary file of a version /// The project ID that the dependency is synced with and auto-updated
pub project_id: Option<ProjectId>,
/// The type of the dependency
pub dependency_type: DependencyType, pub dependency_type: DependencyType,
} }

View File

@@ -1,9 +1,9 @@
use crate::auth::get_github_user_from_token;
use crate::database::models::{generate_state_id, User}; use crate::database::models::{generate_state_id, User};
use crate::models::error::ApiError; use crate::models::error::ApiError;
use crate::models::ids::base62_impl::{parse_base62, to_base62}; use crate::models::ids::base62_impl::{parse_base62, to_base62};
use crate::models::ids::DecodingError; use crate::models::ids::DecodingError;
use crate::models::users::Role; use crate::models::users::Role;
use crate::util::auth::get_github_user_from_token;
use actix_web::http::StatusCode; use actix_web::http::StatusCode;
use actix_web::web::{scope, Data, Query, ServiceConfig}; use actix_web::web::{scope, Data, Query, ServiceConfig};
use actix_web::{get, HttpResponse}; use actix_web::{get, HttpResponse};
@@ -32,7 +32,7 @@ pub enum AuthorizationError {
#[error("Invalid Authentication credentials")] #[error("Invalid Authentication credentials")]
InvalidCredentialsError, InvalidCredentialsError,
#[error("Authentication Error: {0}")] #[error("Authentication Error: {0}")]
AuthenticationError(#[from] crate::auth::AuthenticationError), AuthenticationError(#[from] crate::util::auth::AuthenticationError),
#[error("Error while decoding Base62")] #[error("Error while decoding Base62")]
DecodingError(#[from] DecodingError), DecodingError(#[from] DecodingError),
} }
@@ -129,78 +129,82 @@ pub async fn auth_callback(
let mut transaction = client.begin().await?; let mut transaction = client.begin().await?;
let state_id = parse_base62(&*info.state)?; let state_id = parse_base62(&*info.state)?;
let result = sqlx::query!( let result_option = sqlx::query!(
" "
SELECT url,expires FROM states SELECT url,expires FROM states
WHERE id = $1 WHERE id = $1
", ",
state_id as i64 state_id as i64
) )
.fetch_one(&mut *transaction) .fetch_optional(&mut *transaction)
.await?; .await?;
let now = Utc::now(); if let Some(result) = result_option {
let duration = result.expires.signed_duration_since(now); let now = Utc::now();
let duration = result.expires.signed_duration_since(now);
if duration.num_seconds() < 0 { if duration.num_seconds() < 0 {
return Err(AuthorizationError::InvalidCredentialsError); return Err(AuthorizationError::InvalidCredentialsError);
} }
sqlx::query!( sqlx::query!(
" "
DELETE FROM states DELETE FROM states
WHERE id = $1 WHERE id = $1
", ",
state_id as i64 state_id as i64
) )
.execute(&mut *transaction) .execute(&mut *transaction)
.await?;
let client_id = dotenv::var("GITHUB_CLIENT_ID")?;
let client_secret = dotenv::var("GITHUB_CLIENT_SECRET")?;
let url = format!(
"https://github.com/login/oauth/access_token?client_id={}&client_secret={}&code={}",
client_id, client_secret, info.code
);
let token: AccessToken = reqwest::Client::new()
.post(&url)
.header(reqwest::header::ACCEPT, "application/json")
.send()
.await?
.json()
.await?; .await?;
let user = get_github_user_from_token(&*token.access_token).await?; let client_id = dotenv::var("GITHUB_CLIENT_ID")?;
let client_secret = dotenv::var("GITHUB_CLIENT_SECRET")?;
let user_result = User::get_from_github_id(user.id, &mut *transaction).await?; let url = format!(
match user_result { "https://github.com/login/oauth/access_token?client_id={}&client_secret={}&code={}",
Some(x) => info!("{:?}", x.id), client_id, client_secret, info.code
None => { );
let user_id = crate::database::models::generate_user_id(&mut transaction).await?;
User { let token: AccessToken = reqwest::Client::new()
id: user_id, .post(&url)
github_id: Some(user.id as i64), .header(reqwest::header::ACCEPT, "application/json")
username: user.login, .send()
name: user.name, .await?
email: user.email, .json()
avatar_url: Some(user.avatar_url),
bio: user.bio,
created: Utc::now(),
role: Role::Developer.to_string(),
}
.insert(&mut transaction)
.await?; .await?;
let user = get_github_user_from_token(&*token.access_token).await?;
let user_result = User::get_from_github_id(user.id, &mut *transaction).await?;
match user_result {
Some(x) => info!("{:?}", x.id),
None => {
let user_id = crate::database::models::generate_user_id(&mut transaction).await?;
User {
id: user_id,
github_id: Some(user.id as i64),
username: user.login,
name: user.name,
email: user.email,
avatar_url: Some(user.avatar_url),
bio: user.bio,
created: Utc::now(),
role: Role::Developer.to_string(),
}
.insert(&mut transaction)
.await?;
}
} }
transaction.commit().await?;
let redirect_url = format!("{}?code={}", result.url, token.access_token);
Ok(HttpResponse::TemporaryRedirect()
.header("Location", &*redirect_url)
.json(AuthorizationInit { url: redirect_url }))
} else {
Err(AuthorizationError::InvalidCredentialsError)
} }
transaction.commit().await?;
let redirect_url = format!("{}?code={}", result.url, token.access_token);
Ok(HttpResponse::TemporaryRedirect()
.header("Location", &*redirect_url)
.json(AuthorizationInit { url: redirect_url }))
} }

View File

@@ -1,7 +1,7 @@
use crate::auth::get_user_from_headers;
use crate::database; use crate::database;
use crate::models::projects::ProjectId; use crate::models::projects::ProjectId;
use crate::routes::ApiError; use crate::routes::ApiError;
use crate::util::auth::get_user_from_headers;
use actix_web::{get, web, HttpRequest, HttpResponse}; use actix_web::{get, web, HttpRequest, HttpResponse};
use sqlx::PgPool; use sqlx::PgPool;
use yaserde_derive::YaSerialize; use yaserde_derive::YaSerialize;

View File

@@ -55,7 +55,8 @@ pub fn projects_config(cfg: &mut web::ServiceConfig) {
.service(projects::project_follow) .service(projects::project_follow)
.service(projects::project_unfollow) .service(projects::project_unfollow)
.service(teams::team_members_get_project) .service(teams::team_members_get_project)
.service(web::scope("{project_id}").service(versions::version_list)), .service(web::scope("{project_id}").service(versions::version_list))
.service(projects::dependency_list),
); );
} }
@@ -119,6 +120,7 @@ pub fn teams_config(cfg: &mut web::ServiceConfig) {
pub fn notifications_config(cfg: &mut web::ServiceConfig) { pub fn notifications_config(cfg: &mut web::ServiceConfig) {
cfg.service(notifications::notifications_get); cfg.service(notifications::notifications_get);
cfg.service(notifications::notification_delete);
cfg.service( cfg.service(
web::scope("notification") web::scope("notification")
@@ -152,13 +154,13 @@ pub enum ApiError {
#[error("Deserialization error: {0}")] #[error("Deserialization error: {0}")]
JsonError(#[from] serde_json::Error), JsonError(#[from] serde_json::Error),
#[error("Authentication Error: {0}")] #[error("Authentication Error: {0}")]
AuthenticationError(#[from] crate::auth::AuthenticationError), AuthenticationError(#[from] crate::util::auth::AuthenticationError),
#[error("Authentication Error: {0}")] #[error("Authentication Error: {0}")]
CustomAuthenticationError(String), CustomAuthenticationError(String),
#[error("Invalid Input: {0}")] #[error("Invalid Input: {0}")]
InvalidInputError(String), InvalidInputError(String),
#[error("Error while validating input: {0}")] #[error("Error while validating input: {0}")]
ValidationError(#[from] validator::ValidationErrors), ValidationError(String),
#[error("Search Error: {0}")] #[error("Search Error: {0}")]
SearchError(#[from] meilisearch_sdk::errors::Error), SearchError(#[from] meilisearch_sdk::errors::Error),
#[error("Indexing Error: {0}")] #[error("Indexing Error: {0}")]

View File

@@ -1,7 +1,7 @@
use super::ApiError; use super::ApiError;
use crate::auth::check_is_moderator_from_headers;
use crate::database; use crate::database;
use crate::models::projects::{Project, ProjectStatus}; use crate::models::projects::{Project, ProjectStatus};
use crate::util::auth::check_is_moderator_from_headers;
use actix_web::{get, web, HttpRequest, HttpResponse}; use actix_web::{get, web, HttpRequest, HttpResponse};
use serde::Deserialize; use serde::Deserialize;
use sqlx::PgPool; use sqlx::PgPool;

View File

@@ -1,8 +1,8 @@
use crate::auth::get_user_from_headers;
use crate::database; use crate::database;
use crate::models::ids::NotificationId; use crate::models::ids::NotificationId;
use crate::models::notifications::{Notification, NotificationAction}; use crate::models::notifications::{Notification, NotificationAction};
use crate::routes::ApiError; use crate::routes::ApiError;
use crate::util::auth::get_user_from_headers;
use actix_web::{delete, get, web, HttpRequest, HttpResponse}; use actix_web::{delete, get, web, HttpRequest, HttpResponse};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use sqlx::PgPool; use sqlx::PgPool;
@@ -70,6 +70,7 @@ pub fn convert_notification(
Notification { Notification {
id: notif.id.into(), id: notif.id.into(),
user_id: notif.user_id.into(), user_id: notif.user_id.into(),
type_: notif.notification_type,
title: notif.title, title: notif.title,
text: notif.text, text: notif.text,
link: notif.link, link: notif.link,
@@ -101,7 +102,12 @@ pub async fn notification_delete(
if let Some(data) = notification_data { if let Some(data) = notification_data {
if data.user_id == user.id.into() || user.role.is_mod() { if data.user_id == user.id.into() || user.role.is_mod() {
database::models::notification_item::Notification::remove(id.into(), &**pool).await?; let mut transaction = pool.begin().await?;
database::models::notification_item::Notification::remove(id.into(), &mut transaction)
.await?;
transaction.commit().await?;
Ok(HttpResponse::NoContent().body("")) Ok(HttpResponse::NoContent().body(""))
} else { } else {
@@ -113,3 +119,38 @@ pub async fn notification_delete(
Ok(HttpResponse::NotFound().body("")) Ok(HttpResponse::NotFound().body(""))
} }
} }
/// `DELETE notifications` — bulk-deletes notifications.
///
/// `ids` is a JSON-encoded array of base62 notification ids. Each requested
/// notification is deleted only when it belongs to the caller or the caller
/// has a moderator role; any other requested ids are silently skipped.
/// Responds `204 No Content` on success.
#[delete("notifications")]
pub async fn notifications_delete(
    req: HttpRequest,
    web::Query(ids): web::Query<NotificationIds>,
    pool: web::Data<PgPool>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(req.headers(), &**pool).await?;

    // The query string carries the ids as a JSON array.
    let requested_ids = serde_json::from_str::<Vec<NotificationId>>(&*ids.ids)?
        .into_iter()
        .map(|x| x.into())
        .collect();

    let mut transaction = pool.begin().await?;

    let found =
        database::models::notification_item::Notification::get_many(requested_ids, &**pool)
            .await?;

    // Keep only the notifications this user is allowed to delete.
    let deletable: Vec<database::models::ids::NotificationId> = found
        .into_iter()
        .filter(|notification| notification.user_id == user.id.into() || user.role.is_mod())
        .map(|notification| notification.id)
        .collect();

    database::models::notification_item::Notification::remove_many(deletable, &mut transaction)
        .await?;

    transaction.commit().await?;

    Ok(HttpResponse::NoContent().body(""))
}

View File

@@ -1,4 +1,3 @@
use crate::auth::{get_user_from_headers, AuthenticationError};
use crate::database::models; use crate::database::models;
use crate::file_hosting::{FileHost, FileHostingError}; use crate::file_hosting::{FileHost, FileHostingError};
use crate::models::error::ApiError; use crate::models::error::ApiError;
@@ -8,13 +7,13 @@ use crate::models::projects::{
use crate::models::users::UserId; use crate::models::users::UserId;
use crate::routes::version_creation::InitialVersionData; use crate::routes::version_creation::InitialVersionData;
use crate::search::indexing::{queue::CreationQueue, IndexingError}; use crate::search::indexing::{queue::CreationQueue, IndexingError};
use crate::util::auth::{get_user_from_headers, AuthenticationError};
use crate::util::validate::validation_errors_to_string;
use actix_multipart::{Field, Multipart}; use actix_multipart::{Field, Multipart};
use actix_web::http::StatusCode; use actix_web::http::StatusCode;
use actix_web::web::Data; use actix_web::web::Data;
use actix_web::{post, HttpRequest, HttpResponse}; use actix_web::{post, HttpRequest, HttpResponse};
use futures::stream::StreamExt; use futures::stream::StreamExt;
use lazy_static::lazy_static;
use regex::Regex;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use sqlx::postgres::PgPool; use sqlx::postgres::PgPool;
use std::sync::Arc; use std::sync::Arc;
@@ -36,7 +35,7 @@ pub enum CreateError {
#[error("Error while parsing JSON: {0}")] #[error("Error while parsing JSON: {0}")]
SerDeError(#[from] serde_json::Error), SerDeError(#[from] serde_json::Error),
#[error("Error while validating input: {0}")] #[error("Error while validating input: {0}")]
ValidationError(#[from] validator::ValidationErrors), ValidationError(String),
#[error("Error while uploading file")] #[error("Error while uploading file")]
FileHostingError(#[from] FileHostingError), FileHostingError(#[from] FileHostingError),
#[error("Error while validating uploaded file: {0}")] #[error("Error while validating uploaded file: {0}")]
@@ -116,10 +115,6 @@ impl actix_web::ResponseError for CreateError {
} }
} }
lazy_static! {
static ref RE_URL_SAFE: Regex = Regex::new(r"^[a-zA-Z0-9_-]*$").unwrap();
}
fn default_project_type() -> String { fn default_project_type() -> String {
"mod".to_string() "mod".to_string()
} }
@@ -134,7 +129,10 @@ struct ProjectCreateData {
#[serde(default = "default_project_type")] #[serde(default = "default_project_type")]
/// The project type of this mod /// The project type of this mod
pub project_type: String, pub project_type: String,
#[validate(length(min = 3, max = 64), regex = "RE_URL_SAFE")] #[validate(
length(min = 3, max = 64),
regex = "crate::util::validate::RE_URL_SAFE"
)]
#[serde(alias = "mod_slug")] #[serde(alias = "mod_slug")]
/// The slug of a project, used for vanity URLs /// The slug of a project, used for vanity URLs
pub slug: String, pub slug: String,
@@ -153,6 +151,7 @@ struct ProjectCreateData {
pub server_side: SideType, pub server_side: SideType,
#[validate(length(max = 64))] #[validate(length(max = 64))]
#[validate]
/// A list of initial versions to upload with the created project /// A list of initial versions to upload with the created project
pub initial_versions: Vec<InitialVersionData>, pub initial_versions: Vec<InitialVersionData>,
#[validate(length(max = 3))] #[validate(length(max = 3))]
@@ -326,7 +325,9 @@ pub async fn project_create_inner(
} }
let create_data: ProjectCreateData = serde_json::from_slice(&data)?; let create_data: ProjectCreateData = serde_json::from_slice(&data)?;
create_data.validate()?; create_data
.validate()
.map_err(|err| CreateError::InvalidInput(validation_errors_to_string(err, None)))?;
let slug_project_id_option: Option<ProjectId> = let slug_project_id_option: Option<ProjectId> =
serde_json::from_str(&*format!("\"{}\"", create_data.slug)).ok(); serde_json::from_str(&*format!("\"{}\"", create_data.slug)).ok();
@@ -498,6 +499,12 @@ pub async fn project_create_inner(
status = ProjectStatus::Draft; status = ProjectStatus::Draft;
} else { } else {
status = ProjectStatus::Processing; status = ProjectStatus::Processing;
if project_create_data.initial_versions.is_empty() {
return Err(CreateError::InvalidInput(String::from(
"Project submitted for review with no initial versions",
)));
}
} }
let status_id = models::StatusId::get_id(&status, &mut *transaction) let status_id = models::StatusId::get_id(&status, &mut *transaction)
@@ -590,6 +597,7 @@ pub async fn project_create_inner(
published: now, published: now,
updated: now, updated: now,
status: status.clone(), status: status.clone(),
rejection_data: None,
license: License { license: License {
id: project_create_data.license_id.clone(), id: project_create_data.license_id.clone(),
name: "".to_string(), name: "".to_string(),
@@ -622,6 +630,12 @@ pub async fn project_create_inner(
) )
.await?; .await?;
indexing_queue.add(index_project); indexing_queue.add(index_project);
if let Ok(webhook_url) = dotenv::var("MODERATION_DISCORD_WEBHOOK") {
crate::util::webhook::send_discord_webhook(response.clone(), webhook_url)
.await
.ok();
}
} }
Ok(HttpResponse::Ok().json(response)) Ok(HttpResponse::Ok().json(response))
@@ -643,7 +657,9 @@ async fn create_initial_version(
))); )));
} }
version_data.validate()?; version_data
.validate()
.map_err(|err| CreateError::ValidationError(validation_errors_to_string(err, None)))?;
// Randomly generate a new id to be used for the version // Randomly generate a new id to be used for the version
let version_id: VersionId = models::generate_version_id(transaction).await?.into(); let version_id: VersionId = models::generate_version_id(transaction).await?.into();
@@ -684,7 +700,11 @@ async fn create_initial_version(
let dependencies = version_data let dependencies = version_data
.dependencies .dependencies
.iter() .iter()
.map(|x| ((x.version_id).into(), x.dependency_type.to_string())) .map(|d| models::version_item::DependencyBuilder {
version_id: d.version_id.map(|x| x.into()),
project_id: d.project_id.map(|x| x.into()),
dependency_type: d.dependency_type.to_string(),
})
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let version = models::version_item::VersionBuilder { let version = models::version_item::VersionBuilder {

View File

@@ -1,21 +1,23 @@
use crate::auth::get_user_from_headers;
use crate::database; use crate::database;
use crate::database::cache::project_cache::remove_cache_project;
use crate::database::cache::query_project_cache::remove_cache_query_project;
use crate::file_hosting::FileHost; use crate::file_hosting::FileHost;
use crate::models; use crate::models;
use crate::models::projects::{ use crate::models::projects::{
DonationLink, License, ProjectId, ProjectStatus, SearchRequest, SideType, DonationLink, License, ProjectId, ProjectStatus, RejectionReason, SearchRequest, SideType,
}; };
use crate::models::teams::Permissions; use crate::models::teams::Permissions;
use crate::routes::ApiError; use crate::routes::ApiError;
use crate::search::indexing::queue::CreationQueue; use crate::search::indexing::queue::CreationQueue;
use crate::search::{search_for_project, SearchConfig, SearchError}; use crate::search::{search_for_project, SearchConfig, SearchError};
use crate::util::auth::get_user_from_headers;
use crate::util::validate::validation_errors_to_string;
use actix_web::web::Data; use actix_web::web::Data;
use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse}; use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
use futures::StreamExt; use futures::StreamExt;
use lazy_static::lazy_static;
use regex::Regex;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use sqlx::PgPool; use sqlx::PgPool;
use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
use validator::Validate; use validator::Validate;
@@ -91,7 +93,8 @@ pub async fn project_get(
let string = info.into_inner().0; let string = info.into_inner().0;
let project_data = let project_data =
database::models::Project::get_full_from_slug_or_project_id(string, &**pool).await?; database::models::Project::get_full_from_slug_or_project_id(string.clone(), &**pool)
.await?;
let user_option = get_user_from_headers(req.headers(), &**pool).await.ok(); let user_option = get_user_from_headers(req.headers(), &**pool).await.ok();
@@ -129,6 +132,94 @@ pub async fn project_get(
} }
} }
struct DependencyInfo {
pub project: Option<models::projects::Project>,
pub version: Option<models::projects::Version>,
}
#[get("dependencies")]
pub async fn dependency_list(
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
) -> Result<HttpResponse, ApiError> {
let string = info.into_inner().0;
let result = database::models::Project::get_from_slug_or_project_id(string, &**pool).await?;
if let Some(project) = result {
let id = project.id;
use futures::stream::TryStreamExt;
let dependencies = sqlx::query!(
"
SELECT d.dependent_id, d.dependency_id, d.mod_dependency_id
FROM versions v
INNER JOIN dependencies d ON d.dependent_id = v.id
WHERE v.mod_id = $1
",
id as database::models::ProjectId
)
.fetch_many(&**pool)
.try_filter_map(|e| async {
Ok(e.right().map(|x| {
(
database::models::VersionId(x.dependent_id),
x.dependency_id.map(database::models::VersionId),
x.mod_dependency_id.map(database::models::ProjectId),
)
}))
})
.try_collect::<Vec<(
database::models::VersionId,
Option<database::models::VersionId>,
Option<database::models::ProjectId>,
)>>()
.await?;
let projects = database::Project::get_many_full(
dependencies.iter().map(|x| x.2).flatten().collect(),
&**pool,
)
.await?;
let versions = database::Version::get_many_full(
dependencies.iter().map(|x| x.1).flatten().collect(),
&**pool,
)
.await?;
let mut response: HashMap<models::projects::VersionId, DependencyInfo> = HashMap::new();
for dependency in dependencies {
response.insert(
dependency.0.into(),
DependencyInfo {
project: if let Some(id) = dependency.2 {
projects
.iter()
.find(|x| x.inner.id == id)
.map(|x| convert_project(x.clone()))
} else {
None
},
version: if let Some(id) = dependency.1 {
versions
.iter()
.find(|x| x.id == id)
.map(|x| super::versions::convert_version(x.clone()))
} else {
None
},
},
);
}
Ok(HttpResponse::NotFound().body(""))
} else {
Ok(HttpResponse::NotFound().body(""))
}
}
pub fn convert_project( pub fn convert_project(
data: database::models::project_item::QueryProject, data: database::models::project_item::QueryProject,
) -> models::projects::Project { ) -> models::projects::Project {
@@ -146,6 +237,14 @@ pub fn convert_project(
published: m.published, published: m.published,
updated: m.updated, updated: m.updated,
status: data.status, status: data.status,
rejection_data: if let Some(reason) = m.rejection_reason {
Some(RejectionReason {
reason,
body: m.rejection_body,
})
} else {
None
},
license: License { license: License {
id: data.license_id, id: data.license_id,
name: data.license_name, name: data.license_name,
@@ -175,10 +274,6 @@ pub fn convert_project(
} }
} }
lazy_static! {
static ref RE_URL_SAFE: Regex = Regex::new(r"^[a-zA-Z0-9_-]*$").unwrap();
}
/// A project returned from the API /// A project returned from the API
#[derive(Serialize, Deserialize, Validate)] #[derive(Serialize, Deserialize, Validate)]
pub struct EditProject { pub struct EditProject {
@@ -188,7 +283,6 @@ pub struct EditProject {
pub description: Option<String>, pub description: Option<String>,
#[validate(length(max = 65536))] #[validate(length(max = 65536))]
pub body: Option<String>, pub body: Option<String>,
pub status: Option<ProjectStatus>,
#[validate(length(max = 3))] #[validate(length(max = 3))]
pub categories: Option<Vec<String>>, pub categories: Option<Vec<String>>,
#[serde( #[serde(
@@ -236,8 +330,26 @@ pub struct EditProject {
skip_serializing_if = "Option::is_none", skip_serializing_if = "Option::is_none",
with = "::serde_with::rust::double_option" with = "::serde_with::rust::double_option"
)] )]
#[validate(length(min = 3, max = 64), regex = "RE_URL_SAFE")] #[validate(
length(min = 3, max = 64),
regex = "crate::util::validate::RE_URL_SAFE"
)]
pub slug: Option<Option<String>>, pub slug: Option<Option<String>>,
pub status: Option<ProjectStatus>,
#[serde(
default,
skip_serializing_if = "Option::is_none",
with = "::serde_with::rust::double_option"
)]
#[validate(length(max = 2000))]
pub rejection_reason: Option<Option<String>>,
#[serde(
default,
skip_serializing_if = "Option::is_none",
with = "::serde_with::rust::double_option"
)]
#[validate(length(max = 65536))]
pub rejection_body: Option<Option<String>>,
} }
#[patch("{id}")] #[patch("{id}")]
@@ -251,11 +363,14 @@ pub async fn project_edit(
) -> Result<HttpResponse, ApiError> { ) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(req.headers(), &**pool).await?; let user = get_user_from_headers(req.headers(), &**pool).await?;
new_project.validate()?; new_project
.validate()
.map_err(|err| ApiError::ValidationError(validation_errors_to_string(err, None)))?;
let string = info.into_inner().0; let string = info.into_inner().0;
let result = let result =
database::models::Project::get_full_from_slug_or_project_id(string, &**pool).await?; database::models::Project::get_full_from_slug_or_project_id(string.clone(), &**pool)
.await?;
if let Some(project_item) = result { if let Some(project_item) = result {
let id = project_item.inner.id; let id = project_item.inner.id;
@@ -337,6 +452,12 @@ pub async fn project_edit(
)); ));
} }
if status == &ProjectStatus::Processing && project_item.versions.is_empty() {
return Err(ApiError::InvalidInputError(String::from(
"Project submitted for review with no initial versions",
)));
}
let status_id = database::models::StatusId::get_id(&status, &mut *transaction) let status_id = database::models::StatusId::get_id(&status, &mut *transaction)
.await? .await?
.ok_or_else(|| { .ok_or_else(|| {
@@ -357,6 +478,30 @@ pub async fn project_edit(
.execute(&mut *transaction) .execute(&mut *transaction)
.await?; .await?;
if project_item.status == ProjectStatus::Processing {
sqlx::query!(
"
UPDATE mods
SET rejection_reason = NULL
WHERE (id = $1)
",
id as database::models::ids::ProjectId,
)
.execute(&mut *transaction)
.await?;
sqlx::query!(
"
UPDATE mods
SET rejection_body = NULL
WHERE (id = $1)
",
id as database::models::ids::ProjectId,
)
.execute(&mut *transaction)
.await?;
}
if project_item.status.is_searchable() && !status.is_searchable() { if project_item.status.is_searchable() && !status.is_searchable() {
delete_from_index(id.into(), config).await?; delete_from_index(id.into(), config).await?;
} else if !project_item.status.is_searchable() && status.is_searchable() { } else if !project_item.status.is_searchable() && status.is_searchable() {
@@ -365,6 +510,15 @@ pub async fn project_edit(
.await?; .await?;
indexing_queue.add(index_project); indexing_queue.add(index_project);
if let Ok(webhook_url) = dotenv::var("MODERATION_DISCORD_WEBHOOK") {
crate::util::webhook::send_discord_webhook(
convert_project(project_item.clone()),
webhook_url,
)
.await
.ok();
}
} }
} }
@@ -684,6 +838,48 @@ pub async fn project_edit(
} }
} }
if let Some(rejection_reason) = &new_project.rejection_reason {
if !user.role.is_mod() {
return Err(ApiError::CustomAuthenticationError(
"You do not have the permissions to edit the rejection reason of this project!"
.to_string(),
));
}
sqlx::query!(
"
UPDATE mods
SET rejection_reason = $1
WHERE (id = $2)
",
rejection_reason.as_deref(),
id as database::models::ids::ProjectId,
)
.execute(&mut *transaction)
.await?;
}
if let Some(rejection_body) = &new_project.rejection_body {
if !user.role.is_mod() {
return Err(ApiError::CustomAuthenticationError(
"You do not have the permissions to edit the rejection body of this project!"
.to_string(),
));
}
sqlx::query!(
"
UPDATE mods
SET rejection_body = $1
WHERE (id = $2)
",
rejection_body.as_deref(),
id as database::models::ids::ProjectId,
)
.execute(&mut *transaction)
.await?;
}
if let Some(body) = &new_project.body { if let Some(body) = &new_project.body {
if !perms.contains(Permissions::EDIT_BODY) { if !perms.contains(Permissions::EDIT_BODY) {
return Err(ApiError::CustomAuthenticationError( return Err(ApiError::CustomAuthenticationError(
@@ -705,6 +901,9 @@ pub async fn project_edit(
.await?; .await?;
} }
remove_cache_project(string.clone()).await;
remove_cache_query_project(string).await;
transaction.commit().await?; transaction.commit().await?;
Ok(HttpResponse::NoContent().body("")) Ok(HttpResponse::NoContent().body(""))
} else { } else {
@@ -736,11 +935,12 @@ pub async fn project_icon_edit(
let user = get_user_from_headers(req.headers(), &**pool).await?; let user = get_user_from_headers(req.headers(), &**pool).await?;
let string = info.into_inner().0; let string = info.into_inner().0;
let project_item = database::models::Project::get_from_slug_or_project_id(string, &**pool) let project_item =
.await? database::models::Project::get_from_slug_or_project_id(string.clone(), &**pool)
.ok_or_else(|| { .await?
ApiError::InvalidInputError("The specified project does not exist!".to_string()) .ok_or_else(|| {
})?; ApiError::InvalidInputError("The specified project does not exist!".to_string())
})?;
if !user.role.is_mod() { if !user.role.is_mod() {
let team_member = database::models::TeamMember::get_from_user_id( let team_member = database::models::TeamMember::get_from_user_id(
@@ -782,12 +982,14 @@ pub async fn project_icon_edit(
))); )));
} }
let hash = sha1::Sha1::from(bytes.clone()).hexdigest();
let project_id: ProjectId = project_item.id.into(); let project_id: ProjectId = project_item.id.into();
let upload_data = file_host let upload_data = file_host
.upload_file( .upload_file(
content_type, content_type,
&format!("data/{}/icon.{}", project_id, ext.ext), &format!("data/{}/{}.{}", project_id, hash, ext.ext),
bytes.to_vec(), bytes.to_vec(),
) )
.await?; .await?;
@@ -804,6 +1006,9 @@ pub async fn project_icon_edit(
.execute(&**pool) .execute(&**pool)
.await?; .await?;
remove_cache_project(string.clone()).await;
remove_cache_query_project(string).await;
Ok(HttpResponse::NoContent().body("")) Ok(HttpResponse::NoContent().body(""))
} else { } else {
Err(ApiError::InvalidInputError(format!( Err(ApiError::InvalidInputError(format!(
@@ -823,7 +1028,7 @@ pub async fn project_delete(
let user = get_user_from_headers(req.headers(), &**pool).await?; let user = get_user_from_headers(req.headers(), &**pool).await?;
let string = info.into_inner().0; let string = info.into_inner().0;
let project = database::models::Project::get_from_slug_or_project_id(string, &**pool) let project = database::models::Project::get_from_slug_or_project_id(string.clone(), &**pool)
.await? .await?
.ok_or_else(|| { .ok_or_else(|| {
ApiError::InvalidInputError("The specified project does not exist!".to_string()) ApiError::InvalidInputError("The specified project does not exist!".to_string())
@@ -851,7 +1056,14 @@ pub async fn project_delete(
} }
} }
let result = database::models::Project::remove_full(project.id, &**pool).await?; let mut transaction = pool.begin().await?;
let result = database::models::Project::remove_full(project.id, &mut transaction).await?;
remove_cache_project(string.clone()).await;
remove_cache_query_project(string).await;
transaction.commit().await?;
delete_from_index(project.id.into(), config).await?; delete_from_index(project.id.into(), config).await?;
@@ -893,6 +1105,8 @@ pub async fn project_follow(
.unwrap_or(false); .unwrap_or(false);
if !following { if !following {
let mut transaction = pool.begin().await?;
sqlx::query!( sqlx::query!(
" "
UPDATE mods UPDATE mods
@@ -901,7 +1115,7 @@ pub async fn project_follow(
", ",
project_id as database::models::ids::ProjectId, project_id as database::models::ids::ProjectId,
) )
.execute(&**pool) .execute(&mut *transaction)
.await?; .await?;
sqlx::query!( sqlx::query!(
@@ -912,9 +1126,11 @@ pub async fn project_follow(
user_id as database::models::ids::UserId, user_id as database::models::ids::UserId,
project_id as database::models::ids::ProjectId project_id as database::models::ids::ProjectId
) )
.execute(&**pool) .execute(&mut *transaction)
.await?; .await?;
transaction.commit().await?;
Ok(HttpResponse::NoContent().body("")) Ok(HttpResponse::NoContent().body(""))
} else { } else {
Err(ApiError::InvalidInputError( Err(ApiError::InvalidInputError(
@@ -954,6 +1170,8 @@ pub async fn project_unfollow(
.unwrap_or(false); .unwrap_or(false);
if following { if following {
let mut transaction = pool.begin().await?;
sqlx::query!( sqlx::query!(
" "
UPDATE mods UPDATE mods
@@ -962,7 +1180,7 @@ pub async fn project_unfollow(
", ",
project_id as database::models::ids::ProjectId, project_id as database::models::ids::ProjectId,
) )
.execute(&**pool) .execute(&mut *transaction)
.await?; .await?;
sqlx::query!( sqlx::query!(
@@ -973,9 +1191,11 @@ pub async fn project_unfollow(
user_id as database::models::ids::UserId, user_id as database::models::ids::UserId,
project_id as database::models::ids::ProjectId project_id as database::models::ids::ProjectId
) )
.execute(&**pool) .execute(&mut *transaction)
.await?; .await?;
transaction.commit().await?;
Ok(HttpResponse::NoContent().body("")) Ok(HttpResponse::NoContent().body(""))
} else { } else {
Err(ApiError::InvalidInputError( Err(ApiError::InvalidInputError(

View File

@@ -1,7 +1,7 @@
use crate::auth::{check_is_moderator_from_headers, get_user_from_headers};
use crate::models::ids::{ProjectId, UserId, VersionId}; use crate::models::ids::{ProjectId, UserId, VersionId};
use crate::models::reports::{ItemType, Report}; use crate::models::reports::{ItemType, Report};
use crate::routes::ApiError; use crate::routes::ApiError;
use crate::util::auth::{check_is_moderator_from_headers, get_user_from_headers};
use actix_web::{delete, get, post, web, HttpRequest, HttpResponse}; use actix_web::{delete, get, post, web, HttpRequest, HttpResponse};
use futures::StreamExt; use futures::StreamExt;
use serde::Deserialize; use serde::Deserialize;

View File

@@ -1,7 +1,7 @@
use super::ApiError; use super::ApiError;
use crate::auth::check_is_admin_from_headers;
use crate::database::models; use crate::database::models;
use crate::database::models::categories::{DonationPlatform, License, ProjectType, ReportType}; use crate::database::models::categories::{DonationPlatform, License, ProjectType, ReportType};
use crate::util::auth::check_is_admin_from_headers;
use actix_web::{delete, get, put, web, HttpRequest, HttpResponse}; use actix_web::{delete, get, put, web, HttpRequest, HttpResponse};
use models::categories::{Category, GameVersion, Loader}; use models::categories::{Category, GameVersion, Loader};
use sqlx::PgPool; use sqlx::PgPool;

View File

@@ -1,4 +1,3 @@
use crate::auth::get_user_from_headers;
use crate::database::models::notification_item::{NotificationActionBuilder, NotificationBuilder}; use crate::database::models::notification_item::{NotificationActionBuilder, NotificationBuilder};
use crate::database::models::team_item::QueryTeamMember; use crate::database::models::team_item::QueryTeamMember;
use crate::database::models::TeamMember; use crate::database::models::TeamMember;
@@ -6,6 +5,7 @@ use crate::models::ids::ProjectId;
use crate::models::teams::{Permissions, TeamId}; use crate::models::teams::{Permissions, TeamId};
use crate::models::users::UserId; use crate::models::users::UserId;
use crate::routes::ApiError; use crate::routes::ApiError;
use crate::util::auth::get_user_from_headers;
use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse}; use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use sqlx::PgPool; use sqlx::PgPool;
@@ -246,6 +246,7 @@ pub async fn add_team_member(
let team: TeamId = team_id.into(); let team: TeamId = team_id.into();
NotificationBuilder { NotificationBuilder {
notification_type: Some("team_invite".to_string()),
title: "You have been invited to join a team!".to_string(), title: "You have been invited to join a team!".to_string(),
text: format!( text: format!(
"Team invite from {} to join the team for project {}", "Team invite from {} to join the team for project {}",

View File

@@ -1,4 +1,3 @@
use crate::auth::get_user_from_headers;
use crate::database::models::User; use crate::database::models::User;
use crate::file_hosting::FileHost; use crate::file_hosting::FileHost;
use crate::models::notifications::Notification; use crate::models::notifications::Notification;
@@ -6,6 +5,8 @@ use crate::models::projects::{Project, ProjectStatus};
use crate::models::users::{Role, UserId}; use crate::models::users::{Role, UserId};
use crate::routes::notifications::convert_notification; use crate::routes::notifications::convert_notification;
use crate::routes::ApiError; use crate::routes::ApiError;
use crate::util::auth::get_user_from_headers;
use crate::util::validate::validation_errors_to_string;
use actix_web::{delete, get, patch, web, HttpRequest, HttpResponse}; use actix_web::{delete, get, patch, web, HttpRequest, HttpResponse};
use futures::StreamExt; use futures::StreamExt;
use lazy_static::lazy_static; use lazy_static::lazy_static;
@@ -166,7 +167,9 @@ pub async fn user_edit(
) -> Result<HttpResponse, ApiError> { ) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(req.headers(), &**pool).await?; let user = get_user_from_headers(req.headers(), &**pool).await?;
new_user.validate()?; new_user
.validate()
.map_err(|err| ApiError::ValidationError(validation_errors_to_string(err, None)))?;
let id_option = let id_option =
crate::database::models::User::get_id_from_username_or_id(info.into_inner().0, &**pool) crate::database::models::User::get_id_from_username_or_id(info.into_inner().0, &**pool)
@@ -396,13 +399,17 @@ pub async fn user_delete(
)); ));
} }
let mut transaction = pool.begin().await?;
let result; let result;
if &*removal_type.removal_type == "full" { if &*removal_type.removal_type == "full" {
result = crate::database::models::User::remove_full(id, &**pool).await?; result = crate::database::models::User::remove_full(id, &mut transaction).await?;
} else { } else {
result = crate::database::models::User::remove(id, &**pool).await?; result = crate::database::models::User::remove(id, &mut transaction).await?;
}; };
transaction.commit().await?;
if result.is_some() { if result.is_some() {
Ok(HttpResponse::NoContent().body("")) Ok(HttpResponse::NoContent().body(""))
} else { } else {

View File

@@ -1,8 +1,8 @@
use crate::auth::check_is_moderator_from_headers;
use crate::database; use crate::database;
use crate::models::projects::{Project, ProjectStatus}; use crate::models::projects::{Project, ProjectStatus};
use crate::routes::moderation::ResultCount; use crate::routes::moderation::ResultCount;
use crate::routes::ApiError; use crate::routes::ApiError;
use crate::util::auth::check_is_moderator_from_headers;
use actix_web::web; use actix_web::web;
use actix_web::{get, HttpRequest, HttpResponse}; use actix_web::{get, HttpRequest, HttpResponse};
use sqlx::PgPool; use sqlx::PgPool;

View File

@@ -1,4 +1,3 @@
use crate::auth::get_user_from_headers;
use crate::file_hosting::FileHost; use crate::file_hosting::FileHost;
use crate::models::projects::SearchRequest; use crate::models::projects::SearchRequest;
use crate::routes::project_creation::{project_create_inner, undo_uploads, CreateError}; use crate::routes::project_creation::{project_create_inner, undo_uploads, CreateError};
@@ -6,6 +5,7 @@ use crate::routes::projects::{convert_project, ProjectIds};
use crate::routes::ApiError; use crate::routes::ApiError;
use crate::search::indexing::queue::CreationQueue; use crate::search::indexing::queue::CreationQueue;
use crate::search::{search_for_project, SearchConfig, SearchError}; use crate::search::{search_for_project, SearchConfig, SearchError};
use crate::util::auth::get_user_from_headers;
use crate::{database, models}; use crate::{database, models};
use actix_multipart::Multipart; use actix_multipart::Multipart;
use actix_web::web; use actix_web::web;

View File

@@ -1,8 +1,8 @@
use crate::auth::{check_is_moderator_from_headers, get_user_from_headers};
use crate::models::ids::ReportId; use crate::models::ids::ReportId;
use crate::models::projects::{ProjectId, VersionId}; use crate::models::projects::{ProjectId, VersionId};
use crate::models::users::UserId; use crate::models::users::UserId;
use crate::routes::ApiError; use crate::routes::ApiError;
use crate::util::auth::{check_is_moderator_from_headers, get_user_from_headers};
use actix_web::web; use actix_web::web;
use actix_web::{get, post, HttpRequest, HttpResponse}; use actix_web::{get, post, HttpRequest, HttpResponse};
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};

View File

@@ -1,6 +1,6 @@
use crate::auth::check_is_admin_from_headers;
use crate::database::models::categories::{Category, GameVersion, Loader, ProjectType}; use crate::database::models::categories::{Category, GameVersion, Loader, ProjectType};
use crate::routes::ApiError; use crate::routes::ApiError;
use crate::util::auth::check_is_admin_from_headers;
use actix_web::{get, put, web}; use actix_web::{get, put, web};
use actix_web::{HttpRequest, HttpResponse}; use actix_web::{HttpRequest, HttpResponse};
use sqlx::PgPool; use sqlx::PgPool;

View File

@@ -1,7 +1,7 @@
use crate::auth::get_user_from_headers;
use crate::models::teams::{Permissions, TeamId}; use crate::models::teams::{Permissions, TeamId};
use crate::models::users::UserId; use crate::models::users::UserId;
use crate::routes::ApiError; use crate::routes::ApiError;
use crate::util::auth::get_user_from_headers;
use actix_web::{get, web, HttpRequest, HttpResponse}; use actix_web::{get, web, HttpRequest, HttpResponse};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use sqlx::PgPool; use sqlx::PgPool;

View File

@@ -1,8 +1,8 @@
use crate::auth::get_user_from_headers;
use crate::database::models::User; use crate::database::models::User;
use crate::models::ids::UserId; use crate::models::ids::UserId;
use crate::models::projects::{ProjectId, ProjectStatus}; use crate::models::projects::{ProjectId, ProjectStatus};
use crate::routes::ApiError; use crate::routes::ApiError;
use crate::util::auth::get_user_from_headers;
use actix_web::web; use actix_web::web;
use actix_web::{get, HttpRequest, HttpResponse}; use actix_web::{get, HttpRequest, HttpResponse};
use sqlx::PgPool; use sqlx::PgPool;

View File

@@ -1,10 +1,10 @@
use crate::auth::get_user_from_headers;
use crate::file_hosting::FileHost; use crate::file_hosting::FileHost;
use crate::models::ids::{ProjectId, UserId, VersionId}; use crate::models::ids::{ProjectId, UserId, VersionId};
use crate::models::projects::{Dependency, GameVersion, Loader, Version, VersionFile, VersionType}; use crate::models::projects::{Dependency, GameVersion, Loader, Version, VersionFile, VersionType};
use crate::models::teams::Permissions; use crate::models::teams::Permissions;
use crate::routes::versions::{convert_version, VersionIds, VersionListFilters}; use crate::routes::versions::{convert_version, VersionIds, VersionListFilters};
use crate::routes::ApiError; use crate::routes::ApiError;
use crate::util::auth::get_user_from_headers;
use crate::{database, models, Pepper}; use crate::{database, models, Pepper};
use actix_web::{delete, get, web, HttpRequest, HttpResponse}; use actix_web::{delete, get, web, HttpRequest, HttpResponse};
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};

View File

@@ -1,4 +1,3 @@
use crate::auth::get_user_from_headers;
use crate::database::models; use crate::database::models;
use crate::database::models::notification_item::NotificationBuilder; use crate::database::models::notification_item::NotificationBuilder;
use crate::database::models::version_item::{VersionBuilder, VersionFileBuilder}; use crate::database::models::version_item::{VersionBuilder, VersionFileBuilder};
@@ -8,28 +7,27 @@ use crate::models::projects::{
}; };
use crate::models::teams::Permissions; use crate::models::teams::Permissions;
use crate::routes::project_creation::{CreateError, UploadedFile}; use crate::routes::project_creation::{CreateError, UploadedFile};
use crate::util::auth::get_user_from_headers;
use crate::util::validate::validation_errors_to_string;
use crate::validate::{validate_file, ValidationResult}; use crate::validate::{validate_file, ValidationResult};
use actix_multipart::{Field, Multipart}; use actix_multipart::{Field, Multipart};
use actix_web::web::Data; use actix_web::web::Data;
use actix_web::{post, HttpRequest, HttpResponse}; use actix_web::{post, HttpRequest, HttpResponse};
use futures::stream::StreamExt; use futures::stream::StreamExt;
use lazy_static::lazy_static;
use regex::Regex;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use sqlx::postgres::PgPool; use sqlx::postgres::PgPool;
use validator::Validate; use validator::Validate;
lazy_static! {
static ref RE_URL_SAFE: Regex = Regex::new(r"^[a-zA-Z0-9_\-.]*$").unwrap();
}
#[derive(Serialize, Deserialize, Validate, Clone)] #[derive(Serialize, Deserialize, Validate, Clone)]
pub struct InitialVersionData { pub struct InitialVersionData {
#[serde(alias = "mod_id")] #[serde(alias = "mod_id")]
pub project_id: Option<ProjectId>, pub project_id: Option<ProjectId>,
#[validate(length(min = 1, max = 256))] #[validate(length(min = 1, max = 256))]
pub file_parts: Vec<String>, pub file_parts: Vec<String>,
#[validate(length(min = 1, max = 64), regex = "RE_URL_SAFE")] #[validate(
length(min = 1, max = 64),
regex = "crate::util::validate::RE_URL_SAFE"
)]
pub version_number: String, pub version_number: String,
#[validate(length(min = 3, max = 256))] #[validate(length(min = 3, max = 256))]
pub version_title: String, pub version_title: String,
@@ -127,7 +125,9 @@ async fn version_create_inner(
)); ));
} }
version_create_data.validate()?; version_create_data.validate().map_err(|err| {
CreateError::ValidationError(validation_errors_to_string(err, None))
})?;
let project_id: models::ProjectId = version_create_data.project_id.unwrap().into(); let project_id: models::ProjectId = version_create_data.project_id.unwrap().into();
@@ -234,7 +234,11 @@ async fn version_create_inner(
let dependencies = version_create_data let dependencies = version_create_data
.dependencies .dependencies
.iter() .iter()
.map(|x| ((x.version_id).into(), x.dependency_type.to_string())) .map(|d| models::version_item::DependencyBuilder {
version_id: d.version_id.map(|x| x.into()),
project_id: d.project_id.map(|x| x.into()),
dependency_type: d.dependency_type.to_string(),
})
.collect::<Vec<_>>(); .collect::<Vec<_>>();
version_builder = Some(VersionBuilder { version_builder = Some(VersionBuilder {
@@ -332,9 +336,10 @@ async fn version_create_inner(
let version_id: VersionId = builder.version_id.into(); let version_id: VersionId = builder.version_id.into();
NotificationBuilder { NotificationBuilder {
title: "A project you followed has been updated!".to_string(), notification_type: Some("project_update".to_string()),
title: format!("**{}** has been updated!", result.title),
text: format!( text: format!(
"Project {} has been updated to version {}", "The project, {}, has released a new version: {}",
result.title, result.title,
version_data.version_number.clone() version_data.version_number.clone()
), ),

View File

@@ -1,9 +1,9 @@
use super::ApiError; use super::ApiError;
use crate::auth::get_user_from_headers;
use crate::file_hosting::FileHost; use crate::file_hosting::FileHost;
use crate::models; use crate::models;
use crate::models::projects::{GameVersion, Loader}; use crate::models::projects::{GameVersion, Loader};
use crate::models::teams::Permissions; use crate::models::teams::Permissions;
use crate::util::auth::get_user_from_headers;
use crate::{database, Pepper}; use crate::{database, Pepper};
use actix_web::{delete, get, post, web, HttpRequest, HttpResponse}; use actix_web::{delete, get, post, web, HttpRequest, HttpResponse};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@@ -118,7 +118,19 @@ async fn download_version_inner(
pepper: &web::Data<Pepper>, pepper: &web::Data<Pepper>,
) -> Result<(), ApiError> { ) -> Result<(), ApiError> {
let real_ip = req.connection_info(); let real_ip = req.connection_info();
let ip_option = real_ip.borrow().remote_addr(); let ip_option = if dotenv::var("CLOUDFLARE_INTEGRATION")
.ok()
.map(|i| i.parse().unwrap())
.unwrap_or(false)
{
if let Some(header) = req.headers().get("CF-Connecting-IP") {
header.to_str().ok()
} else {
real_ip.borrow().remote_addr()
}
} else {
real_ip.borrow().remote_addr()
};
if let Some(ip) = ip_option { if let Some(ip) = ip_option {
let hash = sha1::Sha1::from(format!("{}{}", ip, pepper.pepper)).hexdigest(); let hash = sha1::Sha1::from(format!("{}{}", ip, pepper.pepper)).hexdigest();

View File

@@ -1,12 +1,11 @@
use super::ApiError; use super::ApiError;
use crate::auth::get_user_from_headers;
use crate::database; use crate::database;
use crate::models; use crate::models;
use crate::models::projects::{Dependency, DependencyType}; use crate::models::projects::{Dependency, DependencyType};
use crate::models::teams::Permissions; use crate::models::teams::Permissions;
use crate::util::auth::get_user_from_headers;
use crate::util::validate::validation_errors_to_string;
use actix_web::{delete, get, patch, web, HttpRequest, HttpResponse}; use actix_web::{delete, get, patch, web, HttpRequest, HttpResponse};
use lazy_static::lazy_static;
use regex::Regex;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use sqlx::PgPool; use sqlx::PgPool;
use validator::Validate; use validator::Validate;
@@ -189,8 +188,9 @@ pub fn convert_version(
.dependencies .dependencies
.into_iter() .into_iter()
.map(|d| Dependency { .map(|d| Dependency {
version_id: d.0.into(), version_id: d.version_id.map(|x| x.into()),
dependency_type: DependencyType::from_str(&*d.1), project_id: d.project_id.map(|x| x.into()),
dependency_type: DependencyType::from_str(&*d.dependency_type),
}) })
.collect(), .collect(),
game_versions: data game_versions: data
@@ -206,15 +206,14 @@ pub fn convert_version(
} }
} }
lazy_static! {
static ref RE_URL_SAFE: Regex = Regex::new(r"^[a-zA-Z0-9_-]*$").unwrap();
}
#[derive(Serialize, Deserialize, Validate)] #[derive(Serialize, Deserialize, Validate)]
pub struct EditVersion { pub struct EditVersion {
#[validate(length(min = 3, max = 256))] #[validate(length(min = 3, max = 256))]
pub name: Option<String>, pub name: Option<String>,
#[validate(length(min = 1, max = 64), regex = "RE_URL_SAFE")] #[validate(
length(min = 1, max = 64),
regex = "crate::util::validate::RE_URL_SAFE"
)]
pub version_number: Option<String>, pub version_number: Option<String>,
#[validate(length(max = 65536))] #[validate(length(max = 65536))]
pub changelog: Option<String>, pub changelog: Option<String>,
@@ -236,7 +235,9 @@ pub async fn version_edit(
) -> Result<HttpResponse, ApiError> { ) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(req.headers(), &**pool).await?; let user = get_user_from_headers(req.headers(), &**pool).await?;
new_version.validate()?; new_version
.validate()
.map_err(|err| ApiError::ValidationError(validation_errors_to_string(err, None)))?;
let version_id = info.into_inner().0; let version_id = info.into_inner().0;
let id = version_id.into(); let id = version_id.into();
@@ -332,21 +333,17 @@ pub async fn version_edit(
.execute(&mut *transaction) .execute(&mut *transaction)
.await?; .await?;
for dependency in dependencies { let builders = dependencies
let dependency_id: database::models::ids::VersionId = .iter()
dependency.version_id.clone().into(); .map(|x| database::models::version_item::DependencyBuilder {
project_id: x.project_id.clone().map(|x| x.into()),
version_id: x.version_id.clone().map(|x| x.into()),
dependency_type: x.dependency_type.to_string(),
})
.collect::<Vec<database::models::version_item::DependencyBuilder>>();
sqlx::query!( for dependency in builders {
" dependency.insert(version_item.id, &mut transaction).await?;
INSERT INTO dependencies (dependent_id, dependency_id, dependency_type)
VALUES ($1, $2, $3)
",
id as database::models::ids::VersionId,
dependency_id as database::models::ids::VersionId,
dependency.dependency_type.as_str()
)
.execute(&mut *transaction)
.await?;
} }
} }
@@ -533,7 +530,11 @@ pub async fn version_delete(
} }
} }
let result = database::models::Version::remove_full(id.into(), &**pool).await?; let mut transaction = pool.begin().await?;
let result = database::models::Version::remove_full(id.into(), &mut transaction).await?;
transaction.commit().await?;
if result.is_some() { if result.is_some() {
Ok(HttpResponse::NoContent().body("")) Ok(HttpResponse::NoContent().body(""))

3
src/util/mod.rs Normal file
View File

@@ -0,0 +1,3 @@
pub mod auth;
pub mod validate;
pub mod webhook;

55
src/util/validate.rs Normal file
View File

@@ -0,0 +1,55 @@
use lazy_static::lazy_static;
use regex::Regex;
use validator::{ValidationErrors, ValidationErrorsKind};
lazy_static! {
    // Character whitelist for user-supplied slugs and version numbers:
    // alphanumerics plus a small set of URL-tolerable punctuation.
    pub static ref RE_URL_SAFE: Regex = Regex::new(r#"^[a-zA-Z0-9!@$()`.+,_"-]*$"#).unwrap();
}
//TODO: In order to ensure readability, only the first error is printed, this may need to be expanded on in the future!
pub fn validation_errors_to_string(errors: ValidationErrors, adder: Option<String>) -> String {
let mut output = String::new();
let map = errors.into_errors();
let key_option = map.keys().next().copied();
if let Some(field) = key_option {
if let Some(error) = map.get(field) {
return match error {
ValidationErrorsKind::Struct(errors) => {
validation_errors_to_string(*errors.clone(), Some(format!("of item {}", field)))
}
ValidationErrorsKind::List(list) => {
if let Some(errors) = list.get(&0) {
output.push_str(&*validation_errors_to_string(
*errors.clone(),
Some(format!("of list {} with index 0", field)),
));
}
output
}
ValidationErrorsKind::Field(errors) => {
if let Some(error) = errors.get(0) {
if let Some(adder) = adder {
output.push_str(&*format!(
"Field {} {} failed validation with error {}",
field, adder, error.code
));
} else {
output.push_str(&*format!(
"Field {} failed validation with error {}",
field, error.code
));
}
}
output
}
};
}
}
"".to_string()
}

107
src/util/webhook.rs Normal file
View File

@@ -0,0 +1,107 @@
use crate::models::projects::Project;
use chrono::{DateTime, Utc};
use serde::Serialize;
/// The embed object serialized into the webhook JSON payload.
#[derive(Serialize)]
struct DiscordEmbed {
    pub title: String,
    pub description: String,
    // Link target for the embed title (the project's page URL).
    pub url: String,
    pub timestamp: DateTime<Utc>,
    // Accent color encoded as a decimal RGB integer.
    pub color: u32,
    pub fields: Vec<DiscordEmbedField>,
    pub image: DiscordEmbedImage,
}
/// A single name/value pair shown inside the embed.
#[derive(Serialize)]
struct DiscordEmbedField {
    pub name: String,
    pub value: String,
    // When true the field may render side-by-side with other inline fields.
    pub inline: bool,
}
/// Image attached to the embed; `url` is the project's icon URL when present.
#[derive(Serialize)]
struct DiscordEmbedImage {
    pub url: Option<String>,
}
/// Top-level payload posted to the webhook URL; carries a list of embeds.
#[derive(Serialize)]
struct DiscordWebhook {
    pub embeds: Vec<DiscordEmbed>,
}
/// Posts a Discord webhook message announcing `project` as a rich embed.
///
/// The embed links to the project on the site configured via the `SITE_URL`
/// environment variable (empty-string fallback when unset) and lists the id,
/// optional slug, project type, client/server side support and categories as
/// fields. Returns any transport error raised by `reqwest`.
pub async fn send_discord_webhook(
    project: Project,
    webhook_url: String,
) -> Result<(), reqwest::Error> {
    // Five fixed fields plus the optional slug; pre-sizing avoids regrowth.
    let mut fields = Vec::with_capacity(6);

    fields.push(DiscordEmbedField {
        name: "id".to_string(),
        value: project.id.to_string(),
        inline: true,
    });

    if let Some(slug) = project.slug.clone() {
        fields.push(DiscordEmbedField {
            name: "slug".to_string(),
            value: slug,
            inline: true,
        });
    }

    fields.push(DiscordEmbedField {
        name: "project_type".to_string(),
        value: project.project_type.to_string(),
        inline: true,
    });

    fields.push(DiscordEmbedField {
        name: "client_side".to_string(),
        value: project.client_side.to_string(),
        inline: true,
    });

    fields.push(DiscordEmbedField {
        name: "server_side".to_string(),
        value: project.server_side.to_string(),
        inline: true,
    });

    fields.push(DiscordEmbedField {
        name: "categories".to_string(),
        value: project.categories.join(", "),
        inline: true,
    });

    // Link by slug when present, otherwise by id. Cloning only the slug fixes
    // the previous full `Project` clone that existed just to read this field.
    let url = format!(
        "{}/mod/{}",
        dotenv::var("SITE_URL").unwrap_or_default(),
        project
            .slug
            .clone()
            .unwrap_or_else(|| project.id.to_string())
    );

    let embed = DiscordEmbed {
        url,
        title: project.title,
        description: project.description,
        timestamp: project.published,
        color: 6137157,
        fields,
        image: DiscordEmbedImage {
            url: project.icon_url,
        },
    };

    reqwest::Client::new()
        .post(&webhook_url)
        .json(&DiscordWebhook {
            embeds: vec![embed],
        })
        .send()
        .await?;

    Ok(())
}