Project Types, Code Cleanup, and Rename Mods -> Projects (#192)

* Initial work for modpacks and project types

* Code cleanup, fix some issues

* Username route getting, remove pointless tests

* Base validator types + fixes

* Fix strange IML generation

* Multiple hash requests for version files

* Fix docker build (hopefully)

* Legacy routes

* Finish validator architecture

* Update rust version in dockerfile

* Added caching and fixed typo (#203)

* Added caching and fixed typo

* Fixed clippy error

* Removed log for cache

* Add final validators, fix how loaders are handled and add icons to tags

* Fix search module

* Fix parts of legacy API not working

Co-authored-by: Redblueflame <contact@redblueflame.com>
This commit is contained in:
Geometrically
2021-05-30 15:02:07 -07:00
committed by GitHub
parent 712424c339
commit 16db28060c
55 changed files with 6656 additions and 3908 deletions

129
src/routes/v1/mod.rs Normal file
View File

@@ -0,0 +1,129 @@
use actix_web::web;
mod moderation;
mod mods;
mod reports;
mod tags;
mod users;
mod versions;
/// Mounts the entire legacy v1 API under the `/api/v1/` scope.
pub fn v1_config(cfg: &mut web::ServiceConfig) {
    // Registration order is preserved from the original wiring; actix
    // resolves overlapping routes in registration order.
    let api_v1_scope = web::scope("/api/v1/")
        .configure(super::auth_config)
        .configure(tags_config)
        .configure(mods_config)
        .configure(versions_config)
        .configure(teams_config)
        .configure(users_config)
        .configure(moderation_config)
        .configure(reports_config)
        .configure(notifications_config);
    cfg.service(api_v1_scope);
}
/// Mounts the `/tag/` endpoints (categories, loaders, game versions,
/// licenses, donation platforms, report types).
pub fn tags_config(cfg: &mut web::ServiceConfig) {
    // Category/loader list+create come from the local v1 shims; every other
    // handler is shared with the current routes module via `super::tags`.
    let tag_scope = web::scope("/tag/")
        .service(tags::category_list)
        .service(tags::category_create)
        .service(super::tags::category_delete)
        .service(tags::loader_list)
        .service(tags::loader_create)
        .service(super::tags::loader_delete)
        .service(super::tags::game_version_list)
        .service(super::tags::game_version_create)
        .service(super::tags::game_version_delete)
        .service(super::tags::license_create)
        .service(super::tags::license_delete)
        .service(super::tags::license_list)
        .service(super::tags::donation_platform_create)
        .service(super::tags::donation_platform_list)
        .service(super::tags::donation_platform_delete)
        .service(super::tags::report_type_create)
        .service(super::tags::report_type_delete)
        .service(super::tags::report_type_list);
    cfg.service(tag_scope);
}
/// Mounts the legacy `mod`/`mods` project endpoints.
pub fn mods_config(cfg: &mut web::ServiceConfig) {
    // Per-project handlers are shared with the current `projects` module;
    // the scoped `{mod_id}` sub-route serves the legacy version listing.
    let mod_scope = web::scope("mod")
        .service(super::projects::project_get)
        .service(super::projects::project_delete)
        .service(super::projects::project_edit)
        .service(super::projects::project_icon_edit)
        .service(super::projects::project_follow)
        .service(super::projects::project_unfollow)
        .service(web::scope("{mod_id}").service(versions::version_list));
    cfg.service(mods::mod_search)
        .service(mods::mods_get)
        .service(mods::mod_create)
        .service(mod_scope);
}
/// Mounts the legacy `version`, `versions` and `version_file` endpoints.
pub fn versions_config(cfg: &mut web::ServiceConfig) {
    let version_scope = web::scope("version")
        .service(versions::version_get)
        .service(super::versions::version_delete)
        .service(super::version_creation::upload_file_to_version)
        .service(super::versions::version_edit);
    // Hash-based file lookup/download/deletion lives under `version_file`.
    let version_file_scope = web::scope("version_file")
        .service(versions::delete_file)
        .service(versions::get_version_from_hash)
        .service(versions::download_version);
    cfg.service(versions::versions_get)
        .service(super::version_creation::version_create)
        .service(version_scope)
        .service(version_file_scope);
}
/// Mounts the legacy `user` endpoints.
pub fn users_config(cfg: &mut web::ServiceConfig) {
    // Only `mods_list` is a v1-specific shim; the rest are shared handlers.
    let user_scope = web::scope("user")
        .service(super::users::user_get)
        .service(users::mods_list)
        .service(super::users::user_delete)
        .service(super::users::user_edit)
        .service(super::users::user_icon_edit)
        .service(super::users::user_notifications)
        .service(super::users::user_follows);
    cfg.service(super::users::user_auth_get)
        .service(super::users::users_get)
        .service(user_scope);
}
/// Mounts the `team` membership-management endpoints (all shared handlers).
pub fn teams_config(cfg: &mut web::ServiceConfig) {
    let team_scope = web::scope("team")
        .service(super::teams::team_members_get)
        .service(super::teams::edit_team_member)
        .service(super::teams::add_team_member)
        .service(super::teams::join_team)
        .service(super::teams::remove_team_member);
    cfg.service(team_scope);
}
/// Mounts the notification endpoints (all shared handlers).
pub fn notifications_config(cfg: &mut web::ServiceConfig) {
    let notification_scope = web::scope("notification")
        .service(super::notifications::notification_get)
        .service(super::notifications::notification_delete);
    cfg.service(super::notifications::notifications_get)
        .service(notification_scope);
}
/// Mounts the moderation queue endpoint under `moderation`.
pub fn moderation_config(cfg: &mut web::ServiceConfig) {
    let moderation_scope = web::scope("moderation").service(moderation::get_mods);
    cfg.service(moderation_scope);
}
/// Mounts the report endpoints; deletion is the shared (non-v1) handler.
pub fn reports_config(cfg: &mut web::ServiceConfig) {
    cfg.service(reports::reports)
        .service(reports::report_create)
        .service(super::reports::delete_report);
}

View File

@@ -0,0 +1,44 @@
use crate::auth::check_is_moderator_from_headers;
use crate::database;
use crate::models::projects::{Project, ProjectStatus};
use crate::routes::moderation::ResultCount;
use crate::routes::ApiError;
use actix_web::web;
use actix_web::{get, HttpRequest, HttpResponse};
use sqlx::PgPool;
/// `GET /api/v1/moderation/mods`
///
/// Lists projects still in the `processing` status, oldest-updated first,
/// limited by the `count` query parameter. Moderator-only.
#[get("mods")]
pub async fn get_mods(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    count: web::Query<ResultCount>,
) -> Result<HttpResponse, ApiError> {
    // Reject non-moderators before touching any data.
    check_is_moderator_from_headers(req.headers(), &**pool).await?;

    use futures::stream::TryStreamExt;

    // Ids of the oldest `count` projects awaiting moderation.
    let project_ids = sqlx::query!(
        "
        SELECT id FROM mods
        WHERE status = (
            SELECT id FROM statuses WHERE status = $1
        )
        ORDER BY updated ASC
        LIMIT $2;
        ",
        ProjectStatus::Processing.as_str(),
        count.count as i64
    )
    .fetch_many(&**pool)
    // `fetch_many` yields both rows and completion events; keep rows only.
    .try_filter_map(|e| async { Ok(e.right().map(|m| database::models::ProjectId(m.id))) })
    .try_collect::<Vec<database::models::ProjectId>>()
    .await?;

    // Hydrate the full records and convert them to the API model.
    let projects: Vec<Project> = database::Project::get_many_full(project_ids, &**pool)
        .await?
        .into_iter()
        .map(crate::routes::projects::convert_project)
        .collect();

    Ok(HttpResponse::Ok().json(projects))
}

172
src/routes/v1/mods.rs Normal file
View File

@@ -0,0 +1,172 @@
use crate::auth::get_user_from_headers;
use crate::file_hosting::FileHost;
use crate::models::projects::SearchRequest;
use crate::routes::project_creation::{project_create_inner, undo_uploads, CreateError};
use crate::routes::projects::{convert_project, ProjectIds};
use crate::routes::ApiError;
use crate::search::indexing::queue::CreationQueue;
use crate::search::{search_for_project, SearchConfig, SearchError};
use crate::{database, models};
use actix_multipart::Multipart;
use actix_web::web;
use actix_web::web::Data;
use actix_web::{get, post, HttpRequest, HttpResponse};
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use std::sync::Arc;
/// A single search hit in the legacy (v1) search response shape.
///
/// v2 search hits are re-mapped into this struct by `mod_search` so v1
/// clients keep seeing the `mod_id`/`page_url` style fields.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ResultSearchMod {
    pub mod_id: String,
    pub slug: Option<String>,
    pub author: String,
    pub title: String,
    pub description: String,
    pub categories: Vec<String>,
    pub versions: Vec<String>,
    pub downloads: i32,
    pub follows: i32,
    /// `https://modrinth.com/mod/{project_id}` (built in `mod_search`).
    pub page_url: String,
    pub icon_url: String,
    /// `https://modrinth.com/user/{author}` (built in `mod_search`).
    pub author_url: String,
    pub date_created: String,
    pub date_modified: String,
    pub latest_version: String,
    pub license: String,
    pub client_side: String,
    pub server_side: String,
    /// Always the literal string `"modrinth"`.
    pub host: String,
}
/// Legacy search response wrapper: one page of hits plus paging metadata.
#[derive(Serialize, Deserialize, Debug)]
pub struct SearchResults {
    pub hits: Vec<ResultSearchMod>,
    /// Offset into the full result set at which `hits` starts.
    pub offset: usize,
    pub limit: usize,
    /// Total number of matches across all pages.
    pub total_hits: usize,
}
#[get("mod")]
pub async fn mod_search(
web::Query(info): web::Query<SearchRequest>,
config: web::Data<SearchConfig>,
) -> Result<HttpResponse, SearchError> {
let results = search_for_project(&info, &**config).await?;
Ok(HttpResponse::Ok().json(SearchResults {
hits: results
.hits
.into_iter()
.map(|x| ResultSearchMod {
mod_id: x.project_id.clone(),
slug: x.slug,
author: x.author.clone(),
title: x.title,
description: x.description,
categories: x.categories,
versions: x.versions,
downloads: x.downloads,
follows: x.follows,
page_url: format!("https://modrinth.com/mod/{}", x.project_id),
icon_url: x.icon_url,
author_url: format!("https://modrinth.com/user/{}", x.author),
date_created: x.date_created,
date_modified: x.date_modified,
latest_version: x.latest_version,
license: x.license,
client_side: x.client_side,
server_side: x.server_side,
host: "modrinth".to_string(),
})
.collect(),
offset: results.offset,
limit: results.limit,
total_hits: results.total_hits,
}))
}
/// `GET /api/v1/mods?ids=[...]`
///
/// Bulk-fetches projects by id. Hidden projects are included only when the
/// (optional) authenticated caller is a moderator or belongs to the
/// project's team; otherwise they are silently omitted.
#[get("mods")]
pub async fn mods_get(
    req: HttpRequest,
    ids: web::Query<ProjectIds>,
    pool: web::Data<PgPool>,
) -> Result<HttpResponse, ApiError> {
    // `ids` arrives as a JSON-encoded array of project ids.
    let project_ids = serde_json::from_str::<Vec<models::ids::ProjectId>>(&*ids.ids)?
        .into_iter()
        .map(|x| x.into())
        .collect();
    let projects_data = database::models::Project::get_many_full(project_ids, &**pool).await?;

    // Authentication is optional on this route.
    let user_option = get_user_from_headers(req.headers(), &**pool).await.ok();

    let mut projects = Vec::new();

    for project_data in projects_data {
        // Non-hidden projects are always visible.
        let mut authorized = !project_data.status.is_hidden();

        if let Some(user) = &user_option {
            if !authorized {
                if user.role.is_mod() {
                    authorized = true;
                } else {
                    // Team members may see their own hidden projects.
                    let user_id: database::models::ids::UserId = user.id.into();

                    let project_exists = sqlx::query!(
                        "SELECT EXISTS(SELECT 1 FROM team_members WHERE team_id = $1 AND user_id = $2)",
                        project_data.inner.team_id as database::models::ids::TeamId,
                        user_id as database::models::ids::UserId,
                    )
                    .fetch_one(&**pool)
                    .await?
                    .exists;

                    authorized = project_exists.unwrap_or(false);
                }
            }
        }

        // Unauthorized projects are dropped rather than erroring the call.
        if authorized {
            projects.push(convert_project(project_data));
        }
    }

    Ok(HttpResponse::Ok().json(projects))
}
/// `POST /api/v1/mod`
///
/// Creates a project from a multipart payload. Database work happens in a
/// single transaction, and files uploaded to the file host are removed
/// again if creation fails partway through.
#[post("mod")]
pub async fn mod_create(
    req: HttpRequest,
    payload: Multipart,
    client: Data<PgPool>,
    file_host: Data<Arc<dyn FileHost + Send + Sync>>,
    indexing_queue: Data<Arc<CreationQueue>>,
) -> Result<HttpResponse, CreateError> {
    let mut transaction = client.begin().await?;
    let mut uploaded_files = Vec::new();

    let result = project_create_inner(
        req,
        payload,
        &mut transaction,
        &***file_host,
        &mut uploaded_files,
        &***indexing_queue,
    )
    .await;

    match result {
        Ok(response) => {
            transaction.commit().await?;
            Ok(response)
        }
        Err(creation_error) => {
            // Clean up uploaded files and the open transaction before
            // reporting the original failure; cleanup errors take precedence
            // because they leave worse, inconsistent state behind.
            let undo_outcome = undo_uploads(&***file_host, &uploaded_files).await;
            let rollback_outcome = transaction.rollback().await;
            undo_outcome?;
            rollback_outcome?;
            Err(creation_error)
        }
    }
}

195
src/routes/v1/reports.rs Normal file
View File

@@ -0,0 +1,195 @@
use crate::auth::{check_is_moderator_from_headers, get_user_from_headers};
use crate::models::ids::ReportId;
use crate::models::projects::{ProjectId, VersionId};
use crate::models::users::UserId;
use crate::routes::ApiError;
use actix_web::web;
use actix_web::{get, post, HttpRequest, HttpResponse};
use chrono::{DateTime, Utc};
use futures::StreamExt;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
/// A report as serialized by the v1 API (echoed on creation, listed by
/// `GET /report`).
#[derive(Serialize, Deserialize)]
pub struct Report {
    pub id: ReportId,
    /// Report type name; validated against the database on creation.
    pub report_type: String,
    /// Id of the reported item; interpret according to `item_type`.
    pub item_id: String,
    pub item_type: ItemType,
    /// The user who filed the report.
    pub reporter: UserId,
    pub body: String,
    pub created: DateTime<Utc>,
}
/// What kind of item a report points at. Serialized in kebab-case.
#[derive(Serialize, Deserialize, Clone)]
#[serde(rename_all = "kebab-case")]
pub enum ItemType {
    Mod,
    Version,
    User,
    /// Fallback variant; reports with this type are rejected on creation.
    Unknown,
}
impl ItemType {
pub fn as_str(&self) -> &'static str {
match self {
ItemType::Mod => "mod",
ItemType::Version => "version",
ItemType::User => "user",
ItemType::Unknown => "unknown",
}
}
}
/// Request body accepted by `POST /api/v1/report`.
#[derive(Deserialize)]
pub struct CreateReport {
    pub report_type: String,
    /// Id of the item being reported, as a plain string.
    pub item_id: String,
    pub item_type: ItemType,
    pub body: String,
}
#[post("report")]
pub async fn report_create(
req: HttpRequest,
pool: web::Data<PgPool>,
mut body: web::Payload,
) -> Result<HttpResponse, ApiError> {
let mut transaction = pool.begin().await?;
let current_user = get_user_from_headers(req.headers(), &mut *transaction).await?;
let mut bytes = web::BytesMut::new();
while let Some(item) = body.next().await {
bytes.extend_from_slice(&item.map_err(|_| {
ApiError::InvalidInputError("Error while parsing request payload!".to_string())
})?);
}
let new_report: CreateReport = serde_json::from_slice(bytes.as_ref())?;
let id = crate::database::models::generate_report_id(&mut transaction).await?;
let report_type = crate::database::models::categories::ReportType::get_id(
&*new_report.report_type,
&mut *transaction,
)
.await?
.ok_or_else(|| {
ApiError::InvalidInputError(format!("Invalid report type: {}", new_report.report_type))
})?;
let mut report = crate::database::models::report_item::Report {
id,
report_type_id: report_type,
project_id: None,
version_id: None,
user_id: None,
body: new_report.body.clone(),
reporter: current_user.id.into(),
created: chrono::Utc::now(),
};
match new_report.item_type {
ItemType::Mod => {
report.project_id = Some(
serde_json::from_str::<ProjectId>(&*format!("\"{}\"", new_report.item_id))?.into(),
)
}
ItemType::Version => {
report.version_id = Some(
serde_json::from_str::<VersionId>(&*format!("\"{}\"", new_report.item_id))?.into(),
)
}
ItemType::User => {
report.user_id = Some(
serde_json::from_str::<UserId>(&*format!("\"{}\"", new_report.item_id))?.into(),
)
}
ItemType::Unknown => {
return Err(ApiError::InvalidInputError(format!(
"Invalid report item type: {}",
new_report.item_type.as_str()
)))
}
}
report.insert(&mut transaction).await?;
transaction.commit().await?;
Ok(HttpResponse::Ok().json(Report {
id: id.into(),
report_type: new_report.report_type.clone(),
item_id: new_report.item_id.clone(),
item_type: new_report.item_type.clone(),
reporter: current_user.id,
body: new_report.body.clone(),
created: chrono::Utc::now(),
}))
}
/// Query parameter limiting how many rows a listing route returns.
#[derive(Deserialize)]
pub struct ResultCount {
    // Maximum number of rows to return; defaults to 100 when absent.
    #[serde(default = "default_count")]
    count: i16,
}
/// Serde default for [`ResultCount::count`].
fn default_count() -> i16 {
    100
}
/// `GET /api/v1/report`
///
/// Lists the oldest `count` reports (moderator only), re-serializing each
/// report's strongly-typed item id back into a JSON string.
#[get("report")]
pub async fn reports(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    count: web::Query<ResultCount>,
) -> Result<HttpResponse, ApiError> {
    check_is_moderator_from_headers(req.headers(), &**pool).await?;

    use futures::stream::TryStreamExt;

    let report_ids = sqlx::query!(
        "
        SELECT id FROM reports
        ORDER BY created ASC
        LIMIT $1;
        ",
        count.count as i64
    )
    .fetch_many(&**pool)
    // `fetch_many` interleaves row and completion events; keep rows only.
    .try_filter_map(|e| async {
        Ok(e.right()
            .map(|m| crate::database::models::ids::ReportId(m.id)))
    })
    .try_collect::<Vec<crate::database::models::ids::ReportId>>()
    .await?;

    let query_reports =
        crate::database::models::report_item::Report::get_many(report_ids, &**pool).await?;

    let mut reports = Vec::new();

    for x in query_reports {
        // A row with none of project/version/user id set falls through as
        // `Unknown` with an empty item id.
        let mut item_id = "".to_string();
        let mut item_type = ItemType::Unknown;

        if let Some(project_id) = x.project_id {
            item_id = serde_json::to_string::<ProjectId>(&project_id.into())?;
            item_type = ItemType::Mod;
        } else if let Some(version_id) = x.version_id {
            item_id = serde_json::to_string::<VersionId>(&version_id.into())?;
            item_type = ItemType::Version;
        } else if let Some(user_id) = x.user_id {
            item_id = serde_json::to_string::<UserId>(&user_id.into())?;
            item_type = ItemType::User;
        }

        reports.push(Report {
            id: x.id.into(),
            report_type: x.report_type,
            item_id,
            item_type,
            reporter: x.reporter.into(),
            body: x.body,
            created: x.created,
        })
    }

    Ok(HttpResponse::Ok().json(reports))
}

81
src/routes/v1/tags.rs Normal file
View File

@@ -0,0 +1,81 @@
use crate::auth::check_is_admin_from_headers;
use crate::database::models::categories::{Category, Loader, ProjectType};
use crate::routes::ApiError;
use actix_web::{get, put, web};
use actix_web::{HttpRequest, HttpResponse};
use sqlx::PgPool;
/// Default inline SVG icon assigned to categories and loaders created
/// through the v1 admin endpoints below.
const DEFAULT_ICON: &str = r#"<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"><circle cx="12" cy="12" r="10"></circle><path d="M9.09 9a3 3 0 0 1 5.83 1c0 2-3 3-3 3"></path><line x1="12" y1="17" x2="12.01" y2="17"></line></svg>"#;
/// `GET /api/v1/tag/category`
///
/// Lists the names of all categories that apply to the `mod` project type
/// (v1 predates other project types).
#[get("category")]
pub async fn category_list(pool: web::Data<PgPool>) -> Result<HttpResponse, ApiError> {
    let results = Category::list(&**pool)
        .await?
        .into_iter()
        .filter(|x| &*x.project_type == "mod")
        // BUG FIX: this previously mapped `x.project_type`, so the endpoint
        // returned the literal string "mod" once per category instead of the
        // category names (mirror of `loader_list`, which maps `x.loader`).
        .map(|x| x.category)
        .collect::<Vec<String>>();
    Ok(HttpResponse::Ok().json(results))
}
/// `PUT /api/v1/tag/category/{name}`
///
/// Creates a category with the default icon, attached to the `mod` project
/// type. Admin-only.
#[put("category/{name}")]
pub async fn category_create(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    category: web::Path<(String,)>,
) -> Result<HttpResponse, ApiError> {
    check_is_admin_from_headers(req.headers(), &**pool).await?;

    let (name,) = category.into_inner();

    // v1 categories are always attached to the `mod` project type.
    let mod_type_id = crate::database::models::ProjectTypeId::get_id("mod".to_string(), &**pool)
        .await?
        .ok_or_else(|| {
            ApiError::InvalidInputError("Specified project type does not exist!".to_string())
        })?;

    let _id = Category::builder()
        .name(&name)?
        .icon(DEFAULT_ICON)?
        .project_type(&mod_type_id)?
        .insert(&**pool)
        .await?;

    Ok(HttpResponse::NoContent().body(""))
}
/// `GET /api/v1/tag/loader`
///
/// Lists the names of all loaders that support the `mod` project type.
#[get("loader")]
pub async fn loader_list(pool: web::Data<PgPool>) -> Result<HttpResponse, ApiError> {
    let mut loaders = Vec::new();
    for entry in Loader::list(&**pool).await? {
        // Same filter as before: keep only mod-capable loaders.
        if entry.supported_project_types.iter().any(|t| t == "mod") {
            loaders.push(entry.loader);
        }
    }
    Ok(HttpResponse::Ok().json(loaders))
}
/// `PUT /api/v1/tag/loader/{name}`
///
/// Creates a loader with the default icon, supporting the `mod` project
/// type. Admin-only.
#[put("loader/{name}")]
pub async fn loader_create(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    loader: web::Path<(String,)>,
) -> Result<HttpResponse, ApiError> {
    check_is_admin_from_headers(req.headers(), &**pool).await?;
    let name = loader.into_inner().0;

    let mut transaction = pool.begin().await?;

    let project_types =
        ProjectType::get_many_id(&vec!["mod".to_string()], &mut *transaction).await?;

    let _id = Loader::builder()
        .name(&name)?
        .icon(DEFAULT_ICON)?
        .supported_project_types(&*project_types.into_iter().map(|x| x.id).collect::<Vec<_>>())?
        .insert(&mut transaction)
        .await?;

    // BUG FIX: the transaction was previously dropped without committing,
    // which rolls it back and silently discards the newly inserted loader.
    transaction.commit().await?;

    Ok(HttpResponse::NoContent().body(""))
}

44
src/routes/v1/users.rs Normal file
View File

@@ -0,0 +1,44 @@
use crate::auth::get_user_from_headers;
use crate::database::models::User;
use crate::models::ids::UserId;
use crate::models::projects::ProjectStatus;
use crate::routes::ApiError;
use actix_web::web;
use actix_web::{get, HttpRequest, HttpResponse};
use sqlx::PgPool;
/// `GET /api/v1/user/{user_id}/mods`
///
/// Returns the ids of a user's projects. Anonymous callers (and callers who
/// are neither moderators nor the user themself) only see approved
/// projects.
#[get("{user_id}/mods")]
pub async fn mods_list(
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
) -> Result<HttpResponse, ApiError> {
    // Auth is optional on this route.
    let current_user = get_user_from_headers(req.headers(), &**pool).await.ok();

    // The path segment may be either a username or an id.
    let id_option =
        crate::database::models::User::get_id_from_username_or_id(info.into_inner().0, &**pool)
            .await?;

    let id = match id_option {
        Some(id) => id,
        None => return Ok(HttpResponse::NotFound().body("")),
    };

    let user_id: UserId = id.into();

    // Moderators and the user themself may also see non-approved projects.
    let can_see_private = current_user
        .as_ref()
        .map(|u| u.role.is_mod() || u.id == user_id)
        .unwrap_or(false);

    let project_data = if can_see_private {
        User::get_projects_private(id, &**pool).await?
    } else {
        User::get_projects(id, ProjectStatus::Approved.as_str(), &**pool).await?
    };

    let response: Vec<crate::models::ids::ProjectId> =
        project_data.into_iter().map(|v| v.into()).collect();

    Ok(HttpResponse::Ok().json(response))
}

416
src/routes/v1/versions.rs Normal file
View File

@@ -0,0 +1,416 @@
use crate::auth::get_user_from_headers;
use crate::file_hosting::FileHost;
use crate::models::ids::{ProjectId, UserId, VersionId};
use crate::models::projects::{Dependency, GameVersion, Loader, Version, VersionFile, VersionType};
use crate::models::teams::Permissions;
use crate::routes::versions::{convert_version, VersionIds, VersionListFilters};
use crate::routes::ApiError;
use crate::{database, models, Pepper};
use actix_web::{delete, get, web, HttpRequest, HttpResponse};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use std::borrow::Borrow;
use std::sync::Arc;
/// A specific version of a mod, in the legacy (v1) wire format.
#[derive(Serialize, Deserialize)]
pub struct LegacyVersion {
    pub id: VersionId,
    /// v1 name for what newer models call `project_id`.
    pub mod_id: ProjectId,
    pub author_id: UserId,
    pub featured: bool,
    pub name: String,
    pub version_number: String,
    pub changelog: String,
    pub changelog_url: Option<String>,
    pub date_published: DateTime<Utc>,
    pub downloads: u32,
    pub version_type: VersionType,
    pub files: Vec<VersionFile>,
    pub dependencies: Vec<Dependency>,
    pub game_versions: Vec<GameVersion>,
    pub loaders: Vec<Loader>,
}
fn convert_to_legacy(version: Version) -> LegacyVersion {
LegacyVersion {
id: version.id,
mod_id: version.project_id,
author_id: version.author_id,
featured: version.featured,
name: version.name,
version_number: version.version_number,
changelog: version.changelog,
changelog_url: version.changelog_url,
date_published: version.date_published,
downloads: version.downloads,
version_type: version.version_type,
files: version.files,
dependencies: version.dependencies,
game_versions: version.game_versions,
loaders: version.loaders,
}
}
/// `GET /api/v1/mod/{mod_id}/version`
///
/// Lists a project's versions in the legacy format, optionally filtered by
/// game version, loader, and featured flag. When a `featured=true` filter
/// matches nothing, an "auto featured" set (the newest version per
/// game-version/loader combination) is substituted instead.
#[get("version")]
pub async fn version_list(
    info: web::Path<(String,)>,
    web::Query(filters): web::Query<VersionListFilters>,
    pool: web::Data<PgPool>,
) -> Result<HttpResponse, ApiError> {
    let string = info.into_inner().0;
    // Accepts either a slug or a project id.
    let result = database::models::Project::get_from_slug_or_project_id(string, &**pool).await?;

    if let Some(project) = result {
        let id = project.id;

        let version_ids = database::models::Version::get_project_versions(
            id,
            // Filters arrive as JSON arrays; malformed input degrades to a
            // default (empty) filter instead of erroring.
            filters
                .game_versions
                .as_ref()
                .map(|x| serde_json::from_str(x).unwrap_or_default()),
            filters
                .loaders
                .as_ref()
                .map(|x| serde_json::from_str(x).unwrap_or_default()),
            &**pool,
        )
        .await?;

        let mut versions = database::models::Version::get_many_full(version_ids, &**pool).await?;

        let mut response = versions
            .iter()
            .cloned()
            .filter(|version| {
                // The `featured` filter, when present, must match exactly.
                filters
                    .featured
                    .map(|featured| featured == version.featured)
                    .unwrap_or(true)
            })
            .map(convert_version)
            .map(convert_to_legacy)
            .collect::<Vec<_>>();

        // Newest first; the fallback below relies on this ordering when it
        // picks the first match per (game version, loader) pair.
        versions.sort_by(|a, b| b.date_published.cmp(&a.date_published));

        // Attempt to populate versions with "auto featured" versions
        if response.is_empty() && !versions.is_empty() && filters.featured.unwrap_or(false) {
            let loaders = database::models::categories::Loader::list(&**pool).await?;
            let game_versions =
                database::models::categories::GameVersion::list_filter(None, Some(true), &**pool)
                    .await?;

            // Cartesian product of the listed game versions and loaders.
            let mut joined_filters = Vec::new();
            for game_version in &game_versions {
                for loader in &loaders {
                    joined_filters.push((game_version, loader))
                }
            }

            // For each combination, surface the most recent version that
            // supports it (if any).
            joined_filters.into_iter().for_each(|filter| {
                versions
                    .iter()
                    .find(|version| {
                        version.game_versions.contains(&filter.0.version)
                            && version.loaders.contains(&filter.1.loader)
                    })
                    .map(|version| {
                        response.push(convert_to_legacy(convert_version(version.clone())))
                    })
                    .unwrap_or(());
            });

            // Still nothing matched: fall back to returning every version.
            if response.is_empty() {
                versions
                    .into_iter()
                    .for_each(|version| response.push(convert_to_legacy(convert_version(version))));
            }
        }

        response.sort_by(|a, b| b.date_published.cmp(&a.date_published));
        // The auto-featured pass can push the same version more than once.
        response.dedup_by(|a, b| a.id == b.id);

        Ok(HttpResponse::Ok().json(response))
    } else {
        Ok(HttpResponse::NotFound().body(""))
    }
}
#[get("versions")]
pub async fn versions_get(
ids: web::Query<VersionIds>,
pool: web::Data<PgPool>,
) -> Result<HttpResponse, ApiError> {
let version_ids = serde_json::from_str::<Vec<models::ids::VersionId>>(&*ids.ids)?
.into_iter()
.map(|x| x.into())
.collect();
let versions_data = database::models::Version::get_many_full(version_ids, &**pool).await?;
let mut versions = Vec::new();
for version_data in versions_data {
versions.push(convert_to_legacy(convert_version(version_data)));
}
Ok(HttpResponse::Ok().json(versions))
}
/// `GET /api/v1/version/{version_id}` — single version, legacy shape.
#[get("{version_id}")]
pub async fn version_get(
    info: web::Path<(models::ids::VersionId,)>,
    pool: web::Data<PgPool>,
) -> Result<HttpResponse, ApiError> {
    let (id,) = info.into_inner();
    match database::models::Version::get_full(id.into(), &**pool).await? {
        Some(data) => Ok(HttpResponse::Ok().json(convert_to_legacy(convert_version(data)))),
        None => Ok(HttpResponse::NotFound().body("")),
    }
}
/// Query parameter selecting which hash algorithm a file hash refers to.
#[derive(Deserialize)]
pub struct Algorithm {
    // Defaults to "sha1" when the query parameter is absent.
    #[serde(default = "default_algorithm")]
    algorithm: String,
}
/// Serde default for [`Algorithm::algorithm`].
fn default_algorithm() -> String {
    String::from("sha1")
}
// under /api/v1/version_file/{hash}
/// Looks up a version by one of its files' hashes.
///
/// The hash from the path is lowercased, then matched against the `hashes`
/// table using the algorithm from the query string (default `sha1`).
#[get("{version_id}")]
pub async fn get_version_from_hash(
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    algorithm: web::Query<Algorithm>,
) -> Result<HttpResponse, ApiError> {
    let hash = info.into_inner().0.to_lowercase();
    let result = sqlx::query!(
        "
        SELECT f.version_id version_id FROM hashes h
        INNER JOIN files f ON h.file_id = f.id
        WHERE h.algorithm = $2 AND h.hash = $1
        ",
        hash.as_bytes(),
        algorithm.algorithm
    )
    .fetch_optional(&**pool)
    .await?;

    if let Some(id) = result {
        let version_data = database::models::Version::get_full(
            database::models::VersionId(id.version_id),
            &**pool,
        )
        .await?;

        if let Some(data) = version_data {
            // NOTE: unlike the other v1 version routes, this one returns the
            // current (non-legacy) shape — no `convert_to_legacy` here.
            Ok(HttpResponse::Ok().json(super::versions::convert_version(data)))
        } else {
            Ok(HttpResponse::NotFound().body(""))
        }
    } else {
        Ok(HttpResponse::NotFound().body(""))
    }
}
/// JSON body sent alongside the temporary redirect to the file's URL.
#[derive(Serialize, Deserialize)]
pub struct DownloadRedirect {
    pub url: String,
}
// under /api/v1/version_file/{hash}/download
/// Redirects to the file matching `{hash}` and records the download.
///
/// The client IP is never stored directly — only a peppered SHA-1 of it,
/// used to avoid double-counting repeat downloads.
#[allow(clippy::await_holding_refcell_ref)]
#[get("{version_id}/download")]
pub async fn download_version(
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    algorithm: web::Query<Algorithm>,
    pepper: web::Data<Pepper>,
) -> Result<HttpResponse, ApiError> {
    // NOTE(review): unlike `get_version_from_hash`/`delete_file`, the hash
    // is NOT lowercased here — confirm mixed-case hashes should still match.
    let hash = info.into_inner().0;
    let result = sqlx::query!(
        "
        SELECT f.url url, f.id id, f.version_id version_id, v.mod_id mod_id FROM hashes h
        INNER JOIN files f ON h.file_id = f.id
        INNER JOIN versions v ON v.id = f.version_id
        WHERE h.algorithm = $2 AND h.hash = $1
        ",
        hash.as_bytes(),
        algorithm.algorithm
    )
    .fetch_optional(&**pool)
    .await
    .map_err(|e| ApiError::DatabaseError(e.into()))?;

    if let Some(id) = result {
        let real_ip = req.connection_info();
        let ip_option = real_ip.borrow().remote_addr();

        if let Some(ip) = ip_option {
            // Peppered hash of the client address; raw IPs are not stored.
            let hash = sha1::Sha1::from(format!("{}{}", ip, pepper.pepper)).hexdigest();

            // NOTE(review): `CURRENT_DATE - INTERVAL '30 minutes ago'` looks
            // suspicious — in Postgres, 'ago' negates an interval, and
            // CURRENT_DATE is a date rather than a timestamp. Verify this
            // window deduplicates downloads as intended.
            let download_exists = sqlx::query!(
                "SELECT EXISTS(SELECT 1 FROM downloads WHERE version_id = $1 AND date > (CURRENT_DATE - INTERVAL '30 minutes ago') AND identifier = $2)",
                id.version_id,
                hash,
            )
            .fetch_one(&**pool)
            .await
            .map_err(|e| ApiError::DatabaseError(e.into()))?
            .exists.unwrap_or(false);

            if !download_exists {
                // First download from this client in the window: record it
                // and bump the denormalized per-version/per-mod counters.
                sqlx::query!(
                    "
                    INSERT INTO downloads (
                        version_id, identifier
                    )
                    VALUES (
                        $1, $2
                    )
                    ",
                    id.version_id,
                    hash
                )
                .execute(&**pool)
                .await
                .map_err(|e| ApiError::DatabaseError(e.into()))?;

                sqlx::query!(
                    "
                    UPDATE versions
                    SET downloads = downloads + 1
                    WHERE id = $1
                    ",
                    id.version_id,
                )
                .execute(&**pool)
                .await
                .map_err(|e| ApiError::DatabaseError(e.into()))?;

                sqlx::query!(
                    "
                    UPDATE mods
                    SET downloads = downloads + 1
                    WHERE id = $1
                    ",
                    id.mod_id,
                )
                .execute(&**pool)
                .await
                .map_err(|e| ApiError::DatabaseError(e.into()))?;
            }
        }

        // Redirect to the file, and also include the URL in the body for
        // clients that do not follow redirects.
        Ok(HttpResponse::TemporaryRedirect()
            .header("Location", &*id.url)
            .json(DownloadRedirect { url: id.url }))
    } else {
        Ok(HttpResponse::NotFound().body(""))
    }
}
// under /api/v1/version_file/{hash}
/// Deletes a single file (matched by hash) from a version.
///
/// The caller must be a moderator or a team member holding the
/// `DELETE_VERSION` permission. Database rows are removed inside one
/// transaction; the stored file is deleted from the file host before the
/// transaction is committed.
#[delete("{version_id}")]
pub async fn delete_file(
    req: HttpRequest,
    info: web::Path<(String,)>,
    pool: web::Data<PgPool>,
    file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
    algorithm: web::Query<Algorithm>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(req.headers(), &**pool).await?;

    let hash = info.into_inner().0.to_lowercase();
    let result = sqlx::query!(
        "
        SELECT f.id id, f.version_id version_id, f.filename filename, v.version_number version_number, v.mod_id project_id FROM hashes h
        INNER JOIN files f ON h.file_id = f.id
        INNER JOIN versions v ON v.id = f.version_id
        WHERE h.algorithm = $2 AND h.hash = $1
        ",
        hash.as_bytes(),
        algorithm.algorithm
    )
    .fetch_optional(&**pool)
    .await?;

    if let Some(row) = result {
        // Moderators bypass the per-team permission check.
        if !user.role.is_mod() {
            let team_member = database::models::TeamMember::get_from_user_id_version(
                database::models::ids::VersionId(row.version_id),
                user.id.into(),
                &**pool,
            )
            .await
            .map_err(ApiError::DatabaseError)?
            .ok_or_else(|| {
                ApiError::CustomAuthenticationError(
                    "You don't have permission to delete this file!".to_string(),
                )
            })?;

            if !team_member
                .permissions
                .contains(Permissions::DELETE_VERSION)
            {
                return Err(ApiError::CustomAuthenticationError(
                    "You don't have permission to delete this file!".to_string(),
                ));
            }
        }

        let mut transaction = pool.begin().await?;

        // `hashes` rows reference `files`, so they must be deleted first.
        sqlx::query!(
            "
            DELETE FROM hashes
            WHERE file_id = $1
            ",
            row.id
        )
        .execute(&mut *transaction)
        .await?;

        sqlx::query!(
            "
            DELETE FROM files
            WHERE files.id = $1
            ",
            row.id,
        )
        .execute(&mut *transaction)
        .await?;

        let project_id: models::projects::ProjectId =
            database::models::ids::ProjectId(row.project_id).into();

        // Remove the stored object from the file host as well.
        file_host
            .delete_file_version(
                "",
                &format!(
                    "data/{}/versions/{}/{}",
                    project_id, row.version_number, row.filename
                ),
            )
            .await?;

        transaction.commit().await?;

        Ok(HttpResponse::NoContent().body(""))
    } else {
        Ok(HttpResponse::NotFound().body(""))
    }
}