Add mod lists for modpacks, LiteLoader support, update actix, fix moderation webhook (#357)

This commit is contained in:
Geometrically
2022-05-26 10:08:19 -07:00
committed by GitHub
parent e7b41f9a4c
commit d29d910ac6
20 changed files with 667 additions and 755 deletions

View File

@@ -21,6 +21,7 @@ pub struct VersionBuilder {
pub struct DependencyBuilder {
pub project_id: Option<ProjectId>,
pub version_id: Option<VersionId>,
pub file_name: Option<String>,
pub dependency_type: String,
}
@@ -59,13 +60,14 @@ impl DependencyBuilder {
sqlx::query!(
"
INSERT INTO dependencies (dependent_id, dependency_type, dependency_id, mod_dependency_id)
VALUES ($1, $2, $3, $4)
INSERT INTO dependencies (dependent_id, dependency_type, dependency_id, mod_dependency_id, dependency_file_name)
VALUES ($1, $2, $3, $4, $5)
",
version_id as VersionId,
self.dependency_type,
version_dependency_id.map(|x| x.0),
project_dependency_id.map(|x| x.0),
self.file_name,
)
.execute(&mut *transaction)
.await?;
@@ -455,7 +457,7 @@ impl Version {
sqlx::query!(
"
DELETE FROM dependencies WHERE mod_dependency_id = NULL AND dependency_id = NULL
DELETE FROM dependencies WHERE mod_dependency_id IS NULL AND dependency_id IS NULL AND dependency_file_name IS NULL
",
)
.execute(&mut *transaction)
@@ -659,7 +661,7 @@ impl Version {
).fetch_all(executor),
sqlx::query!(
"
SELECT dependency_id, mod_dependency_id, dependency_type
SELECT dependency_id, mod_dependency_id, dependency_file_name, dependency_type
FROM dependencies
WHERE dependent_id = $1
",
@@ -716,6 +718,7 @@ impl Version {
.map(|x| QueryDependency {
project_id: x.mod_dependency_id.map(ProjectId),
version_id: x.dependency_id.map(VersionId),
file_name: x.dependency_file_name,
dependency_type: x.dependency_type,
})
.collect(),
@@ -779,6 +782,7 @@ pub struct QueryVersion {
pub struct QueryDependency {
pub project_id: Option<ProjectId>,
pub version_id: Option<VersionId>,
pub file_name: Option<String>,
pub dependency_type: String,
}
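With both ID columns optional and the new dependency_file_name column, a dependencies row can now point at a file that exists outside Modrinth entirely. A minimal sketch of such a builder, assuming the struct above is in scope (the file name and type string are illustrative, not taken from this commit):

    // External dependency: nothing to link on Modrinth, only a file name.
    let external = DependencyBuilder {
        project_id: None,
        version_id: None,
        file_name: Some("SomeExternalMod-1.2.3.jar".to_string()),
        dependency_type: "required".to_string(),
    };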

View File

@@ -10,7 +10,6 @@ use gumdrop::Options;
use log::{error, info, warn};
use search::indexing::index_projects;
use search::indexing::IndexingSettings;
use std::sync::atomic::Ordering;
use std::sync::Arc;
mod database;
@@ -182,33 +181,6 @@ async fn main() -> std::io::Result<()> {
}
});
let indexing_queue =
Arc::new(search::indexing::queue::CreationQueue::new());
let mut skip = skip_initial;
let queue_ref = indexing_queue.clone();
let search_config_ref = search_config.clone();
scheduler.run(std::time::Duration::from_secs(15 * 60), move || {
let queue_ref = queue_ref.clone();
let search_config_ref = search_config_ref.clone();
let local_skip = skip;
if skip {
skip = false;
}
async move {
if local_skip {
return;
}
info!("Indexing created project queue");
let result = queue_ref.index(&search_config_ref).await;
if let Err(e) = result {
warn!("Indexing created projects failed: {:?}", e);
}
crate::health::SEARCH_READY.store(true, Ordering::Release);
info!("Done indexing created project queue");
}
});
scheduler::schedule_versions(&mut scheduler, pool.clone(), skip_initial);
let ip_salt = Pepper {
@@ -270,7 +242,6 @@ async fn main() -> std::io::Result<()> {
)
.app_data(web::Data::new(pool.clone()))
.app_data(web::Data::new(file_host.clone()))
.app_data(web::Data::new(indexing_queue.clone()))
.app_data(web::Data::new(search_config.clone()))
.app_data(web::Data::new(ip_salt.clone()))
.configure(routes::v1_config)

View File

@@ -1,6 +1,7 @@
pub mod error;
pub mod ids;
pub mod notifications;
pub mod pack;
pub mod projects;
pub mod reports;
pub mod teams;

src/models/pack.rs (new file, 111 lines)
View File

@@ -0,0 +1,111 @@
use crate::models::projects::SideType;
use crate::parse_strings_from_var;
use serde::{Deserialize, Serialize};
use validator::Validate;
#[derive(Serialize, Deserialize, Validate, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct PackFormat {
pub game: String,
pub format_version: i32,
#[validate(length(min = 3, max = 512))]
pub version_id: String,
#[validate(length(min = 3, max = 512))]
pub name: String,
#[validate(length(max = 2048))]
pub summary: Option<String>,
#[validate]
pub files: Vec<PackFile>,
pub dependencies: std::collections::HashMap<PackDependency, String>,
}
#[derive(Serialize, Deserialize, Validate, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct PackFile {
pub path: String,
pub hashes: std::collections::HashMap<PackFileHash, String>,
pub env: Option<std::collections::HashMap<EnvType, SideType>>,
#[validate(custom(function = "validate_download_url"))]
pub downloads: Vec<String>,
pub file_size: u32,
}
fn validate_download_url(
values: &[String],
) -> Result<(), validator::ValidationError> {
for value in values {
let url = url::Url::parse(value)
.map_err(|_| validator::ValidationError::new("invalid URL"))?;
if url.as_str() != value {
return Err(validator::ValidationError::new("invalid URL"));
}
let domains = parse_strings_from_var("WHITELISTED_MODPACK_DOMAINS")
.unwrap_or_default();
if !domains.contains(
&url.domain()
.ok_or_else(|| validator::ValidationError::new("invalid URL"))?
.to_string(),
) {
return Err(validator::ValidationError::new(
"File download source is not from allowed sources",
));
}
}
Ok(())
}
#[derive(Serialize, Deserialize, Eq, PartialEq, Hash)]
#[serde(rename_all = "camelCase", from = "String")]
pub enum PackFileHash {
Sha1,
Sha512,
Unknown(String),
}
impl From<String> for PackFileHash {
fn from(s: String) -> Self {
match s.as_str() {
"sha1" => PackFileHash::Sha1,
"sha512" => PackFileHash::Sha512,
_ => PackFileHash::Unknown(s),
}
}
}
#[derive(Serialize, Deserialize, Eq, PartialEq, Hash)]
#[serde(rename_all = "camelCase")]
pub enum EnvType {
Client,
Server,
}
#[derive(Serialize, Deserialize, Clone, Hash, PartialEq, Eq)]
#[serde(rename_all = "kebab-case")]
pub enum PackDependency {
Forge,
FabricLoader,
QuiltLoader,
Minecraft,
}
impl std::fmt::Display for PackDependency {
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
fmt.write_str(self.as_str())
}
}
impl PackDependency {
// These are constant, so returning &'static str avoids the allocation that `to_string` would incur
pub fn as_str(&self) -> &'static str {
match self {
PackDependency::Forge => "forge",
PackDependency::FabricLoader => "fabric-loader",
PackDependency::Minecraft => "minecraft",
PackDependency::QuiltLoader => "quilt-loader",
}
}
}
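The structs above describe the Modrinth pack manifest: camelCase keys, hash algorithms as map keys, kebab-case loader names. A hedged round-trip sketch, assuming PackFormat and serde_json are in scope; every concrete value (version, hashes, URL) is invented for illustration:

    fn parse_example_manifest() {
        let json = r#"{
            "game": "minecraft",
            "formatVersion": 1,
            "versionId": "1.0.0",
            "name": "Example Pack",
            "files": [{
                "path": "mods/example-mod.jar",
                "hashes": { "sha1": "aaaa...", "sha512": "bbbb..." },
                "downloads": ["https://cdn.modrinth.com/example-mod.jar"],
                "fileSize": 1024
            }],
            "dependencies": { "minecraft": "1.18.2", "fabric-loader": "0.13.3" }
        }"#;
        let pack: PackFormat = serde_json::from_str(json).expect("valid manifest");
        assert_eq!(pack.format_version, 1);
        assert_eq!(pack.files.len(), 1);
    }

Note that summary and env are Option fields and may be omitted, and the domain whitelist in validate_download_url only runs when .validate() is called, not at deserialization time.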

View File

@@ -166,7 +166,7 @@ pub struct ModeratorMessage {
pub body: Option<String>,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
#[serde(rename_all = "kebab-case")]
pub enum SideType {
Required,
@@ -368,6 +368,7 @@ impl From<QueryVersion> for Version {
.map(|d| Dependency {
version_id: d.version_id.map(|i| VersionId(i.0 as u64)),
project_id: d.project_id.map(|i| ProjectId(i.0 as u64)),
file_name: d.file_name,
dependency_type: DependencyType::from_str(
d.dependency_type.as_str(),
),
@@ -399,7 +400,7 @@ pub struct VersionFile {
pub size: u32,
}
/// A dependency which describes what versions are required, break support, or are optional to the
/// version's functionality
#[derive(Serialize, Deserialize, Clone)]
pub struct Dependency {
@@ -407,6 +408,8 @@ pub struct Dependency {
pub version_id: Option<VersionId>,
/// The project ID that the dependency is synced with and auto-updated
pub project_id: Option<ProjectId>,
/// The filename of the dependency. Used exclusively for external mods in modpacks
pub file_name: Option<String>,
/// The type of the dependency
pub dependency_type: DependencyType,
}
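On the API model the new field is a plain Option<String>. A sketch of how an external-file dependency serializes, assuming DependencyType::Required renders as "required" (the file name is invented):

    fn external_dependency_json() {
        let dep = Dependency {
            version_id: None,
            project_id: None,
            file_name: Some("some-external-mod.jar".to_string()),
            dependency_type: DependencyType::Required,
        };
        // {"version_id":null,"project_id":null,"file_name":"some-external-mod.jar","dependency_type":"required"}
        println!("{}", serde_json::to_string(&dep).unwrap());
    }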

View File

@@ -17,6 +17,7 @@ use actix_web::{post, HttpRequest, HttpResponse};
use futures::stream::StreamExt;
use serde::{Deserialize, Serialize};
use sqlx::postgres::PgPool;
use std::collections::HashSet;
use std::sync::Arc;
use thiserror::Error;
use time::OffsetDateTime;
@@ -357,6 +358,12 @@ pub async fn project_create_inner(
CreateError::InvalidInput(validation_errors_to_string(err, None))
})?;
let mut uniq = HashSet::new();
// `HashSet::insert` returns false for values already present, so `all`
// short-circuits on the first duplicate version number.
if !create_data
.initial_versions
.iter()
.all(|x| uniq.insert(x.version_number.clone()))
{
return Err(CreateError::InvalidInput(
"Duplicate version numbers are not allowed to be uploaded to Modrinth!"
.to_string(),
));
}
let slug_project_id_option: Option<ProjectId> =
serde_json::from_str(&*format!("\"{}\"", create_data.slug)).ok();
@@ -542,6 +549,7 @@ pub async fn project_create_inner(
file_host,
uploaded_files,
&mut created_version.files,
&mut created_version.dependencies,
&cdn_url,
&content_disposition,
project_id,
@@ -824,6 +832,7 @@ async fn create_initial_version(
version_id: d.version_id.map(|x| x.into()),
project_id: d.project_id.map(|x| x.into()),
dependency_type: d.dependency_type.to_string(),
file_name: None,
})
.collect::<Vec<_>>();
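The duplicate check hinges on HashSet::insert returning false for a value that is already present, so the all call short-circuits on the first repeated version number. The idiom in isolation:

    use std::collections::HashSet;
    use std::hash::Hash;

    fn all_unique<T: Hash + Eq>(items: impl IntoIterator<Item = T>) -> bool {
        let mut seen = HashSet::new();
        // insert returns false on a repeat, which makes all() return false
        items.into_iter().all(|x| seen.insert(x))
    }

    #[test]
    fn rejects_duplicate_version_numbers() {
        assert!(all_unique(["1.0.0", "1.0.1"]));
        assert!(!all_unique(["1.0.0", "1.0.0"]));
    }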

View File

@@ -6,12 +6,10 @@ use crate::models::projects::{
};
use crate::models::teams::Permissions;
use crate::routes::ApiError;
use crate::search::indexing::queue::CreationQueue;
use crate::search::{search_for_project, SearchConfig, SearchError};
use crate::util::auth::{get_user_from_headers, is_authorized};
use crate::util::routes::read_from_payload;
use crate::util::validate::validation_errors_to_string;
use actix_web::web::Data;
use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
use futures::StreamExt;
use serde::{Deserialize, Serialize};
@@ -278,7 +276,6 @@ pub async fn project_edit(
pool: web::Data<PgPool>,
config: web::Data<SearchConfig>,
new_project: web::Json<EditProject>,
indexing_queue: Data<Arc<CreationQueue>>,
) -> Result<HttpResponse, ApiError> {
let user = get_user_from_headers(req.headers(), &**pool).await?;
@@ -442,17 +439,6 @@ pub async fn project_edit(
&& !status.is_searchable()
{
delete_from_index(id.into(), config).await?;
} else if !project_item.status.is_searchable()
&& status.is_searchable()
{
// let index_project =
// crate::search::indexing::local_import::query_one(
// id,
// &mut *transaction,
// )
// .await?;
//
// indexing_queue.add(index_project);
}
}

View File

@@ -1,12 +1,13 @@
use crate::database::models;
use crate::database::models::notification_item::NotificationBuilder;
use crate::database::models::version_item::{
VersionBuilder, VersionFileBuilder,
DependencyBuilder, VersionBuilder, VersionFileBuilder,
};
use crate::file_hosting::FileHost;
use crate::models::pack::PackFileHash;
use crate::models::projects::{
Dependency, GameVersion, Loader, ProjectId, Version, VersionFile,
VersionId, VersionType,
Dependency, DependencyType, GameVersion, Loader, ProjectId, Version,
VersionFile, VersionId, VersionType,
};
use crate::models::teams::Permissions;
use crate::routes::project_creation::{CreateError, UploadedFile};
@@ -171,7 +172,7 @@ async fn version_create_inner(
// Check whether there is already a version of this project with the
// same version number
let results = sqlx::query!(
"SELECT EXISTS(SELECT 1 FROM versions WHERE (version_number=$1) AND (mod_id=$2))",
"SELECT EXISTS(SELECT 1 FROM versions WHERE (version_number = $1) AND (mod_id = $2))",
version_create_data.version_number,
project_id as models::ProjectId,
)
@@ -262,6 +263,7 @@ async fn version_create_inner(
version_id: d.version_id.map(|x| x.into()),
project_id: d.project_id.map(|x| x.into()),
dependency_type: d.dependency_type.to_string(),
file_name: None,
})
.collect::<Vec<_>>();
@@ -313,6 +315,7 @@ async fn version_create_inner(
file_host,
uploaded_files,
&mut version.files,
&mut version.dependencies,
&cdn_url,
&content_disposition,
version.project_id.into(),
@@ -579,11 +582,23 @@ async fn upload_file_to_version_inner(
))
})?;
let mut dependencies = version
.dependencies
.iter()
.map(|x| models::version_item::DependencyBuilder {
project_id: x.project_id,
version_id: x.version_id,
file_name: None,
dependency_type: x.dependency_type.clone(),
})
.collect();
upload_file(
&mut field,
file_host,
uploaded_files,
&mut file_builders,
&mut dependencies,
&cdn_url,
&content_disposition,
project_id,
@@ -625,6 +640,7 @@ pub async fn upload_file(
file_host: &dyn FileHost,
uploaded_files: &mut Vec<UploadedFile>,
version_files: &mut Vec<models::version_item::VersionFileBuilder>,
dependencies: &mut Vec<models::version_item::DependencyBuilder>,
cdn_url: &str,
content_disposition: &actix_web::http::header::ContentDisposition,
project_id: crate::models::ids::ProjectId,
@@ -680,6 +696,66 @@ pub async fn upload_file(
)
.await?;
if let ValidationResult::PassWithPackData(ref data) = validation_result {
if dependencies.is_empty() {
let hashes: Vec<Vec<u8>> = data
.files
.iter()
.filter_map(|x| x.hashes.get(&PackFileHash::Sha1))
.map(|x| x.as_bytes().to_vec())
.collect();
let res = sqlx::query!(
"
SELECT v.id version_id, v.mod_id project_id, h.hash hash FROM hashes h
INNER JOIN files f on h.file_id = f.id
INNER JOIN versions v on f.version_id = v.id
WHERE h.algorithm = 'sha1' AND h.hash = ANY($1)
",
&*hashes
)
.fetch_all(&mut *transaction).await?;
for file in &data.files {
if let Some(dep) = res.iter().find(|x| {
x.hash.as_deref()
== file
.hashes
.get(&PackFileHash::Sha1)
.map(|x| x.as_bytes())
}) {
if let Some(project_id) = dep.project_id {
if let Some(version_id) = dep.version_id {
dependencies.push(DependencyBuilder {
project_id: Some(models::ProjectId(project_id)),
version_id: Some(models::VersionId(version_id)),
file_name: None,
dependency_type: DependencyType::Required
.to_string(),
});
}
}
} else if let Some(first_download) = file.downloads.first() {
dependencies.push(DependencyBuilder {
project_id: None,
version_id: None,
file_name: Some(
first_download
.rsplit('/')
.next()
.unwrap_or(first_download)
.to_string(),
),
dependency_type: DependencyType::Required
.to_string(),
});
}
}
}
}
let file_path_encode = format!(
"data/{}/versions/{}/{}",
project_id,
@@ -700,6 +776,20 @@ pub async fn upload_file(
file_name: file_path,
});
let sha1_bytes = upload_data.content_sha1.into_bytes();
let sha512_bytes = upload_data.content_sha512.into_bytes();
if version_files.iter().any(|x| {
x.hashes
.iter()
.any(|y| y.hash == sha1_bytes || y.hash == sha512_bytes)
}) {
return Err(CreateError::InvalidInput(
"Duplicate files are not allowed to be uploaded to Modrinth!"
.to_string(),
));
}
version_files.push(models::version_item::VersionFileBuilder {
filename: file_name.to_string(),
url: format!("{}/{}", cdn_url, file_path_encode),
@@ -708,16 +798,16 @@ pub async fn upload_file(
algorithm: "sha1".to_string(),
// This is an invalid cast - the database expects the hash's
// bytes, but this is the string version.
hash: upload_data.content_sha1.into_bytes(),
hash: sha1_bytes,
},
models::version_item::HashBuilder {
algorithm: "sha512".to_string(),
// This is an invalid cast - the database expects the hash's
// bytes, but this is the string version.
hash: upload_data.content_sha512.into_bytes(),
hash: sha512_bytes,
},
],
primary: (validation_result == ValidationResult::Pass
primary: (validation_result.is_passed()
&& version_files.iter().all(|x| !x.primary)
&& !ignore_primary)
|| force_primary,
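Two details in this file are worth calling out. First, when a pack file's hashes match nothing in the database, the fallback dependency records only the last path segment of the first download URL; the extraction reduces to a one-liner (rsplit always yields at least one item, so the unwrap_or arm is only a belt-and-braces fallback):

    fn file_name_from_url(url: &str) -> &str {
        url.rsplit('/').next().unwrap_or(url)
    }

    #[test]
    fn takes_last_path_segment() {
        assert_eq!(
            file_name_from_url("https://example.com/mods/foo-1.2.3.jar"),
            "foo-1.2.3.jar"
        );
    }

Second, the duplicate-upload check compares the new file's SHA1 and SHA512 digests against every hash already attached to the version; since the two digest lengths differ, a SHA1 value can never spuriously match a stored SHA512 value, so the double comparison is safe.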

View File

@@ -250,6 +250,21 @@ pub async fn version_edit(
}
if let Some(number) = &new_version.version_number {
let results = sqlx::query!(
"SELECT EXISTS(SELECT 1 FROM versions WHERE (version_number = $1) AND (mod_id = $2))",
number,
version_item.project_id as database::models::ids::ProjectId,
)
.fetch_one(&mut *transaction)
.await?;
if results.exists.unwrap_or(true) {
return Err(ApiError::InvalidInput(
"A version with that version_number already exists"
.to_string(),
));
}
sqlx::query!(
"
UPDATE versions
@@ -292,6 +307,7 @@ pub async fn version_edit(
.map(|x| database::models::version_item::DependencyBuilder {
project_id: x.project_id.map(|x| x.into()),
version_id: x.version_id.map(|x| x.into()),
file_name: x.file_name.clone(),
dependency_type: x.dependency_type.to_string(),
})
.collect::<Vec<database::models::version_item::DependencyBuilder>>();
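One subtlety in the guard above: the EXISTS probe does not exclude the row being edited, so resubmitting a version's current version_number matches its own row and fails. A hedged variant that sidesteps this, assuming the versions.id column seen elsewhere in this diff (the version-id binding name is illustrative):

    let results = sqlx::query!(
        "SELECT EXISTS(SELECT 1 FROM versions WHERE (version_number = $1) AND (mod_id = $2) AND (id != $3))",
        number,
        version_item.project_id as database::models::ids::ProjectId,
        version_id as database::models::ids::VersionId, // hypothetical binding
    )
    .fetch_one(&mut *transaction)
    .await?;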

View File

@@ -86,78 +86,6 @@ pub async fn index_local(
.await?
)
}
pub async fn query_one(
id: ProjectId,
exec: &mut sqlx::PgConnection,
) -> Result<UploadSearchProject, IndexingError> {
let m = sqlx::query!(
//region query
"
SELECT m.id id, m.project_type project_type, m.title title, m.description description, m.downloads downloads, m.follows follows,
m.icon_url icon_url, m.published published,
m.updated updated,
m.team_id team_id, m.license license, m.slug slug,
s.status status_name, cs.name client_side_type, ss.name server_side_type, l.short short, pt.name project_type_name, u.username username,
STRING_AGG(DISTINCT c.category, ',') categories, STRING_AGG(DISTINCT lo.loader, ',') loaders, STRING_AGG(DISTINCT gv.version, ',') versions,
STRING_AGG(DISTINCT mg.image_url, ',') gallery
FROM mods m
LEFT OUTER JOIN mods_categories mc ON joining_mod_id = m.id
LEFT OUTER JOIN categories c ON mc.joining_category_id = c.id
LEFT OUTER JOIN versions v ON v.mod_id = m.id
LEFT OUTER JOIN game_versions_versions gvv ON gvv.joining_version_id = v.id
LEFT OUTER JOIN game_versions gv ON gvv.game_version_id = gv.id
LEFT OUTER JOIN loaders_versions lv ON lv.version_id = v.id
LEFT OUTER JOIN loaders lo ON lo.id = lv.loader_id
LEFT OUTER JOIN mods_gallery mg ON mg.mod_id = m.id
INNER JOIN statuses s ON s.id = m.status
INNER JOIN project_types pt ON pt.id = m.project_type
INNER JOIN side_types cs ON m.client_side = cs.id
INNER JOIN side_types ss ON m.server_side = ss.id
INNER JOIN licenses l ON m.license = l.id
INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.role = $2 AND tm.accepted = TRUE
INNER JOIN users u ON tm.user_id = u.id
WHERE m.id = $1
GROUP BY m.id, s.id, cs.id, ss.id, l.id, pt.id, u.id;
",
//endregion query
id as ProjectId,
crate::models::teams::OWNER_ROLE
)
.fetch_one(exec)
.await?;
let mut categories = split_to_strings(m.categories);
categories.append(&mut split_to_strings(m.loaders));
let versions = split_to_strings(m.versions);
let project_id: crate::models::projects::ProjectId = ProjectId(m.id).into();
Ok(UploadSearchProject {
project_id: format!("{}", project_id),
title: m.title,
description: m.description,
categories,
follows: m.follows,
downloads: m.downloads,
icon_url: m.icon_url.unwrap_or_default(),
author: m.username,
date_created: m.published,
created_timestamp: m.published.unix_timestamp(),
date_modified: m.updated,
modified_timestamp: m.updated.unix_timestamp(),
latest_version: versions
.last()
.cloned()
.unwrap_or_else(|| "None".to_string()),
versions,
license: m.short,
client_side: m.client_side_type,
server_side: m.server_side_type,
slug: m.slug,
project_type: m.project_type_name,
gallery: split_to_strings(m.gallery),
})
}
fn split_to_strings(s: Option<String>) -> Vec<String> {
s.map(|x| x.split(',').map(ToString::to_string).collect())

View File

@@ -1,6 +1,5 @@
/// This module is used for indexing from any source.
pub mod local_import;
pub mod queue;
use crate::search::{SearchConfig, UploadSearchProject};
use local_import::index_local;

View File

@@ -1,36 +0,0 @@
use super::{add_projects, IndexingError, UploadSearchProject};
use crate::search::SearchConfig;
use std::sync::Mutex;
pub struct CreationQueue {
// There's probably a better structure for this, but a mutex works
// and I don't think this can deadlock. This queue requires fast
// writes and then a single potentially slower read/write that
// empties the queue.
queue: Mutex<Vec<UploadSearchProject>>,
}
impl CreationQueue {
pub fn new() -> Self {
CreationQueue {
queue: Mutex::new(Vec::with_capacity(10)),
}
}
pub fn add(&self, search_project: UploadSearchProject) {
// Can only panic if mutex is poisoned
self.queue.lock().unwrap().push(search_project);
}
pub fn take(&self) -> Vec<UploadSearchProject> {
std::mem::replace(
&mut *self.queue.lock().unwrap(),
Vec::with_capacity(10),
)
}
pub async fn index(
&self,
config: &SearchConfig,
) -> Result<(), IndexingError> {
let queue = self.take();
add_projects(queue, config).await
}
}

View File

@@ -56,12 +56,15 @@ pub async fn send_discord_webhook(
value: project.server_side.to_string(),
inline: true,
},
DiscordEmbedField {
name: "categories",
value: project.categories.join(", "),
inline: true,
},
];
];
if !project.categories.is_empty() {
fields.push(DiscordEmbedField {
name: "categories",
value: project.categories.join(", "),
inline: true,
});
}
if let Some(ref slug) = project.slug {
fields.push(DiscordEmbedField {
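The webhook fix replaces a fixed array literal with incremental pushes, so an empty category list no longer produces an embed field with an empty value (which the Discord API is likely to reject). A self-contained sketch of the shape; this DiscordEmbedField definition is a stand-in, not the one in this file:

    struct DiscordEmbedField {
        name: &'static str,
        value: String,
        inline: bool,
    }

    fn category_field(categories: &[String]) -> Option<DiscordEmbedField> {
        if categories.is_empty() {
            return None; // previously this emitted a field with an empty value
        }
        Some(DiscordEmbedField {
            name: "categories",
            value: categories.join(", "),
            inline: true,
        })
    }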

View File

@@ -0,0 +1,38 @@
use crate::validate::{
SupportedGameVersions, ValidationError, ValidationResult,
};
use std::io::Cursor;
use zip::ZipArchive;
pub struct LiteLoaderValidator;
impl super::Validator for LiteLoaderValidator {
fn get_file_extensions(&self) -> &[&str] {
&["litemod"]
}
fn get_project_types(&self) -> &[&str] {
&["mod"]
}
fn get_supported_loaders(&self) -> &[&str] {
&["liteloader"]
}
fn get_supported_game_versions(&self) -> SupportedGameVersions {
SupportedGameVersions::All
}
fn validate(
&self,
archive: &mut ZipArchive<Cursor<bytes::Bytes>>,
) -> Result<ValidationResult, ValidationError> {
archive.by_name("litemod.json").map_err(|_| {
ValidationError::InvalidInput(
"No litemod.json present for LiteLoader file.".into(),
)
})?;
Ok(ValidationResult::Pass)
}
}
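The validator only asserts that litemod.json is present; it does not parse the metadata inside. A hedged sketch of driving it directly, e.g. from a test; the zip-error mapping is illustrative, not part of this commit:

    use std::io::Cursor;
    use zip::ZipArchive;

    fn validate_litemod(
        data: bytes::Bytes,
    ) -> Result<ValidationResult, ValidationError> {
        // Any non-zip payload fails before the litemod.json lookup runs.
        let mut archive = ZipArchive::new(Cursor::new(data)).map_err(|_| {
            ValidationError::InvalidInput("file is not a valid zip archive".into())
        })?;
        LiteLoaderValidator.validate(&mut archive)
    }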

View File

@@ -1,6 +1,8 @@
use crate::models::pack::PackFormat;
use crate::models::projects::{GameVersion, Loader};
use crate::validate::fabric::FabricValidator;
use crate::validate::forge::{ForgeValidator, LegacyForgeValidator};
use crate::validate::liteloader::LiteLoaderValidator;
use crate::validate::pack::PackValidator;
use crate::validate::quilt::QuiltValidator;
use std::io::Cursor;
@@ -10,6 +12,7 @@ use zip::ZipArchive;
mod fabric;
mod forge;
mod liteloader;
mod pack;
mod quilt;
@@ -29,12 +32,24 @@ pub enum ValidationError {
#[derive(Eq, PartialEq)]
pub enum ValidationResult {
/// File should be marked as primary with pack file data
PassWithPackData(PackFormat),
/// File should be marked as primary
Pass,
/// File should not be marked primary; the contained message explains why
Warning(&'static str),
}
impl ValidationResult {
pub fn is_passed(&self) -> bool {
match self {
ValidationResult::PassWithPackData(_) => true,
ValidationResult::Pass => true,
ValidationResult::Warning(_) => false,
}
}
}
pub enum SupportedGameVersions {
All,
PastDate(OffsetDateTime),
@@ -54,12 +69,13 @@ pub trait Validator: Sync {
) -> Result<ValidationResult, ValidationError>;
}
static VALIDATORS: [&dyn Validator; 5] = [
static VALIDATORS: [&dyn Validator; 6] = [
&PackValidator,
&FabricValidator,
&ForgeValidator,
&LegacyForgeValidator,
&QuiltValidator,
&LiteLoaderValidator,
];
/// The return value is whether this file should be marked as primary or not, based on the analysis of the file
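Callers that only need pass/fail can use is_passed(); the upload path instead destructures PassWithPackData to reach the manifest. A sketch of the three-way dispatch, assuming the types above are in scope:

    fn describe(result: ValidationResult) {
        match result {
            ValidationResult::PassWithPackData(pack) => {
                // Modpacks carry their parsed manifest along with the result.
                println!("pack with {} files", pack.files.len());
            }
            ValidationResult::Pass => println!("ordinary primary-eligible file"),
            ValidationResult::Warning(reason) => println!("not primary: {}", reason),
        }
    }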

View File

@@ -1,120 +1,13 @@
use crate::models::projects::SideType;
use crate::util::env::parse_strings_from_var;
use crate::models::pack::{PackFileHash, PackFormat};
use crate::util::validate::validation_errors_to_string;
use crate::validate::{
SupportedGameVersions, ValidationError, ValidationResult,
};
use serde::{Deserialize, Serialize};
use std::io::{Cursor, Read};
use std::path::Component;
use validator::Validate;
use zip::ZipArchive;
#[derive(Serialize, Deserialize, Validate)]
#[serde(rename_all = "camelCase")]
pub struct PackFormat<'a> {
pub game: &'a str,
pub format_version: i32,
#[validate(length(min = 3, max = 512))]
pub version_id: &'a str,
#[validate(length(min = 3, max = 512))]
pub name: &'a str,
#[validate(length(max = 2048))]
pub summary: Option<&'a str>,
#[validate]
pub files: Vec<PackFile<'a>>,
pub dependencies: std::collections::HashMap<PackDependency, &'a str>,
}
#[derive(Serialize, Deserialize, Validate)]
#[serde(rename_all = "camelCase")]
pub struct PackFile<'a> {
pub path: &'a str,
pub hashes: std::collections::HashMap<FileHash, &'a str>,
pub env: Option<std::collections::HashMap<EnvType, SideType>>,
#[validate(custom(function = "validate_download_url"))]
pub downloads: Vec<&'a str>,
pub file_size: u32,
}
fn validate_download_url(
values: &[&str],
) -> Result<(), validator::ValidationError> {
for value in values {
let url = url::Url::parse(value)
.ok()
.ok_or_else(|| validator::ValidationError::new("invalid URL"))?;
if &url.as_str() != value {
return Err(validator::ValidationError::new("invalid URL"));
}
let domains = parse_strings_from_var("WHITELISTED_MODPACK_DOMAINS")
.unwrap_or_default();
if !domains.contains(
&url.domain()
.ok_or_else(|| validator::ValidationError::new("invalid URL"))?
.to_string(),
) {
return Err(validator::ValidationError::new(
"File download source is not from allowed sources",
));
}
}
Ok(())
}
#[derive(Serialize, Deserialize, Eq, PartialEq, Hash)]
#[serde(rename_all = "camelCase", from = "String")]
pub enum FileHash {
Sha1,
Sha512,
Unknown(String),
}
impl From<String> for FileHash {
fn from(s: String) -> Self {
return match s.as_str() {
"sha1" => FileHash::Sha1,
"sha512" => FileHash::Sha512,
_ => FileHash::Unknown(s),
};
}
}
#[derive(Serialize, Deserialize, Eq, PartialEq, Hash)]
#[serde(rename_all = "camelCase")]
pub enum EnvType {
Client,
Server,
}
#[derive(Serialize, Deserialize, Clone, Hash, PartialEq, Eq)]
#[serde(rename_all = "kebab-case")]
pub enum PackDependency {
Forge,
FabricLoader,
Minecraft,
}
impl std::fmt::Display for PackDependency {
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
fmt.write_str(self.as_str())
}
}
impl PackDependency {
// These are constant, so this can remove unnecessary allocations (`to_string`)
pub fn as_str(&self) -> &'static str {
match self {
PackDependency::Forge => "forge",
PackDependency::FabricLoader => "fabric-loader",
PackDependency::Minecraft => "minecraft",
}
}
}
pub struct PackValidator;
impl super::Validator for PackValidator {
@@ -162,20 +55,20 @@ impl super::Validator for PackValidator {
));
}
for file in pack.files {
if file.hashes.get(&FileHash::Sha1).is_none() {
for file in &pack.files {
if file.hashes.get(&PackFileHash::Sha1).is_none() {
return Err(ValidationError::InvalidInput(
"All pack files must provide a SHA1 hash!".into(),
));
}
if file.hashes.get(&FileHash::Sha512).is_none() {
if file.hashes.get(&PackFileHash::Sha512).is_none() {
return Err(ValidationError::InvalidInput(
"All pack files must provide a SHA512 hash!".into(),
));
}
let path = std::path::Path::new(file.path)
let path = std::path::Path::new(&file.path)
.components()
.next()
.ok_or_else(|| {
@@ -194,6 +87,6 @@ impl super::Validator for PackValidator {
};
}
Ok(ValidationResult::Pass)
Ok(ValidationResult::PassWithPackData(pack))
}
}
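The per-file path handling visible above walks Path::components(); the evident intent is to reject entries that could escape the pack root. A minimal illustration of that style of check, not the validator's exact rule set:

    use std::path::{Component, Path};

    // Accept only paths whose first component is a plain name, rejecting
    // absolute paths and `..` traversal at the root.
    fn starts_with_normal_component(path: &str) -> bool {
        matches!(
            Path::new(path).components().next(),
            Some(Component::Normal(_))
        )
    }

    #[test]
    fn rejects_traversal() {
        assert!(starts_with_normal_component("overrides/config/a.toml"));
        assert!(!starts_with_normal_component("../escape.jar"));
        assert!(!starts_with_normal_component("/etc/passwd"));
    }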