Add mod lists for modpacks, liteloader support, update actix, fix moderation webhook (#357)
Cargo.lock (generated)
File diff suppressed because it is too large.

Cargo.toml
@@ -14,11 +14,11 @@ path = "src/main.rs"
[dependencies]
actix = "0.13.0"
actix-web = "4.0.1"
actix-web = { git = "https://github.com/modrinth/actix-web", rev = "88c7c18" }
actix-rt = "2.7.0"
tokio-stream = "0.1.8"
actix-multipart = "0.4.0"
actix-cors = "0.6.1"
actix-multipart = { git = "https://github.com/modrinth/actix-web", rev = "88c7c18" }
actix-cors = { git = "https://github.com/modrinth/actix-extras.git", rev = "34d301f" }

meilisearch-sdk = "0.15.0"
reqwest = { version = "0.11.10", features = ["json"] }
migrations/20220526040434_dep-file-names.sql (new file)
@@ -0,0 +1,2 @@
ALTER TABLE dependencies
ADD COLUMN dependency_file_name varchar(1024) NULL;
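Not part of the commit: a minimal sqlx sketch of writing the new nullable column once this migration has run. The function name and the "required" dependency type are assumptions; the latter mirrors DependencyType::Required used later in this diff.

use sqlx::PgPool;

// Records an "external file" dependency: no Modrinth project or version id is
// known, only the file name taken from a modpack index.
async fn add_file_dependency(
    pool: &PgPool,
    dependent_version_id: i64,
    file_name: &str,
) -> Result<(), sqlx::Error> {
    sqlx::query(
        "INSERT INTO dependencies
             (dependent_id, dependency_type, dependency_id, mod_dependency_id, dependency_file_name)
         VALUES ($1, $2, NULL, NULL, $3)",
    )
    .bind(dependent_version_id)
    .bind("required")
    .bind(file_name)
    .execute(pool)
    .await?;
    Ok(())
}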
sqlx-data.json
@@ -1862,6 +1862,22 @@
]
}
},
"5f94e9e767ec4be7f9136b991b4a29373dbe48feb2f61281e3212721095ed675": {
"query": "\n INSERT INTO dependencies (dependent_id, dependency_type, dependency_id, mod_dependency_id, dependency_file_name)\n VALUES ($1, $2, $3, $4, $5)\n ",
"describe": {
"columns": [],
"parameters": { "Left": ["Int8", "Varchar", "Int8", "Int8", "Varchar"] },
"nullable": []
}
},
"5ff8fd471ff62f86aa95e52cee2723b31ec3d7fc53c3ef1454df40eef0ceff53": {
"query": "\n SELECT version.id FROM (\n SELECT DISTINCT ON(v.id) v.id, v.date_published FROM versions v\n INNER JOIN game_versions_versions gvv ON gvv.joining_version_id = v.id\n INNER JOIN game_versions gv on gvv.game_version_id = gv.id AND (cardinality($2::varchar[]) = 0 OR gv.version = ANY($2::varchar[]))\n INNER JOIN loaders_versions lv ON lv.version_id = v.id\n INNER JOIN loaders l on lv.loader_id = l.id AND (cardinality($3::varchar[]) = 0 OR l.loader = ANY($3::varchar[]))\n WHERE v.mod_id = $1\n ) AS version\n ORDER BY version.date_published ASC\n ",
"describe": {
@@ -1986,6 +2002,44 @@
]
}
},
"6347536d5bf9c2c9bd830fbdc03457df7eafcb2d58a4846589316e5bc72a2020": {
"query": "\n SELECT dependency_id, mod_dependency_id, dependency_file_name, dependency_type\n FROM dependencies\n WHERE dependent_id = $1\n ",
"describe": {
"columns": [
{ "ordinal": 0, "name": "dependency_id", "type_info": "Int8" },
{ "ordinal": 1, "name": "mod_dependency_id", "type_info": "Int8" },
{ "ordinal": 2, "name": "dependency_file_name", "type_info": "Varchar" },
{ "ordinal": 3, "name": "dependency_type", "type_info": "Varchar" }
],
"parameters": { "Left": ["Int8"] },
"nullable": [true, true, true, false]
}
},
"67d021f0776276081d3c50ca97afa6b78b98860bf929009e845e9c00a192e3b5": {
"query": "\n SELECT id FROM report_types\n WHERE name = $1\n ",
"describe": {
@@ -2457,6 +2511,27 @@
]
}
},
"7cae1137ab3aaa8de1617d820fb5635eb7498e61174e79da3cdd0da7e99aaca3": {
"query": "SELECT EXISTS(SELECT 1 FROM versions WHERE (version_number = $1) AND (mod_id = $2))",
"describe": {
"columns": [
{ "ordinal": 0, "name": "exists", "type_info": "Bool" }
],
"parameters": { "Left": ["Text", "Int8"] },
"nullable": [null]
}
},
"8129255d25bf0624d83f50558b668ed7b7f9c264e380d276522fc82bc871939b": {
"query": "\n INSERT INTO notifications_actions (\n notification_id, title, action_route, action_route_method\n )\n VALUES (\n $1, $2, $3, $4\n )\n ",
"describe": {
@@ -2849,153 +2924,6 @@
"nullable": []
}
},
"90bc0bbac72f7b8b8433555309adee07610ae9ae2438610e3adf55909b9f2794": {
"query": "\n SELECT m.id id, m.project_type project_type, m.title title, m.description description, m.downloads downloads, m.follows follows,\n m.icon_url icon_url, m.published published,\n m.updated updated,\n m.team_id team_id, m.license license, m.slug slug,\n s.status status_name, cs.name client_side_type, ss.name server_side_type, l.short short, pt.name project_type_name, u.username username,\n STRING_AGG(DISTINCT c.category, ',') categories, STRING_AGG(DISTINCT lo.loader, ',') loaders, STRING_AGG(DISTINCT gv.version, ',') versions,\n STRING_AGG(DISTINCT mg.image_url, ',') gallery\n FROM mods m\n LEFT OUTER JOIN mods_categories mc ON joining_mod_id = m.id\n LEFT OUTER JOIN categories c ON mc.joining_category_id = c.id\n LEFT OUTER JOIN versions v ON v.mod_id = m.id\n LEFT OUTER JOIN game_versions_versions gvv ON gvv.joining_version_id = v.id\n LEFT OUTER JOIN game_versions gv ON gvv.game_version_id = gv.id\n LEFT OUTER JOIN loaders_versions lv ON lv.version_id = v.id\n LEFT OUTER JOIN loaders lo ON lo.id = lv.loader_id\n LEFT OUTER JOIN mods_gallery mg ON mg.mod_id = m.id\n INNER JOIN statuses s ON s.id = m.status\n INNER JOIN project_types pt ON pt.id = m.project_type\n INNER JOIN side_types cs ON m.client_side = cs.id\n INNER JOIN side_types ss ON m.server_side = ss.id\n INNER JOIN licenses l ON m.license = l.id\n INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.role = $2 AND tm.accepted = TRUE\n INNER JOIN users u ON tm.user_id = u.id\n WHERE m.id = $1\n GROUP BY m.id, s.id, cs.id, ss.id, l.id, pt.id, u.id;\n ",
"describe": {
"columns": [
{ "ordinal": 0, "name": "id", "type_info": "Int8" },
{ "ordinal": 1, "name": "project_type", "type_info": "Int4" },
{ "ordinal": 2, "name": "title", "type_info": "Varchar" },
{ "ordinal": 3, "name": "description", "type_info": "Varchar" },
{ "ordinal": 4, "name": "downloads", "type_info": "Int4" },
{ "ordinal": 5, "name": "follows", "type_info": "Int4" },
{ "ordinal": 6, "name": "icon_url", "type_info": "Varchar" },
{ "ordinal": 7, "name": "published", "type_info": "Timestamptz" },
{ "ordinal": 8, "name": "updated", "type_info": "Timestamptz" },
{ "ordinal": 9, "name": "team_id", "type_info": "Int8" },
{ "ordinal": 10, "name": "license", "type_info": "Int4" },
{ "ordinal": 11, "name": "slug", "type_info": "Varchar" },
{ "ordinal": 12, "name": "status_name", "type_info": "Varchar" },
{ "ordinal": 13, "name": "client_side_type", "type_info": "Varchar" },
{ "ordinal": 14, "name": "server_side_type", "type_info": "Varchar" },
{ "ordinal": 15, "name": "short", "type_info": "Varchar" },
{ "ordinal": 16, "name": "project_type_name", "type_info": "Varchar" },
{ "ordinal": 17, "name": "username", "type_info": "Varchar" },
{ "ordinal": 18, "name": "categories", "type_info": "Text" },
{ "ordinal": 19, "name": "loaders", "type_info": "Text" },
{ "ordinal": 20, "name": "versions", "type_info": "Text" },
{ "ordinal": 21, "name": "gallery", "type_info": "Text" }
],
"parameters": { "Left": ["Int8", "Text"] },
"nullable": [false, false, false, false, false, false, true, false, false, false, false, true, false, false, false, false, false, false, null, null, null, null]
}
},
"9348309884811e8b22f33786ae7c0f259f37f3c90e545f00761a641570107160": {
"query": "\n SELECT m.title title, m.id id, pt.name project_type\n FROM mods m\n INNER JOIN project_types pt ON pt.id = m.project_type\n WHERE m.team_id = $1\n ",
"describe": {
@@ -3028,16 +2956,6 @@
]
}
},
"94a823b6e8b2610d72843008706c448432aab21690b4727aea77ad687a98f634": {
"query": "\n DELETE FROM dependencies WHERE mod_dependency_id = NULL AND dependency_id = NULL\n ",
"describe": {
"columns": [],
"parameters": { "Left": [] },
"nullable": []
}
},
"94ca18bf5244b0add2e6a12edfdc8d67159eed8c5afdf690f9b702faed249a4c": {
"query": "\n SELECT gv.version game_version\n FROM game_versions_versions gvv\n INNER JOIN game_versions gv on gvv.game_version_id = gv.id\n WHERE gvv.joining_version_id = $1\n ORDER BY gv.created\n ",
"describe": {
@@ -3271,38 +3189,6 @@
"nullable": []
}
},
"9dfee6ef2fd11e7664b73b72611670c2a2f8f0b8887a6cabbc3f115b1de4675d": {
"query": "\n SELECT dependency_id, mod_dependency_id, dependency_type\n FROM dependencies\n WHERE dependent_id = $1\n ",
"describe": {
"columns": [
{ "ordinal": 0, "name": "dependency_id", "type_info": "Int8" },
{ "ordinal": 1, "name": "mod_dependency_id", "type_info": "Int8" },
{ "ordinal": 2, "name": "dependency_type", "type_info": "Varchar" }
],
"parameters": { "Left": ["Int8"] },
"nullable": [true, true, false]
}
},
"9f1f1039e8e360092e046b219fe6861368f5b4a338041d426ef689981f0cb9df": {
"query": "\n SELECT id, filename, is_primary, url, size\n FROM files\n WHERE version_id = $1\n ",
"describe": {
@@ -3870,6 +3756,16 @@
]
}
},
"ae1686b8b566dd7ecc57c653c9313a4b324a2ec3a63aa6a44ed1d8ea7999b115": {
"query": "\n DELETE FROM dependencies WHERE mod_dependency_id = NULL AND dependency_id = NULL AND dependency_file_name = NULL\n ",
"describe": {
"columns": [],
"parameters": { "Left": [] },
"nullable": []
}
},
"b030a9e0fdb75eee8ee50aafdcb6063a073e2aa53cc70d40ed46437c1d0dfe80": {
"query": "\n INSERT INTO mods_gallery (\n mod_id, image_url, featured, title, description\n )\n VALUES (\n $1, $2, $3, $4, $5\n )\n ",
"describe": {
@@ -4370,21 +4266,6 @@
"nullable": []
}
},
"c11f52e25edd7239a7a499c55d7127b4f51786e1b7666e3c61925c49fb41e05e": {
"query": "\n INSERT INTO dependencies (dependent_id, dependency_type, dependency_id, mod_dependency_id)\n VALUES ($1, $2, $3, $4)\n ",
"describe": {
"columns": [],
"parameters": { "Left": ["Int8", "Varchar", "Int8", "Int8"] },
"nullable": []
}
},
"c1a3f6dcef6110d6ea884670fb82bac14b98e922bb5673c048ccce7b7300539b": {
"query": "\n SELECT EXISTS(SELECT 1 FROM reports WHERE id = $1)\n ",
"describe": {
@@ -4738,6 +4619,38 @@
"nullable": []
}
},
"cfcc6970c0b469c4afd37bedfd386def7980f6b7006030d4783723861d0e3a38": {
"query": "\n SELECT v.id version_id, v.mod_id project_id, h.hash hash FROM hashes h\n INNER JOIN files f on h.file_id = f.id\n INNER JOIN versions v on f.version_id = v.id\n WHERE h.algorithm = 'sha1' AND h.hash = ANY($1)\n ",
"describe": {
"columns": [
{ "ordinal": 0, "name": "version_id", "type_info": "Int8" },
{ "ordinal": 1, "name": "project_id", "type_info": "Int8" },
{ "ordinal": 2, "name": "hash", "type_info": "Bytea" }
],
"parameters": { "Left": ["ByteaArray"] },
"nullable": [true, true, true]
}
},
"d03630ab0ff37f5f0a8c088558fdc8a1955bad78bea282c40f72d15e5cf77a79": {
"query": "\n SELECT v.id id\n FROM versions v\n INNER JOIN game_versions_versions gvv ON gvv.joining_version_id = v.id AND gvv.game_version_id = ANY($2)\n INNER JOIN loaders_versions lv ON lv.version_id = v.id AND lv.loader_id = ANY($3)\n WHERE v.mod_id = $1\n ORDER BY v.date_published DESC\n LIMIT 1\n ",
"describe": {
@@ -5990,27 +5903,6 @@
]
}
},
"fcb0ceeacfa2fa0f8f1f1987e744dabb73c26ac0fb8178ad9b3b9ebb3bd0acac": {
"query": "SELECT EXISTS(SELECT 1 FROM versions WHERE (version_number=$1) AND (mod_id=$2))",
"describe": {
"columns": [
{ "ordinal": 0, "name": "exists", "type_info": "Bool" }
],
"parameters": { "Left": ["Text", "Int8"] },
"nullable": [null]
}
},
"fd00809bd75662a8f21d812fd00071b4208f88186d9f86badece97c9c95ad3b9": {
"query": "\n SELECT id\n FROM versions\n WHERE mod_id = $1\n ",
"describe": {
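The keys above are the hashes sqlx uses to look up prepared query metadata in offline mode; whenever a query! string changes, as several do in this commit, this file has to be regenerated, typically by running `cargo sqlx prepare` from sqlx-cli against a live database.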

src/database/models/version_item.rs
@@ -21,6 +21,7 @@ pub struct VersionBuilder {
pub struct DependencyBuilder {
    pub project_id: Option<ProjectId>,
    pub version_id: Option<VersionId>,
    pub file_name: Option<String>,
    pub dependency_type: String,
}

@@ -59,13 +60,14 @@ impl DependencyBuilder {

        sqlx::query!(
            "
            INSERT INTO dependencies (dependent_id, dependency_type, dependency_id, mod_dependency_id)
            VALUES ($1, $2, $3, $4)
            INSERT INTO dependencies (dependent_id, dependency_type, dependency_id, mod_dependency_id, dependency_file_name)
            VALUES ($1, $2, $3, $4, $5)
            ",
            version_id as VersionId,
            self.dependency_type,
            version_dependency_id.map(|x| x.0),
            project_dependency_id.map(|x| x.0),
            self.file_name,
        )
        .execute(&mut *transaction)
        .await?;

@@ -455,7 +457,7 @@ impl Version {

        sqlx::query!(
            "
            DELETE FROM dependencies WHERE mod_dependency_id = NULL AND dependency_id = NULL
            DELETE FROM dependencies WHERE mod_dependency_id = NULL AND dependency_id = NULL AND dependency_file_name = NULL
            ",
        )
        .execute(&mut *transaction)

@@ -659,7 +661,7 @@ impl Version {
        ).fetch_all(executor),
        sqlx::query!(
            "
            SELECT dependency_id, mod_dependency_id, dependency_type
            SELECT dependency_id, mod_dependency_id, dependency_file_name, dependency_type
            FROM dependencies
            WHERE dependent_id = $1
            ",

@@ -716,6 +718,7 @@ impl Version {
        .map(|x| QueryDependency {
            project_id: x.mod_dependency_id.map(ProjectId),
            version_id: x.dependency_id.map(VersionId),
            file_name: x.dependency_file_name,
            dependency_type: x.dependency_type,
        })
        .collect(),

@@ -779,6 +782,7 @@ pub struct QueryVersion {
pub struct QueryDependency {
    pub project_id: Option<ProjectId>,
    pub version_id: Option<VersionId>,
    pub file_name: Option<String>,
    pub dependency_type: String,
}
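Not part of the commit: a self-contained sketch of the two shapes a dependency can now take, using minimal stand-in id types and an invented file name.

struct ProjectId(i64);
struct VersionId(i64);

struct DependencyBuilder {
    project_id: Option<ProjectId>,
    version_id: Option<VersionId>,
    file_name: Option<String>,
    dependency_type: String,
}

fn main() {
    // External modpack file: no ids are known, only the file name from the index.
    let external = DependencyBuilder {
        project_id: None,
        version_id: None,
        file_name: Some("SomeExternalMod-1.0.jar".to_string()),
        dependency_type: "required".to_string(),
    };

    // Dependency resolved to an existing Modrinth version by its SHA1 hash.
    let resolved = DependencyBuilder {
        project_id: Some(ProjectId(1)),
        version_id: Some(VersionId(2)),
        file_name: None,
        dependency_type: "required".to_string(),
    };

    assert!(external.file_name.is_some() && resolved.file_name.is_none());
}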
src/main.rs
@@ -10,7 +10,6 @@ use gumdrop::Options;
use log::{error, info, warn};
use search::indexing::index_projects;
use search::indexing::IndexingSettings;
use std::sync::atomic::Ordering;
use std::sync::Arc;

mod database;

@@ -182,33 +181,6 @@ async fn main() -> std::io::Result<()> {
        }
    });

    let indexing_queue =
        Arc::new(search::indexing::queue::CreationQueue::new());

    let mut skip = skip_initial;
    let queue_ref = indexing_queue.clone();
    let search_config_ref = search_config.clone();
    scheduler.run(std::time::Duration::from_secs(15 * 60), move || {
        let queue_ref = queue_ref.clone();
        let search_config_ref = search_config_ref.clone();
        let local_skip = skip;
        if skip {
            skip = false;
        }
        async move {
            if local_skip {
                return;
            }
            info!("Indexing created project queue");
            let result = queue_ref.index(&search_config_ref).await;
            if let Err(e) = result {
                warn!("Indexing created projects failed: {:?}", e);
            }
            crate::health::SEARCH_READY.store(true, Ordering::Release);
            info!("Done indexing created project queue");
        }
    });

    scheduler::schedule_versions(&mut scheduler, pool.clone(), skip_initial);

    let ip_salt = Pepper {

@@ -270,7 +242,6 @@ async fn main() -> std::io::Result<()> {
            )
            .app_data(web::Data::new(pool.clone()))
            .app_data(web::Data::new(file_host.clone()))
            .app_data(web::Data::new(indexing_queue.clone()))
            .app_data(web::Data::new(search_config.clone()))
            .app_data(web::Data::new(ip_salt.clone()))
            .configure(routes::v1_config)
src/models/mod.rs
@@ -1,6 +1,7 @@
pub mod error;
pub mod ids;
pub mod notifications;
pub mod pack;
pub mod projects;
pub mod reports;
pub mod teams;
src/models/pack.rs (new file)
@@ -0,0 +1,111 @@
use crate::models::projects::SideType;
use crate::parse_strings_from_var;
use serde::{Deserialize, Serialize};
use validator::Validate;

#[derive(Serialize, Deserialize, Validate, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct PackFormat {
    pub game: String,
    pub format_version: i32,
    #[validate(length(min = 3, max = 512))]
    pub version_id: String,
    #[validate(length(min = 3, max = 512))]
    pub name: String,
    #[validate(length(max = 2048))]
    pub summary: Option<String>,
    #[validate]
    pub files: Vec<PackFile>,
    pub dependencies: std::collections::HashMap<PackDependency, String>,
}

#[derive(Serialize, Deserialize, Validate, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct PackFile {
    pub path: String,
    pub hashes: std::collections::HashMap<PackFileHash, String>,
    pub env: Option<std::collections::HashMap<EnvType, SideType>>,
    #[validate(custom(function = "validate_download_url"))]
    pub downloads: Vec<String>,
    pub file_size: u32,
}

fn validate_download_url(
    values: &[String],
) -> Result<(), validator::ValidationError> {
    for value in values {
        let url = url::Url::parse(value)
            .ok()
            .ok_or_else(|| validator::ValidationError::new("invalid URL"))?;

        if url.as_str() != value {
            return Err(validator::ValidationError::new("invalid URL"));
        }

        let domains = parse_strings_from_var("WHITELISTED_MODPACK_DOMAINS")
            .unwrap_or_default();
        if !domains.contains(
            &url.domain()
                .ok_or_else(|| validator::ValidationError::new("invalid URL"))?
                .to_string(),
        ) {
            return Err(validator::ValidationError::new(
                "File download source is not from allowed sources",
            ));
        }
    }

    Ok(())
}

#[derive(Serialize, Deserialize, Eq, PartialEq, Hash)]
#[serde(rename_all = "camelCase", from = "String")]
pub enum PackFileHash {
    Sha1,
    Sha512,
    Unknown(String),
}

impl From<String> for PackFileHash {
    fn from(s: String) -> Self {
        return match s.as_str() {
            "sha1" => PackFileHash::Sha1,
            "sha512" => PackFileHash::Sha512,
            _ => PackFileHash::Unknown(s),
        };
    }
}

#[derive(Serialize, Deserialize, Eq, PartialEq, Hash)]
#[serde(rename_all = "camelCase")]
pub enum EnvType {
    Client,
    Server,
}

#[derive(Serialize, Deserialize, Clone, Hash, PartialEq, Eq)]
#[serde(rename_all = "kebab-case")]
pub enum PackDependency {
    Forge,
    FabricLoader,
    QuiltLoader,
    Minecraft,
}

impl std::fmt::Display for PackDependency {
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        fmt.write_str(self.as_str())
    }
}

impl PackDependency {
    // These are constant, so this can remove unnecessary allocations (`to_string`)
    pub fn as_str(&self) -> &'static str {
        match self {
            PackDependency::Forge => "forge",
            PackDependency::FabricLoader => "fabric-loader",
            PackDependency::Minecraft => "minecraft",
            PackDependency::QuiltLoader => "quilt-loader",
        }
    }
}
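Not part of the commit: a rough sketch of a modrinth.index.json shaped the way the serde attributes above expect it - camelCase struct fields, kebab-case dependency keys, and "sha1"/"sha512" hash keys. It is parsed into a serde_json::Value only so the snippet stays self-contained; every concrete value is invented.

use serde_json::Value;

fn main() -> Result<(), serde_json::Error> {
    let raw = r#"{
        "game": "minecraft",
        "formatVersion": 1,
        "versionId": "1.0.0",
        "name": "Example Pack",
        "summary": "An illustrative modpack index",
        "files": [{
            "path": "mods/example-mod.jar",
            "hashes": { "sha1": "aaaa", "sha512": "bbbb" },
            "env": { "client": "required", "server": "optional" },
            "downloads": ["https://cdn.modrinth.com/data/AAAA/versions/1.0.0/example-mod.jar"],
            "fileSize": 1024
        }],
        "dependencies": { "minecraft": "1.18.2", "fabric-loader": "0.13.3" }
    }"#;

    let index: Value = serde_json::from_str(raw)?;
    // The pack validator later in this diff additionally requires real SHA1 and
    // SHA512 hashes and whitelisted download domains; this only checks the shape.
    assert_eq!(index["dependencies"]["minecraft"], "1.18.2");
    assert_eq!(index["files"][0]["hashes"]["sha1"], "aaaa");
    Ok(())
}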

src/models/projects.rs
@@ -166,7 +166,7 @@ pub struct ModeratorMessage {
pub body: Option<String>,
}

#[derive(Serialize, Deserialize, Clone, Debug)]
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
#[serde(rename_all = "kebab-case")]
pub enum SideType {
    Required,

@@ -368,6 +368,7 @@ impl From<QueryVersion> for Version {
            .map(|d| Dependency {
                version_id: d.version_id.map(|i| VersionId(i.0 as u64)),
                project_id: d.project_id.map(|i| ProjectId(i.0 as u64)),
                file_name: d.file_name,
                dependency_type: DependencyType::from_str(
                    d.dependency_type.as_str(),
                ),

@@ -399,7 +400,7 @@ pub struct VersionFile {
    pub size: u32,
}

/// A dependency which describes what versions are required, break support, or are optional to the
/// A dendency which describes what versions are required, break support, or are optional to the
/// version's functionality
#[derive(Serialize, Deserialize, Clone)]
pub struct Dependency {

@@ -407,6 +408,8 @@ pub struct Dependency {
    pub version_id: Option<VersionId>,
    /// The project ID that the dependency is synced with and auto-updated
    pub project_id: Option<ProjectId>,
    /// The filename of the dependency. Used exclusively for external mods on modpacks
    pub file_name: Option<String>,
    /// The type of the dependency
    pub dependency_type: DependencyType,
}

src/routes/project_creation.rs
@@ -17,6 +17,7 @@ use actix_web::{post, HttpRequest, HttpResponse};
use futures::stream::StreamExt;
use serde::{Deserialize, Serialize};
use sqlx::postgres::PgPool;
use std::collections::HashSet;
use std::sync::Arc;
use thiserror::Error;
use time::OffsetDateTime;

@@ -357,6 +358,12 @@ pub async fn project_create_inner(
        CreateError::InvalidInput(validation_errors_to_string(err, None))
    })?;

    let mut uniq = HashSet::new();
    create_data
        .initial_versions
        .iter()
        .all(|x| uniq.insert(x.version_number.clone()));

    let slug_project_id_option: Option<ProjectId> =
        serde_json::from_str(&*format!("\"{}\"", create_data.slug)).ok();

@@ -542,6 +549,7 @@ pub async fn project_create_inner(
        file_host,
        uploaded_files,
        &mut created_version.files,
        &mut created_version.dependencies,
        &cdn_url,
        &content_disposition,
        project_id,

@@ -824,6 +832,7 @@ async fn create_initial_version(
            version_id: d.version_id.map(|x| x.into()),
            project_id: d.project_id.map(|x| x.into()),
            dependency_type: d.dependency_type.to_string(),
            file_name: None,
        })
        .collect::<Vec<_>>();
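The hunk above only shows the HashSet being filled; not part of the commit, a self-contained sketch of the duplicate check that HashSet::insert enables, since insert returns false for a value it has already seen:

use std::collections::HashSet;

fn version_numbers_unique(version_numbers: &[String]) -> bool {
    let mut uniq = HashSet::new();
    // all() short-circuits on the first duplicate.
    version_numbers.iter().all(|x| uniq.insert(x.clone()))
}

fn main() {
    assert!(version_numbers_unique(&["1.0.0".into(), "1.1.0".into()]));
    assert!(!version_numbers_unique(&["1.0.0".into(), "1.0.0".into()]));
}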

src/routes/projects.rs
@@ -6,12 +6,10 @@ use crate::models::projects::{
};
use crate::models::teams::Permissions;
use crate::routes::ApiError;
use crate::search::indexing::queue::CreationQueue;
use crate::search::{search_for_project, SearchConfig, SearchError};
use crate::util::auth::{get_user_from_headers, is_authorized};
use crate::util::routes::read_from_payload;
use crate::util::validate::validation_errors_to_string;
use actix_web::web::Data;
use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
use futures::StreamExt;
use serde::{Deserialize, Serialize};

@@ -278,7 +276,6 @@ pub async fn project_edit(
    pool: web::Data<PgPool>,
    config: web::Data<SearchConfig>,
    new_project: web::Json<EditProject>,
    indexing_queue: Data<Arc<CreationQueue>>,
) -> Result<HttpResponse, ApiError> {
    let user = get_user_from_headers(req.headers(), &**pool).await?;

@@ -442,17 +439,6 @@ pub async fn project_edit(
        && !status.is_searchable()
    {
        delete_from_index(id.into(), config).await?;
    } else if !project_item.status.is_searchable()
        && status.is_searchable()
    {
        // let index_project =
        //     crate::search::indexing::local_import::query_one(
        //         id,
        //         &mut *transaction,
        //     )
        //     .await?;
        //
        // indexing_queue.add(index_project);
    }
}

src/routes/version_creation.rs
@@ -1,12 +1,13 @@
use crate::database::models;
use crate::database::models::notification_item::NotificationBuilder;
use crate::database::models::version_item::{
    VersionBuilder, VersionFileBuilder,
    DependencyBuilder, VersionBuilder, VersionFileBuilder,
};
use crate::file_hosting::FileHost;
use crate::models::pack::PackFileHash;
use crate::models::projects::{
    Dependency, GameVersion, Loader, ProjectId, Version, VersionFile,
    VersionId, VersionType,
    Dependency, DependencyType, GameVersion, Loader, ProjectId, Version,
    VersionFile, VersionId, VersionType,
};
use crate::models::teams::Permissions;
use crate::routes::project_creation::{CreateError, UploadedFile};

@@ -171,7 +172,7 @@ async fn version_create_inner(
    // Check whether there is already a version of this project with the
    // same version number
    let results = sqlx::query!(
        "SELECT EXISTS(SELECT 1 FROM versions WHERE (version_number=$1) AND (mod_id=$2))",
        "SELECT EXISTS(SELECT 1 FROM versions WHERE (version_number = $1) AND (mod_id = $2))",
        version_create_data.version_number,
        project_id as models::ProjectId,
    )

@@ -262,6 +263,7 @@ async fn version_create_inner(
            version_id: d.version_id.map(|x| x.into()),
            project_id: d.project_id.map(|x| x.into()),
            dependency_type: d.dependency_type.to_string(),
            file_name: None,
        })
        .collect::<Vec<_>>();

@@ -313,6 +315,7 @@ async fn version_create_inner(
        file_host,
        uploaded_files,
        &mut version.files,
        &mut version.dependencies,
        &cdn_url,
        &content_disposition,
        version.project_id.into(),

@@ -579,11 +582,23 @@ async fn upload_file_to_version_inner(
        ))
    })?;

    let mut dependencies = version
        .dependencies
        .iter()
        .map(|x| models::version_item::DependencyBuilder {
            project_id: x.project_id,
            version_id: x.version_id,
            file_name: None,
            dependency_type: x.dependency_type.clone(),
        })
        .collect();

    upload_file(
        &mut field,
        file_host,
        uploaded_files,
        &mut file_builders,
        &mut dependencies,
        &cdn_url,
        &content_disposition,
        project_id,

@@ -625,6 +640,7 @@ pub async fn upload_file(
    file_host: &dyn FileHost,
    uploaded_files: &mut Vec<UploadedFile>,
    version_files: &mut Vec<models::version_item::VersionFileBuilder>,
    dependencies: &mut Vec<models::version_item::DependencyBuilder>,
    cdn_url: &str,
    content_disposition: &actix_web::http::header::ContentDisposition,
    project_id: crate::models::ids::ProjectId,

@@ -680,6 +696,66 @@ pub async fn upload_file(
    )
    .await?;

    if let ValidationResult::PassWithPackData(ref data) = validation_result {
        if dependencies.is_empty() {
            let hashes: Vec<Vec<u8>> = data
                .files
                .iter()
                .filter_map(|x| x.hashes.get(&PackFileHash::Sha1))
                .map(|x| x.as_bytes().to_vec())
                .collect();

            let res = sqlx::query!(
                "
                SELECT v.id version_id, v.mod_id project_id, h.hash hash FROM hashes h
                INNER JOIN files f on h.file_id = f.id
                INNER JOIN versions v on f.version_id = v.id
                WHERE h.algorithm = 'sha1' AND h.hash = ANY($1)
                ",
                &*hashes
            )
            .fetch_all(&mut *transaction).await?;

            for file in &data.files {
                if let Some(dep) = res.iter().find(|x| {
                    x.hash.as_deref()
                        == file
                            .hashes
                            .get(&PackFileHash::Sha1)
                            .map(|x| x.as_bytes())
                }) {
                    if let Some(project_id) = dep.project_id {
                        if let Some(version_id) = dep.version_id {
                            dependencies.push(DependencyBuilder {
                                project_id: Some(models::ProjectId(project_id)),
                                version_id: Some(models::VersionId(version_id)),
                                file_name: None,
                                dependency_type: DependencyType::Required
                                    .to_string(),
                            });
                        }
                    }
                } else {
                    if let Some(first_download) = file.downloads.first() {
                        dependencies.push(DependencyBuilder {
                            project_id: None,
                            version_id: None,
                            file_name: Some(
                                first_download
                                    .rsplit('/')
                                    .next()
                                    .unwrap_or(first_download)
                                    .to_string(),
                            ),
                            dependency_type: DependencyType::Required
                                .to_string(),
                        });
                    }
                }
            }
        }
    }

    let file_path_encode = format!(
        "data/{}/versions/{}/{}",
        project_id,

@@ -700,6 +776,20 @@ pub async fn upload_file(
        file_name: file_path,
    });

    let sha1_bytes = upload_data.content_sha1.into_bytes();
    let sha512_bytes = upload_data.content_sha512.into_bytes();

    if version_files.iter().any(|x| {
        x.hashes
            .iter()
            .any(|y| y.hash == sha1_bytes || y.hash == sha512_bytes)
    }) {
        return Err(CreateError::InvalidInput(
            "Duplicate files are not allowed to be uploaded to Modrinth!"
                .to_string(),
        ));
    }

    version_files.push(models::version_item::VersionFileBuilder {
        filename: file_name.to_string(),
        url: format!("{}/{}", cdn_url, file_path_encode),

@@ -708,16 +798,16 @@ pub async fn upload_file(
            algorithm: "sha1".to_string(),
            // This is an invalid cast - the database expects the hash's
            // bytes, but this is the string version.
            hash: upload_data.content_sha1.into_bytes(),
            hash: sha1_bytes,
        },
        models::version_item::HashBuilder {
            algorithm: "sha512".to_string(),
            // This is an invalid cast - the database expects the hash's
            // bytes, but this is the string version.
            hash: upload_data.content_sha512.into_bytes(),
            hash: sha512_bytes,
        },
    ],
    primary: (validation_result == ValidationResult::Pass
    primary: (validation_result.is_passed()
        && version_files.iter().all(|x| !x.primary)
        && !ignore_primary)
        || force_primary,
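Not part of the commit: the fallback branch above stores the last path segment of the first download URL as the dependency's file name. A tiny standalone version of that logic, with invented URLs:

fn external_file_name(download_url: &str) -> String {
    download_url
        .rsplit('/')
        .next()
        .unwrap_or(download_url)
        .to_string()
}

fn main() {
    assert_eq!(
        external_file_name("https://example.com/files/SomeExternalMod-1.0.jar"),
        "SomeExternalMod-1.0.jar"
    );
    // A value without any '/' falls back to the whole string.
    assert_eq!(external_file_name("plainname.jar"), "plainname.jar");
}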

src/routes/versions.rs
@@ -250,6 +250,21 @@ pub async fn version_edit(
    }

    if let Some(number) = &new_version.version_number {
        let results = sqlx::query!(
            "SELECT EXISTS(SELECT 1 FROM versions WHERE (version_number = $1) AND (mod_id = $2))",
            number,
            version_item.project_id as database::models::ids::ProjectId,
        )
        .fetch_one(&mut *transaction)
        .await?;

        if results.exists.unwrap_or(true) {
            return Err(ApiError::InvalidInput(
                "A version with that version_number already exists"
                    .to_string(),
            ));
        }

        sqlx::query!(
            "
            UPDATE versions

@@ -292,6 +307,7 @@ pub async fn version_edit(
        .map(|x| database::models::version_item::DependencyBuilder {
            project_id: x.project_id.map(|x| x.into()),
            version_id: x.version_id.map(|x| x.into()),
            file_name: x.file_name.clone(),
            dependency_type: x.dependency_type.to_string(),
        })
        .collect::<Vec<database::models::version_item::DependencyBuilder>>();

src/search/indexing/local_import.rs
@@ -86,78 +86,6 @@ pub async fn index_local(
    .await?
    )
}
pub async fn query_one(
    id: ProjectId,
    exec: &mut sqlx::PgConnection,
) -> Result<UploadSearchProject, IndexingError> {
    let m = sqlx::query!(
        //region query
        "
        SELECT m.id id, m.project_type project_type, m.title title, m.description description, m.downloads downloads, m.follows follows,
        m.icon_url icon_url, m.published published,
        m.updated updated,
        m.team_id team_id, m.license license, m.slug slug,
        s.status status_name, cs.name client_side_type, ss.name server_side_type, l.short short, pt.name project_type_name, u.username username,
        STRING_AGG(DISTINCT c.category, ',') categories, STRING_AGG(DISTINCT lo.loader, ',') loaders, STRING_AGG(DISTINCT gv.version, ',') versions,
        STRING_AGG(DISTINCT mg.image_url, ',') gallery
        FROM mods m
        LEFT OUTER JOIN mods_categories mc ON joining_mod_id = m.id
        LEFT OUTER JOIN categories c ON mc.joining_category_id = c.id
        LEFT OUTER JOIN versions v ON v.mod_id = m.id
        LEFT OUTER JOIN game_versions_versions gvv ON gvv.joining_version_id = v.id
        LEFT OUTER JOIN game_versions gv ON gvv.game_version_id = gv.id
        LEFT OUTER JOIN loaders_versions lv ON lv.version_id = v.id
        LEFT OUTER JOIN loaders lo ON lo.id = lv.loader_id
        LEFT OUTER JOIN mods_gallery mg ON mg.mod_id = m.id
        INNER JOIN statuses s ON s.id = m.status
        INNER JOIN project_types pt ON pt.id = m.project_type
        INNER JOIN side_types cs ON m.client_side = cs.id
        INNER JOIN side_types ss ON m.server_side = ss.id
        INNER JOIN licenses l ON m.license = l.id
        INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.role = $2 AND tm.accepted = TRUE
        INNER JOIN users u ON tm.user_id = u.id
        WHERE m.id = $1
        GROUP BY m.id, s.id, cs.id, ss.id, l.id, pt.id, u.id;
        ",
        //endregion query
        id as ProjectId,
        crate::models::teams::OWNER_ROLE
    )
    .fetch_one(exec)
    .await?;

    let mut categories = split_to_strings(m.categories);
    categories.append(&mut split_to_strings(m.loaders));
    let versions = split_to_strings(m.versions);

    let project_id: crate::models::projects::ProjectId = ProjectId(m.id).into();

    Ok(UploadSearchProject {
        project_id: format!("{}", project_id),
        title: m.title,
        description: m.description,
        categories,
        follows: m.follows,
        downloads: m.downloads,
        icon_url: m.icon_url.unwrap_or_default(),
        author: m.username,
        date_created: m.published,
        created_timestamp: m.published.unix_timestamp(),
        date_modified: m.updated,
        modified_timestamp: m.updated.unix_timestamp(),
        latest_version: versions
            .last()
            .cloned()
            .unwrap_or_else(|| "None".to_string()),
        versions,
        license: m.short,
        client_side: m.client_side_type,
        server_side: m.server_side_type,
        slug: m.slug,
        project_type: m.project_type_name,
        gallery: split_to_strings(m.gallery),
    })
}

fn split_to_strings(s: Option<String>) -> Vec<String> {
    s.map(|x| x.split(',').map(ToString::to_string).collect())

src/search/indexing/mod.rs
@@ -1,6 +1,5 @@
/// This module is used for the indexing from any source.
pub mod local_import;
pub mod queue;

use crate::search::{SearchConfig, UploadSearchProject};
use local_import::index_local;

src/search/indexing/queue.rs (deleted)
@@ -1,36 +0,0 @@
use super::{add_projects, IndexingError, UploadSearchProject};
use crate::search::SearchConfig;
use std::sync::Mutex;

pub struct CreationQueue {
    // There's probably a better structure for this, but a mutex works
    // and I don't think this can deadlock. This queue requires fast
    // writes and then a single potentially slower read/write that
    // empties the queue.
    queue: Mutex<Vec<UploadSearchProject>>,
}

impl CreationQueue {
    pub fn new() -> Self {
        CreationQueue {
            queue: Mutex::new(Vec::with_capacity(10)),
        }
    }
    pub fn add(&self, search_project: UploadSearchProject) {
        // Can only panic if mutex is poisoned
        self.queue.lock().unwrap().push(search_project);
    }
    pub fn take(&self) -> Vec<UploadSearchProject> {
        std::mem::replace(
            &mut *self.queue.lock().unwrap(),
            Vec::with_capacity(10),
        )
    }
    pub async fn index(
        &self,
        config: &SearchConfig,
    ) -> Result<(), IndexingError> {
        let queue = self.take();
        add_projects(queue, config).await
    }
}

src/util/webhook.rs
@@ -56,12 +56,15 @@ pub async fn send_discord_webhook(
            value: project.server_side.to_string(),
            inline: true,
        },
        DiscordEmbedField {
    ];

    if !project.categories.is_empty() {
        fields.push(DiscordEmbedField {
            name: "categories",
            value: project.categories.join(", "),
            inline: true,
        },
    ];
        });
    }

    if let Some(ref slug) = project.slug {
        fields.push(DiscordEmbedField {
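Not part of the commit: the webhook fix reads as making the categories field conditional, since Discord rejects embed fields with empty values. A minimal stand-alone sketch of that pattern; the struct here is a stand-in, not the project's DiscordEmbedField:

struct EmbedField {
    name: &'static str,
    value: String,
    inline: bool,
}

fn category_field(categories: &[String]) -> Option<EmbedField> {
    // Only build the field when there is something to show.
    if categories.is_empty() {
        None
    } else {
        Some(EmbedField {
            name: "categories",
            value: categories.join(", "),
            inline: true,
        })
    }
}

fn main() {
    assert!(category_field(&[]).is_none());
    assert!(category_field(&["utility".to_string()]).is_some());
}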
src/validate/liteloader.rs (new file)
@@ -0,0 +1,38 @@
use crate::validate::{
    SupportedGameVersions, ValidationError, ValidationResult,
};
use std::io::Cursor;
use zip::ZipArchive;

pub struct LiteLoaderValidator;

impl super::Validator for LiteLoaderValidator {
    fn get_file_extensions(&self) -> &[&str] {
        &["litemod"]
    }

    fn get_project_types(&self) -> &[&str] {
        &["mod"]
    }

    fn get_supported_loaders(&self) -> &[&str] {
        &["liteloader"]
    }

    fn get_supported_game_versions(&self) -> SupportedGameVersions {
        SupportedGameVersions::All
    }

    fn validate(
        &self,
        archive: &mut ZipArchive<Cursor<bytes::Bytes>>,
    ) -> Result<ValidationResult, ValidationError> {
        archive.by_name("litemod.json").map_err(|_| {
            ValidationError::InvalidInput(
                "No litemod.json present for LiteLoader file.".into(),
            )
        })?;

        Ok(ValidationResult::Pass)
    }
}
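Not part of the commit: a self-contained sketch of the only check the validator performs, using the zip crate (0.6-era API) to build an in-memory .litemod archive; the litemod.json contents are invented.

use std::io::{Cursor, Write};
use zip::write::FileOptions;
use zip::{ZipArchive, ZipWriter};

fn main() -> zip::result::ZipResult<()> {
    // Build a tiny in-memory archive containing litemod.json.
    let mut writer = ZipWriter::new(Cursor::new(Vec::new()));
    writer.start_file("litemod.json", FileOptions::default())?;
    writer.write_all(br#"{"name": "examplemod", "mcversion": "1.12.2"}"#)?;
    let buffer = writer.finish()?.into_inner();

    // The validator only requires that litemod.json exists in the zip.
    let mut archive = ZipArchive::new(Cursor::new(buffer))?;
    assert!(archive.by_name("litemod.json").is_ok());
    Ok(())
}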

src/validate/mod.rs
@@ -1,6 +1,8 @@
use crate::models::pack::PackFormat;
use crate::models::projects::{GameVersion, Loader};
use crate::validate::fabric::FabricValidator;
use crate::validate::forge::{ForgeValidator, LegacyForgeValidator};
use crate::validate::liteloader::LiteLoaderValidator;
use crate::validate::pack::PackValidator;
use crate::validate::quilt::QuiltValidator;
use std::io::Cursor;

@@ -10,6 +12,7 @@ use zip::ZipArchive;

mod fabric;
mod forge;
mod liteloader;
mod pack;
mod quilt;

@@ -29,12 +32,24 @@ pub enum ValidationError {

#[derive(Eq, PartialEq)]
pub enum ValidationResult {
    /// File should be marked as primary with pack file data
    PassWithPackData(PackFormat),
    /// File should be marked as primary
    Pass,
    /// File should not be marked primary, the reason for which is inside the String
    Warning(&'static str),
}

impl ValidationResult {
    pub fn is_passed(&self) -> bool {
        match self {
            ValidationResult::PassWithPackData(_) => true,
            ValidationResult::Pass => true,
            ValidationResult::Warning(_) => false,
        }
    }
}

pub enum SupportedGameVersions {
    All,
    PastDate(OffsetDateTime),

@@ -54,12 +69,13 @@ pub trait Validator: Sync {
    ) -> Result<ValidationResult, ValidationError>;
}

static VALIDATORS: [&dyn Validator; 5] = [
static VALIDATORS: [&dyn Validator; 6] = [
    &PackValidator,
    &FabricValidator,
    &ForgeValidator,
    &LegacyForgeValidator,
    &QuiltValidator,
    &LiteLoaderValidator,
];

/// The return value is whether this file should be marked as primary or not, based on the analysis of the file
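Not part of the commit: a compressed, self-contained illustration of how the new variant and is_passed() interact with primary-file selection; the types are minimal stand-ins for the ones above, and the matches! form is equivalent to the match in the diff.

struct PackData {
    file_count: usize,
}

enum ValidationResult {
    PassWithPackData(PackData),
    Pass,
    Warning(&'static str),
}

impl ValidationResult {
    fn is_passed(&self) -> bool {
        !matches!(self, ValidationResult::Warning(_))
    }
}

fn main() {
    let results = [
        ValidationResult::PassWithPackData(PackData { file_count: 3 }),
        ValidationResult::Pass,
        ValidationResult::Warning("unrecognised file type"),
    ];
    // Only passing results are eligible to become the primary version file,
    // which is how upload_file uses is_passed() earlier in this diff.
    let eligible = results.iter().filter(|r| r.is_passed()).count();
    assert_eq!(eligible, 2);
}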

src/validate/pack.rs
@@ -1,120 +1,13 @@
use crate::models::projects::SideType;
use crate::util::env::parse_strings_from_var;
use crate::models::pack::{PackFileHash, PackFormat};
use crate::util::validate::validation_errors_to_string;
use crate::validate::{
    SupportedGameVersions, ValidationError, ValidationResult,
};
use serde::{Deserialize, Serialize};
use std::io::{Cursor, Read};
use std::path::Component;
use validator::Validate;
use zip::ZipArchive;

#[derive(Serialize, Deserialize, Validate)]
#[serde(rename_all = "camelCase")]
pub struct PackFormat<'a> {
    pub game: &'a str,
    pub format_version: i32,
    #[validate(length(min = 3, max = 512))]
    pub version_id: &'a str,
    #[validate(length(min = 3, max = 512))]
    pub name: &'a str,
    #[validate(length(max = 2048))]
    pub summary: Option<&'a str>,
    #[validate]
    pub files: Vec<PackFile<'a>>,
    pub dependencies: std::collections::HashMap<PackDependency, &'a str>,
}

#[derive(Serialize, Deserialize, Validate)]
#[serde(rename_all = "camelCase")]
pub struct PackFile<'a> {
    pub path: &'a str,
    pub hashes: std::collections::HashMap<FileHash, &'a str>,
    pub env: Option<std::collections::HashMap<EnvType, SideType>>,
    #[validate(custom(function = "validate_download_url"))]
    pub downloads: Vec<&'a str>,
    pub file_size: u32,
}

fn validate_download_url(
    values: &[&str],
) -> Result<(), validator::ValidationError> {
    for value in values {
        let url = url::Url::parse(value)
            .ok()
            .ok_or_else(|| validator::ValidationError::new("invalid URL"))?;

        if &url.as_str() != value {
            return Err(validator::ValidationError::new("invalid URL"));
        }

        let domains = parse_strings_from_var("WHITELISTED_MODPACK_DOMAINS")
            .unwrap_or_default();
        if !domains.contains(
            &url.domain()
                .ok_or_else(|| validator::ValidationError::new("invalid URL"))?
                .to_string(),
        ) {
            return Err(validator::ValidationError::new(
                "File download source is not from allowed sources",
            ));
        }
    }

    Ok(())
}

#[derive(Serialize, Deserialize, Eq, PartialEq, Hash)]
#[serde(rename_all = "camelCase", from = "String")]
pub enum FileHash {
    Sha1,
    Sha512,
    Unknown(String),
}

impl From<String> for FileHash {
    fn from(s: String) -> Self {
        return match s.as_str() {
            "sha1" => FileHash::Sha1,
            "sha512" => FileHash::Sha512,
            _ => FileHash::Unknown(s),
        };
    }
}

#[derive(Serialize, Deserialize, Eq, PartialEq, Hash)]
#[serde(rename_all = "camelCase")]
pub enum EnvType {
    Client,
    Server,
}

#[derive(Serialize, Deserialize, Clone, Hash, PartialEq, Eq)]
#[serde(rename_all = "kebab-case")]
pub enum PackDependency {
    Forge,
    FabricLoader,
    Minecraft,
}

impl std::fmt::Display for PackDependency {
    fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result {
        fmt.write_str(self.as_str())
    }
}

impl PackDependency {
    // These are constant, so this can remove unnecessary allocations (`to_string`)
    pub fn as_str(&self) -> &'static str {
        match self {
            PackDependency::Forge => "forge",
            PackDependency::FabricLoader => "fabric-loader",
            PackDependency::Minecraft => "minecraft",
        }
    }
}

pub struct PackValidator;

impl super::Validator for PackValidator {

@@ -162,20 +55,20 @@ impl super::Validator for PackValidator {
        ));
    }

    for file in pack.files {
        if file.hashes.get(&FileHash::Sha1).is_none() {
    for file in &pack.files {
        if file.hashes.get(&PackFileHash::Sha1).is_none() {
            return Err(ValidationError::InvalidInput(
                "All pack files must provide a SHA1 hash!".into(),
            ));
        }

        if file.hashes.get(&FileHash::Sha512).is_none() {
        if file.hashes.get(&PackFileHash::Sha512).is_none() {
            return Err(ValidationError::InvalidInput(
                "All pack files must provide a SHA512 hash!".into(),
            ));
        }

        let path = std::path::Path::new(file.path)
        let path = std::path::Path::new(&file.path)
            .components()
            .next()
            .ok_or_else(|| {

@@ -194,6 +87,6 @@ impl super::Validator for PackValidator {
    };
}

Ok(ValidationResult::Pass)
Ok(ValidationResult::PassWithPackData(pack))
}
}