Housekeeping + Fix DB perf issues (#542)

* Housekeeping + fix db perf issues

* run prep
Geometrically
2023-02-22 16:11:14 -07:00
committed by GitHub
parent 9afdc55416
commit 00d09aa01e
21 changed files with 1251 additions and 674 deletions

Cargo.lock (generated): 1239 changed lines; diff suppressed because it is too large.


@@ -12,35 +12,35 @@ path = "src/main.rs"
[dependencies]
actix = "0.13.0"
actix-web = "4.2.1"
actix-rt = "2.7.0"
actix-multipart = "0.4.0"
actix-web = "4.3.0"
actix-rt = "2.8.0"
actix-multipart = "0.5.0"
actix-cors = "0.6.4"
tokio = { version = "1.21.2", features = ["sync"] }
tokio-stream = "0.1.10"
tokio = { version = "1.25.0", features = ["sync"] }
tokio-stream = "0.1.11"
futures = "0.3.24"
futures = "0.3.26"
futures-timer = "3.0.2"
async-trait = "0.1.57"
async-trait = "0.1.64"
dashmap = "5.4.0"
lazy_static = "1.4.0"
meilisearch-sdk = "0.15.0"
meilisearch-sdk = "0.22.0"
rust-s3 = "0.32.3"
reqwest = { version = "0.11.12", features = ["json", "multipart"] }
reqwest = { version = "0.11.14", features = ["json", "multipart"] }
serde_json = "1.0"
serde = { version = "1.0", features = ["derive"] }
serde_with = "2.0.1"
chrono = { version = "0.4.22", features = ["serde"]}
serde_with = "2.2.0"
chrono = { version = "0.4.23", features = ["serde"]}
yaserde = "0.8.0"
yaserde_derive = "0.8.0"
xml-rs = "0.8.4"
rand = "0.8.5"
bytes = "1.2.1"
base64 = "0.20.0"
bytes = "1.4.0"
base64 = "0.21.0"
sha1 = { version = "0.6.1", features = ["std"] }
sha2 = "0.9.9"
hmac = "0.11.0"
@@ -50,25 +50,25 @@ hex = "0.4.3"
url = "2.3.1"
urlencoding = "2.1.2"
zip = "0.6.3"
zip = "0.6.4"
itertools = "0.10.5"
validator = { version = "0.16.0", features = ["derive", "phone"] }
regex = "1.6.0"
regex = "1.7.1"
censor = "0.3.0"
spdx = { version = "0.9.0", features = ["text"] }
spdx = { version = "0.10.0", features = ["text"] }
dotenvy = "0.15.6"
log = "0.4.17"
env_logger = "0.9.1"
thiserror = "1.0.37"
env_logger = "0.10.0"
thiserror = "1.0.38"
sqlx = { version = "0.6.2", features = ["runtime-actix-rustls", "postgres", "chrono", "offline", "macros", "migrate", "decimal", "json"] }
rust_decimal = { version = "1.26", features = ["serde-with-float", "serde-with-str"] }
rust_decimal = { version = "1.28.1", features = ["serde-with-float", "serde-with-str"] }
sentry = { version = "0.29.2", features = ["profiling"] }
sentry-actix = "0.29.2"
sentry = { version = "0.29.3", features = ["profiling"] }
sentry-actix = "0.29.3"
image = "0.24.5"
color-thief = "0.2.2"


@@ -12,12 +12,12 @@ services:
POSTGRES_PASSWORD: labrinth
POSTGRES_HOST_AUTH_METHOD: trust
meilisearch:
image: getmeili/meilisearch:v0.25.0
image: getmeili/meilisearch:v1.0.1
restart: on-failure
ports:
- "7700:7700"
volumes:
- meilisearch-data:/data.ms
- meilisearch-data:/meili_data
environment:
MEILI_MASTER_KEY: modrinth
volumes:


@@ -2868,6 +2868,38 @@
},
"query": "\n INSERT INTO historical_payouts (user_id, amount, status)\n VALUES ($1, $2, $3)\n "
},
"4838777a8ef4371f4f5bb4f4f038bb6d041455f0849a3972a5418d75165ae9c7": {
"describe": {
"columns": [
{
"name": "dependency_id",
"ordinal": 0,
"type_info": "Int8"
},
{
"name": "mod_id",
"ordinal": 1,
"type_info": "Int8"
},
{
"name": "mod_dependency_id",
"ordinal": 2,
"type_info": "Int8"
}
],
"nullable": [
true,
null,
true
],
"parameters": {
"Left": [
"Int8"
]
}
},
"query": "\n SELECT d.dependency_id, COALESCE(vd.mod_id, 0) mod_id, d.mod_dependency_id\n FROM versions v\n INNER JOIN dependencies d ON d.dependent_id = v.id\n LEFT JOIN versions vd ON d.dependency_id = vd.id\n WHERE v.mod_id = $1\n "
},
"49a5d21a1454afc6383b78e468fd0decc75b9163e7286f34ceab22d563a0d3f7": {
"describe": {
"columns": [],
@@ -2914,6 +2946,43 @@
},
"query": "\n UPDATE mods\n SET server_side = $1\n WHERE (id = $2)\n "
},
"4ad05a5f35600c5dadedfe93e91374ef20ba55c6a9ac6016a01422f2ae8dbb72": {
"describe": {
"columns": [
{
"name": "version_id",
"ordinal": 0,
"type_info": "Int8"
},
{
"name": "mod_id",
"ordinal": 1,
"type_info": "Int8"
},
{
"name": "date_published",
"ordinal": 2,
"type_info": "Timestamptz"
}
],
"nullable": [
false,
false,
false
],
"parameters": {
"Left": [
"Int8Array",
"VarcharArray",
"VarcharArray",
"Varchar",
"Int8",
"Int8"
]
}
},
"query": "\n SELECT DISTINCT ON(v.date_published, v.id) version_id, v.mod_id, v.date_published FROM versions v\n INNER JOIN game_versions_versions gvv ON gvv.joining_version_id = v.id\n INNER JOIN game_versions gv on gvv.game_version_id = gv.id AND (cardinality($2::varchar[]) = 0 OR gv.version = ANY($2::varchar[]))\n INNER JOIN loaders_versions lv ON lv.version_id = v.id\n INNER JOIN loaders l on lv.loader_id = l.id AND (cardinality($3::varchar[]) = 0 OR l.loader = ANY($3::varchar[]))\n WHERE v.mod_id = ANY($1) AND ($4::varchar IS NULL OR v.version_type = $4)\n ORDER BY v.date_published, v.id ASC\n LIMIT $5 OFFSET $6\n "
},
"4b14b5c69f6a0ee4e06e41d7cea425c7c34d6db45895275a2ce8adfa28dc8f72": {
"describe": {
"columns": [
@@ -3728,6 +3797,27 @@
},
"query": "\n UPDATE dependencies\n SET dependency_id = $2\n WHERE id = ANY($1::bigint[])\n "
},
"71abd207410d123f9a50345ddcddee335fea0d0cc6f28762713ee01a36aee8a0": {
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Int8"
}
],
"nullable": [
false
],
"parameters": {
"Left": [
"Int8Array",
"Int8"
]
}
},
"query": "\n SELECT m.id FROM mods m\n INNER JOIN team_members tm ON tm.team_id = m.team_id AND user_id = $2\n WHERE m.id = ANY($1)\n "
},
"72ad6f4be40d7620a0ec557e3806da41ce95335aeaa910fe35aca2ec7c3f09b6": {
"describe": {
"columns": [
@@ -4642,6 +4732,35 @@
},
"query": "\n DELETE FROM loaders\n WHERE loader = $1\n "
},
"9284d7f22617e0a7daf91540ff31791d0921ec5d4eb4809846dc67567bec1a81": {
"describe": {
"columns": [
{
"name": "hash",
"ordinal": 0,
"type_info": "Bytea"
},
{
"name": "mod_id",
"ordinal": 1,
"type_info": "Int8"
}
],
"nullable": [
false,
false
],
"parameters": {
"Left": [
"TextArray",
"ByteaArray",
"Text",
"TextArray"
]
}
},
"query": "\n SELECT h.hash, v.mod_id FROM hashes h\n INNER JOIN files f ON h.file_id = f.id\n INNER JOIN versions v ON v.id = f.version_id AND v.status != ANY($1)\n INNER JOIN mods m on v.mod_id = m.id\n WHERE h.algorithm = $3 AND h.hash = ANY($2::bytea[]) AND m.status != ANY($4)\n "
},
"9348309884811e8b22f33786ae7c0f259f37f3c90e545f00761a641570107160": {
"describe": {
"columns": [
@@ -4752,6 +4871,33 @@
},
"query": "SELECT EXISTS(SELECT 1 FROM reports WHERE id=$1)"
},
"980e2ebd1b77baecff5b302b063d8f359ddbdb68452c4c8f2a53dc8d6a2127a4": {
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Int8"
},
{
"name": "team_id",
"ordinal": 1,
"type_info": "Int8"
}
],
"nullable": [
false,
false
],
"parameters": {
"Left": [
"Int8Array",
"Int8"
]
}
},
"query": "\n SELECT m.id id, m.team_id team_id FROM team_members tm\n INNER JOIN mods m ON m.team_id = tm.team_id\n WHERE tm.team_id = ANY($1) AND tm.user_id = $2\n "
},
"99a1eac69d7f5a5139703df431e6a5c3012a90143a8c635f93632f04d0bc41d4": {
"describe": {
"columns": [],


@@ -117,7 +117,7 @@ pub struct TeamId(pub i64);
#[sqlx(transparent)]
pub struct TeamMemberId(pub i64);
#[derive(Copy, Clone, Debug, Type, PartialEq, Eq, Deserialize)]
#[derive(Copy, Clone, Debug, Type, PartialEq, Eq, Deserialize, Hash)]
#[sqlx(transparent)]
pub struct ProjectId(pub i64);
#[derive(Copy, Clone, Debug, Type)]


@@ -518,6 +518,56 @@ impl Version {
Ok(vec)
}
pub async fn get_projects_versions<'a, E>(
project_ids: Vec<ProjectId>,
game_versions: Option<Vec<String>>,
loaders: Option<Vec<String>>,
version_type: Option<VersionType>,
limit: Option<u32>,
offset: Option<u32>,
exec: E,
) -> Result<HashMap<ProjectId, Vec<VersionId>>, sqlx::Error>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
use futures::stream::TryStreamExt;
let vec = sqlx::query!(
"
SELECT DISTINCT ON(v.date_published, v.id) version_id, v.mod_id, v.date_published FROM versions v
INNER JOIN game_versions_versions gvv ON gvv.joining_version_id = v.id
INNER JOIN game_versions gv on gvv.game_version_id = gv.id AND (cardinality($2::varchar[]) = 0 OR gv.version = ANY($2::varchar[]))
INNER JOIN loaders_versions lv ON lv.version_id = v.id
INNER JOIN loaders l on lv.loader_id = l.id AND (cardinality($3::varchar[]) = 0 OR l.loader = ANY($3::varchar[]))
WHERE v.mod_id = ANY($1) AND ($4::varchar IS NULL OR v.version_type = $4)
ORDER BY v.date_published, v.id ASC
LIMIT $5 OFFSET $6
",
&project_ids.into_iter().map(|x| x.0).collect::<Vec<i64>>(),
&game_versions.unwrap_or_default(),
&loaders.unwrap_or_default(),
version_type.map(|x| x.as_str()),
limit.map(|x| x as i64),
offset.map(|x| x as i64),
)
.fetch_many(exec)
.try_filter_map(|e| async { Ok(e.right().map(|v| (ProjectId(v.mod_id), VersionId(v.version_id)))) })
.try_collect::<Vec<(ProjectId, VersionId)>>()
.await?;
let mut map: HashMap<ProjectId, Vec<VersionId>> = HashMap::new();
for (project_id, version_id) in vec {
if let Some(value) = map.get_mut(&project_id) {
value.push(version_id);
} else {
map.insert(project_id, vec![version_id]);
}
}
Ok(map)
}
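A minimal sketch of how a caller might use this new batched helper (the pool handle, ids, and filter values below are illustrative assumptions; update_files later in this diff is the real call site):

    // One round trip for many projects instead of one get_project_versions call per project.
    let versions_by_project = Version::get_projects_versions(
        vec![ProjectId(1), ProjectId(2)],
        Some(vec!["1.19.2".to_string()]), // game version filter
        Some(vec!["fabric".to_string()]), // loader filter
        None,                             // any version type
        None,                             // no limit
        None,                             // no offset
        &pool,                            // assumed &sqlx::PgPool
    )
    .await?;
    // Rows come back ordered by date_published ASC, so the newest version per
    // project is the last entry of its Vec.
    let latest = versions_by_project
        .get(&ProjectId(1))
        .and_then(|ids| ids.last());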
pub async fn get<'a, 'b, E>(
id: VersionId,
executor: E,


@@ -1,4 +1,5 @@
use crate::file_hosting::FileHostingError;
use base64::Engine;
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize, Debug, Clone)]
@@ -35,7 +36,10 @@ pub async fn authorize_account(
application_key: &str,
) -> Result<AuthorizationData, FileHostingError> {
let combined_key = format!("{key_id}:{application_key}");
let formatted_key = format!("Basic {}", base64::encode(combined_key));
let formatted_key = format!(
"Basic {}",
base64::engine::general_purpose::STANDARD.encode(combined_key)
);
let response = reqwest::Client::new()
.get("https://api.backblazeb2.com/b2api/v2/b2_authorize_account")


@@ -1,4 +1,5 @@
use crate::routes::ApiError;
use base64::Engine;
use chrono::{DateTime, Duration, Utc};
use rust_decimal::Decimal;
use serde::{Deserialize, Serialize};
@@ -51,7 +52,10 @@ impl PayoutsQueue {
dotenvy::var("PAYPAL_CLIENT_ID")?,
dotenvy::var("PAYPAL_CLIENT_SECRET")?
);
let formatted_key = format!("Basic {}", base64::encode(combined_key));
let formatted_key = format!(
"Basic {}",
base64::engine::general_purpose::STANDARD.encode(combined_key)
);
let mut form = HashMap::new();
form.insert("grant_type", "client_credentials");
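Both hunks above follow the same base64 0.21 migration: the crate-level encode helper is deprecated in favour of an explicit engine, and the Engine trait has to be in scope for .encode() to resolve. A minimal sketch of the pattern (credentials are placeholders):

    use base64::Engine; // brings the encode()/decode() trait methods into scope

    let combined_key = "key_id:application_key"; // placeholder credentials
    // base64 <= 0.20: base64::encode(combined_key)
    // base64 0.21:    pick an engine explicitly
    let formatted_key = format!(
        "Basic {}",
        base64::engine::general_purpose::STANDARD.encode(combined_key)
    );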


@@ -108,11 +108,13 @@ pub async fn process_payout(
pool: web::Data<PgPool>,
data: web::Json<PayoutData>,
) -> Result<HttpResponse, ApiError> {
let start = data
.date
.date()
.and_hms_nano(0, 0, 0, 0)
.with_timezone(&Utc);
let start: DateTime<Utc> = DateTime::from_utc(
data.date
.date_naive()
.and_hms_nano_opt(0, 0, 0, 0)
.unwrap_or_default(),
Utc,
);
let client = reqwest::Client::new();
let mut transaction = pool.begin().await?;


@@ -267,7 +267,8 @@ pub async fn handle_stripe_webhook(
if let Some(item) = invoice.lines.data.first() {
let expires: DateTime<Utc> = DateTime::from_utc(
NaiveDateTime::from_timestamp(item.period.end, 0),
NaiveDateTime::from_timestamp_opt(item.period.end, 0)
.unwrap_or_default(),
Utc,
) + Duration::days(1);
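This hunk, the payout route above, and the validator files at the end of this diff all track the same chrono 0.4.23 change: the panicking constructors (NaiveDateTime::from_timestamp, Date::and_hms_nano, DateTime::date) are deprecated in favour of Option-returning variants. A minimal illustration of the pattern (the timestamp is arbitrary):

    use chrono::{DateTime, NaiveDateTime, Utc};

    // Old: NaiveDateTime::from_timestamp(1_540_122_067, 0) -- panics on out-of-range input.
    // New: the *_opt variant returns Option and the caller decides how to handle None.
    let dt: DateTime<Utc> = DateTime::from_utc(
        NaiveDateTime::from_timestamp_opt(1_540_122_067, 0)
            .unwrap_or_default(), // or .unwrap() where the constant is known-good
        Utc,
    );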


@@ -9,15 +9,18 @@ use crate::models::projects::{
use crate::models::teams::Permissions;
use crate::routes::ApiError;
use crate::search::{search_for_project, SearchConfig, SearchError};
use crate::util::auth::{get_user_from_headers, is_authorized};
use crate::util::auth::{
filter_authorized_projects, get_user_from_headers, is_authorized,
};
use crate::util::routes::read_from_payload;
use crate::util::validate::validation_errors_to_string;
use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
use chrono::{DateTime, Utc};
use futures::{StreamExt, TryStreamExt};
use futures::TryStreamExt;
use meilisearch_sdk::indexes::IndexesResults;
use serde::{Deserialize, Serialize};
use serde_json::json;
use sqlx::{PgPool, Row};
use sqlx::PgPool;
use std::sync::Arc;
use validator::Validate;
@@ -91,16 +94,8 @@ pub async fn projects_get(
let user_option = get_user_from_headers(req.headers(), &**pool).await.ok();
let projects: Vec<_> = futures::stream::iter(projects_data)
.filter_map(|data| async {
if is_authorized(&data.inner, &user_option, &pool).await.ok()? {
Some(Project::from(data))
} else {
None
}
})
.collect()
.await;
let projects =
filter_authorized_projects(projects_data, &user_option, &pool).await?;
Ok(HttpResponse::Ok().json(projects))
}
@@ -216,26 +211,25 @@ pub async fn dependency_list(
use futures::stream::TryStreamExt;
//TODO: This query is not checked at compile time! Once SQLX parses this query correctly, please use the query! macro instead
let dependencies = sqlx::query(
let dependencies = sqlx::query!(
"
SELECT d.dependency_id, vd.mod_id, d.mod_dependency_id
SELECT d.dependency_id, COALESCE(vd.mod_id, 0) mod_id, d.mod_dependency_id
FROM versions v
INNER JOIN dependencies d ON d.dependent_id = v.id
LEFT JOIN versions vd ON d.dependency_id = vd.id
WHERE v.mod_id = $1
",
id as database::models::ProjectId
)
.bind(id as database::models::ProjectId)
.fetch_many(&**pool)
.try_filter_map(|e| async {
Ok(e.right().map(|x| {
(
x.get::<Option<i64>, usize>(0)
x.dependency_id
.map(database::models::VersionId),
x.get::<Option<i64>, usize>(1)
.map(database::models::ProjectId),
x.get::<Option<i64>, usize>(2)
if x.mod_id == Some(0) { None } else { x.mod_id
.map(database::models::ProjectId) },
x.mod_dependency_id
.map(database::models::ProjectId),
)
}))
@@ -262,19 +256,20 @@ pub async fn dependency_list(
})
.collect::<Vec<_>>();
let (projects_result, versions_result) = futures::join!(
database::Project::get_many_full(&project_ids, &**pool,),
let (projects_result, versions_result) = futures::future::try_join(
database::Project::get_many_full(&project_ids, &**pool),
database::Version::get_many_full(
dependencies.iter().filter_map(|x| x.0).collect(),
&**pool,
)
);
),
)
.await?;
let mut projects = projects_result?
let mut projects = projects_result
.into_iter()
.map(models::projects::Project::from)
.collect::<Vec<_>>();
let mut versions = versions_result?
let mut versions = versions_result
.into_iter()
.map(models::projects::Version::from)
.collect::<Vec<_>>();
@@ -2372,9 +2367,9 @@ pub async fn delete_from_index(
let client =
meilisearch_sdk::client::Client::new(&*config.address, &*config.key);
let indexes: Vec<meilisearch_sdk::indexes::Index> =
client.get_indexes().await?;
for index in indexes {
let indexes: IndexesResults = client.get_indexes().await?;
for index in indexes.results {
index.delete_document(id.to_string()).await?;
}
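One note on the dependency_list rewrite earlier in this file: selecting COALESCE(vd.mod_id, 0) is presumably what lets the compile-time-checked query! macro replace the unchecked sqlx::query + Row::get calls (per the removed TODO, sqlx previously could not parse this query), with 0 acting as a NULL sentinel that is mapped back to None on the Rust side. A sketch of that mapping, using a hypothetical row value:

    // row.mod_id is Option<i64> as far as the macro is concerned; the COALESCE means the
    // column is never NULL in practice, so 0 stands in for "no project".
    let project_id = match row.mod_id {
        Some(0) | None => None,
        Some(id) => Some(database::models::ProjectId(id)),
    };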


@@ -18,7 +18,8 @@ pub async fn get_stats(
.map(|x| x.to_string())
.collect::<Vec<String>>(),
)
.fetch_one(&**pool);
.fetch_one(&**pool)
.await?;
let versions = sqlx::query!(
"
@@ -36,7 +37,8 @@ pub async fn get_stats(
.map(|x| x.to_string())
.collect::<Vec<String>>(),
)
.fetch_one(&**pool);
.fetch_one(&**pool)
.await?;
let authors = sqlx::query!(
"
@@ -50,7 +52,8 @@ pub async fn get_stats(
.map(|x| x.to_string())
.collect::<Vec<String>>(),
)
.fetch_one(&**pool);
.fetch_one(&**pool)
.await?;
let files = sqlx::query!(
"
@@ -67,10 +70,8 @@ pub async fn get_stats(
.map(|x| x.to_string())
.collect::<Vec<String>>(),
)
.fetch_one(&**pool);
let (projects, versions, authors, files) =
futures::future::try_join4(projects, versions, authors, files).await?;
.fetch_one(&**pool)
.await?;
let json = json!({
"projects": projects.count,


@@ -5,11 +5,10 @@ use serde::Serialize;
use sqlx::PgPool;
use crate::database;
use crate::models::projects::{Version, VersionType};
use crate::models::projects::VersionType;
use crate::util::auth::{
get_user_from_headers, is_authorized, is_authorized_version,
filter_authorized_versions, get_user_from_headers, is_authorized,
};
use futures::StreamExt;
use super::ApiError;
@@ -48,22 +47,10 @@ pub async fn forge_updates(
let versions =
database::models::Version::get_many_full(version_ids, &**pool).await?;
let mut versions = futures::stream::iter(versions)
.filter_map(|data| async {
if is_authorized_version(&data.inner, &user_option, &pool)
.await
.ok()?
{
Some(data)
} else {
None
}
})
.collect::<Vec<_>>()
.await;
let mut versions =
filter_authorized_versions(versions, &user_option, &pool).await?;
versions
.sort_by(|a, b| b.inner.date_published.cmp(&a.inner.date_published));
versions.sort_by(|a, b| b.date_published.cmp(&a.date_published));
#[derive(Serialize)]
struct ForgeUpdates {
@@ -81,8 +68,6 @@ pub async fn forge_updates(
};
for version in versions {
let version = Version::from(version);
if version.version_type == VersionType::Release {
for game_version in &version.game_versions {
response


@@ -7,11 +7,11 @@ use crate::util::auth::get_user_from_headers;
use crate::util::routes::ok_or_not_found;
use crate::{database, models};
use actix_web::{delete, get, post, web, HttpRequest, HttpResponse};
use futures::TryStreamExt;
use itertools::Itertools;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use std::collections::HashMap;
use tokio::sync::RwLock;
#[derive(Deserialize)]
pub struct HashQuery {
@@ -460,7 +460,7 @@ pub async fn update_files(
let result = sqlx::query!(
"
SELECT f.url url, h.hash hash, h.algorithm algorithm, f.version_id version_id, v.mod_id project_id FROM hashes h
SELECT h.hash, v.mod_id FROM hashes h
INNER JOIN files f ON h.file_id = f.id
INNER JOIN versions v ON v.id = f.version_id AND v.status != ANY($1)
INNER JOIN mods m on v.mod_id = m.id
@@ -471,49 +471,52 @@ pub async fn update_files(
update_data.algorithm,
&*crate::models::projects::ProjectStatus::iterator().filter(|x| x.is_hidden()).map(|x| x.to_string()).collect::<Vec<String>>(),
)
.fetch_all(&mut *transaction)
.fetch_many(&mut *transaction)
.try_filter_map(|e| async {
Ok(e.right().map(|m| (m.hash, database::models::ids::ProjectId(m.mod_id))))
})
.try_collect::<Vec<_>>()
.await?;
let version_ids: RwLock<HashMap<database::models::VersionId, Vec<u8>>> =
RwLock::new(HashMap::new());
let mut version_ids: HashMap<database::models::VersionId, Vec<u8>> =
HashMap::new();
futures::future::try_join_all(result.into_iter().map(|row| async {
let updated_versions = database::models::Version::get_project_versions(
database::models::ProjectId(row.project_id),
Some(
update_data
.game_versions
.clone()
.iter()
.map(|x| x.0.clone())
.collect(),
),
Some(
update_data
.loaders
.clone()
.iter()
.map(|x| x.0.clone())
.collect(),
),
None,
None,
None,
&**pool,
)
.await?;
if let Some(latest_version) = updated_versions.first() {
let mut version_ids = version_ids.write().await;
version_ids.insert(*latest_version, row.hash);
}
Ok::<(), ApiError>(())
}))
let updated_versions = database::models::Version::get_projects_versions(
result
.iter()
.map(|x| x.1)
.collect::<Vec<database::models::ProjectId>>()
.clone(),
Some(
update_data
.game_versions
.clone()
.iter()
.map(|x| x.0.clone())
.collect(),
),
Some(
update_data
.loaders
.clone()
.iter()
.map(|x| x.0.clone())
.collect(),
),
None,
None,
None,
&**pool,
)
.await?;
let version_ids = version_ids.into_inner();
for (hash, id) in result {
if let Some(latest_version) =
updated_versions.get(&id).and_then(|x| x.last())
{
version_ids.insert(*latest_version, hash);
}
}
let versions = database::models::Version::get_many_full(
version_ids.keys().copied().collect(),
@@ -533,8 +536,7 @@ pub async fn update_files(
models::projects::Version::from(version),
);
} else {
let version_id: models::projects::VersionId =
version.inner.id.into();
let version_id: VersionId = version.inner.id.into();
return Err(ApiError::Database(DatabaseError::Other(format!(
"Could not parse hash for version {version_id}"


@@ -2,16 +2,16 @@ use super::ApiError;
use crate::database;
use crate::models;
use crate::models::projects::{
Dependency, FileType, Version, VersionStatus, VersionType,
Dependency, FileType, VersionStatus, VersionType,
};
use crate::models::teams::Permissions;
use crate::util::auth::{
get_user_from_headers, is_authorized, is_authorized_version,
filter_authorized_versions, get_user_from_headers, is_authorized,
is_authorized_version,
};
use crate::util::validate::validation_errors_to_string;
use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
use chrono::{DateTime, Utc};
use futures::StreamExt;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use validator::Validate;
@@ -70,23 +70,16 @@ pub async fn version_list(
database::models::Version::get_many_full(version_ids, &**pool)
.await?;
let mut response = futures::stream::iter(versions.clone())
.filter_map(|data| async {
if is_authorized_version(&data.inner, &user_option, &pool)
.await
.ok()?
&& filters
.featured
.map(|featured| featured == data.inner.featured)
.unwrap_or(true)
{
Some(Version::from(data))
} else {
None
}
let mut response = versions
.iter()
.filter(|version| {
filters
.featured
.map(|featured| featured == version.inner.featured)
.unwrap_or(true)
})
.collect::<Vec<_>>()
.await;
.cloned()
.collect::<Vec<_>>();
versions.sort_by(|a, b| {
b.inner.date_published.cmp(&a.inner.date_published)
@@ -97,16 +90,15 @@ pub async fn version_list(
&& !versions.is_empty()
&& filters.featured.unwrap_or(false)
{
let (loaders, game_versions) = futures::join!(
let (loaders, game_versions) = futures::future::try_join(
database::models::categories::Loader::list(&**pool),
database::models::categories::GameVersion::list_filter(
None,
Some(true),
&**pool
)
);
let (loaders, game_versions) = (loaders?, game_versions?);
&**pool,
),
)
.await?;
let mut joined_filters = Vec::new();
for game_version in &game_versions {
@@ -122,21 +114,24 @@ pub async fn version_list(
version.game_versions.contains(&filter.0.version)
&& version.loaders.contains(&filter.1.loader)
})
.map(|version| {
response.push(Version::from(version.clone()))
})
.map(|version| response.push(version.clone()))
.unwrap_or(());
});
if response.is_empty() {
versions
.into_iter()
.for_each(|version| response.push(Version::from(version)));
.for_each(|version| response.push(version));
}
}
response.sort_by(|a, b| b.date_published.cmp(&a.date_published));
response.dedup_by(|a, b| a.id == b.id);
response.sort_by(|a, b| {
b.inner.date_published.cmp(&a.inner.date_published)
});
response.dedup_by(|a, b| a.inner.id == b.inner.id);
let response =
filter_authorized_versions(response, &user_option, &pool).await?;
Ok(HttpResponse::Ok().json(response))
} else {
@@ -190,19 +185,8 @@ pub async fn versions_get(
let user_option = get_user_from_headers(req.headers(), &**pool).await.ok();
let versions: Vec<_> = futures::stream::iter(versions_data)
.filter_map(|data| async {
if is_authorized_version(&data.inner, &user_option, &pool)
.await
.ok()?
{
Some(Version::from(data))
} else {
None
}
})
.collect()
.await;
let versions =
filter_authorized_versions(versions_data, &user_option, &pool).await?;
Ok(HttpResponse::Ok().json(versions))
}


@@ -4,7 +4,6 @@ use actix_web::http::StatusCode;
use actix_web::HttpResponse;
use chrono::{DateTime, Utc};
use meilisearch_sdk::client::Client;
use meilisearch_sdk::document::Document;
use serde::{Deserialize, Serialize};
use std::borrow::Cow;
use std::cmp::min;
@@ -137,22 +136,6 @@ pub struct ResultSearchProject {
pub color: Option<u32>,
}
impl Document for UploadSearchProject {
type UIDType = String;
fn get_uid(&self) -> &Self::UIDType {
&self.project_id
}
}
impl Document for ResultSearchProject {
type UIDType = String;
fn get_uid(&self) -> &Self::UIDType {
&self.project_id
}
}
pub async fn search_for_project(
info: &SearchRequest,
config: &SearchConfig,
@@ -240,8 +223,8 @@ pub async fn search_for_project(
Ok(SearchResults {
hits: results.hits.into_iter().map(|r| r.result).collect(),
offset: results.offset,
limit: results.limit,
total_hits: results.nb_hits,
offset: results.offset.unwrap_or_default(),
limit: results.limit.unwrap_or_default(),
total_hits: results.estimated_total_hits.unwrap_or_default(),
})
}
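The removed Document impls and the renamed result fields both come from the meilisearch-sdk 0.15 to 0.22 jump: documents are now plain Serialize/Deserialize types with the primary key configured on the index, and SearchResults exposes offset/limit/estimated_total_hits as Options (nb_hits is gone). A rough sketch of the 0.22-style setup, with placeholder index and key names rather than labrinth's real ones:

    let client = meilisearch_sdk::client::Client::new("http://localhost:7700", "modrinth");
    // The primary key now lives on the index instead of a Document::get_uid impl.
    client.create_index("projects_placeholder", Some("project_id")).await?;
    client
        .index("projects_placeholder")
        .add_documents(&upload_docs, Some("project_id")) // upload_docs: &[UploadSearchProject], assumed
        .await?;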


@@ -1,4 +1,6 @@
use crate::database;
use crate::database::models::project_item::QueryProject;
use crate::database::models::version_item::QueryVersion;
use crate::database::{models, Project, Version};
use crate::models::users::{Role, User, UserId, UserPayoutData};
use crate::routes::ApiError;
@@ -161,6 +163,69 @@ pub async fn is_authorized(
Ok(authorized)
}
pub async fn filter_authorized_projects(
projects: Vec<QueryProject>,
user_option: &Option<User>,
pool: &web::Data<PgPool>,
) -> Result<Vec<crate::models::projects::Project>, ApiError> {
let mut return_projects = Vec::new();
let mut check_projects = Vec::new();
for project in projects {
if !project.inner.status.is_hidden()
|| user_option
.as_ref()
.map(|x| x.role.is_mod())
.unwrap_or(false)
{
return_projects.push(project.into());
} else if user_option.is_some() {
check_projects.push(project);
}
}
if !check_projects.is_empty() {
if let Some(user) = user_option {
let user_id: models::ids::UserId = user.id.into();
use futures::TryStreamExt;
sqlx::query!(
"
SELECT m.id id, m.team_id team_id FROM team_members tm
INNER JOIN mods m ON m.team_id = tm.team_id
WHERE tm.team_id = ANY($1) AND tm.user_id = $2
",
&check_projects
.iter()
.map(|x| x.inner.team_id.0)
.collect::<Vec<_>>(),
user_id as database::models::ids::UserId,
)
.fetch_many(&***pool)
.try_for_each(|e| {
if let Some(row) = e.right() {
check_projects.retain(|x| {
let bool = x.inner.id.0 == row.id
&& x.inner.team_id.0 == row.team_id;
if bool {
return_projects.push(x.clone().into());
}
!bool
});
}
futures::future::ready(Ok(()))
})
.await?;
}
}
Ok(return_projects)
}
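Callers switch from filtering one project at a time with is_authorized to this helper, which resolves team membership for every hidden project with the single query above. A minimal sketch of the call pattern (request and pool wiring assumed, mirroring projects_get earlier in this diff):

    let user_option = get_user_from_headers(req.headers(), &**pool).await.ok();
    // One team_members query covers all hidden projects, instead of one
    // is_authorized() round trip per project.
    let projects = filter_authorized_projects(projects_data, &user_option, &pool).await?;
    Ok(HttpResponse::Ok().json(projects))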
pub async fn is_authorized_version(
version_data: &Version,
user_option: &Option<User>,
@@ -191,3 +256,61 @@ pub async fn is_authorized_version(
Ok(authorized)
}
pub async fn filter_authorized_versions(
versions: Vec<QueryVersion>,
user_option: &Option<User>,
pool: &web::Data<PgPool>,
) -> Result<Vec<crate::models::projects::Version>, ApiError> {
let mut return_versions = Vec::new();
let mut check_versions = Vec::new();
for version in versions {
if !version.inner.status.is_hidden()
|| user_option
.as_ref()
.map(|x| x.role.is_mod())
.unwrap_or(false)
{
return_versions.push(version.into());
} else if user_option.is_some() {
check_versions.push(version);
}
}
if !check_versions.is_empty() {
if let Some(user) = user_option {
let user_id: models::ids::UserId = user.id.into();
use futures::TryStreamExt;
sqlx::query!(
"
SELECT m.id FROM mods m
INNER JOIN team_members tm ON tm.team_id = m.team_id AND user_id = $2
WHERE m.id = ANY($1)
",
&check_versions.iter().map(|x| x.inner.project_id.0).collect::<Vec<_>>(),
user_id as database::models::ids::UserId,
)
.fetch_many(&***pool)
.try_for_each(|e| {
if let Some(row) = e.right() {
check_versions.retain(|x| {
let bool = x.inner.project_id.0 == row.id;
if bool {
return_versions.push(x.clone().into());
}
!bool
});
}
futures::future::ready(Ok(()))
}).await?;
}
}
Ok(return_versions)
}


@@ -23,7 +23,7 @@ impl super::Validator for FabricValidator {
fn get_supported_game_versions(&self) -> SupportedGameVersions {
// Time since release of 18w49a, the first fabric version
SupportedGameVersions::PastDate(DateTime::from_utc(
NaiveDateTime::from_timestamp(1543969469, 0),
NaiveDateTime::from_timestamp_opt(1543969469, 0).unwrap(),
Utc,
))
}


@@ -23,7 +23,7 @@ impl super::Validator for ForgeValidator {
fn get_supported_game_versions(&self) -> SupportedGameVersions {
// Time since release of 1.13, the first forge version which uses the new TOML system
SupportedGameVersions::PastDate(DateTime::<Utc>::from_utc(
NaiveDateTime::from_timestamp(1540122067, 0),
NaiveDateTime::from_timestamp_opt(1540122067, 0).unwrap(),
Utc,
))
}
@@ -69,11 +69,11 @@ impl super::Validator for LegacyForgeValidator {
// Times between versions 1.5.2 to 1.12.2, which all use the legacy way of defining mods
SupportedGameVersions::Range(
DateTime::from_utc(
NaiveDateTime::from_timestamp(1366818300, 0),
NaiveDateTime::from_timestamp_opt(1366818300, 0).unwrap(),
Utc,
),
DateTime::from_utc(
NaiveDateTime::from_timestamp(1505810340, 0),
NaiveDateTime::from_timestamp_opt(1505810340, 0).unwrap(),
Utc,
),
)


@@ -22,7 +22,7 @@ impl super::Validator for QuiltValidator {
fn get_supported_game_versions(&self) -> SupportedGameVersions {
SupportedGameVersions::PastDate(DateTime::from_utc(
NaiveDateTime::from_timestamp(1646070100, 0),
NaiveDateTime::from_timestamp_opt(1646070100, 0).unwrap(),
Utc,
))
}


@@ -23,7 +23,7 @@ impl super::Validator for PackValidator {
fn get_supported_game_versions(&self) -> SupportedGameVersions {
// Time since release of 13w24a which replaced texture packs with resource packs
SupportedGameVersions::PastDate(DateTime::from_utc(
NaiveDateTime::from_timestamp(1371137542, 0),
NaiveDateTime::from_timestamp_opt(1371137542, 0).unwrap(),
Utc,
))
}
@@ -61,11 +61,11 @@ impl super::Validator for TexturePackValidator {
// a1.2.2a to 13w23b
SupportedGameVersions::Range(
DateTime::from_utc(
NaiveDateTime::from_timestamp(1289339999, 0),
NaiveDateTime::from_timestamp_opt(1289339999, 0).unwrap(),
Utc,
),
DateTime::from_utc(
NaiveDateTime::from_timestamp(1370651522, 0),
NaiveDateTime::from_timestamp_opt(1370651522, 0).unwrap(),
Utc,
),
)