Search overhaul (#771)
* started work; switching context
* working!
* fmt clippy prepare
* fixes
* fixes
* revs
* merge fixes
* changed comments
* merge issues
@@ -1,6 +1,7 @@
use itertools::Itertools;
use serde::{Deserialize, Serialize};

use crate::search::ResultSearchProject;
use crate::{routes::v2_reroute, search::ResultSearchProject};

#[derive(Serialize, Deserialize, Debug)]
pub struct LegacySearchResults {
@@ -44,7 +45,12 @@ impl LegacyResultSearchProject {
        if categories.contains(&"mrpack".to_string()) {
            if let Some(mrpack_loaders) = result_search_project.loader_fields.get("mrpack_loaders")
            {
                categories.extend(mrpack_loaders.clone());
                categories.extend(
                    mrpack_loaders
                        .iter()
                        .filter_map(|c| c.as_str())
                        .map(String::from),
                );
                categories.retain(|c| c != "mrpack");
            }
        }
@@ -52,7 +58,12 @@ impl LegacyResultSearchProject {
        if display_categories.contains(&"mrpack".to_string()) {
            if let Some(mrpack_loaders) = result_search_project.loader_fields.get("mrpack_loaders")
            {
                display_categories.extend(mrpack_loaders.clone());
                display_categories.extend(
                    mrpack_loaders
                        .iter()
                        .filter_map(|c| c.as_str())
                        .map(String::from),
                );
                display_categories.retain(|c| c != "mrpack");
            }
        }
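Note: the new code treats loader_fields values as serde_json::Value, so the old categories.extend(mrpack_loaders.clone()) no longer yields Strings; the replacement converts each JSON value through as_str before extending. A minimal standalone sketch of that conversion, with hypothetical data, assuming serde_json and the HashMap<String, Vec<serde_json::Value>> shape used above:

use std::collections::HashMap;
use serde_json::{json, Value};

fn main() {
    // Hypothetical loader_fields shaped like the search result's map.
    let mut loader_fields: HashMap<String, Vec<Value>> = HashMap::new();
    loader_fields.insert("mrpack_loaders".to_string(), vec![json!("fabric"), json!("quilt")]);

    let mut categories = vec!["mrpack".to_string(), "adventure".to_string()];

    if categories.contains(&"mrpack".to_string()) {
        if let Some(mrpack_loaders) = loader_fields.get("mrpack_loaders") {
            // Values are serde_json::Value, so convert them to owned Strings,
            // skipping anything that is not a JSON string.
            categories.extend(
                mrpack_loaders
                    .iter()
                    .filter_map(|v| v.as_str())
                    .map(String::from),
            );
            // The generic "mrpack" marker is replaced by the concrete loaders.
            categories.retain(|c| c != "mrpack");
        }
    }

    assert_eq!(categories, vec!["adventure", "fabric", "quilt"]);
}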
@@ -84,25 +95,44 @@ impl LegacyResultSearchProject {
            project_type
        };

        let loader_fields = result_search_project.loader_fields.clone();
        let get_one_bool_loader_field = |key: &str| {
            loader_fields
                .get(key)
                .cloned()
                .unwrap_or_default()
                .first()
                .and_then(|s| s.as_bool())
        };

        let singleplayer = get_one_bool_loader_field("singleplayer");
        let client_only = get_one_bool_loader_field("client_only").unwrap_or(false);
        let server_only = get_one_bool_loader_field("server_only").unwrap_or(false);
        let client_and_server = get_one_bool_loader_field("client_and_server");

        let (client_side, server_side) = v2_reroute::convert_side_types_v2_bools(
            singleplayer,
            client_only,
            server_only,
            client_and_server,
        );
        let client_side = client_side.to_string();
        let server_side = server_side.to_string();

        let versions = result_search_project
            .loader_fields
            .get("game_versions")
            .cloned()
            .unwrap_or_default()
            .into_iter()
            .filter_map(|s| s.as_str().map(String::from))
            .collect_vec();

        Self {
            project_type,
            client_side: result_search_project
                .loader_fields
                .get("client_side")
                .cloned()
                .unwrap_or_default()
                .join(","),
            server_side: result_search_project
                .loader_fields
                .get("server_side")
                .cloned()
                .unwrap_or_default()
                .join(","),
            versions: result_search_project
                .loader_fields
                .get("game_versions")
                .cloned()
                .unwrap_or_default(),
            client_side,
            server_side,
            versions,
            latest_version: result_search_project.version_id,
            categories,
@@ -110,11 +140,11 @@ impl LegacyResultSearchProject {
            slug: result_search_project.slug,
            author: result_search_project.author,
            title: result_search_project.name,
            description: result_search_project.description,
            description: result_search_project.summary,
            display_categories,
            downloads: result_search_project.downloads,
            follows: result_search_project.follows,
            icon_url: result_search_project.icon_url,
            icon_url: result_search_project.icon_url.unwrap_or_default(),
            license: result_search_project.license,
            date_created: result_search_project.date_created,
            date_modified: result_search_project.date_modified,
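Note: the v2 client_side/server_side strings are no longer read straight out of loader_fields; instead optional booleans (singleplayer, client_only, server_only, client_and_server) are extracted and fed to v2_reroute::convert_side_types_v2_bools. A sketch of just the extraction step, where get_one_bool is a hypothetical free-function version of the get_one_bool_loader_field closure from the diff (assumes serde_json; the convert call itself is project code and is not reproduced here):

use std::collections::HashMap;
use serde_json::{json, Value};

// Pull a single boolean out of a loader_fields entry, if present.
fn get_one_bool(loader_fields: &HashMap<String, Vec<Value>>, key: &str) -> Option<bool> {
    loader_fields.get(key)?.first()?.as_bool()
}

fn main() {
    let mut loader_fields: HashMap<String, Vec<Value>> = HashMap::new();
    loader_fields.insert("client_only".to_string(), vec![json!(true)]);
    loader_fields.insert("server_only".to_string(), vec![json!(false)]);

    let singleplayer = get_one_bool(&loader_fields, "singleplayer"); // field absent -> None
    let client_only = get_one_bool(&loader_fields, "client_only").unwrap_or(false);
    let server_only = get_one_bool(&loader_fields, "server_only").unwrap_or(false);
    let client_and_server = get_one_bool(&loader_fields, "client_and_server");

    assert_eq!(singleplayer, None);
    assert!(client_only);
    assert!(!server_only);
    assert_eq!(client_and_server, None);
}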
@@ -1,12 +1,16 @@
use std::collections::{HashMap, HashSet};

use super::ids::base62_impl::parse_base62;
use super::ids::{Base62Id, OrganizationId};
use super::teams::TeamId;
use super::users::UserId;
use crate::database::models::loader_fields::VersionField;
use crate::database::models::project_item::{LinkUrl, QueryProject};
use crate::database::models::version_item::QueryVersion;
use crate::models::threads::ThreadId;
use crate::search::ResultSearchProject;
use chrono::{DateTime, Utc};
use itertools::Itertools;
use serde::{Deserialize, Serialize};
use validator::Validate;
@@ -119,30 +123,38 @@ fn remove_duplicates(values: Vec<serde_json::Value>) -> Vec<serde_json::Value> {
        .collect()
}

// This is a helper function to convert a list of VersionFields into a HashMap of field name to vecs of values
// This allows for removal of duplicates
pub fn from_duplicate_version_fields(
    version_fields: Vec<VersionField>,
) -> HashMap<String, Vec<serde_json::Value>> {
    let mut fields: HashMap<String, Vec<serde_json::Value>> = HashMap::new();
    for vf in version_fields {
        // We use a string directly, so we can remove duplicates
        let serialized = if let Some(inner_array) = vf.value.serialize_internal().as_array() {
            inner_array.clone()
        } else {
            vec![vf.value.serialize_internal()]
        };

        // Create the array if it doesn't exist, otherwise push, or if the JSON is an array, extend
        if let Some(arr) = fields.get_mut(&vf.field_name) {
            arr.extend(serialized);
        } else {
            fields.insert(vf.field_name, serialized);
        }
    }

    // Remove duplicates by converting to string and back
    for (_, v) in fields.iter_mut() {
        *v = remove_duplicates(v.clone());
    }
    fields
}
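Note: from_duplicate_version_fields flattens each VersionField into per-field-name buckets of serde_json::Value and then deduplicates by comparing string serializations, as the comments above describe. remove_duplicates itself is only partially visible in this diff; the following is a sketch of one way such a helper could work (not the project's exact implementation; assumes serde_json):

use std::collections::HashSet;
use serde_json::{json, Value};

// Deduplicate JSON values while keeping first-seen order:
// serialize each value to a string and use that string as the uniqueness key.
fn remove_duplicates(values: Vec<Value>) -> Vec<Value> {
    let mut seen = HashSet::new();
    values
        .into_iter()
        .filter(|v| seen.insert(v.to_string()))
        .collect()
}

fn main() {
    let values = vec![json!("fabric"), json!("fabric"), json!(1), json!(1), json!("quilt")];
    let deduped = remove_duplicates(values);
    assert_eq!(deduped, vec![json!("fabric"), json!(1), json!("quilt")]);
}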
impl From<QueryProject> for Project {
    fn from(data: QueryProject) -> Self {
        let mut fields: HashMap<String, Vec<serde_json::Value>> = HashMap::new();
        for vf in data.aggregate_version_fields {
            // We use a string directly, so we can remove duplicates
            let serialized = if let Some(inner_array) = vf.value.serialize_internal().as_array() {
                inner_array.clone()
            } else {
                vec![vf.value.serialize_internal()]
            };

            // Create the array if it doesn't exist, otherwise push, or if the JSON is an array, extend
            if let Some(arr) = fields.get_mut(&vf.field_name) {
                arr.extend(serialized);
            } else {
                fields.insert(vf.field_name, serialized);
            }
        }

        // Remove duplicates by converting to string and back
        for (_, v) in fields.iter_mut() {
            *v = remove_duplicates(v.clone());
        }

        let fields = from_duplicate_version_fields(data.aggregate_version_fields);
        let m = data.inner;
        Self {
            id: m.id.into(),
@@ -221,6 +233,155 @@ impl From<QueryProject> for Project {
    }
}

impl Project {
    // Matches the From<QueryProject> conversion above, but starting from a ResultSearchProject
    pub fn from_search(m: ResultSearchProject) -> Option<Self> {
        let project_id = ProjectId(parse_base62(&m.project_id).ok()?);
        let team_id = TeamId(parse_base62(&m.team_id).ok()?);
        let organization_id = m
            .organization_id
            .and_then(|id| Some(OrganizationId(parse_base62(&id).ok()?)));
        let thread_id = ThreadId(parse_base62(&m.thread_id).ok()?);
        let versions = m
            .versions
            .iter()
            .filter_map(|id| Some(VersionId(parse_base62(id).ok()?)))
            .collect();

        let approved = DateTime::parse_from_rfc3339(&m.date_created).ok()?;
        let published = DateTime::parse_from_rfc3339(&m.date_published).ok()?.into();
        let approved = if approved == published {
            None
        } else {
            Some(approved.into())
        };

        let updated = DateTime::parse_from_rfc3339(&m.date_modified).ok()?.into();
        let queued = m
            .date_queued
            .and_then(|dq| DateTime::parse_from_rfc3339(&dq).ok())
            .map(|d| d.into());

        let status = ProjectStatus::from_string(&m.status);
        let requested_status = m
            .requested_status
            .map(|mrs| ProjectStatus::from_string(&mrs));

        let license_url = m.license_url;
        let icon_url = m.icon_url;
        // Loaders
        let mut loaders = m.loaders;
        let mrpack_loaders_strings = m.loader_fields.get("mrpack_loaders").cloned().map(|v| {
            v.into_iter()
                .filter_map(|v| v.as_str().map(String::from))
                .collect_vec()
        });
        // If the project has a mrpack loader, keep only 'loaders' that are not in the mrpack_loaders
        if let Some(ref mrpack_loaders) = mrpack_loaders_strings {
            loaders.retain(|l| !mrpack_loaders.contains(l));
        }

        // Categories
        let mut categories = m.display_categories.clone();
        categories.retain(|c| !loaders.contains(c));
        if let Some(ref mrpack_loaders) = mrpack_loaders_strings {
            categories.retain(|l| !mrpack_loaders.contains(l));
        }

        // Additional categories
        let mut additional_categories = m.categories.clone();
        additional_categories.retain(|c| !categories.contains(c));
        additional_categories.retain(|c| !loaders.contains(c));
        if let Some(ref mrpack_loaders) = mrpack_loaders_strings {
            additional_categories.retain(|l| !mrpack_loaders.contains(l));
        }
        let games = m.games;

        let monetization_status = m
            .monetization_status
            .as_deref()
            .map(MonetizationStatus::from_string)
            .unwrap_or(MonetizationStatus::Monetized);

        let link_urls = m
            .links
            .into_iter()
            .map(|d| (d.platform_name.clone(), Link::from(d)))
            .collect();

        let gallery = m
            .gallery_items
            .into_iter()
            .map(|x| GalleryItem {
                url: x.image_url,
                featured: x.featured,
                name: x.name,
                description: x.description,
                created: x.created,
                ordering: x.ordering,
            })
            .collect();
        Some(Self {
            id: project_id,
            slug: m.slug,
            project_types: m.project_types,
            games,
            team_id,
            organization: organization_id,
            name: m.name,
            summary: m.summary,
            description: "".to_string(), // Body is potentially huge, do not store in search
            published,
            updated,
            approved,
            queued,
            status,
            requested_status,
            moderator_message: None, // Deprecated
            license: License {
                id: m.license.clone(),
                name: match spdx::Expression::parse(&m.license) {
                    Ok(spdx_expr) => {
                        let mut vec: Vec<&str> = Vec::new();
                        for node in spdx_expr.iter() {
                            if let spdx::expression::ExprNode::Req(req) = node {
                                if let Some(id) = req.req.license.id() {
                                    vec.push(id.full_name);
                                }
                            }
                        }
                        // spdx crate returns AND/OR operations in postfix order
                        // and it would be a lot more effort to make it actually in order
                        // so let's just ignore that and make them comma-separated
                        vec.join(", ")
                    }
                    Err(_) => "".to_string(),
                },
                url: license_url,
            },
            downloads: m.downloads as u32,
            followers: m.follows as u32,
            categories,
            additional_categories,
            loaders,
            versions,
            icon_url,
            link_urls,
            gallery,
            color: m.color,
            thread_id,
            monetization_status,
            fields: m
                .loader_fields
                .into_iter()
                .map(|(k, v)| (k, v.into_iter().collect()))
                .collect(),
        })
    }
}
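Note: the License name above is derived from the stored SPDX identifier. A self-contained rendering of that logic, assuming the spdx crate already used in the diff; as the comments note, operator order is ignored and the full license names are simply joined with commas:

// Turn an SPDX expression such as "MIT OR Apache-2.0" into a human-readable
// name list, following the same approach as from_search above.
fn license_display_name(expression: &str) -> String {
    match spdx::Expression::parse(expression) {
        Ok(spdx_expr) => {
            let mut names: Vec<&str> = Vec::new();
            for node in spdx_expr.iter() {
                if let spdx::expression::ExprNode::Req(req) = node {
                    if let Some(id) = req.req.license.id() {
                        names.push(id.full_name);
                    }
                }
            }
            names.join(", ")
        }
        Err(_) => String::new(),
    }
}

fn main() {
    // Resolves to the two full license names, comma-separated.
    println!("{}", license_display_name("MIT OR Apache-2.0"));
    // An expression that fails to parse falls back to an empty string.
    println!("{:?}", license_display_name("MIT AND"));
}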
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct GalleryItem {
    pub url: String,