Fixes incorrect loader fields (#849)
* loader_fields fix
* tested, fixed
* added direct file check for invalid file_parts
* search fixes
* removed printlns
* Adds check for loaders
* removes println

@@ -836,13 +836,19 @@ impl VersionField {
}

pub fn from_query_json(
// A list of all version fields to extract data from
query_version_field_combined: Vec<QueryVersionField>,
query_loader_fields: &[QueryLoaderField],
// A list of all loader fields to reference when extracting data
// Note: any loader field in here that is not in query_version_field_combined will be still considered
// (For example, game_versions in query_loader_fields but not in query_version_field_combined would produce game_versions: [])
query_loader_fields: &[&QueryLoaderField],
// enum values to reference when parsing enum values
query_loader_field_enum_values: &[QueryLoaderFieldEnumValue],
allow_many: bool, // If true, will allow multiple values for a single singleton field, returning them as separate VersionFields
// allow_many = true, multiple Bools => two VersionFields of Bool
// allow_many = false, multiple Bools => error
// multiple Arraybools => 1 VersionField of ArrayBool
// If true, will allow multiple values for a single singleton field, returning them as separate VersionFields
// allow_many = true, multiple Bools => two VersionFields of Bool
// allow_many = false, multiple Bools => error
// multiple Arraybools => 1 VersionField of ArrayBool
allow_many: bool,
) -> Vec<VersionField> {
query_loader_fields
.iter()
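
The allow_many comment block above describes how repeated values for a singleton field are handled. A minimal, self-contained sketch of that rule, using a simplified stand-in for VersionField (the real types live elsewhere in the crate), might look like this:

// Hypothetical simplified stand-in for VersionField's value; illustrative only.
#[derive(Debug)]
enum FieldValue {
    Bool(bool),
    ArrayBool(Vec<bool>),
}

// allow_many = true: repeated singleton values become separate fields.
// allow_many = false: repeated singleton values are an error.
// Repeated array values always collapse into one array field.
fn collect_bools(values: Vec<bool>, allow_many: bool) -> Result<Vec<FieldValue>, String> {
    match (values.len(), allow_many) {
        (0, _) => Ok(vec![]),
        (1, _) => Ok(vec![FieldValue::Bool(values[0])]),
        (_, true) => Ok(values.into_iter().map(FieldValue::Bool).collect()),
        (_, false) => Err("multiple values for a singleton field".to_string()),
    }
}

fn main() {
    println!("{:?}", collect_bools(vec![true, false], true));   // two Bool fields
    println!("{:?}", collect_bools(vec![true, false], false));  // error
    println!("{:?}", FieldValue::ArrayBool(vec![true, false])); // one ArrayBool field
}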

@@ -607,7 +607,6 @@ impl Project {
)
.await?;

let loader_field_ids = DashSet::new();
let loader_field_enum_value_ids = DashSet::new();
let version_fields: DashMap<ProjectId, Vec<QueryVersionField>> = sqlx::query!(
"

@@ -630,7 +629,6 @@ impl Project {
string_value: m.string_value,
};

loader_field_ids.insert(LoaderFieldId(m.field_id));
if let Some(enum_value) = m.enum_value {
loader_field_enum_value_ids.insert(LoaderFieldEnumValueId(enum_value));
}

@@ -641,27 +639,6 @@ impl Project {
)
.await?;

let loader_fields: Vec<QueryLoaderField> = sqlx::query!(
"
SELECT DISTINCT id, field, field_type, enum_type, min_val, max_val, optional
FROM loader_fields lf
WHERE id = ANY($1)
",
&loader_field_ids.iter().map(|x| x.0).collect::<Vec<_>>()
)
.fetch(&mut *exec)
.map_ok(|m| QueryLoaderField {
id: LoaderFieldId(m.id),
field: m.field,
field_type: m.field_type,
enum_type: m.enum_type.map(LoaderFieldEnumId),
min_val: m.min_val,
max_val: m.max_val,
optional: m.optional,
})
.try_collect()
.await?;

let loader_field_enum_values: Vec<QueryLoaderFieldEnumValue> = sqlx::query!(
"
SELECT DISTINCT id, enum_id, value, ordering, created, metadata

@@ -735,13 +712,22 @@ impl Project {
}
).await?;

type StringTriple = (Vec<String>, Vec<String>, Vec<String>);
let loaders_ptypes_games: DashMap<ProjectId, StringTriple> = sqlx::query!(
#[derive(Default)]
struct VersionLoaderData {
loaders: Vec<String>,
project_types: Vec<String>,
games: Vec<String>,
loader_loader_field_ids: Vec<LoaderFieldId>,
}

let loader_field_ids = DashSet::new();
let loaders_ptypes_games: DashMap<ProjectId, VersionLoaderData> = sqlx::query!(
"
SELECT DISTINCT mod_id,
ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,
ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types,
ARRAY_AGG(DISTINCT g.slug) filter (where g.slug is not null) games
ARRAY_AGG(DISTINCT g.slug) filter (where g.slug is not null) games,
ARRAY_AGG(DISTINCT lfl.loader_field_id) filter (where lfl.loader_field_id is not null) loader_fields
FROM versions v
INNER JOIN loaders_versions lv ON v.id = lv.version_id
INNER JOIN loaders l ON lv.loader_id = l.id

@@ -749,6 +735,7 @@ impl Project {
INNER JOIN project_types pt ON pt.id = lpt.joining_project_type_id
INNER JOIN loaders_project_types_games lptg ON lptg.loader_id = l.id AND lptg.project_type_id = pt.id
INNER JOIN games g ON lptg.game_id = g.id
LEFT JOIN loader_fields_loaders lfl ON lfl.loader_id = l.id
WHERE v.id = ANY($1)
GROUP BY mod_id
",

@@ -756,15 +743,47 @@ impl Project {
).fetch(&mut *exec)
.map_ok(|m| {
let project_id = ProjectId(m.mod_id);
let loaders = m.loaders.unwrap_or_default();
let project_types = m.project_types.unwrap_or_default();
let games = m.games.unwrap_or_default();

(project_id, (loaders, project_types, games))
// Add loader fields to the set we need to fetch
let loader_loader_field_ids = m.loader_fields.unwrap_or_default().into_iter().map(LoaderFieldId).collect::<Vec<_>>();
for loader_field_id in loader_loader_field_ids.iter() {
loader_field_ids.insert(*loader_field_id);
}

// Add loader + loader associated data to the map
let version_loader_data = VersionLoaderData {
loaders: m.loaders.unwrap_or_default(),
project_types: m.project_types.unwrap_or_default(),
games: m.games.unwrap_or_default(),
loader_loader_field_ids,
};

(project_id, version_loader_data)

}
).try_collect().await?;

let loader_fields: Vec<QueryLoaderField> = sqlx::query!(
"
SELECT DISTINCT id, field, field_type, enum_type, min_val, max_val, optional
FROM loader_fields lf
WHERE id = ANY($1)
",
&loader_field_ids.iter().map(|x| x.0).collect::<Vec<_>>()
)
.fetch(&mut *exec)
.map_ok(|m| QueryLoaderField {
id: LoaderFieldId(m.id),
field: m.field,
field_type: m.field_type,
enum_type: m.enum_type.map(LoaderFieldEnumId),
min_val: m.min_val,
max_val: m.max_val,
optional: m.optional,
})
.try_collect()
.await?;

let db_projects: Vec<QueryProject> = sqlx::query!(
"
SELECT m.id id, m.name name, m.summary summary, m.downloads downloads, m.follows follows,

@@ -791,11 +810,21 @@ impl Project {
Ok(e.right().map(|m| {
let id = m.id;
let project_id = ProjectId(id);
let (loaders, project_types, games) = loaders_ptypes_games.remove(&project_id).map(|x| x.1).unwrap_or_default();
let VersionLoaderData {
loaders,
project_types,
games,
loader_loader_field_ids,
} = loaders_ptypes_games.remove(&project_id).map(|x|x.1).unwrap_or_default();
let mut versions = versions.remove(&project_id).map(|x| x.1).unwrap_or_default();
let mut gallery = mods_gallery.remove(&project_id).map(|x| x.1).unwrap_or_default();
let urls = links.remove(&project_id).map(|x| x.1).unwrap_or_default();
let version_fields = version_fields.remove(&project_id).map(|x| x.1).unwrap_or_default();

let loader_fields = loader_fields.iter()
.filter(|x| loader_loader_field_ids.contains(&x.id))
.collect::<Vec<_>>();

QueryProject {
inner: Project {
id: ProjectId(id),
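
The reworked query collects every project's loader-field ids into a shared set while the rows stream in, fetches all referenced QueryLoaderField rows once, and then narrows that list down per project, as in the filter just above. A rough standalone illustration of that final filter step, using hypothetical simplified types in place of the real ones:

use std::collections::HashSet;

// Hypothetical, simplified stand-ins for LoaderFieldId / QueryLoaderField.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct LoaderFieldId(i32);

#[derive(Debug)]
struct QueryLoaderField {
    id: LoaderFieldId,
    field: String,
}

// Keep only the loader fields that this particular project's loaders reference.
fn fields_for_project<'a>(
    all_fields: &'a [QueryLoaderField],
    project_field_ids: &HashSet<LoaderFieldId>,
) -> Vec<&'a QueryLoaderField> {
    all_fields
        .iter()
        .filter(|f| project_field_ids.contains(&f.id))
        .collect()
}

fn main() {
    let all_fields = vec![
        QueryLoaderField { id: LoaderFieldId(1), field: "game_versions".into() },
        QueryLoaderField { id: LoaderFieldId(2), field: "mrpack_loaders".into() },
    ];
    let project_ids: HashSet<_> = [LoaderFieldId(1)].into_iter().collect();
    // Only the game_versions field survives for this project.
    println!("{:?}", fields_for_project(&all_fields, &project_ids));
}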

@@ -510,7 +510,6 @@ impl Version {
}

if !version_ids_parsed.is_empty() {
let loader_field_ids = DashSet::new();
let loader_field_enum_value_ids = DashSet::new();
let version_fields: DashMap<VersionId, Vec<QueryVersionField>> = sqlx::query!(
"

@@ -532,7 +531,6 @@ impl Version {
string_value: m.string_value,
};

loader_field_ids.insert(LoaderFieldId(m.field_id));
if let Some(enum_value) = m.enum_value {
loader_field_enum_value_ids.insert(LoaderFieldEnumValueId(enum_value));
}

@@ -543,6 +541,57 @@ impl Version {
)
.await?;

#[derive(Default)]
struct VersionLoaderData {
loaders: Vec<String>,
project_types: Vec<String>,
games: Vec<String>,
loader_loader_field_ids: Vec<LoaderFieldId>,
}

let loader_field_ids = DashSet::new();
let loaders_ptypes_games: DashMap<VersionId, VersionLoaderData> = sqlx::query!(
"
SELECT DISTINCT version_id,
ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,
ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types,
ARRAY_AGG(DISTINCT g.slug) filter (where g.slug is not null) games,
ARRAY_AGG(DISTINCT lfl.loader_field_id) filter (where lfl.loader_field_id is not null) loader_fields
FROM versions v
INNER JOIN loaders_versions lv ON v.id = lv.version_id
INNER JOIN loaders l ON lv.loader_id = l.id
INNER JOIN loaders_project_types lpt ON lpt.joining_loader_id = l.id
INNER JOIN project_types pt ON pt.id = lpt.joining_project_type_id
INNER JOIN loaders_project_types_games lptg ON lptg.loader_id = l.id AND lptg.project_type_id = pt.id
INNER JOIN games g ON lptg.game_id = g.id
LEFT JOIN loader_fields_loaders lfl ON lfl.loader_id = l.id
WHERE v.id = ANY($1)
GROUP BY version_id
",
&version_ids_parsed
).fetch(&mut *exec)
.map_ok(|m| {
let version_id = VersionId(m.version_id);

// Add loader fields to the set we need to fetch
let loader_loader_field_ids = m.loader_fields.unwrap_or_default().into_iter().map(LoaderFieldId).collect::<Vec<_>>();
for loader_field_id in loader_loader_field_ids.iter() {
loader_field_ids.insert(*loader_field_id);
}

// Add loader + loader associated data to the map
let version_loader_data = VersionLoaderData {
loaders: m.loaders.unwrap_or_default(),
project_types: m.project_types.unwrap_or_default(),
games: m.games.unwrap_or_default(),
loader_loader_field_ids,
};
(version_id,version_loader_data)

}
).try_collect().await?;

// Fetch all loader fields from any version
let loader_fields: Vec<QueryLoaderField> = sqlx::query!(
"
SELECT DISTINCT id, field, field_type, enum_type, min_val, max_val, optional

@@ -588,36 +637,6 @@ impl Version {
.try_collect()
.await?;

type StringTriple = (Vec<String>, Vec<String>, Vec<String>);
let loaders_ptypes_games: DashMap<VersionId, StringTriple> = sqlx::query!(
"
SELECT DISTINCT version_id,
ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,
ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types,
ARRAY_AGG(DISTINCT g.slug) filter (where g.slug is not null) games
FROM versions v
INNER JOIN loaders_versions lv ON v.id = lv.version_id
INNER JOIN loaders l ON lv.loader_id = l.id
INNER JOIN loaders_project_types lpt ON lpt.joining_loader_id = l.id
INNER JOIN project_types pt ON pt.id = lpt.joining_project_type_id
INNER JOIN loaders_project_types_games lptg ON lptg.loader_id = l.id AND lptg.project_type_id = pt.id
INNER JOIN games g ON lptg.game_id = g.id
WHERE v.id = ANY($1)
GROUP BY version_id
",
&version_ids_parsed
).fetch(&mut *exec)
.map_ok(|m| {
let version_id = VersionId(m.version_id);
let loaders = m.loaders.unwrap_or_default();
let project_types = m.project_types.unwrap_or_default();
let games = m.games.unwrap_or_default();

(version_id, (loaders, project_types, games))

}
).try_collect().await?;

#[derive(Deserialize)]
struct Hash {
pub file_id: FileId,

@@ -729,12 +748,21 @@ impl Version {
Ok(e.right().map(|v|
{
let version_id = VersionId(v.id);
let (loaders, project_types, games) = loaders_ptypes_games.remove(&version_id).map(|x|x.1).unwrap_or_default();
let VersionLoaderData {
loaders,
project_types,
games,
loader_loader_field_ids,
} = loaders_ptypes_games.remove(&version_id).map(|x|x.1).unwrap_or_default();
let files = files.remove(&version_id).map(|x|x.1).unwrap_or_default();
let hashes = hashes.remove(&version_id).map(|x|x.1).unwrap_or_default();
let version_fields = version_fields.remove(&version_id).map(|x|x.1).unwrap_or_default();
let dependencies = dependencies.remove(&version_id).map(|x|x.1).unwrap_or_default();

let loader_fields = loader_fields.iter()
.filter(|x| loader_loader_field_ids.contains(&x.id))
.collect::<Vec<_>>();

QueryVersion {
inner: Version {
id: VersionId(v.id),

@@ -42,8 +42,11 @@ pub struct LegacyResultSearchProject {
impl LegacyResultSearchProject {
pub fn from(result_search_project: ResultSearchProject) -> Self {
let mut categories = result_search_project.categories;
categories.extend(result_search_project.loaders);
if categories.contains(&"mrpack".to_string()) {
if let Some(mrpack_loaders) = result_search_project.loader_fields.get("mrpack_loaders")
if let Some(mrpack_loaders) = result_search_project
.project_loader_fields
.get("mrpack_loaders")
{
categories.extend(
mrpack_loaders

@@ -56,7 +59,9 @@ impl LegacyResultSearchProject {
}
let mut display_categories = result_search_project.display_categories;
if display_categories.contains(&"mrpack".to_string()) {
if let Some(mrpack_loaders) = result_search_project.loader_fields.get("mrpack_loaders")
if let Some(mrpack_loaders) = result_search_project
.project_loader_fields
.get("mrpack_loaders")
{
categories.extend(
mrpack_loaders

@@ -93,9 +98,9 @@ impl LegacyResultSearchProject {
og_project_type.clone()
};

let loader_fields = result_search_project.loader_fields.clone();
let project_loader_fields = result_search_project.project_loader_fields.clone();
let get_one_bool_loader_field = |key: &str| {
loader_fields
project_loader_fields
.get(key)
.cloned()
.unwrap_or_default()

@@ -119,7 +124,7 @@ impl LegacyResultSearchProject {
let server_side = server_side.to_string();

let versions = result_search_project
.loader_fields
.project_loader_fields
.get("game_versions")
.cloned()
.unwrap_or_default()
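
Both of these lookups now read from the aggregated project_loader_fields map rather than the flattened per-version loader_fields. A small standalone sketch of the lookup pattern (plain serde_json values; "singleplayer" is just a hypothetical example key):

use std::collections::HashMap;
use serde_json::{json, Value};

fn main() {
    // Loader fields aggregated across all versions of a project, keyed by field name.
    let mut project_loader_fields: HashMap<String, Vec<Value>> = HashMap::new();
    project_loader_fields.insert("game_versions".to_string(), vec![json!("1.20.1"), json!("1.20.4")]);
    project_loader_fields.insert("singleplayer".to_string(), vec![json!(true)]);

    // Same shape as the `versions` extraction in the diff: missing key => empty list.
    let versions: Vec<String> = project_loader_fields
        .get("game_versions")
        .cloned()
        .unwrap_or_default()
        .into_iter()
        .filter_map(|v| v.as_str().map(String::from))
        .collect();

    // Roughly what a get_one_bool_loader_field-style closure does: first boolean value, if any.
    let get_one_bool = |key: &str| -> Option<bool> { project_loader_fields.get(key)?.first()?.as_bool() };

    println!("{:?} {:?}", versions, get_one_bool("singleplayer"));
}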

@@ -272,11 +272,16 @@ impl Project {

// Loaders
let mut loaders = m.loaders;
let mrpack_loaders_strings = m.loader_fields.get("mrpack_loaders").cloned().map(|v| {
v.into_iter()
.filter_map(|v| v.as_str().map(String::from))
.collect_vec()
});
let mrpack_loaders_strings =
m.project_loader_fields
.get("mrpack_loaders")
.cloned()
.map(|v| {
v.into_iter()
.filter_map(|v| v.as_str().map(String::from))
.collect_vec()
});

// If the project has a mrpack loader, keep only 'loaders' that are not in the mrpack_loaders
if let Some(ref mrpack_loaders) = mrpack_loaders_strings {
loaders.retain(|l| !mrpack_loaders.contains(l));
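
The retain call above is what keeps modpacks from advertising their inner loaders twice. A rough standalone sketch of the same filtering, with itertools' collect_vec swapped for a plain collect:

use serde_json::{json, Value};

fn main() {
    // Loaders attached to the project, plus the mrpack_loaders loader-field values.
    let mut loaders = vec!["mrpack".to_string(), "fabric".to_string()];
    let mrpack_loader_values: Vec<Value> = vec![json!("fabric")];

    // Pull the string values out of the JSON list, as the diff does with filter_map.
    let mrpack_loaders: Vec<String> = mrpack_loader_values
        .into_iter()
        .filter_map(|v| v.as_str().map(String::from))
        .collect();

    // Keep only loaders that are not already represented as mrpack loaders.
    loaders.retain(|l| !mrpack_loaders.contains(l));

    println!("{:?}", loaders); // ["mrpack"]
}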

@@ -375,7 +380,7 @@ impl Project {
thread_id,
monetization_status,
fields: m
.loader_fields
.project_loader_fields
.into_iter()
.map(|(k, v)| (k, v.into_iter().collect()))
.collect(),

@@ -146,7 +146,7 @@ pub async fn project_create(
let payload = v2_reroute::alter_actix_multipart(
payload,
req.headers().clone(),
|legacy_create: ProjectCreateData| async move {
|legacy_create: ProjectCreateData, _| async move {
// Side types will be applied to each version
let client_side = legacy_create.client_side;
let server_side = legacy_create.server_side;

@@ -9,8 +9,10 @@ use crate::models::projects::{
use crate::models::v2::projects::LegacyVersion;
use crate::queue::session::AuthQueue;
use crate::routes::v3::project_creation::CreateError;
use crate::routes::v3::version_creation;
use crate::routes::{v2_reroute, v3};
use actix_multipart::Multipart;
use actix_web::http::header::ContentDisposition;
use actix_web::web::Data;
use actix_web::{post, web, HttpRequest, HttpResponse};
use serde::{Deserialize, Serialize};

@@ -89,7 +91,7 @@ pub async fn version_create(
let payload = v2_reroute::alter_actix_multipart(
payload,
req.headers().clone(),
|legacy_create: InitialVersionData| {
|legacy_create: InitialVersionData, content_dispositions: Vec<ContentDisposition>| {
let client = client.clone();
let redis = redis.clone();
async move {

@@ -176,6 +178,19 @@ pub async fn version_create(
}
}

// Similarly, check actual content disposition for mrpacks, in case file_parts is wrong
for content_disposition in content_dispositions {
// Uses version_create functions to get the file name and extension
let (_, file_extension) = version_creation::get_name_ext(&content_disposition)?;
crate::util::ext::project_file_type(file_extension)
.ok_or_else(|| CreateError::InvalidFileType(file_extension.to_string()))?;

if file_extension == "mrpack" {
project_type = Some("modpack");
break;
}
}

// Modpacks now use the "mrpack" loader, and loaders are converted to loader fields.
// Setting of 'project_type' directly is removed, it's loader-based now.
if project_type == Some("modpack") {
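
This is the "direct file check" from the commit message: even if the declared file_parts are wrong, any uploaded part whose filename ends in .mrpack forces the project type to modpack. A simplified sketch of that extension check, using plain strings instead of the real ContentDisposition / get_name_ext helpers:

// Hypothetical simplified check; the real code goes through
// version_creation::get_name_ext and crate::util::ext::project_file_type.
fn file_extension(filename: &str) -> Option<&str> {
    filename.rsplit_once('.').map(|(_, ext)| ext)
}

fn detect_modpack(filenames: &[&str]) -> Option<&'static str> {
    for name in filenames {
        if file_extension(name) == Some("mrpack") {
            // Same effect as `project_type = Some("modpack"); break;` in the diff.
            return Some("modpack");
        }
    }
    None
}

fn main() {
    println!("{:?}", detect_modpack(&["my-pack.mrpack", "icon.png"])); // Some("modpack")
    println!("{:?}", detect_modpack(&["my-mod.jar"]));                 // None
}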

@@ -5,7 +5,7 @@ use super::ApiError;
use crate::models::v2::projects::LegacySideType;
use crate::util::actix::{generate_multipart, MultipartSegment, MultipartSegmentData};
use actix_multipart::Multipart;
use actix_web::http::header::{HeaderMap, TryIntoHeaderPair};
use actix_web::http::header::{ContentDisposition, HeaderMap, TryIntoHeaderPair};
use actix_web::HttpResponse;
use futures::{stream, Future, StreamExt};
use serde_json::{json, Value};

@@ -43,10 +43,15 @@ pub fn flatten_404_error(res: ApiError) -> Result<HttpResponse, ApiError> {
}
}

// Allows internal modification of an actix multipart file
// Expected:
// 1. A json segment
// 2. Any number of other binary segments
// 'closure' is called with the json value, and the content disposition of the other segments
pub async fn alter_actix_multipart<T, U, Fut>(
mut multipart: Multipart,
mut headers: HeaderMap,
mut closure: impl FnMut(T) -> Fut,
mut closure: impl FnMut(T, Vec<ContentDisposition>) -> Fut,
) -> Result<Multipart, CreateError>
where
T: serde::de::DeserializeOwned,
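
The widened signature means every caller's closure now receives the JSON value plus the content dispositions of the remaining binary segments. A hedged, standalone sketch of that two-argument closure shape, reduced to synchronous code and plain filename strings rather than the real async closure and ContentDisposition type:

// Hypothetical simplified driver: deserialize-then-rewrite the JSON segment,
// with the other segments' filenames passed alongside it.
fn alter_json<T, U>(
    json: T,
    other_segment_filenames: Vec<String>,
    mut closure: impl FnMut(T, Vec<String>) -> U,
) -> U {
    closure(json, other_segment_filenames)
}

fn main() {
    // Hypothetical legacy payload: just a project type string here.
    let legacy = String::from("mod");
    let rewritten = alter_json(legacy, vec!["pack.mrpack".into()], |legacy, files| {
        // Mirrors the version_create check: any .mrpack file flips the type to modpack.
        if files.iter().any(|f| f.ends_with(".mrpack")) {
            "modpack".to_string()
        } else {
            legacy
        }
    });
    println!("{rewritten}"); // modpack
}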

@@ -55,6 +60,10 @@ where
{
let mut segments: Vec<MultipartSegment> = Vec::new();

let mut json = None;
let mut json_segment = None;
let mut content_dispositions = Vec::new();

if let Some(field) = multipart.next().await {
let mut field = field?;
let content_disposition = field.content_disposition().clone();

@@ -71,16 +80,15 @@

{
let json_value: T = serde_json::from_slice(&buffer)?;
let json_value: U = closure(json_value).await?;
buffer = serde_json::to_vec(&json_value)?;
json = Some(json_value);
}

segments.push(MultipartSegment {
json_segment = Some(MultipartSegment {
name: field_name.to_string(),
filename: field_filename.map(|s| s.to_string()),
content_type: field_content_type,
data: MultipartSegmentData::Binary(buffer),
})
data: MultipartSegmentData::Binary(vec![]), // Initialize to empty, will be finished after
});
}

while let Some(field) = multipart.next().await {

@@ -97,6 +105,7 @@ where
buffer.extend_from_slice(&data);
}

content_dispositions.push(content_disposition.clone());
segments.push(MultipartSegment {
name: field_name.to_string(),
filename: field_filename.map(|s| s.to_string()),

@@ -105,6 +114,24 @@
})
}

// Finishes the json segment, with aggregated content dispositions
{
let json_value = json.ok_or(CreateError::InvalidInput(
"No json segment found in multipart.".to_string(),
))?;
let mut json_segment = json_segment.ok_or(CreateError::InvalidInput(
"No json segment found in multipart.".to_string(),
))?;

// Call closure, with the json value and names of the other segments
let json_value: U = closure(json_value, content_dispositions).await?;
let buffer = serde_json::to_vec(&json_value)?;
json_segment.data = MultipartSegmentData::Binary(buffer);

// Insert the json segment at the beginning
segments.insert(0, json_segment);
}

let (boundary, payload) = generate_multipart(segments);

match (

@@ -128,6 +128,8 @@ pub async fn index_local(
.map(|vf| (vf.field_name.clone(), vf.value.serialize_internal()))
.collect();
let mut loader_fields = models::projects::from_duplicate_version_fields(version_fields);
let project_loader_fields =
models::projects::from_duplicate_version_fields(m.aggregate_version_fields.clone());
let license = match m.inner.license.split(' ').next() {
Some(license) => license.to_string(),
None => m.inner.license.clone(),

@@ -240,6 +242,7 @@ pub async fn index_local(
links: m.urls.clone(),
gallery_items: m.gallery_items.clone(),
loaders,
project_loader_fields,
};

uploads.push(usp);
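
project_loader_fields is built by folding every version's (field name, value) pairs into one map keyed by field name, so the Project model can be rebuilt from a search document. A rough guess at what a from_duplicate_version_fields-style aggregation does (illustrative only, not the actual implementation):

use std::collections::HashMap;
use serde_json::{json, Value};

// Collapse duplicate (field_name, value) pairs from many versions into one map,
// keeping every value under its field name.
fn aggregate(fields: Vec<(String, Value)>) -> HashMap<String, Vec<Value>> {
    let mut out: HashMap<String, Vec<Value>> = HashMap::new();
    for (name, value) in fields {
        out.entry(name).or_default().push(value);
    }
    out
}

fn main() {
    let fields = vec![
        ("game_versions".to_string(), json!("1.20.1")),
        ("game_versions".to_string(), json!("1.20.4")),
        ("mrpack_loaders".to_string(), json!("fabric")),
    ];
    println!("{:?}", aggregate(fields));
}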

@@ -400,6 +400,7 @@ const DEFAULT_DISPLAYED_ATTRIBUTES: &[&str] = &[
"links",
"gallery_items",
"loaders", // search uses loaders as categories- this is purely for the Project model.
"project_loader_fields",
];

const DEFAULT_SEARCHABLE_ATTRIBUTES: &[&str] = &["name", "summary", "author", "slug"];

@@ -133,6 +133,7 @@ pub struct UploadSearchProject {
pub gallery_items: Vec<GalleryItem>, // Gallery *only* urls are stored in gallery, but the gallery items are stored here- required for the Project model.
pub games: Vec<String>, // Todo: in future, could be a searchable field.
pub organization_id: Option<String>, // Todo: in future, could be a searchable field.
pub project_loader_fields: HashMap<String, Vec<serde_json::Value>>, // Aggregation of loader_fields from all versions of the project, allowing for reconstruction of the Project model.

#[serde(flatten)]
pub loader_fields: HashMap<String, Vec<serde_json::Value>>,

@@ -184,6 +185,7 @@ pub struct ResultSearchProject {
pub gallery_items: Vec<GalleryItem>, // Gallery *only* urls are stored in gallery, but the gallery items are stored here- required for the Project model.
pub games: Vec<String>, // Todo: in future, could be a searchable field.
pub organization_id: Option<String>, // Todo: in future, could be a searchable field.
pub project_loader_fields: HashMap<String, Vec<serde_json::Value>>, // Aggregation of loader_fields from all versions of the project, allowing for reconstruction of the Project model.

#[serde(flatten)]
pub loader_fields: HashMap<String, Vec<serde_json::Value>>,
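
Both search structs keep the flattened loader_fields map (whose entries become top-level document keys) and gain the dedicated project_loader_fields field. A small sketch of how #[serde(flatten)] makes the two maps serialize differently (illustrative struct, not the real search model):

use std::collections::HashMap;
use serde::Serialize;
use serde_json::{json, Value};

#[derive(Serialize)]
struct SearchDoc {
    // Serialized as a nested object under this key.
    project_loader_fields: HashMap<String, Vec<Value>>,
    // Flattened: each entry becomes a top-level key of the document.
    #[serde(flatten)]
    loader_fields: HashMap<String, Vec<Value>>,
}

fn main() {
    let doc = SearchDoc {
        project_loader_fields: HashMap::from([("game_versions".to_string(), vec![json!("1.20.1")])]),
        loader_fields: HashMap::from([("game_versions".to_string(), vec![json!("1.20.4")])]),
    };
    // => {"project_loader_fields":{"game_versions":["1.20.1"]},"game_versions":["1.20.4"]}
    println!("{}", serde_json::to_string(&doc).unwrap());
}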