Secure auth route, fix quilt deps bug, optimize queries more (#374)
* Secure auth route, fix quilt deps bug, optimize queries more
* Add to_lowercase for multiple hashes functions
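
A note on the query optimizations below: they all follow one pattern. Instead of issuing follow-up queries per row, related child rows are packed into a single column with STRING_AGG, using ' |||| ' between fields and ' ~~~~ ' between rows, then unpacked again in Rust. A minimal, self-contained sketch of that round trip (hypothetical data; the real field layouts appear in the hunks below):

    // Unpack a STRING_AGG column: rows are joined by " ~~~~ ",
    // fields within a row by " |||| ".
    fn unpack(packed: &str) -> Vec<Vec<String>> {
        packed
            .split(" ~~~~ ")
            .map(|row| row.split(" |||| ").map(str::to_string).collect())
            .collect()
    }

    fn main() {
        let packed = "7 |||| Accept |||| /route/a |||| POST ~~~~ 8 |||| Deny |||| /route/b |||| POST";
        let rows = unpack(packed);
        assert_eq!(rows.len(), 2);
        assert_eq!(rows[0][3], "POST");
    }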
@@ -2,6 +2,7 @@
 // TODO: remove attr once routes are created

 use thiserror::Error;
+use time::OffsetDateTime;

 pub mod categories;
 pub mod ids;
@@ -125,3 +126,11 @@ impl ids::ProjectTypeId {
         Ok(result.map(|r| ids::ProjectTypeId(r.id)))
     }
 }
+
+pub fn convert_postgres_date(input: &str) -> OffsetDateTime {
+    OffsetDateTime::parse(
+        format!("{}:00Z", input.replace(' ', "T")),
+        time::Format::Rfc3339,
+    )
+    .unwrap_or_else(|_| OffsetDateTime::now_utc())
+}
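
A note on the helper above: it assumes the timestamp text produced by string concatenation in these queries has the shape "YYYY-MM-DD HH:MM" with no zone suffix, so replacing the space with "T" and appending ":00Z" yields an RFC 3339 string; anything that fails to parse falls back to the current time. A hedged usage sketch (the input value is hypothetical):

    // "2021-05-09 14:30" -> "2021-05-09T14:30:00Z" -> OffsetDateTime
    let created = convert_postgres_date("2021-05-09 14:30");
    assert_eq!(created.unix_timestamp(), 1620570600);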
@@ -118,31 +118,40 @@ impl Notification {
         id: NotificationId,
         executor: E,
     ) -> Result<Option<Self>, sqlx::error::Error>
     where
-        E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
+        E: sqlx::Executor<'a, Database = sqlx::Postgres>,
     {
-        let (notifications, actions) = futures::join!(
-            sqlx::query!(
-                "
-                SELECT n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type
-                FROM notifications n
-                WHERE n.id = $1
-                GROUP BY n.id, n.user_id;
-                ",
-                id as NotificationId,
-            )
-            .fetch_optional(executor),
-            sqlx::query!(
-                "
-                SELECT id, title, notification_id, action_route, action_route_method
-                FROM notifications_actions
-                WHERE notification_id = $1
-                ",
-                id as NotificationId,
-            ).fetch_all(executor),
-        );
+        let result = sqlx::query!(
+            "
+            SELECT n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type,
+            STRING_AGG(DISTINCT na.id || ' |||| ' || na.title || ' |||| ' || na.action_route || ' |||| ' || na.action_route_method, ' ~~~~ ') actions
+            FROM notifications n
+            LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id
+            WHERE n.id = $1
+            GROUP BY n.id, n.user_id;
+            ",
+            id as NotificationId,
+        )
+        .fetch_optional(executor)
+        .await?;

-        if let Some(row) = notifications? {
+        if let Some(row) = result {
+            let mut actions: Vec<NotificationAction> = Vec::new();
+
+            row.actions.unwrap_or_default().split(" ~~~~ ").for_each(|x| {
+                let action: Vec<&str> = x.split(" |||| ").collect();
+
+                if action.len() >= 4 {
+                    actions.push(NotificationAction {
+                        id: NotificationActionId(action[0].parse().unwrap_or(0)),
+                        notification_id: id,
+                        title: action[1].to_string(),
+                        action_route_method: action[3].to_string(),
+                        action_route: action[2].to_string(),
+                    });
+                }
+            });
+
             Ok(Some(Notification {
                 id,
                 user_id: UserId(row.user_id),
@@ -152,16 +161,7 @@ impl Notification {
                 link: row.link,
                 read: row.read,
                 created: row.created,
-                actions: actions?
-                    .into_iter()
-                    .map(|x| NotificationAction {
-                        id: NotificationActionId(x.id),
-                        notification_id: NotificationId(x.notification_id),
-                        title: x.title,
-                        action_route_method: x.action_route_method,
-                        action_route: x.action_route,
-                    })
-                    .collect(),
+                actions,
             }))
         } else {
             Ok(None)
@@ -172,38 +172,116 @@ impl Notification {
         notification_ids: Vec<NotificationId>,
         exec: E,
     ) -> Result<Vec<Notification>, sqlx::Error>
     where
         E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
     {
-        futures::future::try_join_all(
-            notification_ids.into_iter().map(|id| Self::get(id, exec)),
-        )
-        .await
-        .map(|x| x.into_iter().flatten().collect())
+        use futures::stream::TryStreamExt;
+
+        let notification_ids_parsed: Vec<i64> = notification_ids.into_iter().map(|x| x.0).collect();
+        sqlx::query!(
+            "
+            SELECT n.id, n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type,
+            STRING_AGG(DISTINCT na.id || ' |||| ' || na.title || ' |||| ' || na.action_route || ' |||| ' || na.action_route_method, ' ~~~~ ') actions
+            FROM notifications n
+            LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id
+            WHERE n.id = ANY($1)
+            GROUP BY n.id, n.user_id
+            ORDER BY n.created DESC;
+            ",
+            &notification_ids_parsed
+        )
+        .fetch_many(exec)
+        .try_filter_map(|e| async {
+            Ok(e.right().map(|row| {
+                let id = NotificationId(row.id);
+                let mut actions: Vec<NotificationAction> = Vec::new();
+
+                row.actions.unwrap_or_default().split(" ~~~~ ").for_each(|x| {
+                    let action: Vec<&str> = x.split(" |||| ").collect();
+
+                    if action.len() >= 4 {
+                        actions.push(NotificationAction {
+                            id: NotificationActionId(action[0].parse().unwrap_or(0)),
+                            notification_id: id,
+                            title: action[1].to_string(),
+                            action_route_method: action[3].to_string(),
+                            action_route: action[2].to_string(),
+                        });
+                    }
+                });
+
+                Notification {
+                    id,
+                    user_id: UserId(row.user_id),
+                    notification_type: row.notification_type,
+                    title: row.title,
+                    text: row.text,
+                    link: row.link,
+                    read: row.read,
+                    created: row.created,
+                    actions,
+                }
+            }))
+        })
+        .try_collect::<Vec<Notification>>()
+        .await
     }

     pub async fn get_many_user<'a, E>(
         user_id: UserId,
         exec: E,
     ) -> Result<Vec<Notification>, sqlx::Error>
     where
         E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
     {
-        let notification_ids = sqlx::query!(
-            "
-            SELECT id
-            FROM notifications
-            WHERE user_id = $1
-            ",
-            user_id as UserId
-        )
-        .fetch_all(exec)
-        .await?
-        .into_iter()
-        .map(|x| NotificationId(x.id))
-        .collect();
-
-        Self::get_many(notification_ids, exec).await
+        use futures::stream::TryStreamExt;
+
+        sqlx::query!(
+            "
+            SELECT n.id, n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type,
+            STRING_AGG(DISTINCT na.id || ' |||| ' || na.title || ' |||| ' || na.action_route || ' |||| ' || na.action_route_method, ' ~~~~ ') actions
+            FROM notifications n
+            LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id
+            WHERE n.user_id = $1
+            GROUP BY n.id, n.user_id;
+            ",
+            user_id as UserId
+        )
+        .fetch_many(exec)
+        .try_filter_map(|e| async {
+            Ok(e.right().map(|row| {
+                let id = NotificationId(row.id);
+                let mut actions: Vec<NotificationAction> = Vec::new();
+
+                row.actions.unwrap_or_default().split(" ~~~~ ").for_each(|x| {
+                    let action: Vec<&str> = x.split(" |||| ").collect();
+
+                    if action.len() >= 4 {
+                        actions.push(NotificationAction {
+                            id: NotificationActionId(action[0].parse().unwrap_or(0)),
+                            notification_id: id,
+                            title: action[1].to_string(),
+                            action_route_method: action[3].to_string(),
+                            action_route: action[2].to_string(),
+                        });
+                    }
+                });
+
+                Notification {
+                    id,
+                    user_id: UserId(row.user_id),
+                    notification_type: row.notification_type,
+                    title: row.title,
+                    text: row.text,
+                    link: row.link,
+                    read: row.read,
+                    created: row.created,
+                    actions,
+                }
+            }))
+        })
+        .try_collect::<Vec<Notification>>()
+        .await
     }

     pub async fn remove(
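
The streaming pattern used in both functions above deserves a note: in the sqlx version in use here, fetch_many yields a stream of Either<query result, row>, so e.right() keeps only the rows and try_filter_map drops the rest while still propagating database errors. A minimal sketch of the shape (assumes an executor exec in scope and this era of sqlx's API):

    use futures::stream::TryStreamExt;

    let ids: Vec<i64> = sqlx::query!("SELECT id FROM notifications")
        .fetch_many(exec)
        .try_filter_map(|e| async move { Ok(e.right().map(|row| row.id)) })
        .try_collect()
        .await?;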
@@ -1,4 +1,5 @@
 use super::ids::*;
+use crate::database::models::convert_postgres_date;
 use time::OffsetDateTime;

 #[derive(Clone, Debug)]
@@ -719,11 +720,7 @@ impl Project {
             } else {
                 Some(strings[4].to_string())
             },
-            created: OffsetDateTime::parse(
-                strings[2],
-                time::Format::Rfc3339,
-            )
-            .unwrap_or_else(|_| OffsetDateTime::now_utc()),
+            created: convert_postgres_date(strings[2]),
         })
     } else {
         None
@@ -835,7 +832,7 @@ impl Project {
             featured: strings[1].parse().unwrap_or(false),
             title: if strings[3] == " " { None } else { Some(strings[3].to_string()) },
             description: if strings[4] == " " { None } else { Some(strings[4].to_string()) },
-            created: OffsetDateTime::parse(strings[2], time::Format::Rfc3339).unwrap_or_else(|_| OffsetDateTime::now_utc())
+            created: convert_postgres_date(strings[2])
         })
     } else {
         None
@@ -1,5 +1,6 @@
 use super::ids::*;
 use super::DatabaseError;
+use crate::database::models::convert_postgres_date;
 use std::collections::HashMap;
 use time::OffsetDateTime;

@@ -498,22 +499,20 @@ impl Version {

         let vec = sqlx::query!(
             "
-            SELECT version.id FROM (
-                SELECT DISTINCT ON(v.id) v.id, v.date_published FROM versions v
-                INNER JOIN game_versions_versions gvv ON gvv.joining_version_id = v.id
-                INNER JOIN game_versions gv on gvv.game_version_id = gv.id AND (cardinality($2::varchar[]) = 0 OR gv.version = ANY($2::varchar[]))
-                INNER JOIN loaders_versions lv ON lv.version_id = v.id
-                INNER JOIN loaders l on lv.loader_id = l.id AND (cardinality($3::varchar[]) = 0 OR l.loader = ANY($3::varchar[]))
-                WHERE v.mod_id = $1
-            ) AS version
-            ORDER BY version.date_published ASC
+            SELECT DISTINCT ON(v.date_published, v.id) v.id version_id, v.date_published FROM versions v
+            INNER JOIN game_versions_versions gvv ON gvv.joining_version_id = v.id
+            INNER JOIN game_versions gv on gvv.game_version_id = gv.id AND (cardinality($2::varchar[]) = 0 OR gv.version = ANY($2::varchar[]))
+            INNER JOIN loaders_versions lv ON lv.version_id = v.id
+            INNER JOIN loaders l on lv.loader_id = l.id AND (cardinality($3::varchar[]) = 0 OR l.loader = ANY($3::varchar[]))
+            WHERE v.mod_id = $1
+            ORDER BY v.date_published, v.id ASC
             ",
             project_id as ProjectId,
             &game_versions.unwrap_or_default(),
             &loaders.unwrap_or_default(),
         )
         .fetch_many(exec)
-        .try_filter_map(|e| async { Ok(e.right().map(|v| VersionId(v.id))) })
+        .try_filter_map(|e| async { Ok(e.right().map(|v| VersionId(v.version_id))) })
         .try_collect::<Vec<VersionId>>()
         .await?;
@@ -615,7 +614,7 @@ impl Version {
             SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,
             v.changelog changelog, v.changelog_url changelog_url, v.date_published date_published, v.downloads downloads,
             v.version_type version_type, v.featured featured,
-            STRING_AGG(DISTINCT gv.version, ' ~~~~ ') game_versions, STRING_AGG(DISTINCT l.loader, ' ~~~~ ') loaders,
+            STRING_AGG(DISTINCT gv.version || ' |||| ' || gv.created, ' ~~~~ ') game_versions, STRING_AGG(DISTINCT l.loader, ' ~~~~ ') loaders,
             STRING_AGG(DISTINCT f.id || ' |||| ' || f.is_primary || ' |||| ' || f.size || ' |||| ' || f.url || ' |||| ' || f.filename, ' ~~~~ ') files,
             STRING_AGG(DISTINCT h.algorithm || ' |||| ' || encode(h.hash, 'escape') || ' |||| ' || h.file_id, ' ~~~~ ') hashes,
             STRING_AGG(DISTINCT COALESCE(d.dependency_id, 0) || ' |||| ' || COALESCE(d.mod_dependency_id, 0) || ' |||| ' || d.dependency_type || ' |||| ' || COALESCE(d.dependency_file_name, ' '), ' ~~~~ ') dependencies
@@ -636,26 +635,6 @@ impl Version {
         .await?;

         if let Some(v) = result {
-            let hashes: Vec<(FileId, String, Vec<u8>)> = v
-                .hashes
-                .unwrap_or_default()
-                .split(" ~~~~ ")
-                .map(|f| {
-                    let hash: Vec<&str> = f.split(" |||| ").collect();
-
-                    if hash.len() >= 3 {
-                        Some((
-                            FileId(hash[2].parse().unwrap_or(0)),
-                            hash[0].to_string(),
-                            hash[1].to_string().into_bytes(),
-                        ))
-                    } else {
-                        None
-                    }
-                })
-                .flatten()
-                .collect();
-
             Ok(Some(QueryVersion {
                 id: VersionId(v.id),
                 project_id: ProjectId(v.mod_id),
@@ -666,44 +645,87 @@ impl Version {
                 changelog_url: v.changelog_url,
                 date_published: v.date_published,
                 downloads: v.downloads,
-                files: v
-                    .files
-                    .unwrap_or_default()
-                    .split(" ~~~~ ")
-                    .map(|f| {
-                        let file: Vec<&str> = f.split(" |||| ").collect();
-
-                        if file.len() >= 5 {
-                            let file_id = FileId(file[0].parse().unwrap_or(0));
-                            let mut file_hashes = HashMap::new();
-
-                            for hash in &hashes {
-                                if (hash.0).0 == file_id.0 {
-                                    file_hashes
-                                        .insert(hash.1.clone(), hash.2.clone());
-                                }
-                            }
-
-                            Some(QueryFile {
-                                id: file_id,
-                                url: file[3].to_string(),
-                                filename: file[4].to_string(),
-                                hashes: file_hashes,
-                                primary: file[1].parse().unwrap_or(false),
-                                size: file[2].parse().unwrap_or(0),
-                            })
-                        } else {
-                            None
-                        }
-                    })
-                    .flatten()
-                    .collect(),
-                game_versions: v
-                    .game_versions
-                    .unwrap_or_default()
-                    .split(" ~~~~ ")
-                    .map(|x| x.to_string())
-                    .collect(),
+                files: {
+                    let hashes: Vec<(FileId, String, Vec<u8>)> = v
+                        .hashes
+                        .unwrap_or_default()
+                        .split(" ~~~~ ")
+                        .map(|f| {
+                            let hash: Vec<&str> = f.split(" |||| ").collect();
+
+                            if hash.len() >= 3 {
+                                Some((
+                                    FileId(hash[2].parse().unwrap_or(0)),
+                                    hash[0].to_string(),
+                                    hash[1].to_string().into_bytes(),
+                                ))
+                            } else {
+                                None
+                            }
+                        })
+                        .flatten()
+                        .collect();
+
+                    v.files
+                        .unwrap_or_default()
+                        .split(" ~~~~ ")
+                        .map(|f| {
+                            let file: Vec<&str> = f.split(" |||| ").collect();
+
+                            if file.len() >= 5 {
+                                let file_id =
+                                    FileId(file[0].parse().unwrap_or(0));
+                                let mut file_hashes = HashMap::new();
+
+                                for hash in &hashes {
+                                    if (hash.0).0 == file_id.0 {
+                                        file_hashes.insert(
+                                            hash.1.clone(),
+                                            hash.2.clone(),
+                                        );
+                                    }
+                                }
+
+                                Some(QueryFile {
+                                    id: file_id,
+                                    url: file[3].to_string(),
+                                    filename: file[4].to_string(),
+                                    hashes: file_hashes,
+                                    primary: file[1].parse().unwrap_or(false),
+                                    size: file[2].parse().unwrap_or(0),
+                                })
+                            } else {
+                                None
+                            }
+                        })
+                        .flatten()
+                        .collect()
+                },
+                game_versions: {
+                    let game_versions = v.game_versions.unwrap_or_default();
+
+                    let mut gv = game_versions
+                        .split(" ~~~~ ")
+                        .flat_map(|x| {
+                            let version: Vec<&str> =
+                                x.split(" |||| ").collect();
+
+                            if version.len() >= 2 {
+                                Some((
+                                    version[0],
+                                    convert_postgres_date(version[1])
+                                        .unix_timestamp(),
+                                ))
+                            } else {
+                                None
+                            }
+                        })
+                        .collect::<Vec<(&str, i64)>>();
+
+                    gv.sort_by(|a, b| a.1.cmp(&b.1));
+
+                    gv.into_iter().map(|x| x.0.to_string()).collect()
+                },
                 loaders: v
                     .loaders
                     .unwrap_or_default()
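
The game_versions block above relies on the column now carrying "version |||| created" pairs: each pair is parsed, tagged with its creation timestamp, sorted on that timestamp, and reduced back to the bare version string, so game versions come back in release order. The same trick isolated, with hypothetical input:

    let packed = "1.16.5 |||| 2021-01-15 10:00 ~~~~ 1.14.4 |||| 2019-07-19 09:00";
    let mut gv: Vec<(&str, i64)> = packed
        .split(" ~~~~ ")
        .flat_map(|x| {
            let pair: Vec<&str> = x.split(" |||| ").collect();
            if pair.len() >= 2 {
                Some((pair[0], convert_postgres_date(pair[1]).unix_timestamp()))
            } else {
                None
            }
        })
        .collect();
    gv.sort_by(|a, b| a.1.cmp(&b.1));
    let ordered: Vec<String> = gv.into_iter().map(|x| x.0.to_string()).collect();
    // ordered == ["1.14.4", "1.16.5"]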
@@ -770,7 +792,7 @@ impl Version {
             SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,
             v.changelog changelog, v.changelog_url changelog_url, v.date_published date_published, v.downloads downloads,
             v.version_type version_type, v.featured featured,
-            STRING_AGG(DISTINCT gv.version, ' ~~~~ ') game_versions, STRING_AGG(DISTINCT l.loader, ' ~~~~ ') loaders,
+            STRING_AGG(DISTINCT gv.version || ' |||| ' || gv.created, ' ~~~~ ') game_versions, STRING_AGG(DISTINCT l.loader, ' ~~~~ ') loaders,
             STRING_AGG(DISTINCT f.id || ' |||| ' || f.is_primary || ' |||| ' || f.size || ' |||| ' || f.url || ' |||| ' || f.filename, ' ~~~~ ') files,
             STRING_AGG(DISTINCT h.algorithm || ' |||| ' || encode(h.hash, 'escape') || ' |||| ' || h.file_id, ' ~~~~ ') hashes,
             STRING_AGG(DISTINCT COALESCE(d.dependency_id, 0) || ' |||| ' || COALESCE(d.mod_dependency_id, 0) || ' |||| ' || d.dependency_type || ' |||| ' || COALESCE(d.dependency_file_name, ' '), ' ~~~~ ') dependencies
@@ -790,21 +812,7 @@ impl Version {
         )
         .fetch_many(exec)
         .try_filter_map(|e| async {
-            Ok(e.right().map(|v| {
-                let hashes: Vec<(FileId, String, Vec<u8>)> = v.hashes.unwrap_or_default().split(" ~~~~ ").map(|f| {
-                    let hash: Vec<&str> = f.split(" |||| ").collect();
-
-                    if hash.len() >= 3 {
-                        Some((
-                            FileId(hash[2].parse().unwrap_or(0)),
-                            hash[0].to_string(),
-                            hash[1].to_string().into_bytes(),
-                        ))
-                    } else {
-                        None
-                    }
-                }).flatten().collect();
-
+            Ok(e.right().map(|v|
             QueryVersion {
                 id: VersionId(v.id),
                 project_id: ProjectId(v.mod_id),
@@ -815,32 +823,71 @@ impl Version {
                 changelog_url: v.changelog_url,
                 date_published: v.date_published,
                 downloads: v.downloads,
-                files: v.files.unwrap_or_default().split(" ~~~~ ").map(|f| {
-                    let file: Vec<&str> = f.split(" |||| ").collect();
-
-                    if file.len() >= 5 {
-                        let file_id = FileId(file[0].parse().unwrap_or(0));
-                        let mut file_hashes = HashMap::new();
-
-                        for hash in &hashes {
-                            if (hash.0).0 == file_id.0 {
-                                file_hashes.insert(hash.1.clone(), hash.2.clone());
-                            }
-                        }
-
-                        Some(QueryFile {
-                            id: file_id,
-                            url: file[3].to_string(),
-                            filename: file[4].to_string(),
-                            hashes: file_hashes,
-                            primary: file[1].parse().unwrap_or(false),
-                            size: file[2].parse().unwrap_or(0),
-                        })
-                    } else {
-                        None
-                    }
-                }).flatten().collect(),
-                game_versions: v.game_versions.unwrap_or_default().split(" ~~~~ ").map(|x| x.to_string()).collect(),
+                files: {
+                    let hashes: Vec<(FileId, String, Vec<u8>)> = v.hashes.unwrap_or_default().split(" ~~~~ ").map(|f| {
+                        let hash: Vec<&str> = f.split(" |||| ").collect();
+
+                        if hash.len() >= 3 {
+                            Some((
+                                FileId(hash[2].parse().unwrap_or(0)),
+                                hash[0].to_string(),
+                                hash[1].to_string().into_bytes(),
+                            ))
+                        } else {
+                            None
+                        }
+                    }).flatten().collect();
+
+                    v.files.unwrap_or_default().split(" ~~~~ ").map(|f| {
+                        let file: Vec<&str> = f.split(" |||| ").collect();
+
+                        if file.len() >= 5 {
+                            let file_id = FileId(file[0].parse().unwrap_or(0));
+                            let mut file_hashes = HashMap::new();
+
+                            for hash in &hashes {
+                                if (hash.0).0 == file_id.0 {
+                                    file_hashes.insert(hash.1.clone(), hash.2.clone());
+                                }
+                            }
+
+                            Some(QueryFile {
+                                id: file_id,
+                                url: file[3].to_string(),
+                                filename: file[4].to_string(),
+                                hashes: file_hashes,
+                                primary: file[1].parse().unwrap_or(false),
+                                size: file[2].parse().unwrap_or(0),
+                            })
+                        } else {
+                            None
+                        }
+                    }).flatten().collect()
+                },
+                game_versions: {
+                    let game_versions = v
+                        .game_versions
+                        .unwrap_or_default();
+
+                    let mut gv = game_versions
+                        .split(" ~~~~ ")
+                        .flat_map(|x| {
+                            let version: Vec<&str> = x.split(" |||| ").collect();
+
+                            if version.len() >= 2 {
+                                Some((version[0], convert_postgres_date(version[1]).unix_timestamp()))
+                            } else {
+                                None
+                            }
+                        })
+                        .collect::<Vec<(&str, i64)>>();
+
+                    gv.sort_by(|a, b| a.1.cmp(&b.1));
+
+                    gv.into_iter()
+                        .map(|x| x.0.to_string())
+                        .collect()
+                },
                 loaders: v.loaders.unwrap_or_default().split(" ~~~~ ").map(|x| x.to_string()).collect(),
                 featured: v.featured,
                 dependencies: v.dependencies
@@ -878,7 +925,7 @@ impl Version {
                 }).flatten().collect(),
                 version_type: v.version_type
             }
-            }))
+            ))
         })
         .try_collect::<Vec<QueryVersion>>()
         .await