Secure auth route, fix quilt deps bug, optimize queries more (#374)
* Secure auth route, fix quilt deps bug, optimize queries more
* Add to_lowercase for multiple hashes functions
@@ -2,6 +2,7 @@
 // TODO: remove attr once routes are created
 use thiserror::Error;
+use time::OffsetDateTime;

 pub mod categories;
 pub mod ids;
@@ -125,3 +126,11 @@ impl ids::ProjectTypeId {
         Ok(result.map(|r| ids::ProjectTypeId(r.id)))
     }
 }
+
+pub fn convert_postgres_date(input: &str) -> OffsetDateTime {
+    OffsetDateTime::parse(
+        format!("{}:00Z", input.replace(' ', "T")),
+        time::Format::Rfc3339,
+    )
+    .unwrap_or_else(|_| OffsetDateTime::now_utc())
+}
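The new helper leans on string munging rather than a dedicated parser: Postgres renders the aggregated `gv.created` timestamps as `YYYY-MM-DD HH:MM`-style text, so swapping the space for a `T` and appending `:00Z` yields an RFC 3339 string the `time` crate can parse. A minimal, standalone sketch of just the string transformation, with a hypothetical input value:

    // Hypothetical timestamp text as it comes out of the STRING_AGG queries below:
    let input = "2021-08-01 14:30";
    let rfc3339 = format!("{}:00Z", input.replace(' ', "T"));
    assert_eq!(rfc3339, "2021-08-01T14:30:00Z"); // now parseable as RFC 3339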
@@ -118,31 +118,40 @@ impl Notification {
         id: NotificationId,
         executor: E,
     ) -> Result<Option<Self>, sqlx::error::Error>
-    where
-        E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
+    where
+        E: sqlx::Executor<'a, Database = sqlx::Postgres>,
     {
-        let (notifications, actions) = futures::join!(
-            sqlx::query!(
-                "
-                SELECT n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type
-                FROM notifications n
-                WHERE n.id = $1
-                GROUP BY n.id, n.user_id;
-                ",
-                id as NotificationId,
-            )
-            .fetch_optional(executor),
-            sqlx::query!(
-                "
-                SELECT id, title, notification_id, action_route, action_route_method
-                FROM notifications_actions
-                WHERE notification_id = $1
-                ",
-                id as NotificationId,
-            ).fetch_all(executor),
-        );
+        let result = sqlx::query!(
+            "
+            SELECT n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type,
+            STRING_AGG(DISTINCT na.id || ' |||| ' || na.title || ' |||| ' || na.action_route || ' |||| ' || na.action_route_method, ' ~~~~ ') actions
+            FROM notifications n
+            LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id
+            WHERE n.id = $1
+            GROUP BY n.id, n.user_id;
+            ",
+            id as NotificationId,
+        )
+        .fetch_optional(executor)
+        .await?;
+
+        if let Some(row) = result {
+            let mut actions: Vec<NotificationAction> = Vec::new();
+
+            row.actions.unwrap_or_default().split(" ~~~~ ").for_each(|x| {
+                let action: Vec<&str> = x.split(" |||| ").collect();
+
+                if action.len() >= 3 {
+                    actions.push(NotificationAction {
+                        id: NotificationActionId(action[0].parse().unwrap_or(0)),
+                        notification_id: id,
+                        title: action[1].to_string(),
+                        action_route_method: action[3].to_string(),
+                        action_route: action[2].to_string(),
+                    });
+                }
+            });
+
-        if let Some(row) = notifications? {
             Ok(Some(Notification {
                 id,
                 user_id: UserId(row.user_id),
@@ -152,16 +161,7 @@ impl Notification {
                 link: row.link,
                 read: row.read,
                 created: row.created,
-                actions: actions?
-                    .into_iter()
-                    .map(|x| NotificationAction {
-                        id: NotificationActionId(x.id),
-                        notification_id: NotificationId(x.notification_id),
-                        title: x.title,
-                        action_route_method: x.action_route_method,
-                        action_route: x.action_route,
-                    })
-                    .collect(),
+                actions,
             }))
         } else {
             Ok(None)
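The `' ~~~~ '` and `' |||| '` separators are arbitrary sentinels chosen so `STRING_AGG` output can be split back into records and fields. A standalone sketch of that round trip over a made-up row value (note that four fields are read per record, so the sketch guards with `>= 4`, the strict form of the `>= 3` check above):

    // Hypothetical `actions` column produced by the STRING_AGG above:
    let aggregated = "7 |||| Accept invite |||| /team/accept |||| POST ~~~~ 8 |||| Deny invite |||| /team/deny |||| POST";

    for record in aggregated.split(" ~~~~ ") {
        let fields: Vec<&str> = record.split(" |||| ").collect();
        if fields.len() >= 4 {
            // fields[0] = action id, [1] = title, [2] = route, [3] = HTTP method
            println!("{} {} -> {}", fields[3], fields[1], fields[2]);
        }
    }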
@@ -172,38 +172,116 @@ impl Notification {
         notification_ids: Vec<NotificationId>,
         exec: E,
     ) -> Result<Vec<Notification>, sqlx::Error>
     where
         E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
     {
-        futures::future::try_join_all(
-            notification_ids.into_iter().map(|id| Self::get(id, exec)),
-        )
-        .await
-        .map(|x| x.into_iter().flatten().collect())
+        use futures::stream::TryStreamExt;
+
+        let notification_ids_parsed: Vec<i64> = notification_ids.into_iter().map(|x| x.0).collect();
+        sqlx::query!(
+            "
+            SELECT n.id, n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type,
+            STRING_AGG(DISTINCT na.id || ' |||| ' || na.title || ' |||| ' || na.action_route || ' |||| ' || na.action_route_method, ' ~~~~ ') actions
+            FROM notifications n
+            LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id
+            WHERE n.id = ANY($1)
+            GROUP BY n.id, n.user_id
+            ORDER BY n.created DESC;
+            ",
+            &notification_ids_parsed
+        )
+        .fetch_many(exec)
+        .try_filter_map(|e| async {
+            Ok(e.right().map(|row| {
+                let id = NotificationId(row.id);
+                let mut actions: Vec<NotificationAction> = Vec::new();
+
+                row.actions.unwrap_or_default().split(" ~~~~ ").for_each(|x| {
+                    let action: Vec<&str> = x.split(" |||| ").collect();
+
+                    if action.len() >= 3 {
+                        actions.push(NotificationAction {
+                            id: NotificationActionId(action[0].parse().unwrap_or(0)),
+                            notification_id: id,
+                            title: action[1].to_string(),
+                            action_route_method: action[3].to_string(),
+                            action_route: action[2].to_string(),
+                        });
+                    }
+                });
+
+                Notification {
+                    id,
+                    user_id: UserId(row.user_id),
+                    notification_type: row.notification_type,
+                    title: row.title,
+                    text: row.text,
+                    link: row.link,
+                    read: row.read,
+                    created: row.created,
+                    actions,
+                }
+            }))
+        })
+        .try_collect::<Vec<Notification>>()
+        .await
     }

     pub async fn get_many_user<'a, E>(
         user_id: UserId,
         exec: E,
     ) -> Result<Vec<Notification>, sqlx::Error>
     where
         E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
     {
-        let notification_ids = sqlx::query!(
-            "
-            SELECT id
-            FROM notifications
-            WHERE user_id = $1
-            ",
-            user_id as UserId
-        )
-        .fetch_all(exec)
-        .await?
-        .into_iter()
-        .map(|x| NotificationId(x.id))
-        .collect();
-
-        Self::get_many(notification_ids, exec).await
+        use futures::stream::TryStreamExt;
+
+        sqlx::query!(
+            "
+            SELECT n.id, n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type,
+            STRING_AGG(DISTINCT na.id || ' |||| ' || na.title || ' |||| ' || na.action_route || ' |||| ' || na.action_route_method, ' ~~~~ ') actions
+            FROM notifications n
+            LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id
+            WHERE n.user_id = $1
+            GROUP BY n.id, n.user_id;
+            ",
+            user_id as UserId
+        )
+        .fetch_many(exec)
+        .try_filter_map(|e| async {
+            Ok(e.right().map(|row| {
+                let id = NotificationId(row.id);
+                let mut actions: Vec<NotificationAction> = Vec::new();
+
+                row.actions.unwrap_or_default().split(" ~~~~ ").for_each(|x| {
+                    let action: Vec<&str> = x.split(" |||| ").collect();
+
+                    if action.len() >= 3 {
+                        actions.push(NotificationAction {
+                            id: NotificationActionId(action[0].parse().unwrap_or(0)),
+                            notification_id: id,
+                            title: action[1].to_string(),
+                            action_route_method: action[3].to_string(),
+                            action_route: action[2].to_string(),
+                        });
+                    }
+                });
+
+                Notification {
+                    id,
+                    user_id: UserId(row.user_id),
+                    notification_type: row.notification_type,
+                    title: row.title,
+                    text: row.text,
+                    link: row.link,
+                    read: row.read,
+                    created: row.created,
+                    actions,
+                }
+            }))
+        })
+        .try_collect::<Vec<Notification>>()
+        .await
     }

     pub async fn remove(
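Judging by the `e.right()` calls, `fetch_many` yields a stream of `Either` values: statement results on the left, rows on the right. A self-contained sketch of that filtering pattern with stand-in types, assuming the `futures` and `either` crates (the real stream comes from sqlx, of course):

    use either::Either;
    use futures::stream::{self, TryStreamExt};

    futures::executor::block_on(async {
        // Stand-ins for what fetch_many yields: Left = statement result, Right = a row.
        let stream = stream::iter(vec![
            Ok::<Either<u64, &str>, std::convert::Infallible>(Either::Left(1)),
            Ok(Either::Right("row 1")),
            Ok(Either::Right("row 2")),
        ]);

        // Keep only the rows, mirroring the .try_filter_map calls above.
        let rows: Vec<&str> = stream
            .try_filter_map(|e| async move { Ok(e.right()) })
            .try_collect()
            .await
            .unwrap();

        assert_eq!(rows, vec!["row 1", "row 2"]);
    });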
@@ -1,4 +1,5 @@
 use super::ids::*;
+use crate::database::models::convert_postgres_date;
 use time::OffsetDateTime;

 #[derive(Clone, Debug)]
@@ -719,11 +720,7 @@ impl Project {
                 } else {
                     Some(strings[4].to_string())
                 },
-                created: OffsetDateTime::parse(
-                    strings[2],
-                    time::Format::Rfc3339,
-                )
-                .unwrap_or_else(|_| OffsetDateTime::now_utc()),
+                created: convert_postgres_date(strings[2]),
             })
         } else {
             None
@@ -835,7 +832,7 @@ impl Project {
             featured: strings[1].parse().unwrap_or(false),
             title: if strings[3] == " " { None } else { Some(strings[3].to_string()) },
             description: if strings[4] == " " { None } else { Some(strings[4].to_string()) },
-            created: OffsetDateTime::parse(strings[2], time::Format::Rfc3339).unwrap_or_else(|_| OffsetDateTime::now_utc())
+            created: convert_postgres_date(strings[2])
         })
     } else {
         None
@@ -1,5 +1,6 @@
 use super::ids::*;
 use super::DatabaseError;
+use crate::database::models::convert_postgres_date;
 use std::collections::HashMap;
 use time::OffsetDateTime;

@@ -498,22 +499,20 @@ impl Version {

         let vec = sqlx::query!(
             "
-            SELECT version.id FROM (
-                SELECT DISTINCT ON(v.id) v.id, v.date_published FROM versions v
-                INNER JOIN game_versions_versions gvv ON gvv.joining_version_id = v.id
-                INNER JOIN game_versions gv on gvv.game_version_id = gv.id AND (cardinality($2::varchar[]) = 0 OR gv.version = ANY($2::varchar[]))
-                INNER JOIN loaders_versions lv ON lv.version_id = v.id
-                INNER JOIN loaders l on lv.loader_id = l.id AND (cardinality($3::varchar[]) = 0 OR l.loader = ANY($3::varchar[]))
-                WHERE v.mod_id = $1
-            ) AS version
-            ORDER BY version.date_published ASC
+            SELECT DISTINCT ON(v.date_published, v.id) v.id version_id, v.date_published FROM versions v
+            INNER JOIN game_versions_versions gvv ON gvv.joining_version_id = v.id
+            INNER JOIN game_versions gv on gvv.game_version_id = gv.id AND (cardinality($2::varchar[]) = 0 OR gv.version = ANY($2::varchar[]))
+            INNER JOIN loaders_versions lv ON lv.version_id = v.id
+            INNER JOIN loaders l on lv.loader_id = l.id AND (cardinality($3::varchar[]) = 0 OR l.loader = ANY($3::varchar[]))
+            WHERE v.mod_id = $1
+            ORDER BY v.date_published, v.id ASC
             ",
             project_id as ProjectId,
             &game_versions.unwrap_or_default(),
             &loaders.unwrap_or_default(),
         )
         .fetch_many(exec)
-        .try_filter_map(|e| async { Ok(e.right().map(|v| VersionId(v.id))) })
+        .try_filter_map(|e| async { Ok(e.right().map(|v| VersionId(v.version_id))) })
         .try_collect::<Vec<VersionId>>()
         .await?;
@@ -615,7 +614,7 @@ impl Version {
             SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,
             v.changelog changelog, v.changelog_url changelog_url, v.date_published date_published, v.downloads downloads,
             v.version_type version_type, v.featured featured,
-            STRING_AGG(DISTINCT gv.version, ' ~~~~ ') game_versions, STRING_AGG(DISTINCT l.loader, ' ~~~~ ') loaders,
+            STRING_AGG(DISTINCT gv.version || ' |||| ' || gv.created, ' ~~~~ ') game_versions, STRING_AGG(DISTINCT l.loader, ' ~~~~ ') loaders,
             STRING_AGG(DISTINCT f.id || ' |||| ' || f.is_primary || ' |||| ' || f.size || ' |||| ' || f.url || ' |||| ' || f.filename, ' ~~~~ ') files,
             STRING_AGG(DISTINCT h.algorithm || ' |||| ' || encode(h.hash, 'escape') || ' |||| ' || h.file_id, ' ~~~~ ') hashes,
             STRING_AGG(DISTINCT COALESCE(d.dependency_id, 0) || ' |||| ' || COALESCE(d.mod_dependency_id, 0) || ' |||| ' || d.dependency_type || ' |||| ' || COALESCE(d.dependency_file_name, ' '), ' ~~~~ ') dependencies
@@ -636,26 +635,6 @@ impl Version {
         .await?;

         if let Some(v) = result {
-            let hashes: Vec<(FileId, String, Vec<u8>)> = v
-                .hashes
-                .unwrap_or_default()
-                .split(" ~~~~ ")
-                .map(|f| {
-                    let hash: Vec<&str> = f.split(" |||| ").collect();
-
-                    if hash.len() >= 3 {
-                        Some((
-                            FileId(hash[2].parse().unwrap_or(0)),
-                            hash[0].to_string(),
-                            hash[1].to_string().into_bytes(),
-                        ))
-                    } else {
-                        None
-                    }
-                })
-                .flatten()
-                .collect();
-
             Ok(Some(QueryVersion {
                 id: VersionId(v.id),
                 project_id: ProjectId(v.mod_id),
@@ -666,44 +645,87 @@ impl Version {
                 changelog_url: v.changelog_url,
                 date_published: v.date_published,
                 downloads: v.downloads,
-                files: v
-                    .files
-                    .unwrap_or_default()
-                    .split(" ~~~~ ")
-                    .map(|f| {
-                        let file: Vec<&str> = f.split(" |||| ").collect();
-
-                        if file.len() >= 5 {
-                            let file_id = FileId(file[0].parse().unwrap_or(0));
-                            let mut file_hashes = HashMap::new();
-
-                            for hash in &hashes {
-                                if (hash.0).0 == file_id.0 {
-                                    file_hashes
-                                        .insert(hash.1.clone(), hash.2.clone());
-                                }
-                            }
-
-                            Some(QueryFile {
-                                id: file_id,
-                                url: file[3].to_string(),
-                                filename: file[4].to_string(),
-                                hashes: file_hashes,
-                                primary: file[1].parse().unwrap_or(false),
-                                size: file[2].parse().unwrap_or(0),
-                            })
-                        } else {
-                            None
-                        }
-                    })
-                    .flatten()
-                    .collect(),
-                game_versions: v
-                    .game_versions
-                    .unwrap_or_default()
-                    .split(" ~~~~ ")
-                    .map(|x| x.to_string())
-                    .collect(),
+                files: {
+                    let hashes: Vec<(FileId, String, Vec<u8>)> = v
+                        .hashes
+                        .unwrap_or_default()
+                        .split(" ~~~~ ")
+                        .map(|f| {
+                            let hash: Vec<&str> = f.split(" |||| ").collect();
+
+                            if hash.len() >= 3 {
+                                Some((
+                                    FileId(hash[2].parse().unwrap_or(0)),
+                                    hash[0].to_string(),
+                                    hash[1].to_string().into_bytes(),
+                                ))
+                            } else {
+                                None
+                            }
+                        })
+                        .flatten()
+                        .collect();
+
+                    v.files
+                        .unwrap_or_default()
+                        .split(" ~~~~ ")
+                        .map(|f| {
+                            let file: Vec<&str> = f.split(" |||| ").collect();
+
+                            if file.len() >= 5 {
+                                let file_id =
+                                    FileId(file[0].parse().unwrap_or(0));
+                                let mut file_hashes = HashMap::new();
+
+                                for hash in &hashes {
+                                    if (hash.0).0 == file_id.0 {
+                                        file_hashes.insert(
+                                            hash.1.clone(),
+                                            hash.2.clone(),
+                                        );
+                                    }
+                                }
+
+                                Some(QueryFile {
+                                    id: file_id,
+                                    url: file[3].to_string(),
+                                    filename: file[4].to_string(),
+                                    hashes: file_hashes,
+                                    primary: file[1].parse().unwrap_or(false),
+                                    size: file[2].parse().unwrap_or(0),
+                                })
+                            } else {
+                                None
+                            }
+                        })
+                        .flatten()
+                        .collect()
+                },
+                game_versions: {
+                    let game_versions = v.game_versions.unwrap_or_default();
+
+                    let mut gv = game_versions
+                        .split(" ~~~~ ")
+                        .flat_map(|x| {
+                            let version: Vec<&str> =
+                                x.split(" |||| ").collect();

+                            if version.len() >= 2 {
+                                Some((
+                                    version[0],
+                                    convert_postgres_date(version[1])
+                                        .unix_timestamp(),
+                                ))
+                            } else {
+                                None
+                            }
+                        })
+                        .collect::<Vec<(&str, i64)>>();
+
+                    gv.sort_by(|a, b| a.1.cmp(&b.1));
+
+                    gv.into_iter().map(|x| x.0.to_string()).collect()
+                },
                 loaders: v
                     .loaders
                     .unwrap_or_default()
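Moving the hash parsing into the `files` block scopes the intermediate `Vec<(FileId, String, Vec<u8>)>` to where it is consumed. The grouping step itself is a plain linear scan that buckets digests by file id; a runnable sketch with invented data and plain i64 ids standing in for `FileId`:

    use std::collections::HashMap;

    // Hypothetical parsed tuples: (file id, algorithm, digest bytes).
    let hashes: Vec<(i64, String, Vec<u8>)> = vec![
        (1, "sha1".into(), b"aaaa".to_vec()),
        (1, "sha512".into(), b"bbbb".to_vec()),
        (2, "sha1".into(), b"cccc".to_vec()),
    ];

    // For one file, collect every algorithm/digest pair that belongs to it.
    let file_id = 1;
    let mut file_hashes = HashMap::new();
    for hash in &hashes {
        if hash.0 == file_id {
            file_hashes.insert(hash.1.clone(), hash.2.clone());
        }
    }
    assert_eq!(file_hashes.len(), 2); // both digests for file 1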
@@ -770,7 +792,7 @@ impl Version {
             SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,
             v.changelog changelog, v.changelog_url changelog_url, v.date_published date_published, v.downloads downloads,
             v.version_type version_type, v.featured featured,
-            STRING_AGG(DISTINCT gv.version, ' ~~~~ ') game_versions, STRING_AGG(DISTINCT l.loader, ' ~~~~ ') loaders,
+            STRING_AGG(DISTINCT gv.version || ' |||| ' || gv.created, ' ~~~~ ') game_versions, STRING_AGG(DISTINCT l.loader, ' ~~~~ ') loaders,
             STRING_AGG(DISTINCT f.id || ' |||| ' || f.is_primary || ' |||| ' || f.size || ' |||| ' || f.url || ' |||| ' || f.filename, ' ~~~~ ') files,
             STRING_AGG(DISTINCT h.algorithm || ' |||| ' || encode(h.hash, 'escape') || ' |||| ' || h.file_id, ' ~~~~ ') hashes,
             STRING_AGG(DISTINCT COALESCE(d.dependency_id, 0) || ' |||| ' || COALESCE(d.mod_dependency_id, 0) || ' |||| ' || d.dependency_type || ' |||| ' || COALESCE(d.dependency_file_name, ' '), ' ~~~~ ') dependencies
@@ -790,21 +812,7 @@ impl Version {
         )
         .fetch_many(exec)
         .try_filter_map(|e| async {
-            Ok(e.right().map(|v| {
-                let hashes: Vec<(FileId, String, Vec<u8>)> = v.hashes.unwrap_or_default().split(" ~~~~ ").map(|f| {
-                    let hash: Vec<&str> = f.split(" |||| ").collect();
-
-                    if hash.len() >= 3 {
-                        Some((
-                            FileId(hash[2].parse().unwrap_or(0)),
-                            hash[0].to_string(),
-                            hash[1].to_string().into_bytes(),
-                        ))
-                    } else {
-                        None
-                    }
-                }).flatten().collect();
-
+            Ok(e.right().map(|v|
                 QueryVersion {
                     id: VersionId(v.id),
                     project_id: ProjectId(v.mod_id),
@@ -815,32 +823,71 @@ impl Version {
                     changelog_url: v.changelog_url,
                     date_published: v.date_published,
                     downloads: v.downloads,
-                    files: v.files.unwrap_or_default().split(" ~~~~ ").map(|f| {
-                        let file: Vec<&str> = f.split(" |||| ").collect();
-
-                        if file.len() >= 5 {
-                            let file_id = FileId(file[0].parse().unwrap_or(0));
-                            let mut file_hashes = HashMap::new();
-
-                            for hash in &hashes {
-                                if (hash.0).0 == file_id.0 {
-                                    file_hashes.insert(hash.1.clone(), hash.2.clone());
-                                }
-                            }
-
-                            Some(QueryFile {
-                                id: file_id,
-                                url: file[3].to_string(),
-                                filename: file[4].to_string(),
-                                hashes: file_hashes,
-                                primary: file[1].parse().unwrap_or(false),
-                                size: file[2].parse().unwrap_or(0),
-                            })
-                        } else {
-                            None
-                        }
-                    }).flatten().collect(),
-                    game_versions: v.game_versions.unwrap_or_default().split(" ~~~~ ").map(|x| x.to_string()).collect(),
+                    files: {
+                        let hashes: Vec<(FileId, String, Vec<u8>)> = v.hashes.unwrap_or_default().split(" ~~~~ ").map(|f| {
+                            let hash: Vec<&str> = f.split(" |||| ").collect();
+
+                            if hash.len() >= 3 {
+                                Some((
+                                    FileId(hash[2].parse().unwrap_or(0)),
+                                    hash[0].to_string(),
+                                    hash[1].to_string().into_bytes(),
+                                ))
+                            } else {
+                                None
+                            }
+                        }).flatten().collect();
+
+                        v.files.unwrap_or_default().split(" ~~~~ ").map(|f| {
+                            let file: Vec<&str> = f.split(" |||| ").collect();
+
+                            if file.len() >= 5 {
+                                let file_id = FileId(file[0].parse().unwrap_or(0));
+                                let mut file_hashes = HashMap::new();
+
+                                for hash in &hashes {
+                                    if (hash.0).0 == file_id.0 {
+                                        file_hashes.insert(hash.1.clone(), hash.2.clone());
+                                    }
+                                }
+
+                                Some(QueryFile {
+                                    id: file_id,
+                                    url: file[3].to_string(),
+                                    filename: file[4].to_string(),
+                                    hashes: file_hashes,
+                                    primary: file[1].parse().unwrap_or(false),
+                                    size: file[2].parse().unwrap_or(0),
+                                })
+                            } else {
+                                None
+                            }
+                        }).flatten().collect()
+                    },
+                    game_versions: {
+                        let game_versions = v
+                            .game_versions
+                            .unwrap_or_default();
+
+                        let mut gv = game_versions
+                            .split(" ~~~~ ")
+                            .flat_map(|x| {
+                                let version: Vec<&str> = x.split(" |||| ").collect();
+
+                                if version.len() >= 2 {
+                                    Some((version[0], convert_postgres_date(version[1]).unix_timestamp()))
+                                } else {
+                                    None
+                                }
+                            })
+                            .collect::<Vec<(&str, i64)>>();
+
+                        gv.sort_by(|a, b| a.1.cmp(&b.1));
+
+                        gv.into_iter()
+                            .map(|x| x.0.to_string())
+                            .collect()
+                    },
                     loaders: v.loaders.unwrap_or_default().split(" ~~~~ ").map(|x| x.to_string()).collect(),
                     featured: v.featured,
                     dependencies: v.dependencies
@@ -878,7 +925,7 @@ impl Version {
                     }).flatten().collect(),
                     version_type: v.version_type
                 }
-            }))
+            ))
         })
         .try_collect::<Vec<QueryVersion>>()
         .await
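Aggregating `gv.version || ' |||| ' || gv.created` lets the API return game versions in release order rather than whatever string order `STRING_AGG(DISTINCT ...)` produces, where "1.9" lands after "1.16" even though it shipped years earlier. A standalone sketch of the parse-and-sort with made-up dates; plain string comparison stands in for `unix_timestamp()` since `YYYY-MM-DD HH:MM` text sorts chronologically:

    // Hypothetical aggregated column: "version |||| created" records, one per game version.
    let game_versions = "1.16.5 |||| 2021-01-15 09:30 ~~~~ 1.9 |||| 2016-05-14 10:00";

    let mut gv: Vec<(&str, &str)> = game_versions
        .split(" ~~~~ ")
        .flat_map(|x| {
            let v: Vec<&str> = x.split(" |||| ").collect();
            (v.len() >= 2).then(|| (v[0], v[1]))
        })
        .collect();

    gv.sort_by(|a, b| a.1.cmp(b.1)); // oldest first

    let ordered: Vec<&str> = gv.into_iter().map(|x| x.0).collect();
    assert_eq!(ordered, vec!["1.9", "1.16.5"]); // release order, not string order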
@@ -253,9 +253,7 @@ async fn main() -> std::io::Result<()> {
                     })
                     .with_interval(std::time::Duration::from_secs(60))
                     .with_max_requests(300)
-                    .with_ignore_key(
-                        dotenv::var("RATE_LIMIT_IGNORE_KEY").ok(),
-                    ),
+                    .with_ignore_key(dotenv::var("RATE_LIMIT_IGNORE_KEY").ok()),
             )
             .app_data(web::Data::new(pool.clone()))
             .app_data(web::Data::new(file_host.clone()))
@@ -296,6 +294,11 @@ fn check_env_vars() -> bool {
         failed |= true;
     }

+    if parse_strings_from_var("ALLOWED_CALLBACK_URLS").is_none() {
+        warn!("Variable `ALLOWED_CALLBACK_URLS` missing in dotenv or not a json array of strings");
+        failed |= true;
+    }
+
     failed |= check_var::<String>("SITE_URL");
     failed |= check_var::<String>("CDN_URL");
     failed |= check_var::<String>("LABRINTH_ADMIN_KEY");
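`parse_strings_from_var` itself isn't shown in this diff; judging by the warning text, it reads the variable and JSON-decodes it into a `Vec<String>`. A hedged sketch of what such a helper presumably looks like, with a hypothetical .env entry:

    // Assumed shape of the helper (not part of this diff):
    pub fn parse_strings_from_var(var: &str) -> Option<Vec<String>> {
        dotenv::var(var)
            .ok()
            .and_then(|s| serde_json::from_str::<Vec<String>>(&s).ok())
    }

    // Works against a .env entry like:
    //   ALLOWED_CALLBACK_URLS=["modrinth.com","localhost"]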
@@ -51,7 +51,7 @@ where
             max_requests: 0,
             store,
             identifier: Rc::new(Box::new(identifier)),
-            ignore_key: None
+            ignore_key: None,
         }
     }
@@ -1,3 +1,16 @@
+/*!
+This auth module is primarily for use within the main website. Applications interacting with the
+authenticated API (a very small portion - notifications, private projects, editing/creating projects
+and versions) should either retrieve the Modrinth GitHub token through the site, or create a personal
+app token for use with Modrinth.
+
+Just as a summary: don't implement this flow in your application! Instead, use a personal access token
+or create your own GitHub OAuth2 application.
+
+This system will be revisited to allow easier interaction with the authenticated API once we roll
+out our own authentication system.
+*/
+
 use crate::database::models::{generate_state_id, User};
 use crate::models::error::ApiError;
 use crate::models::ids::base62_impl::{parse_base62, to_base62};
@@ -11,6 +24,7 @@ use serde::{Deserialize, Serialize};
 use sqlx::postgres::PgPool;
 use thiserror::Error;
 use time::OffsetDateTime;
+use crate::parse_strings_from_var;

 pub fn config(cfg: &mut ServiceConfig) {
     cfg.service(scope("auth").service(auth_callback).service(init));
@@ -34,6 +48,8 @@ pub enum AuthorizationError {
     Authentication(#[from] crate::util::auth::AuthenticationError),
     #[error("Error while decoding Base62")]
     Decoding(#[from] DecodingError),
+    #[error("Invalid callback URL specified")]
+    Url,
 }
 impl actix_web::ResponseError for AuthorizationError {
     fn status_code(&self) -> StatusCode {
@@ -50,6 +66,7 @@ impl actix_web::ResponseError for AuthorizationError {
             AuthorizationError::InvalidCredentials => StatusCode::UNAUTHORIZED,
             AuthorizationError::Decoding(..) => StatusCode::BAD_REQUEST,
             AuthorizationError::Authentication(..) => StatusCode::UNAUTHORIZED,
+            AuthorizationError::Url => StatusCode::BAD_REQUEST,
         }
     }

@@ -65,7 +82,8 @@ impl actix_web::ResponseError for AuthorizationError {
             AuthorizationError::Decoding(..) => "decoding_error",
             AuthorizationError::Authentication(..) => {
                 "authentication_error"
             }
-        },
+            AuthorizationError::Url => "url_error",
+        },
             description: &self.to_string(),
         })
@@ -96,6 +114,16 @@ pub async fn init(
     Query(info): Query<AuthorizationInit>,
     client: Data<PgPool>,
 ) -> Result<HttpResponse, AuthorizationError> {
+    let url = url::Url::parse(&info.url).map_err(|_| AuthorizationError::Url)?;
+
+    let allowed_callback_urls = parse_strings_from_var("ALLOWED_CALLBACK_URLS")
+        .unwrap_or_default();
+
+    let domain = url.domain().ok_or(AuthorizationError::Url)?;
+    if !allowed_callback_urls.iter().any(|x| domain.ends_with(x)) {
+        return Err(AuthorizationError::Url);
+    }
+
     let mut transaction = client.begin().await?;

     let state = generate_state_id(&mut transaction).await?;
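This guard is the "secure auth route" part of the commit: the endpoint now refuses to mint an OAuth state for a redirect target whose domain isn't on the allow-list, closing what is effectively an open redirect. A minimal sketch with hypothetical values; note the suffix match deliberately lets subdomains through, though it would also accept a domain that merely ends with an allowed entry (e.g. `notmodrinth.com` vs. `modrinth.com`):

    // Hypothetical configuration and request:
    let allowed_callback_urls = vec!["modrinth.com".to_string(), "localhost".to_string()];
    let url = url::Url::parse("https://staging.modrinth.com/dashboard").unwrap();

    let domain = url.domain().unwrap(); // "staging.modrinth.com"
    assert!(allowed_callback_urls.iter().any(|x| domain.ends_with(x)));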
@@ -136,7 +164,7 @@ pub async fn auth_callback(

     let result_option = sqlx::query!(
         "
-        SELECT url,expires FROM states
+        SELECT url, expires FROM states
         WHERE id = $1
         ",
         state_id as i64
@@ -145,13 +173,11 @@ pub async fn auth_callback(
     .await?;

     if let Some(result) = result_option {
-        // let now = OffsetDateTime::now_utc();
-        // TODO: redo this condition later..
-        // let duration = now - result.expires;
-        //
-        // if duration.whole_seconds() < 0 {
-        //     return Err(AuthorizationError::InvalidCredentials);
-        // }
+        let duration = result.expires - OffsetDateTime::now_utc();
+
+        if duration.whole_seconds() < 0 {
+            return Err(AuthorizationError::InvalidCredentials);
+        }

         sqlx::query!(
             "
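Re-enabling this check is the other half of the hardening: a state row is only honored while its `expires` column lies in the future, so a leaked or replayed state id stops working once the window closes. A sketch of the arithmetic, assuming a hypothetical five-minute lifetime:

    use time::{Duration, OffsetDateTime};

    // Hypothetical row: the state was created with a five-minute lifetime.
    let expires = OffsetDateTime::now_utc() + Duration::minutes(5);

    let duration = expires - OffsetDateTime::now_utc();
    assert!(duration.whole_seconds() >= 0); // negative would mean expired -> InvalidCredentials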
@@ -385,18 +385,29 @@ pub async fn transfer_ownership(
     let id = info.into_inner().0;

     let current_user = get_user_from_headers(req.headers(), &**pool).await?;
-    let member = TeamMember::get_from_user_id(
-        id.into(),
-        current_user.id.into(),
-        &**pool,
-    )
-    .await?
-    .ok_or_else(|| {
-        ApiError::CustomAuthentication(
-            "You don't have permission to edit members of this team"
-                .to_string(),
-        )
-    })?;
+
+    if !current_user.role.is_mod() {
+        let member = TeamMember::get_from_user_id(
+            id.into(),
+            current_user.id.into(),
+            &**pool,
+        )
+        .await?
+        .ok_or_else(|| {
+            ApiError::CustomAuthentication(
+                "You don't have permission to edit members of this team"
+                    .to_string(),
+            )
+        })?;
+
+        if member.role != crate::models::teams::OWNER_ROLE {
+            return Err(ApiError::CustomAuthentication(
+                "You don't have permission to edit the ownership of this team"
+                    .to_string(),
+            ));
+        }
+    }

     let new_member = TeamMember::get_from_user_id(
         id.into(),
         new_owner.user_id.into(),
@@ -409,13 +420,6 @@ pub async fn transfer_ownership(
         )
     })?;

-    if member.role != crate::models::teams::OWNER_ROLE {
-        return Err(ApiError::CustomAuthentication(
-            "You don't have permission to edit the ownership of this team"
-                .to_string(),
-        ));
-    }
-
     if !new_member.accepted {
         return Err(ApiError::InvalidInput(
             "You can only transfer ownership to members who are currently in your team".to_string(),
@@ -35,7 +35,7 @@ pub struct InitialVersionData {
         regex = "crate::util::validate::RE_URL_SAFE"
     )]
     pub version_number: String,
-    #[validate(length(min = 3, max = 256))]
+    #[validate(length(min = 1, max = 256))]
     #[serde(alias = "name")]
     pub version_title: String,
     #[validate(length(max = 65536))]
@@ -639,11 +639,11 @@ pub async fn upload_file(
     field: &mut Field,
     file_host: &dyn FileHost,
     uploaded_files: &mut Vec<UploadedFile>,
-    version_files: &mut Vec<models::version_item::VersionFileBuilder>,
-    dependencies: &mut Vec<models::version_item::DependencyBuilder>,
+    version_files: &mut Vec<VersionFileBuilder>,
+    dependencies: &mut Vec<DependencyBuilder>,
     cdn_url: &str,
     content_disposition: &actix_web::http::header::ContentDisposition,
-    project_id: crate::models::ids::ProjectId,
+    project_id: ProjectId,
     version_number: &str,
     project_type: &str,
     loaders: Vec<Loader>,
@@ -303,7 +303,7 @@ pub async fn get_versions_from_hashes(
     let hashes_parsed: Vec<Vec<u8>> = file_data
         .hashes
         .iter()
-        .map(|x| x.as_bytes().to_vec())
+        .map(|x| x.to_lowercase().as_bytes().to_vec())
         .collect();

     let result = sqlx::query!(
@@ -360,7 +360,7 @@ pub async fn download_files(
     let hashes_parsed: Vec<Vec<u8>> = file_data
         .hashes
         .iter()
-        .map(|x| x.as_bytes().to_vec())
+        .map(|x| x.to_lowercase().as_bytes().to_vec())
         .collect();

     let mut transaction = pool.begin().await?;
@@ -411,7 +411,7 @@ pub async fn update_files(
     let hashes_parsed: Vec<Vec<u8>> = update_data
         .hashes
         .iter()
-        .map(|x| x.as_bytes().to_vec())
+        .map(|x| x.to_lowercase().as_bytes().to_vec())
         .collect();

     let mut transaction = pool.begin().await?;
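The hash columns are compared byte-for-byte and presumably hold lowercase hex (hence the commit note about adding `to_lowercase` to the hashes functions), so a client sending an uppercase SHA-1 would previously miss every row; normalizing before the lookup makes all three endpoints case-insensitive. The fix in isolation, with a hypothetical digest:

    // Hypothetical client-supplied digest in uppercase hex:
    let supplied = "35B4ECDE".to_string();

    let before = supplied.as_bytes().to_vec();               // misses "35b4ecde"
    let after = supplied.to_lowercase().as_bytes().to_vec(); // matches it

    assert_ne!(before, after);
    assert_eq!(after, b"35b4ecde".to_vec());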
@@ -20,7 +20,7 @@ impl super::Validator for PackValidator {
     }

     fn get_supported_loaders(&self) -> &[&str] {
-        &["forge", "fabric"]
+        &["forge", "fabric", "quilt"]
     }

     fn get_supported_game_versions(&self) -> SupportedGameVersions {