Add redis caching to getting user notifications and projects [MOD-540] (#723)

* Add redis caching to getting a user's project ids

* Run `cargo sqlx prepare` to update the sqlx-data.json

* Add redis caching for getting user notifications

* Fix new clippy warnings

* Remove log that shouldn't have been committed

* Batch insert of notifications (untested)

* sqlx prepare...

* Fix merge conflict things and use new redis struct

* Fix bug with calling delete_many without any elements (caught by tests)

* cargo sqlx prepare

* Add tests around cache invalidation (and fix bug they caught!)

* Some test reorg based on code review suggestions
This commit is contained in:
Jackson Kruger
2023-10-12 17:52:24 -05:00
committed by GitHub
parent d66270eef0
commit abf4cd71ba
34 changed files with 848 additions and 379 deletions

View File

@@ -707,18 +707,6 @@
},
"query": "\n UPDATE mods\n SET webhook_sent = TRUE\n WHERE id = $1\n "
},
"127691940ca7e542e246dd2a1c9cb391041b30ddf0547d73b49c1dd9dc59d2ae": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Left": [
"Int8Array"
]
}
},
"query": "\n UPDATE notifications\n SET read = TRUE\n WHERE id = ANY($1)\n "
},
"15fac93c76e72348b50f526e1acb183521d94be335ad8b9dfeb0398d4a8a2fc4": {
"describe": {
"columns": [],
@@ -2353,18 +2341,6 @@
},
"query": "\n SELECT id FROM threads\n WHERE report_id = $1\n "
},
"599df07263a2705e57fc70a7c4f5dc606e1730c281e3b573d2f2a2030bed04e0": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Left": [
"Int8Array"
]
}
},
"query": "\n DELETE FROM notifications\n WHERE id = ANY($1)\n "
},
"59e95e832615c375753bfc9a56b07c02d916399adfa52fb11a79b8f7b56ecf8b": {
"describe": {
"columns": [
@@ -3720,6 +3696,20 @@
},
"query": "\n UPDATE pats\n SET expires = $1\n WHERE id = $2\n "
},
"8a9bf48b3d4aa665136568a9bf9ddb8e5d81ed27ce587e26672dfb45a44c7b9c": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Left": [
"Int8Array",
"Int8Array",
"JsonbArray"
]
}
},
"query": "\n INSERT INTO notifications (\n id, user_id, body\n )\n SELECT * FROM UNNEST($1::bigint[], $2::bigint[], $3::jsonb[])\n "
},
"8abb317c85f48c7dd9ccf4a7b8fbc0b58ac73f7ae87ff2dfe67009a51089f784": {
"describe": {
"columns": [],
@@ -3861,6 +3851,26 @@
},
"query": "\n UPDATE threads\n SET show_in_mod_inbox = FALSE\n WHERE id = $1\n "
},
"8f74918aa923e516b6b2967b7d1afbd02c8bde5466d22ad60ad735f8358cbf04": {
"describe": {
"columns": [
{
"name": "user_id",
"ordinal": 0,
"type_info": "Int8"
}
],
"nullable": [
false
],
"parameters": {
"Left": [
"Int8"
]
}
},
"query": "\n DELETE FROM team_members\n WHERE team_id = $1\n RETURNING user_id\n "
},
"912250d37f13a98a21165c72bfc1eaa8a85b9952dd6750c117dca7fbb1bb8962": {
"describe": {
"columns": [],
@@ -5426,20 +5436,6 @@
},
"query": "\n DELETE FROM threads_messages\n WHERE thread_id = $1\n "
},
"d2c046d4bedeb7181ece4e94d7de90c97bd3dd1b0c16070704028923a0c2834a": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Left": [
"Int8",
"Int8",
"Jsonb"
]
}
},
"query": "\n INSERT INTO notifications (\n id, user_id, body\n )\n VALUES (\n $1, $2, $3\n )\n "
},
"d2e826d4fa4e3e730cc84c97964c0c5fdd25cd49ddff8c593bd9b8a3b4d5ff1e": {
"describe": {
"columns": [],
@@ -6291,6 +6287,26 @@
},
"query": "SELECT id FROM users WHERE discord_id = $1"
},
"ee375e658423156a758cc372400961f627fa5a620a3f61e37ec09fee1d7bb4e3": {
"describe": {
"columns": [
{
"name": "user_id",
"ordinal": 0,
"type_info": "Int8"
}
],
"nullable": [
false
],
"parameters": {
"Left": [
"Int8Array"
]
}
},
"query": "\n DELETE FROM notifications\n WHERE id = ANY($1)\n RETURNING user_id\n "
},
"eec6d4028d790e57a4d97fc5a200a9ae2b3d2cb60ee83c51fb05180b821558f5": {
"describe": {
"columns": [],
@@ -6470,6 +6486,26 @@
},
"query": "\n UPDATE users\n SET bio = $1\n WHERE (id = $2)\n "
},
"f775506213dbf4bf0ee05fd53c693412e3baae64b6dc0aead8082059f16755bc": {
"describe": {
"columns": [
{
"name": "user_id",
"ordinal": 0,
"type_info": "Int8"
}
],
"nullable": [
false
],
"parameters": {
"Left": [
"Int8Array"
]
}
},
"query": "\n UPDATE notifications\n SET read = TRUE\n WHERE id = ANY($1)\n RETURNING user_id\n "
},
"f793e96499ff35f8dc2e420484c2a0cdb54f25ffa27caa081691779ab896a709": {
"describe": {
"columns": [],

View File

@@ -102,10 +102,9 @@ impl Category {
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
let res = redis
.get::<String, _>(TAGS_NAMESPACE, "category")
.await?
.and_then(|x| serde_json::from_str::<Vec<Category>>(&x).ok());
let res: Option<Vec<Category>> = redis
.get_deserialized_from_json(TAGS_NAMESPACE, "category")
.await?;
if let Some(res) = res {
return Ok(res);
@@ -133,12 +132,7 @@ impl Category {
.await?;
redis
.set(
TAGS_NAMESPACE,
"category",
serde_json::to_string(&result)?,
None,
)
.set_serialized_to_json(TAGS_NAMESPACE, "category", &result, None)
.await?;
Ok(result)
@@ -167,10 +161,9 @@ impl Loader {
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
let res = redis
.get::<String, _>(TAGS_NAMESPACE, "loader")
.await?
.and_then(|x| serde_json::from_str::<Vec<Loader>>(&x).ok());
let res: Option<Vec<Loader>> = redis
.get_deserialized_from_json(TAGS_NAMESPACE, "loader")
.await?;
if let Some(res) = res {
return Ok(res);
@@ -204,12 +197,7 @@ impl Loader {
.await?;
redis
.set(
TAGS_NAMESPACE,
"loader",
serde_json::to_string(&result)?,
None,
)
.set_serialized_to_json(TAGS_NAMESPACE, "loader", &result, None)
.await?;
Ok(result)
@@ -252,10 +240,9 @@ impl GameVersion {
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
let res = redis
.get::<String, _>(TAGS_NAMESPACE, "game_version")
.await?
.and_then(|x| serde_json::from_str::<Vec<GameVersion>>(&x).ok());
let res: Option<Vec<GameVersion>> = redis
.get_deserialized_from_json(TAGS_NAMESPACE, "game_version")
.await?;
if let Some(res) = res {
return Ok(res);
@@ -279,12 +266,7 @@ impl GameVersion {
.await?;
redis
.set(
TAGS_NAMESPACE,
"game_version",
serde_json::to_string(&result)?,
None,
)
.set_serialized_to_json(TAGS_NAMESPACE, "game_version", &result, None)
.await?;
Ok(result)
}
@@ -400,10 +382,9 @@ impl DonationPlatform {
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
let res = redis
.get::<String, _>(TAGS_NAMESPACE, "donation_platform")
.await?
.and_then(|x| serde_json::from_str::<Vec<DonationPlatform>>(&x).ok());
let res: Option<Vec<DonationPlatform>> = redis
.get_deserialized_from_json(TAGS_NAMESPACE, "donation_platform")
.await?;
if let Some(res) = res {
return Ok(res);
@@ -426,12 +407,7 @@ impl DonationPlatform {
.await?;
redis
.set(
TAGS_NAMESPACE,
"donation_platform",
serde_json::to_string(&result)?,
None,
)
.set_serialized_to_json(TAGS_NAMESPACE, "donation_platform", &result, None)
.await?;
Ok(result)
@@ -460,10 +436,9 @@ impl ReportType {
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
let res = redis
.get::<String, _>(TAGS_NAMESPACE, "report_type")
.await?
.and_then(|x| serde_json::from_str::<Vec<String>>(&x).ok());
let res: Option<Vec<String>> = redis
.get_deserialized_from_json(TAGS_NAMESPACE, "report_type")
.await?;
if let Some(res) = res {
return Ok(res);
@@ -480,12 +455,7 @@ impl ReportType {
.await?;
redis
.set(
TAGS_NAMESPACE,
"report_type",
serde_json::to_string(&result)?,
None,
)
.set_serialized_to_json(TAGS_NAMESPACE, "report_type", &result, None)
.await?;
Ok(result)
@@ -514,10 +484,9 @@ impl ProjectType {
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
let res = redis
.get::<String, _>(TAGS_NAMESPACE, "project_type")
.await?
.and_then(|x| serde_json::from_str::<Vec<String>>(&x).ok());
let res: Option<Vec<String>> = redis
.get_deserialized_from_json(TAGS_NAMESPACE, "project_type")
.await?;
if let Some(res) = res {
return Ok(res);
@@ -534,12 +503,7 @@ impl ProjectType {
.await?;
redis
.set(
TAGS_NAMESPACE,
"project_type",
serde_json::to_string(&result)?,
None,
)
.set_serialized_to_json(TAGS_NAMESPACE, "project_type", &result, None)
.await?;
Ok(result)
@@ -568,10 +532,9 @@ impl SideType {
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
let res = redis
.get::<String, _>(TAGS_NAMESPACE, "side_type")
.await?
.and_then(|x| serde_json::from_str::<Vec<String>>(&x).ok());
let res: Option<Vec<String>> = redis
.get_deserialized_from_json(TAGS_NAMESPACE, "side_type")
.await?;
if let Some(res) = res {
return Ok(res);
@@ -588,12 +551,7 @@ impl SideType {
.await?;
redis
.set(
TAGS_NAMESPACE,
"side_type",
serde_json::to_string(&result)?,
None,
)
.set_serialized_to_json(TAGS_NAMESPACE, "side_type", &result, None)
.await?;
Ok(result)

View File

@@ -225,10 +225,10 @@ impl Collection {
for collection in db_collections {
redis
.set(
.set_serialized_to_json(
COLLECTIONS_NAMESPACE,
collection.id.0,
serde_json::to_string(&collection)?,
&collection,
None,
)
.await?;

View File

@@ -49,19 +49,13 @@ impl Flow {
.collect::<String>();
redis
.set(
FLOWS_NAMESPACE,
&flow,
serde_json::to_string(&self)?,
Some(expires.num_seconds()),
)
.set_serialized_to_json(FLOWS_NAMESPACE, &flow, &self, Some(expires.num_seconds()))
.await?;
Ok(flow)
}
pub async fn get(id: &str, redis: &RedisPool) -> Result<Option<Flow>, DatabaseError> {
let res = redis.get::<String, _>(FLOWS_NAMESPACE, id).await?;
Ok(res.and_then(|x| serde_json::from_str(&x).ok()))
redis.get_deserialized_from_json(FLOWS_NAMESPACE, id).await
}
pub async fn remove(id: &str, redis: &RedisPool) -> Result<Option<()>, DatabaseError> {

View File

@@ -216,10 +216,10 @@ pub struct FileId(pub i64);
#[sqlx(transparent)]
pub struct PatId(pub i64);
#[derive(Copy, Clone, Debug, Type, Deserialize)]
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize)]
#[sqlx(transparent)]
pub struct NotificationId(pub i64);
#[derive(Copy, Clone, Debug, Type, Deserialize)]
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize)]
#[sqlx(transparent)]
pub struct NotificationActionId(pub i32);

View File

@@ -236,12 +236,7 @@ impl Image {
for image in db_images {
redis
.set(
IMAGES_NAMESPACE,
image.id.0,
serde_json::to_string(&image)?,
None,
)
.set_serialized_to_json(IMAGES_NAMESPACE, image.id.0, &image, None)
.await?;
found_images.push(image);
}

View File

@@ -1,13 +1,18 @@
use super::ids::*;
use crate::database::models::DatabaseError;
use crate::database::{models::DatabaseError, redis::RedisPool};
use crate::models::notifications::NotificationBody;
use chrono::{DateTime, Utc};
use serde::Deserialize;
use futures::TryStreamExt;
use itertools::Itertools;
use serde::{Deserialize, Serialize};
const USER_NOTIFICATIONS_NAMESPACE: &str = "user_notifications";
pub struct NotificationBuilder {
pub body: NotificationBody,
}
#[derive(Serialize, Deserialize)]
pub struct Notification {
pub id: NotificationId,
pub user_id: UserId,
@@ -16,7 +21,7 @@ pub struct Notification {
pub created: DateTime<Utc>,
}
#[derive(Deserialize)]
#[derive(Serialize, Deserialize)]
pub struct NotificationAction {
pub id: NotificationActionId,
pub notification_id: NotificationId,
@@ -30,54 +35,68 @@ impl NotificationBuilder {
&self,
user: UserId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
redis: &RedisPool,
) -> Result<(), DatabaseError> {
self.insert_many(vec![user], transaction).await
self.insert_many(vec![user], transaction, redis).await
}
pub async fn insert_many(
&self,
users: Vec<UserId>,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
redis: &RedisPool,
) -> Result<(), DatabaseError> {
let mut notifications = Vec::new();
for user in users {
let id = generate_notification_id(&mut *transaction).await?;
Notification {
notifications.push(Notification {
id,
user_id: user,
body: self.body.clone(),
read: false,
created: Utc::now(),
}
.insert(&mut *transaction)
.await?;
});
}
Notification::insert_many(&notifications, transaction, redis).await?;
Ok(())
}
}
impl Notification {
pub async fn insert(
&self,
pub async fn insert_many(
notifications: &[Notification],
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
redis: &RedisPool,
) -> Result<(), DatabaseError> {
let notification_ids = notifications.iter().map(|n| n.id.0).collect_vec();
let user_ids = notifications.iter().map(|n| n.user_id.0).collect_vec();
let bodies = notifications
.iter()
.map(|n| Ok(serde_json::value::to_value(n.body.clone())?))
.collect::<Result<Vec<_>, DatabaseError>>()?;
sqlx::query!(
"
INSERT INTO notifications (
id, user_id, body
)
VALUES (
$1, $2, $3
)
SELECT * FROM UNNEST($1::bigint[], $2::bigint[], $3::jsonb[])
",
self.id as NotificationId,
self.user_id as UserId,
serde_json::value::to_value(self.body.clone())?
&notification_ids[..],
&user_ids[..],
&bodies[..],
)
.execute(&mut *transaction)
.await?;
Notification::clear_user_notifications_cache(
notifications.iter().map(|n| &n.user_id),
redis,
)
.await?;
Ok(())
}
@@ -100,8 +119,6 @@ impl Notification {
where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
use futures::stream::TryStreamExt;
let notification_ids_parsed: Vec<i64> = notification_ids.iter().map(|x| x.0).collect();
sqlx::query!(
"
@@ -152,13 +169,20 @@ impl Notification {
pub async fn get_many_user<'a, E>(
user_id: UserId,
exec: E,
) -> Result<Vec<Notification>, sqlx::Error>
redis: &RedisPool,
) -> Result<Vec<Notification>, DatabaseError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
use futures::stream::TryStreamExt;
let cached_notifications: Option<Vec<Notification>> = redis
.get_deserialized_from_json(USER_NOTIFICATIONS_NAMESPACE, user_id.0)
.await?;
sqlx::query!(
if let Some(notifications) = cached_notifications {
return Ok(notifications);
}
let db_notifications = sqlx::query!(
"
SELECT n.id, n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type, n.body,
JSONB_AGG(DISTINCT jsonb_build_object('id', na.id, 'notification_id', na.notification_id, 'title', na.title, 'action_route_method', na.action_route_method, 'action_route', na.action_route)) filter (where na.id is not null) actions
@@ -200,47 +224,67 @@ impl Notification {
}))
})
.try_collect::<Vec<Notification>>()
.await
.await?;
redis
.set_serialized_to_json(
USER_NOTIFICATIONS_NAMESPACE,
user_id.0,
&db_notifications,
None,
)
.await?;
Ok(db_notifications)
}
pub async fn read(
id: NotificationId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<Option<()>, sqlx::error::Error> {
Self::read_many(&[id], transaction).await
redis: &RedisPool,
) -> Result<Option<()>, DatabaseError> {
Self::read_many(&[id], transaction, redis).await
}
pub async fn read_many(
notification_ids: &[NotificationId],
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<Option<()>, sqlx::error::Error> {
redis: &RedisPool,
) -> Result<Option<()>, DatabaseError> {
let notification_ids_parsed: Vec<i64> = notification_ids.iter().map(|x| x.0).collect();
sqlx::query!(
let affected_users = sqlx::query!(
"
UPDATE notifications
SET read = TRUE
WHERE id = ANY($1)
RETURNING user_id
",
&notification_ids_parsed
)
.execute(&mut *transaction)
.fetch_many(&mut *transaction)
.try_filter_map(|e| async { Ok(e.right().map(|x| UserId(x.user_id))) })
.try_collect::<Vec<_>>()
.await?;
Notification::clear_user_notifications_cache(affected_users.iter(), redis).await?;
Ok(Some(()))
}
pub async fn remove(
id: NotificationId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<Option<()>, sqlx::error::Error> {
Self::remove_many(&[id], transaction).await
redis: &RedisPool,
) -> Result<Option<()>, DatabaseError> {
Self::remove_many(&[id], transaction, redis).await
}
pub async fn remove_many(
notification_ids: &[NotificationId],
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<Option<()>, sqlx::error::Error> {
redis: &RedisPool,
) -> Result<Option<()>, DatabaseError> {
let notification_ids_parsed: Vec<i64> = notification_ids.iter().map(|x| x.0).collect();
sqlx::query!(
@@ -253,16 +297,36 @@ impl Notification {
.execute(&mut *transaction)
.await?;
sqlx::query!(
let affected_users = sqlx::query!(
"
DELETE FROM notifications
WHERE id = ANY($1)
RETURNING user_id
",
&notification_ids_parsed
)
.execute(&mut *transaction)
.fetch_many(&mut *transaction)
.try_filter_map(|e| async { Ok(e.right().map(|x| UserId(x.user_id))) })
.try_collect::<Vec<_>>()
.await?;
Notification::clear_user_notifications_cache(affected_users.iter(), redis).await?;
Ok(Some(()))
}
pub async fn clear_user_notifications_cache(
user_ids: impl IntoIterator<Item = &UserId>,
redis: &RedisPool,
) -> Result<(), DatabaseError> {
redis
.delete_many(
user_ids
.into_iter()
.map(|id| (USER_NOTIFICATIONS_NAMESPACE, Some(id.0.to_string()))),
)
.await?;
Ok(())
}
}

View File

@@ -187,10 +187,10 @@ impl Organization {
for organization in organizations {
redis
.set(
.set_serialized_to_json(
ORGANIZATIONS_NAMESPACE,
organization.id.0,
serde_json::to_string(&organization)?,
&organization,
None,
)
.await?;

View File

@@ -169,7 +169,7 @@ impl PersonalAccessToken {
for pat in db_pats {
redis
.set(PATS_NAMESPACE, pat.id.0, serde_json::to_string(&pat)?, None)
.set_serialized_to_json(PATS_NAMESPACE, pat.id.0, &pat, None)
.await?;
redis
.set(
@@ -195,9 +195,8 @@ impl PersonalAccessToken {
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
let res = redis
.get::<String, _>(PATS_USERS_NAMESPACE, user_id.0)
.await?
.and_then(|x| serde_json::from_str::<Vec<i64>>(&x).ok());
.get_deserialized_from_json::<Vec<i64>, _>(PATS_USERS_NAMESPACE, user_id.0)
.await?;
if let Some(res) = res {
return Ok(res.into_iter().map(PatId).collect());

View File

@@ -1,10 +1,11 @@
use super::ids::*;
use super::{ids::*, User};
use crate::database::models;
use crate::database::models::DatabaseError;
use crate::database::redis::RedisPool;
use crate::models::ids::base62_impl::{parse_base62, to_base62};
use crate::models::projects::{MonetizationStatus, ProjectStatus};
use chrono::{DateTime, Utc};
use futures::TryStreamExt;
use itertools::Itertools;
use serde::{Deserialize, Serialize};
@@ -445,16 +446,21 @@ impl Project {
models::TeamMember::clear_cache(project.inner.team_id, redis).await?;
sqlx::query!(
let affected_user_ids = sqlx::query!(
"
DELETE FROM team_members
WHERE team_id = $1
RETURNING user_id
",
project.inner.team_id as TeamId,
)
.execute(&mut *transaction)
.fetch_many(&mut *transaction)
.try_filter_map(|e| async { Ok(e.right().map(|x| UserId(x.user_id))) })
.try_collect::<Vec<_>>()
.await?;
User::clear_project_cache(&affected_user_ids, redis).await?;
sqlx::query!(
"
DELETE FROM teams
@@ -520,8 +526,6 @@ impl Project {
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
use futures::TryStreamExt;
if project_strings.is_empty() {
return Ok(Vec::new());
}
@@ -695,12 +699,7 @@ impl Project {
for project in db_projects {
redis
.set(
PROJECTS_NAMESPACE,
project.inner.id.0,
serde_json::to_string(&project)?,
None,
)
.set_serialized_to_json(PROJECTS_NAMESPACE, project.inner.id.0, &project, None)
.await?;
if let Some(slug) = &project.inner.slug {
redis
@@ -729,14 +728,10 @@ impl Project {
{
type Dependencies = Vec<(Option<VersionId>, Option<ProjectId>, Option<ProjectId>)>;
use futures::stream::TryStreamExt;
let dependencies = redis
.get::<String, _>(PROJECTS_DEPENDENCIES_NAMESPACE, id.0)
.get_deserialized_from_json::<Dependencies, _>(PROJECTS_DEPENDENCIES_NAMESPACE, id.0)
.await?;
if let Some(dependencies) =
dependencies.and_then(|x| serde_json::from_str::<Dependencies>(&x).ok())
{
if let Some(dependencies) = dependencies {
return Ok(dependencies);
}
@@ -768,12 +763,7 @@ impl Project {
.await?;
redis
.set(
PROJECTS_DEPENDENCIES_NAMESPACE,
id.0,
serde_json::to_string(&dependencies)?,
None,
)
.set_serialized_to_json(PROJECTS_DEPENDENCIES_NAMESPACE, id.0, &dependencies, None)
.await?;
Ok(dependencies)
}

View File

@@ -213,12 +213,7 @@ impl Session {
for session in db_sessions {
redis
.set(
SESSIONS_NAMESPACE,
session.id.0,
serde_json::to_string(&session)?,
None,
)
.set_serialized_to_json(SESSIONS_NAMESPACE, session.id.0, &session, None)
.await?;
redis
.set(
@@ -244,9 +239,8 @@ impl Session {
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
let res = redis
.get::<String, _>(SESSIONS_USERS_NAMESPACE, user_id.0)
.await?
.and_then(|x| serde_json::from_str::<Vec<i64>>(&x).ok());
.get_deserialized_from_json::<Vec<i64>, _>(SESSIONS_USERS_NAMESPACE, user_id.0)
.await?;
if let Some(res) = res {
return Ok(res.into_iter().map(SessionId).collect());
@@ -268,12 +262,7 @@ impl Session {
.await?;
redis
.set(
SESSIONS_USERS_NAMESPACE,
user_id.0,
serde_json::to_string(&db_sessions)?,
None,
)
.set_serialized_to_json(SESSIONS_USERS_NAMESPACE, user_id.0, &db_sessions, None)
.await?;
Ok(db_sessions)

View File

@@ -261,12 +261,7 @@ impl TeamMember {
let mut members = members.collect::<Vec<_>>();
redis
.set(
TEAMS_NAMESPACE,
id.0,
serde_json::to_string(&members)?,
None,
)
.set_serialized_to_json(TEAMS_NAMESPACE, id.0, &members, None)
.await?;
found_teams.append(&mut members);
}

View File

@@ -10,7 +10,7 @@ use serde::{Deserialize, Serialize};
const USERS_NAMESPACE: &str = "users";
const USER_USERNAMES_NAMESPACE: &str = "users_usernames";
// const USERS_PROJECTS_NAMESPACE: &str = "users_projects";
const USERS_PROJECTS_NAMESPACE: &str = "users_projects";
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct User {
@@ -234,12 +234,7 @@ impl User {
for user in db_users {
redis
.set(
USERS_NAMESPACE,
user.id.0,
serde_json::to_string(&user)?,
None,
)
.set_serialized_to_json(USERS_NAMESPACE, user.id.0, &user, None)
.await?;
redis
.set(
@@ -276,13 +271,22 @@ impl User {
pub async fn get_projects<'a, E>(
user_id: UserId,
exec: E,
) -> Result<Vec<ProjectId>, sqlx::Error>
redis: &RedisPool,
) -> Result<Vec<ProjectId>, DatabaseError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
use futures::stream::TryStreamExt;
let projects = sqlx::query!(
let cached_projects = redis
.get_deserialized_from_json::<Vec<ProjectId>, _>(USERS_PROJECTS_NAMESPACE, user_id.0)
.await?;
if let Some(projects) = cached_projects {
return Ok(projects);
}
let db_projects = sqlx::query!(
"
SELECT m.id FROM mods m
INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.accepted = TRUE
@@ -296,7 +300,11 @@ impl User {
.try_collect::<Vec<ProjectId>>()
.await?;
Ok(projects)
redis
.set_serialized_to_json(USERS_PROJECTS_NAMESPACE, user_id.0, &db_projects, None)
.await?;
Ok(db_projects)
}
pub async fn get_collections<'a, E>(
@@ -365,6 +373,21 @@ impl User {
Ok(())
}
pub async fn clear_project_cache(
user_ids: &[UserId],
redis: &RedisPool,
) -> Result<(), DatabaseError> {
redis
.delete_many(
user_ids
.into_iter()
.map(|id| (USERS_PROJECTS_NAMESPACE, Some(id.0.to_string()))),
)
.await?;
Ok(())
}
pub async fn remove(
id: UserId,
full: bool,

View File

@@ -690,12 +690,7 @@ impl Version {
for version in db_versions {
redis
.set(
VERSIONS_NAMESPACE,
version.inner.id.0,
serde_json::to_string(&version)?,
None,
)
.set_serialized_to_json(VERSIONS_NAMESPACE, version.inner.id.0, &version, None)
.await?;
found_versions.push(version);
@@ -827,12 +822,7 @@ impl Version {
for (key, mut files) in save_files {
redis
.set(
VERSION_FILES_NAMESPACE,
key,
serde_json::to_string(&files)?,
None,
)
.set_serialized_to_json(VERSION_FILES_NAMESPACE, key, &files, None)
.await?;
found_files.append(&mut files);

View File

@@ -59,9 +59,24 @@ impl RedisPool {
Ok(())
}
pub async fn get<R, T1>(&self, namespace: &str, id: T1) -> Result<Option<R>, DatabaseError>
pub async fn set_serialized_to_json<Id, D>(
&self,
namespace: &str,
id: Id,
data: D,
expiry: Option<i64>,
) -> Result<(), DatabaseError>
where
T1: Display,
Id: Display,
D: serde::Serialize,
{
self.set(namespace, id, serde_json::to_string(&data)?, expiry)
.await
}
pub async fn get<R, Id>(&self, namespace: &str, id: Id) -> Result<Option<R>, DatabaseError>
where
Id: Display,
R: FromRedisValue,
{
let mut redis_connection = self.pool.get().await?;
@@ -73,6 +88,21 @@ impl RedisPool {
Ok(res)
}
pub async fn get_deserialized_from_json<R, Id>(
&self,
namespace: &str,
id: Id,
) -> Result<Option<R>, DatabaseError>
where
Id: Display,
R: for<'a> serde::Deserialize<'a>,
{
Ok(self
.get::<String, Id>(namespace, id)
.await?
.and_then(|x| serde_json::from_str(&x).ok()))
}
pub async fn multi_get<R, T1>(
&self,
namespace: &str,
@@ -111,17 +141,20 @@ impl RedisPool {
pub async fn delete_many(
&self,
iter: impl IntoIterator<Item = (&str, Option<String>)>,
) -> Result<(), DatabaseError>
where {
let mut redis_connection = self.pool.get().await?;
) -> Result<(), DatabaseError> {
let mut cmd = cmd("DEL");
let mut any = false;
for (namespace, id) in iter {
if let Some(id) = id {
cmd.arg(format!("{}_{}:{}", self.meta_namespace, namespace, id));
any = true;
}
}
cmd.query_async::<_, ()>(&mut redis_connection).await?;
if any {
let mut redis_connection = self.pool.get().await?;
cmd.query_async::<_, ()>(&mut redis_connection).await?;
}
Ok(())
}

View File

@@ -488,7 +488,7 @@ async fn filter_allowed_ids(
if project_ids.is_none() && version_ids.is_none() {
if let Some(user) = &user_option {
project_ids = Some(
user_item::User::get_projects(user.id.into(), &***pool)
user_item::User::get_projects(user.id.into(), &***pool, redis)
.await?
.into_iter()
.map(|x| ProjectId::from(x).to_string())

View File

@@ -129,8 +129,12 @@ pub async fn notification_read(
if data.user_id == user.id.into() || user.role.is_admin() {
let mut transaction = pool.begin().await?;
database::models::notification_item::Notification::read(id.into(), &mut transaction)
.await?;
database::models::notification_item::Notification::read(
id.into(),
&mut transaction,
&redis,
)
.await?;
transaction.commit().await?;
@@ -172,8 +176,12 @@ pub async fn notification_delete(
if data.user_id == user.id.into() || user.role.is_admin() {
let mut transaction = pool.begin().await?;
database::models::notification_item::Notification::remove(id.into(), &mut transaction)
.await?;
database::models::notification_item::Notification::remove(
id.into(),
&mut transaction,
&redis,
)
.await?;
transaction.commit().await?;
@@ -225,8 +233,12 @@ pub async fn notifications_read(
}
}
database::models::notification_item::Notification::read_many(&notifications, &mut transaction)
.await?;
database::models::notification_item::Notification::read_many(
&notifications,
&mut transaction,
&redis,
)
.await?;
transaction.commit().await?;
@@ -273,6 +285,7 @@ pub async fn notifications_delete(
database::models::notification_item::Notification::remove_many(
&notifications,
&mut transaction,
&redis,
)
.await?;

View File

@@ -1,7 +1,7 @@
use super::version_creation::InitialVersionData;
use crate::auth::{get_user_from_headers, AuthenticationError};
use crate::database::models::thread_item::ThreadBuilder;
use crate::database::models::{self, image_item};
use crate::database::models::{self, image_item, User};
use crate::database::redis::RedisPool;
use crate::file_hosting::{FileHost, FileHostingError};
use crate::models::error::ApiError;
@@ -791,6 +791,7 @@ async fn project_create_inner(
let now = Utc::now();
let id = project_builder_actual.insert(&mut *transaction).await?;
User::clear_project_cache(&[current_user.id.into()], redis).await?;
for image_id in project_create_data.uploaded_images {
if let Some(db_image) =

View File

@@ -598,7 +598,7 @@ pub async fn project_edit(
new_status: *status,
},
}
.insert_many(notified_members, &mut transaction)
.insert_many(notified_members, &mut transaction, &redis)
.await?;
}

View File

@@ -1,7 +1,7 @@
use crate::auth::{get_user_from_headers, is_authorized};
use crate::database::models::notification_item::NotificationBuilder;
use crate::database::models::team_item::TeamAssociationId;
use crate::database::models::{Organization, Team, TeamMember};
use crate::database::models::{Organization, Team, TeamMember, User};
use crate::database::redis::RedisPool;
use crate::database::Project;
use crate::models::notifications::NotificationBody;
@@ -349,6 +349,7 @@ pub async fn join_team(
)
.await?;
User::clear_project_cache(&[current_user.id.into()], &redis).await?;
TeamMember::clear_cache(team_id, &redis).await?;
transaction.commit().await?;
@@ -532,7 +533,7 @@ pub async fn add_team_member(
role: new_member.role.clone(),
},
}
.insert(new_member.user_id.into(), &mut transaction)
.insert(new_member.user_id.into(), &mut transaction, &redis)
.await?;
}
TeamAssociationId::Organization(oid) => {
@@ -544,7 +545,7 @@ pub async fn add_team_member(
role: new_member.role.clone(),
},
}
.insert(new_member.user_id.into(), &mut transaction)
.insert(new_member.user_id.into(), &mut transaction, &redis)
.await?;
}
}
@@ -954,6 +955,7 @@ pub async fn remove_team_member(
}
TeamMember::clear_cache(id, &redis).await?;
User::clear_project_cache(&[delete_member.user_id.into()], &redis).await?;
transaction.commit().await?;
Ok(HttpResponse::NoContent().body(""))

View File

@@ -410,6 +410,7 @@ pub async fn thread_send_message(
.insert_many(
members.into_iter().map(|x| x.user_id).collect(),
&mut transaction,
&redis,
)
.await?;
}
@@ -435,7 +436,7 @@ pub async fn thread_send_message(
report_id: Some(report.id.into()),
},
}
.insert(report.reporter, &mut transaction)
.insert(report.reporter, &mut transaction, &redis)
.await?;
}
}

View File

@@ -136,7 +136,7 @@ pub async fn projects_list(
.map(|y| y.role.is_mod() || y.id == user_id)
.unwrap_or(false);
let project_data = User::get_projects(id, &**pool).await?;
let project_data = User::get_projects(id, &**pool, &redis).await?;
let response: Vec<_> =
crate::database::Project::get_many_ids(&project_data, &**pool, &redis)
@@ -591,11 +591,13 @@ pub async fn user_notifications(
}
let mut notifications: Vec<Notification> =
crate::database::models::notification_item::Notification::get_many_user(id, &**pool)
.await?
.into_iter()
.map(Into::into)
.collect();
crate::database::models::notification_item::Notification::get_many_user(
id, &**pool, &redis,
)
.await?
.into_iter()
.map(Into::into)
.collect();
notifications.sort_by(|a, b| b.created.cmp(&a.created));

View File

@@ -409,7 +409,7 @@ async fn version_create_inner(
version_id,
},
}
.insert_many(users, &mut *transaction)
.insert_many(users, &mut *transaction, redis)
.await?;
let response = Version {

175
tests/common/api_v2.rs Normal file
View File

@@ -0,0 +1,175 @@
#![allow(dead_code)]
use super::{
actix::AppendsMultipart,
asserts::assert_status,
database::{MOD_USER_PAT, USER_USER_PAT},
environment::LocalService,
request_data::ProjectCreationRequestData,
};
use actix_http::StatusCode;
use actix_web::{
dev::ServiceResponse,
test::{self, TestRequest},
};
use labrinth::models::{
notifications::Notification,
projects::{Project, Version},
};
use serde_json::json;
use std::rc::Rc;
/// Test helper wrapping a running in-process service with typed convenience
/// methods for the v2 HTTP API (projects, teams, notifications).
pub struct ApiV2 {
    // Shared handle to the in-process test service; `Rc` because the test app
    // is single-threaded and cloned across helpers.
    pub test_app: Rc<Box<dyn LocalService>>,
}
impl ApiV2 {
/// Dispatches a raw request to the wrapped test service.
///
/// # Panics
/// Panics if the service itself fails to produce a response (a transport-level
/// failure, not an HTTP error status) — in test code that is always a bug.
pub async fn call(&self, req: actix_http::Request) -> ServiceResponse {
    // `expect` instead of bare `unwrap` so a failing test names the invariant.
    self.test_app
        .call(req)
        .await
        .expect("test service failed to produce a response")
}
/// Creates a public project end-to-end: uploads it as USER_USER, approves it
/// as MOD_USER, then reads back the project and its first version.
pub async fn add_public_project(
    &self,
    creation_data: ProjectCreationRequestData,
) -> (Project, Version) {
    // Submit the multipart project-creation request.
    let create_request = TestRequest::post()
        .uri("/v2/project")
        .append_header(("Authorization", USER_USER_PAT))
        .set_multipart(creation_data.segment_data)
        .to_request();
    let create_response = self.call(create_request).await;
    assert_status(create_response, StatusCode::OK);

    // Flip the status to approved using a moderator token.
    let approve_request = TestRequest::patch()
        .uri(&format!("/v2/project/{}", creation_data.slug))
        .append_header(("Authorization", MOD_USER_PAT))
        .set_json(json!(
            {
                "status": "approved"
            }
        ))
        .to_request();
    let approve_response = self.call(approve_request).await;
    assert_status(approve_response, StatusCode::NO_CONTENT);

    // Fetch the freshly created project back out.
    let project = self
        .get_project_deserialized(&creation_data.slug, USER_USER_PAT)
        .await;

    // List the project's versions and keep the first one.
    let versions_request = TestRequest::get()
        .uri(&format!("/v2/project/{}/version", creation_data.slug))
        .append_header(("Authorization", USER_USER_PAT))
        .to_request();
    let versions_response = self.call(versions_request).await;
    let versions: Vec<Version> = test::read_body_json(versions_response).await;
    let version = versions
        .into_iter()
        .next()
        .unwrap();

    (project, version)
}
pub async fn remove_project(&self, project_slug_or_id: &str, pat: &str) -> ServiceResponse {
let req = test::TestRequest::delete()
.uri(&format!("/v2/project/{project_slug_or_id}"))
.append_header(("Authorization", pat))
.to_request();
let resp = self.call(req).await;
assert_eq!(resp.status(), 204);
resp
}
/// Fetches a project by slug and deserializes the JSON body into a `Project`.
pub async fn get_project_deserialized(&self, slug: &str, pat: &str) -> Project {
    let request = TestRequest::get()
        .uri(&format!("/v2/project/{slug}"))
        .append_header(("Authorization", pat))
        .to_request();
    let response = self.call(request).await;
    test::read_body_json(response).await
}
pub async fn get_user_projects_deserialized(
&self,
user_id_or_username: &str,
pat: &str,
) -> Vec<Project> {
let req = test::TestRequest::get()
.uri(&format!("/v2/user/{}/projects", user_id_or_username))
.append_header(("Authorization", pat))
.to_request();
let resp = self.call(req).await;
assert_eq!(resp.status(), 200);
test::read_body_json(resp).await
}
pub async fn add_user_to_team(
&self,
team_id: &str,
user_id: &str,
pat: &str,
) -> ServiceResponse {
let req = test::TestRequest::post()
.uri(&format!("/v2/team/{team_id}/members"))
.append_header(("Authorization", pat))
.set_json(json!( {
"user_id": user_id
}))
.to_request();
self.call(req).await
}
pub async fn join_team(&self, team_id: &str, pat: &str) -> ServiceResponse {
let req = test::TestRequest::post()
.uri(&format!("/v2/team/{team_id}/join"))
.append_header(("Authorization", pat))
.to_request();
self.call(req).await
}
pub async fn remove_from_team(
&self,
team_id: &str,
user_id: &str,
pat: &str,
) -> ServiceResponse {
let req = test::TestRequest::delete()
.uri(&format!("/v2/team/{team_id}/members/{user_id}"))
.append_header(("Authorization", pat))
.to_request();
self.call(req).await
}
pub async fn get_user_notifications_deserialized(
&self,
user_id: &str,
pat: &str,
) -> Vec<Notification> {
let req = test::TestRequest::get()
.uri(&format!("/v2/user/{user_id}/notifications"))
.append_header(("Authorization", pat))
.to_request();
let resp = self.call(req).await;
test::read_body_json(resp).await
}
pub async fn mark_notification_read(
&self,
notification_id: &str,
pat: &str,
) -> ServiceResponse {
let req = test::TestRequest::patch()
.uri(&format!("/v2/notification/{notification_id}"))
.append_header(("Authorization", pat))
.to_request();
self.call(req).await
}
pub async fn delete_notification(&self, notification_id: &str, pat: &str) -> ServiceResponse {
let req = test::TestRequest::delete()
.uri(&format!("/v2/notification/{notification_id}"))
.append_header(("Authorization", pat))
.to_request();
self.call(req).await
}
}

3
tests/common/asserts.rs Normal file
View File

@@ -0,0 +1,3 @@
/// Asserts that `response` carries the expected HTTP status, dumping the
/// full response in the panic message on mismatch for easier debugging.
pub fn assert_status(response: actix_web::dev::ServiceResponse, status: actix_http::StatusCode) {
    let actual = response.status();
    assert_eq!(actual, status, "{:#?}", response.response());
}

View File

@@ -3,16 +3,15 @@ use labrinth::{models::projects::Project, models::projects::Version};
use serde_json::json;
use sqlx::Executor;
use crate::common::{
actix::AppendsMultipart,
database::{MOD_USER_PAT, USER_USER_PAT},
};
use crate::common::{actix::AppendsMultipart, database::USER_USER_PAT};
use super::{
actix::{MultipartSegment, MultipartSegmentData},
environment::TestEnvironment,
request_data::get_public_project_creation_data,
};
#[allow(dead_code)]
pub const DUMMY_CATEGORIES: &'static [&str] = &[
"combat",
"decoration",
@@ -23,6 +22,14 @@ pub const DUMMY_CATEGORIES: &'static [&str] = &[
"optimization",
];
/// Identifies one of the pre-built jar fixtures checked in under
/// `tests/files/` (see `filename()` / `bytes()` for the mapping).
#[allow(dead_code)]
pub enum DummyJarFile {
    DummyProjectAlpha,
    DummyProjectBeta,
    BasicMod,
    BasicModDifferent,
}
pub struct DummyData {
pub alpha_team_id: String,
pub beta_team_id: String,
@@ -75,94 +82,19 @@ pub async fn add_dummy_data(test_env: &TestEnvironment) -> DummyData {
}
pub async fn add_project_alpha(test_env: &TestEnvironment) -> (Project, Version) {
// Adds dummy data to the database with sqlx (projects, versions, threads)
// Generate test project data.
let json_data = json!(
{
"title": "Test Project Alpha",
"slug": "alpha",
"description": "A dummy project for testing with.",
"body": "This project is approved, and versions are listed.",
"client_side": "required",
"server_side": "optional",
"initial_versions": [{
"file_parts": ["dummy-project-alpha.jar"],
"version_number": "1.2.3",
"version_title": "start",
"dependencies": [],
"game_versions": ["1.20.1"] ,
"release_channel": "release",
"loaders": ["fabric"],
"featured": true
}],
"categories": [],
"license_id": "MIT"
}
);
// Basic json
let json_segment = MultipartSegment {
name: "data".to_string(),
filename: None,
content_type: Some("application/json".to_string()),
data: MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()),
};
// Basic file
let file_segment = MultipartSegment {
name: "dummy-project-alpha.jar".to_string(),
filename: Some("dummy-project-alpha.jar".to_string()),
content_type: Some("application/java-archive".to_string()),
data: MultipartSegmentData::Binary(
include_bytes!("../../tests/files/dummy-project-alpha.jar").to_vec(),
),
};
// Add a project.
let req = TestRequest::post()
.uri("/v2/project")
.append_header(("Authorization", USER_USER_PAT))
.set_multipart(vec![json_segment.clone(), file_segment.clone()])
.to_request();
let resp = test_env.call(req).await;
assert_eq!(resp.status(), 200);
// Approve as a moderator.
let req = TestRequest::patch()
.uri("/v2/project/alpha")
.append_header(("Authorization", MOD_USER_PAT))
.set_json(json!(
{
"status": "approved"
}
test_env
.v2
.add_public_project(get_public_project_creation_data(
"alpha",
DummyJarFile::DummyProjectAlpha,
))
.to_request();
let resp = test_env.call(req).await;
assert_eq!(resp.status(), 204);
// Get project
let req = TestRequest::get()
.uri("/v2/project/alpha")
.append_header(("Authorization", USER_USER_PAT))
.to_request();
let resp = test_env.call(req).await;
let project: Project = test::read_body_json(resp).await;
// Get project's versions
let req = TestRequest::get()
.uri("/v2/project/alpha/version")
.append_header(("Authorization", USER_USER_PAT))
.to_request();
let resp = test_env.call(req).await;
let versions: Vec<Version> = test::read_body_json(resp).await;
let version = versions.into_iter().next().unwrap();
(project, version)
.await
}
pub async fn add_project_beta(test_env: &TestEnvironment) -> (Project, Version) {
// Adds dummy data to the database with sqlx (projects, versions, threads)
// Generate test project data.
let jar = DummyJarFile::DummyProjectBeta;
let json_data = json!(
{
"title": "Test Project Beta",
@@ -172,7 +104,7 @@ pub async fn add_project_beta(test_env: &TestEnvironment) -> (Project, Version)
"client_side": "required",
"server_side": "optional",
"initial_versions": [{
"file_parts": ["dummy-project-beta.jar"],
"file_parts": [jar.filename()],
"version_number": "1.2.3",
"version_title": "start",
"status": "unlisted",
@@ -200,12 +132,10 @@ pub async fn add_project_beta(test_env: &TestEnvironment) -> (Project, Version)
// Basic file
let file_segment = MultipartSegment {
name: "dummy-project-beta.jar".to_string(),
filename: Some("dummy-project-beta.jar".to_string()),
name: jar.filename(),
filename: Some(jar.filename()),
content_type: Some("application/java-archive".to_string()),
data: MultipartSegmentData::Binary(
include_bytes!("../../tests/files/dummy-project-beta.jar").to_vec(),
),
data: MultipartSegmentData::Binary(jar.bytes()),
};
// Add a project.
@@ -237,3 +167,30 @@ pub async fn add_project_beta(test_env: &TestEnvironment) -> (Project, Version)
(project, version)
}
impl DummyJarFile {
pub fn filename(&self) -> String {
match self {
DummyJarFile::DummyProjectAlpha => "dummy-project-alpha.jar",
DummyJarFile::DummyProjectBeta => "dummy-project-beta.jar",
DummyJarFile::BasicMod => "basic-mod.jar",
DummyJarFile::BasicModDifferent => "basic-mod-different.jar",
}
.to_string()
}
pub fn bytes(&self) -> Vec<u8> {
match self {
DummyJarFile::DummyProjectAlpha => {
include_bytes!("../../tests/files/dummy-project-alpha.jar").to_vec()
}
DummyJarFile::DummyProjectBeta => {
include_bytes!("../../tests/files/dummy-project-beta.jar").to_vec()
}
DummyJarFile::BasicMod => include_bytes!("../../tests/files/basic-mod.jar").to_vec(),
DummyJarFile::BasicModDifferent => {
include_bytes!("../../tests/files/basic-mod-different.jar").to_vec()
}
}
}
}

View File

@@ -1,7 +1,15 @@
#![allow(dead_code)]
use super::{database::TemporaryDatabase, dummy_data};
use std::rc::Rc;
use super::{
api_v2::ApiV2,
asserts::assert_status,
database::{TemporaryDatabase, FRIEND_USER_ID, USER_USER_PAT},
dummy_data,
};
use crate::common::setup;
use actix_http::StatusCode;
use actix_web::{dev::ServiceResponse, test, App};
use futures::Future;
@@ -22,8 +30,9 @@ where
// temporary sqlx db like #[sqlx::test] would.
// Use .call(req) on it directly to make a test call as if test::call_service(req) were being used.
pub struct TestEnvironment {
test_app: Box<dyn LocalService>,
test_app: Rc<Box<dyn LocalService>>,
pub db: TemporaryDatabase,
pub v2: ApiV2,
pub dummy: Option<dummy_data::DummyData>,
}
@@ -40,9 +49,12 @@ impl TestEnvironment {
let db = TemporaryDatabase::create().await;
let labrinth_config = setup(&db).await;
let app = App::new().configure(|cfg| labrinth::app_config(cfg, labrinth_config.clone()));
let test_app = test::init_service(app).await;
let test_app: Rc<Box<dyn LocalService>> = Rc::new(Box::new(test::init_service(app).await));
Self {
test_app: Box::new(test_app),
v2: ApiV2 {
test_app: test_app.clone(),
},
test_app,
db,
dummy: None,
}
@@ -54,9 +66,21 @@ impl TestEnvironment {
pub async fn call(&self, req: actix_http::Request) -> ServiceResponse {
self.test_app.call(req).await.unwrap()
}
    /// Invites FRIEND_USER to the dummy alpha project's team (acting as
    /// USER_USER), which creates a team-invite notification for the friend
    /// user. Panics if dummy data has not been populated yet.
    pub async fn generate_friend_user_notification(&self) {
        let resp = self
            .v2
            .add_user_to_team(
                &self.dummy.as_ref().unwrap().alpha_team_id,
                FRIEND_USER_ID,
                USER_USER_PAT,
            )
            .await;
        // 204 No Content means the invite (and thus the notification) exists.
        assert_status(resp, StatusCode::NO_CONTENT);
    }
}
trait LocalService {
pub trait LocalService {
fn call(
&self,
req: actix_http::Request,

View File

@@ -5,10 +5,13 @@ use std::sync::Arc;
use self::database::TemporaryDatabase;
pub mod actix;
pub mod api_v2;
pub mod asserts;
pub mod database;
pub mod dummy_data;
pub mod environment;
pub mod pats;
pub mod request_data;
pub mod scopes;
// Testing equivalent to 'setup' function, producing a LabrinthConfig

View File

@@ -0,0 +1,60 @@
use serde_json::json;
use super::{actix::MultipartSegment, dummy_data::DummyJarFile};
use crate::common::actix::MultipartSegmentData;
/// Pre-assembled multipart payload for creating a test project via the API.
pub struct ProjectCreationRequestData {
    // Slug the project will be created under.
    pub slug: String,
    // Fixture jar uploaded as the initial version's file.
    pub jar: DummyJarFile,
    // Multipart segments (JSON metadata + jar file) ready for `set_multipart`.
    pub segment_data: Vec<MultipartSegment>,
}
/// Builds the multipart request data for creating a public test project.
///
/// The project's title is derived from `slug`, and it gets a single
/// "fabric" release version backed by `jar` plus an MIT license.
pub fn get_public_project_creation_data(
    slug: &str,
    jar: DummyJarFile,
) -> ProjectCreationRequestData {
    let json_data = json!(
        {
            "title": format!("Test Project {slug}"),
            "slug": slug,
            "description": "A dummy project for testing with.",
            "body": "This project is approved, and versions are listed.",
            "client_side": "required",
            "server_side": "optional",
            "initial_versions": [{
                "file_parts": [jar.filename()],
                "version_number": "1.2.3",
                "version_title": "start",
                "dependencies": [],
                "game_versions": ["1.20.1"] ,
                "release_channel": "release",
                "loaders": ["fabric"],
                "featured": true
            }],
            "categories": [],
            "license_id": "MIT"
        }
    );

    // JSON metadata segment.
    let json_segment = MultipartSegment {
        name: "data".to_string(),
        filename: None,
        content_type: Some("application/json".to_string()),
        data: MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()),
    };

    // Jar file segment.
    let file_segment = MultipartSegment {
        name: jar.filename(),
        filename: Some(jar.filename()),
        content_type: Some("application/java-archive".to_string()),
        data: MultipartSegmentData::Binary(jar.bytes()),
    };

    ProjectCreationRequestData {
        slug: slug.to_string(),
        jar,
        // Move the segments in directly; they are not used again, so the
        // previous `.clone()` on each was redundant (clippy: redundant_clone).
        segment_data: vec![json_segment, file_segment],
    }
}

View File

@@ -83,9 +83,10 @@ impl<'a> ScopeTest<'a> {
if resp.status().as_u16() != self.expected_failure_code {
return Err(format!(
"Expected failure code {}, got {}",
"Expected failure code {}, got {} ({:#?})",
self.expected_failure_code,
resp.status().as_u16()
resp.status().as_u16(),
resp.response()
));
}
@@ -106,8 +107,9 @@ impl<'a> ScopeTest<'a> {
if !(resp.status().is_success() || resp.status().is_redirection()) {
return Err(format!(
"Expected success code, got {}",
resp.status().as_u16()
"Expected success code, got {} ({:#?})",
resp.status().as_u16(),
resp.response()
));
}

70
tests/notifications.rs Normal file
View File

@@ -0,0 +1,70 @@
use common::{
database::{FRIEND_USER_ID, FRIEND_USER_PAT, USER_USER_PAT},
environment::with_test_environment,
};
mod common;
#[actix_rt::test]
pub async fn get_user_notifications_after_team_invitation_returns_notification() {
    with_test_environment(|test_env| async move {
        let team_id = test_env.dummy.as_ref().unwrap().alpha_team_id.clone();
        let v2 = test_env.v2;

        // Read the friend's notifications once up front, presumably so the
        // caching layer is populated before the invite (exercises invalidation).
        v2.get_user_notifications_deserialized(FRIEND_USER_ID, FRIEND_USER_PAT)
            .await;

        // Inviting the friend to the team should produce exactly one notification.
        v2.add_user_to_team(&team_id, FRIEND_USER_ID, USER_USER_PAT)
            .await;

        let notifs = v2
            .get_user_notifications_deserialized(FRIEND_USER_ID, FRIEND_USER_PAT)
            .await;
        assert_eq!(notifs.len(), 1);
    })
    .await;
}
#[actix_rt::test]
pub async fn get_user_notifications_after_reading_indicates_notification_read() {
    with_test_environment(|test_env| async move {
        // Create one pending team-invite notification for the friend user.
        test_env.generate_friend_user_notification().await;
        let v2 = test_env.v2;

        let notifs = v2
            .get_user_notifications_deserialized(FRIEND_USER_ID, FRIEND_USER_PAT)
            .await;
        assert_eq!(notifs.len(), 1);
        let id = notifs[0].id.to_string();

        // Marking the notification read must not delete it —
        // only the `read` flag should change.
        v2.mark_notification_read(&id, FRIEND_USER_PAT).await;

        let notifs = v2
            .get_user_notifications_deserialized(FRIEND_USER_ID, FRIEND_USER_PAT)
            .await;
        assert_eq!(notifs.len(), 1);
        assert!(notifs[0].read);
    })
    .await;
}
#[actix_rt::test]
pub async fn get_user_notifications_after_deleting_does_not_show_notification() {
    with_test_environment(|test_env| async move {
        // Create one pending team-invite notification for the friend user.
        test_env.generate_friend_user_notification().await;
        let v2 = test_env.v2;

        let notifs = v2
            .get_user_notifications_deserialized(FRIEND_USER_ID, FRIEND_USER_PAT)
            .await;
        assert_eq!(notifs.len(), 1);
        let id = notifs[0].id.to_string();

        // Deleting the notification must remove it from subsequent reads.
        v2.delete_notification(&id, FRIEND_USER_PAT).await;

        let notifs = v2
            .get_user_notifications_deserialized(FRIEND_USER_ID, FRIEND_USER_PAT)
            .await;
        assert!(notifs.is_empty());
    })
    .await;
}

View File

@@ -269,11 +269,7 @@ async fn test_add_remove_project() {
let id = body["id"].to_string();
// Remove the project
let req = test::TestRequest::delete()
.uri("/v2/project/demo")
.append_header(("Authorization", USER_USER_PAT))
.to_request();
let resp = test_env.call(req).await;
let resp = test_env.v2.remove_project("demo", USER_USER_PAT).await;
assert_eq!(resp.status(), 204);
// Confirm that the project is gone from the cache

View File

@@ -92,14 +92,10 @@ pub async fn notifications_scopes() {
// We will invite user 'friend' to project team, and use that as a notification
// Get notifications
let req = TestRequest::post()
.uri(&format!("/v2/team/{alpha_team_id}/members"))
.append_header(("Authorization", USER_USER_PAT))
.set_json(json!( {
"user_id": FRIEND_USER_ID // friend
}))
.to_request();
let resp = test_env.call(req).await;
let resp = test_env
.v2
.add_user_to_team(alpha_team_id, FRIEND_USER_ID, USER_USER_PAT)
.await;
assert_eq!(resp.status(), 204);
// Notification get
@@ -164,14 +160,10 @@ pub async fn notifications_scopes() {
// Mass notification delete
// We invite mod, get the notification ID, and do mass delete using that
let req = test::TestRequest::post()
.uri(&format!("/v2/team/{alpha_team_id}/members"))
.append_header(("Authorization", USER_USER_PAT))
.set_json(json!( {
"user_id": MOD_USER_ID // mod
}))
.to_request();
let resp = test_env.call(req).await;
let resp = test_env
.v2
.add_user_to_team(alpha_team_id, MOD_USER_ID, USER_USER_PAT)
.await;
assert_eq!(resp.status(), 204);
let read_notifications = Scopes::NOTIFICATION_READ;
let req_gen = || test::TestRequest::get().uri(&format!("/v2/user/{MOD_USER_ID}/notifications"));

102
tests/user.rs Normal file
View File

@@ -0,0 +1,102 @@
use common::{
database::{FRIEND_USER_ID, FRIEND_USER_PAT, USER_USER_ID, USER_USER_PAT},
environment::with_test_environment,
};
use crate::common::{dummy_data::DummyJarFile, request_data::get_public_project_creation_data};
mod common;
#[actix_rt::test]
pub async fn get_user_projects_after_creating_project_returns_new_project() {
    with_test_environment(|test_env| async move {
        let v2 = test_env.v2;

        // Read the user's project list before creating anything, presumably to
        // populate the caching layer so the later read exercises invalidation.
        v2.get_user_projects_deserialized(USER_USER_ID, USER_USER_PAT)
            .await;

        let (project, _) = v2
            .add_public_project(get_public_project_creation_data(
                "slug",
                DummyJarFile::BasicMod,
            ))
            .await;

        // The freshly created project must appear in the user's project list.
        let projects = v2
            .get_user_projects_deserialized(USER_USER_ID, USER_USER_PAT)
            .await;
        assert!(projects.iter().any(|p| p.id == project.id));
    })
    .await;
}
#[actix_rt::test]
pub async fn get_user_projects_after_deleting_project_shows_removal() {
    with_test_environment(|test_env| async move {
        let api = test_env.v2;
        let (project, _) = api
            .add_public_project(get_public_project_creation_data(
                "iota",
                DummyJarFile::BasicMod,
            ))
            .await;

        // Read the project list while the project still exists, presumably to
        // populate the caching layer so deletion must invalidate it.
        api.get_user_projects_deserialized(USER_USER_ID, USER_USER_PAT)
            .await;

        // `slug.as_ref().unwrap()` already yields `&String`, which coerces to
        // `&str`; the extra `&` was a needless double borrow (clippy: needless_borrow).
        api.remove_project(project.slug.as_ref().unwrap(), USER_USER_PAT)
            .await;

        // After deletion the project must be gone from the user's list.
        let resp_projects = api
            .get_user_projects_deserialized(USER_USER_ID, USER_USER_PAT)
            .await;
        assert!(!resp_projects.iter().any(|p| p.id == project.id));
    })
    .await;
}
#[actix_rt::test]
pub async fn get_user_projects_after_joining_team_shows_team_projects() {
    with_test_environment(|test_env| async move {
        let alpha_team_id = &test_env.dummy.as_ref().unwrap().alpha_team_id;
        let alpha_project_id = &test_env.dummy.as_ref().unwrap().alpha_project_id;
        let api = test_env.v2;

        // Read the friend's project list before the membership change,
        // presumably to populate the caching layer (exercises invalidation).
        api.get_user_projects_deserialized(FRIEND_USER_ID, FRIEND_USER_PAT)
            .await;

        api.add_user_to_team(alpha_team_id, FRIEND_USER_ID, USER_USER_PAT)
            .await;
        // `alpha_team_id` is already a `&String`; the extra `&` was a
        // needless borrow (clippy: needless_borrow).
        api.join_team(alpha_team_id, FRIEND_USER_PAT).await;

        // After accepting the invite, the team's project shows up for the friend.
        let projects = api
            .get_user_projects_deserialized(FRIEND_USER_ID, FRIEND_USER_PAT)
            .await;
        assert!(projects
            .iter()
            .any(|p| p.id.to_string() == *alpha_project_id));
    })
    .await;
}
#[actix_rt::test]
pub async fn get_user_projects_after_leaving_team_shows_no_team_projects() {
    with_test_environment(|test_env| async move {
        let alpha_team_id = &test_env.dummy.as_ref().unwrap().alpha_team_id;
        let alpha_project_id = &test_env.dummy.as_ref().unwrap().alpha_project_id;
        let api = test_env.v2;

        // Put the friend on the alpha team.
        api.add_user_to_team(alpha_team_id, FRIEND_USER_ID, USER_USER_PAT)
            .await;
        // `alpha_team_id` is already a `&String`; the extra `&` previously
        // passed here and to remove_from_team was a needless borrow
        // (clippy: needless_borrow).
        api.join_team(alpha_team_id, FRIEND_USER_PAT).await;

        // Read the friend's project list while still a member, presumably to
        // populate the caching layer (exercises invalidation on removal).
        api.get_user_projects_deserialized(FRIEND_USER_ID, FRIEND_USER_PAT)
            .await;

        api.remove_from_team(alpha_team_id, FRIEND_USER_ID, USER_USER_PAT)
            .await;

        // After removal, the team's project must no longer be listed.
        let projects = api
            .get_user_projects_deserialized(FRIEND_USER_ID, FRIEND_USER_PAT)
            .await;
        assert!(!projects
            .iter()
            .any(|p| p.id.to_string() == *alpha_project_id));
    })
    .await;
}