Add redis caching to getting user notifications and projects [MOD-540] (#723)
* Add redis caching to getting a user's project ids
* Run `cargo sqlx prepare` to update the sqlx-data.json
* Add redis caching for getting user notifications
* Fix new clippy warnings
* Remove log that shouldn't have been committed
* Batch insert of notifications (untested)
* sqlx prepare...
* Fix merge conflict things and use new redis struct
* Fix bug with calling delete_many without any elements (caught by tests)
* cargo sqlx prepare
* Add tests around cache invalidation (and fix bug they caught!)
* Some test reorg based on code review suggestions
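The theme of the hunks below is replacing hand-rolled get/serde_json calls with generic RedisPool helpers: get_deserialized_from_json for reads, set_serialized_to_json for writes, and delete_many for invalidation. A minimal sketch of the contract those call sites imply — synchronous and backed by an in-memory map so it stands alone, with signatures inferred from the hunks rather than taken from the real implementation:

use std::collections::HashMap;

use serde::{de::DeserializeOwned, Serialize};

/// Stand-in for the real RedisPool: a (namespace, key) -> JSON string map.
/// The `_expiry` argument mirrors the `Option<i64>` seconds seen at the call sites.
#[derive(Default)]
struct FakePool {
    store: HashMap<String, String>,
}

impl FakePool {
    fn compose(namespace: &str, key: impl ToString) -> String {
        format!("{}:{}", namespace, key.to_string())
    }

    fn set_serialized_to_json<K: ToString, T: Serialize>(
        &mut self,
        namespace: &str,
        key: K,
        value: &T,
        _expiry: Option<i64>,
    ) -> serde_json::Result<()> {
        self.store
            .insert(Self::compose(namespace, key), serde_json::to_string(value)?);
        Ok(())
    }

    fn get_deserialized_from_json<T: DeserializeOwned, K: ToString>(
        &self,
        namespace: &str,
        key: K,
    ) -> Option<T> {
        self.store
            .get(&Self::compose(namespace, key))
            .and_then(|raw| serde_json::from_str(raw).ok())
    }

    fn delete_many<K: ToString>(
        &mut self,
        keys: impl IntoIterator<Item = (&'static str, Option<K>)>,
    ) {
        for (namespace, key) in keys.into_iter().flat_map(|(ns, k)| k.map(|k| (ns, k))) {
            self.store.remove(&Self::compose(namespace, key));
        }
    }
}

fn main() {
    let mut pool = FakePool::default();
    pool.set_serialized_to_json("users_projects", 42_i64, &vec![1_i64, 2, 3], None)
        .unwrap();
    let cached: Option<Vec<i64>> = pool.get_deserialized_from_json("users_projects", 42_i64);
    assert_eq!(cached, Some(vec![1, 2, 3]));
    pool.delete_many([("users_projects", Some(42_i64.to_string()))]);
    assert!(pool
        .get_deserialized_from_json::<Vec<i64>, _>("users_projects", 42_i64)
        .is_none());
}

The real pool is async and returns Result<_, DatabaseError>; the sketch only illustrates the namespace/key/JSON contract the hunks below rely on.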
@@ -102,10 +102,9 @@ impl Category {
     where
         E: sqlx::Executor<'a, Database = sqlx::Postgres>,
     {
-        let res = redis
-            .get::<String, _>(TAGS_NAMESPACE, "category")
-            .await?
-            .and_then(|x| serde_json::from_str::<Vec<Category>>(&x).ok());
+        let res: Option<Vec<Category>> = redis
+            .get_deserialized_from_json(TAGS_NAMESPACE, "category")
+            .await?;

         if let Some(res) = res {
             return Ok(res);
@@ -133,12 +132,7 @@ impl Category {
         .await?;

         redis
-            .set(
-                TAGS_NAMESPACE,
-                "category",
-                serde_json::to_string(&result)?,
-                None,
-            )
+            .set_serialized_to_json(TAGS_NAMESPACE, "category", &result, None)
             .await?;

         Ok(result)
@@ -167,10 +161,9 @@ impl Loader {
     where
         E: sqlx::Executor<'a, Database = sqlx::Postgres>,
     {
-        let res = redis
-            .get::<String, _>(TAGS_NAMESPACE, "loader")
-            .await?
-            .and_then(|x| serde_json::from_str::<Vec<Loader>>(&x).ok());
+        let res: Option<Vec<Loader>> = redis
+            .get_deserialized_from_json(TAGS_NAMESPACE, "loader")
+            .await?;

         if let Some(res) = res {
             return Ok(res);
@@ -204,12 +197,7 @@ impl Loader {
         .await?;

         redis
-            .set(
-                TAGS_NAMESPACE,
-                "loader",
-                serde_json::to_string(&result)?,
-                None,
-            )
+            .set_serialized_to_json(TAGS_NAMESPACE, "loader", &result, None)
             .await?;

         Ok(result)
@@ -252,10 +240,9 @@ impl GameVersion {
     where
         E: sqlx::Executor<'a, Database = sqlx::Postgres>,
     {
-        let res = redis
-            .get::<String, _>(TAGS_NAMESPACE, "game_version")
-            .await?
-            .and_then(|x| serde_json::from_str::<Vec<GameVersion>>(&x).ok());
+        let res: Option<Vec<GameVersion>> = redis
+            .get_deserialized_from_json(TAGS_NAMESPACE, "game_version")
+            .await?;

         if let Some(res) = res {
             return Ok(res);
@@ -279,12 +266,7 @@ impl GameVersion {
         .await?;

         redis
-            .set(
-                TAGS_NAMESPACE,
-                "game_version",
-                serde_json::to_string(&result)?,
-                None,
-            )
+            .set_serialized_to_json(TAGS_NAMESPACE, "game_version", &result, None)
             .await?;
         Ok(result)
     }
@@ -400,10 +382,9 @@ impl DonationPlatform {
     where
         E: sqlx::Executor<'a, Database = sqlx::Postgres>,
     {
-        let res = redis
-            .get::<String, _>(TAGS_NAMESPACE, "donation_platform")
-            .await?
-            .and_then(|x| serde_json::from_str::<Vec<DonationPlatform>>(&x).ok());
+        let res: Option<Vec<DonationPlatform>> = redis
+            .get_deserialized_from_json(TAGS_NAMESPACE, "donation_platform")
+            .await?;

         if let Some(res) = res {
             return Ok(res);
@@ -426,12 +407,7 @@ impl DonationPlatform {
         .await?;

         redis
-            .set(
-                TAGS_NAMESPACE,
-                "donation_platform",
-                serde_json::to_string(&result)?,
-                None,
-            )
+            .set_serialized_to_json(TAGS_NAMESPACE, "donation_platform", &result, None)
            .await?;

         Ok(result)
@@ -460,10 +436,9 @@ impl ReportType {
     where
         E: sqlx::Executor<'a, Database = sqlx::Postgres>,
     {
-        let res = redis
-            .get::<String, _>(TAGS_NAMESPACE, "report_type")
-            .await?
-            .and_then(|x| serde_json::from_str::<Vec<String>>(&x).ok());
+        let res: Option<Vec<String>> = redis
+            .get_deserialized_from_json(TAGS_NAMESPACE, "report_type")
+            .await?;

         if let Some(res) = res {
             return Ok(res);
@@ -480,12 +455,7 @@ impl ReportType {
         .await?;

         redis
-            .set(
-                TAGS_NAMESPACE,
-                "report_type",
-                serde_json::to_string(&result)?,
-                None,
-            )
+            .set_serialized_to_json(TAGS_NAMESPACE, "report_type", &result, None)
             .await?;

         Ok(result)
@@ -514,10 +484,9 @@ impl ProjectType {
     where
         E: sqlx::Executor<'a, Database = sqlx::Postgres>,
     {
-        let res = redis
-            .get::<String, _>(TAGS_NAMESPACE, "project_type")
-            .await?
-            .and_then(|x| serde_json::from_str::<Vec<String>>(&x).ok());
+        let res: Option<Vec<String>> = redis
+            .get_deserialized_from_json(TAGS_NAMESPACE, "project_type")
+            .await?;

         if let Some(res) = res {
             return Ok(res);
@@ -534,12 +503,7 @@ impl ProjectType {
         .await?;

         redis
-            .set(
-                TAGS_NAMESPACE,
-                "project_type",
-                serde_json::to_string(&result)?,
-                None,
-            )
+            .set_serialized_to_json(TAGS_NAMESPACE, "project_type", &result, None)
             .await?;

         Ok(result)
@@ -568,10 +532,9 @@ impl SideType {
     where
         E: sqlx::Executor<'a, Database = sqlx::Postgres>,
     {
-        let res = redis
-            .get::<String, _>(TAGS_NAMESPACE, "side_type")
-            .await?
-            .and_then(|x| serde_json::from_str::<Vec<String>>(&x).ok());
+        let res: Option<Vec<String>> = redis
+            .get_deserialized_from_json(TAGS_NAMESPACE, "side_type")
+            .await?;

         if let Some(res) = res {
             return Ok(res);
@@ -588,12 +551,7 @@ impl SideType {
         .await?;

         redis
-            .set(
-                TAGS_NAMESPACE,
-                "side_type",
-                serde_json::to_string(&result)?,
-                None,
-            )
+            .set_serialized_to_json(TAGS_NAMESPACE, "side_type", &result, None)
             .await?;

         Ok(result)
@@ -225,10 +225,10 @@ impl Collection {

         for collection in db_collections {
             redis
-                .set(
+                .set_serialized_to_json(
                     COLLECTIONS_NAMESPACE,
                     collection.id.0,
-                    serde_json::to_string(&collection)?,
+                    &collection,
                     None,
                 )
                 .await?;
@@ -49,19 +49,13 @@ impl Flow {
             .collect::<String>();

         redis
-            .set(
-                FLOWS_NAMESPACE,
-                &flow,
-                serde_json::to_string(&self)?,
-                Some(expires.num_seconds()),
-            )
+            .set_serialized_to_json(FLOWS_NAMESPACE, &flow, &self, Some(expires.num_seconds()))
             .await?;
         Ok(flow)
     }

     pub async fn get(id: &str, redis: &RedisPool) -> Result<Option<Flow>, DatabaseError> {
-        let res = redis.get::<String, _>(FLOWS_NAMESPACE, id).await?;
-        Ok(res.and_then(|x| serde_json::from_str(&x).ok()))
+        redis.get_deserialized_from_json(FLOWS_NAMESPACE, id).await
     }

     pub async fn remove(id: &str, redis: &RedisPool) -> Result<Option<()>, DatabaseError> {
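Note that the Flow entry above passes Some(expires.num_seconds()) where the other call sites pass None, so the auth-flow key expires on its own instead of waiting for explicit invalidation. A tiny hedged sketch of that TTL argument (chrono::Duration is the only assumption beyond the hunk above):

use chrono::Duration;

fn main() {
    // The flow cache entry is given a TTL instead of relying on manual invalidation.
    let expires = Duration::minutes(30);
    let ttl_seconds: Option<i64> = Some(expires.num_seconds());
    assert_eq!(ttl_seconds, Some(1800));
    // A permanent entry, by contrast, would pass `None` as the expiry argument.
}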
@@ -216,10 +216,10 @@ pub struct FileId(pub i64);
 #[sqlx(transparent)]
 pub struct PatId(pub i64);

-#[derive(Copy, Clone, Debug, Type, Deserialize)]
+#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize)]
 #[sqlx(transparent)]
 pub struct NotificationId(pub i64);
-#[derive(Copy, Clone, Debug, Type, Deserialize)]
+#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize)]
 #[sqlx(transparent)]
 pub struct NotificationActionId(pub i32);

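Caching whole rows as JSON is what forces the extra Serialize derives above: every ID newtype embedded in a cached struct now has to round-trip through serde_json. A standalone sketch of the same pattern (the sqlx Type/transparent attributes are left out so it compiles without a database):

use serde::{Deserialize, Serialize};

// Transparent newtype, as in ids.rs; `#[sqlx(transparent)]` is omitted here.
#[derive(Copy, Clone, Debug, PartialEq, Serialize, Deserialize)]
struct NotificationId(pub i64);

#[derive(Debug, PartialEq, Serialize, Deserialize)]
struct CachedNotification {
    id: NotificationId,
    read: bool,
}

fn main() -> serde_json::Result<()> {
    let original = CachedNotification { id: NotificationId(7), read: false };
    let json = serde_json::to_string(&original)?;
    // The newtype serializes as a bare number, so the cached JSON stays compact.
    assert_eq!(json, r#"{"id":7,"read":false}"#);
    assert_eq!(serde_json::from_str::<CachedNotification>(&json)?, original);
    Ok(())
}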
@@ -236,12 +236,7 @@ impl Image {

         for image in db_images {
             redis
-                .set(
-                    IMAGES_NAMESPACE,
-                    image.id.0,
-                    serde_json::to_string(&image)?,
-                    None,
-                )
+                .set_serialized_to_json(IMAGES_NAMESPACE, image.id.0, &image, None)
                 .await?;
             found_images.push(image);
         }
@@ -1,13 +1,18 @@
 use super::ids::*;
-use crate::database::models::DatabaseError;
+use crate::database::{models::DatabaseError, redis::RedisPool};
 use crate::models::notifications::NotificationBody;
 use chrono::{DateTime, Utc};
-use serde::Deserialize;
+use futures::TryStreamExt;
+use itertools::Itertools;
+use serde::{Deserialize, Serialize};

+const USER_NOTIFICATIONS_NAMESPACE: &str = "user_notifications";
+
 pub struct NotificationBuilder {
     pub body: NotificationBody,
 }

+#[derive(Serialize, Deserialize)]
 pub struct Notification {
     pub id: NotificationId,
     pub user_id: UserId,
@@ -16,7 +21,7 @@ pub struct Notification {
     pub created: DateTime<Utc>,
 }

-#[derive(Deserialize)]
+#[derive(Serialize, Deserialize)]
 pub struct NotificationAction {
     pub id: NotificationActionId,
     pub notification_id: NotificationId,
@@ -30,54 +35,68 @@ impl NotificationBuilder {
         &self,
         user: UserId,
         transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
+        redis: &RedisPool,
     ) -> Result<(), DatabaseError> {
-        self.insert_many(vec![user], transaction).await
+        self.insert_many(vec![user], transaction, redis).await
     }

     pub async fn insert_many(
         &self,
         users: Vec<UserId>,
         transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
+        redis: &RedisPool,
     ) -> Result<(), DatabaseError> {
+        let mut notifications = Vec::new();
         for user in users {
             let id = generate_notification_id(&mut *transaction).await?;

-            Notification {
+            notifications.push(Notification {
                 id,
                 user_id: user,
                 body: self.body.clone(),
                 read: false,
                 created: Utc::now(),
-            }
-            .insert(&mut *transaction)
-            .await?;
+            });
         }

+        Notification::insert_many(&notifications, transaction, redis).await?;
+
         Ok(())
     }
 }

 impl Notification {
-    pub async fn insert(
-        &self,
+    pub async fn insert_many(
+        notifications: &[Notification],
         transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
+        redis: &RedisPool,
     ) -> Result<(), DatabaseError> {
+        let notification_ids = notifications.iter().map(|n| n.id.0).collect_vec();
+        let user_ids = notifications.iter().map(|n| n.user_id.0).collect_vec();
+        let bodies = notifications
+            .iter()
+            .map(|n| Ok(serde_json::value::to_value(n.body.clone())?))
+            .collect::<Result<Vec<_>, DatabaseError>>()?;
         sqlx::query!(
             "
             INSERT INTO notifications (
                 id, user_id, body
             )
-            VALUES (
-                $1, $2, $3
-            )
+            SELECT * FROM UNNEST($1::bigint[], $2::bigint[], $3::jsonb[])
             ",
-            self.id as NotificationId,
-            self.user_id as UserId,
-            serde_json::value::to_value(self.body.clone())?
+            &notification_ids[..],
+            &user_ids[..],
+            &bodies[..],
         )
         .execute(&mut *transaction)
        .await?;

+        Notification::clear_user_notifications_cache(
+            notifications.iter().map(|n| &n.user_id),
+            redis,
+        )
+        .await?;
+
         Ok(())
     }

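The INSERT above replaces one round trip per notification with a single statement that unpacks three parallel Postgres arrays via UNNEST. A hedged, self-contained sketch of the same technique using the dynamic sqlx API (the connection string, table and columns are illustrative; the real code uses the compile-time query! macro against the actual notifications schema, and binding serde_json::Value arrays needs sqlx's json feature):

use sqlx::postgres::PgPoolOptions;

#[tokio::main]
async fn main() -> Result<(), sqlx::Error> {
    let pool = PgPoolOptions::new()
        .connect("postgres://localhost/example") // assumed connection string
        .await?;

    // Parallel arrays: one element per row to insert.
    let ids: Vec<i64> = vec![1, 2, 3];
    let user_ids: Vec<i64> = vec![10, 10, 11];
    let bodies: Vec<serde_json::Value> = vec![
        serde_json::json!({"kind": "demo"}),
        serde_json::json!({"kind": "demo"}),
        serde_json::json!({"kind": "demo"}),
    ];

    // UNNEST zips the arrays row-wise, so the whole batch is one statement.
    sqlx::query(
        "INSERT INTO notifications (id, user_id, body)
         SELECT * FROM UNNEST($1::bigint[], $2::bigint[], $3::jsonb[])",
    )
    .bind(&ids)
    .bind(&user_ids)
    .bind(&bodies)
    .execute(&pool)
    .await?;

    Ok(())
}

Compared with looping over single-row INSERTs, this keeps the batch inside one statement and one transaction round trip, which is the point of the "Batch insert of notifications" bullet in the commit message.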
@@ -100,8 +119,6 @@ impl Notification {
     where
         E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
     {
-        use futures::stream::TryStreamExt;
-
         let notification_ids_parsed: Vec<i64> = notification_ids.iter().map(|x| x.0).collect();
         sqlx::query!(
             "
@@ -152,13 +169,20 @@ impl Notification {
     pub async fn get_many_user<'a, E>(
         user_id: UserId,
         exec: E,
-    ) -> Result<Vec<Notification>, sqlx::Error>
+        redis: &RedisPool,
+    ) -> Result<Vec<Notification>, DatabaseError>
     where
         E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
     {
-        use futures::stream::TryStreamExt;
+        let cached_notifications: Option<Vec<Notification>> = redis
+            .get_deserialized_from_json(USER_NOTIFICATIONS_NAMESPACE, user_id.0)
+            .await?;

-        sqlx::query!(
+        if let Some(notifications) = cached_notifications {
+            return Ok(notifications);
+        }
+
+        let db_notifications = sqlx::query!(
             "
             SELECT n.id, n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type, n.body,
             JSONB_AGG(DISTINCT jsonb_build_object('id', na.id, 'notification_id', na.notification_id, 'title', na.title, 'action_route_method', na.action_route_method, 'action_route', na.action_route)) filter (where na.id is not null) actions
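get_many_user now follows the standard read-through order: probe the per-user cache key, fall back to the database on a miss, then repopulate the key before returning. The skeleton of that flow, with the storage reduced to a plain map and closure so it runs on its own (names are illustrative):

use std::collections::HashMap;

// Read-through skeleton: cache probe, DB fallback, cache refill.
fn get_many_user(
    user_id: i64,
    cache: &mut HashMap<i64, Vec<String>>,
    load_from_db: impl Fn(i64) -> Vec<String>,
) -> Vec<String> {
    if let Some(hit) = cache.get(&user_id) {
        return hit.clone();
    }
    let db_notifications = load_from_db(user_id);
    cache.insert(user_id, db_notifications.clone());
    db_notifications
}

fn main() {
    let mut cache = HashMap::new();
    let first = get_many_user(7, &mut cache, |_| vec!["welcome".to_string()]);
    // The second call is served from the cache without touching the "database" closure.
    let second = get_many_user(7, &mut cache, |_| unreachable!("cache should hit"));
    assert_eq!(first, second);
}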
@@ -200,47 +224,67 @@ impl Notification {
             }))
         })
         .try_collect::<Vec<Notification>>()
-        .await
+        .await?;
+
+        redis
+            .set_serialized_to_json(
+                USER_NOTIFICATIONS_NAMESPACE,
+                user_id.0,
+                &db_notifications,
+                None,
+            )
+            .await?;
+
+        Ok(db_notifications)
     }

     pub async fn read(
         id: NotificationId,
         transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
-    ) -> Result<Option<()>, sqlx::error::Error> {
-        Self::read_many(&[id], transaction).await
+        redis: &RedisPool,
+    ) -> Result<Option<()>, DatabaseError> {
+        Self::read_many(&[id], transaction, redis).await
     }

     pub async fn read_many(
         notification_ids: &[NotificationId],
         transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
-    ) -> Result<Option<()>, sqlx::error::Error> {
+        redis: &RedisPool,
+    ) -> Result<Option<()>, DatabaseError> {
         let notification_ids_parsed: Vec<i64> = notification_ids.iter().map(|x| x.0).collect();

-        sqlx::query!(
+        let affected_users = sqlx::query!(
             "
             UPDATE notifications
             SET read = TRUE
             WHERE id = ANY($1)
+            RETURNING user_id
             ",
             &notification_ids_parsed
         )
-        .execute(&mut *transaction)
+        .fetch_many(&mut *transaction)
+        .try_filter_map(|e| async { Ok(e.right().map(|x| UserId(x.user_id))) })
+        .try_collect::<Vec<_>>()
         .await?;

+        Notification::clear_user_notifications_cache(affected_users.iter(), redis).await?;
+
         Ok(Some(()))
     }

     pub async fn remove(
         id: NotificationId,
         transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
-    ) -> Result<Option<()>, sqlx::error::Error> {
-        Self::remove_many(&[id], transaction).await
+        redis: &RedisPool,
+    ) -> Result<Option<()>, DatabaseError> {
+        Self::remove_many(&[id], transaction, redis).await
     }

     pub async fn remove_many(
         notification_ids: &[NotificationId],
         transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
-    ) -> Result<Option<()>, sqlx::error::Error> {
+        redis: &RedisPool,
+    ) -> Result<Option<()>, DatabaseError> {
         let notification_ids_parsed: Vec<i64> = notification_ids.iter().map(|x| x.0).collect();

         sqlx::query!(
@@ -253,16 +297,36 @@ impl Notification {
         .execute(&mut *transaction)
         .await?;

-        sqlx::query!(
+        let affected_users = sqlx::query!(
             "
             DELETE FROM notifications
             WHERE id = ANY($1)
+            RETURNING user_id
             ",
             &notification_ids_parsed
         )
-        .execute(&mut *transaction)
+        .fetch_many(&mut *transaction)
+        .try_filter_map(|e| async { Ok(e.right().map(|x| UserId(x.user_id))) })
+        .try_collect::<Vec<_>>()
         .await?;

+        Notification::clear_user_notifications_cache(affected_users.iter(), redis).await?;
+
         Ok(Some(()))
     }

+    pub async fn clear_user_notifications_cache(
+        user_ids: impl IntoIterator<Item = &UserId>,
+        redis: &RedisPool,
+    ) -> Result<(), DatabaseError> {
+        redis
+            .delete_many(
+                user_ids
+                    .into_iter()
+                    .map(|id| (USER_NOTIFICATIONS_NAMESPACE, Some(id.0.to_string()))),
+            )
+            .await?;
+
+        Ok(())
+    }
 }
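Both read_many and remove_many now share one invalidation recipe: have the UPDATE/DELETE return the affected user_id rows, collect them, and drop those users' cached notification lists through a single delete_many call. A compact sketch of that shape with a stubbed cache so it stands alone (RedisPool, DatabaseError and the fetch_many stream are simplified away; the dedupe step is an illustrative addition):

use std::collections::{HashMap, HashSet};

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct UserId(i64);

/// Stand-in cache keyed by (namespace, user id), mirroring delete_many's input shape.
#[derive(Default)]
struct Cache {
    entries: HashMap<(&'static str, String), String>,
}

impl Cache {
    fn delete_many(&mut self, keys: impl IntoIterator<Item = (&'static str, Option<String>)>) {
        for (namespace, key) in keys.into_iter().filter_map(|(ns, k)| k.map(|k| (ns, k))) {
            self.entries.remove(&(namespace, key));
        }
    }
}

const USER_NOTIFICATIONS_NAMESPACE: &str = "user_notifications";

/// Pretend this is the `UPDATE ... RETURNING user_id` / `DELETE ... RETURNING user_id` result.
fn returned_user_ids() -> Vec<UserId> {
    vec![UserId(10), UserId(10), UserId(11)]
}

fn main() {
    let mut cache = Cache::default();
    cache.entries.insert((USER_NOTIFICATIONS_NAMESPACE, "10".into()), "[...]".into());
    cache.entries.insert((USER_NOTIFICATIONS_NAMESPACE, "11".into()), "[...]".into());

    // Dedupe the RETURNING rows, then clear every affected user's cached list at once.
    let affected: HashSet<UserId> = returned_user_ids().into_iter().collect();
    cache.delete_many(
        affected
            .iter()
            .map(|id| (USER_NOTIFICATIONS_NAMESPACE, Some(id.0.to_string()))),
    );

    assert!(cache.entries.is_empty());
}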
@@ -187,10 +187,10 @@ impl Organization {

         for organization in organizations {
             redis
-                .set(
+                .set_serialized_to_json(
                     ORGANIZATIONS_NAMESPACE,
                     organization.id.0,
-                    serde_json::to_string(&organization)?,
+                    &organization,
                     None,
                 )
                 .await?;
@@ -169,7 +169,7 @@ impl PersonalAccessToken {

         for pat in db_pats {
             redis
-                .set(PATS_NAMESPACE, pat.id.0, serde_json::to_string(&pat)?, None)
+                .set_serialized_to_json(PATS_NAMESPACE, pat.id.0, &pat, None)
                 .await?;
             redis
                 .set(
@@ -195,9 +195,8 @@ impl PersonalAccessToken {
         E: sqlx::Executor<'a, Database = sqlx::Postgres>,
     {
         let res = redis
-            .get::<String, _>(PATS_USERS_NAMESPACE, user_id.0)
-            .await?
-            .and_then(|x| serde_json::from_str::<Vec<i64>>(&x).ok());
+            .get_deserialized_from_json::<Vec<i64>, _>(PATS_USERS_NAMESPACE, user_id.0)
+            .await?;

         if let Some(res) = res {
             return Ok(res.into_iter().map(PatId).collect());
@@ -1,10 +1,11 @@
-use super::ids::*;
+use super::{ids::*, User};
 use crate::database::models;
 use crate::database::models::DatabaseError;
 use crate::database::redis::RedisPool;
 use crate::models::ids::base62_impl::{parse_base62, to_base62};
 use crate::models::projects::{MonetizationStatus, ProjectStatus};
 use chrono::{DateTime, Utc};
 use futures::TryStreamExt;
+use itertools::Itertools;
 use serde::{Deserialize, Serialize};

@@ -445,16 +446,21 @@ impl Project {

             models::TeamMember::clear_cache(project.inner.team_id, redis).await?;

-            sqlx::query!(
+            let affected_user_ids = sqlx::query!(
                 "
                 DELETE FROM team_members
                 WHERE team_id = $1
+                RETURNING user_id
                 ",
                 project.inner.team_id as TeamId,
             )
-            .execute(&mut *transaction)
+            .fetch_many(&mut *transaction)
+            .try_filter_map(|e| async { Ok(e.right().map(|x| UserId(x.user_id))) })
+            .try_collect::<Vec<_>>()
             .await?;

+            User::clear_project_cache(&affected_user_ids, redis).await?;
+
             sqlx::query!(
                 "
                 DELETE FROM teams
@@ -520,8 +526,6 @@ impl Project {
     where
         E: sqlx::Executor<'a, Database = sqlx::Postgres>,
     {
-        use futures::TryStreamExt;
-
         if project_strings.is_empty() {
             return Ok(Vec::new());
         }
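One of the commit bullets fixes calling delete_many without any elements, and the context above shows the matching defensive shape on the read side (return early when project_strings is empty). A hypothetical guard of that kind inside a delete-style helper — not the project's actual fix, just the pattern:

/// Hypothetical invalidation helper: bail out before issuing a Redis DEL with zero keys,
/// since an empty key list has nothing to do and can trip up the underlying client.
fn delete_many(keys: &[String]) -> Result<usize, String> {
    if keys.is_empty() {
        return Ok(0); // nothing to invalidate, skip the round trip entirely
    }
    // ... issue a single DEL covering all keys here ...
    Ok(keys.len())
}

fn main() {
    assert_eq!(delete_many(&[]), Ok(0));
    assert_eq!(delete_many(&["users_projects:42".to_string()]), Ok(1));
}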
@@ -695,12 +699,7 @@ impl Project {

         for project in db_projects {
             redis
-                .set(
-                    PROJECTS_NAMESPACE,
-                    project.inner.id.0,
-                    serde_json::to_string(&project)?,
-                    None,
-                )
+                .set_serialized_to_json(PROJECTS_NAMESPACE, project.inner.id.0, &project, None)
                 .await?;
             if let Some(slug) = &project.inner.slug {
                 redis
@@ -729,14 +728,10 @@ impl Project {
     {
         type Dependencies = Vec<(Option<VersionId>, Option<ProjectId>, Option<ProjectId>)>;

-        use futures::stream::TryStreamExt;
-
         let dependencies = redis
-            .get::<String, _>(PROJECTS_DEPENDENCIES_NAMESPACE, id.0)
+            .get_deserialized_from_json::<Dependencies, _>(PROJECTS_DEPENDENCIES_NAMESPACE, id.0)
             .await?;
-        if let Some(dependencies) =
-            dependencies.and_then(|x| serde_json::from_str::<Dependencies>(&x).ok())
-        {
+        if let Some(dependencies) = dependencies {
             return Ok(dependencies);
         }

@@ -768,12 +763,7 @@ impl Project {
         .await?;

         redis
-            .set(
-                PROJECTS_DEPENDENCIES_NAMESPACE,
-                id.0,
-                serde_json::to_string(&dependencies)?,
-                None,
-            )
+            .set_serialized_to_json(PROJECTS_DEPENDENCIES_NAMESPACE, id.0, &dependencies, None)
             .await?;
         Ok(dependencies)
     }
@@ -213,12 +213,7 @@ impl Session {

         for session in db_sessions {
             redis
-                .set(
-                    SESSIONS_NAMESPACE,
-                    session.id.0,
-                    serde_json::to_string(&session)?,
-                    None,
-                )
+                .set_serialized_to_json(SESSIONS_NAMESPACE, session.id.0, &session, None)
                 .await?;
             redis
                 .set(
@@ -244,9 +239,8 @@ impl Session {
         E: sqlx::Executor<'a, Database = sqlx::Postgres>,
     {
         let res = redis
-            .get::<String, _>(SESSIONS_USERS_NAMESPACE, user_id.0)
-            .await?
-            .and_then(|x| serde_json::from_str::<Vec<i64>>(&x).ok());
+            .get_deserialized_from_json::<Vec<i64>, _>(SESSIONS_USERS_NAMESPACE, user_id.0)
+            .await?;

         if let Some(res) = res {
             return Ok(res.into_iter().map(SessionId).collect());
@@ -268,12 +262,7 @@ impl Session {
         .await?;

         redis
-            .set(
-                SESSIONS_USERS_NAMESPACE,
-                user_id.0,
-                serde_json::to_string(&db_sessions)?,
-                None,
-            )
+            .set_serialized_to_json(SESSIONS_USERS_NAMESPACE, user_id.0, &db_sessions, None)
             .await?;

         Ok(db_sessions)
@@ -261,12 +261,7 @@ impl TeamMember {
             let mut members = members.collect::<Vec<_>>();

             redis
-                .set(
-                    TEAMS_NAMESPACE,
-                    id.0,
-                    serde_json::to_string(&members)?,
-                    None,
-                )
+                .set_serialized_to_json(TEAMS_NAMESPACE, id.0, &members, None)
                 .await?;
             found_teams.append(&mut members);
         }
@@ -10,7 +10,7 @@ use serde::{Deserialize, Serialize};

 const USERS_NAMESPACE: &str = "users";
 const USER_USERNAMES_NAMESPACE: &str = "users_usernames";
-// const USERS_PROJECTS_NAMESPACE: &str = "users_projects";
+const USERS_PROJECTS_NAMESPACE: &str = "users_projects";

 #[derive(Deserialize, Serialize, Clone, Debug)]
 pub struct User {
@@ -234,12 +234,7 @@ impl User {

         for user in db_users {
             redis
-                .set(
-                    USERS_NAMESPACE,
-                    user.id.0,
-                    serde_json::to_string(&user)?,
-                    None,
-                )
+                .set_serialized_to_json(USERS_NAMESPACE, user.id.0, &user, None)
                 .await?;
             redis
                 .set(
@@ -276,13 +271,22 @@ impl User {
     pub async fn get_projects<'a, E>(
         user_id: UserId,
         exec: E,
-    ) -> Result<Vec<ProjectId>, sqlx::Error>
+        redis: &RedisPool,
+    ) -> Result<Vec<ProjectId>, DatabaseError>
     where
         E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
     {
         use futures::stream::TryStreamExt;

-        let projects = sqlx::query!(
+        let cached_projects = redis
+            .get_deserialized_from_json::<Vec<ProjectId>, _>(USERS_PROJECTS_NAMESPACE, user_id.0)
+            .await?;
+
+        if let Some(projects) = cached_projects {
+            return Ok(projects);
+        }
+
+        let db_projects = sqlx::query!(
             "
             SELECT m.id FROM mods m
             INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.accepted = TRUE
@@ -296,7 +300,11 @@ impl User {
         .try_collect::<Vec<ProjectId>>()
         .await?;

-        Ok(projects)
+        redis
+            .set_serialized_to_json(USERS_PROJECTS_NAMESPACE, user_id.0, &db_projects, None)
+            .await?;
+
+        Ok(db_projects)
     }

     pub async fn get_collections<'a, E>(
@@ -365,6 +373,21 @@ impl User {
         Ok(())
     }

+    pub async fn clear_project_cache(
+        user_ids: &[UserId],
+        redis: &RedisPool,
+    ) -> Result<(), DatabaseError> {
+        redis
+            .delete_many(
+                user_ids
+                    .into_iter()
+                    .map(|id| (USERS_PROJECTS_NAMESPACE, Some(id.0.to_string()))),
+            )
+            .await?;
+
+        Ok(())
+    }
+
     pub async fn remove(
         id: UserId,
         full: bool,
@@ -690,12 +690,7 @@ impl Version {

         for version in db_versions {
             redis
-                .set(
-                    VERSIONS_NAMESPACE,
-                    version.inner.id.0,
-                    serde_json::to_string(&version)?,
-                    None,
-                )
+                .set_serialized_to_json(VERSIONS_NAMESPACE, version.inner.id.0, &version, None)
                 .await?;

             found_versions.push(version);
@@ -827,12 +822,7 @@ impl Version {

         for (key, mut files) in save_files {
             redis
-                .set(
-                    VERSION_FILES_NAMESPACE,
-                    key,
-                    serde_json::to_string(&files)?,
-                    None,
-                )
+                .set_serialized_to_json(VERSION_FILES_NAMESPACE, key, &files, None)
                 .await?;

             found_files.append(&mut files);