forked from didirus/AstralRinth
Collections (#688)
* initial draft; unfinished
* images, fixes
* fixes
* println
* revisions
* fixes
* alternate context setup version
* rev
* partial revs
* rev
* clippy, fmt
* fmt/clippy/prepare
* fixes
* revs
@@ -1,5 +1,6 @@
pub mod models;
mod postgres_database;
pub use models::Image;
pub use models::Project;
pub use models::Version;
pub use postgres_database::check_for_migrations;
src/database/models/collection_item.rs (new file, 263 lines)
@@ -0,0 +1,263 @@
use super::ids::*;
use crate::database::models;
use crate::database::models::DatabaseError;
use crate::models::collections::CollectionStatus;
use chrono::{DateTime, Utc};
use redis::cmd;
use serde::{Deserialize, Serialize};

const COLLECTIONS_NAMESPACE: &str = "collections";
const DEFAULT_EXPIRY: i64 = 1800; // 30 minutes

#[derive(Clone)]
pub struct CollectionBuilder {
    pub collection_id: CollectionId,
    pub user_id: UserId,
    pub title: String,
    pub description: String,
    pub status: CollectionStatus,
    pub projects: Vec<ProjectId>,
}

impl CollectionBuilder {
    pub async fn insert(
        self,
        transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
    ) -> Result<CollectionId, DatabaseError> {
        let collection_struct = Collection {
            id: self.collection_id,
            title: self.title,
            user_id: self.user_id,
            description: self.description,
            created: Utc::now(),
            updated: Utc::now(),
            icon_url: None,
            color: None,
            status: self.status,
            projects: self.projects,
        };
        collection_struct.insert(&mut *transaction).await?;

        Ok(self.collection_id)
    }
}

#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Collection {
    pub id: CollectionId,
    pub user_id: UserId,
    pub title: String,
    pub description: String,
    pub created: DateTime<Utc>,
    pub updated: DateTime<Utc>,
    pub icon_url: Option<String>,
    pub color: Option<u32>,
    pub status: CollectionStatus,
    pub projects: Vec<ProjectId>,
}

impl Collection {
    pub async fn insert(
        &self,
        transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
    ) -> Result<(), DatabaseError> {
        sqlx::query!(
            "
            INSERT INTO collections (
                id, user_id, title, description,
                created, icon_url, status
            )
            VALUES (
                $1, $2, $3, $4,
                $5, $6, $7
            )
            ",
            self.id as CollectionId,
            self.user_id as UserId,
            &self.title,
            &self.description,
            self.created,
            self.icon_url.as_ref(),
            self.status.to_string(),
        )
        .execute(&mut *transaction)
        .await?;

        for project_id in self.projects.iter() {
            sqlx::query!(
                "
                INSERT INTO collections_mods (collection_id, mod_id)
                VALUES ($1, $2)
                ON CONFLICT DO NOTHING
                ",
                self.id as CollectionId,
                *project_id as ProjectId,
            )
            .execute(&mut *transaction)
            .await?;
        }

        Ok(())
    }

    pub async fn remove(
        id: CollectionId,
        transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
        redis: &deadpool_redis::Pool,
    ) -> Result<Option<()>, DatabaseError> {
        let collection = Self::get(id, &mut *transaction, redis).await?;

        if let Some(collection) = collection {
            sqlx::query!(
                "
                DELETE FROM collections_mods
                WHERE collection_id = $1
                ",
                id as CollectionId,
            )
            .execute(&mut *transaction)
            .await?;

            sqlx::query!(
                "
                DELETE FROM collections
                WHERE id = $1
                ",
                id as CollectionId,
            )
            .execute(&mut *transaction)
            .await?;

            models::Collection::clear_cache(collection.id, redis).await?;

            Ok(Some(()))
        } else {
            Ok(None)
        }
    }

    pub async fn get<'a, 'b, E>(
        id: CollectionId,
        executor: E,
        redis: &deadpool_redis::Pool,
    ) -> Result<Option<Collection>, DatabaseError>
    where
        E: sqlx::Executor<'a, Database = sqlx::Postgres>,
    {
        Collection::get_many(&[id], executor, redis)
            .await
            .map(|x| x.into_iter().next())
    }

    pub async fn get_many<'a, E>(
        collection_ids: &[CollectionId],
        exec: E,
        redis: &deadpool_redis::Pool,
    ) -> Result<Vec<Collection>, DatabaseError>
    where
        E: sqlx::Executor<'a, Database = sqlx::Postgres>,
    {
        use futures::TryStreamExt;

        if collection_ids.is_empty() {
            return Ok(Vec::new());
        }

        let mut redis = redis.get().await?;

        let mut found_collections = Vec::new();
        let mut remaining_collections: Vec<CollectionId> = collection_ids.to_vec();

        if !collection_ids.is_empty() {
            let collections = cmd("MGET")
                .arg(
                    collection_ids
                        .iter()
                        .map(|x| format!("{}:{}", COLLECTIONS_NAMESPACE, x.0))
                        .collect::<Vec<_>>(),
                )
                .query_async::<_, Vec<Option<String>>>(&mut redis)
                .await?;

            for collection in collections {
                if let Some(collection) =
                    collection.and_then(|x| serde_json::from_str::<Collection>(&x).ok())
                {
                    remaining_collections.retain(|x| collection.id.0 != x.0);
                    found_collections.push(collection);
                    continue;
                }
            }
        }

        if !remaining_collections.is_empty() {
            let collection_ids_parsed: Vec<i64> =
                remaining_collections.iter().map(|x| x.0).collect();
            let db_collections: Vec<Collection> = sqlx::query!(
                "
                SELECT c.id id, c.title title, c.description description,
                c.icon_url icon_url, c.color color, c.created created, c.user_id user_id,
                c.updated updated, c.status status,
                ARRAY_AGG(DISTINCT cm.mod_id) filter (where cm.mod_id is not null) mods
                FROM collections c
                LEFT JOIN collections_mods cm ON cm.collection_id = c.id
                WHERE c.id = ANY($1)
                GROUP BY c.id;
                ",
                &collection_ids_parsed,
            )
            .fetch_many(exec)
            .try_filter_map(|e| async {
                Ok(e.right().map(|m| {
                    let id = m.id;

                    Collection {
                        id: CollectionId(id),
                        user_id: UserId(m.user_id),
                        title: m.title.clone(),
                        description: m.description.clone(),
                        icon_url: m.icon_url.clone(),
                        color: m.color.map(|x| x as u32),
                        created: m.created,
                        updated: m.updated,
                        status: CollectionStatus::from_str(&m.status),
                        projects: m
                            .mods
                            .unwrap_or_default()
                            .into_iter()
                            .map(ProjectId)
                            .collect(),
                    }
                }))
            })
            .try_collect::<Vec<Collection>>()
            .await?;

            for collection in db_collections {
                cmd("SET")
                    .arg(format!("{}:{}", COLLECTIONS_NAMESPACE, collection.id.0))
                    .arg(serde_json::to_string(&collection)?)
                    .arg("EX")
                    .arg(DEFAULT_EXPIRY)
                    .query_async::<_, ()>(&mut redis)
                    .await?;

                found_collections.push(collection);
            }
        }

        Ok(found_collections)
    }

    pub async fn clear_cache(
        id: CollectionId,
        redis: &deadpool_redis::Pool,
    ) -> Result<(), DatabaseError> {
        let mut redis = redis.get().await?;
        let mut cmd = cmd("DEL");

        cmd.arg(format!("{}:{}", COLLECTIONS_NAMESPACE, id.0));
        cmd.query_async::<_, ()>(&mut redis).await?;

        Ok(())
    }
}
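Both getters go through Redis first: Collection::get_many issues an MGET on the collections:{id} keys, queries Postgres only for the misses, and writes those rows back with the 30-minute DEFAULT_EXPIRY. A minimal caller-side sketch of read-then-invalidate (illustration only, not part of this diff; pg_pool and redis_pool are assumed to be the sqlx::PgPool and deadpool_redis::Pool held in application state):

use crate::database::models::{Collection, CollectionId, DatabaseError};

async fn refresh_collection(
    pg_pool: &sqlx::PgPool,
    redis_pool: &deadpool_redis::Pool,
    id: CollectionId,
) -> Result<(), DatabaseError> {
    // First read may miss Redis, fall through to Postgres, and prime "collections:{id}".
    let _before = Collection::get(id, pg_pool, redis_pool).await?;

    // ... run an UPDATE against the collections table here ...

    // Drop the cached copy so the next get()/get_many() sees the new row.
    Collection::clear_cache(id, redis_pool).await?;
    Ok(())
}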
@@ -62,6 +62,13 @@ generate_ids!(
    "SELECT EXISTS(SELECT 1 FROM teams WHERE id=$1)",
    TeamId
);
generate_ids!(
    pub generate_collection_id,
    CollectionId,
    8,
    "SELECT EXISTS(SELECT 1 FROM collections WHERE id=$1)",
    CollectionId
);
generate_ids!(
    pub generate_file_id,
    FileId,
@@ -130,6 +137,14 @@ generate_ids!(
    SessionId
);

generate_ids!(
    pub generate_image_id,
    ImageId,
    8,
    "SELECT EXISTS(SELECT 1 FROM uploaded_images WHERE id=$1)",
    ImageId
);

#[derive(Copy, Clone, Debug, PartialEq, Eq, Type, Serialize, Deserialize)]
#[sqlx(transparent)]
pub struct UserId(pub i64);
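generate_collection_id and generate_image_id reuse the existing generate_ids! pattern, which draws a random 8-character id and retries while the supplied EXISTS query reports a collision (the macro body itself is unchanged and not shown in this diff). A hedged sketch of wiring the new helper to CollectionBuilder; the pool handle, the CollectionStatus::Listed variant, and the assumption that DatabaseError converts from sqlx::Error are illustrative only:

use crate::database::models::collection_item::CollectionBuilder;
use crate::database::models::{generate_collection_id, CollectionId, DatabaseError, UserId};
use crate::models::collections::CollectionStatus;

async fn create_empty_collection(
    pool: &sqlx::PgPool,
    user_id: UserId,
) -> Result<CollectionId, DatabaseError> {
    let mut transaction = pool.begin().await?;

    // Fresh, collision-checked id (8 characters, per the macro arguments above).
    let collection_id = generate_collection_id(&mut transaction).await?;

    let collection_id = CollectionBuilder {
        collection_id,
        user_id,
        title: "My first collection".to_string(),
        description: "Mods I actually use".to_string(),
        status: CollectionStatus::Listed, // assumed variant
        projects: Vec::new(),
    }
    .insert(&mut transaction)
    .await?;

    transaction.commit().await?;
    Ok(collection_id)
}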
@@ -171,7 +186,11 @@ pub struct LoaderId(pub i32);
#[sqlx(transparent)]
pub struct CategoryId(pub i32);

-#[derive(Copy, Clone, Debug, Type)]
+#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize)]
#[sqlx(transparent)]
pub struct CollectionId(pub i64);

#[derive(Copy, Clone, Debug, Type, Deserialize, Serialize)]
#[sqlx(transparent)]
pub struct ReportId(pub i64);
#[derive(Copy, Clone, Debug, Type)]
@@ -196,7 +215,7 @@ pub struct NotificationActionId(pub i32);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq)]
#[sqlx(transparent)]
pub struct ThreadId(pub i64);
-#[derive(Copy, Clone, Debug, Type, Deserialize)]
+#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash)]
#[sqlx(transparent)]
pub struct ThreadMessageId(pub i64);

@@ -204,6 +223,10 @@ pub struct ThreadMessageId(pub i64);
#[sqlx(transparent)]
pub struct SessionId(pub i64);

#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash)]
#[sqlx(transparent)]
pub struct ImageId(pub i64);

use crate::models::ids;

impl From<ids::ProjectId> for ProjectId {
@@ -246,6 +269,16 @@ impl From<VersionId> for ids::VersionId {
        ids::VersionId(id.0 as u64)
    }
}
impl From<ids::CollectionId> for CollectionId {
    fn from(id: ids::CollectionId) -> Self {
        CollectionId(id.0 as i64)
    }
}
impl From<CollectionId> for ids::CollectionId {
    fn from(id: CollectionId) -> Self {
        ids::CollectionId(id.0 as u64)
    }
}
impl From<ids::ReportId> for ReportId {
    fn from(id: ids::ReportId) -> Self {
        ReportId(id.0 as i64)
@@ -256,6 +289,16 @@ impl From<ReportId> for ids::ReportId {
        ids::ReportId(id.0 as u64)
    }
}
impl From<ImageId> for ids::ImageId {
    fn from(id: ImageId) -> Self {
        ids::ImageId(id.0 as u64)
    }
}
impl From<ids::ImageId> for ImageId {
    fn from(id: ids::ImageId) -> Self {
        ImageId(id.0 as i64)
    }
}
impl From<ids::NotificationId> for NotificationId {
    fn from(id: ids::NotificationId) -> Self {
        NotificationId(id.0 as i64)
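These From impls mirror the existing id conversions: the database layer stores ids in signed i64 columns, while crate::models::ids wraps the same value as a u64 for the API layer. A small illustrative round trip (not from the diff):

use crate::database::models::ids as db_ids;
use crate::models::ids as api_ids;

fn round_trip(collection: db_ids::CollectionId) -> db_ids::CollectionId {
    // i64-backed database id -> u64-backed API id and back again.
    let api_id: api_ids::CollectionId = collection.into();
    db_ids::CollectionId::from(api_id)
}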
src/database/models/image_item.rs (new file, 275 lines)
@@ -0,0 +1,275 @@
use super::ids::*;
use crate::{database::models::DatabaseError, models::images::ImageContext};
use chrono::{DateTime, Utc};
use redis::cmd;
use serde::{Deserialize, Serialize};

const IMAGES_NAMESPACE: &str = "images";
const DEFAULT_EXPIRY: i64 = 1800; // 30 minutes

#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Image {
    pub id: ImageId,
    pub url: String,
    pub size: u64,
    pub created: DateTime<Utc>,
    pub owner_id: UserId,

    // context it is associated with
    pub context: String,

    pub project_id: Option<ProjectId>,
    pub version_id: Option<VersionId>,
    pub thread_message_id: Option<ThreadMessageId>,
    pub report_id: Option<ReportId>,
}

impl Image {
    pub async fn insert(
        &self,
        transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
    ) -> Result<(), DatabaseError> {
        sqlx::query!(
            "
            INSERT INTO uploaded_images (
                id, url, size, created, owner_id, context, mod_id, version_id, thread_message_id, report_id
            )
            VALUES (
                $1, $2, $3, $4, $5, $6, $7, $8, $9, $10
            );
            ",
            self.id as ImageId,
            self.url,
            self.size as i64,
            self.created,
            self.owner_id as UserId,
            self.context,
            self.project_id.map(|x| x.0),
            self.version_id.map(|x| x.0),
            self.thread_message_id.map(|x| x.0),
            self.report_id.map(|x| x.0),
        )
        .execute(&mut *transaction)
        .await?;

        Ok(())
    }

    pub async fn remove(
        id: ImageId,
        transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
        redis: &deadpool_redis::Pool,
    ) -> Result<Option<()>, DatabaseError> {
        let image = Self::get(id, &mut *transaction, redis).await?;

        if let Some(image) = image {
            sqlx::query!(
                "
                DELETE FROM uploaded_images
                WHERE id = $1
                ",
                id as ImageId,
            )
            .execute(&mut *transaction)
            .await?;

            Image::clear_cache(image.id, redis).await?;

            Ok(Some(()))
        } else {
            Ok(None)
        }
    }

    pub async fn get_many_contexted(
        context: ImageContext,
        transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
    ) -> Result<Vec<Image>, sqlx::Error> {
        // Set all of project_id, version_id, thread_message_id, report_id to None
        // Then set the one that is relevant to Some

        let mut project_id = None;
        let mut version_id = None;
        let mut thread_message_id = None;
        let mut report_id = None;
        match context {
            ImageContext::Project {
                project_id: Some(id),
            } => {
                project_id = Some(ProjectId::from(id));
            }
            ImageContext::Version {
                version_id: Some(id),
            } => {
                version_id = Some(VersionId::from(id));
            }
            ImageContext::ThreadMessage {
                thread_message_id: Some(id),
            } => {
                thread_message_id = Some(ThreadMessageId::from(id));
            }
            ImageContext::Report {
                report_id: Some(id),
            } => {
                report_id = Some(ReportId::from(id));
            }
            _ => {}
        }

        use futures::stream::TryStreamExt;
        sqlx::query!(
            "
            SELECT id, url, size, created, owner_id, context, mod_id, version_id, thread_message_id, report_id
            FROM uploaded_images
            WHERE context = $1
            AND (mod_id = $2 OR ($2 IS NULL AND mod_id IS NULL))
            AND (version_id = $3 OR ($3 IS NULL AND version_id IS NULL))
            AND (thread_message_id = $4 OR ($4 IS NULL AND thread_message_id IS NULL))
            AND (report_id = $5 OR ($5 IS NULL AND report_id IS NULL))
            GROUP BY id
            ",
            context.context_as_str(),
            project_id.map(|x| x.0),
            version_id.map(|x| x.0),
            thread_message_id.map(|x| x.0),
            report_id.map(|x| x.0),
        )
        .fetch_many(transaction)
        .try_filter_map(|e| async {
            Ok(e.right().map(|row| {
                let id = ImageId(row.id);

                Image {
                    id,
                    url: row.url,
                    size: row.size as u64,
                    created: row.created,
                    owner_id: UserId(row.owner_id),
                    context: row.context,
                    project_id: row.mod_id.map(ProjectId),
                    version_id: row.version_id.map(VersionId),
                    thread_message_id: row.thread_message_id.map(ThreadMessageId),
                    report_id: row.report_id.map(ReportId),
                }
            }))
        })
        .try_collect::<Vec<Image>>()
        .await
    }

    pub async fn get<'a, 'b, E>(
        id: ImageId,
        executor: E,
        redis: &deadpool_redis::Pool,
    ) -> Result<Option<Image>, DatabaseError>
    where
        E: sqlx::Executor<'a, Database = sqlx::Postgres>,
    {
        Image::get_many(&[id], executor, redis)
            .await
            .map(|x| x.into_iter().next())
    }

    pub async fn get_many<'a, E>(
        image_ids: &[ImageId],
        exec: E,
        redis: &deadpool_redis::Pool,
    ) -> Result<Vec<Image>, DatabaseError>
    where
        E: sqlx::Executor<'a, Database = sqlx::Postgres>,
    {
        use futures::TryStreamExt;

        if image_ids.is_empty() {
            return Ok(Vec::new());
        }

        let mut redis = redis.get().await?;

        let mut found_images = Vec::new();
        let mut remaining_ids = image_ids.to_vec();

        let image_ids = image_ids.iter().map(|x| x.0).collect::<Vec<_>>();

        if !image_ids.is_empty() {
            let images = cmd("MGET")
                .arg(
                    image_ids
                        .iter()
                        .map(|x| format!("{}:{}", IMAGES_NAMESPACE, x))
                        .collect::<Vec<_>>(),
                )
                .query_async::<_, Vec<Option<String>>>(&mut redis)
                .await?;

            for image in images {
                if let Some(image) = image.and_then(|x| serde_json::from_str::<Image>(&x).ok()) {
                    remaining_ids.retain(|x| image.id.0 != x.0);
                    found_images.push(image);
                    continue;
                }
            }
        }

        if !remaining_ids.is_empty() {
            let db_images: Vec<Image> = sqlx::query!(
                "
                SELECT id, url, size, created, owner_id, context, mod_id, version_id, thread_message_id, report_id
                FROM uploaded_images
                WHERE id = ANY($1)
                GROUP BY id;
                ",
                &remaining_ids.iter().map(|x| x.0).collect::<Vec<_>>(),
            )
            .fetch_many(exec)
            .try_filter_map(|e| async {
                Ok(e.right().map(|i| {
                    let id = i.id;

                    Image {
                        id: ImageId(id),
                        url: i.url,
                        size: i.size as u64,
                        created: i.created,
                        owner_id: UserId(i.owner_id),
                        context: i.context,
                        project_id: i.mod_id.map(ProjectId),
                        version_id: i.version_id.map(VersionId),
                        thread_message_id: i.thread_message_id.map(ThreadMessageId),
                        report_id: i.report_id.map(ReportId),
                    }
                }))
            })
            .try_collect::<Vec<Image>>()
            .await?;

            for image in db_images {
                cmd("SET")
                    .arg(format!("{}:{}", IMAGES_NAMESPACE, image.id.0))
                    .arg(serde_json::to_string(&image)?)
                    .arg("EX")
                    .arg(DEFAULT_EXPIRY)
                    .query_async::<_, ()>(&mut redis)
                    .await?;

                found_images.push(image);
            }
        }

        Ok(found_images)
    }

    pub async fn clear_cache(
        id: ImageId,
        redis: &deadpool_redis::Pool,
    ) -> Result<(), DatabaseError> {
        let mut redis = redis.get().await?;
        let mut cmd = cmd("DEL");

        cmd.arg(format!("{}:{}", IMAGES_NAMESPACE, id.0));
        cmd.query_async::<_, ()>(&mut redis).await?;

        Ok(())
    }
}
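An uploaded image belongs to exactly one context (project, version, thread message, or report), and get_many_contexted filters on whichever foreign-key column that context carries. A hedged caller-side sketch, with the function name and the open transaction assumed:

use crate::database::models::image_item::Image;
use crate::models::images::ImageContext;

// Fetch every uploaded image attached to a single project.
async fn images_for_project(
    transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
    project_id: crate::models::ids::ProjectId,
) -> Result<Vec<Image>, sqlx::Error> {
    let context = ImageContext::Project {
        project_id: Some(project_id),
    };
    Image::get_many_contexted(context, transaction).await
}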
@@ -1,8 +1,10 @@
use thiserror::Error;

pub mod categories;
pub mod collection_item;
pub mod flow_item;
pub mod ids;
pub mod image_item;
pub mod notification_item;
pub mod pat_item;
pub mod project_item;
@@ -13,7 +15,9 @@ pub mod thread_item;
pub mod user_item;
pub mod version_item;

pub use collection_item::Collection;
pub use ids::*;
pub use image_item::Image;
pub use project_item::Project;
pub use team_item::Team;
pub use team_item::TeamMember;

@@ -546,6 +546,7 @@ impl Project {
            .flat_map(|x| parse_base62(&x.to_string()).ok())
            .map(|x| x as i64)
            .collect();

        let db_projects: Vec<QueryProject> = sqlx::query!(
            "
            SELECT m.id id, m.project_type project_type, m.title title, m.description description, m.downloads downloads, m.follows follows,
@@ -58,7 +58,7 @@ impl Report {

    pub async fn get<'a, E>(id: ReportId, exec: E) -> Result<Option<QueryReport>, sqlx::Error>
    where
-        E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
+        E: sqlx::Executor<'a, Database = sqlx::Postgres>,
    {
        Self::get_many(&[id], exec)
            .await
@@ -70,7 +70,7 @@ impl Report {
        exec: E,
    ) -> Result<Vec<QueryReport>, sqlx::Error>
    where
-        E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
+        E: sqlx::Executor<'a, Database = sqlx::Postgres>,
    {
        use futures::stream::TryStreamExt;

@@ -212,7 +212,7 @@ impl ThreadMessage {
        exec: E,
    ) -> Result<Option<ThreadMessage>, sqlx::Error>
    where
-        E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
+        E: sqlx::Executor<'a, Database = sqlx::Postgres>,
    {
        Self::get_many(&[id], exec)
            .await
@@ -224,7 +224,7 @@ impl ThreadMessage {
        exec: E,
    ) -> Result<Vec<ThreadMessage>, sqlx::Error>
    where
-        E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
+        E: sqlx::Executor<'a, Database = sqlx::Postgres>,
    {
        use futures::stream::TryStreamExt;

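Dropping the + Copy bound means these getters now accept any Postgres executor, including a non-Copy &mut reborrow of an open transaction rather than only a pool reference. A hedged sketch of what that allows; the function name and module paths are assumptions:

use crate::database::models::ids::{ReportId, ThreadMessageId};
use crate::database::models::report_item::Report;
use crate::database::models::thread_item::ThreadMessage;

async fn load_report_and_message(
    transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
    report_id: ReportId,
    message_id: ThreadMessageId,
) -> Result<(), sqlx::Error> {
    // Reborrowing the transaction works now that E no longer has to be Copy.
    let _report = Report::get(report_id, &mut *transaction).await?;
    let _message = ThreadMessage::get(message_id, &mut *transaction).await?;
    Ok(())
}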