Labrinth ID cleanup (#3681)

* Put all ID types in the `labrinth::models::ids` module, and reduce code duplication with them

* Rewrite `labrinth::database::models::ids` and rename most DB interface ID structs so they are prefixed with `DB`

* Run sqlx prepare

---------

Co-authored-by: Alejandro González <7822554+AlexTMjugador@users.noreply.github.com>
This commit is contained in:
Josiah Glosson
2025-05-22 03:34:36 -05:00
committed by GitHub
parent c6022ad977
commit 9e527ff141
111 changed files with 1477 additions and 1965 deletions

View File

@@ -1,5 +1,5 @@
use crate::database::models::{
ChargeId, DatabaseError, ProductPriceId, UserId, UserSubscriptionId,
DBChargeId, DBProductPriceId, DBUserId, DBUserSubscriptionId, DatabaseError,
};
use crate::models::billing::{
ChargeStatus, ChargeType, PaymentPlatform, PriceDuration,
@@ -8,9 +8,9 @@ use chrono::{DateTime, Utc};
use std::convert::{TryFrom, TryInto};
pub struct ChargeItem {
pub id: ChargeId,
pub user_id: UserId,
pub price_id: ProductPriceId,
pub id: DBChargeId,
pub user_id: DBUserId,
pub price_id: DBProductPriceId,
pub amount: i64,
pub currency_code: String,
pub status: ChargeStatus,
@@ -18,13 +18,13 @@ pub struct ChargeItem {
pub last_attempt: Option<DateTime<Utc>>,
pub type_: ChargeType,
pub subscription_id: Option<UserSubscriptionId>,
pub subscription_id: Option<DBUserSubscriptionId>,
pub subscription_interval: Option<PriceDuration>,
pub payment_platform: PaymentPlatform,
pub payment_platform_id: Option<String>,
pub parent_charge_id: Option<ChargeId>,
pub parent_charge_id: Option<DBChargeId>,
// Net is always in USD
pub net: Option<i64>,
@@ -53,22 +53,22 @@ impl TryFrom<ChargeResult> for ChargeItem {
fn try_from(r: ChargeResult) -> Result<Self, Self::Error> {
Ok(ChargeItem {
id: ChargeId(r.id),
user_id: UserId(r.user_id),
price_id: ProductPriceId(r.price_id),
id: DBChargeId(r.id),
user_id: DBUserId(r.user_id),
price_id: DBProductPriceId(r.price_id),
amount: r.amount,
currency_code: r.currency_code,
status: ChargeStatus::from_string(&r.status),
due: r.due,
last_attempt: r.last_attempt,
type_: ChargeType::from_string(&r.charge_type),
subscription_id: r.subscription_id.map(UserSubscriptionId),
subscription_id: r.subscription_id.map(DBUserSubscriptionId),
subscription_interval: r
.subscription_interval
.map(|x| PriceDuration::from_string(&x)),
payment_platform: PaymentPlatform::from_string(&r.payment_platform),
payment_platform_id: r.payment_platform_id,
parent_charge_id: r.parent_charge_id.map(ChargeId),
parent_charge_id: r.parent_charge_id.map(DBChargeId),
net: r.net,
})
}
@@ -100,7 +100,7 @@ impl ChargeItem {
pub async fn upsert(
&self,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<ChargeId, DatabaseError> {
) -> Result<DBChargeId, DatabaseError> {
sqlx::query!(
r#"
INSERT INTO charges (id, user_id, price_id, amount, currency_code, charge_type, status, due, last_attempt, subscription_id, subscription_interval, payment_platform, payment_platform_id, parent_charge_id, net)
@@ -144,7 +144,7 @@ impl ChargeItem {
}
pub async fn get(
id: ChargeId,
id: DBChargeId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Option<ChargeItem>, DatabaseError> {
let id = id.0;
@@ -156,7 +156,7 @@ impl ChargeItem {
}
pub async fn get_from_user(
user_id: UserId,
user_id: DBUserId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Vec<ChargeItem>, DatabaseError> {
let user_id = user_id.0;
@@ -174,7 +174,7 @@ impl ChargeItem {
}
pub async fn get_children(
charge_id: ChargeId,
charge_id: DBChargeId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Vec<ChargeItem>, DatabaseError> {
let charge_id = charge_id.0;
@@ -192,7 +192,7 @@ impl ChargeItem {
}
pub async fn get_open_subscription(
user_subscription_id: UserSubscriptionId,
user_subscription_id: DBUserSubscriptionId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Option<ChargeItem>, DatabaseError> {
let user_subscription_id = user_subscription_id.0;
@@ -255,7 +255,7 @@ impl ChargeItem {
}
pub async fn remove(
id: ChargeId,
id: DBChargeId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), DatabaseError> {
sqlx::query!(

View File

@@ -12,19 +12,19 @@ const COLLECTIONS_NAMESPACE: &str = "collections";
#[derive(Clone)]
pub struct CollectionBuilder {
pub collection_id: CollectionId,
pub user_id: UserId,
pub collection_id: DBCollectionId,
pub user_id: DBUserId,
pub name: String,
pub description: Option<String>,
pub status: CollectionStatus,
pub projects: Vec<ProjectId>,
pub projects: Vec<DBProjectId>,
}
impl CollectionBuilder {
pub async fn insert(
self,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<CollectionId, DatabaseError> {
) -> Result<DBCollectionId, DatabaseError> {
let collection_struct = Collection {
id: self.collection_id,
name: self.name,
@@ -45,8 +45,8 @@ impl CollectionBuilder {
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Collection {
pub id: CollectionId,
pub user_id: UserId,
pub id: DBCollectionId,
pub user_id: DBUserId,
pub name: String,
pub description: Option<String>,
pub created: DateTime<Utc>,
@@ -55,7 +55,7 @@ pub struct Collection {
pub raw_icon_url: Option<String>,
pub color: Option<u32>,
pub status: CollectionStatus,
pub projects: Vec<ProjectId>,
pub projects: Vec<DBProjectId>,
}
impl Collection {
@@ -66,16 +66,16 @@ impl Collection {
sqlx::query!(
"
INSERT INTO collections (
id, user_id, name, description,
id, user_id, name, description,
created, icon_url, raw_icon_url, status
)
VALUES (
$1, $2, $3, $4,
$1, $2, $3, $4,
$5, $6, $7, $8
)
",
self.id as CollectionId,
self.user_id as UserId,
self.id as DBCollectionId,
self.user_id as DBUserId,
&self.name,
self.description.as_ref(),
self.created,
@@ -104,7 +104,7 @@ impl Collection {
}
pub async fn remove(
id: CollectionId,
id: DBCollectionId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
redis: &RedisPool,
) -> Result<Option<()>, DatabaseError> {
@@ -116,7 +116,7 @@ impl Collection {
DELETE FROM collections_mods
WHERE collection_id = $1
",
id as CollectionId,
id as DBCollectionId,
)
.execute(&mut **transaction)
.await?;
@@ -126,7 +126,7 @@ impl Collection {
DELETE FROM collections
WHERE id = $1
",
id as CollectionId,
id as DBCollectionId,
)
.execute(&mut **transaction)
.await?;
@@ -140,7 +140,7 @@ impl Collection {
}
pub async fn get<'a, 'b, E>(
id: CollectionId,
id: DBCollectionId,
executor: E,
redis: &RedisPool,
) -> Result<Option<Collection>, DatabaseError>
@@ -153,7 +153,7 @@ impl Collection {
}
pub async fn get_many<'a, E>(
collection_ids: &[CollectionId],
collection_ids: &[DBCollectionId],
exec: E,
redis: &RedisPool,
) -> Result<Vec<Collection>, DatabaseError>
@@ -181,8 +181,8 @@ impl Collection {
.fetch(exec)
.try_fold(DashMap::new(), |acc, m| {
let collection = Collection {
id: CollectionId(m.id),
user_id: UserId(m.user_id),
id: DBCollectionId(m.id),
user_id: DBUserId(m.user_id),
name: m.name.clone(),
description: m.description.clone(),
icon_url: m.icon_url.clone(),
@@ -195,7 +195,7 @@ impl Collection {
.mods
.unwrap_or_default()
.into_iter()
.map(ProjectId)
.map(DBProjectId)
.collect(),
};
@@ -213,7 +213,7 @@ impl Collection {
}
pub async fn clear_cache(
id: CollectionId,
id: DBCollectionId,
redis: &RedisPool,
) -> Result<(), DatabaseError> {
let mut redis = redis.connect().await?;

View File

@@ -17,37 +17,37 @@ const FLOWS_NAMESPACE: &str = "flows";
#[serde(tag = "type", rename_all = "snake_case")]
pub enum Flow {
OAuth {
user_id: Option<UserId>,
user_id: Option<DBUserId>,
url: String,
provider: AuthProvider,
},
Login2FA {
user_id: UserId,
user_id: DBUserId,
},
Initialize2FA {
user_id: UserId,
user_id: DBUserId,
secret: String,
},
ForgotPassword {
user_id: UserId,
user_id: DBUserId,
},
ConfirmEmail {
user_id: UserId,
user_id: DBUserId,
confirm_email: String,
},
MinecraftAuth,
InitOAuthAppApproval {
user_id: UserId,
client_id: OAuthClientId,
existing_authorization_id: Option<OAuthClientAuthorizationId>,
user_id: DBUserId,
client_id: DBOAuthClientId,
existing_authorization_id: Option<DBOAuthClientAuthorizationId>,
scopes: Scopes,
redirect_uris: OAuthRedirectUris,
state: Option<String>,
},
OAuthAuthorizationCodeSupplied {
user_id: UserId,
client_id: OAuthClientId,
authorization_id: OAuthClientAuthorizationId,
user_id: DBUserId,
client_id: DBOAuthClientId,
authorization_id: DBOAuthClientAuthorizationId,
scopes: Scopes,
original_redirect_uri: Option<String>, // Needed for https://datatracker.ietf.org/doc/html/rfc6749#section-4.1.3
},

View File

@@ -1,9 +1,9 @@
use crate::database::models::UserId;
use crate::database::models::DBUserId;
use chrono::{DateTime, Utc};
pub struct FriendItem {
pub user_id: UserId,
pub friend_id: UserId,
pub user_id: DBUserId,
pub friend_id: DBUserId,
pub created: DateTime<Utc>,
pub accepted: bool,
}
@@ -30,8 +30,8 @@ impl FriendItem {
}
pub async fn get_friend<'a, E>(
user_id: UserId,
friend_id: UserId,
user_id: DBUserId,
friend_id: DBUserId,
exec: E,
) -> Result<Option<FriendItem>, sqlx::Error>
where
@@ -49,8 +49,8 @@ impl FriendItem {
.fetch_optional(exec)
.await?
.map(|row| FriendItem {
user_id: UserId(row.user_id),
friend_id: UserId(row.friend_id),
user_id: DBUserId(row.user_id),
friend_id: DBUserId(row.friend_id),
created: row.created,
accepted: row.accepted,
});
@@ -59,8 +59,8 @@ impl FriendItem {
}
pub async fn update_friend(
user_id: UserId,
friend_id: UserId,
user_id: DBUserId,
friend_id: DBUserId,
accepted: bool,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), sqlx::Error> {
@@ -81,7 +81,7 @@ impl FriendItem {
}
pub async fn get_user_friends<'a, E>(
user_id: UserId,
user_id: DBUserId,
accepted: Option<bool>,
exec: E,
) -> Result<Vec<FriendItem>, sqlx::Error>
@@ -100,8 +100,8 @@ impl FriendItem {
.await?
.into_iter()
.map(|row| FriendItem {
user_id: UserId(row.user_id),
friend_id: UserId(row.friend_id),
user_id: DBUserId(row.user_id),
friend_id: DBUserId(row.friend_id),
created: row.created,
accepted: row.accepted,
})
@@ -112,8 +112,8 @@ impl FriendItem {
}
pub async fn remove(
user_id: UserId,
friend_id: UserId,
user_id: DBUserId,
friend_id: DBUserId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), sqlx::Error> {
sqlx::query!(

View File

@@ -1,7 +1,15 @@
use super::DatabaseError;
use crate::models::ids::{
ChargeId, CollectionId, FileId, ImageId, NotificationId,
OAuthAccessTokenId, OAuthClientAuthorizationId, OAuthClientId,
OAuthRedirectUriId, OrganizationId, PatId, PayoutId, ProductId,
ProductPriceId, ProjectId, ReportId, SessionId, TeamId, TeamMemberId,
ThreadId, ThreadMessageId, UserSubscriptionId, VersionId,
};
use ariadne::ids::base62_impl::to_base62;
use ariadne::ids::{random_base62_rng, random_base62_rng_range};
use ariadne::ids::{UserId, random_base62_rng, random_base62_rng_range};
use censor::Censor;
use paste::paste;
use rand::SeedableRng;
use rand_chacha::ChaCha20Rng;
use serde::{Deserialize, Serialize};
@@ -10,12 +18,12 @@ use sqlx::sqlx_macros::Type;
const ID_RETRY_COUNT: usize = 20;
macro_rules! generate_ids {
($vis:vis $function_name:ident, $return_type:ty, $id_length:expr, $select_stmnt:literal, $id_function:expr) => {
$vis async fn $function_name(
($function_name:ident, $return_type:ident, $select_stmnt:expr) => {
pub async fn $function_name(
con: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<$return_type, DatabaseError> {
let mut rng = ChaCha20Rng::from_entropy();
let length = $id_length;
let length = 8;
let mut id = random_base62_rng(&mut rng, length);
let mut retry_count = 0;
let censor = Censor::Standard + Censor::Sex;
@@ -26,7 +34,9 @@ macro_rules! generate_ids {
.fetch_one(&mut **con)
.await?;
if results.exists.unwrap_or(true) || censor.check(&*to_base62(id)) {
if results.exists.unwrap_or(true)
|| censor.check(&*to_base62(id))
{
id = random_base62_rng(&mut rng, length);
} else {
break;
@@ -38,14 +48,14 @@ macro_rules! generate_ids {
}
}
Ok($id_function(id as i64))
Ok($return_type(id as i64))
}
};
}
macro_rules! generate_bulk_ids {
($vis:vis $function_name:ident, $return_type:ty, $select_stmnt:literal, $id_function:expr) => {
$vis async fn $function_name(
($function_name:ident, $return_type:ident, $select_stmnt:expr) => {
pub async fn $function_name(
count: usize,
con: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<Vec<$return_type>, DatabaseError> {
@@ -55,14 +65,18 @@ macro_rules! generate_bulk_ids {
// Check if ID is unique
loop {
let base = random_base62_rng_range(&mut rng, 1, 10) as i64;
let ids = (0..count).map(|x| base + x as i64).collect::<Vec<_>>();
let ids =
(0..count).map(|x| base + x as i64).collect::<Vec<_>>();
let results = sqlx::query!($select_stmnt, &ids)
.fetch_one(&mut **con)
.await?;
if !results.exists.unwrap_or(true) {
return Ok(ids.into_iter().map(|x| $id_function(x)).collect());
return Ok(ids
.into_iter()
.map(|x| $return_type(x))
.collect());
}
retry_count += 1;
@@ -74,589 +88,167 @@ macro_rules! generate_bulk_ids {
};
}
generate_ids!(
pub generate_project_id,
ProjectId,
8,
"SELECT EXISTS(SELECT 1 FROM mods WHERE id=$1)",
ProjectId
);
generate_ids!(
pub generate_version_id,
VersionId,
8,
"SELECT EXISTS(SELECT 1 FROM versions WHERE id=$1)",
VersionId
);
generate_ids!(
pub generate_team_id,
TeamId,
8,
"SELECT EXISTS(SELECT 1 FROM teams WHERE id=$1)",
TeamId
);
generate_ids!(
pub generate_organization_id,
OrganizationId,
8,
"SELECT EXISTS(SELECT 1 FROM organizations WHERE id=$1)",
OrganizationId
);
generate_ids!(
pub generate_collection_id,
CollectionId,
8,
"SELECT EXISTS(SELECT 1 FROM collections WHERE id=$1)",
CollectionId
);
generate_ids!(
pub generate_file_id,
FileId,
8,
"SELECT EXISTS(SELECT 1 FROM files WHERE id=$1)",
FileId
);
generate_ids!(
pub generate_team_member_id,
TeamMemberId,
8,
"SELECT EXISTS(SELECT 1 FROM team_members WHERE id=$1)",
TeamMemberId
);
generate_ids!(
pub generate_pat_id,
PatId,
8,
"SELECT EXISTS(SELECT 1 FROM pats WHERE id=$1)",
PatId
);
macro_rules! db_id_interface {
($id_struct:ident $(, generator: $generator_function:ident @ $db_table:expr, $(bulk_generator: $bulk_generator_function:ident,)?)?) => {
paste! {
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, PartialEq, Eq, Hash)]
#[sqlx(transparent)]
pub struct [< DB $id_struct >](pub i64);
generate_ids!(
pub generate_user_id,
UserId,
8,
"SELECT EXISTS(SELECT 1 FROM users WHERE id=$1)",
UserId
);
generate_ids!(
pub generate_report_id,
ReportId,
8,
"SELECT EXISTS(SELECT 1 FROM reports WHERE id=$1)",
ReportId
);
impl From<$id_struct> for [< DB $id_struct >] {
fn from(id: $id_struct) -> Self {
Self(id.0 as i64)
}
}
impl From<[< DB $id_struct >]> for $id_struct {
fn from(id: [< DB $id_struct >]) -> Self {
Self(id.0 as u64)
}
}
generate_ids!(
pub generate_notification_id,
NotificationId,
8,
"SELECT EXISTS(SELECT 1 FROM notifications WHERE id=$1)",
NotificationId
);
$(
generate_ids!(
$generator_function,
[< DB $id_struct >],
"SELECT EXISTS(SELECT 1 FROM " + $db_table + " WHERE id=$1)"
);
generate_bulk_ids!(
pub generate_many_notification_ids,
NotificationId,
"SELECT EXISTS(SELECT 1 FROM notifications WHERE id = ANY($1))",
NotificationId
);
$(
generate_bulk_ids!(
$bulk_generator_function,
[< DB $id_struct >],
"SELECT EXISTS(SELECT 1 FROM " + $db_table + " WHERE id = ANY($1))"
);
)?
)?
}
};
}
generate_ids!(
pub generate_thread_id,
ThreadId,
8,
"SELECT EXISTS(SELECT 1 FROM threads WHERE id=$1)",
ThreadId
);
generate_ids!(
pub generate_thread_message_id,
ThreadMessageId,
8,
"SELECT EXISTS(SELECT 1 FROM threads_messages WHERE id=$1)",
ThreadMessageId
);
macro_rules! short_id_type {
($name:ident) => {
#[derive(
Copy,
Clone,
Debug,
Type,
Serialize,
Deserialize,
Eq,
PartialEq,
Hash,
)]
#[sqlx(transparent)]
pub struct $name(pub i32);
};
}
generate_ids!(
pub generate_session_id,
SessionId,
8,
"SELECT EXISTS(SELECT 1 FROM sessions WHERE id=$1)",
SessionId
);
generate_ids!(
pub generate_image_id,
ImageId,
8,
"SELECT EXISTS(SELECT 1 FROM uploaded_images WHERE id=$1)",
ImageId
);
generate_ids!(
pub generate_oauth_client_authorization_id,
OAuthClientAuthorizationId,
8,
"SELECT EXISTS(SELECT 1 FROM oauth_client_authorizations WHERE id=$1)",
OAuthClientAuthorizationId
);
generate_ids!(
pub generate_oauth_client_id,
OAuthClientId,
8,
"SELECT EXISTS(SELECT 1 FROM oauth_clients WHERE id=$1)",
OAuthClientId
);
generate_ids!(
pub generate_oauth_redirect_id,
OAuthRedirectUriId,
8,
"SELECT EXISTS(SELECT 1 FROM oauth_client_redirect_uris WHERE id=$1)",
OAuthRedirectUriId
);
generate_ids!(
pub generate_oauth_access_token_id,
OAuthAccessTokenId,
8,
"SELECT EXISTS(SELECT 1 FROM oauth_access_tokens WHERE id=$1)",
OAuthAccessTokenId
);
generate_ids!(
pub generate_payout_id,
PayoutId,
8,
"SELECT EXISTS(SELECT 1 FROM oauth_access_tokens WHERE id=$1)",
PayoutId
);
generate_ids!(
pub generate_product_id,
ProductId,
8,
"SELECT EXISTS(SELECT 1 FROM products WHERE id=$1)",
ProductId
);
generate_ids!(
pub generate_product_price_id,
ProductPriceId,
8,
"SELECT EXISTS(SELECT 1 FROM products_prices WHERE id=$1)",
ProductPriceId
);
generate_ids!(
pub generate_user_subscription_id,
UserSubscriptionId,
8,
"SELECT EXISTS(SELECT 1 FROM users_subscriptions WHERE id=$1)",
UserSubscriptionId
);
generate_ids!(
pub generate_charge_id,
db_id_interface!(
ChargeId,
8,
"SELECT EXISTS(SELECT 1 FROM charges WHERE id=$1)",
ChargeId
generator: generate_charge_id @ "charges",
);
db_id_interface!(
CollectionId,
generator: generate_collection_id @ "collections",
);
db_id_interface!(
FileId,
generator: generate_file_id @ "files",
);
db_id_interface!(
ImageId,
generator: generate_image_id @ "uploaded_images",
);
db_id_interface!(
NotificationId,
generator: generate_notification_id @ "notifications",
bulk_generator: generate_many_notification_ids,
);
db_id_interface!(
OAuthAccessTokenId,
generator: generate_oauth_access_token_id @ "oauth_access_tokens",
);
db_id_interface!(
OAuthClientAuthorizationId,
generator: generate_oauth_client_authorization_id @ "oauth_client_authorizations",
);
db_id_interface!(
OAuthClientId,
generator: generate_oauth_client_id @ "oauth_clients",
);
db_id_interface!(
OAuthRedirectUriId,
generator: generate_oauth_redirect_id @ "oauth_client_redirect_uris",
);
db_id_interface!(
OrganizationId,
generator: generate_organization_id @ "organizations",
);
db_id_interface!(
PatId,
generator: generate_pat_id @ "pats",
);
db_id_interface!(
PayoutId,
generator: generate_payout_id @ "payouts",
);
db_id_interface!(
ProductId,
generator: generate_product_id @ "products",
);
db_id_interface!(
ProductPriceId,
generator: generate_product_price_id @ "products_prices",
);
db_id_interface!(
ProjectId,
generator: generate_project_id @ "mods",
);
db_id_interface!(
ReportId,
generator: generate_report_id @ "reports",
);
db_id_interface!(
SessionId,
generator: generate_session_id @ "sessions",
);
db_id_interface!(
TeamId,
generator: generate_team_id @ "teams",
);
db_id_interface!(
TeamMemberId,
generator: generate_team_member_id @ "team_members",
);
db_id_interface!(
ThreadId,
generator: generate_thread_id @ "threads",
);
db_id_interface!(
ThreadMessageId,
generator: generate_thread_message_id @ "threads_messages",
);
db_id_interface!(
UserId,
generator: generate_user_id @ "users",
);
db_id_interface!(
UserSubscriptionId,
generator: generate_user_subscription_id @ "users_subscriptions",
);
db_id_interface!(
VersionId,
generator: generate_version_id @ "versions",
);
#[derive(
Copy, Clone, Debug, PartialEq, Eq, Type, Hash, Serialize, Deserialize,
)]
#[sqlx(transparent)]
pub struct UserId(pub i64);
#[derive(
Copy, Clone, Debug, Type, Eq, Hash, PartialEq, Serialize, Deserialize,
)]
#[sqlx(transparent)]
pub struct TeamId(pub i64);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize)]
#[sqlx(transparent)]
pub struct TeamMemberId(pub i64);
#[derive(
Copy, Clone, Debug, Type, PartialEq, Eq, Hash, Serialize, Deserialize,
)]
#[sqlx(transparent)]
pub struct OrganizationId(pub i64);
#[derive(
Copy, Clone, Debug, Type, PartialEq, Eq, Hash, Serialize, Deserialize,
)]
#[sqlx(transparent)]
pub struct ProjectId(pub i64);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, PartialEq, Eq, Hash,
)]
#[sqlx(transparent)]
pub struct ProjectTypeId(pub i32);
#[derive(Copy, Clone, Debug, Type)]
#[sqlx(transparent)]
pub struct StatusId(pub i32);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize)]
#[sqlx(transparent)]
pub struct GameId(pub i32);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, PartialEq, Eq, Hash,
)]
#[sqlx(transparent)]
pub struct LinkPlatformId(pub i32);
#[derive(
Copy,
Clone,
Debug,
Type,
PartialEq,
Eq,
Hash,
Serialize,
Deserialize,
PartialOrd,
Ord,
)]
#[sqlx(transparent)]
pub struct VersionId(pub i64);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, PartialEq, Eq, Hash,
)]
#[sqlx(transparent)]
pub struct LoaderId(pub i32);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize)]
#[sqlx(transparent)]
pub struct CategoryId(pub i32);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize)]
#[sqlx(transparent)]
pub struct CollectionId(pub i64);
#[derive(Copy, Clone, Debug, Type, Deserialize, Serialize)]
#[sqlx(transparent)]
pub struct ReportId(pub i64);
#[derive(Copy, Clone, Debug, Type)]
#[sqlx(transparent)]
pub struct ReportTypeId(pub i32);
#[derive(
Copy, Clone, Debug, Type, Hash, Eq, PartialEq, Deserialize, Serialize,
)]
#[sqlx(transparent)]
pub struct FileId(pub i64);
#[derive(
Copy, Clone, Debug, Type, Deserialize, Serialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct PatId(pub i64);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize)]
#[sqlx(transparent)]
pub struct NotificationId(pub i64);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize)]
#[sqlx(transparent)]
pub struct NotificationActionId(pub i32);
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq)]
#[sqlx(transparent)]
pub struct ThreadId(pub i64);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct ThreadMessageId(pub i64);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct SessionId(pub i64);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct ImageId(pub i64);
#[derive(
Copy,
Clone,
Debug,
Type,
Serialize,
Deserialize,
Eq,
PartialEq,
Hash,
PartialOrd,
Ord,
)]
#[sqlx(transparent)]
pub struct LoaderFieldId(pub i32);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct LoaderFieldEnumId(pub i32);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct LoaderFieldEnumValueId(pub i32);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct OAuthClientId(pub i64);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct OAuthClientAuthorizationId(pub i64);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct OAuthRedirectUriId(pub i64);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct OAuthAccessTokenId(pub i64);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct PayoutId(pub i64);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct ProductId(pub i64);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct ProductPriceId(pub i64);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct UserSubscriptionId(pub i64);
#[derive(
Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash,
)]
#[sqlx(transparent)]
pub struct ChargeId(pub i64);
use crate::models::ids;
impl From<ids::ProjectId> for ProjectId {
fn from(id: ids::ProjectId) -> Self {
ProjectId(id.0 as i64)
}
}
impl From<ProjectId> for ids::ProjectId {
fn from(id: ProjectId) -> Self {
ids::ProjectId(id.0 as u64)
}
}
impl From<ids::UserId> for UserId {
fn from(id: ids::UserId) -> Self {
UserId(id.0 as i64)
}
}
impl From<UserId> for ids::UserId {
fn from(id: UserId) -> Self {
ids::UserId(id.0 as u64)
}
}
impl From<ids::TeamId> for TeamId {
fn from(id: ids::TeamId) -> Self {
TeamId(id.0 as i64)
}
}
impl From<TeamId> for ids::TeamId {
fn from(id: TeamId) -> Self {
ids::TeamId(id.0 as u64)
}
}
impl From<ids::OrganizationId> for OrganizationId {
fn from(id: ids::OrganizationId) -> Self {
OrganizationId(id.0 as i64)
}
}
impl From<OrganizationId> for ids::OrganizationId {
fn from(id: OrganizationId) -> Self {
ids::OrganizationId(id.0 as u64)
}
}
impl From<ids::VersionId> for VersionId {
fn from(id: ids::VersionId) -> Self {
VersionId(id.0 as i64)
}
}
impl From<VersionId> for ids::VersionId {
fn from(id: VersionId) -> Self {
ids::VersionId(id.0 as u64)
}
}
impl From<ids::CollectionId> for CollectionId {
fn from(id: ids::CollectionId) -> Self {
CollectionId(id.0 as i64)
}
}
impl From<CollectionId> for ids::CollectionId {
fn from(id: CollectionId) -> Self {
ids::CollectionId(id.0 as u64)
}
}
impl From<ids::ReportId> for ReportId {
fn from(id: ids::ReportId) -> Self {
ReportId(id.0 as i64)
}
}
impl From<ReportId> for ids::ReportId {
fn from(id: ReportId) -> Self {
ids::ReportId(id.0 as u64)
}
}
impl From<ImageId> for ids::ImageId {
fn from(id: ImageId) -> Self {
ids::ImageId(id.0 as u64)
}
}
impl From<ids::ImageId> for ImageId {
fn from(id: ids::ImageId) -> Self {
ImageId(id.0 as i64)
}
}
impl From<ids::NotificationId> for NotificationId {
fn from(id: ids::NotificationId) -> Self {
NotificationId(id.0 as i64)
}
}
impl From<NotificationId> for ids::NotificationId {
fn from(id: NotificationId) -> Self {
ids::NotificationId(id.0 as u64)
}
}
impl From<ids::ThreadId> for ThreadId {
fn from(id: ids::ThreadId) -> Self {
ThreadId(id.0 as i64)
}
}
impl From<ThreadId> for ids::ThreadId {
fn from(id: ThreadId) -> Self {
ids::ThreadId(id.0 as u64)
}
}
impl From<ids::ThreadMessageId> for ThreadMessageId {
fn from(id: ids::ThreadMessageId) -> Self {
ThreadMessageId(id.0 as i64)
}
}
impl From<ThreadMessageId> for ids::ThreadMessageId {
fn from(id: ThreadMessageId) -> Self {
ids::ThreadMessageId(id.0 as u64)
}
}
impl From<SessionId> for ids::SessionId {
fn from(id: SessionId) -> Self {
ids::SessionId(id.0 as u64)
}
}
impl From<PatId> for ids::PatId {
fn from(id: PatId) -> Self {
ids::PatId(id.0 as u64)
}
}
impl From<OAuthClientId> for ids::OAuthClientId {
fn from(id: OAuthClientId) -> Self {
ids::OAuthClientId(id.0 as u64)
}
}
impl From<ids::OAuthClientId> for OAuthClientId {
fn from(id: ids::OAuthClientId) -> Self {
Self(id.0 as i64)
}
}
impl From<OAuthRedirectUriId> for ids::OAuthRedirectUriId {
fn from(id: OAuthRedirectUriId) -> Self {
ids::OAuthRedirectUriId(id.0 as u64)
}
}
impl From<OAuthClientAuthorizationId> for ids::OAuthClientAuthorizationId {
fn from(id: OAuthClientAuthorizationId) -> Self {
ids::OAuthClientAuthorizationId(id.0 as u64)
}
}
impl From<ids::PayoutId> for PayoutId {
fn from(id: ids::PayoutId) -> Self {
PayoutId(id.0 as i64)
}
}
impl From<PayoutId> for ids::PayoutId {
fn from(id: PayoutId) -> Self {
ids::PayoutId(id.0 as u64)
}
}
impl From<ids::ProductId> for ProductId {
fn from(id: ids::ProductId) -> Self {
ProductId(id.0 as i64)
}
}
impl From<ProductId> for ids::ProductId {
fn from(id: ProductId) -> Self {
ids::ProductId(id.0 as u64)
}
}
impl From<ids::ProductPriceId> for ProductPriceId {
fn from(id: ids::ProductPriceId) -> Self {
ProductPriceId(id.0 as i64)
}
}
impl From<ProductPriceId> for ids::ProductPriceId {
fn from(id: ProductPriceId) -> Self {
ids::ProductPriceId(id.0 as u64)
}
}
impl From<ids::UserSubscriptionId> for UserSubscriptionId {
fn from(id: ids::UserSubscriptionId) -> Self {
UserSubscriptionId(id.0 as i64)
}
}
impl From<UserSubscriptionId> for ids::UserSubscriptionId {
fn from(id: UserSubscriptionId) -> Self {
ids::UserSubscriptionId(id.0 as u64)
}
}
impl From<ids::ChargeId> for ChargeId {
fn from(id: ids::ChargeId) -> Self {
ChargeId(id.0 as i64)
}
}
impl From<ChargeId> for ids::ChargeId {
fn from(id: ChargeId) -> Self {
ids::ChargeId(id.0 as u64)
}
}
short_id_type!(CategoryId);
short_id_type!(GameId);
short_id_type!(LinkPlatformId);
short_id_type!(LoaderFieldEnumId);
short_id_type!(LoaderFieldEnumValueId);
short_id_type!(LoaderFieldId);
short_id_type!(LoaderId);
short_id_type!(NotificationActionId);
short_id_type!(ProjectTypeId);
short_id_type!(ReportTypeId);
short_id_type!(StatusId);

View File

@@ -9,20 +9,20 @@ const IMAGES_NAMESPACE: &str = "images";
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Image {
pub id: ImageId,
pub id: DBImageId,
pub url: String,
pub raw_url: String,
pub size: u64,
pub created: DateTime<Utc>,
pub owner_id: UserId,
pub owner_id: DBUserId,
// context it is associated with
pub context: String,
pub project_id: Option<ProjectId>,
pub version_id: Option<VersionId>,
pub thread_message_id: Option<ThreadMessageId>,
pub report_id: Option<ReportId>,
pub project_id: Option<DBProjectId>,
pub version_id: Option<DBVersionId>,
pub thread_message_id: Option<DBThreadMessageId>,
pub report_id: Option<DBReportId>,
}
impl Image {
@@ -39,12 +39,12 @@ impl Image {
$1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11
);
",
self.id as ImageId,
self.id as DBImageId,
self.url,
self.raw_url,
self.size as i64,
self.created,
self.owner_id as UserId,
self.owner_id as DBUserId,
self.context,
self.project_id.map(|x| x.0),
self.version_id.map(|x| x.0),
@@ -58,7 +58,7 @@ impl Image {
}
pub async fn remove(
id: ImageId,
id: DBImageId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
redis: &RedisPool,
) -> Result<Option<()>, DatabaseError> {
@@ -70,7 +70,7 @@ impl Image {
DELETE FROM uploaded_images
WHERE id = $1
",
id as ImageId,
id as DBImageId,
)
.execute(&mut **transaction)
.await?;
@@ -98,22 +98,22 @@ impl Image {
ImageContext::Project {
project_id: Some(id),
} => {
project_id = Some(ProjectId::from(id));
project_id = Some(DBProjectId::from(id));
}
ImageContext::Version {
version_id: Some(id),
} => {
version_id = Some(VersionId::from(id));
version_id = Some(DBVersionId::from(id));
}
ImageContext::ThreadMessage {
thread_message_id: Some(id),
} => {
thread_message_id = Some(ThreadMessageId::from(id));
thread_message_id = Some(DBThreadMessageId::from(id));
}
ImageContext::Report {
report_id: Some(id),
} => {
report_id = Some(ReportId::from(id));
report_id = Some(DBReportId::from(id));
}
_ => {}
}
@@ -139,7 +139,7 @@ impl Image {
)
.fetch(&mut **transaction)
.map_ok(|row| {
let id = ImageId(row.id);
let id = DBImageId(row.id);
Image {
id,
@@ -147,12 +147,12 @@ impl Image {
raw_url: row.raw_url,
size: row.size as u64,
created: row.created,
owner_id: UserId(row.owner_id),
owner_id: DBUserId(row.owner_id),
context: row.context,
project_id: row.mod_id.map(ProjectId),
version_id: row.version_id.map(VersionId),
thread_message_id: row.thread_message_id.map(ThreadMessageId),
report_id: row.report_id.map(ReportId),
project_id: row.mod_id.map(DBProjectId),
version_id: row.version_id.map(DBVersionId),
thread_message_id: row.thread_message_id.map(DBThreadMessageId),
report_id: row.report_id.map(DBReportId),
}
})
.try_collect::<Vec<Image>>()
@@ -160,7 +160,7 @@ impl Image {
}
pub async fn get<'a, 'b, E>(
id: ImageId,
id: DBImageId,
executor: E,
redis: &RedisPool,
) -> Result<Option<Image>, DatabaseError>
@@ -173,7 +173,7 @@ impl Image {
}
pub async fn get_many<'a, E>(
image_ids: &[ImageId],
image_ids: &[DBImageId],
exec: E,
redis: &RedisPool,
) -> Result<Vec<Image>, DatabaseError>
@@ -198,17 +198,17 @@ impl Image {
.fetch(exec)
.try_fold(DashMap::new(), |acc, i| {
let img = Image {
id: ImageId(i.id),
id: DBImageId(i.id),
url: i.url,
raw_url: i.raw_url,
size: i.size as u64,
created: i.created,
owner_id: UserId(i.owner_id),
owner_id: DBUserId(i.owner_id),
context: i.context,
project_id: i.mod_id.map(ProjectId),
version_id: i.version_id.map(VersionId),
thread_message_id: i.thread_message_id.map(ThreadMessageId),
report_id: i.report_id.map(ReportId),
project_id: i.mod_id.map(DBProjectId),
version_id: i.version_id.map(DBVersionId),
thread_message_id: i.thread_message_id.map(DBThreadMessageId),
report_id: i.report_id.map(DBReportId),
};
acc.insert(i.id, img);
@@ -224,7 +224,7 @@ impl Image {
}
pub async fn clear_cache(
id: ImageId,
id: DBImageId,
redis: &RedisPool,
) -> Result<(), DatabaseError> {
let mut redis = redis.connect().await?;

View File

@@ -153,7 +153,7 @@ impl Loader {
SELECT l.id id, l.loader loader, l.icon icon, l.metadata metadata,
ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types,
ARRAY_AGG(DISTINCT g.slug) filter (where g.slug is not null) games
FROM loaders l
FROM loaders l
LEFT OUTER JOIN loaders_project_types lpt ON joining_loader_id = l.id
LEFT OUTER JOIN project_types pt ON lpt.joining_project_type_id = pt.id
LEFT OUTER JOIN loaders_project_types_games lptg ON lptg.loader_id = lpt.joining_loader_id AND lptg.project_type_id = lpt.joining_project_type_id
@@ -293,7 +293,7 @@ impl std::hash::Hash for LoaderFieldEnumValue {
#[derive(Clone, Serialize, Deserialize, Debug, PartialEq, Eq, Hash)]
pub struct VersionField {
pub version_id: VersionId,
pub version_id: DBVersionId,
pub field_id: LoaderFieldId,
pub field_name: String,
pub value: VersionFieldValue,
@@ -312,7 +312,7 @@ pub enum VersionFieldValue {
#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct QueryVersionField {
pub version_id: VersionId,
pub version_id: DBVersionId,
pub field_id: LoaderFieldId,
pub int_value: Option<i32>,
pub enum_value: Option<LoaderFieldEnumValueId>,
@@ -524,7 +524,7 @@ impl LoaderFieldEnum {
let result = sqlx::query!(
"
SELECT lfe.id, lfe.enum_name, lfe.ordering, lfe.hidable
SELECT lfe.id, lfe.enum_name, lfe.ordering, lfe.hidable
FROM loader_field_enums lfe
WHERE lfe.enum_name = $1
ORDER BY lfe.ordering ASC
@@ -781,7 +781,7 @@ impl VersionField {
}
pub fn check_parse(
version_id: VersionId,
version_id: DBVersionId,
loader_field: LoaderField,
value: serde_json::Value,
enum_variants: Vec<LoaderFieldEnumValue>,
@@ -1032,7 +1032,7 @@ impl VersionFieldValue {
field_type: &LoaderFieldType,
qvfs: Vec<QueryVersionField>,
qlfev: &[QueryLoaderFieldEnumValue],
) -> Result<(VersionId, VersionFieldValue), DatabaseError> {
) -> Result<(DBVersionId, VersionFieldValue), DatabaseError> {
match field_type {
LoaderFieldType::Integer
| LoaderFieldType::Text
@@ -1076,7 +1076,7 @@ impl VersionFieldValue {
field_type: &LoaderFieldType,
qvfs: Vec<QueryVersionField>,
qlfev: &[QueryLoaderFieldEnumValue],
) -> Result<Vec<(VersionId, VersionFieldValue)>, DatabaseError> {
) -> Result<Vec<(DBVersionId, VersionFieldValue)>, DatabaseError> {
let field_name = field_type.to_str();
let did_not_exist_error = |field_name: &str, desired_field: &str| {
DatabaseError::SchemaError(format!(
@@ -1093,7 +1093,8 @@ impl VersionFieldValue {
// If the field type is a non-array, then the reason for multiple version ids is that there are multiple versions being aggregated, and those version ids are contained within.
// If the field type is an array, then the reason for multiple version ids is that there are multiple values for a single version
// (or a greater aggregation between multiple arrays, in which case the per-field version is lost, so we just take the first one and use it for that)
let version_id = version_id.into_iter().next().unwrap_or(VersionId(0));
let version_id =
version_id.into_iter().next().unwrap_or(DBVersionId(0));
let field_id = qvfs
.iter()
@@ -1106,12 +1107,11 @@ impl VersionFieldValue {
)));
}
let mut value =
match field_type {
// Singleton fields
// If there are multiple, we assume multiple versions are being concatenated
LoaderFieldType::Integer => qvfs
.into_iter()
let mut value = match field_type {
// Singleton fields
// If there are multiple, we assume multiple versions are being concatenated
LoaderFieldType::Integer => {
qvfs.into_iter()
.map(|qvf| {
Ok((
qvf.version_id,
@@ -1121,11 +1121,12 @@ impl VersionFieldValue {
))
})
.collect::<Result<
Vec<(VersionId, VersionFieldValue)>,
Vec<(DBVersionId, VersionFieldValue)>,
DatabaseError,
>>()?,
LoaderFieldType::Text => qvfs
.into_iter()
>>()?
}
LoaderFieldType::Text => {
qvfs.into_iter()
.map(|qvf| {
Ok((
qvf.version_id,
@@ -1135,11 +1136,12 @@ impl VersionFieldValue {
))
})
.collect::<Result<
Vec<(VersionId, VersionFieldValue)>,
Vec<(DBVersionId, VersionFieldValue)>,
DatabaseError,
>>()?,
LoaderFieldType::Boolean => qvfs
.into_iter()
>>()?
}
LoaderFieldType::Boolean => {
qvfs.into_iter()
.map(|qvf| {
Ok((
qvf.version_id,
@@ -1152,11 +1154,12 @@ impl VersionFieldValue {
))
})
.collect::<Result<
Vec<(VersionId, VersionFieldValue)>,
Vec<(DBVersionId, VersionFieldValue)>,
DatabaseError,
>>()?,
LoaderFieldType::Enum(id) => qvfs
.into_iter()
>>()?
}
LoaderFieldType::Enum(id) => {
qvfs.into_iter()
.map(|qvf| {
Ok((
qvf.version_id,
@@ -1189,90 +1192,86 @@ impl VersionFieldValue {
))
})
.collect::<Result<
Vec<(VersionId, VersionFieldValue)>,
Vec<(DBVersionId, VersionFieldValue)>,
DatabaseError,
>>()?,
>>()?
}
// Array fields
// We concatenate into one array
LoaderFieldType::ArrayInteger => vec![(
version_id,
VersionFieldValue::ArrayInteger(
qvfs.into_iter()
.map(|qvf| {
// Array fields
// We concatenate into one array
LoaderFieldType::ArrayInteger => vec![(
version_id,
VersionFieldValue::ArrayInteger(
qvfs.into_iter()
.map(|qvf| {
qvf.int_value.ok_or(did_not_exist_error(
field_name,
"int_value",
))
})
.collect::<Result<_, _>>()?,
),
)],
LoaderFieldType::ArrayText => vec![(
version_id,
VersionFieldValue::ArrayText(
qvfs.into_iter()
.map(|qvf| {
qvf.string_value.ok_or(did_not_exist_error(
field_name,
"string_value",
))
})
.collect::<Result<_, _>>()?,
),
)],
LoaderFieldType::ArrayBoolean => vec![(
version_id,
VersionFieldValue::ArrayBoolean(
qvfs.into_iter()
.map(|qvf| {
Ok::<bool, DatabaseError>(
qvf.int_value.ok_or(did_not_exist_error(
field_name,
"int_value",
))
))? != 0,
)
})
.collect::<Result<_, _>>()?,
),
)],
LoaderFieldType::ArrayEnum(id) => vec![(
version_id,
VersionFieldValue::ArrayEnum(
*id,
qvfs.into_iter()
.map(|qvf| {
let enum_id = qvf.enum_value.ok_or(
did_not_exist_error(field_name, "enum_value"),
)?;
let lfev = qlfev
.iter()
.find(|x| x.id == enum_id)
.ok_or(did_not_exist_error(
field_name,
"enum_value",
))?;
Ok::<_, DatabaseError>(LoaderFieldEnumValue {
id: lfev.id,
enum_id: lfev.enum_id,
value: lfev.value.clone(),
ordering: lfev.ordering,
created: lfev.created,
metadata: lfev
.metadata
.clone()
.unwrap_or_default(),
})
.collect::<Result<_, _>>()?,
),
)],
LoaderFieldType::ArrayText => vec![(
version_id,
VersionFieldValue::ArrayText(
qvfs.into_iter()
.map(|qvf| {
qvf.string_value.ok_or(did_not_exist_error(
field_name,
"string_value",
))
})
.collect::<Result<_, _>>()?,
),
)],
LoaderFieldType::ArrayBoolean => vec![(
version_id,
VersionFieldValue::ArrayBoolean(
qvfs.into_iter()
.map(|qvf| {
Ok::<bool, DatabaseError>(
qvf.int_value.ok_or(
did_not_exist_error(
field_name,
"int_value",
),
)? != 0,
)
})
.collect::<Result<_, _>>()?,
),
)],
LoaderFieldType::ArrayEnum(id) => vec![(
version_id,
VersionFieldValue::ArrayEnum(
*id,
qvfs.into_iter()
.map(|qvf| {
let enum_id = qvf.enum_value.ok_or(
did_not_exist_error(
field_name,
"enum_value",
),
)?;
let lfev = qlfev
.iter()
.find(|x| x.id == enum_id)
.ok_or(did_not_exist_error(
field_name,
"enum_value",
))?;
Ok::<_, DatabaseError>(LoaderFieldEnumValue {
id: lfev.id,
enum_id: lfev.enum_id,
value: lfev.value.clone(),
ordering: lfev.ordering,
created: lfev.created,
metadata: lfev
.metadata
.clone()
.unwrap_or_default(),
})
})
.collect::<Result<_, _>>()?,
),
)],
};
})
.collect::<Result<_, _>>()?,
),
)],
};
// Sort arrayenums by ordering, then by created
for (_, v) in value.iter_mut() {

View File

@@ -13,8 +13,8 @@ pub struct NotificationBuilder {
#[derive(Serialize, Deserialize)]
pub struct Notification {
pub id: NotificationId,
pub user_id: UserId,
pub id: DBNotificationId,
pub user_id: DBUserId,
pub body: NotificationBody,
pub read: bool,
pub created: DateTime<Utc>,
@@ -23,7 +23,7 @@ pub struct Notification {
#[derive(Serialize, Deserialize)]
pub struct NotificationAction {
pub id: NotificationActionId,
pub notification_id: NotificationId,
pub notification_id: DBNotificationId,
pub name: String,
pub action_route_method: String,
pub action_route: String,
@@ -32,7 +32,7 @@ pub struct NotificationAction {
impl NotificationBuilder {
pub async fn insert(
&self,
user: UserId,
user: DBUserId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
redis: &RedisPool,
) -> Result<(), DatabaseError> {
@@ -41,7 +41,7 @@ impl NotificationBuilder {
pub async fn insert_many(
&self,
users: Vec<UserId>,
users: Vec<DBUserId>,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
redis: &RedisPool,
) -> Result<(), DatabaseError> {
@@ -80,7 +80,7 @@ impl NotificationBuilder {
impl Notification {
pub async fn get<'a, 'b, E>(
id: NotificationId,
id: DBNotificationId,
executor: E,
) -> Result<Option<Self>, sqlx::error::Error>
where
@@ -92,7 +92,7 @@ impl Notification {
}
pub async fn get_many<'a, E>(
notification_ids: &[NotificationId],
notification_ids: &[DBNotificationId],
exec: E,
) -> Result<Vec<Notification>, sqlx::Error>
where
@@ -114,11 +114,11 @@ impl Notification {
)
.fetch(exec)
.map_ok(|row| {
let id = NotificationId(row.id);
let id = DBNotificationId(row.id);
Notification {
id,
user_id: UserId(row.user_id),
user_id: DBUserId(row.user_id),
read: row.read,
created: row.created,
body: row.body.clone().and_then(|x| serde_json::from_value(x).ok()).unwrap_or_else(|| {
@@ -145,7 +145,7 @@ impl Notification {
}
pub async fn get_many_user<'a, E>(
user_id: UserId,
user_id: DBUserId,
exec: E,
redis: &RedisPool,
) -> Result<Vec<Notification>, DatabaseError>
@@ -174,15 +174,15 @@ impl Notification {
WHERE n.user_id = $1
GROUP BY n.id, n.user_id;
",
user_id as UserId
user_id as DBUserId
)
.fetch(exec)
.map_ok(|row| {
let id = NotificationId(row.id);
let id = DBNotificationId(row.id);
Notification {
id,
user_id: UserId(row.user_id),
user_id: DBUserId(row.user_id),
read: row.read,
created: row.created,
body: row.body.clone().and_then(|x| serde_json::from_value(x).ok()).unwrap_or_else(|| {
@@ -220,7 +220,7 @@ impl Notification {
}
pub async fn read(
id: NotificationId,
id: DBNotificationId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
redis: &RedisPool,
) -> Result<Option<()>, DatabaseError> {
@@ -228,7 +228,7 @@ impl Notification {
}
pub async fn read_many(
notification_ids: &[NotificationId],
notification_ids: &[DBNotificationId],
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
redis: &RedisPool,
) -> Result<Option<()>, DatabaseError> {
@@ -245,7 +245,7 @@ impl Notification {
&notification_ids_parsed
)
.fetch(&mut **transaction)
.map_ok(|x| UserId(x.user_id))
.map_ok(|x| DBUserId(x.user_id))
.try_collect::<Vec<_>>()
.await?;
@@ -259,7 +259,7 @@ impl Notification {
}
pub async fn remove(
id: NotificationId,
id: DBNotificationId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
redis: &RedisPool,
) -> Result<Option<()>, DatabaseError> {
@@ -267,7 +267,7 @@ impl Notification {
}
pub async fn remove_many(
notification_ids: &[NotificationId],
notification_ids: &[DBNotificationId],
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
redis: &RedisPool,
) -> Result<Option<()>, DatabaseError> {
@@ -293,7 +293,7 @@ impl Notification {
&notification_ids_parsed
)
.fetch(&mut **transaction)
.map_ok(|x| UserId(x.user_id))
.map_ok(|x| DBUserId(x.user_id))
.try_collect::<Vec<_>>()
.await?;
@@ -307,7 +307,7 @@ impl Notification {
}
pub async fn clear_user_notifications_cache(
user_ids: impl IntoIterator<Item = &UserId>,
user_ids: impl IntoIterator<Item = &DBUserId>,
redis: &RedisPool,
) -> Result<(), DatabaseError> {
let mut redis = redis.connect().await?;

View File

@@ -4,13 +4,15 @@ use serde::{Deserialize, Serialize};
use crate::models::pats::Scopes;
use super::{DatabaseError, OAuthClientAuthorizationId, OAuthClientId, UserId};
use super::{
DBOAuthClientAuthorizationId, DBOAuthClientId, DBUserId, DatabaseError,
};
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct OAuthClientAuthorization {
pub id: OAuthClientAuthorizationId,
pub client_id: OAuthClientId,
pub user_id: UserId,
pub id: DBOAuthClientAuthorizationId,
pub client_id: DBOAuthClientId,
pub user_id: DBUserId,
pub scopes: Scopes,
pub created: DateTime<Utc>,
}
@@ -26,9 +28,9 @@ struct AuthorizationQueryResult {
impl From<AuthorizationQueryResult> for OAuthClientAuthorization {
fn from(value: AuthorizationQueryResult) -> Self {
OAuthClientAuthorization {
id: OAuthClientAuthorizationId(value.id),
client_id: OAuthClientId(value.client_id),
user_id: UserId(value.user_id),
id: DBOAuthClientAuthorizationId(value.id),
client_id: DBOAuthClientId(value.client_id),
user_id: DBUserId(value.user_id),
scopes: Scopes::from_postgres(value.scopes),
created: value.created,
}
@@ -37,8 +39,8 @@ impl From<AuthorizationQueryResult> for OAuthClientAuthorization {
impl OAuthClientAuthorization {
pub async fn get(
client_id: OAuthClientId,
user_id: UserId,
client_id: DBOAuthClientId,
user_id: DBUserId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Option<OAuthClientAuthorization>, DatabaseError> {
let value = sqlx::query_as!(
@@ -58,7 +60,7 @@ impl OAuthClientAuthorization {
}
pub async fn get_all_for_user(
user_id: UserId,
user_id: DBUserId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Vec<OAuthClientAuthorization>, DatabaseError> {
let results = sqlx::query_as!(
@@ -77,9 +79,9 @@ impl OAuthClientAuthorization {
}
pub async fn upsert(
id: OAuthClientAuthorizationId,
client_id: OAuthClientId,
user_id: UserId,
id: DBOAuthClientAuthorizationId,
client_id: DBOAuthClientId,
user_id: DBUserId,
scopes: Scopes,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), DatabaseError> {
@@ -106,8 +108,8 @@ impl OAuthClientAuthorization {
}
pub async fn remove(
client_id: OAuthClientId,
user_id: UserId,
client_id: DBOAuthClientId,
user_id: DBUserId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<(), DatabaseError> {
sqlx::query!(

View File

@@ -3,19 +3,19 @@ use itertools::Itertools;
use serde::{Deserialize, Serialize};
use sha2::Digest;
use super::{DatabaseError, OAuthClientId, OAuthRedirectUriId, UserId};
use super::{DBOAuthClientId, DBOAuthRedirectUriId, DBUserId, DatabaseError};
use crate::models::pats::Scopes;
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct OAuthRedirectUri {
pub id: OAuthRedirectUriId,
pub client_id: OAuthClientId,
pub id: DBOAuthRedirectUriId,
pub client_id: DBOAuthClientId,
pub uri: String,
}
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct OAuthClient {
pub id: OAuthClientId,
pub id: DBOAuthClientId,
pub name: String,
pub icon_url: Option<String>,
pub raw_icon_url: Option<String>,
@@ -23,7 +23,7 @@ pub struct OAuthClient {
pub secret_hash: String,
pub redirect_uris: Vec<OAuthRedirectUri>,
pub created: DateTime<Utc>,
pub created_by: UserId,
pub created_by: DBUserId,
pub url: Option<String>,
pub description: Option<String>,
}
@@ -79,14 +79,14 @@ macro_rules! select_clients_with_predicate {
impl OAuthClient {
pub async fn get(
id: OAuthClientId,
id: DBOAuthClientId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Option<OAuthClient>, DatabaseError> {
Ok(Self::get_many(&[id], exec).await?.into_iter().next())
}
pub async fn get_many(
ids: &[OAuthClientId],
ids: &[DBOAuthClientId],
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Vec<OAuthClient>, DatabaseError> {
let ids = ids.iter().map(|id| id.0).collect_vec();
@@ -102,7 +102,7 @@ impl OAuthClient {
}
pub async fn get_all_user_clients(
user_id: UserId,
user_id: DBUserId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Vec<OAuthClient>, DatabaseError> {
let user_id_param = user_id.0;
@@ -117,7 +117,7 @@ impl OAuthClient {
}
pub async fn remove(
id: OAuthClientId,
id: DBOAuthClientId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<(), DatabaseError> {
// Cascades to oauth_client_redirect_uris, oauth_client_authorizations
@@ -189,7 +189,7 @@ impl OAuthClient {
}
pub async fn remove_redirect_uris(
ids: impl IntoIterator<Item = OAuthRedirectUriId>,
ids: impl IntoIterator<Item = DBOAuthRedirectUriId>,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<(), DatabaseError> {
let ids = ids.into_iter().map(|id| id.0).collect_vec();
@@ -243,8 +243,8 @@ impl From<ClientQueryResult> for OAuthClient {
ids.iter()
.zip(uris.iter())
.map(|(id, uri)| OAuthRedirectUri {
id: OAuthRedirectUriId(*id),
client_id: OAuthClientId(r.id),
id: DBOAuthRedirectUriId(*id),
client_id: DBOAuthClientId(r.id),
uri: uri.to_string(),
})
.collect()
@@ -253,7 +253,7 @@ impl From<ClientQueryResult> for OAuthClient {
};
OAuthClient {
id: OAuthClientId(r.id),
id: DBOAuthClientId(r.id),
name: r.name,
icon_url: r.icon_url,
raw_icon_url: r.raw_icon_url,
@@ -261,7 +261,7 @@ impl From<ClientQueryResult> for OAuthClient {
secret_hash: r.secret_hash,
redirect_uris: redirects,
created: r.created,
created_by: UserId(r.created_by),
created_by: DBUserId(r.created_by),
url: r.url,
description: r.description,
}

View File

@@ -1,6 +1,6 @@
use super::{
DatabaseError, OAuthAccessTokenId, OAuthClientAuthorizationId,
OAuthClientId, UserId,
DBOAuthAccessTokenId, DBOAuthClientAuthorizationId, DBOAuthClientId,
DBUserId, DatabaseError,
};
use crate::models::pats::Scopes;
use chrono::{DateTime, Utc};
@@ -9,8 +9,8 @@ use sha2::Digest;
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct OAuthAccessToken {
pub id: OAuthAccessTokenId,
pub authorization_id: OAuthClientAuthorizationId,
pub id: DBOAuthAccessTokenId,
pub authorization_id: DBOAuthClientAuthorizationId,
pub token_hash: String,
pub scopes: Scopes,
pub created: DateTime<Utc>,
@@ -18,8 +18,8 @@ pub struct OAuthAccessToken {
pub last_used: Option<DateTime<Utc>>,
// Stored separately inside oauth_client_authorizations table
pub client_id: OAuthClientId,
pub user_id: UserId,
pub client_id: DBOAuthClientId,
pub user_id: DBUserId,
}
impl OAuthAccessToken {
@@ -50,15 +50,15 @@ impl OAuthAccessToken {
.await?;
Ok(value.map(|r| OAuthAccessToken {
id: OAuthAccessTokenId(r.id),
authorization_id: OAuthClientAuthorizationId(r.authorization_id),
id: DBOAuthAccessTokenId(r.id),
authorization_id: DBOAuthClientAuthorizationId(r.authorization_id),
token_hash: r.token_hash,
scopes: Scopes::from_postgres(r.scopes),
created: r.created,
expires: r.expires,
last_used: r.last_used,
client_id: OAuthClientId(r.client_id),
user_id: UserId(r.user_id),
client_id: DBOAuthClientId(r.client_id),
user_id: DBUserId(r.user_id),
}))
}

View File

@@ -15,7 +15,7 @@ const ORGANIZATIONS_TITLES_NAMESPACE: &str = "organizations_titles";
/// An organization of users who together control one or more projects and organizations.
pub struct Organization {
/// The id of the organization
pub id: OrganizationId,
pub id: DBOrganizationId,
/// The slug of the organization
pub slug: String,
@@ -24,7 +24,7 @@ pub struct Organization {
pub name: String,
/// The associated team of the organization
pub team_id: TeamId,
pub team_id: DBTeamId,
/// The description of the organization
pub description: String,
@@ -48,7 +48,7 @@ impl Organization {
self.id.0,
self.slug,
self.name,
self.team_id as TeamId,
self.team_id as DBTeamId,
self.description,
self.icon_url,
self.raw_icon_url,
@@ -74,7 +74,7 @@ impl Organization {
}
pub async fn get_id<'a, 'b, E>(
id: OrganizationId,
id: DBOrganizationId,
exec: E,
redis: &RedisPool,
) -> Result<Option<Self>, super::DatabaseError>
@@ -87,7 +87,7 @@ impl Organization {
}
pub async fn get_many_ids<'a, 'b, E>(
organization_ids: &[OrganizationId],
organization_ids: &[DBOrganizationId],
exec: E,
redis: &RedisPool,
) -> Result<Vec<Self>, super::DatabaseError>
@@ -143,10 +143,10 @@ impl Organization {
.fetch(exec)
.try_fold(DashMap::new(), |acc, m| {
let org = Organization {
id: OrganizationId(m.id),
id: DBOrganizationId(m.id),
slug: m.slug.clone(),
name: m.name,
team_id: TeamId(m.team_id),
team_id: DBTeamId(m.team_id),
description: m.description,
icon_url: m.icon_url,
raw_icon_url: m.raw_icon_url,
@@ -168,7 +168,7 @@ impl Organization {
// Gets organization associated with a project ID, if it exists and there is one
pub async fn get_associated_organization_project_id<'a, 'b, E>(
project_id: ProjectId,
project_id: DBProjectId,
exec: E,
) -> Result<Option<Self>, super::DatabaseError>
where
@@ -182,17 +182,17 @@ impl Organization {
WHERE m.id = $1
GROUP BY o.id;
",
project_id as ProjectId,
project_id as DBProjectId,
)
.fetch_optional(exec)
.await?;
if let Some(result) = result {
Ok(Some(Organization {
id: OrganizationId(result.id),
id: DBOrganizationId(result.id),
slug: result.slug,
name: result.name,
team_id: TeamId(result.team_id),
team_id: DBTeamId(result.team_id),
description: result.description,
icon_url: result.icon_url,
raw_icon_url: result.raw_icon_url,
@@ -204,7 +204,7 @@ impl Organization {
}
pub async fn remove(
id: OrganizationId,
id: DBOrganizationId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
redis: &RedisPool,
) -> Result<Option<()>, super::DatabaseError> {
@@ -216,7 +216,7 @@ impl Organization {
DELETE FROM organizations
WHERE id = $1
",
id as OrganizationId,
id as DBOrganizationId,
)
.execute(&mut **transaction)
.await?;
@@ -228,7 +228,7 @@ impl Organization {
DELETE FROM team_members
WHERE team_id = $1
",
organization.team_id as TeamId,
organization.team_id as DBTeamId,
)
.execute(&mut **transaction)
.await?;
@@ -238,7 +238,7 @@ impl Organization {
DELETE FROM teams
WHERE id = $1
",
organization.team_id as TeamId,
organization.team_id as DBTeamId,
)
.execute(&mut **transaction)
.await?;
@@ -250,7 +250,7 @@ impl Organization {
}
pub async fn clear_cache(
id: OrganizationId,
id: DBOrganizationId,
slug: Option<String>,
redis: &RedisPool,
) -> Result<(), super::DatabaseError> {

View File

@@ -16,11 +16,11 @@ const PATS_USERS_NAMESPACE: &str = "pats_users";
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct PersonalAccessToken {
pub id: PatId,
pub id: DBPatId,
pub name: String,
pub access_token: String,
pub scopes: Scopes,
pub user_id: UserId,
pub user_id: DBUserId,
pub created: DateTime<Utc>,
pub expires: DateTime<Utc>,
pub last_used: Option<DateTime<Utc>>,
@@ -42,11 +42,11 @@ impl PersonalAccessToken {
$6
)
",
self.id as PatId,
self.id as DBPatId,
self.name,
self.access_token,
self.scopes.bits() as i64,
self.user_id as UserId,
self.user_id as DBUserId,
self.expires
)
.execute(&mut **transaction)
@@ -73,7 +73,7 @@ impl PersonalAccessToken {
}
pub async fn get_many_ids<'a, E>(
pat_ids: &[PatId],
pat_ids: &[DBPatId],
exec: E,
redis: &RedisPool,
) -> Result<Vec<PersonalAccessToken>, DatabaseError>
@@ -126,11 +126,11 @@ impl PersonalAccessToken {
.fetch(exec)
.try_fold(DashMap::new(), |acc, x| {
let pat = PersonalAccessToken {
id: PatId(x.id),
id: DBPatId(x.id),
name: x.name,
access_token: x.access_token.clone(),
scopes: Scopes::from_bits(x.scopes as u64).unwrap_or(Scopes::NONE),
user_id: UserId(x.user_id),
user_id: DBUserId(x.user_id),
created: x.created,
expires: x.expires,
last_used: x.last_used,
@@ -149,10 +149,10 @@ impl PersonalAccessToken {
}
pub async fn get_user_pats<'a, E>(
user_id: UserId,
user_id: DBUserId,
exec: E,
redis: &RedisPool,
) -> Result<Vec<PatId>, DatabaseError>
) -> Result<Vec<DBPatId>, DatabaseError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
@@ -166,10 +166,10 @@ impl PersonalAccessToken {
.await?;
if let Some(res) = res {
return Ok(res.into_iter().map(PatId).collect());
return Ok(res.into_iter().map(DBPatId).collect());
}
let db_pats: Vec<PatId> = sqlx::query!(
let db_pats: Vec<DBPatId> = sqlx::query!(
"
SELECT id
FROM pats
@@ -179,8 +179,8 @@ impl PersonalAccessToken {
user_id.0,
)
.fetch(exec)
.map_ok(|x| PatId(x.id))
.try_collect::<Vec<PatId>>()
.map_ok(|x| DBPatId(x.id))
.try_collect::<Vec<DBPatId>>()
.await?;
redis
@@ -195,7 +195,7 @@ impl PersonalAccessToken {
}
pub async fn clear_cache(
clear_pats: Vec<(Option<PatId>, Option<String>, Option<UserId>)>,
clear_pats: Vec<(Option<DBPatId>, Option<String>, Option<DBUserId>)>,
redis: &RedisPool,
) -> Result<(), DatabaseError> {
let mut redis = redis.connect().await?;
@@ -223,14 +223,14 @@ impl PersonalAccessToken {
}
pub async fn remove(
id: PatId,
id: DBPatId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<Option<()>, sqlx::error::Error> {
sqlx::query!(
"
DELETE FROM pats WHERE id = $1
",
id as PatId,
id as DBPatId,
)
.execute(&mut **transaction)
.await?;

View File

@@ -3,12 +3,12 @@ use chrono::{DateTime, Utc};
use rust_decimal::Decimal;
use serde::{Deserialize, Serialize};
use super::{DatabaseError, PayoutId, UserId};
use super::{DBPayoutId, DBUserId, DatabaseError};
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct Payout {
pub id: PayoutId,
pub user_id: UserId,
pub id: DBPayoutId,
pub user_id: DBUserId,
pub created: DateTime<Utc>,
pub status: PayoutStatus,
pub amount: Decimal,
@@ -49,7 +49,7 @@ impl Payout {
}
pub async fn get<'a, 'b, E>(
id: PayoutId,
id: DBPayoutId,
executor: E,
) -> Result<Option<Payout>, DatabaseError>
where
@@ -61,7 +61,7 @@ impl Payout {
}
pub async fn get_many<'a, E>(
payout_ids: &[PayoutId],
payout_ids: &[DBPayoutId],
exec: E,
) -> Result<Vec<Payout>, DatabaseError>
where
@@ -79,8 +79,8 @@ impl Payout {
)
.fetch(exec)
.map_ok(|r| Payout {
id: PayoutId(r.id),
user_id: UserId(r.user_id),
id: DBPayoutId(r.id),
user_id: DBUserId(r.user_id),
created: r.created,
status: PayoutStatus::from_string(&r.status),
amount: r.amount,
@@ -96,9 +96,9 @@ impl Payout {
}
pub async fn get_all_for_user(
user_id: UserId,
user_id: DBUserId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Vec<PayoutId>, DatabaseError> {
) -> Result<Vec<DBPayoutId>, DatabaseError> {
let results = sqlx::query!(
"
SELECT id
@@ -112,7 +112,7 @@ impl Payout {
Ok(results
.into_iter()
.map(|r| PayoutId(r.id))
.map(|r| DBPayoutId(r.id))
.collect::<Vec<_>>())
}
}

View File

@@ -1,5 +1,5 @@
use crate::database::models::{
DatabaseError, ProductId, ProductPriceId, product_item,
DBProductId, DBProductPriceId, DatabaseError, product_item,
};
use crate::database::redis::RedisPool;
use crate::models::billing::{Price, ProductMetadata};
@@ -12,7 +12,7 @@ use std::convert::TryInto;
const PRODUCTS_NAMESPACE: &str = "products";
pub struct ProductItem {
pub id: ProductId,
pub id: DBProductId,
pub metadata: ProductMetadata,
pub unitary: bool,
}
@@ -42,7 +42,7 @@ impl TryFrom<ProductResult> for ProductItem {
fn try_from(r: ProductResult) -> Result<Self, Self::Error> {
Ok(ProductItem {
id: ProductId(r.id),
id: DBProductId(r.id),
metadata: serde_json::from_value(r.metadata)?,
unitary: r.unitary,
})
@@ -51,14 +51,14 @@ impl TryFrom<ProductResult> for ProductItem {
impl ProductItem {
pub async fn get(
id: ProductId,
id: DBProductId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Option<ProductItem>, DatabaseError> {
Ok(Self::get_many(&[id], exec).await?.into_iter().next())
}
pub async fn get_many(
ids: &[ProductId],
ids: &[DBProductId],
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Vec<ProductItem>, DatabaseError> {
let ids = ids.iter().map(|id| id.0).collect_vec();
@@ -93,7 +93,7 @@ impl ProductItem {
#[derive(Deserialize, Serialize)]
pub struct QueryProduct {
pub id: ProductId,
pub id: DBProductId,
pub metadata: ProductMetadata,
pub unitary: bool,
pub prices: Vec<ProductPriceItem>,
@@ -155,8 +155,8 @@ impl QueryProduct {
#[derive(Deserialize, Serialize)]
pub struct ProductPriceItem {
pub id: ProductPriceId,
pub product_id: ProductId,
pub id: DBProductPriceId,
pub product_id: DBProductId,
pub prices: Price,
pub currency_code: String,
}
@@ -187,8 +187,8 @@ impl TryFrom<ProductPriceResult> for ProductPriceItem {
fn try_from(r: ProductPriceResult) -> Result<Self, Self::Error> {
Ok(ProductPriceItem {
id: ProductPriceId(r.id),
product_id: ProductId(r.product_id),
id: DBProductPriceId(r.id),
product_id: DBProductId(r.product_id),
prices: serde_json::from_value(r.prices)?,
currency_code: r.currency_code,
})
@@ -197,14 +197,14 @@ impl TryFrom<ProductPriceResult> for ProductPriceItem {
impl ProductPriceItem {
pub async fn get(
id: ProductPriceId,
id: DBProductPriceId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Option<ProductPriceItem>, DatabaseError> {
Ok(Self::get_many(&[id], exec).await?.into_iter().next())
}
pub async fn get_many(
ids: &[ProductPriceId],
ids: &[DBProductPriceId],
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Vec<ProductPriceItem>, DatabaseError> {
let ids = ids.iter().map(|id| id.0).collect_vec();
@@ -223,7 +223,7 @@ impl ProductPriceItem {
}
pub async fn get_all_product_prices(
product_id: ProductId,
product_id: DBProductId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Vec<ProductPriceItem>, DatabaseError> {
let res = Self::get_all_products_prices(&[product_id], exec).await?;
@@ -232,9 +232,10 @@ impl ProductPriceItem {
}
pub async fn get_all_products_prices(
product_ids: &[ProductId],
product_ids: &[DBProductId],
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<DashMap<ProductId, Vec<ProductPriceItem>>, DatabaseError> {
) -> Result<DashMap<DBProductId, Vec<ProductPriceItem>>, DatabaseError>
{
let ids = product_ids.iter().map(|id| id.0).collect_vec();
let ids_ref: &[i64] = &ids;
@@ -246,7 +247,7 @@ impl ProductPriceItem {
.fetch(exec)
.try_fold(
DashMap::new(),
|acc: DashMap<ProductId, Vec<ProductPriceItem>>, x| {
|acc: DashMap<DBProductId, Vec<ProductPriceItem>>, x| {
if let Ok(item) = <ProductPriceResult as TryInto<
ProductPriceItem,
>>::try_into(x)

View File

@@ -31,7 +31,7 @@ pub struct LinkUrl {
impl LinkUrl {
pub async fn insert_many_projects(
links: Vec<Self>,
project_id: ProjectId,
project_id: DBProjectId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), sqlx::error::Error> {
let (project_ids, platform_ids, urls): (Vec<_>, Vec<_>, Vec<_>) = links
@@ -70,7 +70,7 @@ pub struct GalleryItem {
impl GalleryItem {
pub async fn insert_many(
items: Vec<Self>,
project_id: ProjectId,
project_id: DBProjectId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), sqlx::error::Error> {
let (
@@ -118,7 +118,7 @@ impl GalleryItem {
}
pub struct ModCategory {
pub project_id: ProjectId,
pub project_id: DBProjectId,
pub category_id: CategoryId,
pub is_additional: bool,
}
@@ -154,9 +154,9 @@ impl ModCategory {
#[derive(Clone)]
pub struct ProjectBuilder {
pub project_id: ProjectId,
pub team_id: TeamId,
pub organization_id: Option<OrganizationId>,
pub project_id: DBProjectId,
pub team_id: DBTeamId,
pub organization_id: Option<DBOrganizationId>,
pub name: String,
pub summary: String,
pub description: String,
@@ -180,7 +180,7 @@ impl ProjectBuilder {
pub async fn insert(
self,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<ProjectId, DatabaseError> {
) -> Result<DBProjectId, DatabaseError> {
let project_struct = Project {
id: self.project_id,
team_id: self.team_id,
@@ -264,9 +264,9 @@ impl ProjectBuilder {
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Project {
pub id: ProjectId,
pub team_id: TeamId,
pub organization_id: Option<OrganizationId>,
pub id: DBProjectId,
pub team_id: DBTeamId,
pub organization_id: Option<DBOrganizationId>,
pub name: String,
pub summary: String,
pub description: String,
@@ -311,8 +311,8 @@ impl Project {
LOWER($14), $15, $16, $17
)
",
self.id as ProjectId,
self.team_id as TeamId,
self.id as DBProjectId,
self.team_id as DBTeamId,
&self.name,
&self.summary,
&self.description,
@@ -336,7 +336,7 @@ impl Project {
}
pub async fn remove(
id: ProjectId,
id: DBProjectId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
redis: &RedisPool,
) -> Result<Option<()>, DatabaseError> {
@@ -351,7 +351,7 @@ impl Project {
DELETE FROM mod_follows
WHERE mod_id = $1
",
id as ProjectId
id as DBProjectId
)
.execute(&mut **transaction)
.await?;
@@ -361,7 +361,7 @@ impl Project {
DELETE FROM mods_gallery
WHERE mod_id = $1
",
id as ProjectId
id as DBProjectId
)
.execute(&mut **transaction)
.await?;
@@ -371,7 +371,7 @@ impl Project {
DELETE FROM mod_follows
WHERE mod_id = $1
",
id as ProjectId,
id as DBProjectId,
)
.execute(&mut **transaction)
.await?;
@@ -384,7 +384,7 @@ impl Project {
SET mod_id = NULL
WHERE mod_id = $1
",
id as ProjectId,
id as DBProjectId,
)
.execute(&mut **transaction)
.await?;
@@ -394,7 +394,7 @@ impl Project {
DELETE FROM mods_categories
WHERE joining_mod_id = $1
",
id as ProjectId,
id as DBProjectId,
)
.execute(&mut **transaction)
.await?;
@@ -404,7 +404,7 @@ impl Project {
DELETE FROM mods_links
WHERE joining_mod_id = $1
",
id as ProjectId,
id as DBProjectId,
)
.execute(&mut **transaction)
.await?;
@@ -418,7 +418,7 @@ impl Project {
"
DELETE FROM dependencies WHERE mod_dependency_id = $1
",
id as ProjectId,
id as DBProjectId,
)
.execute(&mut **transaction)
.await?;
@@ -429,7 +429,7 @@ impl Project {
SET mod_id = NULL
WHERE (mod_id = $1)
",
id as ProjectId,
id as DBProjectId,
)
.execute(&mut **transaction)
.await?;
@@ -439,7 +439,7 @@ impl Project {
DELETE FROM mods
WHERE id = $1
",
id as ProjectId,
id as DBProjectId,
)
.execute(&mut **transaction)
.await?;
@@ -453,10 +453,10 @@ impl Project {
WHERE team_id = $1
RETURNING user_id
",
project.inner.team_id as TeamId,
project.inner.team_id as DBTeamId,
)
.fetch(&mut **transaction)
.map_ok(|x| UserId(x.user_id))
.map_ok(|x| DBUserId(x.user_id))
.try_collect::<Vec<_>>()
.await?;
@@ -467,7 +467,7 @@ impl Project {
DELETE FROM teams
WHERE id = $1
",
project.inner.team_id as TeamId,
project.inner.team_id as DBTeamId,
)
.execute(&mut **transaction)
.await?;
@@ -492,7 +492,7 @@ impl Project {
}
pub async fn get_id<'a, 'b, E>(
id: ProjectId,
id: DBProjectId,
executor: E,
redis: &RedisPool,
) -> Result<Option<QueryProject>, DatabaseError>
@@ -509,7 +509,7 @@ impl Project {
}
pub async fn get_many_ids<'a, E>(
project_ids: &[ProjectId],
project_ids: &[DBProjectId],
exec: E,
redis: &RedisPool,
) -> Result<Vec<QueryProject>, DatabaseError>
@@ -553,7 +553,7 @@ impl Project {
.collect::<Vec<_>>();
let all_version_ids = DashSet::new();
let versions: DashMap<ProjectId, Vec<(VersionId, DateTime<Utc>)>> = sqlx::query!(
let versions: DashMap<DBProjectId, Vec<(DBVersionId, DateTime<Utc>)>> = sqlx::query!(
"
SELECT DISTINCT mod_id, v.id as id, date_published
FROM mods m
@@ -570,11 +570,11 @@ impl Project {
.fetch(&mut *exec)
.try_fold(
DashMap::new(),
|acc: DashMap<ProjectId, Vec<(VersionId, DateTime<Utc>)>>, m| {
let version_id = VersionId(m.id);
|acc: DashMap<DBProjectId, Vec<(DBVersionId, DateTime<Utc>)>>, m| {
let version_id = DBVersionId(m.id);
let date_published = m.date_published;
all_version_ids.insert(version_id);
acc.entry(ProjectId(m.mod_id))
acc.entry(DBProjectId(m.mod_id))
.or_default()
.push((version_id, date_published));
async move { Ok(acc) }
@@ -583,7 +583,7 @@ impl Project {
.await?;
let loader_field_enum_value_ids = DashSet::new();
let version_fields: DashMap<ProjectId, Vec<QueryVersionField>> = sqlx::query!(
let version_fields: DashMap<DBProjectId, Vec<QueryVersionField>> = sqlx::query!(
"
SELECT DISTINCT mod_id, version_id, field_id, int_value, enum_value, string_value
FROM versions v
@@ -595,9 +595,9 @@ impl Project {
.fetch(&mut *exec)
.try_fold(
DashMap::new(),
|acc: DashMap<ProjectId, Vec<QueryVersionField>>, m| {
|acc: DashMap<DBProjectId, Vec<QueryVersionField>>, m| {
let qvf = QueryVersionField {
version_id: VersionId(m.version_id),
version_id: DBVersionId(m.version_id),
field_id: LoaderFieldId(m.field_id),
int_value: m.int_value,
enum_value: if m.enum_value == -1 { None } else { Some(LoaderFieldEnumValueId(m.enum_value)) },
@@ -608,7 +608,7 @@ impl Project {
loader_field_enum_value_ids.insert(LoaderFieldEnumValueId(m.enum_value));
}
acc.entry(ProjectId(m.mod_id)).or_default().push(qvf);
acc.entry(DBProjectId(m.mod_id)).or_default().push(qvf);
async move { Ok(acc) }
},
)
@@ -638,7 +638,7 @@ impl Project {
.try_collect()
.await?;
let mods_gallery: DashMap<ProjectId, Vec<GalleryItem>> = sqlx::query!(
let mods_gallery: DashMap<DBProjectId, Vec<GalleryItem>> = sqlx::query!(
"
SELECT DISTINCT mod_id, mg.image_url, mg.raw_image_url, mg.featured, mg.name, mg.description, mg.created, mg.ordering
FROM mods_gallery mg
@@ -648,8 +648,8 @@ impl Project {
&project_ids_parsed,
&slugs
).fetch(&mut *exec)
.try_fold(DashMap::new(), |acc : DashMap<ProjectId, Vec<GalleryItem>>, m| {
acc.entry(ProjectId(m.mod_id))
.try_fold(DashMap::new(), |acc : DashMap<DBProjectId, Vec<GalleryItem>>, m| {
acc.entry(DBProjectId(m.mod_id))
.or_default()
.push(GalleryItem {
image_url: m.image_url,
@@ -664,7 +664,7 @@ impl Project {
}
).await?;
let links: DashMap<ProjectId, Vec<LinkUrl>> = sqlx::query!(
let links: DashMap<DBProjectId, Vec<LinkUrl>> = sqlx::query!(
"
SELECT DISTINCT joining_mod_id as mod_id, joining_platform_id as platform_id, lp.name as platform_name, url, lp.donation as donation
FROM mods_links ml
@@ -675,8 +675,8 @@ impl Project {
&project_ids_parsed,
&slugs
).fetch(&mut *exec)
.try_fold(DashMap::new(), |acc : DashMap<ProjectId, Vec<LinkUrl>>, m| {
acc.entry(ProjectId(m.mod_id))
.try_fold(DashMap::new(), |acc : DashMap<DBProjectId, Vec<LinkUrl>>, m| {
acc.entry(DBProjectId(m.mod_id))
.or_default()
.push(LinkUrl {
platform_id: LinkPlatformId(m.platform_id),
@@ -697,7 +697,7 @@ impl Project {
}
let loader_field_ids = DashSet::new();
let loaders_ptypes_games: DashMap<ProjectId, VersionLoaderData> = sqlx::query!(
let loaders_ptypes_games: DashMap<DBProjectId, VersionLoaderData> = sqlx::query!(
"
SELECT DISTINCT mod_id,
ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,
@@ -718,7 +718,7 @@ impl Project {
&all_version_ids.iter().map(|x| x.0).collect::<Vec<_>>()
).fetch(&mut *exec)
.map_ok(|m| {
let project_id = ProjectId(m.mod_id);
let project_id = DBProjectId(m.mod_id);
// Add loader fields to the set we need to fetch
let loader_loader_field_ids = m.loader_fields.unwrap_or_default().into_iter().map(LoaderFieldId).collect::<Vec<_>>();
@@ -784,14 +784,14 @@ impl Project {
.fetch(&mut *exec)
.try_fold(DashMap::new(), |acc, m| {
let id = m.id;
let project_id = ProjectId(id);
let project_id = DBProjectId(id);
let VersionLoaderData {
loaders,
project_types,
games,
loader_loader_field_ids,
} = loaders_ptypes_games.remove(&project_id).map(|x|x.1).unwrap_or_default();
// Each version is a tuple of (VersionId, DateTime<Utc>)
// Each version is a tuple of (DBVersionId, DateTime<Utc>)
let mut versions = versions.remove(&project_id).map(|x| x.1).unwrap_or_default();
versions.sort_by(|a, b| a.1.cmp(&b.1));
let mut gallery = mods_gallery.remove(&project_id).map(|x| x.1).unwrap_or_default();
@@ -804,9 +804,9 @@ impl Project {
let project = QueryProject {
inner: Project {
id: ProjectId(id),
team_id: TeamId(m.team_id),
organization_id: m.organization_id.map(OrganizationId),
id: DBProjectId(id),
team_id: DBTeamId(m.team_id),
organization_id: m.organization_id.map(DBOrganizationId),
name: m.name.clone(),
summary: m.summary.clone(),
downloads: m.downloads,
@@ -847,7 +847,7 @@ impl Project {
},
urls,
aggregate_version_fields: VersionField::from_query_json(version_fields, &loader_fields, &loader_field_enum_values, true),
thread_id: ThreadId(m.thread_id),
thread_id: DBThreadId(m.thread_id),
};
acc.insert(m.id, (m.slug, project));
@@ -863,18 +863,25 @@ impl Project {
}
pub async fn get_dependencies<'a, E>(
id: ProjectId,
id: DBProjectId,
exec: E,
redis: &RedisPool,
) -> Result<
Vec<(Option<VersionId>, Option<ProjectId>, Option<ProjectId>)>,
Vec<(
Option<DBVersionId>,
Option<DBProjectId>,
Option<DBProjectId>,
)>,
DatabaseError,
>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
type Dependencies =
Vec<(Option<VersionId>, Option<ProjectId>, Option<ProjectId>)>;
type Dependencies = Vec<(
Option<DBVersionId>,
Option<DBProjectId>,
Option<DBProjectId>,
)>;
let mut redis = redis.connect().await?;
@@ -896,18 +903,18 @@ impl Project {
LEFT JOIN versions vd ON d.dependency_id = vd.id
WHERE v.mod_id = $1
",
id as ProjectId
id as DBProjectId
)
.fetch(exec)
.map_ok(|x| {
(
x.dependency_id.map(VersionId),
x.dependency_id.map(DBVersionId),
if x.mod_id == Some(0) {
None
} else {
x.mod_id.map(ProjectId)
x.mod_id.map(DBProjectId)
},
x.mod_dependency_id.map(ProjectId),
x.mod_dependency_id.map(DBProjectId),
)
})
.try_collect::<Dependencies>()
@@ -925,7 +932,7 @@ impl Project {
}
pub async fn clear_cache(
id: ProjectId,
id: DBProjectId,
slug: Option<String>,
clear_dependencies: Option<bool>,
redis: &RedisPool,
@@ -955,11 +962,11 @@ pub struct QueryProject {
pub inner: Project,
pub categories: Vec<String>,
pub additional_categories: Vec<String>,
pub versions: Vec<VersionId>,
pub versions: Vec<DBVersionId>,
pub project_types: Vec<String>,
pub games: Vec<String>,
pub urls: Vec<LinkUrl>,
pub gallery_items: Vec<GalleryItem>,
pub thread_id: ThreadId,
pub thread_id: DBThreadId,
pub aggregate_version_fields: Vec<VersionField>,
}

View File

@@ -2,28 +2,28 @@ use super::ids::*;
use chrono::{DateTime, Utc};
pub struct Report {
pub id: ReportId,
pub id: DBReportId,
pub report_type_id: ReportTypeId,
pub project_id: Option<ProjectId>,
pub version_id: Option<VersionId>,
pub user_id: Option<UserId>,
pub project_id: Option<DBProjectId>,
pub version_id: Option<DBVersionId>,
pub user_id: Option<DBUserId>,
pub body: String,
pub reporter: UserId,
pub reporter: DBUserId,
pub created: DateTime<Utc>,
pub closed: bool,
}
pub struct QueryReport {
pub id: ReportId,
pub id: DBReportId,
pub report_type: String,
pub project_id: Option<ProjectId>,
pub version_id: Option<VersionId>,
pub user_id: Option<UserId>,
pub project_id: Option<DBProjectId>,
pub version_id: Option<DBVersionId>,
pub user_id: Option<DBUserId>,
pub body: String,
pub reporter: UserId,
pub reporter: DBUserId,
pub created: DateTime<Utc>,
pub closed: bool,
pub thread_id: ThreadId,
pub thread_id: DBThreadId,
}
impl Report {
@@ -42,13 +42,13 @@ impl Report {
$6, $7
)
",
self.id as ReportId,
self.id as DBReportId,
self.report_type_id as ReportTypeId,
self.project_id.map(|x| x.0 as i64),
self.version_id.map(|x| x.0 as i64),
self.user_id.map(|x| x.0 as i64),
self.body,
self.reporter as UserId
self.reporter as DBUserId
)
.execute(&mut **transaction)
.await?;
@@ -57,7 +57,7 @@ impl Report {
}
pub async fn get<'a, E>(
id: ReportId,
id: DBReportId,
exec: E,
) -> Result<Option<QueryReport>, sqlx::Error>
where
@@ -69,7 +69,7 @@ impl Report {
}
pub async fn get_many<'a, E>(
report_ids: &[ReportId],
report_ids: &[DBReportId],
exec: E,
) -> Result<Vec<QueryReport>, sqlx::Error>
where
@@ -92,16 +92,16 @@ impl Report {
)
.fetch(exec)
.map_ok(|x| QueryReport {
id: ReportId(x.id),
id: DBReportId(x.id),
report_type: x.name,
project_id: x.mod_id.map(ProjectId),
version_id: x.version_id.map(VersionId),
user_id: x.user_id.map(UserId),
project_id: x.mod_id.map(DBProjectId),
version_id: x.version_id.map(DBVersionId),
user_id: x.user_id.map(DBUserId),
body: x.body,
reporter: UserId(x.reporter),
reporter: DBUserId(x.reporter),
created: x.created,
closed: x.closed,
thread_id: ThreadId(x.thread_id)
thread_id: DBThreadId(x.thread_id)
})
.try_collect::<Vec<QueryReport>>()
.await?;
@@ -110,14 +110,14 @@ impl Report {
}
pub async fn remove_full(
id: ReportId,
id: DBReportId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<Option<()>, sqlx::error::Error> {
let result = sqlx::query!(
"
SELECT EXISTS(SELECT 1 FROM reports WHERE id = $1)
",
id as ReportId
id as DBReportId
)
.fetch_one(&mut **transaction)
.await?;
@@ -131,14 +131,14 @@ impl Report {
SELECT id FROM threads
WHERE report_id = $1
",
id as ReportId
id as DBReportId
)
.fetch_optional(&mut **transaction)
.await?;
if let Some(thread_id) = thread_id {
crate::database::models::Thread::remove_full(
ThreadId(thread_id.id),
DBThreadId(thread_id.id),
transaction,
)
.await?;
@@ -148,7 +148,7 @@ impl Report {
"
DELETE FROM reports WHERE id = $1
",
id as ReportId,
id as DBReportId,
)
.execute(&mut **transaction)
.await?;

View File

@@ -14,7 +14,7 @@ const SESSIONS_USERS_NAMESPACE: &str = "sessions_users";
pub struct SessionBuilder {
pub session: String,
pub user_id: UserId,
pub user_id: DBUserId,
pub os: Option<String>,
pub platform: Option<String>,
@@ -30,7 +30,7 @@ impl SessionBuilder {
pub async fn insert(
&self,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<SessionId, DatabaseError> {
) -> Result<DBSessionId, DatabaseError> {
let id = generate_session_id(transaction).await?;
sqlx::query!(
@@ -44,9 +44,9 @@ impl SessionBuilder {
$6, $7, $8, $9
)
",
id as SessionId,
id as DBSessionId,
self.session,
self.user_id as UserId,
self.user_id as DBUserId,
self.os,
self.platform,
self.city,
@@ -63,9 +63,9 @@ impl SessionBuilder {
#[derive(Deserialize, Serialize)]
pub struct Session {
pub id: SessionId,
pub id: DBSessionId,
pub session: String,
pub user_id: UserId,
pub user_id: DBUserId,
pub created: DateTime<Utc>,
pub last_login: DateTime<Utc>,
@@ -100,7 +100,7 @@ impl Session {
}
pub async fn get_id<'a, 'b, E>(
id: SessionId,
id: DBSessionId,
executor: E,
redis: &RedisPool,
) -> Result<Option<Session>, DatabaseError>
@@ -117,7 +117,7 @@ impl Session {
}
pub async fn get_many_ids<'a, E>(
session_ids: &[SessionId],
session_ids: &[DBSessionId],
exec: E,
redis: &RedisPool,
) -> Result<Vec<Session>, DatabaseError>
@@ -174,9 +174,9 @@ impl Session {
.fetch(exec)
.try_fold(DashMap::new(), |acc, x| {
let session = Session {
id: SessionId(x.id),
id: DBSessionId(x.id),
session: x.session.clone(),
user_id: UserId(x.user_id),
user_id: DBUserId(x.user_id),
created: x.created,
last_login: x.last_login,
expires: x.expires,
@@ -202,10 +202,10 @@ impl Session {
}
pub async fn get_user_sessions<'a, E>(
user_id: UserId,
user_id: DBUserId,
exec: E,
redis: &RedisPool,
) -> Result<Vec<SessionId>, DatabaseError>
) -> Result<Vec<DBSessionId>, DatabaseError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
@@ -219,11 +219,11 @@ impl Session {
.await?;
if let Some(res) = res {
return Ok(res.into_iter().map(SessionId).collect());
return Ok(res.into_iter().map(DBSessionId).collect());
}
use futures::TryStreamExt;
let db_sessions: Vec<SessionId> = sqlx::query!(
let db_sessions: Vec<DBSessionId> = sqlx::query!(
"
SELECT id
FROM sessions
@@ -233,8 +233,8 @@ impl Session {
user_id.0,
)
.fetch(exec)
.map_ok(|x| SessionId(x.id))
.try_collect::<Vec<SessionId>>()
.map_ok(|x| DBSessionId(x.id))
.try_collect::<Vec<DBSessionId>>()
.await?;
redis
@@ -251,9 +251,9 @@ impl Session {
pub async fn clear_cache(
clear_sessions: Vec<(
Option<SessionId>,
Option<DBSessionId>,
Option<String>,
Option<UserId>,
Option<DBUserId>,
)>,
redis: &RedisPool,
) -> Result<(), DatabaseError> {
@@ -281,14 +281,14 @@ impl Session {
}
pub async fn remove(
id: SessionId,
id: DBSessionId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<Option<()>, sqlx::error::Error> {
sqlx::query!(
"
DELETE FROM sessions WHERE id = $1
",
id as SessionId,
id as DBSessionId,
)
.execute(&mut **transaction)
.await?;

View File

@@ -15,7 +15,7 @@ pub struct TeamBuilder {
pub members: Vec<TeamMemberBuilder>,
}
pub struct TeamMemberBuilder {
pub user_id: UserId,
pub user_id: DBUserId,
pub role: String,
pub is_owner: bool,
pub permissions: ProjectPermissions,
@@ -29,7 +29,7 @@ impl TeamBuilder {
pub async fn insert(
self,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<TeamId, super::DatabaseError> {
) -> Result<DBTeamId, super::DatabaseError> {
let team_id = generate_team_id(transaction).await?;
let team = Team { id: team_id };
@@ -39,7 +39,7 @@ impl TeamBuilder {
INSERT INTO teams (id)
VALUES ($1)
",
team.id as TeamId,
team.id as DBTeamId,
)
.execute(&mut **transaction)
.await?;
@@ -111,18 +111,18 @@ impl TeamBuilder {
/// A team of users who control a project
pub struct Team {
/// The id of the team
pub id: TeamId,
pub id: DBTeamId,
}
#[derive(Deserialize, Serialize, Clone, Debug, Copy)]
pub enum TeamAssociationId {
Project(ProjectId),
Organization(OrganizationId),
Project(DBProjectId),
Organization(DBOrganizationId),
}
impl Team {
pub async fn get_association<'a, 'b, E>(
id: TeamId,
id: DBTeamId,
executor: E,
) -> Result<Option<TeamAssociationId>, super::DatabaseError>
where
@@ -133,14 +133,14 @@ impl Team {
SELECT m.id AS pid, NULL AS oid
FROM mods m
WHERE m.team_id = $1
UNION ALL
SELECT NULL AS pid, o.id AS oid
FROM organizations o
WHERE o.team_id = $1
WHERE o.team_id = $1
",
id as TeamId
id as DBTeamId
)
.fetch_optional(executor)
.await?;
@@ -150,11 +150,12 @@ impl Team {
let mut team_association_id = None;
if let Some(pid) = t.pid {
team_association_id =
Some(TeamAssociationId::Project(ProjectId(pid)));
Some(TeamAssociationId::Project(DBProjectId(pid)));
}
if let Some(oid) = t.oid {
team_association_id =
Some(TeamAssociationId::Organization(OrganizationId(oid)));
team_association_id = Some(TeamAssociationId::Organization(
DBOrganizationId(oid),
));
}
return Ok(team_association_id);
}
@@ -165,11 +166,11 @@ impl Team {
/// A member of a team
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct TeamMember {
pub id: TeamMemberId,
pub team_id: TeamId,
pub id: DBTeamMemberId,
pub team_id: DBTeamId,
/// The ID of the user associated with the member
pub user_id: UserId,
pub user_id: DBUserId,
pub role: String,
pub is_owner: bool,
@@ -189,7 +190,7 @@ pub struct TeamMember {
impl TeamMember {
// Lists the full members of a team
pub async fn get_from_team_full<'a, 'b, E>(
id: TeamId,
id: DBTeamId,
executor: E,
redis: &RedisPool,
) -> Result<Vec<TeamMember>, super::DatabaseError>
@@ -200,7 +201,7 @@ impl TeamMember {
}
pub async fn get_from_team_full_many<'a, E>(
team_ids: &[TeamId],
team_ids: &[DBTeamId],
exec: E,
redis: &RedisPool,
) -> Result<Vec<TeamMember>, super::DatabaseError>
@@ -229,8 +230,8 @@ impl TeamMember {
.fetch(exec)
.try_fold(DashMap::new(), |acc: DashMap<i64, Vec<TeamMember>>, m| {
let member = TeamMember {
id: TeamMemberId(m.id),
team_id: TeamId(m.team_id),
id: DBTeamMemberId(m.id),
team_id: DBTeamId(m.team_id),
role: m.member_role,
is_owner: m.is_owner,
permissions: ProjectPermissions::from_bits(m.permissions as u64)
@@ -239,7 +240,7 @@ impl TeamMember {
.organization_permissions
.map(|p| OrganizationPermissions::from_bits(p as u64).unwrap_or_default()),
accepted: m.accepted,
user_id: UserId(m.user_id),
user_id: DBUserId(m.user_id),
payouts_split: m.payouts_split,
ordering: m.ordering,
};
@@ -260,7 +261,7 @@ impl TeamMember {
}
pub async fn clear_cache(
id: TeamId,
id: DBTeamId,
redis: &RedisPool,
) -> Result<(), super::DatabaseError> {
let mut redis = redis.connect().await?;
@@ -270,8 +271,8 @@ impl TeamMember {
/// Gets a team member from a user id and team id. Does not return pending members.
pub async fn get_from_user_id<'a, 'b, E>(
id: TeamId,
user_id: UserId,
id: DBTeamId,
user_id: DBUserId,
executor: E,
) -> Result<Option<Self>, super::DatabaseError>
where
@@ -284,8 +285,8 @@ impl TeamMember {
/// Gets team members from user ids and team ids. Does not return pending members.
pub async fn get_from_user_id_many<'a, 'b, E>(
team_ids: &[TeamId],
user_id: UserId,
team_ids: &[DBTeamId],
user_id: DBUserId,
executor: E,
) -> Result<Vec<Self>, super::DatabaseError>
where
@@ -303,12 +304,12 @@ impl TeamMember {
ORDER BY ordering
",
&team_ids_parsed,
user_id as UserId
user_id as DBUserId
)
.fetch(executor)
.map_ok(|m| TeamMember {
id: TeamMemberId(m.id),
team_id: TeamId(m.team_id),
id: DBTeamMemberId(m.id),
team_id: DBTeamId(m.team_id),
user_id,
role: m.role,
is_owner: m.is_owner,
@@ -329,8 +330,8 @@ impl TeamMember {
/// Gets a team member from a user id and team id, including pending members.
pub async fn get_from_user_id_pending<'a, 'b, E>(
id: TeamId,
user_id: UserId,
id: DBTeamId,
user_id: DBUserId,
executor: E,
) -> Result<Option<Self>, super::DatabaseError>
where
@@ -341,20 +342,20 @@ impl TeamMember {
SELECT id, team_id, role AS member_role, is_owner, permissions, organization_permissions,
accepted, payouts_split, role,
ordering, user_id
FROM team_members
WHERE (team_id = $1 AND user_id = $2)
ORDER BY ordering
",
id as TeamId,
user_id as UserId
id as DBTeamId,
user_id as DBUserId
)
.fetch_optional(executor)
.await?;
if let Some(m) = result {
Ok(Some(TeamMember {
id: TeamMemberId(m.id),
id: DBTeamMemberId(m.id),
team_id: id,
user_id,
role: m.role,
@@ -389,9 +390,9 @@ impl TeamMember {
$1, $2, $3, $4, $5, $6, $7, $8, $9
)
",
self.id as TeamMemberId,
self.team_id as TeamId,
self.user_id as UserId,
self.id as DBTeamMemberId,
self.team_id as DBTeamId,
self.user_id as DBUserId,
self.role,
self.permissions.bits() as i64,
self.organization_permissions.map(|p| p.bits() as i64),
@@ -406,8 +407,8 @@ impl TeamMember {
}
pub async fn delete(
id: TeamId,
user_id: UserId,
id: DBTeamId,
user_id: DBUserId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), super::DatabaseError> {
sqlx::query!(
@@ -415,8 +416,8 @@ impl TeamMember {
DELETE FROM team_members
WHERE (team_id = $1 AND user_id = $2 AND NOT is_owner = TRUE)
",
id as TeamId,
user_id as UserId,
id as DBTeamId,
user_id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@@ -426,8 +427,8 @@ impl TeamMember {
#[allow(clippy::too_many_arguments)]
pub async fn edit_team_member(
id: TeamId,
user_id: UserId,
id: DBTeamId,
user_id: DBUserId,
new_permissions: Option<ProjectPermissions>,
new_organization_permissions: Option<OrganizationPermissions>,
new_role: Option<String>,
@@ -445,8 +446,8 @@ impl TeamMember {
WHERE (team_id = $2 AND user_id = $3)
",
permissions.bits() as i64,
id as TeamId,
user_id as UserId,
id as DBTeamId,
user_id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@@ -460,8 +461,8 @@ impl TeamMember {
WHERE (team_id = $2 AND user_id = $3)
",
organization_permissions.bits() as i64,
id as TeamId,
user_id as UserId,
id as DBTeamId,
user_id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@@ -475,8 +476,8 @@ impl TeamMember {
WHERE (team_id = $2 AND user_id = $3)
",
role,
id as TeamId,
user_id as UserId,
id as DBTeamId,
user_id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@@ -490,8 +491,8 @@ impl TeamMember {
SET accepted = TRUE
WHERE (team_id = $1 AND user_id = $2)
",
id as TeamId,
user_id as UserId,
id as DBTeamId,
user_id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@@ -506,8 +507,8 @@ impl TeamMember {
WHERE (team_id = $2 AND user_id = $3)
",
payouts_split,
id as TeamId,
user_id as UserId,
id as DBTeamId,
user_id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@@ -521,8 +522,8 @@ impl TeamMember {
WHERE (team_id = $2 AND user_id = $3)
",
ordering,
id as TeamId,
user_id as UserId,
id as DBTeamId,
user_id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@@ -536,8 +537,8 @@ impl TeamMember {
WHERE (team_id = $2 AND user_id = $3)
",
is_owner,
id as TeamId,
user_id as UserId,
id as DBTeamId,
user_id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@@ -547,8 +548,8 @@ impl TeamMember {
}
pub async fn get_from_user_id_project<'a, 'b, E>(
id: ProjectId,
user_id: UserId,
id: DBProjectId,
user_id: DBUserId,
allow_pending: bool,
executor: E,
) -> Result<Option<Self>, super::DatabaseError>
@@ -568,8 +569,8 @@ impl TeamMember {
INNER JOIN team_members tm ON tm.team_id = m.team_id AND user_id = $2 AND accepted = ANY($3)
WHERE m.id = $1
",
id as ProjectId,
user_id as UserId,
id as DBProjectId,
user_id as DBUserId,
&accepted
)
.fetch_optional(executor)
@@ -577,8 +578,8 @@ impl TeamMember {
if let Some(m) = result {
Ok(Some(TeamMember {
id: TeamMemberId(m.id),
team_id: TeamId(m.team_id),
id: DBTeamMemberId(m.id),
team_id: DBTeamId(m.team_id),
user_id,
role: m.role,
is_owner: m.is_owner,
@@ -600,8 +601,8 @@ impl TeamMember {
}
pub async fn get_from_user_id_organization<'a, 'b, E>(
id: OrganizationId,
user_id: UserId,
id: DBOrganizationId,
user_id: DBUserId,
allow_pending: bool,
executor: E,
) -> Result<Option<Self>, super::DatabaseError>
@@ -620,8 +621,8 @@ impl TeamMember {
INNER JOIN team_members tm ON tm.team_id = o.team_id AND user_id = $2 AND accepted = ANY($3)
WHERE o.id = $1
",
id as OrganizationId,
user_id as UserId,
id as DBOrganizationId,
user_id as DBUserId,
&accepted
)
.fetch_optional(executor)
@@ -629,8 +630,8 @@ impl TeamMember {
if let Some(m) = result {
Ok(Some(TeamMember {
id: TeamMemberId(m.id),
team_id: TeamId(m.team_id),
id: DBTeamMemberId(m.id),
team_id: DBTeamId(m.team_id),
user_id,
role: m.role,
is_owner: m.is_owner,
@@ -652,8 +653,8 @@ impl TeamMember {
}
pub async fn get_from_user_id_version<'a, 'b, E>(
id: VersionId,
user_id: UserId,
id: DBVersionId,
user_id: DBUserId,
executor: E,
) -> Result<Option<Self>, super::DatabaseError>
where
@@ -661,22 +662,22 @@ impl TeamMember {
{
let result = sqlx::query!(
"
SELECT tm.id, tm.team_id, tm.user_id, tm.role, tm.is_owner, tm.permissions, tm.organization_permissions, tm.accepted, tm.payouts_split, tm.ordering, v.mod_id
SELECT tm.id, tm.team_id, tm.user_id, tm.role, tm.is_owner, tm.permissions, tm.organization_permissions, tm.accepted, tm.payouts_split, tm.ordering, v.mod_id
FROM versions v
INNER JOIN mods m ON m.id = v.mod_id
INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.user_id = $2 AND tm.accepted = TRUE
WHERE v.id = $1
",
id as VersionId,
user_id as UserId
id as DBVersionId,
user_id as DBUserId
)
.fetch_optional(executor)
.await?;
if let Some(m) = result {
Ok(Some(TeamMember {
id: TeamMemberId(m.id),
team_id: TeamId(m.team_id),
id: DBTeamMemberId(m.id),
team_id: DBTeamId(m.team_id),
user_id,
role: m.role,
is_owner: m.is_owner,
@@ -702,7 +703,7 @@ impl TeamMember {
// - organization team member (a user's membership to a given organization that owns a given project)
pub async fn get_for_project_permissions<'a, 'b, E>(
project: &Project,
user_id: UserId,
user_id: DBUserId,
executor: E,
) -> Result<(Option<Self>, Option<Self>), super::DatabaseError>
where

View File

@@ -6,35 +6,35 @@ use serde::{Deserialize, Serialize};
pub struct ThreadBuilder {
pub type_: ThreadType,
pub members: Vec<UserId>,
pub project_id: Option<ProjectId>,
pub report_id: Option<ReportId>,
pub members: Vec<DBUserId>,
pub project_id: Option<DBProjectId>,
pub report_id: Option<DBReportId>,
}
#[derive(Clone, Serialize)]
pub struct Thread {
pub id: ThreadId,
pub id: DBThreadId,
pub project_id: Option<ProjectId>,
pub report_id: Option<ReportId>,
pub project_id: Option<DBProjectId>,
pub report_id: Option<DBReportId>,
pub type_: ThreadType,
pub messages: Vec<ThreadMessage>,
pub members: Vec<UserId>,
pub members: Vec<DBUserId>,
}
pub struct ThreadMessageBuilder {
pub author_id: Option<UserId>,
pub author_id: Option<DBUserId>,
pub body: MessageBody,
pub thread_id: ThreadId,
pub thread_id: DBThreadId,
pub hide_identity: bool,
}
#[derive(Serialize, Deserialize, Clone)]
pub struct ThreadMessage {
pub id: ThreadMessageId,
pub thread_id: ThreadId,
pub author_id: Option<UserId>,
pub id: DBThreadMessageId,
pub thread_id: DBThreadId,
pub author_id: Option<DBUserId>,
pub body: MessageBody,
pub created: DateTime<Utc>,
pub hide_identity: bool,
@@ -44,7 +44,7 @@ impl ThreadMessageBuilder {
pub async fn insert(
&self,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<ThreadMessageId, DatabaseError> {
) -> Result<DBThreadMessageId, DatabaseError> {
let thread_message_id = generate_thread_message_id(transaction).await?;
sqlx::query!(
@@ -56,10 +56,10 @@ impl ThreadMessageBuilder {
$1, $2, $3, $4, $5
)
",
thread_message_id as ThreadMessageId,
thread_message_id as DBThreadMessageId,
self.author_id.map(|x| x.0),
serde_json::value::to_value(self.body.clone())?,
self.thread_id as ThreadId,
self.thread_id as DBThreadId,
self.hide_identity
)
.execute(&mut **transaction)
@@ -73,7 +73,7 @@ impl ThreadBuilder {
pub async fn insert(
&self,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<ThreadId, DatabaseError> {
) -> Result<DBThreadId, DatabaseError> {
let thread_id = generate_thread_id(&mut *transaction).await?;
sqlx::query!(
"
@@ -84,7 +84,7 @@ impl ThreadBuilder {
$1, $2, $3, $4
)
",
thread_id as ThreadId,
thread_id as DBThreadId,
self.type_.as_str(),
self.project_id.map(|x| x.0),
self.report_id.map(|x| x.0),
@@ -113,7 +113,7 @@ impl ThreadBuilder {
impl Thread {
pub async fn get<'a, E>(
id: ThreadId,
id: DBThreadId,
exec: E,
) -> Result<Option<Thread>, sqlx::Error>
where
@@ -125,7 +125,7 @@ impl Thread {
}
pub async fn get_many<'a, E>(
thread_ids: &[ThreadId],
thread_ids: &[DBThreadId],
exec: E,
) -> Result<Vec<Thread>, sqlx::Error>
where
@@ -150,9 +150,9 @@ impl Thread {
)
.fetch(exec)
.map_ok(|x| Thread {
id: ThreadId(x.id),
project_id: x.mod_id.map(ProjectId),
report_id: x.report_id.map(ReportId),
id: DBThreadId(x.id),
project_id: x.mod_id.map(DBProjectId),
report_id: x.report_id.map(DBReportId),
type_: ThreadType::from_string(&x.thread_type),
messages: {
let mut messages: Vec<ThreadMessage> = serde_json::from_value(
@@ -163,7 +163,7 @@ impl Thread {
messages.sort_by(|a, b| a.created.cmp(&b.created));
messages
},
members: x.members.unwrap_or_default().into_iter().map(UserId).collect(),
members: x.members.unwrap_or_default().into_iter().map(DBUserId).collect(),
})
.try_collect::<Vec<Thread>>()
.await?;
@@ -172,7 +172,7 @@ impl Thread {
}
pub async fn remove_full(
id: ThreadId,
id: DBThreadId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<Option<()>, sqlx::error::Error> {
sqlx::query!(
@@ -180,7 +180,7 @@ impl Thread {
DELETE FROM threads_messages
WHERE thread_id = $1
",
id as ThreadId,
id as DBThreadId,
)
.execute(&mut **transaction)
.await?;
@@ -189,7 +189,7 @@ impl Thread {
DELETE FROM threads_members
WHERE thread_id = $1
",
id as ThreadId
id as DBThreadId
)
.execute(&mut **transaction)
.await?;
@@ -198,7 +198,7 @@ impl Thread {
DELETE FROM threads
WHERE id = $1
",
id as ThreadId,
id as DBThreadId,
)
.execute(&mut **transaction)
.await?;
@@ -209,7 +209,7 @@ impl Thread {
impl ThreadMessage {
pub async fn get<'a, E>(
id: ThreadMessageId,
id: DBThreadMessageId,
exec: E,
) -> Result<Option<ThreadMessage>, sqlx::Error>
where
@@ -221,7 +221,7 @@ impl ThreadMessage {
}
pub async fn get_many<'a, E>(
message_ids: &[ThreadMessageId],
message_ids: &[DBThreadMessageId],
exec: E,
) -> Result<Vec<ThreadMessage>, sqlx::Error>
where
@@ -241,9 +241,9 @@ impl ThreadMessage {
)
.fetch(exec)
.map_ok(|x| ThreadMessage {
id: ThreadMessageId(x.id),
thread_id: ThreadId(x.thread_id),
author_id: x.author_id.map(UserId),
id: DBThreadMessageId(x.id),
thread_id: DBThreadId(x.thread_id),
author_id: x.author_id.map(DBUserId),
body: serde_json::from_value(x.body).unwrap_or(MessageBody::Deleted { private: false }),
created: x.created,
hide_identity: x.hide_identity,
@@ -255,7 +255,7 @@ impl ThreadMessage {
}
pub async fn remove_full(
id: ThreadMessageId,
id: DBThreadMessageId,
private: bool,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<Option<()>, sqlx::error::Error> {
@@ -265,7 +265,7 @@ impl ThreadMessage {
SET body = $2
WHERE id = $1
",
id as ThreadMessageId,
id as DBThreadMessageId,
serde_json::to_value(MessageBody::Deleted { private })
.unwrap_or(serde_json::json!({}))
)

View File

@@ -1,9 +1,9 @@
use super::ids::{ProjectId, UserId};
use super::{CollectionId, ReportId, ThreadId};
use super::ids::{DBProjectId, DBUserId};
use super::{DBCollectionId, DBReportId, DBThreadId};
use crate::database::models;
use crate::database::models::charge_item::ChargeItem;
use crate::database::models::user_subscription_item::UserSubscriptionItem;
use crate::database::models::{DatabaseError, OrganizationId};
use crate::database::models::{DBOrganizationId, DatabaseError};
use crate::database::redis::RedisPool;
use crate::models::billing::ChargeStatus;
use crate::models::users::Badges;
@@ -20,7 +20,7 @@ const USERS_PROJECTS_NAMESPACE: &str = "users_projects";
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct User {
pub id: UserId,
pub id: DBUserId,
pub github_id: Option<i64>,
pub discord_id: Option<i64>,
@@ -72,7 +72,7 @@ impl User {
$14, $15, $16, $17, $18, $19, $20, $21
)
",
self.id as UserId,
self.id as DBUserId,
&self.username,
self.email.as_ref(),
self.avatar_url.as_ref(),
@@ -114,20 +114,20 @@ impl User {
}
pub async fn get_id<'a, 'b, E>(
id: UserId,
id: DBUserId,
executor: E,
redis: &RedisPool,
) -> Result<Option<User>, DatabaseError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
User::get_many(&[crate::models::ids::UserId::from(id)], executor, redis)
User::get_many(&[ariadne::ids::UserId::from(id)], executor, redis)
.await
.map(|x| x.into_iter().next())
}
pub async fn get_many_ids<'a, E>(
user_ids: &[UserId],
user_ids: &[DBUserId],
exec: E,
redis: &RedisPool,
) -> Result<Vec<User>, DatabaseError>
@@ -136,7 +136,7 @@ impl User {
{
let ids = user_ids
.iter()
.map(|x| crate::models::ids::UserId::from(*x))
.map(|x| ariadne::ids::UserId::from(*x))
.collect::<Vec<_>>();
User::get_many(&ids, exec, redis).await
}
@@ -188,7 +188,7 @@ impl User {
.fetch(exec)
.try_fold(DashMap::new(), |acc, u| {
let user = User {
id: UserId(u.id),
id: DBUserId(u.id),
github_id: u.github_id,
discord_id: u.discord_id,
gitlab_id: u.gitlab_id,
@@ -227,7 +227,7 @@ impl User {
pub async fn get_email<'a, E>(
email: &str,
exec: E,
) -> Result<Option<UserId>, sqlx::Error>
) -> Result<Option<DBUserId>, sqlx::Error>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
@@ -241,14 +241,14 @@ impl User {
.fetch_optional(exec)
.await?;
Ok(user_pass.map(|x| UserId(x.id)))
Ok(user_pass.map(|x| DBUserId(x.id)))
}
pub async fn get_projects<'a, E>(
user_id: UserId,
user_id: DBUserId,
exec: E,
redis: &RedisPool,
) -> Result<Vec<ProjectId>, DatabaseError>
) -> Result<Vec<DBProjectId>, DatabaseError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
@@ -257,7 +257,7 @@ impl User {
let mut redis = redis.connect().await?;
let cached_projects = redis
.get_deserialized_from_json::<Vec<ProjectId>>(
.get_deserialized_from_json::<Vec<DBProjectId>>(
USERS_PROJECTS_NAMESPACE,
&user_id.0.to_string(),
)
@@ -274,11 +274,11 @@ impl User {
WHERE tm.user_id = $1
ORDER BY m.downloads DESC
",
user_id as UserId,
user_id as DBUserId,
)
.fetch(exec)
.map_ok(|m| ProjectId(m.id))
.try_collect::<Vec<ProjectId>>()
.map_ok(|m| DBProjectId(m.id))
.try_collect::<Vec<DBProjectId>>()
.await?;
redis
@@ -294,9 +294,9 @@ impl User {
}
pub async fn get_organizations<'a, E>(
user_id: UserId,
user_id: DBUserId,
exec: E,
) -> Result<Vec<OrganizationId>, sqlx::Error>
) -> Result<Vec<DBOrganizationId>, sqlx::Error>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
@@ -308,20 +308,20 @@ impl User {
INNER JOIN team_members tm ON tm.team_id = o.team_id AND tm.accepted = TRUE
WHERE tm.user_id = $1
",
user_id as UserId,
user_id as DBUserId,
)
.fetch(exec)
.map_ok(|m| OrganizationId(m.id))
.try_collect::<Vec<OrganizationId>>()
.map_ok(|m| DBOrganizationId(m.id))
.try_collect::<Vec<DBOrganizationId>>()
.await?;
Ok(orgs)
}
pub async fn get_collections<'a, E>(
user_id: UserId,
user_id: DBUserId,
exec: E,
) -> Result<Vec<CollectionId>, sqlx::Error>
) -> Result<Vec<DBCollectionId>, sqlx::Error>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
@@ -332,20 +332,20 @@ impl User {
SELECT c.id FROM collections c
WHERE c.user_id = $1
",
user_id as UserId,
user_id as DBUserId,
)
.fetch(exec)
.map_ok(|m| CollectionId(m.id))
.try_collect::<Vec<CollectionId>>()
.map_ok(|m| DBCollectionId(m.id))
.try_collect::<Vec<DBCollectionId>>()
.await?;
Ok(projects)
}
pub async fn get_follows<'a, E>(
user_id: UserId,
user_id: DBUserId,
exec: E,
) -> Result<Vec<ProjectId>, sqlx::Error>
) -> Result<Vec<DBProjectId>, sqlx::Error>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
@@ -356,20 +356,20 @@ impl User {
SELECT mf.mod_id FROM mod_follows mf
WHERE mf.follower_id = $1
",
user_id as UserId,
user_id as DBUserId,
)
.fetch(exec)
.map_ok(|m| ProjectId(m.mod_id))
.try_collect::<Vec<ProjectId>>()
.map_ok(|m| DBProjectId(m.mod_id))
.try_collect::<Vec<DBProjectId>>()
.await?;
Ok(projects)
}
pub async fn get_reports<'a, E>(
user_id: UserId,
user_id: DBUserId,
exec: E,
) -> Result<Vec<ReportId>, sqlx::Error>
) -> Result<Vec<DBReportId>, sqlx::Error>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{
@@ -380,18 +380,18 @@ impl User {
SELECT r.id FROM reports r
WHERE r.user_id = $1
",
user_id as UserId,
user_id as DBUserId,
)
.fetch(exec)
.map_ok(|m| ReportId(m.id))
.try_collect::<Vec<ReportId>>()
.map_ok(|m| DBReportId(m.id))
.try_collect::<Vec<DBReportId>>()
.await?;
Ok(reports)
}
pub async fn get_backup_codes<'a, E>(
user_id: UserId,
user_id: DBUserId,
exec: E,
) -> Result<Vec<String>, sqlx::Error>
where
@@ -404,7 +404,7 @@ impl User {
SELECT code FROM user_backup_codes
WHERE user_id = $1
",
user_id as UserId,
user_id as DBUserId,
)
.fetch(exec)
.map_ok(|m| to_base62(m.code as u64))
@@ -415,7 +415,7 @@ impl User {
}
pub async fn clear_caches(
user_ids: &[(UserId, Option<String>)],
user_ids: &[(DBUserId, Option<String>)],
redis: &RedisPool,
) -> Result<(), DatabaseError> {
let mut redis = redis.connect().await?;
@@ -435,7 +435,7 @@ impl User {
}
pub async fn clear_project_cache(
user_ids: &[UserId],
user_ids: &[DBUserId],
redis: &RedisPool,
) -> Result<(), DatabaseError> {
let mut redis = redis.connect().await?;
@@ -452,7 +452,7 @@ impl User {
}
pub async fn remove(
id: UserId,
id: DBUserId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
redis: &RedisPool,
) -> Result<Option<()>, DatabaseError> {
@@ -462,7 +462,7 @@ impl User {
User::clear_caches(&[(id, Some(delete_user.username))], redis)
.await?;
let deleted_user: UserId =
let deleted_user: DBUserId =
crate::models::users::DELETED_USER.into();
sqlx::query!(
@@ -471,8 +471,8 @@ impl User {
SET user_id = $1
WHERE (user_id = $2 AND is_owner = TRUE)
",
deleted_user as UserId,
id as UserId,
deleted_user as DBUserId,
id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@@ -483,8 +483,8 @@ impl User {
SET author_id = $1
WHERE (author_id = $2)
",
deleted_user as UserId,
id as UserId,
deleted_user as DBUserId,
id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@@ -495,7 +495,7 @@ impl User {
SELECT n.id FROM notifications n
WHERE n.user_id = $1
",
id as UserId,
id as DBUserId,
)
.fetch(&mut **transaction)
.map_ok(|m| m.id)
@@ -507,7 +507,7 @@ impl User {
DELETE FROM notifications
WHERE user_id = $1
",
id as UserId,
id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@@ -528,10 +528,10 @@ impl User {
FROM collections
WHERE user_id = $1
",
id as UserId,
id as DBUserId,
)
.fetch(&mut **transaction)
.map_ok(|x| CollectionId(x.id))
.map_ok(|x| DBCollectionId(x.id))
.try_collect::<Vec<_>>()
.await?;
@@ -547,10 +547,10 @@ impl User {
INNER JOIN reports r ON t.report_id = r.id AND (r.user_id = $1 OR r.reporter = $1)
WHERE report_id IS NOT NULL
",
id as UserId,
id as DBUserId,
)
.fetch(&mut **transaction)
.map_ok(|x| ThreadId(x.id))
.map_ok(|x| DBThreadId(x.id))
.try_collect::<Vec<_>>()
.await?;
@@ -563,7 +563,7 @@ impl User {
DELETE FROM reports
WHERE user_id = $1 OR reporter = $1
",
id as UserId,
id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@@ -573,7 +573,7 @@ impl User {
DELETE FROM mod_follows
WHERE follower_id = $1
",
id as UserId,
id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@@ -583,7 +583,7 @@ impl User {
DELETE FROM team_members
WHERE user_id = $1
",
id as UserId,
id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@@ -593,7 +593,7 @@ impl User {
DELETE FROM payouts_values
WHERE user_id = $1
",
id as UserId,
id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@@ -603,7 +603,7 @@ impl User {
DELETE FROM payouts
WHERE user_id = $1
",
id as UserId,
id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@@ -614,8 +614,8 @@ impl User {
SET body = '{"type": "deleted"}', author_id = $2
WHERE author_id = $1
"#,
id as UserId,
deleted_user as UserId,
id as DBUserId,
deleted_user as DBUserId,
)
.execute(&mut **transaction)
.await?;
@@ -625,7 +625,7 @@ impl User {
DELETE FROM threads_members
WHERE user_id = $1
",
id as UserId,
id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@@ -635,7 +635,7 @@ impl User {
DELETE FROM sessions
WHERE user_id = $1
",
id as UserId,
id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@@ -645,7 +645,7 @@ impl User {
DELETE FROM pats
WHERE user_id = $1
",
id as UserId,
id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@@ -655,7 +655,7 @@ impl User {
DELETE FROM friends
WHERE user_id = $1 OR friend_id = $1
",
id as UserId,
id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@@ -683,8 +683,8 @@ impl User {
SET user_id = $1
WHERE user_id = $2
",
deleted_user as UserId,
id as UserId,
deleted_user as DBUserId,
id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@@ -694,7 +694,7 @@ impl User {
DELETE FROM user_backup_codes
WHERE user_id = $1
",
id as UserId,
id as DBUserId,
)
.execute(&mut **transaction)
.await?;
@@ -704,7 +704,7 @@ impl User {
DELETE FROM users
WHERE id = $1
",
id as UserId,
id as DBUserId,
)
.execute(&mut **transaction)
.await?;

View File

@@ -1,5 +1,5 @@
use crate::database::models::{
DatabaseError, ProductPriceId, UserId, UserSubscriptionId,
DBProductPriceId, DBUserId, DBUserSubscriptionId, DatabaseError,
};
use crate::models::billing::{
PriceDuration, SubscriptionMetadata, SubscriptionStatus,
@@ -9,9 +9,9 @@ use itertools::Itertools;
use std::convert::{TryFrom, TryInto};
pub struct UserSubscriptionItem {
pub id: UserSubscriptionId,
pub user_id: UserId,
pub price_id: ProductPriceId,
pub id: DBUserSubscriptionId,
pub user_id: DBUserId,
pub price_id: DBProductPriceId,
pub interval: PriceDuration,
pub created: DateTime<Utc>,
pub status: SubscriptionStatus,
@@ -48,9 +48,9 @@ impl TryFrom<UserSubscriptionResult> for UserSubscriptionItem {
fn try_from(r: UserSubscriptionResult) -> Result<Self, Self::Error> {
Ok(UserSubscriptionItem {
id: UserSubscriptionId(r.id),
user_id: UserId(r.user_id),
price_id: ProductPriceId(r.price_id),
id: DBUserSubscriptionId(r.id),
user_id: DBUserId(r.user_id),
price_id: DBProductPriceId(r.price_id),
interval: PriceDuration::from_string(&r.interval),
created: r.created,
status: SubscriptionStatus::from_string(&r.status),
@@ -61,14 +61,14 @@ impl TryFrom<UserSubscriptionResult> for UserSubscriptionItem {
impl UserSubscriptionItem {
pub async fn get(
id: UserSubscriptionId,
id: DBUserSubscriptionId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Option<UserSubscriptionItem>, DatabaseError> {
Ok(Self::get_many(&[id], exec).await?.into_iter().next())
}
pub async fn get_many(
ids: &[UserSubscriptionId],
ids: &[DBUserSubscriptionId],
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Vec<UserSubscriptionItem>, DatabaseError> {
let ids = ids.iter().map(|id| id.0).collect_vec();
@@ -87,7 +87,7 @@ impl UserSubscriptionItem {
}
pub async fn get_all_user(
user_id: UserId,
user_id: DBUserId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Vec<UserSubscriptionItem>, DatabaseError> {
let user_id = user_id.0;

View File

@@ -20,9 +20,9 @@ const VERSION_FILES_NAMESPACE: &str = "versions_files";
#[derive(Clone)]
pub struct VersionBuilder {
pub version_id: VersionId,
pub project_id: ProjectId,
pub author_id: UserId,
pub version_id: DBVersionId,
pub project_id: DBProjectId,
pub author_id: DBUserId,
pub name: String,
pub version_number: String,
pub changelog: String,
@@ -39,8 +39,8 @@ pub struct VersionBuilder {
#[derive(Clone)]
pub struct DependencyBuilder {
pub project_id: Option<ProjectId>,
pub version_id: Option<VersionId>,
pub project_id: Option<DBProjectId>,
pub version_id: Option<DBVersionId>,
pub file_name: Option<String>,
pub dependency_type: String,
}
@@ -48,7 +48,7 @@ pub struct DependencyBuilder {
impl DependencyBuilder {
pub async fn insert_many(
builders: Vec<Self>,
version_id: VersionId,
version_id: DBVersionId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), DatabaseError> {
let mut project_ids = Vec::new();
@@ -97,7 +97,7 @@ impl DependencyBuilder {
async fn try_get_project_id(
&self,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<Option<ProjectId>, DatabaseError> {
) -> Result<Option<DBProjectId>, DatabaseError> {
Ok(if let Some(project_id) = self.project_id {
Some(project_id)
} else if let Some(version_id) = self.version_id {
@@ -105,11 +105,11 @@ impl DependencyBuilder {
"
SELECT mod_id FROM versions WHERE id = $1
",
version_id as VersionId,
version_id as DBVersionId,
)
.fetch_optional(&mut **transaction)
.await?
.map(|x| ProjectId(x.mod_id))
.map(|x| DBProjectId(x.mod_id))
} else {
None
})
@@ -129,9 +129,9 @@ pub struct VersionFileBuilder {
impl VersionFileBuilder {
pub async fn insert(
self,
version_id: VersionId,
version_id: DBVersionId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<FileId, DatabaseError> {
) -> Result<DBFileId, DatabaseError> {
let file_id = generate_file_id(&mut *transaction).await?;
sqlx::query!(
@@ -139,8 +139,8 @@ impl VersionFileBuilder {
INSERT INTO files (id, version_id, url, filename, is_primary, size, file_type)
VALUES ($1, $2, $3, $4, $5, $6, $7)
",
file_id as FileId,
version_id as VersionId,
file_id as DBFileId,
version_id as DBVersionId,
self.url,
self.filename,
self.primary,
@@ -156,7 +156,7 @@ impl VersionFileBuilder {
INSERT INTO hashes (file_id, algorithm, hash)
VALUES ($1, $2, $3)
",
file_id as FileId,
file_id as DBFileId,
hash.algorithm,
hash.hash,
)
@@ -178,7 +178,7 @@ impl VersionBuilder {
pub async fn insert(
self,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<VersionId, DatabaseError> {
) -> Result<DBVersionId, DatabaseError> {
let version = Version {
id: self.version_id,
project_id: self.project_id,
@@ -203,7 +203,7 @@ impl VersionBuilder {
SET updated = NOW()
WHERE id = $1
",
self.project_id as ProjectId,
self.project_id as DBProjectId,
)
.execute(&mut **transaction)
.await?;
@@ -245,7 +245,7 @@ impl VersionBuilder {
#[derive(Serialize, Deserialize)]
pub struct LoaderVersion {
pub loader_id: LoaderId,
pub version_id: VersionId,
pub version_id: DBVersionId,
}
impl LoaderVersion {
@@ -274,9 +274,9 @@ impl LoaderVersion {
#[derive(Clone, Deserialize, Serialize, PartialEq, Eq)]
pub struct Version {
pub id: VersionId,
pub project_id: ProjectId,
pub author_id: UserId,
pub id: DBVersionId,
pub project_id: DBProjectId,
pub author_id: DBUserId,
pub name: String,
pub version_number: String,
pub changelog: String,
@@ -307,9 +307,9 @@ impl Version {
$9, $10, $11, $12
)
",
self.id as VersionId,
self.project_id as ProjectId,
self.author_id as UserId,
self.id as DBVersionId,
self.project_id as DBProjectId,
self.author_id as DBUserId,
&self.name,
&self.version_number,
self.changelog,
@@ -327,7 +327,7 @@ impl Version {
}
pub async fn remove_full(
id: VersionId,
id: DBVersionId,
redis: &RedisPool,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<Option<()>, DatabaseError> {
@@ -347,7 +347,7 @@ impl Version {
SET version_id = NULL
WHERE version_id = $1
",
id as VersionId,
id as DBVersionId,
)
.execute(&mut **transaction)
.await?;
@@ -357,7 +357,7 @@ impl Version {
DELETE FROM version_fields vf
WHERE vf.version_id = $1
",
id as VersionId,
id as DBVersionId,
)
.execute(&mut **transaction)
.await?;
@@ -367,7 +367,7 @@ impl Version {
DELETE FROM loaders_versions
WHERE loaders_versions.version_id = $1
",
id as VersionId,
id as DBVersionId,
)
.execute(&mut **transaction)
.await?;
@@ -381,7 +381,7 @@ impl Version {
(hashes.file_id = files.id)
)
",
id as VersionId
id as DBVersionId
)
.execute(&mut **transaction)
.await?;
@@ -391,7 +391,7 @@ impl Version {
DELETE FROM files
WHERE files.version_id = $1
",
id as VersionId,
id as DBVersionId,
)
.execute(&mut **transaction)
.await?;
@@ -402,7 +402,7 @@ impl Version {
"
SELECT mod_id FROM versions WHERE id = $1
",
id as VersionId,
id as DBVersionId,
)
.fetch_one(&mut **transaction)
.await?;
@@ -413,7 +413,7 @@ impl Version {
SET dependency_id = NULL, mod_dependency_id = $2
WHERE dependency_id = $1
",
id as VersionId,
id as DBVersionId,
project_id.mod_id,
)
.execute(&mut **transaction)
@@ -431,7 +431,7 @@ impl Version {
"
DELETE FROM dependencies WHERE dependent_id = $1
",
id as VersionId,
id as DBVersionId,
)
.execute(&mut **transaction)
.await?;
@@ -442,13 +442,13 @@ impl Version {
"
DELETE FROM versions WHERE id = $1
",
id as VersionId,
id as DBVersionId,
)
.execute(&mut **transaction)
.await?;
crate::database::models::Project::clear_cache(
ProjectId(project_id.mod_id),
DBProjectId(project_id.mod_id),
None,
None,
redis,
@@ -459,7 +459,7 @@ impl Version {
}
pub async fn get<'a, 'b, E>(
id: VersionId,
id: DBVersionId,
executor: E,
redis: &RedisPool,
) -> Result<Option<QueryVersion>, DatabaseError>
@@ -472,7 +472,7 @@ impl Version {
}
pub async fn get_many<'a, E>(
version_ids: &[VersionId],
version_ids: &[DBVersionId],
exec: E,
redis: &RedisPool,
) -> Result<Vec<QueryVersion>, DatabaseError>
@@ -486,7 +486,7 @@ impl Version {
let mut exec = exec.acquire().await?;
let loader_field_enum_value_ids = DashSet::new();
let version_fields: DashMap<VersionId, Vec<QueryVersionField>> = sqlx::query!(
let version_fields: DashMap<DBVersionId, Vec<QueryVersionField>> = sqlx::query!(
"
SELECT version_id, field_id, int_value, enum_value, string_value
FROM version_fields
@@ -497,9 +497,9 @@ impl Version {
.fetch(&mut *exec)
.try_fold(
DashMap::new(),
|acc: DashMap<VersionId, Vec<QueryVersionField>>, m| {
|acc: DashMap<DBVersionId, Vec<QueryVersionField>>, m| {
let qvf = QueryVersionField {
version_id: VersionId(m.version_id),
version_id: DBVersionId(m.version_id),
field_id: LoaderFieldId(m.field_id),
int_value: m.int_value,
enum_value: if m.enum_value == -1 { None } else { Some(LoaderFieldEnumValueId(m.enum_value)) },
@@ -510,7 +510,7 @@ impl Version {
loader_field_enum_value_ids.insert(LoaderFieldEnumValueId(m.enum_value));
}
acc.entry(VersionId(m.version_id)).or_default().push(qvf);
acc.entry(DBVersionId(m.version_id)).or_default().push(qvf);
async move { Ok(acc) }
},
)
@@ -525,7 +525,7 @@ impl Version {
}
let loader_field_ids = DashSet::new();
let loaders_ptypes_games: DashMap<VersionId, VersionLoaderData> = sqlx::query!(
let loaders_ptypes_games: DashMap<DBVersionId, VersionLoaderData> = sqlx::query!(
"
SELECT DISTINCT version_id,
ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,
@@ -546,7 +546,7 @@ impl Version {
&version_ids
).fetch(&mut *exec)
.map_ok(|m| {
let version_id = VersionId(m.version_id);
let version_id = DBVersionId(m.version_id);
// Add loader fields to the set we need to fetch
let loader_loader_field_ids = m.loader_fields.unwrap_or_default().into_iter().map(LoaderFieldId).collect::<Vec<_>>();
@@ -614,14 +614,14 @@ impl Version {
#[derive(Deserialize)]
struct Hash {
pub file_id: FileId,
pub file_id: DBFileId,
pub algorithm: String,
pub hash: String,
}
#[derive(Deserialize)]
struct File {
pub id: FileId,
pub id: DBFileId,
pub url: String,
pub filename: String,
pub primary: bool,
@@ -631,7 +631,7 @@ impl Version {
let file_ids = DashSet::new();
let reverse_file_map = DashMap::new();
let files : DashMap<VersionId, Vec<File>> = sqlx::query!(
let files : DashMap<DBVersionId, Vec<File>> = sqlx::query!(
"
SELECT DISTINCT version_id, f.id, f.url, f.filename, f.is_primary, f.size, f.file_type
FROM files f
@@ -639,9 +639,9 @@ impl Version {
",
&version_ids
).fetch(&mut *exec)
.try_fold(DashMap::new(), |acc : DashMap<VersionId, Vec<File>>, m| {
.try_fold(DashMap::new(), |acc : DashMap<DBVersionId, Vec<File>>, m| {
let file = File {
id: FileId(m.id),
id: DBFileId(m.id),
url: m.url,
filename: m.filename,
primary: m.is_primary,
@@ -649,17 +649,17 @@ impl Version {
file_type: m.file_type.map(|x| FileType::from_string(&x)),
};
file_ids.insert(FileId(m.id));
reverse_file_map.insert(FileId(m.id), VersionId(m.version_id));
file_ids.insert(DBFileId(m.id));
reverse_file_map.insert(DBFileId(m.id), DBVersionId(m.version_id));
acc.entry(VersionId(m.version_id))
acc.entry(DBVersionId(m.version_id))
.or_default()
.push(file);
async move { Ok(acc) }
}
).await?;
let hashes: DashMap<VersionId, Vec<Hash>> = sqlx::query!(
let hashes: DashMap<DBVersionId, Vec<Hash>> = sqlx::query!(
"
SELECT DISTINCT file_id, algorithm, encode(hash, 'escape') hash
FROM hashes
@@ -668,15 +668,15 @@ impl Version {
&file_ids.iter().map(|x| x.0).collect::<Vec<_>>()
)
.fetch(&mut *exec)
.try_fold(DashMap::new(), |acc: DashMap<VersionId, Vec<Hash>>, m| {
.try_fold(DashMap::new(), |acc: DashMap<DBVersionId, Vec<Hash>>, m| {
if let Some(found_hash) = m.hash {
let hash = Hash {
file_id: FileId(m.file_id),
file_id: DBFileId(m.file_id),
algorithm: m.algorithm,
hash: found_hash,
};
if let Some(version_id) = reverse_file_map.get(&FileId(m.file_id)) {
if let Some(version_id) = reverse_file_map.get(&DBFileId(m.file_id)) {
acc.entry(*version_id).or_default().push(hash);
}
}
@@ -684,7 +684,7 @@ impl Version {
})
.await?;
let dependencies : DashMap<VersionId, Vec<QueryDependency>> = sqlx::query!(
let dependencies : DashMap<DBVersionId, Vec<QueryDependency>> = sqlx::query!(
"
SELECT DISTINCT dependent_id as version_id, d.mod_dependency_id as dependency_project_id, d.dependency_id as dependency_version_id, d.dependency_file_name as file_name, d.dependency_type as dependency_type
FROM dependencies d
@@ -694,13 +694,13 @@ impl Version {
).fetch(&mut *exec)
.try_fold(DashMap::new(), |acc : DashMap<_,Vec<QueryDependency>>, m| {
let dependency = QueryDependency {
project_id: m.dependency_project_id.map(ProjectId),
version_id: m.dependency_version_id.map(VersionId),
project_id: m.dependency_project_id.map(DBProjectId),
version_id: m.dependency_version_id.map(DBVersionId),
file_name: m.file_name,
dependency_type: m.dependency_type,
};
acc.entry(VersionId(m.version_id))
acc.entry(DBVersionId(m.version_id))
.or_default()
.push(dependency);
async move { Ok(acc) }
@@ -719,7 +719,7 @@ impl Version {
)
.fetch(&mut *exec)
.try_fold(DashMap::new(), |acc, v| {
let version_id = VersionId(v.id);
let version_id = DBVersionId(v.id);
let VersionLoaderData {
loaders,
project_types,
@@ -737,9 +737,9 @@ impl Version {
let query_version = QueryVersion {
inner: Version {
id: VersionId(v.id),
project_id: ProjectId(v.mod_id),
author_id: UserId(v.author_id),
id: DBVersionId(v.id),
project_id: DBProjectId(v.mod_id),
author_id: DBUserId(v.author_id),
name: v.version_name,
version_number: v.version_number,
changelog: v.changelog,
@@ -812,7 +812,7 @@ impl Version {
pub async fn get_file_from_hash<'a, 'b, E>(
algo: String,
hash: String,
version_id: Option<VersionId>,
version_id: Option<DBVersionId>,
executor: E,
redis: &RedisPool,
) -> Result<Option<SingleFile>, DatabaseError>
@@ -873,9 +873,9 @@ impl Version {
let key = format!("{algorithm}_{hash}");
let file = SingleFile {
id: FileId(f.id),
version_id: VersionId(f.version_id),
project_id: ProjectId(f.mod_id),
id: DBFileId(f.id),
version_id: DBVersionId(f.version_id),
project_id: DBProjectId(f.mod_id),
url: f.url,
filename: f.filename,
hashes,
@@ -940,15 +940,15 @@ pub struct QueryVersion {
#[derive(Clone, Deserialize, Serialize, PartialEq, Eq)]
pub struct QueryDependency {
pub project_id: Option<ProjectId>,
pub version_id: Option<VersionId>,
pub project_id: Option<DBProjectId>,
pub version_id: Option<DBVersionId>,
pub file_name: Option<String>,
pub dependency_type: String,
}
#[derive(Clone, Deserialize, Serialize, PartialEq, Eq)]
pub struct QueryFile {
pub id: FileId,
pub id: DBFileId,
pub url: String,
pub filename: String,
pub hashes: HashMap<String, String>,
@@ -959,9 +959,9 @@ pub struct QueryFile {
#[derive(Clone, Deserialize, Serialize)]
pub struct SingleFile {
pub id: FileId,
pub version_id: VersionId,
pub project_id: ProjectId,
pub id: DBFileId,
pub version_id: DBVersionId,
pub project_id: DBProjectId,
pub url: String,
pub filename: String,
pub hashes: HashMap<String, String>,
@@ -1037,11 +1037,11 @@ mod tests {
date_published: DateTime<Utc>,
) -> Version {
Version {
id: VersionId(id),
id: DBVersionId(id),
ordering,
date_published,
project_id: ProjectId(0),
author_id: UserId(0),
project_id: DBProjectId(0),
author_id: DBUserId(0),
name: Default::default(),
version_number: Default::default(),
changelog: Default::default(),