Payments/subscriptions support (#943)

* [wip] Payments/subscriptions support

* finish

* working payment flow

* finish subscriptions, lint, clippy, etc

* docker compose
This commit is contained in:
Geometrically
2024-08-14 17:14:52 -07:00
committed by GitHub
parent 60edbcd5f0
commit 1d0d8d7fbe
71 changed files with 4009 additions and 1101 deletions

6
.env
View File

@@ -84,6 +84,7 @@ SMTP_HOST=none
SITE_VERIFY_EMAIL_PATH=none SITE_VERIFY_EMAIL_PATH=none
SITE_RESET_PASSWORD_PATH=none SITE_RESET_PASSWORD_PATH=none
SITE_BILLING_PATH=none
BEEHIIV_PUBLICATION_ID=none BEEHIIV_PUBLICATION_ID=none
BEEHIIV_API_KEY=none BEEHIIV_API_KEY=none
@@ -99,4 +100,7 @@ MAXMIND_LICENSE_KEY=none
PAYOUTS_BUDGET=100 PAYOUTS_BUDGET=100
FLAME_ANVIL_URL=none FLAME_ANVIL_URL=none
STRIPE_API_KEY=none
STRIPE_WEBHOOK_SECRET=none

View File

@@ -18,7 +18,7 @@ jobs:
# Start Docker Compose # Start Docker Compose
- name: Start Docker Compose - name: Start Docker Compose
run: docker-compose up -d run: docker compose up -d
- name: Install cargo tarpaulin - name: Install cargo tarpaulin
uses: taiki-e/install-action@cargo-tarpaulin uses: taiki-e/install-action@cargo-tarpaulin

View File

@@ -26,7 +26,7 @@ jobs:
# Start Docker Compose # Start Docker Compose
- name: Start Docker Compose - name: Start Docker Compose
run: docker-compose up -d run: docker compose up -d
- uses: actions-rs/toolchain@v1 - uses: actions-rs/toolchain@v1
name: Install toolchain name: Install toolchain

View File

@@ -0,0 +1,64 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n id, user_id, price_id, interval, created, expires, last_charge, status\n FROM users_subscriptions\n WHERE id = ANY($1::bigint[])",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "user_id",
"type_info": "Int8"
},
{
"ordinal": 2,
"name": "price_id",
"type_info": "Int8"
},
{
"ordinal": 3,
"name": "interval",
"type_info": "Text"
},
{
"ordinal": 4,
"name": "created",
"type_info": "Timestamptz"
},
{
"ordinal": 5,
"name": "expires",
"type_info": "Timestamptz"
},
{
"ordinal": 6,
"name": "last_charge",
"type_info": "Timestamptz"
},
{
"ordinal": 7,
"name": "status",
"type_info": "Varchar"
}
],
"parameters": {
"Left": [
"Int8Array"
]
},
"nullable": [
false,
false,
false,
false,
false,
false,
true,
false
]
},
"hash": "07afad3b85ed64acbe9584570fdec92f923abf17439f0011e2b46797cec0ad97"
}

View File

@@ -0,0 +1,21 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO users_subscriptions (\n id, user_id, price_id, interval, created, expires, last_charge, status\n )\n VALUES (\n $1, $2, $3, $4, $5, $6, $7, $8\n )\n ON CONFLICT (id)\n DO UPDATE\n SET interval = EXCLUDED.interval,\n expires = EXCLUDED.expires,\n last_charge = EXCLUDED.last_charge,\n status = EXCLUDED.status\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int8",
"Int8",
"Int8",
"Text",
"Timestamptz",
"Timestamptz",
"Timestamptz",
"Varchar"
]
},
"nullable": []
},
"hash": "0e7a1aaa7999dcae156e1b1194232a12742a24740e48dd0d99582a79da873383"
}

View File

@@ -0,0 +1,34 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT id, metadata, unitary\n FROM products\n WHERE id = ANY($1::bigint[])",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "metadata",
"type_info": "Jsonb"
},
{
"ordinal": 2,
"name": "unitary",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Int8Array"
]
},
"nullable": [
false,
false,
false
]
},
"hash": "37da053e79c32173d7420edbe9d2f668c8bf7e00f3ca3ae4abd60a7aa36c943b"
}

View File

@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "SELECT EXISTS(SELECT 1 FROM products WHERE id=$1)",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "exists",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
null
]
},
"hash": "4065dd9c79f220db9daa3e162d791eeeddd9b913fb848602dca5e35570a56b27"
}

View File

@@ -0,0 +1,15 @@
{
"db_name": "PostgreSQL",
"query": "\n UPDATE users\n SET stripe_customer_id = $1\n WHERE id = $2\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text",
"Int8"
]
},
"nullable": []
},
"hash": "494610831c7303d9cb3c033ff94af80fcc428014795c719fcafe1272db2c0177"
}

View File

@@ -0,0 +1,40 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT id, product_id, prices, currency_code\n FROM products_prices\n WHERE product_id = ANY($1::bigint[])",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "product_id",
"type_info": "Int8"
},
{
"ordinal": 2,
"name": "prices",
"type_info": "Jsonb"
},
{
"ordinal": 3,
"name": "currency_code",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Int8Array"
]
},
"nullable": [
false,
false,
false,
false
]
},
"hash": "53845c65f224a5ab0526d2d02806bd82ee2c40bb32bbb1a72c3a625853caeed8"
}

View File

@@ -0,0 +1,64 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n id, user_id, price_id, interval, created, expires, last_charge, status\n FROM users_subscriptions\n WHERE expires < $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "user_id",
"type_info": "Int8"
},
{
"ordinal": 2,
"name": "price_id",
"type_info": "Int8"
},
{
"ordinal": 3,
"name": "interval",
"type_info": "Text"
},
{
"ordinal": 4,
"name": "created",
"type_info": "Timestamptz"
},
{
"ordinal": 5,
"name": "expires",
"type_info": "Timestamptz"
},
{
"ordinal": 6,
"name": "last_charge",
"type_info": "Timestamptz"
},
{
"ordinal": 7,
"name": "status",
"type_info": "Varchar"
}
],
"parameters": {
"Left": [
"Timestamptz"
]
},
"nullable": [
false,
false,
false,
false,
false,
false,
true,
false
]
},
"hash": "61a87b00baaba022ab32eedf177d5b9dc6d5b7568cf1df15fac6c9e85acfa448"
}

View File

@@ -1,6 +1,6 @@
{ {
"db_name": "PostgreSQL", "db_name": "PostgreSQL",
"query": "\n SELECT id, name, email,\n avatar_url, username, bio,\n created, role, badges,\n balance,\n github_id, discord_id, gitlab_id, google_id, steam_id, microsoft_id,\n email_verified, password, totp_secret, paypal_id, paypal_country, paypal_email,\n venmo_handle\n FROM users\n WHERE id = ANY($1) OR LOWER(username) = ANY($2)\n ", "query": "\n SELECT id, name, email,\n avatar_url, username, bio,\n created, role, badges,\n balance,\n github_id, discord_id, gitlab_id, google_id, steam_id, microsoft_id,\n email_verified, password, totp_secret, paypal_id, paypal_country, paypal_email,\n venmo_handle, stripe_customer_id\n FROM users\n WHERE id = ANY($1) OR LOWER(username) = ANY($2)\n ",
"describe": { "describe": {
"columns": [ "columns": [
{ {
@@ -117,6 +117,11 @@
"ordinal": 22, "ordinal": 22,
"name": "venmo_handle", "name": "venmo_handle",
"type_info": "Text" "type_info": "Text"
},
{
"ordinal": 23,
"name": "stripe_customer_id",
"type_info": "Text"
} }
], ],
"parameters": { "parameters": {
@@ -148,8 +153,9 @@
true, true,
true, true,
true, true,
true,
true true
] ]
}, },
"hash": "a47456ecddbd1787301a2765168db0df31980ae48cb2ec37c323da10ba55a785" "hash": "6a0ca2045bd91a27dce32c730cb5238527ec210f20de13bd8995885159c6d277"
} }

View File

@@ -0,0 +1,15 @@
{
"db_name": "PostgreSQL",
"query": "\n UPDATE users\n SET badges = $1\n WHERE (id = $2)\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int8",
"Int8"
]
},
"nullable": []
},
"hash": "83ad5d39f795c631e1cba90cadd24c872c72bb4f37f0c2c9bdd58ca76d41cb7f"
}

View File

@@ -0,0 +1,14 @@
{
"db_name": "PostgreSQL",
"query": "\n DELETE FROM users_subscriptions\n WHERE id = $1\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": []
},
"hash": "88d135700420321a3896f9262bb663df0ac672d465d78445e48f321fc47e09cb"
}

View File

@@ -0,0 +1,14 @@
{
"db_name": "PostgreSQL",
"query": "\n DELETE FROM users_subscriptions\n WHERE id = $1\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": []
},
"hash": "b64651865cf9c1fbebed7f188da6566d53049176d72073c22a04b43adea18326"
}

View File

@@ -0,0 +1,34 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT id, metadata, unitary\n FROM products\n WHERE 1 = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "metadata",
"type_info": "Jsonb"
},
{
"ordinal": 2,
"name": "unitary",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Int4"
]
},
"nullable": [
false,
false,
false
]
},
"hash": "ba2e3eab0daba9698686cbf324351f5d0ddc7be1d1b650a86a43712786fd4a4d"
}

View File

@@ -1,6 +1,6 @@
{ {
"db_name": "PostgreSQL", "db_name": "PostgreSQL",
"query": "\n INSERT INTO users (\n id, username, name, email,\n avatar_url, bio, created,\n github_id, discord_id, gitlab_id, google_id, steam_id, microsoft_id,\n email_verified, password, paypal_id, paypal_country, paypal_email,\n venmo_handle\n )\n VALUES (\n $1, $2, $3, $4, $5,\n $6, $7,\n $8, $9, $10, $11, $12, $13,\n $14, $15, $16, $17, $18, $19\n )\n ", "query": "\n INSERT INTO users (\n id, username, name, email,\n avatar_url, bio, created,\n github_id, discord_id, gitlab_id, google_id, steam_id, microsoft_id,\n email_verified, password, paypal_id, paypal_country, paypal_email,\n venmo_handle, stripe_customer_id\n )\n VALUES (\n $1, $2, $3, $4, $5,\n $6, $7,\n $8, $9, $10, $11, $12, $13,\n $14, $15, $16, $17, $18, $19, $20\n )\n ",
"describe": { "describe": {
"columns": [], "columns": [],
"parameters": { "parameters": {
@@ -23,10 +23,11 @@
"Text", "Text",
"Text", "Text",
"Text", "Text",
"Text",
"Text" "Text"
] ]
}, },
"nullable": [] "nullable": []
}, },
"hash": "36c8a2fe704197539ee5010e94a03a48637ac9227d683e0c75eb2603ba156610" "hash": "c33fb3503d040fd91a049b10853f608166e1fa1f4ce5f655849874858d5f9e27"
} }

View File

@@ -0,0 +1,64 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n id, user_id, price_id, interval, created, expires, last_charge, status\n FROM users_subscriptions\n WHERE user_id = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "user_id",
"type_info": "Int8"
},
{
"ordinal": 2,
"name": "price_id",
"type_info": "Int8"
},
{
"ordinal": 3,
"name": "interval",
"type_info": "Text"
},
{
"ordinal": 4,
"name": "created",
"type_info": "Timestamptz"
},
{
"ordinal": 5,
"name": "expires",
"type_info": "Timestamptz"
},
{
"ordinal": 6,
"name": "last_charge",
"type_info": "Timestamptz"
},
{
"ordinal": 7,
"name": "status",
"type_info": "Varchar"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false,
false,
false,
false,
false,
false,
true,
false
]
},
"hash": "d6d3c29ff2aa3b311a19225cefcd5b8844fbe5bedf44ffe24f31b12e5bc5f868"
}

View File

@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "SELECT EXISTS(SELECT 1 FROM products_prices WHERE id=$1)",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "exists",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
null
]
},
"hash": "eb32f61d58b71eb55c348abe51bcc020a8ba20811d92cb6f2bcd225aa08b6210"
}

View File

@@ -0,0 +1,22 @@
{
"db_name": "PostgreSQL",
"query": "SELECT EXISTS(SELECT 1 FROM users_subscriptions WHERE id=$1)",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "exists",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
null
]
},
"hash": "f2711811ac8f67ead8e307259692b6a9bb08ac99448208895946cb010141cde2"
}

View File

@@ -0,0 +1,15 @@
{
"db_name": "PostgreSQL",
"query": "\n UPDATE users\n SET badges = $1\n WHERE (id = $2)\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int8",
"Int8"
]
},
"nullable": []
},
"hash": "f643ba5f92e5f76cc2f9d2016f52dc03483c1e578dd5ea39119fcf5ad58d8250"
}

View File

@@ -0,0 +1,40 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT id, product_id, prices, currency_code\n FROM products_prices\n WHERE id = ANY($1::bigint[])",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "product_id",
"type_info": "Int8"
},
{
"ordinal": 2,
"name": "prices",
"type_info": "Jsonb"
},
{
"ordinal": 3,
"name": "currency_code",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Int8Array"
]
},
"nullable": [
false,
false,
false,
false
]
},
"hash": "f786bd5afbde34fe166e5535a66ff53036be39958038eaf7c539fd8a9383b724"
}

1886
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -17,7 +17,7 @@ actix-multipart = "0.6.1"
actix-cors = "0.7.0" actix-cors = "0.7.0"
actix-ws = "0.2.5" actix-ws = "0.2.5"
actix-files = "0.6.5" actix-files = "0.6.5"
actix-web-prom = "0.7.0" actix-web-prom = { version = "0.8.0", features = ["process"]}
governor = "0.6.3" governor = "0.6.3"
tokio = { version = "1.35.1", features = ["sync"] } tokio = { version = "1.35.1", features = ["sync"] }
@@ -110,11 +110,15 @@ lettre = "0.11.3"
derive-new = "0.6.0" derive-new = "0.6.0"
rust_iso3166 = "0.1.11" rust_iso3166 = "0.1.11"
jemallocator = {version = "0.3.2", optional = true} jemallocator = {version = "0.5.4", optional = true}
async-stripe = { version = "0.37.3", features = ["runtime-tokio-hyper-rustls"] }
rusty-money = "0.4.1"
json-patch = "*"
[dev-dependencies] [dev-dependencies]
actix-http = "3.4.0" actix-http = "3.4.0"
json-patch = "*"
[profile.dev] [profile.dev]
opt-level = 0 # Minimal optimization, speeds up compilation opt-level = 0 # Minimal optimization, speeds up compilation
lto = false # Disables Link Time Optimization lto = false # Disables Link Time Optimization

View File

@@ -0,0 +1,34 @@
-- Billing schema: Stripe customer linkage, purchasable products,
-- per-currency product prices, and user subscriptions.

-- Stripe customer handle for a user; NULL until a customer is created.
ALTER TABLE users ADD COLUMN stripe_customer_id TEXT NULL;

-- A purchasable product. `metadata` is a JSON blob describing the product
-- (deserialized into `ProductMetadata` in the Rust model layer).
-- NOTE(review): `unitary` appears to mark products a user may hold at most
-- once — confirm against the purchase flow.
CREATE TABLE products (
id bigint PRIMARY KEY,
metadata jsonb NOT NULL,
unitary BOOLEAN NOT NULL DEFAULT FALSE
);

-- Price points for a product in one currency; `prices` is a JSON blob
-- (deserialized into `Price` in the Rust model layer).
CREATE TABLE products_prices (
id bigint PRIMARY KEY,
product_id bigint REFERENCES products NOT NULL,
currency_code text not null,
prices jsonb NOT NULL
);

-- A user's subscription to a specific price point.
-- `last_charge` is NULL until the first successful charge.
CREATE TABLE users_subscriptions (
id bigint PRIMARY KEY,
user_id bigint REFERENCES users NOT NULL,
price_id bigint REFERENCES products_prices NOT NULL,
interval text NOT NULL,
created timestamptz DEFAULT CURRENT_TIMESTAMP NOT NULL,
expires timestamptz DEFAULT CURRENT_TIMESTAMP NOT NULL,
last_charge timestamptz NULL,
status varchar(255) NOT NULL
);

-- One user per Stripe customer. In Postgres a unique index permits
-- multiple NULLs, so users without a Stripe customer are unaffected.
CREATE UNIQUE INDEX users_stripe_customer_id
ON users (stripe_customer_id);

-- Lookup indexes for the list queries (prices by product, subscriptions
-- by user).
CREATE INDEX products_prices_product
ON products_prices (product_id);
CREATE INDEX users_subscriptions_users
ON users_subscriptions (user_id);

View File

@@ -140,19 +140,16 @@ pub async fn filter_enlisted_projects_ids(
.collect::<Vec<_>>(), .collect::<Vec<_>>(),
user_id as database::models::ids::UserId, user_id as database::models::ids::UserId,
) )
.fetch_many(pool) .fetch(pool)
.try_for_each(|e| { .map_ok(|row| {
if let Some(row) = e.right() { for x in projects.iter() {
for x in projects.iter() { let bool = Some(x.id.0) == row.id && Some(x.team_id.0) == row.team_id;
let bool = Some(x.id.0) == row.id && Some(x.team_id.0) == row.team_id; if bool {
if bool { return_projects.push(x.id);
return_projects.push(x.id);
}
} }
} }
futures::future::ready(Ok(()))
}) })
.try_collect::<Vec<()>>()
.await?; .await?;
} }
Ok(return_projects) Ok(return_projects)

View File

@@ -3,6 +3,7 @@ pub mod email;
pub mod oauth; pub mod oauth;
pub mod templates; pub mod templates;
pub mod validate; pub mod validate;
pub use crate::auth::email::send_email;
pub use checks::{ pub use checks::{
filter_enlisted_projects_ids, filter_enlisted_version_ids, filter_visible_collections, filter_enlisted_projects_ids, filter_enlisted_version_ids, filter_visible_collections,
filter_visible_project_ids, filter_visible_projects, filter_visible_project_ids, filter_visible_projects,

View File

@@ -6,9 +6,9 @@ use crate::models::pats::Scopes;
use crate::models::users::{Role, User, UserId, UserPayoutData}; use crate::models::users::{Role, User, UserId, UserPayoutData};
use crate::queue::session::AuthQueue; use crate::queue::session::AuthQueue;
use crate::routes::internal::session::get_session_metadata; use crate::routes::internal::session::get_session_metadata;
use actix_web::http::header::{HeaderValue, AUTHORIZATION};
use actix_web::HttpRequest; use actix_web::HttpRequest;
use chrono::Utc; use chrono::Utc;
use reqwest::header::{HeaderValue, AUTHORIZATION};
pub async fn get_user_from_headers<'a, E>( pub async fn get_user_from_headers<'a, E>(
req: &HttpRequest, req: &HttpRequest,
@@ -69,6 +69,7 @@ where
venmo_handle: db_user.venmo_handle, venmo_handle: db_user.venmo_handle,
balance: db_user.balance, balance: db_user.balance,
}), }),
stripe_customer_id: db_user.stripe_customer_id,
}; };
if let Some(required_scopes) = required_scopes { if let Some(required_scopes) = required_scopes {

View File

@@ -108,15 +108,13 @@ impl Category {
ORDER BY c.ordering, c.category ORDER BY c.ordering, c.category
" "
) )
.fetch_many(exec) .fetch(exec)
.try_filter_map(|e| async { .map_ok(|c| Category {
Ok(e.right().map(|c| Category { id: CategoryId(c.id),
id: CategoryId(c.id), category: c.category,
category: c.category, project_type: c.project_type,
project_type: c.project_type, icon: c.icon,
icon: c.icon, header: c.category_header
header: c.category_header
}))
}) })
.try_collect::<Vec<Category>>() .try_collect::<Vec<Category>>()
.await?; .await?;
@@ -166,13 +164,11 @@ impl LinkPlatform {
SELECT id, name, donation FROM link_platforms SELECT id, name, donation FROM link_platforms
" "
) )
.fetch_many(exec) .fetch(exec)
.try_filter_map(|e| async { .map_ok(|c| LinkPlatform {
Ok(e.right().map(|c| LinkPlatform { id: LinkPlatformId(c.id),
id: LinkPlatformId(c.id), name: c.name,
name: c.name, donation: c.donation,
donation: c.donation,
}))
}) })
.try_collect::<Vec<LinkPlatform>>() .try_collect::<Vec<LinkPlatform>>()
.await?; .await?;
@@ -222,8 +218,8 @@ impl ReportType {
SELECT name FROM report_types SELECT name FROM report_types
" "
) )
.fetch_many(exec) .fetch(exec)
.try_filter_map(|e| async { Ok(e.right().map(|c| c.name)) }) .map_ok(|c| c.name)
.try_collect::<Vec<String>>() .try_collect::<Vec<String>>()
.await?; .await?;
@@ -272,8 +268,8 @@ impl ProjectType {
SELECT name FROM project_types SELECT name FROM project_types
" "
) )
.fetch_many(exec) .fetch(exec)
.try_filter_map(|e| async { Ok(e.right().map(|c| c.name)) }) .map_ok(|c| c.name)
.try_collect::<Vec<String>>() .try_collect::<Vec<String>>()
.await?; .await?;

View File

@@ -232,6 +232,30 @@ generate_ids!(
PayoutId PayoutId
); );
// Id generators for the new billing tables.
// NOTE(review): the generate_ids! macro is defined earlier in this file and
// is assumed to generate a random 8-byte id, retrying while the supplied
// EXISTS query reports a collision — confirm against the macro definition.
generate_ids!(
    pub generate_product_id,
    ProductId,
    8,
    "SELECT EXISTS(SELECT 1 FROM products WHERE id=$1)",
    ProductId
);

generate_ids!(
    pub generate_product_price_id,
    ProductPriceId,
    8,
    "SELECT EXISTS(SELECT 1 FROM products_prices WHERE id=$1)",
    ProductPriceId
);

generate_ids!(
    pub generate_user_subscription_id,
    UserSubscriptionId,
    8,
    "SELECT EXISTS(SELECT 1 FROM users_subscriptions WHERE id=$1)",
    UserSubscriptionId
);
#[derive(Copy, Clone, Debug, PartialEq, Eq, Type, Hash, Serialize, Deserialize)] #[derive(Copy, Clone, Debug, PartialEq, Eq, Type, Hash, Serialize, Deserialize)]
#[sqlx(transparent)] #[sqlx(transparent)]
pub struct UserId(pub i64); pub struct UserId(pub i64);
@@ -351,6 +375,17 @@ pub struct OAuthAccessTokenId(pub i64);
#[sqlx(transparent)] #[sqlx(transparent)]
pub struct PayoutId(pub i64); pub struct PayoutId(pub i64);
// Database-side id newtypes for the billing tables. These wrap i64 to match
// the bigint primary keys; `#[sqlx(transparent)]` lets sqlx encode/decode
// them as the inner integer.
#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash)]
#[sqlx(transparent)]
pub struct ProductId(pub i64);

#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash)]
#[sqlx(transparent)]
pub struct ProductPriceId(pub i64);

#[derive(Copy, Clone, Debug, Type, Serialize, Deserialize, Eq, PartialEq, Hash)]
#[sqlx(transparent)]
pub struct UserSubscriptionId(pub i64);
use crate::models::ids; use crate::models::ids;
impl From<ids::ProjectId> for ProjectId { impl From<ids::ProjectId> for ProjectId {
@@ -504,3 +539,35 @@ impl From<PayoutId> for ids::PayoutId {
ids::PayoutId(id.0 as u64) ids::PayoutId(id.0 as u64)
} }
} }
impl From<ids::ProductId> for ProductId {
fn from(id: ids::ProductId) -> Self {
ProductId(id.0 as i64)
}
}
impl From<ProductId> for ids::ProductId {
fn from(id: ProductId) -> Self {
ids::ProductId(id.0 as u64)
}
}
impl From<ids::ProductPriceId> for ProductPriceId {
fn from(id: ids::ProductPriceId) -> Self {
ProductPriceId(id.0 as i64)
}
}
impl From<ProductPriceId> for ids::ProductPriceId {
fn from(id: ProductPriceId) -> Self {
ids::ProductPriceId(id.0 as u64)
}
}
impl From<ids::UserSubscriptionId> for UserSubscriptionId {
fn from(id: ids::UserSubscriptionId) -> Self {
UserSubscriptionId(id.0 as i64)
}
}
impl From<UserSubscriptionId> for ids::UserSubscriptionId {
fn from(id: UserSubscriptionId) -> Self {
ids::UserSubscriptionId(id.0 as u64)
}
}

View File

@@ -135,24 +135,22 @@ impl Image {
report_id.map(|x| x.0), report_id.map(|x| x.0),
) )
.fetch_many(&mut **transaction) .fetch(&mut **transaction)
.try_filter_map(|e| async { .map_ok(|row| {
Ok(e.right().map(|row| { let id = ImageId(row.id);
let id = ImageId(row.id);
Image { Image {
id, id,
url: row.url, url: row.url,
size: row.size as u64, size: row.size as u64,
created: row.created, created: row.created,
owner_id: UserId(row.owner_id), owner_id: UserId(row.owner_id),
context: row.context, context: row.context,
project_id: row.mod_id.map(ProjectId), project_id: row.mod_id.map(ProjectId),
version_id: row.version_id.map(VersionId), version_id: row.version_id.map(VersionId),
thread_message_id: row.thread_message_id.map(ThreadMessageId), thread_message_id: row.thread_message_id.map(ThreadMessageId),
report_id: row.report_id.map(ReportId), report_id: row.report_id.map(ReportId),
} }
}))
}) })
.try_collect::<Vec<Image>>() .try_collect::<Vec<Image>>()
.await .await

View File

@@ -60,15 +60,13 @@ impl Game {
SELECT id, slug, name, icon_url, banner_url FROM games SELECT id, slug, name, icon_url, banner_url FROM games
", ",
) )
.fetch_many(exec) .fetch(exec)
.try_filter_map(|e| async { .map_ok(|x| Game {
Ok(e.right().map(|x| Game { id: GameId(x.id),
id: GameId(x.id), slug: x.slug,
slug: x.slug, name: x.name,
name: x.name, icon_url: x.icon_url,
icon_url: x.icon_url, banner_url: x.banner_url,
banner_url: x.banner_url,
}))
}) })
.try_collect::<Vec<Game>>() .try_collect::<Vec<Game>>()
.await?; .await?;
@@ -151,24 +149,21 @@ impl Loader {
GROUP BY l.id; GROUP BY l.id;
", ",
) )
.fetch_many(exec) .fetch(exec)
.try_filter_map(|e| async { .map_ok(|x| Loader {
Ok(e.right().map(|x| Loader { id: LoaderId(x.id),
id: LoaderId(x.id), loader: x.loader,
loader: x.loader, icon: x.icon,
icon: x.icon, supported_project_types: x
supported_project_types: x .project_types
.project_types .unwrap_or_default()
.unwrap_or_default() .iter()
.iter() .map(|x| x.to_string())
.map(|x| x.to_string()) .collect(),
.collect(), supported_games: x
supported_games: x .games
.games .unwrap_or_default(),
.unwrap_or_default(), metadata: x.metadata
metadata: x.metadata
}))
}) })
.try_collect::<Vec<_>>() .try_collect::<Vec<_>>()
.await?; .await?;
@@ -451,21 +446,22 @@ impl LoaderField {
FROM loader_fields lf FROM loader_fields lf
", ",
) )
.fetch_many(exec) .fetch(exec)
.try_filter_map(|e| async { .map_ok(|r| {
Ok(e.right().and_then(|r| { Some(LoaderField {
Some(LoaderField { id: LoaderFieldId(r.id),
id: LoaderFieldId(r.id), field_type: LoaderFieldType::build(&r.field_type, r.enum_type)?,
field_type: LoaderFieldType::build(&r.field_type, r.enum_type)?, field: r.field,
field: r.field, optional: r.optional,
optional: r.optional, min_val: r.min_val,
min_val: r.min_val, max_val: r.max_val,
max_val: r.max_val, })
})
}))
}) })
.try_collect::<Vec<LoaderField>>() .try_collect::<Vec<Option<LoaderField>>>()
.await?; .await?
.into_iter()
.flatten()
.collect();
redis redis
.set_serialized_to_json(LOADER_FIELDS_NAMESPACE_ALL, "", &result, None) .set_serialized_to_json(LOADER_FIELDS_NAMESPACE_ALL, "", &result, None)

View File

@@ -14,12 +14,14 @@ pub mod oauth_token_item;
pub mod organization_item; pub mod organization_item;
pub mod pat_item; pub mod pat_item;
pub mod payout_item; pub mod payout_item;
pub mod product_item;
pub mod project_item; pub mod project_item;
pub mod report_item; pub mod report_item;
pub mod session_item; pub mod session_item;
pub mod team_item; pub mod team_item;
pub mod thread_item; pub mod thread_item;
pub mod user_item; pub mod user_item;
pub mod user_subscription_item;
pub mod version_item; pub mod version_item;
pub use collection_item::Collection; pub use collection_item::Collection;

View File

@@ -110,35 +110,33 @@ impl Notification {
", ",
&notification_ids_parsed &notification_ids_parsed
) )
.fetch_many(exec) .fetch(exec)
.try_filter_map(|e| async { .map_ok(|row| {
Ok(e.right().map(|row| { let id = NotificationId(row.id);
let id = NotificationId(row.id);
Notification { Notification {
id, id,
user_id: UserId(row.user_id), user_id: UserId(row.user_id),
read: row.read, read: row.read,
created: row.created, created: row.created,
body: row.body.clone().and_then(|x| serde_json::from_value(x).ok()).unwrap_or_else(|| { body: row.body.clone().and_then(|x| serde_json::from_value(x).ok()).unwrap_or_else(|| {
if let Some(name) = row.name { if let Some(name) = row.name {
NotificationBody::LegacyMarkdown { NotificationBody::LegacyMarkdown {
notification_type: row.notification_type, notification_type: row.notification_type,
name, name,
text: row.text.unwrap_or_default(), text: row.text.unwrap_or_default(),
link: row.link.unwrap_or_default(), link: row.link.unwrap_or_default(),
actions: serde_json::from_value( actions: serde_json::from_value(
row.actions.unwrap_or_default(), row.actions.unwrap_or_default(),
) )
.ok() .ok()
.unwrap_or_default(), .unwrap_or_default(),
}
} else {
NotificationBody::Unknown
} }
}), } else {
} NotificationBody::Unknown
})) }
}),
}
}) })
.try_collect::<Vec<Notification>>() .try_collect::<Vec<Notification>>()
.await .await
@@ -173,35 +171,33 @@ impl Notification {
", ",
user_id as UserId user_id as UserId
) )
.fetch_many(exec) .fetch(exec)
.try_filter_map(|e| async { .map_ok(|row| {
Ok(e.right().map(|row| { let id = NotificationId(row.id);
let id = NotificationId(row.id);
Notification { Notification {
id, id,
user_id: UserId(row.user_id), user_id: UserId(row.user_id),
read: row.read, read: row.read,
created: row.created, created: row.created,
body: row.body.clone().and_then(|x| serde_json::from_value(x).ok()).unwrap_or_else(|| { body: row.body.clone().and_then(|x| serde_json::from_value(x).ok()).unwrap_or_else(|| {
if let Some(name) = row.name { if let Some(name) = row.name {
NotificationBody::LegacyMarkdown { NotificationBody::LegacyMarkdown {
notification_type: row.notification_type, notification_type: row.notification_type,
name, name,
text: row.text.unwrap_or_default(), text: row.text.unwrap_or_default(),
link: row.link.unwrap_or_default(), link: row.link.unwrap_or_default(),
actions: serde_json::from_value( actions: serde_json::from_value(
row.actions.unwrap_or_default(), row.actions.unwrap_or_default(),
) )
.ok() .ok()
.unwrap_or_default(), .unwrap_or_default(),
}
} else {
NotificationBody::Unknown
} }
}), } else {
} NotificationBody::Unknown
})) }
}),
}
}) })
.try_collect::<Vec<Notification>>() .try_collect::<Vec<Notification>>()
.await?; .await?;
@@ -242,8 +238,8 @@ impl Notification {
", ",
&notification_ids_parsed &notification_ids_parsed
) )
.fetch_many(&mut **transaction) .fetch(&mut **transaction)
.try_filter_map(|e| async { Ok(e.right().map(|x| UserId(x.user_id))) }) .map_ok(|x| UserId(x.user_id))
.try_collect::<Vec<_>>() .try_collect::<Vec<_>>()
.await?; .await?;
@@ -285,8 +281,8 @@ impl Notification {
", ",
&notification_ids_parsed &notification_ids_parsed
) )
.fetch_many(&mut **transaction) .fetch(&mut **transaction)
.try_filter_map(|e| async { Ok(e.right().map(|x| UserId(x.user_id))) }) .map_ok(|x| UserId(x.user_id))
.try_collect::<Vec<_>>() .try_collect::<Vec<_>>()
.await?; .await?;

View File

@@ -167,8 +167,8 @@ impl PersonalAccessToken {
", ",
user_id.0, user_id.0,
) )
.fetch_many(exec) .fetch(exec)
.try_filter_map(|e| async { Ok(e.right().map(|x| PatId(x.id))) }) .map_ok(|x| PatId(x.id))
.try_collect::<Vec<PatId>>() .try_collect::<Vec<PatId>>()
.await?; .await?;

View File

@@ -74,19 +74,17 @@ impl Payout {
", ",
&payout_ids.into_iter().map(|x| x.0).collect::<Vec<_>>() &payout_ids.into_iter().map(|x| x.0).collect::<Vec<_>>()
) )
.fetch_many(exec) .fetch(exec)
.try_filter_map(|e| async { .map_ok(|r| Payout {
Ok(e.right().map(|r| Payout { id: PayoutId(r.id),
id: PayoutId(r.id), user_id: UserId(r.user_id),
user_id: UserId(r.user_id), created: r.created,
created: r.created, status: PayoutStatus::from_string(&r.status),
status: PayoutStatus::from_string(&r.status), amount: r.amount,
amount: r.amount, method: r.method.map(|x| PayoutMethodType::from_string(&x)),
method: r.method.map(|x| PayoutMethodType::from_string(&x)), method_address: r.method_address,
method_address: r.method_address, platform_id: r.platform_id,
platform_id: r.platform_id, fee: r.fee,
fee: r.fee,
}))
}) })
.try_collect::<Vec<Payout>>() .try_collect::<Vec<Payout>>()
.await?; .await?;

View File

@@ -0,0 +1,248 @@
use crate::database::models::{product_item, DatabaseError, ProductId, ProductPriceId};
use crate::database::redis::RedisPool;
use crate::models::billing::{Price, ProductMetadata};
use dashmap::DashMap;
use itertools::Itertools;
use serde::{Deserialize, Serialize};
use std::convert::TryFrom;
use std::convert::TryInto;
// Redis namespace under which the full product list is cached (see
// QueryProduct::list below).
const PRODUCTS_NAMESPACE: &str = "products";

/// A row from the `products` table with its jsonb metadata parsed.
pub struct ProductItem {
    pub id: ProductId,
    /// Deserialized from the `metadata` jsonb column.
    pub metadata: ProductMetadata,
    /// NOTE(review): presumably marks products a user may hold at most
    /// once — confirm against the purchase flow.
    pub unitary: bool,
}
/// Raw row shape produced by `select_products_with_predicate!`; converted
/// into `ProductItem` by the `TryFrom` impl below, which parses `metadata`.
struct ProductResult {
    id: i64,
    metadata: serde_json::Value,
    unitary: bool,
}
/// Expands to a compile-checked `query_as!` over `products` with the given
/// SQL predicate appended and exactly one bound parameter.
///
/// NOTE(review): sqlx offline mode identifies queries by a hash of the full
/// query string, so the predicate literals passed at call sites must match
/// the cached `.sqlx` query files byte-for-byte — do not reword them.
macro_rules! select_products_with_predicate {
($predicate:tt, $param:ident) => {
sqlx::query_as!(
ProductResult,
r#"
SELECT id, metadata, unitary
FROM products
"#
+ $predicate,
$param
)
};
}
impl TryFrom<ProductResult> for ProductItem {
type Error = serde_json::Error;
fn try_from(r: ProductResult) -> Result<Self, Self::Error> {
Ok(ProductItem {
id: ProductId(r.id),
metadata: serde_json::from_value(r.metadata)?,
unitary: r.unitary,
})
}
}
impl ProductItem {
    /// Fetches one product by id; `Ok(None)` if it does not exist.
    pub async fn get(
        id: ProductId,
        exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
    ) -> Result<Option<ProductItem>, DatabaseError> {
        Ok(Self::get_many(&[id], exec).await?.into_iter().next())
    }

    /// Fetches the products whose ids are in `ids`; missing ids are simply
    /// absent from the result (order is whatever Postgres returns).
    pub async fn get_many(
        ids: &[ProductId],
        exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
    ) -> Result<Vec<ProductItem>, DatabaseError> {
        let ids = ids.iter().map(|id| id.0).collect_vec();
        let ids_ref: &[i64] = &ids;
        let results = select_products_with_predicate!("WHERE id = ANY($1::bigint[])", ids_ref)
            .fetch_all(exec)
            .await?;
        // TryFrom parses the jsonb metadata; a single malformed row aborts
        // the whole call with a serde error.
        Ok(results
            .into_iter()
            .map(|r| r.try_into())
            .collect::<Result<Vec<_>, serde_json::Error>>()?)
    }

    /// Fetches every product. The tautological `WHERE 1 = $1` predicate
    /// (bound to 1) exists only because the select macro requires exactly
    /// one bound parameter.
    pub async fn get_all(
        exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
    ) -> Result<Vec<ProductItem>, DatabaseError> {
        let one = 1;
        let results = select_products_with_predicate!("WHERE 1 = $1", one)
            .fetch_all(exec)
            .await?;
        Ok(results
            .into_iter()
            .map(|r| r.try_into())
            .collect::<Result<Vec<_>, serde_json::Error>>()?)
    }
}
/// A product joined with all of its price rows — the shape cached in Redis
/// and returned by `QueryProduct::list`.
#[derive(Deserialize, Serialize)]
pub struct QueryProduct {
    pub id: ProductId,
    pub metadata: ProductMetadata,
    pub unitary: bool,
    pub prices: Vec<ProductPriceItem>,
}
impl QueryProduct {
    /// Lists every product with its prices, reading through the Redis cache
    /// at ("products", "all"); on a miss, loads products and prices from
    /// Postgres, rebuilds the cache (no expiry passed here), and returns
    /// the fresh list.
    pub async fn list<'a, E>(exec: E, redis: &RedisPool) -> Result<Vec<QueryProduct>, DatabaseError>
    where
        E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
    {
        let mut redis = redis.connect().await?;

        // Fast path: cached JSON, if present and deserializable.
        let res: Option<Vec<QueryProduct>> = redis
            .get_deserialized_from_json(PRODUCTS_NAMESPACE, "all")
            .await?;

        if let Some(res) = res {
            return Ok(res);
        }

        let all_products = product_item::ProductItem::get_all(exec).await?;

        // Map of product id -> price rows for every product just loaded.
        let prices = product_item::ProductPriceItem::get_all_products_prices(
            &all_products.iter().map(|x| x.id).collect::<Vec<_>>(),
            exec,
        )
        .await?;

        let products = all_products
            .into_iter()
            .map(|x| QueryProduct {
                id: x.id,
                metadata: x.metadata,
                // `remove` consumes the entry from the map; each product id
                // is visited once, and products without prices get an empty
                // Vec via unwrap_or_default.
                prices: prices
                    .remove(&x.id)
                    .map(|x| x.1)
                    .unwrap_or_default()
                    .into_iter()
                    .map(|x| ProductPriceItem {
                        id: x.id,
                        product_id: x.product_id,
                        prices: x.prices,
                        currency_code: x.currency_code,
                    })
                    .collect(),
                unitary: x.unitary,
            })
            .collect::<Vec<_>>();

        redis
            .set_serialized_to_json(PRODUCTS_NAMESPACE, "all", &products, None)
            .await?;

        Ok(products)
    }
}
/// A row from the `products_prices` table: one product's price points in a
/// single currency, with the jsonb `prices` column parsed into `Price`.
#[derive(Deserialize, Serialize)]
pub struct ProductPriceItem {
    pub id: ProductPriceId,
    pub product_id: ProductId,
    pub prices: Price,
    pub currency_code: String,
}
/// Raw `products_prices` row as fetched by sqlx, before the JSON `prices`
/// column has been deserialized into a typed `Price`.
struct ProductPriceResult {
    id: i64,
    product_id: i64,
    prices: serde_json::Value,
    currency_code: String,
}
// Builds a `sqlx::query_as!` over `products_prices` with a caller-supplied
// SQL predicate and a single bind parameter. A macro is needed because
// `query_as!` requires its SQL to be assembled from compile-time literals.
macro_rules! select_prices_with_predicate {
    ($predicate:tt, $param:ident) => {
        sqlx::query_as!(
            ProductPriceResult,
            r#"
            SELECT id, product_id, prices, currency_code
            FROM products_prices
            "#
                + $predicate,
            $param
        )
    };
}
impl TryFrom<ProductPriceResult> for ProductPriceItem {
type Error = serde_json::Error;
fn try_from(r: ProductPriceResult) -> Result<Self, Self::Error> {
Ok(ProductPriceItem {
id: ProductPriceId(r.id),
product_id: ProductId(r.product_id),
prices: serde_json::from_value(r.prices)?,
currency_code: r.currency_code,
})
}
}
impl ProductPriceItem {
    /// Fetches a single price row by id, returning `None` if it does not exist.
    pub async fn get(
        id: ProductPriceId,
        exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
    ) -> Result<Option<ProductPriceItem>, DatabaseError> {
        Ok(Self::get_many(&[id], exec).await?.into_iter().next())
    }

    /// Fetches every price row whose id appears in `ids`.
    ///
    /// Fails if any row's JSON `prices` column cannot be deserialized.
    pub async fn get_many(
        ids: &[ProductPriceId],
        exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
    ) -> Result<Vec<ProductPriceItem>, DatabaseError> {
        let ids = ids.iter().map(|id| id.0).collect_vec();
        let ids_ref: &[i64] = &ids;
        let results = select_prices_with_predicate!("WHERE id = ANY($1::bigint[])", ids_ref)
            .fetch_all(exec)
            .await?;
        Ok(results
            .into_iter()
            .map(|r| r.try_into())
            .collect::<Result<Vec<_>, serde_json::Error>>()?)
    }

    /// Fetches all prices attached to a single product.
    pub async fn get_all_product_prices(
        product_id: ProductId,
        exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
    ) -> Result<Vec<ProductPriceItem>, DatabaseError> {
        let res = Self::get_all_products_prices(&[product_id], exec).await?;
        // `DashMap::remove` yields the (key, value) pair; keep the prices.
        Ok(res.remove(&product_id).map(|x| x.1).unwrap_or_default())
    }

    /// Fetches the prices of each product in `product_ids`, grouped by product.
    ///
    /// Products with no prices are simply absent from the returned map.
    pub async fn get_all_products_prices(
        product_ids: &[ProductId],
        exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
    ) -> Result<DashMap<ProductId, Vec<ProductPriceItem>>, DatabaseError> {
        let ids = product_ids.iter().map(|id| id.0).collect_vec();
        let ids_ref: &[i64] = &ids;

        let rows = select_prices_with_predicate!("WHERE product_id = ANY($1::bigint[])", ids_ref)
            .fetch_all(exec)
            .await?;

        // Propagate deserialization failures instead of silently dropping
        // malformed rows, matching the error behavior of `get_many`.
        let prices = DashMap::new();
        for row in rows {
            let item: ProductPriceItem = row.try_into()?;
            prices.entry(item.product_id).or_default().push(item);
        }
        Ok(prices)
    }
}

View File

@@ -358,8 +358,8 @@ impl Project {
", ",
id as ProjectId, id as ProjectId,
) )
.fetch_many(&mut **transaction) .fetch(&mut **transaction)
.try_filter_map(|e| async { Ok(e.right().map(|x| ThreadId(x.id))) }) .map_ok(|x| ThreadId(x.id))
.try_collect::<Vec<_>>() .try_collect::<Vec<_>>()
.await?; .await?;
@@ -443,8 +443,8 @@ impl Project {
", ",
project.inner.team_id as TeamId, project.inner.team_id as TeamId,
) )
.fetch_many(&mut **transaction) .fetch(&mut **transaction)
.try_filter_map(|e| async { Ok(e.right().map(|x| UserId(x.user_id))) }) .map_ok(|x| UserId(x.user_id))
.try_collect::<Vec<_>>() .try_collect::<Vec<_>>()
.await?; .await?;
@@ -874,19 +874,17 @@ impl Project {
", ",
id as ProjectId id as ProjectId
) )
.fetch_many(exec) .fetch(exec)
.try_filter_map(|e| async { .map_ok(|x| {
Ok(e.right().map(|x| { (
( x.dependency_id.map(VersionId),
x.dependency_id.map(VersionId), if x.mod_id == Some(0) {
if x.mod_id == Some(0) { None
None } else {
} else { x.mod_id.map(ProjectId)
x.mod_id.map(ProjectId) },
}, x.mod_dependency_id.map(ProjectId),
x.mod_dependency_id.map(ProjectId), )
)
}))
}) })
.try_collect::<Dependencies>() .try_collect::<Dependencies>()
.await?; .await?;

View File

@@ -86,20 +86,18 @@ impl Report {
", ",
&report_ids_parsed &report_ids_parsed
) )
.fetch_many(exec) .fetch(exec)
.try_filter_map(|e| async { .map_ok(|x| QueryReport {
Ok(e.right().map(|x| QueryReport { id: ReportId(x.id),
id: ReportId(x.id), report_type: x.name,
report_type: x.name, project_id: x.mod_id.map(ProjectId),
project_id: x.mod_id.map(ProjectId), version_id: x.version_id.map(VersionId),
version_id: x.version_id.map(VersionId), user_id: x.user_id.map(UserId),
user_id: x.user_id.map(UserId), body: x.body,
body: x.body, reporter: UserId(x.reporter),
reporter: UserId(x.reporter), created: x.created,
created: x.created, closed: x.closed,
closed: x.closed, thread_id: ThreadId(x.thread_id)
thread_id: ThreadId(x.thread_id)
}))
}) })
.try_collect::<Vec<QueryReport>>() .try_collect::<Vec<QueryReport>>()
.await?; .await?;

View File

@@ -220,8 +220,8 @@ impl Session {
", ",
user_id.0, user_id.0,
) )
.fetch_many(exec) .fetch(exec)
.try_filter_map(|e| async { Ok(e.right().map(|x| SessionId(x.id))) }) .map_ok(|x| SessionId(x.id))
.try_collect::<Vec<SessionId>>() .try_collect::<Vec<SessionId>>()
.await?; .await?;

View File

@@ -300,35 +300,25 @@ impl TeamMember {
&team_ids_parsed, &team_ids_parsed,
user_id as UserId user_id as UserId
) )
.fetch_many(executor) .fetch(executor)
.try_filter_map(|e| async { .map_ok(|m| TeamMember {
if let Some(m) = e.right() { id: TeamMemberId(m.id),
Ok(Some(Ok(TeamMember { team_id: TeamId(m.team_id),
id: TeamMemberId(m.id), user_id,
team_id: TeamId(m.team_id), role: m.role,
user_id, is_owner: m.is_owner,
role: m.role, permissions: ProjectPermissions::from_bits(m.permissions as u64)
is_owner: m.is_owner, .unwrap_or_default(),
permissions: ProjectPermissions::from_bits(m.permissions as u64) organization_permissions: m
.unwrap_or_default(), .organization_permissions
organization_permissions: m .map(|p| OrganizationPermissions::from_bits(p as u64).unwrap_or_default()),
.organization_permissions accepted: m.accepted,
.map(|p| OrganizationPermissions::from_bits(p as u64).unwrap_or_default()), payouts_split: m.payouts_split,
accepted: m.accepted, ordering: m.ordering,
payouts_split: m.payouts_split,
ordering: m.ordering,
})))
} else {
Ok(None)
}
}) })
.try_collect::<Vec<Result<TeamMember, super::DatabaseError>>>() .try_collect::<Vec<TeamMember>>()
.await?; .await?;
let team_members = team_members
.into_iter()
.collect::<Result<Vec<TeamMember>, super::DatabaseError>>()?;
Ok(team_members) Ok(team_members)
} }

View File

@@ -144,9 +144,8 @@ impl Thread {
", ",
&thread_ids_parsed &thread_ids_parsed
) )
.fetch_many(exec) .fetch(exec)
.try_filter_map(|e| async { .map_ok(|x| Thread {
Ok(e.right().map(|x| Thread {
id: ThreadId(x.id), id: ThreadId(x.id),
project_id: x.mod_id.map(ProjectId), project_id: x.mod_id.map(ProjectId),
report_id: x.report_id.map(ReportId), report_id: x.report_id.map(ReportId),
@@ -161,8 +160,7 @@ impl Thread {
messages messages
}, },
members: x.members.unwrap_or_default().into_iter().map(UserId).collect(), members: x.members.unwrap_or_default().into_iter().map(UserId).collect(),
})) })
})
.try_collect::<Vec<Thread>>() .try_collect::<Vec<Thread>>()
.await?; .await?;
@@ -236,17 +234,14 @@ impl ThreadMessage {
", ",
&message_ids_parsed &message_ids_parsed
) )
.fetch_many(exec) .fetch(exec)
.try_filter_map(|e| async { .map_ok(|x| ThreadMessage {
Ok(e.right().map(|x| ThreadMessage { id: ThreadMessageId(x.id),
id: ThreadMessageId(x.id), thread_id: ThreadId(x.thread_id),
thread_id: ThreadId(x.thread_id), author_id: x.author_id.map(UserId),
author_id: x.author_id.map(UserId), body: serde_json::from_value(x.body).unwrap_or(MessageBody::Deleted { private: false }),
body: serde_json::from_value(x.body) created: x.created,
.unwrap_or(MessageBody::Deleted { private: false }), hide_identity: x.hide_identity,
created: x.created,
hide_identity: x.hide_identity,
}))
}) })
.try_collect::<Vec<ThreadMessage>>() .try_collect::<Vec<ThreadMessage>>()
.await?; .await?;

View File

@@ -32,6 +32,7 @@ pub struct User {
pub paypal_country: Option<String>, pub paypal_country: Option<String>,
pub paypal_email: Option<String>, pub paypal_email: Option<String>,
pub venmo_handle: Option<String>, pub venmo_handle: Option<String>,
pub stripe_customer_id: Option<String>,
pub totp_secret: Option<String>, pub totp_secret: Option<String>,
@@ -60,13 +61,13 @@ impl User {
avatar_url, bio, created, avatar_url, bio, created,
github_id, discord_id, gitlab_id, google_id, steam_id, microsoft_id, github_id, discord_id, gitlab_id, google_id, steam_id, microsoft_id,
email_verified, password, paypal_id, paypal_country, paypal_email, email_verified, password, paypal_id, paypal_country, paypal_email,
venmo_handle venmo_handle, stripe_customer_id
) )
VALUES ( VALUES (
$1, $2, $3, $4, $5, $1, $2, $3, $4, $5,
$6, $7, $6, $7,
$8, $9, $10, $11, $12, $13, $8, $9, $10, $11, $12, $13,
$14, $15, $16, $17, $18, $19 $14, $15, $16, $17, $18, $19, $20
) )
", ",
self.id as UserId, self.id as UserId,
@@ -87,7 +88,8 @@ impl User {
self.paypal_id, self.paypal_id,
self.paypal_country, self.paypal_country,
self.paypal_email, self.paypal_email,
self.venmo_handle self.venmo_handle,
self.stripe_customer_id
) )
.execute(&mut **transaction) .execute(&mut **transaction)
.await?; .await?;
@@ -170,7 +172,7 @@ impl User {
balance, balance,
github_id, discord_id, gitlab_id, google_id, steam_id, microsoft_id, github_id, discord_id, gitlab_id, google_id, steam_id, microsoft_id,
email_verified, password, totp_secret, paypal_id, paypal_country, paypal_email, email_verified, password, totp_secret, paypal_id, paypal_country, paypal_email,
venmo_handle venmo_handle, stripe_customer_id
FROM users FROM users
WHERE id = ANY($1) OR LOWER(username) = ANY($2) WHERE id = ANY($1) OR LOWER(username) = ANY($2)
", ",
@@ -202,6 +204,7 @@ impl User {
paypal_country: u.paypal_country, paypal_country: u.paypal_country,
paypal_email: u.paypal_email, paypal_email: u.paypal_email,
venmo_handle: u.venmo_handle, venmo_handle: u.venmo_handle,
stripe_customer_id: u.stripe_customer_id,
totp_secret: u.totp_secret, totp_secret: u.totp_secret,
}; };
@@ -264,8 +267,8 @@ impl User {
", ",
user_id as UserId, user_id as UserId,
) )
.fetch_many(exec) .fetch(exec)
.try_filter_map(|e| async { Ok(e.right().map(|m| ProjectId(m.id))) }) .map_ok(|m| ProjectId(m.id))
.try_collect::<Vec<ProjectId>>() .try_collect::<Vec<ProjectId>>()
.await?; .await?;
@@ -293,8 +296,8 @@ impl User {
", ",
user_id as UserId, user_id as UserId,
) )
.fetch_many(exec) .fetch(exec)
.try_filter_map(|e| async { Ok(e.right().map(|m| OrganizationId(m.id))) }) .map_ok(|m| OrganizationId(m.id))
.try_collect::<Vec<OrganizationId>>() .try_collect::<Vec<OrganizationId>>()
.await?; .await?;
@@ -317,8 +320,8 @@ impl User {
", ",
user_id as UserId, user_id as UserId,
) )
.fetch_many(exec) .fetch(exec)
.try_filter_map(|e| async { Ok(e.right().map(|m| CollectionId(m.id))) }) .map_ok(|m| CollectionId(m.id))
.try_collect::<Vec<CollectionId>>() .try_collect::<Vec<CollectionId>>()
.await?; .await?;
@@ -341,8 +344,8 @@ impl User {
", ",
user_id as UserId, user_id as UserId,
) )
.fetch_many(exec) .fetch(exec)
.try_filter_map(|e| async { Ok(e.right().map(|m| to_base62(m.code as u64))) }) .map_ok(|m| to_base62(m.code as u64))
.try_collect::<Vec<String>>() .try_collect::<Vec<String>>()
.await?; .await?;
@@ -430,8 +433,8 @@ impl User {
", ",
id as UserId, id as UserId,
) )
.fetch_many(&mut **transaction) .fetch(&mut **transaction)
.try_filter_map(|e| async { Ok(e.right().map(|m| m.id)) }) .map_ok(|m| m.id)
.try_collect::<Vec<i64>>() .try_collect::<Vec<i64>>()
.await?; .await?;
@@ -463,8 +466,8 @@ impl User {
", ",
id as UserId, id as UserId,
) )
.fetch_many(&mut **transaction) .fetch(&mut **transaction)
.try_filter_map(|e| async { Ok(e.right().map(|x| CollectionId(x.id))) }) .map_ok(|x| CollectionId(x.id))
.try_collect::<Vec<_>>() .try_collect::<Vec<_>>()
.await?; .await?;
@@ -481,8 +484,8 @@ impl User {
", ",
id as UserId, id as UserId,
) )
.fetch_many(&mut **transaction) .fetch(&mut **transaction)
.try_filter_map(|e| async { Ok(e.right().map(|x| ThreadId(x.id))) }) .map_ok(|x| ThreadId(x.id))
.try_collect::<Vec<_>>() .try_collect::<Vec<_>>()
.await?; .await?;

View File

@@ -0,0 +1,153 @@
use crate::database::models::{DatabaseError, ProductPriceId, UserId, UserSubscriptionId};
use crate::models::billing::{PriceDuration, SubscriptionStatus};
use chrono::{DateTime, Utc};
use itertools::Itertools;
/// A user's subscription to a product price, as stored in the
/// `users_subscriptions` table.
pub struct UserSubscriptionItem {
    pub id: UserSubscriptionId,
    pub user_id: UserId,
    /// The product price this subscription is billed against.
    pub price_id: ProductPriceId,
    /// Billing interval (monthly/yearly).
    pub interval: PriceDuration,
    pub created: DateTime<Utc>,
    /// Rows with `expires` in the past are returned by `get_all_expired`.
    pub expires: DateTime<Utc>,
    /// When the subscription was last charged, if ever.
    pub last_charge: Option<DateTime<Utc>>,
    pub status: SubscriptionStatus,
}
/// Raw `users_subscriptions` row as fetched by sqlx, before the `interval`
/// and `status` strings are parsed into their enum types.
//
// All fields are uniformly private: the struct itself is module-private, so
// the stray `pub` markers on the last four fields had no effect.
struct UserSubscriptionResult {
    id: i64,
    user_id: i64,
    price_id: i64,
    interval: String,
    created: DateTime<Utc>,
    expires: DateTime<Utc>,
    last_charge: Option<DateTime<Utc>>,
    status: String,
}
// Builds a `sqlx::query_as!` over `users_subscriptions` with a
// caller-supplied SQL predicate and a single bind parameter. A macro is
// needed because `query_as!` requires its SQL to be assembled from
// compile-time literals.
macro_rules! select_user_subscriptions_with_predicate {
    ($predicate:tt, $param:ident) => {
        sqlx::query_as!(
            UserSubscriptionResult,
            r#"
            SELECT
            id, user_id, price_id, interval, created, expires, last_charge, status
            FROM users_subscriptions
            "#
                + $predicate,
            $param
        )
    };
}
impl From<UserSubscriptionResult> for UserSubscriptionItem {
    /// Converts a raw database row into a typed subscription item.
    ///
    /// `interval` and `status` are parsed with their `from_string` helpers,
    /// which map unrecognized values to their respective fallbacks.
    fn from(row: UserSubscriptionResult) -> Self {
        Self {
            id: UserSubscriptionId(row.id),
            user_id: UserId(row.user_id),
            price_id: ProductPriceId(row.price_id),
            interval: PriceDuration::from_string(&row.interval),
            status: SubscriptionStatus::from_string(&row.status),
            created: row.created,
            expires: row.expires,
            last_charge: row.last_charge,
        }
    }
}
impl UserSubscriptionItem {
pub async fn get(
id: UserSubscriptionId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Option<UserSubscriptionItem>, DatabaseError> {
Ok(Self::get_many(&[id], exec).await?.into_iter().next())
}
pub async fn get_many(
ids: &[UserSubscriptionId],
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Vec<UserSubscriptionItem>, DatabaseError> {
let ids = ids.iter().map(|id| id.0).collect_vec();
let ids_ref: &[i64] = &ids;
let results =
select_user_subscriptions_with_predicate!("WHERE id = ANY($1::bigint[])", ids_ref)
.fetch_all(exec)
.await?;
Ok(results.into_iter().map(|r| r.into()).collect())
}
pub async fn get_all_user(
user_id: UserId,
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Vec<UserSubscriptionItem>, DatabaseError> {
let user_id = user_id.0;
let results = select_user_subscriptions_with_predicate!("WHERE user_id = $1", user_id)
.fetch_all(exec)
.await?;
Ok(results.into_iter().map(|r| r.into()).collect())
}
pub async fn get_all_expired(
exec: impl sqlx::Executor<'_, Database = sqlx::Postgres>,
) -> Result<Vec<UserSubscriptionItem>, DatabaseError> {
let now = Utc::now();
let results = select_user_subscriptions_with_predicate!("WHERE expires < $1", now)
.fetch_all(exec)
.await?;
Ok(results.into_iter().map(|r| r.into()).collect())
}
pub async fn upsert(
&self,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), DatabaseError> {
sqlx::query!(
"
INSERT INTO users_subscriptions (
id, user_id, price_id, interval, created, expires, last_charge, status
)
VALUES (
$1, $2, $3, $4, $5, $6, $7, $8
)
ON CONFLICT (id)
DO UPDATE
SET interval = EXCLUDED.interval,
expires = EXCLUDED.expires,
last_charge = EXCLUDED.last_charge,
status = EXCLUDED.status
",
self.id.0,
self.user_id.0,
self.price_id.0,
self.interval.as_str(),
self.created,
self.expires,
self.last_charge,
self.status.as_str(),
)
.execute(&mut **transaction)
.await?;
Ok(())
}
pub async fn remove(
id: UserSubscriptionId,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), DatabaseError> {
sqlx::query!(
"
DELETE FROM users_subscriptions
WHERE id = $1
",
id.0 as i64
)
.execute(&mut **transaction)
.await?;
Ok(())
}
}

View File

@@ -56,7 +56,7 @@ pub async fn get_upload_url(
bucket_id: &str, bucket_id: &str,
) -> Result<UploadUrlData, FileHostingError> { ) -> Result<UploadUrlData, FileHostingError> {
let response = reqwest::Client::new() let response = reqwest::Client::new()
.post(&format!("{}/b2api/v2/b2_get_upload_url", authorization_data.api_url).to_string()) .post(format!("{}/b2api/v2/b2_get_upload_url", authorization_data.api_url).to_string())
.header(reqwest::header::CONTENT_TYPE, "application/json") .header(reqwest::header::CONTENT_TYPE, "application/json")
.header( .header(
reqwest::header::AUTHORIZATION, reqwest::header::AUTHORIZATION,

View File

@@ -58,6 +58,7 @@ pub struct LabrinthConfig {
pub active_sockets: web::Data<RwLock<ActiveSockets>>, pub active_sockets: web::Data<RwLock<ActiveSockets>>,
pub automated_moderation_queue: web::Data<AutomatedModerationQueue>, pub automated_moderation_queue: web::Data<AutomatedModerationQueue>,
pub rate_limiter: KeyedRateLimiter, pub rate_limiter: KeyedRateLimiter,
pub stripe_client: stripe::Client,
} }
pub fn app_setup( pub fn app_setup(
@@ -75,14 +76,16 @@ pub fn app_setup(
let automated_moderation_queue = web::Data::new(AutomatedModerationQueue::default()); let automated_moderation_queue = web::Data::new(AutomatedModerationQueue::default());
let automated_moderation_queue_ref = automated_moderation_queue.clone(); {
let pool_ref = pool.clone(); let automated_moderation_queue_ref = automated_moderation_queue.clone();
let redis_pool_ref = redis_pool.clone(); let pool_ref = pool.clone();
actix_rt::spawn(async move { let redis_pool_ref = redis_pool.clone();
automated_moderation_queue_ref actix_rt::spawn(async move {
.task(pool_ref, redis_pool_ref) automated_moderation_queue_ref
.await; .task(pool_ref, redis_pool_ref)
}); .await;
});
}
let mut scheduler = scheduler::Scheduler::new(); let mut scheduler = scheduler::Scheduler::new();
@@ -257,6 +260,17 @@ pub fn app_setup(
}); });
} }
let stripe_client = stripe::Client::new(dotenvy::var("STRIPE_API_KEY").unwrap());
{
let pool_ref = pool.clone();
let redis_ref = redis_pool.clone();
let stripe_client_ref = stripe_client.clone();
actix_rt::spawn(async move {
routes::internal::billing::task(stripe_client_ref, pool_ref, redis_ref).await;
});
}
let ip_salt = Pepper { let ip_salt = Pepper {
pepper: models::ids::Base62Id(models::ids::random_base62(11)).to_string(), pepper: models::ids::Base62Id(models::ids::random_base62(11)).to_string(),
}; };
@@ -279,6 +293,7 @@ pub fn app_setup(
active_sockets, active_sockets,
automated_moderation_queue, automated_moderation_queue,
rate_limiter: limiter, rate_limiter: limiter,
stripe_client,
} }
} }
@@ -311,6 +326,7 @@ pub fn app_config(cfg: &mut web::ServiceConfig, labrinth_config: LabrinthConfig)
.app_data(web::Data::new(labrinth_config.maxmind.clone())) .app_data(web::Data::new(labrinth_config.maxmind.clone()))
.app_data(labrinth_config.active_sockets.clone()) .app_data(labrinth_config.active_sockets.clone())
.app_data(labrinth_config.automated_moderation_queue.clone()) .app_data(labrinth_config.automated_moderation_queue.clone())
.app_data(web::Data::new(labrinth_config.stripe_client.clone()))
.configure(routes::v2::config) .configure(routes::v2::config)
.configure(routes::v3::config) .configure(routes::v3::config)
.configure(routes::internal::config) .configure(routes::internal::config)
@@ -416,6 +432,7 @@ pub fn check_env_vars() -> bool {
failed |= check_var::<String>("SITE_VERIFY_EMAIL_PATH"); failed |= check_var::<String>("SITE_VERIFY_EMAIL_PATH");
failed |= check_var::<String>("SITE_RESET_PASSWORD_PATH"); failed |= check_var::<String>("SITE_RESET_PASSWORD_PATH");
failed |= check_var::<String>("SITE_BILLING_PATH");
failed |= check_var::<String>("BEEHIIV_PUBLICATION_ID"); failed |= check_var::<String>("BEEHIIV_PUBLICATION_ID");
failed |= check_var::<String>("BEEHIIV_API_KEY"); failed |= check_var::<String>("BEEHIIV_API_KEY");
@@ -438,5 +455,8 @@ pub fn check_env_vars() -> bool {
failed |= check_var::<String>("FLAME_ANVIL_URL"); failed |= check_var::<String>("FLAME_ANVIL_URL");
failed |= check_var::<String>("STRIPE_API_KEY");
failed |= check_var::<String>("STRIPE_WEBHOOK_SECRET");
failed failed
} }

View File

@@ -18,7 +18,6 @@ pub struct Pepper {
pub pepper: String, pub pepper: String,
} }
#[cfg(not(tarpaulin_include))]
#[actix_rt::main] #[actix_rt::main]
async fn main() -> std::io::Result<()> { async fn main() -> std::io::Result<()> {
dotenvy::dotenv().ok(); dotenvy::dotenv().ok();

View File

@@ -3,6 +3,7 @@ pub mod v2;
pub mod v3; pub mod v3;
pub use v3::analytics; pub use v3::analytics;
pub use v3::billing;
pub use v3::collections; pub use v3::collections;
pub use v3::ids; pub use v3::ids;
pub use v3::images; pub use v3::images;

119
src/models/v3/billing.rs Normal file
View File

@@ -0,0 +1,119 @@
use crate::models::ids::Base62Id;
use crate::models::ids::UserId;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
/// The API-facing id of a purchasable product, serialized as base62.
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug)]
#[serde(from = "Base62Id")]
#[serde(into = "Base62Id")]
pub struct ProductId(pub u64);
/// A purchasable product together with all of its prices.
#[derive(Serialize, Deserialize)]
pub struct Product {
    pub id: ProductId,
    pub metadata: ProductMetadata,
    pub prices: Vec<ProductPrice>,
    // NOTE(review): presumably means "a user may hold at most one of this
    // product at a time" — confirm against the billing routes.
    pub unitary: bool,
}
/// Product-specific payload, serialized with a kebab-case `type` tag.
#[derive(Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "kebab-case")]
pub enum ProductMetadata {
    Midas,
}
/// The API-facing id of a product price, serialized as base62.
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug)]
#[serde(from = "Base62Id")]
#[serde(into = "Base62Id")]
pub struct ProductPriceId(pub u64);
/// A price entry for a product in a specific currency, as exposed by the API.
#[derive(Serialize, Deserialize)]
pub struct ProductPrice {
    pub id: ProductPriceId,
    pub product_id: ProductId,
    pub prices: Price,
    pub currency_code: String,
}
/// Pricing model for a product: either a single one-time charge or a
/// recurring charge keyed by billing interval.
// NOTE(review): amounts appear to be integers in the currency's minor unit
// (e.g. cents) — confirm against the Stripe integration.
#[derive(Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "kebab-case")]
pub enum Price {
    OneTime {
        price: i32,
    },
    Recurring {
        intervals: HashMap<PriceDuration, i32>,
    },
}
/// Billing interval for a recurring price, serialized in kebab-case.
#[derive(Serialize, Deserialize, Hash, Eq, PartialEq, Debug, Copy, Clone)]
#[serde(rename_all = "kebab-case")]
pub enum PriceDuration {
    Monthly,
    Yearly,
}
impl PriceDuration {
    /// Parses a stored interval string; anything unrecognized falls back
    /// to `Monthly`.
    pub fn from_string(string: &str) -> PriceDuration {
        match string {
            "yearly" => PriceDuration::Yearly,
            // "monthly" and every unknown value resolve to the default.
            _ => PriceDuration::Monthly,
        }
    }

    /// Returns the canonical string form used for storage and serialization.
    pub fn as_str(&self) -> &'static str {
        match self {
            PriceDuration::Monthly => "monthly",
            PriceDuration::Yearly => "yearly",
        }
    }
}
/// The API-facing id of a user subscription, serialized as base62.
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug)]
#[serde(from = "Base62Id")]
#[serde(into = "Base62Id")]
pub struct UserSubscriptionId(pub u64);
/// A user's subscription to a product price, as exposed by the API.
#[derive(Serialize, Deserialize)]
pub struct UserSubscription {
    pub id: UserSubscriptionId,
    pub user_id: UserId,
    /// The product price this subscription is billed against.
    pub price_id: ProductPriceId,
    /// Billing interval (monthly/yearly).
    pub interval: PriceDuration,
    pub status: SubscriptionStatus,
    pub created: DateTime<Utc>,
    pub expires: DateTime<Utc>,
    /// When the subscription was last charged, if ever.
    pub last_charge: Option<DateTime<Utc>>,
}
/// Lifecycle state of a subscription, serialized in kebab-case.
#[derive(Serialize, Deserialize, Eq, PartialEq)]
#[serde(rename_all = "kebab-case")]
pub enum SubscriptionStatus {
    Active,
    PaymentProcessing,
    PaymentFailed,
    Cancelled,
}
impl SubscriptionStatus {
    /// Parses a stored status string; anything unrecognized is treated as
    /// `Cancelled`.
    pub fn from_string(string: &str) -> SubscriptionStatus {
        match string {
            "active" => SubscriptionStatus::Active,
            "payment-processing" => SubscriptionStatus::PaymentProcessing,
            "payment-failed" => SubscriptionStatus::PaymentFailed,
            // "cancelled" and every unknown value resolve to the fallback.
            _ => SubscriptionStatus::Cancelled,
        }
    }

    /// Returns the canonical string form used for storage and serialization.
    pub fn as_str(&self) -> &'static str {
        match self {
            SubscriptionStatus::Active => "active",
            SubscriptionStatus::PaymentProcessing => "payment-processing",
            SubscriptionStatus::PaymentFailed => "payment-failed",
            SubscriptionStatus::Cancelled => "cancelled",
        }
    }
}

View File

@@ -1,5 +1,3 @@
use thiserror::Error;
pub use super::collections::CollectionId; pub use super::collections::CollectionId;
pub use super::images::ImageId; pub use super::images::ImageId;
pub use super::notifications::NotificationId; pub use super::notifications::NotificationId;
@@ -15,6 +13,9 @@ pub use super::teams::TeamId;
pub use super::threads::ThreadId; pub use super::threads::ThreadId;
pub use super::threads::ThreadMessageId; pub use super::threads::ThreadMessageId;
pub use super::users::UserId; pub use super::users::UserId;
pub use crate::models::billing::UserSubscriptionId;
pub use crate::models::v3::billing::{ProductId, ProductPriceId};
use thiserror::Error;
/// Generates a random 64 bit integer that is exactly `n` characters /// Generates a random 64 bit integer that is exactly `n` characters
/// long when encoded as base62. /// long when encoded as base62.
@@ -133,6 +134,9 @@ base62_id_impl!(OAuthClientId, OAuthClientId);
base62_id_impl!(OAuthRedirectUriId, OAuthRedirectUriId); base62_id_impl!(OAuthRedirectUriId, OAuthRedirectUriId);
base62_id_impl!(OAuthClientAuthorizationId, OAuthClientAuthorizationId); base62_id_impl!(OAuthClientAuthorizationId, OAuthClientAuthorizationId);
base62_id_impl!(PayoutId, PayoutId); base62_id_impl!(PayoutId, PayoutId);
base62_id_impl!(ProductId, ProductId);
base62_id_impl!(ProductPriceId, ProductPriceId);
base62_id_impl!(UserSubscriptionId, UserSubscriptionId);
pub mod base62_impl { pub mod base62_impl {
use serde::de::{self, Deserializer, Visitor}; use serde::de::{self, Deserializer, Visitor};

View File

@@ -1,4 +1,5 @@
pub mod analytics; pub mod analytics;
pub mod billing;
pub mod collections; pub mod collections;
pub mod ids; pub mod ids;
pub mod images; pub mod images;

View File

@@ -14,7 +14,6 @@ pub const DELETED_USER: UserId = UserId(127155982985829);
bitflags::bitflags! { bitflags::bitflags! {
#[derive(Copy, Clone, Debug)] #[derive(Copy, Clone, Debug)]
pub struct Badges: u64 { pub struct Badges: u64 {
// 1 << 0 unused - ignore + replace with something later
const MIDAS = 1 << 0; const MIDAS = 1 << 0;
const EARLY_MODPACK_ADOPTER = 1 << 1; const EARLY_MODPACK_ADOPTER = 1 << 1;
const EARLY_RESPACK_ADOPTER = 1 << 2; const EARLY_RESPACK_ADOPTER = 1 << 2;
@@ -53,6 +52,7 @@ pub struct User {
pub has_password: Option<bool>, pub has_password: Option<bool>,
pub has_totp: Option<bool>, pub has_totp: Option<bool>,
pub payout_data: Option<UserPayoutData>, pub payout_data: Option<UserPayoutData>,
pub stripe_customer_id: Option<String>,
// DEPRECATED. Always returns None // DEPRECATED. Always returns None
pub github_id: Option<u64>, pub github_id: Option<u64>,
@@ -86,6 +86,7 @@ impl From<DBUser> for User {
has_password: None, has_password: None,
has_totp: None, has_totp: None,
github_id: None, github_id: None,
stripe_customer_id: None,
} }
} }
} }

View File

@@ -291,8 +291,8 @@ impl PayoutsQueue {
pub id: String, pub id: String,
pub category: String, pub category: String,
pub name: String, pub name: String,
pub description: String, // pub description: String,
pub disclosure: String, // pub disclosure: String,
pub skus: Vec<Sku>, pub skus: Vec<Sku>,
pub currency_codes: Vec<String>, pub currency_codes: Vec<String>,
pub countries: Vec<ProductCountry>, pub countries: Vec<ProductCountry>,

View File

@@ -98,11 +98,8 @@ impl AuthQueue {
WHERE refresh_expires <= NOW() WHERE refresh_expires <= NOW()
" "
) )
.fetch_many(&mut *transaction) .fetch(&mut *transaction)
.try_filter_map(|e| async { .map_ok(|x| (SessionId(x.id), x.session, UserId(x.user_id)))
Ok(e.right()
.map(|x| (SessionId(x.id), x.session, UserId(x.user_id))))
})
.try_collect::<Vec<(SessionId, String, UserId)>>() .try_collect::<Vec<(SessionId, String, UserId)>>()
.await?; .await?;

File diff suppressed because it is too large Load Diff

View File

@@ -30,6 +30,7 @@ use rust_decimal::Decimal;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use sqlx::postgres::PgPool; use sqlx::postgres::PgPool;
use std::collections::HashMap; use std::collections::HashMap;
use std::str::FromStr;
use std::sync::Arc; use std::sync::Arc;
use tokio::sync::RwLock; use tokio::sync::RwLock;
use validator::Validate; use validator::Validate;
@@ -217,6 +218,7 @@ impl TempUser {
None None
}, },
venmo_handle: None, venmo_handle: None,
stripe_customer_id: None,
totp_secret: None, totp_secret: None,
username, username,
name: self.name, name: self.name,
@@ -680,7 +682,6 @@ impl AuthProvider {
pub id: String, pub id: String,
pub email: String, pub email: String,
pub name: Option<String>, pub name: Option<String>,
pub bio: Option<String>,
pub picture: Option<String>, pub picture: Option<String>,
} }
@@ -1523,6 +1524,7 @@ pub async fn create_account_with_password(
paypal_country: None, paypal_country: None,
paypal_email: None, paypal_email: None,
venmo_handle: None, venmo_handle: None,
stripe_customer_id: None,
totp_secret: None, totp_secret: None,
username: new_account.username.clone(), username: new_account.username.clone(),
name: Some(new_account.username), name: Some(new_account.username),
@@ -2157,6 +2159,7 @@ pub async fn set_email(
redis: Data<RedisPool>, redis: Data<RedisPool>,
email: web::Json<SetEmail>, email: web::Json<SetEmail>,
session_queue: Data<AuthQueue>, session_queue: Data<AuthQueue>,
stripe_client: Data<stripe::Client>,
) -> Result<HttpResponse, ApiError> { ) -> Result<HttpResponse, ApiError> {
email email
.0 .0
@@ -2197,6 +2200,22 @@ pub async fn set_email(
)?; )?;
} }
if let Some(customer_id) = user
.stripe_customer_id
.as_ref()
.and_then(|x| stripe::CustomerId::from_str(x).ok())
{
stripe::Customer::update(
&stripe_client,
&customer_id,
stripe::UpdateCustomer {
email: Some(&email.email),
..Default::default()
},
)
.await?;
}
let flow = Flow::ConfirmEmail { let flow = Flow::ConfirmEmail {
user_id: user.id.into(), user_id: user.id.into(),
confirm_email: email.email.clone(), confirm_email: email.email.clone(),

View File

@@ -1,4 +1,5 @@
pub(crate) mod admin; pub(crate) mod admin;
pub mod billing;
pub mod flows; pub mod flows;
pub mod moderation; pub mod moderation;
pub mod pats; pub mod pats;
@@ -17,6 +18,7 @@ pub fn config(cfg: &mut actix_web::web::ServiceConfig) {
.configure(session::config) .configure(session::config)
.configure(flows::config) .configure(flows::config)
.configure(pats::config) .configure(pats::config)
.configure(moderation::config), .configure(moderation::config)
.configure(billing::config),
); );
} }

View File

@@ -55,8 +55,8 @@ pub async fn get_projects(
ProjectStatus::Processing.as_str(), ProjectStatus::Processing.as_str(),
count.count as i64 count.count as i64
) )
.fetch_many(&**pool) .fetch(&**pool)
.try_filter_map(|e| async { Ok(e.right().map(|m| database::models::ProjectId(m.id))) }) .map_ok(|m| database::models::ProjectId(m.id))
.try_collect::<Vec<database::models::ProjectId>>() .try_collect::<Vec<database::models::ProjectId>>()
.await?; .await?;

View File

@@ -131,6 +131,8 @@ pub enum ApiError {
NotFound, NotFound,
#[error("You are being rate-limited. Please wait {0} milliseconds. 0/{1} remaining.")] #[error("You are being rate-limited. Please wait {0} milliseconds. 0/{1} remaining.")]
RateLimitError(u128, u32), RateLimitError(u128, u32),
#[error("Error while interacting with payment processor: {0}")]
Stripe(#[from] stripe::StripeError),
} }
impl ApiError { impl ApiError {
@@ -163,6 +165,7 @@ impl ApiError {
ApiError::Zip(..) => "zip_error", ApiError::Zip(..) => "zip_error",
ApiError::Io(..) => "io_error", ApiError::Io(..) => "io_error",
ApiError::RateLimitError(..) => "ratelimit_error", ApiError::RateLimitError(..) => "ratelimit_error",
ApiError::Stripe(..) => "stripe_error",
}, },
description: self.to_string(), description: self.to_string(),
} }
@@ -198,6 +201,7 @@ impl actix_web::ResponseError for ApiError {
ApiError::Zip(..) => StatusCode::BAD_REQUEST, ApiError::Zip(..) => StatusCode::BAD_REQUEST,
ApiError::Io(..) => StatusCode::BAD_REQUEST, ApiError::Io(..) => StatusCode::BAD_REQUEST,
ApiError::RateLimitError(..) => StatusCode::TOO_MANY_REQUESTS, ApiError::RateLimitError(..) => StatusCode::TOO_MANY_REQUESTS,
ApiError::Stripe(..) => StatusCode::FAILED_DEPENDENCY,
} }
} }

View File

@@ -1,6 +1,5 @@
use crate::database::redis::RedisPool; use crate::database::redis::RedisPool;
use crate::models::ids::ImageId; use crate::models::reports::Report;
use crate::models::reports::{ItemType, Report};
use crate::models::v2::reports::LegacyReport; use crate::models::v2::reports::LegacyReport;
use crate::queue::session::AuthQueue; use crate::queue::session::AuthQueue;
use crate::routes::{v2_reroute, v3, ApiError}; use crate::routes::{v2_reroute, v3, ApiError};
@@ -18,18 +17,6 @@ pub fn config(cfg: &mut web::ServiceConfig) {
cfg.service(report_get); cfg.service(report_get);
} }
#[derive(Deserialize, Validate)]
pub struct CreateReport {
pub report_type: String,
pub item_id: String,
pub item_type: ItemType,
pub body: String,
// Associations to uploaded images
#[validate(length(max = 10))]
#[serde(default)]
pub uploaded_images: Vec<ImageId>,
}
#[post("report")] #[post("report")]
pub async fn report_create( pub async fn report_create(
req: HttpRequest, req: HttpRequest,

View File

@@ -77,9 +77,9 @@ pub async fn organization_projects_get(
possible_organization_id.map(|x| x as i64), possible_organization_id.map(|x| x as i64),
info info
) )
.fetch_many(&**pool) .fetch(&**pool)
.try_filter_map(|e| async { Ok(e.right().map(|m| crate::database::models::ProjectId(m.id))) }) .map_ok(|m| database::models::ProjectId(m.id))
.try_collect::<Vec<crate::database::models::ProjectId>>() .try_collect::<Vec<database::models::ProjectId>>()
.await?; .await?;
let projects_data = let projects_data =
@@ -574,8 +574,8 @@ pub async fn organization_delete(
", ",
organization.id as database::models::ids::OrganizationId organization.id as database::models::ids::OrganizationId
) )
.fetch_many(&mut *transaction) .fetch(&mut *transaction)
.try_filter_map(|e| async { Ok(e.right().map(|c| crate::database::models::TeamId(c.id))) }) .map_ok(|c| database::models::TeamId(c.id))
.try_collect::<Vec<_>>() .try_collect::<Vec<_>>()
.await?; .await?;

View File

@@ -12,7 +12,7 @@ use actix_web::{delete, get, post, web, HttpRequest, HttpResponse};
use chrono::Utc; use chrono::Utc;
use hex::ToHex; use hex::ToHex;
use hmac::{Hmac, Mac, NewMac}; use hmac::{Hmac, Mac, NewMac};
use hyper::Method; use reqwest::Method;
use rust_decimal::Decimal; use rust_decimal::Decimal;
use serde::Deserialize; use serde::Deserialize;
use serde_json::json; use serde_json::json;

View File

@@ -98,8 +98,8 @@ pub async fn random_projects_get(
.map(|x| x.to_string()) .map(|x| x.to_string())
.collect::<Vec<String>>(), .collect::<Vec<String>>(),
) )
.fetch_many(&**pool) .fetch(&**pool)
.try_filter_map(|e| async { Ok(e.right().map(|m| db_ids::ProjectId(m.id))) }) .map_ok(|m| db_ids::ProjectId(m.id))
.try_collect::<Vec<_>>() .try_collect::<Vec<_>>()
.await?; .await?;
@@ -430,8 +430,8 @@ pub async fn project_edit(
", ",
project_item.inner.team_id as db_ids::TeamId project_item.inner.team_id as db_ids::TeamId
) )
.fetch_many(&mut *transaction) .fetch(&mut *transaction)
.try_filter_map(|e| async { Ok(e.right().map(|c| db_models::UserId(c.id))) }) .map_ok(|c| db_models::UserId(c.id))
.try_collect::<Vec<_>>() .try_collect::<Vec<_>>()
.await?; .await?;

View File

@@ -260,11 +260,8 @@ pub async fn reports(
", ",
count.count as i64 count.count as i64
) )
.fetch_many(&**pool) .fetch(&**pool)
.try_filter_map(|e| async { .map_ok(|m| crate::database::models::ids::ReportId(m.id))
Ok(e.right()
.map(|m| crate::database::models::ids::ReportId(m.id)))
})
.try_collect::<Vec<crate::database::models::ids::ReportId>>() .try_collect::<Vec<crate::database::models::ids::ReportId>>()
.await? .await?
} else { } else {
@@ -278,11 +275,8 @@ pub async fn reports(
user.id.0 as i64, user.id.0 as i64,
count.count as i64 count.count as i64
) )
.fetch_many(&**pool) .fetch(&**pool)
.try_filter_map(|e| async { .map_ok(|m| crate::database::models::ids::ReportId(m.id))
Ok(e.right()
.map(|m| crate::database::models::ids::ReportId(m.id)))
})
.try_collect::<Vec<crate::database::models::ids::ReportId>>() .try_collect::<Vec<crate::database::models::ids::ReportId>>()
.await? .await?
}; };

View File

@@ -129,22 +129,19 @@ pub async fn filter_authorized_threads(
&*project_thread_ids, &*project_thread_ids,
user_id as database::models::ids::UserId, user_id as database::models::ids::UserId,
) )
.fetch_many(&***pool) .fetch(&***pool)
.try_for_each(|e| { .map_ok(|row| {
if let Some(row) = e.right() { check_threads.retain(|x| {
check_threads.retain(|x| { let bool = x.project_id.map(|x| x.0) == Some(row.id);
let bool = x.project_id.map(|x| x.0) == Some(row.id);
if bool { if bool {
return_threads.push(x.clone()); return_threads.push(x.clone());
} }
!bool !bool
}); });
}
futures::future::ready(Ok(()))
}) })
.try_collect::<Vec<()>>()
.await?; .await?;
} }
@@ -165,22 +162,19 @@ pub async fn filter_authorized_threads(
&*project_thread_ids, &*project_thread_ids,
user_id as database::models::ids::UserId, user_id as database::models::ids::UserId,
) )
.fetch_many(&***pool) .fetch(&***pool)
.try_for_each(|e| { .map_ok(|row| {
if let Some(row) = e.right() { check_threads.retain(|x| {
check_threads.retain(|x| { let bool = x.project_id.map(|x| x.0) == Some(row.id);
let bool = x.project_id.map(|x| x.0) == Some(row.id);
if bool { if bool {
return_threads.push(x.clone()); return_threads.push(x.clone());
} }
!bool !bool
}); });
}
futures::future::ready(Ok(()))
}) })
.try_collect::<Vec<()>>()
.await?; .await?;
} }
@@ -199,22 +193,19 @@ pub async fn filter_authorized_threads(
&*report_thread_ids, &*report_thread_ids,
user_id as database::models::ids::UserId, user_id as database::models::ids::UserId,
) )
.fetch_many(&***pool) .fetch(&***pool)
.try_for_each(|e| { .map_ok(|row| {
if let Some(row) = e.right() { check_threads.retain(|x| {
check_threads.retain(|x| { let bool = x.report_id.map(|x| x.0) == Some(row.id);
let bool = x.report_id.map(|x| x.0) == Some(row.id);
if bool { if bool {
return_threads.push(x.clone()); return_threads.push(x.clone());
} }
!bool !bool
}); });
}
futures::future::ready(Ok(()))
}) })
.try_collect::<Vec<()>>()
.await?; .await?;
} }
} }

View File

@@ -610,11 +610,8 @@ pub async fn user_follows(
", ",
id as crate::database::models::ids::UserId, id as crate::database::models::ids::UserId,
) )
.fetch_many(&**pool) .fetch(&**pool)
.try_filter_map(|e| async { .map_ok(|m| crate::database::models::ProjectId(m.mod_id))
Ok(e.right()
.map(|m| crate::database::models::ProjectId(m.mod_id)))
})
.try_collect::<Vec<crate::database::models::ProjectId>>() .try_collect::<Vec<crate::database::models::ProjectId>>()
.await?; .await?;

View File

@@ -394,8 +394,8 @@ async fn version_create_inner(
", ",
builder.project_id as crate::database::models::ids::ProjectId builder.project_id as crate::database::models::ids::ProjectId
) )
.fetch_many(&mut **transaction) .fetch(&mut **transaction)
.try_filter_map(|e| async { Ok(e.right().map(|m| models::ids::UserId(m.follower_id))) }) .map_ok(|m| models::ids::UserId(m.follower_id))
.try_collect::<Vec<models::ids::UserId>>() .try_collect::<Vec<models::ids::UserId>>()
.await?; .await?;

View File

@@ -49,10 +49,8 @@ pub async fn index_local(pool: &PgPool) -> Result<Vec<UploadSearchProject>, Inde
.map(|x| x.to_string()) .map(|x| x.to_string())
.collect::<Vec<String>>(), .collect::<Vec<String>>(),
) )
.fetch_many(pool) .fetch(pool)
.try_filter_map(|e| async { .map_ok(|m| {
Ok(e.right().map(|m| {
PartialProject { PartialProject {
id: ProjectId(m.id), id: ProjectId(m.id),
name: m.name, name: m.name,
@@ -65,7 +63,7 @@ pub async fn index_local(pool: &PgPool) -> Result<Vec<UploadSearchProject>, Inde
slug: m.slug, slug: m.slug,
color: m.color, color: m.color,
license: m.license, license: m.license,
}})) }
}) })
.try_collect::<Vec<PartialProject>>() .try_collect::<Vec<PartialProject>>()
.await?; .await?;

View File

@@ -5,7 +5,6 @@ use crate::routes::ApiError;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use serde::Serialize; use serde::Serialize;
use sqlx::PgPool; use sqlx::PgPool;
use std::usize;
#[derive(Serialize)] #[derive(Serialize)]
struct DiscordEmbed { struct DiscordEmbed {

View File

@@ -1,5 +1,5 @@
use crate::validate::{filter_out_packs, SupportedGameVersions, ValidationError, ValidationResult}; use crate::validate::{filter_out_packs, SupportedGameVersions, ValidationError, ValidationResult};
use chrono::{DateTime, NaiveDateTime, Utc}; use chrono::DateTime;
use std::io::Cursor; use std::io::Cursor;
use zip::ZipArchive; use zip::ZipArchive;
@@ -16,10 +16,7 @@ impl super::Validator for ForgeValidator {
fn get_supported_game_versions(&self) -> SupportedGameVersions { fn get_supported_game_versions(&self) -> SupportedGameVersions {
// Time since release of 1.13, the first forge version which uses the new TOML system // Time since release of 1.13, the first forge version which uses the new TOML system
SupportedGameVersions::PastDate(DateTime::<Utc>::from_naive_utc_and_offset( SupportedGameVersions::PastDate(DateTime::from_timestamp(1540122067, 0).unwrap())
NaiveDateTime::from_timestamp_opt(1540122067, 0).unwrap(),
Utc,
))
} }
fn validate( fn validate(
@@ -55,14 +52,8 @@ impl super::Validator for LegacyForgeValidator {
fn get_supported_game_versions(&self) -> SupportedGameVersions { fn get_supported_game_versions(&self) -> SupportedGameVersions {
// Times between versions 1.5.2 to 1.12.2, which all use the legacy way of defining mods // Times between versions 1.5.2 to 1.12.2, which all use the legacy way of defining mods
SupportedGameVersions::Range( SupportedGameVersions::Range(
DateTime::from_naive_utc_and_offset( DateTime::from_timestamp(0, 0).unwrap(),
NaiveDateTime::from_timestamp_opt(0, 0).unwrap(), DateTime::from_timestamp(1540122066, 0).unwrap(),
Utc,
),
DateTime::from_naive_utc_and_offset(
NaiveDateTime::from_timestamp_opt(1540122066, 0).unwrap(),
Utc,
),
) )
} }

View File

@@ -1,5 +1,5 @@
use crate::validate::{filter_out_packs, SupportedGameVersions, ValidationError, ValidationResult}; use crate::validate::{filter_out_packs, SupportedGameVersions, ValidationError, ValidationResult};
use chrono::{DateTime, NaiveDateTime, Utc}; use chrono::DateTime;
use std::io::Cursor; use std::io::Cursor;
use zip::ZipArchive; use zip::ZipArchive;
@@ -15,10 +15,7 @@ impl super::Validator for QuiltValidator {
} }
fn get_supported_game_versions(&self) -> SupportedGameVersions { fn get_supported_game_versions(&self) -> SupportedGameVersions {
SupportedGameVersions::PastDate(DateTime::from_naive_utc_and_offset( SupportedGameVersions::PastDate(DateTime::from_timestamp(1646070100, 0).unwrap())
NaiveDateTime::from_timestamp_opt(1646070100, 0).unwrap(),
Utc,
))
} }
fn validate( fn validate(

View File

@@ -1,5 +1,5 @@
use crate::validate::{SupportedGameVersions, ValidationError, ValidationResult}; use crate::validate::{SupportedGameVersions, ValidationError, ValidationResult};
use chrono::{DateTime, NaiveDateTime, Utc}; use chrono::DateTime;
use std::io::Cursor; use std::io::Cursor;
use zip::ZipArchive; use zip::ZipArchive;
@@ -16,10 +16,7 @@ impl super::Validator for PackValidator {
fn get_supported_game_versions(&self) -> SupportedGameVersions { fn get_supported_game_versions(&self) -> SupportedGameVersions {
// Time since release of 13w24a which replaced texture packs with resource packs // Time since release of 13w24a which replaced texture packs with resource packs
SupportedGameVersions::PastDate(DateTime::from_naive_utc_and_offset( SupportedGameVersions::PastDate(DateTime::from_timestamp(1371137542, 0).unwrap())
NaiveDateTime::from_timestamp_opt(1371137542, 0).unwrap(),
Utc,
))
} }
fn validate( fn validate(
@@ -50,14 +47,8 @@ impl super::Validator for TexturePackValidator {
fn get_supported_game_versions(&self) -> SupportedGameVersions { fn get_supported_game_versions(&self) -> SupportedGameVersions {
// a1.2.2a to 13w23b // a1.2.2a to 13w23b
SupportedGameVersions::Range( SupportedGameVersions::Range(
DateTime::from_naive_utc_and_offset( DateTime::from_timestamp(1289339999, 0).unwrap(),
NaiveDateTime::from_timestamp_opt(1289339999, 0).unwrap(), DateTime::from_timestamp(1370651522, 0).unwrap(),
Utc,
),
DateTime::from_naive_utc_and_offset(
NaiveDateTime::from_timestamp_opt(1370651522, 0).unwrap(),
Utc,
),
) )
} }