Add launcher analytics (#661)

* Add more analytics

* finish hydra move

* Finish websocket flow

* add minecraft account flow

* Finish playtime vals + payout automation
This commit is contained in:
Geometrically
2023-08-02 14:43:04 -07:00
committed by GitHub
parent 4bb47d7e01
commit 039d26feeb
49 changed files with 2636 additions and 743 deletions

View File

@@ -10,12 +10,14 @@ use crate::models::pats::Scopes;
use crate::models::users::{Badges, Role};
use crate::parse_strings_from_var;
use crate::queue::session::AuthQueue;
use crate::queue::socket::ActiveSockets;
use crate::routes::ApiError;
use crate::util::captcha::check_turnstile_captcha;
use crate::util::ext::{get_image_content_type, get_image_ext};
use crate::util::validate::{validation_errors_to_string, RE_URL_SAFE};
use actix_web::web::{scope, Data, Query, ServiceConfig};
use actix_web::web::{scope, Data, Payload, Query, ServiceConfig};
use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
use actix_ws::Closed;
use argon2::password_hash::SaltString;
use argon2::{Argon2, PasswordHash, PasswordHasher, PasswordVerifier};
use chrono::{Duration, Utc};
@@ -27,11 +29,13 @@ use serde::{Deserialize, Serialize};
use sqlx::postgres::PgPool;
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::RwLock;
use validator::Validate;
pub fn config(cfg: &mut ServiceConfig) {
cfg.service(
scope("auth")
.service(ws_init)
.service(init)
.service(auth_callback)
.service(delete_auth_provider)
@@ -46,7 +50,9 @@ pub fn config(cfg: &mut ServiceConfig) {
.service(resend_verify_email)
.service(set_email)
.service(verify_email)
.service(subscribe_newsletter),
.service(subscribe_newsletter)
.service(login_from_minecraft)
.configure(super::minecraft::config),
);
}
@@ -73,6 +79,167 @@ pub struct TempUser {
pub name: Option<String>,
}
impl TempUser {
/// Creates a fresh Modrinth account from this provider-supplied profile and
/// returns the new database user id.
///
/// Steps: reject an already-registered email, derive a unique username (by
/// appending an incrementing numeric suffix), mirror the provider avatar
/// onto the CDN, then insert the user row inside `transaction`.
///
/// Errors with `DuplicateUser` when the email is taken, and with
/// `InvalidCredentials` when the provider id cannot be parsed.
async fn create_account(
self,
provider: AuthProvider,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
client: &PgPool,
file_host: &Arc<dyn FileHost + Send + Sync>,
redis: &deadpool_redis::Pool,
) -> Result<crate::database::models::UserId, AuthenticationError> {
// Emails must be unique across all accounts.
if let Some(email) = &self.email {
if crate::database::models::User::get_email(email, client)
.await?
.is_some()
{
return Err(AuthenticationError::DuplicateUser);
}
}
let user_id = crate::database::models::generate_user_id(transaction).await?;
// Find a free username: try the provider username as-is, then with an
// incrementing numeric suffix (name, name1, name2, ...).
let mut username_increment: i32 = 0;
let mut username = None;
while username.is_none() {
let test_username = format!(
"{}{}",
self.username,
if username_increment > 0 {
username_increment.to_string()
} else {
"".to_string()
}
);
let new_id = crate::database::models::User::get(&test_username, client, redis).await?;
if new_id.is_none() {
username = Some(test_username);
} else {
username_increment += 1;
}
}
// Mirror the provider avatar onto our CDN. The image type is taken from
// the Content-Type header when present, otherwise guessed from the URL's
// extension; unrecognized images are silently skipped (account gets no avatar).
let avatar_url = if let Some(avatar_url) = self.avatar_url {
let cdn_url = dotenvy::var("CDN_URL")?;
let res = reqwest::get(&avatar_url).await?;
let headers = res.headers().clone();
let img_data = if let Some(content_type) = headers
.get(reqwest::header::CONTENT_TYPE)
.and_then(|ct| ct.to_str().ok())
{
get_image_ext(content_type).map(|ext| (ext, content_type))
} else if let Some(ext) = avatar_url.rsplit('.').next() {
get_image_content_type(ext).map(|content_type| (ext, content_type))
} else {
None
};
if let Some((ext, content_type)) = img_data {
let bytes = res.bytes().await?;
// Content-addressed file name so re-uploads of the same image dedupe.
let hash = sha1::Sha1::from(&bytes).hexdigest();
let upload_data = file_host
.upload_file(
content_type,
&format!(
"user/{}/{}.{}",
crate::models::users::UserId::from(user_id),
hash,
ext
),
bytes,
)
.await?;
Some(format!("{}/{}", cdn_url, upload_data.file_name))
} else {
None
}
} else {
None
};
if let Some(username) = username {
// Only the field matching `provider` is populated; providers with
// numeric ids have their id string parsed to an integer.
crate::database::models::User {
id: user_id,
github_id: if provider == AuthProvider::GitHub {
Some(
self.id
.clone()
.parse()
.map_err(|_| AuthenticationError::InvalidCredentials)?,
)
} else {
None
},
discord_id: if provider == AuthProvider::Discord {
Some(
self.id
.parse()
.map_err(|_| AuthenticationError::InvalidCredentials)?,
)
} else {
None
},
gitlab_id: if provider == AuthProvider::GitLab {
Some(
self.id
.parse()
.map_err(|_| AuthenticationError::InvalidCredentials)?,
)
} else {
None
},
google_id: if provider == AuthProvider::Google {
Some(self.id.clone())
} else {
None
},
steam_id: if provider == AuthProvider::Steam {
Some(
self.id
.parse()
.map_err(|_| AuthenticationError::InvalidCredentials)?,
)
} else {
None
},
microsoft_id: if provider == AuthProvider::Microsoft {
Some(self.id)
} else {
None
},
password: None,
totp_secret: None,
username,
name: self.name,
email: self.email,
// Provider-supplied emails are treated as already verified.
email_verified: true,
avatar_url,
bio: self.bio,
created: Utc::now(),
role: Role::Developer.to_string(),
badges: Badges::default(),
balance: Decimal::ZERO,
payout_wallet: None,
payout_wallet_type: None,
payout_address: None,
}
.insert(transaction)
.await?;
Ok(user_id)
} else {
// Unreachable in practice: the loop above only exits with Some(username).
Err(AuthenticationError::InvalidCredentials)
}
}
}
impl AuthProvider {
pub fn get_redirect_url(&self, state: String) -> Result<String, AuthenticationError> {
let self_addr = dotenvy::var("SELF_ADDR")?;
@@ -771,7 +938,7 @@ pub async fn init(
let state = Flow::OAuth {
user_id,
url: info.url,
url: Some(info.url),
provider: info.provider,
}
.insert(Duration::minutes(30), &redis)
@@ -783,262 +950,286 @@ pub async fn init(
.json(serde_json::json!({ "url": url })))
}
#[derive(Serialize, Deserialize)]
pub struct WsInit {
/// Which OAuth provider the client wants to authenticate with.
pub provider: AuthProvider,
}
/// Websocket entry point for the launcher OAuth flow.
///
/// Upgrades the request to a websocket, creates a short-lived (30 min)
/// `Flow::OAuth` state with no callback URL (the websocket replaces the
/// browser redirect), sends the provider's authorize URL to the client,
/// and registers the socket under the flow state so `auth_callback` can
/// push the session code back through it later.
#[get("ws")]
pub async fn ws_init(
req: HttpRequest,
Query(info): Query<WsInit>,
body: Payload,
db: Data<RwLock<ActiveSockets>>,
redis: Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, actix_web::Error> {
let (res, session, _msg_stream) = actix_ws::handle(&req, body)?;
// Inner helper so `?` can be used on the `Closed` socket error; failures
// are deliberately swallowed (`let _` below) — the upgrade response is
// returned to the client regardless.
async fn sock(
mut ws_stream: actix_ws::Session,
info: WsInit,
db: Data<RwLock<ActiveSockets>>,
redis: Data<deadpool_redis::Pool>,
) -> Result<(), Closed> {
let flow = Flow::OAuth {
user_id: None,
url: None,
provider: info.provider,
}
.insert(Duration::minutes(30), &redis)
.await;
if let Ok(state) = flow {
if let Ok(url) = info.provider.get_redirect_url(state.clone()) {
ws_stream
.text(serde_json::json!({ "url": url }).to_string())
.await?;
// Keyed by flow state so the OAuth callback can find this socket.
let db = db.write().await;
db.auth_sockets.insert(state, ws_stream);
}
}
Ok(())
}
let _ = sock(session, info, db, redis).await;
Ok(res)
}
#[get("callback")]
pub async fn auth_callback(
req: HttpRequest,
Query(query): Query<HashMap<String, String>>,
sockets: Data<RwLock<ActiveSockets>>,
client: Data<PgPool>,
file_host: Data<Arc<dyn FileHost + Send + Sync>>,
redis: Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, AuthenticationError> {
let state = query
.get("state")
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
) -> Result<HttpResponse, super::templates::ErrorPage> {
let res = async move {
let state = query
.get("state")
.ok_or_else(|| AuthenticationError::InvalidCredentials)?.clone();
let flow = Flow::get(state, &redis).await?;
let flow = Flow::get(&state, &redis).await?;
// Extract cookie header from request
if let Some(Flow::OAuth {
user_id,
provider,
url,
}) = flow
{
Flow::remove(state, &redis).await?;
let token = provider.get_token(query).await?;
let oauth_user = provider.get_user(&token).await?;
let user_id_opt = provider.get_user_id(&oauth_user.id, &**client).await?;
let mut transaction = client.begin().await?;
if let Some(id) = user_id {
if user_id_opt.is_some() {
return Err(AuthenticationError::DuplicateUser);
}
provider
.update_user_id(id, Some(&oauth_user.id), &mut transaction)
.await?;
let user = crate::database::models::User::get_id(id, &**client, &redis).await?;
if let Some(email) = user.and_then(|x| x.email) {
send_email(
email,
"Authentication method added",
&format!("When logging into Modrinth, you can now log in using the {} authentication provider.", provider.as_str()),
"If you did not make this change, please contact us immediately through our support channels on Discord or via email (support@modrinth.com).",
None,
)?;
}
crate::database::models::User::clear_caches(&[(id, None)], &redis).await?;
transaction.commit().await?;
Ok(HttpResponse::TemporaryRedirect()
.append_header(("Location", &*url))
.json(serde_json::json!({ "url": url })))
} else {
let user_id = if let Some(user_id) = user_id_opt {
let user = crate::database::models::User::get_id(user_id, &**client, &redis)
.await?
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
if user.totp_secret.is_some() {
let flow = Flow::Login2FA { user_id: user.id }
.insert(Duration::minutes(30), &redis)
.await?;
let redirect_url = format!(
"{}{}error=2fa_required&flow={}",
// Extract cookie header from request
if let Some(Flow::OAuth {
user_id,
provider,
url,
if url.contains('?') { "&" } else { "?" },
flow
);
}) = flow
{
Flow::remove(&state, &redis).await?;
return Ok(HttpResponse::TemporaryRedirect()
.append_header(("Location", &*redirect_url))
.json(serde_json::json!({ "url": redirect_url })));
let token = provider.get_token(query).await?;
let oauth_user = provider.get_user(&token).await?;
let user_id_opt = provider.get_user_id(&oauth_user.id, &**client).await?;
let mut transaction = client.begin().await?;
if let Some(id) = user_id {
if user_id_opt.is_some() {
return Err(AuthenticationError::DuplicateUser);
}
user_id
} else {
if let Some(email) = &oauth_user.email {
if crate::database::models::User::get_email(email, &**client)
.await?
.is_some()
{
return Err(AuthenticationError::DuplicateUser);
}
}
let user_id = crate::database::models::generate_user_id(&mut transaction).await?;
let mut username_increment: i32 = 0;
let mut username = None;
while username.is_none() {
let test_username = format!(
"{}{}",
oauth_user.username,
if username_increment > 0 {
username_increment.to_string()
} else {
"".to_string()
}
);
let new_id =
crate::database::models::User::get(&test_username, &**client, &redis)
.await?;
if new_id.is_none() {
username = Some(test_username);
} else {
username_increment += 1;
}
}
let avatar_url = if let Some(avatar_url) = oauth_user.avatar_url {
let cdn_url = dotenvy::var("CDN_URL")?;
let res = reqwest::get(&avatar_url).await?;
let headers = res.headers().clone();
let img_data = if let Some(content_type) = headers
.get(reqwest::header::CONTENT_TYPE)
.and_then(|ct| ct.to_str().ok())
{
get_image_ext(content_type).map(|ext| (ext, content_type))
} else if let Some(ext) = avatar_url.rsplit('.').next() {
get_image_content_type(ext).map(|content_type| (ext, content_type))
} else {
None
};
if let Some((ext, content_type)) = img_data {
let bytes = res.bytes().await?;
let hash = sha1::Sha1::from(&bytes).hexdigest();
let upload_data = file_host
.upload_file(
content_type,
&format!(
"user/{}/{}.{}",
crate::models::users::UserId::from(user_id),
hash,
ext
),
bytes,
)
.await?;
Some(format!("{}/{}", cdn_url, upload_data.file_name))
} else {
None
}
} else {
None
};
if let Some(username) = username {
crate::database::models::User {
id: user_id,
github_id: if provider == AuthProvider::GitHub {
Some(
oauth_user
.id
.clone()
.parse()
.map_err(|_| AuthenticationError::InvalidCredentials)?,
)
} else {
None
},
discord_id: if provider == AuthProvider::Discord {
Some(
oauth_user
.id
.parse()
.map_err(|_| AuthenticationError::InvalidCredentials)?,
)
} else {
None
},
gitlab_id: if provider == AuthProvider::GitLab {
Some(
oauth_user
.id
.parse()
.map_err(|_| AuthenticationError::InvalidCredentials)?,
)
} else {
None
},
google_id: if provider == AuthProvider::Google {
Some(oauth_user.id.clone())
} else {
None
},
steam_id: if provider == AuthProvider::Steam {
Some(
oauth_user
.id
.parse()
.map_err(|_| AuthenticationError::InvalidCredentials)?,
)
} else {
None
},
microsoft_id: if provider == AuthProvider::Microsoft {
Some(oauth_user.id)
} else {
None
},
password: None,
totp_secret: None,
username,
name: oauth_user.name,
email: oauth_user.email,
email_verified: true,
avatar_url,
bio: oauth_user.bio,
created: Utc::now(),
role: Role::Developer.to_string(),
badges: Badges::default(),
balance: Decimal::ZERO,
payout_wallet: None,
payout_wallet_type: None,
payout_address: None,
}
.insert(&mut transaction)
provider
.update_user_id(id, Some(&oauth_user.id), &mut transaction)
.await?;
let user = crate::database::models::User::get_id(id, &**client, &redis).await?;
if let Some(email) = user.and_then(|x| x.email) {
send_email(
email,
"Authentication method added",
&format!("When logging into Modrinth, you can now log in using the {} authentication provider.", provider.as_str()),
"If you did not make this change, please contact us immediately through our support channels on Discord or via email (support@modrinth.com).",
None,
)?;
}
crate::database::models::User::clear_caches(&[(id, None)], &redis).await?;
transaction.commit().await?;
if let Some(url) = url {
Ok(HttpResponse::TemporaryRedirect()
.append_header(("Location", &*url))
.json(serde_json::json!({ "url": url })))
} else {
Err(AuthenticationError::InvalidCredentials)
}
} else {
let user_id = if let Some(user_id) = user_id_opt {
let user = crate::database::models::User::get_id(user_id, &**client, &redis)
.await?
.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
if user.totp_secret.is_some() {
let flow = Flow::Login2FA { user_id: user.id }
.insert(Duration::minutes(30), &redis)
.await?;
if let Some(url) = url {
let redirect_url = format!(
"{}{}error=2fa_required&flow={}",
url,
if url.contains('?') { "&" } else { "?" },
flow
);
return Ok(HttpResponse::TemporaryRedirect()
.append_header(("Location", &*redirect_url))
.json(serde_json::json!({ "url": redirect_url })));
} else {
let mut ws_conn = {
let db = sockets.read().await;
let mut x = db
.auth_sockets
.get_mut(&state)
.ok_or_else(|| AuthenticationError::SocketError)?;
x.value_mut().clone()
};
ws_conn
.text(
serde_json::json!({
"error": "2fa_required",
"flow": flow,
}).to_string()
)
.await.map_err(|_| AuthenticationError::SocketError)?;
let _ = ws_conn.close(None).await;
return Ok(super::templates::Success {
icon: user.avatar_url.as_deref().unwrap_or("https://cdn-raw.modrinth.com/placeholder.svg"),
name: &user.username,
}.render());
}
}
user_id
} else {
return Err(AuthenticationError::InvalidCredentials);
}
};
oauth_user.create_account(provider, &mut transaction, &client, &file_host, &redis).await?
};
let session = issue_session(req, user_id, &mut transaction, &redis).await?;
transaction.commit().await?;
let session = issue_session(req, user_id, &mut transaction, &redis).await?;
transaction.commit().await?;
let redirect_url = format!(
"{}{}code={}{}",
url,
if url.contains('?') { '&' } else { '?' },
session.session,
if user_id_opt.is_none() {
"&new_account=true"
if let Some(url) = url {
let redirect_url = format!(
"{}{}code={}{}",
url,
if url.contains('?') { '&' } else { '?' },
session.session,
if user_id_opt.is_none() {
"&new_account=true"
} else {
""
}
);
Ok(HttpResponse::TemporaryRedirect()
.append_header(("Location", &*redirect_url))
.json(serde_json::json!({ "url": redirect_url })))
} else {
""
}
);
let user = crate::database::models::user_item::User::get_id(
user_id,
&**client,
&redis,
)
.await?.ok_or_else(|| AuthenticationError::InvalidCredentials)?;
Ok(HttpResponse::TemporaryRedirect()
.append_header(("Location", &*redirect_url))
.json(serde_json::json!({ "url": redirect_url })))
let mut ws_conn = {
let db = sockets.read().await;
let mut x = db
.auth_sockets
.get_mut(&state)
.ok_or_else(|| AuthenticationError::SocketError)?;
x.value_mut().clone()
};
ws_conn
.text(
serde_json::json!({
"code": session.session,
}).to_string()
)
.await.map_err(|_| AuthenticationError::SocketError)?;
let _ = ws_conn.close(None).await;
return Ok(super::templates::Success {
icon: user.avatar_url.as_deref().unwrap_or("https://cdn-raw.modrinth.com/placeholder.svg"),
name: &user.username,
}.render());
}
}
} else {
Err::<HttpResponse, AuthenticationError>(AuthenticationError::InvalidCredentials)
}
}.await;
Ok(res?)
}
#[derive(Deserialize)]
pub struct MinecraftLogin {
    /// Flow id previously issued during the Minecraft/Microsoft auth flow.
    pub flow: String,
}
/// Logs a launcher user into Modrinth with a completed Microsoft login flow.
///
/// Redeems a `Flow::MicrosoftLogin` stored in redis: fetches the Microsoft
/// profile with the stored access token, then either signs in the matching
/// existing account (requiring 2FA when TOTP is enabled) or creates a new
/// account via `TempUser::create_account`. Responds with a session code.
///
/// Errors with `InvalidCredentials` when the flow id is unknown or expired.
#[post("login/minecraft")]
pub async fn login_from_minecraft(
    req: HttpRequest,
    client: Data<PgPool>,
    file_host: Data<Arc<dyn FileHost + Send + Sync>>,
    redis: Data<deadpool_redis::Pool>,
    login: web::Json<MinecraftLogin>,
) -> Result<HttpResponse, AuthenticationError> {
    let flow = Flow::get(&login.flow, &redis).await?;
    if let Some(Flow::MicrosoftLogin {
        access_token: token,
    }) = flow
    {
        let provider = AuthProvider::Microsoft;
        let oauth_user = provider.get_user(&token).await?;
        let user_id_opt = provider.get_user_id(&oauth_user.id, &**client).await?;
        let mut transaction = client.begin().await?;
        let user_id = if let Some(user_id) = user_id_opt {
            let user = crate::database::models::User::get_id(user_id, &**client, &redis)
                .await?
                .ok_or_else(|| AuthenticationError::InvalidCredentials)?;
            // Accounts with TOTP enabled must complete a 2FA flow instead of
            // receiving a session code directly.
            if user.totp_secret.is_some() {
                let flow = Flow::Login2FA { user_id: user.id }
                    .insert(Duration::minutes(30), &redis)
                    .await?;
                return Ok(HttpResponse::Ok().json(serde_json::json!({
                    "error": "2fa_required",
                    "flow": flow
                })));
            }
            user_id
        } else {
            oauth_user
                .create_account(provider, &mut transaction, &client, &file_host, &redis)
                .await?
        };
        let session = issue_session(req, user_id, &mut transaction, &redis).await?;
        // BUGFIX: commit before responding. Without this the transaction is
        // rolled back when dropped, discarding the freshly created account
        // and session rows even though a session code was returned.
        transaction.commit().await?;
        Ok(HttpResponse::Ok().json(serde_json::json!({
            "code": session.session
        })))
    } else {
        Err(AuthenticationError::InvalidCredentials)
    }

154
src/auth/minecraft/auth.rs Normal file
View File

@@ -0,0 +1,154 @@
//! Main authentication flow for Hydra
use crate::{auth::minecraft::stages, auth::templates, parse_var};
// use crate::db::RuntimeState;
use crate::database::models::flow_item::Flow;
use crate::queue::socket::ActiveSockets;
use actix_web::http::StatusCode;
use actix_web::{get, web, HttpResponse};
use chrono::Duration;
use serde::Deserialize;
use serde_json::json;
use tokio::sync::RwLock;
/// Unwraps `$res`; on `Err`, renders the error as the standard JSON error
/// body, pushes it over the websocket `$ws_conn`, closes the socket, and
/// returns an `ErrorPage` with `$status` from the enclosing function.
/// `$ctx` is a human-readable label for the failing stage.
macro_rules! ws_conn_try {
($ctx:literal $status:path, $res:expr => $ws_conn:expr) => {
match $res {
Ok(res) => res,
Err(err) => {
let error = format!("In {}: {err}", $ctx);
let render = super::Error::render_string(&error);
// Best-effort notification; socket errors here are ignored.
let _ = $ws_conn.text(render.clone()).await;
let _ = $ws_conn.close(None).await;
return Err(templates::ErrorPage {
code: $status,
message: render,
});
}
}
};
}
/// Query parameters Microsoft appends to the OAuth callback redirect.
#[derive(Deserialize)]
pub struct Query {
/// Authorization code to exchange for tokens.
pub code: String,
/// Our OAuth `state` value; doubles as the websocket connection id.
pub state: String,
}
/// OAuth callback for the launcher Minecraft login flow.
///
/// Looks up the websocket registered under the OAuth `state`, then runs the
/// Microsoft -> XBox Live -> XSTS -> Minecraft bearer token chain, reporting
/// any stage failure over the socket via `ws_conn_try!`. On success it stores
/// a `Flow::MicrosoftLogin` in redis, sends the tokens and flow id to the
/// launcher over the socket, closes it, and renders a success page.
#[get("callback")]
pub async fn route(
db: web::Data<RwLock<ActiveSockets>>,
info: web::Query<Query>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, templates::ErrorPage> {
// Public base URL for the OAuth redirect URI; falls back to the bind address.
let public_url = parse_var::<String>("SELF_ADDR").unwrap_or(format!(
"http://{}",
parse_var::<String>("BIND_ADDR").unwrap()
));
let client_id = parse_var::<String>("MICROSOFT_CLIENT_ID").unwrap();
let client_secret = parse_var::<String>("MICROSOFT_CLIENT_SECRET").unwrap();
let code = &info.code;
// `state` is the socket id registered by the websocket route. The session
// handle is cloned so the map lock is released immediately.
let mut ws_conn = {
let db = db.read().await;
let mut x = db
.auth_sockets
.get_mut(&info.state)
.ok_or_else(|| templates::ErrorPage {
code: StatusCode::BAD_REQUEST,
message: "Invalid state sent, you probably need to get a new websocket".to_string(),
})?;
x.value_mut().clone()
};
// Stage 1: authorization code -> Microsoft access/refresh tokens.
let access_token = ws_conn_try!(
"OAuth token exchange" StatusCode::INTERNAL_SERVER_ERROR,
stages::access_token::fetch_token(
public_url,
code,
&client_id,
&client_secret,
).await
=> ws_conn
);
// Stage 2: Microsoft access token -> XBox Live token + user hash.
let stages::xbl_signin::XBLLogin {
token: xbl_token,
uhs,
} = ws_conn_try!(
"XBox Live token exchange" StatusCode::INTERNAL_SERVER_ERROR,
stages::xbl_signin::login_xbl(&access_token.access_token).await
=> ws_conn
);
// Stage 3: XBL token -> XSTS token (may be denied with a user-facing reason).
let xsts_response = ws_conn_try!(
"XSTS token exchange" StatusCode::INTERNAL_SERVER_ERROR,
stages::xsts_token::fetch_token(&xbl_token).await
=> ws_conn
);
match xsts_response {
stages::xsts_token::XSTSResponse::Unauthorized(err) => {
// Best-effort error report over the socket, then close it.
let _ = ws_conn
.text(super::Error::render_string(&format!(
"Error getting XBox Live token: {err}"
)))
.await;
let _ = ws_conn.close(None).await;
Err(templates::ErrorPage {
code: StatusCode::FORBIDDEN,
message: err,
})
}
stages::xsts_token::XSTSResponse::Success { token: xsts_token } => {
// Stage 4: XSTS token -> Minecraft services bearer token.
let bearer_token = &ws_conn_try!(
"Bearer token flow" StatusCode::INTERNAL_SERVER_ERROR,
stages::bearer_token::fetch_bearer(&xsts_token, &uhs)
.await
=> ws_conn
);
// Stage 5: bearer token -> player profile (fails when the account
// owns no Minecraft profile).
let player_info = &ws_conn_try!(
"No Minecraft account for profile. Make sure you own the game and have set a username through the official Minecraft launcher." StatusCode::BAD_REQUEST,
stages::player_info::fetch_info(bearer_token)
.await
=> ws_conn
);
// Persist a 1-hour login flow so the launcher can redeem the bearer
// token for a Modrinth session via `login_from_minecraft`.
let flow = &ws_conn_try!(
"Error creating microsoft login request flow." StatusCode::INTERNAL_SERVER_ERROR,
Flow::MicrosoftLogin {
access_token: bearer_token.clone(),
}
.insert(Duration::hours(1), &redis)
.await
=> ws_conn
);
// Hand the credentials to the launcher over the socket, then close it.
ws_conn
.text(
json!({
"token": bearer_token,
"refresh_token": &access_token.refresh_token,
"expires_after": 86400,
"flow": flow,
}).to_string()
)
.await.map_err(|_| templates::ErrorPage {
code: StatusCode::BAD_REQUEST,
message: "Failed to send login details to launcher. Try restarting the login process!".to_string(),
})?;
let _ = ws_conn.close(None).await;
Ok(templates::Success {
name: &player_info.name,
icon: &format!("https://mc-heads.net/avatar/{}/128", &player_info.id),
}
.render())
}
}
}

View File

@@ -0,0 +1,35 @@
//! Login route for Hydra, redirects to the Microsoft login page before going to the redirect route
use crate::{auth::minecraft::stages::login_redirect, auth::templates, parse_var};
use actix_web::http::StatusCode;
use actix_web::{get, web, HttpResponse};
use serde::{Deserialize, Serialize};
#[derive(Deserialize)]
pub struct Query {
/// Websocket connection id obtained from the socket route.
pub id: Option<String>,
}
/// JSON body returned alongside the redirect, carrying the authorize URL.
#[derive(Serialize)]
pub struct AuthorizationInit {
pub url: String,
}
/// Starts the Microsoft login flow for a launcher websocket connection:
/// requires a socket id, builds the Microsoft OAuth authorize URL (with the
/// socket id as the OAuth `state`), and redirects the client to it.
#[get("init")]
pub async fn route(info: web::Query<Query>) -> Result<HttpResponse, templates::ErrorPage> {
let conn_id = info.0.id.ok_or_else(|| templates::ErrorPage {
code: StatusCode::BAD_REQUEST,
message: "No socket ID provided (open a web socket at the / route for one)".to_string(),
})?;
// Public base URL for the OAuth redirect URI; falls back to the bind address.
let public_url = parse_var::<String>("SELF_ADDR").unwrap_or(format!(
"http://{}",
parse_var::<String>("BIND_ADDR").unwrap()
));
let client_id = parse_var::<String>("MICROSOFT_CLIENT_ID").unwrap();
let url = login_redirect::get_url(&public_url, &conn_id, &client_id);
Ok(HttpResponse::TemporaryRedirect()
.append_header(("Location", &*url))
.json(AuthorizationInit { url }))
}

60
src/auth/minecraft/mod.rs Normal file
View File

@@ -0,0 +1,60 @@
mod auth;
mod login;
mod refresh;
mod socket;
mod stages;
use actix_web::http::StatusCode;
use actix_web::web::{scope, ServiceConfig};
use actix_web::HttpResponse;
use serde_json::json;
use std::fmt::{Display, Formatter};
/// Error type for the Minecraft (Hydra) auth routes: an HTTP status code plus
/// a human-readable reason, serialized to clients as `{"error": reason}`.
#[derive(Debug)]
pub struct Error {
pub code: StatusCode,
pub reason: String,
}
impl Error {
/// Renders an arbitrary reason string as the standard `{"error": ...}` JSON body.
pub fn render_string(reason: &str) -> String {
json!({ "error": reason }).to_string()
}
}
impl Display for Error {
    /// Formats the error as the same JSON object clients receive:
    /// `{"error": "<reason>"}`.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let payload = json!({
            "error": self.reason
        });
        write!(f, "{payload}")
    }
}
impl actix_web::ResponseError for Error {
/// HTTP status actix should respond with.
fn status_code(&self) -> StatusCode {
self.code
}
/// Responds with the same `{"error": reason}` JSON body as `Display`.
fn error_response(&self) -> HttpResponse {
HttpResponse::build(self.code).json(json!({
"error": self.reason
}))
}
}
/// Mounts the Minecraft (Hydra) auth routes under the `minecraft` scope:
/// OAuth callback, login init, token refresh, and the websocket endpoint.
pub fn config(cfg: &mut ServiceConfig) {
cfg.service(
scope("minecraft")
.service(auth::route)
.service(login::route)
.service(refresh::route)
.service(socket::route),
);
}

View File

@@ -0,0 +1,72 @@
//! Refresh token route
use super::stages;
use crate::parse_var;
use actix_web::http::StatusCode;
use actix_web::{post, web, HttpResponse};
use serde::Deserialize;
use serde_json::json;
/// Request body for the refresh route.
#[derive(Deserialize)]
pub struct Body {
// Microsoft OAuth refresh token previously handed to the launcher.
refresh_token: String,
}
/// Refreshes a launcher's Minecraft session: runs the refresh-token ->
/// XBox Live -> XSTS -> bearer token chain and returns the new tokens as JSON.
/// Each stage failure maps to a `super::Error` with an appropriate status.
#[post("refresh")]
pub async fn route(body: web::Json<Body>) -> Result<HttpResponse, super::Error> {
// Public base URL for the OAuth redirect URI; falls back to the bind address.
let public_url = parse_var::<String>("SELF_ADDR").unwrap_or(format!(
"http://{}",
parse_var::<String>("BIND_ADDR").unwrap()
));
let client_id = parse_var::<String>("MICROSOFT_CLIENT_ID").unwrap();
let client_secret = parse_var::<String>("MICROSOFT_CLIENT_SECRET").unwrap();
// Stage 1: refresh token -> new Microsoft access/refresh tokens.
let access_token = stages::access_token::refresh_token(
&public_url,
&body.refresh_token,
&client_id,
&client_secret,
)
.await
.map_err(|_| super::Error {
code: StatusCode::INTERNAL_SERVER_ERROR,
reason: "Error with OAuth token exchange".to_string(),
})?;
// Stage 2: access token -> XBox Live token + user hash.
let stages::xbl_signin::XBLLogin {
token: xbl_token,
uhs,
} = stages::xbl_signin::login_xbl(&access_token.access_token)
.await
.map_err(|_| super::Error {
code: StatusCode::INTERNAL_SERVER_ERROR,
reason: "Error with XBox Live token exchange".to_string(),
})?;
// Stage 3: XBL token -> XSTS token.
let xsts_response = stages::xsts_token::fetch_token(&xbl_token)
.await
.map_err(|_| super::Error {
code: StatusCode::INTERNAL_SERVER_ERROR,
reason: "Error with XSTS token exchange".to_string(),
})?;
match xsts_response {
stages::xsts_token::XSTSResponse::Unauthorized(err) => Err(super::Error {
code: StatusCode::UNAUTHORIZED,
reason: format!("Error getting XBox Live token: {err}"),
}),
stages::xsts_token::XSTSResponse::Success { token: xsts_token } => {
// Stage 4: XSTS token -> Minecraft services bearer token.
let bearer_token = stages::bearer_token::fetch_bearer(&xsts_token, &uhs)
.await
.map_err(|_| super::Error {
code: StatusCode::INTERNAL_SERVER_ERROR,
reason: "Error with Bearer token flow".to_string(),
})?;
Ok(HttpResponse::Ok().json(&json!({
"token": bearer_token,
"refresh_token": &access_token.refresh_token,
"expires_after": 86400
})))
}
}
}

View File

@@ -0,0 +1,40 @@
use crate::database::models::flow_item::Flow;
use crate::queue::socket::ActiveSockets;
use actix_web::web::Payload;
use actix_web::{get, web, HttpRequest, HttpResponse};
use actix_ws::{Closed, Session};
use chrono::Duration;
use tokio::sync::RwLock;
/// Websocket entry point for the launcher Minecraft login flow.
///
/// Upgrades the request to a websocket, creates a 30-minute
/// `Flow::MinecraftAuth` state in redis, sends it to the client as
/// `login_code`, and registers the socket under that state so the OAuth
/// callback can find it later.
#[get("ws")]
pub async fn route(
req: HttpRequest,
body: Payload,
db: web::Data<RwLock<ActiveSockets>>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, actix_web::Error> {
let (res, session, _msg_stream) = actix_ws::handle(&req, body)?;
// Socket setup errors are deliberately ignored; the upgrade response is
// returned either way.
let _ = sock(session, db, redis).await;
Ok(res)
}
/// Sends the new flow state over the socket and stores the session in the
/// active-sockets map, keyed by that state.
async fn sock(
mut ws_stream: Session,
db: web::Data<RwLock<ActiveSockets>>,
redis: web::Data<deadpool_redis::Pool>,
) -> Result<(), Closed> {
if let Ok(state) = Flow::MinecraftAuth
.insert(Duration::minutes(30), &redis)
.await
{
ws_stream
.text(serde_json::json!({ "login_code": state }).to_string())
.await?;
let db = db.write().await;
db.auth_sockets.insert(state, ws_stream);
}
Ok(())
}

View File

@@ -0,0 +1,65 @@
//! Get access token from code
use serde::Deserialize;
use std::collections::HashMap;
const OAUTH_TOKEN_URL: &str = "https://login.live.com/oauth20_token.srf";
#[derive(Deserialize)]
pub struct Tokens {
pub access_token: String,
pub refresh_token: String,
}
pub async fn fetch_token(
public_uri: String,
code: &str,
client_id: &str,
client_secret: &str,
) -> Result<Tokens, reqwest::Error> {
let redirect_uri = format!("{}/v2/auth/minecraft/callback", public_uri);
let mut params = HashMap::new();
params.insert("client_id", client_id);
params.insert("client_secret", client_secret);
params.insert("code", code);
params.insert("grant_type", "authorization_code");
params.insert("redirect_uri", redirect_uri.as_str());
let client = reqwest::Client::new();
let result = client
.post(OAUTH_TOKEN_URL)
.form(&params)
.send()
.await?
.json::<Tokens>()
.await?;
Ok(result)
}
pub async fn refresh_token(
public_uri: &str,
refresh_token: &str,
client_id: &str,
client_secret: &str,
) -> Result<Tokens, reqwest::Error> {
let redirect_uri = format!("{}/v2/auth/minecraft/callback", public_uri);
let mut params = HashMap::new();
params.insert("client_id", client_id);
params.insert("client_secret", client_secret);
params.insert("refresh_token", refresh_token);
params.insert("grant_type", "refresh_token");
params.insert("redirect_uri", &redirect_uri);
let client = reqwest::Client::new();
let result = client
.post(OAUTH_TOKEN_URL)
.form(&params)
.send()
.await?
.json::<Tokens>()
.await?;
Ok(result)
}

View File

@@ -0,0 +1,27 @@
//! Minecraft bearer token
use crate::auth::AuthenticationError;
use serde_json::json;
const MCSERVICES_AUTH_URL: &str = "https://api.minecraftservices.com/launcher/login";
/// Exchanges an XSTS token plus user hash (`uhs`) for a Minecraft services
/// bearer token via the launcher login endpoint.
///
/// Errors with `AuthenticationError::Custom` when the response contains no
/// `access_token` field.
pub async fn fetch_bearer(token: &str, uhs: &str) -> Result<String, AuthenticationError> {
let client = reqwest::Client::new();
let body = client
.post(MCSERVICES_AUTH_URL)
.json(&json!({
"xtoken": format!("XBL3.0 x={};{}", uhs, token),
"platform": "PC_LAUNCHER"
}))
.send()
.await?
.text()
.await?;
// Pull `access_token` out of the raw JSON; anything else is treated as failure.
serde_json::from_str::<serde_json::Value>(&body)?
.get("access_token")
.and_then(serde_json::Value::as_str)
.map(String::from)
.ok_or(AuthenticationError::Custom(
"Response didn't contain valid bearer token".to_string(),
))
}

View File

@@ -0,0 +1,8 @@
//! Login redirect step
/// Builds the Microsoft OAuth authorize URL for the launcher login flow.
/// `conn_id` (the websocket connection id) rides along as the OAuth `state`.
pub fn get_url(public_uri: &str, conn_id: &str, client_id: &str) -> String {
    let callback = format!("{public_uri}/v2/auth/minecraft/callback");
    let redirect_uri = urlencoding::encode(&callback);
    let scope = urlencoding::encode("XboxLive.signin offline_access");
    format!(
        "https://login.live.com/oauth20_authorize.srf?client_id={client_id}&response_type=code&redirect_uri={redirect_uri}&scope={scope}&state={conn_id}&prompt=select_account&cobrandid=8058f65d-ce06-4c30-9559-473c9275a65d"
    )
}

View File

@@ -0,0 +1,27 @@
//! MSA authentication stages
use lazy_static::lazy_static;
pub mod access_token;
pub mod bearer_token;
pub mod login_redirect;
pub mod player_info;
pub mod xbl_signin;
pub mod xsts_token;
lazy_static! {
/// Shared reqwest client with a labrinth User-Agent and 10s TCP keepalive.
///
/// NOTE(review): the stage modules in this directory each build a fresh
/// `reqwest::Client::new()` instead of reusing this client — confirm whether
/// it is used elsewhere or should be wired into the stages.
static ref REQWEST_CLIENT: reqwest::Client = {
let mut headers = reqwest::header::HeaderMap::new();
let header = reqwest::header::HeaderValue::from_str(&format!(
"modrinth/labrinth/{} (support@modrinth.com)",
env!("CARGO_PKG_VERSION")
))
.unwrap();
headers.insert(reqwest::header::USER_AGENT, header);
reqwest::Client::builder()
.tcp_keepalive(Some(std::time::Duration::from_secs(10)))
.default_headers(headers)
.build()
.expect("Reqwest Client Building Failed")
};
}

View File

@@ -0,0 +1,33 @@
//! Fetch player info for display
use serde::Deserialize;
const PROFILE_URL: &str = "https://api.minecraftservices.com/minecraft/profile";
/// Minimal Minecraft profile, used for the post-login success page.
#[derive(Deserialize)]
pub struct PlayerInfo {
// Presumably the player's undashed UUID (used in avatar URLs) — TODO confirm.
pub id: String,
// Current in-game username.
pub name: String,
}
impl Default for PlayerInfo {
fn default() -> Self {
Self {
// Fallback profile used when no real player data is available.
id: "606e2ff0ed7748429d6ce1d3321c7838".to_string(),
name: String::from("???"),
}
}
}
/// Fetches the Minecraft profile for the given bearer `token`.
/// Fails (via `error_for_status`) when the account owns no Minecraft profile.
pub async fn fetch_info(token: &str) -> Result<PlayerInfo, reqwest::Error> {
let client = reqwest::Client::new();
let resp = client
.get(PROFILE_URL)
.header(reqwest::header::AUTHORIZATION, format!("Bearer {token}"))
.send()
.await?
.error_for_status()?
.json()
.await?;
Ok(resp)
}

View File

@@ -0,0 +1,55 @@
//! Signin for XBox Live
use crate::auth::AuthenticationError;
use serde_json::json;
const XBL_AUTH_URL: &str = "https://user.auth.xboxlive.com/user/authenticate";
// Deserialization
/// Token + user-hash pair produced by an XBox Live sign-in.
pub struct XBLLogin {
    pub token: String,
    pub uhs: String,
}

/// Signs into XBox Live with a Microsoft access `token` and returns the XBL
/// token plus the user hash (`uhs`) needed for the subsequent XSTS and
/// bearer-token exchanges.
///
/// Errors with `AuthenticationError::Custom` when the response lacks a
/// `Token` or `DisplayClaims.xui[0].uhs` field.
pub async fn login_xbl(token: &str) -> Result<XBLLogin, AuthenticationError> {
    let client = reqwest::Client::new();
    let body = client
        .post(XBL_AUTH_URL)
        .header(reqwest::header::ACCEPT, "application/json")
        .header("x-xbl-contract-version", "1")
        .json(&json!({
            "Properties": {
                "AuthMethod": "RPS",
                "SiteName": "user.auth.xboxlive.com",
                "RpsTicket": format!("d={token}")
            },
            "RelyingParty": "http://auth.xboxlive.com",
            "TokenType": "JWT"
        }))
        .send()
        .await?
        .text()
        .await?;
    let json = serde_json::from_str::<serde_json::Value>(&body)?;
    // Direct Option chaining (replaces the old `Some(&json).and_then(...)`
    // detour); errors are built lazily so the success path never allocates them.
    let token = json
        .get("Token")
        .and_then(|it| it.as_str())
        .map(String::from)
        .ok_or_else(|| {
            AuthenticationError::Custom("XBL response didn't contain valid token".to_string())
        })?;
    let uhs = json
        .get("DisplayClaims")
        .and_then(|it| it.get("xui"))
        .and_then(|it| it.get(0))
        .and_then(|it| it.get("uhs"))
        .and_then(|it| it.as_str())
        .map(String::from)
        .ok_or_else(|| {
            AuthenticationError::Custom("XBL response didn't contain valid user hash".to_string())
        })?;
    Ok(XBLLogin { token, uhs })
}

View File

@@ -0,0 +1,57 @@
use crate::auth::AuthenticationError;
use serde_json::json;
const XSTS_AUTH_URL: &str = "https://xsts.auth.xboxlive.com/xsts/authorize";
/// Outcome of the XSTS authorization request.
pub enum XSTSResponse {
/// Request denied; carries a user-facing explanation.
Unauthorized(String),
/// Granted; `token` authorizes calls to api.minecraftservices.com.
Success { token: String },
}
/// Exchanges an XBox Live token for an XSTS token scoped to Minecraft
/// services. Denials are surfaced as `XSTSResponse::Unauthorized` with a
/// friendly message derived from the response's `XErr` code.
pub async fn fetch_token(token: &str) -> Result<XSTSResponse, AuthenticationError> {
let client = reqwest::Client::new();
let resp = client
.post(XSTS_AUTH_URL)
.header(reqwest::header::ACCEPT, "application/json")
.json(&json!({
"Properties": {
"SandboxId": "RETAIL",
"UserTokens": [
token
]
},
"RelyingParty": "rp://api.minecraftservices.com/",
"TokenType": "JWT"
}))
.send()
.await?;
let status = resp.status();
let body = resp.text().await?;
let json = serde_json::from_str::<serde_json::Value>(&body)?;
if status.is_success() {
// 2xx but no Token field is still treated as an authorization failure.
Ok(json
.get("Token")
.and_then(|x| x.as_str().map(String::from))
.map(|it| XSTSResponse::Success { token: it })
.unwrap_or(XSTSResponse::Unauthorized(
"XSTS response didn't contain valid token!".to_string(),
)))
} else {
// Map well-known XErr denial codes to human-readable messages.
Ok(XSTSResponse::Unauthorized(
#[allow(clippy::unreadable_literal)]
match json.get("XErr").and_then(|x| x.as_i64()) {
Some(2148916238) => {
String::from("This Microsoft account is underage and is not linked to a family.")
},
Some(2148916235) => {
String::from("XBOX Live/Minecraft is not available in your country.")
},
Some(2148916233) => String::from("This account does not have a valid XBOX Live profile. Please buy Minecraft and try again!"),
Some(2148916236) | Some(2148916237) => String::from("This account needs adult verification on Xbox page."),
_ => String::from("Unknown error code"),
},
))
}
}

View File

@@ -1,8 +1,10 @@
pub mod checks;
pub mod email;
pub mod flows;
pub mod minecraft;
pub mod pats;
pub mod session;
mod templates;
pub mod validate;
pub use checks::{
@@ -43,8 +45,12 @@ pub enum AuthenticationError {
InvalidClientId,
#[error("User email/account is already registered on Modrinth")]
DuplicateUser,
#[error("Invalid state sent, you probably need to get a new websocket")]
SocketError,
#[error("Invalid callback URL specified")]
Url,
#[error("{0}")]
Custom(String),
}
impl actix_web::ResponseError for AuthenticationError {
@@ -63,6 +69,8 @@ impl actix_web::ResponseError for AuthenticationError {
AuthenticationError::Url => StatusCode::BAD_REQUEST,
AuthenticationError::FileHosting(..) => StatusCode::INTERNAL_SERVER_ERROR,
AuthenticationError::DuplicateUser => StatusCode::BAD_REQUEST,
AuthenticationError::Custom(..) => StatusCode::BAD_REQUEST,
AuthenticationError::SocketError => StatusCode::BAD_REQUEST,
}
}
@@ -82,6 +90,8 @@ impl actix_web::ResponseError for AuthenticationError {
AuthenticationError::Url => "url_error",
AuthenticationError::FileHosting(..) => "file_hosting",
AuthenticationError::DuplicateUser => "duplicate_user",
AuthenticationError::Custom(..) => "custom",
AuthenticationError::SocketError => "socket",
},
description: &self.to_string(),
})

View File

@@ -180,6 +180,12 @@ pub async fn edit_pat(
let mut transaction = pool.begin().await?;
if let Some(scopes) = &info.scopes {
if scopes.restricted() {
return Err(ApiError::InvalidInput(
"Invalid scopes requested!".to_string(),
));
}
sqlx::query!(
"
UPDATE pats
@@ -206,6 +212,12 @@ pub async fn edit_pat(
.await?;
}
if let Some(expires) = &info.expires {
if expires < &Utc::now() {
return Err(ApiError::InvalidInput(
"Expire date must be in the future!".to_string(),
));
}
sqlx::query!(
"
UPDATE pats

View File

@@ -0,0 +1,24 @@
<!-- Error page for the launcher authentication flow.
     `{{ code }}` and `{{ name }}`-style placeholders are substituted by plain
     string replacement in ErrorPage (src/auth/templates/mod.rs): `{{ code }}`
     is the HTTP status, `{{ message }}` the debug detail. -->
<!DOCTYPE html>
<html lang="en">
<head>
    <link rel="stylesheet" href="/auth/style.css"/>
    <link rel="icon" type="image/png" href="/favicon.ico"/>
    <title>Error - Modrinth</title>
</head>
<body>
<div class="content">
    <img src="/logo.svg" alt="Modrinth Logo" class="logo"/>
    <h2>{{ code }}</h2>
    <p>An error has occurred during the authentication process.</p>
    <p>
        Try restarting the authentication flow within the launcher. If you are still facing issues,
        join our <a href="https://discord.gg/EUHuJHt">Discord</a> for support!
    </p>
    <details>
        <summary>Debug info</summary>
        <p>{{ message }}</p>
    </details>
</div>
</body>
</html>

66
src/auth/templates/mod.rs Normal file
View File

@@ -0,0 +1,66 @@
use crate::auth::AuthenticationError;
use actix_web::http::StatusCode;
use actix_web::{HttpResponse, ResponseError};
use std::fmt::{Debug, Display, Formatter};
/// Data rendered into the login-success page (success.html).
pub struct Success<'a> {
    /// URL of the avatar/icon shown on the page.
    pub icon: &'a str,
    /// Display name greeted on the page.
    pub name: &'a str,
}
impl<'a> Success<'a> {
    /// Renders the login-success page by substituting the icon URL and the
    /// user's display name into the bundled HTML template.
    pub fn render(self) -> HttpResponse {
        let page = include_str!("success.html")
            .replace("{{ icon }}", self.icon)
            .replace("{{ name }}", self.name);
        HttpResponse::Ok()
            .append_header(("Content-Type", "text/html; charset=utf-8"))
            .body(page)
    }
}
/// An HTML error page built from an HTTP status code and a debug message.
#[derive(Debug)]
pub struct ErrorPage {
    /// HTTP status shown as the page heading and reported by `status_code()`.
    pub code: StatusCode,
    /// Debug detail shown in the collapsible "Debug info" section.
    pub message: String,
}
impl Display for ErrorPage {
    /// Formats the page as HTML by substituting the status code and message
    /// into the bundled `error.html` template.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let html = include_str!("error.html")
            .replace("{{ code }}", &self.code.to_string())
            .replace("{{ message }}", &self.message);
        // `write_str` forwards the formatter's result directly; the previous
        // `write!(...)?; Ok(())` pair was redundant.
        f.write_str(&html)
    }
}
impl ErrorPage {
    /// Builds an `HttpResponse` whose body is this page's rendered HTML.
    /// The response status here is always 200 OK; `self.code` is only
    /// surfaced via the `ResponseError` impl's `status_code`.
    pub fn render(&self) -> HttpResponse {
        let body = self.to_string();
        HttpResponse::Ok()
            .append_header(("Content-Type", "text/html; charset=utf-8"))
            .body(body)
    }
}
impl actix_web::ResponseError for ErrorPage {
    // The page's own HTTP status code.
    fn status_code(&self) -> StatusCode {
        self.code
    }
    // Serves the rendered HTML page. NOTE(review): `render()` responds with
    // 200 OK, not `self.code` — confirm that mismatch is intentional (e.g. so
    // browsers display the page instead of an error interstitial).
    fn error_response(&self) -> HttpResponse {
        self.render()
    }
}
impl From<AuthenticationError> for ErrorPage {
    /// Converts an authentication failure into a renderable error page,
    /// reusing the error's HTTP status code and display text.
    fn from(item: AuthenticationError) -> Self {
        let code = item.status_code();
        let message = item.to_string();
        ErrorPage { code, message }
    }
}

View File

@@ -0,0 +1,16 @@
<!-- Login-success page for the launcher authentication flow.
     `{{ icon }}` and `{{ name }}` are substituted by plain string replacement
     in Success::render (src/auth/templates/mod.rs). -->
<!DOCTYPE html>
<html lang="en">
<head>
    <link rel="stylesheet" href="/auth/style.css"/>
    <link rel="icon" type="image/png" href="/favicon.ico"/>
    <title>Login - Modrinth</title>
</head>
<body>
<div class="content">
    <img src="{{ icon }}" alt="{{ name }}" class="logo"/>
    <h2>Login Successful</h2>
    <p>Hey, {{ name }}! You can now safely close this tab.</p>
</div>
</body>
</html>

102
src/clickhouse/mod.rs Normal file
View File

@@ -0,0 +1,102 @@
use hyper::client::HttpConnector;
use hyper_tls::{native_tls, HttpsConnector};
/// Connects to ClickHouse over HTTP(S) and ensures the analytics database and
/// its `views`, `downloads`, and `playtime` tables exist.
///
/// Reads `CLICKHOUSE_DATABASE`, `CLICKHOUSE_URL`, `CLICKHOUSE_USER` and
/// `CLICKHOUSE_PASSWORD` from the environment; panics (unwrap) if any is
/// missing, which is acceptable at startup since these are checked in
/// `check_env_vars`.
pub async fn init_client() -> clickhouse::error::Result<clickhouse::Client> {
    let database = dotenvy::var("CLICKHOUSE_DATABASE").unwrap();
    let client = {
        let mut http_connector = HttpConnector::new();
        http_connector.enforce_http(false); // allow https URLs
        let tls_connector = native_tls::TlsConnector::builder().build().unwrap().into();
        let https_connector = HttpsConnector::from((http_connector, tls_connector));
        let hyper_client = hyper::client::Client::builder().build(https_connector);
        clickhouse::Client::with_http_client(hyper_client)
            .with_url(dotenvy::var("CLICKHOUSE_URL").unwrap())
            .with_user(dotenvy::var("CLICKHOUSE_USER").unwrap())
            .with_password(dotenvy::var("CLICKHOUSE_PASSWORD").unwrap())
    };
    client
        .query(&format!("CREATE DATABASE IF NOT EXISTS {database}"))
        .execute()
        .await?;
    // Page-view events — see `crate::models::analytics::PageView`.
    client
        .query(&format!(
            "
            CREATE TABLE IF NOT EXISTS {database}.views
            (
                id UUID,
                recorded DateTime64(4),
                domain String,
                site_path String,

                user_id UInt64,
                project_id UInt64,

                ip IPv6,
                country String,
                user_agent String,
                headers Array(Tuple(String, String)),
            )
            ENGINE = MergeTree()
            PRIMARY KEY (id, recorded)
            "
        ))
        .execute()
        .await?;
    // Download events — see `crate::models::analytics::Download`.
    client
        .query(&format!(
            "
            CREATE TABLE IF NOT EXISTS {database}.downloads
            (
                id UUID,
                recorded DateTime64(4),
                domain String,
                site_path String,

                user_id UInt64,
                project_id UInt64,
                version_id UInt64,

                ip IPv6,
                country String,
                user_agent String,
                headers Array(Tuple(String, String)),
            )
            ENGINE = MergeTree()
            PRIMARY KEY (id, recorded)
            "
        ))
        .execute()
        .await?;
    // Launcher playtime reports — see `crate::models::analytics::Playtime`.
    client
        .query(&format!(
            "
            CREATE TABLE IF NOT EXISTS {database}.playtime
            (
                id UUID,
                recorded DateTime64(4),
                seconds UInt64,

                user_id UInt64,
                project_id UInt64,
                version_id UInt64,

                loader String,
                game_version String,
                parent UInt64,
            )
            ENGINE = MergeTree()
            PRIMARY KEY (id, recorded)
            "
        ))
        .execute()
        .await?;
    // All subsequent queries/inserts target the analytics database.
    Ok(client.with_database(database))
}

View File

@@ -16,7 +16,7 @@ const FLOWS_NAMESPACE: &str = "flows";
pub enum Flow {
OAuth {
user_id: Option<UserId>,
url: String,
url: Option<String>,
provider: AuthProvider,
},
Login2FA {
@@ -33,6 +33,10 @@ pub enum Flow {
user_id: UserId,
confirm_email: String,
},
MinecraftAuth,
MicrosoftLogin {
access_token: String,
},
}
impl Flow {

View File

@@ -1,23 +1,26 @@
use crate::file_hosting::S3Host;
use crate::queue::analytics::AnalyticsQueue;
use crate::queue::download::DownloadQueue;
use crate::queue::payouts::PayoutsQueue;
use crate::queue::payouts::{process_payout, PayoutsQueue};
use crate::queue::session::AuthQueue;
use crate::queue::socket::ActiveSockets;
use crate::ratelimit::errors::ARError;
use crate::ratelimit::memory::{MemoryStore, MemoryStoreActor};
use crate::ratelimit::middleware::RateLimiter;
use crate::util::cors::default_cors;
use crate::util::env::{parse_strings_from_var, parse_var};
use actix_cors::Cors;
use actix_files::Files;
use actix_web::{web, App, HttpServer};
use chrono::{DateTime, Utc};
use deadpool_redis::{Config, Runtime};
use env_logger::Env;
use log::{error, info, warn};
use search::indexing::index_projects;
use search::indexing::IndexingSettings;
use std::sync::Arc;
use tokio::sync::Mutex;
use tokio::sync::{Mutex, RwLock};
mod auth;
mod clickhouse;
mod database;
mod file_hosting;
mod models;
@@ -121,8 +124,7 @@ async fn main() -> std::io::Result<()> {
let search_config_ref = search_config_ref.clone();
async move {
info!("Indexing local database");
let settings = IndexingSettings { index_local: true };
let result = index_projects(pool_ref, settings, &search_config_ref).await;
let result = index_projects(pool_ref, &search_config_ref).await;
if let Err(e) = result {
warn!("Local project indexing failed: {:?}", e);
}
@@ -280,11 +282,75 @@ async fn main() -> std::io::Result<()> {
}
});
info!("Initializing clickhouse connection");
let clickhouse = clickhouse::init_client().await.unwrap();
let reader = Arc::new(queue::maxmind::MaxMindIndexer::new().await.unwrap());
{
let reader_ref = reader.clone();
scheduler.run(std::time::Duration::from_secs(60 * 60 * 24), move || {
let reader_ref = reader_ref.clone();
async move {
info!("Downloading MaxMind GeoLite2 country database");
let result = reader_ref.index().await;
if let Err(e) = result {
warn!(
"Downloading MaxMind GeoLite2 country database failed: {:?}",
e
);
}
info!("Done downloading MaxMind GeoLite2 country database");
}
});
}
info!("Downloading MaxMind GeoLite2 country database");
let analytics_queue = Arc::new(AnalyticsQueue::new());
{
let client_ref = clickhouse.clone();
let analytics_queue_ref = analytics_queue.clone();
scheduler.run(std::time::Duration::from_secs(60 * 5), move || {
let client_ref = client_ref.clone();
let analytics_queue_ref = analytics_queue_ref.clone();
async move {
info!("Indexing analytics queue");
let result = analytics_queue_ref.index(client_ref).await;
if let Err(e) = result {
warn!("Indexing analytics queue failed: {:?}", e);
}
info!("Done indexing analytics queue");
}
});
}
{
let pool_ref = pool.clone();
let redis_ref = redis_pool.clone();
let client_ref = clickhouse.clone();
scheduler.run(std::time::Duration::from_secs(60 * 60 * 6), move || {
let pool_ref = pool_ref.clone();
let redis_ref = redis_ref.clone();
let client_ref = client_ref.clone();
async move {
info!("Done running payouts");
let result = process_payout(&pool_ref, &redis_ref, &client_ref).await;
if let Err(e) = result {
warn!("Payouts run failed: {:?}", e);
}
info!("Done running payouts");
}
});
}
let ip_salt = Pepper {
pepper: models::ids::Base62Id(models::ids::random_base62(11)).to_string(),
};
let payouts_queue = web::Data::new(Mutex::new(PayoutsQueue::new()));
let active_sockets = web::Data::new(RwLock::new(ActiveSockets::default()));
let store = MemoryStore::new();
@@ -294,14 +360,6 @@ async fn main() -> std::io::Result<()> {
HttpServer::new(move || {
App::new()
.wrap(actix_web::middleware::Compress::default())
.wrap(
Cors::default()
.allow_any_origin()
.allow_any_header()
.allow_any_method()
.max_age(3600)
.send_wildcard(),
)
.wrap(
RateLimiter::new(MemoryStoreActor::from(store.clone()).start())
.with_identifier(|req| {
@@ -323,6 +381,7 @@ async fn main() -> std::io::Result<()> {
.with_max_requests(300)
.with_ignore_key(dotenvy::var("RATE_LIMIT_IGNORE_KEY").ok()),
)
.wrap(sentry_actix::Sentry::new())
.app_data(
web::FormConfig::default().error_handler(|err, _req| {
routes::ApiError::Validation(err.to_string()).into()
@@ -351,11 +410,15 @@ async fn main() -> std::io::Result<()> {
.app_data(session_queue.clone())
.app_data(payouts_queue.clone())
.app_data(web::Data::new(ip_salt.clone()))
.wrap(sentry_actix::Sentry::new())
.app_data(web::Data::new(analytics_queue.clone()))
.app_data(web::Data::new(clickhouse.clone()))
.app_data(web::Data::new(reader.clone()))
.app_data(active_sockets.clone())
.configure(routes::root_config)
.configure(routes::v2::config)
.configure(routes::v3::config)
.default_service(web::get().to(routes::not_found))
.service(Files::new("/", "assets/"))
.default_service(web::get().wrap(default_cors()).to(routes::not_found))
})
.bind(dotenvy::var("BIND_ADDR").unwrap())?
.run()
@@ -431,9 +494,6 @@ fn check_env_vars() -> bool {
failed |= true;
}
failed |= check_var::<String>("ARIADNE_ADMIN_KEY");
failed |= check_var::<String>("ARIADNE_URL");
failed |= check_var::<String>("PAYPAL_API_URL");
failed |= check_var::<String>("PAYPAL_CLIENT_ID");
failed |= check_var::<String>("PAYPAL_CLIENT_SECRET");
@@ -462,5 +522,21 @@ fn check_env_vars() -> bool {
failed |= check_var::<String>("BEEHIIV_PUBLICATION_ID");
failed |= check_var::<String>("BEEHIIV_API_KEY");
if parse_strings_from_var("ANALYTICS_ALLOWED_ORIGINS").is_none() {
warn!(
"Variable `ANALYTICS_ALLOWED_ORIGINS` missing in dotenv or not a json array of strings"
);
failed |= true;
}
failed |= check_var::<String>("CLICKHOUSE_URL");
failed |= check_var::<String>("CLICKHOUSE_USER");
failed |= check_var::<String>("CLICKHOUSE_PASSWORD");
failed |= check_var::<String>("CLICKHOUSE_DATABASE");
failed |= check_var::<String>("MAXMIND_LICENSE_KEY");
failed |= check_var::<u64>("PAYOUTS_BUDGET");
failed
}

111
src/models/analytics.rs Normal file
View File

@@ -0,0 +1,111 @@
use clickhouse::Row;
use serde::Serialize;
use std::hash::{Hash, Hasher};
use std::net::Ipv6Addr;
use uuid::Uuid;
/// A single project-download event, batched by `AnalyticsQueue` and written
/// to the ClickHouse `downloads` table.
#[derive(Row, Serialize, Clone)]
pub struct Download {
    #[serde(with = "uuid::serde::compact")]
    pub id: Uuid,
    // Event time; payout queries compare this against Unix timestamps.
    pub recorded: i64,
    pub domain: String,
    pub site_path: String,
    // Modrinth User ID for logged in users, default 0
    pub user_id: u64,
    // default is 0 if unknown
    pub project_id: u64,
    // default is 0 if unknown
    pub version_id: u64,
    // The below information is used exclusively for data aggregation and fraud detection
    // (ex: download botting).
    pub ip: Ipv6Addr,
    pub country: String,
    pub user_agent: String,
    pub headers: Vec<(String, String)>,
}
// Equality and hashing are based solely on the event `id` (a UUID), so the
// `DashSet`s in `AnalyticsQueue` deduplicate by event identity.
impl PartialEq<Self> for Download {
    fn eq(&self, other: &Self) -> bool {
        self.id == other.id
    }
}
impl Eq for Download {}
impl Hash for Download {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.id.hash(state);
    }
}
/// A single page-view event, batched by `AnalyticsQueue` and written to the
/// ClickHouse `views` table.
#[derive(Row, Serialize, Clone)]
pub struct PageView {
    #[serde(with = "uuid::serde::compact")]
    pub id: Uuid,
    // Event time; payout queries compare this against Unix timestamps.
    pub recorded: i64,
    pub domain: String,
    pub site_path: String,
    // Modrinth User ID for logged in users
    pub user_id: u64,
    // Modrinth Project ID (used for payouts)
    pub project_id: u64,
    // The below information is used exclusively for data aggregation and fraud detection
    // (ex: page view botting).
    pub ip: Ipv6Addr,
    pub country: String,
    pub user_agent: String,
    pub headers: Vec<(String, String)>,
}
// Identity semantics mirror `Download`: only the UUID participates in
// equality/hashing, giving per-event deduplication in the queue.
impl PartialEq<Self> for PageView {
    fn eq(&self, other: &Self) -> bool {
        self.id == other.id
    }
}
impl Eq for PageView {}
impl Hash for PageView {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.id.hash(state);
    }
}
/// A play-session data point reported by the launcher, batched by
/// `AnalyticsQueue` and written to the ClickHouse `playtime` table.
#[derive(Row, Serialize, Clone)]
pub struct Playtime {
    #[serde(with = "uuid::serde::compact")]
    pub id: Uuid,
    pub recorded: i64,
    // Widened from u16 to u64 to match the `seconds UInt64` column declared
    // in src/clickhouse/mod.rs: RowBinary inserts use fixed-width integers,
    // so a u16 here would be serialized with the wrong byte width — and a
    // u16 would overflow after ~18 hours of playtime anyway.
    pub seconds: u64,
    // Modrinth User ID for logged in users (unused atm)
    pub user_id: u64,
    // Modrinth Project ID
    pub project_id: u64,
    // Modrinth Version ID
    pub version_id: u64,
    pub loader: String,
    pub game_version: String,
    /// Parent modpack this playtime was recorded in
    pub parent: u64,
}
// Identity semantics mirror `Download`/`PageView`: only the UUID participates
// in equality/hashing.
impl PartialEq<Self> for Playtime {
    fn eq(&self, other: &Self) -> bool {
        self.id == other.id
    }
}
impl Eq for Playtime {}
impl Hash for Playtime {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.id.hash(state);
    }
}

View File

@@ -19,7 +19,6 @@ pub use super::users::UserId;
///
/// This method panics if `n` is 0 or greater than 11, since a `u64`
/// can only represent up to 11 character base62 strings
#[allow(dead_code)]
#[inline]
pub fn random_base62(n: usize) -> u64 {
random_base62_rng(&mut rand::thread_rng(), n)

View File

@@ -1,3 +1,4 @@
pub mod analytics;
pub mod error;
pub mod ids;
pub mod notifications;

View File

@@ -82,7 +82,10 @@ bitflags::bitflags! {
// delete a session
const SESSION_DELETE = 1 << 29;
const ALL = 0b111111111111111111111111111111;
// perform analytics action
const PERFORM_ANALYTICS = 1 << 30;
const ALL = 0b1111111111111111111111111111111;
const NOT_RESTRICTED = 0b00000011111111111111100111;
const NONE = 0b0;
}
@@ -99,7 +102,8 @@ impl Scopes {
| Scopes::SESSION_READ
| Scopes::SESSION_DELETE
| Scopes::USER_AUTH_WRITE
| Scopes::USER_DELETE,
| Scopes::USER_DELETE
| Scopes::PERFORM_ANALYTICS,
)
}
}

70
src/queue/analytics.rs Normal file
View File

@@ -0,0 +1,70 @@
use crate::models::analytics::{Download, PageView, Playtime};
use dashmap::DashSet;
/// In-memory batching queue for analytics data points, flushed to ClickHouse
/// on a schedule via `index`.
pub struct AnalyticsQueue {
    views_queue: DashSet<PageView>,
    downloads_queue: DashSet<Download>,
    playtime_queue: DashSet<Playtime>,
}
// Batches analytics data points + transactions every few minutes
impl AnalyticsQueue {
    pub fn new() -> Self {
        AnalyticsQueue {
            views_queue: DashSet::with_capacity(1000),
            downloads_queue: DashSet::with_capacity(1000),
            playtime_queue: DashSet::with_capacity(1000),
        }
    }
    /// Takes a snapshot of `set` and removes exactly the snapshotted items,
    /// leaving anything inserted concurrently in place for the next flush.
    fn drain_set<T: std::hash::Hash + Eq + Clone>(set: &DashSet<T>) -> Vec<T> {
        let items: Vec<T> = set.iter().map(|item| item.clone()).collect();
        for item in &items {
            set.remove(item);
        }
        items
    }
    /// Queues a page view for the next flush.
    pub async fn add_view(&self, page_view: PageView) {
        self.views_queue.insert(page_view);
    }
    /// Queues a download event for the next flush.
    pub async fn add_download(&self, download: Download) {
        self.downloads_queue.insert(download);
    }
    /// Queues a playtime data point for the next flush.
    pub async fn add_playtime(&self, playtime: Playtime) {
        self.playtime_queue.insert(playtime);
    }
    /// Flushes all queued points into the `views`, `downloads` and `playtime`
    /// ClickHouse tables.
    ///
    /// Uses snapshot-and-remove (`drain_set`) rather than the previous
    /// clone-then-`clear()`: `clear()` also wiped any point inserted between
    /// the clone and the clear, silently losing data under concurrency.
    pub async fn index(&self, client: clickhouse::Client) -> Result<(), clickhouse::error::Error> {
        let views_queue = Self::drain_set(&self.views_queue);
        let downloads_queue = Self::drain_set(&self.downloads_queue);
        let playtime_queue = Self::drain_set(&self.playtime_queue);
        if !views_queue.is_empty() || !downloads_queue.is_empty() || !playtime_queue.is_empty() {
            let mut views = client.insert("views")?;
            for view in views_queue {
                views.write(&view).await?;
            }
            views.end().await?;
            let mut downloads = client.insert("downloads")?;
            for download in downloads_queue {
                downloads.write(&download).await?;
            }
            downloads.end().await?;
            let mut playtimes = client.insert("playtime")?;
            for playtime in playtime_queue {
                playtimes.write(&playtime).await?;
            }
            playtimes.end().await?;
        }
        Ok(())
    }
}

82
src/queue/maxmind.rs Normal file
View File

@@ -0,0 +1,82 @@
use flate2::read::GzDecoder;
use log::warn;
use maxminddb::geoip2::Country;
use std::io::{Cursor, Read};
use std::net::Ipv6Addr;
use tar::Archive;
use tokio::sync::RwLock;
/// Periodically-refreshed MaxMind GeoLite2 country database used to resolve
/// IPs to ISO country codes for analytics.
pub struct MaxMindIndexer {
    /// `None` until the first successful download/parse of the .mmdb file.
    pub reader: RwLock<Option<maxminddb::Reader<Vec<u8>>>>,
}
impl MaxMindIndexer {
    /// Creates the indexer, attempting an initial database download.
    /// A failed download leaves `reader` as `None` (rather than erroring),
    /// so the service can start without GeoIP data; `query` returns `None`
    /// until a later `index` succeeds.
    pub async fn new() -> Result<Self, reqwest::Error> {
        let reader = MaxMindIndexer::inner_index(false).await.ok().flatten();
        Ok(MaxMindIndexer {
            reader: RwLock::new(reader),
        })
    }
    /// Re-downloads the database and swaps in the new reader on success.
    /// On failure the previously-loaded reader (if any) is kept.
    pub async fn index(&self) -> Result<(), reqwest::Error> {
        let reader = MaxMindIndexer::inner_index(false).await?;
        if let Some(reader) = reader {
            let mut reader_new = self.reader.write().await;
            *reader_new = Some(reader);
        }
        Ok(())
    }
    /// Downloads the GeoLite2-Country tarball and extracts the first `.mmdb`
    /// entry. Returns `Ok(None)` when no usable database was found, unless
    /// `should_panic` is set.
    async fn inner_index(
        should_panic: bool,
    ) -> Result<Option<maxminddb::Reader<Vec<u8>>>, reqwest::Error> {
        // Propagate body-read failures with `?` (this fn already returns
        // Result<_, reqwest::Error>) instead of unwrapping, which panicked
        // the scheduler task on transient network errors.
        let response = reqwest::get(
            format!(
                "https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-Country&license_key={}&suffix=tar.gz",
                dotenvy::var("MAXMIND_LICENSE_KEY").unwrap()
            )
        ).await?.bytes().await?.to_vec();
        let tarfile = GzDecoder::new(Cursor::new(response));
        let mut archive = Archive::new(tarfile);
        if let Ok(entries) = archive.entries() {
            for mut file in entries.flatten() {
                if let Ok(path) = file.header().path() {
                    if path.extension().and_then(|x| x.to_str()) == Some("mmdb") {
                        let mut buf = Vec::new();
                        // Skip corrupt entries / unparseable databases instead
                        // of panicking; fall through to the warning below.
                        if file.read_to_end(&mut buf).is_ok() {
                            if let Ok(reader) = maxminddb::Reader::from_source(buf) {
                                return Ok(Some(reader));
                            }
                        }
                    }
                }
            }
        }
        if should_panic {
            panic!("Unable to download maxmind database- did you get a license key?")
        } else {
            warn!("Unable to download maxmind database.");
            Ok(None)
        }
    }
    /// Looks up the ISO country code for an IP, or `None` if the database is
    /// not loaded or the IP is unknown.
    pub async fn query(&self, ip: Ipv6Addr) -> Option<String> {
        let maxmind = self.reader.read().await;
        if let Some(ref maxmind) = *maxmind {
            maxmind
                .lookup::<Country>(ip.into())
                .ok()
                .and_then(|x| x.country.and_then(|x| x.iso_code.map(|x| x.to_string())))
        } else {
            None
        }
    }
}

View File

@@ -1,3 +1,6 @@
pub mod analytics;
pub mod download;
pub mod maxmind;
pub mod payouts;
pub mod session;
pub mod socket;

View File

@@ -1,9 +1,12 @@
use crate::models::projects::MonetizationStatus;
use crate::routes::ApiError;
use crate::util::env::parse_var;
use base64::Engine;
use chrono::{DateTime, Duration, Utc};
use chrono::{DateTime, Datelike, Duration, Utc, Weekday};
use rust_decimal::Decimal;
use serde::{Deserialize, Serialize};
use serde_json::json;
use sqlx::PgPool;
use std::collections::HashMap;
pub struct PayoutsQueue {
@@ -197,3 +200,210 @@ impl PayoutsQueue {
Ok(Decimal::ZERO)
}
}
/// Computes and records daily creator payouts from ClickHouse analytics.
///
/// Runs at most once per UTC day: if a `payouts_values` row already exists
/// for today's date the function returns early. Page views and downloads are
/// aggregated per project, converted into a share of the daily budget
/// (`PAYOUTS_BUDGET`, weekends weighted 25% higher than weekdays), and split
/// among accepted team members proportionally to `payouts_split`.
///
/// # Errors
/// Returns `ApiError` on ClickHouse or Postgres failures; all Postgres writes
/// happen inside one transaction, so a failure rolls back today's payouts.
pub async fn process_payout(
    pool: &PgPool,
    redis: &deadpool_redis::Pool,
    client: &clickhouse::Client,
) -> Result<(), ApiError> {
    // Midnight UTC of the current day — the aggregation window start and the
    // `created` key used for idempotency below.
    let start: DateTime<Utc> = DateTime::from_utc(
        Utc::now()
            .date_naive()
            .and_hms_nano_opt(0, 0, 0, 0)
            .unwrap_or_default(),
        Utc,
    );
    // Idempotency guard: skip if today's payouts were already processed.
    let results = sqlx::query!(
        "SELECT EXISTS(SELECT 1 FROM payouts_values WHERE created = $1)",
        start,
    )
    .fetch_one(pool)
    .await?;
    if results.exists.unwrap_or(false) {
        return Ok(());
    }
    let end = start + Duration::days(1);
    #[derive(Deserialize, clickhouse::Row)]
    struct ProjectMultiplier {
        pub page_views: u64,
        pub project_id: u64,
    }
    // Per-project counts and day-wide totals for views and downloads, fetched
    // concurrently. NOTE(review): the views queries filter on
    // `project_id != 0` while the downloads queries filter on `user_id != 0`
    // — confirm this asymmetry is intentional.
    let (views_values, views_sum, downloads_values, downloads_sum) = futures::future::try_join4(
        client
            .query(
                r#"
                SELECT COUNT(id) page_views, project_id
                FROM views
                WHERE (recorded BETWEEN ? AND ?) AND (project_id != 0)
                GROUP BY project_id
                ORDER BY page_views DESC
                "#,
            )
            .bind(start.timestamp())
            .bind(end.timestamp())
            .fetch_all::<ProjectMultiplier>(),
        client
            .query("SELECT COUNT(id) FROM views WHERE (recorded BETWEEN ? AND ?) AND (project_id != 0)")
            .bind(start.timestamp())
            .bind(end.timestamp())
            .fetch_one::<u64>(),
        client
            .query(
                r#"
                SELECT COUNT(id) page_views, project_id
                FROM downloads
                WHERE (recorded BETWEEN ? AND ?) AND (user_id != 0)
                GROUP BY project_id
                ORDER BY page_views DESC
                "#,
            )
            .bind(start.timestamp())
            .bind(end.timestamp())
            .fetch_all::<ProjectMultiplier>(),
        client
            .query("SELECT COUNT(id) FROM downloads WHERE (recorded BETWEEN ? AND ?) AND (user_id != 0)")
            .bind(start.timestamp())
            .bind(end.timestamp())
            .fetch_one::<u64>(),
    )
    .await?;
    let mut transaction = pool.begin().await?;
    struct PayoutMultipliers {
        // Total counted events across all projects for the day.
        sum: u64,
        // project_id -> counted events for that project.
        values: HashMap<u64, u64>,
    }
    let mut views_values = views_values
        .into_iter()
        .map(|x| (x.project_id, x.page_views))
        .collect::<HashMap<u64, u64>>();
    let downloads_values = downloads_values
        .into_iter()
        .map(|x| (x.project_id, x.page_views))
        .collect::<HashMap<u64, u64>>();
    // Merge download counts into view counts by *summing* per project.
    // (`HashMap::extend` would overwrite the view count of any project that
    // has both views and downloads, making `values` inconsistent with `sum`
    // below and shortchanging those projects.)
    for (project_id, downloads) in downloads_values {
        *views_values.entry(project_id).or_insert(0) += downloads;
    }
    let multipliers: PayoutMultipliers = PayoutMultipliers {
        sum: downloads_sum + views_sum,
        values: views_values,
    };
    struct Project {
        // user_id, payouts_split
        team_members: Vec<(i64, Decimal)>,
    }
    // Load accepted team members for every monetized project that has counts.
    let mut projects_map: HashMap<i64, Project> = HashMap::new();
    use futures::TryStreamExt;
    sqlx::query!(
        "
        SELECT m.id id, tm.user_id user_id, tm.payouts_split payouts_split
        FROM mods m
        INNER JOIN team_members tm on m.team_id = tm.team_id AND tm.accepted = TRUE
        WHERE m.id = ANY($1) AND m.monetization_status = $2
        ",
        &multipliers
            .values
            .keys()
            .map(|x| *x as i64)
            .collect::<Vec<i64>>(),
        MonetizationStatus::Monetized.as_str(),
    )
    .fetch_many(&mut *transaction)
    .try_for_each(|e| {
        if let Some(row) = e.right() {
            if let Some(project) = projects_map.get_mut(&row.id) {
                project.team_members.push((row.user_id, row.payouts_split));
            } else {
                projects_map.insert(
                    row.id,
                    Project {
                        team_members: vec![(row.user_id, row.payouts_split)],
                    },
                );
            }
        }
        futures::future::ready(Ok(()))
    })
    .await?;
    // Daily budget share: a 28-day month with 20 weekdays; weekend days pay a
    // 25% bonus over weekdays while keeping the monthly total at `amount`.
    let amount = Decimal::from(parse_var::<u64>("PAYOUTS_BUDGET").unwrap_or(0));
    let days = Decimal::from(28);
    let weekdays = Decimal::from(20);
    let weekend_bonus = Decimal::from(5) / Decimal::from(4);
    let weekday_amount = amount / (weekdays + (weekend_bonus) * (days - weekdays));
    let weekend_amount = weekday_amount * weekend_bonus;
    let payout = match start.weekday() {
        Weekday::Sat | Weekday::Sun => weekend_amount,
        _ => weekday_amount,
    };
    for (id, project) in projects_map {
        if let Some(value) = &multipliers.values.get(&(id as u64)) {
            // This project's share of the day's budget.
            let project_multiplier: Decimal =
                Decimal::from(**value) / Decimal::from(multipliers.sum);
            let sum_splits: Decimal = project.team_members.iter().map(|x| x.1).sum();
            let mut clear_cache_users = Vec::new();
            if sum_splits > Decimal::ZERO {
                for (user_id, split) in project.team_members {
                    // Member share, normalized so splits always sum to 1.
                    let payout: Decimal = payout * project_multiplier * (split / sum_splits);
                    if payout > Decimal::ZERO {
                        sqlx::query!(
                            "
                            INSERT INTO payouts_values (user_id, mod_id, amount, created)
                            VALUES ($1, $2, $3, $4)
                            ",
                            user_id,
                            id,
                            payout,
                            start
                        )
                        .execute(&mut *transaction)
                        .await?;
                        sqlx::query!(
                            "
                            UPDATE users
                            SET balance = balance + $1
                            WHERE id = $2
                            ",
                            payout,
                            user_id
                        )
                        .execute(&mut *transaction)
                        .await?;
                        clear_cache_users.push(user_id);
                    }
                }
            }
            // Invalidate cached user records whose balances changed.
            crate::database::models::User::clear_caches(
                &clear_cache_users
                    .into_iter()
                    .map(|x| (crate::database::models::UserId(x), None))
                    .collect::<Vec<_>>(),
                redis,
            )
            .await?;
        }
    }
    transaction.commit().await?;
    Ok(())
}

15
src/queue/socket.rs Normal file
View File

@@ -0,0 +1,15 @@
//! "Database" for Hydra
use actix_ws::Session;
use dashmap::DashMap;
pub struct ActiveSockets {
pub auth_sockets: DashMap<String, Session>,
}
impl Default for ActiveSockets {
fn default() -> Self {
Self {
auth_sockets: DashMap::new(),
}
}
}

View File

@@ -30,17 +30,6 @@ impl MemoryStore {
inner: Arc::new(DashMap::<String, (usize, Duration)>::new()),
}
}
#[allow(dead_code)]
/// Create a new hashmap with the provided capacity
pub fn with_capacity(capacity: usize) -> Self {
debug!("Creating new MemoryStore");
MemoryStore {
inner: Arc::new(DashMap::<String, (usize, Duration)>::with_capacity(
capacity,
)),
}
}
}
/// Actor for memory store

View File

@@ -1,7 +1,6 @@
use actix_web::{get, HttpResponse};
use actix_web::HttpResponse;
use serde_json::json;
#[get("/")]
pub async fn index_get() -> HttpResponse {
let data = json!({
"name": "modrinth-labrinth",

View File

@@ -1,4 +1,5 @@
use crate::file_hosting::FileHostingError;
use crate::util::cors::default_cors;
use actix_web::http::StatusCode;
use actix_web::{web, HttpResponse};
use futures::FutureExt;
@@ -14,11 +15,21 @@ mod updates;
pub use self::not_found::not_found;
pub fn root_config(cfg: &mut web::ServiceConfig) {
cfg.service(index::index_get);
cfg.service(web::scope("maven").configure(maven::config));
cfg.service(web::scope("updates").configure(updates::config));
cfg.route("", web::get().wrap(default_cors()).to(index::index_get));
cfg.service(
web::scope("api/v1").wrap_fn(|req, _srv| {
web::scope("maven")
.wrap(default_cors())
.configure(maven::config),
);
cfg.service(
web::scope("updates")
.wrap(default_cors())
.configure(updates::config),
);
cfg.service(
web::scope("api/v1")
.wrap(default_cors())
.wrap_fn(|req, _srv| {
async {
Ok(req.into_response(
HttpResponse::Gone()
@@ -40,6 +51,8 @@ pub enum ApiError {
Database(#[from] crate::database::models::DatabaseError),
#[error("Database Error: {0}")]
SqlxDatabase(#[from] sqlx::Error),
#[error("Clickhouse Error: {0}")]
Clickhouse(#[from] clickhouse::error::Error),
#[error("Internal server error: {0}")]
Xml(String),
#[error("Deserialization error: {0}")]
@@ -56,8 +69,6 @@ pub enum ApiError {
Search(#[from] meilisearch_sdk::errors::Error),
#[error("Indexing Error: {0}")]
Indexing(#[from] crate::search::indexing::IndexingError),
#[error("Ariadne Error: {0}")]
Analytics(String),
#[error("Payments Error: {0}")]
Payments(String),
#[error("Discord Error: {0}")]
@@ -82,6 +93,7 @@ impl actix_web::ResponseError for ApiError {
ApiError::Env(..) => StatusCode::INTERNAL_SERVER_ERROR,
ApiError::Database(..) => StatusCode::INTERNAL_SERVER_ERROR,
ApiError::SqlxDatabase(..) => StatusCode::INTERNAL_SERVER_ERROR,
ApiError::Clickhouse(..) => StatusCode::INTERNAL_SERVER_ERROR,
ApiError::Authentication(..) => StatusCode::UNAUTHORIZED,
ApiError::CustomAuthentication(..) => StatusCode::UNAUTHORIZED,
ApiError::Xml(..) => StatusCode::INTERNAL_SERVER_ERROR,
@@ -91,7 +103,6 @@ impl actix_web::ResponseError for ApiError {
ApiError::FileHosting(..) => StatusCode::INTERNAL_SERVER_ERROR,
ApiError::InvalidInput(..) => StatusCode::BAD_REQUEST,
ApiError::Validation(..) => StatusCode::BAD_REQUEST,
ApiError::Analytics(..) => StatusCode::FAILED_DEPENDENCY,
ApiError::Payments(..) => StatusCode::FAILED_DEPENDENCY,
ApiError::Discord(..) => StatusCode::FAILED_DEPENDENCY,
ApiError::Turnstile => StatusCode::BAD_REQUEST,
@@ -118,7 +129,6 @@ impl actix_web::ResponseError for ApiError {
ApiError::FileHosting(..) => "file_hosting_error",
ApiError::InvalidInput(..) => "invalid_input",
ApiError::Validation(..) => "invalid_input",
ApiError::Analytics(..) => "analytics_error",
ApiError::Payments(..) => "payments_error",
ApiError::Discord(..) => "discord_error",
ApiError::Turnstile => "turnstile_error",
@@ -127,6 +137,7 @@ impl actix_web::ResponseError for ApiError {
ApiError::PasswordHashing(..) => "password_hashing_error",
ApiError::PasswordStrengthCheck(..) => "strength_check_error",
ApiError::Mail(..) => "mail_error",
ApiError::Clickhouse(..) => "clickhouse_error",
},
description: &self.to_string(),
})

View File

@@ -1,23 +1,24 @@
use crate::database::models::{User, UserId};
use crate::auth::validate::get_user_record_from_bearer_token;
use crate::models::analytics::Download;
use crate::models::ids::ProjectId;
use crate::models::projects::MonetizationStatus;
use crate::models::pats::Scopes;
use crate::queue::analytics::AnalyticsQueue;
use crate::queue::maxmind::MaxMindIndexer;
use crate::queue::session::AuthQueue;
use crate::routes::ApiError;
use crate::util::guards::admin_key_guard;
use crate::DownloadQueue;
use actix_web::{patch, post, web, HttpResponse};
use chrono::{DateTime, SecondsFormat, Utc};
use rust_decimal::Decimal;
use actix_web::{patch, web, HttpRequest, HttpResponse};
use chrono::Utc;
use serde::Deserialize;
use serde_json::json;
use sqlx::PgPool;
use std::collections::HashMap;
use std::net::Ipv4Addr;
use std::sync::Arc;
use uuid::Uuid;
pub fn config(cfg: &mut web::ServiceConfig) {
cfg.service(
web::scope("admin")
.service(count_download)
.service(process_payout),
);
cfg.service(web::scope("admin").service(count_download));
}
#[derive(Deserialize)]
@@ -32,11 +33,28 @@ pub struct DownloadBody {
// This is an internal route, cannot be used without key
#[patch("/_count-download", guard = "admin_key_guard")]
#[allow(clippy::too_many_arguments)]
pub async fn count_download(
req: HttpRequest,
pool: web::Data<PgPool>,
redis: web::Data<deadpool_redis::Pool>,
maxmind: web::Data<Arc<MaxMindIndexer>>,
analytics_queue: web::Data<Arc<AnalyticsQueue>>,
session_queue: web::Data<AuthQueue>,
download_body: web::Json<DownloadBody>,
download_queue: web::Data<DownloadQueue>,
) -> Result<HttpResponse, ApiError> {
let token = download_body
.headers
.iter()
.find(|x| x.0.to_lowercase() == "authorization")
.map(|x| &**x.1);
let user = get_user_record_from_bearer_token(&req, token, &**pool, &redis, &session_queue)
.await
.ok()
.flatten();
let project_id: crate::database::models::ids::ProjectId = download_body.project_id.into();
let id_option = crate::models::ids::base62_impl::parse_base62(&download_body.version_name)
@@ -83,322 +101,44 @@ pub async fn count_download(
.await;
}
let client = reqwest::Client::new();
let url = url::Url::parse(&download_body.url)
.map_err(|_| ApiError::InvalidInput("invalid download URL specified!".to_string()))?;
client
.post(format!("{}download", dotenvy::var("ARIADNE_URL")?))
.header("Modrinth-Admin", dotenvy::var("ARIADNE_ADMIN_KEY")?)
.json(&json!({
"ip": download_body.ip,
"url": download_body.url,
"project_id": download_body.project_id,
"version_id": crate::models::projects::VersionId(version_id as u64).to_string(),
"headers": download_body.headers
}))
.send()
.await
.ok();
Ok(HttpResponse::NoContent().body(""))
}
#[derive(Deserialize)]
pub struct PayoutData {
amount: Decimal,
date: DateTime<Utc>,
}
// Internal payouts-automation route, guarded by the shared admin key.
//
// Flow: fetch per-project download "multipliers" for the requested day
// from Ariadne, load accepted team members of every monetized project
// with a multiplier entry, compute the modpack revenue split (the pack's
// team keeps 1/5 of its share, 4/5 goes to dependency teams weighted by
// how often they are depended on), then insert `payouts_values` rows and
// bump user balances inside a single transaction.
#[post("/_process_payout", guard = "admin_key_guard")]
pub async fn process_payout(
    pool: web::Data<PgPool>,
    redis: web::Data<deadpool_redis::Pool>,
    data: web::Json<PayoutData>,
) -> Result<HttpResponse, ApiError> {
    // Normalize the requested date to midnight UTC so every row written
    // for this day shares the same `created` timestamp.
    let start: DateTime<Utc> = DateTime::from_utc(
        data.date
            .date_naive()
            .and_hms_nano_opt(0, 0, 0, 0)
            .unwrap_or_default(),
        Utc,
    );
    let client = reqwest::Client::new();
    let mut transaction = pool.begin().await?;
    // Shape of Ariadne's response: total download weight for the day plus
    // a per-project weight map keyed by stringified project id.
    #[derive(Deserialize)]
    struct PayoutMultipliers {
        sum: u64,
        values: HashMap<String, u64>,
    }
    let multipliers: PayoutMultipliers = client
        .get(format!("{}multipliers", dotenvy::var("ARIADNE_URL")?,))
        .header("Modrinth-Admin", dotenvy::var("ARIADNE_ADMIN_KEY")?)
        .query(&[(
            "start_date",
            start.to_rfc3339_opts(SecondsFormat::Nanos, true),
        )])
        .send()
        .await
        .map_err(|_| ApiError::Analytics("Error while fetching payout multipliers!".to_string()))?
        .json()
        .await
        .map_err(|_| {
            ApiError::Analytics("Error while deserializing payout multipliers!".to_string())
        })?;
    struct Project {
        project_type: String,
        // user_id, payouts_split
        team_members: Vec<(i64, Decimal)>,
        // user_id, payouts_split, actual_project_id
        split_team_members: Vec<(i64, Decimal, i64)>,
    }
    let mut projects_map: HashMap<i64, Project> = HashMap::new();
    use futures::TryStreamExt;
    // Load accepted team members for every monetized project that has a
    // multiplier entry for this day; non-numeric multiplier keys are
    // silently skipped by the flat_map below.
    sqlx::query!(
        "
SELECT m.id id, tm.user_id user_id, tm.payouts_split payouts_split, pt.name project_type
FROM mods m
INNER JOIN team_members tm on m.team_id = tm.team_id AND tm.accepted = TRUE
INNER JOIN project_types pt ON pt.id = m.project_type
WHERE m.id = ANY($1) AND m.monetization_status = $2
",
        &multipliers
            .values
            .keys()
            .flat_map(|x| x.parse::<i64>().ok())
            .collect::<Vec<i64>>(),
        MonetizationStatus::Monetized.as_str(),
    )
    .fetch_many(&mut *transaction)
    .try_for_each(|e| {
        if let Some(row) = e.right() {
            if let Some(project) = projects_map.get_mut(&row.id) {
                project.team_members.push((row.user_id, row.payouts_split));
            } else {
                projects_map.insert(
                    row.id,
                    Project {
                        project_type: row.project_type,
                        team_members: vec![(row.user_id, row.payouts_split)],
                        split_team_members: Default::default(),
                    },
                );
            }
        }
        futures::future::ready(Ok(()))
    })
    .await?;
    // Specific Payout Conditions (ex: modpack payout split)
    let mut projects_split_dependencies = Vec::new();
    for (id, project) in &projects_map {
        if project.project_type == "modpack" {
            projects_split_dependencies.push(*id);
        }
    }
    if !projects_split_dependencies.is_empty() {
        // (dependent_id, (dependency_id, times_depended))
        let mut project_dependencies: HashMap<i64, Vec<(i64, i64)>> = HashMap::new();
        // dependency_ids to fetch team members from
        let mut fetch_team_members: Vec<i64> = Vec::new();
        // Count how often each dependency project appears across a
        // modpack's versions; the counts weight the 4/5 split below.
        sqlx::query!(
            "
SELECT mv.mod_id, m.id, COUNT(m.id) times_depended FROM versions mv
INNER JOIN dependencies d ON d.dependent_id = mv.id
INNER JOIN versions v ON d.dependency_id = v.id
INNER JOIN mods m ON v.mod_id = m.id OR d.mod_dependency_id = m.id
WHERE mv.mod_id = ANY($1)
group by mv.mod_id, m.id;
",
            &projects_split_dependencies
        )
        .fetch_many(&mut *transaction)
        .try_for_each(|e| {
            if let Some(row) = e.right() {
                fetch_team_members.push(row.id);
                if let Some(project) = project_dependencies.get_mut(&row.mod_id) {
                    project.push((row.id, row.times_depended.unwrap_or(0)));
                } else {
                    project_dependencies
                        .insert(row.mod_id, vec![(row.id, row.times_depended.unwrap_or(0))]);
                }
            }
            futures::future::ready(Ok(()))
        })
        .await?;
        // (project_id, (user_id, payouts_split))
        let mut team_members: HashMap<i64, Vec<(i64, Decimal)>> = HashMap::new();
        sqlx::query!(
            "
SELECT m.id id, tm.user_id user_id, tm.payouts_split payouts_split
FROM mods m
INNER JOIN team_members tm on m.team_id = tm.team_id AND tm.accepted = TRUE
WHERE m.id = ANY($1)
",
            &*fetch_team_members
        )
        .fetch_many(&mut *transaction)
        .try_for_each(|e| {
            if let Some(row) = e.right() {
                if let Some(project) = team_members.get_mut(&row.id) {
                    project.push((row.user_id, row.payouts_split));
                } else {
                    team_members.insert(row.id, vec![(row.user_id, row.payouts_split)]);
                }
            }
            futures::future::ready(Ok(()))
        })
        .await?;
        for (project_id, dependencies) in project_dependencies {
            let dep_sum: i64 = dependencies.iter().map(|x| x.1).sum();
            let project = projects_map.get_mut(&project_id);
            if let Some(project) = project {
                if dep_sum > 0 {
                    for dependency in dependencies {
                        // This dependency's fraction of the modpack's
                        // dependency pool.
                        let project_multiplier: Decimal =
                            Decimal::from(dependency.1) / Decimal::from(dep_sum);
                        if let Some(members) = team_members.get(&dependency.0) {
                            let members_sum: Decimal = members.iter().map(|x| x.1).sum();
                            if members_sum > Decimal::ZERO {
                                for member in members {
                                    let member_multiplier: Decimal = member.1 / members_sum;
                                    project.split_team_members.push((
                                        member.0,
                                        member_multiplier * project_multiplier,
                                        project_id,
                                    ));
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    for (id, project) in projects_map {
        if let Some(value) = &multipliers.values.get(&id.to_string()) {
            // This project's share of the day's pool.
            let project_multiplier: Decimal =
                Decimal::from(**value) / Decimal::from(multipliers.sum);
            // Modpacks keep 1/5 of their share; the other 4/5 goes to
            // dependency teams. Non-modpack projects keep everything.
            let default_split_given = Decimal::ONE;
            let split_given = Decimal::ONE / Decimal::from(5);
            let split_retention = Decimal::from(4) / Decimal::from(5);
            let sum_splits: Decimal = project.team_members.iter().map(|x| x.1).sum();
            let sum_tm_splits: Decimal = project.split_team_members.iter().map(|x| x.1).sum();
            let mut clear_cache_users = Vec::new();
            if sum_splits > Decimal::ZERO {
                for (user_id, split) in project.team_members {
                    let payout: Decimal = data.amount
                        * project_multiplier
                        * (split / sum_splits)
                        * (if !project.split_team_members.is_empty() {
                            &split_given
                        } else {
                            &default_split_given
                        });
                    if payout > Decimal::ZERO {
                        sqlx::query!(
                            "
INSERT INTO payouts_values (user_id, mod_id, amount, created)
VALUES ($1, $2, $3, $4)
",
                            user_id,
                            id,
                            payout,
                            start
                        )
                        .execute(&mut *transaction)
                        .await?;
                        sqlx::query!(
                            "
UPDATE users
SET balance = balance + $1
WHERE id = $2
",
                            payout,
                            user_id
                        )
                        .execute(&mut *transaction)
                        .await?;
                        clear_cache_users.push(user_id);
                    }
                }
            }
            if sum_tm_splits > Decimal::ZERO {
                for (user_id, split, project_id) in project.split_team_members {
                    let payout: Decimal = data.amount
                        * project_multiplier
                        * (split / sum_tm_splits)
                        * split_retention;
                    if payout > Decimal::ZERO {
                        sqlx::query!(
                            "
INSERT INTO payouts_values (user_id, mod_id, amount, created)
VALUES ($1, $2, $3, $4)
",
                            user_id,
                            project_id,
                            payout,
                            start
                        )
                        .execute(&mut *transaction)
                        .await?;
                        sqlx::query!(
                            "
UPDATE users
SET balance = balance + $1
WHERE id = $2
",
                            payout,
                            user_id
                        )
                        .execute(&mut *transaction)
                        .await?;
                        clear_cache_users.push(user_id);
                    }
                }
            }
            // Balances changed above — invalidate cached user records.
            User::clear_caches(
                &clear_cache_users
                    .into_iter()
                    .map(|x| (UserId(x), None))
                    .collect::<Vec<_>>(),
                &redis,
            )
            .await?;
        }
    }
    transaction.commit().await?;
let ip = super::analytics::convert_to_ip_v6(&download_body.ip)
.unwrap_or_else(|_| Ipv4Addr::new(127, 0, 0, 1).to_ipv6_mapped());
analytics_queue
.add_download(Download {
id: Uuid::new_v4(),
recorded: Utc::now().timestamp_nanos() / 100_000,
domain: url.host_str().unwrap_or_default().to_string(),
site_path: url.path().to_string(),
user_id: user
.and_then(|(scopes, x)| {
if scopes.contains(Scopes::PERFORM_ANALYTICS) {
Some(x.id.0 as u64)
} else {
None
}
})
.unwrap_or(0),
project_id: project_id as u64,
version_id: version_id as u64,
ip,
country: maxmind.query(ip).await.unwrap_or_default(),
user_agent: download_body
.headers
.get("user-agent")
.cloned()
.unwrap_or_default(),
headers: download_body
.headers
.clone()
.into_iter()
.filter(|x| !super::analytics::FILTERED_HEADERS.contains(&&*x.0.to_lowercase()))
.collect(),
})
.await;
Ok(HttpResponse::NoContent().body(""))
}

248
src/routes/v2/analytics.rs Normal file
View File

@@ -0,0 +1,248 @@
use crate::auth::get_user_from_headers;
use crate::models::analytics::{PageView, Playtime};
use crate::models::pats::Scopes;
use crate::queue::maxmind::MaxMindIndexer;
use crate::queue::session::AuthQueue;
use crate::routes::ApiError;
use crate::util::env::parse_strings_from_var;
use crate::AnalyticsQueue;
use actix_cors::Cors;
use actix_web::{post, web};
use actix_web::{HttpRequest, HttpResponse};
use chrono::Utc;
use serde::Deserialize;
use sqlx::PgPool;
use std::collections::HashMap;
use std::net::{AddrParseError, IpAddr, Ipv4Addr, Ipv6Addr};
use std::sync::Arc;
use url::Url;
use uuid::Uuid;
/// Registers the `v2/analytics` ingestion scope.
///
/// This scope carries its own CORS policy (instead of the API-wide
/// default): ingestion is called cross-origin from browser pages, so the
/// allowed origins come from the `ANALYTICS_ALLOWED_ORIGINS` env var,
/// with `"*"` acting as an allow-all escape hatch.
pub fn config(cfg: &mut web::ServiceConfig) {
    let cors = Cors::default()
        .allowed_origin_fn(|origin, _req_head| {
            let allowed_origins =
                parse_strings_from_var("ANALYTICS_ALLOWED_ORIGINS").unwrap_or_default();

            let origin = origin.to_str().unwrap_or_default().to_string();
            allowed_origins.contains(&"*".to_string()) || allowed_origins.contains(&origin)
        })
        .allowed_methods(vec!["GET", "POST"])
        .allowed_headers(vec![
            actix_web::http::header::AUTHORIZATION,
            actix_web::http::header::ACCEPT,
            actix_web::http::header::CONTENT_TYPE,
        ])
        .max_age(3600);

    cfg.service(
        web::scope("v2/analytics")
            .wrap(cors)
            .service(page_view_ingest)
            .service(playtime_ingest),
    );
}
/// Request headers stripped from analytics records before storage.
///
/// Entries must be lowercase: page-view header keys are lowercased
/// before filtering, and the download path lowercases each key when
/// comparing against this list.
pub const FILTERED_HEADERS: &[&str] = &[
    // Credentials/secrets must never be persisted.
    "authorization",
    "cookie",
    "modrinth-admin",
    // we already retrieve/use these elsewhere- so they are unneeded
    "user-agent",
    "cf-connecting-ip",
    "cf-ipcountry",
    "x-forwarded-for",
    "x-real-ip",
    // We don't need the information vercel provides from its headers
    "x-vercel-ip-city",
    "x-vercel-ip-timezone",
    "x-vercel-ip-longitude",
    "x-vercel-proxy-signature",
    "x-vercel-ip-country-region",
    "x-vercel-forwarded-for",
    "x-vercel-proxied-for",
    "x-vercel-proxy-signature-ts",
    "x-vercel-ip-latitude",
    "x-vercel-ip-country",
];
/// Parses a textual IP address and normalizes it to an `Ipv6Addr`.
///
/// IPv4 addresses become their IPv4-mapped IPv6 form (`::ffff:a.b.c.d`);
/// IPv6 addresses pass through unchanged. Returns the parse error for
/// input that is not a valid IP address.
pub fn convert_to_ip_v6(src: &str) -> Result<Ipv6Addr, AddrParseError> {
    src.parse::<IpAddr>().map(|addr| match addr {
        IpAddr::V4(v4) => v4.to_ipv6_mapped(),
        IpAddr::V6(v6) => v6,
    })
}
/// JSON body for `POST view`: the full URL of the page being viewed.
#[derive(Deserialize)]
pub struct UrlInput {
    url: String,
}
// Records a page view in the analytics queue.
//
// This route should sit behind the Cloudflare WAF to prevent
// non-browsers from calling it. Authentication is optional: a valid
// session merely attributes the view to the user. Returns 204 on
// success; rejects URLs that are not modrinth.com (unless origins are
// wildcarded via CORS_ALLOWED_ORIGINS).
#[post("view")]
pub async fn page_view_ingest(
    req: HttpRequest,
    maxmind: web::Data<Arc<MaxMindIndexer>>,
    analytics_queue: web::Data<Arc<AnalyticsQueue>>,
    session_queue: web::Data<AuthQueue>,
    url_input: web::Json<UrlInput>,
    pool: web::Data<PgPool>,
    redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
    // No scope requirement (None): any valid session is enough to tag
    // the view with a user id; anonymous views proceed with user_id 0.
    let user = get_user_from_headers(&req, &**pool, &redis, &session_queue, None)
        .await
        .ok();
    let conn_info = req.connection_info().peer_addr().map(|x| x.to_string());

    let url = Url::parse(&url_input.url)
        .map_err(|_| ApiError::InvalidInput("invalid page view URL specified!".to_string()))?;

    let domain = url
        .host_str()
        .ok_or_else(|| ApiError::InvalidInput("invalid page view URL specified!".to_string()))?;

    let allowed_origins = parse_strings_from_var("CORS_ALLOWED_ORIGINS").unwrap_or_default();
    // Only count views of modrinth.com pages (or anything, when origins
    // are wildcarded — e.g. local development).
    if !(domain.ends_with(".modrinth.com")
        || domain == "modrinth.com"
        || allowed_origins.contains(&"*".to_string()))
    {
        return Err(ApiError::InvalidInput(
            "invalid page view URL specified!".to_string(),
        ));
    }

    // Lowercase all header names up front; FILTERED_HEADERS and the
    // lookups below assume lowercase keys.
    let headers = req
        .headers()
        .into_iter()
        .map(|(key, val)| {
            (
                key.to_string().to_lowercase(),
                val.to_str().unwrap_or_default().to_string(),
            )
        })
        .collect::<HashMap<String, String>>();

    // Prefer Cloudflare's client-IP header over the raw peer address
    // (the peer is the proxy when behind CF); fall back to loopback if
    // neither parses.
    let ip = convert_to_ip_v6(if let Some(header) = headers.get("cf-connecting-ip") {
        header
    } else {
        conn_info.as_deref().unwrap_or_default()
    })
    .unwrap_or_else(|_| Ipv4Addr::new(127, 0, 0, 1).to_ipv6_mapped());

    let mut view = PageView {
        id: Uuid::new_v4(),
        // Recorded in units of 100µs (nanoseconds / 100_000).
        recorded: Utc::now().timestamp_nanos() / 100_000,
        domain: domain.to_string(),
        site_path: url.path().to_string(),
        // 0 means "unattributed"; filled in below when resolvable.
        user_id: 0,
        project_id: 0,
        ip,
        country: maxmind.query(ip).await.unwrap_or_default(),
        user_agent: headers.get("user-agent").cloned().unwrap_or_default(),
        headers: headers
            .into_iter()
            .filter(|x| !FILTERED_HEADERS.contains(&&*x.0))
            .collect(),
    };

    // Attribute the view to a project when the path looks like a project
    // page: /<project_type>/<slug-or-id>/...
    if let Some(segments) = url.path_segments() {
        let segments_vec = segments.collect::<Vec<_>>();

        if segments_vec.len() >= 2 {
            const PROJECT_TYPES: &[&str] = &[
                "mod",
                "modpack",
                "plugin",
                "resourcepack",
                "shader",
                "datapack",
            ];

            if PROJECT_TYPES.contains(&segments_vec[0]) {
                let project =
                    crate::database::models::Project::get(segments_vec[1], &**pool, &redis).await?;

                if let Some(project) = project {
                    view.project_id = project.inner.id.0 as u64;
                }
            }
        }
    }

    if let Some((_, user)) = user {
        view.user_id = user.id.0;
    }

    analytics_queue.add_view(view).await;

    Ok(HttpResponse::NoContent().body(""))
}
/// One playtime report from the launcher for a single version
/// (submitted as a map of version id -> PlaytimeInput).
#[derive(Deserialize)]
pub struct PlaytimeInput {
    // seconds played since the last report; ingest skips values > 300
    seconds: u16,
    loader: String,
    game_version: String,
    // NOTE(review): presumably the parent (e.g. modpack) version this was
    // launched through — confirm against the launcher client
    parent: Option<crate::models::ids::VersionId>,
}
// Ingests launcher-reported playtime, keyed by version id.
//
// Requires an authenticated token carrying the analytics scope. Each
// entry above a 300-second interval, and each entry whose version id
// cannot be resolved, is silently skipped rather than failing the batch.
// Returns 204 on success.
#[post("playtime")]
pub async fn playtime_ingest(
    req: HttpRequest,
    analytics_queue: web::Data<Arc<AnalyticsQueue>>,
    session_queue: web::Data<AuthQueue>,
    playtime_input: web::Json<HashMap<crate::models::ids::VersionId, PlaytimeInput>>,
    pool: web::Data<PgPool>,
    redis: web::Data<deadpool_redis::Pool>,
) -> Result<HttpResponse, ApiError> {
    let (_, user) = get_user_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Some(&[Scopes::PERFORM_ANALYTICS]),
    )
    .await?;

    let playtimes = playtime_input.0;

    // Cap batch size to bound the DB lookup and queue writes below.
    if playtimes.len() > 2000 {
        return Err(ApiError::InvalidInput(
            "Too much playtime entered for version!".to_string(),
        ));
    }

    // Resolve all reported version ids in one query; unresolved ids are
    // dropped in the loop below.
    let versions = crate::database::models::Version::get_many(
        &playtimes.iter().map(|x| (*x.0).into()).collect::<Vec<_>>(),
        &**pool,
        &redis,
    )
    .await?;

    for (id, playtime) in playtimes {
        // Clients report at most every 300s; larger values are implausible.
        if playtime.seconds > 300 {
            continue;
        }

        if let Some(version) = versions.iter().find(|x| id == x.inner.id.into()) {
            analytics_queue
                .add_playtime(Playtime {
                    id: Default::default(),
                    // Recorded in units of 100µs (nanoseconds / 100_000).
                    recorded: Utc::now().timestamp_nanos() / 100_000,
                    seconds: playtime.seconds,
                    user_id: user.id.0,
                    // BUGFIX: these two fields were swapped — the version's
                    // own id was being stored as the project id and the
                    // owning project's id as the version id.
                    project_id: version.inner.project_id.0 as u64,
                    version_id: version.inner.id.0 as u64,
                    loader: playtime.loader,
                    game_version: playtime.game_version,
                    parent: playtime.parent.map(|x| x.0).unwrap_or(0),
                })
                .await;
        }
    }

    Ok(HttpResponse::NoContent().finish())
}

View File

@@ -1,4 +1,5 @@
mod admin;
mod analytics;
mod moderation;
mod notifications;
pub(crate) mod project_creation;
@@ -14,10 +15,12 @@ mod version_file;
mod versions;
pub use super::ApiError;
use crate::util::cors::default_cors;
pub fn config(cfg: &mut actix_web::web::ServiceConfig) {
cfg.service(
actix_web::web::scope("v2")
.wrap(default_cors())
.configure(admin::config)
.configure(crate::auth::session::config)
.configure(crate::auth::flows::config)
@@ -36,4 +39,6 @@ pub fn config(cfg: &mut actix_web::web::ServiceConfig) {
.configure(version_file::config)
.configure(versions::config),
);
cfg.configure(analytics::config);
}

View File

@@ -1,9 +1,14 @@
pub use super::ApiError;
use crate::util::cors::default_cors;
use actix_web::{web, HttpResponse};
use serde_json::json;
pub fn config(cfg: &mut web::ServiceConfig) {
cfg.service(web::scope("v3").route("", web::get().to(hello_world)));
cfg.service(
web::scope("v3")
.wrap(default_cors())
.route("", web::get().to(hello_world)),
);
}
pub async fn hello_world() -> Result<HttpResponse, ApiError> {

View File

@@ -30,31 +30,10 @@ pub enum IndexingError {
// assumes a max average size of 1KiB per project to avoid this cap.
const MEILISEARCH_CHUNK_SIZE: usize = 10000;
#[derive(Debug)]
pub struct IndexingSettings {
pub index_local: bool,
}
impl IndexingSettings {
#[allow(dead_code)]
pub fn from_env() -> Self {
//FIXME: what?
let index_local = true;
Self { index_local }
}
}
pub async fn index_projects(
pool: PgPool,
settings: IndexingSettings,
config: &SearchConfig,
) -> Result<(), IndexingError> {
pub async fn index_projects(pool: PgPool, config: &SearchConfig) -> Result<(), IndexingError> {
let mut docs_to_add: Vec<UploadSearchProject> = vec![];
if settings.index_local {
docs_to_add.append(&mut index_local(pool.clone()).await?);
}
docs_to_add.append(&mut index_local(pool.clone()).await?);
// Write Indices
add_projects(docs_to_add, config).await?;
@@ -74,7 +53,6 @@ async fn create_index(
.await?;
match client.get_index(name).await {
// TODO: update index settings on startup (or delete old indices on startup)
Ok(index) => {
index
.set_settings(&default_settings())

10
src/util/cors.rs Normal file
View File

@@ -0,0 +1,10 @@
use actix_cors::Cors;
/// Permissive CORS policy shared by the public API scopes (v2/v3).
///
/// Allows any origin, header, and method, with a 1-hour preflight cache;
/// `send_wildcard` responds with a literal `Access-Control-Allow-Origin: *`
/// instead of echoing the requesting origin.
pub fn default_cors() -> Cors {
    Cors::default()
        .allow_any_origin()
        .allow_any_header()
        .allow_any_method()
        .max_age(3600)
        .send_wildcard()
}

View File

@@ -1,4 +1,5 @@
pub mod captcha;
pub mod cors;
pub mod env;
pub mod ext;
pub mod guards;