From 4630d175d7a9760ee77a11b62a21c436824e2dc7 Mon Sep 17 00:00:00 2001 From: Geometrically <18202329+Geometrically@users.noreply.github.com> Date: Mon, 4 Dec 2023 18:49:51 -0700 Subject: [PATCH] Optimize analytics queries (#781) * Optimize analytics queries * fix clippy --- src/clickhouse/fetch.rs | 177 +++++++-------------- src/clickhouse/mod.rs | 9 +- src/models/v3/analytics.rs | 57 +------ src/queue/payouts.rs | 8 +- src/routes/analytics.rs | 3 - src/routes/internal/admin.rs | 2 - src/routes/v2/admin.rs | 2 - src/routes/v2/analytics_get.rs | 272 --------------------------------- src/routes/v2/collections.rs | 191 ----------------------- src/routes/v2/organizations.rs | 265 -------------------------------- src/routes/v3/analytics_get.rs | 192 ++--------------------- 11 files changed, 83 insertions(+), 1095 deletions(-) delete mode 100644 src/routes/v2/analytics_get.rs delete mode 100644 src/routes/v2/collections.rs delete mode 100644 src/routes/v2/organizations.rs diff --git a/src/clickhouse/fetch.rs b/src/clickhouse/fetch.rs index 620655635..8f9de4d42 100644 --- a/src/clickhouse/fetch.rs +++ b/src/clickhouse/fetch.rs @@ -1,9 +1,6 @@ use std::sync::Arc; -use crate::{ - models::ids::{ProjectId, VersionId}, - routes::ApiError, -}; +use crate::{models::ids::ProjectId, routes::ApiError}; use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; @@ -39,201 +36,141 @@ pub struct ReturnDownloads { // Only one of project_id or version_id should be used // Fetches playtimes as a Vec of ReturnPlaytimes pub async fn fetch_playtimes( - projects: Option>, - versions: Option>, + projects: Vec, start_date: DateTime, end_date: DateTime, resolution_minute: u32, client: Arc, ) -> Result, ApiError> { - let project_or_version = if projects.is_some() && versions.is_none() { - "project_id" - } else if versions.is_some() { - "version_id" - } else { - return Err(ApiError::InvalidInput( - "Only one of 'project_id' or 'version_id' should be used.".to_string(), - )); - }; - - let mut query = client - .query(&format!( + let query = client + .query( " - SELECT - toUnixTimestamp(toStartOfInterval(recorded, toIntervalMinute(?))) AS time, - {project_or_version} AS id, - SUM(seconds) AS total_seconds - FROM playtime - WHERE recorded BETWEEN ? AND ? - AND {project_or_version} IN ? - GROUP BY - time, - {project_or_version} - " - )) + SELECT + toUnixTimestamp(toStartOfInterval(recorded, toIntervalMinute(?))) AS time, + project_id AS id, + SUM(seconds) AS total_seconds + FROM playtime + WHERE recorded BETWEEN ? AND ? + AND project_id IN ? + GROUP BY + time, + project_id + ", + ) .bind(resolution_minute) .bind(start_date.timestamp()) - .bind(end_date.timestamp()); - - if let Some(projects) = projects { - query = query.bind(projects.iter().map(|x| x.0).collect::>()); - } else if let Some(versions) = versions { - query = query.bind(versions.iter().map(|x| x.0).collect::>()); - } + .bind(end_date.timestamp()) + .bind(projects.iter().map(|x| x.0).collect::>()); Ok(query.fetch_all().await?) 
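// Note: an inference drawn from this patch rather than upstream documentation —
// with the version_id branch removed, every analytics filter is on project_id,
// which lines up with the new `(project_id, recorded)` MergeTree primary key
// declared in src/clickhouse/mod.rs below, so these lookups can prune by the
// sort key instead of scanning rows keyed by a UUID `id`. The single
// `.bind(...)` chain replaces the old conditional project-vs-version binding.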
} // Fetches views as a Vec of ReturnViews pub async fn fetch_views( - projects: Option>, - versions: Option>, + projects: Vec, start_date: DateTime, end_date: DateTime, resolution_minutes: u32, client: Arc, ) -> Result, ApiError> { - let project_or_version = if projects.is_some() && versions.is_none() { - "project_id" - } else if versions.is_some() { - "version_id" - } else { - return Err(ApiError::InvalidInput( - "Only one of 'project_id' or 'version_id' should be used.".to_string(), - )); - }; - - let mut query = client - .query(&format!( + let query = client + .query( " SELECT toUnixTimestamp(toStartOfInterval(recorded, toIntervalMinute(?))) AS time, - {project_or_version} AS id, - count(views.id) AS total_views + project_id AS id, + count(1) AS total_views FROM views WHERE recorded BETWEEN ? AND ? - AND {project_or_version} IN ? + AND project_id IN ? GROUP BY - time, {project_or_version} - " - )) + time, project_id + ", + ) .bind(resolution_minutes) .bind(start_date.timestamp()) - .bind(end_date.timestamp()); - - if let Some(projects) = projects { - query = query.bind(projects.iter().map(|x| x.0).collect::>()); - } else if let Some(versions) = versions { - query = query.bind(versions.iter().map(|x| x.0).collect::>()); - } + .bind(end_date.timestamp()) + .bind(projects.iter().map(|x| x.0).collect::>()); Ok(query.fetch_all().await?) } // Fetches downloads as a Vec of ReturnDownloads pub async fn fetch_downloads( - projects: Option>, - versions: Option>, + projects: Vec, start_date: DateTime, end_date: DateTime, resolution_minutes: u32, client: Arc, ) -> Result, ApiError> { - let project_or_version = if projects.is_some() && versions.is_none() { - "project_id" - } else if versions.is_some() { - "version_id" - } else { - return Err(ApiError::InvalidInput( - "Only one of 'project_id' or 'version_id' should be used.".to_string(), - )); - }; - - let mut query = client - .query(&format!( + let query = client + .query( " SELECT toUnixTimestamp(toStartOfInterval(recorded, toIntervalMinute(?))) AS time, - {project_or_version} as id, - count(downloads.id) AS total_downloads + project_id as id, + count(1) AS total_downloads FROM downloads WHERE recorded BETWEEN ? AND ? - AND {project_or_version} IN ? - GROUP BY time, {project_or_version} - " - )) + AND project_id IN ? + GROUP BY time, project_id + ", + ) .bind(resolution_minutes) .bind(start_date.timestamp()) - .bind(end_date.timestamp()); - - if let Some(projects) = projects { - query = query.bind(projects.iter().map(|x| x.0).collect::>()); - } else if let Some(versions) = versions { - query = query.bind(versions.iter().map(|x| x.0).collect::>()); - } + .bind(end_date.timestamp()) + .bind(projects.iter().map(|x| x.0).collect::>()); Ok(query.fetch_all().await?) } // Fetches countries as a Vec of ReturnCountry pub async fn fetch_countries( - projects: Option>, - versions: Option>, + projects: Vec, start_date: DateTime, end_date: DateTime, client: Arc, ) -> Result, ApiError> { - let project_or_version = if projects.is_some() && versions.is_none() { - "project_id" - } else if versions.is_some() { - "version_id" - } else { - return Err(ApiError::InvalidInput( - "Only one of 'project_id' or 'version_id' should be used.".to_string(), - )); - }; - - let mut query = client.query(&format!( + let query = client.query( " WITH view_grouping AS ( SELECT country, - {project_or_version}, - count(id) AS total_views + project_id, + count(1) AS total_views FROM views WHERE recorded BETWEEN ? AND ? 
GROUP BY country, - {project_or_version} + project_id ), download_grouping AS ( SELECT country, - {project_or_version}, - count(id) AS total_downloads + project_id, + count(1) AS total_downloads FROM downloads WHERE recorded BETWEEN ? AND ? GROUP BY country, - {project_or_version} + project_id ) SELECT v.country, - v.{project_or_version}, + v.project_id, v.total_views, d.total_downloads FROM view_grouping AS v - LEFT JOIN download_grouping AS d ON (v.country = d.country) AND (v.{project_or_version} = d.{project_or_version}) - WHERE {project_or_version} IN ? + LEFT JOIN download_grouping AS d ON (v.country = d.country) AND (v.project_id = d.project_id) + WHERE project_id IN ? " - )).bind(start_date.timestamp()).bind(end_date.timestamp()).bind(start_date.timestamp()).bind(end_date.timestamp()); - - if let Some(projects) = projects { - query = query.bind(projects.iter().map(|x| x.0).collect::>()); - } else if let Some(versions) = versions { - query = query.bind(versions.iter().map(|x| x.0).collect::>()); - } + ) + .bind(start_date.timestamp()) + .bind(end_date.timestamp()) + .bind(start_date.timestamp()) + .bind(end_date.timestamp()) + .bind(projects.iter().map(|x| x.0).collect::>()); Ok(query.fetch_all().await?) } diff --git a/src/clickhouse/mod.rs b/src/clickhouse/mod.rs index c46c00893..c1763dc62 100644 --- a/src/clickhouse/mod.rs +++ b/src/clickhouse/mod.rs @@ -36,7 +36,6 @@ pub async fn init_client_with_database( " CREATE TABLE IF NOT EXISTS {database}.views ( - id UUID, recorded DateTime64(4), domain String, site_path String, @@ -50,7 +49,7 @@ pub async fn init_client_with_database( headers Array(Tuple(String, String)), ) ENGINE = MergeTree() - PRIMARY KEY (id, recorded) + PRIMARY KEY (project_id, recorded) " )) .execute() @@ -61,7 +60,6 @@ pub async fn init_client_with_database( " CREATE TABLE IF NOT EXISTS {database}.downloads ( - id UUID, recorded DateTime64(4), domain String, site_path String, @@ -76,7 +74,7 @@ pub async fn init_client_with_database( headers Array(Tuple(String, String)), ) ENGINE = MergeTree() - PRIMARY KEY (id, recorded) + PRIMARY KEY (project_id, recorded) " )) .execute() @@ -87,7 +85,6 @@ pub async fn init_client_with_database( " CREATE TABLE IF NOT EXISTS {database}.playtime ( - id UUID, recorded DateTime64(4), seconds UInt64, @@ -100,7 +97,7 @@ pub async fn init_client_with_database( parent UInt64, ) ENGINE = MergeTree() - PRIMARY KEY (id, recorded) + PRIMARY KEY (project_id, recorded) " )) .execute() diff --git a/src/models/v3/analytics.rs b/src/models/v3/analytics.rs index ea20da6b4..669175ad2 100644 --- a/src/models/v3/analytics.rs +++ b/src/models/v3/analytics.rs @@ -1,13 +1,10 @@ use clickhouse::Row; use serde::{Deserialize, Serialize}; -use std::hash::{Hash, Hasher}; +use std::hash::Hash; use std::net::Ipv6Addr; -use uuid::Uuid; -#[derive(Row, Serialize, Deserialize, Clone)] +#[derive(Row, Serialize, Deserialize, Clone, Eq, PartialEq, Hash)] pub struct Download { - #[serde(with = "uuid::serde::compact")] - pub id: Uuid, pub recorded: i64, pub domain: String, pub site_path: String, @@ -27,24 +24,8 @@ pub struct Download { pub headers: Vec<(String, String)>, } -impl PartialEq for Download { - fn eq(&self, other: &Self) -> bool { - self.id == other.id - } -} - -impl Eq for Download {} - -impl Hash for Download { - fn hash(&self, state: &mut H) { - self.id.hash(state); - } -} - -#[derive(Row, Serialize, Deserialize, Clone)] +#[derive(Row, Serialize, Deserialize, Clone, Eq, PartialEq, Hash)] pub struct PageView { - #[serde(with = "uuid::serde::compact")] - pub 
id: Uuid, pub recorded: i64, pub domain: String, pub site_path: String, @@ -62,24 +43,8 @@ pub struct PageView { pub headers: Vec<(String, String)>, } -impl PartialEq for PageView { - fn eq(&self, other: &Self) -> bool { - self.id == other.id - } -} - -impl Eq for PageView {} - -impl Hash for PageView { - fn hash(&self, state: &mut H) { - self.id.hash(state); - } -} - -#[derive(Row, Serialize, Deserialize, Clone, Debug)] +#[derive(Row, Serialize, Deserialize, Clone, Debug, Eq, PartialEq, Hash)] pub struct Playtime { - #[serde(with = "uuid::serde::compact")] - pub id: Uuid, pub recorded: i64, pub seconds: u64, @@ -95,17 +60,3 @@ pub struct Playtime { /// Parent modpack this playtime was recorded in pub parent: u64, } - -impl PartialEq for Playtime { - fn eq(&self, other: &Self) -> bool { - self.id == other.id - } -} - -impl Eq for Playtime {} - -impl Hash for Playtime { - fn hash(&self, state: &mut H) { - self.id.hash(state); - } -} diff --git a/src/queue/payouts.rs b/src/queue/payouts.rs index 7a2093756..1672ec507 100644 --- a/src/queue/payouts.rs +++ b/src/queue/payouts.rs @@ -550,7 +550,7 @@ pub async fn process_payout( client .query( r#" - SELECT COUNT(id) page_views, project_id + SELECT COUNT(1) page_views, project_id FROM views WHERE (recorded BETWEEN ? AND ?) AND (project_id != 0) GROUP BY project_id @@ -561,14 +561,14 @@ pub async fn process_payout( .bind(end.timestamp()) .fetch_all::(), client - .query("SELECT COUNT(id) FROM views WHERE (recorded BETWEEN ? AND ?) AND (project_id != 0)") + .query("SELECT COUNT(1) FROM views WHERE (recorded BETWEEN ? AND ?) AND (project_id != 0)") .bind(start.timestamp()) .bind(end.timestamp()) .fetch_one::(), client .query( r#" - SELECT COUNT(id) page_views, project_id + SELECT COUNT(1) page_views, project_id FROM downloads WHERE (recorded BETWEEN ? AND ?) AND (user_id != 0) GROUP BY project_id @@ -579,7 +579,7 @@ pub async fn process_payout( .bind(end.timestamp()) .fetch_all::(), client - .query("SELECT COUNT(id) FROM downloads WHERE (recorded BETWEEN ? AND ?) AND (user_id != 0)") + .query("SELECT COUNT(1) FROM downloads WHERE (recorded BETWEEN ? AND ?) 
AND (user_id != 0)") .bind(start.timestamp()) .bind(end.timestamp()) .fetch_one::(), diff --git a/src/routes/analytics.rs b/src/routes/analytics.rs index 36efdde05..04db14bee 100644 --- a/src/routes/analytics.rs +++ b/src/routes/analytics.rs @@ -16,7 +16,6 @@ use std::collections::HashMap; use std::net::{AddrParseError, IpAddr, Ipv4Addr, Ipv6Addr}; use std::sync::Arc; use url::Url; -use uuid::Uuid; pub const FILTERED_HEADERS: &[&str] = &[ "authorization", @@ -107,7 +106,6 @@ pub async fn page_view_ingest( .unwrap_or_else(|_| Ipv4Addr::new(127, 0, 0, 1).to_ipv6_mapped()); let mut view = PageView { - id: Uuid::new_v4(), recorded: get_current_tenths_of_ms(), domain: domain.to_string(), site_path: url.path().to_string(), @@ -203,7 +201,6 @@ pub async fn playtime_ingest( if let Some(version) = versions.iter().find(|x| id == x.inner.id.into()) { analytics_queue.add_playtime(Playtime { - id: Default::default(), recorded: get_current_tenths_of_ms(), seconds: playtime.seconds as u64, user_id: user.id.0, diff --git a/src/routes/internal/admin.rs b/src/routes/internal/admin.rs index 7e9b9d8d9..07afe8365 100644 --- a/src/routes/internal/admin.rs +++ b/src/routes/internal/admin.rs @@ -16,7 +16,6 @@ use sqlx::PgPool; use std::collections::HashMap; use std::net::Ipv4Addr; use std::sync::Arc; -use uuid::Uuid; pub fn config(cfg: &mut web::ServiceConfig) { cfg.service( @@ -103,7 +102,6 @@ pub async fn count_download( .unwrap_or_else(|_| Ipv4Addr::new(127, 0, 0, 1).to_ipv6_mapped()); analytics_queue.add_download(Download { - id: Uuid::new_v4(), recorded: get_current_tenths_of_ms(), domain: url.host_str().unwrap_or_default().to_string(), site_path: url.path().to_string(), diff --git a/src/routes/v2/admin.rs b/src/routes/v2/admin.rs index 7e9b9d8d9..07afe8365 100644 --- a/src/routes/v2/admin.rs +++ b/src/routes/v2/admin.rs @@ -16,7 +16,6 @@ use sqlx::PgPool; use std::collections::HashMap; use std::net::Ipv4Addr; use std::sync::Arc; -use uuid::Uuid; pub fn config(cfg: &mut web::ServiceConfig) { cfg.service( @@ -103,7 +102,6 @@ pub async fn count_download( .unwrap_or_else(|_| Ipv4Addr::new(127, 0, 0, 1).to_ipv6_mapped()); analytics_queue.add_download(Download { - id: Uuid::new_v4(), recorded: get_current_tenths_of_ms(), domain: url.host_str().unwrap_or_default().to_string(), site_path: url.path().to_string(), diff --git a/src/routes/v2/analytics_get.rs b/src/routes/v2/analytics_get.rs deleted file mode 100644 index 842abb9da..000000000 --- a/src/routes/v2/analytics_get.rs +++ /dev/null @@ -1,272 +0,0 @@ -use super::ApiError; -use crate::database::redis::RedisPool; -use crate::routes::{v2_reroute, v3}; -use crate::{models::ids::VersionId, queue::session::AuthQueue}; -use actix_web::{get, web, HttpRequest, HttpResponse}; -use chrono::{DateTime, Utc}; -use serde::{Deserialize, Serialize}; -use sqlx::PgPool; -use std::collections::HashMap; - -pub fn config(cfg: &mut web::ServiceConfig) { - cfg.service( - web::scope("analytics") - .service(playtimes_get) - .service(views_get) - .service(downloads_get) - .service(revenue_get) - .service(countries_downloads_get) - .service(countries_views_get), - ); -} - -/// The json data to be passed to fetch analytic data -/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out. -/// start_date and end_date are optional, and default to two weeks ago, and the maximum date respectively -/// start_date and end_date are inclusive -/// resolution_minutes is optional. 
This refers to the window by which we are looking (every day, every minute, etc) and defaults to 1440 (1 day) -#[derive(Serialize, Deserialize, Clone, Debug)] -pub struct GetData { - // only one of project_ids or version_ids should be used - // if neither are provided, all projects the user has access to will be used - pub project_ids: Option, - pub version_ids: Option, - - pub start_date: Option>, // defaults to 2 weeks ago - pub end_date: Option>, // defaults to now - - pub resolution_minutes: Option, // defaults to 1 day. Ignored in routes that do not aggregate over a resolution (eg: /countries) -} - -/// Get playtime data for a set of projects or versions -/// Data is returned as a hashmap of project/version ids to a hashmap of days to playtime data -/// eg: -/// { -/// "4N1tEhnO": { -/// "20230824": 23 -/// } -///} -/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out. -#[derive(Serialize, Deserialize, Clone)] -pub struct FetchedPlaytime { - pub time: u64, - pub total_seconds: u64, - pub loader_seconds: HashMap, - pub game_version_seconds: HashMap, - pub parent_seconds: HashMap, -} -#[get("playtime")] -pub async fn playtimes_get( - req: HttpRequest, - clickhouse: web::Data, - data: web::Query, - session_queue: web::Data, - pool: web::Data, - redis: web::Data, -) -> Result { - let data = data.into_inner(); - v3::analytics_get::playtimes_get( - req, - clickhouse, - web::Query(v3::analytics_get::GetData { - project_ids: data.project_ids, - version_ids: data.version_ids, - start_date: data.start_date, - end_date: data.end_date, - resolution_minutes: data.resolution_minutes, - }), - session_queue, - pool, - redis, - ) - .await -} - -/// Get view data for a set of projects or versions -/// Data is returned as a hashmap of project/version ids to a hashmap of days to views -/// eg: -/// { -/// "4N1tEhnO": { -/// "20230824": 1090 -/// } -///} -/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out. -#[get("views")] -pub async fn views_get( - req: HttpRequest, - clickhouse: web::Data, - data: web::Query, - session_queue: web::Data, - pool: web::Data, - redis: web::Data, -) -> Result { - let data = data.into_inner(); - v3::analytics_get::views_get( - req, - clickhouse, - web::Query(v3::analytics_get::GetData { - project_ids: data.project_ids, - version_ids: data.version_ids, - start_date: data.start_date, - end_date: data.end_date, - resolution_minutes: data.resolution_minutes, - }), - session_queue, - pool, - redis, - ) - .await - .or_else(v2_reroute::flatten_404_error) -} - -/// Get download data for a set of projects or versions -/// Data is returned as a hashmap of project/version ids to a hashmap of days to downloads -/// eg: -/// { -/// "4N1tEhnO": { -/// "20230824": 32 -/// } -///} -/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out. 
-#[get("downloads")] -pub async fn downloads_get( - req: HttpRequest, - clickhouse: web::Data, - data: web::Query, - session_queue: web::Data, - pool: web::Data, - redis: web::Data, -) -> Result { - let data = data.into_inner(); - v3::analytics_get::downloads_get( - req, - clickhouse, - web::Query(v3::analytics_get::GetData { - project_ids: data.project_ids, - version_ids: data.version_ids, - start_date: data.start_date, - end_date: data.end_date, - resolution_minutes: data.resolution_minutes, - }), - session_queue, - pool, - redis, - ) - .await - .or_else(v2_reroute::flatten_404_error) -} - -/// Get payout data for a set of projects -/// Data is returned as a hashmap of project ids to a hashmap of days to amount earned per day -/// eg: -/// { -/// "4N1tEhnO": { -/// "20230824": 0.001 -/// } -///} -/// ONLY project IDs can be used. Unauthorized projects will be filtered out. -#[get("revenue")] -pub async fn revenue_get( - req: HttpRequest, - data: web::Query, - session_queue: web::Data, - pool: web::Data, - redis: web::Data, -) -> Result { - let data = data.into_inner(); - v3::analytics_get::revenue_get( - req, - web::Query(v3::analytics_get::GetData { - project_ids: data.project_ids, - version_ids: None, - start_date: data.start_date, - end_date: data.end_date, - resolution_minutes: data.resolution_minutes, - }), - session_queue, - pool, - redis, - ) - .await - .or_else(v2_reroute::flatten_404_error) -} - -/// Get country data for a set of projects or versions -/// Data is returned as a hashmap of project/version ids to a hashmap of coutnry to downloads. -/// Unknown countries are labeled "". -/// This is usuable to see significant performing countries per project -/// eg: -/// { -/// "4N1tEhnO": { -/// "CAN": 22 -/// } -///} -/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out. -/// For this endpoint, provided dates are a range to aggregate over, not specific days to fetch -#[get("countries/downloads")] -pub async fn countries_downloads_get( - req: HttpRequest, - clickhouse: web::Data, - data: web::Query, - session_queue: web::Data, - pool: web::Data, - redis: web::Data, -) -> Result { - let data = data.into_inner(); - v3::analytics_get::countries_downloads_get( - req, - clickhouse, - web::Query(v3::analytics_get::GetData { - project_ids: data.project_ids, - version_ids: data.version_ids, - start_date: data.start_date, - end_date: data.end_date, - resolution_minutes: data.resolution_minutes, - }), - session_queue, - pool, - redis, - ) - .await - .or_else(v2_reroute::flatten_404_error) -} - -/// Get country data for a set of projects or versions -/// Data is returned as a hashmap of project/version ids to a hashmap of coutnry to views. -/// Unknown countries are labeled "". -/// This is usuable to see significant performing countries per project -/// eg: -/// { -/// "4N1tEhnO": { -/// "CAN": 56165 -/// } -///} -/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out. 
-/// For this endpoint, provided dates are a range to aggregate over, not specific days to fetch -#[get("countries/views")] -pub async fn countries_views_get( - req: HttpRequest, - clickhouse: web::Data, - data: web::Query, - session_queue: web::Data, - pool: web::Data, - redis: web::Data, -) -> Result { - let data = data.into_inner(); - v3::analytics_get::countries_views_get( - req, - clickhouse, - web::Query(v3::analytics_get::GetData { - project_ids: data.project_ids, - version_ids: data.version_ids, - start_date: data.start_date, - end_date: data.end_date, - resolution_minutes: data.resolution_minutes, - }), - session_queue, - pool, - redis, - ) - .await - .or_else(v2_reroute::flatten_404_error) -} diff --git a/src/routes/v2/collections.rs b/src/routes/v2/collections.rs deleted file mode 100644 index 15be66ef0..000000000 --- a/src/routes/v2/collections.rs +++ /dev/null @@ -1,191 +0,0 @@ -use crate::database::redis::RedisPool; -use crate::file_hosting::FileHost; -use crate::models::collections::CollectionStatus; -use crate::queue::session::AuthQueue; -use crate::routes::v3::project_creation::CreateError; -use crate::routes::{v3, ApiError}; -use actix_web::web::Data; -use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse}; -use serde::{Deserialize, Serialize}; -use sqlx::PgPool; -use std::sync::Arc; -use validator::Validate; - -pub fn config(cfg: &mut web::ServiceConfig) { - cfg.service(collections_get); - cfg.service(collection_create); - cfg.service( - web::scope("collection") - .service(collection_get) - .service(collection_delete) - .service(collection_edit) - .service(collection_icon_edit) - .service(delete_collection_icon), - ); -} - -#[derive(Serialize, Deserialize, Validate, Clone)] -pub struct CollectionCreateData { - #[validate( - length(min = 3, max = 64), - custom(function = "crate::util::validate::validate_name") - )] - /// The title or name of the project. - pub title: String, - #[validate(length(min = 3, max = 255))] - /// A short description of the collection. 
- pub description: String, - #[validate(length(max = 32))] - #[serde(default = "Vec::new")] - /// A list of initial projects to use with the created collection - pub projects: Vec, -} - -#[post("collection")] -pub async fn collection_create( - req: HttpRequest, - collection_create_data: web::Json, - client: Data, - redis: Data, - session_queue: Data, -) -> Result { - let collection_create_data = collection_create_data.into_inner(); - v3::collections::collection_create( - req, - web::Json(v3::collections::CollectionCreateData { - name: collection_create_data.title, - description: collection_create_data.description, - projects: collection_create_data.projects, - }), - client, - redis, - session_queue, - ) - .await -} - -#[derive(Serialize, Deserialize)] -pub struct CollectionIds { - pub ids: String, -} -#[get("collections")] -pub async fn collections_get( - req: HttpRequest, - web::Query(ids): web::Query, - pool: web::Data, - redis: web::Data, - session_queue: web::Data, -) -> Result { - v3::collections::collections_get( - req, - web::Query(v3::collections::CollectionIds { ids: ids.ids }), - pool, - redis, - session_queue, - ) - .await -} - -#[get("{id}")] -pub async fn collection_get( - req: HttpRequest, - info: web::Path<(String,)>, - pool: web::Data, - redis: web::Data, - session_queue: web::Data, -) -> Result { - v3::collections::collection_get(req, info, pool, redis, session_queue).await -} - -#[derive(Deserialize, Validate)] -pub struct EditCollection { - #[validate( - length(min = 3, max = 64), - custom(function = "crate::util::validate::validate_name") - )] - pub title: Option, - #[validate(length(min = 3, max = 256))] - pub description: Option, - pub status: Option, - #[validate(length(max = 64))] - pub new_projects: Option>, -} - -#[patch("{id}")] -pub async fn collection_edit( - req: HttpRequest, - info: web::Path<(String,)>, - pool: web::Data, - new_collection: web::Json, - redis: web::Data, - session_queue: web::Data, -) -> Result { - let new_collection = new_collection.into_inner(); - v3::collections::collection_edit( - req, - info, - pool, - web::Json(v3::collections::EditCollection { - name: new_collection.title, - description: new_collection.description, - status: new_collection.status, - new_projects: new_collection.new_projects, - }), - redis, - session_queue, - ) - .await -} - -#[derive(Serialize, Deserialize)] -pub struct Extension { - pub ext: String, -} - -#[patch("{id}/icon")] -#[allow(clippy::too_many_arguments)] -pub async fn collection_icon_edit( - web::Query(ext): web::Query, - req: HttpRequest, - info: web::Path<(String,)>, - pool: web::Data, - redis: web::Data, - file_host: web::Data>, - payload: web::Payload, - session_queue: web::Data, -) -> Result { - v3::collections::collection_icon_edit( - web::Query(v3::collections::Extension { ext: ext.ext }), - req, - info, - pool, - redis, - file_host, - payload, - session_queue, - ) - .await -} - -#[delete("{id}/icon")] -pub async fn delete_collection_icon( - req: HttpRequest, - info: web::Path<(String,)>, - pool: web::Data, - redis: web::Data, - file_host: web::Data>, - session_queue: web::Data, -) -> Result { - v3::collections::delete_collection_icon(req, info, pool, redis, file_host, session_queue).await -} - -#[delete("{id}")] -pub async fn collection_delete( - req: HttpRequest, - info: web::Path<(String,)>, - pool: web::Data, - redis: web::Data, - session_queue: web::Data, -) -> Result { - v3::collections::collection_delete(req, info, pool, redis, session_queue).await -} diff --git a/src/routes/v2/organizations.rs 
b/src/routes/v2/organizations.rs deleted file mode 100644 index 29f1f467d..000000000 --- a/src/routes/v2/organizations.rs +++ /dev/null @@ -1,265 +0,0 @@ -use crate::database::redis::RedisPool; -use crate::file_hosting::FileHost; -use crate::models::projects::Project; -use crate::models::v2::projects::LegacyProject; -use crate::queue::session::AuthQueue; -use crate::routes::v3::project_creation::CreateError; -use crate::routes::{v2_reroute, v3, ApiError}; -use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse}; -use serde::{Deserialize, Serialize}; -use sqlx::PgPool; -use std::sync::Arc; -use validator::Validate; - -pub fn config(cfg: &mut web::ServiceConfig) { - cfg.service(organizations_get).service(organization_create); - cfg.service( - web::scope("organization") - .service(organization_get) - .service(organizations_edit) - .service(organization_delete) - .service(organization_projects_get) - .service(organization_projects_add) - .service(organization_projects_remove) - .service(organization_icon_edit) - .service(delete_organization_icon) - .service(super::teams::team_members_get_organization), - ); -} - -#[derive(Deserialize, Validate)] -pub struct NewOrganization { - #[validate( - length(min = 3, max = 64), - regex = "crate::util::validate::RE_URL_SAFE" - )] - // Title of the organization, also used as slug - pub title: String, - #[validate(length(min = 3, max = 256))] - pub description: String, -} - -#[post("organization")] -pub async fn organization_create( - req: HttpRequest, - new_organization: web::Json, - pool: web::Data, - redis: web::Data, - session_queue: web::Data, -) -> Result { - let new_organization = new_organization.into_inner(); - v3::organizations::organization_create( - req, - web::Json(v3::organizations::NewOrganization { - name: new_organization.title, - description: new_organization.description, - }), - pool.clone(), - redis.clone(), - session_queue, - ) - .await -} - -#[get("{id}")] -pub async fn organization_get( - req: HttpRequest, - info: web::Path<(String,)>, - pool: web::Data, - redis: web::Data, - session_queue: web::Data, -) -> Result { - v3::organizations::organization_get(req, info, pool.clone(), redis.clone(), session_queue).await -} - -#[derive(Deserialize)] -pub struct OrganizationIds { - pub ids: String, -} -#[get("organizations")] -pub async fn organizations_get( - req: HttpRequest, - web::Query(ids): web::Query, - pool: web::Data, - redis: web::Data, - session_queue: web::Data, -) -> Result { - v3::organizations::organizations_get( - req, - web::Query(v3::organizations::OrganizationIds { ids: ids.ids }), - pool, - redis, - session_queue, - ) - .await -} - -#[derive(Serialize, Deserialize, Validate)] -pub struct OrganizationEdit { - #[validate(length(min = 3, max = 256))] - pub description: Option, - #[validate( - length(min = 3, max = 64), - regex = "crate::util::validate::RE_URL_SAFE" - )] - // Title of the organization, also used as slug - pub title: Option, -} - -#[patch("{id}")] -pub async fn organizations_edit( - req: HttpRequest, - info: web::Path<(String,)>, - new_organization: web::Json, - pool: web::Data, - redis: web::Data, - session_queue: web::Data, -) -> Result { - let new_organization = new_organization.into_inner(); - v3::organizations::organizations_edit( - req, - info, - web::Json(v3::organizations::OrganizationEdit { - description: new_organization.description, - name: new_organization.title, - }), - pool.clone(), - redis.clone(), - session_queue, - ) - .await -} - -#[delete("{id}")] -pub async fn 
organization_delete( - req: HttpRequest, - info: web::Path<(String,)>, - pool: web::Data, - redis: web::Data, - session_queue: web::Data, -) -> Result { - v3::organizations::organization_delete(req, info, pool.clone(), redis.clone(), session_queue) - .await -} - -#[get("{id}/projects")] -pub async fn organization_projects_get( - req: HttpRequest, - info: web::Path<(String,)>, - pool: web::Data, - redis: web::Data, - session_queue: web::Data, -) -> Result { - let response = v3::organizations::organization_projects_get( - req, - info, - pool.clone(), - redis.clone(), - session_queue, - ) - .await?; - - // Convert v3 projects to v2 - match v2_reroute::extract_ok_json::>(response).await { - Ok(project) => { - let legacy_projects = LegacyProject::from_many(project, &**pool, &redis).await?; - Ok(HttpResponse::Ok().json(legacy_projects)) - } - Err(response) => Ok(response), - } -} - -#[derive(Deserialize)] -pub struct OrganizationProjectAdd { - pub project_id: String, // Also allow title/slug -} -#[post("{id}/projects")] -pub async fn organization_projects_add( - req: HttpRequest, - info: web::Path<(String,)>, - project_info: web::Json, - pool: web::Data, - redis: web::Data, - session_queue: web::Data, -) -> Result { - let project_info = project_info.into_inner(); - v3::organizations::organization_projects_add( - req, - info, - web::Json(v3::organizations::OrganizationProjectAdd { - project_id: project_info.project_id, - }), - pool.clone(), - redis.clone(), - session_queue, - ) - .await -} - -#[delete("{organization_id}/projects/{project_id}")] -pub async fn organization_projects_remove( - req: HttpRequest, - info: web::Path<(String, String)>, - pool: web::Data, - redis: web::Data, - session_queue: web::Data, -) -> Result { - v3::organizations::organization_projects_remove( - req, - info, - pool.clone(), - redis.clone(), - session_queue, - ) - .await -} - -#[derive(Serialize, Deserialize)] -pub struct Extension { - pub ext: String, -} - -#[patch("{id}/icon")] -#[allow(clippy::too_many_arguments)] -pub async fn organization_icon_edit( - web::Query(ext): web::Query, - req: HttpRequest, - info: web::Path<(String,)>, - pool: web::Data, - redis: web::Data, - file_host: web::Data>, - payload: web::Payload, - session_queue: web::Data, -) -> Result { - v3::organizations::organization_icon_edit( - web::Query(v3::organizations::Extension { ext: ext.ext }), - req, - info, - pool.clone(), - redis.clone(), - file_host, - payload, - session_queue, - ) - .await -} - -#[delete("{id}/icon")] -pub async fn delete_organization_icon( - req: HttpRequest, - info: web::Path<(String,)>, - pool: web::Data, - redis: web::Data, - file_host: web::Data>, - session_queue: web::Data, -) -> Result { - v3::organizations::delete_organization_icon( - req, - info, - pool.clone(), - redis.clone(), - file_host, - session_queue, - ) - .await -} diff --git a/src/routes/v3/analytics_get.rs b/src/routes/v3/analytics_get.rs index ee12e02e6..281cf853b 100644 --- a/src/routes/v3/analytics_get.rs +++ b/src/routes/v3/analytics_get.rs @@ -6,10 +6,7 @@ use crate::{ auth::get_user_from_headers, database::models::user_item, models::{ - ids::{ - base62_impl::{parse_base62, to_base62}, - ProjectId, VersionId, - }, + ids::{base62_impl::to_base62, ProjectId, VersionId}, pats::Scopes, }, queue::session::AuthQueue, @@ -46,7 +43,6 @@ pub struct GetData { // only one of project_ids or version_ids should be used // if neither are provided, all projects the user has access to will be used pub project_ids: Option, - pub version_ids: Option, pub start_date: 
Option>, // defaults to 2 weeks ago pub end_date: Option>, // defaults to now @@ -94,17 +90,6 @@ pub async fn playtimes_get( .as_ref() .map(|ids| serde_json::from_str::>(ids)) .transpose()?; - let version_ids = data - .version_ids - .as_ref() - .map(|ids| serde_json::from_str::>(ids)) - .transpose()?; - - if project_ids.is_some() && version_ids.is_some() { - return Err(ApiError::InvalidInput( - "Only one of 'project_ids' or 'version_ids' should be used.".to_string(), - )); - } let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2)); let end_date = data.end_date.unwrap_or(Utc::now()); @@ -113,13 +98,11 @@ pub async fn playtimes_get( // Convert String list to list of ProjectIds or VersionIds // - Filter out unauthorized projects/versions // - If no project_ids or version_ids are provided, we default to all projects the user has access to - let (project_ids, version_ids) = - filter_allowed_ids(project_ids, version_ids, user, &pool, &redis).await?; + let project_ids = filter_allowed_ids(project_ids, user, &pool, &redis).await?; // Get the views let playtimes = crate::clickhouse::fetch_playtimes( - project_ids, - version_ids, + project_ids.unwrap_or_default(), start_date, end_date, resolution_minutes, @@ -173,17 +156,6 @@ pub async fn views_get( .as_ref() .map(|ids| serde_json::from_str::>(ids)) .transpose()?; - let version_ids = data - .version_ids - .as_ref() - .map(|ids| serde_json::from_str::>(ids)) - .transpose()?; - - if project_ids.is_some() && version_ids.is_some() { - return Err(ApiError::InvalidInput( - "Only one of 'project_ids' or 'version_ids' should be used.".to_string(), - )); - } let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2)); let end_date = data.end_date.unwrap_or(Utc::now()); @@ -192,13 +164,11 @@ pub async fn views_get( // Convert String list to list of ProjectIds or VersionIds // - Filter out unauthorized projects/versions // - If no project_ids or version_ids are provided, we default to all projects the user has access to - let (project_ids, version_ids) = - filter_allowed_ids(project_ids, version_ids, user, &pool, &redis).await?; + let project_ids = filter_allowed_ids(project_ids, user, &pool, &redis).await?; // Get the views let views = crate::clickhouse::fetch_views( - project_ids, - version_ids, + project_ids.unwrap_or_default(), start_date, end_date, resolution_minutes, @@ -252,17 +222,6 @@ pub async fn downloads_get( .as_ref() .map(|ids| serde_json::from_str::>(ids)) .transpose()?; - let version_ids = data - .version_ids - .as_ref() - .map(|ids| serde_json::from_str::>(ids)) - .transpose()?; - - if project_ids.is_some() && version_ids.is_some() { - return Err(ApiError::InvalidInput( - "Only one of 'project_ids' or 'version_ids' should be used.".to_string(), - )); - } let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2)); let end_date = data.end_date.unwrap_or(Utc::now()); @@ -271,13 +230,11 @@ pub async fn downloads_get( // Convert String list to list of ProjectIds or VersionIds // - Filter out unauthorized projects/versions // - If no project_ids or version_ids are provided, we default to all projects the user has access to - let (project_ids, version_ids) = - filter_allowed_ids(project_ids, version_ids, user_option, &pool, &redis).await?; + let project_ids = filter_allowed_ids(project_ids, user_option, &pool, &redis).await?; // Get the downloads let downloads = crate::clickhouse::fetch_downloads( - project_ids, - version_ids, + project_ids.unwrap_or_default(), start_date, end_date, 
resolution_minutes, @@ -347,7 +304,7 @@ pub async fn revenue_get( // Convert String list to list of ProjectIds or VersionIds // - Filter out unauthorized projects/versions // - If no project_ids or version_ids are provided, we default to all projects the user has access to - let (project_ids, _) = filter_allowed_ids(project_ids, None, user, &pool, &redis).await?; + let project_ids = filter_allowed_ids(project_ids, user, &pool, &redis).await?; let duration: PgInterval = Duration::minutes(resolution_minutes as i64) .try_into() @@ -427,17 +384,6 @@ pub async fn countries_downloads_get( .as_ref() .map(|ids| serde_json::from_str::>(ids)) .transpose()?; - let version_ids = data - .version_ids - .as_ref() - .map(|ids| serde_json::from_str::>(ids)) - .transpose()?; - - if project_ids.is_some() && version_ids.is_some() { - return Err(ApiError::InvalidInput( - "Only one of 'project_ids' or 'version_ids' should be used.".to_string(), - )); - } let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2)); let end_date = data.end_date.unwrap_or(Utc::now()); @@ -445,13 +391,11 @@ pub async fn countries_downloads_get( // Convert String list to list of ProjectIds or VersionIds // - Filter out unauthorized projects/versions // - If no project_ids or version_ids are provided, we default to all projects the user has access to - let (project_ids, version_ids) = - filter_allowed_ids(project_ids, version_ids, user, &pool, &redis).await?; + let project_ids = filter_allowed_ids(project_ids, user, &pool, &redis).await?; // Get the countries let countries = crate::clickhouse::fetch_countries( - project_ids, - version_ids, + project_ids.unwrap_or_default(), start_date, end_date, clickhouse.into_inner(), @@ -507,17 +451,6 @@ pub async fn countries_views_get( .as_ref() .map(|ids| serde_json::from_str::>(ids)) .transpose()?; - let version_ids = data - .version_ids - .as_ref() - .map(|ids| serde_json::from_str::>(ids)) - .transpose()?; - - if project_ids.is_some() && version_ids.is_some() { - return Err(ApiError::InvalidInput( - "Only one of 'project_ids' or 'version_ids' should be used.".to_string(), - )); - } let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2)); let end_date = data.end_date.unwrap_or(Utc::now()); @@ -525,13 +458,11 @@ pub async fn countries_views_get( // Convert String list to list of ProjectIds or VersionIds // - Filter out unauthorized projects/versions // - If no project_ids or version_ids are provided, we default to all projects the user has access to - let (project_ids, version_ids) = - filter_allowed_ids(project_ids, version_ids, user, &pool, &redis).await?; + let project_ids = filter_allowed_ids(project_ids, user, &pool, &redis).await?; // Get the countries let countries = crate::clickhouse::fetch_countries( - project_ids, - version_ids, + project_ids.unwrap_or_default(), start_date, end_date, clickhouse.into_inner(), @@ -554,19 +485,12 @@ pub async fn countries_views_get( async fn filter_allowed_ids( mut project_ids: Option>, - version_ids: Option>, user: crate::models::users::User, pool: &web::Data, redis: &RedisPool, -) -> Result<(Option>, Option>), ApiError> { - if project_ids.is_some() && version_ids.is_some() { - return Err(ApiError::InvalidInput( - "Only one of 'project_ids' or 'version_ids' should be used.".to_string(), - )); - } - +) -> Result>, ApiError> { // If no project_ids or version_ids are provided, we default to all projects the user has *public* access to - if project_ids.is_none() && version_ids.is_none() { + if project_ids.is_none() { 
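        // No explicit ids were passed, so fall back to the requesting user's own
        // projects (fetched below via user_item::User::get_projects); with
        // version_ids removed, this is the only defaulting path left.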
project_ids = Some( user_item::User::get_projects(user.id.into(), &***pool, redis) .await? @@ -645,92 +569,6 @@ async fn filter_allowed_ids( } else { None }; - - let version_ids = if let Some(version_ids) = version_ids { - // Submitted version_ids are filtered by the user's permissions - let ids = version_ids - .iter() - .map(|id| Ok(VersionId(parse_base62(id)?).into())) - .collect::, ApiError>>()?; - let versions_data = database::models::Version::get_many(&ids, &***pool, redis).await?; - let project_ids = versions_data - .iter() - .map(|x| x.inner.project_id) - .collect::>(); - - let projects_data = - database::models::Project::get_many_ids(&project_ids, &***pool, redis).await?; - - let team_ids = projects_data - .iter() - .map(|x| x.inner.team_id) - .collect::>(); - let team_members = - database::models::TeamMember::get_from_team_full_many(&team_ids, &***pool, redis) - .await?; - - let organization_ids = projects_data - .iter() - .filter_map(|x| x.inner.organization_id) - .collect::>(); - let organizations = - database::models::Organization::get_many_ids(&organization_ids, &***pool, redis) - .await?; - - let organization_team_ids = organizations - .iter() - .map(|x| x.team_id) - .collect::>(); - let organization_team_members = database::models::TeamMember::get_from_team_full_many( - &organization_team_ids, - &***pool, - redis, - ) - .await?; - - let ids = projects_data - .into_iter() - .filter(|project| { - let team_member = team_members - .iter() - .find(|x| x.team_id == project.inner.team_id && x.user_id == user.id.into()); - - let organization = project - .inner - .organization_id - .and_then(|oid| organizations.iter().find(|x| x.id == oid)); - - let organization_team_member = if let Some(organization) = organization { - organization_team_members - .iter() - .find(|x| x.team_id == organization.team_id && x.user_id == user.id.into()) - } else { - None - }; - - let permissions = ProjectPermissions::get_permissions_by_role( - &user.role, - &team_member.cloned(), - &organization_team_member.cloned(), - ) - .unwrap_or_default(); - - permissions.contains(ProjectPermissions::VIEW_ANALYTICS) - }) - .map(|x| x.inner.id) - .collect::>(); - - let ids = versions_data - .into_iter() - .filter(|version| ids.contains(&version.inner.project_id)) - .map(|x| x.inner.id.into()) - .collect::>(); - - Some(ids) - } else { - None - }; - // Only one of project_ids or version_ids will be Some - Ok((project_ids, version_ids)) + Ok(project_ids) }
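
For reference, a minimal sketch of how the reworked fetch helpers are called once this patch is applied. It assumes a configured `clickhouse::Client` handed in as an `Arc` and two placeholder `ProjectId` values; the hypothetical `analytics_example` wrapper and the restored generic parameters (`Vec<ProjectId>`, `DateTime<Utc>`) are filled in from context and are not part of the diff itself.

use std::sync::Arc;

use chrono::{Duration, Utc};

use crate::clickhouse::{fetch_playtimes, fetch_views};
use crate::models::ids::ProjectId;
use crate::routes::ApiError;

// Hypothetical caller: `client` and the project ids would normally come from
// application state and from filter_allowed_ids, respectively.
async fn analytics_example(client: Arc<clickhouse::Client>) -> Result<(), ApiError> {
    let projects = vec![ProjectId(1), ProjectId(2)];
    let end = Utc::now();
    let start = end - Duration::weeks(2); // the routes' default two-week window

    // 1440-minute buckets = one row per project per day, matching the route default.
    let playtimes = fetch_playtimes(projects.clone(), start, end, 1440, client.clone()).await?;
    let views = fetch_views(projects, start, end, 1440, client).await?;

    // Each returned row is keyed by (time bucket, project id); the v3 route
    // handlers regroup these rows into per-project maps before serializing.
    let _ = (playtimes, views);
    Ok(())
}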