Optimize analytics queries (#781)

* Optimize analytics queries

* fix clippy
Geometrically
2023-12-04 18:49:51 -07:00
committed by GitHub
parent 27055b96e3
commit 4630d175d7
11 changed files with 83 additions and 1095 deletions

View File

@@ -1,9 +1,6 @@
use std::sync::Arc;
use crate::{
models::ids::{ProjectId, VersionId},
routes::ApiError,
};
use crate::{models::ids::ProjectId, routes::ApiError};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
@@ -39,201 +36,141 @@ pub struct ReturnDownloads {
// Only one of project_id or version_id should be used
// Fetches playtimes as a Vec of ReturnPlaytimes
pub async fn fetch_playtimes(
projects: Option<Vec<ProjectId>>,
versions: Option<Vec<VersionId>>,
projects: Vec<ProjectId>,
start_date: DateTime<Utc>,
end_date: DateTime<Utc>,
resolution_minute: u32,
client: Arc<clickhouse::Client>,
) -> Result<Vec<ReturnPlaytimes>, ApiError> {
let project_or_version = if projects.is_some() && versions.is_none() {
"project_id"
} else if versions.is_some() {
"version_id"
} else {
return Err(ApiError::InvalidInput(
"Only one of 'project_id' or 'version_id' should be used.".to_string(),
));
};
let mut query = client
.query(&format!(
let query = client
.query(
"
SELECT
toUnixTimestamp(toStartOfInterval(recorded, toIntervalMinute(?))) AS time,
{project_or_version} AS id,
SUM(seconds) AS total_seconds
FROM playtime
WHERE recorded BETWEEN ? AND ?
AND {project_or_version} IN ?
GROUP BY
time,
{project_or_version}
"
))
SELECT
toUnixTimestamp(toStartOfInterval(recorded, toIntervalMinute(?))) AS time,
project_id AS id,
SUM(seconds) AS total_seconds
FROM playtime
WHERE recorded BETWEEN ? AND ?
AND project_id IN ?
GROUP BY
time,
project_id
",
)
.bind(resolution_minute)
.bind(start_date.timestamp())
.bind(end_date.timestamp());
if let Some(projects) = projects {
query = query.bind(projects.iter().map(|x| x.0).collect::<Vec<_>>());
} else if let Some(versions) = versions {
query = query.bind(versions.iter().map(|x| x.0).collect::<Vec<_>>());
}
.bind(end_date.timestamp())
.bind(projects.iter().map(|x| x.0).collect::<Vec<_>>());
Ok(query.fetch_all().await?)
}
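
The bind calls line up positionally with the placeholders in the query above: the interval size for toIntervalMinute(?), the two bounds of the BETWEEN clause, and finally the project id list for IN ?. A minimal standalone sketch of that pattern, assuming (as the code above does) that the clickhouse crate accepts a Vec for the IN ? placeholder; the helper name and the simplified query are illustrative, not part of this change:

use std::sync::Arc;

// Illustrative helper: counts playtime rows for a set of projects.
async fn count_playtime_rows(
    client: Arc<clickhouse::Client>,
    project_ids: Vec<u64>,
    start: i64,
    end: i64,
) -> Result<u64, clickhouse::error::Error> {
    client
        .query("SELECT count(1) FROM playtime WHERE recorded BETWEEN ? AND ? AND project_id IN ?")
        .bind(start)        // first ?
        .bind(end)          // second ?
        .bind(project_ids)  // the IN ? list, serialised as an array
        .fetch_one::<u64>()
        .await
}
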
// Fetches views as a Vec of ReturnViews
pub async fn fetch_views(
projects: Option<Vec<ProjectId>>,
versions: Option<Vec<VersionId>>,
projects: Vec<ProjectId>,
start_date: DateTime<Utc>,
end_date: DateTime<Utc>,
resolution_minutes: u32,
client: Arc<clickhouse::Client>,
) -> Result<Vec<ReturnViews>, ApiError> {
let project_or_version = if projects.is_some() && versions.is_none() {
"project_id"
} else if versions.is_some() {
"version_id"
} else {
return Err(ApiError::InvalidInput(
"Only one of 'project_id' or 'version_id' should be used.".to_string(),
));
};
let mut query = client
.query(&format!(
let query = client
.query(
"
SELECT
toUnixTimestamp(toStartOfInterval(recorded, toIntervalMinute(?))) AS time,
{project_or_version} AS id,
count(views.id) AS total_views
project_id AS id,
count(1) AS total_views
FROM views
WHERE recorded BETWEEN ? AND ?
AND {project_or_version} IN ?
AND project_id IN ?
GROUP BY
time, {project_or_version}
"
))
time, project_id
",
)
.bind(resolution_minutes)
.bind(start_date.timestamp())
.bind(end_date.timestamp());
if let Some(projects) = projects {
query = query.bind(projects.iter().map(|x| x.0).collect::<Vec<_>>());
} else if let Some(versions) = versions {
query = query.bind(versions.iter().map(|x| x.0).collect::<Vec<_>>());
}
.bind(end_date.timestamp())
.bind(projects.iter().map(|x| x.0).collect::<Vec<_>>());
Ok(query.fetch_all().await?)
}
// Fetches downloads as a Vec of ReturnDownloads
pub async fn fetch_downloads(
projects: Option<Vec<ProjectId>>,
versions: Option<Vec<VersionId>>,
projects: Vec<ProjectId>,
start_date: DateTime<Utc>,
end_date: DateTime<Utc>,
resolution_minutes: u32,
client: Arc<clickhouse::Client>,
) -> Result<Vec<ReturnDownloads>, ApiError> {
let project_or_version = if projects.is_some() && versions.is_none() {
"project_id"
} else if versions.is_some() {
"version_id"
} else {
return Err(ApiError::InvalidInput(
"Only one of 'project_id' or 'version_id' should be used.".to_string(),
));
};
let mut query = client
.query(&format!(
let query = client
.query(
"
SELECT
toUnixTimestamp(toStartOfInterval(recorded, toIntervalMinute(?))) AS time,
{project_or_version} as id,
count(downloads.id) AS total_downloads
project_id as id,
count(1) AS total_downloads
FROM downloads
WHERE recorded BETWEEN ? AND ?
AND {project_or_version} IN ?
GROUP BY time, {project_or_version}
"
))
AND project_id IN ?
GROUP BY time, project_id
",
)
.bind(resolution_minutes)
.bind(start_date.timestamp())
.bind(end_date.timestamp());
if let Some(projects) = projects {
query = query.bind(projects.iter().map(|x| x.0).collect::<Vec<_>>());
} else if let Some(versions) = versions {
query = query.bind(versions.iter().map(|x| x.0).collect::<Vec<_>>());
}
.bind(end_date.timestamp())
.bind(projects.iter().map(|x| x.0).collect::<Vec<_>>());
Ok(query.fetch_all().await?)
}
// Fetches countries as a Vec of ReturnCountry
pub async fn fetch_countries(
projects: Option<Vec<ProjectId>>,
versions: Option<Vec<VersionId>>,
projects: Vec<ProjectId>,
start_date: DateTime<Utc>,
end_date: DateTime<Utc>,
client: Arc<clickhouse::Client>,
) -> Result<Vec<ReturnCountry>, ApiError> {
let project_or_version = if projects.is_some() && versions.is_none() {
"project_id"
} else if versions.is_some() {
"version_id"
} else {
return Err(ApiError::InvalidInput(
"Only one of 'project_id' or 'version_id' should be used.".to_string(),
));
};
let mut query = client.query(&format!(
let query = client.query(
"
WITH view_grouping AS (
SELECT
country,
{project_or_version},
count(id) AS total_views
project_id,
count(1) AS total_views
FROM views
WHERE recorded BETWEEN ? AND ?
GROUP BY
country,
{project_or_version}
project_id
),
download_grouping AS (
SELECT
country,
{project_or_version},
count(id) AS total_downloads
project_id,
count(1) AS total_downloads
FROM downloads
WHERE recorded BETWEEN ? AND ?
GROUP BY
country,
{project_or_version}
project_id
)
SELECT
v.country,
v.{project_or_version},
v.project_id,
v.total_views,
d.total_downloads
FROM view_grouping AS v
LEFT JOIN download_grouping AS d ON (v.country = d.country) AND (v.{project_or_version} = d.{project_or_version})
WHERE {project_or_version} IN ?
LEFT JOIN download_grouping AS d ON (v.country = d.country) AND (v.project_id = d.project_id)
WHERE project_id IN ?
"
)).bind(start_date.timestamp()).bind(end_date.timestamp()).bind(start_date.timestamp()).bind(end_date.timestamp());
if let Some(projects) = projects {
query = query.bind(projects.iter().map(|x| x.0).collect::<Vec<_>>());
} else if let Some(versions) = versions {
query = query.bind(versions.iter().map(|x| x.0).collect::<Vec<_>>());
}
)
.bind(start_date.timestamp())
.bind(end_date.timestamp())
.bind(start_date.timestamp())
.bind(end_date.timestamp())
.bind(projects.iter().map(|x| x.0).collect::<Vec<_>>());
Ok(query.fetch_all().await?)
}
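
With version ids gone, every fetcher in this module now takes the project list directly. A hedged sketch of a caller, using the defaults documented elsewhere in this change (two-week window, 1440-minute buckets); the wrapper function itself is illustrative:

use std::sync::Arc;
use chrono::{Duration, Utc};

// Illustrative wrapper: daily view counts for the last two weeks.
async fn views_last_two_weeks(
    client: Arc<clickhouse::Client>,
    project_ids: Vec<ProjectId>,
) -> Result<Vec<ReturnViews>, ApiError> {
    let end = Utc::now();
    let start = end - Duration::weeks(2);
    // 1440-minute resolution = one data point per day.
    fetch_views(project_ids, start, end, 1440, client).await
}
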

View File

@@ -36,7 +36,6 @@ pub async fn init_client_with_database(
"
CREATE TABLE IF NOT EXISTS {database}.views
(
id UUID,
recorded DateTime64(4),
domain String,
site_path String,
@@ -50,7 +49,7 @@ pub async fn init_client_with_database(
headers Array(Tuple(String, String)),
)
ENGINE = MergeTree()
PRIMARY KEY (id, recorded)
PRIMARY KEY (project_id, recorded)
"
))
.execute()
@@ -61,7 +60,6 @@ pub async fn init_client_with_database(
"
CREATE TABLE IF NOT EXISTS {database}.downloads
(
id UUID,
recorded DateTime64(4),
domain String,
site_path String,
@@ -76,7 +74,7 @@ pub async fn init_client_with_database(
headers Array(Tuple(String, String)),
)
ENGINE = MergeTree()
PRIMARY KEY (id, recorded)
PRIMARY KEY (project_id, recorded)
"
))
.execute()
@@ -87,7 +85,6 @@ pub async fn init_client_with_database(
"
CREATE TABLE IF NOT EXISTS {database}.playtime
(
id UUID,
recorded DateTime64(4),
seconds UInt64,
@@ -100,7 +97,7 @@ pub async fn init_client_with_database(
parent UInt64,
)
ENGINE = MergeTree()
PRIMARY KEY (id, recorded)
PRIMARY KEY (project_id, recorded)
"
))
.execute()
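
Dropping the synthetic UUID changes the MergeTree sorting key to (project_id, recorded), which is exactly what the analytics queries filter and group on, so ClickHouse can prune data parts by project instead of ordering rows by a random UUID. A trimmed-down sketch of the same table shape, with the column list reduced to what the queries touch and the column types assumed rather than taken from this diff:

async fn create_minimal_views_table(
    client: &clickhouse::Client,
) -> Result<(), clickhouse::error::Error> {
    client
        .query(
            "
            CREATE TABLE IF NOT EXISTS views_minimal
            (
                recorded DateTime64(4),
                project_id UInt64,
                country String
            )
            ENGINE = MergeTree()
            -- Ordering by (project_id, recorded) matches the
            -- WHERE project_id IN ? AND recorded BETWEEN ? AND ? pattern.
            PRIMARY KEY (project_id, recorded)
            ",
        )
        .execute()
        .await
}
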

View File

@@ -1,13 +1,10 @@
use clickhouse::Row;
use serde::{Deserialize, Serialize};
use std::hash::{Hash, Hasher};
use std::hash::Hash;
use std::net::Ipv6Addr;
use uuid::Uuid;
#[derive(Row, Serialize, Deserialize, Clone)]
#[derive(Row, Serialize, Deserialize, Clone, Eq, PartialEq, Hash)]
pub struct Download {
#[serde(with = "uuid::serde::compact")]
pub id: Uuid,
pub recorded: i64,
pub domain: String,
pub site_path: String,
@@ -27,24 +24,8 @@ pub struct Download {
pub headers: Vec<(String, String)>,
}
impl PartialEq<Self> for Download {
fn eq(&self, other: &Self) -> bool {
self.id == other.id
}
}
impl Eq for Download {}
impl Hash for Download {
fn hash<H: Hasher>(&self, state: &mut H) {
self.id.hash(state);
}
}
#[derive(Row, Serialize, Deserialize, Clone)]
#[derive(Row, Serialize, Deserialize, Clone, Eq, PartialEq, Hash)]
pub struct PageView {
#[serde(with = "uuid::serde::compact")]
pub id: Uuid,
pub recorded: i64,
pub domain: String,
pub site_path: String,
@@ -62,24 +43,8 @@ pub struct PageView {
pub headers: Vec<(String, String)>,
}
impl PartialEq<Self> for PageView {
fn eq(&self, other: &Self) -> bool {
self.id == other.id
}
}
impl Eq for PageView {}
impl Hash for PageView {
fn hash<H: Hasher>(&self, state: &mut H) {
self.id.hash(state);
}
}
#[derive(Row, Serialize, Deserialize, Clone, Debug)]
#[derive(Row, Serialize, Deserialize, Clone, Debug, Eq, PartialEq, Hash)]
pub struct Playtime {
#[serde(with = "uuid::serde::compact")]
pub id: Uuid,
pub recorded: i64,
pub seconds: u64,
@@ -95,17 +60,3 @@ pub struct Playtime {
/// Parent modpack this playtime was recorded in
pub parent: u64,
}
impl PartialEq<Self> for Playtime {
fn eq(&self, other: &Self) -> bool {
self.id == other.id
}
}
impl Eq for Playtime {}
impl Hash for Playtime {
fn hash<H: Hasher>(&self, state: &mut H) {
self.id.hash(state);
}
}
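
With the id field removed there is no longer a single unique column to compare on, so the hand-written PartialEq/Eq/Hash impls are replaced by derives and equality now covers every field. A small self-contained illustration of the derived semantics (the struct is a stand-in, not one of the rows above):

use std::collections::HashSet;

#[derive(Eq, PartialEq, Hash)]
struct Row {
    recorded: i64,
    project_id: u64,
}

fn main() {
    let mut set = HashSet::new();
    // Identical field values hash and compare equal, so the second insert is a no-op...
    set.insert(Row { recorded: 1, project_id: 42 });
    set.insert(Row { recorded: 1, project_id: 42 });
    // ...while any differing field makes the rows distinct.
    set.insert(Row { recorded: 2, project_id: 42 });
    assert_eq!(set.len(), 2);
}
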

View File

@@ -550,7 +550,7 @@ pub async fn process_payout(
client
.query(
r#"
SELECT COUNT(id) page_views, project_id
SELECT COUNT(1) page_views, project_id
FROM views
WHERE (recorded BETWEEN ? AND ?) AND (project_id != 0)
GROUP BY project_id
@@ -561,14 +561,14 @@ pub async fn process_payout(
.bind(end.timestamp())
.fetch_all::<ProjectMultiplier>(),
client
.query("SELECT COUNT(id) FROM views WHERE (recorded BETWEEN ? AND ?) AND (project_id != 0)")
.query("SELECT COUNT(1) FROM views WHERE (recorded BETWEEN ? AND ?) AND (project_id != 0)")
.bind(start.timestamp())
.bind(end.timestamp())
.fetch_one::<u64>(),
client
.query(
r#"
SELECT COUNT(id) page_views, project_id
SELECT COUNT(1) page_views, project_id
FROM downloads
WHERE (recorded BETWEEN ? AND ?) AND (user_id != 0)
GROUP BY project_id
@@ -579,7 +579,7 @@ pub async fn process_payout(
.bind(end.timestamp())
.fetch_all::<ProjectMultiplier>(),
client
.query("SELECT COUNT(id) FROM downloads WHERE (recorded BETWEEN ? AND ?) AND (user_id != 0)")
.query("SELECT COUNT(1) FROM downloads WHERE (recorded BETWEEN ? AND ?) AND (user_id != 0)")
.bind(start.timestamp())
.bind(end.timestamp())
.fetch_one::<u64>(),
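
COUNT(id) referenced the UUID column these tables no longer have; COUNT(1) counts rows without touching any column. The call shape is unchanged, e.g. (condensed from the hunk above, bindings as before):

let total_views = client
    .query("SELECT COUNT(1) FROM views WHERE (recorded BETWEEN ? AND ?) AND (project_id != 0)")
    .bind(start.timestamp())
    .bind(end.timestamp())
    .fetch_one::<u64>()
    .await?;
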

View File

@@ -16,7 +16,6 @@ use std::collections::HashMap;
use std::net::{AddrParseError, IpAddr, Ipv4Addr, Ipv6Addr};
use std::sync::Arc;
use url::Url;
use uuid::Uuid;
pub const FILTERED_HEADERS: &[&str] = &[
"authorization",
@@ -107,7 +106,6 @@ pub async fn page_view_ingest(
.unwrap_or_else(|_| Ipv4Addr::new(127, 0, 0, 1).to_ipv6_mapped());
let mut view = PageView {
id: Uuid::new_v4(),
recorded: get_current_tenths_of_ms(),
domain: domain.to_string(),
site_path: url.path().to_string(),
@@ -203,7 +201,6 @@ pub async fn playtime_ingest(
if let Some(version) = versions.iter().find(|x| id == x.inner.id.into()) {
analytics_queue.add_playtime(Playtime {
id: Default::default(),
recorded: get_current_tenths_of_ms(),
seconds: playtime.seconds as u64,
user_id: user.id.0,

View File

@@ -16,7 +16,6 @@ use sqlx::PgPool;
use std::collections::HashMap;
use std::net::Ipv4Addr;
use std::sync::Arc;
use uuid::Uuid;
pub fn config(cfg: &mut web::ServiceConfig) {
cfg.service(
@@ -103,7 +102,6 @@ pub async fn count_download(
.unwrap_or_else(|_| Ipv4Addr::new(127, 0, 0, 1).to_ipv6_mapped());
analytics_queue.add_download(Download {
id: Uuid::new_v4(),
recorded: get_current_tenths_of_ms(),
domain: url.host_str().unwrap_or_default().to_string(),
site_path: url.path().to_string(),

View File

@@ -16,7 +16,6 @@ use sqlx::PgPool;
use std::collections::HashMap;
use std::net::Ipv4Addr;
use std::sync::Arc;
use uuid::Uuid;
pub fn config(cfg: &mut web::ServiceConfig) {
cfg.service(
@@ -103,7 +102,6 @@ pub async fn count_download(
.unwrap_or_else(|_| Ipv4Addr::new(127, 0, 0, 1).to_ipv6_mapped());
analytics_queue.add_download(Download {
id: Uuid::new_v4(),
recorded: get_current_tenths_of_ms(),
domain: url.host_str().unwrap_or_default().to_string(),
site_path: url.path().to_string(),

View File

@@ -1,272 +0,0 @@
use super::ApiError;
use crate::database::redis::RedisPool;
use crate::routes::{v2_reroute, v3};
use crate::{models::ids::VersionId, queue::session::AuthQueue};
use actix_web::{get, web, HttpRequest, HttpResponse};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use std::collections::HashMap;
pub fn config(cfg: &mut web::ServiceConfig) {
cfg.service(
web::scope("analytics")
.service(playtimes_get)
.service(views_get)
.service(downloads_get)
.service(revenue_get)
.service(countries_downloads_get)
.service(countries_views_get),
);
}
/// The JSON data passed when fetching analytics data
/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out.
/// start_date and end_date are optional and default to two weeks ago and now, respectively
/// start_date and end_date are inclusive
/// resolution_minutes is optional. It sets the size of the aggregation window (every day, every minute, etc.) and defaults to 1440 (1 day)
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct GetData {
// only one of project_ids or version_ids should be used
// if neither are provided, all projects the user has access to will be used
pub project_ids: Option<String>,
pub version_ids: Option<String>,
pub start_date: Option<DateTime<Utc>>, // defaults to 2 weeks ago
pub end_date: Option<DateTime<Utc>>, // defaults to now
pub resolution_minutes: Option<u32>, // defaults to 1 day. Ignored in routes that do not aggregate over a resolution (e.g. /countries)
}
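Because project_ids and version_ids are sent as JSON-encoded arrays inside the query string (the v3 handlers parse them with serde_json::from_str::<Vec<String>>), a concrete value helps. A hedged sketch of building this struct, reusing the example id from the doc comments below; the helper function is illustrative:

fn example_get_data() -> GetData {
    GetData {
        // A JSON array of ids, encoded as a string.
        project_ids: Some(r#"["4N1tEhnO"]"#.to_string()),
        version_ids: None,
        start_date: None,               // defaults to two weeks ago
        end_date: None,                 // defaults to now
        resolution_minutes: Some(1440), // one data point per day
    }
}
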
/// Get playtime data for a set of projects or versions
/// Data is returned as a hashmap of project/version ids to a hashmap of days to playtime data
/// eg:
/// {
/// "4N1tEhnO": {
/// "20230824": 23
/// }
///}
/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out.
#[derive(Serialize, Deserialize, Clone)]
pub struct FetchedPlaytime {
pub time: u64,
pub total_seconds: u64,
pub loader_seconds: HashMap<String, u64>,
pub game_version_seconds: HashMap<String, u64>,
pub parent_seconds: HashMap<VersionId, u64>,
}
#[get("playtime")]
pub async fn playtimes_get(
req: HttpRequest,
clickhouse: web::Data<clickhouse::Client>,
data: web::Query<GetData>,
session_queue: web::Data<AuthQueue>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
let data = data.into_inner();
v3::analytics_get::playtimes_get(
req,
clickhouse,
web::Query(v3::analytics_get::GetData {
project_ids: data.project_ids,
version_ids: data.version_ids,
start_date: data.start_date,
end_date: data.end_date,
resolution_minutes: data.resolution_minutes,
}),
session_queue,
pool,
redis,
)
.await
}
/// Get view data for a set of projects or versions
/// Data is returned as a hashmap of project/version ids to a hashmap of days to views
/// eg:
/// {
/// "4N1tEhnO": {
/// "20230824": 1090
/// }
///}
/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out.
#[get("views")]
pub async fn views_get(
req: HttpRequest,
clickhouse: web::Data<clickhouse::Client>,
data: web::Query<GetData>,
session_queue: web::Data<AuthQueue>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
let data = data.into_inner();
v3::analytics_get::views_get(
req,
clickhouse,
web::Query(v3::analytics_get::GetData {
project_ids: data.project_ids,
version_ids: data.version_ids,
start_date: data.start_date,
end_date: data.end_date,
resolution_minutes: data.resolution_minutes,
}),
session_queue,
pool,
redis,
)
.await
.or_else(v2_reroute::flatten_404_error)
}
/// Get download data for a set of projects or versions
/// Data is returned as a hashmap of project/version ids to a hashmap of days to downloads
/// eg:
/// {
/// "4N1tEhnO": {
/// "20230824": 32
/// }
///}
/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out.
#[get("downloads")]
pub async fn downloads_get(
req: HttpRequest,
clickhouse: web::Data<clickhouse::Client>,
data: web::Query<GetData>,
session_queue: web::Data<AuthQueue>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
let data = data.into_inner();
v3::analytics_get::downloads_get(
req,
clickhouse,
web::Query(v3::analytics_get::GetData {
project_ids: data.project_ids,
version_ids: data.version_ids,
start_date: data.start_date,
end_date: data.end_date,
resolution_minutes: data.resolution_minutes,
}),
session_queue,
pool,
redis,
)
.await
.or_else(v2_reroute::flatten_404_error)
}
/// Get payout data for a set of projects
/// Data is returned as a hashmap of project ids to a hashmap of days to amount earned per day
/// eg:
/// {
/// "4N1tEhnO": {
/// "20230824": 0.001
/// }
///}
/// ONLY project IDs can be used. Unauthorized projects will be filtered out.
#[get("revenue")]
pub async fn revenue_get(
req: HttpRequest,
data: web::Query<GetData>,
session_queue: web::Data<AuthQueue>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
let data = data.into_inner();
v3::analytics_get::revenue_get(
req,
web::Query(v3::analytics_get::GetData {
project_ids: data.project_ids,
version_ids: None,
start_date: data.start_date,
end_date: data.end_date,
resolution_minutes: data.resolution_minutes,
}),
session_queue,
pool,
redis,
)
.await
.or_else(v2_reroute::flatten_404_error)
}
/// Get country data for a set of projects or versions
/// Data is returned as a hashmap of project/version ids to a hashmap of country to downloads.
/// Unknown countries are labeled "".
/// This is useful for seeing the top-performing countries per project
/// eg:
/// {
/// "4N1tEhnO": {
/// "CAN": 22
/// }
///}
/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out.
/// For this endpoint, provided dates are a range to aggregate over, not specific days to fetch
#[get("countries/downloads")]
pub async fn countries_downloads_get(
req: HttpRequest,
clickhouse: web::Data<clickhouse::Client>,
data: web::Query<GetData>,
session_queue: web::Data<AuthQueue>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
let data = data.into_inner();
v3::analytics_get::countries_downloads_get(
req,
clickhouse,
web::Query(v3::analytics_get::GetData {
project_ids: data.project_ids,
version_ids: data.version_ids,
start_date: data.start_date,
end_date: data.end_date,
resolution_minutes: data.resolution_minutes,
}),
session_queue,
pool,
redis,
)
.await
.or_else(v2_reroute::flatten_404_error)
}
/// Get country data for a set of projects or versions
/// Data is returned as a hashmap of project/version ids to a hashmap of country to views.
/// Unknown countries are labeled "".
/// This is useful for seeing the top-performing countries per project
/// eg:
/// {
/// "4N1tEhnO": {
/// "CAN": 56165
/// }
///}
/// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out.
/// For this endpoint, provided dates are a range to aggregate over, not specific days to fetch
#[get("countries/views")]
pub async fn countries_views_get(
req: HttpRequest,
clickhouse: web::Data<clickhouse::Client>,
data: web::Query<GetData>,
session_queue: web::Data<AuthQueue>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
) -> Result<HttpResponse, ApiError> {
let data = data.into_inner();
v3::analytics_get::countries_views_get(
req,
clickhouse,
web::Query(v3::analytics_get::GetData {
project_ids: data.project_ids,
version_ids: data.version_ids,
start_date: data.start_date,
end_date: data.end_date,
resolution_minutes: data.resolution_minutes,
}),
session_queue,
pool,
redis,
)
.await
.or_else(v2_reroute::flatten_404_error)
}

View File

@@ -1,191 +0,0 @@
use crate::database::redis::RedisPool;
use crate::file_hosting::FileHost;
use crate::models::collections::CollectionStatus;
use crate::queue::session::AuthQueue;
use crate::routes::v3::project_creation::CreateError;
use crate::routes::{v3, ApiError};
use actix_web::web::Data;
use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use std::sync::Arc;
use validator::Validate;
pub fn config(cfg: &mut web::ServiceConfig) {
cfg.service(collections_get);
cfg.service(collection_create);
cfg.service(
web::scope("collection")
.service(collection_get)
.service(collection_delete)
.service(collection_edit)
.service(collection_icon_edit)
.service(delete_collection_icon),
);
}
#[derive(Serialize, Deserialize, Validate, Clone)]
pub struct CollectionCreateData {
#[validate(
length(min = 3, max = 64),
custom(function = "crate::util::validate::validate_name")
)]
/// The title or name of the project.
pub title: String,
#[validate(length(min = 3, max = 255))]
/// A short description of the collection.
pub description: String,
#[validate(length(max = 32))]
#[serde(default = "Vec::new")]
/// A list of initial projects to use with the created collection
pub projects: Vec<String>,
}
#[post("collection")]
pub async fn collection_create(
req: HttpRequest,
collection_create_data: web::Json<CollectionCreateData>,
client: Data<PgPool>,
redis: Data<RedisPool>,
session_queue: Data<AuthQueue>,
) -> Result<HttpResponse, CreateError> {
let collection_create_data = collection_create_data.into_inner();
v3::collections::collection_create(
req,
web::Json(v3::collections::CollectionCreateData {
name: collection_create_data.title,
description: collection_create_data.description,
projects: collection_create_data.projects,
}),
client,
redis,
session_queue,
)
.await
}
#[derive(Serialize, Deserialize)]
pub struct CollectionIds {
pub ids: String,
}
#[get("collections")]
pub async fn collections_get(
req: HttpRequest,
web::Query(ids): web::Query<CollectionIds>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
v3::collections::collections_get(
req,
web::Query(v3::collections::CollectionIds { ids: ids.ids }),
pool,
redis,
session_queue,
)
.await
}
#[get("{id}")]
pub async fn collection_get(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
v3::collections::collection_get(req, info, pool, redis, session_queue).await
}
#[derive(Deserialize, Validate)]
pub struct EditCollection {
#[validate(
length(min = 3, max = 64),
custom(function = "crate::util::validate::validate_name")
)]
pub title: Option<String>,
#[validate(length(min = 3, max = 256))]
pub description: Option<String>,
pub status: Option<CollectionStatus>,
#[validate(length(max = 64))]
pub new_projects: Option<Vec<String>>,
}
#[patch("{id}")]
pub async fn collection_edit(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
new_collection: web::Json<EditCollection>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let new_collection = new_collection.into_inner();
v3::collections::collection_edit(
req,
info,
pool,
web::Json(v3::collections::EditCollection {
name: new_collection.title,
description: new_collection.description,
status: new_collection.status,
new_projects: new_collection.new_projects,
}),
redis,
session_queue,
)
.await
}
#[derive(Serialize, Deserialize)]
pub struct Extension {
pub ext: String,
}
#[patch("{id}/icon")]
#[allow(clippy::too_many_arguments)]
pub async fn collection_icon_edit(
web::Query(ext): web::Query<Extension>,
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
payload: web::Payload,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
v3::collections::collection_icon_edit(
web::Query(v3::collections::Extension { ext: ext.ext }),
req,
info,
pool,
redis,
file_host,
payload,
session_queue,
)
.await
}
#[delete("{id}/icon")]
pub async fn delete_collection_icon(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
v3::collections::delete_collection_icon(req, info, pool, redis, file_host, session_queue).await
}
#[delete("{id}")]
pub async fn collection_delete(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
v3::collections::collection_delete(req, info, pool, redis, session_queue).await
}

View File

@@ -1,265 +0,0 @@
use crate::database::redis::RedisPool;
use crate::file_hosting::FileHost;
use crate::models::projects::Project;
use crate::models::v2::projects::LegacyProject;
use crate::queue::session::AuthQueue;
use crate::routes::v3::project_creation::CreateError;
use crate::routes::{v2_reroute, v3, ApiError};
use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use std::sync::Arc;
use validator::Validate;
pub fn config(cfg: &mut web::ServiceConfig) {
cfg.service(organizations_get).service(organization_create);
cfg.service(
web::scope("organization")
.service(organization_get)
.service(organizations_edit)
.service(organization_delete)
.service(organization_projects_get)
.service(organization_projects_add)
.service(organization_projects_remove)
.service(organization_icon_edit)
.service(delete_organization_icon)
.service(super::teams::team_members_get_organization),
);
}
#[derive(Deserialize, Validate)]
pub struct NewOrganization {
#[validate(
length(min = 3, max = 64),
regex = "crate::util::validate::RE_URL_SAFE"
)]
// Title of the organization, also used as slug
pub title: String,
#[validate(length(min = 3, max = 256))]
pub description: String,
}
#[post("organization")]
pub async fn organization_create(
req: HttpRequest,
new_organization: web::Json<NewOrganization>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, CreateError> {
let new_organization = new_organization.into_inner();
v3::organizations::organization_create(
req,
web::Json(v3::organizations::NewOrganization {
name: new_organization.title,
description: new_organization.description,
}),
pool.clone(),
redis.clone(),
session_queue,
)
.await
}
#[get("{id}")]
pub async fn organization_get(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
v3::organizations::organization_get(req, info, pool.clone(), redis.clone(), session_queue).await
}
#[derive(Deserialize)]
pub struct OrganizationIds {
pub ids: String,
}
#[get("organizations")]
pub async fn organizations_get(
req: HttpRequest,
web::Query(ids): web::Query<OrganizationIds>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
v3::organizations::organizations_get(
req,
web::Query(v3::organizations::OrganizationIds { ids: ids.ids }),
pool,
redis,
session_queue,
)
.await
}
#[derive(Serialize, Deserialize, Validate)]
pub struct OrganizationEdit {
#[validate(length(min = 3, max = 256))]
pub description: Option<String>,
#[validate(
length(min = 3, max = 64),
regex = "crate::util::validate::RE_URL_SAFE"
)]
// Title of the organization, also used as slug
pub title: Option<String>,
}
#[patch("{id}")]
pub async fn organizations_edit(
req: HttpRequest,
info: web::Path<(String,)>,
new_organization: web::Json<OrganizationEdit>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let new_organization = new_organization.into_inner();
v3::organizations::organizations_edit(
req,
info,
web::Json(v3::organizations::OrganizationEdit {
description: new_organization.description,
name: new_organization.title,
}),
pool.clone(),
redis.clone(),
session_queue,
)
.await
}
#[delete("{id}")]
pub async fn organization_delete(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
v3::organizations::organization_delete(req, info, pool.clone(), redis.clone(), session_queue)
.await
}
#[get("{id}/projects")]
pub async fn organization_projects_get(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let response = v3::organizations::organization_projects_get(
req,
info,
pool.clone(),
redis.clone(),
session_queue,
)
.await?;
// Convert v3 projects to v2
match v2_reroute::extract_ok_json::<Vec<Project>>(response).await {
Ok(project) => {
let legacy_projects = LegacyProject::from_many(project, &**pool, &redis).await?;
Ok(HttpResponse::Ok().json(legacy_projects))
}
Err(response) => Ok(response),
}
}
#[derive(Deserialize)]
pub struct OrganizationProjectAdd {
pub project_id: String, // Also allow title/slug
}
#[post("{id}/projects")]
pub async fn organization_projects_add(
req: HttpRequest,
info: web::Path<(String,)>,
project_info: web::Json<OrganizationProjectAdd>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
let project_info = project_info.into_inner();
v3::organizations::organization_projects_add(
req,
info,
web::Json(v3::organizations::OrganizationProjectAdd {
project_id: project_info.project_id,
}),
pool.clone(),
redis.clone(),
session_queue,
)
.await
}
#[delete("{organization_id}/projects/{project_id}")]
pub async fn organization_projects_remove(
req: HttpRequest,
info: web::Path<(String, String)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
v3::organizations::organization_projects_remove(
req,
info,
pool.clone(),
redis.clone(),
session_queue,
)
.await
}
#[derive(Serialize, Deserialize)]
pub struct Extension {
pub ext: String,
}
#[patch("{id}/icon")]
#[allow(clippy::too_many_arguments)]
pub async fn organization_icon_edit(
web::Query(ext): web::Query<Extension>,
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
payload: web::Payload,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
v3::organizations::organization_icon_edit(
web::Query(v3::organizations::Extension { ext: ext.ext }),
req,
info,
pool.clone(),
redis.clone(),
file_host,
payload,
session_queue,
)
.await
}
#[delete("{id}/icon")]
pub async fn delete_organization_icon(
req: HttpRequest,
info: web::Path<(String,)>,
pool: web::Data<PgPool>,
redis: web::Data<RedisPool>,
file_host: web::Data<Arc<dyn FileHost + Send + Sync>>,
session_queue: web::Data<AuthQueue>,
) -> Result<HttpResponse, ApiError> {
v3::organizations::delete_organization_icon(
req,
info,
pool.clone(),
redis.clone(),
file_host,
session_queue,
)
.await
}

View File

@@ -6,10 +6,7 @@ use crate::{
auth::get_user_from_headers,
database::models::user_item,
models::{
ids::{
base62_impl::{parse_base62, to_base62},
ProjectId, VersionId,
},
ids::{base62_impl::to_base62, ProjectId, VersionId},
pats::Scopes,
},
queue::session::AuthQueue,
@@ -46,7 +43,6 @@ pub struct GetData {
// only one of project_ids or version_ids should be used
// if neither are provided, all projects the user has access to will be used
pub project_ids: Option<String>,
pub version_ids: Option<String>,
pub start_date: Option<DateTime<Utc>>, // defaults to 2 weeks ago
pub end_date: Option<DateTime<Utc>>, // defaults to now
@@ -94,17 +90,6 @@ pub async fn playtimes_get(
.as_ref()
.map(|ids| serde_json::from_str::<Vec<String>>(ids))
.transpose()?;
let version_ids = data
.version_ids
.as_ref()
.map(|ids| serde_json::from_str::<Vec<String>>(ids))
.transpose()?;
if project_ids.is_some() && version_ids.is_some() {
return Err(ApiError::InvalidInput(
"Only one of 'project_ids' or 'version_ids' should be used.".to_string(),
));
}
let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2));
let end_date = data.end_date.unwrap_or(Utc::now());
@@ -113,13 +98,11 @@ pub async fn playtimes_get(
// Convert String list to list of ProjectIds or VersionIds
// - Filter out unauthorized projects/versions
// - If no project_ids or version_ids are provided, we default to all projects the user has access to
let (project_ids, version_ids) =
filter_allowed_ids(project_ids, version_ids, user, &pool, &redis).await?;
let project_ids = filter_allowed_ids(project_ids, user, &pool, &redis).await?;
// Get the views
let playtimes = crate::clickhouse::fetch_playtimes(
project_ids,
version_ids,
project_ids.unwrap_or_default(),
start_date,
end_date,
resolution_minutes,
@@ -173,17 +156,6 @@ pub async fn views_get(
.as_ref()
.map(|ids| serde_json::from_str::<Vec<String>>(ids))
.transpose()?;
let version_ids = data
.version_ids
.as_ref()
.map(|ids| serde_json::from_str::<Vec<String>>(ids))
.transpose()?;
if project_ids.is_some() && version_ids.is_some() {
return Err(ApiError::InvalidInput(
"Only one of 'project_ids' or 'version_ids' should be used.".to_string(),
));
}
let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2));
let end_date = data.end_date.unwrap_or(Utc::now());
@@ -192,13 +164,11 @@ pub async fn views_get(
// Convert String list to list of ProjectIds or VersionIds
// - Filter out unauthorized projects/versions
// - If no project_ids or version_ids are provided, we default to all projects the user has access to
let (project_ids, version_ids) =
filter_allowed_ids(project_ids, version_ids, user, &pool, &redis).await?;
let project_ids = filter_allowed_ids(project_ids, user, &pool, &redis).await?;
// Get the views
let views = crate::clickhouse::fetch_views(
project_ids,
version_ids,
project_ids.unwrap_or_default(),
start_date,
end_date,
resolution_minutes,
@@ -252,17 +222,6 @@ pub async fn downloads_get(
.as_ref()
.map(|ids| serde_json::from_str::<Vec<String>>(ids))
.transpose()?;
let version_ids = data
.version_ids
.as_ref()
.map(|ids| serde_json::from_str::<Vec<String>>(ids))
.transpose()?;
if project_ids.is_some() && version_ids.is_some() {
return Err(ApiError::InvalidInput(
"Only one of 'project_ids' or 'version_ids' should be used.".to_string(),
));
}
let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2));
let end_date = data.end_date.unwrap_or(Utc::now());
@@ -271,13 +230,11 @@ pub async fn downloads_get(
// Convert String list to list of ProjectIds or VersionIds
// - Filter out unauthorized projects/versions
// - If no project_ids or version_ids are provided, we default to all projects the user has access to
let (project_ids, version_ids) =
filter_allowed_ids(project_ids, version_ids, user_option, &pool, &redis).await?;
let project_ids = filter_allowed_ids(project_ids, user_option, &pool, &redis).await?;
// Get the downloads
let downloads = crate::clickhouse::fetch_downloads(
project_ids,
version_ids,
project_ids.unwrap_or_default(),
start_date,
end_date,
resolution_minutes,
@@ -347,7 +304,7 @@ pub async fn revenue_get(
// Convert String list to list of ProjectIds or VersionIds
// - Filter out unauthorized projects/versions
// - If no project_ids or version_ids are provided, we default to all projects the user has access to
let (project_ids, _) = filter_allowed_ids(project_ids, None, user, &pool, &redis).await?;
let project_ids = filter_allowed_ids(project_ids, user, &pool, &redis).await?;
let duration: PgInterval = Duration::minutes(resolution_minutes as i64)
.try_into()
@@ -427,17 +384,6 @@ pub async fn countries_downloads_get(
.as_ref()
.map(|ids| serde_json::from_str::<Vec<String>>(ids))
.transpose()?;
let version_ids = data
.version_ids
.as_ref()
.map(|ids| serde_json::from_str::<Vec<String>>(ids))
.transpose()?;
if project_ids.is_some() && version_ids.is_some() {
return Err(ApiError::InvalidInput(
"Only one of 'project_ids' or 'version_ids' should be used.".to_string(),
));
}
let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2));
let end_date = data.end_date.unwrap_or(Utc::now());
@@ -445,13 +391,11 @@ pub async fn countries_downloads_get(
// Convert String list to list of ProjectIds or VersionIds
// - Filter out unauthorized projects/versions
// - If no project_ids or version_ids are provided, we default to all projects the user has access to
let (project_ids, version_ids) =
filter_allowed_ids(project_ids, version_ids, user, &pool, &redis).await?;
let project_ids = filter_allowed_ids(project_ids, user, &pool, &redis).await?;
// Get the countries
let countries = crate::clickhouse::fetch_countries(
project_ids,
version_ids,
project_ids.unwrap_or_default(),
start_date,
end_date,
clickhouse.into_inner(),
@@ -507,17 +451,6 @@ pub async fn countries_views_get(
.as_ref()
.map(|ids| serde_json::from_str::<Vec<String>>(ids))
.transpose()?;
let version_ids = data
.version_ids
.as_ref()
.map(|ids| serde_json::from_str::<Vec<String>>(ids))
.transpose()?;
if project_ids.is_some() && version_ids.is_some() {
return Err(ApiError::InvalidInput(
"Only one of 'project_ids' or 'version_ids' should be used.".to_string(),
));
}
let start_date = data.start_date.unwrap_or(Utc::now() - Duration::weeks(2));
let end_date = data.end_date.unwrap_or(Utc::now());
@@ -525,13 +458,11 @@ pub async fn countries_views_get(
// Convert String list to list of ProjectIds or VersionIds
// - Filter out unauthorized projects/versions
// - If no project_ids or version_ids are provided, we default to all projects the user has access to
let (project_ids, version_ids) =
filter_allowed_ids(project_ids, version_ids, user, &pool, &redis).await?;
let project_ids = filter_allowed_ids(project_ids, user, &pool, &redis).await?;
// Get the countries
let countries = crate::clickhouse::fetch_countries(
project_ids,
version_ids,
project_ids.unwrap_or_default(),
start_date,
end_date,
clickhouse.into_inner(),
@@ -554,19 +485,12 @@ pub async fn countries_views_get(
async fn filter_allowed_ids(
mut project_ids: Option<Vec<String>>,
version_ids: Option<Vec<String>>,
user: crate::models::users::User,
pool: &web::Data<PgPool>,
redis: &RedisPool,
) -> Result<(Option<Vec<ProjectId>>, Option<Vec<VersionId>>), ApiError> {
if project_ids.is_some() && version_ids.is_some() {
return Err(ApiError::InvalidInput(
"Only one of 'project_ids' or 'version_ids' should be used.".to_string(),
));
}
) -> Result<Option<Vec<ProjectId>>, ApiError> {
// If no project_ids or version_ids are provided, we default to all projects the user has *public* access to
if project_ids.is_none() && version_ids.is_none() {
if project_ids.is_none() {
project_ids = Some(
user_item::User::get_projects(user.id.into(), &***pool, redis)
.await?
@@ -645,92 +569,6 @@ async fn filter_allowed_ids(
} else {
None
};
let version_ids = if let Some(version_ids) = version_ids {
// Submitted version_ids are filtered by the user's permissions
let ids = version_ids
.iter()
.map(|id| Ok(VersionId(parse_base62(id)?).into()))
.collect::<Result<Vec<_>, ApiError>>()?;
let versions_data = database::models::Version::get_many(&ids, &***pool, redis).await?;
let project_ids = versions_data
.iter()
.map(|x| x.inner.project_id)
.collect::<Vec<database::models::ProjectId>>();
let projects_data =
database::models::Project::get_many_ids(&project_ids, &***pool, redis).await?;
let team_ids = projects_data
.iter()
.map(|x| x.inner.team_id)
.collect::<Vec<database::models::TeamId>>();
let team_members =
database::models::TeamMember::get_from_team_full_many(&team_ids, &***pool, redis)
.await?;
let organization_ids = projects_data
.iter()
.filter_map(|x| x.inner.organization_id)
.collect::<Vec<database::models::OrganizationId>>();
let organizations =
database::models::Organization::get_many_ids(&organization_ids, &***pool, redis)
.await?;
let organization_team_ids = organizations
.iter()
.map(|x| x.team_id)
.collect::<Vec<database::models::TeamId>>();
let organization_team_members = database::models::TeamMember::get_from_team_full_many(
&organization_team_ids,
&***pool,
redis,
)
.await?;
let ids = projects_data
.into_iter()
.filter(|project| {
let team_member = team_members
.iter()
.find(|x| x.team_id == project.inner.team_id && x.user_id == user.id.into());
let organization = project
.inner
.organization_id
.and_then(|oid| organizations.iter().find(|x| x.id == oid));
let organization_team_member = if let Some(organization) = organization {
organization_team_members
.iter()
.find(|x| x.team_id == organization.team_id && x.user_id == user.id.into())
} else {
None
};
let permissions = ProjectPermissions::get_permissions_by_role(
&user.role,
&team_member.cloned(),
&organization_team_member.cloned(),
)
.unwrap_or_default();
permissions.contains(ProjectPermissions::VIEW_ANALYTICS)
})
.map(|x| x.inner.id)
.collect::<Vec<_>>();
let ids = versions_data
.into_iter()
.filter(|version| ids.contains(&version.inner.project_id))
.map(|x| x.inner.id.into())
.collect::<Vec<_>>();
Some(ids)
} else {
None
};
// Only one of project_ids or version_ids will be Some
Ok((project_ids, version_ids))
Ok(project_ids)
}
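
The simplified helper threads straight into the fetchers. A hedged, condensed sketch of the flow the handlers above now follow, with auth, query parsing, and response shaping elided; the wrapper name, parameter arrangement, and the ReturnPlaytimes path are illustrative:

async fn playtimes_for_user(
    user: crate::models::users::User,
    project_ids: Option<Vec<String>>,
    pool: web::Data<PgPool>,
    redis: RedisPool,
    clickhouse: web::Data<clickhouse::Client>,
) -> Result<Vec<crate::clickhouse::ReturnPlaytimes>, ApiError> {
    let end_date = Utc::now();
    let start_date = end_date - Duration::weeks(2);
    // None expands to every project the user has access to;
    // the result is a single Option<Vec<ProjectId>> now that versions are gone.
    let project_ids = filter_allowed_ids(project_ids, user, &pool, &redis).await?;
    crate::clickhouse::fetch_playtimes(
        project_ids.unwrap_or_default(),
        start_date,
        end_date,
        1440,
        clickhouse.into_inner(),
    )
    .await
}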