You've already forked AstralRinth
forked from didirus/AstralRinth
move to monorepo dir
This commit is contained in:
164
apps/labrinth/src/clickhouse/fetch.rs
Normal file
164
apps/labrinth/src/clickhouse/fetch.rs
Normal file
@@ -0,0 +1,164 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::{models::ids::ProjectId, routes::ApiError};
|
||||
use chrono::{DateTime, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// One row of interval-bucketed analytics (playtime, views, or downloads),
/// as selected by the `fetch_*` functions in this module.
///
/// NOTE(review): the `clickhouse::Row` codec appears to bind columns
/// positionally, so field order here must match the SELECT column order
/// (`time`, `id`, `total`) — confirm before reordering fields.
#[derive(clickhouse::Row, Serialize, Deserialize, Clone, Debug)]
pub struct ReturnIntervals {
    // Interval start as a Unix timestamp in seconds, produced by
    // `toUnixTimestamp(toStartOfInterval(...))` in the queries below.
    pub time: u32,
    // Project id the bucket belongs to (aliased from `project_id`).
    pub id: u64,
    // Aggregated value for the bucket: SUM(seconds) for playtime,
    // count(1) for views/downloads.
    pub total: u64,
}
|
||||
|
||||
/// One row of per-country analytics, as selected by
/// `fetch_countries_downloads` / `fetch_countries_views`.
///
/// NOTE(review): the `clickhouse::Row` codec appears to bind columns
/// positionally, so field order here must match the SELECT column order
/// (`country`, `project_id`, `total`) — confirm before reordering fields.
#[derive(clickhouse::Row, Serialize, Deserialize, Clone, Debug)]
pub struct ReturnCountry {
    // Country value as stored in the `country` String column.
    pub country: String,
    // Project id the count belongs to.
    pub id: u64,
    // Number of matching rows (downloads or views) for this
    // (country, project) pair.
    pub total: u64,
}
|
||||
|
||||
// Only one of project_id or version_id should be used
|
||||
// Fetches playtimes as a Vec of ReturnPlaytimes
|
||||
pub async fn fetch_playtimes(
|
||||
projects: Vec<ProjectId>,
|
||||
start_date: DateTime<Utc>,
|
||||
end_date: DateTime<Utc>,
|
||||
resolution_minute: u32,
|
||||
client: Arc<clickhouse::Client>,
|
||||
) -> Result<Vec<ReturnIntervals>, ApiError> {
|
||||
let query = client
|
||||
.query(
|
||||
"
|
||||
SELECT
|
||||
toUnixTimestamp(toStartOfInterval(recorded, toIntervalMinute(?))) AS time,
|
||||
project_id AS id,
|
||||
SUM(seconds) AS total
|
||||
FROM playtime
|
||||
WHERE recorded BETWEEN ? AND ?
|
||||
AND project_id IN ?
|
||||
GROUP BY
|
||||
time,
|
||||
project_id
|
||||
",
|
||||
)
|
||||
.bind(resolution_minute)
|
||||
.bind(start_date.timestamp())
|
||||
.bind(end_date.timestamp())
|
||||
.bind(projects.iter().map(|x| x.0).collect::<Vec<_>>());
|
||||
|
||||
Ok(query.fetch_all().await?)
|
||||
}
|
||||
|
||||
// Fetches per-project view counts as a Vec of ReturnIntervals, bucketed
// into intervals of `resolution_minutes` minutes. Each bucket's `total`
// is the number of view rows recorded between `start_date` and `end_date`
// (BETWEEN is inclusive on both ends).
pub async fn fetch_views(
    projects: Vec<ProjectId>,
    start_date: DateTime<Utc>,
    end_date: DateTime<Utc>,
    resolution_minutes: u32,
    client: Arc<clickhouse::Client>,
) -> Result<Vec<ReturnIntervals>, ApiError> {
    let query = client
        .query(
            "
            SELECT
                toUnixTimestamp(toStartOfInterval(recorded, toIntervalMinute(?))) AS time,
                project_id AS id,
                count(1) AS total
            FROM views
            WHERE recorded BETWEEN ? AND ?
            AND project_id IN ?
            GROUP BY
                time, project_id
            ",
        )
        .bind(resolution_minutes)
        .bind(start_date.timestamp())
        .bind(end_date.timestamp())
        // Unwrap the ProjectId newtype into raw u64 ids for the IN (?) bind.
        .bind(projects.iter().map(|x| x.0).collect::<Vec<_>>());

    Ok(query.fetch_all().await?)
}
|
||||
|
||||
// Fetches per-project download counts as a Vec of ReturnIntervals, bucketed
// into intervals of `resolution_minutes` minutes. Each bucket's `total`
// is the number of download rows recorded between `start_date` and
// `end_date` (BETWEEN is inclusive on both ends).
pub async fn fetch_downloads(
    projects: Vec<ProjectId>,
    start_date: DateTime<Utc>,
    end_date: DateTime<Utc>,
    resolution_minutes: u32,
    client: Arc<clickhouse::Client>,
) -> Result<Vec<ReturnIntervals>, ApiError> {
    let query = client
        .query(
            "
            SELECT
                toUnixTimestamp(toStartOfInterval(recorded, toIntervalMinute(?))) AS time,
                project_id as id,
                count(1) AS total
            FROM downloads
            WHERE recorded BETWEEN ? AND ?
            AND project_id IN ?
            GROUP BY time, project_id
            ",
        )
        .bind(resolution_minutes)
        .bind(start_date.timestamp())
        .bind(end_date.timestamp())
        // Unwrap the ProjectId newtype into raw u64 ids for the IN (?) bind.
        .bind(projects.iter().map(|x| x.0).collect::<Vec<_>>());

    Ok(query.fetch_all().await?)
}
|
||||
|
||||
pub async fn fetch_countries_downloads(
|
||||
projects: Vec<ProjectId>,
|
||||
start_date: DateTime<Utc>,
|
||||
end_date: DateTime<Utc>,
|
||||
client: Arc<clickhouse::Client>,
|
||||
) -> Result<Vec<ReturnCountry>, ApiError> {
|
||||
let query = client
|
||||
.query(
|
||||
"
|
||||
SELECT
|
||||
country,
|
||||
project_id,
|
||||
count(1) AS total
|
||||
FROM downloads
|
||||
WHERE recorded BETWEEN ? AND ? AND project_id IN ?
|
||||
GROUP BY
|
||||
country,
|
||||
project_id
|
||||
",
|
||||
)
|
||||
.bind(start_date.timestamp())
|
||||
.bind(end_date.timestamp())
|
||||
.bind(projects.iter().map(|x| x.0).collect::<Vec<_>>());
|
||||
|
||||
Ok(query.fetch_all().await?)
|
||||
}
|
||||
|
||||
pub async fn fetch_countries_views(
|
||||
projects: Vec<ProjectId>,
|
||||
start_date: DateTime<Utc>,
|
||||
end_date: DateTime<Utc>,
|
||||
client: Arc<clickhouse::Client>,
|
||||
) -> Result<Vec<ReturnCountry>, ApiError> {
|
||||
let query = client
|
||||
.query(
|
||||
"
|
||||
SELECT
|
||||
country,
|
||||
project_id,
|
||||
count(1) AS total
|
||||
FROM views
|
||||
WHERE recorded BETWEEN ? AND ? AND project_id IN ?
|
||||
GROUP BY
|
||||
country,
|
||||
project_id
|
||||
",
|
||||
)
|
||||
.bind(start_date.timestamp())
|
||||
.bind(end_date.timestamp())
|
||||
.bind(projects.iter().map(|x| x.0).collect::<Vec<_>>());
|
||||
|
||||
Ok(query.fetch_all().await?)
|
||||
}
|
||||
108
apps/labrinth/src/clickhouse/mod.rs
Normal file
108
apps/labrinth/src/clickhouse/mod.rs
Normal file
@@ -0,0 +1,108 @@
|
||||
use hyper::client::HttpConnector;
|
||||
use hyper_tls::{native_tls, HttpsConnector};
|
||||
|
||||
mod fetch;
|
||||
|
||||
pub use fetch::*;
|
||||
|
||||
pub async fn init_client() -> clickhouse::error::Result<clickhouse::Client> {
|
||||
init_client_with_database(&dotenvy::var("CLICKHOUSE_DATABASE").unwrap()).await
|
||||
}
|
||||
|
||||
pub async fn init_client_with_database(
|
||||
database: &str,
|
||||
) -> clickhouse::error::Result<clickhouse::Client> {
|
||||
let client = {
|
||||
let mut http_connector = HttpConnector::new();
|
||||
http_connector.enforce_http(false); // allow https URLs
|
||||
|
||||
let tls_connector = native_tls::TlsConnector::builder().build().unwrap().into();
|
||||
let https_connector = HttpsConnector::from((http_connector, tls_connector));
|
||||
let hyper_client = hyper::client::Client::builder().build(https_connector);
|
||||
|
||||
clickhouse::Client::with_http_client(hyper_client)
|
||||
.with_url(dotenvy::var("CLICKHOUSE_URL").unwrap())
|
||||
.with_user(dotenvy::var("CLICKHOUSE_USER").unwrap())
|
||||
.with_password(dotenvy::var("CLICKHOUSE_PASSWORD").unwrap())
|
||||
};
|
||||
|
||||
client
|
||||
.query(&format!("CREATE DATABASE IF NOT EXISTS {database}"))
|
||||
.execute()
|
||||
.await?;
|
||||
|
||||
client
|
||||
.query(&format!(
|
||||
"
|
||||
CREATE TABLE IF NOT EXISTS {database}.views
|
||||
(
|
||||
recorded DateTime64(4),
|
||||
domain String,
|
||||
site_path String,
|
||||
|
||||
user_id UInt64,
|
||||
project_id UInt64,
|
||||
monetized Bool DEFAULT True,
|
||||
|
||||
ip IPv6,
|
||||
country String,
|
||||
user_agent String,
|
||||
headers Array(Tuple(String, String))
|
||||
)
|
||||
ENGINE = MergeTree()
|
||||
PRIMARY KEY (project_id, recorded, ip)
|
||||
"
|
||||
))
|
||||
.execute()
|
||||
.await?;
|
||||
|
||||
client
|
||||
.query(&format!(
|
||||
"
|
||||
CREATE TABLE IF NOT EXISTS {database}.downloads
|
||||
(
|
||||
recorded DateTime64(4),
|
||||
domain String,
|
||||
site_path String,
|
||||
|
||||
user_id UInt64,
|
||||
project_id UInt64,
|
||||
version_id UInt64,
|
||||
|
||||
ip IPv6,
|
||||
country String,
|
||||
user_agent String,
|
||||
headers Array(Tuple(String, String))
|
||||
)
|
||||
ENGINE = MergeTree()
|
||||
PRIMARY KEY (project_id, recorded, ip)
|
||||
"
|
||||
))
|
||||
.execute()
|
||||
.await?;
|
||||
|
||||
client
|
||||
.query(&format!(
|
||||
"
|
||||
CREATE TABLE IF NOT EXISTS {database}.playtime
|
||||
(
|
||||
recorded DateTime64(4),
|
||||
seconds UInt64,
|
||||
|
||||
user_id UInt64,
|
||||
project_id UInt64,
|
||||
version_id UInt64,
|
||||
|
||||
loader String,
|
||||
game_version String,
|
||||
parent UInt64
|
||||
)
|
||||
ENGINE = MergeTree()
|
||||
PRIMARY KEY (project_id, recorded, user_id)
|
||||
"
|
||||
))
|
||||
.execute()
|
||||
.await?;
|
||||
|
||||
Ok(client.with_database(database))
|
||||
}
|
||||
Reference in New Issue
Block a user