rounds dates for revenue (#745)
* rounds dates for revenue
* analytics tests pass
@@ -34,7 +34,8 @@ pub fn config(cfg: &mut web::ServiceConfig) {
 /// The json data to be passed to fetch analytic data
 /// Either a list of project_ids or version_ids can be used, but not both. Unauthorized projects/versions will be filtered out.
-/// start_date and end_date are optional, and default to two weeks ago, and the maximum date respectively.
+/// start_date and end_date are optional, and default to two weeks ago, and the maximum date respectively
+/// start_date and end_date are inclusive
 /// resolution_minutes is optional. This refers to the window by which we are looking (every day, every minute, etc) and defaults to 1440 (1 day)
 #[derive(Serialize, Deserialize, Clone, Debug)]
 pub struct GetData {
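
A sketch of the fields the doc comment implies (the struct body is truncated in this view; the names and types below are assumptions inferred from how data.* is used in revenue_get, not the committed definition):

pub struct GetData {
    // Mutually exclusive: pass either project_ids or version_ids, not both
    pub project_ids: Option<Vec<String>>,
    pub version_ids: Option<Vec<String>>,
    pub start_date: Option<DateTime<Utc>>,  // defaults to two weeks ago
    pub end_date: Option<DateTime<Utc>>,    // revenue_get falls back to Utc::now()
    pub resolution_minutes: Option<u32>,    // defaults to 1440 (1 day)
}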
@@ -334,6 +335,15 @@ pub async fn revenue_get(
     let end_date = data.end_date.unwrap_or(Utc::now());
     let resolution_minutes = data.resolution_minutes.unwrap_or(60 * 24);
+
+    // Round up/down to the nearest resolution window, as the SQL fetch query
+    // does not round dates itself
+    // Round start_date down to nearest resolution
+    let diff = start_date.timestamp() % (resolution_minutes as i64 * 60);
+    let start_date = start_date - Duration::seconds(diff);
+
+    // Round end_date up to nearest resolution
+    let diff = end_date.timestamp() % (resolution_minutes as i64 * 60);
+    let end_date = end_date + Duration::seconds((resolution_minutes as i64 * 60) - diff);
+
     // Convert String list to list of ProjectIds or VersionIds
     // - Filter out unauthorized projects/versions
     // - If no project_ids or version_ids are provided, we default to all projects the user has access to
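
To make the rounding concrete, here is a self-contained sketch of the same arithmetic (the dates and the main() wrapper are illustrative, not part of the commit):

use chrono::{DateTime, Duration, Utc};

fn main() {
    // Default resolution of 1440 minutes gives an 86400-second window
    let window = 1440_i64 * 60;

    // Rounding 13:27:00 UTC down lands on that day's midnight boundary
    let start_date: DateTime<Utc> = "2023-10-05T13:27:00Z".parse().unwrap();
    let diff = start_date.timestamp() % window;
    assert_eq!(
        start_date - Duration::seconds(diff),
        "2023-10-05T00:00:00Z".parse::<DateTime<Utc>>().unwrap()
    );

    // Rounding the same instant up, as revenue_get does for end_date, lands on
    // the next midnight (a timestamp already on a boundary would be pushed a
    // full window forward, since diff would be 0)
    let diff = start_date.timestamp() % window;
    assert_eq!(
        start_date + Duration::seconds(window - diff),
        "2023-10-06T00:00:00Z".parse::<DateTime<Utc>>().unwrap()
    );
}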
tests/analytics.rs (new file, 137 lines)
@@ -0,0 +1,137 @@
use chrono::{DateTime, Duration, Utc};
use common::database::*;
use itertools::Itertools;
use labrinth::models::ids::base62_impl::parse_base62;
use rust_decimal::{prelude::ToPrimitive, Decimal};

use crate::common::environment::TestEnvironment;

// importing common module.
mod common;

#[actix_rt::test]
pub async fn analytics_revenue() {
    let test_env = TestEnvironment::build(None).await;
    let api = &test_env.v2;

    let alpha_project_id = test_env
        .dummy
        .as_ref()
        .unwrap()
        .project_alpha
        .project_id
        .clone();

    let pool = test_env.db.pool.clone();

    // Generate sample revenue data - insert directly into SQL
    let (mut insert_user_ids, mut insert_project_ids, mut insert_payouts, mut insert_starts) =
        (Vec::new(), Vec::new(), Vec::new(), Vec::new());

    // Note: these go from most recent to least recent
    let money_time_pairs: [(f64, DateTime<Utc>); 10] = [
        (50.0, Utc::now() - Duration::minutes(5)),
        (50.1, Utc::now() - Duration::minutes(10)),
        (101.0, Utc::now() - Duration::days(1)),
        (200.0, Utc::now() - Duration::days(2)),
        (311.0, Utc::now() - Duration::days(3)),
        (400.0, Utc::now() - Duration::days(4)),
        (526.0, Utc::now() - Duration::days(5)),
        (633.0, Utc::now() - Duration::days(6)),
        (800.0, Utc::now() - Duration::days(14)),
        (800.0, Utc::now() - Duration::days(800)),
    ];

    let project_id = parse_base62(&alpha_project_id).unwrap() as i64;
    for (money, time) in money_time_pairs.iter() {
        insert_user_ids.push(USER_USER_ID_PARSED);
        insert_project_ids.push(project_id);
        insert_payouts.push(Decimal::from_f64_retain(*money).unwrap());
        insert_starts.push(*time);
    }

    sqlx::query!(
        "
        INSERT INTO payouts_values (user_id, mod_id, amount, created)
        SELECT * FROM UNNEST ($1::bigint[], $2::bigint[], $3::numeric[], $4::timestamptz[])
        ",
        &insert_user_ids[..],
        &insert_project_ids[..],
        &insert_payouts[..],
        &insert_starts[..]
    )
    .execute(&pool)
    .await
    .unwrap();
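
The UNNEST form zips the four parallel arrays into rows, so all ten sample payouts land in a single INSERT statement rather than one statement per row.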

    let day = 86400;

    // Test analytics endpoint with default values
    // - all time points in the last 2 weeks
    // - 1 day resolution
    let analytics = api
        .get_analytics_revenue_deserialized(
            vec![&alpha_project_id],
            None,
            None,
            None,
            USER_USER_PAT,
        )
        .await;
    assert_eq!(analytics.len(), 1); // 1 project
    let project_analytics = analytics.get(&alpha_project_id).unwrap();
    // 10 points were inserted, but the 800-days-ago point falls outside the
    // default two-week window, and the two points from 5 and 10 minutes ago
    // share one daily bucket. Note that the day exactly 14 days ago is included.
    assert_eq!(project_analytics.len(), 8);
    // Sorted by key and reversed: values run from the most recent bucket
    // (largest key) to the oldest
    let (sorted_keys, sorted_by_key): (Vec<i64>, Vec<Decimal>) = project_analytics
        .iter()
        .sorted_by_key(|(k, _)| *k)
        .rev()
        .unzip();
    assert_eq!(
        vec![100.1, 101.0, 200.0, 311.0, 400.0, 526.0, 633.0, 800.0],
        to_f64_vec_rounded_up(sorted_by_key)
    );
    // Ensure that the keys are multiples of 1 day
    for k in sorted_keys {
        assert_eq!(k % day, 0);
    }

    // Test analytics with the last 801 days, to include all the sample data
    // keep resolution at default
    let analytics = api
        .get_analytics_revenue_deserialized(
            vec![&alpha_project_id],
            Some(Utc::now() - Duration::days(801)),
            None,
            None,
            USER_USER_PAT,
        )
        .await;
    let project_analytics = analytics.get(&alpha_project_id).unwrap();
    // all 10 points this time, minus the 2 that share a daily bucket
    assert_eq!(project_analytics.len(), 9);
    let (sorted_keys, sorted_by_key): (Vec<i64>, Vec<Decimal>) = project_analytics
        .iter()
        .sorted_by_key(|(k, _)| *k)
        .rev()
        .unzip();
    assert_eq!(
        vec![100.1, 101.0, 200.0, 311.0, 400.0, 526.0, 633.0, 800.0, 800.0],
        to_f64_vec_rounded_up(sorted_by_key)
    );
    for k in sorted_keys {
        assert_eq!(k % day, 0);
    }

    // Cleanup test db
    test_env.cleanup().await;
}

fn to_f64_rounded_up(d: Decimal) -> f64 {
    d.round_dp_with_strategy(1, rust_decimal::RoundingStrategy::MidpointAwayFromZero)
        .to_f64()
        .unwrap()
}

fn to_f64_vec_rounded_up(d: Vec<Decimal>) -> Vec<f64> {
    d.into_iter().map(to_f64_rounded_up).collect_vec()
}
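
Note on the helpers: from_f64_retain carries f64 imprecision into the Decimal values (50.0 + 50.1 sums to 100.0999...), so the assertions compare after rounding to one decimal place. Despite the _rounded_up names, MidpointAwayFromZero only breaks exact .05 ties upward; 100.04 would still round down to 100.0.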
@@ -1,10 +1,14 @@
use std::collections::HashMap;

use actix_http::StatusCode;
use actix_web::{
    dev::ServiceResponse,
    test::{self, TestRequest},
};
use bytes::Bytes;
use chrono::{DateTime, Utc};
use labrinth::models::projects::{Project, Version};
use rust_decimal::Decimal;
use serde_json::json;

use crate::common::{
@@ -190,4 +194,57 @@ impl ApiV2 {
        self.call(req).await
    }

    pub async fn get_analytics_revenue(
        &self,
        id_or_slugs: Vec<&str>,
        start_date: Option<DateTime<Utc>>,
        end_date: Option<DateTime<Utc>>,
        resolution_minutes: Option<u32>,
        pat: &str,
    ) -> ServiceResponse {
        let projects_string = serde_json::to_string(&id_or_slugs).unwrap();
        let projects_string = urlencoding::encode(&projects_string);

        let mut extra_args = String::new();
        if let Some(start_date) = start_date {
            let start_date = start_date.to_rfc3339();
            let start_date = urlencoding::encode(&start_date);
            extra_args.push_str(&format!("&start_date={start_date}"));
        }
        if let Some(end_date) = end_date {
            let end_date = end_date.to_rfc3339();
            let end_date = urlencoding::encode(&end_date);
            extra_args.push_str(&format!("&end_date={end_date}"));
        }
        if let Some(resolution_minutes) = resolution_minutes {
            extra_args.push_str(&format!("&resolution_minutes={}", resolution_minutes));
        }

        let req = test::TestRequest::get()
            .uri(&format!(
                "/v2/analytics/revenue?{projects_string}{extra_args}",
            ))
            .append_header(("Authorization", pat))
            .to_request();

        self.call(req).await
    }
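
    // For illustration only (hypothetical values, not part of the commit):
    // with id_or_slugs = vec!["abcd1234"] and no optional arguments, the URI
    // built above is /v2/analytics/revenue?%5B%22abcd1234%22%5D, i.e. the
    // URL-encoded JSON array ["abcd1234"] followed by any &start_date=,
    // &end_date=, or &resolution_minutes= pairs from extra_args.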

    pub async fn get_analytics_revenue_deserialized(
        &self,
        id_or_slugs: Vec<&str>,
        start_date: Option<DateTime<Utc>>,
        end_date: Option<DateTime<Utc>>,
        resolution_minutes: Option<u32>,
        pat: &str,
    ) -> HashMap<String, HashMap<i64, Decimal>> {
        let resp = self
            .get_analytics_revenue(id_or_slugs, start_date, end_date, resolution_minutes, pat)
            .await;
        assert_eq!(resp.status(), 200);
        test::read_body_json(resp).await
    }
}
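
For reference, the return type HashMap<String, HashMap<i64, Decimal>> maps each project id to a map from bucket start time to the revenue accrued in that window; the i64 keys are unix-epoch seconds, which is what the k % day == 0 assertions in tests/analytics.rs rely on.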
@@ -81,7 +81,7 @@ async fn version_ordering_when_unspecified_orders_oldest_first() {
     let alpha_version_id = env.dummy.as_ref().unwrap().project_alpha.version_id.clone();
     let new_version_id = get_json_val_str(
         env.v2
-            .create_default_version(&alpha_project_id, None, USER_USER_PAT)
+            .create_default_version(alpha_project_id, None, USER_USER_PAT)
             .await
             .id,
     );
@@ -105,7 +105,7 @@ async fn version_ordering_when_specified_orders_specified_before_unspecified() {
     let alpha_version_id = env.dummy.as_ref().unwrap().project_alpha.version_id.clone();
     let new_version_id = get_json_val_str(
         env.v2
-            .create_default_version(&alpha_project_id, Some(10000), USER_USER_PAT)
+            .create_default_version(alpha_project_id, Some(10000), USER_USER_PAT)
             .await
             .id,
     );