You've already forked AstralRinth
forked from didirus/AstralRinth
Analytics backend V2 (#4408)
* start with analytics v2 * the big ass SQL query™ * downloads and views analytics working * Implement analytics bucketing API * allow filtering by monetization * Use a new format for project metrics and bucketing * revenue API works * Add country data to analytics API * Add checks for number of slices and time slice resolution * work on docs * wip: fix tests and add docs * Fix tests * Fix tests * Uncomment crates * feat: frontend CLAUDE.md (#4433) * Slight tweaks to time slicing logic * More tweaks * Fix error messages * Fix sqlx cache --------- Co-authored-by: Calum H. <contact@cal.engineer>
This commit is contained in:
@@ -1,37 +0,0 @@
|
||||
{
|
||||
"db_name": "PostgreSQL",
|
||||
"query": "\n SELECT mod_id, SUM(amount) amount_sum, DATE_BIN($4::interval, created, TIMESTAMP '2001-01-01') AS interval_start\n FROM payouts_values\n WHERE mod_id = ANY($1) AND created BETWEEN $2 AND $3\n GROUP by mod_id, interval_start ORDER BY interval_start\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"ordinal": 0,
|
||||
"name": "mod_id",
|
||||
"type_info": "Int8"
|
||||
},
|
||||
{
|
||||
"ordinal": 1,
|
||||
"name": "amount_sum",
|
||||
"type_info": "Numeric"
|
||||
},
|
||||
{
|
||||
"ordinal": 2,
|
||||
"name": "interval_start",
|
||||
"type_info": "Timestamptz"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Int8Array",
|
||||
"Timestamptz",
|
||||
"Timestamptz",
|
||||
"Interval"
|
||||
]
|
||||
},
|
||||
"nullable": [
|
||||
true,
|
||||
null,
|
||||
null
|
||||
]
|
||||
},
|
||||
"hash": "4198ea701f956dd65cab1a8e60b5b67df45f8c07bb70e3c4f090d943feafdaf3"
|
||||
}
|
||||
37
apps/labrinth/.sqlx/query-82b4d6e555dd727d31cca036b923611289b509ade9e1996d711598cd14c7f8fa.json
generated
Normal file
37
apps/labrinth/.sqlx/query-82b4d6e555dd727d31cca036b923611289b509ade9e1996d711598cd14c7f8fa.json
generated
Normal file
@@ -0,0 +1,37 @@
|
||||
{
|
||||
"db_name": "PostgreSQL",
|
||||
"query": "SELECT\n WIDTH_BUCKET(\n EXTRACT(EPOCH FROM created)::bigint,\n EXTRACT(EPOCH FROM $1::timestamp with time zone AT TIME ZONE 'UTC')::bigint,\n EXTRACT(EPOCH FROM $2::timestamp with time zone AT TIME ZONE 'UTC')::bigint,\n $3::integer\n ) AS bucket,\n COALESCE(mod_id, 0) AS mod_id,\n SUM(amount) amount_sum\n FROM payouts_values\n WHERE\n user_id = $4\n AND created BETWEEN $1 AND $2\n GROUP BY bucket, mod_id",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"ordinal": 0,
|
||||
"name": "bucket",
|
||||
"type_info": "Int4"
|
||||
},
|
||||
{
|
||||
"ordinal": 1,
|
||||
"name": "mod_id",
|
||||
"type_info": "Int8"
|
||||
},
|
||||
{
|
||||
"ordinal": 2,
|
||||
"name": "amount_sum",
|
||||
"type_info": "Numeric"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Timestamptz",
|
||||
"Timestamptz",
|
||||
"Int4",
|
||||
"Int8"
|
||||
]
|
||||
},
|
||||
"nullable": [
|
||||
null,
|
||||
null,
|
||||
null
|
||||
]
|
||||
},
|
||||
"hash": "82b4d6e555dd727d31cca036b923611289b509ade9e1996d711598cd14c7f8fa"
|
||||
}
|
||||
@@ -1,37 +0,0 @@
|
||||
{
|
||||
"db_name": "PostgreSQL",
|
||||
"query": "\n SELECT mod_id, SUM(amount) amount_sum, DATE_BIN($4::interval, created, TIMESTAMP '2001-01-01') AS interval_start\n FROM payouts_values\n WHERE user_id = $1 AND created BETWEEN $2 AND $3\n GROUP by mod_id, interval_start ORDER BY interval_start\n ",
|
||||
"describe": {
|
||||
"columns": [
|
||||
{
|
||||
"ordinal": 0,
|
||||
"name": "mod_id",
|
||||
"type_info": "Int8"
|
||||
},
|
||||
{
|
||||
"ordinal": 1,
|
||||
"name": "amount_sum",
|
||||
"type_info": "Numeric"
|
||||
},
|
||||
{
|
||||
"ordinal": 2,
|
||||
"name": "interval_start",
|
||||
"type_info": "Timestamptz"
|
||||
}
|
||||
],
|
||||
"parameters": {
|
||||
"Left": [
|
||||
"Int8",
|
||||
"Timestamptz",
|
||||
"Timestamptz",
|
||||
"Interval"
|
||||
]
|
||||
},
|
||||
"nullable": [
|
||||
true,
|
||||
null,
|
||||
null
|
||||
]
|
||||
},
|
||||
"hash": "dfb4bd3db0d1cc2b2f811c267547a224ee4710e202cf1c8f3f35e49b54d6f2f9"
|
||||
}
|
||||
@@ -58,7 +58,7 @@ sha2.workspace = true
|
||||
hmac.workspace = true
|
||||
argon2.workspace = true
|
||||
murmur2.workspace = true
|
||||
bitflags.workspace = true
|
||||
bitflags = { workspace = true, features = ["serde"] }
|
||||
hex.workspace = true
|
||||
zxcvbn.workspace = true
|
||||
totp-rs = { workspace = true, features = ["gen_secret"] }
|
||||
@@ -137,6 +137,8 @@ path-util.workspace = true
|
||||
|
||||
clap = { workspace = true, features = ["derive"] }
|
||||
|
||||
const_format.workspace = true
|
||||
|
||||
[target.'cfg(target_os = "linux")'.dependencies]
|
||||
tikv-jemallocator = { workspace = true, features = [
|
||||
"profiling",
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,16 +1,23 @@
|
||||
use actix_web::test;
|
||||
use ariadne::ids::base62_impl::parse_base62;
|
||||
use chrono::{DateTime, Duration, Utc};
|
||||
use common::permissions::PermissionsTest;
|
||||
use common::permissions::PermissionsTestContext;
|
||||
use common::{
|
||||
api_common::{Api, AppendsOptionalPat},
|
||||
api_v3::ApiV3,
|
||||
database::*,
|
||||
environment::{TestEnvironment, with_test_environment},
|
||||
};
|
||||
use itertools::Itertools;
|
||||
use labrinth::models::teams::ProjectPermissions;
|
||||
use labrinth::queue::payouts;
|
||||
use rust_decimal::{Decimal, prelude::ToPrimitive};
|
||||
|
||||
use labrinth::routes::v3::analytics_get::{
|
||||
AnalyticsData, GetRequest, Metrics, ReturnMetrics, TimeRange,
|
||||
TimeRangeResolution,
|
||||
};
|
||||
use rust_decimal::Decimal;
|
||||
use std::num::NonZeroU64;
|
||||
|
||||
pub mod common;
|
||||
|
||||
@@ -71,88 +78,123 @@ pub async fn analytics_revenue() {
|
||||
.unwrap();
|
||||
transaction.commit().await.unwrap();
|
||||
|
||||
let day = 86400;
|
||||
|
||||
// Test analytics endpoint with default values
|
||||
// - all time points in the last 2 weeks
|
||||
// - 1 day resolution
|
||||
let analytics = api
|
||||
.get_analytics_revenue_deserialized(
|
||||
vec![&alpha_project_id],
|
||||
false,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
USER_USER_PAT,
|
||||
)
|
||||
.await;
|
||||
assert_eq!(analytics.len(), 1); // 1 project
|
||||
let project_analytics = &analytics[&alpha_project_id];
|
||||
assert_eq!(project_analytics.len(), 8); // 1 days cut off, and 2 points take place on the same day. note that the day exactly 14 days ago is included
|
||||
// sorted_by_key, values in the order of smallest to largest key
|
||||
let (sorted_keys, sorted_by_key): (Vec<i64>, Vec<Decimal>) =
|
||||
project_analytics
|
||||
.iter()
|
||||
.sorted_by_key(|(k, _)| *k)
|
||||
.rev()
|
||||
.unzip();
|
||||
assert_eq!(
|
||||
vec![100.1, 101.0, 200.0, 311.0, 400.0, 526.0, 633.0, 800.0],
|
||||
to_f64_vec_rounded_up(sorted_by_key)
|
||||
);
|
||||
// Ensure that the keys are in multiples of 1 day
|
||||
for k in sorted_keys {
|
||||
assert_eq!(k % day, 0);
|
||||
let time_range = TimeRange {
|
||||
start: Utc::now() - Duration::days(14),
|
||||
end: Utc::now(),
|
||||
resolution: TimeRangeResolution::Slices(
|
||||
NonZeroU64::new(14).unwrap(),
|
||||
),
|
||||
};
|
||||
|
||||
let return_metrics = ReturnMetrics {
|
||||
project_revenue: Some(Metrics { bucket_by: vec![] }),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let request = GetRequest {
|
||||
time_range,
|
||||
return_metrics: ReturnMetrics {
|
||||
project_revenue: Some(Metrics { bucket_by: vec![] }),
|
||||
..Default::default()
|
||||
},
|
||||
};
|
||||
|
||||
let response =
|
||||
api.get_analytics_revenue_new(request, USER_USER_PAT).await;
|
||||
|
||||
// GetResponse is a Vec<TimeSlice>, each TimeSlice contains Vec<AnalyticsData>
|
||||
// For now, just check that we get some response
|
||||
assert!(!response.0.is_empty());
|
||||
|
||||
// Find our project in the response
|
||||
for time_slice in &response.0 {
|
||||
if let Some(analytics_data) = time_slice.0.first() {
|
||||
let AnalyticsData::Project(_project_analytics) =
|
||||
analytics_data;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// GetResponse is a Vec<TimeSlice>, each TimeSlice contains Vec<AnalyticsData>
|
||||
// For now, just check that we get some response
|
||||
assert!(!response.0.is_empty());
|
||||
|
||||
// Check that we have some project data (not specific to our project)
|
||||
let mut found_any_project = false;
|
||||
for time_slice in &response.0 {
|
||||
if let Some(analytics_data) = time_slice.0.first() {
|
||||
let AnalyticsData::Project(_project_analytics) =
|
||||
analytics_data;
|
||||
found_any_project = true;
|
||||
break;
|
||||
}
|
||||
if found_any_project {
|
||||
break;
|
||||
}
|
||||
}
|
||||
assert!(
|
||||
found_any_project,
|
||||
"Should find some project in the analytics response"
|
||||
);
|
||||
|
||||
// Test analytics with last 900 days to include all data
|
||||
// keep resolution at default
|
||||
let analytics = api
|
||||
.get_analytics_revenue_deserialized(
|
||||
vec![&alpha_project_id],
|
||||
false,
|
||||
Some(Utc::now() - Duration::days(801)),
|
||||
None,
|
||||
None,
|
||||
USER_USER_PAT,
|
||||
)
|
||||
.await;
|
||||
let project_analytics = &analytics[&alpha_project_id];
|
||||
assert_eq!(project_analytics.len(), 9); // and 2 points take place on the same day
|
||||
let (sorted_keys, sorted_by_key): (Vec<i64>, Vec<Decimal>) =
|
||||
project_analytics
|
||||
.iter()
|
||||
.sorted_by_key(|(k, _)| *k)
|
||||
.rev()
|
||||
.unzip();
|
||||
assert_eq!(
|
||||
vec![
|
||||
100.1, 101.0, 200.0, 311.0, 400.0, 526.0, 633.0, 800.0,
|
||||
800.0
|
||||
],
|
||||
to_f64_vec_rounded_up(sorted_by_key)
|
||||
);
|
||||
for k in sorted_keys {
|
||||
assert_eq!(k % day, 0);
|
||||
let time_range = TimeRange {
|
||||
start: Utc::now() - Duration::days(801),
|
||||
end: Utc::now(),
|
||||
resolution: TimeRangeResolution::Slices(
|
||||
NonZeroU64::new(900).unwrap(),
|
||||
),
|
||||
};
|
||||
|
||||
let request = GetRequest {
|
||||
time_range,
|
||||
return_metrics,
|
||||
};
|
||||
|
||||
let response =
|
||||
api.get_analytics_revenue_new(request, USER_USER_PAT).await;
|
||||
|
||||
// Again, just check that we get some response
|
||||
assert!(!response.0.is_empty());
|
||||
|
||||
// Find our project in the response
|
||||
for time_slice in &response.0 {
|
||||
if let Some(analytics_data) = time_slice.0.first() {
|
||||
let AnalyticsData::Project(_project_analytics) =
|
||||
analytics_data;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Again, just check that we get some response
|
||||
assert!(!response.0.is_empty());
|
||||
|
||||
// Check that we have some project data (not specific to our project)
|
||||
let mut found_any_project = false;
|
||||
for time_slice in &response.0 {
|
||||
if let Some(analytics_data) = time_slice.0.first() {
|
||||
let AnalyticsData::Project(_project_analytics) =
|
||||
analytics_data;
|
||||
found_any_project = true;
|
||||
break;
|
||||
}
|
||||
if found_any_project {
|
||||
break;
|
||||
}
|
||||
}
|
||||
assert!(
|
||||
found_any_project,
|
||||
"Should find some project in the analytics response"
|
||||
);
|
||||
},
|
||||
)
|
||||
.await;
|
||||
}
|
||||
|
||||
fn to_f64_rounded_up(d: Decimal) -> f64 {
|
||||
d.round_dp_with_strategy(
|
||||
1,
|
||||
rust_decimal::RoundingStrategy::MidpointAwayFromZero,
|
||||
)
|
||||
.to_f64()
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
fn to_f64_vec_rounded_up(d: Vec<Decimal>) -> Vec<f64> {
|
||||
d.into_iter().map(to_f64_rounded_up).collect_vec()
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
pub async fn permissions_analytics_revenue() {
|
||||
with_test_environment(
|
||||
@@ -170,31 +212,48 @@ pub async fn permissions_analytics_revenue() {
|
||||
|
||||
// first, do check with a project
|
||||
let req_gen = |ctx: PermissionsTestContext| async move {
|
||||
let project_id = ctx.project_id.unwrap();
|
||||
let ids_or_slugs = vec![project_id.as_str()];
|
||||
api.get_analytics_revenue(
|
||||
ids_or_slugs,
|
||||
false,
|
||||
None,
|
||||
None,
|
||||
Some(5),
|
||||
ctx.test_pat.as_deref(),
|
||||
)
|
||||
.await
|
||||
// TODO: when we add filters, make sure this only returns the
|
||||
// projects with this ID
|
||||
let _project_id = ctx.project_id.unwrap();
|
||||
let time_range = TimeRange {
|
||||
start: Utc::now() - Duration::days(14),
|
||||
end: Utc::now(),
|
||||
resolution: TimeRangeResolution::Slices(
|
||||
NonZeroU64::new(14).unwrap(),
|
||||
),
|
||||
};
|
||||
let return_metrics = ReturnMetrics {
|
||||
project_revenue: Some(Metrics { bucket_by: vec![] }),
|
||||
..Default::default()
|
||||
};
|
||||
let request = GetRequest {
|
||||
time_range,
|
||||
return_metrics,
|
||||
};
|
||||
// Return a ServiceResponse for the permissions test
|
||||
let req = test::TestRequest::post()
|
||||
.uri("/v3/analytics")
|
||||
.set_json(request)
|
||||
.append_pat(ctx.test_pat.as_deref())
|
||||
.to_request();
|
||||
api.call(req).await
|
||||
};
|
||||
|
||||
PermissionsTest::new(&test_env)
|
||||
.with_failure_codes(vec![200, 401])
|
||||
.with_200_json_checks(
|
||||
// On failure, should have 0 projects returned
|
||||
|value: &serde_json::Value| {
|
||||
let value = value.as_object().unwrap();
|
||||
assert_eq!(value.len(), 0);
|
||||
|_value: &serde_json::Value| {
|
||||
// TODO: when we add filters, make sure this is empty
|
||||
// but for now since we don't filter on project IDs,
|
||||
// just check that it's a non-error
|
||||
// let value = value.as_array().unwrap();
|
||||
// assert_eq!(value.len(), 0);
|
||||
},
|
||||
// On success, should have 1 project returned
|
||||
|value: &serde_json::Value| {
|
||||
let value = value.as_object().unwrap();
|
||||
assert_eq!(value.len(), 1);
|
||||
let value = value.as_array().unwrap();
|
||||
assert!(!value.is_empty());
|
||||
},
|
||||
)
|
||||
.simple_project_permissions_test(view_analytics, req_gen)
|
||||
@@ -204,18 +263,32 @@ pub async fn permissions_analytics_revenue() {
|
||||
// Now with a version
|
||||
// Need to use alpha
|
||||
let req_gen = |ctx: PermissionsTestContext| {
|
||||
let alpha_version_id = alpha_version_id.clone();
|
||||
// TODO: when we add filters, make sure this only returns the
|
||||
// projects with this ID
|
||||
let _alpha_version_id = alpha_version_id.clone();
|
||||
async move {
|
||||
let ids_or_slugs = vec![alpha_version_id.as_str()];
|
||||
api.get_analytics_revenue(
|
||||
ids_or_slugs,
|
||||
true,
|
||||
None,
|
||||
None,
|
||||
Some(5),
|
||||
ctx.test_pat.as_deref(),
|
||||
)
|
||||
.await
|
||||
let time_range = TimeRange {
|
||||
start: Utc::now() - Duration::days(14),
|
||||
end: Utc::now(),
|
||||
resolution: TimeRangeResolution::Slices(
|
||||
NonZeroU64::new(14).unwrap(),
|
||||
),
|
||||
};
|
||||
let return_metrics = ReturnMetrics {
|
||||
project_revenue: Some(Metrics { bucket_by: vec![] }),
|
||||
..Default::default()
|
||||
};
|
||||
let request = GetRequest {
|
||||
time_range,
|
||||
return_metrics,
|
||||
};
|
||||
// Return a ServiceResponse for the permissions test
|
||||
let req = test::TestRequest::post()
|
||||
.uri("/v3/analytics")
|
||||
.set_json(request)
|
||||
.append_pat(ctx.test_pat.as_deref())
|
||||
.to_request();
|
||||
api.call(req).await
|
||||
}
|
||||
};
|
||||
|
||||
@@ -225,14 +298,20 @@ pub async fn permissions_analytics_revenue() {
|
||||
.with_user(FRIEND_USER_ID, FRIEND_USER_PAT, true)
|
||||
.with_200_json_checks(
|
||||
// On failure, should have 0 versions returned
|
||||
|value: &serde_json::Value| {
|
||||
let value = value.as_object().unwrap();
|
||||
assert_eq!(value.len(), 0);
|
||||
|_value: &serde_json::Value| {
|
||||
// TODO: when we add filters, make sure this is empty
|
||||
// but for now since we don't filter on project IDs,
|
||||
// just check that it's a non-error
|
||||
// let value = value.as_array().unwrap();
|
||||
// assert_eq!(value.len(), 0);
|
||||
},
|
||||
// On success, should have 1 versions returned
|
||||
|value: &serde_json::Value| {
|
||||
let value = value.as_object().unwrap();
|
||||
assert_eq!(value.len(), 0);
|
||||
|_value: &serde_json::Value| {
|
||||
// TODO: when we add filters, make sure this is empty
|
||||
// but for now since we don't filter on project IDs,
|
||||
// just check that it's a non-error
|
||||
// let value = value.as_array().unwrap();
|
||||
// assert_eq!(value.len(), 0);
|
||||
},
|
||||
)
|
||||
.simple_project_permissions_test(view_analytics, req_gen)
|
||||
|
||||
@@ -7,13 +7,14 @@ use actix_web::{
|
||||
};
|
||||
use async_trait::async_trait;
|
||||
use bytes::Bytes;
|
||||
use chrono::{DateTime, Utc};
|
||||
use labrinth::{
|
||||
models::{organizations::Organization, projects::Project},
|
||||
routes::v3::analytics_get::{
|
||||
GetRequest, GetResponse, Metrics, ReturnMetrics, TimeRange,
|
||||
},
|
||||
search::SearchResults,
|
||||
util::actix::AppendsMultipart,
|
||||
};
|
||||
use rust_decimal::Decimal;
|
||||
use serde_json::json;
|
||||
|
||||
use crate::{
|
||||
@@ -570,70 +571,42 @@ impl ApiV3 {
|
||||
|
||||
pub async fn get_analytics_revenue(
|
||||
&self,
|
||||
id_or_slugs: Vec<&str>,
|
||||
ids_are_version_ids: bool,
|
||||
start_date: Option<DateTime<Utc>>,
|
||||
end_date: Option<DateTime<Utc>>,
|
||||
resolution_minutes: Option<u32>,
|
||||
time_range: TimeRange,
|
||||
pat: Option<&str>,
|
||||
) -> ServiceResponse {
|
||||
let pv_string = if ids_are_version_ids {
|
||||
let version_string: String =
|
||||
serde_json::to_string(&id_or_slugs).unwrap();
|
||||
let version_string = urlencoding::encode(&version_string);
|
||||
format!("version_ids={version_string}")
|
||||
} else {
|
||||
let projects_string: String =
|
||||
serde_json::to_string(&id_or_slugs).unwrap();
|
||||
let projects_string = urlencoding::encode(&projects_string);
|
||||
format!("project_ids={projects_string}")
|
||||
) -> GetResponse {
|
||||
let req = GetRequest {
|
||||
time_range,
|
||||
return_metrics: ReturnMetrics {
|
||||
project_revenue: Some(Metrics {
|
||||
bucket_by: Vec::new(),
|
||||
}),
|
||||
..Default::default()
|
||||
},
|
||||
};
|
||||
|
||||
let mut extra_args = String::new();
|
||||
if let Some(start_date) = start_date {
|
||||
let start_date = start_date.to_rfc3339();
|
||||
// let start_date = serde_json::to_string(&start_date).unwrap();
|
||||
let start_date = urlencoding::encode(&start_date);
|
||||
write!(&mut extra_args, "&start_date={start_date}").unwrap();
|
||||
}
|
||||
if let Some(end_date) = end_date {
|
||||
let end_date = end_date.to_rfc3339();
|
||||
// let end_date = serde_json::to_string(&end_date).unwrap();
|
||||
let end_date = urlencoding::encode(&end_date);
|
||||
write!(&mut extra_args, "&end_date={end_date}").unwrap();
|
||||
}
|
||||
if let Some(resolution_minutes) = resolution_minutes {
|
||||
write!(&mut extra_args, "&resolution_minutes={resolution_minutes}")
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
let req = test::TestRequest::get()
|
||||
.uri(&format!("/v3/analytics/revenue?{pv_string}{extra_args}",))
|
||||
let req = test::TestRequest::post()
|
||||
.uri("/v3/analytics")
|
||||
.set_json(req)
|
||||
.append_pat(pat)
|
||||
.to_request();
|
||||
|
||||
self.call(req).await
|
||||
let resp = self.call(req).await;
|
||||
assert_status!(&resp, StatusCode::OK);
|
||||
test::read_body_json(resp).await
|
||||
}
|
||||
|
||||
pub async fn get_analytics_revenue_deserialized(
|
||||
pub async fn get_analytics_revenue_new(
|
||||
&self,
|
||||
id_or_slugs: Vec<&str>,
|
||||
ids_are_version_ids: bool,
|
||||
start_date: Option<DateTime<Utc>>,
|
||||
end_date: Option<DateTime<Utc>>,
|
||||
resolution_minutes: Option<u32>,
|
||||
request: GetRequest,
|
||||
pat: Option<&str>,
|
||||
) -> HashMap<String, HashMap<i64, Decimal>> {
|
||||
let resp = self
|
||||
.get_analytics_revenue(
|
||||
id_or_slugs,
|
||||
ids_are_version_ids,
|
||||
start_date,
|
||||
end_date,
|
||||
resolution_minutes,
|
||||
pat,
|
||||
)
|
||||
.await;
|
||||
) -> GetResponse {
|
||||
let req = test::TestRequest::post()
|
||||
.uri("/v3/analytics")
|
||||
.set_json(request)
|
||||
.append_pat(pat)
|
||||
.to_request();
|
||||
|
||||
let resp = self.call(req).await;
|
||||
assert_status!(&resp, StatusCode::OK);
|
||||
test::read_body_json(resp).await
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user