Replace MaxMind with CloudFlare headers (#4934)
* Replace MaxMind with CloudFlare headers
* Remove MaxMind env vars
* Fix test harness
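In short, the geo-IP lookup that previously went through the bundled MaxMind reader is replaced by reading the `CF-IPCountry` header, which CloudFlare attaches to proxied requests (an ISO 3166-1 alpha-2 country code) when IP geolocation is enabled for the zone. As a rough standalone sketch of the idea — not code from this commit; the `country_from_request` helper, route, and port are invented for the example — an actix-web handler can read the header like this:

```rust
use actix_web::{get, App, HttpRequest, HttpResponse, HttpServer};

/// Read the country code CloudFlare attaches to proxied requests.
/// Falls back to an empty string when the header is missing or not valid
/// UTF-8, mirroring the `unwrap_or_default()` calls in the diff below.
fn country_from_request(req: &HttpRequest) -> String {
    req.headers()
        .get("cf-ipcountry")
        .and_then(|v| v.to_str().ok())
        .map(str::to_owned)
        .unwrap_or_default()
}

#[get("/whereami")]
async fn whereami(req: HttpRequest) -> HttpResponse {
    HttpResponse::Ok().body(country_from_request(&req))
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| App::new().service(whereami))
        .bind(("0.0.0.0", 8080))?
        .run()
        .await
}
```

The changed handlers below do the equivalent against an already-collected header map, falling back to an empty country code when the header is absent.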
Cargo.lock (generated)
@@ -4541,7 +4541,6 @@ dependencies = [
  "labrinth",
  "lettre",
  "meilisearch-sdk",
- "modrinth-maxmind",
  "modrinth-util",
  "muralpay",
  "murmur2",
@@ -109,7 +109,6 @@ lettre = { version = "0.11.19", default-features = false, features = [
 maxminddb = "0.26.0"
 meilisearch-sdk = { version = "0.30.0", default-features = false }
 modrinth-log = { path = "packages/modrinth-log" }
-modrinth-maxmind = { path = "packages/modrinth-maxmind" }
 modrinth-util = { path = "packages/modrinth-util" }
 muralpay = { path = "packages/muralpay" }
 murmur2 = "0.1.0"
@@ -114,9 +114,6 @@ CLICKHOUSE_USER=default
 CLICKHOUSE_PASSWORD=default
 CLICKHOUSE_DATABASE=staging_ariadne
 
-MAXMIND_ACCOUNT_ID=none
-MAXMIND_LICENSE_KEY=none
-
 FLAME_ANVIL_URL=none
 
 STRIPE_API_KEY=none
@@ -118,9 +118,6 @@ CLICKHOUSE_USER=default
 CLICKHOUSE_PASSWORD=default
 CLICKHOUSE_DATABASE=staging_ariadne
 
-MAXMIND_ACCOUNT_ID=none
-MAXMIND_LICENSE_KEY=none
-
 FLAME_ANVIL_URL=none
 
 STRIPE_API_KEY=none
@@ -70,7 +70,6 @@ itertools = { workspace = true }
 json-patch = { workspace = true }
 lettre = { workspace = true }
 meilisearch-sdk = { workspace = true, features = ["reqwest"] }
-modrinth-maxmind = { workspace = true }
 modrinth-util = { workspace = true, features = ["decimal", "utoipa"] }
 muralpay = { workspace = true, features = ["client", "mock", "utoipa"] }
 murmur2 = { workspace = true }
@@ -958,6 +958,16 @@ COPY public.loader_field_enum_values (id, enum_id, value, ordering, created, met
 812 2 rd-132211 \N 2009-05-13 20:11:00+00 {"type": "alpha", "major": false}
 11 2 1.21.5-pre1 \N 2025-03-11 12:49:44+00 {"type": "snapshot", "major": false}
 12 2 25w10a \N 2025-03-05 13:11:13+00 {"type": "snapshot", "major": false}
+1005 3 bukkit \N 2025-04-03 21:55:46.229944+00 \N
+1006 3 bungeecord \N 2025-04-03 21:55:46.229944+00 \N
+1007 3 canvas \N 2025-04-03 21:55:46.229944+00 \N
+1008 3 datapack \N 2025-04-03 21:55:46.229944+00 \N
+1009 3 fabric \N 2025-04-03 21:55:46.229944+00 \N
+10010 3 folia \N 2025-04-03 21:55:46.229944+00 \N
+10011 3 forge \N 2025-04-03 21:55:46.229944+00 \N
+10012 3 iris \N 2025-04-03 21:55:46.229944+00 \N
+10013 3 liteloader \N 2025-04-03 21:55:46.229944+00 \N
+10014 3 neoforge \N 2025-04-03 21:55:46.229944+00 \N
 \.
 
 
@@ -3,7 +3,6 @@ use std::time::Duration;
 
 use actix_web::web;
 use database::redis::RedisPool;
-use modrinth_maxmind::MaxMind;
 use queue::{
     analytics::AnalyticsQueue, email::EmailQueue, payouts::PayoutsQueue,
     session::AuthQueue, socket::ActiveSockets,
@@ -55,7 +54,6 @@ pub struct LabrinthConfig {
     pub redis_pool: RedisPool,
     pub clickhouse: Client,
     pub file_host: Arc<dyn file_hosting::FileHost + Send + Sync>,
-    pub maxmind: web::Data<MaxMind>,
     pub scheduler: Arc<scheduler::Scheduler>,
     pub ip_salt: Pepper,
     pub search_config: search::SearchConfig,
@@ -80,7 +78,6 @@ pub fn app_setup(
     search_config: search::SearchConfig,
     clickhouse: &mut Client,
     file_host: Arc<dyn file_hosting::FileHost + Send + Sync>,
-    maxmind: MaxMind,
     stripe_client: stripe::Client,
     anrok_client: anrok::Client,
     email_queue: EmailQueue,
@@ -275,7 +272,6 @@ pub fn app_setup(
         redis_pool,
         clickhouse: clickhouse.clone(),
         file_host,
-        maxmind: web::Data::new(maxmind),
         scheduler: Arc::new(scheduler),
         ip_salt,
         search_config,
@@ -321,7 +317,6 @@ pub fn app_config(
         .app_data(labrinth_config.session_queue.clone())
         .app_data(labrinth_config.payouts_queue.clone())
         .app_data(labrinth_config.email_queue.clone())
-        .app_data(labrinth_config.maxmind.clone())
         .app_data(web::Data::new(labrinth_config.ip_salt.clone()))
         .app_data(web::Data::new(labrinth_config.analytics_queue.clone()))
         .app_data(web::Data::new(labrinth_config.clickhouse.clone()))
@@ -496,9 +491,6 @@ pub fn check_env_vars() -> bool {
     failed |= check_var::<String>("CLICKHOUSE_PASSWORD");
     failed |= check_var::<String>("CLICKHOUSE_DATABASE");
 
-    failed |= check_var::<String>("MAXMIND_ACCOUNT_ID");
-    failed |= check_var::<String>("MAXMIND_LICENSE_KEY");
-
     failed |= check_var::<String>("FLAME_ANVIL_URL");
 
     failed |= check_var::<String>("GOTENBERG_URL");
@@ -175,8 +175,6 @@ async fn main() -> std::io::Result<()> {
         return Ok(());
     }
 
-    let maxmind_reader = modrinth_maxmind::MaxMind::new().await;
-
     let prometheus = PrometheusMetricsBuilder::new("labrinth")
         .endpoint("/metrics")
         .exclude_regex(r"^/api/v1/.*$")
@@ -205,7 +203,6 @@ async fn main() -> std::io::Result<()> {
         search_config.clone(),
         &mut clickhouse,
         file_host.clone(),
-        maxmind_reader.clone(),
         stripe_client,
         anrok_client.clone(),
         email_queue,
@@ -9,7 +9,6 @@ use crate::util::date::get_current_tenths_of_ms;
 use crate::util::env::parse_strings_from_var;
 use actix_web::{HttpRequest, HttpResponse};
 use actix_web::{post, web};
-use modrinth_maxmind::MaxMind;
 use serde::Deserialize;
 use sqlx::PgPool;
 use std::collections::HashMap;
@@ -49,7 +48,6 @@ pub struct UrlInput {
 #[post("view")]
 pub async fn page_view_ingest(
     req: HttpRequest,
-    maxmind: web::Data<MaxMind>,
     analytics_queue: web::Data<Arc<AnalyticsQueue>>,
     session_queue: web::Data<AuthQueue>,
     url_input: web::Json<UrlInput>,
@@ -114,7 +112,7 @@ pub async fn page_view_ingest(
         user_id: 0,
         project_id: 0,
         ip,
-        country: maxmind.query_country(ip).await.unwrap_or_default(),
+        country: headers.get("cf-ipcountry").cloned().unwrap_or_default(),
         user_agent: headers.get("user-agent").cloned().unwrap_or_default(),
         headers: headers
             .into_iter()
@@ -10,7 +10,6 @@ use crate::search::SearchConfig;
 use crate::util::date::get_current_tenths_of_ms;
 use crate::util::guards::admin_key_guard;
 use actix_web::{HttpRequest, HttpResponse, patch, post, web};
-use modrinth_maxmind::MaxMind;
 use serde::Deserialize;
 use sqlx::PgPool;
 use std::collections::HashMap;
@@ -42,7 +41,6 @@ pub async fn count_download(
     req: HttpRequest,
     pool: web::Data<PgPool>,
     redis: web::Data<RedisPool>,
-    maxmind: web::Data<MaxMind>,
     analytics_queue: web::Data<Arc<AnalyticsQueue>>,
     session_queue: web::Data<AuthQueue>,
     download_body: web::Json<DownloadBody>,
@@ -126,7 +124,11 @@ pub async fn count_download(
         project_id: project_id as u64,
         version_id: version_id as u64,
         ip,
-        country: maxmind.query_country(ip).await.unwrap_or_default(),
+        country: download_body
+            .headers
+            .get("cf-ipcountry")
+            .cloned()
+            .unwrap_or_default(),
         user_agent: download_body
             .headers
             .get("user-agent")
@@ -3,7 +3,6 @@ use crate::util::anrok;
 use crate::util::gotenberg::GotenbergClient;
 use crate::{LabrinthConfig, file_hosting};
 use crate::{check_env_vars, clickhouse};
-use modrinth_maxmind::MaxMind;
 use std::sync::Arc;
 
 pub mod api_common;
@@ -40,8 +39,6 @@ pub async fn setup(db: &database::TemporaryDatabase) -> LabrinthConfig {
         Arc::new(file_hosting::MockHost::new());
     let mut clickhouse = clickhouse::init_client().await.unwrap();
 
-    let maxmind_reader = MaxMind::new().await;
-
     let stripe_client =
         stripe::Client::new(dotenvy::var("STRIPE_API_KEY").unwrap());
 
@@ -58,7 +55,6 @@ pub async fn setup(db: &database::TemporaryDatabase) -> LabrinthConfig {
         search_config,
         &mut clickhouse,
         file_host.clone(),
-        maxmind_reader,
         stripe_client,
         anrok_client,
         email_queue,