Batch inserts [MOD-555] (#726)

* Batch a bunch of inserts, but still more to do

* Insert many for clickhouse (+ tests)

* Batch the remaining ones except those requiring deduplication

* Risky dedups

* Bit of cleanup and formatting

* cargo sqlx prepare

* Add test around batch editing project categories

* Add struct to satisfy clippy

* Fix silly mistake that was caught by the tests!

* Leave room for growth in dummy_data
This commit is contained in:
Jackson Kruger
2023-10-11 13:32:58 -05:00
committed by GitHub
parent dfa43f3c5a
commit d92272ffa0
23 changed files with 1208 additions and 929 deletions

View File

@@ -150,7 +150,7 @@ pub async fn page_view_ingest(
view.user_id = user.id.0;
}
analytics_queue.add_view(view).await;
analytics_queue.add_view(view);
Ok(HttpResponse::NoContent().body(""))
}
@@ -202,19 +202,17 @@ pub async fn playtime_ingest(
}
if let Some(version) = versions.iter().find(|x| id == x.inner.id.into()) {
analytics_queue
.add_playtime(Playtime {
id: Default::default(),
recorded: Utc::now().timestamp_nanos() / 100_000,
seconds: playtime.seconds as u64,
user_id: user.id.0,
project_id: version.inner.project_id.0 as u64,
version_id: version.inner.id.0 as u64,
loader: playtime.loader,
game_version: playtime.game_version,
parent: playtime.parent.map(|x| x.0).unwrap_or(0),
})
.await;
analytics_queue.add_playtime(Playtime {
id: Default::default(),
recorded: Utc::now().timestamp_nanos() / 100_000,
seconds: playtime.seconds as u64,
user_id: user.id.0,
project_id: version.inner.project_id.0 as u64,
version_id: version.inner.id.0 as u64,
loader: playtime.loader,
game_version: playtime.game_version,
parent: playtime.parent.map(|x| x.0).unwrap_or(0),
});
}
}

View File

@@ -108,40 +108,36 @@ pub async fn count_download(
let ip = crate::routes::analytics::convert_to_ip_v6(&download_body.ip)
.unwrap_or_else(|_| Ipv4Addr::new(127, 0, 0, 1).to_ipv6_mapped());
analytics_queue
.add_download(Download {
id: Uuid::new_v4(),
recorded: Utc::now().timestamp_nanos() / 100_000,
domain: url.host_str().unwrap_or_default().to_string(),
site_path: url.path().to_string(),
user_id: user
.and_then(|(scopes, x)| {
if scopes.contains(Scopes::PERFORM_ANALYTICS) {
Some(x.id.0 as u64)
} else {
None
}
})
.unwrap_or(0),
project_id: project_id as u64,
version_id: version_id as u64,
ip,
country: maxmind.query(ip).await.unwrap_or_default(),
user_agent: download_body
.headers
.get("user-agent")
.cloned()
.unwrap_or_default(),
headers: download_body
.headers
.clone()
.into_iter()
.filter(|x| {
!crate::routes::analytics::FILTERED_HEADERS.contains(&&*x.0.to_lowercase())
})
.collect(),
})
.await;
analytics_queue.add_download(Download {
id: Uuid::new_v4(),
recorded: Utc::now().timestamp_nanos() / 100_000,
domain: url.host_str().unwrap_or_default().to_string(),
site_path: url.path().to_string(),
user_id: user
.and_then(|(scopes, x)| {
if scopes.contains(Scopes::PERFORM_ANALYTICS) {
Some(x.id.0 as u64)
} else {
None
}
})
.unwrap_or(0),
project_id: project_id as u64,
version_id: version_id as u64,
ip,
country: maxmind.query(ip).await.unwrap_or_default(),
user_agent: download_body
.headers
.get("user-agent")
.cloned()
.unwrap_or_default(),
headers: download_body
.headers
.clone()
.into_iter()
.filter(|x| !crate::routes::analytics::FILTERED_HEADERS.contains(&&*x.0.to_lowercase()))
.collect(),
});
Ok(HttpResponse::NoContent().body(""))
}

View File

@@ -15,6 +15,7 @@ use crate::{database, models};
use actix_web::web::Data;
use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse};
use chrono::Utc;
use itertools::Itertools;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use std::sync::Arc;
@@ -301,6 +302,11 @@ pub async fn collection_edit(
.execute(&mut *transaction)
.await?;
let collection_item_ids = new_project_ids
.iter()
.map(|_| collection_item.id.0)
.collect_vec();
let mut validated_project_ids = Vec::new();
for project_id in new_project_ids {
let project = database::models::Project::get(project_id, &**pool, &redis)
.await?
@@ -309,20 +315,20 @@ pub async fn collection_edit(
"The specified project {project_id} does not exist!"
))
})?;
// Insert- don't throw an error if it already exists
sqlx::query!(
"
INSERT INTO collections_mods (collection_id, mod_id)
VALUES ($1, $2)
ON CONFLICT DO NOTHING
",
collection_item.id as database::models::ids::CollectionId,
project.inner.id as database::models::ids::ProjectId,
)
.execute(&mut *transaction)
.await?;
validated_project_ids.push(project.inner.id.0);
}
// Insert- don't throw an error if it already exists
sqlx::query!(
"
INSERT INTO collections_mods (collection_id, mod_id)
SELECT * FROM UNNEST ($1::int8[], $2::int8[])
ON CONFLICT DO NOTHING
",
&collection_item_ids[..],
&validated_project_ids[..],
)
.execute(&mut *transaction)
.await?;
}
database::models::Collection::clear_cache(collection_item.id, &redis).await?;

File diff suppressed because it is too large Load Diff

View File

@@ -725,9 +725,7 @@ async fn upload_file_to_version_inner(
"At least one file must be specified".to_string(),
));
} else {
for file_builder in file_builders {
file_builder.insert(version_id, &mut *transaction).await?;
}
VersionFileBuilder::insert_many(file_builders, version_id, &mut *transaction).await?;
}
// Clear version cache

View File

@@ -3,6 +3,7 @@ use crate::auth::{
filter_authorized_versions, get_user_from_headers, is_authorized, is_authorized_version,
};
use crate::database;
use crate::database::models::version_item::{DependencyBuilder, LoaderVersion, VersionVersion};
use crate::database::models::{image_item, Organization};
use crate::database::redis::RedisPool;
use crate::models;
@@ -450,11 +451,12 @@ pub async fn version_edit(
})
.collect::<Vec<database::models::version_item::DependencyBuilder>>();
for dependency in builders {
dependency
.insert(version_item.inner.id, &mut transaction)
.await?;
}
DependencyBuilder::insert_many(
builders,
version_item.inner.id,
&mut transaction,
)
.await?;
}
}
}
@@ -469,6 +471,7 @@ pub async fn version_edit(
.execute(&mut *transaction)
.await?;
let mut version_versions = Vec::new();
for game_version in game_versions {
let game_version_id = database::models::categories::GameVersion::get_id(
&game_version.0,
@@ -481,17 +484,9 @@ pub async fn version_edit(
)
})?;
sqlx::query!(
"
INSERT INTO game_versions_versions (game_version_id, joining_version_id)
VALUES ($1, $2)
",
game_version_id as database::models::ids::GameVersionId,
id as database::models::ids::VersionId,
)
.execute(&mut *transaction)
.await?;
version_versions.push(VersionVersion::new(game_version_id, id));
}
VersionVersion::insert_many(version_versions, &mut transaction).await?;
database::models::Project::update_game_versions(
version_item.inner.project_id,
@@ -510,6 +505,7 @@ pub async fn version_edit(
.execute(&mut *transaction)
.await?;
let mut loader_versions = Vec::new();
for loader in loaders {
let loader_id =
database::models::categories::Loader::get_id(&loader.0, &mut *transaction)
@@ -519,18 +515,9 @@ pub async fn version_edit(
"No database entry for loader provided.".to_string(),
)
})?;
sqlx::query!(
"
INSERT INTO loaders_versions (loader_id, version_id)
VALUES ($1, $2)
",
loader_id as database::models::ids::LoaderId,
id as database::models::ids::VersionId,
)
.execute(&mut *transaction)
.await?;
loader_versions.push(LoaderVersion::new(loader_id, id));
}
LoaderVersion::insert_many(loader_versions, &mut transaction).await?;
database::models::Project::update_loaders(
version_item.inner.project_id,