Switch to Postgres (#39)

* WIP Switch to Postgres

* feat(postgres): more work on porting to postgres, now compiles

* feat(docker-compose): Changed the docker-compose.yml file to use postgres.

* Update docker, documentation, gh actions...

* Remove bson dependency

* Remove bson import

* feat: move mock filehost to trait rather than cargo feature

* feat(postgres): transactions for mod creation, multipart refactor

* fix: Add Cargo.lock so that sqlx functions

* Update sqlx offline build data

* fix: Use SQLX_OFFLINE to force sqlx into offline mode for CI

* Default release channels

* feat(postgres): refactor database models to fit postgres models

* fix: Fix sqlx prepare, fix double allocation in indexing

* Add dockerfile (#40)

Co-authored-by: Charalampos Fanoulis <charalampos.fanoulis@gmail.com>

Co-authored-by: Aeledfyr <aeledfyr@gmail.com>
Co-authored-by: redblueflame <contact@redblueflame.com>
Co-authored-by: Jai A <jai.a@tuta.io>
Co-authored-by: Valentin Ricard <redblueflame1@gmail.com>
Co-authored-by: Charalampos Fanoulis <charalampos.fanoulis@gmail.com>
This commit is contained in:
AppleTheGolden
2020-07-23 22:46:33 +02:00
committed by GitHub
parent 95339a8338
commit ee69653a83
47 changed files with 4363 additions and 694 deletions

View File

@@ -178,7 +178,7 @@ pub async fn index_curseforge(
.replace("/256/256/", "/64/64/");
docs_to_add.push(SearchMod {
mod_id: -curseforge_mod.id,
mod_id: -curseforge_mod.id as i64,
author: (&curseforge_mod.authors[0].name).to_string(),
title: curseforge_mod.name,
description: curseforge_mod.summary.chars().take(150).collect(),

View File

@@ -1,70 +1,77 @@
use bson::doc;
use futures::StreamExt;
use futures::{StreamExt, TryStreamExt};
use log::info;
use crate::database::models::Item;
use crate::database::{DatabaseError, Mod, Version};
use super::IndexingError;
use crate::search::SearchMod;
use sqlx::postgres::PgPool;
pub async fn index_local(client: mongodb::Client) -> Result<Vec<SearchMod>, IndexingError> {
pub async fn index_local(pool: PgPool) -> Result<Vec<SearchMod>, IndexingError> {
info!("Indexing local mods!");
let mut docs_to_add: Vec<SearchMod> = vec![];
let db = client.database("modrinth");
let mut results = sqlx::query!(
"
SELECT m.id, m.title, m.description, m.downloads, m.icon_url, m.body_url, m.published FROM mods m
"
)
.fetch(&pool);
let mods = db.collection("mods");
let versions = db.collection("versions");
while let Some(result) = results.next().await {
if let Ok(result) = result {
let versions: Vec<String> = sqlx::query!(
"
SELECT gv.version FROM versions
INNER JOIN game_versions_versions gvv ON gvv.joining_version_id=versions.id
INNER JOIN game_versions gv ON gvv.game_version_id=gv.id
WHERE versions.mod_id = $1
",
result.id
)
.fetch_many(&pool)
.try_filter_map(|e| async { Ok(e.right().map(|c| c.version)) })
.try_collect::<Vec<String>>()
.await?;
let mut results = mods
.find(None, None)
.await
.map_err(DatabaseError::LocalDatabaseError)?;
let categories = sqlx::query!(
"
SELECT c.category
FROM mods_categories mc
INNER JOIN categories c ON mc.joining_category_id=c.id
WHERE mc.joining_mod_id = $1
",
result.id
)
.fetch_many(&pool)
.try_filter_map(|e| async { Ok(e.right().map(|c| c.category)) })
.try_collect::<Vec<String>>()
.await?;
while let Some(unparsed_result) = results.next().await {
let result: Mod =
*Mod::from_doc(unparsed_result.map_err(DatabaseError::LocalDatabaseError)?)?;
let mut icon_url = "".to_string();
let mut mod_versions = versions
.find(doc! { "mod_id": result.id }, None)
.await
.map_err(DatabaseError::LocalDatabaseError)?;
if let Some(url) = result.icon_url {
icon_url = url;
}
let mut mod_game_versions = vec![];
while let Some(unparsed_version) = mod_versions.next().await {
let mut version = unparsed_version
.map_err(DatabaseError::LocalDatabaseError)
.and_then(Version::from_doc)?;
mod_game_versions.append(&mut version.game_versions);
docs_to_add.push(SearchMod {
mod_id: result.id,
author: "".to_string(),
title: result.title,
description: result.description,
keywords: categories,
versions,
downloads: result.downloads,
page_url: result.body_url,
icon_url,
author_url: "".to_string(),
date_created: result.published.to_string(),
created: 0,
date_modified: "".to_string(),
updated: 0,
latest_version: "".to_string(),
empty: String::from("{}{}{}"),
});
}
let mut icon_url = "".to_string();
if let Some(url) = result.icon_url {
icon_url = url;
}
docs_to_add.push(SearchMod {
mod_id: result.id,
author: "".to_string(),
title: result.title,
description: result.description,
keywords: result.categories,
versions: mod_game_versions,
downloads: result.downloads,
page_url: "".to_string(),
icon_url,
author_url: "".to_string(),
date_created: "".to_string(),
created: 0,
date_modified: "".to_string(),
updated: 0,
latest_version: "".to_string(),
empty: String::from("{}{}{}"),
});
}
Ok(docs_to_add)

View File

@@ -7,6 +7,7 @@ use crate::search::indexing::local_import::index_local;
use crate::search::SearchMod;
use meilisearch_sdk::client::Client;
use meilisearch_sdk::settings::Settings;
use sqlx::postgres::PgPool;
use std::collections::{HashMap, VecDeque};
use thiserror::Error;
@@ -21,7 +22,7 @@ pub enum IndexingError {
#[error("Error while parsing a timestamp: {0}")]
ParseDateError(#[from] chrono::format::ParseError),
#[error("Database Error: {0}")]
DatabaseError(#[from] crate::database::DatabaseError),
DatabaseError(#[from] sqlx::error::Error),
#[error("Environment Error")]
EnvError(#[from] dotenv::Error),
}
@@ -31,14 +32,14 @@ pub enum IndexingError {
// assumes a max average size of 1KiB per mod to avoid this cap.
const MEILISEARCH_CHUNK_SIZE: usize = 10000;
pub async fn index_mods(db: mongodb::Client) -> Result<(), IndexingError> {
pub async fn index_mods(pool: PgPool) -> Result<(), IndexingError> {
// Check if the index exists
let address = &*dotenv::var("MEILISEARCH_ADDR")?;
let client = Client::new(address, "");
let mut docs_to_add: Vec<SearchMod> = vec![];
docs_to_add.append(&mut index_local(db.clone()).await?);
docs_to_add.append(&mut index_local(pool.clone()).await?);
if dotenv::var("INDEX_CURSEFORGE")?
.parse()
.expect("`INDEX_CURSEFORGE` is not a boolean.")

View File

@@ -47,7 +47,7 @@ impl actix_web::ResponseError for SearchError {
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct SearchMod {
pub mod_id: i32,
pub mod_id: i64,
pub author: String,
pub title: String,
pub description: String,
@@ -66,7 +66,7 @@ pub struct SearchMod {
}
impl Document for SearchMod {
type UIDType = i32;
type UIDType = i64;
fn get_uid(&self) -> &Self::UIDType {
&self.mod_id