Fixes failing tests (#813)

* fixes failing tests

* fmt clippy

* updated dockerfile

* fixes failing tests; adds important fix from extracts_versions PR

* assert_eq -> assert_status, giving better error messages

* fixed random failure bug

* fmt, clippy, etc
Wyatt Verchere
2024-01-05 08:20:56 -08:00
committed by GitHub
parent f5802fee31
commit 10eed05d87
37 changed files with 555 additions and 330 deletions

View File

@@ -47,8 +47,9 @@ impl FileHost for MockHost {
) -> Result<DeleteFileData, FileHostingError> {
let path = std::path::Path::new(&dotenvy::var("MOCK_FILE_PATH").unwrap())
.join(file_name.replace("../", ""));
std::fs::remove_file(path)?;
if path.exists() {
std::fs::remove_file(path)?;
}
Ok(DeleteFileData {
file_id: file_id.to_string(),
file_name: file_name.to_string(),
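
The mock host change above makes test file deletion idempotent. A minimal sketch of the pattern, using a hypothetical helper name (delete_if_exists is not part of the codebase):

// Only call remove_file when the file is actually present, so a second
// delete of the same path (e.g. during parallel test teardown) no longer
// returns an error.
fn delete_if_exists(path: &std::path::Path) -> std::io::Result<()> {
    if path.exists() {
        std::fs::remove_file(path)?;
    }
    Ok(())
}

For a test mock the check-then-remove race is acceptable; production code might instead call remove_file directly and ignore std::io::ErrorKind::NotFound.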

View File

@@ -57,6 +57,7 @@ pub struct LabrinthConfig {
pub fn app_setup(
pool: sqlx::Pool<Postgres>,
redis_pool: RedisPool,
search_config: search::SearchConfig,
clickhouse: &mut Client,
file_host: Arc<dyn file_hosting::FileHost + Send + Sync>,
maxmind: Arc<queue::maxmind::MaxMindIndexer>,
@@ -66,11 +67,6 @@ pub fn app_setup(
dotenvy::var("BIND_ADDR").unwrap()
);
let search_config = search::SearchConfig {
address: dotenvy::var("MEILISEARCH_ADDR").unwrap(),
key: dotenvy::var("MEILISEARCH_KEY").unwrap(),
};
let mut scheduler = scheduler::Scheduler::new();
// The interval in seconds at which the local database is indexed

View File

@@ -6,10 +6,10 @@ use labrinth::file_hosting::S3Host;
use labrinth::ratelimit::errors::ARError;
use labrinth::ratelimit::memory::{MemoryStore, MemoryStoreActor};
use labrinth::ratelimit::middleware::RateLimiter;
use labrinth::search;
use labrinth::util::env::parse_var;
use labrinth::{check_env_vars, clickhouse, database, file_hosting, queue};
use log::{error, info};
use std::sync::Arc;
#[derive(Clone)]
@@ -93,11 +93,13 @@ async fn main() -> std::io::Result<()> {
.build()
.expect("Failed to create prometheus metrics middleware");
let search_config = search::SearchConfig::new(None);
info!("Starting Actix HTTP server!");
let labrinth_config = labrinth::app_setup(
pool.clone(),
redis_pool.clone(),
search_config.clone(),
&mut clickhouse,
file_host.clone(),
maxmind_reader.clone(),

View File

@@ -17,7 +17,7 @@ pub struct Organization {
pub id: OrganizationId,
/// The slug of the organization
pub slug: String,
/// The title (and slug) of the organization
/// The title of the organization
pub name: String,
/// The associated team of the organization
pub team_id: TeamId,

View File

@@ -81,7 +81,7 @@ pub fn root_config(cfg: &mut web::ServiceConfig) {
pub enum ApiError {
#[error("Environment Error")]
Env(#[from] dotenvy::Error),
#[error("Error while uploading file")]
#[error("Error while uploading file: {0}")]
FileHosting(#[from] FileHostingError),
#[error("Database Error: {0}")]
Database(#[from] crate::database::models::DatabaseError),

View File

@@ -8,7 +8,7 @@ use sqlx::PgPool;
use validator::Validate;
use crate::{
auth::get_user_from_headers,
auth::{filter_visible_projects, get_user_from_headers},
database::{models::User, redis::RedisPool},
file_hosting::FileHost,
models::{
@@ -65,23 +65,12 @@ pub async fn projects_list(
let id_option = User::get(&info.into_inner().0, &**pool, &redis).await?;
if let Some(id) = id_option.map(|x| x.id) {
let user_id: UserId = id.into();
let can_view_private = user
.map(|y| y.role.is_mod() || y.id == user_id)
.unwrap_or(false);
let project_data = User::get_projects(id, &**pool, &redis).await?;
let response: Vec<_> =
crate::database::Project::get_many_ids(&project_data, &**pool, &redis)
.await?
.into_iter()
.filter(|x| can_view_private || x.inner.status.is_searchable())
.map(Project::from)
.collect();
Ok(HttpResponse::Ok().json(response))
let projects: Vec<_> =
crate::database::Project::get_many_ids(&project_data, &**pool, &redis).await?;
let projects = filter_visible_projects(projects, &user, &pool).await?;
Ok(HttpResponse::Ok().json(projects))
} else {
Err(ApiError::NotFound)
}

View File

@@ -107,11 +107,12 @@ pub async fn get_indexes(
config: &SearchConfig,
) -> Result<Vec<Index>, meilisearch_sdk::errors::Error> {
let client = config.make_client();
let projects_index = create_or_update_index(&client, "projects", None).await?;
let project_name = config.get_index_name("projects");
let project_filtered_name = config.get_index_name("projects_filtered");
let projects_index = create_or_update_index(&client, &project_name, None).await?;
let projects_filtered_index = create_or_update_index(
&client,
"projects_filtered",
&project_filtered_name,
Some(&[
"sort",
"words",
@@ -128,7 +129,7 @@ pub async fn get_indexes(
async fn create_or_update_index(
client: &Client,
name: &'static str,
name: &str,
custom_rules: Option<&'static [&'static str]>,
) -> Result<Index, meilisearch_sdk::errors::Error> {
info!("Updating/creating index.");
@@ -207,7 +208,6 @@ async fn create_or_update_index(
typo_tolerance: None, // We don't use typo tolerance right now
dictionary: None, // We don't use dictionary right now
};
if old_settings.synonyms != settings.synonyms
|| old_settings.stop_words != settings.stop_words
|| old_settings.ranking_rules != settings.ranking_rules
@@ -294,16 +294,23 @@ async fn update_and_add_to_index(
new_filterable_attributes.extend(additional_fields.iter().map(|s| s.to_string()));
new_displayed_attributes.extend(additional_fields.iter().map(|s| s.to_string()));
info!("add attributes.");
index
let filterable_task = index
.set_filterable_attributes(new_filterable_attributes)
.await?;
index
let displayable_task = index
.set_displayed_attributes(new_displayed_attributes)
.await?;
filterable_task
.wait_for_completion(client, None, Some(TIMEOUT))
.await?;
displayable_task
.wait_for_completion(client, None, Some(TIMEOUT))
.await?;
info!("Adding to index.");
add_to_index(client, index, projects).await?;
Ok(())
}
@@ -315,7 +322,6 @@ pub async fn add_projects(
) -> Result<(), IndexingError> {
let client = config.make_client();
for index in indices {
info!("adding projects part1 or 2.");
update_and_add_to_index(&client, index, &projects, &additional_fields).await?;
}
@@ -329,7 +335,6 @@ fn default_settings() -> Settings {
sorted_sortable.sort();
let mut sorted_attrs = DEFAULT_ATTRIBUTES_FOR_FACETING.to_vec();
sorted_attrs.sort();
Settings::new()
.with_distinct_attribute("project_id")
.with_displayed_attributes(sorted_display)

View File

@@ -59,16 +59,34 @@ impl actix_web::ResponseError for SearchError {
}
}
#[derive(Clone)]
#[derive(Clone, Debug)]
pub struct SearchConfig {
pub address: String,
pub key: String,
pub meta_namespace: String,
}
impl SearchConfig {
// Panics if the environment variables are not set,
// but these are already checked for on startup.
pub fn new(meta_namespace: Option<String>) -> Self {
let address = dotenvy::var("MEILISEARCH_ADDR").expect("MEILISEARCH_ADDR not set");
let key = dotenvy::var("MEILISEARCH_KEY").expect("MEILISEARCH_KEY not set");
Self {
address,
key,
meta_namespace: meta_namespace.unwrap_or_default(),
}
}
pub fn make_client(&self) -> Client {
Client::new(self.address.as_str(), Some(self.key.as_str()))
}
pub fn get_index_name(&self, index: &str) -> String {
format!("{}_{}", self.meta_namespace, index)
}
}
/// A project document used for uploading projects to MeiliSearch's indices.
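
The new constructor and get_index_name are what let each test run target its own namespaced Meilisearch indices instead of the shared "projects" index, which appears to be the source of the random failures mentioned in the commit message. A small usage sketch (the "test_abc" namespace is hypothetical, and MEILISEARCH_ADDR / MEILISEARCH_KEY must be set or SearchConfig::new panics):

// Each test can pass its own namespace so its indices do not collide with
// another run's indices or with the main ones.
let config = SearchConfig::new(Some("test_abc".to_string()));
assert_eq!(config.get_index_name("projects"), "test_abc_projects");
let client = config.make_client();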
@@ -172,13 +190,18 @@ pub struct ResultSearchProject {
pub loader_fields: HashMap<String, Vec<serde_json::Value>>,
}
pub fn get_sort_index(index: &str) -> Result<(&str, [&str; 1]), SearchError> {
pub fn get_sort_index(
config: &SearchConfig,
index: &str,
) -> Result<(String, [&'static str; 1]), SearchError> {
let projects_name = config.get_index_name("projects");
let projects_filtered_name = config.get_index_name("projects_filtered");
Ok(match index {
"relevance" => ("projects", ["downloads:desc"]),
"downloads" => ("projects_filtered", ["downloads:desc"]),
"follows" => ("projects", ["follows:desc"]),
"updated" => ("projects", ["date_modified:desc"]),
"newest" => ("projects", ["date_created:desc"]),
"relevance" => (projects_name, ["downloads:desc"]),
"downloads" => (projects_filtered_name, ["downloads:desc"]),
"follows" => (projects_name, ["follows:desc"]),
"updated" => (projects_name, ["date_modified:desc"]),
"newest" => (projects_name, ["date_created:desc"]),
i => return Err(SearchError::InvalidIndex(i.to_string())),
})
}
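
Because index names are now computed from the namespace at runtime, get_sort_index returns an owned String instead of a &'static str; the caller resolves the actual Meilisearch index from it, as the next hunk shows. A condensed sketch of the call sequence:

// sort.0 is the namespaced index name, sort.1 the sort expression for the
// chosen mode ("downloads" here is just an example).
let sort = get_sort_index(config, "downloads")?;
let meilisearch_index = client.get_index(sort.0).await?;
// sort.1 (["downloads:desc"]) is then used as the query's sort parameter.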
@@ -193,8 +216,7 @@ pub async fn search_for_project(
let index = info.index.as_deref().unwrap_or("relevance");
let limit = info.limit.as_deref().unwrap_or("10").parse()?;
let sort = get_sort_index(index)?;
let sort = get_sort_index(config, index)?;
let meilisearch_index = client.get_index(sort.0).await?;
let mut filter_string = String::new();