Some small Labrinth refactors and fixes (#3698)

* chore(labrinth): fix typos, simplify out `remove_duplicates` func

* fix(labrinth): implement `capitalize_first` so that it can't panic on wide chars

* chore(labrinth): refactor out unneeded clone highlighted by nightly Clippy lints

* chore(labrinth): simplify `capitalize_first` implementation

* fix(labrinth): preserve ordering when deduplicating project field values

This addresses an unintended behavior change on
157647faf2778c74096e624aeef9cdb79539489c.

* fix(labrinth/tests): make `index_swaps` test run successfully

I wonder why we don't run these more often...

* refactor: rename `.env.example` files to `.env.local`, make local envs more consistent between frontend and backend

* chore(labrinth/.env.local): proper email verification and password reset paths
This commit is contained in:
Alejandro González
2025-05-29 22:51:30 +02:00
committed by GitHub
parent be37f077d3
commit a9cfc37aac
10 changed files with 36 additions and 48 deletions

View File

@@ -1,4 +1,5 @@
use std::collections::{HashMap, HashSet};
use std::collections::HashMap;
use std::mem;
use crate::database::models::loader_fields::VersionField;
use crate::database::models::project_item::{LinkUrl, ProjectQueryResult};
@@ -8,6 +9,7 @@ use crate::models::ids::{
};
use ariadne::ids::UserId;
use chrono::{DateTime, Utc};
use itertools::Itertools;
use serde::{Deserialize, Serialize};
use validator::Validate;
@@ -95,19 +97,6 @@ pub struct Project {
pub fields: HashMap<String, Vec<serde_json::Value>>,
}
/// Removes duplicate JSON values from `values`, preserving the first
/// occurrence of each and the original relative ordering.
///
/// Equality is decided on the serialized string form of each value, so two
/// values are considered duplicates exactly when they serialize identically.
fn remove_duplicates(values: Vec<serde_json::Value>) -> Vec<serde_json::Value> {
    let mut seen = HashSet::new();
    let mut unique = Vec::with_capacity(values.len());
    for value in values {
        // `insert` returns false for a repeated serialization, so only the
        // first occurrence of each distinct value is kept.
        if seen.insert(value.to_string()) {
            unique.push(value);
        }
    }
    unique
}
// This is a helper function to convert a list of VersionFields into a HashMap of field name to vecs of values
// This allows for removal of duplicates
pub fn from_duplicate_version_fields(
@@ -132,9 +121,9 @@ pub fn from_duplicate_version_fields(
}
}
// Remove duplicates by converting to string and back
// Remove duplicates
for (_, v) in fields.iter_mut() {
*v = remove_duplicates(v.clone());
*v = mem::take(v).into_iter().unique().collect_vec();
}
fields
}
@@ -624,7 +613,7 @@ pub struct Version {
pub downloads: u32,
/// The type of the release - `Alpha`, `Beta`, or `Release`.
pub version_type: VersionType,
/// The status of tne version
/// The status of the version
pub status: VersionStatus,
/// The requested status of the version (used for scheduling)
pub requested_status: Option<VersionStatus>,
@@ -880,7 +869,7 @@ impl std::fmt::Display for DependencyType {
}
impl DependencyType {
// These are constant, so this can remove unneccessary allocations (`to_string`)
// These are constant, so this can remove unnecessary allocations (`to_string`)
pub fn as_str(&self) -> &'static str {
match self {
DependencyType::Required => "required",

View File

@@ -264,11 +264,11 @@ pub fn convert_side_types_v2_bools(
}
/// Returns `input` with its first character converted to ASCII uppercase.
///
/// Non-ASCII first characters are left unchanged, so this never panics on
/// multi-byte (wide) characters — unlike slicing the first byte with
/// `get_mut(0..1)` and calling `make_ascii_uppercase`, which assumed the
/// first character was one byte wide.
///
/// An empty input yields an empty `String`.
pub fn capitalize_first(input: &str) -> String {
    input
        .chars()
        .enumerate()
        // Only the first char (index 0) is uppercased; the rest pass through.
        .map(|(i, c)| if i == 0 { c.to_ascii_uppercase() } else { c })
        .collect()
}
#[cfg(test)]

View File

@@ -52,10 +52,9 @@ pub async fn get_version_from_hash(
.map(|x| x.1)
.ok();
let hash = info.into_inner().0.to_lowercase();
let algorithm = hash_query
.algorithm
.clone()
.unwrap_or_else(|| default_algorithm_from_hashes(&[hash.clone()]));
let algorithm = hash_query.algorithm.clone().unwrap_or_else(|| {
default_algorithm_from_hashes(std::slice::from_ref(&hash))
});
let file = database::models::DBVersion::get_file_from_hash(
algorithm,
hash,
@@ -140,10 +139,9 @@ pub async fn get_update_from_hash(
.ok();
let hash = info.into_inner().0.to_lowercase();
if let Some(file) = database::models::DBVersion::get_file_from_hash(
hash_query
.algorithm
.clone()
.unwrap_or_else(|| default_algorithm_from_hashes(&[hash.clone()])),
hash_query.algorithm.clone().unwrap_or_else(|| {
default_algorithm_from_hashes(std::slice::from_ref(&hash))
}),
hash,
hash_query.version_id.map(|x| x.into()),
&**pool,
@@ -577,10 +575,9 @@ pub async fn delete_file(
.1;
let hash = info.into_inner().0.to_lowercase();
let algorithm = hash_query
.algorithm
.clone()
.unwrap_or_else(|| default_algorithm_from_hashes(&[hash.clone()]));
let algorithm = hash_query.algorithm.clone().unwrap_or_else(|| {
default_algorithm_from_hashes(std::slice::from_ref(&hash))
});
let file = database::models::DBVersion::get_file_from_hash(
algorithm.clone(),
hash,
@@ -709,10 +706,9 @@ pub async fn download_version(
.ok();
let hash = info.into_inner().0.to_lowercase();
let algorithm = hash_query
.algorithm
.clone()
.unwrap_or_else(|| default_algorithm_from_hashes(&[hash.clone()]));
let algorithm = hash_query.algorithm.clone().unwrap_or_else(|| {
default_algorithm_from_hashes(std::slice::from_ref(&hash))
});
let file = database::models::DBVersion::get_file_from_hash(
algorithm.clone(),
hash,