Search test + v3 (#731)

* search patch for accurate loader/gv filtering

* backup

* basic search test

* finished test

* incomplete commit; backing up

* Working multipat reroute backup

* working rough draft v3

* most tests passing

* works

* search v2 conversion

* added some tags.rs v2 conversions

* Worked through warnings, unwraps, prints

* refactors

* new search test

* version files changes fixes

* redesign to revs

* removed old caches

* removed games

* fmt clippy

* merge conflicts

* fmt, prepare

* moved v2 routes over to v3

* fixes; tests passing

* project type changes

* moved files over

* fmt, clippy, prepare, etc

* loaders to loader_fields, added tests

* fmt, clippy, prepare

* fixed sorting bug

* reversed back- wrong order for consistency

* fmt; clippy; prepare

---------

Co-authored-by: Jai A <jaiagr+gpg@pm.me>
Author: Wyatt Verchere
Date: 2023-11-11 16:40:10 -08:00 (committed by GitHub)
Parent: 97ccb7df94
Commit: ae1c5342f2
133 changed files with 18153 additions and 11320 deletions

View File

@@ -1,82 +0,0 @@
use actix_web::test::TestRequest;
use bytes::{Bytes, BytesMut};
// Multipart functionality (actix-test does not natively support multipart)
#[derive(Debug, Clone)]
pub struct MultipartSegment {
pub name: String,
pub filename: Option<String>,
pub content_type: Option<String>,
pub data: MultipartSegmentData,
}
#[derive(Debug, Clone)]
#[allow(dead_code)]
pub enum MultipartSegmentData {
Text(String),
Binary(Vec<u8>),
}
pub trait AppendsMultipart {
fn set_multipart(self, data: impl IntoIterator<Item = MultipartSegment>) -> Self;
}
impl AppendsMultipart for TestRequest {
fn set_multipart(self, data: impl IntoIterator<Item = MultipartSegment>) -> Self {
let (boundary, payload) = generate_multipart(data);
self.append_header((
"Content-Type",
format!("multipart/form-data; boundary={}", boundary),
))
.set_payload(payload)
}
}
fn generate_multipart(data: impl IntoIterator<Item = MultipartSegment>) -> (String, Bytes) {
let mut boundary = String::from("----WebKitFormBoundary");
boundary.push_str(&rand::random::<u64>().to_string());
boundary.push_str(&rand::random::<u64>().to_string());
boundary.push_str(&rand::random::<u64>().to_string());
let mut payload = BytesMut::new();
for segment in data {
payload.extend_from_slice(
format!(
"--{boundary}\r\nContent-Disposition: form-data; name=\"{name}\"",
boundary = boundary,
name = segment.name
)
.as_bytes(),
);
if let Some(filename) = &segment.filename {
payload.extend_from_slice(
format!("; filename=\"{filename}\"", filename = filename).as_bytes(),
);
}
if let Some(content_type) = &segment.content_type {
payload.extend_from_slice(
format!(
"\r\nContent-Type: {content_type}",
content_type = content_type
)
.as_bytes(),
);
}
payload.extend_from_slice(b"\r\n\r\n");
match &segment.data {
MultipartSegmentData::Text(text) => {
payload.extend_from_slice(text.as_bytes());
}
MultipartSegmentData::Binary(binary) => {
payload.extend_from_slice(binary);
}
}
payload.extend_from_slice(b"\r\n");
}
payload.extend_from_slice(format!("--{boundary}--\r\n", boundary = boundary).as_bytes());
(boundary, Bytes::from(payload))
}
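The test-local multipart helper deleted above now lives at labrinth::util::actix (see the later hunks in this commit). A minimal usage sketch, assuming that module path and an illustrative, empty jar payload:

use actix_web::test::TestRequest;
use labrinth::util::actix::{AppendsMultipart, MultipartSegment, MultipartSegmentData};

let req = TestRequest::post()
    .uri("/v2/project")
    .set_multipart(vec![
        // JSON metadata segment
        MultipartSegment {
            name: "data".to_string(),
            filename: None,
            content_type: Some("application/json".to_string()),
            data: MultipartSegmentData::Text(r#"{"slug":"demo"}"#.to_string()),
        },
        // Binary file segment (empty bytes, purely illustrative)
        MultipartSegment {
            name: "demo.jar".to_string(),
            filename: Some("demo.jar".to_string()),
            content_type: Some("application/java-archive".to_string()),
            data: MultipartSegmentData::Binary(Vec::new()),
        },
    ])
    .to_request();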

View File

@@ -6,6 +6,7 @@ use std::rc::Rc;
pub mod organization;
pub mod project;
pub mod tags;
pub mod team;
pub mod version;
@@ -18,4 +19,15 @@ impl ApiV2 {
pub async fn call(&self, req: actix_http::Request) -> ServiceResponse {
self.test_app.call(req).await.unwrap()
}
pub async fn reset_search_index(&self) -> ServiceResponse {
let req = actix_web::test::TestRequest::post()
.uri("/v2/admin/_force_reindex")
.append_header((
"Modrinth-Admin",
dotenvy::var("LABRINTH_ADMIN_KEY").unwrap(),
))
.to_request();
self.call(req).await
}
}
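reset_search_index issues an authenticated POST to the admin force-reindex route; the new search test below calls it before asserting on results. A minimal sketch, assuming a TestEnvironment named test_env:

let resp = test_env.v2.reset_search_index().await;
assert_eq!(resp.status(), 204);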

View File

@@ -3,7 +3,7 @@ use actix_web::{
test::{self, TestRequest},
};
use bytes::Bytes;
use labrinth::models::{organizations::Organization, projects::Project};
use labrinth::models::{organizations::Organization, v2::projects::LegacyProject};
use serde_json::json;
use crate::common::request_data::ImageData;
@@ -58,7 +58,7 @@ impl ApiV2 {
&self,
id_or_title: &str,
pat: &str,
) -> Vec<Project> {
) -> Vec<LegacyProject> {
let resp = self.get_organization_projects(id_or_title, pat).await;
assert_eq!(resp.status(), 200);
test::read_body_json(resp).await

View File

@@ -7,12 +7,15 @@ use actix_web::{
};
use bytes::Bytes;
use chrono::{DateTime, Utc};
use labrinth::models::projects::{Project, Version};
use labrinth::{
models::v2::projects::{LegacyProject, LegacyVersion},
search::SearchResults,
util::actix::AppendsMultipart,
};
use rust_decimal::Decimal;
use serde_json::json;
use crate::common::{
actix::AppendsMultipart,
asserts::assert_status,
database::MOD_USER_PAT,
request_data::{ImageData, ProjectCreationRequestData},
@@ -25,7 +28,7 @@ impl ApiV2 {
&self,
creation_data: ProjectCreationRequestData,
pat: &str,
) -> (Project, Vec<Version>) {
) -> (LegacyProject, Vec<LegacyVersion>) {
// Add a project.
let req = TestRequest::post()
.uri("/v2/project")
@@ -58,7 +61,7 @@ impl ApiV2 {
.append_header(("Authorization", pat))
.to_request();
let resp = self.call(req).await;
let versions: Vec<Version> = test::read_body_json(resp).await;
let versions: Vec<LegacyVersion> = test::read_body_json(resp).await;
(project, versions)
}
@@ -80,7 +83,7 @@ impl ApiV2 {
.to_request();
self.call(req).await
}
pub async fn get_project_deserialized(&self, id_or_slug: &str, pat: &str) -> Project {
pub async fn get_project_deserialized(&self, id_or_slug: &str, pat: &str) -> LegacyProject {
let resp = self.get_project(id_or_slug, pat).await;
assert_eq!(resp.status(), 200);
test::read_body_json(resp).await
@@ -98,36 +101,12 @@ impl ApiV2 {
&self,
user_id_or_username: &str,
pat: &str,
) -> Vec<Project> {
) -> Vec<LegacyProject> {
let resp = self.get_user_projects(user_id_or_username, pat).await;
assert_eq!(resp.status(), 200);
test::read_body_json(resp).await
}
pub async fn get_version_from_hash(
&self,
hash: &str,
algorithm: &str,
pat: &str,
) -> ServiceResponse {
let req = test::TestRequest::get()
.uri(&format!("/v2/version_file/{hash}?algorithm={algorithm}"))
.append_header(("Authorization", pat))
.to_request();
self.call(req).await
}
pub async fn get_version_from_hash_deserialized(
&self,
hash: &str,
algorithm: &str,
pat: &str,
) -> Version {
let resp = self.get_version_from_hash(hash, algorithm, pat).await;
assert_eq!(resp.status(), 200);
test::read_body_json(resp).await
}
pub async fn edit_project(
&self,
id_or_slug: &str,
@@ -195,6 +174,34 @@ impl ApiV2 {
}
}
pub async fn search_deserialized(
&self,
query: Option<&str>,
facets: Option<serde_json::Value>,
pat: &str,
) -> SearchResults {
let query_field = if let Some(query) = query {
format!("&query={}", urlencoding::encode(query))
} else {
"".to_string()
};
let facets_field = if let Some(facets) = facets {
format!("&facets={}", urlencoding::encode(&facets.to_string()))
} else {
"".to_string()
};
let req = test::TestRequest::get()
.uri(&format!("/v2/search?{}{}", query_field, facets_field))
.append_header(("Authorization", pat))
.to_request();
let resp = self.call(req).await;
let status = resp.status();
assert_eq!(status, 200);
test::read_body_json(resp).await
}
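// A minimal usage sketch (not part of this diff): search_deserialized builds the /v2/search
// query string from an optional text query and an optional facets JSON array. It assumes a
// TestEnvironment named `test_env` and the USER_USER_PAT constant used throughout these tests.
let results = test_env
    .v2
    .search_deserialized(
        Some("mysterious"),
        Some(serde_json::json!([["categories:fabric"], ["project_type:mod"]])),
        USER_USER_PAT,
    )
    .await;
assert!(!results.hits.is_empty());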
pub async fn get_analytics_revenue(
&self,
id_or_slugs: Vec<&str>,

View File

@@ -0,0 +1,69 @@
use actix_web::{
dev::ServiceResponse,
test::{self, TestRequest},
};
use labrinth::routes::v2::tags::{CategoryData, GameVersionQueryData, LoaderData};
use crate::common::database::ADMIN_USER_PAT;
use super::ApiV2;
impl ApiV2 {
// Tag GET requests do not require a PAT, as these endpoints are public.
pub async fn get_side_types(&self) -> ServiceResponse {
let req = TestRequest::get()
.uri("/v2/tag/side_type")
.append_header(("Authorization", ADMIN_USER_PAT))
.to_request();
self.call(req).await
}
pub async fn get_side_types_deserialized(&self) -> Vec<String> {
let resp = self.get_side_types().await;
assert_eq!(resp.status(), 200);
test::read_body_json(resp).await
}
pub async fn get_loaders(&self) -> ServiceResponse {
let req = TestRequest::get()
.uri("/v2/tag/loader")
.append_header(("Authorization", ADMIN_USER_PAT))
.to_request();
self.call(req).await
}
pub async fn get_loaders_deserialized(&self) -> Vec<LoaderData> {
let resp = self.get_loaders().await;
assert_eq!(resp.status(), 200);
test::read_body_json(resp).await
}
pub async fn get_categories(&self) -> ServiceResponse {
let req = TestRequest::get()
.uri("/v2/tag/category")
.append_header(("Authorization", ADMIN_USER_PAT))
.to_request();
self.call(req).await
}
pub async fn get_categories_deserialized(&self) -> Vec<CategoryData> {
let resp = self.get_categories().await;
assert_eq!(resp.status(), 200);
test::read_body_json(resp).await
}
pub async fn get_game_versions(&self) -> ServiceResponse {
let req = TestRequest::get()
.uri("/v2/tag/game_version")
.append_header(("Authorization", ADMIN_USER_PAT))
.to_request();
self.call(req).await
}
pub async fn get_game_versions_deserialized(&self) -> Vec<GameVersionQueryData> {
let resp = self.get_game_versions().await;
assert_eq!(resp.status(), 200);
test::read_body_json(resp).await
}
}
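A minimal sketch of the deserializing tag helpers above, assuming a TestEnvironment named test_env (the expected values come from the dummy data used by these tests):

let loaders = test_env.v2.get_loaders_deserialized().await;
assert!(loaders.iter().any(|l| l.name == "fabric"));

let game_versions = test_env.v2.get_game_versions_deserialized().await;
assert!(game_versions.iter().any(|v| v.version == "1.20.1"));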

View File

@@ -1,9 +1,18 @@
use std::collections::HashMap;
use actix_http::{header::AUTHORIZATION, StatusCode};
use actix_web::{dev::ServiceResponse, test};
use labrinth::models::projects::Version;
use actix_web::{
dev::ServiceResponse,
test::{self, TestRequest},
};
use labrinth::{
models::{projects::VersionType, v2::projects::LegacyVersion},
routes::v2::version_file::FileUpdateData,
util::actix::AppendsMultipart,
};
use serde_json::json;
use crate::common::{self, actix::AppendsMultipart, asserts::assert_status};
use crate::common::{asserts::assert_status, request_data::VersionCreationRequestData};
use super::ApiV2;
@@ -13,12 +22,319 @@ pub fn url_encode_json_serialized_vec(elements: &[String]) -> String {
}
impl ApiV2 {
pub async fn add_public_version(
&self,
creation_data: VersionCreationRequestData,
pat: &str,
) -> LegacyVersion {
// Add a project.
let req = TestRequest::post()
.uri("/v2/version")
.append_header(("Authorization", pat))
.set_multipart(creation_data.segment_data)
.to_request();
let resp = self.call(req).await;
assert_status(&resp, StatusCode::OK);
let value: serde_json::Value = test::read_body_json(resp).await;
let version_id = value["id"].as_str().unwrap();
// // Approve as a moderator.
// let req = TestRequest::patch()
// .uri(&format!("/v2/project/{}", creation_data.slug))
// .append_header(("Authorization", MOD_USER_PAT))
// .set_json(json!(
// {
// "status": "approved"
// }
// ))
// .to_request();
// let resp = self.call(req).await;
// assert_status(resp, StatusCode::NO_CONTENT);
self.get_version_deserialized(version_id, pat).await
}
pub async fn get_version(&self, id: &str, pat: &str) -> ServiceResponse {
let req = TestRequest::get()
.uri(&format!("/v2/version/{id}"))
.append_header(("Authorization", pat))
.to_request();
self.call(req).await
}
pub async fn get_version_deserialized(&self, id: &str, pat: &str) -> LegacyVersion {
let resp = self.get_version(id, pat).await;
assert_eq!(resp.status(), 200);
test::read_body_json(resp).await
}
pub async fn edit_version(
&self,
version_id: &str,
patch: serde_json::Value,
pat: &str,
) -> ServiceResponse {
let req = test::TestRequest::patch()
.uri(&format!("/v2/version/{version_id}"))
.append_header(("Authorization", pat))
.set_json(patch)
.to_request();
self.call(req).await
}
pub async fn get_version_from_hash(
&self,
hash: &str,
algorithm: &str,
pat: &str,
) -> ServiceResponse {
let req = test::TestRequest::get()
.uri(&format!("/v2/version_file/{hash}?algorithm={algorithm}"))
.append_header(("Authorization", pat))
.to_request();
self.call(req).await
}
pub async fn get_version_from_hash_deserialized(
&self,
hash: &str,
algorithm: &str,
pat: &str,
) -> LegacyVersion {
let resp = self.get_version_from_hash(hash, algorithm, pat).await;
assert_eq!(resp.status(), 200);
test::read_body_json(resp).await
}
pub async fn get_versions_from_hashes(
&self,
hashes: &[&str],
algorithm: &str,
pat: &str,
) -> ServiceResponse {
let req = TestRequest::post()
.uri("/v2/version_files")
.append_header(("Authorization", pat))
.set_json(json!({
"hashes": hashes,
"algorithm": algorithm,
}))
.to_request();
self.call(req).await
}
pub async fn get_versions_from_hashes_deserialized(
&self,
hashes: &[&str],
algorithm: &str,
pat: &str,
) -> HashMap<String, LegacyVersion> {
let resp = self.get_versions_from_hashes(hashes, algorithm, pat).await;
assert_eq!(resp.status(), 200);
test::read_body_json(resp).await
}
pub async fn get_update_from_hash(
&self,
hash: &str,
algorithm: &str,
loaders: Option<Vec<String>>,
game_versions: Option<Vec<String>>,
version_types: Option<Vec<String>>,
pat: &str,
) -> ServiceResponse {
let req = test::TestRequest::post()
.uri(&format!(
"/v2/version_file/{hash}/update?algorithm={algorithm}"
))
.append_header(("Authorization", pat))
.set_json(json!({
"loaders": loaders,
"game_versions": game_versions,
"version_types": version_types,
}))
.to_request();
self.call(req).await
}
pub async fn get_update_from_hash_deserialized(
&self,
hash: &str,
algorithm: &str,
loaders: Option<Vec<String>>,
game_versions: Option<Vec<String>>,
version_types: Option<Vec<String>>,
pat: &str,
) -> LegacyVersion {
let resp = self
.get_update_from_hash(hash, algorithm, loaders, game_versions, version_types, pat)
.await;
assert_eq!(resp.status(), 200);
test::read_body_json(resp).await
}
pub async fn update_files(
&self,
algorithm: &str,
hashes: Vec<String>,
loaders: Option<Vec<String>>,
game_versions: Option<Vec<String>>,
version_types: Option<Vec<String>>,
pat: &str,
) -> ServiceResponse {
let req = test::TestRequest::post()
.uri("/v2/version_files/update")
.append_header(("Authorization", pat))
.set_json(json!({
"algorithm": algorithm,
"hashes": hashes,
"loaders": loaders,
"game_versions": game_versions,
"version_types": version_types,
}))
.to_request();
self.call(req).await
}
pub async fn update_files_deserialized(
&self,
algorithm: &str,
hashes: Vec<String>,
loaders: Option<Vec<String>>,
game_versions: Option<Vec<String>>,
version_types: Option<Vec<String>>,
pat: &str,
) -> HashMap<String, LegacyVersion> {
let resp = self
.update_files(
algorithm,
hashes,
loaders,
game_versions,
version_types,
pat,
)
.await;
assert_eq!(resp.status(), 200);
test::read_body_json(resp).await
}
pub async fn update_individual_files(
&self,
algorithm: &str,
hashes: Vec<FileUpdateData>,
pat: &str,
) -> ServiceResponse {
let req = test::TestRequest::post()
.uri("/v2/version_files/update_individual")
.append_header(("Authorization", pat))
.set_json(json!({
"algorithm": algorithm,
"hashes": hashes
}))
.to_request();
self.call(req).await
}
pub async fn update_individual_files_deserialized(
&self,
algorithm: &str,
hashes: Vec<FileUpdateData>,
pat: &str,
) -> HashMap<String, LegacyVersion> {
let resp = self.update_individual_files(algorithm, hashes, pat).await;
assert_eq!(resp.status(), 200);
test::read_body_json(resp).await
}
// TODO: Not all fields are currently tested in the V2 tests; only the ones relevant to the v2-v3 conversion are.
#[allow(clippy::too_many_arguments)]
pub async fn get_project_versions(
&self,
project_id_slug: &str,
game_versions: Option<Vec<String>>,
loaders: Option<Vec<String>>,
featured: Option<bool>,
version_type: Option<VersionType>,
limit: Option<usize>,
offset: Option<usize>,
pat: &str,
) -> ServiceResponse {
let mut query_string = String::new();
if let Some(game_versions) = game_versions {
query_string.push_str(&format!(
"&game_versions={}",
urlencoding::encode(&serde_json::to_string(&game_versions).unwrap())
));
}
if let Some(loaders) = loaders {
query_string.push_str(&format!(
"&loaders={}",
urlencoding::encode(&serde_json::to_string(&loaders).unwrap())
));
}
if let Some(featured) = featured {
query_string.push_str(&format!("&featured={}", featured));
}
if let Some(version_type) = version_type {
query_string.push_str(&format!("&version_type={}", version_type));
}
if let Some(limit) = limit {
let limit = limit.to_string();
query_string.push_str(&format!("&limit={}", limit));
}
if let Some(offset) = offset {
let offset = offset.to_string();
query_string.push_str(&format!("&offset={}", offset));
}
let req = test::TestRequest::get()
.uri(&format!(
"/v2/project/{project_id_slug}/version?{}",
query_string.trim_start_matches('&')
))
.append_header(("Authorization", pat))
.to_request();
self.call(req).await
}
#[allow(clippy::too_many_arguments)]
pub async fn get_project_versions_deserialized(
&self,
slug: &str,
game_versions: Option<Vec<String>>,
loaders: Option<Vec<String>>,
featured: Option<bool>,
version_type: Option<VersionType>,
limit: Option<usize>,
offset: Option<usize>,
pat: &str,
) -> Vec<LegacyVersion> {
let resp = self
.get_project_versions(
slug,
game_versions,
loaders,
featured,
version_type,
limit,
offset,
pat,
)
.await;
assert_eq!(resp.status(), 200);
test::read_body_json(resp).await
}
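// A minimal usage sketch (not part of this diff): filtering a project's versions by loader and
// game version. It assumes a TestEnvironment named `test_env`, the dummy "alpha" project, and
// the USER_USER_PAT constant.
let versions = test_env
    .v2
    .get_project_versions_deserialized(
        "alpha",                          // project slug from the dummy data
        Some(vec!["1.20.1".to_string()]), // game_versions filter
        Some(vec!["fabric".to_string()]), // loaders filter
        None,                             // featured
        None,                             // version_type
        None,                             // limit
        None,                             // offset
        USER_USER_PAT,
    )
    .await;
assert!(!versions.is_empty());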
// TODO: remove redundancy in these functions
pub async fn create_default_version(
&self,
project_id: &str,
ordering: Option<i32>,
pat: &str,
) -> Version {
) -> LegacyVersion {
let json_data = json!(
{
"project_id": project_id,
@@ -33,19 +349,19 @@ impl ApiV2 {
"ordering": ordering,
}
);
let json_segment = common::actix::MultipartSegment {
let json_segment = labrinth::util::actix::MultipartSegment {
name: "data".to_string(),
filename: None,
content_type: Some("application/json".to_string()),
data: common::actix::MultipartSegmentData::Text(
data: labrinth::util::actix::MultipartSegmentData::Text(
serde_json::to_string(&json_data).unwrap(),
),
};
let file_segment = common::actix::MultipartSegment {
let file_segment = labrinth::util::actix::MultipartSegment {
name: "basic-mod-different.jar".to_string(),
filename: Some("basic-mod.jar".to_string()),
content_type: Some("application/java-archive".to_string()),
data: common::actix::MultipartSegmentData::Binary(
data: labrinth::util::actix::MultipartSegmentData::Binary(
include_bytes!("../../../tests/files/basic-mod-different.jar").to_vec(),
),
};
@@ -60,7 +376,7 @@ impl ApiV2 {
test::read_body_json(resp).await
}
pub async fn get_versions(&self, version_ids: Vec<String>, pat: &str) -> Vec<Version> {
pub async fn get_versions(&self, version_ids: Vec<String>, pat: &str) -> Vec<LegacyVersion> {
let ids = url_encode_json_serialized_vec(&version_ids);
let request = test::TestRequest::get()
.uri(&format!("/v2/versions?ids={}", ids))

View File

@@ -114,7 +114,6 @@ pub fn generate_authorize_uri(
optional_query_param("scope", scope),
optional_query_param("state", state),
)
.to_string()
}
pub async fn get_authorize_accept_flow_id(response: ServiceResponse) -> String {

View File

@@ -2,15 +2,13 @@
use crate::common::get_json_val_str;
use itertools::Itertools;
use labrinth::models::v2::projects::LegacyVersion;
pub fn assert_status(response: &actix_web::dev::ServiceResponse, status: actix_http::StatusCode) {
assert_eq!(response.status(), status, "{:#?}", response.response());
}
pub fn assert_version_ids(
versions: &[labrinth::models::projects::Version],
expected_ids: Vec<String>,
) {
pub fn assert_version_ids(versions: &[LegacyVersion], expected_ids: Vec<String>) {
let version_ids = versions
.iter()
.map(|v| get_json_val_str(v.id))

View File

@@ -1,25 +1,24 @@
#![allow(dead_code)]
use std::io::{Cursor, Write};
use actix_http::StatusCode;
use actix_web::test::{self, TestRequest};
use labrinth::{
models::projects::Project,
models::{
oauth_clients::OAuthClient, organizations::Organization, pats::Scopes, projects::Version,
},
use labrinth::models::{
oauth_clients::OAuthClient,
organizations::Organization,
pats::Scopes,
v2::projects::{LegacyProject, LegacyVersion},
};
use serde_json::json;
use sqlx::Executor;
use zip::{write::FileOptions, CompressionMethod, ZipWriter};
use crate::common::{actix::AppendsMultipart, database::USER_USER_PAT};
use crate::common::database::USER_USER_PAT;
use labrinth::util::actix::{AppendsMultipart, MultipartSegment, MultipartSegmentData};
use super::{
actix::{MultipartSegment, MultipartSegmentData},
asserts::assert_status,
database::USER_USER_ID,
environment::TestEnvironment,
get_json_val_str,
request_data::get_public_project_creation_data,
};
use super::{environment::TestEnvironment, request_data::get_public_project_creation_data};
use super::{asserts::assert_status, database::USER_USER_ID, get_json_val_str};
pub const DUMMY_DATA_UPDATE: i64 = 3;
@@ -37,13 +36,107 @@ pub const DUMMY_CATEGORIES: &[&str] = &[
pub const DUMMY_OAUTH_CLIENT_ALPHA_SECRET: &str = "abcdefghijklmnopqrstuvwxyz";
#[allow(dead_code)]
pub enum DummyJarFile {
pub enum TestFile {
DummyProjectAlpha,
DummyProjectBeta,
BasicMod,
BasicModDifferent,
// Randomly generates a valid .jar with a random hash.
// Unlike the other dummy jar files, this one is not a static file,
// and BasicModRandom.bytes() will return a different file each time.
BasicModRandom { filename: String, bytes: Vec<u8> },
BasicModpackRandom { filename: String, bytes: Vec<u8> },
}
impl TestFile {
pub fn build_random_jar() -> Self {
let filename = format!("random-mod-{}.jar", rand::random::<u64>());
let fabric_mod_json = serde_json::json!({
"schemaVersion": 1,
"id": filename,
"version": "1.0.1",
"name": filename,
"description": "Does nothing",
"authors": [
"user"
],
"contact": {
"homepage": "https://www.modrinth.com",
"sources": "https://www.modrinth.com",
"issues": "https://www.modrinth.com"
},
"license": "MIT",
"icon": "none.png",
"environment": "client",
"entrypoints": {
"main": [
"io.github.modrinth.Modrinth"
]
},
"depends": {
"minecraft": ">=1.20-"
}
}
)
.to_string();
// Create a simulated zip file
let mut cursor = Cursor::new(Vec::new());
{
let mut zip = ZipWriter::new(&mut cursor);
zip.start_file(
"fabric.mod.json",
FileOptions::default().compression_method(CompressionMethod::Stored),
)
.unwrap();
zip.write_all(fabric_mod_json.as_bytes()).unwrap();
zip.finish().unwrap();
}
let bytes = cursor.into_inner();
TestFile::BasicModRandom { filename, bytes }
}
pub fn build_random_mrpack() -> Self {
let filename = format!("random-modpack-{}.mrpack", rand::random::<u64>());
let modrinth_index_json = serde_json::json!({
"formatVersion": 1,
"game": "minecraft",
"versionId": "1.20.1-9.6",
"name": filename,
"files": [],
"dependencies": {
"fabric-loader": "0.14.22",
"minecraft": "1.20.1"
}
}
)
.to_string();
// Create a simulated zip file
let mut cursor = Cursor::new(Vec::new());
{
let mut zip = ZipWriter::new(&mut cursor);
zip.start_file(
"modrinth.index.json",
FileOptions::default().compression_method(CompressionMethod::Stored),
)
.unwrap();
zip.write_all(modrinth_index_json.as_bytes()).unwrap();
zip.finish().unwrap();
}
let bytes = cursor.into_inner();
TestFile::BasicModpackRandom { filename, bytes }
}
}
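// A usage sketch (not part of this diff): the random builders above produce jars/mrpacks with
// unique contents and hashes, which the search tests rely on when creating many projects at
// once. Assumes a TestEnvironment named `test_env`, the request_data helper from this PR, and
// the USER_USER_PAT constant.
let creation_data =
    get_public_project_creation_data("random-slug", Some(TestFile::build_random_jar()));
let (project, _versions) = test_env
    .v2
    .add_public_project(creation_data, USER_USER_PAT)
    .await;
assert_eq!(project.project_type, "mod");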
#[derive(Clone)]
#[allow(dead_code)]
pub enum DummyImage {
SmallIcon, // 200x200
@@ -77,10 +170,10 @@ pub struct DummyData {
impl DummyData {
pub fn new(
project_alpha: Project,
project_alpha_version: Version,
project_beta: Project,
project_beta_version: Version,
project_alpha: LegacyProject,
project_alpha_version: LegacyVersion,
project_beta: LegacyProject,
project_beta_version: LegacyVersion,
organization_zeta: Organization,
oauth_client_alpha: OAuthClient,
) -> Self {
@@ -210,21 +303,21 @@ pub async fn get_dummy_data(test_env: &TestEnvironment) -> DummyData {
)
}
pub async fn add_project_alpha(test_env: &TestEnvironment) -> (Project, Version) {
pub async fn add_project_alpha(test_env: &TestEnvironment) -> (LegacyProject, LegacyVersion) {
let (project, versions) = test_env
.v2
.add_public_project(
get_public_project_creation_data("alpha", Some(DummyJarFile::DummyProjectAlpha)),
get_public_project_creation_data("alpha", Some(TestFile::DummyProjectAlpha)),
USER_USER_PAT,
)
.await;
(project, versions.into_iter().next().unwrap())
}
pub async fn add_project_beta(test_env: &TestEnvironment) -> (Project, Version) {
pub async fn add_project_beta(test_env: &TestEnvironment) -> (LegacyProject, LegacyVersion) {
// Adds dummy data to the database with sqlx (projects, versions, threads)
// Generate test project data.
let jar = DummyJarFile::DummyProjectBeta;
let jar = TestFile::DummyProjectBeta;
let json_data = json!(
{
"title": "Test Project Beta",
@@ -298,14 +391,14 @@ pub async fn add_organization_zeta(test_env: &TestEnvironment) -> Organization {
get_organization_zeta(test_env).await
}
pub async fn get_project_alpha(test_env: &TestEnvironment) -> (Project, Version) {
pub async fn get_project_alpha(test_env: &TestEnvironment) -> (LegacyProject, LegacyVersion) {
// Get project
let req = TestRequest::get()
.uri("/v2/project/alpha")
.append_header(("Authorization", USER_USER_PAT))
.to_request();
let resp = test_env.call(req).await;
let project: Project = test::read_body_json(resp).await;
let project: LegacyProject = test::read_body_json(resp).await;
// Get project's versions
let req = TestRequest::get()
@@ -313,13 +406,13 @@ pub async fn get_project_alpha(test_env: &TestEnvironment) -> (Project, Version)
.append_header(("Authorization", USER_USER_PAT))
.to_request();
let resp = test_env.call(req).await;
let versions: Vec<Version> = test::read_body_json(resp).await;
let versions: Vec<LegacyVersion> = test::read_body_json(resp).await;
let version = versions.into_iter().next().unwrap();
(project, version)
}
pub async fn get_project_beta(test_env: &TestEnvironment) -> (Project, Version) {
pub async fn get_project_beta(test_env: &TestEnvironment) -> (LegacyProject, LegacyVersion) {
// Get project
let req = TestRequest::get()
.uri("/v2/project/beta")
@@ -327,7 +420,8 @@ pub async fn get_project_beta(test_env: &TestEnvironment) -> (Project, Version)
.to_request();
let resp = test_env.call(req).await;
assert_status(&resp, StatusCode::OK);
let project: Project = test::read_body_json(resp).await;
let project: serde_json::Value = test::read_body_json(resp).await;
let project: LegacyProject = serde_json::from_value(project).unwrap();
// Get project's versions
let req = TestRequest::get()
@@ -336,7 +430,7 @@ pub async fn get_project_beta(test_env: &TestEnvironment) -> (Project, Version)
.to_request();
let resp = test_env.call(req).await;
assert_status(&resp, StatusCode::OK);
let versions: Vec<Version> = test::read_body_json(resp).await;
let versions: Vec<LegacyVersion> = test::read_body_json(resp).await;
let version = versions.into_iter().next().unwrap();
(project, version)
@@ -362,31 +456,48 @@ pub async fn get_oauth_client_alpha(test_env: &TestEnvironment) -> OAuthClient {
oauth_clients.into_iter().next().unwrap()
}
impl DummyJarFile {
impl TestFile {
pub fn filename(&self) -> String {
match self {
DummyJarFile::DummyProjectAlpha => "dummy-project-alpha.jar",
DummyJarFile::DummyProjectBeta => "dummy-project-beta.jar",
DummyJarFile::BasicMod => "basic-mod.jar",
DummyJarFile::BasicModDifferent => "basic-mod-different.jar",
TestFile::DummyProjectAlpha => "dummy-project-alpha.jar",
TestFile::DummyProjectBeta => "dummy-project-beta.jar",
TestFile::BasicMod => "basic-mod.jar",
TestFile::BasicModDifferent => "basic-mod-different.jar",
TestFile::BasicModRandom { filename, .. } => filename,
TestFile::BasicModpackRandom { filename, .. } => filename,
}
.to_string()
}
pub fn bytes(&self) -> Vec<u8> {
match self {
DummyJarFile::DummyProjectAlpha => {
TestFile::DummyProjectAlpha => {
include_bytes!("../../tests/files/dummy-project-alpha.jar").to_vec()
}
DummyJarFile::DummyProjectBeta => {
TestFile::DummyProjectBeta => {
include_bytes!("../../tests/files/dummy-project-beta.jar").to_vec()
}
DummyJarFile::BasicMod => include_bytes!("../../tests/files/basic-mod.jar").to_vec(),
DummyJarFile::BasicModDifferent => {
TestFile::BasicMod => include_bytes!("../../tests/files/basic-mod.jar").to_vec(),
TestFile::BasicModDifferent => {
include_bytes!("../../tests/files/basic-mod-different.jar").to_vec()
}
TestFile::BasicModRandom { bytes, .. } => bytes.clone(),
TestFile::BasicModpackRandom { bytes, .. } => bytes.clone(),
}
}
pub fn project_type(&self) -> String {
match self {
TestFile::DummyProjectAlpha => "mod",
TestFile::DummyProjectBeta => "mod",
TestFile::BasicMod => "mod",
TestFile::BasicModDifferent => "mod",
TestFile::BasicModRandom { .. } => "mod",
TestFile::BasicModpackRandom { .. } => "modpack",
}
.to_string()
}
}
impl DummyImage {

View File

@@ -2,9 +2,6 @@ use labrinth::{check_env_vars, clickhouse};
use labrinth::{file_hosting, queue, LabrinthConfig};
use std::sync::Arc;
use self::database::TemporaryDatabase;
pub mod actix;
pub mod api_v2;
pub mod api_v3;
pub mod asserts;
@@ -18,7 +15,7 @@ pub mod scopes;
// Testing equivalent to 'setup' function, producing a LabrinthConfig
// If making a test, you should probably use environment::TestEnvironment::build() (which calls this)
pub async fn setup(db: &TemporaryDatabase) -> LabrinthConfig {
pub async fn setup(db: &database::TemporaryDatabase) -> LabrinthConfig {
println!("Setting up labrinth config");
dotenvy::dotenv().ok();
@@ -40,7 +37,7 @@ pub async fn setup(db: &TemporaryDatabase) -> LabrinthConfig {
redis_pool.clone(),
&mut clickhouse,
file_host.clone(),
maxmind_reader.clone(),
maxmind_reader,
)
}

View File

@@ -175,6 +175,7 @@ impl<'a> PermissionsTest<'a> {
let resp = test_env.call(request).await;
if !self.allowed_failure_codes.contains(&resp.status().as_u16()) {
println!("Body: {:?}", resp.response().body());
return Err(format!(
"Failure permissions test failed. Expected failure codes {} got {}",
self.allowed_failure_codes
@@ -206,6 +207,7 @@ impl<'a> PermissionsTest<'a> {
let resp = test_env.call(request).await;
if !resp.status().is_success() {
println!("Body: {:?}", resp.response().body());
return Err(format!(
"Success permissions test failed. Expected success, got {}",
resp.status().as_u16()
@@ -673,8 +675,7 @@ impl<'a> PermissionsTest<'a> {
Ok(())
};
tokio::try_join!(test_1, test_2, test_3, test_4, test_5, test_6, test_7,)
.map_err(|e| e.to_string())?;
tokio::try_join!(test_1, test_2, test_3, test_4, test_5, test_6, test_7,).map_err(|e| e)?;
Ok(())
}
@@ -837,7 +838,7 @@ impl<'a> PermissionsTest<'a> {
Ok(())
};
tokio::try_join!(test_1, test_2, test_3,).map_err(|e| e.to_string())?;
tokio::try_join!(test_1, test_2, test_3,).map_err(|e| e)?;
Ok(())
}

View File

@@ -1,15 +1,21 @@
#![allow(dead_code)]
use serde_json::json;
use super::{
actix::MultipartSegment,
dummy_data::{DummyImage, DummyJarFile},
use super::dummy_data::{DummyImage, TestFile};
use labrinth::{
models::projects::ProjectId,
util::actix::{MultipartSegment, MultipartSegmentData},
};
use crate::common::actix::MultipartSegmentData;
pub struct ProjectCreationRequestData {
pub slug: String,
pub jar: Option<DummyJarFile>,
pub jar: Option<TestFile>,
pub segment_data: Vec<MultipartSegment>,
}
pub struct VersionCreationRequestData {
pub version: String,
pub jar: Option<TestFile>,
pub segment_data: Vec<MultipartSegment>,
}
@@ -21,29 +27,64 @@ pub struct ImageData {
pub fn get_public_project_creation_data(
slug: &str,
version_jar: Option<DummyJarFile>,
version_jar: Option<TestFile>,
) -> ProjectCreationRequestData {
let initial_versions = if let Some(ref jar) = version_jar {
json!([{
"file_parts": [jar.filename()],
"version_number": "1.2.3",
"version_title": "start",
"dependencies": [],
"game_versions": ["1.20.1"] ,
"release_channel": "release",
"loaders": ["fabric"],
"featured": true
}])
let json_data = get_public_project_creation_data_json(slug, version_jar.as_ref());
let multipart_data = get_public_creation_data_multipart(&json_data, version_jar.as_ref());
ProjectCreationRequestData {
slug: slug.to_string(),
jar: version_jar,
segment_data: multipart_data,
}
}
pub fn get_public_version_creation_data(
project_id: ProjectId,
version_number: &str,
version_jar: TestFile,
) -> VersionCreationRequestData {
let mut json_data = get_public_version_creation_data_json(version_number, &version_jar);
json_data["project_id"] = json!(project_id);
let multipart_data = get_public_creation_data_multipart(&json_data, Some(&version_jar));
VersionCreationRequestData {
version: version_number.to_string(),
jar: Some(version_jar),
segment_data: multipart_data,
}
}
pub fn get_public_version_creation_data_json(
version_number: &str,
version_jar: &TestFile,
) -> serde_json::Value {
json!({
"file_parts": [version_jar.filename()],
"version_number": version_number,
"version_title": "start",
"dependencies": [],
"game_versions": ["1.20.1"] ,
"release_channel": "release",
"loaders": ["fabric"],
"featured": true
})
}
pub fn get_public_project_creation_data_json(
slug: &str,
version_jar: Option<&TestFile>,
) -> serde_json::Value {
let initial_versions = if let Some(jar) = version_jar {
json!([get_public_version_creation_data_json("1.2.3", jar)])
} else {
json!([])
};
let is_draft = version_jar.is_none();
let json_data = json!(
json!(
{
"title": format!("Test Project {slug}"),
"slug": slug,
"project_type": version_jar.as_ref().map(|f| f.project_type()).unwrap_or("mod".to_string()),
"description": "A dummy project for testing with.",
"body": "This project is approved, and versions are listed.",
"client_side": "required",
@@ -51,19 +92,24 @@ pub fn get_public_project_creation_data(
"initial_versions": initial_versions,
"is_draft": is_draft,
"categories": [],
"license_id": "MIT"
"license_id": "MIT",
}
);
)
}
pub fn get_public_creation_data_multipart(
json_data: &serde_json::Value,
version_jar: Option<&TestFile>,
) -> Vec<MultipartSegment> {
// Basic json
let json_segment = MultipartSegment {
name: "data".to_string(),
filename: None,
content_type: Some("application/json".to_string()),
data: MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()),
data: MultipartSegmentData::Text(serde_json::to_string(json_data).unwrap()),
};
let segment_data = if let Some(ref jar) = version_jar {
if let Some(jar) = version_jar {
// Basic file
let file_segment = MultipartSegment {
name: jar.filename(),
@@ -72,15 +118,9 @@ pub fn get_public_project_creation_data(
data: MultipartSegmentData::Binary(jar.bytes()),
};
vec![json_segment.clone(), file_segment]
vec![json_segment, file_segment]
} else {
vec![json_segment.clone()]
};
ProjectCreationRequestData {
slug: slug.to_string(),
jar: version_jar,
segment_data,
vec![json_segment]
}
}
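A minimal sketch of the new version-creation helpers, assuming a ProjectId obtained from an earlier project creation, a TestEnvironment named test_env, and USER_USER_PAT:

let version_data =
    get_public_version_creation_data(project_id, "1.0.0", TestFile::build_random_jar());
let version = test_env
    .v2
    .add_public_version(version_data, USER_USER_PAT)
    .await;
assert_eq!(
    version.loaders.iter().map(|x| &x.0).collect::<Vec<_>>(),
    vec!["fabric"]
);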

View File

@@ -19,22 +19,43 @@ INSERT INTO pats (id, user_id, name, access_token, scopes, expires) VALUES (52,
INSERT INTO pats (id, user_id, name, access_token, scopes, expires) VALUES (53, 4, 'friend-pat', 'mrp_patfriend', $1, '2030-08-18 15:48:58.435729+00');
INSERT INTO pats (id, user_id, name, access_token, scopes, expires) VALUES (54, 5, 'enemy-pat', 'mrp_patenemy', $1, '2030-08-18 15:48:58.435729+00');
-- -- Sample game versions, loaders, categories
INSERT INTO game_versions (id, version, type, created)
VALUES (20000, '1.20.1', 'release', timezone('utc', now()));
INSERT INTO loaders (id, loader) VALUES (5, 'fabric');
INSERT INTO loaders_project_types (joining_loader_id, joining_project_type_id) VALUES (5,1);
INSERT INTO loaders (id, loader) VALUES (1, 'fabric');
INSERT INTO loaders_project_types (joining_loader_id, joining_project_type_id) VALUES (1,1);
INSERT INTO loaders_project_types (joining_loader_id, joining_project_type_id) VALUES (1,2);
INSERT INTO loaders (id, loader) VALUES (6, 'forge');
INSERT INTO loaders_project_types (joining_loader_id, joining_project_type_id) VALUES (6,1);
-- Adds dummies to mrpack_loaders
INSERT INTO loader_field_enum_values (enum_id, value) SELECT id, 'fabric' FROM loader_field_enums WHERE enum_name = 'mrpack_loaders';
INSERT INTO loader_field_enum_values (enum_id, value) SELECT id, 'forge' FROM loader_field_enums WHERE enum_name = 'mrpack_loaders';
INSERT INTO loaders_project_types_games (loader_id, project_type_id, game_id) SELECT joining_loader_id, joining_project_type_id, 1 FROM loaders_project_types WHERE joining_loader_id = 5;
INSERT INTO loaders_project_types_games (loader_id, project_type_id, game_id) SELECT joining_loader_id, joining_project_type_id, 1 FROM loaders_project_types WHERE joining_loader_id = 6;
-- Sample game versions, loaders, categories
-- The 'game_versions' loader field enum has id 2
INSERT INTO loader_field_enum_values(enum_id, value, metadata)
VALUES (2, '1.20.1', '{"type":"release","major":false}');
INSERT INTO loader_field_enum_values(enum_id, value, metadata)
VALUES (2, '1.20.2', '{"type":"release","major":false}');
INSERT INTO loader_field_enum_values(enum_id, value, metadata)
VALUES (2, '1.20.3', '{"type":"release","major":false}');
INSERT INTO loader_field_enum_values(enum_id, value, metadata)
VALUES (2, '1.20.4', '{"type":"beta","major":false}');
INSERT INTO loader_field_enum_values(enum_id, value, metadata)
VALUES (2, '1.20.5', '{"type":"release","major":true}');
INSERT INTO loader_fields_loaders(loader_id, loader_field_id)
SELECT l.id, lf.id FROM loaders l CROSS JOIN loader_fields lf WHERE lf.field = 'game_versions' OR lf.field = 'client_side' OR lf.field = 'server_side';
INSERT INTO categories (id, category, project_type) VALUES
(1, 'combat', 1),
(2, 'decoration', 1),
(3, 'economy', 1),
(4, 'food', 1),
(5, 'magic', 1),
(6, 'mobs', 1),
(7, 'optimization', 1);
(51, 'combat', 1),
(52, 'decoration', 1),
(53, 'economy', 1),
(54, 'food', 1),
(55, 'magic', 1),
(56, 'mobs', 1),
(57, 'optimization', 1);
INSERT INTO categories (id, category, project_type) VALUES
(101, 'combat', 2),

View File

@@ -2,19 +2,19 @@ use actix_http::StatusCode;
use actix_web::test;
use bytes::Bytes;
use chrono::{Duration, Utc};
use common::actix::MultipartSegment;
use common::environment::with_test_environment;
use common::environment::{with_test_environment, TestEnvironment};
use common::permissions::{PermissionsTest, PermissionsTestContext};
use futures::StreamExt;
use itertools::Itertools;
use labrinth::database::models::project_item::{PROJECTS_NAMESPACE, PROJECTS_SLUGS_NAMESPACE};
use labrinth::models::ids::base62_impl::parse_base62;
use labrinth::models::teams::ProjectPermissions;
use labrinth::util::actix::{AppendsMultipart, MultipartSegment, MultipartSegmentData};
use serde_json::json;
use crate::common::database::*;
use crate::common::{database::*, request_data};
use crate::common::dummy_data::DUMMY_CATEGORIES;
use crate::common::{actix::AppendsMultipart, environment::TestEnvironment};
use crate::common::dummy_data::{TestFile, DUMMY_CATEGORIES};
// importing common module.
mod common;
@@ -130,54 +130,50 @@ async fn test_add_remove_project() {
);
// Basic json
let json_segment = common::actix::MultipartSegment {
let json_segment = MultipartSegment {
name: "data".to_string(),
filename: None,
content_type: Some("application/json".to_string()),
data: common::actix::MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()),
data: MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()),
};
// Basic json, with a different file
json_data["initial_versions"][0]["file_parts"][0] = json!("basic-mod-different.jar");
let json_diff_file_segment = common::actix::MultipartSegment {
data: common::actix::MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()),
let json_diff_file_segment = MultipartSegment {
data: MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()),
..json_segment.clone()
};
// Basic json, with a different file, and a different slug
json_data["slug"] = json!("new_demo");
json_data["initial_versions"][0]["file_parts"][0] = json!("basic-mod-different.jar");
let json_diff_slug_file_segment = common::actix::MultipartSegment {
data: common::actix::MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()),
let json_diff_slug_file_segment = MultipartSegment {
data: MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()),
..json_segment.clone()
};
// Basic file
let file_segment = common::actix::MultipartSegment {
let file_segment = MultipartSegment {
name: "basic-mod.jar".to_string(),
filename: Some("basic-mod.jar".to_string()),
content_type: Some("application/java-archive".to_string()),
data: common::actix::MultipartSegmentData::Binary(
include_bytes!("../tests/files/basic-mod.jar").to_vec(),
),
data: MultipartSegmentData::Binary(include_bytes!("../tests/files/basic-mod.jar").to_vec()),
};
// Differently named file, with the same content (for hash testing)
let file_diff_name_segment = common::actix::MultipartSegment {
let file_diff_name_segment = MultipartSegment {
name: "basic-mod-different.jar".to_string(),
filename: Some("basic-mod-different.jar".to_string()),
content_type: Some("application/java-archive".to_string()),
data: common::actix::MultipartSegmentData::Binary(
include_bytes!("../tests/files/basic-mod.jar").to_vec(),
),
data: MultipartSegmentData::Binary(include_bytes!("../tests/files/basic-mod.jar").to_vec()),
};
// Differently named file, with different content
let file_diff_name_content_segment = common::actix::MultipartSegment {
let file_diff_name_content_segment = MultipartSegment {
name: "basic-mod-different.jar".to_string(),
filename: Some("basic-mod-different.jar".to_string()),
content_type: Some("application/java-archive".to_string()),
data: common::actix::MultipartSegmentData::Binary(
data: MultipartSegmentData::Binary(
include_bytes!("../tests/files/basic-mod-different.jar").to_vec(),
),
};
@@ -283,6 +279,55 @@ async fn test_add_remove_project() {
test_env.cleanup().await;
}
#[actix_rt::test]
async fn test_project_type_sanity() {
let test_env = TestEnvironment::build(None).await;
let api = &test_env.v2;
// Perform all other patch tests on both 'mod' and 'modpack'
let test_creation_mod = request_data::get_public_project_creation_data(
"test-mod",
Some(TestFile::build_random_jar()),
);
let test_creation_modpack = request_data::get_public_project_creation_data(
"test-modpack",
Some(TestFile::build_random_mrpack()),
);
for (mod_or_modpack, test_creation_data) in [
("mod", test_creation_mod),
("modpack", test_creation_modpack),
] {
let (test_project, test_version) = api
.add_public_project(test_creation_data, USER_USER_PAT)
.await;
let test_project_slug = test_project.slug.as_ref().unwrap();
assert_eq!(test_project.project_type, mod_or_modpack);
assert_eq!(test_project.loaders, vec!["fabric"]);
assert_eq!(
test_version[0].loaders.iter().map(|x| &x.0).collect_vec(),
vec!["fabric"]
);
let project = api
.get_project_deserialized(test_project_slug, USER_USER_PAT)
.await;
assert_eq!(test_project.loaders, vec!["fabric"]);
assert_eq!(project.project_type, mod_or_modpack);
let version = api
.get_version_deserialized(&test_version[0].id.to_string(), USER_USER_PAT)
.await;
assert_eq!(
version.loaders.iter().map(|x| &x.0).collect_vec(),
vec!["fabric"]
);
}
// TODO: as we get more complicated structures with v3 testing, and alpha/beta get more complicated, we should add more tests here,
// to ensure that projects created with v3 routes are still valid and work with v2 routes.
}
#[actix_rt::test]
pub async fn test_patch_project() {
let test_env = TestEnvironment::build(None).await;
@@ -426,19 +471,30 @@ pub async fn test_patch_project() {
assert_eq!(resp.status(), 404);
// New slug does work
let project = api.get_project_deserialized("newslug", USER_USER_PAT).await;
assert_eq!(project.slug, Some("newslug".to_string()));
assert_eq!(project.title, "New successful title");
assert_eq!(project.description, "New successful description");
assert_eq!(project.body, "New successful body");
assert_eq!(project.categories, vec![DUMMY_CATEGORIES[0]]);
assert_eq!(project.license.id, "MIT");
assert_eq!(project.issues_url, Some("https://github.com".to_string()));
assert_eq!(project.discord_url, Some("https://discord.gg".to_string()));
assert_eq!(project.wiki_url, Some("https://wiki.com".to_string()));
assert_eq!(project.client_side.to_string(), "optional");
assert_eq!(project.server_side.to_string(), "required");
assert_eq!(project.donation_urls.unwrap()[0].url, "https://patreon.com");
let resp = api.get_project("newslug", USER_USER_PAT).await;
let project: serde_json::Value = test::read_body_json(resp).await;
assert_eq!(project["slug"], json!(Some("newslug".to_string())));
assert_eq!(project["title"], "New successful title");
assert_eq!(project["description"], "New successful description");
assert_eq!(project["body"], "New successful body");
assert_eq!(project["categories"], json!(vec![DUMMY_CATEGORIES[0]]));
assert_eq!(project["license"]["id"], "MIT");
assert_eq!(
project["issues_url"],
json!(Some("https://github.com".to_string()))
);
assert_eq!(
project["discord_url"],
json!(Some("https://discord.gg".to_string()))
);
assert_eq!(
project["wiki_url"],
json!(Some("https://wiki.com".to_string()))
);
assert_eq!(project["client_side"], json!("optional"));
assert_eq!(project["server_side"], json!("required"));
assert_eq!(project["donation_urls"][0]["url"], "https://patreon.com");
// Cleanup test db
test_env.cleanup().await;
@@ -499,8 +555,8 @@ async fn permissions_patch_project() {
("title", json!("randomname")),
("description", json!("randomdescription")),
("categories", json!(["combat", "economy"])),
("client_side", json!("unsupported")),
("server_side", json!("unsupported")),
// ("client_side", json!("unsupported")),
// ("server_side", json!("unsupported")),
("additional_categories", json!(["decoration"])),
("issues_url", json!("https://issues.com")),
("source_url", json!("https://source.com")),
@@ -532,11 +588,10 @@ async fn permissions_patch_project() {
},
}))
};
PermissionsTest::new(&test_env)
.simple_project_permissions_test(edit_details, req_gen)
.await
.unwrap();
.into_iter();
}
})
.buffer_unordered(4)
@@ -744,7 +799,7 @@ async fn permissions_upload_version() {
name: "data".to_string(),
filename: None,
content_type: Some("application/json".to_string()),
data: common::actix::MultipartSegmentData::Text(
data: MultipartSegmentData::Text(
serde_json::to_string(&json!({
"project_id": ctx.project_id.unwrap(),
"file_parts": ["basic-mod.jar"],
@@ -764,7 +819,7 @@ async fn permissions_upload_version() {
name: "basic-mod.jar".to_string(),
filename: Some("basic-mod.jar".to_string()),
content_type: Some("application/java-archive".to_string()),
data: common::actix::MultipartSegmentData::Binary(
data: MultipartSegmentData::Binary(
include_bytes!("../tests/files/basic-mod.jar").to_vec(),
),
},
@@ -785,7 +840,7 @@ async fn permissions_upload_version() {
name: "data".to_string(),
filename: None,
content_type: Some("application/json".to_string()),
data: common::actix::MultipartSegmentData::Text(
data: MultipartSegmentData::Text(
serde_json::to_string(&json!({
"file_parts": ["basic-mod-different.jar"],
}))
@@ -796,7 +851,7 @@ async fn permissions_upload_version() {
name: "basic-mod-different.jar".to_string(),
filename: Some("basic-mod-different.jar".to_string()),
content_type: Some("application/java-archive".to_string()),
data: common::actix::MultipartSegmentData::Binary(
data: MultipartSegmentData::Binary(
include_bytes!("../tests/files/basic-mod-different.jar").to_vec(),
),
},

View File

@@ -1,8 +1,8 @@
use actix_web::test::{self, TestRequest};
use bytes::Bytes;
use chrono::{Duration, Utc};
use common::actix::AppendsMultipart;
use labrinth::models::pats::Scopes;
use labrinth::util::actix::{AppendsMultipart, MultipartSegment, MultipartSegmentData};
use serde_json::json;
use crate::common::{database::*, environment::TestEnvironment, scopes::ScopeTest};
@@ -225,19 +225,17 @@ pub async fn project_version_create_scopes() {
"license_id": "MIT"
}
);
let json_segment = common::actix::MultipartSegment {
let json_segment = MultipartSegment {
name: "data".to_string(),
filename: None,
content_type: Some("application/json".to_string()),
data: common::actix::MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()),
data: MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()),
};
let file_segment = common::actix::MultipartSegment {
let file_segment = MultipartSegment {
name: "basic-mod.jar".to_string(),
filename: Some("basic-mod.jar".to_string()),
content_type: Some("application/java-archive".to_string()),
data: common::actix::MultipartSegmentData::Binary(
include_bytes!("../tests/files/basic-mod.jar").to_vec(),
),
data: MultipartSegmentData::Binary(include_bytes!("../tests/files/basic-mod.jar").to_vec()),
};
let req_gen = || {
@@ -266,17 +264,17 @@ pub async fn project_version_create_scopes() {
"featured": true
}
);
let json_segment = common::actix::MultipartSegment {
let json_segment = MultipartSegment {
name: "data".to_string(),
filename: None,
content_type: Some("application/json".to_string()),
data: common::actix::MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()),
data: MultipartSegmentData::Text(serde_json::to_string(&json_data).unwrap()),
};
let file_segment = common::actix::MultipartSegment {
let file_segment = MultipartSegment {
name: "basic-mod-different.jar".to_string(),
filename: Some("basic-mod.jar".to_string()),
content_type: Some("application/java-archive".to_string()),
data: common::actix::MultipartSegmentData::Binary(
data: MultipartSegmentData::Binary(
include_bytes!("../tests/files/basic-mod-different.jar").to_vec(),
),
};
@@ -819,11 +817,11 @@ pub async fn version_write_scopes() {
// Generate test project data.
// Basic json
let json_segment = common::actix::MultipartSegment {
let json_segment = MultipartSegment {
name: "data".to_string(),
filename: None,
content_type: Some("application/json".to_string()),
data: common::actix::MultipartSegmentData::Text(
data: MultipartSegmentData::Text(
serde_json::to_string(&json!(
{
"file_types": {
@@ -836,11 +834,11 @@ pub async fn version_write_scopes() {
};
// Differently named file, with different content
let content_segment = common::actix::MultipartSegment {
let content_segment = MultipartSegment {
name: "simple-zip.zip".to_string(),
filename: Some("simple-zip.zip".to_string()),
content_type: Some("application/zip".to_string()),
data: common::actix::MultipartSegmentData::Binary(
data: MultipartSegmentData::Binary(
include_bytes!("../tests/files/simple-zip.zip").to_vec(),
),
};

tests/search.rs (new file, 298 lines)
View File

@@ -0,0 +1,298 @@
use crate::common::database::*;
use crate::common::dummy_data::DUMMY_CATEGORIES;
use crate::common::environment::TestEnvironment;
use crate::common::request_data::{get_public_version_creation_data, ProjectCreationRequestData};
use common::dummy_data::TestFile;
use common::request_data;
use futures::stream::StreamExt;
use labrinth::models::ids::base62_impl::parse_base62;
use serde_json::json;
use std::collections::HashMap;
use std::sync::Arc;
// importing common module.
mod common;
#[actix_rt::test]
async fn search_projects() {
// Test setup and dummy data
let test_env = TestEnvironment::build(Some(8)).await;
let api = &test_env.v2;
let test_name = test_env.db.database_name.clone();
// Add dummy projects of various categories for searchability
let mut project_creation_futures = vec![];
let create_async_future =
|id: u64,
pat: &'static str,
is_modpack: bool,
modify_json: Box<dyn Fn(&mut serde_json::Value)>| {
let slug = format!("{test_name}-searchable-project-{id}");
let jar = if is_modpack {
TestFile::build_random_mrpack()
} else {
TestFile::build_random_jar()
};
let mut basic_project_json =
request_data::get_public_project_creation_data_json(&slug, Some(&jar));
modify_json(&mut basic_project_json);
let basic_project_multipart =
request_data::get_public_creation_data_multipart(&basic_project_json, Some(&jar));
// Add a project- simple, should work.
let req = api.add_public_project(
ProjectCreationRequestData {
slug,
jar: Some(jar),
segment_data: basic_project_multipart,
},
pat,
);
async move {
let (project, _) = req.await;
// Approve, so that the project is searchable
let resp = api
.edit_project(
&project.id.to_string(),
json!({
"status": "approved"
}),
MOD_USER_PAT,
)
.await;
assert_eq!(resp.status(), 204);
(project.id.0, id)
}
};
// Test project 0
let id = 0;
let modify_json = |json: &mut serde_json::Value| {
json["categories"] = json!(DUMMY_CATEGORIES[4..6]);
json["server_side"] = json!("required");
json["license_id"] = json!("LGPL-3.0-or-later");
};
project_creation_futures.push(create_async_future(
id,
USER_USER_PAT,
false,
Box::new(modify_json),
));
// Test project 1
let id = 1;
let modify_json = |json: &mut serde_json::Value| {
json["categories"] = json!(DUMMY_CATEGORIES[0..2]);
json["client_side"] = json!("optional");
};
project_creation_futures.push(create_async_future(
id,
USER_USER_PAT,
false,
Box::new(modify_json),
));
// Test project 2
let id = 2;
let modify_json = |json: &mut serde_json::Value| {
json["categories"] = json!(DUMMY_CATEGORIES[0..2]);
json["server_side"] = json!("required");
json["title"] = json!("Mysterious Project");
};
project_creation_futures.push(create_async_future(
id,
USER_USER_PAT,
false,
Box::new(modify_json),
));
// Test project 3
let id = 3;
let modify_json = |json: &mut serde_json::Value| {
json["categories"] = json!(DUMMY_CATEGORIES[0..3]);
json["server_side"] = json!("required");
json["initial_versions"][0]["game_versions"] = json!(["1.20.4"]);
json["title"] = json!("Mysterious Project");
json["license_id"] = json!("LicenseRef-All-Rights-Reserved"); // closed source
};
project_creation_futures.push(create_async_future(
id,
FRIEND_USER_PAT,
false,
Box::new(modify_json),
));
// Test project 4
let id = 4;
let modify_json = |json: &mut serde_json::Value| {
json["categories"] = json!(DUMMY_CATEGORIES[0..3]);
json["client_side"] = json!("optional");
json["initial_versions"][0]["game_versions"] = json!(["1.20.5"]);
};
project_creation_futures.push(create_async_future(
id,
USER_USER_PAT,
true,
Box::new(modify_json),
));
// Test project 5
let id = 5;
let modify_json = |json: &mut serde_json::Value| {
json["categories"] = json!(DUMMY_CATEGORIES[5..6]);
json["client_side"] = json!("optional");
json["initial_versions"][0]["game_versions"] = json!(["1.20.5"]);
json["license_id"] = json!("LGPL-3.0-or-later");
};
project_creation_futures.push(create_async_future(
id,
USER_USER_PAT,
false,
Box::new(modify_json),
));
// Test project 6
let id = 6;
let modify_json = |json: &mut serde_json::Value| {
json["categories"] = json!(DUMMY_CATEGORIES[5..6]);
json["client_side"] = json!("optional");
json["server_side"] = json!("required");
json["license_id"] = json!("LGPL-3.0-or-later");
};
project_creation_futures.push(create_async_future(
id,
FRIEND_USER_PAT,
false,
Box::new(modify_json),
));
// Test project 7 (testing the search bug)
// This project has an initial forge version on 1.20.2, and a fabric version on 1.20.1 added later.
// This means that a search for fabric + 1.20.2 or forge + 1.20.1 should not return this project.
let id = 7;
let modify_json = |json: &mut serde_json::Value| {
json["categories"] = json!(DUMMY_CATEGORIES[5..6]);
json["client_side"] = json!("optional");
json["server_side"] = json!("required");
json["license_id"] = json!("LGPL-3.0-or-later");
json["initial_versions"][0]["loaders"] = json!(["forge"]);
json["initial_versions"][0]["game_versions"] = json!(["1.20.2"]);
};
project_creation_futures.push(create_async_future(
id,
USER_USER_PAT,
false,
Box::new(modify_json),
));
// Await all project creation
// Returns a mapping of:
// project id -> test id
let id_conversion: Arc<HashMap<u64, u64>> = Arc::new(
futures::future::join_all(project_creation_futures)
.await
.into_iter()
.collect(),
);
// Create a second version for project 7
let project_7 = api
.get_project_deserialized(&format!("{test_name}-searchable-project-7"), USER_USER_PAT)
.await;
api.add_public_version(
get_public_version_creation_data(project_7.id, "1.0.0", TestFile::build_random_jar()),
USER_USER_PAT,
)
.await;
// Pairs of:
// 1. vec of search facets
// 2. expected project ids to be returned by this search
let pairs = vec![
(json!([["categories:fabric"]]), vec![0, 1, 2, 3, 4, 5, 6, 7]),
(json!([["categories:forge"]]), vec![7]),
(
json!([["categories:fabric", "categories:forge"]]),
vec![0, 1, 2, 3, 4, 5, 6, 7],
),
(json!([["categories:fabric"], ["categories:forge"]]), vec![]),
(
json!([
["categories:fabric"],
[&format!("categories:{}", DUMMY_CATEGORIES[0])],
]),
vec![1, 2, 3, 4],
),
(json!([["project_type:modpack"]]), vec![4]),
(json!([["client_side:required"]]), vec![0, 2, 3]),
(json!([["server_side:required"]]), vec![0, 2, 3, 6, 7]),
(json!([["open_source:true"]]), vec![0, 1, 2, 4, 5, 6, 7]),
(json!([["license:MIT"]]), vec![1, 2, 4]),
(json!([[r#"title:'Mysterious Project'"#]]), vec![2, 3]),
(json!([["author:user"]]), vec![0, 1, 2, 4, 5, 7]),
(json!([["versions:1.20.5"]]), vec![4, 5]),
// bug fix
(
json!([
// Only the forge one has 1.20.2, so it's true that this project 'has'
// 1.20.2 and a fabric version, but not true that it has a 1.20.2 fabric version.
["categories:fabric"],
["versions:1.20.2"]
]),
vec![],
),
// Project type change
// Modpacks should still be searchable by their former loader, even though technically the loader is 'mrpack'
(json!([["categories:mrpack"]]), vec![4]),
(
json!([["categories:mrpack"], ["categories:fabric"]]),
vec![4],
),
(
json!([
["categories:mrpack"],
["categories:fabric"],
["project_type:modpack"]
]),
vec![4],
),
];
// TODO: versions, game versions
// Untested:
// - downloads (not varied)
// - color (not varied)
// - created_timestamp (not varied)
// - modified_timestamp (not varied)
// Forcibly reset the search index
let resp = api.reset_search_index().await;
assert_eq!(resp.status(), 204);
// Test searches
let stream = futures::stream::iter(pairs);
stream
.for_each_concurrent(1, |(facets, mut expected_project_ids)| {
let id_conversion = id_conversion.clone();
let test_name = test_name.clone();
async move {
let projects = api
.search_deserialized(Some(&test_name), Some(facets.clone()), USER_USER_PAT)
.await;
let mut found_project_ids: Vec<u64> = projects
.hits
.into_iter()
.map(|p| id_conversion[&parse_base62(&p.project_id).unwrap()])
.collect();
expected_project_ids.sort();
found_project_ids.sort();
assert_eq!(found_project_ids, expected_project_ids);
}
})
.await;
// Cleanup test db
test_env.cleanup().await;
}

tests/tags.rs Normal file
View File

@@ -0,0 +1,68 @@
use crate::common::environment::TestEnvironment;
use std::collections::HashSet;
mod common;
#[actix_rt::test]
async fn get_tags() {
let test_env = TestEnvironment::build(None).await;
let api = &test_env.v2;
let game_versions = api.get_game_versions_deserialized().await;
let loaders = api.get_loaders_deserialized().await;
let side_types = api.get_side_types_deserialized().await;
let categories = api.get_categories_deserialized().await;
    // These tests match the dummy data and will need to be updated if the dummy data changes.
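    // Collect into HashSets so the comparisons below ignore ordering.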
let game_version_versions = game_versions
.into_iter()
.map(|x| x.version)
.collect::<HashSet<_>>();
assert_eq!(
game_version_versions,
["1.20.1", "1.20.2", "1.20.3", "1.20.4", "1.20.5"]
.iter()
.map(|s| s.to_string())
.collect()
);
let loader_names = loaders.into_iter().map(|x| x.name).collect::<HashSet<_>>();
assert_eq!(
loader_names,
["fabric", "forge", "mrpack"]
.iter()
.map(|s| s.to_string())
.collect()
);
let side_type_names = side_types.into_iter().collect::<HashSet<_>>();
assert_eq!(
side_type_names,
["unknown", "required", "optional", "unsupported"]
.iter()
.map(|s| s.to_string())
.collect()
);
let category_names = categories
.into_iter()
.map(|x| x.name)
.collect::<HashSet<_>>();
assert_eq!(
category_names,
[
"combat",
"economy",
"food",
"optimization",
"decoration",
"mobs",
"magic"
]
.iter()
.map(|s| s.to_string())
.collect()
);
test_env.cleanup().await;
}

View File

@@ -3,7 +3,7 @@ use common::{
environment::with_test_environment,
};
use crate::common::{dummy_data::DummyJarFile, request_data::get_public_project_creation_data};
use crate::common::{dummy_data::TestFile, request_data::get_public_project_creation_data};
mod common;
@@ -25,7 +25,7 @@ pub async fn get_user_projects_after_creating_project_returns_new_project() {
let (project, _) = api
.add_public_project(
get_public_project_creation_data("slug", Some(DummyJarFile::BasicMod)),
get_public_project_creation_data("slug", Some(TestFile::BasicMod)),
USER_USER_PAT,
)
.await;
@@ -44,7 +44,7 @@ pub async fn get_user_projects_after_deleting_project_shows_removal() {
let api = test_env.v2;
let (project, _) = api
.add_public_project(
get_public_project_creation_data("iota", Some(DummyJarFile::BasicMod)),
get_public_project_creation_data("iota", Some(TestFile::BasicMod)),
USER_USER_PAT,
)
.await;

View File

@@ -1,11 +1,497 @@
use actix_web::test;
use common::environment::TestEnvironment;
use futures::StreamExt;
use labrinth::database::models::version_item::VERSIONS_NAMESPACE;
use labrinth::models::ids::base62_impl::parse_base62;
use labrinth::models::projects::{Loader, ProjectId, VersionId, VersionStatus, VersionType};
use labrinth::routes::v2::version_file::FileUpdateData;
use serde_json::json;
use crate::common::database::*;
use crate::common::dummy_data::TestFile;
use crate::common::request_data::get_public_version_creation_data;
// importing common module.
mod common;
#[actix_rt::test]
async fn test_get_version() {
// Test setup and dummy data
let test_env = TestEnvironment::build(None).await;
let api = &test_env.v2;
let alpha_project_id: &String = &test_env.dummy.as_ref().unwrap().project_alpha.project_id;
let alpha_version_id = &test_env.dummy.as_ref().unwrap().project_alpha.version_id;
let beta_version_id = &test_env.dummy.as_ref().unwrap().project_beta.version_id;
// Perform request on dummy data
let version = api
.get_version_deserialized(alpha_version_id, USER_USER_PAT)
.await;
assert_eq!(&version.project_id.to_string(), alpha_project_id);
assert_eq!(&version.id.to_string(), alpha_version_id);
let cached_project = test_env
.db
.redis_pool
.get::<String, _>(VERSIONS_NAMESPACE, parse_base62(alpha_version_id).unwrap())
.await
.unwrap()
.unwrap();
let cached_project: serde_json::Value = serde_json::from_str(&cached_project).unwrap();
assert_eq!(
cached_project["inner"]["project_id"],
json!(parse_base62(alpha_project_id).unwrap())
);
// Request should fail on non-existent version
let resp = api.get_version("false", USER_USER_PAT).await;
assert_eq!(resp.status(), 404);
// Similarly, request should fail on non-authorized user, on a yet-to-be-approved or hidden project, with a 404 (hiding the existence of the project)
    // TODO: the beta version should already be a draft in the dummy data, but there's a bug in finding it that way, so we set it to draft explicitly here
api.edit_version(
beta_version_id,
json!({
"status": "draft"
}),
USER_USER_PAT,
)
.await;
let resp = api.get_version(beta_version_id, USER_USER_PAT).await;
assert_eq!(resp.status(), 200);
let resp = api.get_version(beta_version_id, ENEMY_USER_PAT).await;
assert_eq!(resp.status(), 404);
// Cleanup test db
test_env.cleanup().await;
}
#[actix_rt::test]
async fn version_updates() {
// Test setup and dummy data
let test_env = TestEnvironment::build(None).await;
let api = &test_env.v2;
let alpha_project_id: &String = &test_env.dummy.as_ref().unwrap().project_alpha.project_id;
let alpha_version_id = &test_env.dummy.as_ref().unwrap().project_alpha.version_id;
let beta_version_id = &test_env.dummy.as_ref().unwrap().project_beta.version_id;
let alpha_version_hash = &test_env.dummy.as_ref().unwrap().project_alpha.file_hash;
let beta_version_hash = &test_env.dummy.as_ref().unwrap().project_beta.file_hash;
// Quick test, using get version from hash
let version = api
.get_version_from_hash_deserialized(alpha_version_hash, "sha1", USER_USER_PAT)
.await;
assert_eq!(&version.id.to_string(), alpha_version_id);
// Get versions from hash
let versions = api
.get_versions_from_hashes_deserialized(
&[alpha_version_hash.as_str(), beta_version_hash.as_str()],
"sha1",
USER_USER_PAT,
)
.await;
assert_eq!(versions.len(), 2);
assert_eq!(
&versions[alpha_version_hash].id.to_string(),
alpha_version_id
);
assert_eq!(&versions[beta_version_hash].id.to_string(), beta_version_id);
    // When there is only one version, there is nothing newer to update to, so the update endpoints should return that same version
let version = api
.get_update_from_hash_deserialized(
alpha_version_hash,
"sha1",
None,
None,
None,
USER_USER_PAT,
)
.await;
assert_eq!(&version.id.to_string(), alpha_version_id);
let versions = api
.update_files_deserialized(
"sha1",
vec![alpha_version_hash.to_string()],
None,
None,
None,
USER_USER_PAT,
)
.await;
assert_eq!(versions.len(), 1);
assert_eq!(
&versions[alpha_version_hash].id.to_string(),
alpha_version_id
);
// Add 3 new versions, 1 before, and 2 after, with differing game_version/version_types/loaders
let mut update_ids = vec![];
for (version_number, patch_value) in [
(
"0.9.9",
json!({
"game_versions": ["1.20.1"],
}),
),
(
"1.5.0",
json!({
"game_versions": ["1.20.3"],
"loaders": ["fabric"],
}),
),
(
"1.5.1",
json!({
"game_versions": ["1.20.4"],
"loaders": ["forge"],
"version_type": "beta"
}),
),
]
.iter()
{
let version = api
.add_public_version(
get_public_version_creation_data(
ProjectId(parse_base62(alpha_project_id).unwrap()),
version_number,
TestFile::build_random_jar(),
),
USER_USER_PAT,
)
.await;
update_ids.push(version.id);
// Patch using json
api.edit_version(&version.id.to_string(), patch_value.clone(), USER_USER_PAT)
.await;
}
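    // Helper: queries the three update endpoints (get_update_from_hash, update_files, and
    // update_individual_files) with the given loader/game-version/version-type filters against
    // the alpha version's hash, and asserts that each resolves to the expected version, or to a
    // 404/empty result when no matching version is expected.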
let check_expected = |game_versions: Option<Vec<String>>,
loaders: Option<Vec<String>>,
version_types: Option<Vec<String>>,
result_id: Option<VersionId>| async move {
let (success, result_id) = match result_id {
Some(id) => (true, id),
None => (false, VersionId(0)),
};
// get_update_from_hash
let resp = api
.get_update_from_hash(
alpha_version_hash,
"sha1",
loaders.clone(),
game_versions.clone(),
version_types.clone(),
USER_USER_PAT,
)
.await;
if success {
assert_eq!(resp.status(), 200);
let body: serde_json::Value = test::read_body_json(resp).await;
let id = body["id"].as_str().unwrap();
assert_eq!(id, &result_id.to_string());
} else {
assert_eq!(resp.status(), 404);
}
// update_files
let versions = api
.update_files_deserialized(
"sha1",
vec![alpha_version_hash.to_string()],
loaders.clone(),
game_versions.clone(),
version_types.clone(),
USER_USER_PAT,
)
.await;
if success {
assert_eq!(versions.len(), 1);
let first = versions.iter().next().unwrap();
assert_eq!(first.1.id, result_id);
} else {
assert_eq!(versions.len(), 0);
}
// update_individual_files
let hashes = vec![FileUpdateData {
hash: alpha_version_hash.to_string(),
loaders,
game_versions,
version_types: version_types.map(|v| {
v.into_iter()
.map(|v| serde_json::from_str(&format!("\"{v}\"")).unwrap())
.collect()
}),
}];
let versions = api
.update_individual_files_deserialized("sha1", hashes, USER_USER_PAT)
.await;
if success {
assert_eq!(versions.len(), 1);
let first = versions.iter().next().unwrap();
assert_eq!(first.1.id, result_id);
} else {
assert_eq!(versions.len(), 0);
}
};
let tests = vec![
check_expected(
Some(vec!["1.20.1".to_string()]),
None,
None,
Some(update_ids[0]),
),
check_expected(
Some(vec!["1.20.3".to_string()]),
None,
None,
Some(update_ids[1]),
),
check_expected(
Some(vec!["1.20.4".to_string()]),
None,
None,
Some(update_ids[2]),
),
// Loader restrictions
check_expected(
None,
Some(vec!["fabric".to_string()]),
None,
Some(update_ids[1]),
),
check_expected(
None,
Some(vec!["forge".to_string()]),
None,
Some(update_ids[2]),
),
// Version type restrictions
check_expected(
None,
None,
Some(vec!["release".to_string()]),
Some(update_ids[1]),
),
check_expected(
None,
None,
Some(vec!["beta".to_string()]),
Some(update_ids[2]),
),
// Specific combination
check_expected(
None,
Some(vec!["fabric".to_string()]),
Some(vec!["release".to_string()]),
Some(update_ids[1]),
),
// Impossible combination
check_expected(
None,
Some(vec!["fabric".to_string()]),
Some(vec!["beta".to_string()]),
None,
),
        // No restrictions: should return the most recently added version
check_expected(None, None, None, Some(update_ids[2])),
];
// Wait on all tests, 4 at a time
futures::stream::iter(tests)
.buffer_unordered(4)
.collect::<Vec<_>>()
.await;
// We do a couple small tests for get_project_versions_deserialized as well
// TODO: expand this more.
let versions = api
.get_project_versions_deserialized(
alpha_project_id,
None,
None,
None,
None,
None,
None,
USER_USER_PAT,
)
.await;
assert_eq!(versions.len(), 4);
let versions = api
.get_project_versions_deserialized(
alpha_project_id,
None,
Some(vec!["forge".to_string()]),
None,
None,
None,
None,
USER_USER_PAT,
)
.await;
assert_eq!(versions.len(), 1);
// Cleanup test db
test_env.cleanup().await;
}
#[actix_rt::test]
pub async fn test_patch_version() {
let test_env = TestEnvironment::build(None).await;
let api = &test_env.v2;
let alpha_version_id = &test_env.dummy.as_ref().unwrap().project_alpha.version_id;
    // First, we do some patch requests that should fail.
    // Failure because the user is not authorized.
let resp = api
.edit_version(
alpha_version_id,
json!({
"name": "test 1",
}),
ENEMY_USER_PAT,
)
.await;
assert_eq!(resp.status(), 401);
// Failure because these are illegal requested statuses for a normal user.
for req in ["unknown", "scheduled"] {
let resp = api
.edit_version(
alpha_version_id,
json!({
"status": req,
                    // requested status is not set here, but in /schedule
}),
USER_USER_PAT,
)
.await;
assert_eq!(resp.status(), 400);
}
    // Successful request to patch many fields.
let resp = api
.edit_version(
alpha_version_id,
json!({
"name": "new version name",
"version_number": "1.3.0",
"changelog": "new changelog",
"version_type": "beta",
// // "dependencies": [], TODO: test this
"game_versions": ["1.20.5"],
"loaders": ["forge"],
"featured": false,
// "primary_file": [], TODO: test this
// // "downloads": 0, TODO: moderator exclusive
"status": "draft",
// // "filetypes": ["jar"], TODO: test this
}),
USER_USER_PAT,
)
.await;
assert_eq!(resp.status(), 204);
let version = api
.get_version_deserialized(alpha_version_id, USER_USER_PAT)
.await;
assert_eq!(version.name, "new version name");
assert_eq!(version.version_number, "1.3.0");
assert_eq!(version.changelog, "new changelog");
assert_eq!(
version.version_type,
serde_json::from_str::<VersionType>("\"beta\"").unwrap()
);
assert_eq!(version.game_versions, vec!["1.20.5"]);
assert_eq!(version.loaders, vec![Loader("forge".to_string())]);
assert!(!version.featured);
assert_eq!(version.status, VersionStatus::from_string("draft"));
    // These check the v2 -> v3 rerouting: we ensure that patching only 'game_versions'
    // works as expected, as well as patching only 'loaders'
let resp = api
.edit_version(
alpha_version_id,
json!({
"game_versions": ["1.20.1", "1.20.2", "1.20.4"],
}),
USER_USER_PAT,
)
.await;
assert_eq!(resp.status(), 204);
let version = api
.get_version_deserialized(alpha_version_id, USER_USER_PAT)
.await;
assert_eq!(version.game_versions, vec!["1.20.1", "1.20.2", "1.20.4"]);
assert_eq!(version.loaders, vec![Loader("forge".to_string())]); // From last patch
let resp = api
.edit_version(
alpha_version_id,
json!({
"loaders": ["fabric"],
}),
USER_USER_PAT,
)
.await;
assert_eq!(resp.status(), 204);
let version = api
.get_version_deserialized(alpha_version_id, USER_USER_PAT)
.await;
assert_eq!(version.game_versions, vec!["1.20.1", "1.20.2", "1.20.4"]); // From last patch
assert_eq!(version.loaders, vec![Loader("fabric".to_string())]);
// Cleanup test db
test_env.cleanup().await;
}
#[actix_rt::test]
pub async fn test_project_versions() {
let test_env = TestEnvironment::build(None).await;
let api = &test_env.v2;
let alpha_project_id: &String = &test_env.dummy.as_ref().unwrap().project_alpha.project_id;
let alpha_version_id = &test_env.dummy.as_ref().unwrap().project_alpha.version_id;
let _beta_version_id = &test_env.dummy.as_ref().unwrap().project_beta.version_id;
let _alpha_version_hash = &test_env.dummy.as_ref().unwrap().project_alpha.file_hash;
let _beta_version_hash = &test_env.dummy.as_ref().unwrap().project_beta.file_hash;
let versions = api
.get_project_versions_deserialized(
alpha_project_id,
None,
None,
None,
None,
None,
None,
USER_USER_PAT,
)
.await;
assert_eq!(versions.len(), 1);
assert_eq!(&versions[0].id.to_string(), alpha_version_id);
test_env.cleanup().await;
}
use crate::common::{asserts::assert_status, get_json_val_str};
use actix_http::StatusCode;
use common::{
asserts::assert_version_ids, database::USER_USER_PAT, environment::with_test_environment,
};
mod common;
#[actix_rt::test]
async fn can_create_version_with_ordering() {
with_test_environment(|env| async move {