forked from didirus/AstralRinth
Implement replied/unreplied filter for tech review (#5006)
* Implement replied/unreplied filter for tech review
* project status filter
* wip: issue type filter
* correct filter field
* wip: break up tech review query
* Improve tech review query
* Get tech review working more properly
* Add get single project reports endpoint
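For orientation, here is a hedged sketch of what a search request using the new filter fields might look like. Only the filter field names (project_type, replied_to, project_status, issue_type) and the snake_case values of RepliedTo come from this diff; the endpoint path, the surrounding body shape, and the example status value are assumptions.

use serde_json::json;

fn main() {
    // Hypothetical request body for the tech-review search endpoint.
    let body = json!({
        "filter": {
            "replied_to": "unreplied",        // RepliedTo, serialized as snake_case
            "project_status": ["processing"], // assumed example status value
            "issue_type": [],                 // empty list = filter disabled
            "project_type": []
        }
    });
    println!("{body}");
}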
apps/labrinth/.sqlx/query-10e2a3b31ba94b93ed2d6c9753a5aabf13190a0b336089e6521022069813cf17.json (generated, new file, 52 lines)
@@ -0,0 +1,52 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT\n id AS \"report_id!: DelphiReportId\",\n file_id AS \"file_id!: DBFileId\",\n created,\n severity AS \"severity!: DelphiSeverity\"\n FROM delphi_reports\n WHERE file_id = ANY($1::bigint[])\n ",
  "describe": {
    "columns": [
      {
        "ordinal": 0,
        "name": "report_id!: DelphiReportId",
        "type_info": "Int8"
      },
      {
        "ordinal": 1,
        "name": "file_id!: DBFileId",
        "type_info": "Int8"
      },
      {
        "ordinal": 2,
        "name": "created",
        "type_info": "Timestamptz"
      },
      {
        "ordinal": 3,
        "name": "severity!: DelphiSeverity",
        "type_info": {
          "Custom": {
            "name": "delphi_severity",
            "kind": {
              "Enum": [
                "low",
                "medium",
                "high",
                "severe"
              ]
            }
          }
        }
      }
    ],
    "parameters": {
      "Left": [
        "Int8Array"
      ]
    },
    "nullable": [
      false,
      true,
      false,
      false
    ]
  },
  "hash": "10e2a3b31ba94b93ed2d6c9753a5aabf13190a0b336089e6521022069813cf17"
}
apps/labrinth/.sqlx/query-50d087248322373b2d5fc40e1c7b8a855b55d907d7fba0af0738c37366cb93e0.json (generated, new file, 22 lines)
@@ -0,0 +1,22 @@
{
  "db_name": "PostgreSQL",
  "query": "SELECT id FROM versions WHERE mod_id = ANY($1::bigint[])",
  "describe": {
    "columns": [
      {
        "ordinal": 0,
        "name": "id",
        "type_info": "Int8"
      }
    ],
    "parameters": {
      "Left": [
        "Int8Array"
      ]
    },
    "nullable": [
      false
    ]
  },
  "hash": "50d087248322373b2d5fc40e1c7b8a855b55d907d7fba0af0738c37366cb93e0"
}
apps/labrinth/.sqlx/query-632b79f51dd39a75ebff838ca4beab463f84aa32246288cc251881ec41d207f5.json (generated, new file, 34 lines)
@@ -0,0 +1,34 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT\n id AS \"id: DelphiReportIssueId\",\n report_id AS \"report_id: DelphiReportId\",\n issue_type\n FROM delphi_report_issues\n WHERE report_id = ANY($1::bigint[])\n ",
  "describe": {
    "columns": [
      {
        "ordinal": 0,
        "name": "id: DelphiReportIssueId",
        "type_info": "Int8"
      },
      {
        "ordinal": 1,
        "name": "report_id: DelphiReportId",
        "type_info": "Int8"
      },
      {
        "ordinal": 2,
        "name": "issue_type",
        "type_info": "Text"
      }
    ],
    "parameters": {
      "Left": [
        "Int8Array"
      ]
    },
    "nullable": [
      false,
      false,
      false
    ]
  },
  "hash": "632b79f51dd39a75ebff838ca4beab463f84aa32246288cc251881ec41d207f5"
}
File diff suppressed because one or more lines are too long
apps/labrinth/.sqlx/query-80b52a09ca9a056251d1040936f768c266e5814c15638d455f569deed13ee7d0.json (generated, new file, 81 lines)
@@ -0,0 +1,81 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT\n drid.id AS \"id!: DelphiReportIssueDetailsId\",\n drid.issue_id AS \"issue_id!: DelphiReportIssueId\",\n drid.key AS \"key!: String\",\n drid.file_path AS \"file_path!: String\",\n drid.data AS \"data!: sqlx::types::Json<HashMap<String, serde_json::Value>>\",\n drid.severity AS \"severity!: DelphiSeverity\",\n COALESCE(didv.verdict, 'pending'::delphi_report_issue_status) AS \"status!: DelphiStatus\"\n FROM delphi_report_issue_details drid\n INNER JOIN delphi_report_issues dri ON dri.id = drid.issue_id\n INNER JOIN delphi_reports dr ON dr.id = dri.report_id\n INNER JOIN files f ON f.id = dr.file_id\n INNER JOIN versions v ON v.id = f.version_id\n INNER JOIN mods m ON m.id = v.mod_id\n LEFT JOIN delphi_issue_detail_verdicts didv\n ON m.id = didv.project_id AND drid.key = didv.detail_key\n WHERE drid.issue_id = ANY($1::bigint[])\n ",
  "describe": {
    "columns": [
      {
        "ordinal": 0,
        "name": "id!: DelphiReportIssueDetailsId",
        "type_info": "Int8"
      },
      {
        "ordinal": 1,
        "name": "issue_id!: DelphiReportIssueId",
        "type_info": "Int8"
      },
      {
        "ordinal": 2,
        "name": "key!: String",
        "type_info": "Text"
      },
      {
        "ordinal": 3,
        "name": "file_path!: String",
        "type_info": "Text"
      },
      {
        "ordinal": 4,
        "name": "data!: sqlx::types::Json<HashMap<String, serde_json::Value>>",
        "type_info": "Jsonb"
      },
      {
        "ordinal": 5,
        "name": "severity!: DelphiSeverity",
        "type_info": {
          "Custom": {
            "name": "delphi_severity",
            "kind": {
              "Enum": [
                "low",
                "medium",
                "high",
                "severe"
              ]
            }
          }
        }
      },
      {
        "ordinal": 6,
        "name": "status!: DelphiStatus",
        "type_info": {
          "Custom": {
            "name": "delphi_report_issue_status",
            "kind": {
              "Enum": [
                "pending",
                "safe",
                "unsafe"
              ]
            }
          }
        }
      }
    ],
    "parameters": {
      "Left": [
        "Int8Array"
      ]
    },
    "nullable": [
      false,
      false,
      false,
      false,
      false,
      false,
      null
    ]
  },
  "hash": "80b52a09ca9a056251d1040936f768c266e5814c15638d455f569deed13ee7d0"
}
apps/labrinth/.sqlx/query-8e38571ffae8bf246973d328c890a45b87c0f71d53cf46f390c4b286677b2b31.json (generated, new file, 46 lines)
@@ -0,0 +1,46 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT\n id AS \"file_id: DBFileId\",\n version_id AS \"version_id: DBVersionId\",\n url,\n filename,\n size\n FROM files\n WHERE version_id = ANY($1::bigint[])\n ",
  "describe": {
    "columns": [
      {
        "ordinal": 0,
        "name": "file_id: DBFileId",
        "type_info": "Int8"
      },
      {
        "ordinal": 1,
        "name": "version_id: DBVersionId",
        "type_info": "Int8"
      },
      {
        "ordinal": 2,
        "name": "url",
        "type_info": "Varchar"
      },
      {
        "ordinal": 3,
        "name": "filename",
        "type_info": "Varchar"
      },
      {
        "ordinal": 4,
        "name": "size",
        "type_info": "Int4"
      }
    ],
    "parameters": {
      "Left": [
        "Int8Array"
      ]
    },
    "nullable": [
      false,
      false,
      false,
      false,
      false
    ]
  },
  "hash": "8e38571ffae8bf246973d328c890a45b87c0f71d53cf46f390c4b286677b2b31"
}
apps/labrinth/.sqlx/query-e01fc27d68a5c41a56700508633881813c2bdb753e1afa5ca6072ca2b04c09d5.json (generated, new file, 22 lines)
@@ -0,0 +1,22 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT t.id AS \"thread_id: DBThreadId\"\n FROM threads t\n WHERE t.mod_id = $1\n ",
  "describe": {
    "columns": [
      {
        "ordinal": 0,
        "name": "thread_id: DBThreadId",
        "type_info": "Int8"
      }
    ],
    "parameters": {
      "Left": [
        "Int8"
      ]
    },
    "nullable": [
      false
    ]
  },
  "hash": "e01fc27d68a5c41a56700508633881813c2bdb753e1afa5ca6072ca2b04c09d5"
}
apps/labrinth/.sqlx/query-f10a09a0fb0774dad4933e78db94bfb231020b356edbc58bdb6c5a11ad0fb4ac.json (generated, new file, 34 lines)
@@ -0,0 +1,34 @@
{
  "db_name": "PostgreSQL",
  "query": "\n SELECT DISTINCT ON (m.id)\n m.id AS \"project_id: DBProjectId\",\n t.id AS \"thread_id: DBThreadId\"\n FROM mods m\n INNER JOIN threads t ON t.mod_id = m.id\n INNER JOIN versions v ON v.mod_id = m.id\n INNER JOIN files f ON f.version_id = v.id\n INNER JOIN delphi_reports dr ON dr.file_id = f.id\n INNER JOIN delphi_report_issues dri ON dri.report_id = dr.id\n INNER JOIN delphi_report_issue_details drid\n ON drid.issue_id = dri.id\n LEFT JOIN delphi_issue_detail_verdicts didv\n ON m.id = didv.project_id AND drid.key = didv.detail_key\n LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id\n LEFT JOIN categories c ON c.id = mc.joining_category_id\n LEFT JOIN threads_messages tm_last\n ON tm_last.thread_id = t.id\n AND tm_last.id = (\n SELECT id FROM threads_messages\n WHERE thread_id = t.id\n ORDER BY created DESC\n LIMIT 1\n )\n LEFT JOIN users u_last\n ON u_last.id = tm_last.author_id\n WHERE\n (cardinality($4::int[]) = 0 OR c.project_type = ANY($4::int[]))\n AND m.status NOT IN ('draft', 'rejected', 'withheld')\n AND (cardinality($6::text[]) = 0 OR m.status = ANY($6::text[]))\n AND (cardinality($7::text[]) = 0 OR dri.issue_type = ANY($7::text[]))\n AND (didv.verdict IS NULL OR didv.verdict = 'pending'::delphi_report_issue_status)\n AND (\n $5::text IS NULL\n OR ($5::text = 'unreplied' AND (tm_last.id IS NULL OR u_last.role IS NULL OR u_last.role NOT IN ('moderator', 'admin')))\n OR ($5::text = 'replied' AND tm_last.id IS NOT NULL AND u_last.role IS NOT NULL AND u_last.role IN ('moderator', 'admin'))\n )\n GROUP BY m.id, t.id\n ORDER BY m.id,\n CASE WHEN $3 = 'created_asc' THEN MIN(dr.created) ELSE TO_TIMESTAMP(0) END ASC,\n CASE WHEN $3 = 'created_desc' THEN MAX(dr.created) ELSE TO_TIMESTAMP(0) END DESC,\n CASE WHEN $3 = 'severity_asc' THEN MAX(dr.severity) ELSE 'low'::delphi_severity END ASC,\n CASE WHEN $3 = 'severity_desc' THEN MAX(dr.severity) ELSE 'low'::delphi_severity END DESC\n LIMIT $1 OFFSET $2\n ",
  "describe": {
    "columns": [
      {
        "ordinal": 0,
        "name": "project_id: DBProjectId",
        "type_info": "Int8"
      },
      {
        "ordinal": 1,
        "name": "thread_id: DBThreadId",
        "type_info": "Int8"
      }
    ],
    "parameters": {
      "Left": [
        "Int8",
        "Int8",
        "Text",
        "Int4Array",
        "Text",
        "TextArray",
        "TextArray"
      ]
    },
    "nullable": [
      false,
      false
    ]
  },
  "hash": "f10a09a0fb0774dad4933e78db94bfb231020b356edbc58bdb6c5a11ad0fb4ac"
}
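A note on the .sqlx files above: they are sqlx's offline query metadata, keyed by a hash of the query text, which the query! macros check against at compile time (typically regenerated with `cargo sqlx prepare`). As a loose illustration of the thread-lookup query captured in the query-e01fc27d… file, a runtime-checked equivalent might look like the sketch below; the compile-time-checked version with the custom DBThreadId type is what the diff actually uses.

use sqlx::PgPool;

// Runtime-checked stand-in for the prepared thread-lookup query above;
// the real route uses sqlx::query! with the DBThreadId wrapper type.
async fn thread_id_for_project(
    pool: &PgPool,
    project_id: i64,
) -> sqlx::Result<Option<i64>> {
    sqlx::query_scalar::<_, i64>(
        "SELECT t.id FROM threads t WHERE t.mod_id = $1",
    )
    .bind(project_id)
    .fetch_optional(pool)
    .await
}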
@@ -61,6 +61,8 @@ impl DBDelphiReport {
    Copy,
    PartialEq,
    Eq,
    PartialOrd,
    Ord,
    Hash,
    sqlx::Type,
    utoipa::ToSchema,
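This hunk adds two derives to the list (the diff does not mark which ones, though PartialOrd/Ord fit the new severity sorting). Assuming the derive sits on the DelphiSeverity enum whose variants appear in the .sqlx metadata above (low, medium, high, severe), ordering by declaration order is what lets later code take a "worst severity" with Iterator::max. A stand-in sketch, not the real type:

// Stand-in for DelphiSeverity; the real enum lives in delphi_report_item.rs.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
enum Severity {
    Low,
    Medium,
    High,
    Severe,
}

fn main() {
    // Ord derived from declaration order makes "worst severity" a plain max().
    let worst = [Severity::Low, Severity::Severe, Severity::Medium]
        .into_iter()
        .max();
    assert_eq!(worst, Some(Severity::Severe));
}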
@@ -327,7 +327,7 @@ pub async fn run(
        .send()
        .await
        .and_then(|res| res.error_for_status())
        .map_err(ApiError::Delphi)?;
        .map_err(ApiError::delphi)?;

    Ok(HttpResponse::NoContent().finish())
}
@@ -411,10 +411,10 @@ async fn issue_type_schema(
                .send()
                .await
                .and_then(|res| res.error_for_status())
                .map_err(ApiError::Delphi)?
                .map_err(ApiError::delphi)?
                .json::<serde_json::Map<String, serde_json::Value>>()
                .await
                .map_err(ApiError::Delphi)?,
                .map_err(ApiError::delphi)?,
            Instant::now(),
        ))
        .0,
@@ -2,9 +2,9 @@ use std::{collections::HashMap, fmt};
use actix_web::{HttpRequest, get, patch, post, put, web};
use chrono::{DateTime, Utc};
use itertools::Itertools;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use tokio_stream::StreamExt;

use super::ownership::get_projects_ownership;
use crate::{
@@ -12,14 +12,15 @@ use crate::{
    database::{
        DBProject,
        models::{
            DBFileId, DBProjectId, DBThread, DBThreadId, DBUser,
            DelphiReportId, DelphiReportIssueDetailsId, DelphiReportIssueId,
            ProjectTypeId,
            DBFileId, DBProjectId, DBThread, DBThreadId, DBUser, DBVersion,
            DBVersionId, DelphiReportId, DelphiReportIssueDetailsId,
            DelphiReportIssueId, ProjectTypeId,
            delphi_report_item::{
                DBDelphiReport, DelphiSeverity, DelphiStatus, DelphiVerdict,
                ReportIssueDetail,
            },
            thread_item::ThreadMessageBuilder,
            version_item::VersionQueryResult,
        },
        redis::RedisPool,
    },
@@ -37,6 +38,7 @@ use eyre::eyre;
pub fn config(cfg: &mut utoipa_actix_web::service_config::ServiceConfig) {
    cfg.service(search_projects)
        .service(get_project_report)
        .service(get_report)
        .service(get_issue)
        .service(submit_report)
@@ -69,7 +71,36 @@ fn default_sort_by() -> SearchProjectsSort {
#[derive(Debug, Clone, Default, Serialize, Deserialize, utoipa::ToSchema)]
pub struct SearchProjectsFilter {
    #[serde(default)]
    pub project_type: Vec<ProjectTypeId>,
    #[serde(default)]
    pub replied_to: Option<RepliedTo>,
    #[serde(default)]
    pub project_status: Vec<ProjectStatus>,
    #[serde(default)]
    pub issue_type: Vec<String>,
}

/// Filter by whether a moderator has replied to the last message in the
/// project's moderation thread.
#[derive(
    Debug,
    Clone,
    Copy,
    PartialEq,
    Eq,
    Hash,
    Serialize,
    Deserialize,
    utoipa::ToSchema,
)]
#[serde(rename_all = "snake_case")]
pub enum RepliedTo {
    /// Last message in the thread is from a moderator, indicating a moderator
    /// has replied to it.
    Replied,
    /// Last message in the thread is not from a moderator.
    Unreplied,
}

#[derive(
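A minimal round-trip sketch of the RepliedTo wire format implied by #[serde(rename_all = "snake_case")]; a local stand-in enum is used so the snippet is self-contained (serde and serde_json assumed as dependencies).

use serde::{Deserialize, Serialize};

// Stand-in mirroring the RepliedTo enum added above.
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
enum RepliedTo {
    Replied,
    Unreplied,
}

fn main() -> serde_json::Result<()> {
    assert_eq!(serde_json::to_string(&RepliedTo::Unreplied)?, "\"unreplied\"");
    assert_eq!(
        serde_json::from_str::<RepliedTo>("\"replied\"")?,
        RepliedTo::Replied
    );
    Ok(())
}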
@@ -316,6 +347,15 @@ pub struct SearchResponse {
    pub ownership: HashMap<ProjectId, Ownership>,
}

/// Response for a single project's technical review report.
#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)]
pub struct ProjectReportResponse {
    /// The project's technical review report.
    pub project_report: Option<ProjectReport>,
    /// The moderation thread for this project.
    pub thread: Thread,
}

/// Single project's reports from a search response.
#[derive(Debug, Serialize, Deserialize, utoipa::ToSchema)]
pub struct ProjectReport {
@@ -355,6 +395,268 @@ pub struct ProjectModerationInfo {
    pub icon_url: Option<String>,
}

async fn fetch_project_reports(
    project_ids: &[DBProjectId],
    pool: &PgPool,
    redis: &RedisPool,
) -> Result<Vec<ProjectReport>, ApiError> {
    struct FileRow {
        file_id: DBFileId,
        version_id: DBVersionId,
        url: String,
        filename: String,
        size: i32,
    }

    struct DelphiReportRow {
        report_id: DelphiReportId,
        file_id: DBFileId,
        created: DateTime<Utc>,
        severity: DelphiSeverity,
    }

    struct DelphiReportIssueRow {
        id: DelphiReportIssueId,
        report_id: DelphiReportId,
        issue_type: String,
    }

    let version_id_rows = sqlx::query!(
        "SELECT id FROM versions WHERE mod_id = ANY($1::bigint[])",
        &project_ids.iter().map(|id| id.0).collect::<Vec<_>>()
    )
    .fetch_all(pool)
    .await
    .wrap_internal_err("failed to fetch version ids")?;

    let version_ids: Vec<DBVersionId> = version_id_rows
        .into_iter()
        .map(|r| DBVersionId(r.id))
        .collect();

    let versions = DBVersion::get_many(&version_ids, pool, redis)
        .await
        .wrap_internal_err("failed to fetch versions")?;

    let file_rows = sqlx::query!(
        r#"
        SELECT
            id AS "file_id: DBFileId",
            version_id AS "version_id: DBVersionId",
            url,
            filename,
            size
        FROM files
        WHERE version_id = ANY($1::bigint[])
        "#,
        &version_ids.iter().map(|id| id.0).collect::<Vec<_>>()
    )
    .fetch_all(pool)
    .await
    .wrap_internal_err("failed to fetch files")?;

    let report_rows = sqlx::query!(
        r#"
        SELECT
            id AS "report_id!: DelphiReportId",
            file_id AS "file_id!: DBFileId",
            created,
            severity AS "severity!: DelphiSeverity"
        FROM delphi_reports
        WHERE file_id = ANY($1::bigint[])
        "#,
        &file_rows.iter().map(|f| f.file_id.0).collect::<Vec<_>>()
    )
    .fetch_all(pool)
    .await
    .wrap_internal_err("failed to fetch delphi reports")?;

    let issue_rows = sqlx::query!(
        r#"
        SELECT
            id AS "id: DelphiReportIssueId",
            report_id AS "report_id: DelphiReportId",
            issue_type
        FROM delphi_report_issues
        WHERE report_id = ANY($1::bigint[])
        "#,
        &report_rows
            .iter()
            .map(|r| r.report_id.0)
            .collect::<Vec<_>>()
    )
    .fetch_all(pool)
    .await
    .wrap_internal_err("failed to fetch delphi report issues")?;

    let issue_ids: Vec<DelphiReportIssueId> =
        issue_rows.iter().map(|i| i.id).collect();

    let detail_rows = sqlx::query!(
        r#"
        SELECT
            drid.id AS "id!: DelphiReportIssueDetailsId",
            drid.issue_id AS "issue_id!: DelphiReportIssueId",
            drid.key AS "key!: String",
            drid.file_path AS "file_path!: String",
            drid.data AS "data!: sqlx::types::Json<HashMap<String, serde_json::Value>>",
            drid.severity AS "severity!: DelphiSeverity",
            COALESCE(didv.verdict, 'pending'::delphi_report_issue_status) AS "status!: DelphiStatus"
        FROM delphi_report_issue_details drid
        INNER JOIN delphi_report_issues dri ON dri.id = drid.issue_id
        INNER JOIN delphi_reports dr ON dr.id = dri.report_id
        INNER JOIN files f ON f.id = dr.file_id
        INNER JOIN versions v ON v.id = f.version_id
        INNER JOIN mods m ON m.id = v.mod_id
        LEFT JOIN delphi_issue_detail_verdicts didv
            ON m.id = didv.project_id AND drid.key = didv.detail_key
        WHERE drid.issue_id = ANY($1::bigint[])
        "#,
        &issue_ids.iter().map(|i| i.0).collect::<Vec<_>>()
    )
    .fetch_all(pool)
    .await
    .wrap_internal_err("failed to fetch delphi issue details")?;

    let versions_by_project: HashMap<DBProjectId, Vec<VersionQueryResult>> =
        versions
            .into_iter()
            .into_group_map_by(|v| v.inner.project_id);

    let files_by_version: HashMap<DBVersionId, Vec<FileRow>> = file_rows
        .into_iter()
        .map(|r| FileRow {
            file_id: r.file_id,
            version_id: r.version_id,
            url: r.url,
            filename: r.filename,
            size: r.size,
        })
        .into_group_map_by(|f| f.version_id);

    let reports_by_file: HashMap<DBFileId, Vec<DelphiReportRow>> = report_rows
        .into_iter()
        .map(|r| DelphiReportRow {
            report_id: r.report_id,
            file_id: r.file_id,
            created: r.created,
            severity: r.severity,
        })
        .into_group_map_by(|r| r.file_id);

    let issues_by_report: HashMap<DelphiReportId, Vec<DelphiReportIssueRow>> =
        issue_rows
            .into_iter()
            .map(|i| DelphiReportIssueRow {
                id: i.id,
                report_id: i.report_id,
                issue_type: i.issue_type,
            })
            .into_group_map_by(|i| i.report_id);

    let details_by_issue: HashMap<DelphiReportIssueId, Vec<ReportIssueDetail>> =
        detail_rows
            .into_iter()
            .map(|d| ReportIssueDetail {
                id: d.id,
                issue_id: d.issue_id,
                key: d.key,
                file_path: d.file_path,
                decompiled_source: None,
                data: d.data.0,
                severity: d.severity,
                status: d.status,
            })
            .into_group_map_by(|d| d.issue_id);

    let empty_versions: Vec<VersionQueryResult> = vec![];
    let empty_files: Vec<FileRow> = vec![];
    let empty_reports: Vec<DelphiReportRow> = vec![];
    let empty_issues: Vec<DelphiReportIssueRow> = vec![];
    let empty_details: Vec<ReportIssueDetail> = vec![];

    let mut project_reports = Vec::<ProjectReport>::new();

    for project_id in project_ids {
        let project_versions = versions_by_project
            .get(project_id)
            .unwrap_or(&empty_versions);

        let mut version_reports = Vec::new();

        for version_query in project_versions {
            let version_files = files_by_version
                .get(&version_query.inner.id)
                .unwrap_or(&empty_files);

            let mut file_reports = Vec::new();

            for file_row in version_files {
                let report_list = reports_by_file
                    .get(&file_row.file_id)
                    .unwrap_or(&empty_reports);

                for report_row in report_list {
                    let report_issues = issues_by_report
                        .get(&report_row.report_id)
                        .unwrap_or(&empty_issues);

                    let mut file_issues = Vec::new();

                    for issue_row in report_issues {
                        if issue_row.issue_type == "__dummy" {
                            continue;
                        }

                        let issue_details = details_by_issue
                            .get(&issue_row.id)
                            .unwrap_or(&empty_details);

                        file_issues.push(FileIssue {
                            id: issue_row.id,
                            report_id: issue_row.report_id,
                            issue_type: issue_row.issue_type.clone(),
                            details: issue_details.clone(),
                        });
                    }

                    file_reports.push(FileReport {
                        report_id: report_row.report_id,
                        file_id: FileId::from(file_row.file_id),
                        created: report_row.created,
                        flag_reason: FlagReason::Delphi,
                        severity: report_row.severity,
                        file_name: file_row.filename.clone(),
                        file_size: file_row.size,
                        download_url: file_row.url.clone(),
                        issues: file_issues,
                    });
                }
            }

            version_reports.push(VersionReport {
                version_id: VersionId::from(version_query.inner.id),
                files: file_reports,
            });
        }

        let max_severity = version_reports
            .iter()
            .flat_map(|vr| vr.files.iter())
            .map(|fr| fr.severity)
            .max();
        let project_report = ProjectReport {
            project_id: ProjectId::from(*project_id),
            max_severity,
            versions: version_reports,
        };

        project_reports.push(project_report);
    }

    Ok(project_reports)
}

/// Searches all projects which are awaiting technical review.
#[utoipa::path(
    security(("bearer_auth" = [])),
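fetch_project_reports batches one query per table and then re-nests the flat rows in memory; the workhorse is itertools' into_group_map_by, which folds an iterator into a HashMap<K, Vec<V>>. A toy sketch with stand-in types (the real rows come from the sqlx::query! calls above):

use itertools::Itertools;
use std::collections::HashMap;

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct VersionId(i64);

#[derive(Debug)]
struct FileRow {
    version_id: VersionId,
    filename: &'static str,
}

fn main() {
    let rows = vec![
        FileRow { version_id: VersionId(1), filename: "a.jar" },
        FileRow { version_id: VersionId(1), filename: "b.jar" },
        FileRow { version_id: VersionId(2), filename: "c.jar" },
    ];
    // One pass over the flat rows produces the version -> files grouping that
    // the nested ProjectReport assembly then loops over.
    let by_version: HashMap<VersionId, Vec<FileRow>> =
        rows.into_iter().into_group_map_by(|f| f.version_id);
    assert_eq!(by_version[&VersionId(1)].len(), 2);
    assert_eq!(by_version[&VersionId(2)].len(), 1);
}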
@@ -386,113 +688,59 @@ async fn search_projects(
    let offset = i64::try_from(offset)
        .wrap_request_err("offset cannot fit into `i64`")?;

    let mut project_reports = Vec::<ProjectReport>::new();
    let replied_to_filter = search_req.filter.replied_to.map(|r| match r {
        RepliedTo::Replied => "replied",
        RepliedTo::Unreplied => "unreplied",
    });

    let mut project_ids = Vec::<DBProjectId>::new();
    let mut thread_ids = Vec::<DBThreadId>::new();

    let mut rows = sqlx::query!(
    let rows = sqlx::query!(
        r#"
        SELECT
            project_id AS "project_id: DBProjectId",
            project_thread_id AS "project_thread_id: DBThreadId",
            report AS "report!: sqlx::types::Json<ProjectReport>"
        FROM (
            SELECT DISTINCT ON (m.id)
                m.id AS project_id,
                t.id AS project_thread_id,
                MAX(dr.severity) AS severity,
                MIN(dr.created) AS earliest_report_created,
                MAX(dr.created) AS latest_report_created,

                jsonb_build_object(
                    'project_id', to_base62(m.id),
                    'max_severity', MAX(dr.severity),
                    -- TODO: replace with `json_array` in Postgres 16
                    'versions', (
                        SELECT coalesce(jsonb_agg(jsonb_build_object(
                            'version_id', to_base62(v.id),
                            -- TODO: replace with `json_array` in Postgres 16
                            'files', (
                                SELECT coalesce(jsonb_agg(jsonb_build_object(
                                    'report_id', dr.id,
                                    'file_id', to_base62(f.id),
                                    'created', dr.created,
                                    'flag_reason', 'delphi',
                                    'severity', dr.severity,
                                    'file_name', f.filename,
                                    'file_size', f.size,
                                    'download_url', f.url,
                                    -- TODO: replace with `json_array` in Postgres 16
                                    'issues', (
                                        SELECT coalesce(jsonb_agg(
                                            to_jsonb(dri)
                                            || jsonb_build_object(
                                                -- TODO: replace with `json_array` in Postgres 16
                                                'details', (
                                                    SELECT coalesce(jsonb_agg(
                                                        jsonb_build_object(
                                                            'id', didws.id,
                                                            'issue_id', didws.issue_id,
                                                            'key', didws.key,
                                                            'file_path', didws.file_path,
                                                            -- ignore `decompiled_source`
                                                            'data', didws.data,
                                                            'severity', didws.severity,
                                                            'status', didws.status
                                                        )
                                                    ), '[]'::jsonb)
                                                    FROM delphi_issue_details_with_statuses didws
                                                    WHERE didws.issue_id = dri.id
                                                )
                                            )
                                        ), '[]'::jsonb)
                                        FROM delphi_report_issues dri
                                        WHERE
                                            dri.report_id = dr.id
                                            -- see delphi.rs todo comment
                                            AND dri.issue_type != '__dummy'
                                    )
                                )), '[]'::jsonb)
                                FROM delphi_reports dr
                                WHERE dr.file_id = f.id
                            )
                        )), '[]'::jsonb)
                        FROM versions v
                        INNER JOIN files f ON f.version_id = v.id
                        WHERE v.mod_id = m.id
                    )
                ) AS report
            FROM mods m
            INNER JOIN threads t ON t.mod_id = m.id
            INNER JOIN versions v ON v.mod_id = m.id
            INNER JOIN files f ON f.version_id = v.id

            -- only return projects with at least 1 pending drid
            INNER JOIN delphi_reports dr ON dr.file_id = f.id
            INNER JOIN delphi_issue_details_with_statuses didws
                ON didws.project_id = m.id AND didws.status = 'pending'

            -- filtering
            LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id
            LEFT JOIN categories c ON c.id = mc.joining_category_id
            WHERE
                -- project type
                (cardinality($4::int[]) = 0 OR c.project_type = ANY($4::int[]))
                AND m.status NOT IN ('draft', 'rejected', 'withheld')

            GROUP BY m.id, t.id
        ) t

        -- sorting
        ORDER BY
            CASE WHEN $3 = 'created_asc' THEN t.earliest_report_created ELSE TO_TIMESTAMP(0) END ASC,
            CASE WHEN $3 = 'created_desc' THEN t.latest_report_created ELSE TO_TIMESTAMP(0) END DESC,
            CASE WHEN $3 = 'severity_asc' THEN t.severity ELSE 'low'::delphi_severity END ASC,
            CASE WHEN $3 = 'severity_desc' THEN t.severity ELSE 'low'::delphi_severity END DESC

        -- pagination
        LIMIT $1
        OFFSET $2
        SELECT DISTINCT ON (m.id)
            m.id AS "project_id: DBProjectId",
            t.id AS "thread_id: DBThreadId"
        FROM mods m
        INNER JOIN threads t ON t.mod_id = m.id
        INNER JOIN versions v ON v.mod_id = m.id
        INNER JOIN files f ON f.version_id = v.id
        INNER JOIN delphi_reports dr ON dr.file_id = f.id
        INNER JOIN delphi_report_issues dri ON dri.report_id = dr.id
        INNER JOIN delphi_report_issue_details drid
            ON drid.issue_id = dri.id
        LEFT JOIN delphi_issue_detail_verdicts didv
            ON m.id = didv.project_id AND drid.key = didv.detail_key
        LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id
        LEFT JOIN categories c ON c.id = mc.joining_category_id
        LEFT JOIN threads_messages tm_last
            ON tm_last.thread_id = t.id
            AND tm_last.id = (
                SELECT id FROM threads_messages
                WHERE thread_id = t.id
                ORDER BY created DESC
                LIMIT 1
            )
        LEFT JOIN users u_last
            ON u_last.id = tm_last.author_id
        WHERE
            (cardinality($4::int[]) = 0 OR c.project_type = ANY($4::int[]))
            AND m.status NOT IN ('draft', 'rejected', 'withheld')
            AND (cardinality($6::text[]) = 0 OR m.status = ANY($6::text[]))
            AND (cardinality($7::text[]) = 0 OR dri.issue_type = ANY($7::text[]))
            AND (didv.verdict IS NULL OR didv.verdict = 'pending'::delphi_report_issue_status)
            AND (
                $5::text IS NULL
                OR ($5::text = 'unreplied' AND (tm_last.id IS NULL OR u_last.role IS NULL OR u_last.role NOT IN ('moderator', 'admin')))
                OR ($5::text = 'replied' AND tm_last.id IS NOT NULL AND u_last.role IS NOT NULL AND u_last.role IN ('moderator', 'admin'))
            )
        GROUP BY m.id, t.id
        ORDER BY m.id,
            CASE WHEN $3 = 'created_asc' THEN MIN(dr.created) ELSE TO_TIMESTAMP(0) END ASC,
            CASE WHEN $3 = 'created_desc' THEN MAX(dr.created) ELSE TO_TIMESTAMP(0) END DESC,
            CASE WHEN $3 = 'severity_asc' THEN MAX(dr.severity) ELSE 'low'::delphi_severity END ASC,
            CASE WHEN $3 = 'severity_desc' THEN MAX(dr.severity) ELSE 'low'::delphi_severity END DESC
        LIMIT $1 OFFSET $2
        "#,
        limit,
        offset,
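The replied/unreplied branch of the WHERE clause above boils down to one question about the latest thread message: does it exist and was it written by staff? A plain-Rust restatement of that predicate for clarity (not code the route runs), assuming the role strings 'moderator' and 'admin' used in the SQL:

// Restates the $5 = 'replied' / 'unreplied' logic from the query above.
fn is_replied(last_message_author_role: Option<&str>) -> bool {
    matches!(last_message_author_role, Some("moderator") | Some("admin"))
}

fn main() {
    assert!(is_replied(Some("admin")));
    assert!(!is_replied(Some("developer"))); // last reply came from the project team
    assert!(!is_replied(None)); // thread has no messages, or last one has no author
}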
@@ -503,20 +751,29 @@ async fn search_projects(
            .iter()
            .map(|ty| ty.0)
            .collect::<Vec<_>>(),
        replied_to_filter.as_deref(),
        &search_req
            .filter
            .project_status
            .iter()
            .map(|status| status.to_string())
            .collect::<Vec<_>>(),
        &search_req
            .filter
            .issue_type
    )
    .fetch(&**pool);
    .fetch_all(&**pool)
    .await
    .wrap_internal_err("failed to fetch projects")?;

    while let Some(row) = rows
        .next()
        .await
        .transpose()
        .wrap_internal_err("failed to fetch reports")?
    {
        project_reports.push(row.report.0);
    for row in rows {
        project_ids.push(row.project_id);
        thread_ids.push(row.project_thread_id);
        thread_ids.push(row.thread_id);
    }

    let project_reports =
        fetch_project_reports(&project_ids, &pool, &redis).await?;

    let projects = DBProject::get_many_ids(&project_ids, &**pool, &redis)
        .await
        .wrap_internal_err("failed to fetch projects")?
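One convention worth noting in this hunk: list filters are always bound (possibly empty) and the query guards each with cardinality($n::…[]) = 0 OR …, while the optional replied_to filter is bound as a nullable text. A tiny sketch of how the bound values behave under that convention (stand-in function, not the route's code; the "approved" status is an assumed example value):

// Stand-in showing how "no filter" is represented for each parameter kind.
fn bind_filters(
    statuses: &[&str],
    replied_to: Option<&str>,
) -> (Vec<String>, Option<String>) {
    (
        // Empty vec -> cardinality($6) = 0 -> status filter disabled.
        statuses.iter().map(|s| s.to_string()).collect(),
        // None -> $5 IS NULL -> replied/unreplied filter disabled.
        replied_to.map(|s| s.to_string()),
    )
}

fn main() {
    assert_eq!(bind_filters(&[], None), (vec![], None));
    assert_eq!(
        bind_filters(&["approved"], Some("unreplied")),
        (vec!["approved".to_string()], Some("unreplied".to_string()))
    );
}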
@@ -585,6 +842,87 @@ async fn search_projects(
    }))
}

/// Gets the technical review report for a specific project.
#[utoipa::path(
    security(("bearer_auth" = [])),
    responses((status = OK, body = inline(ProjectReportResponse)))
)]
#[get("/project/{id}")]
async fn get_project_report(
    req: HttpRequest,
    pool: web::Data<PgPool>,
    redis: web::Data<RedisPool>,
    session_queue: web::Data<AuthQueue>,
    path: web::Path<(ProjectId,)>,
) -> Result<web::Json<ProjectReportResponse>, ApiError> {
    let user = check_is_moderator_from_headers(
        &req,
        &**pool,
        &redis,
        &session_queue,
        Scopes::PROJECT_READ,
    )
    .await?;

    let (project_id,) = path.into_inner();
    let db_project_id = DBProjectId::from(project_id);

    let row = sqlx::query!(
        r#"
        SELECT t.id AS "thread_id: DBThreadId"
        FROM threads t
        WHERE t.mod_id = $1
        "#,
        db_project_id as _,
    )
    .fetch_optional(&**pool)
    .await
    .wrap_internal_err("failed to fetch thread")?
    .ok_or(ApiError::NotFound)?;

    let project_reports =
        fetch_project_reports(&[db_project_id], &pool, &redis).await?;

    let project_report = project_reports.into_iter().next();

    let db_threads = DBThread::get_many(&[row.thread_id], &**pool)
        .await
        .wrap_internal_err("failed to fetch thread")?;
    let thread_author_ids = db_threads
        .iter()
        .flat_map(|thread| {
            thread
                .messages
                .iter()
                .filter_map(|message| message.author_id)
        })
        .collect::<Vec<_>>();
    let thread_authors =
        DBUser::get_many_ids(&thread_author_ids, &**pool, &redis)
            .await
            .wrap_internal_err("failed to fetch thread authors")?
            .into_iter()
            .map(From::from)
            .collect::<Vec<_>>();
    let threads = db_threads
        .into_iter()
        .map(|thread| {
            let thread = Thread::from(thread, thread_authors.clone(), &user);
            (thread.id, thread)
        })
        .collect::<HashMap<_, _>>();

    let thread = threads
        .get(&row.thread_id.into())
        .cloned()
        .ok_or(ApiError::NotFound)?;

    Ok(web::Json(ProjectReportResponse {
        project_report,
        thread,
    }))
}

/// See [`submit_report`].
#[derive(Debug, Clone, Serialize, Deserialize, utoipa::ToSchema)]
pub struct SubmitReport {
@@ -163,8 +163,8 @@ pub enum ApiError {
    RateLimitError(u128, u32),
    #[error("Error while interacting with payment processor: {0}")]
    Stripe(#[from] stripe::StripeError),
    #[error("Error while interacting with Delphi: {0}")]
    Delphi(reqwest::Error),
    #[error("Error while interacting with Delphi: {0:?}")]
    Delphi(eyre::Error),
    #[error(transparent)]
    Mural(#[from] Box<muralpay::ApiError>),
    #[error("report still has {} issue details with no verdict", details.len())]
@@ -174,6 +174,10 @@ pub enum ApiError {
}

impl ApiError {
    pub fn delphi(err: impl Into<eyre::Error>) -> Self {
        Self::Delphi(err.into())
    }

    pub fn as_api_error<'a>(&self) -> crate::models::error::ApiError<'a> {
        crate::models::error::ApiError {
            error: match self {
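The Delphi variant now wraps eyre::Error, and the delphi constructor accepts anything convertible into it, which is why the call sites earlier in the diff switch from ApiError::Delphi to ApiError::delphi. A self-contained sketch of the same pattern with a stand-in error enum (thiserror and eyre assumed as dependencies, as the diff implies):

use thiserror::Error;

// Stand-in for the real ApiError enum; only the Delphi variant is shown.
#[derive(Debug, Error)]
enum ApiError {
    #[error("Error while interacting with Delphi: {0:?}")]
    Delphi(eyre::Error),
}

impl ApiError {
    // Accepts reqwest::Error, io::Error, or anything else Into<eyre::Error>.
    fn delphi(err: impl Into<eyre::Error>) -> Self {
        Self::Delphi(err.into())
    }
}

fn main() {
    let io_err = std::io::Error::new(std::io::ErrorKind::Other, "connection reset");
    let api_err = ApiError::delphi(io_err);
    println!("{api_err}");
}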
@@ -129,7 +129,7 @@ services:
    volumes:
      # Labrinth deposits version files here;
      # Delphi reads them from here
      - /tmp/modrinth:/tmp/modrinth:ro
      - /tmp/modrinth:/tmp/modrinth:ro,z
    extra_hosts:
      # Delphi must send a message on a webhook to our backend,
      # so it must have access to our local network