forked from didirus/AstralRinth
Chunking searches (#787)
* new attempt
* revised searching CTEs
* prepare fix
* fix tests
* fixes
* restructured project_item to use queries
* search changes! fmt clippy prepare
* small changes
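The commit replaces one monolithic project query with several smaller per-table queries whose rows are stitched back together in Rust. A minimal sketch of that pattern (not the crate's actual code; table and struct names are illustrative, using the mods/versions tables named elsewhere in this diff):

    use std::collections::HashMap;
    use sqlx::Row;

    #[derive(Debug)]
    struct ProjectRow { id: i64, name: String }
    #[derive(Debug)]
    struct VersionRow { project_id: i64, id: i64 }

    // Fetch parents in one query, children in a second, then group in memory
    // instead of one giant JOIN + aggregate query.
    async fn fetch_projects_chunked(
        pool: &sqlx::PgPool,
        ids: &[i64],
    ) -> Result<HashMap<i64, (ProjectRow, Vec<VersionRow>)>, sqlx::Error> {
        let projects = sqlx::query("SELECT id, name FROM mods WHERE id = ANY($1)")
            .bind(ids.to_vec())
            .fetch_all(pool)
            .await?;
        let mut out: HashMap<i64, (ProjectRow, Vec<VersionRow>)> = projects
            .into_iter()
            .map(|r| {
                let p = ProjectRow { id: r.get("id"), name: r.get("name") };
                (p.id, (p, Vec::new()))
            })
            .collect();
        let versions = sqlx::query("SELECT mod_id, id FROM versions WHERE mod_id = ANY($1)")
            .bind(ids.to_vec())
            .fetch_all(pool)
            .await?;
        for r in versions {
            let v = VersionRow { project_id: r.get("mod_id"), id: r.get("id") };
            if let Some((_, vs)) = out.get_mut(&v.project_id) {
                vs.push(v);
            }
        }
        Ok(out)
    }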
36  .sqlx/query-1af33ce1ecbf8d0ab2dcc6de7d433ca05a82acc32dd447ff51487e0039706fec.json  (generated, new file)
    db_name: PostgreSQL
    query: SELECT DISTINCT mod_id, v.id as id, date_published FROM mods m INNER JOIN versions v ON m.id = v.mod_id AND v.status = ANY($3) WHERE m.id = ANY($1) OR m.slug = ANY($2)
    columns: mod_id Int8, id Int8, date_published Timestamptz
    parameters: Int8Array, TextArray, TextArray
    nullable: false, false, false
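These .sqlx/query-*.json files are the offline cache that `cargo sqlx prepare` writes so the `sqlx::query!` macros can type-check without a live database; the SQL above is invoked later in this diff from Project::get_many. A hedged, stand-alone sketch of the runtime call, using the unchecked `sqlx::query` so it compiles on its own (assumes sqlx's chrono feature):

    use chrono::{DateTime, Utc};
    use sqlx::Row;

    async fn listed_version_ids(
        pool: &sqlx::PgPool,
        project_ids: &[i64],
        slugs: &[String],
        listed_statuses: &[String],
    ) -> Result<Vec<(i64, i64, DateTime<Utc>)>, sqlx::Error> {
        let rows = sqlx::query(
            "SELECT DISTINCT mod_id, v.id as id, date_published
             FROM mods m
             INNER JOIN versions v ON m.id = v.mod_id AND v.status = ANY($3)
             WHERE m.id = ANY($1) OR m.slug = ANY($2)",
        )
        .bind(project_ids.to_vec())   // $1: bigint[]
        .bind(slugs.to_vec())         // $2: text[]
        .bind(listed_statuses.to_vec()) // $3: text[]
        .fetch_all(pool)
        .await?;
        Ok(rows
            .into_iter()
            .map(|r| (r.get("mod_id"), r.get("id"), r.get("date_published")))
            .collect())
    }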
52  .sqlx/query-2140809b7b65c44c7de96ce89ca52a1808e134756baf6d847600668b7e0bbc95.json  (generated, new file)
    db_name: PostgreSQL
    query: SELECT DISTINCT id, enum_id, value, ordering, created, metadata FROM loader_field_enum_values lfev WHERE id = ANY($1) ORDER BY enum_id, ordering, created DESC
    columns: id Int4, enum_id Int4, value Varchar, ordering Int4, created Timestamptz, metadata Jsonb
    parameters: Int4Array
    nullable: false, false, false, true, false, true
46  .sqlx/query-2390acbe75f9956e8e16c29faa90aa2fb6b3e11a417302b62fc4a6b4a1785f75.json  (generated, new file)
    db_name: PostgreSQL
    query: SELECT version_id, field_id, int_value, enum_value, string_value FROM version_fields WHERE version_id = ANY($1)
    columns: version_id Int8, field_id Int4, int_value Int4, enum_value Int4, string_value Text
    parameters: Int8Array
    nullable: false, false, true, true, true
173  .sqlx/query-2fe731da3681f72ec03b89d7139a49ccb1069079d8600daa40688d5f528de83d.json  (generated, new file)
    db_name: PostgreSQL
    query:
        SELECT m.id id, m.name name, m.summary summary, m.downloads downloads, m.follows follows,
        m.icon_url icon_url, m.description description, m.published published,
        m.updated updated, m.approved approved, m.queued, m.status status, m.requested_status requested_status,
        m.license_url license_url,
        m.team_id team_id, m.organization_id organization_id, m.license license, m.slug slug, m.moderation_message moderation_message, m.moderation_message_body moderation_message_body,
        m.webhook_sent, m.color,
        t.id thread_id, m.monetization_status monetization_status,
        ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is false) categories,
        ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is true) additional_categories
        FROM mods m
        INNER JOIN threads t ON t.mod_id = m.id
        LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id
        LEFT JOIN categories c ON mc.joining_category_id = c.id
        WHERE m.id = ANY($1) OR m.slug = ANY($2)
        GROUP BY t.id, m.id;
    columns: id Int8, name Varchar, summary Varchar, downloads Int4, follows Int4, icon_url Varchar, description Varchar, published Timestamptz, updated Timestamptz, approved Timestamptz, queued Timestamptz, status Varchar, requested_status Varchar, license_url Varchar, team_id Int8, organization_id Int8, license Varchar, slug Varchar, moderation_message Varchar, moderation_message_body Varchar, webhook_sent Bool, color Int4, thread_id Int8, monetization_status Varchar, categories VarcharArray, additional_categories VarcharArray
    parameters: Int8Array, TextArray
    nullable: false, false, false, false, false, true, false, false, false, true, true, false, true, true, false, true, false, true, true, true, false, true, false, false, null, null
Deleted generated sqlx cache file (hash 4b9e5d78245ac083c167be708c196170c543a2157dbfa9d6249d98dc13bfaf72, 119 lines removed)
    db_name: PostgreSQL
    query:
        SELECT m.id id, m.name name, m.description description, m.color color,
        m.icon_url icon_url, m.slug slug,
        u.username username, u.avatar_url avatar_url,
        ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null) categories,
        ARRAY_AGG(DISTINCT lo.loader) filter (where lo.loader is not null) loaders,
        ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types,
        ARRAY_AGG(DISTINCT g.slug) filter (where g.slug is not null) games,
        ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is false) gallery,
        ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is true) featured_gallery,
        JSONB_AGG(DISTINCT jsonb_build_object('field_id', vf.field_id, 'int_value', vf.int_value, 'enum_value', vf.enum_value, 'string_value', vf.string_value)) filter (where vf.field_id is not null) version_fields,
        JSONB_AGG(DISTINCT jsonb_build_object(
            'version_id', 0, -- TODO: When webhook is updated to match others, this should match version
            'lf_id', lf.id, 'loader_name', lo.loader, 'field', lf.field, 'field_type', lf.field_type, 'enum_type', lf.enum_type, 'min_val', lf.min_val, 'max_val', lf.max_val, 'optional', lf.optional)) filter (where lf.id is not null) loader_fields,
        JSONB_AGG(DISTINCT jsonb_build_object('id', lfev.id, 'enum_id', lfev.enum_id, 'value', lfev.value, 'ordering', lfev.ordering, 'created', lfev.created, 'metadata', lfev.metadata)) filter (where lfev.id is not null) loader_field_enum_values
        FROM mods m
        LEFT OUTER JOIN mods_categories mc ON joining_mod_id = m.id AND mc.is_additional = FALSE
        LEFT OUTER JOIN categories c ON mc.joining_category_id = c.id
        LEFT OUTER JOIN versions v ON v.mod_id = m.id AND v.status != ALL($2)
        LEFT OUTER JOIN loaders_versions lv ON lv.version_id = v.id
        LEFT OUTER JOIN loaders lo ON lo.id = lv.loader_id
        LEFT JOIN loaders_project_types lpt ON lpt.joining_loader_id = lo.id
        LEFT JOIN project_types pt ON pt.id = lpt.joining_project_type_id
        LEFT JOIN loaders_project_types_games lptg ON lptg.loader_id = lo.id AND lptg.project_type_id = pt.id
        LEFT JOIN games g ON lptg.game_id = g.id
        LEFT OUTER JOIN mods_gallery mg ON mg.mod_id = m.id
        INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.is_owner = TRUE AND tm.accepted = TRUE
        INNER JOIN users u ON tm.user_id = u.id
        LEFT OUTER JOIN version_fields vf on v.id = vf.version_id
        LEFT OUTER JOIN loader_fields lf on vf.field_id = lf.id
        LEFT OUTER JOIN loader_field_enums lfe on lf.enum_type = lfe.id
        LEFT OUTER JOIN loader_field_enum_values lfev on lfev.enum_id = lfe.id
        WHERE m.id = $1
        GROUP BY m.id, u.id;
    columns: id Int8, name Varchar, description Varchar, color Int4, icon_url Varchar, slug Varchar, username Varchar, avatar_url Varchar, categories VarcharArray, loaders VarcharArray, project_types VarcharArray, games VarcharArray, gallery VarcharArray, featured_gallery VarcharArray, version_fields Jsonb, loader_fields Jsonb, loader_field_enum_values Jsonb
    parameters: Int8, TextArray
    nullable: false, false, false, true, true, true, false, true, null, null, null, null, null, null, null, null, null
58  .sqlx/query-5329254eeb1e80d2a0f4f3bc2b613f3a7d54b0673f1a41f31fe5b5bbc4b5e478.json  (generated, new file)
    db_name: PostgreSQL
    query: SELECT DISTINCT id, field, field_type, enum_type, min_val, max_val, optional FROM loader_fields lf WHERE id = ANY($1)
    columns: id Int4, field Varchar, field_type Varchar, enum_type Int4, min_val Int4, max_val Int4, optional Bool
    parameters: Int4Array
    nullable: false, false, false, true, true, true, false
File diff suppressed because one or more lines are too long
34  .sqlx/query-6d867e712d89c915fc15940eadded0a383aa479e7f25f3a408661347e35c6538.json  (generated, new file)
    db_name: PostgreSQL
    query: SELECT DISTINCT file_id, algorithm, encode(hash, 'escape') hash FROM hashes WHERE file_id = ANY($1)
    columns: file_id Int8, algorithm Varchar, hash Text
    parameters: Int8Array
    nullable: false, false, null
40  .sqlx/query-777b3dcb5f45db64393476b0f9401e2ba04e59229c93c2768167253ea30abb32.json  (generated, new file)
    db_name: PostgreSQL
    query: SELECT DISTINCT mod_id, ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders, ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types, ARRAY_AGG(DISTINCT g.slug) filter (where g.slug is not null) games FROM versions v INNER JOIN loaders_versions lv ON v.id = lv.version_id INNER JOIN loaders l ON lv.loader_id = l.id INNER JOIN loaders_project_types lpt ON lpt.joining_loader_id = l.id INNER JOIN project_types pt ON pt.id = lpt.joining_project_type_id INNER JOIN loaders_project_types_games lptg ON lptg.loader_id = l.id AND lptg.project_type_id = pt.id INNER JOIN games g ON lptg.game_id = g.id WHERE v.id = ANY($1) GROUP BY mod_id
    columns: mod_id Int8, loaders VarcharArray, project_types VarcharArray, games VarcharArray
    parameters: Int8Array
    nullable: false, null, null, null
59  .sqlx/query-7bb8a2e1e01817ea3778fcd2af039e38d085484dd20abf57d0eff8d7801b728b.json  (generated, new file)
    db_name: PostgreSQL
    query: SELECT DISTINCT mod_id, mg.image_url, mg.featured, mg.name, mg.description, mg.created, mg.ordering FROM mods_gallery mg INNER JOIN mods m ON mg.mod_id = m.id WHERE m.id = ANY($1) OR m.slug = ANY($2)
    columns: mod_id Int8, image_url Varchar, featured Bool, name Varchar, description Varchar, created Timestamptz, ordering Int8
    parameters: Int8Array, TextArray
    nullable: false, false, true, true, true, false, false
40  .sqlx/query-82d3a8a3bb864cbeda459065f7d4e413ffefa607a47be92b592d465b15b61006.json  (generated, new file)
    db_name: PostgreSQL
    query: SELECT DISTINCT version_id, ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders, ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types, ARRAY_AGG(DISTINCT g.slug) filter (where g.slug is not null) games FROM versions v INNER JOIN loaders_versions lv ON v.id = lv.version_id INNER JOIN loaders l ON lv.loader_id = l.id INNER JOIN loaders_project_types lpt ON lpt.joining_loader_id = l.id INNER JOIN project_types pt ON pt.id = lpt.joining_project_type_id INNER JOIN loaders_project_types_games lptg ON lptg.loader_id = l.id AND lptg.project_type_id = pt.id INNER JOIN games g ON lptg.game_id = g.id WHERE v.id = ANY($1) GROUP BY version_id
    columns: version_id Int8, loaders VarcharArray, project_types VarcharArray, games VarcharArray
    parameters: Int8Array
    nullable: false, null, null, null
94  .sqlx/query-8615354803791e238cc037b8a105008014ecd9764d198e62cc1ad18fc3185301.json  (generated, new file)
    db_name: PostgreSQL
    query: SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number, v.changelog changelog, v.date_published date_published, v.downloads downloads, v.version_type version_type, v.featured featured, v.status status, v.requested_status requested_status, v.ordering ordering FROM versions v WHERE v.id = ANY($1) ORDER BY v.ordering ASC NULLS LAST, v.date_published ASC;
    columns: id Int8, mod_id Int8, author_id Int8, version_name Varchar, version_number Varchar, changelog Varchar, date_published Timestamptz, downloads Int4, version_type Varchar, featured Bool, status Varchar, requested_status Varchar, ordering Int4
    parameters: Int8Array
    nullable: false, false, false, false, false, false, false, false, false, false, false, true, true
47  .sqlx/query-8ff710a212087299ecc176ecc3cffbe5f411e76909ea458a359b9eea2c543e47.json  (generated, new file)
    db_name: PostgreSQL
    query: SELECT DISTINCT joining_mod_id as mod_id, joining_platform_id as platform_id, lp.name as platform_name, url, lp.donation as donation FROM mods_links ml INNER JOIN mods m ON ml.joining_mod_id = m.id INNER JOIN link_platforms lp ON ml.joining_platform_id = lp.id WHERE m.id = ANY($1) OR m.slug = ANY($2)
    columns: mod_id Int8, platform_id Int4, platform_name Varchar, url Varchar, donation Bool
    parameters: Int8Array, TextArray
    nullable: false, false, false, false, false
52  .sqlx/query-99080d0666e06794e44c80e05b17585e0f87c70d9ace28537898f27e7df0ded0.json  (generated, new file)
    db_name: PostgreSQL
    query: SELECT DISTINCT id, enum_id, value, ordering, created, metadata FROM loader_field_enum_values lfev WHERE id = ANY($1) ORDER BY enum_id, ordering, created ASC
    columns: id Int4, enum_id Int4, value Varchar, ordering Int4, created Timestamptz, metadata Jsonb
    parameters: Int4Array
    nullable: false, false, false, true, false, true
46  .sqlx/query-b94d2551866c355159d01f77fe301b191de2a83d3ba3817ea60628a1b45a7a64.json  (generated, new file)
    db_name: PostgreSQL
    query: SELECT DISTINCT dependent_id as version_id, d.mod_dependency_id as dependency_project_id, d.dependency_id as dependency_version_id, d.dependency_file_name as file_name, d.dependency_type as dependency_type FROM dependencies d WHERE dependent_id = ANY($1)
    columns: version_id Int8, dependency_project_id Int8, dependency_version_id Int8, file_name Varchar, dependency_type Varchar
    parameters: Int8Array
    nullable: false, true, true, true, false
101  .sqlx/query-bc615a9b9aa5773a1f5c3bbc292bcaa8d011d95c26218cd416eac65d5545fbd4.json  (generated, new file)
    db_name: PostgreSQL
    query:
        SELECT m.id id, m.name name, m.description description, m.color color,
        m.icon_url icon_url, m.slug slug,
        u.username username, u.avatar_url avatar_url,
        ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null) categories,
        ARRAY_AGG(DISTINCT lo.loader) filter (where lo.loader is not null) loaders,
        ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types,
        ARRAY_AGG(DISTINCT g.slug) filter (where g.slug is not null) games,
        ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is false) gallery,
        ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is true) featured_gallery
        FROM mods m
        LEFT OUTER JOIN mods_categories mc ON joining_mod_id = m.id AND mc.is_additional = FALSE
        LEFT OUTER JOIN categories c ON mc.joining_category_id = c.id
        LEFT OUTER JOIN versions v ON v.mod_id = m.id AND v.status != ALL($2)
        LEFT OUTER JOIN loaders_versions lv ON lv.version_id = v.id
        LEFT OUTER JOIN loaders lo ON lo.id = lv.loader_id
        LEFT JOIN loaders_project_types lpt ON lpt.joining_loader_id = lo.id
        LEFT JOIN project_types pt ON pt.id = lpt.joining_project_type_id
        LEFT JOIN loaders_project_types_games lptg ON lptg.loader_id = lo.id AND lptg.project_type_id = pt.id
        LEFT JOIN games g ON lptg.game_id = g.id
        LEFT OUTER JOIN mods_gallery mg ON mg.mod_id = m.id
        INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.is_owner = TRUE AND tm.accepted = TRUE
        INNER JOIN users u ON tm.user_id = u.id
        WHERE m.id = $1
        GROUP BY m.id, u.id;
    columns: id Int8, name Varchar, description Varchar, color Int4, icon_url Varchar, slug Varchar, username Varchar, avatar_url Varchar, categories VarcharArray, loaders VarcharArray, project_types VarcharArray, games VarcharArray, gallery VarcharArray, featured_gallery VarcharArray
    parameters: Int8, TextArray
    nullable: false, false, false, true, true, true, false, true, null, null, null, null, null, null
52  .sqlx/query-ca53a711735ba065d441356ed744a95e948354bb5b9a6047749fdc2a514f456c.json  (generated, new file)
    db_name: PostgreSQL
    query: SELECT DISTINCT mod_id, version_id, field_id, int_value, enum_value, string_value FROM versions v INNER JOIN version_fields vf ON v.id = vf.version_id WHERE v.id = ANY($1)
    columns: mod_id Int8, version_id Int8, field_id Int4, int_value Int4, enum_value Int4, string_value Text
    parameters: Int8Array
    nullable: false, false, false, true, true, true
58  .sqlx/query-e72736bb7fca4df41cf34186b1edf04d6b4d496971aaf87ed1a88e7d64eab823.json  (generated, new file)
    db_name: PostgreSQL
    query: SELECT DISTINCT version_id, f.id, f.url, f.filename, f.is_primary, f.size, f.file_type FROM files f WHERE f.version_id = ANY($1)
    columns: version_id Int8, id Int8, url Varchar, filename Varchar, is_primary Bool, size Int4, file_type Varchar
    parameters: Int8Array
    nullable: false, false, false, false, false, false, true
File diff suppressed because one or more lines are too long
@@ -115,8 +115,9 @@ pub async fn fetch_countries_downloads(
    end_date: DateTime<Utc>,
    client: Arc<clickhouse::Client>,
) -> Result<Vec<ReturnCountry>, ApiError> {
    let query = client.query(
        "
    let query = client
        .query(
            "
    SELECT
        country,
        project_id,
@@ -126,8 +127,8 @@ pub async fn fetch_countries_downloads(
    GROUP BY
        country,
        project_id
        "
    )
            ",
        )
    .bind(start_date.timestamp())
    .bind(end_date.timestamp())
    .bind(projects.iter().map(|x| x.0).collect::<Vec<_>>());
@@ -141,8 +142,9 @@ pub async fn fetch_countries_views(
    end_date: DateTime<Utc>,
    client: Arc<clickhouse::Client>,
) -> Result<Vec<ReturnCountry>, ApiError> {
    let query = client.query(
        "
    let query = client
        .query(
            "
    SELECT
        country,
        project_id,
@@ -152,8 +154,8 @@ pub async fn fetch_countries_views(
    GROUP BY
        country,
        project_id
        "
    )
            ",
        )
    .bind(start_date.timestamp())
    .bind(end_date.timestamp())
    .bind(projects.iter().map(|x| x.0).collect::<Vec<_>>());

@@ -228,6 +228,20 @@ impl LoaderFieldType {
            LoaderFieldType::ArrayEnum(_) => "array_enum",
        }
    }

    pub fn is_array(&self) -> bool {
        match self {
            LoaderFieldType::ArrayInteger => true,
            LoaderFieldType::ArrayText => true,
            LoaderFieldType::ArrayBoolean => true,
            LoaderFieldType::ArrayEnum(_) => true,

            LoaderFieldType::Integer => false,
            LoaderFieldType::Text => false,
            LoaderFieldType::Boolean => false,
            LoaderFieldType::Enum(_) => false,
        }
    }
}

#[derive(Clone, Serialize, Deserialize, Debug)]
@@ -283,7 +297,7 @@ pub struct QueryVersionField {
    pub version_id: VersionId,
    pub field_id: LoaderFieldId,
    pub int_value: Option<i32>,
    pub enum_value: Option<LoaderFieldEnumValue>,
    pub enum_value: Option<LoaderFieldEnumValueId>,
    pub string_value: Option<String>,
}

@@ -293,7 +307,7 @@ impl QueryVersionField {
        self
    }

    pub fn with_enum_value(mut self, enum_value: LoaderFieldEnumValue) -> Self {
    pub fn with_enum_value(mut self, enum_value: LoaderFieldEnumValueId) -> Self {
        self.enum_value = Some(enum_value);
        self
    }
@@ -304,6 +318,27 @@ impl QueryVersionField {
    }
}

#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct QueryLoaderField {
    pub id: LoaderFieldId,
    pub field: String,
    pub field_type: String,
    pub enum_type: Option<LoaderFieldEnumId>,
    pub min_val: Option<i32>,
    pub max_val: Option<i32>,
    pub optional: bool,
}

#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct QueryLoaderFieldEnumValue {
    pub id: LoaderFieldEnumValueId,
    pub enum_id: LoaderFieldEnumId,
    pub value: String,
    pub ordering: Option<i32>,
    pub created: DateTime<Utc>,
    pub metadata: Option<serde_json::Value>,
}

#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct SideType {
    pub id: SideTypeId,
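These new query structs let version fields carry only an enum value *id*, with the full enum rows fetched in a separate query and joined in memory. A self-contained illustration of that pattern (simplified stand-in types, not the crate's):

    #[derive(Clone, Debug)]
    struct EnumValueRow { id: i32, value: String }

    #[derive(Clone, Debug)]
    struct VersionFieldRow { field_id: i32, enum_value: Option<i32> }

    // Resolve each field's enum id against the separately fetched enum values.
    fn resolve_enum_values<'a>(
        fields: &'a [VersionFieldRow],
        enum_values: &'a [EnumValueRow],
    ) -> Vec<(i32, Option<&'a str>)> {
        fields
            .iter()
            .map(|f| {
                let resolved = f
                    .enum_value
                    .and_then(|id| enum_values.iter().find(|ev| ev.id == id))
                    .map(|ev| ev.value.as_str());
                (f.field_id, resolved)
            })
            .collect()
    }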
@@ -710,11 +745,11 @@ impl VersionField {
        }
    }
    VersionFieldValue::Enum(_, v) => {
        query_version_fields.push(base.clone().with_enum_value(v))
        query_version_fields.push(base.clone().with_enum_value(v.id))
    }
    VersionFieldValue::ArrayEnum(_, v) => {
        for ev in v {
            query_version_fields.push(base.clone().with_enum_value(ev));
            query_version_fields.push(base.clone().with_enum_value(ev.id));
        }
    }
};
@@ -733,7 +768,7 @@ impl VersionField {
        l.field_id.0,
        l.version_id.0,
        l.int_value,
        l.enum_value.as_ref().map(|e| e.id.0),
        l.enum_value.as_ref().map(|e| e.0),
        l.string_value.clone(),
    )
})
@@ -807,106 +842,53 @@ impl VersionField {
    }

    pub fn from_query_json(
        loader_fields: Option<serde_json::Value>,
        version_fields: Option<serde_json::Value>,
        loader_field_enum_values: Option<serde_json::Value>,
        query_version_field_combined: Vec<QueryVersionField>,
        query_loader_fields: &[QueryLoaderField],
        query_loader_field_enum_values: &[QueryLoaderFieldEnumValue],
        allow_many: bool, // If true, will allow multiple values for a single singleton field, returning them as separate VersionFields
        // allow_many = true, multiple Bools => two VersionFields of Bool
        // allow_many = false, multiple Bools => error
        // multiple Arraybools => 1 VersionField of ArrayBool
    ) -> Vec<VersionField> {
        #[derive(Deserialize, Debug)]
        struct JsonLoaderField {
            version_id: i64,

            lf_id: i32,
            field: String,
            field_type: String,
            enum_type: Option<i32>,
            min_val: Option<i32>,
            max_val: Option<i32>,
            optional: bool,
        }

        #[derive(Deserialize, Debug)]
        struct JsonVersionField {
            field_id: i32,
            int_value: Option<i32>,
            enum_value: Option<i32>,
            string_value: Option<String>,
        }

        #[derive(Deserialize, Debug)]
        struct JsonLoaderFieldEnumValue {
            id: i32,
            enum_id: i32,
            value: String,
            ordering: Option<i32>,
            created: DateTime<Utc>,
            metadata: Option<serde_json::Value>,
        }

        let query_loader_fields: Vec<JsonLoaderField> = loader_fields
            .and_then(|x| serde_json::from_value(x).ok())
            .unwrap_or_default();
        let query_version_field_combined: Vec<JsonVersionField> = version_fields
            .and_then(|x| serde_json::from_value(x).ok())
            .unwrap_or_default();
        let query_loader_field_enum_values: Vec<JsonLoaderFieldEnumValue> =
            loader_field_enum_values
                .and_then(|x| serde_json::from_value(x).ok())
                .unwrap_or_default();
        query_loader_fields
            .into_iter()
            .iter()
            .flat_map(|q| {
                let loader_field_type = match LoaderFieldType::build(&q.field_type, q.enum_type) {
                    Some(lft) => lft,
                    None => return vec![],
                };
                let loader_field_type =
                    match LoaderFieldType::build(&q.field_type, q.enum_type.map(|l| l.0)) {
                        Some(lft) => lft,
                        None => return vec![],
                    };
                let loader_field = LoaderField {
                    id: LoaderFieldId(q.lf_id),
                    id: q.id,
                    field: q.field.clone(),
                    field_type: loader_field_type,
                    optional: q.optional,
                    min_val: q.min_val,
                    max_val: q.max_val,
                };
                let version_id = VersionId(q.version_id);
                let values = query_version_field_combined
                    .iter()
                    .filter_map(|qvf| {
                        if qvf.field_id == q.lf_id {
                            let lfev = query_loader_field_enum_values
                                .iter()
                                .find(|x| Some(x.id) == qvf.enum_value);

                            Some(QueryVersionField {
                                version_id,
                                field_id: LoaderFieldId(qvf.field_id),
                                int_value: qvf.int_value,
                                enum_value: lfev.map(|lfev| LoaderFieldEnumValue {
                                    id: LoaderFieldEnumValueId(lfev.id),
                                    enum_id: LoaderFieldEnumId(lfev.enum_id),
                                    value: lfev.value.clone(),
                                    ordering: lfev.ordering,
                                    created: lfev.created,
                                    metadata: lfev.metadata.clone().unwrap_or_default(),
                                }),
                                string_value: qvf.string_value.clone(),
                            })
                        } else {
                            None
                        }
                    })
                // todo: avoid clone here?
                let version_fields = query_version_field_combined
                    .iter()
                    .filter(|qvf| qvf.field_id == q.id)
                    .cloned()
                    .collect::<Vec<_>>();
                if allow_many {
                    VersionField::build_many(loader_field, version_id, values)
                        .unwrap_or_default()
                        .into_iter()
                        .unique()
                        .collect_vec()
                    VersionField::build_many(
                        loader_field,
                        version_fields,
                        query_loader_field_enum_values,
                    )
                    .unwrap_or_default()
                    .into_iter()
                    .unique()
                    .collect_vec()
                } else {
                    match VersionField::build(loader_field, version_id, values) {
                    match VersionField::build(
                        loader_field,
                        version_fields,
                        query_loader_field_enum_values,
                    ) {
                        Ok(vf) => vec![vf],
                        Err(_) => vec![],
                    }
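A self-contained sketch of the allow_many semantics documented above (illustrative types, not the crate's): for a singleton field type, several rows either become several separate fields or an error; for an array type they always collapse into one array-valued field.

    fn collapse(values: Vec<i32>, is_array: bool, allow_many: bool) -> Result<Vec<Vec<i32>>, String> {
        if is_array {
            // One field holding the whole array.
            Ok(vec![values])
        } else if allow_many || values.len() <= 1 {
            // One field per value.
            Ok(values.into_iter().map(|v| vec![v]).collect())
        } else {
            Err("multiple values for a singleton field".to_string())
        }
    }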
@@ -917,10 +899,14 @@ impl VersionField {

    pub fn build(
        loader_field: LoaderField,
        version_id: VersionId,
        query_version_fields: Vec<QueryVersionField>,
        query_loader_field_enum_values: &[QueryLoaderFieldEnumValue],
    ) -> Result<VersionField, DatabaseError> {
        let value = VersionFieldValue::build(&loader_field.field_type, query_version_fields)?;
        let (version_id, value) = VersionFieldValue::build(
            &loader_field.field_type,
            query_version_fields,
            query_loader_field_enum_values,
        )?;
        Ok(VersionField {
            version_id,
            field_id: loader_field.id,
@@ -931,13 +917,17 @@ impl VersionField {

    pub fn build_many(
        loader_field: LoaderField,
        version_id: VersionId,
        query_version_fields: Vec<QueryVersionField>,
        query_loader_field_enum_values: &[QueryLoaderFieldEnumValue],
    ) -> Result<Vec<VersionField>, DatabaseError> {
        let values = VersionFieldValue::build_many(&loader_field.field_type, query_version_fields)?;
        let values = VersionFieldValue::build_many(
            &loader_field.field_type,
            query_version_fields,
            query_loader_field_enum_values,
        )?;
        Ok(values
            .into_iter()
            .map(|value| VersionField {
            .map(|(version_id, value)| VersionField {
                version_id,
                field_id: loader_field.id,
                field_name: loader_field.field.clone(),
@@ -1030,13 +1020,14 @@ impl VersionFieldValue {
    pub fn build(
        field_type: &LoaderFieldType,
        qvfs: Vec<QueryVersionField>,
    ) -> Result<VersionFieldValue, DatabaseError> {
        qlfev: &[QueryLoaderFieldEnumValue],
    ) -> Result<(VersionId, VersionFieldValue), DatabaseError> {
        match field_type {
            LoaderFieldType::Integer
            | LoaderFieldType::Text
            | LoaderFieldType::Boolean
            | LoaderFieldType::Enum(_) => {
                let mut fields = Self::build_many(field_type, qvfs)?;
                let mut fields = Self::build_many(field_type, qvfs, qlfev)?;
                if fields.len() > 1 {
                    return Err(DatabaseError::SchemaError(format!(
                        "Multiple fields for field {}",
@@ -1054,7 +1045,7 @@ impl VersionFieldValue {
            | LoaderFieldType::ArrayText
            | LoaderFieldType::ArrayBoolean
            | LoaderFieldType::ArrayEnum(_) => {
                let fields = Self::build_many(field_type, qvfs)?;
                let fields = Self::build_many(field_type, qvfs, qlfev)?;
                Ok(fields.into_iter().next().ok_or_else(|| {
                    DatabaseError::SchemaError(format!(
                        "No version fields for field {}",
@@ -1066,14 +1057,15 @@ impl VersionFieldValue {
    }

    // Build from internal query data
    // This encapsulates reundant behavior in db querie -> object conversions
    // This encapsulates redundant behavior in db query -> object conversions
    // This allows for multiple fields to be built at once. If there are multiple fields,
    // but the type only allows for a single field, then multiple VersionFieldValues will be returned
    // If there are multiple fields, and the type allows for multiple fields, then a single VersionFieldValue will be returned (array.len == 1)
    pub fn build_many(
        field_type: &LoaderFieldType,
        qvfs: Vec<QueryVersionField>,
    ) -> Result<Vec<VersionFieldValue>, DatabaseError> {
        qlfev: &[QueryLoaderFieldEnumValue],
    ) -> Result<Vec<(VersionId, VersionFieldValue)>, DatabaseError> {
        let field_name = field_type.to_str();
        let did_not_exist_error = |field_name: &str, desired_field: &str| {
            DatabaseError::SchemaError(format!(
@@ -1082,82 +1074,168 @@ impl VersionFieldValue {
            ))
        };

        Ok(match field_type {
        // Check errors- version_id must all be the same
        let version_id = qvfs
            .iter()
            .map(|qvf| qvf.version_id)
            .unique()
            .collect::<Vec<_>>();
        // If the field type is a non-array, then the reason for multiple version ids is that there are multiple versions being aggregated, and those version ids are contained within.
        // If the field type is an array, then the reason for multiple version ids is that there are multiple values for a single version
        // (or a greater aggregation between multiple arrays, in which case the per-field version is lost, so we just take the first one and use it for that)
        let version_id = version_id.into_iter().next().unwrap_or(VersionId(0));

        let field_id = qvfs
            .iter()
            .map(|qvf| qvf.field_id)
            .unique()
            .collect::<Vec<_>>();
        if field_id.len() > 1 {
            return Err(DatabaseError::SchemaError(format!(
                "Multiple field ids for field {}",
                field_name
            )));
        }

        let mut value = match field_type {
            // Singleton fields
            // If there are multiple, we assume multiple versions are being concatenated
            LoaderFieldType::Integer => qvfs
                .into_iter()
                .map(|qvf| {
                    Ok(VersionFieldValue::Integer(
                        qvf.int_value
                            .ok_or(did_not_exist_error(field_name, "int_value"))?,
                    Ok((
                        qvf.version_id,
                        VersionFieldValue::Integer(
                            qvf.int_value
                                .ok_or(did_not_exist_error(field_name, "int_value"))?,
                        ),
                    ))
                })
                .collect::<Result<Vec<VersionFieldValue>, DatabaseError>>()?,
                .collect::<Result<Vec<(VersionId, VersionFieldValue)>, DatabaseError>>()?,
            LoaderFieldType::Text => qvfs
                .into_iter()
                .map(|qvf| {
                    Ok::<VersionFieldValue, DatabaseError>(VersionFieldValue::Text(
                        qvf.string_value
                            .ok_or(did_not_exist_error(field_name, "string_value"))?,
                    Ok((
                        qvf.version_id,
                        VersionFieldValue::Text(
                            qvf.string_value
                                .ok_or(did_not_exist_error(field_name, "string_value"))?,
                        ),
                    ))
                })
                .collect::<Result<Vec<VersionFieldValue>, DatabaseError>>()?,
                .collect::<Result<Vec<(VersionId, VersionFieldValue)>, DatabaseError>>()?,
            LoaderFieldType::Boolean => qvfs
                .into_iter()
                .map(|qvf| {
                    Ok::<VersionFieldValue, DatabaseError>(VersionFieldValue::Boolean(
                        qvf.int_value
                            .ok_or(did_not_exist_error(field_name, "int_value"))?
                            != 0,
                    ))
                })
                .collect::<Result<Vec<VersionFieldValue>, DatabaseError>>()?,
            LoaderFieldType::Enum(id) => qvfs
                .into_iter()
                .map(|qvf| {
                    Ok::<VersionFieldValue, DatabaseError>(VersionFieldValue::Enum(
                        *id,
                        qvf.enum_value
                            .ok_or(did_not_exist_error(field_name, "enum_value"))?,
                    ))
                })
                .collect::<Result<Vec<VersionFieldValue>, DatabaseError>>()?,
            LoaderFieldType::ArrayInteger => vec![VersionFieldValue::ArrayInteger(
                qvfs.into_iter()
                    .map(|qvf| {
                        qvf.int_value
                            .ok_or(did_not_exist_error(field_name, "int_value"))
                    })
                    .collect::<Result<_, _>>()?,
            )],
            LoaderFieldType::ArrayText => vec![VersionFieldValue::ArrayText(
                qvfs.into_iter()
                    .map(|qvf| {
                        qvf.string_value
                            .ok_or(did_not_exist_error(field_name, "string_value"))
                    })
                    .collect::<Result<_, _>>()?,
            )],
            LoaderFieldType::ArrayBoolean => vec![VersionFieldValue::ArrayBoolean(
                qvfs.into_iter()
                    .map(|qvf| {
                        Ok::<bool, DatabaseError>(
                    Ok((
                        qvf.version_id,
                        VersionFieldValue::Boolean(
                            qvf.int_value
                                .ok_or(did_not_exist_error(field_name, "int_value"))?
                                != 0,
                        )
                    })
                    .collect::<Result<_, _>>()?,
                        ),
                    ))
                })
                .collect::<Result<Vec<(VersionId, VersionFieldValue)>, DatabaseError>>()?,
            LoaderFieldType::Enum(id) => qvfs
                .into_iter()
                .map(|qvf| {
                    Ok((
                        qvf.version_id,
                        VersionFieldValue::Enum(*id, {
                            let enum_id = qvf
                                .enum_value
                                .ok_or(did_not_exist_error(field_name, "enum_value"))?;
                            let lfev = qlfev
                                .iter()
                                .find(|x| x.id == enum_id)
                                .ok_or(did_not_exist_error(field_name, "enum_value"))?;
                            LoaderFieldEnumValue {
                                id: lfev.id,
                                enum_id: lfev.enum_id,
                                value: lfev.value.clone(),
                                ordering: lfev.ordering,
                                created: lfev.created,
                                metadata: lfev.metadata.clone().unwrap_or_default(),
                            }
                        }),
                    ))
                })
                .collect::<Result<Vec<(VersionId, VersionFieldValue)>, DatabaseError>>()?,

            // Array fields
            // We concatenate into one array
            LoaderFieldType::ArrayInteger => vec![(
                version_id,
                VersionFieldValue::ArrayInteger(
                    qvfs.into_iter()
                        .map(|qvf| {
                            qvf.int_value
                                .ok_or(did_not_exist_error(field_name, "int_value"))
                        })
                        .collect::<Result<_, _>>()?,
                ),
            )],
            LoaderFieldType::ArrayEnum(id) => vec![VersionFieldValue::ArrayEnum(
                *id,
                qvfs.into_iter()
                    .map(|qvf| {
                        qvf.enum_value
                            .ok_or(did_not_exist_error(field_name, "enum_value"))
                    })
                    .collect::<Result<_, _>>()?,
            LoaderFieldType::ArrayText => vec![(
                version_id,
                VersionFieldValue::ArrayText(
                    qvfs.into_iter()
                        .map(|qvf| {
                            qvf.string_value
                                .ok_or(did_not_exist_error(field_name, "string_value"))
                        })
                        .collect::<Result<_, _>>()?,
                ),
            )],
            })
            LoaderFieldType::ArrayBoolean => vec![(
                version_id,
                VersionFieldValue::ArrayBoolean(
                    qvfs.into_iter()
                        .map(|qvf| {
                            Ok::<bool, DatabaseError>(
                                qvf.int_value
                                    .ok_or(did_not_exist_error(field_name, "int_value"))?
                                    != 0,
                            )
                        })
                        .collect::<Result<_, _>>()?,
                ),
            )],
            LoaderFieldType::ArrayEnum(id) => vec![(
                version_id,
                VersionFieldValue::ArrayEnum(
                    *id,
                    qvfs.into_iter()
                        .map(|qvf| {
                            let enum_id = qvf
                                .enum_value
                                .ok_or(did_not_exist_error(field_name, "enum_value"))?;
                            let lfev = qlfev
                                .iter()
                                .find(|x| x.id == enum_id)
                                .ok_or(did_not_exist_error(field_name, "enum_value"))?;
                            Ok::<_, DatabaseError>(LoaderFieldEnumValue {
                                id: lfev.id,
                                enum_id: lfev.enum_id,
                                value: lfev.value.clone(),
                                ordering: lfev.ordering,
                                created: lfev.created,
                                metadata: lfev.metadata.clone().unwrap_or_default(),
                            })
                        })
                        .collect::<Result<_, _>>()?,
                ),
            )],
        };

        // Sort arrayenums by ordering, then by created
        for (_, v) in value.iter_mut() {
            if let VersionFieldValue::ArrayEnum(_, v) = v {
                v.sort_by(|a, b| a.ordering.cmp(&b.ordering).then(a.created.cmp(&b.created)));
            }
        }

        Ok(value)
    }

    // Serialize to internal value, such as for converting to user-facing JSON

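A small stand-alone illustration of the comparator applied to ArrayEnum values above: primary key is the Option<i32> ordering, ties broken by the created timestamp (assumes the chrono crate).

    use chrono::{TimeZone, Utc};

    fn main() {
        let mut vals = vec![
            (Some(2), Utc.with_ymd_and_hms(2024, 1, 2, 0, 0, 0).unwrap(), "b"),
            (Some(1), Utc.with_ymd_and_hms(2024, 1, 3, 0, 0, 0).unwrap(), "a"),
            (Some(1), Utc.with_ymd_and_hms(2024, 1, 1, 0, 0, 0).unwrap(), "c"),
        ];
        // Same comparator shape as the diff: ordering first, then created.
        vals.sort_by(|x, y| x.0.cmp(&y.0).then(x.1.cmp(&y.1)));
        assert_eq!(vals.iter().map(|v| v.2).collect::<Vec<_>>(), vec!["c", "a", "b"]);
    }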
@@ -1,4 +1,6 @@
use super::loader_fields::VersionField;
use super::loader_fields::{
    QueryLoaderField, QueryLoaderFieldEnumValue, QueryVersionField, VersionField,
};
use super::{ids::*, User};
use crate::database::models;
use crate::database::models::DatabaseError;
@@ -6,6 +8,7 @@ use crate::database::redis::RedisPool;
use crate::models::ids::base62_impl::{parse_base62, to_base62};
use crate::models::projects::{MonetizationStatus, ProjectStatus};
use chrono::{DateTime, Utc};
use dashmap::{DashMap, DashSet};
use futures::TryStreamExt;
use itertools::Itertools;
use serde::{Deserialize, Serialize};
@@ -446,7 +449,7 @@ impl Project {
        redis: &RedisPool,
    ) -> Result<Option<QueryProject>, DatabaseError>
    where
        E: sqlx::Executor<'a, Database = sqlx::Postgres>,
        E: sqlx::Acquire<'a, Database = sqlx::Postgres>,
    {
        Project::get_many(&[string], executor, redis)
            .await
@@ -459,7 +462,7 @@ impl Project {
        redis: &RedisPool,
    ) -> Result<Option<QueryProject>, DatabaseError>
    where
        E: sqlx::Executor<'a, Database = sqlx::Postgres>,
        E: sqlx::Acquire<'a, Database = sqlx::Postgres>,
    {
        Project::get_many(&[crate::models::ids::ProjectId::from(id)], executor, redis)
            .await
@@ -472,7 +475,7 @@ impl Project {
        redis: &RedisPool,
    ) -> Result<Vec<QueryProject>, DatabaseError>
    where
        E: sqlx::Executor<'a, Database = sqlx::Postgres>,
        E: sqlx::Acquire<'a, Database = sqlx::Postgres>,
    {
        let ids = project_ids
            .iter()
@@ -487,13 +490,14 @@ impl Project {
        redis: &RedisPool,
    ) -> Result<Vec<QueryProject>, DatabaseError>
    where
        E: sqlx::Executor<'a, Database = sqlx::Postgres>,
        E: sqlx::Acquire<'a, Database = sqlx::Postgres>,
    {
        if project_strings.is_empty() {
            return Ok(Vec::new());
        }

        let mut redis = redis.connect().await?;
        let mut exec = exec.acquire().await?;

        let mut found_projects = Vec::new();
        let mut remaining_strings = project_strings
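The bound moves from sqlx::Executor to sqlx::Acquire because the rewritten get_many now runs several queries in sequence, so it first acquires a single connection and reuses it for each query. A minimal, self-contained sketch of that shape (signature simplified, not the crate's code):

    async fn run_many<'a, A>(conn: A) -> Result<(), sqlx::Error>
    where
        A: sqlx::Acquire<'a, Database = sqlx::Postgres>,
    {
        // Turn the Acquire-able handle (pool, connection, or transaction)
        // into one concrete connection, then run queries back to back on it.
        let mut exec = conn.acquire().await?;
        sqlx::query("SELECT 1").execute(&mut *exec).await?;
        sqlx::query("SELECT 2").execute(&mut *exec).await?;
        Ok(())
    }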
@@ -544,104 +548,200 @@ impl Project {
|
||||
.flat_map(|x| parse_base62(&x.to_string()).ok())
|
||||
.map(|x| x as i64)
|
||||
.collect();
|
||||
let slugs = remaining_strings
|
||||
.into_iter()
|
||||
.map(|x| x.to_string().to_lowercase())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let all_version_ids = DashSet::new();
|
||||
let versions: DashMap<ProjectId, Vec<(VersionId, DateTime<Utc>)>> = sqlx::query!(
|
||||
"
|
||||
SELECT DISTINCT mod_id, v.id as id, date_published
|
||||
FROM mods m
|
||||
INNER JOIN versions v ON m.id = v.mod_id AND v.status = ANY($3)
|
||||
WHERE m.id = ANY($1) OR m.slug = ANY($2)
|
||||
",
|
||||
&project_ids_parsed,
|
||||
&slugs,
|
||||
&*crate::models::projects::VersionStatus::iterator()
|
||||
.filter(|x| x.is_listed())
|
||||
.map(|x| x.to_string())
|
||||
.collect::<Vec<String>>()
|
||||
)
|
||||
.fetch(&mut *exec)
|
||||
.try_fold(DashMap::new(), |acc : DashMap<ProjectId, Vec<(VersionId, DateTime<Utc>)>>, m| {
|
||||
let version_id = VersionId(m.id);
|
||||
let date_published = m.date_published;
|
||||
all_version_ids.insert(version_id);
|
||||
acc.entry(ProjectId(m.mod_id))
|
||||
.or_default()
|
||||
.push((version_id, date_published));
|
||||
async move { Ok(acc) }
|
||||
})
|
||||
.await?;
|
||||
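The query above streams rows and folds them straight into a `DashMap` keyed by project id, so each version lands in its project's bucket as it arrives instead of being collected and regrouped afterwards. The same `TryStreamExt::try_fold` pattern, reduced to an in-memory stream with made-up ids (dashmap and futures only):

// --- illustrative sketch, not part of the diff ---
use dashmap::DashMap;
use futures::{executor::block_on, stream, TryStreamExt};

fn main() -> Result<(), std::io::Error> {
    // Pretend rows: (project_id, version_id) pairs as a query stream would yield them.
    let rows = stream::iter(vec![
        Ok::<(i64, i64), std::io::Error>((1, 10)),
        Ok((1, 11)),
        Ok((2, 20)),
    ]);

    // Group version ids under their project id while the stream is consumed.
    let grouped = block_on(rows.try_fold(
        DashMap::<i64, Vec<i64>>::new(),
        |acc, (project_id, version_id)| {
            acc.entry(project_id).or_default().push(version_id);
            async move { Ok(acc) }
        },
    ))?;

    assert_eq!(grouped.get(&1).unwrap().len(), 2);
    Ok(())
}
// --- end sketch ---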
|
||||
let loader_field_ids = DashSet::new();
|
||||
let loader_field_enum_value_ids = DashSet::new();
|
||||
let version_fields: DashMap<ProjectId, Vec<QueryVersionField>> = sqlx::query!(
|
||||
"
|
||||
SELECT DISTINCT mod_id, version_id, field_id, int_value, enum_value, string_value
|
||||
FROM versions v
|
||||
INNER JOIN version_fields vf ON v.id = vf.version_id
|
||||
WHERE v.id = ANY($1)
|
||||
",
|
||||
&all_version_ids.iter().map(|x| x.0).collect::<Vec<_>>()
|
||||
)
|
||||
.fetch(&mut *exec)
|
||||
.try_fold(DashMap::new(), |acc : DashMap<ProjectId, Vec<QueryVersionField>>, m| {
|
||||
let qvf = QueryVersionField {
|
||||
version_id: VersionId(m.version_id),
|
||||
field_id: LoaderFieldId(m.field_id),
|
||||
int_value: m.int_value,
|
||||
enum_value: m.enum_value.map(LoaderFieldEnumValueId),
|
||||
string_value: m.string_value,
|
||||
};
|
||||
|
||||
loader_field_ids.insert(LoaderFieldId(m.field_id));
|
||||
if let Some(enum_value) = m.enum_value {
|
||||
loader_field_enum_value_ids.insert(LoaderFieldEnumValueId(enum_value));
|
||||
}
|
||||
|
||||
acc.entry(ProjectId(m.mod_id))
|
||||
.or_default()
|
||||
.push(qvf);
|
||||
async move { Ok(acc) }
|
||||
})
|
||||
.await?;
|
||||
|
||||
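Rather than joining loader field definitions into the first query, the code records every distinct `LoaderFieldId` and `LoaderFieldEnumValueId` it sees in `DashSet`s while folding, then fetches the definitions below with a single `id = ANY($1)` query. A toy version of that collect-then-query step (plain i32 ids instead of the newtype wrappers):

// --- illustrative sketch, not part of the diff ---
use dashmap::DashSet;

fn main() {
    // Ids gathered while streaming version_fields rows; duplicates collapse automatically.
    let loader_field_ids: DashSet<i32> = DashSet::new();
    for id in [3, 7, 3, 9, 7] {
        loader_field_ids.insert(id);
    }

    // Second pass: turn the set into the Vec that would be bound to `id = ANY($1)`.
    let mut params: Vec<i32> = loader_field_ids.iter().map(|x| *x).collect();
    params.sort_unstable();
    assert_eq!(params, vec![3, 7, 9]);
}
// --- end sketch ---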
let loader_fields: Vec<QueryLoaderField> = sqlx::query!(
|
||||
"
|
||||
SELECT DISTINCT id, field, field_type, enum_type, min_val, max_val, optional
|
||||
FROM loader_fields lf
|
||||
WHERE id = ANY($1)
|
||||
",
|
||||
&loader_field_ids.iter().map(|x| x.0).collect::<Vec<_>>()
|
||||
)
|
||||
.fetch(&mut *exec)
|
||||
.map_ok(|m| QueryLoaderField {
|
||||
id: LoaderFieldId(m.id),
|
||||
field: m.field,
|
||||
field_type: m.field_type,
|
||||
enum_type: m.enum_type.map(LoaderFieldEnumId),
|
||||
min_val: m.min_val,
|
||||
max_val: m.max_val,
|
||||
optional: m.optional,
|
||||
})
|
||||
.try_collect()
|
||||
.await?;
|
||||
|
||||
let loader_field_enum_values: Vec<QueryLoaderFieldEnumValue> = sqlx::query!(
|
||||
"
|
||||
SELECT DISTINCT id, enum_id, value, ordering, created, metadata
|
||||
FROM loader_field_enum_values lfev
|
||||
WHERE id = ANY($1)
|
||||
ORDER BY enum_id, ordering, created DESC
|
||||
",
|
||||
&loader_field_enum_value_ids
|
||||
.iter()
|
||||
.map(|x| x.0)
|
||||
.collect::<Vec<_>>()
|
||||
)
|
||||
.fetch(&mut *exec)
|
||||
.map_ok(|m| QueryLoaderFieldEnumValue {
|
||||
id: LoaderFieldEnumValueId(m.id),
|
||||
enum_id: LoaderFieldEnumId(m.enum_id),
|
||||
value: m.value,
|
||||
ordering: m.ordering,
|
||||
created: m.created,
|
||||
metadata: m.metadata,
|
||||
})
|
||||
.try_collect()
|
||||
.await?;
|
||||
|
||||
let mods_gallery: DashMap<ProjectId, Vec<GalleryItem>> = sqlx::query!(
|
||||
"
|
||||
SELECT DISTINCT mod_id, mg.image_url, mg.featured, mg.name, mg.description, mg.created, mg.ordering
|
||||
FROM mods_gallery mg
|
||||
INNER JOIN mods m ON mg.mod_id = m.id
|
||||
WHERE m.id = ANY($1) OR m.slug = ANY($2)
|
||||
",
|
||||
&project_ids_parsed,
|
||||
&slugs
|
||||
).fetch(&mut *exec)
|
||||
.try_fold(DashMap::new(), |acc : DashMap<ProjectId, Vec<GalleryItem>>, m| {
|
||||
acc.entry(ProjectId(m.mod_id))
|
||||
.or_default()
|
||||
.push(GalleryItem {
|
||||
image_url: m.image_url,
|
||||
featured: m.featured.unwrap_or(false),
|
||||
name: m.name,
|
||||
description: m.description,
|
||||
created: m.created,
|
||||
ordering: m.ordering,
|
||||
});
|
||||
async move { Ok(acc) }
|
||||
}
|
||||
).await?;
|
||||
|
||||
let links: DashMap<ProjectId, Vec<LinkUrl>> = sqlx::query!(
|
||||
"
|
||||
SELECT DISTINCT joining_mod_id as mod_id, joining_platform_id as platform_id, lp.name as platform_name, url, lp.donation as donation
|
||||
FROM mods_links ml
|
||||
INNER JOIN mods m ON ml.joining_mod_id = m.id
|
||||
INNER JOIN link_platforms lp ON ml.joining_platform_id = lp.id
|
||||
WHERE m.id = ANY($1) OR m.slug = ANY($2)
|
||||
",
|
||||
&project_ids_parsed,
|
||||
&slugs
|
||||
).fetch(&mut *exec)
|
||||
.try_fold(DashMap::new(), |acc : DashMap<ProjectId, Vec<LinkUrl>>, m| {
|
||||
acc.entry(ProjectId(m.mod_id))
|
||||
.or_default()
|
||||
.push(LinkUrl {
|
||||
platform_id: LinkPlatformId(m.platform_id),
|
||||
platform_name: m.platform_name,
|
||||
url: m.url,
|
||||
donation: m.donation,
|
||||
});
|
||||
async move { Ok(acc) }
|
||||
}
|
||||
).await?;
|
||||
|
||||
type StringTriple = (Vec<String>, Vec<String>, Vec<String>);
|
||||
let loaders_ptypes_games: DashMap<ProjectId, StringTriple> = sqlx::query!(
|
||||
"
|
||||
SELECT DISTINCT mod_id,
|
||||
ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,
|
||||
ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types,
|
||||
ARRAY_AGG(DISTINCT g.slug) filter (where g.slug is not null) games
|
||||
FROM versions v
|
||||
INNER JOIN loaders_versions lv ON v.id = lv.version_id
|
||||
INNER JOIN loaders l ON lv.loader_id = l.id
|
||||
INNER JOIN loaders_project_types lpt ON lpt.joining_loader_id = l.id
|
||||
INNER JOIN project_types pt ON pt.id = lpt.joining_project_type_id
|
||||
INNER JOIN loaders_project_types_games lptg ON lptg.loader_id = l.id AND lptg.project_type_id = pt.id
|
||||
INNER JOIN games g ON lptg.game_id = g.id
|
||||
WHERE v.id = ANY($1)
|
||||
GROUP BY mod_id
|
||||
",
|
||||
&all_version_ids.iter().map(|x| x.0).collect::<Vec<_>>()
|
||||
).fetch(&mut *exec)
|
||||
.map_ok(|m| {
|
||||
let project_id = ProjectId(m.mod_id);
|
||||
let loaders = m.loaders.unwrap_or_default();
|
||||
let project_types = m.project_types.unwrap_or_default();
|
||||
let games = m.games.unwrap_or_default();
|
||||
|
||||
(project_id, (loaders, project_types, games))
|
||||
|
||||
}
|
||||
).try_collect().await?;
|
||||
|
||||
// TODO: Possible improvements to look into:
|
||||
// - use multiple queries instead of CTES (for cleanliness?)
|
||||
// - repeated joins to mods in separate CTEs- perhaps 1 CTE for mods and use later (in mods_gallery_json, mods_donations_json, etc.)
|
||||
let db_projects: Vec<QueryProject> = sqlx::query!(
|
||||
"
|
||||
WITH version_fields_cte AS (
|
||||
SELECT mod_id, version_id, field_id, int_value, enum_value, string_value
|
||||
FROM mods m
|
||||
INNER JOIN versions v ON m.id = v.mod_id
|
||||
INNER JOIN version_fields vf ON v.id = vf.version_id
|
||||
WHERE m.id = ANY($1) OR m.slug = ANY($2)
|
||||
),
|
||||
version_fields_json AS (
|
||||
SELECT DISTINCT mod_id,
|
||||
JSONB_AGG(
|
||||
DISTINCT jsonb_build_object('version_id', version_id, 'field_id', field_id, 'int_value', int_value, 'enum_value', enum_value, 'string_value', string_value)
|
||||
) version_fields_json
|
||||
FROM version_fields_cte
|
||||
GROUP BY mod_id
|
||||
),
|
||||
loader_fields_cte AS (
|
||||
SELECT DISTINCT vf.mod_id, vf.version_id, lf.*, l.loader
|
||||
FROM loader_fields lf
|
||||
INNER JOIN version_fields_cte vf ON lf.id = vf.field_id
|
||||
LEFT JOIN loaders_versions lv ON vf.version_id = lv.version_id
|
||||
LEFT JOIN loaders l ON lv.loader_id = l.id
|
||||
GROUP BY vf.mod_id, vf.version_id, lf.enum_type, lf.id, l.loader
|
||||
),
|
||||
loader_fields_json AS (
|
||||
SELECT DISTINCT mod_id,
|
||||
JSONB_AGG(
|
||||
DISTINCT jsonb_build_object(
|
||||
'version_id', lf.version_id,
|
||||
'lf_id', id, 'loader_name', loader, 'field', field, 'field_type', field_type, 'enum_type', enum_type, 'min_val', min_val, 'max_val', max_val, 'optional', optional
|
||||
)
|
||||
) filter (where lf.id is not null) loader_fields_json
|
||||
FROM loader_fields_cte lf
|
||||
GROUP BY mod_id
|
||||
),
|
||||
loader_field_enum_values_json AS (
|
||||
SELECT DISTINCT mod_id,
|
||||
JSONB_AGG(
|
||||
DISTINCT jsonb_build_object(
|
||||
'id', lfev.id, 'enum_id', lfev.enum_id, 'value', lfev.value, 'ordering', lfev.ordering, 'created', lfev.created, 'metadata', lfev.metadata
|
||||
)
|
||||
) filter (where lfev.id is not null) loader_field_enum_values_json
|
||||
FROM loader_field_enum_values lfev
|
||||
INNER JOIN loader_fields_cte lf on lf.enum_type = lfev.enum_id
|
||||
GROUP BY mod_id
|
||||
),
|
||||
versions_cte AS (
|
||||
SELECT DISTINCT mod_id, v.id as id, date_published
|
||||
FROM mods m
|
||||
INNER JOIN versions v ON m.id = v.mod_id AND v.status = ANY($3)
|
||||
WHERE m.id = ANY($1) OR m.slug = ANY($2)
|
||||
),
|
||||
versions_json AS (
|
||||
SELECT DISTINCT mod_id,
|
||||
JSONB_AGG(
|
||||
DISTINCT jsonb_build_object(
|
||||
'id', id, 'date_published', date_published
|
||||
)
|
||||
) filter (where id is not null) versions_json
|
||||
FROM versions_cte
|
||||
GROUP BY mod_id
|
||||
),
|
||||
loaders_cte AS (
|
||||
SELECT DISTINCT mod_id, l.id as id, l.loader
|
||||
FROM versions_cte
|
||||
INNER JOIN loaders_versions lv ON versions_cte.id = lv.version_id
|
||||
INNER JOIN loaders l ON lv.loader_id = l.id
|
||||
),
|
||||
mods_gallery_json AS (
|
||||
SELECT DISTINCT mod_id,
|
||||
JSONB_AGG(
|
||||
DISTINCT jsonb_build_object(
|
||||
'image_url', mg.image_url, 'featured', mg.featured, 'name', mg.name, 'description', mg.description, 'created', mg.created, 'ordering', mg.ordering
|
||||
)
|
||||
) filter (where image_url is not null) mods_gallery_json
|
||||
FROM mods_gallery mg
|
||||
INNER JOIN mods m ON mg.mod_id = m.id
|
||||
WHERE m.id = ANY($1) OR m.slug = ANY($2)
|
||||
GROUP BY mod_id
|
||||
),
|
||||
links_json AS (
|
||||
SELECT DISTINCT joining_mod_id as mod_id,
|
||||
JSONB_AGG(
|
||||
DISTINCT jsonb_build_object(
|
||||
'platform_id', ml.joining_platform_id, 'platform_name', lp.name,'url', ml.url, 'donation', lp.donation
|
||||
)
|
||||
) filter (where ml.joining_platform_id is not null) links_json
|
||||
FROM mods_links ml
|
||||
INNER JOIN mods m ON ml.joining_mod_id = m.id AND m.id = ANY($1) OR m.slug = ANY($2)
|
||||
INNER JOIN link_platforms lp ON ml.joining_platform_id = lp.id
|
||||
GROUP BY mod_id
|
||||
)
|
||||
|
||||
SELECT m.id id, m.name name, m.summary summary, m.downloads downloads, m.follows follows,
|
||||
m.icon_url icon_url, m.description description, m.published published,
|
||||
m.updated updated, m.approved approved, m.queued, m.status status, m.requested_status requested_status,
|
||||
@@ -649,43 +749,28 @@ impl Project {
|
||||
m.team_id team_id, m.organization_id organization_id, m.license license, m.slug slug, m.moderation_message moderation_message, m.moderation_message_body moderation_message_body,
|
||||
m.webhook_sent, m.color,
|
||||
t.id thread_id, m.monetization_status monetization_status,
|
||||
ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,
|
||||
ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types,
|
||||
ARRAY_AGG(DISTINCT g.slug) filter (where g.slug is not null) games,
|
||||
ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is false) categories,
|
||||
ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is true) additional_categories,
|
||||
v.versions_json versions,
|
||||
mg.mods_gallery_json gallery,
|
||||
ml.links_json links,
|
||||
vf.version_fields_json version_fields,
|
||||
lf.loader_fields_json loader_fields,
|
||||
lfev.loader_field_enum_values_json loader_field_enum_values
|
||||
ARRAY_AGG(DISTINCT c.category) filter (where c.category is not null and mc.is_additional is true) additional_categories
|
||||
FROM mods m
|
||||
INNER JOIN threads t ON t.mod_id = m.id
|
||||
LEFT JOIN mods_gallery_json mg ON mg.mod_id = m.id
|
||||
LEFT JOIN links_json ml ON ml.mod_id = m.id
|
||||
LEFT JOIN mods_categories mc ON mc.joining_mod_id = m.id
|
||||
LEFT JOIN categories c ON mc.joining_category_id = c.id
|
||||
LEFT JOIN versions_json v ON v.mod_id = m.id
|
||||
LEFT JOIN loaders_cte l on l.mod_id = m.id
|
||||
LEFT JOIN loaders_project_types lpt ON lpt.joining_loader_id = l.id
|
||||
LEFT JOIN project_types pt ON pt.id = lpt.joining_project_type_id
|
||||
LEFT JOIN loaders_project_types_games lptg ON lptg.loader_id = l.id AND lptg.project_type_id = pt.id
|
||||
LEFT JOIN games g ON lptg.game_id = g.id
|
||||
LEFT OUTER JOIN version_fields_json vf ON m.id = vf.mod_id
|
||||
LEFT OUTER JOIN loader_fields_json lf ON m.id = lf.mod_id
|
||||
LEFT OUTER JOIN loader_field_enum_values_json lfev ON m.id = lfev.mod_id
|
||||
WHERE m.id = ANY($1) OR m.slug = ANY($2)
|
||||
GROUP BY t.id, m.id, version_fields_json, loader_fields_json, loader_field_enum_values_json, versions_json, mods_gallery_json, links_json;
|
||||
GROUP BY t.id, m.id;
|
||||
",
|
||||
&project_ids_parsed,
|
||||
&remaining_strings.into_iter().map(|x| x.to_string().to_lowercase()).collect::<Vec<_>>(),
|
||||
&*crate::models::projects::VersionStatus::iterator().filter(|x| x.is_listed()).map(|x| x.to_string()).collect::<Vec<String>>()
|
||||
&slugs,
|
||||
)
|
||||
.fetch_many(exec)
|
||||
.fetch_many(&mut *exec)
|
||||
.try_filter_map(|e| async {
|
||||
Ok(e.right().map(|m| {
|
||||
let id = m.id;
|
||||
let project_id = ProjectId(id);
|
||||
let (loaders, project_types, games) = loaders_ptypes_games.remove(&project_id).map(|x| x.1).unwrap_or_default();
|
||||
let mut versions = versions.remove(&project_id).map(|x| x.1).unwrap_or_default();
|
||||
let mut gallery = mods_gallery.remove(&project_id).map(|x| x.1).unwrap_or_default();
|
||||
let urls = links.remove(&project_id).map(|x| x.1).unwrap_or_default();
|
||||
let version_fields = version_fields.remove(&project_id).map(|x| x.1).unwrap_or_default();
|
||||
QueryProject {
|
||||
inner: Project {
|
||||
id: ProjectId(id),
|
||||
@@ -717,41 +802,23 @@ impl Project {
|
||||
monetization_status: MonetizationStatus::from_string(
|
||||
&m.monetization_status,
|
||||
),
|
||||
loaders: m.loaders.unwrap_or_default(),
|
||||
loaders,
|
||||
},
|
||||
categories: m.categories.unwrap_or_default(),
|
||||
additional_categories: m.additional_categories.unwrap_or_default(),
|
||||
project_types: m.project_types.unwrap_or_default(),
|
||||
games: m.games.unwrap_or_default(),
|
||||
project_types,
|
||||
games,
|
||||
versions: {
|
||||
#[derive(Deserialize)]
|
||||
struct Version {
|
||||
pub id: VersionId,
|
||||
pub date_published: DateTime<Utc>,
|
||||
}
|
||||
|
||||
let mut versions: Vec<Version> = serde_json::from_value(
|
||||
m.versions.unwrap_or_default(),
|
||||
)
|
||||
.ok()
|
||||
.unwrap_or_default();
|
||||
|
||||
versions.sort_by(|a, b| a.date_published.cmp(&b.date_published));
|
||||
versions.into_iter().map(|x| x.id).collect()
|
||||
// Each version is a tuple of (VersionId, DateTime<Utc>)
|
||||
versions.sort_by(|a, b| a.1.cmp(&b.1));
|
||||
versions.into_iter().map(|x| x.0).collect()
|
||||
},
|
||||
gallery_items: {
|
||||
let mut gallery: Vec<GalleryItem> = serde_json::from_value(
|
||||
m.gallery.unwrap_or_default(),
|
||||
).ok().unwrap_or_default();
|
||||
|
||||
gallery.sort_by(|a, b| a.ordering.cmp(&b.ordering));
|
||||
|
||||
gallery
|
||||
},
|
||||
urls: serde_json::from_value(
|
||||
m.links.unwrap_or_default(),
|
||||
).unwrap_or_default(),
|
||||
aggregate_version_fields: VersionField::from_query_json(m.loader_fields, m.version_fields, m.loader_field_enum_values, true),
|
||||
urls,
|
||||
aggregate_version_fields: VersionField::from_query_json(version_fields, &loader_fields, &loader_field_enum_values, true),
|
||||
thread_id: ThreadId(m.thread_id),
|
||||
}}))
|
||||
})
|
||||
|
||||
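Both this file and version_item.rs below assemble the final structs with the idiom `map.remove(&id).map(|x| x.1).unwrap_or_default()`. `DashMap::remove` returns the whole `(key, value)` pair, so `.1` takes ownership of the value and a missing key falls back to an empty default. A tiny demonstration with made-up ids:

// --- illustrative sketch, not part of the diff ---
use dashmap::DashMap;

fn main() {
    let versions: DashMap<i64, Vec<i64>> = DashMap::new();
    versions.insert(1, vec![10, 11]);

    // remove() yields Option<(key, value)>; keep the value or fall back to empty.
    let for_project_1: Vec<i64> = versions.remove(&1).map(|x| x.1).unwrap_or_default();
    let for_project_2: Vec<i64> = versions.remove(&2).map(|x| x.1).unwrap_or_default();

    assert_eq!(for_project_1, vec![10, 11]);
    assert!(for_project_2.is_empty());
}
// --- end sketch ---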
@@ -1,9 +1,13 @@
use super::ids::*;
use super::loader_fields::VersionField;
use super::DatabaseError;
use crate::database::models::loader_fields::{
    QueryLoaderField, QueryLoaderFieldEnumValue, QueryVersionField,
};
use crate::database::redis::RedisPool;
use crate::models::projects::{FileType, VersionStatus};
use chrono::{DateTime, Utc};
use dashmap::{DashMap, DashSet};
use itertools::Itertools;
use serde::{Deserialize, Serialize};
use std::cmp::Ordering;
@@ -475,7 +479,7 @@ impl Version {
|
||||
redis: &RedisPool,
|
||||
) -> Result<Option<QueryVersion>, DatabaseError>
|
||||
where
|
||||
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
|
||||
E: sqlx::Acquire<'a, Database = sqlx::Postgres>,
|
||||
{
|
||||
Self::get_many(&[id], executor, redis)
|
||||
.await
|
||||
@@ -488,7 +492,7 @@ impl Version {
|
||||
redis: &RedisPool,
|
||||
) -> Result<Vec<QueryVersion>, DatabaseError>
|
||||
where
|
||||
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
|
||||
E: sqlx::Acquire<'a, Database = sqlx::Postgres>,
|
||||
{
|
||||
use futures::stream::TryStreamExt;
|
||||
|
||||
@@ -496,6 +500,7 @@ impl Version {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
|
||||
let mut exec = exec.acquire().await?;
|
||||
let mut redis = redis.connect().await?;
|
||||
|
||||
let mut version_ids_parsed: Vec<i64> = version_ids.iter().map(|x| x.0).collect();
|
||||
@@ -524,116 +529,230 @@ impl Version {
|
||||
}
|
||||
|
||||
if !version_ids_parsed.is_empty() {
|
||||
let loader_field_ids = DashSet::new();
|
||||
let loader_field_enum_value_ids = DashSet::new();
|
||||
let version_fields: DashMap<VersionId, Vec<QueryVersionField>> = sqlx::query!(
|
||||
"
|
||||
SELECT version_id, field_id, int_value, enum_value, string_value
|
||||
FROM version_fields
|
||||
WHERE version_id = ANY($1)
|
||||
",
|
||||
&version_ids_parsed
|
||||
)
|
||||
.fetch(&mut *exec)
|
||||
.try_fold(DashMap::new(), |acc : DashMap<VersionId, Vec<QueryVersionField>>, m| {
|
||||
let qvf = QueryVersionField {
|
||||
version_id: VersionId(m.version_id),
|
||||
field_id: LoaderFieldId(m.field_id),
|
||||
int_value: m.int_value,
|
||||
enum_value: m.enum_value.map(LoaderFieldEnumValueId),
|
||||
string_value: m.string_value,
|
||||
};
|
||||
|
||||
loader_field_ids.insert(LoaderFieldId(m.field_id));
|
||||
if let Some(enum_value) = m.enum_value {
|
||||
loader_field_enum_value_ids.insert(LoaderFieldEnumValueId(enum_value));
|
||||
}
|
||||
|
||||
acc.entry(VersionId(m.version_id))
|
||||
.or_default()
|
||||
.push(qvf);
|
||||
async move { Ok(acc) }
|
||||
})
|
||||
.await?;
|
||||
|
||||
let loader_fields: Vec<QueryLoaderField> = sqlx::query!(
|
||||
"
|
||||
SELECT DISTINCT id, field, field_type, enum_type, min_val, max_val, optional
|
||||
FROM loader_fields lf
|
||||
WHERE id = ANY($1)
|
||||
",
|
||||
&loader_field_ids.iter().map(|x| x.0).collect::<Vec<_>>()
|
||||
)
|
||||
.fetch(&mut *exec)
|
||||
.map_ok(|m| QueryLoaderField {
|
||||
id: LoaderFieldId(m.id),
|
||||
field: m.field,
|
||||
field_type: m.field_type,
|
||||
enum_type: m.enum_type.map(LoaderFieldEnumId),
|
||||
min_val: m.min_val,
|
||||
max_val: m.max_val,
|
||||
optional: m.optional,
|
||||
})
|
||||
.try_collect()
|
||||
.await?;
|
||||
|
||||
let loader_field_enum_values: Vec<QueryLoaderFieldEnumValue> = sqlx::query!(
|
||||
"
|
||||
SELECT DISTINCT id, enum_id, value, ordering, created, metadata
|
||||
FROM loader_field_enum_values lfev
|
||||
WHERE id = ANY($1)
|
||||
ORDER BY enum_id, ordering, created ASC
|
||||
",
|
||||
&loader_field_enum_value_ids
|
||||
.iter()
|
||||
.map(|x| x.0)
|
||||
.collect::<Vec<_>>()
|
||||
)
|
||||
.fetch(&mut *exec)
|
||||
.map_ok(|m| QueryLoaderFieldEnumValue {
|
||||
id: LoaderFieldEnumValueId(m.id),
|
||||
enum_id: LoaderFieldEnumId(m.enum_id),
|
||||
value: m.value,
|
||||
ordering: m.ordering,
|
||||
created: m.created,
|
||||
metadata: m.metadata,
|
||||
})
|
||||
.try_collect()
|
||||
.await?;
|
||||
|
||||
type StringTriple = (Vec<String>, Vec<String>, Vec<String>);
|
||||
let loaders_ptypes_games: DashMap<VersionId, StringTriple> = sqlx::query!(
|
||||
"
|
||||
SELECT DISTINCT version_id,
|
||||
ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,
|
||||
ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types,
|
||||
ARRAY_AGG(DISTINCT g.slug) filter (where g.slug is not null) games
|
||||
FROM versions v
|
||||
INNER JOIN loaders_versions lv ON v.id = lv.version_id
|
||||
INNER JOIN loaders l ON lv.loader_id = l.id
|
||||
INNER JOIN loaders_project_types lpt ON lpt.joining_loader_id = l.id
|
||||
INNER JOIN project_types pt ON pt.id = lpt.joining_project_type_id
|
||||
INNER JOIN loaders_project_types_games lptg ON lptg.loader_id = l.id AND lptg.project_type_id = pt.id
|
||||
INNER JOIN games g ON lptg.game_id = g.id
|
||||
WHERE v.id = ANY($1)
|
||||
GROUP BY version_id
|
||||
",
|
||||
&version_ids_parsed
|
||||
).fetch(&mut *exec)
|
||||
.map_ok(|m| {
|
||||
let version_id = VersionId(m.version_id);
|
||||
let loaders = m.loaders.unwrap_or_default();
|
||||
let project_types = m.project_types.unwrap_or_default();
|
||||
let games = m.games.unwrap_or_default();
|
||||
|
||||
(version_id, (loaders, project_types, games))
|
||||
|
||||
}
|
||||
).try_collect().await?;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct Hash {
|
||||
pub file_id: FileId,
|
||||
pub algorithm: String,
|
||||
pub hash: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct File {
|
||||
pub id: FileId,
|
||||
pub url: String,
|
||||
pub filename: String,
|
||||
pub primary: bool,
|
||||
pub size: u32,
|
||||
pub file_type: Option<FileType>,
|
||||
}
|
||||
|
||||
let file_ids = DashSet::new();
|
||||
let reverse_file_map = DashMap::new();
|
||||
let files : DashMap<VersionId, Vec<File>> = sqlx::query!(
|
||||
"
|
||||
SELECT DISTINCT version_id, f.id, f.url, f.filename, f.is_primary, f.size, f.file_type
|
||||
FROM files f
|
||||
WHERE f.version_id = ANY($1)
|
||||
",
|
||||
&version_ids_parsed
|
||||
).fetch(&mut *exec)
|
||||
.try_fold(DashMap::new(), |acc : DashMap<VersionId, Vec<File>>, m| {
|
||||
let file = File {
|
||||
id: FileId(m.id),
|
||||
url: m.url,
|
||||
filename: m.filename,
|
||||
primary: m.is_primary,
|
||||
size: m.size as u32,
|
||||
file_type: m.file_type.map(|x| FileType::from_string(&x)),
|
||||
};
|
||||
|
||||
file_ids.insert(FileId(m.id));
|
||||
reverse_file_map.insert(FileId(m.id), VersionId(m.version_id));
|
||||
|
||||
acc.entry(VersionId(m.version_id))
|
||||
.or_default()
|
||||
.push(file);
|
||||
async move { Ok(acc) }
|
||||
}
|
||||
).await?;
|
||||
|
||||
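Hash rows only carry a `file_id`, so the `reverse_file_map` filled in while folding the files above is what lets the next query attach each hash to the right version. A reduced sketch of that reverse-index lookup (plain type aliases instead of the real id newtypes):

// --- illustrative sketch, not part of the diff ---
use std::collections::HashMap;

type FileId = i64;
type VersionId = i64;

fn group_hashes_by_version(
    reverse_file_map: &HashMap<FileId, VersionId>,
    hash_rows: &[(FileId, String, String)], // (file_id, algorithm, hash)
) -> HashMap<VersionId, Vec<(String, String)>> {
    let mut out: HashMap<VersionId, Vec<(String, String)>> = HashMap::new();
    for (file_id, algorithm, hash) in hash_rows {
        // Look the file's owning version up in the index built from the files query.
        if let Some(version_id) = reverse_file_map.get(file_id) {
            out.entry(*version_id)
                .or_default()
                .push((algorithm.clone(), hash.clone()));
        }
    }
    out
}

fn main() {
    let reverse = HashMap::from([(100, 10)]);
    let rows = vec![(100, "sha1".to_string(), "abc123".to_string())];
    let grouped = group_hashes_by_version(&reverse, &rows);
    assert_eq!(grouped[&10].len(), 1);
}
// --- end sketch ---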
let hashes: DashMap<VersionId, Vec<Hash>> = sqlx::query!(
|
||||
"
|
||||
SELECT DISTINCT file_id, algorithm, encode(hash, 'escape') hash
|
||||
FROM hashes
|
||||
WHERE file_id = ANY($1)
|
||||
",
|
||||
&file_ids.iter().map(|x| x.0).collect::<Vec<_>>()
|
||||
)
|
||||
.fetch(&mut *exec)
|
||||
.try_fold(DashMap::new(), |acc : DashMap<VersionId, Vec<Hash>>, m| {
|
||||
if let Some(found_hash) = m.hash {
|
||||
let hash = Hash {
|
||||
file_id: FileId(m.file_id),
|
||||
algorithm: m.algorithm,
|
||||
hash: found_hash,
|
||||
};
|
||||
|
||||
let version_id = *reverse_file_map.get(&FileId(m.file_id)).unwrap();
|
||||
|
||||
acc.entry(version_id).or_default().push(hash);
|
||||
}
|
||||
async move { Ok(acc) }
|
||||
})
|
||||
.await?;
|
||||
|
||||
let dependencies : DashMap<VersionId, Vec<QueryDependency>> = sqlx::query!(
|
||||
"
|
||||
SELECT DISTINCT dependent_id as version_id, d.mod_dependency_id as dependency_project_id, d.dependency_id as dependency_version_id, d.dependency_file_name as file_name, d.dependency_type as dependency_type
|
||||
FROM dependencies d
|
||||
WHERE dependent_id = ANY($1)
|
||||
",
|
||||
&version_ids_parsed
|
||||
).fetch(&mut *exec)
|
||||
.try_fold(DashMap::new(), |acc : DashMap<_,Vec<QueryDependency>>, m| {
|
||||
let dependency = QueryDependency {
|
||||
project_id: m.dependency_project_id.map(ProjectId),
|
||||
version_id: m.dependency_version_id.map(VersionId),
|
||||
file_name: m.file_name,
|
||||
dependency_type: m.dependency_type,
|
||||
};
|
||||
|
||||
acc.entry(VersionId(m.version_id))
|
||||
.or_default()
|
||||
.push(dependency);
|
||||
async move { Ok(acc) }
|
||||
}
|
||||
).await?;
|
||||
|
||||
let db_versions: Vec<QueryVersion> = sqlx::query!(
|
||||
"
|
||||
WITH version_fields_cte AS (
|
||||
SELECT version_id, field_id, int_value, enum_value, string_value
|
||||
FROM version_fields WHERE version_id = ANY($1)
|
||||
),
|
||||
version_fields_json AS (
|
||||
SELECT DISTINCT version_id,
|
||||
JSONB_AGG(
|
||||
DISTINCT jsonb_build_object('field_id', field_id, 'int_value', int_value, 'enum_value', enum_value, 'string_value', string_value)
|
||||
) version_fields_json
|
||||
FROM version_fields_cte
|
||||
GROUP BY version_id
|
||||
),
|
||||
loader_fields_cte AS (
|
||||
SELECT DISTINCT vf.version_id, lf.*, l.loader
|
||||
FROM loader_fields lf
|
||||
INNER JOIN version_fields_cte vf ON lf.id = vf.field_id
|
||||
LEFT JOIN loaders_versions lv ON vf.version_id = lv.version_id
|
||||
LEFT JOIN loaders l ON lv.loader_id = l.id
|
||||
GROUP BY vf.version_id, lf.enum_type, lf.id, l.loader
|
||||
),
|
||||
loader_fields_json AS (
|
||||
SELECT DISTINCT version_id,
|
||||
JSONB_AGG(
|
||||
DISTINCT jsonb_build_object(
|
||||
'version_id', lf.version_id,
|
||||
'lf_id', id, 'loader_name', loader, 'field', field, 'field_type', field_type, 'enum_type', enum_type, 'min_val', min_val, 'max_val', max_val, 'optional', optional
|
||||
)
|
||||
) filter (where lf.id is not null) loader_fields_json
|
||||
FROM loader_fields_cte lf
|
||||
GROUP BY version_id
|
||||
),
|
||||
loader_field_enum_values_json AS (
|
||||
SELECT DISTINCT version_id,
|
||||
JSONB_AGG(
|
||||
DISTINCT jsonb_build_object(
|
||||
'id', lfev.id, 'enum_id', lfev.enum_id, 'value', lfev.value, 'ordering', lfev.ordering, 'created', lfev.created, 'metadata', lfev.metadata
|
||||
)
|
||||
) filter (where lfev.id is not null) loader_field_enum_values_json
|
||||
FROM loader_field_enum_values lfev
|
||||
INNER JOIN loader_fields_cte lf on lf.enum_type = lfev.enum_id
|
||||
GROUP BY version_id
|
||||
),
|
||||
files_cte AS (
|
||||
SELECT DISTINCT version_id, f.id, f.url, f.filename, f.is_primary, f.size, f.file_type
|
||||
FROM files f
|
||||
WHERE f.version_id = ANY($1)
|
||||
),
|
||||
files_json AS (
|
||||
SELECT DISTINCT version_id,
|
||||
JSONB_AGG(
|
||||
DISTINCT jsonb_build_object('id', id, 'url', url, 'filename', filename, 'primary', is_primary, 'size', size, 'file_type', file_type)
|
||||
) files_json
|
||||
FROM files_cte lf
|
||||
GROUP BY version_id
|
||||
),
|
||||
hashes_json AS (
|
||||
SELECT DISTINCT version_id,
|
||||
JSONB_AGG(
|
||||
DISTINCT jsonb_build_object('algorithm', algorithm, 'hash', encode(hash, 'escape'), 'file_id', file_id)
|
||||
) hashes_json
|
||||
FROM hashes
|
||||
INNER JOIN files_cte lf on lf.id = hashes.file_id
|
||||
GROUP BY version_id
|
||||
),
|
||||
dependencies_json AS (
|
||||
SELECT DISTINCT dependent_id as version_id,
|
||||
JSONB_AGG(
|
||||
DISTINCT jsonb_build_object('project_id', d.mod_dependency_id, 'version_id', d.dependency_id, 'dependency_type', d.dependency_type,'file_name', dependency_file_name)
|
||||
) dependencies_json
|
||||
FROM dependencies d
|
||||
WHERE dependent_id = ANY($1)
|
||||
GROUP BY version_id
|
||||
)
|
||||
|
||||
SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,
|
||||
v.changelog changelog, v.date_published date_published, v.downloads downloads,
|
||||
v.version_type version_type, v.featured featured, v.status status, v.requested_status requested_status, v.ordering ordering,
|
||||
ARRAY_AGG(DISTINCT l.loader) filter (where l.loader is not null) loaders,
|
||||
ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types,
|
||||
ARRAY_AGG(DISTINCT g.slug) filter (where g.slug is not null) games,
|
||||
f.files_json files,
|
||||
h.hashes_json hashes,
|
||||
d.dependencies_json dependencies,
|
||||
vf.version_fields_json version_fields,
|
||||
lf.loader_fields_json loader_fields,
|
||||
lfev.loader_field_enum_values_json loader_field_enum_values
|
||||
v.version_type version_type, v.featured featured, v.status status, v.requested_status requested_status, v.ordering ordering
|
||||
FROM versions v
|
||||
LEFT OUTER JOIN loaders_versions lv on v.id = lv.version_id
|
||||
LEFT OUTER JOIN loaders l on lv.loader_id = l.id
|
||||
LEFT OUTER JOIN loaders_project_types lpt on l.id = lpt.joining_loader_id
|
||||
LEFT JOIN project_types pt on lpt.joining_project_type_id = pt.id
|
||||
LEFT OUTER JOIN loaders_project_types_games lptg on l.id = lptg.loader_id AND pt.id = lptg.project_type_id
|
||||
LEFT JOIN games g on lptg.game_id = g.id
|
||||
LEFT OUTER JOIN files_json f on v.id = f.version_id
|
||||
LEFT OUTER JOIN hashes_json h on v.id = h.version_id
|
||||
LEFT OUTER JOIN dependencies_json d on v.id = d.version_id
|
||||
LEFT OUTER JOIN version_fields_json vf ON v.id = vf.version_id
|
||||
LEFT OUTER JOIN loader_fields_json lf ON v.id = lf.version_id
|
||||
LEFT OUTER JOIN loader_field_enum_values_json lfev ON v.id = lfev.version_id
|
||||
WHERE v.id = ANY($1)
|
||||
GROUP BY v.id, vf.version_fields_json, lf.loader_fields_json, lfev.loader_field_enum_values_json, f.files_json, h.hashes_json, d.dependencies_json
|
||||
ORDER BY v.ordering ASC NULLS LAST, v.date_published ASC;
|
||||
",
|
||||
&version_ids_parsed
|
||||
)
|
||||
.fetch_many(exec)
|
||||
.fetch_many(&mut *exec)
|
||||
.try_filter_map(|e| async {
|
||||
Ok(e.right().map(|v|
|
||||
{
|
||||
let version_id = VersionId(v.id);
|
||||
let (loaders, project_types, games) = loaders_ptypes_games.remove(&version_id).map(|x|x.1).unwrap_or_default();
|
||||
let files = files.remove(&version_id).map(|x|x.1).unwrap_or_default();
|
||||
let hashes = hashes.remove(&version_id).map(|x|x.1).unwrap_or_default();
|
||||
let version_fields = version_fields.remove(&version_id).map(|x|x.1).unwrap_or_default();
|
||||
let dependencies = dependencies.remove(&version_id).map(|x|x.1).unwrap_or_default();
|
||||
|
||||
QueryVersion {
|
||||
inner: Version {
|
||||
id: VersionId(v.id),
|
||||
@@ -652,39 +771,10 @@ impl Version {
|
||||
ordering: v.ordering,
|
||||
},
|
||||
files: {
|
||||
#[derive(Deserialize)]
|
||||
struct Hash {
|
||||
pub file_id: FileId,
|
||||
pub algorithm: String,
|
||||
pub hash: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct File {
|
||||
pub id: FileId,
|
||||
pub url: String,
|
||||
pub filename: String,
|
||||
pub primary: bool,
|
||||
pub size: u32,
|
||||
pub file_type: Option<FileType>,
|
||||
}
|
||||
|
||||
let hashes: Vec<Hash> = serde_json::from_value(
|
||||
v.hashes.unwrap_or_default(),
|
||||
)
|
||||
.ok()
|
||||
.unwrap_or_default();
|
||||
|
||||
let files: Vec<File> = serde_json::from_value(
|
||||
v.files.unwrap_or_default(),
|
||||
)
|
||||
.ok()
|
||||
.unwrap_or_default();
|
||||
|
||||
let mut files = files.into_iter().map(|x| {
|
||||
let mut file_hashes = HashMap::new();
|
||||
|
||||
for hash in &hashes {
|
||||
for hash in hashes.iter() {
|
||||
if hash.file_id == x.id {
|
||||
file_hashes.insert(
|
||||
hash.algorithm.clone(),
|
||||
@@ -695,8 +785,8 @@ impl Version {
|
||||
|
||||
QueryFile {
|
||||
id: x.id,
|
||||
url: x.url,
|
||||
filename: x.filename,
|
||||
url: x.url.clone(),
|
||||
filename: x.filename.clone(),
|
||||
hashes: file_hashes,
|
||||
primary: x.primary,
|
||||
size: x.size,
|
||||
@@ -716,17 +806,13 @@ impl Version {
|
||||
|
||||
files
|
||||
},
|
||||
version_fields: VersionField::from_query_json(v.loader_fields, v.version_fields, v.loader_field_enum_values, false),
|
||||
loaders: v.loaders.unwrap_or_default(),
|
||||
project_types: v.project_types.unwrap_or_default(),
|
||||
games: v.games.unwrap_or_default(),
|
||||
dependencies: serde_json::from_value(
|
||||
v.dependencies.unwrap_or_default(),
|
||||
)
|
||||
.ok()
|
||||
.unwrap_or_default(),
|
||||
version_fields: VersionField::from_query_json(version_fields, &loader_fields, &loader_field_enum_values, false),
|
||||
loaders,
|
||||
project_types,
|
||||
games,
|
||||
dependencies,
|
||||
}
|
||||
))
|
||||
}))
|
||||
})
|
||||
.try_collect::<Vec<QueryVersion>>()
|
||||
.await?;
|
||||
|
||||
@@ -16,6 +16,7 @@ use crate::models::projects::{
|
||||
use crate::models::threads::ThreadId;
|
||||
use crate::routes::v2_reroute;
|
||||
use chrono::{DateTime, Utc};
|
||||
use itertools::Itertools;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use validator::Validate;
|
||||
|
||||
@@ -123,14 +124,25 @@ impl LegacyProject {

// - if loader is mrpack, this is a modpack
// the loaders are whatever the corresponding loader fields are
if versions_item.loaders == vec!["mrpack".to_string()] {
if loaders.contains(&"mrpack".to_string()) {
project_type = "modpack".to_string();
if let Some(mrpack_loaders) = versions_item
.version_fields
.iter()
.find(|f| f.field_name == "mrpack_loaders")
{
loaders = mrpack_loaders.value.as_strings();
if let Some(mrpack_loaders) = data.fields.iter().find(|f| f.0 == "mrpack_loaders") {
let values = mrpack_loaders
.1
.iter()
.filter_map(|v| v.as_str())
.map(|v| v.to_string())
.collect::<Vec<_>>();

// drop mrpack from loaders
loaders = loaders
.into_iter()
.filter(|l| l != "mrpack")
.collect::<Vec<_>>();
// and replace with mrpack_loaders
loaders.extend(values);
// remove duplicate loaders
loaders = loaders.into_iter().unique().collect::<Vec<_>>();
}
}
}
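The block above reports a project whose loaders include `mrpack` as a modpack and swaps that placeholder loader for the real loaders stored in the `mrpack_loaders` field, deduplicating with `Itertools::unique`. An equivalent self-contained sketch of the substitution (example values only; the commit filters, extends, then dedupes rather than chaining):

// --- illustrative sketch, not part of the diff ---
use itertools::Itertools;

fn replace_mrpack_loaders(loaders: Vec<String>, mrpack_loaders: Vec<String>) -> Vec<String> {
    loaders
        .into_iter()
        // Drop the placeholder loader...
        .filter(|l| l != "mrpack")
        // ...splice in the loaders recorded in the mrpack_loaders field...
        .chain(mrpack_loaders)
        // ...and drop duplicates while keeping first-seen order.
        .unique()
        .collect()
}

fn main() {
    let loaders = vec!["mrpack".to_string(), "fabric".to_string()];
    let from_field = vec!["fabric".to_string(), "quilt".to_string()];
    assert_eq!(
        replace_mrpack_loaders(loaders, from_field),
        vec!["fabric".to_string(), "quilt".to_string()]
    );
}
// --- end sketch ---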
@@ -198,7 +210,7 @@ impl LegacyProject {
|
||||
redis: &RedisPool,
|
||||
) -> Result<Vec<Self>, DatabaseError>
|
||||
where
|
||||
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
|
||||
E: sqlx::Acquire<'a, Database = sqlx::Postgres>,
|
||||
{
|
||||
let version_ids: Vec<_> = data
|
||||
.iter()
|
||||
@@ -300,7 +312,7 @@ impl From<Version> for LegacyVersion {
|
||||
// - if loader is mrpack, this is a modpack
|
||||
// the v2 loaders are whatever the corresponding loader fields are
|
||||
let mut loaders = data.loaders.into_iter().map(|l| l.0).collect::<Vec<_>>();
|
||||
if loaders == vec!["mrpack".to_string()] {
|
||||
if loaders.contains(&"mrpack".to_string()) {
|
||||
if let Some((_, mrpack_loaders)) = data
|
||||
.fields
|
||||
.into_iter()
|
||||
|
||||
@@ -12,14 +12,10 @@ use crate::models;
|
||||
use crate::search::UploadSearchProject;
|
||||
use sqlx::postgres::PgPool;
|
||||
|
||||
pub async fn index_local(
|
||||
pub async fn get_all_ids(
|
||||
pool: PgPool,
|
||||
redis: &RedisPool,
|
||||
) -> Result<(Vec<UploadSearchProject>, Vec<String>), IndexingError> {
|
||||
info!("Indexing local projects!");
|
||||
let loader_field_keys: Arc<DashSet<String>> = Arc::new(DashSet::new());
|
||||
|
||||
let all_visible_ids: HashMap<VersionId, (ProjectId, String)> = sqlx::query!(
|
||||
) -> Result<Vec<(VersionId, ProjectId, String)>, IndexingError> {
|
||||
let all_visible_ids: Vec<(VersionId, ProjectId, String)> = sqlx::query!(
|
||||
"
|
||||
SELECT v.id id, m.id mod_id, u.username owner_username
|
||||
|
||||
@@ -45,33 +41,48 @@ pub async fn index_local(
|
||||
Ok(e.right().map(|m| {
|
||||
let project_id: ProjectId = ProjectId(m.mod_id);
|
||||
let version_id: VersionId = VersionId(m.id);
|
||||
(version_id, (project_id, m.owner_username))
|
||||
(version_id, project_id, m.owner_username)
|
||||
}))
|
||||
})
|
||||
.try_collect::<HashMap<_, _>>()
|
||||
.try_collect::<Vec<_>>()
|
||||
.await?;
|
||||
|
||||
let project_ids = all_visible_ids
|
||||
Ok(all_visible_ids)
|
||||
}
|
||||
|
||||
pub async fn index_local(
|
||||
pool: &PgPool,
|
||||
redis: &RedisPool,
|
||||
visible_ids: HashMap<VersionId, (ProjectId, String)>,
|
||||
) -> Result<(Vec<UploadSearchProject>, Vec<String>), IndexingError> {
|
||||
info!("Indexing local projects!");
|
||||
let loader_field_keys: Arc<DashSet<String>> = Arc::new(DashSet::new());
|
||||
|
||||
let project_ids = visible_ids
|
||||
.values()
|
||||
.map(|(project_id, _)| project_id)
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
let projects: HashMap<_, _> = project_item::Project::get_many_ids(&project_ids, &pool, redis)
|
||||
let projects: HashMap<_, _> = project_item::Project::get_many_ids(&project_ids, pool, redis)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|p| (p.inner.id, p))
|
||||
.collect();
|
||||
|
||||
let version_ids = all_visible_ids.keys().cloned().collect::<Vec<_>>();
|
||||
let versions: HashMap<_, _> = version_item::Version::get_many(&version_ids, &pool, redis)
|
||||
info!("Fetched local projects!");
|
||||
|
||||
let version_ids = visible_ids.keys().cloned().collect::<Vec<_>>();
|
||||
let versions: HashMap<_, _> = version_item::Version::get_many(&version_ids, pool, redis)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|v| (v.inner.id, v))
|
||||
.collect();
|
||||
|
||||
info!("Fetched local versions!");
|
||||
|
||||
let mut uploads = Vec::new();
|
||||
// TODO: could possibly clone less here?
|
||||
for (version_id, (project_id, owner_username)) in all_visible_ids {
|
||||
for (version_id, (project_id, owner_username)) in visible_ids {
|
||||
let m = projects.get(&project_id);
|
||||
let v = versions.get(&version_id);
|
||||
|
||||
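index_local now joins each chunk in memory: projects and versions are fetched once and keyed by id in `HashMap`s, and every visible (version, project, owner) triple is resolved with two lookups instead of per-row queries. A reduced sketch of that join with toy data:

// --- illustrative sketch, not part of the diff ---
use std::collections::HashMap;

fn main() {
    // Pre-fetched rows keyed by id, in the shape get_many_ids / get_many return them.
    let projects: HashMap<u64, &str> = HashMap::from([(1, "project-a"), (2, "project-b")]);
    let versions: HashMap<u64, &str> = HashMap::from([(10, "1.0.0"), (20, "0.2.1")]);

    // The chunk's visible ids: version id -> (project id, owner username).
    let visible_ids: HashMap<u64, (u64, &str)> =
        HashMap::from([(10, (1, "alice")), (20, (2, "bob"))]);

    let mut uploads = Vec::new();
    for (version_id, (project_id, owner)) in visible_ids {
        // Two map lookups replace per-row database queries.
        if let (Some(project), Some(version)) =
            (projects.get(&project_id), versions.get(&version_id))
        {
            uploads.push(format!("{project} {version} by {owner}"));
        }
    }
    assert_eq!(uploads.len(), 2);
}
// --- end sketch ---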
|
||||
@@ -1,15 +1,21 @@
|
||||
/// This module is used for the indexing from any source.
|
||||
pub mod local_import;
|
||||
|
||||
use std::collections::HashMap;
|
||||
|
||||
use crate::database::redis::RedisPool;
|
||||
use crate::search::{SearchConfig, UploadSearchProject};
|
||||
use itertools::Itertools;
|
||||
use local_import::index_local;
|
||||
use log::info;
|
||||
use meilisearch_sdk::client::Client;
|
||||
use meilisearch_sdk::indexes::Index;
|
||||
use meilisearch_sdk::settings::{PaginationSetting, Settings};
|
||||
use sqlx::postgres::PgPool;
|
||||
use thiserror::Error;
|
||||
|
||||
use self::local_import::get_all_ids;
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
pub enum IndexingError {
|
||||
#[error("Error while connecting to the MeiliSearch database")]
|
||||
@@ -31,6 +37,7 @@ pub enum IndexingError {
|
||||
// assumes a max average size of 1KiB per project to avoid this cap.
|
||||
const MEILISEARCH_CHUNK_SIZE: usize = 10000;
|
||||
|
||||
const FETCH_PROJECT_SIZE: usize = 5000;
|
||||
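Rough capacity math behind the two batch sizes, using the ~1 KiB-per-document assumption stated in the comment above (the per-document size is an assumption, not something this commit measures):

// --- illustrative sketch, not part of the diff ---
const MEILISEARCH_CHUNK_SIZE: usize = 10_000;
const FETCH_PROJECT_SIZE: usize = 5_000;

fn main() {
    let approx_doc_size_bytes: usize = 1024; // assumed average, per the comment above
    let upload_batch_bytes = MEILISEARCH_CHUNK_SIZE * approx_doc_size_bytes;
    // 10_000 docs at ~1 KiB each stays a little under 10 MiB per upload batch.
    assert!(upload_batch_bytes < 10 * 1024 * 1024);
    // The database fetch batches are half the upload batch size.
    assert_eq!(FETCH_PROJECT_SIZE * 2, MEILISEARCH_CHUNK_SIZE);
}
// --- end sketch ---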
pub async fn index_projects(
|
||||
pool: PgPool,
|
||||
redis: RedisPool,
|
||||
@@ -39,10 +46,40 @@ pub async fn index_projects(
let mut docs_to_add: Vec<UploadSearchProject> = vec![];
let mut additional_fields: Vec<String> = vec![];

let (mut uploads, mut loader_fields) = index_local(pool.clone(), &redis).await?;
docs_to_add.append(&mut uploads);
additional_fields.append(&mut loader_fields);
let all_ids = get_all_ids(pool.clone()).await?;
let all_ids_len = all_ids.len();
info!("Got all ids, indexing {} projects", all_ids_len);
let mut so_far = 0;

let as_chunks: Vec<_> = all_ids
.into_iter()
.chunks(FETCH_PROJECT_SIZE)
.into_iter()
.map(|x| x.collect::<Vec<_>>())
.collect();

for id_chunk in as_chunks {
info!(
"Fetching chunk {}-{}/{}, size: {}",
so_far,
so_far + FETCH_PROJECT_SIZE,
all_ids_len,
id_chunk.len()
);
so_far += FETCH_PROJECT_SIZE;

let id_chunk = id_chunk
.into_iter()
.map(|(version_id, project_id, owner_username)| {
(version_id, (project_id, owner_username.to_lowercase()))
})
.collect::<HashMap<_, _>>();
let (mut uploads, mut loader_fields) = index_local(&pool, &redis, id_chunk).await?;
docs_to_add.append(&mut uploads);
additional_fields.append(&mut loader_fields);
}
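The loop above is the core of the chunked reindex: the id list is split into `FETCH_PROJECT_SIZE` batches and each batch is fetched and indexed before the next begins, which bounds peak memory instead of materializing every project and version at once. The same `Itertools::chunks` batching over plain data, as a self-contained sketch (counts are illustrative):

// --- illustrative sketch, not part of the diff ---
use itertools::Itertools;

const FETCH_SIZE: usize = 5000;

fn main() {
    // Stand-in for the (version_id, project_id, owner) triples returned by get_all_ids.
    let all_ids: Vec<u64> = (0..12_345).collect();

    // Split the id list into fixed-size batches up front, as index_projects does.
    let batches: Vec<Vec<u64>> = all_ids
        .into_iter()
        .chunks(FETCH_SIZE)
        .into_iter()
        .map(|chunk| chunk.collect())
        .collect();

    // 12_345 ids at 5_000 per batch -> 3 batches (5_000 + 5_000 + 2_345).
    assert_eq!(batches.len(), 3);
    assert_eq!(batches.iter().map(Vec::len).sum::<usize>(), 12_345);
}
// --- end sketch ---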
|
||||
info!("Got all ids, indexing...");
|
||||
// Write Indices
|
||||
add_projects(docs_to_add, additional_fields, config).await?;
|
||||
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
use crate::database::models::legacy_loader_fields::MinecraftGameVersion;
|
||||
use crate::database::models::loader_fields::VersionField;
|
||||
use crate::database::redis::RedisPool;
|
||||
use crate::models::projects::ProjectId;
|
||||
use crate::routes::ApiError;
|
||||
@@ -80,6 +79,8 @@ pub async fn send_discord_webhook(
|
||||
) -> Result<(), ApiError> {
|
||||
// TODO: this currently uses Minecraft as it is a v2 webhook, and requires 'game_versions', a minecraft-java loader field.
|
||||
// TODO: This should be updated to use the generic loader fields w/ discord from the project game
|
||||
|
||||
// TODO: This should use the project_item get route
|
||||
let all_game_versions = MinecraftGameVersion::list(pool, redis).await?;
|
||||
|
||||
let row =
|
||||
@@ -93,38 +94,7 @@ pub async fn send_discord_webhook(
|
||||
ARRAY_AGG(DISTINCT pt.name) filter (where pt.name is not null) project_types,
|
||||
ARRAY_AGG(DISTINCT g.slug) filter (where g.slug is not null) games,
|
||||
ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is false) gallery,
|
||||
ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is true) featured_gallery,
|
||||
JSONB_AGG(
|
||||
DISTINCT jsonb_build_object(
|
||||
'field_id', vf.field_id,
|
||||
'int_value', vf.int_value,
|
||||
'enum_value', vf.enum_value,
|
||||
'string_value', vf.string_value
|
||||
)
|
||||
) filter (where vf.field_id is not null) version_fields,
|
||||
JSONB_AGG(
|
||||
DISTINCT jsonb_build_object(
|
||||
'version_id', 0, -- TODO: When webhook is updated to match others, this should match version
|
||||
'lf_id', lf.id,
|
||||
'loader_name', lo.loader,
|
||||
'field', lf.field,
|
||||
'field_type', lf.field_type,
|
||||
'enum_type', lf.enum_type,
|
||||
'min_val', lf.min_val,
|
||||
'max_val', lf.max_val,
|
||||
'optional', lf.optional
|
||||
)
|
||||
) filter (where lf.id is not null) loader_fields,
|
||||
JSONB_AGG(
|
||||
DISTINCT jsonb_build_object(
|
||||
'id', lfev.id,
|
||||
'enum_id', lfev.enum_id,
|
||||
'value', lfev.value,
|
||||
'ordering', lfev.ordering,
|
||||
'created', lfev.created,
|
||||
'metadata', lfev.metadata
|
||||
)
|
||||
) filter (where lfev.id is not null) loader_field_enum_values
|
||||
ARRAY_AGG(DISTINCT mg.image_url) filter (where mg.image_url is not null and mg.featured is true) featured_gallery
|
||||
FROM mods m
|
||||
LEFT OUTER JOIN mods_categories mc ON joining_mod_id = m.id AND mc.is_additional = FALSE
|
||||
LEFT OUTER JOIN categories c ON mc.joining_category_id = c.id
|
||||
@@ -138,10 +108,6 @@ pub async fn send_discord_webhook(
|
||||
LEFT OUTER JOIN mods_gallery mg ON mg.mod_id = m.id
|
||||
INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.is_owner = TRUE AND tm.accepted = TRUE
|
||||
INNER JOIN users u ON tm.user_id = u.id
|
||||
LEFT OUTER JOIN version_fields vf on v.id = vf.version_id
|
||||
LEFT OUTER JOIN loader_fields lf on vf.field_id = lf.id
|
||||
LEFT OUTER JOIN loader_field_enums lfe on lf.enum_type = lfe.id
|
||||
LEFT OUTER JOIN loader_field_enum_values lfev on lfev.enum_id = lfe.id
|
||||
WHERE m.id = $1
|
||||
GROUP BY m.id, u.id;
|
||||
",
|
||||
@@ -157,11 +123,6 @@ pub async fn send_discord_webhook(
|
||||
let categories = project.categories.unwrap_or_default();
|
||||
let loaders = project.loaders.unwrap_or_default();
|
||||
|
||||
// let versions: Vec<GameVersion> =
|
||||
// serde_json::from_value(project.versions.unwrap_or_default())
|
||||
// .ok()
|
||||
// .unwrap_or_default();
|
||||
|
||||
if !categories.is_empty() {
|
||||
fields.push(DiscordEmbedField {
|
||||
name: "Categories",
|
||||
@@ -226,12 +187,17 @@ pub async fn send_discord_webhook(
|
||||
|
||||
// TODO: Modified to keep "Versions" as a field as it may be hardcoded. Ideally, this pushes all loader fields to the embed for v3
|
||||
// TODO: This might need some work to manually test
|
||||
let version_fields = VersionField::from_query_json(
|
||||
project.loader_fields,
|
||||
project.version_fields,
|
||||
project.loader_field_enum_values,
|
||||
true,
|
||||
);
|
||||
let version_fields = crate::database::models::project_item::Project::get_id(
|
||||
crate::database::models::ids::ProjectId(project.id),
|
||||
pool,
|
||||
redis,
|
||||
)
|
||||
.await
|
||||
.ok()
|
||||
.flatten()
|
||||
.map(|project| project.aggregate_version_fields)
|
||||
.unwrap_or_default();
|
||||
|
||||
let versions = version_fields
|
||||
.into_iter()
|
||||
.find_map(|vf| MinecraftGameVersion::try_from_version_field(&vf).ok())
|
||||
|
||||
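The test hunk below stops comparing `singleplayer` against an exact `[false, true]` array and checks membership instead, since aggregated loader-field values no longer arrive in a guaranteed order. A minimal order-insensitive check of the assumed value shape:

// --- illustrative sketch, not part of the diff ---
use serde_json::json;

fn main() {
    // Aggregated field values may come back in either order.
    let singleplayer = vec![json!(true), json!(false)];
    assert!(singleplayer.contains(&json!(false)));
    assert!(singleplayer.contains(&json!(true)));
}
// --- end sketch ---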
@@ -370,10 +370,16 @@ async fn creating_loader_fields() {
|
||||
project.fields.get("game_versions").unwrap(),
|
||||
&[json!("1.20.1"), json!("1.20.2"), json!("1.20.5")]
|
||||
);
|
||||
assert_eq!(
|
||||
project.fields.get("singleplayer").unwrap(),
|
||||
&[json!(false), json!(true)]
|
||||
);
|
||||
assert!(project
|
||||
.fields
|
||||
.get("singleplayer")
|
||||
.unwrap()
|
||||
.contains(&json!(false)));
|
||||
assert!(project
|
||||
.fields
|
||||
.get("singleplayer")
|
||||
.unwrap()
|
||||
.contains(&json!(true)));
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||