Secure auth route, fix quilt deps bug, optimize queries more (#374)

* Secure auth route, fix quilt deps bug, optimize queries more

* Add to_lowercase for multiple hashes functions
This commit is contained in:
Geometrically
2022-06-17 16:56:28 -07:00
committed by GitHub
parent 355689ed19
commit 782bb11894
15 changed files with 842 additions and 592 deletions

4
.env
View File

@@ -40,4 +40,6 @@ GITHUB_CLIENT_SECRET=none
RATE_LIMIT_IGNORE_IPS='["127.0.0.1"]' RATE_LIMIT_IGNORE_IPS='["127.0.0.1"]'
WHITELISTED_MODPACK_DOMAINS='["cdn.modrinth.com", "edge.forgecdn.net", "github.com", "raw.githubusercontent.com"]' WHITELISTED_MODPACK_DOMAINS='["cdn.modrinth.com", "edge.forgecdn.net", "github.com", "raw.githubusercontent.com"]'
ALLOWED_CALLBACK_URLS='["localhost", ".modrinth.com", "modrinth.com", "-modrinth.vercel.app"]'

2
Cargo.lock generated
View File

@@ -1441,7 +1441,7 @@ dependencies = [
[[package]] [[package]]
name = "labrinth" name = "labrinth"
version = "2.4.1" version = "2.4.2"
dependencies = [ dependencies = [
"actix", "actix",
"actix-cors", "actix-cors",

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "labrinth" name = "labrinth"
version = "2.4.1" version = "2.4.2"
#Team members, please add your emails and usernames #Team members, please add your emails and usernames
authors = ["geometrically <jai.a@tuta.io>", "Redblueflame <contact@redblueflame.com>", "Aeledfyr <aeledfyr@gmail.com>", "Charalampos Fanoulis <yo@fanoulis.dev>", "AppleTheGolden <scotsbox@protonmail.com>"] authors = ["geometrically <jai.a@tuta.io>", "Redblueflame <contact@redblueflame.com>", "Aeledfyr <aeledfyr@gmail.com>", "Charalampos Fanoulis <yo@fanoulis.dev>", "AppleTheGolden <scotsbox@protonmail.com>"]
edition = "2018" edition = "2018"

View File

@@ -662,6 +662,32 @@
"nullable": [] "nullable": []
} }
}, },
"2278a7db5eb0474576fa9c86ba97bd6bf13864b3f9ce55ed2ab0cb94edbadaf5": {
"query": "\n SELECT url, expires FROM states\n WHERE id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "url",
"type_info": "Varchar"
},
{
"ordinal": 1,
"name": "expires",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false,
false
]
}
},
"232d7d0319c20dd5fff29331b067d6c6373bcff761a77958a2bb5f59068a83a5": { "232d7d0319c20dd5fff29331b067d6c6373bcff761a77958a2bb5f59068a83a5": {
"query": "\n UPDATE team_members\n SET permissions = $1\n WHERE (team_id = $2 AND user_id = $3)\n ", "query": "\n UPDATE team_members\n SET permissions = $1\n WHERE (team_id = $2 AND user_id = $3)\n ",
"describe": { "describe": {
@@ -772,50 +798,6 @@
"nullable": [] "nullable": []
} }
}, },
"2c7c46497580e96c2ede1a696c960a8f53af9b8d0fc995484618b9090add8890": {
"query": "\n SELECT id, title, notification_id, action_route, action_route_method\n FROM notifications_actions\n WHERE notification_id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int4"
},
{
"ordinal": 1,
"name": "title",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "notification_id",
"type_info": "Int8"
},
{
"ordinal": 3,
"name": "action_route",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "action_route_method",
"type_info": "Varchar"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false,
false,
false,
false,
false
]
}
},
"2d2e5b06be5125226ed9e4d7b7b5f99043db73537f2199f2146bdcd56091ae75": { "2d2e5b06be5125226ed9e4d7b7b5f99043db73537f2199f2146bdcd56091ae75": {
"query": "\n INSERT INTO team_members (id, team_id, user_id, role, permissions, accepted)\n VALUES ($1, $2, $3, $4, $5, $6)\n ", "query": "\n INSERT INTO team_members (id, team_id, user_id, role, permissions, accepted)\n VALUES ($1, $2, $3, $4, $5, $6)\n ",
"describe": { "describe": {
@@ -858,6 +840,68 @@
"nullable": [] "nullable": []
} }
}, },
"35b728453ade9cd9c535411fff194105c05726cea29446c3532aec9bfa4ffd2d": {
"query": "\n SELECT n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type,\n STRING_AGG(DISTINCT na.id || ' |||| ' || na.title || ' |||| ' || na.action_route || ' |||| ' || na.action_route_method, ' ~~~~ ') actions\n FROM notifications n\n LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id\n WHERE n.id = $1\n GROUP BY n.id, n.user_id;\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "user_id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "title",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "text",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "link",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "created",
"type_info": "Timestamptz"
},
{
"ordinal": 5,
"name": "read",
"type_info": "Bool"
},
{
"ordinal": 6,
"name": "notification_type",
"type_info": "Varchar"
},
{
"ordinal": 7,
"name": "actions",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false,
false,
false,
false,
false,
false,
true,
null
]
}
},
"371048e45dd74c855b84cdb8a6a565ccbef5ad166ec9511ab20621c336446da6": { "371048e45dd74c855b84cdb8a6a565ccbef5ad166ec9511ab20621c336446da6": {
"query": "\n UPDATE mods\n SET follows = follows - 1\n WHERE id = $1\n ", "query": "\n UPDATE mods\n SET follows = follows - 1\n WHERE id = $1\n ",
"describe": { "describe": {
@@ -870,6 +914,74 @@
"nullable": [] "nullable": []
} }
}, },
"375d3e7221ca352efc3bec374b9924c864c1ea5808e99ee6b89e4dcb0d39ba7a": {
"query": "\n SELECT n.id, n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type,\n STRING_AGG(DISTINCT na.id || ' |||| ' || na.title || ' |||| ' || na.action_route || ' |||| ' || na.action_route_method, ' ~~~~ ') actions\n FROM notifications n\n LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id\n WHERE n.id = ANY($1)\n GROUP BY n.id, n.user_id\n ORDER BY n.created DESC;\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "user_id",
"type_info": "Int8"
},
{
"ordinal": 2,
"name": "title",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "text",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "link",
"type_info": "Varchar"
},
{
"ordinal": 5,
"name": "created",
"type_info": "Timestamptz"
},
{
"ordinal": 6,
"name": "read",
"type_info": "Bool"
},
{
"ordinal": 7,
"name": "notification_type",
"type_info": "Varchar"
},
{
"ordinal": 8,
"name": "actions",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Int8Array"
]
},
"nullable": [
false,
false,
false,
false,
false,
false,
false,
true,
null
]
}
},
"3831c1b321e47690f1f54597506a0d43362eda9540c56acb19c06532bba50b01": { "3831c1b321e47690f1f54597506a0d43362eda9540c56acb19c06532bba50b01": {
"query": "\n SELECT id, user_id, role, permissions, accepted\n FROM team_members\n WHERE team_id = $1\n ", "query": "\n SELECT id, user_id, role, permissions, accepted\n FROM team_members\n WHERE team_id = $1\n ",
"describe": { "describe": {
@@ -1313,116 +1425,6 @@
"nullable": [] "nullable": []
} }
}, },
"4e40451ec96cf4fa3a806fc71ffa953c087457e3778c25c30dcd664b04e496c6": {
"query": "\n SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,\n v.changelog changelog, v.changelog_url changelog_url, v.date_published date_published, v.downloads downloads,\n v.version_type version_type, v.featured featured,\n STRING_AGG(DISTINCT gv.version, ' ~~~~ ') game_versions, STRING_AGG(DISTINCT l.loader, ' ~~~~ ') loaders,\n STRING_AGG(DISTINCT f.id || ' |||| ' || f.is_primary || ' |||| ' || f.size || ' |||| ' || f.url || ' |||| ' || f.filename, ' ~~~~ ') files,\n STRING_AGG(DISTINCT h.algorithm || ' |||| ' || encode(h.hash, 'escape') || ' |||| ' || h.file_id, ' ~~~~ ') hashes,\n STRING_AGG(DISTINCT COALESCE(d.dependency_id, 0) || ' |||| ' || COALESCE(d.mod_dependency_id, 0) || ' |||| ' || d.dependency_type || ' |||| ' || COALESCE(d.dependency_file_name, ' '), ' ~~~~ ') dependencies\n FROM versions v\n LEFT OUTER JOIN game_versions_versions gvv on v.id = gvv.joining_version_id\n LEFT OUTER JOIN game_versions gv on gvv.game_version_id = gv.id\n LEFT OUTER JOIN loaders_versions lv on v.id = lv.version_id\n LEFT OUTER JOIN loaders l on lv.loader_id = l.id\n LEFT OUTER JOIN files f on v.id = f.version_id\n LEFT OUTER JOIN hashes h on f.id = h.file_id\n LEFT OUTER JOIN dependencies d on v.id = d.dependent_id\n WHERE v.id = ANY($1)\n GROUP BY v.id\n ORDER BY v.date_published ASC;\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "mod_id",
"type_info": "Int8"
},
{
"ordinal": 2,
"name": "author_id",
"type_info": "Int8"
},
{
"ordinal": 3,
"name": "version_name",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "version_number",
"type_info": "Varchar"
},
{
"ordinal": 5,
"name": "changelog",
"type_info": "Varchar"
},
{
"ordinal": 6,
"name": "changelog_url",
"type_info": "Varchar"
},
{
"ordinal": 7,
"name": "date_published",
"type_info": "Timestamptz"
},
{
"ordinal": 8,
"name": "downloads",
"type_info": "Int4"
},
{
"ordinal": 9,
"name": "version_type",
"type_info": "Varchar"
},
{
"ordinal": 10,
"name": "featured",
"type_info": "Bool"
},
{
"ordinal": 11,
"name": "game_versions",
"type_info": "Text"
},
{
"ordinal": 12,
"name": "loaders",
"type_info": "Text"
},
{
"ordinal": 13,
"name": "files",
"type_info": "Text"
},
{
"ordinal": 14,
"name": "hashes",
"type_info": "Text"
},
{
"ordinal": 15,
"name": "dependencies",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Int8Array"
]
},
"nullable": [
false,
false,
false,
false,
false,
false,
true,
false,
false,
false,
false,
null,
null,
null,
null,
null
]
}
},
"4e9f9eafbfd705dfc94571018cb747245a98ea61bad3fae4b3ce284229d99955": { "4e9f9eafbfd705dfc94571018cb747245a98ea61bad3fae4b3ce284229d99955": {
"query": "\n UPDATE mods\n SET description = $1\n WHERE (id = $2)\n ", "query": "\n UPDATE mods\n SET description = $1\n WHERE (id = $2)\n ",
"describe": { "describe": {
@@ -1714,6 +1716,34 @@
] ]
} }
}, },
"57ff857e0d7f6deab7da6e806b83c61648809c8820cf1a46e833ff97583fe888": {
"query": "\n SELECT DISTINCT ON(v.date_published, v.id) version_id, v.date_published FROM versions v\n INNER JOIN game_versions_versions gvv ON gvv.joining_version_id = v.id\n INNER JOIN game_versions gv on gvv.game_version_id = gv.id AND (cardinality($2::varchar[]) = 0 OR gv.version = ANY($2::varchar[]))\n INNER JOIN loaders_versions lv ON lv.version_id = v.id\n INNER JOIN loaders l on lv.loader_id = l.id AND (cardinality($3::varchar[]) = 0 OR l.loader = ANY($3::varchar[]))\n WHERE v.mod_id = $1\n ORDER BY v.date_published, v.id ASC\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "version_id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "date_published",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": [
"Int8",
"VarcharArray",
"VarcharArray"
]
},
"nullable": [
false,
false
]
}
},
"5917ab5017e27be2c4c5231426b19c3b37fd171ff47f97a0cb4e2094a0234298": { "5917ab5017e27be2c4c5231426b19c3b37fd171ff47f97a0cb4e2094a0234298": {
"query": "\n SELECT id, name FROM project_types\n WHERE name = ANY($1)\n ", "query": "\n SELECT id, name FROM project_types\n WHERE name = ANY($1)\n ",
"describe": { "describe": {
@@ -1936,28 +1966,6 @@
"nullable": [] "nullable": []
} }
}, },
"5ff8fd471ff62f86aa95e52cee2723b31ec3d7fc53c3ef1454df40eef0ceff53": {
"query": "\n SELECT version.id FROM (\n SELECT DISTINCT ON(v.id) v.id, v.date_published FROM versions v\n INNER JOIN game_versions_versions gvv ON gvv.joining_version_id = v.id\n INNER JOIN game_versions gv on gvv.game_version_id = gv.id AND (cardinality($2::varchar[]) = 0 OR gv.version = ANY($2::varchar[]))\n INNER JOIN loaders_versions lv ON lv.version_id = v.id\n INNER JOIN loaders l on lv.loader_id = l.id AND (cardinality($3::varchar[]) = 0 OR l.loader = ANY($3::varchar[]))\n WHERE v.mod_id = $1\n ) AS version\n ORDER BY version.date_published ASC\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Int8",
"VarcharArray",
"VarcharArray"
]
},
"nullable": [
false
]
}
},
"6131d32a65f5e04775308386812f25c6d8464582678536a392a4a3737667f363": { "6131d32a65f5e04775308386812f25c6d8464582678536a392a4a3737667f363": {
"query": "\n SELECT id, short, name FROM licenses\n ", "query": "\n SELECT id, short, name FROM licenses\n ",
"describe": { "describe": {
@@ -2022,116 +2030,6 @@
] ]
} }
}, },
"62ebb6f33a26f05f28b3d175ef19cb247c7c7f1ac0fce2fae15e6787b33a1b96": {
"query": "\n SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,\n v.changelog changelog, v.changelog_url changelog_url, v.date_published date_published, v.downloads downloads,\n v.version_type version_type, v.featured featured,\n STRING_AGG(DISTINCT gv.version, ' ~~~~ ') game_versions, STRING_AGG(DISTINCT l.loader, ' ~~~~ ') loaders,\n STRING_AGG(DISTINCT f.id || ' |||| ' || f.is_primary || ' |||| ' || f.size || ' |||| ' || f.url || ' |||| ' || f.filename, ' ~~~~ ') files,\n STRING_AGG(DISTINCT h.algorithm || ' |||| ' || encode(h.hash, 'escape') || ' |||| ' || h.file_id, ' ~~~~ ') hashes,\n STRING_AGG(DISTINCT COALESCE(d.dependency_id, 0) || ' |||| ' || COALESCE(d.mod_dependency_id, 0) || ' |||| ' || d.dependency_type || ' |||| ' || COALESCE(d.dependency_file_name, ' '), ' ~~~~ ') dependencies\n FROM versions v\n LEFT OUTER JOIN game_versions_versions gvv on v.id = gvv.joining_version_id\n LEFT OUTER JOIN game_versions gv on gvv.game_version_id = gv.id\n LEFT OUTER JOIN loaders_versions lv on v.id = lv.version_id\n LEFT OUTER JOIN loaders l on lv.loader_id = l.id\n LEFT OUTER JOIN files f on v.id = f.version_id\n LEFT OUTER JOIN hashes h on f.id = h.file_id\n LEFT OUTER JOIN dependencies d on v.id = d.dependent_id\n WHERE v.id = $1\n GROUP BY v.id;\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "mod_id",
"type_info": "Int8"
},
{
"ordinal": 2,
"name": "author_id",
"type_info": "Int8"
},
{
"ordinal": 3,
"name": "version_name",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "version_number",
"type_info": "Varchar"
},
{
"ordinal": 5,
"name": "changelog",
"type_info": "Varchar"
},
{
"ordinal": 6,
"name": "changelog_url",
"type_info": "Varchar"
},
{
"ordinal": 7,
"name": "date_published",
"type_info": "Timestamptz"
},
{
"ordinal": 8,
"name": "downloads",
"type_info": "Int4"
},
{
"ordinal": 9,
"name": "version_type",
"type_info": "Varchar"
},
{
"ordinal": 10,
"name": "featured",
"type_info": "Bool"
},
{
"ordinal": 11,
"name": "game_versions",
"type_info": "Text"
},
{
"ordinal": 12,
"name": "loaders",
"type_info": "Text"
},
{
"ordinal": 13,
"name": "files",
"type_info": "Text"
},
{
"ordinal": 14,
"name": "hashes",
"type_info": "Text"
},
{
"ordinal": 15,
"name": "dependencies",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false,
false,
false,
false,
false,
false,
true,
false,
false,
false,
false,
null,
null,
null,
null,
null
]
}
},
"67d021f0776276081d3c50ca97afa6b78b98860bf929009e845e9c00a192e3b5": { "67d021f0776276081d3c50ca97afa6b78b98860bf929009e845e9c00a192e3b5": {
"query": "\n SELECT id FROM report_types\n WHERE name = $1\n ", "query": "\n SELECT id FROM report_types\n WHERE name = $1\n ",
"describe": { "describe": {
@@ -2999,26 +2897,6 @@
"nullable": [] "nullable": []
} }
}, },
"7be4ba7c3dd53abd79715b9a9ead6b8815a2e4994f6887ac853f832c5ca17150": {
"query": "\n SELECT id\n FROM notifications\n WHERE user_id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false
]
}
},
"7c61fee015231f0a97c25d24f2c6be24821e39e330ab82344ad3b985d0d2aaea": { "7c61fee015231f0a97c25d24f2c6be24821e39e330ab82344ad3b985d0d2aaea": {
"query": "\n SELECT id FROM mods_gallery\n WHERE image_url = $1\n ", "query": "\n SELECT id FROM mods_gallery\n WHERE image_url = $1\n ",
"describe": { "describe": {
@@ -3427,6 +3305,116 @@
] ]
} }
}, },
"8c65c87d288ca385eee56f98a0880eac5bf73d2760af70b7d660ded5f7a619f2": {
"query": "\n SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,\n v.changelog changelog, v.changelog_url changelog_url, v.date_published date_published, v.downloads downloads,\n v.version_type version_type, v.featured featured,\n STRING_AGG(DISTINCT gv.version || ' |||| ' || gv.created, ' ~~~~ ') game_versions, STRING_AGG(DISTINCT l.loader, ' ~~~~ ') loaders,\n STRING_AGG(DISTINCT f.id || ' |||| ' || f.is_primary || ' |||| ' || f.size || ' |||| ' || f.url || ' |||| ' || f.filename, ' ~~~~ ') files,\n STRING_AGG(DISTINCT h.algorithm || ' |||| ' || encode(h.hash, 'escape') || ' |||| ' || h.file_id, ' ~~~~ ') hashes,\n STRING_AGG(DISTINCT COALESCE(d.dependency_id, 0) || ' |||| ' || COALESCE(d.mod_dependency_id, 0) || ' |||| ' || d.dependency_type || ' |||| ' || COALESCE(d.dependency_file_name, ' '), ' ~~~~ ') dependencies\n FROM versions v\n LEFT OUTER JOIN game_versions_versions gvv on v.id = gvv.joining_version_id\n LEFT OUTER JOIN game_versions gv on gvv.game_version_id = gv.id\n LEFT OUTER JOIN loaders_versions lv on v.id = lv.version_id\n LEFT OUTER JOIN loaders l on lv.loader_id = l.id\n LEFT OUTER JOIN files f on v.id = f.version_id\n LEFT OUTER JOIN hashes h on f.id = h.file_id\n LEFT OUTER JOIN dependencies d on v.id = d.dependent_id\n WHERE v.id = ANY($1)\n GROUP BY v.id\n ORDER BY v.date_published ASC;\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "mod_id",
"type_info": "Int8"
},
{
"ordinal": 2,
"name": "author_id",
"type_info": "Int8"
},
{
"ordinal": 3,
"name": "version_name",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "version_number",
"type_info": "Varchar"
},
{
"ordinal": 5,
"name": "changelog",
"type_info": "Varchar"
},
{
"ordinal": 6,
"name": "changelog_url",
"type_info": "Varchar"
},
{
"ordinal": 7,
"name": "date_published",
"type_info": "Timestamptz"
},
{
"ordinal": 8,
"name": "downloads",
"type_info": "Int4"
},
{
"ordinal": 9,
"name": "version_type",
"type_info": "Varchar"
},
{
"ordinal": 10,
"name": "featured",
"type_info": "Bool"
},
{
"ordinal": 11,
"name": "game_versions",
"type_info": "Text"
},
{
"ordinal": 12,
"name": "loaders",
"type_info": "Text"
},
{
"ordinal": 13,
"name": "files",
"type_info": "Text"
},
{
"ordinal": 14,
"name": "hashes",
"type_info": "Text"
},
{
"ordinal": 15,
"name": "dependencies",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Int8Array"
]
},
"nullable": [
false,
false,
false,
false,
false,
false,
true,
false,
false,
false,
false,
null,
null,
null,
null,
null
]
}
},
"8f706d78ac4235ea04c59e2c220a4791e1d08fdf287b783b4aaef36fd2445467": { "8f706d78ac4235ea04c59e2c220a4791e1d08fdf287b783b4aaef36fd2445467": {
"query": "\n DELETE FROM loaders\n WHERE loader = $1\n ", "query": "\n DELETE FROM loaders\n WHERE loader = $1\n ",
"describe": { "describe": {
@@ -3697,6 +3685,116 @@
"nullable": [] "nullable": []
} }
}, },
"a007ddf78c7bcbac5e511d15d0f130ecb5527fefcdce02972ede5b5cd2b10630": {
"query": "\n SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,\n v.changelog changelog, v.changelog_url changelog_url, v.date_published date_published, v.downloads downloads,\n v.version_type version_type, v.featured featured,\n STRING_AGG(DISTINCT gv.version || ' |||| ' || gv.created, ' ~~~~ ') game_versions, STRING_AGG(DISTINCT l.loader, ' ~~~~ ') loaders,\n STRING_AGG(DISTINCT f.id || ' |||| ' || f.is_primary || ' |||| ' || f.size || ' |||| ' || f.url || ' |||| ' || f.filename, ' ~~~~ ') files,\n STRING_AGG(DISTINCT h.algorithm || ' |||| ' || encode(h.hash, 'escape') || ' |||| ' || h.file_id, ' ~~~~ ') hashes,\n STRING_AGG(DISTINCT COALESCE(d.dependency_id, 0) || ' |||| ' || COALESCE(d.mod_dependency_id, 0) || ' |||| ' || d.dependency_type || ' |||| ' || COALESCE(d.dependency_file_name, ' '), ' ~~~~ ') dependencies\n FROM versions v\n LEFT OUTER JOIN game_versions_versions gvv on v.id = gvv.joining_version_id\n LEFT OUTER JOIN game_versions gv on gvv.game_version_id = gv.id\n LEFT OUTER JOIN loaders_versions lv on v.id = lv.version_id\n LEFT OUTER JOIN loaders l on lv.loader_id = l.id\n LEFT OUTER JOIN files f on v.id = f.version_id\n LEFT OUTER JOIN hashes h on f.id = h.file_id\n LEFT OUTER JOIN dependencies d on v.id = d.dependent_id\n WHERE v.id = $1\n GROUP BY v.id;\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "mod_id",
"type_info": "Int8"
},
{
"ordinal": 2,
"name": "author_id",
"type_info": "Int8"
},
{
"ordinal": 3,
"name": "version_name",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "version_number",
"type_info": "Varchar"
},
{
"ordinal": 5,
"name": "changelog",
"type_info": "Varchar"
},
{
"ordinal": 6,
"name": "changelog_url",
"type_info": "Varchar"
},
{
"ordinal": 7,
"name": "date_published",
"type_info": "Timestamptz"
},
{
"ordinal": 8,
"name": "downloads",
"type_info": "Int4"
},
{
"ordinal": 9,
"name": "version_type",
"type_info": "Varchar"
},
{
"ordinal": 10,
"name": "featured",
"type_info": "Bool"
},
{
"ordinal": 11,
"name": "game_versions",
"type_info": "Text"
},
{
"ordinal": 12,
"name": "loaders",
"type_info": "Text"
},
{
"ordinal": 13,
"name": "files",
"type_info": "Text"
},
{
"ordinal": 14,
"name": "hashes",
"type_info": "Text"
},
{
"ordinal": 15,
"name": "dependencies",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false,
false,
false,
false,
false,
false,
true,
false,
false,
false,
false,
null,
null,
null,
null,
null
]
}
},
"a39ce28b656032f862b205cffa393a76b989f4803654a615477a94fda5f57354": { "a39ce28b656032f862b205cffa393a76b989f4803654a615477a94fda5f57354": {
"query": "\n DELETE FROM states\n WHERE id = $1\n ", "query": "\n DELETE FROM states\n WHERE id = $1\n ",
"describe": { "describe": {
@@ -4719,6 +4817,74 @@
] ]
} }
}, },
"c83cef31a25dd3037fa6049da572301e3e85871dcc3a1de8acb395d34fa0cf74": {
"query": "\n SELECT n.id, n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type,\n STRING_AGG(DISTINCT na.id || ' |||| ' || na.title || ' |||| ' || na.action_route || ' |||| ' || na.action_route_method, ' ~~~~ ') actions\n FROM notifications n\n LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id\n WHERE n.user_id = $1\n GROUP BY n.id, n.user_id;\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "user_id",
"type_info": "Int8"
},
{
"ordinal": 2,
"name": "title",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "text",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "link",
"type_info": "Varchar"
},
{
"ordinal": 5,
"name": "created",
"type_info": "Timestamptz"
},
{
"ordinal": 6,
"name": "read",
"type_info": "Bool"
},
{
"ordinal": 7,
"name": "notification_type",
"type_info": "Varchar"
},
{
"ordinal": 8,
"name": "actions",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false,
false,
false,
false,
false,
false,
false,
true,
null
]
}
},
"c9d63ed46799db7c30a7e917d97a5d4b2b78b0234cce49e136fa57526b38c1ca": { "c9d63ed46799db7c30a7e917d97a5d4b2b78b0234cce49e136fa57526b38c1ca": {
"query": "\n SELECT EXISTS(SELECT 1 FROM versions WHERE id = $1)\n ", "query": "\n SELECT EXISTS(SELECT 1 FROM versions WHERE id = $1)\n ",
"describe": { "describe": {
@@ -5681,62 +5847,6 @@
] ]
} }
}, },
"ea96ab7c1290f4caddcef8ecf2aec0216654faca05ff760ffa553ad3e32827f5": {
"query": "\n SELECT n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type\n FROM notifications n\n WHERE n.id = $1\n GROUP BY n.id, n.user_id;\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "user_id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "title",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "text",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "link",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "created",
"type_info": "Timestamptz"
},
{
"ordinal": 5,
"name": "read",
"type_info": "Bool"
},
{
"ordinal": 6,
"name": "notification_type",
"type_info": "Varchar"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false,
false,
false,
false,
false,
false,
true
]
}
},
"ebef881a0dae70e990814e567ed3de9565bb29b772782bc974c953af195fd6d7": { "ebef881a0dae70e990814e567ed3de9565bb29b772782bc974c953af195fd6d7": {
"query": "\n SELECT n.id FROM notifications n\n WHERE n.user_id = $1\n ", "query": "\n SELECT n.id FROM notifications n\n WHERE n.user_id = $1\n ",
"describe": { "describe": {
@@ -5957,32 +6067,6 @@
"nullable": [] "nullable": []
} }
}, },
"f7bea04e8e279e27a24de1bdf3c413daa8677994df5131494b28691ed6611efc": {
"query": "\n SELECT url,expires FROM states\n WHERE id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "url",
"type_info": "Varchar"
},
{
"ordinal": 1,
"name": "expires",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false,
false
]
}
},
"f8c00875a7450c74423f9913cc3500898e9fcb6aa7eb8fc2f6fd16dc560773de": { "f8c00875a7450c74423f9913cc3500898e9fcb6aa7eb8fc2f6fd16dc560773de": {
"query": "\n SELECT short, name FROM donation_platforms\n WHERE id = $1\n ", "query": "\n SELECT short, name FROM donation_platforms\n WHERE id = $1\n ",
"describe": { "describe": {

View File

@@ -2,6 +2,7 @@
// TODO: remove attr once routes are created // TODO: remove attr once routes are created
use thiserror::Error; use thiserror::Error;
use time::OffsetDateTime;
pub mod categories; pub mod categories;
pub mod ids; pub mod ids;
@@ -125,3 +126,11 @@ impl ids::ProjectTypeId {
Ok(result.map(|r| ids::ProjectTypeId(r.id))) Ok(result.map(|r| ids::ProjectTypeId(r.id)))
} }
} }
/// Parses a timestamp string as rendered by Postgres (via the `STRING_AGG`
/// concatenations elsewhere in this commit, e.g. `gv.created` in the version
/// queries) into an `OffsetDateTime`, falling back to the current UTC time
/// if parsing fails.
///
/// The input's space separator is swapped for `T` and `":00Z"` is appended to
/// make the string RFC 3339-shaped before parsing.
/// NOTE(review): appending `":00Z"` only yields valid RFC 3339 if the input
/// carries minute precision without seconds or offset (e.g.
/// `"2022-06-17 16:56"` -> `"2022-06-17T16:56:00Z"`); an input that already
/// includes seconds would produce an invalid string and silently hit the
/// `now_utc()` fallback — TODO confirm against the actual column rendering.
/// NOTE(review): the `unwrap_or_else` fallback masks parse failures by
/// substituting the current time; verify callers tolerate that.
pub fn convert_postgres_date(input: &str) -> OffsetDateTime {
    OffsetDateTime::parse(
        format!("{}:00Z", input.replace(' ', "T")),
        time::Format::Rfc3339,
    )
    .unwrap_or_else(|_| OffsetDateTime::now_utc())
}

View File

@@ -118,31 +118,40 @@ impl Notification {
id: NotificationId, id: NotificationId,
executor: E, executor: E,
) -> Result<Option<Self>, sqlx::error::Error> ) -> Result<Option<Self>, sqlx::error::Error>
where where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy, E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{ {
let (notifications, actions) = futures::join!( let result = sqlx::query!(
sqlx::query!( "
" SELECT n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type,
SELECT n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type STRING_AGG(DISTINCT na.id || ' |||| ' || na.title || ' |||| ' || na.action_route || ' |||| ' || na.action_route_method, ' ~~~~ ') actions
FROM notifications n FROM notifications n
WHERE n.id = $1 LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id
GROUP BY n.id, n.user_id; WHERE n.id = $1
", GROUP BY n.id, n.user_id;
id as NotificationId, ",
) id as NotificationId,
.fetch_optional(executor), )
sqlx::query!( .fetch_optional(executor)
" .await?;
SELECT id, title, notification_id, action_route, action_route_method
FROM notifications_actions if let Some(row) = result {
WHERE notification_id = $1 let mut actions: Vec<NotificationAction> = Vec::new();
",
id as NotificationId, row.actions.unwrap_or_default().split(" ~~~~ ").for_each(|x| {
).fetch_all(executor), let action: Vec<&str> = x.split(" |||| ").collect();
);
if action.len() >= 3 {
actions.push(NotificationAction {
id: NotificationActionId(action[0].parse().unwrap_or(0)),
notification_id: id,
title: action[1].to_string(),
action_route_method: action[3].to_string(),
action_route: action[2].to_string(),
});
}
});
if let Some(row) = notifications? {
Ok(Some(Notification { Ok(Some(Notification {
id, id,
user_id: UserId(row.user_id), user_id: UserId(row.user_id),
@@ -152,16 +161,7 @@ impl Notification {
link: row.link, link: row.link,
read: row.read, read: row.read,
created: row.created, created: row.created,
actions: actions? actions,
.into_iter()
.map(|x| NotificationAction {
id: NotificationActionId(x.id),
notification_id: NotificationId(x.notification_id),
title: x.title,
action_route_method: x.action_route_method,
action_route: x.action_route,
})
.collect(),
})) }))
} else { } else {
Ok(None) Ok(None)
@@ -172,38 +172,116 @@ impl Notification {
notification_ids: Vec<NotificationId>, notification_ids: Vec<NotificationId>,
exec: E, exec: E,
) -> Result<Vec<Notification>, sqlx::Error> ) -> Result<Vec<Notification>, sqlx::Error>
where where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy, E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{ {
futures::future::try_join_all( use futures::stream::TryStreamExt;
notification_ids.into_iter().map(|id| Self::get(id, exec)),
let notification_ids_parsed: Vec<i64> = notification_ids.into_iter().map(|x| x.0).collect();
sqlx::query!(
"
SELECT n.id, n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type,
STRING_AGG(DISTINCT na.id || ' |||| ' || na.title || ' |||| ' || na.action_route || ' |||| ' || na.action_route_method, ' ~~~~ ') actions
FROM notifications n
LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id
WHERE n.id = ANY($1)
GROUP BY n.id, n.user_id
ORDER BY n.created DESC;
",
&notification_ids_parsed
) )
.await .fetch_many(exec)
.map(|x| x.into_iter().flatten().collect()) .try_filter_map(|e| async {
Ok(e.right().map(|row| {
let id = NotificationId(row.id);
let mut actions: Vec<NotificationAction> = Vec::new();
row.actions.unwrap_or_default().split(" ~~~~ ").for_each(|x| {
let action: Vec<&str> = x.split(" |||| ").collect();
if action.len() >= 3 {
actions.push(NotificationAction {
id: NotificationActionId(action[0].parse().unwrap_or(0)),
notification_id: id,
title: action[1].to_string(),
action_route_method: action[3].to_string(),
action_route: action[2].to_string(),
});
}
});
Notification {
id,
user_id: UserId(row.user_id),
notification_type: row.notification_type,
title: row.title,
text: row.text,
link: row.link,
read: row.read,
created: row.created,
actions,
}
}))
})
.try_collect::<Vec<Notification>>()
.await
} }
pub async fn get_many_user<'a, E>( pub async fn get_many_user<'a, E>(
user_id: UserId, user_id: UserId,
exec: E, exec: E,
) -> Result<Vec<Notification>, sqlx::Error> ) -> Result<Vec<Notification>, sqlx::Error>
where where
E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy, E: sqlx::Executor<'a, Database = sqlx::Postgres> + Copy,
{ {
let notification_ids = sqlx::query!( use futures::stream::TryStreamExt;
sqlx::query!(
" "
SELECT id SELECT n.id, n.user_id, n.title, n.text, n.link, n.created, n.read, n.type notification_type,
FROM notifications STRING_AGG(DISTINCT na.id || ' |||| ' || na.title || ' |||| ' || na.action_route || ' |||| ' || na.action_route_method, ' ~~~~ ') actions
WHERE user_id = $1 FROM notifications n
LEFT OUTER JOIN notifications_actions na on n.id = na.notification_id
WHERE n.user_id = $1
GROUP BY n.id, n.user_id;
", ",
user_id as UserId user_id as UserId
) )
.fetch_all(exec) .fetch_many(exec)
.await? .try_filter_map(|e| async {
.into_iter() Ok(e.right().map(|row| {
.map(|x| NotificationId(x.id)) let id = NotificationId(row.id);
.collect(); let mut actions: Vec<NotificationAction> = Vec::new();
Self::get_many(notification_ids, exec).await row.actions.unwrap_or_default().split(" ~~~~ ").for_each(|x| {
let action: Vec<&str> = x.split(" |||| ").collect();
if action.len() >= 3 {
actions.push(NotificationAction {
id: NotificationActionId(action[0].parse().unwrap_or(0)),
notification_id: id,
title: action[1].to_string(),
action_route_method: action[3].to_string(),
action_route: action[2].to_string(),
});
}
});
Notification {
id,
user_id: UserId(row.user_id),
notification_type: row.notification_type,
title: row.title,
text: row.text,
link: row.link,
read: row.read,
created: row.created,
actions,
}
}))
})
.try_collect::<Vec<Notification>>()
.await
} }
pub async fn remove( pub async fn remove(

View File

@@ -1,4 +1,5 @@
use super::ids::*; use super::ids::*;
use crate::database::models::convert_postgres_date;
use time::OffsetDateTime; use time::OffsetDateTime;
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
@@ -719,11 +720,7 @@ impl Project {
} else { } else {
Some(strings[4].to_string()) Some(strings[4].to_string())
}, },
created: OffsetDateTime::parse( created: convert_postgres_date(strings[2]),
strings[2],
time::Format::Rfc3339,
)
.unwrap_or_else(|_| OffsetDateTime::now_utc()),
}) })
} else { } else {
None None
@@ -835,7 +832,7 @@ impl Project {
featured: strings[1].parse().unwrap_or(false), featured: strings[1].parse().unwrap_or(false),
title: if strings[3] == " " { None } else { Some(strings[3].to_string()) }, title: if strings[3] == " " { None } else { Some(strings[3].to_string()) },
description: if strings[4] == " " { None } else { Some(strings[4].to_string()) }, description: if strings[4] == " " { None } else { Some(strings[4].to_string()) },
created: OffsetDateTime::parse(strings[2], time::Format::Rfc3339).unwrap_or_else(|_| OffsetDateTime::now_utc()) created: convert_postgres_date(strings[2])
}) })
} else { } else {
None None

View File

@@ -1,5 +1,6 @@
use super::ids::*; use super::ids::*;
use super::DatabaseError; use super::DatabaseError;
use crate::database::models::convert_postgres_date;
use std::collections::HashMap; use std::collections::HashMap;
use time::OffsetDateTime; use time::OffsetDateTime;
@@ -498,22 +499,20 @@ impl Version {
let vec = sqlx::query!( let vec = sqlx::query!(
" "
SELECT version.id FROM ( SELECT DISTINCT ON(v.date_published, v.id) version_id, v.date_published FROM versions v
SELECT DISTINCT ON(v.id) v.id, v.date_published FROM versions v INNER JOIN game_versions_versions gvv ON gvv.joining_version_id = v.id
INNER JOIN game_versions_versions gvv ON gvv.joining_version_id = v.id INNER JOIN game_versions gv on gvv.game_version_id = gv.id AND (cardinality($2::varchar[]) = 0 OR gv.version = ANY($2::varchar[]))
INNER JOIN game_versions gv on gvv.game_version_id = gv.id AND (cardinality($2::varchar[]) = 0 OR gv.version = ANY($2::varchar[])) INNER JOIN loaders_versions lv ON lv.version_id = v.id
INNER JOIN loaders_versions lv ON lv.version_id = v.id INNER JOIN loaders l on lv.loader_id = l.id AND (cardinality($3::varchar[]) = 0 OR l.loader = ANY($3::varchar[]))
INNER JOIN loaders l on lv.loader_id = l.id AND (cardinality($3::varchar[]) = 0 OR l.loader = ANY($3::varchar[])) WHERE v.mod_id = $1
WHERE v.mod_id = $1 ORDER BY v.date_published, v.id ASC
) AS version
ORDER BY version.date_published ASC
", ",
project_id as ProjectId, project_id as ProjectId,
&game_versions.unwrap_or_default(), &game_versions.unwrap_or_default(),
&loaders.unwrap_or_default(), &loaders.unwrap_or_default(),
) )
.fetch_many(exec) .fetch_many(exec)
.try_filter_map(|e| async { Ok(e.right().map(|v| VersionId(v.id))) }) .try_filter_map(|e| async { Ok(e.right().map(|v| VersionId(v.version_id))) })
.try_collect::<Vec<VersionId>>() .try_collect::<Vec<VersionId>>()
.await?; .await?;
@@ -615,7 +614,7 @@ impl Version {
SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number, SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,
v.changelog changelog, v.changelog_url changelog_url, v.date_published date_published, v.downloads downloads, v.changelog changelog, v.changelog_url changelog_url, v.date_published date_published, v.downloads downloads,
v.version_type version_type, v.featured featured, v.version_type version_type, v.featured featured,
STRING_AGG(DISTINCT gv.version, ' ~~~~ ') game_versions, STRING_AGG(DISTINCT l.loader, ' ~~~~ ') loaders, STRING_AGG(DISTINCT gv.version || ' |||| ' || gv.created, ' ~~~~ ') game_versions, STRING_AGG(DISTINCT l.loader, ' ~~~~ ') loaders,
STRING_AGG(DISTINCT f.id || ' |||| ' || f.is_primary || ' |||| ' || f.size || ' |||| ' || f.url || ' |||| ' || f.filename, ' ~~~~ ') files, STRING_AGG(DISTINCT f.id || ' |||| ' || f.is_primary || ' |||| ' || f.size || ' |||| ' || f.url || ' |||| ' || f.filename, ' ~~~~ ') files,
STRING_AGG(DISTINCT h.algorithm || ' |||| ' || encode(h.hash, 'escape') || ' |||| ' || h.file_id, ' ~~~~ ') hashes, STRING_AGG(DISTINCT h.algorithm || ' |||| ' || encode(h.hash, 'escape') || ' |||| ' || h.file_id, ' ~~~~ ') hashes,
STRING_AGG(DISTINCT COALESCE(d.dependency_id, 0) || ' |||| ' || COALESCE(d.mod_dependency_id, 0) || ' |||| ' || d.dependency_type || ' |||| ' || COALESCE(d.dependency_file_name, ' '), ' ~~~~ ') dependencies STRING_AGG(DISTINCT COALESCE(d.dependency_id, 0) || ' |||| ' || COALESCE(d.mod_dependency_id, 0) || ' |||| ' || d.dependency_type || ' |||| ' || COALESCE(d.dependency_file_name, ' '), ' ~~~~ ') dependencies
@@ -636,26 +635,6 @@ impl Version {
.await?; .await?;
if let Some(v) = result { if let Some(v) = result {
let hashes: Vec<(FileId, String, Vec<u8>)> = v
.hashes
.unwrap_or_default()
.split(" ~~~~ ")
.map(|f| {
let hash: Vec<&str> = f.split(" |||| ").collect();
if hash.len() >= 3 {
Some((
FileId(hash[2].parse().unwrap_or(0)),
hash[0].to_string(),
hash[1].to_string().into_bytes(),
))
} else {
None
}
})
.flatten()
.collect();
Ok(Some(QueryVersion { Ok(Some(QueryVersion {
id: VersionId(v.id), id: VersionId(v.id),
project_id: ProjectId(v.mod_id), project_id: ProjectId(v.mod_id),
@@ -666,44 +645,87 @@ impl Version {
changelog_url: v.changelog_url, changelog_url: v.changelog_url,
date_published: v.date_published, date_published: v.date_published,
downloads: v.downloads, downloads: v.downloads,
files: v files: {
.files let hashes: Vec<(FileId, String, Vec<u8>)> = v
.unwrap_or_default() .hashes
.split(" ~~~~ ") .unwrap_or_default()
.map(|f| { .split(" ~~~~ ")
let file: Vec<&str> = f.split(" |||| ").collect(); .map(|f| {
let hash: Vec<&str> = f.split(" |||| ").collect();
if file.len() >= 5 { if hash.len() >= 3 {
let file_id = FileId(file[0].parse().unwrap_or(0)); Some((
let mut file_hashes = HashMap::new(); FileId(hash[2].parse().unwrap_or(0)),
hash[0].to_string(),
for hash in &hashes { hash[1].to_string().into_bytes(),
if (hash.0).0 == file_id.0 { ))
file_hashes } else {
.insert(hash.1.clone(), hash.2.clone()); None
}
} }
})
.flatten()
.collect();
Some(QueryFile { v.files
id: file_id, .unwrap_or_default()
url: file[3].to_string(), .split(" ~~~~ ")
filename: file[4].to_string(), .map(|f| {
hashes: file_hashes, let file: Vec<&str> = f.split(" |||| ").collect();
primary: file[1].parse().unwrap_or(false),
size: file[2].parse().unwrap_or(0), if file.len() >= 5 {
}) let file_id =
} else { FileId(file[0].parse().unwrap_or(0));
None let mut file_hashes = HashMap::new();
}
}) for hash in &hashes {
.flatten() if (hash.0).0 == file_id.0 {
.collect(), file_hashes.insert(
game_versions: v hash.1.clone(),
.game_versions hash.2.clone(),
.unwrap_or_default() );
.split(" ~~~~ ") }
.map(|x| x.to_string()) }
.collect(),
Some(QueryFile {
id: file_id,
url: file[3].to_string(),
filename: file[4].to_string(),
hashes: file_hashes,
primary: file[1].parse().unwrap_or(false),
size: file[2].parse().unwrap_or(0),
})
} else {
None
}
})
.flatten()
.collect()
},
game_versions: {
let game_versions = v.game_versions.unwrap_or_default();
let mut gv = game_versions
.split(" ~~~~ ")
.flat_map(|x| {
let version: Vec<&str> =
x.split(" |||| ").collect();
if version.len() >= 2 {
Some((
version[0],
convert_postgres_date(version[1])
.unix_timestamp(),
))
} else {
None
}
})
.collect::<Vec<(&str, i64)>>();
gv.sort_by(|a, b| a.1.cmp(&b.1));
gv.into_iter().map(|x| x.0.to_string()).collect()
},
loaders: v loaders: v
.loaders .loaders
.unwrap_or_default() .unwrap_or_default()
@@ -770,7 +792,7 @@ impl Version {
SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number, SELECT v.id id, v.mod_id mod_id, v.author_id author_id, v.name version_name, v.version_number version_number,
v.changelog changelog, v.changelog_url changelog_url, v.date_published date_published, v.downloads downloads, v.changelog changelog, v.changelog_url changelog_url, v.date_published date_published, v.downloads downloads,
v.version_type version_type, v.featured featured, v.version_type version_type, v.featured featured,
STRING_AGG(DISTINCT gv.version, ' ~~~~ ') game_versions, STRING_AGG(DISTINCT l.loader, ' ~~~~ ') loaders, STRING_AGG(DISTINCT gv.version || ' |||| ' || gv.created, ' ~~~~ ') game_versions, STRING_AGG(DISTINCT l.loader, ' ~~~~ ') loaders,
STRING_AGG(DISTINCT f.id || ' |||| ' || f.is_primary || ' |||| ' || f.size || ' |||| ' || f.url || ' |||| ' || f.filename, ' ~~~~ ') files, STRING_AGG(DISTINCT f.id || ' |||| ' || f.is_primary || ' |||| ' || f.size || ' |||| ' || f.url || ' |||| ' || f.filename, ' ~~~~ ') files,
STRING_AGG(DISTINCT h.algorithm || ' |||| ' || encode(h.hash, 'escape') || ' |||| ' || h.file_id, ' ~~~~ ') hashes, STRING_AGG(DISTINCT h.algorithm || ' |||| ' || encode(h.hash, 'escape') || ' |||| ' || h.file_id, ' ~~~~ ') hashes,
STRING_AGG(DISTINCT COALESCE(d.dependency_id, 0) || ' |||| ' || COALESCE(d.mod_dependency_id, 0) || ' |||| ' || d.dependency_type || ' |||| ' || COALESCE(d.dependency_file_name, ' '), ' ~~~~ ') dependencies STRING_AGG(DISTINCT COALESCE(d.dependency_id, 0) || ' |||| ' || COALESCE(d.mod_dependency_id, 0) || ' |||| ' || d.dependency_type || ' |||| ' || COALESCE(d.dependency_file_name, ' '), ' ~~~~ ') dependencies
@@ -790,21 +812,7 @@ impl Version {
) )
.fetch_many(exec) .fetch_many(exec)
.try_filter_map(|e| async { .try_filter_map(|e| async {
Ok(e.right().map(|v| { Ok(e.right().map(|v|
let hashes: Vec<(FileId, String, Vec<u8>)> = v.hashes.unwrap_or_default().split(" ~~~~ ").map(|f| {
let hash: Vec<&str> = f.split(" |||| ").collect();
if hash.len() >= 3 {
Some((
FileId(hash[2].parse().unwrap_or(0)),
hash[0].to_string(),
hash[1].to_string().into_bytes(),
))
} else {
None
}
}).flatten().collect();
QueryVersion { QueryVersion {
id: VersionId(v.id), id: VersionId(v.id),
project_id: ProjectId(v.mod_id), project_id: ProjectId(v.mod_id),
@@ -815,32 +823,71 @@ impl Version {
changelog_url: v.changelog_url, changelog_url: v.changelog_url,
date_published: v.date_published, date_published: v.date_published,
downloads: v.downloads, downloads: v.downloads,
files: v.files.unwrap_or_default().split(" ~~~~ ").map(|f| { files: {
let file: Vec<&str> = f.split(" |||| ").collect(); let hashes: Vec<(FileId, String, Vec<u8>)> = v.hashes.unwrap_or_default().split(" ~~~~ ").map(|f| {
let hash: Vec<&str> = f.split(" |||| ").collect();
if file.len() >= 5 { if hash.len() >= 3 {
let file_id = FileId(file[0].parse().unwrap_or(0)); Some((
let mut file_hashes = HashMap::new(); FileId(hash[2].parse().unwrap_or(0)),
hash[0].to_string(),
for hash in &hashes { hash[1].to_string().into_bytes(),
if (hash.0).0 == file_id.0 { ))
file_hashes.insert(hash.1.clone(), hash.2.clone()); } else {
} None
} }
}).flatten().collect();
Some(QueryFile { v.files.unwrap_or_default().split(" ~~~~ ").map(|f| {
id: file_id, let file: Vec<&str> = f.split(" |||| ").collect();
url: file[3].to_string(),
filename: file[4].to_string(), if file.len() >= 5 {
hashes: file_hashes, let file_id = FileId(file[0].parse().unwrap_or(0));
primary: file[1].parse().unwrap_or(false), let mut file_hashes = HashMap::new();
size: file[2].parse().unwrap_or(0),
for hash in &hashes {
if (hash.0).0 == file_id.0 {
file_hashes.insert(hash.1.clone(), hash.2.clone());
}
}
Some(QueryFile {
id: file_id,
url: file[3].to_string(),
filename: file[4].to_string(),
hashes: file_hashes,
primary: file[1].parse().unwrap_or(false),
size: file[2].parse().unwrap_or(0),
})
} else {
None
}
}).flatten().collect()
},
game_versions: {
let game_versions = v
.game_versions
.unwrap_or_default();
let mut gv = game_versions
.split(" ~~~~ ")
.flat_map(|x| {
let version: Vec<&str> = x.split(" |||| ").collect();
if version.len() >= 2 {
Some((version[0], convert_postgres_date(version[1]).unix_timestamp()))
} else {
None
}
}) })
} else { .collect::<Vec<(&str, i64)>>();
None
} gv.sort_by(|a, b| a.1.cmp(&b.1));
}).flatten().collect(),
game_versions: v.game_versions.unwrap_or_default().split(" ~~~~ ").map(|x| x.to_string()).collect(), gv.into_iter()
.map(|x| x.0.to_string())
.collect()
},
loaders: v.loaders.unwrap_or_default().split(" ~~~~ ").map(|x| x.to_string()).collect(), loaders: v.loaders.unwrap_or_default().split(" ~~~~ ").map(|x| x.to_string()).collect(),
featured: v.featured, featured: v.featured,
dependencies: v.dependencies dependencies: v.dependencies
@@ -878,7 +925,7 @@ impl Version {
}).flatten().collect(), }).flatten().collect(),
version_type: v.version_type version_type: v.version_type
} }
})) ))
}) })
.try_collect::<Vec<QueryVersion>>() .try_collect::<Vec<QueryVersion>>()
.await .await

View File

@@ -253,9 +253,7 @@ async fn main() -> std::io::Result<()> {
}) })
.with_interval(std::time::Duration::from_secs(60)) .with_interval(std::time::Duration::from_secs(60))
.with_max_requests(300) .with_max_requests(300)
.with_ignore_key( .with_ignore_key(dotenv::var("RATE_LIMIT_IGNORE_KEY").ok()),
dotenv::var("RATE_LIMIT_IGNORE_KEY").ok(),
),
) )
.app_data(web::Data::new(pool.clone())) .app_data(web::Data::new(pool.clone()))
.app_data(web::Data::new(file_host.clone())) .app_data(web::Data::new(file_host.clone()))
@@ -296,6 +294,11 @@ fn check_env_vars() -> bool {
failed |= true; failed |= true;
} }
if parse_strings_from_var("ALLOWED_CALLBACK_URLS").is_none() {
warn!("Variable `ALLOWED_CALLBACK_URLS` missing in dotenv or not a json array of strings");
failed |= true;
}
failed |= check_var::<String>("SITE_URL"); failed |= check_var::<String>("SITE_URL");
failed |= check_var::<String>("CDN_URL"); failed |= check_var::<String>("CDN_URL");
failed |= check_var::<String>("LABRINTH_ADMIN_KEY"); failed |= check_var::<String>("LABRINTH_ADMIN_KEY");

View File

@@ -51,7 +51,7 @@ where
max_requests: 0, max_requests: 0,
store, store,
identifier: Rc::new(Box::new(identifier)), identifier: Rc::new(Box::new(identifier)),
ignore_key: None ignore_key: None,
} }
} }

View File

@@ -1,3 +1,16 @@
/*!
This auth module is primarily for use within the main website. Applications interacting with the
authenticated API (a very small portion - notifications, private projects, editing/creating projects
and versions) should either retrieve the Modrinth GitHub token through the site, or create a personal
app token for use with Modrinth.
JUst as a summary: Don't implement this flow in your application! Instead, use a personal access token
or create your own GitHub OAuth2 application.
This system will be revisited and allow easier interaction with the authenticated API once we roll
out our own authentication system.
*/
use crate::database::models::{generate_state_id, User}; use crate::database::models::{generate_state_id, User};
use crate::models::error::ApiError; use crate::models::error::ApiError;
use crate::models::ids::base62_impl::{parse_base62, to_base62}; use crate::models::ids::base62_impl::{parse_base62, to_base62};
@@ -11,6 +24,7 @@ use serde::{Deserialize, Serialize};
use sqlx::postgres::PgPool; use sqlx::postgres::PgPool;
use thiserror::Error; use thiserror::Error;
use time::OffsetDateTime; use time::OffsetDateTime;
use crate::parse_strings_from_var;
pub fn config(cfg: &mut ServiceConfig) { pub fn config(cfg: &mut ServiceConfig) {
cfg.service(scope("auth").service(auth_callback).service(init)); cfg.service(scope("auth").service(auth_callback).service(init));
@@ -34,6 +48,8 @@ pub enum AuthorizationError {
Authentication(#[from] crate::util::auth::AuthenticationError), Authentication(#[from] crate::util::auth::AuthenticationError),
#[error("Error while decoding Base62")] #[error("Error while decoding Base62")]
Decoding(#[from] DecodingError), Decoding(#[from] DecodingError),
#[error("Invalid callback URL specified")]
Url,
} }
impl actix_web::ResponseError for AuthorizationError { impl actix_web::ResponseError for AuthorizationError {
fn status_code(&self) -> StatusCode { fn status_code(&self) -> StatusCode {
@@ -50,6 +66,7 @@ impl actix_web::ResponseError for AuthorizationError {
AuthorizationError::InvalidCredentials => StatusCode::UNAUTHORIZED, AuthorizationError::InvalidCredentials => StatusCode::UNAUTHORIZED,
AuthorizationError::Decoding(..) => StatusCode::BAD_REQUEST, AuthorizationError::Decoding(..) => StatusCode::BAD_REQUEST,
AuthorizationError::Authentication(..) => StatusCode::UNAUTHORIZED, AuthorizationError::Authentication(..) => StatusCode::UNAUTHORIZED,
AuthorizationError::Url => StatusCode::BAD_REQUEST,
} }
} }
@@ -65,7 +82,8 @@ impl actix_web::ResponseError for AuthorizationError {
AuthorizationError::Decoding(..) => "decoding_error", AuthorizationError::Decoding(..) => "decoding_error",
AuthorizationError::Authentication(..) => { AuthorizationError::Authentication(..) => {
"authentication_error" "authentication_error"
} },
AuthorizationError::Url => "url_error",
}, },
description: &self.to_string(), description: &self.to_string(),
}) })
@@ -96,6 +114,16 @@ pub async fn init(
Query(info): Query<AuthorizationInit>, Query(info): Query<AuthorizationInit>,
client: Data<PgPool>, client: Data<PgPool>,
) -> Result<HttpResponse, AuthorizationError> { ) -> Result<HttpResponse, AuthorizationError> {
let url = url::Url::parse(&info.url).map_err(|_| AuthorizationError::Url)?;
let allowed_callback_urls = parse_strings_from_var("ALLOWED_CALLBACK_URLS")
.unwrap_or_default();
let domain = url.domain().ok_or(AuthorizationError::Url)?;
if !allowed_callback_urls.iter().any(|x| domain.ends_with(x)) {
return Err(AuthorizationError::Url);
}
let mut transaction = client.begin().await?; let mut transaction = client.begin().await?;
let state = generate_state_id(&mut transaction).await?; let state = generate_state_id(&mut transaction).await?;
@@ -136,7 +164,7 @@ pub async fn auth_callback(
let result_option = sqlx::query!( let result_option = sqlx::query!(
" "
SELECT url,expires FROM states SELECT url, expires FROM states
WHERE id = $1 WHERE id = $1
", ",
state_id as i64 state_id as i64
@@ -145,13 +173,11 @@ pub async fn auth_callback(
.await?; .await?;
if let Some(result) = result_option { if let Some(result) = result_option {
// let now = OffsetDateTime::now_utc(); let duration = result.expires - OffsetDateTime::now_utc();
// TODO: redo this condition later..
// let duration = now - result.expires; if duration.whole_seconds() < 0 {
// return Err(AuthorizationError::InvalidCredentials);
// if duration.whole_seconds() < 0 { }
// return Err(AuthorizationError::InvalidCredentials);
// }
sqlx::query!( sqlx::query!(
" "

View File

@@ -385,18 +385,29 @@ pub async fn transfer_ownership(
let id = info.into_inner().0; let id = info.into_inner().0;
let current_user = get_user_from_headers(req.headers(), &**pool).await?; let current_user = get_user_from_headers(req.headers(), &**pool).await?;
let member = TeamMember::get_from_user_id(
id.into(), if !current_user.role.is_mod() {
current_user.id.into(), let member = TeamMember::get_from_user_id(
&**pool, id.into(),
) current_user.id.into(),
.await? &**pool,
.ok_or_else(|| {
ApiError::CustomAuthentication(
"You don't have permission to edit members of this team"
.to_string(),
) )
})?; .await?
.ok_or_else(|| {
ApiError::CustomAuthentication(
"You don't have permission to edit members of this team"
.to_string(),
)
})?;
if member.role != crate::models::teams::OWNER_ROLE {
return Err(ApiError::CustomAuthentication(
"You don't have permission to edit the ownership of this team"
.to_string(),
));
}
}
let new_member = TeamMember::get_from_user_id( let new_member = TeamMember::get_from_user_id(
id.into(), id.into(),
new_owner.user_id.into(), new_owner.user_id.into(),
@@ -409,13 +420,6 @@ pub async fn transfer_ownership(
) )
})?; })?;
if member.role != crate::models::teams::OWNER_ROLE {
return Err(ApiError::CustomAuthentication(
"You don't have permission to edit the ownership of this team"
.to_string(),
));
}
if !new_member.accepted { if !new_member.accepted {
return Err(ApiError::InvalidInput( return Err(ApiError::InvalidInput(
"You can only transfer ownership to members who are currently in your team".to_string(), "You can only transfer ownership to members who are currently in your team".to_string(),

View File

@@ -35,7 +35,7 @@ pub struct InitialVersionData {
regex = "crate::util::validate::RE_URL_SAFE" regex = "crate::util::validate::RE_URL_SAFE"
)] )]
pub version_number: String, pub version_number: String,
#[validate(length(min = 3, max = 256))] #[validate(length(min = 1, max = 256))]
#[serde(alias = "name")] #[serde(alias = "name")]
pub version_title: String, pub version_title: String,
#[validate(length(max = 65536))] #[validate(length(max = 65536))]
@@ -639,11 +639,11 @@ pub async fn upload_file(
field: &mut Field, field: &mut Field,
file_host: &dyn FileHost, file_host: &dyn FileHost,
uploaded_files: &mut Vec<UploadedFile>, uploaded_files: &mut Vec<UploadedFile>,
version_files: &mut Vec<models::version_item::VersionFileBuilder>, version_files: &mut Vec<VersionFileBuilder>,
dependencies: &mut Vec<models::version_item::DependencyBuilder>, dependencies: &mut Vec<DependencyBuilder>,
cdn_url: &str, cdn_url: &str,
content_disposition: &actix_web::http::header::ContentDisposition, content_disposition: &actix_web::http::header::ContentDisposition,
project_id: crate::models::ids::ProjectId, project_id: ProjectId,
version_number: &str, version_number: &str,
project_type: &str, project_type: &str,
loaders: Vec<Loader>, loaders: Vec<Loader>,

View File

@@ -303,7 +303,7 @@ pub async fn get_versions_from_hashes(
let hashes_parsed: Vec<Vec<u8>> = file_data let hashes_parsed: Vec<Vec<u8>> = file_data
.hashes .hashes
.iter() .iter()
.map(|x| x.as_bytes().to_vec()) .map(|x| x.to_lowercase().as_bytes().to_vec())
.collect(); .collect();
let result = sqlx::query!( let result = sqlx::query!(
@@ -360,7 +360,7 @@ pub async fn download_files(
let hashes_parsed: Vec<Vec<u8>> = file_data let hashes_parsed: Vec<Vec<u8>> = file_data
.hashes .hashes
.iter() .iter()
.map(|x| x.as_bytes().to_vec()) .map(|x| x.to_lowercase().as_bytes().to_vec())
.collect(); .collect();
let mut transaction = pool.begin().await?; let mut transaction = pool.begin().await?;
@@ -411,7 +411,7 @@ pub async fn update_files(
let hashes_parsed: Vec<Vec<u8>> = update_data let hashes_parsed: Vec<Vec<u8>> = update_data
.hashes .hashes
.iter() .iter()
.map(|x| x.as_bytes().to_vec()) .map(|x| x.to_lowercase().as_bytes().to_vec())
.collect(); .collect();
let mut transaction = pool.begin().await?; let mut transaction = pool.begin().await?;

View File

@@ -20,7 +20,7 @@ impl super::Validator for PackValidator {
} }
fn get_supported_loaders(&self) -> &[&str] { fn get_supported_loaders(&self) -> &[&str] {
&["forge", "fabric"] &["forge", "fabric", "quilt"]
} }
fn get_supported_game_versions(&self) -> SupportedGameVersions { fn get_supported_game_versions(&self) -> SupportedGameVersions {