
Merge pull request #63 from modrinth/ghauth

GitHub Authentication
Geometrically authored 2020-09-29 11:27:56 -07:00, committed by GitHub
31 changed files with 1391 additions and 35 deletions

3
.env

@@ -20,3 +20,6 @@ MAX_CURSEFORGE_ID=450000
LOCAL_INDEX_INTERVAL=3600
# 12 hours
EXTERNAL_INDEX_INTERVAL=43200
GITHUB_CLIENT_ID=3acffb2e808d16d4b226
GITHUB_CLIENT_SECRET=none
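These two credentials are read at runtime through dotenv, as the later changes to main.rs (check_env_vars) and routes/auth.rs show. A minimal sketch of loading them, assuming only the variable names from this diff; the helper itself is illustrative:

// Sketch: read the GitHub OAuth credentials added to .env above.
// Variable names come from this diff; the function is hypothetical.
fn github_oauth_credentials() -> Result<(String, String), dotenv::Error> {
    let client_id = dotenv::var("GITHUB_CLIENT_ID")?;
    let client_secret = dotenv::var("GITHUB_CLIENT_SECRET")?;
    Ok((client_id, client_secret))
}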

3
.idea/sqldialects.xml generated

@@ -2,5 +2,8 @@
<project version="4">
<component name="SqlDialectMappings">
<file url="file://$PROJECT_DIR$/migrations/20200716160921_init.sql" dialect="GenericSQL" />
<file url="file://$PROJECT_DIR$/migrations/20200812183213_unique-loaders.sql" dialect="GenericSQL" />
<file url="file://$PROJECT_DIR$/migrations/20200928033759_edit-states.sql" dialect="PostgreSQL" />
<file url="PROJECT" dialect="PostgreSQL" />
</component>
</project>


@@ -0,0 +1,4 @@
CREATE TABLE states (
id bigint PRIMARY KEY,
url varchar(500)
);


@@ -0,0 +1,4 @@
-- Add migration script here
ALTER TABLE states
ADD COLUMN expires timestamptz NOT NULL DEFAULT CURRENT_TIMESTAMP + interval '1 hour';


@@ -0,0 +1,3 @@
-- Add migration script here
ALTER TABLE states
ALTER COLUMN url SET NOT NULL;


@@ -0,0 +1,8 @@
ALTER TABLE users
ADD COLUMN github_id bigint NOT NULL default 0,
ADD COLUMN username varchar(255) NOT NULL default 'username',
ADD COLUMN name varchar(255) NOT NULL default 'John Doe',
ADD COLUMN email varchar(255) NULL default 'johndoe@modrinth.com',
ADD COLUMN avatar_url varchar(500) NOT NULL default '...',
ADD COLUMN bio varchar(160) NOT NULL default 'I make mods!',
ADD COLUMN created timestamptz default CURRENT_TIMESTAMP NOT NULL


@@ -0,0 +1,3 @@
-- Add migration script here
ALTER TABLE users
ADD COLUMN role varchar(50) NOT NULL default 'developer'


@@ -0,0 +1,3 @@
-- Add migration script here
ALTER TABLE versions
ADD COLUMN author_id bigint REFERENCES users NOT NULL default 0


@@ -1,5 +1,86 @@
{
"db": "PostgreSQL",
"03209c5bda2d704e688439919a7b3903db6ad7caebf7ddafb3ea52d312d47bfb": {
"query": "\n INSERT INTO users (\n id, github_id, username, name, email,\n avatar_url, bio, created\n )\n VALUES (\n $1, $2, $3, $4, $5,\n $6, $7, $8\n )\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int8",
"Int8",
"Varchar",
"Varchar",
"Varchar",
"Varchar",
"Varchar",
"Timestamptz"
]
},
"nullable": []
}
},
"1016a0bf55e9474357ac5ef725605ac337e82e1a2b93726ae795ec48f0d696dd": {
"query": "\n SELECT v.mod_id, v.author_id, v.name, v.version_number,\n v.changelog_url, v.date_published, v.downloads,\n release_channels.channel\n FROM versions v\n INNER JOIN release_channels ON v.release_channel = release_channels.id\n WHERE v.id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "mod_id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "author_id",
"type_info": "Int8"
},
{
"ordinal": 2,
"name": "name",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "version_number",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "changelog_url",
"type_info": "Varchar"
},
{
"ordinal": 5,
"name": "date_published",
"type_info": "Timestamptz"
},
{
"ordinal": 6,
"name": "downloads",
"type_info": "Int4"
},
{
"ordinal": 7,
"name": "channel",
"type_info": "Varchar"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false,
false,
false,
false,
true,
false,
false,
false
]
}
},
"1524c0462be70077736ac70fcd037fbf75651456b692e2ce40fa2e3fc8123984": {
"query": "\n SELECT hashes.algorithm, hashes.hash FROM hashes\n WHERE hashes.file_id = $1\n ",
"describe": {
@@ -62,6 +143,26 @@
]
}
},
"1c7b0eb4341af5a7942e52f632cf582561f10b4b6a41a082fb8a60f04ac17c6e": {
"query": "SELECT EXISTS(SELECT 1 FROM states WHERE id=$1)",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "exists",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
null
]
}
},
"1ffce9b2d5c9fa6c8b9abce4bad9f9419c44ad6367b7463b979c91b9b5b4fea1": {
"query": "SELECT EXISTS(SELECT 1 FROM versions WHERE id=$1)",
"describe": {
@@ -102,6 +203,26 @@
]
}
},
"275939f581a82197b45b0d56248926063f09ef86754498a720c5568cdb1f5ae0": {
"query": "SELECT user_id FROM team_members WHERE team_id=$1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "user_id",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false
]
}
},
"29e657d26f0fb24a766f5b5eb6a94d01d1616884d8ca10e91536e974d5b585a6": {
"query": "\n INSERT INTO loaders_versions (loader_id, version_id)\n VALUES ($1, $2)\n ",
"describe": {
@@ -145,6 +266,62 @@
"nullable": []
}
},
"351af9c9c1c05556bdd8c373f406a66c9358c51dc4222f8abc5095fbf2458471": {
"query": "\n SELECT u.id, u.name, u.email,\n u.avatar_url, u.username, u.bio,\n u.created\n FROM users u\n WHERE u.github_id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "name",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "email",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "avatar_url",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "username",
"type_info": "Varchar"
},
{
"ordinal": 5,
"name": "bio",
"type_info": "Varchar"
},
{
"ordinal": 6,
"name": "created",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false,
false,
true,
false,
false,
false,
false
]
}
},
"35272854c6aeb743218e73ccf6f34427ab72f25492dfa752f87a50e3da7204c5": {
"query": "\n SELECT v.mod_id, v.name, v.version_number,\n v.changelog_url, v.date_published, v.downloads,\n release_channels.channel\n FROM versions v\n INNER JOIN release_channels ON v.release_channel = release_channels.id\n WHERE v.id = $1\n ",
"describe": {
@@ -288,6 +465,19 @@
]
}
},
"4f307a8851b0cab7870798ba017955c8ebaba7444791dd65ffebcbac32d3585d": {
"query": "\n INSERT INTO states (id, url)\n VALUES ($1, $2)\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int8",
"Varchar"
]
},
"nullable": []
}
},
"560c3ba57c965c3ebdbe393b062da8a30a8a7116a9bace2aa7de2e8431fe0bc7": {
"query": "\n INSERT INTO mods_categories (joining_mod_id, joining_category_id)\n VALUES ($1, $2)\n ",
"describe": {
@@ -409,6 +599,68 @@
]
}
},
"6562c876826ad3091a14eb50fa1f961a971c1d1bb158fc3dcb55d469a73facc6": {
"query": "\n SELECT v.mod_id, v.author_id, v.name, v.version_number,\n v.changelog_url, v.date_published, v.downloads,\n v.release_channel\n FROM versions v\n WHERE v.id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "mod_id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "author_id",
"type_info": "Int8"
},
{
"ordinal": 2,
"name": "name",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "version_number",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "changelog_url",
"type_info": "Varchar"
},
{
"ordinal": 5,
"name": "date_published",
"type_info": "Timestamptz"
},
{
"ordinal": 6,
"name": "downloads",
"type_info": "Int4"
},
{
"ordinal": 7,
"name": "release_channel",
"type_info": "Int4"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false,
false,
false,
false,
true,
false,
false,
false
]
}
},
"6b28cb8b54ef57c9b6f03607611f688455f0e2b27eb5deda5a8cbc5b506b4602": {
"query": "\n DELETE FROM mods\n WHERE id = $1\n ",
"describe": {
@@ -421,6 +673,38 @@
"nullable": []
}
},
"71db1bc306ff6da3a92544e1585aa11c5627b50d95b15e794b2fa5dc838ea1a3": {
"query": "\n SELECT mod_id, version_number, author_id\n FROM versions\n WHERE id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "mod_id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "version_number",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "author_id",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false,
false,
false
]
}
},
"72d6b5f2f11d88981db82c7247c9e7e5ebfd8d34985a1a8209d6628e66490f37": {
"query": "\n SELECT id FROM categories\n WHERE category = $1\n ",
"describe": {
@@ -453,6 +737,26 @@
"nullable": []
}
},
"73d9b1e00609919f3adbe5f4ca9e41304bffb1cd4397a85a9911f2260e9a98f5": {
"query": "\n INSERT INTO versions (\n id, mod_id, author_id, name, version_number,\n changelog_url, date_published,\n downloads, release_channel\n )\n VALUES (\n $1, $2, $3, $4, $5,\n $6, $7,\n $8, $9\n )\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int8",
"Int8",
"Int8",
"Varchar",
"Varchar",
"Varchar",
"Timestamptz",
"Int4",
"Int4"
]
},
"nullable": []
}
},
"89fbff6249b248d3e150879aaea1662140bcb10d5104992c784285322c8b3b94": {
"query": "\n SELECT version FROM game_versions\n ",
"describe": {
@@ -557,6 +861,18 @@
]
}
},
"a39ce28b656032f862b205cffa393a76b989f4803654a615477a94fda5f57354": {
"query": "\n DELETE FROM states\n WHERE id = $1\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": []
}
},
"a55925860b4a46af864a8c38f942d7cdd85c00638e761b9696de0bf47335173b": {
"query": "\n SELECT mod_id, version_number\n FROM versions\n WHERE id = $1\n ",
"describe": {
@@ -627,6 +943,68 @@
"nullable": []
}
},
"a94eb4862ba30ca21f15198d9b7b9fd80ce01d45457e0b4d68270b5e3f9be8c6": {
"query": "\n SELECT u.github_id, u.name, u.email,\n u.avatar_url, u.username, u.bio,\n u.created, u.role\n FROM users u\n WHERE u.id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "github_id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "name",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "email",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "avatar_url",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "username",
"type_info": "Varchar"
},
{
"ordinal": 5,
"name": "bio",
"type_info": "Varchar"
},
{
"ordinal": 6,
"name": "created",
"type_info": "Timestamptz"
},
{
"ordinal": 7,
"name": "role",
"type_info": "Varchar"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false,
false,
true,
false,
false,
false,
false,
false
]
}
},
"b0e3d1c70b87bb54819e3fac04b684a9b857aeedb4dcb7cb400c2af0dbb12922": {
"query": "\n DELETE FROM teams\n WHERE id = $1\n ",
"describe": {
@@ -750,6 +1128,26 @@
]
}
},
"bf7f721664f5e0ed41adc41b5483037256635f28ff6c4e5d3cbcec4387f9c8ef": {
"query": "SELECT EXISTS(SELECT 1 FROM users WHERE id=$1)",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "exists",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
null
]
}
},
"c0899dcff4d7bc1ba3e953e5099210316bff2f98e6ab77ba84bc612eac4bce0a": {
"query": "\n SELECT gv.version FROM versions\n INNER JOIN game_versions_versions gvv ON gvv.joining_version_id=versions.id\n INNER JOIN game_versions gv ON gvv.game_version_id=gv.id\n WHERE versions.mod_id = $1\n ",
"describe": {
@@ -895,6 +1293,26 @@
]
}
},
"d0172d12dce3d8ddc888893ec1cdd93ad232685e80f706e70dea22c85d96df63": {
"query": "SELECT team_id FROM mods WHERE id=$1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "team_id",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false
]
}
},
"d12bc07adb4dc8147d0ddccd72a4f23ed38cd31d7db3d36ebbe2c9b627130f0b": {
"query": "\n DELETE FROM team_members\n WHERE team_id = $1\n ",
"describe": {
@@ -1051,6 +1469,68 @@
]
}
},
"ea877d50ba461eae97ba3a35c3da71e7cdb7a92de1bb877d6b5dd766aca4e4ef": {
"query": "\n SELECT u.id, u.name, u.email,\n u.avatar_url, u.username, u.bio,\n u.created, u.role\n FROM users u\n WHERE u.github_id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "name",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "email",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "avatar_url",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "username",
"type_info": "Varchar"
},
{
"ordinal": 5,
"name": "bio",
"type_info": "Varchar"
},
{
"ordinal": 6,
"name": "created",
"type_info": "Timestamptz"
},
{
"ordinal": 7,
"name": "role",
"type_info": "Varchar"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false,
false,
true,
false,
false,
false,
false,
false
]
}
},
"eaea3f606f926d7e1fc51a9798ce3c6448f0f02d55ce48bb38e84dc1bdced740": {
"query": "\n INSERT INTO versions (\n id, mod_id, name, version_number,\n changelog_url, date_published,\n downloads, release_channel\n )\n VALUES (\n $1, $2, $3, $4,\n $5, $6,\n $7, $8\n )\n ",
"describe": {
@@ -1184,6 +1664,88 @@
]
}
},
"f772d6c3d287da99e00390517ea56cf3190658781da471bef58230e82b892b8c": {
"query": "\n SELECT u.github_id, u.name, u.email,\n u.avatar_url, u.username, u.bio,\n u.created\n FROM users u\n WHERE u.id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "github_id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "name",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "email",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "avatar_url",
"type_info": "Varchar"
},
{
"ordinal": 4,
"name": "username",
"type_info": "Varchar"
},
{
"ordinal": 5,
"name": "bio",
"type_info": "Varchar"
},
{
"ordinal": 6,
"name": "created",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false,
false,
true,
false,
false,
false,
false
]
}
},
"f7bea04e8e279e27a24de1bdf3c413daa8677994df5131494b28691ed6611efc": {
"query": "\n SELECT url,expires FROM states\n WHERE id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "url",
"type_info": "Varchar"
},
{
"ordinal": 1,
"name": "expires",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false,
false
]
}
},
"f80ca292323952d10dbd26d3453ced5c12bdd1b71dcd3cb3ade4c7d4dc3590f6": {
"query": "\n SELECT gv.version FROM game_versions_versions gvv\n INNER JOIN game_versions gv ON gvv.game_version_id=gv.id\n WHERE gvv.joining_version_id = $1\n ",
"describe": {

117
src/auth/mod.rs Normal file

@@ -0,0 +1,117 @@
use crate::database::models;
use crate::models::users::{Role, User, UserId};
use actix_web::http::HeaderMap;
use serde::{Deserialize, Serialize};
use thiserror::Error;
#[derive(Error, Debug)]
pub enum AuthenticationError {
#[error("An unknown database error occurred")]
SqlxDatabaseError(#[from] sqlx::Error),
#[error("Database Error: {0}")]
DatabaseError(#[from] crate::database::models::DatabaseError),
#[error("Error while parsing JSON: {0}")]
SerDeError(#[from] serde_json::Error),
#[error("Error while communicating to GitHub OAuth2: {0}")]
GithubError(#[from] reqwest::Error),
#[error("Invalid Authentication Credentials")]
InvalidCredentialsError,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct GitHubUser {
pub login: String,
pub id: u64,
pub avatar_url: String,
pub name: String,
pub email: Option<String>,
pub bio: String,
}
pub async fn get_github_user_from_token(
access_token: &str,
) -> Result<GitHubUser, AuthenticationError> {
Ok(reqwest::Client::new()
.get("https://api.github.com/user")
.header(reqwest::header::USER_AGENT, "Modrinth")
.header(
reqwest::header::AUTHORIZATION,
format!("token {}", access_token),
)
.send()
.await?
.json()
.await?)
}
pub async fn get_user_from_token<'a, 'b, E>(
access_token: &str,
executor: E,
) -> Result<User, AuthenticationError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
let github_user = get_github_user_from_token(access_token).await?;
let res = models::User::get_from_github_id(github_user.id, executor).await?;
match res {
Some(result) => Ok(User {
id: UserId::from(result.id),
github_id: result.github_id as u64,
username: result.username,
name: result.name,
email: result.email,
avatar_url: result.avatar_url,
bio: result.bio,
created: result.created,
role: Role::from_string(&*result.role),
}),
None => Err(AuthenticationError::InvalidCredentialsError),
}
}
pub async fn get_user_from_headers<'a, 'b, E>(
headers: &HeaderMap,
executor: E,
) -> Result<User, AuthenticationError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
let token = headers
.get("Authentication")
.ok_or(AuthenticationError::InvalidCredentialsError)?
.to_str()
.map_err(|_| AuthenticationError::InvalidCredentialsError)?;
Ok(get_user_from_token(token, executor).await?)
}
pub async fn check_is_moderator_from_headers<'a, 'b, E>(
headers: &HeaderMap,
executor: E,
) -> Result<User, AuthenticationError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
let user = get_user_from_headers(headers, executor).await?;
match user.role {
Role::Moderator | Role::Admin => Ok(user),
_ => Err(AuthenticationError::InvalidCredentialsError),
}
}
pub async fn check_is_admin_from_headers<'a, 'b, E>(
headers: &HeaderMap,
executor: E,
) -> Result<User, AuthenticationError>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
let user = get_user_from_headers(headers, executor).await?;
match user.role {
Role::Admin => Ok(user),
_ => Err(AuthenticationError::InvalidCredentialsError),
}
}
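The helpers above are meant to be called from actix-web handlers with any sqlx executor, which is what the route changes later in this PR do. A minimal sketch of gating a route on a logged-in user; the route path and handler name are hypothetical, while the pool wiring mirrors the other handlers in this diff:

use actix_web::{get, web, HttpRequest, HttpResponse};
use sqlx::PgPool;

// Illustrative handler: resolves the `Authentication` header to a user via
// get_user_from_headers and returns it as JSON, or 401 if that fails.
#[get("whoami")]
async fn whoami(req: HttpRequest, pool: web::Data<PgPool>) -> HttpResponse {
    match crate::auth::get_user_from_headers(req.headers(), &**pool).await {
        Ok(user) => HttpResponse::Ok().json(user),
        Err(_) => HttpResponse::Unauthorized().finish(),
    }
}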


@@ -73,6 +73,20 @@ generate_ids!(
"SELECT EXISTS(SELECT 1 FROM team_members WHERE id=$1)",
TeamMemberId
);
generate_ids!(
pub generate_state_id,
StateId,
8,
"SELECT EXISTS(SELECT 1 FROM states WHERE id=$1)",
StateId
);
generate_ids!(
pub generate_user_id,
UserId,
8,
"SELECT EXISTS(SELECT 1 FROM users WHERE id=$1)",
UserId
);
#[derive(Copy, Clone, Debug, Type)]
#[sqlx(transparent)]
@@ -109,6 +123,10 @@ pub struct CategoryId(pub i32);
#[sqlx(transparent)]
pub struct FileId(pub i64);
#[derive(Copy, Clone, Debug, Type)]
#[sqlx(transparent)]
pub struct StateId(pub i64);
use crate::models::ids;
impl From<ids::ModId> for ModId {


@@ -7,12 +7,14 @@ pub mod categories;
pub mod ids;
pub mod mod_item;
pub mod team_item;
pub mod user_item;
pub mod version_item;
pub use ids::*;
pub use mod_item::Mod;
pub use team_item::Team;
pub use team_item::TeamMember;
pub use user_item::User;
pub use version_item::FileHash;
pub use version_item::Version;
pub use version_item::VersionFile;


@@ -0,0 +1,115 @@
use super::ids::UserId;
pub struct User {
pub id: UserId,
pub github_id: i64,
pub username: String,
pub name: String,
pub email: Option<String>,
pub avatar_url: String,
pub bio: String,
pub created: chrono::DateTime<chrono::Utc>,
pub role: String,
}
impl User {
pub async fn insert(
&self,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), sqlx::error::Error> {
sqlx::query!(
"
INSERT INTO users (
id, github_id, username, name, email,
avatar_url, bio, created
)
VALUES (
$1, $2, $3, $4, $5,
$6, $7, $8
)
",
self.id as UserId,
self.github_id,
&self.username,
&self.name,
self.email.as_ref(),
&self.avatar_url,
&self.bio,
self.created,
)
.execute(&mut *transaction)
.await?;
Ok(())
}
pub async fn get<'a, 'b, E>(id: UserId, executor: E) -> Result<Option<Self>, sqlx::error::Error>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
let result = sqlx::query!(
"
SELECT u.github_id, u.name, u.email,
u.avatar_url, u.username, u.bio,
u.created, u.role
FROM users u
WHERE u.id = $1
",
id as UserId,
)
.fetch_optional(executor)
.await?;
if let Some(row) = result {
Ok(Some(User {
id,
github_id: row.github_id,
name: row.name,
email: row.email,
avatar_url: row.avatar_url,
username: row.username,
bio: row.bio,
created: row.created,
role: row.role,
}))
} else {
Ok(None)
}
}
pub async fn get_from_github_id<'a, 'b, E>(
github_id: u64,
executor: E,
) -> Result<Option<Self>, sqlx::error::Error>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
let result = sqlx::query!(
"
SELECT u.id, u.name, u.email,
u.avatar_url, u.username, u.bio,
u.created, u.role
FROM users u
WHERE u.github_id = $1
",
github_id as i64,
)
.fetch_optional(executor)
.await?;
if let Some(row) = result {
Ok(Some(User {
id: UserId(row.id),
github_id: github_id as i64,
name: row.name,
email: row.email,
avatar_url: row.avatar_url,
username: row.username,
bio: row.bio,
created: row.created,
role: row.role,
}))
} else {
Ok(None)
}
}
}


@@ -5,6 +5,7 @@ use super::DatabaseError;
pub struct VersionBuilder {
pub version_id: VersionId,
pub mod_id: ModId,
pub author_id: UserId,
pub name: String,
pub version_number: String,
pub changelog_url: Option<String>,
@@ -73,6 +74,7 @@ impl VersionBuilder {
let version = Version {
id: self.version_id,
mod_id: self.mod_id,
author_id: self.author_id,
name: self.name,
version_number: self.version_number,
changelog_url: self.changelog_url,
@@ -133,6 +135,7 @@ impl VersionBuilder {
pub struct Version {
pub id: VersionId,
pub mod_id: ModId,
pub author_id: UserId,
pub name: String,
pub version_number: String,
pub changelog_url: Option<String>,
@@ -149,18 +152,19 @@ impl Version {
sqlx::query!(
"
INSERT INTO versions (
id, mod_id, name, version_number,
id, mod_id, author_id, name, version_number,
changelog_url, date_published,
downloads, release_channel
)
VALUES (
$1, $2, $3, $4,
$5, $6,
$7, $8
$1, $2, $3, $4, $5,
$6, $7,
$8, $9
)
",
self.id as VersionId,
self.mod_id as ModId,
self.author_id as UserId,
&self.name,
&self.version_number,
self.changelog_url.as_ref(),
@@ -339,7 +343,7 @@ impl Version {
{
let result = sqlx::query!(
"
SELECT v.mod_id, v.name, v.version_number,
SELECT v.mod_id, v.author_id, v.name, v.version_number,
v.changelog_url, v.date_published, v.downloads,
v.release_channel
FROM versions v
@@ -354,6 +358,7 @@ impl Version {
Ok(Some(Version {
id,
mod_id: ModId(row.mod_id),
author_id: UserId(row.author_id),
name: row.name,
version_number: row.version_number,
changelog_url: row.changelog_url,
@@ -375,7 +380,7 @@ impl Version {
{
let result = sqlx::query!(
"
SELECT v.mod_id, v.name, v.version_number,
SELECT v.mod_id, v.author_id, v.name, v.version_number,
v.changelog_url, v.date_published, v.downloads,
release_channels.channel
FROM versions v
@@ -455,6 +460,7 @@ impl Version {
Ok(Some(QueryVersion {
id,
mod_id: ModId(row.mod_id),
author_id: UserId(row.author_id),
name: row.name,
version_number: row.version_number,
changelog_url: row.changelog_url,
@@ -493,6 +499,7 @@ pub struct FileHash {
pub struct QueryVersion {
pub id: VersionId,
pub mod_id: ModId,
pub author_id: UserId,
pub name: String,
pub version_number: String,
pub changelog_url: Option<String>,


@@ -1,10 +1,10 @@
use log::{debug, info};
use log::info;
use sqlx::migrate::{Migrate, MigrateDatabase, Migrator};
use sqlx::postgres::{PgPool, PgPoolOptions};
use sqlx::{Connection, PgConnection, Postgres};
use std::path::Path;
const MIGRATION_FOLDER: &'static str = "migrations";
const MIGRATION_FOLDER: &str = "migrations";
pub async fn connect() -> Result<PgPool, sqlx::Error> {
info!("Initializing database connection");


@@ -8,6 +8,7 @@ use search::indexing::index_mods;
use search::indexing::IndexingSettings;
use std::sync::Arc;
mod auth;
mod database;
mod file_hosting;
mod models;
@@ -193,8 +194,10 @@ async fn main() -> std::io::Result<()> {
.service(routes::index_get)
.service(
web::scope("/api/v1/")
.configure(routes::auth_config)
.configure(routes::tags_config)
.configure(routes::mods_config),
.configure(routes::mods_config)
.configure(routes::users_config),
)
.default_service(web::get().to(routes::not_found))
})
@@ -246,4 +249,7 @@ fn check_env_vars() {
}
check_var::<usize>("LOCAL_INDEX_INTERVAL");
check_var::<String>("GITHUB_CLIENT_ID");
check_var::<String>("GITHUB_CLIENT_SECRET");
}


@@ -1,7 +1,8 @@
use thiserror::Error;
pub use super::mods::{ModId, VersionId};
pub use super::teams::{TeamId, UserId};
pub use super::teams::TeamId;
pub use super::users::UserId;
/// Generates a random 64 bit integer that is exactly `n` characters
/// long when encoded as base62.
@@ -169,7 +170,7 @@ pub mod base62_impl {
output
}
fn parse_base62(string: &str) -> Result<u64, DecodingError> {
pub fn parse_base62(string: &str) -> Result<u64, DecodingError> {
let mut num: u64 = 0;
for c in string.chars() {
let next_digit;


@@ -2,3 +2,4 @@ pub mod error;
pub mod ids;
pub mod mods;
pub mod teams;
pub mod users;


@@ -1,5 +1,6 @@
use super::ids::Base62Id;
use super::teams::TeamId;
use super::users::UserId;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
@@ -54,6 +55,8 @@ pub struct Version {
pub id: VersionId,
/// The ID of the mod this version is for.
pub mod_id: ModId,
/// The ID of the author who published this version
pub author_id: UserId,
/// The name of this version
pub name: String,


@@ -1,13 +1,8 @@
use super::ids::Base62Id;
use crate::models::users::UserId;
use serde::{Deserialize, Serialize};
//TODO Implement Item for teams
/// The ID of a specific user, encoded as base62 for usage in the API
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(from = "Base62Id")]
#[serde(into = "Base62Id")]
pub struct UserId(pub u64);
/// The ID of a team
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(from = "Base62Id")]

47
src/models/users.rs Normal file

@@ -0,0 +1,47 @@
use super::ids::Base62Id;
use serde::{Deserialize, Serialize};
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(from = "Base62Id")]
#[serde(into = "Base62Id")]
pub struct UserId(pub u64);
#[derive(Serialize, Deserialize)]
pub struct User {
pub id: UserId,
pub github_id: u64,
pub username: String,
pub name: String,
pub email: Option<String>,
pub avatar_url: String,
pub bio: String,
pub created: chrono::DateTime<chrono::Utc>,
pub role: Role,
}
#[derive(Serialize, Deserialize)]
pub enum Role {
Developer,
Moderator,
Admin,
}
impl ToString for Role {
fn to_string(&self) -> String {
match self {
Role::Developer => String::from("developer"),
Role::Moderator => String::from("moderator"),
Role::Admin => String::from("admin"),
}
}
}
impl Role {
pub fn from_string(string: &str) -> Role {
match string {
"admin" => Role::Admin,
"moderator" => Role::Moderator,
_ => Role::Developer,
}
}
}
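Roles are persisted as plain strings (the role varchar(50) column added earlier defaults to 'developer'), so from_string and to_string are expected to round-trip, with unknown values falling back to Developer. An illustrative test of that mapping, not part of this PR:

// Hypothetical test of the string <-> Role mapping defined above.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn role_round_trips_through_strings() {
        for s in &["developer", "moderator", "admin"] {
            assert_eq!(Role::from_string(s).to_string(), *s);
        }
        // Unrecognized values fall back to the default role.
        assert_eq!(Role::from_string("superuser").to_string(), "developer");
    }
}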

206
src/routes/auth.rs Normal file

@@ -0,0 +1,206 @@
use crate::auth::get_github_user_from_token;
use crate::database::models::{generate_state_id, User};
use crate::models::error::ApiError;
use crate::models::ids::base62_impl::{parse_base62, to_base62};
use crate::models::ids::DecodingError;
use crate::models::users::Role;
use actix_web::http::StatusCode;
use actix_web::web::{scope, Data, Query, ServiceConfig};
use actix_web::{get, HttpResponse};
use chrono::Utc;
use log::info;
use serde::{Deserialize, Serialize};
use sqlx::postgres::PgPool;
use thiserror::Error;
pub fn config(cfg: &mut ServiceConfig) {
cfg.service(scope("/auth/").service(auth_callback).service(init));
}
#[derive(Error, Debug)]
pub enum AuthorizationError {
#[error("Environment Error")]
EnvError(#[from] dotenv::Error),
#[error("An unknown database error occured")]
SqlxDatabaseError(#[from] sqlx::Error),
#[error("Database Error: {0}")]
DatabaseError(#[from] crate::database::models::DatabaseError),
#[error("Error while parsing JSON: {0}")]
SerDeError(#[from] serde_json::Error),
#[error("Error while communicating to GitHub OAuth2: {0}")]
GithubError(#[from] reqwest::Error),
#[error("Invalid Authentication credentials")]
InvalidCredentialsError,
#[error("Authentication Error: {0}")]
AuthenticationError(#[from] crate::auth::AuthenticationError),
#[error("Error while decoding Base62")]
DecodingError(#[from] DecodingError),
}
impl actix_web::ResponseError for AuthorizationError {
fn status_code(&self) -> StatusCode {
match self {
AuthorizationError::EnvError(..) => StatusCode::INTERNAL_SERVER_ERROR,
AuthorizationError::SqlxDatabaseError(..) => StatusCode::INTERNAL_SERVER_ERROR,
AuthorizationError::DatabaseError(..) => StatusCode::INTERNAL_SERVER_ERROR,
AuthorizationError::SerDeError(..) => StatusCode::BAD_REQUEST,
AuthorizationError::GithubError(..) => StatusCode::FAILED_DEPENDENCY,
AuthorizationError::InvalidCredentialsError => StatusCode::UNAUTHORIZED,
AuthorizationError::DecodingError(..) => StatusCode::BAD_REQUEST,
AuthorizationError::AuthenticationError(..) => StatusCode::UNAUTHORIZED,
}
}
fn error_response(&self) -> HttpResponse {
HttpResponse::build(self.status_code()).json(ApiError {
error: match self {
AuthorizationError::EnvError(..) => "environment_error",
AuthorizationError::SqlxDatabaseError(..) => "database_error",
AuthorizationError::DatabaseError(..) => "database_error",
AuthorizationError::SerDeError(..) => "invalid_input",
AuthorizationError::GithubError(..) => "github_error",
AuthorizationError::InvalidCredentialsError => "invalid_credentials",
AuthorizationError::DecodingError(..) => "decoding_error",
AuthorizationError::AuthenticationError(..) => "authentication_error",
},
description: &self.to_string(),
})
}
}
#[derive(Serialize, Deserialize)]
pub struct AuthorizationInit {
pub url: String,
}
#[derive(Serialize, Deserialize)]
pub struct Authorization {
pub code: String,
pub state: String,
}
#[derive(Serialize, Deserialize)]
pub struct AccessToken {
pub access_token: String,
pub scope: String,
pub token_type: String,
}
//http://localhost:8000/api/v1/auth/init?url=https%3A%2F%2Fmodrinth.com%2Fmods
#[get("init")]
pub async fn init(
Query(info): Query<AuthorizationInit>,
client: Data<PgPool>,
) -> Result<HttpResponse, AuthorizationError> {
let mut transaction = client.begin().await?;
let state = generate_state_id(&mut transaction).await?;
sqlx::query!(
"
INSERT INTO states (id, url)
VALUES ($1, $2)
",
state.0,
info.url
)
.execute(&mut *transaction)
.await?;
transaction.commit().await?;
let client_id = dotenv::var("GITHUB_CLIENT_ID")?;
let url = format!(
"https://github.com/login/oauth/authorize?client_id={}&state={}&scope={}",
client_id,
to_base62(state.0 as u64),
"%20repo%20read%3Aorg%20read%3Auser%20user%3Aemail"
);
Ok(HttpResponse::TemporaryRedirect()
.header("Location", &*url)
.json(AuthorizationInit { url }))
}
#[get("callback")]
pub async fn auth_callback(
Query(info): Query<Authorization>,
client: Data<PgPool>,
) -> Result<HttpResponse, AuthorizationError> {
let mut transaction = client.begin().await?;
let state_id = parse_base62(&*info.state)?;
let result = sqlx::query!(
"
SELECT url,expires FROM states
WHERE id = $1
",
state_id as i64
)
.fetch_one(&mut *transaction)
.await?;
let now = Utc::now();
let duration = result.expires.signed_duration_since(now);
if duration.num_seconds() < 0 {
return Err(AuthorizationError::InvalidCredentialsError);
}
sqlx::query!(
"
DELETE FROM states
WHERE id = $1
",
state_id as i64
)
.execute(&mut *transaction)
.await?;
let client_id = dotenv::var("GITHUB_CLIENT_ID")?;
let client_secret = dotenv::var("GITHUB_CLIENT_SECRET")?;
let url = format!(
"https://github.com/login/oauth/access_token?client_id={}&client_secret={}&code={}",
client_id, client_secret, info.code
);
let token: AccessToken = reqwest::Client::new()
.post(&url)
.header(reqwest::header::ACCEPT, "application/json")
.send()
.await?
.json()
.await?;
let user = get_github_user_from_token(&*token.access_token).await?;
let user_result = User::get_from_github_id(user.id, &mut *transaction).await?;
match user_result {
Some(x) => info!("{:?}", x.id),
None => {
let user_id = crate::database::models::generate_user_id(&mut transaction).await?;
User {
id: user_id,
github_id: user.id as i64,
username: user.login,
name: user.name,
email: user.email,
avatar_url: user.avatar_url,
bio: user.bio,
created: Utc::now(),
role: Role::Developer.to_string(),
}
.insert(&mut transaction)
.await?;
}
}
transaction.commit().await?;
let redirect_url = format!("{}?code={}", result.url, token.access_token);
Ok(HttpResponse::TemporaryRedirect()
.header("Location", &*redirect_url)
.json(AuthorizationInit { url: redirect_url }))
}
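End to end, the flow is: the front-end hits /api/v1/auth/init?url=<return url>, which stores a short-lived state row and answers with a 307 redirect to GitHub's authorize page; GitHub then sends the user to /api/v1/auth/callback?code=...&state=..., where the state is checked against its one-hour expiry and deleted, the code is exchanged for an access token, a user row is created on first login, and the browser is redirected to the original url with ?code=<access token> appended. A hedged client-side sketch of the first step; the host, port, and use of reqwest and tokio are assumptions, not part of this PR:

// Hypothetical client walk-through of the /auth/init route above.
#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    // Disable automatic redirects so the GitHub authorize URL can be inspected.
    let client = reqwest::Client::builder()
        .redirect(reqwest::redirect::Policy::none())
        .build()?;
    // /auth/init stores a state row and responds with a 307 pointing at GitHub.
    let resp = client
        .get("http://localhost:8000/api/v1/auth/init")
        .query(&[("url", "https://modrinth.com/mods")])
        .send()
        .await?;
    if let Some(authorize_url) = resp.headers().get(reqwest::header::LOCATION) {
        println!("open in a browser: {:?}", authorize_url);
    }
    // The rest happens out of band: GitHub redirects the user to /auth/callback,
    // which exchanges the code and redirects to the original `url` with
    // ?code=<GitHub access token> appended.
    Ok(())
}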


@@ -1,13 +1,16 @@
use actix_web::web;
mod auth;
mod index;
mod mod_creation;
mod mods;
mod not_found;
mod tags;
mod users;
mod version_creation;
mod versions;
pub use auth::config as auth_config;
pub use tags::config as tags_config;
pub use self::index::index_get;
@@ -38,16 +41,29 @@ pub fn versions_config(cfg: &mut web::ServiceConfig) {
);
}
pub fn users_config(cfg: &mut web::ServiceConfig) {
cfg.service(users::user_auth_get);
cfg.service(
web::scope("user")
.service(users::user_get)
.service(users::user_delete),
);
}
#[derive(thiserror::Error, Debug)]
pub enum ApiError {
#[error("Internal server error")]
DatabaseError(#[from] crate::database::models::DatabaseError),
#[error("Authentication Error")]
AuthenticationError,
}
impl actix_web::ResponseError for ApiError {
fn status_code(&self) -> actix_web::http::StatusCode {
match self {
ApiError::DatabaseError(..) => actix_web::http::StatusCode::INTERNAL_SERVER_ERROR,
ApiError::AuthenticationError => actix_web::http::StatusCode::UNAUTHORIZED,
}
}
@@ -56,6 +72,7 @@ impl actix_web::ResponseError for ApiError {
crate::models::error::ApiError {
error: match self {
ApiError::DatabaseError(..) => "database_error",
ApiError::AuthenticationError => "unauthorized",
},
description: &self.to_string(),
},


@@ -1,14 +1,16 @@
use crate::auth::{get_user_from_headers, AuthenticationError};
use crate::database::models;
use crate::file_hosting::{FileHost, FileHostingError};
use crate::models::error::ApiError;
use crate::models::mods::{ModId, VersionId, VersionType};
use crate::models::teams::TeamMember;
use crate::models::users::UserId;
use crate::routes::version_creation::InitialVersionData;
use crate::search::indexing::queue::CreationQueue;
use actix_multipart::{Field, Multipart};
use actix_web::http::StatusCode;
use actix_web::web::Data;
use actix_web::{post, HttpResponse};
use actix_web::{post, HttpRequest, HttpResponse};
use futures::stream::StreamExt;
use serde::{Deserialize, Serialize};
use sqlx::postgres::PgPool;
@@ -42,6 +44,8 @@ pub enum CreateError {
InvalidLoader(String),
#[error("Invalid category: {0}")]
InvalidCategory(String),
#[error("Authentication Error: {0}")]
Unauthorized(#[from] AuthenticationError),
}
impl actix_web::ResponseError for CreateError {
@@ -59,6 +63,7 @@ impl actix_web::ResponseError for CreateError {
CreateError::InvalidGameVersion(..) => StatusCode::BAD_REQUEST,
CreateError::InvalidLoader(..) => StatusCode::BAD_REQUEST,
CreateError::InvalidCategory(..) => StatusCode::BAD_REQUEST,
CreateError::Unauthorized(..) => StatusCode::UNAUTHORIZED,
}
}
@@ -77,6 +82,7 @@ impl actix_web::ResponseError for CreateError {
CreateError::InvalidGameVersion(..) => "invalid_input",
CreateError::InvalidLoader(..) => "invalid_input",
CreateError::InvalidCategory(..) => "invalid_input",
CreateError::Unauthorized(..) => "unauthorized",
},
description: &self.to_string(),
})
@@ -126,6 +132,7 @@ pub async fn undo_uploads(
#[post("mod")]
pub async fn mod_create(
req: HttpRequest,
payload: Multipart,
client: Data<PgPool>,
file_host: Data<Arc<dyn FileHost + Send + Sync>>,
@@ -135,6 +142,7 @@ pub async fn mod_create(
let mut uploaded_files = Vec::new();
let result = mod_create_inner(
req,
payload,
&mut transaction,
&***file_host,
@@ -161,6 +169,7 @@ pub async fn mod_create(
}
async fn mod_create_inner(
req: HttpRequest,
mut payload: Multipart,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
file_host: &dyn FileHost,
@@ -170,6 +179,7 @@ async fn mod_create_inner(
let cdn_url = dotenv::var("CDN_URL")?;
let mod_id = models::generate_mod_id(transaction).await?.into();
let user = get_user_from_headers(req.headers(), &mut *transaction).await?;
let mut created_versions: Vec<models::version_item::VersionBuilder> = vec![];
@@ -287,6 +297,7 @@ async fn mod_create_inner(
let version = models::version_item::VersionBuilder {
version_id: version_id.into(),
mod_id: mod_id.into(),
author_id: user.id.into(),
name: version_data.version_title.clone(),
version_number: version_data.version_number.clone(),
changelog_url: Some(format!("{}/{}", cdn_url, body_url)),
@@ -356,6 +367,16 @@ async fn mod_create_inner(
)));
};
let ids: Vec<UserId> = (&create_data.team_members)
.iter()
.map(|m| m.user_id)
.collect();
if !ids.contains(&user.id) {
return Err(CreateError::InvalidInput(String::from(
"Team members must include yourself!",
)));
}
let mut categories = Vec::with_capacity(create_data.categories.len());
for category in &create_data.categories {
let id = models::categories::Category::get_id(&category, &mut *transaction)
@@ -430,9 +451,9 @@ async fn mod_create_inner(
versions: versions_list,
page_url: mod_builder.body_url.clone(),
icon_url: mod_builder.icon_url.clone().unwrap(),
// TODO: Author/team info, latest version info
author: String::new(),
author_url: String::new(),
author: user.username,
author_url: format!("https://modrinth.com/user/{}", user.id),
// TODO: latest version info
latest_version: String::new(),
downloads: 0,
date_created: formatted.clone(),


@@ -1,9 +1,10 @@
use super::ApiError;
use crate::auth::check_is_moderator_from_headers;
use crate::database;
use crate::models;
use crate::models::mods::SearchRequest;
use crate::search::{search_for_mod, SearchError};
use actix_web::{delete, get, web, HttpResponse};
use actix_web::{delete, get, web, HttpRequest, HttpResponse};
use sqlx::PgPool;
#[get("mod")]
@@ -48,13 +49,23 @@ pub async fn mod_get(
}
}
// TODO: This really needs auth
// TODO: The mod remains in meilisearch's index until the index is deleted
#[delete("{id}")]
pub async fn mod_delete(
req: HttpRequest,
info: web::Path<(models::ids::ModId,)>,
pool: web::Data<PgPool>,
) -> Result<HttpResponse, ApiError> {
check_is_moderator_from_headers(
req.headers(),
&mut *pool
.acquire()
.await
.map_err(|e| ApiError::DatabaseError(e.into()))?,
)
.await
.map_err(|_| ApiError::AuthenticationError)?;
let id = info.0;
let result = database::models::Mod::remove_full(id.into(), &**pool)
.await


@@ -1,6 +1,7 @@
use super::ApiError;
use crate::auth::check_is_admin_from_headers;
use crate::database::models;
use actix_web::{delete, get, put, web, HttpResponse};
use actix_web::{delete, get, put, web, HttpRequest, HttpResponse};
use models::categories::{Category, GameVersion, Loader};
use sqlx::PgPool;
@@ -32,9 +33,20 @@ pub async fn category_list(pool: web::Data<PgPool>) -> Result<HttpResponse, ApiE
// TODO: don't fail if category already exists
#[put("category/{name}")]
pub async fn category_create(
req: HttpRequest,
pool: web::Data<PgPool>,
category: web::Path<(String,)>,
) -> Result<HttpResponse, ApiError> {
check_is_admin_from_headers(
req.headers(),
&mut *pool
.acquire()
.await
.map_err(|e| ApiError::DatabaseError(e.into()))?,
)
.await
.map_err(|_| ApiError::AuthenticationError)?;
let name = category.into_inner().0;
let _id = Category::builder().name(&name)?.insert(&**pool).await?;
@@ -44,9 +56,20 @@ pub async fn category_create(
#[delete("category/{name}")]
pub async fn category_delete(
req: HttpRequest,
pool: web::Data<PgPool>,
category: web::Path<(String,)>,
) -> Result<HttpResponse, ApiError> {
check_is_admin_from_headers(
req.headers(),
&mut *pool
.acquire()
.await
.map_err(|e| ApiError::DatabaseError(e.into()))?,
)
.await
.map_err(|_| ApiError::AuthenticationError)?;
let name = category.into_inner().0;
let mut transaction = pool.begin().await.map_err(models::DatabaseError::from)?;
@@ -75,9 +98,20 @@ pub async fn loader_list(pool: web::Data<PgPool>) -> Result<HttpResponse, ApiErr
// TODO: don't fail if loader already exists
#[put("loader/{name}")]
pub async fn loader_create(
req: HttpRequest,
pool: web::Data<PgPool>,
loader: web::Path<(String,)>,
) -> Result<HttpResponse, ApiError> {
check_is_admin_from_headers(
req.headers(),
&mut *pool
.acquire()
.await
.map_err(|e| ApiError::DatabaseError(e.into()))?,
)
.await
.map_err(|_| ApiError::AuthenticationError)?;
let name = loader.into_inner().0;
let _id = Loader::builder().name(&name)?.insert(&**pool).await?;
@@ -87,9 +121,20 @@ pub async fn loader_create(
#[delete("loader/{name}")]
pub async fn loader_delete(
req: HttpRequest,
pool: web::Data<PgPool>,
loader: web::Path<(String,)>,
) -> Result<HttpResponse, ApiError> {
check_is_admin_from_headers(
req.headers(),
&mut *pool
.acquire()
.await
.map_err(|e| ApiError::DatabaseError(e.into()))?,
)
.await
.map_err(|_| ApiError::AuthenticationError)?;
let name = loader.into_inner().0;
let mut transaction = pool.begin().await.map_err(models::DatabaseError::from)?;
@@ -117,9 +162,20 @@ pub async fn game_version_list(pool: web::Data<PgPool>) -> Result<HttpResponse,
// remain idempotent
#[put("game_version/{name}")]
pub async fn game_version_create(
req: HttpRequest,
pool: web::Data<PgPool>,
game_version: web::Path<(String,)>,
) -> Result<HttpResponse, ApiError> {
check_is_admin_from_headers(
req.headers(),
&mut *pool
.acquire()
.await
.map_err(|e| ApiError::DatabaseError(e.into()))?,
)
.await
.map_err(|_| ApiError::AuthenticationError)?;
let name = game_version.into_inner().0;
let _id = GameVersion::builder()
@@ -132,9 +188,20 @@ pub async fn game_version_create(
#[delete("game_version/{name}")]
pub async fn game_version_delete(
req: HttpRequest,
pool: web::Data<PgPool>,
game_version: web::Path<(String,)>,
) -> Result<HttpResponse, ApiError> {
check_is_admin_from_headers(
req.headers(),
&mut *pool
.acquire()
.await
.map_err(|e| ApiError::DatabaseError(e.into()))?,
)
.await
.map_err(|_| ApiError::AuthenticationError)?;
let name = game_version.into_inner().0;
let mut transaction = pool.begin().await.map_err(models::DatabaseError::from)?;

78
src/routes/users.rs Normal file

@@ -0,0 +1,78 @@
use crate::auth::{check_is_moderator_from_headers, get_user_from_headers};
use crate::models::users::{Role, UserId};
use crate::routes::ApiError;
use actix_web::{delete, get, post, web, HttpRequest, HttpResponse};
use sqlx::PgPool;
#[post("mod")]
pub async fn user_auth_get(
req: HttpRequest,
pool: web::Data<PgPool>,
) -> Result<HttpResponse, ApiError> {
Ok(HttpResponse::Ok().json(
get_user_from_headers(
req.headers(),
&mut *pool
.acquire()
.await
.map_err(|e| ApiError::DatabaseError(e.into()))?,
)
.await
.map_err(|_| ApiError::AuthenticationError)?,
))
}
#[get("{id}")]
pub async fn user_get(
info: web::Path<(UserId,)>,
pool: web::Data<PgPool>,
) -> Result<HttpResponse, ApiError> {
let id = info.0;
let user_data = crate::database::models::User::get(id.into(), &**pool)
.await
.map_err(|e| ApiError::DatabaseError(e.into()))?;
if let Some(data) = user_data {
let response = crate::models::users::User {
id: data.id.into(),
github_id: data.github_id as u64,
username: data.username,
name: data.name,
email: None,
avatar_url: data.avatar_url,
bio: data.bio,
created: data.created,
role: Role::from_string(&*data.role),
};
Ok(HttpResponse::Ok().json(response))
} else {
Ok(HttpResponse::NotFound().body(""))
}
}
// TODO: Make this actually do stuff
#[delete("{id}")]
pub async fn user_delete(
req: HttpRequest,
info: web::Path<(UserId,)>,
pool: web::Data<PgPool>,
) -> Result<HttpResponse, ApiError> {
check_is_moderator_from_headers(
req.headers(),
&mut *pool
.acquire()
.await
.map_err(|e| ApiError::DatabaseError(e.into()))?,
)
.await
.map_err(|_| ApiError::AuthenticationError)?;
let _id = info.0;
let result = Some(());
if result.is_some() {
Ok(HttpResponse::Ok().body(""))
} else {
Ok(HttpResponse::NotFound().body(""))
}
}


@@ -1,3 +1,4 @@
use crate::auth::get_user_from_headers;
use crate::database::models;
use crate::database::models::version_item::{VersionBuilder, VersionFileBuilder};
use crate::file_hosting::FileHost;
@@ -7,7 +8,7 @@ use crate::models::mods::{
use crate::routes::mod_creation::{CreateError, UploadedFile};
use actix_multipart::{Field, Multipart};
use actix_web::web::Data;
use actix_web::{post, HttpResponse};
use actix_web::{post, HttpRequest, HttpResponse};
use futures::stream::StreamExt;
use serde::{Deserialize, Serialize};
use sqlx::postgres::PgPool;
@@ -32,6 +33,7 @@ struct InitialFileData {
// under `/api/v1/mod/{mod_id}`
#[post("version")]
pub async fn version_create(
req: HttpRequest,
url_data: actix_web::web::Path<(ModId,)>,
payload: Multipart,
client: Data<PgPool>,
@@ -43,6 +45,7 @@ pub async fn version_create(
let mod_id = url_data.into_inner().0.into();
let result = version_create_inner(
req,
payload,
&mut transaction,
&***file_host,
@@ -69,6 +72,7 @@ pub async fn version_create(
}
async fn version_create_inner(
req: HttpRequest,
mut payload: Multipart,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
file_host: &dyn FileHost,
@@ -80,6 +84,8 @@ async fn version_create_inner(
let mut initial_version_data = None;
let mut version_builder = None;
let user = get_user_from_headers(req.headers(), &mut *transaction).await?;
while let Some(item) = payload.next().await {
let mut field: Field = item.map_err(CreateError::MultipartError)?;
let content_disposition = field.content_disposition().ok_or_else(|| {
@@ -126,6 +132,25 @@ async fn version_create_inner(
));
}
let team_id = sqlx::query!(
"SELECT team_id FROM mods WHERE id=$1",
mod_id as models::ModId,
)
.fetch_one(&mut *transaction)
.await?
.team_id;
let member_ids_rows =
sqlx::query!("SELECT user_id FROM team_members WHERE team_id=$1", team_id,)
.fetch_all(&mut *transaction)
.await?;
let member_ids: Vec<i64> = member_ids_rows.iter().map(|m| m.user_id).collect();
if !member_ids.contains(&(user.id.0 as i64)) {
return Err(CreateError::InvalidInput("Unauthorized".to_string()));
}
let version_id: VersionId = models::generate_version_id(transaction).await?.into();
let body_url = format!(
"data/{}/changelogs/{}/body.md",
@@ -156,6 +181,7 @@ async fn version_create_inner(
version_builder = Some(VersionBuilder {
version_id: version_id.into(),
mod_id,
author_id: user.id.into(),
name: version_create_data.version_title.clone(),
version_number: version_create_data.version_number.clone(),
changelog_url: Some(format!("{}/{}", cdn_url, body_url)),
@@ -239,6 +265,7 @@ async fn version_create_inner(
let response = Version {
id: version_builder_safe.version_id.into(),
mod_id: version_builder_safe.mod_id.into(),
author_id: user.id,
name: version_builder_safe.name.clone(),
version_number: version_builder_safe.version_number.clone(),
changelog_url: version_builder_safe.changelog_url.clone(),
@@ -282,6 +309,7 @@ async fn version_create_inner(
// under /api/v1/mod/{mod_id}/version/{version_id}
#[post("file")]
pub async fn upload_file_to_version(
req: HttpRequest,
url_data: actix_web::web::Path<(ModId, VersionId)>,
payload: Multipart,
client: Data<PgPool>,
@@ -295,6 +323,7 @@ pub async fn upload_file_to_version(
let version_id = models::VersionId::from(data.1);
let result = upload_file_to_version_inner(
req,
payload,
&mut transaction,
&***file_host,
@@ -322,6 +351,7 @@ pub async fn upload_file_to_version(
}
async fn upload_file_to_version_inner(
req: HttpRequest,
mut payload: Multipart,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
file_host: &dyn FileHost,
@@ -334,9 +364,11 @@ async fn upload_file_to_version_inner(
let mut initial_file_data: Option<InitialFileData> = None;
let mut file_builder: Option<VersionFileBuilder> = None;
let user = get_user_from_headers(req.headers(), &mut *transaction).await?;
let result = sqlx::query!(
"
SELECT mod_id, version_number
SELECT mod_id, version_number, author_id
FROM versions
WHERE id = $1
",
@@ -359,6 +391,10 @@ async fn upload_file_to_version_inner(
));
}
if version.author_id as u64 != user.id.0 {
return Err(CreateError::InvalidInput("Unauthorized".to_string()));
}
let mod_id = ModId(version.mod_id as u64);
let version_number = version.version_number;


@@ -1,7 +1,8 @@
use super::ApiError;
use crate::auth::check_is_moderator_from_headers;
use crate::database;
use crate::models;
use actix_web::{delete, get, web, HttpResponse};
use actix_web::{delete, get, web, HttpRequest, HttpResponse};
use sqlx::PgPool;
// TODO: this needs filtering, and a better response type
@@ -62,6 +63,7 @@ pub async fn version_get(
let response = models::mods::Version {
id: data.id.into(),
mod_id: data.mod_id.into(),
author_id: data.author_id.into(),
name: data.name,
version_number: data.version_number,
@@ -111,12 +113,22 @@ pub async fn version_get(
}
}
// TODO: This really needs auth
#[delete("{version_id}")]
pub async fn version_delete(
req: HttpRequest,
info: web::Path<(models::ids::ModId, models::ids::VersionId)>,
pool: web::Data<PgPool>,
) -> Result<HttpResponse, ApiError> {
check_is_moderator_from_headers(
req.headers(),
&mut *pool
.acquire()
.await
.map_err(|e| ApiError::DatabaseError(e.into()))?,
)
.await
.map_err(|_| ApiError::AuthenticationError)?;
// TODO: check if the mod exists and matches the version id
let id = info.1;
let result = database::models::Version::remove_full(id.into(), &**pool)


@@ -63,9 +63,9 @@ pub async fn index_mods(pool: PgPool, settings: IndexingSettings) -> Result<(),
}
if settings.index_external {
let end_index = dotenv::var("MAX_CURSEFORGE_ID")
.ok()
.map(|i| i.parse().unwrap())
.unwrap_or(450_000);
.ok()
.map(|i| i.parse().unwrap())
.unwrap_or(450_000);
docs_to_add.append(&mut index_curseforge(1, end_index).await?);
}
@@ -271,7 +271,11 @@ fn default_settings() -> Settings {
.with_searchable_attributes(searchable_attributes)
.with_stop_words(vec![])
.with_synonyms(HashMap::new())
.with_attributes_for_faceting(vec![String::from("categories"), String::from("host"), String::from("versions")])
.with_attributes_for_faceting(vec![
String::from("categories"),
String::from("host"),
String::from("versions"),
])
}
//endregion


@@ -5,8 +5,7 @@ use actix_web::web::HttpResponse;
use meilisearch_sdk::client::Client;
use meilisearch_sdk::document::Document;
use meilisearch_sdk::search::Query;
use serde::ser::SerializeStruct;
use serde::{Deserialize, Serialize, Serializer};
use serde::{Deserialize, Serialize};
use std::borrow::Cow;
use std::cmp::min;
use thiserror::Error;
@@ -15,7 +14,7 @@ pub mod indexing;
#[derive(Error, Debug)]
pub enum SearchError {
#[error("Error while connecting to the MeiliSearch database")]
#[error("Error while connecting to the MeiliSearch database: {0}")]
IndexDBError(#[from] meilisearch_sdk::errors::Error),
#[error("Error while serializing or deserializing JSON: {0}")]
SerDeError(#[from] serde_json::Error),