Switch to Postgres (#39)

* WIP Switch to Postgres

* feat(postgres): more work on porting to postgres, now compiles

* feat(docker-compose): Changed the docker-compose.yml file to use postgres.

* Update docker, documentation, gh actions...

* Remove bson dependency

* Remove bson import

* feat: move mock filehost to trait rather than cargo feature

* feat(postgres): transactions for mod creation, multipart refactor

* fix: Add Cargo.lock so that sqlx functions

* Update sqlx offline build data

* fix: Use SQLX_OFFLINE to force sqlx into offline mode for CI

* Default release channels

* feat(postgres): refactor database models to fit postgres models

* fix: Fix sqlx prepare, fix double allocation in indexing

* Add dockerfile (#40)

Co-authored-by: Charalampos Fanoulis <charalampos.fanoulis@gmail.com>

Co-authored-by: Aeledfyr <aeledfyr@gmail.com>
Co-authored-by: redblueflame <contact@redblueflame.com>
Co-authored-by: Jai A <jai.a@tuta.io>
Co-authored-by: Valentin Ricard <redblueflame1@gmail.com>
Co-authored-by: Charalampos Fanoulis <charalampos.fanoulis@gmail.com>
AppleTheGolden
2020-07-23 22:46:33 +02:00
committed by GitHub
parent 95339a8338
commit ee69653a83
47 changed files with 4363 additions and 694 deletions

2
.dockerignore Normal file

@@ -0,0 +1,2 @@
target
.env

5
.env

@@ -3,13 +3,14 @@ DEBUG=true
CDN_URL=cdn.modrinth.com
MONGODB_ADDR=mongodb://toor:modrinthadmin@localhost:27017
DATABASE_URL=postgresql://labrinth@localhost/labrinth
MEILISEARCH_ADDR=http://localhost:7700
BIND_ADDR=127.0.0.1:8000
MOCK_FILE_PATH=/tmp/modrinth
BACKBLAZE_ENABLED=false
BACKBLAZE_KEY_ID=none
BACKBLAZE_KEY=none
BACKBLAZE_BUCKET_ID=none
BACKBLAZE_BUCKET_ID=none


@@ -7,6 +7,7 @@ on:
env:
CARGO_TERM_COLOR: always
SQLX_OFFLINE: true
jobs:
lint:
@@ -30,3 +31,5 @@ jobs:
with:
token: ${{ secrets.GITHUB_TOKEN }}
args: --all-features
env:
SQLX_OFFLINE: true


@@ -0,0 +1,25 @@
name: Docker image build
on:
push:
branches:
- master
env:
CARGO_TERM_COLOR: always
SQLX_OFFLINE: true
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Build and push Docker images
uses: docker/build-push-action@v1
with:
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
registry: docker.pkg.github.com
repository: modrinth/labrinth/labrinth
tag_with_ref: false
tags: master
tag_with_sha: true


@@ -0,0 +1,26 @@
name: Docker image build
on:
push:
# Sequence of patterns matched against refs/tags
tags:
- 'v*' # Push events to matching v*, i.e. v1.0, v20.15.10
env:
CARGO_TERM_COLOR: always
SQLX_OFFLINE: true
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Build and push Docker images
uses: docker/build-push-action@v1
with:
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
registry: docker.pkg.github.com
repository: modrinth/labrinth/labrinth
tag_with_ref: true
tags: latest
tag_with_sha: true

25
.github/workflows/docker-compile.yml vendored Normal file

@@ -0,0 +1,25 @@
name: Docker image build
on:
push:
branches:
- '!master'
pull_request:
env:
CARGO_TERM_COLOR: always
SQLX_OFFLINE: true
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Build and push Docker images
uses: docker/build-push-action@v1
with:
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
registry: docker.pkg.github.com
repository: modrinth/labrinth/labrinth
tag_with_ref: true
tag_with_sha: true


@@ -5,7 +5,7 @@ on:
label:
push:
branches:
- maser
- master
paths:
- '.github/labels.yml'


@@ -7,6 +7,7 @@ on:
env:
CARGO_TERM_COLOR: always
SQLX_OFFLINE: true
jobs:
build:
@@ -25,7 +26,11 @@ jobs:
uses: actions-rs/toolchain@v1
with:
toolchain: stable
env:
SQLX_OFFLINE: true
- uses: actions-rs/cargo@v1
name: Build program
with:
command: build
env:
SQLX_OFFLINE: true


@@ -2,20 +2,27 @@ name: Unit Tests
on:
push:
branches: [ master ]
branches: [master]
pull_request:
env:
CARGO_TERM_COLOR: always
SQLX_OFFLINE: true
jobs:
test:
name: ${{ matrix.os }}-rust-${{ matrix.rust }}
runs-on: ${{ matrix.os }}
continue-on-error: ${{ matrix.continue-on-error }}
strategy:
matrix:
os: [ubuntu-latest]
rust:
- stable
- beta
- nightly
rust: [beta, nightly]
continue-on-error: [true]
include:
- os: ubuntu-latest
- rust: stable
- continue-on-error: false
steps:
- uses: actions/checkout@v2
@@ -34,10 +41,13 @@ jobs:
- uses: actions-rs/cargo@v1
with:
command: build
env:
SQLX_OFFLINE: true
- uses: actions-rs/cargo@v1
with:
command: test
env:
BACKBLAZE_BUCKET_ID: ${{ secrets.BACKBLAZE_BUCKET_ID }}
BACKBLAZE_KEY: ${{ secrets.BACKBLAZE_KEY }}
BACKBLAZE_KEY_ID: ${{ secrets.BACKBLAZE_KEY_ID }}
BACKBLAZE_KEY_ID: ${{ secrets.BACKBLAZE_KEY_ID }}
SQLX_OFFLINE: true

5
.gitignore vendored

@@ -97,7 +97,10 @@ fabric.properties
# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
Cargo.lock
# Cargo.lock
# Since we're using git dependencies, Cargo.lock is required to stop random
# errors when an upstream build fails. We're also making an executable, so we
# shouldn't have it in the .gitignore
# These are backup files generated by rustfmt
**/*.rs.bk

11
.idea/dataSources.xml generated Normal file

@@ -0,0 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="DataSourceManagerImpl" format="xml" multifile-model="true">
<data-source source="LOCAL" name="PostgreSQL" uuid="ed1c901a-da61-499a-9f1e-123a59bd7b15">
<driver-ref>postgresql</driver-ref>
<synchronize>true</synchronize>
<jdbc-driver>org.postgresql.Driver</jdbc-driver>
<jdbc-url>jdbc:postgresql://localhost:5432/labrinth</jdbc-url>
</data-source>
</component>
</project>

14
.idea/deployment.xml generated Normal file

@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="PublishConfigData">
<serverData>
<paths name="Remote Host (b864ed82-8c7b-4ccd-920b-a7b8ba7cc008)">
<serverdata>
<mappings>
<mapping local="$PROJECT_DIR$" web="/" />
</mappings>
</serverdata>
</paths>
</serverData>
</component>
</project>

6
.idea/sqldialects.xml generated Normal file

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="SqlDialectMappings">
<file url="file://$PROJECT_DIR$/migrations/20200716160921_init.sql" dialect="GenericSQL" />
</component>
</project>


@@ -6,10 +6,15 @@ We recommend using [Docker](https://www.docker.com/) for setting up your dev env
```sh
docker-compose up
```
which will deploy a Meilisearch container on port 7700, a MongoDB instance on port 27017 and a MongoDB web UI on port 8081
which will deploy a Meilisearch container on port 7700, a PostgreSQL container on port 5432, and a pgAdmin web UI on port 8070.
When pgAdmin prompts for a server password, leave the password field empty.
Alternatively, follow these steps:
1. Install and run a [MeiliSearch](https://docs.meilisearch.com/guides/introduction/quick_start_guide.html) instance
2. Install [a local MongoDB server](https://www.mongodb.com/try/download/community)
3. Run `mongod --dbpath path/to/db`
4. Everything should be set up and you should be ready to contribute.
You will have to set up the database now. To do so, install the sqlx cli:
```sh
cargo install --git https://github.com/launchbadge/sqlx sqlx-cli --no-default-features --features postgres
```
Then run the following commands to create the database and apply the migrations:
```sh
sqlx database create
sqlx migrate run
```
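Both commands read the connection string from the `DATABASE_URL` environment variable, so it must point at your local server first — a minimal sketch, assuming the default value from this repository's `.env`:
```sh
# sqlx-cli reads DATABASE_URL from the environment (or from a .env file)
export DATABASE_URL=postgresql://labrinth@localhost/labrinth
sqlx database create && sqlx migrate run
```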

2653
Cargo.lock generated Normal file

File diff suppressed because it is too large


@@ -1,10 +1,14 @@
[package]
name = "modrinth"
name = "labrinth"
version = "0.1.0"
#Team members, please add your emails and usernames
authors = ["geometrically <jai.a@tuta.io>", "Redblueflame <contact@redblueflame.com>", "Aeledfyr <aeledfyr@gmail.com>", "cfanoulis"]
authors = ["geometrically <jai.a@tuta.io>", "Redblueflame <contact@redblueflame.com>", "Aeledfyr <aeledfyr@gmail.com>", "Charalampos Fanoulis <yo@fanoulis.dev>"]
edition = "2018"
[[bin]]
name = "labrinth"
path = "src/main.rs"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
@@ -18,7 +22,7 @@ reqwest = {version="0.10.6", features=["json"]}
meilisearch-sdk = "0.1.4"
serde_json = "1.0"
serde = {version="1.0", features=["derive"]}
serde = { version = "1.0", features = ["derive"] }
chrono = { version = "0.4", features = ["serde"] }
rand = "0.7"
@@ -26,9 +30,6 @@ dotenv = "0.15"
log = "0.4.8"
env_logger = "0.7.1"
mongodb = "1.0.0"
bson = "1.0.0"
thiserror = "1.0.20"
async-trait = "0.1.36"
@@ -38,6 +39,14 @@ futures-timer = "3.0.2"
base64 = "0.12.3"
sha1 = {version="0.6.0", features=["std"]}
[features]
default = []
backblaze = []
[dependencies.sqlx]
git = "https://github.com/launchbadge/sqlx/"
branch = "master"
default-features = false
features = ["runtime-actix", "postgres", "chrono", "offline", "macros"]
[dependencies.sqlx-macros]
git = "https://github.com/launchbadge/sqlx/"
branch = "master"
default-features = false
features = ["runtime-actix", "postgres", "chrono", "offline"]

26
Dockerfile Normal file

@@ -0,0 +1,26 @@
FROM rust:1.43.1 as build
ENV PKG_CONFIG_ALLOW_CROSS=1
WORKDIR /usr/src/labrinth
# Download and compile deps
COPY Cargo.toml .
COPY Cargo.lock .
COPY docker_utils/dummy.rs .
# Temporarily point the build at the dummy entry point
RUN sed -i 's|src/main.rs|dummy.rs|' Cargo.toml
# Build only deps
RUN cargo build --release
# Restore the real entry point path
RUN sed -i 's|dummy.rs|src/main.rs|' Cargo.toml
# Copy everything
COPY . .
# Build our code
ARG SQLX_OFFLINE=true
RUN cargo build --release
FROM gcr.io/distroless/cc-debian10
COPY --from=build /usr/src/labrinth/target/release/labrinth /usr/local/bin/labrinth
CMD ["labrinth"]


@@ -1,24 +1,15 @@
version: '3'
services:
mongo:
image: mongo
restart: always
ports:
- 27017:27017
environment:
MONGO_INITDB_ROOT_USERNAME: toor
MONGO_INITDB_ROOT_PASSWORD: modrinthadmin
postgres_db:
image: postgres:alpine
volumes:
- mongodb-data:/data/db
mongo-express:
image: mongo-express
restart: always
- db-data:/var/lib/postgresql/data
ports:
- 8081:8081
- 5432:5432
environment:
ME_CONFIG_MONGODB_ADMINUSERNAME: toor
ME_CONFIG_MONGODB_ADMINPASSWORD: modrinthadmin
ME_CONFIG_OPTIONS_EDITORTHEME: material
POSTGRES_DB: postgres
POSTGRES_USER: labrinth
POSTGRES_HOST_AUTH_METHOD: trust
meilisearch:
image: getmeili/meilisearch
restart: on-failure
@@ -26,8 +17,17 @@ services:
- 7700:7700
volumes:
- meilisearch-data:/data.ms
pgadmin:
image: dpage/pgadmin4:latest
environment:
PGADMIN_DEFAULT_EMAIL: admin@modrinth.com
PGADMIN_DEFAULT_PASSWORD: secret
PGADMIN_CONFIG_SERVER_MODE: "False"
PGADMIN_CONFIG_MASTER_PASSWORD_REQUIRED: "False"
ports:
- "8070:80"
volumes:
- ./pgadmin_default_servers.json:/pgadmin4/servers.json
volumes:
meilisearch-data:
external: true
mongodb-data:
external: true
db-data:

1
docker_utils/dummy.rs Normal file

@@ -0,0 +1 @@
fn main() {}


@@ -0,0 +1,102 @@
CREATE TABLE users (
-- TODO
id bigint PRIMARY KEY
);
CREATE TABLE game_versions (
id serial PRIMARY KEY,
version varchar(255) NOT NULL
);
CREATE TABLE loaders (
id serial PRIMARY KEY,
loader varchar(255) NOT NULL
);
CREATE TABLE teams (
id bigint PRIMARY KEY
);
CREATE TABLE release_channel (
id serial PRIMARY KEY,
channel varchar(255)
);
CREATE TABLE mods (
id bigint PRIMARY KEY,
team_id bigint REFERENCES teams NOT NULL,
title varchar(255) NOT NULL,
description varchar(2048) NOT NULL,
body_url varchar(2048) NOT NULL,
published timestamptz DEFAULT CURRENT_TIMESTAMP NOT NULL,
downloads integer NOT NULL DEFAULT 0,
icon_url varchar(2048) NULL,
issues_url varchar(2048) NULL,
source_url varchar(2048) NULL,
wiki_url varchar(2048) NULL
);
CREATE TABLE versions (
id bigint PRIMARY KEY,
mod_id bigint REFERENCES mods,
name varchar(255) NOT NULL,
version_number varchar(255) NOT NULL,
changelog_url varchar(255) NULL,
date_published timestamptz DEFAULT CURRENT_TIMESTAMP NOT NULL,
downloads integer NOT NULL DEFAULT 0,
release_channel int REFERENCES release_channel ON UPDATE CASCADE NOT NULL
);
CREATE TABLE loaders_versions (
loader_id int REFERENCES loaders ON UPDATE CASCADE NOT NULL,
version_id bigint REFERENCES versions ON UPDATE CASCADE NOT NULL,
PRIMARY KEY (loader_id, version_id)
);
CREATE TABLE game_versions_versions (
game_version_id integer REFERENCES game_versions ON UPDATE CASCADE NOT NULL,
joining_version_id bigint REFERENCES versions ON UPDATE CASCADE NOT NULL,
PRIMARY KEY (game_version_id, joining_version_id)
);
CREATE TABLE files (
id bigint PRIMARY KEY,
version_id bigint REFERENCES versions NOT NULL,
url varchar(2048) NOT NULL
);
CREATE TABLE hashes (
file_id bigint REFERENCES files NOT NULL,
algorithm varchar(255) NOT NULL,
hash bytea NOT NULL,
PRIMARY KEY (file_id, algorithm)
);
CREATE TABLE dependencies (
id serial PRIMARY KEY,
dependent_id bigint REFERENCES versions ON UPDATE CASCADE NOT NULL,
dependency_id bigint REFERENCES versions ON UPDATE CASCADE NOT NULL,
CONSTRAINT valid_dependency CHECK (dependent_id <> dependency_id) -- No dependency on yourself
);
CREATE TABLE team_members (
id bigint PRIMARY KEY,
team_id bigint REFERENCES teams NOT NULL,
user_id bigint REFERENCES users NOT NULL,
member_name varchar(255) NOT NULL,
role varchar(255) NOT NULL
);
CREATE TABLE categories (
id serial PRIMARY KEY,
category varchar(255) UNIQUE
);
CREATE TABLE mods_categories (
joining_mod_id bigint REFERENCES mods ON UPDATE CASCADE NOT NULL,
joining_category_id int REFERENCES categories ON UPDATE CASCADE NOT NULL,
PRIMARY KEY (joining_mod_id, joining_category_id)
);


@@ -0,0 +1,3 @@
-- Add migration script here
ALTER TABLE categories
ALTER COLUMN category SET NOT NULL;


@@ -0,0 +1,7 @@
-- Add migration script here
INSERT INTO release_channel (channel) VALUES ('release');
INSERT INTO release_channel (channel) VALUES ('release-hidden');
INSERT INTO release_channel (channel) VALUES ('beta');
INSERT INTO release_channel (channel) VALUES ('beta-hidden');
INSERT INTO release_channel (channel) VALUES ('alpha');
INSERT INTO release_channel (channel) VALUES ('alpha-hidden');


@@ -0,0 +1,2 @@
-- Add migration script here
ALTER TABLE release_channel RENAME TO release_channels;


@@ -0,0 +1,3 @@
-- Add migration script here
ALTER TABLE files
ADD filename varchar(2048) NOT NULL;


@@ -0,0 +1,18 @@
{
"Servers": {
"1": {
"Name": "Labrinth",
"Group": "Servers",
"Host": "postgres_db",
"Port": 5432,
"MaintenanceDB": "postgres",
"Username": "labrinth",
"SSLMode": "prefer",
"SSLCompression": 0,
"Timeout": 10,
"UseSSHTunnel": 0,
"TunnelPort": "22",
"TunnelAuthentication": 0
}
}
}

197
sqlx-data.json Normal file

@@ -0,0 +1,197 @@
{
"db": "PostgreSQL",
"1ffce9b2d5c9fa6c8b9abce4bad9f9419c44ad6367b7463b979c91b9b5b4fea1": {
"query": "SELECT EXISTS(SELECT 1 FROM versions WHERE id=$1)",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "exists",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
null
]
}
},
"4c99c0840159d18e88cd6094a41117258f2337346c145d926b5b610c76b5125f": {
"query": "\n SELECT c.category\n FROM mods_categories mc\n INNER JOIN categories c ON mc.joining_category_id=c.id\n WHERE mc.joining_mod_id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "category",
"type_info": "Varchar"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false
]
}
},
"c0899dcff4d7bc1ba3e953e5099210316bff2f98e6ab77ba84bc612eac4bce0a": {
"query": "\n SELECT gv.version FROM versions\n INNER JOIN game_versions_versions gvv ON gvv.joining_version_id=versions.id\n INNER JOIN game_versions gv ON gvv.game_version_id=gv.id\n WHERE versions.mod_id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "version",
"type_info": "Varchar"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
false
]
}
},
"ccd913bb2f3006ffe881ce2fc4ef1e721d18fe2eed6ac62627046c955129610c": {
"query": "SELECT EXISTS(SELECT 1 FROM files WHERE id=$1)",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "exists",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
null
]
}
},
"d6453e50041b5521fa9e919a9162e533bb9426f8c584d98474c6ad414db715c8": {
"query": "SELECT EXISTS(SELECT 1 FROM mods WHERE id=$1)",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "exists",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
null
]
}
},
"e7d0a64a08df6783c942f2fcadd94dd45f8d96ad3d3736e52ce90f68d396cdab": {
"query": "SELECT EXISTS(SELECT 1 FROM team_members WHERE id=$1)",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "exists",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
null
]
}
},
"e8d4589132b094df1e7a3ca0440344fc8013c0d20b3c71a1142ccbee91fb3c70": {
"query": "SELECT EXISTS(SELECT 1 FROM teams WHERE id=$1)",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "exists",
"type_info": "Bool"
}
],
"parameters": {
"Left": [
"Int8"
]
},
"nullable": [
null
]
}
},
"efe1bc80203f608226fa33e44654b681cc4430cec63bf7cf09b5281ff8c1c437": {
"query": "\n SELECT m.id, m.title, m.description, m.downloads, m.icon_url, m.body_url, m.published FROM mods m\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Int8"
},
{
"ordinal": 1,
"name": "title",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "description",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "downloads",
"type_info": "Int4"
},
{
"ordinal": 4,
"name": "icon_url",
"type_info": "Varchar"
},
{
"ordinal": 5,
"name": "body_url",
"type_info": "Varchar"
},
{
"ordinal": 6,
"name": "published",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": []
},
"nullable": [
false,
false,
false,
false,
true,
false,
false
]
}
}
}


@@ -1,19 +1,6 @@
pub mod models;
mod mongo_database;
mod postgres_database;
pub use models::Mod;
pub use models::Version;
pub use mongo_database::connect;
use thiserror::Error;
type Result<T> = std::result::Result<T, DatabaseError>;
#[derive(Error, Debug)]
pub enum DatabaseError {
#[error("Impossible to find document")]
NotFound(),
#[error("BSON deserialization error")]
BsonError(#[from] bson::de::Error),
#[error("Local database error")]
LocalDatabaseError(#[from] mongodb::error::Error),
}
pub use postgres_database::connect;

141
src/database/models/ids.rs Normal file

@@ -0,0 +1,141 @@
use super::DatabaseError;
use crate::models::ids::random_base62;
use sqlx_macros::Type;
const ID_RETRY_COUNT: usize = 20;
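// Generates an async helper that draws a random base62 ID, checks it against the
// given EXISTS query, and retries (up to ID_RETRY_COUNT times) until the ID is free.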
macro_rules! generate_ids {
($vis:vis $function_name:ident, $return_type:ty, $id_length:expr, $select_stmnt:literal, $id_function:expr) => {
$vis async fn $function_name(
con: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<$return_type, DatabaseError> {
let length = $id_length;
let mut id = random_base62(length);
let mut retry_count = 0;
// Check if ID is unique
loop {
let results = sqlx::query!($select_stmnt, id as i64)
.fetch_one(&mut *con)
.await?;
if results.exists.unwrap_or(true) {
id = random_base62(length);
} else {
break;
}
retry_count += 1;
if retry_count > ID_RETRY_COUNT {
return Err(DatabaseError::RandomIdError);
}
}
Ok($id_function(id as i64))
}
};
}
generate_ids!(
pub generate_mod_id,
ModId,
8,
"SELECT EXISTS(SELECT 1 FROM mods WHERE id=$1)",
ModId
);
generate_ids!(
pub generate_version_id,
VersionId,
8,
"SELECT EXISTS(SELECT 1 FROM versions WHERE id=$1)",
VersionId
);
generate_ids!(
pub generate_team_id,
TeamId,
8,
"SELECT EXISTS(SELECT 1 FROM teams WHERE id=$1)",
TeamId
);
generate_ids!(
pub generate_file_id,
FileId,
8,
"SELECT EXISTS(SELECT 1 FROM files WHERE id=$1)",
FileId
);
generate_ids!(
pub generate_team_member_id,
TeamMemberId,
8,
"SELECT EXISTS(SELECT 1 FROM team_members WHERE id=$1)",
TeamMemberId
);
#[derive(Copy, Clone, Debug, Type)]
pub struct UserId(pub i64);
#[derive(Copy, Clone, Debug, Type)]
pub struct TeamId(pub i64);
#[derive(Copy, Clone, Debug, Type)]
pub struct TeamMemberId(pub i64);
#[derive(Copy, Clone, Debug, Type)]
pub struct ModId(pub i64);
#[derive(Copy, Clone, Debug, Type)]
pub struct VersionId(pub i64);
#[derive(Copy, Clone, Debug, Type)]
pub struct ChannelId(pub i64);
#[derive(Copy, Clone, Debug, Type)]
pub struct GameVersionId(pub i32);
#[derive(Copy, Clone, Debug, Type)]
pub struct LoaderId(pub i32);
#[derive(Copy, Clone, Debug, Type)]
pub struct CategoryId(pub i32);
#[derive(Copy, Clone, Debug, Type)]
pub struct FileId(pub i64);
use crate::models::ids;
impl From<ids::ModId> for ModId {
fn from(id: ids::ModId) -> Self {
ModId(id.0 as i64)
}
}
impl From<ModId> for ids::ModId {
fn from(id: ModId) -> Self {
ids::ModId(id.0 as u64)
}
}
impl From<ids::UserId> for UserId {
fn from(id: ids::UserId) -> Self {
UserId(id.0 as i64)
}
}
impl From<UserId> for ids::UserId {
fn from(id: UserId) -> Self {
ids::UserId(id.0 as u64)
}
}
impl From<ids::TeamId> for TeamId {
fn from(id: ids::TeamId) -> Self {
TeamId(id.0 as i64)
}
}
impl From<TeamId> for ids::TeamId {
fn from(id: TeamId) -> Self {
ids::TeamId(id.0 as u64)
}
}
impl From<ids::VersionId> for VersionId {
fn from(id: ids::VersionId) -> Self {
VersionId(id.0 as i64)
}
}
impl From<VersionId> for ids::VersionId {
fn from(id: VersionId) -> Self {
ids::VersionId(id.0 as u64)
}
}


@@ -1,32 +1,25 @@
mod mod_item;
mod team_item;
mod version_item;
#![allow(unused)]
// TODO: remove attr once routes are created
use crate::database::DatabaseError::NotFound;
use crate::database::Result;
use async_trait::async_trait;
use bson::doc;
use bson::Document;
use thiserror::Error;
pub mod ids;
pub mod mod_item;
pub mod team_item;
pub mod version_item;
pub use ids::*;
pub use mod_item::Mod;
use mongodb::Database;
pub use team_item::Team;
pub use team_item::TeamMember;
pub use version_item::FileHash;
pub use version_item::Version;
pub use version_item::VersionFile;
#[async_trait]
pub trait Item {
fn get_collection() -> &'static str;
async fn get_by_id(client: Database, id: &str) -> Result<Box<Self>> {
let filter = doc! { "_id": id };
let collection = client.collection(Self::get_collection());
let doc: Document = match collection.find_one(filter, None).await? {
Some(e) => e,
None => return Err(NotFound()),
};
let elem: Box<Self> = Self::from_doc(doc)?;
Ok(elem)
}
fn from_doc(elem: Document) -> Result<Box<Self>>;
#[derive(Error, Debug)]
pub enum DatabaseError {
#[error("Error while interacting with the database")]
DatabaseError(#[from] sqlx::error::Error),
#[error("Error while trying to generate random ID")]
RandomIdError,
}


@@ -1,36 +1,108 @@
use crate::database::models::team_item::Team;
use crate::database::models::Item;
use crate::database::Result;
use bson::{Bson, Document};
use serde::{Deserialize, Serialize};
use super::ids::*;
#[derive(Deserialize, Serialize)]
pub struct Mod {
/// The ID for the mod, must be serializable to base62
pub id: i32,
//Todo: Move to own table
/// The team that owns the mod
pub team: Team,
pub struct ModBuilder {
pub mod_id: ModId,
pub team_id: TeamId,
pub title: String,
pub description: String,
pub body_url: String,
pub published: String,
pub icon_url: Option<String>,
pub issues_url: Option<String>,
pub source_url: Option<String>,
pub wiki_url: Option<String>,
pub categories: Vec<CategoryId>,
pub initial_versions: Vec<super::version_item::VersionBuilder>,
}
impl ModBuilder {
pub async fn insert(
self,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<ModId, super::DatabaseError> {
let mod_struct = Mod {
id: self.mod_id,
team_id: self.team_id,
title: self.title,
description: self.description,
body_url: self.body_url,
published: chrono::Utc::now(),
downloads: 0,
icon_url: self.icon_url,
issues_url: self.issues_url,
source_url: self.source_url,
wiki_url: self.wiki_url,
};
mod_struct.insert(&mut *transaction).await?;
for mut version in self.initial_versions {
version.mod_id = self.mod_id;
version.insert(&mut *transaction).await?;
}
for category in self.categories {
sqlx::query(
"
INSERT INTO mods_categories (joining_mod_id, joining_category_id)
VALUES ($1, $2)
",
)
.bind(self.mod_id)
.bind(category)
.execute(&mut *transaction)
.await?;
}
Ok(self.mod_id)
}
}
pub struct Mod {
pub id: ModId,
pub team_id: TeamId,
pub title: String,
pub description: String,
pub body_url: String,
pub published: chrono::DateTime<chrono::Utc>,
pub downloads: i32,
pub categories: Vec<String>,
/// A vector of Version IDs specifying the mod version of a dependency
pub version_ids: Vec<i32>,
pub icon_url: Option<String>,
pub issues_url: Option<String>,
pub source_url: Option<String>,
pub wiki_url: Option<String>,
}
impl Item for Mod {
fn get_collection() -> &'static str {
"mods"
}
fn from_doc(elem: Document) -> Result<Box<Mod>> {
let result: Mod = bson::from_bson(Bson::from(elem))?;
Ok(Box::from(result))
impl Mod {
pub async fn insert(
&self,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), sqlx::error::Error> {
sqlx::query(
"
INSERT INTO mods (
id, team_id, title, description, body_url,
published, downloads, icon_url, issues_url,
source_url, wiki_url
)
VALUES (
$1, $2, $3, $4, $5,
$6, $7, $8, $9,
$10, $11
)
",
)
.bind(self.id)
.bind(self.team_id)
.bind(&self.title)
.bind(&self.description)
.bind(&self.body_url)
.bind(self.published)
.bind(self.downloads)
.bind(self.icon_url.as_ref())
.bind(self.issues_url.as_ref())
.bind(self.source_url.as_ref())
.bind(self.wiki_url.as_ref())
.execute(&mut *transaction)
.await?;
Ok(())
}
}


@@ -1,19 +1,74 @@
use serde::{Deserialize, Serialize};
use super::ids::*;
pub struct TeamBuilder {
pub members: Vec<TeamMemberBuilder>,
}
pub struct TeamMemberBuilder {
pub user_id: UserId,
pub name: String,
pub role: String,
}
impl TeamBuilder {
pub async fn insert(
self,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<TeamId, super::DatabaseError> {
let team_id = generate_team_id(&mut *transaction).await?;
let team = Team { id: team_id };
sqlx::query(
"
INSERT INTO teams (id)
VALUES ($1)
",
)
.bind(team.id)
.execute(&mut *transaction)
.await?;
for member in self.members {
let team_member_id = generate_team_member_id(&mut *transaction).await?;
let team_member = TeamMember {
id: team_member_id,
team_id,
user_id: member.user_id,
name: member.name,
role: member.role,
};
sqlx::query(
"
INSERT INTO team_members (id, team_id, user_id, member_name, role)
VALUES ($1, $2, $3, $4, $5)
",
)
.bind(team_member.id)
.bind(team_member.team_id)
.bind(team_member.user_id)
.bind(team_member.name)
.bind(team_member.role)
.execute(&mut *transaction)
.await?;
}
Ok(team_id)
}
}
/// A team of users who control a mod
#[derive(Serialize, Deserialize)]
pub struct Team {
/// The id of the team
pub id: i32,
/// A list of the members of the team
pub members: Vec<TeamMember>,
pub id: TeamId,
}
/// A member of a team
#[derive(Serialize, Deserialize, Clone)]
pub struct TeamMember {
pub id: TeamMemberId,
pub team_id: TeamId,
/// The ID of the user associated with the member
pub user_id: i32,
pub user_id: UserId,
/// The name of the user
pub name: String,
pub role: String,


@@ -1,48 +1,215 @@
use crate::database::models::Item;
use crate::database::Result;
use bson::{Bson, Document};
use serde::{Deserialize, Serialize};
use super::ids::*;
use super::DatabaseError;
//TODO: Files should probably be moved to their own table
#[derive(Deserialize, Serialize)]
pub struct Version {
/// The unique VersionId of this version
pub version_id: i32,
/// The ModId of the mod that this version belongs to
pub mod_id: i32,
pub struct VersionBuilder {
pub version_id: VersionId,
pub mod_id: ModId,
pub name: String,
pub number: String,
pub version_number: String,
pub changelog_url: Option<String>,
pub date_published: String,
pub downloads: i32,
pub files: Vec<VersionFile>,
pub dependencies: Vec<i32>,
pub game_versions: Vec<String>,
pub loaders: Vec<String>,
pub version_type: String,
pub files: Vec<VersionFileBuilder>,
pub dependencies: Vec<VersionId>,
pub game_versions: Vec<GameVersionId>,
pub loaders: Vec<LoaderId>,
pub release_channel: ChannelId,
}
#[derive(Serialize, Deserialize)]
pub struct VersionFile {
pub game_versions: Vec<String>,
pub hashes: Vec<FileHash>,
pub struct VersionFileBuilder {
pub url: String,
pub filename: String,
pub hashes: Vec<HashBuilder>,
}
/// A hash of a mod's file
#[derive(Serialize, Deserialize)]
pub struct FileHash {
pub struct HashBuilder {
pub algorithm: String,
pub hash: String,
pub hash: Vec<u8>,
}
impl Item for Version {
fn get_collection() -> &'static str {
"versions"
}
impl VersionBuilder {
pub async fn insert(
self,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<VersionId, DatabaseError> {
let version = Version {
id: self.version_id,
mod_id: self.mod_id,
name: self.name,
version_number: self.version_number,
changelog_url: self.changelog_url,
date_published: chrono::Utc::now(),
downloads: 0,
release_channel: self.release_channel,
};
fn from_doc(elem: Document) -> Result<Box<Version>> {
let version: Version = bson::from_bson(Bson::from(elem))?;
Ok(Box::from(version))
version.insert(&mut *transaction).await?;
for file in self.files {
let file_id = generate_file_id(&mut *transaction).await?;
sqlx::query(
"
INSERT INTO files (id, version_id, url, filename)
VALUES ($1, $2, $3, $4)
",
)
.bind(file_id)
.bind(self.version_id)
.bind(file.url)
.bind(file.filename)
.execute(&mut *transaction)
.await?;
for hash in file.hashes {
sqlx::query(
"
INSERT INTO hashes (file_id, algorithm, hash)
VALUES ($1, $2, $3)
",
)
.bind(file_id)
.bind(hash.algorithm)
.bind(hash.hash)
.execute(&mut *transaction)
.await?;
}
}
for dependency in self.dependencies {
sqlx::query(
"
INSERT INTO dependencies (dependent_id, dependency_id)
VALUES ($1, $2)
",
)
.bind(self.version_id)
.bind(dependency)
.execute(&mut *transaction)
.await?;
}
for loader in self.loaders {
sqlx::query(
"
INSERT INTO loaders_versions (loader_id, version_id)
VALUES ($1, $2)
",
)
.bind(loader)
.bind(self.version_id)
.execute(&mut *transaction)
.await?;
}
for game_version in self.game_versions {
sqlx::query(
"
INSERT INTO game_versions_versions (game_version_id, joining_version_id)
VALUES ($1, $2)
",
)
.bind(game_version)
.bind(self.version_id)
.execute(&mut *transaction)
.await?;
}
Ok(self.version_id)
}
}
pub struct Version {
pub id: VersionId,
pub mod_id: ModId,
pub name: String,
pub version_number: String,
pub changelog_url: Option<String>,
pub date_published: chrono::DateTime<chrono::Utc>,
pub downloads: i32,
pub release_channel: ChannelId,
}
impl Version {
pub async fn insert(
&self,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
) -> Result<(), sqlx::error::Error> {
sqlx::query(
"
INSERT INTO versions (
id, mod_id, name, version_number,
changelog_url, date_published,
downloads, release_channel
)
VALUES (
$1, $2, $3, $4,
$5, $6,
$7, $8
)
",
)
.bind(self.id)
.bind(self.mod_id)
.bind(&self.name)
.bind(&self.version_number)
.bind(self.changelog_url.as_ref())
.bind(self.date_published)
.bind(self.downloads)
.bind(self.release_channel)
.execute(&mut *transaction)
.await?;
Ok(())
}
pub async fn get_dependencies<'a, E>(&self, exec: E) -> Result<Vec<VersionId>, sqlx::Error>
where
E: sqlx::Executor<'a, Database = sqlx::Postgres>,
{
use futures::stream::TryStreamExt;
let vec = sqlx::query_as::<_, (VersionId,)>(
"
SELECT id FROM versions v
INNER JOIN dependencies d ON d.dependency_id = v.id
WHERE d.dependent_id = $1
",
)
.bind(self.id)
.fetch_many(exec)
.try_filter_map(|e| async { Ok(e.right().map(|(v,)| v)) })
.try_collect::<Vec<VersionId>>()
.await?;
Ok(vec)
}
}
pub struct ReleaseChannel {
pub id: ChannelId,
pub channel: String,
}
pub struct Loader {
pub id: LoaderId,
pub loader: String,
}
pub struct GameVersion {
pub id: GameVersionId,
pub version: String,
}
pub struct VersionFile {
pub id: FileId,
pub version_id: VersionId,
pub url: String,
pub filename: String,
}
pub struct FileHash {
pub file_id: FileId,
pub algorithm: String,
pub hash: Vec<u8>,
}
pub struct Category {
pub id: CategoryId,
pub category: String,
}


@@ -1,14 +0,0 @@
use log::info;
use mongodb::error::Error;
use mongodb::options::ClientOptions;
use mongodb::Client;
pub async fn connect() -> Result<Client, Error> {
info!("Initializing database connection");
let mongodb_addr = dotenv::var("MONGODB_ADDR").expect("`MONGO_ADDR` not in .env");
let mut client_options = ClientOptions::parse(&mongodb_addr).await?;
client_options.app_name = Some("labrinth".to_string());
Client::with_options(client_options)
}


@@ -0,0 +1,14 @@
use log::info;
use sqlx::postgres::{PgPool, PgPoolOptions};
pub async fn connect() -> Result<PgPool, sqlx::Error> {
info!("Initializing database connection");
let database_url = dotenv::var("DATABASE_URL").expect("`DATABASE_URL` not in .env");
let pool = PgPoolOptions::new()
.max_connections(20)
.connect(&database_url)
.await?;
Ok(pool)
}


@@ -0,0 +1,135 @@
use super::{DeleteFileData, FileHost, FileHostingError, UploadFileData};
use async_trait::async_trait;
mod authorization;
mod delete;
mod upload;
pub struct BackblazeHost {
upload_url_data: authorization::UploadUrlData,
authorization_data: authorization::AuthorizationData,
}
impl BackblazeHost {
pub async fn new(key_id: &str, key: &str, bucket_id: &str) -> Self {
let authorization_data = authorization::authorize_account(key_id, key).await.unwrap();
let upload_url_data = authorization::get_upload_url(&authorization_data, bucket_id)
.await
.unwrap();
BackblazeHost {
authorization_data,
upload_url_data,
}
}
}
#[async_trait]
impl FileHost for BackblazeHost {
async fn upload_file(
&self,
content_type: &str,
file_name: &str,
file_bytes: Vec<u8>,
) -> Result<UploadFileData, FileHostingError> {
let upload_data =
upload::upload_file(&self.upload_url_data, content_type, file_name, file_bytes).await?;
Ok(UploadFileData {
file_id: upload_data.file_id,
file_name: upload_data.file_name,
content_length: upload_data.content_length,
content_sha1: upload_data.content_sha1,
content_md5: upload_data.content_md5,
content_type: upload_data.content_type,
upload_timestamp: upload_data.upload_timestamp,
})
}
/*
async fn upload_file_streaming(
&self,
content_type: &str,
file_name: &str,
stream: reqwest::Body
) -> Result<UploadFileData, FileHostingError> {
use futures::stream::StreamExt;
let mut data = Vec::new();
while let Some(chunk) = stream.next().await {
data.extend_from_slice(&chunk.map_err(|e| FileHostingError::Other(e))?);
}
self.upload_file(content_type, file_name, data).await
}
*/
async fn delete_file_version(
&self,
file_id: &str,
file_name: &str,
) -> Result<DeleteFileData, FileHostingError> {
let delete_data =
delete::delete_file_version(&self.authorization_data, file_id, file_name).await?;
Ok(DeleteFileData {
file_id: delete_data.file_id,
file_name: delete_data.file_name,
})
}
}
#[cfg(test)]
mod tests {
use super::*;
use authorization::*;
use delete::*;
use upload::*;
#[actix_rt::test]
async fn test_authorization() {
println!("{}", dotenv::var("BACKBLAZE_BUCKET_ID").unwrap());
let authorization_data = authorize_account(
&dotenv::var("BACKBLAZE_KEY_ID").unwrap(),
&dotenv::var("BACKBLAZE_KEY").unwrap(),
)
.await
.unwrap();
get_upload_url(
&authorization_data,
&dotenv::var("BACKBLAZE_BUCKET_ID").unwrap(),
)
.await
.unwrap();
}
#[actix_rt::test]
async fn test_file_management() {
let authorization_data = authorize_account(
&dotenv::var("BACKBLAZE_KEY_ID").unwrap(),
&dotenv::var("BACKBLAZE_KEY").unwrap(),
)
.await
.unwrap();
let upload_url_data = get_upload_url(
&authorization_data,
&dotenv::var("BACKBLAZE_BUCKET_ID").unwrap(),
)
.await
.unwrap();
let upload_data = upload_file(
&upload_url_data,
"text/plain",
"test.txt",
"test file".to_string().into_bytes(),
)
.await
.unwrap();
delete_file_version(
&authorization_data,
&upload_data.file_id,
&upload_data.file_name,
)
.await
.unwrap();
}
}


@@ -30,10 +30,9 @@ pub struct UploadUrlData {
pub authorization_token: String,
}
#[cfg(feature = "backblaze")]
pub async fn authorize_account(
key_id: String,
application_key: String,
key_id: &str,
application_key: &str,
) -> Result<AuthorizationData, FileHostingError> {
let combined_key = format!("{}:{}", key_id, application_key);
let formatted_key = format!("Basic {}", base64::encode(combined_key));
@@ -52,17 +51,16 @@ pub async fn authorize_account(
}
}
#[cfg(feature = "backblaze")]
pub async fn get_upload_url(
authorization_data: AuthorizationData,
bucket_id: String,
authorization_data: &AuthorizationData,
bucket_id: &str,
) -> Result<UploadUrlData, FileHostingError> {
let response = reqwest::Client::new()
.post(&format!("{}/b2api/v2/b2_get_upload_url", authorization_data.api_url).to_string())
.header(reqwest::header::CONTENT_TYPE, "application/json")
.header(
reqwest::header::AUTHORIZATION,
authorization_data.authorization_token,
&authorization_data.authorization_token,
)
.body(
serde_json::json!({
@@ -79,50 +77,3 @@ pub async fn get_upload_url(
Err(FileHostingError::BackblazeError(response.json().await?))
}
}
#[cfg(not(feature = "backblaze"))]
pub async fn authorize_account(
_key_id: String,
_application_key: String,
) -> Result<AuthorizationData, FileHostingError> {
Ok(AuthorizationData {
absolute_minimum_part_size: 5000000,
account_id: String::from("MOCK_ACCOUNT_ID"),
allowed: AuthorizationPermissions {
bucket_id: None,
bucket_name: None,
capabilities: vec![
String::from("listKeys"),
String::from("writeKeys"),
String::from("deleteKeys"),
String::from("listAllBucketNames"),
String::from("listBuckets"),
String::from("writeBuckets"),
String::from("deleteBuckets"),
String::from("readBuckets"),
String::from("listFiles"),
String::from("readFiles"),
String::from("shareFiles"),
String::from("writeFiles"),
String::from("deleteFiles"),
],
name_prefix: None,
},
api_url: String::from("https://api.example.com"),
authorization_token: String::from("MOCK_AUTH_TOKEN"),
download_url: String::from("https://download.example.com"),
recommended_part_size: 100000000,
})
}
#[cfg(not(feature = "backblaze"))]
pub async fn get_upload_url(
_authorization_data: AuthorizationData,
_bucket_id: String,
) -> Result<UploadUrlData, FileHostingError> {
Ok(UploadUrlData {
bucket_id: String::from("MOCK_BUCKET_ID"),
upload_url: String::from("https://download.example.com"),
authorization_token: String::from("MOCK_AUTH_TOKEN"),
})
}


@@ -1,4 +1,5 @@
use crate::file_hosting::{AuthorizationData, FileHostingError};
use super::authorization::AuthorizationData;
use crate::file_hosting::FileHostingError;
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize, Debug, Clone)]
@@ -8,7 +9,6 @@ pub struct DeleteFileData {
pub file_name: String,
}
#[cfg(feature = "backblaze")]
pub async fn delete_file_version(
authorization_data: &AuthorizationData,
file_id: &str,
@@ -40,19 +40,3 @@ pub async fn delete_file_version(
Err(FileHostingError::BackblazeError(response.json().await?))
}
}
#[cfg(not(feature = "backblaze"))]
pub async fn delete_file_version(
_authorization_data: &AuthorizationData,
file_id: &str,
file_name: &str,
) -> Result<DeleteFileData, FileHostingError> {
let path = std::path::Path::new(&dotenv::var("MOCK_FILE_PATH").unwrap())
.join(file_name.replace("../", ""));
std::fs::remove_file(path)?;
Ok(DeleteFileData {
file_id: file_id.to_string(),
file_name: file_name.to_string(),
})
}


@@ -1,4 +1,4 @@
use crate::file_hosting::authorization::UploadUrlData;
use super::authorization::UploadUrlData;
use crate::file_hosting::FileHostingError;
use serde::{Deserialize, Serialize};
@@ -16,7 +16,6 @@ pub struct UploadFileData {
pub upload_timestamp: u64,
}
#[cfg(feature = "backblaze")]
//Content Types found here: https://www.backblaze.com/b2/docs/content-types.html
pub async fn upload_file(
url_data: &UploadUrlData,
@@ -47,29 +46,3 @@ pub async fn upload_file(
Err(FileHostingError::BackblazeError(response.json().await?))
}
}
#[cfg(not(feature = "backblaze"))]
pub async fn upload_file(
_url_data: &UploadUrlData,
content_type: &str,
file_name: &str,
file_bytes: Vec<u8>,
) -> Result<UploadFileData, FileHostingError> {
let path = std::path::Path::new(&dotenv::var("MOCK_FILE_PATH").unwrap())
.join(file_name.replace("../", ""));
std::fs::create_dir_all(path.parent().ok_or(FileHostingError::InvalidFilename)?)?;
let content_sha1 = sha1::Sha1::from(&file_bytes).hexdigest();
std::fs::write(path, &file_bytes)?;
Ok(UploadFileData {
file_id: String::from("MOCK_FILE_ID"),
file_name: file_name.to_string(),
account_id: String::from("MOCK_ACCOUNT_ID"),
bucket_id: String::from("MOCK_BUCKET_ID"),
content_length: file_bytes.len() as u32,
content_sha1,
content_md5: None,
content_type: content_type.to_string(),
upload_timestamp: chrono::Utc::now().timestamp_millis() as u64,
})
}

51
src/file_hosting/mock.rs Normal file

@@ -0,0 +1,51 @@
use super::{DeleteFileData, FileHost, FileHostingError, UploadFileData};
use async_trait::async_trait;
pub struct MockHost(());
impl MockHost {
pub fn new() -> Self {
MockHost(())
}
}
#[async_trait]
impl FileHost for MockHost {
async fn upload_file(
&self,
content_type: &str,
file_name: &str,
file_bytes: Vec<u8>,
) -> Result<UploadFileData, FileHostingError> {
let path = std::path::Path::new(&dotenv::var("MOCK_FILE_PATH").unwrap())
.join(file_name.replace("../", ""));
std::fs::create_dir_all(path.parent().ok_or(FileHostingError::InvalidFilename)?)?;
let content_sha1 = sha1::Sha1::from(&file_bytes).hexdigest();
std::fs::write(path, &file_bytes)?;
Ok(UploadFileData {
file_id: String::from("MOCK_FILE_ID"),
file_name: file_name.to_string(),
content_length: file_bytes.len() as u32,
content_sha1,
content_md5: None,
content_type: content_type.to_string(),
upload_timestamp: chrono::Utc::now().timestamp_millis() as u64,
})
}
async fn delete_file_version(
&self,
file_id: &str,
file_name: &str,
) -> Result<DeleteFileData, FileHostingError> {
let path = std::path::Path::new(&dotenv::var("MOCK_FILE_PATH").unwrap())
.join(file_name.replace("../", ""));
std::fs::remove_file(path)?;
Ok(DeleteFileData {
file_id: file_id.to_string(),
file_name: file_name.to_string(),
})
}
}


@@ -1,90 +1,53 @@
use async_trait::async_trait;
use thiserror::Error;
mod authorization;
mod delete;
mod upload;
mod backblaze;
mod mock;
pub use authorization::authorize_account;
pub use authorization::get_upload_url;
pub use authorization::AuthorizationData;
pub use authorization::AuthorizationPermissions;
pub use authorization::UploadUrlData;
pub use upload::upload_file;
pub use upload::UploadFileData;
pub use delete::delete_file_version;
pub use delete::DeleteFileData;
pub use backblaze::BackblazeHost;
pub use mock::MockHost;
#[derive(Error, Debug)]
pub enum FileHostingError {
#[cfg(feature = "backblaze")]
#[error("Error while accessing the data from backblaze")]
HttpError(#[from] reqwest::Error),
#[cfg(feature = "backblaze")]
#[error("Backblaze error: {0}")]
BackblazeError(serde_json::Value),
#[cfg(not(feature = "backblaze"))]
#[error("File system error in file hosting: {0}")]
FileSystemError(#[from] std::io::Error),
#[cfg(not(feature = "backblaze"))]
#[error("Invalid Filename")]
InvalidFilename,
}
#[cfg(test)]
mod tests {
use super::*;
#[actix_rt::test]
async fn test_authorization() {
println!("{}", dotenv::var("BACKBLAZE_BUCKET_ID").unwrap());
let authorization_data = authorize_account(
dotenv::var("BACKBLAZE_KEY_ID").unwrap(),
dotenv::var("BACKBLAZE_KEY").unwrap(),
)
.await
.unwrap();
get_upload_url(
authorization_data,
dotenv::var("BACKBLAZE_BUCKET_ID").unwrap(),
)
.await
.unwrap();
}
#[actix_rt::test]
async fn test_file_management() {
let authorization_data = authorize_account(
dotenv::var("BACKBLAZE_KEY_ID").unwrap(),
dotenv::var("BACKBLAZE_KEY").unwrap(),
)
.await
.unwrap();
let upload_url_data = get_upload_url(
authorization_data.clone(),
dotenv::var("BACKBLAZE_BUCKET_ID").unwrap(),
)
.await
.unwrap();
let upload_data = upload_file(
&upload_url_data,
"text/plain",
"test.txt",
"test file".to_string().into_bytes(),
)
.await
.unwrap();
delete_file_version(
&authorization_data,
&upload_data.file_id,
&upload_data.file_name,
)
.await
.unwrap();
}
#[derive(Debug, Clone)]
pub struct UploadFileData {
pub file_id: String,
pub file_name: String,
pub content_length: u32,
pub content_sha1: String,
pub content_md5: Option<String>,
pub content_type: String,
pub upload_timestamp: u64,
}
#[derive(Debug, Clone)]
pub struct DeleteFileData {
pub file_id: String,
pub file_name: String,
}
#[async_trait]
pub trait FileHost {
async fn upload_file(
&self,
content_type: &str,
file_name: &str,
file_bytes: Vec<u8>,
) -> Result<UploadFileData, FileHostingError>;
async fn delete_file_version(
&self,
file_id: &str,
file_name: &str,
) -> Result<DeleteFileData, FileHostingError>;
}


@@ -5,6 +5,7 @@ use env_logger::Env;
use log::info;
use std::env;
use std::fs::File;
use std::sync::Arc;
mod database;
mod file_hosting;
@@ -19,54 +20,57 @@ async fn main() -> std::io::Result<()> {
check_env_vars();
//Database Connecter
let client = database::connect()
// Database Connector
let pool = database::connect()
.await
.expect("Database connection failed");
let client_ref = client.clone();
let client_ref = pool.clone();
//File Hosting Initializer
let authorization_data = file_hosting::authorize_account(
dotenv::var("BACKBLAZE_KEY_ID").unwrap(),
dotenv::var("BACKBLAZE_KEY").unwrap(),
)
.await
.unwrap();
let upload_url_data = file_hosting::get_upload_url(
authorization_data.clone(),
dotenv::var("BACKBLAZE_BUCKET_ID").unwrap(),
)
.await
.unwrap();
let backblaze_enabled = dotenv::var("BACKBLAZE_ENABLED")
.ok()
.and_then(|s| s.parse::<bool>().ok())
.unwrap_or(false);
let file_host: Arc<dyn file_hosting::FileHost + Send + Sync> = if backblaze_enabled {
Arc::new(
file_hosting::BackblazeHost::new(
&dotenv::var("BACKBLAZE_KEY_ID").unwrap(),
&dotenv::var("BACKBLAZE_KEY").unwrap(),
&dotenv::var("BACKBLAZE_BUCKET_ID").unwrap(),
)
.await,
)
} else {
Arc::new(file_hosting::MockHost::new())
};
// Get executable path
let mut exe_path = env::current_exe()?.parent().unwrap().to_path_buf();
// Create the path to the index lock file
exe_path.push("index.v1.lock");
//Indexing mods if not already done
// Indexing mods if not already done
if env::args().any(|x| x == "regen") {
// User forced regen of indexing
info!("Forced regeneration of indexes!");
index_mods(client).await.expect("Mod indexing failed");
index_mods(pool).await.expect("Mod indexing failed");
} else if !exe_path.exists() {
// The indexes were not created, or the version was upgraded
info!("Indexing of mods for first time...");
index_mods(client).await.expect("Mod indexing failed");
index_mods(pool).await.expect("Mod indexing failed");
// Create the lock file
File::create(exe_path)?;
}
info!("Starting Actix HTTP server!");
//Init App
// Init App
HttpServer::new(move || {
App::new()
.wrap(Logger::default())
.wrap(Logger::new("%a %{User-Agent}i"))
.data(client_ref.clone())
.data(authorization_data.clone())
.data(upload_url_data.clone())
.data(file_host.clone())
.service(routes::index_get)
.service(routes::mod_search)
.service(routes::mod_create)
@@ -93,11 +97,17 @@ fn check_env_vars() {
}
}
check_var::<bool>("INDEX_CURSEFORGE");
check_var::<String>("MONGODB_ADDR");
check_var::<String>("DATABASE_URL");
check_var::<String>("MEILISEARCH_ADDR");
check_var::<String>("BIND_ADDR");
check_var::<String>("BACKBLAZE_KEY_ID");
check_var::<String>("BACKBLAZE_KEY");
check_var::<String>("BACKBLAZE_BUCKET_ID");
if dotenv::var("BACKBLAZE_ENABLED")
.ok()
.and_then(|s| s.parse::<bool>().ok())
.unwrap_or(false)
{
check_var::<String>("BACKBLAZE_KEY_ID");
check_var::<String>("BACKBLAZE_KEY");
check_var::<String>("BACKBLAZE_BUCKET_ID");
}
}


@@ -100,6 +100,7 @@ pub struct FileHash {
}
#[derive(Serialize, Deserialize, Clone)]
#[serde(rename_all = "lowercase")]
pub enum VersionType {
Release,
Beta,


@@ -1,55 +1,51 @@
use crate::database::models::{FileHash, Mod, Team, Version, VersionFile};
use crate::file_hosting::{upload_file, FileHostingError, UploadUrlData};
use crate::database::models;
use crate::file_hosting::{FileHost, FileHostingError};
use crate::models::error::ApiError;
use crate::models::ids::random_base62;
use crate::models::mods::{GameVersion, ModId, VersionId, VersionType};
use crate::models::teams::TeamMember;
use actix_multipart::{Field, Multipart};
use actix_web::http::StatusCode;
use actix_web::web::Data;
use actix_web::{post, HttpResponse};
use bson::doc;
use bson::Bson;
use chrono::Utc;
use futures::stream::StreamExt;
use mongodb::Client;
use serde::{Deserialize, Serialize};
use sqlx::postgres::PgPool;
use thiserror::Error;
#[derive(Error, Debug)]
pub enum CreateError {
#[error("Environment Error")]
EnvError(#[from] dotenv::Error),
#[error("Error while adding project to database")]
DatabaseError(#[from] mongodb::error::Error),
#[error("An unknown database error occured")]
SqlxDatabaseError(#[from] sqlx::Error),
#[error("Database Error: {0}")]
DatabaseError(#[from] models::DatabaseError),
#[error("Error while parsing multipart payload")]
MultipartError(actix_multipart::MultipartError),
#[error("Error while parsing JSON")]
#[error("Error while parsing JSON: {0}")]
SerDeError(#[from] serde_json::Error),
#[error("Error while serializing BSON")]
BsonError(#[from] bson::ser::Error),
#[error("Error while uploading file")]
FileHostingError(#[from] FileHostingError),
#[error("{}", .0)]
MissingValueError(String),
#[error("Error while trying to generate random ID")]
RandomIdError,
#[error("Invalid format for mod icon: {0}")]
InvalidIconFormat(String),
#[error("Error with multipart data: {0}")]
InvalidInput(String),
}
impl actix_web::ResponseError for CreateError {
fn status_code(&self) -> StatusCode {
match self {
CreateError::EnvError(..) => StatusCode::INTERNAL_SERVER_ERROR,
CreateError::SqlxDatabaseError(..) => StatusCode::INTERNAL_SERVER_ERROR,
CreateError::DatabaseError(..) => StatusCode::INTERNAL_SERVER_ERROR,
CreateError::FileHostingError(..) => StatusCode::INTERNAL_SERVER_ERROR,
CreateError::BsonError(..) => StatusCode::INTERNAL_SERVER_ERROR,
CreateError::SerDeError(..) => StatusCode::BAD_REQUEST,
CreateError::MultipartError(..) => StatusCode::BAD_REQUEST,
CreateError::MissingValueError(..) => StatusCode::BAD_REQUEST,
CreateError::InvalidIconFormat(..) => StatusCode::BAD_REQUEST,
CreateError::RandomIdError => StatusCode::INTERNAL_SERVER_ERROR,
CreateError::InvalidInput(..) => StatusCode::BAD_REQUEST,
}
}
@@ -57,14 +53,14 @@ impl actix_web::ResponseError for CreateError {
HttpResponse::build(self.status_code()).json(ApiError {
error: match self {
CreateError::EnvError(..) => "environment_error",
CreateError::SqlxDatabaseError(..) => "database_error",
CreateError::DatabaseError(..) => "database_error",
CreateError::FileHostingError(..) => "file_hosting_error",
CreateError::BsonError(..) => "database_error",
CreateError::SerDeError(..) => "invalid_input",
CreateError::MultipartError(..) => "invalid_input",
CreateError::MissingValueError(..) => "invalid_input",
CreateError::RandomIdError => "id_generation_error",
CreateError::InvalidIconFormat(..) => "invalid_input",
CreateError::InvalidInput(..) => "invalid_input",
},
description: &self.to_string(),
})
@@ -73,13 +69,13 @@ impl actix_web::ResponseError for CreateError {
#[derive(Serialize, Deserialize, Clone)]
struct InitialVersionData {
pub file_indexes: Vec<i32>,
pub file_parts: Vec<String>,
pub version_number: String,
pub version_title: String,
pub version_body: String,
pub dependencies: Vec<VersionId>,
pub game_versions: Vec<GameVersion>,
pub version_type: VersionType,
pub release_channel: VersionType,
pub loaders: Vec<String>,
}
@@ -107,295 +103,320 @@ struct ModCreateData {
pub wiki_url: Option<String>,
}
struct UploadedFile {
file_id: String,
file_name: String,
}
async fn undo_uploads(
file_host: &dyn FileHost,
uploaded_files: &[UploadedFile],
) -> Result<(), CreateError> {
for file in uploaded_files {
file_host
.delete_file_version(&file.file_id, &file.file_name)
.await?;
}
Ok(())
}
#[post("api/v1/mod")]
pub async fn mod_create(
mut payload: Multipart,
client: Data<Client>,
upload_url: Data<UploadUrlData>,
payload: Multipart,
client: Data<PgPool>,
file_host: Data<std::sync::Arc<dyn FileHost + Send + Sync>>,
) -> Result<HttpResponse, CreateError> {
//TODO Switch to transactions for safer database and file upload calls (once it is implemented in the APIs)
let cdn_url = dotenv::var("CDN_URL")?;
let mut transaction = client.begin().await?;
let mut uploaded_files = Vec::new();
let db = client.database("modrinth");
let result = mod_create_inner(
payload,
&mut transaction,
&***file_host,
&mut uploaded_files,
)
.await;
let mods = db.collection("mods");
let versions = db.collection("versions");
if result.is_err() {
let undo_result = undo_uploads(&***file_host, &uploaded_files).await;
let rollback_result = transaction.rollback().await;
let mut mod_id = ModId(random_base62(8));
let mut retry_count = 0;
//Check if ID is unique
loop {
let filter = doc! { "_id": mod_id.0 };
if mods.find(filter, None).await?.next().await.is_some() {
mod_id = ModId(random_base62(8));
} else {
break;
if let Err(e) = undo_result {
return Err(e);
}
retry_count += 1;
if retry_count > 20 {
return Err(CreateError::RandomIdError);
if let Err(e) = rollback_result {
return Err(e.into());
}
} else {
transaction.commit().await?;
}
let mut created_versions: Vec<Version> = vec![];
result
}
async fn mod_create_inner(
mut payload: Multipart,
transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
file_host: &dyn FileHost,
uploaded_files: &mut Vec<UploadedFile>,
) -> Result<HttpResponse, CreateError> {
let cdn_url = dotenv::var("CDN_URL")?;
let mod_id = models::generate_mod_id(transaction).await?.into();
let mut created_versions: Vec<models::version_item::VersionBuilder> = vec![];
let mut mod_create_data: Option<ModCreateData> = None;
let mut icon_url = "".to_string();
let mut current_file_index = 0;
while let Some(item) = payload.next().await {
let mut field: Field = item.map_err(CreateError::MultipartError)?;
let content_disposition = field.content_disposition().ok_or_else(|| {
CreateError::MissingValueError("Missing content disposition!".to_string())
CreateError::MissingValueError("Missing content disposition".to_string())
})?;
let name = content_disposition
.get_name()
.ok_or_else(|| CreateError::MissingValueError("Missing content name!".to_string()))?;
.ok_or_else(|| CreateError::MissingValueError("Missing content name".to_string()))?;
if name == "data" {
let mut data = Vec::new();
while let Some(chunk) = field.next().await {
data.extend_from_slice(&chunk.map_err(CreateError::MultipartError)?);
}
mod_create_data = Some(serde_json::from_slice(&data)?);
continue;
}
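// Every other field is treated as a file upload, so the JSON `data` field
// must arrive before any of the files it describes.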
let file_name = content_disposition.get_filename().ok_or_else(|| {
CreateError::MissingValueError("Missing content file name".to_string())
})?;
let file_extension = if let Some(last_period) = file_name.rfind('.') {
file_name.get((last_period + 1)..).unwrap_or("")
} else {
return Err(CreateError::MissingValueError(
"Missing content file extension".to_string(),
));
};
let document = bson::to_bson(&created_mod)?
.as_document()
.ok_or_else(|| {
CreateError::MissingValueError(
"No document present for database entry!".to_string(),
)
})?
.clone();
if name == "icon" {
icon_url = process_icon_upload(
uploaded_files,
mod_id,
file_name,
file_extension,
file_host,
field,
&cdn_url,
)
.await?;
continue;
}
if &*file_extension == "jar" {
let create_data = mod_create_data.as_ref().ok_or_else(|| {
CreateError::InvalidInput(String::from("`data` field must come before file fields"))
})?;
let version_data = create_data
.initial_versions
.iter()
.find(|x| x.file_parts.iter().any(|n| n == name))
.ok_or_else(|| {
CreateError::InvalidInput(format!(
"Jar file `{}` (field {}) isn't specified in the versions data",
file_name, name
))
})?;
// If a version has already been created for this version number, add
// the file to it instead of creating a new version.
let created_version = if let Some(created_version) = created_versions
.iter_mut()
.find(|x| x.version_number == version_data.version_number)
{
created_version
} else {
let version_id: VersionId = models::generate_version_id(transaction).await?.into();
let body_url = format!("data/{}/changelogs/{}/body.md", mod_id, version_id);
let uploaded_text = file_host
.upload_file(
"text/plain",
&body_url,
version_data.version_body.clone().into_bytes(),
)
.await?;
uploaded_files.push(UploadedFile {
file_id: uploaded_text.file_id.clone(),
file_name: uploaded_text.file_name.clone(),
});
// TODO: do a real lookup for the channels
let release_channel = match version_data.release_channel {
VersionType::Release => models::ChannelId(0),
VersionType::Beta => models::ChannelId(2),
VersionType::Alpha => models::ChannelId(4),
};
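// These hardcoded ids presumably correspond to the default release
// channels seeded by the database migrations.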
let version = models::version_item::VersionBuilder {
version_id: version_id.into(),
mod_id: mod_id.into(),
name: version_data.version_title.clone(),
version_number: version_data.version_number.clone(),
changelog_url: Some(format!("{}/{}", cdn_url, body_url)),
files: Vec::with_capacity(1),
dependencies: version_data
.dependencies
.iter()
.map(|x| (*x).into())
.collect::<Vec<_>>(),
// TODO: add game_versions and loaders info
game_versions: vec![],
loaders: vec![],
release_channel,
};
created_versions.push(version);
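// `last_mut().unwrap()` cannot panic: a version was pushed just above.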
created_versions.last_mut().unwrap()
};
// Upload the new jar file
let mut data = Vec::new();
while let Some(chunk) = field.next().await {
data.extend_from_slice(&chunk.map_err(CreateError::MultipartError)?);
}
let upload_data = file_host
.upload_file(
"application/java-archive",
&format!(
"{}/{}/{}",
create_data.mod_namespace.replace(".", "/"),
version_data.version_number,
file_name
),
data.to_vec(),
)
.await?;
uploaded_files.push(UploadedFile {
file_id: upload_data.file_id.clone(),
file_name: upload_data.file_name.clone(),
});
// Add the newly uploaded file to the existing or new version
// TODO: Malware scan + file validation
created_version
.files
.push(models::version_item::VersionFileBuilder {
filename: file_name.to_string(),
url: format!("{}/{}", cdn_url, upload_data.file_name),
hashes: vec![models::version_item::HashBuilder {
algorithm: "sha1".to_string(),
// Known issue: the database expects the hash's raw bytes, but this
// stores the UTF-8 bytes of the hex string instead.
hash: upload_data.content_sha1.into_bytes(),
}],
});
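// A plausible fix (an assumption, not part of this commit) would be to
// decode the hex digest into raw bytes before storing it, e.g. with the
// `hex` crate: `hex::decode(&upload_data.content_sha1)`.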
}
}
let create_data = if let Some(create_data) = mod_create_data {
create_data
} else {
return Err(CreateError::InvalidInput(String::from(
"Multipart upload missing `data` field",
)));
};
let body_url = format!("data/{}/body.md", mod_id);
let upload_data = file_host
.upload_file("text/plain", &body_url, create_data.mod_body.into_bytes())
.await?;
uploaded_files.push(UploadedFile {
file_id: upload_data.file_id.clone(),
file_name: upload_data.file_name.clone(),
});
let team = models::team_item::TeamBuilder {
members: create_data
.team_members
.into_iter()
.map(|member| models::team_item::TeamMemberBuilder {
user_id: member.user_id.into(),
name: member.name,
role: member.role,
})
.collect(),
};
let team_id = team.insert(&mut *transaction).await?;
// Insert the new mod into the database
let mod_builder = models::mod_item::ModBuilder {
mod_id: mod_id.into(),
team_id,
title: create_data.mod_name,
description: create_data.mod_description,
body_url: format!("{}/{}", cdn_url, body_url),
icon_url: Some(icon_url),
issues_url: create_data.issues_url,
source_url: create_data.source_url,
wiki_url: create_data.wiki_url,
// TODO: convert `create_data.categories` from Vec<String> to Vec<CategoryId>
categories: Vec::new(),
initial_versions: created_versions,
};
let _mod_id = mod_builder.insert(&mut *transaction).await?;
// TODO: respond with the new mod info, or with just the new mod id.
Ok(HttpResponse::Ok().into())
}
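// The sketch below is illustrative only, not code from this commit: it shows
// one plausible way for a route handler to drive `mod_create_inner`: open a
// transaction, run the inner function, then commit on success or delete the
// already-uploaded CDN files and roll back on failure. The handler name, the
// extractor types, the `delete_file_version` method, and a
// `From<sqlx::Error>` impl for `CreateError` are all assumptions here.
async fn mod_create(
payload: Multipart,
pool: actix_web::web::Data<sqlx::PgPool>,
file_host: actix_web::web::Data<std::sync::Arc<dyn FileHost + Send + Sync>>,
) -> Result<HttpResponse, CreateError> {
let mut transaction = pool.begin().await?;
let mut uploaded_files = Vec::new();
let result = mod_create_inner(payload, &mut transaction, &**file_host, &mut uploaded_files).await;
if result.is_err() {
// Clean up: remove any files already pushed to the CDN, then roll back.
for file in &uploaded_files {
file_host
.delete_file_version(&file.file_id, &file.file_name)
.await?;
}
transaction.rollback().await?;
} else {
transaction.commit().await?;
}
result
}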
async fn process_icon_upload(
uploaded_files: &mut Vec<UploadedFile>,
mod_id: ModId,
file_name: &str,
file_extension: &str,
file_host: &dyn FileHost,
mut field: actix_multipart::Field,
cdn_url: &str,
) -> Result<String, CreateError> {
if let Some(content_type) = get_image_content_type(file_extension) {
let mut data = Vec::new();
while let Some(chunk) = field.next().await {
data.extend_from_slice(&chunk.map_err(CreateError::MultipartError)?);
}
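// Multipart fields arrive as a stream of chunks; the icon is buffered
// fully in memory before being handed to the file host.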
let upload_data = file_host
.upload_file(
content_type,
&format!("mods/icons/{}/{}", mod_id, file_name),
data,
)
.await?;
uploaded_files.push(UploadedFile {
file_id: upload_data.file_id.clone(),
file_name: upload_data.file_name.clone(),
});
Ok(format!("{}/{}", cdn_url, upload_data.file_name))
} else {
Err(CreateError::InvalidIconFormat(file_extension.to_string()))
}
}
fn get_image_content_type(extension: &str) -> Option<&'static str> {
let content_type = match &*extension {
"bmp" => "image/bmp",

View File

@@ -178,7 +178,7 @@ pub async fn index_curseforge(
.replace("/256/256/", "/64/64/");
docs_to_add.push(SearchMod {
mod_id: -curseforge_mod.id as i64,
author: (&curseforge_mod.authors[0].name).to_string(),
title: curseforge_mod.name,
description: curseforge_mod.summary.chars().take(150).collect(),

View File

@@ -1,70 +1,77 @@
use futures::{StreamExt, TryStreamExt};
use log::info;
use super::IndexingError;
use crate::search::SearchMod;
use sqlx::postgres::PgPool;
pub async fn index_local(pool: PgPool) -> Result<Vec<SearchMod>, IndexingError> {
info!("Indexing local mods!");
let mut docs_to_add: Vec<SearchMod> = vec![];
let db = client.database("modrinth");
let mut results = sqlx::query!(
"
SELECT m.id, m.title, m.description, m.downloads, m.icon_url, m.body_url, m.published FROM mods m
"
)
.fetch(&pool);
let mods = db.collection("mods");
let versions = db.collection("versions");
while let Some(result) = results.next().await {
if let Ok(result) = result {
let versions: Vec<String> = sqlx::query!(
"
SELECT gv.version FROM versions
INNER JOIN game_versions_versions gvv ON gvv.joining_version_id=versions.id
INNER JOIN game_versions gv ON gvv.game_version_id=gv.id
WHERE versions.mod_id = $1
",
result.id
)
.fetch_many(&pool)
.try_filter_map(|e| async { Ok(e.right().map(|c| c.version)) })
.try_collect::<Vec<String>>()
.await?;
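// `fetch_many` yields a stream of `Either` values (statement results on
// the left, rows on the right), so `e.right()` keeps only the rows; the
// categories query below uses the same idiom.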
let categories = sqlx::query!(
"
SELECT c.category
FROM mods_categories mc
INNER JOIN categories c ON mc.joining_category_id=c.id
WHERE mc.joining_mod_id = $1
",
result.id
)
.fetch_many(&pool)
.try_filter_map(|e| async { Ok(e.right().map(|c| c.category)) })
.try_collect::<Vec<String>>()
.await?;
let mut icon_url = "".to_string();
if let Some(url) = result.icon_url {
icon_url = url;
}
docs_to_add.push(SearchMod {
mod_id: result.id,
author: "".to_string(),
title: result.title,
description: result.description,
keywords: categories,
versions,
downloads: result.downloads,
page_url: result.body_url,
icon_url,
author_url: "".to_string(),
date_created: result.published.to_string(),
created: 0,
date_modified: "".to_string(),
updated: 0,
latest_version: "".to_string(),
empty: String::from("{}{}{}"),
});
}
let mut icon_url = "".to_string();
if let Some(url) = result.icon_url {
icon_url = url;
}
docs_to_add.push(SearchMod {
mod_id: result.id,
author: "".to_string(),
title: result.title,
description: result.description,
keywords: result.categories,
versions: mod_game_versions,
downloads: result.downloads,
page_url: "".to_string(),
icon_url,
author_url: "".to_string(),
date_created: "".to_string(),
created: 0,
date_modified: "".to_string(),
updated: 0,
latest_version: "".to_string(),
empty: String::from("{}{}{}"),
});
}
Ok(docs_to_add)
}

View File

@@ -7,6 +7,7 @@ use crate::search::indexing::local_import::index_local;
use crate::search::SearchMod;
use meilisearch_sdk::client::Client;
use meilisearch_sdk::settings::Settings;
use sqlx::postgres::PgPool;
use std::collections::{HashMap, VecDeque};
use thiserror::Error;
@@ -21,7 +22,7 @@ pub enum IndexingError {
#[error("Error while parsing a timestamp: {0}")]
ParseDateError(#[from] chrono::format::ParseError),
#[error("Database Error: {0}")]
DatabaseError(#[from] sqlx::error::Error),
#[error("Environment Error")]
EnvError(#[from] dotenv::Error),
}
@@ -31,14 +32,14 @@ pub enum IndexingError {
// assumes a max average size of 1KiB per mod to avoid this cap.
const MEILISEARCH_CHUNK_SIZE: usize = 10000;
pub async fn index_mods(pool: PgPool) -> Result<(), IndexingError> {
// Check if the index exists
let address = &*dotenv::var("MEILISEARCH_ADDR")?;
let client = Client::new(address, "");
let mut docs_to_add: Vec<SearchMod> = vec![];
docs_to_add.append(&mut index_local(db.clone()).await?);
docs_to_add.append(&mut index_local(pool.clone()).await?);
if dotenv::var("INDEX_CURSEFORGE")?
.parse()
.expect("`INDEX_CURSEFORGE` is not a boolean.")

View File

@@ -47,7 +47,7 @@ impl actix_web::ResponseError for SearchError {
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct SearchMod {
pub mod_id: i64,
pub author: String,
pub title: String,
pub description: String,
@@ -66,7 +66,7 @@ pub struct SearchMod {
}
impl Document for SearchMod {
type UIDType = i64;
fn get_uid(&self) -> &Self::UIDType {
&self.mod_id