move to monorepo dir

This commit is contained in:
Jai A
2024-10-16 14:11:42 -07:00
parent ff7975773e
commit e3a3379615
756 changed files with 0 additions and 0 deletions

View File

@@ -0,0 +1,84 @@
use actix_web::test::TestRequest;
use bytes::{Bytes, BytesMut};
// Multipart functionality for actix
// Primarily for testing or some implementations of route-redirection
// (actix-test does not innately support multipart)
/// One part of a `multipart/form-data` body (a form field or an uploaded file).
#[derive(Debug, Clone)]
pub struct MultipartSegment {
    /// Form field name (Content-Disposition `name` parameter).
    pub name: String,
    /// Optional file name (Content-Disposition `filename` parameter).
    pub filename: Option<String>,
    /// Optional per-part `Content-Type` header value.
    pub content_type: Option<String>,
    /// The part's payload.
    pub data: MultipartSegmentData,
}

/// Payload of a multipart segment: UTF-8 text or raw bytes.
#[derive(Debug, Clone)]
#[allow(dead_code)]
pub enum MultipartSegmentData {
    Text(String),
    Binary(Vec<u8>),
}

/// Extension trait adding multipart-body support to actix's `TestRequest`.
pub trait AppendsMultipart {
    fn set_multipart(self, data: impl IntoIterator<Item = MultipartSegment>) -> Self;
}
impl AppendsMultipart for TestRequest {
    /// Encodes `data` as a multipart body and attaches it to the request,
    /// together with a matching `Content-Type: multipart/form-data` header.
    fn set_multipart(self, data: impl IntoIterator<Item = MultipartSegment>) -> Self {
        let (boundary, payload) = generate_multipart(data);
        let header_value = format!("multipart/form-data; boundary={}", boundary);
        self.append_header(("Content-Type", header_value))
            .set_payload(payload)
    }
}
/// Builds a `multipart/form-data` body from `data`.
///
/// Returns the randomly generated boundary string together with the encoded
/// payload; the boundary must be echoed in the request's Content-Type header.
pub fn generate_multipart(data: impl IntoIterator<Item = MultipartSegment>) -> (String, Bytes) {
    // Three random u64s make accidental collisions between the boundary and
    // payload content effectively impossible.
    let mut boundary: String = String::from("----WebKitFormBoundary");
    boundary.push_str(&rand::random::<u64>().to_string());
    boundary.push_str(&rand::random::<u64>().to_string());
    boundary.push_str(&rand::random::<u64>().to_string());

    let mut payload = BytesMut::new();

    for segment in data {
        payload.extend_from_slice(
            format!(
                "--{boundary}\r\nContent-Disposition: form-data; name=\"{name}\"",
                boundary = boundary,
                name = segment.name
            )
            .as_bytes(),
        );

        if let Some(filename) = &segment.filename {
            // BUG FIX: previously the format string contained a literal
            // placeholder instead of interpolating `filename`, so the actual
            // file name was never emitted.
            payload.extend_from_slice(
                format!("; filename=\"{filename}\"", filename = filename).as_bytes(),
            );
        }
        if let Some(content_type) = &segment.content_type {
            payload.extend_from_slice(
                format!(
                    "\r\nContent-Type: {content_type}",
                    content_type = content_type
                )
                .as_bytes(),
            );
        }
        // Blank line separates the part's headers from its body.
        payload.extend_from_slice(b"\r\n\r\n");

        match &segment.data {
            MultipartSegmentData::Text(text) => {
                payload.extend_from_slice(text.as_bytes());
            }
            MultipartSegmentData::Binary(binary) => {
                payload.extend_from_slice(binary);
            }
        }
        payload.extend_from_slice(b"\r\n");
    }
    // Closing boundary marker terminates the body.
    payload.extend_from_slice(format!("--{boundary}--\r\n", boundary = boundary).as_bytes());

    (boundary, Bytes::from(payload))
}

View File

@@ -0,0 +1,18 @@
/// Implements `serde::Serialize`/`Deserialize` for a `bitflags!` type as its
/// raw integer value.
///
/// `$type` is the bitflags struct and `$int_type` its backing integer type.
/// Unknown bits are silently dropped on deserialization via
/// `from_bits_truncate`.
#[macro_export]
macro_rules! bitflags_serde_impl {
    ($type:ident, $int_type:ident) => {
        impl serde::Serialize for $type {
            fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
                serializer.serialize_i64(self.bits() as i64)
            }
        }

        impl<'de> serde::Deserialize<'de> for $type {
            fn deserialize<D: serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
                // Fully qualified so expansion sites do not need
                // `use serde::Deserialize;` in scope (macro hygiene fix).
                let v: i64 = serde::Deserialize::deserialize(deserializer)?;
                Ok($type::from_bits_truncate(v as $int_type))
            }
        }
    };
}

View File

@@ -0,0 +1,41 @@
use crate::routes::ApiError;
use crate::util::env::parse_var;
use actix_web::HttpRequest;
use serde::Deserialize;
use serde_json::json;
/// Verifies a Cloudflare Turnstile `challenge` token against the siteverify
/// endpoint, returning whether the captcha check succeeded.
pub async fn check_turnstile_captcha(req: &HttpRequest, challenge: &str) -> Result<bool, ApiError> {
    #[derive(Deserialize)]
    struct Response {
        success: bool,
    }

    let conn_info = req.connection_info().clone();

    // Behind Cloudflare the real client IP arrives in CF-Connecting-IP;
    // otherwise fall back to the connection's peer address.
    let ip_addr = match req.headers().get("CF-Connecting-IP") {
        Some(header) if parse_var("CLOUDFLARE_INTEGRATION").unwrap_or(false) => {
            header.to_str().ok()
        }
        _ => conn_info.peer_addr(),
    };

    let body = json!({
        "secret": dotenvy::var("TURNSTILE_SECRET")?,
        "response": challenge,
        "remoteip": ip_addr,
    });

    let val: Response = reqwest::Client::new()
        .post("https://challenges.cloudflare.com/turnstile/v0/siteverify")
        .json(&body)
        .send()
        .await
        .map_err(|_| ApiError::Turnstile)?
        .json()
        .await
        .map_err(|_| ApiError::Turnstile)?;

    Ok(val.success)
}

View File

@@ -0,0 +1,10 @@
use actix_cors::Cors;
/// Permissive CORS policy shared by API routes: any origin, method, and
/// header, with preflight results cached for one hour.
pub fn default_cors() -> Cors {
    let policy = Cors::default();
    policy
        .allow_any_origin()
        .allow_any_method()
        .allow_any_header()
        .send_wildcard()
        .max_age(3600)
}

View File

@@ -0,0 +1,9 @@
use chrono::Utc;
// this converts timestamps to the timestamp format clickhouse requires/uses
/// Current UTC time expressed in tenths of a millisecond — the timestamp
/// format ClickHouse requires/uses.
pub fn get_current_tenths_of_ms() -> i64 {
    let nanos = Utc::now()
        .timestamp_nanos_opt()
        .expect("value can not be represented in a timestamp with nanosecond precision.");
    // One tenth of a millisecond is 100_000 ns.
    nanos / 100_000
}

View File

@@ -0,0 +1,10 @@
use std::str::FromStr;
/// Reads environment variable `var` (via dotenvy) and parses it into `T`,
/// yielding `None` when it is unset or unparsable.
pub fn parse_var<T: FromStr>(var: &'static str) -> Option<T> {
    let raw = dotenvy::var(var).ok()?;
    raw.parse().ok()
}
/// Reads environment variable `var` and parses it as a JSON array of strings,
/// yielding `None` when unset or malformed.
pub fn parse_strings_from_var(var: &'static str) -> Option<Vec<String>> {
    let raw = dotenvy::var(var).ok()?;
    serde_json::from_str::<Vec<String>>(&raw).ok()
}

View File

@@ -0,0 +1,30 @@
/// Maps a supported image file extension to its MIME type.
pub fn get_image_content_type(extension: &str) -> Option<&'static str> {
    let mime = match extension {
        "bmp" => "image/bmp",
        "gif" => "image/gif",
        "jpeg" | "jpg" => "image/jpeg",
        "png" => "image/png",
        "webp" => "image/webp",
        _ => return None,
    };
    Some(mime)
}
/// Maps a supported image MIME type back to its canonical file extension.
pub fn get_image_ext(content_type: &str) -> Option<&'static str> {
    // Table-driven lookup; "image/jpeg" deliberately maps to "jpg".
    const EXTENSIONS: [(&str, &str); 5] = [
        ("image/bmp", "bmp"),
        ("image/gif", "gif"),
        ("image/jpeg", "jpg"),
        ("image/png", "png"),
        ("image/webp", "webp"),
    ];
    EXTENSIONS
        .iter()
        .find(|(mime, _)| *mime == content_type)
        .map(|(_, ext)| *ext)
}
/// Maps a project file extension to its upload content type, or `None` when
/// the extension is not an accepted project file format.
pub fn project_file_type(ext: &str) -> Option<&str> {
    if ext == "jar" {
        return Some("application/java-archive");
    }
    if ext == "zip" || ext == "litemod" {
        return Some("application/zip");
    }
    if ext == "mrpack" {
        return Some("application/x-modrinth-modpack+zip");
    }
    None
}

View File

@@ -0,0 +1,11 @@
use actix_web::guard::GuardContext;
pub const ADMIN_KEY_HEADER: &str = "Modrinth-Admin";
/// Actix route guard admitting a request only when its `Modrinth-Admin`
/// header exactly matches the `LABRINTH_ADMIN_KEY` environment variable.
pub fn admin_key_guard(ctx: &GuardContext) -> bool {
    let admin_key = std::env::var("LABRINTH_ADMIN_KEY")
        .expect("No admin key provided, this should have been caught by check_env_vars");
    match ctx.head().headers().get(ADMIN_KEY_HEADER) {
        // Byte comparison avoids any UTF-8 validation of the header value.
        Some(value) => value.as_bytes() == admin_key.as_bytes(),
        None => false,
    }
}

View File

@@ -0,0 +1,202 @@
use crate::database;
use crate::database::models::image_item;
use crate::database::redis::RedisPool;
use crate::file_hosting::FileHost;
use crate::models::images::ImageContext;
use crate::routes::ApiError;
use color_thief::ColorFormat;
use image::imageops::FilterType;
use image::{DynamicImage, EncodableLayout, GenericImageView, ImageError, ImageOutputFormat};
use std::io::Cursor;
use webp::Encoder;
/// Samples a dominant color from raw image bytes, returned as 0xRRGGBB.
///
/// The image is downscaled to 256x256 and the central 64x64 patch is handed
/// to color-thief; `None` when no palette could be extracted.
pub fn get_color_from_img(data: &[u8]) -> Result<Option<u32>, ImageError> {
    let sample = image::load_from_memory(data)?
        .resize(256, 256, FilterType::Nearest)
        .crop_imm(128, 128, 64, 64);
    let palette = color_thief::get_palette(sample.to_rgb8().as_bytes(), ColorFormat::Rgb, 10, 2);
    let dominant = palette
        .ok()
        .and_then(|colors| colors.first().copied())
        .map(|c| ((c.r as u32) << 16) | ((c.g as u32) << 8) | (c.b as u32));
    Ok(dominant)
}
/// Result of `upload_image_optimized`: CDN locations of the served (possibly
/// optimized) image and of the untouched original.
pub struct UploadImageResult {
    /// Full CDN URL of the optimized variant (or the raw file if no smaller
    /// variant was produced).
    pub url: String,
    /// CDN-relative path of the served file.
    pub url_path: String,
    /// Full CDN URL of the unmodified original upload.
    pub raw_url: String,
    /// CDN-relative path of the unmodified original upload.
    pub raw_url_path: String,
    /// Dominant color sampled from the image, as 0xRRGGBB, if extractable.
    pub color: Option<u32>,
}
/// Uploads an image to the CDN, plus an optimized (WebP, resized/cropped)
/// variant when that variant is actually smaller than the original.
///
/// * `upload_folder` — CDN folder prefix for both files.
/// * `bytes` — raw image data.
/// * `file_extension` — extension of the original upload; must be a
///   supported image format or the call fails with `InvalidInput`.
/// * `target_width` — optional maximum width for the optimized variant.
/// * `min_aspect_ratio` — optional minimum width/height ratio; taller images
///   are center-cropped to meet it.
pub async fn upload_image_optimized(
    upload_folder: &str,
    bytes: bytes::Bytes,
    file_extension: &str,
    target_width: Option<u32>,
    min_aspect_ratio: Option<f32>,
    file_host: &dyn FileHost,
) -> Result<UploadImageResult, ApiError> {
    let content_type =
        crate::util::ext::get_image_content_type(file_extension).ok_or_else(|| {
            ApiError::InvalidInput(format!("Invalid format for image: {}", file_extension))
        })?;
    let cdn_url = dotenvy::var("CDN_URL")?;
    // Content-addressed file name: SHA-1 of the original bytes.
    let hash = sha1::Sha1::from(&bytes).hexdigest();

    let (processed_image, processed_image_ext) =
        process_image(bytes.clone(), content_type, target_width, min_aspect_ratio)?;
    let color = get_color_from_img(&bytes)?;

    // Only upload the processed image if it's smaller than the original
    let processed_upload_data = if processed_image.len() < bytes.len() {
        Some(
            file_host
                .upload_file(
                    content_type,
                    &format!(
                        "{}/{}_{}.{}",
                        upload_folder,
                        hash,
                        target_width.unwrap_or(0),
                        processed_image_ext
                    ),
                    processed_image,
                )
                .await?,
        )
    } else {
        None
    };

    // The untouched original is always uploaded.
    let upload_data = file_host
        .upload_file(
            content_type,
            &format!("{}/{}.{}", upload_folder, hash, file_extension),
            bytes,
        )
        .await?;

    let url = format!("{}/{}", cdn_url, upload_data.file_name);
    Ok(UploadImageResult {
        // Serve the optimized variant when present; otherwise the raw file.
        url: processed_upload_data
            .clone()
            .map(|x| format!("{}/{}", cdn_url, x.file_name))
            .unwrap_or_else(|| url.clone()),
        url_path: processed_upload_data
            .map(|x| x.file_name)
            .unwrap_or_else(|| upload_data.file_name.clone()),
        raw_url: url,
        raw_url_path: upload_data.file_name,
        color,
    })
}
/// Normalizes an image for CDN serving: converts it to WebP, optionally caps
/// its width at `target_width`, and center-crops images whose aspect ratio
/// falls below `min_aspect_ratio`.
///
/// GIFs are passed through untouched (preserves animation). Returns the
/// encoded bytes together with the resulting file extension.
fn process_image(
    image_bytes: bytes::Bytes,
    content_type: &str,
    target_width: Option<u32>,
    min_aspect_ratio: Option<f32>,
) -> Result<(bytes::Bytes, String), ImageError> {
    if content_type.to_lowercase() == "image/gif" {
        return Ok((image_bytes.clone(), "gif".to_string()));
    }
    let mut img = image::load_from_memory(&image_bytes)?;
    // NOTE(review): the image is WebP-encoded here and then re-encoded again
    // at the end of this function (two lossy passes) — confirm intentional.
    let webp_bytes = convert_to_webp(&img)?;
    img = image::load_from_memory(&webp_bytes)?;

    // Resize the image
    let (orig_width, orig_height) = img.dimensions();
    let aspect_ratio = orig_width as f32 / orig_height as f32;
    if let Some(target_width) = target_width {
        if img.width() > target_width {
            // Preserve the aspect ratio while capping the width.
            let new_height = (target_width as f32 / aspect_ratio).round() as u32;
            img = img.resize(target_width, new_height, FilterType::Lanczos3);
        }
    }
    if let Some(min_aspect_ratio) = min_aspect_ratio {
        // Crop if necessary
        if aspect_ratio < min_aspect_ratio {
            // Keep full width; trim height equally from top and bottom.
            let crop_height = (img.width() as f32 / min_aspect_ratio).round() as u32;
            let y_offset = (img.height() - crop_height) / 2;
            img = img.crop_imm(0, y_offset, img.width(), crop_height);
        }
    }
    // Optimize and compress
    let mut output = Vec::new();
    img.write_to(&mut Cursor::new(&mut output), ImageOutputFormat::WebP)?;
    Ok((bytes::Bytes::from(output), "webp".to_string()))
}
/// Lossily re-encodes `img` as WebP and returns the raw encoded bytes.
fn convert_to_webp(img: &DynamicImage) -> Result<Vec<u8>, ImageError> {
    let rgba = img.to_rgba8();
    let (width, height) = (img.width(), img.height());
    // Quality factor: 0-100, 75 is a good balance
    let encoded = Encoder::from_rgba(&rgba, width, height).encode(75.0);
    Ok(encoded.to_vec())
}
/// Deletes the CDN files referenced by `image_url` and `raw_image_url`
/// (either may be absent); URLs that don't contain the CDN prefix are ignored.
pub async fn delete_old_images(
    image_url: Option<String>,
    raw_image_url: Option<String>,
    file_host: &dyn FileHost,
) -> Result<(), ApiError> {
    let cdn_url = dotenvy::var("CDN_URL")?;
    let cdn_url_start = format!("{cdn_url}/");
    // Both URLs get identical treatment: take the path after the first
    // occurrence of the CDN prefix and delete that file.
    for url in [image_url, raw_image_url].into_iter().flatten() {
        if let Some(icon_path) = url.split(&cdn_url_start).nth(1) {
            file_host.delete_file_version("", icon_path).await?;
        }
    }
    Ok(())
}
// Check changes to associated images:
// if they no longer exist in the String list, delete them.
// E.g.: if a description is modified and no longer contains a link to an image.
/// Deletes images associated with `context` whose URL no longer appears in
/// any of `reference_strings` — e.g. after a description edit removed the
/// link to an image.
pub async fn delete_unused_images(
    context: ImageContext,
    reference_strings: Vec<&str>,
    transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>,
    redis: &RedisPool,
) -> Result<(), ApiError> {
    let uploaded_images = database::models::Image::get_many_contexted(context, transaction).await?;

    for image in uploaded_images {
        // An image is still in use if any reference string mentions its URL.
        // (Idiom: replaces a manual flag-and-break loop with `Iterator::any`.)
        let in_use = reference_strings
            .iter()
            .any(|reference| image.url.contains(reference));

        if !in_use {
            image_item::Image::remove(image.id, transaction, redis).await?;
            image_item::Image::clear_cache(image.id, redis).await?;
        }
    }
    Ok(())
}

View File

@@ -0,0 +1,14 @@
pub mod actix;
pub mod bitflag;
pub mod captcha;
pub mod cors;
pub mod date;
pub mod env;
pub mod ext;
pub mod guards;
pub mod img;
pub mod ratelimit;
pub mod redis;
pub mod routes;
pub mod validate;
pub mod webhook;

View File

@@ -0,0 +1,167 @@
use governor::clock::{Clock, DefaultClock};
use governor::{middleware, state, RateLimiter};
use std::str::FromStr;
use std::sync::Arc;
use crate::routes::ApiError;
use crate::util::env::parse_var;
use actix_web::{
body::EitherBody,
dev::{forward_ready, Service, ServiceRequest, ServiceResponse, Transform},
Error, ResponseError,
};
use futures_util::future::LocalBoxFuture;
use futures_util::future::{ready, Ready};
/// Shared keyed rate limiter: one governor bucket per key (client IP), with
/// the state-information middleware so quota snapshots are available for
/// building `x-ratelimit-*` response headers.
pub type KeyedRateLimiter<K = String, MW = middleware::StateInformationMiddleware> =
    Arc<RateLimiter<K, state::keyed::DefaultKeyedStateStore<K>, DefaultClock, MW>>;

/// Actix middleware transform that applies the wrapped `KeyedRateLimiter`
/// to every request.
pub struct RateLimit(pub KeyedRateLimiter);
impl<S, B> Transform<S, ServiceRequest> for RateLimit
where
    S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
    S::Future: 'static,
    B: 'static,
{
    type Response = ServiceResponse<EitherBody<B>>;
    type Error = Error;
    type Transform = RateLimitService<S>;
    type InitError = ();
    type Future = Ready<Result<Self::Transform, Self::InitError>>;

    /// Wraps the downstream `service` in a `RateLimitService` that shares
    /// this transform's limiter via a cloned `Arc`.
    fn new_transform(&self, service: S) -> Self::Future {
        ready(Ok(RateLimitService {
            service,
            rate_limiter: Arc::clone(&self.0),
        }))
    }
}
#[doc(hidden)]
pub struct RateLimitService<S> {
    // The wrapped downstream service.
    service: S,
    // Shared keyed limiter (cloned from the `RateLimit` transform).
    rate_limiter: KeyedRateLimiter,
}
impl<S, B> Service<ServiceRequest> for RateLimitService<S>
where
    S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
    S::Future: 'static,
    B: 'static,
{
    type Response = ServiceResponse<EitherBody<B>>;
    type Error = Error;
    type Future = LocalBoxFuture<'static, Result<Self::Response, Self::Error>>;

    forward_ready!(service);

    /// Enforces the per-IP rate limit before delegating to the inner service,
    /// attaching `x-ratelimit-limit` / `-remaining` / `-reset` headers to both
    /// allowed and rejected responses.
    fn call(&self, req: ServiceRequest) -> Self::Future {
        // Requests bearing the configured bypass key skip rate limiting.
        if let Some(key) = req.headers().get("x-ratelimit-key") {
            if key.to_str().ok() == dotenvy::var("RATE_LIMIT_IGNORE_KEY").ok().as_deref() {
                let res = self.service.call(req);

                return Box::pin(async move {
                    let service_response = res.await?;
                    Ok(service_response.map_into_left_body())
                });
            }
        }
        let conn_info = req.connection_info().clone();
        // Behind Cloudflare the client IP comes from CF-Connecting-IP;
        // otherwise use the TCP peer address.
        let ip = if parse_var("CLOUDFLARE_INTEGRATION").unwrap_or(false) {
            if let Some(header) = req.headers().get("CF-Connecting-IP") {
                header.to_str().ok()
            } else {
                conn_info.peer_addr()
            }
        } else {
            conn_info.peer_addr()
        };

        if let Some(ip) = ip {
            let ip = ip.to_string();

            match self.rate_limiter.check_key(&ip) {
                // Within quota: forward the request, then annotate the
                // response with the limiter's state snapshot.
                Ok(snapshot) => {
                    let fut = self.service.call(req);

                    Box::pin(async move {
                        match fut.await {
                            Ok(mut service_response) => {
                                // Now you have a mutable reference to the ServiceResponse, so you can modify its headers.
                                let headers = service_response.headers_mut();
                                headers.insert(
                                    actix_web::http::header::HeaderName::from_str(
                                        "x-ratelimit-limit",
                                    )
                                    .unwrap(),
                                    snapshot.quota().burst_size().get().into(),
                                );
                                headers.insert(
                                    actix_web::http::header::HeaderName::from_str(
                                        "x-ratelimit-remaining",
                                    )
                                    .unwrap(),
                                    snapshot.remaining_burst_capacity().into(),
                                );
                                headers.insert(
                                    actix_web::http::header::HeaderName::from_str(
                                        "x-ratelimit-reset",
                                    )
                                    .unwrap(),
                                    snapshot
                                        .quota()
                                        .burst_size_replenished_in()
                                        .as_secs()
                                        .into(),
                                );

                                // Return the modified response as Ok.
                                Ok(service_response.map_into_left_body())
                            }
                            Err(e) => {
                                // Handle error case
                                Err(e)
                            }
                        }
                    })
                }
                // Over quota: reject immediately with a rate-limit error and
                // tell the client when it may retry.
                Err(negative) => {
                    let wait_time = negative.wait_time_from(DefaultClock::default().now());

                    let mut response = ApiError::RateLimitError(
                        wait_time.as_millis(),
                        negative.quota().burst_size().get(),
                    )
                    .error_response();

                    let headers = response.headers_mut();

                    headers.insert(
                        actix_web::http::header::HeaderName::from_str("x-ratelimit-limit").unwrap(),
                        negative.quota().burst_size().get().into(),
                    );
                    headers.insert(
                        actix_web::http::header::HeaderName::from_str("x-ratelimit-remaining")
                            .unwrap(),
                        0.into(),
                    );
                    headers.insert(
                        actix_web::http::header::HeaderName::from_str("x-ratelimit-reset").unwrap(),
                        wait_time.as_secs().into(),
                    );

                    Box::pin(async { Ok(req.into_response(response.map_into_right_body())) })
                }
            }
        } else {
            // Without a client IP we cannot key the limiter; refuse the request.
            let response =
                ApiError::CustomAuthentication("Unable to obtain user IP address!".to_string())
                    .error_response();

            Box::pin(async { Ok(req.into_response(response.map_into_right_body())) })
        }
    }
}

View File

@@ -0,0 +1,18 @@
use redis::Cmd;
/// Appends every string in `args` to `cmd` as a Redis command argument.
pub fn redis_args(cmd: &mut Cmd, args: &[String]) {
    args.iter().for_each(|arg| {
        cmd.arg(arg);
    });
}
/// Executes `cmd` on the pooled Redis connection and decodes the reply
/// into `T`.
pub async fn redis_execute<T>(
    cmd: &mut Cmd,
    redis: &mut deadpool_redis::Connection,
) -> Result<T, deadpool_redis::PoolError>
where
    T: redis::FromRedisValue,
{
    Ok(cmd.query_async::<_, T>(redis).await?)
}

View File

@@ -0,0 +1,40 @@
use crate::routes::v3::project_creation::CreateError;
use crate::routes::ApiError;
use actix_multipart::Field;
use actix_web::web::Payload;
use bytes::BytesMut;
use futures::StreamExt;
/// Buffers an actix request `payload` into memory, failing with `err_msg`
/// once the body exceeds `cap` bytes.
pub async fn read_from_payload(
    payload: &mut Payload,
    cap: usize,
    err_msg: &'static str,
) -> Result<BytesMut, ApiError> {
    let mut bytes = BytesMut::new();
    while let Some(item) = payload.next().await {
        bytes.extend_from_slice(&item.map_err(|_| {
            ApiError::InvalidInput("Unable to parse bytes in payload sent!".to_string())
        })?);
        // Check AFTER appending: previously the cap was checked before each
        // chunk, so a final oversized chunk could push the buffer past `cap`.
        if bytes.len() > cap {
            return Err(ApiError::InvalidInput(String::from(err_msg)));
        }
    }
    Ok(bytes)
}
/// Buffers a multipart `field` into memory, failing with `err_msg` once the
/// field exceeds `cap` bytes.
pub async fn read_from_field(
    field: &mut Field,
    cap: usize,
    err_msg: &'static str,
) -> Result<BytesMut, CreateError> {
    let mut bytes = BytesMut::new();
    while let Some(chunk) = field.next().await {
        bytes.extend_from_slice(&chunk?);
        // Check AFTER appending: previously the cap was checked before each
        // chunk, so a final oversized chunk could push the buffer past `cap`.
        if bytes.len() > cap {
            return Err(CreateError::InvalidInput(String::from(err_msg)));
        }
    }
    Ok(bytes)
}

View File

@@ -0,0 +1,151 @@
use itertools::Itertools;
use lazy_static::lazy_static;
use regex::Regex;
use validator::{ValidationErrors, ValidationErrorsKind};
use crate::models::pats::Scopes;
lazy_static! {
    /// Characters allowed in URL-safe, user-chosen identifiers (slugs etc.).
    pub static ref RE_URL_SAFE: Regex = Regex::new(r#"^[a-zA-Z0-9!@$()`.+,_"-]*$"#).unwrap();
}
//TODO: In order to ensure readability, only the first error is printed, this may need to be expanded on in the future!
/// Flattens a `validator::ValidationErrors` tree into a single human-readable
/// message describing the FIRST error found (see TODO above).
///
/// `adder` carries positional context ("of item x", "of list y with index i")
/// accumulated while recursing into nested structs and lists.
pub fn validation_errors_to_string(errors: ValidationErrors, adder: Option<String>) -> String {
    let mut output = String::new();

    let map = errors.into_errors();

    // Only the first field with errors is reported.
    let key_option = map.keys().next().copied();

    if let Some(field) = key_option {
        if let Some(error) = map.get(field) {
            return match error {
                // Nested struct: recurse, tagging the enclosing field name.
                ValidationErrorsKind::Struct(errors) => {
                    validation_errors_to_string(*errors.clone(), Some(format!("of item {field}")))
                }
                // List: recurse into the first erroring element only.
                ValidationErrorsKind::List(list) => {
                    if let Some((index, errors)) = list.iter().next() {
                        output.push_str(&validation_errors_to_string(
                            *errors.clone(),
                            Some(format!("of list {field} with index {index}")),
                        ));
                    }
                    output
                }
                // Leaf field: format its first validation error code.
                ValidationErrorsKind::Field(errors) => {
                    if let Some(error) = errors.first() {
                        if let Some(adder) = adder {
                            output.push_str(&format!(
                                "Field {} {} failed validation with error: {}",
                                field, adder, error.code
                            ));
                        } else {
                            output.push_str(&format!(
                                "Field {} failed validation with error: {}",
                                field, error.code
                            ));
                        }
                    }

                    output
                }
            };
        }
    }

    String::new()
}
/// Fails when the dependency list contains two entries with the same
/// (version id, project id, file name) combination.
pub fn validate_deps(
    values: &[crate::models::projects::Dependency],
) -> Result<(), validator::ValidationError> {
    // Key each dependency by its identifying triple; absent ids map to the
    // zero id so missing values still compare equal to each other.
    let mut duplicates = values.iter().duplicates_by(|x| {
        format!(
            "{}-{}-{}",
            x.version_id
                .unwrap_or(crate::models::projects::VersionId(0)),
            x.project_id
                .unwrap_or(crate::models::projects::ProjectId(0)),
            x.file_name.as_deref().unwrap_or_default()
        )
    });
    match duplicates.next() {
        Some(_) => Err(validator::ValidationError::new("duplicate dependency")),
        None => Ok(()),
    }
}
/// Validates that `value` is a well-formed URL using the `https` scheme.
pub fn validate_url(value: &str) -> Result<(), validator::ValidationError> {
    // Idiom: the former `.ok().ok_or_else(...)` detour through Option is
    // collapsed into a single `map_err`.
    let url = url::Url::parse(value)
        .map_err(|_| validator::ValidationError::new("invalid URL"))?;

    if url.scheme() != "https" {
        return Err(validator::ValidationError::new("URL must be https"));
    }
    Ok(())
}
/// Validates every present (`Some`) value in the map as an https URL.
pub fn validate_url_hashmap_optional_values(
    values: &std::collections::HashMap<String, Option<String>>,
) -> Result<(), validator::ValidationError> {
    values
        .values()
        .flatten()
        .try_for_each(|value| validate_url(value))
}
/// Validates every value in the map as an https URL.
pub fn validate_url_hashmap_values(
    values: &std::collections::HashMap<String, String>,
) -> Result<(), validator::ValidationError> {
    values.values().try_for_each(|value| validate_url(value))
}
/// Rejects scope sets that include restricted scopes.
pub fn validate_no_restricted_scopes(value: &Scopes) -> Result<(), validator::ValidationError> {
    if !value.is_restricted() {
        return Ok(());
    }
    Err(validator::ValidationError::new(
        "Restricted scopes not allowed",
    ))
}
/// Rejects names that are empty or consist solely of whitespace.
pub fn validate_name(value: &str) -> Result<(), validator::ValidationError> {
    match value.trim().is_empty() {
        false => Ok(()),
        true => Err(validator::ValidationError::new(
            "Name cannot contain only whitespace.",
        )),
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Non-empty, non-whitespace names pass validation.
    #[test]
    fn validate_name_with_valid_input() {
        let result = validate_name("My Test mod");
        assert!(result.is_ok());
    }

    // Whitespace-only names are rejected.
    #[test]
    fn validate_name_with_invalid_input_returns_error() {
        let result = validate_name(" ");
        assert!(result.is_err());
    }
}

View File

@@ -0,0 +1,583 @@
use crate::database::models::legacy_loader_fields::MinecraftGameVersion;
use crate::database::redis::RedisPool;
use crate::models::ids::base62_impl::to_base62;
use crate::models::projects::ProjectId;
use crate::routes::ApiError;
use chrono::{DateTime, Utc};
use serde::Serialize;
use sqlx::PgPool;
/// Loaders that indicate a server plugin; when ALL of a project's loaders are
/// in this list the webhook displays the project as a "plugin".
const PLUGIN_LOADERS: &[&str] = &[
    "bukkit",
    "spigot",
    "paper",
    "purpur",
    "bungeecord",
    "waterfall",
    "velocity",
    "sponge",
];
/// Pre-formatted project data rendered into Slack/Discord webhook messages.
struct WebhookMetadata {
    pub project_url: String,
    pub project_title: String,
    pub project_summary: String,
    /// Human-readable project type with its first letter capitalized.
    pub display_project_type: String,
    pub project_icon_url: Option<String>,
    /// Embed accent color as 0xRRGGBB, if the project has one.
    pub color: Option<u32>,
    /// Owning organization or team owner, when resolvable.
    pub author: Option<WebhookAuthor>,
    /// Category names, first letter capitalized.
    pub categories_formatted: Vec<String>,
    /// Loader names, first letter capitalized (optionally emoji-prefixed).
    pub loaders_formatted: Vec<String>,
    /// Supported game versions collapsed into range strings.
    pub versions_formatted: Vec<String>,
    /// URL of the featured gallery image, if any.
    pub gallery_image: Option<String>,
}

/// Attribution line for the webhook (organization or owning user).
struct WebhookAuthor {
    pub name: String,
    pub url: String,
    pub icon_url: Option<String>,
}
/// Collects and formats everything needed to announce `project_id` in an
/// outgoing webhook: URL, title, author, categories, loaders, game versions,
/// and images.
///
/// Returns `Ok(None)` when the project does not exist. `emoji` enables
/// Discord custom-emoji markup in the formatted loader names.
async fn get_webhook_metadata(
    project_id: ProjectId,
    pool: &PgPool,
    redis: &RedisPool,
    emoji: bool,
) -> Result<Option<WebhookMetadata>, ApiError> {
    let project =
        crate::database::models::project_item::Project::get_id(project_id.into(), pool, redis)
            .await?;

    if let Some(mut project) = project {
        // Attribution: prefer the owning organization; otherwise fall back to
        // the team member flagged as owner.
        let mut owner = None;

        if let Some(organization_id) = project.inner.organization_id {
            let organization = crate::database::models::organization_item::Organization::get_id(
                organization_id,
                pool,
                redis,
            )
            .await?;

            if let Some(organization) = organization {
                owner = Some(WebhookAuthor {
                    name: organization.name,
                    url: format!(
                        "{}/organization/{}",
                        dotenvy::var("SITE_URL").unwrap_or_default(),
                        organization.slug
                    ),
                    icon_url: organization.icon_url,
                });
            }
        } else {
            let team = crate::database::models::team_item::TeamMember::get_from_team_full(
                project.inner.team_id,
                pool,
                redis,
            )
            .await?;

            if let Some(member) = team.into_iter().find(|x| x.is_owner) {
                let user =
                    crate::database::models::user_item::User::get_id(member.user_id, pool, redis)
                        .await?;

                if let Some(user) = user {
                    owner = Some(WebhookAuthor {
                        url: format!(
                            "{}/user/{}",
                            dotenvy::var("SITE_URL").unwrap_or_default(),
                            user.username
                        ),
                        name: user.username,
                        icon_url: user.avatar_url,
                    });
                }
            }
        };

        let all_game_versions = MinecraftGameVersion::list(None, None, pool, redis).await?;
        // Use the first aggregated version field that parses as a Minecraft
        // game-version list.
        let versions = project
            .aggregate_version_fields
            .clone()
            .into_iter()
            .find_map(|vf| MinecraftGameVersion::try_from_version_field(&vf).ok())
            .unwrap_or_default();
        let formatted_game_versions = get_gv_range(versions, all_game_versions);

        let mut project_type = project.project_types.pop().unwrap_or_default(); // TODO: Should this grab a not-first?

        // Override the displayed type for plugin-only and datapack projects.
        if project
            .inner
            .loaders
            .iter()
            .all(|x| PLUGIN_LOADERS.contains(&&**x))
        {
            project_type = "plugin".to_string();
        } else if project.inner.loaders.iter().any(|x| x == "datapack") {
            project_type = "datapack".to_string();
        }

        let mut display_project_type = match &*project_type {
            "datapack" => "data pack",
            "resourcepack" => "resource pack",
            _ => &*project_type,
        }
        .to_string();

        Ok(Some(WebhookMetadata {
            project_url: format!(
                "{}/{}/{}",
                dotenvy::var("SITE_URL").unwrap_or_default(),
                project_type,
                project
                    .inner
                    .slug
                    .clone()
                    .unwrap_or_else(|| to_base62(project.inner.id.0 as u64))
            ),
            project_title: project.inner.name,
            project_summary: project.inner.summary,
            // Capitalize the first character of the display type.
            display_project_type: format!(
                "{}{display_project_type}",
                display_project_type.remove(0).to_uppercase()
            ),
            project_icon_url: project.inner.icon_url,
            color: project.inner.color,
            author: owner,
            categories_formatted: project
                .categories
                .into_iter()
                .map(|mut x| format!("{}{x}", x.remove(0).to_uppercase()))
                .collect::<Vec<_>>(),
            loaders_formatted: project
                .inner
                .loaders
                .into_iter()
                .map(|loader| {
                    let mut x = if &*loader == "datapack" {
                        "Data Pack".to_string()
                    } else if &*loader == "mrpack" {
                        "Modpack".to_string()
                    } else {
                        loader.clone()
                    };
                    if emoji {
                        // Discord custom-emoji id for each loader's icon;
                        // the final arm is a generic fallback emoji.
                        let emoji_id: i64 = match &*loader {
                            "bukkit" => 1049793345481883689,
                            "bungeecord" => 1049793347067314220,
                            "canvas" => 1107352170656968795,
                            "datapack" => 1057895494652788866,
                            "fabric" => 1049793348719890532,
                            "folia" => 1107348745571537018,
                            "forge" => 1049793350498275358,
                            "iris" => 1107352171743281173,
                            "liteloader" => 1049793351630733333,
                            "minecraft" => 1049793352964526100,
                            "modloader" => 1049793353962762382,
                            "neoforge" => 1140437823783190679,
                            "optifine" => 1107352174415052901,
                            "paper" => 1049793355598540810,
                            "purpur" => 1140436034505674762,
                            "quilt" => 1049793857681887342,
                            "rift" => 1049793359373414502,
                            "spigot" => 1049793413886779413,
                            "sponge" => 1049793416969605231,
                            "vanilla" => 1107350794178678855,
                            "velocity" => 1049793419108700170,
                            "waterfall" => 1049793420937412638,
                            _ => 1049805243866681424,
                        };
                        format!("<:{loader}:{emoji_id}> {}{x}", x.remove(0).to_uppercase())
                    } else {
                        format!("{}{x}", x.remove(0).to_uppercase())
                    }
                })
                .collect(),
            versions_formatted: formatted_game_versions,
            gallery_image: project
                .gallery_items
                .into_iter()
                .find(|x| x.featured)
                .map(|x| x.image_url),
        }))
    } else {
        Ok(None)
    }
}
/// Posts a Slack Block Kit message announcing `project_id` to `webhook_url`,
/// optionally preceded by a custom `message`. No-op when the project does
/// not exist.
pub async fn send_slack_webhook(
    project_id: ProjectId,
    pool: &PgPool,
    redis: &RedisPool,
    webhook_url: String,
    message: Option<String>,
) -> Result<(), ApiError> {
    let metadata = get_webhook_metadata(project_id, pool, redis, false).await?;

    if let Some(metadata) = metadata {
        let mut blocks = vec![];

        // Optional free-form intro text.
        if let Some(message) = message {
            blocks.push(serde_json::json!({
                "type": "section",
                "text": {
                    "type": "mrkdwn",
                    "text": message,
                }
            }));
        }

        // Author context line: optional avatar plus linked name.
        if let Some(ref author) = metadata.author {
            let mut elements = vec![];

            if let Some(ref icon_url) = author.icon_url {
                elements.push(serde_json::json!({
                    "type": "image",
                    "image_url": icon_url,
                    "alt_text": "Author"
                }));
            }

            elements.push(serde_json::json!({
                "type": "mrkdwn",
                "text": format!("<{}|{}>", author.url, author.name)
            }));

            blocks.push(serde_json::json!({
                "type": "context",
                "elements": elements
            }));
        }

        // Main project section: linked title, summary, and formatted facets.
        let mut project_block = serde_json::json!({
            "type": "section",
            "text": {
                "type": "mrkdwn",
                "text": format!(
                    "*<{}|{}>*\n\n{}\n\n*Categories:* {}\n\n*Loaders:* {}\n\n*Versions:* {}",
                    metadata.project_url,
                    metadata.project_title,
                    metadata.project_summary,
                    metadata.categories_formatted.join(", "),
                    metadata.loaders_formatted.join(", "),
                    metadata.versions_formatted.join(", ")
                )
            }
        });

        // Project icon becomes the section's accessory image, when present.
        if let Some(icon_url) = metadata.project_icon_url {
            if let Some(project_block) = project_block.as_object_mut() {
                project_block.insert(
                    "accessory".to_string(),
                    serde_json::json!({
                        "type": "image",
                        "image_url": icon_url,
                        "alt_text": metadata.project_title
                    }),
                );
            }
        }

        blocks.push(project_block);

        // Featured gallery image, when present.
        if let Some(gallery_image) = metadata.gallery_image {
            blocks.push(serde_json::json!({
                "type": "image",
                "image_url": gallery_image,
                "alt_text": metadata.project_title
            }));
        }

        // Footer with Modrinth branding and a localized timestamp.
        blocks.push(
            serde_json::json!({
                "type": "context",
                "elements": [
                    {
                        "type": "image",
                        "image_url": "https://cdn-raw.modrinth.com/modrinth-new.png",
                        "alt_text": "Author"
                    },
                    {
                        "type": "mrkdwn",
                        "text": format!("{} on Modrinth • <!date^{}^{{date_short_pretty}} at {{time}}|Unknown date>", metadata.display_project_type, Utc::now().timestamp())
                    }
                ]
            })
        );

        let client = reqwest::Client::new();

        // NOTE(review): failure is reported via the Discord error variant
        // even though this is the Slack webhook — confirm intent.
        client
            .post(&webhook_url)
            .json(&serde_json::json!({
                "blocks": blocks,
            }))
            .send()
            .await
            .map_err(|_| ApiError::Discord("Error while sending projects webhook".to_string()))?;
    }

    Ok(())
}
/// Discord rich-embed payload (subset of the Discord webhook embed schema).
#[derive(Serialize)]
struct DiscordEmbed {
    pub author: Option<DiscordEmbedAuthor>,
    pub title: String,
    pub description: String,
    pub url: String,
    pub timestamp: DateTime<Utc>,
    /// Accent color as 0xRRGGBB.
    pub color: u32,
    pub fields: Vec<DiscordEmbedField>,
    pub thumbnail: DiscordEmbedThumbnail,
    pub image: Option<DiscordEmbedImage>,
    pub footer: Option<DiscordEmbedFooter>,
}

/// Embed author line (name, optional link and avatar).
#[derive(Serialize)]
struct DiscordEmbedAuthor {
    pub name: String,
    pub url: Option<String>,
    pub icon_url: Option<String>,
}

/// One name/value pair shown in the embed body.
#[derive(Serialize)]
struct DiscordEmbedField {
    pub name: &'static str,
    pub value: String,
    pub inline: bool,
}

/// Large image shown below the embed body.
#[derive(Serialize)]
struct DiscordEmbedImage {
    pub url: Option<String>,
}

/// Small image shown beside the embed body.
#[derive(Serialize)]
struct DiscordEmbedThumbnail {
    pub url: Option<String>,
}

/// Footer text with optional icon.
#[derive(Serialize)]
struct DiscordEmbedFooter {
    pub text: String,
    pub icon_url: Option<String>,
}

/// Top-level Discord webhook payload.
#[derive(Serialize)]
struct DiscordWebhook {
    pub avatar_url: Option<String>,
    pub username: Option<String>,
    pub embeds: Vec<DiscordEmbed>,
    pub content: Option<String>,
}
/// Posts a Discord embed announcing `project_id` to `webhook_url`, with an
/// optional `message` as the plain-text content. No-op when the project does
/// not exist.
pub async fn send_discord_webhook(
    project_id: ProjectId,
    pool: &PgPool,
    redis: &RedisPool,
    webhook_url: String,
    message: Option<String>,
) -> Result<(), ApiError> {
    // `emoji = true`: loader names carry Discord custom-emoji markup.
    let metadata = get_webhook_metadata(project_id, pool, redis, true).await?;

    if let Some(project) = metadata {
        // Only non-empty facets become embed fields.
        let mut fields = vec![];

        if !project.categories_formatted.is_empty() {
            fields.push(DiscordEmbedField {
                name: "Categories",
                value: project.categories_formatted.join("\n"),
                inline: true,
            });
        }

        if !project.loaders_formatted.is_empty() {
            fields.push(DiscordEmbedField {
                name: "Loaders",
                value: project.loaders_formatted.join("\n"),
                inline: true,
            });
        }

        if !project.versions_formatted.is_empty() {
            fields.push(DiscordEmbedField {
                name: "Versions",
                value: project.versions_formatted.join("\n"),
                inline: true,
            });
        }

        let embed = DiscordEmbed {
            author: project.author.map(|x| DiscordEmbedAuthor {
                name: x.name,
                url: Some(x.url),
                icon_url: x.icon_url,
            }),
            url: project.project_url,
            title: project.project_title, // Do not change DiscordEmbed
            description: project.project_summary,
            timestamp: Utc::now(),
            // Default Modrinth green when the project has no color.
            color: project.color.unwrap_or(0x1bd96a),
            fields,
            thumbnail: DiscordEmbedThumbnail {
                url: project.project_icon_url,
            },
            image: project
                .gallery_image
                .map(|x| DiscordEmbedImage { url: Some(x) }),
            footer: Some(DiscordEmbedFooter {
                text: format!("{} on Modrinth", project.display_project_type),
                icon_url: Some("https://cdn-raw.modrinth.com/modrinth-new.png".to_string()),
            }),
        };

        let client = reqwest::Client::new();

        client
            .post(&webhook_url)
            .json(&DiscordWebhook {
                avatar_url: Some("https://cdn.modrinth.com/Modrinth_Dark_Logo.png".to_string()),
                username: Some("Modrinth Release".to_string()),
                embeds: vec![embed],
                content: message,
            })
            .send()
            .await
            .map_err(|_| ApiError::Discord("Error while sending projects webhook".to_string()))?;
    }

    Ok(())
}
/// Collapses a project's supported Minecraft versions into human-readable
/// strings, merging contiguous runs (judged against the full known version
/// list `all_game_versions`) into start/end range pairs.
fn get_gv_range(
    mut game_versions: Vec<MinecraftGameVersion>,
    mut all_game_versions: Vec<MinecraftGameVersion>,
) -> Vec<String> {
    // both -> least to greatest
    game_versions.sort_by(|a, b| a.created.cmp(&b.created));
    game_versions.dedup_by(|a, b| a.version == b.version);
    all_game_versions.sort_by(|a, b| a.created.cmp(&b.created));

    // Releases only, for judging adjacency among release versions.
    let all_releases = all_game_versions
        .iter()
        .filter(|x| &*x.type_ == "release")
        .cloned()
        .collect::<Vec<_>>();

    // Each interval holds 1-2 triples of
    // [index in game_versions, index in all_game_versions, index in all_releases];
    // a 2-element interval represents a contiguous start..end range.
    let mut intervals = Vec::new();
    let mut current_interval = 0;

    // Sentinel for "version not found" lookups.
    const MAX_VALUE: usize = 1000000;

    for (i, current_version) in game_versions.iter().enumerate() {
        let current_version = &current_version.version;

        let index = all_game_versions
            .iter()
            .position(|x| &*x.version == current_version)
            .unwrap_or(MAX_VALUE);
        let release_index = all_releases
            .iter()
            .position(|x| &*x.version == current_version)
            .unwrap_or(MAX_VALUE);

        if i == 0 {
            intervals.push(vec![vec![i, index, release_index]])
        } else {
            let interval_base = &intervals[current_interval];

            // Extend the current interval when this version immediately
            // follows the previous one (overall or among releases) and the
            // release/snapshot mixing rule allows it; otherwise start a new
            // interval.
            if ((index as i32) - (interval_base[interval_base.len() - 1][1] as i32) == 1
                || (release_index as i32) - (interval_base[interval_base.len() - 1][2] as i32) == 1)
                && (all_game_versions[interval_base[0][1]].type_ == "release"
                    || all_game_versions[index].type_ != "release")
            {
                if intervals[current_interval].get(1).is_some() {
                    intervals[current_interval][1] = vec![i, index, release_index];
                } else {
                    intervals[current_interval].insert(1, vec![i, index, release_index]);
                }
            } else {
                current_interval += 1;
                intervals.push(vec![vec![i, index, release_index]]);
            }
        }
    }

    // Post-pass: an interval that starts on a release but ends on a
    // non-release is split at the latest release inside it, so ranges don't
    // end on a snapshot; trailing snapshots become their own interval.
    let mut new_intervals = Vec::new();

    for interval in intervals {
        if interval.len() == 2 && interval[0][2] != MAX_VALUE && interval[1][2] == MAX_VALUE {
            let mut last_snapshot: Option<usize> = None;

            for j in ((interval[0][1] + 1)..=interval[1][1]).rev() {
                if all_game_versions[j].type_ == "release" {
                    new_intervals.push(vec![
                        interval[0].clone(),
                        vec![
                            game_versions
                                .iter()
                                .position(|x| x.version == all_game_versions[j].version)
                                .unwrap_or(MAX_VALUE),
                            j,
                            all_releases
                                .iter()
                                .position(|x| x.version == all_game_versions[j].version)
                                .unwrap_or(MAX_VALUE),
                        ],
                    ]);

                    if let Some(last_snapshot) = last_snapshot {
                        if last_snapshot != j + 1 {
                            new_intervals.push(vec![
                                vec![
                                    game_versions
                                        .iter()
                                        .position(|x| {
                                            x.version == all_game_versions[last_snapshot].version
                                        })
                                        .unwrap_or(MAX_VALUE),
                                    last_snapshot,
                                    MAX_VALUE,
                                ],
                                interval[1].clone(),
                            ])
                        }
                    } else {
                        new_intervals.push(vec![interval[1].clone()])
                    }

                    break;
                } else {
                    last_snapshot = Some(j);
                }
            }
        } else {
            new_intervals.push(interval);
        }
    }

    let mut output = Vec::new();

    for interval in new_intervals {
        if interval.len() == 2 {
            // NOTE(review): the two endpoint versions are concatenated with no
            // separator ("1.16.1" + "1.16.5" -> "1.16.11.16.5"); a range
            // separator (e.g. "—") appears to be missing here — confirm intent.
            output.push(format!(
                "{}{}",
                &game_versions[interval[0][0]].version, &game_versions[interval[1][0]].version
            ))
        } else {
            output.push(game_versions[interval[0][0]].version.clone())
        }
    }

    output
}