
Switch to time crate, add file sizes (#329)

* Switch to time crate, add file sizes

* Update deps, adjust pack format

* Run formatter, fix clippy
Authored by Geometrically on 2022-03-29 19:35:09 -07:00, committed by GitHub
parent a3d5479878
commit 80e00a80d5
38 changed files with 563 additions and 318 deletions
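Not part of the diff: a minimal sketch of the chrono-to-time mapping this commit applies throughout, assuming the time 0.2 API the changes below compile against (time 0.3 later renamed or changed several of these calls). The RFC 3339 serde handling moves into the new src/util/time_ser.rs helper further down.

use time::{Format, OffsetDateTime, PrimitiveDateTime};

fn main() {
    // chrono::Utc::now()                      -> OffsetDateTime::now_utc()
    let now = OffsetDateTime::now_utc();

    // chrono::DateTime::timestamp()           -> OffsetDateTime::unix_timestamp()
    let _secs: i64 = now.unix_timestamp();

    // chrono::DateTime::parse_from_rfc3339(s) -> OffsetDateTime::parse(s, Format::Rfc3339)
    let _parsed =
        OffsetDateTime::parse("2012-12-19T22:00:00+00:00", Format::Rfc3339).unwrap();

    // chrono::DateTime::naive_utc() (for sqlx TIMESTAMP columns)
    //                                         -> PrimitiveDateTime::new(date, time)
    let _naive = PrimitiveDateTime::new(now.date(), now.time());
}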

View File

@@ -1,6 +1,7 @@
use super::ids::*;
use super::DatabaseError;
use futures::TryStreamExt;
use time::OffsetDateTime;
pub struct ProjectType {
pub id: ProjectTypeId,
@@ -19,7 +20,7 @@ pub struct GameVersion {
pub id: GameVersionId,
pub version: String,
pub version_type: String,
pub date: chrono::DateTime<chrono::Utc>,
pub date: OffsetDateTime,
pub major: bool,
}
@@ -469,7 +470,7 @@ impl<'a> LoaderBuilder<'a> {
pub struct GameVersionBuilder<'a> {
pub version: Option<&'a str>,
pub version_type: Option<&'a str>,
pub date: Option<&'a chrono::DateTime<chrono::Utc>>,
pub date: Option<&'a OffsetDateTime>,
}
impl GameVersion {
@@ -689,7 +690,7 @@ impl<'a> GameVersionBuilder<'a> {
pub fn created(
self,
created: &'a chrono::DateTime<chrono::Utc>,
created: &'a OffsetDateTime,
) -> GameVersionBuilder<'a> {
Self {
date: Some(created),
@@ -718,7 +719,7 @@ impl<'a> GameVersionBuilder<'a> {
",
self.version,
self.version_type,
self.date.map(chrono::DateTime::naive_utc),
self.date.map(|x| time::PrimitiveDateTime::new(x.date(), x.time())),
)
.fetch_one(exec)
.await?;

View File

@@ -1,5 +1,6 @@
use super::ids::*;
use crate::database::models::DatabaseError;
use time::OffsetDateTime;
pub struct NotificationBuilder {
pub notification_type: Option<String>,
@@ -22,7 +23,7 @@ pub struct Notification {
pub text: String,
pub link: String,
pub read: bool,
pub created: chrono::DateTime<chrono::Utc>,
pub created: OffsetDateTime,
pub actions: Vec<NotificationAction>,
}
@@ -71,7 +72,7 @@ impl NotificationBuilder {
text: self.text.clone(),
link: self.link.clone(),
read: false,
created: chrono::Utc::now(),
created: OffsetDateTime::now_utc(),
actions,
}
.insert(&mut *transaction)

View File

@@ -1,5 +1,5 @@
use super::ids::*;
use chrono::{DateTime, Utc};
use time::OffsetDateTime;
#[derive(Clone, Debug)]
pub struct DonationUrl {
@@ -42,7 +42,7 @@ pub struct GalleryItem {
pub featured: bool,
pub title: Option<String>,
pub description: Option<String>,
pub created: DateTime<Utc>,
pub created: OffsetDateTime,
}
impl GalleryItem {
@@ -109,8 +109,8 @@ impl ProjectBuilder {
description: self.description,
body: self.body,
body_url: None,
published: chrono::Utc::now(),
updated: chrono::Utc::now(),
published: time::OffsetDateTime::now_utc(),
updated: time::OffsetDateTime::now_utc(),
status: self.status,
downloads: 0,
follows: 0,
@@ -169,8 +169,8 @@ pub struct Project {
pub description: String,
pub body: String,
pub body_url: Option<String>,
pub published: chrono::DateTime<chrono::Utc>,
pub updated: chrono::DateTime<chrono::Utc>,
pub published: time::OffsetDateTime,
pub updated: time::OffsetDateTime,
pub status: StatusId,
pub downloads: i32,
pub follows: i32,

View File

@@ -1,4 +1,5 @@
use super::ids::*;
use time::OffsetDateTime;
pub struct Report {
pub id: ReportId,
@@ -8,7 +9,7 @@ pub struct Report {
pub user_id: Option<UserId>,
pub body: String,
pub reporter: UserId,
pub created: chrono::DateTime<chrono::Utc>,
pub created: OffsetDateTime,
}
pub struct QueryReport {
@@ -19,7 +20,7 @@ pub struct QueryReport {
pub user_id: Option<UserId>,
pub body: String,
pub reporter: UserId,
pub created: chrono::DateTime<chrono::Utc>,
pub created: OffsetDateTime,
}
impl Report {

View File

@@ -1,4 +1,5 @@
use super::ids::{ProjectId, UserId};
use time::OffsetDateTime;
pub struct User {
pub id: UserId,
@@ -8,7 +9,7 @@ pub struct User {
pub email: Option<String>,
pub avatar_url: Option<String>,
pub bio: Option<String>,
pub created: chrono::DateTime<chrono::Utc>,
pub created: OffsetDateTime,
pub role: String,
}

View File

@@ -1,6 +1,7 @@
use super::ids::*;
use super::DatabaseError;
use std::collections::HashMap;
use time::OffsetDateTime;
pub struct VersionBuilder {
pub version_id: VersionId,
@@ -78,6 +79,7 @@ pub struct VersionFileBuilder {
pub filename: String,
pub hashes: Vec<HashBuilder>,
pub primary: bool,
pub size: u32,
}
impl VersionFileBuilder {
@@ -90,14 +92,15 @@ impl VersionFileBuilder {
sqlx::query!(
"
INSERT INTO files (id, version_id, url, filename, is_primary)
VALUES ($1, $2, $3, $4, $5)
INSERT INTO files (id, version_id, url, filename, is_primary, size)
VALUES ($1, $2, $3, $4, $5, $6)
",
file_id as FileId,
version_id as VersionId,
self.url,
self.filename,
self.primary
self.primary,
self.size as i32
)
.execute(&mut *transaction)
.await?;
@@ -138,7 +141,7 @@ impl VersionBuilder {
version_number: self.version_number,
changelog: self.changelog,
changelog_url: None,
date_published: chrono::Utc::now(),
date_published: OffsetDateTime::now_utc(),
downloads: 0,
featured: self.featured,
version_type: self.version_type,
@@ -238,7 +241,7 @@ pub struct Version {
pub version_number: String,
pub changelog: String,
pub changelog_url: Option<String>,
pub date_published: chrono::DateTime<chrono::Utc>,
pub date_published: OffsetDateTime,
pub downloads: i32,
pub version_type: String,
pub featured: bool,
@@ -639,7 +642,7 @@ impl Version {
).fetch_all(executor),
sqlx::query!(
"
SELECT id, filename, is_primary, url
SELECT id, filename, is_primary, url, size
FROM files
WHERE version_id = $1
",
@@ -699,6 +702,7 @@ impl Version {
.or_default()
.clone(),
primary: x.is_primary,
size: x.size as u32,
})
.collect(),
game_versions: game_versions?
@@ -760,7 +764,7 @@ pub struct QueryVersion {
pub version_number: String,
pub changelog: String,
pub changelog_url: Option<String>,
pub date_published: chrono::DateTime<chrono::Utc>,
pub date_published: OffsetDateTime,
pub downloads: i32,
pub version_type: String,
@@ -785,4 +789,5 @@ pub struct QueryFile {
pub filename: String,
pub hashes: HashMap<String, Vec<u8>>,
pub primary: bool,
pub size: u32,
}

View File

@@ -1,8 +1,8 @@
use std::time::Duration;
use log::info;
use sqlx::migrate::MigrateDatabase;
use sqlx::postgres::{PgPool, PgPoolOptions};
use sqlx::{Connection, PgConnection, Postgres};
use std::time::Duration;
pub async fn connect() -> Result<PgPool, sqlx::Error> {
info!("Initializing database connection");

View File

@@ -2,6 +2,7 @@ use super::{DeleteFileData, FileHost, FileHostingError, UploadFileData};
use async_trait::async_trait;
use bytes::Bytes;
use sha2::Digest;
use time::OffsetDateTime;
pub struct MockHost(());
@@ -38,7 +39,7 @@ impl FileHost for MockHost {
content_sha1,
content_md5: None,
content_type: content_type.to_string(),
upload_timestamp: chrono::Utc::now().timestamp_millis() as u64,
upload_timestamp: OffsetDateTime::now_utc().unix_timestamp() as u64,
})
}

View File

@@ -7,6 +7,7 @@ use s3::bucket::Bucket;
use s3::creds::Credentials;
use s3::region::Region;
use sha2::Digest;
use time::OffsetDateTime;
pub struct S3Host {
bucket: Bucket,
@@ -84,7 +85,7 @@ impl FileHost for S3Host {
content_sha1,
content_md5: None,
content_type: content_type.to_string(),
upload_timestamp: chrono::Utc::now().timestamp_millis() as u64,
upload_timestamp: OffsetDateTime::now_utc().unix_timestamp() as u64,
})
}

View File

@@ -33,7 +33,7 @@ pub fn random_base62_rng<R: rand::RngCore>(rng: &mut R, n: usize) -> u64 {
assert!(n > 0 && n <= 11);
// gen_range is [low, high): max value is `MULTIPLES[n] - 1`,
// which is n characters long when encoded
rng.gen_range(MULTIPLES[n - 1], MULTIPLES[n])
rng.gen_range(MULTIPLES[n - 1]..MULTIPLES[n])
}
const MULTIPLES: [u64; 12] = [

View File

@@ -1,7 +1,7 @@
use super::ids::Base62Id;
use super::users::UserId;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(from = "Base62Id")]
@@ -18,7 +18,8 @@ pub struct Notification {
pub text: String,
pub link: String,
pub read: bool,
pub created: DateTime<Utc>,
#[serde(with = "crate::util::time_ser")]
pub created: OffsetDateTime,
pub actions: Vec<NotificationAction>,
}

View File

@@ -3,8 +3,8 @@ use super::teams::TeamId;
use super::users::UserId;
use crate::database::models::project_item::QueryProject;
use crate::database::models::version_item::QueryVersion;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
use validator::Validate;
/// The ID of a specific project, encoded as base62 for usage in the API
@@ -38,10 +38,14 @@ pub struct Project {
pub body: String,
/// The link to the long description of the project. (Deprecated), being replaced by `body`
pub body_url: Option<String>,
/// The date at which the project was first published.
pub published: DateTime<Utc>,
#[serde(with = "crate::util::time_ser")]
pub published: OffsetDateTime,
#[serde(with = "crate::util::time_ser")]
/// The date at which the project was first published.
pub updated: DateTime<Utc>,
pub updated: OffsetDateTime,
/// The status of the project
pub status: ProjectStatus,
@@ -152,7 +156,8 @@ pub struct GalleryItem {
pub featured: bool,
pub title: Option<String>,
pub description: Option<String>,
pub created: DateTime<Utc>,
#[serde(with = "crate::util::time_ser")]
pub created: OffsetDateTime,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
@@ -297,8 +302,10 @@ pub struct Version {
pub changelog: String,
/// A link to the changelog for this version of the project. (Deprecated), being replaced by `changelog`
pub changelog_url: Option<String>,
#[serde(with = "crate::util::time_ser")]
/// The date that this version was published.
pub date_published: DateTime<Utc>,
pub date_published: OffsetDateTime,
/// The number of downloads this specific version has had.
pub downloads: u32,
/// The type of the release - `Alpha`, `Beta`, or `Release`.
@@ -351,6 +358,7 @@ impl From<QueryVersion> for Version {
.collect::<Option<_>>()
.unwrap_or_default(),
primary: f.primary,
size: f.size,
}
})
.collect(),
@@ -387,6 +395,8 @@ pub struct VersionFile {
pub filename: String,
/// Whether the file is the primary file of a version
pub primary: bool,
/// The size in bytes of the file
pub size: u32,
}
/// A dependency which describes what versions are required, break support, or are optional to the

View File

@@ -1,7 +1,7 @@
use super::ids::Base62Id;
use crate::models::ids::UserId;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(from = "Base62Id")]
@@ -16,7 +16,8 @@ pub struct Report {
pub item_type: ItemType,
pub reporter: UserId,
pub body: String,
pub created: DateTime<Utc>,
#[serde(with = "crate::util::time_ser")]
pub created: OffsetDateTime,
}
#[derive(Serialize, Deserialize, Clone)]

View File

@@ -1,5 +1,6 @@
use super::ids::Base62Id;
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(from = "Base62Id")]
@@ -17,7 +18,8 @@ pub struct User {
pub email: Option<String>,
pub avatar_url: Option<String>,
pub bio: Option<String>,
pub created: chrono::DateTime<chrono::Utc>,
#[serde(with = "crate::util::time_ser")]
pub created: OffsetDateTime,
pub role: Role,
}

View File

@@ -7,10 +7,10 @@ use crate::util::auth::get_github_user_from_token;
use actix_web::http::StatusCode;
use actix_web::web::{scope, Data, Query, ServiceConfig};
use actix_web::{get, HttpResponse};
use chrono::Utc;
use serde::{Deserialize, Serialize};
use sqlx::postgres::PgPool;
use thiserror::Error;
use time::OffsetDateTime;
pub fn config(cfg: &mut ServiceConfig) {
cfg.service(scope("auth").service(auth_callback).service(init));
@@ -145,10 +145,10 @@ pub async fn auth_callback(
.await?;
if let Some(result) = result_option {
let now = Utc::now();
let duration = result.expires.signed_duration_since(now);
let now = OffsetDateTime::now_utc();
let duration = result.expires - now;
if duration.num_seconds() < 0 {
if duration.whole_seconds() < 0 {
return Err(AuthorizationError::InvalidCredentials);
}
@@ -225,7 +225,7 @@ pub async fn auth_callback(
email: user.email,
avatar_url: Some(user.avatar_url),
bio: user.bio,
created: Utc::now(),
created: OffsetDateTime::now_utc(),
role: Role::Developer.to_string(),
}
.insert(&mut transaction)

View File

@@ -108,7 +108,7 @@ pub async fn maven_metadata(
.map(|x| x.version_number.clone())
.collect::<Vec<_>>(),
},
last_updated: data.inner.updated.format("%Y%m%d%H%M%S").to_string(),
last_updated: data.inner.updated.format("%Y%m%d%H%M%S"),
},
};

View File

@@ -19,6 +19,7 @@ use serde::{Deserialize, Serialize};
use sqlx::postgres::PgPool;
use std::sync::Arc;
use thiserror::Error;
use time::OffsetDateTime;
use validator::Validate;
#[derive(Error, Debug)]
@@ -498,7 +499,7 @@ pub async fn project_create_inner(
featured: item.featured,
title: item.title.clone(),
description: item.description.clone(),
created: chrono::Utc::now(),
created: OffsetDateTime::now_utc(),
});
continue;
@@ -693,7 +694,7 @@ pub async fn project_create_inner(
.collect(),
};
let now = chrono::Utc::now();
let now = OffsetDateTime::now_utc();
let response = crate::models::projects::Project {
id: project_id,

View File

@@ -17,6 +17,7 @@ use futures::StreamExt;
use serde::{Deserialize, Serialize};
use sqlx::{PgPool, Row};
use std::sync::Arc;
use time::OffsetDateTime;
use validator::Validate;
#[get("search")]
@@ -1134,7 +1135,7 @@ pub async fn add_gallery_item(
featured: item.featured,
title: item.title,
description: item.description,
created: chrono::Utc::now(),
created: OffsetDateTime::now_utc(),
}
.insert(&mut transaction)
.await?;

View File

@@ -8,6 +8,7 @@ use actix_web::{delete, get, post, web, HttpRequest, HttpResponse};
use futures::StreamExt;
use serde::Deserialize;
use sqlx::PgPool;
use time::OffsetDateTime;
#[derive(Deserialize)]
pub struct CreateReport {
@@ -59,7 +60,7 @@ pub async fn report_create(
user_id: None,
body: new_report.body.clone(),
reporter: current_user.id.into(),
created: chrono::Utc::now(),
created: OffsetDateTime::now_utc(),
};
match new_report.item_type {
@@ -108,7 +109,7 @@ pub async fn report_create(
item_type: new_report.item_type.clone(),
reporter: current_user.id,
body: new_report.body.clone(),
created: chrono::Utc::now(),
created: OffsetDateTime::now_utc(),
}))
}

View File

@@ -7,6 +7,7 @@ use crate::util::auth::check_is_admin_from_headers;
use actix_web::{delete, get, put, web, HttpRequest, HttpResponse};
use models::categories::{Category, GameVersion, Loader};
use sqlx::PgPool;
use time::OffsetDateTime;
pub fn config(cfg: &mut web::ServiceConfig) {
cfg.service(
@@ -201,7 +202,8 @@ pub async fn loader_delete(
pub struct GameVersionQueryData {
pub version: String,
pub version_type: String,
pub date: chrono::DateTime<chrono::Utc>,
#[serde(with = "crate::util::time_ser")]
pub date: OffsetDateTime,
pub major: bool,
}
@@ -241,7 +243,7 @@ pub async fn game_version_list(
pub struct GameVersionData {
#[serde(rename = "type")]
type_: String,
date: Option<chrono::DateTime<chrono::Utc>>,
date: Option<OffsetDateTime>,
}
#[put("game_version/{name}")]

View File

@@ -7,10 +7,10 @@ use crate::util::auth::{
};
use actix_web::web;
use actix_web::{get, post, HttpRequest, HttpResponse};
use chrono::{DateTime, Utc};
use futures::StreamExt;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use time::OffsetDateTime;
#[derive(Serialize, Deserialize)]
pub struct Report {
@@ -20,7 +20,8 @@ pub struct Report {
pub item_type: ItemType,
pub reporter: UserId,
pub body: String,
pub created: DateTime<Utc>,
#[serde(with = "crate::util::time_ser")]
pub created: OffsetDateTime,
}
#[derive(Serialize, Deserialize, Clone)]
@@ -92,7 +93,7 @@ pub async fn report_create(
user_id: None,
body: new_report.body.clone(),
reporter: current_user.id.into(),
created: chrono::Utc::now(),
created: OffsetDateTime::now_utc(),
};
match new_report.item_type {
@@ -141,7 +142,7 @@ pub async fn report_create(
item_type: new_report.item_type.clone(),
reporter: current_user.id,
body: new_report.body.clone(),
created: chrono::Utc::now(),
created: OffsetDateTime::now_utc(),
}))
}

View File

@@ -9,10 +9,10 @@ use crate::routes::ApiError;
use crate::util::auth::get_user_from_headers;
use crate::{database, models};
use actix_web::{delete, get, web, HttpRequest, HttpResponse};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use std::sync::Arc;
use time::OffsetDateTime;
/// A specific version of a mod
#[derive(Serialize, Deserialize)]
@@ -25,7 +25,8 @@ pub struct LegacyVersion {
pub version_number: String,
pub changelog: String,
pub changelog_url: Option<String>,
pub date_published: DateTime<Utc>,
#[serde(with = "crate::util::time_ser")]
pub date_published: OffsetDateTime,
pub downloads: u32,
pub version_type: VersionType,
pub files: Vec<VersionFile>,

View File

@@ -20,6 +20,7 @@ use actix_web::{post, HttpRequest, HttpResponse};
use futures::stream::StreamExt;
use serde::{Deserialize, Serialize};
use sqlx::postgres::PgPool;
use time::OffsetDateTime;
use validator::Validate;
#[derive(Serialize, Deserialize, Validate, Clone)]
@@ -398,7 +399,7 @@ async fn version_create_inner(
version_number: builder.version_number.clone(),
changelog: builder.changelog.clone(),
changelog_url: None,
date_published: chrono::Utc::now(),
date_published: OffsetDateTime::now_utc(),
downloads: 0,
version_type: version_data.release_channel,
files: builder
@@ -422,6 +423,7 @@ async fn version_create_inner(
url: file.url.clone(),
filename: file.filename.clone(),
primary: file.primary,
size: file.size,
})
.collect::<Vec<_>>(),
dependencies: version_data.dependencies,
@@ -719,6 +721,7 @@ pub async fn upload_file(
&& version_files.iter().all(|x| !x.primary)
&& !ignore_primary)
|| force_primary,
size: upload_data.content_length,
});
Ok(())

View File

@@ -1,4 +1,3 @@
use actix_rt::time;
use actix_rt::Arbiter;
use futures::StreamExt;
@@ -18,7 +17,7 @@ impl Scheduler {
F: FnMut() -> R + Send + 'static,
R: std::future::Future<Output = ()> + Send + 'static,
{
let future = IntervalStream::new(time::interval(interval))
let future = IntervalStream::new(actix_rt::time::interval(interval))
.for_each_concurrent(2, move |_| task());
self.arbiter.spawn(future);
@@ -75,6 +74,8 @@ pub enum VersionIndexingError {
use crate::util::env::parse_var;
use serde::Deserialize;
use time::Format::Rfc3339;
use time::OffsetDateTime;
use tokio_stream::wrappers::IntervalStream;
#[derive(Deserialize)]
@@ -87,8 +88,8 @@ struct VersionFormat<'a> {
id: String,
#[serde(rename = "type")]
type_: std::borrow::Cow<'a, str>,
#[serde(rename = "releaseTime")]
release_time: chrono::DateTime<chrono::Utc>,
#[serde(rename = "releaseTime", with = "crate::util::time_ser")]
release_time: OffsetDateTime,
}
async fn update_versions(
@@ -127,30 +128,26 @@ async fn update_versions(
lazy_static::lazy_static! {
/// Mojank for some reason has versions released at the same DateTime. This hardcodes them to fix this,
/// as most of our ordering logic is with DateTime
static ref HALL_OF_SHAME_2: [(&'static str, chrono::DateTime<chrono::Utc>); 4] = [
static ref HALL_OF_SHAME_2: [(&'static str, OffsetDateTime); 4] = [
(
"1.4.5",
chrono::DateTime::parse_from_rfc3339("2012-12-19T22:00:00+00:00")
OffsetDateTime::parse("2012-12-19T22:00:00+00:00", Rfc3339)
.unwrap()
.into(),
),
(
"1.4.6",
chrono::DateTime::parse_from_rfc3339("2012-12-19T22:00:01+00:00")
OffsetDateTime::parse("2012-12-19T22:00:01+00:00", Rfc3339)
.unwrap()
.into(),
),
(
"1.6.3",
chrono::DateTime::parse_from_rfc3339("2013-09-13T10:54:41+00:00")
OffsetDateTime::parse("2013-09-13T10:54:41+00:00", Rfc3339)
.unwrap()
.into(),
),
(
"13w37b",
chrono::DateTime::parse_from_rfc3339("2013-09-13T10:54:42+00:00")
OffsetDateTime::parse("2013-09-13T10:54:42+00:00", Rfc3339)
.unwrap()
.into(),
),
];
}

View File

@@ -68,9 +68,9 @@ pub async fn index_local(
icon_url: m.icon_url.unwrap_or_default(),
author: m.username,
date_created: m.published,
created_timestamp: m.published.timestamp(),
created_timestamp: m.published.unix_timestamp(),
date_modified: m.updated,
modified_timestamp: m.updated.timestamp(),
modified_timestamp: m.updated.unix_timestamp(),
latest_version: versions.last().cloned().unwrap_or_else(|| "None".to_string()),
versions,
license: m.short,
@@ -142,9 +142,9 @@ pub async fn query_one(
icon_url: m.icon_url.unwrap_or_default(),
author: m.username,
date_created: m.published,
created_timestamp: m.published.timestamp(),
created_timestamp: m.published.unix_timestamp(),
date_modified: m.updated,
modified_timestamp: m.updated.timestamp(),
modified_timestamp: m.updated.unix_timestamp(),
latest_version: versions
.last()
.cloned()

View File

@@ -17,7 +17,7 @@ pub enum IndexingError {
#[error("Error while serializing or deserializing JSON: {0}")]
Serde(#[from] serde_json::Error),
#[error("Error while parsing a timestamp: {0}")]
ParseDate(#[from] chrono::format::ParseError),
ParseDate(#[from] time::error::Error),
#[error("Database Error: {0}")]
Sqlx(#[from] sqlx::error::Error),
#[error("Database Error: {0}")]

View File

@@ -2,13 +2,13 @@ use crate::models::error::ApiError;
use crate::models::projects::SearchRequest;
use actix_web::http::StatusCode;
use actix_web::HttpResponse;
use chrono::{DateTime, Utc};
use meilisearch_sdk::client::Client;
use meilisearch_sdk::document::Document;
use serde::{Deserialize, Serialize};
use std::borrow::Cow;
use std::cmp::min;
use thiserror::Error;
use time::OffsetDateTime;
pub mod indexing;
@@ -84,12 +84,15 @@ pub struct UploadSearchProject {
pub server_side: String,
pub gallery: Vec<String>,
#[serde(with = "crate::util::time_ser")]
/// RFC 3339 formatted creation date of the project
pub date_created: DateTime<Utc>,
pub date_created: OffsetDateTime,
/// Unix timestamp of the creation date of the project
pub created_timestamp: i64,
#[serde(with = "crate::util::time_ser")]
/// RFC 3339 formatted date/time of last major modification (update)
pub date_modified: DateTime<Utc>,
pub date_modified: OffsetDateTime,
/// Unix timestamp of the last major modification
pub modified_timestamp: i64,
}

View File

@@ -3,5 +3,6 @@ pub mod env;
pub mod ext;
pub mod guards;
pub mod routes;
pub mod time_ser;
pub mod validate;
pub mod webhook;

src/util/time_ser.rs (new file, 42 lines)
View File

@@ -0,0 +1,42 @@
//! Use the well-known [RFC3339 format] when serializing and deserializing an [`OffsetDateTime`].
//!
//! Use this module in combination with serde's [`#[with]`][with] attribute.
//!
//! [RFC3339 format]: https://tools.ietf.org/html/rfc3339#section-5.6
//! [with]: https://serde.rs/field-attrs.html#with
use core::fmt;
use core::marker::PhantomData;
use serde::{de, Deserializer, Serialize, Serializer};
use time::Format::Rfc3339;
use time::OffsetDateTime;
/// Serialize an [`OffsetDateTime`] using the well-known RFC3339 format.
pub fn serialize<S: Serializer>(
datetime: &OffsetDateTime,
serializer: S,
) -> Result<S::Ok, S::Error> {
datetime.format(Rfc3339).serialize(serializer)
}
/// Deserialize an [`OffsetDateTime`] from its RFC3339 representation.
pub fn deserialize<'a, D: Deserializer<'a>>(
deserializer: D,
) -> Result<OffsetDateTime, D::Error> {
deserializer.deserialize_any(Visitor(PhantomData))
}
pub(super) struct Visitor<T: ?Sized>(pub(super) PhantomData<T>);
impl<'a> de::Visitor<'a> for Visitor<OffsetDateTime> {
type Value = OffsetDateTime;
fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
formatter.write_str("an `OffsetDateTime`")
}
fn visit_str<E: de::Error>(self, value: &str) -> Result<OffsetDateTime, E> {
OffsetDateTime::parse(value, Rfc3339).map_err(E::custom)
}
}
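For reference, a minimal usage sketch of the new helper; the `Example` struct is hypothetical, while the attribute path matches the `time_ser` module declared in src/util/mod.rs above.

use serde::{Deserialize, Serialize};
use time::OffsetDateTime;

#[derive(Serialize, Deserialize)]
struct Example {
    // Serialized to, and parsed from, an RFC 3339 string such as "2022-03-29T19:35:09-07:00"
    #[serde(with = "crate::util::time_ser")]
    created: OffsetDateTime,
}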

View File

@@ -1,13 +1,14 @@
use crate::models::projects::Project;
use chrono::{DateTime, Utc};
use serde::Serialize;
use time::OffsetDateTime;
#[derive(Serialize)]
struct DiscordEmbed {
pub title: String,
pub description: String,
pub url: String,
pub timestamp: DateTime<Utc>,
#[serde(with = "crate::util::time_ser")]
pub timestamp: OffsetDateTime,
pub color: u32,
pub fields: Vec<DiscordEmbedField>,
pub image: DiscordEmbedImage,

View File

@@ -1,8 +1,8 @@
use crate::validate::{
SupportedGameVersions, ValidationError, ValidationResult,
};
use chrono::{DateTime, NaiveDateTime, Utc};
use std::io::Cursor;
use time::OffsetDateTime;
use zip::ZipArchive;
pub struct FabricValidator;
@@ -22,9 +22,8 @@ impl super::Validator for FabricValidator {
fn get_supported_game_versions(&self) -> SupportedGameVersions {
// Time since release of 18w49a, the first fabric version
SupportedGameVersions::PastDate(DateTime::from_utc(
NaiveDateTime::from_timestamp(1543969469, 0),
Utc,
SupportedGameVersions::PastDate(OffsetDateTime::from_unix_timestamp(
1543969469,
))
}

View File

@@ -1,8 +1,8 @@
use crate::validate::{
SupportedGameVersions, ValidationError, ValidationResult,
};
use chrono::{DateTime, NaiveDateTime, Utc};
use std::io::Cursor;
use time::OffsetDateTime;
use zip::ZipArchive;
pub struct ForgeValidator;
@@ -22,9 +22,8 @@ impl super::Validator for ForgeValidator {
fn get_supported_game_versions(&self) -> SupportedGameVersions {
// Time since release of 1.13, the first forge version which uses the new TOML system
SupportedGameVersions::PastDate(DateTime::<Utc>::from_utc(
NaiveDateTime::from_timestamp(1540122067, 0),
Utc,
SupportedGameVersions::PastDate(OffsetDateTime::from_unix_timestamp(
1540122067,
))
}
@@ -68,14 +67,8 @@ impl super::Validator for LegacyForgeValidator {
fn get_supported_game_versions(&self) -> SupportedGameVersions {
// Times between versions 1.5.2 to 1.12.2, which all use the legacy way of defining mods
SupportedGameVersions::Range(
DateTime::from_utc(
NaiveDateTime::from_timestamp(1366818300, 0),
Utc,
),
DateTime::from_utc(
NaiveDateTime::from_timestamp(1505810340, 0),
Utc,
),
OffsetDateTime::from_unix_timestamp(1366818300),
OffsetDateTime::from_unix_timestamp(1505810340),
)
}

View File

@@ -2,9 +2,9 @@ use crate::models::projects::{GameVersion, Loader};
use crate::validate::fabric::FabricValidator;
use crate::validate::forge::{ForgeValidator, LegacyForgeValidator};
use crate::validate::pack::PackValidator;
use chrono::{DateTime, Utc};
use std::io::Cursor;
use thiserror::Error;
use time::OffsetDateTime;
use zip::ZipArchive;
mod fabric;
@@ -35,8 +35,8 @@ pub enum ValidationResult {
pub enum SupportedGameVersions {
All,
PastDate(DateTime<Utc>),
Range(DateTime<Utc>, DateTime<Utc>),
PastDate(OffsetDateTime),
Range(OffsetDateTime, OffsetDateTime),
#[allow(dead_code)]
Custom(Vec<GameVersion>),
}

View File

@@ -33,6 +33,7 @@ pub struct PackFile<'a> {
pub env: Option<std::collections::HashMap<EnvType, SideType>>,
#[validate(custom(function = "validate_download_url"))]
pub downloads: Vec<&'a str>,
pub file_size: u32,
}
fn validate_download_url(
@@ -167,6 +168,12 @@ impl super::Validator for PackValidator {
));
}
if file.hashes.get(&FileHash::Sha512).is_none() {
return Err(ValidationError::InvalidInput(
"All pack files must provide a SHA512 hash!".into(),
));
}
let path = std::path::Path::new(file.path)
.components()
.next()