AstralRinth (forked from didirus/AstralRinth)
Add S3 File Host (#81)
* Add S3 File Host
* Fix tests, set default ACL level to public
* Refactor
* Fix merge conflicts
* Env fixes
* Run formatter
* Remove extra allocations
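The commit wires the new backend into startup configuration: `main.rs` now reads a `STORAGE_BACKEND` variable ("backblaze", "s3", or "local", falling back to "local" when unset) and the S3 host is configured entirely through `dotenv` variables. A minimal illustrative `.env` sketch follows; the variable names come from this diff, while every value is a placeholder and not part of the commit:

    # Select the file host implementation: "backblaze", "s3", or "local"
    STORAGE_BACKEND=s3

    # Read by S3Host::new when STORAGE_BACKEND=s3 (placeholder values)
    S3_BUCKET_NAME=my-bucket
    S3_REGION=us-east-1
    S3_URL=https://s3.us-east-1.amazonaws.com
    S3_ACCESS_TOKEN=placeholder-access-key
    S3_SECRET=placeholder-secret-key

    # Required instead when STORAGE_BACKEND=local
    # MOCK_FILE_PATH=./mock-files

Note that `S3Host::new` attaches an `x-amz-acl: public-read` header to the bucket, so uploaded objects are publicly readable by default (the "default ACL level" mentioned above).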
src/file_hosting/mod.rs
@@ -3,9 +3,13 @@ use thiserror::Error;
 mod backblaze;
 mod mock;
+mod s3_host;
 
 pub use backblaze::BackblazeHost;
 pub use mock::MockHost;
+use s3::creds::AwsCredsError;
+use s3::S3Error;
+pub use s3_host::S3Host;
 
 #[derive(Error, Debug)]
 pub enum FileHostingError {
@@ -13,6 +17,10 @@ pub enum FileHostingError {
     HttpError(#[from] reqwest::Error),
     #[error("Backblaze error: {0}")]
     BackblazeError(serde_json::Value),
+    #[error("S3 error: {0}")]
+    S3Error(#[from] S3Error),
+    #[error("S3 Authentication error: {0}")]
+    S3CredentialsError(#[from] AwsCredsError),
     #[error("File system error in file hosting: {0}")]
     FileSystemError(#[from] std::io::Error),
     #[error("Invalid Filename")]
src/file_hosting/s3_host.rs (new file, 104 lines)
@@ -0,0 +1,104 @@
+use crate::file_hosting::{DeleteFileData, FileHost, FileHostingError, UploadFileData};
+use async_trait::async_trait;
+use s3::bucket::Bucket;
+use s3::creds::Credentials;
+use s3::region::Region;
+
+pub struct S3Host {
+    bucket: Bucket,
+}
+
+impl S3Host {
+    pub fn new(
+        bucket_name: &str,
+        bucket_region: &str,
+        url: &str,
+        access_token: &str,
+        secret: &str,
+    ) -> Result<S3Host, FileHostingError> {
+        let mut bucket = Bucket::new(
+            bucket_name,
+            Region::Custom {
+                region: bucket_region.to_string(),
+                endpoint: url.to_string(),
+            },
+            Credentials::new(Some(access_token), Some(secret), None, None, None)?,
+        )?;
+
+        bucket.add_header("x-amz-acl", "public-read");
+
+        Ok(S3Host { bucket })
+    }
+}
+
+#[async_trait]
+impl FileHost for S3Host {
+    async fn upload_file(
+        &self,
+        content_type: &str,
+        file_name: &str,
+        file_bytes: Vec<u8>,
+    ) -> Result<UploadFileData, FileHostingError> {
+        let content_sha1 = sha1::Sha1::from(&file_bytes).hexdigest();
+
+        self.bucket
+            .put_object_with_content_type(
+                format!("/{}", file_name),
+                file_bytes.as_slice(),
+                content_type,
+            )
+            .await?;
+
+        Ok(UploadFileData {
+            file_id: file_name.to_string(),
+            file_name: file_name.to_string(),
+            content_length: file_bytes.len() as u32,
+            content_sha1,
+            content_md5: None,
+            content_type: content_type.to_string(),
+            upload_timestamp: chrono::Utc::now().timestamp_millis() as u64,
+        })
+    }
+
+    async fn delete_file_version(
+        &self,
+        file_id: &str,
+        file_name: &str,
+    ) -> Result<DeleteFileData, FileHostingError> {
+        self.bucket.delete_object(format!("/{}", file_name)).await?;
+
+        Ok(DeleteFileData {
+            file_id: file_id.to_string(),
+            file_name: file_name.to_string(),
+        })
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::file_hosting::s3_host::S3Host;
+    use crate::file_hosting::FileHost;
+
+    #[actix_rt::test]
+    async fn test_file_management() {
+        let s3_host = S3Host::new(
+            &*dotenv::var("S3_BUCKET_NAME").unwrap(),
+            &*dotenv::var("S3_REGION").unwrap(),
+            &*dotenv::var("S3_URL").unwrap(),
+            &*dotenv::var("S3_ACCESS_TOKEN").unwrap(),
+            &*dotenv::var("S3_SECRET").unwrap(),
+        )
+        .unwrap();
+
+        s3_host
+            .upload_file(
+                "text/plain",
+                "test.txt",
+                "test file".to_string().into_bytes(),
+            )
+            .await
+            .unwrap();
+
+        s3_host.delete_file_version("", "test.txt").await.unwrap();
+    }
+}
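For readers without the rest of the codebase at hand: `S3Host` plugs into the existing `FileHost` abstraction in the `file_hosting` module, alongside `BackblazeHost` and `MockHost`. The trait itself is not part of this diff; the following is a rough reconstruction inferred from the `impl FileHost for S3Host` block and the struct literals above, so exact field types and signatures are assumptions:

    // Hypothetical sketch of the FileHost trait and its data types, inferred from
    // s3_host.rs. FileHostingError is the error enum shown in the first hunk.
    use async_trait::async_trait;

    pub struct UploadFileData {
        pub file_id: String,
        pub file_name: String,
        pub content_length: u32,
        pub content_sha1: String,
        pub content_md5: Option<String>,
        pub content_type: String,
        pub upload_timestamp: u64,
    }

    pub struct DeleteFileData {
        pub file_id: String,
        pub file_name: String,
    }

    #[async_trait]
    pub trait FileHost {
        // Uploads raw bytes under file_name and reports metadata about the stored object.
        async fn upload_file(
            &self,
            content_type: &str,
            file_name: &str,
            file_bytes: Vec<u8>,
        ) -> Result<UploadFileData, FileHostingError>;

        // Deletes a previously uploaded object; S3Host ignores file_id and keys on file_name.
        async fn delete_file_version(
            &self,
            file_id: &str,
            file_name: &str,
        ) -> Result<DeleteFileData, FileHostingError>;
    }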
src/main.rs (44 lines changed)
@@ -1,3 +1,4 @@
+use crate::file_hosting::S3Host;
 use actix_cors::Cors;
 use actix_web::middleware::Logger;
 use actix_web::{http, web, App, HttpServer};
@@ -64,12 +65,10 @@ async fn main() -> std::io::Result<()> {
         .await
         .expect("Database connection failed");
 
-    let backblaze_enabled = dotenv::var("BACKBLAZE_ENABLED")
-        .ok()
-        .and_then(|s| s.parse::<bool>().ok())
-        .unwrap_or(false);
+    let storage_backend = dotenv::var("STORAGE_BACKEND").unwrap_or_else(|_| "local".to_string());
 
-    let file_host: Arc<dyn file_hosting::FileHost + Send + Sync> = if backblaze_enabled {
+    let file_host: Arc<dyn file_hosting::FileHost + Send + Sync> = if storage_backend == "backblaze"
+    {
         Arc::new(
             file_hosting::BackblazeHost::new(
                 &dotenv::var("BACKBLAZE_KEY_ID").unwrap(),
@@ -78,8 +77,21 @@ async fn main() -> std::io::Result<()> {
             )
             .await,
         )
-    } else {
+    } else if storage_backend == "s3" {
+        Arc::new(
+            S3Host::new(
+                &*dotenv::var("S3_BUCKET_NAME").unwrap(),
+                &*dotenv::var("S3_REGION").unwrap(),
+                &*dotenv::var("S3_URL").unwrap(),
+                &*dotenv::var("S3_ACCESS_TOKEN").unwrap(),
+                &*dotenv::var("S3_SECRET").unwrap(),
+            )
+            .unwrap(),
+        )
+    } else if storage_backend == "local" {
         Arc::new(file_hosting::MockHost::new())
+    } else {
+        panic!("Invalid storage backend specified. Aborting startup!")
     };
 
     let mut scheduler = scheduler::Scheduler::new();
@@ -243,16 +255,24 @@ fn check_env_vars() {
     check_var::<String>("MEILISEARCH_ADDR");
     check_var::<String>("BIND_ADDR");
 
-    if dotenv::var("BACKBLAZE_ENABLED")
-        .ok()
-        .and_then(|s| s.parse::<bool>().ok())
-        .unwrap_or(false)
-    {
+    check_var::<String>("STORAGE_BACKEND");
+
+    let storage_backend = dotenv::var("STORAGE_BACKEND").ok();
+
+    if storage_backend.as_deref() == Some("backblaze") {
         check_var::<String>("BACKBLAZE_KEY_ID");
         check_var::<String>("BACKBLAZE_KEY");
         check_var::<String>("BACKBLAZE_BUCKET_ID");
-    } else {
+    } else if storage_backend.as_deref() == Some("s3") {
+        check_var::<String>("S3_ACCESS_TOKEN");
+        check_var::<String>("S3_SECRET");
+        check_var::<String>("S3_URL");
+        check_var::<String>("S3_REGION");
+        check_var::<String>("S3_BUCKET_NAME");
+    } else if storage_backend.as_deref() == Some("local") {
         check_var::<String>("MOCK_FILE_PATH");
+    } else if let Some(backend) = storage_backend {
+        warn!("Variable `STORAGE_BACKEND` contains an invalid value: {}. Expected \"backblaze\", \"s3\", or \"local\".", backend);
     }
 
     check_var::<bool>("INDEX_CURSEFORGE");