Fix uploading (#305)

* Upgrade rust-s3 to fix tokio panics

* Run fmt

* Update deps
This commit is contained in:
Geometrically
2022-02-20 20:16:32 -07:00
committed by GitHub
parent 3ee144459f
commit 9492363b22
6 changed files with 416 additions and 812 deletions

Cargo.lock (generated) — 1141 changed lines

File diff suppressed because it is too large. (Load Diff)

View File

@@ -51,7 +51,7 @@ lazy_static = "1.4.0"
futures = "0.3.6"
futures-timer = "3.0.2"
rust-s3 = "0.26.1"
rust-s3 = "0.29.0"
async-trait = "0.1.41"
sqlx = { version = "0.5.10", features = ["runtime-actix-rustls", "postgres", "chrono", "offline", "macros", "migrate"] }

View File

@@ -8,8 +8,6 @@ mod s3_host;
pub use backblaze::BackblazeHost;
use bytes::Bytes;
pub use mock::MockHost;
use s3::creds::AwsCredsError;
use s3::S3Error;
pub use s3_host::S3Host;
#[derive(Error, Debug)]
@@ -19,9 +17,7 @@ pub enum FileHostingError {
#[error("Backblaze error: {0}")]
BackblazeError(serde_json::Value),
#[error("S3 error: {0}")]
S3Error(#[from] S3Error),
#[error("S3 Authentication error: {0}")]
S3CredentialsError(#[from] AwsCredsError),
S3Error(String),
#[error("File system error in file hosting: {0}")]
FileSystemError(#[from] std::io::Error),
#[error("Invalid Filename")]

View File

@@ -24,8 +24,13 @@ impl S3Host {
region: bucket_region.to_string(),
endpoint: url.to_string(),
},
Credentials::new(Some(access_token), Some(secret), None, None, None)?,
)?;
Credentials::new(Some(access_token), Some(secret), None, None, None).map_err(|_| {
FileHostingError::S3Error("Error while creating credentials".to_string())
})?,
)
.map_err(|_| {
FileHostingError::S3Error("Error while creating Bucket instance".to_string())
})?;
bucket.add_header("x-amz-acl", "public-read");
@@ -46,7 +51,10 @@ impl FileHost for S3Host {
self.bucket
.put_object_with_content_type(format!("/{}", file_name), &*file_bytes, content_type)
.await?;
.await
.map_err(|_| {
FileHostingError::S3Error("Error while uploading file to S3".to_string())
})?;
Ok(UploadFileData {
file_id: file_name.to_string(),
@@ -65,7 +73,12 @@ impl FileHost for S3Host {
file_id: &str,
file_name: &str,
) -> Result<DeleteFileData, FileHostingError> {
self.bucket.delete_object(format!("/{}", file_name)).await?;
self.bucket
.delete_object(format!("/{}", file_name))
.await
.map_err(|_| {
FileHostingError::S3Error("Error while deleting file from S3".to_string())
})?;
Ok(DeleteFileData {
file_id: file_id.to_string(),

View File

@@ -4,7 +4,7 @@ use crate::ratelimit::memory::{MemoryStore, MemoryStoreActor};
use crate::ratelimit::middleware::RateLimiter;
use crate::util::env::{parse_strings_from_var, parse_var};
use actix_cors::Cors;
use actix_web::{http, web, App, HttpServer};
use actix_web::{web, App, HttpServer};
use env_logger::Env;
use gumdrop::Options;
use log::{error, info, warn};
@@ -246,33 +246,33 @@ async fn main() -> std::io::Result<()> {
.max_age(3600)
.send_wildcard(),
)
// .wrap(
// RateLimiter::new(MemoryStoreActor::from(store.clone()).start())
// .with_identifier(|req| {
// let connection_info = req.connection_info();
// let ip =
// String::from(if parse_var("CLOUDFLARE_INTEGRATION").unwrap_or(false) {
// if let Some(header) = req.headers().get("CF-Connecting-IP") {
// header.to_str().map_err(|_| ARError::IdentificationError)?
// } else {
// connection_info
// .peer_addr()
// .ok_or(ARError::IdentificationError)?
// }
// } else {
// connection_info
// .peer_addr()
// .ok_or(ARError::IdentificationError)?
// });
//
// Ok(ip)
// })
// .with_interval(std::time::Duration::from_secs(60))
// .with_max_requests(300)
// .with_ignore_ips(
// parse_strings_from_var("RATE_LIMIT_IGNORE_IPS").unwrap_or_default(),
// ),
// )
.wrap(
RateLimiter::new(MemoryStoreActor::from(store.clone()).start())
.with_identifier(|req| {
let connection_info = req.connection_info();
let ip =
String::from(if parse_var("CLOUDFLARE_INTEGRATION").unwrap_or(false) {
if let Some(header) = req.headers().get("CF-Connecting-IP") {
header.to_str().map_err(|_| ARError::IdentificationError)?
} else {
connection_info
.peer_addr()
.ok_or(ARError::IdentificationError)?
}
} else {
connection_info
.peer_addr()
.ok_or(ARError::IdentificationError)?
});
Ok(ip)
})
.with_interval(std::time::Duration::from_secs(60))
.with_max_requests(300)
.with_ignore_ips(
parse_strings_from_var("RATE_LIMIT_IGNORE_IPS").unwrap_or_default(),
),
)
.app_data(web::Data::new(pool.clone()))
.app_data(web::Data::new(file_host.clone()))
.app_data(web::Data::new(indexing_queue.clone()))

View File

@@ -1,7 +1,7 @@
use actix_web::{get, HttpResponse};
use serde_json::json;
#[get("")]
#[get("/")]
pub async fn index_get() -> HttpResponse {
let data = json!({
"name": "modrinth-labrinth",