Initial shared instances backend (#3800)

* Create base shared instance migration and initial routes

* Fix build

* Add version uploads

* Add permissions field for shared instance users

* Actually use permissions field

* Add "public" flag to shared instances that allows GETing them without authorization

* Add the ability to get and list shared instance versions

* Add the ability to delete shared instance versions

* Fix build after merge

* Secured file hosting (#3784)

* Remove Backblaze-specific file-hosting backend

* Added S3_USES_PATH_STYLE_BUCKETS

* Remove unused file_id parameter from delete_file_version

* Add support for separate public and private buckets in labrinth::file_hosting

* Rename delete_file_version to delete_file

* Add (untested) get_url_for_private_file

* Remove url field from shared instance routes

* Remove url field from shared instance routes

* Use private bucket for shared instance versions

* Make S3 environment variables fully separate between public and private buckets

* Change file host expiry for shared instances to 180 seconds

* Fix lint

* Merge shared instance migrations into a single migration

* Replace shared instance owners with Ghost instead of deleting the instance
This commit is contained in:
Josiah Glosson
2025-06-19 14:46:12 -05:00
committed by GitHub
parent d4864deac5
commit cc34e69524
61 changed files with 2161 additions and 491 deletions

View File

@@ -1,108 +0,0 @@
use super::{DeleteFileData, FileHost, FileHostingError, UploadFileData};
use async_trait::async_trait;
use bytes::Bytes;
use reqwest::Response;
use serde::Deserialize;
use sha2::Digest;
mod authorization;
mod delete;
mod upload;
/// File host backed by the Backblaze B2 native API.
///
/// Holds the authorization and upload-URL data obtained once at
/// construction time; tokens are not refreshed afterwards.
pub struct BackblazeHost {
    // Pre-fetched upload endpoint + token used by the upload module.
    upload_url_data: authorization::UploadUrlData,
    // Account-level authorization (API URL + auth token) for other calls.
    authorization_data: authorization::AuthorizationData,
}
impl BackblazeHost {
    /// Authorizes against Backblaze B2 and pre-fetches an upload URL for
    /// `bucket_id`.
    ///
    /// # Panics
    /// Panics if either B2 API call fails (bad credentials, network
    /// failure, or unknown bucket). The host is constructed once at
    /// startup, so failing fast with a descriptive message is intended;
    /// `expect` replaces the bare `unwrap`s so the panic says why.
    pub async fn new(key_id: &str, key: &str, bucket_id: &str) -> Self {
        let authorization_data = authorization::authorize_account(key_id, key)
            .await
            .expect("failed to authorize Backblaze B2 account");
        let upload_url_data =
            authorization::get_upload_url(&authorization_data, bucket_id)
                .await
                .expect("failed to obtain Backblaze B2 upload URL");
        BackblazeHost {
            upload_url_data,
            authorization_data,
        }
    }
}
#[async_trait]
impl FileHost for BackblazeHost {
    /// Uploads `file_bytes` to B2 under `file_name`.
    ///
    /// The SHA-512 digest is computed locally before the upload (the B2
    /// response carries only SHA-1/MD5), so the returned `UploadFileData`
    /// includes all digests.
    ///
    /// # Errors
    /// Returns `FileHostingError` if the B2 upload call fails.
    async fn upload_file(
        &self,
        content_type: &str,
        file_name: &str,
        file_bytes: Bytes,
    ) -> Result<UploadFileData, FileHostingError> {
        let content_sha512 = format!("{:x}", sha2::Sha512::digest(&file_bytes));
        let upload_data = upload::upload_file(
            &self.upload_url_data,
            content_type,
            file_name,
            file_bytes,
        )
        .await?;
        Ok(UploadFileData {
            file_id: upload_data.file_id,
            file_name: upload_data.file_name,
            content_length: upload_data.content_length,
            content_sha512,
            content_sha1: upload_data.content_sha1,
            content_md5: upload_data.content_md5,
            content_type: upload_data.content_type,
            upload_timestamp: upload_data.upload_timestamp,
        })
    }

    // NOTE: a commented-out `upload_file_streaming` draft previously lived
    // here; it buffered the whole stream in memory anyway, so it offered
    // nothing over `upload_file` and has been removed as dead code.

    /// Deletes one specific version of a file from B2.
    ///
    /// # Errors
    /// Returns `FileHostingError` if the B2 delete call fails.
    async fn delete_file_version(
        &self,
        file_id: &str,
        file_name: &str,
    ) -> Result<DeleteFileData, FileHostingError> {
        let delete_data = delete::delete_file_version(
            &self.authorization_data,
            file_id,
            file_name,
        )
        .await?;
        Ok(DeleteFileData {
            file_id: delete_data.file_id,
            file_name: delete_data.file_name,
        })
    }
}
/// Deserializes a Backblaze API response body into `T`, mapping any
/// non-success HTTP status to `FileHostingError::BackblazeError` carrying
/// the JSON error payload returned by B2.
pub async fn process_response<T>(
    response: Response,
) -> Result<T, FileHostingError>
where
    T: for<'de> Deserialize<'de>,
{
    // Guard clause: surface the API's own error object on failure.
    if !response.status().is_success() {
        return Err(FileHostingError::BackblazeError(response.json().await?));
    }
    Ok(response.json().await?)
}

View File

@@ -1,81 +0,0 @@
use crate::file_hosting::FileHostingError;
use base64::Engine;
use serde::{Deserialize, Serialize};
/// The `allowed` capabilities object returned by `b2_authorize_account`.
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct AuthorizationPermissions {
    // Bucket restriction of the application key, if any.
    bucket_id: Option<String>,
    bucket_name: Option<String>,
    // Capability strings granted to this key.
    capabilities: Vec<String>,
    // File-name prefix restriction of the key, if any.
    name_prefix: Option<String>,
}
/// Response payload of `b2_authorize_account`.
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct AuthorizationData {
    pub absolute_minimum_part_size: i32,
    pub account_id: String,
    // Restrictions/capabilities of the key used to authorize.
    pub allowed: AuthorizationPermissions,
    // Base URL for subsequent API calls (used by `get_upload_url` etc.).
    pub api_url: String,
    // Token sent in the Authorization header of subsequent API calls.
    pub authorization_token: String,
    pub download_url: String,
    pub recommended_part_size: i32,
}
/// Response payload of `b2_get_upload_url`: the endpoint and token used
/// for the actual file upload.
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct UploadUrlData {
    pub bucket_id: String,
    pub upload_url: String,
    // Upload-specific token; distinct from the account-level
    // authorization token in `AuthorizationData`.
    pub authorization_token: String,
}
/// Calls `b2_authorize_account` using HTTP Basic auth built from the
/// key id and application key, returning the account authorization data.
pub async fn authorize_account(
    key_id: &str,
    application_key: &str,
) -> Result<AuthorizationData, FileHostingError> {
    // B2 expects `Basic base64(keyId:applicationKey)`.
    let encoded = base64::engine::general_purpose::STANDARD
        .encode(format!("{key_id}:{application_key}"));
    let auth_header = format!("Basic {encoded}");
    let client = reqwest::Client::new();
    let response = client
        .get("https://api.backblazeb2.com/b2api/v2/b2_authorize_account")
        .header(reqwest::header::CONTENT_TYPE, "application/json")
        .header(reqwest::header::AUTHORIZATION, auth_header)
        .send()
        .await?;
    super::process_response(response).await
}
/// Calls `b2_get_upload_url` for `bucket_id`, returning the one-shot
/// upload endpoint and token consumed by `upload::upload_file`.
///
/// # Errors
/// Returns `FileHostingError` on transport failure or a non-success
/// Backblaze response.
pub async fn get_upload_url(
    authorization_data: &AuthorizationData,
    bucket_id: &str,
) -> Result<UploadUrlData, FileHostingError> {
    let response = reqwest::Client::new()
        // `format!` already yields a String; the previous
        // `.to_string()` on it was a redundant extra allocation.
        .post(format!(
            "{}/b2api/v2/b2_get_upload_url",
            authorization_data.api_url
        ))
        .header(reqwest::header::CONTENT_TYPE, "application/json")
        .header(
            reqwest::header::AUTHORIZATION,
            &authorization_data.authorization_token,
        )
        .body(
            serde_json::json!({
                "bucketId": bucket_id,
            })
            .to_string(),
        )
        .send()
        .await?;
    super::process_response(response).await
}

View File

@@ -1,38 +0,0 @@
use super::authorization::AuthorizationData;
use crate::file_hosting::FileHostingError;
use serde::{Deserialize, Serialize};
/// Response payload of `b2_delete_file_version`, echoing the id and name
/// of the deleted file version.
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct DeleteFileData {
    pub file_id: String,
    pub file_name: String,
}
/// Calls `b2_delete_file_version` to remove one version of a file,
/// identified by both its id and its name.
pub async fn delete_file_version(
    authorization_data: &AuthorizationData,
    file_id: &str,
    file_name: &str,
) -> Result<DeleteFileData, FileHostingError> {
    let endpoint = format!(
        "{}/b2api/v2/b2_delete_file_version",
        authorization_data.api_url
    );
    let payload = serde_json::json!({
        "fileName": file_name,
        "fileId": file_id
    });
    let response = reqwest::Client::new()
        .post(endpoint)
        .header(reqwest::header::CONTENT_TYPE, "application/json")
        .header(
            reqwest::header::AUTHORIZATION,
            &authorization_data.authorization_token,
        )
        .body(payload.to_string())
        .send()
        .await?;
    super::process_response(response).await
}

View File

@@ -1,47 +0,0 @@
use super::authorization::UploadUrlData;
use crate::file_hosting::FileHostingError;
use bytes::Bytes;
use hex::ToHex;
use serde::{Deserialize, Serialize};
use sha1::Digest;
/// Response payload of the B2 upload call, describing the stored file.
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct UploadFileData {
    pub file_id: String,
    pub file_name: String,
    pub account_id: String,
    pub bucket_id: String,
    pub content_length: u32,
    // Digest we sent in X-Bz-Content-Sha1, echoed back by B2.
    pub content_sha1: String,
    pub content_md5: Option<String>,
    pub content_type: String,
    // NOTE(review): presumably milliseconds since epoch per the B2 API —
    // confirm against the Backblaze docs before relying on the unit.
    pub upload_timestamp: u64,
}
// Content types reference: https://www.backblaze.com/b2/docs/content-types.html
/// Uploads `file_bytes` to the pre-fetched B2 upload URL, attaching the
/// SHA-1 digest header that B2 uses to verify the payload.
pub async fn upload_file(
    url_data: &UploadUrlData,
    content_type: &str,
    file_name: &str,
    file_bytes: Bytes,
) -> Result<UploadFileData, FileHostingError> {
    // Compute the integrity digest and length before `file_bytes` is
    // moved into the request body.
    let content_sha1: String = sha1::Sha1::digest(&file_bytes).encode_hex();
    let content_length = file_bytes.len();
    let response = reqwest::Client::new()
        .post(&url_data.upload_url)
        .header(
            reqwest::header::AUTHORIZATION,
            &url_data.authorization_token,
        )
        .header("X-Bz-File-Name", file_name)
        .header(reqwest::header::CONTENT_TYPE, content_type)
        .header(reqwest::header::CONTENT_LENGTH, content_length)
        .header("X-Bz-Content-Sha1", content_sha1)
        .body(file_bytes)
        .send()
        .await?;
    super::process_response(response).await
}

View File

@@ -1,9 +1,13 @@
use super::{DeleteFileData, FileHost, FileHostingError, UploadFileData};
use super::{
DeleteFileData, FileHost, FileHostPublicity, FileHostingError,
UploadFileData,
};
use async_trait::async_trait;
use bytes::Bytes;
use chrono::Utc;
use hex::ToHex;
use sha2::Digest;
use std::path::PathBuf;
/// File host that stores files on the local filesystem (under the
/// directory named by MOCK_FILE_PATH) via `std::fs` instead of a real
/// CDN. The unit field keeps the struct construction-private while
/// still allowing `Default`.
#[derive(Default)]
pub struct MockHost(());
@@ -20,11 +24,10 @@ impl FileHost for MockHost {
&self,
content_type: &str,
file_name: &str,
file_publicity: FileHostPublicity,
file_bytes: Bytes,
) -> Result<UploadFileData, FileHostingError> {
let path =
std::path::Path::new(&dotenvy::var("MOCK_FILE_PATH").unwrap())
.join(file_name.replace("../", ""));
let path = get_file_path(file_name, file_publicity);
std::fs::create_dir_all(
path.parent().ok_or(FileHostingError::InvalidFilename)?,
)?;
@@ -33,8 +36,8 @@ impl FileHost for MockHost {
std::fs::write(path, &*file_bytes)?;
Ok(UploadFileData {
file_id: String::from("MOCK_FILE_ID"),
file_name: file_name.to_string(),
file_publicity,
content_length: file_bytes.len() as u32,
content_sha512,
content_sha1,
@@ -44,20 +47,40 @@ impl FileHost for MockHost {
})
}
async fn delete_file_version(
async fn get_url_for_private_file(
&self,
file_id: &str,
file_name: &str,
_expiry_secs: u32,
) -> Result<String, FileHostingError> {
let cdn_url = dotenvy::var("CDN_URL").unwrap();
Ok(format!("{cdn_url}/private/{file_name}"))
}
async fn delete_file(
&self,
file_name: &str,
file_publicity: FileHostPublicity,
) -> Result<DeleteFileData, FileHostingError> {
let path =
std::path::Path::new(&dotenvy::var("MOCK_FILE_PATH").unwrap())
.join(file_name.replace("../", ""));
let path = get_file_path(file_name, file_publicity);
if path.exists() {
std::fs::remove_file(path)?;
}
Ok(DeleteFileData {
file_id: file_id.to_string(),
file_name: file_name.to_string(),
})
}
}
/// Resolves the on-disk location for `file_name` under the directory
/// named by the MOCK_FILE_PATH environment variable, nesting private
/// files in a "private" subdirectory.
///
/// # Panics
/// Panics if MOCK_FILE_PATH is unset (acceptable for the mock host).
fn get_file_path(
    file_name: &str,
    file_publicity: FileHostPublicity,
) -> PathBuf {
    let mut path = PathBuf::from(dotenvy::var("MOCK_FILE_PATH").unwrap());
    if matches!(file_publicity, FileHostPublicity::Private) {
        path.push("private");
    }
    // `file_name.replace("../", "")` is not a safe sanitizer: replace()
    // is a single pass, so e.g. "....//x" collapses back into "../x"
    // and can escape the mock directory. Instead, keep only the Normal
    // components of the (untrusted) name, dropping any "..", "." and
    // root/prefix components outright.
    for component in std::path::Path::new(file_name).components() {
        if let std::path::Component::Normal(part) = component {
            path.push(part);
        }
    }
    path
}

View File

@@ -1,23 +1,17 @@
use async_trait::async_trait;
use thiserror::Error;
mod backblaze;
mod mock;
mod s3_host;
pub use backblaze::BackblazeHost;
use bytes::Bytes;
pub use mock::MockHost;
pub use s3_host::S3Host;
pub use s3_host::{S3BucketConfig, S3Host};
#[derive(Error, Debug)]
pub enum FileHostingError {
#[error("Error while accessing the data from backblaze")]
HttpError(#[from] reqwest::Error),
#[error("Backblaze error: {0}")]
BackblazeError(serde_json::Value),
#[error("S3 error: {0}")]
S3Error(String),
#[error("S3 error when {0}: {1}")]
S3Error(&'static str, s3::error::S3Error),
#[error("File system error in file hosting: {0}")]
FileSystemError(#[from] std::io::Error),
#[error("Invalid Filename")]
@@ -26,8 +20,8 @@ pub enum FileHostingError {
#[derive(Debug, Clone)]
pub struct UploadFileData {
pub file_id: String,
pub file_name: String,
pub file_publicity: FileHostPublicity,
pub content_length: u32,
pub content_sha512: String,
pub content_sha1: String,
@@ -38,22 +32,34 @@ pub struct UploadFileData {
#[derive(Debug, Clone)]
pub struct DeleteFileData {
pub file_id: String,
pub file_name: String,
}
/// Whether a hosted file belongs to the public or the private side of
/// the file host (e.g. which S3 bucket it is stored in).
///
/// `PartialEq`/`Eq` are derived so call sites can compare with `==`
/// instead of `matches!`; this is purely additive for existing callers.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum FileHostPublicity {
    /// Stored in the public bucket/area.
    Public,
    /// Stored in the private bucket/area; access goes through
    /// presigned/authorized URLs.
    Private,
}
#[async_trait]
pub trait FileHost {
async fn upload_file(
&self,
content_type: &str,
file_name: &str,
file_publicity: FileHostPublicity,
file_bytes: Bytes,
) -> Result<UploadFileData, FileHostingError>;
async fn delete_file_version(
async fn get_url_for_private_file(
&self,
file_id: &str,
file_name: &str,
expiry_secs: u32,
) -> Result<String, FileHostingError>;
async fn delete_file(
&self,
file_name: &str,
file_publicity: FileHostPublicity,
) -> Result<DeleteFileData, FileHostingError>;
}

View File

@@ -1,5 +1,6 @@
use crate::file_hosting::{
DeleteFileData, FileHost, FileHostingError, UploadFileData,
DeleteFileData, FileHost, FileHostPublicity, FileHostingError,
UploadFileData,
};
use async_trait::async_trait;
use bytes::Bytes;
@@ -10,50 +11,70 @@ use s3::creds::Credentials;
use s3::region::Region;
use sha2::Digest;
/// Connection settings for one S3 bucket (the public or the private one).
pub struct S3BucketConfig {
    pub name: String,
    // true  => path-style addressing (endpoint/bucket/key);
    // false => subdomain/virtual-host style addressing.
    pub uses_path_style: bool,
    // Set to "r2" to target Cloudflare R2; any other value is used as a
    // custom region name together with `url` as the endpoint.
    pub region: String,
    pub url: String,
    pub access_token: String,
    pub secret: String,
}
pub struct S3Host {
bucket: Bucket,
public_bucket: Bucket,
private_bucket: Bucket,
}
impl S3Host {
pub fn new(
bucket_name: &str,
bucket_region: &str,
url: &str,
access_token: &str,
secret: &str,
public_bucket: S3BucketConfig,
private_bucket: S3BucketConfig,
) -> Result<S3Host, FileHostingError> {
let bucket = Bucket::new(
bucket_name,
if bucket_region == "r2" {
Region::R2 {
account_id: url.to_string(),
}
} else {
Region::Custom {
region: bucket_region.to_string(),
endpoint: url.to_string(),
}
},
Credentials::new(
Some(access_token),
Some(secret),
None,
None,
None,
)
.map_err(|_| {
FileHostingError::S3Error(
"Error while creating credentials".to_string(),
let create_bucket =
|config: S3BucketConfig| -> Result<_, FileHostingError> {
let mut bucket = Bucket::new(
"",
if config.region == "r2" {
Region::R2 {
account_id: config.url,
}
} else {
Region::Custom {
region: config.region,
endpoint: config.url,
}
},
Credentials {
access_key: Some(config.access_token),
secret_key: Some(config.secret),
..Credentials::anonymous().unwrap()
},
)
})?,
)
.map_err(|_| {
FileHostingError::S3Error(
"Error while creating Bucket instance".to_string(),
)
})?;
.map_err(|e| {
FileHostingError::S3Error("creating Bucket instance", e)
})?;
Ok(S3Host { bucket: *bucket })
bucket.name = config.name;
if config.uses_path_style {
bucket.set_path_style();
} else {
bucket.set_subdomain_style();
}
Ok(bucket)
};
Ok(S3Host {
public_bucket: *create_bucket(public_bucket)?,
private_bucket: *create_bucket(private_bucket)?,
})
}
    /// Selects the S3 bucket matching the requested publicity level.
    fn get_bucket(&self, publicity: FileHostPublicity) -> &Bucket {
        match publicity {
            FileHostPublicity::Public => &self.public_bucket,
            FileHostPublicity::Private => &self.private_bucket,
        }
    }
}
@@ -63,27 +84,24 @@ impl FileHost for S3Host {
&self,
content_type: &str,
file_name: &str,
file_publicity: FileHostPublicity,
file_bytes: Bytes,
) -> Result<UploadFileData, FileHostingError> {
let content_sha1 = sha1::Sha1::digest(&file_bytes).encode_hex();
let content_sha512 = format!("{:x}", sha2::Sha512::digest(&file_bytes));
self.bucket
self.get_bucket(file_publicity)
.put_object_with_content_type(
format!("/{file_name}"),
&file_bytes,
content_type,
)
.await
.map_err(|err| {
FileHostingError::S3Error(format!(
"Error while uploading file {file_name} to S3: {err}"
))
})?;
.map_err(|e| FileHostingError::S3Error("uploading file", e))?;
Ok(UploadFileData {
file_id: file_name.to_string(),
file_name: file_name.to_string(),
file_publicity,
content_length: file_bytes.len() as u32,
content_sha512,
content_sha1,
@@ -93,22 +111,32 @@ impl FileHost for S3Host {
})
}
async fn delete_file_version(
async fn get_url_for_private_file(
&self,
file_id: &str,
file_name: &str,
expiry_secs: u32,
) -> Result<String, FileHostingError> {
let url = self
.private_bucket
.presign_get(format!("/{file_name}"), expiry_secs, None)
.await
.map_err(|e| {
FileHostingError::S3Error("generating presigned URL", e)
})?;
Ok(url)
}
async fn delete_file(
&self,
file_name: &str,
file_publicity: FileHostPublicity,
) -> Result<DeleteFileData, FileHostingError> {
self.bucket
self.get_bucket(file_publicity)
.delete_object(format!("/{file_name}"))
.await
.map_err(|err| {
FileHostingError::S3Error(format!(
"Error while deleting file {file_name} to S3: {err}"
))
})?;
.map_err(|e| FileHostingError::S3Error("deleting file", e))?;
Ok(DeleteFileData {
file_id: file_id.to_string(),
file_name: file_name.to_string(),
})
}