Merge tag 'v0.10.16' into beta

This commit is contained in:
2025-11-01 14:14:52 +03:00
203 changed files with 6321 additions and 2161 deletions

View File

@@ -1,6 +1,8 @@
**/*.rs
.sqlx
java/build
# Migrations existing before Prettier formatted them shall always be ignored,
# as any changes to them will break existing deployments
migrations/20240711194701_init.sql

View File

@@ -1,6 +1,6 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n max_concurrent_writes, max_concurrent_downloads,\n theme, default_page, collapsed_navigation, hide_nametag_skins_page, advanced_rendering, native_decorations,\n discord_rpc, developer_mode, telemetry, personalized_ads,\n onboarded,\n json(extra_launch_args) extra_launch_args, json(custom_env_vars) custom_env_vars,\n mc_memory_max, mc_force_fullscreen, mc_game_resolution_x, mc_game_resolution_y, hide_on_process_start,\n hook_pre_launch, hook_wrapper, hook_post_exit,\n custom_dir, prev_custom_dir, migrated, json(feature_flags) feature_flags, toggle_sidebar,\n skipped_update, pending_update_toast_for_version, auto_download_updates\n FROM settings\n ",
"query": "\n SELECT\n max_concurrent_writes, max_concurrent_downloads,\n theme, default_page, collapsed_navigation, hide_nametag_skins_page, advanced_rendering, native_decorations,\n discord_rpc, developer_mode, telemetry, personalized_ads,\n onboarded,\n json(extra_launch_args) extra_launch_args, json(custom_env_vars) custom_env_vars,\n mc_memory_max, mc_force_fullscreen, mc_game_resolution_x, mc_game_resolution_y, hide_on_process_start,\n hook_pre_launch, hook_wrapper, hook_post_exit,\n custom_dir, prev_custom_dir, migrated, json(feature_flags) feature_flags, toggle_sidebar,\n skipped_update, pending_update_toast_for_version, auto_download_updates,\n version\n FROM settings\n ",
"describe": {
"columns": [
{
@@ -157,6 +157,11 @@
"name": "auto_download_updates",
"ordinal": 30,
"type_info": "Integer"
},
{
"name": "version",
"ordinal": 31,
"type_info": "Integer"
}
],
"parameters": {
@@ -193,8 +198,9 @@
false,
true,
true,
true
true,
false
]
},
"hash": "7dc83d7ffa3d583fc5ffaf13811a8dab4d0b9ded6200f827b9de7ac32e5318d5"
"hash": "07ea3a644644de61c4ed7c30ee711d29fd49f10534230b1b03097275a30cb50f"
}

View File

@@ -41,7 +41,7 @@
{
"name": "display_claims!: serde_json::Value",
"ordinal": 7,
"type_info": "Text"
"type_info": "Null"
}
],
"parameters": {

View File

@@ -1,12 +1,12 @@
{
"db_name": "SQLite",
"query": "\n UPDATE settings\n SET\n max_concurrent_writes = $1,\n max_concurrent_downloads = $2,\n\n theme = $3,\n default_page = $4,\n collapsed_navigation = $5,\n advanced_rendering = $6,\n native_decorations = $7,\n\n discord_rpc = $8,\n developer_mode = $9,\n telemetry = $10,\n personalized_ads = $11,\n\n onboarded = $12,\n\n extra_launch_args = jsonb($13),\n custom_env_vars = jsonb($14),\n mc_memory_max = $15,\n mc_force_fullscreen = $16,\n mc_game_resolution_x = $17,\n mc_game_resolution_y = $18,\n hide_on_process_start = $19,\n\n hook_pre_launch = $20,\n hook_wrapper = $21,\n hook_post_exit = $22,\n\n custom_dir = $23,\n prev_custom_dir = $24,\n migrated = $25,\n\n toggle_sidebar = $26,\n feature_flags = $27,\n hide_nametag_skins_page = $28,\n\n skipped_update = $29,\n pending_update_toast_for_version = $30,\n auto_download_updates = $31\n ",
"query": "\n UPDATE settings\n SET\n max_concurrent_writes = $1,\n max_concurrent_downloads = $2,\n\n theme = $3,\n default_page = $4,\n collapsed_navigation = $5,\n advanced_rendering = $6,\n native_decorations = $7,\n\n discord_rpc = $8,\n developer_mode = $9,\n telemetry = $10,\n personalized_ads = $11,\n\n onboarded = $12,\n\n extra_launch_args = jsonb($13),\n custom_env_vars = jsonb($14),\n mc_memory_max = $15,\n mc_force_fullscreen = $16,\n mc_game_resolution_x = $17,\n mc_game_resolution_y = $18,\n hide_on_process_start = $19,\n\n hook_pre_launch = $20,\n hook_wrapper = $21,\n hook_post_exit = $22,\n\n custom_dir = $23,\n prev_custom_dir = $24,\n migrated = $25,\n\n toggle_sidebar = $26,\n feature_flags = $27,\n hide_nametag_skins_page = $28,\n\n skipped_update = $29,\n pending_update_toast_for_version = $30,\n auto_download_updates = $31,\n\n version = $32\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 31
"Right": 32
},
"nullable": []
},
"hash": "eb95fac3043d0ffd10caef69cc469474cc5c0d36cc0698c4cc0852da81fed158"
"hash": "a40e60da6dd1312d4a1ed52fa8fd2394e7ad21de1cb44cf8b93c4b1459cdc716"
}

View File

@@ -73,6 +73,7 @@ serde_json = { workspace = true }
serde_with = { workspace = true }
sha1_smol = { workspace = true }
sha2 = { workspace = true }
shlex = { workspace = true }
sqlx = { workspace = true, features = [
"json",
"macros",

View File

@@ -1,18 +1,7 @@
import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowCopyAction
import com.github.jengelman.gradle.plugins.shadow.transformers.CacheableTransformer
import com.github.jengelman.gradle.plugins.shadow.transformers.ResourceTransformer
import com.github.jengelman.gradle.plugins.shadow.transformers.TransformerContext
import org.apache.tools.zip.ZipEntry
import org.apache.tools.zip.ZipOutputStream
import java.io.IOException
import java.util.jar.JarFile
import java.util.jar.Attributes as JarAttributes
import java.util.jar.Manifest as JarManifest
plugins {
java
id("com.diffplug.spotless") version "7.0.4"
id("com.gradleup.shadow") version "9.0.0-rc2"
id("com.diffplug.spotless") version "8.0.0"
id("com.gradleup.shadow") version "9.2.2"
}
repositories {
@@ -20,9 +9,9 @@ repositories {
}
dependencies {
implementation("org.ow2.asm:asm:9.8")
implementation("org.ow2.asm:asm-tree:9.8")
implementation("com.google.code.gson:gson:2.13.1")
implementation("org.ow2.asm:asm:9.9")
implementation("org.ow2.asm:asm-tree:9.9")
implementation("com.google.code.gson:gson:2.13.2")
testImplementation(libs.junit.jupiter)
testRuntimeOnly("org.junit.platform:junit-platform-launcher")
@@ -30,7 +19,7 @@ dependencies {
java {
toolchain {
languageVersion = JavaLanguageVersion.of(11)
languageVersion = JavaLanguageVersion.of(17)
}
}
@@ -56,52 +45,9 @@ tasks.shadowJar {
attributes["Premain-Class"] = "com.modrinth.theseus.agent.TheseusAgent"
}
enableRelocation = true
addMultiReleaseAttribute = false
enableAutoRelocation = true
relocationPrefix = "com.modrinth.theseus.shadow"
// Adapted from ManifestResourceTransformer to do one thing: remove Multi-Release.
// Multi-Release gets added by shadow because gson has Multi-Release set to true, however
// shadow strips the actual versions directory, as gson only has a module-info.class in there.
// However, older versions of SecureJarHandler crash if Multi-Release is set to true but the
// versions directory is missing.
transform(@CacheableTransformer object : ResourceTransformer {
private var manifestDiscovered = false
private var manifest: JarManifest? = null
override fun canTransformResource(element: FileTreeElement): Boolean {
return JarFile.MANIFEST_NAME.equals(element.path, ignoreCase = true)
}
override fun transform(context: TransformerContext) {
if (!manifestDiscovered) {
try {
manifest = JarManifest(context.inputStream)
manifestDiscovered = true
} catch (e: IOException) {
logger.warn("Failed to read MANIFEST.MF", e)
}
}
}
override fun hasTransformedResource(): Boolean = true
override fun modifyOutputStream(
os: ZipOutputStream,
preserveFileTimestamps: Boolean
) {
// If we didn't find a manifest, then let's create one.
if (manifest == null) {
manifest = JarManifest()
}
manifest!!.mainAttributes.remove(JarAttributes.Name.MULTI_RELEASE)
os.putNextEntry(ZipEntry(JarFile.MANIFEST_NAME).apply {
time = ShadowCopyAction.CONSTANT_TIME_FOR_ZIP_ENTRIES
})
manifest!!.write(os)
}
})
}
tasks.named<Test>("test") {

View File

@@ -1,5 +1,5 @@
[versions]
junit-jupiter = "5.12.1"
junit-jupiter = "5.14.0"
[libraries]
junit-jupiter = { module = "org.junit.jupiter:junit-jupiter", version.ref = "junit-jupiter" }

View File

@@ -1,6 +1,6 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.14.2-bin.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-9.1.0-bin.zip
networkTimeout=10000
validateDistributionUrl=true
zipStoreBase=GRADLE_USER_HOME

View File

@@ -1,6 +1,6 @@
plugins {
// Apply the foojay-resolver plugin to allow automatic download of JDKs
id("org.gradle.toolchains.foojay-resolver-convention") version "0.10.0"
id("org.gradle.toolchains.foojay-resolver-convention") version "1.0.0"
}
rootProject.name = "theseus"

View File

@@ -0,0 +1 @@
ALTER TABLE settings ADD COLUMN version INTEGER NOT NULL DEFAULT 1;

View File

@@ -1,6 +1,6 @@
//! Miscellaneous PNG utilities for Minecraft skins.
use std::io::Read;
use std::io::{BufRead, Cursor, Seek};
use std::sync::Arc;
use base64::Engine;
@@ -9,7 +9,8 @@ use data_url::DataUrl;
use futures::{Stream, TryStreamExt, future::Either, stream};
use itertools::Itertools;
use rgb::Rgba;
use tokio_util::{compat::FuturesAsyncReadCompatExt, io::SyncIoBridge};
use tokio::io::AsyncReadExt;
use tokio_util::compat::FuturesAsyncReadCompatExt;
use url::Url;
use crate::{
@@ -95,7 +96,8 @@ pub fn dimensions(png_data: &[u8]) -> crate::Result<(u32, u32)> {
pub async fn normalize_skin_texture(
texture: &UrlOrBlob,
) -> crate::Result<Bytes> {
let texture_stream = SyncIoBridge::new(Box::pin(
let mut texture_data = Vec::with_capacity(8192);
Box::pin(
match texture {
UrlOrBlob::Url(url) => Either::Left(
url_to_data_stream(url)
@@ -112,84 +114,84 @@ pub async fn normalize_skin_texture(
),
}
.compat(),
));
)
.read_to_end(&mut texture_data)
.await?;
tokio::task::spawn_blocking(|| {
let mut png_reader = {
let mut decoder = png::Decoder::new(texture_stream);
decoder.set_transformations(
png::Transformations::normalize_to_color8(),
);
decoder.read_info()
}?;
let mut png_reader = {
let mut decoder = png::Decoder::new(Cursor::new(texture_data));
decoder
.set_transformations(png::Transformations::normalize_to_color8());
decoder.read_info()
}?;
// The code below assumes that the skin texture has valid dimensions.
// This also serves as a way to bail out early for obviously invalid or
// adversarial textures
if png_reader.info().width != 64
|| ![64, 32].contains(&png_reader.info().height)
{
Err(ErrorKind::InvalidSkinTexture)?;
}
// The code below assumes that the skin texture has valid dimensions.
// This also serves as a way to bail out early for obviously invalid or
// adversarial textures
if png_reader.info().width != 64
|| ![64, 32].contains(&png_reader.info().height)
{
Err(ErrorKind::InvalidSkinTexture)?;
}
let is_legacy_skin = png_reader.info().height == 32;
let mut texture_buf =
get_skin_texture_buffer(&mut png_reader, is_legacy_skin)?;
if is_legacy_skin {
convert_legacy_skin_texture(&mut texture_buf, png_reader.info());
do_notch_transparency_hack(&mut texture_buf, png_reader.info());
}
make_inner_parts_opaque(&mut texture_buf, png_reader.info());
let is_legacy_skin = png_reader.info().height == 32;
let mut texture_buf =
get_skin_texture_buffer(&mut png_reader, is_legacy_skin)?;
if is_legacy_skin {
convert_legacy_skin_texture(&mut texture_buf, png_reader.info());
do_notch_transparency_hack(&mut texture_buf, png_reader.info());
}
make_inner_parts_opaque(&mut texture_buf, png_reader.info());
let mut encoded_png = vec![];
let mut encoded_png = vec![];
let mut png_encoder = png::Encoder::new(&mut encoded_png, 64, 64);
png_encoder.set_color(png::ColorType::Rgba);
png_encoder.set_depth(png::BitDepth::Eight);
png_encoder.set_filter(png::FilterType::NoFilter);
png_encoder.set_compression(png::Compression::Fast);
let mut png_encoder = png::Encoder::new(&mut encoded_png, 64, 64);
png_encoder.set_color(png::ColorType::Rgba);
png_encoder.set_depth(png::BitDepth::Eight);
png_encoder.set_filter(png::Filter::NoFilter);
png_encoder.set_compression(png::Compression::Fast);
// Keeping color space information properly set, to handle the occasional
// strange PNG with non-sRGB chromaticities and/or different grayscale spaces
// that keeps most people wondering, is what sets a carefully crafted image
// manipulation routine apart :)
if let Some(source_chromaticities) =
png_reader.info().source_chromaticities.as_ref().copied()
{
png_encoder.set_source_chromaticities(source_chromaticities);
}
if let Some(source_gamma) =
png_reader.info().source_gamma.as_ref().copied()
{
png_encoder.set_source_gamma(source_gamma);
}
if let Some(source_srgb) = png_reader.info().srgb.as_ref().copied() {
png_encoder.set_source_srgb(source_srgb);
}
// Keeping color space information properly set, to handle the occasional
// strange PNG with non-sRGB chromaticities and/or different grayscale spaces
// that keeps most people wondering, is what sets a carefully crafted image
// manipulation routine apart :)
if let Some(source_chromaticities) =
png_reader.info().source_chromaticities.as_ref().copied()
{
png_encoder.set_source_chromaticities(source_chromaticities);
}
if let Some(source_gamma) = png_reader.info().source_gamma.as_ref().copied()
{
png_encoder.set_source_gamma(source_gamma);
}
if let Some(source_srgb) = png_reader.info().srgb.as_ref().copied() {
png_encoder.set_source_srgb(source_srgb);
}
let png_buf = bytemuck::try_cast_slice(&texture_buf)
.map_err(|_| ErrorKind::InvalidPng)?;
let mut png_writer = png_encoder.write_header()?;
png_writer.write_image_data(png_buf)?;
png_writer.finish()?;
let png_buf = bytemuck::try_cast_slice(&texture_buf)
.map_err(|_| ErrorKind::InvalidPng)?;
let mut png_writer = png_encoder.write_header()?;
png_writer.write_image_data(png_buf)?;
png_writer.finish()?;
Ok(encoded_png.into())
})
.await?
Ok(encoded_png.into())
}
/// Reads a skin texture and returns a 64x64 buffer in RGBA format.
fn get_skin_texture_buffer<R: Read>(
fn get_skin_texture_buffer<R: BufRead + Seek>(
png_reader: &mut png::Reader<R>,
is_legacy_skin: bool,
) -> crate::Result<Vec<Rgba<u8>>> {
let output_buffer_size = png_reader
.output_buffer_size()
.expect("Reasonable skin texture size verified already");
let mut png_buf = if is_legacy_skin {
// Legacy skins have half the height, so duplicate the rows to
// turn them into a 64x64 texture
vec![0; png_reader.output_buffer_size() * 2]
vec![0; output_buffer_size * 2]
} else {
// Modern skins are left as-is
vec![0; png_reader.output_buffer_size()]
vec![0; output_buffer_size]
};
png_reader.next_frame(&mut png_buf)?;
@@ -373,9 +375,10 @@ fn set_alpha(
#[tokio::test]
async fn normalize_skin_texture_works() {
let decode_to_pixels = |png_data: &[u8]| {
let decoder = png::Decoder::new(png_data);
let decoder = png::Decoder::new(Cursor::new(png_data));
let mut reader = decoder.read_info().expect("Failed to read PNG info");
let mut buffer = vec![0; reader.output_buffer_size()];
let mut buffer =
vec![0; reader.output_buffer_size().expect("Skin size too large")];
reader
.next_frame(&mut buffer)
.expect("Failed to decode PNG");

View File

@@ -12,6 +12,8 @@ use crate::util::fetch::{fetch_mirrors, write};
use crate::util::io;
use crate::{State, profile};
use async_zip::base::read::seek::ZipFileReader;
use futures::StreamExt;
use path_util::SafeRelativeUtf8UnixPathBuf;
use super::install_from::{
CreatePack, CreatePackLocation, PackFormat, generate_pack_from_file,
@@ -19,7 +21,6 @@ use super::install_from::{
};
use crate::data::ProjectType;
use std::io::{Cursor, ErrorKind};
use std::path::PathBuf;
/// Install a pack
/// Wrapper around install_pack_files that generates a pack creation description, and
@@ -93,197 +94,194 @@ pub async fn install_zipped_mrpack_files(
})?;
// Extract index of modrinth.index.json
let zip_index_option = zip_reader.file().entries().iter().position(|f| {
f.filename().as_str().unwrap_or_default() == "modrinth.index.json"
});
if let Some(zip_index) = zip_index_option {
let mut manifest = String::new();
let mut reader = zip_reader.reader_with_entry(zip_index).await?;
reader.read_to_string_checked(&mut manifest).await?;
let pack: PackFormat = serde_json::from_str(&manifest)?;
if &*pack.game != "minecraft" {
return Err(crate::ErrorKind::InputError(
"Pack does not support Minecraft".to_string(),
)
.into());
}
// Sets generated profile attributes to the pack ones (using profile::edit)
set_profile_information(
profile_path.clone(),
&description,
&pack.name,
&pack.dependencies,
ignore_lock,
)
.await?;
let profile_path = profile_path.clone();
let loading_bar = init_or_edit_loading(
existing_loading_bar,
LoadingBarType::PackDownload {
profile_path: profile_path.clone(),
pack_name: pack.name.clone(),
icon,
pack_id: project_id,
pack_version: version_id,
},
100.0,
"Downloading modpack",
)
.await?;
let num_files = pack.files.len();
use futures::StreamExt;
loading_try_for_each_concurrent(
futures::stream::iter(pack.files.into_iter())
.map(Ok::<PackFile, crate::Error>),
None,
Some(&loading_bar),
70.0,
num_files,
None,
|project| {
let profile_path = profile_path.clone();
async move {
//TODO: Future update: prompt user for optional files in a modpack
if let Some(env) = project.env
&& env
.get(&EnvType::Client)
.is_some_and(|x| x == &SideType::Unsupported)
{
return Ok(());
}
let file = fetch_mirrors(
&project
.downloads
.iter()
.map(|x| &**x)
.collect::<Vec<&str>>(),
project.hashes.get(&PackFileHash::Sha1).map(|x| &**x),
&state.fetch_semaphore,
&state.pool,
)
.await?;
let path = profile::get_full_path(&profile_path)
.await?
.join(project.path.as_str());
cache_file_hash(
file.clone(),
&profile_path,
project.path.as_str(),
project.hashes.get(&PackFileHash::Sha1).map(|x| &**x),
ProjectType::get_from_parent_folder(&path),
&state.pool,
)
.await?;
write(&path, &file, &state.io_semaphore).await?;
Ok(())
}
},
)
.await?;
emit_loading(&loading_bar, 0.0, Some("Extracting overrides"))?;
let mut total_len = 0;
for index in 0..zip_reader.file().entries().len() {
let file = zip_reader.file().entries().get(index).unwrap();
let filename = file.filename().as_str().unwrap_or_default();
if (filename.starts_with("overrides")
|| filename.starts_with("client-overrides"))
&& !filename.ends_with('/')
{
total_len += 1;
}
}
for index in 0..zip_reader.file().entries().len() {
let file = zip_reader.file().entries().get(index).unwrap();
let filename = file.filename().as_str().unwrap_or_default();
let file_path = PathBuf::from(filename);
if (filename.starts_with("overrides")
|| filename.starts_with("client-overrides"))
&& !filename.ends_with('/')
{
// Reads the file into the 'content' variable
let mut content = Vec::new();
let mut reader = zip_reader.reader_with_entry(index).await?;
reader.read_to_end_checked(&mut content).await?;
let mut new_path = PathBuf::new();
let components = file_path.components().skip(1);
for component in components {
new_path.push(component);
}
if new_path.file_name().is_some() {
let bytes = bytes::Bytes::from(content);
cache_file_hash(
bytes.clone(),
&profile_path,
&new_path.to_string_lossy(),
None,
ProjectType::get_from_parent_folder(&new_path),
&state.pool,
)
.await?;
write(
&profile::get_full_path(&profile_path)
.await?
.join(new_path),
&bytes,
&state.io_semaphore,
)
.await?;
}
emit_loading(
&loading_bar,
30.0 / total_len as f64,
Some(&format!("Extracting override {index}/{total_len}")),
)?;
}
}
// If the icon doesn't exist, we expect icon.png to be a potential icon.
// If it doesn't exist, and an override to icon.png exists, cache and use that
let potential_icon = profile::get_full_path(&profile_path)
.await?
.join("icon.png");
if !icon_exists && potential_icon.exists() {
profile::edit_icon(&profile_path, Some(&potential_icon)).await?;
}
if let Some(profile_val) = profile::get(&profile_path).await? {
crate::launcher::install_minecraft(
&profile_val,
Some(loading_bar),
false,
)
.await?;
}
Ok::<String, crate::Error>(profile_path.clone())
} else {
Err(crate::Error::from(crate::ErrorKind::InputError(
let Some(manifest_idx) = zip_reader.file().entries().iter().position(|f| {
matches!(f.filename().as_str(), Ok("modrinth.index.json"))
}) else {
return Err(crate::Error::from(crate::ErrorKind::InputError(
"No pack manifest found in mrpack".to_string(),
)))
)));
};
let mut manifest = String::new();
let mut reader = zip_reader.reader_with_entry(manifest_idx).await?;
reader.read_to_string_checked(&mut manifest).await?;
let pack: PackFormat = serde_json::from_str(&manifest)?;
if &*pack.game != "minecraft" {
return Err(crate::ErrorKind::InputError(
"Pack does not support Minecraft".to_string(),
)
.into());
}
// Sets generated profile attributes to the pack ones (using profile::edit)
set_profile_information(
profile_path.clone(),
&description,
&pack.name,
&pack.dependencies,
ignore_lock,
)
.await?;
let profile_path = profile_path.clone();
let loading_bar = init_or_edit_loading(
existing_loading_bar,
LoadingBarType::PackDownload {
profile_path: profile_path.clone(),
pack_name: pack.name.clone(),
icon,
pack_id: project_id,
pack_version: version_id,
},
100.0,
"Downloading modpack",
)
.await?;
let num_files = pack.files.len();
loading_try_for_each_concurrent(
futures::stream::iter(pack.files.into_iter())
.map(Ok::<PackFile, crate::Error>),
None,
Some(&loading_bar),
70.0,
num_files,
None,
|project| {
let profile_path = profile_path.clone();
async move {
//TODO: Future update: prompt user for optional files in a modpack
if let Some(env) = project.env
&& env
.get(&EnvType::Client)
.is_some_and(|x| x == &SideType::Unsupported)
{
return Ok(());
}
let file = fetch_mirrors(
&project
.downloads
.iter()
.map(|x| &**x)
.collect::<Vec<&str>>(),
project.hashes.get(&PackFileHash::Sha1).map(|x| &**x),
&state.fetch_semaphore,
&state.pool,
)
.await?;
let path = profile::get_full_path(&profile_path)
.await?
.join(project.path.as_str());
cache_file_hash(
file.clone(),
&profile_path,
project.path.as_str(),
project.hashes.get(&PackFileHash::Sha1).map(|x| &**x),
ProjectType::get_from_parent_folder(&path),
&state.pool,
)
.await?;
write(&path, &file, &state.io_semaphore).await?;
Ok(())
}
},
)
.await?;
emit_loading(&loading_bar, 0.0, Some("Extracting overrides"))?;
let override_file_entries = zip_reader
.file()
.entries()
.iter()
.enumerate()
.filter_map(|(index, file)| {
let filename = file.filename().as_str().unwrap_or_default();
((filename.starts_with("overrides/")
|| filename.starts_with("client-overrides/"))
&& !filename.ends_with('/'))
.then(|| (index, file.clone()))
})
.collect::<Vec<_>>();
let override_file_entries_count = override_file_entries.len();
for (i, (index, file)) in override_file_entries.into_iter().enumerate() {
let relative_override_file_path =
SafeRelativeUtf8UnixPathBuf::try_from(
file.filename().as_str().unwrap().to_string(),
)?;
let relative_override_file_path = relative_override_file_path
.strip_prefix("overrides")
.or_else(|_| relative_override_file_path.strip_prefix("client-overrides"))
.map_err(|_| {
crate::Error::from(crate::ErrorKind::OtherError(
format!("Failed to strip override prefix from override file path: {relative_override_file_path}")
))
})?;
let mut file_bytes = vec![];
let mut reader = zip_reader.reader_with_entry(index).await?;
reader.read_to_end_checked(&mut file_bytes).await?;
let file_bytes = bytes::Bytes::from(file_bytes);
cache_file_hash(
file_bytes.clone(),
&profile_path,
relative_override_file_path.as_str(),
None,
ProjectType::get_from_parent_folder(
relative_override_file_path.as_str(),
),
&state.pool,
)
.await?;
write(
&profile::get_full_path(&profile_path)
.await?
.join(relative_override_file_path.as_str()),
&file_bytes,
&state.io_semaphore,
)
.await?;
emit_loading(
&loading_bar,
30.0 / override_file_entries_count as f64,
Some(&format!(
"Extracting override {}/{override_file_entries_count}",
i + 1
)),
)?;
}
// If the icon doesn't exist, we expect icon.png to be a potential icon.
// If it doesn't exist, and an override to icon.png exists, cache and use that
let potential_icon = profile::get_full_path(&profile_path)
.await?
.join("icon.png");
if !icon_exists && potential_icon.exists() {
profile::edit_icon(&profile_path, Some(&potential_icon)).await?;
}
if let Some(profile_val) = profile::get(&profile_path).await? {
crate::launcher::install_minecraft(
&profile_val,
Some(loading_bar),
false,
)
.await?;
}
Ok::<String, crate::Error>(profile_path.clone())
}
#[tracing::instrument(skip(mrpack_file))]
@@ -303,127 +301,130 @@ pub async fn remove_all_related_files(
})?;
// Extract index of modrinth.index.json
let zip_index_option = zip_reader.file().entries().iter().position(|f| {
f.filename().as_str().unwrap_or_default() == "modrinth.index.json"
});
if let Some(zip_index) = zip_index_option {
let mut manifest = String::new();
let Some(manifest_idx) = zip_reader.file().entries().iter().position(|f| {
matches!(f.filename().as_str(), Ok("modrinth.index.json"))
}) else {
return Err(crate::Error::from(crate::ErrorKind::InputError(
"No pack manifest found in mrpack".to_string(),
)));
};
let mut reader = zip_reader.reader_with_entry(zip_index).await?;
reader.read_to_string_checked(&mut manifest).await?;
let mut manifest = String::new();
let pack: PackFormat = serde_json::from_str(&manifest)?;
let mut reader = zip_reader.reader_with_entry(manifest_idx).await?;
reader.read_to_string_checked(&mut manifest).await?;
if &*pack.game != "minecraft" {
return Err(crate::ErrorKind::InputError(
"Pack does not support Minecraft".to_string(),
)
.into());
}
let pack: PackFormat = serde_json::from_str(&manifest)?;
// Set install stage to installing, and do not change it back (as files are being removed and are not being reinstalled here)
crate::api::profile::edit(&profile_path, |prof| {
prof.install_stage = ProfileInstallStage::PackInstalling;
async { Ok(()) }
})
.await?;
if &*pack.game != "minecraft" {
return Err(crate::ErrorKind::InputError(
"Pack does not support Minecraft".to_string(),
)
.into());
}
// First, remove all modrinth projects by their version hashes
// Remove all modrinth projects by their version hashes
// We need to do a fetch to get the project ids from Modrinth
let state = State::get().await?;
let all_hashes = pack
.files
.iter()
.filter_map(|f| Some(f.hashes.get(&PackFileHash::Sha1)?.clone()))
.collect::<Vec<_>>();
// Set install stage to installing, and do not change it back (as files are being removed and are not being reinstalled here)
crate::api::profile::edit(&profile_path, |prof| {
prof.install_stage = ProfileInstallStage::PackInstalling;
async { Ok(()) }
})
.await?;
// First, get project info by hash
let file_infos = CachedEntry::get_file_many(
&all_hashes.iter().map(|x| &**x).collect::<Vec<_>>(),
None,
// First, remove all modrinth projects by their version hashes
// Remove all modrinth projects by their version hashes
// We need to do a fetch to get the project ids from Modrinth
let state = State::get().await?;
let all_hashes = pack
.files
.iter()
.filter_map(|f| Some(f.hashes.get(&PackFileHash::Sha1)?.clone()))
.collect::<Vec<_>>();
// First, get project info by hash
let file_infos = CachedEntry::get_file_many(
&all_hashes.iter().map(|x| &**x).collect::<Vec<_>>(),
None,
&state.pool,
&state.api_semaphore,
)
.await?;
let to_remove = file_infos
.into_iter()
.map(|p| p.project_id)
.collect::<Vec<_>>();
let profile = profile::get(&profile_path).await?.ok_or_else(|| {
crate::ErrorKind::UnmanagedProfileError(profile_path.to_string())
})?;
let profile_full_path = profile::get_full_path(&profile_path).await?;
for (file_path, project) in profile
.get_projects(
Some(CacheBehaviour::MustRevalidate),
&state.pool,
&state.api_semaphore,
)
.await?;
let to_remove = file_infos
.into_iter()
.map(|p| p.project_id)
.collect::<Vec<_>>();
let profile = profile::get(&profile_path).await?.ok_or_else(|| {
crate::ErrorKind::UnmanagedProfileError(profile_path.to_string())
})?;
let profile_full_path = profile::get_full_path(&profile_path).await?;
for (file_path, project) in profile
.get_projects(
Some(CacheBehaviour::MustRevalidate),
&state.pool,
&state.api_semaphore,
)
.await?
.await?
{
if let Some(metadata) = &project.metadata
&& to_remove.contains(&metadata.project_id)
{
if let Some(metadata) = &project.metadata
&& to_remove.contains(&metadata.project_id)
{
match io::remove_file(profile_full_path.join(file_path)).await {
Ok(_) => (),
Err(err) if err.kind() == ErrorKind::NotFound => (),
Err(err) => return Err(err.into()),
}
}
}
// Iterate over all Modrinth project file paths in the json, and remove them
// (There should be few, but this removes any files the .mrpack intended as Modrinth projects but were unrecognized)
for file in pack.files {
match io::remove_file(profile_full_path.join(file.path.as_str()))
.await
{
match io::remove_file(profile_full_path.join(file_path)).await {
Ok(_) => (),
Err(err) if err.kind() == ErrorKind::NotFound => (),
Err(err) => return Err(err.into()),
}
}
// Iterate over each 'overrides' file and remove it
for index in 0..zip_reader.file().entries().len() {
let file = zip_reader.file().entries().get(index).unwrap();
let filename = file.filename().as_str().unwrap_or_default();
let file_path = PathBuf::from(filename);
if (filename.starts_with("overrides")
|| filename.starts_with("client-overrides"))
&& !filename.ends_with('/')
{
let mut new_path = PathBuf::new();
let components = file_path.components().skip(1);
for component in components {
new_path.push(component);
}
// Remove this file if a corresponding one exists in the filesystem
match io::remove_file(
profile::get_full_path(&profile_path)
.await?
.join(&new_path),
)
.await
{
Ok(_) => (),
Err(err) if err.kind() == ErrorKind::NotFound => (),
Err(err) => return Err(err.into()),
}
}
}
Ok(())
} else {
Err(crate::Error::from(crate::ErrorKind::InputError(
"No pack manifest found in mrpack".to_string(),
)))
}
// Iterate over all Modrinth project file paths in the json, and remove them
// (There should be few, but this removes any files the .mrpack intended as Modrinth projects but were unrecognized)
for file in pack.files {
match io::remove_file(profile_full_path.join(file.path.as_str())).await
{
Ok(_) => (),
Err(err) if err.kind() == ErrorKind::NotFound => (),
Err(err) => return Err(err.into()),
}
}
// Iterate over each 'overrides' file and remove it
let override_file_entries =
zip_reader.file().entries().iter().filter(|file| {
let filename = file.filename().as_str().unwrap_or_default();
(filename.starts_with("overrides/")
|| filename.starts_with("client-overrides/"))
&& !filename.ends_with('/')
});
for file in override_file_entries {
let relative_override_file_path =
SafeRelativeUtf8UnixPathBuf::try_from(
file.filename().as_str().unwrap().to_string(),
)?;
let relative_override_file_path = relative_override_file_path
.strip_prefix("overrides")
.or_else(|_| relative_override_file_path.strip_prefix("client-overrides"))
.map_err(|_| {
crate::Error::from(crate::ErrorKind::OtherError(
format!("Failed to strip override prefix from override file path: {relative_override_file_path}")
))
})?;
// Remove this file if a corresponding one exists in the filesystem
match io::remove_file(
profile::get_full_path(&profile_path)
.await?
.join(relative_override_file_path.as_str()),
)
.await
{
Ok(_) => (),
Err(err) if err.kind() == ErrorKind::NotFound => (),
Err(err) => return Err(err.into()),
}
}
Ok(())
}

View File

@@ -666,7 +666,14 @@ async fn run_credentials(
.filter(|hook_command| !hook_command.is_empty());
if let Some(hook) = pre_launch_hooks {
// TODO: hook parameters
let mut cmd = hook.split(' ');
let mut cmd = shlex::split(hook)
.ok_or_else(|| {
crate::ErrorKind::LauncherError(format!(
"Invalid pre-launch command: {hook}",
))
})?
.into_iter();
if let Some(command) = cmd.next() {
let full_path = get_full_path(&profile.path).await?;
let result = Command::new(command)

View File

@@ -210,6 +210,9 @@ pub enum ErrorKind {
#[error("Deserialization error: {0}")]
DeserializationError(#[from] serde::de::value::Error),
#[error("Discord IPC error: {0}")]
DiscordRichPresenceError(#[from] discord_rich_presence::error::Error),
}
#[derive(Debug)]

View File

@@ -570,7 +570,19 @@ pub async fn launch_minecraft(
let args = version_info.arguments.clone().unwrap_or_default();
let mut command = match wrapper {
Some(hook) => {
let mut command = Command::new(hook);
let mut cmd = shlex::split(hook)
.ok_or_else(|| {
crate::ErrorKind::LauncherError(format!(
"Invalid wrapper command: {hook}",
))
})?
.into_iter();
let mut command = Command::new(cmd.next().ok_or(
crate::ErrorKind::LauncherError(
"Empty wrapper command".to_owned(),
),
)?);
command.args(cmd);
command.arg(&java_version.path);
command
}

View File

@@ -40,12 +40,7 @@ impl DiscordGuard {
/// Initialize discord IPC client, and attempt to connect to it
/// If it fails, it will still return a DiscordGuard, but the client will be unconnected
pub fn init() -> crate::Result<DiscordGuard> {
let dipc =
DiscordIpcClient::new("1190718475832918136").map_err(|e| {
crate::ErrorKind::OtherError(format!(
"Could not create Discord client {e}",
))
})?;
let dipc = DiscordIpcClient::new("1190718475832918136");
Ok(DiscordGuard {
client: Arc::new(RwLock::new(dipc)),
@@ -130,25 +125,14 @@ impl DiscordGuard {
let mut client: tokio::sync::RwLockWriteGuard<'_, DiscordIpcClient> =
self.client.write().await;
let res = client.set_activity(activity.clone());
let could_not_set_err = |e: Box<dyn serde::ser::StdError>| {
crate::ErrorKind::OtherError(format!(
"Could not update Discord activity {e}",
))
};
if reconnect_if_fail {
if let Err(_e) = res {
client.reconnect().map_err(|e| {
crate::ErrorKind::OtherError(format!(
"Could not reconnect to Discord IPC {e}",
))
})?;
return Ok(client
.set_activity(activity)
.map_err(could_not_set_err)?); // try again, but don't reconnect if it fails again
client.reconnect()?;
return Ok(client.set_activity(activity)?); // try again, but don't reconnect if it fails again
}
} else {
res.map_err(could_not_set_err)?;
res?;
}
Ok(())
@@ -169,25 +153,13 @@ impl DiscordGuard {
let mut client = self.client.write().await;
let res = client.clear_activity();
let could_not_clear_err = |e: Box<dyn serde::ser::StdError>| {
crate::ErrorKind::OtherError(format!(
"Could not clear Discord activity {e}",
))
};
if reconnect_if_fail {
if res.is_err() {
client.reconnect().map_err(|e| {
crate::ErrorKind::OtherError(format!(
"Could not reconnect to Discord IPC {e}",
))
})?;
return Ok(client
.clear_activity()
.map_err(could_not_clear_err)?); // try again, but don't reconnect if it fails again
client.reconnect()?;
return Ok(client.clear_activity()?); // try again, but don't reconnect if it fails again
}
} else {
res.map_err(could_not_clear_err)?;
res?;
}
Ok(())
}

View File

@@ -272,7 +272,7 @@ impl FriendsSocket {
pub async fn disconnect(&self) -> crate::Result<()> {
let mut write_lock = self.write.write().await;
if let Some(ref mut write_half) = *write_lock {
write_half.close().await?;
SinkExt::close(write_half).await?;
*write_lock = None;
}
Ok(())

View File

@@ -89,6 +89,7 @@ impl State {
let res = tokio::try_join!(
state.discord_rpc.clear_to_default(true),
Profile::refresh_all(),
Settings::migrate(&state.pool),
ModrinthCredentials::refresh_all(),
);

View File

@@ -516,7 +516,7 @@ impl Process {
chrono::DateTime::<Utc>::from_timestamp(secs, nsecs)
.unwrap_or_default()
} else {
chrono::DateTime::<Utc>::from_timestamp(timestamp_val, 0)
chrono::DateTime::<Utc>::from_timestamp_secs(timestamp_val)
.unwrap_or_default()
};
@@ -743,7 +743,14 @@ impl Process {
// We do not wait on the post exist command to finish running! We let it spawn + run on its own.
// This behaviour may be changed in the future
if let Some(hook) = post_exit_command {
let mut cmd = hook.split(' ');
let mut cmd = shlex::split(&hook)
.ok_or_else(|| {
crate::ErrorKind::LauncherError(format!(
"Invalid post-exit command: {hook}",
))
})?
.into_iter();
if let Some(command) = cmd.next() {
let mut command = Command::new(command);
command.args(cmd).current_dir(

View File

@@ -103,10 +103,11 @@ impl ProfileInstallStage {
pub enum LauncherFeatureVersion {
None,
MigratedServerLastPlayTime,
MigratedLaunchHooks,
}
impl LauncherFeatureVersion {
pub const MOST_RECENT: Self = Self::MigratedServerLastPlayTime;
pub const MOST_RECENT: Self = Self::MigratedLaunchHooks;
pub fn as_str(&self) -> &'static str {
match *self {
@@ -114,6 +115,7 @@ impl LauncherFeatureVersion {
Self::MigratedServerLastPlayTime => {
"migrated_server_last_play_time"
}
Self::MigratedLaunchHooks => "migrated_launch_hooks",
}
}
@@ -123,6 +125,7 @@ impl LauncherFeatureVersion {
"migrated_server_last_play_time" => {
Self::MigratedServerLastPlayTime
}
"migrated_launch_hooks" => Self::MigratedLaunchHooks,
_ => Self::None,
}
}
@@ -225,10 +228,14 @@ impl ProjectType {
}
}
pub fn get_from_parent_folder(path: &Path) -> Option<Self> {
// Get parent folder
let path = path.parent()?.file_name()?;
match path.to_str()? {
pub fn get_from_parent_folder(path: impl AsRef<Path>) -> Option<Self> {
match path
.as_ref()
.parent()?
.file_name()?
.to_str()
.unwrap_or_default()
{
"mods" => Some(ProjectType::Mod),
"datapacks" => Some(ProjectType::DataPack),
"resourcepacks" => Some(ProjectType::ResourcePack),
@@ -781,6 +788,30 @@ impl Profile {
self.launcher_feature_version =
LauncherFeatureVersion::MigratedServerLastPlayTime;
}
LauncherFeatureVersion::MigratedServerLastPlayTime => {
let quoter = shlex::Quoter::new().allow_nul(true);
// Previously split by spaces
if let Some(pre_launch) = self.hooks.pre_launch.as_ref() {
self.hooks.pre_launch =
Some(quoter.join(pre_launch.split(' ')).unwrap())
}
// Previously treated as complete path to command
if let Some(wrapper) = self.hooks.wrapper.as_ref() {
self.hooks.wrapper =
Some(quoter.quote(wrapper).unwrap().to_string())
}
// Previously split by spaces
if let Some(post_exit) = self.hooks.post_exit.as_ref() {
self.hooks.post_exit =
Some(quoter.join(post_exit.split(' ')).unwrap())
}
self.launcher_feature_version =
LauncherFeatureVersion::MigratedLaunchHooks;
}
LauncherFeatureVersion::MOST_RECENT => unreachable!(
"LauncherFeatureVersion::MOST_RECENT was not updated"
),

View File

@@ -1,6 +1,7 @@
//! Theseus settings file
use serde::{Deserialize, Serialize};
use sqlx::{Pool, Sqlite};
use std::collections::HashMap;
// Types
@@ -42,6 +43,8 @@ pub struct Settings {
pub skipped_update: Option<String>,
pub pending_update_toast_for_version: Option<String>,
pub auto_download_updates: Option<bool>,
pub version: usize,
}
#[derive(Serialize, Deserialize, Debug, Clone, Copy, Eq, Hash, PartialEq)]
@@ -54,6 +57,8 @@ pub enum FeatureFlag {
}
impl Settings {
const CURRENT_VERSION: usize = 2;
pub async fn get(
exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
) -> crate::Result<Self> {
@@ -68,7 +73,8 @@ impl Settings {
mc_memory_max, mc_force_fullscreen, mc_game_resolution_x, mc_game_resolution_y, hide_on_process_start,
hook_pre_launch, hook_wrapper, hook_post_exit,
custom_dir, prev_custom_dir, migrated, json(feature_flags) feature_flags, toggle_sidebar,
skipped_update, pending_update_toast_for_version, auto_download_updates
skipped_update, pending_update_toast_for_version, auto_download_updates,
version
FROM settings
"
)
@@ -126,6 +132,7 @@ impl Settings {
pending_update_toast_for_version: res
.pending_update_toast_for_version,
auto_download_updates: res.auto_download_updates.map(|x| x == 1),
version: res.version as usize,
})
}
@@ -140,6 +147,7 @@ impl Settings {
let extra_launch_args = serde_json::to_string(&self.extra_launch_args)?;
let custom_env_vars = serde_json::to_string(&self.custom_env_vars)?;
let feature_flags = serde_json::to_string(&self.feature_flags)?;
let version = self.version as i64;
sqlx::query!(
"
@@ -183,7 +191,9 @@ impl Settings {
skipped_update = $29,
pending_update_toast_for_version = $30,
auto_download_updates = $31
auto_download_updates = $31,
version = $32
",
max_concurrent_writes,
max_concurrent_downloads,
@@ -216,12 +226,75 @@ impl Settings {
self.skipped_update,
self.pending_update_toast_for_version,
self.auto_download_updates,
version,
)
.execute(exec)
.await?;
Ok(())
}
pub async fn migrate(exec: &Pool<Sqlite>) -> crate::Result<()> {
let mut settings = Self::get(exec).await?;
if settings.version < Settings::CURRENT_VERSION {
tracing::info!(
"Migrating settings version {} to {:?}",
settings.version,
Settings::CURRENT_VERSION
);
}
while settings.version < Settings::CURRENT_VERSION {
if let Err(err) = settings.perform_migration() {
tracing::error!(
"Failed to migrate settings from version {}: {}",
settings.version,
err
);
return Err(err);
}
}
settings.update(exec).await?;
Ok(())
}
pub fn perform_migration(&mut self) -> crate::Result<()> {
match self.version {
1 => {
let quoter = shlex::Quoter::new().allow_nul(true);
// Previously split by spaces
if let Some(pre_launch) = self.hooks.pre_launch.as_ref() {
self.hooks.pre_launch =
Some(quoter.join(pre_launch.split(' ')).unwrap())
}
// Previously treated as complete path to command
if let Some(wrapper) = self.hooks.wrapper.as_ref() {
self.hooks.wrapper =
Some(quoter.quote(wrapper).unwrap().to_string())
}
// Previously split by spaces
if let Some(post_exit) = self.hooks.post_exit.as_ref() {
self.hooks.post_exit =
Some(quoter.join(post_exit.split(' ')).unwrap())
}
self.version = 2;
}
version => {
return Err(crate::ErrorKind::OtherError(format!(
"Invalid settings version: {version}"
))
.into());
}
}
Ok(())
}
}
/// Theseus theme

View File

@@ -12,6 +12,7 @@ serde_bytes = { workspace = true }
serde_cbor = { workspace = true }
serde_json = { workspace = true }
thiserror = { workspace = true }
utoipa = { workspace = true }
uuid = { workspace = true, features = ["fast-rng", "serde", "v4"] }
[lints]

View File

@@ -94,6 +94,7 @@ macro_rules! base62_id {
serde::Deserialize,
Debug,
Hash,
utoipa::ToSchema,
)]
#[serde(from = "ariadne::ids::Base62Id")]
#[serde(into = "ariadne::ids::Base62Id")]

View File

@@ -188,6 +188,7 @@ import _UnplugIcon from './icons/unplug.svg?component'
import _UpdatedIcon from './icons/updated.svg?component'
import _UploadIcon from './icons/upload.svg?component'
import _UserIcon from './icons/user.svg?component'
import _UserCogIcon from './icons/user-cog.svg?component'
import _UserPlusIcon from './icons/user-plus.svg?component'
import _UserXIcon from './icons/user-x.svg?component'
import _UsersIcon from './icons/users.svg?component'
@@ -387,6 +388,7 @@ export const UnlinkIcon = _UnlinkIcon
export const UnplugIcon = _UnplugIcon
export const UpdatedIcon = _UpdatedIcon
export const UploadIcon = _UploadIcon
export const UserCogIcon = _UserCogIcon
export const UserPlusIcon = _UserPlusIcon
export const UserXIcon = _UserXIcon
export const UserIcon = _UserIcon

View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-user-cog-icon lucide-user-cog"><path d="M10 15H6a4 4 0 0 0-4 4v2"/><path d="m14.305 16.53.923-.382"/><path d="m15.228 13.852-.923-.383"/><path d="m16.852 12.228-.383-.923"/><path d="m16.852 17.772-.383.924"/><path d="m19.148 12.228.383-.923"/><path d="m19.53 18.696-.382-.924"/><path d="m20.772 13.852.924-.383"/><path d="m20.772 16.148.924.383"/><circle cx="18" cy="15" r="3"/><circle cx="9" cy="7" r="4"/></svg>

After

Width:  |  Height:  |  Size: 615 B

View File

@@ -303,7 +303,7 @@ html {
--color-text-primary: #ffffff;
--color-text-default: #b0bac5;
--color-text-tertiary: #80878f;
--color-text-tertiary: #96a2b0;
// ===
@@ -389,10 +389,10 @@ html {
.oled-mode {
@extend .dark-mode;
--surface-1: #000000;
--surface-2: #101013;
--surface-3: #1b1b20;
--surface-4: #25262b;
--surface-5: #2e2f34;
--surface-2: #09090a;
--surface-3: #101013;
--surface-4: #1b1b20;
--surface-5: #25262b;
--color-ad: #0d1828;

View File

@@ -0,0 +1,110 @@
---
title: More Ways to Withdraw
summary: 'Coming soon: new withdraw options and a redesigned revenue dashboard'
date: 2025-10-27T16:30:00-07:00
authors: ['AJfd8YH6', 'bOHH0P9Z', 'xSQqYYIN']
---
Hey everyone! We've heard your feedback on creator withdrawal options, and we're excited to share that more are finally coming to Modrinth! Over the past month, we've been working on overhauling the withdrawal experience and supporting more withdrawal methods.
**This update will roll out later this week**, but we wanted to give everyone an early heads-up so you can hold off on withdrawing when payouts go out in a couple days in case one of the new options interests you. There's a lot packed into this release, so let's dive in!
<div class="video-wrapper mb-8">
<video autoplay loop muted playsinline>
<source src="./revenue-page-from-home.mp4" type="video/mp4" />
</video>
</div>
## **TL;DR**
- New revenue page and withdrawal flow
- New bank transfer option for 29 countries
- New crypto (USDC) option for hard to reach countries
- PayPal and Venmo moved to Tremendous for international users (lower to no FX fees)
- Email notifications for payout updates (available, successful, failed, etc.)
- New withdrawal compliance
---
## Streamlined Withdrawal Experience
A big goal for this release was to make payouts as clear as possible for creators. The revenue screen and withdrawal process has been completely overhauled.
**Your revenue is now displayed as a balance, with a detailed breakdown below.**
<div class="video-wrapper mb-8">
<video autoplay loop muted playsinline>
<source src="./balance-progress-bar.mp4" type="video/mp4" />
</video>
</div>
> Green shows available revenue, blue and purple show estimated revenue from the last two completed months, and grey shows the current month revenue that's still being processed.
We've also cleaned up the transaction history. You'll now see your most recent transactions directly on the revenue page, along with any deposits from Modrinth into your account.
## New Withdraw Methods
The second big goal of this release was to bring more options, better fees, and new ways to withdraw from countries that were previously hard to reach.
<div class="video-wrapper mb-8">
<video autoplay loop muted playsinline>
<source src="./withdraw-example.mp4" type="video/mp4" />
</video>
</div>
Withdraw with ease from your balance using our new modal flow. Click the green Withdraw button to get started, and you'll immediately see all available methods and fees for your country, including a few new ones:
### Bank Transfers (29 Countries)
You can now withdraw directly to your bank account via wire or ACH, depending on your country. Supported countries and their fees are listed below.
We're working to expand this list, most notably to Canada, the UK, and countries across Asia, but there's no timeline _yet_.
| Country | Currency | Transaction Fee | FX Fee | **Total Fee** |
| --------------- | -------- | --------------- | ----------------- | ----------------- |
| 🇺🇸 USA | USD | $0.50 + 1% | 0.50% | **~1.5% + $0.50** |
| 🇪🇺 EU | EUR | €1.00 + 1% | 0.60% | **~1.6% + €1.00** |
| 🇦🇷 Argentina | ARS | $0.00 + 1% | 0.50% | **~1.5%** |
| 🇲🇽 Mexico | MXN | $0.50 + 1% | 0.90% | **~1.9% + $0.50** |
| 🇧🇷 Brazil | BRL | $0.25 + 1% | 1.30% (incl. IOF) | **~2.3% + $0.25** |
| 🇨🇱 Chile | CLP | $1.20 + 1% | 0.95% | **~2.0% + $1.20** |
| 🇨🇷 Costa Rica | CRC | $0.80 + 1% | 1.05% | **~2.1% + $0.80** |
| 🇵🇪 Peru (PEN) | PEN | $1.00 + 1% | 1.15% | **~2.1% + $1.00** |
| 🇿🇦 South Africa | ZAR | $1.50 + 1% | 1.40% | **~2.4% + $1.50** |
| 🇵🇪 Peru (USD) | USD | $5.00 + 1% | 0.50% | **~1.5% + $5.00** |
| 🇨🇴 Colombia | COP | $0.35 + 1% | 0.95% (incl. GMF) | **~2.0% + $0.35** |
_Total Fee includes both transaction and FX fees._
> **Supported EU countries:** Austria, Belgium, Cyprus, Estonia, Finland, France, Germany, Greece, Ireland, Italy, Latvia, Lithuania, Luxembourg, Malta, Netherlands, Portugal, Slovakia, Spain
### Crypto Withdrawals (USDC)
We've also added USDC withdrawals on the Polygon network. This option is available worldwide, so everyone can now withdraw funds. Fees are a flat 1% + network fees, making it a great low-cost option.
### PayPal & Venmo
We've moved PayPal and Venmo to two different methods depending on your country:
- **United States:** PayPal and Venmo will remain on the existing system with the same low fees: $0.25 + 2%, capped at $1.00.
- **Outside of the United States:** PayPal and Venmo have been moved to Tremendous, which has zero FX fees and charges a flat 6%, capped at $25.00. This should be an improvement over the old method for larger creators, where PayPal charged high uncapped fees for currency conversion.
## New Email Notifications
Earlier this month, we quietly rolled out a new email system that lets us give both creators and users better feedback about what's going on.
This includes emails for things like project status changes, payouts available, and successful withdrawals. We plan to keep building on this and will be adding email preference settings soon!
## Tax Compliance
Last but not least, we're also rolling out our new tax compliance system. We partially rolled this out earlier this month, but the full version is now live.
Creator withdrawals will now be limited to $600.00 USD per calendar year as of 2025, per U.S. regulations. As a creator approaches that threshold, they'll be prompted to fill out a W-8 or W-9 tax form, depending on their country.
![A snippet of the tax form stage of the new withdraw modal.](./tax-compliance.png)
Completing this form helps Modrinth stay compliant and will automatically unlock withdrawals again once submitted. For non-US users, these details are not automatically sent to the US government! They are for our own records in the case of an audit and we need to prove where we're sending money to.
---
Thank you to all the creators and players supporting Modrinth, we hope you enjoy this long deserved update! 💚

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,12 @@
// AUTO-GENERATED FILE - DO NOT EDIT
export const article = {
html: () => import(`./creator_withdrawals_overhaul.content`).then(m => m.html),
title: "More Ways to Withdraw",
summary: "Coming soon: new withdraw options and a redesigned revenue dashboard",
date: "2025-10-27T23:30:00.000Z",
slug: "creator-withdrawals-overhaul",
authors: ["AJfd8YH6","bOHH0P9Z","xSQqYYIN"],
unlisted: false,
thumbnail: true,
};

View File

@@ -7,6 +7,7 @@ import { article as carbon_ads } from "./carbon_ads";
import { article as creator_monetization } from "./creator_monetization";
import { article as creator_update } from "./creator_update";
import { article as creator_updates_july_2025 } from "./creator_updates_july_2025";
import { article as creator_withdrawals_overhaul } from "./creator_withdrawals_overhaul";
import { article as design_refresh } from "./design_refresh";
import { article as download_adjustment } from "./download_adjustment";
import { article as free_server_medal } from "./free_server_medal";
@@ -57,6 +58,7 @@ export const articles = [
free_server_medal,
download_adjustment,
design_refresh,
creator_withdrawals_overhaul,
creator_updates_july_2025,
creator_update,
creator_monetization,

Binary file not shown.

After

Width:  |  Height:  |  Size: 132 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 36 KiB

View File

@@ -0,0 +1,25 @@
[package]
name = "modrinth-maxmind"
edition.workspace = true
rust-version.workspace = true
repository.workspace = true
[dependencies]
bytes = { workspace = true }
directories = { workspace = true }
eyre = { workspace = true }
flate2 = { workspace = true }
maxminddb = { workspace = true }
modrinth-util = { workspace = true }
reqwest = { workspace = true, features = ["json"] }
tar = { workspace = true }
tokio = { workspace = true, features = ["fs"] }
tracing = { workspace = true }
[dev-dependencies]
clap = { workspace = true, features = ["derive"] }
tokio = { workspace = true, features = ["full"] }
tracing-subscriber = { workspace = true }
[lints]
workspace = true

View File

@@ -0,0 +1 @@
Allows opening and reading a MaxMind GeoIP database, for use in an `actix-web` app.

View File

@@ -0,0 +1,35 @@
//! Example/testing binary for checking if a MaxMind database can be loaded from
//! the current environment.
use std::net::IpAddr;
use eyre::Result;
use maxminddb::geoip2;
use modrinth_util::Context;
use tracing::info;
/// Looks up country details for an IP using the MaxMind database
#[derive(Debug, clap::Parser)]
struct Args {
/// IP address to look up
ip: IpAddr,
}
#[tokio::main]
async fn main() -> Result<()> {
let args = <Args as clap::Parser>::parse();
tracing_subscriber::fmt().init();
let maxmind = modrinth_maxmind::init_reader()
.await
.wrap_err("failed to create reader")?;
let ip = args.ip;
let country = maxmind
.lookup::<geoip2::Country>(ip)
.wrap_err("failed to lookup country")?;
info!("Country details for {ip:?}:\n{country:#?}");
Ok(())
}

View File

@@ -0,0 +1,174 @@
#![doc = include_str!("../README.md")]
use std::{
io::{Cursor, Read},
net::IpAddr,
path::Path,
sync::Arc,
};
use flate2::read::GzDecoder;
pub use maxminddb::{self, geoip2};
use bytes::Bytes;
use eyre::{Result, bail, eyre};
use modrinth_util::{Context, env_var};
use tokio::fs;
use tracing::{debug, info, warn};
/// MaxMind GeoIP database reader for use as a `web::Data` parameter.
#[derive(Debug, Clone)]
pub struct MaxMind {
    /// Database reader.
    ///
    /// If the backend was not configured with MaxMind, the reader will not be
    /// available.
    pub reader: Option<Arc<maxminddb::Reader<Bytes>>>,
}
impl MaxMind {
    /// Creates a [`MaxMind`] with no reader.
    ///
    /// Lookups through [`MaxMind::query_country`] will always return [`None`].
    #[must_use]
    pub const fn none() -> Self {
        Self { reader: None }
    }
    /// Attempts to create a [`MaxMind`] with a MaxMind GeoIP database reader.
    ///
    /// This reads creation and download parameters from environment variables.
    ///
    /// If the database could not be created or downloaded, this will make a
    /// [`MaxMind`] with no reader.
    pub async fn new() -> Self {
        Self {
            reader: init_reader()
                .await
                // Initialization failure is non-fatal: log a warning and run
                // without a reader (all lookups then return `None`).
                .inspect_err(|err| {
                    warn!("Failed to initialize MaxMind: {err:#}");
                })
                .map(Arc::new)
                .ok(),
        }
    }
    /// Queries the MaxMind database for the ISO country code of an IP address.
    ///
    /// If MaxMind is not configured or the database could not be read, returns
    /// [`None`].
    // NOTE(review): no awaits in this body; presumably kept `async` for API
    // stability — confirm before changing the signature.
    pub async fn query_country(&self, ip: impl Into<IpAddr>) -> Option<String> {
        let reader = self.reader.as_ref()?;
        // Lookup errors and absent country/ISO-code fields all collapse to
        // `None`; callers cannot distinguish "no data" from "lookup failed".
        reader
            .lookup::<geoip2::Country>(ip.into())
            .ok()?
            .and_then(|c| c.country)
            .and_then(|c| c.iso_code.map(|s| s.to_string()))
    }
}
/// Creates a [`maxminddb::Reader`] for use in [`MaxMind::reader`].
///
/// # Errors
///
/// Errors if the database is not present, or could not be downloaded (i.e.
/// missing license key).
/// Creates a [`maxminddb::Reader`] for use in [`MaxMind::reader`].
///
/// Resolution order:
/// 1. If `MAXMIND_DB` is set, the database is read directly from that path.
/// 2. Otherwise `MAXMIND_ACCOUNT_ID` and `MAXMIND_LICENSE_KEY` are required;
///    a cached copy under the per-user cache directory is used if readable,
///    else the database is downloaded and cached for next time.
///
/// # Errors
///
/// Errors if the database is not present, or could not be downloaded (i.e.
/// missing license key).
pub async fn init_reader() -> Result<maxminddb::Reader<Bytes>> {
    let db = if let Ok(db_path) = env_var("MAXMIND_DB") {
        // An explicit path wins over account-based download.
        info!("Using MaxMind database at {db_path:?}");
        fs::read(&db_path)
            .await
            .map(Bytes::from)
            .wrap_err_with(|| {
                eyre!("failed to read database from {db_path:?}")
            })?
    } else {
        let account_id = env_var("MAXMIND_ACCOUNT_ID")?;
        let license_key = env_var("MAXMIND_LICENSE_KEY")?;
        let dirs = directories::ProjectDirs::from(
            "com.modrinth",
            "Modrinth",
            "modrinth-backend",
        )
        .wrap_err("failed to get cache directory")?;
        let cache_dir = dirs.cache_dir();
        let db_path = cache_dir.join("geolite.mmdb");
        match fs::read(&db_path).await {
            Ok(db) => {
                info!("Using cached MaxMind database at {db_path:?}");
                Bytes::from(db)
            }
            Err(err) => {
                // Cache miss (or unreadable cache): fall back to downloading.
                debug!(
                    "Failed to read MaxMind database from {db_path:?}, will download: {err}"
                );
                let db = download(&account_id, &license_key).await?;
                // Caching is best-effort; a failed write only logs a warning.
                match write_to_cache(cache_dir, &db_path, &db).await {
                    Ok(()) => {
                        info!("Wrote GeoIP database cache to {db_path:?}");
                    }
                    Err(err) => warn!(
                        "Failed to write GeoIP database cache to {db_path:?}: {err:?}",
                    ),
                }
                info!("Downloaded and cached database");
                db
            }
        }
    };
    maxminddb::Reader::from_source(db).wrap_err("failed to create reader")
}
/// Downloads the GeoLite2-Country database from MaxMind and extracts the
/// `.mmdb` member from the gzipped tar archive it ships in.
///
/// # Errors
///
/// Errors if the HTTP download fails, the archive cannot be read, or the
/// archive contains no `.mmdb` entry.
async fn download(account_id: &str, license_key: &str) -> Result<Bytes> {
    info!("Downloading MaxMind GeoIP database");
    let db = reqwest::Client::new()
        .get("https://download.maxmind.com/geoip/databases/GeoLite2-Country/download?suffix=tar.gz")
        .basic_auth(account_id, Some(license_key))
        .send()
        .await
        .wrap_err("failed to begin downloading GeoIP database")?
        .error_for_status()
        .wrap_err("failed to download GeoIP database")?
        .bytes()
        .await
        .wrap_err("failed to finish downloading GeoIP database")?;
    // The download is a .tar.gz; decompress on the fly while scanning entries.
    let db = GzDecoder::new(Cursor::new(db));
    let mut archive = tar::Archive::new(db);
    let entries = archive.entries().wrap_err("failed to read entries")?;
    for entry in entries {
        let mut entry = entry.wrap_err("failed to read entry")?;
        // Skip entries whose paths are not representable on this platform.
        let Ok(path) = entry.header().path() else {
            continue;
        };
        // Compute the match before reading so the borrow of `entry`'s header
        // ends prior to the mutable `read_to_end` borrow.
        let is_mmdb =
            path.extension().and_then(|x| x.to_str()) == Some("mmdb");
        if is_mmdb {
            let mut buf = Vec::new();
            entry
                .read_to_end(&mut buf)
                .wrap_err("failed to read entry")?;
            return Ok(Bytes::from(buf));
        }
    }
    // Fix: the previous message ("no entries in archive") was misleading —
    // this path is reached whenever no `.mmdb` member exists, even if the
    // archive held other entries.
    bail!("no .mmdb entry found in archive");
}
async fn write_to_cache(
cache_dir: &Path,
db_path: &Path,
db: &[u8],
) -> Result<()> {
fs::create_dir_all(cache_dir)
.await
.wrap_err("failed to create parent directories")?;
fs::write(db_path, db)
.await
.wrap_err("failed to write to file")?;
Ok(())
}

View File

@@ -0,0 +1,15 @@
[package]
name = "modrinth-util"
edition.workspace = true
rust-version.workspace = true
repository.workspace = true
[dependencies]
actix-web = { workspace = true }
derive_more = { workspace = true, features = ["display", "error", "from"] }
dotenvy = { workspace = true }
eyre = { workspace = true }
serde = { workspace = true, features = ["derive"] }
[lints]
workspace = true

View File

@@ -0,0 +1 @@
Modrinth services utilities.

View File

@@ -0,0 +1,326 @@
use std::{
convert::Infallible,
fmt::{Debug, Display},
};
use actix_web::{HttpResponse, ResponseError, http::StatusCode};
use derive_more::{Display, Error};
use serde::{Deserialize, Serialize};
/// Error when calling an HTTP endpoint.
#[derive(Debug, Display, Error)]
pub enum ApiError {
    /// Error occurred on the server side, which the caller has no fault in.
    ///
    /// Served as `500 Internal Server Error`; the description is withheld
    /// from the response body.
    Internal(eyre::Report),
    /// Caller made an invalid or malformed request.
    ///
    /// Served as `400 Bad Request`.
    Request(eyre::Report),
    /// Caller attempted a request which they are not allowed to make.
    ///
    /// Served as `401 Unauthorized`.
    Auth(eyre::Report),
}
impl ResponseError for ApiError {
    /// Maps each error category onto its HTTP status code.
    fn status_code(&self) -> StatusCode {
        match *self {
            Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR,
            Self::Request(_) => StatusCode::BAD_REQUEST,
            Self::Auth(_) => StatusCode::UNAUTHORIZED,
        }
    }
    /// Builds the JSON error body sent back to the caller.
    fn error_response(&self) -> HttpResponse {
        // internal error details are not leaked to the caller
        let description = if matches!(self, Self::Internal(_)) {
            None
        } else {
            Some(self.to_string())
        };
        HttpResponse::build(self.status_code())
            .json(ErrorResponse { description })
    }
}
/// How an [`ApiError`] is represented when sending over an HTTP request.
///
/// This is the JSON body produced by `ApiError`'s `error_response`
/// implementation.
#[derive(Debug, Serialize, Deserialize)]
pub struct ErrorResponse {
    /// Text description of the error that occurred.
    ///
    /// [`ApiError::Internal`] errors have their description filtered out, and
    /// will hold [`None`].
    pub description: Option<String>,
}
/// Allows wrapping [`Result`]s and [`Option`]s into [`Result<T, ApiError>`]s.
///
/// Only [`Context::wrap_err_with`] is required; every other method is a
/// provided convenience built on top of it.
#[allow(
    clippy::missing_errors_doc,
    reason = "this trait's purpose is improving error handling"
)]
pub trait Context<T, E>: Sized {
    /// Maps the error variant into an [`eyre::Report`], creating the message
    /// using `f`.
    fn wrap_err_with<D>(self, f: impl FnOnce() -> D) -> Result<T, eyre::Report>
    where
        D: Send + Sync + Debug + Display + 'static;
    /// Maps the error variant into an [`eyre::Report`] with the given message.
    #[inline]
    fn wrap_err<D>(self, msg: D) -> Result<T, eyre::Report>
    where
        D: Send + Sync + Debug + Display + 'static,
    {
        self.wrap_err_with(|| msg)
    }
    /// Maps the error variant into an [`ApiError::Internal`] using the closure to create the message.
    #[inline]
    fn wrap_internal_err_with<D>(
        self,
        f: impl FnOnce() -> D,
    ) -> Result<T, ApiError>
    where
        D: Send + Sync + Debug + Display + 'static,
    {
        self.wrap_err_with(f).map_err(ApiError::Internal)
    }
    /// Maps the error variant into an [`ApiError::Internal`] with the given message.
    #[inline]
    fn wrap_internal_err<D>(self, msg: D) -> Result<T, ApiError>
    where
        D: Send + Sync + Debug + Display + 'static,
    {
        self.wrap_internal_err_with(|| msg)
    }
    /// Maps the error variant into an [`ApiError::Request`] using the closure to create the message.
    #[inline]
    fn wrap_request_err_with<D>(
        self,
        f: impl FnOnce() -> D,
    ) -> Result<T, ApiError>
    where
        D: Send + Sync + Debug + Display + 'static,
    {
        self.wrap_err_with(f).map_err(ApiError::Request)
    }
    /// Maps the error variant into an [`ApiError::Request`] with the given message.
    #[inline]
    fn wrap_request_err<D>(self, msg: D) -> Result<T, ApiError>
    where
        D: Send + Sync + Debug + Display + 'static,
    {
        self.wrap_request_err_with(|| msg)
    }
    /// Maps the error variant into an [`ApiError::Auth`] using the closure to create the message.
    #[inline]
    fn wrap_auth_err_with<D>(self, f: impl FnOnce() -> D) -> Result<T, ApiError>
    where
        D: Send + Sync + Debug + Display + 'static,
    {
        self.wrap_err_with(f).map_err(ApiError::Auth)
    }
    /// Maps the error variant into an [`ApiError::Auth`] with the given message.
    #[inline]
    fn wrap_auth_err<D>(self, msg: D) -> Result<T, ApiError>
    where
        D: Send + Sync + Debug + Display + 'static,
    {
        self.wrap_auth_err_with(|| msg)
    }
}
/// Blanket [`Context`] support for any [`Result`] that eyre can already wrap.
impl<T, E> Context<T, E> for Result<T, E>
where
    Self: eyre::WrapErr<T, E>,
{
    fn wrap_err_with<D>(self, f: impl FnOnce() -> D) -> Result<T, eyre::Report>
    where
        D: Send + Sync + Debug + Display + 'static,
    {
        // Defer entirely to eyre's own wrapping behaviour.
        eyre::WrapErr::wrap_err_with(self, f)
    }
}
/// [`Context`] support for [`Option`]: a [`None`] becomes an error report
/// whose message is produced by the closure.
impl<T> Context<T, Infallible> for Option<T> {
    fn wrap_err_with<D>(self, f: impl FnOnce() -> D) -> Result<T, eyre::Report>
    where
        D: Send + Sync + Debug + Display + 'static,
    {
        match self {
            Some(value) => Ok(value),
            None => Err(eyre::Report::msg(f())),
        }
    }
}
// impl<T, E, Ty> Context<T, E> for Ty where Ty: eyre::WrapErr<T, E> {}
// impl<T, Ty> Context<T, Infallible> for Ty where Ty: eyre::OptionExt<T> {}
// impl<T, E> Context<T, E> for Result<T, E>
// where
// Self: eyre::WrapErr<T, E>,
// {
// fn wrap_err_with<D>(self, f: impl FnOnce() -> D) -> Result<T, eyre::Report>
// where
// D: Send + Sync + Debug + Display + 'static,
// {
// self.map_err(|err| eyre::Report::new(err).wrap_err(f()))
// }
// }
// impl<T> Context<T, Infallible> for Option<T> {
// fn wrap_err_with<D>(self, f: impl FnOnce() -> D) -> Result<T, eyre::Report>
// where
// D: Send + Sync + Debug + Display + 'static,
// {
// self.ok_or_else(|| eyre::Report::msg(f()))
// }
// }
#[cfg(test)]
mod tests {
use super::*;
use actix_web::http::StatusCode;
#[test]
fn test_api_error_display() {
let error = ApiError::Internal(eyre::eyre!("test internal error"));
assert!(error.to_string().contains("test internal error"));
let error = ApiError::Request(eyre::eyre!("test request error"));
assert!(error.to_string().contains("test request error"));
let error = ApiError::Auth(eyre::eyre!("test auth error"));
assert!(error.to_string().contains("test auth error"));
}
#[test]
fn test_api_error_debug() {
let error = ApiError::Internal(eyre::eyre!("test error"));
let debug_str = format!("{error:?}");
assert!(debug_str.contains("Internal"));
assert!(debug_str.contains("test error"));
}
#[test]
fn test_response_error_status_codes() {
let internal_error = ApiError::Internal(eyre::eyre!("internal error"));
assert_eq!(
internal_error.status_code(),
StatusCode::INTERNAL_SERVER_ERROR
);
let request_error = ApiError::Request(eyre::eyre!("request error"));
assert_eq!(request_error.status_code(), StatusCode::BAD_REQUEST);
let auth_error = ApiError::Auth(eyre::eyre!("auth error"));
assert_eq!(auth_error.status_code(), StatusCode::UNAUTHORIZED);
}
#[test]
fn test_response_error_response() {
let error = ApiError::Request(eyre::eyre!("test request error"));
let response = error.error_response();
assert_eq!(response.status(), StatusCode::BAD_REQUEST);
// Skip the body parsing test as it requires async and is more complex
// The important thing is that the error response is created correctly
}
#[test]
fn test_context_trait_result() {
let result: Result<i32, std::io::Error> = Ok(42);
let wrapped = result.wrap_err("context message");
assert_eq!(wrapped.unwrap(), 42);
let result: Result<i32, std::io::Error> = Err(std::io::Error::new(
std::io::ErrorKind::NotFound,
"not found",
));
let wrapped = result.wrap_err("context message");
assert!(wrapped.is_err());
assert!(wrapped.unwrap_err().to_string().contains("context message"));
}
#[test]
// `wrap_err` on an `Option`: `Some` passes through unchanged; `None`
// becomes an error whose display is exactly the supplied message.
fn test_context_trait_option() {
    let present: Option<i32> = Some(42);
    assert_eq!(present.wrap_err("context message").unwrap(), 42);

    let absent: Option<i32> = None;
    let wrapped = absent.wrap_err("context message");
    assert!(wrapped.is_err());
    assert_eq!(wrapped.unwrap_err().to_string(), "context message");
}
#[test]
// `wrap_internal_err` must produce the `Internal` variant carrying the
// supplied context text.
fn test_context_trait_internal_error() {
    let failing: Result<i32, std::io::Error> = Err(std::io::Error::new(
        std::io::ErrorKind::NotFound,
        "not found",
    ));
    match failing.wrap_internal_err("internal error context") {
        Err(ApiError::Internal(report)) => {
            assert!(report.to_string().contains("internal error context"));
        }
        _ => panic!("Expected Internal error"),
    }
}
#[test]
// `wrap_request_err` must produce the `Request` variant carrying the
// supplied context text.
fn test_context_trait_request_error() {
    let failing: Result<i32, std::io::Error> = Err(std::io::Error::new(
        std::io::ErrorKind::NotFound,
        "not found",
    ));
    match failing.wrap_request_err("request error context") {
        Err(ApiError::Request(report)) => {
            assert!(report.to_string().contains("request error context"));
        }
        _ => panic!("Expected Request error"),
    }
}
#[test]
// `wrap_auth_err` must produce the `Auth` variant carrying the supplied
// context text.
fn test_context_trait_auth_error() {
    let failing: Result<i32, std::io::Error> = Err(std::io::Error::new(
        std::io::ErrorKind::NotFound,
        "not found",
    ));
    match failing.wrap_auth_err("auth error context") {
        Err(ApiError::Auth(report)) => {
            assert!(report.to_string().contains("auth error context"));
        }
        _ => panic!("Expected Auth error"),
    }
}
#[test]
// The closure form (`wrap_err_with`) should build the context string
// lazily and attach it to the resulting error.
fn test_context_trait_with_closure() {
    let failing: Result<i32, std::io::Error> = Err(std::io::Error::new(
        std::io::ErrorKind::NotFound,
        "not found",
    ));
    let wrapped = failing.wrap_err_with(|| format!("context with {}", "dynamic"));
    assert!(wrapped.is_err());
    let message = wrapped.unwrap_err().to_string();
    assert!(message.contains("context with dynamic"));
}
}

View File

@@ -0,0 +1,23 @@
#![doc = include_str!("../README.md")]
mod error;
pub use error::*;
use eyre::{Result, eyre};
/// Reads the environment variable `key`, falling back to any `.env`
/// file via [`dotenvy`].
///
/// # Errors
///
/// Returns an error naming `key` when the variable is either unset or
/// set to the empty string.
#[track_caller]
pub fn env_var(key: &str) -> Result<String> {
    let value = dotenvy::var(key)
        .wrap_err_with(|| eyre!("missing environment variable `{key}`"))?;
    // Treat an empty value the same as a missing one so misconfigured
    // deployments fail loudly instead of propagating "".
    match value.as_str() {
        "" => Err(eyre!("environment variable `{key}` is empty")),
        _ => Ok(value),
    }
}

View File

@@ -3,12 +3,13 @@
<button
v-if="!!slots.title"
:class="buttonClass ?? 'flex flex-col gap-2 bg-transparent m-0 p-0 border-none'"
@click="() => (isOpen ? close() : open())"
@click="() => (forceOpen ? undefined : toggledOpen ? close() : open())"
>
<slot name="button" :open="isOpen">
<div class="flex items-center gap-1 w-full">
<slot name="title" />
<DropdownIcon
v-if="!forceOpen"
class="ml-auto size-5 transition-transform duration-300 shrink-0"
:class="{ 'rotate-180': isOpen }"
/>
@@ -28,7 +29,7 @@
<script setup lang="ts">
import { DropdownIcon } from '@modrinth/assets'
import { ref, useSlots } from 'vue'
import { computed, ref, useSlots } from 'vue'
const props = withDefaults(
defineProps<{
@@ -37,6 +38,7 @@ const props = withDefaults(
buttonClass?: string
contentClass?: string
titleWrapperClass?: string
forceOpen?: boolean
}>(),
{
type: 'standard',
@@ -44,27 +46,29 @@ const props = withDefaults(
buttonClass: null,
contentClass: null,
titleWrapperClass: null,
forceOpen: false,
},
)
const isOpen = ref(props.openByDefault)
const toggledOpen = ref(props.openByDefault)
const isOpen = computed(() => toggledOpen.value || props.forceOpen)
const emit = defineEmits(['onOpen', 'onClose'])
const slots = useSlots()
function open() {
isOpen.value = true
toggledOpen.value = true
emit('onOpen')
}
function close() {
isOpen.value = false
toggledOpen.value = false
emit('onClose')
}
defineExpose({
open,
close,
isOpen,
isOpen: toggledOpen,
})
defineOptions({

View File

@@ -5,9 +5,11 @@
:disabled="disabled"
:dropdown-id="dropdownId"
:tooltip="tooltip"
:placement="placement"
>
<slot></slot>
<template #menu>
<slot name="menu-header" />
<template v-for="(option, index) in options.filter((x) => x.shown === undefined || x.shown)">
<div
v-if="isDivider(option)"
@@ -96,12 +98,14 @@ withDefaults(
disabled?: boolean
dropdownId?: string
tooltip?: string
placement?: string
}>(),
{
options: () => [],
disabled: false,
dropdownId: undefined,
tooltip: undefined,
placement: 'bottom-end',
},
)

View File

@@ -3,7 +3,7 @@
ref="dropdown"
no-auto-focus
:aria-id="dropdownId || null"
placement="bottom-end"
:placement="placement"
:class="dropdownClass"
@apply-hide="focusTrigger"
@apply-show="focusMenuChild"
@@ -45,6 +45,11 @@ defineProps({
default: null,
required: false,
},
placement: {
type: String,
default: 'bottom-end',
required: false,
},
})
function focusMenuChild() {

View File

@@ -18,7 +18,9 @@
{{ selectedItem.formatted_name ?? selectedItem.option }}
</TagItem>
<TagItem
v-for="providedItem in items.filter((x) => x.provided)"
v-for="providedItem in items.filter(
(x) => x.provided && !overriddenProvidedFilterTypes.includes(x.type),
)"
:key="`provided-filter-${providedItem.type}-${providedItem.option}`"
v-tooltip="formatMessage(providedMessage ?? defaultProvidedMessage)"
:style="{ '--_bg-color': `var(--color-raised-bg)` }"

View File

@@ -88,7 +88,7 @@
/>
<TresAmbientLight :intensity="2" />
<TresDirectionalLight :position="[2, 4, 3]" :intensity="1.2" :cast-shadow="true" />
<TresDirectionalLight :position="[-3, 4, -2]" :intensity="1.2" :cast-shadow="true" />
</TresCanvas>
<div

View File

@@ -17,6 +17,9 @@
"button.cancel": {
"defaultMessage": "Cancel"
},
"button.clear": {
"defaultMessage": "Clear"
},
"button.close": {
"defaultMessage": "Close"
},

View File

@@ -25,6 +25,10 @@ export const commonMessages = defineMessages({
id: 'button.cancel',
defaultMessage: 'Cancel',
},
clearButton: {
id: 'button.clear',
defaultMessage: 'Clear',
},
closeButton: {
id: 'button.close',
defaultMessage: 'Close',

View File

@@ -10,6 +10,39 @@ export type VersionEntry = {
}
const VERSIONS: VersionEntry[] = [
{
date: `2025-10-26T18:30:00-07:00`,
product: 'app',
version: '0.10.15',
body: `## Improvements
- Fixed skins page uploading modified 'normalized' versions of the skin texture instead of the original.
- Improved skins page lighting to have the player model be lit more from the front.`,
},
{
date: `2025-10-26T18:05:00-07:00`,
product: 'web',
body: `## Improvements
- Fixed the colors of OLED mode being brighter than intended.`,
},
{
date: `2025-10-24T21:05:00-07:00`,
product: 'app',
version: '0.10.14',
body: `## Improvements
- Fixed window maximized state not being saved properly.
- Fixed padding issue when Friends are loading.
- Fixed the colors of OLED mode being brighter than intended.`,
},
{
date: `2025-10-19T17:45:00-07:00`,
product: 'app',
version: '0.10.13',
body: `## Improvements
- Revamped the app sidebar and friends UI to be more straightforward and easier to use.
- Improved the UI of the Modrinth account button in the bottom left corner. It's now more visually consistent with the other navigation buttons and it has a link to your profile.
- Updated the ad fallback to be green again instead of blue.
- Fixed 'Open folder' in the instance page context menu having the wrong icon.`,
},
{
date: `2025-10-15T12:15:00-07:00`,
product: 'app',

View File

@@ -9,6 +9,7 @@ export class ModrinthServerError extends Error {
public readonly originalError?: Error,
public readonly module?: string,
public readonly v1Error?: V1ErrorInfo,
public readonly responseData?: any,
) {
let errorMessage = message
let method = 'GET'

View File

@@ -373,3 +373,5 @@ export function arrayBufferToBase64(buffer: Uint8Array | ArrayBuffer): string {
const bytes = buffer instanceof Uint8Array ? buffer : new Uint8Array(buffer)
return btoa(String.fromCharCode(...bytes))
}
// NOTE(review): presumably the default body text pre-filled into the email
// sent when crediting a server owner after an incident — confirm against
// the callers that reference this constant.
export const DEFAULT_CREDIT_EMAIL_MESSAGE =
"We're really sorry about the recent issues with your server."