Merge tag 'v0.10.16' into beta

This commit is contained in:
2025-11-01 14:14:52 +03:00
203 changed files with 6321 additions and 2161 deletions

View File

@@ -1,6 +1,8 @@
**/*.rs
.sqlx
java/build
# Migrations existing before Prettier formatted them shall always be ignored,
# as any changes to them will break existing deployments
migrations/20240711194701_init.sql

View File

@@ -1,6 +1,6 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n max_concurrent_writes, max_concurrent_downloads,\n theme, default_page, collapsed_navigation, hide_nametag_skins_page, advanced_rendering, native_decorations,\n discord_rpc, developer_mode, telemetry, personalized_ads,\n onboarded,\n json(extra_launch_args) extra_launch_args, json(custom_env_vars) custom_env_vars,\n mc_memory_max, mc_force_fullscreen, mc_game_resolution_x, mc_game_resolution_y, hide_on_process_start,\n hook_pre_launch, hook_wrapper, hook_post_exit,\n custom_dir, prev_custom_dir, migrated, json(feature_flags) feature_flags, toggle_sidebar,\n skipped_update, pending_update_toast_for_version, auto_download_updates\n FROM settings\n ",
"query": "\n SELECT\n max_concurrent_writes, max_concurrent_downloads,\n theme, default_page, collapsed_navigation, hide_nametag_skins_page, advanced_rendering, native_decorations,\n discord_rpc, developer_mode, telemetry, personalized_ads,\n onboarded,\n json(extra_launch_args) extra_launch_args, json(custom_env_vars) custom_env_vars,\n mc_memory_max, mc_force_fullscreen, mc_game_resolution_x, mc_game_resolution_y, hide_on_process_start,\n hook_pre_launch, hook_wrapper, hook_post_exit,\n custom_dir, prev_custom_dir, migrated, json(feature_flags) feature_flags, toggle_sidebar,\n skipped_update, pending_update_toast_for_version, auto_download_updates,\n version\n FROM settings\n ",
"describe": {
"columns": [
{
@@ -157,6 +157,11 @@
"name": "auto_download_updates",
"ordinal": 30,
"type_info": "Integer"
},
{
"name": "version",
"ordinal": 31,
"type_info": "Integer"
}
],
"parameters": {
@@ -193,8 +198,9 @@
false,
true,
true,
true
true,
false
]
},
"hash": "7dc83d7ffa3d583fc5ffaf13811a8dab4d0b9ded6200f827b9de7ac32e5318d5"
"hash": "07ea3a644644de61c4ed7c30ee711d29fd49f10534230b1b03097275a30cb50f"
}

View File

@@ -41,7 +41,7 @@
{
"name": "display_claims!: serde_json::Value",
"ordinal": 7,
"type_info": "Text"
"type_info": "Null"
}
],
"parameters": {

View File

@@ -1,12 +1,12 @@
{
"db_name": "SQLite",
"query": "\n UPDATE settings\n SET\n max_concurrent_writes = $1,\n max_concurrent_downloads = $2,\n\n theme = $3,\n default_page = $4,\n collapsed_navigation = $5,\n advanced_rendering = $6,\n native_decorations = $7,\n\n discord_rpc = $8,\n developer_mode = $9,\n telemetry = $10,\n personalized_ads = $11,\n\n onboarded = $12,\n\n extra_launch_args = jsonb($13),\n custom_env_vars = jsonb($14),\n mc_memory_max = $15,\n mc_force_fullscreen = $16,\n mc_game_resolution_x = $17,\n mc_game_resolution_y = $18,\n hide_on_process_start = $19,\n\n hook_pre_launch = $20,\n hook_wrapper = $21,\n hook_post_exit = $22,\n\n custom_dir = $23,\n prev_custom_dir = $24,\n migrated = $25,\n\n toggle_sidebar = $26,\n feature_flags = $27,\n hide_nametag_skins_page = $28,\n\n skipped_update = $29,\n pending_update_toast_for_version = $30,\n auto_download_updates = $31\n ",
"query": "\n UPDATE settings\n SET\n max_concurrent_writes = $1,\n max_concurrent_downloads = $2,\n\n theme = $3,\n default_page = $4,\n collapsed_navigation = $5,\n advanced_rendering = $6,\n native_decorations = $7,\n\n discord_rpc = $8,\n developer_mode = $9,\n telemetry = $10,\n personalized_ads = $11,\n\n onboarded = $12,\n\n extra_launch_args = jsonb($13),\n custom_env_vars = jsonb($14),\n mc_memory_max = $15,\n mc_force_fullscreen = $16,\n mc_game_resolution_x = $17,\n mc_game_resolution_y = $18,\n hide_on_process_start = $19,\n\n hook_pre_launch = $20,\n hook_wrapper = $21,\n hook_post_exit = $22,\n\n custom_dir = $23,\n prev_custom_dir = $24,\n migrated = $25,\n\n toggle_sidebar = $26,\n feature_flags = $27,\n hide_nametag_skins_page = $28,\n\n skipped_update = $29,\n pending_update_toast_for_version = $30,\n auto_download_updates = $31,\n\n version = $32\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 31
"Right": 32
},
"nullable": []
},
"hash": "eb95fac3043d0ffd10caef69cc469474cc5c0d36cc0698c4cc0852da81fed158"
"hash": "a40e60da6dd1312d4a1ed52fa8fd2394e7ad21de1cb44cf8b93c4b1459cdc716"
}

View File

@@ -73,6 +73,7 @@ serde_json = { workspace = true }
serde_with = { workspace = true }
sha1_smol = { workspace = true }
sha2 = { workspace = true }
shlex = { workspace = true }
sqlx = { workspace = true, features = [
"json",
"macros",

View File

@@ -1,18 +1,7 @@
import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowCopyAction
import com.github.jengelman.gradle.plugins.shadow.transformers.CacheableTransformer
import com.github.jengelman.gradle.plugins.shadow.transformers.ResourceTransformer
import com.github.jengelman.gradle.plugins.shadow.transformers.TransformerContext
import org.apache.tools.zip.ZipEntry
import org.apache.tools.zip.ZipOutputStream
import java.io.IOException
import java.util.jar.JarFile
import java.util.jar.Attributes as JarAttributes
import java.util.jar.Manifest as JarManifest
plugins {
java
id("com.diffplug.spotless") version "7.0.4"
id("com.gradleup.shadow") version "9.0.0-rc2"
id("com.diffplug.spotless") version "8.0.0"
id("com.gradleup.shadow") version "9.2.2"
}
repositories {
@@ -20,9 +9,9 @@ repositories {
}
dependencies {
implementation("org.ow2.asm:asm:9.8")
implementation("org.ow2.asm:asm-tree:9.8")
implementation("com.google.code.gson:gson:2.13.1")
implementation("org.ow2.asm:asm:9.9")
implementation("org.ow2.asm:asm-tree:9.9")
implementation("com.google.code.gson:gson:2.13.2")
testImplementation(libs.junit.jupiter)
testRuntimeOnly("org.junit.platform:junit-platform-launcher")
@@ -30,7 +19,7 @@ dependencies {
java {
toolchain {
languageVersion = JavaLanguageVersion.of(11)
languageVersion = JavaLanguageVersion.of(17)
}
}
@@ -56,52 +45,9 @@ tasks.shadowJar {
attributes["Premain-Class"] = "com.modrinth.theseus.agent.TheseusAgent"
}
enableRelocation = true
addMultiReleaseAttribute = false
enableAutoRelocation = true
relocationPrefix = "com.modrinth.theseus.shadow"
// Adapted from ManifestResourceTransformer to do one thing: remove Multi-Release.
// Multi-Release gets added by shadow because gson has Multi-Release set to true, however
// shadow strips the actual versions directory, as gson only has a module-info.class in there.
// However, older versions of SecureJarHandler crash if Multi-Release is set to true but the
// versions directory is missing.
transform(@CacheableTransformer object : ResourceTransformer {
private var manifestDiscovered = false
private var manifest: JarManifest? = null
override fun canTransformResource(element: FileTreeElement): Boolean {
return JarFile.MANIFEST_NAME.equals(element.path, ignoreCase = true)
}
override fun transform(context: TransformerContext) {
if (!manifestDiscovered) {
try {
manifest = JarManifest(context.inputStream)
manifestDiscovered = true
} catch (e: IOException) {
logger.warn("Failed to read MANIFEST.MF", e)
}
}
}
override fun hasTransformedResource(): Boolean = true
override fun modifyOutputStream(
os: ZipOutputStream,
preserveFileTimestamps: Boolean
) {
// If we didn't find a manifest, then let's create one.
if (manifest == null) {
manifest = JarManifest()
}
manifest!!.mainAttributes.remove(JarAttributes.Name.MULTI_RELEASE)
os.putNextEntry(ZipEntry(JarFile.MANIFEST_NAME).apply {
time = ShadowCopyAction.CONSTANT_TIME_FOR_ZIP_ENTRIES
})
manifest!!.write(os)
}
})
}
tasks.named<Test>("test") {

View File

@@ -1,5 +1,5 @@
[versions]
junit-jupiter = "5.12.1"
junit-jupiter = "5.14.0"
[libraries]
junit-jupiter = { module = "org.junit.jupiter:junit-jupiter", version.ref = "junit-jupiter" }

View File

@@ -1,6 +1,6 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.14.2-bin.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-9.1.0-bin.zip
networkTimeout=10000
validateDistributionUrl=true
zipStoreBase=GRADLE_USER_HOME

View File

@@ -1,6 +1,6 @@
plugins {
// Apply the foojay-resolver plugin to allow automatic download of JDKs
id("org.gradle.toolchains.foojay-resolver-convention") version "0.10.0"
id("org.gradle.toolchains.foojay-resolver-convention") version "1.0.0"
}
rootProject.name = "theseus"

View File

@@ -0,0 +1 @@
ALTER TABLE settings ADD COLUMN version INTEGER NOT NULL DEFAULT 1;

View File

@@ -1,6 +1,6 @@
//! Miscellaneous PNG utilities for Minecraft skins.
use std::io::Read;
use std::io::{BufRead, Cursor, Seek};
use std::sync::Arc;
use base64::Engine;
@@ -9,7 +9,8 @@ use data_url::DataUrl;
use futures::{Stream, TryStreamExt, future::Either, stream};
use itertools::Itertools;
use rgb::Rgba;
use tokio_util::{compat::FuturesAsyncReadCompatExt, io::SyncIoBridge};
use tokio::io::AsyncReadExt;
use tokio_util::compat::FuturesAsyncReadCompatExt;
use url::Url;
use crate::{
@@ -95,7 +96,8 @@ pub fn dimensions(png_data: &[u8]) -> crate::Result<(u32, u32)> {
pub async fn normalize_skin_texture(
texture: &UrlOrBlob,
) -> crate::Result<Bytes> {
let texture_stream = SyncIoBridge::new(Box::pin(
let mut texture_data = Vec::with_capacity(8192);
Box::pin(
match texture {
UrlOrBlob::Url(url) => Either::Left(
url_to_data_stream(url)
@@ -112,84 +114,84 @@ pub async fn normalize_skin_texture(
),
}
.compat(),
));
)
.read_to_end(&mut texture_data)
.await?;
tokio::task::spawn_blocking(|| {
let mut png_reader = {
let mut decoder = png::Decoder::new(texture_stream);
decoder.set_transformations(
png::Transformations::normalize_to_color8(),
);
decoder.read_info()
}?;
let mut png_reader = {
let mut decoder = png::Decoder::new(Cursor::new(texture_data));
decoder
.set_transformations(png::Transformations::normalize_to_color8());
decoder.read_info()
}?;
// The code below assumes that the skin texture has valid dimensions.
// This also serves as a way to bail out early for obviously invalid or
// adversarial textures
if png_reader.info().width != 64
|| ![64, 32].contains(&png_reader.info().height)
{
Err(ErrorKind::InvalidSkinTexture)?;
}
// The code below assumes that the skin texture has valid dimensions.
// This also serves as a way to bail out early for obviously invalid or
// adversarial textures
if png_reader.info().width != 64
|| ![64, 32].contains(&png_reader.info().height)
{
Err(ErrorKind::InvalidSkinTexture)?;
}
let is_legacy_skin = png_reader.info().height == 32;
let mut texture_buf =
get_skin_texture_buffer(&mut png_reader, is_legacy_skin)?;
if is_legacy_skin {
convert_legacy_skin_texture(&mut texture_buf, png_reader.info());
do_notch_transparency_hack(&mut texture_buf, png_reader.info());
}
make_inner_parts_opaque(&mut texture_buf, png_reader.info());
let is_legacy_skin = png_reader.info().height == 32;
let mut texture_buf =
get_skin_texture_buffer(&mut png_reader, is_legacy_skin)?;
if is_legacy_skin {
convert_legacy_skin_texture(&mut texture_buf, png_reader.info());
do_notch_transparency_hack(&mut texture_buf, png_reader.info());
}
make_inner_parts_opaque(&mut texture_buf, png_reader.info());
let mut encoded_png = vec![];
let mut encoded_png = vec![];
let mut png_encoder = png::Encoder::new(&mut encoded_png, 64, 64);
png_encoder.set_color(png::ColorType::Rgba);
png_encoder.set_depth(png::BitDepth::Eight);
png_encoder.set_filter(png::FilterType::NoFilter);
png_encoder.set_compression(png::Compression::Fast);
let mut png_encoder = png::Encoder::new(&mut encoded_png, 64, 64);
png_encoder.set_color(png::ColorType::Rgba);
png_encoder.set_depth(png::BitDepth::Eight);
png_encoder.set_filter(png::Filter::NoFilter);
png_encoder.set_compression(png::Compression::Fast);
// Keeping color space information properly set, to handle the occasional
// strange PNG with non-sRGB chromaticities and/or different grayscale spaces
// that keeps most people wondering, is what sets a carefully crafted image
// manipulation routine apart :)
if let Some(source_chromaticities) =
png_reader.info().source_chromaticities.as_ref().copied()
{
png_encoder.set_source_chromaticities(source_chromaticities);
}
if let Some(source_gamma) =
png_reader.info().source_gamma.as_ref().copied()
{
png_encoder.set_source_gamma(source_gamma);
}
if let Some(source_srgb) = png_reader.info().srgb.as_ref().copied() {
png_encoder.set_source_srgb(source_srgb);
}
// Keeping color space information properly set, to handle the occasional
// strange PNG with non-sRGB chromaticities and/or different grayscale spaces
// that keeps most people wondering, is what sets a carefully crafted image
// manipulation routine apart :)
if let Some(source_chromaticities) =
png_reader.info().source_chromaticities.as_ref().copied()
{
png_encoder.set_source_chromaticities(source_chromaticities);
}
if let Some(source_gamma) = png_reader.info().source_gamma.as_ref().copied()
{
png_encoder.set_source_gamma(source_gamma);
}
if let Some(source_srgb) = png_reader.info().srgb.as_ref().copied() {
png_encoder.set_source_srgb(source_srgb);
}
let png_buf = bytemuck::try_cast_slice(&texture_buf)
.map_err(|_| ErrorKind::InvalidPng)?;
let mut png_writer = png_encoder.write_header()?;
png_writer.write_image_data(png_buf)?;
png_writer.finish()?;
let png_buf = bytemuck::try_cast_slice(&texture_buf)
.map_err(|_| ErrorKind::InvalidPng)?;
let mut png_writer = png_encoder.write_header()?;
png_writer.write_image_data(png_buf)?;
png_writer.finish()?;
Ok(encoded_png.into())
})
.await?
Ok(encoded_png.into())
}
/// Reads a skin texture and returns a 64x64 buffer in RGBA format.
fn get_skin_texture_buffer<R: Read>(
fn get_skin_texture_buffer<R: BufRead + Seek>(
png_reader: &mut png::Reader<R>,
is_legacy_skin: bool,
) -> crate::Result<Vec<Rgba<u8>>> {
let output_buffer_size = png_reader
.output_buffer_size()
.expect("Reasonable skin texture size verified already");
let mut png_buf = if is_legacy_skin {
// Legacy skins have half the height, so duplicate the rows to
// turn them into a 64x64 texture
vec![0; png_reader.output_buffer_size() * 2]
vec![0; output_buffer_size * 2]
} else {
// Modern skins are left as-is
vec![0; png_reader.output_buffer_size()]
vec![0; output_buffer_size]
};
png_reader.next_frame(&mut png_buf)?;
@@ -373,9 +375,10 @@ fn set_alpha(
#[tokio::test]
async fn normalize_skin_texture_works() {
let decode_to_pixels = |png_data: &[u8]| {
let decoder = png::Decoder::new(png_data);
let decoder = png::Decoder::new(Cursor::new(png_data));
let mut reader = decoder.read_info().expect("Failed to read PNG info");
let mut buffer = vec![0; reader.output_buffer_size()];
let mut buffer =
vec![0; reader.output_buffer_size().expect("Skin size too large")];
reader
.next_frame(&mut buffer)
.expect("Failed to decode PNG");

View File

@@ -12,6 +12,8 @@ use crate::util::fetch::{fetch_mirrors, write};
use crate::util::io;
use crate::{State, profile};
use async_zip::base::read::seek::ZipFileReader;
use futures::StreamExt;
use path_util::SafeRelativeUtf8UnixPathBuf;
use super::install_from::{
CreatePack, CreatePackLocation, PackFormat, generate_pack_from_file,
@@ -19,7 +21,6 @@ use super::install_from::{
};
use crate::data::ProjectType;
use std::io::{Cursor, ErrorKind};
use std::path::PathBuf;
/// Install a pack
/// Wrapper around install_pack_files that generates a pack creation description, and
@@ -93,197 +94,194 @@ pub async fn install_zipped_mrpack_files(
})?;
// Extract index of modrinth.index.json
let zip_index_option = zip_reader.file().entries().iter().position(|f| {
f.filename().as_str().unwrap_or_default() == "modrinth.index.json"
});
if let Some(zip_index) = zip_index_option {
let mut manifest = String::new();
let mut reader = zip_reader.reader_with_entry(zip_index).await?;
reader.read_to_string_checked(&mut manifest).await?;
let pack: PackFormat = serde_json::from_str(&manifest)?;
if &*pack.game != "minecraft" {
return Err(crate::ErrorKind::InputError(
"Pack does not support Minecraft".to_string(),
)
.into());
}
// Sets generated profile attributes to the pack ones (using profile::edit)
set_profile_information(
profile_path.clone(),
&description,
&pack.name,
&pack.dependencies,
ignore_lock,
)
.await?;
let profile_path = profile_path.clone();
let loading_bar = init_or_edit_loading(
existing_loading_bar,
LoadingBarType::PackDownload {
profile_path: profile_path.clone(),
pack_name: pack.name.clone(),
icon,
pack_id: project_id,
pack_version: version_id,
},
100.0,
"Downloading modpack",
)
.await?;
let num_files = pack.files.len();
use futures::StreamExt;
loading_try_for_each_concurrent(
futures::stream::iter(pack.files.into_iter())
.map(Ok::<PackFile, crate::Error>),
None,
Some(&loading_bar),
70.0,
num_files,
None,
|project| {
let profile_path = profile_path.clone();
async move {
//TODO: Future update: prompt user for optional files in a modpack
if let Some(env) = project.env
&& env
.get(&EnvType::Client)
.is_some_and(|x| x == &SideType::Unsupported)
{
return Ok(());
}
let file = fetch_mirrors(
&project
.downloads
.iter()
.map(|x| &**x)
.collect::<Vec<&str>>(),
project.hashes.get(&PackFileHash::Sha1).map(|x| &**x),
&state.fetch_semaphore,
&state.pool,
)
.await?;
let path = profile::get_full_path(&profile_path)
.await?
.join(project.path.as_str());
cache_file_hash(
file.clone(),
&profile_path,
project.path.as_str(),
project.hashes.get(&PackFileHash::Sha1).map(|x| &**x),
ProjectType::get_from_parent_folder(&path),
&state.pool,
)
.await?;
write(&path, &file, &state.io_semaphore).await?;
Ok(())
}
},
)
.await?;
emit_loading(&loading_bar, 0.0, Some("Extracting overrides"))?;
let mut total_len = 0;
for index in 0..zip_reader.file().entries().len() {
let file = zip_reader.file().entries().get(index).unwrap();
let filename = file.filename().as_str().unwrap_or_default();
if (filename.starts_with("overrides")
|| filename.starts_with("client-overrides"))
&& !filename.ends_with('/')
{
total_len += 1;
}
}
for index in 0..zip_reader.file().entries().len() {
let file = zip_reader.file().entries().get(index).unwrap();
let filename = file.filename().as_str().unwrap_or_default();
let file_path = PathBuf::from(filename);
if (filename.starts_with("overrides")
|| filename.starts_with("client-overrides"))
&& !filename.ends_with('/')
{
// Reads the file into the 'content' variable
let mut content = Vec::new();
let mut reader = zip_reader.reader_with_entry(index).await?;
reader.read_to_end_checked(&mut content).await?;
let mut new_path = PathBuf::new();
let components = file_path.components().skip(1);
for component in components {
new_path.push(component);
}
if new_path.file_name().is_some() {
let bytes = bytes::Bytes::from(content);
cache_file_hash(
bytes.clone(),
&profile_path,
&new_path.to_string_lossy(),
None,
ProjectType::get_from_parent_folder(&new_path),
&state.pool,
)
.await?;
write(
&profile::get_full_path(&profile_path)
.await?
.join(new_path),
&bytes,
&state.io_semaphore,
)
.await?;
}
emit_loading(
&loading_bar,
30.0 / total_len as f64,
Some(&format!("Extracting override {index}/{total_len}")),
)?;
}
}
// If the icon doesn't exist, we expect icon.png to be a potential icon.
// If it doesn't exist, and an override to icon.png exists, cache and use that
let potential_icon = profile::get_full_path(&profile_path)
.await?
.join("icon.png");
if !icon_exists && potential_icon.exists() {
profile::edit_icon(&profile_path, Some(&potential_icon)).await?;
}
if let Some(profile_val) = profile::get(&profile_path).await? {
crate::launcher::install_minecraft(
&profile_val,
Some(loading_bar),
false,
)
.await?;
}
Ok::<String, crate::Error>(profile_path.clone())
} else {
Err(crate::Error::from(crate::ErrorKind::InputError(
let Some(manifest_idx) = zip_reader.file().entries().iter().position(|f| {
matches!(f.filename().as_str(), Ok("modrinth.index.json"))
}) else {
return Err(crate::Error::from(crate::ErrorKind::InputError(
"No pack manifest found in mrpack".to_string(),
)))
)));
};
let mut manifest = String::new();
let mut reader = zip_reader.reader_with_entry(manifest_idx).await?;
reader.read_to_string_checked(&mut manifest).await?;
let pack: PackFormat = serde_json::from_str(&manifest)?;
if &*pack.game != "minecraft" {
return Err(crate::ErrorKind::InputError(
"Pack does not support Minecraft".to_string(),
)
.into());
}
// Sets generated profile attributes to the pack ones (using profile::edit)
set_profile_information(
profile_path.clone(),
&description,
&pack.name,
&pack.dependencies,
ignore_lock,
)
.await?;
let profile_path = profile_path.clone();
let loading_bar = init_or_edit_loading(
existing_loading_bar,
LoadingBarType::PackDownload {
profile_path: profile_path.clone(),
pack_name: pack.name.clone(),
icon,
pack_id: project_id,
pack_version: version_id,
},
100.0,
"Downloading modpack",
)
.await?;
let num_files = pack.files.len();
loading_try_for_each_concurrent(
futures::stream::iter(pack.files.into_iter())
.map(Ok::<PackFile, crate::Error>),
None,
Some(&loading_bar),
70.0,
num_files,
None,
|project| {
let profile_path = profile_path.clone();
async move {
//TODO: Future update: prompt user for optional files in a modpack
if let Some(env) = project.env
&& env
.get(&EnvType::Client)
.is_some_and(|x| x == &SideType::Unsupported)
{
return Ok(());
}
let file = fetch_mirrors(
&project
.downloads
.iter()
.map(|x| &**x)
.collect::<Vec<&str>>(),
project.hashes.get(&PackFileHash::Sha1).map(|x| &**x),
&state.fetch_semaphore,
&state.pool,
)
.await?;
let path = profile::get_full_path(&profile_path)
.await?
.join(project.path.as_str());
cache_file_hash(
file.clone(),
&profile_path,
project.path.as_str(),
project.hashes.get(&PackFileHash::Sha1).map(|x| &**x),
ProjectType::get_from_parent_folder(&path),
&state.pool,
)
.await?;
write(&path, &file, &state.io_semaphore).await?;
Ok(())
}
},
)
.await?;
emit_loading(&loading_bar, 0.0, Some("Extracting overrides"))?;
let override_file_entries = zip_reader
.file()
.entries()
.iter()
.enumerate()
.filter_map(|(index, file)| {
let filename = file.filename().as_str().unwrap_or_default();
((filename.starts_with("overrides/")
|| filename.starts_with("client-overrides/"))
&& !filename.ends_with('/'))
.then(|| (index, file.clone()))
})
.collect::<Vec<_>>();
let override_file_entries_count = override_file_entries.len();
for (i, (index, file)) in override_file_entries.into_iter().enumerate() {
let relative_override_file_path =
SafeRelativeUtf8UnixPathBuf::try_from(
file.filename().as_str().unwrap().to_string(),
)?;
let relative_override_file_path = relative_override_file_path
.strip_prefix("overrides")
.or_else(|_| relative_override_file_path.strip_prefix("client-overrides"))
.map_err(|_| {
crate::Error::from(crate::ErrorKind::OtherError(
format!("Failed to strip override prefix from override file path: {relative_override_file_path}")
))
})?;
let mut file_bytes = vec![];
let mut reader = zip_reader.reader_with_entry(index).await?;
reader.read_to_end_checked(&mut file_bytes).await?;
let file_bytes = bytes::Bytes::from(file_bytes);
cache_file_hash(
file_bytes.clone(),
&profile_path,
relative_override_file_path.as_str(),
None,
ProjectType::get_from_parent_folder(
relative_override_file_path.as_str(),
),
&state.pool,
)
.await?;
write(
&profile::get_full_path(&profile_path)
.await?
.join(relative_override_file_path.as_str()),
&file_bytes,
&state.io_semaphore,
)
.await?;
emit_loading(
&loading_bar,
30.0 / override_file_entries_count as f64,
Some(&format!(
"Extracting override {}/{override_file_entries_count}",
i + 1
)),
)?;
}
// If the icon doesn't exist, we expect icon.png to be a potential icon.
// If it doesn't exist, and an override to icon.png exists, cache and use that
let potential_icon = profile::get_full_path(&profile_path)
.await?
.join("icon.png");
if !icon_exists && potential_icon.exists() {
profile::edit_icon(&profile_path, Some(&potential_icon)).await?;
}
if let Some(profile_val) = profile::get(&profile_path).await? {
crate::launcher::install_minecraft(
&profile_val,
Some(loading_bar),
false,
)
.await?;
}
Ok::<String, crate::Error>(profile_path.clone())
}
#[tracing::instrument(skip(mrpack_file))]
@@ -303,127 +301,130 @@ pub async fn remove_all_related_files(
})?;
// Extract index of modrinth.index.json
let zip_index_option = zip_reader.file().entries().iter().position(|f| {
f.filename().as_str().unwrap_or_default() == "modrinth.index.json"
});
if let Some(zip_index) = zip_index_option {
let mut manifest = String::new();
let Some(manifest_idx) = zip_reader.file().entries().iter().position(|f| {
matches!(f.filename().as_str(), Ok("modrinth.index.json"))
}) else {
return Err(crate::Error::from(crate::ErrorKind::InputError(
"No pack manifest found in mrpack".to_string(),
)));
};
let mut reader = zip_reader.reader_with_entry(zip_index).await?;
reader.read_to_string_checked(&mut manifest).await?;
let mut manifest = String::new();
let pack: PackFormat = serde_json::from_str(&manifest)?;
let mut reader = zip_reader.reader_with_entry(manifest_idx).await?;
reader.read_to_string_checked(&mut manifest).await?;
if &*pack.game != "minecraft" {
return Err(crate::ErrorKind::InputError(
"Pack does not support Minecraft".to_string(),
)
.into());
}
let pack: PackFormat = serde_json::from_str(&manifest)?;
// Set install stage to installing, and do not change it back (as files are being removed and are not being reinstalled here)
crate::api::profile::edit(&profile_path, |prof| {
prof.install_stage = ProfileInstallStage::PackInstalling;
async { Ok(()) }
})
.await?;
if &*pack.game != "minecraft" {
return Err(crate::ErrorKind::InputError(
"Pack does not support Minecraft".to_string(),
)
.into());
}
// First, remove all modrinth projects by their version hashes
// Remove all modrinth projects by their version hashes
// We need to do a fetch to get the project ids from Modrinth
let state = State::get().await?;
let all_hashes = pack
.files
.iter()
.filter_map(|f| Some(f.hashes.get(&PackFileHash::Sha1)?.clone()))
.collect::<Vec<_>>();
// Set install stage to installing, and do not change it back (as files are being removed and are not being reinstalled here)
crate::api::profile::edit(&profile_path, |prof| {
prof.install_stage = ProfileInstallStage::PackInstalling;
async { Ok(()) }
})
.await?;
// First, get project info by hash
let file_infos = CachedEntry::get_file_many(
&all_hashes.iter().map(|x| &**x).collect::<Vec<_>>(),
None,
// First, remove all modrinth projects by their version hashes
// Remove all modrinth projects by their version hashes
// We need to do a fetch to get the project ids from Modrinth
let state = State::get().await?;
let all_hashes = pack
.files
.iter()
.filter_map(|f| Some(f.hashes.get(&PackFileHash::Sha1)?.clone()))
.collect::<Vec<_>>();
// First, get project info by hash
let file_infos = CachedEntry::get_file_many(
&all_hashes.iter().map(|x| &**x).collect::<Vec<_>>(),
None,
&state.pool,
&state.api_semaphore,
)
.await?;
let to_remove = file_infos
.into_iter()
.map(|p| p.project_id)
.collect::<Vec<_>>();
let profile = profile::get(&profile_path).await?.ok_or_else(|| {
crate::ErrorKind::UnmanagedProfileError(profile_path.to_string())
})?;
let profile_full_path = profile::get_full_path(&profile_path).await?;
for (file_path, project) in profile
.get_projects(
Some(CacheBehaviour::MustRevalidate),
&state.pool,
&state.api_semaphore,
)
.await?;
let to_remove = file_infos
.into_iter()
.map(|p| p.project_id)
.collect::<Vec<_>>();
let profile = profile::get(&profile_path).await?.ok_or_else(|| {
crate::ErrorKind::UnmanagedProfileError(profile_path.to_string())
})?;
let profile_full_path = profile::get_full_path(&profile_path).await?;
for (file_path, project) in profile
.get_projects(
Some(CacheBehaviour::MustRevalidate),
&state.pool,
&state.api_semaphore,
)
.await?
.await?
{
if let Some(metadata) = &project.metadata
&& to_remove.contains(&metadata.project_id)
{
if let Some(metadata) = &project.metadata
&& to_remove.contains(&metadata.project_id)
{
match io::remove_file(profile_full_path.join(file_path)).await {
Ok(_) => (),
Err(err) if err.kind() == ErrorKind::NotFound => (),
Err(err) => return Err(err.into()),
}
}
}
// Iterate over all Modrinth project file paths in the json, and remove them
// (There should be few, but this removes any files the .mrpack intended as Modrinth projects but were unrecognized)
for file in pack.files {
match io::remove_file(profile_full_path.join(file.path.as_str()))
.await
{
match io::remove_file(profile_full_path.join(file_path)).await {
Ok(_) => (),
Err(err) if err.kind() == ErrorKind::NotFound => (),
Err(err) => return Err(err.into()),
}
}
// Iterate over each 'overrides' file and remove it
for index in 0..zip_reader.file().entries().len() {
let file = zip_reader.file().entries().get(index).unwrap();
let filename = file.filename().as_str().unwrap_or_default();
let file_path = PathBuf::from(filename);
if (filename.starts_with("overrides")
|| filename.starts_with("client-overrides"))
&& !filename.ends_with('/')
{
let mut new_path = PathBuf::new();
let components = file_path.components().skip(1);
for component in components {
new_path.push(component);
}
// Remove this file if a corresponding one exists in the filesystem
match io::remove_file(
profile::get_full_path(&profile_path)
.await?
.join(&new_path),
)
.await
{
Ok(_) => (),
Err(err) if err.kind() == ErrorKind::NotFound => (),
Err(err) => return Err(err.into()),
}
}
}
Ok(())
} else {
Err(crate::Error::from(crate::ErrorKind::InputError(
"No pack manifest found in mrpack".to_string(),
)))
}
// Iterate over all Modrinth project file paths in the json, and remove them
// (There should be few, but this removes any files the .mrpack intended as Modrinth projects but were unrecognized)
for file in pack.files {
match io::remove_file(profile_full_path.join(file.path.as_str())).await
{
Ok(_) => (),
Err(err) if err.kind() == ErrorKind::NotFound => (),
Err(err) => return Err(err.into()),
}
}
// Iterate over each 'overrides' file and remove it
let override_file_entries =
zip_reader.file().entries().iter().filter(|file| {
let filename = file.filename().as_str().unwrap_or_default();
(filename.starts_with("overrides/")
|| filename.starts_with("client-overrides/"))
&& !filename.ends_with('/')
});
for file in override_file_entries {
let relative_override_file_path =
SafeRelativeUtf8UnixPathBuf::try_from(
file.filename().as_str().unwrap().to_string(),
)?;
let relative_override_file_path = relative_override_file_path
.strip_prefix("overrides")
.or_else(|_| relative_override_file_path.strip_prefix("client-overrides"))
.map_err(|_| {
crate::Error::from(crate::ErrorKind::OtherError(
format!("Failed to strip override prefix from override file path: {relative_override_file_path}")
))
})?;
// Remove this file if a corresponding one exists in the filesystem
match io::remove_file(
profile::get_full_path(&profile_path)
.await?
.join(relative_override_file_path.as_str()),
)
.await
{
Ok(_) => (),
Err(err) if err.kind() == ErrorKind::NotFound => (),
Err(err) => return Err(err.into()),
}
}
Ok(())
}

View File

@@ -666,7 +666,14 @@ async fn run_credentials(
.filter(|hook_command| !hook_command.is_empty());
if let Some(hook) = pre_launch_hooks {
// TODO: hook parameters
let mut cmd = hook.split(' ');
let mut cmd = shlex::split(hook)
.ok_or_else(|| {
crate::ErrorKind::LauncherError(format!(
"Invalid pre-launch command: {hook}",
))
})?
.into_iter();
if let Some(command) = cmd.next() {
let full_path = get_full_path(&profile.path).await?;
let result = Command::new(command)

View File

@@ -210,6 +210,9 @@ pub enum ErrorKind {
#[error("Deserialization error: {0}")]
DeserializationError(#[from] serde::de::value::Error),
#[error("Discord IPC error: {0}")]
DiscordRichPresenceError(#[from] discord_rich_presence::error::Error),
}
#[derive(Debug)]

View File

@@ -570,7 +570,19 @@ pub async fn launch_minecraft(
let args = version_info.arguments.clone().unwrap_or_default();
let mut command = match wrapper {
Some(hook) => {
let mut command = Command::new(hook);
let mut cmd = shlex::split(hook)
.ok_or_else(|| {
crate::ErrorKind::LauncherError(format!(
"Invalid wrapper command: {hook}",
))
})?
.into_iter();
let mut command = Command::new(cmd.next().ok_or(
crate::ErrorKind::LauncherError(
"Empty wrapper command".to_owned(),
),
)?);
command.args(cmd);
command.arg(&java_version.path);
command
}

View File

@@ -40,12 +40,7 @@ impl DiscordGuard {
/// Initialize discord IPC client, and attempt to connect to it
/// If it fails, it will still return a DiscordGuard, but the client will be unconnected
pub fn init() -> crate::Result<DiscordGuard> {
let dipc =
DiscordIpcClient::new("1190718475832918136").map_err(|e| {
crate::ErrorKind::OtherError(format!(
"Could not create Discord client {e}",
))
})?;
let dipc = DiscordIpcClient::new("1190718475832918136");
Ok(DiscordGuard {
client: Arc::new(RwLock::new(dipc)),
@@ -130,25 +125,14 @@ impl DiscordGuard {
let mut client: tokio::sync::RwLockWriteGuard<'_, DiscordIpcClient> =
self.client.write().await;
let res = client.set_activity(activity.clone());
let could_not_set_err = |e: Box<dyn serde::ser::StdError>| {
crate::ErrorKind::OtherError(format!(
"Could not update Discord activity {e}",
))
};
if reconnect_if_fail {
if let Err(_e) = res {
client.reconnect().map_err(|e| {
crate::ErrorKind::OtherError(format!(
"Could not reconnect to Discord IPC {e}",
))
})?;
return Ok(client
.set_activity(activity)
.map_err(could_not_set_err)?); // try again, but don't reconnect if it fails again
client.reconnect()?;
return Ok(client.set_activity(activity)?); // try again, but don't reconnect if it fails again
}
} else {
res.map_err(could_not_set_err)?;
res?;
}
Ok(())
@@ -169,25 +153,13 @@ impl DiscordGuard {
let mut client = self.client.write().await;
let res = client.clear_activity();
let could_not_clear_err = |e: Box<dyn serde::ser::StdError>| {
crate::ErrorKind::OtherError(format!(
"Could not clear Discord activity {e}",
))
};
if reconnect_if_fail {
if res.is_err() {
client.reconnect().map_err(|e| {
crate::ErrorKind::OtherError(format!(
"Could not reconnect to Discord IPC {e}",
))
})?;
return Ok(client
.clear_activity()
.map_err(could_not_clear_err)?); // try again, but don't reconnect if it fails again
client.reconnect()?;
return Ok(client.clear_activity()?); // try again, but don't reconnect if it fails again
}
} else {
res.map_err(could_not_clear_err)?;
res?;
}
Ok(())
}

View File

@@ -272,7 +272,7 @@ impl FriendsSocket {
pub async fn disconnect(&self) -> crate::Result<()> {
let mut write_lock = self.write.write().await;
if let Some(ref mut write_half) = *write_lock {
write_half.close().await?;
SinkExt::close(write_half).await?;
*write_lock = None;
}
Ok(())

View File

@@ -89,6 +89,7 @@ impl State {
let res = tokio::try_join!(
state.discord_rpc.clear_to_default(true),
Profile::refresh_all(),
Settings::migrate(&state.pool),
ModrinthCredentials::refresh_all(),
);

View File

@@ -516,7 +516,7 @@ impl Process {
chrono::DateTime::<Utc>::from_timestamp(secs, nsecs)
.unwrap_or_default()
} else {
chrono::DateTime::<Utc>::from_timestamp(timestamp_val, 0)
chrono::DateTime::<Utc>::from_timestamp_secs(timestamp_val)
.unwrap_or_default()
};
@@ -743,7 +743,14 @@ impl Process {
// We do not wait on the post exist command to finish running! We let it spawn + run on its own.
// This behaviour may be changed in the future
if let Some(hook) = post_exit_command {
let mut cmd = hook.split(' ');
let mut cmd = shlex::split(&hook)
.ok_or_else(|| {
crate::ErrorKind::LauncherError(format!(
"Invalid post-exit command: {hook}",
))
})?
.into_iter();
if let Some(command) = cmd.next() {
let mut command = Command::new(command);
command.args(cmd).current_dir(

View File

@@ -103,10 +103,11 @@ impl ProfileInstallStage {
pub enum LauncherFeatureVersion {
None,
MigratedServerLastPlayTime,
MigratedLaunchHooks,
}
impl LauncherFeatureVersion {
pub const MOST_RECENT: Self = Self::MigratedServerLastPlayTime;
pub const MOST_RECENT: Self = Self::MigratedLaunchHooks;
pub fn as_str(&self) -> &'static str {
match *self {
@@ -114,6 +115,7 @@ impl LauncherFeatureVersion {
Self::MigratedServerLastPlayTime => {
"migrated_server_last_play_time"
}
Self::MigratedLaunchHooks => "migrated_launch_hooks",
}
}
@@ -123,6 +125,7 @@ impl LauncherFeatureVersion {
"migrated_server_last_play_time" => {
Self::MigratedServerLastPlayTime
}
"migrated_launch_hooks" => Self::MigratedLaunchHooks,
_ => Self::None,
}
}
@@ -225,10 +228,14 @@ impl ProjectType {
}
}
pub fn get_from_parent_folder(path: &Path) -> Option<Self> {
// Get parent folder
let path = path.parent()?.file_name()?;
match path.to_str()? {
pub fn get_from_parent_folder(path: impl AsRef<Path>) -> Option<Self> {
match path
.as_ref()
.parent()?
.file_name()?
.to_str()
.unwrap_or_default()
{
"mods" => Some(ProjectType::Mod),
"datapacks" => Some(ProjectType::DataPack),
"resourcepacks" => Some(ProjectType::ResourcePack),
@@ -781,6 +788,30 @@ impl Profile {
self.launcher_feature_version =
LauncherFeatureVersion::MigratedServerLastPlayTime;
}
LauncherFeatureVersion::MigratedServerLastPlayTime => {
let quoter = shlex::Quoter::new().allow_nul(true);
// Previously split by spaces
if let Some(pre_launch) = self.hooks.pre_launch.as_ref() {
self.hooks.pre_launch =
Some(quoter.join(pre_launch.split(' ')).unwrap())
}
// Previously treated as complete path to command
if let Some(wrapper) = self.hooks.wrapper.as_ref() {
self.hooks.wrapper =
Some(quoter.quote(wrapper).unwrap().to_string())
}
// Previously split by spaces
if let Some(post_exit) = self.hooks.post_exit.as_ref() {
self.hooks.post_exit =
Some(quoter.join(post_exit.split(' ')).unwrap())
}
self.launcher_feature_version =
LauncherFeatureVersion::MigratedLaunchHooks;
}
LauncherFeatureVersion::MOST_RECENT => unreachable!(
"LauncherFeatureVersion::MOST_RECENT was not updated"
),

View File

@@ -1,6 +1,7 @@
//! Theseus settings file
use serde::{Deserialize, Serialize};
use sqlx::{Pool, Sqlite};
use std::collections::HashMap;
// Types
@@ -42,6 +43,8 @@ pub struct Settings {
pub skipped_update: Option<String>,
pub pending_update_toast_for_version: Option<String>,
pub auto_download_updates: Option<bool>,
pub version: usize,
}
#[derive(Serialize, Deserialize, Debug, Clone, Copy, Eq, Hash, PartialEq)]
@@ -54,6 +57,8 @@ pub enum FeatureFlag {
}
impl Settings {
const CURRENT_VERSION: usize = 2;
pub async fn get(
exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
) -> crate::Result<Self> {
@@ -68,7 +73,8 @@ impl Settings {
mc_memory_max, mc_force_fullscreen, mc_game_resolution_x, mc_game_resolution_y, hide_on_process_start,
hook_pre_launch, hook_wrapper, hook_post_exit,
custom_dir, prev_custom_dir, migrated, json(feature_flags) feature_flags, toggle_sidebar,
skipped_update, pending_update_toast_for_version, auto_download_updates
skipped_update, pending_update_toast_for_version, auto_download_updates,
version
FROM settings
"
)
@@ -126,6 +132,7 @@ impl Settings {
pending_update_toast_for_version: res
.pending_update_toast_for_version,
auto_download_updates: res.auto_download_updates.map(|x| x == 1),
version: res.version as usize,
})
}
@@ -140,6 +147,7 @@ impl Settings {
let extra_launch_args = serde_json::to_string(&self.extra_launch_args)?;
let custom_env_vars = serde_json::to_string(&self.custom_env_vars)?;
let feature_flags = serde_json::to_string(&self.feature_flags)?;
let version = self.version as i64;
sqlx::query!(
"
@@ -183,7 +191,9 @@ impl Settings {
skipped_update = $29,
pending_update_toast_for_version = $30,
auto_download_updates = $31
auto_download_updates = $31,
version = $32
",
max_concurrent_writes,
max_concurrent_downloads,
@@ -216,12 +226,75 @@ impl Settings {
self.skipped_update,
self.pending_update_toast_for_version,
self.auto_download_updates,
version,
)
.execute(exec)
.await?;
Ok(())
}
/// Brings the persisted settings up to [`Self::CURRENT_VERSION`] by
/// applying each pending migration step in order, then writes the
/// migrated settings back to the database.
///
/// Called on every launcher startup, so it returns early — without a
/// redundant `UPDATE` — when the stored settings are already current.
///
/// # Errors
///
/// Returns an error if the settings cannot be loaded, if a migration
/// step fails (e.g. an unrecognized on-disk version), or if the
/// migrated settings cannot be written back.
pub async fn migrate(exec: &Pool<Sqlite>) -> crate::Result<()> {
    let mut settings = Self::get(exec).await?;

    // Already up to date: skip both the migration loop and the DB
    // write. The original code performed an unconditional UPDATE here
    // on every startup even when nothing changed.
    if settings.version >= Self::CURRENT_VERSION {
        return Ok(());
    }

    tracing::info!(
        "Migrating settings version {} to {}",
        settings.version,
        Self::CURRENT_VERSION
    );

    // Each successful perform_migration() bumps settings.version, so
    // this loop advances one schema step at a time until current.
    while settings.version < Self::CURRENT_VERSION {
        if let Err(err) = settings.perform_migration() {
            tracing::error!(
                "Failed to migrate settings from version {}: {}",
                settings.version,
                err
            );
            return Err(err);
        }
    }

    // Persist the fully-migrated settings in a single write.
    settings.update(exec).await?;

    Ok(())
}
/// Applies a single migration step for the current `self.version`,
/// bumping `self.version` on success.
///
/// # Errors
///
/// Returns an error when `self.version` does not match any known
/// migration step.
pub fn perform_migration(&mut self) -> crate::Result<()> {
    match self.version {
        // v1 -> v2: launch hooks used to be parsed by naive
        // space-splitting (pre-launch / post-exit) or taken verbatim as
        // a single command path (wrapper). Re-quote them so the new
        // shlex-based parsing reproduces the same argument lists.
        // `allow_nul(true)` makes the quoter infallible, so the
        // unwraps below cannot panic.
        1 => {
            let quoter = shlex::Quoter::new().allow_nul(true);

            if let Some(cmd) = self.hooks.pre_launch.take() {
                self.hooks.pre_launch =
                    Some(quoter.join(cmd.split(' ')).unwrap());
            }
            if let Some(cmd) = self.hooks.wrapper.take() {
                // Quote the whole string as one token, since it was
                // previously treated as a complete command path.
                self.hooks.wrapper =
                    Some(quoter.quote(&cmd).unwrap().to_string());
            }
            if let Some(cmd) = self.hooks.post_exit.take() {
                self.hooks.post_exit =
                    Some(quoter.join(cmd.split(' ')).unwrap());
            }

            self.version = 2;
            Ok(())
        }
        version => Err(crate::ErrorKind::OtherError(format!(
            "Invalid settings version: {version}"
        ))
        .into()),
    }
}
}
/// Theseus theme