Update Rust & Java dependencies (#4540)

* Update Java dependencies

* Baseline lint fixes

* Update Rust version

* Update actix-files 0.6.6 -> 0.6.8

* Update actix-http 3.11.0 -> 3.11.2

* Update actix-rt 2.10.0 -> 2.11.0

* Update async_zip 0.0.17 -> 0.0.18

* Update async-compression 0.4.27 -> 0.4.32

* Update async-trait 0.1.88 -> 0.1.89

* Update async-tungstenite 0.30.0 -> 0.31.0

* Update const_format 0.2.34 -> 0.2.35

* Update bitflags 2.9.1 -> 2.9.4

* Update bytemuck 1.23.1 -> 1.24.0

* Update typed-path 0.11.0 -> 0.12.0

* Update chrono 0.4.41 -> 0.4.42

* Update cidre 0.11.2 -> 0.11.3

* Update clap 4.5.43 -> 4.5.48

* Update data-url 0.3.1 -> 0.3.2

* Update discord-rich-presence 0.2.5 -> 1.0.0

* Update enumset 1.1.7 -> 1.1.10

* Update flate2 1.1.2 -> 1.1.4

* Update hyper 1.6.0 -> 1.7.0

* Update hyper-util 0.1.16 -> 0.1.17

* Update iana-time-zone 0.1.63 -> 0.1.64

* Update image 0.25.6 -> 0.25.8

* Update indexmap 2.10.0 -> 2.11.4

* Update json-patch 4.0.0 -> 4.1.0

* Update meilisearch-sdk 0.29.1 -> 0.30.0

* Update clickhouse 0.13.3 -> 0.14.0

* Fix some prettier things

* Update lettre 0.11.18 -> 0.11.19

* Update phf 0.12.1 -> 0.13.1

* Update png 0.17.16 -> 0.18.0

* Update quick-xml 0.38.1 -> 0.38.3

* Update redis 0.32.4 -> 0.32.7

* Update regex 1.11.1 -> 1.11.3

* Update reqwest 0.12.22 -> 0.12.23

* Update rust_decimal 1.37.2 -> 1.38.0

* Update rust-s3 0.35.1 -> 0.37.0

* Update serde 1.0.219 -> 1.0.228

* Update serde_bytes 0.11.17 -> 0.11.19

* Update serde_json 1.0.142 -> 1.0.145

* Update serde_with 3.14.0 -> 3.15.0

* Update sentry 0.42.0 -> 0.45.0 and sentry-actix 0.42.0 -> 0.45.0

* Update spdx 0.10.9 -> 0.12.0

* Update sysinfo 0.36.1 -> 0.37.2

* Update tauri 2.7.0 -> 2.8.5

* Update tauri-build 2.3.1 -> 2.4.1

* Update tauri-plugin-deep-link 2.4.1 -> 2.4.3

* Update tauri-plugin-dialog 2.3.2 -> 2.4.0

* Update tauri-plugin-http 2.5.1 -> 2.5.2

* Update tauri-plugin-opener 2.4.0 -> 2.5.0

* Update tauri-plugin-os 2.3.0 -> 2.3.1

* Update tauri-plugin-single-instance 2.3.2 -> 2.3.4

* Update tempfile 3.20.0 -> 3.23.0

* Update thiserror 2.0.12 -> 2.0.17

* Update tracing-subscriber 0.3.19 -> 0.3.20

* Update url 2.5.4 -> 2.5.7

* Update uuid 1.17.0 -> 1.18.1

* Update webp 0.3.0 -> 0.3.1

* Update whoami 1.6.0 -> 1.6.1

* Note that windows and windows-core can't be updated yet

* Update zbus 5.9.0 -> 5.11.0

* Update zip 4.3.0 -> 6.0.0

* Fix build

* Enforce rustls crypto provider

* Refresh Cargo.lock

* Update transitive dependencies

* Bump Gradle usage to Java 17

* Use ubuntu-latest consistently across workflows

* Fix lint

* Fix lint in Rust

* Update native-dialog 0.9.0 -> 0.9.2

* Update regex 1.11.3 -> 1.12.2

* Update reqwest 0.12.23 -> 0.12.24

* Update rust_decimal 1.38.0 -> 1.39.0

* Remaining lock-only updates

* chore: move TLS impl of some other dependencies to aws-lc-rs

The AWS bloatware "virus" expands by sheer force of widespread adoption
by the ecosystem... 🫣

* chore(fmt): run Tombi

---------

Co-authored-by: Alejandro González <me@alegon.dev>
This commit is contained in:
Josiah Glosson
2025-10-15 14:45:47 -06:00
committed by GitHub
parent 75e3994c6e
commit b23d3e674f
35 changed files with 630 additions and 583 deletions

View File

@@ -1,6 +1,8 @@
**/*.rs
.sqlx
java/build
# Migrations existing before Prettier formatted them shall always be ignored,
# as any changes to them will break existing deployments
migrations/20240711194701_init.sql

View File

@@ -1,18 +1,7 @@
import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowCopyAction
import com.github.jengelman.gradle.plugins.shadow.transformers.CacheableTransformer
import com.github.jengelman.gradle.plugins.shadow.transformers.ResourceTransformer
import com.github.jengelman.gradle.plugins.shadow.transformers.TransformerContext
import org.apache.tools.zip.ZipEntry
import org.apache.tools.zip.ZipOutputStream
import java.io.IOException
import java.util.jar.JarFile
import java.util.jar.Attributes as JarAttributes
import java.util.jar.Manifest as JarManifest
plugins {
java
id("com.diffplug.spotless") version "7.0.4"
id("com.gradleup.shadow") version "9.0.0-rc2"
id("com.diffplug.spotless") version "8.0.0"
id("com.gradleup.shadow") version "9.2.2"
}
repositories {
@@ -20,9 +9,9 @@ repositories {
}
dependencies {
implementation("org.ow2.asm:asm:9.8")
implementation("org.ow2.asm:asm-tree:9.8")
implementation("com.google.code.gson:gson:2.13.1")
implementation("org.ow2.asm:asm:9.9")
implementation("org.ow2.asm:asm-tree:9.9")
implementation("com.google.code.gson:gson:2.13.2")
testImplementation(libs.junit.jupiter)
testRuntimeOnly("org.junit.platform:junit-platform-launcher")
@@ -30,7 +19,7 @@ dependencies {
java {
toolchain {
languageVersion = JavaLanguageVersion.of(11)
languageVersion = JavaLanguageVersion.of(17)
}
}
@@ -56,52 +45,9 @@ tasks.shadowJar {
attributes["Premain-Class"] = "com.modrinth.theseus.agent.TheseusAgent"
}
enableRelocation = true
addMultiReleaseAttribute = false
enableAutoRelocation = true
relocationPrefix = "com.modrinth.theseus.shadow"
// Adapted from ManifestResourceTransformer to do one thing: remove Multi-Release.
// Multi-Release gets added by shadow because gson has Multi-Release set to true, however
// shadow strips the actual versions directory, as gson only has a module-info.class in there.
// However, older versions of SecureJarHandler crash if Multi-Release is set to true but the
// versions directory is missing.
transform(@CacheableTransformer object : ResourceTransformer {
private var manifestDiscovered = false
private var manifest: JarManifest? = null
override fun canTransformResource(element: FileTreeElement): Boolean {
return JarFile.MANIFEST_NAME.equals(element.path, ignoreCase = true)
}
override fun transform(context: TransformerContext) {
if (!manifestDiscovered) {
try {
manifest = JarManifest(context.inputStream)
manifestDiscovered = true
} catch (e: IOException) {
logger.warn("Failed to read MANIFEST.MF", e)
}
}
}
override fun hasTransformedResource(): Boolean = true
override fun modifyOutputStream(
os: ZipOutputStream,
preserveFileTimestamps: Boolean
) {
// If we didn't find a manifest, then let's create one.
if (manifest == null) {
manifest = JarManifest()
}
manifest!!.mainAttributes.remove(JarAttributes.Name.MULTI_RELEASE)
os.putNextEntry(ZipEntry(JarFile.MANIFEST_NAME).apply {
time = ShadowCopyAction.CONSTANT_TIME_FOR_ZIP_ENTRIES
})
manifest!!.write(os)
}
})
}
tasks.named<Test>("test") {

View File

@@ -1,5 +1,5 @@
[versions]
junit-jupiter = "5.12.1"
junit-jupiter = "5.14.0"
[libraries]
junit-jupiter = { module = "org.junit.jupiter:junit-jupiter", version.ref = "junit-jupiter" }

View File

@@ -1,6 +1,6 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.14.2-bin.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-9.1.0-bin.zip
networkTimeout=10000
validateDistributionUrl=true
zipStoreBase=GRADLE_USER_HOME

View File

@@ -1,6 +1,6 @@
plugins {
// Apply the foojay-resolver plugin to allow automatic download of JDKs
id("org.gradle.toolchains.foojay-resolver-convention") version "0.10.0"
id("org.gradle.toolchains.foojay-resolver-convention") version "1.0.0"
}
rootProject.name = "theseus"

View File

@@ -1,6 +1,6 @@
//! Miscellaneous PNG utilities for Minecraft skins.
use std::io::Read;
use std::io::{BufRead, Cursor, Seek};
use std::sync::Arc;
use base64::Engine;
@@ -9,7 +9,8 @@ use data_url::DataUrl;
use futures::{Stream, TryStreamExt, future::Either, stream};
use itertools::Itertools;
use rgb::Rgba;
use tokio_util::{compat::FuturesAsyncReadCompatExt, io::SyncIoBridge};
use tokio::io::AsyncReadExt;
use tokio_util::compat::FuturesAsyncReadCompatExt;
use url::Url;
use crate::{
@@ -95,7 +96,8 @@ pub fn dimensions(png_data: &[u8]) -> crate::Result<(u32, u32)> {
pub async fn normalize_skin_texture(
texture: &UrlOrBlob,
) -> crate::Result<Bytes> {
let texture_stream = SyncIoBridge::new(Box::pin(
let mut texture_data = Vec::with_capacity(8192);
Box::pin(
match texture {
UrlOrBlob::Url(url) => Either::Left(
url_to_data_stream(url)
@@ -112,84 +114,84 @@ pub async fn normalize_skin_texture(
),
}
.compat(),
));
)
.read_to_end(&mut texture_data)
.await?;
tokio::task::spawn_blocking(|| {
let mut png_reader = {
let mut decoder = png::Decoder::new(texture_stream);
decoder.set_transformations(
png::Transformations::normalize_to_color8(),
);
decoder.read_info()
}?;
let mut png_reader = {
let mut decoder = png::Decoder::new(Cursor::new(texture_data));
decoder
.set_transformations(png::Transformations::normalize_to_color8());
decoder.read_info()
}?;
// The code below assumes that the skin texture has valid dimensions.
// This also serves as a way to bail out early for obviously invalid or
// adversarial textures
if png_reader.info().width != 64
|| ![64, 32].contains(&png_reader.info().height)
{
Err(ErrorKind::InvalidSkinTexture)?;
}
// The code below assumes that the skin texture has valid dimensions.
// This also serves as a way to bail out early for obviously invalid or
// adversarial textures
if png_reader.info().width != 64
|| ![64, 32].contains(&png_reader.info().height)
{
Err(ErrorKind::InvalidSkinTexture)?;
}
let is_legacy_skin = png_reader.info().height == 32;
let mut texture_buf =
get_skin_texture_buffer(&mut png_reader, is_legacy_skin)?;
if is_legacy_skin {
convert_legacy_skin_texture(&mut texture_buf, png_reader.info());
do_notch_transparency_hack(&mut texture_buf, png_reader.info());
}
make_inner_parts_opaque(&mut texture_buf, png_reader.info());
let is_legacy_skin = png_reader.info().height == 32;
let mut texture_buf =
get_skin_texture_buffer(&mut png_reader, is_legacy_skin)?;
if is_legacy_skin {
convert_legacy_skin_texture(&mut texture_buf, png_reader.info());
do_notch_transparency_hack(&mut texture_buf, png_reader.info());
}
make_inner_parts_opaque(&mut texture_buf, png_reader.info());
let mut encoded_png = vec![];
let mut encoded_png = vec![];
let mut png_encoder = png::Encoder::new(&mut encoded_png, 64, 64);
png_encoder.set_color(png::ColorType::Rgba);
png_encoder.set_depth(png::BitDepth::Eight);
png_encoder.set_filter(png::FilterType::NoFilter);
png_encoder.set_compression(png::Compression::Fast);
let mut png_encoder = png::Encoder::new(&mut encoded_png, 64, 64);
png_encoder.set_color(png::ColorType::Rgba);
png_encoder.set_depth(png::BitDepth::Eight);
png_encoder.set_filter(png::Filter::NoFilter);
png_encoder.set_compression(png::Compression::Fast);
// Keeping color space information properly set, to handle the occasional
// strange PNG with non-sRGB chromaticities and/or different grayscale spaces
// that keeps most people wondering, is what sets a carefully crafted image
// manipulation routine apart :)
if let Some(source_chromaticities) =
png_reader.info().source_chromaticities.as_ref().copied()
{
png_encoder.set_source_chromaticities(source_chromaticities);
}
if let Some(source_gamma) =
png_reader.info().source_gamma.as_ref().copied()
{
png_encoder.set_source_gamma(source_gamma);
}
if let Some(source_srgb) = png_reader.info().srgb.as_ref().copied() {
png_encoder.set_source_srgb(source_srgb);
}
// Keeping color space information properly set, to handle the occasional
// strange PNG with non-sRGB chromaticities and/or different grayscale spaces
// that keeps most people wondering, is what sets a carefully crafted image
// manipulation routine apart :)
if let Some(source_chromaticities) =
png_reader.info().source_chromaticities.as_ref().copied()
{
png_encoder.set_source_chromaticities(source_chromaticities);
}
if let Some(source_gamma) = png_reader.info().source_gamma.as_ref().copied()
{
png_encoder.set_source_gamma(source_gamma);
}
if let Some(source_srgb) = png_reader.info().srgb.as_ref().copied() {
png_encoder.set_source_srgb(source_srgb);
}
let png_buf = bytemuck::try_cast_slice(&texture_buf)
.map_err(|_| ErrorKind::InvalidPng)?;
let mut png_writer = png_encoder.write_header()?;
png_writer.write_image_data(png_buf)?;
png_writer.finish()?;
let png_buf = bytemuck::try_cast_slice(&texture_buf)
.map_err(|_| ErrorKind::InvalidPng)?;
let mut png_writer = png_encoder.write_header()?;
png_writer.write_image_data(png_buf)?;
png_writer.finish()?;
Ok(encoded_png.into())
})
.await?
Ok(encoded_png.into())
}
/// Reads a skin texture and returns a 64x64 buffer in RGBA format.
fn get_skin_texture_buffer<R: Read>(
fn get_skin_texture_buffer<R: BufRead + Seek>(
png_reader: &mut png::Reader<R>,
is_legacy_skin: bool,
) -> crate::Result<Vec<Rgba<u8>>> {
let output_buffer_size = png_reader
.output_buffer_size()
.expect("Reasonable skin texture size verified already");
let mut png_buf = if is_legacy_skin {
// Legacy skins have half the height, so duplicate the rows to
// turn them into a 64x64 texture
vec![0; png_reader.output_buffer_size() * 2]
vec![0; output_buffer_size * 2]
} else {
// Modern skins are left as-is
vec![0; png_reader.output_buffer_size()]
vec![0; output_buffer_size]
};
png_reader.next_frame(&mut png_buf)?;
@@ -373,9 +375,10 @@ fn set_alpha(
#[tokio::test]
async fn normalize_skin_texture_works() {
let decode_to_pixels = |png_data: &[u8]| {
let decoder = png::Decoder::new(png_data);
let decoder = png::Decoder::new(Cursor::new(png_data));
let mut reader = decoder.read_info().expect("Failed to read PNG info");
let mut buffer = vec![0; reader.output_buffer_size()];
let mut buffer =
vec![0; reader.output_buffer_size().expect("Skin size too large")];
reader
.next_frame(&mut buffer)
.expect("Failed to decode PNG");

View File

@@ -176,6 +176,9 @@ pub enum ErrorKind {
#[error("Deserialization error: {0}")]
DeserializationError(#[from] serde::de::value::Error),
#[error("Discord IPC error: {0}")]
DiscordRichPresenceError(#[from] discord_rich_presence::error::Error),
}
#[derive(Debug)]

View File

@@ -18,12 +18,7 @@ impl DiscordGuard {
/// Initialize discord IPC client, and attempt to connect to it
/// If it fails, it will still return a DiscordGuard, but the client will be unconnected
pub fn init() -> crate::Result<DiscordGuard> {
let dipc =
DiscordIpcClient::new("1123683254248148992").map_err(|e| {
crate::ErrorKind::OtherError(format!(
"Could not create Discord client {e}",
))
})?;
let dipc = DiscordIpcClient::new("1123683254248148992");
Ok(DiscordGuard {
client: Arc::new(RwLock::new(dipc)),
@@ -87,25 +82,14 @@ impl DiscordGuard {
let mut client: tokio::sync::RwLockWriteGuard<'_, DiscordIpcClient> =
self.client.write().await;
let res = client.set_activity(activity.clone());
let could_not_set_err = |e: Box<dyn serde::ser::StdError>| {
crate::ErrorKind::OtherError(format!(
"Could not update Discord activity {e}",
))
};
if reconnect_if_fail {
if let Err(_e) = res {
client.reconnect().map_err(|e| {
crate::ErrorKind::OtherError(format!(
"Could not reconnect to Discord IPC {e}",
))
})?;
return Ok(client
.set_activity(activity)
.map_err(could_not_set_err)?); // try again, but don't reconnect if it fails again
client.reconnect()?;
return Ok(client.set_activity(activity)?); // try again, but don't reconnect if it fails again
}
} else {
res.map_err(could_not_set_err)?;
res?;
}
Ok(())
@@ -126,25 +110,13 @@ impl DiscordGuard {
let mut client = self.client.write().await;
let res = client.clear_activity();
let could_not_clear_err = |e: Box<dyn serde::ser::StdError>| {
crate::ErrorKind::OtherError(format!(
"Could not clear Discord activity {e}",
))
};
if reconnect_if_fail {
if res.is_err() {
client.reconnect().map_err(|e| {
crate::ErrorKind::OtherError(format!(
"Could not reconnect to Discord IPC {e}",
))
})?;
return Ok(client
.clear_activity()
.map_err(could_not_clear_err)?); // try again, but don't reconnect if it fails again
client.reconnect()?;
return Ok(client.clear_activity()?); // try again, but don't reconnect if it fails again
}
} else {
res.map_err(could_not_clear_err)?;
res?;
}
Ok(())
}

View File

@@ -272,7 +272,7 @@ impl FriendsSocket {
pub async fn disconnect(&self) -> crate::Result<()> {
let mut write_lock = self.write.write().await;
if let Some(ref mut write_half) = *write_lock {
write_half.close().await?;
SinkExt::close(write_half).await?;
*write_lock = None;
}
Ok(())

View File

@@ -516,7 +516,7 @@ impl Process {
chrono::DateTime::<Utc>::from_timestamp(secs, nsecs)
.unwrap_or_default()
} else {
chrono::DateTime::<Utc>::from_timestamp(timestamp_val, 0)
chrono::DateTime::<Utc>::from_timestamp_secs(timestamp_val)
.unwrap_or_default()
};