Merge commit '7fa442fb28a2b9156690ff147206275163e7aec8' into beta
@@ -41,7 +41,7 @@
      {
        "name": "display_claims!: serde_json::Value",
        "ordinal": 7,
        "type_info": "Null"
        "type_info": "Text"
      }
    ],
    "parameters": {
@@ -1,6 +1,6 @@
{
  "db_name": "SQLite",
  "query": "\n SELECT\n max_concurrent_writes, max_concurrent_downloads,\n theme, default_page, collapsed_navigation, hide_nametag_skins_page, advanced_rendering, native_decorations,\n discord_rpc, developer_mode, telemetry, personalized_ads,\n onboarded,\n json(extra_launch_args) extra_launch_args, json(custom_env_vars) custom_env_vars,\n mc_memory_max, mc_force_fullscreen, mc_game_resolution_x, mc_game_resolution_y, hide_on_process_start,\n hook_pre_launch, hook_wrapper, hook_post_exit,\n custom_dir, prev_custom_dir, migrated, json(feature_flags) feature_flags, toggle_sidebar\n FROM settings\n ",
  "query": "\n SELECT\n max_concurrent_writes, max_concurrent_downloads,\n theme, default_page, collapsed_navigation, hide_nametag_skins_page, advanced_rendering, native_decorations,\n discord_rpc, developer_mode, telemetry, personalized_ads,\n onboarded,\n json(extra_launch_args) extra_launch_args, json(custom_env_vars) custom_env_vars,\n mc_memory_max, mc_force_fullscreen, mc_game_resolution_x, mc_game_resolution_y, hide_on_process_start,\n hook_pre_launch, hook_wrapper, hook_post_exit,\n custom_dir, prev_custom_dir, migrated, json(feature_flags) feature_flags, toggle_sidebar,\n skipped_update, pending_update_toast_for_version, auto_download_updates\n FROM settings\n ",
  "describe": {
    "columns": [
      {
@@ -142,6 +142,21 @@
        "name": "toggle_sidebar",
        "ordinal": 27,
        "type_info": "Integer"
      },
      {
        "name": "skipped_update",
        "ordinal": 28,
        "type_info": "Text"
      },
      {
        "name": "pending_update_toast_for_version",
        "ordinal": 29,
        "type_info": "Text"
      },
      {
        "name": "auto_download_updates",
        "ordinal": 30,
        "type_info": "Integer"
      }
    ],
    "parameters": {
@@ -175,8 +190,11 @@
      true,
      false,
      null,
      false
      false,
      true,
      true,
      true
    ]
  },
  "hash": "5193f519f021b2e7013cdb67a6e1a31ae4bd7532d02f8b00b43d5645351941ca"
  "hash": "7dc83d7ffa3d583fc5ffaf13811a8dab4d0b9ded6200f827b9de7ac32e5318d5"
}
@@ -1,12 +1,12 @@
{
  "db_name": "SQLite",
  "query": "\n UPDATE settings\n SET\n max_concurrent_writes = $1,\n max_concurrent_downloads = $2,\n\n theme = $3,\n default_page = $4,\n collapsed_navigation = $5,\n advanced_rendering = $6,\n native_decorations = $7,\n\n discord_rpc = $8,\n developer_mode = $9,\n telemetry = $10,\n personalized_ads = $11,\n\n onboarded = $12,\n\n extra_launch_args = jsonb($13),\n custom_env_vars = jsonb($14),\n mc_memory_max = $15,\n mc_force_fullscreen = $16,\n mc_game_resolution_x = $17,\n mc_game_resolution_y = $18,\n hide_on_process_start = $19,\n\n hook_pre_launch = $20,\n hook_wrapper = $21,\n hook_post_exit = $22,\n\n custom_dir = $23,\n prev_custom_dir = $24,\n migrated = $25,\n\n toggle_sidebar = $26,\n feature_flags = $27,\n hide_nametag_skins_page = $28\n ",
  "query": "\n UPDATE settings\n SET\n max_concurrent_writes = $1,\n max_concurrent_downloads = $2,\n\n theme = $3,\n default_page = $4,\n collapsed_navigation = $5,\n advanced_rendering = $6,\n native_decorations = $7,\n\n discord_rpc = $8,\n developer_mode = $9,\n telemetry = $10,\n personalized_ads = $11,\n\n onboarded = $12,\n\n extra_launch_args = jsonb($13),\n custom_env_vars = jsonb($14),\n mc_memory_max = $15,\n mc_force_fullscreen = $16,\n mc_game_resolution_x = $17,\n mc_game_resolution_y = $18,\n hide_on_process_start = $19,\n\n hook_pre_launch = $20,\n hook_wrapper = $21,\n hook_post_exit = $22,\n\n custom_dir = $23,\n prev_custom_dir = $24,\n migrated = $25,\n\n toggle_sidebar = $26,\n feature_flags = $27,\n hide_nametag_skins_page = $28,\n\n skipped_update = $29,\n pending_update_toast_for_version = $30,\n auto_download_updates = $31\n ",
  "describe": {
    "columns": [],
    "parameters": {
      "Right": 28
      "Right": 31
    },
    "nullable": []
  },
  "hash": "3613473fb4d836ee0fb3c292e6bf5e50912064c29ebf1a1e5ead79c44c37e64c"
  "hash": "eb95fac3043d0ffd10caef69cc469474cc5c0d36cc0698c4cc0852da81fed158"
}
@@ -1,127 +1,129 @@
[package]
name = "theseus"
version = "1.0.0-local" # The actual version is set by the theseus-build workflow on tagging
authors = ["Jai A <jaiagr+gpg@pm.me>"]
# The actual version is set by the theseus-build workflow on tagging
version = "1.0.0-local"
edition.workspace = true

[dependencies]
ariadne = { workspace = true }
async-compression = { workspace = true, features = ["gzip", "tokio"] }
async-recursion = { workspace = true }
async-tungstenite = { workspace = true, features = [
    "tokio-runtime",
    "tokio-rustls-webpki-roots",
] }
async-walkdir = { workspace = true }
async_zip = { workspace = true, features = [
    "bzip2",
    "chrono",
    "deflate",
    "deflate64",
    "tokio-fs",
    "zstd",
] }
base64 = { workspace = true }
bytemuck = { workspace = true, features = ["extern_crate_alloc"] }
bytes = { workspace = true, features = ["serde"] }
chardetng = { workspace = true }
chrono = { workspace = true, features = ["serde"] }
daedalus = { workspace = true }
dashmap = { workspace = true, features = ["serde"] }
data-url = { workspace = true }
derive_more = { workspace = true, features = ["display"] }
dirs = { workspace = true }
discord-rich-presence = { workspace = true }
dunce = { workspace = true }
either = { workspace = true }
encoding_rs = { workspace = true }
enumset = { workspace = true }
flate2 = { workspace = true }
fs4 = { workspace = true, features = ["tokio"] }
futures = { workspace = true, features = ["alloc", "async-await"] }
heck = { workspace = true }
hickory-resolver = { workspace = true }
indicatif = { workspace = true, optional = true }
itertools = { workspace = true }
notify = { workspace = true }
notify-debouncer-mini = { workspace = true }
p256 = { workspace = true, features = ["ecdsa"] }
paste = { workspace = true }
path-util = { workspace = true }
phf = { workspace = true }
png = { workspace = true }
quartz_nbt = { workspace = true, features = ["serde"] }
quick-xml = { workspace = true, features = ["async-tokio"] }
rand = { workspace = true }
regex = { workspace = true }
reqwest = { workspace = true, features = [
    "brotli",
    "charset",
    "deflate",
    "gzip",
    "http2",
    "json",
    "macos-system-configuration",
    "multipart",
    "rustls-tls-webpki-roots",
    "stream",
] }
rgb = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json.workspace = true
serde_ini.workspace = true
serde_with.workspace = true
sha1_smol.workspace = true
sha2.workspace = true
serde_ini = { workspace = true }
serde_json = { workspace = true }
serde_with = { workspace = true }
sha1_smol = { workspace = true }
sha2 = { workspace = true }
sqlx = { workspace = true, features = [
    "json",
    "macros",
    "migrate",
    "runtime-tokio",
    "sqlite",
    "uuid",
] }
sysinfo = { workspace = true, features = ["disk", "system"] }
tauri = { workspace = true, features = ["unstable"], optional = true }
tempfile = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true, features = [
    "fs",
    "io-util",
    "macros",
    "net",
    "process",
    "sync",
    "time",
] }
tokio-util = { workspace = true, features = [
    "compat",
    "io",
    "io-util",
    "time",
] }
tracing = { workspace = true }
tracing-error = { workspace = true }
tracing-subscriber = { workspace = true, features = ["chrono", "env-filter"] }
url = { workspace = true, features = ["serde"] }
uuid = { workspace = true, features = ["serde", "v4"] }
zip.workspace = true
async_zip = { workspace = true, features = [
    "chrono",
    "tokio-fs",
    "deflate",
    "bzip2",
    "zstd",
    "deflate64",
] }
flate2.workspace = true
tempfile.workspace = true
dashmap = { workspace = true, features = ["serde"] }
quick-xml = { workspace = true, features = ["async-tokio"] }
enumset.workspace = true
chardetng.workspace = true
encoding_rs.workspace = true
hashlink.workspace = true
png.workspace = true
bytemuck.workspace = true
rgb.workspace = true
phf.workspace = true

chrono = { workspace = true, features = ["serde"] }
daedalus.workspace = true
dirs.workspace = true

regex.workspace = true
sysinfo = { workspace = true, features = ["system", "disk"] }
thiserror.workspace = true
either.workspace = true
data-url.workspace = true

tracing.workspace = true
tracing-subscriber = { workspace = true, features = ["chrono", "env-filter"] }
tracing-error.workspace = true

paste.workspace = true
heck.workspace = true

tauri = { workspace = true, optional = true, features = ["unstable"] }
indicatif = { workspace = true, optional = true }

async-tungstenite = { workspace = true, features = ["tokio-runtime", "tokio-rustls-webpki-roots"] }
futures = { workspace = true, features = ["async-await", "alloc"] }
reqwest = { workspace = true, features = [
    "json",
    "stream",
    "deflate",
    "gzip",
    "brotli",
    "rustls-tls-webpki-roots",
    "charset",
    "http2",
    "macos-system-configuration",
    "multipart",
] }
tokio = { workspace = true, features = [
    "time",
    "io-util",
    "net",
    "sync",
    "fs",
    "macros",
    "process",
] }
tokio-util = { workspace = true, features = ["compat", "io", "io-util"] }
async-recursion.workspace = true
fs4 = { workspace = true, features = ["tokio"] }
async-walkdir.workspace = true
async-compression = { workspace = true, features = ["tokio", "gzip"] }

notify.workspace = true
notify-debouncer-mini.workspace = true

dunce.workspace = true

whoami.workspace = true

discord-rich-presence.workspace = true

p256 = { workspace = true, features = ["ecdsa"] }
rand.workspace = true
base64.workspace = true

sqlx = { workspace = true, features = [
    "runtime-tokio",
    "sqlite",
    "macros",
    "migrate",
    "json",
    "uuid",
] }

quartz_nbt = { workspace = true, features = ["serde"] }
hickory-resolver.workspace = true

ariadne.workspace = true

[target.'cfg(windows)'.dependencies]
winreg.workspace = true
whoami = { workspace = true }
zbus = { workspace = true }
zip = { workspace = true }

[build-dependencies]
dotenvy.workspace = true
dunce.workspace = true
dotenvy = { workspace = true }
dunce = { workspace = true }

[target.'cfg(target_os = "macos")'.dependencies]
cidre = { workspace = true, features = ["blocks", "nw"] }

[target.'cfg(windows)'.dependencies]
windows = { workspace = true, features = ["Networking_Connectivity"] }
windows-core = { workspace = true }
winreg = { workspace = true }

[features]
tauri = ["dep:tauri"]
cli = ["dep:indicatif"]
tauri = ["dep:tauri"]

[lints]
workspace = true
@@ -53,7 +53,6 @@ fn build_java_jars() {
        .arg("build")
        .arg("--no-daemon")
        .arg("--console=rich")
        .arg("--info")
        .current_dir(dunce::canonicalize("java").unwrap())
        .status()
        .expect("Failed to wait on Gradle build");
@@ -1,3 +1,14 @@
import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowCopyAction
import com.github.jengelman.gradle.plugins.shadow.transformers.CacheableTransformer
import com.github.jengelman.gradle.plugins.shadow.transformers.ResourceTransformer
import com.github.jengelman.gradle.plugins.shadow.transformers.TransformerContext
import org.apache.tools.zip.ZipEntry
import org.apache.tools.zip.ZipOutputStream
import java.io.IOException
import java.util.jar.JarFile
import java.util.jar.Attributes as JarAttributes
import java.util.jar.Manifest as JarManifest

plugins {
    java
    id("com.diffplug.spotless") version "7.0.4"
@@ -11,6 +22,7 @@ repositories {
dependencies {
    implementation("org.ow2.asm:asm:9.8")
    implementation("org.ow2.asm:asm-tree:9.8")
    implementation("com.google.code.gson:gson:2.13.1")

    testImplementation(libs.junit.jupiter)
    testRuntimeOnly("org.junit.platform:junit-platform-launcher")
@@ -46,6 +58,50 @@ tasks.shadowJar {

    enableRelocation = true
    relocationPrefix = "com.modrinth.theseus.shadow"

    // Adapted from ManifestResourceTransformer to do one thing: remove Multi-Release.
    // Multi-Release gets added by shadow because gson has Multi-Release set to true, however
    // shadow strips the actual versions directory, as gson only has a module-info.class in there.
    // However, older versions of SecureJarHandler crash if Multi-Release is set to true but the
    // versions directory is missing.
    transform(@CacheableTransformer object : ResourceTransformer {
        private var manifestDiscovered = false
        private var manifest: JarManifest? = null

        override fun canTransformResource(element: FileTreeElement): Boolean {
            return JarFile.MANIFEST_NAME.equals(element.path, ignoreCase = true)
        }

        override fun transform(context: TransformerContext) {
            if (!manifestDiscovered) {
                try {
                    manifest = JarManifest(context.inputStream)
                    manifestDiscovered = true
                } catch (e: IOException) {
                    logger.warn("Failed to read MANIFEST.MF", e)
                }
            }
        }

        override fun hasTransformedResource(): Boolean = true

        override fun modifyOutputStream(
            os: ZipOutputStream,
            preserveFileTimestamps: Boolean
        ) {
            // If we didn't find a manifest, then let's create one.
            if (manifest == null) {
                manifest = JarManifest()
            }

            manifest!!.mainAttributes.remove(JarAttributes.Name.MULTI_RELEASE)

            os.putNextEntry(ZipEntry(JarFile.MANIFEST_NAME).apply {
                time = ShadowCopyAction.CONSTANT_TIME_FOR_ZIP_ENTRIES
            })
            manifest!!.write(os)
        }
    })
}

tasks.named<Test>("test") {
@@ -1,11 +1,13 @@
package com.modrinth.theseus;

import java.io.ByteArrayOutputStream;
import com.modrinth.theseus.rpc.RpcHandlers;
import com.modrinth.theseus.rpc.TheseusRpc;
import java.io.IOException;
import java.lang.reflect.AccessibleObject;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.Arrays;
import java.util.concurrent.CompletableFuture;

public final class MinecraftLaunch {
    public static void main(String[] args) throws IOException, ReflectiveOperationException {
@@ -13,45 +15,19 @@ public final class MinecraftLaunch {
        final String[] gameArgs = Arrays.copyOfRange(args, 1, args.length);

        System.setProperty("modrinth.process.args", String.join("\u001f", gameArgs));
        parseInput();

        final CompletableFuture<Void> waitForLaunch = new CompletableFuture<>();
        TheseusRpc.connectAndStart(
                System.getProperty("modrinth.internal.ipc.host"),
                Integer.getInteger("modrinth.internal.ipc.port"),
                new RpcHandlers()
                        .handler("set_system_property", String.class, String.class, System::setProperty)
                        .handler("launch", () -> waitForLaunch.complete(null)));

        waitForLaunch.join();
        relaunch(mainClass, gameArgs);
    }

    private static void parseInput() throws IOException {
        final ByteArrayOutputStream line = new ByteArrayOutputStream();
        while (true) {
            final int b = System.in.read();
            if (b < 0) {
                throw new IllegalStateException("Stdin terminated while parsing");
            }
            if (b != '\n') {
                line.write(b);
                continue;
            }
            if (handleLine(line.toString("UTF-8"))) {
                break;
            }
            line.reset();
        }
    }

    private static boolean handleLine(String line) {
        final String[] parts = line.split("\t", 2);
        switch (parts[0]) {
            case "property": {
                final String[] keyValue = parts[1].split("\t", 2);
                System.setProperty(keyValue[0], keyValue[1]);
                return false;
            }
            case "launch":
                return true;
        }

        System.err.println("Unknown input line " + line);
        return false;
    }

    private static void relaunch(String mainClassName, String[] args) throws ReflectiveOperationException {
        final int javaVersion = getJavaVersion();
        final Class<?> mainClass = Class.forName(mainClassName);
@@ -0,0 +1,46 @@
package com.modrinth.theseus.rpc;

import com.google.gson.JsonElement;
import com.google.gson.JsonNull;
import java.util.HashMap;
import java.util.Map;
import java.util.function.BiConsumer;
import java.util.function.Function;

public class RpcHandlers {
    private final Map<String, Function<JsonElement[], JsonElement>> handlers = new HashMap<>();
    private boolean frozen;

    public RpcHandlers handler(String functionName, Runnable handler) {
        return addHandler(functionName, args -> {
            handler.run();
            return JsonNull.INSTANCE;
        });
    }

    public <A, B> RpcHandlers handler(
            String functionName, Class<A> arg1Type, Class<B> arg2Type, BiConsumer<A, B> handler) {
        return addHandler(functionName, args -> {
            if (args.length != 2) {
                throw new IllegalArgumentException(functionName + " expected 2 arguments");
            }
            final A arg1 = TheseusRpc.GSON.fromJson(args[0], arg1Type);
            final B arg2 = TheseusRpc.GSON.fromJson(args[1], arg2Type);
            handler.accept(arg1, arg2);
            return JsonNull.INSTANCE;
        });
    }

    private RpcHandlers addHandler(String functionName, Function<JsonElement[], JsonElement> handler) {
        if (frozen) {
            throw new IllegalStateException("Cannot add handler to frozen RpcHandlers instance");
        }
        handlers.put(functionName, handler);
        return this;
    }

    Map<String, Function<JsonElement[], JsonElement>> build() {
        frozen = true;
        return handlers;
    }
}
@@ -0,0 +1,9 @@
package com.modrinth.theseus.rpc;

public class RpcMethodException extends RuntimeException {
    private static final long serialVersionUID = 1922360184188807964L;

    public RpcMethodException(String message) {
        super(message);
    }
}
@@ -0,0 +1,183 @@
package com.modrinth.theseus.rpc;

import com.google.gson.*;
import com.google.gson.reflect.TypeToken;
import java.io.*;
import java.net.Socket;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Function;

public final class TheseusRpc {
    static final Gson GSON = new GsonBuilder()
            .setStrictness(Strictness.STRICT)
            .setFieldNamingPolicy(FieldNamingPolicy.LOWER_CASE_WITH_UNDERSCORES)
            .disableHtmlEscaping()
            .create();
    private static final TypeToken<RpcMessage> MESSAGE_TYPE = TypeToken.get(RpcMessage.class);

    private static final AtomicReference<TheseusRpc> RPC = new AtomicReference<>();

    private final BlockingQueue<RpcMessage> mainThreadQueue = new LinkedBlockingQueue<>();
    private final Map<UUID, ResponseWaiter<?>> awaitingResponse = new ConcurrentHashMap<>();
    private final Map<String, Function<JsonElement[], JsonElement>> handlers;
    private final Socket socket;

    private TheseusRpc(Socket socket, RpcHandlers handlers) {
        this.socket = socket;
        this.handlers = handlers.build();
    }

    public static void connectAndStart(String host, int port, RpcHandlers handlers) throws IOException {
        if (RPC.get() != null) {
            throw new IllegalStateException("Can only connect to RPC once");
        }

        final Socket socket = new Socket(host, port);
        final TheseusRpc rpc = new TheseusRpc(socket, handlers);
        final Thread mainThread = new Thread(rpc::mainThread, "Theseus RPC Main");
        final Thread readThread = new Thread(rpc::readThread, "Theseus RPC Read");
        mainThread.setDaemon(true);
        readThread.setDaemon(true);
        mainThread.start();
        readThread.start();
        RPC.set(rpc);
    }

    public static TheseusRpc getRpc() {
        final TheseusRpc rpc = RPC.get();
        if (rpc == null) {
            throw new IllegalStateException("Called getRpc before RPC initialized");
        }
        return rpc;
    }

    public <T> CompletableFuture<T> callMethod(TypeToken<T> returnType, String method, Object... args) {
        final JsonElement[] jsonArgs = new JsonElement[args.length];
        for (int i = 0; i < args.length; i++) {
            jsonArgs[i] = GSON.toJsonTree(args[i]);
        }

        final RpcMessage message = new RpcMessage(method, jsonArgs);
        final ResponseWaiter<T> responseWaiter = new ResponseWaiter<>(returnType);
        awaitingResponse.put(message.id, responseWaiter);
        mainThreadQueue.add(message);
        return responseWaiter.future;
    }

    private void mainThread() {
        try {
            final Writer writer = new OutputStreamWriter(socket.getOutputStream(), StandardCharsets.UTF_8);
            while (true) {
                final RpcMessage message = mainThreadQueue.take();
                final RpcMessage toSend;
                if (message.isForSending) {
                    toSend = message;
                } else {
                    final Function<JsonElement[], JsonElement> handler = handlers.get(message.method);
                    if (handler == null) {
                        System.err.println("Unknown theseus RPC method " + message.method);
                        continue;
                    }
                    RpcMessage response;
                    try {
                        response = new RpcMessage(message.id, handler.apply(message.args));
                    } catch (Exception e) {
                        response = new RpcMessage(message.id, e.toString());
                    }
                    toSend = response;
                }
                GSON.toJson(toSend, writer);
                writer.write('\n');
                writer.flush();
            }
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        } catch (InterruptedException ignored) {
        }
    }

    private void readThread() {
        try {
            final BufferedReader reader =
                    new BufferedReader(new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8));
            while (true) {
                final RpcMessage message = GSON.fromJson(reader.readLine(), MESSAGE_TYPE);
                if (message.method == null) {
                    final ResponseWaiter<?> waiter = awaitingResponse.get(message.id);
                    if (waiter != null) {
                        handleResponse(waiter, message);
                    }
                } else {
                    mainThreadQueue.put(message);
                }
            }
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        } catch (InterruptedException ignored) {
        }
    }

    private <T> void handleResponse(ResponseWaiter<T> waiter, RpcMessage message) {
        if (message.error != null) {
            waiter.future.completeExceptionally(new RpcMethodException(message.error));
            return;
        }
        try {
            waiter.future.complete(GSON.fromJson(message.response, waiter.type));
        } catch (JsonSyntaxException e) {
            waiter.future.completeExceptionally(e);
        }
    }

    private static class RpcMessage {
        final UUID id;
        final String method; // Optional
        final JsonElement[] args; // Optional
        final JsonElement response; // Optional
        final String error; // Optional
        final transient boolean isForSending;

        RpcMessage(String method, JsonElement[] args) {
            id = UUID.randomUUID();
            this.method = method;
            this.args = args;
            response = null;
            error = null;
            isForSending = true;
        }

        RpcMessage(UUID id, JsonElement response) {
            this.id = id;
            method = null;
            args = null;
            this.response = response;
            error = null;
            isForSending = true;
        }

        RpcMessage(UUID id, String error) {
            this.id = id;
            method = null;
            args = null;
            response = null;
            this.error = error;
            isForSending = true;
        }
    }

    private static class ResponseWaiter<T> {
        final TypeToken<T> type;
        final CompletableFuture<T> future = new CompletableFuture<>();

        ResponseWaiter(TypeToken<T> type) {
            this.type = type;
        }
    }
}
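
Reviewer note: the frames TheseusRpc exchanges are single-line JSON objects terminated by `'\n'`, with field names produced by GSON's `LOWER_CASE_WITH_UNDERSCORES` policy (and null fields omitted, as GSON does by default; `isForSending` is `transient` and never serialized). A minimal sketch of the same message shape on the Rust side — the struct and `call_frame` helper are illustrative only, not the app-lib's actual implementation:

```rust
// Hypothetical Rust mirror of RpcMessage, for illustrating the wire format.
use serde::{Deserialize, Serialize};
use uuid::Uuid;

#[derive(Serialize, Deserialize)]
struct RpcMessage {
    id: Uuid,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    method: Option<String>, // set on calls
    #[serde(default, skip_serializing_if = "Option::is_none")]
    args: Option<Vec<serde_json::Value>>, // set on calls
    #[serde(default, skip_serializing_if = "Option::is_none")]
    response: Option<serde_json::Value>, // set on successful replies
    #[serde(default, skip_serializing_if = "Option::is_none")]
    error: Option<String>, // set on failed replies
}

/// Encodes one call frame: a single JSON object followed by '\n',
/// matching the framing written by mainThread() above.
fn call_frame(method: &str, args: Vec<serde_json::Value>) -> serde_json::Result<String> {
    let msg = RpcMessage {
        id: Uuid::new_v4(),
        method: Some(method.to_owned()),
        args: Some(args),
        response: None,
        error: None,
    };
    Ok(format!("{}\n", serde_json::to_string(&msg)?))
}
```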

@@ -0,0 +1,6 @@
ALTER TABLE settings
    ADD COLUMN skipped_update TEXT NULL;
ALTER TABLE settings
    ADD COLUMN pending_update_toast_for_version TEXT NULL;
ALTER TABLE settings
    ADD COLUMN auto_download_updates INT NULL;
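
These three nullable columns back the new fields in the settings queries above. As a hedged sketch (not app-lib code) of how they could be read with sqlx — the struct and function are hypothetical, while the column names and SQLite types come from the migration:

```rust
// Nullable SQLite columns map to Option<_> on the Rust side.
#[derive(sqlx::FromRow)]
struct UpdateSettings {
    skipped_update: Option<String>,
    pending_update_toast_for_version: Option<String>,
    auto_download_updates: Option<i64>, // INT NULL; SQLite integers decode as i64
}

async fn load_update_settings(
    pool: &sqlx::SqlitePool,
) -> sqlx::Result<UpdateSettings> {
    sqlx::query_as::<_, UpdateSettings>(
        "SELECT skipped_update, pending_update_toast_for_version, \
         auto_download_updates FROM settings",
    )
    .fetch_one(pool)
    .await
}
```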

Binary files changed (minecraft_skins test assets):
BIN packages/app-lib/src/api/minecraft_skins/assets/test/legacy.png (new file, 435 B)
BIN packages/app-lib/src/api/minecraft_skins/assets/test/notch.png (new file, 409 B)
(other binary image changes: before 7.0 KiB; after 1.8 KiB, 1.2 KiB, 934 B, 1.7 KiB)
@@ -1,12 +1,14 @@
//! Miscellaneous PNG utilities for Minecraft skins.

use std::io::Read;
use std::sync::Arc;

use base64::Engine;
use bytemuck::{AnyBitPattern, NoUninit};
use bytes::Bytes;
use data_url::DataUrl;
use futures::{Stream, TryStreamExt, future::Either, stream};
use itertools::Itertools;
use rgb::Rgba;
use tokio_util::{compat::FuturesAsyncReadCompatExt, io::SyncIoBridge};
use url::Url;
@@ -84,10 +86,10 @@ pub fn dimensions(png_data: &[u8]) -> crate::Result<(u32, u32)> {
    Ok((width, height))
}

/// Normalizes the texture of a Minecraft skin to the modern 64x64 format, handling
/// legacy 64x32 skins as the vanilla game client does. This function prioritizes
/// PNG encoding speed over compression density, so the resulting textures are better
/// suited for display purposes, not persistent storage or transmission.
/// Normalizes the texture of a Minecraft skin to the modern 64x64 format, handling legacy 64x32
/// skins, applying the "Notch transparency hack" and making inner parts opaque as the vanilla game
/// client does. This function prioritizes PNG encoding speed over compression density, so the
/// resulting textures are better suited for display purposes, not persistent storage or transmission.
///
/// The normalized, processed texture is returned as a byte array in PNG format.
pub async fn normalize_skin_texture(
@@ -131,43 +133,30 @@ pub async fn normalize_skin_texture(
    }

    let is_legacy_skin = png_reader.info().height == 32;

    let mut texture_buf = if is_legacy_skin {
        // Legacy skins have half the height, so duplicate the rows to
        // turn them into a 64x64 texture
        vec![0; png_reader.output_buffer_size() * 2]
    } else {
        // Modern skins are left as-is
        vec![0; png_reader.output_buffer_size()]
    };

    let texture_buf_color_type = png_reader.output_color_type().0;
    png_reader.next_frame(&mut texture_buf)?;

    let mut texture_buf =
        get_skin_texture_buffer(&mut png_reader, is_legacy_skin)?;
    if is_legacy_skin {
        convert_legacy_skin_texture(
            &mut texture_buf,
            texture_buf_color_type,
            png_reader.info(),
        )?;
        convert_legacy_skin_texture(&mut texture_buf, png_reader.info());
        do_notch_transparency_hack(&mut texture_buf, png_reader.info());
    }
    make_inner_parts_opaque(&mut texture_buf, png_reader.info());

    let mut encoded_png = vec![];

    let mut png_encoder = png::Encoder::new(&mut encoded_png, 64, 64);
    png_encoder.set_color(texture_buf_color_type);
    png_encoder.set_color(png::ColorType::Rgba);
    png_encoder.set_depth(png::BitDepth::Eight);
    png_encoder.set_filter(png::FilterType::NoFilter);
    png_encoder.set_compression(png::Compression::Fast);

    // Keeping color space information properly set, to handle the occasional
    // strange PNG with non-sRGB chromacities and/or different grayscale spaces
    // strange PNG with non-sRGB chromaticities and/or different grayscale spaces
    // that keeps most people wondering, is what sets a carefully crafted image
    // manipulation routine apart :)
    if let Some(source_chromacities) =
    if let Some(source_chromaticities) =
        png_reader.info().source_chromaticities.as_ref().copied()
    {
        png_encoder.set_source_chromaticities(source_chromacities);
        png_encoder.set_source_chromaticities(source_chromaticities);
    }
    if let Some(source_gamma) =
        png_reader.info().source_gamma.as_ref().copied()
@@ -178,8 +167,10 @@ pub async fn normalize_skin_texture(
        png_encoder.set_source_srgb(source_srgb);
    }

    let png_buf = bytemuck::try_cast_slice(&texture_buf)
        .map_err(|_| ErrorKind::InvalidPng)?;
    let mut png_writer = png_encoder.write_header()?;
    png_writer.write_image_data(&texture_buf)?;
    png_writer.write_image_data(png_buf)?;
    png_writer.finish()?;

    Ok(encoded_png.into())
@@ -187,16 +178,71 @@
    .await?
}

/// Reads a skin texture and returns a 64x64 buffer in RGBA format.
fn get_skin_texture_buffer<R: Read>(
    png_reader: &mut png::Reader<R>,
    is_legacy_skin: bool,
) -> crate::Result<Vec<Rgba<u8>>> {
    let mut png_buf = if is_legacy_skin {
        // Legacy skins have half the height, so duplicate the rows to
        // turn them into a 64x64 texture
        vec![0; png_reader.output_buffer_size() * 2]
    } else {
        // Modern skins are left as-is
        vec![0; png_reader.output_buffer_size()]
    };
    png_reader.next_frame(&mut png_buf)?;

    let mut texture_buf = match png_reader.output_color_type().0 {
        png::ColorType::Grayscale => png_buf
            .iter()
            .map(|&value| Rgba {
                r: value,
                g: value,
                b: value,
                a: 255,
            })
            .collect_vec(),
        png::ColorType::GrayscaleAlpha => png_buf
            .chunks_exact(2)
            .map(|chunk| Rgba {
                r: chunk[0],
                g: chunk[0],
                b: chunk[0],
                a: chunk[1],
            })
            .collect_vec(),
        png::ColorType::Rgb => png_buf
            .chunks_exact(3)
            .map(|chunk| Rgba {
                r: chunk[0],
                g: chunk[1],
                b: chunk[2],
                a: 255,
            })
            .collect_vec(),
        png::ColorType::Rgba => bytemuck::try_cast_vec(png_buf)
            .map_err(|_| ErrorKind::InvalidPng)?,
        _ => Err(ErrorKind::InvalidPng)?, // Cannot happen by PNG spec after transformations
    };

    // Make the added bottom half of the expanded legacy skin buffer transparent
    if is_legacy_skin {
        set_alpha(&mut texture_buf, png_reader.info(), 0, 32, 64, 64, 0);
    }

    Ok(texture_buf)
}

/// Converts a legacy skin texture (64x32 pixels) within a 64x64 buffer to the
/// native 64x64 format used by modern Minecraft clients.
///
/// See also 25w16a's `SkinTextureDownloader#processLegacySkin` method.
#[inline]
fn convert_legacy_skin_texture(
    texture_buf: &mut [u8],
    texture_color_type: png::ColorType,
    texture_buf: &mut [Rgba<u8, u8>],
    texture_info: &png::Info,
) -> crate::Result<()> {
) {
    /// The skin faces the game client copies around, in order, when converting a
    /// legacy skin to the native 64x64 format.
    const FACE_COPY_PARAMETERS: &[(
@@ -222,33 +268,55 @@ fn convert_legacy_skin_texture(
    ];

    for (x, y, off_x, off_y, width, height) in FACE_COPY_PARAMETERS {
        macro_rules! do_copy {
            ($pixel_type:ty) => {
                copy_rect_mirror_horizontally::<$pixel_type>(
                    // This cast should never fail because all pixels have a depth of 8 bits
                    // after the transformations applied during decoding
                    ::bytemuck::try_cast_slice_mut(texture_buf).map_err(|_| ErrorKind::InvalidPng)?,
                    &texture_info,
                    *x,
                    *y,
                    *off_x,
                    *off_y,
                    *width,
                    *height,
                )
            };
        }
        copy_rect_mirror_horizontally(
            texture_buf,
            texture_info,
            *x,
            *y,
            *off_x,
            *off_y,
            *width,
            *height,
        )
    }
}

    match texture_color_type.samples() {
        1 => do_copy!(rgb::Gray<u8>),
        2 => do_copy!(rgb::GrayAlpha<u8>),
        3 => do_copy!(rgb::Rgb<u8>),
        4 => do_copy!(rgb::Rgba<u8>),
        _ => Err(ErrorKind::InvalidPng)?, // Cannot happen by PNG spec after transformations
    };
/// Makes the outer head layer transparent if every pixel has alpha greater or equal to 128.
///
/// See also 25w16a's `SkinTextureDownloader#doNotchTransparencyHack` method.
fn do_notch_transparency_hack(
    texture_buf: &mut [Rgba<u8, u8>],
    texture_info: &png::Info,
) {
    // The skin part the game client makes transparent
    let (x1, y1, x2, y2) = (32, 0, 64, 32);

    for y in y1..y2 {
        for x in x1..x2 {
            if texture_buf[x + y * texture_info.width as usize].a < 128 {
                return;
            }
        }
    }

    Ok(())
    set_alpha(texture_buf, texture_info, x1, y1, x2, y2, 0);
}

/// Makes inner parts of a skin texture opaque.
///
/// See also 25w16a's `SkinTextureDownloader#processLegacySkin` method.
#[inline]
fn make_inner_parts_opaque(
    texture_buf: &mut [Rgba<u8, u8>],
    texture_info: &png::Info,
) {
    /// The skin parts the game client makes opaque.
    const OPAQUE_PART_PARAMETERS: &[(usize, usize, usize, usize)] =
        &[(0, 0, 32, 16), (0, 16, 64, 32), (16, 48, 48, 64)];

    for (x1, y1, x2, y2) in OPAQUE_PART_PARAMETERS {
        set_alpha(texture_buf, texture_info, *x1, *y1, *x2, *y2, 255);
    }
}

/// Copies a `width` pixels wide, `height` pixels tall rectangle of pixels within `texture_buf`
@@ -260,8 +328,8 @@ fn convert_legacy_skin_texture(
/// boolean, boolean)` method, but with the last two parameters fixed to `true` and `false`,
/// respectively.
#[allow(clippy::too_many_arguments)]
fn copy_rect_mirror_horizontally<PixelType: NoUninit + AnyBitPattern>(
    texture_buf: &mut [PixelType],
fn copy_rect_mirror_horizontally(
    texture_buf: &mut [Rgba<u8, u8>],
    texture_info: &png::Info,
    x: usize,
    y: usize,
@@ -283,18 +351,27 @@ fn copy_rect_mirror_horizontally<PixelType: NoUninit + AnyBitPattern>(
    }
}

/// Sets alpha for every pixel of a rectangle within `texture_buf`
/// whose top-left corner is at `(x1, y1)` and bottom-right corner is at `(x2 - 1, y2 - 1)`.
fn set_alpha(
    texture_buf: &mut [Rgba<u8, u8>],
    texture_info: &png::Info,
    x1: usize,
    y1: usize,
    x2: usize,
    y2: usize,
    alpha: u8,
) {
    for y in y1..y2 {
        for x in x1..x2 {
            texture_buf[x + y * texture_info.width as usize].a = alpha;
        }
    }
}
#[cfg(test)]
#[tokio::test]
async fn normalize_skin_texture_works() {
    let legacy_png_data = &include_bytes!("assets/default/MissingNo.png")[..];
    let expected_normalized_png_data =
        &include_bytes!("assets/test/MissingNo_normalized.png")[..];

    let normalized_png_data =
        normalize_skin_texture(&UrlOrBlob::Blob(legacy_png_data.into()))
            .await
            .expect("Failed to normalize skin texture");

    let decode_to_pixels = |png_data: &[u8]| {
        let decoder = png::Decoder::new(png_data);
        let mut reader = decoder.read_info().expect("Failed to read PNG info");
@@ -305,19 +382,55 @@ async fn normalize_skin_texture_works() {
        (buffer, reader.info().clone())
    };

    let (normalized_pixels, normalized_info) =
        decode_to_pixels(&normalized_png_data);
    let (expected_pixels, expected_info) =
        decode_to_pixels(expected_normalized_png_data);
    let test_data = [
        (
            "legacy",
            &include_bytes!("assets/test/legacy.png")[..],
            &include_bytes!("assets/test/legacy_normalized.png")[..],
        ),
        (
            "notch",
            &include_bytes!("assets/test/notch.png")[..],
            &include_bytes!("assets/test/notch_normalized.png")[..],
        ),
        (
            "transparent",
            &include_bytes!("assets/test/transparent.png")[..],
            &include_bytes!("assets/test/transparent_normalized.png")[..],
        ),
    ];

    // Check that dimensions match
    assert_eq!(normalized_info.width, expected_info.width);
    assert_eq!(normalized_info.height, expected_info.height);
    assert_eq!(normalized_info.color_type, expected_info.color_type);
    for (skin_name, original_png_data, expected_normalized_png_data) in
        test_data
    {
        let normalized_png_data =
            normalize_skin_texture(&UrlOrBlob::Blob(original_png_data.into()))
                .await
                .expect("Failed to normalize skin texture");

        // Check that pixel data matches
        assert_eq!(
            normalized_pixels, expected_pixels,
            "Pixel data doesn't match"
        );
        let (normalized_pixels, normalized_info) =
            decode_to_pixels(&normalized_png_data);
        let (expected_pixels, expected_info) =
            decode_to_pixels(expected_normalized_png_data);

        // Check that dimensions match
        assert_eq!(
            normalized_info.width, expected_info.width,
            "Widths don't match for {skin_name}"
        );
        assert_eq!(
            normalized_info.height, expected_info.height,
            "Heights don't match for {skin_name}"
        );
        assert_eq!(
            normalized_info.color_type, expected_info.color_type,
            "Color types don't match for {skin_name}"
        );

        // Check that pixel data matches
        assert_eq!(
            normalized_pixels, expected_pixels,
            "Pixel data doesn't match for {skin_name}"
        );
    }
}
@@ -36,6 +36,9 @@ pub mod prelude {
        jre, metadata, minecraft_auth, mr_auth, pack, process,
        profile::{self, Profile, create},
        settings,
        util::io::{IOError, canonicalize},
        util::{
            io::{IOError, canonicalize},
            network::{is_network_metered, tcp_listen_any_loopback},
        },
    };
}
@@ -6,6 +6,7 @@ use crate::state::{CachedEntry, LinkedData, ProfileInstallStage, SideType};
use crate::util::fetch::{fetch, fetch_advanced, write_cached_icon};
use crate::util::io;

use path_util::SafeRelativeUtf8UnixPathBuf;
use reqwest::Method;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
@@ -27,7 +28,7 @@ pub struct PackFormat {
#[derive(Serialize, Deserialize, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct PackFile {
    pub path: String,
    pub path: SafeRelativeUtf8UnixPathBuf,
    pub hashes: HashMap<PackFileHash, String>,
    pub env: Option<HashMap<EnvType, SideType>>,
    pub downloads: Vec<String>,
@@ -18,8 +18,8 @@ use super::install_from::{
    generate_pack_from_version_id,
};
use crate::data::ProjectType;
use std::io::Cursor;
use std::path::{Component, PathBuf};
use std::io::{Cursor, ErrorKind};
use std::path::PathBuf;

/// Install a pack
/// Wrapper around install_pack_files that generates a pack creation description, and
@@ -169,31 +169,22 @@ pub async fn install_zipped_mrpack_files(
                    )
                    .await?;

                    let project_path = project.path.to_string();
                    let path = profile::get_full_path(&profile_path)
                        .await?
                        .join(project.path.as_str());

                    let path =
                        std::path::Path::new(&project_path).components().next();
                    if let Some(Component::CurDir | Component::Normal(_)) = path
                    {
                        let path = profile::get_full_path(&profile_path)
                            .await?
                            .join(&project_path);
                    cache_file_hash(
                        file.clone(),
                        &profile_path,
                        project.path.as_str(),
                        project.hashes.get(&PackFileHash::Sha1).map(|x| &**x),
                        ProjectType::get_from_parent_folder(&path),
                        &state.pool,
                    )
                    .await?;

                        cache_file_hash(
                            file.clone(),
                            &profile_path,
                            &project_path,
                            project
                                .hashes
                                .get(&PackFileHash::Sha1)
                                .map(|x| &**x),
                            ProjectType::get_from_parent_folder(&path),
                            &state.pool,
                        )
                        .await?;
                    write(&path, &file, &state.io_semaphore).await?;

                        write(&path, &file, &state.io_semaphore).await?;
                    }
                    Ok(())
                }
            },
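
Reviewer note: the old inline `Component::CurDir | Component::Normal(_)` check above is what `SafeRelativeUtf8UnixPathBuf` replaces; path validation now happens once, when the pack manifest is deserialized. As a rough illustration of the invariant such a type plausibly enforces (this is an assumption about its semantics, not the path-util implementation):

```rust
// Assumed invariant of a "safe relative UTF-8 unix path": not absolute,
// no empty segments, and no ".." traversal out of the profile directory.
// Hypothetical stand-in for path_util::SafeRelativeUtf8UnixPathBuf::try_from.
fn is_safe_relative(path: &str) -> bool {
    !path.is_empty()
        && !path.starts_with('/')
        && path.split('/').all(|segment| !segment.is_empty() && segment != "..")
}
```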
@@ -377,9 +368,10 @@ pub async fn remove_all_related_files(
        if let Some(metadata) = &project.metadata
            && to_remove.contains(&metadata.project_id)
        {
            let path = profile_full_path.join(file_path);
            if path.exists() {
                io::remove_file(&path).await?;
            match io::remove_file(profile_full_path.join(file_path)).await {
                Ok(_) => (),
                Err(err) if err.kind() == ErrorKind::NotFound => (),
                Err(err) => return Err(err.into()),
            }
        }
    }
@@ -387,9 +379,12 @@ pub async fn remove_all_related_files(
    // Iterate over all Modrinth project file paths in the json, and remove them
    // (There should be few, but this removes any files the .mrpack intended as Modrinth projects but were unrecognized)
    for file in pack.files {
        let path: PathBuf = profile_full_path.join(file.path);
        if path.exists() {
            io::remove_file(&path).await?;
        match io::remove_file(profile_full_path.join(file.path.as_str()))
            .await
        {
            Ok(_) => (),
            Err(err) if err.kind() == ErrorKind::NotFound => (),
            Err(err) => return Err(err.into()),
        }
    }

@@ -412,11 +407,16 @@
        }

        // Remove this file if a corresponding one exists in the filesystem
        let existing_file = profile::get_full_path(&profile_path)
            .await?
            .join(&new_path);
        if existing_file.exists() {
            io::remove_file(&existing_file).await?;
        match io::remove_file(
            profile::get_full_path(&profile_path)
                .await?
                .join(&new_path),
        )
        .await
        {
            Ok(_) => (),
            Err(err) if err.kind() == ErrorKind::NotFound => (),
            Err(err) => return Err(err.into()),
        }
    }
@@ -18,6 +18,7 @@ use crate::util::io::{self, IOError};
pub use crate::{State, state::Profile};
use async_zip::tokio::write::ZipFileWriter;
use async_zip::{Compression, ZipEntryBuilder};
use path_util::SafeRelativeUtf8UnixPathBuf;
use serde_json::json;

use std::collections::{HashMap, HashSet};
@@ -497,11 +498,12 @@ pub async fn export_mrpack(
    let version_id = version_id.unwrap_or("1.0.0".to_string());
    let mut packfile =
        create_mrpack_json(&profile, version_id, description).await?;
    let included_candidates_set =
        HashSet::<_>::from_iter(included_export_candidates.iter());
    let included_candidates_set = HashSet::<_>::from_iter(
        included_export_candidates.iter().map(|x| x.as_str()),
    );
    packfile
        .files
        .retain(|f| included_candidates_set.contains(&f.path));
        .retain(|f| included_candidates_set.contains(f.path.as_str()));

    // Build vec of all files in the folder
    let mut path_list = Vec::new();
@@ -575,8 +577,8 @@ pub async fn export_mrpack(
#[tracing::instrument]
pub async fn get_pack_export_candidates(
    profile_path: &str,
) -> crate::Result<Vec<String>> {
    let mut path_list: Vec<String> = Vec::new();
) -> crate::Result<Vec<SafeRelativeUtf8UnixPathBuf>> {
    let mut path_list = Vec::new();

    let profile_base_dir = get_full_path(profile_path).await?;
    let mut read_dir = io::read_dir(&profile_base_dir).await?;
@@ -610,18 +612,19 @@ pub async fn get_pack_export_candidates(
fn pack_get_relative_path(
    profile_path: &PathBuf,
    path: &PathBuf,
) -> crate::Result<String> {
    Ok(path
        .strip_prefix(profile_path)
        .map_err(|_| {
            crate::ErrorKind::FSError(format!(
                "Path {path:?} does not correspond to a profile"
            ))
        })?
        .components()
        .map(|c| c.as_os_str().to_string_lossy().to_string())
        .collect::<Vec<_>>()
        .join("/"))
) -> crate::Result<SafeRelativeUtf8UnixPathBuf> {
    Ok(SafeRelativeUtf8UnixPathBuf::try_from(
        path.strip_prefix(profile_path)
            .map_err(|_| {
                crate::ErrorKind::FSError(format!(
                    "Path {path:?} does not correspond to a profile"
                ))
            })?
            .components()
            .map(|c| c.as_os_str().to_string_lossy())
            .collect::<Vec<_>>()
            .join("/"),
    )?)
}

/// Run Minecraft using a profile and the default credentials, logged in credentials,
@@ -896,7 +899,15 @@ pub async fn create_mrpack_json(
    .collect();

    Some(Ok(PackFile {
        path,
        path: match path.try_into() {
            Ok(path) => path,
            Err(_) => {
                return Some(Err(crate::ErrorKind::OtherError(
                    "Invalid file path in project".into(),
                )
                .as_error()));
            }
        },
        hashes,
        env: Some(env),
        downloads,
@@ -120,11 +120,11 @@ fn parse_server_address_inner(
    let mut port = None;
    if !port_str.is_empty() {
        if port_str.starts_with('+') {
            return Err(format!("Unparseable port number: {port_str}"));
            return Err(format!("Unparsable port number: {port_str}"));
        }
        port = port_str.parse::<u16>().ok();
        if port.is_none() {
            return Err(format!("Unparseable port number: {port_str}"));
            return Err(format!("Unparsable port number: {port_str}"));
        }
    }

@@ -3,8 +3,17 @@ use std::sync::Arc;

use crate::{profile, util};
use data_url::DataUrlError;
use derive_more::Display;
use serde::{Deserialize, Serialize};
use tracing_error::InstrumentError;

#[derive(Serialize, Deserialize, Debug, Display)]
#[display("{description}")]
pub struct LabrinthError {
    pub error: String,
    pub description: String,
}

#[derive(thiserror::Error, Debug)]
pub enum ErrorKind {
    #[error("Filesystem error: {0}")]
@@ -56,6 +65,9 @@ pub enum ErrorKind {
    #[error("Error fetching URL: {0}")]
    FetchError(#[from] reqwest::Error),

    #[error("{0}")]
    LabrinthError(LabrinthError),

    #[error("Websocket error: {0}")]
    WSError(#[from] async_tungstenite::tungstenite::Error),

@@ -186,6 +198,18 @@ pub enum ErrorKind {
    ParseError {
        reason: String,
    },
    #[error("RPC error: {0}")]
    RpcError(String),

    #[cfg(windows)]
    #[error("Windows error: {0}")]
    WindowsError(#[from] windows_core::Error),

    #[error("zbus error: {0}")]
    ZbusError(#[from] zbus::Error),

    #[error("Deserialization error: {0}")]
    DeserializationError(#[from] serde::de::value::Error),
}

#[derive(Debug)]
@@ -16,14 +16,14 @@ use uuid::Uuid;
const CLI_PROGRESS_BAR_TOTAL: u64 = 1000;

/*
    Events are a way we can communciate with the Tauri frontend from the Rust backend.
    Events are a way we can communicate with the Tauri frontend from the Rust backend.
    We include a feature flag for Tauri, so that we can compile this code without Tauri.

    To use events, we need to do the following:
    1) Make sure we are using the tauri feature flag
    2) Initialize the EventState with EventState::init() *before* initializing the theseus State
    3) Call emit_x functions to send events to the frontend
    For emit_loading() specifically, we need to inialize the loading bar with init_loading() first and pass the received loader in
    For emit_loading() specifically, we need to initialize the loading bar with init_loading() first and pass the received loader in

    For example:
    pub async fn loading_function() -> crate::Result<()> {
@@ -306,7 +306,7 @@ pub async fn emit_friend(payload: FriendPayload) -> crate::Result<()> {
// loading_join! macro
// loading_join!(key: Option<&LoadingBarId>, total: f64, message: Option<&str>; task1, task2, task3...)
// This will submit a loading event with the given message for each task as they complete
// task1, task2, task3 are async tasks that yuo want to to join on await on
// task1, task2, task3 are async tasks that you want to join on and await
// Key is the key to use for which loading bar to submit these results to- a LoadingBarId. If None, it does nothing
// Total is the total amount of progress that the loading bar should take up by all futures in this (will be split evenly amongst them).
// If message is Some(t) you will overwrite this loading bar's message with a custom one
@@ -179,7 +179,6 @@ pub enum LoadingBarType {
    CurseForgeProfileDownload {
        profile_name: String,
    },
    CheckingForUpdates,
    LauncherUpdate {
        version: String,
        current_version: String,
@@ -14,8 +14,9 @@ use daedalus::{
    modded::SidedDataEntry,
};
use dunce::canonicalize;
use hashlink::LinkedHashSet;
use std::io::{BufRead, BufReader};
use itertools::Itertools;
use std::io::{BufRead, BufReader, ErrorKind};
use std::net::SocketAddr;
use std::{collections::HashMap, path::Path};
use uuid::Uuid;

@@ -29,9 +30,21 @@ pub fn get_class_paths(
    java_arch: &str,
    minecraft_updated: bool,
) -> crate::Result<String> {
    let mut cps = libraries
    launcher_class_path
        .iter()
        .filter_map(|library| {
        .map(|path| {
            Ok(canonicalize(path)
                .map_err(|_| {
                    crate::ErrorKind::LauncherError(format!(
                        "Specified class path {} does not exist",
                        path.to_string_lossy()
                    ))
                    .as_error()
                })?
                .to_string_lossy()
                .to_string())
        })
        .chain(libraries.iter().filter_map(|library| {
            if let Some(rules) = &library.rules
                && !parse_rules(
                    rules,
@@ -47,29 +60,15 @@
                return None;
            }

            Some(get_lib_path(libraries_path, &library.name, false))
            Some(get_lib_path(
                libraries_path,
                &library.name,
                library.natives_os_key_and_classifiers(java_arch).is_some(),
            ))
        }))
        .process_results(|iter| {
            iter.unique().join(classpath_separator(java_arch))
        })
        .collect::<Result<LinkedHashSet<_>, _>>()?;

    for launcher_path in launcher_class_path {
        cps.insert(
            canonicalize(launcher_path)
                .map_err(|_| {
                    crate::ErrorKind::LauncherError(format!(
                        "Specified class path {} does not exist",
                        launcher_path.to_string_lossy()
                    ))
                    .as_error()
                })?
                .to_string_lossy()
                .to_string(),
        );
    }

    Ok(cps
        .into_iter()
        .collect::<Vec<_>>()
        .join(classpath_separator(java_arch)))
}

pub fn get_class_paths_jar<T: AsRef<str>>(
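
Reviewer note: the rewritten `get_class_paths` drops the intermediate `LinkedHashSet` in favor of `Itertools::process_results` plus `unique()`, which preserves first-seen order while collapsing fallible items. A minimal standalone sketch of that pattern (the names here are illustrative, not app-lib code):

```rust
use itertools::Itertools;

// Collapse an iterator of Results: dedupe the successes in first-seen order
// and join them with a separator, or bail out on the first error.
fn join_unique<E>(
    entries: impl Iterator<Item = Result<String, E>>,
    separator: &str,
) -> Result<String, E> {
    entries.process_results(|iter| iter.unique().join(separator))
}
```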
@@ -90,21 +89,21 @@ pub fn get_lib_path(
    lib: &str,
    allow_not_exist: bool,
) -> crate::Result<String> {
    let path = libraries_path
        .to_path_buf()
        .join(get_path_from_artifact(lib)?);
    let path = libraries_path.join(get_path_from_artifact(lib)?);

    if !path.exists() && allow_not_exist {
        return Ok(path.to_string_lossy().to_string());
    }

    let path = &canonicalize(&path).map_err(|_| {
        crate::ErrorKind::LauncherError(format!(
            "Library file at path {} does not exist",
            path.to_string_lossy()
        ))
        .as_error()
    })?;
    let path = match canonicalize(&path) {
        Ok(p) => p,
        Err(err) if err.kind() == ErrorKind::NotFound && allow_not_exist => {
            path
        }
        Err(err) => {
            return Err(crate::ErrorKind::LauncherError(format!(
                "Could not canonicalize library path {}: {err}",
                path.display()
            ))
            .as_error());
        }
    };

    Ok(path.to_string_lossy().to_string())
}
@@ -124,6 +123,7 @@ pub fn get_jvm_arguments(
    quick_play_type: &QuickPlayType,
    quick_play_version: QuickPlayVersion,
    log_config: Option<&LoggingConfiguration>,
    ipc_addr: SocketAddr,
) -> crate::Result<Vec<String>> {
    let mut parsed_arguments = Vec::new();

@@ -181,6 +181,11 @@
        .to_string_lossy()
    ));

    parsed_arguments
        .push(format!("-Dmodrinth.internal.ipc.host={}", ipc_addr.ip()));
    parsed_arguments
        .push(format!("-Dmodrinth.internal.ipc.port={}", ipc_addr.port()));

    parsed_arguments.push(format!(
        "-Dmodrinth.internal.quickPlay.serverVersion={}",
        serde_json::to_value(quick_play_version.server)?
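
Reviewer note: these two `-D` properties are the game-side half of the IPC handshake. The launcher listens on a loopback socket and tells the JVM where to connect; `MinecraftLaunch` reads them back via `System.getProperty`/`Integer.getInteger`. A hedged sketch of the launcher side, using tokio directly rather than the app-lib's `tcp_listen_any_loopback` helper named in the prelude:

```rust
use std::net::SocketAddr;
use tokio::net::TcpListener;

// Bind an ephemeral loopback port; binding to port 0 lets the OS pick a free one.
async fn ipc_listener() -> std::io::Result<(TcpListener, SocketAddr)> {
    let listener = TcpListener::bind(("127.0.0.1", 0)).await?;
    let addr = listener.local_addr()?;
    Ok((listener, addr))
}

// Render the same JVM flags get_jvm_arguments pushes above.
fn ipc_jvm_args(addr: SocketAddr) -> Vec<String> {
    vec![
        format!("-Dmodrinth.internal.ipc.host={}", addr.ip()),
        format!("-Dmodrinth.internal.ipc.port={}", addr.port()),
    ]
}
```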
|
||||
|
||||
@@ -8,13 +8,13 @@ use crate::{
emit::{emit_loading, loading_try_for_each_concurrent},
},
state::State,
util::{fetch::*, io, platform::OsExt},
util::{fetch::*, io},
};
use daedalus::minecraft::{LoggingConfiguration, LoggingSide};
use daedalus::{
self as d,
minecraft::{
Asset, AssetsIndex, Library, Os, Version as GameVersion,
Asset, AssetsIndex, Library, Version as GameVersion,
VersionInfo as GameVersionInfo,
},
modded::LoaderVersion,
@@ -288,90 +288,132 @@ pub async fn download_libraries(
}?;
let num_files = libraries.len();
loading_try_for_each_concurrent(
stream::iter(libraries.iter())
.map(Ok::<&Library, crate::Error>), None, loading_bar,loading_amount,num_files, None,|library| async move {
if let Some(rules) = &library.rules
&& !parse_rules(rules, java_arch, &QuickPlayType::None, minecraft_updated) {
tracing::trace!("Skipped library {}", &library.name);
return Ok(());
}
stream::iter(libraries.iter()).map(Ok::<&Library, crate::Error>),
None,
loading_bar,
loading_amount,
num_files,
None,
|library| async move {
if let Some(rules) = &library.rules
&& !parse_rules(
rules,
java_arch,
&QuickPlayType::None,
minecraft_updated,
)
{
tracing::trace!("Skipped library {}", &library.name);
return Ok(());
}

if !library.downloadable {
tracing::trace!("Skipped non-downloadable library {}", &library.name);
if !library.downloadable {
tracing::trace!(
"Skipped non-downloadable library {}",
&library.name
);
return Ok(());
}

// When a library has natives, we only need to download such natives, as PrismLauncher does
if let Some((os_key, classifiers)) =
library.natives_os_key_and_classifiers(java_arch)
{
let parsed_key = os_key
.replace("${arch}", crate::util::platform::ARCH_WIDTH);

if let Some(native) = classifiers.get(&parsed_key) {
let data = fetch(
&native.url,
Some(&native.sha1),
&st.fetch_semaphore,
&st.pool,
)
.await?;

if let Ok(mut archive) =
zip::ZipArchive::new(std::io::Cursor::new(&data))
{
match archive.extract(
st.directories.version_natives_dir(version),
) {
Ok(_) => tracing::debug!(
"Fetched native {}",
&library.name
),
Err(err) => tracing::error!(
"Failed extracting native {}. err: {err}",
&library.name
),
}
} else {
tracing::error!(
"Failed extracting native {}",
&library.name
);
}
}
} else {
let artifact_path = d::get_path_from_artifact(&library.name)?;
let path = st.directories.libraries_dir().join(&artifact_path);

if path.exists() && !force {
return Ok(());
}

tokio::try_join! {
async {
let artifact_path = d::get_path_from_artifact(&library.name)?;
let path = st.directories.libraries_dir().join(&artifact_path);
if let Some(d::minecraft::LibraryDownloads {
artifact: Some(ref artifact),
..
}) = library.downloads
&& !artifact.url.is_empty()
{
let bytes = fetch(
&artifact.url,
Some(&artifact.sha1),
&st.fetch_semaphore,
&st.pool,
)
.await?;
write(&path, &bytes, &st.io_semaphore).await?;

if path.exists() && !force {
return Ok(());
}
tracing::trace!(
"Fetched library {} to path {:?}",
&library.name,
&path
);
} else {
// We lack an artifact URL, so fall back to constructing one ourselves.
// PrismLauncher just ignores the library if this is the case, so it's
// probably not needed, but previous code revisions of the Modrinth App
// intended to do this, so we keep that behavior for compatibility.

if let Some(d::minecraft::LibraryDownloads { artifact: Some(ref artifact), ..}) = library.downloads
&& !artifact.url.is_empty(){
let bytes = fetch(&artifact.url, Some(&artifact.sha1), &st.fetch_semaphore, &st.pool)
.await?;
write(&path, &bytes, &st.io_semaphore).await?;
tracing::trace!("Fetched library {} to path {:?}", &library.name, &path);
return Ok::<_, crate::Error>(());
}
let url = format!(
"{}{artifact_path}",
library
.url
.as_deref()
.unwrap_or("https://libraries.minecraft.net/")
);

let url = [
library
.url
.as_deref()
.unwrap_or("https://libraries.minecraft.net/"),
&artifact_path
].concat();
let bytes =
fetch(&url, None, &st.fetch_semaphore, &st.pool)
.await?;

let bytes = fetch(&url, None, &st.fetch_semaphore, &st.pool).await?;
write(&path, &bytes, &st.io_semaphore).await?;
tracing::trace!("Fetched library {} to path {:?}", &library.name, &path);
Ok::<_, crate::Error>(())
},
async {
// HACK: pseudo try block using or else
if let Some((os_key, classifiers)) = None.or_else(|| Some((
library
.natives
.as_ref()?
.get(&Os::native_arch(java_arch))?,
library
.downloads
.as_ref()?
.classifiers
.as_ref()?
))) {
let parsed_key = os_key.replace(
"${arch}",
crate::util::platform::ARCH_WIDTH,
);
write(&path, &bytes, &st.io_semaphore).await?;

if let Some(native) = classifiers.get(&parsed_key) {
let data = fetch(&native.url, Some(&native.sha1), &st.fetch_semaphore, &st.pool).await?;
let reader = std::io::Cursor::new(&data);
if let Ok(mut archive) = zip::ZipArchive::new(reader) {
match archive.extract(st.directories.version_natives_dir(version)) {
Ok(_) => tracing::debug!("Fetched native {}", &library.name),
Err(err) => tracing::error!("Failed extracting native {}. err: {}", &library.name, err)
}
} else {
tracing::error!("Failed extracting native {}", &library.name)
}
}
}

Ok(())
}
}?;

tracing::debug!("Loaded library {}", library.name);
Ok(())
tracing::trace!(
"Fetched library {} to path {:?}",
&library.name,
&path
);
}
}
).await?;

tracing::debug!("Loaded library {}", library.name);
Ok(())
},
)
.await?;

tracing::debug!("Done loading libraries!");
Ok(())
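When a library carries no artifact URL, the new code concatenates the library's own maven base (or Mojang's default mirror) with the Maven-style artifact path instead of going through `format!`. A sketch of that fallback with assumed inputs; the `fallback_url` helper is illustrative:

// Base defaults to Mojang's library mirror; artifact_path comes from
// get_path_from_artifact, e.g. "com/example/lib/1.0/lib-1.0.jar".
fn fallback_url(base: Option<&str>, artifact_path: &str) -> String {
    [base.unwrap_or("https://libraries.minecraft.net/"), artifact_path].concat()
}

// fallback_url(None, "com/example/lib/1.0/lib-1.0.jar")
// → "https://libraries.minecraft.net/com/example/lib/1.0/lib-1.0.jar"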
@@ -12,6 +12,7 @@ use crate::state::{
AccountType, Credentials, JavaVersion, ProcessMetadata, ProfileInstallStage,
};
use crate::util::{io, utils};
use crate::util::rpc::RpcServerBuilder;
use crate::{State, get_resource_file, process, state as st};
use chrono::Utc;
use daedalus as d;
@@ -23,7 +24,6 @@ use serde::Deserialize;
use st::Profile;
use std::fmt::Write;
use std::path::PathBuf;
use tokio::io::AsyncWriteExt;
use tokio::process::Command;

mod args;
@@ -611,6 +611,8 @@ pub async fn launch_minecraft(
let (main_class_keep_alive, main_class_path) =
get_resource_file!(env "JAVA_JARS_DIR" / "theseus.jar")?;

let rpc_server = RpcServerBuilder::new().launch().await?;

command.args(
args::get_jvm_arguments(
args.get(&d::minecraft::ArgumentType::Jvm)
@@ -636,6 +638,7 @@ pub async fn launch_minecraft(
.logging
.as_ref()
.and_then(|x| x.get(&LoggingSide::Client)),
rpc_server.address(),
)?
.into_iter(),
);
@@ -800,7 +803,8 @@ pub async fn launch_minecraft(
state.directories.profile_logs_dir(&profile.path),
version_info.logging.is_some(),
main_class_keep_alive,
async |process: &ProcessMetadata, stdin| {
rpc_server,
async |process: &ProcessMetadata, rpc_server| {
let process_start_time = process.start_time.to_rfc3339();
let profile_created_time = profile.created.to_rfc3339();
let profile_modified_time = profile.modified.to_rfc3339();
@@ -823,14 +827,11 @@ pub async fn launch_minecraft(
let Some(value) = value else {
continue;
};
stdin.write_all(b"property\t").await?;
stdin.write_all(key.as_bytes()).await?;
stdin.write_u8(b'\t').await?;
stdin.write_all(value.as_bytes()).await?;
stdin.write_u8(b'\n').await?;
rpc_server
.call_method_2::<()>("set_system_property", key, value)
.await?;
}
stdin.write_all(b"launch\n").await?;
stdin.flush().await?;
rpc_server.call_method::<()>("launch").await?;
Ok(())
},
)
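Post-launch setup now talks to the game over the freshly bound RPC server rather than a tab-separated protocol on the child's stdin: one `set_system_property` call per key/value pair, then a final `launch`. A condensed sketch of that sequence; types are as in the rpc module added later in this diff, and the `handshake` wrapper is hypothetical:

async fn handshake(
    rpc_server: &RpcServer,
    props: &[(&str, Option<&str>)],
) -> crate::Result<()> {
    for (key, value) in props {
        // Properties without a value are simply skipped, as in the hunk above.
        let Some(value) = value else { continue };
        rpc_server
            .call_method_2::<()>("set_system_property", key, value)
            .await?;
    }
    rpc_server.call_method::<()>("launch").await?;
    Ok(())
}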
@@ -25,3 +25,9 @@ pub use event::{
};
pub use logger::start_logger;
pub use state::State;

pub const LAUNCHER_USER_AGENT: &str = concat!(
"modrinth/theseus/",
env!("CARGO_PKG_VERSION"),
" (support@modrinth.com)"
);
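Since every component is known at compile time, `concat!` plus `env!` yield a `&'static str` with no per-call allocation, unlike the `format!` calls this constant replaces at the fetch and friends-socket call sites below. A hypothetical sanity-check test, not part of the diff:

#[test]
fn user_agent_shape() {
    // The middle segment is whatever CARGO_PKG_VERSION resolves to at build time.
    assert!(LAUNCHER_USER_AGENT.starts_with("modrinth/theseus/"));
    assert!(LAUNCHER_USER_AGENT.ends_with(" (support@modrinth.com)"));
}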
@@ -25,12 +25,11 @@ pub fn start_logger() -> Option<()> {
.unwrap_or_else(|_| {
tracing_subscriber::EnvFilter::new("theseus=info,theseus_gui=info")
});
let subscriber = tracing_subscriber::registry()
tracing_subscriber::registry()
.with(tracing_subscriber::fmt::layer())
.with(filter)
.with(tracing_error::ErrorLayer::default());
tracing::subscriber::set_global_default(subscriber)
.expect("setting default subscriber failed");
.with(tracing_error::ErrorLayer::default())
.init();
Some(())
}

@@ -76,7 +75,7 @@ pub fn start_logger() -> Option<()> {
let filter = tracing_subscriber::EnvFilter::try_from_default_env()
.unwrap_or_else(|_| tracing_subscriber::EnvFilter::new("theseus=info"));

let subscriber = tracing_subscriber::registry()
tracing_subscriber::registry()
.with(
tracing_subscriber::fmt::layer()
.with_writer(file)
@@ -84,10 +83,8 @@ pub fn start_logger() -> Option<()> {
.with_timer(ChronoLocal::rfc_3339()),
)
.with(filter)
.with(tracing_error::ErrorLayer::default());

tracing::subscriber::set_global_default(subscriber)
.expect("Setting default subscriber failed");
.with(tracing_error::ErrorLayer::default())
.init();

Some(())
}
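Both logger variants switch from building a `subscriber` value and calling `set_global_default` to the `SubscriberInitExt::init()` shorthand, which performs the same global registration and panics on double-initialization. A minimal sketch of the new shape, assuming the tracing-subscriber and tracing-error crates already used here:

use tracing_subscriber::prelude::*;

fn init_logging() {
    // .with() comes from SubscriberExt, .init() from SubscriberInitExt;
    // both are re-exported by the prelude.
    tracing_subscriber::registry()
        .with(tracing_subscriber::fmt::layer())
        .with(tracing_error::ErrorLayer::default())
        .init();
}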
@@ -519,11 +519,14 @@ impl CacheValue {
}
}

#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Copy, Clone)]
#[derive(
Deserialize, Serialize, PartialEq, Eq, Debug, Copy, Clone, Default,
)]
#[serde(rename_all = "snake_case")]
pub enum CacheBehaviour {
/// Serve expired data. If fetch fails / launcher is offline, errors are ignored
/// and expired data is served
#[default]
StaleWhileRevalidateSkipOffline,
// Serve expired data, revalidate in background
StaleWhileRevalidate,
@@ -533,12 +536,6 @@ pub enum CacheBehaviour {
Bypass,
}

impl Default for CacheBehaviour {
fn default() -> Self {
Self::StaleWhileRevalidateSkipOffline
}
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CachedEntry {
id: String,
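The manual `impl Default` removed above is subsumed by deriving `Default` with a `#[default]` variant attribute, stable since Rust 1.62. The pattern in isolation, with an illustrative enum name:

#[derive(Default)]
enum Behaviour {
    // The attribute picks which unit variant Default::default() returns.
    #[default]
    StaleWhileRevalidateSkipOffline,
    StaleWhileRevalidate,
    Bypass,
}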
@@ -1,3 +1,5 @@
use crate::ErrorKind;
use crate::LAUNCHER_USER_AGENT;
use crate::data::ModrinthCredentials;
use crate::event::FriendPayload;
use crate::event::emit::emit_friend;
@@ -81,13 +83,9 @@ impl FriendsSocket {
)
.into_client_request()?;

let user_agent = format!(
"modrinth/theseus/{} (support@modrinth.com)",
env!("CARGO_PKG_VERSION")
);
request.headers_mut().insert(
"User-Agent",
HeaderValue::from_str(&user_agent).unwrap(),
HeaderValue::from_str(LAUNCHER_USER_AGENT).unwrap(),
);

let res = connect_async(request).await;
@@ -322,7 +320,7 @@ impl FriendsSocket {
exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite> + Copy,
semaphore: &FetchSemaphore,
) -> crate::Result<()> {
fetch_advanced(
let result = fetch_advanced(
Method::POST,
&format!("{}friend/{user_id}", env!("MODRINTH_API_URL_V3")),
None,
@@ -332,7 +330,18 @@ impl FriendsSocket {
semaphore,
exec,
)
.await?;
.await;

if let Err(ref e) = result
&& let ErrorKind::LabrinthError(e) = &*e.raw
&& e.error == "not_found"
{
return Err(ErrorKind::OtherError(format!(
"No user found with username \"{user_id}\""
))
.into());
}
result?;

Ok(())
}
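The friend-request path now inspects the failure before propagating it, using let chains (stable with edition 2024 toolchains) to match a `not_found` Labrinth error and rewrite it into a user-facing message. The shape of that rewrite, with a simplified stand-in error type rather than the crate's real ones:

struct ApiError { error: String }

// Only the "not_found" case is rewritten; every other error passes through.
fn map_not_found(result: Result<(), ApiError>, user_id: &str) -> Result<(), String> {
    if let Err(ref e) = result
        && e.error == "not_found"
    {
        return Err(format!("No user found with username \"{user_id}\""));
    }
    result.map_err(|e| e.error)
}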
@@ -24,7 +24,7 @@ pub async fn init_watcher() -> crate::Result<FileWatcher> {

tokio::task::spawn(async move {
let span = tracing::span!(tracing::Level::INFO, "init_watcher");
tracing::info!(parent: &span, "Initting watcher");
tracing::info!(parent: &span, "Initing watcher");
while let Some(res) = rx.recv().await {
let _span = span.enter();

@@ -170,38 +170,22 @@ pub(crate) async fn watch_profile(
let profile_path = dirs.profiles_dir().join(profile_path);

if profile_path.exists() && profile_path.is_dir() {
for sub_path in ProjectType::iterator().map(|x| x.get_folder()).chain([
"crash-reports",
"saves",
"servers.dat",
]) {
for sub_path in ProjectType::iterator()
.map(|x| x.get_folder())
.chain(["crash-reports", "saves"])
{
let full_path = profile_path.join(sub_path);

if !full_path.exists() && !full_path.is_symlink() {
if !sub_path.contains(".") {
if let Err(e) =
crate::util::io::create_dir_all(&full_path).await
{
tracing::error!(
"Failed to create directory for watcher {full_path:?}: {e}"
);
return;
}
} else if sub_path == "servers.dat" {
const EMPTY_NBT: &[u8] = &[
10, // Compound tag
0, 0, // Empty name
0, // End of compound tag
];
if let Err(e) =
crate::util::io::write(&full_path, EMPTY_NBT).await
{
tracing::error!(
"Failed to create file for watcher {full_path:?}: {e}"
);
return;
}
}
if !full_path.exists()
&& !full_path.is_symlink()
&& !sub_path.contains(".")
&& let Err(e) =
crate::util::io::create_dir_all(&full_path).await
{
tracing::error!(
"Failed to create directory for watcher {full_path:?}: {e}"
);
return;
}

let mut watcher = watcher.write().await;
@@ -215,6 +199,16 @@ pub(crate) async fn watch_profile(
return;
}
}

let mut watcher = watcher.write().await;
if let Err(e) = watcher
.watcher()
.watch(&profile_path, RecursiveMode::NonRecursive)
{
tracing::error!(
"Failed to watch root profile directory for watcher {profile_path:?}: {e}"
);
}
}
}

@@ -2,6 +2,7 @@ use crate::event::emit::{emit_process, emit_profile};
use crate::event::{ProcessPayloadType, ProfilePayloadType};
use crate::profile;
use crate::util::io::IOError;
use crate::util::rpc::RpcServer;
use chrono::{DateTime, NaiveDateTime, TimeZone, Utc};
use dashmap::DashMap;
use quick_xml::Reader;
@@ -15,7 +16,7 @@ use std::path::{Path, PathBuf};
use std::process::ExitStatus;
use tempfile::TempDir;
use tokio::io::{AsyncBufReadExt, BufReader};
use tokio::process::{Child, ChildStdin, Command};
use tokio::process::{Child, Command};
use uuid::Uuid;

const LAUNCHER_LOG_PATH: &str = "launcher_log.txt";
@@ -46,9 +47,10 @@ impl ProcessManager {
logs_folder: PathBuf,
xml_logging: bool,
main_class_keep_alive: TempDir,
rpc_server: RpcServer,
post_process_init: impl AsyncFnOnce(
&ProcessMetadata,
&mut ChildStdin,
&RpcServer,
) -> crate::Result<()>,
) -> crate::Result<ProcessMetadata> {
mc_command.stdout(std::process::Stdio::piped());
@@ -67,14 +69,12 @@ impl ProcessManager {
profile_path: profile_path.to_string(),
},
child: mc_proc,
rpc_server,
_main_class_keep_alive: main_class_keep_alive,
};

if let Err(e) = post_process_init(
&process.metadata,
&mut process.child.stdin.as_mut().unwrap(),
)
.await
if let Err(e) =
post_process_init(&process.metadata, &process.rpc_server).await
{
tracing::error!("Failed to run post-process init: {e}");
let _ = process.child.kill().await;
@@ -165,6 +165,10 @@ impl ProcessManager {
self.processes.get(&id).map(|x| x.metadata.clone())
}

pub fn get_rpc(&self, id: Uuid) -> Option<RpcServer> {
self.processes.get(&id).map(|x| x.rpc_server.clone())
}

pub fn get_all(&self) -> Vec<ProcessMetadata> {
self.processes
.iter()
@@ -215,6 +219,7 @@ struct Process {
metadata: ProcessMetadata,
child: Child,
_main_class_keep_alive: TempDir,
rpc_server: RpcServer,
}

#[derive(Debug, Default)]
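Each running `Process` now owns its `RpcServer` handle, and the new `get_rpc` exposes a clone keyed by process UUID so later code can message a specific running game. A hypothetical call site, assuming a `ProcessManager` value named `manager` and an illustrative method name:

// Hypothetical usage: fire an RPC at a particular running instance, if alive.
async fn notify(manager: &ProcessManager, id: Uuid) -> crate::Result<()> {
    if let Some(rpc) = manager.get_rpc(id) {
        rpc.call_method::<()>("some_future_method").await?;
    }
    Ok(())
}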
@@ -38,6 +38,10 @@ pub struct Settings {

pub developer_mode: bool,
pub feature_flags: HashMap<FeatureFlag, bool>,

pub skipped_update: Option<String>,
pub pending_update_toast_for_version: Option<String>,
pub auto_download_updates: Option<bool>,
}

#[derive(Serialize, Deserialize, Debug, Clone, Copy, Eq, Hash, PartialEq)]
@@ -63,7 +67,8 @@ impl Settings {
json(extra_launch_args) extra_launch_args, json(custom_env_vars) custom_env_vars,
mc_memory_max, mc_force_fullscreen, mc_game_resolution_x, mc_game_resolution_y, hide_on_process_start,
hook_pre_launch, hook_wrapper, hook_post_exit,
custom_dir, prev_custom_dir, migrated, json(feature_flags) feature_flags, toggle_sidebar
custom_dir, prev_custom_dir, migrated, json(feature_flags) feature_flags, toggle_sidebar,
skipped_update, pending_update_toast_for_version, auto_download_updates
FROM settings
"
)
@@ -117,6 +122,10 @@ impl Settings {
.as_ref()
.and_then(|x| serde_json::from_str(x).ok())
.unwrap_or_default(),
skipped_update: res.skipped_update,
pending_update_toast_for_version: res
.pending_update_toast_for_version,
auto_download_updates: res.auto_download_updates.map(|x| x == 1),
})
}

@@ -170,7 +179,11 @@ impl Settings {

toggle_sidebar = $26,
feature_flags = $27,
hide_nametag_skins_page = $28
hide_nametag_skins_page = $28,

skipped_update = $29,
pending_update_toast_for_version = $30,
auto_download_updates = $31
",
max_concurrent_writes,
max_concurrent_downloads,
@@ -199,7 +212,10 @@ impl Settings {
self.migrated,
self.toggle_sidebar,
feature_flags,
self.hide_nametag_skins_page
self.hide_nametag_skins_page,
self.skipped_update,
self.pending_update_toast_for_version,
self.auto_download_updates,
)
.execute(exec)
.await?;
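SQLite has no native boolean type, so the nullable `auto_download_updates` column round-trips as an integer: `map(|x| x == 1)` turns `Some(1)`/`Some(0)` into `Some(true)`/`Some(false)` and leaves `NULL` as `None`. The mapping in isolation, with an illustrative helper name:

fn int_to_bool(column: Option<i64>) -> Option<bool> {
    column.map(|x| x == 1)
}

// int_to_bool(Some(1)) == Some(true); int_to_bool(Some(0)) == Some(false);
// int_to_bool(None) == None  (a NULL column means "no preference recorded")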
@@ -1,5 +1,7 @@
//! Functions for fetching information from the Internet
use super::io::{self, IOError};
use crate::ErrorKind;
use crate::LAUNCHER_USER_AGENT;
use crate::event::LoadingBarId;
use crate::event::emit::emit_loading;
use bytes::Bytes;
@@ -19,11 +21,8 @@ pub struct FetchSemaphore(pub Semaphore);

pub static REQWEST_CLIENT: LazyLock<reqwest::Client> = LazyLock::new(|| {
let mut headers = reqwest::header::HeaderMap::new();
let header = reqwest::header::HeaderValue::from_str(&format!(
"modrinth/theseus/{} (support@modrinth.com)",
env!("CARGO_PKG_VERSION")
))
.unwrap();
let header =
reqwest::header::HeaderValue::from_str(LAUNCHER_USER_AGENT).unwrap();
headers.insert(reqwest::header::USER_AGENT, header);
reqwest::Client::builder()
.tcp_keepalive(Some(time::Duration::from_secs(10)))
@@ -108,32 +107,31 @@ pub async fn fetch_advanced(

let result = req.send().await;
match result {
Ok(x) => {
if x.status().is_server_error() {
if attempt <= FETCH_ATTEMPTS {
continue;
} else {
return Err(crate::Error::from(
crate::ErrorKind::OtherError(
"Server error when fetching content"
.to_string(),
),
));
Ok(resp) => {
if resp.status().is_server_error() && attempt <= FETCH_ATTEMPTS
{
continue;
}
if resp.status().is_client_error()
|| resp.status().is_server_error()
{
let backup_error = resp.error_for_status_ref().unwrap_err();
if let Ok(error) = resp.json().await {
return Err(ErrorKind::LabrinthError(error).into());
}
return Err(backup_error.into());
}

let bytes = if let Some((bar, total)) = &loading_bar {
let length = x.content_length();
let length = resp.content_length();
if let Some(total_size) = length {
use futures::StreamExt;
let mut stream = x.bytes_stream();
let mut stream = resp.bytes_stream();
let mut bytes = Vec::new();
while let Some(item) = stream.next().await {
let chunk = item.or(Err(
crate::error::ErrorKind::NoValueFor(
"fetch bytes".to_string(),
),
))?;
let chunk = item.or(Err(ErrorKind::NoValueFor(
"fetch bytes".to_string(),
)))?;
bytes.append(&mut chunk.to_vec());
emit_loading(
bar,
@@ -145,10 +143,10 @@ pub async fn fetch_advanced(

Ok(bytes::Bytes::from(bytes))
} else {
x.bytes().await
resp.bytes().await
}
} else {
x.bytes().await
resp.bytes().await
};

if let Ok(bytes) = bytes {
@@ -158,7 +156,7 @@ pub async fn fetch_advanced(
if attempt <= FETCH_ATTEMPTS {
continue;
} else {
return Err(crate::ErrorKind::HashError(
return Err(ErrorKind::HashError(
sha1.to_string(),
hash,
)
@@ -194,10 +192,9 @@ pub async fn fetch_mirrors(
exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite> + Copy,
) -> crate::Result<Bytes> {
if mirrors.is_empty() {
return Err(crate::ErrorKind::InputError(
"No mirrors provided!".to_string(),
)
.into());
return Err(
ErrorKind::InputError("No mirrors provided!".to_string()).into()
);
}

for (index, mirror) in mirrors.iter().enumerate() {
@@ -276,8 +273,8 @@ pub async fn write(
}

pub async fn copy(
src: impl AsRef<std::path::Path>,
dest: impl AsRef<std::path::Path>,
src: impl AsRef<Path>,
dest: impl AsRef<Path>,
semaphore: &IoSemaphore,
) -> crate::Result<()> {
let src: &Path = src.as_ref();
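The rewritten status handling first retries transient 5xx responses, then, once out of attempts, tries to decode the error body as a structured Labrinth error before falling back to the bare HTTP status error. A self-contained sketch of that decode-or-fallback step; the error types here are stand-ins, not the crate's real ones, and reqwest's json feature is assumed:

use serde::Deserialize;

#[derive(Deserialize)]
struct LabrinthError { error: String, description: String }

enum FetchError {
    Labrinth(LabrinthError),
    Http(reqwest::Error),
}

async fn check_status(resp: reqwest::Response) -> Result<reqwest::Response, FetchError> {
    if resp.status().is_client_error() || resp.status().is_server_error() {
        // error_for_status_ref keeps `resp` usable so the body can still be read.
        let backup = resp.error_for_status_ref().unwrap_err();
        if let Ok(body) = resp.json::<LabrinthError>().await {
            return Err(FetchError::Labrinth(body));
        }
        return Err(FetchError::Http(backup));
    }
    Ok(resp)
}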
@@ -1,7 +1,10 @@
// IO error
// A wrapper around the tokio IO functions that adds the path to the error message, instead of the uninformative std::io::Error.

use std::{io::Write, path::Path};
use std::{
io::{ErrorKind, Write},
path::Path,
};
use tempfile::NamedTempFile;
use tokio::task::spawn_blocking;

@@ -32,6 +35,13 @@ impl IOError {
path: path.to_string_lossy().to_string(),
}
}

pub fn kind(&self) -> ErrorKind {
match self {
IOError::IOPathError { source, .. } => source.kind(),
IOError::IOError(source) => source.kind(),
}
}
}

pub fn canonicalize(
@@ -2,7 +2,9 @@
pub mod fetch;
pub mod io;
pub mod jre;
pub mod network;
pub mod platform;
pub mod utils; // [AR] Feature
pub mod protocol_version;
pub mod rpc;
pub mod server_ping;

packages/app-lib/src/util/network.rs (new file, 93 lines)
@@ -0,0 +1,93 @@
use crate::Result;
use std::io;
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr};
use tokio::net::TcpListener;

pub async fn tcp_listen_any_loopback() -> io::Result<TcpListener> {
// IPv4 is tried first for the best compatibility and performance with most systems.
// IPv6 is also tried in case IPv4 is not available. Resolving "localhost" is avoided
// to prevent failures deriving from improper name resolution setup. Any available
// ephemeral port is used to prevent conflicts with other services. This is all as per
// RFC 8252's recommendations
const ANY_LOOPBACK_SOCKET: &[SocketAddr] = &[
SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), 0),
SocketAddr::new(IpAddr::V6(Ipv6Addr::LOCALHOST), 0),
];

TcpListener::bind(ANY_LOOPBACK_SOCKET).await
}

#[cfg(windows)]
pub async fn is_network_metered() -> Result<bool> {
use windows::Networking::Connectivity::{
NetworkCostType, NetworkInformation,
};

let cost_type = NetworkInformation::GetInternetConnectionProfile()?
.GetConnectionCost()?
.NetworkCostType()?;
Ok(matches!(
cost_type,
NetworkCostType::Fixed | NetworkCostType::Variable
))
}

#[cfg(target_os = "macos")]
pub async fn is_network_metered() -> Result<bool> {
use crate::ErrorKind;
use cidre::dispatch::Queue;
use cidre::nw::PathMonitor;
use std::time::Duration;
use tokio::sync::mpsc;
use tokio_util::future::FutureExt;

let (sender, mut receiver) = mpsc::channel(1);

let queue = Queue::new();
let mut monitor = PathMonitor::new();
monitor.set_queue(&queue);
monitor.set_update_handler(move |path| {
let _ = sender.try_send(path.is_constrained() || path.is_expensive());
});

monitor.start();
let result = receiver
.recv()
.timeout(Duration::from_millis(100))
.await
.ok()
.flatten();
monitor.cancel();

result.ok_or_else(|| {
ErrorKind::OtherError(
"NWPathMonitor didn't provide an NWPath in time".to_string(),
)
.into()
})
}

#[cfg(target_os = "linux")]
pub async fn is_network_metered() -> Result<bool> {
// Thanks to https://github.com/Hakanbaban53/rclone-manager for showing how to do this
use zbus::{Connection, Proxy};

let connection = Connection::system().await?;
let proxy = Proxy::new(
&connection,
"org.freedesktop.NetworkManager",
"/org/freedesktop/NetworkManager",
"org.freedesktop.NetworkManager",
)
.await?;
let metered = proxy.get_property("Metered").await?;
Ok(matches!(metered, 1 | 3))
}

#[cfg(not(any(windows, target_os = "macos", target_os = "linux")))]
pub async fn is_network_metered() -> Result<bool> {
tracing::warn!(
"is_network_metered called on unsupported platform. Assuming unmetered."
);
Ok(false)
}
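Per RFC 8252's loopback guidance, `TcpListener::bind` is handed a slice of candidate addresses and takes the first one that binds; port 0 lets the OS pick a free ephemeral port. A usage sketch, assuming the `tcp_listen_any_loopback` above is in scope:

async fn bind_demo() -> std::io::Result<()> {
    let listener = tcp_listen_any_loopback().await?;
    // The actual port picked by the OS; this is what ends up in the JVM flag
    // -Dmodrinth.internal.ipc.port in the launch path earlier in this diff.
    let addr = listener.local_addr()?;
    println!("listening on {addr}");
    Ok(())
}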
@@ -1,65 +1,6 @@
//! Platform-related code
use daedalus::minecraft::{Os, OsRule};

// OS detection
pub trait OsExt {
/// Get the OS of the current system
fn native() -> Self;

/// Gets the OS + Arch of the current system
fn native_arch(java_arch: &str) -> Self;

/// Gets the OS from an OS + Arch
fn get_os(&self) -> Self;
}

impl OsExt for Os {
fn native() -> Self {
match std::env::consts::OS {
"windows" => Self::Windows,
"macos" => Self::Osx,
"linux" => Self::Linux,
_ => Self::Unknown,
}
}

fn native_arch(java_arch: &str) -> Self {
if std::env::consts::OS == "windows" {
if java_arch == "aarch64" {
Os::WindowsArm64
} else {
Os::Windows
}
} else if std::env::consts::OS == "linux" {
if java_arch == "aarch64" {
Os::LinuxArm64
} else if java_arch == "arm" {
Os::LinuxArm32
} else {
Os::Linux
}
} else if std::env::consts::OS == "macos" {
if java_arch == "aarch64" {
Os::OsxArm64
} else {
Os::Osx
}
} else {
Os::Unknown
}
}

fn get_os(&self) -> Self {
match self {
Os::OsxArm64 => Os::Osx,
Os::LinuxArm32 => Os::Linux,
Os::LinuxArm64 => Os::Linux,
Os::WindowsArm64 => Os::Windows,
_ => self.clone(),
}
}
}

// Bit width
#[cfg(target_pointer_width = "64")]
pub const ARCH_WIDTH: &str = "64";
packages/app-lib/src/util/rpc.rs (new file, 258 lines)
@@ -0,0 +1,258 @@
use crate::prelude::tcp_listen_any_loopback;
use crate::{ErrorKind, Result};
use futures::{SinkExt, StreamExt};
use serde::de::DeserializeOwned;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::collections::HashMap;
use std::net::SocketAddr;
use std::pin::Pin;
use std::sync::{Arc, Mutex};
use tokio::net::TcpListener;
use tokio::sync::{mpsc, oneshot};
use tokio::task::AbortHandle;
use tokio_util::codec::{Decoder, LinesCodec, LinesCodecError};
use uuid::Uuid;

type HandlerFuture = Pin<Box<dyn Send + Future<Output = Result<Value>>>>;
type HandlerMethod = Box<dyn Send + Sync + Fn(Vec<Value>) -> HandlerFuture>;
type HandlerMap = HashMap<&'static str, HandlerMethod>;
type WaitingResponsesMap =
Arc<Mutex<HashMap<Uuid, oneshot::Sender<Result<Value>>>>>;

pub struct RpcServerBuilder {
handlers: HandlerMap,
}

impl RpcServerBuilder {
pub fn new() -> Self {
Self {
handlers: HashMap::new(),
}
}

// We'll use this function in the future. Please remove this #[allow] when we do.
#[allow(dead_code)]
pub fn handler(
mut self,
function_name: &'static str,
handler: HandlerMethod,
) -> Self {
self.handlers.insert(function_name, Box::new(handler));
self
}

pub async fn launch(self) -> Result<RpcServer> {
let socket = tcp_listen_any_loopback().await?;
let address = socket.local_addr()?;
let (message_sender, message_receiver) = mpsc::unbounded_channel();
let waiting_responses = Arc::new(Mutex::new(HashMap::new()));

let join_handle = {
let waiting_responses = waiting_responses.clone();
tokio::spawn(async move {
let mut server = RunningRpcServer {
message_receiver,
handlers: self.handlers,
waiting_responses: waiting_responses.clone(),
};
if let Err(e) = server.run(socket).await {
tracing::error!("Failed to run RPC server: {e}");
}
waiting_responses.lock().unwrap().clear();
})
};
Ok(RpcServer {
address,
message_sender,
waiting_responses,
abort_handle: join_handle.abort_handle(),
})
}
}

#[derive(Debug, Clone)]
pub struct RpcServer {
address: SocketAddr,
message_sender: mpsc::UnboundedSender<RpcMessage>,
waiting_responses: WaitingResponsesMap,
abort_handle: AbortHandle,
}

impl RpcServer {
pub fn address(&self) -> SocketAddr {
self.address
}

pub async fn call_method<R: DeserializeOwned>(
&self,
method: &str,
) -> Result<R> {
self.call_method_any(method, vec![]).await
}

pub async fn call_method_2<R: DeserializeOwned>(
&self,
method: &str,
arg1: impl Serialize,
arg2: impl Serialize,
) -> Result<R> {
self.call_method_any(
method,
vec![serde_json::to_value(arg1)?, serde_json::to_value(arg2)?],
)
.await
}

async fn call_method_any<R: DeserializeOwned>(
&self,
method: &str,
args: Vec<Value>,
) -> Result<R> {
if self.message_sender.is_closed() {
return Err(ErrorKind::RpcError(
"RPC connection closed".to_string(),
)
.into());
}

let id = Uuid::new_v4();
let (send, recv) = oneshot::channel();
self.waiting_responses.lock().unwrap().insert(id, send);

let message = RpcMessage {
id,
body: RpcMessageBody::Call {
method: method.to_owned(),
args,
},
};
if self.message_sender.send(message).is_err() {
self.waiting_responses.lock().unwrap().remove(&id);
return Err(ErrorKind::RpcError(
"RPC connection closed while sending".to_string(),
)
.into());
}

tracing::debug!("Waiting on result for {id}");
let Ok(result) = recv.await else {
self.waiting_responses.lock().unwrap().remove(&id);
return Err(ErrorKind::RpcError(
"RPC connection closed while waiting for response".to_string(),
)
.into());
};
result.and_then(|x| Ok(serde_json::from_value(x)?))
}
}

impl Drop for RpcServer {
fn drop(&mut self) {
self.abort_handle.abort();
}
}

struct RunningRpcServer {
message_receiver: mpsc::UnboundedReceiver<RpcMessage>,
handlers: HandlerMap,
waiting_responses: WaitingResponsesMap,
}

impl RunningRpcServer {
async fn run(&mut self, listener: TcpListener) -> Result<()> {
let (socket, _) = listener.accept().await?;
drop(listener);

let mut socket = LinesCodec::new().framed(socket);
loop {
let to_send = tokio::select! {
message = self.message_receiver.recv() => {
if message.is_none() {
break;
}
message
},
message = socket.next() => {
let message: RpcMessage = match message {
None => break,
Some(Ok(message)) => serde_json::from_str(&message)?,
Some(Err(LinesCodecError::Io(e))) => Err(e)?,
Some(Err(LinesCodecError::MaxLineLengthExceeded)) => unreachable!(),
};
self.handle_message(message).await?
},
};
if let Some(message) = to_send {
let json = serde_json::to_string(&message)?;
match socket.send(json).await {
Ok(()) => {}
Err(LinesCodecError::Io(e)) => Err(e)?,
Err(LinesCodecError::MaxLineLengthExceeded) => {
unreachable!()
}
};
}
}
Ok(())
}

async fn handle_message(
&self,
message: RpcMessage,
) -> Result<Option<RpcMessage>> {
if let RpcMessageBody::Call { method, args } = message.body {
let response = match self.handlers.get(method.as_str()) {
Some(handler) => match handler(args).await {
Ok(result) => RpcMessageBody::Respond { response: result },
Err(e) => RpcMessageBody::Error {
error: e.to_string(),
},
},
None => RpcMessageBody::Error {
error: format!("Unknown theseus RPC method {method}"),
},
};
Ok(Some(RpcMessage {
id: message.id,
body: response,
}))
} else if let Some(sender) =
self.waiting_responses.lock().unwrap().remove(&message.id)
{
let _ = sender.send(match message.body {
RpcMessageBody::Respond { response } => Ok(response),
RpcMessageBody::Error { error } => {
Err(ErrorKind::RpcError(error).into())
}
_ => unreachable!(),
});
Ok(None)
} else {
Ok(None)
}
}
}

#[derive(Debug, Serialize, Deserialize)]
struct RpcMessage {
id: Uuid,
#[serde(flatten)]
body: RpcMessageBody,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(untagged)]
enum RpcMessageBody {
Call {
method: String,
args: Vec<Value>,
},
Respond {
#[serde(default, skip_serializing_if = "Value::is_null")]
response: Value,
},
Error {
error: String,
},
}
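Because `RpcMessage` flattens its body and `RpcMessageBody` is untagged, the wire protocol is one JSON object per newline-delimited line, with the variant inferred from which fields are present; note that a bare `{"id": …}` deserializes as a `Respond` whose `response` defaults to null, courtesy of `#[serde(default)]`. A small serialization sketch using the types above, with the method name taken from the launch path earlier in this diff:

let call = RpcMessage {
    id: Uuid::new_v4(),
    body: RpcMessageBody::Call { method: "launch".into(), args: vec![] },
};
// One line on the wire; the untagged body contributes "method"/"args" directly:
// {"id":"8c4b42a0-…","method":"launch","args":[]}
println!("{}", serde_json::to_string(&call).unwrap());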