Migrate to SQLite for Internal Launcher Data (#1300)

* initial migration

* barebones profiles

* Finish profiles

* Add back file watcher

* UI support progress

* Finish most of cache

* Fix options page

* Fix forge, finish modrinth auth

* Accounts, process cache

* Run SQLX prepare

* Finish

* Run lint + actions

* Fix version to be compat with windows

* fix lint

* actually fix lint

* actually fix lint again
This commit is contained in:
Geometrically
2024-07-24 11:03:19 -07:00
committed by GitHub
parent 90f74427d9
commit 49a20a303a
156 changed files with 9208 additions and 8547 deletions

View File

@@ -0,0 +1,158 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n max_concurrent_writes, max_concurrent_downloads,\n theme, default_page, collapsed_navigation, advanced_rendering, native_decorations,\n discord_rpc, developer_mode, telemetry,\n onboarded,\n json(extra_launch_args) extra_launch_args, json(custom_env_vars) custom_env_vars,\n mc_memory_max, mc_force_fullscreen, mc_game_resolution_x, mc_game_resolution_y, hide_on_process_start,\n hook_pre_launch, hook_wrapper, hook_post_exit,\n custom_dir, prev_custom_dir, migrated\n FROM settings\n ",
"describe": {
"columns": [
{
"name": "max_concurrent_writes",
"ordinal": 0,
"type_info": "Int64"
},
{
"name": "max_concurrent_downloads",
"ordinal": 1,
"type_info": "Int64"
},
{
"name": "theme",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "default_page",
"ordinal": 3,
"type_info": "Text"
},
{
"name": "collapsed_navigation",
"ordinal": 4,
"type_info": "Int64"
},
{
"name": "advanced_rendering",
"ordinal": 5,
"type_info": "Int64"
},
{
"name": "native_decorations",
"ordinal": 6,
"type_info": "Int64"
},
{
"name": "discord_rpc",
"ordinal": 7,
"type_info": "Int64"
},
{
"name": "developer_mode",
"ordinal": 8,
"type_info": "Int64"
},
{
"name": "telemetry",
"ordinal": 9,
"type_info": "Int64"
},
{
"name": "onboarded",
"ordinal": 10,
"type_info": "Int64"
},
{
"name": "extra_launch_args",
"ordinal": 11,
"type_info": "Text"
},
{
"name": "custom_env_vars",
"ordinal": 12,
"type_info": "Text"
},
{
"name": "mc_memory_max",
"ordinal": 13,
"type_info": "Int64"
},
{
"name": "mc_force_fullscreen",
"ordinal": 14,
"type_info": "Int64"
},
{
"name": "mc_game_resolution_x",
"ordinal": 15,
"type_info": "Int64"
},
{
"name": "mc_game_resolution_y",
"ordinal": 16,
"type_info": "Int64"
},
{
"name": "hide_on_process_start",
"ordinal": 17,
"type_info": "Int64"
},
{
"name": "hook_pre_launch",
"ordinal": 18,
"type_info": "Text"
},
{
"name": "hook_wrapper",
"ordinal": 19,
"type_info": "Text"
},
{
"name": "hook_post_exit",
"ordinal": 20,
"type_info": "Text"
},
{
"name": "custom_dir",
"ordinal": 21,
"type_info": "Text"
},
{
"name": "prev_custom_dir",
"ordinal": 22,
"type_info": "Text"
},
{
"name": "migrated",
"ordinal": 23,
"type_info": "Int64"
}
],
"parameters": {
"Right": 0
},
"nullable": [
false,
false,
false,
false,
false,
false,
false,
false,
false,
false,
false,
null,
null,
false,
false,
false,
false,
false,
true,
true,
true,
true,
true,
false
]
},
"hash": "03d1aeddf7788320530c447a82342aecdb4099ce183dd9106c4bcc47604cb080"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "\n INSERT INTO minecraft_device_tokens (id, uuid, private_key, x, y, issue_instant, not_after, token, display_claims)\n VALUES (0, $1, $2, $3, $4, $5, $6, $7, $8)\n ON CONFLICT (id) DO UPDATE SET\n uuid = $1,\n private_key = $2,\n x = $3,\n y = $4,\n issue_instant = $5,\n not_after = $6,\n token = $7,\n display_claims = jsonb($8)\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 8
},
"nullable": []
},
"hash": "0cfb12e0553411b01b721d1c38ef27acd240bb2ff3e07dee962bf67e20f81f36"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "\n UPDATE minecraft_users\n SET active = FALSE\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 0
},
"nullable": []
},
"hash": "12f8b2b9f0acca2ea29aa6a77266b2b27efc6a0433bab1d4bbe10c69fd417494"
}

View File

@@ -0,0 +1,32 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n full_version, architecture, path\n FROM java_versions\n WHERE major_version = $1\n ",
"describe": {
"columns": [
{
"name": "full_version",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "architecture",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "path",
"ordinal": 2,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false
]
},
"hash": "1397c1825096fb402cdd3b5dae8cd3910b1719f433a0c34d40415dd7681ab272"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "\n DELETE FROM profiles\n WHERE path = $1\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 1
},
"nullable": []
},
"hash": "169ce6afb8e9739dacff3f4bea024ed28df292a063d615514c67a38301d71806"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "\n DELETE FROM processes WHERE pid = $1\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 1
},
"nullable": []
},
"hash": "1769b7033985bfdd04ee8912d9f28e0d15a8b893db47aca3aec054c7134f1f3f"
}

View File

@@ -0,0 +1,38 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n id, active, session_id, expires\n FROM modrinth_users\n WHERE active = TRUE\n ",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "active",
"ordinal": 1,
"type_info": "Int64"
},
{
"name": "session_id",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "expires",
"ordinal": 3,
"type_info": "Int64"
}
],
"parameters": {
"Right": 0
},
"nullable": [
false,
false,
false,
false
]
},
"hash": "18881c0c2ec1b0cc73fa13b4c242dfc577061b92479ce96ffb30a457939b5ffe"
}

View File

@@ -0,0 +1,38 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n major_version, full_version, architecture, path\n FROM java_versions\n ",
"describe": {
"columns": [
{
"name": "major_version",
"ordinal": 0,
"type_info": "Int64"
},
{
"name": "full_version",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "architecture",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "path",
"ordinal": 3,
"type_info": "Text"
}
],
"parameters": {
"Right": 0
},
"nullable": [
false,
false,
false,
false
]
},
"hash": "265f9c9ad992da0aeaf69c3f0077b54a186b98796ec549c9d891089ea33cf3fc"
}

View File

@@ -0,0 +1,44 @@
{
"db_name": "SQLite",
"query": "\n SELECT id, data_type, json(data) as \"data?: serde_json::Value\", alias, expires\n FROM cache\n WHERE data_type = $1 AND (\n id IN (SELECT value FROM json_each($2))\n OR\n alias IN (SELECT value FROM json_each($3))\n )\n ",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "data_type",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "data?: serde_json::Value",
"ordinal": 2,
"type_info": "Null"
},
{
"name": "alias",
"ordinal": 3,
"type_info": "Text"
},
{
"name": "expires",
"ordinal": 4,
"type_info": "Int64"
}
],
"parameters": {
"Right": 3
},
"nullable": [
false,
false,
null,
true,
false
]
},
"hash": "28b3e3132d75e551c1fa14b8d3be36adca581f8ad1b90f85d3ec3d92ec61e65e"
}

View File

@@ -0,0 +1,50 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n pid, start_time, name, executable, profile_path, post_exit_command\n FROM processes\n WHERE 1=$1",
"describe": {
"columns": [
{
"name": "pid",
"ordinal": 0,
"type_info": "Int64"
},
{
"name": "start_time",
"ordinal": 1,
"type_info": "Int64"
},
{
"name": "name",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "executable",
"ordinal": 3,
"type_info": "Text"
},
{
"name": "profile_path",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "post_exit_command",
"ordinal": 5,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
false,
false,
true
]
},
"hash": "3cac786ad15ef1167bc50ca846d98facb3dee35c9e421209c1161ee7380b7a74"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "\n UPDATE modrinth_users\n SET active = FALSE\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 0
},
"nullable": []
},
"hash": "45c692b305b36540139b5956dcff5bd5aeacec7d0a8abd640a7365902e57a2fd"
}

View File

@@ -0,0 +1,170 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n path, install_stage, name, icon_path,\n game_version, mod_loader, mod_loader_version,\n json(groups) as \"groups!: serde_json::Value\",\n linked_project_id, linked_version_id, locked,\n created, modified, last_played,\n submitted_time_played, recent_time_played,\n override_java_path,\n json(override_extra_launch_args) as \"override_extra_launch_args!: serde_json::Value\", json(override_custom_env_vars) as \"override_custom_env_vars!: serde_json::Value\",\n override_mc_memory_max, override_mc_force_fullscreen, override_mc_game_resolution_x, override_mc_game_resolution_y,\n override_hook_pre_launch, override_hook_wrapper, override_hook_post_exit\n FROM profiles\n WHERE 1=$1",
"describe": {
"columns": [
{
"name": "path",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "install_stage",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "name",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "icon_path",
"ordinal": 3,
"type_info": "Text"
},
{
"name": "game_version",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "mod_loader",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "mod_loader_version",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "groups!: serde_json::Value",
"ordinal": 7,
"type_info": "Null"
},
{
"name": "linked_project_id",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "linked_version_id",
"ordinal": 9,
"type_info": "Text"
},
{
"name": "locked",
"ordinal": 10,
"type_info": "Int64"
},
{
"name": "created",
"ordinal": 11,
"type_info": "Int64"
},
{
"name": "modified",
"ordinal": 12,
"type_info": "Int64"
},
{
"name": "last_played",
"ordinal": 13,
"type_info": "Int64"
},
{
"name": "submitted_time_played",
"ordinal": 14,
"type_info": "Int64"
},
{
"name": "recent_time_played",
"ordinal": 15,
"type_info": "Int64"
},
{
"name": "override_java_path",
"ordinal": 16,
"type_info": "Text"
},
{
"name": "override_extra_launch_args!: serde_json::Value",
"ordinal": 17,
"type_info": "Null"
},
{
"name": "override_custom_env_vars!: serde_json::Value",
"ordinal": 18,
"type_info": "Null"
},
{
"name": "override_mc_memory_max",
"ordinal": 19,
"type_info": "Int64"
},
{
"name": "override_mc_force_fullscreen",
"ordinal": 20,
"type_info": "Int64"
},
{
"name": "override_mc_game_resolution_x",
"ordinal": 21,
"type_info": "Int64"
},
{
"name": "override_mc_game_resolution_y",
"ordinal": 22,
"type_info": "Int64"
},
{
"name": "override_hook_pre_launch",
"ordinal": 23,
"type_info": "Text"
},
{
"name": "override_hook_wrapper",
"ordinal": 24,
"type_info": "Text"
},
{
"name": "override_hook_post_exit",
"ordinal": 25,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
true,
false,
false,
true,
null,
true,
true,
true,
false,
false,
true,
false,
false,
true,
null,
null,
true,
true,
true,
true,
true,
true,
true
]
},
"hash": "4acd47f6bad3d2d4df5e5d43b3441fa2714cb8ad978adc108acc67f042380df1"
}

View File

@@ -0,0 +1,170 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n path, install_stage, name, icon_path,\n game_version, mod_loader, mod_loader_version,\n json(groups) as \"groups!: serde_json::Value\",\n linked_project_id, linked_version_id, locked,\n created, modified, last_played,\n submitted_time_played, recent_time_played,\n override_java_path,\n json(override_extra_launch_args) as \"override_extra_launch_args!: serde_json::Value\", json(override_custom_env_vars) as \"override_custom_env_vars!: serde_json::Value\",\n override_mc_memory_max, override_mc_force_fullscreen, override_mc_game_resolution_x, override_mc_game_resolution_y,\n override_hook_pre_launch, override_hook_wrapper, override_hook_post_exit\n FROM profiles\n WHERE path IN (SELECT value FROM json_each($1))",
"describe": {
"columns": [
{
"name": "path",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "install_stage",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "name",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "icon_path",
"ordinal": 3,
"type_info": "Text"
},
{
"name": "game_version",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "mod_loader",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "mod_loader_version",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "groups!: serde_json::Value",
"ordinal": 7,
"type_info": "Null"
},
{
"name": "linked_project_id",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "linked_version_id",
"ordinal": 9,
"type_info": "Text"
},
{
"name": "locked",
"ordinal": 10,
"type_info": "Int64"
},
{
"name": "created",
"ordinal": 11,
"type_info": "Int64"
},
{
"name": "modified",
"ordinal": 12,
"type_info": "Int64"
},
{
"name": "last_played",
"ordinal": 13,
"type_info": "Int64"
},
{
"name": "submitted_time_played",
"ordinal": 14,
"type_info": "Int64"
},
{
"name": "recent_time_played",
"ordinal": 15,
"type_info": "Int64"
},
{
"name": "override_java_path",
"ordinal": 16,
"type_info": "Text"
},
{
"name": "override_extra_launch_args!: serde_json::Value",
"ordinal": 17,
"type_info": "Null"
},
{
"name": "override_custom_env_vars!: serde_json::Value",
"ordinal": 18,
"type_info": "Null"
},
{
"name": "override_mc_memory_max",
"ordinal": 19,
"type_info": "Int64"
},
{
"name": "override_mc_force_fullscreen",
"ordinal": 20,
"type_info": "Int64"
},
{
"name": "override_mc_game_resolution_x",
"ordinal": 21,
"type_info": "Int64"
},
{
"name": "override_mc_game_resolution_y",
"ordinal": 22,
"type_info": "Int64"
},
{
"name": "override_hook_pre_launch",
"ordinal": 23,
"type_info": "Text"
},
{
"name": "override_hook_wrapper",
"ordinal": 24,
"type_info": "Text"
},
{
"name": "override_hook_post_exit",
"ordinal": 25,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
true,
false,
false,
true,
null,
true,
true,
true,
false,
false,
true,
false,
false,
true,
null,
null,
true,
true,
true,
true,
true,
true,
true
]
},
"hash": "5265d5ad85da898855d628f6b45e39026908fc950aad3c7797be37b5d0b74094"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "\n DELETE FROM modrinth_users WHERE id = $1\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 1
},
"nullable": []
},
"hash": "554805c9902e5a1cc4c0f03b4a633e6dc5b1d46f9c2454075eefe8df9a38f582"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "\n INSERT INTO java_versions (major_version, full_version, architecture, path)\n VALUES ($1, $2, $3, $4)\n ON CONFLICT (major_version) DO UPDATE SET\n full_version = $2,\n architecture = $3,\n path = $4\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 4
},
"nullable": []
},
"hash": "55ad9c6b0b3172f0528e7ccd60f7c51c77946643b8f912fe265207da275a280f"
}

View File

@@ -0,0 +1,50 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n pid, start_time, name, executable, profile_path, post_exit_command\n FROM processes\n WHERE profile_path = $1",
"describe": {
"columns": [
{
"name": "pid",
"ordinal": 0,
"type_info": "Int64"
},
{
"name": "start_time",
"ordinal": 1,
"type_info": "Int64"
},
{
"name": "name",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "executable",
"ordinal": 3,
"type_info": "Text"
},
{
"name": "profile_path",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "post_exit_command",
"ordinal": 5,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
false,
false,
true
]
},
"hash": "5f07a8b45063167074db8b3da51e220a7a0f5879fb8978d4033e259102ae3790"
}

View File

@@ -0,0 +1,38 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n id, active, session_id, expires\n FROM modrinth_users\n ",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "active",
"ordinal": 1,
"type_info": "Int64"
},
{
"name": "session_id",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "expires",
"ordinal": 3,
"type_info": "Int64"
}
],
"parameters": {
"Right": 0
},
"nullable": [
false,
false,
false,
false
]
},
"hash": "6d7ebc0f233dc730fa8c99c750421065f5e35f321954a9d5ae9cde907d5ce823"
}

View File

@@ -0,0 +1,62 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n uuid, private_key, x, y, issue_instant, not_after, token, json(display_claims) as \"display_claims!: serde_json::Value\"\n FROM minecraft_device_tokens\n ",
"describe": {
"columns": [
{
"name": "uuid",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "private_key",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "x",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "y",
"ordinal": 3,
"type_info": "Text"
},
{
"name": "issue_instant",
"ordinal": 4,
"type_info": "Int64"
},
{
"name": "not_after",
"ordinal": 5,
"type_info": "Int64"
},
{
"name": "token",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "display_claims!: serde_json::Value",
"ordinal": 7,
"type_info": "Null"
}
],
"parameters": {
"Right": 0
},
"nullable": [
false,
false,
false,
false,
false,
false,
false,
null
]
},
"hash": "6e3fa492c085ebb8e7280dd4d55cdcf73da199ea6ac05ee3ee798ece80d877cf"
}

View File

@@ -0,0 +1,50 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n uuid, active, username, access_token, refresh_token, expires\n FROM minecraft_users\n ",
"describe": {
"columns": [
{
"name": "uuid",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "active",
"ordinal": 1,
"type_info": "Int64"
},
{
"name": "username",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "access_token",
"ordinal": 3,
"type_info": "Text"
},
{
"name": "refresh_token",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "expires",
"ordinal": 5,
"type_info": "Int64"
}
],
"parameters": {
"Right": 0
},
"nullable": [
false,
false,
false,
false,
false,
false
]
},
"hash": "727e3e1bc8625bbcb833920059bb8cea926ac6c65d613904eff1d740df30acda"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "\n INSERT INTO modrinth_users (id, active, session_id, expires)\n VALUES ($1, $2, $3, $4)\n ON CONFLICT (id) DO UPDATE SET\n active = $2,\n session_id = $3,\n expires = $4\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 4
},
"nullable": []
},
"hash": "81a80df2f3fdbbb78d45e7420609c3ae945bc499b4229906c487533d1dcb280c"
}

View File

@@ -0,0 +1,50 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n uuid, active, username, access_token, refresh_token, expires\n FROM minecraft_users\n WHERE active = TRUE\n ",
"describe": {
"columns": [
{
"name": "uuid",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "active",
"ordinal": 1,
"type_info": "Int64"
},
{
"name": "username",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "access_token",
"ordinal": 3,
"type_info": "Text"
},
{
"name": "refresh_token",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "expires",
"ordinal": 5,
"type_info": "Int64"
}
],
"parameters": {
"Right": 0
},
"nullable": [
false,
false,
false,
false,
false,
false
]
},
"hash": "bf7d47350092d87c478009adaab131168e87bb37aa65c2156ad2cb6198426d8c"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "\n INSERT INTO processes (pid, start_time, name, executable, profile_path, post_exit_command)\n VALUES ($1, $2, $3, $4, $5, $6)\n ON CONFLICT (pid) DO UPDATE SET\n start_time = $2,\n name = $3,\n executable = $4,\n profile_path = $5,\n post_exit_command = $6\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 6
},
"nullable": []
},
"hash": "d1b8f27c8150f9ae514a7c9ddc68f4a59f08b7df1c65758539220d7211ade682"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "\n DELETE FROM minecraft_users WHERE uuid = $1\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 1
},
"nullable": []
},
"hash": "d21e8a5116c43a3b511321a2655d8217f8c46b816a2f4e60c11dfcd173120e7e"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "\n INSERT INTO cache (id, data_type, alias, data, expires)\n SELECT\n json_extract(value, '$.id') AS id,\n json_extract(value, '$.data_type') AS data_type,\n json_extract(value, '$.alias') AS alias,\n json_extract(value, '$.data') AS data,\n json_extract(value, '$.expires') AS expires\n FROM\n json_each($1)\n WHERE TRUE\n ON CONFLICT (id, data_type) DO UPDATE SET\n alias = excluded.alias,\n data = excluded.data,\n expires = excluded.expires\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 1
},
"nullable": []
},
"hash": "d63935a6e411b5ea145dfa1d4772899303d9b82b1ecd2e30dc71b411ee538f54"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "\n UPDATE settings\n SET\n max_concurrent_writes = $1,\n max_concurrent_downloads = $2,\n\n theme = $3,\n default_page = $4,\n collapsed_navigation = $5,\n advanced_rendering = $6,\n native_decorations = $7,\n\n discord_rpc = $8,\n developer_mode = $9,\n telemetry = $10,\n\n onboarded = $11,\n\n extra_launch_args = jsonb($12),\n custom_env_vars = jsonb($13),\n mc_memory_max = $14,\n mc_force_fullscreen = $15,\n mc_game_resolution_x = $16,\n mc_game_resolution_y = $17,\n hide_on_process_start = $18,\n\n hook_pre_launch = $19,\n hook_wrapper = $20,\n hook_post_exit = $21,\n\n custom_dir = $22,\n prev_custom_dir = $23,\n migrated = $24\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 24
},
"nullable": []
},
"hash": "d645daf951ff6fead3c86df685d99bacc81cb0a999c0f8d2ff7755b0089a79d8"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "\n INSERT INTO minecraft_users (uuid, active, username, access_token, refresh_token, expires)\n VALUES ($1, $2, $3, $4, $5, $6)\n ON CONFLICT (uuid) DO UPDATE SET\n active = $2,\n username = $3,\n access_token = $4,\n refresh_token = $5,\n expires = $6\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 6
},
"nullable": []
},
"hash": "d719cf2f6f87c5ea7ea6ace2d6a1828ee58a724f06a91633b8a40b4e04d0b9a0"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "\n INSERT INTO profiles (\n path, install_stage, name, icon_path,\n game_version, mod_loader, mod_loader_version,\n groups,\n linked_project_id, linked_version_id, locked,\n created, modified, last_played,\n submitted_time_played, recent_time_played,\n override_java_path, override_extra_launch_args, override_custom_env_vars,\n override_mc_memory_max, override_mc_force_fullscreen, override_mc_game_resolution_x, override_mc_game_resolution_y,\n override_hook_pre_launch, override_hook_wrapper, override_hook_post_exit\n )\n VALUES (\n $1, $2, $3, $4,\n $5, $6, $7,\n jsonb($8),\n $9, $10, $11,\n $12, $13, $14,\n $15, $16,\n $17, jsonb($18), jsonb($19),\n $20, $21, $22, $23,\n $24, $25, $26\n )\n ON CONFLICT (path) DO UPDATE SET\n install_stage = $2,\n name = $3,\n icon_path = $4,\n\n game_version = $5,\n mod_loader = $6,\n mod_loader_version = $7,\n\n groups = jsonb($8),\n\n linked_project_id = $9,\n linked_version_id = $10,\n locked = $11,\n\n created = $12,\n modified = $13,\n last_played = $14,\n\n submitted_time_played = $15,\n recent_time_played = $16,\n\n override_java_path = $17,\n override_extra_launch_args = jsonb($18),\n override_custom_env_vars = jsonb($19),\n override_mc_memory_max = $20,\n override_mc_force_fullscreen = $21,\n override_mc_game_resolution_x = $22,\n override_mc_game_resolution_y = $23,\n\n override_hook_pre_launch = $24,\n override_hook_wrapper = $25,\n override_hook_post_exit = $26\n ",
"describe": {
"columns": [],
"parameters": {
"Right": 26
},
"nullable": []
},
"hash": "db1f94b9c17c790c029a7691620d6bbdcbdfcce4b069b8ed46dc3abd2f5f4e58"
}

View File

@@ -0,0 +1,50 @@
{
"db_name": "SQLite",
"query": "\n SELECT\n pid, start_time, name, executable, profile_path, post_exit_command\n FROM processes\n WHERE pid = $1",
"describe": {
"columns": [
{
"name": "pid",
"ordinal": 0,
"type_info": "Int64"
},
{
"name": "start_time",
"ordinal": 1,
"type_info": "Int64"
},
{
"name": "name",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "executable",
"ordinal": 3,
"type_info": "Text"
},
{
"name": "profile_path",
"ordinal": 4,
"type_info": "Text"
},
{
"name": "post_exit_command",
"ordinal": 5,
"type_info": "Text"
}
],
"parameters": {
"Right": 1
},
"nullable": [
false,
false,
false,
false,
false,
true
]
},
"hash": "e18e960d33a140e522ca20b91d63560b921b922701b69d868dc231f6b0f4cf1c"
}

View File

@@ -1,14 +1,12 @@
[package]
name = "theseus"
version = "0.7.2"
version = "0.0.0"
authors = ["Jai A <jaiagr+gpg@pm.me>"]
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
theseus_macros = { path = "../app-macros" }
bytes = "1"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
@@ -23,10 +21,10 @@ async_zip = { version = "0.0.17", features = ["full"] }
flate2 = "1.0.28"
tempfile = "3.5.0"
urlencoding = "2.1.3"
dashmap = { version = "6.0.1", features = ["serde"] }
chrono = { version = "0.4.19", features = ["serde"] }
daedalus = { version = "0.1.25" }
daedalus = { version = "0.2.2" }
dirs = "5.0.1"
regex = "1.5"
@@ -59,13 +57,18 @@ dunce = "1.0.3"
whoami = "1.4.0"
discord-rich-presence = "0.2.3"
discord-rich-presence = "0.2.4"
p256 = { version = "0.13.2", features = ["ecdsa"] }
rand = "0.8"
byteorder = "1.5.0"
base64 = "0.22.0"
# TODO: Remove when new SQLX version is released
# We force-upgrade SQLite so JSONB support is added (theseus)
# https://github.com/launchbadge/sqlx/commit/352b02de6af70f1ff1bfbd15329120589a0f7337
sqlx = { git = "https://github.com/launchbadge/sqlx.git", rev = "352b02de6af70f1ff1bfbd15329120589a0f7337", features = [ "runtime-tokio", "sqlite", "macros"] }
[target.'cfg(windows)'.dependencies]
winreg = "0.52.0"

View File

@@ -0,0 +1,158 @@
-- Global launcher settings. CHECK (id = 0) together with the PRIMARY KEY
-- restricts this table to a single row, seeded right after creation below.
CREATE TABLE settings (
    id INTEGER NOT NULL CHECK (id = 0),

    max_concurrent_downloads INTEGER NOT NULL DEFAULT 10,
    max_concurrent_writes INTEGER NOT NULL DEFAULT 10,

    theme TEXT NOT NULL DEFAULT 'dark',
    default_page TEXT NOT NULL DEFAULT 'home',
    collapsed_navigation INTEGER NOT NULL DEFAULT TRUE,
    advanced_rendering INTEGER NOT NULL DEFAULT TRUE,
    native_decorations INTEGER NOT NULL DEFAULT FALSE,

    telemetry INTEGER NOT NULL DEFAULT TRUE,
    discord_rpc INTEGER NOT NULL DEFAULT TRUE,
    developer_mode INTEGER NOT NULL DEFAULT FALSE,

    onboarded INTEGER NOT NULL DEFAULT FALSE,

    -- array of strings
    extra_launch_args JSONB NOT NULL,
    -- array of (string, string)
    custom_env_vars JSONB NOT NULL,
    mc_memory_max INTEGER NOT NULL DEFAULT 2048,
    mc_force_fullscreen INTEGER NOT NULL DEFAULT FALSE,
    mc_game_resolution_x INTEGER NOT NULL DEFAULT 854,
    mc_game_resolution_y INTEGER NOT NULL DEFAULT 480,
    hide_on_process_start INTEGER NOT NULL DEFAULT FALSE,

    hook_pre_launch TEXT NULL,
    hook_wrapper TEXT NULL,
    hook_post_exit TEXT NULL,

    custom_dir TEXT NULL,
    prev_custom_dir TEXT NULL,
    migrated INTEGER NOT NULL DEFAULT FALSE,

    PRIMARY KEY (id)
);
-- Seed the singleton row; the JSON columns start as empty arrays
-- (jsonb_array() requires SQLite >= 3.45, which this commit force-upgrades to).
INSERT INTO settings (id, extra_launch_args, custom_env_vars) VALUES (0, jsonb_array(), jsonb_array());

-- Detected Java installations, at most one per major version
-- (upserts are keyed on major_version).
CREATE TABLE java_versions (
    major_version INTEGER NOT NULL,
    full_version TEXT NOT NULL,
    architecture TEXT NOT NULL,
    path TEXT NOT NULL,
    PRIMARY KEY (major_version)
);

-- Minecraft (Microsoft) accounts known to the launcher.
CREATE TABLE minecraft_users (
    uuid TEXT NOT NULL,
    active INTEGER NOT NULL DEFAULT FALSE,
    username TEXT NOT NULL,
    access_token TEXT NOT NULL,
    refresh_token TEXT NOT NULL,
    expires INTEGER NOT NULL,
    PRIMARY KEY (uuid)
);
-- At most one account may be active at a time. This must be a PARTIAL unique
-- index: a full unique index on `active` would also forbid two rows with
-- active = FALSE, so "UPDATE minecraft_users SET active = FALSE" (the
-- deselect-all query used elsewhere in this migration's codebase) would fail
-- as soon as a second account is added.
CREATE UNIQUE INDEX minecraft_users_active ON minecraft_users(active) WHERE active = TRUE;

-- Device token for Xbox/Minecraft device auth; singleton row using the same
-- CHECK (id = 0) pattern as `settings`.
CREATE TABLE minecraft_device_tokens (
    id INTEGER NOT NULL CHECK (id = 0),
    uuid TEXT NOT NULL,
    private_key TEXT NOT NULL,
    x TEXT NOT NULL,
    y TEXT NOT NULL,
    issue_instant INTEGER NOT NULL,
    not_after INTEGER NOT NULL,
    token TEXT NOT NULL,
    display_claims JSONB NOT NULL,
    PRIMARY KEY (id)
);

-- Modrinth accounts; same single-active-row rule as minecraft_users,
-- enforced with the same partial-index pattern.
CREATE TABLE modrinth_users (
    id TEXT NOT NULL,
    active INTEGER NOT NULL DEFAULT FALSE,
    session_id TEXT NOT NULL,
    expires INTEGER NOT NULL,
    PRIMARY KEY (id)
);
CREATE UNIQUE INDEX modrinth_users_active ON modrinth_users(active) WHERE active = TRUE;

-- Generic API-response cache keyed by (id, data_type), with an optional
-- secondary lookup alias. NULL aliases are exempt from the UNIQUE constraint
-- (SQLite treats NULLs as distinct in unique indexes), so only non-NULL
-- aliases must be unique per data_type.
CREATE TABLE cache (
    id TEXT NOT NULL,
    data_type TEXT NOT NULL,
    alias TEXT NULL,
    data JSONB NULL,
    expires INTEGER NOT NULL,
    UNIQUE (data_type, alias),
    PRIMARY KEY (id, data_type)
);

-- Installed game instances, keyed by their directory path.
CREATE TABLE profiles (
    path TEXT NOT NULL,
    install_stage TEXT NOT NULL,
    name TEXT NOT NULL,
    icon_path TEXT NULL,
    game_version TEXT NOT NULL,
    mod_loader TEXT NOT NULL,
    mod_loader_version TEXT NULL,
    -- array of strings
    groups JSONB NOT NULL,
    linked_project_id TEXT NULL,
    linked_version_id TEXT NULL,
    locked INTEGER NULL,
    created INTEGER NOT NULL,
    modified INTEGER NOT NULL,
    last_played INTEGER NULL,
    submitted_time_played INTEGER NOT NULL DEFAULT 0,
    recent_time_played INTEGER NOT NULL DEFAULT 0,
    override_java_path TEXT NULL,
    -- array of strings
    override_extra_launch_args JSONB NOT NULL,
    -- array of (string, string)
    override_custom_env_vars JSONB NOT NULL,
    override_mc_memory_max INTEGER NULL,
    override_mc_force_fullscreen INTEGER NULL,
    override_mc_game_resolution_x INTEGER NULL,
    override_mc_game_resolution_y INTEGER NULL,
    override_hook_pre_launch TEXT NULL,
    override_hook_wrapper TEXT NULL,
    override_hook_post_exit TEXT NULL,
    PRIMARY KEY (path)
);

-- Tracked game processes, one row per OS pid. `pid` is an INTEGER PRIMARY KEY
-- (a rowid alias in SQLite), so a separate UNIQUE (pid) constraint would only
-- build a redundant second index and has been omitted.
CREATE TABLE processes (
    pid INTEGER NOT NULL,
    start_time INTEGER NOT NULL,
    name TEXT NOT NULL,
    executable TEXT NOT NULL,
    profile_path TEXT NOT NULL,
    post_exit_command TEXT NULL,
    PRIMARY KEY (pid),
    FOREIGN KEY (profile_path) REFERENCES profiles(path)
);
-- Supports the frequent "processes by profile" lookup.
CREATE INDEX processes_profile_path ON processes(profile_path);

View File

@@ -5,8 +5,5 @@
"lint": "cargo fmt --check && cargo clippy -- -D warnings",
"fix": "cargo fmt && cargo clippy --fix",
"test": "cargo test"
},
"dependencies": {
"@modrinth/app-macros": "workspace:*"
}
}

View File

@@ -0,0 +1,42 @@
use crate::state::{
CachedEntry, Organization, Project, SearchResults, TeamMember, User,
Version,
};
// Generates a pair of public async accessors for each cached Modrinth entity
// type: `get_<variant>` (single id) and `get_<variant>_many` (batch). Both
// delegate to the corresponding `CachedEntry::get_*_many` lookup, passing the
// global state's DB pool and API semaphore; the `None` argument is an extra
// option to the lookup whose semantics are defined by `CachedEntry`
// (presumably a cache-behavior override — TODO confirm at its definition).
macro_rules! impl_cache_methods {
($(($variant:ident, $type:ty)),*) => {
$(
paste::paste! {
// Single-item lookup: batch-fetch with a one-element id slice and
// return the first (only possible) result, `None` when absent.
#[tracing::instrument]
pub async fn [<get_ $variant:snake>](
id: &str,
) -> crate::Result<Option<$type>>
{
let state = crate::State::get().await?;
Ok(CachedEntry::[<get_ $variant:snake _many>](&[id], None, &state.pool, &state.api_semaphore).await?.into_iter().next())
}
// Batch lookup: forwards the id slice unchanged; callers get whatever
// subset `CachedEntry` resolves (not necessarily one entry per id).
#[tracing::instrument]
pub async fn [<get_ $variant:snake _many>](
ids: &[&str],
) -> crate::Result<Vec<$type>>
{
let state = crate::State::get().await?;
let entries =
CachedEntry::[<get_ $variant:snake _many>](ids, None, &state.pool, &state.api_semaphore).await?;
Ok(entries)
}
}
)*
}
}
// Instantiate the accessors for every cached entity. The first tuple element
// names the generated functions (snake-cased); the second is the return type,
// which may differ from the variant name (e.g. Team -> Vec<TeamMember>).
impl_cache_methods!(
(Project, Project),
(Version, Version),
(User, User),
(Team, Vec<TeamMember>),
(Organization, Organization),
(SearchResults, SearchResults)
);

View File

@@ -1,19 +1,31 @@
//! Authentication flow interface
use crate::event::emit::{emit_loading, init_loading};
use crate::state::JavaVersion;
use crate::util::fetch::{fetch_advanced, fetch_json};
use dashmap::DashMap;
use reqwest::Method;
use serde::Deserialize;
use std::path::PathBuf;
use crate::event::emit::{emit_loading, init_loading};
use crate::state::CredentialsStore;
use crate::util::fetch::{fetch_advanced, fetch_json};
use crate::util::io;
use crate::util::jre::extract_java_majorminor_version;
use crate::{
util::jre::{self, JavaVersion},
util::jre::{self},
LoadingBarType, State,
};
/// Fetch every Java installation recorded in the launcher database,
/// keyed by major version.
pub async fn get_java_versions() -> crate::Result<DashMap<u32, JavaVersion>> {
    JavaVersion::get_all(&State::get().await?.pool).await
}
/// Persist (insert or update) a Java installation record in the launcher
/// database.
pub async fn set_java_version(java_version: JavaVersion) -> crate::Result<()> {
    // Acquire the global state first; the upsert only needs its DB pool.
    let launcher_state = State::get().await?;
    java_version.upsert(&launcher_state.pool).await?;
    Ok(())
}
// Searches for jres on the system given a java version (ex: 1.8, 1.17, 1.18)
// Allow higher allows for versions higher than the given version to be returned ('at least')
pub async fn find_filtered_jres(
@@ -38,7 +50,6 @@ pub async fn find_filtered_jres(
})
}
#[theseus_macros::debug_pin]
pub async fn auto_install_java(java_version: u32) -> crate::Result<PathBuf> {
let state = State::get().await?;
@@ -67,7 +78,7 @@ pub async fn auto_install_java(java_version: u32) -> crate::Result<PathBuf> {
None,
None,
&state.fetch_semaphore,
&CredentialsStore(None),
&state.pool,
).await?;
emit_loading(&loading_bar, 10.0, Some("Downloading java version")).await?;
@@ -80,11 +91,11 @@ pub async fn auto_install_java(java_version: u32) -> crate::Result<PathBuf> {
None,
Some((&loading_bar, 80.0)),
&state.fetch_semaphore,
&CredentialsStore(None),
&state.pool,
)
.await?;
let path = state.directories.java_versions_dir().await;
let path = state.directories.java_versions_dir();
let mut archive = zip::ZipArchive::new(std::io::Cursor::new(file))
.map_err(|_| {

View File

@@ -9,9 +9,9 @@ use tokio::{
};
use crate::{
prelude::{Credentials, DirectoryInfo},
prelude::Credentials,
util::io::{self, IOError},
{state::ProfilePathId, State},
State,
};
#[derive(Serialize, Debug)]
@@ -66,7 +66,7 @@ impl Logs {
async fn build(
log_type: LogType,
age: SystemTime,
profile_subpath: &ProfilePathId,
profile_subpath: &str,
filename: String,
clear_contents: Option<bool>,
) -> crate::Result<Self> {
@@ -95,19 +95,20 @@ impl Logs {
#[tracing::instrument]
pub async fn get_logs_from_type(
profile_path: &ProfilePathId,
profile_path: &str,
log_type: LogType,
clear_contents: Option<bool>,
logs: &mut Vec<crate::Result<Logs>>,
) -> crate::Result<()> {
let state = State::get().await?;
let logs_folder = match log_type {
LogType::InfoLog => {
DirectoryInfo::profile_logs_dir(profile_path).await?
}
LogType::InfoLog => state.directories.profile_logs_dir(profile_path),
LogType::CrashReport => {
DirectoryInfo::crash_reports_dir(profile_path).await?
state.directories.crash_reports_dir(profile_path)
}
};
if logs_folder.exists() {
for entry in std::fs::read_dir(&logs_folder)
.map_err(|e| IOError::with_path(e, &logs_folder))?
@@ -142,21 +143,19 @@ pub async fn get_logs_from_type(
#[tracing::instrument]
pub async fn get_logs(
profile_path_id: ProfilePathId,
profile_path_id: &str,
clear_contents: Option<bool>,
) -> crate::Result<Vec<Logs>> {
let profile_path = profile_path_id.profile_path().await?;
let mut logs = Vec::new();
get_logs_from_type(
&profile_path,
profile_path_id,
LogType::InfoLog,
clear_contents,
&mut logs,
)
.await?;
get_logs_from_type(
&profile_path,
profile_path_id,
LogType::CrashReport,
clear_contents,
&mut logs,
@@ -170,54 +169,47 @@ pub async fn get_logs(
#[tracing::instrument]
pub async fn get_logs_by_filename(
profile_path_id: ProfilePathId,
profile_path: &str,
log_type: LogType,
filename: String,
) -> crate::Result<Logs> {
let profile_path = profile_path_id.profile_path().await?;
let state = State::get().await?;
let path = match log_type {
LogType::InfoLog => {
DirectoryInfo::profile_logs_dir(&profile_path).await
}
LogType::InfoLog => state.directories.profile_logs_dir(profile_path),
LogType::CrashReport => {
DirectoryInfo::crash_reports_dir(&profile_path).await
state.directories.crash_reports_dir(profile_path)
}
}?
}
.join(&filename);
let metadata = std::fs::metadata(&path)?;
let age = metadata.created().unwrap_or(SystemTime::UNIX_EPOCH);
Logs::build(log_type, age, &profile_path, filename, Some(true)).await
Logs::build(log_type, age, profile_path, filename, Some(true)).await
}
#[tracing::instrument]
pub async fn get_output_by_filename(
profile_subpath: &ProfilePathId,
profile_subpath: &str,
log_type: LogType,
file_name: &str,
) -> crate::Result<CensoredString> {
let state = State::get().await?;
let logs_folder = match log_type {
LogType::InfoLog => {
DirectoryInfo::profile_logs_dir(profile_subpath).await?
}
LogType::InfoLog => state.directories.profile_logs_dir(profile_subpath),
LogType::CrashReport => {
DirectoryInfo::crash_reports_dir(profile_subpath).await?
state.directories.crash_reports_dir(profile_subpath)
}
};
let path = logs_folder.join(file_name);
let credentials: Vec<Credentials> = state
.users
.read()
.await
.users
.clone()
.into_values()
let credentials = Credentials::get_all(&state.pool)
.await?
.into_iter()
.map(|x| x.1)
.collect();
// Load .gz file into String
@@ -265,10 +257,10 @@ pub async fn get_output_by_filename(
}
#[tracing::instrument]
pub async fn delete_logs(profile_path_id: ProfilePathId) -> crate::Result<()> {
let profile_path = profile_path_id.profile_path().await?;
pub async fn delete_logs(profile_path_id: &str) -> crate::Result<()> {
let state = State::get().await?;
let logs_folder = DirectoryInfo::profile_logs_dir(&profile_path).await?;
let logs_folder = state.directories.profile_logs_dir(profile_path_id);
for entry in std::fs::read_dir(&logs_folder)
.map_err(|e| IOError::with_path(e, &logs_folder))?
{
@@ -283,20 +275,18 @@ pub async fn delete_logs(profile_path_id: ProfilePathId) -> crate::Result<()> {
#[tracing::instrument]
pub async fn delete_logs_by_filename(
profile_path_id: ProfilePathId,
profile_path_id: &str,
log_type: LogType,
filename: &str,
) -> crate::Result<()> {
let profile_path = profile_path_id.profile_path().await?;
let state = State::get().await?;
let logs_folder = match log_type {
LogType::InfoLog => {
DirectoryInfo::profile_logs_dir(&profile_path).await
}
LogType::InfoLog => state.directories.profile_logs_dir(profile_path_id),
LogType::CrashReport => {
DirectoryInfo::crash_reports_dir(&profile_path).await
state.directories.crash_reports_dir(profile_path_id)
}
}?;
};
let path = logs_folder.join(filename);
io::remove_dir_all(&path).await?;
@@ -305,7 +295,7 @@ pub async fn delete_logs_by_filename(
#[tracing::instrument]
pub async fn get_latest_log_cursor(
profile_path: ProfilePathId,
profile_path: &str,
cursor: u64, // 0 to start at beginning of file
) -> crate::Result<LatestLogCursor> {
get_generic_live_log_cursor(profile_path, "latest.log", cursor).await
@@ -313,14 +303,12 @@ pub async fn get_latest_log_cursor(
#[tracing::instrument]
pub async fn get_generic_live_log_cursor(
profile_path_id: ProfilePathId,
profile_path_id: &str,
log_file_name: &str,
mut cursor: u64, // 0 to start at beginning of file
) -> crate::Result<LatestLogCursor> {
let profile_path = profile_path_id.profile_path().await?;
let state = State::get().await?;
let logs_folder = DirectoryInfo::profile_logs_dir(&profile_path).await?;
let logs_folder = state.directories.profile_logs_dir(profile_path_id);
let path = logs_folder.join(log_file_name);
if !path.exists() {
// Allow silent failure if latest.log doesn't exist (as the instance may have been launched, but not yet created the file)
@@ -358,13 +346,10 @@ pub async fn get_generic_live_log_cursor(
let output = String::from_utf8_lossy(&buffer).to_string(); // Convert to String
let cursor = cursor + bytes_read as u64; // Update cursor
let credentials: Vec<Credentials> = state
.users
.read()
.await
.users
.clone()
.into_values()
let credentials = Credentials::get_all(&state.pool)
.await?
.into_iter()
.map(|x| x.1)
.collect();
let output = CensoredString::censor(output, &credentials);
Ok(LatestLogCursor {

View File

@@ -1,3 +1,4 @@
use crate::state::CachedEntry;
use crate::State;
pub use daedalus::minecraft::VersionManifest;
pub use daedalus::modded::Manifest;
@@ -5,39 +6,32 @@ pub use daedalus::modded::Manifest;
#[tracing::instrument]
pub async fn get_minecraft_versions() -> crate::Result<VersionManifest> {
let state = State::get().await?;
let tags = state.metadata.read().await.minecraft.clone();
let minecraft_versions = CachedEntry::get_minecraft_manifest(
None,
&state.pool,
&state.api_semaphore,
)
.await?
.ok_or_else(|| {
crate::ErrorKind::NoValueFor("minecraft versions".to_string())
})?;
Ok(tags)
Ok(minecraft_versions)
}
#[tracing::instrument]
pub async fn get_fabric_versions() -> crate::Result<Manifest> {
pub async fn get_loader_versions(loader: &str) -> crate::Result<Manifest> {
let state = State::get().await?;
let tags = state.metadata.read().await.fabric.clone();
let loaders = CachedEntry::get_loader_manifest(
loader,
None,
&state.pool,
&state.api_semaphore,
)
.await?
.ok_or_else(|| {
crate::ErrorKind::NoValueFor(format!("{} loader versions", loader))
})?;
Ok(tags)
}
#[tracing::instrument]
pub async fn get_forge_versions() -> crate::Result<Manifest> {
let state = State::get().await?;
let tags = state.metadata.read().await.forge.clone();
Ok(tags)
}
#[tracing::instrument]
pub async fn get_quilt_versions() -> crate::Result<Manifest> {
let state = State::get().await?;
let tags = state.metadata.read().await.quilt.clone();
Ok(tags)
}
#[tracing::instrument]
pub async fn get_neoforge_versions() -> crate::Result<Manifest> {
let state = State::get().await?;
let tags = state.metadata.read().await.neoforge.clone();
Ok(tags)
Ok(loaders.manifest)
}

View File

@@ -1,13 +1,13 @@
//! Authentication flow interface
use crate::state::{Credentials, MinecraftLoginFlow};
use crate::State;
#[tracing::instrument]
pub async fn begin_login() -> crate::Result<MinecraftLoginFlow> {
let state = State::get().await?;
let mut users = state.users.write().await;
users.login_begin().await
crate::state::login_begin(&state.pool).await
}
#[tracing::instrument]
@@ -16,34 +16,51 @@ pub async fn finish_login(
flow: MinecraftLoginFlow,
) -> crate::Result<Credentials> {
let state = State::get().await?;
let mut users = state.users.write().await;
users.login_finish(code, flow).await
crate::state::login_finish(code, flow, &state.pool).await
}
#[tracing::instrument]
pub async fn get_default_user() -> crate::Result<Option<uuid::Uuid>> {
let state = State::get().await?;
let users = state.users.read().await;
Ok(users.default_user)
let users = Credentials::get_active(&state.pool).await?;
Ok(users.map(|x| x.id))
}
#[tracing::instrument]
pub async fn set_default_user(user: uuid::Uuid) -> crate::Result<()> {
let user = get_user(user).await?;
let state = State::get().await?;
let mut users = state.users.write().await;
users.default_user = Some(user.id);
users.save().await?;
let users = Credentials::get_all(&state.pool).await?;
let (_, mut user) = users.remove(&user).ok_or_else(|| {
crate::ErrorKind::OtherError(format!(
"Tried to get nonexistent user with ID {user}"
))
.as_error()
})?;
user.active = true;
user.upsert(&state.pool).await?;
Ok(())
}
/// Remove a user account from the database
#[tracing::instrument]
pub async fn remove_user(user: uuid::Uuid) -> crate::Result<()> {
pub async fn remove_user(uuid: uuid::Uuid) -> crate::Result<()> {
let state = State::get().await?;
let mut users = state.users.write().await;
users.remove(user).await?;
let users = Credentials::get_all(&state.pool).await?;
if let Some((uuid, user)) = users.remove(&uuid) {
Credentials::remove(uuid, &state.pool).await?;
if user.active {
if let Some((_, mut user)) = users.into_iter().next() {
user.active = true;
user.upsert(&state.pool).await?;
}
}
}
Ok(())
}
@@ -52,25 +69,6 @@ pub async fn remove_user(user: uuid::Uuid) -> crate::Result<()> {
#[tracing::instrument]
pub async fn users() -> crate::Result<Vec<Credentials>> {
let state = State::get().await?;
let users = state.users.read().await;
Ok(users.users.values().cloned().collect())
}
/// Get a specific user by user ID
/// Prefer to use 'refresh' instead of this function
#[tracing::instrument]
pub async fn get_user(user: uuid::Uuid) -> crate::Result<Credentials> {
let state = State::get().await?;
let users = state.users.read().await;
let user = users
.users
.get(&user)
.ok_or_else(|| {
crate::ErrorKind::OtherError(format!(
"Tried to get nonexistent user with ID {user}"
))
.as_error()
})?
.clone();
Ok(user)
let users = Credentials::get_all(&state.pool).await?;
Ok(users.into_iter().map(|x| x.1).collect())
}

View File

@@ -1,4 +1,5 @@
//! API for interacting with Theseus
pub mod cache;
pub mod handler;
pub mod jre;
pub mod logs;
@@ -8,17 +9,16 @@ pub mod mr_auth;
pub mod pack;
pub mod process;
pub mod profile;
pub mod safety;
pub mod settings;
pub mod tags;
pub mod data {
pub use crate::state::{
Credentials, DirectoryInfo, Hooks, JavaSettings, LinkedData,
Credentials, Dependency, DirectoryInfo, Hooks, JavaVersion, LinkedData,
MemorySettings, ModLoader, ModrinthCredentials,
ModrinthCredentialsResult, ModrinthProject, ModrinthTeamMember,
ModrinthUser, ModrinthVersion, ProfileMetadata, ProjectMetadata,
Settings, Theme, WindowSize,
ModrinthCredentialsResult, Organization, Process, ProfileFile, Project,
ProjectType, SearchResult, SearchResults, Settings, TeamMember, Theme,
User, Version, WindowSize,
};
}
@@ -26,15 +26,10 @@ pub mod prelude {
pub use crate::{
data::*,
event::CommandPayload,
jre, metadata, minecraft_auth, pack, process,
jre, metadata, minecraft_auth, mr_auth, pack, process,
profile::{self, create, Profile},
settings,
state::JavaGlobals,
state::{Dependency, ProfilePathId, ProjectPathId},
util::{
io::{canonicalize, IOError},
jre::JavaVersion,
},
util::io::{canonicalize, IOError},
State,
};
}

View File

@@ -1,56 +1,30 @@
use crate::state::{
ModrinthAuthFlow, ModrinthCredentials, ModrinthCredentialsResult,
};
use crate::ErrorKind;
use crate::state::{ModrinthCredentials, ModrinthCredentialsResult};
use serde_json::Value;
use std::collections::HashMap;
#[tracing::instrument]
pub async fn authenticate_begin_flow(provider: &str) -> crate::Result<String> {
let state = crate::State::get().await?;
// Don't start an uncompleteable new flow if there's an existing locked one
let mut write: tokio::sync::RwLockWriteGuard<'_, Option<ModrinthAuthFlow>> =
state.modrinth_auth_flow.write().await;
let mut flow = ModrinthAuthFlow::new(provider).await?;
let url = flow.prepare_login_url().await?;
*write = Some(flow);
Ok(url)
pub fn authenticate_begin_flow(provider: &str) -> String {
crate::state::get_login_url(provider)
}
#[tracing::instrument]
pub async fn authenticate_await_complete_flow(
pub async fn authenticate_finish_flow(
response: HashMap<String, Value>,
) -> crate::Result<ModrinthCredentialsResult> {
let state = crate::State::get().await?;
let mut write = state.modrinth_auth_flow.write().await;
if let Some(ref mut flow) = *write {
let creds = flow.extract_credentials(&state.fetch_semaphore).await?;
let creds = crate::state::finish_login_flow(
response,
&state.api_semaphore,
&state.pool,
)
.await?;
if let ModrinthCredentialsResult::Credentials(creds) = &creds {
let mut write = state.credentials.write().await;
write.login(creds.clone()).await?;
}
Ok(creds)
} else {
Err(ErrorKind::OtherError(
"No active Modrinth authenication flow!".to_string(),
)
.into())
if let ModrinthCredentialsResult::Credentials(creds) = &creds {
creds.upsert(&state.pool).await?;
}
}
#[tracing::instrument]
pub async fn cancel_flow() -> crate::Result<()> {
let state = crate::State::get().await?;
let mut write = state.modrinth_auth_flow.write().await;
if let Some(ref mut flow) = *write {
flow.close().await?;
}
*write = None;
Ok(())
Ok(creds)
}
pub async fn login_password(
@@ -63,13 +37,13 @@ pub async fn login_password(
username,
password,
challenge,
&state.fetch_semaphore,
&state.api_semaphore,
&state.pool,
)
.await?;
if let ModrinthCredentialsResult::Credentials(creds) = &creds {
let mut write = state.credentials.write().await;
write.login(creds.clone()).await?;
creds.upsert(&state.pool).await?;
}
Ok(creds)
@@ -82,10 +56,10 @@ pub async fn login_2fa(
) -> crate::Result<ModrinthCredentials> {
let state = crate::State::get().await?;
let creds =
crate::state::login_2fa(code, flow, &state.fetch_semaphore).await?;
crate::state::login_2fa(code, flow, &state.api_semaphore, &state.pool)
.await?;
let mut write = state.credentials.write().await;
write.login(creds.clone()).await?;
creds.upsert(&state.pool).await?;
Ok(creds)
}
@@ -105,32 +79,24 @@ pub async fn create_account(
password,
challenge,
sign_up_newsletter,
&state.fetch_semaphore,
&state.api_semaphore,
&state.pool,
)
.await?;
let mut write = state.credentials.write().await;
write.login(creds.clone()).await?;
creds.upsert(&state.pool).await?;
Ok(creds)
}
#[tracing::instrument]
pub async fn refresh() -> crate::Result<()> {
let state = crate::State::get().await?;
let mut write = state.credentials.write().await;
crate::state::refresh_credentials(&mut write, &state.fetch_semaphore)
.await?;
Ok(())
}
#[tracing::instrument]
pub async fn logout() -> crate::Result<()> {
let state = crate::State::get().await?;
let mut write = state.credentials.write().await;
write.logout().await?;
let current = ModrinthCredentials::get_active(&state.pool).await?;
if let Some(current) = current {
ModrinthCredentials::remove(&current.user_id, &state.pool).await?;
}
Ok(())
}
@@ -138,7 +104,9 @@ pub async fn logout() -> crate::Result<()> {
#[tracing::instrument]
pub async fn get_credentials() -> crate::Result<Option<ModrinthCredentials>> {
let state = crate::State::get().await?;
let read = state.credentials.read().await;
let current =
ModrinthCredentials::get_and_refresh(&state.pool, &state.api_semaphore)
.await?;
Ok(read.0.clone())
Ok(current)
}

View File

@@ -8,7 +8,7 @@ use crate::{
import::{self, copy_dotminecraft},
install_from::CreatePackDescription,
},
prelude::{ModLoader, Profile, ProfilePathId},
prelude::ModLoader,
state::{LinkedData, ProfileInstallStage},
util::io,
State,
@@ -116,11 +116,11 @@ pub async fn is_valid_atlauncher(instance_folder: PathBuf) -> bool {
}
#[tracing::instrument]
#[theseus_macros::debug_pin]
pub async fn import_atlauncher(
atlauncher_base_path: PathBuf, // path to base atlauncher folder
instance_folder: String, // instance folder in atlauncher_base_path
profile_path: ProfilePathId, // path to profile
profile_path: &str, // path to profile
) -> crate::Result<()> {
let atlauncher_instance_path = atlauncher_base_path
.join("instances")
@@ -159,7 +159,7 @@ pub async fn import_atlauncher(
project_id: None,
version_id: None,
existing_loading_bar: None,
profile_path: profile_path.clone(),
profile_path: profile_path.to_string(),
};
let backup_name = format!("ATLauncher-{}", instance_folder);
@@ -177,7 +177,7 @@ pub async fn import_atlauncher(
}
async fn import_atlauncher_unmanaged(
profile_path: ProfilePathId,
profile_path: &str,
minecraft_folder: PathBuf,
backup_name: String,
description: CreatePackDescription,
@@ -198,10 +198,10 @@ async fn import_atlauncher_unmanaged(
let game_version = atinstance.id;
let loader_version = if mod_loader != ModLoader::Vanilla {
crate::profile::create::get_loader_version_from_loader(
game_version.clone(),
crate::launcher::get_loader_version_from_profile(
&game_version,
mod_loader,
Some(atinstance.launcher.loader_version.version.clone()),
Some(&atinstance.launcher.loader_version.version),
)
.await?
} else {
@@ -209,24 +209,30 @@ async fn import_atlauncher_unmanaged(
};
// Set profile data to created default profile
crate::api::profile::edit(&profile_path, |prof| {
prof.metadata.name = description
crate::api::profile::edit(profile_path, |prof| {
prof.name = description
.override_title
.clone()
.unwrap_or_else(|| backup_name.to_string());
prof.install_stage = ProfileInstallStage::PackInstalling;
prof.metadata.linked_data = Some(LinkedData {
project_id: description.project_id.clone(),
version_id: description.version_id.clone(),
locked: Some(
description.project_id.is_some()
&& description.version_id.is_some(),
),
});
prof.metadata.icon.clone_from(&description.icon);
prof.metadata.game_version.clone_from(&game_version);
prof.metadata.loader_version.clone_from(&loader_version);
prof.metadata.loader = mod_loader;
if let Some(ref project_id) = description.project_id {
if let Some(ref version_id) = description.version_id {
prof.linked_data = Some(LinkedData {
project_id: project_id.clone(),
version_id: version_id.clone(),
locked: true,
})
}
}
prof.icon_path = description
.icon
.clone()
.map(|x| x.to_string_lossy().to_string());
prof.game_version.clone_from(&game_version);
prof.loader_version = loader_version.clone().map(|x| x.id);
prof.loader = mod_loader;
async { Ok(()) }
})
@@ -235,32 +241,20 @@ async fn import_atlauncher_unmanaged(
// Moves .minecraft folder over (ie: overrides such as resourcepacks, mods, etc)
let state = State::get().await?;
let loading_bar = copy_dotminecraft(
profile_path.clone(),
profile_path,
minecraft_folder,
&state.io_semaphore,
None,
)
.await?;
if let Some(profile_val) =
crate::api::profile::get(&profile_path, None).await?
{
if let Some(profile_val) = crate::api::profile::get(profile_path).await? {
crate::launcher::install_minecraft(
&profile_val,
Some(loading_bar),
false,
)
.await?;
{
let state = State::get().await?;
let mut file_watcher = state.file_watcher.write().await;
Profile::watch_fs(
&profile_val.get_profile_full_path().await?,
&mut file_watcher,
)
.await?;
}
State::sync().await?;
}
Ok(())
}

View File

@@ -2,10 +2,8 @@ use std::path::PathBuf;
use serde::{Deserialize, Serialize};
use crate::prelude::Profile;
use crate::state::CredentialsStore;
use crate::{
prelude::{ModLoader, ProfilePathId},
prelude::ModLoader,
state::ProfileInstallStage,
util::{
fetch::{fetch, write_cached_icon},
@@ -49,7 +47,7 @@ pub async fn is_valid_curseforge(instance_folder: PathBuf) -> bool {
pub async fn import_curseforge(
curseforge_instance_folder: PathBuf, // instance's folder
profile_path: ProfilePathId, // path to profile
profile_path: &str, // path to profile
) -> crate::Result<()> {
// Load minecraftinstance.json
let minecraft_instance: String = io::read_to_string(
@@ -77,13 +75,9 @@ pub async fn import_curseforge(
thumbnail_url: Some(thumbnail_url),
}) = minecraft_instance.installed_modpack.clone()
{
let icon_bytes = fetch(
&thumbnail_url,
None,
&state.fetch_semaphore,
&CredentialsStore(None),
)
.await?;
let icon_bytes =
fetch(&thumbnail_url, None, &state.fetch_semaphore, &state.pool)
.await?;
let filename = thumbnail_url.rsplit('/').last();
if let Some(filename) = filename {
icon = Some(
@@ -121,10 +115,10 @@ pub async fn import_curseforge(
let mod_loader = mod_loader.unwrap_or(ModLoader::Vanilla);
let loader_version = if mod_loader != ModLoader::Vanilla {
crate::profile::create::get_loader_version_from_loader(
game_version.clone(),
crate::launcher::get_loader_version_from_profile(
&game_version,
mod_loader,
loader_version,
loader_version.as_deref(),
)
.await?
} else {
@@ -132,31 +126,32 @@ pub async fn import_curseforge(
};
// Set profile data to created default profile
crate::api::profile::edit(&profile_path, |prof| {
prof.metadata.name = override_title
crate::api::profile::edit(profile_path, |prof| {
prof.name = override_title
.clone()
.unwrap_or_else(|| backup_name.to_string());
prof.install_stage = ProfileInstallStage::PackInstalling;
prof.metadata.icon.clone_from(&icon);
prof.metadata.game_version.clone_from(&game_version);
prof.metadata.loader_version.clone_from(&loader_version);
prof.metadata.loader = mod_loader;
prof.icon_path =
icon.clone().map(|x| x.to_string_lossy().to_string());
prof.game_version.clone_from(&game_version);
prof.loader_version = loader_version.clone().map(|x| x.id);
prof.loader = mod_loader;
async { Ok(()) }
})
.await?;
} else {
// create a vanilla profile
crate::api::profile::edit(&profile_path, |prof| {
prof.metadata.name = override_title
crate::api::profile::edit(profile_path, |prof| {
prof.name = override_title
.clone()
.unwrap_or_else(|| backup_name.to_string());
prof.metadata.icon.clone_from(&icon);
prof.metadata
.game_version
prof.icon_path =
icon.clone().map(|x| x.to_string_lossy().to_string());
prof.game_version
.clone_from(&minecraft_instance.game_version);
prof.metadata.loader_version = None;
prof.metadata.loader = ModLoader::Vanilla;
prof.loader_version = None;
prof.loader = ModLoader::Vanilla;
async { Ok(()) }
})
@@ -166,33 +161,20 @@ pub async fn import_curseforge(
// Copy in contained folders as overrides
let state = State::get().await?;
let loading_bar = copy_dotminecraft(
profile_path.clone(),
profile_path,
curseforge_instance_folder,
&state.io_semaphore,
None,
)
.await?;
if let Some(profile_val) =
crate::api::profile::get(&profile_path, None).await?
{
if let Some(profile_val) = crate::api::profile::get(profile_path).await? {
crate::launcher::install_minecraft(
&profile_val,
Some(loading_bar),
false,
)
.await?;
{
let state = State::get().await?;
let mut file_watcher = state.file_watcher.write().await;
Profile::watch_fs(
&profile_val.get_profile_full_path().await?,
&mut file_watcher,
)
.await?;
}
State::sync().await?;
}
Ok(())

View File

@@ -2,12 +2,7 @@ use std::path::PathBuf;
use serde::{Deserialize, Serialize};
use crate::{
prelude::{ModLoader, Profile, ProfilePathId},
state::ProfileInstallStage,
util::io,
State,
};
use crate::{prelude::ModLoader, state::ProfileInstallStage, util::io, State};
use super::{copy_dotminecraft, recache_icon};
@@ -41,7 +36,7 @@ pub async fn is_valid_gdlauncher(instance_folder: PathBuf) -> bool {
pub async fn import_gdlauncher(
gdlauncher_instance_folder: PathBuf, // instance's folder
profile_path: ProfilePathId, // path to profile
profile_path: &str, // path to profile
) -> crate::Result<()> {
// Load config.json
let config: String =
@@ -74,10 +69,10 @@ pub async fn import_gdlauncher(
let loader_version = config.loader.loader_version;
let loader_version = if mod_loader != ModLoader::Vanilla {
crate::profile::create::get_loader_version_from_loader(
game_version.clone(),
crate::launcher::get_loader_version_from_profile(
&game_version,
mod_loader,
loader_version,
loader_version.as_deref(),
)
.await?
} else {
@@ -85,15 +80,15 @@ pub async fn import_gdlauncher(
};
// Set profile data to created default profile
crate::api::profile::edit(&profile_path, |prof| {
prof.metadata.name = override_title
crate::api::profile::edit(profile_path, |prof| {
prof.name = override_title
.clone()
.unwrap_or_else(|| backup_name.to_string());
prof.install_stage = ProfileInstallStage::PackInstalling;
prof.metadata.icon.clone_from(&icon);
prof.metadata.game_version.clone_from(&game_version);
prof.metadata.loader_version.clone_from(&loader_version);
prof.metadata.loader = mod_loader;
prof.icon_path = icon.clone().map(|x| x.to_string_lossy().to_string());
prof.game_version.clone_from(&game_version);
prof.loader_version = loader_version.clone().map(|x| x.id);
prof.loader = mod_loader;
async { Ok(()) }
})
@@ -102,32 +97,20 @@ pub async fn import_gdlauncher(
// Copy in contained folders as overrides
let state = State::get().await?;
let loading_bar = copy_dotminecraft(
profile_path.clone(),
profile_path,
gdlauncher_instance_folder,
&state.io_semaphore,
None,
)
.await?;
if let Some(profile_val) =
crate::api::profile::get(&profile_path, None).await?
{
if let Some(profile_val) = crate::api::profile::get(profile_path).await? {
crate::launcher::install_minecraft(
&profile_val,
Some(loading_bar),
false,
)
.await?;
{
let state = State::get().await?;
let mut file_watcher = state.file_watcher.write().await;
Profile::watch_fs(
&profile_val.get_profile_full_path().await?,
&mut file_watcher,
)
.await?;
}
State::sync().await?;
}
Ok(())

View File

@@ -7,7 +7,6 @@ use crate::{
import::{self, copy_dotminecraft},
install_from::{self, CreatePackDescription, PackDependency},
},
prelude::{Profile, ProfilePathId},
util::io,
State,
};
@@ -176,11 +175,11 @@ async fn load_instance_cfg(file_path: &Path) -> crate::Result<MMCInstance> {
}
#[tracing::instrument]
#[theseus_macros::debug_pin]
pub async fn import_mmc(
mmc_base_path: PathBuf, // path to base mmc folder
instance_folder: String, // instance folder in mmc_base_path
profile_path: ProfilePathId, // path to profile
mmc_base_path: PathBuf, // path to base mmc folder
instance_folder: String, // instance folder in mmc_base_path
profile_path: &str, // path to profile
) -> crate::Result<()> {
let mmc_instance_path = mmc_base_path
.join("instances")
@@ -202,13 +201,13 @@ pub async fn import_mmc(
};
// Create description from instance.cfg
let description = CreatePackDescription {
let mut description = CreatePackDescription {
icon,
override_title: instance_cfg.name,
project_id: instance_cfg.managed_pack_id,
version_id: instance_cfg.managed_pack_version_id,
project_id: None,
version_id: None,
existing_loading_bar: None,
profile_path: profile_path.clone(),
profile_path: profile_path.to_string(),
};
// Managed pack
@@ -217,6 +216,9 @@ pub async fn import_mmc(
if instance_cfg.managed_pack.unwrap_or(false) {
match instance_cfg.managed_pack_type {
Some(MMCManagedPackType::Modrinth) => {
description.project_id = instance_cfg.managed_pack_id;
description.version_id = instance_cfg.managed_pack_version_id;
// Modrinth Managed Pack
// Kept separate as we may in the future want to add special handling for modrinth managed packs
let backup_name = "Imported Modrinth Modpack".to_string();
@@ -260,7 +262,7 @@ pub async fn import_mmc(
}
async fn import_mmc_unmanaged(
profile_path: ProfilePathId,
profile_path: &str,
minecraft_folder: PathBuf,
backup_name: String,
description: CreatePackDescription,
@@ -302,7 +304,7 @@ async fn import_mmc_unmanaged(
// Sets profile information to be that loaded from mmc-pack.json and instance.cfg
install_from::set_profile_information(
profile_path.clone(),
profile_path.to_string(),
&description,
&backup_name,
&dependencies,
@@ -313,32 +315,20 @@ async fn import_mmc_unmanaged(
// Moves .minecraft folder over (ie: overrides such as resourcepacks, mods, etc)
let state = State::get().await?;
let loading_bar = copy_dotminecraft(
profile_path.clone(),
profile_path,
minecraft_folder,
&state.io_semaphore,
None,
)
.await?;
if let Some(profile_val) =
crate::api::profile::get(&profile_path, None).await?
{
if let Some(profile_val) = crate::api::profile::get(profile_path).await? {
crate::launcher::install_minecraft(
&profile_val,
Some(loading_bar),
false,
)
.await?;
{
let state = State::get().await?;
let mut file_watcher = state.file_watcher.write().await;
Profile::watch_fs(
&profile_val.get_profile_full_path().await?,
&mut file_watcher,
)
.await?;
}
State::sync().await?;
}
Ok(())
}

View File

@@ -11,8 +11,6 @@ use crate::{
emit::{emit_loading, init_or_edit_loading},
LoadingBarId,
},
prelude::ProfilePathId,
state::Profiles,
util::{
fetch::{self, IoSemaphore},
io,
@@ -105,10 +103,10 @@ pub async fn get_importable_instances(
// Import an instance from a launcher type and base path
// Note: this *deletes* the submitted empty profile
#[theseus_macros::debug_pin]
#[tracing::instrument]
pub async fn import_instance(
profile_path: ProfilePathId, // This should be a blank profile
profile_path: &str, // This should be a blank profile
launcher_type: ImportLauncherType,
base_path: PathBuf,
instance_folder: String,
@@ -117,31 +115,31 @@ pub async fn import_instance(
let res = match launcher_type {
ImportLauncherType::MultiMC | ImportLauncherType::PrismLauncher => {
mmc::import_mmc(
base_path, // path to base mmc folder
instance_folder, // instance folder in mmc_base_path
profile_path.clone(), // path to profile
base_path, // path to base mmc folder
instance_folder, // instance folder in mmc_base_path
profile_path, // path to profile
)
.await
}
ImportLauncherType::ATLauncher => {
atlauncher::import_atlauncher(
base_path, // path to atlauncher folder
instance_folder, // instance folder in atlauncher
profile_path.clone(), // path to profile
base_path, // path to atlauncher folder
instance_folder, // instance folder in atlauncher
profile_path, // path to profile
)
.await
}
ImportLauncherType::GDLauncher => {
gdlauncher::import_gdlauncher(
base_path.join("instances").join(instance_folder), // path to gdlauncher folder
profile_path.clone(), // path to profile
profile_path, // path to profile
)
.await
}
ImportLauncherType::Curseforge => {
curseforge::import_curseforge(
base_path.join("Instances").join(instance_folder), // path to curseforge folder
profile_path.clone(), // path to profile
profile_path, // path to profile
)
.await
}
@@ -158,14 +156,11 @@ pub async fn import_instance(
Ok(_) => {}
Err(e) => {
tracing::warn!("Import failed: {:?}", e);
let _ = crate::api::profile::remove(&profile_path).await;
let _ = crate::api::profile::remove(profile_path).await;
return Err(e);
}
}
// Check existing managed packs for potential updates
tokio::task::spawn(Profiles::update_modrinth_versions());
tracing::debug!("Completed import.");
Ok(())
}
@@ -200,7 +195,7 @@ pub fn get_default_launcher_path(
}
/// Checks if this PathBuf is a valid instance for the given launcher type
#[theseus_macros::debug_pin]
#[tracing::instrument]
pub async fn is_valid_importable_instance(
instance_path: PathBuf,
@@ -224,7 +219,7 @@ pub async fn is_valid_importable_instance(
}
/// Caches an image file in the filesystem into the cache directory, and returns the path to the cached file.
#[theseus_macros::debug_pin]
#[tracing::instrument]
pub async fn recache_icon(
icon_path: PathBuf,
@@ -252,13 +247,14 @@ pub async fn recache_icon(
}
pub async fn copy_dotminecraft(
profile_path_id: ProfilePathId,
profile_path_id: &str,
dotminecraft: PathBuf,
io_semaphore: &IoSemaphore,
existing_loading_bar: Option<LoadingBarId>,
) -> crate::Result<LoadingBarId> {
// Get full path to profile
let profile_path = profile_path_id.get_full_path().await?;
let profile_path =
crate::api::profile::get_full_path(profile_path_id).await?;
// Gets all subfiles recursively in src
let subfiles = get_all_subfiles(&dotminecraft).await?;
@@ -298,7 +294,7 @@ pub async fn copy_dotminecraft(
/// Recursively get a list of all subfiles in src
/// uses async recursion
#[theseus_macros::debug_pin]
#[async_recursion::async_recursion]
#[tracing::instrument]
pub async fn get_all_subfiles(src: &Path) -> crate::Result<Vec<PathBuf>> {

View File

@@ -1,16 +1,10 @@
use crate::config::MODRINTH_API_URL;
use crate::data::ModLoader;
use crate::event::emit::{emit_loading, init_loading};
use crate::event::{LoadingBarId, LoadingBarType};
use crate::prelude::ProfilePathId;
use crate::state::{
LinkedData, ModrinthProject, ModrinthVersion, ProfileInstallStage, SideType,
};
use crate::util::fetch::{
fetch, fetch_advanced, fetch_json, write_cached_icon,
};
use crate::state::{CachedEntry, LinkedData, ProfileInstallStage, SideType};
use crate::util::fetch::{fetch, fetch_advanced, write_cached_icon};
use crate::util::io;
use crate::{InnerProjectPathUnix, State};
use crate::State;
use reqwest::Method;
use serde::{Deserialize, Serialize};
@@ -33,7 +27,7 @@ pub struct PackFormat {
#[derive(Serialize, Deserialize, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct PackFile {
pub path: InnerProjectPathUnix,
pub path: String,
pub hashes: HashMap<PackFileHash, String>,
pub env: Option<HashMap<EnvType, SideType>>,
pub downloads: Vec<String>,
@@ -84,7 +78,7 @@ pub enum PackDependency {
Minecraft,
}
#[derive(Serialize, Deserialize, Debug)]
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "camelCase", tag = "type")]
pub enum CreatePackLocation {
// Create a pack from a modrinth version ID (such as a modpack)
@@ -144,7 +138,7 @@ pub struct CreatePackDescription {
pub project_id: Option<String>,
pub version_id: Option<String>,
pub existing_loading_bar: Option<LoadingBarId>,
pub profile_path: ProfilePathId,
pub profile_path: String,
}
pub fn get_profile_from_pack(
@@ -160,9 +154,9 @@ pub fn get_profile_from_pack(
name: title,
icon_url,
linked_data: Some(LinkedData {
project_id: Some(project_id),
version_id: Some(version_id),
locked: Some(true),
project_id,
version_id,
locked: true,
}),
..Default::default()
},
@@ -182,13 +176,13 @@ pub fn get_profile_from_pack(
}
#[tracing::instrument]
#[theseus_macros::debug_pin]
pub async fn generate_pack_from_version_id(
project_id: String,
version_id: String,
title: String,
icon_url: Option<String>,
profile_path: ProfilePathId,
profile_path: String,
// Existing loading bar. Unlike when existing_loading_bar is used, this one is pre-initialized with PackFileDownload
// For example, you might use this if multiple packs are being downloaded at once and you want to use the same loading bar
@@ -202,7 +196,7 @@ pub async fn generate_pack_from_version_id(
} else {
init_loading(
LoadingBarType::PackFileDownload {
profile_path: profile_path.get_full_path().await?,
profile_path: profile_path.clone(),
pack_name: title,
icon: icon_url,
pack_version: version_id.clone(),
@@ -214,16 +208,18 @@ pub async fn generate_pack_from_version_id(
};
emit_loading(&loading_bar, 0.0, Some("Fetching version")).await?;
let creds = state.credentials.read().await;
let version: ModrinthVersion = fetch_json(
Method::GET,
&format!("{}version/{}", MODRINTH_API_URL, version_id),
let version = CachedEntry::get_version(
&version_id,
None,
None,
&state.fetch_semaphore,
&creds,
&state.pool,
&state.api_semaphore,
)
.await?;
.await?
.ok_or_else(|| {
crate::ErrorKind::InputError(
"Invalid version ID specified!".to_string(),
)
})?;
emit_loading(&loading_bar, 10.0, None).await?;
let (url, hash) =
@@ -249,27 +245,29 @@ pub async fn generate_pack_from_version_id(
None,
Some((&loading_bar, 70.0)),
&state.fetch_semaphore,
&creds,
&state.pool,
)
.await?;
emit_loading(&loading_bar, 0.0, Some("Fetching project metadata")).await?;
let project: ModrinthProject = fetch_json(
Method::GET,
&format!("{}project/{}", MODRINTH_API_URL, version.project_id),
let project = CachedEntry::get_project(
&version.project_id,
None,
None,
&state.fetch_semaphore,
&creds,
&state.pool,
&state.api_semaphore,
)
.await?;
.await?
.ok_or_else(|| {
crate::ErrorKind::InputError(
"Invalid project ID specified!".to_string(),
)
})?;
emit_loading(&loading_bar, 10.0, Some("Retrieving icon")).await?;
let icon = if let Some(icon_url) = project.icon_url {
let state = State::get().await?;
let icon_bytes =
fetch(&icon_url, None, &state.fetch_semaphore, &creds).await?;
drop(creds);
fetch(&icon_url, None, &state.fetch_semaphore, &state.pool).await?;
let filename = icon_url.rsplit('/').next();
@@ -305,10 +303,10 @@ pub async fn generate_pack_from_version_id(
}
#[tracing::instrument]
#[theseus_macros::debug_pin]
pub async fn generate_pack_from_file(
path: PathBuf,
profile_path: ProfilePathId,
profile_path: String,
) -> crate::Result<CreatePack> {
let file = io::read(&path).await?;
Ok(CreatePack {
@@ -326,9 +324,9 @@ pub async fn generate_pack_from_file(
/// Sets generated profile attributes to the pack ones (using profile::edit)
/// This includes the pack name, icon, game version, loader version, and loader
#[theseus_macros::debug_pin]
pub async fn set_profile_information(
profile_path: ProfilePathId,
profile_path: String,
description: &CreatePackDescription,
backup_name: &str,
dependencies: &HashMap<PackDependency, String>,
@@ -371,10 +369,10 @@ pub async fn set_profile_information(
let mod_loader = mod_loader.unwrap_or(ModLoader::Vanilla);
let loader_version = if mod_loader != ModLoader::Vanilla {
crate::profile::create::get_loader_version_from_loader(
game_version.clone(),
crate::launcher::get_loader_version_from_profile(
game_version,
mod_loader,
loader_version.cloned(),
loader_version.cloned().as_deref(),
)
.await?
} else {
@@ -382,35 +380,36 @@ pub async fn set_profile_information(
};
// Sets values in profile
crate::api::profile::edit(&profile_path, |prof| {
prof.metadata.name = description
prof.name = description
.override_title
.clone()
.unwrap_or_else(|| backup_name.to_string());
prof.install_stage = ProfileInstallStage::PackInstalling;
let project_id = description.project_id.clone();
let version_id = description.version_id.clone();
if let Some(ref project_id) = description.project_id {
if let Some(ref version_id) = description.version_id {
prof.linked_data = Some(LinkedData {
project_id: project_id.clone(),
version_id: version_id.clone(),
locked: if !ignore_lock {
true
} else {
prof.linked_data
.as_ref()
.map(|x| x.locked)
.unwrap_or(true)
},
})
}
}
prof.metadata.linked_data = if project_id.is_some()
&& version_id.is_some()
{
Some(LinkedData {
project_id,
version_id,
locked: if !ignore_lock {
Some(true)
} else {
prof.metadata.linked_data.as_ref().and_then(|x| x.locked)
},
})
} else {
None
};
prof.metadata.icon.clone_from(&description.icon);
prof.metadata.game_version.clone_from(game_version);
prof.metadata.loader_version.clone_from(&loader_version);
prof.metadata.loader = mod_loader;
prof.icon_path = description
.icon
.clone()
.map(|x| x.to_string_lossy().to_string());
prof.game_version.clone_from(game_version);
prof.loader_version = loader_version.clone().map(|x| x.id);
prof.loader = mod_loader;
async { Ok(()) }
})

View File

@@ -1,4 +1,3 @@
use crate::config::MODRINTH_API_URL;
use crate::event::emit::{
emit_loading, init_or_edit_loading, loading_try_for_each_concurrent,
};
@@ -6,16 +5,14 @@ use crate::event::LoadingBarType;
use crate::pack::install_from::{
set_profile_information, EnvType, PackFile, PackFileHash,
};
use crate::prelude::{ModrinthVersion, ProfilePathId, ProjectMetadata};
use crate::state::{ProfileInstallStage, Profiles, SideType};
use crate::util::fetch::{fetch_json, fetch_mirrors, write};
use crate::state::{
cache_file_hash, CacheBehaviour, CachedEntry, ProfileInstallStage, SideType,
};
use crate::util::fetch::{fetch_mirrors, write};
use crate::util::io;
use crate::{profile, State};
use async_zip::base::read::seek::ZipFileReader;
use reqwest::Method;
use serde_json::json;
use std::collections::HashMap;
use std::io::Cursor;
use std::path::{Component, PathBuf};
@@ -28,11 +25,11 @@ use super::install_from::{
/// Wrapper around install_pack_files that generates a pack creation description, and
/// attempts to install the pack files. If it fails, it will remove the profile (fail safely)
/// Install a modpack from a mrpack file (a modrinth .zip format)
#[theseus_macros::debug_pin]
pub async fn install_zipped_mrpack(
location: CreatePackLocation,
profile_path: ProfilePathId,
) -> crate::Result<ProfilePathId> {
profile_path: String,
) -> crate::Result<String> {
// Get file from description
let create_pack: CreatePack = match location {
CreatePackLocation::FromVersionId {
@@ -59,9 +56,6 @@ pub async fn install_zipped_mrpack(
// Install pack files, and if it fails, fail safely by removing the profile
let result = install_zipped_mrpack_files(create_pack, false).await;
// Check existing managed packs for potential updates
tokio::task::spawn(Profiles::update_modrinth_versions());
match result {
Ok(profile) => Ok(profile),
Err(err) => {
@@ -74,11 +68,11 @@ pub async fn install_zipped_mrpack(
/// Install all pack files from a description
/// Does not remove the profile if it fails
#[theseus_macros::debug_pin]
pub async fn install_zipped_mrpack_files(
create_pack: CreatePack,
ignore_lock: bool,
) -> crate::Result<ProfilePathId> {
) -> crate::Result<String> {
let state = &State::get().await?;
let file = create_pack.file;
@@ -132,7 +126,7 @@ pub async fn install_zipped_mrpack_files(
let loading_bar = init_or_edit_loading(
existing_loading_bar,
LoadingBarType::PackDownload {
profile_path: profile_path.get_full_path().await?.clone(),
profile_path: profile_path.clone(),
pack_name: pack.name.clone(),
icon,
pack_id: project_id,
@@ -167,7 +161,6 @@ pub async fn install_zipped_mrpack_files(
}
}
let creds = state.credentials.read().await;
let file = fetch_mirrors(
&project
.downloads
@@ -176,10 +169,9 @@ pub async fn install_zipped_mrpack_files(
.collect::<Vec<&str>>(),
project.hashes.get(&PackFileHash::Sha1).map(|x| &**x),
&state.fetch_semaphore,
&creds,
&state.pool,
)
.await?;
drop(creds);
let project_path = project.path.to_string();
@@ -188,10 +180,23 @@ pub async fn install_zipped_mrpack_files(
if let Some(path) = path {
match path {
Component::CurDir | Component::Normal(_) => {
let path = profile_path
.get_full_path()
.await?
.join(&project_path);
let path =
profile::get_full_path(&profile_path)
.await?
.join(&project_path);
cache_file_hash(
file.clone(),
&profile_path,
&project_path,
project
.hashes
.get(&PackFileHash::Sha1)
.map(|x| &**x),
&state.pool,
)
.await?;
write(&path, &file, &state.io_semaphore)
.await?;
}
@@ -243,9 +248,22 @@ pub async fn install_zipped_mrpack_files(
}
if new_path.file_name().is_some() {
let bytes = bytes::Bytes::from(content);
cache_file_hash(
bytes.clone(),
&profile_path,
&new_path.to_string_lossy(),
None,
&state.pool,
)
.await?;
write(
&profile_path.get_full_path().await?.join(new_path),
&content,
&profile::get_full_path(&profile_path)
.await?
.join(new_path),
&bytes,
&state.io_semaphore,
)
.await?;
@@ -265,24 +283,23 @@ pub async fn install_zipped_mrpack_files(
// If the icon doesn't exist, we expect icon.png to be a potential icon.
// If it doesn't exist, and an override to icon.png exists, cache and use that
let potential_icon =
profile_path.get_full_path().await?.join("icon.png");
let potential_icon = profile::get_full_path(&profile_path)
.await?
.join("icon.png");
if !icon_exists && potential_icon.exists() {
profile::edit_icon(&profile_path, Some(&potential_icon)).await?;
}
if let Some(profile_val) = profile::get(&profile_path, None).await? {
if let Some(profile_val) = profile::get(&profile_path).await? {
crate::launcher::install_minecraft(
&profile_val,
Some(loading_bar),
false,
)
.await?;
State::sync().await?;
}
Ok::<ProfilePathId, crate::Error>(profile_path.clone())
Ok::<String, crate::Error>(profile_path.clone())
} else {
Err(crate::Error::from(crate::ErrorKind::InputError(
"No pack manifest found in mrpack".to_string(),
@@ -291,9 +308,9 @@ pub async fn install_zipped_mrpack_files(
}
#[tracing::instrument(skip(mrpack_file))]
#[theseus_macros::debug_pin]
pub async fn remove_all_related_files(
profile_path: ProfilePathId,
profile_path: String,
mrpack_file: bytes::Bytes,
) -> crate::Result<()> {
let reader: Cursor<&bytes::Bytes> = Cursor::new(&mrpack_file);
@@ -339,43 +356,39 @@ pub async fn remove_all_related_files(
let all_hashes = pack
.files
.iter()
.filter_map(|f| Some(f.hashes.get(&PackFileHash::Sha512)?.clone()))
.filter_map(|f| Some(f.hashes.get(&PackFileHash::Sha1)?.clone()))
.collect::<Vec<_>>();
let creds = state.credentials.read().await;
// First, get project info by hash
let files_url = format!("{}version_files", MODRINTH_API_URL);
let hash_projects = fetch_json::<HashMap<String, ModrinthVersion>>(
Method::POST,
&files_url,
let file_infos = CachedEntry::get_file_many(
&all_hashes.iter().map(|x| &**x).collect::<Vec<_>>(),
None,
Some(json!({
"hashes": all_hashes,
"algorithm": "sha512",
})),
&state.fetch_semaphore,
&creds,
&state.pool,
&state.api_semaphore,
)
.await?;
let to_remove = hash_projects
.into_values()
let to_remove = file_infos
.into_iter()
.map(|p| p.project_id)
.collect::<Vec<_>>();
let profile =
profile::get(&profile_path, None).await?.ok_or_else(|| {
crate::ErrorKind::UnmanagedProfileError(
profile_path.to_string(),
)
})?;
for (project_id, project) in &profile.projects {
if let ProjectMetadata::Modrinth { project, .. } = &project.metadata
{
if to_remove.contains(&project.id) {
let path = profile
.get_profile_full_path()
.await?
.join(project_id.0.clone());
let profile = profile::get(&profile_path).await?.ok_or_else(|| {
crate::ErrorKind::UnmanagedProfileError(profile_path.to_string())
})?;
let profile_full_path = profile::get_full_path(&profile_path).await?;
for (file_path, project) in profile
.get_projects(
Some(CacheBehaviour::MustRevalidate),
&state.pool,
&state.api_semaphore,
)
.await?
{
if let Some(metadata) = &project.metadata {
if to_remove.contains(&metadata.project_id) {
let path = profile_full_path.join(file_path);
if path.exists() {
io::remove_file(&path).await?;
}
@@ -386,10 +399,7 @@ pub async fn remove_all_related_files(
// Iterate over all Modrinth project file paths in the json, and remove them
// (There should be few, but this removes any files the .mrpack intended as Modrinth projects but were unrecognized)
for file in pack.files {
let path: PathBuf = profile_path
.get_full_path()
.await?
.join(file.path.to_string());
let path: PathBuf = profile_full_path.join(file.path);
if path.exists() {
io::remove_file(&path).await?;
}
@@ -414,8 +424,9 @@ pub async fn remove_all_related_files(
}
// Remove this file if a corresponding one exists in the filesystem
let existing_file =
profile_path.get_full_path().await?.join(&new_path);
let existing_file = profile::get_full_path(&profile_path)
.await?
.join(&new_path);
if existing_file.exists() {
io::remove_file(&existing_file).await?;
}

View File

@@ -1,128 +1,56 @@
//! Theseus process management interface
use uuid::Uuid;
use crate::state::{MinecraftChild, ProfilePathId};
use crate::state::Process;
pub use crate::{
state::{
Hooks, JavaSettings, MemorySettings, Profile, Settings, WindowSize,
},
state::{Hooks, MemorySettings, Profile, Settings, WindowSize},
State,
};
// Gets whether a child process stored in the state by UUID has finished
#[tracing::instrument]
pub async fn has_finished_by_uuid(uuid: Uuid) -> crate::Result<bool> {
Ok(get_exit_status_by_uuid(uuid).await?.is_some())
}
// Gets the exit status of a child process stored in the state by UUID
#[tracing::instrument]
pub async fn get_exit_status_by_uuid(uuid: Uuid) -> crate::Result<Option<i32>> {
let state = State::get().await?;
let children = state.children.read().await;
children.exit_status(uuid).await
}
// Gets the UUID of each stored process in the state
#[tracing::instrument]
pub async fn get_all_uuids() -> crate::Result<Vec<Uuid>> {
let state = State::get().await?;
let children = state.children.read().await;
Ok(children.keys())
}
// Gets the UUID of each *running* stored process in the state
#[tracing::instrument]
pub async fn get_all_running_uuids() -> crate::Result<Vec<Uuid>> {
let state = State::get().await?;
let children = state.children.read().await;
children.running_keys().await
}
// Gets the Profile paths of each *running* stored process in the state
#[tracing::instrument]
pub async fn get_all_running_profile_paths() -> crate::Result<Vec<ProfilePathId>>
{
pub async fn get_all() -> crate::Result<Vec<Process>> {
let state = State::get().await?;
let children = state.children.read().await;
children.running_profile_paths().await
}
// Gets the Profiles (cloned) of each *running* stored process in the state
#[tracing::instrument]
pub async fn get_all_running_profiles() -> crate::Result<Vec<Profile>> {
let state = State::get().await?;
let children = state.children.read().await;
children.running_profiles().await
let processes = Process::get_all(&state.pool).await?;
Ok(processes)
}
// Gets the UUID of each stored process in the state by profile path
#[tracing::instrument]
pub async fn get_uuids_by_profile_path(
profile_path: ProfilePathId,
) -> crate::Result<Vec<Uuid>> {
pub async fn get_by_profile_path(
profile_path: &str,
) -> crate::Result<Vec<Process>> {
let state = State::get().await?;
let children = state.children.read().await;
children.running_keys_with_profile(profile_path).await
let processes =
Process::get_from_profile(profile_path, &state.pool).await?;
Ok(processes)
}
// Kill a child process stored in the state by UUID, as a string
#[tracing::instrument]
pub async fn kill_by_uuid(uuid: Uuid) -> crate::Result<()> {
pub async fn kill(pid: i32) -> crate::Result<()> {
let state = State::get().await?;
let children = state.children.read().await;
if let Some(mchild) = children.get(uuid) {
let mut mchild = mchild.write().await;
kill(&mut mchild).await
let process = Process::get(pid, &state.pool).await?;
if let Some(process) = process {
process.kill().await?;
Ok(())
} else {
// No error returned for already finished process
Ok(())
}
}
// Wait for a child process stored in the state by UUID
#[tracing::instrument]
pub async fn wait_for_by_uuid(uuid: Uuid) -> crate::Result<()> {
pub async fn wait_for(pid: i32) -> crate::Result<()> {
let state = State::get().await?;
let children = state.children.read().await;
// No error returned for already killed process
if let Some(mchild) = children.get(uuid) {
let mut mchild = mchild.write().await;
wait_for(&mut mchild).await
let process = Process::get(pid, &state.pool).await?;
if let Some(process) = process {
process.wait_for().await?;
Ok(())
} else {
// No error returned for already finished process
Ok(())
}
}
// Kill a running child process directly
#[tracing::instrument(skip(running))]
pub async fn kill(running: &mut MinecraftChild) -> crate::Result<()> {
running.current_child.write().await.kill().await?;
Ok(())
}
// Await on the completion of a child process directly
#[tracing::instrument(skip(running))]
pub async fn wait_for(running: &mut MinecraftChild) -> crate::Result<()> {
// We do not wait on the Child directly, but wait on the thread manager.
// This way we can still run all cleanup hook functions that happen after.
running
.manager
.take()
.ok_or_else(|| {
crate::ErrorKind::LauncherError(format!(
"Process manager already completed or missing for process {}",
running.uuid
))
})?
.await?
.map_err(|err| {
crate::ErrorKind::LauncherError(format!(
"Error running minecraft: {err}"
))
})?;
Ok(())
}

View File

@@ -1,55 +1,45 @@
//! Theseus profile management interface
use crate::pack::install_from::CreatePackProfile;
use crate::prelude::ProfilePathId;
use crate::state::LinkedData;
use crate::launcher::get_loader_version_from_profile;
use crate::settings::Hooks;
use crate::state::{LinkedData, ProfileInstallStage};
use crate::util::io::{self, canonicalize};
use crate::{
event::{emit::emit_profile, ProfilePayloadType},
prelude::ModLoader,
};
use crate::{pack, profile, ErrorKind};
pub use crate::{
state::{JavaSettings, Profile},
State,
};
use daedalus::modded::LoaderVersion;
pub use crate::{state::Profile, State};
use chrono::Utc;
use std::path::PathBuf;
use tracing::{info, trace};
use uuid::Uuid;
// Creates a profile of a given name and adds it to the in-memory state
// Returns relative filepath as ProfilePathId which can be used to access it in the State
#[tracing::instrument]
#[theseus_macros::debug_pin]
#[allow(clippy::too_many_arguments)]
pub async fn profile_create(
mut name: String, // the name of the profile, and relative path
name: String, // the name of the profile, and relative path
game_version: String, // the game version of the profile
modloader: ModLoader, // the modloader to use
loader_version: Option<String>, // the modloader version to use, set to "latest", "stable", or the ID of your chosen loader. defaults to latest
icon: Option<PathBuf>, // the icon for the profile
icon_url: Option<String>, // the URL icon for a profile (ONLY USED FOR TEMPORARY PROFILES)
icon_path: Option<String>, // the icon for the profile
linked_data: Option<LinkedData>, // the linked project ID (mainly for modpacks)- used for updating
skip_install_profile: Option<bool>,
no_watch: Option<bool>,
) -> crate::Result<ProfilePathId> {
name = profile::sanitize_profile_name(&name);
) -> crate::Result<String> {
trace!("Creating new profile. {}", name);
let state = State::get().await?;
let uuid = Uuid::new_v4();
let mut path = state.directories.profiles_dir().await.join(&name);
if path.exists() {
let mut new_name;
let mut path = profile::sanitize_profile_name(&name);
let mut full_path = state.directories.profiles_dir().join(&path);
if full_path.exists() {
let mut new_path;
let mut new_full_path;
let mut which = 1;
loop {
new_name = format!("{name} ({which})");
new_path = state.directories.profiles_dir().await.join(&new_name);
if !new_path.exists() {
new_path = format!("{path} ({which})");
new_full_path = state.directories.profiles_dir().join(&new_path);
if !new_full_path.exists() {
break;
}
which += 1;
@@ -57,32 +47,60 @@ pub async fn profile_create(
tracing::debug!(
"Folder collision: {}, renaming to: {}",
path.display(),
new_path.display()
full_path.display(),
new_full_path.display()
);
path = new_path;
name = new_name;
full_path = new_full_path;
}
io::create_dir_all(&path).await?;
io::create_dir_all(&full_path).await?;
info!(
"Creating profile at path {}",
&canonicalize(&path)?.display()
&canonicalize(&full_path)?.display()
);
let loader = if modloader != ModLoader::Vanilla {
get_loader_version_from_loader(
game_version.clone(),
get_loader_version_from_profile(
&game_version,
modloader,
loader_version,
loader_version.as_deref(),
)
.await?
} else {
None
};
let mut profile = Profile::new(uuid, name, game_version).await?;
let mut profile = Profile {
path: path.clone(),
install_stage: ProfileInstallStage::NotInstalled,
name,
icon_path: None,
game_version,
loader: modloader,
loader_version: loader.map(|x| x.id),
groups: Vec::new(),
linked_data,
created: Utc::now(),
modified: Utc::now(),
last_played: None,
submitted_time_played: 0,
recent_time_played: 0,
java_path: None,
extra_launch_args: None,
custom_env_vars: None,
memory: None,
force_fullscreen: None,
game_resolution: None,
hooks: Hooks {
pre_launch: None,
wrapper: None,
post_exit: None,
},
};
let result = async {
if let Some(ref icon) = icon {
if let Some(ref icon) = icon_path {
let bytes =
io::read(state.directories.caches_dir().join(icon)).await?;
profile
@@ -90,93 +108,55 @@ pub async fn profile_create(
&state.directories.caches_dir(),
&state.io_semaphore,
bytes::Bytes::from(bytes),
&icon.to_string_lossy(),
icon,
)
.await?;
}
profile.metadata.icon_url = icon_url;
if let Some(loader_version) = loader {
profile.metadata.loader = modloader;
profile.metadata.loader_version = Some(loader_version);
}
profile.metadata.linked_data = linked_data;
if let Some(linked_data) = &mut profile.metadata.linked_data {
linked_data.locked = Some(
linked_data.project_id.is_some()
&& linked_data.version_id.is_some(),
);
}
emit_profile(
uuid,
&profile.profile_id(),
&profile.metadata.name,
ProfilePayloadType::Created,
crate::state::fs_watcher::watch_profile(
&profile.path,
&state.file_watcher,
&state.directories,
)
.await?;
{
let mut profiles = state.profiles.write().await;
profiles
.insert(profile.clone(), no_watch.unwrap_or_default())
.await?;
}
profile.upsert(&state.pool).await?;
emit_profile(&profile.path, ProfilePayloadType::Created).await?;
if !skip_install_profile.unwrap_or(false) {
crate::launcher::install_minecraft(&profile, None, false).await?;
}
State::sync().await?;
Ok(profile.profile_id())
Ok(profile.path)
}
.await;
match result {
Ok(profile) => Ok(profile),
Err(err) => {
let _ = crate::api::profile::remove(&profile.profile_id()).await;
let _ = profile::remove(&path).await;
Err(err)
}
}
}
pub async fn profile_create_from_creator(
profile: CreatePackProfile,
) -> crate::Result<ProfilePathId> {
profile_create(
profile.name,
profile.game_version,
profile.modloader,
profile.loader_version,
profile.icon,
profile.icon_url,
profile.linked_data,
profile.skip_install_profile,
profile.no_watch,
)
.await
}
pub async fn profile_create_from_duplicate(
copy_from: ProfilePathId,
) -> crate::Result<ProfilePathId> {
copy_from: &str,
) -> crate::Result<String> {
// Original profile
let profile = profile::get(&copy_from, None).await?.ok_or_else(|| {
let profile = profile::get(copy_from).await?.ok_or_else(|| {
ErrorKind::UnmanagedProfileError(copy_from.to_string())
})?;
let profile_path_id = profile_create(
profile.metadata.name.clone(),
profile.metadata.game_version.clone(),
profile.metadata.loader,
profile.metadata.loader_version.clone().map(|it| it.id),
profile.metadata.icon.clone(),
profile.metadata.icon_url.clone(),
profile.metadata.linked_data.clone(),
Some(true),
profile.name.clone(),
profile.game_version.clone(),
profile.loader,
profile.loader_version.clone(),
profile.icon_path.clone(),
profile.linked_data.clone(),
Some(true),
)
.await?;
@@ -184,114 +164,26 @@ pub async fn profile_create_from_duplicate(
// Copy it over using the import system (essentially importing from the same profile)
let state = State::get().await?;
let bar = pack::import::copy_dotminecraft(
profile_path_id.clone(),
copy_from.get_full_path().await?,
&profile_path_id,
profile::get_full_path(copy_from).await?,
&state.io_semaphore,
None,
)
.await?;
let duplicated_profile =
profile::get(&profile_path_id, None).await?.ok_or_else(|| {
profile::get(&profile_path_id).await?.ok_or_else(|| {
ErrorKind::UnmanagedProfileError(profile_path_id.to_string())
})?;
crate::launcher::install_minecraft(&duplicated_profile, Some(bar), false)
.await?;
{
let state = State::get().await?;
let mut file_watcher = state.file_watcher.write().await;
Profile::watch_fs(
&profile.get_profile_full_path().await?,
&mut file_watcher,
)
.await?;
}
// emit profile edited
emit_profile(
profile.uuid,
&profile.profile_id(),
&profile.metadata.name,
ProfilePayloadType::Edited,
)
.await?;
State::sync().await?;
emit_profile(&profile.path, ProfilePayloadType::Edited).await?;
Ok(profile_path_id)
}
#[tracing::instrument]
#[theseus_macros::debug_pin]
pub(crate) async fn get_loader_version_from_loader(
game_version: String,
loader: ModLoader,
loader_version: Option<String>,
) -> crate::Result<Option<LoaderVersion>> {
let state = State::get().await?;
let metadata = state.metadata.read().await;
let version = loader_version.unwrap_or_else(|| "latest".to_string());
let filter = |it: &LoaderVersion| match version.as_str() {
"latest" => true,
"stable" => it.stable,
id => {
it.id == *id
|| format!("{}-{}", game_version, id) == it.id
|| format!("{}-{}-{}", game_version, id, game_version) == it.id
}
};
let loader_data = match loader {
ModLoader::Forge => &metadata.forge,
ModLoader::Fabric => &metadata.fabric,
ModLoader::Quilt => &metadata.quilt,
ModLoader::NeoForge => &metadata.neoforge,
_ => {
return Err(
ProfileCreationError::NoManifest(loader.to_string()).into()
)
}
};
let loaders = &loader_data
.game_versions
.iter()
.find(|it| {
it.id
.replace(daedalus::modded::DUMMY_REPLACE_STRING, &game_version)
== game_version
})
.ok_or_else(|| {
ProfileCreationError::ModloaderUnsupported(
loader.to_string(),
game_version.clone(),
)
})?
.loaders;
let loader_version = loaders
.iter()
.find(|&x| filter(x))
.cloned()
.or(
// If stable was searched for but not found, return latest by default
if version == "stable" {
loaders.iter().next().cloned()
} else {
None
},
)
.ok_or_else(|| {
ProfileCreationError::InvalidVersionModloader(
version,
loader.to_string(),
)
})?;
Ok(Some(loader_version))
}
#[derive(thiserror::Error, Debug)]
pub enum ProfileCreationError {
#[error("Profile .json exists: {0}")]

File diff suppressed because it is too large Load Diff

View File

@@ -1,24 +1,24 @@
use crate::state::CacheBehaviour;
use crate::{
event::{
emit::{emit_profile, init_loading, loading_try_for_each_concurrent},
emit::{emit_profile, init_loading},
ProfilePayloadType,
},
pack::{self, install_from::generate_pack_from_version_id},
prelude::{ProfilePathId, ProjectPathId},
profile::get,
state::{ProfileInstallStage, Project},
LoadingBarType, State,
state::ProfileInstallStage,
LoadingBarType,
};
use futures::try_join;
/// Updates a managed modrinth pack to the version specified by new_version_id
#[tracing::instrument]
#[theseus_macros::debug_pin]
pub async fn update_managed_modrinth_version(
profile_path: &ProfilePathId,
profile_path: &String,
new_version_id: &String,
) -> crate::Result<()> {
let profile = get(profile_path, None).await?.ok_or_else(|| {
let profile = get(profile_path).await?.ok_or_else(|| {
crate::ErrorKind::UnmanagedProfileError(profile_path.to_string())
.as_error()
})?;
@@ -30,46 +30,29 @@ pub async fn update_managed_modrinth_version(
};
// Extract modrinth pack information, if appropriate
let linked_data = profile
.metadata
.linked_data
.as_ref()
.ok_or_else(unmanaged_err)?;
let project_id: &String =
linked_data.project_id.as_ref().ok_or_else(unmanaged_err)?;
let version_id =
linked_data.version_id.as_ref().ok_or_else(unmanaged_err)?;
let linked_data = profile.linked_data.as_ref().ok_or_else(unmanaged_err)?;
// Replace the pack with the new version
replace_managed_modrinth(
profile_path,
&profile,
project_id,
version_id,
&linked_data.project_id,
&linked_data.version_id,
Some(new_version_id),
true, // switching versions should ignore the lock
)
.await?;
emit_profile(
profile.uuid,
profile_path,
&profile.metadata.name,
ProfilePayloadType::Edited,
)
.await?;
emit_profile(profile_path, ProfilePayloadType::Edited).await?;
State::sync().await?;
Ok(())
}
/// Repair a managed modrinth pack by 'updating' it to the current version
#[tracing::instrument]
#[theseus_macros::debug_pin]
pub async fn repair_managed_modrinth(
profile_path: &ProfilePathId,
) -> crate::Result<()> {
let profile = get(profile_path, None).await?.ok_or_else(|| {
pub async fn repair_managed_modrinth(profile_path: &str) -> crate::Result<()> {
let profile = get(profile_path).await?.ok_or_else(|| {
crate::ErrorKind::UnmanagedProfileError(profile_path.to_string())
.as_error()
})?;
@@ -83,69 +66,44 @@ pub async fn repair_managed_modrinth(
// For repairing specifically, first we remove all installed projects (to ensure we do remove ones that aren't in the pack)
// We do a project removal followed by removing everything in the .mrpack, to ensure we only
// remove relevant projects and not things like save files
let projects_map = profile.projects.clone();
let stream = futures::stream::iter(
projects_map
.into_iter()
.map(Ok::<(ProjectPathId, Project), crate::Error>),
);
loading_try_for_each_concurrent(
stream,
None,
None,
0.0,
0,
None,
|(project_id, _)| {
let profile = profile.clone();
async move {
profile.remove_project(&project_id, Some(true)).await?;
Ok(())
}
},
)
.await?;
let state = crate::State::get().await?;
let projects_map = profile
.get_projects(
Some(CacheBehaviour::MustRevalidate),
&state.pool,
&state.api_semaphore,
)
.await?;
for (file, _) in projects_map {
crate::state::Profile::remove_project(&profile.path, &file).await?;
}
// Extract modrinth pack information, if appropriate
let linked_data = profile
.metadata
.linked_data
.as_ref()
.ok_or_else(unmanaged_err)?;
let project_id: &String =
linked_data.project_id.as_ref().ok_or_else(unmanaged_err)?;
let version_id =
linked_data.version_id.as_ref().ok_or_else(unmanaged_err)?;
let linked_data = profile.linked_data.as_ref().ok_or_else(unmanaged_err)?;
// Replace the pack with the same version
replace_managed_modrinth(
profile_path,
&profile,
project_id,
version_id,
&linked_data.project_id,
&linked_data.version_id,
None,
false, // do not ignore lock, as repairing can reset the lock
)
.await?;
emit_profile(
profile.uuid,
profile_path,
&profile.metadata.name,
ProfilePayloadType::Edited,
)
.await?;
emit_profile(profile_path, ProfilePayloadType::Edited).await?;
State::sync().await?;
Ok(())
}
/// Replace a managed modrinth pack with a new version
/// If new_version_id is None, the pack is 'reinstalled' in-place
#[tracing::instrument(skip(profile))]
#[theseus_macros::debug_pin]
async fn replace_managed_modrinth(
profile_path: &ProfilePathId,
profile_path: &str,
profile: &crate::state::Profile,
project_id: &String,
version_id: &String,
@@ -161,59 +119,63 @@ async fn replace_managed_modrinth(
// Fetch .mrpacks for both old and new versions
// TODO: this will need to be updated if we revert the hacky pack method we needed for compiler speed
let (old_pack_creator, new_pack_creator) =
if let Some(new_version_id) = new_version_id {
let shared_loading_bar = init_loading(
LoadingBarType::PackFileDownload {
profile_path: profile_path.get_full_path().await?,
pack_name: profile.metadata.name.clone(),
icon: None,
pack_version: version_id.clone(),
},
200.0, // These two downloads will share the same loading bar
"Downloading pack file",
)
.await?;
let (old_pack_creator, new_pack_creator) = if let Some(new_version_id) =
new_version_id
{
let shared_loading_bar = init_loading(
LoadingBarType::PackFileDownload {
profile_path: crate::api::profile::get_full_path(profile_path)
.await?
.to_string_lossy()
.to_string(),
pack_name: profile.name.clone(),
icon: None,
pack_version: version_id.clone(),
},
200.0, // These two downloads will share the same loading bar
"Downloading pack file",
)
.await?;
// download in parallel, then join.
try_join!(
generate_pack_from_version_id(
project_id.clone(),
version_id.clone(),
profile.metadata.name.clone(),
None,
profile_path.clone(),
Some(shared_loading_bar.clone())
),
generate_pack_from_version_id(
project_id.clone(),
new_version_id.clone(),
profile.metadata.name.clone(),
None,
profile_path.clone(),
Some(shared_loading_bar)
)
)?
} else {
// If new_version_id is None, we don't need to download the new pack, so we clone the old one
let mut old_pack_creator = generate_pack_from_version_id(
// download in parallel, then join.
try_join!(
generate_pack_from_version_id(
project_id.clone(),
version_id.clone(),
profile.metadata.name.clone(),
profile.name.clone(),
None,
profile_path.clone(),
profile_path.to_string(),
Some(shared_loading_bar.clone())
),
generate_pack_from_version_id(
project_id.clone(),
new_version_id.clone(),
profile.name.clone(),
None,
profile_path.to_string(),
Some(shared_loading_bar)
)
.await?;
old_pack_creator.description.existing_loading_bar = None;
(old_pack_creator.clone(), old_pack_creator)
};
)?
} else {
// If new_version_id is None, we don't need to download the new pack, so we clone the old one
let mut old_pack_creator = generate_pack_from_version_id(
project_id.clone(),
version_id.clone(),
profile.name.clone(),
None,
profile_path.to_string(),
None,
)
.await?;
old_pack_creator.description.existing_loading_bar = None;
(old_pack_creator.clone(), old_pack_creator)
};
// Removal - remove all files that were added by the old pack
// - remove all installed projects
// - remove all overrides
pack::install_mrpack::remove_all_related_files(
profile_path.clone(),
profile_path.to_string(),
old_pack_creator.file,
)
.await?;

View File

@@ -1,5 +0,0 @@
use crate::state::{ProcessType, SafeProcesses};
pub async fn check_safe_loading_bars() -> crate::Result<bool> {
SafeProcesses::is_complete(ProcessType::LoadingBar).await
}

View File

@@ -1,21 +1,7 @@
//! Theseus profile management interface
use std::path::{Path, PathBuf};
use tokio::fs;
use io::IOError;
use tokio::sync::RwLock;
use crate::{
event::emit::{emit_loading, init_loading},
prelude::DirectoryInfo,
state::{self, Profiles},
util::{fetch, io},
};
pub use crate::{
state::{
Hooks, JavaSettings, MemorySettings, Profile, Settings, WindowSize,
},
state::{Hooks, MemorySettings, Profile, Settings, WindowSize},
State,
};
@@ -23,224 +9,15 @@ pub use crate::{
#[tracing::instrument]
pub async fn get() -> crate::Result<Settings> {
let state = State::get().await?;
let settings = state.settings.read().await;
Ok(settings.clone())
let settings = Settings::get(&state.pool).await?;
Ok(settings)
}
/// Sets entire settings
#[tracing::instrument]
pub async fn set(settings: Settings) -> crate::Result<()> {
let state = State::get().await?;
settings.update(&state.pool).await?;
if settings.loaded_config_dir
!= state.settings.read().await.loaded_config_dir
{
return Err(crate::ErrorKind::OtherError(
"Cannot change config directory as setting".to_string(),
)
.as_error());
}
let (reset_io, reset_fetch) = async {
let read = state.settings.read().await;
(
settings.max_concurrent_writes != read.max_concurrent_writes,
settings.max_concurrent_downloads != read.max_concurrent_downloads,
)
}
.await;
let updated_discord_rpc = {
let read = state.settings.read().await;
settings.disable_discord_rpc != read.disable_discord_rpc
};
{
*state.settings.write().await = settings;
}
if updated_discord_rpc {
state.discord_rpc.clear_to_default(true).await?;
}
if reset_io {
state.reset_io_semaphore().await;
}
if reset_fetch {
state.reset_fetch_semaphore().await;
}
State::sync().await?;
Ok(())
}
/// Sets the new config dir, the location of all Theseus data except for the settings.json and caches
/// Takes control of the entire state and blocks until completion
pub async fn set_config_dir(new_config_dir: PathBuf) -> crate::Result<()> {
tracing::trace!("Changing config dir to: {}", new_config_dir.display());
if !new_config_dir.is_dir() {
return Err(crate::ErrorKind::FSError(format!(
"New config dir is not a folder: {}",
new_config_dir.display()
))
.as_error());
}
if !is_dir_writeable(new_config_dir.clone()).await? {
return Err(crate::ErrorKind::FSError(format!(
"New config dir is not writeable: {}",
new_config_dir.display()
))
.as_error());
}
let loading_bar = init_loading(
crate::LoadingBarType::ConfigChange {
new_path: new_config_dir.clone(),
},
100.0,
"Changing configuration directory",
)
.await?;
tracing::trace!("Changing config dir, taking control of the state");
// Take control of the state
let mut state_write = State::get_write().await?;
let old_config_dir =
state_write.directories.config_dir.read().await.clone();
// Reset file watcher
tracing::trace!("Reset file watcher");
let file_watcher = state::init_watcher().await?;
state_write.file_watcher = RwLock::new(file_watcher);
// Getting files to be moved
let mut config_entries = io::read_dir(&old_config_dir).await?;
let across_drives = is_different_drive(&old_config_dir, &new_config_dir);
let mut entries = vec![];
let mut deletable_entries = vec![];
while let Some(entry) = config_entries
.next_entry()
.await
.map_err(|e| IOError::with_path(e, &old_config_dir))?
{
let entry_path = entry.path();
if let Some(file_name) = entry_path.file_name() {
// We are only moving the profiles and metadata folders
if file_name == state::PROFILES_FOLDER_NAME
|| file_name == state::METADATA_FOLDER_NAME
{
if across_drives {
entries.extend(
crate::pack::import::get_all_subfiles(&entry_path)
.await?,
);
deletable_entries.push(entry_path.clone());
} else {
entries.push(entry_path.clone());
}
}
}
}
tracing::trace!("Moving files");
let semaphore = &state_write.io_semaphore;
let num_entries = entries.len() as f64;
for entry_path in entries {
let relative_path = entry_path.strip_prefix(&old_config_dir)?;
let new_path = new_config_dir.join(relative_path);
if across_drives {
fetch::copy(&entry_path, &new_path, semaphore).await?;
} else {
io::rename(entry_path.clone(), new_path.clone()).await?;
}
emit_loading(&loading_bar, 80.0 * (1.0 / num_entries), None).await?;
}
tracing::trace!("Setting configuration setting");
// Set load config dir setting
let settings = {
let mut settings = state_write.settings.write().await;
settings.loaded_config_dir = Some(new_config_dir.clone());
// Some java paths are hardcoded to within our config dir, so we need to update them
tracing::trace!("Updating java keys");
for key in settings.java_globals.keys() {
if let Some(java) = settings.java_globals.get_mut(&key) {
// If the path is within the old config dir path, update it to the new config dir
if let Ok(relative_path) = PathBuf::from(java.path.clone())
.strip_prefix(&old_config_dir)
{
java.path = new_config_dir
.join(relative_path)
.to_string_lossy()
.to_string();
}
}
}
tracing::trace!("Syncing settings");
settings
.sync(&state_write.directories.settings_file())
.await?;
settings.clone()
};
tracing::trace!("Reinitializing directory");
// Set new state information
state_write.directories = DirectoryInfo::init(&settings)?;
// Delete entries that were from a different drive
let deletable_entries_len = deletable_entries.len();
if deletable_entries_len > 0 {
tracing::trace!("Deleting old files");
}
for entry in deletable_entries {
io::remove_dir_all(entry).await?;
emit_loading(
&loading_bar,
10.0 * (1.0 / deletable_entries_len as f64),
None,
)
.await?;
}
// Reset file watcher
tracing::trace!("Reset file watcher");
let mut file_watcher = state::init_watcher().await?;
// Reset profiles (for filepaths, file watcher, etc)
state_write.profiles = RwLock::new(
Profiles::init(&state_write.directories, &mut file_watcher).await?,
);
state_write.file_watcher = RwLock::new(file_watcher);
emit_loading(&loading_bar, 10.0, None).await?;
tracing::info!(
"Successfully switched config folder to: {}",
new_config_dir.display()
);
Ok(())
}
// Function to check if two paths are on different drives/roots
fn is_different_drive(path1: &Path, path2: &Path) -> bool {
let root1 = path1.components().next();
let root2 = path2.components().next();
root1 != root2
}
pub async fn is_dir_writeable(new_config_dir: PathBuf) -> crate::Result<bool> {
let temp_path = new_config_dir.join(".tmp");
match fs::write(temp_path.clone(), "test").await {
Ok(_) => {
fs::remove_file(temp_path).await?;
Ok(true)
}
Err(e) => {
tracing::error!("Error writing to new config dir: {}", e);
Ok(false)
}
}
}

View File

@@ -1,52 +1,64 @@
//! Theseus tag management interface
use crate::state::CachedEntry;
pub use crate::{
state::{Category, DonationPlatform, GameVersion, Loader, Tags},
state::{Category, DonationPlatform, GameVersion, Loader},
State,
};
// Get bundled set of tags
#[tracing::instrument]
pub async fn get_tag_bundle() -> crate::Result<Tags> {
let state = State::get().await?;
let tags = state.tags.read().await;
Ok(tags.get_tag_bundle())
}
/// Get category tags
#[tracing::instrument]
pub async fn get_category_tags() -> crate::Result<Vec<Category>> {
let state = State::get().await?;
let tags = state.tags.read().await;
let categories =
CachedEntry::get_categories(None, &state.pool, &state.api_semaphore)
.await?
.ok_or_else(|| {
crate::ErrorKind::NoValueFor("category tags".to_string())
})?;
Ok(tags.get_categories())
Ok(categories)
}
/// Get report type tags
#[tracing::instrument]
pub async fn get_report_type_tags() -> crate::Result<Vec<String>> {
let state = State::get().await?;
let tags = state.tags.read().await;
let report_types =
CachedEntry::get_report_types(None, &state.pool, &state.api_semaphore)
.await?
.ok_or_else(|| {
crate::ErrorKind::NoValueFor("report type tags".to_string())
})?;
Ok(tags.get_report_types())
Ok(report_types)
}
/// Get loader tags
#[tracing::instrument]
pub async fn get_loader_tags() -> crate::Result<Vec<Loader>> {
let state = State::get().await?;
let tags = state.tags.read().await;
let loaders =
CachedEntry::get_loaders(None, &state.pool, &state.api_semaphore)
.await?
.ok_or_else(|| {
crate::ErrorKind::NoValueFor("loader tags".to_string())
})?;
Ok(tags.get_loaders())
Ok(loaders)
}
/// Get game version tags
#[tracing::instrument]
pub async fn get_game_version_tags() -> crate::Result<Vec<GameVersion>> {
let state = State::get().await?;
let tags = state.tags.read().await;
let game_versions =
CachedEntry::get_game_versions(None, &state.pool, &state.api_semaphore)
.await?
.ok_or_else(|| {
crate::ErrorKind::NoValueFor("game version tags".to_string())
})?;
Ok(tags.get_game_versions())
Ok(game_versions)
}
/// Get donation platform tags
@@ -54,7 +66,15 @@ pub async fn get_game_version_tags() -> crate::Result<Vec<GameVersion>> {
pub async fn get_donation_platform_tags() -> crate::Result<Vec<DonationPlatform>>
{
let state = State::get().await?;
let tags = state.tags.read().await;
let donation_platforms = CachedEntry::get_donation_platforms(
None,
&state.pool,
&state.api_semaphore,
)
.await?
.ok_or_else(|| {
crate::ErrorKind::NoValueFor("donation platform tags".to_string())
})?;
Ok(tags.get_donation_platforms())
Ok(donation_platforms)
}

View File

@@ -1,3 +1,6 @@
//! Configuration structs
pub const MODRINTH_API_URL: &str = "https://api.modrinth.com/v2/";
pub const MODRINTH_API_URL_V3: &str = "https://api.modrinth.com/v3/";
pub const META_URL: &str = "https://launcher-meta.modrinth.com/";

View File

@@ -102,6 +102,12 @@ pub enum ErrorKind {
#[cfg(feature = "tauri")]
#[error("Tauri error: {0}")]
TauriError(#[from] tauri::Error),
#[error("Error interacting with database: {0}")]
Sqlx(#[from] sqlx::Error),
#[error("Error while applying migrations: {0}")]
SqlxMigrate(#[from] sqlx::migrate::MigrateError),
}
#[derive(Debug)]

View File

@@ -1,11 +1,7 @@
use super::LoadingBarId;
use crate::{
event::{
CommandPayload, EventError, LoadingBar, LoadingBarType,
ProcessPayloadType, ProfilePayloadType,
},
prelude::ProfilePathId,
state::{ProcessType, SafeProcesses},
use crate::event::{
CommandPayload, EventError, LoadingBar, LoadingBarType, ProcessPayloadType,
ProfilePayloadType,
};
use futures::prelude::*;
@@ -49,20 +45,19 @@ const CLI_PROGRESS_BAR_TOTAL: u64 = 1000;
/// total is the total amount of work to be done- all emissions will be considered a fraction of this value (should be 1 or 100 for simplicity)
/// title is the title of the loading bar
/// The app will wait for this loading bar to finish before exiting, as it is considered safe.
#[theseus_macros::debug_pin]
pub async fn init_loading(
bar_type: LoadingBarType,
total: f64,
title: &str,
) -> crate::Result<LoadingBarId> {
let key = init_loading_unsafe(bar_type, total, title).await?;
SafeProcesses::add_uuid(ProcessType::LoadingBar, key.0).await?;
Ok(key)
}
/// An unsafe loading bar can be created without adding it to the SafeProcesses list,
/// meaning that the app won't ask to wait for it to finish before exiting.
#[theseus_macros::debug_pin]
pub async fn init_loading_unsafe(
bar_type: LoadingBarType,
total: f64,
@@ -149,7 +144,7 @@ pub async fn edit_loading(
// By convention, fraction is the fraction of the progress bar that is filled
#[allow(unused_variables)]
#[tracing::instrument(level = "debug")]
#[theseus_macros::debug_pin]
pub async fn emit_loading(
key: &LoadingBarId,
increment_frac: f64,
@@ -233,22 +228,6 @@ pub async fn emit_warning(message: &str) -> crate::Result<()> {
Ok(())
}
// emit_offline(bool)
// This is used to emit an event to the frontend that the app is offline after a refresh (or online)
#[allow(dead_code)]
#[allow(unused_variables)]
pub async fn emit_offline(offline: bool) -> crate::Result<()> {
#[cfg(feature = "tauri")]
{
let event_state = crate::EventState::get().await?;
event_state
.app
.emit_all("offline", offline)
.map_err(EventError::from)?;
}
Ok(())
}
// emit_command(CommandPayload::Something { something })
// ie: installing a pack, opening an .mrpack, etc
// Generally used for url deep links and file opens that we we want to handle in the frontend
@@ -270,7 +249,7 @@ pub async fn emit_command(command: CommandPayload) -> crate::Result<()> {
// emit_process(uuid, pid, event, message)
#[allow(unused_variables)]
pub async fn emit_process(
uuid: Uuid,
profile_path: &str,
pid: u32,
event: ProcessPayloadType,
message: &str,
@@ -283,7 +262,7 @@ pub async fn emit_process(
.emit_all(
"process",
ProcessPayload {
uuid,
profile_path_id: profile_path.to_string(),
pid,
event,
message: message.to_string(),
@@ -297,24 +276,18 @@ pub async fn emit_process(
// emit_profile(path, event)
#[allow(unused_variables)]
pub async fn emit_profile(
uuid: Uuid,
profile_path_id: &ProfilePathId,
name: &str,
profile_path_id: &str,
event: ProfilePayloadType,
) -> crate::Result<()> {
#[cfg(feature = "tauri")]
{
let path = profile_path_id.get_full_path().await?;
let event_state = crate::EventState::get().await?;
event_state
.app
.emit_all(
"profile",
ProfilePayload {
uuid,
profile_path_id: profile_path_id.clone(),
path,
name: name.to_string(),
profile_path_id: profile_path_id.to_string(),
event,
},
)
@@ -382,7 +355,7 @@ macro_rules! loading_join {
// If message is Some(t) you will overwrite this loading bar's message with a custom one
// num_futs is the number of futures that will be run, which is needed as we allow Iterator to be passed in, which doesn't have a size
#[tracing::instrument(skip(stream, f))]
#[theseus_macros::debug_pin]
pub async fn loading_try_for_each_concurrent<I, F, Fut, T>(
stream: I,
limit: Option<usize>,

View File

@@ -5,9 +5,6 @@ use tokio::sync::OnceCell;
use tokio::sync::RwLock;
use uuid::Uuid;
use crate::prelude::ProfilePathId;
use crate::state::SafeProcesses;
pub mod emit;
// Global event state
@@ -140,15 +137,6 @@ impl Drop for LoadingBarId {
#[cfg(not(any(feature = "tauri", feature = "cli")))]
bars.remove(&loader_uuid);
}
// complete calls state, and since a LoadingBarId is created in state initialization, we only complete if its already initializaed
// to avoid an infinite loop.
if crate::State::initialized() {
let _ = SafeProcesses::complete(
crate::state::ProcessType::LoadingBar,
loader_uuid,
)
.await;
}
});
}
}
@@ -162,28 +150,28 @@ pub enum LoadingBarType {
version: u32,
},
PackFileDownload {
profile_path: PathBuf,
profile_path: String,
pack_name: String,
icon: Option<String>,
pack_version: String,
},
PackDownload {
profile_path: PathBuf,
profile_path: String,
pack_name: String,
icon: Option<PathBuf>,
pack_id: Option<String>,
pack_version: Option<String>,
},
MinecraftDownload {
profile_path: PathBuf,
profile_path: String,
profile_name: String,
},
ProfileUpdate {
profile_path: PathBuf,
profile_path: String,
profile_name: String,
},
ZipExtract {
profile_path: PathBuf,
profile_path: String,
profile_name: String,
},
ConfigChange {
@@ -233,7 +221,7 @@ pub enum CommandPayload {
#[derive(Serialize, Clone)]
pub struct ProcessPayload {
pub uuid: Uuid, // processes in state are going to be identified by UUIDs, as they might change to different processes
pub profile_path_id: String,
pub pid: u32,
pub event: ProcessPayloadType,
pub message: String,
@@ -242,23 +230,18 @@ pub struct ProcessPayload {
#[serde(rename_all = "snake_case")]
pub enum ProcessPayloadType {
Launched,
Updated, // eg: if the MinecraftChild changes to its post-command process instead of the Minecraft process
Finished,
}
#[derive(Serialize, Clone)]
pub struct ProfilePayload {
pub uuid: Uuid,
pub profile_path_id: ProfilePathId,
pub path: PathBuf,
pub name: String,
pub profile_path_id: String,
pub event: ProfilePayloadType,
}
#[derive(Serialize, Clone)]
#[serde(rename_all = "snake_case")]
pub enum ProfilePayloadType {
Created,
Added, // also triggered when Created
Synced,
Edited,
Removed,

View File

@@ -1,7 +1,6 @@
//! Downloader for Minecraft data
use crate::launcher::parse_rules;
use crate::state::CredentialsStore;
use crate::{
event::{
emit::{emit_loading, loading_try_for_each_concurrent},
@@ -19,6 +18,7 @@ use daedalus::{
modded::LoaderVersion,
};
use futures::prelude::*;
use reqwest::Method;
use tokio::sync::OnceCell;
#[tracing::instrument(skip(st, version))]
@@ -58,7 +58,7 @@ pub async fn download_minecraft(
}
#[tracing::instrument(skip_all, fields(version = version.id.as_str(), loader = ?loader))]
#[theseus_macros::debug_pin]
pub async fn download_version_info(
st: &State,
version: &GameVersion,
@@ -72,7 +72,6 @@ pub async fn download_version_info(
let path = st
.directories
.version_dir(&version_id)
.await
.join(format!("{version_id}.json"));
let res = if path.exists() && !force.unwrap_or(false) {
@@ -82,10 +81,26 @@ pub async fn download_version_info(
.and_then(|ref it| Ok(serde_json::from_slice(it)?))
} else {
tracing::info!("Downloading version info for version {}", &version.id);
let mut info = d::minecraft::fetch_version_info(version).await?;
let mut info = fetch_json(
Method::GET,
&version.url,
None,
None,
&st.api_semaphore,
&st.pool,
)
.await?;
if let Some(loader) = loader {
let partial = d::modded::fetch_partial_version(&loader.url).await?;
let partial: d::modded::PartialVersionInfo = fetch_json(
Method::GET,
&loader.url,
None,
None,
&st.api_semaphore,
&st.pool,
)
.await?;
info = d::modded::merge_partial_version(partial, info);
}
@@ -104,7 +119,7 @@ pub async fn download_version_info(
}
#[tracing::instrument(skip_all)]
#[theseus_macros::debug_pin]
pub async fn download_client(
st: &State,
version_info: &GameVersionInfo,
@@ -125,7 +140,6 @@ pub async fn download_client(
let path = st
.directories
.version_dir(version)
.await
.join(format!("{version}.jar"));
if !path.exists() || force {
@@ -133,7 +147,7 @@ pub async fn download_client(
&client_download.url,
Some(&client_download.sha1),
&st.fetch_semaphore,
&CredentialsStore(None),
&st.pool,
)
.await?;
write(&path, &bytes, &st.io_semaphore).await?;
@@ -148,7 +162,7 @@ pub async fn download_client(
}
#[tracing::instrument(skip_all)]
#[theseus_macros::debug_pin]
pub async fn download_assets_index(
st: &State,
version: &GameVersionInfo,
@@ -159,7 +173,6 @@ pub async fn download_assets_index(
let path = st
.directories
.assets_index_dir()
.await
.join(format!("{}.json", &version.asset_index.id));
let res = if path.exists() && !force {
@@ -168,7 +181,15 @@ pub async fn download_assets_index(
.await
.and_then(|ref it| Ok(serde_json::from_slice(it)?))
} else {
let index = d::minecraft::fetch_assets_index(version).await?;
let index = fetch_json(
Method::GET,
&version.asset_index.url,
None,
None,
&st.fetch_semaphore,
&st.pool,
)
.await?;
write(&path, &serde_json::to_vec(&index)?, &st.io_semaphore).await?;
tracing::info!("Fetched assets index");
Ok(index)
@@ -182,7 +203,7 @@ pub async fn download_assets_index(
}
#[tracing::instrument(skip(st, index))]
#[theseus_macros::debug_pin]
pub async fn download_assets(
st: &State,
with_legacy: bool,
@@ -204,7 +225,7 @@ pub async fn download_assets(
None,
|(name, asset)| async move {
let hash = &asset.hash;
let resource_path = st.directories.object_dir(hash).await;
let resource_path = st.directories.object_dir(hash);
let url = format!(
"https://resources.download.minecraft.net/{sub_hash}/{hash}",
sub_hash = &hash[..2]
@@ -215,7 +236,7 @@ pub async fn download_assets(
async {
if !resource_path.exists() || force {
let resource = fetch_cell
.get_or_try_init(|| fetch(&url, Some(hash), &st.fetch_semaphore, &CredentialsStore(None)))
.get_or_try_init(|| fetch(&url, Some(hash), &st.fetch_semaphore, &st.pool))
.await?;
write(&resource_path, resource, &st.io_semaphore).await?;
tracing::trace!("Fetched asset with hash {hash}");
@@ -223,13 +244,13 @@ pub async fn download_assets(
Ok::<_, crate::Error>(())
},
async {
let resource_path = st.directories.legacy_assets_dir().await.join(
let resource_path = st.directories.legacy_assets_dir().join(
name.replace('/', &String::from(std::path::MAIN_SEPARATOR))
);
if with_legacy && !resource_path.exists() || force {
let resource = fetch_cell
.get_or_try_init(|| fetch(&url, Some(hash), &st.fetch_semaphore, &CredentialsStore(None)))
.get_or_try_init(|| fetch(&url, Some(hash), &st.fetch_semaphore, &st.pool))
.await?;
write(&resource_path, resource, &st.io_semaphore).await?;
tracing::trace!("Fetched legacy asset with hash {hash}");
@@ -246,7 +267,6 @@ pub async fn download_assets(
}
#[tracing::instrument(skip(st, libraries))]
#[theseus_macros::debug_pin]
#[allow(clippy::too_many_arguments)]
pub async fn download_libraries(
st: &State,
@@ -261,8 +281,8 @@ pub async fn download_libraries(
tracing::debug!("Loading libraries");
tokio::try_join! {
io::create_dir_all(st.directories.libraries_dir().await),
io::create_dir_all(st.directories.version_natives_dir(version).await)
io::create_dir_all(st.directories.libraries_dir()),
io::create_dir_all(st.directories.version_natives_dir(version))
}?;
let num_files = libraries.len();
loading_try_for_each_concurrent(
@@ -275,38 +295,42 @@ pub async fn download_libraries(
}
}
if !library.downloadable {
tracing::trace!("Skipped non-downloadable library {}", &library.name);
return Ok(());
}
tokio::try_join! {
async {
let artifact_path = d::get_path_from_artifact(&library.name)?;
let path = st.directories.libraries_dir().await.join(&artifact_path);
let path = st.directories.libraries_dir().join(&artifact_path);
match library.downloads {
_ if path.exists() && !force => Ok(()),
Some(d::minecraft::LibraryDownloads {
artifact: Some(ref artifact),
..
}) => {
let bytes = fetch(&artifact.url, Some(&artifact.sha1), &st.fetch_semaphore, &CredentialsStore(None))
if path.exists() && !force {
return Ok(());
}
if let Some(d::minecraft::LibraryDownloads { artifact: Some(ref artifact), ..}) = library.downloads {
if !artifact.url.is_empty(){
let bytes = fetch(&artifact.url, Some(&artifact.sha1), &st.fetch_semaphore, &st.pool)
.await?;
write(&path, &bytes, &st.io_semaphore).await?;
tracing::trace!("Fetched library {} to path {:?}", &library.name, &path);
Ok::<_, crate::Error>(())
}
_ => {
let url = [
library
.url
.as_deref()
.unwrap_or("https://libraries.minecraft.net/"),
&artifact_path
].concat();
let bytes = fetch(&url, None, &st.fetch_semaphore, &CredentialsStore(None)).await?;
write(&path, &bytes, &st.io_semaphore).await?;
tracing::trace!("Fetched library {} to path {:?}", &library.name, &path);
Ok::<_, crate::Error>(())
return Ok::<_, crate::Error>(());
}
}
let url = [
library
.url
.as_deref()
.unwrap_or("https://libraries.minecraft.net/"),
&artifact_path
].concat();
let bytes = fetch(&url, None, &st.fetch_semaphore, &st.pool).await?;
write(&path, &bytes, &st.io_semaphore).await?;
tracing::trace!("Fetched library {} to path {:?}", &library.name, &path);
Ok::<_, crate::Error>(())
},
async {
// HACK: pseudo try block using or else
@@ -327,10 +351,10 @@ pub async fn download_libraries(
);
if let Some(native) = classifiers.get(&parsed_key) {
let data = fetch(&native.url, Some(&native.sha1), &st.fetch_semaphore, &CredentialsStore(None)).await?;
let data = fetch(&native.url, Some(&native.sha1), &st.fetch_semaphore, &st.pool).await?;
let reader = std::io::Cursor::new(&data);
if let Ok(mut archive) = zip::ZipArchive::new(reader) {
match archive.extract(st.directories.version_natives_dir(version).await) {
match archive.extract(st.directories.version_natives_dir(version)) {
Ok(_) => tracing::debug!("Fetched native {}", &library.name),
Err(err) => tracing::error!("Failed extracting native {}. err: {}", &library.name, err)
}

View File

@@ -1,23 +1,22 @@
//! Logic for launching Minecraft
use crate::data::ModLoader;
use crate::event::emit::{emit_loading, init_or_edit_loading};
use crate::event::{LoadingBarId, LoadingBarType};
use crate::launcher::io::IOError;
use crate::prelude::JavaVersion;
use crate::state::{Credentials, ProfileInstallStage};
use crate::state::{Credentials, JavaVersion, Process, ProfileInstallStage};
use crate::util::io;
use crate::{
process,
state::{self as st, MinecraftChild},
state::{self as st},
State,
};
use chrono::Utc;
use daedalus as d;
use daedalus::minecraft::{RuleAction, VersionInfo};
use daedalus::modded::LoaderVersion;
use st::Profile;
use std::collections::HashMap;
use std::sync::Arc;
use tokio::process::Command;
use uuid::Uuid;
mod args;
@@ -114,86 +113,127 @@ pub async fn get_java_version_from_profile(
profile: &Profile,
version_info: &VersionInfo,
) -> crate::Result<Option<JavaVersion>> {
if let Some(java) = profile.java.clone().and_then(|x| x.override_version) {
Ok(Some(java))
} else {
let key = version_info
.java_version
.as_ref()
.map(|it| it.major_version)
.unwrap_or(8);
if let Some(java) = profile.java_path.as_ref() {
let java = crate::api::jre::check_jre(std::path::PathBuf::from(java))
.await
.ok()
.flatten();
let state = State::get().await?;
let settings = state.settings.read().await;
if let Some(java) = settings.java_globals.get(&format!("JAVA_{key}")) {
return Ok(Some(java.clone()));
if let Some(java) = java {
return Ok(Some(java));
}
}
let key = version_info
.java_version
.as_ref()
.map(|it| it.major_version)
.unwrap_or(8);
let state = State::get().await?;
let java_version = JavaVersion::get(key, &state.pool).await?;
Ok(java_version)
}
pub async fn get_loader_version_from_profile(
game_version: &str,
loader: ModLoader,
loader_version: Option<&str>,
) -> crate::Result<Option<LoaderVersion>> {
if loader == ModLoader::Vanilla {
return Ok(None);
}
let version = loader_version.unwrap_or("latest");
let filter = |it: &LoaderVersion| match version {
"latest" => true,
"stable" => it.stable,
id => it.id == *id,
};
let versions =
crate::api::metadata::get_loader_versions(loader.as_meta_str()).await?;
let loaders = versions.game_versions.into_iter().find(|x| {
x.id.replace(daedalus::modded::DUMMY_REPLACE_STRING, game_version)
== game_version
});
if let Some(loaders) = loaders {
let loader_version = loaders.loaders.iter().find(|x| filter(x)).or(
if version == "stable" {
loaders.loaders.first()
} else {
None
},
);
Ok(loader_version.cloned())
} else {
Ok(None)
}
}
#[tracing::instrument(skip(profile))]
#[theseus_macros::debug_pin]
pub async fn install_minecraft(
profile: &Profile,
existing_loading_bar: Option<LoadingBarId>,
repairing: bool,
) -> crate::Result<()> {
let sync_projects = existing_loading_bar.is_some();
let loading_bar = init_or_edit_loading(
existing_loading_bar,
LoadingBarType::MinecraftDownload {
// If we are downloading minecraft for a profile, provide its name and uuid
profile_name: profile.metadata.name.clone(),
profile_path: profile.get_profile_full_path().await?,
profile_name: profile.name.clone(),
profile_path: profile.path.clone(),
},
100.0,
"Downloading Minecraft",
)
.await?;
crate::api::profile::edit(&profile.profile_id(), |prof| {
crate::api::profile::edit(&profile.path, |prof| {
prof.install_stage = ProfileInstallStage::Installing;
async { Ok(()) }
})
.await?;
State::sync().await?;
if sync_projects {
Profile::sync_projects_task(profile.profile_id(), true);
}
let state = State::get().await?;
let instance_path =
&io::canonicalize(profile.get_profile_full_path().await?)?;
let metadata = state.metadata.read().await;
let version_index = metadata
.minecraft
let instance_path =
crate::api::profile::get_full_path(&profile.path).await?;
let minecraft = crate::api::metadata::get_minecraft_versions().await?;
let version_index = minecraft
.versions
.iter()
.position(|it| it.id == profile.metadata.game_version)
.position(|it| it.id == profile.game_version)
.ok_or(crate::ErrorKind::LauncherError(format!(
"Invalid game version: {}",
profile.metadata.game_version
profile.game_version
)))?;
let version = &metadata.minecraft.versions[version_index];
let version = &minecraft.versions[version_index];
let minecraft_updated = version_index
<= metadata
.minecraft
<= minecraft
.versions
.iter()
.position(|x| x.id == "22w16a")
.unwrap_or(0);
let version_jar = profile
.metadata
.loader_version
.as_ref()
.map_or(version.id.clone(), |it| {
let loader_version = get_loader_version_from_profile(
&profile.game_version,
profile.loader,
profile.loader_version.as_deref(),
)
.await?;
let version_jar =
loader_version.as_ref().map_or(version.id.clone(), |it| {
format!("{}-{}", version.id.clone(), it.id.clone())
});
@@ -201,14 +241,12 @@ pub async fn install_minecraft(
let mut version_info = download::download_version_info(
&state,
version,
profile.metadata.loader_version.as_ref(),
loader_version.as_ref(),
Some(repairing),
Some(&loading_bar),
)
.await?;
// TODO: check if java exists, if not install it add to install step
let key = version_info
.java_version
.as_ref()
@@ -235,13 +273,7 @@ pub async fn install_minecraft(
})?;
if set_java {
{
let mut settings = state.settings.write().await;
settings
.java_globals
.insert(format!("JAVA_{key}"), java_version.clone());
}
State::sync().await?;
java_version.upsert(&state.pool).await?;
}
// Download minecraft (5-90)
@@ -259,10 +291,9 @@ pub async fn install_minecraft(
let client_path = state
.directories
.version_dir(&version_jar)
.await
.join(format!("{version_jar}.jar"));
let libraries_dir = state.directories.libraries_dir().await;
let libraries_dir = state.directories.libraries_dir();
if let Some(ref mut data) = version_info.data {
processor_rules! {
@@ -274,7 +305,7 @@ pub async fn install_minecraft(
client => client_path.to_string_lossy(),
server => "";
"MINECRAFT_VERSION":
client => profile.metadata.game_version.clone(),
client => profile.game_version.clone(),
server => "";
"ROOT":
client => instance_path.to_string_lossy(),
@@ -356,20 +387,18 @@ pub async fn install_minecraft(
}
}
crate::api::profile::edit(&profile.profile_id(), |prof| {
crate::api::profile::edit(&profile.path, |prof| {
prof.install_stage = ProfileInstallStage::Installed;
async { Ok(()) }
})
.await?;
State::sync().await?;
emit_loading(&loading_bar, 1.0, Some("Finished installing")).await?;
Ok(())
}
#[tracing::instrument(skip_all)]
#[theseus_macros::debug_pin]
#[allow(clippy::too_many_arguments)]
pub async fn launch_minecraft(
java_args: &[String],
@@ -381,7 +410,7 @@ pub async fn launch_minecraft(
credentials: &Credentials,
post_exit_hook: Option<String>,
profile: &Profile,
) -> crate::Result<Arc<tokio::sync::RwLock<MinecraftChild>>> {
) -> crate::Result<Process> {
if profile.install_stage == ProfileInstallStage::PackInstalling
|| profile.install_stage == ProfileInstallStage::Installing
{
@@ -396,41 +425,43 @@ pub async fn launch_minecraft(
}
let state = State::get().await?;
let metadata = state.metadata.read().await;
let instance_path = profile.get_profile_full_path().await?;
let instance_path = &io::canonicalize(instance_path)?;
let instance_path =
crate::api::profile::get_full_path(&profile.path).await?;
let version_index = metadata
.minecraft
let minecraft = crate::api::metadata::get_minecraft_versions().await?;
let version_index = minecraft
.versions
.iter()
.position(|it| it.id == profile.metadata.game_version)
.position(|it| it.id == profile.game_version)
.ok_or(crate::ErrorKind::LauncherError(format!(
"Invalid game version: {}",
profile.metadata.game_version
profile.game_version
)))?;
let version = &metadata.minecraft.versions[version_index];
let version = &minecraft.versions[version_index];
let minecraft_updated = version_index
<= metadata
.minecraft
<= minecraft
.versions
.iter()
.position(|x| x.id == "22w16a")
.unwrap_or(0);
let version_jar = profile
.metadata
.loader_version
.as_ref()
.map_or(version.id.clone(), |it| {
let loader_version = get_loader_version_from_profile(
&profile.game_version,
profile.loader,
profile.loader_version.as_deref(),
)
.await?;
let version_jar =
loader_version.as_ref().map_or(version.id.clone(), |it| {
format!("{}-{}", version.id.clone(), it.id.clone())
});
let version_info = download::download_version_info(
&state,
version,
profile.metadata.loader_version.as_ref(),
loader_version.as_ref(),
None,
None,
)
@@ -458,7 +489,6 @@ pub async fn launch_minecraft(
let client_path = state
.directories
.version_dir(&version_jar)
.await
.join(format!("{version_jar}.jar"));
let args = version_info.arguments.clone().unwrap_or_default();
@@ -474,11 +504,11 @@ pub async fn launch_minecraft(
// Check if profile has a running profile, and reject running the command if it does
// Done late so a quick double call doesn't launch two instances
let existing_processes =
process::get_uuids_by_profile_path(profile.profile_id()).await?;
if let Some(uuid) = existing_processes.first() {
process::get_by_profile_path(&profile.path).await?;
if let Some(process) = existing_processes.first() {
return Err(crate::ErrorKind::LauncherError(format!(
"Profile {} is already running at UUID: {uuid}",
profile.profile_id()
"Profile {} is already running at path: {}",
profile.path, process.pid
))
.as_error());
}
@@ -487,10 +517,10 @@ pub async fn launch_minecraft(
args::get_jvm_arguments(
args.get(&d::minecraft::ArgumentType::Jvm)
.map(|x| x.as_slice()),
&state.directories.version_natives_dir(&version_jar).await,
&state.directories.libraries_dir().await,
&state.directories.version_natives_dir(&version_jar),
&state.directories.libraries_dir(),
&args::get_class_paths(
&state.directories.libraries_dir().await,
&state.directories.libraries_dir(),
version_info.libraries.as_slice(),
&client_path,
&java_version.architecture,
@@ -513,8 +543,8 @@ pub async fn launch_minecraft(
credentials,
&version.id,
&version_info.asset_index.id,
instance_path,
&state.directories.assets_dir().await,
&instance_path,
&state.directories.assets_dir(),
&version.type_,
*resolution,
&java_version.architecture,
@@ -561,13 +591,12 @@ pub async fn launch_minecraft(
io::write(&options_path, options_string).await?;
}
crate::api::profile::edit(&profile.profile_id(), |prof| {
prof.metadata.last_played = Some(Utc::now());
crate::api::profile::edit(&profile.path, |prof| {
prof.last_played = Some(Utc::now());
async { Ok(()) }
})
.await?;
State::sync().await?;
let mut censor_strings = HashMap::new();
let username = whoami::username();
@@ -603,31 +632,25 @@ pub async fn launch_minecraft(
let window = EventState::get_main_window().await?;
if let Some(window) = window {
let settings = state.settings.read().await;
if settings.hide_on_process {
let settings = crate::state::Settings::get(&state.pool).await?;
if settings.hide_on_process_start {
window.minimize()?;
}
}
}
if !*state.offline.read().await {
// Add game played to discord rich presence
let _ = state
.discord_rpc
.set_activity(&format!("Playing {}", profile.metadata.name), true)
.await;
}
let _ = state
.discord_rpc
.set_activity(&format!("Playing {}", profile.name), true)
.await;
// Create Minecraft child by inserting it into the state
// This also spawns the process and prepares the subsequent processes
let mut state_children = state.children.write().await;
state_children
.insert_new_process(
Uuid::new_v4(),
profile.profile_id(),
command,
post_exit_hook,
censor_strings,
)
.await
Process::insert_new_process(
&profile.path,
command,
post_exit_hook,
&state.pool,
)
.await
}

View File

@@ -22,5 +22,4 @@ pub use api::*;
pub use error::*;
pub use event::{EventState, LoadingBar, LoadingBarType};
pub use logger::start_logger;
pub use state::InnerProjectPathUnix;
pub use state::State;

File diff suppressed because it is too large Load Diff

View File

@@ -1,728 +0,0 @@
use super::{Profile, ProfilePathId};
use chrono::{DateTime, Utc};
use serde::Deserialize;
use serde::Serialize;
use std::{collections::HashMap, sync::Arc};
use tokio::process::Child;
use tokio::process::Command;
use tokio::sync::RwLock;
use crate::event::emit::emit_process;
use crate::event::ProcessPayloadType;
use crate::util::fetch::read_json;
use crate::util::io::IOError;
use crate::{profile, ErrorKind};
use tokio::task::JoinHandle;
use uuid::Uuid;
// File (under caches/metadata) where running-process snapshots are persisted
// so they can be re-attached after a launcher restart; see
// `ChildType::cache_process` / `Children::rescue_cache`.
const PROCESSES_JSON: &str = "processes.json";
// Child processes (instances of Minecraft)
// A wrapper over a Hashmap connecting a launch UUID -> MinecraftChild
pub struct Children(HashMap<Uuid, Arc<RwLock<MinecraftChild>>>);
// How a managed Minecraft process is tracked: either a live tokio handle,
// or a bare PID we reconnected to after a launcher restart.
#[derive(Debug)]
pub enum ChildType {
    // A child process that is being managed by tokio
    TokioChild(Child),
    // A child process that was rescued from a cache (e.g. a process that was launched by theseus before the launcher was restarted)
    // This may not have all the same functionality as a TokioChild
    RescuedPID(u32),
}
// Snapshot of a running child persisted to processes.json so the process
// can be "rescued" (re-attached) after a launcher restart.
#[derive(Serialize, Deserialize, Debug)]
pub struct ProcessCache {
    pub pid: u32,
    // Launch UUID, the key used in the `Children` map
    pub uuid: Uuid,
    // sysinfo start time; used with name/exe to detect PID reuse on rescue
    pub start_time: u64,
    pub name: String,
    pub exe: String,
    pub profile_relative_path: ProfilePathId,
    // Optional hook command to run after Minecraft exits
    pub post_command: Option<String>,
}
impl ChildType {
    /// Polls the child for exit without blocking.
    ///
    /// Returns `Ok(None)` while the process is still running, otherwise
    /// `Ok(Some(code))`. For a `RescuedPID` the real exit code is unknown
    /// (only a PID is held), so `0` is reported whenever the process can no
    /// longer be found or is no longer in the Run state.
    pub async fn try_wait(&mut self) -> crate::Result<Option<i32>> {
        match self {
            ChildType::TokioChild(child) => Ok(child
                .try_wait()
                .map_err(IOError::from)?
                // A child killed by a signal has no code; treat it as 0
                .map(|x| x.code().unwrap_or(0))),
            ChildType::RescuedPID(pid) => {
                let mut system = sysinfo::System::new();
                // refresh_process returns false if the PID no longer exists
                if !system.refresh_process(sysinfo::Pid::from_u32(*pid)) {
                    return Ok(Some(0));
                }
                let process = system.process(sysinfo::Pid::from_u32(*pid));
                if let Some(process) = process {
                    if process.status() == sysinfo::ProcessStatus::Run {
                        Ok(None)
                    } else {
                        Ok(Some(0))
                    }
                } else {
                    Ok(Some(0))
                }
            }
        }
    }
    /// Forcibly terminates the child — via tokio for a managed child, or via
    /// sysinfo for a rescued PID (a no-op if the PID is already gone).
    pub async fn kill(&mut self) -> crate::Result<()> {
        match self {
            ChildType::TokioChild(child) => {
                Ok(child.kill().await.map_err(IOError::from)?)
            }
            ChildType::RescuedPID(pid) => {
                let mut system = sysinfo::System::new();
                if system.refresh_process(sysinfo::Pid::from_u32(*pid)) {
                    let process = system.process(sysinfo::Pid::from_u32(*pid));
                    if let Some(process) = process {
                        process.kill();
                    }
                }
                Ok(())
            }
        }
    }
    /// OS process id; `None` only when a tokio child has already exited.
    pub fn id(&self) -> Option<u32> {
        match self {
            ChildType::TokioChild(child) => child.id(),
            ChildType::RescuedPID(pid) => Some(*pid),
        }
    }
    // Caches the process so that it can be restored if the launcher is restarted
    // Stored in the caches/metadata/processes.json file
    pub async fn cache_process(
        &self,
        uuid: uuid::Uuid,
        profile_path_id: ProfilePathId,
        post_command: Option<String>,
    ) -> crate::Result<()> {
        let pid = match self {
            ChildType::TokioChild(child) => child.id().unwrap_or(0),
            ChildType::RescuedPID(pid) => *pid,
        };
        let state = crate::State::get().await?;
        let mut system = sysinfo::System::new();
        system.refresh_processes();
        // Record identifying details (start time, name, exe) so that a PID
        // reused by the OS is not mistaken for our process on rescue.
        let process =
            system.process(sysinfo::Pid::from_u32(pid)).ok_or_else(|| {
                crate::ErrorKind::LauncherError(format!(
                    "Could not find process {}",
                    pid
                ))
            })?;
        let start_time = process.start_time();
        let name = process.name().to_string();
        let Some(path) = process.exe() else {
            return Err(ErrorKind::LauncherError(format!(
                "Cached process {} has no accessable path",
                pid
            ))
            .into());
        };
        let exe = path.to_string_lossy().to_string();
        let cached_process = ProcessCache {
            pid,
            start_time,
            name,
            exe,
            post_command,
            uuid,
            profile_relative_path: profile_path_id,
        };
        let children_path = state
            .directories
            .caches_meta_dir()
            .await
            .join(PROCESSES_JSON);
        // Read-modify-write of the whole JSON map; an unreadable or missing
        // file simply starts from an empty map.
        let mut children_caches = if let Ok(children_json) =
            read_json::<HashMap<uuid::Uuid, ProcessCache>>(
                &children_path,
                &state.io_semaphore,
            )
            .await
        {
            children_json
        } else {
            HashMap::new()
        };
        children_caches.insert(uuid, cached_process);
        crate::util::fetch::write(
            &children_path,
            &serde_json::to_vec(&children_caches)?,
            &state.io_semaphore,
        )
        .await?;
        Ok(())
    }
    // Removes the process from the cache (ie: on process exit)
    pub async fn remove_cache(&self, uuid: uuid::Uuid) -> crate::Result<()> {
        let state = crate::State::get().await?;
        let children_path = state
            .directories
            .caches_meta_dir()
            .await
            .join(PROCESSES_JSON);
        // Same read-modify-write pattern as cache_process
        let mut children_caches = if let Ok(children_json) =
            read_json::<HashMap<uuid::Uuid, ProcessCache>>(
                &children_path,
                &state.io_semaphore,
            )
            .await
        {
            children_json
        } else {
            HashMap::new()
        };
        children_caches.remove(&uuid);
        crate::util::fetch::write(
            &children_path,
            &serde_json::to_vec(&children_caches)?,
            &state.io_semaphore,
        )
        .await?;
        Ok(())
    }
}
// Minecraft Child, bundles together the PID, the actual Child, and the easily queryable stdout and stderr streams (if needed)
#[derive(Debug)]
pub struct MinecraftChild {
    // Launch UUID, the key used in the `Children` map
    pub uuid: Uuid,
    pub profile_relative_path: ProfilePathId,
    pub manager: Option<JoinHandle<crate::Result<i32>>>, // None when future has completed and been handled
    // Shared with the manager task; may switch from the Minecraft child to a
    // post-command child while the manager is running
    pub current_child: Arc<RwLock<ChildType>>,
    pub last_updated_playtime: DateTime<Utc>, // The last time we updated the playtime for the associated profile
}
impl Children {
    /// Builds an empty process map; entries are registered later through
    /// `insert_new_process` / `insert_cached_process` / `rescue_cache`.
    pub fn new() -> Self {
        Self(HashMap::new())
    }
    // Loads cached processes from the caches/metadata/processes.json file, re-inserts them into the hashmap, and removes them from the file
    // This will only be called once, on startup. Only processes who match a cached process (name, time started, pid, etc) will be re-inserted
    pub async fn rescue_cache(&mut self) -> crate::Result<()> {
        let state = crate::State::get().await?;
        let children_path = state
            .directories
            .caches_meta_dir()
            .await
            .join(PROCESSES_JSON);
        let mut children_caches = if let Ok(children_json) =
            read_json::<HashMap<uuid::Uuid, ProcessCache>>(
                &children_path,
                &state.io_semaphore,
            )
            .await
        {
            // Overwrite the file with an empty hashmap- we will re-insert the cached processes
            let empty = HashMap::<uuid::Uuid, ProcessCache>::new();
            crate::util::fetch::write(
                &children_path,
                &serde_json::to_vec(&empty)?,
                &state.io_semaphore,
            )
            .await?;
            // Return the cached processes
            children_json
        } else {
            HashMap::new()
        };
        for (_, cache) in children_caches.drain() {
            let uuid = cache.uuid;
            match self.insert_cached_process(cache).await {
                Ok(child) => {
                    self.0.insert(uuid, child);
                }
                // A dead or mismatched cached process is not fatal: log it
                // and keep rescuing the rest.
                Err(e) => tracing::warn!(
                    "Failed to rescue cached process {}: {}",
                    uuid,
                    e
                ),
            }
        }
        Ok(())
    }
// Runs the command in process, inserts a child process to keep track of, and returns a reference to the container struct MinecraftChild
// The threads for stdout and stderr are spawned here
// Unlike a Hashmap's 'insert', this directly returns the reference to the MinecraftChild rather than any previously stored MinecraftChild that may exist
#[tracing::instrument(skip(
self,
uuid,
mc_command,
post_command,
censor_strings
))]
#[tracing::instrument(level = "trace", skip(self))]
#[theseus_macros::debug_pin]
pub async fn insert_new_process(
&mut self,
uuid: Uuid,
profile_relative_path: ProfilePathId,
mut mc_command: Command,
post_command: Option<String>, // Command to run after minecraft.
censor_strings: HashMap<String, String>,
) -> crate::Result<Arc<RwLock<MinecraftChild>>> {
// Takes the first element of the commands vector and spawns it
let mc_proc = mc_command.spawn().map_err(IOError::from)?;
let child = ChildType::TokioChild(mc_proc);
// Slots child into manager
let pid = child.id().ok_or_else(|| {
crate::ErrorKind::LauncherError(
"Process immediately failed, could not get PID".to_string(),
)
})?;
// Caches process so that it can be restored if the launcher is restarted
child
.cache_process(
uuid,
profile_relative_path.clone(),
post_command.clone(),
)
.await?;
let current_child = Arc::new(RwLock::new(child));
let manager = Some(tokio::spawn(Self::sequential_process_manager(
uuid,
post_command,
pid,
current_child.clone(),
profile_relative_path.clone(),
)));
emit_process(
uuid,
pid,
ProcessPayloadType::Launched,
"Launched Minecraft",
)
.await?;
let last_updated_playtime = Utc::now();
// Create MinecraftChild
let mchild = MinecraftChild {
uuid,
profile_relative_path,
current_child,
manager,
last_updated_playtime,
};
let mchild = Arc::new(RwLock::new(mchild));
self.0.insert(uuid, mchild.clone());
Ok(mchild)
}
// Rescues a cached process, inserts a child process to keep track of, and returns a reference to the container struct MinecraftChild
// Essentially 'reconnects' to a process that was launched by theseus before the launcher was restarted
// However, this may not have all the same functionality as a TokioChild, as we only have the PID and not the actual Child
// Only processes who match a cached process (name, time started, pid, etc) will be re-inserted. The function fails with an error if the process is notably different.
#[tracing::instrument(skip(self, cached_process,))]
#[tracing::instrument(level = "trace", skip(self))]
#[theseus_macros::debug_pin]
pub async fn insert_cached_process(
&mut self,
cached_process: ProcessCache,
) -> crate::Result<Arc<RwLock<MinecraftChild>>> {
let _state = crate::State::get().await?;
// Takes the first element of the commands vector and spawns it
// Checks processes, compares cached process to actual process
// Fails if notably different (meaning that the PID was reused, and we shouldn't reconnect to it)
{
let mut system = sysinfo::System::new();
system.refresh_processes();
let process = system
.process(sysinfo::Pid::from_u32(cached_process.pid))
.ok_or_else(|| {
crate::ErrorKind::LauncherError(format!(
"Could not find process {}",
cached_process.pid
))
})?;
if cached_process.start_time != process.start_time() {
return Err(ErrorKind::LauncherError(format!("Cached process {} has different start time than actual process {}", cached_process.pid, process.start_time())).into());
}
if cached_process.name != process.name() {
return Err(ErrorKind::LauncherError(format!("Cached process {} has different name than actual process {}", cached_process.pid, process.name())).into());
}
if let Some(path) = process.exe() {
if cached_process.exe != path.to_string_lossy() {
return Err(ErrorKind::LauncherError(format!("Cached process {} has different exe than actual process {}", cached_process.pid, path.to_string_lossy())).into());
}
} else {
return Err(ErrorKind::LauncherError(format!(
"Cached process {} has no accessable path",
cached_process.pid
))
.into());
}
}
let child = ChildType::RescuedPID(cached_process.pid);
// Slots child into manager
let pid = child.id().ok_or_else(|| {
crate::ErrorKind::LauncherError(
"Process immediately failed, could not get PID".to_string(),
)
})?;
// Re-caches process so that it can be restored if the launcher is restarted
child
.cache_process(
cached_process.uuid,
cached_process.profile_relative_path.clone(),
cached_process.post_command.clone(),
)
.await?;
let current_child = Arc::new(RwLock::new(child));
let manager = Some(tokio::spawn(Self::sequential_process_manager(
cached_process.uuid,
cached_process.post_command,
pid,
current_child.clone(),
cached_process.profile_relative_path.clone(),
)));
emit_process(
cached_process.uuid,
pid,
ProcessPayloadType::Launched,
"Launched Minecraft",
)
.await?;
let last_updated_playtime = Utc::now();
// Create MinecraftChild
let mchild = MinecraftChild {
uuid: cached_process.uuid,
profile_relative_path: cached_process.profile_relative_path,
current_child,
manager,
last_updated_playtime,
};
let mchild = Arc::new(RwLock::new(mchild));
self.0.insert(cached_process.uuid, mchild.clone());
Ok(mchild)
}
// Spawns a new child process and inserts it into the hashmap
// Also, as the process ends, it spawns the follow-up process if it exists
// By convention, ExitStatus is last command's exit status, and we exit on the first non-zero exit status
#[tracing::instrument(skip(current_child))]
#[theseus_macros::debug_pin]
async fn sequential_process_manager(
uuid: Uuid,
post_command: Option<String>,
mut current_pid: u32,
current_child: Arc<RwLock<ChildType>>,
associated_profile: ProfilePathId,
) -> crate::Result<i32> {
let current_child = current_child.clone();
// Wait on current Minecraft Child
let mut mc_exit_status;
let mut last_updated_playtime = Utc::now();
loop {
if let Some(t) = current_child.write().await.try_wait().await? {
mc_exit_status = t;
break;
}
// sleep for 10ms
tokio::time::sleep(tokio::time::Duration::from_millis(50)).await;
// Auto-update playtime every minute
let diff = Utc::now()
.signed_duration_since(last_updated_playtime)
.num_seconds();
if diff >= 60 {
if let Err(e) = profile::edit(&associated_profile, |prof| {
prof.metadata.recent_time_played += diff as u64;
async { Ok(()) }
})
.await
{
tracing::warn!(
"Failed to update playtime for profile {}: {}",
&associated_profile,
e
);
}
last_updated_playtime = Utc::now();
}
}
// Now fully complete- update playtime one last time
let diff = Utc::now()
.signed_duration_since(last_updated_playtime)
.num_seconds();
if let Err(e) = profile::edit(&associated_profile, |prof| {
prof.metadata.recent_time_played += diff as u64;
async { Ok(()) }
})
.await
{
tracing::warn!(
"Failed to update playtime for profile {}: {}",
&associated_profile,
e
);
}
// Publish play time update
// Allow failure, it will be stored locally and sent next time
// Sent in another thread as first call may take a couple seconds and hold up process ending
let associated_profile_clone = associated_profile.clone();
tokio::spawn(async move {
if let Err(e) =
profile::try_update_playtime(&associated_profile_clone.clone())
.await
{
tracing::warn!(
"Failed to update playtime for profile {}: {}",
&associated_profile_clone,
e
);
}
});
{
// Clear game played for Discord RPC
// May have other active processes, so we clear to the next running process
let state = crate::State::get().await?;
let _ = state.discord_rpc.clear_to_default(true).await;
}
// If in tauri, window should show itself again after process exists if it was hidden
#[cfg(feature = "tauri")]
{
let window = crate::EventState::get_main_window().await?;
if let Some(window) = window {
window.unminimize()?;
}
}
{
let current_child = current_child.write().await;
current_child.remove_cache(uuid).await?;
}
if !mc_exit_status == 0 {
emit_process(
uuid,
current_pid,
ProcessPayloadType::Finished,
"Exited process",
)
.await?;
return Ok(mc_exit_status); // Err for a non-zero exit is handled in helper
}
// If a post-command exist, switch to it and wait on it
// First, create the command by splitting arguments
let post_command = if let Some(hook) = post_command {
let mut cmd = hook.split(' ');
if let Some(command) = cmd.next() {
let mut command = Command::new(command);
command
.args(&cmd.collect::<Vec<&str>>())
.current_dir(associated_profile.get_full_path().await?);
Some(command)
} else {
None
}
} else {
None
};
if let Some(mut m_command) = post_command {
{
let mut current_child: tokio::sync::RwLockWriteGuard<
'_,
ChildType,
> = current_child.write().await;
let new_child = m_command.spawn().map_err(IOError::from)?;
current_pid = new_child.id().ok_or_else(|| {
crate::ErrorKind::LauncherError(
"Process immediately failed, could not get PID"
.to_string(),
)
})?;
*current_child = ChildType::TokioChild(new_child);
}
emit_process(
uuid,
current_pid,
ProcessPayloadType::Updated,
"Completed Minecraft, switching to post-commands",
)
.await?;
loop {
if let Some(t) = current_child.write().await.try_wait().await? {
mc_exit_status = t;
break;
}
// sleep for 10ms
tokio::time::sleep(tokio::time::Duration::from_millis(10))
.await;
}
}
emit_process(
uuid,
current_pid,
ProcessPayloadType::Finished,
"Exited process",
)
.await?;
Ok(mc_exit_status)
}
// Returns a ref to the child
pub fn get(&self, uuid: Uuid) -> Option<Arc<RwLock<MinecraftChild>>> {
self.0.get(&uuid).cloned()
}
// Gets all PID keys
pub fn keys(&self) -> Vec<Uuid> {
self.0.keys().cloned().collect()
}
    // Get exit status of a child by its launch UUID
    // Returns None if the child is still running
    pub async fn exit_status(&self, uuid: Uuid) -> crate::Result<Option<i32>> {
        if let Some(child) = self.get(uuid) {
            let child = child.write().await;
            let status = child.current_child.write().await.try_wait().await?;
            Ok(status)
        } else {
            // Unknown UUID is reported the same as "still running"
            Ok(None)
        }
    }
    // Gets the launch UUIDs of all currently-running children
    pub async fn running_keys(&self) -> crate::Result<Vec<Uuid>> {
        let mut keys = Vec::new();
        for key in self.keys() {
            if let Some(child) = self.get(key) {
                let child = child.clone();
                let child = child.write().await;
                // try_wait() == None means the process is still alive
                if child
                    .current_child
                    .write()
                    .await
                    .try_wait()
                    .await?
                    .is_none()
                {
                    keys.push(key);
                }
            }
        }
        Ok(keys)
    }
    // Gets the launch UUIDs of all running children for a given profile path
    pub async fn running_keys_with_profile(
        &self,
        profile_path: ProfilePathId,
    ) -> crate::Result<Vec<Uuid>> {
        let running_keys = self.running_keys().await?;
        let mut keys = Vec::new();
        for key in running_keys {
            if let Some(child) = self.get(key) {
                let child = child.clone();
                // A read lock suffices: only the profile path is inspected
                let child = child.read().await;
                if child.profile_relative_path == profile_path {
                    keys.push(key);
                }
            }
        }
        Ok(keys)
    }
    // Gets the profile paths of all running children
    pub async fn running_profile_paths(
        &self,
    ) -> crate::Result<Vec<ProfilePathId>> {
        let mut profiles = Vec::new();
        for key in self.keys() {
            if let Some(child) = self.get(key) {
                let child = child.clone();
                let child = child.write().await;
                // Still running iff try_wait() reports no exit status yet
                if child
                    .current_child
                    .write()
                    .await
                    .try_wait()
                    .await?
                    .is_none()
                {
                    profiles.push(child.profile_relative_path.clone());
                }
            }
        }
        Ok(profiles)
    }
    // Gets all profiles of running children
    // Returns clones because it would be serialized anyway
    pub async fn running_profiles(&self) -> crate::Result<Vec<Profile>> {
        let mut profiles = Vec::new();
        for key in self.keys() {
            if let Some(child) = self.get(key) {
                let child = child.clone();
                let child = child.write().await;
                if child
                    .current_child
                    .write()
                    .await
                    .try_wait()
                    .await?
                    .is_none()
                {
                    // Resolve the relative path to a full Profile; a profile
                    // removed while its game runs is silently skipped.
                    if let Some(prof) = crate::api::profile::get(
                        &child.profile_relative_path.clone(),
                        None,
                    )
                    .await?
                    {
                        profiles.push(prof);
                    }
                }
            }
        }
        Ok(profiles)
    }
}
impl Default for Children {
fn default() -> Self {
Self::new()
}
}

View File

@@ -0,0 +1,31 @@
use crate::state::DirectoryInfo;
use sqlx::migrate::MigrateDatabase;
use sqlx::sqlite::SqlitePoolOptions;
use sqlx::{Pool, Sqlite};
/// Opens (creating if necessary) the launcher's SQLite database at
/// `<settings_dir>/app.db`, runs pending embedded migrations, and returns
/// the connection pool used for all internal launcher data.
pub(crate) async fn connect() -> crate::Result<Pool<Sqlite>> {
    let settings_dir = DirectoryInfo::get_initial_settings_dir().ok_or(
        crate::ErrorKind::FSError(
            "Could not find valid config dir".to_string(),
        ),
    )?;
    // First launch: the settings directory may not exist yet
    if !settings_dir.exists() {
        crate::util::io::create_dir_all(&settings_dir).await?;
    }
    let uri = format!("sqlite:{}", settings_dir.join("app.db").display());
    // Ensure the database file exists before connecting the pool
    if !Sqlite::database_exists(&uri).await? {
        Sqlite::create_database(&uri).await?;
    }
    // NOTE(review): 100 connections is generous for SQLite, which allows
    // only a single writer at a time — confirm this pool size is intended.
    let pool = SqlitePoolOptions::new()
        .max_connections(100)
        .connect(&uri)
        .await?;
    // Run the migrations bundled at compile time by sqlx::migrate!()
    sqlx::migrate!().run(&pool).await?;
    Ok(pool)
}

View File

@@ -1,12 +1,9 @@
//! Theseus directory information
use std::fs;
use std::path::PathBuf;
use crate::state::{JavaVersion, Settings};
use crate::util::fetch::IoSemaphore;
use std::path::{Path, PathBuf};
use tokio::fs;
use tokio::sync::RwLock;
use super::{ProfilePathId, Settings};
pub const SETTINGS_FILE_NAME: &str = "settings.json";
pub const CACHES_FOLDER_NAME: &str = "caches";
pub const LAUNCHER_LOGS_FOLDER_NAME: &str = "launcher_logs";
pub const PROFILES_FOLDER_NAME: &str = "profiles";
@@ -15,8 +12,7 @@ pub const METADATA_FOLDER_NAME: &str = "meta";
#[derive(Debug)]
pub struct DirectoryInfo {
pub settings_dir: PathBuf, // Base settings directory- settings.json and icon cache.
pub config_dir: RwLock<PathBuf>, // Base config directory- instances, minecraft downloads, etc. Changeable as a setting.
pub working_dir: PathBuf,
pub config_dir: PathBuf, // Base config directory- instances, minecraft downloads, etc. Changeable as a setting.
}
impl DirectoryInfo {
@@ -24,154 +20,128 @@ impl DirectoryInfo {
// init() is not needed for this function
pub fn get_initial_settings_dir() -> Option<PathBuf> {
Self::env_path("THESEUS_CONFIG_DIR")
.or_else(|| Some(dirs::config_dir()?.join("com.modrinth.theseus")))
}
#[inline]
pub fn get_initial_settings_file() -> crate::Result<PathBuf> {
let settings_dir = Self::get_initial_settings_dir().ok_or(
crate::ErrorKind::FSError(
"Could not find valid config dir".to_string(),
),
)?;
Ok(settings_dir.join("settings.json"))
.or_else(|| Some(dirs::data_dir()?.join("ModrinthApp")))
}
/// Get all paths needed for Theseus to operate properly
#[tracing::instrument]
pub fn init(settings: &Settings) -> crate::Result<Self> {
// Working directory
let working_dir = std::env::current_dir().map_err(|err| {
crate::ErrorKind::FSError(format!(
"Could not open working directory: {err}"
))
})?;
pub async fn init(config_dir: Option<String>) -> crate::Result<Self> {
let settings_dir = Self::get_initial_settings_dir().ok_or(
crate::ErrorKind::FSError(
"Could not find valid settings dir".to_string(),
),
)?;
fs::create_dir_all(&settings_dir).map_err(|err| {
fs::create_dir_all(&settings_dir).await.map_err(|err| {
crate::ErrorKind::FSError(format!(
"Error creating Theseus config directory: {err}"
))
})?;
// config directory (for instances, etc.)
// by default this is the same as the settings directory
let config_dir = settings.loaded_config_dir.clone().ok_or(
crate::ErrorKind::FSError(
"Could not find valid config dir".to_string(),
),
)?;
let config_dir = config_dir
.map(PathBuf::from)
.unwrap_or_else(|| settings_dir.clone());
Ok(Self {
settings_dir,
config_dir: RwLock::new(config_dir),
working_dir,
config_dir,
})
}
/// Get the Minecraft instance metadata directory
#[inline]
pub async fn metadata_dir(&self) -> PathBuf {
self.config_dir.read().await.join(METADATA_FOLDER_NAME)
pub fn metadata_dir(&self) -> PathBuf {
self.config_dir.join(METADATA_FOLDER_NAME)
}
/// Get the Minecraft java versions metadata directory
#[inline]
pub async fn java_versions_dir(&self) -> PathBuf {
self.metadata_dir().await.join("java_versions")
pub fn java_versions_dir(&self) -> PathBuf {
self.metadata_dir().join("java_versions")
}
/// Get the Minecraft versions metadata directory
#[inline]
pub async fn versions_dir(&self) -> PathBuf {
self.metadata_dir().await.join("versions")
pub fn versions_dir(&self) -> PathBuf {
self.metadata_dir().join("versions")
}
/// Get the metadata directory for a given version
#[inline]
pub async fn version_dir(&self, version: &str) -> PathBuf {
self.versions_dir().await.join(version)
pub fn version_dir(&self, version: &str) -> PathBuf {
self.versions_dir().join(version)
}
/// Get the Minecraft libraries metadata directory
#[inline]
pub async fn libraries_dir(&self) -> PathBuf {
self.metadata_dir().await.join("libraries")
pub fn libraries_dir(&self) -> PathBuf {
self.metadata_dir().join("libraries")
}
/// Get the Minecraft assets metadata directory
#[inline]
pub async fn assets_dir(&self) -> PathBuf {
self.metadata_dir().await.join("assets")
pub fn assets_dir(&self) -> PathBuf {
self.metadata_dir().join("assets")
}
/// Get the assets index directory
#[inline]
pub async fn assets_index_dir(&self) -> PathBuf {
self.assets_dir().await.join("indexes")
pub fn assets_index_dir(&self) -> PathBuf {
self.assets_dir().join("indexes")
}
/// Get the assets objects directory
#[inline]
pub async fn objects_dir(&self) -> PathBuf {
self.assets_dir().await.join("objects")
pub fn objects_dir(&self) -> PathBuf {
self.assets_dir().join("objects")
}
/// Get the directory for a specific object
#[inline]
pub async fn object_dir(&self, hash: &str) -> PathBuf {
self.objects_dir().await.join(&hash[..2]).join(hash)
pub fn object_dir(&self, hash: &str) -> PathBuf {
self.objects_dir().join(&hash[..2]).join(hash)
}
/// Get the Minecraft legacy assets metadata directory
#[inline]
pub async fn legacy_assets_dir(&self) -> PathBuf {
self.metadata_dir().await.join("resources")
pub fn legacy_assets_dir(&self) -> PathBuf {
self.metadata_dir().join("resources")
}
/// Get the Minecraft legacy assets metadata directory
#[inline]
pub async fn natives_dir(&self) -> PathBuf {
self.metadata_dir().await.join("natives")
pub fn natives_dir(&self) -> PathBuf {
self.metadata_dir().join("natives")
}
/// Get the natives directory for a version of Minecraft
#[inline]
pub async fn version_natives_dir(&self, version: &str) -> PathBuf {
self.natives_dir().await.join(version)
pub fn version_natives_dir(&self, version: &str) -> PathBuf {
self.natives_dir().join(version)
}
/// Get the directory containing instance icons
#[inline]
pub async fn icon_dir(&self) -> PathBuf {
self.config_dir.read().await.join("icons")
pub fn icon_dir(&self) -> PathBuf {
self.config_dir.join("icons")
}
/// Get the profiles directory for created profiles
#[inline]
pub async fn profiles_dir(&self) -> PathBuf {
self.config_dir.read().await.join(PROFILES_FOLDER_NAME)
pub fn profiles_dir(&self) -> PathBuf {
self.config_dir.join(PROFILES_FOLDER_NAME)
}
/// Gets the logs dir for a given profile
#[inline]
pub async fn profile_logs_dir(
profile_id: &ProfilePathId,
) -> crate::Result<PathBuf> {
Ok(profile_id.get_full_path().await?.join("logs"))
pub fn profile_logs_dir(&self, profile_path: &str) -> PathBuf {
self.profiles_dir().join(profile_path).join("logs")
}
/// Gets the crash reports dir for a given profile
#[inline]
pub async fn crash_reports_dir(
profile_id: &ProfilePathId,
) -> crate::Result<PathBuf> {
Ok(profile_id.get_full_path().await?.join("crash-reports"))
pub fn crash_reports_dir(&self, profile_path: &str) -> PathBuf {
self.profiles_dir().join(profile_path).join("crash-reports")
}
#[inline]
@@ -180,32 +150,140 @@ impl DirectoryInfo {
.map(|d| d.join(LAUNCHER_LOGS_FOLDER_NAME))
}
/// Get the file containing the global database
#[inline]
pub async fn database_file(&self) -> PathBuf {
self.config_dir.read().await.join("data.bin")
}
    /// Get the settings file for Theseus
    #[inline]
    pub fn settings_file(&self) -> PathBuf {
        self.settings_dir.join(SETTINGS_FILE_NAME)
    }
    /// Get the cache directory for Theseus
    #[inline]
    pub fn caches_dir(&self) -> PathBuf {
        self.settings_dir.join(CACHES_FOLDER_NAME)
    }
    /// Get the metadata subdirectory of the cache directory.
    // NOTE(review): declared async although no await occurs — presumably kept
    // for signature compatibility with older callers; confirm before changing.
    #[inline]
    pub async fn caches_meta_dir(&self) -> PathBuf {
        self.caches_dir().join("metadata")
    }
/// Get path from environment variable
#[inline]
fn env_path(name: &str) -> Option<PathBuf> {
std::env::var_os(name).map(PathBuf::from)
}
    /// Relocate launcher data when the user changes the custom data directory.
    ///
    /// Compares `settings.prev_custom_dir` against the currently configured
    /// `settings.custom_dir` (falling back to the default app dir). When they
    /// differ, copies caches/logs into the app dir and profiles/metadata into
    /// the new custom dir, then rewrites stored Java paths to point at the new
    /// location. Finally persists the (possibly updated) settings row.
    ///
    /// No-op when `prev_custom_dir` is unset.
    pub async fn move_launcher_directory<'a, E>(
        settings: &mut Settings,
        exec: E,
        io_semaphore: &IoSemaphore,
    ) -> crate::Result<()>
    where
        E: sqlx::Executor<'a, Database = sqlx::Sqlite> + Copy,
    {
        if let Some(ref prev_custom_dir) = settings.prev_custom_dir {
            let prev_dir = PathBuf::from(prev_custom_dir);
            let app_dir = DirectoryInfo::get_initial_settings_dir().ok_or(
                crate::ErrorKind::FSError(
                    "Could not find valid config dir".to_string(),
                ),
            )?;
            // Destination: the configured custom dir, or the default app dir.
            let move_dir = settings
                .custom_dir
                .as_ref()
                .map(PathBuf::from)
                .unwrap_or_else(|| app_dir.clone());

            // Probe writability by creating and deleting a temp file; a failed
            // write is reported as Ok(false) so the caller can roll back.
            async fn is_dir_writeable(
                new_config_dir: &Path,
            ) -> crate::Result<bool> {
                let temp_path = new_config_dir.join(".tmp");
                match fs::write(temp_path.clone(), "test").await {
                    Ok(_) => {
                        fs::remove_file(temp_path).await?;
                        Ok(true)
                    }
                    Err(e) => {
                        tracing::error!(
                            "Error writing to new config dir: {}",
                            e
                        );
                        Ok(false)
                    }
                }
            }

            // Copy every file under `source` to the same relative location
            // under `destination`. NOTE(review): despite the name, this copies
            // rather than moves — the source tree is left in place; confirm
            // whether cleanup is handled elsewhere.
            async fn move_directory(
                source: &Path,
                destination: &Path,
                io_semaphore: &IoSemaphore,
            ) -> crate::Result<()> {
                if !source.exists() {
                    crate::util::io::create_dir_all(source).await?;
                }
                if !destination.exists() {
                    crate::util::io::create_dir_all(destination).await?;
                }
                for entry_path in
                    crate::pack::import::get_all_subfiles(source).await?
                {
                    let relative_path = entry_path.strip_prefix(source)?;
                    let new_path = destination.join(relative_path);
                    crate::util::fetch::copy(
                        &entry_path,
                        &new_path,
                        io_semaphore,
                    )
                    .await?;
                }
                Ok(())
            }

            let new_dir = move_dir.to_string_lossy().to_string();
            if prev_dir != move_dir {
                // Roll the setting back and bail if the target isn't writable.
                // NOTE(review): this early return skips settings.update() —
                // the in-memory rollback is only persisted if the caller saves
                // settings; confirm that is intended.
                if !is_dir_writeable(&move_dir).await? {
                    settings.custom_dir = Some(prev_custom_dir.clone());
                    return Ok(());
                }
                // Caches and logs always live in the default app dir.
                move_directory(
                    &prev_dir.join(CACHES_FOLDER_NAME),
                    &app_dir.join(CACHES_FOLDER_NAME),
                    io_semaphore,
                )
                .await?;
                move_directory(
                    &prev_dir.join(LAUNCHER_LOGS_FOLDER_NAME),
                    &app_dir.join(LAUNCHER_LOGS_FOLDER_NAME),
                    io_semaphore,
                )
                .await?;
                // Profiles and metadata follow the custom dir.
                move_directory(
                    &prev_dir.join(PROFILES_FOLDER_NAME),
                    &move_dir.join(PROFILES_FOLDER_NAME),
                    io_semaphore,
                )
                .await?;
                move_directory(
                    &prev_dir.join(METADATA_FOLDER_NAME),
                    &move_dir.join(METADATA_FOLDER_NAME),
                    io_semaphore,
                )
                .await?;

                // Rewrite stored Java executable paths to the new root.
                let java_versions = JavaVersion::get_all(exec).await?;
                for (_, mut java_version) in java_versions {
                    java_version.path = java_version.path.replace(
                        prev_custom_dir,
                        new_dir.trim_end_matches('/').trim_end_matches('\\'),
                    );
                    java_version.upsert(exec).await?;
                }
            }
            settings.custom_dir = Some(new_dir.clone());
            settings.prev_custom_dir = Some(new_dir);
            settings.update(exec).await?;
        }
        Ok(())
    }
}

View File

@@ -6,6 +6,7 @@ use discord_rich_presence::{
};
use tokio::sync::RwLock;
use crate::state::{Process, Profile};
use crate::State;
pub struct DiscordGuard {
@@ -16,7 +17,7 @@ pub struct DiscordGuard {
impl DiscordGuard {
/// Initialize discord IPC client, and attempt to connect to it
/// If it fails, it will still return a DiscordGuard, but the client will be unconnected
pub async fn init(is_offline: bool) -> crate::Result<DiscordGuard> {
pub async fn init() -> crate::Result<DiscordGuard> {
let mut dipc =
DiscordIpcClient::new("1123683254248148992").map_err(|e| {
crate::ErrorKind::OtherError(format!(
@@ -25,13 +26,9 @@ impl DiscordGuard {
))
})?;
let connected = if !is_offline {
let res = dipc.connect(); // Do not need to connect to Discord to use app
if res.is_ok() {
Arc::new(AtomicBool::new(true))
} else {
Arc::new(AtomicBool::new(false))
}
let res = dipc.connect(); // Do not need to connect to Discord to use app
let connected = if res.is_ok() {
Arc::new(AtomicBool::new(true))
} else {
Arc::new(AtomicBool::new(false))
};
@@ -56,19 +53,6 @@ impl DiscordGuard {
true
}
// check online
pub async fn check_online(&self) -> bool {
let state = match State::get().await {
Ok(s) => s,
Err(_) => return false,
};
let offline = state.offline.read().await;
if *offline {
return false;
}
true
}
/// Set the activity to the given message
/// First checks if discord is disabled, and if so, clear the activity instead
pub async fn set_activity(
@@ -76,14 +60,10 @@ impl DiscordGuard {
msg: &str,
reconnect_if_fail: bool,
) -> crate::Result<()> {
if !self.check_online().await {
return Ok(());
}
// Check if discord is disabled, and if so, clear the activity instead
let state = State::get().await?;
let settings = state.settings.read().await;
if settings.disable_discord_rpc {
let settings = crate::state::Settings::get(&state.pool).await?;
if !settings.discord_rpc {
Ok(self.clear_activity(true).await?)
} else {
Ok(self.force_set_activity(msg, reconnect_if_fail).await?)
@@ -145,7 +125,7 @@ impl DiscordGuard {
reconnect_if_fail: bool,
) -> crate::Result<()> {
// Attempt to connect if not connected. Do not continue if it fails, as the client.clear_activity can panic if it never was connected
if !self.check_online().await || !self.retry_if_not_ready().await {
if !self.retry_if_not_ready().await {
return Ok(());
}
@@ -184,30 +164,25 @@ impl DiscordGuard {
&self,
reconnect_if_fail: bool,
) -> crate::Result<()> {
let state: Arc<tokio::sync::RwLockReadGuard<'_, State>> =
State::get().await?;
let state = State::get().await?;
{
let settings = state.settings.read().await;
if settings.disable_discord_rpc {
println!("Discord is disabled, clearing activity");
return self.clear_activity(true).await;
}
let settings = crate::state::Settings::get(&state.pool).await?;
if !settings.discord_rpc {
println!("Discord is disabled, clearing activity");
return self.clear_activity(true).await;
}
if let Some(existing_child) = state
.children
.read()
.await
.running_profile_paths()
.await?
.first()
{
self.set_activity(
&format!("Playing {}", existing_child),
reconnect_if_fail,
)
.await?;
let running_profiles = Process::get_all(&state.pool).await?;
if let Some(existing_child) = running_profiles.first() {
let prof =
Profile::get(&existing_child.profile_path, &state.pool).await?;
if let Some(prof) = prof {
self.set_activity(
&format!("Playing {}", prof.name),
reconnect_if_fail,
)
.await?;
}
} else {
self.set_activity("Idling...", reconnect_if_fail).await?;
}

View File

@@ -0,0 +1,155 @@
use crate::event::emit::{emit_profile, emit_warning};
use crate::event::ProfilePayloadType;
use crate::state::{DirectoryInfo, ProfileInstallStage, ProjectType};
use futures::{channel::mpsc::channel, SinkExt, StreamExt};
use notify::{RecommendedWatcher, RecursiveMode};
use notify_debouncer_mini::{new_debouncer, DebounceEventResult, Debouncer};
use std::time::Duration;
use tokio::sync::RwLock;
pub type FileWatcher = RwLock<Debouncer<RecommendedWatcher>>;
/// Create the debounced filesystem watcher and spawn the background task that
/// turns raw file events into profile sync / crash notifications.
///
/// Events are debounced for one second, forwarded over a bounded (size 1)
/// futures channel, and consumed by a tokio task for the life of the app.
pub async fn init_watcher() -> crate::Result<FileWatcher> {
    let (mut tx, mut rx) = channel(1);
    let file_watcher = new_debouncer(
        Duration::from_secs_f32(1.0),
        // The debouncer invokes this callback on its own (non-async) thread,
        // so the channel send is driven with a blocking executor.
        move |res: DebounceEventResult| {
            futures::executor::block_on(async {
                tx.send(res).await.unwrap();
            })
        },
    )?;
    tokio::task::spawn(async move {
        let span = tracing::span!(tracing::Level::INFO, "init_watcher");
        tracing::info!(parent: &span, "Initting watcher");
        while let Some(res) = rx.next().await {
            let _span = span.enter();
            match res {
                Ok(events) => {
                    // Profiles already notified within this event batch.
                    let mut visited_profiles = Vec::new();
                    events.iter().for_each(|e| {
                        // The profile id is the path component immediately
                        // after the profiles folder in the event path.
                        let mut profile_path = None;
                        let mut found = false;
                        for component in e.path.components() {
                            if found {
                                profile_path = Some(
                                    component.as_os_str().to_string_lossy(),
                                );
                                break;
                            }
                            if component.as_os_str()
                                == crate::state::dirs::PROFILES_FOLDER_NAME
                            {
                                found = true;
                            }
                        }
                        if let Some(profile_path) = profile_path {
                            // A .txt file under crash-reports triggers a crash
                            // warning; any other change emits a single Synced
                            // event per profile per batch.
                            if e.path
                                .components()
                                .any(|x| x.as_os_str() == "crash-reports")
                                && e.path
                                    .extension()
                                    .map(|x| x == "txt")
                                    .unwrap_or(false)
                            {
                                crash_task(profile_path.to_string());
                            } else if !visited_profiles.contains(&profile_path)
                            {
                                let path = profile_path.to_string();
                                tokio::spawn(async move {
                                    let _ = emit_profile(
                                        &path,
                                        ProfilePayloadType::Synced,
                                    )
                                    .await;
                                });
                                visited_profiles.push(profile_path);
                            }
                        }
                    });
                }
                Err(error) => tracing::warn!("Unable to watch file: {error}"),
            }
        }
    });
    Ok(RwLock::new(file_watcher))
}
/// Registers a filesystem watch for every profile directory that already
/// exists on disk. Profiles that cannot be enumerated are silently skipped.
pub(crate) async fn watch_profiles_init(
    watcher: &FileWatcher,
    dirs: &DirectoryInfo,
) -> crate::Result<()> {
    match std::fs::read_dir(dirs.profiles_dir()) {
        Ok(entries) => {
            for entry in entries.flatten() {
                let file_name = entry.file_name();
                if let Some(name) = file_name.to_str() {
                    // macOS Finder metadata is not a profile; skip it.
                    if name.starts_with(".DS_Store") {
                        continue;
                    }
                    watch_profile(name, watcher, dirs).await?;
                }
            }
        }
        // A missing profiles directory simply means nothing to watch yet.
        Err(_) => {}
    }
    Ok(())
}
/// Ensures the watched subfolders of a single profile exist on disk, then
/// registers each of them (recursively) with the debounced file watcher.
pub(crate) async fn watch_profile(
    profile_path: &str,
    watcher: &FileWatcher,
    dirs: &DirectoryInfo,
) -> crate::Result<()> {
    let profile_root = dirs.profiles_dir().join(profile_path);
    // Every project-type folder, plus crash-reports for crash detection.
    let folders = ProjectType::iterator()
        .map(|x| x.get_folder())
        .chain(["crash-reports"]);
    for folder in folders {
        let target = profile_root.join(folder);
        if !target.exists() {
            crate::util::io::create_dir_all(&target).await?;
        }
        watcher
            .write()
            .await
            .watcher()
            .watch(&target, RecursiveMode::Recursive)?;
    }
    Ok(())
}
fn crash_task(path: String) {
tokio::task::spawn(async move {
let res = async {
let profile = crate::api::profile::get(&path).await?;
if let Some(profile) = profile {
// Hide warning if profile is not yet installed
if profile.install_stage == ProfileInstallStage::Installed {
emit_warning(&format!("Profile {} has crashed! Visit the logs page to see a crash report.", profile.name)).await?;
}
}
Ok::<(), crate::Error>(())
}
.await;
match res {
Ok(()) => {}
Err(err) => {
tracing::warn!("Unable to send crash report to frontend: {err}")
}
};
});
}

View File

@@ -1,66 +1,94 @@
use dashmap::DashMap;
use futures::TryStreamExt;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::PathBuf;
use crate::prelude::JavaVersion;
use crate::util::jre;
// All stored Java versions, chosen by the user
// A wrapper over a Hashmap connecting key -> java version
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct JavaGlobals(HashMap<String, JavaVersion>);
impl JavaGlobals {
pub fn new() -> JavaGlobals {
JavaGlobals(HashMap::new())
}
pub fn insert(&mut self, key: String, java: JavaVersion) {
self.0.insert(key, java);
}
pub fn remove(&mut self, key: &String) {
self.0.remove(key);
}
pub fn get(&self, key: &String) -> Option<&JavaVersion> {
self.0.get(key)
}
pub fn get_mut(&mut self, key: &String) -> Option<&mut JavaVersion> {
self.0.get_mut(key)
}
pub fn count(&self) -> usize {
self.0.len()
}
pub fn keys(&self) -> Vec<String> {
self.0.keys().cloned().collect()
}
// Validates that every path here is a valid Java version and that the version matches the version stored here
// If false, when checked, the user should be prompted to reselect the Java version
pub async fn is_all_valid(&self) -> bool {
for (_, java) in self.0.iter() {
let jre = jre::check_java_at_filepath(
PathBuf::from(&java.path).as_path(),
)
.await;
if let Some(jre) = jre {
if jre.version != java.version {
return false;
}
} else {
return false;
}
}
true
}
/// A Java installation known to the launcher, mirroring one row of the
/// `java_versions` SQLite table.
#[derive(Debug, PartialEq, Eq, Hash, Serialize, Deserialize, Clone)]
pub struct JavaVersion {
    /// Major Java release (e.g. 8, 17); primary key in the table.
    pub major_version: u32,
    /// Full version string (stored as `full_version` in the table).
    pub version: String,
    /// CPU architecture string of the installation.
    pub architecture: String,
    /// Filesystem path to the Java executable.
    pub path: String,
}
impl Default for JavaGlobals {
fn default() -> Self {
Self::new()
impl JavaVersion {
    /// Look up the stored Java installation for a given major version.
    /// Returns `Ok(None)` when no row exists.
    pub async fn get(
        major_version: u32,
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
    ) -> crate::Result<Option<JavaVersion>> {
        // SQLite binds integers as signed; Java major versions are far below
        // i32::MAX so this cast is lossless in practice.
        let version = major_version as i32;
        let res = sqlx::query!(
            "
            SELECT
            full_version, architecture, path
            FROM java_versions
            WHERE major_version = $1
            ",
            version
        )
        .fetch_optional(exec)
        .await?;
        Ok(res.map(|x| JavaVersion {
            major_version,
            version: x.full_version,
            architecture: x.architecture,
            path: x.path,
        }))
    }

    /// Fetch every stored Java installation, keyed by major version.
    pub async fn get_all(
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
    ) -> crate::Result<DashMap<u32, Self>> {
        let res = sqlx::query!(
            "
            SELECT
            major_version, full_version, architecture, path
            FROM java_versions
            "
        )
        .fetch(exec)
        // Stream rows straight into the map instead of collecting a Vec.
        .try_fold(DashMap::new(), |acc, x| {
            acc.insert(
                x.major_version as u32,
                JavaVersion {
                    major_version: x.major_version as u32,
                    version: x.full_version,
                    architecture: x.architecture,
                    path: x.path,
                },
            );
            async move { Ok(acc) }
        })
        .await?;
        Ok(res)
    }

    /// Insert this installation, or update the existing row with the same
    /// major version.
    pub async fn upsert(
        &self,
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
    ) -> crate::Result<()> {
        let major_version = self.major_version as i32;
        sqlx::query!(
            "
            INSERT INTO java_versions (major_version, full_version, architecture, path)
            VALUES ($1, $2, $3, $4)
            ON CONFLICT (major_version) DO UPDATE SET
                full_version = $2,
                architecture = $3,
                path = $4
            ",
            major_version,
            self.version,
            self.architecture,
            self.path,
        )
        .execute(exec)
        .await?;
        Ok(())
    }
}

View File

@@ -0,0 +1,513 @@
use crate::data::DirectoryInfo;
use crate::jre::check_jre;
use crate::prelude::ModLoader;
use crate::state;
use crate::state::{
Credentials, DefaultPage, DeviceToken, DeviceTokenKey, DeviceTokenPair,
Hooks, LinkedData, MemorySettings, ModrinthCredentials, Profile,
ProfileInstallStage, Theme, WindowSize,
};
use crate::util::fetch::{read_json, IoSemaphore};
use chrono::{DateTime, Utc};
use p256::ecdsa::SigningKey;
use p256::pkcs8::DecodePrivateKey;
use serde::Deserialize;
use std::collections::HashMap;
use std::path::PathBuf;
use tokio::sync::Semaphore;
use uuid::Uuid;
/// One-shot migration of pre-SQLite launcher data (JSON files on disk) into
/// the new database: settings, Java installs, Modrinth/Minecraft credentials,
/// the device token, and every profile's `profile.json`.
///
/// Runs at most once: `settings.migrated` is set at the end and checked at
/// the start. Each legacy file is read best-effort — a missing or unreadable
/// file is skipped without failing the migration.
pub async fn migrate_legacy_data<'a, E>(exec: E) -> crate::Result<()>
where
    E: sqlx::Executor<'a, Database = sqlx::Sqlite> + Copy,
{
    let mut settings = state::Settings::get(exec).await?;
    if settings.migrated {
        return Ok(());
    };
    // No legacy config dir on this platform: nothing to migrate.
    let old_launcher_root = if let Some(dir) = default_settings_dir() {
        dir
    } else {
        return Ok(());
    };
    let old_launcher_root_str = old_launcher_root.to_string_lossy().to_string();
    let new_launcher_root = DirectoryInfo::get_initial_settings_dir().ok_or(
        crate::ErrorKind::FSError(
            "Could not find valid config dir".to_string(),
        ),
    )?;
    // Trailing separators are stripped so string replacement of path
    // prefixes below behaves consistently across platforms.
    let new_launcher_root_str = new_launcher_root
        .to_string_lossy()
        .to_string()
        .trim_end_matches('/')
        .trim_end_matches('\\')
        .to_string();
    let io_semaphore = IoSemaphore(Semaphore::new(10));
    // --- settings.json -> settings table ---
    // NOTE(review): everything below (including setting `migrated = true`)
    // only runs when settings.json parses; if it is absent the migration will
    // be re-attempted on every launch — confirm that is intended.
    let settings_path = old_launcher_root.join("settings.json");
    if let Ok(legacy_settings) =
        read_json::<LegacySettings>(&settings_path, &io_semaphore).await
    {
        settings.max_concurrent_writes = legacy_settings.max_concurrent_writes;
        settings.max_concurrent_downloads =
            legacy_settings.max_concurrent_downloads;
        settings.theme = match legacy_settings.theme {
            LegacyTheme::Dark => Theme::Dark,
            LegacyTheme::Light => Theme::Light,
            LegacyTheme::Oled => Theme::Oled,
        };
        settings.default_page = match legacy_settings.default_page {
            LegacyDefaultPage::Home => DefaultPage::Home,
            LegacyDefaultPage::Library => DefaultPage::Library,
        };
        settings.collapsed_navigation = legacy_settings.collapsed_navigation;
        settings.advanced_rendering = legacy_settings.advanced_rendering;
        settings.native_decorations = legacy_settings.native_decorations;
        // Legacy flags were opt-out; the new flags are opt-in, hence negation.
        settings.telemetry = !legacy_settings.opt_out_analytics;
        settings.discord_rpc = !legacy_settings.disable_discord_rpc;
        settings.developer_mode = legacy_settings.developer_mode;
        settings.onboarded = legacy_settings.fully_onboarded;
        settings.extra_launch_args = legacy_settings.custom_java_args;
        settings.custom_env_vars = legacy_settings.custom_env_args;
        settings.memory.maximum = legacy_settings.memory.maximum;
        settings.force_fullscreen = legacy_settings.force_fullscreen;
        settings.game_resolution.0 = legacy_settings.game_resolution.0;
        settings.game_resolution.1 = legacy_settings.game_resolution.1;
        settings.hide_on_process_start = legacy_settings.hide_on_process;
        settings.hooks.pre_launch = legacy_settings.hooks.pre_launch;
        settings.hooks.wrapper = legacy_settings.hooks.wrapper;
        settings.hooks.post_exit = legacy_settings.hooks.post_exit;
        // Only record a custom dir if it differs from the default root.
        if let Some(path) = legacy_settings
            .loaded_config_dir
            .clone()
            .and_then(|x| x.to_str().map(|x| x.to_string()))
        {
            if path != old_launcher_root_str {
                settings.custom_dir = Some(path);
            }
        }
        settings.prev_custom_dir = Some(old_launcher_root_str.clone());
        // --- Java installs: re-validate each legacy path before storing ---
        for (_, legacy_version) in legacy_settings.java_globals.0 {
            if let Ok(Some(mut java_version)) =
                check_jre(PathBuf::from(legacy_version.path)).await
            {
                java_version.path = java_version
                    .path
                    .replace(&old_launcher_root_str, &new_launcher_root_str);
                java_version.upsert(exec).await?;
            }
        }
        // --- Modrinth session credentials ---
        let modrinth_auth_path =
            old_launcher_root.join("caches/metadata/auth.json");
        if let Ok(creds) = read_json::<LegacyModrinthCredentials>(
            &modrinth_auth_path,
            &io_semaphore,
        )
        .await
        {
            ModrinthCredentials {
                session: creds.session,
                expires: creds.expires_at,
                user_id: creds.user.id,
                active: true,
            }
            .upsert(exec)
            .await?;
        }
        // --- Minecraft accounts and the MSA device token ---
        let minecraft_auth_path =
            old_launcher_root.join("caches/metadata/minecraft_auth.json");
        if let Ok(minecraft_auth) = read_json::<LegacyMinecraftAuthStore>(
            &minecraft_auth_path,
            &io_semaphore,
        )
        .await
        {
            let minecraft_users_len = minecraft_auth.users.len();
            for (uuid, credential) in minecraft_auth.users {
                Credentials {
                    id: credential.id,
                    username: credential.username,
                    access_token: credential.access_token,
                    refresh_token: credential.refresh_token,
                    expires: credential.expires,
                    // The old default user becomes active; a lone account is
                    // active by definition.
                    active: minecraft_auth.default_user == Some(uuid)
                        || minecraft_users_len == 1,
                }
                .upsert(exec)
                .await?;
            }
            // The device token is only migrated if its PEM key and id still
            // parse; otherwise a fresh token will be generated later.
            if let Some(device_token) = minecraft_auth.token {
                if let Ok(private_key) =
                    SigningKey::from_pkcs8_pem(&device_token.private_key)
                {
                    if let Ok(uuid) = Uuid::parse_str(&device_token.id) {
                        DeviceTokenPair {
                            token: DeviceToken {
                                issue_instant: device_token.token.issue_instant,
                                not_after: device_token.token.not_after,
                                token: device_token.token.token,
                                display_claims: device_token
                                    .token
                                    .display_claims,
                            },
                            key: DeviceTokenKey {
                                id: uuid,
                                key: private_key,
                                x: device_token.x,
                                y: device_token.y,
                            },
                        }
                        .upsert(exec)
                        .await?;
                    }
                }
            }
        }
        // --- Profiles: each profile.json becomes a profiles table row ---
        if let Ok(profiles_dir) = std::fs::read_dir(
            &legacy_settings
                .loaded_config_dir
                .unwrap_or(old_launcher_root)
                .join("profiles"),
        ) {
            for entry in profiles_dir.flatten() {
                if entry.path().is_dir() {
                    let profile_path = entry.path().join("profile.json");
                    if let Ok(profile) =
                        read_json::<LegacyProfile>(&profile_path, &io_semaphore)
                            .await
                    {
                        Profile {
                            path: profile.path,
                            install_stage: match profile.install_stage {
                                LegacyProfileInstallStage::Installed => {
                                    ProfileInstallStage::Installed
                                }
                                LegacyProfileInstallStage::Installing => {
                                    ProfileInstallStage::Installing
                                }
                                LegacyProfileInstallStage::PackInstalling => {
                                    ProfileInstallStage::PackInstalling
                                }
                                LegacyProfileInstallStage::NotInstalled => {
                                    ProfileInstallStage::NotInstalled
                                }
                            },
                            name: profile.metadata.name,
                            // Icon paths may embed the old root; rewrite them.
                            icon_path: profile.metadata.icon.map(|x| {
                                x.replace(
                                    &old_launcher_root_str,
                                    &new_launcher_root_str,
                                )
                            }),
                            game_version: profile.metadata.game_version,
                            loader: match profile.metadata.loader {
                                LegacyModLoader::Vanilla => ModLoader::Vanilla,
                                LegacyModLoader::Forge => ModLoader::Forge,
                                LegacyModLoader::Fabric => ModLoader::Fabric,
                                LegacyModLoader::Quilt => ModLoader::Quilt,
                                LegacyModLoader::NeoForge => {
                                    ModLoader::NeoForge
                                }
                            },
                            loader_version: profile
                                .metadata
                                .loader_version
                                .map(|x| x.id),
                            groups: profile.metadata.groups,
                            // Modpack link data is only kept when all three
                            // fields are present.
                            linked_data: profile.metadata.linked_data.and_then(
                                |x| {
                                    if let Some(project_id) = x.project_id {
                                        if let Some(version_id) = x.version_id {
                                            if let Some(locked) = x.locked {
                                                return Some(LinkedData {
                                                    project_id,
                                                    version_id,
                                                    locked,
                                                });
                                            }
                                        }
                                    }
                                    None
                                },
                            ),
                            created: profile.metadata.date_created,
                            modified: profile.metadata.date_modified,
                            last_played: profile.metadata.last_played,
                            submitted_time_played: profile
                                .metadata
                                .submitted_time_played,
                            recent_time_played: profile
                                .metadata
                                .recent_time_played,
                            java_path: profile.java.as_ref().and_then(|x| {
                                x.override_version.clone().map(|x| {
                                    x.path.replace(
                                        &old_launcher_root_str,
                                        &new_launcher_root_str,
                                    )
                                })
                            }),
                            extra_launch_args: profile
                                .java
                                .as_ref()
                                .and_then(|x| x.extra_arguments.clone()),
                            custom_env_vars: profile
                                .java
                                .and_then(|x| x.custom_env_args),
                            memory: profile
                                .memory
                                .map(|x| MemorySettings { maximum: x.maximum }),
                            force_fullscreen: profile.fullscreen,
                            game_resolution: profile
                                .resolution
                                .map(|x| WindowSize(x.0, x.1)),
                            hooks: Hooks {
                                pre_launch: profile
                                    .hooks
                                    .as_ref()
                                    .and_then(|x| x.pre_launch.clone()),
                                wrapper: profile
                                    .hooks
                                    .as_ref()
                                    .and_then(|x| x.wrapper.clone()),
                                post_exit: profile
                                    .hooks
                                    .and_then(|x| x.post_exit),
                            },
                        }
                        .upsert(exec)
                        .await?;
                    }
                }
            }
        }
        // Mark the migration complete so it never runs again.
        settings.migrated = true;
        settings.update(exec).await?;
    }
    Ok(())
}
/// Shape of the pre-SQLite `settings.json`, used only during migration.
#[derive(Deserialize, Debug, Clone)]
struct LegacySettings {
    pub theme: LegacyTheme,
    pub memory: LegacyMemorySettings,
    #[serde(default)]
    pub force_fullscreen: bool,
    pub game_resolution: LegacyWindowSize,
    pub custom_java_args: Vec<String>,
    pub custom_env_args: Vec<(String, String)>,
    pub java_globals: LegacyJavaGlobals,
    pub hooks: LegacyHooks,
    pub max_concurrent_downloads: usize,
    pub max_concurrent_writes: usize,
    pub collapsed_navigation: bool,
    #[serde(default)]
    pub disable_discord_rpc: bool,
    #[serde(default)]
    pub hide_on_process: bool,
    #[serde(default)]
    pub native_decorations: bool,
    #[serde(default)]
    pub default_page: LegacyDefaultPage,
    #[serde(default)]
    pub developer_mode: bool,
    #[serde(default)]
    pub opt_out_analytics: bool,
    #[serde(default)]
    pub advanced_rendering: bool,
    #[serde(default)]
    pub fully_onboarded: bool,
    // Defaults to the platform config dir when absent from the JSON.
    #[serde(default = "default_settings_dir")]
    pub loaded_config_dir: Option<PathBuf>,
}
/// Platform config directory used by the legacy launcher
/// (`<config_dir>/com.modrinth.theseus`); `None` if no config dir exists.
fn default_settings_dir() -> Option<PathBuf> {
    Some(dirs::config_dir()?.join("com.modrinth.theseus"))
}
/// Legacy UI theme, serialized in snake_case in the old settings file.
#[derive(Debug, Clone, Copy, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum LegacyTheme {
    Dark,
    Light,
    Oled,
}
/// Legacy startup page preference.
#[derive(Deserialize, Default, Debug, Clone, Copy)]
enum LegacyDefaultPage {
    #[default]
    Home,
    Library,
}
/// Legacy launch hook commands (pre-launch, wrapper, post-exit).
#[derive(Deserialize, Debug, Clone)]
struct LegacyHooks {
    pub pre_launch: Option<String>,
    pub wrapper: Option<String>,
    pub post_exit: Option<String>,
}
/// Legacy memory settings; only the maximum was kept.
#[derive(Deserialize, Debug, Clone, Copy)]
struct LegacyMemorySettings {
    pub maximum: u32,
}
/// Legacy game window size as (width, height).
#[derive(Deserialize, Debug, Clone, Copy)]
struct LegacyWindowSize(pub u16, pub u16);
/// Legacy map of user-chosen Java installs, keyed by an arbitrary label.
#[derive(Debug, Deserialize, Clone)]
struct LegacyJavaGlobals(HashMap<String, LegacyJavaVersion>);
/// Legacy description of a single Java install.
#[derive(Debug, PartialEq, Eq, Hash, Deserialize, Clone)]
struct LegacyJavaVersion {
    pub path: String,
    pub version: String,
    pub architecture: String,
}
/// Legacy Modrinth user record; only the id is needed for migration.
#[derive(Deserialize, Clone, Debug)]
struct LegacyModrinthUser {
    pub id: String,
}
/// Legacy Modrinth session as stored in `caches/metadata/auth.json`.
#[derive(Deserialize, Clone, Debug)]
struct LegacyModrinthCredentials {
    pub session: String,
    pub expires_at: DateTime<Utc>,
    pub user: LegacyModrinthUser,
}
/// Legacy Minecraft account store (`caches/metadata/minecraft_auth.json`).
#[derive(Deserialize, Debug)]
struct LegacyMinecraftAuthStore {
    pub users: HashMap<Uuid, LegacyCredentials>,
    pub token: Option<LegacySaveDeviceToken>,
    pub default_user: Option<Uuid>,
}
/// Legacy per-account Minecraft credentials.
#[derive(Deserialize, Clone, Debug)]
struct LegacyCredentials {
    pub id: Uuid,
    pub username: String,
    pub access_token: String,
    pub refresh_token: String,
    pub expires: DateTime<Utc>,
}
/// Legacy serialized MSA device token plus its PKCS#8 PEM private key.
#[derive(Deserialize, Debug)]
struct LegacySaveDeviceToken {
    pub id: String,
    pub private_key: String,
    pub x: String,
    pub y: String,
    pub token: LegacyDeviceToken,
}
/// Legacy device token payload; PascalCase matches the Xbox API response.
#[derive(Deserialize, Clone, Debug)]
#[serde(rename_all = "PascalCase")]
struct LegacyDeviceToken {
    pub issue_instant: DateTime<Utc>,
    pub not_after: DateTime<Utc>,
    pub token: String,
    pub display_claims: HashMap<String, serde_json::Value>,
}
/// Shape of a legacy per-profile `profile.json`, used only during migration.
#[derive(Deserialize, Clone, Debug)]
struct LegacyProfile {
    #[serde(default)]
    pub install_stage: LegacyProfileInstallStage,
    #[serde(default)]
    pub path: String,
    pub metadata: LegacyProfileMetadata,
    pub java: Option<LegacyJavaSettings>,
    pub memory: Option<LegacyMemorySettings>,
    pub resolution: Option<LegacyWindowSize>,
    pub fullscreen: Option<bool>,
    pub hooks: Option<LegacyHooks>,
}
/// Legacy profile metadata (name, versions, play-time bookkeeping, etc.).
#[derive(Deserialize, Clone, Debug)]
struct LegacyProfileMetadata {
    pub name: String,
    pub icon: Option<String>,
    #[serde(default)]
    pub groups: Vec<String>,
    pub game_version: String,
    #[serde(default)]
    pub loader: LegacyModLoader,
    pub loader_version: Option<LegacyLoaderVersion>,
    pub linked_data: Option<LegacyLinkedData>,
    #[serde(default)]
    pub date_created: DateTime<Utc>,
    #[serde(default)]
    pub date_modified: DateTime<Utc>,
    pub last_played: Option<DateTime<Utc>>,
    #[serde(default)]
    pub submitted_time_played: u64,
    #[serde(default)]
    pub recent_time_played: u64,
}
/// Legacy mod loader, serialized lowercase; defaults to vanilla.
#[derive(Debug, Eq, PartialEq, Clone, Copy, Deserialize, Default)]
#[serde(rename_all = "lowercase")]
enum LegacyModLoader {
    #[default]
    Vanilla,
    Forge,
    Fabric,
    Quilt,
    NeoForge,
}
/// Legacy modpack link data tying a profile to a Modrinth project/version.
#[derive(Deserialize, Clone, Debug)]
struct LegacyLinkedData {
    pub project_id: Option<String>,
    pub version_id: Option<String>,
    // Older files omitted this flag; absent means locked.
    #[serde(default = "default_locked")]
    pub locked: Option<bool>,
}
/// Serde default for `LegacyLinkedData::locked`.
fn default_locked() -> Option<bool> {
    Some(true)
}
/// Legacy per-profile Java overrides.
#[derive(Deserialize, Clone, Debug)]
struct LegacyJavaSettings {
    pub override_version: Option<LegacyJavaVersion>,
    pub extra_arguments: Option<Vec<String>>,
    pub custom_env_args: Option<Vec<(String, String)>>,
}
/// Legacy loader version reference; only the id was used.
#[derive(Deserialize, Clone, Debug)]
struct LegacyLoaderVersion {
    pub id: String,
}
/// Legacy install state, serialized snake_case; defaults to not installed.
#[derive(Deserialize, Clone, Copy, Debug, Default, Eq, PartialEq)]
#[serde(rename_all = "snake_case")]
enum LegacyProfileInstallStage {
    Installed,
    Installing,
    PackInstalling,
    #[default]
    NotInstalled,
}

View File

@@ -1,173 +0,0 @@
//! Theseus metadata
use crate::data::DirectoryInfo;
use crate::util::fetch::{read_json, write, IoSemaphore};
use crate::State;
use daedalus::{
minecraft::{fetch_version_manifest, VersionManifest as MinecraftManifest},
modded::{
fetch_manifest as fetch_loader_manifest, Manifest as LoaderManifest,
},
};
use serde::{Deserialize, Serialize};
// Base URL of the Modrinth metadata service.
const METADATA_URL: &str = "https://meta.modrinth.com";
/// Version manifests for Minecraft and each supported mod loader, fetched
/// from the Modrinth metadata service and cached on disk as JSON.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Metadata {
    pub minecraft: MinecraftManifest,
    pub forge: LoaderManifest,
    pub fabric: LoaderManifest,
    pub quilt: LoaderManifest,
    pub neoforge: LoaderManifest,
}
impl Metadata {
    /// Build the manifest URL for a named metadata feed.
    fn get_manifest(name: &str) -> String {
        format!("{METADATA_URL}/{name}/v0/manifest.json")
    }

    /// Fetch all five manifests concurrently; fails if any one fails.
    /// Note: NeoForge is served under the "neo" feed name.
    pub async fn fetch() -> crate::Result<Self> {
        let (minecraft, forge, fabric, quilt, neoforge) = tokio::try_join! {
            async {
                let url = Self::get_manifest("minecraft");
                fetch_version_manifest(Some(&url)).await
            },
            async {
                let url = Self::get_manifest("forge");
                fetch_loader_manifest(&url).await
            },
            async {
                let url = Self::get_manifest("fabric");
                fetch_loader_manifest(&url).await
            },
            async {
                let url = Self::get_manifest("quilt");
                fetch_loader_manifest(&url).await
            },
            async {
                let url = Self::get_manifest("neo");
                fetch_loader_manifest(&url).await
            }
        }?;
        Ok(Self {
            minecraft,
            forge,
            fabric,
            quilt,
            neoforge,
        })
    }

    // Load metadata at startup: prefer the on-disk cache, then (if allowed)
    // the network, then the on-disk backup. Errors only when all three fail.
    #[tracing::instrument(skip(io_semaphore))]
    #[theseus_macros::debug_pin]
    pub async fn init(
        dirs: &DirectoryInfo,
        fetch_online: bool,
        io_semaphore: &IoSemaphore,
    ) -> crate::Result<Self> {
        let mut metadata = None;
        let metadata_path = dirs.caches_meta_dir().await.join("metadata.json");
        let metadata_backup_path =
            dirs.caches_meta_dir().await.join("metadata.json.bak");
        if let Ok(metadata_json) =
            read_json::<Metadata>(&metadata_path, io_semaphore).await
        {
            metadata = Some(metadata_json);
        } else if fetch_online {
            // Fetch fresh metadata and write both the cache and its backup;
            // failures here are logged, not fatal (backup may still exist).
            let res = async {
                let metadata_fetch = Self::fetch().await?;
                write(
                    &metadata_path,
                    &serde_json::to_vec(&metadata_fetch).unwrap_or_default(),
                    io_semaphore,
                )
                .await?;
                write(
                    &metadata_backup_path,
                    &serde_json::to_vec(&metadata_fetch).unwrap_or_default(),
                    io_semaphore,
                )
                .await?;
                metadata = Some(metadata_fetch);
                Ok::<(), crate::Error>(())
            }
            .await;
            match res {
                Ok(()) => {}
                Err(err) => {
                    tracing::warn!("Unable to fetch launcher metadata: {err}")
                }
            }
        } else if let Ok(metadata_json) =
            read_json::<Metadata>(&metadata_backup_path, io_semaphore).await
        {
            // Offline with a corrupt primary cache: restore from the backup.
            metadata = Some(metadata_json);
            std::fs::copy(&metadata_backup_path, &metadata_path).map_err(
                |err| {
                    crate::ErrorKind::FSError(format!(
                        "Error restoring metadata backup: {err}"
                    ))
                    .as_error()
                },
            )?;
        }
        if let Some(meta) = metadata {
            Ok(meta)
        } else {
            Err(
                crate::ErrorKind::NoValueFor(String::from("launcher metadata"))
                    .as_error(),
            )
        }
    }

    /// Refresh the in-memory and on-disk metadata in the background, keeping
    /// the previous cache as a backup. Errors are logged, never propagated.
    pub async fn update() {
        let res = async {
            let metadata_fetch = Metadata::fetch().await?;
            let state = State::get().await?;
            let metadata_path = state
                .directories
                .caches_meta_dir()
                .await
                .join("metadata.json");
            let metadata_backup_path = state
                .directories
                .caches_meta_dir()
                .await
                .join("metadata.json.bak");
            if metadata_path.exists() {
                std::fs::copy(&metadata_path, &metadata_backup_path)?;
            }
            write(
                &metadata_path,
                &serde_json::to_vec(&metadata_fetch)?,
                &state.io_semaphore,
            )
            .await?;
            let mut old_metadata = state.metadata.write().await;
            *old_metadata = metadata_fetch;
            Ok::<(), crate::Error>(())
        }
        .await;
        match res {
            Ok(()) => {}
            Err(err) => {
                tracing::warn!("Unable to update launcher metadata: {err}")
            }
        };
    }
}

View File

@@ -1,10 +1,11 @@
use crate::data::DirectoryInfo;
use crate::util::fetch::{read_json, write, IoSemaphore, REQWEST_CLIENT};
use crate::{ErrorKind, State};
use crate::util::fetch::REQWEST_CLIENT;
use crate::ErrorKind;
use base64::prelude::{BASE64_STANDARD, BASE64_URL_SAFE_NO_PAD};
use base64::Engine;
use byteorder::BigEndian;
use chrono::{DateTime, Duration, Utc};
use chrono::{DateTime, Duration, TimeZone, Utc};
use dashmap::DashMap;
use futures::TryStreamExt;
use p256::ecdsa::signature::Signer;
use p256::ecdsa::{Signature, SigningKey, VerifyingKey};
use p256::pkcs8::{DecodePrivateKey, EncodePrivateKey, LineEnding};
@@ -73,17 +74,6 @@ pub enum MinecraftAuthenticationError {
NoUserHash,
}
const AUTH_JSON: &str = "minecraft_auth.json";
#[derive(Serialize, Deserialize, Debug)]
pub struct SaveDeviceToken {
pub id: String,
pub private_key: String,
pub x: String,
pub y: String,
pub token: DeviceToken,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct MinecraftLoginFlow {
pub verifier: String,
@@ -92,327 +82,119 @@ pub struct MinecraftLoginFlow {
pub redirect_uri: String,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct MinecraftAuthStore {
pub users: HashMap<Uuid, Credentials>,
pub token: Option<SaveDeviceToken>,
pub default_user: Option<Uuid>,
}
impl MinecraftAuthStore {
#[tracing::instrument]
pub async fn init(
dirs: &DirectoryInfo,
io_semaphore: &IoSemaphore,
) -> crate::Result<Self> {
let auth_path = dirs.caches_meta_dir().await.join(AUTH_JSON);
let store = read_json(&auth_path, io_semaphore).await.ok();
if let Some(store) = store {
Ok(store)
} else {
Ok(Self {
users: HashMap::new(),
token: None,
default_user: None,
})
}
}
#[tracing::instrument(skip(self))]
pub async fn save(&self) -> crate::Result<()> {
let state = State::get().await?;
let auth_path =
state.directories.caches_meta_dir().await.join(AUTH_JSON);
write(&auth_path, &serde_json::to_vec(&self)?, &state.io_semaphore)
#[tracing::instrument]
pub async fn login_begin(
exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite> + Copy,
) -> crate::Result<MinecraftLoginFlow> {
let (pair, current_date, valid_date) =
DeviceTokenPair::refresh_and_get_device_token(Utc::now(), false, exec)
.await?;
Ok(())
}
let verifier = generate_oauth_challenge();
let mut hasher = sha2::Sha256::new();
hasher.update(&verifier);
let result = hasher.finalize();
let challenge = BASE64_URL_SAFE_NO_PAD.encode(result);
#[tracing::instrument(skip(self))]
async fn refresh_and_get_device_token(
&mut self,
current_date: DateTime<Utc>,
force_generate: bool,
) -> crate::Result<(DeviceTokenKey, DeviceToken, DateTime<Utc>, bool)> {
macro_rules! generate_key {
($self:ident, $generate_key:expr, $device_token:expr, $SaveDeviceToken:path) => {{
let key = generate_key()?;
let res = device_token(&key, current_date).await?;
self.token = Some(SaveDeviceToken {
id: key.id.clone(),
private_key: key
.key
.to_pkcs8_pem(LineEnding::default())
.map_err(|err| {
MinecraftAuthenticationError::PEMSerialize(err)
})?
.to_string(),
x: key.x.clone(),
y: key.y.clone(),
token: res.value.clone(),
});
self.save().await?;
(key, res.value, res.date, true)
}};
}
let (key, token, date, valid_date) = if let Some(ref token) = self.token
{
if let Ok(private_key) =
SigningKey::from_pkcs8_pem(&token.private_key)
{
if token.token.not_after > Utc::now() && !force_generate {
(
DeviceTokenKey {
id: token.id.clone(),
key: private_key,
x: token.x.clone(),
y: token.y.clone(),
},
token.token.clone(),
current_date,
match sisu_authenticate(
&pair.token.token,
&challenge,
&pair.key,
current_date,
)
.await
{
Ok((session_id, redirect_uri)) => Ok(MinecraftLoginFlow {
verifier,
challenge,
session_id,
redirect_uri: redirect_uri.value.msa_oauth_redirect,
}),
Err(err) => {
if !valid_date {
let (pair, current_date, _) =
DeviceTokenPair::refresh_and_get_device_token(
Utc::now(),
false,
)
} else {
let key = DeviceTokenKey {
id: token.id.clone(),
key: private_key,
x: token.x.clone(),
y: token.y.clone(),
};
let res = device_token(&key, current_date).await?;
(key, res.value, res.date, true)
}
} else {
generate_key!(self, generate_key, device_token, SaveDeviceToken)
}
} else {
generate_key!(self, generate_key, device_token, SaveDeviceToken)
};
Ok((key, token, date, valid_date))
}
#[tracing::instrument(skip(self))]
pub async fn login_begin(&mut self) -> crate::Result<MinecraftLoginFlow> {
let (key, token, current_date, valid_date) =
self.refresh_and_get_device_token(Utc::now(), false).await?;
let verifier = generate_oauth_challenge();
let mut hasher = sha2::Sha256::new();
hasher.update(&verifier);
let result = hasher.finalize();
let challenge = BASE64_URL_SAFE_NO_PAD.encode(result);
match sisu_authenticate(&token.token, &challenge, &key, current_date)
.await
{
Ok((session_id, redirect_uri)) => Ok(MinecraftLoginFlow {
verifier,
challenge,
session_id,
redirect_uri: redirect_uri.value.msa_oauth_redirect,
}),
Err(err) => {
if !valid_date {
let (key, token, current_date, _) = self
.refresh_and_get_device_token(Utc::now(), false)
.await?;
let verifier = generate_oauth_challenge();
let mut hasher = sha2::Sha256::new();
hasher.update(&verifier);
let result = hasher.finalize();
let challenge = BASE64_URL_SAFE_NO_PAD.encode(result);
let (session_id, redirect_uri) = sisu_authenticate(
&token.token,
&challenge,
&key,
current_date,
exec,
)
.await?;
Ok(MinecraftLoginFlow {
verifier,
challenge,
session_id,
redirect_uri: redirect_uri.value.msa_oauth_redirect,
})
} else {
Err(crate::ErrorKind::from(err).into())
}
let verifier = generate_oauth_challenge();
let mut hasher = sha2::Sha256::new();
hasher.update(&verifier);
let result = hasher.finalize();
let challenge = BASE64_URL_SAFE_NO_PAD.encode(result);
let (session_id, redirect_uri) = sisu_authenticate(
&pair.token.token,
&challenge,
&pair.key,
current_date,
)
.await?;
Ok(MinecraftLoginFlow {
verifier,
challenge,
session_id,
redirect_uri: redirect_uri.value.msa_oauth_redirect,
})
} else {
Err(crate::ErrorKind::from(err).into())
}
}
}
}
#[tracing::instrument(skip(self))]
pub async fn login_finish(
&mut self,
code: &str,
flow: MinecraftLoginFlow,
) -> crate::Result<Credentials> {
let (key, token, _, _) =
self.refresh_and_get_device_token(Utc::now(), false).await?;
let oauth_token = oauth_token(code, &flow.verifier).await?;
let sisu_authorize = sisu_authorize(
Some(&flow.session_id),
&oauth_token.value.access_token,
&token.token,
&key,
oauth_token.date,
)
.await?;
let xbox_token = xsts_authorize(
sisu_authorize.value,
&token.token,
&key,
sisu_authorize.date,
)
.await?;
let minecraft_token = minecraft_token(xbox_token.value).await?;
minecraft_entitlements(&minecraft_token.access_token).await?;
let profile = minecraft_profile(&minecraft_token.access_token).await?;
let profile_id = profile.id.unwrap_or_default();
let credentials = Credentials {
id: profile_id,
username: profile.name,
access_token: minecraft_token.access_token,
refresh_token: oauth_token.value.refresh_token,
expires: oauth_token.date
+ Duration::seconds(oauth_token.value.expires_in as i64),
};
self.users.insert(profile_id, credentials.clone());
if self.default_user.is_none() {
self.default_user = Some(profile_id);
}
self.save().await?;
Ok(credentials)
}
async fn refresh_token(
&mut self,
creds: &Credentials,
) -> crate::Result<Option<Credentials>> {
let cred_id = creds.id;
let profile_name = creds.username.clone();
let oauth_token = oauth_refresh(&creds.refresh_token).await?;
let (key, token, current_date, _) = self
.refresh_and_get_device_token(oauth_token.date, false)
#[tracing::instrument]
pub async fn login_finish(
code: &str,
flow: MinecraftLoginFlow,
exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite> + Copy,
) -> crate::Result<Credentials> {
let (pair, _, _) =
DeviceTokenPair::refresh_and_get_device_token(Utc::now(), false, exec)
.await?;
let sisu_authorize = sisu_authorize(
None,
&oauth_token.value.access_token,
&token.token,
&key,
current_date,
)
.await?;
let oauth_token = oauth_token(code, &flow.verifier).await?;
let sisu_authorize = sisu_authorize(
Some(&flow.session_id),
&oauth_token.value.access_token,
&pair.token.token,
&pair.key,
oauth_token.date,
)
.await?;
let xbox_token = xsts_authorize(
sisu_authorize.value,
&token.token,
&key,
sisu_authorize.date,
)
.await?;
let xbox_token = xsts_authorize(
sisu_authorize.value,
&pair.token.token,
&pair.key,
sisu_authorize.date,
)
.await?;
let minecraft_token = minecraft_token(xbox_token.value).await?;
let minecraft_token = minecraft_token(xbox_token.value).await?;
minecraft_entitlements(&minecraft_token.access_token).await?;
let val = Credentials {
id: cred_id,
username: profile_name,
access_token: minecraft_token.access_token,
refresh_token: oauth_token.value.refresh_token,
expires: oauth_token.date
+ Duration::seconds(oauth_token.value.expires_in as i64),
};
let profile = minecraft_profile(&minecraft_token.access_token).await?;
self.users.insert(val.id, val.clone());
self.save().await?;
let profile_id = profile.id.unwrap_or_default();
Ok(Some(val))
}
let credentials = Credentials {
id: profile_id,
username: profile.name,
access_token: minecraft_token.access_token,
refresh_token: oauth_token.value.refresh_token,
expires: oauth_token.date
+ Duration::seconds(oauth_token.value.expires_in as i64),
active: true,
};
#[tracing::instrument(skip(self))]
pub async fn get_default_credential(
&mut self,
) -> crate::Result<Option<Credentials>> {
let credentials = if let Some(default_user) = self.default_user {
if let Some(creds) = self.users.get(&default_user) {
Some(creds)
} else {
self.users.values().next()
}
} else {
self.users.values().next()
};
credentials.upsert(exec).await?;
if let Some(creds) = credentials {
if self.default_user != Some(creds.id) {
self.default_user = Some(creds.id);
self.save().await?;
}
if creds.expires < Utc::now() {
let old_credentials = creds.clone();
let res = self.refresh_token(&old_credentials).await;
match res {
Ok(val) => Ok(val),
Err(err) => {
if let ErrorKind::MinecraftAuthenticationError(
MinecraftAuthenticationError::Request {
ref source,
..
},
) = *err.raw
{
if source.is_connect() || source.is_timeout() {
return Ok(Some(old_credentials));
}
}
Err(err)
}
}
} else {
Ok(Some(creds.clone()))
}
} else {
Ok(None)
}
}
#[tracing::instrument(skip(self))]
pub async fn remove(
&mut self,
id: Uuid,
) -> crate::Result<Option<Credentials>> {
let val = self.users.remove(&id);
self.save().await?;
Ok(val)
}
Ok(credentials)
}
#[derive(Serialize, Deserialize, Clone, Debug)]
@@ -422,6 +204,343 @@ pub struct Credentials {
pub access_token: String,
pub refresh_token: String,
pub expires: DateTime<Utc>,
pub active: bool,
}
impl Credentials {
    /// Refreshes this account's Minecraft access token using the stored
    /// Microsoft OAuth refresh token, then persists the updated row.
    ///
    /// Each step's response feeds the next:
    /// OAuth refresh -> device token pair -> SISU authorize ->
    /// XSTS authorize -> Minecraft token.
    async fn refresh(
        &mut self,
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite> + Copy,
    ) -> crate::Result<()> {
        let oauth_token = oauth_refresh(&self.refresh_token).await?;
        // Ensure we hold a valid device token before continuing the chain.
        let (pair, current_date, _) =
            DeviceTokenPair::refresh_and_get_device_token(
                oauth_token.date,
                false,
                exec,
            )
            .await?;

        // No session id: this is a silent token refresh, not an
        // interactive login flow.
        let sisu_authorize = sisu_authorize(
            None,
            &oauth_token.value.access_token,
            &pair.token.token,
            &pair.key,
            current_date,
        )
        .await?;

        let xbox_token = xsts_authorize(
            sisu_authorize.value,
            &pair.token.token,
            &pair.key,
            sisu_authorize.date,
        )
        .await?;

        let minecraft_token = minecraft_token(xbox_token.value).await?;

        self.access_token = minecraft_token.access_token;
        self.refresh_token = oauth_token.value.refresh_token;
        // OAuth reports the token lifetime in seconds from issue time.
        self.expires = oauth_token.date
            + Duration::seconds(oauth_token.value.expires_in as i64);

        self.upsert(exec).await?;

        Ok(())
    }

    /// Returns the active account, refreshing its access token first if
    /// it has expired.
    ///
    /// If the refresh fails purely due to connectivity (connect/timeout
    /// request errors), the stale credentials are returned instead of an
    /// error so the launcher remains usable offline.
    #[tracing::instrument]
    pub async fn get_default_credential(
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite> + Copy,
    ) -> crate::Result<Option<Credentials>> {
        let credentials = Self::get_active(exec).await?;

        if let Some(mut creds) = credentials {
            if creds.expires < Utc::now() {
                let res = creds.refresh(exec).await;

                match res {
                    Ok(_) => Ok(Some(creds)),
                    Err(err) => {
                        if let ErrorKind::MinecraftAuthenticationError(
                            MinecraftAuthenticationError::Request {
                                ref source,
                                ..
                            },
                        ) = *err.raw
                        {
                            // Network problem, not an auth rejection:
                            // fall back to the expired token.
                            if source.is_connect() || source.is_timeout() {
                                return Ok(Some(creds));
                            }
                        }

                        Err(err)
                    }
                }
            } else {
                Ok(Some(creds))
            }
        } else {
            Ok(None)
        }
    }

    /// Fetches the single account currently flagged active, if any.
    pub async fn get_active(
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
    ) -> crate::Result<Option<Self>> {
        let res = sqlx::query!(
            "
            SELECT
                uuid, active, username, access_token, refresh_token, expires
            FROM minecraft_users
            WHERE active = TRUE
            "
        )
        .fetch_optional(exec)
        .await?;

        Ok(res.map(|x| Self {
            // UUIDs are stored as strings; a malformed row degrades to the
            // nil UUID rather than failing the whole query.
            id: Uuid::parse_str(&x.uuid).unwrap_or_default(),
            username: x.username,
            access_token: x.access_token,
            refresh_token: x.refresh_token,
            // `expires` is persisted as a unix timestamp (seconds);
            // out-of-range values fall back to "now" (i.e. expired).
            expires: Utc
                .timestamp_opt(x.expires, 0)
                .single()
                .unwrap_or_else(Utc::now),
            active: x.active == 1,
        }))
    }

    /// Loads every stored account into a map keyed by profile UUID.
    pub async fn get_all(
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
    ) -> crate::Result<DashMap<Uuid, Self>> {
        let res = sqlx::query!(
            "
            SELECT
                uuid, active, username, access_token, refresh_token, expires
            FROM minecraft_users
            "
        )
        .fetch(exec)
        // Stream rows straight into the map instead of collecting a Vec.
        .try_fold(DashMap::new(), |acc, x| {
            let uuid = Uuid::parse_str(&x.uuid).unwrap_or_default();

            acc.insert(
                uuid,
                Self {
                    id: uuid,
                    username: x.username,
                    access_token: x.access_token,
                    refresh_token: x.refresh_token,
                    expires: Utc
                        .timestamp_opt(x.expires, 0)
                        .single()
                        .unwrap_or_else(Utc::now),
                    active: x.active == 1,
                },
            );

            async move { Ok(acc) }
        })
        .await?;

        Ok(res)
    }

    /// Inserts or updates this account's row.
    ///
    /// If this account is active, every row is first marked inactive so
    /// that at most one account has `active = TRUE` after the upsert.
    pub async fn upsert(
        &self,
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite> + Copy,
    ) -> crate::Result<()> {
        let expires = self.expires.timestamp();
        let uuid = self.id.as_hyphenated().to_string();

        if self.active {
            sqlx::query!(
                "
                UPDATE minecraft_users
                SET active = FALSE
                ",
            )
            .execute(exec)
            .await?;
        }

        sqlx::query!(
            "
            INSERT INTO minecraft_users (uuid, active, username, access_token, refresh_token, expires)
            VALUES ($1, $2, $3, $4, $5, $6)
            ON CONFLICT (uuid) DO UPDATE SET
                active = $2,
                username = $3,
                access_token = $4,
                refresh_token = $5,
                expires = $6
            ",
            uuid,
            self.active,
            self.username,
            self.access_token,
            self.refresh_token,
            expires,
        )
        .execute(exec)
        .await?;

        Ok(())
    }

    /// Deletes the account with the given profile UUID, if present.
    pub async fn remove(
        uuid: Uuid,
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
    ) -> crate::Result<()> {
        let uuid = uuid.as_hyphenated().to_string();

        sqlx::query!(
            "
            DELETE FROM minecraft_users WHERE uuid = $1
            ",
            uuid,
        )
        .execute(exec)
        .await?;

        Ok(())
    }
}
/// A device token together with the signing key it was issued against;
/// both are needed to sign subsequent authentication requests.
pub struct DeviceTokenPair {
    pub token: DeviceToken,
    pub key: DeviceTokenKey,
}
impl DeviceTokenPair {
    /// Returns a usable device token pair, minting or refreshing one as
    /// needed.
    ///
    /// Returns `(pair, date, updated)`, where `date` is the timestamp to
    /// use for follow-up requests and `updated` is true when a fresh
    /// token was fetched from the device-token endpoint in this call.
    #[tracing::instrument(skip(exec))]
    async fn refresh_and_get_device_token(
        current_date: DateTime<Utc>,
        force_generate: bool,
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite> + Copy,
    ) -> crate::Result<(Self, DateTime<Utc>, bool)> {
        let pair = Self::get(exec).await?;

        if let Some(mut pair) = pair {
            if pair.token.not_after > Utc::now() && !force_generate {
                // Stored token is still valid; reuse it unchanged.
                Ok((pair, current_date, false))
            } else {
                // Same key, expired token: request and persist a fresh one.
                let res = device_token(&pair.key, current_date).await?;

                pair.token = res.value;
                pair.upsert(exec).await?;

                Ok((pair, res.date, true))
            }
        } else {
            // No stored pair: generate a new signing key and request a
            // token for it.
            let key = generate_key()?;
            let res = device_token(&key, current_date).await?;

            let pair = Self {
                key,
                token: res.value,
            };

            pair.upsert(exec).await?;

            Ok((pair, res.date, true))
        }
    }

    /// Loads the persisted device token pair from the database.
    ///
    /// Returns `Ok(None)` when no row exists, or when the stored UUID or
    /// PEM private key fails to parse — corrupt rows are silently ignored
    /// so the caller generates a fresh pair instead.
    async fn get(
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
    ) -> crate::Result<Option<Self>> {
        let res = sqlx::query!(
            r#"
            SELECT
                uuid, private_key, x, y, issue_instant, not_after, token, json(display_claims) as "display_claims!: serde_json::Value"
            FROM minecraft_device_tokens
            "#
        )
        .fetch_optional(exec)
        .await?;

        if let Some(x) = res {
            if let Ok(uuid) = Uuid::parse_str(&x.uuid) {
                if let Ok(private_key) =
                    SigningKey::from_pkcs8_pem(&x.private_key)
                {
                    return Ok(Some(Self {
                        token: DeviceToken {
                            // Timestamps are stored as unix seconds;
                            // invalid values degrade to "now".
                            issue_instant: Utc
                                .timestamp_opt(x.issue_instant, 0)
                                .single()
                                .unwrap_or_else(Utc::now),
                            not_after: Utc
                                .timestamp_opt(x.not_after, 0)
                                .single()
                                .unwrap_or_else(Utc::now),
                            token: x.token,
                            display_claims: serde_json::from_value(
                                x.display_claims,
                            )
                            .unwrap_or_default(),
                        },
                        key: DeviceTokenKey {
                            id: uuid,
                            key: private_key,
                            x: x.x,
                            y: x.y,
                        },
                    }));
                }
            }
        }

        Ok(None)
    }

    /// Persists this pair, replacing any previous one.
    ///
    /// The table holds a single row with fixed primary key 0, so the
    /// upsert always overwrites the existing pair.
    pub async fn upsert(
        &self,
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
    ) -> crate::Result<()> {
        let uuid = self.key.id.as_hyphenated().to_string();
        let issue_instant = self.token.issue_instant.timestamp();
        let not_after = self.token.not_after.timestamp();
        // The private key is stored PEM-encoded (PKCS#8).
        let key = self
            .key
            .key
            .to_pkcs8_pem(LineEnding::default())
            .map_err(MinecraftAuthenticationError::PEMSerialize)?
            .to_string();
        // Display claims are serialized to JSON text and stored via
        // SQLite's jsonb() on update.
        let display_claims = serde_json::to_string(&self.token.display_claims)?;

        sqlx::query!(
            "
            INSERT INTO minecraft_device_tokens (id, uuid, private_key, x, y, issue_instant, not_after, token, display_claims)
            VALUES (0, $1, $2, $3, $4, $5, $6, $7, $8)
            ON CONFLICT (id) DO UPDATE SET
                uuid = $1,
                private_key = $2,
                x = $3,
                y = $4,
                issue_instant = $5,
                not_after = $6,
                token = $7,
                display_claims = jsonb($8)
            ",
            uuid,
            key,
            self.key.x,
            self.key.y,
            issue_instant,
            not_after,
            self.token.token,
            display_claims,
        )
        .execute(exec)
        .await?;

        Ok(())
    }
}
const MICROSOFT_CLIENT_ID: &str = "00000000402b5328";
@@ -455,7 +574,7 @@ pub async fn device_token(
json!({
"Properties": {
"AuthMethod": "ProofOfPossession",
"Id": format!("{{{}}}", key.id),
"Id": format!("{{{}}}", key.id.to_string().to_uppercase()),
"DeviceType": "Win32",
"Version": "10.16.0",
"ProofKey": {
@@ -905,7 +1024,7 @@ where
}
pub struct DeviceTokenKey {
pub id: String,
pub id: Uuid,
pub key: SigningKey,
pub x: String,
pub y: String,
@@ -913,7 +1032,7 @@ pub struct DeviceTokenKey {
#[tracing::instrument]
fn generate_key() -> Result<DeviceTokenKey, MinecraftAuthenticationError> {
let id = Uuid::new_v4().to_string().to_uppercase();
let uuid = Uuid::new_v4();
let signing_key = SigningKey::random(&mut OsRng);
let public_key = VerifyingKey::from(&signing_key);
@@ -921,7 +1040,7 @@ fn generate_key() -> Result<DeviceTokenKey, MinecraftAuthenticationError> {
let encoded_point = public_key.to_encoded_point(false);
Ok(DeviceTokenKey {
id,
id: uuid,
key: signing_key,
x: BASE64_URL_SAFE_NO_PAD.encode(
encoded_point.x().ok_or_else(|| {

View File

@@ -1,125 +1,91 @@
//! Theseus state management system
use crate::event::emit::{emit_loading, emit_offline, init_loading_unsafe};
use std::path::PathBuf;
use crate::event::emit::{emit_loading, init_loading_unsafe};
use crate::event::LoadingBarType;
use crate::loading_join;
use crate::util::fetch::{self, FetchSemaphore, IoSemaphore};
use notify::RecommendedWatcher;
use notify_debouncer_mini::{new_debouncer, DebounceEventResult, Debouncer};
use crate::util::fetch::{FetchSemaphore, IoSemaphore};
use std::sync::Arc;
use std::time::Duration;
use tokio::join;
use tokio::sync::{OnceCell, RwLock, Semaphore};
use tokio::sync::{OnceCell, Semaphore};
use futures::{channel::mpsc::channel, SinkExt, StreamExt};
use crate::state::fs_watcher::FileWatcher;
use sqlx::SqlitePool;
// Submodules
mod dirs;
pub use self::dirs::*;
mod metadata;
pub use self::metadata::*;
mod profiles;
pub use self::profiles::*;
mod settings;
pub use self::settings::*;
mod projects;
pub use self::projects::*;
mod children;
pub use self::children::*;
mod tags;
pub use self::tags::*;
mod process;
pub use self::process::*;
mod java_globals;
pub use self::java_globals::*;
mod safe_processes;
pub use self::safe_processes::*;
mod discord;
pub use self::discord::*;
mod minecraft_auth;
pub use self::minecraft_auth::*;
mod cache;
pub use self::cache::*;
mod db;
pub mod fs_watcher;
mod mr_auth;
pub use self::mr_auth::*;
mod legacy_converter;
// Global state
// RwLock on state only has concurrent reads, except for config dir change which takes control of the State
static LAUNCHER_STATE: OnceCell<RwLock<State>> = OnceCell::const_new();
static LAUNCHER_STATE: OnceCell<Arc<State>> = OnceCell::const_new();
pub struct State {
/// Whether or not the launcher is currently operating in 'offline mode'
pub offline: RwLock<bool>,
/// Information on the location of files used in the launcher
pub directories: DirectoryInfo,
/// Semaphore used to limit concurrent network requests and avoid errors
pub fetch_semaphore: FetchSemaphore,
/// Stored maximum number of sempahores of current fetch_semaphore
pub fetch_semaphore_max: RwLock<u32>,
/// Semaphore used to limit concurrent I/O and avoid errors
pub io_semaphore: IoSemaphore,
/// Stored maximum number of sempahores of current io_semaphore
pub io_semaphore_max: RwLock<u32>,
/// Launcher metadata
pub metadata: RwLock<Metadata>,
/// Launcher configuration
pub settings: RwLock<Settings>,
/// Reference to minecraft process children
pub children: RwLock<Children>,
/// Launcher profile metadata
pub(crate) profiles: RwLock<Profiles>,
/// Launcher tags
pub(crate) tags: RwLock<Tags>,
/// Launcher processes that should be safely exited on shutdown
pub(crate) safety_processes: RwLock<SafeProcesses>,
/// Launcher user account info
pub(crate) users: RwLock<MinecraftAuthStore>,
/// Modrinth Credentials Store
pub credentials: RwLock<CredentialsStore>,
/// Modrinth auth flow
pub modrinth_auth_flow: RwLock<Option<ModrinthAuthFlow>>,
/// Semaphore to limit concurrent API requests. This is separate from the fetch semaphore
/// to keep API functionality while the app is performing intensive tasks.
pub api_semaphore: FetchSemaphore,
/// Discord RPC
pub discord_rpc: DiscordGuard,
/// File watcher debouncer
pub(crate) file_watcher: RwLock<Debouncer<RecommendedWatcher>>,
pub(crate) pool: SqlitePool,
pub(crate) file_watcher: FileWatcher,
}
impl State {
/// Get the current launcher state, initializing it if needed
pub async fn get(
) -> crate::Result<Arc<tokio::sync::RwLockReadGuard<'static, Self>>> {
Ok(Arc::new(
LAUNCHER_STATE
.get_or_try_init(Self::initialize_state)
.await?
.read()
.await,
))
pub async fn init() -> crate::Result<()> {
let state = LAUNCHER_STATE
.get_or_try_init(Self::initialize_state)
.await?;
Process::garbage_collect(&state.pool).await?;
Ok(())
}
/// Get the current launcher state, initializing it if needed
/// Takes writing control of the state, blocking all other uses of it
/// Only used for state change such as changing the config directory
pub async fn get_write(
) -> crate::Result<tokio::sync::RwLockWriteGuard<'static, Self>> {
Ok(LAUNCHER_STATE
.get_or_try_init(Self::initialize_state)
.await?
.write()
.await)
/// Get the current launcher state, waiting for initialization
pub async fn get() -> crate::Result<Arc<Self>> {
if !LAUNCHER_STATE.initialized() {
while !LAUNCHER_STATE.initialized() {}
}
Ok(Arc::clone(
LAUNCHER_STATE.get().expect("State is not initialized!"),
))
}
pub fn initialized() -> bool {
@@ -127,8 +93,7 @@ impl State {
}
#[tracing::instrument]
#[theseus_macros::debug_pin]
async fn initialize_state() -> crate::Result<RwLock<State>> {
async fn initialize_state() -> crate::Result<Arc<Self>> {
let loading_bar = init_loading_unsafe(
LoadingBarType::StateInit,
100.0,
@@ -136,275 +101,50 @@ impl State {
)
.await?;
// Settings
let settings =
Settings::init(&DirectoryInfo::get_initial_settings_file()?)
.await?;
let pool = db::connect().await?;
let directories = DirectoryInfo::init(&settings)?;
legacy_converter::migrate_legacy_data(&pool).await?;
emit_loading(&loading_bar, 10.0, None).await?;
let mut settings = Settings::get(&pool).await?;
let mut file_watcher = init_watcher().await?;
let fetch_semaphore =
FetchSemaphore(Semaphore::new(settings.max_concurrent_downloads));
let io_semaphore =
IoSemaphore(Semaphore::new(settings.max_concurrent_writes));
let api_semaphore =
FetchSemaphore(Semaphore::new(settings.max_concurrent_downloads));
let fetch_semaphore = FetchSemaphore(RwLock::new(Semaphore::new(
settings.max_concurrent_downloads,
)));
let io_semaphore = IoSemaphore(RwLock::new(Semaphore::new(
settings.max_concurrent_writes,
)));
emit_loading(&loading_bar, 10.0, None).await?;
let is_offline = !fetch::check_internet(3).await;
let metadata_fut =
Metadata::init(&directories, !is_offline, &io_semaphore);
let profiles_fut = Profiles::init(&directories, &mut file_watcher);
let tags_fut = Tags::init(
&directories,
!is_offline,
DirectoryInfo::move_launcher_directory(
&mut settings,
&pool,
&io_semaphore,
&fetch_semaphore,
&CredentialsStore(None),
);
let users_fut = MinecraftAuthStore::init(&directories, &io_semaphore);
let creds_fut = CredentialsStore::init(&directories, &io_semaphore);
// Launcher data
let (metadata, profiles, tags, users, creds) = loading_join! {
Some(&loading_bar), 70.0, Some("Loading metadata");
metadata_fut,
profiles_fut,
tags_fut,
users_fut,
creds_fut,
}?;
)
.await?;
let safety_processes = SafeProcesses::new();
let directories = DirectoryInfo::init(settings.custom_dir).await?;
let discord_rpc = DiscordGuard::init(is_offline).await?;
if !settings.disable_discord_rpc && !is_offline {
emit_loading(&loading_bar, 10.0, None).await?;
let discord_rpc = DiscordGuard::init().await?;
if settings.discord_rpc {
// Add default Idling to discord rich presence
// Force add to avoid recursion
let _ = discord_rpc.force_set_activity("Idling...", true).await;
}
let children = Children::new();
// Starts a loop of checking if we are online, and updating
Self::offine_check_loop();
let file_watcher = fs_watcher::init_watcher().await?;
fs_watcher::watch_profiles_init(&file_watcher, &directories).await?;
emit_loading(&loading_bar, 10.0, None).await?;
Ok::<RwLock<Self>, crate::Error>(RwLock::new(Self {
offline: RwLock::new(is_offline),
Ok(Arc::new(Self {
directories,
fetch_semaphore,
fetch_semaphore_max: RwLock::new(
settings.max_concurrent_downloads as u32,
),
io_semaphore,
io_semaphore_max: RwLock::new(
settings.max_concurrent_writes as u32,
),
metadata: RwLock::new(metadata),
settings: RwLock::new(settings),
profiles: RwLock::new(profiles),
users: RwLock::new(users),
children: RwLock::new(children),
credentials: RwLock::new(creds),
tags: RwLock::new(tags),
api_semaphore,
discord_rpc,
safety_processes: RwLock::new(safety_processes),
file_watcher: RwLock::new(file_watcher),
modrinth_auth_flow: RwLock::new(None),
pool,
file_watcher,
}))
}
/// Starts a loop of checking if we are online, and updating
pub fn offine_check_loop() {
tokio::task::spawn(async {
loop {
let state = Self::get().await;
if let Ok(state) = state {
let _ = state.refresh_offline().await;
}
// Wait 5 seconds
tokio::time::sleep(Duration::from_secs(5)).await;
}
});
}
/// Updates state with data from the web, if we are online
pub fn update() {
tokio::task::spawn(async {
if let Ok(state) = crate::State::get().await {
if !*state.offline.read().await {
let res1 = Profiles::update_modrinth_versions();
let res2 = Tags::update();
let res3 = Metadata::update();
let res4 = Profiles::update_projects();
let res6 = CredentialsStore::update_creds();
let _ = join!(res1, res2, res3, res4, res6);
}
}
});
}
#[tracing::instrument]
#[theseus_macros::debug_pin]
/// Synchronize in-memory state with persistent state
pub async fn sync() -> crate::Result<()> {
let state = Self::get().await?;
let sync_settings = async {
let state = Arc::clone(&state);
tokio::spawn(async move {
let reader = state.settings.read().await;
reader.sync(&state.directories.settings_file()).await?;
Ok::<_, crate::Error>(())
})
.await?
};
let sync_profiles = async {
let state = Arc::clone(&state);
tokio::spawn(async move {
let profiles = state.profiles.read().await;
profiles.sync().await?;
Ok::<_, crate::Error>(())
})
.await?
};
tokio::try_join!(sync_settings, sync_profiles)?;
Ok(())
}
/// Reset IO semaphore to default values
/// This will block until all uses of the semaphore are complete, so it should only be called
/// when we are not in the middle of downloading something (ie: changing the settings!)
pub async fn reset_io_semaphore(&self) {
let settings = self.settings.read().await;
let mut io_semaphore = self.io_semaphore.0.write().await;
let mut total_permits = self.io_semaphore_max.write().await;
// Wait to get all permits back
let _ = io_semaphore.acquire_many(*total_permits).await;
// Reset the semaphore
io_semaphore.close();
*total_permits = settings.max_concurrent_writes as u32;
*io_semaphore = Semaphore::new(settings.max_concurrent_writes);
}
/// Reset IO semaphore to default values
/// This will block until all uses of the semaphore are complete, so it should only be called
/// when we are not in the middle of downloading something (ie: changing the settings!)
pub async fn reset_fetch_semaphore(&self) {
let settings = self.settings.read().await;
let mut io_semaphore = self.fetch_semaphore.0.write().await;
let mut total_permits = self.fetch_semaphore_max.write().await;
// Wait to get all permits back
let _ = io_semaphore.acquire_many(*total_permits).await;
// Reset the semaphore
io_semaphore.close();
*total_permits = settings.max_concurrent_downloads as u32;
*io_semaphore = Semaphore::new(settings.max_concurrent_downloads);
}
/// Refreshes whether or not the launcher should be offline, by whether or not there is an internet connection
pub async fn refresh_offline(&self) -> crate::Result<()> {
let is_online = fetch::check_internet(3).await;
let mut offline = self.offline.write().await;
if *offline != is_online {
return Ok(());
}
emit_offline(!is_online).await?;
*offline = !is_online;
Ok(())
}
}
pub async fn init_watcher() -> crate::Result<Debouncer<RecommendedWatcher>> {
let (mut tx, mut rx) = channel(1);
let file_watcher = new_debouncer(
Duration::from_secs_f32(2.0),
move |res: DebounceEventResult| {
futures::executor::block_on(async {
tx.send(res).await.unwrap();
})
},
)?;
tokio::task::spawn(async move {
let span = tracing::span!(tracing::Level::INFO, "init_watcher");
tracing::info!(parent: &span, "Initting watcher");
while let Some(res) = rx.next().await {
let _span = span.enter();
match res {
Ok(mut events) => {
let mut visited_paths = Vec::new();
// sort events by e.path
events.sort_by(|a, b| a.path.cmp(&b.path));
events.iter().for_each(|e| {
let mut new_path = PathBuf::new();
let mut components_iterator = e.path.components();
let mut found = false;
for component in components_iterator.by_ref() {
new_path.push(component);
if found {
break;
}
if component.as_os_str() == "profiles" {
found = true;
}
}
// if any remain, it's a subfile of the profile folder and not the profile folder itself
let subfile = components_iterator.next().is_some();
// At this point, new_path is the path to the profile, and subfile is whether it's a subfile of the profile or not
let profile_path_id =
ProfilePathId::new(PathBuf::from(
new_path.file_name().unwrap_or_default(),
));
if e.path
.components()
.any(|x| x.as_os_str() == "crash-reports")
&& e.path
.extension()
.map(|x| x == "txt")
.unwrap_or(false)
{
Profile::crash_task(profile_path_id);
} else if !visited_paths.contains(&new_path) {
if subfile {
Profile::sync_projects_task(
profile_path_id,
false,
);
visited_paths.push(new_path);
} else {
Profiles::sync_available_profiles_task(
profile_path_id,
);
}
}
});
}
Err(error) => tracing::warn!("Unable to watch file: {error}"),
}
}
});
Ok(file_watcher)
}

View File

@@ -1,37 +1,180 @@
use crate::config::MODRINTH_API_URL;
use crate::state::DirectoryInfo;
use crate::util::fetch::{
fetch_advanced, read_json, write, FetchSemaphore, IoSemaphore,
};
use crate::State;
use chrono::{DateTime, Duration, Utc};
use crate::util::fetch::{fetch_advanced, FetchSemaphore};
use chrono::{DateTime, Duration, TimeZone, Utc};
use dashmap::DashMap;
use futures::TryStreamExt;
use reqwest::Method;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::collections::HashMap;
const AUTH_JSON: &str = "auth.json";
/// Public profile data for a Modrinth user, deserialized from the
/// Modrinth API.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ModrinthUser {
    pub id: String,
    pub username: String,
    /// Display name, when set (distinct from the login `username`).
    pub name: Option<String>,
    pub avatar_url: Option<String>,
    pub bio: Option<String>,
    /// Account creation time.
    pub created: DateTime<Utc>,
    pub role: String,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ModrinthCredentials {
pub session: String,
pub expires_at: DateTime<Utc>,
pub user: ModrinthUser,
pub expires: DateTime<Utc>,
pub user_id: String,
pub active: bool,
}
#[derive(Serialize)]
impl ModrinthCredentials {
    /// Returns the active Modrinth session, refreshing it against the API
    /// first when it has expired.
    ///
    /// If the refresh request fails or its response cannot be parsed, the
    /// stored session is deleted and `None` is returned, so callers treat
    /// the user as logged out.
    pub async fn get_and_refresh(
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite> + Copy,
        semaphore: &FetchSemaphore,
    ) -> crate::Result<Option<Self>> {
        let creds = Self::get_active(exec).await?;

        if let Some(mut creds) = creds {
            if creds.expires < Utc::now() {
                // Only the `session` field of the refresh response is used.
                #[derive(Deserialize)]
                struct Session {
                    session: String,
                }

                let resp = fetch_advanced(
                    Method::POST,
                    &format!("{MODRINTH_API_URL}session/refresh"),
                    None,
                    None,
                    Some(("Authorization", &*creds.session)),
                    None,
                    semaphore,
                    exec,
                )
                .await
                .ok()
                .and_then(|resp| serde_json::from_slice::<Session>(&resp).ok());

                if let Some(value) = resp {
                    creds.session = value.session;
                    // Refreshed sessions are treated as valid for two
                    // weeks from now.
                    creds.expires = Utc::now() + Duration::weeks(2);
                    creds.upsert(exec).await?;

                    Ok(Some(creds))
                } else {
                    // Refresh failed: drop the stale session entirely.
                    Self::remove(&creds.user_id, exec).await?;

                    Ok(None)
                }
            } else {
                Ok(Some(creds))
            }
        } else {
            Ok(None)
        }
    }

    /// Fetches the session currently flagged active, if any.
    pub async fn get_active(
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
    ) -> crate::Result<Option<Self>> {
        let res = sqlx::query!(
            "
            SELECT
                id, active, session_id, expires
            FROM modrinth_users
            WHERE active = TRUE
            "
        )
        .fetch_optional(exec)
        .await?;

        Ok(res.map(|x| Self {
            session: x.session_id,
            // `expires` is stored as a unix timestamp (seconds);
            // out-of-range values degrade to "now" (i.e. expired).
            expires: Utc
                .timestamp_opt(x.expires, 0)
                .single()
                .unwrap_or_else(Utc::now),
            user_id: x.id,
            active: x.active == 1,
        }))
    }

    /// Loads every stored session into a map keyed by user id.
    pub async fn get_all(
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
    ) -> crate::Result<DashMap<String, Self>> {
        let res = sqlx::query!(
            "
            SELECT
                id, active, session_id, expires
            FROM modrinth_users
            "
        )
        .fetch(exec)
        // Stream rows straight into the map instead of collecting a Vec.
        .try_fold(DashMap::new(), |acc, x| {
            acc.insert(
                x.id.clone(),
                Self {
                    session: x.session_id,
                    expires: Utc
                        .timestamp_opt(x.expires, 0)
                        .single()
                        .unwrap_or_else(Utc::now),
                    user_id: x.id,
                    active: x.active == 1,
                },
            );

            async move { Ok(acc) }
        })
        .await?;

        Ok(res)
    }

    /// Inserts or updates this session's row.
    ///
    /// If this session is active, every row is first marked inactive so
    /// that at most one session has `active = TRUE` after the upsert.
    pub async fn upsert(
        &self,
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite> + Copy,
    ) -> crate::Result<()> {
        let expires = self.expires.timestamp();

        if self.active {
            sqlx::query!(
                "
                UPDATE modrinth_users
                SET active = FALSE
                "
            )
            .execute(exec)
            .await?;
        }

        sqlx::query!(
            "
            INSERT INTO modrinth_users (id, active, session_id, expires)
            VALUES ($1, $2, $3, $4)
            ON CONFLICT (id) DO UPDATE SET
                active = $2,
                session_id = $3,
                expires = $4
            ",
            self.user_id,
            self.active,
            self.session,
            expires,
        )
        .execute(exec)
        .await?;

        Ok(())
    }

    /// Deletes the session belonging to the given user id, if present.
    pub async fn remove(
        user_id: &str,
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
    ) -> crate::Result<()> {
        sqlx::query!(
            "
            DELETE FROM modrinth_users WHERE id = $1
            ",
            user_id,
        )
        .execute(exec)
        .await?;

        Ok(())
    }
}
#[derive(Serialize, Debug)]
#[serde(tag = "type")]
#[serde(rename_all = "snake_case")]
pub enum ModrinthCredentialsResult {
@@ -39,159 +182,25 @@ pub enum ModrinthCredentialsResult {
Credentials(ModrinthCredentials),
}
/// File-backed store for the Modrinth session: at most one set of
/// credentials, read from and written to the `auth.json` cache file.
#[derive(Debug)]
pub struct CredentialsStore(pub Option<ModrinthCredentials>);

impl CredentialsStore {
    /// Loads credentials from `auth.json` in the caches meta directory.
    /// Any read/parse failure is treated as "not logged in".
    pub async fn init(
        dirs: &DirectoryInfo,
        io_semaphore: &IoSemaphore,
    ) -> crate::Result<Self> {
        let auth_path = dirs.caches_meta_dir().await.join(AUTH_JSON);
        let user = read_json(&auth_path, io_semaphore).await.ok();

        if let Some(user) = user {
            Ok(Self(Some(user)))
        } else {
            Ok(Self(None))
        }
    }

    /// Writes the current credentials back to `auth.json`.
    /// NOTE(review): when the store is `None` this is a no-op — logging
    /// out does not delete the existing file here.
    pub async fn save(&self) -> crate::Result<()> {
        let state = State::get().await?;
        let auth_path =
            state.directories.caches_meta_dir().await.join(AUTH_JSON);

        if let Some(creds) = &self.0 {
            write(&auth_path, &serde_json::to_vec(creds)?, &state.io_semaphore)
                .await?;
        }

        Ok(())
    }

    /// Replaces the stored credentials and persists them.
    pub async fn login(
        &mut self,
        credentials: ModrinthCredentials,
    ) -> crate::Result<&Self> {
        self.0 = Some(credentials);
        self.save().await?;

        Ok(self)
    }

    /// Refreshes the globally stored credentials in place; failures are
    /// logged as warnings rather than propagated.
    #[tracing::instrument]
    pub async fn update_creds() {
        let res = async {
            let state = State::get().await?;
            let mut creds_write = state.credentials.write().await;

            refresh_credentials(&mut creds_write, &state.fetch_semaphore)
                .await?;

            Ok::<(), crate::Error>(())
        }
        .await;

        match res {
            Ok(()) => {}
            Err(err) => {
                tracing::warn!("Unable to update credentials: {err}")
            }
        };
    }

    /// Clears the in-memory credentials. `save` is invoked, but it writes
    /// nothing for an empty store (see `save`'s guard above).
    pub async fn logout(&mut self) -> crate::Result<&Self> {
        self.0 = None;
        self.save().await?;

        Ok(self)
    }
}
/// Driver for the websocket-based Modrinth OAuth login flow: the socket
/// first delivers the browser login URL, then the resulting auth payload.
pub struct ModrinthAuthFlow {
    socket: async_tungstenite::WebSocketStream<
        async_tungstenite::tokio::ConnectStream,
    >,
}
impl ModrinthAuthFlow {
    /// Connects to the Modrinth auth websocket for the given OAuth provider.
    pub async fn new(provider: &str) -> crate::Result<Self> {
        let url = format!(
            "wss://api.modrinth.com/v2/auth/ws?provider={provider}"
        );
        let (socket, _) = async_tungstenite::tokio::connect_async(url).await?;
        Ok(Self { socket })
    }

    /// Waits for the first socket message, which carries the URL the user
    /// must open in a browser to authenticate.
    pub async fn prepare_login_url(&mut self) -> crate::Result<String> {
        #[derive(Deserialize)]
        struct Url {
            url: String,
        }

        let message = self.socket.try_next().await?.ok_or(
            crate::ErrorKind::WSClosedError(String::from(
                "login socket URL",
            ))
            .as_error(),
        )?;

        let response: Url = serde_json::from_slice(&message.into_data())?;
        Ok(response.url)
    }

    /// Waits for the follow-up socket message and exchanges its `code` field
    /// for launcher credentials (or a two-factor challenge).
    pub async fn extract_credentials(
        &mut self,
        semaphore: &FetchSemaphore,
    ) -> crate::Result<ModrinthCredentialsResult> {
        let message = self.socket.try_next().await?.ok_or(
            crate::ErrorKind::WSClosedError(String::from(
                "login socket URL",
            ))
            .as_error(),
        )?;

        let response: HashMap<String, Value> =
            serde_json::from_slice(&message.into_data())?;
        get_result_from_res("code", response, semaphore).await
    }

    /// Cleanly closes the websocket.
    pub async fn close(&mut self) -> crate::Result<()> {
        self.socket.close(None).await?;
        Ok(())
    }
}
async fn get_result_from_res(
code_key: &str,
response: HashMap<String, Value>,
semaphore: &FetchSemaphore,
exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
) -> crate::Result<ModrinthCredentialsResult> {
if let Some(flow) = response.get("flow").and_then(|x| x.as_str()) {
Ok(ModrinthCredentialsResult::TwoFactorRequired {
flow: flow.to_string(),
})
} else if let Some(code) = response.get(code_key).and_then(|x| x.as_str()) {
let info = fetch_info(code, semaphore).await?;
let info = fetch_info(code, semaphore, exec).await?;
Ok(ModrinthCredentialsResult::Credentials(
ModrinthCredentials {
session: code.to_string(),
expires_at: Utc::now() + Duration::weeks(2),
user: info,
expires: Utc::now() + Duration::weeks(2),
user_id: info.id,
active: true,
},
))
} else if let Some(error) =
@@ -209,48 +218,19 @@ async fn get_result_from_res(
}
}
// Minimal response shape for API endpoints that return a session token.
#[derive(Deserialize)]
struct Session {
    session: String,
}
/// Authenticates with a username/email and password, returning either full
/// credentials or a two-factor challenge to be completed by the caller.
pub async fn login_password(
    username: &str,
    password: &str,
    challenge: &str,
    semaphore: &FetchSemaphore,
) -> crate::Result<ModrinthCredentialsResult> {
    let body = serde_json::json!({
        "username": username,
        "password": password,
        "challenge": challenge,
    });

    let resp = fetch_advanced(
        Method::POST,
        &format!("{MODRINTH_API_URL}auth/login"),
        None,
        Some(body),
        None,
        None,
        semaphore,
        &CredentialsStore(None),
    )
    .await?;

    let value: HashMap<String, Value> = serde_json::from_slice(&resp)?;
    get_result_from_res("session", value, semaphore).await
}
async fn get_creds_from_res(
response: HashMap<String, Value>,
semaphore: &FetchSemaphore,
exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
) -> crate::Result<ModrinthCredentials> {
if let Some(code) = response.get("session").and_then(|x| x.as_str()) {
let info = fetch_info(code, semaphore).await?;
let info = fetch_info(code, semaphore, exec).await?;
Ok(ModrinthCredentials {
session: code.to_string(),
expires_at: Utc::now() + Duration::weeks(2),
user: info,
expires: Utc::now() + Duration::weeks(2),
user_id: info.id,
active: true,
})
} else if let Some(error) =
response.get("description").and_then(|x| x.as_str())
@@ -267,10 +247,53 @@ async fn get_creds_from_res(
}
}
/// Builds the browser URL used to begin an OAuth login with `provider`,
/// pointing the post-login redirect at the launcher's detection file.
pub fn get_login_url(provider: &str) -> String {
    let callback =
        urlencoding::encode("https://launcher-files.modrinth.com/detect.txt");
    format!(
        "{MODRINTH_API_URL}auth/init?url={}&provider={provider}",
        callback
    )
}
/// Completes a websocket/browser login: exchanges the `code` field of the
/// final socket payload for credentials (or a two-factor challenge), using
/// `exec` to persist/look up launcher state.
pub async fn finish_login_flow(
    response: HashMap<String, Value>,
    semaphore: &FetchSemaphore,
    exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
) -> crate::Result<ModrinthCredentialsResult> {
    get_result_from_res("code", response, semaphore, exec).await
}
/// Authenticates with a username/email and password against the Modrinth
/// API, returning either full credentials or a two-factor challenge.
pub async fn login_password(
    username: &str,
    password: &str,
    challenge: &str,
    semaphore: &FetchSemaphore,
    exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite> + Copy,
) -> crate::Result<ModrinthCredentialsResult> {
    let body = serde_json::json!({
        "username": username,
        "password": password,
        "challenge": challenge,
    });

    let resp = fetch_advanced(
        Method::POST,
        &format!("{MODRINTH_API_URL}auth/login"),
        None,
        Some(body),
        None,
        None,
        semaphore,
        exec,
    )
    .await?;

    let value: HashMap<String, Value> = serde_json::from_slice(&resp)?;
    get_result_from_res("session", value, semaphore, exec).await
}
pub async fn login_2fa(
code: &str,
flow: &str,
semaphore: &FetchSemaphore,
exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite> + Copy,
) -> crate::Result<ModrinthCredentials> {
let resp = fetch_advanced(
Method::POST,
@@ -283,13 +306,13 @@ pub async fn login_2fa(
None,
None,
semaphore,
&CredentialsStore(None),
exec,
)
.await?;
let response = serde_json::from_slice::<HashMap<String, Value>>(&resp)?;
get_creds_from_res(response, semaphore).await
get_creds_from_res(response, semaphore, exec).await
}
pub async fn create_account(
@@ -299,6 +322,7 @@ pub async fn create_account(
challenge: &str,
sign_up_newsletter: bool,
semaphore: &FetchSemaphore,
exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite> + Copy,
) -> crate::Result<ModrinthCredentials> {
let resp = fetch_advanced(
Method::POST,
@@ -314,51 +338,19 @@ pub async fn create_account(
None,
None,
semaphore,
&CredentialsStore(None),
exec,
)
.await?;
let response = serde_json::from_slice::<HashMap<String, Value>>(&resp)?;
get_creds_from_res(response, semaphore).await
}
pub async fn refresh_credentials(
credentials_store: &mut CredentialsStore,
semaphore: &FetchSemaphore,
) -> crate::Result<()> {
if let Some(ref mut credentials) = credentials_store.0 {
let token = &credentials.session;
let resp = fetch_advanced(
Method::POST,
&format!("{MODRINTH_API_URL}session/refresh"),
None,
None,
Some(("Authorization", token)),
None,
semaphore,
&CredentialsStore(None),
)
.await
.ok()
.and_then(|resp| serde_json::from_slice::<Session>(&resp).ok());
if let Some(value) = resp {
credentials.user = fetch_info(&value.session, semaphore).await?;
credentials.session = value.session;
credentials.expires_at = Utc::now() + Duration::weeks(2);
} else if credentials.expires_at < Utc::now() {
credentials_store.0 = None;
}
}
credentials_store.save().await?;
Ok(())
get_creds_from_res(response, semaphore, exec).await
}
async fn fetch_info(
token: &str,
semaphore: &FetchSemaphore,
) -> crate::Result<ModrinthUser> {
exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
) -> crate::Result<crate::state::cache::User> {
let result = fetch_advanced(
Method::GET,
&format!("{MODRINTH_API_URL}user"),
@@ -367,7 +359,7 @@ async fn fetch_info(
Some(("Authorization", token)),
None,
semaphore,
&CredentialsStore(None),
exec,
)
.await?;
let value = serde_json::from_slice(&result)?;

View File

@@ -0,0 +1,366 @@
use chrono::{DateTime, Utc};
use serde::Deserialize;
use serde::Serialize;
use tokio::process::Command;
use crate::event::emit::emit_process;
use crate::event::ProcessPayloadType;
use crate::util::io::IOError;
use crate::{profile, ErrorKind};
/// A cached record of a launched game process, persisted in the `processes`
/// table so running games can be rediscovered after a launcher restart.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Process {
    // OS process id.
    pub pid: i64,
    // Start time as reported by sysinfo; combined with `name` and
    // `executable` to detect PID recycling.
    pub start_time: i64,
    pub name: String,
    pub executable: String,
    // Profile this process was launched from.
    pub profile_path: String,
    // Optional command spawned after the game exits successfully.
    pub post_exit_command: Option<String>,
}
// Builds a `sqlx::query_as!` for `Process` with the given SQL predicate
// (e.g. "WHERE pid = $1") appended to the shared SELECT. A macro is used so
// the full query remains a compile-time literal for sqlx's checked queries.
macro_rules! select_process_with_predicate {
    ($predicate:tt, $param:ident) => {
        sqlx::query_as!(
            Process,
            r#"
            SELECT
                pid, start_time, name, executable, profile_path, post_exit_command
            FROM processes
            "#
                + $predicate,
            $param
        )
    };
}
impl Process {
    /// Runs on launcher startup. Queries all the cached processes and removes processes that no
    /// longer exist. If a PID is found, they are "rescued" and passed to our process manager
    pub async fn garbage_collect(
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite> + Copy,
    ) -> crate::Result<()> {
        let processes = Self::get_all(exec).await?;

        let mut system = sysinfo::System::new();
        system.refresh_processes();

        for cached_process in processes {
            let process = system
                .process(sysinfo::Pid::from_u32(cached_process.pid as u32));

            if let Some(process) = process {
                // PIDs are recycled by the OS, so only rescue the process if
                // its start time, name, and executable all still match the
                // cached record.
                if cached_process.start_time as u64 == process.start_time()
                    && cached_process.name == process.name()
                    && cached_process.executable
                        == process
                            .exe()
                            .map(|x| x.to_string_lossy())
                            .unwrap_or_default()
                {
                    tokio::spawn(cached_process.sequential_process_manager());

                    // Fix: this was previously `break`, which aborted the
                    // whole sweep after the first rescued process, leaving
                    // every later cached entry neither rescued nor removed.
                    continue;
                }
            }

            // Process is gone (or the PID was recycled): drop the stale row.
            Self::remove(cached_process.pid as u32, exec).await?;
        }

        Ok(())
    }

    /// Spawns `mc_command`, records the resulting process in the `processes`
    /// table, and starts a background manager task tracking it until exit.
    pub async fn insert_new_process(
        profile_path: &str,
        mut mc_command: Command,
        post_exit_command: Option<String>, // Command to run after minecraft.
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
    ) -> crate::Result<Self> {
        let mc_proc = mc_command.spawn().map_err(IOError::from)?;

        let pid = mc_proc.id().ok_or_else(|| {
            crate::ErrorKind::LauncherError(
                "Process immediately failed, could not get PID".to_string(),
            )
        })?;

        // Look the process up via sysinfo to capture the identity metadata
        // (start time, name, exe) used later to validate recycled PIDs.
        let mut system = sysinfo::System::new();
        system.refresh_processes();
        let process =
            system.process(sysinfo::Pid::from_u32(pid)).ok_or_else(|| {
                crate::ErrorKind::LauncherError(format!(
                    "Could not find process {}",
                    pid
                ))
            })?;
        let start_time = process.start_time();
        let name = process.name().to_string();
        let Some(path) = process.exe() else {
            return Err(ErrorKind::LauncherError(format!(
                "Cached process {} has no accessible path",
                pid
            ))
            .into());
        };
        let executable = path.to_string_lossy().to_string();

        let process = Self {
            pid: pid as i64,
            start_time: start_time as i64,
            name,
            executable,
            profile_path: profile_path.to_string(),
            post_exit_command,
        };

        process.upsert(exec).await?;

        tokio::spawn(process.clone().sequential_process_manager());

        emit_process(
            profile_path,
            pid,
            ProcessPayloadType::Launched,
            "Launched Minecraft",
        )
        .await?;

        Ok(process)
    }

    // Spawns a new child process and inserts it into the hashmap
    // Also, as the process ends, it spawns the follow-up process if it exists
    // By convention, ExitStatus is last command's exit status, and we exit on the first non-zero exit status
    async fn sequential_process_manager(self) -> crate::Result<i32> {
        // Accumulates played seconds onto the profile, flushing at most once
        // per minute unless `force_update` is set.
        async fn update_playtime(
            last_updated_playtime: &mut DateTime<Utc>,
            profile_path: &str,
            force_update: bool,
        ) {
            let diff = Utc::now()
                .signed_duration_since(*last_updated_playtime)
                .num_seconds();
            if diff >= 60 || force_update {
                if let Err(e) = profile::edit(profile_path, |prof| {
                    prof.recent_time_played += diff as u64;
                    async { Ok(()) }
                })
                .await
                {
                    tracing::warn!(
                        "Failed to update playtime for profile {}: {}",
                        &profile_path,
                        e
                    );
                }
                *last_updated_playtime = Utc::now();
            }
        }

        // Wait on current Minecraft Child
        let mc_exit_status;
        let mut last_updated_playtime = Utc::now();

        loop {
            if let Some(t) = self.try_wait().await? {
                mc_exit_status = t;
                break;
            }
            // Poll every 50ms
            tokio::time::sleep(tokio::time::Duration::from_millis(50)).await;

            // Auto-update playtime every minute
            update_playtime(
                &mut last_updated_playtime,
                &self.profile_path,
                false,
            )
            .await;
        }

        // Now fully complete- update playtime one last time
        update_playtime(&mut last_updated_playtime, &self.profile_path, true)
            .await;

        // Publish play time update
        // Allow failure, it will be stored locally and sent next time
        // Sent in another thread as first call may take a couple seconds and hold up process ending
        let profile_path = self.profile_path.clone();
        tokio::spawn(async move {
            if let Err(e) =
                profile::try_update_playtime(&profile_path.clone()).await
            {
                tracing::warn!(
                    "Failed to update playtime for profile {}: {}",
                    &profile_path,
                    e
                );
            }
        });

        let state = crate::State::get().await?;
        let _ = state.discord_rpc.clear_to_default(true).await;
        // Drop the cached row now that the process has exited.
        Self::remove(self.pid as u32, &state.pool).await?;

        // If in tauri, window should show itself again after process exists if it was hidden
        #[cfg(feature = "tauri")]
        {
            let window = crate::EventState::get_main_window().await?;
            if let Some(window) = window {
                window.unminimize()?;
            }
        }

        if mc_exit_status == 0 {
            // We do not wait on the post exist command to finish running! We let it spawn + run on its own.
            // This behaviour may be changed in the future
            if let Some(hook) = self.post_exit_command {
                let mut cmd = hook.split(' ');
                if let Some(command) = cmd.next() {
                    let mut command = Command::new(command);
                    command.args(&cmd.collect::<Vec<&str>>()).current_dir(
                        crate::api::profile::get_full_path(&self.profile_path)
                            .await?,
                    );
                    command.spawn().map_err(IOError::from)?;
                }
            }
        }

        emit_process(
            &self.profile_path,
            self.pid as u32,
            ProcessPayloadType::Finished,
            "Exited process",
        )
        .await?;

        Ok(mc_exit_status)
    }

    /// Checks whether the tracked process has exited. Returns `Ok(None)`
    /// while running and `Ok(Some(0))` once gone — the true exit code is not
    /// recoverable through sysinfo, so 0 is always reported.
    // NOTE(review): a process in any status other than `Run` (e.g. sleeping
    // on I/O) is reported as exited here — confirm this matches the intended
    // behavior on each platform.
    async fn try_wait(&self) -> crate::Result<Option<i32>> {
        let mut system = sysinfo::System::new();
        if !system.refresh_process(sysinfo::Pid::from_u32(self.pid as u32)) {
            return Ok(Some(0));
        }

        let process = system.process(sysinfo::Pid::from_u32(self.pid as u32));

        if let Some(process) = process {
            if process.status() == sysinfo::ProcessStatus::Run {
                Ok(None)
            } else {
                Ok(Some(0))
            }
        } else {
            Ok(Some(0))
        }
    }

    /// Blocks (asynchronously) until the tracked process exits.
    pub async fn wait_for(&self) -> crate::Result<()> {
        loop {
            if self.try_wait().await?.is_some() {
                break;
            }
            // Poll every 50ms
            tokio::time::sleep(tokio::time::Duration::from_millis(50)).await;
        }
        Ok(())
    }

    /// Kills the tracked OS process if it is still alive; a no-op otherwise.
    pub async fn kill(&self) -> crate::Result<()> {
        let mut system = sysinfo::System::new();
        if system.refresh_process(sysinfo::Pid::from_u32(self.pid as u32)) {
            let process =
                system.process(sysinfo::Pid::from_u32(self.pid as u32));
            if let Some(process) = process {
                process.kill();
            }
        }
        Ok(())
    }

    /// Fetches a cached process row by PID, if one exists.
    pub async fn get(
        pid: i32,
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
    ) -> crate::Result<Option<Self>> {
        let res = select_process_with_predicate!("WHERE pid = $1", pid)
            .fetch_optional(exec)
            .await?;

        Ok(res)
    }

    /// Fetches all cached processes launched from the given profile.
    pub async fn get_from_profile(
        profile_path: &str,
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
    ) -> crate::Result<Vec<Self>> {
        let results = select_process_with_predicate!(
            "WHERE profile_path = $1",
            profile_path
        )
        .fetch_all(exec)
        .await?;

        Ok(results)
    }

    /// Fetches every cached process row.
    pub async fn get_all(
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
    ) -> crate::Result<Vec<Self>> {
        // The macro always binds one parameter, so use a trivially-true
        // predicate with a dummy value.
        let true_val = 1;
        let results = select_process_with_predicate!("WHERE 1=$1", true_val)
            .fetch_all(exec)
            .await?;

        Ok(results)
    }

    /// Inserts or overwrites this process's row, keyed by PID.
    pub async fn upsert(
        &self,
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
    ) -> crate::Result<()> {
        sqlx::query!(
            "
            INSERT INTO processes (pid, start_time, name, executable, profile_path, post_exit_command)
            VALUES ($1, $2, $3, $4, $5, $6)
            ON CONFLICT (pid) DO UPDATE SET
                start_time = $2,
                name = $3,
                executable = $4,
                profile_path = $5,
                post_exit_command = $6
            ",
            self.pid,
            self.start_time,
            self.name,
            self.executable,
            self.profile_path,
            self.post_exit_command
        )
        .execute(exec)
        .await?;

        Ok(())
    }

    /// Deletes the cached row for `pid`; a no-op if absent.
    pub async fn remove(
        pid: u32,
        exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
    ) -> crate::Result<()> {
        let pid = pid as i32;
        sqlx::query!(
            "
            DELETE FROM processes WHERE pid = $1
            ",
            pid,
        )
        .execute(exec)
        .await?;

        Ok(())
    }
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,807 +0,0 @@
//! Project management + inference
use crate::config::MODRINTH_API_URL;
use crate::state::{CredentialsStore, ModrinthUser, Profile};
use crate::util::fetch::{
fetch_json, write_cached_icon, FetchSemaphore, IoSemaphore,
};
use crate::util::io::IOError;
use async_zip::tokio::read::fs::ZipFileReader;
use chrono::{DateTime, Utc};
use futures::StreamExt;
use reqwest::Method;
use serde::{Deserialize, Serialize};
use serde_json::json;
use sha2::Digest;
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use tokio::io::AsyncReadExt;
use super::ProjectPathId;
/// The broad category of an installed content file, which determines the
/// profile subfolder it lives in (see `get_folder`).
#[derive(Serialize, Deserialize, Clone, Debug)]
#[serde(rename_all = "lowercase")]
pub enum ProjectType {
    Mod,
    DataPack,
    ResourcePack,
    ShaderPack,
}
impl ProjectType {
    /// Infers a project type from the loaders a version supports, checked in
    /// priority order: mod loaders, then datapack, shaders, resource packs.
    pub fn get_from_loaders(loaders: Vec<String>) -> Option<Self> {
        let supports_any = |candidates: &[&str]| {
            loaders
                .iter()
                .any(|loader| candidates.contains(&loader.as_str()))
        };

        if supports_any(&["fabric", "forge", "quilt", "neoforge"]) {
            Some(ProjectType::Mod)
        } else if supports_any(&["datapack"]) {
            Some(ProjectType::DataPack)
        } else if supports_any(&["iris", "optifine"]) {
            Some(ProjectType::ShaderPack)
        } else if supports_any(&["vanilla", "canvas", "minecraft"]) {
            Some(ProjectType::ResourcePack)
        } else {
            None
        }
    }

    /// Guesses the project type from the name of the folder containing
    /// `path` (e.g. a file inside `mods/` is a mod).
    pub fn get_from_parent_folder(path: PathBuf) -> Option<Self> {
        let folder = path.parent()?.file_name()?;
        match folder.to_str()? {
            "mods" => Some(Self::Mod),
            "datapacks" => Some(Self::DataPack),
            "resourcepacks" => Some(Self::ResourcePack),
            "shaderpacks" => Some(Self::ShaderPack),
            _ => None,
        }
    }

    /// Singular, lowercase name for this project type.
    pub fn get_name(&self) -> &'static str {
        match self {
            Self::Mod => "mod",
            Self::DataPack => "datapack",
            Self::ResourcePack => "resourcepack",
            Self::ShaderPack => "shaderpack",
        }
    }

    /// Profile subfolder that files of this type are stored in.
    pub fn get_folder(&self) -> &'static str {
        match self {
            Self::Mod => "mods",
            Self::DataPack => "datapacks",
            Self::ResourcePack => "resourcepacks",
            Self::ShaderPack => "shaderpacks",
        }
    }
}
/// An installed content file within a profile, paired with whatever
/// metadata could be resolved for it (Modrinth lookup, archive inference,
/// or nothing).
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Project {
    // SHA-512 of the file contents; used as the lookup key against Modrinth.
    pub sha512: String,
    // True when the file name ends with ".disabled".
    pub disabled: bool,
    pub metadata: ProjectMetadata,
    pub file_name: String,
}

/// A Modrinth project as returned by the projects API.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ModrinthProject {
    pub id: String,
    pub slug: Option<String>,
    pub project_type: String,
    pub team: String,
    pub title: String,
    pub description: String,
    pub body: String,
    pub published: DateTime<Utc>,
    pub updated: DateTime<Utc>,
    pub client_side: SideType,
    pub server_side: SideType,
    pub downloads: u32,
    pub followers: u32,
    pub categories: Vec<String>,
    pub additional_categories: Vec<String>,
    pub game_versions: Vec<String>,
    pub loaders: Vec<String>,
    pub versions: Vec<String>,
    pub icon_url: Option<String>,
}

/// A specific version of a project
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ModrinthVersion {
    pub id: String,
    pub project_id: String,
    pub author_id: String,
    pub featured: bool,
    pub name: String,
    pub version_number: String,
    pub changelog: String,
    pub changelog_url: Option<String>,
    pub date_published: DateTime<Utc>,
    pub downloads: u32,
    pub version_type: String,
    pub files: Vec<ModrinthVersionFile>,
    pub dependencies: Vec<Dependency>,
    pub game_versions: Vec<String>,
    pub loaders: Vec<String>,
}

/// A single downloadable file belonging to a `ModrinthVersion`.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ModrinthVersionFile {
    // Map of hash algorithm name (e.g. "sha512") to hash value.
    pub hashes: HashMap<String, String>,
    pub url: String,
    pub filename: String,
    pub primary: bool,
    pub size: u32,
    pub file_type: Option<FileType>,
}

/// A dependency declared by a version; identified by version, project,
/// or bare file name.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Dependency {
    pub version_id: Option<String>,
    pub project_id: Option<String>,
    pub file_name: Option<String>,
    pub dependency_type: DependencyType,
}

/// A member of the team that owns a Modrinth project.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ModrinthTeamMember {
    pub team_id: String,
    pub user: ModrinthUser,
    pub role: String,
    pub ordering: i64,
}

#[derive(Serialize, Deserialize, Copy, Clone, Debug)]
#[serde(rename_all = "lowercase")]
pub enum DependencyType {
    Required,
    Optional,
    Incompatible,
    Embedded,
}

/// Whether a project is needed on the client and/or server side.
#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)]
#[serde(rename_all = "kebab-case")]
pub enum SideType {
    Required,
    Optional,
    Unsupported,
    Unknown,
}

#[derive(Serialize, Deserialize, Copy, Clone, Debug)]
#[serde(rename_all = "kebab-case")]
pub enum FileType {
    RequiredResourcePack,
    OptionalResourcePack,
    Unknown,
}

/// How a `Project`'s metadata was resolved: a full Modrinth match, values
/// inferred from descriptor files inside the archive, or nothing at all.
#[derive(Serialize, Deserialize, Clone, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum ProjectMetadata {
    Modrinth {
        project: Box<ModrinthProject>,
        version: Box<ModrinthVersion>,
        members: Vec<ModrinthTeamMember>,
        // Newer compatible version, when one exists.
        update_version: Option<Box<ModrinthVersion>>,
        incompatible: bool,
    },
    Inferred {
        title: Option<String>,
        description: Option<String>,
        authors: Vec<String>,
        version: Option<String>,
        icon: Option<PathBuf>,
        project_type: Option<String>,
    },
    Unknown,
}
// Extracts `icon_path` from the zip archive at `path` (if declared and
// present) and writes it into the launcher's icon cache, returning the
// cached icon's filesystem path. Any failure to read the entry falls
// through to `Ok(None)` rather than erroring.
#[tracing::instrument(skip(io_semaphore))]
#[theseus_macros::debug_pin]
async fn read_icon_from_file(
    icon_path: Option<String>,
    cache_dir: &Path,
    path: &PathBuf,
    io_semaphore: &IoSemaphore,
) -> crate::Result<Option<PathBuf>> {
    if let Some(icon_path) = icon_path {
        // we have to reopen the zip twice here :(
        let zip_file_reader = ZipFileReader::new(path).await;
        if let Ok(zip_file_reader) = zip_file_reader {
            // Get index of icon file and open it
            let zip_index_option =
                zip_file_reader.file().entries().iter().position(|f| {
                    f.filename().as_str().unwrap_or_default() == icon_path
                });
            if let Some(zip_index) = zip_index_option {
                let mut bytes = Vec::new();
                if zip_file_reader
                    .reader_with_entry(zip_index)
                    .await?
                    .read_to_end_checked(&mut bytes)
                    .await
                    .is_ok()
                {
                    let bytes = bytes::Bytes::from(bytes);
                    let path = write_cached_icon(
                        &icon_path,
                        cache_dir,
                        bytes,
                        io_semaphore,
                    )
                    .await?;
                    return Ok(Some(path));
                }
            }
        }
    }
    // No icon declared, archive unreadable, or entry missing/unreadable.
    Ok(None)
}
// Creates Project data from the existing files in the file system, for a given Profile
// Paths must be the full paths to the files in the FS, and not the relative paths
// eg: with get_profile_full_project_paths
//
// Pipeline: (1) SHA-512 hash every file, (2) resolve hashes against the
// Modrinth API (exact versions + available updates), (3) attach Modrinth
// metadata where matched, (4) infer metadata from descriptor files inside
// unmatched archives, then key everything by relative project path.
#[tracing::instrument(skip(paths, profile, io_semaphore, fetch_semaphore))]
#[theseus_macros::debug_pin]
pub async fn infer_data_from_files(
    profile: Profile,
    paths: Vec<PathBuf>,
    cache_dir: PathBuf,
    io_semaphore: &IoSemaphore,
    fetch_semaphore: &FetchSemaphore,
    credentials: &CredentialsStore,
) -> crate::Result<HashMap<ProjectPathId, Project>> {
    // Step 1: hash every file on disk (SHA-512, streamed in 4 KiB chunks).
    let mut file_path_hashes = HashMap::new();
    for path in paths {
        if !path.exists() {
            continue;
        }
        if let Some(ext) = path.extension() {
            // Ignore txt configuration files
            if ext == "txt" {
                continue;
            }
        }
        let mut file = tokio::fs::File::open(path.clone())
            .await
            .map_err(|e| IOError::with_path(e, &path))?;
        let mut buffer = [0u8; 4096]; // Buffer to read chunks
        let mut hasher = sha2::Sha512::new(); // Hasher
        loop {
            let bytes_read =
                file.read(&mut buffer).await.map_err(IOError::from)?;
            if bytes_read == 0 {
                break;
            }
            hasher.update(&buffer[..bytes_read]);
        }
        let hash = format!("{:x}", hasher.finalize());
        file_path_hashes.insert(hash, path.clone());
    }
    // Step 2: resolve the hashes against Modrinth — the exact version each
    // hash corresponds to, plus the newest update compatible with the
    // profile's loader/game version. Both requests run concurrently.
    let files_url = format!("{}version_files", MODRINTH_API_URL);
    let updates_url = format!("{}version_files/update", MODRINTH_API_URL);
    let (files, update_versions) = tokio::try_join!(
        fetch_json::<HashMap<String, ModrinthVersion>>(
            Method::POST,
            &files_url,
            None,
            Some(json!({
                "hashes": file_path_hashes.keys().collect::<Vec<_>>(),
                "algorithm": "sha512",
            })),
            fetch_semaphore,
            credentials,
        ),
        fetch_json::<HashMap<String, ModrinthVersion>>(
            Method::POST,
            &updates_url,
            None,
            Some(json!({
                "hashes": file_path_hashes.keys().collect::<Vec<_>>(),
                "algorithm": "sha512",
                "loaders": [profile.metadata.loader],
                "game_versions": [profile.metadata.game_version]
            })),
            fetch_semaphore,
            credentials,
        )
    )?;
    // Fetch the projects behind the matched versions, then their teams.
    let projects: Vec<ModrinthProject> = fetch_json(
        Method::GET,
        &format!(
            "{}projects?ids={}",
            MODRINTH_API_URL,
            serde_json::to_string(
                &files
                    .values()
                    .map(|x| x.project_id.clone())
                    .collect::<Vec<_>>()
            )?
        ),
        None,
        None,
        fetch_semaphore,
        credentials,
    )
    .await?;
    let teams: Vec<ModrinthTeamMember> = fetch_json::<
        Vec<Vec<ModrinthTeamMember>>,
    >(
        Method::GET,
        &format!(
            "{}teams?ids={}",
            MODRINTH_API_URL,
            serde_json::to_string(
                &projects.iter().map(|x| x.team.clone()).collect::<Vec<_>>()
            )?
        ),
        None,
        None,
        fetch_semaphore,
        credentials,
    )
    .await?
    .into_iter()
    .flatten()
    .collect();
    // Step 3: attach Modrinth metadata where a hash matched; everything else
    // is queued for archive-based inference below.
    let mut return_projects: Vec<(PathBuf, Project)> = Vec::new();
    let mut further_analyze_projects: Vec<(String, PathBuf)> = Vec::new();
    for (hash, path) in file_path_hashes {
        if let Some(version) = files.get(&hash) {
            if let Some(project) =
                projects.iter().find(|x| version.project_id == x.id)
            {
                let file_name = path
                    .file_name()
                    .unwrap_or_default()
                    .to_string_lossy()
                    .to_string();
                return_projects.push((
                    path,
                    Project {
                        disabled: file_name.ends_with(".disabled"),
                        metadata: ProjectMetadata::Modrinth {
                            project: Box::new(project.clone()),
                            version: Box::new(version.clone()),
                            members: teams
                                .iter()
                                .filter(|x| x.team_id == project.team)
                                .cloned()
                                .collect::<Vec<_>>(),
                            update_version: if let Some(value) =
                                update_versions.get(&hash)
                            {
                                if value.id != version.id {
                                    Some(Box::new(value.clone()))
                                } else {
                                    None
                                }
                            } else {
                                None
                            },
                            // NOTE(review): the `||` arm marks versions that
                            // DO list this game version as incompatible —
                            // this looks inverted; confirm intended logic.
                            incompatible: !version.loaders.contains(
                                &profile
                                    .metadata
                                    .loader
                                    .as_api_str()
                                    .to_string(),
                            ) || version
                                .game_versions
                                .contains(&profile.metadata.game_version),
                        },
                        sha512: hash,
                        file_name,
                    },
                ));
                continue;
            }
        }
        further_analyze_projects.push((hash, path));
    }
    // Step 4: infer metadata for unmatched files from well-known descriptor
    // files inside the archive, tried in order: Forge (mods.toml), legacy
    // Forge (mcmod.info), Fabric, Quilt, then generic packs (pack.mcmeta).
    for (hash, path) in further_analyze_projects {
        let file_name = path
            .file_name()
            .unwrap_or_default()
            .to_string_lossy()
            .to_string();
        let zip_file_reader = if let Ok(zip_file_reader) =
            ZipFileReader::new(path.clone()).await
        {
            zip_file_reader
        } else {
            // Not a readable zip: record it with unknown metadata.
            return_projects.push((
                path.clone(),
                Project {
                    sha512: hash,
                    disabled: file_name.ends_with(".disabled"),
                    metadata: ProjectMetadata::Unknown,
                    file_name,
                },
            ));
            continue;
        };
        // Forge
        let zip_index_option =
            zip_file_reader.file().entries().iter().position(|f| {
                f.filename().as_str().unwrap_or_default()
                    == "META-INF/mods.toml"
            });
        if let Some(index) = zip_index_option {
            #[derive(Deserialize)]
            #[serde(rename_all = "camelCase")]
            struct ForgeModInfo {
                pub mods: Vec<ForgeMod>,
            }
            #[derive(Deserialize)]
            #[serde(rename_all = "camelCase")]
            struct ForgeMod {
                mod_id: String,
                version: Option<String>,
                display_name: Option<String>,
                description: Option<String>,
                logo_file: Option<String>,
                authors: Option<String>,
            }
            let mut file_str = String::new();
            if zip_file_reader
                .reader_with_entry(index)
                .await?
                .read_to_string_checked(&mut file_str)
                .await
                .is_ok()
            {
                if let Ok(pack) = toml::from_str::<ForgeModInfo>(&file_str) {
                    if let Some(pack) = pack.mods.first() {
                        let icon = read_icon_from_file(
                            pack.logo_file.clone(),
                            &cache_dir,
                            &path,
                            io_semaphore,
                        )
                        .await?;
                        return_projects.push((
                            path.clone(),
                            Project {
                                sha512: hash,
                                disabled: file_name.ends_with(".disabled"),
                                file_name,
                                metadata: ProjectMetadata::Inferred {
                                    title: Some(
                                        pack.display_name
                                            .clone()
                                            .unwrap_or_else(|| {
                                                pack.mod_id.clone()
                                            }),
                                    ),
                                    description: pack.description.clone(),
                                    authors: pack
                                        .authors
                                        .clone()
                                        .map(|x| vec![x])
                                        .unwrap_or_default(),
                                    version: pack.version.clone(),
                                    icon,
                                    project_type: Some("mod".to_string()),
                                },
                            },
                        ));
                        continue;
                    }
                }
            }
        }
        // Forge (legacy mcmod.info format)
        let zip_index_option =
            zip_file_reader.file().entries().iter().position(|f| {
                f.filename().as_str().unwrap_or_default() == "mcmod.info"
            });
        if let Some(index) = zip_index_option {
            #[derive(Deserialize)]
            #[serde(rename_all = "camelCase")]
            struct ForgeMod {
                modid: String,
                name: String,
                description: Option<String>,
                version: Option<String>,
                author_list: Option<Vec<String>>,
                logo_file: Option<String>,
            }
            let mut file_str = String::new();
            if zip_file_reader
                .reader_with_entry(index)
                .await?
                .read_to_string_checked(&mut file_str)
                .await
                .is_ok()
            {
                if let Ok(pack) = serde_json::from_str::<ForgeMod>(&file_str) {
                    let icon = read_icon_from_file(
                        pack.logo_file,
                        &cache_dir,
                        &path,
                        io_semaphore,
                    )
                    .await?;
                    return_projects.push((
                        path.clone(),
                        Project {
                            sha512: hash,
                            disabled: file_name.ends_with(".disabled"),
                            file_name,
                            metadata: ProjectMetadata::Inferred {
                                title: Some(if pack.name.is_empty() {
                                    pack.modid
                                } else {
                                    pack.name
                                }),
                                description: pack.description,
                                authors: pack.author_list.unwrap_or_default(),
                                version: pack.version,
                                icon,
                                project_type: Some("mod".to_string()),
                            },
                        },
                    ));
                    continue;
                }
            }
        }
        // Fabric
        let zip_index_option =
            zip_file_reader.file().entries().iter().position(|f| {
                f.filename().as_str().unwrap_or_default() == "fabric.mod.json"
            });
        if let Some(index) = zip_index_option {
            #[derive(Deserialize)]
            #[serde(untagged)]
            enum FabricAuthor {
                String(String),
                Object { name: String },
            }
            #[derive(Deserialize)]
            #[serde(rename_all = "camelCase")]
            struct FabricMod {
                id: String,
                version: String,
                name: Option<String>,
                description: Option<String>,
                authors: Vec<FabricAuthor>,
                icon: Option<String>,
            }
            let mut file_str = String::new();
            if zip_file_reader
                .reader_with_entry(index)
                .await?
                .read_to_string_checked(&mut file_str)
                .await
                .is_ok()
            {
                if let Ok(pack) = serde_json::from_str::<FabricMod>(&file_str) {
                    let icon = read_icon_from_file(
                        pack.icon,
                        &cache_dir,
                        &path,
                        io_semaphore,
                    )
                    .await?;
                    return_projects.push((
                        path.clone(),
                        Project {
                            sha512: hash,
                            disabled: file_name.ends_with(".disabled"),
                            file_name,
                            metadata: ProjectMetadata::Inferred {
                                title: Some(pack.name.unwrap_or(pack.id)),
                                description: pack.description,
                                authors: pack
                                    .authors
                                    .into_iter()
                                    .map(|x| match x {
                                        FabricAuthor::String(name) => name,
                                        FabricAuthor::Object { name } => name,
                                    })
                                    .collect(),
                                version: Some(pack.version),
                                icon,
                                project_type: Some("mod".to_string()),
                            },
                        },
                    ));
                    continue;
                }
            }
        }
        // Quilt
        let zip_index_option =
            zip_file_reader.file().entries().iter().position(|f| {
                f.filename().as_str().unwrap_or_default() == "quilt.mod.json"
            });
        if let Some(index) = zip_index_option {
            #[derive(Deserialize)]
            struct QuiltMetadata {
                pub name: Option<String>,
                pub description: Option<String>,
                pub contributors: Option<HashMap<String, String>>,
                pub icon: Option<String>,
            }
            #[derive(Deserialize)]
            struct QuiltMod {
                id: String,
                version: String,
                metadata: Option<QuiltMetadata>,
            }
            let mut file_str = String::new();
            if zip_file_reader
                .reader_with_entry(index)
                .await?
                .read_to_string_checked(&mut file_str)
                .await
                .is_ok()
            {
                if let Ok(pack) = serde_json::from_str::<QuiltMod>(&file_str) {
                    let icon = read_icon_from_file(
                        pack.metadata.as_ref().and_then(|x| x.icon.clone()),
                        &cache_dir,
                        &path,
                        io_semaphore,
                    )
                    .await?;
                    return_projects.push((
                        path.clone(),
                        Project {
                            sha512: hash,
                            disabled: file_name.ends_with(".disabled"),
                            file_name,
                            metadata: ProjectMetadata::Inferred {
                                title: Some(
                                    pack.metadata
                                        .as_ref()
                                        .and_then(|x| x.name.clone())
                                        .unwrap_or(pack.id),
                                ),
                                description: pack
                                    .metadata
                                    .as_ref()
                                    .and_then(|x| x.description.clone()),
                                authors: pack
                                    .metadata
                                    .map(|x| {
                                        x.contributors
                                            .unwrap_or_default()
                                            .keys()
                                            .cloned()
                                            .collect()
                                    })
                                    .unwrap_or_default(),
                                version: Some(pack.version),
                                icon,
                                project_type: Some("mod".to_string()),
                            },
                        },
                    ));
                    continue;
                }
            }
        }
        // Other
        let zip_index_option =
            zip_file_reader.file().entries().iter().position(|f| {
                f.filename().as_str().unwrap_or_default() == "pack.mcmeta"
            });
        if let Some(index) = zip_index_option {
            #[derive(Deserialize)]
            struct Pack {
                description: Option<String>,
            }
            let mut file_str = String::new();
            if zip_file_reader
                .reader_with_entry(index)
                .await?
                .read_to_string_checked(&mut file_str)
                .await
                .is_ok()
            {
                if let Ok(pack) = serde_json::from_str::<Pack>(&file_str) {
                    let icon = read_icon_from_file(
                        Some("pack.png".to_string()),
                        &cache_dir,
                        &path,
                        io_semaphore,
                    )
                    .await?;
                    // Guess the project type from the filepath
                    let project_type =
                        ProjectType::get_from_parent_folder(path.clone());
                    return_projects.push((
                        path.clone(),
                        Project {
                            sha512: hash,
                            disabled: file_name.ends_with(".disabled"),
                            file_name,
                            metadata: ProjectMetadata::Inferred {
                                title: None,
                                description: pack.description,
                                authors: Vec::new(),
                                version: None,
                                icon,
                                project_type: project_type
                                    .map(|x| x.get_name().to_string()),
                            },
                        },
                    ));
                    continue;
                }
            }
        }
        // Nothing matched: record the file with unknown metadata.
        return_projects.push((
            path.clone(),
            Project {
                sha512: hash,
                disabled: file_name.ends_with(".disabled"),
                file_name,
                metadata: ProjectMetadata::Unknown,
            },
        ));
    }
    // Project paths should be relative
    let mut corrected_hashmap = HashMap::new();
    let mut stream = tokio_stream::iter(return_projects);
    while let Some((h, v)) = stream.next().await {
        let h = ProjectPathId::from_fs_path(&h).await?;
        corrected_hashmap.insert(h, v);
    }
    Ok(corrected_hashmap)
}

View File

@@ -1,69 +0,0 @@
use uuid::Uuid;
use crate::State;
// We implement a store for safe loading bars such that we can wait for them to complete
// We create this store separately from the loading bars themselves, because this may be extended as needed
/// Registry of in-flight "safe" processes (currently only loading bars)
/// that shutdown logic can wait on before exiting.
pub struct SafeProcesses {
    // UUIDs of loading bars that are still running.
    pub loading_bars: Vec<Uuid>,
}
/// Kind of safe process tracked by `SafeProcesses`.
#[derive(Debug, Copy, Clone)]
pub enum ProcessType {
    LoadingBar,
    // Potentially other types of processes (ie: IO operations?)
}
impl SafeProcesses {
    /// Creates an empty registry.
    pub fn new() -> Self {
        Self {
            loading_bars: Vec::new(),
        }
    }

    /// Registers `uuid` as a running safe process of the given kind and
    /// returns it unchanged for caller convenience.
    pub async fn add_uuid(
        r#type: ProcessType,
        uuid: Uuid,
    ) -> crate::Result<Uuid> {
        let state = State::get().await?;
        let mut registry = state.safety_processes.write().await;
        match r#type {
            ProcessType::LoadingBar => registry.loading_bars.push(uuid),
        }
        Ok(uuid)
    }

    /// Marks the safe process identified by `uuid` as finished, removing it
    /// from the registry.
    pub async fn complete(
        r#type: ProcessType,
        uuid: Uuid,
    ) -> crate::Result<()> {
        let state = State::get().await?;
        let mut registry = state.safety_processes.write().await;
        match r#type {
            ProcessType::LoadingBar => {
                registry.loading_bars.retain(|x| *x != uuid)
            }
        }
        Ok(())
    }

    /// Returns true when no safe processes of the given kind remain.
    pub async fn is_complete(r#type: ProcessType) -> crate::Result<bool> {
        let state = State::get().await?;
        let registry = state.safety_processes.read().await;
        let done = match r#type {
            ProcessType::LoadingBar => registry.loading_bars.is_empty(),
        };
        Ok(done)
    }
}

View File

@@ -1,127 +1,173 @@
//! Theseus settings file
use serde::{Deserialize, Serialize};
use std::path::{Path, PathBuf};
use tokio::fs;
use super::{DirectoryInfo, JavaGlobals};
// TODO: convert to semver?
const CURRENT_FORMAT_VERSION: u32 = 1;
// Types
/// Global Theseus settings
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Settings {
pub theme: Theme,
pub memory: MemorySettings,
#[serde(default)]
pub force_fullscreen: bool,
pub game_resolution: WindowSize,
pub custom_java_args: Vec<String>,
pub custom_env_args: Vec<(String, String)>,
pub java_globals: JavaGlobals,
pub hooks: Hooks,
pub max_concurrent_downloads: usize,
pub max_concurrent_writes: usize,
pub version: u32,
pub collapsed_navigation: bool,
#[serde(default)]
pub disable_discord_rpc: bool,
#[serde(default)]
pub hide_on_process: bool,
#[serde(default)]
pub native_decorations: bool,
#[serde(default)]
pub theme: Theme,
pub default_page: DefaultPage,
#[serde(default)]
pub developer_mode: bool,
#[serde(default)]
pub opt_out_analytics: bool,
#[serde(default)]
pub collapsed_navigation: bool,
pub advanced_rendering: bool,
#[serde(default)]
pub fully_onboarded: bool,
#[serde(default = "DirectoryInfo::get_initial_settings_dir")]
pub loaded_config_dir: Option<PathBuf>,
pub native_decorations: bool,
pub telemetry: bool,
pub discord_rpc: bool,
pub developer_mode: bool,
pub onboarded: bool,
pub extra_launch_args: Vec<String>,
pub custom_env_vars: Vec<(String, String)>,
pub memory: MemorySettings,
pub force_fullscreen: bool,
pub game_resolution: WindowSize,
pub hide_on_process_start: bool,
pub hooks: Hooks,
pub custom_dir: Option<String>,
pub prev_custom_dir: Option<String>,
pub migrated: bool,
}
impl Settings {
#[tracing::instrument]
pub async fn init(file: &Path) -> crate::Result<Self> {
let mut rescued = false;
pub async fn get(
exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
) -> crate::Result<Self> {
let res = sqlx::query!(
"
SELECT
max_concurrent_writes, max_concurrent_downloads,
theme, default_page, collapsed_navigation, advanced_rendering, native_decorations,
discord_rpc, developer_mode, telemetry,
onboarded,
json(extra_launch_args) extra_launch_args, json(custom_env_vars) custom_env_vars,
mc_memory_max, mc_force_fullscreen, mc_game_resolution_x, mc_game_resolution_y, hide_on_process_start,
hook_pre_launch, hook_wrapper, hook_post_exit,
custom_dir, prev_custom_dir, migrated
FROM settings
"
)
.fetch_one(exec)
.await?;
let settings = if file.exists() {
let loaded_settings = fs::read(&file)
.await
.map_err(|err| {
crate::ErrorKind::FSError(format!(
"Error reading settings file: {err}"
))
.as_error()
})
.and_then(|it| {
serde_json::from_slice::<Settings>(&it)
.map_err(crate::Error::from)
});
// settings is corrupted. Back up the file and create a new one
if let Err(ref err) = loaded_settings {
tracing::error!("Failed to load settings file: {err}. ");
let backup_file = file.with_extension("json.bak");
tracing::error!("Corrupted settings file will be backed up as {}, and a new settings file will be created.", backup_file.display());
let _ = fs::rename(file, backup_file).await;
rescued = true;
}
loaded_settings.ok()
} else {
None
};
if let Some(settings) = settings {
Ok(settings)
} else {
// Create new settings file
let settings = Self {
theme: Theme::Dark,
memory: MemorySettings::default(),
force_fullscreen: false,
game_resolution: WindowSize::default(),
custom_java_args: Vec::new(),
custom_env_args: Vec::new(),
java_globals: JavaGlobals::new(),
hooks: Hooks::default(),
max_concurrent_downloads: 10,
max_concurrent_writes: 10,
version: CURRENT_FORMAT_VERSION,
collapsed_navigation: false,
disable_discord_rpc: false,
hide_on_process: false,
native_decorations: false,
default_page: DefaultPage::Home,
developer_mode: false,
opt_out_analytics: false,
advanced_rendering: true,
fully_onboarded: rescued, // If we rescued the settings file, we should consider the user fully onboarded
// By default, the config directory is the same as the settings directory
loaded_config_dir: DirectoryInfo::get_initial_settings_dir(),
};
if rescued {
settings.sync(file).await?;
}
Ok(settings)
}
Ok(Self {
max_concurrent_downloads: res.max_concurrent_downloads as usize,
max_concurrent_writes: res.max_concurrent_writes as usize,
theme: Theme::from_string(&res.theme),
default_page: DefaultPage::from_string(&res.default_page),
collapsed_navigation: res.collapsed_navigation == 1,
advanced_rendering: res.advanced_rendering == 1,
native_decorations: res.native_decorations == 1,
telemetry: res.telemetry == 1,
discord_rpc: res.discord_rpc == 1,
developer_mode: res.developer_mode == 1,
onboarded: res.onboarded == 1,
extra_launch_args: res
.extra_launch_args
.and_then(|x| serde_json::from_str(&x).ok())
.unwrap_or_default(),
custom_env_vars: res
.custom_env_vars
.and_then(|x| serde_json::from_str(&x).ok())
.unwrap_or_default(),
memory: MemorySettings {
maximum: res.mc_memory_max as u32,
},
force_fullscreen: res.mc_force_fullscreen == 1,
game_resolution: WindowSize(
res.mc_game_resolution_x as u16,
res.mc_game_resolution_y as u16,
),
hide_on_process_start: res.hide_on_process_start == 1,
hooks: Hooks {
pre_launch: res.hook_pre_launch,
wrapper: res.hook_wrapper,
post_exit: res.hook_post_exit,
},
custom_dir: res.custom_dir,
prev_custom_dir: res.prev_custom_dir,
migrated: res.migrated == 1,
})
}
#[tracing::instrument(skip(self))]
pub async fn sync(&self, to: &Path) -> crate::Result<()> {
fs::write(to, serde_json::to_vec(self)?)
.await
.map_err(|err| {
crate::ErrorKind::FSError(format!(
"Error saving settings to file: {err}"
))
.as_error()
})?;
pub async fn update(
&self,
exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
) -> crate::Result<()> {
let max_concurrent_writes = self.max_concurrent_writes as i32;
let max_concurrent_downloads = self.max_concurrent_downloads as i32;
let theme = self.theme.as_str();
let default_page = self.default_page.as_str();
let extra_launch_args = serde_json::to_string(&self.extra_launch_args)?;
let custom_env_vars = serde_json::to_string(&self.custom_env_vars)?;
sqlx::query!(
"
UPDATE settings
SET
max_concurrent_writes = $1,
max_concurrent_downloads = $2,
theme = $3,
default_page = $4,
collapsed_navigation = $5,
advanced_rendering = $6,
native_decorations = $7,
discord_rpc = $8,
developer_mode = $9,
telemetry = $10,
onboarded = $11,
extra_launch_args = jsonb($12),
custom_env_vars = jsonb($13),
mc_memory_max = $14,
mc_force_fullscreen = $15,
mc_game_resolution_x = $16,
mc_game_resolution_y = $17,
hide_on_process_start = $18,
hook_pre_launch = $19,
hook_wrapper = $20,
hook_post_exit = $21,
custom_dir = $22,
prev_custom_dir = $23,
migrated = $24
",
max_concurrent_writes,
max_concurrent_downloads,
theme,
default_page,
self.collapsed_navigation,
self.advanced_rendering,
self.native_decorations,
self.discord_rpc,
self.developer_mode,
self.telemetry,
self.onboarded,
extra_launch_args,
custom_env_vars,
self.memory.maximum,
self.force_fullscreen,
self.game_resolution.0,
self.game_resolution.1,
self.hide_on_process_start,
self.hooks.pre_launch,
self.hooks.wrapper,
self.hooks.post_exit,
self.custom_dir,
self.prev_custom_dir,
self.migrated
)
.execute(exec)
.await?;
Ok(())
}
}
@@ -135,37 +181,40 @@ pub enum Theme {
Oled,
}
impl Theme {
    /// Returns the canonical lowercase identifier persisted for this theme.
    pub fn as_str(&self) -> &'static str {
        match self {
            Self::Dark => "dark",
            Self::Light => "light",
            Self::Oled => "oled",
        }
    }

    /// Parses a stored identifier back into a `Theme`.
    /// Any unrecognized value (as well as "dark") resolves to `Theme::Dark`.
    pub fn from_string(string: &str) -> Theme {
        match string {
            "light" => Theme::Light,
            "oled" => Theme::Oled,
            _ => Theme::Dark,
        }
    }
}
/// Minecraft memory settings
#[derive(Serialize, Deserialize, Debug, Clone, Copy)]
pub struct MemorySettings {
    // Maximum memory allocated to the game — presumably megabytes; TODO
    // confirm the unit against where launch arguments consume this value
    pub maximum: u32,
}
impl Default for MemorySettings {
    fn default() -> Self {
        // Stock allocation given to new instances
        Self { maximum: 2048 }
    }
}
/// Game window size as (width, height); field 0 is the x resolution and
/// field 1 the y resolution (see `mc_game_resolution_x`/`_y` in settings)
#[derive(Serialize, Deserialize, Debug, Clone, Copy)]
pub struct WindowSize(pub u16, pub u16);
impl Default for WindowSize {
    fn default() -> Self {
        // Default resolution used when no custom size is configured
        Self(854, 480)
    }
}
/// Game initialization hooks
#[derive(Serialize, Deserialize, Debug, Clone, Default)]
#[serde(default)]
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Hooks {
#[serde(skip_serializing_if = "Option::is_none")]
pub pre_launch: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub wrapper: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub post_exit: Option<String>,
}
@@ -176,8 +225,19 @@ pub enum DefaultPage {
Library,
}
impl Default for DefaultPage {
fn default() -> Self {
Self::Home
impl DefaultPage {
pub fn as_str(&self) -> &'static str {
match self {
DefaultPage::Home => "home",
DefaultPage::Library => "library",
}
}
pub fn from_string(string: &str) -> Self {
match string {
"home" => Self::Home,
"library" => Self::Library,
_ => Self::Home,
}
}
}

View File

@@ -1,261 +0,0 @@
use std::path::PathBuf;
use reqwest::Method;
use serde::{Deserialize, Serialize};
use crate::config::MODRINTH_API_URL;
use crate::data::DirectoryInfo;
use crate::state::CredentialsStore;
use crate::util::fetch::{
fetch_json, read_json, write, FetchSemaphore, IoSemaphore,
};
// Serializable struct bundling every Modrinth tag list so the frontend can
// fetch them all in one call
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Tags {
    pub categories: Vec<Category>,
    pub loaders: Vec<Loader>,
    pub game_versions: Vec<GameVersion>,
    pub donation_platforms: Vec<DonationPlatform>,
    pub report_types: Vec<String>,
}
impl Tags {
    /// Loads the launcher tag bundle, trying in order: the on-disk cache
    /// (`tags.json`), a live fetch (only when `fetch_online` is set), then
    /// the backup file (`tags.json.bak`, which is also restored over the
    /// primary cache). Whichever copy wins is re-written to both the cache
    /// and the backup before being returned.
    ///
    /// # Errors
    /// Returns `NoValueFor("launcher tags")` when no copy could be obtained,
    /// or an `FSError` if restoring/writing the cache files fails.
    #[tracing::instrument(skip(io_semaphore, fetch_semaphore))]
    #[theseus_macros::debug_pin]
    pub async fn init(
        dirs: &DirectoryInfo,
        fetch_online: bool,
        io_semaphore: &IoSemaphore,
        fetch_semaphore: &FetchSemaphore,
        credentials: &CredentialsStore,
    ) -> crate::Result<Self> {
        let mut tags = None;
        let tags_path = dirs.caches_meta_dir().await.join("tags.json");
        let tags_path_backup =
            dirs.caches_meta_dir().await.join("tags.json.bak");
        if let Ok(tags_json) = read_json::<Self>(&tags_path, io_semaphore).await
        {
            tags = Some(tags_json);
        } else if fetch_online {
            // Cache miss while online: fetch, but tolerate failure — a
            // warning is logged and we fall through with `tags` still None
            match Self::fetch(fetch_semaphore, credentials).await {
                Ok(tags_fetch) => tags = Some(tags_fetch),
                Err(err) => {
                    tracing::warn!("Unable to fetch launcher tags: {err}")
                }
            }
        } else if let Ok(tags_json) =
            read_json::<Self>(&tags_path_backup, io_semaphore).await
        {
            tags = Some(tags_json);
            // Restore the backup over the missing/corrupt primary cache
            std::fs::copy(&tags_path_backup, &tags_path).map_err(|err| {
                crate::ErrorKind::FSError(format!(
                    "Error restoring tags backup: {err}"
                ))
                .as_error()
            })?;
        }
        if let Some(tags_data) = tags {
            // Persist the winning copy to both the cache and its backup
            write(&tags_path, &serde_json::to_vec(&tags_data)?, io_semaphore)
                .await?;
            write(
                &tags_path_backup,
                &serde_json::to_vec(&tags_data)?,
                io_semaphore,
            )
            .await?;
            Ok(tags_data)
        } else {
            Err(crate::ErrorKind::NoValueFor(String::from("launcher tags"))
                .as_error())
        }
    }
    /// Fetches fresh tags from the API, backs up the current cache file,
    /// writes the new data, and swaps it into global state. Failures are
    /// logged as warnings rather than propagated to the caller.
    #[tracing::instrument]
    #[theseus_macros::debug_pin]
    pub async fn update() {
        let res = async {
            let state = crate::State::get().await?;
            let creds = state.credentials.read().await;
            let tags_fetch =
                Tags::fetch(&state.fetch_semaphore, &creds).await?;
            // Release the credentials read-guard before doing file IO
            drop(creds);
            let tags_path =
                state.directories.caches_meta_dir().await.join("tags.json");
            let tags_path_backup = state
                .directories
                .caches_meta_dir()
                .await
                .join("tags.json.bak");
            if tags_path.exists() {
                // NOTE(review): this unwrap panics if the copy fails (e.g.
                // permissions) — confirm whether that is acceptable here
                std::fs::copy(&tags_path, &tags_path_backup).unwrap();
            }
            write(
                &tags_path,
                &serde_json::to_vec(&tags_fetch)?,
                &state.io_semaphore,
            )
            .await?;
            let mut old_tags = state.tags.write().await;
            *old_tags = tags_fetch;
            Ok::<(), crate::Error>(())
        }
        .await;
        match res {
            Ok(()) => {}
            Err(err) => {
                tracing::warn!("Unable to update launcher tags: {err}")
            }
        };
    }
    // Returns a clone of the cached categories list
    #[tracing::instrument(skip(self))]
    pub fn get_categories(&self) -> Vec<Category> {
        self.categories.clone()
    }
    // Returns a clone of the cached loaders list
    #[tracing::instrument(skip(self))]
    pub fn get_loaders(&self) -> Vec<Loader> {
        self.loaders.clone()
    }
    // Returns a clone of the cached game versions list
    #[tracing::instrument(skip(self))]
    pub fn get_game_versions(&self) -> Vec<GameVersion> {
        self.game_versions.clone()
    }
    // Returns a clone of the cached donation platforms list
    #[tracing::instrument(skip(self))]
    pub fn get_donation_platforms(&self) -> Vec<DonationPlatform> {
        self.donation_platforms.clone()
    }
    // Returns a clone of the cached report types list
    #[tracing::instrument(skip(self))]
    pub fn get_report_types(&self) -> Vec<String> {
        self.report_types.clone()
    }
    // Gets all tags together as a serializable bundle
    #[tracing::instrument(skip(self))]
    pub fn get_tag_bundle(&self) -> Tags {
        self.clone()
    }
    /// Fetches all five tag lists from the Modrinth API concurrently and
    /// assembles them into a `Tags` bundle. Fails if any request fails.
    pub async fn fetch(
        semaphore: &FetchSemaphore,
        credentials: &CredentialsStore,
    ) -> crate::Result<Self> {
        let categories = format!("{MODRINTH_API_URL}tag/category");
        let loaders = format!("{MODRINTH_API_URL}tag/loader");
        let game_versions = format!("{MODRINTH_API_URL}tag/game_version");
        let donation_platforms =
            format!("{MODRINTH_API_URL}tag/donation_platform");
        let report_types = format!("{MODRINTH_API_URL}tag/report_type");
        let categories_fut = fetch_json::<Vec<Category>>(
            Method::GET,
            &categories,
            None,
            None,
            semaphore,
            credentials,
        );
        let loaders_fut = fetch_json::<Vec<Loader>>(
            Method::GET,
            &loaders,
            None,
            None,
            semaphore,
            credentials,
        );
        let game_versions_fut = fetch_json::<Vec<GameVersion>>(
            Method::GET,
            &game_versions,
            None,
            None,
            semaphore,
            credentials,
        );
        let donation_platforms_fut = fetch_json::<Vec<DonationPlatform>>(
            Method::GET,
            &donation_platforms,
            None,
            None,
            semaphore,
            credentials,
        );
        let report_types_fut = fetch_json::<Vec<String>>(
            Method::GET,
            &report_types,
            None,
            None,
            semaphore,
            credentials,
        );
        // All five requests run concurrently; the first error aborts the lot
        let (
            categories,
            loaders,
            game_versions,
            donation_platforms,
            report_types,
        ) = tokio::try_join!(
            categories_fut,
            loaders_fut,
            game_versions_fut,
            donation_platforms_fut,
            report_types_fut
        )?;
        Ok(Self {
            categories,
            loaders,
            game_versions,
            donation_platforms,
            report_types,
        })
    }
}
/// A project category tag as returned by the Modrinth tag API.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Category {
    pub name: String,
    pub project_type: String,
    pub header: String,
    // Icon is modeled as a path — presumably a path/URL to the icon asset;
    // TODO confirm against the API response shape
    pub icon: PathBuf,
}
/// A mod loader tag (e.g. its name, icon, and which project types it supports).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Loader {
    pub name: String,
    pub icon: PathBuf,
    pub supported_project_types: Vec<String>,
}
/// A donation platform tag: a short identifier plus a display name.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DonationPlatform {
    pub short: String,
    pub name: String,
}
/// A Minecraft game-version tag from the Modrinth tag API.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GameVersion {
    pub version: String,
    pub version_type: String,
    pub date: String,
    pub major: bool,
}

View File

@@ -1,23 +1,22 @@
//! Functions for fetching infromation from the Internet
use crate::event::emit::emit_loading;
use crate::event::LoadingBarId;
use crate::state::CredentialsStore;
use bytes::Bytes;
use lazy_static::lazy_static;
use reqwest::Method;
use serde::de::DeserializeOwned;
use std::ffi::OsStr;
use std::path::{Path, PathBuf};
use std::time::{self, Duration};
use tokio::sync::{RwLock, Semaphore};
use std::time::{self};
use tokio::sync::Semaphore;
use tokio::{fs::File, io::AsyncWriteExt};
use super::io::{self, IOError};
#[derive(Debug)]
pub struct IoSemaphore(pub RwLock<Semaphore>);
pub struct IoSemaphore(pub Semaphore);
#[derive(Debug)]
pub struct FetchSemaphore(pub RwLock<Semaphore>);
pub struct FetchSemaphore(pub Semaphore);
lazy_static! {
pub static ref REQWEST_CLIENT: reqwest::Client = {
@@ -42,19 +41,10 @@ pub async fn fetch(
url: &str,
sha1: Option<&str>,
semaphore: &FetchSemaphore,
credentials: &CredentialsStore,
exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
) -> crate::Result<Bytes> {
fetch_advanced(
Method::GET,
url,
sha1,
None,
None,
None,
semaphore,
credentials,
)
.await
fetch_advanced(Method::GET, url, sha1, None, None, None, semaphore, exec)
.await
}
#[tracing::instrument(skip(json_body, semaphore))]
@@ -64,20 +54,13 @@ pub async fn fetch_json<T>(
sha1: Option<&str>,
json_body: Option<serde_json::Value>,
semaphore: &FetchSemaphore,
credentials: &CredentialsStore,
exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
) -> crate::Result<T>
where
T: DeserializeOwned,
{
let result = fetch_advanced(
method,
url,
sha1,
json_body,
None,
None,
semaphore,
credentials,
method, url, sha1, json_body, None, None, semaphore, exec,
)
.await?;
let value = serde_json::from_slice(&result)?;
@@ -86,7 +69,6 @@ where
/// Downloads a file with retry and checksum functionality
#[tracing::instrument(skip(json_body, semaphore))]
#[theseus_macros::debug_pin]
#[allow(clippy::too_many_arguments)]
pub async fn fetch_advanced(
method: Method,
@@ -96,10 +78,21 @@ pub async fn fetch_advanced(
header: Option<(&str, &str)>,
loading_bar: Option<(&LoadingBarId, f64)>,
semaphore: &FetchSemaphore,
credentials: &CredentialsStore,
exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
) -> crate::Result<Bytes> {
let io_semaphore = semaphore.0.read().await;
let _permit = io_semaphore.acquire().await?;
let _permit = semaphore.0.acquire().await?;
let creds = if !header
.as_ref()
.map(|x| &*x.0.to_lowercase() == "authorization")
.unwrap_or(false)
&& (url.starts_with("https://cdn.modrinth.com")
|| url.starts_with("https://api.modrinth.com"))
{
crate::state::ModrinthCredentials::get_active(exec).await?
} else {
None
};
for attempt in 1..=(FETCH_ATTEMPTS + 1) {
let mut req = REQWEST_CLIENT.request(method.clone(), url);
@@ -112,10 +105,8 @@ pub async fn fetch_advanced(
req = req.header(header.0, header.1);
}
if url.starts_with("https://cdn.modrinth.com") {
if let Some(creds) = &credentials.0 {
req = req.header("Authorization", &creds.session);
}
if let Some(ref creds) = creds {
req = req.header("Authorization", &creds.session);
}
let result = req.send().await;
@@ -187,12 +178,11 @@ pub async fn fetch_advanced(
/// Downloads a file from specified mirrors
#[tracing::instrument(skip(semaphore))]
#[theseus_macros::debug_pin]
pub async fn fetch_mirrors(
mirrors: &[&str],
sha1: Option<&str>,
semaphore: &FetchSemaphore,
credentials: &CredentialsStore,
exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite> + Copy,
) -> crate::Result<Bytes> {
if mirrors.is_empty() {
return Err(crate::ErrorKind::InputError(
@@ -202,7 +192,7 @@ pub async fn fetch_mirrors(
}
for (index, mirror) in mirrors.iter().enumerate() {
let result = fetch(mirror, sha1, semaphore, credentials).await;
let result = fetch(mirror, sha1, semaphore, exec).await;
if result.is_ok() || (result.is_err() && index == (mirrors.len() - 1)) {
return result;
@@ -212,35 +202,24 @@ pub async fn fetch_mirrors(
unreachable!()
}
/// Using labrinth API, checks if an internet response can be found, with a timeout in seconds
#[tracing::instrument]
#[theseus_macros::debug_pin]
pub async fn check_internet(timeout: u64) -> bool {
REQWEST_CLIENT
.get("https://launcher-files.modrinth.com/detect.txt")
.timeout(Duration::from_secs(timeout))
.send()
.await
.is_ok()
}
/// Posts a JSON to a URL
#[tracing::instrument(skip(json_body, semaphore))]
#[theseus_macros::debug_pin]
pub async fn post_json<T>(
url: &str,
json_body: serde_json::Value,
semaphore: &FetchSemaphore,
credentials: &CredentialsStore,
exec: impl sqlx::Executor<'_, Database = sqlx::Sqlite>,
) -> crate::Result<T>
where
T: DeserializeOwned,
{
let io_semaphore = semaphore.0.read().await;
let _permit = io_semaphore.acquire().await?;
let _permit = semaphore.0.acquire().await?;
let mut req = REQWEST_CLIENT.post(url).json(&json_body);
if let Some(creds) = &credentials.0 {
if let Some(creds) =
crate::state::ModrinthCredentials::get_active(exec).await?
{
req = req.header("Authorization", &creds.session);
}
@@ -257,8 +236,7 @@ pub async fn read_json<T>(
where
T: DeserializeOwned,
{
let io_semaphore = semaphore.0.read().await;
let _permit = io_semaphore.acquire().await?;
let _permit = semaphore.0.acquire().await?;
let json = io::read(path).await?;
let json = serde_json::from_slice::<T>(&json)?;
@@ -272,8 +250,7 @@ pub async fn write<'a>(
bytes: &[u8],
semaphore: &IoSemaphore,
) -> crate::Result<()> {
let io_semaphore = semaphore.0.read().await;
let _permit = io_semaphore.acquire().await?;
let _permit = semaphore.0.acquire().await?;
if let Some(parent) = path.parent() {
io::create_dir_all(parent).await?;
@@ -297,8 +274,7 @@ pub async fn copy(
let src: &Path = src.as_ref();
let dest = dest.as_ref();
let io_semaphore = semaphore.0.read().await;
let _permit = io_semaphore.acquire().await?;
let _permit = semaphore.0.acquire().await?;
if let Some(parent) = dest.parent() {
io::create_dir_all(parent).await?;
@@ -335,7 +311,7 @@ pub async fn write_cached_icon(
Ok(path)
}
async fn sha1_async(bytes: Bytes) -> crate::Result<String> {
pub async fn sha1_async(bytes: Bytes) -> crate::Result<String> {
let hash = tokio::task::spawn_blocking(move || {
sha1_smol::Sha1::from(bytes).hexdigest()
})

View File

@@ -1,6 +1,6 @@
use super::io;
use crate::state::JavaVersion;
use futures::prelude::*;
use serde::{Deserialize, Serialize};
use std::env;
use std::path::PathBuf;
use std::process::Command;
@@ -14,13 +14,6 @@ use winreg::{
RegKey,
};
#[derive(Debug, PartialEq, Eq, Hash, Serialize, Deserialize, Clone)]
pub struct JavaVersion {
pub path: String,
pub version: String,
pub architecture: String,
}
// Entrypoint function (Windows)
// Returns a Vec of unique JavaVersions from the PATH, Windows Registry Keys and common Java locations
#[cfg(target_os = "windows")]
@@ -190,14 +183,14 @@ pub async fn get_all_jre() -> Result<Vec<JavaVersion>, JREError> {
// Gets all JREs from the PATH env variable
#[tracing::instrument]
#[theseus_macros::debug_pin]
async fn get_all_autoinstalled_jre_path() -> Result<HashSet<PathBuf>, JREError>
{
Box::pin(async move {
let state = State::get().await.map_err(|_| JREError::StateError)?;
let mut jre_paths = HashSet::new();
let base_path = state.directories.java_versions_dir().await;
let base_path = state.directories.java_versions_dir();
if base_path.is_dir() {
if let Ok(dir) = std::fs::read_dir(base_path) {
@@ -262,7 +255,7 @@ pub async fn check_java_at_filepaths(
// For example filepath 'path', attempt to resolve it and get a Java version at this path
// If no such path exists, or no such valid java at this path exists, returns None
#[tracing::instrument]
#[theseus_macros::debug_pin]
pub async fn check_java_at_filepath(path: &Path) -> Option<JavaVersion> {
// Attempt to canonicalize the potential java filepath
// If it fails, this path does not exist and None is returned (no Java here)
@@ -318,12 +311,17 @@ pub async fn check_java_at_filepath(path: &Path) -> Option<JavaVersion> {
// Extract version info from it
if let Some(arch) = java_arch {
if let Some(version) = java_version {
let path = java.to_string_lossy().to_string();
return Some(JavaVersion {
path,
version: version.to_string(),
architecture: arch.to_string(),
});
if let Ok((_, major_version)) =
extract_java_majorminor_version(version)
{
let path = java.to_string_lossy().to_string();
return Some(JavaVersion {
major_version,
path,
version: version.to_string(),
architecture: arch.to_string(),
});
}
}
}
None

View File

@@ -1,11 +0,0 @@
[package]
name = "theseus_macros"
version = "0.1.0"
edition = "2021"
# This crate compiles as a procedural-macro library (see src for `debug_pin`)
[lib]
proc-macro = true
# syn/quote are the standard toolkit for parsing and emitting Rust tokens
[dependencies]
syn = { version = "2.0.58", features = ["full"] }
quote = "1.0"

View File

@@ -1,9 +0,0 @@
{
"name": "@modrinth/app-macros",
"scripts": {
"build": "cargo build --release",
"lint": "cargo fmt --check && cargo clippy -- -D warnings",
"fix": "cargo fmt && cargo clippy --fix",
"test": "cargo test"
}
}

View File

@@ -1,34 +0,0 @@
extern crate proc_macro;
use proc_macro::TokenStream;
use quote::quote;
use syn::{parse_macro_input, ItemFn};
#[proc_macro_attribute]
pub fn debug_pin(_attr: TokenStream, item: TokenStream) -> TokenStream {
    // Attribute for async fns: in debug builds the body is rewritten to
    // `Box::pin(async move { ... }).await`, moving the generated future to
    // the heap; in release builds the function is emitted unchanged.
    // Presumably this keeps large debug-mode futures off the stack — confirm.
    let input = parse_macro_input!(item as ItemFn);
    let attrs = &input.attrs;
    let vis = &input.vis;
    let sig = &input.sig;
    let body = &input.block;
    // NOTE(review): cfg(debug_assertions) here is evaluated when this
    // proc-macro crate is compiled, not the annotated crate; the two build
    // profiles usually match, but can diverge (e.g. profile overrides).
    #[cfg(debug_assertions)]
    let result = quote! {
        #(#attrs)*
        #vis #sig {
            Box::pin(async move {
                #body
            }).await
        }
    };
    #[cfg(not(debug_assertions))]
    let result = quote! {
        #(#attrs)*
        #vis #sig {
            #body
        }
    };
    TokenStream::from(result)
}

View File

@@ -442,7 +442,6 @@ a,
display: flex;
align-items: center;
justify-content: center;
cursor: pointer;
width: fit-content;
height: fit-content;

View File

@@ -48,6 +48,12 @@ const props = defineProps({
type: Boolean,
default: true,
},
onHide: {
type: Function,
default() {
return () => {}
},
},
})
const shown = ref(false)
@@ -61,6 +67,7 @@ function show() {
}
function hide() {
props.onHide()
actuallyShown.value = false
setTimeout(() => {
shown.value = false