diff --git a/.dockerignore b/.dockerignore
new file mode 120000
index 00000000..3e4e48b0
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1 @@
+.gitignore
\ No newline at end of file
diff --git a/.github/workflows/astralrinth-build.yml b/.github/workflows/astralrinth-build.yml
index b5ede000..c5a263d7 100644
--- a/.github/workflows/astralrinth-build.yml
+++ b/.github/workflows/astralrinth-build.yml
@@ -16,6 +16,7 @@ on:
       - 'packages/assets/**'
       - 'packages/ui/**'
       - 'packages/utils/**'
+  workflow_dispatch:
 
 jobs:
   build:
@@ -24,12 +25,12 @@ jobs:
       fail-fast: false
       matrix:
         # platform: [macos-latest, windows-latest, ubuntu-latest]
-        platform: [ubuntu-latest]
+        platform: [windows-latest, ubuntu-latest]
         include:
           # - platform: macos-latest
           #   artifact-target-name: universal-apple-darwin
-          # - platform: windows-latest
-          #   artifact-target-name: x86_64-pc-windows-msvc
+          - platform: windows-latest
+            artifact-target-name: x86_64-pc-windows-msvc
           - platform: ubuntu-latest
             artifact-target-name: x86_64-unknown-linux-gnu
 
@@ -41,6 +42,35 @@ jobs:
         with:
           fetch-depth: 2
 
+      - name: 🔍 Validate Git config does not introduce CRLF
+        shell: bash
+        run: |
+          echo "🔍 Checking Git config for CRLF settings..."
+
+          autocrlf=$(git config --get core.autocrlf || echo "unset")
+          eol_setting=$(git config --get core.eol || echo "unset")
+
+          echo "core.autocrlf = $autocrlf"
+          echo "core.eol = $eol_setting"
+
+          if [ "$autocrlf" = "true" ]; then
+            echo "⚠️ WARNING: core.autocrlf is set to 'true'. Consider setting it to 'input' or 'false'."
+          fi
+
+          if [ "$eol_setting" = "crlf" ]; then
+            echo "⚠️ WARNING: core.eol is set to 'crlf'. Consider unsetting it or setting to 'lf'."
+          fi
+
+      - name: 🔍 Check migration files line endings (LF only)
+        shell: bash
+        run: |
+          echo "🔍 Scanning migration SQL files for CR characters (\\r)..."
+          if grep -Iq $'\r' packages/app-lib/migrations/*.sql; then
+            echo "❌ ERROR: Some migration files contain CR (\\r) characters — expected only LF line endings."
+            exit 1
+          fi
+          echo "✅ All migration files use LF line endings"
+
       - name: 🧰 Setup Rust toolchain
         uses: actions-rust-lang/setup-rust-toolchain@v1
         with:
@@ -73,11 +103,11 @@ jobs:
       - name: 🧰 Install dependencies
         run: pnpm install
 
-      # - name: ✍️ Set up Windows code signing (jsign)
-      #   if: matrix.platform == 'windows-latest' && env.SIGN_WINDOWS_BINARIES == 'true'
-      #   shell: bash
-      #   run: |
-      #     choco install jsign --ignore-dependencies
+      - name: ✍️ Set up Windows code signing (jsign)
+        if: matrix.platform == 'windows-latest' && env.SIGN_WINDOWS_BINARIES == 'true'
+        shell: bash
+        run: |
+          choco install jsign --ignore-dependencies
 
       - name: 🗑️ Clean up cached bundles
         shell: bash
@@ -99,15 +129,15 @@ jobs:
           TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_PRIVATE_KEY }}
           TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_KEY_PASSWORD }}
 
-      # - name: 🔨 Build Windows app
-      #   if: matrix.platform == 'windows-latest'
-      #   shell: pwsh
-      #   run: |
-      #     $env:JAVA_HOME = "$env:JAVA_HOME_11_X64"
-      #     pnpm --filter=@modrinth/app run tauri build --config tauri-release.conf.json --verbose --bundles 'nsis'
-      #   env:
-      #     TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_PRIVATE_KEY }}
-      #     TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_KEY_PASSWORD }}
+      - name: 🔨 Build Windows app
+        if: matrix.platform == 'windows-latest'
+        shell: pwsh
+        run: |
+          $env:JAVA_HOME = "$env:JAVA_HOME_11_X64"
+          pnpm --filter=@modrinth/app run tauri build --config tauri-release.conf.json --verbose --bundles 'nsis'
+        env:
+          TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_PRIVATE_KEY }}
+          TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_KEY_PASSWORD }}
 
       - name: 📤 Upload app bundles
         uses: actions/upload-artifact@v3
diff --git a/apps/app-frontend/src/App.vue b/apps/app-frontend/src/App.vue
index 913d0fc1..c837744c 100644
--- a/apps/app-frontend/src/App.vue
+++ b/apps/app-frontend/src/App.vue
@@ -42,7 +42,7 @@ import ModrinthLoadingIndicator from '@/components/LoadingIndicatorBar.vue'
 import { handleError, useNotifications } from '@/store/notifications.js'
 import { command_listener, warning_listener } from '@/helpers/events.js'
 import { type } from '@tauri-apps/plugin-os'
-import { getOS, isDev, restartApp } from '@/helpers/utils.js'
+import { getOS, isDev } from '@/helpers/utils.js'
 import { debugAnalytics, initAnalytics, optOutAnalytics, trackEvent } from '@/helpers/analytics'
 import { getCurrentWindow } from '@tauri-apps/api/window'
 import { getVersion } from '@tauri-apps/api/app'
@@ -72,6 +75,9 @@ import QuickInstanceSwitcher from '@/components/ui/QuickInstanceSwitcher.vue'
 import { get_available_capes, get_available_skins } from './helpers/skins'
 import { generateSkinPreviews } from './helpers/rendering/batch-skin-renderer'
 
+// [AR] Feature
+import { getRemote, updateState } from '@/helpers/update.js'
+
 const themeStore = useTheming()
 
 const news = ref([])
@@ -99,6 +102,7 @@ const isMaximized = ref(false)
 
 onMounted(async () => {
   await useCheckDisableMouseover()
+  await getRemote(false) // [AR] Check for updates
 
   document.querySelector('body').addEventListener('click', handleClick)
   document.querySelector('body').addEventListener('auxclick', handleAuxClick)
@@ -161,11 +165,11 @@ async function setupApp() {
   initAnalytics()
 
   if (!telemetry) {
-    console.info("[AR] Telemetry disabled by default (Hard patched).")
+    console.info("[AR] • Telemetry disabled by default (Hard patched).")
     optOutAnalytics()
   }
 
   if (!personalized_ads) {
-    console.info("[AR] Personalized ads disabled by default (Hard patched).")
+    console.info("[AR] • Personalized ads disabled by default (Hard patched).")
   }
   if (dev) debugAnalytics()
@@ -188,7 +192,7 @@ async function setupApp() {
       }),
     )
 
-  // Patched by AstralRinth
+  /// [AR] Patch
   // useFetch(
   //   `https://api.modrinth.com/appCriticalAnnouncement.json?version=${version}`,
   //   'criticalAnnouncements',
@@ -465,12 +469,20 @@ function handleAuxClick(e) {
- + diff --git a/apps/app-frontend/src/helpers/jre.js b/apps/app-frontend/src/helpers/jre.js index 0814e9b0..207c0258 100644 --- a/apps/app-frontend/src/helpers/jre.js +++ b/apps/app-frontend/src/helpers/jre.js @@ -36,8 +36,8 @@ export async function get_jre(path) { // Tests JRE version by running 'java -version' on it. // Returns true if the version is valid, and matches given (after extraction) -export async function test_jre(path, majorVersion, minorVersion) { - return await invoke('plugin:jre|jre_test_jre', { path, majorVersion, minorVersion }) +export async function test_jre(path, majorVersion) { + return await invoke('plugin:jre|jre_test_jre', { path, majorVersion }) } // Automatically installs specified java version diff --git a/apps/app-frontend/src/helpers/rendering/batch-skin-renderer.ts b/apps/app-frontend/src/helpers/rendering/batch-skin-renderer.ts index 6495837d..fa02ad98 100644 --- a/apps/app-frontend/src/helpers/rendering/batch-skin-renderer.ts +++ b/apps/app-frontend/src/helpers/rendering/batch-skin-renderer.ts @@ -2,25 +2,40 @@ import * as THREE from 'three' import type { Skin, Cape } from '../skins' import { get_normalized_skin_texture, determineModelType } from '../skins' import { reactive } from 'vue' -import { setupSkinModel, disposeCaches } from '@modrinth/utils' +import { setupSkinModel, disposeCaches, loadTexture, applyCapeTexture } from '@modrinth/utils' import { skinPreviewStorage } from '../storage/skin-preview-storage' -import { CapeModel, ClassicPlayerModel, SlimPlayerModel } from '@modrinth/assets' +import { headStorage } from '../storage/head-storage' +import { ClassicPlayerModel, SlimPlayerModel } from '@modrinth/assets' export interface RenderResult { forwards: string backwards: string } +export interface RawRenderResult { + forwards: Blob + backwards: Blob +} + class BatchSkinRenderer { - private renderer: THREE.WebGLRenderer - private readonly scene: THREE.Scene - private readonly camera: THREE.PerspectiveCamera + private renderer: THREE.WebGLRenderer | null = null + private scene: THREE.Scene | null = null + private camera: THREE.PerspectiveCamera | null = null private currentModel: THREE.Group | null = null + private readonly width: number + private readonly height: number constructor(width: number = 360, height: number = 504) { + this.width = width + this.height = height + } + + private initializeRenderer(): void { + if (this.renderer) return + const canvas = document.createElement('canvas') - canvas.width = width - canvas.height = height + canvas.width = this.width + canvas.height = this.height this.renderer = new THREE.WebGLRenderer({ canvas: canvas, @@ -33,10 +48,10 @@ class BatchSkinRenderer { this.renderer.toneMapping = THREE.NoToneMapping this.renderer.toneMappingExposure = 10.0 this.renderer.setClearColor(0x000000, 0) - this.renderer.setSize(width, height) + this.renderer.setSize(this.width, this.height) this.scene = new THREE.Scene() - this.camera = new THREE.PerspectiveCamera(20, width / height, 0.4, 1000) + this.camera = new THREE.PerspectiveCamera(20, this.width / this.height, 0.4, 1000) const ambientLight = new THREE.AmbientLight(0xffffff, 2) const directionalLight = new THREE.DirectionalLight(0xffffff, 1.2) @@ -50,9 +65,12 @@ class BatchSkinRenderer { textureUrl: string, modelUrl: string, capeUrl?: string, - capeModelUrl?: string, - ): Promise { - await this.setupModel(modelUrl, textureUrl, capeModelUrl, capeUrl) + ): Promise { + this.initializeRenderer() + + this.clearScene() + + await this.setupModel(modelUrl, textureUrl, capeUrl) const 
headPart = this.currentModel!.getObjectByName('Head') let lookAtTarget: [number, number, number] @@ -77,35 +95,32 @@ class BatchSkinRenderer { private async renderView( cameraPosition: [number, number, number], lookAtPosition: [number, number, number], - ): Promise { + ): Promise { + if (!this.camera || !this.renderer || !this.scene) { + throw new Error('Renderer not initialized') + } + this.camera.position.set(...cameraPosition) this.camera.lookAt(...lookAtPosition) this.renderer.render(this.scene, this.camera) - return new Promise((resolve, reject) => { - this.renderer.domElement.toBlob((blob) => { - if (blob) { - const url = URL.createObjectURL(blob) - resolve(url) - } else { - reject(new Error('Failed to create blob from canvas')) - } - }, 'image/png') - }) + const dataUrl = this.renderer.domElement.toDataURL('image/webp', 0.9) + const response = await fetch(dataUrl) + return await response.blob() } - private async setupModel( - modelUrl: string, - textureUrl: string, - capeModelUrl?: string, - capeUrl?: string, - ): Promise { - if (this.currentModel) { - this.scene.remove(this.currentModel) + private async setupModel(modelUrl: string, textureUrl: string, capeUrl?: string): Promise { + if (!this.scene) { + throw new Error('Renderer not initialized') } - const { model } = await setupSkinModel(modelUrl, textureUrl, capeModelUrl, capeUrl) + const { model } = await setupSkinModel(modelUrl, textureUrl) + + if (capeUrl) { + const capeTexture = await loadTexture(capeUrl) + applyCapeTexture(model, capeTexture) + } const group = new THREE.Group() group.add(model) @@ -116,8 +131,39 @@ class BatchSkinRenderer { this.currentModel = group } + private clearScene(): void { + if (!this.scene) return + + while (this.scene.children.length > 0) { + const child = this.scene.children[0] + this.scene.remove(child) + + if (child instanceof THREE.Mesh) { + if (child.geometry) child.geometry.dispose() + if (child.material) { + if (Array.isArray(child.material)) { + child.material.forEach((material) => material.dispose()) + } else { + child.material.dispose() + } + } + } + } + + const ambientLight = new THREE.AmbientLight(0xffffff, 2) + const directionalLight = new THREE.DirectionalLight(0xffffff, 1.2) + directionalLight.castShadow = true + directionalLight.position.set(2, 4, 3) + this.scene.add(ambientLight) + this.scene.add(directionalLight) + + this.currentModel = null + } + public dispose(): void { - this.renderer.dispose() + if (this.renderer) { + this.renderer.dispose() + } disposeCaches() } } @@ -133,10 +179,25 @@ function getModelUrlForVariant(variant: string): string { } } -export const map = reactive(new Map()) -export const headMap = reactive(new Map()) +export const skinBlobUrlMap = reactive(new Map()) +export const headBlobUrlMap = reactive(new Map()) const DEBUG_MODE = false +let sharedRenderer: BatchSkinRenderer | null = null +function getSharedRenderer(): BatchSkinRenderer { + if (!sharedRenderer) { + sharedRenderer = new BatchSkinRenderer() + } + return sharedRenderer +} + +export function disposeSharedRenderer(): void { + if (sharedRenderer) { + sharedRenderer.dispose() + sharedRenderer = null + } +} + export async function cleanupUnusedPreviews(skins: Skin[]): Promise { const validKeys = new Set() const validHeadKeys = new Set() @@ -150,7 +211,7 @@ export async function cleanupUnusedPreviews(skins: Skin[]): Promise { try { await skinPreviewStorage.cleanupInvalidKeys(validKeys) - await skinPreviewStorage.cleanupInvalidKeys(validHeadKeys) + await headStorage.cleanupInvalidKeys(validHeadKeys) } 
catch (error) { console.warn('Failed to cleanup unused skin previews:', error) } @@ -229,13 +290,17 @@ export async function generatePlayerHeadBlob(skinUrl: string, size: number = 64) outputCtx.drawImage(hatCanvas, 0, 0, 8, 8, 0, 0, size, size) } - outputCanvas.toBlob((blob) => { - if (blob) { - resolve(blob) - } else { - reject(new Error('Failed to create blob from canvas')) - } - }, 'image/png') + outputCanvas.toBlob( + (blob) => { + if (blob) { + resolve(blob) + } else { + reject(new Error('Failed to create blob from canvas')) + } + }, + 'image/webp', + 0.9, + ) } catch (error) { reject(error) } @@ -252,35 +317,24 @@ export async function generatePlayerHeadBlob(skinUrl: string, size: number = 64) async function generateHeadRender(skin: Skin): Promise { const headKey = `${skin.texture_key}-head` - if (headMap.has(headKey)) { + if (headBlobUrlMap.has(headKey)) { if (DEBUG_MODE) { - const url = headMap.get(headKey)! + const url = headBlobUrlMap.get(headKey)! URL.revokeObjectURL(url) - headMap.delete(headKey) + headBlobUrlMap.delete(headKey) } else { - return headMap.get(headKey)! + return headBlobUrlMap.get(headKey)! } } - try { - const cached = await skinPreviewStorage.retrieve(headKey) - if (cached && typeof cached === 'string') { - headMap.set(headKey, cached) - return cached - } - } catch (error) { - console.warn('Failed to retrieve cached head render:', error) - } - const skinUrl = await get_normalized_skin_texture(skin) const headBlob = await generatePlayerHeadBlob(skinUrl, 64) const headUrl = URL.createObjectURL(headBlob) - headMap.set(headKey, headUrl) + headBlobUrlMap.set(headKey, headUrl) try { - // @ts-expect-error - skinPreviewStorage.store expects a RenderResult, but we are storing a string url. - await skinPreviewStorage.store(headKey, headUrl) + await headStorage.store(headKey, headBlob) } catch (error) { console.warn('Failed to store head render in persistent storage:', error) } @@ -293,30 +347,49 @@ export async function getPlayerHeadUrl(skin: Skin): Promise { } export async function generateSkinPreviews(skins: Skin[], capes: Cape[]): Promise { - const renderer = new BatchSkinRenderer() - try { + const skinKeys = skins.map( + (skin) => `${skin.texture_key}+${skin.variant}+${skin.cape_id ?? 'no-cape'}`, + ) + const headKeys = skins.map((skin) => `${skin.texture_key}-head`) + + const [cachedSkinPreviews, cachedHeadPreviews] = await Promise.all([ + skinPreviewStorage.batchRetrieve(skinKeys), + headStorage.batchRetrieve(headKeys), + ]) + + for (let i = 0; i < skins.length; i++) { + const skinKey = skinKeys[i] + const headKey = headKeys[i] + + const rawCached = cachedSkinPreviews[skinKey] + if (rawCached) { + const cached: RenderResult = { + forwards: URL.createObjectURL(rawCached.forwards), + backwards: URL.createObjectURL(rawCached.backwards), + } + skinBlobUrlMap.set(skinKey, cached) + } + + const cachedHead = cachedHeadPreviews[headKey] + if (cachedHead) { + headBlobUrlMap.set(headKey, URL.createObjectURL(cachedHead)) + } + } + for (const skin of skins) { const key = `${skin.texture_key}+${skin.variant}+${skin.cape_id ?? 'no-cape'}` - if (map.has(key)) { + if (skinBlobUrlMap.has(key)) { if (DEBUG_MODE) { - const result = map.get(key)! + const result = skinBlobUrlMap.get(key)! 
URL.revokeObjectURL(result.forwards) URL.revokeObjectURL(result.backwards) - map.delete(key) + skinBlobUrlMap.delete(key) } else continue } - try { - const cached = await skinPreviewStorage.retrieve(key) - if (cached) { - map.set(key, cached) - continue - } - } catch (error) { - console.warn('Failed to retrieve cached skin preview:', error) - } + const renderer = getSharedRenderer() let variant = skin.variant if (variant === 'UNKNOWN') { @@ -330,25 +403,35 @@ export async function generateSkinPreviews(skins: Skin[], capes: Cape[]): Promis const modelUrl = getModelUrlForVariant(variant) const cape: Cape | undefined = capes.find((_cape) => _cape.id === skin.cape_id) - const renderResult = await renderer.renderSkin( + const rawRenderResult = await renderer.renderSkin( await get_normalized_skin_texture(skin), modelUrl, cape?.texture, - CapeModel, ) - map.set(key, renderResult) + const renderResult: RenderResult = { + forwards: URL.createObjectURL(rawRenderResult.forwards), + backwards: URL.createObjectURL(rawRenderResult.backwards), + } + + skinBlobUrlMap.set(key, renderResult) try { - await skinPreviewStorage.store(key, renderResult) + await skinPreviewStorage.store(key, rawRenderResult) } catch (error) { console.warn('Failed to store skin preview in persistent storage:', error) } - await generateHeadRender(skin) + const headKey = `${skin.texture_key}-head` + if (!headBlobUrlMap.has(headKey)) { + await generateHeadRender(skin) + } } } finally { - renderer.dispose() + disposeSharedRenderer() await cleanupUnusedPreviews(skins) + + await skinPreviewStorage.debugCalculateStorage() + await headStorage.debugCalculateStorage() } } diff --git a/apps/app-frontend/src/helpers/storage/head-storage.ts b/apps/app-frontend/src/helpers/storage/head-storage.ts new file mode 100644 index 00000000..e088f575 --- /dev/null +++ b/apps/app-frontend/src/helpers/storage/head-storage.ts @@ -0,0 +1,229 @@ +interface StoredHead { + blob: Blob + timestamp: number +} + +export class HeadStorage { + private dbName = 'head-storage' + private version = 1 + private db: IDBDatabase | null = null + + async init(): Promise { + return new Promise((resolve, reject) => { + const request = indexedDB.open(this.dbName, this.version) + + request.onerror = () => reject(request.error) + request.onsuccess = () => { + this.db = request.result + resolve() + } + + request.onupgradeneeded = () => { + const db = request.result + if (!db.objectStoreNames.contains('heads')) { + db.createObjectStore('heads') + } + } + }) + } + + async store(key: string, blob: Blob): Promise { + if (!this.db) await this.init() + + const transaction = this.db!.transaction(['heads'], 'readwrite') + const store = transaction.objectStore('heads') + + const storedHead: StoredHead = { + blob, + timestamp: Date.now(), + } + + return new Promise((resolve, reject) => { + const request = store.put(storedHead, key) + + request.onsuccess = () => resolve() + request.onerror = () => reject(request.error) + }) + } + + async retrieve(key: string): Promise { + if (!this.db) await this.init() + + const transaction = this.db!.transaction(['heads'], 'readonly') + const store = transaction.objectStore('heads') + + return new Promise((resolve, reject) => { + const request = store.get(key) + + request.onsuccess = () => { + const result = request.result as StoredHead | undefined + + if (!result) { + resolve(null) + return + } + + const url = URL.createObjectURL(result.blob) + resolve(url) + } + request.onerror = () => reject(request.error) + }) + } + + async batchRetrieve(keys: 
string[]): Promise> { + if (!this.db) await this.init() + + const transaction = this.db!.transaction(['heads'], 'readonly') + const store = transaction.objectStore('heads') + const results: Record = {} + + return new Promise((resolve, _reject) => { + let completedRequests = 0 + + if (keys.length === 0) { + resolve(results) + return + } + + for (const key of keys) { + const request = store.get(key) + + request.onsuccess = () => { + const result = request.result as StoredHead | undefined + + if (result) { + results[key] = result.blob + } else { + results[key] = null + } + + completedRequests++ + if (completedRequests === keys.length) { + resolve(results) + } + } + + request.onerror = () => { + results[key] = null + completedRequests++ + if (completedRequests === keys.length) { + resolve(results) + } + } + } + }) + } + + async cleanupInvalidKeys(validKeys: Set): Promise { + if (!this.db) await this.init() + + const transaction = this.db!.transaction(['heads'], 'readwrite') + const store = transaction.objectStore('heads') + let deletedCount = 0 + + return new Promise((resolve, reject) => { + const request = store.openCursor() + + request.onsuccess = (event) => { + const cursor = (event.target as IDBRequest).result + + if (cursor) { + const key = cursor.primaryKey as string + + if (!validKeys.has(key)) { + const deleteRequest = cursor.delete() + deleteRequest.onsuccess = () => { + deletedCount++ + } + deleteRequest.onerror = () => { + console.warn('Failed to delete invalid head entry:', key) + } + } + + cursor.continue() + } else { + resolve(deletedCount) + } + } + + request.onerror = () => reject(request.error) + }) + } + + async debugCalculateStorage(): Promise { + if (!this.db) await this.init() + + const transaction = this.db!.transaction(['heads'], 'readonly') + const store = transaction.objectStore('heads') + + let totalSize = 0 + let count = 0 + const entries: Array<{ key: string; size: number }> = [] + + return new Promise((resolve, reject) => { + const request = store.openCursor() + + request.onsuccess = (event) => { + const cursor = (event.target as IDBRequest).result + + if (cursor) { + const key = cursor.primaryKey as string + const value = cursor.value as StoredHead + + const entrySize = value.blob.size + totalSize += entrySize + count++ + + entries.push({ + key, + size: entrySize, + }) + + cursor.continue() + } else { + console.group('🗄️ Head Storage Debug Info') + console.log(`Total entries: ${count}`) + console.log(`Total size: ${(totalSize / 1024 / 1024).toFixed(2)} MB`) + console.log( + `Average size per entry: ${count > 0 ? 
(totalSize / count / 1024).toFixed(2) : 0} KB`, + ) + + if (entries.length > 0) { + const sortedEntries = entries.sort((a, b) => b.size - a.size) + console.log( + 'Largest entry:', + sortedEntries[0].key, + '(' + (sortedEntries[0].size / 1024).toFixed(2) + ' KB)', + ) + console.log( + 'Smallest entry:', + sortedEntries[sortedEntries.length - 1].key, + '(' + (sortedEntries[sortedEntries.length - 1].size / 1024).toFixed(2) + ' KB)', + ) + } + + console.groupEnd() + resolve() + } + } + + request.onerror = () => reject(request.error) + }) + } + + async clearAll(): Promise { + if (!this.db) await this.init() + + const transaction = this.db!.transaction(['heads'], 'readwrite') + const store = transaction.objectStore('heads') + + return new Promise((resolve, reject) => { + const request = store.clear() + + request.onsuccess = () => resolve() + request.onerror = () => reject(request.error) + }) + } +} + +export const headStorage = new HeadStorage() diff --git a/apps/app-frontend/src/helpers/storage/skin-preview-storage.ts b/apps/app-frontend/src/helpers/storage/skin-preview-storage.ts index 2e499085..bcf27c8c 100644 --- a/apps/app-frontend/src/helpers/storage/skin-preview-storage.ts +++ b/apps/app-frontend/src/helpers/storage/skin-preview-storage.ts @@ -1,4 +1,4 @@ -import type { RenderResult } from '../rendering/batch-skin-renderer' +import type { RawRenderResult } from '../rendering/batch-skin-renderer' interface StoredPreview { forwards: Blob @@ -30,18 +30,15 @@ export class SkinPreviewStorage { }) } - async store(key: string, result: RenderResult): Promise { + async store(key: string, result: RawRenderResult): Promise { if (!this.db) await this.init() - const forwardsBlob = await fetch(result.forwards).then((r) => r.blob()) - const backwardsBlob = await fetch(result.backwards).then((r) => r.blob()) - const transaction = this.db!.transaction(['previews'], 'readwrite') const store = transaction.objectStore('previews') const storedPreview: StoredPreview = { - forwards: forwardsBlob, - backwards: backwardsBlob, + forwards: result.forwards, + backwards: result.backwards, timestamp: Date.now(), } @@ -53,7 +50,7 @@ export class SkinPreviewStorage { }) } - async retrieve(key: string): Promise { + async retrieve(key: string): Promise { if (!this.db) await this.init() const transaction = this.db!.transaction(['previews'], 'readonly') @@ -70,14 +67,56 @@ export class SkinPreviewStorage { return } - const forwards = URL.createObjectURL(result.forwards) - const backwards = URL.createObjectURL(result.backwards) - resolve({ forwards, backwards }) + resolve({ forwards: result.forwards, backwards: result.backwards }) } request.onerror = () => reject(request.error) }) } + async batchRetrieve(keys: string[]): Promise> { + if (!this.db) await this.init() + + const transaction = this.db!.transaction(['previews'], 'readonly') + const store = transaction.objectStore('previews') + const results: Record = {} + + return new Promise((resolve, _reject) => { + let completedRequests = 0 + + if (keys.length === 0) { + resolve(results) + return + } + + for (const key of keys) { + const request = store.get(key) + + request.onsuccess = () => { + const result = request.result as StoredPreview | undefined + + if (result) { + results[key] = { forwards: result.forwards, backwards: result.backwards } + } else { + results[key] = null + } + + completedRequests++ + if (completedRequests === keys.length) { + resolve(results) + } + } + + request.onerror = () => { + results[key] = null + completedRequests++ + if (completedRequests === 
keys.length) { + resolve(results) + } + } + } + }) + } + async cleanupInvalidKeys(validKeys: Set): Promise { if (!this.db) await this.init() @@ -113,6 +152,67 @@ export class SkinPreviewStorage { request.onerror = () => reject(request.error) }) } + + async debugCalculateStorage(): Promise { + if (!this.db) await this.init() + + const transaction = this.db!.transaction(['previews'], 'readonly') + const store = transaction.objectStore('previews') + + let totalSize = 0 + let count = 0 + const entries: Array<{ key: string; size: number }> = [] + + return new Promise((resolve, reject) => { + const request = store.openCursor() + + request.onsuccess = (event) => { + const cursor = (event.target as IDBRequest).result + + if (cursor) { + const key = cursor.primaryKey as string + const value = cursor.value as StoredPreview + + const entrySize = value.forwards.size + value.backwards.size + totalSize += entrySize + count++ + + entries.push({ + key, + size: entrySize, + }) + + cursor.continue() + } else { + console.group('🗄️ Skin Preview Storage Debug Info') + console.log(`Total entries: ${count}`) + console.log(`Total size: ${(totalSize / 1024 / 1024).toFixed(2)} MB`) + console.log( + `Average size per entry: ${count > 0 ? (totalSize / count / 1024).toFixed(2) : 0} KB`, + ) + + if (entries.length > 0) { + const sortedEntries = entries.sort((a, b) => b.size - a.size) + console.log( + 'Largest entry:', + sortedEntries[0].key, + '(' + (sortedEntries[0].size / 1024).toFixed(2) + ' KB)', + ) + console.log( + 'Smallest entry:', + sortedEntries[sortedEntries.length - 1].key, + '(' + (sortedEntries[sortedEntries.length - 1].size / 1024).toFixed(2) + ' KB)', + ) + } + + console.groupEnd() + resolve() + } + } + + request.onerror = () => reject(request.error) + }) + } } export const skinPreviewStorage = new SkinPreviewStorage() diff --git a/apps/app-frontend/src/helpers/update.js b/apps/app-frontend/src/helpers/update.js index 6f3d87a1..cea6c6bd 100644 --- a/apps/app-frontend/src/helpers/update.js +++ b/apps/app-frontend/src/helpers/update.js @@ -11,7 +11,7 @@ const releaseLink = `https://git.astralium.su/api/v1/repos/didirus/AstralRinth/r const failedFetch = [`Failed to fetch remote releases:`, `Failed to fetch remote commits:`] const osList = ['macos', 'windows', 'linux'] -const macExtensionList = ['.app', '.dmg'] +const macExtensionList = ['.dmg', '.pkg'] const windowsExtensionList = ['.exe', '.msi'] const blacklistPrefixes = [ diff --git a/apps/app-frontend/src/helpers/utils.js b/apps/app-frontend/src/helpers/utils.js index a6353504..ac950e17 100644 --- a/apps/app-frontend/src/helpers/utils.js +++ b/apps/app-frontend/src/helpers/utils.js @@ -10,11 +10,17 @@ export async function getOS() { return await invoke('plugin:utils|get_os') } +// [AR] Feature export async function getArtifact(downloadurl, filename, ostype, autoupdatesupported) { console.log('Downloading build', downloadurl, filename, ostype, autoupdatesupported) return await invoke('plugin:utils|get_artifact', { downloadurl, filename, ostype, autoupdatesupported }) } +// [AR] Patch fix +export async function applyMigrationFix(eol) { + return await invoke('plugin:utils|apply_migration_fix', { eol }) +} + export async function openPath(path) { return await invoke('plugin:utils|open_path', { path }) } diff --git a/apps/app-frontend/src/pages/Skins.vue b/apps/app-frontend/src/pages/Skins.vue index c2760e7a..8f275494 100644 --- a/apps/app-frontend/src/pages/Skins.vue +++ b/apps/app-frontend/src/pages/Skins.vue @@ -38,7 +38,7 @@ import { import { get as 
getSettings } from '@/helpers/settings.ts' import { get_default_user, login as login_flow, users } from '@/helpers/auth' import type { RenderResult } from '@/helpers/rendering/batch-skin-renderer.ts' -import { generateSkinPreviews, map } from '@/helpers/rendering/batch-skin-renderer.ts' +import { generateSkinPreviews, skinBlobUrlMap } from '@/helpers/rendering/batch-skin-renderer.ts' import { handleSevereError } from '@/store/error' import { trackEvent } from '@/helpers/analytics' import type AccountsCard from '@/components/ui/AccountsCard.vue' @@ -215,7 +215,7 @@ async function loadCurrentUser() { function getBakedSkinTextures(skin: Skin): RenderResult | undefined { const key = `${skin.texture_key}+${skin.variant}+${skin.cape_id ?? 'no-cape'}` - return map.get(key) + return skinBlobUrlMap.get(key) } async function login() { diff --git a/apps/app-frontend/src/pages/instance/Logs.vue b/apps/app-frontend/src/pages/instance/Logs.vue index e8750c62..83d0cbe8 100644 --- a/apps/app-frontend/src/pages/instance/Logs.vue +++ b/apps/app-frontend/src/pages/instance/Logs.vue @@ -483,7 +483,7 @@ onUnmounted(() => { display: flex; flex-direction: column; gap: 1rem; - height: calc(100vh - 11rem); + height: 100vh; } .button-row { diff --git a/apps/app/build.rs b/apps/app/build.rs index 59f78131..4942497a 100644 --- a/apps/app/build.rs +++ b/apps/app/build.rs @@ -218,6 +218,7 @@ fn main() { "utils", InlinedPlugin::new() .commands(&[ + "apply_migration_fix", "get_artifact", "get_os", "should_disable_mouseover", diff --git a/apps/app/src/api/utils.rs b/apps/app/src/api/utils.rs index dfe64509..45207915 100644 --- a/apps/app/src/api/utils.rs +++ b/apps/app/src/api/utils.rs @@ -11,10 +11,12 @@ use dashmap::DashMap; use std::path::{Path, PathBuf}; use theseus::prelude::canonicalize; use url::Url; +use theseus::util::utils; pub fn init() -> tauri::plugin::TauriPlugin { tauri::plugin::Builder::new("utils") .invoke_handler(tauri::generate_handler![ + apply_migration_fix, get_artifact, get_os, should_disable_mouseover, @@ -27,9 +29,17 @@ pub fn init() -> tauri::plugin::TauriPlugin { .build() } +/// [AR] Patch fix +#[tauri::command] +pub async fn apply_migration_fix(eol: &str) -> Result { + let result = utils::apply_migration_fix(eol).await?; + Ok(result) +} + +/// [AR] Feature #[tauri::command] pub async fn get_artifact(downloadurl: &str, filename: &str, ostype: &str, autoupdatesupported: bool) -> Result<()> { - theseus::download::init_download(downloadurl, filename, ostype, autoupdatesupported).await; + let _ = utils::init_download(downloadurl, filename, ostype, autoupdatesupported).await; Ok(()) } diff --git a/apps/app/src/main.rs b/apps/app/src/main.rs index dc3ee39e..94427776 100644 --- a/apps/app/src/main.rs +++ b/apps/app/src/main.rs @@ -157,7 +157,7 @@ fn main() { */ let _log_guard = theseus::start_logger(); - tracing::info!("Initialized tracing subscriber. Loading Modrinth App!"); + tracing::info!("Initialized tracing subscriber. 
Loading AstralRinth App!"); let mut builder = tauri::Builder::default(); diff --git a/apps/app/tauri.conf.json b/apps/app/tauri.conf.json index 26021c70..4ebb2aa5 100644 --- a/apps/app/tauri.conf.json +++ b/apps/app/tauri.conf.json @@ -41,7 +41,7 @@ ] }, "productName": "AstralRinth App", - "version": "0.10.302", + "version": "0.10.303", "mainBinaryName": "AstralRinth App", "identifier": "AstralRinthApp", "plugins": { diff --git a/apps/daedalus_client/Dockerfile b/apps/daedalus_client/Dockerfile index d33fc113..9ea70f9c 100644 --- a/apps/daedalus_client/Dockerfile +++ b/apps/daedalus_client/Dockerfile @@ -1,5 +1,4 @@ FROM rust:1.88.0 AS build -ENV PKG_CONFIG_ALLOW_CROSS=1 WORKDIR /usr/src/daedalus COPY . . @@ -10,11 +9,8 @@ FROM debian:bookworm-slim RUN apt-get update \ && apt-get install -y --no-install-recommends ca-certificates openssl \ - && apt-get clean \ && rm -rf /var/lib/apt/lists/* -RUN update-ca-certificates - COPY --from=build /usr/src/daedalus/target/release/daedalus_client /daedalus/daedalus_client WORKDIR /daedalus_client diff --git a/apps/frontend/src/composables/servers/modrinth-servers.ts b/apps/frontend/src/composables/servers/modrinth-servers.ts index e91bb853..8d07648d 100644 --- a/apps/frontend/src/composables/servers/modrinth-servers.ts +++ b/apps/frontend/src/composables/servers/modrinth-servers.ts @@ -102,7 +102,7 @@ export class ModrinthServer { try { const fileData = await useServersFetch(`/download?path=/server-icon-original.png`, { override: auth, - retry: false, + retry: 1, // Reduce retries for optional resources }); if (fileData instanceof Blob && import.meta.client) { @@ -124,8 +124,14 @@ export class ModrinthServer { return dataURL; } } catch (error) { - if (error instanceof ModrinthServerError && error.statusCode === 404) { - if (iconUrl) { + if (error instanceof ModrinthServerError) { + if (error.statusCode && error.statusCode >= 500) { + console.debug("Service unavailable, skipping icon processing"); + sharedImage.value = undefined; + return undefined; + } + + if (error.statusCode === 404 && iconUrl) { try { const response = await fetch(iconUrl); if (!response.ok) throw new Error("Failed to fetch icon"); @@ -187,6 +193,45 @@ export class ModrinthServer { return undefined; } + async testNodeReachability(): Promise { + if (!this.general?.datacenter) { + console.warn("No datacenter info available for ping test"); + return false; + } + + const datacenter = this.general.datacenter; + const wsUrl = `wss://${datacenter}.nodes.modrinth.com/pingtest`; + + try { + return await new Promise((resolve) => { + const socket = new WebSocket(wsUrl); + const timeout = setTimeout(() => { + socket.close(); + resolve(false); + }, 5000); + + socket.onopen = () => { + clearTimeout(timeout); + socket.send(performance.now().toString()); + }; + + socket.onmessage = () => { + clearTimeout(timeout); + socket.close(); + resolve(true); + }; + + socket.onerror = () => { + clearTimeout(timeout); + resolve(false); + }; + }); + } catch (error) { + console.error(`Failed to ping node ${wsUrl}:`, error); + return false; + } + } + async refresh( modules: ModuleName[] = [], options?: { @@ -200,6 +245,8 @@ export class ModrinthServer { : (["general", "content", "backups", "network", "startup", "ws", "fs"] as ModuleName[]); for (const module of modulesToRefresh) { + this.errors[module] = undefined; + try { switch (module) { case "general": { @@ -250,7 +297,7 @@ export class ModrinthServer { continue; } - if (error.statusCode === 503) { + if (error.statusCode && error.statusCode >= 500) { 
console.debug(`Temporary ${module} unavailable:`, error.message); continue; } diff --git a/apps/frontend/src/composables/servers/modules/fs.ts b/apps/frontend/src/composables/servers/modules/fs.ts index 1072789e..39fe75db 100644 --- a/apps/frontend/src/composables/servers/modules/fs.ts +++ b/apps/frontend/src/composables/servers/modules/fs.ts @@ -22,26 +22,49 @@ export class FSModule extends ServerModule { this.opsQueuedForModification = []; } - private async retryWithAuth(requestFn: () => Promise): Promise { + private async retryWithAuth( + requestFn: () => Promise, + ignoreFailure: boolean = false, + ): Promise { try { return await requestFn(); } catch (error) { if (error instanceof ModrinthServerError && error.statusCode === 401) { + console.debug("Auth failed, refreshing JWT and retrying"); await this.fetch(); // Refresh auth return await requestFn(); } + + const available = await this.server.testNodeReachability(); + if (!available && !ignoreFailure) { + this.server.moduleErrors.general = { + error: new ModrinthServerError( + "Unable to reach node. FS operation failed and subsequent ping test failed.", + 500, + error as Error, + "fs", + ), + timestamp: Date.now(), + }; + } + throw error; } } - listDirContents(path: string, page: number, pageSize: number): Promise { + listDirContents( + path: string, + page: number, + pageSize: number, + ignoreFailure: boolean = false, + ): Promise { return this.retryWithAuth(async () => { const encodedPath = encodeURIComponent(path); return await useServersFetch(`/list?path=${encodedPath}&page=${page}&page_size=${pageSize}`, { override: this.auth, retry: false, }); - }); + }, ignoreFailure); } createFileOrFolder(path: string, type: "file" | "directory"): Promise { @@ -150,7 +173,7 @@ export class FSModule extends ServerModule { }); } - downloadFile(path: string, raw?: boolean): Promise { + downloadFile(path: string, raw: boolean = false, ignoreFailure: boolean = false): Promise { return this.retryWithAuth(async () => { const encodedPath = encodeURIComponent(path); const fileData = await useServersFetch(`/download?path=${encodedPath}`, { @@ -161,7 +184,7 @@ export class FSModule extends ServerModule { return raw ? fileData : await fileData.text(); } return fileData; - }); + }, ignoreFailure); } extractFile( diff --git a/apps/frontend/src/composables/servers/modules/general.ts b/apps/frontend/src/composables/servers/modules/general.ts index b2f10065..e46e62b4 100644 --- a/apps/frontend/src/composables/servers/modules/general.ts +++ b/apps/frontend/src/composables/servers/modules/general.ts @@ -46,13 +46,18 @@ export class GeneralModule extends ServerModule implements ServerGeneral { data.image = (await this.server.processImage(data.project?.icon_url)) ?? 
undefined; } - const motd = await this.getMotd(); - if (motd === "A Minecraft Server") { - await this.setMotd( - `§b${data.project?.title || data.loader + " " + data.mc_version} §f♦ §aModrinth Servers`, - ); + try { + const motd = await this.getMotd(); + if (motd === "A Minecraft Server") { + await this.setMotd( + `§b${data.project?.title || data.loader + " " + data.mc_version} §f♦ §aModrinth Servers`, + ); + } + data.motd = motd; + } catch { + console.error("[Modrinth Servers] [General] Failed to fetch MOTD."); + data.motd = undefined; } - data.motd = motd; // Copy data to this module Object.assign(this, data); @@ -178,7 +183,7 @@ export class GeneralModule extends ServerModule implements ServerGeneral { async getMotd(): Promise { try { - const props = await this.server.fs.downloadFile("/server.properties"); + const props = await this.server.fs.downloadFile("/server.properties", false, true); if (props) { const lines = props.split("\n"); for (const line of lines) { diff --git a/apps/frontend/src/composables/servers/servers-fetch.ts b/apps/frontend/src/composables/servers/servers-fetch.ts index 137baea5..5b5d925b 100644 --- a/apps/frontend/src/composables/servers/servers-fetch.ts +++ b/apps/frontend/src/composables/servers/servers-fetch.ts @@ -42,6 +42,23 @@ export async function useServersFetch( retry = method === "GET" ? 3 : 0, } = options; + const circuitBreakerKey = `${module || "default"}_${path}`; + const failureCount = useState(`fetch_failures_${circuitBreakerKey}`, () => 0); + const lastFailureTime = useState(`last_failure_${circuitBreakerKey}`, () => 0); + + const now = Date.now(); + if (failureCount.value >= 3 && now - lastFailureTime.value < 30000) { + const error = new ModrinthServersFetchError( + "[Modrinth Servers] Circuit breaker open - too many recent failures", + 503, + ); + throw new ModrinthServerError("Service temporarily unavailable", 503, error, module); + } + + if (now - lastFailureTime.value > 30000) { + failureCount.value = 0; + } + const base = (import.meta.server ? config.pyroBaseUrl : config.public.pyroBaseUrl)?.replace( /\/$/, "", @@ -69,6 +86,7 @@ export async function useServersFetch( const headers: Record = { "User-Agent": "Modrinth/1.0 (https://modrinth.com)", + "X-Archon-Request": "true", Vary: "Accept, Origin", }; @@ -98,6 +116,7 @@ export async function useServersFetch( timeout: 10000, }); + failureCount.value = 0; return response; } catch (error) { lastError = error as Error; @@ -107,6 +126,11 @@ export async function useServersFetch( const statusCode = error.response?.status; const statusText = error.response?.statusText || "Unknown error"; + if (statusCode && statusCode >= 500) { + failureCount.value++; + lastFailureTime.value = now; + } + let v1Error: V1ErrorInfo | undefined; if (error.data?.error && error.data?.description) { v1Error = { @@ -134,9 +158,11 @@ export async function useServersFetch( ? errorMessages[statusCode] : `HTTP Error: ${statusCode || "unknown"} ${statusText}`; - const isRetryable = statusCode ? [408, 429, 500, 502, 504].includes(statusCode) : true; + const isRetryable = statusCode ? 
[408, 429].includes(statusCode) : false;
+      const is5xxRetryable =
+        statusCode && statusCode >= 500 && statusCode < 600 && method === "GET" && attempts === 1;
 
-      if (!isRetryable || attempts >= maxAttempts) {
+      if (!(isRetryable || is5xxRetryable) || attempts >= maxAttempts) {
         console.error("Fetch error:", error);
 
         const fetchError = new ModrinthServersFetchError(
@@ -147,7 +173,8 @@
         throw new ModrinthServerError(error.message, statusCode, fetchError, module, v1Error);
       }
 
-      const delay = Math.min(1000 * Math.pow(2, attempts - 1) + Math.random() * 1000, 10000);
+      const baseDelay = statusCode && statusCode >= 500 ? 5000 : 1000;
+      const delay = Math.min(baseDelay * Math.pow(2, attempts - 1) + Math.random() * 1000, 15000);
       console.warn(`Retrying request in ${delay}ms (attempt ${attempts}/${maxAttempts - 1})`);
       await new Promise((resolve) => setTimeout(resolve, delay));
       continue;
diff --git a/apps/frontend/src/pages/news/article/[slug].vue b/apps/frontend/src/pages/news/article/[slug].vue
index fda42fae..c2e35cdf 100644
--- a/apps/frontend/src/pages/news/article/[slug].vue
+++ b/apps/frontend/src/pages/news/article/[slug].vue
@@ -1,9 +1,10 @@