Fix updating mod version if file hash is the same (#5138)

* Start fixing infinite update glitch

* adjust app cache logic

* more cache logic

* cleanup

* add v2 redirect

* add v2 version file route to config
This commit is contained in:
aecsocket
2026-01-19 21:40:44 +00:00
committed by GitHub
parent 976644d1e6
commit c94dde9b47
4 changed files with 157 additions and 57 deletions

View File

@@ -25,6 +25,7 @@ pub fn config(cfg: &mut web::ServiceConfig) {
web::scope("version_files") web::scope("version_files")
.service(get_versions_from_hashes) .service(get_versions_from_hashes)
.service(update_files) .service(update_files)
.service(update_files_many)
.service(update_individual_files), .service(update_individual_files),
); );
} }
@@ -291,27 +292,70 @@ pub async fn update_files(
hashes: update_data.hashes, hashes: update_data.hashes,
}; };
let response = let returned_versions = match v3::version_file::update_files(
v3::version_file::update_files(pool, redis, web::Json(update_data)) pool,
.await redis,
.or_else(v2_reroute::flatten_404_error)?; web::Json(update_data),
)
.await
{
Ok(resp) => resp,
Err(ApiError::NotFound) => return Ok(HttpResponse::NotFound().body("")),
Err(err) => return Err(err),
};
// Convert response to V2 format // Convert response to V2 format
match v2_reroute::extract_ok_json::<HashMap<String, Version>>(response) let v3_versions = returned_versions
.await .0
.into_iter()
.map(|(hash, version)| {
let v2_version = LegacyVersion::from(version);
(hash, v2_version)
})
.collect::<HashMap<_, _>>();
Ok(HttpResponse::Ok().json(v3_versions))
}
#[post("update_many")]
pub async fn update_files_many(
pool: web::Data<ReadOnlyPgPool>,
redis: web::Data<RedisPool>,
update_data: web::Json<ManyUpdateData>,
) -> Result<HttpResponse, ApiError> {
let update_data = update_data.into_inner();
let update_data = v3::version_file::ManyUpdateData {
loaders: update_data.loaders.clone(),
version_types: update_data.version_types.clone(),
game_versions: update_data.game_versions.clone(),
algorithm: update_data.algorithm,
hashes: update_data.hashes,
};
let returned_versions = match v3::version_file::update_files_many(
pool,
redis,
web::Json(update_data),
)
.await
{ {
Ok(returned_versions) => { Ok(resp) => resp,
let v3_versions = returned_versions Err(ApiError::NotFound) => return Ok(HttpResponse::NotFound().body("")),
Err(err) => return Err(err),
};
// Convert response to V2 format
let v3_versions = returned_versions
.0
.into_iter()
.map(|(hash, versions)| {
let v2_versions = versions
.into_iter() .into_iter()
.map(|(hash, version)| { .map(LegacyVersion::from)
let v2_version = LegacyVersion::from(version); .collect::<Vec<_>>();
(hash, v2_version) (hash, v2_versions)
}) })
.collect::<HashMap<_, _>>(); .collect::<HashMap<_, _>>();
Ok(HttpResponse::Ok().json(v3_versions)) Ok(HttpResponse::Ok().json(v3_versions))
}
Err(response) => Ok(response),
}
} }
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]

View File

@@ -28,7 +28,10 @@ pub fn config(cfg: &mut web::ServiceConfig) {
); );
cfg.service( cfg.service(
web::scope("version_files") web::scope("version_files")
// DEPRECATED - use `update_many` instead
// see `fn update_files` comment
.route("update", web::post().to(update_files)) .route("update", web::post().to(update_files))
.route("update_many", web::post().to(update_files_many))
.route("update_individual", web::post().to(update_individual_files)) .route("update_individual", web::post().to(update_individual_files))
.route("", web::post().to(get_versions_from_hashes)), .route("", web::post().to(get_versions_from_hashes)),
); );
@@ -331,11 +334,60 @@ pub struct ManyUpdateData {
pub version_types: Option<Vec<VersionType>>, pub version_types: Option<Vec<VersionType>>,
} }
pub async fn update_files_many(
pool: web::Data<ReadOnlyPgPool>,
redis: web::Data<RedisPool>,
update_data: web::Json<ManyUpdateData>,
) -> Result<web::Json<HashMap<String, Vec<models::projects::Version>>>, ApiError>
{
update_files_internal(pool, redis, update_data)
.await
.map(web::Json)
}
// DEPRECATED - use `update_files_many` instead
//
// This returns a `HashMap<String, Version>` where the key is the file hash.
// But one file hash can have multiple versions associated with it.
// So you can end up in a situation where:
// - file with hash H is linked to versions V1, V2
// - user downloads mod with file hash H
// - every time the app checks for updates:
// - it asks the backend, what is the version of H?
// - backend says V1
// - the app asks, is there a compatible version newer than V1?
// - backend says, yes, V2
// - user updates to V2, but it's the same file, so it's the same hash H,
//   and the update prompt never goes away — the app loops forever
//
// By using `update_files_many`, we can have the app know that both V1 and V2
// are linked to H
//
// This endpoint is kept for backwards compatibility, since it still works in
// 99% of cases where H only maps to a single version, and for older clients.
// This endpoint will only take the first version for each file hash.
pub async fn update_files( pub async fn update_files(
pool: web::Data<ReadOnlyPgPool>, pool: web::Data<ReadOnlyPgPool>,
redis: web::Data<RedisPool>, redis: web::Data<RedisPool>,
update_data: web::Json<ManyUpdateData>, update_data: web::Json<ManyUpdateData>,
) -> Result<HttpResponse, ApiError> { ) -> Result<web::Json<HashMap<String, models::projects::Version>>, ApiError> {
let file_hashes_to_versions =
update_files_internal(pool, redis, update_data).await?;
let resp = file_hashes_to_versions
.into_iter()
.filter_map(|(hash, versions)| {
let first_version = versions.into_iter().next()?;
Some((hash, first_version))
})
.collect();
Ok(web::Json(resp))
}
async fn update_files_internal(
pool: web::Data<ReadOnlyPgPool>,
redis: web::Data<RedisPool>,
update_data: web::Json<ManyUpdateData>,
) -> Result<HashMap<String, Vec<models::projects::Version>>, ApiError> {
let algorithm = update_data let algorithm = update_data
.algorithm .algorithm
.clone() .clone()
@@ -385,21 +437,25 @@ pub async fn update_files(
) )
.await?; .await?;
let mut response = HashMap::new(); let mut response = HashMap::<String, Vec<models::projects::Version>>::new();
for file in files { for file in files {
if let Some(version) = versions if let Some(version) = versions
.iter() .iter()
.find(|x| x.inner.project_id == file.project_id) .find(|x| x.inner.project_id == file.project_id)
&& let Some(hash) = file.hashes.get(&algorithm) && let Some(hash) = file.hashes.get(&algorithm)
{ {
response.insert( // add the version info for this file hash
hash.clone(), // note: one file hash can have multiple versions associated with it
models::projects::Version::from(version.clone()), // just having a `HashMap<String, Version>` would mean that some version info is lost
); // so we return a vec of them instead
response
.entry(hash.clone())
.or_default()
.push(models::projects::Version::from(version.clone()));
} }
} }
Ok(HttpResponse::Ok().json(response)) Ok(response)
} }
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]

View File

@@ -1307,9 +1307,9 @@ impl CachedEntry {
let variations = let variations =
futures::future::try_join_all(filtered_keys.iter().map( futures::future::try_join_all(filtered_keys.iter().map(
|((loaders_key, game_version), hashes)| { |((loaders_key, game_version), hashes)| {
fetch_json::<HashMap<String, Version>>( fetch_json::<HashMap<String, Vec<Version>>>(
Method::POST, Method::POST,
concat!(env!("MODRINTH_API_URL"), "version_files/update"), concat!(env!("MODRINTH_API_URL"), "version_files/update_many"),
None, None,
Some(serde_json::json!({ Some(serde_json::json!({
"algorithm": "sha1", "algorithm": "sha1",
@@ -1330,28 +1330,30 @@ impl CachedEntry {
&filtered_keys[index]; &filtered_keys[index];
for hash in hashes { for hash in hashes {
let version = variation.remove(hash); let versions = variation.remove(hash);
if let Some(version) = version { if let Some(versions) = versions {
let version_id = version.id.clone(); for version in versions {
vals.push(( let version_id = version.id.clone();
CacheValue::Version(version).get_entry(), vals.push((
false, CacheValue::Version(version).get_entry(),
)); false,
));
vals.push(( vals.push((
CacheValue::FileUpdate(CachedFileUpdate { CacheValue::FileUpdate(CachedFileUpdate {
hash: hash.clone(), hash: hash.clone(),
game_version: game_version.clone(), game_version: game_version.clone(),
loaders: loaders_key loaders: loaders_key
.split('+') .split('+')
.map(|x| x.to_string()) .map(|x| x.to_string())
.collect(), .collect(),
update_version_id: version_id, update_version_id: version_id,
}) })
.get_entry(), .get_entry(),
true, true,
)); ));
}
} else { } else {
vals.push(( vals.push((
CacheValueType::FileUpdate.get_empty_entry( CacheValueType::FileUpdate.get_empty_entry(

View File

@@ -1009,17 +1009,15 @@ impl Profile {
initial_file.file_name initial_file.file_name
); );
let update_version_id = if let Some(update) = file_updates let update_version_id = if let Some(metadata) = &file {
.iter() let update_ids: Vec<String> = file_updates
.find(|x| x.hash == hash.hash) .iter()
.map(|x| x.update_version_id.clone()) .filter(|x| x.hash == hash.hash)
{ .map(|x| x.update_version_id.clone())
if let Some(metadata) = &file { .collect();
if metadata.version_id != update {
Some(update) if !update_ids.contains(&metadata.version_id) {
} else { update_ids.into_iter().next()
None
}
} else { } else {
None None
} }