Skip the update right after the migration in Opower (#144088)

* Wait for the migration to finish in Opower

* Don't call async_block_till_done since this can time out and seems to be meant for tests

* Don't call async_block_till_done since this can time out and seems to be meant for tests
This commit is contained in:
tronikos 2025-05-03 12:12:01 -07:00 committed by GitHub
parent 30e4264aa9
commit 716b559e5d
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@ -190,7 +190,7 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]):
return_sum = 0.0 return_sum = 0.0
last_stats_time = None last_stats_time = None
else: else:
await self._async_maybe_migrate_statistics( migrated = await self._async_maybe_migrate_statistics(
account.utility_account_id, account.utility_account_id,
{ {
cost_statistic_id: compensation_statistic_id, cost_statistic_id: compensation_statistic_id,
@ -203,6 +203,13 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]):
return_statistic_id: return_metadata, return_statistic_id: return_metadata,
}, },
) )
if migrated:
# Skip update to avoid working on old data since the migration is done
# asynchronously. Update the statistics in the next refresh in 12h.
_LOGGER.debug(
"Statistics migration completed. Skipping update for now"
)
continue
cost_reads = await self._async_get_cost_reads( cost_reads = await self._async_get_cost_reads(
account, account,
self.api.utility.timezone(), self.api.utility.timezone(),
@ -326,7 +333,7 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]):
utility_account_id: str, utility_account_id: str,
migration_map: dict[str, str], migration_map: dict[str, str],
metadata_map: dict[str, StatisticMetaData], metadata_map: dict[str, StatisticMetaData],
) -> None: ) -> bool:
"""Perform one-time statistics migration based on the provided map. """Perform one-time statistics migration based on the provided map.
Splits negative values from source IDs into target IDs. Splits negative values from source IDs into target IDs.
@ -339,7 +346,7 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]):
""" """
if not migration_map: if not migration_map:
return return False
need_migration_source_ids = set() need_migration_source_ids = set()
for source_id, target_id in migration_map.items(): for source_id, target_id in migration_map.items():
@ -354,7 +361,7 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]):
if not last_target_stat: if not last_target_stat:
need_migration_source_ids.add(source_id) need_migration_source_ids.add(source_id)
if not need_migration_source_ids: if not need_migration_source_ids:
return return False
_LOGGER.info("Starting one-time migration for: %s", need_migration_source_ids) _LOGGER.info("Starting one-time migration for: %s", need_migration_source_ids)
@ -416,7 +423,7 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]):
if not need_migration_source_ids: if not need_migration_source_ids:
_LOGGER.debug("No migration needed") _LOGGER.debug("No migration needed")
return return False
for stat_id, stats in processed_stats.items(): for stat_id, stats in processed_stats.items():
_LOGGER.debug("Applying %d migrated stats for %s", len(stats), stat_id) _LOGGER.debug("Applying %d migrated stats for %s", len(stats), stat_id)
@ -442,6 +449,8 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]):
}, },
) )
return True
async def _async_get_cost_reads( async def _async_get_cost_reads(
self, account: Account, time_zone_str: str, start_time: float | None = None self, account: Account, time_zone_str: str, start_time: float | None = None
) -> list[CostRead]: ) -> list[CostRead]: