mirror of
https://github.com/jpawlowski/hass.tibber_prices.git
synced 2026-03-29 21:03:40 +00:00
fix(coordinator): sync cached_user_data after API call for new integrations
When adding a new integration (with no existing cache), metadata sensors (grid_company, estimated_annual_consumption, etc.) were marked as unavailable because coordinator._cached_user_data remained None even after a successful API call. Root cause: update_user_data_if_needed() stored the user data in _data_fetcher.cached_user_data, but the sync back to the coordinator only happened during _load_cache(), which runs before the API call. Solution: added an explicit sync of cached_user_data after handle_main_entry_update() completes, ensuring metadata is available when sensors first access get_user_homes(). Changes: - coordinator/core.py: sync _cached_user_data after the main entry update - __init__.py: kept the preload cache call (helps with HA restarts) Impact: metadata sensors now show values immediately on a fresh integration setup, without requiring a second update cycle or manual sensor activation.
This commit is contained in:
parent
2cbb35afd2
commit
ced6dcf104
2 changed files with 12 additions and 3 deletions
|
|
@ -64,6 +64,11 @@ async def async_setup_entry(
|
|||
version=str(integration.version) if integration.version else "unknown",
|
||||
)
|
||||
|
||||
# CRITICAL: Load cache BEFORE first refresh to ensure user_data is available
|
||||
# for metadata sensors (grid_company, estimated_annual_consumption, etc.)
|
||||
# This prevents sensors from being marked as "unavailable" on first setup
|
||||
await coordinator.load_cache()
|
||||
|
||||
entry.runtime_data = TibberPricesData(
|
||||
client=TibberPricesApiClient(
|
||||
access_token=entry.data[CONF_ACCESS_TOKEN],
|
||||
|
|
|
|||
|
|
@ -458,7 +458,7 @@ class TibberPricesDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
|||
|
||||
# Load cache if not already loaded
|
||||
if self._cached_price_data is None and self._cached_user_data is None:
|
||||
await self._load_cache()
|
||||
await self.load_cache()
|
||||
|
||||
# Initialize midnight check on first run
|
||||
if self._last_midnight_check is None:
|
||||
|
|
@ -481,11 +481,15 @@ class TibberPricesDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
|||
if self.is_main_entry():
|
||||
# Main entry fetches data for all homes
|
||||
configured_home_ids = self._get_configured_home_ids()
|
||||
return await self._data_fetcher.handle_main_entry_update(
|
||||
result = await self._data_fetcher.handle_main_entry_update(
|
||||
current_time,
|
||||
configured_home_ids,
|
||||
self._transform_data_for_main_entry,
|
||||
)
|
||||
# CRITICAL: Sync cached_user_data after API call (for new integrations without cache)
|
||||
# handle_main_entry_update() may have fetched user_data via update_user_data_if_needed()
|
||||
self._cached_user_data = self._data_fetcher.cached_user_data
|
||||
return result
|
||||
# Subentries get data from main coordinator
|
||||
return await self._handle_subentry_update()
|
||||
|
||||
|
|
@ -544,7 +548,7 @@ class TibberPricesDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
|||
|
||||
return home_ids
|
||||
|
||||
async def _load_cache(self) -> None:
|
||||
async def load_cache(self) -> None:
|
||||
"""Load cached data from storage."""
|
||||
await self._data_fetcher.load_cache()
|
||||
# Sync legacy references
|
||||
|
|
|
|||
Loading…
Reference in a new issue