refactor(coordinator): remove price_data from cache, delegate to Pool

Cache now stores only user metadata and timestamps. Price data is
managed exclusively by IntervalPool (single source of truth).

Changes:
- cache.py: Remove price_data and last_price_update fields
- core.py: Remove _cached_price_data, update references to use Pool
- core.py: Rename _data_fetcher to _price_data_manager
- AGENTS.md: Update class naming examples (DataFetcher → PriceDataManager)

This completes the Pool integration architecture, in which IntervalPool
handles all price data persistence and the coordinator cache handles
only user account metadata.
This commit is contained in:
Julian Pawlowski 2025-12-23 14:15:26 +00:00
parent 9b34d416bc
commit 9eea984d1f
3 changed files with 102 additions and 162 deletions

View file

@ -1838,12 +1838,12 @@ This is a Home Assistant standard to avoid naming conflicts between integrations
# ✅ CORRECT - Integration prefix + semantic purpose # ✅ CORRECT - Integration prefix + semantic purpose
class TibberPricesApiClient: # Integration + semantic role class TibberPricesApiClient: # Integration + semantic role
class TibberPricesDataUpdateCoordinator: # Integration + semantic role class TibberPricesDataUpdateCoordinator: # Integration + semantic role
class TibberPricesDataFetcher: # Integration + semantic role class TibberPricesPriceDataManager: # Integration + semantic role
class TibberPricesSensor: # Integration + entity type class TibberPricesSensor: # Integration + entity type
class TibberPricesEntity: # Integration + entity type class TibberPricesEntity: # Integration + entity type
# ❌ INCORRECT - Missing integration prefix # ❌ INCORRECT - Missing integration prefix
class DataFetcher: # Should be: TibberPricesDataFetcher class PriceDataManager: # Should be: TibberPricesPriceDataManager
class TimeService: # Should be: TibberPricesTimeService class TimeService: # Should be: TibberPricesTimeService
class PeriodCalculator: # Should be: TibberPricesPeriodCalculator class PeriodCalculator: # Should be: TibberPricesPeriodCalculator
@ -1855,11 +1855,11 @@ class TibberPricesSensorCalculatorTrend: # Too verbose, import path shows loca
**IMPORTANT:** Do NOT include package hierarchy in class names. Python's import system provides the namespace: **IMPORTANT:** Do NOT include package hierarchy in class names. Python's import system provides the namespace:
```python ```python
# The import path IS the full namespace: # The import path IS the full namespace:
from custom_components.tibber_prices.coordinator.data_fetching import TibberPricesDataFetcher from custom_components.tibber_prices.coordinator.price_data_manager import TibberPricesPriceDataManager
from custom_components.tibber_prices.sensor.calculators.trend import TibberPricesTrendCalculator from custom_components.tibber_prices.sensor.calculators.trend import TibberPricesTrendCalculator
# Adding package names to class would be redundant: # Adding package names to class would be redundant:
# TibberPricesCoordinatorDataFetcher ❌ NO - unnecessarily verbose # TibberPricesCoordinatorPriceDataManager ❌ NO - unnecessarily verbose
# TibberPricesSensorCalculatorsTrendCalculator ❌ NO - ridiculously long # TibberPricesSensorCalculatorsTrendCalculator ❌ NO - ridiculously long
``` ```
@ -1905,14 +1905,14 @@ result = _InternalHelper().process()
**Example of genuine private class use case:** **Example of genuine private class use case:**
```python ```python
# In coordinator/data_fetching.py # In coordinator/price_data_manager.py
class _ApiRetryStateMachine: class _ApiRetryStateMachine:
"""Internal state machine for retry logic. Never used outside this file.""" """Internal state machine for retry logic. Never used outside this file."""
def __init__(self, max_retries: int) -> None: def __init__(self, max_retries: int) -> None:
self._attempts = 0 self._attempts = 0
self._max_retries = max_retries self._max_retries = max_retries
# Only used by DataFetcher methods in this file # Only used by PriceDataManager methods in this file
``` ```
In practice, most "helper" logic should be **functions**, not classes. Reserve classes for stateful components. In practice, most "helper" logic should be **functions**, not classes. Reserve classes for stateful components.

View file

@ -1,4 +1,28 @@
"""Cache management for coordinator module.""" """
Cache management for coordinator persistent storage.
This module handles persistent storage for the coordinator, storing:
- user_data: Account/home metadata (required, refreshed daily)
- Timestamps for cache validation and lifecycle tracking
**Storage Architecture (as of v0.25.0):**
There are TWO persistent storage files per config entry:
1. `tibber_prices.{entry_id}` (this module)
- user_data: Account info, home metadata, timezone, currency
- Timestamps: last_user_update, last_midnight_check
2. `tibber_prices.interval_pool.{entry_id}` (interval_pool/storage.py)
- Intervals: Deduplicated quarter-hourly price data (source of truth)
- Fetch metadata: When each interval was fetched
- Protected range: Which intervals to keep during cleanup
**Single Source of Truth:**
Price intervals are ONLY stored in IntervalPool. This cache stores only
user metadata and timestamps. The IntervalPool handles all price data
fetching, caching, and persistence independently.
"""
from __future__ import annotations from __future__ import annotations
@ -16,11 +40,9 @@ _LOGGER = logging.getLogger(__name__)
class TibberPricesCacheData(NamedTuple): class TibberPricesCacheData(NamedTuple):
"""Cache data structure.""" """Cache data structure for user metadata (price data is in IntervalPool)."""
price_data: dict[str, Any] | None
user_data: dict[str, Any] | None user_data: dict[str, Any] | None
last_price_update: datetime | None
last_user_update: datetime | None last_user_update: datetime | None
last_midnight_check: datetime | None last_midnight_check: datetime | None
@ -31,20 +53,16 @@ async def load_cache(
*, *,
time: TibberPricesTimeService, time: TibberPricesTimeService,
) -> TibberPricesCacheData: ) -> TibberPricesCacheData:
"""Load cached data from storage.""" """Load cached user data from storage (price data is in IntervalPool)."""
try: try:
stored = await store.async_load() stored = await store.async_load()
if stored: if stored:
cached_price_data = stored.get("price_data")
cached_user_data = stored.get("user_data") cached_user_data = stored.get("user_data")
# Restore timestamps # Restore timestamps
last_price_update = None
last_user_update = None last_user_update = None
last_midnight_check = None last_midnight_check = None
if last_price_update_str := stored.get("last_price_update"):
last_price_update = time.parse_datetime(last_price_update_str)
if last_user_update_str := stored.get("last_user_update"): if last_user_update_str := stored.get("last_user_update"):
last_user_update = time.parse_datetime(last_user_update_str) last_user_update = time.parse_datetime(last_user_update_str)
if last_midnight_check_str := stored.get("last_midnight_check"): if last_midnight_check_str := stored.get("last_midnight_check"):
@ -52,9 +70,7 @@ async def load_cache(
_LOGGER.debug("%s Cache loaded successfully", log_prefix) _LOGGER.debug("%s Cache loaded successfully", log_prefix)
return TibberPricesCacheData( return TibberPricesCacheData(
price_data=cached_price_data,
user_data=cached_user_data, user_data=cached_user_data,
last_price_update=last_price_update,
last_user_update=last_user_update, last_user_update=last_user_update,
last_midnight_check=last_midnight_check, last_midnight_check=last_midnight_check,
) )
@ -64,9 +80,7 @@ async def load_cache(
_LOGGER.warning("%s Failed to load cache: %s", log_prefix, ex) _LOGGER.warning("%s Failed to load cache: %s", log_prefix, ex)
return TibberPricesCacheData( return TibberPricesCacheData(
price_data=None,
user_data=None, user_data=None,
last_price_update=None,
last_user_update=None, last_user_update=None,
last_midnight_check=None, last_midnight_check=None,
) )
@ -77,11 +91,9 @@ async def save_cache(
cache_data: TibberPricesCacheData, cache_data: TibberPricesCacheData,
log_prefix: str, log_prefix: str,
) -> None: ) -> None:
"""Store cache data.""" """Store cache data (user metadata only, price data is in IntervalPool)."""
data = { data = {
"price_data": cache_data.price_data,
"user_data": cache_data.user_data, "user_data": cache_data.user_data,
"last_price_update": (cache_data.last_price_update.isoformat() if cache_data.last_price_update else None),
"last_user_update": (cache_data.last_user_update.isoformat() if cache_data.last_user_update else None), "last_user_update": (cache_data.last_user_update.isoformat() if cache_data.last_user_update else None),
"last_midnight_check": (cache_data.last_midnight_check.isoformat() if cache_data.last_midnight_check else None), "last_midnight_check": (cache_data.last_midnight_check.isoformat() if cache_data.last_midnight_check else None),
} }
@ -91,55 +103,3 @@ async def save_cache(
_LOGGER.debug("%s Cache stored successfully", log_prefix) _LOGGER.debug("%s Cache stored successfully", log_prefix)
except OSError: except OSError:
_LOGGER.exception("%s Failed to store cache", log_prefix) _LOGGER.exception("%s Failed to store cache", log_prefix)
def is_cache_valid(
    cache_data: TibberPricesCacheData,
    log_prefix: str,
    *,
    time: TibberPricesTimeService,
) -> bool:
    """
    Check whether the cached price data can still be used.

    The cache is treated as stale when any of the following holds:
    - no price data or no fetch timestamp is present,
    - the payload uses the legacy multi-home layout (pre-v0.15.0),
    - the required 'price_info' key is absent,
    - the cache was written on a different local calendar day
      (i.e. a midnight turnover happened since it was saved).
    """
    price_data = cache_data.price_data
    if price_data is None or cache_data.last_price_update is None:
        # Nothing usable cached yet - caller must fetch fresh data.
        return False

    # Legacy structure check: v0.14.0 stored {"homes": {home_id: {...}}},
    # whereas the current format is {"home_id": str, "price_info": [...]}.
    if "homes" in price_data:
        _LOGGER.info(
            "%s Cache has old multi-home structure (v0.14.0), invalidating to fetch fresh data",
            log_prefix,
        )
        return False

    # Current format must carry the interval list under 'price_info'.
    if "price_info" not in price_data:
        _LOGGER.info(
            "%s Cache missing 'price_info' key, invalidating to fetch fresh data",
            log_prefix,
        )
        return False

    # Compare calendar days in the local timezone: data cached before
    # local midnight no longer describes "today".
    today_local = time.as_local(time.now()).date()
    cached_day_local = time.as_local(cache_data.last_price_update).date()
    if today_local != cached_day_local:
        _LOGGER.debug(
            "%s Cache date mismatch: cached=%s, current=%s",
            log_prefix,
            cached_day_local,
            today_local,
        )
        return False

    return True

View file

@ -35,11 +35,11 @@ from .constants import (
STORAGE_VERSION, STORAGE_VERSION,
UPDATE_INTERVAL, UPDATE_INTERVAL,
) )
from .data_fetching import TibberPricesDataFetcher
from .data_transformation import TibberPricesDataTransformer from .data_transformation import TibberPricesDataTransformer
from .listeners import TibberPricesListenerManager from .listeners import TibberPricesListenerManager
from .midnight_handler import TibberPricesMidnightHandler from .midnight_handler import TibberPricesMidnightHandler
from .periods import TibberPricesPeriodCalculator from .periods import TibberPricesPeriodCalculator
from .price_data_manager import TibberPricesPriceDataManager
from .repairs import TibberPricesRepairManager from .repairs import TibberPricesRepairManager
from .time_service import TibberPricesTimeService from .time_service import TibberPricesTimeService
@ -206,13 +206,14 @@ class TibberPricesDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
# Initialize helper modules # Initialize helper modules
self._listener_manager = TibberPricesListenerManager(hass, self._log_prefix) self._listener_manager = TibberPricesListenerManager(hass, self._log_prefix)
self._midnight_handler = TibberPricesMidnightHandler() self._midnight_handler = TibberPricesMidnightHandler()
self._data_fetcher = TibberPricesDataFetcher( self._price_data_manager = TibberPricesPriceDataManager(
api=self.api, api=self.api,
store=self._store, store=self._store,
log_prefix=self._log_prefix, log_prefix=self._log_prefix,
user_update_interval=timedelta(days=1), user_update_interval=timedelta(days=1),
time=self.time, time=self.time,
home_id=self._home_id, home_id=self._home_id,
interval_pool=self.interval_pool,
) )
# Create period calculator BEFORE data transformer (transformer needs it in lambda) # Create period calculator BEFORE data transformer (transformer needs it in lambda)
self._period_calculator = TibberPricesPeriodCalculator( self._period_calculator = TibberPricesPeriodCalculator(
@ -236,17 +237,16 @@ class TibberPricesDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
# Register options update listener to invalidate config caches # Register options update listener to invalidate config caches
config_entry.async_on_unload(config_entry.add_update_listener(self._handle_options_update)) config_entry.async_on_unload(config_entry.add_update_listener(self._handle_options_update))
# Legacy compatibility - keep references for methods that access directly # User data cache (price data is in IntervalPool)
self._cached_user_data: dict[str, Any] | None = None self._cached_user_data: dict[str, Any] | None = None
self._last_user_update: datetime | None = None self._last_user_update: datetime | None = None
self._user_update_interval = timedelta(days=1) self._user_update_interval = timedelta(days=1)
self._cached_price_data: dict[str, Any] | None = None
self._last_price_update: datetime | None = None
# Data lifecycle tracking for diagnostic sensor # Data lifecycle tracking for diagnostic sensor
self._lifecycle_state: str = ( self._lifecycle_state: str = (
"cached" # Current state: cached, fresh, refreshing, searching_tomorrow, turnover_pending, error "cached" # Current state: cached, fresh, refreshing, searching_tomorrow, turnover_pending, error
) )
self._last_price_update: datetime | None = None # Tracks when price data was last fetched (for cache_age)
self._api_calls_today: int = 0 # Counter for API calls today self._api_calls_today: int = 0 # Counter for API calls today
self._last_api_call_date: date | None = None # Date of last API call (for daily reset) self._last_api_call_date: date | None = None # Date of last API call (for daily reset)
self._is_fetching: bool = False # Flag to track active API fetch self._is_fetching: bool = False # Flag to track active API fetch
@ -268,14 +268,16 @@ class TibberPricesDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
self._data_transformer.invalidate_config_cache() self._data_transformer.invalidate_config_cache()
self._period_calculator.invalidate_config_cache() self._period_calculator.invalidate_config_cache()
# Re-transform existing cached data with new configuration # Re-transform existing data with new configuration
# This updates rating_levels, volatility, and period calculations # This updates rating_levels, volatility, and period calculations
# without needing to fetch new data from the API # without needing to fetch new data from the API
if self._cached_price_data: if self.data and "priceInfo" in self.data:
self.data = self._transform_data(self._cached_price_data) # Extract raw price_info and re-transform
raw_data = {"price_info": self.data["priceInfo"]}
self.data = self._transform_data(raw_data)
self.async_update_listeners() self.async_update_listeners()
else: else:
self._log("debug", "No cached data to re-transform") self._log("debug", "No data to re-transform")
@callback @callback
def async_add_time_sensitive_listener(self, update_callback: TimeServiceCallback) -> CALLBACK_TYPE: def async_add_time_sensitive_listener(self, update_callback: TimeServiceCallback) -> CALLBACK_TYPE:
@ -355,7 +357,7 @@ class TibberPricesDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
# Update helper modules with fresh TimeService instance # Update helper modules with fresh TimeService instance
self.api.time = time_service self.api.time = time_service
self._data_fetcher.time = time_service self._price_data_manager.time = time_service
self._data_transformer.time = time_service self._data_transformer.time = time_service
self._period_calculator.time = time_service self._period_calculator.time = time_service
@ -455,18 +457,13 @@ class TibberPricesDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
current_date, current_date,
) )
# With flat interval list architecture, no rotation needed! # With flat interval list architecture and IntervalPool as source of truth,
# get_intervals_for_day_offsets() automatically filters by date. # no data rotation needed! get_intervals_for_day_offsets() automatically
# Just update coordinator's data to trigger entity updates. # filters by date. Just re-transform to refresh enrichment.
if self.data and self._cached_price_data: if self.data and "priceInfo" in self.data:
# Re-transform data to ensure enrichment is refreshed # Re-transform data to ensure enrichment is refreshed for new day
self.data = self._transform_data(self._cached_price_data) raw_data = {"price_info": self.data["priceInfo"]}
self.data = self._transform_data(raw_data)
# CRITICAL: Update _last_price_update to current time after midnight
# This prevents cache_validity from showing "date_mismatch" after midnight
# The data is still valid (just rotated today→yesterday, tomorrow→today)
# Update timestamp to reflect that the data is current for the new day
self._last_price_update = now
# Mark turnover as done for today (atomic update) # Mark turnover as done for today (atomic update)
self._midnight_handler.mark_turnover_done(now) self._midnight_handler.mark_turnover_done(now)
@ -553,19 +550,20 @@ class TibberPricesDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
# Transition lifecycle state from "fresh" to "cached" if enough time passed # Transition lifecycle state from "fresh" to "cached" if enough time passed
# (5 minutes threshold defined in lifecycle calculator) # (5 minutes threshold defined in lifecycle calculator)
if self._lifecycle_state == "fresh" and self._last_price_update: # Note: With Pool as source of truth, we track "fresh" state based on
age = current_time - self._last_price_update # when data was last fetched from the API (tracked by _api_calls_today counter)
if age.total_seconds() > FRESH_TO_CACHED_SECONDS: if self._lifecycle_state == "fresh":
self._lifecycle_state = "cached" # After 5 minutes, data is considered "cached" (no longer "just fetched")
self._lifecycle_state = "cached"
# Update helper modules with fresh TimeService instance # Update helper modules with fresh TimeService instance
self.api.time = self.time self.api.time = self.time
self._data_fetcher.time = self.time self._price_data_manager.time = self.time
self._data_transformer.time = self.time self._data_transformer.time = self.time
self._period_calculator.time = self.time self._period_calculator.time = self.time
# Load cache if not already loaded # Load cache if not already loaded (user data only, price data is in Pool)
if self._cached_price_data is None and self._cached_user_data is None: if self._cached_user_data is None:
await self.load_cache() await self.load_cache()
# Initialize midnight handler on first run # Initialize midnight handler on first run
@ -602,47 +600,33 @@ class TibberPricesDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
self._api_calls_today = 0 self._api_calls_today = 0
self._last_api_call_date = current_date self._last_api_call_date = current_date
# Track last_price_update timestamp before fetch to detect if data actually changed # Set _is_fetching flag - lifecycle sensor shows "refreshing" during fetch
old_price_update = self._last_price_update self._is_fetching = True
# Immediately notify lifecycle sensor about state change
self.async_update_listeners()
# CRITICAL: Check if we need to fetch data BEFORE starting the fetch # Get current price info to check if tomorrow data already exists
# This allows the lifecycle sensor to show "searching_tomorrow" status current_price_info = self.data.get("priceInfo", []) if self.data else []
# when we're actively looking for tomorrow's data after 13:00
should_update = self._data_fetcher.should_update_price_data(current_time)
# Set _is_fetching flag if we're about to fetch data result = await self._price_data_manager.handle_main_entry_update(
# This makes the lifecycle sensor show "refreshing" status during the API call
if should_update:
self._is_fetching = True
# Immediately notify lifecycle sensor about state change
# This ensures "refreshing" or "searching_tomorrow" appears DURING the fetch
self.async_update_listeners()
result = await self._data_fetcher.handle_main_entry_update(
current_time, current_time,
self._home_id, self._home_id,
self._transform_data, self._transform_data,
current_price_info=current_price_info,
) )
# CRITICAL: Reset fetching flag AFTER data fetch completes # CRITICAL: Reset fetching flag AFTER data fetch completes
self._is_fetching = False self._is_fetching = False
# CRITICAL: Sync cached data after API call # Sync user_data cache (price data is in IntervalPool)
# handle_main_entry_update() updates data_fetcher's cache, we need to sync: self._cached_user_data = self._price_data_manager.cached_user_data
# 1. cached_user_data (for new integrations, may be fetched via update_user_data_if_needed())
# 2. cached_price_data (CRITICAL: contains tomorrow data, needed for _needs_tomorrow_data())
# 3. _last_price_update (for lifecycle tracking: cache age, fresh state detection)
self._cached_user_data = self._data_fetcher.cached_user_data
self._cached_price_data = self._data_fetcher.cached_price_data
self._last_price_update = self._data_fetcher._last_price_update # noqa: SLF001 - Sync for lifecycle tracking
# Update lifecycle tracking only if we fetched NEW data (timestamp changed) # Update lifecycle tracking - Pool decides if API was called
# This prevents recorder spam from state changes when returning cached data # We track based on result having data
if self._last_price_update != old_price_update: if result and "priceInfo" in result and len(result["priceInfo"]) > 0:
self._last_price_update = current_time # Track when data was fetched
self._api_calls_today += 1 self._api_calls_today += 1
self._lifecycle_state = "fresh" # Data just fetched self._lifecycle_state = "fresh" # Data just fetched
# No separate lifecycle notification needed - normal async_update_listeners()
# will trigger all entities (including lifecycle sensor) after this return
except ( except (
TibberPricesApiClientAuthenticationError, TibberPricesApiClientAuthenticationError,
TibberPricesApiClientCommunicationError, TibberPricesApiClientCommunicationError,
@ -655,12 +639,12 @@ class TibberPricesDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
# Track rate limit errors for repair system # Track rate limit errors for repair system
await self._track_rate_limit_error(err) await self._track_rate_limit_error(err)
# No separate lifecycle notification needed - error case returns data # Handle API error - will re-raise as ConfigEntryAuthFailed or UpdateFailed
# which triggers normal async_update_listeners() # Note: With IntervalPool, there's no local cache fallback here.
return await self._data_fetcher.handle_api_error( # The Pool has its own persistence for offline recovery.
err, await self._price_data_manager.handle_api_error(err)
self._transform_data, # Note: handle_api_error always raises, this is never reached
) return {} # Satisfy type checker
else: else:
# Check for repair conditions after successful update # Check for repair conditions after successful update
await self._check_repair_conditions(result, current_time) await self._check_repair_conditions(result, current_time)
@ -690,7 +674,7 @@ class TibberPricesDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
# 2. Tomorrow data availability (after 18:00) # 2. Tomorrow data availability (after 18:00)
if result and "priceInfo" in result: if result and "priceInfo" in result:
has_tomorrow_data = self._data_fetcher.has_tomorrow_data(result["priceInfo"]) has_tomorrow_data = self._price_data_manager.has_tomorrow_data(result["priceInfo"])
await self._repair_manager.check_tomorrow_data_availability( await self._repair_manager.check_tomorrow_data_availability(
has_tomorrow_data=has_tomorrow_data, has_tomorrow_data=has_tomorrow_data,
current_time=current_time, current_time=current_time,
@ -700,33 +684,29 @@ class TibberPricesDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
await self._repair_manager.clear_rate_limit_tracking() await self._repair_manager.clear_rate_limit_tracking()
async def load_cache(self) -> None: async def load_cache(self) -> None:
"""Load cached data from storage.""" """Load cached user data from storage (price data is in IntervalPool)."""
await self._data_fetcher.load_cache() await self._price_data_manager.load_cache()
# Sync legacy references # Sync user data reference
self._cached_price_data = self._data_fetcher.cached_price_data self._cached_user_data = self._price_data_manager.cached_user_data
self._cached_user_data = self._data_fetcher.cached_user_data self._last_user_update = self._price_data_manager._last_user_update # noqa: SLF001 - Sync for lifecycle tracking
self._last_price_update = self._data_fetcher._last_price_update # noqa: SLF001 - Sync for lifecycle tracking
self._last_user_update = self._data_fetcher._last_user_update # noqa: SLF001 - Sync for lifecycle tracking
# CRITICAL: Restore midnight handler state from cache # Note: Midnight handler state is now based on current date
# If cache is from today, assume turnover already happened at midnight # Since price data is in IntervalPool (persistent), we just need to
# This allows proper turnover detection after HA restart # ensure turnover doesn't happen twice if HA restarts after midnight
if self._last_price_update: today_midnight = self.time.as_local(self.time.now()).replace(hour=0, minute=0, second=0, microsecond=0)
cache_date = self.time.as_local(self._last_price_update).date() # Mark today's midnight as done to prevent double turnover on HA restart
today_date = self.time.as_local(self.time.now()).date() self._midnight_handler.mark_turnover_done(today_midnight)
if cache_date == today_date:
# Cache is from today, so midnight turnover already happened
today_midnight = self.time.as_local(self.time.now()).replace(hour=0, minute=0, second=0, microsecond=0)
# Restore handler state: mark today's midnight as last turnover
self._midnight_handler.mark_turnover_done(today_midnight)
async def _store_cache(self) -> None: async def _store_cache(self) -> None:
"""Store cache data.""" """Store cache data (user metadata only, price data is in IntervalPool)."""
await self._data_fetcher.store_cache(self._midnight_handler.last_check_time) await self._price_data_manager.store_cache(self._midnight_handler.last_check_time)
def _needs_tomorrow_data(self) -> bool: def _needs_tomorrow_data(self) -> bool:
"""Check if tomorrow data is missing or invalid.""" """Check if tomorrow data is missing or invalid."""
return helpers.needs_tomorrow_data(self._cached_price_data) # Check self.data (from Pool) instead of _cached_price_data
if not self.data or "priceInfo" not in self.data:
return True
return helpers.needs_tomorrow_data({"price_info": self.data["priceInfo"]})
def _has_valid_tomorrow_data(self) -> bool: def _has_valid_tomorrow_data(self) -> bool:
"""Check if we have valid tomorrow data (inverse of _needs_tomorrow_data).""" """Check if we have valid tomorrow data (inverse of _needs_tomorrow_data)."""
@ -734,10 +714,10 @@ class TibberPricesDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
@callback @callback
def _merge_cached_data(self) -> dict[str, Any]: def _merge_cached_data(self) -> dict[str, Any]:
"""Merge cached data into the expected format for main entry.""" """Return current data (from Pool)."""
if not self._cached_price_data: if not self.data:
return {} return {}
return self._transform_data(self._cached_price_data) return self.data
def _get_threshold_percentages(self) -> dict[str, int | float]: def _get_threshold_percentages(self) -> dict[str, int | float]:
"""Get threshold percentages from config options.""" """Get threshold percentages from config options."""