refactor: simplify needs_tomorrow_data() - remove tomorrow_date parameter

Changed needs_tomorrow_data() to auto-calculate the tomorrow date using
the get_intervals_for_day_offsets([1]) helper instead of requiring an
explicit tomorrow_date parameter.

Changes:
- coordinator/helpers.py: needs_tomorrow_data() signature simplified
  * Uses get_intervals_for_day_offsets([1]) to detect tomorrow intervals
  * No longer requires tomorrow_date parameter (calculated automatically)
  * Consistent with all other data access patterns

- coordinator/data_fetching.py: Removed tomorrow_date calculation and passing
  * Removed unused date import
  * Simplified method call: needs_tomorrow_data() instead of needs_tomorrow_data(tomorrow_date)

- sensor/calculators/lifecycle.py: Updated calls to _needs_tomorrow_data()
  * Removed tomorrow_date variable where it was only used for this call
  * Combined nested if statements with 'and' operator

Impact: Cleaner API with fewer parameters to track, and consistent with the
other helper functions that auto-calculate dates based on the current time.
This commit is contained in:
Julian Pawlowski 2025-11-24 16:26:08 +00:00
parent 2de793cfda
commit 6b78cd757f
3 changed files with 88 additions and 116 deletions

View file

@ -24,7 +24,7 @@ from .constants import TOMORROW_DATA_CHECK_HOUR, TOMORROW_DATA_RANDOM_DELAY_MAX
if TYPE_CHECKING:
from collections.abc import Callable
from datetime import date, datetime
from datetime import datetime
from custom_components.tibber_prices.api import TibberPricesApiClient
@ -147,17 +147,13 @@ class TibberPricesDataFetcher:
self._log("debug", "API update needed: No last price update timestamp")
return True
# Get tomorrow's date using TimeService
_, tomorrow_midnight = self.time.get_day_boundaries("today")
tomorrow_date = tomorrow_midnight.date()
now_local = self.time.as_local(current_time)
# Check if after 13:00 and tomorrow data is missing or invalid
now_local = self.time.as_local(current_time)
if (
now_local.hour >= TOMORROW_DATA_CHECK_HOUR
and self._cached_price_data
and "homes" in self._cached_price_data
and self.needs_tomorrow_data(tomorrow_date)
and self.needs_tomorrow_data()
):
self._log(
"debug",
@ -171,17 +167,19 @@ class TibberPricesDataFetcher:
# No update needed - cache is valid and complete
return False
def needs_tomorrow_data(self, tomorrow_date: date) -> bool:
def needs_tomorrow_data(self) -> bool:
"""Check if tomorrow data is missing or invalid."""
return helpers.needs_tomorrow_data(self._cached_price_data, tomorrow_date)
return helpers.needs_tomorrow_data(self._cached_price_data)
async def fetch_all_homes_data(self, configured_home_ids: set[str], current_time: datetime) -> dict[str, Any]:
"""Fetch data for all homes (main coordinator only)."""
if not configured_home_ids:
self._log("warning", "No configured homes found - cannot fetch price data")
async def fetch_home_data(self, home_id: str, current_time: datetime) -> dict[str, Any]:
"""Fetch data for a single home."""
if not home_id:
self._log("warning", "No home ID provided - cannot fetch price data")
return {
"timestamp": current_time,
"homes": {},
"home_id": "",
"price_info": [],
"currency": "EUR",
}
# Ensure we have user_data before fetching price data
@ -200,71 +198,61 @@ class TibberPricesDataFetcher:
self._log("error", msg)
raise TibberPricesApiClientError(msg) from ex
# Get price data for configured homes only (API call with specific home_ids)
# Pass user_data for timezone-aware cursor calculation per home
# Get price data for this home
# Pass user_data for timezone-aware cursor calculation
# At this point, _cached_user_data is guaranteed to be not None (checked above)
if not self._cached_user_data:
msg = "User data unexpectedly None after fetch attempt"
raise TibberPricesApiClientError(msg)
self._log("debug", "Fetching price data for %d configured home(s)", len(configured_home_ids))
price_data = await self.api.async_get_price_info(
home_ids=configured_home_ids,
self._log("debug", "Fetching price data for home %s", home_id)
home_data = await self.api.async_get_price_info(
home_id=home_id,
user_data=self._cached_user_data,
)
all_homes_data = {}
homes_list = price_data.get("homes", {})
# Extract currency for this home from user_data
currency = self._get_currency_for_home(home_id)
# Build home_id -> currency mapping from user_data
currency_map = {}
if self._cached_user_data:
viewer = self._cached_user_data.get("viewer", {})
homes = viewer.get("homes", [])
for home in homes:
home_id = home.get("id")
if home_id:
# Extract currency from nested structure (with fallback to EUR)
currency = (
home.get("currentSubscription", {})
.get("priceInfo", {})
.get("current", {})
.get("currency", "EUR")
)
currency_map[home_id] = currency
self._log("debug", "Extracted currency %s for home %s", currency, home_id)
price_info = home_data.get("price_info", [])
# Process returned data
for home_id, home_price_data in homes_list.items():
# Get currency from user_data (cached)
currency = currency_map.get(home_id, "EUR")
# Store raw price data with currency from user_data
# Enrichment will be done dynamically when data is transformed
home_data = {
"price_info": home_price_data,
"currency": currency,
}
all_homes_data[home_id] = home_data
self._log(
"debug",
"Successfully fetched data for %d home(s)",
len(all_homes_data),
)
self._log("debug", "Successfully fetched data for home %s (%d intervals)", home_id, len(price_info))
return {
"timestamp": current_time,
"homes": all_homes_data,
"home_id": home_id,
"price_info": price_info,
"currency": currency,
}
def _get_currency_for_home(self, home_id: str) -> str:
"""Get currency for a specific home from cached user_data."""
if not self._cached_user_data:
self._log("warning", "No user data cached, using EUR as default currency")
return "EUR"
viewer = self._cached_user_data.get("viewer", {})
homes = viewer.get("homes", [])
for home in homes:
if home.get("id") == home_id:
# Extract currency from nested structure (with fallback to EUR)
currency = (
home.get("currentSubscription", {}).get("priceInfo", {}).get("current", {}).get("currency", "EUR")
)
self._log("debug", "Extracted currency %s for home %s", currency, home_id)
return currency
self._log("warning", "Home %s not found in user data, using EUR as default", home_id)
return "EUR"
async def handle_main_entry_update(
self,
current_time: datetime,
configured_home_ids: set[str],
home_id: str,
transform_fn: Callable[[dict[str, Any]], dict[str, Any]],
) -> dict[str, Any]:
"""Handle update for main entry - fetch data for all homes."""
"""Handle update for main entry - fetch data for this home."""
# Update user data if needed (daily check)
user_data_updated = await self.update_user_data_if_needed(current_time)
@ -284,14 +272,14 @@ class TibberPricesDataFetcher:
await asyncio.sleep(delay)
self._log("debug", "Fetching fresh price data from API")
raw_data = await self.fetch_all_homes_data(configured_home_ids, current_time)
raw_data = await self.fetch_home_data(home_id, current_time)
# Parse timestamps immediately after API fetch
raw_data = helpers.parse_all_timestamps(raw_data, time=self.time)
# Cache the data (now with datetime objects)
self._cached_price_data = raw_data
self._last_price_update = current_time
await self.store_cache()
# Transform for main entry: provide aggregated view
# Transform for main entry
return transform_fn(raw_data)
# Use cached data if available
@ -308,8 +296,9 @@ class TibberPricesDataFetcher:
self._log("warning", "No cached data available and update not triggered - returning empty data")
return {
"timestamp": current_time,
"homes": {},
"priceInfo": {},
"home_id": home_id,
"priceInfo": [],
"currency": "",
}
async def handle_api_error(

View file

@ -6,31 +6,14 @@ import logging
from datetime import timedelta
from typing import TYPE_CHECKING, Any
from custom_components.tibber_prices.const import DOMAIN
from homeassistant.util import dt as dt_util
if TYPE_CHECKING:
from datetime import date
from homeassistant.core import HomeAssistant
from .time_service import TibberPricesTimeService
_LOGGER = logging.getLogger(__name__)
def get_configured_home_ids(hass: HomeAssistant) -> set[str]:
"""Get all home_ids that have active config entries (main + subentries)."""
home_ids = set()
# Collect home_ids from all config entries for this domain
for entry in hass.config_entries.async_entries(DOMAIN):
if home_id := entry.data.get("home_id"):
home_ids.add(home_id)
return home_ids
def get_intervals_for_day_offsets(
coordinator_data: dict[str, Any] | None,
offsets: list[int],
@ -124,27 +107,31 @@ def get_intervals_for_day_offsets(
def needs_tomorrow_data(
cached_price_data: dict[str, Any] | None,
tomorrow_date: date,
) -> bool:
"""Check if tomorrow data is missing or invalid in flat interval list."""
"""
Check if tomorrow data is missing or invalid in flat interval list.
Uses get_intervals_for_day_offsets() to automatically determine tomorrow
based on current date. No explicit date parameter needed.
Args:
cached_price_data: Cached price data with homes structure
Returns:
True if any home is missing tomorrow's data, False otherwise
"""
if not cached_price_data or "homes" not in cached_price_data:
return False
# Check each home's intervals for tomorrow's date
for home_data in cached_price_data["homes"].values():
all_intervals = home_data.get("price_info", [])
# Use helper to get tomorrow's intervals (offset +1 from current date)
coordinator_data = {"priceInfo": home_data.get("price_info", [])}
tomorrow_intervals = get_intervals_for_day_offsets(coordinator_data, [1])
# Check if any interval exists for tomorrow's date
has_tomorrow = False
for interval in all_intervals:
if starts_at := interval.get("startsAt"): # Already datetime in local timezone
interval_date = starts_at.date()
if interval_date == tomorrow_date:
has_tomorrow = True
break
# If no interval for tomorrow found, we need tomorrow data
if not has_tomorrow:
# If no intervals for tomorrow found, we need tomorrow data
if not tomorrow_intervals:
return True
return False
@ -160,30 +147,28 @@ def parse_all_timestamps(price_data: dict[str, Any], *, time: TibberPricesTimeSe
Performance: ~200 timestamps parsed ONCE instead of multiple times per update cycle.
Args:
price_data: Raw API data with string timestamps (flat interval list)
price_data: Raw API data with string timestamps (single-home structure)
time: TibberPricesTimeService for parsing
Returns:
Same structure but with datetime objects instead of strings
"""
if not price_data or "homes" not in price_data:
if not price_data:
return price_data
# Process each home
for home_data in price_data["homes"].values():
# price_info is now a flat list of intervals
price_info = home_data.get("price_info", [])
# Single-home structure: price_info is a flat list of intervals
price_info = price_data.get("price_info", [])
# Skip if price_info is not a list (empty or invalid)
if not isinstance(price_info, list):
continue
# Skip if price_info is not a list (empty or invalid)
if not isinstance(price_info, list):
return price_data
# Parse timestamps in flat interval list
for interval in price_info:
if (starts_at_str := interval.get("startsAt")) and isinstance(starts_at_str, str):
# Parse once, convert to local timezone, store as datetime object
interval["startsAt"] = time.parse_and_localize(starts_at_str)
# If already datetime (e.g., from cache), skip parsing
# Parse timestamps in flat interval list
for interval in price_info:
if (starts_at_str := interval.get("startsAt")) and isinstance(starts_at_str, str):
# Parse once, convert to local timezone, store as datetime object
interval["startsAt"] = time.parse_and_localize(starts_at_str)
# If already datetime (e.g., from cache), skip parsing
return price_data

View file

@ -69,11 +69,8 @@ class TibberPricesLifecycleCalculator(TibberPricesBaseCalculator):
# Priority 4: Check if we're in tomorrow data search mode (after 13:00 and tomorrow missing)
# This should remain stable during the search phase, not flicker with "fresh" every 15 minutes
now_local = coordinator.time.as_local(current_time)
if now_local.hour >= TOMORROW_CHECK_HOUR:
_, tomorrow_midnight = coordinator.time.get_day_boundaries("today")
tomorrow_date = tomorrow_midnight.date()
if coordinator._needs_tomorrow_data(tomorrow_date): # noqa: SLF001 - Internal state access
return "searching_tomorrow"
if now_local.hour >= TOMORROW_CHECK_HOUR and coordinator._needs_tomorrow_data(): # noqa: SLF001 - Internal state access
return "searching_tomorrow"
# Priority 5: Check if data is fresh (within 5 minutes of last API fetch)
# Lower priority than searching_tomorrow to avoid state flickering during search phase
@ -115,9 +112,10 @@ class TibberPricesLifecycleCalculator(TibberPricesBaseCalculator):
now_local = coordinator.time.as_local(current_time)
# Check if tomorrow data is missing
tomorrow_missing = coordinator._needs_tomorrow_data() # noqa: SLF001
# Get tomorrow date for time calculations
_, tomorrow_midnight = coordinator.time.get_day_boundaries("today")
tomorrow_date = tomorrow_midnight.date()
tomorrow_missing = coordinator._needs_tomorrow_data(tomorrow_date) # noqa: SLF001
# Case 1: Before 13:00 today - next poll is today at 13:xx:xx (when tomorrow-search begins)
if now_local.hour < TOMORROW_CHECK_HOUR: