mirror of
https://github.com/jpawlowski/hass.tibber_prices.git
synced 2026-03-29 21:03:40 +00:00
refactoring
This commit is contained in:
parent
7c4ae98417
commit
0116d5ad62
7 changed files with 202 additions and 126 deletions
|
|
@ -32,6 +32,10 @@ from .const import (
|
|||
DEFAULT_PEAK_PRICE_FLEX,
|
||||
)
|
||||
|
||||
MIN_TOMORROW_INTERVALS_HOURLY = 24
|
||||
MIN_TOMORROW_INTERVALS_15MIN = 96
|
||||
TOMORROW_INTERVAL_COUNTS = {MIN_TOMORROW_INTERVALS_HOURLY, MIN_TOMORROW_INTERVALS_15MIN}
|
||||
|
||||
ENTITY_DESCRIPTIONS = (
|
||||
BinarySensorEntityDescription(
|
||||
key="peak_price_period",
|
||||
|
|
@ -52,6 +56,13 @@ ENTITY_DESCRIPTIONS = (
|
|||
device_class=BinarySensorDeviceClass.CONNECTIVITY,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
BinarySensorEntityDescription(
|
||||
key="tomorrow_data_available",
|
||||
translation_key="tomorrow_data_available",
|
||||
name="Tomorrow's Data Available",
|
||||
icon="mdi:calendar-check",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
|
|
@ -95,6 +106,8 @@ class TibberPricesBinarySensor(TibberPricesEntity, BinarySensorEntity):
|
|||
return self._best_price_state
|
||||
if key == "connection":
|
||||
return lambda: True if self.coordinator.data else None
|
||||
if key == "tomorrow_data_available":
|
||||
return self._tomorrow_data_available_state
|
||||
|
||||
return None
|
||||
|
||||
|
|
@ -131,19 +144,32 @@ class TibberPricesBinarySensor(TibberPricesEntity, BinarySensorEntity):
|
|||
threshold = max_price * (1 - flex)
|
||||
return current_price >= threshold
|
||||
|
||||
def _get_price_threshold_state(self, *, threshold_percentage: float, high_is_active: bool) -> bool | None:
|
||||
"""Deprecate: use _best_price_state or _peak_price_state for those sensors."""
|
||||
price_data = self._get_current_price_data()
|
||||
if not price_data:
|
||||
def _tomorrow_data_available_state(self) -> bool | None:
    """Return True if tomorrow's data is fully available, False if not, None if unknown."""
    data = self.coordinator.data
    if not data:
        # No coordinator payload yet -> availability is unknown.
        return None
    subscription = data["data"]["viewer"]["homes"][0]["currentSubscription"]
    tomorrow = subscription["priceInfo"].get("tomorrow", [])
    # A full day is exactly one of the expected interval counts (hourly or 15-min).
    return len(tomorrow) in TOMORROW_INTERVAL_COUNTS
|
||||
|
||||
prices, current_price = price_data
|
||||
threshold_index = int(len(prices) * threshold_percentage)
|
||||
|
||||
if high_is_active:
|
||||
return current_price >= prices[threshold_index]
|
||||
|
||||
return current_price <= prices[threshold_index]
|
||||
def _get_tomorrow_data_available_attributes(self) -> dict | None:
    """Return attributes for tomorrow_data_available binary sensor."""
    data = self.coordinator.data
    if not data:
        return None
    subscription = data["data"]["viewer"]["homes"][0]["currentSubscription"]
    tomorrow = subscription["priceInfo"].get("tomorrow", [])
    count = len(tomorrow)
    # Classify coverage: nothing received, a complete day, or only part of one.
    if count == 0:
        status = "none"
    elif count in TOMORROW_INTERVAL_COUNTS:
        status = "full"
    else:
        status = "partial"
    return {
        "intervals_available": count,
        "status": status,
    }
|
||||
|
||||
def _get_attribute_getter(self) -> Callable | None:
|
||||
"""Return the appropriate attribute getter method based on the sensor type."""
|
||||
|
|
@ -153,6 +179,8 @@ class TibberPricesBinarySensor(TibberPricesEntity, BinarySensorEntity):
|
|||
return lambda: self._get_price_intervals_attributes(reverse_sort=True)
|
||||
if key == "best_price_period":
|
||||
return lambda: self._get_price_intervals_attributes(reverse_sort=False)
|
||||
if key == "tomorrow_data_available":
|
||||
return self._get_tomorrow_data_available_attributes
|
||||
|
||||
return None
|
||||
|
||||
|
|
@ -187,17 +215,16 @@ class TibberPricesBinarySensor(TibberPricesEntity, BinarySensorEntity):
|
|||
interval: dict,
|
||||
annotation_ctx: dict,
|
||||
) -> dict:
|
||||
"""Annotate a single interval with all required attributes."""
|
||||
"""Annotate a single interval with all required attributes for Home Assistant UI and automations."""
|
||||
interval_copy = interval.copy()
|
||||
interval_remaining = annotation_ctx["interval_count"] - annotation_ctx["interval_idx"]
|
||||
# Extract all interval-related fields first
|
||||
# Extract interval-related fields for attribute ordering and clarity
|
||||
interval_start = interval_copy.pop("interval_start", None)
|
||||
interval_end = interval_copy.pop("interval_end", None)
|
||||
interval_hour = interval_copy.pop("interval_hour", None)
|
||||
interval_minute = interval_copy.pop("interval_minute", None)
|
||||
interval_time = interval_copy.pop("interval_time", None)
|
||||
interval_length_minute = interval_copy.pop("interval_length_minute", annotation_ctx["interval_length"])
|
||||
# Extract price
|
||||
price = interval_copy.pop("price", None)
|
||||
new_interval = {
|
||||
"period_start": annotation_ctx["period_start"],
|
||||
|
|
@ -221,18 +248,20 @@ class TibberPricesBinarySensor(TibberPricesEntity, BinarySensorEntity):
|
|||
"interval_length_minute": interval_length_minute,
|
||||
"price": price,
|
||||
}
|
||||
# Add any remaining fields (should be only extra/unknowns)
|
||||
# Merge any extra fields from the original interval (future-proofing)
|
||||
new_interval.update(interval_copy)
|
||||
new_interval["price_ct"] = round(new_interval["price"] * 100, 2)
|
||||
price_diff = new_interval["price"] - annotation_ctx["ref_price"]
|
||||
new_interval[annotation_ctx["diff_key"]] = round(price_diff, 4)
|
||||
new_interval[annotation_ctx["diff_ct_key"]] = round(price_diff * 100, 2)
|
||||
# Calculate percent difference from reference price (min or max)
|
||||
price_diff_percent = (
|
||||
((new_interval["price"] - annotation_ctx["ref_price"]) / annotation_ctx["ref_price"]) * 100
|
||||
if annotation_ctx["ref_price"] != 0
|
||||
else 0.0
|
||||
)
|
||||
new_interval[annotation_ctx["diff_pct_key"]] = round(price_diff_percent, 2)
|
||||
# Calculate difference from average price for the day
|
||||
avg_diff = new_interval["price"] - annotation_ctx["avg_price"]
|
||||
new_interval["price_diff_from_avg"] = round(avg_diff, 4)
|
||||
new_interval["price_diff_from_avg_ct"] = round(avg_diff * 100, 2)
|
||||
|
|
|
|||
|
|
@ -22,6 +22,8 @@ from .api import (
|
|||
from .const import DOMAIN, LOGGER
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Callable
|
||||
|
||||
from .data import TibberPricesConfigEntry
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
|
@ -250,7 +252,10 @@ class TibberPricesDataUpdateCoordinator(DataUpdateCoordinator[TibberPricesData])
|
|||
async def _async_initialize(self) -> None:
|
||||
"""Load stored data."""
|
||||
stored = await self._store.async_load()
|
||||
LOGGER.debug("Loading stored data: %s", stored)
|
||||
if stored is None:
|
||||
LOGGER.warning("No cache file found or cache is empty on startup.")
|
||||
else:
|
||||
LOGGER.debug("Loading stored data: %s", stored)
|
||||
|
||||
if stored:
|
||||
# Load cached data
|
||||
|
|
@ -285,6 +290,14 @@ class TibberPricesDataUpdateCoordinator(DataUpdateCoordinator[TibberPricesData])
|
|||
self._last_rating_update_monthly,
|
||||
)
|
||||
|
||||
# Defensive: warn if any required data is missing
|
||||
if self._cached_price_data is None:
|
||||
LOGGER.warning("Cached price data missing after cache load!")
|
||||
if self._last_price_update is None:
|
||||
LOGGER.warning("Price update timestamp missing after cache load!")
|
||||
else:
|
||||
LOGGER.info("No cache loaded; will fetch fresh data on first update.")
|
||||
|
||||
async def _async_refresh_hourly(self, now: datetime | None = None) -> None:
|
||||
"""
|
||||
Handle the hourly refresh.
|
||||
|
|
@ -495,65 +508,76 @@ class TibberPricesDataUpdateCoordinator(DataUpdateCoordinator[TibberPricesData])
|
|||
|
||||
async def _store_cache(self) -> None:
    """Store cache data.

    Before persisting, any timestamp that is missing while its cached payload
    exists is recovered from the payload itself, so a restored cache stays
    self-consistent. The span previously mixed the pre-refactor inline
    recovery code with the refactored helper and carried duplicate dict keys
    (later literals silently override earlier ones); this is the coherent
    post-refactor version.
    """

    def _recover_and_log_timestamp(
        data: TibberPricesData | None,
        last_update: datetime | None,
        get_latest_fn: Callable[[TibberPricesData], datetime | None],
        label: str,
    ) -> datetime | None:
        # Only attempt recovery when we have data but lost its timestamp.
        if data and not last_update:
            latest_timestamp = get_latest_fn(data)
            if latest_timestamp:
                LOGGER.debug("Setting missing %s timestamp to: %s", label, latest_timestamp)
                return latest_timestamp
            LOGGER.warning("Could not recover %s timestamp from data!", label)
        return last_update

    self._last_price_update = _recover_and_log_timestamp(
        self._cached_price_data,
        self._last_price_update,
        _get_latest_timestamp_from_prices,
        "price update",
    )
    self._last_rating_update_hourly = _recover_and_log_timestamp(
        self._cached_rating_data_hourly,
        self._last_rating_update_hourly,
        lambda d: self._get_latest_timestamp_from_rating_type(d, "hourly"),
        "hourly rating",
    )
    self._last_rating_update_daily = _recover_and_log_timestamp(
        self._cached_rating_data_daily,
        self._last_rating_update_daily,
        lambda d: self._get_latest_timestamp_from_rating_type(d, "daily"),
        "daily rating",
    )
    self._last_rating_update_monthly = _recover_and_log_timestamp(
        self._cached_rating_data_monthly,
        self._last_rating_update_monthly,
        lambda d: self._get_latest_timestamp_from_rating_type(d, "monthly"),
        "monthly rating",
    )

    data = {
        "price_data": self._cached_price_data,
        "rating_data_hourly": self._cached_rating_data_hourly,
        "rating_data_daily": self._cached_rating_data_daily,
        "rating_data_monthly": self._cached_rating_data_monthly,
        "last_price_update": (self._last_price_update.isoformat() if self._last_price_update else None),
        "last_rating_update_hourly": (
            self._last_rating_update_hourly.isoformat() if self._last_rating_update_hourly else None
        ),
        "last_rating_update_daily": (
            self._last_rating_update_daily.isoformat() if self._last_rating_update_daily else None
        ),
        "last_rating_update_monthly": (
            self._last_rating_update_monthly.isoformat() if self._last_rating_update_monthly else None
        ),
    }
    LOGGER.debug(
        "Storing cache data with timestamps: %s",
        {k: v for k, v in data.items() if k.startswith("last_")},
    )
    # Defensive: warn if any required data is missing before saving
    if data["price_data"] is None:
        LOGGER.warning("Attempting to store cache with missing price_data!")
    if data["last_price_update"] is None:
        LOGGER.warning("Attempting to store cache with missing last_price_update!")
    try:
        await self._store.async_save(data)
        LOGGER.debug("Cache successfully written to disk.")
    except OSError as ex:
        LOGGER.error("Failed to write cache to disk: %s", ex)
|
||||
|
||||
@callback
|
||||
def _should_update_price_data(self, current_time: datetime) -> bool:
|
||||
|
|
@ -920,3 +944,49 @@ class TibberPricesDataUpdateCoordinator(DataUpdateCoordinator[TibberPricesData])
|
|||
|
||||
except (KeyError, TypeError, ValueError) as ex:
|
||||
LOGGER.error("Error during midnight data rotation in hourly update: %s", ex)
|
||||
|
||||
def get_all_intervals(self) -> list[dict]:
    """Return a combined, sorted list of all price intervals for yesterday, today, and tomorrow."""
    if not self.data:
        return []
    price_info = self.data["data"]["viewer"]["homes"][0]["currentSubscription"]["priceInfo"]
    combined: list[dict] = []
    for day in ("yesterday", "today", "tomorrow"):
        combined.extend(price_info.get(day, []))
    # Chronological order by the interval's ISO start timestamp.
    combined.sort(key=lambda entry: entry["startsAt"])
    return combined
|
||||
|
||||
def get_interval_granularity(self) -> int | None:
    """Return the interval granularity in minutes (e.g., 15 or 60) for today's data."""
    if not self.data:
        return None
    subscription = self.data["data"]["viewer"]["homes"][0]["currentSubscription"]
    today = subscription["priceInfo"].get("today", [])
    if not today:
        return None
    # NOTE(review): imported at call time, presumably to avoid a circular
    # import between coordinator and sensor modules — confirm before moving.
    from .sensor import detect_interval_granularity

    return detect_interval_granularity(today)
|
||||
|
||||
def get_current_interval_data(self) -> dict | None:
    """Return the price data for the current interval."""
    if not self.data:
        return None
    subscription = self.data["data"]["viewer"]["homes"][0]["currentSubscription"]
    price_info = subscription["priceInfo"]
    interval_length = self.get_interval_granularity()
    # NOTE(review): imported at call time, presumably to avoid a circular
    # import with the sensor module — confirm before moving to module level.
    from .sensor import find_price_data_for_interval

    return find_price_data_for_interval(price_info, dt_util.now(), interval_length)
|
||||
|
||||
def get_combined_price_info(self) -> dict:
    """Return a dict with all intervals under a single key 'all'."""
    intervals = self.get_all_intervals()
    return {"all": intervals}
|
||||
|
||||
def is_tomorrow_data_available(self) -> bool | None:
|
||||
"""Return True if tomorrow's data is fully available, False if not, None if unknown."""
|
||||
if not self.data:
|
||||
return None
|
||||
price_info = self.data["data"]["viewer"]["homes"][0]["currentSubscription"]["priceInfo"]
|
||||
tomorrow_prices = price_info.get("tomorrow", [])
|
||||
interval_count = len(tomorrow_prices)
|
||||
# Use the same logic as in binary_sensor.py
|
||||
min_tomorrow_intervals_hourly = 24
|
||||
min_tomorrow_intervals_15min = 96
|
||||
tomorrow_interval_counts = {min_tomorrow_intervals_hourly, min_tomorrow_intervals_15min}
|
||||
return interval_count in tomorrow_interval_counts
|
||||
|
|
|
|||
|
|
@ -102,12 +102,6 @@
|
|||
"long_description": "Zeigt den Zeitstempel des letzten Preisaktualisierung von Tibber",
|
||||
"usage_tips": "Nutze dies, um zu überprüfen, wann die Preisinformationen zuletzt aktualisiert wurden"
|
||||
},
|
||||
"tomorrow_data_available": {
|
||||
"name": "Datenstatus für morgen",
|
||||
"description": "Ob Preisdaten für morgen verfügbar sind",
|
||||
"long_description": "Gibt an, ob Preisdaten für den folgenden Tag von Tibber empfangen wurden",
|
||||
"usage_tips": "Nutze dies, um zu überprüfen, ob die Preise für morgen für die Planung verfügbar sind"
|
||||
},
|
||||
"price_forecast": {
|
||||
"name": "Preisprognose",
|
||||
"description": "Prognose der kommenden Strompreise",
|
||||
|
|
@ -116,6 +110,12 @@
|
|||
}
|
||||
},
|
||||
"binary_sensor": {
|
||||
"tomorrow_data_available": {
|
||||
"name": "Datenstatus für morgen",
|
||||
"description": "Ob Preisdaten für morgen verfügbar sind",
|
||||
"long_description": "Gibt an, ob Preisdaten für den folgenden Tag von Tibber empfangen wurden",
|
||||
"usage_tips": "Nutze dies, um zu überprüfen, ob die Preise für morgen für die Planung verfügbar sind"
|
||||
},
|
||||
"peak_price_period": {
|
||||
"name": "Spitzenpreis-Periode",
|
||||
"description": "Ob das aktuelle Intervall zu den teuersten des Tages gehört",
|
||||
|
|
|
|||
|
|
@ -102,12 +102,6 @@
|
|||
"long_description": "Shows the timestamp of the most recent price data update from Tibber",
|
||||
"usage_tips": "Use this to check when price information was last updated"
|
||||
},
|
||||
"tomorrow_data_available": {
|
||||
"name": "Tomorrow's Data Status",
|
||||
"description": "Whether price data for tomorrow is available",
|
||||
"long_description": "Indicates if price data for the following day has been received from Tibber",
|
||||
"usage_tips": "Use this to check if tomorrow's prices are available for planning"
|
||||
},
|
||||
"price_forecast": {
|
||||
"name": "Price Forecast",
|
||||
"description": "Forecast of upcoming electricity prices",
|
||||
|
|
@ -116,6 +110,12 @@
|
|||
}
|
||||
},
|
||||
"binary_sensor": {
|
||||
"tomorrow_data_available": {
|
||||
"name": "Tomorrow's Data Status",
|
||||
"description": "Whether price data for tomorrow is available",
|
||||
"long_description": "Indicates if price data for the following day has been received from Tibber",
|
||||
"usage_tips": "Use this to check if tomorrow's prices are available for planning"
|
||||
},
|
||||
"peak_price_period": {
|
||||
"name": "Peak Price Periode",
|
||||
"description": "Whether the current interval is among the most expensive of the day",
|
||||
|
|
|
|||
|
|
@ -185,13 +185,6 @@ DIAGNOSTIC_SENSORS = (
|
|||
device_class=SensorDeviceClass.TIMESTAMP,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="tomorrow_data_available",
|
||||
translation_key="tomorrow_data_available",
|
||||
name="Tomorrow's Data Status",
|
||||
icon="mdi:calendar-check",
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="price_forecast",
|
||||
translation_key="price_forecast",
|
||||
|
|
@ -270,7 +263,6 @@ class TibberPricesSensor(TibberPricesEntity, SensorEntity):
|
|||
"monthly_rating": lambda: self._get_rating_value(rating_type="monthly"),
|
||||
# Diagnostic sensors
|
||||
"data_timestamp": self._get_data_timestamp,
|
||||
"tomorrow_data_available": self._get_tomorrow_data_status,
|
||||
# Price forecast sensor
|
||||
"price_forecast": self._get_price_forecast_value,
|
||||
}
|
||||
|
|
@ -278,16 +270,8 @@ class TibberPricesSensor(TibberPricesEntity, SensorEntity):
|
|||
return handlers.get(key)
|
||||
|
||||
def _get_current_interval_data(self) -> dict | None:
|
||||
"""Get the price data for the current interval using adaptive interval detection."""
|
||||
if not self.coordinator.data:
|
||||
return None
|
||||
|
||||
# Get the current time and price info
|
||||
now = dt_util.now()
|
||||
price_info = self.coordinator.data["data"]["viewer"]["homes"][0]["currentSubscription"]["priceInfo"]
|
||||
|
||||
# Use our adaptive price data finder
|
||||
return find_price_data_for_interval(price_info, now)
|
||||
"""Get the price data for the current interval using coordinator utility."""
|
||||
return self.coordinator.get_current_interval_data()
|
||||
|
||||
def _get_price_level_value(self) -> str | None:
|
||||
"""
|
||||
|
|
@ -301,7 +285,6 @@ class TibberPricesSensor(TibberPricesEntity, SensorEntity):
|
|||
"""
|
||||
current_interval_data = self._get_current_interval_data()
|
||||
if not current_interval_data or "level" not in current_interval_data:
|
||||
self._last_price_level = None
|
||||
return None
|
||||
level = current_interval_data["level"]
|
||||
self._last_price_level = level
|
||||
|
|
@ -384,23 +367,31 @@ class TibberPricesSensor(TibberPricesEntity, SensorEntity):
|
|||
if not self.coordinator.data:
|
||||
return None
|
||||
|
||||
price_info = self.coordinator.data["data"]["viewer"]["homes"][0]["currentSubscription"]["priceInfo"]
|
||||
# Use coordinator utility for all intervals and granularity
|
||||
all_intervals = self.coordinator.get_all_intervals()
|
||||
granularity = self.coordinator.get_interval_granularity()
|
||||
if not all_intervals or granularity is None:
|
||||
return None
|
||||
|
||||
# Determine data granularity
|
||||
today_prices = price_info.get("today", [])
|
||||
data_granularity = detect_interval_granularity(today_prices) if today_prices else MINUTES_PER_INTERVAL
|
||||
|
||||
# Use HomeAssistant's dt_util to get the current time in the user's timezone
|
||||
now = dt_util.now()
|
||||
|
||||
# Calculate the target time based on detected granularity
|
||||
target_datetime = now + timedelta(minutes=interval_offset * data_granularity)
|
||||
# Find the current interval index
|
||||
current_idx = None
|
||||
for idx, interval in enumerate(all_intervals):
|
||||
starts_at = interval.get("startsAt")
|
||||
if starts_at:
|
||||
ts = dt_util.parse_datetime(starts_at)
|
||||
if ts and ts <= now < ts + timedelta(minutes=granularity):
|
||||
current_idx = idx
|
||||
break
|
||||
|
||||
# Find appropriate price data
|
||||
price_data = find_price_data_for_interval(price_info, target_datetime, data_granularity)
|
||||
if current_idx is None:
|
||||
return None
|
||||
|
||||
if price_data:
|
||||
return self._get_price_value(float(price_data["total"]), in_euro=in_euro)
|
||||
target_idx = current_idx + interval_offset
|
||||
if 0 <= target_idx < len(all_intervals):
|
||||
price = float(all_intervals[target_idx]["total"])
|
||||
return price if in_euro else round(price * 100, 2)
|
||||
|
||||
return None
|
||||
|
||||
|
|
@ -550,18 +541,6 @@ class TibberPricesSensor(TibberPricesEntity, SensorEntity):
|
|||
|
||||
return dt_util.as_utc(latest_timestamp) if latest_timestamp else None
|
||||
|
||||
def _get_tomorrow_data_status(self) -> str | None:
|
||||
"""Get tomorrow's data availability status."""
|
||||
if not self.coordinator.data:
|
||||
return None
|
||||
|
||||
price_info = self.coordinator.data["data"]["viewer"]["homes"][0]["currentSubscription"]["priceInfo"]
|
||||
tomorrow_prices = price_info.get("tomorrow", [])
|
||||
|
||||
if not tomorrow_prices:
|
||||
return "No"
|
||||
return "Yes" if len(tomorrow_prices) == HOURS_IN_DAY else "Partial"
|
||||
|
||||
# Add method to get future price intervals
|
||||
def _get_price_forecast_value(self) -> str | None:
|
||||
"""Get the highest or lowest price status for the price forecast entity."""
|
||||
|
|
@ -869,9 +848,7 @@ class TibberPricesSensor(TibberPricesEntity, SensorEntity):
|
|||
# Group sensors by type and delegate to specific handlers
|
||||
if key in ["current_price", "current_price_eur", "price_level"]:
|
||||
self._add_current_price_attributes(attributes)
|
||||
elif any(
|
||||
pattern in key for pattern in ["_price_today", "rating", "data_timestamp", "tomorrow_data_available"]
|
||||
):
|
||||
elif any(pattern in key for pattern in ["_price_today", "rating", "data_timestamp"]):
|
||||
self._add_statistics_attributes(attributes)
|
||||
elif key == "price_forecast":
|
||||
self._add_price_forecast_attributes(attributes)
|
||||
|
|
|
|||
|
|
@ -76,9 +76,6 @@
|
|||
"data_timestamp": {
|
||||
"name": "Zeitstempel der neuesten Daten"
|
||||
},
|
||||
"tomorrow_data_available": {
|
||||
"name": "Datenstatus für morgen"
|
||||
},
|
||||
"next_interval_price": {
|
||||
"name": "Strompreis nächstes Intervall"
|
||||
},
|
||||
|
|
@ -95,6 +92,9 @@
|
|||
},
|
||||
"connection": {
|
||||
"name": "Tibber API-Verbindung"
|
||||
},
|
||||
"tomorrow_data_available": {
|
||||
"name": "Daten für morgen verfügbar"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -98,9 +98,6 @@
|
|||
"data_timestamp": {
|
||||
"name": "Latest Data Available"
|
||||
},
|
||||
"tomorrow_data_available": {
|
||||
"name": "Tomorrow's Data Status"
|
||||
},
|
||||
"price_forecast": {
|
||||
"name": "Price Forecast"
|
||||
}
|
||||
|
|
@ -114,6 +111,9 @@
|
|||
},
|
||||
"connection": {
|
||||
"name": "Tibber API Connection"
|
||||
},
|
||||
"tomorrow_data_available": {
|
||||
"name": "Tomorrow's Data Available"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
Loading…
Reference in a new issue