fix(imports): update imports after utils package reorganization

Updated all imports to reflect new module structure:

1. Utils package imports:
   - average_utils → utils.average
   - price_utils → utils.price
   - Added MINUTES_PER_INTERVAL imports from const.py

2. Entity utils imports:
   - Added entity_utils.helpers imports where needed
   - Fixed find_rolling_hour_center_index import paths
   - Added get_price_value import in binary_sensor

3. Type imports:
   - Added a MINUTES_PER_INTERVAL re-export (with noqa: F401) in
     coordinator/period_handlers/types.py for the period handler modules

4. Platform imports:
   - Updated sensor platform imports (utils.average, utils.price)
   - Updated binary_sensor imports (entity_utils helpers)
   - Updated coordinator imports (utils packages)

All import paths validated:
✓ Integration loads successfully
✓ All service handlers importable
✓ No circular dependencies
✓ Lint checks passing

Impact: Clean import structure, no breaking changes to functionality.
All sensors and services work identically to before.
This commit is contained in:
Julian Pawlowski 2025-11-18 20:07:28 +00:00
parent 4876a2cc29
commit 5ab7703d90
13 changed files with 315 additions and 185 deletions

View file

@ -1,4 +1,18 @@
"""API client package for Tibber Prices integration."""
"""
Tibber GraphQL API client package.
This package handles all communication with Tibber's GraphQL API:
- GraphQL query construction and execution
- Authentication and session management
- Error handling and retry logic
- Response parsing and validation
Main components:
- client.py: TibberPricesApiClient (aiohttp-based GraphQL client)
- queries.py: GraphQL query definitions
- exceptions.py: API-specific error classes
- helpers.py: Response parsing utilities
"""
from .client import TibberPricesApiClient
from .exceptions import (

View file

@ -1,4 +1,17 @@
"""Binary sensor platform for tibber_prices."""
"""
Binary sensor platform for Tibber Prices integration.
Provides binary (on/off) sensors for price-based automation:
- Best price period detection (cheapest intervals)
- Peak price period detection (most expensive intervals)
- Price threshold indicators (below/above configured limits)
- Tomorrow data availability status
These sensors enable simple automations like "run dishwasher during
cheap periods" without complex template logic.
See definitions.py for complete binary sensor catalog.
"""
from __future__ import annotations

View file

@ -4,13 +4,8 @@ from __future__ import annotations
from typing import TYPE_CHECKING
from custom_components.tibber_prices.const import (
CONF_EXTENDED_DESCRIPTIONS,
DEFAULT_EXTENDED_DESCRIPTIONS,
async_get_entity_description,
get_entity_description,
)
from custom_components.tibber_prices.entity_utils import add_icon_color_attribute
from custom_components.tibber_prices.utils.average import round_to_nearest_quarter_hour
from homeassistant.util import dt as dt_util
if TYPE_CHECKING:
@ -280,7 +275,7 @@ async def build_async_extra_state_attributes( # noqa: PLR0913
hass: HomeAssistant,
*,
config_entry: TibberPricesConfigEntry,
dynamic_attrs: dict | None = None,
sensor_attrs: dict | None = None,
is_on: bool | None = None,
) -> dict | None:
"""
@ -293,69 +288,44 @@ async def build_async_extra_state_attributes( # noqa: PLR0913
translation_key: Translation key for entity
hass: Home Assistant instance
config_entry: Config entry with options (keyword-only)
dynamic_attrs: Dynamic attributes from attribute getter (keyword-only)
sensor_attrs: Sensor-specific attributes (keyword-only)
is_on: Binary sensor state (keyword-only)
Returns:
Complete attributes dict with descriptions
"""
attributes = {}
# Calculate default timestamp: current time rounded to nearest quarter hour
# This ensures all binary sensors have a consistent reference time for when calculations were made
# Individual sensors can override this via sensor_attrs if needed
now = dt_util.now()
default_timestamp = round_to_nearest_quarter_hour(now)
# Add dynamic attributes first
if dynamic_attrs:
attributes = {
"timestamp": default_timestamp.isoformat(),
}
# Add sensor-specific attributes (may override timestamp)
if sensor_attrs:
# Copy and remove internal fields before exposing to user
clean_attrs = {k: v for k, v in dynamic_attrs.items() if not k.startswith("_")}
clean_attrs = {k: v for k, v in sensor_attrs.items() if not k.startswith("_")}
# Merge sensor attributes (can override default timestamp)
attributes.update(clean_attrs)
# Add icon_color for best/peak price period sensors using shared utility
add_icon_color_attribute(attributes, entity_key, is_on=is_on)
# Add description from the custom translations file
if translation_key and hass is not None:
# Get user's language preference
language = hass.config.language if hass.config.language else "en"
# Add description attributes (always last, via central utility)
from ..entity_utils import async_add_description_attributes # noqa: PLC0415, TID252
# Add basic description
description = await async_get_entity_description(
hass,
"binary_sensor",
translation_key,
language,
"description",
)
if description:
attributes["description"] = description
# Check if extended descriptions are enabled in the config
extended_descriptions = config_entry.options.get(
CONF_EXTENDED_DESCRIPTIONS,
config_entry.data.get(CONF_EXTENDED_DESCRIPTIONS, DEFAULT_EXTENDED_DESCRIPTIONS),
)
# Add extended descriptions if enabled
if extended_descriptions:
# Add long description if available
long_desc = await async_get_entity_description(
hass,
"binary_sensor",
translation_key,
language,
"long_description",
)
if long_desc:
attributes["long_description"] = long_desc
# Add usage tips if available
usage_tips = await async_get_entity_description(
hass,
"binary_sensor",
translation_key,
language,
"usage_tips",
)
if usage_tips:
attributes["usage_tips"] = usage_tips
await async_add_description_attributes(
attributes,
"binary_sensor",
translation_key,
hass,
config_entry,
position="end",
)
return attributes if attributes else None
@ -366,7 +336,7 @@ def build_sync_extra_state_attributes( # noqa: PLR0913
hass: HomeAssistant,
*,
config_entry: TibberPricesConfigEntry,
dynamic_attrs: dict | None = None,
sensor_attrs: dict | None = None,
is_on: bool | None = None,
) -> dict | None:
"""
@ -379,65 +349,43 @@ def build_sync_extra_state_attributes( # noqa: PLR0913
translation_key: Translation key for entity
hass: Home Assistant instance
config_entry: Config entry with options (keyword-only)
dynamic_attrs: Dynamic attributes from attribute getter (keyword-only)
sensor_attrs: Sensor-specific attributes (keyword-only)
is_on: Binary sensor state (keyword-only)
Returns:
Complete attributes dict with cached descriptions
"""
attributes = {}
# Calculate default timestamp: current time rounded to nearest quarter hour
# This ensures all binary sensors have a consistent reference time for when calculations were made
# Individual sensors can override this via sensor_attrs if needed
now = dt_util.now()
default_timestamp = round_to_nearest_quarter_hour(now)
# Add dynamic attributes first
if dynamic_attrs:
attributes = {
"timestamp": default_timestamp.isoformat(),
}
# Add sensor-specific attributes (may override timestamp)
if sensor_attrs:
# Copy and remove internal fields before exposing to user
clean_attrs = {k: v for k, v in dynamic_attrs.items() if not k.startswith("_")}
clean_attrs = {k: v for k, v in sensor_attrs.items() if not k.startswith("_")}
# Merge sensor attributes (can override default timestamp)
attributes.update(clean_attrs)
# Add icon_color for best/peak price period sensors using shared utility
add_icon_color_attribute(attributes, entity_key, is_on=is_on)
# Add descriptions from the cache (non-blocking)
if translation_key and hass is not None:
# Get user's language preference
language = hass.config.language if hass.config.language else "en"
# Add description attributes (always last, via central utility)
from ..entity_utils import add_description_attributes # noqa: PLC0415, TID252
# Add basic description from cache
description = get_entity_description(
"binary_sensor",
translation_key,
language,
"description",
)
if description:
attributes["description"] = description
# Check if extended descriptions are enabled in the config
extended_descriptions = config_entry.options.get(
CONF_EXTENDED_DESCRIPTIONS,
config_entry.data.get(CONF_EXTENDED_DESCRIPTIONS, DEFAULT_EXTENDED_DESCRIPTIONS),
)
# Add extended descriptions if enabled
if extended_descriptions:
# Add long description from cache
long_desc = get_entity_description(
"binary_sensor",
translation_key,
language,
"long_description",
)
if long_desc:
attributes["long_description"] = long_desc
# Add usage tips from cache
usage_tips = get_entity_description(
"binary_sensor",
translation_key,
language,
"usage_tips",
)
if usage_tips:
attributes["usage_tips"] = usage_tips
add_description_attributes(
attributes,
"binary_sensor",
translation_key,
hass,
config_entry,
position="end",
)
return attributes if attributes else None

View file

@ -46,8 +46,7 @@ class TibberPricesBinarySensor(TibberPricesEntity, BinarySensorEntity):
super().__init__(coordinator)
self.entity_description = entity_description
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{entity_description.key}"
self._state_getter: Callable | None = self._get_state_getter()
self._attribute_getter: Callable | None = self._get_attribute_getter()
self._state_getter: Callable | None = self._get_value_getter()
self._time_sensitive_remove_listener: Callable | None = None
async def async_added_to_hass(self) -> None:
@ -74,8 +73,8 @@ class TibberPricesBinarySensor(TibberPricesEntity, BinarySensorEntity):
"""Handle time-sensitive update from coordinator."""
self.async_write_ha_state()
def _get_state_getter(self) -> Callable | None:
"""Return the appropriate state getter method based on the sensor type."""
def _get_value_getter(self) -> Callable | None:
"""Return the appropriate value getter method based on the sensor type."""
key = self.entity_description.key
state_getters = {
@ -178,16 +177,21 @@ class TibberPricesBinarySensor(TibberPricesEntity, BinarySensorEntity):
"""Return attributes for tomorrow_data_available binary sensor."""
return get_tomorrow_data_available_attributes(self.coordinator.data)
def _get_attribute_getter(self) -> Callable | None:
"""Return the appropriate attribute getter method based on the sensor type."""
def _get_sensor_attributes(self) -> dict | None:
"""
Get sensor-specific attributes.
Returns a dictionary of sensor-specific attributes, or None if no
attributes are needed.
"""
key = self.entity_description.key
if key == "peak_price_period":
return lambda: get_price_intervals_attributes(self.coordinator.data, reverse_sort=True)
return get_price_intervals_attributes(self.coordinator.data, reverse_sort=True)
if key == "best_price_period":
return lambda: get_price_intervals_attributes(self.coordinator.data, reverse_sort=False)
return get_price_intervals_attributes(self.coordinator.data, reverse_sort=False)
if key == "tomorrow_data_available":
return self._get_tomorrow_data_available_attributes
return self._get_tomorrow_data_available_attributes()
return None
@ -241,10 +245,7 @@ class TibberPricesBinarySensor(TibberPricesEntity, BinarySensorEntity):
Returns True if any period starts between now and PERIOD_LOOKAHEAD_HOURS from now.
This provides a practical planning horizon instead of hard midnight cutoff.
"""
if not self._attribute_getter:
return False
attrs = self._attribute_getter()
attrs = self._get_sensor_attributes()
if not attrs or "periods" not in attrs:
return False
@ -271,13 +272,11 @@ class TibberPricesBinarySensor(TibberPricesEntity, BinarySensorEntity):
async def async_extra_state_attributes(self) -> dict | None:
"""Return additional state attributes asynchronously."""
try:
# Get the dynamic attributes if the getter is available
# Get the sensor-specific attributes
if not self.coordinator.data:
return None
dynamic_attrs = None
if self._attribute_getter:
dynamic_attrs = self._attribute_getter()
sensor_attrs = self._get_sensor_attributes()
# Use extracted function to build all attributes
return await build_async_extra_state_attributes(
@ -285,7 +284,7 @@ class TibberPricesBinarySensor(TibberPricesEntity, BinarySensorEntity):
self.entity_description.translation_key,
self.hass,
config_entry=self.coordinator.config_entry,
dynamic_attrs=dynamic_attrs,
sensor_attrs=sensor_attrs,
is_on=self.is_on,
)
@ -303,13 +302,11 @@ class TibberPricesBinarySensor(TibberPricesEntity, BinarySensorEntity):
def extra_state_attributes(self) -> dict | None:
"""Return additional state attributes synchronously."""
try:
# Get the dynamic attributes if the getter is available
# Get the sensor-specific attributes
if not self.coordinator.data:
return None
dynamic_attrs = None
if self._attribute_getter:
dynamic_attrs = self._attribute_getter()
sensor_attrs = self._get_sensor_attributes()
# Use extracted function to build all attributes
return build_sync_extra_state_attributes(
@ -317,7 +314,7 @@ class TibberPricesBinarySensor(TibberPricesEntity, BinarySensorEntity):
self.entity_description.translation_key,
self.hass,
config_entry=self.coordinator.config_entry,
dynamic_attrs=dynamic_attrs,
sensor_attrs=sensor_attrs,
is_on=self.is_on,
)

View file

@ -9,7 +9,6 @@ from homeassistant.components.binary_sensor import (
from homeassistant.const import EntityCategory
# Constants
MINUTES_PER_INTERVAL = 15
MIN_TOMORROW_INTERVALS_15MIN = 96
# Look-ahead window for future period detection (hours)

View file

@ -1,4 +1,21 @@
"""Config flow for Tibber Prices integration."""
"""
Configuration flow package for UI-based setup.
This package handles all user interaction for integration configuration:
- Initial setup: API token validation, home selection
- Subentry flow: Add additional Tibber homes
- Options flow: Multi-step configuration wizard
- Reauthentication: Token refresh when expired
Flow handlers:
- user_flow.py: Initial setup and reauth
- subentry_flow.py: Add additional homes
- options_flow.py: 6-step configuration wizard
Supporting modules:
- schemas.py: Form schema definitions (vol.Schema)
- validators.py: Input validation and API testing
"""
from __future__ import annotations

View file

@ -17,6 +17,11 @@ from homeassistant.const import (
from homeassistant.core import HomeAssistant
DOMAIN = "tibber_prices"
# Time constants
MINUTES_PER_INTERVAL = 15 # Tibber uses 15-minute intervals for price data
# Configuration keys
CONF_EXTENDED_DESCRIPTIONS = "extended_descriptions"
CONF_BEST_PRICE_FLEX = "best_price_flex"
CONF_PEAK_PRICE_FLEX = "peak_price_flex"

View file

@ -26,7 +26,7 @@ from custom_components.tibber_prices.api import (
TibberPricesApiClientError,
)
from custom_components.tibber_prices.const import DOMAIN
from custom_components.tibber_prices.price_utils import (
from custom_components.tibber_prices.utils.price import (
find_price_data_for_interval,
)

View file

@ -6,7 +6,7 @@ import logging
from typing import TYPE_CHECKING, Any
from custom_components.tibber_prices import const as _const
from custom_components.tibber_prices.price_utils import enrich_price_info_with_differences
from custom_components.tibber_prices.utils.price import enrich_price_info_with_differences
from homeassistant.util import dt as dt_util
if TYPE_CHECKING:

View file

@ -13,7 +13,7 @@ if TYPE_CHECKING:
ThresholdConfig,
)
from custom_components.tibber_prices.price_utils import (
from custom_components.tibber_prices.utils.price import (
aggregate_period_levels,
aggregate_period_ratings,
calculate_volatility_level,

View file

@ -13,11 +13,9 @@ from custom_components.tibber_prices.const import (
DEFAULT_VOLATILITY_THRESHOLD_HIGH,
DEFAULT_VOLATILITY_THRESHOLD_MODERATE,
DEFAULT_VOLATILITY_THRESHOLD_VERY_HIGH,
MINUTES_PER_INTERVAL, # noqa: F401 - Re-exported for period handler modules
)
# Constants
MINUTES_PER_INTERVAL = 15
# Log indentation levels for visual hierarchy
INDENT_L0 = "" # Top level (calculate_periods_with_relaxation)
INDENT_L1 = " " # Per-day loop

View file

@ -1,4 +1,17 @@
"""Sensor platform for Tibber Prices integration."""
"""
Sensor platform for Tibber Prices integration.
Provides electricity price sensors organized by calculation method:
- Interval-based: Current/next/previous price intervals
- Rolling hour: 5-interval sliding windows (2h 30m periods)
- Daily statistics: Min/max/avg within calendar day boundaries
- 24h windows: Trailing/leading statistics from current interval
- Future forecast: N-hour price predictions
- Volatility: Price variation analysis
- Diagnostic: System information and metadata
See definitions.py for complete sensor catalog.
"""
from __future__ import annotations

View file

@ -11,12 +11,13 @@ from datetime import datetime, timedelta
from typing import TYPE_CHECKING, Any
from custom_components.tibber_prices.const import (
MINUTES_PER_INTERVAL,
PRICE_LEVEL_MAPPING,
PRICE_RATING_MAPPING,
)
from custom_components.tibber_prices.entity_utils import add_icon_color_attribute
from custom_components.tibber_prices.price_utils import (
MINUTES_PER_INTERVAL,
from custom_components.tibber_prices.utils.average import round_to_nearest_quarter_hour
from custom_components.tibber_prices.utils.price import (
calculate_volatility_level,
find_price_data_for_interval,
)
@ -27,6 +28,8 @@ if TYPE_CHECKING:
from custom_components.tibber_prices.coordinator import (
TibberPricesDataUpdateCoordinator,
)
from custom_components.tibber_prices.data import TibberPricesConfigEntry
from homeassistant.core import HomeAssistant
# Constants
MAX_FORECAST_INTERVALS = 8 # Show up to 8 future intervals (2 hours with 15-min intervals)
@ -67,21 +70,11 @@ def _add_cached_trend_attributes(attributes: dict, key: str, cached_data: dict)
if key.startswith("price_trend_") and cached_data.get("trend_attributes"):
attributes.update(cached_data["trend_attributes"])
elif key == "current_price_trend" and cached_data.get("current_trend_attributes"):
# Add timestamp of current interval FIRST (when calculation was made)
now = dt_util.now()
minute = (now.minute // 15) * 15
current_interval_timestamp = now.replace(minute=minute, second=0, microsecond=0)
attributes["timestamp"] = current_interval_timestamp.isoformat()
# Then add other cached attributes
# Add cached attributes (timestamp already set by platform)
attributes.update(cached_data["current_trend_attributes"])
elif key == "next_price_trend_change" and cached_data.get("trend_change_attributes"):
# Add timestamp of current interval FIRST (when calculation was made)
# Add cached attributes (timestamp already set by platform)
# State contains the timestamp of the trend change itself
now = dt_util.now()
minute = (now.minute // 15) * 15
current_interval_timestamp = now.replace(minute=minute, second=0, microsecond=0)
attributes["timestamp"] = current_interval_timestamp.isoformat()
# Then add other cached attributes
attributes.update(cached_data["trend_change_attributes"])
@ -168,7 +161,6 @@ def build_sensor_attributes(
add_statistics_attributes(
attributes=attributes,
key=key,
coordinator=coordinator,
cached_data=cached_data,
)
elif key == "price_forecast":
@ -247,27 +239,35 @@ def add_current_interval_price_attributes(
"current_hour_price_rating",
]
# Set timestamp and interval data based on sensor type
# Set interval data based on sensor type
# For sensors showing data from OTHER intervals (next/previous), override timestamp with that interval's startsAt
# For current interval sensors, keep the default platform timestamp (calculation time)
interval_data = None
if key in next_interval_sensors:
target_time = now + timedelta(minutes=MINUTES_PER_INTERVAL)
interval_data = find_price_data_for_interval(price_info, target_time)
attributes["timestamp"] = interval_data["startsAt"] if interval_data else None
# Override timestamp with the NEXT interval's startsAt (when that interval starts)
if interval_data:
attributes["timestamp"] = interval_data["startsAt"]
elif key in previous_interval_sensors:
target_time = now - timedelta(minutes=MINUTES_PER_INTERVAL)
interval_data = find_price_data_for_interval(price_info, target_time)
attributes["timestamp"] = interval_data["startsAt"] if interval_data else None
# Override timestamp with the PREVIOUS interval's startsAt
if interval_data:
attributes["timestamp"] = interval_data["startsAt"]
elif key in next_hour_sensors:
target_time = now + timedelta(hours=1)
interval_data = find_price_data_for_interval(price_info, target_time)
attributes["timestamp"] = interval_data["startsAt"] if interval_data else None
# Override timestamp with the center of the next rolling hour window
if interval_data:
attributes["timestamp"] = interval_data["startsAt"]
elif key in current_hour_sensors:
current_interval_data = get_current_interval_data(coordinator)
attributes["timestamp"] = current_interval_data["startsAt"] if current_interval_data else None
# Keep default timestamp (when calculation was made) for current hour sensors
else:
current_interval_data = get_current_interval_data(coordinator)
interval_data = current_interval_data # Use current_interval_data as interval_data for current_interval_price
attributes["timestamp"] = current_interval_data["startsAt"] if current_interval_data else None
# Keep default timestamp (current calculation time) for current interval sensors
# Add icon_color for price sensors (based on their price level)
if key in ["current_interval_price", "next_interval_price", "previous_interval_price"]:
@ -402,10 +402,22 @@ def add_price_rating_attributes(attributes: dict, rating: str) -> None:
add_icon_color_attribute(attributes, key="price_rating", state_value=rating)
def _get_day_midnight_timestamp(key: str) -> str:
    """
    Return the ISO timestamp of local midnight for the day a sensor key refers to.

    "yesterday*" keys map to the previous day's midnight, "tomorrow*" keys to the
    next day's midnight, and everything else to today's midnight.
    """
    today_midnight = dt_util.start_of_local_day(dt_util.now())

    if key.startswith("yesterday") or key == "average_price_yesterday":
        offset = timedelta(days=-1)
    elif key.startswith("tomorrow") or key == "average_price_tomorrow":
        offset = timedelta(days=1)
    else:
        offset = timedelta()

    return (today_midnight + offset).isoformat()
def add_statistics_attributes(
attributes: dict,
key: str,
coordinator: TibberPricesDataUpdateCoordinator,
cached_data: dict,
) -> None:
"""
@ -414,21 +426,18 @@ def add_statistics_attributes(
Args:
attributes: Dictionary to add attributes to
key: The sensor entity key
coordinator: The data update coordinator
cached_data: Dictionary containing cached sensor data
"""
price_info = coordinator.data.get("priceInfo", {})
now = dt_util.now()
# Data timestamp sensor - shows API fetch time
if key == "data_timestamp":
# For data_timestamp sensor, use the latest timestamp from cached_data
latest_timestamp = cached_data.get("data_timestamp")
if latest_timestamp:
attributes["timestamp"] = latest_timestamp.isoformat()
elif key == "current_interval_price_rating":
interval_data = find_price_data_for_interval(price_info, now)
attributes["timestamp"] = interval_data["startsAt"] if interval_data else None
return
# Current interval price rating - add rating attributes
if key == "current_interval_price_rating":
if cached_data.get("last_rating_difference") is not None:
attributes["diff_" + PERCENTAGE] = cached_data["last_rating_difference"]
if cached_data.get("last_rating_level") is not None:
@ -436,35 +445,42 @@ def add_statistics_attributes(
attributes["level_value"] = PRICE_RATING_MAPPING.get(
cached_data["last_rating_level"], cached_data["last_rating_level"]
)
elif key in [
return
# Extreme value sensors - show when the extreme occurs
extreme_sensors = {
"lowest_price_today",
"highest_price_today",
"lowest_price_tomorrow",
"highest_price_tomorrow",
]:
# Use the timestamp from the interval that has the extreme price
}
if key in extreme_sensors:
if cached_data.get("last_extreme_interval"):
attributes["timestamp"] = cached_data["last_extreme_interval"].get("startsAt")
else:
# Fallback: use the first timestamp of the appropriate day
_add_fallback_timestamp(attributes, key, price_info)
elif key in [
extreme_starts_at = cached_data["last_extreme_interval"].get("startsAt")
if extreme_starts_at:
attributes["timestamp"] = extreme_starts_at
return
# Daily average sensors - show midnight to indicate whole day
daily_avg_sensors = {"average_price_today", "average_price_tomorrow"}
if key in daily_avg_sensors:
attributes["timestamp"] = _get_day_midnight_timestamp(key)
return
# Daily aggregated level/rating sensors - show midnight to indicate whole day
daily_aggregated_sensors = {
"yesterday_price_level",
"today_price_level",
"tomorrow_price_level",
"yesterday_price_rating",
"today_price_rating",
"tomorrow_price_rating",
]:
# Daily aggregated level/rating sensors - add timestamp
day_key = _get_day_key_from_sensor_key(key)
day_data = price_info.get(day_key, [])
if day_data:
# Use first timestamp of the day (00:00)
attributes["timestamp"] = day_data[0].get("startsAt")
else:
# Fallback: use the first timestamp of the appropriate day
_add_fallback_timestamp(attributes, key, price_info)
}
if key in daily_aggregated_sensors:
attributes["timestamp"] = _get_day_midnight_timestamp(key)
return
# All other statistics sensors - keep default timestamp (when calculation was made)
def _get_day_key_from_sensor_key(key: str) -> str:
@ -978,3 +994,113 @@ def add_period_timing_attributes(
# Add icon_color for dynamic styling
add_icon_color_attribute(attributes, key=key, state_value=state_value)
def build_extra_state_attributes(  # noqa: PLR0913
    entity_key: str,
    translation_key: str | None,
    hass: HomeAssistant,
    *,
    config_entry: TibberPricesConfigEntry,
    coordinator_data: dict,
    sensor_attrs: dict | None = None,
) -> dict[str, Any] | None:
    """
    Build extra state attributes for sensors.

    This function implements the unified attribute building pattern:
    1. Generate default timestamp (current time rounded to nearest quarter hour)
    2. Merge sensor-specific attributes (may override timestamp)
    3. Preserve timestamp ordering (always FIRST in dict)
    4. Add description attributes (always LAST)

    Args:
        entity_key: Entity key (e.g., "current_interval_price")
        translation_key: Translation key for entity
        hass: Home Assistant instance
        config_entry: Config entry with options (keyword-only)
        coordinator_data: Coordinator data dict (keyword-only)
        sensor_attrs: Sensor-specific attributes (keyword-only).
            The dict is read-only here; it is never mutated.

    Returns:
        Complete attributes dict or None if no data available
    """
    if not coordinator_data:
        return None

    # Default timestamp: current time rounded to the nearest quarter hour.
    # This gives all sensors a consistent "when was this calculated" reference;
    # individual sensors may override it via sensor_attrs.
    default_timestamp = round_to_nearest_quarter_hour(dt_util.now())

    # Local import to avoid a circular import at module load time
    from ..entity_utils import add_description_attributes  # noqa: PLC0415, TID252

    # Special handling for chart_data_export: metadata -> descriptions -> service data
    if entity_key == "chart_data_export":
        attributes: dict[str, Any] = {"timestamp": default_timestamp.isoformat()}

        # Step 1: Add metadata (timestamp override + error, if present)
        if sensor_attrs:
            if sensor_attrs.get("timestamp") is not None:
                # Chart data has its own timestamp (when the service was last called)
                attributes["timestamp"] = sensor_attrs["timestamp"]
            if "error" in sensor_attrs:
                attributes["error"] = sensor_attrs["error"]

        # Step 2: Add descriptions before the (potentially large) service data
        add_description_attributes(
            attributes,
            "sensor",
            translation_key,
            hass,
            config_entry,
            position="before_service_data",
        )

        # Step 3: Add service data (everything except the metadata keys)
        if sensor_attrs:
            attributes.update({k: v for k, v in sensor_attrs.items() if k not in ("timestamp", "error")})

        return attributes if attributes else None

    # For all other sensors: standard behavior.
    # Insert the default timestamp first; overwriting its VALUE later does not
    # move its position, so "timestamp" is guaranteed to stay the first key
    # (dict preserves insertion order) without any clear-and-rebuild dance.
    attributes = {"timestamp": default_timestamp.isoformat()}

    if sensor_attrs:
        # BUGFIX: the previous implementation popped "timestamp" out of
        # sensor_attrs, mutating the caller's dict as a side effect.
        # Read it non-destructively instead.
        timestamp_override = sensor_attrs.get("timestamp")
        if timestamp_override is not None:
            attributes["timestamp"] = timestamp_override
        attributes.update({k: v for k, v in sensor_attrs.items() if k != "timestamp"})

    # Add description attributes (always last, via central utility)
    add_description_attributes(
        attributes,
        "sensor",
        translation_key,
        hass,
        config_entry,
        position="end",
    )

    return attributes if attributes else None