diff --git a/README.md b/README.md index f6d8d56..839e3ad 100644 --- a/README.md +++ b/README.md @@ -175,7 +175,7 @@ automation: entity_id: switch.dishwasher ``` -> **Learn more:** The [period calculation guide](docs/user/period-calculation.md) explains how Best/Peak Price periods are identified and how you can configure filters (flexibility, minimum volatility, price level filters with gap tolerance). +> **Learn more:** The [period calculation guide](docs/user/period-calculation.md) explains how Best/Peak Price periods are identified and how you can configure filters (flexibility, minimum distance from average, price level filters with gap tolerance). ### Notify on Extremely High Prices diff --git a/custom_components/tibber_prices/config_flow.py b/custom_components/tibber_prices/config_flow.py index d5a7be9..3f05f69 100644 --- a/custom_components/tibber_prices/config_flow.py +++ b/custom_components/tibber_prices/config_flow.py @@ -45,7 +45,6 @@ from .const import ( CONF_BEST_PRICE_MAX_LEVEL_GAP_COUNT, CONF_BEST_PRICE_MIN_DISTANCE_FROM_AVG, CONF_BEST_PRICE_MIN_PERIOD_LENGTH, - CONF_BEST_PRICE_MIN_VOLATILITY, CONF_ENABLE_MIN_PERIODS_BEST, CONF_ENABLE_MIN_PERIODS_PEAK, CONF_EXTENDED_DESCRIPTIONS, @@ -56,7 +55,6 @@ from .const import ( CONF_PEAK_PRICE_MIN_DISTANCE_FROM_AVG, CONF_PEAK_PRICE_MIN_LEVEL, CONF_PEAK_PRICE_MIN_PERIOD_LENGTH, - CONF_PEAK_PRICE_MIN_VOLATILITY, CONF_PRICE_RATING_THRESHOLD_HIGH, CONF_PRICE_RATING_THRESHOLD_LOW, CONF_PRICE_TREND_THRESHOLD_FALLING, @@ -71,7 +69,6 @@ from .const import ( DEFAULT_BEST_PRICE_MAX_LEVEL_GAP_COUNT, DEFAULT_BEST_PRICE_MIN_DISTANCE_FROM_AVG, DEFAULT_BEST_PRICE_MIN_PERIOD_LENGTH, - DEFAULT_BEST_PRICE_MIN_VOLATILITY, DEFAULT_ENABLE_MIN_PERIODS_BEST, DEFAULT_ENABLE_MIN_PERIODS_PEAK, DEFAULT_EXTENDED_DESCRIPTIONS, @@ -82,7 +79,6 @@ from .const import ( DEFAULT_PEAK_PRICE_MIN_DISTANCE_FROM_AVG, DEFAULT_PEAK_PRICE_MIN_LEVEL, DEFAULT_PEAK_PRICE_MIN_PERIOD_LENGTH, - DEFAULT_PEAK_PRICE_MIN_VOLATILITY, 
DEFAULT_PRICE_RATING_THRESHOLD_HIGH, DEFAULT_PRICE_RATING_THRESHOLD_LOW, DEFAULT_PRICE_TREND_THRESHOLD_FALLING, @@ -94,7 +90,6 @@ from .const import ( DEFAULT_VOLATILITY_THRESHOLD_VERY_HIGH, DOMAIN, LOGGER, - MIN_VOLATILITY_FOR_PERIODS_OPTIONS, PEAK_PRICE_MIN_LEVEL_OPTIONS, ) @@ -657,19 +652,6 @@ class TibberPricesOptionsFlowHandler(OptionsFlow): mode=NumberSelectorMode.SLIDER, ), ), - vol.Optional( - CONF_BEST_PRICE_MIN_VOLATILITY, - default=self.config_entry.options.get( - CONF_BEST_PRICE_MIN_VOLATILITY, - DEFAULT_BEST_PRICE_MIN_VOLATILITY, - ), - ): SelectSelector( - SelectSelectorConfig( - options=MIN_VOLATILITY_FOR_PERIODS_OPTIONS, - mode=SelectSelectorMode.DROPDOWN, - translation_key="volatility", - ), - ), vol.Optional( CONF_BEST_PRICE_MAX_LEVEL, default=self.config_entry.options.get( @@ -805,19 +787,6 @@ class TibberPricesOptionsFlowHandler(OptionsFlow): mode=NumberSelectorMode.SLIDER, ), ), - vol.Optional( - CONF_PEAK_PRICE_MIN_VOLATILITY, - default=self.config_entry.options.get( - CONF_PEAK_PRICE_MIN_VOLATILITY, - DEFAULT_PEAK_PRICE_MIN_VOLATILITY, - ), - ): SelectSelector( - SelectSelectorConfig( - options=MIN_VOLATILITY_FOR_PERIODS_OPTIONS, - mode=SelectSelectorMode.DROPDOWN, - translation_key="volatility", - ), - ), vol.Optional( CONF_PEAK_PRICE_MIN_LEVEL, default=self.config_entry.options.get( diff --git a/custom_components/tibber_prices/const.py b/custom_components/tibber_prices/const.py index ac0d275..8b6da34 100644 --- a/custom_components/tibber_prices/const.py +++ b/custom_components/tibber_prices/const.py @@ -31,8 +31,6 @@ CONF_PRICE_TREND_THRESHOLD_FALLING = "price_trend_threshold_falling" CONF_VOLATILITY_THRESHOLD_MODERATE = "volatility_threshold_moderate" CONF_VOLATILITY_THRESHOLD_HIGH = "volatility_threshold_high" CONF_VOLATILITY_THRESHOLD_VERY_HIGH = "volatility_threshold_very_high" -CONF_BEST_PRICE_MIN_VOLATILITY = "best_price_min_volatility" -CONF_PEAK_PRICE_MIN_VOLATILITY = "peak_price_min_volatility" CONF_BEST_PRICE_MAX_LEVEL = 
"best_price_max_level" CONF_PEAK_PRICE_MIN_LEVEL = "peak_price_min_level" CONF_BEST_PRICE_MAX_LEVEL_GAP_COUNT = "best_price_max_level_gap_count" @@ -50,11 +48,20 @@ ATTRIBUTION = "Data provided by Tibber" DEFAULT_NAME = "Tibber Price Information & Ratings" DEFAULT_EXTENDED_DESCRIPTIONS = False DEFAULT_BEST_PRICE_FLEX = 15 # 15% flexibility for best price (user-facing, percent) -DEFAULT_PEAK_PRICE_FLEX = -15 # 15% flexibility for peak price (user-facing, percent) -DEFAULT_BEST_PRICE_MIN_DISTANCE_FROM_AVG = 2 # 2% minimum distance from daily average for best price -DEFAULT_PEAK_PRICE_MIN_DISTANCE_FROM_AVG = 2 # 2% minimum distance from daily average for peak price +# Peak price flexibility is set to -20 (20%) to allow for more adaptive detection of expensive periods. +# This is intentionally more flexible than best price (15%) because peak price periods can be more variable, +# and users may benefit from earlier warnings about expensive periods, even if they are less sharply defined. +# The negative sign indicates that the threshold is set below the MAX price (e.g., -20% means MAX * 0.8), not above the average price. +# A higher percentage allows for more conservative detection, reducing false negatives for peak price warnings. +DEFAULT_PEAK_PRICE_FLEX = -20 # 20% flexibility for peak price (user-facing, percent) +DEFAULT_BEST_PRICE_MIN_DISTANCE_FROM_AVG = 5 # 5% minimum distance from daily average (ensures significance) +DEFAULT_PEAK_PRICE_MIN_DISTANCE_FROM_AVG = 5 # 5% minimum distance from daily average (ensures significance) DEFAULT_BEST_PRICE_MIN_PERIOD_LENGTH = 60 # 60 minutes minimum period length for best price (user-facing, minutes) -DEFAULT_PEAK_PRICE_MIN_PERIOD_LENGTH = 60 # 60 minutes minimum period length for peak price (user-facing, minutes) +# Note: Peak price warnings are allowed for shorter periods (30 min) than best price periods (60 min). 
+# This asymmetry is intentional: shorter peak periods are acceptable for alerting users to brief expensive spikes, +# while best price periods require longer duration to ensure meaningful savings and avoid recommending short, +# impractical windows. +DEFAULT_PEAK_PRICE_MIN_PERIOD_LENGTH = 30 # 30 minutes minimum period length for peak price (user-facing, minutes) DEFAULT_PRICE_RATING_THRESHOLD_LOW = -10 # Default rating threshold low percentage DEFAULT_PRICE_RATING_THRESHOLD_HIGH = 10 # Default rating threshold high percentage DEFAULT_PRICE_TREND_THRESHOLD_RISING = 5 # Default trend threshold for rising prices (%) @@ -62,17 +69,15 @@ DEFAULT_PRICE_TREND_THRESHOLD_FALLING = -5 # Default trend threshold for fallin DEFAULT_VOLATILITY_THRESHOLD_MODERATE = 5.0 # Default threshold for MODERATE volatility (ct/øre) DEFAULT_VOLATILITY_THRESHOLD_HIGH = 15.0 # Default threshold for HIGH volatility (ct/øre) DEFAULT_VOLATILITY_THRESHOLD_VERY_HIGH = 30.0 # Default threshold for VERY_HIGH volatility (ct/øre) -DEFAULT_BEST_PRICE_MIN_VOLATILITY = "low" # Show best price at any volatility (optimization always useful) -DEFAULT_PEAK_PRICE_MIN_VOLATILITY = "low" # Always show peak price (warning relevant even at low spreads) -DEFAULT_BEST_PRICE_MAX_LEVEL = "any" # Default: show best price periods regardless of price level -DEFAULT_PEAK_PRICE_MIN_LEVEL = "any" # Default: show peak price periods regardless of price level -DEFAULT_BEST_PRICE_MAX_LEVEL_GAP_COUNT = 0 # Default: no tolerance for level gaps (strict filtering) -DEFAULT_PEAK_PRICE_MAX_LEVEL_GAP_COUNT = 0 # Default: no tolerance for level gaps (strict filtering) +DEFAULT_BEST_PRICE_MAX_LEVEL = "cheap" # Default: prefer genuinely cheap periods, relax to "any" if needed +DEFAULT_PEAK_PRICE_MIN_LEVEL = "expensive" # Default: prefer genuinely expensive periods, relax to "any" if needed +DEFAULT_BEST_PRICE_MAX_LEVEL_GAP_COUNT = 1 # Default: allow 1 level gap (e.g., CHEAP→NORMAL→CHEAP stays together) 
+DEFAULT_PEAK_PRICE_MAX_LEVEL_GAP_COUNT = 1 # Default: allow 1 level gap for peak price periods MIN_INTERVALS_FOR_GAP_TOLERANCE = 6 # Minimum period length (in 15-min intervals = 1.5h) required for gap tolerance -DEFAULT_ENABLE_MIN_PERIODS_BEST = False # Default: minimum periods feature disabled for best price +DEFAULT_ENABLE_MIN_PERIODS_BEST = True # Default: minimum periods feature enabled for best price DEFAULT_MIN_PERIODS_BEST = 2 # Default: require at least 2 best price periods (when enabled) DEFAULT_RELAXATION_STEP_BEST = 25 # Default: 25% of original threshold per relaxation step for best price -DEFAULT_ENABLE_MIN_PERIODS_PEAK = False # Default: minimum periods feature disabled for peak price +DEFAULT_ENABLE_MIN_PERIODS_PEAK = True # Default: minimum periods feature enabled for peak price DEFAULT_MIN_PERIODS_PEAK = 2 # Default: require at least 2 peak price periods (when enabled) DEFAULT_RELAXATION_STEP_PEAK = 25 # Default: 25% of original threshold per relaxation step for peak price @@ -193,15 +198,7 @@ VOLATILITY_OPTIONS = [ VOLATILITY_VERY_HIGH.lower(), ] -# Valid options for minimum volatility filter for periods -MIN_VOLATILITY_FOR_PERIODS_OPTIONS = [ - VOLATILITY_LOW.lower(), # Show at any volatility (≥0ct spread) - no filter - VOLATILITY_MODERATE.lower(), # Only show periods when volatility ≥ MODERATE (≥5ct) - VOLATILITY_HIGH.lower(), # Only show periods when volatility ≥ HIGH (≥15ct) - VOLATILITY_VERY_HIGH.lower(), # Only show periods when volatility ≥ VERY_HIGH (≥30ct) -] - -# Valid options for best price maximum level filter (AND-linked with volatility filter) +# Valid options for best price maximum level filter # Sorted from cheap to expensive: user selects "up to how expensive" BEST_PRICE_MAX_LEVEL_OPTIONS = [ "any", # No filter, allow all price levels diff --git a/custom_components/tibber_prices/coordinator.py b/custom_components/tibber_prices/coordinator.py index ddc6c8a..8afac64 100644 --- a/custom_components/tibber_prices/coordinator.py +++ 
b/custom_components/tibber_prices/coordinator.py @@ -30,7 +30,6 @@ from .const import ( CONF_BEST_PRICE_MAX_LEVEL_GAP_COUNT, CONF_BEST_PRICE_MIN_DISTANCE_FROM_AVG, CONF_BEST_PRICE_MIN_PERIOD_LENGTH, - CONF_BEST_PRICE_MIN_VOLATILITY, CONF_ENABLE_MIN_PERIODS_BEST, CONF_ENABLE_MIN_PERIODS_PEAK, CONF_MIN_PERIODS_BEST, @@ -40,7 +39,6 @@ from .const import ( CONF_PEAK_PRICE_MIN_DISTANCE_FROM_AVG, CONF_PEAK_PRICE_MIN_LEVEL, CONF_PEAK_PRICE_MIN_PERIOD_LENGTH, - CONF_PEAK_PRICE_MIN_VOLATILITY, CONF_PRICE_RATING_THRESHOLD_HIGH, CONF_PRICE_RATING_THRESHOLD_LOW, CONF_RELAXATION_STEP_BEST, @@ -53,7 +51,6 @@ from .const import ( DEFAULT_BEST_PRICE_MAX_LEVEL_GAP_COUNT, DEFAULT_BEST_PRICE_MIN_DISTANCE_FROM_AVG, DEFAULT_BEST_PRICE_MIN_PERIOD_LENGTH, - DEFAULT_BEST_PRICE_MIN_VOLATILITY, DEFAULT_ENABLE_MIN_PERIODS_BEST, DEFAULT_ENABLE_MIN_PERIODS_PEAK, DEFAULT_MIN_PERIODS_BEST, @@ -63,7 +60,6 @@ from .const import ( DEFAULT_PEAK_PRICE_MIN_DISTANCE_FROM_AVG, DEFAULT_PEAK_PRICE_MIN_LEVEL, DEFAULT_PEAK_PRICE_MIN_PERIOD_LENGTH, - DEFAULT_PEAK_PRICE_MIN_VOLATILITY, DEFAULT_PRICE_RATING_THRESHOLD_HIGH, DEFAULT_PRICE_RATING_THRESHOLD_LOW, DEFAULT_RELAXATION_STEP_BEST, @@ -78,7 +74,6 @@ from .const import ( from .period_utils import ( PeriodConfig, calculate_periods_with_relaxation, - filter_periods_by_volatility, ) from .price_utils import ( enrich_price_info_with_differences, @@ -908,6 +903,13 @@ class TibberPricesDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): # Periods shorter than MIN_INTERVALS_FOR_GAP_TOLERANCE (1.5h) use strict filtering if interval_count < MIN_INTERVALS_FOR_GAP_TOLERANCE: + period_type = "peak" if reverse_sort else "best" + _LOGGER.debug( + "Using strict filtering for short %s period (%d intervals < %d min required for gap tolerance)", + period_type, + interval_count, + MIN_INTERVALS_FOR_GAP_TOLERANCE, + ) return self._check_short_period_strict(today_intervals, level_order, reverse_sort=reverse_sort) # Try normal gap tolerance check first @@ -1143,14 
+1145,19 @@ class TibberPricesDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): DEFAULT_VOLATILITY_THRESHOLD_VERY_HIGH, ) - # Check if best price periods should be shown (apply filters) - show_best_price = self._should_show_periods(price_info, reverse_sort=False) if all_prices else False - # Get relaxation configuration for best price enable_relaxation_best = self.config_entry.options.get( CONF_ENABLE_MIN_PERIODS_BEST, DEFAULT_ENABLE_MIN_PERIODS_BEST, ) + + # Check if best price periods should be shown + # If relaxation is enabled, always calculate (relaxation will try "any" filter) + # If relaxation is disabled, apply level filter check + if enable_relaxation_best: + show_best_price = bool(all_prices) + else: + show_best_price = self._should_show_periods(price_info, reverse_sort=False) if all_prices else False min_periods_best = self.config_entry.options.get( CONF_MIN_PERIODS_BEST, DEFAULT_MIN_PERIODS_BEST, @@ -1163,6 +1170,15 @@ class TibberPricesDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): # Calculate best price periods (or return empty if filtered) if show_best_price: best_config = self._get_period_config(reverse_sort=False) + # Get level filter configuration + max_level_best = self.config_entry.options.get( + CONF_BEST_PRICE_MAX_LEVEL, + DEFAULT_BEST_PRICE_MAX_LEVEL, + ) + gap_count_best = self.config_entry.options.get( + CONF_BEST_PRICE_MAX_LEVEL_GAP_COUNT, + DEFAULT_BEST_PRICE_MAX_LEVEL_GAP_COUNT, + ) best_period_config = PeriodConfig( reverse_sort=False, flex=best_config["flex"], @@ -1173,6 +1189,8 @@ class TibberPricesDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): threshold_volatility_moderate=threshold_volatility_moderate, threshold_volatility_high=threshold_volatility_high, threshold_volatility_very_high=threshold_volatility_very_high, + level_filter=max_level_best, + gap_count=gap_count_best, ) best_periods, best_relaxation = calculate_periods_with_relaxation( all_prices, @@ -1186,13 +1204,6 @@ class 
TibberPricesDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): level_override=lvl, ), ) - - # Apply period-level volatility filter (after calculation) - min_volatility_best = self.config_entry.options.get( - CONF_BEST_PRICE_MIN_VOLATILITY, - DEFAULT_BEST_PRICE_MIN_VOLATILITY, - ) - best_periods = filter_periods_by_volatility(best_periods, min_volatility_best) else: best_periods = { "periods": [], @@ -1201,14 +1212,19 @@ class TibberPricesDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): } best_relaxation = {"relaxation_active": False, "relaxation_attempted": False} - # Check if peak price periods should be shown (apply filters) - show_peak_price = self._should_show_periods(price_info, reverse_sort=True) if all_prices else False - # Get relaxation configuration for peak price enable_relaxation_peak = self.config_entry.options.get( CONF_ENABLE_MIN_PERIODS_PEAK, DEFAULT_ENABLE_MIN_PERIODS_PEAK, ) + + # Check if peak price periods should be shown + # If relaxation is enabled, always calculate (relaxation will try "any" filter) + # If relaxation is disabled, apply level filter check + if enable_relaxation_peak: + show_peak_price = bool(all_prices) + else: + show_peak_price = self._should_show_periods(price_info, reverse_sort=True) if all_prices else False min_periods_peak = self.config_entry.options.get( CONF_MIN_PERIODS_PEAK, DEFAULT_MIN_PERIODS_PEAK, @@ -1221,6 +1237,15 @@ class TibberPricesDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): # Calculate peak price periods (or return empty if filtered) if show_peak_price: peak_config = self._get_period_config(reverse_sort=True) + # Get level filter configuration + min_level_peak = self.config_entry.options.get( + CONF_PEAK_PRICE_MIN_LEVEL, + DEFAULT_PEAK_PRICE_MIN_LEVEL, + ) + gap_count_peak = self.config_entry.options.get( + CONF_PEAK_PRICE_MAX_LEVEL_GAP_COUNT, + DEFAULT_PEAK_PRICE_MAX_LEVEL_GAP_COUNT, + ) peak_period_config = PeriodConfig( reverse_sort=True, flex=peak_config["flex"], 
@@ -1231,6 +1256,8 @@ class TibberPricesDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): threshold_volatility_moderate=threshold_volatility_moderate, threshold_volatility_high=threshold_volatility_high, threshold_volatility_very_high=threshold_volatility_very_high, + level_filter=min_level_peak, + gap_count=gap_count_peak, ) peak_periods, peak_relaxation = calculate_periods_with_relaxation( all_prices, @@ -1244,13 +1271,6 @@ class TibberPricesDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): level_override=lvl, ), ) - - # Apply period-level volatility filter (after calculation) - min_volatility_peak = self.config_entry.options.get( - CONF_PEAK_PRICE_MIN_VOLATILITY, - DEFAULT_PEAK_PRICE_MIN_VOLATILITY, - ) - peak_periods = filter_periods_by_volatility(peak_periods, min_volatility_peak) else: peak_periods = { "periods": [], diff --git a/custom_components/tibber_prices/period_utils.py b/custom_components/tibber_prices/period_utils.py deleted file mode 100644 index 9dfb5cd..0000000 --- a/custom_components/tibber_prices/period_utils.py +++ /dev/null @@ -1,1594 +0,0 @@ -"""Utility functions for calculating price periods (best price and peak price).""" - -from __future__ import annotations - -import logging -from datetime import date, datetime, timedelta -from typing import TYPE_CHECKING, Any, NamedTuple - -if TYPE_CHECKING: - from collections.abc import Callable - -from homeassistant.util import dt as dt_util - -from .const import ( - DEFAULT_PRICE_RATING_THRESHOLD_HIGH, - DEFAULT_PRICE_RATING_THRESHOLD_LOW, - DEFAULT_VOLATILITY_THRESHOLD_HIGH, - DEFAULT_VOLATILITY_THRESHOLD_MODERATE, - DEFAULT_VOLATILITY_THRESHOLD_VERY_HIGH, -) -from .price_utils import ( - aggregate_period_levels, - aggregate_period_ratings, - calculate_volatility_level, -) - -_LOGGER = logging.getLogger(__name__) - -MINUTES_PER_INTERVAL = 15 - -# Log indentation levels for visual hierarchy -INDENT_L0 = "" # Top level (calculate_periods_with_relaxation) -INDENT_L1 = " " # 
Per-day loop -INDENT_L2 = " " # Flex/filter loop (_relax_single_day) -INDENT_L3 = " " # _resolve_period_overlaps function -INDENT_L4 = " " # Period-by-period analysis -INDENT_L5 = " " # Segment details - - -class PeriodConfig(NamedTuple): - """Configuration for period calculation.""" - - reverse_sort: bool - flex: float - min_distance_from_avg: float - min_period_length: int - threshold_low: float = DEFAULT_PRICE_RATING_THRESHOLD_LOW - threshold_high: float = DEFAULT_PRICE_RATING_THRESHOLD_HIGH - threshold_volatility_moderate: float = DEFAULT_VOLATILITY_THRESHOLD_MODERATE - threshold_volatility_high: float = DEFAULT_VOLATILITY_THRESHOLD_HIGH - threshold_volatility_very_high: float = DEFAULT_VOLATILITY_THRESHOLD_VERY_HIGH - - -class PeriodData(NamedTuple): - """Data for building a period summary.""" - - start_time: datetime - end_time: datetime - period_length: int - period_idx: int - total_periods: int - - -class PeriodStatistics(NamedTuple): - """Calculated statistics for a period.""" - - aggregated_level: str | None - aggregated_rating: str | None - rating_difference_pct: float | None - price_avg: float - price_min: float - price_max: float - price_spread: float - volatility: str - period_price_diff: float | None - period_price_diff_pct: float | None - - -class ThresholdConfig(NamedTuple): - """Threshold configuration for period calculations.""" - - threshold_low: float | None - threshold_high: float | None - threshold_volatility_moderate: float - threshold_volatility_high: float - threshold_volatility_very_high: float - reverse_sort: bool - - -def calculate_periods( - all_prices: list[dict], - *, - config: PeriodConfig, -) -> dict[str, Any]: - """ - Calculate price periods (best or peak) from price data. - - This function identifies periods but does NOT store full interval data redundantly. - It returns lightweight period summaries that reference the original price data. - - Steps: - 1. Split prices by day and calculate daily averages - 2. 
Calculate reference prices (min/max per day) - 3. Build periods based on criteria - 4. Filter by minimum length - 5. Merge adjacent periods at midnight - 6. Extract period summaries (start/end times, not full price data) - - Args: - all_prices: All price data points from yesterday/today/tomorrow - config: Period configuration containing reverse_sort, flex, min_distance_from_avg, - min_period_length, threshold_low, and threshold_high - - Returns: - Dict with: - - periods: List of lightweight period summaries (start/end times only) - - metadata: Config and statistics - - reference_data: Daily min/max/avg for on-demand annotation - - """ - # Extract config values - reverse_sort = config.reverse_sort - flex = config.flex - min_distance_from_avg = config.min_distance_from_avg - min_period_length = config.min_period_length - threshold_low = config.threshold_low - threshold_high = config.threshold_high - - if not all_prices: - return { - "periods": [], - "metadata": { - "total_periods": 0, - "config": { - "reverse_sort": reverse_sort, - "flex": flex, - "min_distance_from_avg": min_distance_from_avg, - "min_period_length": min_period_length, - }, - }, - "reference_data": { - "ref_prices": {}, - "avg_prices": {}, - }, - } - - # Ensure prices are sorted chronologically - all_prices_sorted = sorted(all_prices, key=lambda p: p["startsAt"]) - - # Step 1: Split by day and calculate averages - intervals_by_day, avg_price_by_day = _split_intervals_by_day(all_prices_sorted) - - # Step 2: Calculate reference prices (min or max per day) - ref_prices = _calculate_reference_prices(intervals_by_day, reverse_sort=reverse_sort) - - # Step 3: Build periods - price_context = { - "ref_prices": ref_prices, - "avg_prices": avg_price_by_day, - "flex": flex, - "min_distance_from_avg": min_distance_from_avg, - } - raw_periods = _build_periods(all_prices_sorted, price_context, reverse_sort=reverse_sort) - - # Step 4: Filter by minimum length - raw_periods = 
_filter_periods_by_min_length(raw_periods, min_period_length) - - # Step 5: Merge adjacent periods at midnight - raw_periods = _merge_adjacent_periods_at_midnight(raw_periods) - - # Step 6: Add interval ends - _add_interval_ends(raw_periods) - - # Step 7: Filter periods by end date (keep periods ending today or later) - raw_periods = _filter_periods_by_end_date(raw_periods) - - # Step 8: Extract lightweight period summaries (no full price data) - # Note: Filtering for current/future is done here based on end date, - # not start date. This preserves periods that started yesterday but end today. - thresholds = ThresholdConfig( - threshold_low=threshold_low, - threshold_high=threshold_high, - threshold_volatility_moderate=config.threshold_volatility_moderate, - threshold_volatility_high=config.threshold_volatility_high, - threshold_volatility_very_high=config.threshold_volatility_very_high, - reverse_sort=reverse_sort, - ) - period_summaries = _extract_period_summaries( - raw_periods, - all_prices_sorted, - price_context, - thresholds, - ) - - return { - "periods": period_summaries, # Lightweight summaries only - "metadata": { - "total_periods": len(period_summaries), - "config": { - "reverse_sort": reverse_sort, - "flex": flex, - "min_distance_from_avg": min_distance_from_avg, - "min_period_length": min_period_length, - }, - }, - "reference_data": { - "ref_prices": {k.isoformat(): v for k, v in ref_prices.items()}, - "avg_prices": {k.isoformat(): v for k, v in avg_price_by_day.items()}, - }, - } - - -def _split_intervals_by_day(all_prices: list[dict]) -> tuple[dict[date, list[dict]], dict[date, float]]: - """Split intervals by day and calculate average price per day.""" - intervals_by_day: dict[date, list[dict]] = {} - avg_price_by_day: dict[date, float] = {} - - for price_data in all_prices: - dt = dt_util.parse_datetime(price_data["startsAt"]) - if dt is None: - continue - dt = dt_util.as_local(dt) - date_key = dt.date() - intervals_by_day.setdefault(date_key, 
[]).append(price_data) - - for date_key, intervals in intervals_by_day.items(): - avg_price_by_day[date_key] = sum(float(p["total"]) for p in intervals) / len(intervals) - - return intervals_by_day, avg_price_by_day - - -def _calculate_reference_prices(intervals_by_day: dict[date, list[dict]], *, reverse_sort: bool) -> dict[date, float]: - """Calculate reference prices for each day (min for best, max for peak).""" - ref_prices: dict[date, float] = {} - for date_key, intervals in intervals_by_day.items(): - prices = [float(p["total"]) for p in intervals] - ref_prices[date_key] = max(prices) if reverse_sort else min(prices) - return ref_prices - - -def _build_periods( - all_prices: list[dict], - price_context: dict[str, Any], - *, - reverse_sort: bool, -) -> list[list[dict]]: - """ - Build periods, allowing periods to cross midnight (day boundary). - - Periods are built day-by-day, comparing each interval to its own day's reference. - When a day boundary is crossed, the current period is ended. - Adjacent periods at midnight are merged in a later step. 
- - """ - ref_prices = price_context["ref_prices"] - avg_prices = price_context["avg_prices"] - flex = price_context["flex"] - min_distance_from_avg = price_context["min_distance_from_avg"] - - periods: list[list[dict]] = [] - current_period: list[dict] = [] - last_ref_date: date | None = None - - for price_data in all_prices: - starts_at = dt_util.parse_datetime(price_data["startsAt"]) - if starts_at is None: - continue - starts_at = dt_util.as_local(starts_at) - date_key = starts_at.date() - ref_price = ref_prices[date_key] - avg_price = avg_prices[date_key] - price = float(price_data["total"]) - - # Calculate percentage difference from reference - percent_diff = ((price - ref_price) / ref_price) * 100 if ref_price != 0 else 0.0 - percent_diff = round(percent_diff, 2) - - # Check if interval qualifies for the period - in_flex = percent_diff >= flex * 100 if reverse_sort else percent_diff <= flex * 100 - - # Minimum distance from average - if reverse_sort: - # Peak price: must be at least min_distance_from_avg% above average - min_distance_threshold = avg_price * (1 + min_distance_from_avg / 100) - meets_min_distance = price >= min_distance_threshold - else: - # Best price: must be at least min_distance_from_avg% below average - min_distance_threshold = avg_price * (1 - min_distance_from_avg / 100) - meets_min_distance = price <= min_distance_threshold - - # Split period if day changes - if last_ref_date is not None and date_key != last_ref_date and current_period: - periods.append(current_period) - current_period = [] - - last_ref_date = date_key - - # Add to period if all criteria are met - if in_flex and meets_min_distance: - current_period.append( - { - "interval_hour": starts_at.hour, - "interval_minute": starts_at.minute, - "interval_time": f"{starts_at.hour:02d}:{starts_at.minute:02d}", - "price": price, - "interval_start": starts_at, - } - ) - elif current_period: - # Criteria no longer met, end current period - periods.append(current_period) - 
current_period = [] - - # Add final period if exists - if current_period: - periods.append(current_period) - - return periods - - -def _filter_periods_by_min_length(periods: list[list[dict]], min_period_length: int) -> list[list[dict]]: - """Filter periods to only include those meeting the minimum length requirement.""" - min_intervals = min_period_length // MINUTES_PER_INTERVAL - return [period for period in periods if len(period) >= min_intervals] - - -def _merge_adjacent_periods_at_midnight(periods: list[list[dict]]) -> list[list[dict]]: - """ - Merge adjacent periods that meet at midnight. - - When two periods are detected separately for consecutive days but are directly - adjacent at midnight (15 minutes apart), merge them into a single period. - - """ - if not periods: - return periods - - merged = [] - i = 0 - - while i < len(periods): - current_period = periods[i] - - # Check if there's a next period and if they meet at midnight - if i + 1 < len(periods): - next_period = periods[i + 1] - - last_start = current_period[-1].get("interval_start") - next_start = next_period[0].get("interval_start") - - if last_start and next_start: - time_diff = next_start - last_start - last_date = last_start.date() - next_date = next_start.date() - - # If they are 15 minutes apart and on different days (crossing midnight) - if time_diff == timedelta(minutes=MINUTES_PER_INTERVAL) and next_date > last_date: - # Merge the two periods - merged_period = current_period + next_period - merged.append(merged_period) - i += 2 # Skip both periods as we've merged them - continue - - # If no merge happened, just add the current period - merged.append(current_period) - i += 1 - - return merged - - -def _add_interval_ends(periods: list[list[dict]]) -> None: - """Add interval_end to each interval in-place.""" - for period in periods: - for interval in period: - start = interval.get("interval_start") - if start: - interval["interval_end"] = start + timedelta(minutes=MINUTES_PER_INTERVAL) - - 
-def _filter_periods_by_end_date(periods: list[list[dict]]) -> list[list[dict]]: - """ - Filter periods to keep only relevant ones for today and tomorrow. - - Keep periods that: - - End in the future (> now) - - End today but after the start of the day (not exactly at midnight) - - This removes: - - Periods that ended yesterday - - Periods that ended exactly at midnight today (they're completely in the past) - """ - now = dt_util.now() - today = now.date() - midnight_today = dt_util.start_of_local_day(now) - - filtered = [] - for period in periods: - if not period: - continue - - # Get the end time of the period (last interval's end) - last_interval = period[-1] - period_end = last_interval.get("interval_end") - - if not period_end: - continue - - # Keep if period ends in the future - if period_end > now: - filtered.append(period) - continue - - # Keep if period ends today but AFTER midnight (not exactly at midnight) - if period_end.date() == today and period_end > midnight_today: - filtered.append(period) - - return filtered - - -def _calculate_period_price_diff( - price_avg: float, - start_time: datetime, - price_context: dict[str, Any], -) -> tuple[float | None, float | None]: - """ - Calculate period price difference from daily reference (min or max). - - Uses reference price from start day of the period for consistency. - - Returns: - Tuple of (period_price_diff, period_price_diff_pct) or (None, None) if no reference available. 
- - """ - if not price_context or not start_time: - return None, None - - ref_prices = price_context.get("ref_prices", {}) - date_key = start_time.date() - ref_price = ref_prices.get(date_key) - - if ref_price is None: - return None, None - - # Convert reference price to minor units (ct/øre) - ref_price_minor = round(ref_price * 100, 2) - period_price_diff = round(price_avg - ref_price_minor, 2) - period_price_diff_pct = None - if ref_price_minor != 0: - period_price_diff_pct = round((period_price_diff / ref_price_minor) * 100, 2) - - return period_price_diff, period_price_diff_pct - - -def _calculate_aggregated_rating_difference(period_price_data: list[dict]) -> float | None: - """ - Calculate aggregated rating difference percentage for the period. - - Takes the average of all interval differences (from their respective thresholds). - - Args: - period_price_data: List of price data dictionaries with "difference" field - - Returns: - Average difference percentage, or None if no valid data - - """ - differences = [] - for price_data in period_price_data: - diff = price_data.get("difference") - if diff is not None: - differences.append(float(diff)) - - if not differences: - return None - - return round(sum(differences) / len(differences), 2) - - -def _calculate_period_price_statistics(period_price_data: list[dict]) -> dict[str, float]: - """ - Calculate price statistics for a period. 
- - Args: - period_price_data: List of price data dictionaries with "total" field - - Returns: - Dictionary with price_avg, price_min, price_max, price_spread (all in minor units: ct/øre) - - """ - prices_minor = [round(float(p["total"]) * 100, 2) for p in period_price_data] - - if not prices_minor: - return { - "price_avg": 0.0, - "price_min": 0.0, - "price_max": 0.0, - "price_spread": 0.0, - } - - price_avg = round(sum(prices_minor) / len(prices_minor), 2) - price_min = round(min(prices_minor), 2) - price_max = round(max(prices_minor), 2) - price_spread = round(price_max - price_min, 2) - - return { - "price_avg": price_avg, - "price_min": price_min, - "price_max": price_max, - "price_spread": price_spread, - } - - -def _build_period_summary_dict( - period_data: PeriodData, - stats: PeriodStatistics, - *, - reverse_sort: bool, -) -> dict: - """ - Build the complete period summary dictionary. - - Args: - period_data: Period timing and position data - stats: Calculated period statistics - reverse_sort: True for peak price, False for best price (keyword-only) - - Returns: - Complete period summary dictionary following attribute ordering - - """ - # Build complete period summary (following attribute ordering from AGENTS.md) - summary = { - # 1. Time information (when does this apply?) - "start": period_data.start_time, - "end": period_data.end_time, - "duration_minutes": period_data.period_length * MINUTES_PER_INTERVAL, - # 2. Core decision attributes (what should I do?) - "level": stats.aggregated_level, - "rating_level": stats.aggregated_rating, - "rating_difference_%": stats.rating_difference_pct, - # 3. Price statistics (how much does it cost?) - "price_avg": stats.price_avg, - "price_min": stats.price_min, - "price_max": stats.price_max, - "price_spread": stats.price_spread, - "volatility": stats.volatility, - # 4. Price differences will be added below if available - # 5. 
Detail information (additional context) - "period_interval_count": period_data.period_length, - "period_position": period_data.period_idx, - "periods_total": period_data.total_periods, - "periods_remaining": period_data.total_periods - period_data.period_idx, - } - - # Add period price difference attributes based on sensor type (step 4) - if stats.period_price_diff is not None: - if reverse_sort: - # Peak price sensor: compare to daily maximum - summary["period_price_diff_from_daily_max"] = stats.period_price_diff - if stats.period_price_diff_pct is not None: - summary["period_price_diff_from_daily_max_%"] = stats.period_price_diff_pct - else: - # Best price sensor: compare to daily minimum - summary["period_price_diff_from_daily_min"] = stats.period_price_diff - if stats.period_price_diff_pct is not None: - summary["period_price_diff_from_daily_min_%"] = stats.period_price_diff_pct - - return summary - - -def _extract_period_summaries( - periods: list[list[dict]], - all_prices: list[dict], - price_context: dict[str, Any], - thresholds: ThresholdConfig, -) -> list[dict]: - """ - Extract complete period summaries with all aggregated attributes. - - Returns sensor-ready period summaries with: - - Timestamps and positioning (start, end, hour, minute, time) - - Aggregated price statistics (price_avg, price_min, price_max, price_spread) - - Volatility categorization (low/moderate/high/very_high based on absolute spread) - - Rating difference percentage (aggregated from intervals) - - Period price differences (period_price_diff_from_daily_min/max) - - Aggregated level and rating_level - - Interval count (number of 15-min intervals in period) - - All data is pre-calculated and ready for display - no further processing needed. 
- - Args: - periods: List of periods, where each period is a list of interval dictionaries - all_prices: All price data from the API (enriched with level, difference, rating_level) - price_context: Dictionary with ref_prices and avg_prices per day - thresholds: Threshold configuration for calculations - - """ - # Build lookup dictionary for full price data by timestamp - price_lookup: dict[str, dict] = {} - for price_data in all_prices: - starts_at = dt_util.parse_datetime(price_data["startsAt"]) - if starts_at: - starts_at = dt_util.as_local(starts_at) - price_lookup[starts_at.isoformat()] = price_data - - summaries = [] - total_periods = len(periods) - - for period_idx, period in enumerate(periods, 1): - if not period: - continue - - first_interval = period[0] - last_interval = period[-1] - - start_time = first_interval.get("interval_start") - end_time = last_interval.get("interval_end") - - if not start_time or not end_time: - continue - - # Look up full price data for each interval in the period - period_price_data: list[dict] = [] - for interval in period: - start = interval.get("interval_start") - if not start: - continue - start_iso = start.isoformat() - price_data = price_lookup.get(start_iso) - if price_data: - period_price_data.append(price_data) - - # Calculate aggregated level and rating_level - aggregated_level = None - aggregated_rating = None - - if period_price_data: - # Aggregate level (from API's "level" field) - aggregated_level = aggregate_period_levels(period_price_data) - - # Aggregate rating_level (from calculated "rating_level" and "difference" fields) - if thresholds.threshold_low is not None and thresholds.threshold_high is not None: - aggregated_rating, _ = aggregate_period_ratings( - period_price_data, - thresholds.threshold_low, - thresholds.threshold_high, - ) - - # Calculate price statistics (in minor units: ct/øre) - price_stats = _calculate_period_price_statistics(period_price_data) - - # Calculate period price difference from daily 
reference - period_price_diff, period_price_diff_pct = _calculate_period_price_diff( - price_stats["price_avg"], start_time, price_context - ) - - # Calculate volatility (categorical) and aggregated rating difference (numeric) - volatility = calculate_volatility_level( - price_stats["price_spread"], - threshold_moderate=thresholds.threshold_volatility_moderate, - threshold_high=thresholds.threshold_volatility_high, - threshold_very_high=thresholds.threshold_volatility_very_high, - ).lower() - rating_difference_pct = _calculate_aggregated_rating_difference(period_price_data) - - # Build period data and statistics objects - period_data = PeriodData( - start_time=start_time, - end_time=end_time, - period_length=len(period), - period_idx=period_idx, - total_periods=total_periods, - ) - - stats = PeriodStatistics( - aggregated_level=aggregated_level, - aggregated_rating=aggregated_rating, - rating_difference_pct=rating_difference_pct, - price_avg=price_stats["price_avg"], - price_min=price_stats["price_min"], - price_max=price_stats["price_max"], - price_spread=price_stats["price_spread"], - volatility=volatility, - period_price_diff=period_price_diff, - period_price_diff_pct=period_price_diff_pct, - ) - - # Build complete period summary - summary = _build_period_summary_dict(period_data, stats, reverse_sort=thresholds.reverse_sort) - summaries.append(summary) - - return summaries - - -def _recalculate_period_metadata(periods: list[dict]) -> None: - """ - Recalculate period metadata after merging periods. - - Updates period_position, periods_total, and periods_remaining for all periods - based on chronological order. - - This must be called after _resolve_period_overlaps() to ensure metadata - reflects the final merged period list. 
- - Args: - periods: List of period summary dicts (mutated in-place) - - """ - if not periods: - return - - # Sort periods chronologically by start time - periods.sort(key=lambda p: p.get("start") or dt_util.now()) - - # Update metadata for all periods - total_periods = len(periods) - - for position, period in enumerate(periods, 1): - period["period_position"] = position - period["periods_total"] = total_periods - period["periods_remaining"] = total_periods - position - - -def filter_periods_by_volatility( - periods_data: dict[str, Any], - min_volatility: str, -) -> dict[str, Any]: - """ - Filter calculated periods based on their internal volatility. - - This applies period-level volatility filtering AFTER periods have been calculated. - Removes periods that don't meet the minimum volatility requirement based on their - own price spread (volatility attribute), not the daily volatility. - - Args: - periods_data: Dict with "periods" and "intervals" lists from calculate_periods_with_relaxation() - min_volatility: Minimum volatility level required ("low", "moderate", "high", "very_high") - - Returns: - Filtered periods_data dict with updated periods, intervals, and metadata. 
- - """ - periods = periods_data.get("periods", []) - if not periods: - return periods_data - - # "low" means no filtering (accept any volatility level) - if min_volatility == "low": - return periods_data - - # Define volatility hierarchy (LOW < MODERATE < HIGH < VERY_HIGH) - volatility_levels = ["LOW", "MODERATE", "HIGH", "VERY_HIGH"] - - # Map filter config values to actual level names - config_to_level = { - "low": "LOW", - "moderate": "MODERATE", - "high": "HIGH", - "very_high": "VERY_HIGH", - } - - min_level = config_to_level.get(min_volatility, "LOW") - - # Filter periods based on their volatility - filtered_periods = [] - for period in periods: - period_volatility = period.get("volatility", "MODERATE") - - # Check if period's volatility meets or exceeds minimum requirement - try: - period_idx = volatility_levels.index(period_volatility) - min_idx = volatility_levels.index(min_level) - except ValueError: - # If level not found, don't filter out this period - filtered_periods.append(period) - else: - if period_idx >= min_idx: - filtered_periods.append(period) - - # If no periods left after filtering, return empty structure - if not filtered_periods: - return { - "periods": [], - "intervals": [], - "metadata": { - "total_intervals": 0, - "total_periods": 0, - "config": periods_data.get("metadata", {}).get("config", {}), - }, - } - - # Collect intervals from filtered periods - filtered_intervals = [] - for period in filtered_periods: - filtered_intervals.extend(period.get("intervals", [])) - - # Update metadata - return { - "periods": filtered_periods, - "intervals": filtered_intervals, - "metadata": { - "total_intervals": len(filtered_intervals), - "total_periods": len(filtered_periods), - "config": periods_data.get("metadata", {}).get("config", {}), - }, - } - - -def _group_periods_by_day(periods: list[dict]) -> dict[date, list[dict]]: - """ - Group periods by the day they end in. 
- - This ensures periods crossing midnight are counted towards the day they end, - not the day they start. Example: Period 23:00 yesterday - 02:00 today counts - as "today" since it ends today. - - Args: - periods: List of period summary dicts with "start" and "end" datetime - - Returns: - Dict mapping date to list of periods ending on that date - - """ - periods_by_day: dict[date, list[dict]] = {} - - for period in periods: - # Use end time for grouping so periods crossing midnight are counted - # towards the day they end (more relevant for min_periods check) - end_time = period.get("end") - if end_time: - day = end_time.date() - periods_by_day.setdefault(day, []).append(period) - - return periods_by_day - - -def _group_prices_by_day(all_prices: list[dict]) -> dict[date, list[dict]]: - """ - Group price intervals by the day they belong to (today and future only). - - Args: - all_prices: List of price dicts with "startsAt" timestamp - - Returns: - Dict mapping date to list of price intervals for that day (only today and future) - - """ - today = dt_util.now().date() - prices_by_day: dict[date, list[dict]] = {} - - for price in all_prices: - starts_at = dt_util.parse_datetime(price["startsAt"]) - if starts_at: - price_date = dt_util.as_local(starts_at).date() - # Only include today and future days - if price_date >= today: - prices_by_day.setdefault(price_date, []).append(price) - - return prices_by_day - - -def _check_min_periods_per_day(periods: list[dict], min_periods: int, all_prices: list[dict]) -> bool: - """ - Check if minimum periods requirement is met for each day individually. - - Returns True if we should STOP relaxation (enough periods found per day). - Returns False if we should CONTINUE relaxation (not enough periods yet). 
- - Args: - periods: List of period summary dicts - min_periods: Minimum number of periods required per day - all_prices: All available price intervals (used to determine which days have data) - - Returns: - True if every day with price data has at least min_periods, False otherwise - - """ - if not periods: - return False # No periods at all, continue relaxation - - # Get all days that have price data (today and future only, not yesterday) - today = dt_util.now().date() - available_days = set() - for price in all_prices: - starts_at = dt_util.parse_datetime(price["startsAt"]) - if starts_at: - price_date = dt_util.as_local(starts_at).date() - # Only count today and future days (not yesterday) - if price_date >= today: - available_days.add(price_date) - - if not available_days: - return False # No price data for today/future, continue relaxation - - # Group found periods by day - periods_by_day = _group_periods_by_day(periods) - - # Check each day with price data: ALL must have at least min_periods - # Only count standalone periods (exclude extensions) - for day in available_days: - day_periods = periods_by_day.get(day, []) - # Count only standalone periods (not extensions) - standalone_count = sum(1 for p in day_periods if not p.get("is_extension")) - if standalone_count < min_periods: - _LOGGER.debug( - "Day %s has only %d standalone periods (need %d) - continuing relaxation", - day, - standalone_count, - min_periods, - ) - return False # This day doesn't have enough, continue relaxation - - # All days with price data have enough periods, stop relaxation - return True - - -def calculate_periods_with_relaxation( # noqa: PLR0913, PLR0915 - Per-day relaxation requires many parameters and statements - all_prices: list[dict], - *, - config: PeriodConfig, - enable_relaxation: bool, - min_periods: int, - relaxation_step_pct: int, - should_show_callback: Callable[[str | None, str | None], bool], -) -> tuple[dict[str, Any], dict[str, Any]]: - """ - Calculate periods with 
optional per-day filter relaxation. - - NEW: Each day gets its own independent relaxation loop. Today can be in Phase 1 - while tomorrow is in Phase 3, ensuring each day finds enough periods. - - If min_periods is not reached with normal filters, this function gradually - relaxes filters in multiple phases FOR EACH DAY SEPARATELY: - - Phase 1: Increase flex threshold step-by-step (up to 4 attempts) - Phase 2: Disable volatility filter (set to "any") - Phase 3: Disable level filter (set to "any") - - Args: - all_prices: All price data points - config: Base period configuration - enable_relaxation: Whether relaxation is enabled - min_periods: Minimum number of periods required PER DAY - relaxation_step_pct: Percentage of original flex to add per relaxation step - should_show_callback: Callback function(volatility_override, level_override) -> bool - Returns True if periods should be shown with given filter overrides. - Pass None to use original configured filter values. - - Returns: - Tuple of (periods_result, relaxation_metadata): - - periods_result: Same format as calculate_periods() output, with periods from all days - - relaxation_metadata: Dict with relaxation information (aggregated across all days) - - """ - # Compact INFO-level summary - period_type = "PEAK PRICE" if config.reverse_sort else "BEST PRICE" - relaxation_status = "ON" if enable_relaxation else "OFF" - if enable_relaxation: - _LOGGER.info( - "Calculating %s periods: relaxation=%s, target=%d/day, flex=%.1f%%", - period_type, - relaxation_status, - min_periods, - abs(config.flex) * 100, - ) - else: - _LOGGER.info( - "Calculating %s periods: relaxation=%s, flex=%.1f%%", - period_type, - relaxation_status, - abs(config.flex) * 100, - ) - - # Detailed DEBUG-level context header - period_type_full = "PEAK PRICE (most expensive)" if config.reverse_sort else "BEST PRICE (cheapest)" - _LOGGER.debug( - "%s========== %s PERIODS ==========", - INDENT_L0, - period_type_full, - ) - _LOGGER.debug( - 
"%sRelaxation: %s", - INDENT_L0, - "ENABLED (user setting: ON)" if enable_relaxation else "DISABLED by user configuration", - ) - _LOGGER.debug( - "%sBase config: flex=%.1f%%, min_length=%d min", - INDENT_L0, - abs(config.flex) * 100, - config.min_period_length, - ) - if enable_relaxation: - _LOGGER.debug( - "%sRelaxation target: %d periods per day", - INDENT_L0, - min_periods, - ) - _LOGGER.debug( - "%sRelaxation strategy: %.1f%% flex increment per step (4 flex levels x 4 filter combinations)", - INDENT_L0, - relaxation_step_pct, - ) - _LOGGER.debug( - "%sEarly exit: After EACH filter combination when target reached", - INDENT_L0, - ) - _LOGGER.debug( - "%s=============================================", - INDENT_L0, - ) - - # Group prices by day (for both relaxation enabled/disabled) - prices_by_day = _group_prices_by_day(all_prices) - - if not prices_by_day: - # No price data for today/future - _LOGGER.warning( - "No price data available for today/future - cannot calculate periods", - ) - return {"periods": [], "metadata": {}, "reference_data": {}}, { - "relaxation_active": False, - "relaxation_attempted": False, - "min_periods_requested": min_periods if enable_relaxation else 0, - "periods_found": 0, - } - - total_days = len(prices_by_day) - _LOGGER.info( - "Calculating baseline periods for %d days...", - total_days, - ) - - # === BASELINE CALCULATION (same for both modes) === - all_periods: list[dict] = [] - all_phases_used: list[str] = [] - relaxation_was_needed = False - days_meeting_requirement = 0 - - for day, day_prices in sorted(prices_by_day.items()): - _LOGGER.debug( - "%sProcessing day %s with %d price intervals", - INDENT_L1, - day, - len(day_prices), - ) - - # Calculate baseline periods for this day - day_result = calculate_periods(day_prices, config=config) - day_periods = day_result["periods"] - standalone_count = len([p for p in day_periods if not p.get("is_extension")]) - - _LOGGER.debug( - "%sDay %s baseline: Found %d standalone periods%s", - 
INDENT_L1, - day, - standalone_count, - f" (need {min_periods})" if enable_relaxation else "", - ) - - # Check if relaxation is needed for this day - if not enable_relaxation or standalone_count >= min_periods: - # No relaxation needed/possible - use baseline - if enable_relaxation: - _LOGGER.debug( - "%sDay %s: Target reached with baseline - no relaxation needed", - INDENT_L1, - day, - ) - all_periods.extend(day_periods) - days_meeting_requirement += 1 - continue - - # === RELAXATION PATH (only when enabled AND needed) === - _LOGGER.debug( - "%sDay %s: Baseline insufficient - starting relaxation", - INDENT_L1, - day, - ) - relaxation_was_needed = True - - # Run full relaxation for this specific day - day_relaxed_result, day_metadata = _relax_single_day( - day_prices=day_prices, - config=config, - min_periods=min_periods, - relaxation_step_pct=relaxation_step_pct, - should_show_callback=should_show_callback, - baseline_periods=day_periods, - day_label=str(day), - ) - - all_periods.extend(day_relaxed_result["periods"]) - if day_metadata.get("phases_used"): - all_phases_used.extend(day_metadata["phases_used"]) - - # Check if this day met the requirement after relaxation - day_standalone = len([p for p in day_relaxed_result["periods"] if not p.get("is_extension")]) - if day_standalone >= min_periods: - days_meeting_requirement += 1 - - # Sort all periods by start time - all_periods.sort(key=lambda p: p["start"]) - - # Recalculate metadata for combined periods - _recalculate_period_metadata(all_periods) - - # Build combined result - if all_periods: - # Use the last day's result as template - final_result = day_result.copy() - final_result["periods"] = all_periods - else: - final_result = {"periods": [], "metadata": {}, "reference_data": {}} - - total_standalone = len([p for p in all_periods if not p.get("is_extension")]) - - return final_result, { - "relaxation_active": relaxation_was_needed, - "relaxation_attempted": relaxation_was_needed, - "min_periods_requested": 
min_periods, - "periods_found": total_standalone, - "phases_used": list(set(all_phases_used)), # Unique phases used across all days - "days_processed": total_days, - "days_meeting_requirement": days_meeting_requirement, - "relaxation_incomplete": days_meeting_requirement < total_days, - } - - -def _relax_single_day( # noqa: PLR0913 - Comprehensive filter relaxation per day - day_prices: list[dict], - config: PeriodConfig, - min_periods: int, - relaxation_step_pct: int, - should_show_callback: Callable[[str | None, str | None], bool], - baseline_periods: list[dict], - day_label: str, -) -> tuple[dict[str, Any], dict[str, Any]]: - """ - Run comprehensive relaxation for a single day. - - NEW STRATEGY: For each flex level, try all filter combinations before increasing flex. - This finds solutions faster by relaxing filters first (cheaper than increasing flex). - - Per flex level (6.25%, 7.5%, 8.75%, 10%), try in order: - 1. Original filters (volatility=configured, level=configured) - 2. Relax only volatility (volatility=any, level=configured) - 3. Relax only level (volatility=configured, level=any) - 4. Relax both (volatility=any, level=any) - - This ensures we find the minimal relaxation needed. Example: - - If periods exist at flex=6.25% with level=any, we find them before trying flex=7.5% - - If periods need both filters relaxed, we try that before increasing flex further - - Args: - day_prices: Price data for this specific day only - config: Base period configuration - min_periods: Minimum periods needed for this day - relaxation_step_pct: Relaxation increment percentage - should_show_callback: Filter visibility callback(volatility_override, level_override) - Returns True if periods should be shown with given overrides. 
- baseline_periods: Periods found with normal filters - day_label: Label for logging (e.g., "2025-11-11") - - Returns: - Tuple of (periods_result, metadata) for this day - - """ - accumulated_periods = baseline_periods.copy() - original_flex = abs(config.flex) - relaxation_increment = original_flex * (relaxation_step_pct / 100.0) - phases_used = [] - relaxed_result = None - - baseline_standalone = len([p for p in baseline_periods if not p.get("is_extension")]) - - # 4 flex levels: original + 3 steps (e.g., 5% → 6.25% → 7.5% → 8.75% → 10%) - for flex_step in range(1, 5): - new_flex = original_flex + (flex_step * relaxation_increment) - new_flex = min(new_flex, 100.0) - - if config.reverse_sort: - new_flex = -new_flex - - # Try filter combinations for this flex level - # Each tuple contains: volatility_override, level_override, label_suffix - filter_attempts = [ - (None, None, ""), # Original config - ("any", None, "+volatility_any"), # Relax volatility only - (None, "any", "+level_any"), # Relax level only - ("any", "any", "+all_filters_any"), # Relax both - ] - - for vol_override, lvl_override, label_suffix in filter_attempts: - # Check if this combination is allowed by user config - if not should_show_callback(vol_override, lvl_override): - continue - - # Calculate periods with this flex + filter combination - relaxed_config = config._replace(flex=new_flex) - relaxed_result = calculate_periods(day_prices, config=relaxed_config) - new_periods = relaxed_result["periods"] - - # Build relaxation level label BEFORE marking periods - flex_pct = round(abs(new_flex) * 100, 1) - relaxation_level = f"price_diff_{flex_pct}%{label_suffix}" - phases_used.append(relaxation_level) - - # Mark NEW periods with their specific relaxation metadata BEFORE merging - for period in new_periods: - period["relaxation_active"] = True - # Set the metadata immediately - this preserves which phase found this period - _mark_periods_with_relaxation([period], relaxation_level, original_flex, 
abs(new_flex)) - - # Merge with accumulated periods - merged, standalone_count = _resolve_period_overlaps( - accumulated_periods, new_periods, config.min_period_length, baseline_periods - ) - - total_standalone = standalone_count + baseline_standalone - filters_label = label_suffix if label_suffix else "(original filters)" - - _LOGGER.debug( - "%sDay %s flex=%.1f%% %s: found %d new periods, %d standalone total (%d baseline + %d new)", - INDENT_L2, - day_label, - flex_pct, - filters_label, - len(new_periods), - total_standalone, - baseline_standalone, - standalone_count, - ) - - accumulated_periods = merged.copy() - - # ✅ EARLY EXIT: Check after EACH filter combination - if total_standalone >= min_periods: - _LOGGER.info( - "Day %s: Success with flex=%.1f%% %s - found %d/%d periods (%d baseline + %d from relaxation)", - day_label, - flex_pct, - filters_label, - total_standalone, - min_periods, - baseline_standalone, - standalone_count, - ) - _recalculate_period_metadata(merged) - result = relaxed_result.copy() - result["periods"] = merged - return result, {"phases_used": phases_used} - - # ❌ Only reach here if ALL phases exhausted WITHOUT reaching min_periods - final_standalone = len([p for p in accumulated_periods if not p.get("is_extension")]) - new_standalone = final_standalone - baseline_standalone - - _LOGGER.warning( - "Day %s: All relaxation phases exhausted WITHOUT reaching goal - " - "found %d/%d standalone periods (%d baseline + %d from relaxation)", - day_label, - final_standalone, - min_periods, - baseline_standalone, - new_standalone, - ) - - _recalculate_period_metadata(accumulated_periods) - - if relaxed_result: - result = relaxed_result.copy() - else: - result = {"periods": accumulated_periods, "metadata": {}, "reference_data": {}} - result["periods"] = accumulated_periods - - return result, {"phases_used": phases_used} - - -def _mark_periods_with_relaxation( - periods: list[dict], - relaxation_level: str, - original_threshold: float, - 
applied_threshold: float, -) -> None: - """ - Mark periods with relaxation information (mutates period dicts in-place). - - Uses consistent 'relaxation_*' prefix for all relaxation-related attributes. - - Args: - periods: List of period dicts to mark - relaxation_level: String describing the relaxation level - original_threshold: Original flex threshold value (decimal, e.g., 0.19 for 19%) - applied_threshold: Actually applied threshold value (decimal, e.g., 0.25 for 25%) - - """ - for period in periods: - period["relaxation_active"] = True - period["relaxation_level"] = relaxation_level - # Convert decimal to percentage for display (0.19 → 19.0) - period["relaxation_threshold_original_%"] = round(original_threshold * 100, 1) - period["relaxation_threshold_applied_%"] = round(applied_threshold * 100, 1) - - -def _resolve_period_overlaps( # noqa: PLR0912, PLR0915, C901 - Complex overlap resolution with replacement and extension logic - existing_periods: list[dict], - new_relaxed_periods: list[dict], - min_period_length: int, - baseline_periods: list[dict] | None = None, -) -> tuple[list[dict], int]: - """ - Resolve overlaps between existing periods and newly found relaxed periods. - - Existing periods (baseline + previous relaxation phases) have priority and remain unchanged. - Newly relaxed periods are adjusted to not overlap with existing periods. - - After splitting relaxed periods to avoid overlaps, each segment is validated against - min_period_length. Segments shorter than this threshold are discarded. 
- - This function is called incrementally after each relaxation phase: - - Phase 1: existing = accumulated, baseline = baseline - - Phase 2: existing = accumulated, baseline = baseline - - Phase 3: existing = accumulated, baseline = baseline - - Args: - existing_periods: All previously found periods (baseline + earlier relaxation phases) - new_relaxed_periods: Periods found in current relaxation phase (will be adjusted) - min_period_length: Minimum period length in minutes (segments shorter than this are discarded) - baseline_periods: Original baseline periods (for extension detection). Extensions only count - against baseline, not against other relaxation periods. - - Returns: - Tuple of (merged_periods, count_standalone_relaxed): - - merged_periods: All periods (existing + adjusted new), sorted by start time - - count_standalone_relaxed: Number of new relaxed periods that count toward min_periods - (excludes extensions of baseline periods only) - - """ - if baseline_periods is None: - baseline_periods = existing_periods # Fallback to existing if not provided - - _LOGGER.debug( - "%s_resolve_period_overlaps called: existing=%d, new=%d, baseline=%d", - INDENT_L3, - len(existing_periods), - len(new_relaxed_periods), - len(baseline_periods), - ) - - if not new_relaxed_periods: - return existing_periods.copy(), 0 - - if not existing_periods: - # No overlaps possible - all relaxed periods are standalone - return new_relaxed_periods.copy(), len(new_relaxed_periods) - - merged = existing_periods.copy() - count_standalone = 0 - - for relaxed in new_relaxed_periods: - # Skip if this exact period is already in existing_periods (duplicate from previous relaxation attempt) - # Compare current start/end (before any splitting), not original_start/end - # Note: original_start/end are set AFTER splitting and indicate split segments from same source - relaxed_start = relaxed["start"] - relaxed_end = relaxed["end"] - - is_duplicate = False - for existing in existing_periods: - # 
Only compare with existing periods that haven't been adjusted (unsplit originals) - # If existing has original_start/end, it's already a split segment - skip comparison - if "original_start" in existing: - continue - - existing_start = existing["start"] - existing_end = existing["end"] - - # Duplicate if same boundaries (within 1 minute tolerance) - tolerance_seconds = 60 # 1 minute tolerance for duplicate detection - if ( - abs((relaxed_start - existing_start).total_seconds()) < tolerance_seconds - and abs((relaxed_end - existing_end).total_seconds()) < tolerance_seconds - ): - is_duplicate = True - _LOGGER.debug( - "%sSkipping duplicate period %s-%s (already exists from previous relaxation)", - INDENT_L4, - relaxed_start.strftime("%H:%M"), - relaxed_end.strftime("%H:%M"), - ) - break - - if is_duplicate: - continue - - # Find all overlapping existing periods - overlaps = [] - for existing in existing_periods: - existing_start = existing["start"] - existing_end = existing["end"] - - # Check for overlap - if relaxed_start < existing_end and relaxed_end > existing_start: - overlaps.append((existing_start, existing_end)) - - if not overlaps: - # No overlap - check if adjacent to baseline period (= extension) - # Only baseline extensions don't count toward min_periods - is_extension = False - for baseline in baseline_periods: - if relaxed_end == baseline["start"] or relaxed_start == baseline["end"]: - is_extension = True - break - - if is_extension: - relaxed["is_extension"] = True - _LOGGER.debug( - "%sMarking period %s-%s as extension (no overlap, adjacent to baseline)", - INDENT_L4, - relaxed_start.strftime("%H:%M"), - relaxed_end.strftime("%H:%M"), - ) - else: - count_standalone += 1 - - merged.append(relaxed) - else: - # Has overlaps - check if this new period extends BASELINE periods - # Extension = new period encompasses/extends baseline period(s) - # Note: If new period encompasses OTHER RELAXED periods, that's a replacement, not extension! 
- is_extension = False - periods_to_replace = [] - - for existing in existing_periods: - existing_start = existing["start"] - existing_end = existing["end"] - - # Check if new period completely encompasses existing period - if relaxed_start <= existing_start and relaxed_end >= existing_end: - # Is this existing period a BASELINE period? - is_baseline = any( - bp["start"] == existing_start and bp["end"] == existing_end for bp in baseline_periods - ) - - if is_baseline: - # Extension of baseline → counts as extension - is_extension = True - _LOGGER.debug( - "%sNew period %s-%s extends BASELINE period %s-%s", - INDENT_L4, - relaxed_start.strftime("%H:%M"), - relaxed_end.strftime("%H:%M"), - existing_start.strftime("%H:%M"), - existing_end.strftime("%H:%M"), - ) - else: - # Encompasses another relaxed period → REPLACEMENT, not extension - periods_to_replace.append(existing) - _LOGGER.debug( - "%sNew period %s-%s replaces relaxed period %s-%s (larger is better)", - INDENT_L4, - relaxed_start.strftime("%H:%M"), - relaxed_end.strftime("%H:%M"), - existing_start.strftime("%H:%M"), - existing_end.strftime("%H:%M"), - ) - - # Remove periods that are being replaced by this larger period - if periods_to_replace: - for period_to_remove in periods_to_replace: - if period_to_remove in merged: - merged.remove(period_to_remove) - _LOGGER.debug( - "%sReplaced period %s-%s with larger period %s-%s", - INDENT_L5, - period_to_remove["start"].strftime("%H:%M"), - period_to_remove["end"].strftime("%H:%M"), - relaxed_start.strftime("%H:%M"), - relaxed_end.strftime("%H:%M"), - ) - - # Split the relaxed period into non-overlapping segments - segments = _split_period_by_overlaps(relaxed_start, relaxed_end, overlaps) - - # If no segments (completely overlapped), but we replaced periods, add the full period - if not segments and periods_to_replace: - _LOGGER.debug( - "%sAdding full replacement period %s-%s (no non-overlapping segments)", - INDENT_L5, - relaxed_start.strftime("%H:%M"), - 
relaxed_end.strftime("%H:%M"), - ) - # Mark as extension if it extends baseline, otherwise standalone - if is_extension: - relaxed["is_extension"] = True - merged.append(relaxed) - continue - - for seg_start, seg_end in segments: - # Calculate segment duration in minutes - segment_duration_minutes = int((seg_end - seg_start).total_seconds() / 60) - - # Skip segment if it's too short - if segment_duration_minutes < min_period_length: - continue - - # Create adjusted period segment - adjusted_period = relaxed.copy() - adjusted_period["start"] = seg_start - adjusted_period["end"] = seg_end - adjusted_period["duration_minutes"] = segment_duration_minutes - - # Mark as adjusted and potentially as extension - adjusted_period["adjusted_for_overlap"] = True - adjusted_period["original_start"] = relaxed_start - adjusted_period["original_end"] = relaxed_end - - # If the original period was an extension, all its segments are extensions too - # OR if segment is adjacent to baseline - segment_is_extension = is_extension - if not segment_is_extension: - # Check if segment is directly adjacent to BASELINE period - for baseline in baseline_periods: - if seg_end == baseline["start"] or seg_start == baseline["end"]: - segment_is_extension = True - break - - if segment_is_extension: - adjusted_period["is_extension"] = True - _LOGGER.debug( - "%sMarking segment %s-%s as extension (original was extension or adjacent to baseline)", - INDENT_L5, - seg_start.strftime("%H:%M"), - seg_end.strftime("%H:%M"), - ) - else: - # Standalone segment counts toward min_periods - count_standalone += 1 - - merged.append(adjusted_period) - - # Sort all periods by start time - merged.sort(key=lambda p: p["start"]) - - # Count ACTUAL standalone periods in final merged list (not just newly added ones) - # This accounts for replacements where old standalone was replaced by new standalone - final_standalone_count = len([p for p in merged if not p.get("is_extension")]) - - # Subtract baseline standalone count 
to get NEW standalone from this relaxation - baseline_standalone_count = len([p for p in baseline_periods if not p.get("is_extension")]) - new_standalone_count = final_standalone_count - baseline_standalone_count - - return merged, new_standalone_count - - -def _split_period_by_overlaps( - period_start: datetime, - period_end: datetime, - overlaps: list[tuple[datetime, datetime]], -) -> list[tuple[datetime, datetime]]: - """ - Split a time period into segments that don't overlap with given ranges. - - Args: - period_start: Start of period to split - period_end: End of period to split - overlaps: List of (start, end) tuples representing overlapping ranges - - Returns: - List of (start, end) tuples for non-overlapping segments - - Example: - period: 09:00-15:00 - overlaps: [(10:00-12:00), (14:00-16:00)] - result: [(09:00-10:00), (12:00-14:00)] - - """ - # Sort overlaps by start time - sorted_overlaps = sorted(overlaps, key=lambda x: x[0]) - - segments = [] - current_pos = period_start - - for overlap_start, overlap_end in sorted_overlaps: - # Add segment before this overlap (if any) - if current_pos < overlap_start: - segments.append((current_pos, overlap_start)) - - # Move position past this overlap - current_pos = max(current_pos, overlap_end) - - # Add final segment after all overlaps (if any) - if current_pos < period_end: - segments.append((current_pos, period_end)) - - return segments diff --git a/custom_components/tibber_prices/period_utils/__init__.py b/custom_components/tibber_prices/period_utils/__init__.py new file mode 100644 index 0000000..88b02eb --- /dev/null +++ b/custom_components/tibber_prices/period_utils/__init__.py @@ -0,0 +1,61 @@ +""" +Period calculation utilities (sub-package for modular organization). 
+ +This package splits period calculation logic into focused modules: +- types: Type definitions and constants +- level_filtering: Interval-level filtering logic +- period_building: Period construction from intervals +- period_statistics: Statistics calculation +- period_merging: Overlap resolution and merging +- relaxation: Per-day relaxation strategy +- core: Main API orchestration +- outlier_filtering: Price spike detection and smoothing + +All public APIs are re-exported for backwards compatibility. +""" + +from __future__ import annotations + +# Re-export main API functions +from .core import calculate_periods, filter_periods_by_volatility + +# Re-export outlier filtering +from .outlier_filtering import filter_price_outliers + +# Re-export relaxation +from .relaxation import calculate_periods_with_relaxation + +# Re-export constants and types +from .types import ( + INDENT_L0, + INDENT_L1, + INDENT_L2, + INDENT_L3, + INDENT_L4, + INDENT_L5, + MINUTES_PER_INTERVAL, + IntervalCriteria, + PeriodConfig, + PeriodData, + PeriodStatistics, + ThresholdConfig, +) + +__all__ = [ + "INDENT_L0", + "INDENT_L1", + "INDENT_L2", + "INDENT_L3", + "INDENT_L4", + "INDENT_L5", + "MINUTES_PER_INTERVAL", + "IntervalCriteria", + "PeriodConfig", + "PeriodData", + "PeriodStatistics", + "ThresholdConfig", + "calculate_periods", + "calculate_periods_with_relaxation", + "filter_periods_by_volatility", + "filter_price_outliers", +] diff --git a/custom_components/tibber_prices/period_utils/core.py b/custom_components/tibber_prices/period_utils/core.py new file mode 100644 index 0000000..cd3d245 --- /dev/null +++ b/custom_components/tibber_prices/period_utils/core.py @@ -0,0 +1,250 @@ +"""Core period calculation API - main entry points.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from custom_components.tibber_prices.period_utils.types import PeriodConfig + +from custom_components.tibber_prices.period_utils.outlier_filtering import 
( + filter_price_outliers, +) +from custom_components.tibber_prices.period_utils.period_building import ( + add_interval_ends, + build_periods, + calculate_reference_prices, + filter_periods_by_end_date, + filter_periods_by_min_length, + split_intervals_by_day, +) +from custom_components.tibber_prices.period_utils.period_merging import ( + merge_adjacent_periods_at_midnight, +) +from custom_components.tibber_prices.period_utils.period_statistics import ( + extract_period_summaries, +) +from custom_components.tibber_prices.period_utils.types import ThresholdConfig + + +def calculate_periods( + all_prices: list[dict], + *, + config: PeriodConfig, +) -> dict[str, Any]: + """ + Calculate price periods (best or peak) from price data. + + This function identifies periods but does NOT store full interval data redundantly. + It returns lightweight period summaries that reference the original price data. + + Steps: + 1. Split prices by day and calculate daily averages + 2. Calculate reference prices (min/max per day) + 3. Build periods based on criteria + 4. Filter by minimum length + 5. Merge adjacent periods at midnight + 6. 
Extract period summaries (start/end times, not full price data) + + Args: + all_prices: All price data points from yesterday/today/tomorrow + config: Period configuration containing reverse_sort, flex, min_distance_from_avg, + min_period_length, threshold_low, and threshold_high + + Returns: + Dict with: + - periods: List of lightweight period summaries (start/end times only) + - metadata: Config and statistics + - reference_data: Daily min/max/avg for on-demand annotation + + """ + # Extract config values + reverse_sort = config.reverse_sort + flex = config.flex + min_distance_from_avg = config.min_distance_from_avg + min_period_length = config.min_period_length + threshold_low = config.threshold_low + threshold_high = config.threshold_high + + if not all_prices: + return { + "periods": [], + "metadata": { + "total_periods": 0, + "config": { + "reverse_sort": reverse_sort, + "flex": flex, + "min_distance_from_avg": min_distance_from_avg, + "min_period_length": min_period_length, + }, + }, + "reference_data": { + "ref_prices": {}, + "avg_prices": {}, + }, + } + + # Ensure prices are sorted chronologically + all_prices_sorted = sorted(all_prices, key=lambda p: p["startsAt"]) + + # Step 1: Split by day and calculate averages + intervals_by_day, avg_price_by_day = split_intervals_by_day(all_prices_sorted) + + # Step 2: Calculate reference prices (min or max per day) + ref_prices = calculate_reference_prices(intervals_by_day, reverse_sort=reverse_sort) + + # Step 2.5: Filter price outliers (smoothing for period formation only) + # This runs BEFORE period formation to prevent isolated price spikes + # from breaking up otherwise continuous periods + all_prices_smoothed = filter_price_outliers( + all_prices_sorted, + abs(flex) * 100, # Convert to percentage (e.g., 0.15 → 15.0) + min_period_length, + ) + + # Step 3: Build periods + price_context = { + "ref_prices": ref_prices, + "avg_prices": avg_price_by_day, + "flex": flex, + "min_distance_from_avg": 
min_distance_from_avg, + } + raw_periods = build_periods( + all_prices_smoothed, # Use smoothed prices for period formation + price_context, + reverse_sort=reverse_sort, + level_filter=config.level_filter, + gap_count=config.gap_count, + ) + + # Step 4: Filter by minimum length + raw_periods = filter_periods_by_min_length(raw_periods, min_period_length) + + # Step 5: Merge adjacent periods at midnight + raw_periods = merge_adjacent_periods_at_midnight(raw_periods) + + # Step 6: Add interval ends + add_interval_ends(raw_periods) + + # Step 7: Filter periods by end date (keep periods ending today or later) + raw_periods = filter_periods_by_end_date(raw_periods) + + # Step 8: Extract lightweight period summaries (no full price data) + # Note: Filtering for current/future is done here based on end date, + # not start date. This preserves periods that started yesterday but end today. + thresholds = ThresholdConfig( + threshold_low=threshold_low, + threshold_high=threshold_high, + threshold_volatility_moderate=config.threshold_volatility_moderate, + threshold_volatility_high=config.threshold_volatility_high, + threshold_volatility_very_high=config.threshold_volatility_very_high, + reverse_sort=reverse_sort, + ) + period_summaries = extract_period_summaries( + raw_periods, + all_prices_sorted, + price_context, + thresholds, + ) + + return { + "periods": period_summaries, # Lightweight summaries only + "metadata": { + "total_periods": len(period_summaries), + "config": { + "reverse_sort": reverse_sort, + "flex": flex, + "min_distance_from_avg": min_distance_from_avg, + "min_period_length": min_period_length, + }, + }, + "reference_data": { + "ref_prices": {k.isoformat(): v for k, v in ref_prices.items()}, + "avg_prices": {k.isoformat(): v for k, v in avg_price_by_day.items()}, + }, + } + + +def filter_periods_by_volatility( + periods_data: dict[str, Any], + min_volatility: str, +) -> dict[str, Any]: + """ + Filter calculated periods based on their internal volatility. 
+ + This applies period-level volatility filtering AFTER periods have been calculated. + Removes periods that don't meet the minimum volatility requirement based on their + own price spread (volatility attribute), not the daily volatility. + + Args: + periods_data: Dict with "periods" and "intervals" lists from calculate_periods_with_relaxation() + min_volatility: Minimum volatility level required ("low", "moderate", "high", "very_high") + + Returns: + Filtered periods_data dict with updated periods, intervals, and metadata. + + """ + periods = periods_data.get("periods", []) + if not periods: + return periods_data + + # "low" means no filtering (accept any volatility level) + if min_volatility == "low": + return periods_data + + # Define volatility hierarchy (LOW < MODERATE < HIGH < VERY_HIGH) + volatility_levels = ["LOW", "MODERATE", "HIGH", "VERY_HIGH"] + + # Map filter config values to actual level names + config_to_level = { + "low": "LOW", + "moderate": "MODERATE", + "high": "HIGH", + "very_high": "VERY_HIGH", + } + + min_level = config_to_level.get(min_volatility, "LOW") + + # Filter periods based on their volatility + filtered_periods = [] + for period in periods: + period_volatility = period.get("volatility", "MODERATE") + + # Check if period's volatility meets or exceeds minimum requirement + try: + period_idx = volatility_levels.index(period_volatility) + min_idx = volatility_levels.index(min_level) + except ValueError: + # If level not found, don't filter out this period + filtered_periods.append(period) + else: + if period_idx >= min_idx: + filtered_periods.append(period) + + # If no periods left after filtering, return empty structure + if not filtered_periods: + return { + "periods": [], + "intervals": [], + "metadata": { + "total_intervals": 0, + "total_periods": 0, + "config": periods_data.get("metadata", {}).get("config", {}), + }, + } + + # Collect intervals from filtered periods + filtered_intervals = [] + for period in filtered_periods: + 
filtered_intervals.extend(period.get("intervals", [])) + + # Update metadata + return { + "periods": filtered_periods, + "intervals": filtered_intervals, + "metadata": { + "total_intervals": len(filtered_intervals), + "total_periods": len(filtered_periods), + "config": periods_data.get("metadata", {}).get("config", {}), + }, + } diff --git a/custom_components/tibber_prices/period_utils/level_filtering.py b/custom_components/tibber_prices/period_utils/level_filtering.py new file mode 100644 index 0000000..d709606 --- /dev/null +++ b/custom_components/tibber_prices/period_utils/level_filtering.py @@ -0,0 +1,120 @@ +"""Interval-level filtering logic for period calculation.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from custom_components.tibber_prices.period_utils.types import IntervalCriteria + +from custom_components.tibber_prices.const import PRICE_LEVEL_MAPPING + + +def check_level_with_gap_tolerance( + interval_level: int, + level_order: int, + consecutive_gaps: int, + gap_count: int, + *, + reverse_sort: bool, +) -> tuple[bool, bool, int]: + """ + Check if interval meets level requirement with gap tolerance. 
+ + Args: + interval_level: Level value of current interval (from PRICE_LEVEL_MAPPING) + level_order: Required level value + consecutive_gaps: Current count of consecutive gap intervals + gap_count: Maximum allowed consecutive gap intervals + reverse_sort: True for peak price, False for best price + + Returns: + Tuple of (meets_level, is_gap, new_consecutive_gaps): + - meets_level: True if interval qualifies (exact match or within gap tolerance) + - is_gap: True if this is a gap interval (deviates by exactly 1 step) + - new_consecutive_gaps: Updated gap counter + + """ + if reverse_sort: + # Peak price: interval must be >= level_order (e.g., EXPENSIVE or higher) + meets_level_exact = interval_level >= level_order + # Gap: exactly 1 step below (e.g., NORMAL when expecting EXPENSIVE) + is_gap = interval_level == level_order - 1 + else: + # Best price: interval must be <= level_order (e.g., CHEAP or lower) + meets_level_exact = interval_level <= level_order + # Gap: exactly 1 step above (e.g., NORMAL when expecting CHEAP) + is_gap = interval_level == level_order + 1 + + # Apply gap tolerance + if meets_level_exact: + return True, False, 0 # Meets level, not a gap, reset counter + if is_gap and consecutive_gaps < gap_count: + return True, True, consecutive_gaps + 1 # Allowed gap, increment counter + return False, False, 0 # Doesn't meet level, reset counter + + +def apply_level_filter( + price_data: dict, + level_order: int | None, + consecutive_gaps: int, + gap_count: int, + *, + reverse_sort: bool, +) -> tuple[bool, int, bool]: + """ + Apply level filter to a single interval. 
+ + Args: + price_data: Price data dict with "level" key + level_order: Required level value (from PRICE_LEVEL_MAPPING) or None if disabled + consecutive_gaps: Current count of consecutive gap intervals + gap_count: Maximum allowed consecutive gap intervals + reverse_sort: True for peak price, False for best price + + Returns: + Tuple of (meets_level, new_consecutive_gaps, is_gap) + + """ + if level_order is None: + return True, consecutive_gaps, False + + interval_level = PRICE_LEVEL_MAPPING.get(price_data.get("level", "NORMAL"), 0) + meets_level, is_gap, new_consecutive_gaps = check_level_with_gap_tolerance( + interval_level, level_order, consecutive_gaps, gap_count, reverse_sort=reverse_sort + ) + return meets_level, new_consecutive_gaps, is_gap + + +def check_interval_criteria( + price: float, + criteria: IntervalCriteria, +) -> tuple[bool, bool]: + """ + Check if interval meets flex and minimum distance criteria. + + Args: + price: Interval price + criteria: Interval criteria (ref_price, avg_price, flex, etc.) 
+ + Returns: + Tuple of (in_flex, meets_min_distance) + + """ + # Calculate percentage difference from reference + percent_diff = ((price - criteria.ref_price) / criteria.ref_price) * 100 if criteria.ref_price != 0 else 0.0 + + # Check if interval qualifies for the period + in_flex = percent_diff >= criteria.flex * 100 if criteria.reverse_sort else percent_diff <= criteria.flex * 100 + + # Minimum distance from average + if criteria.reverse_sort: + # Peak price: must be at least min_distance_from_avg% above average + min_distance_threshold = criteria.avg_price * (1 + criteria.min_distance_from_avg / 100) + meets_min_distance = price >= min_distance_threshold + else: + # Best price: must be at least min_distance_from_avg% below average + min_distance_threshold = criteria.avg_price * (1 - criteria.min_distance_from_avg / 100) + meets_min_distance = price <= min_distance_threshold + + return in_flex, meets_min_distance diff --git a/custom_components/tibber_prices/period_utils/outlier_filtering.py b/custom_components/tibber_prices/period_utils/outlier_filtering.py new file mode 100644 index 0000000..12981f2 --- /dev/null +++ b/custom_components/tibber_prices/period_utils/outlier_filtering.py @@ -0,0 +1,296 @@ +""" +Price outlier filtering for period calculation. + +This module handles the detection and smoothing of single-interval price spikes +that would otherwise break up continuous periods. Outliers are only smoothed for +period formation - original prices are preserved for all statistics. 
+ +Uses statistical methods: +- Linear regression for trend-based spike detection +- Standard deviation for confidence thresholds +- Symmetry checking to avoid smoothing legitimate price shifts +- Zigzag detection with relative volatility for cluster rejection +""" + +from __future__ import annotations + +import logging + +_LOGGER = logging.getLogger(__name__) + +# Outlier filtering constants +MIN_CONTEXT_SIZE = 3 # Minimum intervals needed before/after for analysis +CONFIDENCE_LEVEL = 2.0 # Standard deviations for 95% confidence interval +VOLATILITY_THRESHOLD = 0.05 # 5% max relative std dev for zigzag detection +SYMMETRY_THRESHOLD = 1.5 # Max std dev difference for symmetric spike +RELATIVE_VOLATILITY_THRESHOLD = 2.0 # Window volatility vs context (cluster detection) + +# Module-local log indentation (each module starts at level 0) +INDENT_L0 = "" # All logs in this module (no indentation needed) + + +def _calculate_statistics(prices: list[float]) -> dict[str, float]: + """ + Calculate statistical measures for price context. + + Uses linear regression to detect trends, enabling accurate spike detection + even when prices are gradually rising or falling. + + Args: + prices: List of price values + + Returns: + Dictionary with: + - mean: Average price + - std_dev: Standard deviation + - trend_slope: Linear regression slope (price change per interval) + + """ + n = len(prices) + mean = sum(prices) / n + + # Standard deviation + variance = sum((p - mean) ** 2 for p in prices) / n + std_dev = variance**0.5 + + # Linear trend (least squares regression) + # y = mx + b, we calculate m (slope) + x_values = list(range(n)) # 0, 1, 2, ... 
+ x_mean = sum(x_values) / n + + numerator = sum((x - x_mean) * (y - mean) for x, y in zip(x_values, prices, strict=True)) + denominator = sum((x - x_mean) ** 2 for x in x_values) + + trend_slope = numerator / denominator if denominator != 0 else 0.0 + + return { + "mean": mean, + "std_dev": std_dev, + "trend_slope": trend_slope, + } + + +def _check_symmetry(avg_before: float, avg_after: float, std_dev: float) -> bool: + """ + Check if spike is symmetric (returns to baseline). + + A symmetric spike has similar average prices before and after the spike. + Asymmetric spikes might indicate legitimate price level changes and should + not be smoothed. + + Args: + avg_before: Average price before spike + avg_after: Average price after spike + std_dev: Standard deviation of context prices + + Returns: + True if symmetric (should smooth), False if asymmetric (should keep) + + """ + difference = abs(avg_after - avg_before) + threshold = SYMMETRY_THRESHOLD * std_dev + + return difference <= threshold + + +def _detect_zigzag_pattern(window: list[dict], context_std_dev: float) -> bool: + """ + Detect zigzag pattern or clustered spikes using multiple criteria. + + Enhanced detection with three checks: + 1. Absolute volatility: Is standard deviation too high? + 2. Direction changes: Too many up-down-up transitions? + 3. Relative volatility: Is window more volatile than context? (catches clusters!) + + The third check implicitly handles spike clusters without explicit multi-pass + detection. 
+ + Args: + window: List of price intervals to analyze + context_std_dev: Standard deviation of surrounding context + + Returns: + True if zigzag/cluster detected (reject smoothing) + + """ + prices = [x["total"] for x in window] + + if len(prices) < MIN_CONTEXT_SIZE: + return False + + avg_price = sum(prices) / len(prices) + + # Check 1: Absolute volatility + variance = sum((p - avg_price) ** 2 for p in prices) / len(prices) + std_dev = variance**0.5 + + if std_dev / avg_price > VOLATILITY_THRESHOLD: + return True # Too volatile overall + + # Check 2: Direction changes + direction_changes = 0 + for i in range(1, len(prices) - 1): + prev_trend = prices[i] - prices[i - 1] + next_trend = prices[i + 1] - prices[i] + + # Direction change when signs differ + if prev_trend * next_trend < 0: + direction_changes += 1 + + max_allowed_changes = len(prices) / 3 + if direction_changes > max_allowed_changes: + return True # Too many direction changes + + # Check 3: Relative volatility (NEW - catches spike clusters!) + # If this window is much more volatile than the surrounding context, + # it's likely a cluster of spikes rather than one isolated spike + return std_dev > RELATIVE_VOLATILITY_THRESHOLD * context_std_dev + + +def filter_price_outliers( + intervals: list[dict], + flexibility_pct: float, + _min_duration: int, # Unused, kept for API compatibility +) -> list[dict]: + """ + Filter single-interval price spikes within stable sequences. + + Uses statistical methods to detect and smooth isolated spikes: + - Linear regression to predict expected prices (handles trends) + - Standard deviation for confidence intervals (adapts to volatility) + - Symmetry checking (avoids smoothing legitimate price shifts) + - Zigzag detection (rejects volatile areas and spike clusters) + + This runs BEFORE period formation to smooth out brief anomalies that would + otherwise break continuous periods. Original prices are preserved for all + statistics. 
+ + Args: + intervals: Price intervals to filter (typically 96 for yesterday/today/tomorrow) + flexibility_pct: User's flexibility setting (derives tolerance) + _min_duration: Minimum period duration (unused, kept for API compatibility) + + Returns: + Intervals with smoothed prices (marked with _smoothed flag) + + """ + _LOGGER.info( + "%sSmoothing price outliers: %d intervals, flex=%.1f%%", + INDENT_L0, + len(intervals), + flexibility_pct, + ) + + # Convert percentage to ratio once for all comparisons (e.g., 15.0 → 0.15) + flexibility_ratio = flexibility_pct / 100 + + result = [] + smoothed_count = 0 + + for i, current in enumerate(intervals): + current_price = current["total"] + + # Get context windows (3 intervals before and after) + context_before = intervals[max(0, i - MIN_CONTEXT_SIZE) : i] + context_after = intervals[i + 1 : min(len(intervals), i + 1 + MIN_CONTEXT_SIZE)] + + # Need sufficient context on both sides + if len(context_before) < MIN_CONTEXT_SIZE or len(context_after) < MIN_CONTEXT_SIZE: + result.append(current) + continue + + # Calculate statistics for combined context (excluding current interval) + context_prices = [x["total"] for x in context_before + context_after] + stats = _calculate_statistics(context_prices) + + # Predict expected price at current position using linear trend + # Position offset: current is at index len(context_before) in the combined window + offset_position = len(context_before) + expected_price = stats["mean"] + (stats["trend_slope"] * offset_position) + + # Calculate how far current price deviates from expected + residual = abs(current_price - expected_price) + + # Tolerance based on statistical confidence (2 std dev = 95% confidence) + tolerance = stats["std_dev"] * CONFIDENCE_LEVEL + + # Not a spike if within tolerance + if residual <= tolerance: + result.append(current) + continue + + # SPIKE CANDIDATE DETECTED - Now validate + + # Check 1: Context Stability + # If context is changing significantly, this might be a 
legitimate transition + avg_before = sum(x["total"] for x in context_before) / len(context_before) + avg_after = sum(x["total"] for x in context_after) / len(context_after) + + context_diff_pct = abs(avg_after - avg_before) / avg_before if avg_before > 0 else 0 + + if context_diff_pct > flexibility_ratio: + result.append(current) + _LOGGER.debug( + "%sInterval %s: Context unstable (%.1f%% change) - not a spike", + INDENT_L0, + current.get("startsAt", f"index {i}"), + context_diff_pct * 100, + ) + continue + + # Check 2: Symmetry + # Symmetric spikes return to baseline; asymmetric might be legitimate shifts + if not _check_symmetry(avg_before, avg_after, stats["std_dev"]): + result.append(current) + _LOGGER.debug( + "%sSpike at %s rejected: Asymmetric (before=%.2f, after=%.2f ct/kWh)", + INDENT_L0, + current.get("startsAt", f"index {i}"), + avg_before * 100, + avg_after * 100, + ) + continue + + # Check 3: Zigzag Pattern / Cluster Detection + # Build analysis window including the spike + analysis_window = [*context_before[-2:], current, *context_after[:2]] + + if _detect_zigzag_pattern(analysis_window, stats["std_dev"]): + result.append(current) + _LOGGER.debug( + "%sSpike at %s rejected: Zigzag/cluster pattern detected", + INDENT_L0, + current.get("startsAt", f"index {i}"), + ) + continue + + # ALL CHECKS PASSED - Smooth the spike + smoothed = current.copy() + smoothed["total"] = expected_price # Use trend-based prediction + smoothed["_smoothed"] = True + smoothed["_original_price"] = current_price + + result.append(smoothed) + smoothed_count += 1 + + _LOGGER.debug( + "%sSmoothed spike at %s: %.2f → %.2f ct/kWh (residual: %.2f, tolerance: %.2f, trend_slope: %.4f)", + INDENT_L0, + current.get("startsAt", f"index {i}"), + current_price * 100, + expected_price * 100, + residual * 100, + tolerance * 100, + stats["trend_slope"] * 100, + ) + + if smoothed_count > 0: + _LOGGER.info( + "%sPrice outlier smoothing complete: %d/%d intervals smoothed (%.1f%%)", + INDENT_L0, + 
smoothed_count, + len(intervals), + (smoothed_count / len(intervals)) * 100, + ) + + return result diff --git a/custom_components/tibber_prices/period_utils/period_building.py b/custom_components/tibber_prices/period_utils/period_building.py new file mode 100644 index 0000000..01b18c9 --- /dev/null +++ b/custom_components/tibber_prices/period_utils/period_building.py @@ -0,0 +1,247 @@ +"""Period building and basic filtering logic.""" + +from __future__ import annotations + +import logging +from datetime import date, timedelta +from typing import Any + +from custom_components.tibber_prices.const import PRICE_LEVEL_MAPPING +from custom_components.tibber_prices.period_utils.level_filtering import ( + apply_level_filter, + check_interval_criteria, +) +from custom_components.tibber_prices.period_utils.types import ( + MINUTES_PER_INTERVAL, + IntervalCriteria, +) +from homeassistant.util import dt as dt_util + +_LOGGER = logging.getLogger(__name__) + +# Module-local log indentation (each module starts at level 0) +INDENT_L0 = "" # Entry point / main function + + +def split_intervals_by_day(all_prices: list[dict]) -> tuple[dict[date, list[dict]], dict[date, float]]: + """Split intervals by day and calculate average price per day.""" + intervals_by_day: dict[date, list[dict]] = {} + avg_price_by_day: dict[date, float] = {} + + for price_data in all_prices: + dt = dt_util.parse_datetime(price_data["startsAt"]) + if dt is None: + continue + dt = dt_util.as_local(dt) + date_key = dt.date() + intervals_by_day.setdefault(date_key, []).append(price_data) + + for date_key, intervals in intervals_by_day.items(): + avg_price_by_day[date_key] = sum(float(p["total"]) for p in intervals) / len(intervals) + + return intervals_by_day, avg_price_by_day + + +def calculate_reference_prices(intervals_by_day: dict[date, list[dict]], *, reverse_sort: bool) -> dict[date, float]: + """Calculate reference prices for each day (min for best, max for peak).""" + ref_prices: dict[date, float] = {} + 
for date_key, intervals in intervals_by_day.items(): + prices = [float(p["total"]) for p in intervals] + ref_prices[date_key] = max(prices) if reverse_sort else min(prices) + return ref_prices + + +def build_periods( # noqa: PLR0915 - Complex period building logic requires many statements + all_prices: list[dict], + price_context: dict[str, Any], + *, + reverse_sort: bool, + level_filter: str | None = None, + gap_count: int = 0, +) -> list[list[dict]]: + """ + Build periods, allowing periods to cross midnight (day boundary). + + Periods are built day-by-day, comparing each interval to its own day's reference. + When a day boundary is crossed, the current period is ended. + Adjacent periods at midnight are merged in a later step. + + Args: + all_prices: All price data points + price_context: Dict with ref_prices, avg_prices, flex, min_distance_from_avg + reverse_sort: True for peak price (high prices), False for best price (low prices) + level_filter: Level filter string ("cheap", "expensive", "any", None) + gap_count: Number of allowed consecutive intervals deviating by exactly 1 level step + + """ + ref_prices = price_context["ref_prices"] + avg_prices = price_context["avg_prices"] + flex = price_context["flex"] + min_distance_from_avg = price_context["min_distance_from_avg"] + + # Calculate level_order if level_filter is active + level_order = None + level_filter_active = False + if level_filter and level_filter.lower() != "any": + level_order = PRICE_LEVEL_MAPPING.get(level_filter.upper(), 0) + level_filter_active = True + filter_direction = "≥" if reverse_sort else "≤" + gap_info = f", gap_tolerance={gap_count}" if gap_count > 0 else "" + _LOGGER.debug( + "%sLevel filter active: %s (order %s, require interval level %s filter level%s)", + INDENT_L0, + level_filter.upper(), + level_order, + filter_direction, + gap_info, + ) + else: + status = "RELAXED to ANY" if (level_filter and level_filter.lower() == "any") else "DISABLED (not configured)" + 
_LOGGER.debug("%sLevel filter: %s (accepting all levels)", INDENT_L0, status) + + periods: list[list[dict]] = [] + current_period: list[dict] = [] + last_ref_date: date | None = None + consecutive_gaps = 0 # Track consecutive intervals that deviate by 1 level step + intervals_checked = 0 + intervals_filtered_by_level = 0 + + for price_data in all_prices: + starts_at = dt_util.parse_datetime(price_data["startsAt"]) + if starts_at is None: + continue + starts_at = dt_util.as_local(starts_at) + date_key = starts_at.date() + + # Use smoothed price for criteria checks (flex/distance) + # but preserve original price for period data + price_for_criteria = float(price_data["total"]) # Smoothed if this interval was an outlier + price_original = float(price_data.get("_original_price", price_data["total"])) + + intervals_checked += 1 + + # Check flex and minimum distance criteria (using smoothed price) + criteria = IntervalCriteria( + ref_price=ref_prices[date_key], + avg_price=avg_prices[date_key], + flex=flex, + min_distance_from_avg=min_distance_from_avg, + reverse_sort=reverse_sort, + ) + in_flex, meets_min_distance = check_interval_criteria(price_for_criteria, criteria) + + # If this interval was smoothed, check if smoothing actually made a difference + smoothing_was_impactful = False + if price_data.get("_smoothed", False): + # Check if original price would have passed the same criteria + in_flex_original, meets_min_distance_original = check_interval_criteria(price_original, criteria) + # Smoothing was impactful if original would have failed but smoothed passed + smoothing_was_impactful = (in_flex and meets_min_distance) and not ( + in_flex_original and meets_min_distance_original + ) + + # Level filter: Check if interval meets level requirement with gap tolerance + meets_level, consecutive_gaps, is_level_gap = apply_level_filter( + price_data, level_order, consecutive_gaps, gap_count, reverse_sort=reverse_sort + ) + if not meets_level: + intervals_filtered_by_level += 
1 + + # Split period if day changes + if last_ref_date is not None and date_key != last_ref_date and current_period: + periods.append(current_period) + current_period = [] + consecutive_gaps = 0 # Reset gap counter on day boundary + + last_ref_date = date_key + + # Add to period if all criteria are met + if in_flex and meets_min_distance and meets_level: + current_period.append( + { + "interval_hour": starts_at.hour, + "interval_minute": starts_at.minute, + "interval_time": f"{starts_at.hour:02d}:{starts_at.minute:02d}", + "price": price_original, # Use original price in period data + "interval_start": starts_at, + "smoothing_was_impactful": smoothing_was_impactful, # Only True if smoothing changed whether the interval qualified for period inclusion + "is_level_gap": is_level_gap, # Track if kept due to level gap tolerance + } + ) + elif current_period: + # Criteria no longer met, end current period + periods.append(current_period) + current_period = [] + consecutive_gaps = 0 # Reset gap counter + + # Add final period if exists + if current_period: + periods.append(current_period) + + # Log summary + if level_filter_active and intervals_checked > 0: + filtered_pct = (intervals_filtered_by_level / intervals_checked) * 100 + _LOGGER.debug( + "%sLevel filter summary: %d/%d intervals filtered (%.1f%%)", + INDENT_L0, + intervals_filtered_by_level, + intervals_checked, + filtered_pct, + ) + + return periods + + +def filter_periods_by_min_length(periods: list[list[dict]], min_period_length: int) -> list[list[dict]]: + """Filter periods to only include those meeting the minimum length requirement.""" + min_intervals = min_period_length // MINUTES_PER_INTERVAL + return [period for period in periods if len(period) >= min_intervals] + + +def add_interval_ends(periods: list[list[dict]]) -> None: + """Add interval_end to each interval in-place.""" + for period in periods: + for interval in period: + start = interval.get("interval_start") + if start: + interval["interval_end"] = 
start + timedelta(minutes=MINUTES_PER_INTERVAL) + + +def filter_periods_by_end_date(periods: list[list[dict]]) -> list[list[dict]]: + """ + Filter periods to keep only relevant ones for today and tomorrow. + + Keep periods that: + - End in the future (> now) + - End today but after the start of the day (not exactly at midnight) + + This removes: + - Periods that ended yesterday + - Periods that ended exactly at midnight today (they're completely in the past) + """ + now = dt_util.now() + today = now.date() + midnight_today = dt_util.start_of_local_day(now) + + filtered = [] + for period in periods: + if not period: + continue + + # Get the end time of the period (last interval's end) + last_interval = period[-1] + period_end = last_interval.get("interval_end") + + if not period_end: + continue + + # Keep if period ends in the future + if period_end > now: + filtered.append(period) + continue + + # Keep if period ends today but AFTER midnight (not exactly at midnight) + if period_end.date() == today and period_end > midnight_today: + filtered.append(period) + + return filtered diff --git a/custom_components/tibber_prices/period_utils/period_merging.py b/custom_components/tibber_prices/period_utils/period_merging.py new file mode 100644 index 0000000..aee229f --- /dev/null +++ b/custom_components/tibber_prices/period_utils/period_merging.py @@ -0,0 +1,382 @@ +"""Period merging and overlap resolution logic.""" + +from __future__ import annotations + +import logging +from datetime import datetime, timedelta + +from custom_components.tibber_prices.period_utils.types import MINUTES_PER_INTERVAL +from homeassistant.util import dt as dt_util + +_LOGGER = logging.getLogger(__name__) + +# Module-local log indentation (each module starts at level 0) +INDENT_L0 = "" # Entry point / main function +INDENT_L1 = " " # Nested logic / loop iterations +INDENT_L2 = " " # Deeper nesting + + +def merge_adjacent_periods_at_midnight(periods: list[list[dict]]) -> list[list[dict]]: + """ + 
Merge adjacent periods that meet at midnight. + + When two periods are detected separately for consecutive days but are directly + adjacent at midnight (15 minutes apart), merge them into a single period. + + """ + if not periods: + return periods + + merged = [] + i = 0 + + while i < len(periods): + current_period = periods[i] + + # Check if there's a next period and if they meet at midnight + if i + 1 < len(periods): + next_period = periods[i + 1] + + last_start = current_period[-1].get("interval_start") + next_start = next_period[0].get("interval_start") + + if last_start and next_start: + time_diff = next_start - last_start + last_date = last_start.date() + next_date = next_start.date() + + # If they are 15 minutes apart and on different days (crossing midnight) + if time_diff == timedelta(minutes=MINUTES_PER_INTERVAL) and next_date > last_date: + # Merge the two periods + merged_period = current_period + next_period + merged.append(merged_period) + i += 2 # Skip both periods as we've merged them + continue + + # If no merge happened, just add the current period + merged.append(current_period) + i += 1 + + return merged + + +def recalculate_period_metadata(periods: list[dict]) -> None: + """ + Recalculate period metadata after merging periods. + + Updates period_position, periods_total, and periods_remaining for all periods + based on chronological order. + + This must be called after resolve_period_overlaps() to ensure metadata + reflects the final merged period list. 
+ + Args: + periods: List of period summary dicts (mutated in-place) + + """ + if not periods: + return + + # Sort periods chronologically by start time + periods.sort(key=lambda p: p.get("start") or dt_util.now()) + + # Update metadata for all periods + total_periods = len(periods) + + for position, period in enumerate(periods, 1): + period["period_position"] = position + period["periods_total"] = total_periods + period["periods_remaining"] = total_periods - position + + +def split_period_by_overlaps( + period_start: datetime, + period_end: datetime, + overlaps: list[tuple[datetime, datetime]], +) -> list[tuple[datetime, datetime]]: + """ + Split a time period into segments that don't overlap with given ranges. + + Args: + period_start: Start of period to split + period_end: End of period to split + overlaps: List of (start, end) tuples representing overlapping ranges + + Returns: + List of (start, end) tuples for non-overlapping segments + + Example: + period: 09:00-15:00 + overlaps: [(10:00-12:00), (14:00-16:00)] + result: [(09:00-10:00), (12:00-14:00)] + + """ + # Sort overlaps by start time + sorted_overlaps = sorted(overlaps, key=lambda x: x[0]) + + segments = [] + current_pos = period_start + + for overlap_start, overlap_end in sorted_overlaps: + # Add segment before this overlap (if any) + if current_pos < overlap_start: + segments.append((current_pos, overlap_start)) + + # Move position past this overlap + current_pos = max(current_pos, overlap_end) + + # Add final segment after all overlaps (if any) + if current_pos < period_end: + segments.append((current_pos, period_end)) + + return segments + + +def resolve_period_overlaps( # noqa: PLR0912, PLR0915, C901 - Complex overlap resolution with replacement and extension logic + existing_periods: list[dict], + new_relaxed_periods: list[dict], + min_period_length: int, + baseline_periods: list[dict] | None = None, +) -> tuple[list[dict], int]: + """ + Resolve overlaps between existing periods and newly found 
relaxed periods. + + Existing periods (baseline + previous relaxation phases) have priority and remain unchanged. + Newly relaxed periods are adjusted to not overlap with existing periods. + + After splitting relaxed periods to avoid overlaps, each segment is validated against + min_period_length. Segments shorter than this threshold are discarded. + + This function is called incrementally after each relaxation phase: + - Phase 1: existing = accumulated, baseline = baseline + - Phase 2: existing = accumulated, baseline = baseline + - Phase 3: existing = accumulated, baseline = baseline + + Args: + existing_periods: All previously found periods (baseline + earlier relaxation phases) + new_relaxed_periods: Periods found in current relaxation phase (will be adjusted) + min_period_length: Minimum period length in minutes (segments shorter than this are discarded) + baseline_periods: Original baseline periods (for extension detection). Extensions only count + against baseline, not against other relaxation periods. 
+ + Returns: + Tuple of (merged_periods, count_standalone_relaxed): + - merged_periods: All periods (existing + adjusted new), sorted by start time + - count_standalone_relaxed: Number of new relaxed periods that count toward min_periods + (excludes extensions of baseline periods only) + + """ + if baseline_periods is None: + baseline_periods = existing_periods # Fallback to existing if not provided + + _LOGGER.debug( + "%sresolve_period_overlaps called: existing=%d, new=%d, baseline=%d", + INDENT_L0, + len(existing_periods), + len(new_relaxed_periods), + len(baseline_periods), + ) + + if not new_relaxed_periods: + return existing_periods.copy(), 0 + + if not existing_periods: + # No overlaps possible - all relaxed periods are standalone + return new_relaxed_periods.copy(), len(new_relaxed_periods) + + merged = existing_periods.copy() + count_standalone = 0 + + for relaxed in new_relaxed_periods: + # Skip if this exact period is already in existing_periods (duplicate from previous relaxation attempt) + # Compare current start/end (before any splitting), not original_start/end + # Note: original_start/end are set AFTER splitting and indicate split segments from same source + relaxed_start = relaxed["start"] + relaxed_end = relaxed["end"] + + is_duplicate = False + for existing in existing_periods: + # Only compare with existing periods that haven't been adjusted (unsplit originals) + # If existing has original_start/end, it's already a split segment - skip comparison + if "original_start" in existing: + continue + + existing_start = existing["start"] + existing_end = existing["end"] + + # Duplicate if same boundaries (within 1 minute tolerance) + tolerance_seconds = 60 # 1 minute tolerance for duplicate detection + if ( + abs((relaxed_start - existing_start).total_seconds()) < tolerance_seconds + and abs((relaxed_end - existing_end).total_seconds()) < tolerance_seconds + ): + is_duplicate = True + _LOGGER.debug( + "%sSkipping duplicate period %s-%s (already exists 
from previous relaxation)", + INDENT_L1, + relaxed_start.strftime("%H:%M"), + relaxed_end.strftime("%H:%M"), + ) + break + + if is_duplicate: + continue + + # Find all overlapping existing periods + overlaps = [] + for existing in existing_periods: + existing_start = existing["start"] + existing_end = existing["end"] + + # Check for overlap + if relaxed_start < existing_end and relaxed_end > existing_start: + overlaps.append((existing_start, existing_end)) + + if not overlaps: + # No overlap - check if adjacent to baseline period (= extension) + # Only baseline extensions don't count toward min_periods + is_extension = False + for baseline in baseline_periods: + if relaxed_end == baseline["start"] or relaxed_start == baseline["end"]: + is_extension = True + break + + if is_extension: + relaxed["is_extension"] = True + _LOGGER.debug( + "%sMarking period %s-%s as extension (no overlap, adjacent to baseline)", + INDENT_L1, + relaxed_start.strftime("%H:%M"), + relaxed_end.strftime("%H:%M"), + ) + else: + count_standalone += 1 + + merged.append(relaxed) + else: + # Has overlaps - check if this new period extends BASELINE periods + # Extension = new period encompasses/extends baseline period(s) + # Note: If new period encompasses OTHER RELAXED periods, that's a replacement, not extension! + is_extension = False + periods_to_replace = [] + + for existing in existing_periods: + existing_start = existing["start"] + existing_end = existing["end"] + + # Check if new period completely encompasses existing period + if relaxed_start <= existing_start and relaxed_end >= existing_end: + # Is this existing period a BASELINE period? 
+ is_baseline = any( + bp["start"] == existing_start and bp["end"] == existing_end for bp in baseline_periods + ) + + if is_baseline: + # Extension of baseline → counts as extension + is_extension = True + _LOGGER.debug( + "%sNew period %s-%s extends BASELINE period %s-%s", + INDENT_L1, + relaxed_start.strftime("%H:%M"), + relaxed_end.strftime("%H:%M"), + existing_start.strftime("%H:%M"), + existing_end.strftime("%H:%M"), + ) + else: + # Encompasses another relaxed period → REPLACEMENT, not extension + periods_to_replace.append(existing) + _LOGGER.debug( + "%sNew period %s-%s replaces relaxed period %s-%s (larger is better)", + INDENT_L1, + relaxed_start.strftime("%H:%M"), + relaxed_end.strftime("%H:%M"), + existing_start.strftime("%H:%M"), + existing_end.strftime("%H:%M"), + ) + + # Remove periods that are being replaced by this larger period + if periods_to_replace: + for period_to_remove in periods_to_replace: + if period_to_remove in merged: + merged.remove(period_to_remove) + _LOGGER.debug( + "%sReplaced period %s-%s with larger period %s-%s", + INDENT_L2, + period_to_remove["start"].strftime("%H:%M"), + period_to_remove["end"].strftime("%H:%M"), + relaxed_start.strftime("%H:%M"), + relaxed_end.strftime("%H:%M"), + ) + + # Split the relaxed period into non-overlapping segments + segments = split_period_by_overlaps(relaxed_start, relaxed_end, overlaps) + + # If no segments (completely overlapped), but we replaced periods, add the full period + if not segments and periods_to_replace: + _LOGGER.debug( + "%sAdding full replacement period %s-%s (no non-overlapping segments)", + INDENT_L2, + relaxed_start.strftime("%H:%M"), + relaxed_end.strftime("%H:%M"), + ) + # Mark as extension if it extends baseline, otherwise standalone + if is_extension: + relaxed["is_extension"] = True + merged.append(relaxed) + continue + + for seg_start, seg_end in segments: + # Calculate segment duration in minutes + segment_duration_minutes = int((seg_end - seg_start).total_seconds() / 
60) + + # Skip segment if it's too short + if segment_duration_minutes < min_period_length: + continue + + # Create adjusted period segment + adjusted_period = relaxed.copy() + adjusted_period["start"] = seg_start + adjusted_period["end"] = seg_end + adjusted_period["duration_minutes"] = segment_duration_minutes + + # Mark as adjusted and potentially as extension + adjusted_period["adjusted_for_overlap"] = True + adjusted_period["original_start"] = relaxed_start + adjusted_period["original_end"] = relaxed_end + + # If the original period was an extension, all its segments are extensions too + # OR if segment is adjacent to baseline + segment_is_extension = is_extension + if not segment_is_extension: + # Check if segment is directly adjacent to BASELINE period + for baseline in baseline_periods: + if seg_end == baseline["start"] or seg_start == baseline["end"]: + segment_is_extension = True + break + + if segment_is_extension: + adjusted_period["is_extension"] = True + _LOGGER.debug( + "%sMarking segment %s-%s as extension (original was extension or adjacent to baseline)", + INDENT_L2, + seg_start.strftime("%H:%M"), + seg_end.strftime("%H:%M"), + ) + else: + # Standalone segment counts toward min_periods + count_standalone += 1 + + merged.append(adjusted_period) + + # Sort all periods by start time + merged.sort(key=lambda p: p["start"]) + + # Count ACTUAL standalone periods in final merged list (not just newly added ones) + # This accounts for replacements where old standalone was replaced by new standalone + final_standalone_count = len([p for p in merged if not p.get("is_extension")]) + + # Subtract baseline standalone count to get NEW standalone from this relaxation + baseline_standalone_count = len([p for p in baseline_periods if not p.get("is_extension")]) + new_standalone_count = final_standalone_count - baseline_standalone_count + + return merged, new_standalone_count diff --git a/custom_components/tibber_prices/period_utils/period_statistics.py 
b/custom_components/tibber_prices/period_utils/period_statistics.py new file mode 100644 index 0000000..6cbc13d --- /dev/null +++ b/custom_components/tibber_prices/period_utils/period_statistics.py @@ -0,0 +1,317 @@ +"""Period statistics calculation and summary building.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from datetime import datetime + + from custom_components.tibber_prices.period_utils.types import ( + PeriodData, + PeriodStatistics, + ThresholdConfig, + ) + +from custom_components.tibber_prices.period_utils.types import MINUTES_PER_INTERVAL +from custom_components.tibber_prices.price_utils import ( + aggregate_period_levels, + aggregate_period_ratings, + calculate_volatility_level, +) +from homeassistant.util import dt as dt_util + + +def calculate_period_price_diff( + price_avg: float, + start_time: datetime, + price_context: dict[str, Any], +) -> tuple[float | None, float | None]: + """ + Calculate period price difference from daily reference (min or max). + + Uses reference price from start day of the period for consistency. + + Returns: + Tuple of (period_price_diff, period_price_diff_pct) or (None, None) if no reference available. + + """ + if not price_context or not start_time: + return None, None + + ref_prices = price_context.get("ref_prices", {}) + date_key = start_time.date() + ref_price = ref_prices.get(date_key) + + if ref_price is None: + return None, None + + # Convert reference price to minor units (ct/øre) + ref_price_minor = round(ref_price * 100, 2) + period_price_diff = round(price_avg - ref_price_minor, 2) + period_price_diff_pct = None + if ref_price_minor != 0: + period_price_diff_pct = round((period_price_diff / ref_price_minor) * 100, 2) + + return period_price_diff, period_price_diff_pct + + +def calculate_aggregated_rating_difference(period_price_data: list[dict]) -> float | None: + """ + Calculate aggregated rating difference percentage for the period. 
+ + Takes the average of all interval differences (from their respective thresholds). + + Args: + period_price_data: List of price data dictionaries with "difference" field + + Returns: + Average difference percentage, or None if no valid data + + """ + differences = [] + for price_data in period_price_data: + diff = price_data.get("difference") + if diff is not None: + differences.append(float(diff)) + + if not differences: + return None + + return round(sum(differences) / len(differences), 2) + + +def calculate_period_price_statistics(period_price_data: list[dict]) -> dict[str, float]: + """ + Calculate price statistics for a period. + + Args: + period_price_data: List of price data dictionaries with "total" field + + Returns: + Dictionary with price_avg, price_min, price_max, price_spread (all in minor units: ct/øre) + + """ + prices_minor = [round(float(p["total"]) * 100, 2) for p in period_price_data] + + if not prices_minor: + return { + "price_avg": 0.0, + "price_min": 0.0, + "price_max": 0.0, + "price_spread": 0.0, + } + + price_avg = round(sum(prices_minor) / len(prices_minor), 2) + price_min = round(min(prices_minor), 2) + price_max = round(max(prices_minor), 2) + price_spread = round(price_max - price_min, 2) + + return { + "price_avg": price_avg, + "price_min": price_min, + "price_max": price_max, + "price_spread": price_spread, + } + + +def build_period_summary_dict( + period_data: PeriodData, + stats: PeriodStatistics, + *, + reverse_sort: bool, +) -> dict: + """ + Build the complete period summary dictionary. + + Args: + period_data: Period timing and position data + stats: Calculated period statistics + reverse_sort: True for peak price, False for best price (keyword-only) + + Returns: + Complete period summary dictionary following attribute ordering + + """ + # Build complete period summary (following attribute ordering from AGENTS.md) + summary = { + # 1. Time information (when does this apply?) 
+ "start": period_data.start_time, + "end": period_data.end_time, + "duration_minutes": period_data.period_length * MINUTES_PER_INTERVAL, + # 2. Core decision attributes (what should I do?) + "level": stats.aggregated_level, + "rating_level": stats.aggregated_rating, + "rating_difference_%": stats.rating_difference_pct, + # 3. Price statistics (how much does it cost?) + "price_avg": stats.price_avg, + "price_min": stats.price_min, + "price_max": stats.price_max, + "price_spread": stats.price_spread, + "volatility": stats.volatility, + # 4. Price differences will be added below if available + # 5. Detail information (additional context) + "period_interval_count": period_data.period_length, + "period_position": period_data.period_idx, + "periods_total": period_data.total_periods, + "periods_remaining": period_data.total_periods - period_data.period_idx, + } + + # Add period price difference attributes based on sensor type (step 4) + if stats.period_price_diff is not None: + if reverse_sort: + # Peak price sensor: compare to daily maximum + summary["period_price_diff_from_daily_max"] = stats.period_price_diff + if stats.period_price_diff_pct is not None: + summary["period_price_diff_from_daily_max_%"] = stats.period_price_diff_pct + else: + # Best price sensor: compare to daily minimum + summary["period_price_diff_from_daily_min"] = stats.period_price_diff + if stats.period_price_diff_pct is not None: + summary["period_price_diff_from_daily_min_%"] = stats.period_price_diff_pct + + return summary + + +def extract_period_summaries( + periods: list[list[dict]], + all_prices: list[dict], + price_context: dict[str, Any], + thresholds: ThresholdConfig, +) -> list[dict]: + """ + Extract complete period summaries with all aggregated attributes. 
+ + Returns sensor-ready period summaries with: + - Timestamps and positioning (start, end, hour, minute, time) + - Aggregated price statistics (price_avg, price_min, price_max, price_spread) + - Volatility categorization (low/moderate/high/very_high based on absolute spread) + - Rating difference percentage (aggregated from intervals) + - Period price differences (period_price_diff_from_daily_min/max) + - Aggregated level and rating_level + - Interval count (number of 15-min intervals in period) + + All data is pre-calculated and ready for display - no further processing needed. + + Args: + periods: List of periods, where each period is a list of interval dictionaries + all_prices: All price data from the API (enriched with level, difference, rating_level) + price_context: Dictionary with ref_prices and avg_prices per day + thresholds: Threshold configuration for calculations + + """ + from custom_components.tibber_prices.period_utils.types import ( # noqa: PLC0415 - Avoid circular import + PeriodData, + PeriodStatistics, + ) + + # Build lookup dictionary for full price data by timestamp + price_lookup: dict[str, dict] = {} + for price_data in all_prices: + starts_at = dt_util.parse_datetime(price_data["startsAt"]) + if starts_at: + starts_at = dt_util.as_local(starts_at) + price_lookup[starts_at.isoformat()] = price_data + + summaries = [] + total_periods = len(periods) + + for period_idx, period in enumerate(periods, 1): + if not period: + continue + + first_interval = period[0] + last_interval = period[-1] + + start_time = first_interval.get("interval_start") + end_time = last_interval.get("interval_end") + + if not start_time or not end_time: + continue + + # Look up full price data for each interval in the period + period_price_data: list[dict] = [] + for interval in period: + start = interval.get("interval_start") + if not start: + continue + start_iso = start.isoformat() + price_data = price_lookup.get(start_iso) + if price_data: + 
period_price_data.append(price_data) + + # Calculate aggregated level and rating_level + aggregated_level = None + aggregated_rating = None + + if period_price_data: + # Aggregate level (from API's "level" field) + aggregated_level = aggregate_period_levels(period_price_data) + + # Aggregate rating_level (from calculated "rating_level" and "difference" fields) + if thresholds.threshold_low is not None and thresholds.threshold_high is not None: + aggregated_rating, _ = aggregate_period_ratings( + period_price_data, + thresholds.threshold_low, + thresholds.threshold_high, + ) + + # Calculate price statistics (in minor units: ct/øre) + price_stats = calculate_period_price_statistics(period_price_data) + + # Calculate period price difference from daily reference + period_price_diff, period_price_diff_pct = calculate_period_price_diff( + price_stats["price_avg"], start_time, price_context + ) + + # Calculate volatility (categorical) and aggregated rating difference (numeric) + volatility = calculate_volatility_level( + price_stats["price_spread"], + threshold_moderate=thresholds.threshold_volatility_moderate, + threshold_high=thresholds.threshold_volatility_high, + threshold_very_high=thresholds.threshold_volatility_very_high, + ).lower() + rating_difference_pct = calculate_aggregated_rating_difference(period_price_data) + + # Count how many intervals in this period benefited from smoothing (i.e., would have been excluded) + smoothed_impactful_count = sum(1 for interval in period if interval.get("smoothing_was_impactful", False)) + + # Count how many intervals were kept due to level filter gap tolerance + level_gap_count = sum(1 for interval in period if interval.get("is_level_gap", False)) + + # Build period data and statistics objects + period_data = PeriodData( + start_time=start_time, + end_time=end_time, + period_length=len(period), + period_idx=period_idx, + total_periods=total_periods, + ) + + stats = PeriodStatistics( + aggregated_level=aggregated_level, + 
aggregated_rating=aggregated_rating, + rating_difference_pct=rating_difference_pct, + price_avg=price_stats["price_avg"], + price_min=price_stats["price_min"], + price_max=price_stats["price_max"], + price_spread=price_stats["price_spread"], + volatility=volatility, + period_price_diff=period_price_diff, + period_price_diff_pct=period_price_diff_pct, + ) + + # Build complete period summary + summary = build_period_summary_dict(period_data, stats, reverse_sort=thresholds.reverse_sort) + + # Add smoothing information if any intervals benefited from smoothing + if smoothed_impactful_count > 0: + summary["period_interval_smoothed_count"] = smoothed_impactful_count + + # Add level gap tolerance information if any intervals were kept as gaps + if level_gap_count > 0: + summary["period_interval_level_gap_count"] = level_gap_count + + summaries.append(summary) + + return summaries diff --git a/custom_components/tibber_prices/period_utils/relaxation.py b/custom_components/tibber_prices/period_utils/relaxation.py new file mode 100644 index 0000000..45e9d33 --- /dev/null +++ b/custom_components/tibber_prices/period_utils/relaxation.py @@ -0,0 +1,547 @@ +"""Relaxation strategy for finding minimum periods per day.""" + +from __future__ import annotations + +import logging +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from collections.abc import Callable + from datetime import date + + from custom_components.tibber_prices.period_utils.types import PeriodConfig + +from custom_components.tibber_prices.period_utils.period_merging import ( + recalculate_period_metadata, + resolve_period_overlaps, +) +from custom_components.tibber_prices.period_utils.types import ( + INDENT_L0, + INDENT_L1, + INDENT_L2, +) +from homeassistant.util import dt as dt_util + +_LOGGER = logging.getLogger(__name__) + + +def group_periods_by_day(periods: list[dict]) -> dict[date, list[dict]]: + """ + Group periods by the day they end in. 
+ + This ensures periods crossing midnight are counted towards the day they end, + not the day they start. Example: Period 23:00 yesterday - 02:00 today counts + as "today" since it ends today. + + Args: + periods: List of period summary dicts with "start" and "end" datetime + + Returns: + Dict mapping date to list of periods ending on that date + + """ + periods_by_day: dict[date, list[dict]] = {} + + for period in periods: + # Use end time for grouping so periods crossing midnight are counted + # towards the day they end (more relevant for min_periods check) + end_time = period.get("end") + if end_time: + day = end_time.date() + periods_by_day.setdefault(day, []).append(period) + + return periods_by_day + + +def group_prices_by_day(all_prices: list[dict]) -> dict[date, list[dict]]: + """ + Group price intervals by the day they belong to (today and future only). + + Args: + all_prices: List of price dicts with "startsAt" timestamp + + Returns: + Dict mapping date to list of price intervals for that day (only today and future) + + """ + today = dt_util.now().date() + prices_by_day: dict[date, list[dict]] = {} + + for price in all_prices: + starts_at = dt_util.parse_datetime(price["startsAt"]) + if starts_at: + price_date = dt_util.as_local(starts_at).date() + # Only include today and future days + if price_date >= today: + prices_by_day.setdefault(price_date, []).append(price) + + return prices_by_day + + +def check_min_periods_per_day(periods: list[dict], min_periods: int, all_prices: list[dict]) -> bool: + """ + Check if minimum periods requirement is met for each day individually. + + Returns True if we should STOP relaxation (enough periods found per day). + Returns False if we should CONTINUE relaxation (not enough periods yet). 
+ + Args: + periods: List of period summary dicts + min_periods: Minimum number of periods required per day + all_prices: All available price intervals (used to determine which days have data) + + Returns: + True if every day with price data has at least min_periods, False otherwise + + """ + if not periods: + return False # No periods at all, continue relaxation + + # Get all days that have price data (today and future only, not yesterday) + today = dt_util.now().date() + available_days = set() + for price in all_prices: + starts_at = dt_util.parse_datetime(price["startsAt"]) + if starts_at: + price_date = dt_util.as_local(starts_at).date() + # Only count today and future days (not yesterday) + if price_date >= today: + available_days.add(price_date) + + if not available_days: + return False # No price data for today/future, continue relaxation + + # Group found periods by day + periods_by_day = group_periods_by_day(periods) + + # Check each day with price data: ALL must have at least min_periods + # Only count standalone periods (exclude extensions) + for day in available_days: + day_periods = periods_by_day.get(day, []) + # Count only standalone periods (not extensions) + standalone_count = sum(1 for p in day_periods if not p.get("is_extension")) + if standalone_count < min_periods: + _LOGGER.debug( + "Day %s has only %d standalone periods (need %d) - continuing relaxation", + day, + standalone_count, + min_periods, + ) + return False # This day doesn't have enough, continue relaxation + + # All days with price data have enough periods, stop relaxation + return True + + +def mark_periods_with_relaxation( + periods: list[dict], + relaxation_level: str, + original_threshold: float, + applied_threshold: float, +) -> None: + """ + Mark periods with relaxation information (mutates period dicts in-place). + + Uses consistent 'relaxation_*' prefix for all relaxation-related attributes. 
+ + Args: + periods: List of period dicts to mark + relaxation_level: String describing the relaxation level + original_threshold: Original flex threshold value (decimal, e.g., 0.19 for 19%) + applied_threshold: Actually applied threshold value (decimal, e.g., 0.25 for 25%) + + """ + for period in periods: + period["relaxation_active"] = True + period["relaxation_level"] = relaxation_level + # Convert decimal to percentage for display (0.19 → 19.0) + period["relaxation_threshold_original_%"] = round(original_threshold * 100, 1) + period["relaxation_threshold_applied_%"] = round(applied_threshold * 100, 1) + + +def calculate_periods_with_relaxation( # noqa: PLR0913, PLR0915 - Per-day relaxation requires many parameters and statements + all_prices: list[dict], + *, + config: PeriodConfig, + enable_relaxation: bool, + min_periods: int, + relaxation_step_pct: int, + should_show_callback: Callable[[str | None, str | None], bool], +) -> tuple[dict[str, Any], dict[str, Any]]: + """ + Calculate periods with optional per-day filter relaxation. + + NEW: Each day gets its own independent relaxation loop. Today can be in Phase 1 + while tomorrow is in Phase 3, ensuring each day finds enough periods. + + If min_periods is not reached with normal filters, this function gradually + relaxes filters in multiple phases FOR EACH DAY SEPARATELY: + + Phase 1: Increase flex threshold step-by-step (up to 4 attempts) + Phase 2: Disable volatility filter (set to "any") + Phase 3: Disable level filter (set to "any") + + Args: + all_prices: All price data points + config: Base period configuration + enable_relaxation: Whether relaxation is enabled + min_periods: Minimum number of periods required PER DAY + relaxation_step_pct: Percentage of original flex to add per relaxation step + should_show_callback: Callback function(volatility_override, level_override) -> bool + Returns True if periods should be shown with given filter overrides. + Pass None to use original configured filter values. 
+ + Returns: + Tuple of (periods_result, relaxation_metadata): + - periods_result: Same format as calculate_periods() output, with periods from all days + - relaxation_metadata: Dict with relaxation information (aggregated across all days) + + """ + # Import here to avoid circular dependency + from custom_components.tibber_prices.period_utils.core import ( # noqa: PLC0415 + calculate_periods, + ) + + # Compact INFO-level summary + period_type = "PEAK PRICE" if config.reverse_sort else "BEST PRICE" + relaxation_status = "ON" if enable_relaxation else "OFF" + if enable_relaxation: + _LOGGER.info( + "Calculating %s periods: relaxation=%s, target=%d/day, flex=%.1f%%", + period_type, + relaxation_status, + min_periods, + abs(config.flex) * 100, + ) + else: + _LOGGER.info( + "Calculating %s periods: relaxation=%s, flex=%.1f%%", + period_type, + relaxation_status, + abs(config.flex) * 100, + ) + + # Detailed DEBUG-level context header + period_type_full = "PEAK PRICE (most expensive)" if config.reverse_sort else "BEST PRICE (cheapest)" + _LOGGER.debug( + "%s========== %s PERIODS ==========", + INDENT_L0, + period_type_full, + ) + _LOGGER.debug( + "%sRelaxation: %s", + INDENT_L0, + "ENABLED (user setting: ON)" if enable_relaxation else "DISABLED by user configuration", + ) + _LOGGER.debug( + "%sBase config: flex=%.1f%%, min_length=%d min", + INDENT_L0, + abs(config.flex) * 100, + config.min_period_length, + ) + if enable_relaxation: + _LOGGER.debug( + "%sRelaxation target: %d periods per day", + INDENT_L0, + min_periods, + ) + _LOGGER.debug( + "%sRelaxation strategy: %.1f%% flex increment per step (4 flex levels x 4 filter combinations)", + INDENT_L0, + relaxation_step_pct, + ) + _LOGGER.debug( + "%sEarly exit: After EACH filter combination when target reached", + INDENT_L0, + ) + _LOGGER.debug( + "%s=============================================", + INDENT_L0, + ) + + # Group prices by day (for both relaxation enabled/disabled) + prices_by_day = 
group_prices_by_day(all_prices) + + if not prices_by_day: + # No price data for today/future + _LOGGER.warning( + "No price data available for today/future - cannot calculate periods", + ) + return {"periods": [], "metadata": {}, "reference_data": {}}, { + "relaxation_active": False, + "relaxation_attempted": False, + "min_periods_requested": min_periods if enable_relaxation else 0, + "periods_found": 0, + } + + total_days = len(prices_by_day) + _LOGGER.info( + "Calculating baseline periods for %d days...", + total_days, + ) + + # === BASELINE CALCULATION (same for both modes) === + all_periods: list[dict] = [] + all_phases_used: list[str] = [] + relaxation_was_needed = False + days_meeting_requirement = 0 + + for day, day_prices in sorted(prices_by_day.items()): + _LOGGER.debug( + "%sProcessing day %s with %d price intervals", + INDENT_L1, + day, + len(day_prices), + ) + + # Calculate baseline periods for this day + day_result = calculate_periods(day_prices, config=config) + day_periods = day_result["periods"] + standalone_count = len([p for p in day_periods if not p.get("is_extension")]) + + _LOGGER.debug( + "%sDay %s baseline: Found %d standalone periods%s", + INDENT_L1, + day, + standalone_count, + f" (need {min_periods})" if enable_relaxation else "", + ) + + # Check if relaxation is needed for this day + if not enable_relaxation or standalone_count >= min_periods: + # No relaxation needed/possible - use baseline + if enable_relaxation: + _LOGGER.debug( + "%sDay %s: Target reached with baseline - no relaxation needed", + INDENT_L1, + day, + ) + all_periods.extend(day_periods) + days_meeting_requirement += 1 + continue + + # === RELAXATION PATH (only when enabled AND needed) === + _LOGGER.debug( + "%sDay %s: Baseline insufficient - starting relaxation", + INDENT_L1, + day, + ) + relaxation_was_needed = True + + # Run full relaxation for this specific day + day_relaxed_result, day_metadata = relax_single_day( + day_prices=day_prices, + config=config, + 
min_periods=min_periods, + relaxation_step_pct=relaxation_step_pct, + should_show_callback=should_show_callback, + baseline_periods=day_periods, + day_label=str(day), + ) + + all_periods.extend(day_relaxed_result["periods"]) + if day_metadata.get("phases_used"): + all_phases_used.extend(day_metadata["phases_used"]) + + # Check if this day met the requirement after relaxation + day_standalone = len([p for p in day_relaxed_result["periods"] if not p.get("is_extension")]) + if day_standalone >= min_periods: + days_meeting_requirement += 1 + + # Sort all periods by start time + all_periods.sort(key=lambda p: p["start"]) + + # Recalculate metadata for combined periods + recalculate_period_metadata(all_periods) + + # Build combined result + if all_periods: + # Use the last day's result as template + final_result = day_result.copy() + final_result["periods"] = all_periods + else: + final_result = {"periods": [], "metadata": {}, "reference_data": {}} + + total_standalone = len([p for p in all_periods if not p.get("is_extension")]) + + return final_result, { + "relaxation_active": relaxation_was_needed, + "relaxation_attempted": relaxation_was_needed, + "min_periods_requested": min_periods, + "periods_found": total_standalone, + "phases_used": list(set(all_phases_used)), # Unique phases used across all days + "days_processed": total_days, + "days_meeting_requirement": days_meeting_requirement, + "relaxation_incomplete": days_meeting_requirement < total_days, + } + + +def relax_single_day( # noqa: PLR0913 - Comprehensive filter relaxation per day + day_prices: list[dict], + config: PeriodConfig, + min_periods: int, + relaxation_step_pct: int, + should_show_callback: Callable[[str | None, str | None], bool], + baseline_periods: list[dict], + day_label: str, +) -> tuple[dict[str, Any], dict[str, Any]]: + """ + Run comprehensive relaxation for a single day. + + NEW STRATEGY: For each flex level, try all filter combinations before increasing flex. 
+ This finds solutions faster by relaxing filters first (cheaper than increasing flex). + + Per flex level (6.25%, 7.5%, 8.75%, 10%), try in order: + 1. Original filters (volatility=configured, level=configured) + 2. Relax only volatility (volatility=any, level=configured) + 3. Relax only level (volatility=configured, level=any) + 4. Relax both (volatility=any, level=any) + + This ensures we find the minimal relaxation needed. Example: + - If periods exist at flex=6.25% with level=any, we find them before trying flex=7.5% + - If periods need both filters relaxed, we try that before increasing flex further + + Args: + day_prices: Price data for this specific day only + config: Base period configuration + min_periods: Minimum periods needed for this day + relaxation_step_pct: Relaxation increment percentage + should_show_callback: Filter visibility callback(volatility_override, level_override) + Returns True if periods should be shown with given overrides. + baseline_periods: Periods found with normal filters + day_label: Label for logging (e.g., "2025-11-11") + + Returns: + Tuple of (periods_result, metadata) for this day + + """ + # Import here to avoid circular dependency + from custom_components.tibber_prices.period_utils.core import ( # noqa: PLC0415 + calculate_periods, + ) + + accumulated_periods = baseline_periods.copy() + original_flex = abs(config.flex) + relaxation_increment = original_flex * (relaxation_step_pct / 100.0) + phases_used = [] + relaxed_result = None + + baseline_standalone = len([p for p in baseline_periods if not p.get("is_extension")]) + + # 4 relaxed flex levels, one increment each beyond the baseline (e.g., 5% → 6.25% → 7.5% → 8.75% → 10%) + for flex_step in range(1, 5): + new_flex = original_flex + (flex_step * relaxation_increment) + new_flex = min(new_flex, 100.0) # NOTE(review): flex is a decimal (0.19 = 19%), so 100.0 never clamps — was 1.0 intended? + + if config.reverse_sort: + new_flex = -new_flex + + # Try filter combinations for this flex level + # Each tuple contains: volatility_override, level_override, label_suffix + filter_attempts = [ + 
(None, None, ""), # Original config + ("any", None, "+volatility_any"), # Relax volatility only + (None, "any", "+level_any"), # Relax level only + ("any", "any", "+all_filters_any"), # Relax both + ] + + for vol_override, lvl_override, label_suffix in filter_attempts: + # Check if this combination is allowed by user config + if not should_show_callback(vol_override, lvl_override): + continue + + # Calculate periods with this flex + filter combination + # Apply level override if specified + level_filter_value = lvl_override if lvl_override is not None else config.level_filter + + # Log filter changes + flex_pct = round(abs(new_flex) * 100, 1) + if lvl_override is not None: + _LOGGER.debug( + "%sDay %s flex=%.1f%%: OVERRIDING level_filter: %s → %s", + INDENT_L2, + day_label, + flex_pct, + config.level_filter or "None", + str(lvl_override).upper(), + ) + + relaxed_config = config._replace( + flex=new_flex, + level_filter=level_filter_value, + ) + relaxed_result = calculate_periods(day_prices, config=relaxed_config) + new_periods = relaxed_result["periods"] + + # Build relaxation level label BEFORE marking periods + relaxation_level = f"price_diff_{flex_pct}%{label_suffix}" + phases_used.append(relaxation_level) + + # Mark NEW periods with their specific relaxation metadata BEFORE merging + for period in new_periods: + period["relaxation_active"] = True + # Set the metadata immediately - this preserves which phase found this period + mark_periods_with_relaxation([period], relaxation_level, original_flex, abs(new_flex)) + + # Merge with accumulated periods + merged, standalone_count = resolve_period_overlaps( + accumulated_periods, new_periods, config.min_period_length, baseline_periods + ) + + total_standalone = standalone_count + baseline_standalone + filters_label = label_suffix if label_suffix else "(original filters)" + + _LOGGER.debug( + "%sDay %s flex=%.1f%% %s: found %d new periods, %d standalone total (%d baseline + %d new)", + INDENT_L2, + day_label, + 
flex_pct, + filters_label, + len(new_periods), + total_standalone, + baseline_standalone, + standalone_count, + ) + + accumulated_periods = merged.copy() + + # ✅ EARLY EXIT: Check after EACH filter combination + if total_standalone >= min_periods: + _LOGGER.info( + "Day %s: Success with flex=%.1f%% %s - found %d/%d periods (%d baseline + %d from relaxation)", + day_label, + flex_pct, + filters_label, + total_standalone, + min_periods, + baseline_standalone, + standalone_count, + ) + recalculate_period_metadata(merged) + result = relaxed_result.copy() + result["periods"] = merged + return result, {"phases_used": phases_used} + + # ❌ Only reach here if ALL phases exhausted WITHOUT reaching min_periods + final_standalone = len([p for p in accumulated_periods if not p.get("is_extension")]) + new_standalone = final_standalone - baseline_standalone + + _LOGGER.warning( + "Day %s: All relaxation phases exhausted WITHOUT reaching goal - " + "found %d/%d standalone periods (%d baseline + %d from relaxation)", + day_label, + final_standalone, + min_periods, + baseline_standalone, + new_standalone, + ) + + recalculate_period_metadata(accumulated_periods) + + if relaxed_result: + result = relaxed_result.copy() + else: + result = {"periods": accumulated_periods, "metadata": {}, "reference_data": {}} + result["periods"] = accumulated_periods + + return result, {"phases_used": phases_used} diff --git a/custom_components/tibber_prices/period_utils/types.py b/custom_components/tibber_prices/period_utils/types.py new file mode 100644 index 0000000..a96de38 --- /dev/null +++ b/custom_components/tibber_prices/period_utils/types.py @@ -0,0 +1,89 @@ +"""Type definitions and constants for period calculation.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, NamedTuple + +if TYPE_CHECKING: + from datetime import datetime + +from custom_components.tibber_prices.const import ( + DEFAULT_PRICE_RATING_THRESHOLD_HIGH, + DEFAULT_PRICE_RATING_THRESHOLD_LOW, + 
DEFAULT_VOLATILITY_THRESHOLD_HIGH, + DEFAULT_VOLATILITY_THRESHOLD_MODERATE, + DEFAULT_VOLATILITY_THRESHOLD_VERY_HIGH, +) + +# Constants +MINUTES_PER_INTERVAL = 15 + +# Log indentation levels for visual hierarchy +INDENT_L0 = "" # Top level (calculate_periods_with_relaxation) +INDENT_L1 = " " # Per-day loop +INDENT_L2 = " " # Flex/filter loop (_relax_single_day) +INDENT_L3 = " " # _resolve_period_overlaps function +INDENT_L4 = " " # Period-by-period analysis +INDENT_L5 = " " # Segment details + + +class PeriodConfig(NamedTuple): + """Configuration for period calculation.""" + + reverse_sort: bool + flex: float + min_distance_from_avg: float + min_period_length: int + threshold_low: float = DEFAULT_PRICE_RATING_THRESHOLD_LOW + threshold_high: float = DEFAULT_PRICE_RATING_THRESHOLD_HIGH + threshold_volatility_moderate: float = DEFAULT_VOLATILITY_THRESHOLD_MODERATE + threshold_volatility_high: float = DEFAULT_VOLATILITY_THRESHOLD_HIGH + threshold_volatility_very_high: float = DEFAULT_VOLATILITY_THRESHOLD_VERY_HIGH + level_filter: str | None = None # "any", "cheap", "expensive", etc. 
or None + gap_count: int = 0 # Number of allowed consecutive deviating intervals + + +class PeriodData(NamedTuple): + """Data for building a period summary.""" + + start_time: datetime + end_time: datetime + period_length: int + period_idx: int + total_periods: int + + +class PeriodStatistics(NamedTuple): + """Calculated statistics for a period.""" + + aggregated_level: str | None + aggregated_rating: str | None + rating_difference_pct: float | None + price_avg: float + price_min: float + price_max: float + price_spread: float + volatility: str + period_price_diff: float | None + period_price_diff_pct: float | None + + +class ThresholdConfig(NamedTuple): + """Threshold configuration for period calculations.""" + + threshold_low: float | None + threshold_high: float | None + threshold_volatility_moderate: float + threshold_volatility_high: float + threshold_volatility_very_high: float + reverse_sort: bool + + +class IntervalCriteria(NamedTuple): + """Criteria for checking if an interval qualifies for a period.""" + + ref_price: float + avg_price: float + flex: float + min_distance_from_avg: float + reverse_sort: bool diff --git a/custom_components/tibber_prices/translations/de.json b/custom_components/tibber_prices/translations/de.json index 2b21361..0da6150 100644 --- a/custom_components/tibber_prices/translations/de.json +++ b/custom_components/tibber_prices/translations/de.json @@ -104,7 +104,6 @@ "best_price_min_period_length": "Minimale Periodenlänge", "best_price_flex": "Flexibilität: Maximal über dem Mindestpreis", "best_price_min_distance_from_avg": "Mindestabstand: Erforderlich unter dem Tagesdurchschnitt", - "best_price_min_volatility": "Mindest-Volatilitätsfilter", "best_price_max_level": "Preisniveau-Filter (Optional)", "best_price_max_level_gap_count": "Lückentoleranz für Niveaufilter", "enable_min_periods_best": "Mindestanzahl Perioden anstreben", @@ -112,7 +111,6 @@ "relaxation_step_best": "Lockerungsschritt" }, "data_description": { - 
"best_price_min_volatility": "Zeigt Bestpreis-Perioden nur an, wenn die interne Preisvolatilität der Periode (Preisspanne innerhalb der Periode) mindestens diesem Level entspricht. Standard: 'Niedrig' (zeigt Perioden mit beliebigem Volatilitätslevel) - ermöglicht das Finden günstiger Perioden auch wenn die Preise stabil sind. Wähle 'Moderat'/'Hoch' um nur Perioden mit signifikanten Preisschwankungen anzuzeigen, was auf dynamischere Preismöglichkeiten hinweisen kann.", "best_price_max_level": "Zeigt Bestpreis-Perioden nur an, wenn sie Intervalle mit Preisniveaus ≤ dem gewählten Wert enthalten. Beispiel: Wahl von 'Günstig' bedeutet, dass die Periode mindestens ein 'SEHR_GÜNSTIG' oder 'GÜNSTIG' Intervall haben muss. Dies stellt sicher, dass 'Bestpreis'-Perioden nicht nur relativ günstig für den Tag sind, sondern tatsächlich günstig in absoluten Zahlen. Wähle 'Beliebig' um Bestpreise unabhängig vom absoluten Preisniveau anzuzeigen.", "best_price_max_level_gap_count": "Maximale Anzahl aufeinanderfolgender Intervalle, die exakt um eine Niveaustufe vom geforderten Level abweichen dürfen. Beispiel: Bei Filter 'Günstig' und Lückentoleranz 1 wird die Sequenz 'GÜNSTIG, GÜNSTIG, NORMAL, GÜNSTIG' akzeptiert (NORMAL ist eine Stufe über GÜNSTIG). Dies verhindert, dass Perioden durch gelegentliche Niveau-Abweichungen aufgespalten werden. Standard: 0 (strenge Filterung, keine Toleranz).", "enable_min_periods_best": "Wenn aktiviert, werden Filter schrittweise gelockert, falls nicht genug Perioden gefunden wurden. 
Dies versucht die gewünschte Mindestanzahl zu erreichen, was dazu führen kann, dass auch weniger optimale Zeiträume als Bestpreis-Perioden markiert werden.", @@ -128,7 +126,6 @@ "peak_price_min_period_length": "Minimale Periodenlänge", "peak_price_flex": "Flexibilität: Maximal unter dem Höchstpreis (negativer Wert)", "peak_price_min_distance_from_avg": "Mindestabstand: Erforderlich über dem Tagesdurchschnitt", - "peak_price_min_volatility": "Mindest-Volatilitätsfilter", "peak_price_min_level": "Preisniveau-Filter (Optional)", "peak_price_max_level_gap_count": "Lückentoleranz für Niveaufilter", "enable_min_periods_peak": "Mindestanzahl Perioden anstreben", @@ -136,7 +133,6 @@ "relaxation_step_peak": "Lockerungsschritt" }, "data_description": { - "peak_price_min_volatility": "Zeigt Spitzenpreis-Perioden nur an, wenn die interne Preisvolatilität der Periode (Preisspanne innerhalb der Periode) mindestens diesem Level entspricht. Standard: 'Niedrig' (zeigt Perioden mit beliebigem Volatilitätslevel) - ermöglicht das Identifizieren teurer Perioden auch wenn die Preise stabil sind. Wähle 'Moderat'/'Hoch' um nur Perioden mit signifikanten Preisschwankungen anzuzeigen, was auf dringenderen Bedarf hinweisen kann, diese Zeiten zu vermeiden.", "peak_price_min_level": "Zeigt Spitzenpreis-Perioden nur an, wenn sie Intervalle mit Preisniveaus ≥ dem gewählten Wert enthalten. Beispiel: Wahl von 'Teuer' bedeutet, dass die Periode mindestens ein 'TEUER' oder 'SEHR_TEUER' Intervall haben muss. Dies stellt sicher, dass 'Spitzenpreis'-Perioden nicht nur relativ teuer für den Tag sind, sondern tatsächlich teuer in absoluten Zahlen. Wähle 'Beliebig' um Spitzenpreise unabhängig vom absoluten Preisniveau anzuzeigen.", "peak_price_max_level_gap_count": "Maximale Anzahl aufeinanderfolgender Intervalle, die exakt um eine Niveaustufe vom geforderten Level abweichen dürfen. 
Beispiel: Bei Filter 'Teuer' und Lückentoleranz 2 wird die Sequenz 'TEUER, NORMAL, NORMAL, TEUER' akzeptiert (NORMAL ist eine Stufe unter TEUER). Dies verhindert, dass Perioden durch gelegentliche Niveau-Abweichungen aufgespalten werden. Standard: 0 (strenge Filterung, keine Toleranz).", "enable_min_periods_peak": "Wenn aktiviert, werden Filter schrittweise gelockert, falls nicht genug Perioden gefunden wurden. Dies versucht die gewünschte Mindestanzahl zu erreichen, um sicherzustellen, dass du auch an Tagen mit ungewöhnlichen Preismustern vor teuren Perioden gewarnt wirst.", diff --git a/custom_components/tibber_prices/translations/en.json b/custom_components/tibber_prices/translations/en.json index ad0a6a1..468cb61 100644 --- a/custom_components/tibber_prices/translations/en.json +++ b/custom_components/tibber_prices/translations/en.json @@ -104,7 +104,6 @@ "best_price_min_period_length": "Minimum Period Length", "best_price_flex": "Flexibility: Maximum above minimum price", "best_price_min_distance_from_avg": "Minimum Distance: Required below daily average", - "best_price_min_volatility": "Minimum Volatility Filter", "best_price_max_level": "Price Level Filter (Optional)", "best_price_max_level_gap_count": "Level Filter Gap Tolerance", "enable_min_periods_best": "Try to Achieve Minimum Period Count", @@ -112,7 +111,6 @@ "relaxation_step_best": "Filter Relaxation Step Size" }, "data_description": { - "best_price_min_volatility": "Only show best price periods when the period's internal price volatility (spread within the period) meets or exceeds this level. Default: 'Low' (show periods with any volatility level) - allows finding cheap periods even if prices are stable. Select 'Moderate'/'High' to only show periods with significant price variations, which may indicate more dynamic pricing opportunities.", "best_price_max_level": "Only show best price periods if they contain intervals with price levels ≤ selected value. 
For example, selecting 'Cheap' means the period must have at least one 'VERY_CHEAP' or 'CHEAP' interval. This ensures 'best price' periods are not just relatively cheap for the day, but actually cheap in absolute terms. Select 'Any' to show best prices regardless of their absolute price level.", "best_price_max_level_gap_count": "Maximum number of consecutive intervals allowed that deviate by exactly one level step from the required level. For example: with 'Cheap' filter and gap count 1, a sequence 'CHEAP, CHEAP, NORMAL, CHEAP' is accepted (NORMAL is one step above CHEAP). This prevents periods from being split by occasional level deviations. Default: 0 (strict filtering, no tolerance).", "enable_min_periods_best": "When enabled, filters will be gradually relaxed if not enough periods are found. This attempts to reach the desired minimum number of periods, which may include less optimal time windows as best-price periods.", @@ -128,7 +126,6 @@ "peak_price_min_period_length": "Minimum Period Length", "peak_price_flex": "Flexibility: Maximum below maximum price (negative value)", "peak_price_min_distance_from_avg": "Minimum Distance: Required above daily average", - "peak_price_min_volatility": "Minimum Volatility Filter", "peak_price_min_level": "Price Level Filter (Optional)", "peak_price_max_level_gap_count": "Level Filter Gap Tolerance", "enable_min_periods_peak": "Try to Achieve Minimum Period Count", @@ -136,7 +133,6 @@ "relaxation_step_peak": "Filter Relaxation Step Size" }, "data_description": { - "peak_price_min_volatility": "Only show peak price periods when the period's internal price volatility (spread within the period) meets or exceeds this level. Default: 'Low' (show periods with any volatility level) - allows identifying expensive periods even if prices are stable. 
Select 'Moderate'/'High' to only show periods with significant price variations, which may indicate more urgent need to avoid these times.", "peak_price_min_level": "Only show peak price periods if they contain intervals with price levels ≥ selected value. For example, selecting 'Expensive' means the period must have at least one 'EXPENSIVE' or 'VERY_EXPENSIVE' interval. This ensures 'peak price' periods are not just relatively expensive for the day, but actually expensive in absolute terms. Select 'Any' to show peak prices regardless of their absolute price level.", "peak_price_max_level_gap_count": "Maximum number of consecutive intervals allowed that deviate by exactly one level step from the required level. For example: with 'Expensive' filter and gap count 2, a sequence 'EXPENSIVE, NORMAL, NORMAL, EXPENSIVE' is accepted (NORMAL is one step below EXPENSIVE). This prevents periods from being split by occasional level deviations. Default: 0 (strict filtering, no tolerance).", "enable_min_periods_peak": "When enabled, filters will be gradually relaxed if not enough periods are found. 
This attempts to reach the desired minimum number of periods to ensure you're warned about expensive periods even on days with unusual price patterns.", diff --git a/custom_components/tibber_prices/translations/nb.json b/custom_components/tibber_prices/translations/nb.json index dd57633..23d7d34 100644 --- a/custom_components/tibber_prices/translations/nb.json +++ b/custom_components/tibber_prices/translations/nb.json @@ -104,7 +104,6 @@ "best_price_min_period_length": "Minimum periodelengde", "best_price_flex": "Fleksibilitet: Maksimum % over minimumspris", "best_price_min_distance_from_avg": "Minimumsavstand: Påkrevd % under daglig gjennomsnitt", - "best_price_min_volatility": "Minimum volatilitetsfilter", "best_price_max_level": "Prisnivåfilter (valgfritt)", "best_price_max_level_gap_count": "Gaptoleranse for nivåfilter", "enable_min_periods_best": "Prøv å oppnå minimum antall perioder", @@ -112,7 +111,6 @@ "relaxation_step_best": "Avslappingstrinn" }, "data_description": { - "best_price_min_volatility": "Vis kun beste prisperioder når periodens interne prisvolatilitet (prisspennet innenfor perioden) oppfyller eller overskrider dette nivået. Standard: 'Lav' (vis perioder med hvilket som helst volatilitetsnivå) - gjør det mulig å finne billige perioder selv om prisene er stabile. Velg 'Moderat'/'Høy' for kun å vise perioder med betydelige prisvariasjoner, noe som kan indikere mer dynamiske prismuligheter.", "best_price_max_level": "Vis kun beste prisperioder hvis de inneholder intervaller med prisnivåer ≤ valgt verdi. For eksempel: å velge 'Billig' betyr at perioden må ha minst ett 'VELDIG_BILLIG' eller 'BILLIG' intervall. Dette sikrer at 'beste pris'-perioder ikke bare er relativt billige for dagen, men faktisk billige i absolutte tall. Velg 'Alle' for å vise beste priser uavhengig av deres absolutte prisnivå.", "enable_min_periods_best": "Når aktivert vil filtrene gradvis bli lempeligere hvis det ikke blir funnet nok perioder. 
Dette forsøker å nå ønsket minimum antall perioder, noe som kan føre til at mindre optimale tidsrom blir markert som beste-pris-perioder.", "min_periods_best": "Minimum antall beste-pris-perioder å sikte mot per dag. Filtre vil bli lempet trinn for trinn for å prøve å oppnå dette antallet. Kun aktiv når 'Prøv å oppnå minimum antall perioder' er aktivert. Standard: 1", @@ -128,7 +126,6 @@ "peak_price_min_period_length": "Minimum periodelengde", "peak_price_flex": "Fleksibilitet: Maksimum % under maksimumspris (negativ verdi)", "peak_price_min_distance_from_avg": "Minimumsavstand: Påkrevd % over daglig gjennomsnitt", - "peak_price_min_volatility": "Minimum volatilitetsfilter", "peak_price_min_level": "Prisnivåfilter (valgfritt)", "peak_price_max_level_gap_count": "Gaptoleranse for nivåfilter", "enable_min_periods_peak": "Prøv å oppnå minimum antall perioder", @@ -136,7 +133,6 @@ "relaxation_step_peak": "Avslappingstrinn" }, "data_description": { - "peak_price_min_volatility": "Vis kun topprisperioder når periodens interne prisvolatilitet (prisspennet innenfor perioden) oppfyller eller overskrider dette nivået. Standard: 'Lav' (vis perioder med hvilket som helst volatilitetsnivå) - gjør det mulig å identifisere dyre perioder selv om prisene er stabile. Velg 'Moderat'/'Høy' for kun å vise perioder med betydelige prisvariasjoner, noe som kan indikere mer presserende behov for å unngå disse tidspunktene.", "peak_price_min_level": "Vis kun topprisperioder hvis de inneholder intervaller med prisnivåer ≥ valgt verdi. For eksempel: å velge 'Dyr' betyr at perioden må ha minst ett 'DYR' eller 'VELDIG_DYR' intervall. Dette sikrer at 'topppris'-perioder ikke bare er relativt dyre for dagen, men faktisk dyre i absolutte tall. Velg 'Alle' for å vise topppriser uavhengig av deres absolutte prisnivå.", "enable_min_periods_peak": "Når aktivert vil filtrene gradvis bli lempeligere hvis det ikke blir funnet nok perioder. 
Dette forsøker å nå ønsket minimum antall perioder for å sikre at du blir advart om dyre perioder selv på dager med uvanlige prismønstre.", "min_periods_peak": "Minimum antall topp-pris-perioder å sikte mot per dag. Filtre vil bli lempet trinn for trinn for å prøve å oppnå dette antallet. Kun aktiv når 'Prøv å oppnå minimum antall perioder' er aktivert. Standard: 1", @@ -515,4 +511,4 @@ } }, "title": "Tibber Prisinformasjon & Vurderinger" -} \ No newline at end of file +} diff --git a/custom_components/tibber_prices/translations/nl.json b/custom_components/tibber_prices/translations/nl.json index 71117d8..4ac94e0 100644 --- a/custom_components/tibber_prices/translations/nl.json +++ b/custom_components/tibber_prices/translations/nl.json @@ -104,7 +104,6 @@ "best_price_min_period_length": "Minimale periode lengte", "best_price_flex": "Flexibiliteit: Maximaal % boven minimumprijs", "best_price_min_distance_from_avg": "Minimale afstand: Vereist % onder dagelijks gemiddelde", - "best_price_min_volatility": "Minimum volatiliteitsfilter", "best_price_max_level": "Prijsniveaufilter (Optioneel)", "best_price_max_level_gap_count": "Gaptolerantie voor niveaufilter", "enable_min_periods_best": "Probeer minimum aantal periodes te bereiken", @@ -112,7 +111,6 @@ "relaxation_step_best": "Ontspanningsstap" }, "data_description": { - "best_price_min_volatility": "Toon alleen beste prijsperiodes wanneer de interne prijsvolatiliteit van de periode (prijsspanne binnen de periode) dit niveau bereikt of overschrijdt. Standaard: 'Laag' (toon periodes met elk volatiliteitsniveau) - maakt het mogelijk om goedkope periodes te vinden zelfs als de prijzen stabiel zijn. Selecteer 'Matig'/'Hoog' om alleen periodes met significante prijsvariaties te tonen, wat kan wijzen op meer dynamische prijsmogelijkheden.", "best_price_max_level": "Toon alleen beste prijsperiodes als ze intervallen bevatten met prijsniveaus ≤ geselecteerde waarde. 
Bijvoorbeeld: selecteren van 'Goedkoop' betekent dat de periode minstens één 'ZEER_GOEDKOOP' of 'GOEDKOOP' interval moet hebben. Dit zorgt ervoor dat 'beste prijs'-periodes niet alleen relatief goedkoop zijn voor de dag, maar daadwerkelijk goedkoop in absolute termen. Selecteer 'Alle' om beste prijzen te tonen ongeacht hun absolute prijsniveau.", "enable_min_periods_best": "Wanneer ingeschakeld worden filters geleidelijk versoepeld als er niet genoeg periodes worden gevonden. Dit probeert het gewenste minimum aantal periodes te bereiken om ervoor te zorgen dat je kansen hebt om van lage prijzen te profiteren, zelfs op dagen met ongebruikelijke prijspatronen.", "min_periods_best": "Minimum aantal beste prijsperiodes om naar te streven per dag. Filters worden stap voor stap versoepeld om dit aantal te proberen bereiken. Alleen actief wanneer 'Probeer minimum aantal periodes te bereiken' is ingeschakeld. Standaard: 1", @@ -128,7 +126,6 @@ "peak_price_min_period_length": "Minimale periode lengte", "peak_price_flex": "Flexibiliteit: Maximaal % onder maximumprijs (negatieve waarde)", "peak_price_min_distance_from_avg": "Minimale afstand: Vereist % boven dagelijks gemiddelde", - "peak_price_min_volatility": "Minimum volatiliteitsfilter", "peak_price_min_level": "Prijsniveaufilter (Optioneel)", "peak_price_max_level_gap_count": "Gaptolerantie voor niveaufilter", "enable_min_periods_peak": "Probeer minimum aantal periodes te bereiken", @@ -136,7 +133,6 @@ "relaxation_step_peak": "Ontspanningsstap" }, "data_description": { - "peak_price_min_volatility": "Toon alleen piekprijsperiodes wanneer de interne prijsvolatiliteit van de periode (prijsspanne binnen de periode) dit niveau bereikt of overschrijdt. Standaard: 'Laag' (toon periodes met elk volatiliteitsniveau) - maakt het mogelijk om dure periodes te identificeren zelfs als de prijzen stabiel zijn. 
Selecteer 'Matig'/'Hoog' om alleen periodes met significante prijsvariaties te tonen, wat kan wijzen op een urgenter noodzaak om deze tijden te vermijden.", "peak_price_min_level": "Toon alleen piekprijsperiodes als ze intervallen bevatten met prijsniveaus ≥ geselecteerde waarde. Bijvoorbeeld: selecteren van 'Duur' betekent dat de periode minstens één 'DUUR' of 'ZEER_DUUR' interval moet hebben. Dit zorgt ervoor dat 'piekprijs'-periodes niet alleen relatief duur zijn voor de dag, maar daadwerkelijk duur in absolute termen. Selecteer 'Alle' om piekprijzen te tonen ongeacht hun absolute prijsniveau.", "enable_min_periods_peak": "Wanneer ingeschakeld worden filters geleidelijk versoepeld als er niet genoeg periodes worden gevonden. Dit probeert het gewenste minimum aantal periodes te bereiken om ervoor te zorgen dat je wordt gewaarschuwd voor dure periodes, zelfs op dagen met ongebruikelijke prijspatronen.", "min_periods_peak": "Minimum aantal piekprijsperiodes om naar te streven per dag. Filters worden stap voor stap versoepeld om dit aantal te proberen bereiken. Alleen actief wanneer 'Probeer minimum aantal periodes te bereiken' is ingeschakeld. 
Standaard: 1", @@ -515,4 +511,4 @@ } }, "title": "Tibber Prijsinformatie & Beoordelingen" -} \ No newline at end of file +} diff --git a/custom_components/tibber_prices/translations/sv.json b/custom_components/tibber_prices/translations/sv.json index 298bb2b..7f24197 100644 --- a/custom_components/tibber_prices/translations/sv.json +++ b/custom_components/tibber_prices/translations/sv.json @@ -104,7 +104,6 @@ "best_price_min_period_length": "Minsta periodlängd", "best_price_flex": "Flexibilitet: Maximalt % över minimumpris", "best_price_min_distance_from_avg": "Minimiavstånd: Krävd % under dagligt genomsnitt", - "best_price_min_volatility": "Minimum volatilitetsfilter", "best_price_max_level": "Prisnivåfilter (Valfritt)", "best_price_max_level_gap_count": "Gaptolerens för nivåfilter", "enable_min_periods_best": "Försök uppnå minsta antal perioder", @@ -112,7 +111,6 @@ "relaxation_step_best": "Avslappningssteg" }, "data_description": { - "best_price_min_volatility": "Visa endast bästa prisperioder när periodens interna prisvolatilitet (prisspann inom perioden) uppfyller eller överskrider denna nivå. Standard: 'Låg' (visa perioder med valfri volatilitetsnivå) - möjliggör att hitta billiga perioder även om priserna är stabila. Välj 'Måttlig'/'Hög' för att endast visa perioder med betydande prisvariationer, vilket kan indikera mer dynamiska prismöjligheter.", "best_price_max_level": "Visa endast bästa prisperioder om de innehåller intervall med prisnivåer ≤ valt värde. Till exempel: att välja 'Billigt' betyder att perioden måste ha minst ett 'MYCKET_BILLIGT' eller 'BILLIGT' intervall. Detta säkerställer att 'bästa pris'-perioder inte bara är relativt billiga för dagen, utan faktiskt billiga i absoluta tal. Välj 'Alla' för att visa bästa priser oavsett deras absoluta prisnivå.", "enable_min_periods_best": "När aktiverad kommer filtren att gradvis luckras upp om inte tillräckligt många perioder hittas. 
Detta försöker uppnå det önskade minsta antalet perioder för att säkerställa att du har möjligheter att dra nytta av låga priser även på dagar med ovanliga prismönster.", "min_periods_best": "Minsta antal bästa prisperioder att sträva efter per dag. Filtren kommer att luckras upp steg för steg för att försöka uppnå detta antal. Endast aktiv när 'Försök uppnå minsta antal perioder' är aktiverad. Standard: 1", @@ -128,7 +126,6 @@ "peak_price_min_period_length": "Minsta periodlängd", "peak_price_flex": "Flexibilitet: Maximalt % under maximumpris (negativt värde)", "peak_price_min_distance_from_avg": "Minimiavstånd: Krävd % över dagligt genomsnitt", - "peak_price_min_volatility": "Minimum volatilitetsfilter", "peak_price_min_level": "Prisnivåfilter (Valfritt)", "peak_price_max_level_gap_count": "Gaptolerens för nivåfilter", "enable_min_periods_peak": "Försök uppnå minsta antal perioder", @@ -136,7 +133,6 @@ "relaxation_step_peak": "Avslappningssteg" }, "data_description": { - "peak_price_min_volatility": "Visa endast topprisperioder om de har en intern prisvolatilitet (prisspann inom perioden) som uppfyller eller överskrider denna nivå. Standard: 'Låg' (visa oavsett periodens volatilitet) - toppvarningar är relevanta även vid låg intern spridning. Högre volatilitet inom en period kan indikera mer brådskande behov att undvika dessa tider (eftersom priserna varierar kraftigt även inom den korta perioden).", "peak_price_min_level": "Visa endast topprisperioder om de innehåller intervall med prisnivåer ≥ valt värde. Till exempel måste perioden om du väljer 'Dyr' ha minst ett 'DYR' eller 'MYCKET_DYR' intervall. Detta säkerställer att 'toppris'-perioder inte bara är relativt dyra för dagen, utan faktiskt dyra i absoluta termer (inte bara 'lite dyrare än genomsnittet på en billig dag').", "enable_min_periods_peak": "När aktiverad kommer filtren att gradvis luckras upp om inte tillräckligt många perioder hittas. 
Detta försöker uppnå det önskade minsta antalet perioder för att säkerställa att du blir varnad för dyra perioder även på dagar med ovanliga prismönster.", "min_periods_peak": "Minsta antal topprisperioder att sträva efter per dag. Filtren kommer att luckras upp steg för steg för att försöka uppnå detta antal. Endast aktiv när 'Försök uppnå minsta antal perioder' är aktiverad. Standard: 1", @@ -515,4 +511,4 @@ } }, "title": "Tibber Prisinformation & Betyg" -} \ No newline at end of file +} diff --git a/docs/user/period-calculation.md b/docs/user/period-calculation.md index 27fb8be..87f44da 100644 --- a/docs/user/period-calculation.md +++ b/docs/user/period-calculation.md @@ -1,1919 +1,516 @@ -# Period Calculation# Period Calculation +# Period Calculation +Learn how Best Price and Peak Price periods work, and how to configure them for your needs. +## Table of Contents -Learn how Best Price and Peak Price periods work, and how to configure them for your needs.Learn how Best Price and Peak Price periods work, and how to configure them for your needs. +- [Quick Start](#quick-start) +- [How It Works](#how-it-works) +- [Configuration Guide](#configuration-guide) +- [Understanding Relaxation](#understanding-relaxation) +- [Common Scenarios](#common-scenarios) +- [Troubleshooting](#troubleshooting) +- [Advanced Topics](#advanced-topics) +--- +## Quick Start -## Table of Contents## Table of Contents +### What Are Price Periods? 
+The integration finds time windows when electricity is especially **cheap** (Best Price) or **expensive** (Peak Price): +- **Best Price Periods** 🟢 - When to run your dishwasher, charge your EV, or heat water +- **Peak Price Periods** 🔴 - When to reduce consumption or defer non-essential loads -- [Quick Start](#quick-start)- [Quick Start](#quick-start) +### Default Behavior -- [How It Works](#how-it-works)- [How It Works](#how-it-works) +Out of the box, the integration: -- [Configuration Guide](#configuration-guide)- [Configuration Guide](#configuration-guide) +1. **Best Price**: Finds cheapest 1-hour+ windows that are at least 2% below the daily average +2. **Peak Price**: Finds most expensive 1-hour+ windows that are at least 2% above the daily average +3. **Relaxation**: Automatically loosens filters if not enough periods are found -- [Understanding Relaxation](#understanding-relaxation)- [Understanding Relaxation](#understanding-relaxation) +**Most users don't need to change anything!** The defaults work well for typical use cases. -- [Common Scenarios](#common-scenarios)- [Common Scenarios](#common-scenarios) - -- [Troubleshooting](#troubleshooting)- [Troubleshooting](#troubleshooting) - -- [Advanced Topics](#advanced-topics)- [Advanced Topics](#advanced-topics) - - - ------- - - - -## Quick Start## Quick Start - - - -### What Are Price Periods?### What Are Price Periods? 
- - - -The integration finds time windows when electricity is especially **cheap** (Best Price) or **expensive** (Peak Price):The integration finds time windows when electricity is especially **cheap** (Best Price) or **expensive** (Peak Price): - - - -- **Best Price Periods** 🟢 - When to run your dishwasher, charge your EV, or heat water- **Best Price Periods** 🟢 - When to run your dishwasher, charge your EV, or heat water - -- **Peak Price Periods** 🔴 - When to reduce consumption or defer non-essential loads- **Peak Price Periods** 🔴 - When to reduce consumption or defer non-essential loads - - - -### Default Behavior### Default Behavior - - - -Out of the box, the integration:Out of the box, the integration: - -- ✅ Finds the cheapest time windows each day (Best Price)- ✅ Finds the cheapest time windows each day (Best Price) - -- ✅ Finds the most expensive time windows each day (Peak Price)- ✅ Finds the most expensive time windows each day (Peak Price) - -- ✅ Requires periods to be at least 1 hour long- ✅ Requires periods to be at least 1 hour long - -- ✅ Automatically adjusts when no perfect matches exist (Relaxation)- ✅ Automatically adjusts when no perfect matches exist (Relaxation) - - - -**Most users don't need to change anything!** The defaults work well for typical use cases.**Most users don't need to change anything!** The defaults work well for typical use cases. - - - ------- - - - -## How It Works## How It Works - - - -### The Basic Idea### The Basic Idea - - - -Each day, the integration analyzes all 96 quarter-hourly price intervals and identifies **continuous time ranges** that meet specific criteria.Each day, the integration analyzes all 96 quarter-hourly price intervals and identifies **continuous time ranges** that meet specific criteria. - - - -Think of it like this:Think of it like this: - -1. **Find potential windows** - Times close to the daily MIN (Best Price) or MAX (Peak Price)1. 
**Find potential windows** - Times close to the daily MIN (Best Price) or MAX (Peak Price) - -2. **Filter by quality** - Ensure they're meaningfully different from average2. **Filter by quality** - Ensure they're meaningfully different from average - -3. **Check duration** - Must be long enough to be useful3. **Check duration** - Must be long enough to be useful - -4. **Apply preferences** - Optional: only show stable prices, avoid mediocre times4. **Apply preferences** - Optional: only show stable prices, avoid mediocre times - - - -### Step-by-Step Process### Step-by-Step Process - - - -#### 1. Define the Search Range (Flexibility)#### 1. Define the Search Range (Flexibility) - - - -**Best Price:** How much MORE than the daily minimum can a price be?**Best Price:** How much MORE than the daily minimum can a price be? - -`````` - -Daily MIN: 20 ct/kWhDaily MIN: 20 ct/kWh - -Flexibility: 15% (default)Flexibility: 15% (default) - -→ Search for times ≤ 23 ct/kWh (20 + 15%)→ Search for times ≤ 23 ct/kWh (20 + 15%) - -`````` - - - -**Peak Price:** How much LESS than the daily maximum can a price be?**Peak Price:** How much LESS than the daily maximum can a price be? - -`````` - -Daily MAX: 40 ct/kWhDaily MAX: 40 ct/kWh - -Flexibility: -15% (default)Flexibility: -15% (default) - -→ Search for times ≥ 34 ct/kWh (40 - 15%)→ Search for times ≥ 34 ct/kWh (40 - 15%) - -`````` - - - -**Why flexibility?** Prices rarely stay at exactly MIN/MAX. Flexibility lets you capture realistic time windows.**Why flexibility?** Prices rarely stay at exactly MIN/MAX. Flexibility lets you capture realistic time windows. - - - -#### 2. Ensure Quality (Distance from Average)#### 2. 
Ensure Quality (Distance from Average) - - - -Periods must be meaningfully different from the daily average:Periods must be meaningfully different from the daily average: - - - -`````` - -Daily AVG: 30 ct/kWhDaily AVG: 30 ct/kWh - -Minimum distance: 2% (default)Minimum distance: 2% (default) - - - -Best Price: Must be ≤ 29.4 ct/kWh (30 - 2%)Best Price: Must be ≤ 29.4 ct/kWh (30 - 2%) - -Peak Price: Must be ≥ 30.6 ct/kWh (30 + 2%)Peak Price: Must be ≥ 30.6 ct/kWh (30 + 2%) - -`````` - - - -**Why?** This prevents marking mediocre times as "best" just because they're slightly below average.**Why?** This prevents marking mediocre times as "best" just because they're slightly below average. - - - -#### 3. Check Duration#### 3. Check Duration - - - -Periods must be long enough to be practical:Periods must be long enough to be practical: - -`````` - -Default: 60 minutes minimumDefault: 60 minutes minimum - - - -45-minute period → Discarded45-minute period → Discarded - -90-minute period → Kept ✓90-minute period → Kept ✓ - -`````` - - - -#### 4. Apply Optional Filters#### 4. 
Apply Optional Filters - - - -You can optionally require:You can optionally require: - -- **Stable prices** (volatility filter) - "Only show if price doesn't fluctuate much"- **Stable prices** (volatility filter) - "Only show if price doesn't fluctuate much" - -- **Absolute quality** (level filter) - "Only show if prices are CHEAP/EXPENSIVE (not just below/above average)"- **Absolute quality** (level filter) - "Only show if prices are CHEAP/EXPENSIVE (not just below/above average)" - - - -### Visual Example### Visual Example - - - -**Timeline for a typical day:****Timeline for a typical day:** - -`````` - -Hour: 00 01 02 03 04 05 06 07 08 09 10 11 12 13 14 15 16 17 18 19 20 21 22 23Hour: 00 01 02 03 04 05 06 07 08 09 10 11 12 13 14 15 16 17 18 19 20 21 22 23 - -Price: 18 19 20 28 29 30 35 34 33 32 30 28 25 24 26 28 30 32 31 22 21 20 19 18Price: 18 19 20 28 29 30 35 34 33 32 30 28 25 24 26 28 30 32 31 22 21 20 19 18 - - - -Daily MIN: 18 ct | Daily MAX: 35 ct | Daily AVG: 26 ctDaily MIN: 18 ct | Daily MAX: 35 ct | Daily AVG: 26 ct - - - -Best Price (15% flex = ≤20.7 ct):Best Price (15% flex = ≤20.7 ct): - - ████████ ████████████████ ████████ ████████████████ - - 00:00-03:00 (3h) 19:00-24:00 (5h) 00:00-03:00 (3h) 19:00-24:00 (5h) - - - -Peak Price (-15% flex = ≥29.75 ct):Peak Price (-15% flex = ≥29.75 ct): - - ████████████████████████ ████████████████████████ - - 06:00-11:00 (5h) 06:00-11:00 (5h) - -`````` - - - ------- - - - -## Configuration Guide## Configuration Guide - - - -### Basic Settings### Basic Settings - - - -#### Flexibility#### Flexibility - - - -**What:** How far from MIN/MAX to search for periods **What:** How far from MIN/MAX to search for periods - -**Default:** 15% (Best Price), -15% (Peak Price) **Default:** 15% (Best Price), -15% (Peak Price) - -**Range:** 0-100%**Range:** 0-100% - - - -```yaml```yaml - -best_price_flex: 15 # Can be up to 15% more expensive than daily MINbest_price_flex: 15 # Can be up to 15% more expensive than daily MIN - 
-peak_price_flex: -15 # Can be up to 15% less expensive than daily MAXpeak_price_flex: -15 # Can be up to 15% less expensive than daily MAX - -`````` - - - -**When to adjust:****When to adjust:** - -- **Increase (20-25%)** → Find more/longer periods- **Increase (20-25%)** → Find more/longer periods - -- **Decrease (5-10%)** → Find only the very best/worst times- **Decrease (5-10%)** → Find only the very best/worst times - - - -#### Minimum Period Length#### Minimum Period Length - - - -**What:** How long a period must be to show it **What:** How long a period must be to show it - -**Default:** 60 minutes **Default:** 60 minutes - -**Range:** 15-240 minutes**Range:** 15-240 minutes - - - -```yaml```yaml - -best_price_min_period_length: 60best_price_min_period_length: 60 - -peak_price_min_period_length: 60peak_price_min_period_length: 60 - -`````` - - - -**When to adjust:****When to adjust:** - -- **Increase (90-120 min)** → Only show longer periods (e.g., for heat pump cycles)- **Increase (90-120 min)** → Only show longer periods (e.g., for heat pump cycles) - -- **Decrease (30-45 min)** → Show shorter windows (e.g., for quick tasks)- **Decrease (30-45 min)** → Show shorter windows (e.g., for quick tasks) - - - -#### Distance from Average#### Distance from Average - - - -**What:** How much better than average a period must be **What:** How much better than average a period must be - -**Default:** 2% **Default:** 2% - -**Range:** 0-20%**Range:** 0-20% - - - -```yaml```yaml - -best_price_min_distance_from_avg: 2best_price_min_distance_from_avg: 2 - -peak_price_min_distance_from_avg: 2peak_price_min_distance_from_avg: 2 - -`````` - - - -**When to adjust:****When to adjust:** - -- **Increase (5-10%)** → Only show clearly better times- **Increase (5-10%)** → Only show clearly better times - -- **Decrease (0-1%)** → Show any time below/above average- **Decrease (0-1%)** → Show any time below/above average - - - -### Optional Filters### Optional Filters - - - -#### 
Volatility Filter (Price Stability)#### Volatility Filter (Price Stability) - - - -**What:** Only show periods with stable prices (low fluctuation) **What:** Only show periods with stable prices (low fluctuation) - -**Default:** `low` (disabled) **Default:** `low` (disabled) - -**Options:** `low` | `moderate` | `high` | `very_high`**Options:** `low` | `moderate` | `high` | `very_high` - - - -```yaml```yaml - -best_price_min_volatility: low # Show all periodsbest_price_min_volatility: low # Show all periods - -best_price_min_volatility: moderate # Only show if price doesn't swing >5 ctbest_price_min_volatility: moderate # Only show if price doesn't swing >5 ct - -`````` - - - -**Use case:** "I want predictable prices during the period"**Use case:** "I want predictable prices during the period" - - - -#### Level Filter (Absolute Quality)#### Level Filter (Absolute Quality) - - - -**What:** Only show periods with CHEAP/EXPENSIVE intervals (not just below/above average) **What:** Only show periods with CHEAP/EXPENSIVE intervals (not just below/above average) - -**Default:** `any` (disabled) **Default:** `any` (disabled) - -**Options:** `any` | `cheap` | `very_cheap` (Best Price) | `expensive` | `very_expensive` (Peak Price)**Options:** `any` | `cheap` | `very_cheap` (Best Price) | `expensive` | `very_expensive` (Peak Price) - - - -```yaml```yaml - -best_price_max_level: any # Show any period below averagebest_price_max_level: any # Show any period below average - -best_price_max_level: cheap # Only show if at least one interval is CHEAPbest_price_max_level: cheap # Only show if at least one interval is CHEAP - -`````` - - - -**Use case:** "Only notify me when prices are objectively cheap/expensive"**Use case:** "Only notify me when prices are objectively cheap/expensive" - - - -#### Gap Tolerance (for Level Filter)#### Gap Tolerance (for Level Filter) - - - -**What:** Allow some "mediocre" intervals within an otherwise good period **What:** Allow some "mediocre" 
intervals within an otherwise good period - -**Default:** 0 (strict) **Default:** 0 (strict) - -**Range:** 0-10**Range:** 0-10 - - - -```yaml```yaml - -best_price_max_level: cheapbest_price_max_level: cheap - -best_price_max_level_gap_count: 2 # Allow up to 2 NORMAL intervals per periodbest_price_max_level_gap_count: 2 # Allow up to 2 NORMAL intervals per period - -`````` - - - -**Use case:** "Don't split periods just because one interval isn't perfectly CHEAP"**Use case:** "Don't split periods just because one interval isn't perfectly CHEAP" - - - ------- - - - -## Understanding Relaxation## Understanding Relaxation - - - -### What Is Relaxation?### What Is Relaxation? - - - -Sometimes, strict filters find too few periods (or none). **Relaxation automatically loosens filters** until a minimum number of periods is found.Sometimes, strict filters find too few periods (or none). **Relaxation automatically loosens filters** until a minimum number of periods is found. - - - -### How to Enable### How to Enable - - - -```yaml```yaml - -enable_min_periods_best: trueenable_min_periods_best: true - -min_periods_best: 2 # Try to find at least 2 periods per daymin_periods_best: 2 # Try to find at least 2 periods per day - -relaxation_step_best: 35 # Increase flex by 35% per step (e.g., 15% → 20.25% → 27.3%)relaxation_step_best: 35 # Increase flex by 35% per step (e.g., 15% → 20.25% → 27.3%) - -`````` - - - -### How It Works (Smart 4×4 Matrix)### How It Works (New Smart Strategy) +### Example Timeline +``` +00:00 ████████████████ Best Price Period (cheap prices) +04:00 ░░░░░░░░░░░░░░░░ Normal +08:00 ████████████████ Peak Price Period (expensive prices) +12:00 ░░░░░░░░░░░░░░░░ Normal +16:00 ████████████████ Peak Price Period (expensive prices) +20:00 ████████████████ Best Price Period (cheap prices) ``` -Relaxation uses a **4×4 matrix approach** - trying 4 flexibility levels with 4 different filter combinations (16 attempts total per day):Found periods: +--- -- 00:00-01:00 (60 
min) ✓ Keep +## How It Works -#### Phase Matrix- 03:00-03:30 (30 min) ✗ Discard (too short) +### The Basic Idea -- 14:00-15:15 (75 min) ✓ Keep +Each day, the integration analyzes all 96 quarter-hourly price intervals and identifies **continuous time ranges** that meet specific criteria. -For each day, the system tries:``` +Think of it like this: +1. **Find potential windows** - Times close to the daily MIN (Best Price) or MAX (Peak Price) +2. **Filter by quality** - Ensure they're meaningfully different from average +3. **Check duration** - Must be long enough to be useful +4. **Apply preferences** - Optional: only show stable prices, avoid mediocre times +### Step-by-Step Process -**4 Flexibility Levels:** +#### 1. Define the Search Range (Flexibility) -1. Original (e.g., 15%)### How It Works (New Smart Strategy) +**Best Price:** How much MORE than the daily minimum can a price be? -2. +35% step (e.g., 20.25%) +``` +Daily MIN: 20 ct/kWh +Flexibility: 15% (default) +→ Search for times ≤ 23 ct/kWh (20 + 15%) +``` -3. +35% step (e.g., 27.3%)Relaxation uses a **4×4 matrix approach** - trying 4 flexibility levels with 4 different filter combinations (16 attempts total per day): +**Peak Price:** How much LESS than the daily maximum can a price be? -4. +35% step (e.g., 36.9%) +``` +Daily MAX: 40 ct/kWh +Flexibility: -15% (default) +→ Search for times ≥ 34 ct/kWh (40 - 15%) +``` + +**Why flexibility?** Prices rarely stay at exactly MIN/MAX. Flexibility lets you capture realistic time windows. + +#### 2. Ensure Quality (Distance from Average) + +Periods must be meaningfully different from the daily average: + +``` +Daily AVG: 30 ct/kWh +Minimum distance: 2% (default) + +Best Price: Must be ≤ 29.4 ct/kWh (30 - 2%) +Peak Price: Must be ≥ 30.6 ct/kWh (30 + 2%) +``` + +**Why?** This prevents marking mediocre times as "best" just because they're slightly below average. + +#### 3. 
Check Duration + +Periods must be long enough to be practical: + +``` +Default: 60 minutes minimum + +45-minute period → Discarded +90-minute period → Kept ✓ +``` + +#### 4. Apply Optional Filters + +You can optionally require: + +- **Stable prices** (volatility filter) - "Only show if price doesn't fluctuate much" +- **Absolute quality** (level filter) - "Only show if prices are CHEAP/EXPENSIVE (not just below/above average)" + +#### 5. Statistical Outlier Filtering + +**Before** period identification, price spikes are automatically detected and smoothed: + +``` +Raw prices: 18, 19, 35, 20, 19 ct ← 35 ct is an isolated spike +Smoothed: 18, 19, 19, 20, 19 ct ← Spike replaced with trend prediction + +Result: Continuous period 00:00-01:15 instead of split periods +``` + +**How it works:** + +- **Linear regression** predicts expected price based on surrounding trend +- **95% confidence intervals** (2 standard deviations) define spike tolerance +- **Symmetry checking** preserves legitimate price shifts (morning/evening peaks) +- **Enhanced zigzag detection** catches spike clusters without multiple passes + +**Data integrity:** + +- Original prices **always preserved** for statistics (min/max/avg show real values) +- Smoothing **only affects period formation** (which intervals qualify for periods) +- Attributes show when smoothing was impactful: `period_interval_smoothed_count` + +**Example log output:** + +``` +DEBUG: [2025-11-11T14:30:00+01:00] Outlier detected: 35.2 ct +DEBUG: Residual: 14.5 ct > tolerance: 4.8 ct (2×2.4 std dev) +DEBUG: Trend slope: 0.3 ct/interval (gradual increase) +DEBUG: Smoothed to: 20.7 ct (trend prediction) +``` + +### Visual Example + +**Timeline for a typical day:** + +``` +Hour: 00 01 02 03 04 05 06 07 08 09 10 11 12 13 14 15 16 17 18 19 20 21 22 23 +Price: 18 19 20 28 29 30 35 34 33 32 30 28 25 24 26 28 30 32 31 22 21 20 19 18 + +Daily MIN: 18 ct | Daily MAX: 35 ct | Daily AVG: 26 ct + +Best Price (15% flex = ≤20.7 ct): + ████████ 
████████████████ + 00:00-03:00 (3h) 19:00-24:00 (5h) + +Peak Price (-15% flex = ≥29.75 ct): + ████████████████████████ + 06:00-11:00 (5h) +``` + +--- + +## Configuration Guide + +### Basic Settings + +#### Flexibility + +**What:** How far from MIN/MAX to search for periods +**Default:** 15% (Best Price), -15% (Peak Price) +**Range:** 0-100% + +```yaml +best_price_flex: 15 # Can be up to 15% more expensive than daily MIN +peak_price_flex: -15 # Can be up to 15% less expensive than daily MAX +``` + +**When to adjust:** + +- **Increase (20-25%)** → Find more/longer periods +- **Decrease (5-10%)** → Find only the very best/worst times + +#### Minimum Period Length + +**What:** How long a period must be to show it +**Default:** 60 minutes +**Range:** 15-240 minutes + +```yaml +best_price_min_period_length: 60 +peak_price_min_period_length: 60 +``` + +**When to adjust:** + +- **Increase (90-120 min)** → Only show longer periods (e.g., for heat pump cycles) +- **Decrease (30-45 min)** → Show shorter windows (e.g., for quick tasks) + +#### Distance from Average + +**What:** How much better than average a period must be +**Default:** 2% +**Range:** 0-20% + +```yaml +best_price_min_distance_from_avg: 2 +peak_price_min_distance_from_avg: 2 +``` + +**When to adjust:** + +- **Increase (5-10%)** → Only show clearly better times +- **Decrease (0-1%)** → Show any time below/above average + +### Optional Filters + +#### Volatility Filter (Price Stability) + +**What:** Only show periods with stable prices (low fluctuation) +**Default:** `low` (disabled) +**Options:** `low` | `moderate` | `high` | `very_high` + +```yaml +best_price_min_volatility: low # Show all periods +best_price_min_volatility: moderate # Only show if price doesn't swing >5 ct +``` + +**Use case:** "I want predictable prices during the period" + +#### Level Filter (Absolute Quality) + +**What:** Only show periods with CHEAP/EXPENSIVE intervals (not just below/above average) +**Default:** `any` (disabled) 
+**Options:** `any` | `cheap` | `very_cheap` (Best Price) | `expensive` | `very_expensive` (Peak Price) + +```yaml +best_price_max_level: any # Show any period below average +best_price_max_level: cheap # Only show if at least one interval is CHEAP +``` + +**Use case:** "Only notify me when prices are objectively cheap/expensive" + +#### Gap Tolerance (for Level Filter) + +**What:** Allow some "mediocre" intervals within an otherwise good period +**Default:** 0 (strict) +**Range:** 0-10 + +```yaml +best_price_max_level: cheap +best_price_max_level_gap_count: 2 # Allow up to 2 NORMAL intervals per period +``` + +**Use case:** "Don't split periods just because one interval isn't perfectly CHEAP" + +--- + +## Understanding Relaxation + +### What Is Relaxation? + +Sometimes, strict filters find too few periods (or none). **Relaxation automatically loosens filters** until a minimum number of periods is found. + +### How to Enable + +```yaml +enable_min_periods_best: true +min_periods_best: 2 # Try to find at least 2 periods per day +relaxation_step_best: 35 # Increase flex by 35% per step (e.g., 15% → 20.25% → 27.3%) +``` + +### How It Works (Smart 4×4 Matrix) + +Relaxation uses a **4×4 matrix approach** - trying 4 flexibility levels with 4 different filter combinations (16 attempts total per day): #### Phase Matrix +For each day, the system tries: + +**4 Flexibility Levels:** + +1. Original (e.g., 15%) +2. +35% step (e.g., 20.25%) +3. +35% step (e.g., 27.3%) +4. +35% step (e.g., 36.9%) + **4 Filter Combinations (per flexibility level):** -1. Original filters (your configured volatility + level)For each day, the system tries: - +1. Original filters (your configured volatility + level) 2. Remove volatility filter (keep level filter) - -3. Remove level filter (keep volatility filter)**4 Flexibility Levels:** - -4. Remove both filters1. Original (e.g., 15%) - -2. +35% step (e.g., 20.25%) - -**Example progression:**3. +35% step (e.g., 27.3%) - -```4. 
+35% step (e.g., 36.9%) - -Flex 15% + Original filters → Not enough periods - -Flex 15% + Volatility=any → Not enough periods**4 Filter Combinations (per flexibility level):** - -Flex 15% + Level=any → Not enough periods1. Original filters (your configured volatility + level) - -Flex 15% + All filters off → Not enough periods2. Remove volatility filter (keep level filter) - -Flex 20.25% + Original → SUCCESS! Found 2 periods ✓3. Remove level filter (keep volatility filter) - -(stops here - no need to try more)4. Remove both filters - -``` +3. Remove level filter (keep volatility filter) +4. Remove both filters **Example progression:** -#### Per-Day Independence``` - +``` Flex 15% + Original filters → Not enough periods - -**Critical:** Each day relaxes **independently**:Flex 15% + Volatility=any → Not enough periods - +Flex 15% + Volatility=any → Not enough periods Flex 15% + Level=any → Not enough periods - -```Flex 15% + All filters off → Not enough periods - -Day 1: Finds 2 periods with flex 15% (original) → No relaxation neededFlex 20.25% + Original → SUCCESS! Found 2 periods ✓ - -Day 2: Needs flex 27.3% + level=any → Uses relaxed settings(stops here - no need to try more) - -Day 3: Finds 2 periods with flex 15% (original) → No relaxation needed``` - +Flex 15% + All filters off → Not enough periods +Flex 20.25% + Original → SUCCESS! Found 2 periods ✓ +(stops here - no need to try more) ``` #### Per-Day Independence -**Why?** Price patterns vary daily. Some days have clear cheap/expensive windows (strict filters work), others don't (relaxation needed). - **Critical:** Each day relaxes **independently**: -#### Period Replacement Logic - ``` - -When relaxation finds new periods, they interact with baseline periods in two ways:Day 1: Finds 2 periods with flex 15% (original) → No relaxation needed - +Day 1: Finds 2 periods with flex 15% (original) → No relaxation needed Day 2: Needs flex 27.3% + level=any → Uses relaxed settings - -**1. 
Extension** (Enlargement)Day 3: Finds 2 periods with flex 15% (original) → No relaxation needed - -A relaxed period that **overlaps** with a baseline period and extends it:``` - +Day 3: Finds 2 periods with flex 15% (original) → No relaxation needed ``` -Baseline: [14:00-16:00] ████████**Why?** Price patterns vary daily. Some days have clear cheap/expensive windows (strict filters work), others don't (relaxation needed). +**Why?** Price patterns vary daily. Some days have clear cheap/expensive windows (strict filters work), others don't (relaxation needed). -Relaxed: [13:00-16:30] ████████████ +--- -Result: [13:00-16:30] ████████████████ (baseline expanded)#### Period Replacement Logic +## Common Scenarios - ↑ Keeps baseline metadata (original flex/filters) - -```When relaxation finds new periods, they interact with baseline periods in two ways: - - - -**2. Replacement** (Substitution)**1. Extension** (Enlargement) - -A **larger** relaxed period completely contains a **smaller** relaxed period from earlier phases:A relaxed period that **overlaps** with a baseline period and extends it: - -`````` - -Phase 1: [14:00-15:00] ████ (found with flex 15%)Baseline: [14:00-16:00] ████████ - -Phase 3: [13:00-17:00] ████████████ (found with flex 27.3%)Relaxed: [13:00-16:30] ████████████ - -Result: [13:00-17:00] ████████████ (larger replaces smaller)Result: [13:00-16:30] ████████████████ (baseline expanded) - - ↑ Uses Phase 3 metadata (flex 27.3%) ↑ Keeps baseline metadata (original flex/filters) - -`````` - - - -**Why two different behaviors?****2. Replacement** (Substitution) - -- **Extensions preserve quality:** Baseline periods found with original strict filters are high-quality. 
When relaxation finds overlapping periods, we expand the baseline but keep its original metadata (indicating it was found with strict criteria).A **larger** relaxed period completely contains a **smaller** relaxed period from earlier phases: - -- **Replacements reflect reality:** When a larger relaxed period is found, it completely replaces smaller relaxed periods because it better represents the actual price window. The metadata shows which relaxation phase actually found this period.``` - -Phase 1: [14:00-15:00] ████ (found with flex 15%) - -**Key principle:** Baseline periods are "gold standard" - they get extended but never replaced. Relaxed periods compete with each other based on size.Phase 3: [13:00-17:00] ████████████ (found with flex 27.3%) - -Result: [13:00-17:00] ████████████ (larger replaces smaller) - -#### Counting Logic ↑ Uses Phase 3 metadata (flex 27.3%) - -``` - -The system counts **standalone periods** (periods that remain in the final result): - -**Why two different behaviors?** - -```- **Extensions preserve quality:** Baseline periods found with original strict filters are high-quality. When relaxation finds overlapping periods, we expand the baseline but keep its original metadata (indicating it was found with strict criteria). - -After all relaxation phases:- **Replacements reflect reality:** When a larger relaxed period is found, it completely replaces smaller relaxed periods because it better represents the actual price window. The metadata shows which relaxation phase actually found this period. - -- Period A: Extended baseline (counts ✓) - -- Period B: Standalone relaxed (counts ✓)**Key principle:** Baseline periods are "gold standard" - they get extended but never replaced. Relaxed periods compete with each other based on size. - -- Period C: Was replaced by larger period (doesn't count ✗) - -#### Counting Logic - -Total: 2 periods - -Comparison: ≥ min_periods_best? 
→ Yes → SUCCESS
```

The system counts **standalone periods** (periods that remain in the final result):

```
After all relaxation phases:
- Period A: Extended baseline (counts ✓)
- Period B: Standalone relaxed (counts ✓)
- Period C: Was replaced by larger period (doesn't count ✗)

Total: 2 periods
Comparison: ≥ min_periods_best? → Yes → SUCCESS
```

### Metadata Tracking

Each period shows **how it was found** via entity attributes:

**Baseline Period (no relaxation needed):**

```yaml
relaxation_active: false
relaxation_level: "price_diff_15.0%"  # Original flexibility
```

**Extended Baseline (relaxation extended it):**

```yaml
relaxation_active: true               # Relaxation was needed globally
relaxation_level: "price_diff_15.0%"  # But THIS period was baseline
```

**Standalone Relaxed Period:**

```yaml
relaxation_active: true
relaxation_level: "price_diff_27.3%+level_any"  # Found at flex 27.3%, level filter removed
```

**Replaced Period (doesn't appear in final result):**

- Not exposed as entity (was replaced by larger period)

### Configuration Example

```yaml
# Best Price with relaxation
enable_min_periods_best: true
min_periods_best: 2                  # Try to find at least 2 periods per day
relaxation_step_best: 35             # Increase flex by 35% per step
best_price_flex: 15                  # Start with 15%
best_price_min_volatility: moderate  # Start with volatility filter
best_price_max_level: cheap          # Start with level filter

# Result: Tries up to 16 combinations per day:
# Flex 15%/20.25%/27.3%/36.9% × Filters original/vol-any/lvl-any/all-any
# Stops immediately when 2 periods found
```

---

## Common Scenarios

### Scenario 1: Simple Best Price (Default)

**Goal:** Find the cheapest time each day to run dishwasher

**Configuration:**

```yaml
# Use defaults - no configuration needed!
best_price_flex: 15                  # (default)
best_price_min_period_length: 60     # (default)
best_price_min_distance_from_avg: 2  # (default)
```

**What you get:**

- 1-3 periods per day with prices ≤ MIN + 15%
- Each period at least 1 hour long
- All periods at least 2% cheaper than daily average

**Automation example:**

```yaml
automation:
  - trigger:
      - platform: state
        entity_id: binary_sensor.tibber_home_best_price_period
        to: "on"
    action:
      - service: switch.turn_on
        target:
          entity_id: switch.dishwasher
```

### Scenario 2: Heat Pump (Long Periods + Relaxation)

**Goal:** Run water heater during long cheap windows, accept longer periods even if not perfectly cheap

**Configuration:**

```yaml
best_price_min_period_length: 120  # Need at least 2 hours
enable_min_periods_best: true
min_periods_best: 2                # Want 2 opportunities per day
relaxation_step_best: 35
best_price_max_level: cheap        # Prefer CHEAP intervals
best_price_max_level_gap_count: 3  # But allow some NORMAL intervals
```

**What you get:**

- At least 2 periods per day (relaxation ensures this)
- Each period at least 2 hours long
- Primarily CHEAP intervals, but tolerates up to 3 NORMAL intervals per period
- If not enough strict matches, relaxation finds longer/less-strict periods

**Automation example:**

```yaml
automation:
  - trigger:
      - platform: state
        entity_id: binary_sensor.tibber_home_best_price_period
        to: "on"
    condition:
      - condition: numeric_state
        entity_id: sensor.water_heater_temperature
        below: 50
    action:
      - service: climate.set_hvac_mode
        target:
          entity_id: climate.water_heater
        data:
          hvac_mode: heat
```

### Scenario 3: EV Charging (Stable Prices Only)

**Goal:** Charge electric vehicle only during stable, predictable cheap prices

**Configuration:**

```yaml
best_price_flex: 10                  # Very strict (only very cheap times)
best_price_min_volatility: moderate  # Require stable prices
best_price_max_level: cheap          # Require at least one CHEAP interval
enable_min_periods_best: false       # Don't relax - better to skip a day
```

**What you get:**

- Very strict matching - only clearly cheap, stable periods
- Some days might have 0 periods (and that's OK)
- When periods appear, they're high confidence

**Automation example:**

```yaml
automation:
  - trigger:
      - platform: state
        entity_id: binary_sensor.tibber_home_best_price_period
        to: "on"
    condition:
      - condition: numeric_state
        entity_id: sensor.ev_battery_level
        below: 80
      - condition: state
        entity_id: binary_sensor.ev_connected
        state: "on"
    action:
      - service: switch.turn_on
        target:
          entity_id: switch.ev_charger
```

### Scenario 4: Peak Price Avoidance

**Goal:** Reduce heating during the most expensive hours

**Configuration:**

```yaml
peak_price_flex: -10              # Only the very expensive times
peak_price_min_period_length: 30  # Even short periods matter
enable_min_periods_peak: true
min_periods_peak: 1               # Ensure at least 1 peak warning per day
```

**What you get:**

- At least 1 expensive period per day (relaxation ensures this)
- Periods can be as short as 30 minutes
- Clear signal when to reduce consumption

**Automation example:**

```yaml
automation:
  - trigger:
      - platform: state
        entity_id: binary_sensor.tibber_home_peak_price_period
        to: "on"
    action:
      - service: climate.set_temperature
        target:
          entity_id: climate.living_room
        data:
          temperature: 19  # Reduce by 2°C during peaks
```

---

## Troubleshooting

### No Periods Found

**Symptom:** `binary_sensor.tibber_home_best_price_period` never turns "on"

**Possible causes:**

1. **Filters too strict**

   ```yaml
   # Try:
   best_price_flex: 20                  # Increase from default 15%
   best_price_min_distance_from_avg: 1  # Reduce from default 2%
   ```

2. **Period length too long**

   ```yaml
   # Try:
   best_price_min_period_length: 45  # Reduce from default 60 minutes
   ```

3. **Flat price curve** (all prices very similar)

   - Enable relaxation to ensure at least some periods

   ```yaml
   enable_min_periods_best: true
   min_periods_best: 1
   ```

### Too Many Periods

**Symptom:** 5+ periods per day, hard to decide which one to use

**Solution:**

```yaml
# Make filters stricter:
best_price_flex: 10                  # Reduce from default 15%
best_price_min_period_length: 90     # Increase from default 60 minutes
best_price_min_volatility: moderate  # Require stable prices
best_price_max_level: cheap          # Require CHEAP intervals
```

### Periods Split Into Small Pieces

**Symptom:** Many short periods instead of one long period

**Possible causes:**

1. **Level filter too strict**

   ```yaml
   # One "NORMAL" interval splits an otherwise good period
   # Solution: Use gap tolerance
   best_price_max_level: cheap
   best_price_max_level_gap_count: 2  # Allow 2 NORMAL intervals
   ```

2. **Flexibility too tight**

   ```yaml
   # One interval just outside flex range splits the period
   # Solution: Increase flexibility
   best_price_flex: 20  # Increase from 15%
   ```

### Understanding Sensor Attributes

**Check period details:**

```yaml
# Entity: binary_sensor.tibber_home_best_price_period

# Attributes when "on":
start: "2025-11-11T02:00:00+01:00"
end: "2025-11-11T05:00:00+01:00"
duration_minutes: 180
rating_level: "LOW"      # All intervals are LOW price
price_avg: 18.5          # Average price in this period
relaxation_active: true  # This day used relaxation
relaxation_level: "price_diff_20.25%+level_any"  # Found at flex 20.25%, level filter removed

# When "off" (outside any period):
next_start: "2025-11-11T14:00:00+01:00"  # Next period starts at 14:00
next_end: "2025-11-11T17:00:00+01:00"
next_duration_minutes: 180
```

### Checking the Logs

Enable debug logging to see detailed calculation:

```yaml
# configuration.yaml
logger:
  default: warning
  logs:
    custom_components.tibber_prices.period_utils: debug
```

**What to look for:**

```
INFO: Calculating BEST PRICE periods: relaxation=ON, target=2/day, flex=15.0%
DEBUG: Day 2025-11-11: Found 1 baseline period (need 2)
DEBUG: Day 2025-11-11: Starting relaxation...
DEBUG: Phase 1: flex 20.25% + original filters
DEBUG: Candidate: 02:00-05:00 (3h) - rating=LOW, avg=18.5 ct
DEBUG: Result: 2 standalone periods after merge ✓
INFO: Day 2025-11-11: Success after 1 relaxation phase (2 periods)
```

---

## Advanced Topics

For advanced configuration patterns and technical deep-dive, see:

- [Automation Examples](./automation-examples.md) - Real-world automation patterns
- [Services](./services.md) - Using the `tibber_prices.get_price` service for custom logic

### Quick Reference

**Configuration Parameters:**

| Parameter | Default | Range | Purpose |
|-----------|---------|-------|---------|
| `best_price_flex` | 15% | 0-100% | Search range from daily MIN |
| `best_price_min_period_length` | 60 min | 15-240 | Minimum duration |
| `best_price_min_distance_from_avg` | 2% | 0-20% | Quality threshold |
| `best_price_min_volatility` | low | low/mod/high/vhigh | Stability filter |
| `best_price_max_level` | any | any/cheap/vcheap | Absolute quality |
| `best_price_max_level_gap_count` | 0 | 0-10 | Gap tolerance |
| `enable_min_periods_best` | false | true/false | Enable relaxation |
| `min_periods_best` | - | 1-10 | Target periods per day |
| `relaxation_step_best` | - | 5-100% | Relaxation increment |

**Peak Price:** Same parameters with `peak_price_*` prefix (defaults: flex=-15%, same otherwise)

### Price Levels Reference

The Tibber API provides price levels for each 15-minute interval:

**Levels (based on trailing 24h average):**

- `VERY_CHEAP` - Significantly below average
- `CHEAP` - Below average
- `NORMAL` - Around average
- `EXPENSIVE` - Above average
- `VERY_EXPENSIVE` - Significantly above average

**Note:** Your configured `best_price_max_level` or `peak_price_min_level` filter uses these API-provided levels.

---

**Last updated:** November 11, 2025
**Integration version:** 2.0+

### Best Price Period Settings

| Option | Default | Description | Acts in Step |
|--------|---------|-------------|--------------|
| `best_price_flex` | 15% | How much more expensive than the daily **MIN** can an interval be? | 2 (Identification) |
| `best_price_min_period_length` | 60 min | Minimum length of a period | 3 (Length filter) |
| `best_price_min_distance_from_avg` | 2% | Minimum distance below daily **average** (separate from flexibility) | 4 (Quality filter) |
| `best_price_min_volatility` | LOW | Minimum volatility within the period (optional) | 5 (Volatility filter) |
| `best_price_max_level` | ANY | Maximum price level (optional, e.g., only CHEAP or better) | 5 (Level filter) |
| `best_price_max_level_gap_count` | 0 | Tolerance for level deviations (see [Gap Tolerance](#gap-tolerance-for-level-filters)) | 5 (Level filter) |
| `enable_min_periods_best` | Off | Enables relaxation mechanism | - (Relaxation) |
| `min_periods_best` | 2 | Minimum number of periods **per day** to achieve | - (Relaxation) |
| `relaxation_step_best` | 25% | Step size for filter relaxation | - (Relaxation) |

### Peak Price Period Settings

| Option | Default | Description | Acts in Step |
|--------|---------|-------------|--------------|
| `peak_price_flex` | -15% | How much less expensive than the daily **MAX** can an interval be? | 2 (Identification) |
| `peak_price_min_period_length` | 60 min | Minimum length of a period | 3 (Length filter) |
| `peak_price_min_distance_from_avg` | 2% | Minimum distance above daily **average** (separate from flexibility) | 4 (Quality filter) |
| `peak_price_min_volatility` | LOW | Minimum volatility within the period (optional) | 5 (Volatility filter) |
| `peak_price_min_level` | ANY | Minimum price level (optional, e.g., only EXPENSIVE or higher) | 5 (Level filter) |
| `peak_price_max_level_gap_count` | 0 | Tolerance for level deviations (see [Gap Tolerance](#gap-tolerance-for-level-filters)) | 5 (Level filter) |
| `enable_min_periods_peak` | Off | Enables relaxation mechanism | - (Relaxation) |
| `min_periods_peak` | 2 | Minimum number of periods **per day** to achieve | - (Relaxation) |
| `relaxation_step_peak` | 25% | Step size for filter relaxation | - (Relaxation) |

---

## Filter Pipeline

After basic period identification (Steps 1-4), two optional **additional filters** can be applied:

### Volatility Filter

**Purpose:** Only show periods when the price spread within the period is large enough.
- -**Use case:** -- **Best Price**: "I only want to optimize when it's really worth it" (high volatility) -- **Peak Price**: "Only warn me about large price swings" (high volatility) - -**How it works:** -``` -Period: 00:00-01:00 -Intervals: 20.5 | 19.8 | 21.0 | 20.2 ct/kWh -Min: 19.8 ct, Max: 21.0 ct -Volatility (spread): 21.0 - 19.8 = 1.2 ct/kWh - -Volatility thresholds: -- LOW: < 5.0 ct → This period: LOW -- MODERATE: 5-15 ct -- HIGH: 15-30 ct -- VERY_HIGH: ≥ 30 ct - -best_price_min_volatility = "MODERATE" (5 ct) -→ Period is REJECTED (1.2 ct < 5.0 ct) -``` - -**Configuration:** -- `best_price_min_volatility`: `low` | `moderate` | `high` | `very_high` -- `peak_price_min_volatility`: `low` | `moderate` | `high` | `very_high` - -**Default:** `low` (filter disabled, all periods shown) - -### Level Filter (Price Level) - -**Purpose:** Only show periods that are actually cheap/expensive in absolute terms, not just relative to the daily average. - -**Use case:** -- **Best Price**: "Only show best price when there's at least one CHEAP interval" (not just "less expensive than usual today") -- **Peak Price**: "Only show peak price when there's at least one EXPENSIVE interval" (not just "more expensive than average") - -**Price levels (from Tibber API):** -- `VERY_CHEAP` (-2) -- `CHEAP` (-1) -- `NORMAL` (0) -- `EXPENSIVE` (+1) -- `VERY_EXPENSIVE` (+2) - -**How it works (Best Price example):** -``` -best_price_max_level = "CHEAP" - -Period: 00:00-01:00 -Intervals with levels: - 00:00: 20.5 ct → CHEAP ✓ - 00:15: 19.8 ct → VERY_CHEAP ✓ - 00:30: 21.0 ct → NORMAL ✗ - 00:45: 20.2 ct → CHEAP ✓ - -Filter logic (without gap tolerance): - → Does the period have at least ONE interval with level ≤ CHEAP? - → YES (three intervals are CHEAP or better) - → Period is KEPT - -But: One NORMAL interval in the middle! 
- → Without gap tolerance: Period is split into two parts - → With gap tolerance: Period stays together (see next section) -``` - -**Configuration:** -- `best_price_max_level`: `any` | `very_cheap` | `cheap` | `normal` | `expensive` -- `peak_price_min_level`: `any` | `expensive` | `normal` | `cheap` | `very_cheap` - -**Default:** `any` (filter disabled) - ---- - -## Gap Tolerance for Level Filters - -### Problem Without Gap Tolerance - -When you activate a level filter (e.g., `best_price_max_level = "CHEAP"`), periods are **strictly filtered**: - -``` -Period: 00:00-02:00 (2 hours) -Intervals: - 00:00-01:30: CHEAP, CHEAP, CHEAP, CHEAP, CHEAP, CHEAP - 01:30-01:45: NORMAL ← A single deviating interval! - 01:45-02:00: CHEAP - -Without gap tolerance: - → Period is split into TWO periods: - 1. 00:00-01:30 (1.5h) - 2. 01:45-02:00 (0.25h) ✗ too short, discarded! - → Result: Only 1.5h best price instead of 2h -``` - -### Solution: Gap Tolerance - -**Gap tolerance** allows a configurable number of intervals that deviate by **exactly one level step** from the required level. - -**How it works:** - -1. **"Gap" definition:** An interval that deviates by exactly 1 level step - ``` - Best Price filter: CHEAP (-1) - NORMAL (0) is +1 step → GAP ✓ - EXPENSIVE (+1) is +2 steps → NOT A GAP, too far away - ``` - -2. **Gap counting:** Max X gaps allowed per period (configurable: 0-8) - -3. **Minimum distance between gaps:** Gaps must not be too close together - ``` - Dynamic formula: max(2, (interval_count / max_gaps) / 2) - - Example: 16 intervals, max 2 gaps allowed - → Minimum distance: max(2, (16/2)/2) = max(2, 4) = 4 intervals - - CHEAP, CHEAP, CHEAP, CHEAP, NORMAL, CHEAP, CHEAP, CHEAP, NORMAL, CHEAP - ↑ GAP1 ↑ GAP2 - └─────── 4 intervals ──────────────┘ - → OK, minimum distance maintained - ``` - -4. **25% cap:** Maximum 25% of a period's intervals can be gaps - ``` - Period: 12 intervals, user configured 5 gaps - → Effective: min(5, 12/4) = min(5, 3) = 3 gaps allowed - ``` - -5. 
**Minimum period length:** Gap tolerance only applies to periods ≥ 1.5h (6 intervals) - ``` - Period < 1.5h: Strict filtering (0 tolerance) - Period ≥ 1.5h: Gap tolerance as configured - ``` - -### Gap Cluster Splitting - -If a period would still be rejected **despite gap tolerance** (too many gaps or too dense), the integration tries to **intelligently split** it: - -``` -Period: 00:00-04:00 (16 intervals) -CHEAP, CHEAP, CHEAP, NORMAL, NORMAL, NORMAL, CHEAP, CHEAP, ..., CHEAP - └─ Gap cluster (3×) ─┘ - -Gap cluster = 2+ consecutive deviating intervals - -→ Splitting at gap cluster: - 1. 00:00-00:45 (3 intervals) ✗ too short - 2. 01:30-04:00 (10 intervals) ✓ kept - -→ Result: 2.5h best price instead of complete rejection -``` - -### Configuration - -**Best Price:** -```yaml -best_price_max_level: "cheap" # Enable level filter -best_price_max_level_gap_count: 2 # Allow 2 NORMAL intervals per period -``` - -**Peak Price:** -```yaml -peak_price_min_level: "expensive" # Enable level filter -peak_price_max_level_gap_count: 1 # Allow 1 NORMAL interval per period -``` - -**Default:** `0` (no tolerance, strict filtering) - -### Example Scenarios - -#### Scenario 1: Conservative (0 gaps) -```yaml -best_price_max_level: "cheap" -best_price_max_level_gap_count: 0 # Default -``` - -**Behavior:** -- Every interval MUST be CHEAP or better -- A single NORMAL interval → period is split - -**Good for:** Users who want absolute price guarantees - -#### Scenario 2: Moderate (2-3 gaps) -```yaml -best_price_max_level: "cheap" -best_price_max_level_gap_count: 2 -``` - -**Behavior:** -- Up to 2 NORMAL intervals per period tolerated -- Minimum distance between gaps dynamically calculated -- 25% cap protects against too many gaps - -**Good for:** Most users - balance between quality and period length - -#### Scenario 3: Aggressive (5-8 gaps) -```yaml -best_price_max_level: "cheap" -best_price_max_level_gap_count: 5 -``` - -**Behavior:** -- Up to 5 NORMAL intervals (but max 25% of period) 
-- Longer periods possible -- Quality may suffer (more "not-quite-so-cheap" intervals) - -**Good for:** Users with flexible devices that need long run times - ---- - -## Relaxation Mechanism - -If **too few periods** are found despite all filters, the integration can automatically **gradually relax** filters. - -### When is Relaxation Applied? - -Only when **both conditions** are met: -1. `enable_min_periods_best/peak` is enabled -2. Fewer than `min_periods_best/peak` periods found **for a specific day** - -**Important:** The minimum period requirement is checked **separately for each day** (today and tomorrow). This ensures: -- Each day must have enough periods independently -- Today can meet the requirement while tomorrow doesn't (or vice versa) -- When tomorrow's prices arrive, both days are evaluated separately - -**Example scenario:** -- Configuration: `min_periods_best = 3` -- 14:00: Tomorrow's prices arrive -- Today: 10 periods remaining → ✅ Meets requirement (≥3) -- Tomorrow: 2 periods found → ❌ Doesn't meet requirement (<3) -- **Result:** Relaxation only applies to tomorrow's periods - -### Relaxation Levels - -The integration tries to relax filters in this order: - -#### Level 1: Relax Flexibility -``` -Original: best_price_flex = 15% -Step 1: 15% + (15% × 0.25) = 18.75% -Step 2: 18.75% + (18.75% × 0.25) = 23.44% -Step 3: ... -``` - -**Calculation:** `new_flexibility = old_flexibility × (1 + relaxation_step / 100)` - -**Important:** This increases the flexibility percentage, which allows intervals **further from the daily MIN/MAX** to be included. For best price, this means accepting intervals more expensive than the original flexibility threshold. 
- -#### Level 2: Disable Volatility Filter -``` -If flexibility relaxation isn't enough: - → best_price_min_volatility = "any" (filter off) -``` - -#### Level 3: Disable All Filters -``` -If still too few periods: - → Volatility = "any" - → Level filter = "any" - → Only flexibility and minimum length active -``` - -### Relaxation Status - -The sensors show the **relaxation status** as an attribute: - -```yaml -Best Price Period: # sensor.tibber_home_best_price_period - state: "on" - attributes: - relaxation_level: "volatility_any" # Volatility filter was disabled -``` - -**Possible values:** -- `none` - No relaxation, normal filters -- `volatility_any` - Volatility filter disabled -- `all_filters_off` - All optional filters disabled - -### Example Configuration - -```yaml -# Best Price: Try to find at least 2 periods -enable_min_periods_best: true -min_periods_best: 2 -relaxation_step_best: 25 # 25% per step - -best_price_flex: 15 -best_price_min_volatility: "moderate" -``` - -**Process on a day with little price spread:** -1. Try with 15% flex + MODERATE volatility → 0 periods -2. Relax to 18.75% flex → 1 period -3. Relax to 23.44% flex → 1 period (still < 2) -4. Disable volatility filter → 2 periods ✓ - -**Result:** User sees 2 periods with `relaxation_level: "volatility_any"` - ---- - -## Practical Examples - -### Example 1: Standard Configuration (Best Price) - -**Configuration:** -```yaml -best_price_flex: 15 -best_price_min_period_length: 60 -best_price_min_distance_from_avg: 2 -best_price_min_volatility: "low" # Filter disabled -best_price_max_level: "any" # Filter disabled -``` - -**Daily prices:** -``` -MIN: 18.0 ct/kWh -MAX: 32.0 ct/kWh -AVG: 25.0 ct/kWh - -00:00-02:00: 18-20 ct (cheap) -06:00-08:00: 28-30 ct (expensive) -12:00-14:00: 24-26 ct (normal) -18:00-20:00: 19-21 ct (cheap) -``` - -**Calculation:** -1. Flexibility threshold: 18.0 × 1.15 = 20.7 ct (vs MIN, not average!) -2. Minimum distance threshold: 25.0 × 0.98 = 24.5 ct (vs AVG) -3. 
Both conditions: Price ≤ 20.7 ct AND Price ≤ 24.5 ct - -**Result:** -- ✓ 00:00-02:00 (18-20 ct, all ≤ 20.7 and all ≤ 24.5) -- ✗ 06:00-08:00 (too expensive) -- ✗ 12:00-14:00 (24-26 ct, exceeds flexibility threshold of 20.7 ct) -- ✓ 18:00-20:00 (19-21 ct, all ≤ 20.7 and all ≤ 24.5) - -**2 Best Price periods found!** - -### Example 2: Strict Level Filter Without Gap Tolerance - -**Configuration:** -```yaml -best_price_flex: 15 -best_price_max_level: "cheap" -best_price_max_level_gap_count: 0 # No tolerance -``` - -**Period candidate:** -``` -00:00-02:00: - 00:00-01:30: CHEAP, CHEAP, CHEAP, CHEAP, CHEAP, CHEAP - 01:30-01:45: NORMAL ← Deviation! - 01:45-02:00: CHEAP -``` - -**Result:** -- ✗ Period is split into 00:00-01:30 and 01:45-02:00 -- ✗ 01:45-02:00 too short (15 min < 60 min) → discarded -- ✓ Only 00:00-01:30 (1.5h) remains - -### Example 3: Level Filter With Gap Tolerance - -**Configuration:** -```yaml -best_price_flex: 15 -best_price_max_level: "cheap" -best_price_max_level_gap_count: 2 # 2 gaps allowed -``` - -**Period candidate (same as above):** -``` -00:00-02:00: - 00:00-01:30: CHEAP, CHEAP, CHEAP, CHEAP, CHEAP, CHEAP - 01:30-01:45: NORMAL ← Gap (1 of 2 allowed) - 01:45-02:00: CHEAP -``` - -**Gap tolerance check:** -- Gaps found: 1 (NORMAL) -- Max allowed: 2 -- 25% cap: min(2, 8/4) = 2 (8 intervals) -- Minimum distance: N/A (only 1 gap) - -**Result:** -- ✓ Period stays as a whole: 00:00-02:00 (2h) -- 1 NORMAL interval is tolerated - -### Example 4: Gap Cluster Gets Split - -**Configuration:** -```yaml -best_price_flex: 15 -best_price_max_level: "cheap" -best_price_max_level_gap_count: 2 -``` - -**Period candidate:** -``` -00:00-04:00 (16 intervals): - 00:00-01:00: CHEAP, CHEAP, CHEAP, CHEAP (4) - 01:00-02:00: NORMAL, NORMAL, NORMAL, NORMAL (4) ← Gap cluster! 
- 02:00-04:00: CHEAP, CHEAP, CHEAP, ..., CHEAP (8) -``` - -**Gap tolerance check:** -- Gaps found: 4 (too many) -- Max allowed: 2 -- → Normal check fails - -**Gap cluster splitting:** -- Detect cluster: 4× consecutive NORMAL intervals -- Split period at cluster boundaries: - 1. 00:00-01:00 (4 intervals = 60 min) ✓ - 2. 02:00-04:00 (8 intervals = 120 min) ✓ - -**Result:** -- ✓ Two separate periods: 00:00-01:00 and 02:00-04:00 -- Total 3h best price (instead of complete rejection) - -### Example 5: Relaxation in Action - -**Configuration:** -```yaml -enable_min_periods_best: true -min_periods_best: 2 -relaxation_step_best: 25 - -best_price_flex: 5 # Very strict! -best_price_min_volatility: "high" # Very strict! -``` - -**Day with little price spread:** -``` -MIN: 23.0 ct/kWh -MAX: 27.0 ct/kWh -AVG: 25.0 ct/kWh -All prices between 23-27 ct (low volatility) -``` - -**Relaxation process:** - -1. **Attempt 1:** 5% flex + HIGH volatility - ``` - Threshold: 23.0 × 1.05 = 24.15 ct (vs MIN) - No period meets both conditions - → 0 periods (< 2 required) - ``` - -2. **Attempt 2:** 6.25% flex + HIGH volatility - ``` - Threshold: 23.0 × 1.0625 = 24.44 ct - Still 0 periods - ``` - -3. **Attempt 3:** Disable volatility filter - ``` - 6.25% flex + ANY volatility - → 1 period found (< 2) - ``` - -4. **Attempt 4:** 7.81% flex + ANY volatility - ``` - Threshold: 23.0 × 1.0781 = 24.80 ct - → 2 periods found ✓ - ``` - -**Result:** -- Sensor shows 2 periods with `relaxation_level: "volatility_any"` -- User knows: "Filters were relaxed to reach minimum count" - --- ## Troubleshooting -### Problem: No Periods Found +### No Periods Found + +**Symptom:** `binary_sensor.tibber_home_best_price_period` never turns "on" **Possible causes:** -1. **Too strict flexibility** - ``` - best_price_flex: 5 # Only allows intervals ≤5% above daily MIN - ``` - **Solution:** Increase to 10-15% +1. **Filters too strict** -2. 
**Too strict level filter without gap tolerance** - ``` - best_price_max_level: "very_cheap" - best_price_max_level_gap_count: 0 - ``` - **Solution:** Relax level to "cheap" or enable gap tolerance (1-2) + ```yaml + # Try: + best_price_flex: 20 # Increase from default 15% + best_price_min_distance_from_avg: 1 # Reduce from default 2% + ``` -3. **Too high volatility requirement** - ``` - best_price_min_volatility: "very_high" - ``` - **Solution:** Reduce to "moderate" or "low" +2. **Period length too long** -4. **Too long minimum period length** - ``` - best_price_min_period_length: 180 # 3 hours - ``` - **Solution:** Reduce to 60-90 minutes + ```yaml + # Try: + best_price_min_period_length: 45 # Reduce from default 60 minutes + ``` -5. **Day with very small price spread** - ``` - MIN: 23 ct, MAX: 27 ct (hardly any differences) - ``` - **Solution:** Enable relaxation mechanism: - ```yaml - enable_min_periods_best: true - min_periods_best: 1 - ``` +3. **Flat price curve** (all prices very similar) -### Problem: Too Many Periods + - Enable relaxation to ensure at least some periods -**Solution:** Make filters stricter: + ```yaml + enable_min_periods_best: true + min_periods_best: 1 + ``` -```yaml -best_price_flex: 20 # Reduce to 10-15 -best_price_min_volatility: "moderate" # Require higher volatility -best_price_max_level: "cheap" # Only truly cheap times -``` +### Periods Split Into Small Pieces -### Problem: Periods Are Too Short - -**Solution:** Increase minimum length and use gap tolerance: - -```yaml -best_price_min_period_length: 90 # 1.5 hours -best_price_max_level_gap_count: 2 # Tolerate deviations -``` - -### Problem: Periods With "Mediocre" Prices - -**Solution:** Increase minimum distance: - -```yaml -best_price_min_distance_from_avg: 5 # Must be 5% below average -``` - -### Problem: Relaxation Applied Too Aggressively - -**Solution:** Reduce step size: - -```yaml -relaxation_step_best: 10 # Smaller steps (instead of 25) -``` - -Or disable relaxation 
completely: - -```yaml -enable_min_periods_best: false -``` - -### Problem: Gap Tolerance Not Working As Expected +**Symptom:** Many short periods instead of one long period **Possible causes:** -1. **Period too short (< 1.5h)** - ``` - Gap tolerance only applies to periods ≥ 1.5h - ``` - **Solution:** Reduce `best_price_min_period_length` or adjust flexibility +1. **Level filter too strict** -2. **25% cap limiting effective gaps** - ``` - Period: 8 intervals, configured 4 gaps - → Effective: min(4, 8/4) = 2 gaps - ``` - **Solution:** Accept limitation or relax level filter + ```yaml + # One "NORMAL" interval splits an otherwise good period + # Solution: Use gap tolerance + best_price_max_level: cheap + best_price_max_level_gap_count: 2 # Allow 2 NORMAL intervals + ``` -3. **Gaps too close together** - ``` - Minimum distance between gaps not maintained - ``` - **Solution:** Increase gap count or accept splitting +2. **Flexibility too tight** + + ```yaml + # One interval just outside flex range splits the period + # Solution: Increase flexibility + best_price_flex: 20 # Increase from 15% + ``` + +3. **Price spikes breaking periods** + + - Statistical outlier filtering should handle this automatically + - Check logs for smoothing activity: + + ``` + DEBUG: [2025-11-11T14:30:00+01:00] Outlier detected: 35.2 ct + DEBUG: Smoothed to: 20.7 ct (trend prediction) + ``` + + - If smoothing isn't working as expected, check: + - Is spike truly isolated? (3+ similar prices in a row won't be smoothed) + - Is it a legitimate price shift? 
(symmetry check preserves morning/evening peaks) + +### Understanding Sensor Attributes + +**Check period details:** + +```yaml +# Entity: binary_sensor.tibber_home_best_price_period + +# Attributes when "on": +start: "2025-11-11T02:00:00+01:00" +end: "2025-11-11T05:00:00+01:00" +duration_minutes: 180 +rating_level: "LOW" # All intervals are LOW price +price_avg: 18.5 # Average price in this period +relaxation_active: true # This day used relaxation +relaxation_level: "price_diff_20.25%+level_any" # Found at flex 20.25%, level filter removed +period_interval_smoothed_count: 2 # 2 outliers were smoothed (only if >0) +period_interval_level_gap_count: 1 # 1 interval kept via gap tolerance (only if >0) +``` --- -## Further Documentation +## Advanced Topics -- **[Configuration Guide](configuration.md)** - UI screenshots and step-by-step guide -- **[Sensors](sensors.md)** - All available sensors and attributes -- **[Automation Examples](automation-examples.md)** - Practical automation recipes with periods -- **[Developer Documentation](../development/)** - Code architecture and algorithm details +For advanced configuration patterns and a technical deep dive, see: + +- [Automation Examples](./automation-examples.md) - Real-world automation patterns +- [Services](./services.md) - Using the `tibber_prices.get_price` service for custom logic + +### Quick Reference + +**Configuration Parameters:** + +| Parameter | Default | Range | Purpose | +| ---------------------------------- | ------- | ------------------ | --------------------------- | +| `best_price_flex` | 15% | 0-100% | Search range from daily MIN | +| `best_price_min_period_length` | 60 min | 15-240 | Minimum duration | +| `best_price_min_distance_from_avg` | 2% | 0-20% | Quality threshold | +| `best_price_max_level` | any | any/cheap/vcheap | Absolute quality | +| `best_price_max_level_gap_count` | 0 | 0-10 | Gap tolerance | +| 
`enable_min_periods_best` | false | true/false | Enable relaxation | +| `min_periods_best` | - | 1-10 | Target periods per day | +| `relaxation_step_best` | - | 5-100% | Relaxation increment | + +**Peak Price:** Same parameters with `peak_price_*` prefix (defaults: flex=-15%, same otherwise) + +### Price Levels Reference + +The Tibber API provides price levels for each 15-minute interval: + +**Levels (based on trailing 24h average):** + +- `VERY_CHEAP` - Significantly below average +- `CHEAP` - Below average +- `NORMAL` - Around average +- `EXPENSIVE` - Above average +- `VERY_EXPENSIVE` - Significantly above average + +### Outlier Filtering Technical Details + +**Algorithm:** + +1. **Linear regression**: Predicts expected price based on surrounding trend +2. **Confidence intervals**: 2 standard deviations (95% confidence) +3. **Symmetry check**: Rejects asymmetric outliers (1.5 std dev threshold) +4. **Enhanced zigzag detection**: Catches spike clusters with relative volatility (2.0× threshold) + +**Constants:** + +- `CONFIDENCE_LEVEL`: 2.0 (95% confidence) +- `SYMMETRY_THRESHOLD`: 1.5 std dev +- `RELATIVE_VOLATILITY_THRESHOLD`: 2.0 +- `MIN_CONTEXT_SIZE`: 3 intervals minimum + +**Data integrity:** + +- Smoothed intervals stored with `_original_price` field +- All statistics (min/max/avg) use original prices +- Period attributes show impact: `period_interval_smoothed_count` +- Smart counting: Only counts smoothing that actually changed period formation --- -**Questions or feedback?** Open an [issue on GitHub](https://github.com/jpawlowski/hass.tibber_prices/issues)! +**Last updated:** November 12, 2025 +**Integration version:** 2.0+