Compare commits

..

No commits in common. "main" and "v0.3.0" have entirely different histories.
main ... v0.3.0

537 changed files with 12089 additions and 165290 deletions

8
.copilot/config.json Normal file
View file

@ -0,0 +1,8 @@
{
"instructionFiles": [
{
"pattern": "**",
"path": ".github/copilot-instructions.md"
}
]
}

View file

@ -1,29 +1,18 @@
{ {
"name": "jpawlowski/hass.tibber_prices", "name": "jpawlowski/hass.tibber_prices",
"image": "mcr.microsoft.com/devcontainers/python:3.14", "image": "mcr.microsoft.com/devcontainers/python:3.13",
"postCreateCommand": "bash .devcontainer/setup-git.sh && scripts/setup/setup", "postCreateCommand": "scripts/setup",
"postStartCommand": "scripts/motd", "postStartCommand": "scripts/motd",
"containerEnv": { "containerEnv": {
"PYTHONASYNCIODEBUG": "1", "PYTHONASYNCIODEBUG": "1"
"TIBBER_PRICES_DEV": "1"
}, },
"forwardPorts": [ "forwardPorts": [
8123, 8123
3000,
3001
], ],
"portsAttributes": { "portsAttributes": {
"8123": { "8123": {
"label": "Home Assistant", "label": "Home Assistant",
"onAutoForward": "notify" "onAutoForward": "notify"
},
"3000": {
"label": "Docusaurus User Docs",
"onAutoForward": "notify"
},
"3001": {
"label": "Docusaurus Developer Docs",
"onAutoForward": "notify"
} }
}, },
"customizations": { "customizations": {
@ -38,7 +27,8 @@
"ms-python.vscode-pylance", "ms-python.vscode-pylance",
"ms-vscode-remote.remote-containers", "ms-vscode-remote.remote-containers",
"redhat.vscode-yaml", "redhat.vscode-yaml",
"ryanluker.vscode-coverage-gutters" "ryanluker.vscode-coverage-gutters",
"visualstudioexptteam.vscodeintellicode"
], ],
"settings": { "settings": {
"editor.tabSize": 4, "editor.tabSize": 4,
@ -50,15 +40,6 @@
"python.analysis.typeCheckingMode": "basic", "python.analysis.typeCheckingMode": "basic",
"python.analysis.autoImportCompletions": true, "python.analysis.autoImportCompletions": true,
"python.analysis.diagnosticMode": "workspace", "python.analysis.diagnosticMode": "workspace",
"python.analysis.diagnosticSeverityOverrides": {
"reportUnusedImport": "none",
"reportUnusedVariable": "none",
"reportUnusedCoroutine": "none",
"reportMissingTypeStubs": "none"
},
"python.analysis.include": [
"custom_components/tibber_prices"
],
"python.analysis.exclude": [ "python.analysis.exclude": [
"**/.venv/**", "**/.venv/**",
"**/venv/**", "**/venv/**",
@ -70,7 +51,7 @@
], ],
"python.defaultInterpreterPath": "${workspaceFolder}/.venv/bin/python", "python.defaultInterpreterPath": "${workspaceFolder}/.venv/bin/python",
"python.analysis.extraPaths": [ "python.analysis.extraPaths": [
"${workspaceFolder}/.venv/lib/python3.14/site-packages" "${workspaceFolder}/.venv/lib/python3.13/site-packages"
], ],
"python.terminal.activateEnvironment": true, "python.terminal.activateEnvironment": true,
"python.terminal.activateEnvInCurrentTerminal": true, "python.terminal.activateEnvInCurrentTerminal": true,
@ -105,30 +86,23 @@
"fileMatch": [ "fileMatch": [
"homeassistant/components/*/manifest.json" "homeassistant/components/*/manifest.json"
], ],
"url": "${containerWorkspaceFolder}/schemas/json/manifest_schema.json" "url": "${containerWorkspaceFolder}/scripts/json_schemas/manifest_schema.json"
}, },
{ {
"fileMatch": [ "fileMatch": [
"homeassistant/components/*/translations/*.json" "homeassistant/components/*/translations/*.json"
], ],
"url": "${containerWorkspaceFolder}/schemas/json/translation_schema.json" "url": "${containerWorkspaceFolder}/scripts/json_schemas/translation_schema.json"
} }
], ]
"git.useConfigOnly": false
} }
} }
}, },
"mounts": [
"source=${localEnv:HOME}${localEnv:USERPROFILE}/.gitconfig,target=/home/vscode/.gitconfig.host,type=bind,consistency=cached"
],
"remoteUser": "vscode", "remoteUser": "vscode",
"features": { "features": {
"ghcr.io/devcontainers/features/github-cli:1": {}, "ghcr.io/devcontainers/features/github-cli:1": {},
"ghcr.io/flexwie/devcontainer-features/op:1": {
"version": "latest"
},
"ghcr.io/devcontainers/features/node:1": { "ghcr.io/devcontainers/features/node:1": {
"version": "24" "version": "22"
}, },
"ghcr.io/devcontainers/features/rust:1": { "ghcr.io/devcontainers/features/rust:1": {
"version": "latest", "version": "latest",

View file

@ -1,99 +0,0 @@
#!/bin/bash
# Setup Git configuration from host
# This script is idempotent and can be run multiple times safely.
# Exit on error
set -e
# Check if host gitconfig exists
if [ ! -f ~/.gitconfig.host ]; then
echo "No host .gitconfig found, skipping Git setup"
exit 0
fi
echo "Setting up Git configuration from host..."
# Extract and set user info
USER_NAME=$(grep -A 2 '^\[user\]' ~/.gitconfig.host | grep 'name' | sed 's/.*= //' | xargs)
USER_EMAIL=$(grep -A 2 '^\[user\]' ~/.gitconfig.host | grep 'email' | sed 's/.*= //' | xargs)
if [ -n "$USER_NAME" ]; then
CURRENT_NAME=$(git config --global user.name 2>/dev/null || echo "")
if [ "$CURRENT_NAME" != "$USER_NAME" ]; then
git config --global user.name "$USER_NAME"
echo "✓ Set user.name: $USER_NAME"
else
echo " user.name already set: $USER_NAME"
fi
fi
if [ -n "$USER_EMAIL" ]; then
CURRENT_EMAIL=$(git config --global user.email 2>/dev/null || echo "")
if [ "$CURRENT_EMAIL" != "$USER_EMAIL" ]; then
git config --global user.email "$USER_EMAIL"
echo "✓ Set user.email: $USER_EMAIL"
else
echo " user.email already set: $USER_EMAIL"
fi
fi
# Set safe defaults for container
git config --global init.defaultBranch main
git config --global pull.rebase false
git config --global merge.conflictStyle diff3
git config --global submodule.recurse true
git config --global color.ui true
echo "✓ Set Git defaults"
# Copy useful aliases (skip if they have macOS-specific paths)
if grep -q '^\[alias\]' ~/.gitconfig.host; then
echo "✓ Syncing Git aliases..."
# First, collect all aliases from host config
TEMP_ALIASES=$(mktemp)
sed -n '/^\[alias\]/,/^\[/p' ~/.gitconfig.host | \
grep -v '^\[' | \
grep -v '^$' | \
while IFS= read -r line; do
# Skip aliases with macOS-specific paths
if echo "$line" | grep -q -E '/(Applications|usr/local)'; then
continue
fi
echo "$line" >> "$TEMP_ALIASES"
done
# Apply each alias (git config --global overwrites existing values = idempotent)
if [ -s "$TEMP_ALIASES" ]; then
while IFS= read -r line; do
ALIAS_NAME=$(echo "$line" | awk '{print $1}')
ALIAS_VALUE=$(echo "$line" | sed "s/^$ALIAS_NAME = //")
git config --global "alias.$ALIAS_NAME" "$ALIAS_VALUE" 2>/dev/null || true
done < "$TEMP_ALIASES"
echo " Synced $(wc -l < "$TEMP_ALIASES") aliases"
fi
rm -f "$TEMP_ALIASES"
fi
# Disable GPG signing in container (1Password SSH signing doesn't work in DevContainers)
# SSH agent forwarding works for git push/pull, but SSH signing requires direct
# access to 1Password app which isn't available in the container.
#
# For signed commits: Make final commits on host macOS where 1Password is available.
# The container is for development/testing - pre-commit hooks will still run.
CURRENT_SIGNING=$(git config --global commit.gpgsign 2>/dev/null || echo "false")
if [ "$CURRENT_SIGNING" != "false" ]; then
echo " Disabling commit signing in container (1Password not accessible)"
echo " → For signed commits, commit from macOS terminal outside container"
git config --global commit.gpgsign false
else
echo " Commit signing already disabled"
fi
# Keep the signing key info for reference, but don't use it
SIGNING_KEY=$(grep 'signingkey' ~/.gitconfig.host 2>/dev/null | sed 's/.*= //' | xargs || echo "")
if [ -n "$SIGNING_KEY" ]; then
echo " → Your signing key: ${SIGNING_KEY:0:20}... (available on host)"
fi
echo "✓ Git configuration complete"

4
.github/FUNDING.yml vendored
View file

@ -1,4 +0,0 @@
# These are supported funding model platforms
github: [ jpawlowski ]
buy_me_a_coffee: jpawlowski

1625
.github/copilot-instructions.md vendored Normal file

File diff suppressed because it is too large Load diff

View file

@ -20,7 +20,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v6 uses: actions/checkout@v5
with: with:
fetch-depth: 0 # Need full history for git describe fetch-depth: 0 # Need full history for git describe
@ -43,13 +43,13 @@ jobs:
echo "✗ Tag v${{ steps.manifest.outputs.version }} does not exist yet" echo "✗ Tag v${{ steps.manifest.outputs.version }} does not exist yet"
fi fi
- name: Validate version format (stable or beta) - name: Validate version format
if: steps.tag_check.outputs.exists == 'false' if: steps.tag_check.outputs.exists == 'false'
run: | run: |
VERSION="${{ steps.manifest.outputs.version }}" VERSION="${{ steps.manifest.outputs.version }}"
if ! echo "$VERSION" | grep -qE '^[0-9]+\.[0-9]+\.[0-9]+(b[0-9]+)?$'; then if ! echo "$VERSION" | grep -qE '^[0-9]+\.[0-9]+\.[0-9]+$'; then
echo "❌ Invalid version format: $VERSION" echo "❌ Invalid version format: $VERSION"
echo "Expected format: X.Y.Z or X.Y.ZbN (e.g., 1.0.0, 0.25.0b0)" echo "Expected format: X.Y.Z (e.g., 1.0.0)"
exit 1 exit 1
fi fi
echo "✓ Version format valid: $VERSION" echo "✓ Version format valid: $VERSION"

View file

@ -1,163 +0,0 @@
name: Deploy Docusaurus Documentation (Dual Sites)
on:
push:
branches: [main]
paths:
- 'docs/**'
- '.github/workflows/docusaurus.yml'
tags:
- 'v*.*.*'
workflow_dispatch:
# Concurrency control: cancel in-progress deployments
# Pattern from GitHub Actions best practices for deployment workflows
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions:
contents: write
pages: write
id-token: write
jobs:
deploy:
name: Build and Deploy Documentation Sites
runs-on: ubuntu-latest
environment:
name: github-pages
url: ${{ steps.deployment.outputs.page_url }}
steps:
- uses: actions/checkout@v6
with:
fetch-depth: 0 # Needed for version timestamps
- name: Detect prerelease tag (beta/rc)
id: taginfo
run: |
if [[ "${GITHUB_REF}" =~ ^refs/tags/v[0-9]+\.[0-9]+\.[0-9]+(b[0-9]+|rc[0-9]+)$ ]]; then
echo "is_prerelease=true" >> "$GITHUB_OUTPUT"
echo "Detected prerelease tag: ${GITHUB_REF}"
else
echo "is_prerelease=false" >> "$GITHUB_OUTPUT"
echo "Stable tag or branch: ${GITHUB_REF}"
fi
- uses: actions/setup-node@v6
with:
node-version: 24
cache: 'npm'
cache-dependency-path: |
docs/user/package-lock.json
docs/developer/package-lock.json
# USER DOCS BUILD
- name: Install user docs dependencies
working-directory: docs/user
run: npm ci
- name: Create user docs version snapshot on tag
if: startsWith(github.ref, 'refs/tags/v') && steps.taginfo.outputs.is_prerelease != 'true'
working-directory: docs/user
run: |
TAG_VERSION=${GITHUB_REF#refs/tags/}
echo "Creating user documentation version: $TAG_VERSION"
npm run docusaurus docs:version $TAG_VERSION || echo "Version already exists"
# Update GitHub links in versioned docs
if [ -d "versioned_docs/version-$TAG_VERSION" ]; then
find versioned_docs/version-$TAG_VERSION -name "*.md" -type f -exec sed -i "s|github.com/jpawlowski/hass.tibber_prices/blob/main/|github.com/jpawlowski/hass.tibber_prices/blob/$TAG_VERSION/|g" {} \; || true
fi
- name: Cleanup old user docs versions
if: startsWith(github.ref, 'refs/tags/v') && steps.taginfo.outputs.is_prerelease != 'true'
working-directory: docs/user
run: |
chmod +x ../cleanup-old-versions.sh
# Adapt script for single-instance mode (versioned_docs/ instead of user_versioned_docs/)
sed 's/user_versioned_docs/versioned_docs/g; s/user_versions.json/versions.json/g; s/developer_versioned_docs/versioned_docs/g; s/developer_versions.json/versions.json/g' ../cleanup-old-versions.sh > cleanup-single.sh
chmod +x cleanup-single.sh
./cleanup-single.sh
- name: Build user docs website
working-directory: docs/user
run: npm run build
# DEVELOPER DOCS BUILD
- name: Install developer docs dependencies
working-directory: docs/developer
run: npm ci
- name: Create developer docs version snapshot on tag
if: startsWith(github.ref, 'refs/tags/v') && steps.taginfo.outputs.is_prerelease != 'true'
working-directory: docs/developer
run: |
TAG_VERSION=${GITHUB_REF#refs/tags/}
echo "Creating developer documentation version: $TAG_VERSION"
npm run docusaurus docs:version $TAG_VERSION || echo "Version already exists"
# Update GitHub links in versioned docs
if [ -d "versioned_docs/version-$TAG_VERSION" ]; then
find versioned_docs/version-$TAG_VERSION -name "*.md" -type f -exec sed -i "s|github.com/jpawlowski/hass.tibber_prices/blob/main/|github.com/jpawlowski/hass.tibber_prices/blob/$TAG_VERSION/|g" {} \; || true
fi
- name: Cleanup old developer docs versions
if: startsWith(github.ref, 'refs/tags/v') && steps.taginfo.outputs.is_prerelease != 'true'
working-directory: docs/developer
run: |
chmod +x ../cleanup-old-versions.sh
# Adapt script for single-instance mode
sed 's/user_versioned_docs/versioned_docs/g; s/user_versions.json/versions.json/g; s/developer_versioned_docs/versioned_docs/g; s/developer_versions.json/versions.json/g' ../cleanup-old-versions.sh > cleanup-single.sh
chmod +x cleanup-single.sh
./cleanup-single.sh
- name: Build developer docs website
working-directory: docs/developer
run: npm run build
# MERGE BUILDS
- name: Merge both documentation sites
run: |
mkdir -p deploy-root/user
mkdir -p deploy-root/developer
cp docs/index.html deploy-root/
cp -r docs/user/build/* deploy-root/user/
cp -r docs/developer/build/* deploy-root/developer/
# COMMIT VERSION SNAPSHOTS
- name: Commit version snapshots back to repository
if: startsWith(github.ref, 'refs/tags/v') && steps.taginfo.outputs.is_prerelease != 'true'
run: |
TAG_VERSION=${GITHUB_REF#refs/tags/}
git config user.name "github-actions[bot]"
git config user.email "github-actions[bot]@users.noreply.github.com"
# Add version files from both docs
git add docs/user/versioned_docs/ docs/user/versions.json 2>/dev/null || true
git add docs/developer/versioned_docs/ docs/developer/versions.json 2>/dev/null || true
# Commit if there are changes
if git diff --staged --quiet; then
echo "No version snapshot changes to commit"
else
git commit -m "docs: add version snapshot $TAG_VERSION and cleanup old versions [skip ci]"
git push origin HEAD:main
echo "Version snapshots committed and pushed to main"
fi
# DEPLOY TO GITHUB PAGES
- name: Setup Pages
uses: actions/configure-pages@v6
- name: Upload artifact
uses: actions/upload-pages-artifact@v4
with:
path: ./deploy-root
- name: Deploy to GitHub Pages
id: deployment
uses: actions/deploy-pages@v5

View file

@ -4,15 +4,9 @@ on:
push: push:
branches: branches:
- "main" - "main"
paths-ignore:
- 'docs/**'
- '.github/workflows/docusaurus.yml'
pull_request: pull_request:
branches: branches:
- "main" - "main"
paths-ignore:
- 'docs/**'
- '.github/workflows/docusaurus.yml'
permissions: {} permissions: {}
@ -26,20 +20,20 @@ jobs:
runs-on: "ubuntu-latest" runs-on: "ubuntu-latest"
steps: steps:
- name: Checkout the repository - name: Checkout the repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Set up Python - name: Set up Python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0 uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with: with:
python-version: "3.14" python-version: "3.13"
- name: Install uv - name: Install uv
uses: astral-sh/setup-uv@37802adc94f370d6bfd71619e3f0bf239e1f3b78 # v7.6.0 uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
with: with:
version: "0.9.3" version: "0.9.3"
- name: Install requirements - name: Install requirements
run: scripts/setup/bootstrap run: scripts/bootstrap
- name: Lint check - name: Lint check
run: scripts/lint-check run: scripts/lint-check

View file

@ -27,7 +27,7 @@ jobs:
version: ${{ steps.tag.outputs.version }} version: ${{ steps.tag.outputs.version }}
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v6 uses: actions/checkout@v5
with: with:
fetch-depth: 0 fetch-depth: 0
token: ${{ secrets.GITHUB_TOKEN }} token: ${{ secrets.GITHUB_TOKEN }}
@ -77,36 +77,22 @@ jobs:
- name: Commit and push manifest.json update - name: Commit and push manifest.json update
if: steps.update.outputs.updated == 'true' if: steps.update.outputs.updated == 'true'
run: | run: |
TAG_VERSION="v${{ steps.tag.outputs.version }}"
git config user.name "github-actions[bot]" git config user.name "github-actions[bot]"
git config user.email "github-actions[bot]@users.noreply.github.com" git config user.email "github-actions[bot]@users.noreply.github.com"
git add custom_components/tibber_prices/manifest.json git add custom_components/tibber_prices/manifest.json
git commit -m "chore(release): sync manifest.json with tag ${TAG_VERSION}" git commit -m "chore(release): sync manifest.json with tag v${{ steps.tag.outputs.version }}"
# Push to main branch # Push to main branch
git push origin HEAD:main git push origin HEAD:main
# Delete and recreate tag on new commit
echo "::notice::Recreating tag ${TAG_VERSION} on updated commit"
git tag -d "${TAG_VERSION}"
git push origin :refs/tags/"${TAG_VERSION}"
git tag -a "${TAG_VERSION}" -m "Release ${TAG_VERSION}"
git push origin "${TAG_VERSION}"
# Delete existing release if present (will be recreated by release-notes job)
gh release delete "${TAG_VERSION}" --yes --cleanup-tag=false || echo "No existing release to delete"
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
release-notes: release-notes:
name: Generate and publish release notes name: Generate and publish release notes
runs-on: ubuntu-latest runs-on: ubuntu-latest
needs: sync-manifest # Wait for manifest sync to complete needs: sync-manifest # Wait for manifest sync to complete
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v6 uses: actions/checkout@v5
with: with:
fetch-depth: 0 # Fetch all history for git-cliff fetch-depth: 0 # Fetch all history for git-cliff
ref: main # Use updated main branch if manifest was synced ref: main # Use updated main branch if manifest was synced
@ -135,20 +121,10 @@ jobs:
FEAT=$(echo "$COMMITS" | grep -cE "^feat(\(.+\))?:" || true) FEAT=$(echo "$COMMITS" | grep -cE "^feat(\(.+\))?:" || true)
FIX=$(echo "$COMMITS" | grep -cE "^fix(\(.+\))?:" || true) FIX=$(echo "$COMMITS" | grep -cE "^fix(\(.+\))?:" || true)
parse_version() { # Parse versions
local version="$1"
if [[ $version =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)(b[0-9]+)?$ ]]; then
echo "${BASH_REMATCH[1]} ${BASH_REMATCH[2]} ${BASH_REMATCH[3]} ${BASH_REMATCH[4]}"
else
echo "Invalid version format: $version" >&2
exit 1
fi
}
# Parse versions (support beta/prerelease suffix like 0.25.0b0)
PREV_VERSION="${PREV_TAG#v}" PREV_VERSION="${PREV_TAG#v}"
read -r PREV_MAJOR PREV_MINOR PREV_PATCH PREV_PRERELEASE <<< "$(parse_version "$PREV_VERSION")" IFS='.' read -r PREV_MAJOR PREV_MINOR PREV_PATCH <<< "$PREV_VERSION"
read -r MAJOR MINOR PATCH PRERELEASE <<< "$(parse_version "$TAG_VERSION")" IFS='.' read -r MAJOR MINOR PATCH <<< "$TAG_VERSION"
WARNING="" WARNING=""
SUGGESTION="" SUGGESTION=""
@ -190,11 +166,9 @@ jobs:
echo "**Commits analyzed:** Breaking=$BREAKING, Features=$FEAT, Fixes=$FIX" echo "**Commits analyzed:** Breaking=$BREAKING, Features=$FEAT, Fixes=$FIX"
echo "" echo ""
echo "**To fix:**" echo "**To fix:**"
echo "1. Run locally: \`./scripts/release/suggest-version\`" echo "1. Delete the tag: \`git tag -d v$TAG_VERSION && git push origin :refs/tags/v$TAG_VERSION\`"
echo "2. Create correct tag: \`./scripts/release/prepare <suggested-version>\`" echo "2. Run locally: \`./scripts/suggest-version\`"
echo "3. Push the corrected tag: \`git push origin v<suggested-version>\`" echo "3. Create correct tag: \`./scripts/prepare-release X.Y.Z\`"
echo ""
echo "**This tag will be automatically deleted in the next step.**"
echo "EOF" echo "EOF"
} >> $GITHUB_OUTPUT } >> $GITHUB_OUTPUT
else else
@ -202,19 +176,7 @@ jobs:
echo "warning=" >> $GITHUB_OUTPUT echo "warning=" >> $GITHUB_OUTPUT
fi fi
- name: Delete inappropriate version tag
if: steps.version_check.outputs.warning != ''
run: |
TAG_NAME="${GITHUB_REF#refs/tags/}"
echo "❌ Deleting tag $TAG_NAME (version not appropriate for changes)"
echo ""
echo "${{ steps.version_check.outputs.warning }}"
echo ""
git push origin --delete "$TAG_NAME"
exit 1
- name: Install git-cliff - name: Install git-cliff
if: steps.version_check.outputs.warning == ''
run: | run: |
wget https://github.com/orhun/git-cliff/releases/download/v2.4.0/git-cliff-2.4.0-x86_64-unknown-linux-gnu.tar.gz wget https://github.com/orhun/git-cliff/releases/download/v2.4.0/git-cliff-2.4.0-x86_64-unknown-linux-gnu.tar.gz
tar -xzf git-cliff-2.4.0-x86_64-unknown-linux-gnu.tar.gz tar -xzf git-cliff-2.4.0-x86_64-unknown-linux-gnu.tar.gz
@ -222,7 +184,6 @@ jobs:
git-cliff --version git-cliff --version
- name: Generate release notes - name: Generate release notes
if: steps.version_check.outputs.warning == ''
id: release_notes id: release_notes
run: | run: |
FROM_TAG="${{ steps.previoustag.outputs.previous_tag }}" FROM_TAG="${{ steps.previoustag.outputs.previous_tag }}"
@ -232,14 +193,16 @@ jobs:
# Use our script with git-cliff backend (AI disabled in CI) # Use our script with git-cliff backend (AI disabled in CI)
# git-cliff will handle filtering via cliff.toml # git-cliff will handle filtering via cliff.toml
USE_AI=false ./scripts/release/generate-notes "${FROM_TAG}" "${TO_TAG}" > release-notes.md USE_AI=false ./scripts/generate-release-notes "${FROM_TAG}" "${TO_TAG}" > release-notes.md
# Extract title from release notes (first line starting with "# ") # Append version warning if present
TITLE=$(head -n 1 release-notes.md | sed 's/^# //') WARNING="${{ steps.version_check.outputs.warning }}"
if [ -z "$TITLE" ]; then if [ -n "$WARNING" ]; then
TITLE="Release Updates" echo "" >> release-notes.md
echo "---" >> release-notes.md
echo "" >> release-notes.md
echo "$WARNING" >> release-notes.md
fi fi
echo "title=$TITLE" >> $GITHUB_OUTPUT
# Output for GitHub Actions # Output for GitHub Actions
{ {
@ -248,20 +211,49 @@ jobs:
echo EOF echo EOF
} >> $GITHUB_OUTPUT } >> $GITHUB_OUTPUT
- name: Version Check Summary
if: steps.version_check.outputs.warning != ''
run: |
echo "### ⚠️ Version Mismatch Detected" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "${{ steps.version_check.outputs.warning }}" >> $GITHUB_STEP_SUMMARY
- name: Generate release title
id: release_title
run: |
TAG_VERSION="${GITHUB_REF#refs/tags/}"
FROM_TAG="${{ steps.previoustag.outputs.previous_tag }}"
# Extract main feature types from commits
FEAT_COUNT=$(git log ${FROM_TAG}..HEAD --format="%s" --no-merges | grep -cE "^feat(\(.+\))?:" || true)
FIX_COUNT=$(git log ${FROM_TAG}..HEAD --format="%s" --no-merges | grep -cE "^fix(\(.+\))?:" || true)
# Build title based on what changed
if [ $FEAT_COUNT -gt 0 ] && [ $FIX_COUNT -gt 0 ]; then
TITLE="$TAG_VERSION - New Features & Bug Fixes"
elif [ $FEAT_COUNT -gt 0 ]; then
TITLE="$TAG_VERSION - New Features"
elif [ $FIX_COUNT -gt 0 ]; then
TITLE="$TAG_VERSION - Bug Fixes"
else
TITLE="$TAG_VERSION"
fi
echo "title=$TITLE" >> $GITHUB_OUTPUT
echo "Release title: $TITLE"
- name: Create GitHub Release - name: Create GitHub Release
if: steps.version_check.outputs.warning == ''
uses: softprops/action-gh-release@v2 uses: softprops/action-gh-release@v2
with: with:
name: ${{ steps.release_notes.outputs.title }} name: ${{ steps.release_title.outputs.title }}
body: ${{ steps.release_notes.outputs.notes }} body: ${{ steps.release_notes.outputs.notes }}
draft: false draft: false
prerelease: ${{ contains(github.ref, 'b') }} prerelease: false
generate_release_notes: false # We provide our own generate_release_notes: false # We provide our own
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Summary - name: Summary
if: steps.version_check.outputs.warning == ''
run: | run: |
echo "✅ Release notes generated and published!" >> $GITHUB_STEP_SUMMARY echo "✅ Release notes generated and published!" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY echo "" >> $GITHUB_STEP_SUMMARY

View file

@ -7,15 +7,9 @@ on:
push: push:
branches: branches:
- main - main
paths-ignore:
- 'docs/**'
- '.github/workflows/docusaurus.yml'
pull_request: pull_request:
branches: branches:
- main - main
paths-ignore:
- 'docs/**'
- '.github/workflows/docusaurus.yml'
permissions: {} permissions: {}
@ -29,10 +23,10 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Checkout the repository - name: Checkout the repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1 uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Run hassfest validation - name: Run hassfest validation
uses: home-assistant/actions/hassfest@d56d093b9ab8d2105bc0cb6ee9bcc0ef4ec8b96d # master uses: home-assistant/actions/hassfest@8ca6e134c077479b26138bd33520707e8d94ef59 # master
hacs: # https://github.com/hacs/action hacs: # https://github.com/hacs/action
name: HACS validation name: HACS validation
@ -42,3 +36,5 @@ jobs:
uses: hacs/action@d556e736723344f83838d08488c983a15381059a # 22.5.0 uses: hacs/action@d556e736723344f83838d08488c983a15381059a # 22.5.0
with: with:
category: integration category: integration
# Remove this 'ignore' key when you have added brand images for your custom integration to https://github.com/home-assistant/brands
ignore: brands

8
.gitignore vendored
View file

@ -19,11 +19,6 @@ uv.lock
config/* config/*
!config/configuration.yaml !config/configuration.yaml
# HACS and other custom components installed for testing
# Ignore everything in custom_components/ except tibber_prices
custom_components/*
!custom_components/tibber_prices/
# Home Assistant database and logs # Home Assistant database and logs
*.db *.db
*.db-shm *.db-shm
@ -37,6 +32,3 @@ home-assistant_v2.db*
*.temp *.temp
.DS_Store .DS_Store
Thumbs.db Thumbs.db
# Planning documents (work-in-progress)
planning/

2894
AGENTS.md

File diff suppressed because it is too large Load diff

1
AGENTS.md Symbolic link
View file

@ -0,0 +1 @@
.github/copilot-instructions.md

1
CLAUDE.md Symbolic link
View file

@ -0,0 +1 @@
.github/copilot-instructions.md

View file

@ -12,7 +12,7 @@ Thank you for your interest in contributing! This document provides guidelines f
For detailed developer documentation, see [docs/development/](docs/development/). For detailed developer documentation, see [docs/development/](docs/development/).
> **Note:** This project is developed with extensive AI assistance (GitHub Copilot, Claude). If you're also using AI tools, check [`AGENTS.md`](/AGENTS.md) for patterns and conventions that ensure consistency. > **Note:** This project is developed with extensive AI assistance (GitHub Copilot, Claude). If you're also using AI tools, check [`.github/copilot-instructions.md`](/.github/copilot-instructions.md) for patterns and conventions that ensure consistency.
## Getting Started ## Getting Started
@ -81,7 +81,7 @@ Added new sensor that calculates average price for the entire day.
Impact: Users can now track daily average prices for cost analysis." Impact: Users can now track daily average prices for cost analysis."
``` ```
See [`AGENTS.md`](AGENTS.md) section "Git Workflow Guidance" for detailed guidelines. See `.github/copilot-instructions.md` section "Git Workflow Guidance" for detailed guidelines.
## Submitting Changes ## Submitting Changes
@ -122,23 +122,13 @@ Always run before committing:
- Enrich price data before exposing to entities - Enrich price data before exposing to entities
- Follow Home Assistant entity naming conventions - Follow Home Assistant entity naming conventions
See [Coding Guidelines](docs/developer/docs/coding-guidelines.md) for complete details. See [Coding Guidelines](docs/development/coding-guidelines.md) for complete details.
## Documentation ## Documentation
Documentation is organized in two Docusaurus sites: - **User guides**: Place in `docs/user/` (installation, configuration, usage)
- **Developer guides**: Place in `docs/development/` (architecture, patterns)
- **User docs** (`docs/user/`): Installation, configuration, usage guides - **Update translations**: When changing `translations/en.json`, update ALL language files
- Markdown files in `docs/user/docs/*.md`
- Navigation via `docs/user/sidebars.ts`
- **Developer docs** (`docs/developer/`): Architecture, patterns, contribution guides
- Markdown files in `docs/developer/docs/*.md`
- Navigation via `docs/developer/sidebars.ts`
**When adding new documentation:**
1. Place file in appropriate `docs/*/docs/` directory
2. Add to corresponding `sidebars.ts` for navigation
3. Update translations when changing `translations/en.json` (update ALL language files)
## Reporting Bugs ## Reporting Bugs
@ -153,7 +143,7 @@ Report bugs via [GitHub Issues](../../issues/new/choose).
## Questions? ## Questions?
- Check [Developer Documentation](docs/development/) - Check [Developer Documentation](docs/development/)
- Read [Copilot Instructions](AGENTS.md) for patterns - Read [Copilot Instructions](.github/copilot-instructions.md) for patterns
- Search [existing issues](https://github.com/jpawlowski/hass.tibber_prices/issues) - Search [existing issues](https://github.com/jpawlowski/hass.tibber_prices/issues)
- Open a [new issue](https://github.com/jpawlowski/hass.tibber_prices/issues/new) - Open a [new issue](https://github.com/jpawlowski/hass.tibber_prices/issues/new)

179
README.md
View file

@ -1,8 +1,4 @@
# Tibber Prices - Custom Home Assistant Integration # Tibber Price Information & Ratings
<p align="center">
<img src="images/header.svg" alt="Tibber Prices Custom Integration for Tibber" width="600">
</p>
[![GitHub Release][releases-shield]][releases] [![GitHub Release][releases-shield]][releases]
[![GitHub Activity][commits-shield]][commits] [![GitHub Activity][commits-shield]][commits]
@ -10,91 +6,56 @@
[![hacs][hacsbadge]][hacs] [![hacs][hacsbadge]][hacs]
[![Project Maintenance][maintenance-shield]][user_profile] [![Project Maintenance][maintenance-shield]][user_profile]
[![BuyMeCoffee][buymecoffeebadge]][buymecoffee]
<a href="https://www.buymeacoffee.com/jpawlowski" target="_blank"><img src="images/bmc-button.svg" alt="Buy Me A Coffee" height="41" width="174"></a> A Home Assistant integration that provides advanced price information and ratings from Tibber. This integration fetches **quarter-hourly** electricity prices, enriches them with statistical analysis, and provides smart indicators to help you optimize your energy consumption and save money.
> **⚠️ Not affiliated with Tibber** ![Tibber Price Information & Ratings](images/logo.png)
> This is an independent, community-maintained custom integration for Home Assistant. It is **not** an official Tibber product and is **not** affiliated with or endorsed by Tibber AS.
A custom Home Assistant integration that provides advanced electricity price information and ratings from Tibber. This integration fetches **quarter-hourly** electricity prices, enriches them with statistical analysis, and provides smart indicators to help you optimize your energy consumption and save money.
## 📖 Documentation ## 📖 Documentation
**[📚 Complete Documentation](https://jpawlowski.github.io/hass.tibber_prices/)** - Two comprehensive documentation sites: - **[User Guide](docs/user/)** - Installation, configuration, and usage guides
- **[Developer Guide](docs/development/)** - Contributing, architecture, and release process
- **[👤 User Documentation](https://jpawlowski.github.io/hass.tibber_prices/user/)** - Installation, configuration, usage guides, and examples - **[Changelog](https://github.com/jpawlowski/hass.tibber_prices/releases)** - Release history and notes
- **[🔧 Developer Documentation](https://jpawlowski.github.io/hass.tibber_prices/developer/)** - Architecture, contributing guidelines, and development setup
**Quick Links:**
- [Installation Guide](https://jpawlowski.github.io/hass.tibber_prices/user/installation) - Step-by-step setup instructions
- [Sensor Reference](https://jpawlowski.github.io/hass.tibber_prices/user/sensors) - Complete list of all sensors and attributes
- [Chart Examples](https://jpawlowski.github.io/hass.tibber_prices/user/chart-examples) - ApexCharts visualizations
- [Automation Examples](https://jpawlowski.github.io/hass.tibber_prices/user/automation-examples) - Real-world automation scenarios
- [Changelog](https://github.com/jpawlowski/hass.tibber_prices/releases) - Release history and notes
## ✨ Features ## ✨ Features
- **Quarter-Hourly Price Data**: Access detailed 15-minute interval pricing (384 data points across 4 days: day before yesterday/yesterday/today/tomorrow) - **Quarter-Hourly Price Data**: Access detailed 15-minute interval pricing (192 data points across yesterday/today/tomorrow)
- **Flexible Currency Display**: Choose between base currency (€, kr) or subunit (ct, øre) display - configurable per your preference with smart defaults - **Current and Next Interval Prices**: Get real-time price data in both major currency (€, kr) and minor units (ct, øre)
- **Multi-Currency Support**: Automatic detection and formatting for EUR, NOK, SEK, DKK, USD, and GBP - **Multi-Currency Support**: Automatic detection and formatting for EUR, NOK, SEK, DKK, USD, and GBP
- **Price Level Indicators**: Know when you're in a VERY_CHEAP, CHEAP, NORMAL, EXPENSIVE, or VERY_EXPENSIVE period - **Price Level Indicators**: Know when you're in a VERY_CHEAP, CHEAP, NORMAL, EXPENSIVE, or VERY_EXPENSIVE period
- **Statistical Sensors**: Track lowest, highest, and average prices for the day - **Statistical Sensors**: Track lowest, highest, and average prices for the day
- **Price Ratings**: Quarter-hourly ratings comparing current prices to 24-hour trailing averages - **Price Ratings**: Quarter-hourly ratings comparing current prices to 24-hour trailing averages
- **Smart Indicators**: Binary sensors to detect peak hours and best price hours for automations - **Smart Indicators**: Binary sensors to detect peak hours and best price hours for automations
- **Beautiful ApexCharts**: Auto-generated chart configurations with dynamic Y-axis scaling ([see examples](https://jpawlowski.github.io/hass.tibber_prices/user/chart-examples))
- **Chart Metadata Sensor**: Dynamic chart configuration for optimal visualization
- **Intelligent Caching**: Minimizes API calls while ensuring data freshness across Home Assistant restarts - **Intelligent Caching**: Minimizes API calls while ensuring data freshness across Home Assistant restarts
- **Custom Actions** (backend services): API endpoints for advanced integrations (ApexCharts support included) - **Custom Services**: API endpoints for advanced integrations (ApexCharts support included)
- **Diagnostic Sensors**: Monitor data freshness and availability - **Diagnostic Sensors**: Monitor data freshness and availability
- **Reliable API Usage**: Uses only official Tibber [`priceInfo`](https://developer.tibber.com/docs/reference#priceinfo) and [`priceInfoRange`](https://developer.tibber.com/docs/reference#subscription) endpoints - no legacy APIs. Price ratings and statistics are calculated locally for maximum reliability and future-proofing. - **Reliable API Usage**: Uses only official Tibber [`priceInfo`](https://developer.tibber.com/docs/reference#priceinfo) and [`priceInfoRange`](https://developer.tibber.com/docs/reference#subscription) endpoints - no legacy APIs. Price ratings and statistics are calculated locally for maximum reliability and future-proofing.
## 🚀 Quick Start ## 🚀 Quick Start
### Step 1: Install the Integration ### Installation
**Prerequisites:** This integration requires [HACS](https://hacs.xyz/) (Home Assistant Community Store) to be installed. 1. **Install via HACS** (recommended)
- Add as custom repository: `https://github.com/jpawlowski/hass.tibber_prices`
- Search for "Tibber Price Information & Ratings"
- Click "Install"
Click the button below to open the integration directly in HACS: 2. **Add Integration**
- Go to Settings → Devices & Services
- Click "+ Add Integration"
- Search for "Tibber Price Information & Ratings"
[![Open your Home Assistant instance and open a repository inside the Home Assistant Community Store.](https://my.home-assistant.io/badges/hacs_repository.svg)](https://my.home-assistant.io/redirect/hacs_repository/?owner=jpawlowski&repository=hass.tibber_prices&category=integration) 3. **Configure**
- Enter your Tibber API token ([get one here](https://developer.tibber.com/settings/access-token))
- Select your Tibber home
- Configure price thresholds (optional)
Then: 4. **Start Using!**
- 30+ sensors available (key sensors enabled by default)
- Use in automations, dashboards, and scripts
1. Click "Download" to install the integration 📖 **[Full Installation Guide →](docs/user/installation.md)**
2. **Restart Home Assistant** (required after installation)
> **Note:** The My Home Assistant redirect will first take you to a landing page. Click the button there to open your Home Assistant instance. If the repository is not yet in the HACS default store, HACS will ask if you want to add it as a custom repository.
### Step 2: Add and Configure the Integration
**Important:** You must have installed the integration first (see Step 1) and restarted Home Assistant!
#### Option 1: One-Click Setup (Quick)
Click the button below to open the configuration dialog:
[![Open your Home Assistant instance and start setting up a new integration.](https://my.home-assistant.io/badges/config_flow_start.svg)](https://my.home-assistant.io/redirect/config_flow_start/?domain=tibber_prices)
This will guide you through:
1. Enter your Tibber API token ([get one here](https://developer.tibber.com/settings/access-token))
2. Select your Tibber home
3. Configure price thresholds (optional)
#### Option 2: Manual Configuration
1. Go to **Settings** → **Devices & Services**
2. Click **"+ Add Integration"**
3. Search for "Tibber Price Information & Ratings"
4. Follow the configuration steps (same as Option 1)
### Step 3: Start Using!
- 30+ sensors are now available (key sensors enabled by default)
- Configure additional sensors in **Settings****Devices & Services****Tibber Price Information & Ratings** → **Entities**
- Use sensors in automations, dashboards, and scripts
📖 **[Full Installation Guide →](https://jpawlowski.github.io/hass.tibber_prices/user/installation)**
## 📊 Available Entities ## 📊 Available Entities
@ -102,12 +63,10 @@ The integration provides **30+ sensors** across different categories. Key sensor
> **Rich Sensor Attributes**: All sensors include extensive attributes with timestamps, context data, and detailed explanations. Enable **Extended Descriptions** in the integration options to add `long_description` and `usage_tips` attributes to every sensor, providing in-context documentation directly in Home Assistant's UI. > **Rich Sensor Attributes**: All sensors include extensive attributes with timestamps, context data, and detailed explanations. Enable **Extended Descriptions** in the integration options to add `long_description` and `usage_tips` attributes to every sensor, providing in-context documentation directly in Home Assistant's UI.
**[📋 Complete Sensor Reference](https://jpawlowski.github.io/hass.tibber_prices/user/sensors)** - Full list with descriptions and attributes
### Core Price Sensors (Enabled by Default) ### Core Price Sensors (Enabled by Default)
| Entity | Description | | Entity | Description |
| -------------------------- | ------------------------------------------------- | | ----------------------------- | ------------------------------------------------- |
| Current Electricity Price | Current 15-minute interval price | | Current Electricity Price | Current 15-minute interval price |
| Next Interval Price | Price for the next 15-minute interval | | Next Interval Price | Price for the next 15-minute interval |
| Current Hour Average Price | Average of current hour's 4 intervals | | Current Hour Average Price | Average of current hour's 4 intervals |
@ -120,7 +79,7 @@ The integration provides **30+ sensors** across different categories. Key sensor
### Statistical Sensors (Enabled by Default) ### Statistical Sensors (Enabled by Default)
| Entity | Description | | Entity | Description |
| ------------------------- | ------------------------------------------- | | ------------------------------ | -------------------------------------------- |
| Today's Lowest Price | Minimum price for today | | Today's Lowest Price | Minimum price for today |
| Today's Highest Price | Maximum price for today | | Today's Highest Price | Maximum price for today |
| Today's Average Price | Mean price across today's intervals | | Today's Average Price | Mean price across today's intervals |
@ -134,7 +93,7 @@ The integration provides **30+ sensors** across different categories. Key sensor
### Price Rating Sensors (Enabled by Default) ### Price Rating Sensors (Enabled by Default)
| Entity | Description | | Entity | Description |
| -------------------------- | --------------------------------------------------------- | | --------------------------- | ---------------------------------------------------------- |
| Current Price Rating | % difference from 24h trailing average (current interval) | | Current Price Rating | % difference from 24h trailing average (current interval) |
| Next Interval Price Rating | % difference from 24h trailing average (next interval) | | Next Interval Price Rating | % difference from 24h trailing average (next interval) |
| Current Hour Price Rating | % difference for current hour average | | Current Hour Price Rating | % difference for current hour average |
@ -145,16 +104,16 @@ The integration provides **30+ sensors** across different categories. Key sensor
### Binary Sensors (Enabled by Default) ### Binary Sensors (Enabled by Default)
| Entity | Description | | Entity | Description |
| ------------------------- | ----------------------------------------------------------------------------------------- | | -------------------------- | -------------------------------------------------------------- |
| Peak Price Period | ON when in a detected peak price period ([how it works](https://jpawlowski.github.io/hass.tibber_prices/user/period-calculation)) | | Peak Price Interval | ON when current interval is in the highest 20% of day's prices |
| Best Price Period | ON when in a detected best price period ([how it works](https://jpawlowski.github.io/hass.tibber_prices/user/period-calculation)) | | Best Price Interval | ON when current interval is in the lowest 20% of day's prices |
| Tibber API Connection | Connection status to Tibber API | | Tibber API Connection | Connection status to Tibber API |
| Tomorrow's Data Available | Whether tomorrow's price data is available | | Tomorrow's Data Available | Whether tomorrow's price data is available |
### Diagnostic Sensors (Enabled by Default) ### Diagnostic Sensors (Enabled by Default)
| Entity | Description | | Entity | Description |
| --------------- | ------------------------------------------ | | ----------------- | ------------------------------------------ |
| Data Expiration | Timestamp when current data expires | | Data Expiration | Timestamp when current data expires |
| Price Forecast | Formatted list of upcoming price intervals | | Price Forecast | Formatted list of upcoming price intervals |
@ -165,26 +124,22 @@ The following sensors are available but disabled by default. Enable them in `Set
- **Previous Interval Price** & **Previous Interval Price Level**: Historical data for the last 15-minute interval - **Previous Interval Price** & **Previous Interval Price Level**: Historical data for the last 15-minute interval
- **Previous Interval Price Rating**: Rating for the previous interval - **Previous Interval Price Rating**: Rating for the previous interval
- **Trailing 24h Average Price**: Average of the past 24 hours from now - **Trailing 24h Average Price**: Average of the past 24 hours from now
- **Trailing 24h Minimum/Maximum Price**: Min/max in the past 24 hours - **Trailing 24h Minimum/Maximum Price**: Min/max in the past 24 hours
> **Note**: Currency display is configurable during setup. Choose between: > **Note**: All monetary sensors use minor currency units (ct/kWh, øre/kWh, ¢/kWh, p/kWh) automatically based on your Tibber account's currency. Supported: EUR, NOK, SEK, DKK, USD, GBP.
> - **Base currency** (€/kWh, kr/kWh) - decimal values, differences visible from 3rd-4th decimal
> - **Subunit** (ct/kWh, øre/kWh) - larger values, differences visible from 1st decimal
>
> Smart defaults: EUR → subunit (German/Dutch preference), NOK/SEK/DKK → base (Scandinavian preference). Supported currencies: EUR, NOK, SEK, DKK, USD, GBP.
## Automation Examples> **Note:** See the [full automation examples guide](https://jpawlowski.github.io/hass.tibber_prices/user/automation-examples) for more advanced recipes. ## Automation Examples
### Run Appliances During Cheap Hours ### Run Appliances During Cheap Hours
Use the `binary_sensor.tibber_best_price_period` to automatically start appliances during detected best price periods: Use the `binary_sensor.tibber_best_price_interval` to automatically start appliances during the cheapest 15-minute periods:
```yaml ```yaml
automation: automation:
- alias: "Run Dishwasher During Cheap Hours" - alias: "Run Dishwasher During Cheap Hours"
trigger: trigger:
- platform: state - platform: state
entity_id: binary_sensor.tibber_best_price_period entity_id: binary_sensor.tibber_best_price_interval
to: "on" to: "on"
condition: condition:
- condition: time - condition: time
@ -196,8 +151,6 @@ automation:
entity_id: switch.dishwasher entity_id: switch.dishwasher
``` ```
> **Learn more:** The [period calculation guide](https://jpawlowski.github.io/hass.tibber_prices/user/period-calculation) explains how Best/Peak Price periods are identified and how you can configure filters (flexibility, minimum distance from average, price level filters with gap tolerance).
### Notify on Extremely High Prices ### Notify on Extremely High Prices
Get notified when prices reach the VERY_EXPENSIVE level: Get notified when prices reach the VERY_EXPENSIVE level:
@ -207,7 +160,7 @@ automation:
- alias: "Notify on Very Expensive Electricity" - alias: "Notify on Very Expensive Electricity"
trigger: trigger:
- platform: state - platform: state
entity_id: sensor.tibber_current_interval_price_level entity_id: sensor.tibber_current_price_level
to: "VERY_EXPENSIVE" to: "VERY_EXPENSIVE"
action: action:
- service: notify.mobile_app - service: notify.mobile_app
@ -225,7 +178,7 @@ automation:
- alias: "Reduce Heating During High Price Ratings" - alias: "Reduce Heating During High Price Ratings"
trigger: trigger:
- platform: numeric_state - platform: numeric_state
entity_id: sensor.tibber_current_interval_price_rating entity_id: sensor.tibber_current_price_rating
above: 20 # More than 20% above 24h average above: 20 # More than 20% above 24h average
action: action:
- service: climate.set_temperature - service: climate.set_temperature
@ -248,7 +201,7 @@ automation:
to: "on" to: "on"
condition: condition:
- condition: numeric_state - condition: numeric_state
entity_id: sensor.tibber_current_interval_price_rating entity_id: sensor.tibber_current_price_rating
below: -15 # At least 15% below average below: -15 # At least 15% below average
- condition: numeric_state - condition: numeric_state
entity_id: sensor.ev_battery_level entity_id: sensor.ev_battery_level
@ -284,9 +237,8 @@ automation:
### Currency or units showing incorrectly ### Currency or units showing incorrectly
- Currency is automatically detected from your Tibber account - Currency is automatically detected from your Tibber account
- Display mode (base currency vs. subunit) can be configured in integration options: `Settings > Devices & Services > Tibber Price Information & Ratings > Configure` - The integration supports EUR, NOK, SEK, DKK, USD, and GBP with appropriate minor units
- Supported currencies: EUR, NOK, SEK, DKK, USD, and GBP - Enable/disable major vs. minor unit sensors in `Settings > Devices & Services > Tibber Price Information & Ratings > Entities`
- Smart defaults apply: EUR users get subunit (ct), Scandinavian users get base currency (kr)
## Advanced Features ## Advanced Features
@ -295,18 +247,15 @@ automation:
Every sensor includes rich attributes beyond just the state value. These attributes provide context, timestamps, and additional data useful for automations and templates. Every sensor includes rich attributes beyond just the state value. These attributes provide context, timestamps, and additional data useful for automations and templates.
**Standard attributes available on most sensors:** **Standard attributes available on most sensors:**
- `timestamp` - ISO 8601 timestamp for the data point - `timestamp` - ISO 8601 timestamp for the data point
- `description` - Brief explanation of what the sensor represents - `description` - Brief explanation of what the sensor represents
- `level_id` and `level_value` - For price level sensors (e.g., `VERY_CHEAP` = -2) - `level_id` and `level_value` - For price level sensors (e.g., `VERY_CHEAP` = -2)
**Extended descriptions** (enable in integration options): **Extended descriptions** (enable in integration options):
- `long_description` - Detailed explanation of the sensor's purpose - `long_description` - Detailed explanation of the sensor's purpose
- `usage_tips` - Practical suggestions for using the sensor in automations - `usage_tips` - Practical suggestions for using the sensor in automations
**Example - Current Price sensor attributes:** **Example - Current Price sensor attributes:**
```yaml ```yaml
timestamp: "2025-11-03T14:15:00+01:00" timestamp: "2025-11-03T14:15:00+01:00"
description: "The current electricity price per kWh" description: "The current electricity price per kWh"
@ -315,7 +264,6 @@ usage_tips: "Use this to track prices or to create automations that run when ele
``` ```
**Example template using attributes:** **Example template using attributes:**
```yaml ```yaml
template: template:
- sensor: - sensor:
@ -326,47 +274,34 @@ template:
Price at {{ timestamp }}: {{ price }} ct/kWh Price at {{ timestamp }}: {{ price }} ct/kWh
``` ```
📖 **[View all sensors and attributes →](https://jpawlowski.github.io/hass.tibber_prices/user/sensors)** 📖 **[View all sensors and attributes →](docs/user/sensors.md)**
### Dynamic Icons & Visual Indicators ### Custom Services
All sensors feature dynamic icons that change based on price levels, providing instant visual feedback in your dashboards. The integration provides custom services for advanced use cases:
<img src="docs/user/static/img/entities-overview.jpg" width="400" alt="Entity list showing dynamic icons for different price states"> - `tibber_prices.get_price` - Fetch price data for specific days/times
- `tibber_prices.get_apexcharts_data` - Get formatted data for ApexCharts cards
_Dynamic icons adapt to price levels, trends, and period states - showing CHEAP prices, FALLING trend, and active Best Price Period_
📖 **[Dynamic Icons Guide →](https://jpawlowski.github.io/hass.tibber_prices/user/dynamic-icons)** | **[Icon Colors Guide →](https://jpawlowski.github.io/hass.tibber_prices/user/icon-colors)**
### Custom Actions
The integration provides custom actions (they still appear as services under the hood) for advanced use cases. These actions show up in Home Assistant under **Developer Tools → Actions**.
- `tibber_prices.get_chartdata` - Get price data in chart-friendly formats for any visualization card
- `tibber_prices.get_apexcharts_yaml` - Generate complete ApexCharts configurations - `tibber_prices.get_apexcharts_yaml` - Generate complete ApexCharts configurations
- `tibber_prices.refresh_user_data` - Manually refresh account information - `tibber_prices.refresh_user_data` - Manually refresh account information
📖 **[Action documentation and examples →](https://jpawlowski.github.io/hass.tibber_prices/user/actions)** 📖 **[Service documentation and examples →](docs/user/services.md)**
### Chart Visualizations (Optional) ### ApexCharts Integration
The integration includes built-in support for creating price visualization cards with automatic Y-axis scaling and color-coded series. The integration includes built-in support for creating beautiful price visualization cards. Use the `get_apexcharts_yaml` service to generate card configurations automatically.
<img src="docs/user/static/img/charts/rolling-window.jpg" width="600" alt="Example: Dynamic 48h rolling window chart"> 📖 **[ApexCharts examples →](docs/user/automation-examples.md#apexcharts-cards)**
_Optional: Dynamic 48h chart with automatic Y-axis scaling - generated via `get_apexcharts_yaml` action_
📖 **[Chart examples and setup guide →](https://jpawlowski.github.io/hass.tibber_prices/user/chart-examples)**
## 🤝 Contributing ## 🤝 Contributing
Contributions are welcome! Please read the [Contributing Guidelines](CONTRIBUTING.md) and [Developer Documentation](https://jpawlowski.github.io/hass.tibber_prices/developer/) before submitting pull requests. Contributions are welcome! Please read the [Contributing Guidelines](CONTRIBUTING.md) and [Developer Guide](docs/development/) before submitting pull requests.
### For Contributors ### For Contributors
- **[Developer Setup](https://jpawlowski.github.io/hass.tibber_prices/developer/setup)** - Get started with DevContainer - **[Developer Setup](docs/development/setup.md)** - Get started with DevContainer
- **[Architecture Guide](https://jpawlowski.github.io/hass.tibber_prices/developer/architecture)** - Understand the codebase - **[Architecture Guide](docs/development/architecture.md)** - Understand the codebase
- **[Release Management](https://jpawlowski.github.io/hass.tibber_prices/developer/release-management)** - Release process and versioning - **[Release Management](docs/development/release-management.md)** - Release process and versioning
## 🤖 Development Note ## 🤖 Development Note

View file

@ -3,19 +3,6 @@
homeassistant: homeassistant:
debug: true debug: true
# Disable analytics, diagnostics and error reporting for development instance
# https://www.home-assistant.io/integrations/analytics/
analytics:
# Disable usage analytics to prevent skewing production statistics
# https://analytics.home-assistant.io should only reflect real user installations
# https://www.home-assistant.io/integrations/system_health/
system_health:
# https://www.home-assistant.io/integrations/diagnostics/
# Note: Diagnostics integration cannot be disabled, but without analytics
# and with internal_url set, no data is sent externally
# Core integrations needed for development # Core integrations needed for development
http: http:
@ -25,49 +12,8 @@ script:
scene: scene:
energy:
# https://www.home-assistant.io/integrations/logger/ # https://www.home-assistant.io/integrations/logger/
logger: logger:
default: info default: info
logs: logs:
# Main integration logger - applies to ALL sub-loggers by default
custom_components.tibber_prices: debug custom_components.tibber_prices: debug
# Reduce verbosity for details loggers (change to 'debug' for deep debugging)
# API client details (raw requests/responses - very verbose!)
custom_components.tibber_prices.api.client.details: info
# Period calculation details (all set to 'info' by default, change to 'debug' as needed):
# Relaxation strategy details (flex levels, per-day results)
custom_components.tibber_prices.coordinator.period_handlers.relaxation.details: info
# Filter statistics and criteria checks
custom_components.tibber_prices.coordinator.period_handlers.period_building.details: info
# Outlier/spike detection details
custom_components.tibber_prices.coordinator.period_handlers.outlier_filtering.details: info
# Period overlap resolution details
custom_components.tibber_prices.coordinator.period_handlers.period_overlap.details: info
# Outlier flex capping
custom_components.tibber_prices.coordinator.period_handlers.core.details: info
# Level filtering details (min_distance scaling)
custom_components.tibber_prices.coordinator.period_handlers.level_filtering.details: info
# Interval pool details (cache operations, GC):
# Cache lookup/miss, gap detection, fetch group additions
custom_components.tibber_prices.interval_pool.manager.details: info
# Garbage collection details (eviction, dead interval cleanup)
custom_components.tibber_prices.interval_pool.garbage_collector.details: info
# Gap detection and API fetching details
custom_components.tibber_prices.interval_pool.fetcher.details: info
# API endpoint routing decisions
custom_components.tibber_prices.interval_pool.routing.details: info
# Cache fetch group operations
custom_components.tibber_prices.interval_pool.cache.details: info
# Index rebuild operations
custom_components.tibber_prices.interval_pool.index.details: info
# Storage save operations
custom_components.tibber_prices.interval_pool.storage.details: info
# API helpers details (response validation):
# Data emptiness checks, structure validation
custom_components.tibber_prices.api.helpers.details: info

View file

@ -7,23 +7,16 @@ https://github.com/jpawlowski/hass.tibber_prices
from __future__ import annotations from __future__ import annotations
from typing import TYPE_CHECKING, Any from typing import TYPE_CHECKING
import voluptuous as vol from homeassistant.config_entries import ConfigEntryState
from homeassistant.config_entries import ConfigEntry, ConfigEntryState
from homeassistant.const import CONF_ACCESS_TOKEN, Platform from homeassistant.const import CONF_ACCESS_TOKEN, Platform
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.storage import Store from homeassistant.helpers.storage import Store
from homeassistant.loader import async_get_loaded_integration from homeassistant.loader import async_get_loaded_integration
from .api import TibberPricesApiClient from .api import TibberPricesApiClient
from .const import ( from .const import (
CONF_CURRENCY_DISPLAY_MODE,
DATA_CHART_CONFIG,
DATA_CHART_METADATA_CONFIG,
DISPLAY_MODE_SUBUNIT,
DOMAIN, DOMAIN,
LOGGER, LOGGER,
async_load_standard_translations, async_load_standard_translations,
@ -31,12 +24,6 @@ from .const import (
) )
from .coordinator import STORAGE_VERSION, TibberPricesDataUpdateCoordinator from .coordinator import STORAGE_VERSION, TibberPricesDataUpdateCoordinator
from .data import TibberPricesData from .data import TibberPricesData
from .interval_pool import (
TibberPricesIntervalPool,
async_load_pool_state,
async_remove_pool_storage,
async_save_pool_state,
)
from .services import async_setup_services from .services import async_setup_services
if TYPE_CHECKING: if TYPE_CHECKING:
@ -47,142 +34,8 @@ if TYPE_CHECKING:
PLATFORMS: list[Platform] = [ PLATFORMS: list[Platform] = [
Platform.SENSOR, Platform.SENSOR,
Platform.BINARY_SENSOR, Platform.BINARY_SENSOR,
Platform.NUMBER,
Platform.SWITCH,
] ]
# Configuration schema for configuration.yaml
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Optional("chart_export"): vol.Schema(
{
vol.Optional("day"): vol.All(vol.Any(str, list), vol.Coerce(list)),
vol.Optional("resolution"): str,
vol.Optional("output_format"): str,
vol.Optional("subunit_currency"): bool,
vol.Optional("round_decimals"): vol.All(int, vol.Range(min=0, max=10)),
vol.Optional("include_level"): bool,
vol.Optional("include_rating_level"): bool,
vol.Optional("include_average"): bool,
vol.Optional("level_filter"): vol.All(vol.Any(str, list), vol.Coerce(list)),
vol.Optional("rating_level_filter"): vol.All(vol.Any(str, list), vol.Coerce(list)),
vol.Optional("period_filter"): str,
vol.Optional("insert_nulls"): str,
vol.Optional("add_trailing_null"): bool,
vol.Optional("array_fields"): str,
vol.Optional("start_time_field"): str,
vol.Optional("end_time_field"): str,
vol.Optional("price_field"): str,
vol.Optional("level_field"): str,
vol.Optional("rating_level_field"): str,
vol.Optional("average_field"): str,
vol.Optional("data_key"): str,
}
),
}
),
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass: HomeAssistant, config: dict[str, Any]) -> bool:
"""Set up the Tibber Prices component from configuration.yaml."""
# Store chart export configuration in hass.data for sensor access
if DOMAIN not in hass.data:
hass.data[DOMAIN] = {}
# Extract chart_export config if present
domain_config = config.get(DOMAIN, {})
chart_config = domain_config.get("chart_export", {})
if chart_config:
LOGGER.debug("Loaded chart_export configuration from configuration.yaml")
hass.data[DOMAIN][DATA_CHART_CONFIG] = chart_config
else:
LOGGER.debug("No chart_export configuration found in configuration.yaml")
hass.data[DOMAIN][DATA_CHART_CONFIG] = {}
# Extract chart_metadata config if present
chart_metadata_config = domain_config.get("chart_metadata", {})
if chart_metadata_config:
LOGGER.debug("Loaded chart_metadata configuration from configuration.yaml")
hass.data[DOMAIN][DATA_CHART_METADATA_CONFIG] = chart_metadata_config
else:
LOGGER.debug("No chart_metadata configuration found in configuration.yaml")
hass.data[DOMAIN][DATA_CHART_METADATA_CONFIG] = {}
return True
async def _migrate_config_options(hass: HomeAssistant, entry: ConfigEntry) -> None:
    """
    Migrate config options for backward compatibility.

    Ensures existing config entries get sensible defaults when new options
    are added. Runs automatically on integration startup and is a no-op
    when the entry already carries all migrated options.
    """
    options = dict(entry.options)

    # Migration: currency_display_mode for legacy (pre-v1.1.0) entries.
    # New configs get currency-appropriate defaults via get_default_options();
    # old entries without the option keep the legacy behavior where all
    # prices were shown in subunit currency (cents/øre).
    if CONF_CURRENCY_DISPLAY_MODE in options:
        return

    options[CONF_CURRENCY_DISPLAY_MODE] = DISPLAY_MODE_SUBUNIT
    LOGGER.info(
        "[%s] Migrated legacy config: Set currency_display_mode=%s (preserves pre-v1.1.0 behavior)",
        entry.title,
        DISPLAY_MODE_SUBUNIT,
    )
    hass.config_entries.async_update_entry(entry, options=options)
    LOGGER.debug("[%s] Config migration completed", entry.title)
def _get_access_token(hass: HomeAssistant, entry: ConfigEntry) -> str:
    """
    Resolve the access token for a config entry.

    Parent entries store the token directly in entry.data; for subentries
    the token is borrowed from another entry of this domain that carries
    one (the parent — HA Core manages the relationship internally).

    Args:
        hass: HomeAssistant instance
        entry: Config entry (parent or subentry)

    Returns:
        Access token string

    Raises:
        ConfigEntryAuthFailed: If no access token found
    """
    # Parent entries carry the token themselves.
    if CONF_ACCESS_TOKEN in entry.data:
        return entry.data[CONF_ACCESS_TOKEN]

    # Subentry: look for any other domain entry that holds a token.
    for candidate in hass.config_entries.async_entries(DOMAIN):
        if candidate.entry_id != entry.entry_id and CONF_ACCESS_TOKEN in candidate.data:
            return candidate.data[CONF_ACCESS_TOKEN]

    msg = f"No access token found for entry {entry.entry_id}"
    raise ConfigEntryAuthFailed(msg)
# https://developers.home-assistant.io/docs/config_entries_index/#setting-up-an-entry # https://developers.home-assistant.io/docs/config_entries_index/#setting-up-an-entry
async def async_setup_entry( async def async_setup_entry(
@ -191,10 +44,6 @@ async def async_setup_entry(
) -> bool: ) -> bool:
"""Set up this integration using UI.""" """Set up this integration using UI."""
LOGGER.debug(f"[tibber_prices] async_setup_entry called for entry_id={entry.entry_id}") LOGGER.debug(f"[tibber_prices] async_setup_entry called for entry_id={entry.entry_id}")
# Migrate config options if needed (e.g., set default currency display mode for existing configs)
await _migrate_config_options(hass, entry)
# Preload translations to populate the cache # Preload translations to populate the cache
await async_load_translations(hass, "en") await async_load_translations(hass, "en")
await async_load_standard_translations(hass, "en") await async_load_standard_translations(hass, "en")
@ -209,78 +58,26 @@ async def async_setup_entry(
integration = async_get_loaded_integration(hass, entry.domain) integration = async_get_loaded_integration(hass, entry.domain)
# Get access token (from this entry if parent, from parent if subentry)
access_token = _get_access_token(hass, entry)
# Create API client
api_client = TibberPricesApiClient(
access_token=access_token,
session=async_get_clientsession(hass),
version=str(integration.version) if integration.version else "unknown",
)
# Get home_id from config entry (required for single-home pool architecture)
home_id = entry.data.get("home_id")
if not home_id:
msg = f"[{entry.title}] Config entry missing home_id (required for interval pool)"
raise ConfigEntryAuthFailed(msg)
# Create or load interval pool for this config entry (single-home architecture)
pool_state = await async_load_pool_state(hass, entry.entry_id)
if pool_state:
interval_pool = TibberPricesIntervalPool.from_dict(
pool_state,
api=api_client,
hass=hass,
entry_id=entry.entry_id,
)
if interval_pool is None:
# Old multi-home format or corrupted → create new pool
LOGGER.info(
"[%s] Interval pool storage invalid/corrupted, creating new pool (will rebuild from API)",
entry.title,
)
interval_pool = TibberPricesIntervalPool(
home_id=home_id,
api=api_client,
hass=hass,
entry_id=entry.entry_id,
)
else:
LOGGER.debug("[%s] Interval pool restored from storage (auto-save enabled)", entry.title)
else:
interval_pool = TibberPricesIntervalPool(
home_id=home_id,
api=api_client,
hass=hass,
entry_id=entry.entry_id,
)
LOGGER.debug("[%s] Created new interval pool (auto-save enabled)", entry.title)
coordinator = TibberPricesDataUpdateCoordinator( coordinator = TibberPricesDataUpdateCoordinator(
hass=hass, hass=hass,
config_entry=entry, config_entry=entry,
api_client=api_client, version=str(integration.version) if integration.version else "unknown",
interval_pool=interval_pool,
) )
# CRITICAL: Load cache BEFORE first refresh to ensure user_data is available
# for metadata sensors (grid_company, estimated_annual_consumption, etc.)
# This prevents sensors from being marked as "unavailable" on first setup
await coordinator.load_cache()
entry.runtime_data = TibberPricesData( entry.runtime_data = TibberPricesData(
client=api_client, client=TibberPricesApiClient(
access_token=entry.data[CONF_ACCESS_TOKEN],
session=async_get_clientsession(hass),
version=str(integration.version) if integration.version else "unknown",
),
integration=integration, integration=integration,
coordinator=coordinator, coordinator=coordinator,
interval_pool=interval_pool,
) )
# https://developers.home-assistant.io/docs/integration_fetching_data#coordinated-single-api-poll-for-data-for-all-entities # https://developers.home-assistant.io/docs/integration_fetching_data#coordinated-single-api-poll-for-data-for-all-entities
if entry.state == ConfigEntryState.SETUP_IN_PROGRESS: if entry.state == ConfigEntryState.SETUP_IN_PROGRESS:
await coordinator.async_config_entry_first_refresh() await coordinator.async_config_entry_first_refresh()
# Note: Options update listener is registered in coordinator.__init__ entry.async_on_unload(entry.add_update_listener(async_reload_entry))
# (handles cache invalidation + refresh without full reload)
else: else:
await coordinator.async_refresh() await coordinator.async_refresh()
@ -294,15 +91,6 @@ async def async_unload_entry(
entry: TibberPricesConfigEntry, entry: TibberPricesConfigEntry,
) -> bool: ) -> bool:
"""Unload a config entry.""" """Unload a config entry."""
# Save interval pool state before unloading
if entry.runtime_data is not None and entry.runtime_data.interval_pool is not None:
pool_state = entry.runtime_data.interval_pool.to_dict()
await async_save_pool_state(hass, entry.entry_id, pool_state)
LOGGER.debug("[%s] Interval pool state saved on unload", entry.title)
# Shutdown interval pool (cancels background tasks)
await entry.runtime_data.interval_pool.async_shutdown()
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
if unload_ok and entry.runtime_data is not None: if unload_ok and entry.runtime_data is not None:
@ -311,7 +99,8 @@ async def async_unload_entry(
# Unregister services if this was the last config entry # Unregister services if this was the last config entry
if not hass.config_entries.async_entries(DOMAIN): if not hass.config_entries.async_entries(DOMAIN):
for service in [ for service in [
"get_chartdata", "get_price",
"get_apexcharts_data",
"get_apexcharts_yaml", "get_apexcharts_yaml",
"refresh_user_data", "refresh_user_data",
]: ]:
@ -326,15 +115,10 @@ async def async_remove_entry(
entry: TibberPricesConfigEntry, entry: TibberPricesConfigEntry,
) -> None: ) -> None:
"""Handle removal of an entry.""" """Handle removal of an entry."""
# Remove coordinator cache storage
if storage := Store(hass, STORAGE_VERSION, f"{DOMAIN}.{entry.entry_id}"): if storage := Store(hass, STORAGE_VERSION, f"{DOMAIN}.{entry.entry_id}"):
LOGGER.debug(f"[tibber_prices] async_remove_entry removing cache store for entry_id={entry.entry_id}") LOGGER.debug(f"[tibber_prices] async_remove_entry removing cache store for entry_id={entry.entry_id}")
await storage.async_remove() await storage.async_remove()
# Remove interval pool storage
await async_remove_pool_storage(hass, entry.entry_id)
LOGGER.debug(f"[tibber_prices] async_remove_entry removed interval pool storage for entry_id={entry.entry_id}")
async def async_reload_entry( async def async_reload_entry(
hass: HomeAssistant, hass: HomeAssistant,

View file

@ -0,0 +1,866 @@
"""Tibber API Client."""
from __future__ import annotations
import asyncio
import logging
import re
import socket
from datetime import timedelta
from enum import Enum
from typing import Any
import aiohttp
from homeassistant.const import __version__ as ha_version
from homeassistant.util import dt as dt_util
_LOGGER = logging.getLogger(__name__)
# HTTP status codes that the client maps to specific typed exceptions
# (see _verify_response_or_raise below).
HTTP_BAD_REQUEST = 400  # likely an invalid GraphQL query
HTTP_UNAUTHORIZED = 401  # bad or expired access token
HTTP_FORBIDDEN = 403  # insufficient permissions
HTTP_TOO_MANY_REQUESTS = 429  # rate limited (may carry Retry-After header)
class QueryType(Enum):
    """Types of queries that can be made to the API.

    The enum *values* double as the query_type strings passed to
    _is_data_empty() when a response is checked for missing data.
    """

    PRICE_INFO = "price_info"  # priceInfo + priceInfoRange query
    DAILY_RATING = "daily"  # priceRating.daily
    HOURLY_RATING = "hourly"  # priceRating.hourly
    MONTHLY_RATING = "monthly"  # priceRating.monthly
    USER = "user"  # viewer/account details
class TibberPricesApiClientError(Exception):
    """Exception to indicate a general API error.

    The class-level constants are message templates; call sites format
    them with str.format() before passing to the constructor, and the
    retry logic matches on substrings of these messages.
    """

    UNKNOWN_ERROR = "Unknown GraphQL error"
    MALFORMED_ERROR = "Malformed GraphQL error: {error}"
    GRAPHQL_ERROR = "GraphQL error: {message}"
    EMPTY_DATA_ERROR = "Empty data received for {query_type}"
    GENERIC_ERROR = "Something went wrong! {exception}"
    RATE_LIMIT_ERROR = "Rate limit exceeded. Please wait {retry_after} seconds before retrying"
    INVALID_QUERY_ERROR = "Invalid GraphQL query: {message}"
class TibberPricesApiClientCommunicationError(TibberPricesApiClientError):
    """Exception to indicate a communication error (timeout, DNS, transport)."""

    TIMEOUT_ERROR = "Timeout error fetching information - {exception}"
    CONNECTION_ERROR = "Error fetching information - {exception}"
class TibberPricesApiClientAuthenticationError(TibberPricesApiClientError):
    """Exception to indicate an authentication error (HTTP 401 / UNAUTHENTICATED); never retried."""

    INVALID_CREDENTIALS = "Invalid access token or expired credentials"
class TibberPricesApiClientPermissionError(TibberPricesApiClientError):
    """Exception to indicate insufficient permissions (HTTP 403 / FORBIDDEN); never retried."""

    INSUFFICIENT_PERMISSIONS = "Access forbidden - insufficient permissions for this operation"
def _verify_response_or_raise(response: aiohttp.ClientResponse) -> None:
    """Verify that the response is valid.

    Maps known HTTP statuses to typed exceptions (401 -> authentication,
    403 -> permission, 429 -> rate limit, 400 -> invalid query) and defers
    any other non-2xx status to aiohttp's raise_for_status().
    """
    if response.status == HTTP_UNAUTHORIZED:
        _LOGGER.error("Tibber API authentication failed - check access token")
        raise TibberPricesApiClientAuthenticationError(TibberPricesApiClientAuthenticationError.INVALID_CREDENTIALS)

    if response.status == HTTP_FORBIDDEN:
        _LOGGER.error("Tibber API access forbidden - insufficient permissions")
        raise TibberPricesApiClientPermissionError(TibberPricesApiClientPermissionError.INSUFFICIENT_PERMISSIONS)

    if response.status == HTTP_TOO_MANY_REQUESTS:
        # Check for Retry-After header that Tibber might send; the value is
        # embedded in the message so the retry logic can extract it later.
        retry_after = response.headers.get("Retry-After", "unknown")
        _LOGGER.warning("Tibber API rate limit exceeded - retry after %s seconds", retry_after)
        raise TibberPricesApiClientError(TibberPricesApiClientError.RATE_LIMIT_ERROR.format(retry_after=retry_after))

    if response.status == HTTP_BAD_REQUEST:
        _LOGGER.error("Tibber API rejected request - likely invalid GraphQL query")
        raise TibberPricesApiClientError(
            TibberPricesApiClientError.INVALID_QUERY_ERROR.format(message="Bad request - likely invalid GraphQL query")
        )

    response.raise_for_status()
async def _verify_graphql_response(response_json: dict, query_type: QueryType) -> None:
    """Verify the GraphQL response for errors and data completeness, including empty data.

    Raises a typed exception for GraphQL-level errors (authentication,
    permission, rate limit, validation, then generic), for a missing/None
    "data" object, and for responses whose data is empty according to
    _is_data_empty() — the latter lets callers retry on empty payloads.

    NOTE(review): declared async although the body performs no awaits;
    presumably kept for call-site symmetry.
    """
    if "errors" in response_json:
        errors = response_json["errors"]
        if not errors:
            _LOGGER.error("Tibber API returned empty errors array")
            raise TibberPricesApiClientError(TibberPricesApiClientError.UNKNOWN_ERROR)

        # Only the first error is inspected; any additional errors are ignored.
        error = errors[0]  # Take first error
        if not isinstance(error, dict):
            _LOGGER.error("Tibber API returned malformed error: %s", error)
            raise TibberPricesApiClientError(TibberPricesApiClientError.MALFORMED_ERROR.format(error=error))

        message = error.get("message", "Unknown error")
        extensions = error.get("extensions", {})
        error_code = extensions.get("code")

        # Handle specific Tibber API error codes
        if error_code == "UNAUTHENTICATED":
            _LOGGER.error("Tibber API authentication error: %s", message)
            raise TibberPricesApiClientAuthenticationError(TibberPricesApiClientAuthenticationError.INVALID_CREDENTIALS)
        if error_code == "FORBIDDEN":
            _LOGGER.error("Tibber API permission error: %s", message)
            raise TibberPricesApiClientPermissionError(TibberPricesApiClientPermissionError.INSUFFICIENT_PERMISSIONS)
        if error_code in ["RATE_LIMITED", "TOO_MANY_REQUESTS"]:
            # Some GraphQL APIs return rate limit info in extensions
            retry_after = extensions.get("retryAfter", "unknown")
            _LOGGER.warning(
                "Tibber API rate limited via GraphQL: %s (retry after %s)",
                message,
                retry_after,
            )
            raise TibberPricesApiClientError(
                TibberPricesApiClientError.RATE_LIMIT_ERROR.format(retry_after=retry_after)
            )
        if error_code in ["VALIDATION_ERROR", "GRAPHQL_VALIDATION_FAILED"]:
            _LOGGER.error("Tibber API validation error: %s", message)
            raise TibberPricesApiClientError(TibberPricesApiClientError.INVALID_QUERY_ERROR.format(message=message))

        # Unrecognized error code: surface as a generic GraphQL error.
        _LOGGER.error("Tibber API GraphQL error (code: %s): %s", error_code or "unknown", message)
        raise TibberPricesApiClientError(TibberPricesApiClientError.GRAPHQL_ERROR.format(message=message))

    if "data" not in response_json or response_json["data"] is None:
        _LOGGER.error("Tibber API response missing data object")
        raise TibberPricesApiClientError(
            TibberPricesApiClientError.GRAPHQL_ERROR.format(message="Response missing data object")
        )

    # Empty data check (for retry logic) - always check, regardless of query_type
    if _is_data_empty(response_json["data"], query_type.value):
        _LOGGER.debug("Empty data detected for query_type: %s", query_type)
        raise TibberPricesApiClientError(
            TibberPricesApiClientError.EMPTY_DATA_ERROR.format(query_type=query_type.value)
        )
def _is_data_empty(data: dict, query_type: str) -> bool:
    """
    Check if the response data is empty or incomplete.

    For viewer ("user") data:
    - viewer.userId must be present and non-None
    - viewer.homes must be a non-empty list

    For price info:
    - homes[0].currentSubscription must exist and be non-None
    - priceInfoRange must have edges (historical quarter-hourly data)
    - priceInfo.today must be a non-empty list
    - tomorrow can be empty if we have valid historical and today data

    For rating data ("daily"/"hourly"/"monthly"):
    - priceRating.<query_type>.entries must be a non-empty list

    Unknown query types are treated as non-empty; any KeyError/IndexError/
    TypeError raised while inspecting the structure is treated as empty.

    NOTE(review): only the first home (homes[0]) is inspected here, even
    though other code paths handle multiple homes — confirm intentional.
    """
    _LOGGER.debug("Checking if data is empty for query_type %s", query_type)

    is_empty = False
    try:
        if query_type == "user":
            has_user_id = (
                "viewer" in data
                and isinstance(data["viewer"], dict)
                and "userId" in data["viewer"]
                and data["viewer"]["userId"] is not None
            )
            has_homes = (
                "viewer" in data
                and isinstance(data["viewer"], dict)
                and "homes" in data["viewer"]
                and isinstance(data["viewer"]["homes"], list)
                and len(data["viewer"]["homes"]) > 0
            )
            is_empty = not has_user_id or not has_homes
            _LOGGER.debug(
                "Viewer check - has_user_id: %s, has_homes: %s, is_empty: %s",
                has_user_id,
                has_homes,
                is_empty,
            )
        elif query_type == "price_info":
            # Check for homes existence and non-emptiness before accessing
            subscription = None
            if (
                "viewer" not in data
                or "homes" not in data["viewer"]
                or not isinstance(data["viewer"]["homes"], list)
                or len(data["viewer"]["homes"]) == 0
                or "currentSubscription" not in data["viewer"]["homes"][0]
                or data["viewer"]["homes"][0]["currentSubscription"] is None
            ):
                _LOGGER.debug("Missing homes/currentSubscription in price_info check")
                is_empty = True
            else:
                subscription = data["viewer"]["homes"][0]["currentSubscription"]

                # Check priceInfoRange (192 quarter-hourly intervals)
                has_historical = (
                    "priceInfoRange" in subscription
                    and subscription["priceInfoRange"] is not None
                    and "edges" in subscription["priceInfoRange"]
                    and subscription["priceInfoRange"]["edges"]
                )

                # Check priceInfo for today's data
                has_price_info = "priceInfo" in subscription and subscription["priceInfo"] is not None
                has_today = (
                    has_price_info
                    and "today" in subscription["priceInfo"]
                    and subscription["priceInfo"]["today"] is not None
                    and len(subscription["priceInfo"]["today"]) > 0
                )

                # Data is empty if we don't have historical data or today's data
                is_empty = not has_historical or not has_today
                _LOGGER.debug(
                    "Price info check - priceInfoRange: %s, today: %s, is_empty: %s",
                    bool(has_historical),
                    bool(has_today),
                    is_empty,
                )
        elif query_type in ["daily", "hourly", "monthly"]:
            # Check for homes existence and non-emptiness before accessing
            if (
                "viewer" not in data
                or "homes" not in data["viewer"]
                or not isinstance(data["viewer"]["homes"], list)
                or len(data["viewer"]["homes"]) == 0
                or "currentSubscription" not in data["viewer"]["homes"][0]
                or data["viewer"]["homes"][0]["currentSubscription"] is None
                or "priceRating" not in data["viewer"]["homes"][0]["currentSubscription"]
            ):
                _LOGGER.debug("Missing homes/currentSubscription/priceRating in rating check")
                is_empty = True
            else:
                rating = data["viewer"]["homes"][0]["currentSubscription"]["priceRating"]

                # Check rating entries for the requested resolution
                has_entries = (
                    query_type in rating
                    and rating[query_type] is not None
                    and "entries" in rating[query_type]
                    and rating[query_type]["entries"] is not None
                    and len(rating[query_type]["entries"]) > 0
                )
                is_empty = not has_entries
                _LOGGER.debug(
                    "%s rating check - entries count: %d, is_empty: %s",
                    query_type,
                    len(rating[query_type]["entries"]) if has_entries else 0,
                    is_empty,
                )
        else:
            _LOGGER.debug("Unknown query type %s, treating as non-empty", query_type)
            is_empty = False
    except (KeyError, IndexError, TypeError) as error:
        # Defensive: a malformed structure counts as empty so callers retry.
        _LOGGER.debug("Error checking data emptiness: %s", error)
        is_empty = True

    return is_empty
def _prepare_headers(access_token: str, version: str) -> dict[str, str]:
    """Build the HTTP headers for a Tibber GraphQL request (auth + UA)."""
    user_agent = f"HomeAssistant/{ha_version} tibber_prices/{version}"
    headers = {
        "Authorization": f"Bearer {access_token}",
        "Accept": "application/json",
        "User-Agent": user_agent,
    }
    return headers
def _flatten_price_info(subscription: dict, currency: str | None = None) -> dict:
    """
    Transform and flatten priceInfo from full API data structure.

    priceInfoRange (192 quarter-hourly intervals) is scanned for entries
    whose local start date is yesterday; priceInfo supplies today/tomorrow
    as-is. Currency is stored as a separate attribute.
    """
    price_info = subscription.get("priceInfo", {})
    price_info_range = subscription.get("priceInfoRange", {})

    # Yesterday in the local timezone, via Home Assistant's dt_util.
    yesterday_local = dt_util.now().date() - timedelta(days=1)
    _LOGGER.debug("Processing data for yesterday's date: %s", yesterday_local)

    # Collect yesterday's quarter-hourly prices from the range edges.
    yesterday_prices = []
    if "edges" in price_info_range:
        for edge in price_info_range["edges"]:
            if "node" not in edge:
                _LOGGER.debug("Skipping edge without node: %s", edge)
                continue
            node = edge["node"]

            # Parse timestamp using dt_util for proper timezone handling.
            starts_at = dt_util.parse_datetime(node["startsAt"])
            if starts_at is None:
                _LOGGER.debug("Could not parse timestamp: %s", node["startsAt"])
                continue

            # Keep only entries whose local date is yesterday.
            if dt_util.as_local(starts_at).date() == yesterday_local:
                yesterday_prices.append(node)

    _LOGGER.debug("Found %d price entries for yesterday", len(yesterday_prices))

    return {
        "yesterday": yesterday_prices,
        "today": price_info.get("today", []),
        "tomorrow": price_info.get("tomorrow", []),
        "currency": currency,
    }
def _flatten_price_rating(subscription: dict) -> dict:
"""Extract and flatten priceRating from subscription, including currency."""
price_rating = subscription.get("priceRating", {})
def extract_entries_and_currency(rating: dict) -> tuple[list, str | None]:
if rating is None:
return [], None
return rating.get("entries", []), rating.get("currency")
hourly_entries, hourly_currency = extract_entries_and_currency(price_rating.get("hourly"))
daily_entries, daily_currency = extract_entries_and_currency(price_rating.get("daily"))
monthly_entries, monthly_currency = extract_entries_and_currency(price_rating.get("monthly"))
# Prefer hourly, then daily, then monthly for top-level currency
currency = hourly_currency or daily_currency or monthly_currency
return {
"hourly": hourly_entries,
"daily": daily_entries,
"monthly": monthly_entries,
"currency": currency,
}
class TibberPricesApiClient:
"""Tibber API Client."""
def __init__(
    self,
    access_token: str,
    session: aiohttp.ClientSession,
    version: str,
) -> None:
    """Initialize the Tibber API client.

    Args:
        access_token: Tibber personal access token (sent as Bearer auth).
        session: Shared aiohttp session used for all requests.
        version: Integration version, embedded in the User-Agent header.
    """
    self._access_token = access_token
    self._session = session
    self._version = version

    # Client-side throttling: at most 2 in-flight requests, spaced >= 1s apart.
    self._request_semaphore = asyncio.Semaphore(2)  # Max 2 concurrent requests
    self._last_request_time = dt_util.now()
    self._min_request_interval = timedelta(seconds=1)  # Min 1 second between requests

    # Retry policy (exponential backoff bases; see _should_retry_error).
    self._max_retries = 5
    self._retry_delay = 2  # Base retry delay in seconds

    # Timeout configuration - more granular control
    self._connect_timeout = 10  # Connection timeout in seconds
    self._request_timeout = 25  # Total request timeout in seconds
    self._socket_connect_timeout = 5  # Socket connection timeout
async def async_get_viewer_details(self) -> Any:
    """Fetch viewer (account) details: user id, name, login and homes.

    Also suitable as a connection/credentials test since it fails with
    an authentication error for an invalid token.
    """
    return await self._api_wrapper(
        data={
            "query": """
            {
                viewer {
                    userId
                    name
                    login
                    homes {
                        id
                        type
                        appNickname
                        address {
                            address1
                            postalCode
                            city
                            country
                        }
                    }
                }
            }
            """
        },
        query_type=QueryType.USER,
    )
async def async_get_price_info(self) -> dict:
    """Get price info data in flat format for all homes.

    Returns the raw GraphQL payload with an added "homes" mapping of
    home_id -> flattened price info (see _flatten_price_info). Homes
    without an active subscription map to an empty dict.
    """
    data = await self._api_wrapper(
        data={
            "query": """
            {viewer{homes{
                id
                consumption(resolution:DAILY,last:1){
                    pageInfo{currency}
                }
                currentSubscription{
                    priceInfoRange(resolution:QUARTER_HOURLY,last:192){
                        edges{node{
                            startsAt total energy tax level
                        }}
                    }
                    priceInfo(resolution:QUARTER_HOURLY){
                        today{startsAt total energy tax level}
                        tomorrow{startsAt total energy tax level}
                    }
                }
            }}}"""
        },
        query_type=QueryType.PRICE_INFO,
    )

    homes_data: dict = {}
    for home in data.get("viewer", {}).get("homes", []):
        home_id = home.get("id")
        if not home_id:
            continue

        # Fix: a home can report currentSubscription as null (key present,
        # value None); the previous `"currentSubscription" in home` check let
        # None through and _flatten_price_info crashed on None.get().
        subscription = home.get("currentSubscription")
        if not subscription:
            homes_data[home_id] = {}
            continue

        # Currency is only exposed via consumption pageInfo, not priceInfo.
        currency = None
        consumption = home.get("consumption")
        if consumption:
            page_info = consumption.get("pageInfo")
            if page_info:
                currency = page_info.get("currency")

        homes_data[home_id] = _flatten_price_info(subscription, currency)

    data["homes"] = homes_data
    return data
async def async_get_daily_price_rating(self) -> dict:
    """Get daily price rating data in flat format for all homes.

    Returns the raw GraphQL payload with an added "homes" mapping of
    home_id -> flattened rating (see _flatten_price_rating). Homes
    without an active subscription map to an empty dict.
    """
    data = await self._api_wrapper(
        data={
            "query": """
            {viewer{homes{id,currentSubscription{priceRating{
                daily{
                    currency
                    entries{time total energy tax difference level}
                }
            }}}}}"""
        },
        query_type=QueryType.DAILY_RATING,
    )

    homes_data: dict = {}
    for home in data.get("viewer", {}).get("homes", []):
        home_id = home.get("id")
        if not home_id:
            continue
        # Fix: currentSubscription can be null (key present, value None);
        # the previous membership check passed None to _flatten_price_rating,
        # which crashed on None.get().
        subscription = home.get("currentSubscription")
        homes_data[home_id] = _flatten_price_rating(subscription) if subscription else {}

    data["homes"] = homes_data
    return data
async def async_get_hourly_price_rating(self) -> dict:
    """Get hourly price rating data in flat format for all homes.

    Returns the raw GraphQL payload with an added "homes" mapping of
    home_id -> flattened rating (see _flatten_price_rating). Homes
    without an active subscription map to an empty dict.
    """
    data = await self._api_wrapper(
        data={
            "query": """
            {viewer{homes{id,currentSubscription{priceRating{
                hourly{
                    currency
                    entries{time total energy tax difference level}
                }
            }}}}}"""
        },
        query_type=QueryType.HOURLY_RATING,
    )

    homes_data: dict = {}
    for home in data.get("viewer", {}).get("homes", []):
        home_id = home.get("id")
        if not home_id:
            continue
        # Fix: currentSubscription can be null (key present, value None);
        # the previous membership check passed None to _flatten_price_rating,
        # which crashed on None.get().
        subscription = home.get("currentSubscription")
        homes_data[home_id] = _flatten_price_rating(subscription) if subscription else {}

    data["homes"] = homes_data
    return data
async def async_get_monthly_price_rating(self) -> dict:
    """Get monthly price rating data in flat format for all homes.

    Returns the raw GraphQL payload with an added "homes" mapping of
    home_id -> flattened rating (see _flatten_price_rating). Homes
    without an active subscription map to an empty dict.
    """
    data = await self._api_wrapper(
        data={
            "query": """
            {viewer{homes{id,currentSubscription{priceRating{
                monthly{
                    currency
                    entries{time total energy tax difference level}
                }
            }}}}}"""
        },
        query_type=QueryType.MONTHLY_RATING,
    )

    homes_data: dict = {}
    for home in data.get("viewer", {}).get("homes", []):
        home_id = home.get("id")
        if not home_id:
            continue
        # Fix: currentSubscription can be null (key present, value None);
        # the previous membership check passed None to _flatten_price_rating,
        # which crashed on None.get().
        subscription = home.get("currentSubscription")
        homes_data[home_id] = _flatten_price_rating(subscription) if subscription else {}

    data["homes"] = homes_data
    return data
async def async_get_data(self) -> dict:
    """Get all data from the API by combining multiple queries in flat format for all homes.

    Runs the four queries sequentially (spacing between requests is
    enforced by _handle_request) and merges them into
    {"homes": {home_id: {"priceInfo": ..., "priceRating": ...}}}.
    """
    price_info = await self.async_get_price_info()
    daily_rating = await self.async_get_daily_price_rating()
    hourly_rating = await self.async_get_hourly_price_rating()
    monthly_rating = await self.async_get_monthly_price_rating()

    # Union of home ids across all four responses, so a home missing from
    # one query still gets an entry (with empty sections).
    all_home_ids = set()
    all_home_ids.update(price_info.get("homes", {}).keys())
    all_home_ids.update(daily_rating.get("homes", {}).keys())
    all_home_ids.update(hourly_rating.get("homes", {}).keys())
    all_home_ids.update(monthly_rating.get("homes", {}).keys())

    homes_combined = {}
    for home_id in all_home_ids:
        daily_data = daily_rating.get("homes", {}).get(home_id, {})
        hourly_data = hourly_rating.get("homes", {}).get(home_id, {})
        monthly_data = monthly_rating.get("homes", {}).get(home_id, {})
        price_rating = {
            # NOTE(review): _flatten_price_rating never emits a
            # "thresholdPercentages" key, so this is always None here —
            # confirm whether the rating queries should request it.
            "thresholdPercentages": daily_data.get("thresholdPercentages"),
            "daily": daily_data.get("daily", []),
            "hourly": hourly_data.get("hourly", []),
            "monthly": monthly_data.get("monthly", []),
            # First available currency wins: daily, then hourly, then monthly.
            "currency": (daily_data.get("currency") or hourly_data.get("currency") or monthly_data.get("currency")),
        }
        homes_combined[home_id] = {
            "priceInfo": price_info.get("homes", {}).get(home_id, {}),
            "priceRating": price_rating,
        }

    return {"homes": homes_combined}
async def _make_request(
    self,
    headers: dict[str, str],
    data: dict,
    query_type: QueryType,
) -> dict[str, Any]:
    """Make an API request with comprehensive error handling for network issues.

    POSTs the GraphQL payload to the Tibber endpoint, validates the HTTP
    status and the GraphQL body, and returns response_json["data"].
    Transport-level failures (HTTP, connection, disconnect, timeout) are
    normalized to TibberPricesApiClientCommunicationError; DNS and OS-level
    errors are routed through dedicated handlers that always raise.
    """
    _LOGGER.debug("Making API request with data: %s", data)
    try:
        # More granular timeout configuration for better network failure handling
        timeout = aiohttp.ClientTimeout(
            total=self._request_timeout,  # Total request timeout: 25s
            connect=self._connect_timeout,  # Connection timeout: 10s
            sock_connect=self._socket_connect_timeout,  # Socket connection: 5s
        )

        response = await self._session.request(
            method="POST",
            url="https://api.tibber.com/v1-beta/gql",
            headers=headers,
            json=data,
            timeout=timeout,
        )
        # Raises for known HTTP error statuses before the body is read.
        _verify_response_or_raise(response)
        response_json = await response.json()
        _LOGGER.debug("Received API response: %s", response_json)
        # Raises for GraphQL-level errors and empty data.
        await _verify_graphql_response(response_json, query_type)
        return response_json["data"]

    except aiohttp.ClientResponseError as error:
        _LOGGER.exception("HTTP error during API request")
        raise TibberPricesApiClientCommunicationError(
            TibberPricesApiClientCommunicationError.CONNECTION_ERROR.format(exception=str(error))
        ) from error
    except aiohttp.ClientConnectorError as error:
        _LOGGER.exception("Connection error - server unreachable or network down")
        raise TibberPricesApiClientCommunicationError(
            TibberPricesApiClientCommunicationError.CONNECTION_ERROR.format(exception=str(error))
        ) from error
    except aiohttp.ServerDisconnectedError as error:
        _LOGGER.exception("Server disconnected during request")
        raise TibberPricesApiClientCommunicationError(
            TibberPricesApiClientCommunicationError.CONNECTION_ERROR.format(exception=str(error))
        ) from error
    except TimeoutError as error:
        _LOGGER.exception(
            "Request timeout after %d seconds - slow network or server overload",
            self._request_timeout,
        )
        raise TibberPricesApiClientCommunicationError(
            TibberPricesApiClientCommunicationError.TIMEOUT_ERROR.format(exception=str(error))
        ) from error
    except socket.gaierror as error:
        # Handler always raises; the bare raise satisfies the type checker.
        self._handle_dns_error(error)
        raise  # Ensure type checker knows this path always raises
    except OSError as error:
        # Handler always raises; the bare raise satisfies the type checker.
        self._handle_network_error(error)
        raise  # Ensure type checker knows this path always raises
def _handle_dns_error(self, error: socket.gaierror) -> None:
    """Handle DNS resolution errors with IPv4/IPv6 dual stack considerations.

    Logs a diagnostic message chosen by matching the error text, then
    always re-raises as TibberPricesApiClientCommunicationError — this
    method never returns normally.
    """
    error_msg = str(error)
    if "Name or service not known" in error_msg:
        _LOGGER.exception("DNS resolution failed - domain name not found")
    elif "Temporary failure in name resolution" in error_msg:
        _LOGGER.exception("DNS resolution temporarily failed - network or DNS server issue")
    elif "Address family for hostname not supported" in error_msg:
        _LOGGER.exception("DNS resolution failed - IPv4/IPv6 address family not supported")
    elif "No address associated with hostname" in error_msg:
        _LOGGER.exception("DNS resolution failed - no IPv4/IPv6 addresses found")
    else:
        _LOGGER.exception("DNS resolution failed - check internet connection: %s", error_msg)

    raise TibberPricesApiClientCommunicationError(
        TibberPricesApiClientCommunicationError.CONNECTION_ERROR.format(exception=str(error))
    ) from error
def _handle_network_error(self, error: OSError) -> None:
    """Handle network-level errors with IPv4/IPv6 dual stack considerations.

    Logs a diagnostic message chosen from the errno (preferred) or the
    error text, then always re-raises as
    TibberPricesApiClientCommunicationError — this method never returns
    normally.
    """
    error_msg = str(error)
    errno = getattr(error, "errno", None)

    # Common IPv4/IPv6 dual stack network error codes (Linux errno values)
    errno_network_unreachable = 101  # ENETUNREACH
    errno_host_unreachable = 113  # EHOSTUNREACH
    errno_connection_refused = 111  # ECONNREFUSED
    errno_connection_timeout = 110  # ETIMEDOUT

    if errno == errno_network_unreachable:
        _LOGGER.exception("Network unreachable - check internet connection or IPv4/IPv6 routing")
    elif errno == errno_host_unreachable:
        _LOGGER.exception("Host unreachable - routing issue or IPv4/IPv6 connectivity problem")
    elif errno == errno_connection_refused:
        _LOGGER.exception("Connection refused - server not accepting connections")
    elif errno == errno_connection_timeout:
        _LOGGER.exception("Connection timed out - network latency or server overload")
    elif "Address family not supported" in error_msg:
        _LOGGER.exception("Address family not supported - IPv4/IPv6 configuration issue")
    elif "Protocol not available" in error_msg:
        _LOGGER.exception("Protocol not available - IPv4/IPv6 stack configuration issue")
    elif "Network is down" in error_msg:
        _LOGGER.exception("Network interface is down - check network adapter")
    elif "Permission denied" in error_msg:
        _LOGGER.exception("Network permission denied - firewall or security restriction")
    else:
        _LOGGER.exception("Network error - internet may be down: %s", error_msg)

    raise TibberPricesApiClientCommunicationError(
        TibberPricesApiClientCommunicationError.CONNECTION_ERROR.format(exception=str(error))
    ) from error
async def _handle_request(
    self,
    headers: dict[str, str],
    data: dict,
    query_type: QueryType,
) -> Any:
    """Serialize an API call through the semaphore, enforcing minimum spacing.

    At most two requests run concurrently; consecutive requests are kept
    at least self._min_request_interval apart before delegating to
    _make_request.
    """
    async with self._request_semaphore:
        elapsed = dt_util.now() - self._last_request_time
        if elapsed < self._min_request_interval:
            wait_seconds = (self._min_request_interval - elapsed).total_seconds()
            _LOGGER.debug(
                "Rate limiting: waiting %s seconds before next request",
                wait_seconds,
            )
            await asyncio.sleep(wait_seconds)

        self._last_request_time = dt_util.now()
        return await self._make_request(
            headers,
            data or {},
            query_type,
        )
def _should_retry_error(self, error: Exception, retry: int) -> tuple[bool, int]:
    """Decide whether an error warrants another attempt and the delay before it.

    Returns (retryable, delay_seconds). Yields (False, 0) once retries are
    exhausted, for authentication/permission errors, and for unknown
    exception types; API-specific errors are delegated to
    _handle_api_error_retry.
    """
    # Retries exhausted - give up regardless of error type.
    if retry >= self._max_retries:
        return False, 0

    # Credentials/permission problems never succeed on retry.
    never_retry = (
        TibberPricesApiClientAuthenticationError,
        TibberPricesApiClientPermissionError,
    )
    if isinstance(error, never_retry):
        return False, 0

    # API-level errors have their own message-based policy.
    if isinstance(error, TibberPricesApiClientError):
        return self._handle_api_error_retry(error, retry)

    # Transport/timeout errors: exponential backoff capped at 30 seconds.
    if isinstance(error, (aiohttp.ClientError, socket.gaierror, TimeoutError)):
        return True, min(self._retry_delay * (2**retry), 30)

    # Anything else is unexpected - do not retry.
    return False, 0
def _handle_api_error_retry(self, error: TibberPricesApiClientError, retry: int) -> tuple[bool, int]:
"""Handle retry logic for API-specific errors."""
error_msg = str(error)
# Non-retryable: Invalid queries
if "Invalid GraphQL query" in error_msg or "Bad request" in error_msg:
return False, 0
# Rate limits - special handling with extracted delay
if "Rate limit exceeded" in error_msg or "rate limited" in error_msg.lower():
delay = self._extract_retry_delay(error, retry)
return True, delay
# Empty data - retryable with capped exponential backoff
if "Empty data received" in error_msg:
delay = min(self._retry_delay * (2**retry), 60) # Cap at 60 seconds
return True, delay
# Other API errors - retryable with capped exponential backoff
delay = min(self._retry_delay * (2**retry), 30) # Cap at 30 seconds
return True, delay
def _extract_retry_delay(self, error: Exception, retry: int) -> int:
"""Extract retry delay from rate limit error or use exponential backoff."""
error_msg = str(error)
# Try to extract Retry-After value from error message
retry_after_match = re.search(r"retry after (\d+) seconds", error_msg.lower())
if retry_after_match:
try:
retry_after = int(retry_after_match.group(1))
return min(retry_after + 1, 300) # Add buffer, max 5 minutes
except ValueError:
pass
# Try to extract generic seconds value
seconds_match = re.search(r"(\d+) seconds", error_msg)
if seconds_match:
try:
seconds = int(seconds_match.group(1))
return min(seconds + 1, 300) # Add buffer, max 5 minutes
except ValueError:
pass
# Fall back to exponential backoff with cap
base_delay = self._retry_delay * (2**retry)
return min(base_delay, 120) # Cap at 2 minutes for rate limits
async def _api_wrapper(
    self,
    data: dict | None = None,
    headers: dict | None = None,
    query_type: QueryType = QueryType.USER,
) -> Any:
    """Get information from the API with rate limiting and retry logic.

    Args:
        data: GraphQL payload; defaults to an empty dict.
        headers: HTTP headers; generated from the access token when omitted.
        query_type: Query classification used for response validation.

    Returns:
        Parsed response data.

    Raises:
        TibberPricesApiClientAuthenticationError: Invalid credentials (never retried).
        TibberPricesApiClientPermissionError: Insufficient permissions (never retried).
        TibberPricesApiClientCommunicationError: Network/timeout failures after retries.
        TibberPricesApiClientError: Other API errors after retries.
    """
    headers = headers or _prepare_headers(self._access_token, self._version)
    last_error: Exception | None = None
    for retry in range(self._max_retries + 1):
        try:
            return await self._handle_request(headers, data or {}, query_type)
        except (
            TibberPricesApiClientAuthenticationError,
            TibberPricesApiClientPermissionError,
        ):
            _LOGGER.exception("Non-retryable error occurred")
            raise
        except (
            TibberPricesApiClientError,
            aiohttp.ClientError,
            socket.gaierror,
            TimeoutError,
        ) as error:
            # Normalize transport-level failures into a communication error
            # so callers only see the integration's exception hierarchy.
            last_error = (
                error
                if isinstance(error, TibberPricesApiClientError)
                else TibberPricesApiClientCommunicationError(
                    TibberPricesApiClientCommunicationError.CONNECTION_ERROR.format(exception=str(error))
                )
            )
            should_retry, delay = self._should_retry_error(error, retry)
            if not should_retry:
                if "Invalid GraphQL query" in str(error):
                    _LOGGER.exception("Invalid query - not retrying")
                    raise
                # BUGFIX: previously the except block fell through here and
                # the for-loop re-attempted non-retryable errors with zero
                # delay. Stop immediately and surface the error below.
                break
            error_type = self._get_error_type(error)
            _LOGGER.warning(
                "Tibber %s error, attempt %d/%d. Retrying in %d seconds: %s",
                error_type,
                retry + 1,
                self._max_retries,
                delay,
                str(error),
            )
            await asyncio.sleep(delay)
    # Loop exhausted (or broke out) without success: raise the last error,
    # mapping raw transport errors onto communication errors.
    if isinstance(last_error, TimeoutError):
        raise TibberPricesApiClientCommunicationError(
            TibberPricesApiClientCommunicationError.TIMEOUT_ERROR.format(exception=last_error)
        ) from last_error
    if isinstance(last_error, (aiohttp.ClientError, socket.gaierror)):
        raise TibberPricesApiClientCommunicationError(
            TibberPricesApiClientCommunicationError.CONNECTION_ERROR.format(exception=last_error)
        ) from last_error
    raise last_error or TibberPricesApiClientError(TibberPricesApiClientError.UNKNOWN_ERROR)
def _get_error_type(self, error: Exception) -> str:
"""Get a descriptive error type for logging."""
if "Rate limit" in str(error):
return "rate limit"
if isinstance(error, (aiohttp.ClientError, socket.gaierror, TimeoutError)):
return "network"
return "API"

View file

@ -1,31 +0,0 @@
"""
Tibber GraphQL API client package.
This package handles all communication with Tibber's GraphQL API:
- GraphQL query construction and execution
- Authentication and session management
- Error handling and retry logic
- Response parsing and validation
Main components:
- client.py: TibberPricesApiClient (aiohttp-based GraphQL client)
- queries.py: GraphQL query definitions
- exceptions.py: API-specific error classes
- helpers.py: Response parsing utilities
"""
from .client import TibberPricesApiClient
from .exceptions import (
TibberPricesApiClientAuthenticationError,
TibberPricesApiClientCommunicationError,
TibberPricesApiClientError,
TibberPricesApiClientPermissionError,
)
__all__ = [
"TibberPricesApiClient",
"TibberPricesApiClientAuthenticationError",
"TibberPricesApiClientCommunicationError",
"TibberPricesApiClientError",
"TibberPricesApiClientPermissionError",
]

View file

@ -1,970 +0,0 @@
"""Tibber API Client."""
from __future__ import annotations
import asyncio
import base64
import logging
import re
import socket
from datetime import datetime, timedelta
from typing import TYPE_CHECKING, Any
from zoneinfo import ZoneInfo
import aiohttp
from homeassistant.util import dt as dt_utils
from .exceptions import (
TibberPricesApiClientAuthenticationError,
TibberPricesApiClientCommunicationError,
TibberPricesApiClientError,
TibberPricesApiClientPermissionError,
)
from .helpers import (
flatten_price_info,
prepare_headers,
verify_graphql_response,
verify_response_or_raise,
)
from .queries import TibberPricesQueryType
if TYPE_CHECKING:
from custom_components.tibber_prices.coordinator.time_service import TibberPricesTimeService
_LOGGER = logging.getLogger(__name__)
_LOGGER_API_DETAILS = logging.getLogger(__name__ + ".details")
class TibberPricesApiClient:
"""Tibber API Client."""
def __init__(
    self,
    access_token: str,
    session: aiohttp.ClientSession,
    version: str,
) -> None:
    """Initialize the Tibber API client.

    Args:
        access_token: Tibber personal access token.
        session: Shared aiohttp session used for all requests.
        version: Integration version string sent with requests.
    """
    self._access_token = access_token
    self._session = session
    self._version = version

    # Concurrency / rate-limiting state.
    self._request_semaphore = asyncio.Semaphore(2)  # at most 2 in-flight requests
    self._last_request_time = None  # set on first request
    self._min_request_interval = timedelta(seconds=1)  # spacing between requests

    # Optional time service, injected by the coordinator after construction
    # (absent during config flow).
    self.time: TibberPricesTimeService | None = None

    # Retry policy.
    self._max_retries = 5
    self._retry_delay = 2  # base retry delay in seconds

    # Timeouts (seconds) - granular control over network failure handling.
    self._connect_timeout = 10  # connection establishment
    self._request_timeout = 25  # total request budget
    self._socket_connect_timeout = 5  # raw socket connect
async def async_get_viewer_details(self) -> Any:
"""Get comprehensive viewer and home details from Tibber API."""
return await self._api_wrapper(
data={
"query": """
{
viewer {
userId
name
login
accountType
homes {
id
type
appNickname
appAvatar
size
timeZone
mainFuseSize
numberOfResidents
primaryHeatingSource
hasVentilationSystem
address {
address1
address2
address3
postalCode
city
country
latitude
longitude
}
owner {
id
firstName
lastName
isCompany
name
contactInfo {
email
mobile
}
language
}
meteringPointData {
consumptionEan
gridCompany
gridAreaCode
priceAreaCode
productionEan
energyTaxType
vatType
estimatedAnnualConsumption
}
currentSubscription {
id
status
validFrom
validTo
priceInfo {
current {
currency
}
}
}
features {
realTimeConsumptionEnabled
}
}
}
}
"""
},
query_type=TibberPricesQueryType.USER,
)
async def async_get_price_info_for_range(
self,
home_id: str,
user_data: dict[str, Any],
start_time: datetime,
end_time: datetime,
) -> dict:
"""
Get price info for a specific time range with automatic routing.
This is a convenience wrapper around interval_pool.get_price_intervals_for_range().
Args:
home_id: Home ID to fetch price data for.
user_data: User data dict containing home metadata (including timezone).
start_time: Start of the range (inclusive, timezone-aware).
end_time: End of the range (exclusive, timezone-aware).
Returns:
Dict with "home_id" and "price_info" (list of intervals).
Raises:
TibberPricesApiClientError: If arguments invalid or requests fail.
"""
# Import here to avoid circular dependency (interval_pool imports TibberPricesApiClient)
from custom_components.tibber_prices.interval_pool import ( # noqa: PLC0415
get_price_intervals_for_range,
)
price_info = await get_price_intervals_for_range(
api_client=self,
home_id=home_id,
user_data=user_data,
start_time=start_time,
end_time=end_time,
)
return {
"home_id": home_id,
"price_info": price_info,
}
async def async_get_price_info(self, home_id: str, user_data: dict[str, Any]) -> dict:
"""
Get price info for a single home.
Uses timezone-aware cursor calculation based on the home's actual timezone
from Tibber API (not HA system timezone). This ensures correct "day before yesterday
midnight" calculation for homes in different timezones.
Args:
home_id: Home ID to fetch price data for.
user_data: User data dict containing home metadata (including timezone).
REQUIRED - must be fetched before calling this method.
Returns:
Dict with "home_id", "price_info", and other home data.
Raises:
TibberPricesApiClientError: If TimeService not initialized or user_data missing.
"""
if not self.time:
msg = "TimeService not initialized - required for price info processing"
raise TibberPricesApiClientError(msg)
if not user_data:
msg = "User data required for timezone-aware price fetching - fetch user data first"
raise TibberPricesApiClientError(msg)
if not home_id:
msg = "Home ID is required"
raise TibberPricesApiClientError(msg)
# Build home_id -> timezone mapping from user_data
home_timezones = self._extract_home_timezones(user_data)
# Get timezone for this home (fallback to HA system timezone)
home_tz = home_timezones.get(home_id)
# Calculate cursor: day before yesterday midnight in home's timezone
cursor = self._calculate_cursor_for_home(home_tz)
# Simple single-home query (no alias needed)
query = f"""
{{viewer{{
home(id: "{home_id}") {{
id
currentSubscription {{
priceInfoRange(resolution:QUARTER_HOURLY, first:192, after: "{cursor}") {{
pageInfo{{ count }}
edges{{node{{
startsAt total level
}}}}
}}
priceInfo(resolution:QUARTER_HOURLY) {{
today{{startsAt total level}}
tomorrow{{startsAt total level}}
}}
}}
}}
}}}}
"""
_LOGGER.debug("Fetching price info for home %s", home_id)
data = await self._api_wrapper(
data={"query": query},
query_type=TibberPricesQueryType.PRICE_INFO,
)
# Parse response
viewer = data.get("viewer", {})
home = viewer.get("home")
if not home:
msg = f"Home {home_id} not found in API response"
_LOGGER.warning(msg)
return {"home_id": home_id, "price_info": []}
if "currentSubscription" in home and home["currentSubscription"] is not None:
price_info = flatten_price_info(home["currentSubscription"])
else:
_LOGGER.warning(
"Home %s has no active subscription - price data will be unavailable",
home_id,
)
price_info = []
return {
"home_id": home_id,
"price_info": price_info,
}
async def async_get_price_info_range(
self,
home_id: str,
user_data: dict[str, Any],
start_time: datetime,
end_time: datetime,
) -> dict:
"""
Get historical price info for a specific time range using priceInfoRange endpoint.
Uses the priceInfoRange GraphQL endpoint for flexible historical data queries.
Intended for intervals BEFORE "day before yesterday midnight" (outside PRICE_INFO scope).
Automatically handles API pagination if Tibber limits batch size.
Args:
home_id: Home ID to fetch price data for.
user_data: User data dict containing home metadata (including timezone).
start_time: Start of the range (inclusive, timezone-aware).
end_time: End of the range (exclusive, timezone-aware).
Returns:
Dict with "home_id" and "price_info" (list of intervals).
Raises:
TibberPricesApiClientError: If arguments invalid or request fails.
"""
if not user_data:
msg = "User data required for timezone-aware price fetching - fetch user data first"
raise TibberPricesApiClientError(msg)
if not home_id:
msg = "Home ID is required"
raise TibberPricesApiClientError(msg)
if start_time >= end_time:
msg = f"Invalid time range: start_time ({start_time}) must be before end_time ({end_time})"
raise TibberPricesApiClientError(msg)
_LOGGER_API_DETAILS.debug(
"fetch_price_info_range called with: start_time=%s (type=%s, tzinfo=%s), end_time=%s (type=%s, tzinfo=%s)",
start_time,
type(start_time),
start_time.tzinfo,
end_time,
type(end_time),
end_time.tzinfo,
)
# Calculate cursor and interval count
start_cursor = self._encode_cursor(start_time)
interval_count = self._calculate_interval_count(start_time, end_time)
_LOGGER_API_DETAILS.debug(
"Calculated cursor for range: start_time=%s, cursor_time=%s, encoded=%s",
start_time,
start_time,
start_cursor,
)
# Fetch all intervals with automatic paging
price_info = await self._fetch_price_info_with_paging(
home_id=home_id,
start_cursor=start_cursor,
interval_count=interval_count,
)
return {
"home_id": home_id,
"price_info": price_info,
}
def _calculate_interval_count(self, start_time: datetime, end_time: datetime) -> int:
    """Compute how many price intervals span ``[start_time, end_time)``.

    Tibber switched from hourly to quarter-hourly resolution on 2025-10-01;
    ranges starting before that date are counted in hours, later ranges in
    15-minute steps.
    """
    span_seconds = (end_time - start_time).total_seconds()
    resolution_switch = datetime(2025, 10, 1, tzinfo=start_time.tzinfo)

    if start_time < resolution_switch:
        count = int(span_seconds / 3600)  # hourly intervals (3600 s each)
        _LOGGER_API_DETAILS.debug(
            "Time range is pre-2025-10-01: expecting hourly intervals (count: %d)",
            count,
        )
    else:
        count = int(span_seconds / 900)  # quarter-hourly intervals (900 s each)
        _LOGGER_API_DETAILS.debug(
            "Time range is post-2025-10-01: expecting quarter-hourly intervals (count: %d)",
            count,
        )
    return count
async def _fetch_price_info_with_paging(
self,
home_id: str,
start_cursor: str,
interval_count: int,
) -> list[dict[str, Any]]:
"""
Fetch price info with automatic pagination if API limits batch size.
GraphQL Cursor Pagination:
- endCursor points to the last returned element (inclusive)
- Use 'after: endCursor' to get elements AFTER that cursor
- If count < requested, more pages available
Args:
home_id: Home ID to fetch price data for.
start_cursor: Base64-encoded start cursor.
interval_count: Total number of intervals to fetch.
Returns:
List of all price interval dicts across all pages.
"""
price_info = []
remaining_intervals = interval_count
cursor = start_cursor
page = 0
while remaining_intervals > 0:
page += 1
# Fetch one page
page_data = await self._fetch_single_page(
home_id=home_id,
cursor=cursor,
requested_count=remaining_intervals,
page=page,
)
if not page_data:
break
# Extract intervals and pagination info
page_intervals = page_data["intervals"]
returned_count = page_data["count"]
end_cursor = page_data["end_cursor"]
has_next_page = page_data.get("has_next_page", False)
price_info.extend(page_intervals)
_LOGGER_API_DETAILS.debug(
"Page %d: Received %d intervals for home %s (total so far: %d/%d, endCursor=%s, hasNextPage=%s)",
page,
returned_count,
home_id,
len(price_info),
interval_count,
end_cursor,
has_next_page,
)
# Update remaining count
remaining_intervals -= returned_count
# Check if we need more pages
# Continue if: (1) we still need more intervals AND (2) API has more data
if remaining_intervals > 0 and end_cursor:
cursor = end_cursor
_LOGGER_API_DETAILS.debug(
"Still need %d more intervals - fetching next page with cursor %s",
remaining_intervals,
cursor,
)
else:
# Done: Either we have all intervals we need, or API has no more data
if remaining_intervals > 0:
_LOGGER.warning(
"API has no more data - received %d out of %d requested intervals (missing %d)",
len(price_info),
interval_count,
remaining_intervals,
)
else:
_LOGGER.debug(
"Pagination complete - received all %d requested intervals",
interval_count,
)
break
_LOGGER_API_DETAILS.debug(
"Fetched %d total historical intervals for home %s across %d page(s)",
len(price_info),
home_id,
page,
)
return price_info
async def _fetch_single_page(
self,
home_id: str,
cursor: str,
requested_count: int,
page: int,
) -> dict[str, Any] | None:
"""
Fetch a single page of price intervals.
Args:
home_id: Home ID to fetch price data for.
cursor: Base64-encoded cursor for this page.
requested_count: Number of intervals to request.
page: Page number (for logging).
Returns:
Dict with "intervals", "count", and "end_cursor" keys, or None if no data.
"""
query = f"""
{{viewer{{
home(id: "{home_id}") {{
id
currentSubscription {{
priceInfoRange(resolution:QUARTER_HOURLY, first:{requested_count}, after: "{cursor}") {{
pageInfo{{
count
hasNextPage
startCursor
endCursor
}}
edges{{
cursor
node{{
startsAt total level
}}
}}
}}
}}
}}
}}}}
"""
_LOGGER_API_DETAILS.debug(
"Fetching historical price info for home %s (page %d): %d intervals from cursor %s",
home_id,
page,
requested_count,
cursor,
)
data = await self._api_wrapper(
data={"query": query},
query_type=TibberPricesQueryType.PRICE_INFO_RANGE,
)
# Parse response
viewer = data.get("viewer", {})
home = viewer.get("home")
if not home:
_LOGGER.warning("Home %s not found in API response", home_id)
return None
if "currentSubscription" not in home or home["currentSubscription"] is None:
_LOGGER.warning("Home %s has no active subscription - price data will be unavailable", home_id)
return None
# Extract priceInfoRange data
subscription = home["currentSubscription"]
price_info_range = subscription.get("priceInfoRange", {})
page_info = price_info_range.get("pageInfo", {})
edges = price_info_range.get("edges", [])
# Flatten edges to interval list
intervals = [edge["node"] for edge in edges if "node" in edge]
return {
"intervals": intervals,
"count": page_info.get("count", len(intervals)),
"end_cursor": page_info.get("endCursor"),
"has_next_page": page_info.get("hasNextPage", False),
}
def _extract_home_timezones(self, user_data: dict[str, Any]) -> dict[str, str]:
    """Build a ``home_id -> IANA timezone`` mapping from user data.

    Homes lacking a ``timeZone`` field are omitted from the mapping (a
    warning is logged; callers fall back to the HA system timezone).

    Args:
        user_data: User data dict from ``async_get_viewer_details()``.

    Returns:
        Dict mapping home_id to timezone string (e.g., "Europe/Oslo").
    """
    mapping: dict[str, str] = {}
    for home in user_data.get("viewer", {}).get("homes", []):
        home_id = home.get("id")
        if not home_id:
            continue
        timezone = home.get("timeZone")
        if timezone:
            mapping[home_id] = timezone
            _LOGGER_API_DETAILS.debug("Extracted timezone %s for home %s", timezone, home_id)
        else:
            _LOGGER.warning("Home %s has no timezone in user data, will use fallback", home_id)
    return mapping
def _calculate_day_before_yesterday_midnight(self, home_timezone: str | None) -> datetime:
    """Return midnight two days ago, expressed in the home's timezone.

    Deliberately based on the REAL current time (``dt_utils.now()``) rather
    than the simulated TimeService clock, so API cursor boundaries always
    reflect actual wall-clock time.

    Args:
        home_timezone: IANA timezone name (e.g., "Europe/Oslo"); falls back
            to the HA system timezone when None or invalid.

    Returns:
        Timezone-aware datetime for day before yesterday midnight.
    """
    now = dt_utils.now()

    local_now = None
    if home_timezone:
        try:
            local_now = now.astimezone(ZoneInfo(home_timezone))
        except (KeyError, ValueError, OSError) as error:
            _LOGGER.warning(
                "Invalid timezone %s (%s), falling back to HA system timezone",
                home_timezone,
                error,
            )
    if local_now is None:
        # No (valid) home timezone: use the HA system timezone instead.
        local_now = dt_utils.as_local(now)

    # Midnight at the start of the day before yesterday.
    return (local_now - timedelta(days=2)).replace(hour=0, minute=0, second=0, microsecond=0)
def _encode_cursor(self, timestamp: datetime) -> str:
"""
Encode a timestamp as base64 cursor for GraphQL API.
Args:
timestamp: Timezone-aware datetime to encode.
Returns:
Base64-encoded ISO timestamp string.
"""
iso_string = timestamp.isoformat()
return base64.b64encode(iso_string.encode()).decode()
def _parse_timestamp(self, timestamp_str: str) -> datetime:
    """Parse an ISO timestamp string to a timezone-aware datetime.

    Falls back to the current time when the string cannot be parsed.
    """
    parsed = dt_utils.parse_datetime(timestamp_str)
    return parsed if parsed is not None else dt_utils.now()
def _calculate_cursor_for_home(self, home_timezone: str | None) -> str:
    """Return the GraphQL cursor for "day before yesterday midnight" in a home's timezone.

    Thin convenience wrapper that chains
    ``_calculate_day_before_yesterday_midnight`` into ``_encode_cursor``.

    Args:
        home_timezone: IANA timezone name; None falls back to the HA system timezone.

    Returns:
        Base64-encoded ISO timestamp string for use as a GraphQL cursor.
    """
    boundary = self._calculate_day_before_yesterday_midnight(home_timezone)
    return self._encode_cursor(boundary)
async def _make_request(
self,
headers: dict[str, str],
data: dict,
query_type: TibberPricesQueryType,
) -> dict[str, Any]:
"""Make an API request with comprehensive error handling for network issues."""
_LOGGER_API_DETAILS.debug("Making API request with data: %s", data)
try:
# More granular timeout configuration for better network failure handling
timeout = aiohttp.ClientTimeout(
total=self._request_timeout, # Total request timeout: 25s
connect=self._connect_timeout, # Connection timeout: 10s
sock_connect=self._socket_connect_timeout, # Socket connection: 5s
)
response = await self._session.request(
method="POST",
url="https://api.tibber.com/v1-beta/gql",
headers=headers,
json=data,
timeout=timeout,
)
verify_response_or_raise(response)
response_json = await response.json()
_LOGGER_API_DETAILS.debug("Received API response: %s", response_json)
await verify_graphql_response(response_json, query_type)
return response_json["data"]
except aiohttp.ClientResponseError as error:
_LOGGER.exception("HTTP error during API request")
raise TibberPricesApiClientCommunicationError(
TibberPricesApiClientCommunicationError.CONNECTION_ERROR.format(exception=str(error))
) from error
except aiohttp.ClientConnectorError as error:
_LOGGER.exception("Connection error - server unreachable or network down")
raise TibberPricesApiClientCommunicationError(
TibberPricesApiClientCommunicationError.CONNECTION_ERROR.format(exception=str(error))
) from error
except aiohttp.ServerDisconnectedError as error:
_LOGGER.exception("Server disconnected during request")
raise TibberPricesApiClientCommunicationError(
TibberPricesApiClientCommunicationError.CONNECTION_ERROR.format(exception=str(error))
) from error
except TimeoutError as error:
_LOGGER.exception(
"Request timeout after %d seconds - slow network or server overload",
self._request_timeout,
)
raise TibberPricesApiClientCommunicationError(
TibberPricesApiClientCommunicationError.TIMEOUT_ERROR.format(exception=str(error))
) from error
except socket.gaierror as error:
self._handle_dns_error(error)
raise # Ensure type checker knows this path always raises
except OSError as error:
self._handle_network_error(error)
raise # Ensure type checker knows this path always raises
def _handle_dns_error(self, error: socket.gaierror) -> None:
"""Handle DNS resolution errors with IPv4/IPv6 dual stack considerations."""
error_msg = str(error)
if "Name or service not known" in error_msg:
_LOGGER.exception("DNS resolution failed - domain name not found")
elif "Temporary failure in name resolution" in error_msg:
_LOGGER.exception("DNS resolution temporarily failed - network or DNS server issue")
elif "Address family for hostname not supported" in error_msg:
_LOGGER.exception("DNS resolution failed - IPv4/IPv6 address family not supported")
elif "No address associated with hostname" in error_msg:
_LOGGER.exception("DNS resolution failed - no IPv4/IPv6 addresses found")
else:
_LOGGER.exception("DNS resolution failed - check internet connection: %s", error_msg)
raise TibberPricesApiClientCommunicationError(
TibberPricesApiClientCommunicationError.CONNECTION_ERROR.format(exception=str(error))
) from error
def _handle_network_error(self, error: OSError) -> None:
"""Handle network-level errors with IPv4/IPv6 dual stack considerations."""
error_msg = str(error)
errno = getattr(error, "errno", None)
# Common IPv4/IPv6 dual stack network error codes
errno_network_unreachable = 101 # ENETUNREACH
errno_host_unreachable = 113 # EHOSTUNREACH
errno_connection_refused = 111 # ECONNREFUSED
errno_connection_timeout = 110 # ETIMEDOUT
if errno == errno_network_unreachable:
_LOGGER.exception("Network unreachable - check internet connection or IPv4/IPv6 routing")
elif errno == errno_host_unreachable:
_LOGGER.exception("Host unreachable - routing issue or IPv4/IPv6 connectivity problem")
elif errno == errno_connection_refused:
_LOGGER.exception("Connection refused - server not accepting connections")
elif errno == errno_connection_timeout:
_LOGGER.exception("Connection timed out - network latency or server overload")
elif "Address family not supported" in error_msg:
_LOGGER.exception("Address family not supported - IPv4/IPv6 configuration issue")
elif "Protocol not available" in error_msg:
_LOGGER.exception("Protocol not available - IPv4/IPv6 stack configuration issue")
elif "Network is down" in error_msg:
_LOGGER.exception("Network interface is down - check network adapter")
elif "Permission denied" in error_msg:
_LOGGER.exception("Network permission denied - firewall or security restriction")
else:
_LOGGER.exception("Network error - internet may be down: %s", error_msg)
raise TibberPricesApiClientCommunicationError(
TibberPricesApiClientCommunicationError.CONNECTION_ERROR.format(exception=str(error))
) from error
async def _handle_request(
self,
headers: dict[str, str],
data: dict,
query_type: TibberPricesQueryType,
) -> Any:
"""Handle a single API request with rate limiting."""
async with self._request_semaphore:
# Rate limiting: ensure minimum interval between requests
if self.time and self._last_request_time:
now = self.time.now()
time_since_last_request = now - self._last_request_time
if time_since_last_request < self._min_request_interval:
sleep_time = (self._min_request_interval - time_since_last_request).total_seconds()
_LOGGER_API_DETAILS.debug(
"Rate limiting: waiting %s seconds before next request",
sleep_time,
)
await asyncio.sleep(sleep_time)
if self.time:
self._last_request_time = self.time.now()
return await self._make_request(
headers,
data or {},
query_type,
)
def _should_retry_error(self, error: Exception, retry: int) -> tuple[bool, int]:
"""Determine if an error should be retried and calculate delay."""
# Check if we've exceeded max retries first
if retry >= self._max_retries:
return False, 0
# Non-retryable errors - authentication and permission issues
if isinstance(
error,
(
TibberPricesApiClientAuthenticationError,
TibberPricesApiClientPermissionError,
),
):
return False, 0
# Handle API-specific errors
if isinstance(error, TibberPricesApiClientError):
return self._handle_api_error_retry(error, retry)
# Network and timeout errors - retryable with exponential backoff
if isinstance(error, (aiohttp.ClientError, socket.gaierror, TimeoutError)):
delay = min(self._retry_delay * (2**retry), 30) # Cap at 30 seconds
return True, delay
# Unknown errors - not retryable
return False, 0
def _handle_api_error_retry(self, error: TibberPricesApiClientError, retry: int) -> tuple[bool, int]:
"""Handle retry logic for API-specific errors."""
error_msg = str(error)
# Non-retryable: Invalid queries, bad requests, empty data
# Empty data means API has no data for the requested range - retrying won't help
if "Invalid GraphQL query" in error_msg or "Bad request" in error_msg or "Empty data received" in error_msg:
return False, 0
# Rate limits - only retry if server explicitly says so
if "Rate limit exceeded" in error_msg or "rate limited" in error_msg.lower():
delay = self._extract_retry_delay(error, retry)
return True, delay
# Other API errors - not retryable (assume permanent issue)
return False, 0
def _extract_retry_delay(self, error: Exception, retry: int) -> int:
"""Extract retry delay from rate limit error or use exponential backoff."""
error_msg = str(error)
# Try to extract Retry-After value from error message
retry_after_match = re.search(r"retry after (\d+) seconds", error_msg.lower())
if retry_after_match:
try:
retry_after = int(retry_after_match.group(1))
return min(retry_after + 1, 300) # Add buffer, max 5 minutes
except ValueError:
pass
# Try to extract generic seconds value
seconds_match = re.search(r"(\d+) seconds", error_msg)
if seconds_match:
try:
seconds = int(seconds_match.group(1))
return min(seconds + 1, 300) # Add buffer, max 5 minutes
except ValueError:
pass
# Fall back to exponential backoff with cap
base_delay = self._retry_delay * (2**retry)
return min(base_delay, 120) # Cap at 2 minutes for rate limits
async def _api_wrapper(
self,
data: dict | None = None,
headers: dict | None = None,
query_type: TibberPricesQueryType = TibberPricesQueryType.USER,
) -> Any:
"""
Get information from the API with rate limiting and retry logic.
Exception Handling Strategy:
- AuthenticationError: Immediate raise, triggers reauth flow
- PermissionError: Immediate raise, non-retryable
- CommunicationError: Retry with exponential backoff
- ApiClientError (Rate Limit): Retry with Retry-After delay
- ApiClientError (Other): Retry only if explicitly retryable
- Network errors (aiohttp.ClientError, socket.gaierror, TimeoutError):
Converted to CommunicationError and retried
Retry Logic:
- Max retries: 5 (configurable via _max_retries)
- Base delay: 2 seconds (exponential backoff: 2s, 4s, 8s, 16s, 32s)
- Rate limit delay: Uses Retry-After header or falls back to exponential
- Caps: 30s for network errors, 120s for rate limits, 300s for Retry-After
"""
headers = headers or prepare_headers(self._access_token, self._version)
last_error: Exception | None = None
for retry in range(self._max_retries + 1):
try:
return await self._handle_request(headers, data or {}, query_type)
except (
TibberPricesApiClientAuthenticationError,
TibberPricesApiClientPermissionError,
):
_LOGGER.exception("Non-retryable error occurred")
raise
except (
TibberPricesApiClientError,
aiohttp.ClientError,
socket.gaierror,
TimeoutError,
) as error:
last_error = (
error
if isinstance(error, TibberPricesApiClientError)
else TibberPricesApiClientCommunicationError(
TibberPricesApiClientCommunicationError.CONNECTION_ERROR.format(exception=str(error))
)
)
should_retry, delay = self._should_retry_error(error, retry)
if should_retry:
error_type = self._get_error_type(error)
_LOGGER.warning(
"Tibber %s error, attempt %d/%d. Retrying in %d seconds: %s",
error_type,
retry + 1,
self._max_retries,
delay,
str(error),
)
await asyncio.sleep(delay)
continue
if "Invalid GraphQL query" in str(error):
_LOGGER.exception("Invalid query - not retrying")
raise
# Handle final error state
if isinstance(last_error, TimeoutError):
raise TibberPricesApiClientCommunicationError(
TibberPricesApiClientCommunicationError.TIMEOUT_ERROR.format(exception=last_error)
) from last_error
if isinstance(last_error, (aiohttp.ClientError, socket.gaierror)):
raise TibberPricesApiClientCommunicationError(
TibberPricesApiClientCommunicationError.CONNECTION_ERROR.format(exception=last_error)
) from last_error
raise last_error or TibberPricesApiClientError(TibberPricesApiClientError.UNKNOWN_ERROR)
def _get_error_type(self, error: Exception) -> str:
"""Get a descriptive error type for logging."""
if "Rate limit" in str(error):
return "rate limit"
if isinstance(error, (aiohttp.ClientError, socket.gaierror, TimeoutError)):
return "network"
return "API"

View file

@ -1,34 +0,0 @@
"""Custom exceptions for API client."""
from __future__ import annotations
class TibberPricesApiClientError(Exception):
"""Exception to indicate a general API error."""
UNKNOWN_ERROR = "Unknown GraphQL error"
MALFORMED_ERROR = "Malformed GraphQL error: {error}"
GRAPHQL_ERROR = "GraphQL error: {message}"
EMPTY_DATA_ERROR = "Empty data received for {query_type}"
GENERIC_ERROR = "Something went wrong! {exception}"
RATE_LIMIT_ERROR = "Rate limit exceeded. Please wait {retry_after} seconds before retrying"
INVALID_QUERY_ERROR = "Invalid GraphQL query: {message}"
class TibberPricesApiClientCommunicationError(TibberPricesApiClientError):
    """Exception to indicate a communication error.

    Used for transient transport-level failures (timeouts, DNS/connection
    errors); callers treat these as retryable.
    """

    # Templates formatted with the text of the underlying exception.
    TIMEOUT_ERROR = "Timeout error fetching information - {exception}"
    CONNECTION_ERROR = "Error fetching information - {exception}"
class TibberPricesApiClientAuthenticationError(TibberPricesApiClientError):
    """Exception to indicate an authentication error.

    Raised on HTTP 401 or GraphQL UNAUTHENTICATED; non-retryable and
    typically triggers a re-authentication flow.
    """

    INVALID_CREDENTIALS = "Invalid access token or expired credentials"
class TibberPricesApiClientPermissionError(TibberPricesApiClientError):
    """Exception to indicate insufficient permissions.

    Raised on HTTP 403 or GraphQL FORBIDDEN; non-retryable.
    """

    INSUFFICIENT_PERMISSIONS = "Access forbidden - insufficient permissions for this operation"

View file

@ -1,361 +0,0 @@
"""Helper functions for API response processing."""
from __future__ import annotations
import logging
from typing import TYPE_CHECKING
from homeassistant.const import __version__ as ha_version
if TYPE_CHECKING:
import aiohttp
from .queries import TibberPricesQueryType
from .exceptions import (
TibberPricesApiClientAuthenticationError,
TibberPricesApiClientError,
TibberPricesApiClientPermissionError,
)
_LOGGER = logging.getLogger(__name__)
_LOGGER_DETAILS = logging.getLogger(__name__ + ".details")
HTTP_BAD_REQUEST = 400
HTTP_UNAUTHORIZED = 401
HTTP_FORBIDDEN = 403
HTTP_TOO_MANY_REQUESTS = 429
HTTP_INTERNAL_SERVER_ERROR = 500
HTTP_BAD_GATEWAY = 502
HTTP_SERVICE_UNAVAILABLE = 503
HTTP_GATEWAY_TIMEOUT = 504
def verify_response_or_raise(response: aiohttp.ClientResponse) -> None:
    """
    Verify HTTP response and map to appropriate exceptions.

    Error Mapping:
    - 401 Unauthorized → AuthenticationError (non-retryable)
    - 403 Forbidden → PermissionError (non-retryable)
    - 429 Rate Limit → ApiClientError with retry support
    - 400 Bad Request → ApiClientError (non-retryable, invalid query)
    - 5xx Server Errors → CommunicationError (retryable)
    - Other errors → Let aiohttp.raise_for_status() handle
    """
    status = response.status

    # Authentication failures - non-retryable
    if status == HTTP_UNAUTHORIZED:
        _LOGGER.error("Tibber API authentication failed - check access token")
        raise TibberPricesApiClientAuthenticationError(TibberPricesApiClientAuthenticationError.INVALID_CREDENTIALS)

    # Permission denied - non-retryable
    if status == HTTP_FORBIDDEN:
        _LOGGER.error("Tibber API access forbidden - insufficient permissions")
        raise TibberPricesApiClientPermissionError(TibberPricesApiClientPermissionError.INSUFFICIENT_PERMISSIONS)

    # Rate limiting - retryable; Tibber may announce the backoff via Retry-After
    if status == HTTP_TOO_MANY_REQUESTS:
        retry_after = response.headers.get("Retry-After", "unknown")
        _LOGGER.warning("Tibber API rate limit exceeded - retry after %s seconds", retry_after)
        raise TibberPricesApiClientError(TibberPricesApiClientError.RATE_LIMIT_ERROR.format(retry_after=retry_after))

    # Bad request - non-retryable (invalid query)
    if status == HTTP_BAD_REQUEST:
        _LOGGER.error("Tibber API rejected request - likely invalid GraphQL query")
        raise TibberPricesApiClientError(
            TibberPricesApiClientError.INVALID_QUERY_ERROR.format(message="Bad request - likely invalid GraphQL query")
        )

    # Server errors 5xx - temporary issues; raising here yields
    # aiohttp.ClientResponseError, which the caller's retry logic converts
    # into a retryable CommunicationError.
    server_errors = {
        HTTP_INTERNAL_SERVER_ERROR,
        HTTP_BAD_GATEWAY,
        HTTP_SERVICE_UNAVAILABLE,
        HTTP_GATEWAY_TIMEOUT,
    }
    if status in server_errors:
        _LOGGER.warning(
            "Tibber API server error %d - temporary issue, will retry",
            status,
        )
        response.raise_for_status()

    # All other HTTP errors are delegated to aiohttp.
    response.raise_for_status()
async def verify_graphql_response(response_json: dict, query_type: TibberPricesQueryType) -> None:
    """
    Verify GraphQL response and map error codes to appropriate exceptions.

    GraphQL Error Code Mapping:
    - UNAUTHENTICATED → AuthenticationError (triggers reauth flow)
    - FORBIDDEN → PermissionError (non-retryable)
    - RATE_LIMITED/TOO_MANY_REQUESTS → ApiClientError (retryable)
    - VALIDATION_ERROR/GRAPHQL_VALIDATION_FAILED → ApiClientError (non-retryable)
    - Other codes → Generic ApiClientError (with code in message)
    - Empty data → ApiClientError (non-retryable, API has no data)

    Args:
        response_json: Parsed JSON body of the GraphQL response.
        query_type: Query type used for the empty-data completeness check.

    Raises:
        TibberPricesApiClientAuthenticationError: On UNAUTHENTICATED errors.
        TibberPricesApiClientPermissionError: On FORBIDDEN errors.
        TibberPricesApiClientError: On all other GraphQL/data problems.
    """
    if "errors" in response_json:
        errors = response_json["errors"]
        if not errors:
            _LOGGER.error("Tibber API returned empty errors array")
            raise TibberPricesApiClientError(TibberPricesApiClientError.UNKNOWN_ERROR)
        error = errors[0]  # Take first error
        if not isinstance(error, dict):
            _LOGGER.error("Tibber API returned malformed error: %s", error)
            raise TibberPricesApiClientError(TibberPricesApiClientError.MALFORMED_ERROR.format(error=error))
        message = error.get("message", "Unknown error")
        extensions = error.get("extensions", {})
        error_code = extensions.get("code")
        # Handle specific Tibber API error codes
        if error_code == "UNAUTHENTICATED":
            _LOGGER.error("Tibber API authentication error: %s", message)
            raise TibberPricesApiClientAuthenticationError(TibberPricesApiClientAuthenticationError.INVALID_CREDENTIALS)
        if error_code == "FORBIDDEN":
            _LOGGER.error("Tibber API permission error: %s", message)
            raise TibberPricesApiClientPermissionError(TibberPricesApiClientPermissionError.INSUFFICIENT_PERMISSIONS)
        if error_code in ["RATE_LIMITED", "TOO_MANY_REQUESTS"]:
            # Some GraphQL APIs return rate limit info in extensions
            retry_after = extensions.get("retryAfter", "unknown")
            _LOGGER.warning(
                "Tibber API rate limited via GraphQL: %s (retry after %s)",
                message,
                retry_after,
            )
            raise TibberPricesApiClientError(
                TibberPricesApiClientError.RATE_LIMIT_ERROR.format(retry_after=retry_after)
            )
        if error_code in ["VALIDATION_ERROR", "GRAPHQL_VALIDATION_FAILED"]:
            _LOGGER.error("Tibber API validation error: %s", message)
            raise TibberPricesApiClientError(TibberPricesApiClientError.INVALID_QUERY_ERROR.format(message=message))
        # Unrecognized code: surface it in the log, raise generic GraphQL error.
        _LOGGER.error("Tibber API GraphQL error (code: %s): %s", error_code or "unknown", message)
        raise TibberPricesApiClientError(TibberPricesApiClientError.GRAPHQL_ERROR.format(message=message))
    if "data" not in response_json or response_json["data"] is None:
        _LOGGER.error("Tibber API response missing data object")
        raise TibberPricesApiClientError(
            TibberPricesApiClientError.GRAPHQL_ERROR.format(message="Response missing data object")
        )
    # Empty data check - validate response completeness
    # This is NOT a retryable error - API simply has no data for the requested range
    if is_data_empty(response_json["data"], query_type.value):
        _LOGGER_DETAILS.debug("Empty data detected for query_type: %s - API has no data available", query_type)
        raise TibberPricesApiClientError(
            TibberPricesApiClientError.EMPTY_DATA_ERROR.format(query_type=query_type.value)
        )
def _check_user_data_empty(data: dict) -> bool:
    """Return True when viewer data lacks a non-null userId or any homes."""
    viewer = data.get("viewer")
    viewer_ok = isinstance(viewer, dict)

    # A usable payload needs a non-null userId ...
    has_user_id = viewer_ok and viewer.get("userId") is not None
    # ... and a non-empty list of homes.
    homes = viewer.get("homes") if viewer_ok else None
    has_homes = isinstance(homes, list) and len(homes) > 0

    is_empty = not (has_user_id and has_homes)
    _LOGGER_DETAILS.debug(
        "Viewer check - has_user_id: %s, has_homes: %s, is_empty: %s",
        has_user_id,
        has_homes,
        is_empty,
    )
    return is_empty
def _check_price_info_empty(data: dict) -> bool:
    """
    Check if price_info data is empty or incomplete.

    A missing currentSubscription is a VALID state (home without an active
    contract).  Only structural problems count as "empty": a subscription
    that exists but carries no price data for today.
    """
    home_data = data.get("viewer", {}).get("home")
    if not home_data:
        _LOGGER_DETAILS.debug("No home data found in price_info response")
        return True

    _LOGGER_DETAILS.debug("Checking price_info for single home")

    subscription = home_data.get("currentSubscription")
    if subscription is None:
        # Legitimate state, not an "empty data" error.
        _LOGGER_DETAILS.debug("No currentSubscription - home has no active contract (valid state)")
        return False

    # Yesterday's data (priceInfoRange) is optional; only logged for context.
    price_range = subscription.get("priceInfoRange")
    has_yesterday = price_range is not None and bool(price_range.get("edges"))

    # Today's data is required whenever a subscription exists.
    price_info = subscription.get("priceInfo")
    has_today = price_info is not None and bool(price_info.get("today"))

    is_empty = not has_today
    _LOGGER_DETAILS.debug(
        "Price info check - priceInfoRange: %s, today: %s, is_empty: %s",
        bool(has_yesterday),
        bool(has_today),
        is_empty,
    )
    return is_empty
def _check_price_info_range_empty(data: dict) -> bool:
    """
    Check if price_info_range data is empty or incomplete.

    Only structural problems are treated as empty; an empty edges array is
    VALID for historical queries (no data available for that time range,
    e.g. too old).
    """
    home = data.get("viewer", {}).get("home")
    if not home:
        _LOGGER_DETAILS.debug("No home data found in price_info_range response")
        return True

    subscription = home.get("currentSubscription")
    if not subscription:
        _LOGGER_DETAILS.debug("Missing currentSubscription in home")
        return True

    price_range = subscription.get("priceInfoRange")
    if price_range is None:
        _LOGGER_DETAILS.debug("Missing priceInfoRange in subscription")
        return True

    if "edges" not in price_range:
        _LOGGER_DETAILS.debug("Missing edges key in priceInfoRange")
        return True

    edges = price_range["edges"]
    if not isinstance(edges, list):
        _LOGGER_DETAILS.debug("priceInfoRange edges is not a list")
        return True

    # Structure is intact; an empty list simply means no data for the range.
    _LOGGER_DETAILS.debug(
        "Price info range check - structure valid, edge_count: %s (empty is OK for old data)",
        len(edges),
    )
    return False
def is_data_empty(data: dict, query_type: str) -> bool:
    """
    Check if the response data is empty or incomplete.

    Dispatches to the per-query-type checker:
    - "user": requires a non-null userId and a non-empty homes list.
    - "price_info": requires today's data when a subscription exists.
    - "price_info_range": requires a structurally valid priceInfoRange
      (used by the interval pool for historical data fetching).

    Unknown query types are treated as non-empty; any structural access
    error while checking is treated as empty.
    """
    _LOGGER_DETAILS.debug("Checking if data is empty for query_type %s", query_type)
    checkers = {
        "user": _check_user_data_empty,
        "price_info": _check_price_info_empty,
        "price_info_range": _check_price_info_range_empty,
    }
    checker = checkers.get(query_type)
    try:
        if checker is not None:
            return checker(data)
        _LOGGER_DETAILS.debug("Unknown query type %s, treating as non-empty", query_type)
        return False
    except (KeyError, IndexError, TypeError) as error:
        _LOGGER_DETAILS.debug("Error checking data emptiness: %s", error)
        return True
def prepare_headers(access_token: str, version: str) -> dict[str, str]:
    """Build the HTTP headers sent with every Tibber API request."""
    # The User-Agent identifies both the Home Assistant core version and
    # this integration's version to the Tibber API.
    user_agent = f"HomeAssistant/{ha_version} tibber_prices/{version}"
    return {
        "Authorization": f"Bearer {access_token}",
        "Accept": "application/json",
        "User-Agent": user_agent,
    }
def flatten_price_info(subscription: dict) -> list[dict]:
    """
    Flatten a subscription's price data into one chronological list.

    The result concatenates the historical quarter-hourly intervals from
    priceInfoRange (192 intervals: 2 days starting at day-before-yesterday
    midnight, enough for trailing 24h averages back to yesterday) followed
    by today's and tomorrow's intervals from priceInfo.

    Args:
        subscription: The currentSubscription dictionary from API response.

    Returns:
        A flat list containing all price dictionaries (startsAt, total, level).
    """
    # 'or {}' guards against None values (API may return None during maintenance).
    range_block = subscription.get("priceInfoRange") or {}

    historical: list[dict] = []
    for edge in range_block.get("edges", []):
        if "node" not in edge:
            _LOGGER.debug("Skipping edge without node: %s", edge)
            continue
        historical.append(edge["node"])

    # 'or {}' / 'or []' again guard against None values from the API.
    price_info = subscription.get("priceInfo") or {}
    today = price_info.get("today") or []
    tomorrow = price_info.get("tomorrow") or []
    return historical + today + tomorrow

View file

@ -1,48 +0,0 @@
"""GraphQL queries and query types for Tibber API."""
from __future__ import annotations
from enum import Enum
class TibberPricesQueryType(Enum):
    """
    Types of queries that can be made to the API.

    CRITICAL: Query type selection is dictated by Tibber's API design and caching strategy.

    PRICE_INFO:
    - Used for current day-relative data (day before yesterday/yesterday/today/tomorrow)
    - API automatically determines "today" and "tomorrow" based on current time
    - MUST be used when querying any data from these 4 days, even if you only need
      specific intervals, because Tibber's API requires this endpoint for current data
    - Provides the core dataset needed for live data, recent historical context
      (important until tomorrow's data arrives), and tomorrow's forecast
    - Tibber likely has optimized caching for this frequently-accessed data range
    - Boundary: FROM "day before yesterday midnight" (real time) onwards

    PRICE_INFO_RANGE:
    - Used for historical data older than day before yesterday
    - Allows flexible date range queries with cursor-based pagination
    - Required for any intervals beyond the 4-day window of PRICE_INFO
    - Use this for historical analysis, comparisons, or trend calculations
    - Boundary: BEFORE "day before yesterday midnight" (real time)

    ROUTING:
    - Use async_get_price_info_for_range() wrapper for automatic routing
    - Wrapper intelligently splits requests spanning the boundary:
      * Fully historical range (end < boundary) → PRICE_INFO_RANGE only
      * Fully recent range (start >= boundary) → PRICE_INFO only
      * Spanning range → Both queries, merged results
    - Boundary calculated using REAL TIME (dt_utils.now()), not TimeService
      to ensure predictable API responses

    USER:
    - Fetches user account data and home metadata
    - Separate from price data queries
    """

    # Values double as the query_type strings used by is_data_empty().
    PRICE_INFO = "price_info"
    PRICE_INFO_RANGE = "price_info_range"
    USER = "user"

View file

@ -0,0 +1,563 @@
"""Utility functions for calculating price averages."""
from __future__ import annotations
from datetime import datetime, timedelta
from homeassistant.util import dt as dt_util
def calculate_trailing_24h_avg(all_prices: list[dict], interval_start: datetime) -> float:
    """
    Calculate trailing 24-hour average price for a given interval.

    Args:
        all_prices: List of all price data (yesterday, today, tomorrow combined)
        interval_start: Start time of the interval to calculate average for

    Returns:
        Average price for the 24 hours preceding the interval (not including
        the interval itself), or 0.0 when no intervals fall in the window.
    """
    window_start = interval_start - timedelta(hours=24)
    totals: list[float] = []
    for entry in all_prices:
        parsed = dt_util.parse_datetime(entry["startsAt"])
        if parsed is None:
            continue
        local_start = dt_util.as_local(parsed)
        # Window is half-open: [interval_start - 24h, interval_start)
        if window_start <= local_start < interval_start:
            totals.append(float(entry["total"]))
    return sum(totals) / len(totals) if totals else 0.0
def calculate_leading_24h_avg(all_prices: list[dict], interval_start: datetime) -> float:
    """
    Calculate leading 24-hour average price for a given interval.

    Args:
        all_prices: List of all price data (yesterday, today, tomorrow combined)
        interval_start: Start time of the interval to calculate average for

    Returns:
        Average price for up to 24 hours following the interval (including the
        interval itself), or 0.0 when no intervals fall in the window.
    """
    window_end = interval_start + timedelta(hours=24)
    totals: list[float] = []
    for entry in all_prices:
        parsed = dt_util.parse_datetime(entry["startsAt"])
        if parsed is None:
            continue
        local_start = dt_util.as_local(parsed)
        # Window is half-open: [interval_start, interval_start + 24h)
        if interval_start <= local_start < window_end:
            totals.append(float(entry["total"]))
    return sum(totals) / len(totals) if totals else 0.0
def calculate_current_trailing_avg(coordinator_data: dict) -> float | None:
    """
    Calculate the trailing 24-hour average for the current time.

    Args:
        coordinator_data: The coordinator data containing priceInfo

    Returns:
        Current trailing 24-hour average price, or None if unavailable
    """
    if not coordinator_data:
        return None
    info = coordinator_data.get("priceInfo", {})
    combined = info.get("yesterday", []) + info.get("today", []) + info.get("tomorrow", [])
    if not combined:
        return None
    return calculate_trailing_24h_avg(combined, dt_util.now())
def calculate_current_leading_avg(coordinator_data: dict) -> float | None:
    """
    Calculate the leading 24-hour average for the current time.

    Args:
        coordinator_data: The coordinator data containing priceInfo

    Returns:
        Current leading 24-hour average price, or None if unavailable
    """
    if not coordinator_data:
        return None
    info = coordinator_data.get("priceInfo", {})
    combined = info.get("yesterday", []) + info.get("today", []) + info.get("tomorrow", [])
    if not combined:
        return None
    return calculate_leading_24h_avg(combined, dt_util.now())
def calculate_trailing_24h_min(all_prices: list[dict], interval_start: datetime) -> float:
    """
    Calculate trailing 24-hour minimum price for a given interval.

    Args:
        all_prices: List of all price data (yesterday, today, tomorrow combined)
        interval_start: Start time of the interval to calculate minimum for

    Returns:
        Minimum price for the 24 hours preceding the interval (not including
        the interval itself), or 0.0 when no intervals fall in the window.
    """
    window_start = interval_start - timedelta(hours=24)
    totals: list[float] = []
    for entry in all_prices:
        parsed = dt_util.parse_datetime(entry["startsAt"])
        if parsed is None:
            continue
        local_start = dt_util.as_local(parsed)
        # Window is half-open: [interval_start - 24h, interval_start)
        if window_start <= local_start < interval_start:
            totals.append(float(entry["total"]))
    return min(totals) if totals else 0.0
def calculate_trailing_24h_max(all_prices: list[dict], interval_start: datetime) -> float:
    """
    Calculate trailing 24-hour maximum price for a given interval.

    Args:
        all_prices: List of all price data (yesterday, today, tomorrow combined)
        interval_start: Start time of the interval to calculate maximum for

    Returns:
        Maximum price for the 24 hours preceding the interval (not including
        the interval itself), or 0.0 when no intervals fall in the window.
    """
    window_start = interval_start - timedelta(hours=24)
    totals: list[float] = []
    for entry in all_prices:
        parsed = dt_util.parse_datetime(entry["startsAt"])
        if parsed is None:
            continue
        local_start = dt_util.as_local(parsed)
        # Window is half-open: [interval_start - 24h, interval_start)
        if window_start <= local_start < interval_start:
            totals.append(float(entry["total"]))
    return max(totals) if totals else 0.0
def calculate_leading_24h_min(all_prices: list[dict], interval_start: datetime) -> float:
    """
    Calculate leading 24-hour minimum price for a given interval.

    Args:
        all_prices: List of all price data (yesterday, today, tomorrow combined)
        interval_start: Start time of the interval to calculate minimum for

    Returns:
        Minimum price for up to 24 hours following the interval (including the
        interval itself), or 0.0 when no intervals fall in the window.
    """
    window_end = interval_start + timedelta(hours=24)
    totals: list[float] = []
    for entry in all_prices:
        parsed = dt_util.parse_datetime(entry["startsAt"])
        if parsed is None:
            continue
        local_start = dt_util.as_local(parsed)
        # Window is half-open: [interval_start, interval_start + 24h)
        if interval_start <= local_start < window_end:
            totals.append(float(entry["total"]))
    return min(totals) if totals else 0.0
def calculate_leading_24h_max(all_prices: list[dict], interval_start: datetime) -> float:
    """
    Calculate leading 24-hour maximum price for a given interval.

    Args:
        all_prices: List of all price data (yesterday, today, tomorrow combined)
        interval_start: Start time of the interval to calculate maximum for

    Returns:
        Maximum price for up to 24 hours following the interval (including the
        interval itself), or 0.0 when no intervals fall in the window.
    """
    window_end = interval_start + timedelta(hours=24)
    totals: list[float] = []
    for entry in all_prices:
        parsed = dt_util.parse_datetime(entry["startsAt"])
        if parsed is None:
            continue
        local_start = dt_util.as_local(parsed)
        # Window is half-open: [interval_start, interval_start + 24h)
        if interval_start <= local_start < window_end:
            totals.append(float(entry["total"]))
    return max(totals) if totals else 0.0
def calculate_current_trailing_min(coordinator_data: dict) -> float | None:
    """
    Calculate the trailing 24-hour minimum for the current time.

    Args:
        coordinator_data: The coordinator data containing priceInfo

    Returns:
        Current trailing 24-hour minimum price, or None if unavailable
    """
    if not coordinator_data:
        return None
    info = coordinator_data.get("priceInfo", {})
    combined = info.get("yesterday", []) + info.get("today", []) + info.get("tomorrow", [])
    if not combined:
        return None
    return calculate_trailing_24h_min(combined, dt_util.now())
def calculate_current_trailing_max(coordinator_data: dict) -> float | None:
    """
    Calculate the trailing 24-hour maximum for the current time.

    Args:
        coordinator_data: The coordinator data containing priceInfo

    Returns:
        Current trailing 24-hour maximum price, or None if unavailable
    """
    if not coordinator_data:
        return None
    info = coordinator_data.get("priceInfo", {})
    combined = info.get("yesterday", []) + info.get("today", []) + info.get("tomorrow", [])
    if not combined:
        return None
    return calculate_trailing_24h_max(combined, dt_util.now())
def calculate_current_leading_min(coordinator_data: dict) -> float | None:
    """
    Calculate the leading 24-hour minimum for the current time.

    Args:
        coordinator_data: The coordinator data containing priceInfo

    Returns:
        Current leading 24-hour minimum price, or None if unavailable
    """
    if not coordinator_data:
        return None
    info = coordinator_data.get("priceInfo", {})
    combined = info.get("yesterday", []) + info.get("today", []) + info.get("tomorrow", [])
    if not combined:
        return None
    return calculate_leading_24h_min(combined, dt_util.now())
def calculate_current_leading_max(coordinator_data: dict) -> float | None:
    """
    Calculate the leading 24-hour maximum for the current time.

    Args:
        coordinator_data: The coordinator data containing priceInfo

    Returns:
        Current leading 24-hour maximum price, or None if unavailable
    """
    if not coordinator_data:
        return None
    info = coordinator_data.get("priceInfo", {})
    combined = info.get("yesterday", []) + info.get("today", []) + info.get("tomorrow", [])
    if not combined:
        return None
    return calculate_leading_24h_max(combined, dt_util.now())
def calculate_current_rolling_5interval_avg(coordinator_data: dict) -> float | None:
    """
    Calculate rolling 5-interval average (2 previous + current + 2 next intervals).

    Provides a smoothed "hour price" that slides with the clock instead of
    being fixed to clock hours.  With 15-minute intervals this spans a
    75-minute window centred on the current interval.

    Args:
        coordinator_data: The coordinator data containing priceInfo

    Returns:
        Average price of the 5 intervals, or None if unavailable
    """
    if not coordinator_data:
        return None
    info = coordinator_data.get("priceInfo", {})
    combined = info.get("yesterday", []) + info.get("today", []) + info.get("tomorrow", [])
    if not combined:
        return None

    now = dt_util.now()

    # Locate the 15-minute interval containing "now".
    current_idx = None
    for idx, entry in enumerate(combined):
        parsed = dt_util.parse_datetime(entry["startsAt"])
        if parsed is None:
            continue
        local_start = dt_util.as_local(parsed)
        if local_start <= now < local_start + timedelta(minutes=15):
            current_idx = idx
            break
    if current_idx is None:
        return None

    # Gather totals from the 5-interval window around the current index,
    # clamped to the list bounds and skipping missing totals.
    window = [
        float(combined[i]["total"])
        for i in range(current_idx - 2, current_idx + 3)
        if 0 <= i < len(combined) and combined[i].get("total") is not None
    ]
    return sum(window) / len(window) if window else None
def calculate_next_hour_rolling_5interval_avg(coordinator_data: dict) -> float | None:
    """
    Calculate rolling 5-interval average for the next hour (shifted by 4 intervals).

    Same smoothing as the current-hour sensor, but the 5-interval window is
    shifted forward by 4 quarter-hour intervals (60 minutes).

    Args:
        coordinator_data: The coordinator data containing priceInfo

    Returns:
        Average price of the 5 intervals one hour ahead, or None if unavailable
    """
    if not coordinator_data:
        return None
    info = coordinator_data.get("priceInfo", {})
    combined = info.get("yesterday", []) + info.get("today", []) + info.get("tomorrow", [])
    if not combined:
        return None

    now = dt_util.now()

    # Locate the 15-minute interval containing "now".
    current_idx = None
    for idx, entry in enumerate(combined):
        parsed = dt_util.parse_datetime(entry["startsAt"])
        if parsed is None:
            continue
        local_start = dt_util.as_local(parsed)
        if local_start <= now < local_start + timedelta(minutes=15):
            current_idx = idx
            break
    if current_idx is None:
        return None

    # Centre the 5-interval window one hour ahead (4 quarter-hour intervals),
    # i.e. indices current_idx + 2 .. current_idx + 6.
    center = current_idx + 4
    window = [
        float(combined[i]["total"])
        for i in range(center - 2, center + 3)
        if 0 <= i < len(combined) and combined[i].get("total") is not None
    ]
    return sum(window) / len(window) if window else None
def calculate_next_n_hours_avg(coordinator_data: dict, hours: int) -> float | None:
    """
    Calculate average price for the next N hours starting from the next interval.

    Computes the mean of the 15-minute intervals that begin AFTER the current
    interval, up to ``hours`` into the future.  Gracefully degrades: when
    fewer intervals are available (e.g. tomorrow's prices not yet published),
    the average of the available future intervals is returned; None is
    returned only when no future data exists at all.

    Args:
        coordinator_data: The coordinator data containing priceInfo
        hours: Number of hours to look ahead (1, 2, 3, 4, 5, 6, 8, 12, etc.)

    Returns:
        Average price for the next N hours (or the available portion),
        or None if no data is available.
    """
    if not coordinator_data or hours <= 0:
        return None

    price_info = coordinator_data.get("priceInfo", {})
    all_prices = (
        price_info.get("yesterday", [])
        + price_info.get("today", [])
        + price_info.get("tomorrow", [])
    )
    if not all_prices:
        return None

    now = dt_util.now()

    # Locate the 15-minute interval containing "now".
    current_idx = None
    for idx, price_data in enumerate(all_prices):
        starts_at = dt_util.parse_datetime(price_data["startsAt"])
        if starts_at is None:
            continue
        starts_at = dt_util.as_local(starts_at)
        if starts_at <= now < starts_at + timedelta(minutes=15):
            current_idx = idx
            break
    if current_idx is None:
        return None

    # Collect up to N hours of future prices (4 quarter-hour intervals per
    # hour), starting from the NEXT interval (current_idx + 1).
    intervals_needed = hours * 4
    prices_in_window: list[float] = []
    for offset in range(1, intervals_needed + 1):
        idx = current_idx + offset
        if idx >= len(all_prices):
            # Not enough future data available
            break
        price = all_prices[idx].get("total")
        if price is not None:
            prices_in_window.append(float(price))

    # Bug fix: the original computed the identical average in two separate
    # branches ("full period" vs. "partial period" fallback); a single
    # guarded return expresses the same graceful degradation.
    if prices_in_window:
        return sum(prices_in_window) / len(prices_in_window)
    return None

File diff suppressed because it is too large Load diff

View file

@ -1,41 +0,0 @@
"""
Binary sensor platform for Tibber Prices integration.
Provides binary (on/off) sensors for price-based automation:
- Best price period detection (cheapest intervals)
- Peak price period detection (most expensive intervals)
- Price threshold indicators (below/above configured limits)
- Tomorrow data availability status
These sensors enable simple automations like "run dishwasher during
cheap periods" without complex template logic.
See definitions.py for complete binary sensor catalog.
"""
from __future__ import annotations
from typing import TYPE_CHECKING
from .core import TibberPricesBinarySensor
from .definitions import ENTITY_DESCRIPTIONS
if TYPE_CHECKING:
from custom_components.tibber_prices.data import TibberPricesConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
async def async_setup_entry(
    _hass: HomeAssistant,
    entry: TibberPricesConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up Tibber Prices binary sensors for a config entry.

    Creates one TibberPricesBinarySensor per entry in ENTITY_DESCRIPTIONS,
    all sharing the coordinator stored on the entry's runtime data.
    """
    coordinator = entry.runtime_data.coordinator
    sensors = [
        TibberPricesBinarySensor(coordinator=coordinator, entity_description=description)
        for description in ENTITY_DESCRIPTIONS
    ]
    async_add_entities(sensors)

View file

@ -1,511 +0,0 @@
"""Attribute builders for binary sensors."""
from __future__ import annotations
from typing import TYPE_CHECKING
from custom_components.tibber_prices.const import get_display_unit_factor
from custom_components.tibber_prices.coordinator.helpers import get_intervals_for_day_offsets
from custom_components.tibber_prices.entity_utils import add_icon_color_attribute
# Constants for price display conversion
_SUBUNIT_FACTOR = 100 # Conversion factor for subunit currency (ct/øre)
_SUBUNIT_PRECISION = 2 # Decimal places for subunit currency
_BASE_PRECISION = 4 # Decimal places for base currency
# Import TypedDict definitions for documentation (not used in signatures)
if TYPE_CHECKING:
from custom_components.tibber_prices.coordinator.time_service import TibberPricesTimeService
if TYPE_CHECKING:
from datetime import datetime
from custom_components.tibber_prices.data import TibberPricesConfigEntry
from homeassistant.core import HomeAssistant
def get_tomorrow_data_available_attributes(
    coordinator_data: dict,
    *,
    time: TibberPricesTimeService,
) -> dict | None:
    """
    Build attributes for the tomorrow_data_available sensor.

    Returns TomorrowDataAvailableAttributes structure.

    Args:
        coordinator_data: Coordinator data dict
        time: TibberPricesTimeService instance

    Returns:
        Attributes dict with intervals_available and data_status,
        or None when no coordinator data is present.
    """
    if not coordinator_data:
        return None

    # Tomorrow == day offset 1; expected count accounts for DST transitions
    intervals = get_intervals_for_day_offsets(coordinator_data, [1])
    count = len(intervals)
    expected = time.get_expected_intervals_for_day(time.get_local_date(offset_days=1))

    if count == 0:
        status = "none"
    else:
        status = "full" if count == expected else "partial"

    return {
        "intervals_available": count,
        "data_status": status,
    }
def get_price_intervals_attributes(
    coordinator_data: dict,
    *,
    time: TibberPricesTimeService,
    reverse_sort: bool,
    config_entry: TibberPricesConfigEntry,
) -> dict | None:
    """
    Build attributes for period-based sensors (best/peak price).

    Returns PeriodAttributes structure.

    Everything heavy is precomputed in the coordinator; this function only:
    1. Reads the matching period summaries ("best_price" or "peak_price")
    2. Drops periods that ended before today (cache may include yesterday)
    3. Picks the currently active period, else the next future one
    4. Delegates final assembly (incl. display-unit conversion) to
       build_final_attributes_simple

    Args:
        coordinator_data: Coordinator data dict
        time: TibberPricesTimeService instance (required)
        reverse_sort: True for peak_price (highest first), False for best_price (lowest first)
        config_entry: Config entry for display unit configuration

    Returns:
        Attributes dict with current/next period and all periods list
    """
    if not coordinator_data:
        return build_no_periods_result(time=time)

    period_type = "peak_price" if reverse_sort else "best_price"
    period_data = coordinator_data.get("pricePeriods", {}).get(period_type)
    summaries = period_data.get("periods", []) if period_data else []
    if not summaries:
        return build_no_periods_result(time=time)

    # Sensors only surface today+tomorrow; drop anything that ended before today
    today_start = time.start_of_local_day(time.now())
    candidates = [p for p in summaries if p.get("end") and p["end"] >= today_start]
    if not candidates:
        return build_no_periods_result(time=time)

    # Prefer the period covering "now"; otherwise fall back to the next future one
    active = next(
        (
            p
            for p in candidates
            if p.get("start") and p.get("end") and time.is_current_interval(p["start"], p["end"])
        ),
        None,
    )
    if active is None:
        active = next(
            (p for p in candidates if p.get("start") and time.is_in_future(p["start"])),
            None,
        )

    return build_final_attributes_simple(active, candidates, time=time, config_entry=config_entry)
def build_no_periods_result(*, time: TibberPricesTimeService) -> dict:
    """
    Build the result returned when no periods are available at all.

    Returns:
        A dict with a quarter-hour-floored timestamp, null start/end,
        and an empty periods list.
    """
    now = time.now()
    # Floor to the most recent quarter hour (xx:00 / xx:15 / xx:30 / xx:45)
    floored = now.replace(minute=now.minute - now.minute % 15, second=0, microsecond=0)
    return {
        "timestamp": floored,
        "start": None,
        "end": None,
        "periods": [],
    }
def add_time_attributes(attributes: dict, current_period: dict, timestamp: datetime) -> None:
    """Copy time-related attributes (priority 1) into the target dict in place."""
    attributes["timestamp"] = timestamp
    for key in ("start", "end", "duration_minutes"):
        if key in current_period:
            attributes[key] = current_period[key]
def add_decision_attributes(attributes: dict, current_period: dict) -> None:
    """Copy core decision attributes (priority 2) into the target dict in place."""
    for key in ("level", "rating_level", "rating_difference_%"):
        if key in current_period:
            attributes[key] = current_period[key]
def add_price_attributes(attributes: dict, current_period: dict, factor: int) -> None:
    """
    Copy price statistics attributes (priority 3) into the target dict.

    Currency amounts are scaled to display units; dimensionless values
    (percentages, volatility labels) are copied unchanged.

    Args:
        attributes: Target dict to add attributes to
        current_period: Period dict with price data (in base currency)
        factor: Display unit conversion factor (100 for subunit, 1 for base)
    """
    precision = _SUBUNIT_PRECISION if factor == _SUBUNIT_FACTOR else _BASE_PRECISION
    for key in ("price_mean", "price_median", "price_min", "price_max", "price_spread"):
        if key in current_period:
            attributes[key] = round(current_period[key] * factor, precision)
    # Not currency amounts - no unit conversion applied
    for key in ("price_coefficient_variation_%", "volatility"):
        if key in current_period:
            attributes[key] = current_period[key]
def add_comparison_attributes(attributes: dict, current_period: dict, factor: int) -> None:
    """
    Copy price comparison attributes (priority 4) into the target dict.

    Absolute differences are scaled to display units; the matching
    percentage fields (suffixed "_%") are copied unchanged.

    Args:
        attributes: Target dict to add attributes to
        current_period: Period dict with price diff data (in base currency)
        factor: Display unit conversion factor (100 for subunit, 1 for base)
    """
    precision = _SUBUNIT_PRECISION if factor == _SUBUNIT_FACTOR else _BASE_PRECISION
    for base_key in ("period_price_diff_from_daily_min", "period_price_diff_from_daily_max"):
        if base_key in current_period:
            attributes[base_key] = round(current_period[base_key] * factor, precision)
        percent_key = f"{base_key}_%"
        if percent_key in current_period:
            attributes[percent_key] = current_period[percent_key]
def add_detail_attributes(attributes: dict, current_period: dict) -> None:
    """Copy detail information attributes (priority 5) into the target dict in place."""
    for key in ("period_interval_count", "period_position", "periods_total", "periods_remaining"):
        if key in current_period:
            attributes[key] = current_period[key]
def add_relaxation_attributes(attributes: dict, current_period: dict) -> None:
    """
    Copy relaxation information attributes (priority 6) into the target dict.

    Nothing is added unless ``relaxation_active`` is truthy on the period;
    a non-relaxed period therefore carries no relaxation attributes at all.
    """
    if not current_period.get("relaxation_active"):
        return
    attributes["relaxation_active"] = True
    for key in (
        "relaxation_level",
        "relaxation_threshold_original_%",
        "relaxation_threshold_applied_%",
    ):
        if key in current_period:
            attributes[key] = current_period[key]
def _convert_periods_to_display_units(period_summaries: list[dict], factor: int) -> list[dict]:
    """
    Return shallow copies of the periods with currency values scaled to display units.

    Args:
        period_summaries: List of period dicts with price data (in base currency)
        factor: Display unit conversion factor (100 for subunit, 1 for base)

    Returns:
        New list with converted period dicts (input list is not mutated)
    """
    precision = _SUBUNIT_PRECISION if factor == _SUBUNIT_FACTOR else _BASE_PRECISION
    # Every field here is a currency amount; percentage fields are left as-is
    currency_fields = (
        "price_mean",
        "price_median",
        "price_min",
        "price_max",
        "price_spread",
        "period_price_diff_from_daily_min",
        "period_price_diff_from_daily_max",
    )
    converted_periods = []
    for period in period_summaries:
        item = period.copy()
        for field in currency_fields:
            if field in item:
                item[field] = round(item[field] * factor, precision)
        converted_periods.append(item)
    return converted_periods
def build_final_attributes_simple(
    current_period: dict | None,
    period_summaries: list[dict],
    *,
    time: TibberPricesTimeService,
    config_entry: TibberPricesConfigEntry,
) -> dict:
    """
    Assemble the final attributes dict from the coordinator's period summaries.

    All calculations were already done in the coordinator; this function only:
    1. Stamps the current time, floored to the last quarter hour
    2. Expands the current/next period into the documented attribute groups
    3. Appends the nested period summaries
    4. Converts currency values to display units per user configuration

    Attribute groups follow the documented priority order:
    1. Time information (timestamp, start, end, duration)
    2. Core decision attributes (level, rating_level, rating_difference_%)
    3. Price statistics (price_mean/median/min/max/spread, volatility)
    4. Price differences (period_price_diff_from_daily_min/max and _%)
    5. Detail information (interval count, position, totals, remaining)
    6. Relaxation information - only when the period was relaxed
    7. Meta information (periods list)

    Args:
        current_period: The current or next period (already complete from coordinator)
        period_summaries: All period summaries from coordinator
        time: TibberPricesTimeService instance (required)
        config_entry: Config entry for display unit configuration

    Returns:
        Complete attributes dict with all fields
    """
    now = time.now()
    timestamp = now.replace(minute=(now.minute // 15) * 15, second=0, microsecond=0)

    # 100 for subunit currency (ct/øre), 1 for base currency
    factor = get_display_unit_factor(config_entry)
    converted_periods = _convert_periods_to_display_units(period_summaries, factor)

    if not current_period:
        # No current/next period - expose only timestamp and the full list
        return {"timestamp": timestamp, "periods": converted_periods}

    attributes: dict = {}
    add_time_attributes(attributes, current_period, timestamp)
    add_decision_attributes(attributes, current_period)
    add_price_attributes(attributes, current_period, factor)
    add_comparison_attributes(attributes, current_period, factor)
    add_detail_attributes(attributes, current_period)
    add_relaxation_attributes(attributes, current_period)
    attributes["periods"] = converted_periods
    return attributes
async def build_async_extra_state_attributes(  # noqa: PLR0913
    entity_key: str,
    translation_key: str | None,
    hass: HomeAssistant,
    *,
    time: TibberPricesTimeService,
    config_entry: TibberPricesConfigEntry,
    sensor_attrs: dict | None = None,
    is_on: bool | None = None,
) -> dict | None:
    """
    Build async extra state attributes for binary sensors.

    Adds icon_color and translated descriptions.

    Args:
        entity_key: Entity key (e.g., "best_price_period")
        translation_key: Translation key for entity
        hass: Home Assistant instance
        time: TibberPricesTimeService instance (required)
        config_entry: Config entry with options (keyword-only)
        sensor_attrs: Sensor-specific attributes (keyword-only)
        is_on: Binary sensor state (keyword-only)

    Returns:
        Complete attributes dict with descriptions
    """
    # Default reference time: now rounded to the nearest quarter hour, so all
    # binary sensors share a consistent calculation timestamp. Individual
    # sensors may override it via sensor_attrs.
    attributes: dict = {"timestamp": time.round_to_nearest_quarter(time.now())}

    if sensor_attrs:
        # Strip internal fields (leading underscore) before exposing to users;
        # merged attrs may override the default timestamp.
        attributes.update({key: value for key, value in sensor_attrs.items() if not key.startswith("_")})

    # icon_color only applies to best/peak price period sensors (shared utility decides)
    add_icon_color_attribute(attributes, entity_key, is_on=is_on)

    # Description attributes always go last (central utility)
    from ..entity_utils import async_add_description_attributes  # noqa: PLC0415, TID252

    await async_add_description_attributes(
        attributes,
        "binary_sensor",
        translation_key,
        hass,
        config_entry,
        position="end",
    )
    return attributes if attributes else None
def build_sync_extra_state_attributes(  # noqa: PLR0913
    entity_key: str,
    translation_key: str | None,
    hass: HomeAssistant,
    *,
    time: TibberPricesTimeService,
    config_entry: TibberPricesConfigEntry,
    sensor_attrs: dict | None = None,
    is_on: bool | None = None,
) -> dict | None:
    """
    Build synchronous extra state attributes for binary sensors.

    Adds icon_color and cached translated descriptions.

    Args:
        entity_key: Entity key (e.g., "best_price_period")
        translation_key: Translation key for entity
        hass: Home Assistant instance
        time: TibberPricesTimeService instance (required)
        config_entry: Config entry with options (keyword-only)
        sensor_attrs: Sensor-specific attributes (keyword-only)
        is_on: Binary sensor state (keyword-only)

    Returns:
        Complete attributes dict with cached descriptions
    """
    # Default reference time: now rounded to the nearest quarter hour, so all
    # binary sensors share a consistent calculation timestamp. Individual
    # sensors may override it via sensor_attrs.
    attributes: dict = {"timestamp": time.round_to_nearest_quarter(time.now())}

    if sensor_attrs:
        # Strip internal fields (leading underscore) before exposing to users;
        # merged attrs may override the default timestamp.
        attributes.update({key: value for key, value in sensor_attrs.items() if not key.startswith("_")})

    # icon_color only applies to best/peak price period sensors (shared utility decides)
    add_icon_color_attribute(attributes, entity_key, is_on=is_on)

    # Description attributes always go last (central utility, cached translations)
    from ..entity_utils import add_description_attributes  # noqa: PLC0415, TID252

    add_description_attributes(
        attributes,
        "binary_sensor",
        translation_key,
        hass,
        config_entry,
        position="end",
    )
    return attributes if attributes else None

View file

@ -1,438 +0,0 @@
"""Binary sensor core class for tibber_prices."""
from __future__ import annotations
from typing import TYPE_CHECKING
from custom_components.tibber_prices.coordinator import TIME_SENSITIVE_ENTITY_KEYS
from custom_components.tibber_prices.coordinator.core import get_connection_state
from custom_components.tibber_prices.coordinator.helpers import get_intervals_for_day_offsets
from custom_components.tibber_prices.entity import TibberPricesEntity
from custom_components.tibber_prices.entity_utils import get_binary_sensor_icon
from homeassistant.components.binary_sensor import (
BinarySensorEntity,
BinarySensorEntityDescription,
)
from homeassistant.core import callback
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.restore_state import RestoreEntity
from .attributes import (
build_async_extra_state_attributes,
build_sync_extra_state_attributes,
get_price_intervals_attributes,
get_tomorrow_data_available_attributes,
)
if TYPE_CHECKING:
from collections.abc import Callable
from custom_components.tibber_prices.coordinator import (
TibberPricesDataUpdateCoordinator,
)
from custom_components.tibber_prices.coordinator.time_service import TibberPricesTimeService
class TibberPricesBinarySensor(TibberPricesEntity, BinarySensorEntity, RestoreEntity):
    """tibber_prices binary_sensor class with state restoration.

    One instance is created per entry in the platform's ENTITY_DESCRIPTIONS.
    The entity key selects a dedicated state-getter in _get_value_getter();
    time-sensitive sensors additionally subscribe to the coordinator's
    quarter-hour update callback ("Timer #2").
    """

    # Attributes excluded from recorder history
    # See: https://developers.home-assistant.io/docs/core/entity/#excluding-state-attributes-from-recorder-history
    _unrecorded_attributes = frozenset(
        {
            "timestamp",
            # Descriptions/Help Text (static, large)
            "description",
            "usage_tips",
            # Large Nested Structures
            "periods",  # Array of all period summaries
            # Frequently Changing Diagnostics
            "icon_color",
            "data_status",
            # Static/Rarely Changing
            "level_value",
            "rating_value",
            "level_id",
            "rating_id",
            # Relaxation Details
            "relaxation_level",
            "relaxation_threshold_original_%",
            "relaxation_threshold_applied_%",
            # Redundant/Derived
            "price_spread",
            "volatility",
            "rating_difference_%",
            "period_price_diff_from_daily_min",
            "period_price_diff_from_daily_min_%",
            "periods_total",
            "periods_remaining",
        }
    )

    def __init__(
        self,
        coordinator: TibberPricesDataUpdateCoordinator,
        entity_description: BinarySensorEntityDescription,
    ) -> None:
        """Initialize the binary_sensor class.

        Args:
            coordinator: Shared data update coordinator for this config entry.
            entity_description: Static description (key, icon, translation key).
        """
        super().__init__(coordinator)
        self.entity_description = entity_description
        self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{entity_description.key}"
        # Resolved once: callable that computes is_on for this sensor's key
        self._state_getter: Callable | None = self._get_value_getter()
        # Unsubscribe callback for time-sensitive updates; set in async_added_to_hass
        self._time_sensitive_remove_listener: Callable | None = None

    async def async_added_to_hass(self) -> None:
        """When entity is added to hass."""
        await super().async_added_to_hass()
        # Restore last state if available
        if (last_state := await self.async_get_last_state()) is not None and last_state.state in ("on", "off"):
            # Restore binary state (on/off) - will be used until first coordinator update
            self._attr_is_on = last_state.state == "on"
        # Register with coordinator for time-sensitive updates if applicable
        if self.entity_description.key in TIME_SENSITIVE_ENTITY_KEYS:
            self._time_sensitive_remove_listener = self.coordinator.async_add_time_sensitive_listener(
                self._handle_time_sensitive_update
            )

    async def async_will_remove_from_hass(self) -> None:
        """When entity will be removed from hass."""
        await super().async_will_remove_from_hass()
        # Remove time-sensitive listener if registered (avoids dangling callbacks)
        if self._time_sensitive_remove_listener:
            self._time_sensitive_remove_listener()
            self._time_sensitive_remove_listener = None

    @callback
    def _handle_time_sensitive_update(self, time_service: TibberPricesTimeService) -> None:
        """
        Handle time-sensitive update from coordinator.

        Args:
            time_service: TibberPricesTimeService instance with reference time for this update cycle
        """
        # Store TimeService from Timer #2 for calculations during this update cycle
        self.coordinator.time = time_service
        self.async_write_ha_state()

    def _get_value_getter(self) -> Callable | None:
        """Return the appropriate value getter method based on the sensor type.

        Returns None for keys without a dedicated getter (is_on then stays None).
        """
        key = self.entity_description.key
        state_getters = {
            "peak_price_period": self._peak_price_state,
            "best_price_period": self._best_price_state,
            "connection": lambda: get_connection_state(self.coordinator),
            "tomorrow_data_available": self._tomorrow_data_available_state,
            "has_ventilation_system": self._has_ventilation_system_state,
            "realtime_consumption_enabled": self._realtime_consumption_enabled_state,
        }
        return state_getters.get(key)

    def _best_price_state(self) -> bool | None:
        """Return True if the current time is within a best price period.

        None when coordinator has no data; False when no period is active.
        """
        if not self.coordinator.data:
            return None
        attrs = get_price_intervals_attributes(
            self.coordinator.data,
            reverse_sort=False,
            time=self.coordinator.time,
            config_entry=self.coordinator.config_entry,
        )
        if not attrs:
            return False  # Should not happen, but safety fallback
        start = attrs.get("start")
        end = attrs.get("end")
        if not start or not end:
            return False  # No period found = sensor is off
        time = self.coordinator.time
        return time.is_time_in_period(start, end)

    def _peak_price_state(self) -> bool | None:
        """Return True if the current time is within a peak price period.

        Mirrors _best_price_state with reverse_sort=True (highest prices first).
        """
        if not self.coordinator.data:
            return None
        attrs = get_price_intervals_attributes(
            self.coordinator.data,
            reverse_sort=True,
            time=self.coordinator.time,
            config_entry=self.coordinator.config_entry,
        )
        if not attrs:
            return False  # Should not happen, but safety fallback
        start = attrs.get("start")
        end = attrs.get("end")
        if not start or not end:
            return False  # No period found = sensor is off
        time = self.coordinator.time
        return time.is_time_in_period(start, end)

    def _tomorrow_data_available_state(self) -> bool | None:
        """Return True if tomorrow's data is fully available, False if not, None if unknown."""
        # Auth errors: Cannot reliably check - return unknown
        # User must fix auth via reauth flow before we can determine tomorrow data availability
        if isinstance(self.coordinator.last_exception, ConfigEntryAuthFailed):
            return None
        # No data: unknown state (initializing or error)
        if not self.coordinator.data:
            return None
        # Check tomorrow data availability (normal operation)
        tomorrow_prices = get_intervals_for_day_offsets(self.coordinator.data, [1])
        tomorrow_date = self.coordinator.time.get_local_date(offset_days=1)
        interval_count = len(tomorrow_prices)
        # Get expected intervals for tomorrow (handles DST)
        expected_intervals = self.coordinator.time.get_expected_intervals_for_day(tomorrow_date)
        if interval_count == expected_intervals:
            return True
        if interval_count == 0:
            return False
        # NOTE(review): this branch and the one above both return False, so
        # partial data is treated the same as no data; they could be collapsed.
        return False

    @property
    def available(self) -> bool:
        """
        Return if entity is available.

        Override base implementation for connection sensor which should
        always be available to show connection state.
        """
        # Connection sensor is always available (shows connection state)
        if self.entity_description.key == "connection":
            return True
        # All other binary sensors use base availability logic
        return super().available

    @property
    def force_update(self) -> bool:
        """
        Force update for connection sensor to record all state changes.

        Connection sensor should write every state change to history,
        even if the state (on/off) is the same, to track connectivity issues.
        """
        return self.entity_description.key == "connection"

    def _has_ventilation_system_state(self) -> bool | None:
        """Return True if the home has a ventilation system.

        Reads ``hasVentilationSystem`` from the matching home in the
        coordinator's user-home list; None when anything is missing.
        """
        if not self.coordinator.data:
            return None
        user_homes = self.coordinator.get_user_homes()
        if not user_homes:
            return None
        home_id = self.coordinator.config_entry.data.get("home_id")
        if not home_id:
            return None
        home_data = next((home for home in user_homes if home.get("id") == home_id), None)
        if not home_data:
            return None
        value = home_data.get("hasVentilationSystem")
        # Guard against non-boolean API payloads
        return value if isinstance(value, bool) else None

    def _realtime_consumption_enabled_state(self) -> bool | None:
        """Return True if realtime consumption is enabled.

        Reads ``features.realTimeConsumptionEnabled`` from the matching home;
        None when anything along that path is missing or not a boolean.
        """
        if not self.coordinator.data:
            return None
        user_homes = self.coordinator.get_user_homes()
        if not user_homes:
            return None
        home_id = self.coordinator.config_entry.data.get("home_id")
        if not home_id:
            return None
        home_data = next((home for home in user_homes if home.get("id") == home_id), None)
        if not home_data:
            return None
        features = home_data.get("features")
        if not features:
            return None
        value = features.get("realTimeConsumptionEnabled")
        # Guard against non-boolean API payloads
        return value if isinstance(value, bool) else None

    def _get_tomorrow_data_available_attributes(self) -> dict | None:
        """Return attributes for tomorrow_data_available binary sensor."""
        return get_tomorrow_data_available_attributes(self.coordinator.data, time=self.coordinator.time)

    def _get_sensor_attributes(self) -> dict | None:
        """
        Get sensor-specific attributes.

        Returns a dictionary of sensor-specific attributes, or None if no
        attributes are needed.
        """
        key = self.entity_description.key
        if key == "peak_price_period":
            return get_price_intervals_attributes(
                self.coordinator.data,
                reverse_sort=True,
                time=self.coordinator.time,
                config_entry=self.coordinator.config_entry,
            )
        if key == "best_price_period":
            return get_price_intervals_attributes(
                self.coordinator.data,
                reverse_sort=False,
                time=self.coordinator.time,
                config_entry=self.coordinator.config_entry,
            )
        if key == "tomorrow_data_available":
            return self._get_tomorrow_data_available_attributes()
        return None

    @callback
    def _handle_coordinator_update(self) -> None:
        """Handle updated data from the coordinator."""
        # All binary sensors get push updates when coordinator has new data:
        # - tomorrow_data_available: Reflects new data availability immediately after API fetch
        # - connection: Reflects connection state changes immediately
        # - chart_data_export: Updates chart data when price data changes
        # - peak_price_period, best_price_period: Update when periods change (also get Timer #2 updates)
        # - data_lifecycle_status: Gets both push and Timer #2 updates
        self.async_write_ha_state()

    @property
    def is_on(self) -> bool | None:
        """Return true if the binary_sensor is on.

        Delegates to the key-specific getter; errors are logged and reported
        as unknown (None) rather than raised into HA's state machine.
        """
        try:
            if not self.coordinator.data or not self._state_getter:
                return None
            return self._state_getter()
        except (KeyError, ValueError, TypeError) as ex:
            self.coordinator.logger.exception(
                "Error getting binary sensor state",
                extra={
                    "error": str(ex),
                    "entity": self.entity_description.key,
                },
            )
            return None

    @property
    def icon(self) -> str | None:
        """Return the icon based on binary sensor state."""
        key = self.entity_description.key
        # Use shared icon utility
        icon = get_binary_sensor_icon(
            key,
            is_on=self.is_on,
            has_future_periods_callback=self._has_future_periods,
        )
        # Fall back to static icon from entity description
        return icon or self.entity_description.icon

    def _has_future_periods(self) -> bool:
        """
        Check if there are any future periods.

        Returns True if any period starts in the future (no time limit).
        This ensures icons show "waiting" state whenever periods are scheduled.
        """
        attrs = self._get_sensor_attributes()
        if not attrs or "periods" not in attrs:
            return False
        time = self.coordinator.time
        periods = attrs.get("periods", [])
        # Check if any period starts in the future (no time limit)
        for period in periods:
            start_str = period.get("start")
            if start_str:
                # Already datetime object (periods come from coordinator.data)
                start_time = start_str if not isinstance(start_str, str) else time.parse_datetime(start_str)
                # Period starts in the future
                if start_time and time.is_in_future(start_time):
                    return True
        return False

    @property
    async def async_extra_state_attributes(self) -> dict | None:
        """Return additional state attributes asynchronously."""
        try:
            # Get the sensor-specific attributes
            if not self.coordinator.data:
                return None
            sensor_attrs = self._get_sensor_attributes()
            # Use extracted function to build all attributes
            return await build_async_extra_state_attributes(
                self.entity_description.key,
                self.entity_description.translation_key,
                self.hass,
                config_entry=self.coordinator.config_entry,
                sensor_attrs=sensor_attrs,
                is_on=self.is_on,
                time=self.coordinator.time,
            )
        except (KeyError, ValueError, TypeError) as ex:
            self.coordinator.logger.exception(
                "Error getting binary sensor attributes",
                extra={
                    "error": str(ex),
                    "entity": self.entity_description.key,
                },
            )
            return None

    @property
    def extra_state_attributes(self) -> dict | None:
        """Return additional state attributes synchronously."""
        try:
            # Get the sensor-specific attributes
            if not self.coordinator.data:
                return None
            sensor_attrs = self._get_sensor_attributes()
            # Use extracted function to build all attributes
            return build_sync_extra_state_attributes(
                self.entity_description.key,
                self.entity_description.translation_key,
                self.hass,
                config_entry=self.coordinator.config_entry,
                sensor_attrs=sensor_attrs,
                is_on=self.is_on,
                time=self.coordinator.time,
            )
        except (KeyError, ValueError, TypeError) as ex:
            self.coordinator.logger.exception(
                "Error getting binary sensor attributes",
                extra={
                    "error": str(ex),
                    "entity": self.entity_description.key,
                },
            )
            return None

    async def async_update(self) -> None:
        """Force a refresh when homeassistant.update_entity is called."""
        # Always refresh coordinator data
        await self.coordinator.async_request_refresh()

View file

@ -1,61 +0,0 @@
"""Binary sensor entity descriptions for tibber_prices."""
from __future__ import annotations
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
BinarySensorEntityDescription,
)
from homeassistant.const import EntityCategory
# Period lookahead removed - icons show "waiting" state if ANY future periods exist
# No artificial time limit - show all periods until midnight
# Catalog of all binary sensors created by async_setup_entry (one entity per entry).
# Keys here must match the state-getter mapping in the binary sensor core class.
ENTITY_DESCRIPTIONS = (
    # Price-window sensors: on while "now" falls inside a computed period
    BinarySensorEntityDescription(
        key="peak_price_period",
        translation_key="peak_price_period",
        name="Peak Price Interval",
        icon="mdi:clock-alert",
    ),
    BinarySensorEntityDescription(
        key="best_price_period",
        translation_key="best_price_period",
        name="Best Price Interval",
        icon="mdi:clock-check",
    ),
    # Diagnostic: reflects Tibber API connectivity
    BinarySensorEntityDescription(
        key="connection",
        translation_key="connection",
        name="Tibber API Connection",
        device_class=BinarySensorDeviceClass.CONNECTIVITY,
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    # Diagnostic: whether tomorrow's price data has been fetched
    BinarySensorEntityDescription(
        key="tomorrow_data_available",
        translation_key="tomorrow_data_available",
        name="Tomorrow's Data Available",
        icon="mdi:calendar-check",
        device_class=None,  # No specific device_class = shows generic "On/Off"
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=True,  # Critical for automations
    ),
    # Diagnostic home-metadata sensors, disabled by default
    BinarySensorEntityDescription(
        key="has_ventilation_system",
        translation_key="has_ventilation_system",
        name="Has Ventilation System",
        icon="mdi:air-filter",
        device_class=None,  # No specific device_class = shows generic "On/Off"
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=False,
    ),
    BinarySensorEntityDescription(
        key="realtime_consumption_enabled",
        translation_key="realtime_consumption_enabled",
        name="Realtime Consumption Enabled",
        icon="mdi:speedometer",
        device_class=None,  # No specific device_class = shows generic "On/Off"
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=False,
    ),
)

View file

@ -1,173 +0,0 @@
"""
Type definitions for Tibber Prices binary sensor attributes.
These TypedDict definitions serve as **documentation** of the attribute structure
for each binary sensor type. They enable IDE autocomplete and type checking when
working with attribute dictionaries.
NOTE: In function signatures, we still use dict[str, Any] for flexibility,
but these TypedDict definitions document what keys and types are expected.
IMPORTANT: PriceLevel and PriceRating types are duplicated here to avoid
cross-platform dependencies. Keep in sync with sensor/types.py.
"""
from __future__ import annotations
from typing import Literal, TypedDict
# ============================================================================
# Literal Type Definitions (Duplicated from sensor/types.py)
# ============================================================================
# SYNC: Keep these in sync with:
# 1. sensor/types.py (Literal type definitions)
# 2. const.py (runtime string constants - single source of truth)
#
# const.py defines:
# - PRICE_LEVEL_VERY_CHEAP, PRICE_LEVEL_CHEAP, etc.
# - PRICE_RATING_LOW, PRICE_RATING_NORMAL, etc.
#
# These types are intentionally duplicated here to avoid cross-platform imports.
# Binary sensor attributes need these types for type safety without importing
# from sensor/ package (maintains platform separation).
# Price level literals (shared with sensor platform - keep in sync!)
# Five-step classification for a single price interval; values mirror the
# string constants defined in const.py (the runtime source of truth).
PriceLevel = Literal[
    "VERY_CHEAP",
    "CHEAP",
    "NORMAL",
    "EXPENSIVE",
    "VERY_EXPENSIVE",
]
# Price rating literals (shared with sensor platform - keep in sync!)
# Coarser three-step rating; values mirror PRICE_RATING_* in const.py.
PriceRating = Literal[
    "LOW",
    "NORMAL",
    "HIGH",
]
class BaseAttributes(TypedDict, total=False):
    """
    Base attributes common to all binary sensors.

    All binary sensor attributes include at minimum:
    - timestamp: ISO 8601 string indicating when the state/attributes are valid
    - error: Optional error message if something went wrong

    total=False: every key is optional; producers omit keys that do not apply.
    """

    # ISO 8601 timestamp marking when the state/attributes were computed.
    timestamp: str
    # Error message; only present when attribute computation failed.
    error: str
class TomorrowDataAvailableAttributes(BaseAttributes, total=False):
    """
    Attributes for tomorrow_data_available binary sensor.

    Indicates whether tomorrow's price data is available from Tibber API.
    Inherits the optional timestamp/error keys from BaseAttributes.
    """

    # Number of price intervals already received for tomorrow.
    intervals_available: int
    data_status: Literal["none", "partial", "full"]  # Data completeness status
class PeriodSummary(TypedDict, total=False):
    """
    Structure for period summary nested in period attributes.

    Each period summary contains all calculated information about one period.
    One entry per detected period appears in PeriodAttributes.periods,
    sorted by start time. All keys are optional (total=False).
    """

    # Time information (priority 1)
    start: str  # ISO 8601 timestamp of period start
    end: str  # ISO 8601 timestamp of period end
    duration_minutes: int  # Duration in minutes
    # Core decision attributes (priority 2)
    level: PriceLevel  # Price level classification
    rating_level: PriceRating  # Price rating classification
    rating_difference_pct: float  # Difference from daily average (%)
    # Price statistics (priority 3)
    price_mean: float  # Arithmetic mean price in period
    price_median: float  # Median price in period
    price_min: float  # Minimum price in period
    price_max: float  # Maximum price in period
    price_spread: float  # Price spread (max - min)
    volatility: float  # Price volatility within period
    # Price comparison (priority 4)
    period_price_diff_from_daily_min: float  # Difference from daily min
    period_price_diff_from_daily_min_pct: float  # Difference from daily min (%)
    # Detail information (priority 5)
    period_interval_count: int  # Number of intervals in period
    period_position: int  # Period position (1-based)
    periods_total: int  # Total number of periods
    periods_remaining: int  # Remaining periods after this one
    # Relaxation information (priority 6 - only if period was relaxed)
    relaxation_active: bool  # Whether this period was found via relaxation
    relaxation_level: int  # Relaxation level used (1-based)
    relaxation_threshold_original_pct: float  # Original flex threshold (%)
    relaxation_threshold_applied_pct: float  # Applied flex threshold after relaxation (%)
class PeriodAttributes(BaseAttributes, total=False):
    """
    Attributes for period-based binary sensors (best_price_period, peak_price_period).

    These sensors indicate whether the current/next cheap/expensive period is active.

    Attributes follow priority ordering:
    1. Time information (timestamp, start, end, duration_minutes)
    2. Core decision attributes (level, rating_level, rating_difference_%)
    3. Price statistics (price_mean, price_median, price_min, price_max, price_spread, volatility)
    4. Price comparison (period_price_diff_from_daily_min, period_price_diff_from_daily_min_%)
    5. Detail information (period_interval_count, period_position, periods_total, periods_remaining)
    6. Relaxation information (only if period was relaxed)
    7. Meta information (periods list)

    Top-level keys describe the current/next period; the full set of detected
    periods is nested under `periods` as PeriodSummary entries.
    """

    # Time information (priority 1) - start/end refer to current/next period
    start: str | None  # ISO 8601 timestamp of current/next period start
    end: str | None  # ISO 8601 timestamp of current/next period end
    duration_minutes: int  # Duration of current/next period in minutes
    # Core decision attributes (priority 2)
    level: PriceLevel  # Price level of current/next period
    rating_level: PriceRating  # Price rating of current/next period
    rating_difference_pct: float  # Difference from daily average (%)
    # Price statistics (priority 3)
    price_mean: float  # Arithmetic mean price in current/next period
    price_median: float  # Median price in current/next period
    price_min: float  # Minimum price in current/next period
    price_max: float  # Maximum price in current/next period
    price_spread: float  # Price spread (max - min) in current/next period
    volatility: float  # Price volatility within current/next period
    # Price comparison (priority 4)
    period_price_diff_from_daily_min: float  # Difference from daily min
    period_price_diff_from_daily_min_pct: float  # Difference from daily min (%)
    # Detail information (priority 5)
    period_interval_count: int  # Number of intervals in current/next period
    period_position: int  # Period position (1-based)
    periods_total: int  # Total number of periods found
    periods_remaining: int  # Remaining periods after current/next one
    # Relaxation information (priority 6 - only if period was relaxed)
    relaxation_active: bool  # Whether current/next period was found via relaxation
    relaxation_level: int  # Relaxation level used (1-based)
    relaxation_threshold_original_pct: float  # Original flex threshold (%)
    relaxation_threshold_applied_pct: float  # Applied flex threshold after relaxation (%)
    # Meta information (priority 7)
    periods: list[PeriodSummary]  # All periods found (sorted by start time)


# Union type for all binary sensor attributes (for documentation purposes)
# In actual code, use dict[str, Any] for flexibility
BinarySensorAttributes = TomorrowDataAvailableAttributes | PeriodAttributes

View file

@ -1,54 +1,880 @@
""" """Adds config flow for tibber_prices."""
Config flow for Tibber Prices integration.
This module serves as the entry point for Home Assistant's config flow discovery.
The actual implementation is in the config_flow_handlers package.
"""
from __future__ import annotations from __future__ import annotations
from .config_flow_handlers.options_flow import ( from typing import Any, ClassVar
TibberPricesOptionsFlowHandler as OptionsFlowHandler,
import voluptuous as vol
from homeassistant.config_entries import (
ConfigEntry,
ConfigFlow,
ConfigFlowResult,
ConfigSubentryFlow,
OptionsFlow,
SubentryFlowResult,
) )
from .config_flow_handlers.schemas import ( from homeassistant.const import CONF_ACCESS_TOKEN
get_best_price_schema, from homeassistant.core import callback
get_options_init_schema, from homeassistant.helpers.aiohttp_client import async_create_clientsession
get_peak_price_schema, from homeassistant.helpers.selector import (
get_price_level_schema, BooleanSelector,
get_price_rating_schema, NumberSelector,
get_price_trend_schema, NumberSelectorConfig,
get_reauth_confirm_schema, NumberSelectorMode,
get_select_home_schema, SelectOptionDict,
get_subentry_init_schema, SelectSelector,
get_user_schema, SelectSelectorConfig,
get_volatility_schema, SelectSelectorMode,
TextSelector,
TextSelectorConfig,
TextSelectorType,
) )
from .config_flow_handlers.subentry_flow import ( from homeassistant.loader import async_get_integration
TibberPricesSubentryFlowHandler as SubentryFlowHandler,
from .api import (
TibberPricesApiClient,
TibberPricesApiClientAuthenticationError,
TibberPricesApiClientCommunicationError,
TibberPricesApiClientError,
) )
from .config_flow_handlers.user_flow import TibberPricesConfigFlowHandler as ConfigFlow from .const import (
from .config_flow_handlers.validators import ( BEST_PRICE_MAX_LEVEL_OPTIONS,
TibberPricesCannotConnectError, CONF_BEST_PRICE_FLEX,
TibberPricesInvalidAuthError, CONF_BEST_PRICE_MAX_LEVEL,
validate_api_token, CONF_BEST_PRICE_MIN_DISTANCE_FROM_AVG,
CONF_BEST_PRICE_MIN_PERIOD_LENGTH,
CONF_BEST_PRICE_MIN_VOLATILITY,
CONF_EXTENDED_DESCRIPTIONS,
CONF_PEAK_PRICE_FLEX,
CONF_PEAK_PRICE_MIN_DISTANCE_FROM_AVG,
CONF_PEAK_PRICE_MIN_LEVEL,
CONF_PEAK_PRICE_MIN_PERIOD_LENGTH,
CONF_PEAK_PRICE_MIN_VOLATILITY,
CONF_PRICE_RATING_THRESHOLD_HIGH,
CONF_PRICE_RATING_THRESHOLD_LOW,
CONF_PRICE_TREND_THRESHOLD_FALLING,
CONF_PRICE_TREND_THRESHOLD_RISING,
CONF_VOLATILITY_THRESHOLD_HIGH,
CONF_VOLATILITY_THRESHOLD_MODERATE,
CONF_VOLATILITY_THRESHOLD_VERY_HIGH,
DEFAULT_BEST_PRICE_FLEX,
DEFAULT_BEST_PRICE_MAX_LEVEL,
DEFAULT_BEST_PRICE_MIN_DISTANCE_FROM_AVG,
DEFAULT_BEST_PRICE_MIN_PERIOD_LENGTH,
DEFAULT_BEST_PRICE_MIN_VOLATILITY,
DEFAULT_EXTENDED_DESCRIPTIONS,
DEFAULT_PEAK_PRICE_FLEX,
DEFAULT_PEAK_PRICE_MIN_DISTANCE_FROM_AVG,
DEFAULT_PEAK_PRICE_MIN_LEVEL,
DEFAULT_PEAK_PRICE_MIN_PERIOD_LENGTH,
DEFAULT_PEAK_PRICE_MIN_VOLATILITY,
DEFAULT_PRICE_RATING_THRESHOLD_HIGH,
DEFAULT_PRICE_RATING_THRESHOLD_LOW,
DEFAULT_PRICE_TREND_THRESHOLD_FALLING,
DEFAULT_PRICE_TREND_THRESHOLD_RISING,
DEFAULT_VOLATILITY_THRESHOLD_HIGH,
DEFAULT_VOLATILITY_THRESHOLD_MODERATE,
DEFAULT_VOLATILITY_THRESHOLD_VERY_HIGH,
DOMAIN,
LOGGER,
MIN_VOLATILITY_FOR_PERIODS_OPTIONS,
PEAK_PRICE_MIN_LEVEL_OPTIONS,
) )
__all__ = [
"ConfigFlow", class TibberPricesFlowHandler(ConfigFlow, domain=DOMAIN):
"OptionsFlowHandler", """Config flow for tibber_prices."""
"SubentryFlowHandler",
"TibberPricesCannotConnectError", VERSION = 1
"TibberPricesInvalidAuthError", MINOR_VERSION = 0
"get_best_price_schema",
"get_options_init_schema", def __init__(self) -> None:
"get_peak_price_schema", """Initialize the config flow."""
"get_price_level_schema", super().__init__()
"get_price_rating_schema", self._reauth_entry: ConfigEntry | None = None
"get_price_trend_schema", self._viewer: dict | None = None
"get_reauth_confirm_schema", self._access_token: str | None = None
"get_select_home_schema", self._user_name: str | None = None
"get_subentry_init_schema", self._user_login: str | None = None
"get_user_schema", self._user_id: str | None = None
"get_volatility_schema",
"validate_api_token", @classmethod
] @callback
def async_get_supported_subentry_types(
cls,
config_entry: ConfigEntry, # noqa: ARG003
) -> dict[str, type[ConfigSubentryFlow]]:
"""Return subentries supported by this integration."""
return {"home": TibberPricesSubentryFlowHandler}
@staticmethod
@callback
def async_get_options_flow(_config_entry: ConfigEntry) -> OptionsFlow:
"""Create an options flow for this configentry."""
return TibberPricesOptionsFlowHandler()
def is_matching(self, other_flow: dict) -> bool:
"""Return True if match_dict matches this flow."""
return bool(other_flow.get("domain") == DOMAIN)
async def async_step_reauth(self, entry_data: dict[str, Any]) -> ConfigFlowResult: # noqa: ARG002
"""Handle reauth flow when access token becomes invalid."""
entry_id = self.context.get("entry_id")
if entry_id:
self._reauth_entry = self.hass.config_entries.async_get_entry(entry_id)
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(self, user_input: dict | None = None) -> ConfigFlowResult:
"""Confirm reauth dialog - prompt for new access token."""
_errors = {}
if user_input is not None:
try:
viewer = await self._get_viewer_details(access_token=user_input[CONF_ACCESS_TOKEN])
except TibberPricesApiClientAuthenticationError as exception:
LOGGER.warning(exception)
_errors["base"] = "auth"
except TibberPricesApiClientCommunicationError as exception:
LOGGER.error(exception)
_errors["base"] = "connection"
except TibberPricesApiClientError as exception:
LOGGER.exception(exception)
_errors["base"] = "unknown"
else:
# Validate that the new token has access to all configured homes
if self._reauth_entry:
# Get all configured home IDs (main entry + subentries)
configured_home_ids = self._get_all_configured_home_ids(self._reauth_entry)
# Get accessible home IDs from the new token
accessible_homes = viewer.get("homes", [])
accessible_home_ids = {home["id"] for home in accessible_homes}
# Check if all configured homes are accessible with the new token
missing_home_ids = configured_home_ids - accessible_home_ids
if missing_home_ids:
# New token doesn't have access to all configured homes
LOGGER.error(
"New access token missing access to configured homes: %s",
", ".join(missing_home_ids),
)
_errors["base"] = "missing_homes"
else:
# Update the config entry with the new access token
self.hass.config_entries.async_update_entry(
self._reauth_entry,
data={
**self._reauth_entry.data,
CONF_ACCESS_TOKEN: user_input[CONF_ACCESS_TOKEN],
},
)
await self.hass.config_entries.async_reload(self._reauth_entry.entry_id)
return self.async_abort(reason="reauth_successful")
return self.async_show_form(
step_id="reauth_confirm",
data_schema=vol.Schema(
{
vol.Required(CONF_ACCESS_TOKEN): TextSelector(
TextSelectorConfig(type=TextSelectorType.TEXT),
),
}
),
errors=_errors,
)
async def async_step_user(
self,
user_input: dict | None = None,
) -> ConfigFlowResult:
"""Handle a flow initialized by the user. Only ask for access token."""
_errors = {}
if user_input is not None:
try:
viewer = await self._get_viewer_details(access_token=user_input[CONF_ACCESS_TOKEN])
except TibberPricesApiClientAuthenticationError as exception:
LOGGER.warning(exception)
_errors["base"] = "auth"
except TibberPricesApiClientCommunicationError as exception:
LOGGER.error(exception)
_errors["base"] = "connection"
except TibberPricesApiClientError as exception:
LOGGER.exception(exception)
_errors["base"] = "unknown"
else:
user_id = viewer.get("userId", None)
user_name = viewer.get("name") or user_id or "Unknown User"
user_login = viewer.get("login", "N/A")
homes = viewer.get("homes", [])
if not user_id:
LOGGER.error("No user ID found: %s", viewer)
return self.async_abort(reason="unknown")
if not homes:
LOGGER.error("No homes found: %s", viewer)
return self.async_abort(reason="unknown")
LOGGER.debug("Viewer data received: %s", viewer)
await self.async_set_unique_id(unique_id=str(user_id))
self._abort_if_unique_id_configured()
# Store viewer data in the flow for use in the next step
self._viewer = viewer
self._access_token = user_input[CONF_ACCESS_TOKEN]
self._user_name = user_name
self._user_login = user_login
self._user_id = user_id
# Move to home selection step
return await self.async_step_select_home()
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(
CONF_ACCESS_TOKEN,
default=(user_input or {}).get(CONF_ACCESS_TOKEN, vol.UNDEFINED),
): TextSelector(
TextSelectorConfig(
type=TextSelectorType.TEXT,
),
),
},
),
errors=_errors,
)
async def async_step_select_home(self, user_input: dict | None = None) -> ConfigFlowResult:
"""Handle home selection during initial setup."""
homes = self._viewer.get("homes", []) if self._viewer else []
if not homes:
return self.async_abort(reason="unknown")
if user_input is not None:
selected_home_id = user_input["home_id"]
selected_home = next((home for home in homes if home["id"] == selected_home_id), None)
if not selected_home:
return self.async_abort(reason="unknown")
data = {
CONF_ACCESS_TOKEN: self._access_token or "",
"home_id": selected_home_id,
"home_data": selected_home,
"homes": homes,
"user_login": self._user_login or "N/A",
}
return self.async_create_entry(
title=self._user_name or "Unknown User",
data=data,
description=f"{self._user_login} ({self._user_id})",
)
home_options = [
SelectOptionDict(
value=home["id"],
label=self._get_home_title(home),
)
for home in homes
]
return self.async_show_form(
step_id="select_home",
data_schema=vol.Schema(
{
vol.Required("home_id"): SelectSelector(
SelectSelectorConfig(
options=home_options,
mode=SelectSelectorMode.DROPDOWN,
)
)
}
),
)
def _get_all_configured_home_ids(self, main_entry: ConfigEntry) -> set[str]:
    """Get all configured home IDs from main entry and all subentries.

    Used during reauth to verify that a replacement token still grants
    access to every home already managed by this integration.
    """
    configured: set[str] = set()
    # The main entry may carry a home_id of its own.
    main_home_id = main_entry.data.get("home_id")
    if main_home_id:
        configured.add(main_home_id)
    # All other entries of this domain contribute their home_id as well.
    configured.update(
        entry.data["home_id"]
        for entry in self.hass.config_entries.async_entries(DOMAIN)
        if entry != main_entry and entry.data.get("home_id")
    )
    return configured
@staticmethod
def _get_home_title(home: dict) -> str:
    """Generate a user-friendly title for a home.

    Preference order: trimmed app nickname, then "address1, city" built
    from the nested address object, then the raw home id, finally a
    literal "Unknown Home" fallback.

    NOTE(review): duplicated in TibberPricesSubentryFlowHandler — consider
    extracting a shared helper.
    """
    title = home.get("appNickname")
    # A whitespace-only nickname is treated as absent.
    if title and title.strip():
        return title.strip()
    address = home.get("address", {})
    if address:
        parts = []
        if address.get("address1"):
            parts.append(address["address1"])
        if address.get("city"):
            parts.append(address["city"])
        if parts:
            return ", ".join(parts)
    return home.get("id", "Unknown Home")
async def _get_viewer_details(self, access_token: str) -> dict:
    """Validate credentials and return information about the account (viewer object).

    Raises TibberPricesApiClient* errors from .api on authentication or
    communication failure; callers map those onto form error keys.
    """
    # Integration version string is handed to the API client
    # (presumably for a user-agent / telemetry header — confirm in api.py).
    integration = await async_get_integration(self.hass, DOMAIN)
    client = TibberPricesApiClient(
        access_token=access_token,
        session=async_create_clientsession(self.hass),
        version=str(integration.version) if integration.version else "unknown",
    )
    result = await client.async_get_viewer_details()
    # The GraphQL-style response nests the account data under "viewer".
    return result["viewer"]
class TibberPricesSubentryFlowHandler(ConfigSubentryFlow):
"""Handle subentry flows for tibber_prices."""
async def async_step_user(self, user_input: dict[str, Any] | None = None) -> SubentryFlowResult:
"""User flow to add a new home."""
parent_entry = self._get_entry()
if not parent_entry or not hasattr(parent_entry, "runtime_data") or not parent_entry.runtime_data:
return self.async_abort(reason="no_parent_entry")
coordinator = parent_entry.runtime_data.coordinator
# Force refresh user data to get latest homes from Tibber API
await coordinator.refresh_user_data()
homes = coordinator.get_user_homes()
if not homes:
return self.async_abort(reason="no_available_homes")
if user_input is not None:
selected_home_id = user_input["home_id"]
selected_home = next((home for home in homes if home["id"] == selected_home_id), None)
if not selected_home:
return self.async_abort(reason="home_not_found")
home_title = self._get_home_title(selected_home)
home_id = selected_home["id"]
return self.async_create_entry(
title=home_title,
data={
"home_id": home_id,
"home_data": selected_home,
},
description=f"Subentry for {home_title}",
description_placeholders={"home_id": home_id},
unique_id=home_id,
)
# Get existing home IDs by checking all subentries for this parent
existing_home_ids = {
entry.data["home_id"]
for entry in self.hass.config_entries.async_entries(DOMAIN)
if entry.data.get("home_id") and entry != parent_entry
}
available_homes = [home for home in homes if home["id"] not in existing_home_ids]
if not available_homes:
return self.async_abort(reason="no_available_homes")
home_options = [
SelectOptionDict(
value=home["id"],
label=self._get_home_title(home),
)
for home in available_homes
]
schema = vol.Schema(
{
vol.Required("home_id"): SelectSelector(
SelectSelectorConfig(
options=home_options,
mode=SelectSelectorMode.DROPDOWN,
)
)
}
)
return self.async_show_form(
step_id="user",
data_schema=schema,
description_placeholders={},
errors={},
)
def _get_home_title(self, home: dict) -> str:
"""Generate a user-friendly title for a home."""
title = home.get("appNickname")
if title and title.strip():
return title.strip()
address = home.get("address", {})
if address:
parts = []
if address.get("address1"):
parts.append(address["address1"])
if address.get("city"):
parts.append(address["city"])
if parts:
return ", ".join(parts)
return home.get("id", "Unknown Home")
async def async_step_init(self, user_input: dict | None = None) -> SubentryFlowResult:
    """Manage the options for a subentry (reconfigure flow).

    Shows a single toggle (extended descriptions) and persists the
    submitted value onto the subentry, ending the flow.
    """
    subentry = self._get_reconfigure_subentry()
    errors: dict[str, str] = {}
    # Only option today: toggle extended entity descriptions for this home.
    options = {
        vol.Optional(
            CONF_EXTENDED_DESCRIPTIONS,
            default=subentry.data.get(CONF_EXTENDED_DESCRIPTIONS, DEFAULT_EXTENDED_DESCRIPTIONS),
        ): BooleanSelector(),
    }
    if user_input is not None:
        # Merge the submitted values into the subentry data and abort the flow.
        return self.async_update_and_abort(
            self._get_entry(),
            subentry,
            data_updates=user_input,
        )
    return self.async_show_form(
        step_id="init",
        data_schema=vol.Schema(options),
        errors=errors,
    )
class TibberPricesOptionsFlowHandler(OptionsFlow):
"""Handle options for tibber_prices entries."""
# Step progress tracking
_TOTAL_STEPS: ClassVar[int] = 6
_STEP_INFO: ClassVar[dict[str, int]] = {
"init": 1,
"price_rating": 2,
"volatility": 3,
"best_price": 4,
"peak_price": 5,
"price_trend": 6,
}
def __init__(self) -> None:
"""Initialize options flow."""
self._options: dict[str, Any] = {}
def _get_step_description_placeholders(self, step_id: str) -> dict[str, str]:
    """Get description placeholders with step progress.

    Returns {"step_progress": "Step X of Y"} for step ids listed in
    _STEP_INFO, or an empty dict for unknown steps.
    """
    if step_id not in self._STEP_INFO:
        return {}
    step_num = self._STEP_INFO[step_id]
    # Get translations loaded by Home Assistant
    # NOTE(review): assumes translations were stored in hass.data under this
    # per-language key elsewhere in the integration — confirm where it is set.
    standard_translations_key = f"{DOMAIN}_standard_translations_{self.hass.config.language}"
    translations = self.hass.data.get(standard_translations_key, {})
    # Get step progress text from translations with placeholders;
    # falls back to an English template when no translation is available.
    step_progress_template = translations.get("common", {}).get("step_progress", "Step {step_num} of {total_steps}")
    step_progress = step_progress_template.format(step_num=step_num, total_steps=self._TOTAL_STEPS)
    return {
        "step_progress": step_progress,
    }
async def async_step_init(self, user_input: dict[str, Any] | None = None) -> ConfigFlowResult:
"""Manage the options - General Settings."""
# Initialize options from config_entry on first call
if not self._options:
self._options = dict(self.config_entry.options)
if user_input is not None:
self._options.update(user_input)
return await self.async_step_price_rating()
return self.async_show_form(
step_id="init",
data_schema=vol.Schema(
{
vol.Optional(
CONF_EXTENDED_DESCRIPTIONS,
default=self.config_entry.options.get(
CONF_EXTENDED_DESCRIPTIONS, DEFAULT_EXTENDED_DESCRIPTIONS
),
): BooleanSelector(),
}
),
description_placeholders={
**self._get_step_description_placeholders("init"),
"user_login": self.config_entry.data.get("user_login", "N/A"),
},
)
async def async_step_price_rating(self, user_input: dict[str, Any] | None = None) -> ConfigFlowResult:
"""Configure price rating thresholds."""
if user_input is not None:
self._options.update(user_input)
return await self.async_step_volatility()
return self.async_show_form(
step_id="price_rating",
data_schema=vol.Schema(
{
vol.Optional(
CONF_PRICE_RATING_THRESHOLD_LOW,
default=int(
self.config_entry.options.get(
CONF_PRICE_RATING_THRESHOLD_LOW,
DEFAULT_PRICE_RATING_THRESHOLD_LOW,
)
),
): NumberSelector(
NumberSelectorConfig(
min=-100,
max=0,
unit_of_measurement="%",
step=1,
mode=NumberSelectorMode.SLIDER,
),
),
vol.Optional(
CONF_PRICE_RATING_THRESHOLD_HIGH,
default=int(
self.config_entry.options.get(
CONF_PRICE_RATING_THRESHOLD_HIGH,
DEFAULT_PRICE_RATING_THRESHOLD_HIGH,
)
),
): NumberSelector(
NumberSelectorConfig(
min=0,
max=100,
unit_of_measurement="%",
step=1,
mode=NumberSelectorMode.SLIDER,
),
),
}
),
description_placeholders=self._get_step_description_placeholders("price_rating"),
)
async def async_step_best_price(self, user_input: dict[str, Any] | None = None) -> ConfigFlowResult:
"""Configure best price period settings."""
if user_input is not None:
self._options.update(user_input)
return await self.async_step_peak_price()
return self.async_show_form(
step_id="best_price",
data_schema=vol.Schema(
{
vol.Optional(
CONF_BEST_PRICE_MIN_PERIOD_LENGTH,
default=int(
self.config_entry.options.get(
CONF_BEST_PRICE_MIN_PERIOD_LENGTH,
DEFAULT_BEST_PRICE_MIN_PERIOD_LENGTH,
)
),
): NumberSelector(
NumberSelectorConfig(
min=15,
max=240,
step=15,
unit_of_measurement="min",
mode=NumberSelectorMode.SLIDER,
),
),
vol.Optional(
CONF_BEST_PRICE_FLEX,
default=int(
self.config_entry.options.get(
CONF_BEST_PRICE_FLEX,
DEFAULT_BEST_PRICE_FLEX,
)
),
): NumberSelector(
NumberSelectorConfig(
min=0,
max=100,
step=1,
unit_of_measurement="%",
mode=NumberSelectorMode.SLIDER,
),
),
vol.Optional(
CONF_BEST_PRICE_MIN_DISTANCE_FROM_AVG,
default=int(
self.config_entry.options.get(
CONF_BEST_PRICE_MIN_DISTANCE_FROM_AVG,
DEFAULT_BEST_PRICE_MIN_DISTANCE_FROM_AVG,
)
),
): NumberSelector(
NumberSelectorConfig(
min=0,
max=50,
step=1,
unit_of_measurement="%",
mode=NumberSelectorMode.SLIDER,
),
),
vol.Optional(
CONF_BEST_PRICE_MIN_VOLATILITY,
default=self.config_entry.options.get(
CONF_BEST_PRICE_MIN_VOLATILITY,
DEFAULT_BEST_PRICE_MIN_VOLATILITY,
),
): SelectSelector(
SelectSelectorConfig(
options=MIN_VOLATILITY_FOR_PERIODS_OPTIONS,
mode=SelectSelectorMode.DROPDOWN,
translation_key="volatility",
),
),
vol.Optional(
CONF_BEST_PRICE_MAX_LEVEL,
default=self.config_entry.options.get(
CONF_BEST_PRICE_MAX_LEVEL,
DEFAULT_BEST_PRICE_MAX_LEVEL,
),
): SelectSelector(
SelectSelectorConfig(
options=BEST_PRICE_MAX_LEVEL_OPTIONS,
mode=SelectSelectorMode.DROPDOWN,
translation_key="price_level",
),
),
}
),
description_placeholders=self._get_step_description_placeholders("best_price"),
)
async def async_step_peak_price(self, user_input: dict[str, Any] | None = None) -> ConfigFlowResult:
"""Configure peak price period settings."""
if user_input is not None:
self._options.update(user_input)
return await self.async_step_price_trend()
return self.async_show_form(
step_id="peak_price",
data_schema=vol.Schema(
{
vol.Optional(
CONF_PEAK_PRICE_MIN_PERIOD_LENGTH,
default=int(
self.config_entry.options.get(
CONF_PEAK_PRICE_MIN_PERIOD_LENGTH,
DEFAULT_PEAK_PRICE_MIN_PERIOD_LENGTH,
)
),
): NumberSelector(
NumberSelectorConfig(
min=15,
max=240,
step=15,
unit_of_measurement="min",
mode=NumberSelectorMode.SLIDER,
),
),
vol.Optional(
CONF_PEAK_PRICE_FLEX,
default=int(
self.config_entry.options.get(
CONF_PEAK_PRICE_FLEX,
DEFAULT_PEAK_PRICE_FLEX,
)
),
): NumberSelector(
NumberSelectorConfig(
min=-100,
max=0,
step=1,
unit_of_measurement="%",
mode=NumberSelectorMode.SLIDER,
),
),
vol.Optional(
CONF_PEAK_PRICE_MIN_DISTANCE_FROM_AVG,
default=int(
self.config_entry.options.get(
CONF_PEAK_PRICE_MIN_DISTANCE_FROM_AVG,
DEFAULT_PEAK_PRICE_MIN_DISTANCE_FROM_AVG,
)
),
): NumberSelector(
NumberSelectorConfig(
min=0,
max=50,
step=1,
unit_of_measurement="%",
mode=NumberSelectorMode.SLIDER,
),
),
vol.Optional(
CONF_PEAK_PRICE_MIN_VOLATILITY,
default=self.config_entry.options.get(
CONF_PEAK_PRICE_MIN_VOLATILITY,
DEFAULT_PEAK_PRICE_MIN_VOLATILITY,
),
): SelectSelector(
SelectSelectorConfig(
options=MIN_VOLATILITY_FOR_PERIODS_OPTIONS,
mode=SelectSelectorMode.DROPDOWN,
translation_key="volatility",
),
),
vol.Optional(
CONF_PEAK_PRICE_MIN_LEVEL,
default=self.config_entry.options.get(
CONF_PEAK_PRICE_MIN_LEVEL,
DEFAULT_PEAK_PRICE_MIN_LEVEL,
),
): SelectSelector(
SelectSelectorConfig(
options=PEAK_PRICE_MIN_LEVEL_OPTIONS,
mode=SelectSelectorMode.DROPDOWN,
translation_key="price_level",
),
),
}
),
description_placeholders=self._get_step_description_placeholders("peak_price"),
)
async def async_step_price_trend(self, user_input: dict[str, Any] | None = None) -> ConfigFlowResult:
"""Configure price trend thresholds."""
if user_input is not None:
self._options.update(user_input)
return self.async_create_entry(title="", data=self._options)
return self.async_show_form(
step_id="price_trend",
data_schema=vol.Schema(
{
vol.Optional(
CONF_PRICE_TREND_THRESHOLD_RISING,
default=int(
self.config_entry.options.get(
CONF_PRICE_TREND_THRESHOLD_RISING,
DEFAULT_PRICE_TREND_THRESHOLD_RISING,
)
),
): NumberSelector(
NumberSelectorConfig(
min=1,
max=50,
step=1,
unit_of_measurement="%",
mode=NumberSelectorMode.SLIDER,
),
),
vol.Optional(
CONF_PRICE_TREND_THRESHOLD_FALLING,
default=int(
self.config_entry.options.get(
CONF_PRICE_TREND_THRESHOLD_FALLING,
DEFAULT_PRICE_TREND_THRESHOLD_FALLING,
)
),
): NumberSelector(
NumberSelectorConfig(
min=-50,
max=-1,
step=1,
unit_of_measurement="%",
mode=NumberSelectorMode.SLIDER,
),
),
}
),
description_placeholders=self._get_step_description_placeholders("price_trend"),
)
async def async_step_volatility(self, user_input: dict[str, Any] | None = None) -> ConfigFlowResult:
"""Configure volatility thresholds and period filtering."""
if user_input is not None:
self._options.update(user_input)
return await self.async_step_best_price()
return self.async_show_form(
step_id="volatility",
data_schema=vol.Schema(
{
vol.Optional(
CONF_VOLATILITY_THRESHOLD_MODERATE,
default=float(
self.config_entry.options.get(
CONF_VOLATILITY_THRESHOLD_MODERATE,
DEFAULT_VOLATILITY_THRESHOLD_MODERATE,
)
),
): NumberSelector(
NumberSelectorConfig(
min=0.0,
max=100.0,
step=0.1,
unit_of_measurement="ct",
mode=NumberSelectorMode.BOX,
),
),
vol.Optional(
CONF_VOLATILITY_THRESHOLD_HIGH,
default=float(
self.config_entry.options.get(
CONF_VOLATILITY_THRESHOLD_HIGH,
DEFAULT_VOLATILITY_THRESHOLD_HIGH,
)
),
): NumberSelector(
NumberSelectorConfig(
min=0.0,
max=100.0,
step=0.1,
unit_of_measurement="ct",
mode=NumberSelectorMode.BOX,
),
),
vol.Optional(
CONF_VOLATILITY_THRESHOLD_VERY_HIGH,
default=float(
self.config_entry.options.get(
CONF_VOLATILITY_THRESHOLD_VERY_HIGH,
DEFAULT_VOLATILITY_THRESHOLD_VERY_HIGH,
)
),
): NumberSelector(
NumberSelectorConfig(
min=0.0,
max=100.0,
step=0.1,
unit_of_measurement="ct",
mode=NumberSelectorMode.BOX,
),
),
}
),
description_placeholders=self._get_step_description_placeholders("volatility"),
)

View file

@ -1,69 +0,0 @@
"""
Configuration flow package for UI-based setup.
This package handles all user interaction for integration configuration:
- Initial setup: API token validation, home selection
- Subentry flow: Add additional Tibber homes
- Options flow: Multi-step configuration wizard
- Reauthentication: Token refresh when expired
Flow handlers:
- user_flow.py: Initial setup and reauth
- subentry_flow.py: Add additional homes
- options_flow.py: 6-step configuration wizard
Supporting modules:
- schemas.py: Form schema definitions (vol.Schema)
- validators.py: Input validation and API testing
"""
from __future__ import annotations
# Phase 3: Import flow handlers from their new modular structure
from custom_components.tibber_prices.config_flow_handlers.options_flow import (
TibberPricesOptionsFlowHandler,
)
from custom_components.tibber_prices.config_flow_handlers.schemas import (
get_best_price_schema,
get_options_init_schema,
get_peak_price_schema,
get_price_level_schema,
get_price_rating_schema,
get_price_trend_schema,
get_reauth_confirm_schema,
get_select_home_schema,
get_subentry_init_schema,
get_user_schema,
get_volatility_schema,
)
from custom_components.tibber_prices.config_flow_handlers.subentry_flow import (
TibberPricesSubentryFlowHandler,
)
from custom_components.tibber_prices.config_flow_handlers.user_flow import (
TibberPricesConfigFlowHandler,
)
from custom_components.tibber_prices.config_flow_handlers.validators import (
TibberPricesCannotConnectError,
TibberPricesInvalidAuthError,
validate_api_token,
)
# Explicit public API of the config_flow_handlers package; mirrors what
# config_flow.py re-exports for Home Assistant's flow discovery.
__all__ = [
    "TibberPricesCannotConnectError",
    "TibberPricesConfigFlowHandler",
    "TibberPricesInvalidAuthError",
    "TibberPricesOptionsFlowHandler",
    "TibberPricesSubentryFlowHandler",
    "get_best_price_schema",
    "get_options_init_schema",
    "get_peak_price_schema",
    "get_price_level_schema",
    "get_price_rating_schema",
    "get_price_trend_schema",
    "get_reauth_confirm_schema",
    "get_select_home_schema",
    "get_subentry_init_schema",
    "get_user_schema",
    "get_volatility_schema",
    "validate_api_token",
]

View file

@ -1,243 +0,0 @@
"""
Entity check utilities for options flow.
This module provides functions to check if relevant entities are enabled
for specific options flow steps. If no relevant entities are enabled,
a warning can be displayed to users.
"""
from __future__ import annotations
import logging
from typing import TYPE_CHECKING
from custom_components.tibber_prices.const import DOMAIN
from homeassistant.helpers.entity_registry import async_get as async_get_entity_registry
if TYPE_CHECKING:
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
_LOGGER = logging.getLogger(__name__)

# Maximum number of example sensors to show in warning message
MAX_EXAMPLE_SENSORS = 3

# Threshold for using "and" vs "," in formatted names
# (two names are joined with "and"; three or more use an Oxford-comma list)
NAMES_SIMPLE_JOIN_THRESHOLD = 2

# Mapping of options flow steps to affected sensor keys
# These are the entity keys (from sensor/definitions.py and binary_sensor/definitions.py)
# that are affected by each settings page
# NOTE(review): key lists must stay in sync with the entity definitions
# modules — TODO confirm on any sensor rename/addition.
STEP_TO_SENSOR_KEYS: dict[str, list[str]] = {
    # Price Rating settings affect all rating sensors
    "current_interval_price_rating": [
        # Interval rating sensors
        "current_interval_price_rating",
        "next_interval_price_rating",
        "previous_interval_price_rating",
        # Rolling hour rating sensors
        "current_hour_price_rating",
        "next_hour_price_rating",
        # Daily rating sensors
        "yesterday_price_rating",
        "today_price_rating",
        "tomorrow_price_rating",
    ],
    # Price Level settings affect level sensors and period binary sensors
    "price_level": [
        # Interval level sensors
        "current_interval_price_level",
        "next_interval_price_level",
        "previous_interval_price_level",
        # Rolling hour level sensors
        "current_hour_price_level",
        "next_hour_price_level",
        # Daily level sensors
        "yesterday_price_level",
        "today_price_level",
        "tomorrow_price_level",
        # Binary sensors that use level filtering
        "best_price_period",
        "peak_price_period",
    ],
    # Volatility settings affect volatility sensors
    "volatility": [
        "today_volatility",
        "tomorrow_volatility",
        "next_24h_volatility",
        "today_tomorrow_volatility",
        # Also affects trend sensors (adaptive thresholds)
        "current_price_trend",
        "next_price_trend_change",
        "price_trend_1h",
        "price_trend_2h",
        "price_trend_3h",
        "price_trend_4h",
        "price_trend_5h",
        "price_trend_6h",
        "price_trend_8h",
        "price_trend_12h",
    ],
    # Best Price settings affect best price binary sensor and timing sensors
    "best_price": [
        # Binary sensor
        "best_price_period",
        # Timing sensors
        "best_price_end_time",
        "best_price_period_duration",
        "best_price_remaining_minutes",
        "best_price_progress",
        "best_price_next_start_time",
        "best_price_next_in_minutes",
    ],
    # Peak Price settings affect peak price binary sensor and timing sensors
    "peak_price": [
        # Binary sensor
        "peak_price_period",
        # Timing sensors
        "peak_price_end_time",
        "peak_price_period_duration",
        "peak_price_remaining_minutes",
        "peak_price_progress",
        "peak_price_next_start_time",
        "peak_price_next_in_minutes",
    ],
    # Price Trend settings affect trend sensors
    "price_trend": [
        "current_price_trend",
        "next_price_trend_change",
        "price_trend_1h",
        "price_trend_2h",
        "price_trend_3h",
        "price_trend_4h",
        "price_trend_5h",
        "price_trend_6h",
        "price_trend_8h",
        "price_trend_12h",
    ],
}
def check_relevant_entities_enabled(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    step_id: str,
) -> tuple[bool, list[str]]:
    """
    Check whether at least one entity affected by a settings step is enabled.

    Args:
        hass: Home Assistant instance
        config_entry: Current config entry
        step_id: The options flow step ID (a key of STEP_TO_SENSOR_KEYS)

    Returns:
        Tuple of (has_enabled_entities, example_sensor_keys):
        - has_enabled_entities: True if any matching entity is enabled
          (also True when the step has no sensor mapping at all)
        - example_sensor_keys: up to MAX_EXAMPLE_SENSORS keys to name in the
          warning message; empty when no warning is needed
    """
    sensor_keys = STEP_TO_SENSOR_KEYS.get(step_id)
    if not sensor_keys:
        # Unknown step - nothing to verify, so no warning.
        return True, []

    registry = async_get_entity_registry(hass)
    target_entry_id = config_entry.entry_id

    found_enabled = 0
    disabled_examples: list[str] = []

    for entry in registry.entities.values():
        # Only consider entities owned by this integration and config entry.
        if entry.config_entry_id != target_entry_id or entry.platform != DOMAIN:
            continue

        # unique_id format: "{home_id}_{sensor_key}" or "{entry_id}_{sensor_key}";
        # match the first target key that appears as the trailing segment.
        uid = entry.unique_id or ""
        matched_key = next(
            (key for key in sensor_keys if uid == key or uid.endswith(f"_{key}")),
            None,
        )
        if matched_key is None:
            continue

        if entry.disabled_by is None:
            found_enabled += 1
        elif len(disabled_examples) < MAX_EXAMPLE_SENSORS and matched_key not in disabled_examples:
            # Collect a few disabled sensor keys to mention in the warning.
            disabled_examples.append(matched_key)

    if found_enabled:
        return True, []

    # Nothing enabled: fall back to the first mapped keys if no disabled
    # entities were actually found in the registry.
    return False, disabled_examples or sensor_keys[:MAX_EXAMPLE_SENSORS]
def format_sensor_names_for_warning(sensor_keys: list[str]) -> str:
    """
    Turn snake_case sensor keys into a human-readable, comma-joined list.

    Args:
        sensor_keys: List of sensor keys (e.g. ["best_price_period"])

    Returns:
        Title-cased names joined naturally, e.g. "Best Price Period" or
        "A and B" or "A, B, and C".
    """
    readable = [key.replace("_", " ").title() for key in sensor_keys]

    # One or two names read best with a plain "and"; longer lists get an
    # Oxford-comma join.
    if len(readable) <= NAMES_SIMPLE_JOIN_THRESHOLD:
        return " and ".join(readable)
    return ", ".join(readable[:-1]) + ", and " + readable[-1]
def check_chart_data_export_enabled(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
) -> bool:
    """
    Report whether the Chart Data Export sensor of this entry is enabled.

    Args:
        hass: Home Assistant instance
        config_entry: Current config entry

    Returns:
        True when the sensor exists in the registry and is not disabled;
        False when it is disabled or cannot be found.
    """
    registry = async_get_entity_registry(hass)
    wanted_entry_id = config_entry.entry_id

    for entry in registry.entities.values():
        # Skip entities from other integrations or other config entries.
        if entry.config_entry_id != wanted_entry_id or entry.platform != DOMAIN:
            continue

        uid = entry.unique_id or ""
        if uid == "chart_data_export" or uid.endswith("_chart_data_export"):
            # Found it - enabled means "not disabled by anything".
            return entry.disabled_by is None

    # Not registered at all (unexpected) - treat as disabled.
    return False

View file

@ -1,895 +0,0 @@
"""Options flow for tibber_prices integration."""
from __future__ import annotations
import logging
from copy import deepcopy
from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from collections.abc import Mapping
from custom_components.tibber_prices.config_flow_handlers.entity_check import (
check_chart_data_export_enabled,
check_relevant_entities_enabled,
format_sensor_names_for_warning,
)
from custom_components.tibber_prices.config_flow_handlers.schemas import (
ConfigOverrides,
get_best_price_schema,
get_chart_data_export_schema,
get_display_settings_schema,
get_options_init_schema,
get_peak_price_schema,
get_price_level_schema,
get_price_rating_schema,
get_price_trend_schema,
get_reset_to_defaults_schema,
get_volatility_schema,
)
from custom_components.tibber_prices.config_flow_handlers.validators import (
validate_best_price_distance_percentage,
validate_distance_percentage,
validate_flex_percentage,
validate_gap_count,
validate_min_periods,
validate_period_length,
validate_price_rating_threshold_high,
validate_price_rating_threshold_low,
validate_price_rating_thresholds,
validate_price_trend_falling,
validate_price_trend_rising,
validate_price_trend_strongly_falling,
validate_price_trend_strongly_rising,
validate_relaxation_attempts,
validate_volatility_threshold_high,
validate_volatility_threshold_moderate,
validate_volatility_threshold_very_high,
validate_volatility_thresholds,
)
from custom_components.tibber_prices.const import (
CONF_BEST_PRICE_FLEX,
CONF_BEST_PRICE_MAX_LEVEL_GAP_COUNT,
CONF_BEST_PRICE_MIN_DISTANCE_FROM_AVG,
CONF_BEST_PRICE_MIN_PERIOD_LENGTH,
CONF_MIN_PERIODS_BEST,
CONF_MIN_PERIODS_PEAK,
CONF_PEAK_PRICE_FLEX,
CONF_PEAK_PRICE_MAX_LEVEL_GAP_COUNT,
CONF_PEAK_PRICE_MIN_DISTANCE_FROM_AVG,
CONF_PEAK_PRICE_MIN_PERIOD_LENGTH,
CONF_PRICE_RATING_THRESHOLD_HIGH,
CONF_PRICE_RATING_THRESHOLD_LOW,
CONF_PRICE_TREND_THRESHOLD_FALLING,
CONF_PRICE_TREND_THRESHOLD_RISING,
CONF_PRICE_TREND_THRESHOLD_STRONGLY_FALLING,
CONF_PRICE_TREND_THRESHOLD_STRONGLY_RISING,
CONF_RELAXATION_ATTEMPTS_BEST,
CONF_RELAXATION_ATTEMPTS_PEAK,
CONF_VOLATILITY_THRESHOLD_HIGH,
CONF_VOLATILITY_THRESHOLD_MODERATE,
CONF_VOLATILITY_THRESHOLD_VERY_HIGH,
DEFAULT_VOLATILITY_THRESHOLD_HIGH,
DEFAULT_VOLATILITY_THRESHOLD_MODERATE,
DEFAULT_VOLATILITY_THRESHOLD_VERY_HIGH,
DOMAIN,
async_get_translation,
get_default_options,
)
from homeassistant.config_entries import ConfigFlowResult, OptionsFlow
from homeassistant.helpers import entity_registry as er
_LOGGER = logging.getLogger(__name__)
class TibberPricesOptionsFlowHandler(OptionsFlow):
"""Handle options for tibber_prices entries."""
def __init__(self) -> None:
    """Initialize options flow."""
    # Working copy of the entry's options; filled from the stored (and
    # migrated) options in async_step_init before any step mutates it.
    self._options: dict[str, Any] = {}
def _merge_section_data(self, user_input: dict[str, Any]) -> None:
    """
    Fold sectioned form input into the working options dict.

    Forms built with section() return nested dicts such as
    {"section_name": {"setting1": value1}}; that nesting must be preserved
    in config_entry.options. Non-dict values are stored unchanged.

    Args:
        user_input: Possibly-nested user input from a sectioned form
    """
    for key, value in user_input.items():
        if isinstance(value, dict):
            # Section payload: merge into the existing (or freshly created)
            # nested dict rather than replacing it wholesale.
            self._options.setdefault(key, {}).update(value)
        else:
            # Plain top-level value.
            self._options[key] = value
def _migrate_config_options(self, options: Mapping[str, Any]) -> dict[str, Any]:
    """
    Migrate deprecated config options to the current format.

    Renames changed keys and drops obsolete ones so older config entries
    keep working.

    Args:
        options: Original options mapping from config_entry

    Returns:
        A new dict with deprecated keys renamed/removed.
    """
    # CRITICAL: deepcopy - a shallow dict() would share nested dicts with
    # config_entry.options, letting later edits leak back into it.
    migrated = deepcopy(dict(options))
    changed = False

    # Migration 1: relaxation_step_* -> relaxation_attempts_*
    # (Changed in v0.6.0 - commit 5a5c8ca)
    if "relaxation_step_best" in migrated:
        migrated["relaxation_attempts_best"] = migrated.pop("relaxation_step_best")
        changed = True
        _LOGGER.info(
            "Migrated config option: relaxation_step_best -> relaxation_attempts_best (value: %s)",
            migrated["relaxation_attempts_best"],
        )
    if "relaxation_step_peak" in migrated:
        migrated["relaxation_attempts_peak"] = migrated.pop("relaxation_step_peak")
        changed = True
        _LOGGER.info(
            "Migrated config option: relaxation_step_peak -> relaxation_attempts_peak (value: %s)",
            migrated["relaxation_attempts_peak"],
        )

    # Migration 2: drop obsolete volatility filter options
    # (Removed in v0.9.0 - volatility filter feature removed)
    for obsolete in (
        "best_price_min_volatility",
        "peak_price_min_volatility",
        "min_volatility_for_periods",
    ):
        if obsolete not in migrated:
            continue
        previous = migrated.pop(obsolete)
        changed = True
        _LOGGER.info(
            "Removed obsolete config option: %s (was: %s)",
            obsolete,
            previous,
        )

    if changed:
        _LOGGER.info("Config migration completed - deprecated options cleaned up")

    return migrated
def _save_options_if_changed(self) -> bool:
    """
    Persist the working options, but only when they differ from the stored ones.

    Returns:
        True if the config entry was updated, False when nothing changed.
    """
    if self.config_entry.options == self._options:
        # No difference - skip the update (avoids needless listener triggers).
        return False

    self.hass.config_entries.async_update_entry(
        self.config_entry,
        options=self._options,
    )
    return True
def _get_entity_warning_placeholders(self, step_id: str) -> dict[str, str]:
    """
    Build the 'entity_warning' description placeholder for a step's form.

    When no entity affected by this step is enabled, the placeholder carries
    a markdown warning naming a few example sensors; otherwise it is empty.

    Args:
        step_id: The options flow step ID

    Returns:
        Dict with a single 'entity_warning' key.
    """
    has_enabled, example_sensors = check_relevant_entities_enabled(self.hass, self.config_entry, step_id)
    if has_enabled:
        # At least one relevant entity is active - nothing to warn about.
        return {"entity_warning": ""}

    sensor_names = format_sensor_names_for_warning(example_sensors)
    warning = (
        f"\n\n⚠️ **Note:** No sensors affected by these settings are currently enabled. "
        f"To use these settings, first enable relevant sensors like *{sensor_names}* "
        f"in **Settings → Devices & Services → Tibber Prices → Entities**."
    )
    return {"entity_warning": warning}
def _get_enabled_config_entities(self) -> set[str]:
    """
    Get config keys that have their config entity enabled.

    Checks the entity registry for number/switch entities that override
    config values. Returns the config_key for each enabled entity.

    Returns:
        Set of config keys (e.g., "best_price_flex", "enable_min_periods_best")
    """
    enabled_keys: set[str] = set()
    ent_reg = er.async_get(self.hass)

    _LOGGER.debug(
        "Checking for enabled config override entities for entry %s",
        self.config_entry.entry_id,
    )

    # Map entity keys to their config keys
    # Entity keys are defined in number/definitions.py and switch/definitions.py
    # NOTE(review): this table must stay in sync with those definitions -
    # TODO confirm on any override-entity rename.
    override_entities = {
        # Number entities (best price)
        "number.best_price_flex_override": "best_price_flex",
        "number.best_price_min_distance_override": "best_price_min_distance_from_avg",
        "number.best_price_min_period_length_override": "best_price_min_period_length",
        "number.best_price_min_periods_override": "min_periods_best",
        "number.best_price_relaxation_attempts_override": "relaxation_attempts_best",
        "number.best_price_gap_count_override": "best_price_max_level_gap_count",
        # Number entities (peak price)
        "number.peak_price_flex_override": "peak_price_flex",
        "number.peak_price_min_distance_override": "peak_price_min_distance_from_avg",
        "number.peak_price_min_period_length_override": "peak_price_min_period_length",
        "number.peak_price_min_periods_override": "min_periods_peak",
        "number.peak_price_relaxation_attempts_override": "relaxation_attempts_peak",
        "number.peak_price_gap_count_override": "peak_price_max_level_gap_count",
        # Switch entities
        "switch.best_price_enable_relaxation_override": "enable_min_periods_best",
        "switch.peak_price_enable_relaxation_override": "enable_min_periods_peak",
    }

    # Check each possible override entity
    for entity_id_suffix, config_key in override_entities.items():
        # Entity IDs include device name, so we need to search by unique_id pattern
        # The unique_id follows pattern: {config_entry_id}_{entity_key}
        domain, entity_key = entity_id_suffix.split(".", 1)

        # Find entity by iterating through registry; stop at the first
        # enabled match for this override key.
        for entity_entry in ent_reg.entities.values():
            if (
                entity_entry.domain == domain
                and entity_entry.config_entry_id == self.config_entry.entry_id
                and entity_entry.unique_id
                and entity_entry.unique_id.endswith(entity_key)
                and not entity_entry.disabled
            ):
                _LOGGER.debug(
                    "Found enabled config override entity: %s -> config_key=%s",
                    entity_entry.entity_id,
                    config_key,
                )
                enabled_keys.add(config_key)
                break

    _LOGGER.debug("Enabled config override keys: %s", enabled_keys)
    return enabled_keys
def _get_active_overrides(self) -> ConfigOverrides:
    """
    Build the override structure expected by the schema functions.

    Returns:
        {} when no config-override entity is enabled, otherwise
        {"_enabled": {config_key: True, ...}} - the section name is
        irrelevant for the read-only check, only the key presence matters.
    """
    enabled_keys = self._get_enabled_config_entities()
    if not enabled_keys:
        _LOGGER.debug("No enabled config override entities found")
        return {}

    overrides: ConfigOverrides = {"_enabled": {key: True for key in enabled_keys}}
    _LOGGER.debug("Active overrides structure: %s", overrides)
    return overrides
def _get_override_warning_placeholder(self, step_id: str, overrides: ConfigOverrides) -> dict[str, str]:
    """
    Build the 'override_warning' description placeholder for a step's form.

    Args:
        step_id: The options flow step ID (e.g., "best_price", "peak_price")
        overrides: Active overrides dictionary from _get_active_overrides()

    Returns:
        Dict with a single 'override_warning' key (empty string when no
        field of this step is managed by a config entity).
    """
    # Config keys that belong to each step.
    step_keys: dict[str, set[str]] = {
        "best_price": {
            "best_price_flex",
            "best_price_min_distance_from_avg",
            "best_price_min_period_length",
            "min_periods_best",
            "relaxation_attempts_best",
            "enable_min_periods_best",
        },
        "peak_price": {
            "peak_price_flex",
            "peak_price_min_distance_from_avg",
            "peak_price_min_period_length",
            "min_periods_peak",
            "relaxation_attempts_peak",
            "enable_min_periods_peak",
        },
    }

    relevant = step_keys.get(step_id, set())
    enabled = overrides.get("_enabled", {})
    override_count = len(relevant & enabled.keys())

    if not override_count:
        return {"override_warning": ""}

    field_word = "field is" if override_count == 1 else "fields are"
    return {
        "override_warning": (
            f"\n\n🔒 **{override_count} {field_word} managed by configuration entities** "
            "(grayed out). Disable the config entity to edit here, "
            "or change the value directly via the entity."
        )
    }
async def _get_override_translations(self) -> dict[str, Any]:
    """
    Load override-warning translations from the 'common' section.

    Uses the system language setting from Home Assistant; the options flow
    context carries no user_id, so a per-user language cannot be resolved.

    Returns:
        Dict with override_warning_template, override_warning_and, and one
        override_field_label_* entry per config field (missing translations
        are simply omitted).
    """
    # System language fallback - HA Options Flow context lacks user_id.
    language = self.hass.config.language or "en"
    _LOGGER.debug("Loading override translations for language: %s", language)

    result: dict[str, Any] = {}

    template = await async_get_translation(self.hass, ["common", "override_warning_template"], language)
    _LOGGER.debug("Loaded template: %s", template)
    if template:
        result["override_warning_template"] = template

    connector = await async_get_translation(self.hass, ["common", "override_warning_and"], language)
    if connector:
        result["override_warning_and"] = connector

    # Flat per-field labels, one lookup each.
    for field_key in (
        "best_price_min_period_length",
        "best_price_max_level_gap_count",
        "best_price_flex",
        "best_price_min_distance_from_avg",
        "enable_min_periods_best",
        "min_periods_best",
        "relaxation_attempts_best",
        "peak_price_min_period_length",
        "peak_price_max_level_gap_count",
        "peak_price_flex",
        "peak_price_min_distance_from_avg",
        "enable_min_periods_peak",
        "min_periods_peak",
        "relaxation_attempts_peak",
    ):
        translation_key = f"override_field_label_{field_key}"
        label = await async_get_translation(self.hass, ["common", translation_key], language)
        if label:
            result[translation_key] = label

    return result
async def async_step_init(self, _user_input: dict[str, Any] | None = None) -> ConfigFlowResult:
    """Manage the options - show menu."""
    # Re-read (and migrate) the stored options on every visit so edits made
    # in previous steps are reflected when the menu is shown again.
    self._options = self._migrate_config_options(self.config_entry.options)

    menu_options = [
        "general_settings",
        "display_settings",
        "current_interval_price_rating",
        "price_level",
        "volatility",
        "best_price",
        "peak_price",
        "price_trend",
        "chart_data_export",
        "reset_to_defaults",
        "finish",
    ]
    return self.async_show_menu(step_id="init", menu_options=menu_options)
async def async_step_reset_to_defaults(self, user_input: dict[str, Any] | None = None) -> ConfigFlowResult:
    """Reset all settings to factory defaults."""
    if user_input is None:
        # First visit: ask for explicit confirmation via checkbox.
        return self.async_show_form(
            step_id="reset_to_defaults",
            data_schema=get_reset_to_defaults_schema(),
        )

    if not user_input.get("confirm_reset", False):
        # Checkbox left unchecked - treat as a cancel, show info (not error).
        return self.async_abort(reason="reset_cancelled")

    # Currency lives in config_entry.data, which is immutable and safe to read.
    currency_code = self.config_entry.data.get("currency", None)

    # Factory reset: replace options wholesale with fresh defaults,
    # discarding all old data including legacy structures.
    self._options = get_default_options(currency_code)
    self._save_options_if_changed()

    _LOGGER.info(
        "Factory reset performed for config entry '%s' - all settings restored to defaults",
        self.config_entry.title,
    )
    return self.async_abort(reason="reset_successful")
async def async_step_finish(self, _user_input: dict[str, Any] | None = None) -> ConfigFlowResult:
    """Close the options flow."""
    # Abort with the "finished" reason so the flow closes without showing
    # any message to the user.
    return self.async_abort(reason="finished")
async def async_step_general_settings(self, user_input: dict[str, Any] | None = None) -> ConfigFlowResult:
    """Configure general settings."""
    if user_input is None:
        # Show the form; the account login is surfaced as a placeholder.
        return self.async_show_form(
            step_id="general_settings",
            data_schema=get_options_init_schema(self.config_entry.options),
            description_placeholders={
                "user_login": self.config_entry.data.get("user_login", "N/A"),
            },
        )

    # Store the submitted values and persist only if anything changed
    # (saving triggers the update listeners automatically).
    self._options.update(user_input)
    self._save_options_if_changed()
    # Back to the menu so further categories can be edited.
    return await self.async_step_init()
async def async_step_display_settings(self, user_input: dict[str, Any] | None = None) -> ConfigFlowResult:
    """Configure currency display settings."""
    if user_input is not None:
        # Persist submitted values (change detection handled by the helper).
        self._options.update(user_input)
        self._save_options_if_changed()
        # Back to the menu for more changes.
        return await self.async_step_init()

    # Currency comes from coordinator data when available; during options
    # flow setup the integration might not be fully loaded yet.
    currency_code = None
    tibber_data = self.hass.data.get(DOMAIN, {}).get(self.config_entry.entry_id)
    if tibber_data is not None and tibber_data.coordinator.data:
        currency_code = tibber_data.coordinator.data.get("currency")

    return self.async_show_form(
        step_id="display_settings",
        data_schema=get_display_settings_schema(self.config_entry.options, currency_code),
    )
async def async_step_current_interval_price_rating(
    self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
    """Configure price rating thresholds.

    Validates the LOW and HIGH thresholds individually, then cross-checks
    that LOW < HIGH, and stores the values flat in the options dict.
    """
    errors: dict[str, str] = {}
    if user_input is not None:
        # Schema is flattened - fields arrive directly in user_input and are
        # stored flat in options (no section wrapping).
        # Validate low price rating threshold
        if CONF_PRICE_RATING_THRESHOLD_LOW in user_input and not validate_price_rating_threshold_low(
            user_input[CONF_PRICE_RATING_THRESHOLD_LOW]
        ):
            errors[CONF_PRICE_RATING_THRESHOLD_LOW] = "invalid_price_rating_low"
        # Validate high price rating threshold
        if CONF_PRICE_RATING_THRESHOLD_HIGH in user_input and not validate_price_rating_threshold_high(
            user_input[CONF_PRICE_RATING_THRESHOLD_HIGH]
        ):
            errors[CONF_PRICE_RATING_THRESHOLD_HIGH] = "invalid_price_rating_high"
        # Cross-validate both thresholds together (LOW must be < HIGH)
        if not errors:
            # Fall back to the stored option (or -10/10 defaults) for any
            # field missing from the submission.
            low_val = user_input.get(
                CONF_PRICE_RATING_THRESHOLD_LOW, self._options.get(CONF_PRICE_RATING_THRESHOLD_LOW, -10)
            )
            high_val = user_input.get(
                CONF_PRICE_RATING_THRESHOLD_HIGH, self._options.get(CONF_PRICE_RATING_THRESHOLD_HIGH, 10)
            )
            if not validate_price_rating_thresholds(low_val, high_val):
                # This should never happen given the range constraints, but add error for safety
                errors["base"] = "invalid_price_rating_thresholds"
        if not errors:
            # Store flat data directly in options (no section wrapping)
            self._options.update(user_input)
            # Persist only when something actually changed.
            self._save_options_if_changed()
            # Return to menu for more changes
            return await self.async_step_init()
    return self.async_show_form(
        step_id="current_interval_price_rating",
        data_schema=get_price_rating_schema(self.config_entry.options),
        errors=errors,
        description_placeholders=self._get_entity_warning_placeholders("current_interval_price_rating"),
    )
async def async_step_price_level(self, user_input: dict[str, Any] | None = None) -> ConfigFlowResult:
    """Configure Tibber price level gap tolerance (smoothing for API 'level' field)."""
    errors: dict[str, str] = {}

    if user_input is not None:
        # Slider constraints already guarantee a valid range, so the values
        # are stored flat without further validation.
        self._options.update(user_input)
        self._save_options_if_changed()
        # Back to the menu for further edits.
        return await self.async_step_init()

    return self.async_show_form(
        step_id="price_level",
        data_schema=get_price_level_schema(self.config_entry.options),
        errors=errors,
        description_placeholders=self._get_entity_warning_placeholders("price_level"),
    )
async def async_step_best_price(self, user_input: dict[str, Any] | None = None) -> ConfigFlowResult:
    """Configure best price period settings.

    Input arrives in three form sections (period_settings,
    flexibility_settings, relaxation_and_target_periods); each field is
    validated and, when clean, merged back into the nested options.
    """
    errors: dict[str, str] = {}
    if user_input is not None:
        # Extract settings from sections
        period_settings = user_input.get("period_settings", {})
        flexibility_settings = user_input.get("flexibility_settings", {})
        relaxation_settings = user_input.get("relaxation_and_target_periods", {})
        # Validate period length
        if CONF_BEST_PRICE_MIN_PERIOD_LENGTH in period_settings and not validate_period_length(
            period_settings[CONF_BEST_PRICE_MIN_PERIOD_LENGTH]
        ):
            errors[CONF_BEST_PRICE_MIN_PERIOD_LENGTH] = "invalid_period_length"
        # Validate flex percentage
        if CONF_BEST_PRICE_FLEX in flexibility_settings and not validate_flex_percentage(
            flexibility_settings[CONF_BEST_PRICE_FLEX]
        ):
            errors[CONF_BEST_PRICE_FLEX] = "invalid_flex"
        # Validate distance from average (Best Price uses negative values)
        if (
            CONF_BEST_PRICE_MIN_DISTANCE_FROM_AVG in flexibility_settings
            and not validate_best_price_distance_percentage(
                flexibility_settings[CONF_BEST_PRICE_MIN_DISTANCE_FROM_AVG]
            )
        ):
            errors[CONF_BEST_PRICE_MIN_DISTANCE_FROM_AVG] = "invalid_best_price_distance"
        # Validate minimum periods count
        if CONF_MIN_PERIODS_BEST in relaxation_settings and not validate_min_periods(
            relaxation_settings[CONF_MIN_PERIODS_BEST]
        ):
            errors[CONF_MIN_PERIODS_BEST] = "invalid_min_periods"
        # Validate gap count
        if CONF_BEST_PRICE_MAX_LEVEL_GAP_COUNT in period_settings and not validate_gap_count(
            period_settings[CONF_BEST_PRICE_MAX_LEVEL_GAP_COUNT]
        ):
            errors[CONF_BEST_PRICE_MAX_LEVEL_GAP_COUNT] = "invalid_gap_count"
        # Validate relaxation attempts
        if CONF_RELAXATION_ATTEMPTS_BEST in relaxation_settings and not validate_relaxation_attempts(
            relaxation_settings[CONF_RELAXATION_ATTEMPTS_BEST]
        ):
            errors[CONF_RELAXATION_ATTEMPTS_BEST] = "invalid_relaxation_attempts"
        if not errors:
            # Merge section data into options (preserves the nested shape)
            self._merge_section_data(user_input)
            # Persist only when something actually changed.
            self._save_options_if_changed()
            # Return to menu for more changes
            return await self.async_step_init()
    # Fields managed by config-override entities are rendered read-only;
    # the placeholders surface both entity and override warnings.
    overrides = self._get_active_overrides()
    placeholders = self._get_entity_warning_placeholders("best_price")
    placeholders.update(self._get_override_warning_placeholder("best_price", overrides))
    # Load translations for override warnings
    override_translations = await self._get_override_translations()
    return self.async_show_form(
        step_id="best_price",
        data_schema=get_best_price_schema(
            self.config_entry.options,
            overrides=overrides,
            translations=override_translations,
        ),
        errors=errors,
        description_placeholders=placeholders,
    )
async def async_step_peak_price(self, user_input: dict[str, Any] | None = None) -> ConfigFlowResult:
    """Configure peak price period settings.

    Mirror of async_step_best_price for the peak-price side: sectioned
    input is validated field by field and merged into the nested options.
    """
    errors: dict[str, str] = {}
    if user_input is not None:
        # Extract settings from sections
        period_settings = user_input.get("period_settings", {})
        flexibility_settings = user_input.get("flexibility_settings", {})
        relaxation_settings = user_input.get("relaxation_and_target_periods", {})
        # Validate period length
        if CONF_PEAK_PRICE_MIN_PERIOD_LENGTH in period_settings and not validate_period_length(
            period_settings[CONF_PEAK_PRICE_MIN_PERIOD_LENGTH]
        ):
            errors[CONF_PEAK_PRICE_MIN_PERIOD_LENGTH] = "invalid_period_length"
        # Validate flex percentage (peak uses negative values)
        if CONF_PEAK_PRICE_FLEX in flexibility_settings and not validate_flex_percentage(
            flexibility_settings[CONF_PEAK_PRICE_FLEX]
        ):
            errors[CONF_PEAK_PRICE_FLEX] = "invalid_flex"
        # Validate distance from average (Peak Price uses positive values)
        if CONF_PEAK_PRICE_MIN_DISTANCE_FROM_AVG in flexibility_settings and not validate_distance_percentage(
            flexibility_settings[CONF_PEAK_PRICE_MIN_DISTANCE_FROM_AVG]
        ):
            errors[CONF_PEAK_PRICE_MIN_DISTANCE_FROM_AVG] = "invalid_peak_price_distance"
        # Validate minimum periods count
        if CONF_MIN_PERIODS_PEAK in relaxation_settings and not validate_min_periods(
            relaxation_settings[CONF_MIN_PERIODS_PEAK]
        ):
            errors[CONF_MIN_PERIODS_PEAK] = "invalid_min_periods"
        # Validate gap count
        if CONF_PEAK_PRICE_MAX_LEVEL_GAP_COUNT in period_settings and not validate_gap_count(
            period_settings[CONF_PEAK_PRICE_MAX_LEVEL_GAP_COUNT]
        ):
            errors[CONF_PEAK_PRICE_MAX_LEVEL_GAP_COUNT] = "invalid_gap_count"
        # Validate relaxation attempts
        if CONF_RELAXATION_ATTEMPTS_PEAK in relaxation_settings and not validate_relaxation_attempts(
            relaxation_settings[CONF_RELAXATION_ATTEMPTS_PEAK]
        ):
            errors[CONF_RELAXATION_ATTEMPTS_PEAK] = "invalid_relaxation_attempts"
        if not errors:
            # Merge section data into options (preserves the nested shape)
            self._merge_section_data(user_input)
            # Persist only when something actually changed.
            self._save_options_if_changed()
            # Return to menu for more changes
            return await self.async_step_init()
    # Fields managed by config-override entities are rendered read-only;
    # the placeholders surface both entity and override warnings.
    overrides = self._get_active_overrides()
    placeholders = self._get_entity_warning_placeholders("peak_price")
    placeholders.update(self._get_override_warning_placeholder("peak_price", overrides))
    # Load translations for override warnings
    override_translations = await self._get_override_translations()
    return self.async_show_form(
        step_id="peak_price",
        data_schema=get_peak_price_schema(
            self.config_entry.options,
            overrides=overrides,
            translations=override_translations,
        ),
        errors=errors,
        description_placeholders=placeholders,
    )
async def async_step_price_trend(self, user_input: dict[str, Any] | None = None) -> ConfigFlowResult:
    """Configure price trend thresholds.

    Each of the four thresholds is validated individually, then the pairs are
    cross-validated (rising must be below strongly-rising; falling must be
    above strongly-falling).  On success the values are stored flat in the
    options and the flow returns to the options menu; on failure the form is
    re-shown with per-field error keys.

    Args:
        user_input: Submitted form data, or None on the first display.

    Returns:
        The rendered form (initial display or validation errors), or the
        result of ``async_step_init`` after a successful save.
    """
    errors: dict[str, str] = {}
    if user_input is not None:
        # Schema is now flattened - fields come directly in user_input
        # Store them flat in options (no nested structure)

        # Validate rising trend threshold
        if CONF_PRICE_TREND_THRESHOLD_RISING in user_input and not validate_price_trend_rising(
            user_input[CONF_PRICE_TREND_THRESHOLD_RISING]
        ):
            errors[CONF_PRICE_TREND_THRESHOLD_RISING] = "invalid_price_trend_rising"
        # Validate falling trend threshold
        if CONF_PRICE_TREND_THRESHOLD_FALLING in user_input and not validate_price_trend_falling(
            user_input[CONF_PRICE_TREND_THRESHOLD_FALLING]
        ):
            errors[CONF_PRICE_TREND_THRESHOLD_FALLING] = "invalid_price_trend_falling"
        # Validate strongly rising trend threshold
        if CONF_PRICE_TREND_THRESHOLD_STRONGLY_RISING in user_input and not validate_price_trend_strongly_rising(
            user_input[CONF_PRICE_TREND_THRESHOLD_STRONGLY_RISING]
        ):
            errors[CONF_PRICE_TREND_THRESHOLD_STRONGLY_RISING] = "invalid_price_trend_strongly_rising"
        # Validate strongly falling trend threshold
        if CONF_PRICE_TREND_THRESHOLD_STRONGLY_FALLING in user_input and not validate_price_trend_strongly_falling(
            user_input[CONF_PRICE_TREND_THRESHOLD_STRONGLY_FALLING]
        ):
            errors[CONF_PRICE_TREND_THRESHOLD_STRONGLY_FALLING] = "invalid_price_trend_strongly_falling"
        # Cross-validation: Ensure rising < strongly_rising and falling > strongly_falling
        if not errors:
            # Unlike async_step_volatility, only submitted values are
            # cross-checked here; absent fields are skipped (None checks).
            rising = user_input.get(CONF_PRICE_TREND_THRESHOLD_RISING)
            strongly_rising = user_input.get(CONF_PRICE_TREND_THRESHOLD_STRONGLY_RISING)
            falling = user_input.get(CONF_PRICE_TREND_THRESHOLD_FALLING)
            strongly_falling = user_input.get(CONF_PRICE_TREND_THRESHOLD_STRONGLY_FALLING)
            if rising is not None and strongly_rising is not None and rising >= strongly_rising:
                errors[CONF_PRICE_TREND_THRESHOLD_STRONGLY_RISING] = (
                    "invalid_trend_strongly_rising_less_than_rising"
                )
            if falling is not None and strongly_falling is not None and falling <= strongly_falling:
                errors[CONF_PRICE_TREND_THRESHOLD_STRONGLY_FALLING] = (
                    "invalid_trend_strongly_falling_greater_than_falling"
                )
        if not errors:
            # Store flat data directly in options (no section wrapping)
            self._options.update(user_input)
            # async_create_entry automatically handles change detection and listener triggering
            self._save_options_if_changed()
            # Return to menu for more changes
            return await self.async_step_init()
    return self.async_show_form(
        step_id="price_trend",
        data_schema=get_price_trend_schema(self.config_entry.options),
        errors=errors,
        description_placeholders=self._get_entity_warning_placeholders("price_trend"),
    )
async def async_step_chart_data_export(self, user_input: dict[str, Any] | None = None) -> ConfigFlowResult:
    """Info page for chart data export sensor."""
    if user_input is not None:
        # Info-only page: nothing to persist, simply return to the menu.
        return await self.async_step_init()

    # Render the page with a status text that depends on whether the
    # chart data export sensor is currently enabled.
    sensor_enabled = check_chart_data_export_enabled(self.hass, self.config_entry)
    status_text = self._get_chart_export_status_info(is_enabled=sensor_enabled)
    return self.async_show_form(
        step_id="chart_data_export",
        data_schema=get_chart_data_export_schema(self.config_entry.options),
        description_placeholders={"sensor_status_info": status_text},
    )
def _get_chart_export_status_info(self, *, is_enabled: bool) -> str:
"""Get the status info block for chart data export sensor."""
if is_enabled:
return (
"✅ **Status: Sensor is enabled**\n\n"
"The Chart Data Export sensor is currently active and providing data as attributes.\n\n"
"**Configuration (optional):**\n\n"
"Default settings work out-of-the-box (today+tomorrow, 15-minute intervals, prices only).\n\n"
"For customization, add to **`configuration.yaml`**:\n\n"
"```yaml\n"
"tibber_prices:\n"
" chart_export:\n"
" day:\n"
" - today\n"
" - tomorrow\n"
" include_level: true\n"
" include_rating_level: true\n"
"```\n\n"
"**All parameters:** See `tibber_prices.get_chartdata` service documentation"
)
return (
"❌ **Status: Sensor is disabled**\n\n"
"**Enable the sensor:**\n\n"
"1. Open **Settings → Devices & Services → Tibber Prices**\n"
"2. Select your home → Find **'Chart Data Export'** (Diagnostic section)\n"
"3. **Enable the sensor** (disabled by default)"
)
async def async_step_volatility(self, user_input: dict[str, Any] | None = None) -> ConfigFlowResult:
    """Configure volatility thresholds and period filtering.

    Validates the moderate/high/very-high thresholds individually and then
    cross-validates their ordering via ``validate_volatility_thresholds``
    (MODERATE < HIGH < VERY_HIGH, per the comment below).  For the ordering
    check, fields missing from the submission fall back to the currently
    stored option (or the built-in default).  On success the values are
    stored flat in the options and the flow returns to the options menu.

    Args:
        user_input: Submitted form data, or None on the first display.

    Returns:
        The rendered form (initial display or validation errors), or the
        result of ``async_step_init`` after a successful save.
    """
    errors: dict[str, str] = {}
    if user_input is not None:
        # Schema is now flattened - fields come directly in user_input

        # Validate moderate volatility threshold
        if CONF_VOLATILITY_THRESHOLD_MODERATE in user_input and not validate_volatility_threshold_moderate(
            user_input[CONF_VOLATILITY_THRESHOLD_MODERATE]
        ):
            errors[CONF_VOLATILITY_THRESHOLD_MODERATE] = "invalid_volatility_threshold_moderate"
        # Validate high volatility threshold
        if CONF_VOLATILITY_THRESHOLD_HIGH in user_input and not validate_volatility_threshold_high(
            user_input[CONF_VOLATILITY_THRESHOLD_HIGH]
        ):
            errors[CONF_VOLATILITY_THRESHOLD_HIGH] = "invalid_volatility_threshold_high"
        # Validate very high volatility threshold
        if CONF_VOLATILITY_THRESHOLD_VERY_HIGH in user_input and not validate_volatility_threshold_very_high(
            user_input[CONF_VOLATILITY_THRESHOLD_VERY_HIGH]
        ):
            errors[CONF_VOLATILITY_THRESHOLD_VERY_HIGH] = "invalid_volatility_threshold_very_high"
        # Cross-validation: Ensure MODERATE < HIGH < VERY_HIGH
        if not errors:
            # Get current values directly from options (now flat)
            moderate = user_input.get(
                CONF_VOLATILITY_THRESHOLD_MODERATE,
                self._options.get(CONF_VOLATILITY_THRESHOLD_MODERATE, DEFAULT_VOLATILITY_THRESHOLD_MODERATE),
            )
            high = user_input.get(
                CONF_VOLATILITY_THRESHOLD_HIGH,
                self._options.get(CONF_VOLATILITY_THRESHOLD_HIGH, DEFAULT_VOLATILITY_THRESHOLD_HIGH),
            )
            very_high = user_input.get(
                CONF_VOLATILITY_THRESHOLD_VERY_HIGH,
                self._options.get(CONF_VOLATILITY_THRESHOLD_VERY_HIGH, DEFAULT_VOLATILITY_THRESHOLD_VERY_HIGH),
            )
            if not validate_volatility_thresholds(moderate, high, very_high):
                # Ordering violation cannot be attributed to a single field.
                errors["base"] = "invalid_volatility_thresholds"
        if not errors:
            # Store flat data directly in options (no section wrapping)
            self._options.update(user_input)
            # async_create_entry automatically handles change detection and listener triggering
            self._save_options_if_changed()
            # Return to menu for more changes
            return await self.async_step_init()
    return self.async_show_form(
        step_id="volatility",
        data_schema=get_volatility_schema(self.config_entry.options),
        errors=errors,
        description_placeholders=self._get_entity_warning_placeholders("volatility"),
    )

View file

@ -1,998 +0,0 @@
"""Schema definitions for tibber_prices config flow."""
from __future__ import annotations
from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from collections.abc import Mapping
import voluptuous as vol
from custom_components.tibber_prices.const import (
BEST_PRICE_MAX_LEVEL_OPTIONS,
CONF_AVERAGE_SENSOR_DISPLAY,
CONF_BEST_PRICE_FLEX,
CONF_BEST_PRICE_MAX_LEVEL,
CONF_BEST_PRICE_MAX_LEVEL_GAP_COUNT,
CONF_BEST_PRICE_MIN_DISTANCE_FROM_AVG,
CONF_BEST_PRICE_MIN_PERIOD_LENGTH,
CONF_CURRENCY_DISPLAY_MODE,
CONF_ENABLE_MIN_PERIODS_BEST,
CONF_ENABLE_MIN_PERIODS_PEAK,
CONF_EXTENDED_DESCRIPTIONS,
CONF_MIN_PERIODS_BEST,
CONF_MIN_PERIODS_PEAK,
CONF_PEAK_PRICE_FLEX,
CONF_PEAK_PRICE_MAX_LEVEL_GAP_COUNT,
CONF_PEAK_PRICE_MIN_DISTANCE_FROM_AVG,
CONF_PEAK_PRICE_MIN_LEVEL,
CONF_PEAK_PRICE_MIN_PERIOD_LENGTH,
CONF_PRICE_LEVEL_GAP_TOLERANCE,
CONF_PRICE_RATING_GAP_TOLERANCE,
CONF_PRICE_RATING_HYSTERESIS,
CONF_PRICE_RATING_THRESHOLD_HIGH,
CONF_PRICE_RATING_THRESHOLD_LOW,
CONF_PRICE_TREND_THRESHOLD_FALLING,
CONF_PRICE_TREND_THRESHOLD_RISING,
CONF_PRICE_TREND_THRESHOLD_STRONGLY_FALLING,
CONF_PRICE_TREND_THRESHOLD_STRONGLY_RISING,
CONF_RELAXATION_ATTEMPTS_BEST,
CONF_RELAXATION_ATTEMPTS_PEAK,
CONF_VIRTUAL_TIME_OFFSET_DAYS,
CONF_VIRTUAL_TIME_OFFSET_HOURS,
CONF_VIRTUAL_TIME_OFFSET_MINUTES,
CONF_VOLATILITY_THRESHOLD_HIGH,
CONF_VOLATILITY_THRESHOLD_MODERATE,
CONF_VOLATILITY_THRESHOLD_VERY_HIGH,
DEFAULT_AVERAGE_SENSOR_DISPLAY,
DEFAULT_BEST_PRICE_FLEX,
DEFAULT_BEST_PRICE_MAX_LEVEL,
DEFAULT_BEST_PRICE_MAX_LEVEL_GAP_COUNT,
DEFAULT_BEST_PRICE_MIN_DISTANCE_FROM_AVG,
DEFAULT_BEST_PRICE_MIN_PERIOD_LENGTH,
DEFAULT_ENABLE_MIN_PERIODS_BEST,
DEFAULT_ENABLE_MIN_PERIODS_PEAK,
DEFAULT_EXTENDED_DESCRIPTIONS,
DEFAULT_MIN_PERIODS_BEST,
DEFAULT_MIN_PERIODS_PEAK,
DEFAULT_PEAK_PRICE_FLEX,
DEFAULT_PEAK_PRICE_MAX_LEVEL_GAP_COUNT,
DEFAULT_PEAK_PRICE_MIN_DISTANCE_FROM_AVG,
DEFAULT_PEAK_PRICE_MIN_LEVEL,
DEFAULT_PEAK_PRICE_MIN_PERIOD_LENGTH,
DEFAULT_PRICE_LEVEL_GAP_TOLERANCE,
DEFAULT_PRICE_RATING_GAP_TOLERANCE,
DEFAULT_PRICE_RATING_HYSTERESIS,
DEFAULT_PRICE_RATING_THRESHOLD_HIGH,
DEFAULT_PRICE_RATING_THRESHOLD_LOW,
DEFAULT_PRICE_TREND_THRESHOLD_FALLING,
DEFAULT_PRICE_TREND_THRESHOLD_RISING,
DEFAULT_PRICE_TREND_THRESHOLD_STRONGLY_FALLING,
DEFAULT_PRICE_TREND_THRESHOLD_STRONGLY_RISING,
DEFAULT_RELAXATION_ATTEMPTS_BEST,
DEFAULT_RELAXATION_ATTEMPTS_PEAK,
DEFAULT_VIRTUAL_TIME_OFFSET_DAYS,
DEFAULT_VIRTUAL_TIME_OFFSET_HOURS,
DEFAULT_VIRTUAL_TIME_OFFSET_MINUTES,
DEFAULT_VOLATILITY_THRESHOLD_HIGH,
DEFAULT_VOLATILITY_THRESHOLD_MODERATE,
DEFAULT_VOLATILITY_THRESHOLD_VERY_HIGH,
DISPLAY_MODE_BASE,
DISPLAY_MODE_SUBUNIT,
MAX_GAP_COUNT,
MAX_MIN_PERIOD_LENGTH,
MAX_MIN_PERIODS,
MAX_PRICE_LEVEL_GAP_TOLERANCE,
MAX_PRICE_RATING_GAP_TOLERANCE,
MAX_PRICE_RATING_HYSTERESIS,
MAX_PRICE_RATING_THRESHOLD_HIGH,
MAX_PRICE_RATING_THRESHOLD_LOW,
MAX_PRICE_TREND_FALLING,
MAX_PRICE_TREND_RISING,
MAX_PRICE_TREND_STRONGLY_FALLING,
MAX_PRICE_TREND_STRONGLY_RISING,
MAX_RELAXATION_ATTEMPTS,
MAX_VOLATILITY_THRESHOLD_HIGH,
MAX_VOLATILITY_THRESHOLD_MODERATE,
MAX_VOLATILITY_THRESHOLD_VERY_HIGH,
MIN_GAP_COUNT,
MIN_PERIOD_LENGTH,
MIN_PRICE_LEVEL_GAP_TOLERANCE,
MIN_PRICE_RATING_GAP_TOLERANCE,
MIN_PRICE_RATING_HYSTERESIS,
MIN_PRICE_RATING_THRESHOLD_HIGH,
MIN_PRICE_RATING_THRESHOLD_LOW,
MIN_PRICE_TREND_FALLING,
MIN_PRICE_TREND_RISING,
MIN_PRICE_TREND_STRONGLY_FALLING,
MIN_PRICE_TREND_STRONGLY_RISING,
MIN_RELAXATION_ATTEMPTS,
MIN_VOLATILITY_THRESHOLD_HIGH,
MIN_VOLATILITY_THRESHOLD_MODERATE,
MIN_VOLATILITY_THRESHOLD_VERY_HIGH,
PEAK_PRICE_MIN_LEVEL_OPTIONS,
get_default_currency_display,
)
from homeassistant.const import CONF_ACCESS_TOKEN
from homeassistant.data_entry_flow import section
from homeassistant.helpers import selector
from homeassistant.helpers.selector import (
BooleanSelector,
ConstantSelector,
ConstantSelectorConfig,
NumberSelector,
NumberSelectorConfig,
NumberSelectorMode,
SelectOptionDict,
SelectSelector,
SelectSelectorConfig,
SelectSelectorMode,
TextSelector,
TextSelectorConfig,
TextSelectorType,
)
# Type alias for config override structure: {section: {config_key: value}}
# NOTE(review): callers also use a special "_enabled" section mapping
# config keys to their override state - see is_field_overridden().
ConfigOverrides = dict[str, dict[str, Any]]
def is_field_overridden(
    config_key: str,
    config_section: str,  # noqa: ARG001 - kept for API compatibility
    overrides: ConfigOverrides | None,
) -> bool:
    """
    Check if a config field has an active runtime override.

    Args:
        config_key: The configuration key to check (e.g., "best_price_flex")
        config_section: Unused, kept for API compatibility
        overrides: Dictionary of active overrides (with "_enabled" key)

    Returns:
        True if this field is being overridden by a config entity, False otherwise
    """
    # Membership in the "_enabled" mapping (populated from the entity
    # registry check) marks the field as actively overridden.
    enabled = {} if overrides is None else overrides.get("_enabled", {})
    return config_key in enabled
# Override translations structure from common section
# This will be loaded at runtime and passed to schema functions
OverrideTranslations = dict[str, Any]  # Type alias

# Fallback labels when translations not available
# Used only as fallback - translations should be loaded from common.override_field_labels
# Best Price and Peak Price deliberately reuse the same human-readable
# labels for their parallel settings.
DEFAULT_FIELD_LABELS: dict[str, str] = {
    # Best Price
    CONF_BEST_PRICE_MIN_PERIOD_LENGTH: "Minimum Period Length",
    CONF_BEST_PRICE_MAX_LEVEL_GAP_COUNT: "Gap Tolerance",
    CONF_BEST_PRICE_FLEX: "Flexibility",
    CONF_BEST_PRICE_MIN_DISTANCE_FROM_AVG: "Minimum Distance",
    CONF_ENABLE_MIN_PERIODS_BEST: "Achieve Minimum Count",
    CONF_MIN_PERIODS_BEST: "Minimum Periods",
    CONF_RELAXATION_ATTEMPTS_BEST: "Relaxation Attempts",
    # Peak Price
    CONF_PEAK_PRICE_MIN_PERIOD_LENGTH: "Minimum Period Length",
    CONF_PEAK_PRICE_MAX_LEVEL_GAP_COUNT: "Gap Tolerance",
    CONF_PEAK_PRICE_FLEX: "Flexibility",
    CONF_PEAK_PRICE_MIN_DISTANCE_FROM_AVG: "Minimum Distance",
    CONF_ENABLE_MIN_PERIODS_PEAK: "Achieve Minimum Count",
    CONF_MIN_PERIODS_PEAK: "Minimum Periods",
    CONF_RELAXATION_ATTEMPTS_PEAK: "Relaxation Attempts",
}
# Section to config keys mapping for override detection
# Structure: {step_id: {section_id: [config keys shown in that section]}}.
# Consumed by get_section_override_warning() to decide which fields of a
# form section may carry a runtime-override warning.
SECTION_CONFIG_KEYS: dict[str, dict[str, list[str]]] = {
    "best_price": {
        "period_settings": [
            CONF_BEST_PRICE_MIN_PERIOD_LENGTH,
            CONF_BEST_PRICE_MAX_LEVEL_GAP_COUNT,
        ],
        "flexibility_settings": [
            CONF_BEST_PRICE_FLEX,
            CONF_BEST_PRICE_MIN_DISTANCE_FROM_AVG,
        ],
        "relaxation_and_target_periods": [
            CONF_ENABLE_MIN_PERIODS_BEST,
            CONF_MIN_PERIODS_BEST,
            CONF_RELAXATION_ATTEMPTS_BEST,
        ],
    },
    "peak_price": {
        "period_settings": [
            CONF_PEAK_PRICE_MIN_PERIOD_LENGTH,
            CONF_PEAK_PRICE_MAX_LEVEL_GAP_COUNT,
        ],
        "flexibility_settings": [
            CONF_PEAK_PRICE_FLEX,
            CONF_PEAK_PRICE_MIN_DISTANCE_FROM_AVG,
        ],
        "relaxation_and_target_periods": [
            CONF_ENABLE_MIN_PERIODS_PEAK,
            CONF_MIN_PERIODS_PEAK,
            CONF_RELAXATION_ATTEMPTS_PEAK,
        ],
    },
}
def get_section_override_warning(
    step_id: str,
    section_id: str,
    overrides: ConfigOverrides | None,
    translations: OverrideTranslations | None = None,
) -> dict[vol.Optional, ConstantSelector] | None:
    """
    Return a warning constant selector if any fields in the section are overridden.

    Args:
        step_id: The step ID (best_price or peak_price)
        section_id: The section ID within the step
        overrides: Active runtime overrides from coordinator
        translations: Override translations from common section (optional)

    Returns:
        Dict with override warning selector if any fields overridden, None otherwise
    """
    if not overrides:
        return None

    # Collect a human-readable label for every overridden field in this section,
    # preferring the flat translation key and falling back to the English labels.
    labels: list[str] = []
    for config_key in SECTION_CONFIG_KEYS.get(step_id, {}).get(section_id, []):
        if not is_field_overridden(config_key, section_id, overrides):
            continue
        translated = translations.get(f"override_field_label_{config_key}") if translations else None
        labels.append(translated or DEFAULT_FIELD_LABELS.get(config_key, config_key))

    if not labels:
        return None

    # "and" connector: translated when available, English fallback otherwise.
    connector = " and "
    if translations and "override_warning_and" in translations:
        connector = f" {translations['override_warning_and']} "

    # Oxford-free list: "A", "A and B", "A, B and C".
    if len(labels) == 1:
        fields_text = labels[0]
    else:
        fields_text = ", ".join(labels[:-1]) + connector + labels[-1]

    # Warning template: translated when available, English fallback otherwise.
    template = "⚠️ {fields} controlled by config entity"
    if translations and "override_warning_template" in translations:
        template = translations["override_warning_template"]

    warning = ConstantSelector(
        ConstantSelectorConfig(
            value=True,
            label=template.format(fields=fields_text),
        )
    )
    return {vol.Optional("_override_warning"): warning}
def get_user_schema(access_token: str | None = None) -> vol.Schema:
    """Return schema for user step (API token input).

    Args:
        access_token: Pre-filled token for re-display; when None the field
            has no default (vol.UNDEFINED).
    """
    token_default = vol.UNDEFINED if access_token is None else access_token
    token_selector = TextSelector(TextSelectorConfig(type=TextSelectorType.TEXT))
    return vol.Schema(
        {
            vol.Required(CONF_ACCESS_TOKEN, default=token_default): token_selector,
        }
    )
def get_reauth_confirm_schema() -> vol.Schema:
    """Return schema for reauth confirmation step (mandatory token, no default)."""
    token_selector = TextSelector(TextSelectorConfig(type=TextSelectorType.TEXT))
    return vol.Schema({vol.Required(CONF_ACCESS_TOKEN): token_selector})
def get_select_home_schema(home_options: list[SelectOptionDict]) -> vol.Schema:
    """Return schema for home selection step.

    Args:
        home_options: Dropdown choices, one per selectable home.
    """
    home_selector = SelectSelector(
        SelectSelectorConfig(
            options=home_options,
            mode=SelectSelectorMode.DROPDOWN,
        )
    )
    return vol.Schema({vol.Required("home_id"): home_selector})
def _virtual_offset_selector(minimum: int, maximum: int) -> NumberSelector:
    """Return a box-mode integer NumberSelector bounded to [minimum, maximum].

    Shared by the three virtual-time offset fields, which only differ in
    their bounds (days/hours/minutes).
    """
    return NumberSelector(
        NumberSelectorConfig(
            mode=NumberSelectorMode.BOX,
            min=minimum,
            max=maximum,
            step=1,
        )
    )


def get_subentry_init_schema(
    *,
    extended_descriptions: bool = DEFAULT_EXTENDED_DESCRIPTIONS,
    offset_days: int = DEFAULT_VIRTUAL_TIME_OFFSET_DAYS,
    offset_hours: int = DEFAULT_VIRTUAL_TIME_OFFSET_HOURS,
    offset_minutes: int = DEFAULT_VIRTUAL_TIME_OFFSET_MINUTES,
) -> vol.Schema:
    """Return schema for subentry init step (includes time-travel settings).

    Args:
        extended_descriptions: Default for the extended-descriptions toggle.
        offset_days: Default day offset (only past days allowed, up to one year back).
        offset_hours: Default hour offset (-23..23).
        offset_minutes: Default minute offset (-59..59).

    Returns:
        Voluptuous schema with the toggle and the three offset fields.
    """
    return vol.Schema(
        {
            vol.Optional(
                CONF_EXTENDED_DESCRIPTIONS,
                default=extended_descriptions,
            ): BooleanSelector(),
            # Max 1 year back; only past days allowed.
            vol.Optional(
                CONF_VIRTUAL_TIME_OFFSET_DAYS,
                default=offset_days,
            ): _virtual_offset_selector(-365, 0),
            vol.Optional(
                CONF_VIRTUAL_TIME_OFFSET_HOURS,
                default=offset_hours,
            ): _virtual_offset_selector(-23, 23),
            vol.Optional(
                CONF_VIRTUAL_TIME_OFFSET_MINUTES,
                default=offset_minutes,
            ): _virtual_offset_selector(-59, 59),
        }
    )
def get_options_init_schema(options: Mapping[str, Any]) -> vol.Schema:
    """Return schema for options init step (general settings).

    Args:
        options: Current options mapping used to pre-fill defaults.
    """
    extended_default = options.get(CONF_EXTENDED_DESCRIPTIONS, DEFAULT_EXTENDED_DESCRIPTIONS)
    average_default = str(
        options.get(CONF_AVERAGE_SENSOR_DISPLAY, DEFAULT_AVERAGE_SENSOR_DISPLAY)
    )
    average_selector = SelectSelector(
        SelectSelectorConfig(
            options=["median", "mean"],
            mode=SelectSelectorMode.DROPDOWN,
            translation_key="average_sensor_display",
        ),
    )
    return vol.Schema(
        {
            vol.Optional(CONF_EXTENDED_DESCRIPTIONS, default=extended_default): BooleanSelector(),
            vol.Optional(CONF_AVERAGE_SENSOR_DISPLAY, default=average_default): average_selector,
        }
    )
def get_display_settings_schema(options: Mapping[str, Any], currency_code: str | None) -> vol.Schema:
    """Return schema for display settings configuration.

    Args:
        options: Current options mapping used to pre-fill defaults.
        currency_code: Currency of the home; determines the fallback
            display mode via get_default_currency_display().
    """
    mode_default = str(
        options.get(
            CONF_CURRENCY_DISPLAY_MODE,
            get_default_currency_display(currency_code),
        )
    )
    mode_selector = SelectSelector(
        SelectSelectorConfig(
            options=[DISPLAY_MODE_BASE, DISPLAY_MODE_SUBUNIT],
            mode=SelectSelectorMode.DROPDOWN,
            translation_key="currency_display_mode",
        ),
    )
    return vol.Schema(
        {
            vol.Optional(CONF_CURRENCY_DISPLAY_MODE, default=mode_default): mode_selector,
        }
    )
def get_price_rating_schema(options: Mapping[str, Any]) -> vol.Schema:
    """Return schema for price rating configuration (thresholds and stabilization).

    Fields: low/high thresholds (int %), hysteresis (float %, 0.5 steps)
    and gap tolerance (int, unitless) - all rendered as sliders.
    """
    low_default = int(options.get(CONF_PRICE_RATING_THRESHOLD_LOW, DEFAULT_PRICE_RATING_THRESHOLD_LOW))
    high_default = int(options.get(CONF_PRICE_RATING_THRESHOLD_HIGH, DEFAULT_PRICE_RATING_THRESHOLD_HIGH))
    hysteresis_default = float(options.get(CONF_PRICE_RATING_HYSTERESIS, DEFAULT_PRICE_RATING_HYSTERESIS))
    gap_default = int(options.get(CONF_PRICE_RATING_GAP_TOLERANCE, DEFAULT_PRICE_RATING_GAP_TOLERANCE))
    return vol.Schema(
        {
            vol.Optional(CONF_PRICE_RATING_THRESHOLD_LOW, default=low_default): NumberSelector(
                NumberSelectorConfig(
                    min=MIN_PRICE_RATING_THRESHOLD_LOW,
                    max=MAX_PRICE_RATING_THRESHOLD_LOW,
                    unit_of_measurement="%",
                    step=1,
                    mode=NumberSelectorMode.SLIDER,
                ),
            ),
            vol.Optional(CONF_PRICE_RATING_THRESHOLD_HIGH, default=high_default): NumberSelector(
                NumberSelectorConfig(
                    min=MIN_PRICE_RATING_THRESHOLD_HIGH,
                    max=MAX_PRICE_RATING_THRESHOLD_HIGH,
                    unit_of_measurement="%",
                    step=1,
                    mode=NumberSelectorMode.SLIDER,
                ),
            ),
            vol.Optional(CONF_PRICE_RATING_HYSTERESIS, default=hysteresis_default): NumberSelector(
                NumberSelectorConfig(
                    min=MIN_PRICE_RATING_HYSTERESIS,
                    max=MAX_PRICE_RATING_HYSTERESIS,
                    unit_of_measurement="%",
                    step=0.5,
                    mode=NumberSelectorMode.SLIDER,
                ),
            ),
            vol.Optional(CONF_PRICE_RATING_GAP_TOLERANCE, default=gap_default): NumberSelector(
                NumberSelectorConfig(
                    min=MIN_PRICE_RATING_GAP_TOLERANCE,
                    max=MAX_PRICE_RATING_GAP_TOLERANCE,
                    step=1,
                    mode=NumberSelectorMode.SLIDER,
                ),
            ),
        }
    )
def get_price_level_schema(options: Mapping[str, Any]) -> vol.Schema:
    """Return schema for Tibber price level stabilization (gap tolerance for API level field)."""
    gap_default = int(
        options.get(CONF_PRICE_LEVEL_GAP_TOLERANCE, DEFAULT_PRICE_LEVEL_GAP_TOLERANCE)
    )
    gap_selector = NumberSelector(
        NumberSelectorConfig(
            min=MIN_PRICE_LEVEL_GAP_TOLERANCE,
            max=MAX_PRICE_LEVEL_GAP_TOLERANCE,
            step=1,
            mode=NumberSelectorMode.SLIDER,
        ),
    )
    return vol.Schema(
        {
            vol.Optional(CONF_PRICE_LEVEL_GAP_TOLERANCE, default=gap_default): gap_selector,
        }
    )
def get_volatility_schema(options: Mapping[str, Any]) -> vol.Schema:
    """Return schema for volatility thresholds configuration.

    The three ascending thresholds (moderate, high, very high) share the
    same slider shape and only differ in key, default and bounds, so the
    fields are built from a table in display order.
    """
    # (config key, default, slider min, slider max) per threshold field.
    threshold_specs = (
        (
            CONF_VOLATILITY_THRESHOLD_MODERATE,
            DEFAULT_VOLATILITY_THRESHOLD_MODERATE,
            MIN_VOLATILITY_THRESHOLD_MODERATE,
            MAX_VOLATILITY_THRESHOLD_MODERATE,
        ),
        (
            CONF_VOLATILITY_THRESHOLD_HIGH,
            DEFAULT_VOLATILITY_THRESHOLD_HIGH,
            MIN_VOLATILITY_THRESHOLD_HIGH,
            MAX_VOLATILITY_THRESHOLD_HIGH,
        ),
        (
            CONF_VOLATILITY_THRESHOLD_VERY_HIGH,
            DEFAULT_VOLATILITY_THRESHOLD_VERY_HIGH,
            MIN_VOLATILITY_THRESHOLD_VERY_HIGH,
            MAX_VOLATILITY_THRESHOLD_VERY_HIGH,
        ),
    )
    fields: dict = {}
    for conf_key, default, minimum, maximum in threshold_specs:
        fields[vol.Optional(conf_key, default=float(options.get(conf_key, default)))] = NumberSelector(
            NumberSelectorConfig(
                min=minimum,
                max=maximum,
                step=1.0,
                unit_of_measurement="%",
                mode=NumberSelectorMode.SLIDER,
            ),
        )
    return vol.Schema(fields)
def get_best_price_schema(
    options: Mapping[str, Any],
    overrides: ConfigOverrides | None = None,
    translations: OverrideTranslations | None = None,
) -> vol.Schema:
    """
    Return schema for best price period configuration with collapsible sections.

    Args:
        options: Current options from config entry
        overrides: Active runtime overrides from coordinator. Fields with active
            overrides will be replaced with a constant placeholder.
        translations: Override translations from common section (optional)

    Returns:
        Voluptuous schema for the best price configuration form
    """
    # Best-price options are persisted nested under three section keys
    # (unlike e.g. the price-trend options, which are stored flat).
    period_settings = options.get("period_settings", {})
    flexibility_settings = options.get("flexibility_settings", {})
    relaxation_settings = options.get("relaxation_and_target_periods", {})
    # Get current values for override display
    min_period_length = int(
        period_settings.get(CONF_BEST_PRICE_MIN_PERIOD_LENGTH, DEFAULT_BEST_PRICE_MIN_PERIOD_LENGTH)
    )
    max_level_gap_count = int(
        period_settings.get(CONF_BEST_PRICE_MAX_LEVEL_GAP_COUNT, DEFAULT_BEST_PRICE_MAX_LEVEL_GAP_COUNT)
    )
    best_price_flex = int(flexibility_settings.get(CONF_BEST_PRICE_FLEX, DEFAULT_BEST_PRICE_FLEX))
    min_distance = int(
        flexibility_settings.get(CONF_BEST_PRICE_MIN_DISTANCE_FROM_AVG, DEFAULT_BEST_PRICE_MIN_DISTANCE_FROM_AVG)
    )
    enable_min_periods = relaxation_settings.get(CONF_ENABLE_MIN_PERIODS_BEST, DEFAULT_ENABLE_MIN_PERIODS_BEST)
    min_periods = int(relaxation_settings.get(CONF_MIN_PERIODS_BEST, DEFAULT_MIN_PERIODS_BEST))
    relaxation_attempts = int(relaxation_settings.get(CONF_RELAXATION_ATTEMPTS_BEST, DEFAULT_RELAXATION_ATTEMPTS_BEST))
    # Build section schemas with optional override warnings
    period_warning = get_section_override_warning("best_price", "period_settings", overrides, translations) or {}
    period_fields: dict[vol.Optional | vol.Required, Any] = {
        **period_warning,  # type: ignore[misc]
        # Period length slider snaps to 15-minute steps.
        vol.Optional(
            CONF_BEST_PRICE_MIN_PERIOD_LENGTH,
            default=min_period_length,
        ): NumberSelector(
            NumberSelectorConfig(
                min=MIN_PERIOD_LENGTH,
                max=MAX_MIN_PERIOD_LENGTH,
                step=15,
                unit_of_measurement="min",
                mode=NumberSelectorMode.SLIDER,
            )
        ),
        vol.Optional(
            CONF_BEST_PRICE_MAX_LEVEL,
            default=period_settings.get(
                CONF_BEST_PRICE_MAX_LEVEL,
                DEFAULT_BEST_PRICE_MAX_LEVEL,
            ),
        ): SelectSelector(
            SelectSelectorConfig(
                options=BEST_PRICE_MAX_LEVEL_OPTIONS,
                mode=SelectSelectorMode.DROPDOWN,
                translation_key="current_interval_price_level",
            ),
        ),
        vol.Optional(
            CONF_BEST_PRICE_MAX_LEVEL_GAP_COUNT,
            default=max_level_gap_count,
        ): NumberSelector(
            NumberSelectorConfig(
                min=MIN_GAP_COUNT,
                max=MAX_GAP_COUNT,
                step=1,
                mode=NumberSelectorMode.SLIDER,
            )
        ),
    }
    flexibility_warning = (
        get_section_override_warning("best_price", "flexibility_settings", overrides, translations) or {}
    )
    flexibility_fields: dict[vol.Optional | vol.Required, Any] = {
        **flexibility_warning,  # type: ignore[misc]
        # Best price uses a positive flex range (0..50 %) ...
        vol.Optional(
            CONF_BEST_PRICE_FLEX,
            default=best_price_flex,
        ): NumberSelector(
            NumberSelectorConfig(
                min=0,
                max=50,
                step=1,
                unit_of_measurement="%",
                mode=NumberSelectorMode.SLIDER,
            )
        ),
        # ... and a negative distance-from-average range (-50..0 %);
        # peak price uses the mirrored signs.
        vol.Optional(
            CONF_BEST_PRICE_MIN_DISTANCE_FROM_AVG,
            default=min_distance,
        ): NumberSelector(
            NumberSelectorConfig(
                min=-50,
                max=0,
                step=1,
                unit_of_measurement="%",
                mode=NumberSelectorMode.SLIDER,
            )
        ),
    }
    relaxation_warning = (
        get_section_override_warning("best_price", "relaxation_and_target_periods", overrides, translations) or {}
    )
    relaxation_fields: dict[vol.Optional | vol.Required, Any] = {
        **relaxation_warning,  # type: ignore[misc]
        vol.Optional(
            CONF_ENABLE_MIN_PERIODS_BEST,
            default=enable_min_periods,
        ): BooleanSelector(selector.BooleanSelectorConfig()),
        vol.Optional(
            CONF_MIN_PERIODS_BEST,
            default=min_periods,
        ): NumberSelector(
            NumberSelectorConfig(
                min=1,
                max=MAX_MIN_PERIODS,
                step=1,
                mode=NumberSelectorMode.SLIDER,
            )
        ),
        vol.Optional(
            CONF_RELAXATION_ATTEMPTS_BEST,
            default=relaxation_attempts,
        ): NumberSelector(
            NumberSelectorConfig(
                min=MIN_RELAXATION_ATTEMPTS,
                max=MAX_RELAXATION_ATTEMPTS,
                step=1,
                mode=NumberSelectorMode.SLIDER,
            )
        ),
    }
    # Only the first (period) section is expanded by default.
    return vol.Schema(
        {
            vol.Required("period_settings"): section(
                vol.Schema(period_fields),
                {"collapsed": False},
            ),
            vol.Required("flexibility_settings"): section(
                vol.Schema(flexibility_fields),
                {"collapsed": True},
            ),
            vol.Required("relaxation_and_target_periods"): section(
                vol.Schema(relaxation_fields),
                {"collapsed": True},
            ),
        }
    )
def get_peak_price_schema(
    options: Mapping[str, Any],
    overrides: ConfigOverrides | None = None,
    translations: OverrideTranslations | None = None,
) -> vol.Schema:
    """
    Return schema for peak price period configuration with collapsible sections.

    Args:
        options: Current options from config entry
        overrides: Active runtime overrides from coordinator. Fields with active
            overrides will be replaced with a constant placeholder.
        translations: Override translations from common section (optional)

    Returns:
        Voluptuous schema for the peak price configuration form
    """
    # Peak-price options are persisted nested under three section keys,
    # mirroring get_best_price_schema().
    period_settings = options.get("period_settings", {})
    flexibility_settings = options.get("flexibility_settings", {})
    relaxation_settings = options.get("relaxation_and_target_periods", {})
    # Get current values for override display
    min_period_length = int(
        period_settings.get(CONF_PEAK_PRICE_MIN_PERIOD_LENGTH, DEFAULT_PEAK_PRICE_MIN_PERIOD_LENGTH)
    )
    max_level_gap_count = int(
        period_settings.get(CONF_PEAK_PRICE_MAX_LEVEL_GAP_COUNT, DEFAULT_PEAK_PRICE_MAX_LEVEL_GAP_COUNT)
    )
    peak_price_flex = int(flexibility_settings.get(CONF_PEAK_PRICE_FLEX, DEFAULT_PEAK_PRICE_FLEX))
    min_distance = int(
        flexibility_settings.get(CONF_PEAK_PRICE_MIN_DISTANCE_FROM_AVG, DEFAULT_PEAK_PRICE_MIN_DISTANCE_FROM_AVG)
    )
    enable_min_periods = relaxation_settings.get(CONF_ENABLE_MIN_PERIODS_PEAK, DEFAULT_ENABLE_MIN_PERIODS_PEAK)
    min_periods = int(relaxation_settings.get(CONF_MIN_PERIODS_PEAK, DEFAULT_MIN_PERIODS_PEAK))
    relaxation_attempts = int(relaxation_settings.get(CONF_RELAXATION_ATTEMPTS_PEAK, DEFAULT_RELAXATION_ATTEMPTS_PEAK))
    # Build section schemas with optional override warnings
    period_warning = get_section_override_warning("peak_price", "period_settings", overrides, translations) or {}
    period_fields: dict[vol.Optional | vol.Required, Any] = {
        **period_warning,  # type: ignore[misc]
        # Period length slider snaps to 15-minute steps.
        vol.Optional(
            CONF_PEAK_PRICE_MIN_PERIOD_LENGTH,
            default=min_period_length,
        ): NumberSelector(
            NumberSelectorConfig(
                min=MIN_PERIOD_LENGTH,
                max=MAX_MIN_PERIOD_LENGTH,
                step=15,
                unit_of_measurement="min",
                mode=NumberSelectorMode.SLIDER,
            )
        ),
        vol.Optional(
            CONF_PEAK_PRICE_MIN_LEVEL,
            default=period_settings.get(
                CONF_PEAK_PRICE_MIN_LEVEL,
                DEFAULT_PEAK_PRICE_MIN_LEVEL,
            ),
        ): SelectSelector(
            SelectSelectorConfig(
                options=PEAK_PRICE_MIN_LEVEL_OPTIONS,
                mode=SelectSelectorMode.DROPDOWN,
                translation_key="current_interval_price_level",
            ),
        ),
        vol.Optional(
            CONF_PEAK_PRICE_MAX_LEVEL_GAP_COUNT,
            default=max_level_gap_count,
        ): NumberSelector(
            NumberSelectorConfig(
                min=MIN_GAP_COUNT,
                max=MAX_GAP_COUNT,
                step=1,
                mode=NumberSelectorMode.SLIDER,
            )
        ),
    }
    flexibility_warning = (
        get_section_override_warning("peak_price", "flexibility_settings", overrides, translations) or {}
    )
    flexibility_fields: dict[vol.Optional | vol.Required, Any] = {
        **flexibility_warning,  # type: ignore[misc]
        # Peak price uses a negative flex range (-50..0 %) ...
        vol.Optional(
            CONF_PEAK_PRICE_FLEX,
            default=peak_price_flex,
        ): NumberSelector(
            NumberSelectorConfig(
                min=-50,
                max=0,
                step=1,
                unit_of_measurement="%",
                mode=NumberSelectorMode.SLIDER,
            )
        ),
        # ... and a positive distance-from-average range (0..50 %);
        # best price uses the mirrored signs.
        vol.Optional(
            CONF_PEAK_PRICE_MIN_DISTANCE_FROM_AVG,
            default=min_distance,
        ): NumberSelector(
            NumberSelectorConfig(
                min=0,
                max=50,
                step=1,
                unit_of_measurement="%",
                mode=NumberSelectorMode.SLIDER,
            )
        ),
    }
    relaxation_warning = (
        get_section_override_warning("peak_price", "relaxation_and_target_periods", overrides, translations) or {}
    )
    relaxation_fields: dict[vol.Optional | vol.Required, Any] = {
        **relaxation_warning,  # type: ignore[misc]
        vol.Optional(
            CONF_ENABLE_MIN_PERIODS_PEAK,
            default=enable_min_periods,
        ): BooleanSelector(selector.BooleanSelectorConfig()),
        vol.Optional(
            CONF_MIN_PERIODS_PEAK,
            default=min_periods,
        ): NumberSelector(
            NumberSelectorConfig(
                min=1,
                max=MAX_MIN_PERIODS,
                step=1,
                mode=NumberSelectorMode.SLIDER,
            )
        ),
        vol.Optional(
            CONF_RELAXATION_ATTEMPTS_PEAK,
            default=relaxation_attempts,
        ): NumberSelector(
            NumberSelectorConfig(
                min=MIN_RELAXATION_ATTEMPTS,
                max=MAX_RELAXATION_ATTEMPTS,
                step=1,
                mode=NumberSelectorMode.SLIDER,
            )
        ),
    }
    # Only the first (period) section is expanded by default.
    return vol.Schema(
        {
            vol.Required("period_settings"): section(
                vol.Schema(period_fields),
                {"collapsed": False},
            ),
            vol.Required("flexibility_settings"): section(
                vol.Schema(flexibility_fields),
                {"collapsed": True},
            ),
            vol.Required("relaxation_and_target_periods"): section(
                vol.Schema(relaxation_fields),
                {"collapsed": True},
            ),
        }
    )
def get_price_trend_schema(options: Mapping[str, Any]) -> vol.Schema:
    """Return schema for price trend thresholds configuration.

    The four thresholds (rising, strongly rising, falling, strongly
    falling) share one slider shape and differ only in key, default and
    bounds, so the fields are built from a table in display order.
    """
    # (config key, default, slider min, slider max) per trend field.
    trend_specs = (
        (
            CONF_PRICE_TREND_THRESHOLD_RISING,
            DEFAULT_PRICE_TREND_THRESHOLD_RISING,
            MIN_PRICE_TREND_RISING,
            MAX_PRICE_TREND_RISING,
        ),
        (
            CONF_PRICE_TREND_THRESHOLD_STRONGLY_RISING,
            DEFAULT_PRICE_TREND_THRESHOLD_STRONGLY_RISING,
            MIN_PRICE_TREND_STRONGLY_RISING,
            MAX_PRICE_TREND_STRONGLY_RISING,
        ),
        (
            CONF_PRICE_TREND_THRESHOLD_FALLING,
            DEFAULT_PRICE_TREND_THRESHOLD_FALLING,
            MIN_PRICE_TREND_FALLING,
            MAX_PRICE_TREND_FALLING,
        ),
        (
            CONF_PRICE_TREND_THRESHOLD_STRONGLY_FALLING,
            DEFAULT_PRICE_TREND_THRESHOLD_STRONGLY_FALLING,
            MIN_PRICE_TREND_STRONGLY_FALLING,
            MAX_PRICE_TREND_STRONGLY_FALLING,
        ),
    )
    fields: dict = {}
    for conf_key, default, minimum, maximum in trend_specs:
        fields[vol.Optional(conf_key, default=int(options.get(conf_key, default)))] = NumberSelector(
            NumberSelectorConfig(
                min=minimum,
                max=maximum,
                step=1,
                unit_of_measurement="%",
                mode=NumberSelectorMode.SLIDER,
            ),
        )
    return vol.Schema(fields)
def get_chart_data_export_schema(_options: Mapping[str, Any]) -> vol.Schema:
    """Return schema for chart data export info page (no input fields).

    The page is informational only, so an empty schema is returned and the
    options argument is intentionally unused.
    """
    no_fields: dict = {}
    return vol.Schema(no_fields)
def get_reset_to_defaults_schema() -> vol.Schema:
    """Return schema for reset to defaults confirmation step.

    A single boolean that must be ticked to confirm the reset; it
    defaults to False so an accidental submit does nothing.
    """
    confirm_field = vol.Required("confirm_reset", default=False)
    return vol.Schema({confirm_field: selector.BooleanSelector()})

View file

@ -1,309 +0,0 @@
"""Subentry config flow for creating time-travel views."""
from __future__ import annotations
from typing import Any
import voluptuous as vol
from custom_components.tibber_prices.const import (
CONF_VIRTUAL_TIME_OFFSET_DAYS,
CONF_VIRTUAL_TIME_OFFSET_HOURS,
CONF_VIRTUAL_TIME_OFFSET_MINUTES,
DOMAIN,
)
from homeassistant.config_entries import ConfigSubentryFlow, SubentryFlowResult
from homeassistant.helpers.selector import (
DurationSelector,
DurationSelectorConfig,
NumberSelector,
NumberSelectorConfig,
NumberSelectorMode,
SelectOptionDict,
SelectSelector,
SelectSelectorConfig,
SelectSelectorMode,
)
class TibberPricesSubentryFlowHandler(ConfigSubentryFlow):
    """
    Handle subentry flows for tibber_prices (time-travel views).

    A subentry is a read-only "time-travel" view onto a parent config
    entry, shifted into the past by an offset of days, hours and minutes.
    Offsets are always stored as non-positive integers (historical data
    only); seconds are not supported (minute precision).
    """

    def __init__(self) -> None:
        """Initialize the subentry flow handler."""
        super().__init__()
        # Parent config entry chosen in step 1, consumed by step 2.
        self._selected_parent_entry_id: str | None = None

    @staticmethod
    def _extract_offsets(user_input: dict[str, Any]) -> tuple[int, int, int]:
        """
        Normalize raw form input into ``(days, hours, minutes)`` offsets.

        Days come from a slider (may arrive as float, range -374..0).
        Hours/minutes come from a DurationSelector dict and are forced
        negative because only historical views are supported; seconds
        are ignored (minute-level precision only).
        """
        offset_days = int(user_input.get(CONF_VIRTUAL_TIME_OFFSET_DAYS, 0))
        # DurationSelector returns dict with 'hours', 'minutes', and 'seconds' keys
        time_offset = user_input.get("time_offset", {})
        offset_hours = -abs(int(time_offset.get("hours", 0)))  # Always negative for historical data
        offset_minutes = -abs(int(time_offset.get("minutes", 0)))  # Always negative for historical data
        return offset_days, offset_hours, offset_minutes

    async def async_step_user(self, user_input: dict[str, Any] | None = None) -> SubentryFlowResult:
        """Step 1: Select which config entry should get a time-travel subentry."""
        errors: dict[str, str] = {}
        if user_input is not None:
            self._selected_parent_entry_id = user_input["parent_entry_id"]
            return await self.async_step_time_offset()
        # Get all main config entries (not subentries).
        # Subentries are recognizable by "_hist_" in their unique_id.
        main_entries = [
            entry
            for entry in self.hass.config_entries.async_entries(DOMAIN)
            if entry.unique_id and "_hist_" not in entry.unique_id
        ]
        if not main_entries:
            return self.async_abort(reason="no_main_entries")
        # Build dropdown options for entry selection.
        entry_options = [
            SelectOptionDict(
                value=entry.entry_id,
                label=f"{entry.title} ({entry.data.get('user_login', 'N/A')})",
            )
            for entry in main_entries
        ]
        return self.async_show_form(
            step_id="user",
            data_schema=vol.Schema(
                {
                    vol.Required("parent_entry_id"): SelectSelector(
                        SelectSelectorConfig(
                            options=entry_options,
                            mode=SelectSelectorMode.DROPDOWN,
                        )
                    ),
                }
            ),
            description_placeholders={},
            errors=errors,
        )

    async def async_step_time_offset(self, user_input: dict[str, Any] | None = None) -> SubentryFlowResult:
        """Step 2: Configure time offset for the time-travel view."""
        errors: dict[str, str] = {}
        if user_input is not None:
            offset_days, offset_hours, offset_minutes = self._extract_offsets(user_input)
            # Validate that at least one offset is negative (historical data only).
            if offset_days >= 0 and offset_hours >= 0 and offset_minutes >= 0:
                errors["base"] = "no_time_offset"
            if not errors:
                # Resolve the parent entry selected in step 1.
                if not self._selected_parent_entry_id:
                    return self.async_abort(reason="parent_entry_not_found")
                parent_entry = self.hass.config_entries.async_get_entry(self._selected_parent_entry_id)
                if not parent_entry:
                    return self.async_abort(reason="parent_entry_not_found")
                # Get home data from parent entry.
                home_id = parent_entry.data.get("home_id")
                home_data = parent_entry.data.get("home_data", {})
                user_login = parent_entry.data.get("user_login", "N/A")
                # Build unique_id with time offset signature, e.g. "d-7h-2m-30".
                offset_str = f"d{offset_days}h{offset_hours}m{offset_minutes}"
                user_id = parent_entry.unique_id.split("_")[0] if parent_entry.unique_id else home_id
                unique_id = f"{user_id}_{home_id}_hist_{offset_str}"
                # Abort if this exact time offset already exists.
                for entry in self.hass.config_entries.async_entries(DOMAIN):
                    if entry.unique_id == unique_id:
                        return self.async_abort(reason="already_configured")
                # No duplicate found - create the entry.
                offset_desc = self._format_offset_description(offset_days, offset_hours, offset_minutes)
                subentry_title = f"{parent_entry.title} ({offset_desc})"
                # Note: Subentries inherit options from parent entry automatically.
                # Options parameter is not supported by ConfigSubentryFlow.async_create_entry().
                return self.async_create_entry(
                    title=subentry_title,
                    data={
                        "home_id": home_id,
                        "home_data": home_data,
                        "user_login": user_login,
                        CONF_VIRTUAL_TIME_OFFSET_DAYS: offset_days,
                        CONF_VIRTUAL_TIME_OFFSET_HOURS: offset_hours,
                        CONF_VIRTUAL_TIME_OFFSET_MINUTES: offset_minutes,
                    },
                    description=f"Time-travel view: {offset_desc}",
                    description_placeholders={"offset": offset_desc},
                    unique_id=unique_id,
                )
        return self.async_show_form(
            step_id="time_offset",
            data_schema=vol.Schema(
                {
                    vol.Required(CONF_VIRTUAL_TIME_OFFSET_DAYS, default=0): NumberSelector(
                        NumberSelectorConfig(
                            mode=NumberSelectorMode.SLIDER,
                            min=-374,
                            max=0,
                            step=1,
                        )
                    ),
                    vol.Optional("time_offset", default={"hours": 0, "minutes": 0}): DurationSelector(
                        DurationSelectorConfig(
                            allow_negative=False,  # We handle sign automatically
                            enable_day=False,  # Days are handled by the slider above
                        )
                    ),
                }
            ),
            description_placeholders={},
            errors=errors,
        )

    def _format_offset_description(self, days: int, hours: int, minutes: int) -> str:
        """
        Format time offset into human-readable description.

        Examples:
            -7, 0, 0 -> "7 days ago" (English) / "vor 7 Tagen" (German)
            0, -2, 0 -> "2 hours ago" (English) / "vor 2 Stunden" (German)
            -7, -2, -30 -> "7 days - 02:30" (compact format when time is added)
        """
        # Get translations from custom_translations (loaded via async_load_translations).
        translations_key = f"{DOMAIN}_translations_{self.hass.config.language}"
        translations = self.hass.data.get(translations_key, {})
        time_units = translations.get("time_units", {})
        # Fallback to English if translations not available.
        if not time_units:
            time_units = {
                "day": "{count} day",
                "days": "{count} days",
                "hour": "{count} hour",
                "hours": "{count} hours",
                "minute": "{count} minute",
                "minutes": "{count} minutes",
                "ago": "{parts} ago",
                "now": "now",
            }
        # Check if we have hours or minutes (need compact format).
        has_time = hours != 0 or minutes != 0
        if days != 0 and has_time:
            # Compact format: "7 days - 02:30"
            count = abs(days)
            unit_key = "days" if count != 1 else "day"
            day_part = time_units[unit_key].format(count=count)
            time_part = f"{abs(hours):02d}:{abs(minutes):02d}"
            return f"{day_part} - {time_part}"
        # Standard format: separate parts with spaces.
        parts = []
        if days != 0:
            count = abs(days)
            unit_key = "days" if count != 1 else "day"
            parts.append(time_units[unit_key].format(count=count))
        if hours != 0:
            count = abs(hours)
            unit_key = "hours" if count != 1 else "hour"
            parts.append(time_units[unit_key].format(count=count))
        if minutes != 0:
            count = abs(minutes)
            unit_key = "minutes" if count != 1 else "minute"
            parts.append(time_units[unit_key].format(count=count))
        if not parts:
            return time_units.get("now", "now")
        # All offsets should be negative (historical data only).
        # Join parts with space and apply "ago" template.
        return time_units["ago"].format(parts=" ".join(parts))

    async def async_step_init(self, user_input: dict | None = None) -> SubentryFlowResult:
        """Manage the options for an existing subentry (time-travel settings)."""
        subentry = self._get_reconfigure_subentry()
        errors: dict[str, str] = {}
        if user_input is not None:
            offset_days, offset_hours, offset_minutes = self._extract_offsets(user_input)
            # Validate that at least one offset is negative (historical data only).
            if offset_days >= 0 and offset_hours >= 0 and offset_minutes >= 0:
                errors["base"] = "no_time_offset"
            else:
                # Get parent entry to extract home_id and user_id.
                parent_entry = self._get_entry()
                home_id = parent_entry.data.get("home_id")
                # Build new unique_id with updated offset signature.
                offset_str = f"d{offset_days}h{offset_hours}m{offset_minutes}"
                user_id = parent_entry.unique_id.split("_")[0] if parent_entry.unique_id else home_id
                new_unique_id = f"{user_id}_{home_id}_hist_{offset_str}"
                # Generate new title with updated offset description.
                offset_desc = self._format_offset_description(offset_days, offset_hours, offset_minutes)
                # Extract parent title (remove old offset description in parentheses).
                parent_title = parent_entry.title.split(" (")[0] if " (" in parent_entry.title else parent_entry.title
                new_title = f"{parent_title} ({offset_desc})"
                # BUGFIX: persist the normalized (negative, integer) offsets under the
                # CONF_VIRTUAL_TIME_OFFSET_* keys instead of the raw form input.
                # The raw input carries a positive "time_offset" dict and a possibly
                # float day count, which left the stored CONF_VIRTUAL_TIME_OFFSET_HOURS/
                # MINUTES values stale when this form read them back below. This now
                # matches what async_step_time_offset stores on creation.
                return self.async_update_and_abort(
                    parent_entry,
                    subentry,
                    unique_id=new_unique_id,
                    title=new_title,
                    data_updates={
                        CONF_VIRTUAL_TIME_OFFSET_DAYS: offset_days,
                        CONF_VIRTUAL_TIME_OFFSET_HOURS: offset_hours,
                        CONF_VIRTUAL_TIME_OFFSET_MINUTES: offset_minutes,
                    },
                )
        offset_days = subentry.data.get(CONF_VIRTUAL_TIME_OFFSET_DAYS, 0)
        offset_hours = subentry.data.get(CONF_VIRTUAL_TIME_OFFSET_HOURS, 0)
        offset_minutes = subentry.data.get(CONF_VIRTUAL_TIME_OFFSET_MINUTES, 0)
        # Prepare time offset dict for DurationSelector (always positive, we negate on save).
        time_offset_dict = {"hours": 0, "minutes": 0}  # Default to zeros
        if offset_hours != 0:
            time_offset_dict["hours"] = abs(offset_hours)
        if offset_minutes != 0:
            time_offset_dict["minutes"] = abs(offset_minutes)
        return self.async_show_form(
            step_id="init",
            data_schema=vol.Schema(
                {
                    vol.Required(CONF_VIRTUAL_TIME_OFFSET_DAYS, default=offset_days): NumberSelector(
                        NumberSelectorConfig(
                            mode=NumberSelectorMode.SLIDER,
                            min=-374,
                            max=0,
                            step=1,
                        )
                    ),
                    vol.Optional("time_offset", default=time_offset_dict): DurationSelector(
                        DurationSelectorConfig(
                            allow_negative=False,  # We handle sign automatically
                            enable_day=False,  # Days are handled by the slider above
                        )
                    ),
                }
            ),
            errors=errors,
        )

View file

@ -1,586 +0,0 @@
"""Main config flow for tibber_prices integration."""
from __future__ import annotations
from datetime import datetime
from typing import TYPE_CHECKING, Any
import voluptuous as vol
from custom_components.tibber_prices.config_flow_handlers.options_flow import (
TibberPricesOptionsFlowHandler,
)
from custom_components.tibber_prices.config_flow_handlers.schemas import (
get_reauth_confirm_schema,
get_select_home_schema,
get_user_schema,
)
from custom_components.tibber_prices.config_flow_handlers.validators import (
TibberPricesCannotConnectError,
TibberPricesInvalidAuthError,
validate_api_token,
)
from custom_components.tibber_prices.const import (
DOMAIN,
LOGGER,
get_default_options,
get_translation,
)
from homeassistant.config_entries import (
ConfigEntry,
ConfigFlow,
ConfigFlowResult,
OptionsFlow,
)
from homeassistant.const import CONF_ACCESS_TOKEN
from homeassistant.core import callback
from homeassistant.helpers.selector import (
SelectOptionDict,
SelectSelector,
SelectSelectorConfig,
SelectSelectorMode,
)
if TYPE_CHECKING:
from homeassistant.config_entries import ConfigSubentryFlow
class TibberPricesConfigFlowHandler(ConfigFlow, domain=DOMAIN):
    """Config flow for tibber_prices."""
    # Config entry schema version used by Home Assistant for entry migrations.
    VERSION = 1
    MINOR_VERSION = 0
    def __init__(self) -> None:
        """Initialize the config flow."""
        super().__init__()
        # Entry being re-authenticated; set in async_step_reauth.
        self._reauth_entry: ConfigEntry | None = None
        # Viewer payload and account details cached between steps:
        # populated in async_step_new_token, consumed in async_step_select_home.
        self._viewer: dict | None = None
        self._access_token: str | None = None
        # NOTE(review): _user_name is assigned but never read within this flow —
        # confirm whether it is still needed.
        self._user_name: str | None = None
        self._user_login: str | None = None
        self._user_id: str | None = None
    @classmethod
    @callback
    def async_get_supported_subentry_types(
        cls,
        config_entry: ConfigEntry,  # noqa: ARG003
    ) -> dict[str, type[ConfigSubentryFlow]]:
        """Return subentries supported by this integration (currently none)."""
        # Temporarily disabled: Time-travel feature not yet fully implemented
        # When enabled, this causes "Devices that don't belong to a sub-entry" warning
        # because subentries don't have their own entities yet.
        # See: https://github.com/home-assistant/core/issues/147570
        # Will be re-enabled when time-travel functionality is implemented
        return {}
    @staticmethod
    @callback
    def async_get_options_flow(_config_entry: ConfigEntry) -> OptionsFlow:
        """Create an options flow for this configentry."""
        return TibberPricesOptionsFlowHandler()
    def is_matching(self, other_flow: dict) -> bool:
        """Return True if match_dict matches this flow (same integration domain)."""
        return bool(other_flow.get("domain") == DOMAIN)
    async def async_step_reauth(self, entry_data: dict[str, Any]) -> ConfigFlowResult:  # noqa: ARG002
        """Handle reauth flow when access token becomes invalid."""
        # Resolve the entry that triggered reauth from the flow context;
        # entry_data itself is unused (hence the noqa above).
        entry_id = self.context.get("entry_id")
        if entry_id:
            self._reauth_entry = self.hass.config_entries.async_get_entry(entry_id)
        return await self.async_step_reauth_confirm()
    async def async_step_reauth_confirm(self, user_input: dict | None = None) -> ConfigFlowResult:
        """Confirm reauth dialog - prompt for new access token."""
        _errors = {}
        if user_input is not None:
            try:
                viewer = await validate_api_token(self.hass, user_input[CONF_ACCESS_TOKEN])
            except TibberPricesInvalidAuthError as exception:
                LOGGER.warning(exception)
                _errors["base"] = "auth"
            except TibberPricesCannotConnectError as exception:
                LOGGER.error(exception)
                _errors["base"] = "connection"
            else:
                # Validate that the new token has access to all configured homes
                # NOTE(review): if _reauth_entry is None here, the form is silently
                # re-shown without an error — confirm this is intended.
                if self._reauth_entry:
                    # Get all configured home IDs (main entry + subentries)
                    configured_home_ids = self._get_all_configured_home_ids(self._reauth_entry)
                    # Get accessible home IDs from the new token
                    accessible_homes = viewer.get("homes", [])
                    accessible_home_ids = {home["id"] for home in accessible_homes}
                    # Check if all configured homes are accessible with the new token
                    missing_home_ids = configured_home_ids - accessible_home_ids
                    if missing_home_ids:
                        # New token doesn't have access to all configured homes
                        LOGGER.error(
                            "New access token missing access to configured homes: %s",
                            ", ".join(missing_home_ids),
                        )
                        _errors["base"] = "missing_homes"
                    else:
                        # Update the config entry with the new access token
                        self.hass.config_entries.async_update_entry(
                            self._reauth_entry,
                            data={
                                **self._reauth_entry.data,
                                CONF_ACCESS_TOKEN: user_input[CONF_ACCESS_TOKEN],
                            },
                        )
                        await self.hass.config_entries.async_reload(self._reauth_entry.entry_id)
                        return self.async_abort(reason="reauth_successful")
        return self.async_show_form(
            step_id="reauth_confirm",
            data_schema=get_reauth_confirm_schema(),
            errors=_errors,
            description_placeholders={"tibber_url": "https://developer.tibber.com"},
        )
    async def async_step_user(
        self,
        user_input: dict | None = None,
    ) -> ConfigFlowResult:
        """Handle a flow initialized by the user. Choose account or enter new token."""
        # Get existing accounts
        existing_entries = self.hass.config_entries.async_entries(DOMAIN)
        # If there are existing accounts, offer choice
        if existing_entries and user_input is None:
            return await self.async_step_account_choice()
        # Otherwise, go directly to token input
        return await self.async_step_new_token(user_input)
    async def async_step_account_choice(
        self,
        user_input: dict | None = None,
    ) -> ConfigFlowResult:
        """Let user choose between existing account or new token."""
        if user_input is not None:
            choice = user_input["account_choice"]
            if choice == "new_token":
                return await self.async_step_new_token()
            # User selected an existing account - copy its token
            selected_entry_id = choice
            selected_entry = next(
                (
                    entry
                    for entry in self.hass.config_entries.async_entries(DOMAIN)
                    if entry.entry_id == selected_entry_id
                ),
                None,
            )
            if not selected_entry:
                return self.async_abort(reason="unknown")
            # Copy token from selected entry and proceed
            access_token = selected_entry.data.get(CONF_ACCESS_TOKEN)
            if not access_token:
                return self.async_abort(reason="unknown")
            return await self.async_step_new_token({CONF_ACCESS_TOKEN: access_token})
        # Build options: unique user accounts (grouped by user_id) + "New Token" option
        existing_entries = self.hass.config_entries.async_entries(DOMAIN)
        # Group entries by user_id to show unique accounts
        # Minimum parts in unique_id format: user_id_home_id
        min_unique_id_parts = 2
        seen_users = {}
        for entry in existing_entries:
            # Extract user_id from unique_id (format: user_id_home_id or user_id_home_id_sub/hist_...)
            unique_id = entry.unique_id
            if unique_id:
                # Split by underscore and take first part as user_id
                parts = unique_id.split("_")
                if len(parts) >= min_unique_id_parts:
                    user_id = parts[0]
                    # Keep only the first entry seen per user account.
                    if user_id not in seen_users:
                        seen_users[user_id] = entry
        # Build dropdown options from unique user accounts
        account_options = [
            SelectOptionDict(
                value=entry.entry_id,
                label=f"{entry.title} ({entry.data.get('user_login', 'N/A')})",
            )
            for entry in seen_users.values()
        ]
        # Add "new_token" option with translated label
        new_token_label = (
            get_translation(
                ["selector", "account_choice", "options", "new_token"],
                self.hass.config.language,
            )
            or "Add new Tibber account API token"
        )
        account_options.append(
            SelectOptionDict(
                value="new_token",
                label=new_token_label,
            )
        )
        return self.async_show_form(
            step_id="account_choice",
            data_schema=vol.Schema(
                {
                    vol.Required("account_choice"): SelectSelector(
                        SelectSelectorConfig(
                            options=account_options,
                            mode=SelectSelectorMode.DROPDOWN,
                        )
                    ),
                }
            ),
        )
    async def async_step_new_token(
        self,
        user_input: dict | None = None,
    ) -> ConfigFlowResult:
        """Handle token input (new or copied from existing account)."""
        _errors = {}
        if user_input is not None:
            try:
                viewer = await validate_api_token(self.hass, user_input[CONF_ACCESS_TOKEN])
            except TibberPricesInvalidAuthError as exception:
                LOGGER.warning(exception)
                _errors["base"] = "auth"
            except TibberPricesCannotConnectError as exception:
                LOGGER.error(exception)
                _errors["base"] = "connection"
            else:
                user_id = viewer.get("userId", None)
                user_name = viewer.get("name") or user_id or "Unknown User"
                user_login = viewer.get("login", "N/A")
                homes = viewer.get("homes", [])
                if not user_id:
                    LOGGER.error("No user ID found: %s", viewer)
                    return self.async_abort(reason="unknown")
                if not homes:
                    LOGGER.error("No homes found: %s", viewer)
                    return self.async_abort(reason="unknown")
                LOGGER.debug("Viewer data received: %s", viewer)
                # Store viewer data in the flow for use in the next step
                self._viewer = viewer
                self._access_token = user_input[CONF_ACCESS_TOKEN]
                self._user_name = user_name
                self._user_login = user_login
                self._user_id = user_id
                # Move to home selection step
                return await self.async_step_select_home()
        return self.async_show_form(
            step_id="new_token",
            data_schema=get_user_schema((user_input or {}).get(CONF_ACCESS_TOKEN)),
            errors=_errors,
            description_placeholders={"tibber_url": "https://developer.tibber.com"},
        )
    async def async_step_select_home(self, user_input: dict | None = None) -> ConfigFlowResult:  # noqa: PLR0911
        """Handle home selection during initial setup."""
        homes = self._viewer.get("homes", []) if self._viewer else []
        if not homes:
            return self.async_abort(reason="unknown")
        # Filter out already configured homes
        configured_home_ids = {
            entry.data.get("home_id")
            for entry in self.hass.config_entries.async_entries(DOMAIN)
            if entry.data.get("home_id")
        }
        available_homes = [home for home in homes if home["id"] not in configured_home_ids]
        # If no homes available, abort
        if not available_homes:
            return self.async_abort(reason="already_configured")
        if user_input is not None:
            selected_home_id = user_input["home_id"]
            selected_home = next((home for home in available_homes if home["id"] == selected_home_id), None)
            if not selected_home:
                return self.async_abort(reason="unknown")
            # Validate that home has an active or future subscription
            subscription_status = self._get_subscription_status(selected_home)
            if subscription_status == "none":
                # Re-show the form with a per-field error instead of aborting.
                return self.async_show_form(
                    step_id="select_home",
                    data_schema=get_select_home_schema(
                        [
                            SelectOptionDict(
                                value=home["id"],
                                label=self._get_home_title_with_status(home),
                            )
                            for home in available_homes
                        ]
                    ),
                    errors={"home_id": "no_active_subscription"},
                )
            if subscription_status == "expired":
                return self.async_show_form(
                    step_id="select_home",
                    data_schema=get_select_home_schema(
                        [
                            SelectOptionDict(
                                value=home["id"],
                                label=self._get_home_title_with_status(home),
                            )
                            for home in available_homes
                        ]
                    ),
                    errors={"home_id": "subscription_expired"},
                )
            # Set unique_id to user_id + home_id combination
            # This allows multiple homes per user account (single-home architecture)
            unique_id = f"{self._user_id}_{selected_home_id}"
            await self.async_set_unique_id(unique_id)
            self._abort_if_unique_id_configured()
            # Note: This check is now redundant since we filter available_homes upfront,
            # but kept as defensive programming in case of race conditions
            for entry in self.hass.config_entries.async_entries(DOMAIN):
                if entry.data.get("home_id") == selected_home_id:
                    return self.async_show_form(
                        step_id="select_home",
                        data_schema=get_select_home_schema(
                            [
                                SelectOptionDict(
                                    value=home["id"],
                                    label=self._get_home_title(home),
                                )
                                for home in available_homes
                            ]
                        ),
                        errors={"home_id": "home_already_configured"},
                    )
            data = {
                CONF_ACCESS_TOKEN: self._access_token or "",
                "home_id": selected_home_id,
                "home_data": selected_home,
                "homes": homes,
                "user_login": self._user_login or "N/A",
            }
            # Extract currency from home data for intelligent defaults
            currency_code = None
            if (
                selected_home
                and (subscription := selected_home.get("currentSubscription"))
                and (price_info := subscription.get("priceInfo"))
                and (current_price := price_info.get("current"))
            ):
                currency_code = current_price.get("currency")
            # Generate entry title from home address (not appNickname)
            entry_title = self._get_entry_title(selected_home)
            return self.async_create_entry(
                title=entry_title,
                data=data,
                description=f"{self._user_login} ({self._user_id})",
                options=get_default_options(currency_code),
            )
        home_options = [
            SelectOptionDict(
                value=home["id"],
                label=self._get_home_title_with_status(home),
            )
            for home in available_homes
        ]
        return self.async_show_form(
            step_id="select_home",
            data_schema=get_select_home_schema(home_options),
        )
    def _get_all_configured_home_ids(self, main_entry: ConfigEntry) -> set[str]:
        """Get all configured home IDs from main entry and all subentries."""
        home_ids = set()
        # Add home_id from main entry if it exists
        if main_entry.data.get("home_id"):
            home_ids.add(main_entry.data["home_id"])
        # Add home_ids from all subentries
        for entry in self.hass.config_entries.async_entries(DOMAIN):
            if entry.data.get("home_id") and entry != main_entry:
                home_ids.add(entry.data["home_id"])
        return home_ids
    @staticmethod
    def _get_subscription_status(home: dict) -> str:
        """
        Check subscription status of home.

        Returns:
            - "active": Subscription is currently active
            - "future": Subscription exists but starts in the future (validFrom > now)
            - "expired": Subscription exists but has ended (validTo < now)
            - "none": No subscription exists
        """
        subscription = home.get("currentSubscription")
        if subscription is None or subscription.get("status") is None:
            return "none"
        # Check validTo (contract end date)
        valid_to = subscription.get("validTo")
        if valid_to:
            try:
                # Compare in the timestamp's own timezone to avoid naive/aware mismatch.
                valid_to_dt = datetime.fromisoformat(valid_to)
                if valid_to_dt < datetime.now(valid_to_dt.tzinfo):
                    return "expired"
            except (ValueError, AttributeError):
                pass  # If parsing fails, continue with other checks
        # Check validFrom (contract start date)
        valid_from = subscription.get("validFrom")
        if valid_from:
            try:
                valid_from_dt = datetime.fromisoformat(valid_from)
                if valid_from_dt > datetime.now(valid_from_dt.tzinfo):
                    return "future"
            except (ValueError, AttributeError):
                pass  # If parsing fails, assume active
        return "active"
    def _get_home_title_with_status(self, home: dict) -> str:
        """Generate a user-friendly title for a home with subscription status."""
        base_title = self._get_home_title(home)
        status = self._get_subscription_status(home)
        if status == "none":
            return f"{base_title} ⚠️ (No active contract)"
        if status == "expired":
            return f"{base_title} ⚠️ (Contract expired)"
        if status == "future":
            return f"{base_title} ⚠️ (Contract starts soon)"
        return base_title
    @staticmethod
    def _format_city_name(city: str) -> str:
        """
        Format city name to title case.

        Converts 'MÜNCHEN' to 'München', handles multi-word cities like
        'BAD TÖLZ' -> 'Bad Tölz', and hyphenated cities like
        'GARMISCH-PARTENKIRCHEN' -> 'Garmisch-Partenkirchen'.
        """
        if not city:
            return city
        # Split by space and hyphen while preserving delimiters
        words = []
        current_word = ""
        for char in city:
            if char in (" ", "-"):
                if current_word:
                    words.append(current_word)
                words.append(char)  # Preserve delimiter
                current_word = ""
            else:
                current_word += char
        if current_word:  # Add last word
            words.append(current_word)
        # Capitalize first letter of each word (not delimiters)
        formatted_words = []
        for word in words:
            if word in (" ", "-"):
                formatted_words.append(word)
            else:
                # Capitalize first letter, lowercase rest
                formatted_words.append(word.capitalize())
        return "".join(formatted_words)
    @staticmethod
    def _get_entry_title(home: dict) -> str:
        """
        Generate entry title from address (for config entry title).

        Uses 'address1, City' format, e.g. 'Pählstraße 6B, München'.
        Does NOT use appNickname (that's for _get_home_title).
        """
        address = home.get("address", {})
        if not address:
            # Fallback to home ID if no address
            return home.get("id", "Unknown Home")
        parts = []
        # Always prefer address1
        address1 = address.get("address1")
        if address1 and address1.strip():
            parts.append(address1.strip())
        # Format city name (convert MÜNCHEN -> München)
        city = address.get("city")
        if city and city.strip():
            formatted_city = TibberPricesConfigFlowHandler._format_city_name(city.strip())
            parts.append(formatted_city)
        if parts:
            return ", ".join(parts)
        # Final fallback
        return home.get("id", "Unknown Home")
    @staticmethod
    def _get_home_title(home: dict) -> str:
        """
        Generate a user-friendly title for a home (for dropdown display).

        Prefers appNickname, falls back to address.
        """
        title = home.get("appNickname")
        if title and title.strip():
            return title.strip()
        address = home.get("address", {})
        if address:
            parts = []
            if address.get("address1"):
                parts.append(address["address1"])
            if address.get("city"):
                # Format city for display too
                city = address["city"]
                formatted_city = TibberPricesConfigFlowHandler._format_city_name(city)
                parts.append(formatted_city)
            if parts:
                return ", ".join(parts)
        return home.get("id", "Unknown Home")

View file

@ -1,371 +0,0 @@
"""Validation functions for Tibber Prices config flow."""
from __future__ import annotations
from typing import TYPE_CHECKING
from custom_components.tibber_prices.api import (
TibberPricesApiClient,
TibberPricesApiClientAuthenticationError,
TibberPricesApiClientCommunicationError,
TibberPricesApiClientError,
)
from custom_components.tibber_prices.const import (
DOMAIN,
MAX_DISTANCE_PERCENTAGE,
MAX_FLEX_PERCENTAGE,
MAX_GAP_COUNT,
MAX_MIN_PERIODS,
MAX_PRICE_RATING_THRESHOLD_HIGH,
MAX_PRICE_RATING_THRESHOLD_LOW,
MAX_PRICE_TREND_FALLING,
MAX_PRICE_TREND_RISING,
MAX_PRICE_TREND_STRONGLY_FALLING,
MAX_PRICE_TREND_STRONGLY_RISING,
MAX_RELAXATION_ATTEMPTS,
MAX_VOLATILITY_THRESHOLD_HIGH,
MAX_VOLATILITY_THRESHOLD_MODERATE,
MAX_VOLATILITY_THRESHOLD_VERY_HIGH,
MIN_GAP_COUNT,
MIN_PERIOD_LENGTH,
MIN_PRICE_RATING_THRESHOLD_HIGH,
MIN_PRICE_RATING_THRESHOLD_LOW,
MIN_PRICE_TREND_FALLING,
MIN_PRICE_TREND_RISING,
MIN_PRICE_TREND_STRONGLY_FALLING,
MIN_PRICE_TREND_STRONGLY_RISING,
MIN_RELAXATION_ATTEMPTS,
MIN_VOLATILITY_THRESHOLD_HIGH,
MIN_VOLATILITY_THRESHOLD_MODERATE,
MIN_VOLATILITY_THRESHOLD_VERY_HIGH,
)
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_create_clientsession
from homeassistant.loader import async_get_integration
if TYPE_CHECKING:
from homeassistant.core import HomeAssistant
class TibberPricesInvalidAuthError(HomeAssistantError):
    """Raised when the Tibber API rejects the supplied access token."""
class TibberPricesCannotConnectError(HomeAssistantError):
    """Raised when the Tibber API cannot be reached or returns an unexpected error."""
async def validate_api_token(hass: HomeAssistant, token: str) -> dict:
    """
    Verify a Tibber API access token by fetching the viewer details.

    Args:
        hass: Home Assistant instance
        token: Tibber API access token to verify

    Returns:
        The "viewer" payload returned by the Tibber API.

    Raises:
        TibberPricesInvalidAuthError: The token was rejected by the API.
        TibberPricesCannotConnectError: The API could not be reached or
            returned any other client error.
    """
    try:
        integration = await async_get_integration(hass, DOMAIN)
        integration_version = str(integration.version) if integration.version else "unknown"
        client = TibberPricesApiClient(
            access_token=token,
            session=async_create_clientsession(hass),
            version=integration_version,
        )
        response = await client.async_get_viewer_details()
    except TibberPricesApiClientAuthenticationError as exception:
        raise TibberPricesInvalidAuthError from exception
    except TibberPricesApiClientCommunicationError as exception:
        raise TibberPricesCannotConnectError from exception
    except TibberPricesApiClientError as exception:
        raise TibberPricesCannotConnectError from exception
    return response["viewer"]
def validate_period_length(minutes: int) -> bool:
    """
    Check that a period length is quarter-hour aligned and long enough.

    Args:
        minutes: Period length in minutes

    Returns:
        True when ``minutes`` is a multiple of 15 and at least MIN_PERIOD_LENGTH.
    """
    is_quarter_hour_aligned = minutes % 15 == 0
    is_long_enough = minutes >= MIN_PERIOD_LENGTH
    return is_quarter_hour_aligned and is_long_enough
def validate_flex_percentage(flex: float) -> bool:
    """
    Check that a flexibility percentage lies within the symmetric bounds.

    Args:
        flex: Flexibility percentage (may be negative for peak price)

    Returns:
        True when ``flex`` lies in [-MAX_FLEX_PERCENTAGE, +MAX_FLEX_PERCENTAGE].
    """
    above_lower = -MAX_FLEX_PERCENTAGE <= flex
    below_upper = flex <= MAX_FLEX_PERCENTAGE
    return above_lower and below_upper
def validate_min_periods(count: int) -> bool:
    """
    Check that a minimum-periods-per-day count is reasonable.

    Args:
        count: Number of minimum periods per day

    Returns:
        True when ``count`` lies in [1, MAX_MIN_PERIODS].
    """
    at_least_one = count >= 1
    within_cap = count <= MAX_MIN_PERIODS
    return at_least_one and within_cap
def validate_distance_percentage(distance: float) -> bool:
    """
    Check the distance-from-average percentage for Peak Price (positive values).

    Args:
        distance: Distance percentage (0-50% is the typical range)

    Returns:
        True when ``distance`` lies in [0, MAX_DISTANCE_PERCENTAGE].
    """
    non_negative = distance >= 0.0
    within_cap = distance <= MAX_DISTANCE_PERCENTAGE
    return non_negative and within_cap
def validate_best_price_distance_percentage(distance: float) -> bool:
    """
    Check the distance-from-average percentage for Best Price (negative values).

    Args:
        distance: Distance percentage (-50% to 0%; negative = below average)

    Returns:
        True when ``distance`` lies in [-MAX_DISTANCE_PERCENTAGE, 0].
    """
    above_floor = -MAX_DISTANCE_PERCENTAGE <= distance
    non_positive = distance <= 0.0
    return above_floor and non_positive
def validate_gap_count(count: int) -> bool:
    """
    Check that a gap count lies within the allowed bounds.

    Args:
        count: Gap count (0-8)

    Returns:
        True when ``count`` lies in [MIN_GAP_COUNT, MAX_GAP_COUNT].
    """
    above_floor = count >= MIN_GAP_COUNT
    below_ceiling = count <= MAX_GAP_COUNT
    return above_floor and below_ceiling
def validate_relaxation_attempts(attempts: int) -> bool:
    """
    Check that a relaxation attempts count lies within the allowed bounds.

    Args:
        attempts: Number of relaxation attempts (1-12)

    Returns:
        True when ``attempts`` lies in [MIN_RELAXATION_ATTEMPTS, MAX_RELAXATION_ATTEMPTS].
    """
    above_floor = attempts >= MIN_RELAXATION_ATTEMPTS
    below_ceiling = attempts <= MAX_RELAXATION_ATTEMPTS
    return above_floor and below_ceiling
def validate_price_rating_threshold_low(threshold: int) -> bool:
    """
    Check the LOW price rating threshold.

    Args:
        threshold: Low rating threshold percentage (-50 to -5)

    Returns:
        True when ``threshold`` lies in
        [MIN_PRICE_RATING_THRESHOLD_LOW, MAX_PRICE_RATING_THRESHOLD_LOW].
    """
    above_floor = threshold >= MIN_PRICE_RATING_THRESHOLD_LOW
    below_ceiling = threshold <= MAX_PRICE_RATING_THRESHOLD_LOW
    return above_floor and below_ceiling
def validate_price_rating_threshold_high(threshold: int) -> bool:
    """
    Check the HIGH price rating threshold.

    Args:
        threshold: High rating threshold percentage (5 to 50)

    Returns:
        True when ``threshold`` lies in
        [MIN_PRICE_RATING_THRESHOLD_HIGH, MAX_PRICE_RATING_THRESHOLD_HIGH].
    """
    above_floor = threshold >= MIN_PRICE_RATING_THRESHOLD_HIGH
    below_ceiling = threshold <= MAX_PRICE_RATING_THRESHOLD_HIGH
    return above_floor and below_ceiling
def validate_price_rating_thresholds(threshold_low: int, threshold_high: int) -> bool:
    """
    Cross-validate both price rating thresholds together.

    Both thresholds must be valid on their own, and the LOW threshold
    (negative, below average) must stay strictly below the HIGH threshold
    (positive, above average) so the bands never overlap at 0%.

    Args:
        threshold_low: Low rating threshold percentage (-50 to -5)
        threshold_high: High rating threshold percentage (5 to 50)

    Returns:
        True if both thresholds are valid individually AND threshold_low < threshold_high
    """
    return (
        validate_price_rating_threshold_low(threshold_low)
        and validate_price_rating_threshold_high(threshold_high)
        # Should always hold given the individual ranges; explicit check for safety.
        and threshold_low < threshold_high
    )
def validate_volatility_threshold_moderate(threshold: float) -> bool:
    """
    Check that the MODERATE volatility threshold is inside its allowed window.

    Args:
        threshold: Moderate volatility threshold percentage (5.0 to 25.0)

    Returns:
        True when MIN_VOLATILITY_THRESHOLD_MODERATE <= threshold <= MAX_VOLATILITY_THRESHOLD_MODERATE
    """
    return threshold >= MIN_VOLATILITY_THRESHOLD_MODERATE and threshold <= MAX_VOLATILITY_THRESHOLD_MODERATE
def validate_volatility_threshold_high(threshold: float) -> bool:
    """
    Check that the HIGH volatility threshold is inside its allowed window.

    Args:
        threshold: High volatility threshold percentage (20.0 to 40.0)

    Returns:
        True when MIN_VOLATILITY_THRESHOLD_HIGH <= threshold <= MAX_VOLATILITY_THRESHOLD_HIGH
    """
    # Reversed chained comparison — identical semantics, different shape.
    return MAX_VOLATILITY_THRESHOLD_HIGH >= threshold >= MIN_VOLATILITY_THRESHOLD_HIGH
def validate_volatility_threshold_very_high(threshold: float) -> bool:
    """
    Check that the VERY_HIGH volatility threshold is inside its allowed window.

    Args:
        threshold: Very high volatility threshold percentage (35.0 to 80.0)

    Returns:
        True when MIN_VOLATILITY_THRESHOLD_VERY_HIGH <= threshold <= MAX_VOLATILITY_THRESHOLD_VERY_HIGH
    """
    # Out-of-range on either side invalidates the threshold.
    return not (threshold < MIN_VOLATILITY_THRESHOLD_VERY_HIGH or threshold > MAX_VOLATILITY_THRESHOLD_VERY_HIGH)
def validate_volatility_thresholds(
    threshold_moderate: float,
    threshold_high: float,
    threshold_very_high: float,
) -> bool:
    """
    Cross-validate the three volatility thresholds as a group.

    Each value must pass its individual range check, and the trio must be
    strictly ascending (MODERATE < HIGH < VERY_HIGH) so the classification
    bands never overlap. Each threshold marks an escalating level of price
    volatility.

    Args:
        threshold_moderate: Moderate volatility threshold (5.0 to 25.0)
        threshold_high: High volatility threshold (20.0 to 40.0)
        threshold_very_high: Very high volatility threshold (35.0 to 80.0)

    Returns:
        True if all thresholds are individually valid and properly ordered
    """
    individually_valid = (
        validate_volatility_threshold_moderate(threshold_moderate)
        and validate_volatility_threshold_high(threshold_high)
        and validate_volatility_threshold_very_high(threshold_very_high)
    )
    # Enforce the cascading order MODERATE < HIGH < VERY_HIGH.
    return individually_valid and threshold_moderate < threshold_high < threshold_very_high
def validate_price_trend_rising(threshold: int) -> bool:
    """
    Check that the rising price-trend threshold is inside its allowed window.

    Args:
        threshold: Rising trend threshold percentage (1 to 50)

    Returns:
        True when MIN_PRICE_TREND_RISING <= threshold <= MAX_PRICE_TREND_RISING
    """
    return threshold >= MIN_PRICE_TREND_RISING and threshold <= MAX_PRICE_TREND_RISING
def validate_price_trend_falling(threshold: int) -> bool:
    """
    Check that the falling price-trend threshold is inside its allowed window.

    Args:
        threshold: Falling trend threshold percentage (-50 to -1)

    Returns:
        True when MIN_PRICE_TREND_FALLING <= threshold <= MAX_PRICE_TREND_FALLING
    """
    # Reversed chained comparison — identical semantics, different shape.
    return MAX_PRICE_TREND_FALLING >= threshold >= MIN_PRICE_TREND_FALLING
def validate_price_trend_strongly_rising(threshold: int) -> bool:
    """
    Check that the strongly-rising price-trend threshold is inside its allowed window.

    Args:
        threshold: Strongly rising trend threshold percentage (2 to 100)

    Returns:
        True when MIN_PRICE_TREND_STRONGLY_RISING <= threshold <= MAX_PRICE_TREND_STRONGLY_RISING
    """
    # Out-of-range on either side invalidates the threshold.
    return not (threshold < MIN_PRICE_TREND_STRONGLY_RISING or threshold > MAX_PRICE_TREND_STRONGLY_RISING)
def validate_price_trend_strongly_falling(threshold: int) -> bool:
    """
    Check that the strongly-falling price-trend threshold is inside its allowed window.

    Args:
        threshold: Strongly falling trend threshold percentage (-100 to -2)

    Returns:
        True when MIN_PRICE_TREND_STRONGLY_FALLING <= threshold <= MAX_PRICE_TREND_STRONGLY_FALLING
    """
    return threshold >= MIN_PRICE_TREND_STRONGLY_FALLING and threshold <= MAX_PRICE_TREND_STRONGLY_FALLING

View file

@ -1,11 +1,10 @@
"""Constants for the Tibber Price Analytics integration.""" """Constants for the Tibber Price Analytics integration."""
from __future__ import annotations
import json import json
import logging import logging
from collections.abc import Sequence
from pathlib import Path from pathlib import Path
from typing import TYPE_CHECKING, Any from typing import Any
import aiofiles import aiofiles
@ -15,27 +14,10 @@ from homeassistant.const import (
UnitOfPower, UnitOfPower,
UnitOfTime, UnitOfTime,
) )
from homeassistant.core import HomeAssistant
if TYPE_CHECKING:
from collections.abc import Sequence
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
DOMAIN = "tibber_prices" DOMAIN = "tibber_prices"
LOGGER = logging.getLogger(__package__)
# Data storage keys
DATA_CHART_CONFIG = "chart_config" # Key for chart export config in hass.data
DATA_CHART_METADATA_CONFIG = "chart_metadata_config" # Key for chart metadata config in hass.data
# Configuration keys
CONF_EXTENDED_DESCRIPTIONS = "extended_descriptions" CONF_EXTENDED_DESCRIPTIONS = "extended_descriptions"
CONF_VIRTUAL_TIME_OFFSET_DAYS = (
"virtual_time_offset_days" # Time-travel: days offset (negative only, e.g., -7 = 7 days ago)
)
CONF_VIRTUAL_TIME_OFFSET_HOURS = "virtual_time_offset_hours" # Time-travel: hours offset (-23 to +23)
CONF_VIRTUAL_TIME_OFFSET_MINUTES = "virtual_time_offset_minutes" # Time-travel: minutes offset (-59 to +59)
CONF_BEST_PRICE_FLEX = "best_price_flex" CONF_BEST_PRICE_FLEX = "best_price_flex"
CONF_PEAK_PRICE_FLEX = "peak_price_flex" CONF_PEAK_PRICE_FLEX = "peak_price_flex"
CONF_BEST_PRICE_MIN_DISTANCE_FROM_AVG = "best_price_min_distance_from_avg" CONF_BEST_PRICE_MIN_DISTANCE_FROM_AVG = "best_price_min_distance_from_avg"
@ -44,138 +26,38 @@ CONF_BEST_PRICE_MIN_PERIOD_LENGTH = "best_price_min_period_length"
CONF_PEAK_PRICE_MIN_PERIOD_LENGTH = "peak_price_min_period_length" CONF_PEAK_PRICE_MIN_PERIOD_LENGTH = "peak_price_min_period_length"
CONF_PRICE_RATING_THRESHOLD_LOW = "price_rating_threshold_low" CONF_PRICE_RATING_THRESHOLD_LOW = "price_rating_threshold_low"
CONF_PRICE_RATING_THRESHOLD_HIGH = "price_rating_threshold_high" CONF_PRICE_RATING_THRESHOLD_HIGH = "price_rating_threshold_high"
CONF_PRICE_RATING_HYSTERESIS = "price_rating_hysteresis"
CONF_PRICE_RATING_GAP_TOLERANCE = "price_rating_gap_tolerance"
CONF_PRICE_LEVEL_GAP_TOLERANCE = "price_level_gap_tolerance"
CONF_AVERAGE_SENSOR_DISPLAY = "average_sensor_display" # "median" or "mean"
CONF_PRICE_TREND_THRESHOLD_RISING = "price_trend_threshold_rising" CONF_PRICE_TREND_THRESHOLD_RISING = "price_trend_threshold_rising"
CONF_PRICE_TREND_THRESHOLD_FALLING = "price_trend_threshold_falling" CONF_PRICE_TREND_THRESHOLD_FALLING = "price_trend_threshold_falling"
CONF_PRICE_TREND_THRESHOLD_STRONGLY_RISING = "price_trend_threshold_strongly_rising"
CONF_PRICE_TREND_THRESHOLD_STRONGLY_FALLING = "price_trend_threshold_strongly_falling"
CONF_VOLATILITY_THRESHOLD_MODERATE = "volatility_threshold_moderate" CONF_VOLATILITY_THRESHOLD_MODERATE = "volatility_threshold_moderate"
CONF_VOLATILITY_THRESHOLD_HIGH = "volatility_threshold_high" CONF_VOLATILITY_THRESHOLD_HIGH = "volatility_threshold_high"
CONF_VOLATILITY_THRESHOLD_VERY_HIGH = "volatility_threshold_very_high" CONF_VOLATILITY_THRESHOLD_VERY_HIGH = "volatility_threshold_very_high"
CONF_BEST_PRICE_MIN_VOLATILITY = "best_price_min_volatility"
CONF_PEAK_PRICE_MIN_VOLATILITY = "peak_price_min_volatility"
CONF_BEST_PRICE_MAX_LEVEL = "best_price_max_level" CONF_BEST_PRICE_MAX_LEVEL = "best_price_max_level"
CONF_PEAK_PRICE_MIN_LEVEL = "peak_price_min_level" CONF_PEAK_PRICE_MIN_LEVEL = "peak_price_min_level"
CONF_BEST_PRICE_MAX_LEVEL_GAP_COUNT = "best_price_max_level_gap_count"
CONF_PEAK_PRICE_MAX_LEVEL_GAP_COUNT = "peak_price_max_level_gap_count"
CONF_ENABLE_MIN_PERIODS_BEST = "enable_min_periods_best"
CONF_MIN_PERIODS_BEST = "min_periods_best"
CONF_RELAXATION_ATTEMPTS_BEST = "relaxation_attempts_best"
CONF_ENABLE_MIN_PERIODS_PEAK = "enable_min_periods_peak"
CONF_MIN_PERIODS_PEAK = "min_periods_peak"
CONF_RELAXATION_ATTEMPTS_PEAK = "relaxation_attempts_peak"
CONF_CHART_DATA_CONFIG = "chart_data_config" # YAML config for chart data export
ATTRIBUTION = "Data provided by Tibber" ATTRIBUTION = "Data provided by Tibber"
# Integration name should match manifest.json # Integration name should match manifest.json
DEFAULT_NAME = "Tibber Price Information & Ratings" DEFAULT_NAME = "Tibber Price Information & Ratings"
DEFAULT_EXTENDED_DESCRIPTIONS = False DEFAULT_EXTENDED_DESCRIPTIONS = False
DEFAULT_VIRTUAL_TIME_OFFSET_DAYS = 0 # No time offset (live mode) DEFAULT_BEST_PRICE_FLEX = 15 # 15% flexibility for best price (user-facing, percent)
DEFAULT_VIRTUAL_TIME_OFFSET_HOURS = 0 DEFAULT_PEAK_PRICE_FLEX = -15 # 15% flexibility for peak price (user-facing, percent)
DEFAULT_VIRTUAL_TIME_OFFSET_MINUTES = 0 DEFAULT_BEST_PRICE_MIN_DISTANCE_FROM_AVG = 2 # 2% minimum distance from daily average for best price
DEFAULT_BEST_PRICE_FLEX = 15 # 15% base flexibility - optimal for relaxation mode (default enabled) DEFAULT_PEAK_PRICE_MIN_DISTANCE_FROM_AVG = 2 # 2% minimum distance from daily average for peak price
# Peak price flexibility is set to -20% (20% base flexibility - optimal for relaxation mode).
# This is intentionally more flexible than best price (15%) because peak price periods can be more variable,
# and users may benefit from earlier warnings about expensive periods, even if they are less sharply defined.
# The negative sign indicates that the threshold is set below the MAX price
# (e.g., -20% means MAX * 0.8), not above the average price.
# A higher percentage allows for more conservative detection, reducing false negatives for peak price warnings.
DEFAULT_PEAK_PRICE_FLEX = -20 # 20% base flexibility (user-facing, percent)
DEFAULT_BEST_PRICE_MIN_DISTANCE_FROM_AVG = (
-5
) # -5% minimum distance from daily average (below average, ensures significance)
DEFAULT_PEAK_PRICE_MIN_DISTANCE_FROM_AVG = (
5 # 5% minimum distance from daily average (above average, ensures significance)
)
DEFAULT_BEST_PRICE_MIN_PERIOD_LENGTH = 60 # 60 minutes minimum period length for best price (user-facing, minutes) DEFAULT_BEST_PRICE_MIN_PERIOD_LENGTH = 60 # 60 minutes minimum period length for best price (user-facing, minutes)
# Note: Peak price warnings are allowed for shorter periods (30 min) than best price periods (60 min). DEFAULT_PEAK_PRICE_MIN_PERIOD_LENGTH = 60 # 60 minutes minimum period length for peak price (user-facing, minutes)
# This asymmetry is intentional: shorter peak periods are acceptable for alerting users to brief expensive spikes,
# while best price periods require longer duration to ensure meaningful savings and avoid recommending short,
# impractical windows.
DEFAULT_PEAK_PRICE_MIN_PERIOD_LENGTH = 30 # 30 minutes minimum period length for peak price (user-facing, minutes)
DEFAULT_PRICE_RATING_THRESHOLD_LOW = -10 # Default rating threshold low percentage DEFAULT_PRICE_RATING_THRESHOLD_LOW = -10 # Default rating threshold low percentage
DEFAULT_PRICE_RATING_THRESHOLD_HIGH = 10 # Default rating threshold high percentage DEFAULT_PRICE_RATING_THRESHOLD_HIGH = 10 # Default rating threshold high percentage
DEFAULT_PRICE_RATING_HYSTERESIS = 2.0 # Hysteresis percentage to prevent flickering at threshold boundaries DEFAULT_PRICE_TREND_THRESHOLD_RISING = 5 # Default trend threshold for rising prices (%)
DEFAULT_PRICE_RATING_GAP_TOLERANCE = 1 # Max consecutive intervals to smooth out (0 = disabled) DEFAULT_PRICE_TREND_THRESHOLD_FALLING = -5 # Default trend threshold for falling prices (%, negative value)
DEFAULT_PRICE_LEVEL_GAP_TOLERANCE = 1 # Max consecutive intervals to smooth out for price level (0 = disabled) DEFAULT_VOLATILITY_THRESHOLD_MODERATE = 5.0 # Default threshold for MODERATE volatility (ct/øre)
DEFAULT_AVERAGE_SENSOR_DISPLAY = "median" # Default: show median in state, mean in attributes DEFAULT_VOLATILITY_THRESHOLD_HIGH = 15.0 # Default threshold for HIGH volatility (ct/øre)
DEFAULT_PRICE_TREND_THRESHOLD_RISING = 3 # Default trend threshold for rising prices (%) DEFAULT_VOLATILITY_THRESHOLD_VERY_HIGH = 30.0 # Default threshold for VERY_HIGH volatility (ct/øre)
DEFAULT_PRICE_TREND_THRESHOLD_FALLING = -3 # Default trend threshold for falling prices (%, negative value) DEFAULT_BEST_PRICE_MIN_VOLATILITY = "low" # Show best price at any volatility (optimization always useful)
# Strong trend thresholds default to 2x the base threshold. DEFAULT_PEAK_PRICE_MIN_VOLATILITY = "low" # Always show peak price (warning relevant even at low spreads)
# These are independently configurable to allow fine-tuning of "strongly" detection. DEFAULT_BEST_PRICE_MAX_LEVEL = "any" # Default: show best price periods regardless of price level
DEFAULT_PRICE_TREND_THRESHOLD_STRONGLY_RISING = 6 # Default strong rising threshold (%) DEFAULT_PEAK_PRICE_MIN_LEVEL = "any" # Default: show peak price periods regardless of price level
DEFAULT_PRICE_TREND_THRESHOLD_STRONGLY_FALLING = -6 # Default strong falling threshold (%, negative value)
# Default volatility thresholds (relative values using coefficient of variation)
# Coefficient of variation = (standard_deviation / mean) * 100%
# These thresholds are unitless and work across different price levels
DEFAULT_VOLATILITY_THRESHOLD_MODERATE = 15.0 # 15% - moderate price fluctuation
DEFAULT_VOLATILITY_THRESHOLD_HIGH = 30.0 # 30% - high price fluctuation
DEFAULT_VOLATILITY_THRESHOLD_VERY_HIGH = 50.0 # 50% - very high price fluctuation
DEFAULT_BEST_PRICE_MAX_LEVEL = "cheap" # Default: prefer genuinely cheap periods, relax to "any" if needed
DEFAULT_PEAK_PRICE_MIN_LEVEL = "expensive" # Default: prefer genuinely expensive periods, relax to "any" if needed
DEFAULT_BEST_PRICE_MAX_LEVEL_GAP_COUNT = 1 # Default: allow 1 level gap (e.g., CHEAP→NORMAL→CHEAP stays together)
DEFAULT_PEAK_PRICE_MAX_LEVEL_GAP_COUNT = 1 # Default: allow 1 level gap for peak price periods
MIN_INTERVALS_FOR_GAP_TOLERANCE = 6 # Minimum period length (in 15-min intervals = 1.5h) required for gap tolerance
DEFAULT_ENABLE_MIN_PERIODS_BEST = True # Default: minimum periods feature enabled for best price
DEFAULT_MIN_PERIODS_BEST = 2 # Default: require at least 2 best price periods (when enabled)
DEFAULT_RELAXATION_ATTEMPTS_BEST = 11 # Default: 11 steps allows escalation from 15% to 48% (3% increment per step)
DEFAULT_ENABLE_MIN_PERIODS_PEAK = True # Default: minimum periods feature enabled for peak price
DEFAULT_MIN_PERIODS_PEAK = 2 # Default: require at least 2 peak price periods (when enabled)
DEFAULT_RELAXATION_ATTEMPTS_PEAK = 11 # Default: 11 steps allows escalation from 20% to 50% (3% increment per step)
# Validation limits (used in GUI schemas and server-side validation)
# These ensure consistency between frontend and backend validation
MAX_FLEX_PERCENTAGE = 50 # Maximum flexibility percentage (aligned with GUI slider and MAX_SAFE_FLEX)
MAX_DISTANCE_PERCENTAGE = 50 # Maximum distance from average percentage (GUI slider limit)
MAX_GAP_COUNT = 8 # Maximum gap count for level filtering (GUI slider limit)
MAX_MIN_PERIODS = 10 # Maximum number of minimum periods per day (GUI slider limit)
MAX_RELAXATION_ATTEMPTS = 12 # Maximum relaxation attempts (GUI slider limit)
MIN_PERIOD_LENGTH = 15 # Minimum period length in minutes (1 quarter hour)
MAX_MIN_PERIOD_LENGTH = 180 # Maximum for minimum period length setting (3 hours - realistic for required minimum)
# Price rating threshold limits
# LOW threshold: negative values (prices below average) - practical range -50% to -5%
# HIGH threshold: positive values (prices above average) - practical range +5% to +50%
# Ensure minimum 5% gap between thresholds to avoid overlap at 0%
MIN_PRICE_RATING_THRESHOLD_LOW = -50 # Minimum value for low rating threshold
MAX_PRICE_RATING_THRESHOLD_LOW = -5 # Maximum value for low rating threshold (must be < HIGH)
MIN_PRICE_RATING_THRESHOLD_HIGH = 5 # Minimum value for high rating threshold (must be > LOW)
MAX_PRICE_RATING_THRESHOLD_HIGH = 50 # Maximum value for high rating threshold
MIN_PRICE_RATING_HYSTERESIS = 0.0 # Minimum hysteresis (0 = disabled)
MAX_PRICE_RATING_HYSTERESIS = 5.0 # Maximum hysteresis (5% band)
MIN_PRICE_RATING_GAP_TOLERANCE = 0 # Minimum gap tolerance (0 = disabled)
MAX_PRICE_RATING_GAP_TOLERANCE = 4 # Maximum gap tolerance (4 intervals = 1 hour)
MIN_PRICE_LEVEL_GAP_TOLERANCE = 0 # Minimum gap tolerance for price level (0 = disabled)
MAX_PRICE_LEVEL_GAP_TOLERANCE = 4 # Maximum gap tolerance for price level (4 intervals = 1 hour)
# Volatility threshold limits
# MODERATE threshold: practical range 5% to 25% (entry point for noticeable fluctuation)
# HIGH threshold: practical range 20% to 40% (significant price swings)
# VERY_HIGH threshold: practical range 35% to 80% (extreme volatility)
# Ensure cascading: MODERATE < HIGH < VERY_HIGH with ~5% minimum gaps
MIN_VOLATILITY_THRESHOLD_MODERATE = 5.0 # Minimum for moderate volatility threshold
MAX_VOLATILITY_THRESHOLD_MODERATE = 25.0 # Maximum for moderate volatility threshold (must be < HIGH)
MIN_VOLATILITY_THRESHOLD_HIGH = 20.0 # Minimum for high volatility threshold (must be > MODERATE)
MAX_VOLATILITY_THRESHOLD_HIGH = 40.0 # Maximum for high volatility threshold (must be < VERY_HIGH)
MIN_VOLATILITY_THRESHOLD_VERY_HIGH = 35.0 # Minimum for very high volatility threshold (must be > HIGH)
MAX_VOLATILITY_THRESHOLD_VERY_HIGH = 80.0 # Maximum for very high volatility threshold
# Price trend threshold limits
MIN_PRICE_TREND_RISING = 1 # Minimum rising trend threshold
MAX_PRICE_TREND_RISING = 50 # Maximum rising trend threshold
MIN_PRICE_TREND_FALLING = -50 # Minimum falling trend threshold (negative)
MAX_PRICE_TREND_FALLING = -1 # Maximum falling trend threshold (negative)
# Strong trend thresholds have higher ranges to allow detection of significant moves
MIN_PRICE_TREND_STRONGLY_RISING = 2 # Minimum strongly rising threshold (must be > rising)
MAX_PRICE_TREND_STRONGLY_RISING = 100 # Maximum strongly rising threshold
MIN_PRICE_TREND_STRONGLY_FALLING = -100 # Minimum strongly falling threshold (negative)
MAX_PRICE_TREND_STRONGLY_FALLING = -2 # Maximum strongly falling threshold (must be < falling)
# Gap count and relaxation limits
MIN_GAP_COUNT = 0 # Minimum gap count
MIN_RELAXATION_ATTEMPTS = 1 # Minimum relaxation attempts
# Home types # Home types
HOME_TYPE_APARTMENT = "APARTMENT" HOME_TYPE_APARTMENT = "APARTMENT"
@ -194,22 +76,12 @@ HOME_TYPES = {
# Currency mapping: ISO code -> (major_symbol, minor_symbol, minor_name) # Currency mapping: ISO code -> (major_symbol, minor_symbol, minor_name)
# For currencies with Home Assistant constants, use those; otherwise define custom ones # For currencies with Home Assistant constants, use those; otherwise define custom ones
CURRENCY_INFO = { CURRENCY_INFO = {
"EUR": (CURRENCY_EURO, "ct", "Cents"), "EUR": (CURRENCY_EURO, "ct", "cents"),
"NOK": ("kr", "øre", "Øre"), "NOK": ("kr", "øre", "øre"),
"SEK": ("kr", "öre", "Öre"), "SEK": ("kr", "öre", "öre"),
"DKK": ("kr", "øre", "Øre"), "DKK": ("kr", "øre", "øre"),
"USD": (CURRENCY_DOLLAR, "¢", "Cents"), "USD": (CURRENCY_DOLLAR, "¢", "cents"),
"GBP": ("£", "p", "Pence"), "GBP": ("£", "p", "pence"),
}
# Base currency names: ISO code -> full currency name (in local language)
CURRENCY_NAMES = {
"EUR": "Euro",
"NOK": "Norske kroner",
"SEK": "Svenska kronor",
"DKK": "Danske kroner",
"USD": "US Dollar",
"GBP": "British Pound",
} }
@ -231,9 +103,9 @@ def get_currency_info(currency_code: str | None) -> tuple[str, str, str]:
return CURRENCY_INFO.get(currency_code.upper(), CURRENCY_INFO["EUR"]) return CURRENCY_INFO.get(currency_code.upper(), CURRENCY_INFO["EUR"])
def format_price_unit_base(currency_code: str | None) -> str: def format_price_unit_major(currency_code: str | None) -> str:
""" """
Format the price unit string with base currency unit (e.g., '€/kWh'). Format the price unit string with major currency unit (e.g., '€/kWh').
Args: Args:
currency_code: ISO 4217 currency code (e.g., 'EUR', 'NOK', 'SEK') currency_code: ISO 4217 currency code (e.g., 'EUR', 'NOK', 'SEK')
@ -242,13 +114,13 @@ def format_price_unit_base(currency_code: str | None) -> str:
Formatted unit string like '€/kWh' or 'kr/kWh' Formatted unit string like '€/kWh' or 'kr/kWh'
""" """
base_symbol, _, _ = get_currency_info(currency_code) major_symbol, _, _ = get_currency_info(currency_code)
return f"{base_symbol}/{UnitOfPower.KILO_WATT}{UnitOfTime.HOURS}" return f"{major_symbol}/{UnitOfPower.KILO_WATT}{UnitOfTime.HOURS}"
def format_price_unit_subunit(currency_code: str | None) -> str: def format_price_unit_minor(currency_code: str | None) -> str:
""" """
Format the price unit string with subunit currency unit (e.g., 'ct/kWh'). Format the price unit string with minor currency unit (e.g., 'ct/kWh').
Args: Args:
currency_code: ISO 4217 currency code (e.g., 'EUR', 'NOK', 'SEK') currency_code: ISO 4217 currency code (e.g., 'EUR', 'NOK', 'SEK')
@ -257,190 +129,53 @@ def format_price_unit_subunit(currency_code: str | None) -> str:
Formatted unit string like 'ct/kWh' or 'øre/kWh' Formatted unit string like 'ct/kWh' or 'øre/kWh'
""" """
_, subunit_symbol, _ = get_currency_info(currency_code) _, minor_symbol, _ = get_currency_info(currency_code)
return f"{subunit_symbol}/{UnitOfPower.KILO_WATT}{UnitOfTime.HOURS}" return f"{minor_symbol}/{UnitOfPower.KILO_WATT}{UnitOfTime.HOURS}"
def get_currency_name(currency_code: str | None) -> str: def calculate_volatility_level(
spread: float,
threshold_moderate: float | None = None,
threshold_high: float | None = None,
threshold_very_high: float | None = None,
) -> str:
""" """
Get the full name of the base currency. Calculate volatility level from price spread.
Volatility indicates how much prices fluctuate during a period, which helps
determine whether active load shifting is worthwhile.
Args: Args:
currency_code: ISO 4217 currency code (e.g., 'EUR', 'NOK', 'SEK') spread: Absolute price difference between max and min (in minor currency units, e.g., ct or øre)
threshold_moderate: Custom threshold for MODERATE level (default: use VOLATILITY_THRESHOLD_MODERATE)
threshold_high: Custom threshold for HIGH level (default: use VOLATILITY_THRESHOLD_HIGH)
threshold_very_high: Custom threshold for VERY_HIGH level (default: use VOLATILITY_THRESHOLD_VERY_HIGH)
Returns: Returns:
Full currency name like 'Euro' or 'Norwegian Krone' Volatility level: LOW, MODERATE, HIGH, or VERY_HIGH
Defaults to 'Euro' if currency is not recognized
Examples:
- spread < 5: LOW → minimal optimization potential
- 5 ≤ spread < 15: MODERATE → some optimization worthwhile
- 15 ≤ spread < 30: HIGH → strong optimization recommended
- spread ≥ 30: VERY_HIGH → maximum optimization potential
""" """
if not currency_code: # Use provided thresholds or fall back to constants
currency_code = "EUR" t_moderate = threshold_moderate if threshold_moderate is not None else VOLATILITY_THRESHOLD_MODERATE
t_high = threshold_high if threshold_high is not None else VOLATILITY_THRESHOLD_HIGH
t_very_high = threshold_very_high if threshold_very_high is not None else VOLATILITY_THRESHOLD_VERY_HIGH
return CURRENCY_NAMES.get(currency_code.upper(), CURRENCY_NAMES["EUR"]) if spread < t_moderate:
return VOLATILITY_LOW
if spread < t_high:
return VOLATILITY_MODERATE
if spread < t_very_high:
return VOLATILITY_HIGH
return VOLATILITY_VERY_HIGH
# ============================================================================ # Price level constants from Tibber API
# Currency Display Mode Configuration
# ============================================================================
# Configuration key for currency display mode
CONF_CURRENCY_DISPLAY_MODE = "currency_display_mode"
# Display mode values
DISPLAY_MODE_BASE = "base" # Display in base currency units (€, kr)
DISPLAY_MODE_SUBUNIT = "subunit" # Display in subunit currency units (ct, øre)
# Intelligent per-currency defaults based on market analysis
# EUR: Subunit (cents) - established convention in Germany/Netherlands
# NOK/SEK/DKK: Base (kroner) - Scandinavian preference for whole units
# USD/GBP: Base - international standard
DEFAULT_CURRENCY_DISPLAY = {
"EUR": DISPLAY_MODE_SUBUNIT,
"NOK": DISPLAY_MODE_BASE,
"SEK": DISPLAY_MODE_BASE,
"DKK": DISPLAY_MODE_BASE,
"USD": DISPLAY_MODE_BASE,
"GBP": DISPLAY_MODE_BASE,
}
def get_default_currency_display(currency_code: str | None) -> str:
"""
Get intelligent default display mode for a currency.
Args:
currency_code: ISO 4217 currency code (e.g., 'EUR', 'NOK')
Returns:
Default display mode ('base' or 'subunit')
"""
if not currency_code:
return DISPLAY_MODE_SUBUNIT # Fallback default
return DEFAULT_CURRENCY_DISPLAY.get(currency_code.upper(), DISPLAY_MODE_SUBUNIT)
def get_default_options(currency_code: str | None) -> dict[str, Any]:
"""
Get complete default options for a new config entry.
This ensures new config entries have explicitly set defaults based on their currency,
distinguishing them from legacy config entries that need migration.
Options structure has been flattened for single-section steps:
- Flat values: extended_descriptions, average_sensor_display, currency_display_mode,
price_rating_thresholds, volatility_thresholds, price_trend_thresholds, time offsets
- Nested sections (multi-section steps only): period_settings, flexibility_settings,
relaxation_and_target_periods
Args:
currency_code: ISO 4217 currency code (e.g., 'EUR', 'NOK')
Returns:
Dictionary with all default option values in nested section structure
"""
return {
# Flat configuration values
CONF_EXTENDED_DESCRIPTIONS: DEFAULT_EXTENDED_DESCRIPTIONS,
CONF_AVERAGE_SENSOR_DISPLAY: DEFAULT_AVERAGE_SENSOR_DISPLAY,
CONF_CURRENCY_DISPLAY_MODE: get_default_currency_display(currency_code),
CONF_VIRTUAL_TIME_OFFSET_DAYS: DEFAULT_VIRTUAL_TIME_OFFSET_DAYS,
CONF_VIRTUAL_TIME_OFFSET_HOURS: DEFAULT_VIRTUAL_TIME_OFFSET_HOURS,
CONF_VIRTUAL_TIME_OFFSET_MINUTES: DEFAULT_VIRTUAL_TIME_OFFSET_MINUTES,
# Price rating settings (flat - single-section step)
CONF_PRICE_RATING_THRESHOLD_LOW: DEFAULT_PRICE_RATING_THRESHOLD_LOW,
CONF_PRICE_RATING_THRESHOLD_HIGH: DEFAULT_PRICE_RATING_THRESHOLD_HIGH,
CONF_PRICE_RATING_HYSTERESIS: DEFAULT_PRICE_RATING_HYSTERESIS,
CONF_PRICE_RATING_GAP_TOLERANCE: DEFAULT_PRICE_RATING_GAP_TOLERANCE,
CONF_PRICE_LEVEL_GAP_TOLERANCE: DEFAULT_PRICE_LEVEL_GAP_TOLERANCE,
# Volatility thresholds (flat - single-section step)
CONF_VOLATILITY_THRESHOLD_MODERATE: DEFAULT_VOLATILITY_THRESHOLD_MODERATE,
CONF_VOLATILITY_THRESHOLD_HIGH: DEFAULT_VOLATILITY_THRESHOLD_HIGH,
CONF_VOLATILITY_THRESHOLD_VERY_HIGH: DEFAULT_VOLATILITY_THRESHOLD_VERY_HIGH,
# Price trend thresholds (flat - single-section step)
CONF_PRICE_TREND_THRESHOLD_RISING: DEFAULT_PRICE_TREND_THRESHOLD_RISING,
CONF_PRICE_TREND_THRESHOLD_FALLING: DEFAULT_PRICE_TREND_THRESHOLD_FALLING,
# Nested section: Period settings (shared by best/peak price)
"period_settings": {
CONF_BEST_PRICE_MIN_PERIOD_LENGTH: DEFAULT_BEST_PRICE_MIN_PERIOD_LENGTH,
CONF_PEAK_PRICE_MIN_PERIOD_LENGTH: DEFAULT_PEAK_PRICE_MIN_PERIOD_LENGTH,
CONF_BEST_PRICE_MAX_LEVEL_GAP_COUNT: DEFAULT_BEST_PRICE_MAX_LEVEL_GAP_COUNT,
CONF_PEAK_PRICE_MAX_LEVEL_GAP_COUNT: DEFAULT_PEAK_PRICE_MAX_LEVEL_GAP_COUNT,
CONF_BEST_PRICE_MAX_LEVEL: DEFAULT_BEST_PRICE_MAX_LEVEL,
CONF_PEAK_PRICE_MIN_LEVEL: DEFAULT_PEAK_PRICE_MIN_LEVEL,
},
# Nested section: Flexibility settings (shared by best/peak price)
"flexibility_settings": {
CONF_BEST_PRICE_FLEX: DEFAULT_BEST_PRICE_FLEX,
CONF_PEAK_PRICE_FLEX: DEFAULT_PEAK_PRICE_FLEX,
CONF_BEST_PRICE_MIN_DISTANCE_FROM_AVG: DEFAULT_BEST_PRICE_MIN_DISTANCE_FROM_AVG,
CONF_PEAK_PRICE_MIN_DISTANCE_FROM_AVG: DEFAULT_PEAK_PRICE_MIN_DISTANCE_FROM_AVG,
},
# Nested section: Relaxation and target periods (shared by best/peak price)
"relaxation_and_target_periods": {
CONF_ENABLE_MIN_PERIODS_BEST: DEFAULT_ENABLE_MIN_PERIODS_BEST,
CONF_MIN_PERIODS_BEST: DEFAULT_MIN_PERIODS_BEST,
CONF_RELAXATION_ATTEMPTS_BEST: DEFAULT_RELAXATION_ATTEMPTS_BEST,
CONF_ENABLE_MIN_PERIODS_PEAK: DEFAULT_ENABLE_MIN_PERIODS_PEAK,
CONF_MIN_PERIODS_PEAK: DEFAULT_MIN_PERIODS_PEAK,
CONF_RELAXATION_ATTEMPTS_PEAK: DEFAULT_RELAXATION_ATTEMPTS_PEAK,
},
}
def get_display_unit_factor(config_entry: ConfigEntry) -> int:
"""
Get multiplication factor for converting base to display currency.
Internal storage is ALWAYS in base currency (4 decimals precision).
This function returns the conversion factor based on user configuration.
Args:
config_entry: ConfigEntry with currency_display_mode option
Returns:
100 for subunit currency display, 1 for base currency display
Example:
price_base = 0.2534 # Internal: 0.2534 €/kWh
factor = get_display_unit_factor(config_entry)
display_value = round(price_base * factor, 2)
# → 25.34 ct/kWh (subunit) or 0.25 €/kWh (base)
"""
display_mode = config_entry.options.get(CONF_CURRENCY_DISPLAY_MODE, DISPLAY_MODE_SUBUNIT)
return 100 if display_mode == DISPLAY_MODE_SUBUNIT else 1
def get_display_unit_string(config_entry: ConfigEntry, currency_code: str | None) -> str:
"""
Get unit string for display based on configuration.
Args:
config_entry: ConfigEntry with currency_display_mode option
currency_code: ISO 4217 currency code
Returns:
Formatted unit string (e.g., 'ct/kWh' or '€/kWh')
"""
display_mode = config_entry.options.get(CONF_CURRENCY_DISPLAY_MODE, DISPLAY_MODE_SUBUNIT)
if display_mode == DISPLAY_MODE_SUBUNIT:
return format_price_unit_subunit(currency_code)
return format_price_unit_base(currency_code)
# ============================================================================
# Price Level, Rating, and Volatility Constants
# ============================================================================
# IMPORTANT: These string constants are the single source of truth for
# valid enum values. The Literal types in sensor/types.py and binary_sensor/types.py
# should be kept in sync with these values manually.
# Price level constants (from Tibber API)
PRICE_LEVEL_VERY_CHEAP = "VERY_CHEAP" PRICE_LEVEL_VERY_CHEAP = "VERY_CHEAP"
PRICE_LEVEL_CHEAP = "CHEAP" PRICE_LEVEL_CHEAP = "CHEAP"
PRICE_LEVEL_NORMAL = "NORMAL" PRICE_LEVEL_NORMAL = "NORMAL"
@ -452,24 +187,21 @@ PRICE_RATING_LOW = "LOW"
PRICE_RATING_NORMAL = "NORMAL" PRICE_RATING_NORMAL = "NORMAL"
PRICE_RATING_HIGH = "HIGH" PRICE_RATING_HIGH = "HIGH"
# Price volatility level constants # Price volatility levels (based on spread between min and max)
VOLATILITY_LOW = "LOW" VOLATILITY_LOW = "LOW"
VOLATILITY_MODERATE = "MODERATE" VOLATILITY_MODERATE = "MODERATE"
VOLATILITY_HIGH = "HIGH" VOLATILITY_HIGH = "HIGH"
VOLATILITY_VERY_HIGH = "VERY_HIGH" VOLATILITY_VERY_HIGH = "VERY_HIGH"
# Price trend constants (calculated values with 5-level scale) # Volatility thresholds (in minor currency units like ct or øre)
# Used by trend sensors: momentary, short-term, mid-term, long-term VOLATILITY_THRESHOLD_MODERATE = 5 # Below this: LOW, above: MODERATE
PRICE_TREND_STRONGLY_FALLING = "strongly_falling" VOLATILITY_THRESHOLD_HIGH = 15 # Below this: MODERATE, above: HIGH
PRICE_TREND_FALLING = "falling" VOLATILITY_THRESHOLD_VERY_HIGH = 30 # Below this: HIGH, above: VERY_HIGH
PRICE_TREND_STABLE = "stable"
PRICE_TREND_RISING = "rising"
PRICE_TREND_STRONGLY_RISING = "strongly_rising"
# Sensor options (lowercase versions for ENUM device class) # Sensor options (lowercase versions for ENUM device class)
# NOTE: These constants define the valid enum options, but they are not used directly # NOTE: These constants define the valid enum options, but they are not used directly
# in sensor/definitions.py due to import timing issues. Instead, the options are defined inline # in sensor.py due to import timing issues. Instead, the options are defined inline
# in the SensorEntityDescription objects. Keep these in sync with sensor/definitions.py! # in the SensorEntityDescription objects. Keep these in sync with sensor.py!
PRICE_LEVEL_OPTIONS = [ PRICE_LEVEL_OPTIONS = [
PRICE_LEVEL_VERY_CHEAP.lower(), PRICE_LEVEL_VERY_CHEAP.lower(),
PRICE_LEVEL_CHEAP.lower(), PRICE_LEVEL_CHEAP.lower(),
@ -491,16 +223,15 @@ VOLATILITY_OPTIONS = [
VOLATILITY_VERY_HIGH.lower(), VOLATILITY_VERY_HIGH.lower(),
] ]
# Trend options for enum sensors (lowercase versions for ENUM device class) # Valid options for minimum volatility filter for periods
PRICE_TREND_OPTIONS = [ MIN_VOLATILITY_FOR_PERIODS_OPTIONS = [
PRICE_TREND_STRONGLY_FALLING, VOLATILITY_LOW.lower(), # Show at any volatility (≥0ct spread) - no filter
PRICE_TREND_FALLING, VOLATILITY_MODERATE.lower(), # Only show periods when volatility ≥ MODERATE (≥5ct)
PRICE_TREND_STABLE, VOLATILITY_HIGH.lower(), # Only show periods when volatility ≥ HIGH (≥15ct)
PRICE_TREND_RISING, VOLATILITY_VERY_HIGH.lower(), # Only show periods when volatility ≥ VERY_HIGH (≥30ct)
PRICE_TREND_STRONGLY_RISING,
] ]
# Valid options for best price maximum level filter # Valid options for best price maximum level filter (AND-linked with volatility filter)
# Sorted from cheap to expensive: user selects "up to how expensive" # Sorted from cheap to expensive: user selects "up to how expensive"
BEST_PRICE_MAX_LEVEL_OPTIONS = [ BEST_PRICE_MAX_LEVEL_OPTIONS = [
"any", # No filter, allow all price levels "any", # No filter, allow all price levels
@ -510,7 +241,7 @@ BEST_PRICE_MAX_LEVEL_OPTIONS = [
PRICE_LEVEL_EXPENSIVE.lower(), # Only show if level ≤ EXPENSIVE PRICE_LEVEL_EXPENSIVE.lower(), # Only show if level ≤ EXPENSIVE
] ]
# Valid options for peak price minimum level filter # Valid options for peak price minimum level filter (AND-linked with volatility filter)
# Sorted from expensive to cheap: user selects "starting from how expensive" # Sorted from expensive to cheap: user selects "starting from how expensive"
PEAK_PRICE_MIN_LEVEL_OPTIONS = [ PEAK_PRICE_MIN_LEVEL_OPTIONS = [
"any", # No filter, allow all price levels "any", # No filter, allow all price levels
@ -520,12 +251,6 @@ PEAK_PRICE_MIN_LEVEL_OPTIONS = [
PRICE_LEVEL_VERY_CHEAP.lower(), # Only show if level ≥ VERY_CHEAP PRICE_LEVEL_VERY_CHEAP.lower(), # Only show if level ≥ VERY_CHEAP
] ]
# Relaxation level constants (for period filter relaxation)
# These describe which filter relaxation was applied to find a period
RELAXATION_NONE = "none" # No relaxation, normal filters
RELAXATION_LEVEL_ANY = "level_any" # Level filter disabled
RELAXATION_ALL_FILTERS_OFF = "all_filters_off" # All filters disabled (deprecated, same as level_any)
# Mapping for comparing price levels (used for sorting) # Mapping for comparing price levels (used for sorting)
PRICE_LEVEL_MAPPING = { PRICE_LEVEL_MAPPING = {
PRICE_LEVEL_VERY_CHEAP: -2, PRICE_LEVEL_VERY_CHEAP: -2,
@ -542,75 +267,6 @@ PRICE_RATING_MAPPING = {
PRICE_RATING_HIGH: 1, PRICE_RATING_HIGH: 1,
} }
# Mapping for comparing price trends (used for sorting and automation comparisons)
# Values range from -2 (strongly falling) to +2 (strongly rising), with 0 = stable
PRICE_TREND_MAPPING = {
PRICE_TREND_STRONGLY_FALLING: -2,
PRICE_TREND_FALLING: -1,
PRICE_TREND_STABLE: 0,
PRICE_TREND_RISING: 1,
PRICE_TREND_STRONGLY_RISING: 2,
}
# Icon mapping for price levels (dynamic icons based on level)
PRICE_LEVEL_ICON_MAPPING = {
PRICE_LEVEL_VERY_CHEAP: "mdi:gauge-empty",
PRICE_LEVEL_CHEAP: "mdi:gauge-low",
PRICE_LEVEL_NORMAL: "mdi:gauge",
PRICE_LEVEL_EXPENSIVE: "mdi:gauge-full",
PRICE_LEVEL_VERY_EXPENSIVE: "mdi:alert",
}
# Color mapping for price levels (CSS variables for theme compatibility)
PRICE_LEVEL_COLOR_MAPPING = {
PRICE_LEVEL_VERY_CHEAP: "var(--success-color)",
PRICE_LEVEL_CHEAP: "var(--success-color)",
PRICE_LEVEL_NORMAL: "var(--state-icon-color)",
PRICE_LEVEL_EXPENSIVE: "var(--warning-color)",
PRICE_LEVEL_VERY_EXPENSIVE: "var(--error-color)",
}
# Icon mapping for current price sensors (dynamic icons based on price level)
# Used by current_interval_price and current_hour_average_price sensors
# Icon shows price level (cheap/normal/expensive), icon_color reinforces with color
PRICE_LEVEL_CASH_ICON_MAPPING = {
PRICE_LEVEL_VERY_CHEAP: "mdi:cash-multiple", # Many coins (save a lot!)
PRICE_LEVEL_CHEAP: "mdi:cash-plus", # Cash with plus (good price)
PRICE_LEVEL_NORMAL: "mdi:cash", # Standard cash icon
PRICE_LEVEL_EXPENSIVE: "mdi:cash-minus", # Cash with minus (expensive)
PRICE_LEVEL_VERY_EXPENSIVE: "mdi:cash-remove", # Cash crossed out (very expensive)
}
# Icon mapping for price ratings (dynamic icons based on rating)
PRICE_RATING_ICON_MAPPING = {
PRICE_RATING_LOW: "mdi:thumb-up",
PRICE_RATING_NORMAL: "mdi:thumbs-up-down",
PRICE_RATING_HIGH: "mdi:thumb-down",
}
# Color mapping for price ratings (CSS variables for theme compatibility)
PRICE_RATING_COLOR_MAPPING = {
PRICE_RATING_LOW: "var(--success-color)",
PRICE_RATING_NORMAL: "var(--state-icon-color)",
PRICE_RATING_HIGH: "var(--error-color)",
}
# Icon mapping for volatility levels (dynamic icons based on volatility)
VOLATILITY_ICON_MAPPING = {
VOLATILITY_LOW: "mdi:chart-line-variant",
VOLATILITY_MODERATE: "mdi:chart-timeline-variant",
VOLATILITY_HIGH: "mdi:chart-bar",
VOLATILITY_VERY_HIGH: "mdi:chart-scatter-plot",
}
# Color mapping for volatility levels (CSS variables for theme compatibility)
VOLATILITY_COLOR_MAPPING = {
VOLATILITY_LOW: "var(--success-color)",
VOLATILITY_MODERATE: "var(--info-color)",
VOLATILITY_HIGH: "var(--warning-color)",
VOLATILITY_VERY_HIGH: "var(--error-color)",
}
# Mapping for comparing volatility levels (used for sorting) # Mapping for comparing volatility levels (used for sorting)
VOLATILITY_MAPPING = { VOLATILITY_MAPPING = {
VOLATILITY_LOW: 0, VOLATILITY_LOW: 0,
@ -619,36 +275,7 @@ VOLATILITY_MAPPING = {
VOLATILITY_VERY_HIGH: 3, VOLATILITY_VERY_HIGH: 3,
} }
# Icon mapping for binary sensors (dynamic icons based on state) LOGGER = logging.getLogger(__package__)
# Note: OFF state icons can vary based on whether future periods exist
BINARY_SENSOR_ICON_MAPPING = {
"best_price_period": {
"on": "mdi:piggy-bank",
"off": "mdi:timer-sand", # Has future periods
"off_no_future": "mdi:sleep", # No future periods in next 6h
},
"peak_price_period": {
"on": "mdi:alert-circle",
"off": "mdi:shield-check", # Has future periods
"off_no_future": "mdi:sleep", # No future periods in next 6h
},
"chart_data_export": {
"on": "mdi:database-export", # Data available
"off": "mdi:database-alert", # Service call failed or no config
},
}
# Color mapping for binary sensors (CSS variables for theme compatibility)
BINARY_SENSOR_COLOR_MAPPING = {
"best_price_period": {
"on": "var(--success-color)",
"off": "var(--state-icon-color)",
},
"peak_price_period": {
"on": "var(--error-color)",
"off": "var(--state-icon-color)",
},
}
# Path to custom translations directory # Path to custom translations directory
CUSTOM_TRANSLATIONS_DIR = Path(__file__).parent / "custom_translations" CUSTOM_TRANSLATIONS_DIR = Path(__file__).parent / "custom_translations"
@ -973,9 +600,7 @@ async def async_get_price_level_translation(
The localized price level if found, None otherwise The localized price level if found, None otherwise
""" """
return await async_get_translation( return await async_get_translation(hass, ["sensor", "price_level", "price_levels", level], language)
hass, ["sensor", "current_interval_price_level", "price_levels", level], language
)
def get_price_level_translation( def get_price_level_translation(
@ -995,7 +620,7 @@ def get_price_level_translation(
The localized price level if found in cache, None otherwise The localized price level if found in cache, None otherwise
""" """
return get_translation(["sensor", "current_interval_price_level", "price_levels", level], language) return get_translation(["sensor", "price_level", "price_levels", level], language)
async def async_get_home_type_translation( async def async_get_home_type_translation(

View file

@ -0,0 +1,926 @@
"""Enhanced coordinator for fetching Tibber price data with comprehensive caching."""
from __future__ import annotations
import logging
from datetime import date, datetime, timedelta
from typing import TYPE_CHECKING, Any
from homeassistant.const import CONF_ACCESS_TOKEN
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers import aiohttp_client
from homeassistant.helpers.event import async_track_utc_time_change
from homeassistant.helpers.storage import Store
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util import dt as dt_util
if TYPE_CHECKING:
from homeassistant.config_entries import ConfigEntry
from .api import (
TibberPricesApiClient,
TibberPricesApiClientAuthenticationError,
TibberPricesApiClientCommunicationError,
TibberPricesApiClientError,
)
from .const import (
CONF_BEST_PRICE_FLEX,
CONF_BEST_PRICE_MIN_DISTANCE_FROM_AVG,
CONF_BEST_PRICE_MIN_PERIOD_LENGTH,
CONF_PEAK_PRICE_FLEX,
CONF_PEAK_PRICE_MIN_DISTANCE_FROM_AVG,
CONF_PEAK_PRICE_MIN_PERIOD_LENGTH,
CONF_PRICE_RATING_THRESHOLD_HIGH,
CONF_PRICE_RATING_THRESHOLD_LOW,
DEFAULT_BEST_PRICE_FLEX,
DEFAULT_BEST_PRICE_MIN_DISTANCE_FROM_AVG,
DEFAULT_BEST_PRICE_MIN_PERIOD_LENGTH,
DEFAULT_PEAK_PRICE_FLEX,
DEFAULT_PEAK_PRICE_MIN_DISTANCE_FROM_AVG,
DEFAULT_PEAK_PRICE_MIN_PERIOD_LENGTH,
DEFAULT_PRICE_RATING_THRESHOLD_HIGH,
DEFAULT_PRICE_RATING_THRESHOLD_LOW,
DOMAIN,
)
from .period_utils import PeriodConfig, calculate_periods
from .price_utils import (
enrich_price_info_with_differences,
find_price_data_for_interval,
)
# Module-level logger for this coordinator module.
_LOGGER = logging.getLogger(__name__)

# Storage version for storing data
STORAGE_VERSION = 1

# Update interval - fetch data every 15 minutes (when data is incomplete)
UPDATE_INTERVAL = timedelta(minutes=15)

# Update interval when all data is available - every 4 hours (reduce API calls)
UPDATE_INTERVAL_COMPLETE = timedelta(hours=4)

# Quarter-hour boundaries for entity state updates (minutes: 00, 15, 30, 45)
QUARTER_HOUR_BOUNDARIES = (0, 15, 30, 45)

# Hour after which tomorrow's price data is expected (13:00 local time)
TOMORROW_DATA_CHECK_HOUR = 13

# Entity keys that require quarter-hour updates (time-sensitive entities)
# These entities calculate values based on current time and need updates every 15 minutes
# All other entities only update when new API data arrives
TIME_SENSITIVE_ENTITY_KEYS = frozenset(
    {
        # Current/next/previous price sensors
        "current_price",
        "next_interval_price",
        "previous_interval_price",
        # Current/next/previous price levels
        "price_level",
        "next_interval_price_level",
        "previous_interval_price_level",
        # Rolling hour calculations (5-interval windows)
        "current_hour_average",
        "next_hour_average",
        "current_hour_price_level",
        "next_hour_price_level",
        # Current/next/previous price ratings
        "price_rating",
        "next_interval_price_rating",
        "previous_interval_price_rating",
        "current_hour_price_rating",
        "next_hour_price_rating",
        # Future average sensors (rolling N-hour windows from next interval)
        "next_avg_1h",
        "next_avg_2h",
        "next_avg_3h",
        "next_avg_4h",
        "next_avg_5h",
        "next_avg_6h",
        "next_avg_8h",
        "next_avg_12h",
        # Price trend sensors
        "price_trend_1h",
        "price_trend_2h",
        "price_trend_3h",
        "price_trend_4h",
        "price_trend_5h",
        "price_trend_6h",
        "price_trend_8h",
        "price_trend_12h",
        # Trailing/leading 24h calculations (based on current interval)
        "trailing_price_average",
        "leading_price_average",
        "trailing_price_min",
        "trailing_price_max",
        "leading_price_min",
        "leading_price_max",
        # Binary sensors that check if current time is in a period
        "peak_price_period",
        "best_price_period",
    }
)
class TibberPricesDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
"""Enhanced coordinator with main/subentry pattern and comprehensive caching."""
def __init__(
    self,
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    version: str,
) -> None:
    """Initialize the coordinator.

    Args:
        hass: Home Assistant instance.
        config_entry: Config entry this coordinator serves (token in data,
            tuning values in options).
        version: Integration version string, forwarded to the API client.

    Side effects: creates the API client, opens a per-entry Store for cache
    persistence, decides main/subentry role, and arms the quarter-hour timer.
    """
    super().__init__(
        hass,
        _LOGGER,
        name=DOMAIN,
        update_interval=UPDATE_INTERVAL,
    )
    self.config_entry = config_entry
    self.api = TibberPricesApiClient(
        access_token=config_entry.data[CONF_ACCESS_TOKEN],
        session=aiohttp_client.async_get_clientsession(hass),
        version=version,
    )

    # Storage for persistence (one store per config entry)
    storage_key = f"{DOMAIN}.{config_entry.entry_id}"
    self._store = Store(hass, STORAGE_VERSION, storage_key)

    # User data cache (updated daily)
    self._cached_user_data: dict[str, Any] | None = None
    self._last_user_update: datetime | None = None
    self._user_update_interval = timedelta(days=1)

    # Price data cache
    self._cached_price_data: dict[str, Any] | None = None
    self._last_price_update: datetime | None = None

    # Track the last date we checked for midnight turnover
    self._last_midnight_check: datetime | None = None

    # Track if this is the main entry (first one created); the main entry
    # fetches API data for all homes, subentries read from it.
    self._is_main_entry = not self._has_existing_main_coordinator()

    # Log prefix for identifying this coordinator instance
    self._log_prefix = f"[{config_entry.title}]"

    # Quarter-hour entity refresh timer (runs at :00, :15, :30, :45)
    self._quarter_hour_timer_cancel: CALLBACK_TYPE | None = None

    # Selective listener system for time-sensitive entities
    # Regular listeners update on API data changes, time-sensitive listeners update every 15 minutes
    self._time_sensitive_listeners: list[CALLBACK_TYPE] = []

    self._schedule_quarter_hour_refresh()
def _log(self, level: str, message: str, *args: Any, **kwargs: Any) -> None:
    """Emit a log record at the given level name, prefixed with this coordinator's title."""
    log_method = getattr(_LOGGER, level)
    log_method(f"{self._log_prefix} {message}", *args, **kwargs)
@callback
def async_add_time_sensitive_listener(self, update_callback: CALLBACK_TYPE) -> CALLBACK_TYPE:
    """
    Register a callback fired at every quarter-hour boundary.

    Time-sensitive entities (current price, next interval, etc.) subscribe here
    instead of ``async_add_listener`` so they refresh every 15 minutes rather
    than only when new API data arrives.

    Returns:
        Callback that can be used to remove the listener
    """
    listeners = self._time_sensitive_listeners
    listeners.append(update_callback)

    def remove_listener() -> None:
        """Detach the previously registered callback (idempotent)."""
        if update_callback in listeners:
            listeners.remove(update_callback)

    return remove_listener
@callback
def _async_update_time_sensitive_listeners(self) -> None:
    """Update all time-sensitive entities without triggering a full coordinator update.

    Iterates over a snapshot of the listener list: a callback may unsubscribe
    itself (via the closure returned by ``async_add_time_sensitive_listener``),
    and mutating the list while iterating it would skip the next listener.
    """
    for update_callback in list(self._time_sensitive_listeners):
        update_callback()
    self._log(
        "debug",
        "Updated %d time-sensitive entities at quarter-hour boundary",
        len(self._time_sensitive_listeners),
    )
def _schedule_quarter_hour_refresh(self) -> None:
    """(Re)arm the Home Assistant time tracker that fires at :00, :15, :30, :45."""
    # Drop any previously armed timer before scheduling a new one.
    existing_cancel = self._quarter_hour_timer_cancel
    if existing_cancel:
        existing_cancel()
        self._quarter_hour_timer_cancel = None

    # async_track_utc_time_change fires exactly on the requested minutes;
    # second=1 keeps the trigger from landing marginally before the boundary.
    self._quarter_hour_timer_cancel = async_track_utc_time_change(
        self.hass,
        self._handle_quarter_hour_refresh,
        minute=QUARTER_HOUR_BOUNDARIES,
        second=1,
    )
    self._log(
        "debug",
        "Scheduled quarter-hour refresh for boundaries: %s (at second=1)",
        QUARTER_HOUR_BOUNDARIES,
    )
@callback
def _handle_quarter_hour_refresh(self, _now: datetime | None = None) -> None:
    """Handle quarter-hour entity refresh - check for midnight turnover and update entities.

    Invoked by async_track_utc_time_change at :00/:15/:30/:45 (second=1).
    The ``_now`` argument from the tracker is ignored; local time is re-read.
    """
    now = dt_util.now()
    self._log("debug", "Quarter-hour refresh triggered at %s", now.isoformat())

    # Check if midnight has passed since last check
    midnight_turnover_performed = self._check_and_handle_midnight_turnover(now)

    if midnight_turnover_performed:
        self._log("info", "Midnight turnover detected and performed during quarter-hour refresh")
        # Schedule cache save asynchronously (we're in a sync callback context)
        self.hass.async_create_task(self._store_cache())
        # Entity update already done in _check_and_handle_midnight_turnover
        # Skip the regular update to avoid double-update
    else:
        # Regular quarter-hour refresh - only update time-sensitive entities
        # This causes time-sensitive entity state properties to be re-evaluated with the current time
        # Static entities (statistics, diagnostics) only update when new API data arrives
        self._async_update_time_sensitive_listeners()
@callback
def _check_and_handle_midnight_turnover(self, now: datetime) -> bool:
    """
    Check if midnight has passed and perform data rotation if needed.

    This is called by the quarter-hour timer to ensure timely rotation
    without waiting for the next API update cycle.

    Args:
        now: Current local datetime (also recorded as the new last-check time).

    Returns:
        True if midnight turnover was performed, False otherwise
    """
    current_date = now.date()

    # First time check - initialize and report "no turnover"
    if self._last_midnight_check is None:
        self._last_midnight_check = now
        return False

    last_check_date = self._last_midnight_check.date()

    # Check if we've crossed into a new day since the previous check
    if current_date > last_check_date:
        self._log(
            "debug",
            "Midnight crossed: last_check=%s, current=%s",
            last_check_date,
            current_date,
        )

        # Perform rotation on cached data if available (rotates in place per home)
        if self._cached_price_data and "homes" in self._cached_price_data:
            for home_id, home_data in self._cached_price_data["homes"].items():
                if "price_info" in home_data:
                    price_info = home_data["price_info"]
                    rotated = self._perform_midnight_turnover(price_info)
                    home_data["price_info"] = rotated
                    self._log("debug", "Rotated price data for home %s", home_id)

        # Update coordinator's data with enriched rotated data
        if self.data:
            # Re-transform data to ensure enrichment is applied to rotated data
            if self.is_main_entry():
                self.data = self._transform_data_for_main_entry(self._cached_price_data)
            else:
                # For subentry, we need to get data from main coordinator
                # but we can update the timestamp to trigger entity refresh
                self.data["timestamp"] = now

            # Notify listeners about the updated data after rotation
            self.async_update_listeners()

        self._last_midnight_check = now
        return True

    self._last_midnight_check = now
    return False
async def async_shutdown(self) -> None:
    """Shut down the coordinator and clean up timers.

    Cancels the quarter-hour refresh timer, then delegates to the base
    DataUpdateCoordinator so its own scheduled refresh and listeners are
    torn down as well (the original override skipped the super call).
    """
    if self._quarter_hour_timer_cancel:
        self._quarter_hour_timer_cancel()
        self._quarter_hour_timer_cancel = None
    await super().async_shutdown()
def _has_existing_main_coordinator(self) -> bool:
    """Return True when another coordinator in hass.data already acts as the main entry."""
    domain_entries = self.hass.data.get(DOMAIN, {})
    for candidate in domain_entries.values():
        if isinstance(candidate, TibberPricesDataUpdateCoordinator) and candidate.is_main_entry():
            return True
    return False
def is_main_entry(self) -> bool:
    """Tell whether this coordinator is the main entry that fetches data for all homes."""
    main_flag = self._is_main_entry
    return main_flag
async def _async_update_data(self) -> dict[str, Any]:
    """Fetch data from Tibber API.

    Main entries fetch and cache data for all homes; subentries mirror the
    main coordinator's data. On communication errors, cached data is served
    as a fallback when available.

    Returns:
        Transformed data dict for this entry's entities.

    Raises:
        ConfigEntryAuthFailed: When the access token is rejected.
        UpdateFailed: On API errors with no cached fallback available.
    """
    # Lazily hydrate the persisted cache on the first refresh cycle only.
    if self._cached_price_data is None and self._cached_user_data is None:
        await self._load_cache()

    current_time = dt_util.utcnow()

    try:
        if self.is_main_entry():
            # Main entry fetches data for all homes
            return await self._handle_main_entry_update(current_time)

        # Subentries get data from main coordinator
        return await self._handle_subentry_update()

    except TibberPricesApiClientAuthenticationError as err:
        msg = "Invalid access token"
        raise ConfigEntryAuthFailed(msg) from err
    except (
        TibberPricesApiClientCommunicationError,
        TibberPricesApiClientError,
    ) as err:
        # Use cached data as fallback if available
        if self._cached_price_data is not None:
            self._log("warning", "API error, using cached data: %s", err)
            return self._merge_cached_data()

        msg = f"Error communicating with API: {err}"
        raise UpdateFailed(msg) from err
async def _handle_main_entry_update(self, current_time: datetime) -> dict[str, Any]:
    """Handle update for main entry - fetch data for all homes.

    Args:
        current_time: Current UTC time used for freshness checks and cache stamps.

    Returns:
        Transformed main-entry data; falls back to cached data when no API
        fetch is due, or an empty structure if neither exists.
    """
    # Update user data if needed (daily check)
    await self._update_user_data_if_needed(current_time)

    # Check if we need to update price data
    if self._should_update_price_data(current_time):
        raw_data = await self._fetch_all_homes_data()

        # Cache the data and persist it immediately
        self._cached_price_data = raw_data
        self._last_price_update = current_time
        await self._store_cache()

        # Transform for main entry: provide aggregated view
        return self._transform_data_for_main_entry(raw_data)

    # Use cached data if available
    if self._cached_price_data is not None:
        return self._transform_data_for_main_entry(self._cached_price_data)

    # Fallback: no cache and no update needed (shouldn't happen)
    self._log("warning", "No cached data available and update not triggered - returning empty data")
    return {
        "timestamp": current_time,
        "homes": {},
        "priceInfo": {},
    }
async def _handle_subentry_update(self) -> dict[str, Any]:
    """Build this subentry's home-specific view from the main coordinator's shared data."""
    shared_data = await self._get_data_from_main_coordinator()
    return self._transform_data_for_subentry(shared_data)
async def _fetch_all_homes_data(self) -> dict[str, Any]:
    """Fetch raw price data for every home (main coordinator only).

    Returns:
        Dict with a ``timestamp`` and a ``homes`` mapping of home id to raw
        ``price_info`` payload. Enrichment is deferred to transformation time.
    """
    self._log("debug", "Fetching data for all homes")

    # Get price data for all homes in a single API call.
    price_data = await self.api.async_get_price_info()

    # Store raw price data without enrichment; enrichment is applied
    # dynamically when the data is transformed for entities.
    all_homes_data = {
        home_id: {"price_info": home_price_data}
        for home_id, home_price_data in price_data.get("homes", {}).items()
    }

    return {
        "timestamp": dt_util.utcnow(),
        "homes": all_homes_data,
    }
async def _get_data_from_main_coordinator(self) -> dict[str, Any]:
    """Get data from the main coordinator (subentries only).

    Returns:
        The main coordinator's current data dict (may be empty).

    Raises:
        UpdateFailed: If no main coordinator can be located.
    """
    # Find the main coordinator
    main_coordinator = self._find_main_coordinator()
    if not main_coordinator:
        msg = "Main coordinator not found"
        raise UpdateFailed(msg)

    # Wait for main coordinator to have data
    # NOTE(review): this seeds the main coordinator with {} rather than
    # awaiting/requesting a refresh - presumably intentional to unblock
    # subentry setup; confirm this is the desired race-avoidance behavior.
    if main_coordinator.data is None:
        main_coordinator.async_set_updated_data({})

    # Return the main coordinator's data
    return main_coordinator.data or {}
def _find_main_coordinator(self) -> TibberPricesDataUpdateCoordinator | None:
    """Locate the main coordinator that fetches data for all homes, or None."""
    registered = self.hass.data.get(DOMAIN, {}).values()
    return next(
        (
            candidate
            for candidate in registered
            if isinstance(candidate, TibberPricesDataUpdateCoordinator)
            and candidate.is_main_entry()
            and candidate != self
        ),
        None,
    )
async def _load_cache(self) -> None:
    """Load cached data from storage.

    Restores price/user caches and their timestamps, then invalidates the
    price cache if it was stored on a previous local calendar day.
    Only OSError is caught here; Store presumably handles JSON decoding
    internally - TODO confirm corrupt-store behavior.
    """
    try:
        stored = await self._store.async_load()
        if stored:
            self._cached_price_data = stored.get("price_data")
            self._cached_user_data = stored.get("user_data")

            # Restore timestamps (stored as ISO strings)
            if last_price_update := stored.get("last_price_update"):
                self._last_price_update = dt_util.parse_datetime(last_price_update)
            if last_user_update := stored.get("last_user_update"):
                self._last_user_update = dt_util.parse_datetime(last_user_update)
            if last_midnight_check := stored.get("last_midnight_check"):
                self._last_midnight_check = dt_util.parse_datetime(last_midnight_check)

            # Validate cache: check if price data is from a previous day
            if not self._is_cache_valid():
                self._log("info", "Cached price data is from a previous day, clearing cache to fetch fresh data")
                self._cached_price_data = None
                self._last_price_update = None
                # Persist the cleared state so stale data is not reloaded later
                await self._store_cache()
            else:
                self._log("debug", "Cache loaded successfully")
        else:
            self._log("debug", "No cache found, will fetch fresh data")
    except OSError as ex:
        self._log("warning", "Failed to load cache: %s", ex)
def _is_cache_valid(self) -> bool:
    """
    Validate if cached price data is still current.

    Returns False if:
    - No cached data exists
    - Cached data is from a different calendar day (in local timezone)
    - Midnight turnover has occurred since cache was saved
    """
    if self._cached_price_data is None or self._last_price_update is None:
        return False

    today_local = dt_util.as_local(dt_util.now()).date()
    cached_local = dt_util.as_local(self._last_price_update).date()

    if today_local == cached_local:
        return True

    self._log(
        "debug",
        "Cache date mismatch: cached=%s, current=%s",
        cached_local,
        today_local,
    )
    return False
def _perform_midnight_turnover(self, price_info: dict[str, Any]) -> dict[str, Any]:
    """
    Perform midnight turnover on price data.

    Moves: today -> yesterday, tomorrow -> today, clears tomorrow.

    This handles cases where:
    - Server was running through midnight
    - Cache is being refreshed and needs proper day rotation

    NOTE(review): a single rotation assumes at most one day has passed; if
    the gap is larger, the promoted "today" data is still stale - confirm
    the cache-validity check upstream covers that case. The rotated dict
    also keeps only yesterday/today/tomorrow/currency; any other keys in
    price_info are dropped.

    Args:
        price_info: The price info dict with 'today', 'tomorrow', 'yesterday' keys

    Returns:
        Updated price_info with rotated day data (or the input unchanged
        when no rotation is needed)
    """
    current_local_date = dt_util.as_local(dt_util.now()).date()

    # Extract current data
    today_prices = price_info.get("today", [])
    tomorrow_prices = price_info.get("tomorrow", [])

    # Check if any of today's prices are from the previous day
    # (only the first interval's start date is inspected)
    prices_need_rotation = False
    if today_prices:
        first_today_price_str = today_prices[0].get("startsAt")
        if first_today_price_str:
            first_today_price_time = dt_util.parse_datetime(first_today_price_str)
            if first_today_price_time:
                first_today_price_date = dt_util.as_local(first_today_price_time).date()
                prices_need_rotation = first_today_price_date < current_local_date

    if prices_need_rotation:
        self._log("info", "Performing midnight turnover: today→yesterday, tomorrow→today")
        return {
            "yesterday": today_prices,
            "today": tomorrow_prices,
            "tomorrow": [],
            "currency": price_info.get("currency", "EUR"),
        }

    return price_info
async def _store_cache(self) -> None:
    """Persist price/user caches and their bookkeeping timestamps to storage."""

    def _as_iso(moment: datetime | None) -> str | None:
        """Serialize an optional datetime to ISO-8601, preserving None."""
        return moment.isoformat() if moment else None

    payload = {
        "price_data": self._cached_price_data,
        "user_data": self._cached_user_data,
        "last_price_update": _as_iso(self._last_price_update),
        "last_user_update": _as_iso(self._last_user_update),
        "last_midnight_check": _as_iso(self._last_midnight_check),
    }
    try:
        await self._store.async_save(payload)
    except OSError:
        _LOGGER.exception("Failed to store cache")
    else:
        self._log("debug", "Cache stored successfully")
async def _update_user_data_if_needed(self, current_time: datetime) -> None:
    """Refresh cached viewer details once the daily update interval has elapsed."""
    last_update = self._last_user_update
    if last_update is not None and current_time - last_update < self._user_update_interval:
        # Still within the daily window - nothing to do.
        return
    try:
        self._log("debug", "Updating user data")
        self._cached_user_data = await self.api.async_get_viewer_details()
        self._last_user_update = current_time
        self._log("debug", "User data updated successfully")
    except (
        TibberPricesApiClientError,
        TibberPricesApiClientCommunicationError,
    ) as ex:
        # Non-fatal: stale user data is acceptable; price updates continue.
        self._log("warning", "Failed to update user data: %s", ex)
@callback
def _should_update_price_data(self, current_time: datetime) -> bool:
    """
    Check if price data should be updated from the API.

    Updates occur when:
    1. No cached data exists
    2. Cache is invalid (from previous day)
    3. It's after 13:00 local time and tomorrow's data is missing or invalid
    4. Regular update interval has passed (15 min while incomplete, 4 h once
       tomorrow's data is present)

    Args:
        current_time: Current UTC time.

    Returns:
        True when an API fetch should be performed.
    """
    if self._cached_price_data is None:
        self._log("debug", "Should update: No cached price data")
        return True

    if self._last_price_update is None:
        self._log("debug", "Should update: No last price update timestamp")
        return True

    now_local = dt_util.as_local(current_time)
    tomorrow_date = (now_local + timedelta(days=1)).date()

    # Check if after 13:00 and tomorrow data is missing or invalid
    if (
        now_local.hour >= TOMORROW_DATA_CHECK_HOUR
        and self._cached_price_data
        and "homes" in self._cached_price_data
        and self._needs_tomorrow_data(tomorrow_date)
    ):
        self._log("debug", "Should update: After %s:00 and valid tomorrow data missing", TOMORROW_DATA_CHECK_HOUR)
        return True

    # Check regular update interval
    time_since_update = current_time - self._last_price_update

    # Determine appropriate interval based on data completeness
    has_tomorrow_data = self._has_valid_tomorrow_data(tomorrow_date)
    interval = UPDATE_INTERVAL_COMPLETE if has_tomorrow_data else UPDATE_INTERVAL

    should_update = time_since_update >= interval
    if should_update:
        self._log(
            "debug",
            "Should update price data: %s (time since last update: %s, interval: %s, has_tomorrow: %s)",
            should_update,
            time_since_update,
            interval,
            has_tomorrow_data,
        )
    return should_update
def _needs_tomorrow_data(self, tomorrow_date: date) -> bool:
    """Return True when any cached home lacks tomorrow prices or holds stale ones.

    Only the first interval's start date is validated per home; a missing or
    unparseable startsAt leaves that home's data treated as valid.
    """
    cached = self._cached_price_data
    if not cached or "homes" not in cached:
        return False

    for home_data in cached["homes"].values():
        tomorrow_prices = home_data.get("price_info", {}).get("tomorrow", [])

        # Missing data always triggers a refresh.
        if not tomorrow_prices:
            return True

        # Validate that the data is actually for tomorrow.
        starts_at = tomorrow_prices[0].get("startsAt")
        if not starts_at:
            continue
        parsed_start = dt_util.parse_datetime(starts_at)
        if parsed_start is None:
            continue
        actual_date = dt_util.as_local(parsed_start).date()
        if actual_date != tomorrow_date:
            self._log(
                "debug",
                "Tomorrow data has wrong date: expected=%s, actual=%s",
                tomorrow_date,
                actual_date,
            )
            return True

    return False
def _has_valid_tomorrow_data(self, tomorrow_date: date) -> bool:
    """Return True when every home holds tomorrow prices for the expected date."""
    needs_refresh = self._needs_tomorrow_data(tomorrow_date)
    return not needs_refresh
@callback
def _merge_cached_data(self) -> dict[str, Any]:
    """Return cached prices in main-entry shape, or an empty dict when no cache exists."""
    cached = self._cached_price_data
    return self._transform_data_for_main_entry(cached) if cached else {}
def _get_threshold_percentages(self) -> dict[str, int]:
    """Read the low/high price-rating threshold percentages from entry options."""
    opts = self.config_entry.options or {}
    low = opts.get(CONF_PRICE_RATING_THRESHOLD_LOW, DEFAULT_PRICE_RATING_THRESHOLD_LOW)
    high = opts.get(CONF_PRICE_RATING_THRESHOLD_HIGH, DEFAULT_PRICE_RATING_THRESHOLD_HIGH)
    return {"low": low, "high": high}
def _get_period_config(self, *, reverse_sort: bool) -> dict[str, Any]:
    """Collect period-calculation settings from entry options/data.

    Args:
        reverse_sort: True selects the peak-price settings, False the
            best-price settings.

    Returns:
        Dict with ``flex`` (as a decimal fraction), ``min_distance_from_avg``
        (float) and ``min_period_length`` (int).
    """
    options = self.config_entry.options
    data = self.config_entry.data

    # Select the (config key, default) triple for the requested mode.
    if reverse_sort:
        flex_key, flex_default = CONF_PEAK_PRICE_FLEX, DEFAULT_PEAK_PRICE_FLEX
        dist_key, dist_default = (
            CONF_PEAK_PRICE_MIN_DISTANCE_FROM_AVG,
            DEFAULT_PEAK_PRICE_MIN_DISTANCE_FROM_AVG,
        )
        length_key, length_default = (
            CONF_PEAK_PRICE_MIN_PERIOD_LENGTH,
            DEFAULT_PEAK_PRICE_MIN_PERIOD_LENGTH,
        )
    else:
        flex_key, flex_default = CONF_BEST_PRICE_FLEX, DEFAULT_BEST_PRICE_FLEX
        dist_key, dist_default = (
            CONF_BEST_PRICE_MIN_DISTANCE_FROM_AVG,
            DEFAULT_BEST_PRICE_MIN_DISTANCE_FROM_AVG,
        )
        length_key, length_default = (
            CONF_BEST_PRICE_MIN_PERIOD_LENGTH,
            DEFAULT_BEST_PRICE_MIN_PERIOD_LENGTH,
        )

    # Options override data; data overrides the built-in default.
    raw_flex = options.get(flex_key, data.get(flex_key, flex_default))
    min_distance_from_avg = options.get(dist_key, data.get(dist_key, dist_default))
    min_period_length = options.get(length_key, data.get(length_key, length_default))

    # Convert flex from percentage to decimal (e.g., 5 -> 0.05); fall back
    # to the mode's default on malformed input.
    try:
        flex = float(raw_flex) / 100
    except (TypeError, ValueError):
        flex = flex_default / 100

    return {
        "flex": flex,
        "min_distance_from_avg": float(min_distance_from_avg),
        "min_period_length": int(min_period_length),
    }
def _calculate_periods_for_price_info(self, price_info: dict[str, Any]) -> dict[str, Any]:
    """Calculate periods (best price and peak price) for the given price info.

    Args:
        price_info: Dict with optional 'yesterday'/'today'/'tomorrow' interval lists.

    Returns:
        Dict with 'best_price' and 'peak_price' period structures as produced
        by calculate_periods; empty-shaped structures when no prices exist.
    """
    yesterday_prices = price_info.get("yesterday", [])
    today_prices = price_info.get("today", [])
    tomorrow_prices = price_info.get("tomorrow", [])
    # Periods are computed across the full known horizon, not per day.
    all_prices = yesterday_prices + today_prices + tomorrow_prices

    if not all_prices:
        # Empty result keeps the same shape so consumers can iterate unconditionally.
        return {
            "best_price": {
                "periods": [],
                "intervals": [],
                "metadata": {"total_intervals": 0, "total_periods": 0, "config": {}},
            },
            "peak_price": {
                "periods": [],
                "intervals": [],
                "metadata": {"total_intervals": 0, "total_periods": 0, "config": {}},
            },
        }

    # Get rating thresholds from config (shared by both period kinds)
    threshold_low = self.config_entry.options.get(
        CONF_PRICE_RATING_THRESHOLD_LOW,
        DEFAULT_PRICE_RATING_THRESHOLD_LOW,
    )
    threshold_high = self.config_entry.options.get(
        CONF_PRICE_RATING_THRESHOLD_HIGH,
        DEFAULT_PRICE_RATING_THRESHOLD_HIGH,
    )

    # Calculate best price periods (ascending price sort)
    best_config = self._get_period_config(reverse_sort=False)
    best_period_config = PeriodConfig(
        reverse_sort=False,
        flex=best_config["flex"],
        min_distance_from_avg=best_config["min_distance_from_avg"],
        min_period_length=best_config["min_period_length"],
        threshold_low=threshold_low,
        threshold_high=threshold_high,
    )
    best_periods = calculate_periods(all_prices, config=best_period_config)

    # Calculate peak price periods (descending price sort)
    peak_config = self._get_period_config(reverse_sort=True)
    peak_period_config = PeriodConfig(
        reverse_sort=True,
        flex=peak_config["flex"],
        min_distance_from_avg=peak_config["min_distance_from_avg"],
        min_period_length=peak_config["min_period_length"],
        threshold_low=threshold_low,
        threshold_high=threshold_high,
    )
    peak_periods = calculate_periods(all_prices, config=peak_period_config)

    return {
        "best_price": best_periods,
        "peak_price": peak_periods,
    }
def _transform_data_for_main_entry(self, raw_data: dict[str, Any]) -> dict[str, Any]:
    """
    Transform raw data for the main entry (aggregated view of all homes).

    Uses the first home's price data as the main entry's default view, applies
    midnight turnover, enriches the price info, and calculates best/peak periods.
    """
    homes_data = raw_data.get("homes", {})
    if not homes_data:
        # Nothing fetched yet: return an empty but well-shaped payload.
        return {
            "timestamp": raw_data.get("timestamp"),
            "homes": {},
            "priceInfo": {},
        }

    # First home's data serves as the main entry's representative price info.
    first_home_data = next(iter(homes_data.values()))
    # Handle day transitions before any enrichment.
    price_info = self._perform_midnight_turnover(first_home_data.get("price_info", {}))

    # The API may not have returned tomorrow's data yet; guarantee the keys exist.
    for day_key in ("yesterday", "today", "tomorrow"):
        price_info.setdefault(day_key, [])
    price_info.setdefault("currency", "EUR")

    # Enrich dynamically so differences/rating levels stay current after turnover.
    thresholds = self._get_threshold_percentages()
    price_info = enrich_price_info_with_differences(
        price_info,
        threshold_low=thresholds["low"],
        threshold_high=thresholds["high"],
    )

    return {
        "timestamp": raw_data.get("timestamp"),
        "homes": homes_data,
        "priceInfo": price_info,
        "periods": self._calculate_periods_for_price_info(price_info),
    }
def _transform_data_for_subentry(self, main_data: dict[str, Any]) -> dict[str, Any]:
    """
    Transform main coordinator data into the home-specific view for a subentry.

    Filters the main data to this entry's home_id, applies midnight turnover,
    enriches the price info, and calculates best/peak periods.
    """
    home_id = self.config_entry.data.get("home_id")
    if not home_id:
        # No home filter configured: pass the main data through unchanged.
        return main_data

    home_data = main_data.get("homes", {}).get(home_id, {})
    if not home_data:
        return {
            "timestamp": main_data.get("timestamp"),
            "priceInfo": {},
        }

    # Handle day transitions before any enrichment.
    price_info = self._perform_midnight_turnover(home_data.get("price_info", {}))

    # The API may not have returned tomorrow's data yet; guarantee the keys exist.
    for day_key in ("yesterday", "today", "tomorrow"):
        price_info.setdefault(day_key, [])
    price_info.setdefault("currency", "EUR")

    # Enrich dynamically so differences/rating levels stay current after turnover.
    thresholds = self._get_threshold_percentages()
    price_info = enrich_price_info_with_differences(
        price_info,
        threshold_low=thresholds["low"],
        threshold_high=thresholds["high"],
    )

    return {
        "timestamp": main_data.get("timestamp"),
        "priceInfo": price_info,
        "periods": self._calculate_periods_for_price_info(price_info),
    }
# --- Methods expected by sensors and services ---
def get_home_data(self, home_id: str) -> dict[str, Any] | None:
    """Return the cached data block for one home, or None when unavailable."""
    data = self.data
    if not data:
        return None
    return data.get("homes", {}).get(home_id)
def get_current_interval(self) -> dict[str, Any] | None:
    """Return the price entry covering the current time, or None if unavailable."""
    data = self.data
    if not data:
        return None
    price_info = data.get("priceInfo", {})
    if not price_info:
        return None
    # Delegate interval lookup to the shared price utility using local time.
    return find_price_data_for_interval(price_info, dt_util.now())
def get_all_intervals(self) -> list[dict[str, Any]]:
    """Return today's and tomorrow's price intervals as one ordered list."""
    if not self.data:
        return []
    price_info = self.data.get("priceInfo", {})
    # Today first, then tomorrow — same ordering as the underlying price info.
    return [*price_info.get("today", []), *price_info.get("tomorrow", [])]
async def refresh_user_data(self) -> bool:
    """
    Force a refresh of user data.

    Returns:
        True when the refresh (and cache store) succeeded, False on any API error.
    """
    try:
        await self._update_user_data_if_needed(dt_util.utcnow())
        await self._store_cache()
    except (
        TibberPricesApiClientAuthenticationError,
        TibberPricesApiClientCommunicationError,
        TibberPricesApiClientError,
    ):
        # API failure: signal the caller without raising.
        return False
    return True
def get_user_profile(self) -> dict[str, Any]:
    """Summarize the user-data cache: last refresh time and cache presence."""
    has_cached_data = self._cached_user_data is not None
    return {
        "last_updated": self._last_user_update,
        "cached_user_data": has_cached_data,
    }
def get_user_homes(self) -> list[dict[str, Any]]:
    """Return the cached list of homes, or an empty list when no cache exists."""
    cached = self._cached_user_data
    return cached.get("homes", []) if cached else []

View file

@ -1,33 +0,0 @@
"""
Data update coordination package.
This package orchestrates data fetching, caching, and entity updates:
- API polling at 15-minute intervals
- Persistent storage via HA Store
- Quarter-hour entity refresh scheduling
- Price data enrichment pipeline
- Period calculation (best/peak price periods)
Main components:
- core.py: TibberPricesDataUpdateCoordinator (main coordinator class)
- cache.py: Persistent storage management
- data_transformation.py: Raw data enriched data pipeline
- listeners.py: Entity refresh scheduling
- period_handlers/: Period calculation sub-package
"""
from .constants import (
MINUTE_UPDATE_ENTITY_KEYS,
STORAGE_VERSION,
TIME_SENSITIVE_ENTITY_KEYS,
)
from .core import TibberPricesDataUpdateCoordinator
from .time_service import TibberPricesTimeService
__all__ = [
"MINUTE_UPDATE_ENTITY_KEYS",
"STORAGE_VERSION",
"TIME_SENSITIVE_ENTITY_KEYS",
"TibberPricesDataUpdateCoordinator",
"TibberPricesTimeService",
]

View file

@ -1,105 +0,0 @@
"""
Cache management for coordinator persistent storage.
This module handles persistent storage for the coordinator, storing:
- user_data: Account/home metadata (required, refreshed daily)
- Timestamps for cache validation and lifecycle tracking
**Storage Architecture (as of v0.25.0):**
There are TWO persistent storage files per config entry:
1. `tibber_prices.{entry_id}` (this module)
- user_data: Account info, home metadata, timezone, currency
- Timestamps: last_user_update, last_midnight_check
2. `tibber_prices.interval_pool.{entry_id}` (interval_pool/storage.py)
- Intervals: Deduplicated quarter-hourly price data (source of truth)
- Fetch metadata: When each interval was fetched
- Protected range: Which intervals to keep during cleanup
**Single Source of Truth:**
Price intervals are ONLY stored in IntervalPool. This cache stores only
user metadata and timestamps. The IntervalPool handles all price data
fetching, caching, and persistence independently.
"""
from __future__ import annotations
import logging
from typing import TYPE_CHECKING, Any, NamedTuple
if TYPE_CHECKING:
from datetime import datetime
from homeassistant.helpers.storage import Store
from .time_service import TibberPricesTimeService
_LOGGER = logging.getLogger(__name__)
class TibberPricesCacheData(NamedTuple):
    """Cache data structure for user metadata (price data is in IntervalPool)."""

    # Account/home metadata from the Tibber API; None when never fetched.
    user_data: dict[str, Any] | None
    # When user_data was last refreshed; None when never fetched.
    last_user_update: datetime | None
    # When the midnight turnover was last checked; None when never checked.
    last_midnight_check: datetime | None
async def load_cache(
    store: Store,
    log_prefix: str,
    *,
    time: TibberPricesTimeService,
) -> TibberPricesCacheData:
    """
    Load cached user data from storage (price data is in IntervalPool).

    Args:
        store: HA Store instance backing this cache.
        log_prefix: Prefix for log messages identifying the coordinator instance.
        time: Time service used to parse stored ISO timestamps.

    Returns:
        Restored cache data, or an all-None structure when nothing is stored
        or loading fails.
    """
    try:
        stored = await store.async_load()
        if stored:
            restored_user_data = stored.get("user_data")
            # Restore optional timestamps from their ISO-string form.
            restored_last_update = None
            restored_last_midnight = None
            raw_last_update = stored.get("last_user_update")
            if raw_last_update:
                restored_last_update = time.parse_datetime(raw_last_update)
            raw_last_midnight = stored.get("last_midnight_check")
            if raw_last_midnight:
                restored_last_midnight = time.parse_datetime(raw_last_midnight)
            _LOGGER.debug("%s Cache loaded successfully", log_prefix)
            return TibberPricesCacheData(
                user_data=restored_user_data,
                last_user_update=restored_last_update,
                last_midnight_check=restored_last_midnight,
            )
        _LOGGER.debug("%s No cache found, will fetch fresh data", log_prefix)
    except OSError as ex:
        # Storage failure is non-fatal: fall through to the empty result.
        _LOGGER.warning("%s Failed to load cache: %s", log_prefix, ex)
    return TibberPricesCacheData(
        user_data=None,
        last_user_update=None,
        last_midnight_check=None,
    )
async def save_cache(
    store: Store,
    cache_data: TibberPricesCacheData,
    log_prefix: str,
) -> None:
    """
    Store cache data (user metadata only, price data is in IntervalPool).

    Datetimes are serialized as ISO strings; missing values are stored as None.
    Storage failures are logged but not raised.
    """

    def _to_iso(value: datetime | None) -> str | None:
        # Serialize a datetime for JSON storage; pass None through unchanged.
        return value.isoformat() if value else None

    payload = {
        "user_data": cache_data.user_data,
        "last_user_update": _to_iso(cache_data.last_user_update),
        "last_midnight_check": _to_iso(cache_data.last_midnight_check),
    }
    try:
        await store.async_save(payload)
        _LOGGER.debug("%s Cache stored successfully", log_prefix)
    except OSError:
        _LOGGER.exception("%s Failed to store cache", log_prefix)

View file

@ -1,112 +0,0 @@
"""Constants for coordinator module."""
from datetime import timedelta
# Storage version for storing data
STORAGE_VERSION = 1
# Update interval for DataUpdateCoordinator timer
# This determines how often Timer #1 runs to check if updates are needed.
# Actual API calls only happen when:
# - Cache is invalid (different day, corrupted)
# - Tomorrow data missing after 13:00
# - No cached data exists
UPDATE_INTERVAL = timedelta(minutes=15)
# Quarter-hour boundaries for entity state updates (minutes: 00, 15, 30, 45)
QUARTER_HOUR_BOUNDARIES = (0, 15, 30, 45)
# Hour after which tomorrow's price data is expected (13:00 local time)
TOMORROW_DATA_CHECK_HOUR = 13
# Random delay range for tomorrow data checks (spread API load)
# When tomorrow data is missing after 13:00, wait 0-30 seconds before fetching
# This prevents all HA instances from requesting simultaneously
TOMORROW_DATA_RANDOM_DELAY_MAX = 30 # seconds
# Entity keys that require quarter-hour updates (time-sensitive entities)
# These entities calculate values based on current time and need updates every 15 minutes
# All other entities only update when new API data arrives
TIME_SENSITIVE_ENTITY_KEYS = frozenset(
{
# Current/next/previous price sensors
"current_interval_price",
"current_interval_price_base",
"next_interval_price",
"previous_interval_price",
# Current/next/previous price levels
"current_interval_price_level",
"next_interval_price_level",
"previous_interval_price_level",
# Rolling hour calculations (5-interval windows)
"current_hour_average_price",
"next_hour_average_price",
"current_hour_price_level",
"next_hour_price_level",
# Current/next/previous price ratings
"current_interval_price_rating",
"next_interval_price_rating",
"previous_interval_price_rating",
"current_hour_price_rating",
"next_hour_price_rating",
# Future average sensors (rolling N-hour windows from next interval)
"next_avg_1h",
"next_avg_2h",
"next_avg_3h",
"next_avg_4h",
"next_avg_5h",
"next_avg_6h",
"next_avg_8h",
"next_avg_12h",
# Current/future price trend sensors (time-sensitive, update at interval boundaries)
"current_price_trend",
"next_price_trend_change",
# Price trend sensors
"price_trend_1h",
"price_trend_2h",
"price_trend_3h",
"price_trend_4h",
"price_trend_5h",
"price_trend_6h",
"price_trend_8h",
"price_trend_12h",
# Trailing/leading 24h calculations (based on current interval)
"trailing_price_average",
"leading_price_average",
"trailing_price_min",
"trailing_price_max",
"leading_price_min",
"leading_price_max",
# Binary sensors that check if current time is in a period
"peak_price_period",
"best_price_period",
# Best/Peak price timestamp sensors (periods only change at interval boundaries)
"best_price_end_time",
"best_price_next_start_time",
"peak_price_end_time",
"peak_price_next_start_time",
# Lifecycle sensor needs quarter-hour precision for state transitions:
# - 23:45: turnover_pending (last interval before midnight)
# - 00:00: turnover complete (after midnight API update)
# - 13:00: searching_tomorrow (when tomorrow data search begins)
# Uses state-change filter in _handle_time_sensitive_update() to prevent recorder spam
"data_lifecycle_status",
}
)
# Entities that require minute-by-minute updates (separate from quarter-hour updates)
# These are timing sensors that track countdown/progress within best/peak price periods
# Timestamp sensors (end_time, next_start_time) only need quarter-hour updates since periods
# can only change at interval boundaries
MINUTE_UPDATE_ENTITY_KEYS = frozenset(
{
# Best Price countdown/progress sensors (need minute updates)
"best_price_remaining_minutes",
"best_price_progress",
"best_price_next_in_minutes",
# Peak Price countdown/progress sensors (need minute updates)
"peak_price_remaining_minutes",
"peak_price_progress",
"peak_price_next_in_minutes",
}
)

View file

@ -1,924 +0,0 @@
"""Enhanced coordinator for fetching Tibber price data with comprehensive caching."""
from __future__ import annotations
import logging
from datetime import timedelta
from typing import TYPE_CHECKING, Any
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.helpers.storage import Store
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
if TYPE_CHECKING:
from datetime import date, datetime
from homeassistant.config_entries import ConfigEntry
from .listeners import TimeServiceCallback
from custom_components.tibber_prices.api import (
TibberPricesApiClient,
TibberPricesApiClientAuthenticationError,
TibberPricesApiClientCommunicationError,
TibberPricesApiClientError,
)
from custom_components.tibber_prices.const import DOMAIN
from custom_components.tibber_prices.utils.price import (
find_price_data_for_interval,
)
from homeassistant.exceptions import ConfigEntryAuthFailed
from . import helpers
from .constants import (
STORAGE_VERSION,
UPDATE_INTERVAL,
)
from .data_transformation import TibberPricesDataTransformer
from .listeners import TibberPricesListenerManager
from .midnight_handler import TibberPricesMidnightHandler
from .periods import TibberPricesPeriodCalculator
from .price_data_manager import TibberPricesPriceDataManager
from .repairs import TibberPricesRepairManager
from .time_service import TibberPricesTimeService
_LOGGER = logging.getLogger(__name__)
# Lifecycle state transition thresholds
FRESH_TO_CACHED_SECONDS = 300 # 5 minutes
def get_connection_state(coordinator: TibberPricesDataUpdateCoordinator) -> bool | None:
    """
    Determine API connection state based on lifecycle and exceptions.

    This is the source of truth for the connection binary sensor.
    It ensures consistency between lifecycle_status and connection state.

    Returns:
        True: Connected and working (cached or fresh data)
        False: Connection failed or auth failed
        None: Unknown state (no data yet, initializing)

    Logic:
        - Auth failure is definitively disconnected (user must reauth with a new token)
        - Any available data means connected — with a non-auth error the cached data
          serves as fallback (lifecycle_status will still show "error" for degraded
          operation)
        - No data at all means still initializing (unknown)
    """
    if isinstance(coordinator.last_exception, ConfigEntryAuthFailed):
        return False
    # Data present covers both "no error" and "error but cache usable as fallback".
    if coordinator.data:
        return True
    # No data yet: initializing, state unknown.
    return None
# =============================================================================
# TIMER SYSTEM - Three independent update mechanisms:
# =============================================================================
#
# Timer #1: DataUpdateCoordinator (HA's built-in, every UPDATE_INTERVAL)
# - Purpose: Check if API data needs updating, fetch if necessary
# - Trigger: _async_update_data()
# - What it does:
# * Checks for midnight turnover FIRST (prevents race condition with Timer #2)
# * If turnover needed: Rotates data, saves cache, notifies entities, returns
# * Checks _should_update_price_data() (tomorrow missing? interval passed?)
# * Fetches fresh data from API if needed
# * Uses cached data otherwise (fast path)
# * Transforms data only when needed (config change, new data, midnight)
# - Load distribution:
# * Start time varies per installation → natural distribution
# * Tomorrow data check adds 0-30s random delay → prevents thundering herd
# - Midnight coordination:
# * Atomic check using _check_midnight_turnover_needed(now)
# * If turnover needed, performs it and returns early
# * Timer #2 will see turnover already done and skip
#
# Timer #2: Quarter-Hour Refresh (exact :00, :15, :30, :45 boundaries)
# - Purpose: Update time-sensitive entity states at interval boundaries
# - Trigger: _handle_quarter_hour_refresh()
# - What it does:
# * Checks for midnight turnover (atomic check, coordinates with Timer #1)
# * If Timer #1 already did turnover → skip gracefully
# * If turnover needed → performs it, saves cache, notifies all entities
# * Otherwise → only notifies time-sensitive entities (fast path)
# - Midnight coordination:
# * Uses same atomic check as Timer #1
# * Whoever runs first does turnover, the other skips
# * No race condition possible (date comparison is atomic)
#
# Timer #3: Minute Refresh (every minute)
# - Purpose: Update countdown/progress sensors
# - Trigger: _handle_minute_refresh()
# - What it does:
# * Notifies minute-update entities (remaining_minutes, progress)
# * Does NOT fetch data or transform - uses existing cache
# * No midnight handling (not relevant for timing sensors)
#
# Midnight Turnover Coordination:
# - Both Timer #1 and Timer #2 check for midnight turnover
# - Atomic check: _check_midnight_turnover_needed(now)
# Returns True if current_date > _last_midnight_turnover_check.date()
# Returns False if already done today
# - Whoever runs first (Timer #1 or Timer #2) performs turnover:
# Calls _perform_midnight_data_rotation(now)
# Updates _last_midnight_turnover_check and _last_actual_turnover to current time
# - The other timer sees turnover already done and skips
# - No locks needed - date comparison is naturally atomic
# - No race condition possible - Python datetime.date() comparison is thread-safe
# - _last_transformation_time is separate and tracks when data was last transformed (for cache)
#
# CRITICAL - Dual Listener System:
# After midnight turnover, BOTH listener groups must be notified:
# 1. Normal listeners (async_update_listeners) - standard HA entities
# 2. Time-sensitive listeners (_async_update_time_sensitive_listeners) - quarter-hour entities
#
# Why? Entities like best_price_period and peak_price_period register as time-sensitive
# listeners and won't update if only async_update_listeners() is called. This caused
# the bug where period binary sensors showed stale data until the next quarter-hour
# refresh at 00:15 (they were updated then because Timer #2 explicitly calls
# _async_update_time_sensitive_listeners in its normal flow).
#
# =============================================================================
class TibberPricesDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
"""Enhanced coordinator with main/subentry pattern and comprehensive caching."""
def __init__(
    self,
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    api_client: TibberPricesApiClient,
    interval_pool: Any,  # TibberPricesIntervalPool - Any to avoid circular import
) -> None:
    """
    Initialize the coordinator.

    Args:
        hass: Home Assistant instance.
        config_entry: Config entry this coordinator serves (provides home_id, title,
            entry_id, and options).
        api_client: Pre-configured API client (created in __init__.py).
        interval_pool: Shared interval pool (one per config entry/Tibber account).

    """
    super().__init__(
        hass,
        _LOGGER,
        name=DOMAIN,
        update_interval=UPDATE_INTERVAL,
    )
    self.config_entry = config_entry
    # Get home_id from config entry
    self._home_id = config_entry.data.get("home_id", "")
    if not self._home_id:
        # Logged (not raised) so setup continues; downstream fetches may misbehave.
        _LOGGER.error("No home_id found in config entry %s", config_entry.entry_id)
    # Use the API client from runtime_data (created in __init__.py with proper TOKEN handling)
    self.api = api_client
    # Use the shared interval pool (one per config entry/Tibber account)
    self.interval_pool = interval_pool
    # Storage for persistence
    storage_key = f"{DOMAIN}.{config_entry.entry_id}"
    self._store = Store(hass, STORAGE_VERSION, storage_key)
    # Log prefix for identifying this coordinator instance
    self._log_prefix = f"[{config_entry.title}]"
    # Note: In the new architecture, all coordinators (parent + subentries) fetch their own data
    # No distinction between "main" and "sub" coordinators anymore
    # Initialize time service (single source of truth for all time operations)
    self.time = TibberPricesTimeService()
    # Set time on API client (needed for rate limiting)
    self.api.time = self.time
    # Initialize helper modules
    self._listener_manager = TibberPricesListenerManager(hass, self._log_prefix)
    self._midnight_handler = TibberPricesMidnightHandler()
    self._price_data_manager = TibberPricesPriceDataManager(
        api=self.api,
        store=self._store,
        log_prefix=self._log_prefix,
        user_update_interval=timedelta(days=1),
        time=self.time,
        home_id=self._home_id,
        interval_pool=self.interval_pool,
    )
    # Create period calculator BEFORE data transformer (transformer needs it in lambda)
    self._period_calculator = TibberPricesPeriodCalculator(
        config_entry=config_entry,
        log_prefix=self._log_prefix,
        get_config_override_fn=self.get_config_override,
    )
    self._data_transformer = TibberPricesDataTransformer(
        config_entry=config_entry,
        log_prefix=self._log_prefix,
        calculate_periods_fn=lambda price_info: self._period_calculator.calculate_periods_for_price_info(
            price_info
        ),
        time=self.time,
    )
    self._repair_manager = TibberPricesRepairManager(
        hass=hass,
        entry_id=config_entry.entry_id,
        home_name=config_entry.title,
    )
    # Register options update listener to invalidate config caches
    config_entry.async_on_unload(config_entry.add_update_listener(self._handle_options_update))
    # User data cache (price data is in IntervalPool)
    self._cached_user_data: dict[str, Any] | None = None
    self._last_user_update: datetime | None = None
    self._user_update_interval = timedelta(days=1)
    # Data lifecycle tracking
    # Note: _lifecycle_state is used for DIAGNOSTICS only (diagnostics.py export).
    # The lifecycle SENSOR calculates its state dynamically in get_lifecycle_state(),
    # using: _is_fetching, last_exception, time calculations, _needs_tomorrow_data(),
    # and _last_price_update. It does NOT read _lifecycle_state!
    self._lifecycle_state: str = (
        "cached"  # For diagnostics: cached, fresh, refreshing, searching_tomorrow, turnover_pending, error
    )
    self._last_price_update: datetime | None = None  # When price data was last fetched from API
    self._api_calls_today: int = 0  # Counter for API calls today
    self._last_api_call_date: date | None = None  # Date of last API call (for daily reset)
    self._is_fetching: bool = False  # Flag to track active API fetch (read by lifecycle sensor)
    self._last_coordinator_update: datetime | None = None  # When Timer #1 last ran (_async_update_data)
    # Runtime config overrides from config entities (number/switch)
    # Structure: {"section_name": {"config_key": value, ...}, ...}
    # When set, these override the corresponding options from config_entry.options
    self._config_overrides: dict[str, dict[str, Any]] = {}
    # Start timers
    self._listener_manager.schedule_quarter_hour_refresh(self._handle_quarter_hour_refresh)
    self._listener_manager.schedule_minute_refresh(self._handle_minute_refresh)
def _log(self, level: str, message: str, *args: Any, **kwargs: Any) -> None:
    """Emit a log record at the given level, prefixed with this coordinator's tag."""
    # Resolve the logger method by name (e.g. "debug", "info", "warning").
    log_method = getattr(_LOGGER, level)
    log_method(f"{self._log_prefix} {message}", *args, **kwargs)
async def _handle_options_update(self, _hass: HomeAssistant, _config_entry: ConfigEntry) -> None:
"""Handle options update by invalidating config caches and re-transforming data."""
self._log("debug", "Options update triggered, re-transforming data")
self._data_transformer.invalidate_config_cache()
self._period_calculator.invalidate_config_cache()
# Re-transform existing data with new configuration
# This updates rating_levels, volatility, and period calculations
# without needing to fetch new data from the API
if self.data and "priceInfo" in self.data:
# Extract raw price_info and re-transform
raw_data = {"price_info": self.data["priceInfo"]}
self.data = self._transform_data(raw_data)
self.async_update_listeners()
else:
self._log("debug", "No data to re-transform")
# =========================================================================
# Runtime Config Override Methods (for number/switch entities)
# =========================================================================
def set_config_override(self, config_key: str, config_section: str, value: Any) -> None:
    """
    Set a runtime config override value.

    Overrides take precedence over config_entry.options and are used by
    number/switch entities for runtime configuration.

    Args:
        config_key: The configuration key (e.g., CONF_BEST_PRICE_FLEX)
        config_section: The section in options (e.g., "flexibility_settings")
        value: The override value
    """
    # Create the section on first use, then record the override.
    section = self._config_overrides.setdefault(config_section, {})
    section[config_key] = value
    self._log(
        "debug",
        "Config override set: %s.%s = %s",
        config_section,
        config_key,
        value,
    )
def remove_config_override(self, config_key: str, config_section: str) -> None:
    """
    Remove a runtime config override value.

    After removal, the value from config_entry.options will be used again.

    Args:
        config_key: The configuration key to remove
        config_section: The section the key belongs to
    """
    section = self._config_overrides.get(config_section)
    if section is not None:
        section.pop(config_key, None)
        # Drop the section entirely once its last override is gone.
        if not section:
            del self._config_overrides[config_section]
    self._log(
        "debug",
        "Config override removed: %s.%s",
        config_section,
        config_key,
    )
def get_config_override(self, config_key: str, config_section: str) -> Any | None:
    """
    Get a runtime config override value if set.

    Args:
        config_key: The configuration key to check
        config_section: The section the key belongs to

    Returns:
        The override value if set, None otherwise
    """
    section = self._config_overrides.get(config_section, {})
    return section.get(config_key)
def has_config_override(self, config_key: str, config_section: str) -> bool:
    """
    Check if a runtime config override is set.

    Args:
        config_key: The configuration key to check
        config_section: The section the key belongs to

    Returns:
        True if an override is set, False otherwise
    """
    section = self._config_overrides.get(config_section)
    return section is not None and config_key in section
def get_active_overrides(self) -> dict[str, dict[str, Any]]:
    """
    Get all active config overrides.

    Returns:
        Shallow copy of all active overrides, keyed by section.
    """
    return dict(self._config_overrides)
async def async_handle_config_override_update(self) -> None:
    """
    Handle a config override change: invalidate caches and re-transform data.

    Called by number/switch entities when their values change; mirrors the
    options-update flow so runtime overrides behave consistently.
    """
    self._log("debug", "Config override update triggered, re-transforming data")
    self._data_transformer.invalidate_config_cache()
    self._period_calculator.invalidate_config_cache()
    current = self.data
    if not current or "priceInfo" not in current:
        self._log("debug", "No data to re-transform")
        return
    # Re-run the transformation pipeline on the existing price info.
    self.data = self._transform_data({"price_info": current["priceInfo"]})
    self.async_update_listeners()
@callback
def async_add_time_sensitive_listener(self, update_callback: TimeServiceCallback) -> CALLBACK_TYPE:
    """
    Listen for time-sensitive updates that occur every quarter-hour.

    Time-sensitive entities (like current_interval_price, next_interval_price, etc.) should use this
    method instead of async_add_listener to receive updates at quarter-hour boundaries.

    Returns:
        Callback that can be used to remove the listener

    """
    # Registration is delegated to the listener manager, which owns the callback set.
    return self._listener_manager.async_add_time_sensitive_listener(update_callback)
@callback
def _async_update_time_sensitive_listeners(self, time_service: TibberPricesTimeService) -> None:
    """
    Update all time-sensitive entities without triggering a full coordinator update.

    Args:
        time_service: TibberPricesTimeService instance with reference time for this update cycle

    """
    # Notification is delegated to the listener manager, which holds the callbacks.
    self._listener_manager.async_update_time_sensitive_listeners(time_service)
@callback
def async_add_minute_update_listener(self, update_callback: TimeServiceCallback) -> CALLBACK_TYPE:
    """
    Listen for minute-by-minute updates for timing sensors.

    Timing sensors (like best_price_remaining_minutes, peak_price_progress, etc.) should use this
    method to receive updates every minute for accurate countdown/progress tracking.

    Returns:
        Callback that can be used to remove the listener

    """
    # Registration is delegated to the listener manager, which owns the callback set.
    return self._listener_manager.async_add_minute_update_listener(update_callback)
@callback
def _async_update_minute_listeners(self, time_service: TibberPricesTimeService) -> None:
    """
    Update all minute-update entities without triggering a full coordinator update.

    Args:
        time_service: TibberPricesTimeService instance with reference time for this update cycle

    """
    # Notification is delegated to the listener manager, which holds the callbacks.
    self._listener_manager.async_update_minute_listeners(time_service)
@callback
def _handle_quarter_hour_refresh(self, _now: datetime | None = None) -> None:
    """
    Handle quarter-hour entity refresh (Timer #2).

    This is a SYNCHRONOUS callback (decorated with @callback) - it runs in the event loop
    without async/await overhead because it performs only fast, non-blocking operations:
    - Midnight turnover check (date comparison, data rotation)
    - Listener notifications (entity state updates)
    NO I/O operations (no API calls, no file operations), so no need for async def.

    This is triggered at exact quarter-hour boundaries (:00, :15, :30, :45).
    Does NOT fetch new data - only updates entity states based on existing cached data.

    Args:
        _now: Scheduled trigger time from the HA time tracker (unused; a fresh
            TimeService reference time is taken instead).

    """
    # Create LOCAL TimeService with fresh reference time for this refresh
    # Each timer has its own TimeService instance - no shared state between timers
    # This timer updates 30+ time-sensitive entities at quarter-hour boundaries
    # (Timer #3 handles timing entities separately - no overlap)
    time_service = TibberPricesTimeService()
    now = time_service.now()
    # Update shared coordinator time (used by Timer #1 and other operations)
    # This is safe because we're in a @callback (synchronous event loop)
    self.time = time_service
    # Update helper modules with fresh TimeService instance
    self.api.time = time_service
    self._price_data_manager.time = time_service
    self._data_transformer.time = time_service
    self._period_calculator.time = time_service
    self._log("debug", "[Timer #2] Quarter-hour refresh triggered at %s", now.isoformat())
    # Check if midnight has passed since last check
    midnight_turnover_performed = self._check_and_handle_midnight_turnover(now)
    if midnight_turnover_performed:
        # Midnight turnover was performed by THIS call (Timer #1 didn't run yet)
        self._log("info", "[Timer #2] Midnight turnover performed, entities updated")
        # Schedule cache save asynchronously (we're in a callback)
        self.hass.async_create_task(self._store_cache())
        # async_update_listeners() was already called in _check_and_handle_midnight_turnover
        # This includes time-sensitive listeners, so skip regular update to avoid double-update
    else:
        # Regular quarter-hour refresh - only update time-sensitive entities
        # (Midnight turnover was either not needed, or already done by Timer #1)
        # Pass local time_service to entities (not self.time which could be overwritten)
        self._async_update_time_sensitive_listeners(time_service)
@callback
def _handle_minute_refresh(self, _now: datetime | None = None) -> None:
    """
    Handle 30-second entity refresh for timing sensors (Timer #3).

    NOTE(review): the method name and MINUTE_UPDATE_ENTITY_KEYS say "minute" while
    this docstring claims a 30-second cadence; the actual interval is set by
    schedule_minute_refresh() in the listener manager (not visible here) — confirm.

    This is a SYNCHRONOUS callback (decorated with @callback) - it runs in the event loop
    without async/await overhead because it performs only fast, non-blocking operations:
    - Listener notifications for timing sensors (remaining_minutes, progress)
    NO I/O operations (no API calls, no file operations), so no need for async def.

    Runs every 30 seconds to keep sensor values in sync with HA frontend display.
    Timing calculations use rounded minutes matching HA's relative time display.
    Does NOT fetch new data - only updates entity states based on existing cached data.

    Args:
        _now: Scheduled trigger time from the HA time tracker (unused; a fresh
            TimeService reference time is taken instead).

    """
    # Create LOCAL TimeService with fresh reference time for this 30-second refresh
    # Each timer has its own TimeService instance - no shared state between timers
    # Timer #2 updates 30+ time-sensitive entities (prices, levels, timestamps)
    # Timer #3 updates 6 timing entities (remaining_minutes, progress, next_in_minutes)
    # NO overlap - entities are registered with either Timer #2 OR Timer #3, never both
    time_service = TibberPricesTimeService()
    # Only log at debug level to avoid log spam (this runs every 30 seconds)
    self._log("debug", "[Timer #3] 30-second refresh for timing sensors")
    # Update only minute-update entities (remaining_minutes, progress, etc.)
    # Pass local time_service to entities (not self.time which could be overwritten)
    self._async_update_minute_listeners(time_service)
def _check_midnight_turnover_needed(self, now: datetime) -> bool:
"""
Check if midnight turnover is needed (atomic check, no side effects).
This is called by BOTH Timer #1 and Timer #2 to coordinate turnover.
Returns True only if turnover hasn't been performed yet today.
Args:
now: Current datetime
Returns:
True if midnight turnover is needed, False if already done
"""
# Initialize handler on first use
if self._midnight_handler.last_check_time is None:
self._midnight_handler.update_check_time(now)
return False
# Delegate to midnight handler
return self._midnight_handler.is_turnover_needed(now)
def _perform_midnight_data_rotation(self, now: datetime) -> None:
"""
Perform midnight data rotation on cached data (side effects).
This rotates yesterday/today/tomorrow and updates coordinator data.
Called by whoever detects midnight first (Timer #1 or Timer #2).
IMPORTANT: This method is NOT @callback because it modifies shared state.
Call this from async context only to ensure proper serialization.
Args:
now: Current datetime
"""
current_date = now.date()
last_check_date = (
self._midnight_handler.last_check_time.date() if self._midnight_handler.last_check_time else current_date
)
self._log(
"debug",
"Performing midnight turnover: last_check=%s, current=%s",
last_check_date,
current_date,
)
# With flat interval list architecture and IntervalPool as source of truth,
# no data rotation needed! get_intervals_for_day_offsets() automatically
# filters by date. Just re-transform to refresh enrichment.
if self.data and "priceInfo" in self.data:
# Re-transform data to ensure enrichment is refreshed for new day
raw_data = {"price_info": self.data["priceInfo"]}
self.data = self._transform_data(raw_data)
# Mark turnover as done for today (atomic update)
self._midnight_handler.mark_turnover_done(now)
@callback
def _check_and_handle_midnight_turnover(self, now: datetime) -> bool:
    """
    Detect a passed midnight and rotate data if still pending.

    Called by Timer #2 (quarter-hour refresh) so rotation happens promptly
    instead of waiting for the next API update cycle. Coordinates with
    Timer #1 via the atomic date check; if Timer #1 already rotated, this
    is a graceful no-op.

    Returns:
        True if THIS call performed the turnover, False otherwise.
    """
    # Atomic, side-effect-free check first.
    if not self._check_midnight_turnover_needed(now):
        # Already handled today (Timer #1 or an earlier Timer #2 call).
        return False

    self._log("info", "[Timer #2] Midnight turnover detected, performing data rotation")
    self._perform_midnight_data_rotation(now)

    # CRITICAL: both listener groups must learn about the turnover:
    #   - async_update_listeners(): normal entities (HA DataUpdateCoordinator)
    #   - _async_update_time_sensitive_listeners(): custom time-sensitive
    #     entities (e.g. best_price_period / peak_price_period sensors)
    # Skipping either call leaves that group showing stale periods.
    self.async_update_listeners()
    self._async_update_time_sensitive_listeners(TibberPricesTimeService())
    return True
async def async_shutdown(self) -> None:
    """
    Shut down the coordinator: cancel timers, clear repairs, persist cache.

    Cancels all three timers (Timer #1 API polling, Timer #2 quarter-hour
    entity updates, Timer #3 30-second timing sensors), removes every open
    repair issue, and writes the cache so unsaved data survives shutdown.
    """
    # Stop all periodic work before touching shared state.
    self._listener_manager.cancel_timers()
    # Integration is going away - drop every open repair issue.
    await self._repair_manager.clear_all_repairs()
    # Best-effort cache write: shutdown must complete even if this fails.
    try:
        await self._store_cache()
    except OSError as err:
        self._log("error", "Failed to save cache during shutdown: %s", err)
    else:
        self._log("debug", "Cache saved during shutdown")
async def _async_update_data(self) -> dict[str, Any]:
    """
    Fetch data from Tibber API (called by DataUpdateCoordinator timer).

    This is Timer #1 (HA's built-in coordinator timer, every 15 min).

    Flow: timestamp bookkeeping -> TimeService refresh -> cache load ->
    midnight-turnover check (coordinated with Timer #2) -> API fetch via
    the price data manager -> lifecycle and repair bookkeeping.

    Returns:
        Transformed coordinator data (enriched "priceInfo" plus periods).

    Raises:
        ConfigEntryAuthFailed / UpdateFailed: re-raised by
        _price_data_manager.handle_api_error() on API failures
        (presumably - confirm against PriceDataManager implementation).
    """
    self._log("debug", "[Timer #1] DataUpdateCoordinator check triggered")
    # Track when Timer #1 ran (for next_api_poll calculation).
    # NOTE: reads the PREVIOUS cycle's TimeService; it is replaced below.
    self._last_coordinator_update = self.time.now()
    # Create TimeService with fresh reference time for this update cycle.
    self.time = TibberPricesTimeService()
    current_time = self.time.now()
    # Transition lifecycle state from "fresh" to "cached" if enough time passed.
    # Note: _lifecycle_state is for diagnostics only; the lifecycle sensor
    # calculates its state dynamically from _last_price_update in
    # get_lifecycle_state().
    if self._lifecycle_state == "fresh":
        # After the first re-check, data is considered "cached" (no longer "just fetched").
        self._lifecycle_state = "cached"
    # Propagate the fresh TimeService instance to all helper modules.
    self.api.time = self.time
    self._price_data_manager.time = self.time
    self._data_transformer.time = self.time
    self._period_calculator.time = self.time
    # Load cache if not already loaded (user data only, price data is in Pool).
    if self._cached_user_data is None:
        await self.load_cache()
    # Initialize midnight handler on first run.
    if self._midnight_handler.last_check_time is None:
        self._midnight_handler.update_check_time(current_time)
    # CRITICAL: check for midnight turnover FIRST (before any data operations).
    # This prevents a race with Timer #2 (quarter-hour refresh): whoever runs
    # first performs the turnover, the other skips.
    midnight_turnover_needed = self._check_midnight_turnover_needed(current_time)
    if midnight_turnover_needed:
        self._log("info", "[Timer #1] Midnight turnover detected, performing data rotation")
        self._perform_midnight_data_rotation(current_time)
        # After rotation, persist the cache and notify entities.
        await self._store_cache()
        # CRITICAL: notify time-sensitive listeners explicitly. Returning
        # self.data triggers async_update_listeners() (normal listeners)
        # automatically via DataUpdateCoordinator, but time-sensitive
        # listeners (best_price_period, peak_price_period, ...) need this
        # explicit call to see the post-turnover periods.
        time_service = TibberPricesTimeService()
        self._async_update_time_sensitive_listeners(time_service)
        # Return current data (enriched after rotation) to trigger entity
        # updates. Falls through to a normal fetch if no data is loaded.
        if self.data:
            return self.data
    try:
        # Reset the API call counter when the day changed.
        current_date = current_time.date()
        if self._last_api_call_date != current_date:
            self._api_calls_today = 0
            self._last_api_call_date = current_date
        # Set _is_fetching flag - the lifecycle sensor reads this directly in
        # get_lifecycle_state() and shows "refreshing" during the fetch.
        self._is_fetching = True
        # Pass current price info so the manager can see whether tomorrow's
        # data already exists before deciding to call the API.
        current_price_info = self.data.get("priceInfo", []) if self.data else []
        result, api_called = await self._price_data_manager.handle_main_entry_update(
            current_time,
            self._home_id,
            self._transform_data,
            current_price_info=current_price_info,
        )
        # CRITICAL: reset the fetching flag AFTER the fetch completes.
        self._is_fetching = False
        # Sync user_data cache (price data lives in the IntervalPool).
        self._cached_user_data = self._price_data_manager.cached_user_data
        # Update lifecycle tracking - ONLY if the API was actually called
        # (not when cached data was returned).
        if api_called and result and "priceInfo" in result and len(result["priceInfo"]) > 0:
            self._last_price_update = current_time  # Track when data was fetched from API
            self._api_calls_today += 1
            self._lifecycle_state = "fresh"  # Diagnostics only (see above)
            _LOGGER.debug(
                "API call completed: Fetched %d intervals, updating lifecycle to 'fresh'",
                len(result["priceInfo"]),
            )
        elif not api_called:
            # Cached data in use - lifecycle state stays as-is.
            _LOGGER.debug(
                "Using cached data: %d intervals from pool, no API call made",
                len(result.get("priceInfo", [])),
            )
    except (
        TibberPricesApiClientAuthenticationError,
        TibberPricesApiClientCommunicationError,
        TibberPricesApiClientError,
    ) as err:
        # Reset fetch/lifecycle state on error (sensor detects errors via
        # coordinator.last_exception; _lifecycle_state is diagnostics only).
        self._is_fetching = False
        self._lifecycle_state = "error"
        # Feed rate-limit errors into the repair system.
        await self._track_rate_limit_error(err)
        # Handle API error - re-raises as ConfigEntryAuthFailed or UpdateFailed.
        # With IntervalPool there is no local cache fallback here; the Pool has
        # its own persistence for offline recovery.
        await self._price_data_manager.handle_api_error(err)
        # handle_api_error always raises; this return only satisfies the type checker.
        return {}
    else:
        # Successful update: evaluate repair conditions before returning.
        await self._check_repair_conditions(result, current_time)
        return result
async def _track_rate_limit_error(self, error: Exception) -> None:
"""Track rate limit errors for repair notification system."""
error_str = str(error).lower()
is_rate_limit = "429" in error_str or "rate limit" in error_str or "too many requests" in error_str
if is_rate_limit:
await self._repair_manager.track_rate_limit_error()
async def _check_repair_conditions(
self,
result: dict[str, Any],
current_time: datetime,
) -> None:
"""Check and manage repair conditions after successful data update."""
# 1. Home not found detection (home was removed from Tibber account)
if result and result.get("_home_not_found"):
await self._repair_manager.create_home_not_found_repair()
# Remove the marker before returning to entities
result.pop("_home_not_found", None)
else:
# Home exists - clear any existing repair
await self._repair_manager.clear_home_not_found_repair()
# 2. Tomorrow data availability (after 18:00)
if result and "priceInfo" in result:
has_tomorrow_data = self._price_data_manager.has_tomorrow_data(result["priceInfo"])
await self._repair_manager.check_tomorrow_data_availability(
has_tomorrow_data=has_tomorrow_data,
current_time=current_time,
)
# 3. Clear rate limit tracking on successful API call
await self._repair_manager.clear_rate_limit_tracking()
async def load_cache(self) -> None:
    """Load cached user data from storage (price data lives in the IntervalPool)."""
    manager = self._price_data_manager
    await manager.load_cache()
    # Mirror the manager's user-data state for lifecycle tracking.
    self._cached_user_data = manager.cached_user_data
    self._last_user_update = manager._last_user_update  # noqa: SLF001 - Sync for lifecycle tracking
    # Price data persists in the IntervalPool, so the only midnight concern
    # after an HA restart is avoiding a second turnover for today: mark
    # today's midnight as already handled.
    midnight = self.time.as_local(self.time.now()).replace(hour=0, minute=0, second=0, microsecond=0)
    self._midnight_handler.mark_turnover_done(midnight)
async def _store_cache(self) -> None:
"""Store cache data (user metadata only, price data is in IntervalPool)."""
await self._price_data_manager.store_cache(self._midnight_handler.last_check_time)
def _needs_tomorrow_data(self) -> bool:
    """Return True when tomorrow's price data is missing or invalid."""
    data = self.data
    # Without pool-backed data there is nothing for tomorrow either.
    if not data or "priceInfo" not in data:
        return True
    # Delegate the actual tomorrow-interval lookup to the helpers module.
    return helpers.needs_tomorrow_data({"price_info": data["priceInfo"]})
def _has_valid_tomorrow_data(self) -> bool:
"""Check if we have valid tomorrow data (inverse of _needs_tomorrow_data)."""
return not self._needs_tomorrow_data()
@callback
def _merge_cached_data(self) -> dict[str, Any]:
    """Return the current (pool-backed) data, or an empty dict when unset."""
    return self.data if self.data else {}
def _get_threshold_percentages(self) -> dict[str, int | float]:
"""Get threshold percentages from config options."""
return self._data_transformer.get_threshold_percentages()
def _calculate_periods_for_price_info(self, price_info: dict[str, Any]) -> dict[str, Any]:
"""Calculate periods (best price and peak price) for the given price info."""
return self._period_calculator.calculate_periods_for_price_info(price_info)
def _transform_data(self, raw_data: dict[str, Any]) -> dict[str, Any]:
"""Transform raw data for main entry (aggregated view of all homes)."""
# Delegate complete transformation to DataTransformer (enrichment + periods)
# DataTransformer handles its own caching internally
return self._data_transformer.transform_data(raw_data)
# --- Methods expected by sensors and services ---
def get_home_data(self, home_id: str) -> dict[str, Any] | None:
    """
    Get data for a specific home.

    Each coordinator manages exactly one home, so data is returned only
    when the requested home_id matches this coordinator's home and data
    has been loaded.
    """
    if self.data and home_id == self._home_id:
        return self.data
    return None
def get_current_interval(self) -> dict[str, Any] | None:
    """
    Get the price data for the interval containing the current time.

    Returns:
        The matching interval dict, or None when no data is loaded or no
        interval covers the current time.
    """
    # Single guard is sufficient (the original duplicated this check back
    # to back, leaving dead code).
    if not self.data:
        return None
    now = self.time.now()
    return find_price_data_for_interval(self.data, now, time=self.time)
async def refresh_user_data(self) -> bool:
    """
    Force a user-data refresh, bypassing the cache.

    Returns:
        True when fresh data was fetched and stored, False on API errors.
    """
    try:
        fetched_at = self.time.now()
        self._log("info", "Forcing user data refresh (bypassing cache)")
        # Hit the API directly - no cache check beforehand.
        user_data = await self.api.async_get_viewer_details()
        self._cached_user_data = user_data
        self._last_user_update = fetched_at
        self._log("info", "User data refreshed successfully - found %d home(s)", len(user_data.get("homes", [])))
        await self._store_cache()
    except (
        TibberPricesApiClientAuthenticationError,
        TibberPricesApiClientCommunicationError,
        TibberPricesApiClientError,
    ):
        # API failure: keep whatever was cached before and signal failure.
        return False
    else:
        return True
def get_user_profile(self) -> dict[str, Any]:
    """Return user-profile metadata: last update time and cache presence flag."""
    has_cached_data = self._cached_user_data is not None
    return {
        "last_updated": self._last_user_update,
        "cached_user_data": has_cached_data,
    }
def get_user_homes(self) -> list[dict[str, Any]]:
    """Return the homes list from cached user data (empty list when uncached)."""
    cached = self._cached_user_data
    if not cached:
        return []
    return cached.get("viewer", {}).get("homes", [])

View file

@ -1,297 +0,0 @@
"""Data transformation and enrichment logic for the coordinator."""
from __future__ import annotations
import copy
import logging
from typing import TYPE_CHECKING, Any
from custom_components.tibber_prices import const as _const
from custom_components.tibber_prices.utils.price import enrich_price_info_with_differences
if TYPE_CHECKING:
from collections.abc import Callable
from datetime import datetime
from homeassistant.config_entries import ConfigEntry
from .time_service import TibberPricesTimeService
_LOGGER = logging.getLogger(__name__)
class TibberPricesDataTransformer:
    """
    Handles data transformation, enrichment, and period calculations.

    Maintains two caches:
      - a config snapshot cache (``_config_cache``) avoiding repeated
        option lookups on every update check, and
      - a transformation cache (``_cached_transformed_data``) reused until
        the source data, the configuration, or the calendar day changes.
    """

    def __init__(
        self,
        config_entry: ConfigEntry,
        log_prefix: str,
        calculate_periods_fn: Callable[[dict[str, Any]], dict[str, Any]],
        time: TibberPricesTimeService,
    ) -> None:
        """
        Initialize the data transformer.

        Args:
            config_entry: Config entry whose options drive enrichment settings.
            log_prefix: Prefix prepended to every log message.
            calculate_periods_fn: Callback computing best/peak price periods
                from the enriched price info.
            time: TimeService (the coordinator replaces this each cycle).
        """
        self.config_entry = config_entry
        self._log_prefix = log_prefix
        self._calculate_periods_fn = calculate_periods_fn
        self.time: TibberPricesTimeService = time
        # Transformation cache
        self._cached_transformed_data: dict[str, Any] | None = None
        self._last_transformation_config: dict[str, Any] | None = None
        self._last_midnight_check: datetime | None = None
        self._last_source_data_timestamp: datetime | None = None  # Track when source data changed
        # Config snapshot cache (rebuilt when options change)
        self._config_cache: dict[str, Any] | None = None
        self._config_cache_valid = False

    def _log(self, level: str, message: str, *args: object, **kwargs: object) -> None:
        """Log with coordinator-specific prefix."""
        prefixed_message = f"{self._log_prefix} {message}"
        getattr(_LOGGER, level)(prefixed_message, *args, **kwargs)

    def get_threshold_percentages(self) -> dict[str, int | float]:
        """
        Get threshold percentages, hysteresis and gap tolerance for RATING_LEVEL from config options.

        CRITICAL: This function is ONLY for rating_level (internal calculation: LOW/NORMAL/HIGH).
        Do NOT use for price level (Tibber API: VERY_CHEAP/CHEAP/NORMAL/EXPENSIVE/VERY_EXPENSIVE).
        """
        options = self.config_entry.options or {}
        return {
            "low": options.get(_const.CONF_PRICE_RATING_THRESHOLD_LOW, _const.DEFAULT_PRICE_RATING_THRESHOLD_LOW),
            "high": options.get(_const.CONF_PRICE_RATING_THRESHOLD_HIGH, _const.DEFAULT_PRICE_RATING_THRESHOLD_HIGH),
            "hysteresis": options.get(_const.CONF_PRICE_RATING_HYSTERESIS, _const.DEFAULT_PRICE_RATING_HYSTERESIS),
            "gap_tolerance": options.get(
                _const.CONF_PRICE_RATING_GAP_TOLERANCE, _const.DEFAULT_PRICE_RATING_GAP_TOLERANCE
            ),
        }

    def get_level_gap_tolerance(self) -> int:
        """
        Get gap tolerance for PRICE LEVEL (Tibber API) from config options.

        CRITICAL: This is separate from rating_level gap tolerance.
        Price level comes from Tibber API (VERY_CHEAP/CHEAP/NORMAL/EXPENSIVE/VERY_EXPENSIVE).
        Rating level is calculated internally (LOW/NORMAL/HIGH).
        """
        options = self.config_entry.options or {}
        return options.get(_const.CONF_PRICE_LEVEL_GAP_TOLERANCE, _const.DEFAULT_PRICE_LEVEL_GAP_TOLERANCE)

    def invalidate_config_cache(self) -> None:
        """
        Invalidate config cache AND transformation cache when options change.

        CRITICAL: When options like gap_tolerance, hysteresis, or price_level_gap_tolerance
        change, we must clear BOTH caches:
        1. Config cache (_config_cache) - forces config rebuild on next check
        2. Transformation cache (_cached_transformed_data) - forces data re-enrichment

        This ensures that the next call to transform_data() will re-calculate
        rating_levels and apply new gap tolerance settings to existing price data.
        """
        self._config_cache_valid = False
        self._config_cache = None
        self._cached_transformed_data = None  # Force re-transformation with new config
        self._last_transformation_config = None  # Force config comparison to trigger

    def _get_current_transformation_config(self) -> dict[str, Any]:
        """
        Get current configuration that affects data transformation.

        Uses cached config to avoid ~30 options.get() calls on every update check.
        Cache is invalidated when config_entry.options change.
        """
        if self._config_cache_valid and self._config_cache is not None:
            return self._config_cache
        # Build config dictionary (expensive operation)
        options = self.config_entry.options
        # Best/peak price remain nested (multi-section steps)
        best_period_section = options.get("period_settings", {})
        best_flex_section = options.get("flexibility_settings", {})
        best_relax_section = options.get("relaxation_and_target_periods", {})
        peak_period_section = options.get("period_settings", {})
        peak_flex_section = options.get("flexibility_settings", {})
        peak_relax_section = options.get("relaxation_and_target_periods", {})
        config = {
            "thresholds": self.get_threshold_percentages(),
            "level_gap_tolerance": self.get_level_gap_tolerance(),  # Separate: Tibber's price level smoothing
            # Volatility thresholds now flat (single-section step)
            "volatility_thresholds": {
                "moderate": options.get(_const.CONF_VOLATILITY_THRESHOLD_MODERATE, 15.0),
                "high": options.get(_const.CONF_VOLATILITY_THRESHOLD_HIGH, 25.0),
                "very_high": options.get(_const.CONF_VOLATILITY_THRESHOLD_VERY_HIGH, 40.0),
            },
            # Price trend thresholds now flat (single-section step)
            "price_trend_thresholds": {
                "rising": options.get(
                    _const.CONF_PRICE_TREND_THRESHOLD_RISING, _const.DEFAULT_PRICE_TREND_THRESHOLD_RISING
                ),
                "falling": options.get(
                    _const.CONF_PRICE_TREND_THRESHOLD_FALLING, _const.DEFAULT_PRICE_TREND_THRESHOLD_FALLING
                ),
            },
            "best_price_config": {
                "flex": best_flex_section.get(_const.CONF_BEST_PRICE_FLEX, 15.0),
                "max_level": best_period_section.get(_const.CONF_BEST_PRICE_MAX_LEVEL, "NORMAL"),
                "min_period_length": best_period_section.get(_const.CONF_BEST_PRICE_MIN_PERIOD_LENGTH, 4),
                "min_distance_from_avg": best_flex_section.get(_const.CONF_BEST_PRICE_MIN_DISTANCE_FROM_AVG, -5.0),
                "max_level_gap_count": best_period_section.get(_const.CONF_BEST_PRICE_MAX_LEVEL_GAP_COUNT, 0),
                "enable_min_periods": best_relax_section.get(_const.CONF_ENABLE_MIN_PERIODS_BEST, False),
                "min_periods": best_relax_section.get(_const.CONF_MIN_PERIODS_BEST, 2),
                "relaxation_attempts": best_relax_section.get(_const.CONF_RELAXATION_ATTEMPTS_BEST, 4),
            },
            "peak_price_config": {
                "flex": peak_flex_section.get(_const.CONF_PEAK_PRICE_FLEX, 15.0),
                "min_level": peak_period_section.get(_const.CONF_PEAK_PRICE_MIN_LEVEL, "HIGH"),
                "min_period_length": peak_period_section.get(_const.CONF_PEAK_PRICE_MIN_PERIOD_LENGTH, 4),
                "min_distance_from_avg": peak_flex_section.get(_const.CONF_PEAK_PRICE_MIN_DISTANCE_FROM_AVG, 5.0),
                "max_level_gap_count": peak_period_section.get(_const.CONF_PEAK_PRICE_MAX_LEVEL_GAP_COUNT, 0),
                "enable_min_periods": peak_relax_section.get(_const.CONF_ENABLE_MIN_PERIODS_PEAK, False),
                "min_periods": peak_relax_section.get(_const.CONF_MIN_PERIODS_PEAK, 2),
                "relaxation_attempts": peak_relax_section.get(_const.CONF_RELAXATION_ATTEMPTS_PEAK, 4),
            },
        }
        # Cache for future calls
        self._config_cache = config
        self._config_cache_valid = True
        return config

    def _should_retransform_data(self, current_time: datetime, source_data_timestamp: datetime | None = None) -> bool:
        """
        Check if data transformation should be performed.

        Retransformation triggers (in check order): no cached result yet,
        source data changed, config changed, or the local date rolled over.

        Args:
            current_time: Current time for midnight check
            source_data_timestamp: Timestamp of source data (if available)

        Returns:
            True if retransformation needed, False if cached data can be used
        """
        # No cached transformed data - must transform
        if self._cached_transformed_data is None:
            return True
        # Source data changed - must retransform
        # This detects when new API data was fetched (e.g., tomorrow data arrival)
        if source_data_timestamp is not None and source_data_timestamp != self._last_source_data_timestamp:
            self._log("debug", "Source data changed, retransforming data")
            return True
        # Configuration changed - must retransform
        current_config = self._get_current_transformation_config()
        config_changed = current_config != self._last_transformation_config
        if config_changed:
            return True
        # Check for midnight turnover (compare local calendar dates)
        now_local = self.time.as_local(current_time)
        current_date = now_local.date()
        if self._last_midnight_check is None:
            return True
        last_check_local = self.time.as_local(self._last_midnight_check)
        last_check_date = last_check_local.date()
        if current_date != last_check_date:
            self._log("debug", "Midnight turnover detected, retransforming data")
            return True
        return False

    def transform_data(self, raw_data: dict[str, Any]) -> dict[str, Any]:
        """
        Transform raw data for main entry (single home view).

        Returns the cached result when nothing relevant changed; otherwise
        deep-copies the raw intervals, enriches them (differences + rating
        levels), computes best/peak periods, and refreshes the cache.
        """
        current_time = self.time.now()
        source_data_timestamp = raw_data.get("timestamp")
        # Return cached transformed data if no retransformation needed
        should_retransform = self._should_retransform_data(current_time, source_data_timestamp)
        has_cache = self._cached_transformed_data is not None
        # NOTE: info-level by design choice here; this logs on every call.
        self._log(
            "info",
            "transform_data: should_retransform=%s, has_cache=%s",
            should_retransform,
            has_cache,
        )
        if not should_retransform and has_cache:
            self._log("debug", "Using cached transformed data (no transformation needed)")
            # has_cache ensures _cached_transformed_data is not None
            return self._cached_transformed_data  # type: ignore[return-value]
        self._log("debug", "Transforming price data (enrichment + period calculation)")
        # Extract data from single-home structure
        home_id = raw_data.get("home_id", "")
        # CRITICAL: Make a deep copy of intervals to avoid modifying cached raw data
        # The enrichment function modifies intervals in-place, which would corrupt
        # the original API data and make re-enrichment with different settings impossible
        all_intervals = copy.deepcopy(raw_data.get("price_info", []))
        currency = raw_data.get("currency", "EUR")
        if not all_intervals:
            # Empty input: return an empty-but-complete structure (note this
            # result is NOT cached, matching the cache-update path below).
            return {
                "timestamp": raw_data.get("timestamp"),
                "home_id": home_id,
                "priceInfo": [],
                "pricePeriods": {
                    "best_price": [],
                    "peak_price": [],
                },
                "currency": currency,
            }
        # Enrich price info dynamically with calculated differences and rating levels
        # (Modifies all_intervals in-place, returns same list)
        thresholds = self.get_threshold_percentages()  # Only for rating_level
        level_gap_tolerance = self.get_level_gap_tolerance()  # Separate: for Tibber's price level
        enriched_intervals = enrich_price_info_with_differences(
            all_intervals,
            threshold_low=thresholds["low"],
            threshold_high=thresholds["high"],
            hysteresis=float(thresholds["hysteresis"]),
            gap_tolerance=int(thresholds["gap_tolerance"]),
            level_gap_tolerance=level_gap_tolerance,
            time=self.time,
        )
        # Store enriched intervals directly as priceInfo (flat list)
        transformed_data = {
            "home_id": home_id,
            "priceInfo": enriched_intervals,
            "currency": currency,
        }
        # Calculate periods (best price and peak price)
        if "priceInfo" in transformed_data:
            transformed_data["pricePeriods"] = self._calculate_periods_fn(transformed_data["priceInfo"])
        # Cache the transformed data together with the inputs that produced it
        self._cached_transformed_data = transformed_data
        self._last_transformation_config = self._get_current_transformation_config()
        self._last_midnight_check = current_time
        self._last_source_data_timestamp = source_data_timestamp
        return transformed_data

    def invalidate_cache(self) -> None:
        """Invalidate transformation cache (config cache is left untouched)."""
        self._cached_transformed_data = None

    @property
    def last_midnight_check(self) -> datetime | None:
        """Get last midnight check timestamp."""
        return self._last_midnight_check

    @last_midnight_check.setter
    def last_midnight_check(self, value: datetime | None) -> None:
        """Set last midnight check timestamp."""
        self._last_midnight_check = value

View file

@ -1,175 +0,0 @@
"""Pure utility functions for coordinator module."""
from __future__ import annotations
import logging
from datetime import timedelta
from typing import TYPE_CHECKING, Any
from homeassistant.util import dt as dt_util
if TYPE_CHECKING:
from .time_service import TibberPricesTimeService
_LOGGER = logging.getLogger(__name__)
def get_intervals_for_day_offsets(
    coordinator_data: dict[str, Any] | None,
    offsets: list[int],
) -> list[dict[str, Any]]:
    """
    Filter coordinator intervals down to the requested day offsets.

    Core date-offset filter for interval data - callers never need to know
    where the intervals are stored. Single pass over the intervals; offsets
    outside the supported range are silently ignored (not an error).

    Args:
        coordinator_data: Coordinator data dict (typically coordinator.data).
        offsets: Day offsets relative to today (0 = today, -1 = yesterday,
            +1 = tomorrow, -7 = one week ago, ...). Supported range is
            -374 to +1, allowing historical comparisons up to one year plus
            one week back.

    Returns:
        Intervals matching the requested offsets, in input (chronological) order.

    Example:
        today = get_intervals_for_day_offsets(coordinator.data, [0])
        future = get_intervals_for_day_offsets(coordinator.data, [0, 1])
        yearly = get_intervals_for_day_offsets(coordinator.data, [-7, -371])
    """
    if not coordinator_data:
        return []

    # priceInfo is a flat list of interval dicts in the current architecture.
    intervals = coordinator_data.get("priceInfo", [])
    if not intervals:
        return []

    # Supported offset window: one year + one week back, one day ahead.
    lower, upper = -374, 1

    # Resolve offsets into concrete local dates, dropping out-of-range values
    # instead of failing on invalid input. dt_util is used directly - no
    # TimeService needed here.
    today = dt_util.now().date()
    wanted_dates = {today + timedelta(days=off) for off in offsets if lower <= off <= upper}
    if not wanted_dates:
        return []

    matching: list[dict[str, Any]] = []
    for interval in intervals:
        starts_at = interval.get("startsAt")
        if not starts_at:
            continue
        # Tolerate string timestamps (rare after parse_all_timestamps).
        if isinstance(starts_at, str):
            parsed = dt_util.parse_datetime(starts_at)
            if not parsed:
                continue
            starts_at = dt_util.as_local(parsed)
        # Fast set-membership check on the interval's local date.
        if starts_at.date() in wanted_dates:
            matching.append(interval)
    return matching
def needs_tomorrow_data(
    cached_price_data: dict[str, Any] | None,
) -> bool:
    """
    Return True when tomorrow's data is absent from the cached price data.

    Expects the single-home cache format {"price_info": [...], "home_id": "xxx"}.
    The old multi-home (v0.14.0) format is automatically invalidated by
    is_cache_valid() in cache.py, so only the current format matters here.
    Tomorrow is derived from the current date via
    get_intervals_for_day_offsets() - no explicit date parameter needed.

    Args:
        cached_price_data: Cached price data in single-home structure.

    Returns:
        True if no intervals for tomorrow exist, False otherwise.
    """
    if not cached_price_data or "price_info" not in cached_price_data:
        return False

    # Wrap into the coordinator-data shape the helper expects.
    wrapped = {"priceInfo": cached_price_data.get("price_info", [])}
    # Offset +1 selects tomorrow relative to the current local date.
    return not get_intervals_for_day_offsets(wrapped, [1])
def parse_all_timestamps(price_data: dict[str, Any], *, time: TibberPricesTimeService) -> dict[str, Any]:
    """
    Convert every API timestamp string in price_data to a datetime object.

    This is the SINGLE conversion point from API strings to datetimes -
    downstream code only ever handles datetime objects. Roughly 200
    timestamps get parsed once per update cycle instead of repeatedly.

    Args:
        price_data: Raw API data with string timestamps (single-home structure).
        time: TibberPricesTimeService used for parsing and localizing.

    Returns:
        The same structure (mutated in place) with datetimes instead of strings.
    """
    if not price_data:
        return price_data

    intervals = price_data.get("price_info", [])
    # Nothing to parse when price_info is missing or malformed.
    if not isinstance(intervals, list):
        return price_data

    for interval in intervals:
        raw = interval.get("startsAt")
        # Already-parsed datetimes (e.g. from cache) pass through untouched;
        # empty/missing values are skipped too.
        if isinstance(raw, str) and raw:
            interval["startsAt"] = time.parse_and_localize(raw)
    return price_data

View file

@ -1,232 +0,0 @@
"""Listener management and scheduling for the coordinator."""
from __future__ import annotations
import logging
from typing import TYPE_CHECKING
from homeassistant.core import CALLBACK_TYPE, callback
from homeassistant.helpers.event import async_track_utc_time_change
from .constants import QUARTER_HOUR_BOUNDARIES
if TYPE_CHECKING:
from collections.abc import Callable
from datetime import datetime
from homeassistant.core import HomeAssistant
from .time_service import TibberPricesTimeService
# Callback type that accepts TibberPricesTimeService parameter
TimeServiceCallback = Callable[[TibberPricesTimeService], None]
_LOGGER = logging.getLogger(__name__)
class TibberPricesListenerManager:
    """Manages listeners and scheduling for coordinator updates.

    Owns two listener pools and the Home Assistant timers that drive them:
    - quarter-hour "time-sensitive" listeners (prices change per interval)
    - 30-second "timing" listeners (countdown/progress sensors)
    It also tracks the last-seen date for midnight-crossing detection.
    """

    def __init__(self, hass: HomeAssistant, log_prefix: str) -> None:
        """Initialize the listener manager."""
        self.hass = hass
        self._log_prefix = log_prefix

        # Listener lists (one callback per subscribed entity)
        self._time_sensitive_listeners: list[TimeServiceCallback] = []
        self._minute_update_listeners: list[TimeServiceCallback] = []

        # Timer cancellation callbacks (None = timer not scheduled)
        self._quarter_hour_timer_cancel: CALLBACK_TYPE | None = None
        self._minute_timer_cancel: CALLBACK_TYPE | None = None

        # Midnight turnover tracking (last datetime check_midnight_crossed saw)
        self._last_midnight_check: datetime | None = None

    def _log(self, level: str, message: str, *args: object, **kwargs: object) -> None:
        """Log with coordinator-specific prefix."""
        prefixed_message = f"{self._log_prefix} {message}"
        # Dispatch by level name to _LOGGER.debug/info/warning/... dynamically.
        getattr(_LOGGER, level)(prefixed_message, *args, **kwargs)

    @callback
    def async_add_time_sensitive_listener(self, update_callback: TimeServiceCallback) -> CALLBACK_TYPE:
        """
        Listen for time-sensitive updates that occur every quarter-hour.

        Time-sensitive entities (like current_interval_price, next_interval_price, etc.) should use this
        method instead of async_add_listener to receive updates at quarter-hour boundaries.

        Returns:
            Callback that can be used to remove the listener
        """
        self._time_sensitive_listeners.append(update_callback)

        def remove_listener() -> None:
            """Remove update listener."""
            # Guarded removal: safe even if the unsubscribe runs twice.
            if update_callback in self._time_sensitive_listeners:
                self._time_sensitive_listeners.remove(update_callback)

        return remove_listener

    @callback
    def async_update_time_sensitive_listeners(self, time_service: TibberPricesTimeService) -> None:
        """
        Update all time-sensitive entities without triggering a full coordinator update.

        Args:
            time_service: TibberPricesTimeService instance with reference time for this update cycle
        """
        for update_callback in self._time_sensitive_listeners:
            update_callback(time_service)
        self._log(
            "debug",
            "Updated %d time-sensitive entities at quarter-hour boundary",
            len(self._time_sensitive_listeners),
        )

    @callback
    def async_add_minute_update_listener(self, update_callback: TimeServiceCallback) -> CALLBACK_TYPE:
        """
        Listen for minute-by-minute updates for timing sensors.

        Timing sensors (like best_price_remaining_minutes, peak_price_progress, etc.) should use this
        method to receive updates every minute for accurate countdown/progress tracking.

        Returns:
            Callback that can be used to remove the listener
        """
        self._minute_update_listeners.append(update_callback)

        def remove_listener() -> None:
            """Remove update listener."""
            # Guarded removal: safe even if the unsubscribe runs twice.
            if update_callback in self._minute_update_listeners:
                self._minute_update_listeners.remove(update_callback)

        return remove_listener

    @callback
    def async_update_minute_listeners(self, time_service: TibberPricesTimeService) -> None:
        """
        Update all minute-update entities without triggering a full coordinator update.

        Args:
            time_service: TibberPricesTimeService instance with reference time for this update cycle
        """
        for update_callback in self._minute_update_listeners:
            update_callback(time_service)
        self._log(
            "debug",
            "Updated %d timing entities (30-second update)",
            len(self._minute_update_listeners),
        )

    def schedule_quarter_hour_refresh(
        self,
        handler_callback: Callable[[datetime], None],
    ) -> None:
        """Schedule the next quarter-hour entity refresh using Home Assistant's time tracking."""
        # Cancel any existing timer (rescheduling must never leave two timers running)
        if self._quarter_hour_timer_cancel:
            self._quarter_hour_timer_cancel()
            self._quarter_hour_timer_cancel = None

        # Use Home Assistant's async_track_utc_time_change to trigger at quarter-hour boundaries
        # HA may schedule us a few milliseconds before or after the exact boundary (:XX:59.9xx or :00:00.0xx)
        # Our interval detection is robust - uses "starts_at <= target_time < interval_end" check,
        # so we correctly identify the current interval regardless of millisecond timing.
        self._quarter_hour_timer_cancel = async_track_utc_time_change(
            self.hass,
            handler_callback,
            minute=QUARTER_HOUR_BOUNDARIES,
            second=0,  # Trigger at :00, :15, :30, :45 exactly (HA handles scheduling tolerance)
        )
        self._log(
            "debug",
            "Scheduled quarter-hour refresh for boundaries: %s (second=0)",
            QUARTER_HOUR_BOUNDARIES,
        )

    def schedule_minute_refresh(
        self,
        handler_callback: Callable[[datetime], None],
    ) -> None:
        """
        Schedule 30-second entity refresh for timing sensors (Timer #3).

        This is Timer #3 in the integration's timer architecture. It MUST trigger
        at exact 30-second boundaries (0, 30 seconds) to keep timing sensors
        (countdown, time-to) accurate.

        Home Assistant may introduce small scheduling delays (jitter), which are
        corrected using _BOUNDARY_TOLERANCE_SECONDS in time_service.py.

        Runs independently of Timer #1 (API polling), which operates at random offsets.
        """
        # Cancel any existing timer (rescheduling must never leave two timers running)
        if self._minute_timer_cancel:
            self._minute_timer_cancel()
            self._minute_timer_cancel = None

        # Trigger every 30 seconds (:00 and :30) to keep sensor values in sync with
        # Home Assistant's frontend relative time display ("in X minutes").
        # The timing calculator uses rounded minute values that match HA's rounding behavior.
        self._minute_timer_cancel = async_track_utc_time_change(
            self.hass,
            handler_callback,
            second=[0, 30],  # Trigger at :XX:00 and :XX:30
        )
        self._log(
            "debug",
            "Scheduled 30-second refresh for timing sensors (second=[0, 30])",
        )

    def check_midnight_crossed(self, now: datetime) -> bool:
        """
        Check if midnight has passed since last check.

        Note: this method updates ``_last_midnight_check`` on every call, so it
        reports a given midnight crossing at most once.

        Args:
            now: Current datetime

        Returns:
            True if midnight has been crossed, False otherwise
        """
        current_date = now.date()

        # First time check - initialize (never reports a crossing on first call)
        if self._last_midnight_check is None:
            self._last_midnight_check = now
            return False

        last_check_date = self._last_midnight_check.date()

        # Check if we've crossed into a new day
        if current_date > last_check_date:
            self._log(
                "debug",
                "Midnight crossed: last_check=%s, current=%s",
                last_check_date,
                current_date,
            )
            self._last_midnight_check = now
            return True

        self._last_midnight_check = now
        return False

    def cancel_timers(self) -> None:
        """Cancel all scheduled timers."""
        if self._quarter_hour_timer_cancel:
            self._quarter_hour_timer_cancel()
            self._quarter_hour_timer_cancel = None
        if self._minute_timer_cancel:
            self._minute_timer_cancel()
            self._minute_timer_cancel = None

View file

@ -1,121 +0,0 @@
"""
Midnight turnover detection and coordination handler.
This module provides atomic coordination logic for midnight turnover between
multiple timers (DataUpdateCoordinator and quarter-hour refresh timer).
The handler ensures that midnight turnover happens exactly once per day,
regardless of which timer detects it first.
"""
from __future__ import annotations
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from datetime import datetime
class TibberPricesMidnightHandler:
    """
    Coordinates lock-free, exactly-once midnight turnover between timers.

    Multiple timers (the DataUpdateCoordinator and the quarter-hour refresh
    timer) may each notice that a new day has begun. This handler guarantees
    the data rotation runs exactly once per day, without locks, purely by
    comparing calendar dates:

    - Each timer asks ``is_turnover_needed(now)``; the first to act calls
      ``mark_turnover_done(now)``.
    - A later timer then sees matching dates and skips the turnover.
    - Timer #3 (30-second timing updates) never participates.

    Restart behavior: after a Home Assistant restart the check timestamp is
    gone, but ``_last_actual_turnover`` can be restored from cache. A date
    mismatch against that restored value signals a midnight that was missed
    while HA was down, and the turnover is caught up on the first timer run.

    Attributes:
        _last_midnight_check: When a midnight check last happened (None until
            first use or after restart).
        _last_actual_turnover: When a turnover last actually ran (may be
            restored from cache across restarts).
    """

    def __init__(self) -> None:
        """Start with no recorded checks or turnovers."""
        self._last_midnight_check: datetime | None = None
        self._last_actual_turnover: datetime | None = None

    def is_turnover_needed(self, now: datetime) -> bool:
        """
        Report whether a midnight turnover is due, without side effects.

        Pure check: state is only modified by ``mark_turnover_done()`` /
        ``update_check_time()``, which the caller invokes separately.

        Args:
            now: Current datetime to evaluate.

        Returns:
            True when the calendar date has advanced past the last recorded
            check (or, after a restart, past the last restored turnover).
        """
        today = now.date()

        if self._last_midnight_check is not None:
            # Normal path: turnover is due once the date advances.
            return today > self._last_midnight_check.date()

        # Fresh handler (e.g. after HA restart): fall back to the restored
        # turnover timestamp to catch a midnight missed while HA was down.
        if self._last_actual_turnover is not None:
            return today > self._last_actual_turnover.date()

        # Nothing recorded at all: brand-new start, nothing to catch up.
        return False

    def mark_turnover_done(self, now: datetime) -> None:
        """
        Record a completed turnover so sibling timers skip duplicating it.

        Args:
            now: Datetime at which the turnover finished.
        """
        self._last_actual_turnover = now
        self._last_midnight_check = now

    def update_check_time(self, now: datetime) -> None:
        """
        Seed the check timestamp once; subsequent calls are no-ops.

        Used to initialize the handler without triggering turnover logic.

        Args:
            now: Datetime to record as the first check time.
        """
        if self._last_midnight_check is None:
            self._last_midnight_check = now

    @property
    def last_turnover_time(self) -> datetime | None:
        """Timestamp of the last actual turnover, or None."""
        return self._last_actual_turnover

    @property
    def last_check_time(self) -> datetime | None:
        """Timestamp of the last midnight check, or None."""
        return self._last_midnight_check

View file

@ -1,58 +0,0 @@
"""
Period calculation utilities (sub-package for modular organization).
This package splits period calculation logic into focused modules:
- types: Type definitions and constants
- level_filtering: Interval-level filtering logic
- period_building: Period construction from intervals
- period_statistics: Statistics calculation
- period_overlap: Overlap resolution logic
- relaxation: Per-day relaxation strategy
- core: Main API orchestration
- outlier_filtering: Price spike detection and smoothing
All public APIs are re-exported for backwards compatibility.
"""
from __future__ import annotations
# Re-export main API functions
from .core import calculate_periods
# Re-export outlier filtering
from .outlier_filtering import filter_price_outliers
# Re-export relaxation
from .relaxation import calculate_periods_with_relaxation
# Re-export constants and types
from .types import (
INDENT_L0,
INDENT_L1,
INDENT_L2,
INDENT_L3,
INDENT_L4,
INDENT_L5,
TibberPricesIntervalCriteria,
TibberPricesPeriodConfig,
TibberPricesPeriodData,
TibberPricesPeriodStatistics,
TibberPricesThresholdConfig,
)
__all__ = [
"INDENT_L0",
"INDENT_L1",
"INDENT_L2",
"INDENT_L3",
"INDENT_L4",
"INDENT_L5",
"TibberPricesIntervalCriteria",
"TibberPricesPeriodConfig",
"TibberPricesPeriodData",
"TibberPricesPeriodStatistics",
"TibberPricesThresholdConfig",
"calculate_periods",
"calculate_periods_with_relaxation",
"filter_price_outliers",
]

View file

@ -1,247 +0,0 @@
"""Core period calculation API - main entry points."""
from __future__ import annotations
from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from custom_components.tibber_prices.coordinator.time_service import TibberPricesTimeService
from .types import TibberPricesPeriodConfig
from .outlier_filtering import (
filter_price_outliers,
)
from .period_building import (
add_interval_ends,
build_periods,
calculate_reference_prices,
extend_periods_across_midnight,
filter_periods_by_end_date,
filter_periods_by_min_length,
filter_superseded_periods,
split_intervals_by_day,
)
from .period_statistics import (
extract_period_summaries,
)
from .types import TibberPricesThresholdConfig
# Flex limits to prevent degenerate behavior (see docs/development/period-calculation-theory.md)
MAX_SAFE_FLEX = 0.50 # 50% - hard cap: above this, period detection becomes unreliable
MAX_OUTLIER_FLEX = 0.25 # 25% - cap for outlier filtering: above this, spike detection too permissive
def calculate_periods(
    all_prices: list[dict],
    *,
    config: TibberPricesPeriodConfig,
    time: TibberPricesTimeService,
) -> dict[str, Any]:
    """
    Calculate price periods (best or peak) from price data.

    This function identifies periods but does NOT store full interval data redundantly.
    It returns lightweight period summaries that reference the original price data.

    Steps:
        1. Split prices by day and calculate daily averages
        2. Calculate reference prices (min/max per day)
        2.5. Smooth isolated price outliers (period formation only)
        3. Build periods based on criteria
        4. Filter by minimum length
        5. Add interval ends
        6. Filter periods by end date
        7. Extract period summaries (start/end times, not full price data)
        8. Extend late-night periods across midnight
        9. Drop periods superseded by better tomorrow alternatives

    Args:
        all_prices: All price data points from yesterday/today/tomorrow.
        config: Period configuration containing reverse_sort, flex, min_distance_from_avg,
            min_period_length, threshold_low, and threshold_high.
        time: TibberPricesTimeService instance (required).

    Returns:
        Dict with:
        - periods: List of lightweight period summaries (start/end times only)
        - metadata: Config and statistics
        - reference_data: Daily min/max/avg for on-demand annotation
    """
    # Import logger at the start of function (local import keeps module import light)
    import logging  # noqa: PLC0415

    from .types import INDENT_L0  # noqa: PLC0415

    _LOGGER = logging.getLogger(__name__)  # noqa: N806

    # Extract config values
    reverse_sort = config.reverse_sort
    flex_raw = config.flex  # Already normalized to positive by get_period_config()
    min_distance_from_avg = config.min_distance_from_avg
    min_period_length = config.min_period_length
    threshold_low = config.threshold_low
    threshold_high = config.threshold_high

    # CRITICAL: Hard cap flex at 50% to prevent degenerate behavior
    # Above 50%, period detection becomes unreliable (too many intervals qualify)
    # NOTE: flex_raw is already positive from normalization in get_period_config()
    flex = flex_raw
    if flex_raw > MAX_SAFE_FLEX:
        flex = MAX_SAFE_FLEX
        _LOGGER.warning(
            "Flex %.1f%% exceeds maximum safe value! Capping at %.0f%%. "
            "Recommendation: Use 15-20%% with relaxation enabled, or 25-35%% without relaxation.",
            flex_raw * 100,
            MAX_SAFE_FLEX * 100,
        )

    if not all_prices:
        # Empty input: return the same result shape with empty contents so
        # callers never have to special-case missing data.
        return {
            "periods": [],
            "metadata": {
                "total_periods": 0,
                "config": {
                    "reverse_sort": reverse_sort,
                    "flex": flex,
                    "min_distance_from_avg": min_distance_from_avg,
                    "min_period_length": min_period_length,
                },
            },
            "reference_data": {
                "ref_prices": {},
                "avg_prices": {},
            },
        }

    # Ensure prices are sorted chronologically (periods assume ordered input)
    all_prices_sorted = sorted(all_prices, key=lambda p: p["startsAt"])

    # Step 1: Split by day and calculate averages
    intervals_by_day, avg_price_by_day = split_intervals_by_day(all_prices_sorted, time=time)

    # Step 2: Calculate reference prices (min or max per day)
    ref_prices = calculate_reference_prices(intervals_by_day, reverse_sort=reverse_sort)

    # Step 2.5: Filter price outliers (smoothing for period formation only)
    # This runs BEFORE period formation to prevent isolated price spikes
    # from breaking up otherwise continuous periods

    # CRITICAL: Cap flexibility for outlier filtering at 25%
    # High flex (>25%) makes outlier detection too permissive, accepting
    # unstable price contexts as "normal". This breaks period formation.
    # User's flex setting still applies to period criteria (in_flex check).
    # Import details logger locally (core.py imports logger locally in function)
    _LOGGER_DETAILS = logging.getLogger(__name__ + ".details")  # noqa: N806
    outlier_flex = min(abs(flex) * 100, MAX_OUTLIER_FLEX * 100)
    if abs(flex) * 100 > MAX_OUTLIER_FLEX * 100:
        _LOGGER_DETAILS.debug(
            "%sOutlier filtering: Using capped flex %.1f%% (user setting: %.1f%%)",
            INDENT_L0,
            outlier_flex,
            abs(flex) * 100,
        )
    all_prices_smoothed = filter_price_outliers(
        all_prices_sorted,
        outlier_flex,  # Use capped flex for outlier detection
        min_period_length,
    )

    # Step 3: Build periods
    price_context = {
        "ref_prices": ref_prices,
        "avg_prices": avg_price_by_day,
        "intervals_by_day": intervals_by_day,  # Needed for day volatility calculation
        "flex": flex,
        "min_distance_from_avg": min_distance_from_avg,
    }
    raw_periods = build_periods(
        all_prices_smoothed,  # Use smoothed prices for period formation
        price_context,
        reverse_sort=reverse_sort,
        level_filter=config.level_filter,
        gap_count=config.gap_count,
        time=time,
    )
    _LOGGER.debug(
        "%sAfter build_periods: %d raw periods found (flex=%.1f%%, level_filter=%s)",
        INDENT_L0,
        len(raw_periods),
        abs(flex) * 100,
        config.level_filter or "None",
    )

    # Step 4: Filter by minimum length
    raw_periods = filter_periods_by_min_length(raw_periods, min_period_length, time=time)
    _LOGGER.debug(
        "%sAfter filter_by_min_length (>= %d min): %d periods remain",
        INDENT_L0,
        min_period_length,
        len(raw_periods),
    )

    # Step 5: Add interval ends (mutates raw_periods in place)
    add_interval_ends(raw_periods, time=time)

    # Step 6: Filter periods by end date (keep periods ending yesterday or later)
    # This ensures coordinator cache contains yesterday/today/tomorrow periods
    # Sensors filter further for today+tomorrow, services can access all cached periods
    raw_periods = filter_periods_by_end_date(raw_periods, time=time)

    # Step 7: Extract lightweight period summaries (no full price data)
    # Note: Periods are filtered by end date to keep yesterday/today/tomorrow.
    # This preserves periods that started day-before-yesterday but end yesterday.
    thresholds = TibberPricesThresholdConfig(
        threshold_low=threshold_low,
        threshold_high=threshold_high,
        threshold_volatility_moderate=config.threshold_volatility_moderate,
        threshold_volatility_high=config.threshold_volatility_high,
        threshold_volatility_very_high=config.threshold_volatility_very_high,
        reverse_sort=reverse_sort,
    )
    period_summaries = extract_period_summaries(
        raw_periods,
        all_prices_sorted,
        price_context,
        thresholds,
        time=time,
    )

    # Step 8: Cross-day extension for late-night periods
    # If a best-price period ends near midnight and tomorrow has continued low prices,
    # extend the period across midnight to give users the full cheap window
    period_summaries = extend_periods_across_midnight(
        period_summaries,
        all_prices_sorted,
        price_context,
        time=time,
        reverse_sort=reverse_sort,
    )

    # Step 9: Filter superseded periods
    # When tomorrow data is available, late-night today periods that were found via
    # relaxation may be obsolete if tomorrow has significantly better alternatives
    period_summaries = filter_superseded_periods(
        period_summaries,
        time=time,
        reverse_sort=reverse_sort,
    )

    return {
        "periods": period_summaries,  # Lightweight summaries only
        "metadata": {
            "total_periods": len(period_summaries),
            "config": {
                "reverse_sort": reverse_sort,
                "flex": flex,
                "min_distance_from_avg": min_distance_from_avg,
                "min_period_length": min_period_length,
            },
        },
        "reference_data": {
            # Date keys serialized to ISO strings for cache friendliness
            "ref_prices": {k.isoformat(): v for k, v in ref_prices.items()},
            "avg_prices": {k.isoformat(): v for k, v in avg_price_by_day.items()},
        },
    }

View file

@ -1,208 +0,0 @@
"""
Interval-level filtering logic for period calculation.
Key Concepts:
- Flex Filter: Limits price distance from daily min/max
- Min Distance Filter: Ensures prices are significantly different from average
- Dynamic Scaling: Min_Distance reduces at high Flex to prevent conflicts
See docs/development/period-calculation-theory.md for detailed explanation.
"""
from __future__ import annotations
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from .types import TibberPricesIntervalCriteria
from custom_components.tibber_prices.const import PRICE_LEVEL_MAPPING
# Module-local log indentation (each module starts at level 0)
INDENT_L0 = "" # Entry point / main function
# Flex threshold for min_distance scaling
FLEX_SCALING_THRESHOLD = 0.20 # 20% - start adjusting min_distance
SCALE_FACTOR_WARNING_THRESHOLD = 0.8 # Log when reduction > 20%
def check_level_with_gap_tolerance(
    interval_level: int,
    level_order: int,
    consecutive_gaps: int,
    gap_count: int,
    *,
    reverse_sort: bool,
) -> tuple[bool, bool, int]:
    """
    Check whether an interval satisfies the level requirement, allowing gaps.

    A "gap" is an interval whose level deviates from the requirement by exactly
    one step; up to ``gap_count`` consecutive gaps may be tolerated inside a
    period before it is broken.

    Args:
        interval_level: Level value of the current interval (from PRICE_LEVEL_MAPPING)
        level_order: Required level value
        consecutive_gaps: Current count of consecutive gap intervals
        gap_count: Maximum allowed consecutive gap intervals
        reverse_sort: True for peak price, False for best price

    Returns:
        Tuple of (meets_level, is_gap, new_consecutive_gaps):
        - meets_level: interval qualifies (exact match or tolerated gap)
        - is_gap: interval deviated by exactly one step and was tolerated
        - new_consecutive_gaps: updated gap streak counter
    """
    if reverse_sort:
        # Peak mode: qualify at or above the required level; a gap sits one step below.
        qualifies = interval_level >= level_order
        one_step_off = interval_level == level_order - 1
    else:
        # Best mode: qualify at or below the required level; a gap sits one step above.
        qualifies = interval_level <= level_order
        one_step_off = interval_level == level_order + 1

    if qualifies:
        # Exact qualification resets the gap streak.
        return True, False, 0

    if one_step_off and consecutive_gaps < gap_count:
        # Tolerated gap: accept the interval but extend the streak counter.
        return True, True, consecutive_gaps + 1

    # Hard miss (or gap budget exhausted): reject and reset the streak.
    return False, False, 0
def apply_level_filter(
    price_data: dict,
    level_order: int | None,
    consecutive_gaps: int,
    gap_count: int,
    *,
    reverse_sort: bool,
) -> tuple[bool, int, bool]:
    """
    Apply the level filter to a single interval.

    Args:
        price_data: Price data dict with a "level" key
        level_order: Required level value (from PRICE_LEVEL_MAPPING) or None if disabled
        consecutive_gaps: Current count of consecutive gap intervals
        gap_count: Maximum allowed consecutive gap intervals
        reverse_sort: True for peak price, False for best price

    Returns:
        Tuple of (meets_level, new_consecutive_gaps, is_gap)
    """
    # Filter disabled: every interval passes and the gap streak is untouched.
    if level_order is None:
        return True, consecutive_gaps, False

    # Missing level falls back to "NORMAL"; unmapped names fall back to 0.
    raw_level = price_data.get("level", "NORMAL")
    numeric_level = PRICE_LEVEL_MAPPING.get(raw_level, 0)

    meets_level, is_gap, updated_gaps = check_level_with_gap_tolerance(
        numeric_level,
        level_order,
        consecutive_gaps,
        gap_count,
        reverse_sort=reverse_sort,
    )
    # NOTE: return order is (meets, gap_counter, is_gap) — intentionally
    # different from the helper's (meets, is_gap, gap_counter).
    return meets_level, updated_gaps, is_gap
def check_interval_criteria(
    price: float,
    criteria: TibberPricesIntervalCriteria,
) -> tuple[bool, bool]:
    """
    Check whether an interval satisfies the flex and minimum-distance criteria.

    CRITICAL: operates on NORMALIZED values (always positive):
    - criteria.flex: e.g. 0.20 for 20%
    - criteria.min_distance_from_avg: e.g. 5.0 for 5%
    - criteria.reverse_sort: direction switch (True=Peak, False=Best)

    Args:
        price: Interval price
        criteria: Interval criteria (ref_price, avg_price, flex, etc.)

    Returns:
        Tuple of (in_flex, meets_min_distance)
    """
    # Defensive normalization: calculations below assume positive magnitudes.
    flex_abs = abs(criteria.flex)
    min_distance_abs = abs(criteria.min_distance_from_avg)

    # --- FLEX FILTER -------------------------------------------------------
    # Reference price is the daily MAXIMUM (peak mode) or MINIMUM (best mode).
    # The accepted band extends from the reference toward the average:
    #   Peak: [max - max*flex, max]   Best: [min, min + min*flex]
    # Example (flex=20%): peak max=30 → accept ≥ 24; best min=10 → accept ≤ 12.
    if criteria.ref_price == 0:
        # Zero reference makes the flex band collapse: only price == 0 passes.
        in_flex = price == 0
    else:
        band_width = abs(criteria.ref_price) * flex_abs
        if criteria.reverse_sort:
            # Peak mode: price must sit at or above (ref - band).
            in_flex = price >= criteria.ref_price - band_width
        else:
            # Best mode: price must sit at or below (ref + band). All low
            # prices under the threshold are accepted, regardless of whether
            # they come before or after the daily minimum chronologically.
            in_flex = price <= criteria.ref_price + band_width

    # --- MIN_DISTANCE FILTER ----------------------------------------------
    # High flex (> 20%) can conflict with a fixed min_distance, so the
    # requirement is scaled down proportionally as flex grows:
    #   20% flex → factor 1.0, 40% → 0.5, 50% → 0.25 (floor).
    adjusted_min_distance = min_distance_abs
    if flex_abs > FLEX_SCALING_THRESHOLD:
        excess = flex_abs - 0.20
        scale_factor = max(0.25, 1.0 - (excess * 2.5))
        adjusted_min_distance = min_distance_abs * scale_factor
        if scale_factor < SCALE_FACTOR_WARNING_THRESHOLD:
            # Only log when the reduction is significant (> 20%).
            import logging  # noqa: PLC0415

            details_logger = logging.getLogger(f"{__name__}.details")
            details_logger.debug(
                "High flex %.1f%% detected: Reducing min_distance %.1f%%%.1f%% (scale %.2f)",
                flex_abs * 100,
                min_distance_abs,
                adjusted_min_distance,
                scale_factor,
            )

    # Threshold relative to the daily average:
    #   Peak: price must be ABOVE avg*(1 + distance/100)
    #   Best: price must be BELOW avg*(1 - distance/100)
    if criteria.reverse_sort:
        meets_min_distance = price >= criteria.avg_price * (1 + adjusted_min_distance / 100)
    else:
        meets_min_distance = price <= criteria.avg_price * (1 - adjusted_min_distance / 100)

    return in_flex, meets_min_distance

View file

@ -1,559 +0,0 @@
"""
Price outlier filtering for period calculation.
This module handles the detection and smoothing of single-interval price spikes
that would otherwise break up continuous periods. Outliers are only smoothed for
period formation - original prices are preserved for all statistics.
Uses statistical methods:
- Linear regression for trend-based spike detection
- Standard deviation for confidence thresholds
- Symmetry checking to avoid smoothing legitimate price shifts
- Zigzag detection with relative volatility for cluster rejection
"""
from __future__ import annotations
import logging
from datetime import datetime
from typing import NamedTuple
from custom_components.tibber_prices.utils.price import calculate_coefficient_of_variation
_LOGGER = logging.getLogger(__name__)
_LOGGER_DETAILS = logging.getLogger(__name__ + ".details")
# Outlier filtering constants
MIN_CONTEXT_SIZE = 3 # Minimum intervals needed before/after for analysis
VOLATILITY_THRESHOLD = 0.05 # 5% max relative std dev for zigzag detection
SYMMETRY_THRESHOLD = 1.5 # Max std dev difference for symmetric spike
RELATIVE_VOLATILITY_THRESHOLD = 2.0 # Window volatility vs context (cluster detection)
ASYMMETRY_TAIL_WINDOW = 6 # Skip asymmetry check for last ~1.5h (6 intervals) of available data
ZIGZAG_TAIL_WINDOW = 6 # Skip zigzag/cluster detection for last ~1.5h (6 intervals)
EXTREMES_PROTECTION_TOLERANCE = 0.001 # Protect prices within 0.1% of daily min/max from smoothing
# Adaptive confidence level constants
# Uses coefficient of variation (CV) from utils/price.py for consistency with volatility sensors
# On flat days (low CV), we're more conservative (higher confidence = fewer smoothed)
# On volatile days (high CV), we're more aggressive (lower confidence = more smoothed)
CONFIDENCE_LEVEL_MIN = 1.5 # Minimum confidence (volatile days: smooth more aggressively)
CONFIDENCE_LEVEL_MAX = 2.5 # Maximum confidence (flat days: smooth more conservatively)
CONFIDENCE_LEVEL_DEFAULT = 2.0 # Default: 95% confidence interval (2 std devs)
# CV thresholds for adaptive confidence (align with volatility sensor defaults)
# These are in percentage points (e.g., 10.0 = 10% CV)
DAILY_CV_LOW = 10.0 # ≤10% CV = flat day (use max confidence)
DAILY_CV_HIGH = 30.0 # ≥30% CV = volatile day (use min confidence)
# Module-local log indentation (each module starts at level 0)
INDENT_L0 = "" # All logs in this module (no indentation needed)
class TibberPricesSpikeCandidateContext(NamedTuple):
    """Container for spike validation parameters."""

    # The interval currently suspected of being a price spike.
    current: dict
    # Intervals immediately before the candidate (averaged for symmetry checks).
    context_before: list[dict]
    # Intervals immediately after the candidate (averaged for symmetry checks).
    context_after: list[dict]
    # Flexibility ratio used during validation — presumably the (capped) flex
    # as a fraction; TODO confirm units against the caller.
    flexibility_ratio: float
    # Intervals left until the end of available data; tail-window checks are
    # skipped when this drops below the configured tail thresholds.
    remaining_intervals: int
    # Statistical measures of the context (keys such as mean/std_dev/trend_slope
    # as produced by _calculate_statistics).
    stats: dict[str, float]
    # Window of intervals analyzed for zigzag/cluster patterns.
    analysis_window: list[dict]
def _should_skip_tail_check(
    remaining_intervals: int,
    tail_window: int,
    check_name: str,
    interval_label: str,
) -> bool:
    """
    Return True when the remaining data falls inside the tail window, logging why.

    Near the end of the known price data there is no stable "after" context,
    so validation checks that need it should be skipped by the caller.

    Args:
        remaining_intervals: Intervals left until the end of available data.
        tail_window: Size of the tail region in intervals.
        check_name: Name of the check being skipped (for the log message).
        interval_label: Human-readable label of the spike interval (for the log).
    """
    if remaining_intervals >= tail_window:
        return False

    _LOGGER_DETAILS.debug(
        "%sSpike at %s: Skipping %s check (only %d intervals remaining)",
        INDENT_L0,
        interval_label,
        check_name,
        remaining_intervals,
    )
    return True
def _calculate_statistics(prices: list[float]) -> dict[str, float]:
"""
Calculate statistical measures for price context.
Uses linear regression to detect trends, enabling accurate spike detection
even when prices are gradually rising or falling.
Args:
prices: List of price values
Returns:
Dictionary with:
- mean: Average price
- std_dev: Standard deviation
- trend_slope: Linear regression slope (price change per interval)
"""
n = len(prices)
mean = sum(prices) / n
# Standard deviation
variance = sum((p - mean) ** 2 for p in prices) / n
std_dev = variance**0.5
# Linear trend (least squares regression)
# y = mx + b, we calculate m (slope)
x_values = list(range(n)) # 0, 1, 2, ...
x_mean = sum(x_values) / n
numerator = sum((x - x_mean) * (y - mean) for x, y in zip(x_values, prices, strict=True))
denominator = sum((x - x_mean) ** 2 for x in x_values)
trend_slope = numerator / denominator if denominator != 0 else 0.0
return {
"mean": mean,
"std_dev": std_dev,
"trend_slope": trend_slope,
}
def _check_symmetry(avg_before: float, avg_after: float, std_dev: float) -> bool:
    """
    Decide whether a spike is symmetric, i.e. prices return to baseline.

    A symmetric spike has similar averages before and after it. Asymmetric
    spikes may be legitimate price-level shifts and must not be smoothed.

    Args:
        avg_before: Average price before the spike
        avg_after: Average price after the spike
        std_dev: Standard deviation of the context prices

    Returns:
        True if symmetric (smooth it), False if asymmetric (keep it)
    """
    # Baselines must agree to within SYMMETRY_THRESHOLD standard deviations.
    baseline_shift = abs(avg_after - avg_before)
    return baseline_shift <= SYMMETRY_THRESHOLD * std_dev
def _detect_zigzag_pattern(window: list[dict], context_std_dev: float) -> bool:
    """
    Flag windows that look like zigzag noise or clustered spikes.

    Three rejection criteria are applied in order:
    1. Absolute volatility: window std-dev too large relative to its mean.
    2. Direction changes: too many up/down reversals inside the window.
    3. Relative volatility: window noticeably noisier than the surrounding
       context - this implicitly catches spike clusters without needing an
       explicit multi-pass detection.

    Args:
        window: Price intervals under inspection (dicts with a "total" key)
        context_std_dev: Standard deviation of the surrounding context

    Returns:
        True when zigzag/cluster is detected (smoothing should be rejected)
    """
    values = [entry["total"] for entry in window]
    if len(values) < MIN_CONTEXT_SIZE:
        return False

    mean_value = sum(values) / len(values)

    # Criterion 1: overall volatility of the window itself
    window_std = (sum((v - mean_value) ** 2 for v in values) / len(values)) ** 0.5
    if window_std / mean_value > VOLATILITY_THRESHOLD:
        return True

    # Criterion 2: count sign flips of consecutive slopes (up-down-up wiggles)
    reversals = sum(
        1
        for idx in range(1, len(values) - 1)
        if (values[idx] - values[idx - 1]) * (values[idx + 1] - values[idx]) < 0
    )
    if reversals > len(values) / 3:
        return True

    # Criterion 3: window much noisier than context => likely a spike cluster
    return window_std > RELATIVE_VOLATILITY_THRESHOLD * context_std_dev
def _validate_spike_candidate(
    candidate: TibberPricesSpikeCandidateContext,
) -> bool:
    """Validate a spike candidate via stability, symmetry, and zigzag checks."""
    label = candidate.current.get("startsAt", "unknown interval")
    before = candidate.context_before
    after = candidate.context_after
    avg_before = sum(entry["total"] for entry in before) / len(before)
    avg_after = sum(entry["total"] for entry in after) / len(after)

    # Stability: the surrounding context itself must not shift too much,
    # otherwise the deviation is a level change rather than a spike
    drift = abs(avg_after - avg_before) / avg_before if avg_before > 0 else 0
    if drift > candidate.flexibility_ratio:
        _LOGGER_DETAILS.debug(
            "%sInterval %s: Context unstable (%.1f%% change) - not a spike",
            INDENT_L0,
            label,
            drift * 100,
        )
        return False

    # Symmetry: prices must return to baseline (skipped near the data tail)
    skip_asymmetry = _should_skip_tail_check(
        candidate.remaining_intervals,
        ASYMMETRY_TAIL_WINDOW,
        "asymmetry",
        label,
    )
    if not skip_asymmetry and not _check_symmetry(avg_before, avg_after, candidate.stats["std_dev"]):
        _LOGGER_DETAILS.debug(
            "%sSpike at %s rejected: Asymmetric (before=%.2f, after=%.2f ct/kWh)",
            INDENT_L0,
            label,
            avg_before * 100,
            avg_after * 100,
        )
        return False

    # Zigzag/cluster: accepted outright near the data tail, otherwise
    # reject candidates sitting in noisy or clustered regions
    if _should_skip_tail_check(
        candidate.remaining_intervals,
        ZIGZAG_TAIL_WINDOW,
        "zigzag/cluster",
        label,
    ):
        return True
    if _detect_zigzag_pattern(candidate.analysis_window, candidate.stats["std_dev"]):
        _LOGGER_DETAILS.debug(
            "%sSpike at %s rejected: Zigzag/cluster pattern detected",
            INDENT_L0,
            label,
        )
        return False

    return True
def _calculate_daily_extremes(intervals: list[dict]) -> dict[str, tuple[float, float]]:
"""
Calculate daily min/max prices for each day in the interval list.
These extremes are used to protect reference prices from being smoothed.
The daily minimum is the reference for best_price periods, and the daily
maximum is the reference for peak_price periods - smoothing these would
break period detection.
Args:
intervals: List of price intervals with 'startsAt' and 'total' keys
Returns:
Dict mapping date strings to (min_price, max_price) tuples
"""
daily_prices: dict[str, list[float]] = {}
for interval in intervals:
starts_at = interval.get("startsAt")
if starts_at is None:
continue
# Handle both datetime objects and ISO strings
dt = datetime.fromisoformat(starts_at) if isinstance(starts_at, str) else starts_at
date_key = dt.strftime("%Y-%m-%d")
price = float(interval["total"])
daily_prices.setdefault(date_key, []).append(price)
# Calculate min/max for each day
return {date_key: (min(prices), max(prices)) for date_key, prices in daily_prices.items()}
def _calculate_daily_cv(intervals: list[dict]) -> dict[str, float]:
    """
    Compute the coefficient of variation (CV) of prices per calendar day.

    CV = (std_dev / mean) * 100, the same formula the volatility sensors
    use, so smoothing and sensors stay consistent. The value steers the
    adaptive confidence level for outlier detection: flat days (low CV)
    get conservative smoothing with fewer false positives, volatile days
    (high CV) get aggressive smoothing that catches more real outliers.

    Args:
        intervals: Price intervals with 'startsAt' and 'total' keys

    Returns:
        Mapping of "YYYY-MM-DD" strings to CV percentages (0.0 when the
        shared helper cannot compute a CV)
    """
    per_day: dict[str, list[float]] = {}
    for entry in intervals:
        raw_start = entry.get("startsAt")
        if raw_start is None:
            continue
        moment = datetime.fromisoformat(raw_start) if isinstance(raw_start, str) else raw_start
        per_day.setdefault(moment.strftime("%Y-%m-%d"), []).append(float(entry["total"]))

    # Delegate the CV math to the shared helper from utils/price.py
    summary: dict[str, float] = {}
    for day, values in per_day.items():
        cv = calculate_coefficient_of_variation(values)
        summary[day] = 0.0 if cv is None else cv
    return summary
def _get_adaptive_confidence_level(
    interval: dict,
    daily_cv: dict[str, float],
) -> float:
    """
    Map a day's coefficient of variation to a smoothing confidence level.

    Low CV (flat day) yields the maximum confidence multiplier
    (conservative, few intervals smoothed); high CV (volatile day) yields
    the minimum multiplier (aggressive); values in between are linearly
    interpolated. Uses the same CV metric as the volatility sensors.

    Args:
        interval: Price interval dict with a 'startsAt' key
        daily_cv: Per-day CV mapping from _calculate_daily_cv()

    Returns:
        Confidence multiplier applied to the std-dev tolerance
    """
    raw_start = interval.get("startsAt")
    if raw_start is None:
        return CONFIDENCE_LEVEL_DEFAULT

    moment = datetime.fromisoformat(raw_start) if isinstance(raw_start, str) else raw_start
    cv = daily_cv.get(moment.strftime("%Y-%m-%d"), 0.0)

    # Clamp at the extremes, interpolate linearly in between:
    # rising CV moves confidence from MAX (conservative) toward MIN (aggressive)
    if cv <= DAILY_CV_LOW:
        return CONFIDENCE_LEVEL_MAX
    if cv >= DAILY_CV_HIGH:
        return CONFIDENCE_LEVEL_MIN
    fraction = (cv - DAILY_CV_LOW) / (DAILY_CV_HIGH - DAILY_CV_LOW)
    return CONFIDENCE_LEVEL_MAX - fraction * (CONFIDENCE_LEVEL_MAX - CONFIDENCE_LEVEL_MIN)
def _is_daily_extreme(
    interval: dict,
    daily_extremes: dict[str, tuple[float, float]],
    tolerance: float = EXTREMES_PROTECTION_TOLERANCE,
) -> bool:
    """
    Check if an interval's price is at or very near a daily extreme.

    Prices at daily extremes should never be smoothed because:
    - Daily minimum is the reference for best_price period detection
    - Daily maximum is the reference for peak_price period detection
    - Smoothing these would cause periods to miss their most important intervals

    Args:
        interval: Price interval dict with 'startsAt' and 'total' keys
        daily_extremes: Dict from _calculate_daily_extremes()
        tolerance: Relative tolerance for matching (default 0.1%)

    Returns:
        True if the price is at or very near a daily min or max
    """
    starts_at = interval.get("startsAt")
    if starts_at is None:
        return False

    # Handle both datetime objects and ISO strings
    dt = datetime.fromisoformat(starts_at) if isinstance(starts_at, str) else starts_at
    date_key = dt.strftime("%Y-%m-%d")

    if date_key not in daily_extremes:
        return False

    price = float(interval["total"])
    daily_min, daily_max = daily_extremes[date_key]

    # Relative tolerance band around each extreme:
    # |price - extreme| <= |extreme| * tolerance.
    # abs() keeps the band oriented correctly for zero or negative spot prices:
    # the previous form `daily_min * (1 + tolerance)` moved the threshold AWAY
    # from the price when daily_min < 0, so on negative-price days even the
    # exact daily minimum failed the check and lost its protection.
    min_threshold = daily_min + abs(daily_min) * tolerance
    max_threshold = daily_max - abs(daily_max) * tolerance
    return price <= min_threshold or price >= max_threshold
def filter_price_outliers(
    intervals: list[dict],
    flexibility_pct: float,
    _min_duration: int,  # Unused, kept for API compatibility
) -> list[dict]:
    """
    Filter single-interval price spikes within stable sequences.

    Uses statistical methods to detect and smooth isolated spikes:
    - Linear regression to predict expected prices (handles trends)
    - Standard deviation for confidence intervals (adapts to volatility)
    - Symmetry checking (avoids smoothing legitimate price shifts)
    - Zigzag detection (rejects volatile areas and spike clusters)

    This runs BEFORE period formation to smooth out brief anomalies that would
    otherwise break continuous periods. Original prices are preserved for all
    statistics.

    Args:
        intervals: Price intervals to filter (typically 96 for yesterday/today/tomorrow)
        flexibility_pct: User's flexibility setting (derives tolerance)
        _min_duration: Minimum period duration (unused, kept for API compatibility)

    Returns:
        Intervals with smoothed prices (marked with _smoothed flag and the
        original value preserved under _original_price)
    """
    # Convert percentage to ratio once for all comparisons (e.g., 15.0 → 0.15)
    flexibility_ratio = flexibility_pct / 100

    # Calculate daily extremes to protect reference prices from smoothing
    # Daily min is the reference for best_price, daily max for peak_price
    daily_extremes = _calculate_daily_extremes(intervals)

    # Calculate daily coefficient of variation (CV) for adaptive confidence levels
    # Uses same CV calculation as volatility sensors for consistency
    # Flat days → conservative smoothing, volatile days → aggressive smoothing
    daily_cv = _calculate_daily_cv(intervals)

    # Log CV info for debugging (CV is in percentage points, e.g., 15.0 = 15%)
    cv_info = ", ".join(f"{date}: {cv:.1f}%" for date, cv in sorted(daily_cv.items()))
    _LOGGER.info(
        "%sSmoothing price outliers: %d intervals, flex=%.1f%%, daily CV: %s",
        INDENT_L0,
        len(intervals),
        flexibility_pct,
        cv_info,
    )

    protected_count = 0
    result = []
    smoothed_count = 0

    for i, current in enumerate(intervals):
        current_price = current["total"]

        # CRITICAL: Never smooth daily extremes - they are the reference prices!
        # Smoothing the daily min would break best_price period detection,
        # smoothing the daily max would break peak_price period detection.
        if _is_daily_extreme(current, daily_extremes):
            result.append(current)
            protected_count += 1
            _LOGGER_DETAILS.debug(
                "%sProtected daily extreme at %s: %.2f ct/kWh (not smoothed)",
                INDENT_L0,
                current.get("startsAt", f"index {i}"),
                current_price * 100,
            )
            continue

        # Get context windows (3 intervals before and after)
        context_before = intervals[max(0, i - MIN_CONTEXT_SIZE) : i]
        context_after = intervals[i + 1 : min(len(intervals), i + 1 + MIN_CONTEXT_SIZE)]

        # Need sufficient context on both sides
        if len(context_before) < MIN_CONTEXT_SIZE or len(context_after) < MIN_CONTEXT_SIZE:
            result.append(current)
            continue

        # Calculate statistics for combined context (excluding current interval)
        context_prices = [x["total"] for x in context_before + context_after]
        stats = _calculate_statistics(context_prices)

        # Predict expected price at current position using linear trend
        # Position offset: current is at index len(context_before) in the combined window
        offset_position = len(context_before)
        expected_price = stats["mean"] + (stats["trend_slope"] * offset_position)

        # Calculate how far current price deviates from expected
        residual = abs(current_price - expected_price)

        # Adaptive confidence level based on daily CV:
        # - Flat days (low CV): higher confidence (2.5) → fewer false positives
        # - Volatile days (high CV): lower confidence (1.5) → catch more real spikes
        confidence_level = _get_adaptive_confidence_level(current, daily_cv)
        tolerance = stats["std_dev"] * confidence_level

        # Not a spike if within tolerance
        if residual <= tolerance:
            result.append(current)
            continue

        # SPIKE CANDIDATE DETECTED - Now validate
        remaining_intervals = len(intervals) - (i + 1)
        analysis_window = [*context_before[-2:], current, *context_after[:2]]
        candidate_context = TibberPricesSpikeCandidateContext(
            current=current,
            context_before=context_before,
            context_after=context_after,
            flexibility_ratio=flexibility_ratio,
            remaining_intervals=remaining_intervals,
            stats=stats,
            analysis_window=analysis_window,
        )
        if not _validate_spike_candidate(candidate_context):
            result.append(current)
            continue

        # ALL CHECKS PASSED - Smooth the spike
        smoothed = current.copy()
        smoothed["total"] = expected_price  # Use trend-based prediction
        smoothed["_smoothed"] = True
        smoothed["_original_price"] = current_price
        result.append(smoothed)
        smoothed_count += 1

        # BUGFIX: the old/new prices were run together ("%.2f%.2f"); restore the
        # separator that was lost (mojibake'd arrow) so the log is readable
        _LOGGER_DETAILS.debug(
            "%sSmoothed spike at %s: %.2f → %.2f ct/kWh (residual: %.2f, tolerance: %.2f, confidence: %.2f)",
            INDENT_L0,
            current.get("startsAt", f"index {i}"),
            current_price * 100,
            expected_price * 100,
            residual * 100,
            tolerance * 100,
            confidence_level,
        )

    if smoothed_count > 0 or protected_count > 0:
        _LOGGER.info(
            "%sPrice outlier smoothing complete: %d smoothed, %d protected (daily extremes)",
            INDENT_L0,
            smoothed_count,
            protected_count,
        )

    return result

View file

@ -1,707 +0,0 @@
"""Period building and basic filtering logic."""
from __future__ import annotations
import logging
from datetime import date, datetime, timedelta
from typing import TYPE_CHECKING, Any
from custom_components.tibber_prices.const import PRICE_LEVEL_MAPPING
if TYPE_CHECKING:
from custom_components.tibber_prices.coordinator.time_service import TibberPricesTimeService
from .level_filtering import (
apply_level_filter,
check_interval_criteria,
)
from .types import TibberPricesIntervalCriteria
_LOGGER = logging.getLogger(__name__)
_LOGGER_DETAILS = logging.getLogger(__name__ + ".details")
# Module-local log indentation (each module starts at level 0)
INDENT_L0 = "" # Entry point / main function
def split_intervals_by_day(
    all_prices: list[dict], *, time: TibberPricesTimeService
) -> tuple[dict[date, list[dict]], dict[date, float]]:
    """Group price intervals by calendar day and compute each day's mean price."""
    grouped: dict[date, list[dict]] = {}
    daily_means: dict[date, float] = {}

    for entry in all_prices:
        moment = time.get_interval_time(entry)
        if moment is None:
            # Intervals without a resolvable timestamp cannot be assigned to a day
            continue
        grouped.setdefault(moment.date(), []).append(entry)

    for day, entries in grouped.items():
        daily_means[day] = sum(float(item["total"]) for item in entries) / len(entries)

    return grouped, daily_means
def calculate_reference_prices(intervals_by_day: dict[date, list[dict]], *, reverse_sort: bool) -> dict[date, float]:
    """Per day, pick the reference price: daily max for peak mode, daily min for best mode."""
    pick = max if reverse_sort else min
    return {day: pick(float(entry["total"]) for entry in entries) for day, entries in intervals_by_day.items()}
def build_periods(  # noqa: PLR0913, PLR0915, PLR0912 - Complex period building logic requires many arguments, statements, and branches
    all_prices: list[dict],
    price_context: dict[str, Any],
    *,
    reverse_sort: bool,
    level_filter: str | None = None,
    gap_count: int = 0,
    time: TibberPricesTimeService,
) -> list[list[dict]]:
    """
    Build periods, allowing periods to cross midnight (day boundary).

    Periods can span multiple days. Each interval is evaluated against the reference
    price (min/max) and average price of its own day. This ensures fair filtering
    criteria even when periods cross midnight, where prices can jump significantly
    due to different forecasting uncertainty (prices at day end vs. day start).

    Args:
        all_prices: All price data points
        price_context: Dict with ref_prices, avg_prices, flex, min_distance_from_avg
        reverse_sort: True for peak price (high prices), False for best price (low prices)
        level_filter: Level filter string ("cheap", "expensive", "any", None)
        gap_count: Number of allowed consecutive intervals deviating by exactly 1 level step
        time: TibberPricesTimeService instance (required)

    Returns:
        List of periods; each period is a list of interval dicts carrying
        time/price metadata and smoothing/gap bookkeeping flags
    """
    ref_prices = price_context["ref_prices"]
    avg_prices = price_context["avg_prices"]
    flex = price_context["flex"]
    min_distance_from_avg = price_context["min_distance_from_avg"]

    # Calculate level_order if level_filter is active
    level_order = None
    level_filter_active = False
    if level_filter and level_filter.lower() != "any":
        level_order = PRICE_LEVEL_MAPPING.get(level_filter.upper(), 0)
        level_filter_active = True
        # BUGFIX: the direction indicator had degenerated into two empty strings
        # (mojibake'd comparison symbols), leaving the log message without the
        # "%s" operand. Peak mode requires the interval level at or above the
        # filter level; best mode requires it at or below.
        filter_direction = ">=" if reverse_sort else "<="
        gap_info = f", gap_tolerance={gap_count}" if gap_count > 0 else ""
        _LOGGER_DETAILS.debug(
            "%sLevel filter active: %s (order %s, require interval level %s filter level%s)",
            INDENT_L0,
            level_filter.upper(),
            level_order,
            filter_direction,
            gap_info,
        )
    else:
        status = "RELAXED to ANY" if (level_filter and level_filter.lower() == "any") else "DISABLED (not configured)"
        _LOGGER_DETAILS.debug("%sLevel filter: %s (accepting all levels)", INDENT_L0, status)

    periods: list[list[dict]] = []
    current_period: list[dict] = []
    consecutive_gaps = 0  # Track consecutive intervals that deviate by 1 level step
    intervals_checked = 0
    intervals_filtered_by_level = 0
    intervals_filtered_by_flex = 0
    intervals_filtered_by_min_distance = 0

    for price_data in all_prices:
        starts_at = time.get_interval_time(price_data)
        if starts_at is None:
            continue
        date_key = starts_at.date()

        # Use smoothed price for criteria checks (flex/distance)
        # but preserve original price for period data
        price_for_criteria = float(price_data["total"])  # Smoothed if this interval was an outlier
        price_original = float(price_data.get("_original_price", price_data["total"]))

        intervals_checked += 1

        # CRITICAL: Always use reference price from the interval's own day
        # Each interval must meet the criteria of its own day, not the period start day.
        # This ensures fair filtering even when periods cross midnight, where prices
        # can jump significantly (last intervals of a day have more risk buffer than
        # first intervals of next day, as they're set with different uncertainty levels).
        ref_date = date_key

        # Check flex and minimum distance criteria (using smoothed price and interval's own day reference)
        criteria = TibberPricesIntervalCriteria(
            ref_price=ref_prices[ref_date],
            avg_price=avg_prices[ref_date],
            flex=flex,
            min_distance_from_avg=min_distance_from_avg,
            reverse_sort=reverse_sort,
        )
        in_flex, meets_min_distance = check_interval_criteria(price_for_criteria, criteria)

        # Track why intervals are filtered
        if not in_flex:
            intervals_filtered_by_flex += 1
        if not meets_min_distance:
            intervals_filtered_by_min_distance += 1

        # If this interval was smoothed, check if smoothing actually made a difference
        smoothing_was_impactful = False
        if price_data.get("_smoothed", False):
            # Check if original price would have passed the same criteria
            in_flex_original, meets_min_distance_original = check_interval_criteria(price_original, criteria)
            # Smoothing was impactful if original would have failed but smoothed passed
            smoothing_was_impactful = (in_flex and meets_min_distance) and not (
                in_flex_original and meets_min_distance_original
            )

        # Level filter: Check if interval meets level requirement with gap tolerance
        meets_level, consecutive_gaps, is_level_gap = apply_level_filter(
            price_data, level_order, consecutive_gaps, gap_count, reverse_sort=reverse_sort
        )
        if not meets_level:
            intervals_filtered_by_level += 1

        # Add to period if all criteria are met
        if in_flex and meets_min_distance and meets_level:
            current_period.append(
                {
                    "interval_hour": starts_at.hour,
                    "interval_minute": starts_at.minute,
                    "interval_time": f"{starts_at.hour:02d}:{starts_at.minute:02d}",
                    "price": price_original,  # Use original price in period data
                    "interval_start": starts_at,
                    # Only True if smoothing changed whether the interval qualified for period inclusion
                    "smoothing_was_impactful": smoothing_was_impactful,
                    "is_level_gap": is_level_gap,  # Track if kept due to level gap tolerance
                }
            )
        elif current_period:
            # Criteria no longer met, end current period
            periods.append(current_period)
            current_period = []
            consecutive_gaps = 0  # Reset gap counter

    # Add final period if exists
    if current_period:
        periods.append(current_period)

    # Log detailed filter statistics
    if intervals_checked > 0:
        _LOGGER_DETAILS.debug(
            "%sFilter statistics: %d intervals checked",
            INDENT_L0,
            intervals_checked,
        )
        if intervals_filtered_by_flex > 0:
            flex_pct = (intervals_filtered_by_flex / intervals_checked) * 100
            _LOGGER_DETAILS.debug(
                "%s Filtered by FLEX (price too far from ref): %d/%d (%.1f%%)",
                INDENT_L0,
                intervals_filtered_by_flex,
                intervals_checked,
                flex_pct,
            )
        if intervals_filtered_by_min_distance > 0:
            distance_pct = (intervals_filtered_by_min_distance / intervals_checked) * 100
            _LOGGER_DETAILS.debug(
                "%s Filtered by MIN_DISTANCE (price too close to avg): %d/%d (%.1f%%)",
                INDENT_L0,
                intervals_filtered_by_min_distance,
                intervals_checked,
                distance_pct,
            )
        if level_filter_active and intervals_filtered_by_level > 0:
            level_pct = (intervals_filtered_by_level / intervals_checked) * 100
            _LOGGER_DETAILS.debug(
                "%s Filtered by LEVEL (wrong price level): %d/%d (%.1f%%)",
                INDENT_L0,
                intervals_filtered_by_level,
                intervals_checked,
                level_pct,
            )

    return periods
def filter_periods_by_min_length(
    periods: list[list[dict]], min_period_length: int, *, time: TibberPricesTimeService
) -> list[list[dict]]:
    """Keep only periods spanning at least min_period_length minutes worth of intervals."""
    required = time.minutes_to_intervals(min_period_length)
    kept: list[list[dict]] = []
    for period in periods:
        if len(period) >= required:
            kept.append(period)
    return kept
def add_interval_ends(periods: list[list[dict]], *, time: TibberPricesTimeService) -> None:
    """Annotate every interval with its end time (start + interval duration), in place."""
    duration = time.get_interval_duration()
    for period in periods:
        for entry in period:
            begin = entry.get("interval_start")
            if begin:
                entry["interval_end"] = begin + duration
def filter_periods_by_end_date(periods: list[list[dict]], *, time: TibberPricesTimeService) -> list[list[dict]]:
    """
    Filter periods to keep only relevant ones for yesterday, today, and tomorrow.

    Keep periods that:
    - End yesterday or later (>= start of yesterday)

    This removes:
    - Periods that ended before yesterday (day-before-yesterday or earlier)

    Rationale: Coordinator caches periods for yesterday/today/tomorrow so that:
    - Binary sensors can filter for today+tomorrow (current/next periods)
    - Services can access yesterday's periods when user requests "yesterday" data

    Args:
        periods: Period lists whose last interval carries an "interval_end" datetime
        time: Time service providing "now" and local-day boundaries

    Returns:
        Periods whose final interval ends at or after the start of yesterday
    """
    now = time.now()
    # Start of yesterday = one full calendar day before today's local midnight.
    # BUGFIX: previously computed as interval_duration * 96, which equals 24h
    # only for 15-minute intervals and silently breaks for other resolutions
    # (e.g. hourly intervals would have reached 96 hours back).
    yesterday_start = time.start_of_local_day(now) - timedelta(days=1)

    filtered = []
    for period in periods:
        if not period:
            continue

        # A period's end is the end of its final interval
        period_end = period[-1].get("interval_end")
        if not period_end:
            continue

        # Keep if period ends yesterday or later
        if period_end >= yesterday_start:
            filtered.append(period)

    return filtered
def _categorize_periods_for_supersession(
period_summaries: list[dict],
today: date,
tomorrow: date,
late_hour_threshold: int,
early_hour_limit: int,
) -> tuple[list[dict], list[dict], list[dict]]:
"""Categorize periods into today-late, tomorrow-early, and other."""
today_late: list[dict] = []
tomorrow_early: list[dict] = []
other: list[dict] = []
for period in period_summaries:
period_start = period.get("start")
period_end = period.get("end")
if not period_start or not period_end:
other.append(period)
# Today late-night periods: START today at or after late_hour_threshold (e.g., 20:00)
# Note: period_end could be tomorrow (e.g., 23:30-00:00 spans midnight)
elif period_start.date() == today and period_start.hour >= late_hour_threshold:
today_late.append(period)
# Tomorrow early-morning periods: START tomorrow before early_hour_limit (e.g., 08:00)
elif period_start.date() == tomorrow and period_start.hour < early_hour_limit:
tomorrow_early.append(period)
else:
other.append(period)
return today_late, tomorrow_early, other
def _filter_superseded_today_periods(
    today_late_periods: list[dict],
    best_tomorrow: dict,
    best_tomorrow_price: float,
    improvement_threshold: float,
) -> list[dict]:
    """Drop today's late periods that a sufficiently better tomorrow period replaces."""
    survivors: list[dict] = []
    for candidate in today_late_periods:
        candidate_price = candidate.get("price_mean")
        if candidate_price is None:
            # Without a mean price there is nothing to compare - keep it
            survivors.append(candidate)
            continue

        # Tomorrow's relative advantage over this period, in percent
        gain_pct = ((candidate_price - best_tomorrow_price) / candidate_price * 100) if candidate_price > 0 else 0
        _LOGGER.debug(
            "Supersession check: Today %s-%s (%.4f) vs Tomorrow %s-%s (%.4f) = %.1f%% improvement (threshold: %.1f%%)",
            candidate["start"].strftime("%H:%M"),
            candidate["end"].strftime("%H:%M"),
            candidate_price,
            best_tomorrow["start"].strftime("%H:%M"),
            best_tomorrow["end"].strftime("%H:%M"),
            best_tomorrow_price,
            gain_pct,
            improvement_threshold,
        )

        if gain_pct >= improvement_threshold:
            _LOGGER.info(
                "Period superseded: Today %s-%s (%.2f) replaced by Tomorrow %s-%s (%.2f, %.1f%% better)",
                candidate["start"].strftime("%H:%M"),
                candidate["end"].strftime("%H:%M"),
                candidate_price,
                best_tomorrow["start"].strftime("%H:%M"),
                best_tomorrow["end"].strftime("%H:%M"),
                best_tomorrow_price,
                gain_pct,
            )
        else:
            survivors.append(candidate)

    return survivors
def filter_superseded_periods(
    period_summaries: list[dict],
    *,
    time: TibberPricesTimeService,
    reverse_sort: bool,
) -> list[dict]:
    """
    Filter out late-night today periods that are superseded by better tomorrow periods.

    Once tomorrow's data arrives, a late-night period that was only found
    through relaxation may become pointless: if tomorrow morning offers a
    significantly better window, the late-night period is obsolete.

    Example:
        - Today 23:30-00:00 at 0.70 kr (found via relaxation, was best available)
        - Tomorrow 04:00-05:30 at 0.50 kr (much better alternative)
        The today period is superseded and should be filtered out

    Only best-price periods (reverse_sort=False) are filtered this way;
    peak-price periods are returned unchanged.
    """
    from .types import (  # noqa: PLC0415
        CROSS_DAY_LATE_PERIOD_START_HOUR,
        CROSS_DAY_MAX_EXTENSION_HOUR,
        SUPERSESSION_PRICE_IMPROVEMENT_PCT,
    )

    _LOGGER.debug(
        "filter_superseded_periods called: %d periods, reverse_sort=%s",
        len(period_summaries) if period_summaries else 0,
        reverse_sort,
    )

    # Peak-price mode and empty input pass straight through
    if reverse_sort or not period_summaries:
        return period_summaries

    current = time.now()
    today = current.date()
    tomorrow = today + timedelta(days=1)

    today_late, tomorrow_early, other = _categorize_periods_for_supersession(
        period_summaries,
        today,
        tomorrow,
        CROSS_DAY_LATE_PERIOD_START_HOUR,
        CROSS_DAY_MAX_EXTENSION_HOUR,
    )
    _LOGGER.debug(
        "Supersession categorization: today_late=%d, tomorrow_early=%d, other=%d",
        len(today_late),
        len(tomorrow_early),
        len(other),
    )

    # Without early-morning periods tomorrow there is nothing to compare against
    if not tomorrow_early:
        _LOGGER.debug("No tomorrow early periods - skipping supersession check")
        return period_summaries

    # The cheapest early-morning period tomorrow is the benchmark
    best_tomorrow = min(tomorrow_early, key=lambda p: p.get("price_mean", float("inf")))
    best_tomorrow_price = best_tomorrow.get("price_mean")
    if best_tomorrow_price is None:
        return period_summaries

    kept_today = _filter_superseded_today_periods(
        today_late,
        best_tomorrow,
        best_tomorrow_price,
        SUPERSESSION_PRICE_IMPROVEMENT_PCT,
    )

    # Merge the groups back together and restore chronological order
    merged = other + kept_today + tomorrow_early
    merged.sort(key=lambda p: p.get("start") or time.now())
    return merged
def _is_period_eligible_for_extension(
period: dict,
today: date,
late_hour_threshold: int,
) -> bool:
"""
Check if a period is eligible for cross-day extension.
Eligibility criteria:
- Period has valid start and end times
- Period ends on today (not yesterday or tomorrow)
- Period ends late (after late_hour_threshold, e.g. 20:00)
"""
period_end = period.get("end")
period_start = period.get("start")
if not period_end or not period_start:
return False
if period_end.date() != today:
return False
return period_end.hour >= late_hour_threshold
def _find_extension_intervals(
    period_end: datetime,
    price_lookup: dict[str, dict],
    criteria: Any,
    max_extension_time: datetime,
    interval_duration: timedelta,
) -> list[dict]:
    """
    Collect consecutive intervals after period_end that still meet criteria.

    Walks forward one interval at a time, stopping at the first missing
    timestamp, the first interval failing the flex/min-distance criteria,
    or the maximum extension time - whichever comes first.
    """
    from .level_filtering import check_interval_criteria  # noqa: PLC0415

    found: list[dict] = []
    cursor = period_end
    while cursor < max_extension_time:
        entry = price_lookup.get(cursor.isoformat())
        if not entry:
            # No data for this timestamp - extension cannot continue
            break
        in_flex, meets_min_distance = check_interval_criteria(float(entry["total"]), criteria)
        if not (in_flex and meets_min_distance):
            # First failing interval terminates the extension
            break
        found.append(entry)
        cursor = cursor + interval_duration

    return found
def _collect_original_period_prices(
period_start: datetime,
period_end: datetime,
price_lookup: dict[str, dict],
interval_duration: timedelta,
) -> list[float]:
"""Collect prices from original period for CV calculation."""
prices: list[float] = []
current = period_start
while current < period_end:
price_data = price_lookup.get(current.isoformat())
if price_data:
prices.append(float(price_data["total"]))
current = current + interval_duration
return prices
def _build_extended_period(
period: dict,
extension_intervals: list[dict],
combined_prices: list[float],
combined_cv: float,
interval_duration: timedelta,
) -> dict:
"""Create extended period dict with updated statistics."""
period_start = period["start"]
period_end = period["end"]
new_end = period_end + (interval_duration * len(extension_intervals))
extended = period.copy()
extended["end"] = new_end
extended["duration_minutes"] = int((new_end - period_start).total_seconds() / 60)
extended["period_interval_count"] = len(combined_prices)
extended["cross_day_extended"] = True
extended["cross_day_extension_intervals"] = len(extension_intervals)
# Recalculate price statistics
extended["price_min"] = min(combined_prices)
extended["price_max"] = max(combined_prices)
extended["price_mean"] = sum(combined_prices) / len(combined_prices)
extended["price_spread"] = extended["price_max"] - extended["price_min"]
extended["price_coefficient_variation_%"] = round(combined_cv, 1)
return extended
def extend_periods_across_midnight(
    period_summaries: list[dict],
    all_prices: list[dict],
    price_context: dict[str, Any],
    *,
    time: TibberPricesTimeService,
    reverse_sort: bool,
) -> list[dict]:
    """
    Extend late-night periods across midnight if favorable prices continue.

    When a period ends close to midnight and tomorrow's data shows continued
    favorable prices, extend the period into the next day. This prevents
    artificial period breaks at midnight when it's actually better to continue.

    Example: Best price period 22:00-23:45 today could extend to 04:00 tomorrow
    if prices remain low overnight.

    Rules:
    - Only extends periods ending after CROSS_DAY_LATE_PERIOD_START_HOUR (20:00)
    - Won't extend beyond CROSS_DAY_MAX_EXTENSION_HOUR (08:00) next day
    - Extension must pass same flex criteria as original period
    - Quality Gate (CV check) applies to extended period

    Args:
        period_summaries: List of period summary dicts (already processed)
        all_prices: All price intervals including tomorrow
        price_context: Dict with ref_prices, avg_prices, flex, min_distance_from_avg
        time: Time service instance
        reverse_sort: True for peak price, False for best price

    Returns:
        Updated list of period summaries with extensions applied
    """
    # Function-local imports avoid a circular dependency with .types
    from custom_components.tibber_prices.utils.price import calculate_coefficient_of_variation  # noqa: PLC0415

    from .types import (  # noqa: PLC0415
        CROSS_DAY_LATE_PERIOD_START_HOUR,
        CROSS_DAY_MAX_EXTENSION_HOUR,
        PERIOD_MAX_CV,
        TibberPricesIntervalCriteria,
    )

    # Nothing to extend, or no price data to extend into
    if not period_summaries or not all_prices:
        return period_summaries

    # Build price lookup by timestamp (ISO string -> raw interval dict)
    price_lookup: dict[str, dict] = {}
    for price_data in all_prices:
        interval_time = time.get_interval_time(price_data)
        if interval_time:
            price_lookup[interval_time.isoformat()] = price_data

    ref_prices = price_context.get("ref_prices", {})
    avg_prices = price_context.get("avg_prices", {})
    flex = price_context.get("flex", 0.15)  # default flex of 15% when unset
    min_distance = price_context.get("min_distance_from_avg", 0)

    now = time.now()
    today = now.date()
    tomorrow = today + timedelta(days=1)
    interval_duration = time.get_interval_duration()

    # Max extension time (e.g., 08:00 tomorrow)
    max_extension_time = time.start_of_local_day(now) + timedelta(days=1, hours=CROSS_DAY_MAX_EXTENSION_HOUR)

    extended_summaries = []
    for period in period_summaries:
        # Check eligibility for extension (late-evening period of "today")
        if not _is_period_eligible_for_extension(period, today, CROSS_DAY_LATE_PERIOD_START_HOUR):
            extended_summaries.append(period)
            continue

        # Get tomorrow's reference prices.
        # Keys may be date objects or ISO strings, hence the `or` fallback.
        tomorrow_ref = ref_prices.get(tomorrow) or ref_prices.get(str(tomorrow))
        tomorrow_avg = avg_prices.get(tomorrow) or avg_prices.get(str(tomorrow))
        if tomorrow_ref is None or tomorrow_avg is None:
            # Tomorrow's data not available yet - keep period unchanged
            extended_summaries.append(period)
            continue

        # Set up criteria for extension check (same flex rules as original period,
        # but anchored to tomorrow's reference/average prices)
        criteria = TibberPricesIntervalCriteria(
            ref_price=tomorrow_ref,
            avg_price=tomorrow_avg,
            flex=flex,
            min_distance_from_avg=min_distance,
            reverse_sort=reverse_sort,
        )

        # Find contiguous qualifying intervals after the period's current end
        extension_intervals = _find_extension_intervals(
            period["end"],
            price_lookup,
            criteria,
            max_extension_time,
            interval_duration,
        )
        if not extension_intervals:
            extended_summaries.append(period)
            continue

        # Collect all prices (original + extension) for the CV check
        original_prices = _collect_original_period_prices(
            period["start"],
            period["end"],
            price_lookup,
            interval_duration,
        )
        extension_prices = [float(p["total"]) for p in extension_intervals]
        combined_prices = original_prices + extension_prices

        # Quality Gate: Check CV of extended period
        combined_cv = calculate_coefficient_of_variation(combined_prices)
        if combined_cv is not None and combined_cv <= PERIOD_MAX_CV:
            # Extension passes quality gate - build the merged summary
            extended_period = _build_extended_period(
                period,
                extension_intervals,
                combined_prices,
                combined_cv,
                interval_duration,
            )
            _LOGGER.info(
                "Cross-day extension: Period %s-%s extended to %s (+%d intervals, CV=%.1f%%)",
                period["start"].strftime("%H:%M"),
                period["end"].strftime("%H:%M"),
                extended_period["end"].strftime("%H:%M"),
                len(extension_intervals),
                combined_cv,
            )
            extended_summaries.append(extended_period)
        else:
            # Extension would exceed quality gate.
            # NOTE: a None CV (not calculable) also lands here and is rejected.
            _LOGGER_DETAILS.debug(
                "%sCross-day extension rejected for period %s-%s: CV=%.1f%% > %.1f%%",
                INDENT_L0,
                period["start"].strftime("%H:%M"),
                period["end"].strftime("%H:%M"),
                combined_cv or 0,
                PERIOD_MAX_CV,
            )
            extended_summaries.append(period)

    return extended_summaries

View file

@ -1,380 +0,0 @@
"""Period overlap resolution logic."""
from __future__ import annotations
import logging
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from custom_components.tibber_prices.coordinator.time_service import TibberPricesTimeService
_LOGGER = logging.getLogger(__name__)
_LOGGER_DETAILS = logging.getLogger(__name__ + ".details")
# Module-local log indentation (each module starts at level 0)
INDENT_L0 = "" # Entry point / main function
INDENT_L1 = " " # Nested logic / loop iterations
INDENT_L2 = " " # Deeper nesting
def _estimate_merged_cv(period1: dict, period2: dict) -> float | None:
"""
Estimate the CV of a merged period from two period summaries.
Since we don't have the raw prices, we estimate using the combined min/max range.
This is a conservative estimate - the actual CV could be higher or lower.
Formula: CV (range / 2) / mean * 100
Where range = max - min, mean = (min + max) / 2
This approximation assumes roughly uniform distribution within the range.
"""
p1_min = period1.get("price_min")
p1_max = period1.get("price_max")
p2_min = period2.get("price_min")
p2_max = period2.get("price_max")
if None in (p1_min, p1_max, p2_min, p2_max):
return None
# Cast to float - None case handled above
combined_min = min(float(p1_min), float(p2_min)) # type: ignore[arg-type]
combined_max = max(float(p1_max), float(p2_max)) # type: ignore[arg-type]
if combined_min <= 0:
return None
combined_mean = (combined_min + combined_max) / 2
price_range = combined_max - combined_min
# CV estimate based on range (assuming uniform distribution)
# For uniform distribution: std_dev ≈ range / sqrt(12) ≈ range / 3.46
return (price_range / 3.46) / combined_mean * 100
def recalculate_period_metadata(periods: list[dict], *, time: TibberPricesTimeService) -> None:
    """
    Refresh positional metadata after periods have been merged.

    Sorts the periods chronologically and rewrites period_position,
    periods_total and periods_remaining accordingly. Must run after
    resolve_period_overlaps() so the metadata matches the final list.

    Args:
        periods: Period summary dicts, mutated in-place.
        time: TibberPricesTimeService instance (required); its now() is used
            as a sort fallback for entries missing a start time.
    """
    if not periods:
        return

    def _chronological(entry: dict):
        # Fall back to "now" when a period has no start timestamp
        return entry.get("start") or time.now()

    periods.sort(key=_chronological)

    count = len(periods)
    for index, entry in enumerate(periods, start=1):
        entry["period_position"] = index
        entry["periods_total"] = count
        entry["periods_remaining"] = count - index
def merge_adjacent_periods(period1: dict, period2: dict) -> dict:
    """
    Merge two adjacent or overlapping periods into one.

    The newer period's relaxation attributes override the older period's.
    Takes the earliest start time and latest end time.

    Relaxation attributes from the newer period (period2) override those from period1:
    - relaxation_active
    - relaxation_level
    - relaxation_threshold_original_%
    - relaxation_threshold_applied_%
    - period_interval_level_gap_count
    - period_interval_smoothed_count

    NOTE(review): price statistics (price_min/max/mean, ...) are inherited
    unchanged from period1 via copy() and are NOT recombined with period2's -
    confirm this is intended by callers that read merged price stats.

    Args:
        period1: First period (older baseline or relaxed period)
        period2: Second period (newer relaxed period with higher flex)

    Returns:
        Merged period dict with combined time span and newer period's attributes
    """
    # Take earliest start and latest end
    merged_start = min(period1["start"], period2["start"])
    merged_end = max(period1["end"], period2["end"])
    merged_duration = int((merged_end - merged_start).total_seconds() / 60)

    # Start with period1 as base
    merged = period1.copy()

    # Update time boundaries
    merged["start"] = merged_start
    merged["end"] = merged_end
    merged["duration_minutes"] = merged_duration

    # Override with period2's relaxation attributes (newer/higher flex wins)
    relaxation_attrs = [
        "relaxation_active",
        "relaxation_level",
        "relaxation_threshold_original_%",
        "relaxation_threshold_applied_%",
        "period_interval_level_gap_count",
        "period_interval_smoothed_count",
    ]
    for attr in relaxation_attrs:
        if attr in period2:
            merged[attr] = period2[attr]

    # Mark as merged (for debugging)
    merged["merged_from"] = {
        "period1_start": period1["start"].isoformat(),
        "period1_end": period1["end"].isoformat(),
        "period2_start": period2["start"].isoformat(),
        "period2_end": period2["end"].isoformat(),
    }

    # Fixed format string: a separator was missing between the second input
    # period and the merged result ("...%s-%s%s-%s..." ran them together).
    _LOGGER_DETAILS.debug(
        "%sMerged periods: %s-%s + %s-%s → %s-%s (duration: %d min)",
        INDENT_L2,
        period1["start"].strftime("%H:%M"),
        period1["end"].strftime("%H:%M"),
        period2["start"].strftime("%H:%M"),
        period2["end"].strftime("%H:%M"),
        merged_start.strftime("%H:%M"),
        merged_end.strftime("%H:%M"),
        merged_duration,
    )
    return merged
def _check_merge_quality_gate(periods_to_merge: list[tuple[int, dict]], relaxed: dict) -> bool:
    """
    Decide whether merging would produce an overly heterogeneous period.

    Returns True when the merge may proceed, False when the estimated CV of
    any candidate combination exceeds PERIOD_MAX_CV (Quality Gate).
    """
    from .types import PERIOD_MAX_CV  # noqa: PLC0415

    start = relaxed["start"]
    end = relaxed["end"]
    for _index, candidate in periods_to_merge:
        cv_estimate = _estimate_merged_cv(candidate, relaxed)
        if cv_estimate is None or cv_estimate <= PERIOD_MAX_CV:
            continue
        _LOGGER.debug(
            "Merge blocked by Quality Gate: %s-%s + %s-%s would have CV≈%.1f%% (max: %.1f%%)",
            candidate["start"].strftime("%H:%M"),
            candidate["end"].strftime("%H:%M"),
            start.strftime("%H:%M"),
            end.strftime("%H:%M"),
            cv_estimate,
            PERIOD_MAX_CV,
        )
        return False
    return True
def _would_swallow_existing(relaxed: dict, existing_periods: list[dict]) -> bool:
"""
Check if the relaxed period would "swallow" any existing period.
A period is "swallowed" if the new relaxed period completely contains it.
In this case, we should NOT merge - the existing smaller period is more
homogeneous and should be preserved.
This prevents relaxation from replacing good small periods with larger,
more heterogeneous ones.
Returns:
True if any existing period would be swallowed (merge should be blocked)
False if safe to proceed with merge evaluation
"""
relaxed_start = relaxed["start"]
relaxed_end = relaxed["end"]
for existing in existing_periods:
existing_start = existing["start"]
existing_end = existing["end"]
# Check if relaxed completely contains existing
if relaxed_start <= existing_start and relaxed_end >= existing_end:
_LOGGER.debug(
"Blocking merge: %s-%s would swallow %s-%s (keeping smaller period)",
relaxed_start.strftime("%H:%M"),
relaxed_end.strftime("%H:%M"),
existing_start.strftime("%H:%M"),
existing_end.strftime("%H:%M"),
)
return True
return False
def _is_duplicate_period(relaxed: dict, existing_periods: list[dict], tolerance_seconds: int = 60) -> bool:
"""Check if relaxed period is a duplicate of any existing period."""
relaxed_start = relaxed["start"]
relaxed_end = relaxed["end"]
for existing in existing_periods:
if (
abs((relaxed_start - existing["start"]).total_seconds()) < tolerance_seconds
and abs((relaxed_end - existing["end"]).total_seconds()) < tolerance_seconds
):
_LOGGER_DETAILS.debug(
"%sSkipping duplicate period %s-%s (already exists)",
INDENT_L1,
relaxed_start.strftime("%H:%M"),
relaxed_end.strftime("%H:%M"),
)
return True
return False
def _find_adjacent_or_overlapping(relaxed: dict, existing_periods: list[dict]) -> list[tuple[int, dict]]:
"""Find all periods that are adjacent to or overlapping with the relaxed period."""
relaxed_start = relaxed["start"]
relaxed_end = relaxed["end"]
periods_to_merge = []
for idx, existing in enumerate(existing_periods):
existing_start = existing["start"]
existing_end = existing["end"]
# Check if adjacent (no gap) or overlapping
is_adjacent = relaxed_end == existing_start or relaxed_start == existing_end
is_overlapping = relaxed_start < existing_end and relaxed_end > existing_start
if is_adjacent or is_overlapping:
periods_to_merge.append((idx, existing))
_LOGGER_DETAILS.debug(
"%sPeriod %s-%s %s with existing period %s-%s",
INDENT_L1,
relaxed_start.strftime("%H:%M"),
relaxed_end.strftime("%H:%M"),
"overlaps" if is_overlapping else "is adjacent to",
existing_start.strftime("%H:%M"),
existing_end.strftime("%H:%M"),
)
return periods_to_merge
def resolve_period_overlaps(
    existing_periods: list[dict],
    new_relaxed_periods: list[dict],
) -> tuple[list[dict], int]:
    """
    Resolve overlaps between existing periods and newly found relaxed periods.

    Adjacent or overlapping periods are merged into single continuous periods.
    The newer period's relaxation attributes override the older period's.

    Quality Gate: Merging is blocked if the combined period would have
    an estimated CV above PERIOD_MAX_CV (25%), to prevent creating
    periods with excessive internal price variation.

    This function is called incrementally after each relaxation phase:
    - Phase 1: existing = baseline, new = first relaxation
    - Phase 2: existing = baseline + phase 1, new = second relaxation
    - Phase 3: existing = baseline + phase 1 + phase 2, new = third relaxation

    Args:
        existing_periods: All previously found periods (baseline + earlier relaxation phases)
        new_relaxed_periods: Periods found in current relaxation phase (will be merged if adjacent)

    Returns:
        Tuple of (merged_periods, new_periods_count):
        - merged_periods: All periods after merging, sorted by start time
        - new_periods_count: Number of new periods added (some may have been merged)
    """
    _LOGGER_DETAILS.debug(
        "%sresolve_period_overlaps called: existing=%d, new=%d",
        INDENT_L0,
        len(existing_periods),
        len(new_relaxed_periods),
    )
    # Trivial cases: nothing new, or nothing to merge against
    if not new_relaxed_periods:
        return existing_periods.copy(), 0
    if not existing_periods:
        # No existing periods - return all new periods
        return new_relaxed_periods.copy(), len(new_relaxed_periods)

    merged = existing_periods.copy()
    periods_added = 0
    for relaxed in new_relaxed_periods:
        relaxed_start = relaxed["start"]
        relaxed_end = relaxed["end"]

        # Check if this period is duplicate (exact match within tolerance)
        if _is_duplicate_period(relaxed, merged):
            continue

        # Check if this period would "swallow" an existing smaller period
        # In that case, skip it - the smaller existing period is more homogeneous
        if _would_swallow_existing(relaxed, merged):
            continue

        # Find periods that are adjacent or overlapping (should be merged)
        periods_to_merge = _find_adjacent_or_overlapping(relaxed, merged)
        if not periods_to_merge:
            # No merge needed - add as new period
            merged.append(relaxed)
            periods_added += 1
            _LOGGER_DETAILS.debug(
                "%sAdded new period %s-%s (no overlap/adjacency)",
                INDENT_L1,
                relaxed_start.strftime("%H:%M"),
                relaxed_end.strftime("%H:%M"),
            )
            continue

        # Quality Gate: Check if merging would create a period that's too heterogeneous
        should_merge = _check_merge_quality_gate(periods_to_merge, relaxed)
        if not should_merge:
            # Don't merge - add as separate period instead
            merged.append(relaxed)
            periods_added += 1
            _LOGGER_DETAILS.debug(
                "%sAdded new period %s-%s separately (merge blocked by CV gate)",
                INDENT_L1,
                relaxed_start.strftime("%H:%M"),
                relaxed_end.strftime("%H:%M"),
            )
            continue

        # Merge with all adjacent/overlapping periods
        # Start with the new relaxed period
        merged_period = relaxed.copy()
        # Remove old periods (in reverse order to maintain indices;
        # _find_adjacent_or_overlapping returns indices in ascending order)
        for idx, existing in reversed(periods_to_merge):
            merged_period = merge_adjacent_periods(existing, merged_period)
            merged.pop(idx)
        # Add the merged result
        merged.append(merged_period)
        # Count as added if we merged exactly one existing period
        # (means we extended/merged, not replaced multiple)
        if len(periods_to_merge) == 1:
            periods_added += 1

    # Sort all periods by start time
    merged.sort(key=lambda p: p["start"])
    return merged, periods_added

View file

@ -1,376 +0,0 @@
"""Period statistics calculation and summary building."""
from __future__ import annotations
from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from datetime import datetime
from custom_components.tibber_prices.coordinator.time_service import TibberPricesTimeService
from .types import (
TibberPricesPeriodData,
TibberPricesPeriodStatistics,
TibberPricesThresholdConfig,
)
from custom_components.tibber_prices.utils.average import calculate_median
from custom_components.tibber_prices.utils.price import (
aggregate_period_levels,
aggregate_period_ratings,
calculate_coefficient_of_variation,
calculate_volatility_level,
)
def calculate_period_price_diff(
    price_mean: float,
    start_time: datetime,
    price_context: dict[str, Any],
) -> tuple[float | None, float | None]:
    """
    Compute the period's price difference from its daily reference price.

    The reference price (daily min or max, depending on sensor type) is taken
    from the day the period starts, keeping the comparison consistent.

    Args:
        price_mean: Mean price of the period (in base currency).
        start_time: Start time of the period.
        price_context: Dictionary with ref_prices per day.

    Returns:
        (period_price_diff, period_price_diff_pct), or (None, None) when no
        reference price is available for the period's start day.
    """
    if not price_context or not start_time:
        return None, None

    reference = price_context.get("ref_prices", {}).get(start_time.date())
    if reference is None:
        return None, None

    # Both prices are in base currency, so no unit conversion is required
    reference_rounded = round(reference, 4)
    diff = round(price_mean - reference_rounded, 4)
    if reference_rounded == 0:
        return diff, None

    # abs() keeps the sign meaningful for negative prices:
    # e.g. avg=-10, ref=-20 -> diff=10 -> +50% (correctly "more expensive")
    pct = round((diff / abs(reference_rounded)) * 100, 2)
    return diff, pct
def calculate_aggregated_rating_difference(period_price_data: list[dict]) -> float | None:
"""
Calculate aggregated rating difference percentage for the period.
Takes the average of all interval differences (from their respective thresholds).
Args:
period_price_data: List of price data dictionaries with "difference" field
Returns:
Average difference percentage, or None if no valid data
"""
differences = []
for price_data in period_price_data:
diff = price_data.get("difference")
if diff is not None:
differences.append(float(diff))
if not differences:
return None
return round(sum(differences) / len(differences), 2)
def calculate_period_price_statistics(
    period_price_data: list[dict],
) -> dict[str, float]:
    """
    Calculate price statistics for a period.

    Prices are kept in base currency (Euro/NOK/SEK) for internal storage;
    conversion to display units (ct/øre) happens in the services/formatting
    layer.

    Args:
        period_price_data: List of price data dictionaries with "total" field.

    Returns:
        Dictionary with price_mean, price_median, price_min, price_max and
        price_spread (all in base currency, rounded to 4 decimals).
        Note: price_spread is the absolute range (price_max - price_min),
        not a percentage.
    """
    # Round each interval price to 4 decimals in base currency
    prices = [round(float(entry["total"]), 4) for entry in period_price_data]
    if not prices:
        # Empty period: return a zeroed, fully-shaped result
        return {
            "price_mean": 0.0,
            "price_median": 0.0,
            "price_min": 0.0,
            "price_max": 0.0,
            "price_spread": 0.0,
        }

    price_mean = round(sum(prices) / len(prices), 4)
    median_value = calculate_median(prices)
    price_median = round(median_value, 4) if median_value is not None else 0.0
    price_min = round(min(prices), 4)
    price_max = round(max(prices), 4)
    return {
        "price_mean": price_mean,
        "price_median": price_median,
        "price_min": price_min,
        "price_max": price_max,
        "price_spread": round(price_max - price_min, 4),
    }
def build_period_summary_dict(
    period_data: TibberPricesPeriodData,
    stats: TibberPricesPeriodStatistics,
    *,
    reverse_sort: bool,
    price_context: dict[str, Any] | None = None,
) -> dict:
    """
    Assemble the complete, sensor-ready summary dict for one period.

    Key insertion order follows the project's attribute ordering convention:
    time information, core decision attributes, price statistics, optional
    price differences, then detail/position information.

    Args:
        period_data: Period timing and position data.
        stats: Calculated period statistics.
        reverse_sort: True for peak price, False for best price (keyword-only).
        price_context: Optional dict with ref_prices, avg_prices and
            intervals_by_day used to attach per-day statistics.

    Returns:
        Complete period summary dictionary.
    """
    # NOTE: 15-minute interval length is assumed when converting the
    # interval count to minutes.
    summary: dict = {
        # 1. Time information (when does this apply?)
        "start": period_data.start_time,
        "end": period_data.end_time,
        "duration_minutes": period_data.period_length * 15,
        # 2. Core decision attributes (what should I do?)
        "level": stats.aggregated_level,
        "rating_level": stats.aggregated_rating,
        "rating_difference_%": stats.rating_difference_pct,
        # 3. Price statistics (how much does it cost?)
        "price_mean": stats.price_mean,
        "price_median": stats.price_median,
        "price_min": stats.price_min,
        "price_max": stats.price_max,
        "price_spread": stats.price_spread,
        "price_coefficient_variation_%": stats.coefficient_of_variation,
        "volatility": stats.volatility,
        # 4. Price differences are appended below when available
        # 5. Detail information (additional context)
        "period_interval_count": period_data.period_length,
        "period_position": period_data.period_idx,
        "periods_total": period_data.total_periods,
        "periods_remaining": period_data.total_periods - period_data.period_idx,
    }

    # Peak price compares against the daily maximum, best price against the minimum
    if stats.period_price_diff is not None:
        reference = "max" if reverse_sort else "min"
        summary[f"period_price_diff_from_daily_{reference}"] = stats.period_price_diff
        if stats.period_price_diff_pct is not None:
            summary[f"period_price_diff_from_daily_{reference}_%"] = stats.period_price_diff_pct

    # Attach day-level statistics (helps explain midnight classification changes)
    if price_context:
        day = period_data.start_time.date()
        day_intervals = price_context.get("intervals_by_day", {}).get(day, [])
        if day_intervals:
            # Day prices arrive in major units (e.g. EUR) from the API
            totals = [float(entry["total"]) for entry in day_intervals]
            low = min(totals)
            high = max(totals)
            span = high - low
            average = price_context.get("avg_prices", {}).get(day, sum(totals) / len(totals))
            # Volatility as the day's price span relative to its average
            summary["day_volatility_%"] = round(span / average * 100, 1) if average > 0 else 0.0
            # Exposed in minor units (ct/øre) for consistency with other price attributes
            summary["day_price_min"] = round(low * 100, 2)
            summary["day_price_max"] = round(high * 100, 2)
            summary["day_price_span"] = round(span * 100, 2)

    return summary
def extract_period_summaries(
    periods: list[list[dict]],
    all_prices: list[dict],
    price_context: dict[str, Any],
    thresholds: TibberPricesThresholdConfig,
    *,
    time: TibberPricesTimeService,
) -> list[dict]:
    """
    Extract complete period summaries with all aggregated attributes.

    Returns sensor-ready period summaries with:
    - Timestamps and positioning (start, end, hour, minute, time)
    - Aggregated price statistics (price_mean, price_median, price_min, price_max, price_spread)
    - Volatility categorization (low/moderate/high/very_high based on coefficient of variation)
    - Rating difference percentage (aggregated from intervals)
    - Period price differences (period_price_diff_from_daily_min/max)
    - Aggregated level and rating_level
    - Interval count (number of 15-min intervals in period)

    All data is pre-calculated and ready for display - no further processing needed.

    Args:
        periods: List of periods, where each period is a list of interval dictionaries.
        all_prices: All price data from the API (enriched with level, difference, rating_level).
        price_context: Dictionary with ref_prices and avg_prices per day.
        thresholds: Threshold configuration for calculations.
        time: TibberPricesTimeService instance (required).
    """
    from .types import (  # noqa: PLC0415 - Avoid circular import
        TibberPricesPeriodData,
        TibberPricesPeriodStatistics,
    )

    # Build lookup dictionary for full price data by timestamp (ISO string key)
    price_lookup: dict[str, dict] = {}
    for price_data in all_prices:
        starts_at = time.get_interval_time(price_data)
        if starts_at:
            price_lookup[starts_at.isoformat()] = price_data

    summaries = []
    total_periods = len(periods)
    # Periods are numbered 1-based for display (period_position)
    for period_idx, period in enumerate(periods, 1):
        if not period:
            continue
        first_interval = period[0]
        last_interval = period[-1]
        start_time = first_interval.get("interval_start")
        end_time = last_interval.get("interval_end")
        # Skip periods without usable boundary timestamps
        if not start_time or not end_time:
            continue

        # Look up full price data for each interval in the period
        period_price_data: list[dict] = []
        for interval in period:
            start = interval.get("interval_start")
            if not start:
                continue
            start_iso = start.isoformat()
            price_data = price_lookup.get(start_iso)
            if price_data:
                period_price_data.append(price_data)

        # Calculate aggregated level and rating_level
        aggregated_level = None
        aggregated_rating = None
        if period_price_data:
            # Aggregate level (from API's "level" field)
            aggregated_level = aggregate_period_levels(period_price_data)
            # Aggregate rating_level (from calculated "rating_level" and "difference" fields);
            # requires both rating thresholds to be configured
            if thresholds.threshold_low is not None and thresholds.threshold_high is not None:
                aggregated_rating, _ = aggregate_period_ratings(
                    period_price_data,
                    thresholds.threshold_low,
                    thresholds.threshold_high,
                )

        # Calculate price statistics (in base currency, conversion happens in presentation layer)
        price_stats = calculate_period_price_statistics(period_price_data)

        # Calculate period price difference from daily reference
        period_price_diff, period_price_diff_pct = calculate_period_price_diff(
            price_stats["price_mean"], start_time, price_context
        )

        # Extract prices for volatility calculation (coefficient of variation)
        prices_for_volatility = [float(p["total"]) for p in period_price_data if "total" in p]
        # Calculate CV (numeric) for quality gate checks
        period_cv = calculate_coefficient_of_variation(prices_for_volatility)
        # Calculate volatility (categorical) using thresholds
        volatility = calculate_volatility_level(
            prices_for_volatility,
            threshold_moderate=thresholds.threshold_volatility_moderate,
            threshold_high=thresholds.threshold_volatility_high,
            threshold_very_high=thresholds.threshold_volatility_very_high,
        ).lower()

        rating_difference_pct = calculate_aggregated_rating_difference(period_price_data)

        # Count how many intervals in this period benefited from smoothing (i.e., would have been excluded)
        smoothed_impactful_count = sum(1 for interval in period if interval.get("smoothing_was_impactful", False))
        # Count how many intervals were kept due to level filter gap tolerance
        level_gap_count = sum(1 for interval in period if interval.get("is_level_gap", False))

        # Build period data and statistics objects
        period_data = TibberPricesPeriodData(
            start_time=start_time,
            end_time=end_time,
            period_length=len(period),
            period_idx=period_idx,
            total_periods=total_periods,
        )
        stats = TibberPricesPeriodStatistics(
            aggregated_level=aggregated_level,
            aggregated_rating=aggregated_rating,
            rating_difference_pct=rating_difference_pct,
            price_mean=price_stats["price_mean"],
            price_median=price_stats["price_median"],
            price_min=price_stats["price_min"],
            price_max=price_stats["price_max"],
            price_spread=price_stats["price_spread"],
            volatility=volatility,
            coefficient_of_variation=round(period_cv, 1) if period_cv is not None else None,
            period_price_diff=period_price_diff,
            period_price_diff_pct=period_price_diff_pct,
        )

        # Build complete period summary
        summary = build_period_summary_dict(
            period_data, stats, reverse_sort=thresholds.reverse_sort, price_context=price_context
        )
        # Add smoothing information if any intervals benefited from smoothing
        if smoothed_impactful_count > 0:
            summary["period_interval_smoothed_count"] = smoothed_impactful_count
        # Add level gap tolerance information if any intervals were kept as gaps
        if level_gap_count > 0:
            summary["period_interval_level_gap_count"] = level_gap_count
        summaries.append(summary)
    return summaries

View file

@ -1,856 +0,0 @@
"""Relaxation strategy for finding minimum periods per day."""
from __future__ import annotations
import logging
from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from collections.abc import Callable
from datetime import date
from custom_components.tibber_prices.coordinator.time_service import TibberPricesTimeService
from custom_components.tibber_prices.utils.price import calculate_coefficient_of_variation
from .period_overlap import (
recalculate_period_metadata,
resolve_period_overlaps,
)
from .types import (
INDENT_L0,
INDENT_L1,
INDENT_L2,
PERIOD_MAX_CV,
TibberPricesPeriodConfig,
)
_LOGGER = logging.getLogger(__name__)
_LOGGER_DETAILS = logging.getLogger(__name__ + ".details")
# Flex thresholds for warnings (see docs/development/period-calculation-theory.md)
# With relaxation active, high base flex is counterproductive (reduces relaxation effectiveness)
FLEX_WARNING_THRESHOLD_RELAXATION = 0.25 # 25% - INFO: suggest lowering to 15-20%
MAX_FLEX_HARD_LIMIT = 0.50 # 50% - hard maximum flex value
FLEX_HIGH_THRESHOLD_RELAXATION = 0.30 # 30% - WARNING: base flex too high for relaxation mode
# Min duration fallback constants
# When all relaxation phases are exhausted and still no periods found,
# gradually reduce min_period_length to find at least something
MIN_DURATION_FALLBACK_MINIMUM = 30 # Minimum period length to try (30 min = 2 intervals)
MIN_DURATION_FALLBACK_STEP = 15 # Reduce by 15 min (1 interval) each step
def _check_period_quality(
    period: dict, all_prices: list[dict], *, time: TibberPricesTimeService
) -> tuple[bool, float | None]:
    """
    Run the quality gate on a single period (internal CV must stay bounded).

    The gate stops relaxation from producing periods with too much internal
    price variation - a "best price period" spanning 0.5 to 1.0 kr/kWh is
    not trustworthy.

    Args:
        period: Period summary dict with "start" and "end" datetime.
        all_prices: All price intervals (used to look up prices for the CV).
        time: Time service for interval time parsing.

    Returns:
        (passes_quality_gate, cv_value) - passes when CV <= PERIOD_MAX_CV;
        cv_value is the CV percentage or None when not calculable.
    """
    start_time = period.get("start")
    end_time = period.get("end")
    if not start_time or not end_time:
        return True, None  # Can't check, assume OK

    # Index prices by ISO timestamp for O(1) lookups
    lookup: dict[str, float] = {}
    for entry in all_prices:
        when = time.get_interval_time(entry)
        if when:
            lookup[when.isoformat()] = float(entry["total"])

    # Walk the period in interval-sized steps, collecting known prices
    step = time.get_interval_duration()
    prices: list[float] = []
    cursor = start_time
    while cursor < end_time:
        value = lookup.get(cursor.isoformat())
        if value is not None:
            prices.append(value)
        cursor = cursor + step

    # Need at least 2 prices for a CV (matches MIN_PRICES_FOR_VOLATILITY in price.py)
    min_samples = 2
    if len(prices) < min_samples:
        return True, None

    cv = calculate_coefficient_of_variation(prices)
    if cv is None:
        return True, None
    return cv <= PERIOD_MAX_CV, cv
def _count_quality_periods(
    periods: list[dict],
    all_prices: list[dict],
    prices_by_day: dict[date, list[dict]],
    min_periods: int,
    *,
    time: TibberPricesTimeService,
) -> tuple[int, int]:
    """
    Count days that satisfy the per-day minimum, honoring the quality gate.

    Only periods whose CV stays at or below PERIOD_MAX_CV count towards the
    min_periods requirement for a day.

    Args:
        periods: All candidate periods.
        all_prices: All price intervals.
        prices_by_day: Price intervals grouped by day.
        min_periods: Target number of periods per day.
        time: Time service.

    Returns:
        (days_meeting_requirement, total_quality_periods)
    """
    by_day = group_periods_by_day(periods)
    days_ok = 0
    quality_total = 0
    for day in sorted(prices_by_day.keys()):
        accepted = 0
        for period in by_day.get(day, []):
            passes, cv = _check_period_quality(period, all_prices, time=time)
            if not passes:
                _LOGGER_DETAILS.debug(
                    "%s Day %s: Period %s-%s REJECTED by quality gate (CV=%.1f%% > %.1f%%)",
                    INDENT_L2,
                    day,
                    period.get("start", "?").strftime("%H:%M") if hasattr(period.get("start"), "strftime") else "?",
                    period.get("end", "?").strftime("%H:%M") if hasattr(period.get("end"), "strftime") else "?",
                    cv or 0,
                    PERIOD_MAX_CV,
                )
                continue
            accepted += 1
        quality_total += accepted
        if accepted >= min_periods:
            days_ok += 1
    return days_ok, quality_total
def group_periods_by_day(periods: list[dict]) -> dict[date, list[dict]]:
    """
    Group periods by ALL days they span (including midnight crossings).

    Periods crossing midnight are assigned to ALL affected days.
    Example: Period 23:00 yesterday - 02:00 today appears in BOTH days.

    This ensures that:
    1. For min_periods checking: A midnight-crossing period counts towards both days
    2. For binary sensors: Each day shows all relevant periods (including those
       starting/ending in other days)

    Args:
        periods: List of period summary dicts with "start" and "end" datetime

    Returns:
        Dict mapping date to list of periods spanning that date
    """
    # Fix: the deferred import used to sit INSIDE the while loop, re-executing
    # on every day step. Hoisted to the function top (kept function-local to
    # preserve the original deferred-import intent).
    from datetime import timedelta  # noqa: PLC0415

    one_day = timedelta(days=1)
    periods_by_day: dict[date, list[dict]] = {}
    for period in periods:
        start_time = period.get("start")
        end_time = period.get("end")
        # Skip malformed periods with missing boundaries
        if not start_time or not end_time:
            continue
        # Assign period to ALL days it spans (single-day and multi-day alike)
        current_date = start_time.date()
        end_date = end_time.date()
        while current_date <= end_date:
            periods_by_day.setdefault(current_date, []).append(period)
            current_date += one_day
    return periods_by_day
def mark_periods_with_relaxation(
    periods: list[dict],
    relaxation_level: str,
    original_threshold: float,
    applied_threshold: float,
    *,
    reverse_sort: bool = False,
) -> None:
    """
    Annotate periods with relaxation metadata (mutates the dicts in-place).

    All attributes use the 'relaxation_*' prefix; they are consumed by
    period_overlap.py and binary_sensor/attributes.py.

    Peak Price periods (reverse_sort=True) store thresholds as negative
    percentages to mirror the user's configuration semantics (negative
    flex = below maximum).

    Args:
        periods: Period dicts to annotate.
        relaxation_level: Human-readable relaxation label (e.g. "flex=18.0% +level_any").
        original_threshold: Configured flex threshold as a decimal (0.15 = 15%).
        applied_threshold: Threshold actually applied, as a decimal.
        reverse_sort: True for Peak Price (negative values), False for Best Price.
    """
    # Sign is constant for the whole batch; compute it once outside the loop.
    sign = -1 if reverse_sort else 1
    original_pct = round(original_threshold * 100 * sign, 1)
    applied_pct = round(applied_threshold * 100 * sign, 1)
    for entry in periods:
        entry["relaxation_active"] = True
        entry["relaxation_level"] = relaxation_level
        entry["relaxation_threshold_original_%"] = original_pct
        entry["relaxation_threshold_applied_%"] = applied_pct
def group_prices_by_day(all_prices: list[dict], *, time: TibberPricesTimeService) -> dict[date, list[dict]]:
    """
    Bucket price intervals by calendar day, keeping only today and later.

    Intervals whose "startsAt" falls before today's date (per the injected
    time service) are dropped; falsy "startsAt" values are skipped.

    Args:
        all_prices: Price dicts carrying a "startsAt" datetime (local timezone).
        time: TibberPricesTimeService instance (required).

    Returns:
        Mapping of date -> price intervals for that day (today and future only).
    """
    cutoff = time.now().date()
    grouped: dict[date, list[dict]] = {}
    for entry in all_prices:
        starts_at = entry["startsAt"]
        if not starts_at:
            continue
        day = starts_at.date()
        if day >= cutoff:
            grouped.setdefault(day, []).append(entry)
    return grouped
def _try_min_duration_fallback(
    *,
    config: TibberPricesPeriodConfig,
    existing_periods: list[dict],
    prices_by_day: dict[date, list[dict]],
    time: TibberPricesTimeService,
) -> tuple[dict[str, Any] | None, dict[str, Any]]:
    """
    Try reducing min_period_length to find periods when relaxation is exhausted.

    This is a LAST RESORT mechanism. It only activates when:
    1. All relaxation phases have been tried
    2. Some days STILL have zero periods (not just below min_periods)

    The fallback progressively reduces min_period_length:
    - 60 min (default) -> 45 min -> 30 min (minimum)

    It does NOT reduce below 30 min (2 intervals) because a single 15-min
    interval is essentially just the daily min/max price - not a "period".

    Args:
        config: Period configuration
        existing_periods: Periods found so far (from relaxation)
        prices_by_day: Price intervals grouped by day
        time: Time service instance

    Returns:
        Tuple of (result dict with periods, metadata dict) on success, or
        (None, metadata) when the fallback is not applicable or finds nothing.
    """
    from .core import calculate_periods  # noqa: PLC0415 - Avoid circular import

    metadata: dict[str, Any] = {"phases_used": [], "fallback_active": False}
    # Only try fallback if current min_period_length > minimum
    if config.min_period_length <= MIN_DURATION_FALLBACK_MINIMUM:
        return None, metadata
    # Check which days have ZERO periods (not just below target)
    existing_by_day = group_periods_by_day(existing_periods)
    days_with_zero_periods = [day for day in prices_by_day if not existing_by_day.get(day)]
    if not days_with_zero_periods:
        _LOGGER_DETAILS.debug(
            "%sMin duration fallback: All days have at least one period - no fallback needed",
            INDENT_L1,
        )
        return None, metadata
    _LOGGER.info(
        "Min duration fallback: %d day(s) have zero periods, trying shorter min_period_length...",
        len(days_with_zero_periods),
    )
    # Try progressively shorter min_period_length
    current_min_duration = config.min_period_length
    fallback_periods: list[dict] = []
    while current_min_duration > MIN_DURATION_FALLBACK_MINIMUM:
        # Step down, clamped to the hard minimum (30 min / 2 intervals)
        current_min_duration = max(
            current_min_duration - MIN_DURATION_FALLBACK_STEP,
            MIN_DURATION_FALLBACK_MINIMUM,
        )
        _LOGGER_DETAILS.debug(
            "%sTrying min_period_length=%d min for days with zero periods",
            INDENT_L2,
            current_min_duration,
        )
        # Create modified config with shorter min_period_length.
        # Use maxed-out flex (50%) since we're in fallback mode; also disable
        # min_distance and the level filter so price data alone decides.
        fallback_config = TibberPricesPeriodConfig(
            reverse_sort=config.reverse_sort,
            flex=MAX_FLEX_HARD_LIMIT,  # Max flex
            min_distance_from_avg=0,  # Disable min_distance in fallback
            min_period_length=current_min_duration,
            threshold_low=config.threshold_low,
            threshold_high=config.threshold_high,
            threshold_volatility_moderate=config.threshold_volatility_moderate,
            threshold_volatility_high=config.threshold_volatility_high,
            threshold_volatility_very_high=config.threshold_volatility_very_high,
            level_filter=None,  # Disable level filter
            gap_count=config.gap_count,
        )
        # Try to find periods for days with zero periods (each day in isolation)
        for day in days_with_zero_periods:
            day_prices = prices_by_day.get(day, [])
            if not day_prices:
                continue
            try:
                day_result = calculate_periods(
                    day_prices,
                    config=fallback_config,
                    time=time,
                )
                day_periods = day_result.get("periods", [])
                if day_periods:
                    # Mark periods with fallback metadata so consumers can see
                    # these were produced under relaxed duration rules
                    for period in day_periods:
                        period["duration_fallback_active"] = True
                        period["duration_fallback_min_length"] = current_min_duration
                        period["relaxation_active"] = True
                        period["relaxation_level"] = f"duration_fallback={current_min_duration}min"
                    fallback_periods.extend(day_periods)
                    _LOGGER.info(
                        "Min duration fallback: Found %d period(s) for %s at min_length=%d min",
                        len(day_periods),
                        day,
                        current_min_duration,
                    )
            except (KeyError, ValueError, TypeError) as err:
                # Best-effort: a failure for one day must not abort the others
                _LOGGER.warning(
                    "Error during min duration fallback for %s: %s",
                    day,
                    err,
                )
                continue
        # If we found periods for all zero-period days, we can stop
        if fallback_periods:
            # Remove days that now have periods from the list
            fallback_by_day = group_periods_by_day(fallback_periods)
            days_with_zero_periods = [day for day in days_with_zero_periods if not fallback_by_day.get(day)]
            if not days_with_zero_periods:
                break
    if fallback_periods:
        # Merge with existing periods
        # resolve_period_overlaps merges adjacent/overlapping periods
        merged_periods, _new_count = resolve_period_overlaps(
            existing_periods,
            fallback_periods,
        )
        recalculate_period_metadata(merged_periods, time=time)
        metadata["fallback_active"] = True
        metadata["phases_used"] = [f"duration_fallback (min_length={current_min_duration}min)"]
        _LOGGER.info(
            "Min duration fallback complete: Added %d period(s), total now %d",
            len(fallback_periods),
            len(merged_periods),
        )
        return {"periods": merged_periods}, metadata
    _LOGGER.warning(
        "Min duration fallback: Still %d day(s) with zero periods after trying all durations",
        len(days_with_zero_periods),
    )
    return None, metadata
def calculate_periods_with_relaxation(  # noqa: PLR0912, PLR0913, PLR0915 - Per-day relaxation requires many parameters and branches
    all_prices: list[dict],
    *,
    config: TibberPricesPeriodConfig,
    enable_relaxation: bool,
    min_periods: int,
    max_relaxation_attempts: int,
    should_show_callback: Callable[[str | None], bool],
    time: TibberPricesTimeService,
    config_entry: Any,  # ConfigEntry type
) -> dict[str, Any]:
    """
    Calculate periods with optional per-day filter relaxation.

    NEW: Each day gets its own independent relaxation loop. Today can be in Phase 1
    while tomorrow is in Phase 3, ensuring each day finds enough periods.

    If min_periods is not reached with normal filters, this function gradually
    relaxes filters in multiple phases FOR EACH DAY SEPARATELY:

    Phase 1: Increase flex threshold step-by-step (up to max_relaxation_attempts)
    Phase 2: Disable level filter (set to "any")

    As a last resort, if some days still have zero periods after relaxation,
    _try_min_duration_fallback() shortens min_period_length (60 -> 45 -> 30 min).

    Args:
        all_prices: All price data points
        config: Base period configuration
        enable_relaxation: Whether relaxation is enabled
        min_periods: Minimum number of periods required PER DAY
        max_relaxation_attempts: Maximum number of flex levels (attempts) to try per day
            before giving up (each attempt runs the full filter matrix). With 3% increment
            per step, 11 attempts allows escalation from 15% to 48% flex.
        should_show_callback: Callback function(level_override) -> bool
            Returns True if periods should be shown with given filter overrides. Pass None
            to use original configured filter values.
        time: TibberPricesTimeService instance (required).
        config_entry: Config entry to get display unit configuration.

    Returns:
        Dict with same format as calculate_periods() output:
        - periods: List of period summaries
        - metadata: Config and statistics (includes relaxation info)
        - reference_data: Daily min/max/avg prices
    """
    # Import here to avoid circular dependency
    from .core import (  # noqa: PLC0415
        calculate_periods,
    )
    from .period_building import (  # noqa: PLC0415
        filter_superseded_periods,
    )

    # Compact INFO-level summary
    period_type = "PEAK PRICE" if config.reverse_sort else "BEST PRICE"
    relaxation_status = "ON" if enable_relaxation else "OFF"
    if enable_relaxation:
        _LOGGER.info(
            "Calculating %s periods: relaxation=%s, target=%d/day, flex=%.1f%%",
            period_type,
            relaxation_status,
            min_periods,
            abs(config.flex) * 100,
        )
    else:
        _LOGGER.info(
            "Calculating %s periods: relaxation=%s, flex=%.1f%%",
            period_type,
            relaxation_status,
            abs(config.flex) * 100,
        )
    # Detailed DEBUG-level context header
    period_type_full = "PEAK PRICE (most expensive)" if config.reverse_sort else "BEST PRICE (cheapest)"
    _LOGGER_DETAILS.debug(
        "%s========== %s PERIODS ==========",
        INDENT_L0,
        period_type_full,
    )
    _LOGGER_DETAILS.debug(
        "%sRelaxation: %s",
        INDENT_L0,
        "ENABLED (user setting: ON)" if enable_relaxation else "DISABLED by user configuration",
    )
    _LOGGER_DETAILS.debug(
        "%sBase config: flex=%.1f%%, min_length=%d min",
        INDENT_L0,
        abs(config.flex) * 100,
        config.min_period_length,
    )
    if enable_relaxation:
        _LOGGER_DETAILS.debug(
            "%sRelaxation target: %d periods per day",
            INDENT_L0,
            min_periods,
        )
        _LOGGER_DETAILS.debug(
            "%sRelaxation strategy: 3%% fixed flex increment per step (%d flex levels x 2 filter combinations)",
            INDENT_L0,
            max_relaxation_attempts,
        )
        _LOGGER_DETAILS.debug(
            "%sEarly exit: After EACH filter combination when target reached",
            INDENT_L0,
        )
    _LOGGER_DETAILS.debug(
        "%s=============================================",
        INDENT_L0,
    )
    # Validate we have price data - return an empty, fully-shaped result so
    # callers never need to special-case a missing key
    if not all_prices:
        _LOGGER.warning(
            "No price data available - cannot calculate periods",
        )
        return {
            "periods": [],
            "metadata": {
                "relaxation": {
                    "relaxation_active": False,
                    "relaxation_attempted": False,
                    "min_periods_requested": min_periods if enable_relaxation else 0,
                    "periods_found": 0,
                },
            },
            "reference_data": {},
        }
    # Count available days for logging (today and future only)
    prices_by_day = group_prices_by_day(all_prices, time=time)
    total_days = len(prices_by_day)
    _LOGGER.info(
        "Calculating baseline periods for %d days...",
        total_days,
    )
    _LOGGER_DETAILS.debug(
        "%sProcessing ALL %d price intervals together (yesterday+today+tomorrow, allows midnight crossing)",
        INDENT_L1,
        len(all_prices),
    )
    # === BASELINE CALCULATION (process ALL prices together, including yesterday) ===
    # Periods that ended before yesterday will be filtered out later by filter_periods_by_end_date()
    # This keeps yesterday/today/tomorrow periods in the cache
    baseline_result = calculate_periods(all_prices, config=config, time=time)
    all_periods = baseline_result["periods"]
    # Count periods per day for min_periods check
    periods_by_day = group_periods_by_day(all_periods)
    days_meeting_requirement = 0
    for day in sorted(prices_by_day.keys()):
        day_periods = periods_by_day.get(day, [])
        period_count = len(day_periods)
        _LOGGER_DETAILS.debug(
            "%sDay %s baseline: Found %d periods%s",
            INDENT_L1,
            day,
            period_count,
            f" (need {min_periods})" if enable_relaxation else "",
        )
        if period_count >= min_periods:
            days_meeting_requirement += 1
    # Check if relaxation is needed
    relaxation_was_needed = False
    all_phases_used: list[str] = []
    if enable_relaxation and days_meeting_requirement < total_days:
        # At least one day doesn't have enough periods
        _LOGGER_DETAILS.debug(
            "%sBaseline insufficient (%d/%d days met target) - starting relaxation",
            INDENT_L1,
            days_meeting_requirement,
            total_days,
        )
        relaxation_was_needed = True
        # Run relaxation on ALL prices together (including yesterday)
        relaxed_result, relax_metadata = relax_all_prices(
            all_prices=all_prices,
            config=config,
            min_periods=min_periods,
            max_relaxation_attempts=max_relaxation_attempts,
            should_show_callback=should_show_callback,
            baseline_periods=all_periods,
            time=time,
            config_entry=config_entry,
        )
        all_periods = relaxed_result["periods"]
        if relax_metadata.get("phases_used"):
            all_phases_used = relax_metadata["phases_used"]
        # Recount after relaxation
        periods_by_day = group_periods_by_day(all_periods)
        days_meeting_requirement = 0
        for day in sorted(prices_by_day.keys()):
            day_periods = periods_by_day.get(day, [])
            period_count = len(day_periods)
            if period_count >= min_periods:
                days_meeting_requirement += 1
        # === MIN DURATION FALLBACK ===
        # If still no periods after relaxation, try reducing min_period_length
        # This is a last resort to ensure users always get SOME period
        if days_meeting_requirement < total_days and config.min_period_length > MIN_DURATION_FALLBACK_MINIMUM:
            _LOGGER.info(
                "Relaxation incomplete (%d/%d days). Trying min_duration fallback...",
                days_meeting_requirement,
                total_days,
            )
            fallback_result, fallback_metadata = _try_min_duration_fallback(
                config=config,
                existing_periods=all_periods,
                prices_by_day=prices_by_day,
                time=time,
            )
            if fallback_result:
                all_periods = fallback_result["periods"]
                all_phases_used.extend(fallback_metadata.get("phases_used", []))
                # Recount after fallback
                periods_by_day = group_periods_by_day(all_periods)
                days_meeting_requirement = 0
                for day in sorted(prices_by_day.keys()):
                    day_periods = periods_by_day.get(day, [])
                    period_count = len(day_periods)
                    if period_count >= min_periods:
                        days_meeting_requirement += 1
    elif enable_relaxation:
        _LOGGER_DETAILS.debug(
            "%sAll %d days met target with baseline - no relaxation needed",
            INDENT_L1,
            total_days,
        )
    # Sort periods by start time
    all_periods.sort(key=lambda p: p["start"])
    # Recalculate metadata for combined periods
    recalculate_period_metadata(all_periods, time=time)
    # Apply cross-day supersession filter (only for best-price periods)
    # This removes late-night today periods that are superseded by better tomorrow alternatives
    all_periods = filter_superseded_periods(
        all_periods,
        time=time,
        reverse_sort=config.reverse_sort,
    )
    # Build final result
    final_result = baseline_result.copy()
    final_result["periods"] = all_periods
    total_periods = len(all_periods)
    # Add relaxation info to metadata
    if "metadata" not in final_result:
        final_result["metadata"] = {}
    final_result["metadata"]["relaxation"] = {
        "relaxation_active": relaxation_was_needed,
        "relaxation_attempted": relaxation_was_needed,
        "min_periods_requested": min_periods,
        "periods_found": total_periods,
        "phases_used": list(set(all_phases_used)),  # Unique phases used across all days
        "days_processed": total_days,
        "days_meeting_requirement": days_meeting_requirement,
        "relaxation_incomplete": days_meeting_requirement < total_days,
    }
    return final_result
def relax_all_prices(  # noqa: PLR0913 - Comprehensive filter relaxation requires many parameters and statements
    all_prices: list[dict],
    config: TibberPricesPeriodConfig,
    min_periods: int,
    max_relaxation_attempts: int,
    should_show_callback: Callable[[str | None], bool],
    baseline_periods: list[dict],
    *,
    time: TibberPricesTimeService,
    config_entry: Any,  # ConfigEntry type
) -> tuple[dict[str, Any], dict[str, Any]]:
    """
    Relax filters for all prices until min_periods per day is reached.

    Strategy: Try increasing flex by 3% increments, then relax level filter.
    Processes all prices together (yesterday+today+tomorrow), allowing periods
    to cross midnight boundaries. Returns when ALL days have min_periods
    (or max attempts exhausted).

    Args:
        all_prices: All price intervals (yesterday+today+tomorrow).
        config: Base period configuration.
        min_periods: Target number of periods PER DAY.
        max_relaxation_attempts: Maximum flex levels to try.
        should_show_callback: Callback to check if a flex level should be shown.
        baseline_periods: Baseline periods (before relaxation).
        time: TibberPricesTimeService instance.
        config_entry: Config entry to get display unit configuration.

    Returns:
        Tuple of (result_dict, metadata_dict)
    """
    # Import here to avoid circular dependency
    from .core import (  # noqa: PLC0415
        calculate_periods,
    )

    flex_increment = 0.03  # 3% per step (hard-coded for reliability)
    base_flex = abs(config.flex)
    original_level_filter = config.level_filter
    existing_periods = list(baseline_periods)  # Start with baseline
    phases_used: list[str] = []
    # Explicit sentinel for the last calculate_periods() result. Replaces the
    # previous fragile `"result" in locals()` check with a plain None test.
    last_result: dict[str, Any] | None = None
    # Get available days from prices for checking
    prices_by_day = group_prices_by_day(all_prices, time=time)
    total_days = len(prices_by_day)
    # Try flex levels (3% increments)
    attempts = max(1, int(max_relaxation_attempts))
    for attempt in range(1, attempts + 1):
        current_flex = base_flex + (attempt * flex_increment)
        # Stop if we exceed hard maximum
        if current_flex > MAX_FLEX_HARD_LIMIT:
            _LOGGER_DETAILS.debug(
                "%s Reached 50%% flex hard limit",
                INDENT_L2,
            )
            break
        phase_label = f"flex={current_flex * 100:.1f}%"
        # Skip this flex level if callback says not to show it
        if not should_show_callback(phase_label):
            continue
        # Try current flex with level="any" (in relaxation mode)
        if original_level_filter != "any":
            _LOGGER_DETAILS.debug(
                "%s Flex=%.1f%%: OVERRIDING level_filter: %s → ANY",
                INDENT_L2,
                current_flex * 100,
                original_level_filter,
            )
        # NOTE: config.flex is already normalized to positive by get_period_config()
        relaxed_config = config._replace(
            flex=current_flex,  # Already positive from normalization
            level_filter="any",
        )
        phase_label_full = f"flex={current_flex * 100:.1f}% +level_any"
        _LOGGER_DETAILS.debug(
            "%s Trying %s: config has %d intervals (all days together), level_filter=%s",
            INDENT_L2,
            phase_label_full,
            len(all_prices),
            relaxed_config.level_filter,
        )
        # Process ALL prices together (allows midnight crossing)
        last_result = calculate_periods(all_prices, config=relaxed_config, time=time)
        new_periods = last_result["periods"]
        _LOGGER_DETAILS.debug(
            "%s %s: calculate_periods returned %d periods",
            INDENT_L2,
            phase_label_full,
            len(new_periods),
        )
        # Mark newly found periods with relaxation metadata BEFORE merging
        mark_periods_with_relaxation(
            new_periods,
            relaxation_level=phase_label_full,
            original_threshold=base_flex,
            applied_threshold=current_flex,
            reverse_sort=config.reverse_sort,
        )
        # Resolve overlaps between existing and new periods
        combined, standalone_count = resolve_period_overlaps(
            existing_periods=existing_periods,
            new_relaxed_periods=new_periods,
        )
        # Count periods per day with QUALITY GATE check
        # Only periods with CV <= PERIOD_MAX_CV count towards min_periods requirement
        days_meeting_requirement, quality_period_count = _count_quality_periods(
            combined, all_prices, prices_by_day, min_periods, time=time
        )
        total_periods = len(combined)
        _LOGGER_DETAILS.debug(
            "%s %s: found %d periods total, %d/%d days meet requirement",
            INDENT_L2,
            phase_label_full,
            total_periods,
            days_meeting_requirement,
            total_days,
        )
        existing_periods = combined
        phases_used.append(phase_label_full)
        # Check if ALL days reached target
        if days_meeting_requirement >= total_days:
            _LOGGER.info(
                "Success with %s - all %d days have %d+ periods (%d total)",
                phase_label_full,
                total_days,
                min_periods,
                total_periods,
            )
            break
    # Build final result: reuse the last calculation's shape when at least one
    # flex level actually ran; otherwise fall back to a baseline-shaped result
    if last_result is not None:
        final_result = last_result.copy()
    else:
        final_result = {"periods": baseline_periods, "metadata": {}, "reference_data": {}}
    final_result["periods"] = existing_periods
    return final_result, {
        "phases_used": phases_used,
        "periods_found": len(existing_periods),
    }

View file

@ -1,106 +0,0 @@
"""Type definitions and constants for period calculation."""
from __future__ import annotations
from typing import TYPE_CHECKING, NamedTuple
if TYPE_CHECKING:
from datetime import datetime
from custom_components.tibber_prices.const import (
DEFAULT_PRICE_RATING_THRESHOLD_HIGH,
DEFAULT_PRICE_RATING_THRESHOLD_LOW,
DEFAULT_VOLATILITY_THRESHOLD_HIGH,
DEFAULT_VOLATILITY_THRESHOLD_MODERATE,
DEFAULT_VOLATILITY_THRESHOLD_VERY_HIGH,
)
# Quality Gate: Maximum coefficient of variation (CV) allowed within a period
# Periods with internal CV above this are considered too heterogeneous for "best price"
# A 25% CV means the std dev is 25% of the mean - beyond this, prices vary too much
# Example: Period with prices 0.7-0.99 kr has ~15% CV which is acceptable
# Period with prices 0.5-1.0 kr has ~30% CV which would be rejected
PERIOD_MAX_CV = 25.0 # 25% max coefficient of variation within a period
# Cross-Day Extension: Time window constants
# When a period ends late in the day and tomorrow data is available,
# we can extend it past midnight if prices remain favorable
CROSS_DAY_LATE_PERIOD_START_HOUR = 20 # Consider periods starting at 20:00 or later for extension
CROSS_DAY_MAX_EXTENSION_HOUR = 8 # Don't extend beyond 08:00 next day (covers typical night low)
# Cross-Day Supersession: When tomorrow data arrives, late-night periods that are
# worse than early-morning tomorrow periods become obsolete
# A today period is "superseded" if tomorrow has a significantly better alternative
SUPERSESSION_PRICE_IMPROVEMENT_PCT = 10.0 # Tomorrow must be at least 10% cheaper to supersede
# Log indentation levels for visual hierarchy
INDENT_L0 = "" # Top level (calculate_periods_with_relaxation)
INDENT_L1 = " " # Per-day loop
INDENT_L2 = " " # Flex/filter loop (_relax_single_day)
INDENT_L3 = " " # _resolve_period_overlaps function
INDENT_L4 = " " # Period-by-period analysis
INDENT_L5 = " " # Segment details
class TibberPricesPeriodConfig(NamedTuple):
    """Configuration for period calculation."""

    reverse_sort: bool  # True = peak-price (most expensive) mode, False = best-price (cheapest)
    flex: float  # Flex threshold as a decimal (normalized to positive internally; 0.15 = 15%)
    min_distance_from_avg: float  # Minimum distance from daily average (normalized to absolute value)
    min_period_length: int  # Minimum period duration in minutes
    threshold_low: float = DEFAULT_PRICE_RATING_THRESHOLD_LOW  # Price-rating "low" boundary
    threshold_high: float = DEFAULT_PRICE_RATING_THRESHOLD_HIGH  # Price-rating "high" boundary
    threshold_volatility_moderate: float = DEFAULT_VOLATILITY_THRESHOLD_MODERATE  # Volatility class boundary
    threshold_volatility_high: float = DEFAULT_VOLATILITY_THRESHOLD_HIGH  # Volatility class boundary
    threshold_volatility_very_high: float = DEFAULT_VOLATILITY_THRESHOLD_VERY_HIGH  # Volatility class boundary
    level_filter: str | None = None  # "any", "cheap", "expensive", etc. or None
    gap_count: int = 0  # Number of allowed consecutive deviating intervals
class TibberPricesPeriodData(NamedTuple):
    """Data for building a period summary."""

    start_time: datetime  # Period start
    end_time: datetime  # Period end
    period_length: int  # Period length (presumably minutes, matching min_period_length - confirm at call sites)
    period_idx: int  # Index of this period within the result set (base not visible here - confirm at call sites)
    total_periods: int  # Total number of periods in the result set
class TibberPricesPeriodStatistics(NamedTuple):
    """Calculated statistics for a period."""

    aggregated_level: str | None  # Combined price level for the period (None when unavailable)
    aggregated_rating: str | None  # Combined price rating for the period (None when unavailable)
    rating_difference_pct: float | None  # Rating difference in percent (None when unavailable)
    price_mean: float  # Arithmetic mean of interval prices
    price_median: float  # Median of interval prices
    price_min: float  # Lowest interval price
    price_max: float  # Highest interval price
    price_spread: float  # Spread between min and max
    volatility: str  # Volatility classification label
    coefficient_of_variation: float | None  # CV as percentage (e.g., 15.0 for 15%)
    period_price_diff: float | None  # Price difference for the period (reference not visible here - confirm)
    period_price_diff_pct: float | None  # Same difference expressed as a percentage
class TibberPricesThresholdConfig(NamedTuple):
    """Threshold configuration for period calculations."""

    threshold_low: float | None  # Price-rating "low" boundary (None = not configured)
    threshold_high: float | None  # Price-rating "high" boundary (None = not configured)
    threshold_volatility_moderate: float  # Volatility class boundary
    threshold_volatility_high: float  # Volatility class boundary
    threshold_volatility_very_high: float  # Volatility class boundary
    reverse_sort: bool  # True = peak-price mode, False = best-price mode
class TibberPricesIntervalCriteria(NamedTuple):
    """Criteria for checking if an interval qualifies for a period."""

    ref_price: float  # Reference price the interval is compared against
    avg_price: float  # Daily average price
    flex: float  # Flex threshold (positive decimal)
    min_distance_from_avg: float  # Required distance from the average (absolute value)
    reverse_sort: bool  # True = peak-price mode, False = best-price mode

View file

@ -1,822 +0,0 @@
"""
Period calculation logic for the coordinator.
This module handles all period calculation including level filtering,
gap tolerance, and coordination of the period_handlers calculation functions.
"""
from __future__ import annotations
import logging
from typing import TYPE_CHECKING, Any
from custom_components.tibber_prices import const as _const
if TYPE_CHECKING:
from collections.abc import Callable
from custom_components.tibber_prices.coordinator.time_service import TibberPricesTimeService
from homeassistant.config_entries import ConfigEntry
from .helpers import get_intervals_for_day_offsets
from .period_handlers import (
TibberPricesPeriodConfig,
calculate_periods_with_relaxation,
)
_LOGGER = logging.getLogger(__name__)
class TibberPricesPeriodCalculator:
"""Handles period calculations with level filtering and gap tolerance."""
    def __init__(
        self,
        config_entry: ConfigEntry,
        log_prefix: str,
        get_config_override_fn: Callable[[str, str], Any | None] | None = None,
    ) -> None:
        """Initialize the period calculator.

        Args:
            config_entry: Config entry whose options supply period settings.
            log_prefix: Prefix prepended to every message emitted via _log().
            get_config_override_fn: Optional callback (config_key, config_section)
                -> value used for runtime overrides; None disables override lookup.
        """
        self.config_entry = config_entry
        self._log_prefix = log_prefix
        self.time: TibberPricesTimeService  # Set by coordinator before first use
        # Per-instance config cache; populated lazily by get_period_config()
        self._config_cache: dict[str, dict[str, Any]] | None = None
        self._config_cache_valid = False
        self._get_config_override = get_config_override_fn
        # Period calculation cache (keyed by _compute_periods_hash output)
        self._cached_periods: dict[str, Any] | None = None
        self._last_periods_hash: str | None = None
def _get_option(
self,
config_key: str,
config_section: str,
default: Any,
) -> Any:
"""
Get a config option, checking overrides first.
Args:
config_key: The configuration key
config_section: The section in options (e.g., "flexibility_settings")
default: Default value if not set
Returns:
Override value if set, otherwise options value, otherwise default
"""
# Check overrides first
if self._get_config_override is not None:
override = self._get_config_override(config_key, config_section)
if override is not None:
return override
# Fall back to options
section = self.config_entry.options.get(config_section, {})
return section.get(config_key, default)
def _log(self, level: str, message: str, *args: object, **kwargs: object) -> None:
"""Log with calculator-specific prefix."""
prefixed_message = f"{self._log_prefix} {message}"
getattr(_LOGGER, level)(prefixed_message, *args, **kwargs)
def invalidate_config_cache(self) -> None:
"""Invalidate config cache when options change."""
self._config_cache_valid = False
self._config_cache = None
# Also invalidate period calculation cache when config changes
self._cached_periods = None
self._last_periods_hash = None
self._log("debug", "Period config cache and calculation cache invalidated")
    def _compute_periods_hash(self, price_info: dict[str, Any]) -> str:
        """
        Compute hash of price data and config for period calculation caching.

        Only includes data that affects period calculation:
        - All interval timestamps and enriched rating levels (yesterday/today/tomorrow)
        - Period calculation config (flex, min_distance, min_period_length)
        - Level filter overrides

        Note: built on Python's salted hash(), so the value is only stable
        within one process - fine here because the cache is per-instance.

        Returns:
            Hash string for cache key comparison.
        """
        # Get today and tomorrow intervals for hash calculation
        # CRITICAL: Only today+tomorrow needed in hash because:
        # 1. Midnight: "today" startsAt changes -> cache invalidates
        # 2. Tomorrow arrival: "tomorrow" startsAt changes from None -> cache invalidates
        # 3. Yesterday/day-before-yesterday are static (rating_levels don't change retroactively)
        # 4. Using first startsAt as representative (changes -> entire day changed)
        coordinator_data = {"priceInfo": price_info}
        today_intervals = get_intervals_for_day_offsets(coordinator_data, [0])
        tomorrow_intervals = get_intervals_for_day_offsets(coordinator_data, [1])
        # Use first startsAt of each day as representative for entire day's data
        # If day is empty, use None (detects data availability changes)
        today_start = today_intervals[0].get("startsAt") if today_intervals else None
        tomorrow_start = tomorrow_intervals[0].get("startsAt") if tomorrow_intervals else None
        # Get period configs (both best and peak)
        best_config = self.get_period_config(reverse_sort=False)
        peak_config = self.get_period_config(reverse_sort=True)
        # Get level filter overrides from options
        options = self.config_entry.options
        period_settings = options.get("period_settings", {})
        best_level_filter = period_settings.get(_const.CONF_BEST_PRICE_MAX_LEVEL, _const.DEFAULT_BEST_PRICE_MAX_LEVEL)
        peak_level_filter = period_settings.get(_const.CONF_PEAK_PRICE_MIN_LEVEL, _const.DEFAULT_PEAK_PRICE_MIN_LEVEL)
        # Compute hash from all relevant data (configs flattened to tuples so
        # the aggregate is hashable)
        hash_data = (
            today_start,  # Representative for today's data (changes at midnight)
            tomorrow_start,  # Representative for tomorrow's data (changes when data arrives)
            tuple(best_config.items()),
            tuple(peak_config.items()),
            best_level_filter,
            peak_level_filter,
        )
        return str(hash(hash_data))
    def get_period_config(self, *, reverse_sort: bool) -> dict[str, Any]:
        """
        Get period calculation configuration from config options.

        Uses cached config to avoid multiple options.get() calls.
        Cache is invalidated when config_entry.options change or override entities update.

        Args:
            reverse_sort: True for peak-price config, False for best-price config.

        Returns:
            Dict with "flex" (positive decimal fraction), "min_distance_from_avg"
            (absolute value), and "min_period_length" (int).
        """
        cache_key = "peak" if reverse_sort else "best"
        # Return cached config if available
        if self._config_cache_valid and self._config_cache is not None and cache_key in self._config_cache:
            return self._config_cache[cache_key]
        # Build config (cache miss)
        if self._config_cache is None:
            self._config_cache = {}
        # Get config values, checking overrides first
        # CRITICAL: Best/Peak price settings are stored in nested sections:
        # - period_settings: min_period_length, max_level, gap_count
        # - flexibility_settings: flex, min_distance_from_avg
        # Override entities can override any of these values at runtime
        if reverse_sort:
            # Peak price configuration
            flex = self._get_option(
                _const.CONF_PEAK_PRICE_FLEX,
                "flexibility_settings",
                _const.DEFAULT_PEAK_PRICE_FLEX,
            )
            min_distance_from_avg = self._get_option(
                _const.CONF_PEAK_PRICE_MIN_DISTANCE_FROM_AVG,
                "flexibility_settings",
                _const.DEFAULT_PEAK_PRICE_MIN_DISTANCE_FROM_AVG,
            )
            min_period_length = self._get_option(
                _const.CONF_PEAK_PRICE_MIN_PERIOD_LENGTH,
                "period_settings",
                _const.DEFAULT_PEAK_PRICE_MIN_PERIOD_LENGTH,
            )
        else:
            # Best price configuration
            flex = self._get_option(
                _const.CONF_BEST_PRICE_FLEX,
                "flexibility_settings",
                _const.DEFAULT_BEST_PRICE_FLEX,
            )
            min_distance_from_avg = self._get_option(
                _const.CONF_BEST_PRICE_MIN_DISTANCE_FROM_AVG,
                "flexibility_settings",
                _const.DEFAULT_BEST_PRICE_MIN_DISTANCE_FROM_AVG,
            )
            min_period_length = self._get_option(
                _const.CONF_BEST_PRICE_MIN_PERIOD_LENGTH,
                "period_settings",
                _const.DEFAULT_BEST_PRICE_MIN_PERIOD_LENGTH,
            )
        # Convert flex from percentage to decimal (e.g., 5 -> 0.05)
        # CRITICAL: Normalize to absolute value for internal calculations
        # User-facing values use sign convention:
        # - Best price: positive (e.g., +15% above minimum)
        # - Peak price: negative (e.g., -20% below maximum)
        # Internal calculations always use positive values with reverse_sort flag
        try:
            flex = abs(float(flex)) / 100  # Always positive internally
        except (TypeError, ValueError):
            # Malformed user/override value: fall back to the mode's default
            flex = (
                abs(_const.DEFAULT_BEST_PRICE_FLEX) / 100
                if not reverse_sort
                else abs(_const.DEFAULT_PEAK_PRICE_FLEX) / 100
            )
        # CRITICAL: Normalize min_distance_from_avg to absolute value
        # User-facing values use sign convention:
        # - Best price: negative (e.g., -5% below average)
        # - Peak price: positive (e.g., +5% above average)
        # Internal calculations always use positive values with reverse_sort flag
        min_distance_from_avg_normalized = abs(float(min_distance_from_avg))
        config = {
            "flex": flex,
            "min_distance_from_avg": min_distance_from_avg_normalized,
            "min_period_length": int(min_period_length),
        }
        # Cache the result
        self._config_cache[cache_key] = config
        self._config_cache_valid = True
        return config
def should_show_periods(
    self,
    price_info: dict[str, Any],
    *,
    reverse_sort: bool,
    level_override: str | None = None,
) -> bool:
    """
    Decide whether periods should be displayed for today, based solely on the level filter.

    This is a thin delegation to :meth:`check_level_filter`, which performs the
    day-level check "does today contain any qualifying interval sequence?".

    Args:
        price_info: Price information dict with today/yesterday/tomorrow data.
        reverse_sort: False checks the best-price max_level filter;
            True checks the peak-price min_level filter.
        level_override: Optional level filter override ("any" disables filtering).

    Returns:
        True when periods should be shown, False when they are filtered out.

    """
    return self.check_level_filter(price_info, reverse_sort=reverse_sort, override=level_override)
def split_at_gap_clusters(
    self,
    today_intervals: list[dict[str, Any]],
    level_order: int,
    min_period_length: int,
    *,
    reverse_sort: bool,
) -> list[list[dict[str, Any]]]:
    """
    Split intervals into sub-sequences at gap clusters.

    A gap cluster is 2+ consecutive intervals that don't meet the level requirement.
    Splitting at clusters allows recovering usable periods from sequences that would
    otherwise be rejected by the gap-tolerance check.

    Args:
        today_intervals: List of price intervals for today.
        level_order: Required level order from _const.PRICE_LEVEL_MAPPING.
        min_period_length: Minimum number of intervals required for a valid sub-sequence.
        reverse_sort: True for peak price (level >= level_order qualifies),
            False for best price (level <= level_order qualifies).

    Returns:
        List of sub-sequences, each at least min_period_length intervals long.

    """
    sub_sequences = []
    current_sequence = []
    # Counts how many non-qualifying intervals we have seen in a row (0 or 1;
    # the second one in a row triggers a split).
    consecutive_non_qualifying = 0
    for interval in today_intervals:
        # Unknown/missing levels default to "NORMAL" (mapping order 0).
        interval_level = _const.PRICE_LEVEL_MAPPING.get(interval.get("level", "NORMAL"), 0)
        meets_requirement = interval_level >= level_order if reverse_sort else interval_level <= level_order
        if meets_requirement:
            # Qualifying interval - add to current sequence
            current_sequence.append(interval)
            consecutive_non_qualifying = 0
        elif consecutive_non_qualifying == 0:
            # First non-qualifying interval (single gap) - add to current sequence
            # NOTE(review): a single gap is kept inline; if the sequence ends here,
            # the saved sequence may end with this gap interval - confirm intended.
            current_sequence.append(interval)
            consecutive_non_qualifying = 1
        else:
            # Second+ consecutive non-qualifying interval = gap cluster starts
            # Save current sequence if long enough (excluding the first gap we just added)
            if len(current_sequence) - 1 >= min_period_length:
                sub_sequences.append(current_sequence[:-1])  # Exclude the first gap
            # NOTE(review): the counter reset means a third consecutive non-qualifying
            # interval is treated as a fresh "first gap" and can start the next
            # sequence with a leading gap - verify this is the intended behavior.
            current_sequence = []
            consecutive_non_qualifying = 0
    # Don't forget last sequence
    if len(current_sequence) >= min_period_length:
        sub_sequences.append(current_sequence)
    return sub_sequences
def check_short_period_strict(
    self,
    today_intervals: list[dict[str, Any]],
    level_order: int,
    *,
    reverse_sort: bool,
) -> bool:
    """
    Strict filtering for short periods (< 1.5h) without gap tolerance.

    Every interval must meet the level requirement; any deviation disqualifies
    the entire sequence. An empty sequence never qualifies.

    Note: the original implementation had an ``elif interval_level != level_order``
    branch intended to tolerate "exact matches", but that branch was unreachable:
    an interval failing ``meets_requirement`` always differs from ``level_order``
    on the disqualifying side, so the condition was always true. This version is
    behaviorally identical with the dead condition removed.

    Args:
        today_intervals: List of price intervals for today.
        level_order: Required level order from _const.PRICE_LEVEL_MAPPING.
        reverse_sort: True for peak price (level >= level_order qualifies),
            False for best price (level <= level_order qualifies).

    Returns:
        True if all intervals meet the requirement and the list is non-empty,
        False otherwise.

    """
    if not today_intervals:
        return False
    if reverse_sort:
        # Peak price: every interval must be at least level_order.
        return all(
            _const.PRICE_LEVEL_MAPPING.get(interval.get("level", "NORMAL"), 0) >= level_order
            for interval in today_intervals
        )
    # Best price: every interval must be at most level_order.
    return all(
        _const.PRICE_LEVEL_MAPPING.get(interval.get("level", "NORMAL"), 0) <= level_order
        for interval in today_intervals
    )
def check_level_filter_with_gaps(
    self,
    today_intervals: list[dict[str, Any]],
    level_order: int,
    max_gap_count: int,
    *,
    reverse_sort: bool,
) -> bool:
    """
    Check if intervals meet level requirements with gap tolerance and minimum distance.

    A "gap" is an interval that deviates by exactly 1 level step.
    For best price: CHEAP allows NORMAL as gap (but not EXPENSIVE).
    For peak price: EXPENSIVE allows NORMAL as gap (but not CHEAP).

    Gap tolerance is only applied to periods with at least
    _const.MIN_INTERVALS_FOR_GAP_TOLERANCE intervals (1.5h). Shorter periods use
    strict filtering (zero tolerance). Between gaps, there must be a minimum number
    of "good" intervals to prevent periods that are mostly interrupted by gaps.

    If the whole sequence fails, the sequence is split at gap clusters
    (via split_at_gap_clusters) and each sub-sequence is retried.

    Args:
        today_intervals: List of price intervals for today.
        level_order: Required level order from _const.PRICE_LEVEL_MAPPING.
        max_gap_count: Maximum total gaps allowed.
        reverse_sort: True for peak price, False for best price.

    Returns:
        True if any qualifying sequence exists, False otherwise.

    """
    if not today_intervals:
        return False
    interval_count = len(today_intervals)
    # Periods shorter than _const.MIN_INTERVALS_FOR_GAP_TOLERANCE (1.5h) use strict filtering
    if interval_count < _const.MIN_INTERVALS_FOR_GAP_TOLERANCE:
        period_type = "peak" if reverse_sort else "best"
        self._log(
            "debug",
            "Using strict filtering for short %s period (%d intervals < %d min required for gap tolerance)",
            period_type,
            interval_count,
            _const.MIN_INTERVALS_FOR_GAP_TOLERANCE,
        )
        return self.check_short_period_strict(today_intervals, level_order, reverse_sort=reverse_sort)
    # Try normal gap tolerance check first
    if self.check_sequence_with_gap_tolerance(
        today_intervals, level_order, max_gap_count, reverse_sort=reverse_sort
    ):
        return True
    # Normal check failed - try splitting at gap clusters as fallback
    # Get minimum period length from config (convert minutes to intervals)
    # NOTE: min period length is stored in the nested 'period_settings' options section.
    period_settings = self.config_entry.options.get("period_settings", {})
    if reverse_sort:
        min_period_minutes = period_settings.get(
            _const.CONF_PEAK_PRICE_MIN_PERIOD_LENGTH,
            _const.DEFAULT_PEAK_PRICE_MIN_PERIOD_LENGTH,
        )
    else:
        min_period_minutes = period_settings.get(
            _const.CONF_BEST_PRICE_MIN_PERIOD_LENGTH,
            _const.DEFAULT_BEST_PRICE_MIN_PERIOD_LENGTH,
        )
    min_period_intervals = self.time.minutes_to_intervals(min_period_minutes)
    sub_sequences = self.split_at_gap_clusters(
        today_intervals,
        level_order,
        min_period_intervals,
        reverse_sort=reverse_sort,
    )
    # Check if ANY sub-sequence passes gap tolerance
    for sub_seq in sub_sequences:
        if self.check_sequence_with_gap_tolerance(sub_seq, level_order, max_gap_count, reverse_sort=reverse_sort):
            return True
    return False
def check_sequence_with_gap_tolerance(
    self,
    intervals: list[dict[str, Any]],
    level_order: int,
    max_gap_count: int,
    *,
    reverse_sort: bool,
) -> bool:
    """
    Check if a single interval sequence passes gap tolerance requirements.

    This is the core gap tolerance logic extracted for reuse with sub-sequences.

    Precondition: max_gap_count must be > 0 - the min-distance computation divides
    by it. check_level_filter routes max_gap_count == 0 to a simple ANY check and
    never calls this method with 0.

    Args:
        intervals: List of price intervals to check.
        level_order: Required level order from _const.PRICE_LEVEL_MAPPING.
        max_gap_count: Maximum total gaps allowed (must be > 0).
        reverse_sort: True for peak price, False for best price.

    Returns:
        True if sequence meets all gap tolerance requirements, False otherwise.

    """
    if not intervals:
        return False
    interval_count = len(intervals)
    # Calculate minimum distance between gaps dynamically.
    # Shorter periods require relatively larger distances.
    # Longer periods allow gaps closer together.
    # Distance is never less than 2 intervals between gaps.
    min_distance_between_gaps = max(2, (interval_count // max_gap_count) // 2)
    # Limit total gaps to max 25% of period length to prevent too many outliers.
    # This ensures periods remain predominantly "good" even when long.
    effective_max_gaps = min(max_gap_count, interval_count // 4)
    gap_count = 0
    # Number of qualifying intervals seen since the last gap (enforces spacing).
    consecutive_good_count = 0
    has_qualifying_interval = False
    for interval in intervals:
        interval_level = _const.PRICE_LEVEL_MAPPING.get(interval.get("level", "NORMAL"), 0)
        # Check if interval meets the strict requirement
        meets_requirement = interval_level >= level_order if reverse_sort else interval_level <= level_order
        if meets_requirement:
            has_qualifying_interval = True
            consecutive_good_count += 1
            continue
        # Check if this is a tolerable gap (exactly 1 step deviation)
        is_tolerable_gap = interval_level == level_order - 1 if reverse_sort else interval_level == level_order + 1
        if is_tolerable_gap:
            # If we already had gaps, check minimum distance
            if gap_count > 0 and consecutive_good_count < min_distance_between_gaps:
                # Not enough "good" intervals between gaps
                return False
            gap_count += 1
            if gap_count > effective_max_gaps:
                return False
            # Reset counter for next gap
            consecutive_good_count = 0
        else:
            # Too far from required level (more than 1 step deviation)
            return False
    return has_qualifying_interval
def check_level_filter(
    self,
    price_info: dict[str, Any],
    *,
    reverse_sort: bool,
    override: str | None = None,
) -> bool:
    """
    Check whether today has any interval sequence meeting the level requirement.

    Gap tolerance allows a configurable number of intervals within a qualifying
    sequence to deviate by one level step (e.g. CHEAP tolerates NORMAL, but never
    EXPENSIVE). With a gap tolerance of 0, a simple ANY check over today's
    intervals is used instead (backwards compatible).

    Args:
        price_info: Price information dict with today data.
        reverse_sort: False (best_price) checks max_level (upper bound filter);
            True (peak_price) checks min_level (lower bound filter).
        override: Optional override value (e.g. "any" to disable the filter).

    Returns:
        True if ANY sequence of intervals meets the level requirement
        (considering gap tolerance), False otherwise.

    """
    # Level filter and gap tolerance both live in the nested 'period_settings' section.
    settings = self.config_entry.options.get("period_settings", {})

    # Resolve the effective level filter: explicit override wins, otherwise the
    # configured min_level (peak) or max_level (best).
    if override is not None:
        level_config = override
    else:
        level_key, level_default = (
            (_const.CONF_PEAK_PRICE_MIN_LEVEL, _const.DEFAULT_PEAK_PRICE_MIN_LEVEL)
            if reverse_sort
            else (_const.CONF_BEST_PRICE_MAX_LEVEL, _const.DEFAULT_BEST_PRICE_MAX_LEVEL)
        )
        level_config = settings.get(level_key, level_default)

    # "any" disables level filtering entirely.
    if level_config == "any":
        return True

    # Build a minimal coordinator_data structure and pull today's flat interval list.
    today_intervals = get_intervals_for_day_offsets({"priceInfo": price_info}, [0])
    if not today_intervals:
        # Without data there is nothing to filter on.
        return True

    # Gap tolerance configuration (same nested section).
    gap_key, gap_default = (
        (_const.CONF_PEAK_PRICE_MAX_LEVEL_GAP_COUNT, _const.DEFAULT_PEAK_PRICE_MAX_LEVEL_GAP_COUNT)
        if reverse_sort
        else (_const.CONF_BEST_PRICE_MAX_LEVEL_GAP_COUNT, _const.DEFAULT_BEST_PRICE_MAX_LEVEL_GAP_COUNT)
    )
    max_gap_count = settings.get(gap_key, gap_default)

    # level_config comes lowercase from the selector; the mapping keys are uppercase.
    level_order = _const.PRICE_LEVEL_MAPPING.get(level_config.upper(), 0)

    if max_gap_count == 0:
        # Zero tolerance: a single qualifying interval anywhere today suffices.
        interval_orders = (
            _const.PRICE_LEVEL_MAPPING.get(interval.get("level", "NORMAL"), 0) for interval in today_intervals
        )
        if reverse_sort:
            # Peak price: show if ANY interval is expensive enough (level >= min_level).
            return any(order >= level_order for order in interval_orders)
        # Best price: show if ANY interval is cheap enough (level <= max_level).
        return any(order <= level_order for order in interval_orders)

    # Gap-tolerant sequence check.
    return self.check_level_filter_with_gaps(
        today_intervals,
        level_order,
        max_gap_count,
        reverse_sort=reverse_sort,
    )
def calculate_periods_for_price_info(
    self,
    price_info: dict[str, Any],
) -> dict[str, Any]:
    """
    Calculate periods (best price and peak price) for the given price info.

    Applies volatility and level filtering based on user configuration.
    If filters don't match, returns empty period lists.

    Uses hash-based caching to avoid recalculating periods when price data
    and configuration haven't changed (~70% performance improvement).

    Args:
        price_info: Price information dict with per-day interval data.

    Returns:
        Dict with keys "best_price" and "peak_price", each containing
        "periods", "intervals", and "metadata" entries.

    """
    # Check if we can use cached periods
    current_hash = self._compute_periods_hash(price_info)
    if self._cached_periods is not None and self._last_periods_hash == current_hash:
        self._log("debug", "Using cached period calculation results (hash match)")
        return self._cached_periods
    self._log("debug", "Calculating periods (cache miss or hash mismatch)")
    # Get all intervals at once (day before yesterday + yesterday + today + tomorrow)
    # CRITICAL: 4 days ensure stable historical period calculations
    # (periods calculated today for yesterday match periods calculated yesterday)
    coordinator_data = {"priceInfo": price_info}
    all_prices = get_intervals_for_day_offsets(coordinator_data, [-2, -1, 0, 1])
    # Get rating thresholds from config (flat in options, not in sections)
    # CRITICAL: Price rating thresholds are stored FLAT in options (no sections)
    threshold_low = self.config_entry.options.get(
        _const.CONF_PRICE_RATING_THRESHOLD_LOW,
        _const.DEFAULT_PRICE_RATING_THRESHOLD_LOW,
    )
    threshold_high = self.config_entry.options.get(
        _const.CONF_PRICE_RATING_THRESHOLD_HIGH,
        _const.DEFAULT_PRICE_RATING_THRESHOLD_HIGH,
    )
    # Get volatility thresholds from config (flat in options, not in sections)
    # CRITICAL: Volatility thresholds are stored FLAT in options (no sections)
    threshold_volatility_moderate = self.config_entry.options.get(
        _const.CONF_VOLATILITY_THRESHOLD_MODERATE,
        _const.DEFAULT_VOLATILITY_THRESHOLD_MODERATE,
    )
    threshold_volatility_high = self.config_entry.options.get(
        _const.CONF_VOLATILITY_THRESHOLD_HIGH,
        _const.DEFAULT_VOLATILITY_THRESHOLD_HIGH,
    )
    threshold_volatility_very_high = self.config_entry.options.get(
        _const.CONF_VOLATILITY_THRESHOLD_VERY_HIGH,
        _const.DEFAULT_VOLATILITY_THRESHOLD_VERY_HIGH,
    )
    # Get relaxation configuration for best price
    # CRITICAL: Relaxation settings are stored in nested section 'relaxation_and_target_periods'
    # Override entities can override any of these values at runtime
    enable_relaxation_best = self._get_option(
        _const.CONF_ENABLE_MIN_PERIODS_BEST,
        "relaxation_and_target_periods",
        _const.DEFAULT_ENABLE_MIN_PERIODS_BEST,
    )
    # Check if best price periods should be shown
    # If relaxation is enabled, always calculate (relaxation will try "any" filter)
    # If relaxation is disabled, apply level filter check
    if enable_relaxation_best:
        show_best_price = bool(all_prices)
    else:
        show_best_price = self.should_show_periods(price_info, reverse_sort=False) if all_prices else False
    min_periods_best = self._get_option(
        _const.CONF_MIN_PERIODS_BEST,
        "relaxation_and_target_periods",
        _const.DEFAULT_MIN_PERIODS_BEST,
    )
    relaxation_attempts_best = self._get_option(
        _const.CONF_RELAXATION_ATTEMPTS_BEST,
        "relaxation_and_target_periods",
        _const.DEFAULT_RELAXATION_ATTEMPTS_BEST,
    )
    # Calculate best price periods (or return empty if filtered)
    if show_best_price:
        best_config = self.get_period_config(reverse_sort=False)
        # Get level filter configuration from period_settings section
        # CRITICAL: max_level and gap_count are stored in nested section 'period_settings'
        max_level_best = self._get_option(
            _const.CONF_BEST_PRICE_MAX_LEVEL,
            "period_settings",
            _const.DEFAULT_BEST_PRICE_MAX_LEVEL,
        )
        gap_count_best = self._get_option(
            _const.CONF_BEST_PRICE_MAX_LEVEL_GAP_COUNT,
            "period_settings",
            _const.DEFAULT_BEST_PRICE_MAX_LEVEL_GAP_COUNT,
        )
        best_period_config = TibberPricesPeriodConfig(
            reverse_sort=False,
            flex=best_config["flex"],
            min_distance_from_avg=best_config["min_distance_from_avg"],
            min_period_length=best_config["min_period_length"],
            threshold_low=threshold_low,
            threshold_high=threshold_high,
            threshold_volatility_moderate=threshold_volatility_moderate,
            threshold_volatility_high=threshold_volatility_high,
            threshold_volatility_very_high=threshold_volatility_very_high,
            level_filter=max_level_best,
            gap_count=gap_count_best,
        )
        best_periods = calculate_periods_with_relaxation(
            all_prices,
            config=best_period_config,
            enable_relaxation=enable_relaxation_best,
            min_periods=min_periods_best,
            max_relaxation_attempts=relaxation_attempts_best,
            # Callback lets relaxation re-evaluate the level filter per attempt.
            should_show_callback=lambda lvl: self.should_show_periods(
                price_info,
                reverse_sort=False,
                level_override=lvl,
            ),
            time=self.time,
            config_entry=self.config_entry,
        )
    else:
        # Filtered out: empty-but-well-formed result so sensors can render safely.
        best_periods = {
            "periods": [],
            "intervals": [],
            "metadata": {
                "total_intervals": 0,
                "total_periods": 0,
                "config": {},
                "relaxation": {"relaxation_active": False, "relaxation_attempted": False},
            },
        }
    # Get relaxation configuration for peak price
    # CRITICAL: Relaxation settings are stored in nested section 'relaxation_and_target_periods'
    # Override entities can override any of these values at runtime
    enable_relaxation_peak = self._get_option(
        _const.CONF_ENABLE_MIN_PERIODS_PEAK,
        "relaxation_and_target_periods",
        _const.DEFAULT_ENABLE_MIN_PERIODS_PEAK,
    )
    # Check if peak price periods should be shown
    # If relaxation is enabled, always calculate (relaxation will try "any" filter)
    # If relaxation is disabled, apply level filter check
    if enable_relaxation_peak:
        show_peak_price = bool(all_prices)
    else:
        show_peak_price = self.should_show_periods(price_info, reverse_sort=True) if all_prices else False
    min_periods_peak = self._get_option(
        _const.CONF_MIN_PERIODS_PEAK,
        "relaxation_and_target_periods",
        _const.DEFAULT_MIN_PERIODS_PEAK,
    )
    relaxation_attempts_peak = self._get_option(
        _const.CONF_RELAXATION_ATTEMPTS_PEAK,
        "relaxation_and_target_periods",
        _const.DEFAULT_RELAXATION_ATTEMPTS_PEAK,
    )
    # Calculate peak price periods (or return empty if filtered)
    if show_peak_price:
        peak_config = self.get_period_config(reverse_sort=True)
        # Get level filter configuration from period_settings section
        # CRITICAL: min_level and gap_count are stored in nested section 'period_settings'
        min_level_peak = self._get_option(
            _const.CONF_PEAK_PRICE_MIN_LEVEL,
            "period_settings",
            _const.DEFAULT_PEAK_PRICE_MIN_LEVEL,
        )
        gap_count_peak = self._get_option(
            _const.CONF_PEAK_PRICE_MAX_LEVEL_GAP_COUNT,
            "period_settings",
            _const.DEFAULT_PEAK_PRICE_MAX_LEVEL_GAP_COUNT,
        )
        peak_period_config = TibberPricesPeriodConfig(
            reverse_sort=True,
            flex=peak_config["flex"],
            min_distance_from_avg=peak_config["min_distance_from_avg"],
            min_period_length=peak_config["min_period_length"],
            threshold_low=threshold_low,
            threshold_high=threshold_high,
            threshold_volatility_moderate=threshold_volatility_moderate,
            threshold_volatility_high=threshold_volatility_high,
            threshold_volatility_very_high=threshold_volatility_very_high,
            level_filter=min_level_peak,
            gap_count=gap_count_peak,
        )
        peak_periods = calculate_periods_with_relaxation(
            all_prices,
            config=peak_period_config,
            enable_relaxation=enable_relaxation_peak,
            min_periods=min_periods_peak,
            max_relaxation_attempts=relaxation_attempts_peak,
            # Callback lets relaxation re-evaluate the level filter per attempt.
            should_show_callback=lambda lvl: self.should_show_periods(
                price_info,
                reverse_sort=True,
                level_override=lvl,
            ),
            time=self.time,
            config_entry=self.config_entry,
        )
    else:
        # Filtered out: empty-but-well-formed result so sensors can render safely.
        peak_periods = {
            "periods": [],
            "intervals": [],
            "metadata": {
                "total_intervals": 0,
                "total_periods": 0,
                "config": {},
                "relaxation": {"relaxation_active": False, "relaxation_attempted": False},
            },
        }
    result = {
        "best_price": best_periods,
        "peak_price": peak_periods,
    }
    # Cache the result
    self._cached_periods = result
    self._last_periods_hash = current_hash
    return result

View file

@ -1,631 +0,0 @@
"""
Price data management for the coordinator.
This module manages all price-related data for the Tibber Prices integration:
**User Data** (fetched directly via API):
- Home metadata (name, address, timezone)
- Account info (subscription status)
- Currency settings
- Refreshed daily (24h interval)
**Price Data** (fetched via IntervalPool):
- Quarter-hourly price intervals
- Yesterday/today/tomorrow coverage
- The IntervalPool handles actual API fetching, deduplication, and caching
- This manager coordinates the data flow and user data refresh
Data flow:
Tibber API → IntervalPool → PriceDataManager → Coordinator → Sensors
(actual fetching) (orchestration + user data)
Note: Price data is NOT cached in this module - IntervalPool is the single
source of truth. This module only caches user_data for daily refresh cycle.
"""
from __future__ import annotations
import logging
from datetime import timedelta
from typing import TYPE_CHECKING, Any
from custom_components.tibber_prices.api import (
TibberPricesApiClientAuthenticationError,
TibberPricesApiClientCommunicationError,
TibberPricesApiClientError,
)
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import UpdateFailed
from . import cache, helpers
if TYPE_CHECKING:
from collections.abc import Callable
from datetime import datetime
from custom_components.tibber_prices.api import TibberPricesApiClient
from custom_components.tibber_prices.interval_pool import TibberPricesIntervalPool
from .time_service import TibberPricesTimeService
_LOGGER = logging.getLogger(__name__)
# Hour when Tibber publishes tomorrow's prices (around 13:00 local time)
# Before this hour, requesting tomorrow data will always fail → wasted API call
TOMORROW_DATA_AVAILABLE_HOUR = 13
class TibberPricesPriceDataManager:
"""
Manages price and user data for the coordinator.
Responsibilities:
- User data: Fetches directly via API, validates, caches with persistence
- Price data: Coordinates with IntervalPool (which does actual API fetching)
- Cache management: Loads/stores both data types to HA persistent storage
- Update decisions: Determines when fresh data is needed
Note: Despite the name, this class does NOT do the actual price fetching.
The IntervalPool handles API calls, deduplication, and interval management.
This class orchestrates WHEN to fetch and processes the results.
"""
def __init__(  # noqa: PLR0913
    self,
    api: TibberPricesApiClient,
    store: Any,
    log_prefix: str,
    user_update_interval: timedelta,
    time: TibberPricesTimeService,
    home_id: str,
    interval_pool: TibberPricesIntervalPool,
) -> None:
    """
    Initialize the price data manager.

    Args:
        api: API client for direct requests (user data only).
        store: Home Assistant storage for persistence.
        log_prefix: Prefix for log messages (e.g., "[Home Name]").
        user_update_interval: How often to refresh user data (default: 1 day).
        time: TimeService for time operations.
        home_id: Home ID this manager is responsible for.
        interval_pool: IntervalPool for price data (handles actual fetching).

    """
    self.api = api
    self._store = store
    self._log_prefix = log_prefix
    self._user_update_interval = user_update_interval
    self.time: TibberPricesTimeService = time
    self.home_id = home_id
    self._interval_pool = interval_pool
    # Cached data (user data only - price data is in IntervalPool)
    # _cached_user_data: last validated viewer payload, or None before first fetch.
    self._cached_user_data: dict[str, Any] | None = None
    # _last_user_update: timestamp of the last successful user-data refresh.
    self._last_user_update: datetime | None = None
def _log(self, level: str, message: str, *args: object, **kwargs: object) -> None:
    """Emit a log record at ``level`` with this manager's prefix prepended to the message."""
    getattr(_LOGGER, level)(f"{self._log_prefix} {message}", *args, **kwargs)
async def load_cache(self) -> None:
    """Restore cached user data from storage (price data lives in the IntervalPool)."""
    stored = await cache.load_cache(self._store, self._log_prefix, time=self.time)
    self._cached_user_data = stored.user_data
    self._last_user_update = stored.last_user_update
def should_fetch_tomorrow_data(
    self,
    current_price_info: list[dict[str, Any]] | None,
) -> bool:
    """
    Determine if tomorrow's data should be requested from the API.

    This is the key intelligence that prevents API spam:
    - Tibber publishes tomorrow's prices around 13:00 each day
    - Before 13:00, requesting tomorrow data will always fail → wasted API call
    - If we already have tomorrow data, no need to request it again

    The decision logic:
    1. Before 13:00 local time → Don't fetch (data not available yet)
    2. After 13:00 AND tomorrow data already present → Don't fetch (already have it)
    3. After 13:00 AND tomorrow data missing → Fetch (data should be available)

    Args:
        current_price_info: List of price intervals from current coordinator data.
            Used to check if tomorrow data already exists.

    Returns:
        True if tomorrow data should be requested, False otherwise.

    """
    local_now = self.time.as_local(self.time.now())

    # Case 1: too early - Tibber has not published tomorrow's prices yet.
    if local_now.hour < TOMORROW_DATA_AVAILABLE_HOUR:
        self._log("debug", "Before %d:00 - not requesting tomorrow data", TOMORROW_DATA_AVAILABLE_HOUR)
        return False

    # Case 3a: no coordinator data at all - request after the publish hour.
    if not current_price_info:
        self._log(
            "debug", "After %d:00 with no current data - will request tomorrow data", TOMORROW_DATA_AVAILABLE_HOUR
        )
        return True

    # Case 2: tomorrow already covered - skip the request.
    if self.has_tomorrow_data(current_price_info):
        self._log(
            "debug", "After %d:00 but already have tomorrow data - not requesting", TOMORROW_DATA_AVAILABLE_HOUR
        )
        return False

    # Case 3b: publish hour passed and tomorrow is missing - fetch it.
    self._log("debug", "After %d:00 and tomorrow data missing - will request", TOMORROW_DATA_AVAILABLE_HOUR)
    return True
async def store_cache(self, last_midnight_check: datetime | None = None) -> None:
    """Persist user metadata to HA storage (price data is owned by the IntervalPool)."""
    await cache.save_cache(
        self._store,
        cache.TibberPricesCacheData(
            user_data=self._cached_user_data,
            last_user_update=self._last_user_update,
            last_midnight_check=last_midnight_check,
        ),
        self._log_prefix,
    )
def _validate_user_data(self, user_data: dict, home_id: str) -> bool:  # noqa: PLR0911
    """
    Validate user data completeness.

    Rejects incomplete/invalid data from API to prevent caching temporary errors.
    Currency information is critical - if missing, we cannot safely calculate prices.
    The currency is nested in currentSubscription.priceInfo.current.currency.

    Args:
        user_data: User data dict from API.
        home_id: Home ID to validate against.

    Returns:
        True if data is valid and complete, False otherwise.

    """
    # Top-level structure checks: payload, viewer object, homes list.
    if not user_data:
        self._log("warning", "User data validation failed: Empty data")
        return False
    viewer = user_data.get("viewer")
    if not viewer or not isinstance(viewer, dict):
        self._log("warning", "User data validation failed: Missing or invalid viewer")
        return False
    homes = viewer.get("homes")
    if not homes or not isinstance(homes, list) or len(homes) == 0:
        self._log("warning", "User data validation failed: No homes found")
        return False

    # Locate the first home matching our ID.
    home = next((candidate for candidate in homes if candidate.get("id") == home_id), None)
    if home is None:
        self._log("warning", "User data validation failed: Home %s not found in homes list", home_id)
        return False

    # Timezone is required for cursor calculation.
    if not home.get("timeZone"):
        self._log("warning", "User data validation failed: Home %s missing timezone", home_id)
        return False

    # Walk the nested subscription structure; currency is REQUIRED.
    subscription = home.get("currentSubscription")
    if not subscription:
        self._log(
            "warning",
            "User data validation failed: Home %s has no active subscription",
            home_id,
        )
        return False
    price_info = subscription.get("priceInfo")
    if not price_info:
        self._log(
            "warning",
            "User data validation failed: Home %s subscription has no priceInfo",
            home_id,
        )
        return False
    current = price_info.get("current")
    if not current:
        self._log(
            "warning",
            "User data validation failed: Home %s priceInfo has no current data",
            home_id,
        )
        return False
    if not current.get("currency"):
        self._log(
            "warning",
            "User data validation failed: Home %s has no currency",
            home_id,
        )
        return False

    self._log("debug", "User data validation passed for home %s", home_id)
    return True
async def update_user_data_if_needed(self, current_time: datetime) -> bool:
    """
    Update user data if needed (daily check).

    Only accepts complete and valid data. If API returns incomplete data
    (e.g., during maintenance), keeps existing cached data and retries later.

    Args:
        current_time: Time used both for the staleness check and as the new
            refresh timestamp on success.

    Returns:
        True if user data was updated, False otherwise.

    """
    # Guard: skip entirely while the cached data is still fresh.
    is_stale = (
        self._last_user_update is None or current_time - self._last_user_update >= self._user_update_interval
    )
    if not is_stale:
        return False
    try:
        self._log("debug", "Updating user data")
        fresh_data = await self.api.async_get_viewer_details()
        # Validate before caching - never overwrite good data with a broken payload.
        if not self._validate_user_data(fresh_data, self.home_id):
            self._log(
                "warning",
                "Rejecting incomplete user data from API - keeping existing cached data",
            )
            return False  # Keep existing data, don't update timestamp
        self._cached_user_data = fresh_data
        self._last_user_update = current_time
        self._log("debug", "User data updated successfully")
    except (
        TibberPricesApiClientError,
        TibberPricesApiClientCommunicationError,
    ) as ex:
        self._log("warning", "Failed to update user data: %s", ex)
        return False  # Update failed
    return True  # User data was updated
async def fetch_home_data(
    self,
    home_id: str,
    current_time: datetime,
    *,
    include_tomorrow: bool = True,
) -> tuple[dict[str, Any], bool]:
    """
    Fetch data for a single home via pool.

    Ensures user data is available first (needed for timezone-aware cursor
    calculation and currency extraction), then delegates price retrieval to
    the IntervalPool.

    Args:
        home_id: Home ID to fetch data for.
        current_time: Current time for timestamp in result.
        include_tomorrow: If True, request tomorrow's data too. If False,
            only request up to end of today.

    Returns:
        Tuple of (data_dict, api_called):
        - data_dict: Dictionary with timestamp, home_id, price_info, currency.
        - api_called: True if API was called to fetch missing data.

    Raises:
        TibberPricesApiClientError: If required user data cannot be fetched
            or is incomplete.

    """
    if not home_id:
        # Degenerate call: return an empty, well-formed payload instead of raising.
        # NOTE(review): "EUR" here is a fallback placeholder currency - confirm callers
        # treat this empty result as unusable.
        self._log("warning", "No home ID provided - cannot fetch price data")
        return (
            {
                "timestamp": current_time,
                "home_id": "",
                "price_info": [],
                "currency": "EUR",
            },
            False,  # No API call made
        )
    # Ensure we have user_data before fetching price data
    # This is critical for timezone-aware cursor calculation
    if not self._cached_user_data:
        self._log("info", "User data not cached, fetching before price data")
        try:
            user_data = await self.api.async_get_viewer_details()
            # Validate data before accepting it (especially on initial setup)
            if not self._validate_user_data(user_data, self.home_id):
                msg = "Received incomplete user data from API - cannot proceed with price fetching"
                self._log("error", msg)
                raise TibberPricesApiClientError(msg)  # noqa: TRY301
            self._cached_user_data = user_data
            self._last_user_update = current_time
        except (
            TibberPricesApiClientError,
            TibberPricesApiClientCommunicationError,
        ) as ex:
            # Re-wrap so callers see a single exception type for setup failures.
            msg = f"Failed to fetch user data (required for price fetching): {ex}"
            self._log("error", msg)
            raise TibberPricesApiClientError(msg) from ex
    # At this point, _cached_user_data is guaranteed to be not None (checked above)
    # Defensive re-check kept for type narrowing and safety.
    if not self._cached_user_data:
        msg = "User data unexpectedly None after fetch attempt"
        raise TibberPricesApiClientError(msg)
    # Retrieve price data via IntervalPool (single source of truth)
    price_info, api_called = await self._fetch_via_pool(home_id, include_tomorrow=include_tomorrow)
    # Extract currency for this home from user_data
    currency = self._get_currency_for_home(home_id)
    self._log(
        "debug",
        "Successfully fetched data for home %s (%d intervals, api_called=%s)",
        home_id,
        len(price_info),
        api_called,
    )
    return (
        {
            "timestamp": current_time,
            "home_id": home_id,
            "price_info": price_info,
            "currency": currency,
        },
        api_called,
    )
async def _fetch_via_pool(
    self,
    home_id: str,
    *,
    include_tomorrow: bool = True,
) -> tuple[list[dict[str, Any]], bool]:
    """
    Retrieve price data via IntervalPool.

    The IntervalPool is the single source of truth for price data: it performs
    the actual Tibber API calls, deduplicates, and caches. This method only
    delegates to the Pool's get_sensor_data(), which returns all relevant
    intervals for sensor display.

    Args:
        home_id: Home ID (currently unused, Pool knows its home).
        include_tomorrow: If True, request tomorrow's data too. If False,
            only request up to end of today, which prevents API spam
            before 13:00 when Tibber has no tomorrow data yet.

    Returns:
        Tuple of (intervals, api_called):
        - intervals: List of price interval dicts.
        - api_called: True if API was called to fetch missing data.

    """
    # fetch_home_data() guarantees user_data; this guard narrows the type.
    if self._cached_user_data is None:
        return [], False  # No data, no API call
    self._log(
        "debug",
        "Retrieving price data for home %s via interval pool (include_tomorrow=%s)",
        home_id,
        include_tomorrow,
    )
    return await self._interval_pool.get_sensor_data(
        api_client=self.api,
        user_data=self._cached_user_data,
        include_tomorrow=include_tomorrow,
    )
def _get_currency_for_home(self, home_id: str) -> str:
"""
Get currency for a specific home from cached user_data.
Note: The cached user_data is validated before storage, so if we have
cached data it should contain valid currency. This method extracts
the currency from the nested structure.
Returns:
Currency code (e.g., "EUR", "NOK", "SEK").
Raises:
TibberPricesApiClientError: If currency cannot be determined.
"""
if not self._cached_user_data:
msg = "No user data cached - cannot determine currency"
self._log("error", msg)
raise TibberPricesApiClientError(msg)
viewer = self._cached_user_data.get("viewer", {})
homes = viewer.get("homes", [])
for home in homes:
if home.get("id") == home_id:
# Extract currency from nested structure
# Use 'or {}' to handle None values (homes without active subscription)
subscription = home.get("currentSubscription") or {}
price_info = subscription.get("priceInfo") or {}
current = price_info.get("current") or {}
currency = current.get("currency")
if not currency:
# This should not happen if validation worked correctly
msg = f"Home {home_id} has no active subscription - currency unavailable"
self._log("error", msg)
raise TibberPricesApiClientError(msg)
self._log("debug", "Extracted currency %s for home %s", currency, home_id)
return currency
# Home not found in cached data - data validation should have caught this
msg = f"Home {home_id} not found in user data - data validation failed"
self._log("error", msg)
raise TibberPricesApiClientError(msg)
def _check_home_exists(self, home_id: str) -> bool:
"""
Check if a home ID exists in cached user data.
Args:
home_id: The home ID to check.
Returns:
True if home exists, False otherwise.
"""
if not self._cached_user_data:
# No user data yet - assume home exists (will be checked on next update)
return True
viewer = self._cached_user_data.get("viewer", {})
homes = viewer.get("homes", [])
return any(home.get("id") == home_id for home in homes)
async def handle_main_entry_update(
    self,
    current_time: datetime,
    home_id: str,
    transform_fn: Callable[[dict[str, Any]], dict[str, Any]],
    *,
    current_price_info: list[dict[str, Any]] | None = None,
) -> tuple[dict[str, Any], bool]:
    """
    Handle update for main entry - fetch data for this home.

    The IntervalPool is the single source of truth for price data:
    - It handles API fetching, deduplication, and caching internally
    - We decide WHEN to fetch tomorrow data (after 13:00, if not already present)
    - This prevents API spam before 13:00 when Tibber doesn't have tomorrow data

    This method:
    1. Updates user data if needed (daily)
    2. Determines if tomorrow data should be requested
    3. Fetches price data via IntervalPool
    4. Transforms result for coordinator

    Args:
        current_time: Current time for update decisions.
        home_id: Home ID to fetch data for.
        transform_fn: Function to transform raw data for coordinator.
        current_price_info: Current price intervals (from coordinator.data["priceInfo"]).
            Used to check if tomorrow data already exists.

    Returns:
        Tuple of (transformed_data, api_called):
        - transformed_data: Transformed data dict for coordinator. Contains the
          special key "_home_not_found" set to True when the home has vanished
          from the Tibber account.
        - api_called: True if API was called to fetch missing data.
    """
    # Update user data if needed (daily check)
    user_data_updated = await self.update_user_data_if_needed(current_time)
    # Check if this home still exists in user data after update.
    # This detects when a home was removed from the Tibber account.
    home_exists = self._check_home_exists(home_id)
    if not home_exists:
        self._log("warning", "Home ID %s not found in Tibber account", home_id)
        # Return a special marker in the result that coordinator can check.
        # transform_fn receives an empty dict so its output shape stays valid.
        result = transform_fn({})
        result["_home_not_found"] = True  # Special marker for coordinator
        return result, False  # No API call made (home doesn't exist)
    # Determine if we should request tomorrow data
    include_tomorrow = self.should_fetch_tomorrow_data(current_price_info)
    # Fetch price data via IntervalPool
    self._log(
        "debug",
        "Fetching price data for home %s via interval pool (include_tomorrow=%s)",
        home_id,
        include_tomorrow,
    )
    raw_data, api_called = await self.fetch_home_data(home_id, current_time, include_tomorrow=include_tomorrow)
    # Parse timestamps immediately after fetch so downstream code sees
    # datetime objects instead of ISO strings.
    raw_data = helpers.parse_all_timestamps(raw_data, time=self.time)
    # Store user data cache (price data persisted by IntervalPool); only
    # persist when the daily refresh actually produced new user data.
    if user_data_updated:
        await self.store_cache()
    # Transform for main entry
    return transform_fn(raw_data), api_called
async def handle_api_error(
    self,
    error: Exception,
) -> None:
    """
    Translate API errors into the exceptions Home Assistant expects.

    Authentication failures become ConfigEntryAuthFailed (triggers reauth);
    everything else becomes UpdateFailed (coordinator retries later).

    Note: With IntervalPool as source of truth, there's no local price cache
    to fall back to. The Pool has its own persistence, so on next update
    it will use its cached intervals if the API is unavailable.
    """
    if isinstance(error, TibberPricesApiClientAuthenticationError):
        auth_msg = "Invalid access token"
        raise ConfigEntryAuthFailed(auth_msg) from error
    failure_msg = f"Error communicating with API: {error}"
    raise UpdateFailed(failure_msg) from error
@property
def cached_user_data(self) -> dict[str, Any] | None:
    """Get cached user data (None until the first successful user fetch)."""
    return self._cached_user_data
def has_tomorrow_data(self, price_info: list[dict[str, Any]]) -> bool:
    """
    Check whether tomorrow's price data is already present.

    Args:
        price_info: Price intervals from coordinator data.

    Returns:
        True if at least one interval falls on tomorrow's (local) date.
    """
    if not price_info:
        return False
    # Tomorrow's calendar date in the user's local timezone.
    local_now = self.time.as_local(self.time.now())
    tomorrow = (local_now + timedelta(days=1)).date()
    for interval in price_info:
        raw = interval.get("startsAt")
        if isinstance(raw, str):
            # Fallback: after _transform_data() this should already be a
            # datetime; parse defensively if it is still a string.
            raw = self.time.parse_datetime(raw)
        if raw and self.time.as_local(raw).date() == tomorrow:
            return True
    return False

View file

@ -1,228 +0,0 @@
"""
Repair issue management for Tibber Prices integration.
This module handles creation and cleanup of repair issues that notify users
about problems requiring attention in the Home Assistant UI.
Repair Types:
1. Tomorrow Data Missing - Warns when tomorrow's price data is unavailable after 18:00
2. Persistent Rate Limits - Warns when API rate limiting persists after multiple errors
3. Home Not Found - Warns when a home no longer exists in the Tibber account
"""
from __future__ import annotations
import logging
from typing import TYPE_CHECKING
from custom_components.tibber_prices.const import DOMAIN
from homeassistant.helpers import issue_registry as ir
if TYPE_CHECKING:
from datetime import datetime
from homeassistant.core import HomeAssistant
_LOGGER = logging.getLogger(__name__)
# Repair issue tracking thresholds
TOMORROW_DATA_WARNING_HOUR = 18 # Warn after 18:00 if tomorrow data missing
RATE_LIMIT_WARNING_THRESHOLD = 3 # Warn after 3 consecutive rate limit errors
class TibberPricesRepairManager:
"""Manage repair issues for Tibber Prices integration."""
def __init__(self, hass: HomeAssistant, entry_id: str, home_name: str) -> None:
"""
Initialize repair manager.
Args:
hass: Home Assistant instance
entry_id: Config entry ID for this home
home_name: Display name of the home (for user-friendly messages)
"""
self._hass = hass
self._entry_id = entry_id
self._home_name = home_name
# Track consecutive rate limit errors
self._rate_limit_error_count = 0
# Track if repairs are currently active
self._tomorrow_data_repair_active = False
self._rate_limit_repair_active = False
self._home_not_found_repair_active = False
async def check_tomorrow_data_availability(
self,
has_tomorrow_data: bool, # noqa: FBT001 - Clear meaning in context
current_time: datetime,
) -> None:
"""
Check if tomorrow data is available and create/clear repair as needed.
Creates repair if:
- Current hour >= 18:00 (after expected data availability)
- Tomorrow's data is missing
Clears repair if:
- Tomorrow's data is now available
Args:
has_tomorrow_data: Whether tomorrow's data is available
current_time: Current local datetime for hour check
"""
should_warn = current_time.hour >= TOMORROW_DATA_WARNING_HOUR and not has_tomorrow_data
if should_warn and not self._tomorrow_data_repair_active:
await self._create_tomorrow_data_repair()
elif not should_warn and self._tomorrow_data_repair_active:
await self._clear_tomorrow_data_repair()
async def track_rate_limit_error(self) -> None:
"""
Track rate limit error and create repair if threshold exceeded.
Increments rate limit error counter and creates repair issue
if threshold (3 consecutive errors) is reached.
"""
self._rate_limit_error_count += 1
if self._rate_limit_error_count >= RATE_LIMIT_WARNING_THRESHOLD and not self._rate_limit_repair_active:
await self._create_rate_limit_repair()
async def clear_rate_limit_tracking(self) -> None:
"""
Clear rate limit error tracking after successful API call.
Resets counter and clears any active repair issue.
"""
self._rate_limit_error_count = min(self._rate_limit_error_count, 0)
if self._rate_limit_repair_active:
await self._clear_rate_limit_repair()
async def create_home_not_found_repair(self) -> None:
"""
Create repair for home no longer found in Tibber account.
This indicates the home was deleted from the user's Tibber account
but the config entry still exists in Home Assistant.
"""
if self._home_not_found_repair_active:
return
_LOGGER.warning(
"Home '%s' not found in Tibber account - creating repair issue",
self._home_name,
)
ir.async_create_issue(
self._hass,
DOMAIN,
f"home_not_found_{self._entry_id}",
is_fixable=True,
severity=ir.IssueSeverity.ERROR,
translation_key="home_not_found",
translation_placeholders={
"home_name": self._home_name,
"entry_id": self._entry_id,
},
)
self._home_not_found_repair_active = True
async def clear_home_not_found_repair(self) -> None:
"""Clear home not found repair (home is available again or entry removed)."""
if not self._home_not_found_repair_active:
return
_LOGGER.debug("Clearing home not found repair for '%s'", self._home_name)
ir.async_delete_issue(
self._hass,
DOMAIN,
f"home_not_found_{self._entry_id}",
)
self._home_not_found_repair_active = False
async def clear_all_repairs(self) -> None:
"""
Clear all active repair issues.
Called during coordinator shutdown or entry removal.
"""
if self._tomorrow_data_repair_active:
await self._clear_tomorrow_data_repair()
if self._rate_limit_repair_active:
await self._clear_rate_limit_repair()
if self._home_not_found_repair_active:
await self.clear_home_not_found_repair()
async def _create_tomorrow_data_repair(self) -> None:
"""Create repair issue for missing tomorrow data."""
_LOGGER.warning(
"Tomorrow's price data missing after %d:00 for home '%s' - creating repair issue",
TOMORROW_DATA_WARNING_HOUR,
self._home_name,
)
ir.async_create_issue(
self._hass,
DOMAIN,
f"tomorrow_data_missing_{self._entry_id}",
is_fixable=False,
severity=ir.IssueSeverity.WARNING,
translation_key="tomorrow_data_missing",
translation_placeholders={
"home_name": self._home_name,
"warning_hour": str(TOMORROW_DATA_WARNING_HOUR),
},
)
self._tomorrow_data_repair_active = True
async def _clear_tomorrow_data_repair(self) -> None:
"""Clear tomorrow data repair issue."""
_LOGGER.debug("Tomorrow's data now available for '%s' - clearing repair issue", self._home_name)
ir.async_delete_issue(
self._hass,
DOMAIN,
f"tomorrow_data_missing_{self._entry_id}",
)
self._tomorrow_data_repair_active = False
async def _create_rate_limit_repair(self) -> None:
"""Create repair issue for persistent rate limiting."""
_LOGGER.warning(
"Persistent API rate limiting detected for home '%s' (%d consecutive errors) - creating repair issue",
self._home_name,
self._rate_limit_error_count,
)
ir.async_create_issue(
self._hass,
DOMAIN,
f"rate_limit_exceeded_{self._entry_id}",
is_fixable=False,
severity=ir.IssueSeverity.WARNING,
translation_key="rate_limit_exceeded",
translation_placeholders={
"home_name": self._home_name,
"error_count": str(self._rate_limit_error_count),
},
)
self._rate_limit_repair_active = True
async def _clear_rate_limit_repair(self) -> None:
"""Clear rate limit repair issue."""
_LOGGER.debug("Rate limiting resolved for '%s' - clearing repair issue", self._home_name)
ir.async_delete_issue(
self._hass,
DOMAIN,
f"rate_limit_exceeded_{self._entry_id}",
)
self._rate_limit_repair_active = False

View file

@ -1,828 +0,0 @@
"""
TimeService - Centralized time management for Tibber Prices integration.
This service provides:
1. Single source of truth for current time
2. Timezone-aware operations (respects HA user timezone)
3. Domain-specific datetime methods (intervals, boundaries, horizons)
4. Time-travel capability (inject simulated time for testing)
All datetime operations MUST go through TimeService to ensure:
- Consistent time across update cycles
- Proper timezone handling (local time, not UTC)
- Testability (mock time in one place)
- Future time-travel feature support
TIMER ARCHITECTURE:
This integration uses three distinct timer mechanisms:
1. **Timer #1: API Polling (DataUpdateCoordinator)**
- Runs every 15 minutes at a RANDOM offset (e.g., 10:04:37, 10:19:37, 10:34:37)
- Offset determined by when last API call completed
- Tracked via _last_coordinator_update for next poll prediction
- NO tolerance needed - offset variation is INTENTIONAL
- Purpose: Spread API load, avoid thundering herd problem
2. **Timer #2: Entity Updates (quarter-hour boundaries)**
- Must trigger at EXACT boundaries (00, 15, 30, 45 minutes)
- Uses _BOUNDARY_TOLERANCE_SECONDS for HA scheduling jitter correction
- Scheduled via async_track_utc_time_change(minute=[0,15,30,45], second=0)
- If HA triggers at 15:00:01 round to 15:00:00 (within ±2s tolerance)
- Purpose: Entity state updates reflect correct quarter-hour interval
3. **Timer #3: Timing Sensors (30-second boundaries)**
- Must trigger at EXACT boundaries (0, 30 seconds)
- Uses _BOUNDARY_TOLERANCE_SECONDS for HA scheduling jitter correction
- Scheduled via async_track_utc_time_change(second=[0,30])
- Purpose: Update countdown/time-to sensors
CRITICAL: The tolerance is ONLY for Timer #2 and #3 to correct HA's
scheduling delays. It is NOT used for Timer #1's offset tracking.
"""
from __future__ import annotations
import math
from datetime import datetime, timedelta
from typing import TYPE_CHECKING
from homeassistant.util import dt as dt_util
if TYPE_CHECKING:
from datetime import date
# =============================================================================
# CRITICAL: This is the ONLY module allowed to import dt_util for operations!
# =============================================================================
#
# Other modules may import dt_util ONLY in these cases:
# 1. api/client.py - Rate limiting (non-critical, cosmetic)
# 2. entity_utils/icons.py - Icon updates (cosmetic, independent)
#
# All business logic MUST use TimeService instead.
# =============================================================================
# Constants (private - use TimeService methods instead)
_DEFAULT_INTERVAL_MINUTES = 15 # Tibber uses 15-minute intervals
_INTERVALS_PER_HOUR = 60 // _DEFAULT_INTERVAL_MINUTES # 4
_INTERVALS_PER_DAY = 24 * _INTERVALS_PER_HOUR # 96
# Rounding tolerance for boundary detection (±2 seconds)
# This handles Home Assistant's scheduling jitter for Timer #2 (entity updates)
# and Timer #3 (timing sensors). When HA schedules a callback for exactly
# 15:00:00 but actually triggers it at 15:00:01, this tolerance ensures we
# still recognize it as the 15:00:00 boundary.
#
# NOT used for Timer #1 (API polling), which intentionally runs at random
# offsets determined by last API call completion time.
_BOUNDARY_TOLERANCE_SECONDS = 2
class TibberPricesTimeService:
"""
Centralized time service for Tibber Prices integration.
Provides timezone-aware datetime operations with consistent time context.
All times are in user's Home Assistant local timezone.
Features:
- Single source of truth for "now" per update cycle
- Domain-specific methods (intervals, periods, boundaries)
- Time-travel support (inject simulated time)
- Timezone-safe (all operations respect HA user timezone)
Usage:
# Create service with current time
time_service = TimeService()
# Get consistent "now" throughout update cycle
now = time_service.now()
# Domain-specific operations
current_interval_start = time_service.get_current_interval_start()
next_interval = time_service.get_interval_offset_time(1)
midnight = time_service.get_local_midnight()
"""
def __init__(self, reference_time: datetime | None = None) -> None:
"""
Initialize TimeService with reference time.
Args:
reference_time: Optional fixed time for this context.
If None, uses actual current time.
For time-travel: pass simulated time here.
"""
self._reference_time = reference_time or dt_util.now()
# =========================================================================
# Low-Level API: Direct dt_util wrappers
# =========================================================================
def now(self) -> datetime:
    """
    Get current reference time in user's local timezone.

    Returns same value throughout the lifetime of this TimeService instance.
    This ensures consistent time across all calculations in an update cycle.

    Returns:
        Timezone-aware datetime in user's HA local timezone.
        # NOTE(review): the timezone guarantee holds only when the instance
        # was constructed from dt_util.now() or a localized reference time;
        # this method itself performs no conversion.
    """
    return self._reference_time
def get_rounded_now(self) -> datetime:
    """
    Get current reference time rounded to nearest 15-minute boundary.

    Convenience method that combines now() + round_to_nearest_quarter().
    Use this when you need the current interval timestamp for calculations.

    Returns:
        Current reference time rounded to :00, :15, :30, or :45.

    Examples:
        If now is 14:59:58 -> returns 15:00:00 (within boundary tolerance)
        If now is 14:59:30 -> returns 14:45:00 (floors to current interval)
        If now is 15:00:01 -> returns 15:00:00 (within boundary tolerance)
    """
    # Delegates with no argument, so round_to_nearest_quarter() falls back
    # to the same _reference_time this instance was created with.
    return self.round_to_nearest_quarter()
def as_local(self, dt: datetime) -> datetime:
    """
    Convert datetime to user's local timezone.

    Thin wrapper over dt_util.as_local so business logic never imports
    dt_util directly (see module-level policy comment).

    Args:
        dt: Timezone-aware datetime (any timezone).
            # NOTE(review): dt_util.as_local treats a NAIVE input as UTC;
            # callers should always pass aware datetimes.

    Returns:
        Same moment in time, converted to user's local timezone.
    """
    return dt_util.as_local(dt)
def parse_datetime(self, dt_str: str) -> datetime | None:
    """
    Parse ISO 8601 datetime string.

    Thin wrapper over dt_util.parse_datetime; no timezone conversion is
    performed (use parse_and_localize() for that).

    Args:
        dt_str: ISO 8601 formatted string (e.g., "2025-11-19T13:00:00+00:00").

    Returns:
        Timezone-aware datetime, or None if parsing fails.
    """
    return dt_util.parse_datetime(dt_str)
def parse_and_localize(self, dt_str: str) -> datetime | None:
    """
    Parse an ISO string and convert it to the user's local timezone.

    Combines parse_datetime() + as_local() in one call. Use this for API
    timestamps that need immediate localization.

    Args:
        dt_str: ISO 8601 formatted string (e.g., "2025-11-19T13:00:00+00:00").

    Returns:
        Timezone-aware datetime in the user's local timezone, or None if
        parsing fails.
    """
    parsed = self.parse_datetime(dt_str)
    if not parsed:
        return None
    return self.as_local(parsed)
def start_of_local_day(self, dt: datetime | None = None) -> datetime:
    """
    Get midnight (00:00) of the given datetime in user's local timezone.

    Thin wrapper over dt_util.start_of_local_day.

    Args:
        dt: Reference datetime. If None, uses this instance's reference_time.

    Returns:
        Midnight (start of day) in user's local timezone.
    """
    target = dt if dt is not None else self._reference_time
    return dt_util.start_of_local_day(target)
# =========================================================================
# High-Level API: Domain-Specific Methods
# =========================================================================
# -------------------------------------------------------------------------
# Interval Data Extraction
# -------------------------------------------------------------------------
def get_interval_time(self, interval: dict) -> datetime | None:
    """
    Extract and parse the interval timestamp from API data.

    Handles the common pattern: read "startsAt", then parse + localize if it
    is still an ISO string. Values parsed from cache may already be datetime
    objects and are returned unchanged.

    Args:
        interval: Price interval dict with a "startsAt" field (ISO string or
            datetime object).

    Returns:
        Localized datetime, or None if the field is missing/empty or parsing
        fails.
    """
    value = interval.get("startsAt")
    if not value:
        return None
    # Datetime passthrough; otherwise parse the ISO string and localize.
    return value if isinstance(value, datetime) else self.parse_and_localize(value)
# -------------------------------------------------------------------------
# Time Comparison Helpers
# -------------------------------------------------------------------------
def is_in_past(self, dt: datetime) -> bool:
    """
    Check if a datetime lies before the reference time (now).

    Args:
        dt: Datetime to check.

    Returns:
        True if dt < now().
    """
    reference = self.now()
    return reference > dt
def is_in_future(self, dt: datetime) -> bool:
    """
    Check if a datetime is at or after the reference time (now).

    Args:
        dt: Datetime to check.

    Returns:
        True if dt >= now() (note: "now" itself counts as future).
    """
    reference = self.now()
    return reference <= dt
def is_current_interval(self, start: datetime, end: datetime) -> bool:
    """
    Check if the reference time (now) falls within [start, end).

    Args:
        start: Interval start time (inclusive).
        end: Interval end time (exclusive).

    Returns:
        True if start <= now() < end.
    """
    moment = self.now()
    return (start <= moment) and (moment < end)
def is_in_day(self, dt: datetime, day: str) -> bool:
    """
    Check if a datetime falls within the given calendar day.

    Args:
        dt: Datetime to check (should be localized).
        day: "yesterday", "today", or "tomorrow".

    Returns:
        True if dt lies within that day's [midnight, next midnight) window.
    """
    window = self.get_day_boundaries(day)
    return window[0] <= dt < window[1]
# -------------------------------------------------------------------------
# Duration Calculations
# -------------------------------------------------------------------------
def get_hours_until(self, future_time: datetime) -> float:
    """
    Calculate hours from the reference time (now) until future_time.

    Args:
        future_time: Future datetime.

    Returns:
        Hours as a float (negative if future_time is in the past, decimal
        for partial hours).
    """
    return (future_time - self.now()).total_seconds() / 3600
def get_local_date(self, offset_days: int = 0) -> date:
    """
    Get the calendar date at an offset from the reference date.

    Convenience method replacing repeated time.now().date() /
    time.get_local_midnight(n).date() patterns.

    Args:
        offset_days: Days to offset. 0 = today, 1 = tomorrow, -1 = yesterday.

    Returns:
        Date object in the user's local timezone.

    Examples:
        get_local_date()   -> today's date
        get_local_date(1)  -> tomorrow's date
        get_local_date(-1) -> yesterday's date
    """
    return (self._reference_time + timedelta(days=offset_days)).date()
def is_time_in_period(self, start: datetime, end: datetime, check_time: datetime | None = None) -> bool:
    """
    Check if a time falls within the period [start, end).

    Args:
        start: Period start time (inclusive).
        end: Period end time (exclusive).
        check_time: Time to check; defaults to the reference time (now).

    Returns:
        True if start <= check_time < end.

    Examples:
        is_time_in_period(period_start, period_end)            # check "now"
        is_time_in_period(window_start, window_end, timestamp)  # check a value
    """
    if check_time is None:
        moment = self.now()
    else:
        moment = check_time
    return start <= moment < end
def is_time_within_horizon(self, target_time: datetime, hours: int) -> bool:
    """
    Check if a target time is strictly in the future and within N hours.

    Combines two common checks:
    1. Is target_time in the future? (target_time > now)
    2. Is target_time within N hours? (target_time <= now + N hours)

    Args:
        target_time: Time to check.
        hours: Lookahead horizon in hours.

    Returns:
        True if now < target_time <= now + hours.

    Examples:
        is_time_within_horizon(period_start, hours=6)
        is_time_within_horizon(event_time, hours=24)
    """
    reference = self.now()
    return reference < target_time <= reference + timedelta(hours=hours)
def hours_since(self, past_time: datetime) -> float:
    """
    Calculate hours elapsed from past_time until the reference time (now).

    Args:
        past_time: Past datetime.

    Returns:
        Hours as a float (negative if past_time is in the future, decimal
        for partial hours).
    """
    elapsed = self.now() - past_time
    return elapsed.total_seconds() / 3600
def minutes_until(self, future_time: datetime) -> float:
    """
    Calculate minutes from the reference time (now) until future_time.

    Args:
        future_time: Future datetime.

    Returns:
        Minutes as a float (negative if in the past, decimal for partial
        minutes).
    """
    remaining = future_time - self.now()
    return remaining.total_seconds() / 60
def minutes_until_rounded(self, future_time: datetime | str) -> int:
    """
    Calculate ROUNDED minutes from the reference time (now) to future_time.

    Uses half-up rounding (0.5 rounds up) to match the Home Assistant
    frontend's relative-time display, so sensor values agree with the UI's
    "in X minutes".

    Args:
        future_time: Future datetime, or an ISO string that will be parsed.

    Returns:
        Rounded minutes (negative if the target lies in the past; 0 if a
        string input fails to parse).

    Examples:
        44.2 minutes -> 44
        44.5 minutes -> 45 (rounds up, like the HA frontend)
        44.7 minutes -> 45
    """
    target = future_time
    if isinstance(target, str):
        target = self.parse_and_localize(target)
        if not target:
            return 0
    total_seconds = (target - self.now()).total_seconds()
    # math.floor(x + 0.5) implements half-up rounding; Python's round()
    # would use banker's rounding and disagree with the frontend.
    return math.floor(total_seconds / 60 + 0.5)
# -------------------------------------------------------------------------
# Interval Operations (15-minute grid)
# -------------------------------------------------------------------------
def get_interval_duration(self) -> timedelta:
    """
    Get duration of one price interval.

    Returns:
        Timedelta representing interval length (15 minutes for Tibber's
        quarter-hour price grid).
    """
    return timedelta(minutes=_DEFAULT_INTERVAL_MINUTES)
def minutes_to_intervals(self, minutes: int) -> int:
    """
    Convert a number of minutes into a count of 15-minute intervals.

    Args:
        minutes: Number of minutes to convert.

    Returns:
        Number of whole intervals (rounded down).

    Examples:
        15 minutes -> 1 interval
        30 minutes -> 2 intervals
        45 minutes -> 3 intervals
        60 minutes -> 4 intervals
    """
    whole_intervals, _remainder = divmod(minutes, _DEFAULT_INTERVAL_MINUTES)
    return whole_intervals
def round_to_nearest_quarter(self, dt: datetime | None = None) -> datetime:
    """
    Round datetime to nearest 15-minute boundary with smart tolerance.

    Handles HA scheduling jitter: if within ±2 seconds of a boundary,
    round to that boundary. Otherwise, floor to the current interval.

    Args:
        dt: Datetime to round. If None, uses reference_time.

    Returns:
        Datetime rounded to the nearest quarter-hour boundary (seconds and
        microseconds zeroed).

    Examples:
        14:59:58 -> 15:00:00 (within 2s of boundary)
        14:59:30 -> 14:45:00 (not within 2s, stay in current)
        15:00:01 -> 15:00:00 (within 2s of boundary)
    """
    target = dt if dt is not None else self._reference_time
    # Calculate total seconds elapsed since local midnight (including
    # fractional microseconds so the tolerance check is precise).
    total_seconds = target.hour * 3600 + target.minute * 60 + target.second + target.microsecond / 1_000_000
    # Find current interval boundaries (interval_index in 0..95).
    interval_index = int(total_seconds // (_DEFAULT_INTERVAL_MINUTES * 60))
    interval_start_seconds = interval_index * _DEFAULT_INTERVAL_MINUTES * 60
    next_interval_index = (interval_index + 1) % _INTERVALS_PER_DAY
    next_interval_start_seconds = next_interval_index * _DEFAULT_INTERVAL_MINUTES * 60
    # Distance to boundaries
    distance_to_current = total_seconds - interval_start_seconds
    if next_interval_index == 0:  # Midnight wrap
        distance_to_next = (24 * 3600) - total_seconds
    else:
        distance_to_next = next_interval_start_seconds - total_seconds
    # Apply tolerance: if within 2 seconds of a boundary, round to it.
    # NOTE: the first branch yields the same value as the final else
    # (floor to current interval); it is kept for symmetry/clarity.
    if distance_to_current <= _BOUNDARY_TOLERANCE_SECONDS:
        # Near current interval start → use it
        rounded_seconds = interval_start_seconds
    elif distance_to_next <= _BOUNDARY_TOLERANCE_SECONDS:
        # Near next interval start → use it
        # CRITICAL: If rounding to next interval and it wraps to midnight (index 0),
        # we need to increment to next day, not stay on same day!
        if next_interval_index == 0:
            # Rounding to midnight of NEXT day
            return (target + timedelta(days=1)).replace(hour=0, minute=0, second=0, microsecond=0)
        rounded_seconds = next_interval_start_seconds
    else:
        # Not near any boundary → floor to current interval
        rounded_seconds = interval_start_seconds
    # Build rounded datetime (no midnight wrap needed here - handled above)
    hours = int(rounded_seconds // 3600)
    minutes = int((rounded_seconds % 3600) // 60)
    return target.replace(hour=hours, minute=minutes, second=0, microsecond=0)
def get_current_interval_start(self) -> datetime:
    """
    Get start time of the current 15-minute interval.

    Returns:
        Datetime at start of current interval (rounded down).
        # NOTE(review): delegates to round_to_nearest_quarter(), which may
        # round UP when within ±2s of the NEXT boundary (e.g. 14:59:59 ->
        # 15:00:00) - intentional jitter handling, not a pure floor.

    Example:
        Reference time 14:37:23 -> returns 14:30:00
    """
    return self.round_to_nearest_quarter(self._reference_time)
def get_next_interval_start(self) -> datetime:
    """
    Get start time of the next 15-minute interval.

    Returns:
        Datetime at start of the next interval.

    Example:
        Reference time 14:37:23 -> returns 14:45:00
    """
    # Equivalent to get_interval_offset_time(1): current interval start
    # advanced by exactly one interval duration.
    return self.get_current_interval_start() + self.get_interval_duration()
def get_interval_offset_time(self, offset: int = 0) -> datetime:
    """
    Get start time of the interval at an offset from the current one.

    Args:
        offset: Number of intervals to offset.
            0 = current, 1 = next, -1 = previous, etc.

    Returns:
        Datetime at the start of the target interval.

    Examples:
        offset=0  -> current interval (14:30:00)
        offset=1  -> next interval (14:45:00)
        offset=-1 -> previous interval (14:15:00)
    """
    # offset * 15min == timedelta(minutes=15 * offset); using the shared
    # duration helper keeps the interval length defined in one place.
    return self.get_current_interval_start() + offset * self.get_interval_duration()
# -------------------------------------------------------------------------
# Day Boundaries (midnight-to-midnight windows)
# -------------------------------------------------------------------------
def get_local_midnight(self, offset_days: int = 0) -> datetime:
    """
    Get midnight (00:00) for the day at an offset from the reference date.

    Args:
        offset_days: Days to offset.
            0 = today, 1 = tomorrow, -1 = yesterday, etc.

    Returns:
        Midnight (start of day), timezone-aware in the reference time's zone.

    Examples:
        offset_days=0  -> today 00:00
        offset_days=1  -> tomorrow 00:00
        offset_days=-1 -> yesterday 00:00
    """
    # BUG FIX: the previous implementation built a NAIVE datetime and passed
    # it to dt_util.as_local(). Home Assistant's as_local() interprets naive
    # datetimes as UTC, so in any non-UTC timezone the result was local
    # midnight shifted by the UTC offset (e.g. 01:00/02:00 for Berlin).
    # Attaching the reference time's tzinfo directly yields true local
    # midnight. replace(tzinfo=...) is correct for zoneinfo timezones (which
    # current HA uses); see the detailed timezone discussion in
    # get_expected_intervals_for_day() about why naive math is unsafe here.
    target_date = self._reference_time.date() + timedelta(days=offset_days)
    midnight = datetime.combine(target_date, datetime.min.time())
    return midnight.replace(tzinfo=self._reference_time.tzinfo)
def get_day_boundaries(self, day: str = "today") -> tuple[datetime, datetime]:
    """
    Get start and end times for a day (midnight to midnight).

    Args:
        day: Day identifier ("day_before_yesterday", "yesterday", "today",
            "tomorrow").

    Returns:
        Tuple of (start_time, end_time) for the day:
        - start_time: midnight (00:00:00) of that day
        - end_time: midnight (00:00:00) of the next day (exclusive boundary)

    Raises:
        ValueError: If `day` is not one of the supported identifiers.

    Examples:
        day="today"     -> (today 00:00, tomorrow 00:00)
        day="yesterday" -> (yesterday 00:00, today 00:00)
    """
    day_map = {
        "day_before_yesterday": -2,
        "yesterday": -1,
        "today": 0,
        "tomorrow": 1,
    }
    try:
        offset = day_map[day]
    except KeyError:
        msg = f"Invalid day: {day}. Must be one of {list(day_map.keys())}"
        raise ValueError(msg) from None
    # End boundary is the following day's midnight (exclusive).
    return self.get_local_midnight(offset), self.get_local_midnight(offset + 1)
def get_expected_intervals_for_day(self, day_date: date | None = None) -> int:
    """
    Calculate expected number of 15-minute intervals for a day.

    Handles DST transitions:
    - Normal day: 96 intervals (24 hours * 4)
    - Spring forward (lose 1 hour): 92 intervals (23 hours * 4)
    - Fall back (gain 1 hour): 100 intervals (25 hours * 4)

    Args:
        day_date: Date to check. If None, uses reference date.

    Returns:
        Expected number of 15-minute intervals for that day.

    Raises:
        ValueError: If the reference time carries no timezone information.
    """
    day = self._reference_time.date() if day_date is None else day_date

    # NOTE: dt_util.start_of_local_day() is deliberately avoided here, for two
    # reasons:
    #
    # 1. pytz "LMT" bug: datetime.combine(date, time(), tzinfo=tz) with a pytz
    #    timezone picks a historical Local Mean Time offset (e.g. +00:53 for
    #    Berlin instead of +01:00/+02:00). Both boundaries then carry the same
    #    wrong offset and DST days compute as exactly 24 hours.
    # 2. Naive subtraction: even with correct offsets, Python's datetime
    #    subtraction ignores that the UTC offset changed between the two
    #    timestamps (00:00+01:00 minus next-day 00:00+02:00 yields "1 day"
    #    rather than the real 23 hours on Spring Forward).
    #
    # So each midnight is localized properly (localize() for pytz,
    # replace(tzinfo=...) for zoneinfo) and the duration is measured in UTC,
    # which reflects true elapsed time across DST changes:
    # Spring Forward -> 23h (92 intervals), Fall Back -> 25h (100 intervals),
    # normal day -> 24h (96 intervals).
    tz = self._reference_time.tzinfo
    if tz is None:
        # Should never happen - dt_util.now() always returns timezone-aware datetime
        msg = "Reference time has no timezone information"
        raise ValueError(msg)

    midnight = datetime.min.time()
    start_naive = datetime.combine(day, midnight)
    end_naive = datetime.combine(day + timedelta(days=1), midnight)

    if hasattr(tz, "localize"):
        # pytz timezone - localize() resolves the correct DST offset for each
        # date. (hasattr is a runtime check the type checker cannot follow.)
        start_aware = tz.localize(start_naive)  # type: ignore[attr-defined]
        end_aware = tz.localize(end_naive)  # type: ignore[attr-defined]
    else:
        # zoneinfo (or other tzinfo) - attaching the zone directly is safe.
        start_aware = start_naive.replace(tzinfo=tz)
        end_aware = end_naive.replace(tzinfo=tz)

    # Measure the duration in UTC so offset changes are accounted for; a
    # direct (end - start) would always report 24 hours.
    duration = end_aware.astimezone(dt_util.UTC) - start_aware.astimezone(dt_util.UTC)
    hours = duration.total_seconds() / 3600
    # Convert to intervals (4 per hour for 15-minute intervals).
    return int(hours * _INTERVALS_PER_HOUR)
# -------------------------------------------------------------------------
# Time Windows (relative to current interval)
# -------------------------------------------------------------------------
def get_trailing_window(self, hours: int = 24) -> tuple[datetime, datetime]:
    """
    Get trailing time window ending at current interval.

    Args:
        hours: Window size in hours (default 24).

    Returns:
        Tuple of (start_time, end_time) for trailing window.
        start_time: current interval - hours
        end_time: current interval start (exclusive)

    Example:
        Current interval: 14:30
        hours=24 -> (yesterday 14:30, today 14:30)
    """
    # The window is anchored at (and excludes) the current interval start.
    window_end = self.get_current_interval_start()
    return window_end - timedelta(hours=hours), window_end
def get_leading_window(self, hours: int = 24) -> tuple[datetime, datetime]:
    """
    Get leading time window starting at current interval.

    Args:
        hours: Window size in hours (default 24).

    Returns:
        Tuple of (start_time, end_time) for leading window.
        start_time: current interval start
        end_time: current interval + hours (exclusive)

    Example:
        Current interval: 14:30
        hours=24 -> (today 14:30, tomorrow 14:30)
    """
    # The window opens at the current interval start and extends forward.
    window_start = self.get_current_interval_start()
    return window_start, window_start + timedelta(hours=hours)
def get_next_n_hours_window(self, hours: int) -> tuple[datetime, datetime]:
    """
    Get window for next N hours starting from NEXT interval.

    Args:
        hours: Window size in hours.

    Returns:
        Tuple of (start_time, end_time).
        start_time: next interval start
        end_time: next interval start + hours (exclusive)

    Example:
        Current interval: 14:30
        hours=3 -> (14:45, 17:45)
    """
    # Anchor at the NEXT interval (offset 1), not the current one.
    window_start = self.get_interval_offset_time(1)
    return window_start, window_start + timedelta(hours=hours)
# -------------------------------------------------------------------------
# Time-Travel Support
# -------------------------------------------------------------------------
def with_reference_time(self, new_time: datetime) -> TibberPricesTimeService:
    """
    Create new TibberPricesTimeService with different reference time.

    Used for time-travel testing: inject simulated "now".

    Args:
        new_time: New reference time.

    Returns:
        New TibberPricesTimeService instance with updated reference time.

    Example:
        # Simulate being at 14:30 on 2025-11-19
        simulated_time = datetime(2025, 11, 19, 14, 30)
        future_service = time_service.with_reference_time(simulated_time)
    """
    # A service instance is anchored to one reference time, so "changing" it
    # means constructing a fresh instance rather than mutating this one.
    shifted_service = TibberPricesTimeService(reference_time=new_time)
    return shifted_service

View file

@ -1,32 +1,10 @@
{ {
"apexcharts": {
"title_rating_level": "Preisphasen Tagesverlauf",
"title_level": "Preisniveau",
"hourly_suffix": "(Ø stündlich)",
"best_price_period_name": "Bestpreis-Zeitraum",
"peak_price_period_name": "Spitzenpreis-Zeitraum",
"notification": {
"metadata_sensor_unavailable": {
"title": "Tibber Prices: ApexCharts YAML mit eingeschränkter Funktionalität generiert",
"message": "Du hast gerade eine ApexCharts-Card-Konfiguration über die Entwicklerwerkzeuge generiert. Der Chart-Metadaten-Sensor ist aktuell deaktiviert, daher zeigt das generierte YAML nur **Basisfunktionalität** (Auto-Skalierung, fester Gradient bei 50%).\n\n**Für volle Funktionalität** (optimierte Skalierung, dynamische Verlaufsfarben):\n1. [Tibber Prices Integration öffnen](https://my.home-assistant.io/redirect/integration/?domain=tibber_prices)\n2. Aktiviere den 'Chart Metadata' Sensor\n3. **Generiere das YAML erneut** über die Entwicklerwerkzeuge\n4. **Ersetze den alten YAML-Code** in deinem Dashboard durch die neue Version\n\n⚠ Nur den Sensor zu aktivieren reicht nicht - du musst das YAML neu generieren und ersetzen!"
},
"missing_cards": {
"title": "Tibber Prices: ApexCharts YAML kann nicht verwendet werden",
"message": "Du hast gerade eine ApexCharts-Card-Konfiguration über die Entwicklerwerkzeuge generiert, aber das generierte YAML **funktioniert nicht**, weil erforderliche Custom Cards fehlen.\n\n**Fehlende Cards:**\n{cards}\n\n**Um das generierte YAML zu nutzen:**\n1. Klicke auf die obigen Links, um die fehlenden Cards über HACS zu installieren\n2. Starte Home Assistant neu (manchmal erforderlich)\n3. **Generiere das YAML erneut** über die Entwicklerwerkzeuge\n4. Füge das YAML zu deinem Dashboard hinzu\n\n⚠ Der aktuelle YAML-Code funktioniert nicht, bis alle Cards installiert sind!"
}
}
},
"sensor": { "sensor": {
"current_interval_price": { "current_price": {
"description": "Der aktuelle Strompreis pro kWh", "description": "Der aktuelle Strompreis pro kWh",
"long_description": "Zeigt den aktuellen Preis pro kWh von deinem Tibber-Abonnement an", "long_description": "Zeigt den aktuellen Preis pro kWh von deinem Tibber-Abonnement an",
"usage_tips": "Nutze dies, um Preise zu verfolgen oder Automatisierungen zu erstellen, die bei günstigem Strom ausgeführt werden" "usage_tips": "Nutze dies, um Preise zu verfolgen oder Automatisierungen zu erstellen, die bei günstigem Strom ausgeführt werden"
}, },
"current_interval_price_base": {
"description": "Aktueller Strompreis in Hauptwährung (EUR/kWh, NOK/kWh, etc.) für Energie-Dashboard",
"long_description": "Zeigt den aktuellen Preis pro kWh in Hauptwährungseinheiten an (z.B. EUR/kWh statt ct/kWh, NOK/kWh statt øre/kWh). Dieser Sensor ist speziell für die Verwendung mit dem Energie-Dashboard von Home Assistant konzipiert, das Preise in Standard-Währungseinheiten benötigt.",
"usage_tips": "Verwende diesen Sensor beim Konfigurieren des Energie-Dashboards unter Einstellungen → Dashboards → Energie. Wähle diesen Sensor als 'Entität mit dem aktuellen Preis' aus, um deine Energiekosten automatisch zu berechnen. Das Energie-Dashboard multipliziert deinen Energieverbrauch (kWh) mit diesem Preis, um die Gesamtkosten anzuzeigen."
},
"next_interval_price": { "next_interval_price": {
"description": "Der Strompreis für das nächste 15-Minuten-Intervall pro kWh", "description": "Der Strompreis für das nächste 15-Minuten-Intervall pro kWh",
"long_description": "Zeigt den Preis für das nächste 15-Minuten-Intervall von deinem Tibber-Abonnement an", "long_description": "Zeigt den Preis für das nächste 15-Minuten-Intervall von deinem Tibber-Abonnement an",
@ -37,12 +15,12 @@
"long_description": "Zeigt den Preis für das vorherige 15-Minuten-Intervall von deinem Tibber-Abonnement an", "long_description": "Zeigt den Preis für das vorherige 15-Minuten-Intervall von deinem Tibber-Abonnement an",
"usage_tips": "Nutze dies, um vergangene Preisänderungen zu überprüfen oder den Preisverlauf zu verfolgen" "usage_tips": "Nutze dies, um vergangene Preisänderungen zu überprüfen oder den Preisverlauf zu verfolgen"
}, },
"current_hour_average_price": { "current_hour_average": {
"description": "Gleitender 5-Intervall-Durchschnittspreis pro kWh", "description": "Gleitender 5-Intervall-Durchschnittspreis pro kWh",
"long_description": "Zeigt den durchschnittlichen Preis pro kWh berechnet aus 5 Intervallen: 2 vorherige, aktuelles und 2 nächste Intervalle (ca. 75 Minuten insgesamt). Dies bietet einen geglätteten 'Stundenpreis', der sich mit der Zeit anpasst, anstatt an feste Uhrzeiten gebunden zu sein.", "long_description": "Zeigt den durchschnittlichen Preis pro kWh berechnet aus 5 Intervallen: 2 vorherige, aktuelles und 2 nächste Intervalle (ca. 75 Minuten insgesamt). Dies bietet einen geglätteten 'Stundenpreis', der sich mit der Zeit anpasst, anstatt an feste Uhrzeiten gebunden zu sein.",
"usage_tips": "Nutze dies für einen stabileren Preisindikator, der kurzfristige Schwankungen glättet und dennoch auf Preisänderungen reagiert. Besser als feste Stundenpreise für Verbrauchsentscheidungen." "usage_tips": "Nutze dies für einen stabileren Preisindikator, der kurzfristige Schwankungen glättet und dennoch auf Preisänderungen reagiert. Besser als feste Stundenpreise für Verbrauchsentscheidungen."
}, },
"next_hour_average_price": { "next_hour_average": {
"description": "Gleitender 5-Intervall-Durchschnittspreis für nächste Stunde pro kWh", "description": "Gleitender 5-Intervall-Durchschnittspreis für nächste Stunde pro kWh",
"long_description": "Zeigt den durchschnittlichen Preis pro kWh berechnet aus 5 Intervallen, die eine Stunde voraus zentriert sind: ungefähr Intervalle +2 bis +6 von jetzt (Minuten +30 bis +105 abdeckend). Dies bietet einen vorausschauenden geglätteten 'Stundenpreis' zur Verbrauchsplanung.", "long_description": "Zeigt den durchschnittlichen Preis pro kWh berechnet aus 5 Intervallen, die eine Stunde voraus zentriert sind: ungefähr Intervalle +2 bis +6 von jetzt (Minuten +30 bis +105 abdeckend). Dies bietet einen vorausschauenden geglätteten 'Stundenpreis' zur Verbrauchsplanung.",
"usage_tips": "Nutze dies, um Preisänderungen in der nächsten Stunde vorherzusehen. Hilfreich für die Planung von verbrauchsintensiven Aktivitäten wie Elektrofahrzeug-Laden, Geschirrspüler oder Heizsysteme." "usage_tips": "Nutze dies, um Preisänderungen in der nächsten Stunde vorherzusehen. Hilfreich für die Planung von verbrauchsintensiven Aktivitäten wie Elektrofahrzeug-Laden, Geschirrspüler oder Heizsysteme."
@ -58,9 +36,9 @@
"usage_tips": "Nutze dies, um den Betrieb von Geräten während Spitzenpreiszeiten zu vermeiden" "usage_tips": "Nutze dies, um den Betrieb von Geräten während Spitzenpreiszeiten zu vermeiden"
}, },
"average_price_today": { "average_price_today": {
"description": "Der typische Strompreis für heute pro kWh (konfigurierbares Anzeigeformat)", "description": "Der durchschnittliche Strompreis für heute pro kWh",
"long_description": "Zeigt den typischen Preis pro kWh für heute. **Standardmäßig zeigt der Status den Median** (resistent gegen extreme Preisspitzen, zeigt was du generell erwarten kannst). Du kannst dies in den Integrationsoptionen ändern, um stattdessen das arithmetische Mittel anzuzeigen. Der alternative Wert ist immer als Attribut `price_mean` oder `price_median` für Automatisierungen verfügbar.", "long_description": "Zeigt den durchschnittlichen Preis pro kWh für den aktuellen Tag von deinem Tibber-Abonnement an",
"usage_tips": "Nutze den Status-Wert für die Anzeige. Für exakte Kostenberechnungen in Automatisierungen nutze: {{ state_attr('sensor.average_price_today', 'price_mean') }}" "usage_tips": "Nutze dies als Grundlage für den Vergleich mit aktuellen Preisen"
}, },
"lowest_price_tomorrow": { "lowest_price_tomorrow": {
"description": "Der niedrigste Strompreis für morgen pro kWh", "description": "Der niedrigste Strompreis für morgen pro kWh",
@ -73,49 +51,19 @@
"usage_tips": "Nutze dies, um den Betrieb von Geräten während der teuersten Stunden morgen zu vermeiden. Plane nicht-essentielle Lasten außerhalb dieser Spitzenpreiszeiten." "usage_tips": "Nutze dies, um den Betrieb von Geräten während der teuersten Stunden morgen zu vermeiden. Plane nicht-essentielle Lasten außerhalb dieser Spitzenpreiszeiten."
}, },
"average_price_tomorrow": { "average_price_tomorrow": {
"description": "Der typische Strompreis für morgen pro kWh (konfigurierbares Anzeigeformat)", "description": "Der durchschnittliche Strompreis für morgen pro kWh",
"long_description": "Zeigt den typischen Preis pro kWh für morgen. **Standardmäßig zeigt der Status den Median** (resistent gegen extreme Preisspitzen). Du kannst dies in den Integrationsoptionen ändern, um stattdessen das arithmetische Mittel anzuzeigen. Der alternative Wert ist als Attribut verfügbar. Dieser Sensor wird nicht verfügbar, bis die Preise für morgen von Tibber veröffentlicht werden (typischerweise zwischen 13:00 und 14:00 Uhr MEZ).", "long_description": "Zeigt den durchschnittlichen Preis pro kWh für den morgigen Tag von deinem Tibber-Abonnement an. Dieser Sensor wird nicht verfügbar, bis die Preise für morgen von Tibber veröffentlicht werden (typischerweise zwischen 13:00 und 14:00 Uhr MEZ).",
"usage_tips": "Nutze den Status-Wert für Anzeige und schnelle Vergleiche. Für Automatisierungen, die exakte Kostenberechnungen benötigen, nutze das Attribut `price_mean`: {{ state_attr('sensor.average_price_tomorrow', 'price_mean') }}" "usage_tips": "Nutze dies als Grundlinie für den Vergleich mit den morgigen Preisen und zur Verbrauchsplanung. Vergleiche mit dem heutigen Durchschnitt, um zu sehen, ob morgen insgesamt teurer oder günstiger wird."
},
"yesterday_price_level": {
"description": "Aggregiertes Preisniveau für gestern",
"long_description": "Zeigt das aggregierte Preisniveau für alle Intervalle von gestern. Verwendet die gleiche Logik wie die Stundensensoren, um das Gesamtpreisniveau für den ganzen Tag zu ermitteln.",
"usage_tips": "Nutze dies, um die gestrige Preissituation zu verstehen. Vergleiche mit heute, um tägliche Trends zu sehen."
},
"today_price_level": {
"description": "Aggregiertes Preisniveau für heute",
"long_description": "Zeigt das aggregierte Preisniveau für alle Intervalle von heute. Verwendet die gleiche Logik wie die Stundensensoren, um das Gesamtpreisniveau für den ganzen Tag zu ermitteln.",
"usage_tips": "Nutze dies, um die heutige Preissituation auf einen Blick zu verstehen. Hilfreich für schnelle Einschätzungen, ob heute generell günstig oder teuer ist."
},
"tomorrow_price_level": {
"description": "Aggregiertes Preisniveau für morgen",
"long_description": "Zeigt das aggregierte Preisniveau für alle Intervalle von morgen. Verwendet die gleiche Logik wie die Stundensensoren, um das Gesamtpreisniveau für den ganzen Tag zu ermitteln. Dieser Sensor wird nicht verfügbar, bis die Preise für morgen von Tibber veröffentlicht werden (typischerweise zwischen 13:00 und 14:00 Uhr MEZ).",
"usage_tips": "Nutze dies, um die morgige Preissituation zu verstehen. Vergleiche mit heute, um zu sehen, ob morgen günstiger oder teurer für den Energieverbrauch wird."
},
"yesterday_price_rating": {
"description": "Aggregierte Preisbewertung für gestern",
"long_description": "Zeigt die aggregierte Preisbewertung (niedrig/normal/hoch) für alle Intervalle von gestern, basierend auf deinen konfigurierten Schwellenwerten. Verwendet die gleiche Logik wie die Stundensensoren, um die Gesamtbewertung für den ganzen Tag zu ermitteln.",
"usage_tips": "Nutze dies, um die gestrige Preissituation relativ zu deinen persönlichen Schwellenwerten zu verstehen. Vergleiche mit heute für Trendanalysen."
},
"today_price_rating": {
"description": "Aggregierte Preisbewertung für heute",
"long_description": "Zeigt die aggregierte Preisbewertung (niedrig/normal/hoch) für alle Intervalle von heute, basierend auf deinen konfigurierten Schwellenwerten. Verwendet die gleiche Logik wie die Stundensensoren, um die Gesamtbewertung für den ganzen Tag zu ermitteln.",
"usage_tips": "Nutze dies, um die heutige Preissituation relativ zu deinen persönlichen Schwellenwerten schnell einzuschätzen. Hilft bei Verbrauchsentscheidungen für den aktuellen Tag."
},
"tomorrow_price_rating": {
"description": "Aggregierte Preisbewertung für morgen",
"long_description": "Zeigt die aggregierte Preisbewertung (niedrig/normal/hoch) für alle Intervalle von morgen, basierend auf deinen konfigurierten Schwellenwerten. Verwendet die gleiche Logik wie die Stundensensoren, um die Gesamtbewertung für den ganzen Tag zu ermitteln. Dieser Sensor wird nicht verfügbar, bis die Preise für morgen von Tibber veröffentlicht werden (typischerweise zwischen 13:00 und 14:00 Uhr MEZ).",
"usage_tips": "Nutze dies, um den morgigen Energieverbrauch basierend auf deinen persönlichen Preisschwellenwerten zu planen. Vergleiche mit heute, um zu entscheiden, ob du den Verbrauch auf morgen verschieben oder heute nutzen solltest."
}, },
"trailing_price_average": { "trailing_price_average": {
"description": "Der typische Strompreis der letzten 24 Stunden pro kWh (konfigurierbares Anzeigeformat)", "description": "Der durchschnittliche Strompreis für die letzten 24 Stunden pro kWh",
"long_description": "Zeigt den typischen Preis pro kWh der letzten 24 Stunden. **Standardmäßig zeigt der Status den Median** (resistent gegen extreme Spitzen, zeigt welches Preisniveau typisch war). Du kannst dies in den Integrationsoptionen ändern, um stattdessen das arithmetische Mittel anzuzeigen. Der alternative Wert ist als Attribut verfügbar. Wird alle 15 Minuten aktualisiert.", "long_description": "Zeigt den durchschnittlichen Preis pro kWh berechnet aus den letzten 24 Stunden (nachlaufender Durchschnitt) von deinem Tibber-Abonnement an. Dies bietet einen gleitenden Durchschnitt, der alle 15 Minuten basierend auf historischen Daten aktualisiert wird.",
"usage_tips": "Nutze den Status-Wert, um das typische aktuelle Preisniveau zu sehen. Für Kostenberechnungen nutze: {{ state_attr('sensor.trailing_price_average', 'price_mean') }}" "usage_tips": "Nutze dies, um aktuelle Preise mit den jüngsten Trends zu vergleichen. Ein aktueller Preis deutlich über diesem Durchschnitt kann ein guter Zeitpunkt sein, um den Verbrauch zu reduzieren."
}, },
"leading_price_average": { "leading_price_average": {
"description": "Der typische Strompreis für die nächsten 24 Stunden pro kWh (konfigurierbares Anzeigeformat)", "description": "Der durchschnittliche Strompreis für die nächsten 24 Stunden pro kWh",
"long_description": "Zeigt den typischen Preis pro kWh für die nächsten 24 Stunden. **Standardmäßig zeigt der Status den Median** (resistent gegen extreme Spitzen, zeigt welches Preisniveau zu erwarten ist). Du kannst dies in den Integrationsoptionen ändern, um stattdessen das arithmetische Mittel anzuzeigen. Der alternative Wert ist als Attribut verfügbar.", "long_description": "Zeigt den durchschnittlichen Preis pro kWh berechnet aus den nächsten 24 Stunden (vorlaufender Durchschnitt) von deinem Tibber-Abonnement an. Dies bietet einen vorausschauenden Durchschnitt basierend auf verfügbaren Prognosedaten.",
"usage_tips": "Nutze den Status-Wert, um das typische kommende Preisniveau zu sehen. Für Kostenberechnungen nutze: {{ state_attr('sensor.leading_price_average', 'price_mean') }}" "usage_tips": "Nutze dies zur Energieverbrauchsplanung. Wenn der aktuelle Preis unter dem vorlaufenden Durchschnitt liegt, kann es ein guter Zeitpunkt sein, um energieintensive Geräte zu betreiben."
}, },
"trailing_price_min": { "trailing_price_min": {
"description": "Der niedrigste Strompreis für die letzten 24 Stunden pro kWh", "description": "Der niedrigste Strompreis für die letzten 24 Stunden pro kWh",
@ -137,7 +85,7 @@
"long_description": "Zeigt den höchsten Preis pro kWh für die nächsten 24 Stunden (vorlaufendes Maximum) von deinem Tibber-Abonnement an. Dies bietet den höchsten erwarteten Preis in den nächsten 24 Stunden basierend auf Prognosedaten.", "long_description": "Zeigt den höchsten Preis pro kWh für die nächsten 24 Stunden (vorlaufendes Maximum) von deinem Tibber-Abonnement an. Dies bietet den höchsten erwarteten Preis in den nächsten 24 Stunden basierend auf Prognosedaten.",
"usage_tips": "Nutze dies, um den Betrieb von Geräten während kommender Spitzenpreiszeiten zu vermeiden." "usage_tips": "Nutze dies, um den Betrieb von Geräten während kommender Spitzenpreiszeiten zu vermeiden."
}, },
"current_interval_price_level": { "price_level": {
"description": "Die aktuelle Preislevelklassifikation", "description": "Die aktuelle Preislevelklassifikation",
"long_description": "Zeigt die Klassifizierung von Tibber für den aktuellen Preis im Vergleich zu historischen Preisen an", "long_description": "Zeigt die Klassifizierung von Tibber für den aktuellen Preis im Vergleich zu historischen Preisen an",
"usage_tips": "Nutze dies, um Automatisierungen auf Basis des relativen Preisniveaus anstelle der absoluten Preise zu erstellen" "usage_tips": "Nutze dies, um Automatisierungen auf Basis des relativen Preisniveaus anstelle der absoluten Preise zu erstellen"
@ -162,7 +110,7 @@
"long_description": "Zeigt das mediane Preisniveau über 5 Intervalle, die eine Stunde voraus zentriert sind. Hilft bei der Verbrauchsplanung basierend auf kommenden Preistrends statt momentanen zukünftigen Preisen.", "long_description": "Zeigt das mediane Preisniveau über 5 Intervalle, die eine Stunde voraus zentriert sind. Hilft bei der Verbrauchsplanung basierend auf kommenden Preistrends statt momentanen zukünftigen Preisen.",
"usage_tips": "Nutze dies, um Aktivitäten für die nächste Stunde basierend auf einer geglätteten Preisniveau-Prognose zu planen." "usage_tips": "Nutze dies, um Aktivitäten für die nächste Stunde basierend auf einer geglätteten Preisniveau-Prognose zu planen."
}, },
"current_interval_price_rating": { "price_rating": {
"description": "Wie sich der Preis des aktuellen Intervalls mit historischen Daten vergleicht", "description": "Wie sich der Preis des aktuellen Intervalls mit historischen Daten vergleicht",
"long_description": "Zeigt, wie sich der Preis des aktuellen Intervalls im Vergleich zu historischen Preisdaten als Prozentsatz verhält", "long_description": "Zeigt, wie sich der Preis des aktuellen Intervalls im Vergleich zu historischen Preisdaten als Prozentsatz verhält",
"usage_tips": "Ein positiver Prozentsatz bedeutet, dass der aktuelle Preis überdurchschnittlich ist, negativ bedeutet unterdurchschnittlich" "usage_tips": "Ein positiver Prozentsatz bedeutet, dass der aktuelle Preis überdurchschnittlich ist, negativ bedeutet unterdurchschnittlich"
@ -189,8 +137,8 @@
}, },
"next_avg_1h": { "next_avg_1h": {
"description": "Durchschnittspreis für die nächste 1 Stunde (vorwärts-blickend ab nächstem Intervall)", "description": "Durchschnittspreis für die nächste 1 Stunde (vorwärts-blickend ab nächstem Intervall)",
"long_description": "Vorwärts-blickender Durchschnitt: Zeigt den Durchschnitt der nächsten 4 Intervalle (1 Stunde) beginnend ab dem NÄCHSTEN 15-Minuten-Intervall (aktuelles nicht inkludiert). Unterscheidet sich von current_hour_average_price, das vergangene Intervalle einbezieht. Nutze dies für absolute Preisschwellen-Planung.", "long_description": "Vorwärts-blickender Durchschnitt: Zeigt den Durchschnitt der nächsten 4 Intervalle (1 Stunde) beginnend ab dem NÄCHSTEN 15-Minuten-Intervall (aktuelles nicht inkludiert). Unterscheidet sich von current_hour_average, das vergangene Intervalle einbezieht. Nutze dies für absolute Preisschwellen-Planung.",
"usage_tips": "Absolute Preisschwelle: Starte Geräte nur, wenn der Durchschnitt unter deinem maximal akzeptablen Preis bleibt (z.B. unter 0,25 EUR/kWh). Kombiniere mit Trend-Sensor für optimales Timing. Hinweis: Dies ist KEIN Ersatz für Stundenpreise - nutze current_hour_average_price dafür." "usage_tips": "Absolute Preisschwelle: Starte Geräte nur, wenn der Durchschnitt unter deinem maximal akzeptablen Preis bleibt (z.B. unter 0,25 EUR/kWh). Kombiniere mit Trend-Sensor für optimales Timing. Hinweis: Dies ist KEIN Ersatz für Stundenpreise - nutze current_hour_average dafür."
}, },
"next_avg_2h": { "next_avg_2h": {
"description": "Durchschnittspreis für die nächsten 2 Stunden", "description": "Durchschnittspreis für die nächsten 2 Stunden",
@ -205,7 +153,7 @@
"next_avg_4h": { "next_avg_4h": {
"description": "Durchschnittspreis für die nächsten 4 Stunden", "description": "Durchschnittspreis für die nächsten 4 Stunden",
"long_description": "Zeigt den Durchschnittspreis für die nächsten 16 Intervalle (4 Stunden) beginnend ab dem nächsten 15-Minuten-Intervall.", "long_description": "Zeigt den Durchschnittspreis für die nächsten 16 Intervalle (4 Stunden) beginnend ab dem nächsten 15-Minuten-Intervall.",
"usage_tips": "Absolute Preisschwelle: Lege maximal akzeptablen Preis für Wärmepumpen oder Warmwasserbereiter fest. Verhindert Betrieb während teurer Zeiträume unabhängig von relativen Trends." "usage_tips": "Absolute Preisschwelle: Lege maximal akzeptablen Preis für Wärmepumpen oder Warmwasserbereiter fest. Verhindert Betrieb während teurer Perioden unabhängig von relativen Trends."
}, },
"next_avg_5h": { "next_avg_5h": {
"description": "Durchschnittspreis für die nächsten 5 Stunden", "description": "Durchschnittspreis für die nächsten 5 Stunden",
@ -267,16 +215,6 @@
"long_description": "Vergleicht aktuellen Intervallpreis mit Durchschnitt der nächsten 12 Stunden (48 Intervalle). Steigend wenn Zukunft >5% höher, fallend wenn >5% niedriger, sonst stabil.", "long_description": "Vergleicht aktuellen Intervallpreis mit Durchschnitt der nächsten 12 Stunden (48 Intervalle). Steigend wenn Zukunft >5% höher, fallend wenn >5% niedriger, sonst stabil.",
"usage_tips": "Relative Optimierung: Langfristige strategische Entscheidungen. 'fallend' = deutlich bessere Preise kommen heute Nacht/morgen. Findet optimales Timing in jeder Marktsituation. Am besten kombiniert mit avg-Sensor Preisobergrenze." "usage_tips": "Relative Optimierung: Langfristige strategische Entscheidungen. 'fallend' = deutlich bessere Preise kommen heute Nacht/morgen. Findet optimales Timing in jeder Marktsituation. Am besten kombiniert mit avg-Sensor Preisobergrenze."
}, },
"current_price_trend": {
"description": "Aktuelle Preistrend-Richtung und wie lange sie anhält",
"long_description": "Zeigt den aktuellen Preistrend (steigend/fallend/stabil) durch Kombination von historischem Momentum (gewichteter 1h-Rückblick) mit Zukunftsausblick. Erkennt laufende Trends früher als reine Zukunftsanalyse. Nutzt ±3% Momentum-Schwelle und volatilitätsabhängigen Zukunftsvergleich. Berechnet dynamisch bis zur nächsten Trendänderung (oder 3h Standard, falls keine Änderung in 24h). Der Status zeigt die aktuelle Richtung, Attribute zeigen, wann sich der Trend ändert und was als Nächstes kommt.",
"usage_tips": "Status-Anzeige: Dashboard-Sichtbarkeit von 'was passiert jetzt bis wann'. Perfekt synchronisiert mit next_price_trend_change. Beispiel: Badge mit 'Steigend für 2,5h' oder 'Fallend bis 16:45'. Besser als Zeitfenster-Sensoren, weil es versteht, dass du dich BEREITS in einem Trend befindest, nicht nur zukünftige Änderungen vorhersagt. Nutze für schnelle visuelle Übersicht, nicht für Automations-Trigger."
},
"next_price_trend_change": {
"description": "Wann die nächste bedeutende Preistrend-Änderung eintreten wird",
"long_description": "Scannt die nächsten 24 Stunden (96 Intervalle), um zu finden, wann sich der Preistrend (steigend/fallend/stabil) vom aktuellen Momentum ändern wird. Bestimmt zuerst den aktuellen Trend mit gewichtetem 1h-Rückblick (erkennt laufende Trends), dann findet es die Umkehr. Verwendet volatilitätsadaptive Schwellwerte (3% Momentum-Erkennung, marktangepasster Zukunftsvergleich). Gibt den Zeitstempel zurück, wann die Änderung erwartet wird.",
"usage_tips": "Ereignisbasierte Automation: Aktionen WENN Trend wechselt auslösen, nicht IN X Stunden. Beispiel: 'E-Auto laden wenn nächste Trendänderung fallende Preise zeigt' oder 'Spülmaschine vor Preisanstieg starten'. Ergänzt Zeitfenster-Sensoren (price_trend_Xh), die beantworten 'WERDEN Preise in X Stunden höher sein?'"
},
"daily_rating": { "daily_rating": {
"description": "Wie sich die heutigen Preise mit historischen Daten vergleichen", "description": "Wie sich die heutigen Preise mit historischen Daten vergleichen",
"long_description": "Zeigt, wie sich die heutigen Preise im Vergleich zu historischen Preisdaten als Prozentsatz verhält", "long_description": "Zeigt, wie sich die heutigen Preise im Vergleich zu historischen Preisdaten als Prozentsatz verhält",
@ -289,172 +227,32 @@
}, },
"data_timestamp": { "data_timestamp": {
"description": "Zeitstempel des letzten verfügbaren Preisintervalls", "description": "Zeitstempel des letzten verfügbaren Preisintervalls",
"long_description": "Zeigt den Zeitstempel des letzten verfügbaren Preisdatenintervalls von deinem Tibber-Abonnement" "long_description": "Zeigt den Zeitstempel des letzten verfügbaren Preisdatenintervalls von Ihrem Tibber-Abonnement"
}, },
"today_volatility": { "today_volatility": {
"description": "Wie stark sich die Strompreise heute verändern", "description": "Preisvolatilitätsklassifizierung für heute",
"long_description": "Zeigt, ob die heutigen Preise stabil bleiben oder stark schwanken. Niedrige Volatilität bedeutet recht konstante Preise Timing ist kaum wichtig. Hohe Volatilität bedeutet spürbare Preisunterschiede über den Tag gute Chance, den Verbrauch auf günstigere Zeiten zu verschieben. `price_coefficient_variation_%` zeigt den Prozentwert, `price_spread` die absolute Preisspanne.", "long_description": "Zeigt, wie stark die Strompreise im Laufe des heutigen Tages variieren, basierend auf der Spannweite (Differenz zwischen höchstem und niedrigstem Preis). Klassifizierung: NIEDRIG = Spannweite < 5ct, MODERAT = 5-15ct, HOCH = 15-30ct, SEHR HOCH = >30ct.",
"usage_tips": "Nutze dies, um zu entscheiden, ob Optimierung sich lohnt. Bei niedriger Volatilität kannst du Geräte jederzeit laufen lassen. Bei hoher Volatilität sparst du spürbar, wenn du Best-Price-Perioden nutzt." "usage_tips": "Verwenden Sie dies, um zu entscheiden, ob preisbasierte Optimierung lohnenswert ist. Zum Beispiel lohnt sich bei einer Balkonbatterie mit 15% Effizienzverlusten die Optimierung nur, wenn die Volatilität mindestens MODERAT ist. Erstellen Sie Automatisierungen, die die Volatilität prüfen, bevor Lade-/Entladezyklen geplant werden."
}, },
"tomorrow_volatility": { "tomorrow_volatility": {
"description": "Wie stark sich die Strompreise morgen verändern werden", "description": "Preisvolatilitätsklassifizierung für morgen",
"long_description": "Zeigt, ob die Preise morgen stabil bleiben oder stark schwanken. Verfügbar, sobald die morgigen Daten veröffentlicht sind (typischerweise 13:0014:00 MEZ). Niedrige Volatilität bedeutet recht konstante Preise Timing ist nicht kritisch. Hohe Volatilität bedeutet deutliche Preisunterschiede über den Tag gute Gelegenheit, energieintensive Aufgaben zu planen. `price_coefficient_variation_%` zeigt den Prozentwert, `price_spread` die absolute Preisspanne.", "long_description": "Zeigt, wie stark die Strompreise im Laufe des morgigen Tages variieren werden, basierend auf der Spannweite (Differenz zwischen höchstem und niedrigstem Preis). Wird nicht verfügbar, bis morgige Daten veröffentlicht sind (typischerweise 13:00-14:00 MEZ).",
"usage_tips": "Nutze dies für die Planung des morgigen Energieverbrauchs. Hohe Volatilität? Plane flexible Lasten in Best-Price-Perioden. Niedrige Volatilität? Lass Geräte laufen, wann es dir passt." "usage_tips": "Verwenden Sie dies zur Vorausplanung des morgigen Energieverbrauchs. Bei HOHER oder SEHR HOHER Volatilität morgen lohnt sich die Optimierung des Energieverbrauchs. Bei NIEDRIGER Volatilität können Sie Geräte jederzeit ohne wesentliche Kostenunterschiede betreiben."
}, },
"next_24h_volatility": { "next_24h_volatility": {
"description": "Wie stark sich die Preise in den nächsten 24 Stunden verändern", "description": "Preisvolatilitätsklassifizierung für die rollierenden nächsten 24 Stunden",
"long_description": "Zeigt die Preisvolatilität für ein rollierendes 24-Stunden-Fenster ab jetzt (aktualisiert alle 15 Minuten). Niedrige Volatilität bedeutet recht konstante Preise. Hohe Volatilität bedeutet spürbare Preisschwankungen und damit Chancen zur Optimierung. Im Unterschied zu Heute/Morgen-Sensoren überschreitet dieser Tagesgrenzen und liefert eine durchgängige Vorhersage. `price_coefficient_variation_%` zeigt den Prozentwert, `price_spread` die absolute Preisspanne.", "long_description": "Zeigt, wie stark die Strompreise in den nächsten 24 Stunden ab jetzt variieren (rollierendes Fenster). Dies überschreitet Tagesgrenzen und aktualisiert sich alle 15 Minuten, wodurch eine vorausschauende Volatilitätsbewertung unabhängig von Kalendertagen bereitgestellt wird.",
"usage_tips": "Am besten für Entscheidungen in Echtzeit. Nutze dies für Batterieladestrategien oder andere flexible Lasten, die über Mitternacht laufen könnten. Bietet eine konsistente 24h-Perspektive unabhängig vom Kalendertag." "usage_tips": "Bester Sensor für Echtzeitoptimierungsentscheidungen. Im Gegensatz zu Heute/Morgen-Sensoren, die um Mitternacht wechseln, bietet dies eine kontinuierliche 24h-Volatilitätsbewertung. Verwenden Sie dies für Batterielade-Strategien, die Tagesgrenzen überschreiten."
}, },
"today_tomorrow_volatility": { "today_tomorrow_volatility": {
"description": "Kombinierte Preisvolatilität für heute und morgen", "description": "Kombinierte Preisvolatilitätsklassifizierung für heute und morgen",
"long_description": "Zeigt die Gesamtvolatilität, wenn heute und morgen gemeinsam betrachtet werden (sobald die morgigen Daten verfügbar sind). Zeigt, ob über die Tagesgrenze hinweg deutliche Preisunterschiede bestehen. Fällt auf nur-heute zurück, wenn morgige Daten noch fehlen. Hilfreich für mehrtägige Optimierung. `price_coefficient_variation_%` zeigt den Prozentwert, `price_spread` die absolute Preisspanne.", "long_description": "Zeigt die Volatilität über heute und morgen zusammen (wenn morgige Daten verfügbar sind). Bietet eine erweiterte Ansicht der Preisvariation über bis zu 48 Stunden. Fällt auf Nur-Heute zurück, wenn morgige Daten noch nicht verfügbar sind.",
"usage_tips": "Nutze dies für Aufgaben, die sich über mehrere Tage erstrecken. Prüfe, ob die Preisunterschiede groß genug für eine Planung sind. Die einzelnen Tages-Sensoren zeigen die Beiträge pro Tag, falls du mehr Details brauchst." "usage_tips": "Verwenden Sie dies für Mehrtagsplanung und um zu verstehen, ob Preismöglichkeiten über die Tagesgrenze hinweg bestehen. Die Attribute 'today_volatility' und 'tomorrow_volatility' zeigen individuelle Tagesbeiträge. Nützlich für die Planung von Ladesitzungen, die Mitternacht überschreiten könnten."
}, },
"data_lifecycle_status": { "price_forecast": {
"description": "Aktueller Status des Preisdaten-Lebenszyklus und der Zwischenspeicherung", "description": "Prognose kommender Strompreise",
"long_description": "Zeigt an, ob die Integration zwischengespeicherte Daten oder frische Daten von der API verwendet. Zeigt aktuellen Lebenszyklus-Status: 'cached' (verwendet gespeicherte Daten), 'fresh' (gerade von API abgerufen), 'refreshing' (wird gerade abgerufen), 'searching_tomorrow' (sucht aktiv nach Morgendaten nach 13:00 Uhr), 'turnover_pending' (innerhalb 15 Minuten vor Mitternacht, 23:45-00:00) oder 'error' (Abruf fehlgeschlagen). Enthält umfassende Attribute wie Cache-Alter, nächste API-Abfragezeit, Datenvollständigkeit und API-Aufruf-Statistiken.", "long_description": "Zeigt kommende Strompreise für zukünftige Intervalle in einem Format, das einfach in Dashboards verwendet werden kann",
"usage_tips": "Verwende diesen Diagnosesensor, um Datenaktualität und API-Aufrufmuster zu verstehen. Prüfe das 'cache_age'-Attribut, um zu sehen, wie alt die aktuellen Daten sind. Überwache 'next_api_poll', um zu wissen, wann das nächste Update geplant ist. Verwende 'data_completeness', um zu sehen, ob Daten für gestern/heute/morgen verfügbar sind. Der 'api_calls_today'-Zähler hilft, die API-Nutzung zu verfolgen. Perfekt zur Fehlersuche oder zum Verständnis des Integrationsverhaltens." "usage_tips": "Verwenden Sie die Attribute dieser Entität, um kommende Preise in Diagrammen oder benutzerdefinierten Karten anzuzeigen. Greifen Sie entweder auf 'intervals' für alle zukünftigen Intervalle oder auf 'hours' für stündliche Zusammenfassungen zu."
},
"best_price_end_time": {
"description": "Wann der aktuelle oder nächste günstige Zeitraum endet",
"long_description": "Zeigt den Endzeitstempel des aktuellen günstigen Zeitraums an, wenn dieser aktiv ist, oder das Ende des nächsten Zeitraums, wenn kein Zeitraum aktiv ist. Zeigt immer eine nützliche Zeitreferenz zur Planung. Gibt nur 'Unbekannt' zurück, wenn keine Zeiträume ermittelt wurden.",
"usage_tips": "Nutze dies, um einen Countdown wie 'Günstiger Zeitraum endet in 2 Stunden' (wenn aktiv) oder 'Nächster günstiger Zeitraum endet um 14:00' (wenn inaktiv) anzuzeigen. Home Assistant zeigt automatisch relative Zeit für Zeitstempel-Sensoren an."
},
"best_price_period_duration": {
"description": "Gesamtlänge des aktuellen oder nächsten günstigen Zeitraums",
"long_description": "Zeigt, wie lange der günstige Zeitraum insgesamt dauert. Der State wird in Stunden angezeigt (z. B. 1,5 h) für eine einfache Lesbarkeit in der UI, während das Attribut `period_duration_minutes` denselben Wert in Minuten bereitstellt (z. B. 90) für Automationen. Während eines aktiven Zeitraums zeigt dies die Dauer des aktuellen Zeitraums. Wenn kein Zeitraum aktiv ist, zeigt dies die Dauer des nächsten kommenden Zeitraums. Gibt nur 'Unbekannt' zurück, wenn keine Zeiträume ermittelt wurden.",
"usage_tips": "Für Anzeige: State-Wert (Stunden) in Dashboards nutzen. Für Automationen: Attribut `period_duration_minutes` verwenden, um zu prüfen, ob genug Zeit für langläufige Geräte ist (z. B. 'Wenn period_duration_minutes >= 90, starte Waschmaschine')."
},
"best_price_remaining_minutes": {
"description": "Verbleibende Zeit im aktuellen günstigen Zeitraum",
"long_description": "Zeigt, wie viel Zeit im aktuellen günstigen Zeitraum noch verbleibt. Der State wird in Stunden angezeigt (z. B. 0,5 h) für eine einfache Lesbarkeit, während das Attribut `remaining_minutes` Minuten bereitstellt (z. B. 30) für Automationslogik. Gibt 0 zurück, wenn kein Zeitraum aktiv ist. Aktualisiert sich jede Minute. Prüfe binary_sensor.best_price_period, um zu sehen, ob ein Zeitraum aktuell aktiv ist.",
"usage_tips": "Für Automationen: Attribut `remaining_minutes` mit numerischen Vergleichen nutzen wie 'Wenn remaining_minutes > 0 UND remaining_minutes < 30, starte Waschmaschine jetzt'. Der Wert 0 macht es einfach zu prüfen, ob ein Zeitraum aktiv ist (Wert > 0) oder nicht (Wert = 0)."
},
"best_price_progress": {
"description": "Fortschritt durch aktuellen günstigen Zeitraum (0% wenn inaktiv)",
"long_description": "Zeigt den Fortschritt durch den aktuellen günstigen Zeitraum als 0100%. Gibt 0% zurück, wenn kein Zeitraum aktiv ist. Aktualisiert sich jede Minute. 0% bedeutet Zeitraum gerade gestartet, 100% bedeutet, er endet gleich.",
"usage_tips": "Super für visuelle Fortschrittsbalken. Nutze in Automatisierungen: 'Wenn progress > 0 UND progress > 75, sende Benachrichtigung, dass günstiger Zeitraum bald endet'. Wert 0 zeigt keinen aktiven Zeitraum an."
},
"best_price_next_start_time": {
"description": "Wann der nächste günstige Zeitraum startet",
"long_description": "Zeigt, wann der nächste kommende günstige Zeitraum startet. Während eines aktiven Zeitraums zeigt dies den Start des nächsten Zeitraums nach dem aktuellen. Gibt nur 'Unbekannt' zurück, wenn keine zukünftigen Zeiträume ermittelt wurden.",
"usage_tips": "Immer nützlich für Vorausplanung: 'Nächster günstiger Zeitraum startet in 3 Stunden' (egal, ob du gerade in einem Zeitraum bist oder nicht). Kombiniere mit Automatisierungen: 'Wenn nächste Startzeit in 10 Minuten ist, sende Benachrichtigung zur Vorbereitung der Waschmaschine'."
},
"best_price_next_in_minutes": {
"description": "Zeit bis zum nächsten günstigen Zeitraum",
"long_description": "Zeigt, wie lange es bis zum nächsten günstigen Zeitraum dauert. Der State wird in Stunden angezeigt (z. B. 2,25 h) für Dashboards, während das Attribut `next_in_minutes` Minuten bereitstellt (z. B. 135) für Automationsbedingungen. Während eines aktiven Zeitraums zeigt dies die Zeit bis zum Zeitraum nach dem aktuellen. Gibt 0 während kurzer Übergangsphasen zurück. Aktualisiert sich jede Minute.",
"usage_tips": "Für Automationen: Attribut `next_in_minutes` nutzen wie 'Wenn next_in_minutes > 0 UND next_in_minutes < 15, warte, bevor du die Geschirrspülmaschine startest'. Wert > 0 zeigt immer an, dass ein zukünftiger Zeitraum geplant ist."
},
"peak_price_end_time": {
"description": "Wann der aktuelle oder nächste teure Zeitraum endet",
"long_description": "Zeigt den Endzeitstempel des aktuellen teuren Zeitraums an, wenn dieser aktiv ist, oder das Ende des nächsten Zeitraums, wenn kein Zeitraum aktiv ist. Zeigt immer eine nützliche Zeitreferenz zur Planung. Gibt nur 'Unbekannt' zurück, wenn keine Zeiträume ermittelt wurden.",
"usage_tips": "Nutze dies, um 'Teurer Zeitraum endet in 1 Stunde' (wenn aktiv) oder 'Nächster teurer Zeitraum endet um 18:00' (wenn inaktiv) anzuzeigen. Kombiniere mit Automatisierungen, um den Betrieb nach der Spitzenzeit fortzusetzen."
},
"peak_price_period_duration": {
"description": "Länge des aktuellen/nächsten teuren Zeitraums",
"long_description": "Gesamtdauer des aktuellen oder nächsten teuren Zeitraums. Der State wird in Stunden angezeigt (z. B. 1,5 h) für leichtes Ablesen in der UI, während das Attribut `period_duration_minutes` denselben Wert in Minuten bereitstellt (z. B. 90) für Automationen. Dieser Wert repräsentiert die **volle geplante Dauer** des Zeitraums und ist konstant während des gesamten Zeitraums, auch wenn die verbleibende Zeit (remaining_minutes) abnimmt.",
"usage_tips": "Kombiniere mit remaining_minutes, um zu berechnen, wann langlaufende Geräte gestoppt werden sollen: Zeitraum begann vor `period_duration_minutes - remaining_minutes` Minuten. Dieses Attribut unterstützt Energiespar-Strategien, indem es hilft, Hochverbrauchsaktivitäten außerhalb teurer Perioden zu planen."
},
"peak_price_remaining_minutes": {
"description": "Verbleibende Zeit im aktuellen teuren Zeitraum",
"long_description": "Zeigt, wie viel Zeit im aktuellen teuren Zeitraum noch verbleibt. Der State wird in Stunden angezeigt (z. B. 0,75 h) für einfaches Ablesen in Dashboards, während das Attribut `remaining_minutes` dieselbe Zeit in Minuten liefert (z. B. 45) für Automationsbedingungen. **Countdown-Timer**: Dieser Wert dekrementiert jede Minute während eines aktiven Zeitraums. Gibt 0 zurück, wenn kein teurer Zeitraum aktiv ist. Aktualisiert sich minütlich.",
"usage_tips": "Für Automationen: Nutze Attribut `remaining_minutes` wie 'Wenn remaining_minutes > 60, setze Heizung auf Energiesparmodus' oder 'Wenn remaining_minutes < 15, erhöhe Temperatur wieder'. UI zeigt benutzerfreundliche Stunden (z. B. 1,25 h). Wert 0 zeigt an, dass kein teurer Zeitraum aktiv ist."
},
"peak_price_progress": {
"description": "Fortschritt durch aktuellen teuren Zeitraum (0% wenn inaktiv)",
"long_description": "Zeigt den Fortschritt durch den aktuellen teuren Zeitraum als 0100%. Gibt 0% zurück, wenn kein Zeitraum aktiv ist. Aktualisiert sich jede Minute. 0% bedeutet Zeitraum gerade gestartet, 100% bedeutet, er endet gleich.",
"usage_tips": "Visueller Fortschrittsindikator in Dashboards. Automatisierung: 'Wenn progress > 0 UND progress > 90, bereite normale Heizplanung vor'. Wert 0 zeigt keinen aktiven Zeitraum an."
},
"peak_price_next_start_time": {
"description": "Wann der nächste teure Zeitraum startet",
"long_description": "Zeigt, wann der nächste kommende teure Zeitraum startet. Während eines aktiven Zeitraums zeigt dies den Start des nächsten Zeitraums nach dem aktuellen. Gibt nur 'Unbekannt' zurück, wenn kein zukünftiger Zeitraum ermittelt wurde.",
"usage_tips": "Immer nützlich für Planung: 'Nächster teurer Zeitraum startet in 2 Stunden'. Automatisierung: 'Wenn nächste Startzeit in 30 Minuten ist, reduziere Heiztemperatur vorsorglich'."
},
"peak_price_next_in_minutes": {
"description": "Zeit bis zum nächsten teuren Zeitraum",
"long_description": "Zeigt, wie lange es bis zum nächsten teuren Zeitraum dauert. Der State wird in Stunden angezeigt (z. B. 2,25 h) für Dashboards, während das Attribut `next_in_minutes` Minuten bereitstellt (z. B. 135) für Automationsbedingungen. Während eines aktiven Zeitraums zeigt dies die Zeit bis zum Zeitraum nach dem aktuellen. Gibt 0 während kurzer Übergangsphasen zurück. Aktualisiert sich jede Minute.",
"usage_tips": "Für Automationen: Attribut `next_in_minutes` nutzen wie 'Wenn next_in_minutes > 0 UND next_in_minutes < 10, reduziere Heizung vorsorglich bevor der teure Zeitraum beginnt'. Wert > 0 zeigt immer an, dass ein zukünftiger teurer Zeitraum geplant ist."
},
"home_type": {
"description": "Art der Wohnung (Wohnung, Haus usw.)",
"long_description": "Zeigt den Wohnungstyp, wie in deinem Tibber-Konto konfiguriert. Diese Metadaten können nützlich sein, um Energieverbrauchsmuster zu kategorisieren.",
"usage_tips": "Nutze dies zur Organisation deines Smart-Home-Systems oder für Analysezwecke."
},
"home_size": {
"description": "Wohnfläche in Quadratmetern",
"long_description": "Zeigt die Größe deiner Wohnung in Quadratmetern, wie in deinem Tibber-Konto konfiguriert. Kann verwendet werden, um den Energieverbrauch pro Quadratmeter zu berechnen.",
"usage_tips": "Nutze dies für Energieeffizienzberechnungen: 'Meine Wohnung verbraucht X kWh pro Quadratmeter pro Jahr'."
},
"main_fuse_size": {
"description": "Hauptsicherungsgröße in Ampere",
"long_description": "Zeigt die Kapazität deiner Hauptsicherung in Ampere. Dies bestimmt die maximale elektrische Last, die deine Wohnung gleichzeitig bewältigen kann.",
"usage_tips": "Nutze dies zur Überlastungsvermeidung: 'Wenn der Gesamtstromverbrauch sich der Sicherungsgröße nähert, verschiebe das Starten zusätzlicher Geräte'."
},
"number_of_residents": {
"description": "Anzahl der im Haushalt lebenden Personen",
"long_description": "Zeigt die Anzahl der Bewohner, wie in deinem Tibber-Konto konfiguriert. Nützlich für Pro-Kopf-Energieverbrauchsberechnungen.",
"usage_tips": "Nutze dies für Haushalts-Energieanalysen: 'Energieverbrauch pro Person pro Tag'."
},
"primary_heating_source": {
"description": "Primärer Heizungstyp",
"long_description": "Zeigt den Typ des in deiner Wohnung verwendeten Heizsystems, wie in deinem Tibber-Konto konfiguriert. Dies kann eine Wärmepumpe, Elektroheizung, Gas, Öl oder andere Heizquellen sein.",
"usage_tips": "Nutze dies zur Kategorisierung heizungsbezogener Automatisierungen oder für Energieverbrauchsanalysen nach Heizungsart."
},
"grid_company": {
"description": "Name deines Stromnetzbetreibers",
"long_description": "Zeigt den Namen des Unternehmens, das das Stromnetz in deiner Region betreibt. Dies ist der Verteilnetzbetreiber (VNB), der für die Stromlieferung zu deinem Haus verantwortlich ist.",
"usage_tips": "Nützlich für administrative Zwecke und zur Behebung netzbezogener Probleme."
},
"grid_area_code": {
"description": "Netzgebiets-Kennziffer",
"long_description": "Zeigt den Code, der dein Stromnetzgebiet identifiziert. Dieser Code wird vom Netzbetreiber für Routing- und Abrechnungszwecke verwendet.",
"usage_tips": "Nutze dies als administrative Referenz oder bei der Kontaktaufnahme mit deinem Netzbetreiber."
},
"price_area_code": {
"description": "Strompreis-Zonencode",
"long_description": "Zeigt den Code für deine Strompreiszone (z.B. NO1, NO2, SE3, DK1). Verschiedene Zonen haben unterschiedliche Großhandelsstrompreise basierend auf regionalem Angebot und Nachfrage.",
"usage_tips": "Nutze dies, um zu verstehen, in welcher Preisregion du dich befindest. Nützlich beim Preisvergleich mit anderen oder bei der Analyse regionaler Preismuster."
},
"consumption_ean": {
"description": "EAN-Code für Stromverbrauchsmessung",
"long_description": "Zeigt die Europäische Artikelnummer (EAN), die deinen Stromverbrauchszähler eindeutig identifiziert. Dieser 18-stellige Code wird für Abrechnungs- und Verwaltungszwecke verwendet.",
"usage_tips": "Nutze dies bei der Kommunikation mit deinem Stromanbieter oder für administrative Dokumentation."
},
"production_ean": {
"description": "EAN-Code für Stromerzeugungsmessung",
"long_description": "Zeigt die Europäische Artikelnummer (EAN) für deinen Stromerzeugungszähler (wenn du Solarpanels oder andere Erzeugung hast). Dieser Code verfolgt Strom, den du ins Netz einspeist.",
"usage_tips": "Relevant, wenn du Solarpanels oder andere Stromerzeugung hast. Nutze für administrative Zwecke und bei der Beantragung von Einspeisevergütung."
},
"energy_tax_type": {
"description": "Art der angewandten Energiesteuer",
"long_description": "Zeigt die Energiesteuerkategorie, die auf deinen Stromverbrauch angewendet wird. Steuersätze variieren nach Land und manchmal nach Verbrauchertyp (privat, gewerblich usw.).",
"usage_tips": "Nutze dies zum Verständnis der Aufschlüsselung deiner Stromrechnung und für Gesamtkostenberechnungen."
},
"vat_type": {
"description": "Mehrwertsteuerkategorie",
"long_description": "Zeigt die Mehrwertsteuerkategorie, die auf deinen Stromverbrauch angewendet wird. Mehrwertsteuersätze variieren nach Land und können für Strom anders sein als für andere Waren und Dienstleistungen.",
"usage_tips": "Nutze dies zum Verständnis deiner Stromrechnung und zur Berechnung der Gesamtkosten inklusive Steuern."
},
"estimated_annual_consumption": {
"description": "Geschätzter jährlicher Stromverbrauch in kWh",
"long_description": "Zeigt deinen geschätzten jährlichen Stromverbrauch in Kilowattstunden, wie in deinem Tibber-Konto berechnet oder konfiguriert. Diese Schätzung wird verwendet, um den tatsächlichen Verbrauch mit den erwarteten Werten zu vergleichen.",
"usage_tips": "Nutze dies, um zu verfolgen, ob dein tatsächlicher Verbrauch über oder unter den Erwartungen liegt. Vergleiche den monatlichen Verbrauch mit 1/12 dieses Wertes, um ungewöhnliche Muster zu identifizieren."
},
"subscription_status": {
"description": "Status deines Tibber-Abonnements",
"long_description": "Zeigt, ob dein Tibber-Abonnement derzeit aktiv ist, beendet wurde oder auf Aktivierung wartet. Ein Status 'Aktiv' bedeutet, dass du aktiv Strom über Tibber beziehst.",
"usage_tips": "Nutze dies zur Überwachung deines Abonnementstatus. Richte Benachrichtigungen ein, wenn sich der Status von 'Aktiv' ändert, um einen unterbrechungsfreien Service sicherzustellen."
},
"chart_data_export": {
"description": "Datenexport für Dashboard-Integrationen",
"long_description": "Dieser Sensor ruft den get_chartdata-Service mit deiner konfigurierten YAML-Konfiguration auf und stellt das Ergebnis als Entity-Attribute bereit. Der Status zeigt 'ready' wenn Daten verfügbar sind, 'error' bei Fehlern, oder 'pending' vor dem ersten Aufruf. Perfekt für Dashboard-Integrationen wie ApexCharts, die Preisdaten aus Entity-Attributen lesen.",
"usage_tips": "Konfiguriere die YAML-Parameter in den Integrationsoptionen entsprechend deinem get_chartdata-Service-Aufruf. Der Sensor aktualisiert automatisch bei Preisdaten-Updates (typischerweise nach Mitternacht und wenn morgige Daten eintreffen). Greife auf die Service-Response-Daten direkt über die Entity-Attribute zu - die Struktur entspricht exakt dem, was get_chartdata zurückgibt."
},
"chart_metadata": {
"description": "Leichtgewichtige Metadaten für Diagrammkonfiguration",
"long_description": "Liefert wesentliche Diagrammkonfigurationswerte als Sensor-Attribute. Nützlich für jede Diagrammkarte, die Y-Achsen-Grenzen benötigt. Der Sensor ruft get_chartdata im Nur-Metadaten-Modus auf (keine Datenverarbeitung) und extrahiert: yaxis_min, yaxis_max (vorgeschlagener Y-Achsenbereich für optimale Skalierung). Der Status spiegelt das Service-Call-Ergebnis wider: 'ready' bei Erfolg, 'error' bei Fehler, 'pending' während der Initialisierung.",
"usage_tips": "Konfiguriere über configuration.yaml unter tibber_prices.chart_metadata_config (optional: day, subunit_currency, resolution). Der Sensor aktualisiert sich automatisch bei Preisdatenänderungen. Greife auf Metadaten aus Attributen zu: yaxis_min, yaxis_max. Verwende mit config-template-card oder jedem Tool, das Entity-Attribute liest - perfekt für dynamische Diagrammkonfiguration ohne manuelle Berechnungen."
} }
}, },
"binary_sensor": { "binary_sensor": {
@ -477,90 +275,6 @@
"description": "Ob die Verbindung zur Tibber API funktioniert", "description": "Ob die Verbindung zur Tibber API funktioniert",
"long_description": "Zeigt an, ob die Integration erfolgreich eine Verbindung zur Tibber API herstellen kann", "long_description": "Zeigt an, ob die Integration erfolgreich eine Verbindung zur Tibber API herstellen kann",
"usage_tips": "Nutze dies, um den Verbindungsstatus zur Tibber API zu überwachen" "usage_tips": "Nutze dies, um den Verbindungsstatus zur Tibber API zu überwachen"
},
"has_ventilation_system": {
"description": "Ob deine Wohnung über eine Lüftungsanlage verfügt",
"long_description": "Zeigt an, ob eine Lüftungsanlage für deine Wohnung im Tibber-Konto registriert ist. Lüftungsanlagen können erhebliche Stromverbraucher sein, die von intelligenter Planung profitieren können.",
"usage_tips": "Nutze dies, um lüftungsspezifische Automatisierungen oder Energieüberwachung zu aktivieren. Falls aktiv, erwäge die Planung der Lüftung während Niedrigpreisphasen."
},
"realtime_consumption_enabled": {
"description": "Ob die Echtzeit-Verbrauchsüberwachung aktiv ist",
"long_description": "Zeigt an, ob die Echtzeit-Stromverbrauchsüberwachung für dein Tibber-Zuhause aktiviert und aktiv ist. Dies erfordert kompatible Messhardware (z. B. Tibber Pulse) und ein aktives Abonnement.",
"usage_tips": "Verwende dies, um zu überprüfen, ob Echtzeit-Verbrauchsdaten verfügbar sind. Aktiviere Benachrichtigungen, falls dies unerwartet auf 'Aus' wechselt, was auf potenzielle Hardware- oder Verbindungsprobleme hinweist."
}
},
"number": {
"best_price_flex_override": {
"description": "Maximaler Prozentsatz über dem Tagesminimumpreis, den Intervalle haben können und trotzdem als 'Bestpreis' gelten. Empfohlen: 15-20 mit Lockerung aktiviert (Standard), oder 25-35 ohne Lockerung. Maximum: 50 (Obergrenze für zuverlässige Periodenerkennung).",
"long_description": "Wenn diese Entität aktiviert ist, überschreibt ihr Wert die Einstellung 'Flexibilität' aus dem Optionen-Dialog für die Bestpreis-Periodenberechnung.",
"usage_tips": "Aktiviere diese Entität, um die Bestpreiserkennung dynamisch über Automatisierungen anzupassen, z.B. höhere Flexibilität bei kritischen Lasten oder engere Anforderungen für flexible Geräte."
},
"best_price_min_distance_override": {
"description": "Minimaler prozentualer Abstand unter dem Tagesdurchschnitt. Intervalle müssen so weit unter dem Durchschnitt liegen, um als 'Bestpreis' zu gelten. Hilft, echte Niedrigpreis-Perioden von durchschnittlichen Preisen zu unterscheiden.",
"long_description": "Wenn diese Entität aktiviert ist, überschreibt ihr Wert die Einstellung 'Mindestabstand' aus dem Optionen-Dialog für die Bestpreis-Periodenberechnung.",
"usage_tips": "Erhöhe den Wert, wenn du strengere Bestpreis-Kriterien möchtest. Verringere ihn, wenn zu wenige Perioden erkannt werden."
},
"best_price_min_period_length_override": {
"description": "Minimale Periodenl\u00e4nge in 15-Minuten-Intervallen. Perioden kürzer als diese werden nicht gemeldet. Beispiel: 2 = mindestens 30 Minuten.",
"long_description": "Wenn diese Entität aktiviert ist, überschreibt ihr Wert die Einstellung 'Mindestperiodenlänge' aus dem Optionen-Dialog für die Bestpreis-Periodenberechnung.",
"usage_tips": "Passe an die typische Laufzeit deiner Geräte an: 2 (30 Min) für Schnellprogramme, 4-8 (1-2 Std) für normale Zyklen, 8+ für lange ECO-Programme."
},
"best_price_min_periods_override": {
"description": "Minimale Anzahl an Bestpreis-Perioden, die täglich gefunden werden sollen. Wenn Lockerung aktiviert ist, wird das System die Kriterien automatisch anpassen, um diese Zahl zu erreichen.",
"long_description": "Wenn diese Entität aktiviert ist, überschreibt ihr Wert die Einstellung 'Mindestperioden' aus dem Optionen-Dialog für die Bestpreis-Periodenberechnung.",
"usage_tips": "Setze dies auf die Anzahl zeitkritischer Aufgaben, die du täglich hast. Beispiel: 2 für zwei Waschmaschinenladungen."
},
"best_price_relaxation_attempts_override": {
"description": "Anzahl der Versuche, die Kriterien schrittweise zu lockern, um die Mindestperiodenanzahl zu erreichen. Jeder Versuch erhöht die Flexibilität um 3 Prozent. Bei 0 werden nur Basis-Kriterien verwendet.",
"long_description": "Wenn diese Entität aktiviert ist, überschreibt ihr Wert die Einstellung 'Lockerungsversuche' aus dem Optionen-Dialog für die Bestpreis-Periodenberechnung.",
"usage_tips": "Höhere Werte machen die Periodenerkennung anpassungsfähiger an Tage mit stabilen Preisen. Setze auf 0, um strenge Kriterien ohne Lockerung zu erzwingen."
},
"best_price_gap_count_override": {
"description": "Maximale Anzahl teurerer Intervalle, die zwischen günstigen Intervallen erlaubt sind und trotzdem als eine zusammenhängende Periode gelten. Bei 0 müssen günstige Intervalle aufeinander folgen.",
"long_description": "Wenn diese Entität aktiviert ist, überschreibt ihr Wert die Einstellung 'Lückentoleranz' aus dem Optionen-Dialog für die Bestpreis-Periodenberechnung.",
"usage_tips": "Erhöhe dies für Geräte mit variabler Last (z.B. Wärmepumpen), die kurze teurere Intervalle tolerieren können. Setze auf 0 für kontinuierliche günstige Perioden."
},
"peak_price_flex_override": {
"description": "Maximaler Prozentsatz unter dem Tagesmaximumpreis, den Intervalle haben können und trotzdem als 'Spitzenpreis' gelten. Gleiche Empfehlungen wie für Bestpreis-Flexibilität.",
"long_description": "Wenn diese Entität aktiviert ist, überschreibt ihr Wert die Einstellung 'Flexibilität' aus dem Optionen-Dialog für die Spitzenpreis-Periodenberechnung.",
"usage_tips": "Nutze dies, um den Spitzenpreis-Schwellenwert zur Laufzeit für Automatisierungen anzupassen, die den Verbrauch während teurer Stunden vermeiden."
},
"peak_price_min_distance_override": {
"description": "Minimaler prozentualer Abstand über dem Tagesdurchschnitt. Intervalle müssen so weit über dem Durchschnitt liegen, um als 'Spitzenpreis' zu gelten.",
"long_description": "Wenn diese Entität aktiviert ist, überschreibt ihr Wert die Einstellung 'Mindestabstand' aus dem Optionen-Dialog für die Spitzenpreis-Periodenberechnung.",
"usage_tips": "Erhöhe den Wert, um nur extreme Preisspitzen zu erfassen. Verringere ihn, um mehr Hochpreiszeiten einzubeziehen."
},
"peak_price_min_period_length_override": {
"description": "Minimale Periodenl\u00e4nge in 15-Minuten-Intervallen für Spitzenpreise. Kürzere Preisspitzen werden nicht als Perioden gemeldet.",
"long_description": "Wenn diese Entität aktiviert ist, überschreibt ihr Wert die Einstellung 'Mindestperiodenlänge' aus dem Optionen-Dialog für die Spitzenpreis-Periodenberechnung.",
"usage_tips": "Kürzere Werte erfassen kurze Preisspitzen. Längere Werte fokussieren auf anhaltende Hochpreisphasen."
},
"peak_price_min_periods_override": {
"description": "Minimale Anzahl an Spitzenpreis-Perioden, die täglich gefunden werden sollen.",
"long_description": "Wenn diese Entität aktiviert ist, überschreibt ihr Wert die Einstellung 'Mindestperioden' aus dem Optionen-Dialog für die Spitzenpreis-Periodenberechnung.",
"usage_tips": "Setze dies basierend darauf, wie viele Hochpreisphasen du pro Tag für Automatisierungen erfassen möchtest."
},
"peak_price_relaxation_attempts_override": {
"description": "Anzahl der Versuche, die Kriterien zu lockern, um die Mindestanzahl an Spitzenpreis-Perioden zu erreichen.",
"long_description": "Wenn diese Entität aktiviert ist, überschreibt ihr Wert die Einstellung 'Lockerungsversuche' aus dem Optionen-Dialog für die Spitzenpreis-Periodenberechnung.",
"usage_tips": "Erhöhe dies, wenn an Tagen mit stabilen Preisen keine Perioden gefunden werden. Setze auf 0, um strenge Kriterien zu erzwingen."
},
"peak_price_gap_count_override": {
"description": "Maximale Anzahl günstigerer Intervalle, die zwischen teuren Intervallen erlaubt sind und trotzdem als eine Spitzenpreis-Periode gelten.",
"long_description": "Wenn diese Entität aktiviert ist, überschreibt ihr Wert die Einstellung 'Lückentoleranz' aus dem Optionen-Dialog für die Spitzenpreis-Periodenberechnung.",
"usage_tips": "Höhere Werte erfassen längere Hochpreisphasen auch mit kurzen Preiseinbrüchen. Setze auf 0, um strikt zusammenhängende Spitzenpreise zu erfassen."
}
},
"switch": {
"best_price_enable_relaxation_override": {
"description": "Wenn aktiviert, werden die Kriterien automatisch gelockert, um die Mindestperiodenanzahl zu erreichen. Wenn deaktiviert, werden nur Perioden gemeldet, die die strengen Kriterien erfüllen (möglicherweise null Perioden bei stabilen Preisen).",
"long_description": "Wenn diese Entität aktiviert ist, überschreibt ihr Wert die Einstellung 'Mindestanzahl erreichen' aus dem Optionen-Dialog für die Bestpreis-Periodenberechnung.",
"usage_tips": "Aktiviere dies für garantierte tägliche Automatisierungsmöglichkeiten. Deaktiviere es, wenn du nur wirklich günstige Zeiträume willst, auch wenn das bedeutet, dass an manchen Tagen keine Perioden gefunden werden."
},
"peak_price_enable_relaxation_override": {
"description": "Wenn aktiviert, werden die Kriterien automatisch gelockert, um die Mindestperiodenanzahl zu erreichen. Wenn deaktiviert, werden nur echte Preisspitzen gemeldet.",
"long_description": "Wenn diese Entität aktiviert ist, überschreibt ihr Wert die Einstellung 'Mindestanzahl erreichen' aus dem Optionen-Dialog für die Spitzenpreis-Periodenberechnung.",
"usage_tips": "Aktiviere dies für konsistente Spitzenpreis-Warnungen. Deaktiviere es, um nur extreme Preisspitzen zu erfassen."
} }
}, },
"home_types": { "home_types": {
@ -568,16 +282,5 @@
"ROWHOUSE": "Reihenhaus", "ROWHOUSE": "Reihenhaus",
"HOUSE": "Haus", "HOUSE": "Haus",
"COTTAGE": "Ferienhaus" "COTTAGE": "Ferienhaus"
}, }
"time_units": {
"day": "{count} Tag",
"days": "{count} Tagen",
"hour": "{count} Stunde",
"hours": "{count} Stunden",
"minute": "{count} Minute",
"minutes": "{count} Minuten",
"ago": "vor {parts}",
"now": "jetzt"
},
"attribution": "Daten bereitgestellt von Tibber"
} }

View file

@ -1,32 +1,10 @@
{ {
"apexcharts": {
"title_rating_level": "Price Phases Daily Progress",
"title_level": "Price Level",
"hourly_suffix": "(Ø hourly)",
"best_price_period_name": "Best Price Period",
"peak_price_period_name": "Peak Price Period",
"notification": {
"metadata_sensor_unavailable": {
"title": "Tibber Prices: ApexCharts YAML Generated with Limited Functionality",
"message": "You just generated an ApexCharts card configuration via Developer Tools. The Chart Metadata sensor is currently disabled, so the generated YAML will only show **basic functionality** (auto-scale axis, fixed gradient at 50%).\n\n**To enable full functionality** (optimized scaling, dynamic gradient colors):\n1. [Open Tibber Prices Integration](https://my.home-assistant.io/redirect/integration/?domain=tibber_prices)\n2. Enable the 'Chart Metadata' sensor\n3. **Generate the YAML again** via Developer Tools\n4. **Replace the old YAML** in your dashboard with the new version\n\n⚠ Simply enabling the sensor is not enough - you must regenerate and replace the YAML code!"
},
"missing_cards": {
"title": "Tibber Prices: ApexCharts YAML Cannot Be Used",
"message": "You just generated an ApexCharts card configuration via Developer Tools, but the generated YAML **will not work** because required custom cards are missing.\n\n**Missing cards:**\n{cards}\n\n**To use the generated YAML:**\n1. Click the links above to install the missing cards from HACS\n2. Restart Home Assistant (sometimes needed)\n3. **Generate the YAML again** via Developer Tools\n4. Add the YAML to your dashboard\n\n⚠ The current YAML code will not work until all cards are installed!"
}
}
},
"sensor": { "sensor": {
"current_interval_price": { "current_price": {
"description": "The current electricity price per kWh", "description": "The current electricity price per kWh",
"long_description": "Shows the current price per kWh from your Tibber subscription", "long_description": "Shows the current price per kWh from your Tibber subscription",
"usage_tips": "Use this to track prices or to create automations that run when electricity is cheap" "usage_tips": "Use this to track prices or to create automations that run when electricity is cheap"
}, },
"current_interval_price_base": {
"description": "Current electricity price in base currency (EUR/kWh, NOK/kWh, etc.) for Energy Dashboard",
"long_description": "Shows the current price per kWh in base currency units (e.g., EUR/kWh instead of ct/kWh, NOK/kWh instead of øre/kWh). This sensor is specifically designed for use with Home Assistant's Energy Dashboard, which requires prices in standard currency units.",
"usage_tips": "Use this sensor when configuring the Energy Dashboard under Settings → Dashboards → Energy. Select this sensor as the 'Entity with current price' to automatically calculate your energy costs. The Energy Dashboard multiplies your energy consumption (kWh) by this price to show total costs."
},
"next_interval_price": { "next_interval_price": {
"description": "The next interval electricity price per kWh", "description": "The next interval electricity price per kWh",
"long_description": "Shows the price for the next 15-minute interval from your Tibber subscription", "long_description": "Shows the price for the next 15-minute interval from your Tibber subscription",
@ -37,12 +15,12 @@
"long_description": "Shows the price for the previous 15-minute interval from your Tibber subscription", "long_description": "Shows the price for the previous 15-minute interval from your Tibber subscription",
"usage_tips": "Use this to review past price changes or track price history" "usage_tips": "Use this to review past price changes or track price history"
}, },
"current_hour_average_price": { "current_hour_average": {
"description": "Rolling 5-interval average price per kWh", "description": "Rolling 5-interval average price per kWh",
"long_description": "Shows the average price per kWh calculated from 5 intervals: 2 previous, current, and 2 next intervals (approximately 75 minutes total). This provides a smoothed 'hour price' that adapts as time moves, rather than being fixed to clock hours.", "long_description": "Shows the average price per kWh calculated from 5 intervals: 2 previous, current, and 2 next intervals (approximately 75 minutes total). This provides a smoothed 'hour price' that adapts as time moves, rather than being fixed to clock hours.",
"usage_tips": "Use this for a more stable price indicator that smooths out short-term fluctuations while still being responsive to price changes. Better than fixed hourly prices for making consumption decisions." "usage_tips": "Use this for a more stable price indicator that smooths out short-term fluctuations while still being responsive to price changes. Better than fixed hourly prices for making consumption decisions."
}, },
"next_hour_average_price": { "next_hour_average": {
"description": "Rolling 5-interval average price for next hour per kWh", "description": "Rolling 5-interval average price for next hour per kWh",
"long_description": "Shows the average price per kWh calculated from 5 intervals centered one hour ahead: approximately intervals +2 through +6 from now (covering minutes +30 to +105). This provides a forward-looking smoothed 'hour price' for planning consumption.", "long_description": "Shows the average price per kWh calculated from 5 intervals centered one hour ahead: approximately intervals +2 through +6 from now (covering minutes +30 to +105). This provides a forward-looking smoothed 'hour price' for planning consumption.",
"usage_tips": "Use this to anticipate price changes in the next hour. Helpful for scheduling high-consumption activities like charging electric vehicles, running dishwashers, or heating systems." "usage_tips": "Use this to anticipate price changes in the next hour. Helpful for scheduling high-consumption activities like charging electric vehicles, running dishwashers, or heating systems."
@ -58,9 +36,9 @@
"usage_tips": "Use this to avoid running appliances during peak price times" "usage_tips": "Use this to avoid running appliances during peak price times"
}, },
"average_price_today": { "average_price_today": {
"description": "The typical electricity price for today per kWh (configurable display format)", "description": "The average electricity price for today per kWh",
"long_description": "Shows the typical price per kWh for today. **By default, the state displays the median** (resistant to extreme spikes, showing what you can generally expect). You can change this in the integration options to show the arithmetic mean instead. The alternate value is always available as attribute `price_mean` or `price_median` for automations.", "long_description": "Shows the average price per kWh for the current day from your Tibber subscription",
"usage_tips": "Use the state value for display. For exact cost calculations in automations, use: {{ state_attr('sensor.average_price_today', 'price_mean') }}" "usage_tips": "Use this as a baseline for comparing current prices"
}, },
"lowest_price_tomorrow": { "lowest_price_tomorrow": {
"description": "The lowest electricity price for tomorrow per kWh", "description": "The lowest electricity price for tomorrow per kWh",
@ -73,49 +51,19 @@
"usage_tips": "Use this to avoid running appliances during tomorrow's peak price times. Helpful for planning around expensive periods." "usage_tips": "Use this to avoid running appliances during tomorrow's peak price times. Helpful for planning around expensive periods."
}, },
"average_price_tomorrow": { "average_price_tomorrow": {
"description": "The typical electricity price for tomorrow per kWh (configurable display format)", "description": "The average electricity price for tomorrow per kWh",
"long_description": "Shows the typical price per kWh for tomorrow. **By default, the state displays the median** (resistant to extreme spikes). You can change this in the integration options to show the arithmetic mean instead. The alternate value is available as attribute. This sensor becomes unavailable until tomorrow's data is published by Tibber (typically around 13:00-14:00 CET).", "long_description": "Shows the average price per kWh for tomorrow from your Tibber subscription. This sensor becomes unavailable until tomorrow's data is published by Tibber (typically around 13:00-14:00 CET).",
"usage_tips": "Use this to plan tomorrow's energy consumption. For cost calculations, use: {{ state_attr('sensor.average_price_tomorrow', 'price_mean') }}" "usage_tips": "Use this as a baseline for comparing tomorrow's prices and planning consumption. Compare with today's average to see if tomorrow will be more or less expensive overall."
},
"yesterday_price_level": {
"description": "Aggregated price level for yesterday",
"long_description": "Shows the aggregated price level classification for all intervals in yesterday. Uses the same logic as hourly sensors to determine the overall price level for the entire day.",
"usage_tips": "Use this to understand yesterday's overall price situation. Compare with today to see daily trends."
},
"today_price_level": {
"description": "Aggregated price level for today",
"long_description": "Shows the aggregated price level classification for all intervals in today. Uses the same logic as hourly sensors to determine the overall price level for the entire day.",
"usage_tips": "Use this to understand today's overall price situation at a glance. Helpful for quick assessments of whether today is generally cheap or expensive."
},
"tomorrow_price_level": {
"description": "Aggregated price level for tomorrow",
"long_description": "Shows the aggregated price level classification for all intervals in tomorrow. Uses the same logic as hourly sensors to determine the overall price level for the entire day. This sensor becomes unavailable until tomorrow's data is published by Tibber (typically around 13:00-14:00 CET).",
"usage_tips": "Use this to understand tomorrow's overall price situation. Compare with today to see if tomorrow will be more or less favorable for energy consumption."
},
"yesterday_price_rating": {
"description": "Aggregated price rating for yesterday",
"long_description": "Shows the aggregated price rating (low/normal/high) for all intervals in yesterday, based on your configured thresholds. Uses the same logic as hourly sensors to determine the overall rating for the entire day.",
"usage_tips": "Use this to understand yesterday's price situation relative to your personalized thresholds. Compare with today for trend analysis."
},
"today_price_rating": {
"description": "Aggregated price rating for today",
"long_description": "Shows the aggregated price rating (low/normal/high) for all intervals in today, based on your configured thresholds. Uses the same logic as hourly sensors to determine the overall rating for the entire day.",
"usage_tips": "Use this to quickly assess today's price situation relative to your personalized thresholds. Helps make consumption decisions for the current day."
},
"tomorrow_price_rating": {
"description": "Aggregated price rating for tomorrow",
"long_description": "Shows the aggregated price rating (low/normal/high) for all intervals in tomorrow, based on your configured thresholds. Uses the same logic as hourly sensors to determine the overall rating for the entire day. This sensor becomes unavailable until tomorrow's data is published by Tibber (typically around 13:00-14:00 CET).",
"usage_tips": "Use this to plan tomorrow's energy consumption based on your personalized price thresholds. Compare with today to decide if you should shift consumption to tomorrow or use energy today."
}, },
"trailing_price_average": { "trailing_price_average": {
"description": "The typical electricity price for the past 24 hours per kWh (configurable display format)", "description": "The average electricity price for the past 24 hours per kWh",
"long_description": "Shows the typical price per kWh for the past 24 hours. **By default, the state displays the median** (resistant to extreme spikes, showing what price level was typical). You can change this in the integration options to show the arithmetic mean instead. The alternate value is available as attribute. Updates every 15 minutes.", "long_description": "Shows the average price per kWh calculated from the past 24 hours (trailing average) from your Tibber subscription. This provides a rolling average that updates every 15 minutes based on historical data.",
"usage_tips": "Use the state value to see the typical recent price level. For cost calculations, use: {{ state_attr('sensor.trailing_price_average', 'price_mean') }}" "usage_tips": "Use this to compare current prices against recent trends. A current price significantly above this average may indicate a good time to reduce consumption."
}, },
"leading_price_average": { "leading_price_average": {
"description": "The typical electricity price for the next 24 hours per kWh (configurable display format)", "description": "The average electricity price for the next 24 hours per kWh",
"long_description": "Shows the typical price per kWh for the next 24 hours. **By default, the state displays the median** (resistant to extreme spikes, showing what price level to expect). You can change this in the integration options to show the arithmetic mean instead. The alternate value is available as attribute.", "long_description": "Shows the average price per kWh calculated from the next 24 hours (leading average) from your Tibber subscription. This provides a forward-looking average based on available forecast data.",
"usage_tips": "Use the state value to see the typical upcoming price level. For cost calculations, use: {{ state_attr('sensor.leading_price_average', 'price_mean') }}" "usage_tips": "Use this to plan energy usage. If the current price is below the leading average, it may be a good time to run energy-intensive appliances."
}, },
"trailing_price_min": { "trailing_price_min": {
"description": "The minimum electricity price for the past 24 hours per kWh", "description": "The minimum electricity price for the past 24 hours per kWh",
@ -137,7 +85,7 @@
"long_description": "Shows the maximum price per kWh from the next 24 hours (leading maximum) from your Tibber subscription. This provides the highest price expected in the next 24 hours based on forecast data.", "long_description": "Shows the maximum price per kWh from the next 24 hours (leading maximum) from your Tibber subscription. This provides the highest price expected in the next 24 hours based on forecast data.",
"usage_tips": "Use this to avoid running appliances during upcoming peak price periods." "usage_tips": "Use this to avoid running appliances during upcoming peak price periods."
}, },
"current_interval_price_level": { "price_level": {
"description": "The current price level classification", "description": "The current price level classification",
"long_description": "Shows Tibber's classification of the current price compared to historical prices", "long_description": "Shows Tibber's classification of the current price compared to historical prices",
"usage_tips": "Use this to create automations based on relative price levels rather than absolute prices" "usage_tips": "Use this to create automations based on relative price levels rather than absolute prices"
@ -162,7 +110,7 @@
"long_description": "Shows the median price level across 5 intervals centered one hour ahead. Helps plan consumption based on upcoming price trends rather than instantaneous future prices.", "long_description": "Shows the median price level across 5 intervals centered one hour ahead. Helps plan consumption based on upcoming price trends rather than instantaneous future prices.",
"usage_tips": "Use to schedule activities for the next hour based on a smoothed price level forecast." "usage_tips": "Use to schedule activities for the next hour based on a smoothed price level forecast."
}, },
"current_interval_price_rating": { "price_rating": {
"description": "How the current interval's price compares to historical data", "description": "How the current interval's price compares to historical data",
"long_description": "Shows how the current interval's price compares to historical price data as a percentage", "long_description": "Shows how the current interval's price compares to historical price data as a percentage",
"usage_tips": "A positive percentage means the current price is above average, negative means below average" "usage_tips": "A positive percentage means the current price is above average, negative means below average"
@ -189,8 +137,8 @@
}, },
"next_avg_1h": { "next_avg_1h": {
"description": "Average price for the next 1 hour (forward-only from next interval)", "description": "Average price for the next 1 hour (forward-only from next interval)",
"long_description": "Forward-looking average: Shows average of next 4 intervals (1 hour) starting from the NEXT 15-minute interval (not including current). Different from current_hour_average_price which includes past intervals. Use for absolute price threshold planning.", "long_description": "Forward-looking average: Shows average of next 4 intervals (1 hour) starting from the NEXT 15-minute interval (not including current). Different from current_hour_average which includes past intervals. Use for absolute price threshold planning.",
"usage_tips": "Absolute price threshold: Only start appliances when average stays below your maximum acceptable price (e.g., below 0.25 EUR/kWh). Combine with trend sensor for optimal timing. Note: This is NOT a replacement for hourly prices - use current_hour_average_price for that." "usage_tips": "Absolute price threshold: Only start appliances when average stays below your maximum acceptable price (e.g., below 0.25 EUR/kWh). Combine with trend sensor for optimal timing. Note: This is NOT a replacement for hourly prices - use current_hour_average for that."
}, },
"next_avg_2h": { "next_avg_2h": {
"description": "Average price for the next 2 hours", "description": "Average price for the next 2 hours",
@ -267,16 +215,6 @@
"long_description": "Compares current interval price with average of next 12 hours (48 intervals). Rising if future is >5% higher, falling if >5% lower, stable otherwise.", "long_description": "Compares current interval price with average of next 12 hours (48 intervals). Rising if future is >5% higher, falling if >5% lower, stable otherwise.",
"usage_tips": "Relative optimization: Long-term strategic decisions. 'falling' = significantly better prices coming tonight/tomorrow. Finds optimal timing in any market condition. Best combined with avg sensor price cap." "usage_tips": "Relative optimization: Long-term strategic decisions. 'falling' = significantly better prices coming tonight/tomorrow. Finds optimal timing in any market condition. Best combined with avg sensor price cap."
}, },
"current_price_trend": {
"description": "Current price trend direction and how long it will last",
"long_description": "Shows the current price trend (rising/falling/stable) by combining historical momentum (weighted 1h lookback) with future outlook. Recognizes ongoing trends earlier than future-only analysis. Uses ±3% momentum threshold and volatility-adaptive future comparison. Calculates dynamically until the next trend change occurs (or 3h default if no change in 24h). The state shows the current direction, attributes show when it changes and what comes next.",
"usage_tips": "Status display: Dashboard visibility of 'what's happening now until when'. Perfectly synchronized with next_price_trend_change. Example: Badge showing 'Rising for 2.5h' or 'Falling until 16:45'. Better than time-window sensors because it understands you're ALREADY in a trend, not just predicting future changes. Use for quick visual overview, not automation triggers."
},
"next_price_trend_change": {
"description": "When the next significant price trend change will occur",
"long_description": "Scans the next 24 hours (96 intervals) to find when the price trend (rising/falling/stable) will change from the current momentum. First determines current trend using weighted 1h lookback (recognizes ongoing trends), then finds when that trend reverses. Uses volatility-adaptive thresholds (3% momentum detection, market-adjusted future comparison). Returns the timestamp when the change is expected.",
"usage_tips": "Event-based automation: Trigger actions WHEN trend changes, not IN X hours. Example: 'Charge EV when next trend change shows falling prices' or 'Run dishwasher before prices start rising'. More accurate than simple future comparison because it knows if you're already in a trend. Complements time-window sensors (price_trend_Xh) which answer 'WILL prices be higher in X hours?'"
},
"daily_rating": { "daily_rating": {
"description": "How today's prices compare to historical data", "description": "How today's prices compare to historical data",
"long_description": "Shows how today's prices compare to historical price data as a percentage", "long_description": "Shows how today's prices compare to historical price data as a percentage",
@ -292,169 +230,29 @@
"long_description": "Shows the timestamp of the latest available price data interval from your Tibber subscription" "long_description": "Shows the timestamp of the latest available price data interval from your Tibber subscription"
}, },
"today_volatility": { "today_volatility": {
"description": "How much electricity prices change throughout today", "description": "Price volatility classification for today",
"long_description": "Indicates whether today's prices are stable or have big swings. Low volatility means prices stay fairly consistent—timing doesn't matter much. High volatility means significant price differences throughout the day—great opportunity to shift consumption to cheaper periods. Check `price_coefficient_variation_%` for the variance percentage and `price_spread` for the absolute price span.", "long_description": "Shows how much electricity prices vary throughout today based on the spread (difference between highest and lowest price). Classification: LOW = spread < 5ct, MODERATE = 5-15ct, HIGH = 15-30ct, VERY HIGH = >30ct.",
"usage_tips": "Use this to decide if optimization is worth your effort. On low-volatility days, you can run devices anytime. On high-volatility days, following Best Price periods saves meaningful money." "usage_tips": "Use this to decide if price-based optimization is worthwhile. For example, with a balcony battery that has 15% efficiency losses, optimization only makes sense when volatility is at least MODERATE. Create automations that check volatility before scheduling charging/discharging cycles."
}, },
"tomorrow_volatility": { "tomorrow_volatility": {
"description": "How much electricity prices will change tomorrow", "description": "Price volatility classification for tomorrow",
"long_description": "Indicates whether tomorrow's prices will be stable or have big swings. Available once tomorrow's data is published (typically 13:00-14:00 CET). Low volatility means prices stay fairly consistent—timing isn't critical. High volatility means significant price differences throughout the day—good opportunity for scheduling energy-intensive activities. Check `price_coefficient_variation_%` for the variance percentage and `price_spread` for the absolute price span.", "long_description": "Shows how much electricity prices will vary throughout tomorrow based on the spread (difference between highest and lowest price). Becomes unavailable until tomorrow's data is published (typically 13:00-14:00 CET).",
"usage_tips": "Use for planning tomorrow's energy consumption. High volatility? Schedule flexible loads during Best Price periods. Low volatility? Run devices whenever is convenient." "usage_tips": "Use this for advance planning of tomorrow's energy usage. If tomorrow has HIGH or VERY HIGH volatility, it's worth optimizing energy consumption timing. If LOW, you can run devices anytime without significant cost differences."
}, },
"next_24h_volatility": { "next_24h_volatility": {
"description": "How much prices will change over the next 24 hours", "description": "Price volatility classification for the rolling next 24 hours",
"long_description": "Indicates price volatility for a rolling 24-hour window from now (updates every 15 minutes). Low volatility means prices stay fairly consistent. High volatility means significant price swings offer optimization opportunities. Unlike today/tomorrow sensors, this crosses day boundaries and provides a continuous forward-looking assessment. Check `price_coefficient_variation_%` for the variance percentage and `price_spread` for the absolute price span.", "long_description": "Shows how much electricity prices vary in the next 24 hours from now (rolling window). This crosses day boundaries and updates every 15 minutes, providing a forward-looking volatility assessment independent of calendar days.",
"usage_tips": "Best for real-time decisions. Use when planning battery charging strategies or other flexible loads that might span across midnight. Provides consistent 24h perspective regardless of calendar day." "usage_tips": "Best sensor for real-time optimization decisions. Unlike today/tomorrow sensors that switch at midnight, this provides continuous 24h volatility assessment. Use for battery charging strategies that span across day boundaries."
}, },
"today_tomorrow_volatility": { "today_tomorrow_volatility": {
"description": "Combined price volatility across today and tomorrow", "description": "Combined price volatility classification for today and tomorrow",
"long_description": "Shows overall price volatility when considering both today and tomorrow together (when available). Indicates whether there are significant price differences across the day boundary. Falls back to today-only when tomorrow's data isn't available yet. Useful for understanding multi-day optimization opportunities. Check `price_coefficient_variation_%` for the variance percentage and `price_spread` for the absolute price span.", "long_description": "Shows volatility across both today and tomorrow combined (when tomorrow's data is available). Provides an extended view of price variation spanning up to 48 hours. Falls back to today-only when tomorrow's data isn't available yet.",
"usage_tips": "Use for planning tasks that span multiple days. Check if prices vary enough to make scheduling worthwhile. The individual day volatility sensors show breakdown per day if you need more detail." "usage_tips": "Use this for multi-day planning and to understand if price opportunities exist across the day boundary. The 'today_volatility' and 'tomorrow_volatility' breakdown attributes show individual day contributions. Useful for scheduling charging sessions that might span midnight."
}, },
"data_lifecycle_status": { "price_forecast": {
"description": "Current state of price data lifecycle and caching", "description": "Forecast of upcoming electricity prices",
"long_description": "Shows whether the integration is using cached data or fresh data from the API. Displays current lifecycle state: 'cached' (using stored data), 'fresh' (just fetched from API), 'refreshing' (currently fetching), 'searching_tomorrow' (actively polling for tomorrow's data after 13:00), 'turnover_pending' (within 15 minutes of midnight, 23:45-00:00), or 'error' (fetch failed). Includes comprehensive attributes like cache age, next API poll time, data completeness, and API call statistics.", "long_description": "Shows upcoming electricity prices for future intervals in a format that's easy to use in dashboards",
"usage_tips": "Use this diagnostic sensor to understand data freshness and API call patterns. Check 'cache_age' attribute to see how old the current data is. Monitor 'next_api_poll' to know when the next update is scheduled. Use 'data_completeness' to see if yesterday/today/tomorrow data is available. The 'api_calls_today' counter helps track API usage. Perfect for troubleshooting or understanding the integration's behavior." "usage_tips": "Use this entity's attributes to display upcoming prices in charts or custom cards. Access either 'intervals' for all future intervals or 'hours' for hourly summaries."
},
"best_price_end_time": {
"description": "When the current or next best price period ends",
"long_description": "Shows the end timestamp of the current best price period when active, or the end of the next period when no period is active. Always shows a useful time reference for planning. Returns 'Unknown' only when no periods are configured.",
"usage_tips": "Use this to display a countdown like 'Cheap period ends in 2 hours' (when active) or 'Next cheap period ends at 14:00' (when inactive). Home Assistant automatically shows relative time for timestamp sensors."
},
"best_price_period_duration": {
"description": "Total length of current or next best price period",
"long_description": "Shows how long the best price period lasts in total. The state is displayed in hours (e.g., 1.5 h) for easy reading in the UI, while the `period_duration_minutes` attribute provides the same value in minutes (e.g., 90) for use in automations. During an active period, shows the duration of the current period. When no period is active, shows the duration of the next upcoming period. Returns 'Unknown' only when no periods are configured.",
"usage_tips": "For display: Use the state value (hours) in dashboards. For automations: Use `period_duration_minutes` attribute to check if there's enough time for long-running tasks (e.g., 'If period_duration_minutes >= 90, start washing machine')."
},
"best_price_remaining_minutes": {
"description": "Time remaining in current best price period",
"long_description": "Shows how much time is left in the current best price period. The state displays in hours (e.g., 0.5 h) for easy reading, while the `remaining_minutes` attribute provides minutes (e.g., 30) for automation logic. Returns 0 when no period is active. Updates every minute. Check binary_sensor.best_price_period to see if a period is currently active.",
"usage_tips": "For automations: Use `remaining_minutes` attribute with numeric comparisons like 'If remaining_minutes > 0 AND remaining_minutes < 30, start washing machine now'. The value 0 makes it easy to check if a period is active (value > 0) or not (value = 0)."
},
"best_price_progress": {
"description": "Progress through current best price period (0% when inactive)",
"long_description": "Shows progress through the current best price period as 0-100%. Returns 0% when no period is active. Updates every minute. 0% means period just started, 100% means it's about to end.",
"usage_tips": "Great for visual progress bars. Use in automations: 'If progress > 0 AND progress > 75, send notification that cheap period is ending soon'. Value 0 indicates no active period."
},
"best_price_next_start_time": {
"description": "When the next best price period starts",
"long_description": "Shows when the next upcoming best price period starts. During an active period, this shows the start of the NEXT period after the current one. Returns 'Unknown' only when no future periods are configured.",
"usage_tips": "Always useful for planning ahead: 'Next cheap period starts in 3 hours' (whether you're in a period now or not). Combine with automations: 'When next start time is in 10 minutes, send notification to prepare washing machine'."
},
"best_price_next_in_minutes": {
"description": "Time until next best price period starts",
"long_description": "Shows how long until the next best price period starts. The state displays in hours (e.g., 2.25 h) for dashboards, while the `next_in_minutes` attribute provides minutes (e.g., 135) for automation conditions. During an active period, shows time until the period AFTER the current one. Returns 0 during brief transition moments. Updates every minute.",
"usage_tips": "For automations: Use `next_in_minutes` attribute like 'If next_in_minutes > 0 AND next_in_minutes < 15, wait before starting dishwasher'. Value > 0 always indicates a future period is scheduled."
},
"peak_price_end_time": {
"description": "When the current or next peak price period ends",
"long_description": "Shows the end timestamp of the current peak price period when active, or the end of the next period when no period is active. Always shows a useful time reference for planning. Returns 'Unknown' only when no periods are configured.",
"usage_tips": "Use this to display 'Expensive period ends in 1 hour' (when active) or 'Next expensive period ends at 18:00' (when inactive). Combine with automations to resume operations after peak."
},
"peak_price_period_duration": {
"description": "Total length of current or next peak price period",
"long_description": "Shows how long the peak price period lasts in total. The state is displayed in hours (e.g., 0.75 h) for easy reading in the UI, while the `period_duration_minutes` attribute provides the same value in minutes (e.g., 45) for use in automations. During an active period, shows the duration of the current period. When no period is active, shows the duration of the next upcoming period. Returns 'Unknown' only when no periods are configured.",
"usage_tips": "For display: Use the state value (hours) in dashboards. For automations: Use `period_duration_minutes` attribute to decide whether to wait out the peak or proceed (e.g., 'If period_duration_minutes <= 60, pause operations')."
},
"peak_price_remaining_minutes": {
"description": "Time remaining in current peak price period",
"long_description": "Shows how much time is left in the current peak price period. The state displays in hours (e.g., 1.0 h) for easy reading, while the `remaining_minutes` attribute provides minutes (e.g., 60) for automation logic. Returns 0 when no period is active. Updates every minute. Check binary_sensor.peak_price_period to see if a period is currently active.",
"usage_tips": "For automations: Use `remaining_minutes` attribute like 'If remaining_minutes > 60, cancel deferred charging session'. Value 0 makes it easy to distinguish active (value > 0) from inactive (value = 0) periods."
},
"peak_price_progress": {
"description": "Progress through current peak price period (0% when inactive)",
"long_description": "Shows progress through the current peak price period as 0-100%. Returns 0% when no period is active. Updates every minute.",
"usage_tips": "Visual progress indicator in dashboards. Automation: 'If progress > 0 AND progress > 90, prepare to resume normal heating schedule'. Value 0 indicates no active period."
},
"peak_price_next_start_time": {
"description": "When the next peak price period starts",
"long_description": "Shows when the next upcoming peak price period starts. During an active period, this shows the start of the NEXT period after the current one. Returns 'Unknown' only when no future periods are configured.",
"usage_tips": "Always useful for planning: 'Next expensive period starts in 2 hours'. Automation: 'When next start time is in 30 minutes, reduce heating temperature preemptively'."
},
"peak_price_next_in_minutes": {
"description": "Time until next peak price period starts",
"long_description": "Shows how long until the next peak price period starts. The state displays in hours (e.g., 0.5 h) for dashboards, while the `next_in_minutes` attribute provides minutes (e.g., 30) for automation conditions. During an active period, shows time until the period AFTER the current one. Returns 0 during brief transition moments. Updates every minute.",
"usage_tips": "For automations: Use `next_in_minutes` attribute like 'If next_in_minutes > 0 AND next_in_minutes < 10, complete current charging cycle now before prices increase'."
},
"home_type": {
"description": "Type of home (apartment, house, etc.)",
"long_description": "Shows the type of dwelling as configured in your Tibber account. This metadata can be useful for categorizing energy consumption patterns.",
"usage_tips": "Use this for organizing your smart home system or for analytics purposes."
},
"home_size": {
"description": "Size of home in square meters",
"long_description": "Shows the size of your home in square meters as configured in your Tibber account. Can be used to calculate energy consumption per square meter.",
"usage_tips": "Use this in energy efficiency calculations: 'My home uses X kWh per square meter per year'."
},
"main_fuse_size": {
"description": "Main electrical fuse size in amperes",
"long_description": "Shows the capacity of your main electrical fuse in amperes. This determines the maximum electrical load your home can handle simultaneously.",
"usage_tips": "Use this to prevent overloading: 'If total current consumption approaches fuse size, postpone starting additional appliances'."
},
"number_of_residents": {
"description": "Number of people living in the home",
"long_description": "Shows the number of residents as configured in your Tibber account. Useful for per-capita energy consumption calculations.",
"usage_tips": "Use this for household energy analytics: 'Energy consumption per person per day'."
},
"primary_heating_source": {
"description": "Primary heating system type",
"long_description": "Shows the type of heating system used in your home as configured in your Tibber account. This can be a heat pump, electric heating, gas, oil, or other heating sources.",
"usage_tips": "Use this to categorize heating-related automations or for energy consumption analysis by heating type."
},
"grid_company": {
"description": "Name of your electricity grid operator",
"long_description": "Shows the name of the company operating the electrical grid in your area. This is the distribution system operator (DSO) responsible for delivering electricity to your home.",
"usage_tips": "Useful for administrative purposes and troubleshooting grid-related issues."
},
"grid_area_code": {
"description": "Grid area identifier code",
"long_description": "Shows the code identifying your electrical grid area. This code is used by the grid operator for routing and billing purposes.",
"usage_tips": "Use this for administrative reference or when contacting your grid operator."
},
"price_area_code": {
"description": "Electricity price area code",
"long_description": "Shows the code for your electricity price area (e.g., NO1, NO2, SE3, DK1). Different areas have different wholesale electricity prices based on regional supply and demand.",
"usage_tips": "Use this to understand which price region you're in. Useful when comparing prices with others or analyzing regional price patterns."
},
"consumption_ean": {
"description": "EAN code for electricity consumption metering",
"long_description": "Shows the European Article Number (EAN) code that uniquely identifies your electricity consumption meter. This 18-digit code is used for billing and administrative purposes.",
"usage_tips": "Use this when communicating with your electricity provider or for administrative documentation."
},
"production_ean": {
"description": "EAN code for electricity production metering",
"long_description": "Shows the European Article Number (EAN) code for your electricity production meter (if you have solar panels or other generation). This code tracks electricity you feed back into the grid.",
"usage_tips": "Relevant if you have solar panels or other electricity generation. Use for administrative purposes and when claiming feed-in compensation."
},
"energy_tax_type": {
"description": "Type of energy tax applied",
"long_description": "Shows the energy tax category applied to your electricity consumption. Tax rates vary by country and sometimes by consumer type (residential, commercial, etc.).",
"usage_tips": "Use this for understanding your electricity bill breakdown and total cost calculations."
},
"vat_type": {
"description": "VAT (Value Added Tax) category",
"long_description": "Shows the VAT category applied to your electricity consumption. VAT rates vary by country and may differ for electricity compared to other goods and services.",
"usage_tips": "Use this for understanding your electricity bill and calculating total costs including taxes."
},
"estimated_annual_consumption": {
"description": "Estimated yearly electricity consumption in kWh",
"long_description": "Shows your estimated annual electricity consumption in kilowatt-hours as calculated or configured in your Tibber account. This estimate is used for comparing actual consumption to expected values.",
"usage_tips": "Use this to track if your actual consumption is above or below expectations. Compare monthly consumption to 1/12 of this value to identify unusual patterns."
},
"subscription_status": {
"description": "Status of your Tibber subscription",
"long_description": "Shows whether your Tibber subscription is currently running, has ended, or is pending activation. A status of 'running' means you're actively receiving electricity through Tibber.",
"usage_tips": "Use this to monitor your subscription status. Set up alerts if status changes from 'running' to ensure uninterrupted service."
},
"chart_data_export": {
"description": "Data export for dashboard integrations",
"long_description": "This binary sensor calls the get_chartdata service with your configured YAML parameters and exposes the result as entity attributes. The state is 'on' when the service call succeeds and data is available, 'off' when the call fails or no configuration is set. Perfect for dashboard integrations like ApexCharts that need to read price data from entity attributes.",
"usage_tips": "Configure the YAML parameters in the integration options to match your get_chartdata service call. The sensor will automatically refresh when price data updates (typically after midnight and when tomorrow's data arrives). Access the service response data directly from the entity's attributes - the structure matches exactly what get_chartdata returns."
},
"chart_metadata": {
"description": "Lightweight metadata for chart configuration",
"long_description": "Provides essential chart configuration values as sensor attributes. Useful for any chart card that needs Y-axis bounds. The sensor calls get_chartdata with metadata-only mode (no data processing) and extracts: yaxis_min, yaxis_max (suggested Y-axis range for optimal scaling). The state reflects the service call result: 'ready' when successful, 'error' on failure, 'pending' during initialization.",
"usage_tips": "Configure via configuration.yaml under tibber_prices.chart_metadata_config (optional: day, subunit_currency, resolution). The sensor automatically refreshes when price data updates. Access metadata from attributes: yaxis_min, yaxis_max. Use with config-template-card or any tool that reads entity attributes - perfect for dynamic chart configuration without manual calculations."
} }
}, },
"binary_sensor": { "binary_sensor": {
@ -477,90 +275,6 @@
"description": "Whether the connection to the Tibber API is working", "description": "Whether the connection to the Tibber API is working",
"long_description": "Indicates if the integration can successfully connect to the Tibber API", "long_description": "Indicates if the integration can successfully connect to the Tibber API",
"usage_tips": "Use this to monitor the connection status to the Tibber API" "usage_tips": "Use this to monitor the connection status to the Tibber API"
},
"has_ventilation_system": {
"description": "Whether your home has a ventilation system",
"long_description": "Indicates if a ventilation system is registered for your home in the Tibber account. Ventilation systems can be significant electricity consumers that may benefit from smart scheduling.",
"usage_tips": "Use this to enable ventilation-specific automations or energy monitoring. If true, consider scheduling ventilation during low-price periods."
},
"realtime_consumption_enabled": {
"description": "Whether realtime consumption monitoring is active",
"long_description": "Indicates if realtime electricity consumption monitoring is enabled and active for your Tibber home. This requires compatible metering hardware (e.g., Tibber Pulse) and an active subscription.",
"usage_tips": "Use this to verify that realtime consumption data is available. Enable notifications if this changes to 'off' unexpectedly, indicating potential hardware or connectivity issues."
}
},
"number": {
"best_price_flex_override": {
"description": "Maximum above the daily minimum price that intervals can be and still qualify as 'best price'. Recommended: 15-20 with relaxation enabled (default), or 25-35 without relaxation. Maximum: 50 (hard cap for reliable period detection).",
"long_description": "When this entity is enabled, its value overrides the 'Flexibility' setting from the options flow for best price period calculations.",
"usage_tips": "Enable this entity to dynamically adjust best price detection via automations. Higher values create longer periods, lower values are stricter."
},
"best_price_min_distance_override": {
"description": "Ensures periods are significantly cheaper than the daily average, not just marginally below it. This filters out noise and prevents marking slightly-below-average periods as 'best price' on days with flat prices. Higher values = stricter filtering (only truly cheap periods qualify).",
"long_description": "When this entity is enabled, its value overrides the 'Minimum Distance' setting from the options flow for best price period calculations.",
"usage_tips": "Use in automations to adjust how much better than average the best price periods must be. Higher values require prices to be further below average."
},
"best_price_min_period_length_override": {
"description": "Minimum duration for a period to be considered as 'best price'. Longer periods are more practical for running appliances like dishwashers or heat pumps. Best price periods require 60 minutes minimum (vs. 30 minutes for peak price warnings) because they should provide meaningful time windows for consumption planning.",
"long_description": "When this entity is enabled, its value overrides the 'Minimum Period Length' setting from the options flow for best price period calculations.",
"usage_tips": "Increase when your appliances need longer uninterrupted run times (e.g., washing machines, dishwashers)."
},
"best_price_min_periods_override": {
"description": "Minimum number of best price periods to aim for per day. Filters will be relaxed step-by-step to try achieving this count. Only active when 'Achieve Minimum Count' is enabled.",
"long_description": "When this entity is enabled, its value overrides the 'Minimum Periods' setting from the options flow for best price period calculations.",
"usage_tips": "Adjust dynamically based on how many times per day you need cheap electricity windows."
},
"best_price_relaxation_attempts_override": {
"description": "How many flex levels (attempts) to try before giving up. Each attempt runs all filter combinations at the new flex level. More attempts increase the chance of finding additional periods at the cost of longer processing time.",
"long_description": "When this entity is enabled, its value overrides the 'Relaxation Attempts' setting from the options flow for best price period calculations.",
"usage_tips": "Increase when periods are hard to find. Decrease for stricter price filtering."
},
"best_price_gap_count_override": {
"description": "Maximum number of consecutive intervals allowed that deviate by exactly one level step from the required level. This prevents periods from being split by occasional level deviations. Gap tolerance requires periods ≥90 minutes (6 intervals) to detect outliers effectively.",
"long_description": "When this entity is enabled, its value overrides the 'Gap Tolerance' setting from the options flow for best price period calculations.",
"usage_tips": "Increase to allow longer periods with occasional price spikes. Keep low for stricter continuous cheap periods."
},
"peak_price_flex_override": {
"description": "Maximum below the daily maximum price that intervals can be and still qualify as 'peak price'. Recommended: -15 to -20 with relaxation enabled (default), or -25 to -35 without relaxation. Maximum: -50 (hard cap for reliable period detection). Note: Negative values indicate distance below maximum.",
"long_description": "When this entity is enabled, its value overrides the 'Flexibility' setting from the options flow for peak price period calculations.",
"usage_tips": "Enable this entity to dynamically adjust peak price detection via automations. Higher values create longer peak periods."
},
"peak_price_min_distance_override": {
"description": "Ensures periods are significantly more expensive than the daily average, not just marginally above it. This filters out noise and prevents marking slightly-above-average periods as 'peak price' on days with flat prices. Higher values = stricter filtering (only truly expensive periods qualify).",
"long_description": "When this entity is enabled, its value overrides the 'Minimum Distance' setting from the options flow for peak price period calculations.",
"usage_tips": "Use in automations to adjust how much higher than average the peak price periods must be."
},
"peak_price_min_period_length_override": {
"description": "Minimum duration for a period to be considered as 'peak price'. Peak price warnings are allowed for shorter periods (30 minutes minimum vs. 60 minutes for best price) because brief expensive spikes are worth alerting about, even if they're too short for consumption planning.",
"long_description": "When this entity is enabled, its value overrides the 'Minimum Period Length' setting from the options flow for peak price period calculations.",
"usage_tips": "Increase to filter out brief price spikes, focusing on sustained expensive periods."
},
"peak_price_min_periods_override": {
"description": "Minimum number of peak price periods to aim for per day. Filters will be relaxed step-by-step to try achieving this count. Only active when 'Achieve Minimum Count' is enabled.",
"long_description": "When this entity is enabled, its value overrides the 'Minimum Periods' setting from the options flow for peak price period calculations.",
"usage_tips": "Adjust based on how many peak periods you want to identify and avoid."
},
"peak_price_relaxation_attempts_override": {
"description": "How many flex levels (attempts) to try before giving up. Each attempt runs all filter combinations at the new flex level. More attempts increase the chance of finding additional peak periods at the cost of longer processing time.",
"long_description": "When this entity is enabled, its value overrides the 'Relaxation Attempts' setting from the options flow for peak price period calculations.",
"usage_tips": "Increase when peak periods are hard to detect. Decrease for stricter peak price filtering."
},
"peak_price_gap_count_override": {
"description": "Maximum number of consecutive intervals allowed that deviate by exactly one level step from the required level. This prevents periods from being split by occasional level deviations. Gap tolerance requires periods ≥90 minutes (6 intervals) to detect outliers effectively.",
"long_description": "When this entity is enabled, its value overrides the 'Gap Tolerance' setting from the options flow for peak price period calculations.",
"usage_tips": "Increase to identify sustained expensive periods with brief dips. Keep low for stricter continuous peak detection."
}
},
"switch": {
"best_price_enable_relaxation_override": {
"description": "When enabled, filters will be gradually relaxed if not enough periods are found. This attempts to reach the desired minimum number of periods, which may include less optimal time windows as best-price periods.",
"long_description": "When this entity is enabled, its value overrides the 'Achieve Minimum Count' setting from the options flow for best price period calculations.",
"usage_tips": "Turn OFF to disable relaxation and use strict filtering only. Turn ON to allow the algorithm to relax criteria to find more periods."
},
"peak_price_enable_relaxation_override": {
"description": "When enabled, filters will be gradually relaxed if not enough periods are found. This attempts to reach the desired minimum number of periods to ensure you're warned about expensive periods even on days with unusual price patterns.",
"long_description": "When this entity is enabled, its value overrides the 'Achieve Minimum Count' setting from the options flow for peak price period calculations.",
"usage_tips": "Turn OFF to disable relaxation and use strict filtering only. Turn ON to allow the algorithm to relax criteria to find more peak periods."
} }
}, },
"home_types": { "home_types": {
@ -568,16 +282,5 @@
"ROWHOUSE": "Rowhouse", "ROWHOUSE": "Rowhouse",
"HOUSE": "House", "HOUSE": "House",
"COTTAGE": "Cottage" "COTTAGE": "Cottage"
}, }
"time_units": {
"day": "{count} day",
"days": "{count} days",
"hour": "{count} hour",
"hours": "{count} hours",
"minute": "{count} minute",
"minutes": "{count} minutes",
"ago": "{parts} ago",
"now": "now"
},
"attribution": "Data provided by Tibber"
} }

View file

@ -1,32 +1,10 @@
{ {
"apexcharts": {
"title_rating_level": "Prisfaser dagsfremdrift",
"title_level": "Prisnivå",
"hourly_suffix": "(Ø per time)",
"best_price_period_name": "Beste prisperiode",
"peak_price_period_name": "Topprisperiode",
"notification": {
"metadata_sensor_unavailable": {
"title": "Tibber Prices: ApexCharts YAML generert med begrenset funksjonalitet",
"message": "Du har nettopp generert en ApexCharts-kort-konfigurasjon via Utviklerverktøy. Diagram-metadata-sensoren er deaktivert, så den genererte YAML-en vil bare vise **grunnleggende funksjonalitet** (auto-skalering, fast gradient på 50%).\n\n**For full funksjonalitet** (optimert skalering, dynamiske gradientfarger):\n1. [Åpne Tibber Prices-integrasjonen](https://my.home-assistant.io/redirect/integration/?domain=tibber_prices)\n2. Aktiver 'Chart Metadata'-sensoren\n3. **Generer YAML-en på nytt** via Utviklerverktøy\n4. **Erstatt den gamle YAML-en** i dashbordet ditt med den nye versjonen\n\n⚠ Det er ikke nok å bare aktivere sensoren - du må regenerere og erstatte YAML-koden!"
},
"missing_cards": {
"title": "Tibber Prices: ApexCharts YAML kan ikke brukes",
"message": "Du har nettopp generert en ApexCharts-kort-konfigurasjon via Utviklerverktøy, men den genererte YAML-en **vil ikke fungere** fordi nødvendige tilpassede kort mangler.\n\n**Manglende kort:**\n{cards}\n\n**For å bruke den genererte YAML-en:**\n1. Klikk på lenkene ovenfor for å installere de manglende kortene fra HACS\n2. Start Home Assistant på nytt (noen ganger nødvendig)\n3. **Generer YAML-en på nytt** via Utviklerverktøy\n4. Legg til YAML-en i dashbordet ditt\n\n⚠ Den nåværende YAML-koden vil ikke fungere før alle kort er installert!"
}
}
},
"sensor": { "sensor": {
"current_interval_price": { "current_price": {
"description": "Den nåværende elektrisitetsprisen per kWh", "description": "Den nåværende elektrisitetsprisen per kWh",
"long_description": "Viser nåværende pris per kWh fra ditt Tibber-abonnement", "long_description": "Viser nåværende pris per kWh fra ditt Tibber-abonnement",
"usage_tips": "Bruk dette til å spore priser eller lage automatiseringer som kjører når strøm er billig" "usage_tips": "Bruk dette til å spore priser eller lage automatiseringer som kjører når strøm er billig"
}, },
"current_interval_price_base": {
"description": "Nåværende elektrisitetspris i hovedvaluta (EUR/kWh, NOK/kWh, osv.) for Energi-dashboard",
"long_description": "Viser nåværende pris per kWh i hovedvalutaenheter (f.eks. EUR/kWh i stedet for ct/kWh, NOK/kWh i stedet for øre/kWh). Denne sensoren er spesielt designet for bruk med Home Assistants Energi-dashboard, som krever priser i standard valutaenheter.",
"usage_tips": "Bruk denne sensoren når du konfigurerer Energi-dashboardet under Innstillinger → Dashbord → Energi. Velg denne sensoren som 'Entitet med nåværende pris' for automatisk å beregne energikostnadene. Energi-dashboardet multipliserer energiforbruket ditt (kWh) med denne prisen for å vise totale kostnader."
},
"next_interval_price": { "next_interval_price": {
"description": "Neste intervalls elektrisitetspris per kWh", "description": "Neste intervalls elektrisitetspris per kWh",
"long_description": "Viser prisen for det neste 15-minutters intervallet fra ditt Tibber-abonnement", "long_description": "Viser prisen for det neste 15-minutters intervallet fra ditt Tibber-abonnement",
@ -37,12 +15,12 @@
"long_description": "Viser prisen for det forrige 15-minutters intervallet fra ditt Tibber-abonnement", "long_description": "Viser prisen for det forrige 15-minutters intervallet fra ditt Tibber-abonnement",
"usage_tips": "Bruk dette til å gjennomgå tidligere prisendringer eller spore prishistorikk" "usage_tips": "Bruk dette til å gjennomgå tidligere prisendringer eller spore prishistorikk"
}, },
"current_hour_average_price": { "current_hour_average": {
"description": "Rullende 5-intervalls gjennomsnittspris per kWh", "description": "Rullende 5-intervalls gjennomsnittspris per kWh",
"long_description": "Viser gjennomsnittsprisen per kWh beregnet fra 5 intervaller: 2 foregående, nåværende og 2 neste intervaller (omtrent 75 minutter totalt). Dette gir en utjevnet 'timepris' som tilpasser seg etter hvert som tiden går, i stedet for å være fiksert til klokkeslett.", "long_description": "Viser gjennomsnittsprisen per kWh beregnet fra 5 intervaller: 2 foregående, nåværende og 2 neste intervaller (omtrent 75 minutter totalt). Dette gir en utjevnet 'timepris' som tilpasser seg etter hvert som tiden går, i stedet for å være fiksert til klokkeslett.",
"usage_tips": "Bruk dette for en mer stabil prisindikator som jevner ut kortsiktige svingninger mens den fortsatt er responsiv til prisendringer. Bedre enn faste timepriser for å ta forbruksbeslutninger." "usage_tips": "Bruk dette for en mer stabil prisindikator som jevner ut kortsiktige svingninger mens den fortsatt er responsiv til prisendringer. Bedre enn faste timepriser for å ta forbruksbeslutninger."
}, },
"next_hour_average_price": { "next_hour_average": {
"description": "Rullende 5-intervalls gjennomsnittspris for neste time per kWh", "description": "Rullende 5-intervalls gjennomsnittspris for neste time per kWh",
"long_description": "Viser gjennomsnittsprisen per kWh beregnet fra 5 intervaller sentrert en time frem: omtrent intervallene +2 til +6 fra nå (dekker minutter +30 til +105). Dette gir en fremtidsrettet utjevnet 'timepris' for å planlegge forbruk.", "long_description": "Viser gjennomsnittsprisen per kWh beregnet fra 5 intervaller sentrert en time frem: omtrent intervallene +2 til +6 fra nå (dekker minutter +30 til +105). Dette gir en fremtidsrettet utjevnet 'timepris' for å planlegge forbruk.",
"usage_tips": "Bruk dette til å forutse prisendringer i neste time. Nyttig for å planlegge høyforbruksaktiviteter som lading av elbiler, kjøring av oppvaskmaskiner eller varmesystemer." "usage_tips": "Bruk dette til å forutse prisendringer i neste time. Nyttig for å planlegge høyforbruksaktiviteter som lading av elbiler, kjøring av oppvaskmaskiner eller varmesystemer."
@ -58,9 +36,9 @@
"usage_tips": "Bruk dette til å unngå å kjøre apparater i toppristider" "usage_tips": "Bruk dette til å unngå å kjøre apparater i toppristider"
}, },
"average_price_today": { "average_price_today": {
"description": "Typisk elektrisitetspris i dag per kWh (konfigurerbart visningsformat)", "description": "Den gjennomsnittlige elektrisitetsprisen i dag per kWh",
"long_description": "Viser prisen per kWh for gjeldende dag fra ditt Tibber-abonnement. **Som standard viser statusen medianen** (motstandsdyktig mot ekstreme prisspiss, viser typisk prisnivå). Du kan endre dette i integrasjonsinnstillingene for å vise det aritmetiske gjennomsnittet i stedet. Den alternative verdien er tilgjengelig som attributt.", "long_description": "Viser gjennomsnittsprisen per kWh for gjeldende dag fra ditt Tibber-abonnement",
"usage_tips": "Bruk dette som baseline for å sammenligne nåværende priser. For beregninger bruk: {{ state_attr('sensor.average_price_today', 'price_mean') }}" "usage_tips": "Bruk dette som en baseline for å sammenligne nåværende priser"
}, },
"lowest_price_tomorrow": { "lowest_price_tomorrow": {
"description": "Den laveste elektrisitetsprisen i morgen per kWh", "description": "Den laveste elektrisitetsprisen i morgen per kWh",
@ -73,49 +51,19 @@
"usage_tips": "Bruk dette til å unngå å kjøre apparater i morgendagens toppristider. Nyttig for å planlegge rundt dyre perioder." "usage_tips": "Bruk dette til å unngå å kjøre apparater i morgendagens toppristider. Nyttig for å planlegge rundt dyre perioder."
}, },
"average_price_tomorrow": { "average_price_tomorrow": {
"description": "Typisk elektrisitetspris i morgen per kWh (konfigurerbart visningsformat)", "description": "Den gjennomsnittlige elektrisitetsprisen i morgen per kWh",
"long_description": "Viser prisen per kWh for morgendagen fra ditt Tibber-abonnement. **Som standard viser statusen medianen** (motstandsdyktig mot ekstreme prisspiss). Du kan endre dette i integrasjonsinnstillingene for å vise det aritmetiske gjennomsnittet i stedet. Den alternative verdien er tilgjengelig som attributt. Denne sensoren blir utilgjengelig inntil morgendagens data er publisert av Tibber (vanligvis rundt 13:00-14:00 CET).", "long_description": "Viser gjennomsnittsprisen per kWh for morgendagen fra ditt Tibber-abonnement. Denne sensoren blir utilgjengelig inntil morgendagens data er publisert av Tibber (vanligvis rundt 13:00-14:00 CET).",
"usage_tips": "Bruk dette som baseline for å sammenligne morgendagens priser og planlegge forbruk. Sammenlign med dagens median for å se om morgendagen vil være mer eller mindre dyr totalt sett." "usage_tips": "Bruk dette som en baseline for å sammenligne morgendagens priser og planlegge forbruk. Sammenlign med dagens gjennomsnitt for å se om morgendagen vil være mer eller mindre dyr totalt sett."
},
"yesterday_price_level": {
"description": "Aggregert prisnivå for i går",
"long_description": "Viser det aggregerte prisnivået for alle intervaller i går. Bruker samme logikk som timesensorene for å bestemme det samlede prisnivået for hele dagen.",
"usage_tips": "Bruk dette for å forstå gårsdagens generelle prissituasjon. Sammenlign med i dag for å se daglige trender."
},
"today_price_level": {
"description": "Aggregert prisnivå for i dag",
"long_description": "Viser det aggregerte prisnivået for alle intervaller i dag. Bruker samme logikk som timesensorene for å bestemme det samlede prisnivået for hele dagen.",
"usage_tips": "Bruk dette for å forstå dagens generelle prissituasjon på et øyeblikk. Nyttig for raske vurderinger av om i dag generelt er billig eller dyrt."
},
"tomorrow_price_level": {
"description": "Aggregert prisnivå for i morgen",
"long_description": "Viser det aggregerte prisnivået for alle intervaller i morgen. Bruker samme logikk som timesensorene for å bestemme det samlede prisnivået for hele dagen. Denne sensoren blir utilgjengelig inntil morgendagens data er publisert av Tibber (vanligvis rundt 13:00-14:00 CET).",
"usage_tips": "Bruk dette for å forstå morgendagens generelle prissituasjon. Sammenlign med i dag for å se om morgendagen vil være mer eller mindre gunstig for energiforbruk."
},
"yesterday_price_rating": {
"description": "Aggregert prisvurdering for i går",
"long_description": "Viser den aggregerte prisvurderingen (lav/normal/høy) for alle intervaller i går, basert på dine konfigurerte terskelverdier. Bruker samme logikk som timesensorene for å bestemme den samlede vurderingen for hele dagen.",
"usage_tips": "Bruk dette for å forstå gårsdagens prissituasjon i forhold til dine personlige terskelverdier. Sammenlign med i dag for trendanalyse."
},
"today_price_rating": {
"description": "Aggregert prisvurdering for i dag",
"long_description": "Viser den aggregerte prisvurderingen (lav/normal/høy) for alle intervaller i dag, basert på dine konfigurerte terskelverdier. Bruker samme logikk som timesensorene for å bestemme den samlede vurderingen for hele dagen.",
"usage_tips": "Bruk dette for raskt å vurdere dagens prissituasjon i forhold til dine personlige terskelverdier. Hjelper med å ta forbruksbeslutninger for gjeldende dag."
},
"tomorrow_price_rating": {
"description": "Aggregert prisvurdering for i morgen",
"long_description": "Viser den aggregerte prisvurderingen (lav/normal/høy) for alle intervaller i morgen, basert på dine konfigurerte terskelverdier. Bruker samme logikk som timesensorene for å bestemme den samlede vurderingen for hele dagen. Denne sensoren blir utilgjengelig inntil morgendagens data er publisert av Tibber (vanligvis rundt 13:00-14:00 CET).",
"usage_tips": "Bruk dette for å planlegge morgendagens energiforbruk basert på dine personlige pristerskelverdier. Sammenlign med i dag for å bestemme om du skal flytte forbruk til i morgen eller bruke energi i dag."
}, },
"trailing_price_average": { "trailing_price_average": {
"description": "Typisk elektrisitetspris for de siste 24 timene per kWh (konfigurerbart visningsformat)", "description": "Den gjennomsnittlige elektrisitetsprisen for de siste 24 timene per kWh",
"long_description": "Viser prisen per kWh beregnet fra de siste 24 timene. **Som standard viser statusen medianen** (motstandsdyktig mot ekstreme prisspiss, viser typisk prisnivå). Du kan endre dette i integrasjonsinnstillingene for å vise det aritmetiske gjennomsnittet i stedet. Den alternative verdien er tilgjengelig som attributt. Oppdateres hvert 15. minutt.", "long_description": "Viser gjennomsnittsprisen per kWh beregnet fra de siste 24 timene (glidende gjennomsnitt) fra ditt Tibber-abonnement. Dette gir et rullende gjennomsnitt som oppdateres hvert 15. minutt basert på historiske data.",
"usage_tips": "Bruk statusverdien for å se det typiske nåværende prisnivået. For kostnadsberegninger bruk: {{ state_attr('sensor.trailing_price_average', 'price_mean') }}" "usage_tips": "Bruk dette til å sammenligne nåværende priser mot nylige trender. En nåværende pris betydelig over dette gjennomsnittet kan indikere et godt tidspunkt å redusere forbruket."
}, },
"leading_price_average": { "leading_price_average": {
"description": "Typisk elektrisitetspris for de neste 24 timene per kWh (konfigurerbart visningsformat)", "description": "Den gjennomsnittlige elektrisitetsprisen for de neste 24 timene per kWh",
"long_description": "Viser prisen per kWh beregnet fra de neste 24 timene. **Som standard viser statusen medianen** (motstandsdyktig mot ekstreme prisspiss, viser forventet prisnivå). Du kan endre dette i integrasjonsinnstillingene for å vise det aritmetiske gjennomsnittet i stedet. Den alternative verdien er tilgjengelig som attributt.", "long_description": "Viser gjennomsnittsprisen per kWh beregnet fra de neste 24 timene (fremtidsrettet gjennomsnitt) fra ditt Tibber-abonnement. Dette gir et fremtidsrettet gjennomsnitt basert på tilgjengelige prognosedata.",
"usage_tips": "Bruk statusverdien for å se det typiske kommende prisnivået. For kostnadsberegninger bruk: {{ state_attr('sensor.leading_price_average', 'price_mean') }}" "usage_tips": "Bruk dette til å planlegge energibruk. Hvis nåværende pris er under det fremtidsrettede gjennomsnittet, kan det være et godt tidspunkt å kjøre energikrevende apparater."
}, },
"trailing_price_min": { "trailing_price_min": {
"description": "Den minste elektrisitetsprisen for de siste 24 timene per kWh", "description": "Den minste elektrisitetsprisen for de siste 24 timene per kWh",
@ -137,7 +85,7 @@
"long_description": "Viser maksimumsprisen per kWh fra de neste 24 timene (fremtidsrettet maksimum) fra ditt Tibber-abonnement. Dette gir den høyeste prisen forventet i de neste 24 timene basert på prognosedata.", "long_description": "Viser maksimumsprisen per kWh fra de neste 24 timene (fremtidsrettet maksimum) fra ditt Tibber-abonnement. Dette gir den høyeste prisen forventet i de neste 24 timene basert på prognosedata.",
"usage_tips": "Bruk dette til å unngå å kjøre apparater i kommende topprisperioder." "usage_tips": "Bruk dette til å unngå å kjøre apparater i kommende topprisperioder."
}, },
"current_interval_price_level": { "price_level": {
"description": "Den nåværende prisnivåklassifiseringen", "description": "Den nåværende prisnivåklassifiseringen",
"long_description": "Viser Tibbers klassifisering av nåværende pris sammenlignet med historiske priser", "long_description": "Viser Tibbers klassifisering av nåværende pris sammenlignet med historiske priser",
"usage_tips": "Bruk dette til å lage automatiseringer basert på relative prisnivåer i stedet for absolutte priser" "usage_tips": "Bruk dette til å lage automatiseringer basert på relative prisnivåer i stedet for absolutte priser"
@ -162,7 +110,7 @@
"long_description": "Viser median prisnivå på tvers av 5 intervaller sentrert en time frem. Hjelper med å planlegge forbruk basert på kommende pristrender i stedet for øyeblikkelige fremtidige priser.", "long_description": "Viser median prisnivå på tvers av 5 intervaller sentrert en time frem. Hjelper med å planlegge forbruk basert på kommende pristrender i stedet for øyeblikkelige fremtidige priser.",
"usage_tips": "Bruk for å planlegge aktiviteter for neste time basert på en utjevnet prisnivåprognose." "usage_tips": "Bruk for å planlegge aktiviteter for neste time basert på en utjevnet prisnivåprognose."
}, },
"current_interval_price_rating": { "price_rating": {
"description": "Hvordan nåværende intervalls pris sammenlignes med historiske data", "description": "Hvordan nåværende intervalls pris sammenlignes med historiske data",
"long_description": "Viser hvordan nåværende intervalls pris sammenlignes med historiske prisdata som en prosentandel", "long_description": "Viser hvordan nåværende intervalls pris sammenlignes med historiske prisdata som en prosentandel",
"usage_tips": "En positiv prosentandel betyr at nåværende pris er over gjennomsnittet, negativ betyr under gjennomsnittet" "usage_tips": "En positiv prosentandel betyr at nåværende pris er over gjennomsnittet, negativ betyr under gjennomsnittet"
@ -189,8 +137,8 @@
}, },
"next_avg_1h": { "next_avg_1h": {
"description": "Gjennomsnittspris for neste 1 time (kun fremover fra neste intervall)", "description": "Gjennomsnittspris for neste 1 time (kun fremover fra neste intervall)",
"long_description": "Fremtidsrettet gjennomsnitt: Viser gjennomsnitt av neste 4 intervaller (1 time) fra og med NESTE 15-minutters intervall (ikke inkludert nåværende). Forskjellig fra current_hour_average_price som inkluderer tidligere intervaller. Bruk for planlegging med absolutt pristerskel.", "long_description": "Fremtidsrettet gjennomsnitt: Viser gjennomsnitt av neste 4 intervaller (1 time) fra og med NESTE 15-minutters intervall (ikke inkludert nåværende). Forskjellig fra current_hour_average som inkluderer tidligere intervaller. Bruk for planlegging med absolutt pristerskel.",
"usage_tips": "Absolutt pristerskel: Start kun apparater når gjennomsnittet forblir under din maksimalt akseptable pris (f.eks. under 0,25 EUR/kWh). Kombiner med trendsensor for optimal timing. Merk: Dette er IKKE en erstatning for timepriser - bruk current_hour_average_price for det." "usage_tips": "Absolutt pristerskel: Start kun apparater når gjennomsnittet forblir under din maksimalt akseptable pris (f.eks. under 0,25 EUR/kWh). Kombiner med trendsensor for optimal timing. Merk: Dette er IKKE en erstatning for timepriser - bruk current_hour_average for det."
}, },
"next_avg_2h": { "next_avg_2h": {
"description": "Gjennomsnittspris for neste 2 timer", "description": "Gjennomsnittspris for neste 2 timer",
@ -263,19 +211,9 @@
"usage_tips": "Relativ optimalisering: Nattplanlegging. 'fallende' betyr at å vente til natten lønner seg (>5% billigere). Fungerer hele året uten manuelle terskeljusteringer. Start når 'stabil' eller 'stigende'." "usage_tips": "Relativ optimalisering: Nattplanlegging. 'fallende' betyr at å vente til natten lønner seg (>5% billigere). Fungerer hele året uten manuelle terskeljusteringer. Start når 'stabil' eller 'stigende'."
}, },
"price_trend_12h": { "price_trend_12h": {
"description": "Pristrend for de neste 12 timene", "description": "Pristrend for neste 12 timer",
"long_description": "Sammenligner nåværende intervallpris med gjennomsnittet av de neste 12 timene (48 intervaller). Økende hvis framtidig pris er >5% høyere, synkende hvis >5% lavere, ellers stabil.", "long_description": "Sammenligner nåværende intervallpris med gjennomsnitt av neste 12 timer (48 intervaller). Stigende hvis fremtiden er >5% høyere, fallende hvis >5% lavere, ellers stabil.",
"usage_tips": "Relativ optimalisering: Langsiktige strategiske beslutninger. 'synkende' = betydelig bedre priser kommer i natt/i morgen. Finner optimal timing i enhver markedssituasjon. Best kombinert med prisgrense fra avg-sensor." "usage_tips": "Relativ optimalisering: Langsiktige strategiske beslutninger. 'fallende' = betydelig bedre priser kommer i natt/i morgen. Finner optimal timing i enhver markedstilstand. Best kombinert med avg-sensor pristak."
},
"current_price_trend": {
"description": "Nåværende pristrend-retning og hvor lenge den varer",
"long_description": "Viser nåværende pristrend (økende/synkende/stabil) ved å kombinere historisk momentum (vektet 1t tilbakeblikk) med fremtidsutsikt. Gjenkjenner pågående trender tidligere enn bare fremtidsanalyse. Bruker ±3 % momentum-terskel og volatilitetsavhengig fremtidssammenligning. Beregner dynamisk til neste trendendring (eller 3t standard hvis ingen endring på 24t). Status viser nåværende retning, attributter viser når den endres og hva som kommer etterpå.",
"usage_tips": "Statusvisning: Dashboard-synlighet av 'hva skjer nå til når'. Perfekt synkronisert med next_price_trend_change. Eksempel: Badge som viser 'Økende i 2,5t' eller 'Synkende til 16:45'. Bedre enn tidsvindu-sensorer fordi den forstår at du ALLEREDE er i en trend, ikke bare forutsier fremtidige endringer. Bruk for rask visuell oversikt, ikke automatiseringsutløsere."
},
"next_price_trend_change": {
"description": "Når neste betydelige pristrendendring vil skje",
"long_description": "Skanner de neste 24 timene (96 intervaller) for å finne når pristrenden (økende/synkende/stabil) vil endre seg fra nåværende momentum. Bestemmer først nåværende trend med vektet 1t tilbakeblikk (gjenkjenner pågående trender), deretter finner den reverseringen. Bruker volatilitetsadaptive terskelverdier (3 % momentum-deteksjon, markedsjustert fremtidssammenligning). Returnerer tidsstempelet når endringen forventes.",
"usage_tips": "Hendelsesbasert automatisering: Utløs handlinger NÅR trenden endres, ikke OM X timer. Eksempel: 'Lad EV når neste trendendring viser synkende priser' eller 'Start oppvaskmaskin før prisene stiger'. Kompletterer tidsvindu-sensorer (price_trend_Xh) som svarer på 'VIL prisene være høyere om X timer?'"
}, },
"daily_rating": { "daily_rating": {
"description": "Hvordan dagens priser sammenlignes med historiske data", "description": "Hvordan dagens priser sammenlignes med historiske data",
@ -292,169 +230,29 @@
"long_description": "Viser tidsstempelet for siste tilgjengelige prisdataintervall fra ditt Tibber-abonnement" "long_description": "Viser tidsstempelet for siste tilgjengelige prisdataintervall fra ditt Tibber-abonnement"
}, },
"today_volatility": { "today_volatility": {
"description": "Hvor mye strømprisene endrer seg i dag", "description": "Prisvolatilitetsklassifisering for i dag",
"long_description": "Viser om dagens priser er stabile eller har store svingninger. Lav volatilitet betyr ganske jevne priser timing betyr lite. Høy volatilitet betyr tydelige prisforskjeller gjennom dagen en god sjanse til å flytte forbruk til billigere perioder. `price_coefficient_variation_%` viser prosentverdien, `price_spread` viser den absolutte prisspennet.", "long_description": "Viser hvor mye strømprisene varierer gjennom dagen basert på spredningen (forskjellen mellom høyeste og laveste pris). Klassifisering: LOW = spredning < 5øre, MODERATE = 5-15øre, HIGH = 15-30øre, VERY HIGH = >30øre.",
"usage_tips": "Bruk dette for å avgjøre om optimalisering er verdt innsatsen. Ved lav volatilitet kan du kjøre enheter når som helst. Ved høy volatilitet sparer du merkbart ved å følge Best Price-perioder." "usage_tips": "Bruk dette til å bestemme om prisbasert optimalisering er verdt det. For eksempel, med et balkongbatteri som har 15% effektivitetstap, er optimalisering kun meningsfull når volatiliteten er minst MODERATE. Opprett automatiseringer som sjekker volatilitet før planlegging av lade-/utladingssykluser."
}, },
"tomorrow_volatility": { "tomorrow_volatility": {
"description": "Hvor mye strømprisene vil endre seg i morgen", "description": "Prisvolatilitetsklassifisering for i morgen",
"long_description": "Viser om prisene i morgen blir stabile eller får store svingninger. Tilgjengelig når morgendagens data er publisert (vanligvis 13:0014:00 CET). Lav volatilitet betyr jevne priser timing er ikke kritisk. Høy volatilitet betyr tydelige prisforskjeller gjennom dagen en god mulighet til å planlegge energikrevende oppgaver. `price_coefficient_variation_%` viser prosentverdien, `price_spread` viser den absolutte prisspennet.", "long_description": "Viser hvor mye strømprisene vil variere gjennom morgendagen basert på spredningen (forskjellen mellom høyeste og laveste pris). Blir utilgjengelig til morgendagens data er publisert (typisk 13:00-14:00 CET).",
"usage_tips": "Bruk dette til å planlegge morgendagens forbruk. Høy volatilitet? Planlegg fleksible laster i Best Price-perioder. Lav volatilitet? Kjør enheter når det passer deg." "usage_tips": "Bruk dette til forhåndsplanlegging av morgendagens energiforbruk. Hvis morgendagen har HIGH eller VERY HIGH volatilitet, er det verdt å optimalisere tidspunktet for energiforbruk. Hvis LOW, kan du kjøre enheter når som helst uten betydelige kostnadsforskjeller."
}, },
"next_24h_volatility": { "next_24h_volatility": {
"description": "Hvor mye prisene endrer seg de neste 24 timene", "description": "Prisvolatilitetsklassifisering for de rullerende neste 24 timene",
"long_description": "Viser prisvolatilitet for et rullerende 24-timers vindu fra nå (oppdateres hvert 15. minutt). Lav volatilitet betyr jevne priser. Høy volatilitet betyr merkbare prissvingninger og mulighet for optimalisering. I motsetning til i dag/i morgen-sensorer krysser denne daggrenser og gir en kontinuerlig fremoverskuende vurdering. `price_coefficient_variation_%` viser prosentverdien, `price_spread` viser den absolutte prisspennet.", "long_description": "Viser hvor mye strømprisene varierer i de neste 24 timene fra nå (rullerende vindu). Dette krysser daggrenser og oppdateres hvert 15. minutt, og gir en fremoverskuende volatilitetsvurdering uavhengig av kalenderdager.",
"usage_tips": "Best for beslutninger i sanntid. Bruk når du planlegger batterilading eller andre fleksible laster som kan gå over midnatt. Gir et konsistent 24t-bilde uavhengig av kalenderdag." "usage_tips": "Beste sensor for sanntids optimaliseringsbeslutninger. I motsetning til dagens/morgendagens sensorer som bytter ved midnatt, gir denne kontinuerlig 24t volatilitetsvurdering. Bruk til batteriladingsstrategier som spenner over daggrenser."
}, },
"today_tomorrow_volatility": { "today_tomorrow_volatility": {
"description": "Kombinert prisvolatilitet for i dag og i morgen", "description": "Kombinert prisvolatilitetsklassifisering for i dag og i morgen",
"long_description": "Viser samlet volatilitet når i dag og i morgen sees sammen (når morgendata er tilgjengelig). Viser om det finnes klare prisforskjeller over dagsgrensen. Faller tilbake til kun i dag hvis morgendata mangler. Nyttig for flerdagers optimalisering. `price_coefficient_variation_%` viser prosentverdien, `price_spread` viser den absolutte prisspennet.", "long_description": "Viser volatilitet på tvers av både i dag og i morgen kombinert (når morgendagens data er tilgjengelig). Gir en utvidet oversikt over prisvariasjoner som spenner over opptil 48 timer. Faller tilbake til kun i dag når morgendagens data ikke er tilgjengelig ennå.",
"usage_tips": "Bruk for oppgaver som går over flere dager. Sjekk om prisforskjellene er store nok til å planlegge etter. De enkelte dagssensorene viser bidrag per dag om du trenger mer detalj." "usage_tips": "Bruk dette til flerørs planlegging og for å forstå om prismuligheter eksisterer på tvers av daggrensen. Attributtene 'today_volatility' og 'tomorrow_volatility' viser individuelle dagsbidrag. Nyttig for planlegging av ladesesjoner som kan strekke seg over midnatt."
}, },
"data_lifecycle_status": { "price_forecast": {
"description": "Gjeldende tilstand for prisdatalivssyklus og hurtigbufring", "description": "Prognose for kommende elektrisitetspriser",
"long_description": "Viser om integrasjonen bruker hurtigbufrede data eller ferske data fra API-et. Viser gjeldende livssyklustilstand: 'cached' (bruker lagrede data), 'fresh' (nettopp hentet fra API), 'refreshing' (henter for øyeblikket), 'searching_tomorrow' (søker aktivt etter morgendagens data etter 13:00), 'turnover_pending' (innen 15 minutter før midnatt, 23:45-00:00), eller 'error' (henting mislyktes). Inkluderer omfattende attributter som cache-alder, neste API-spørring, datafullstendighet og API-anropsstatistikk.", "long_description": "Viser kommende elektrisitetspriser for fremtidige intervaller i et format som er enkelt å bruke i dashboards",
"usage_tips": "Bruk denne diagnosesensoren for å forstå dataferskhet og API-anropsmønstre. Sjekk 'cache_age'-attributtet for å se hvor gamle de nåværende dataene er. Overvåk 'next_api_poll' for å vite når neste oppdatering er planlagt. Bruk 'data_completeness' for å se om data for i går/i dag/i morgen er tilgjengelig. 'api_calls_today'-telleren hjelper med å spore API-bruk. Perfekt for feilsøking eller forståelse av integrasjonens oppførsel." "usage_tips": "Bruk denne entitetens attributter til å vise kommende priser i diagrammer eller tilpassede kort. Få tilgang til enten 'intervals' for alle fremtidige intervaller eller 'hours' for timesammendrag."
},
"best_price_end_time": {
"description": "Total lengde på nåværende eller neste billigperiode (state i timer, attributt i minutter)",
"long_description": "Viser hvor lenge billigperioden varer. State bruker timer (desimal) for lesbar UI; attributtet `period_duration_minutes` beholder avrundede minutter for automasjoner. Aktiv → varighet for gjeldende periode, ellers neste.",
"usage_tips": "UI kan vise 1,5 t mens `period_duration_minutes` = 90 for automasjoner."
},
"best_price_period_duration": {
"description": "Lengde på gjeldende/neste billigperiode",
"long_description": "Total varighet av gjeldende eller neste billigperiode. State vises i timer (f.eks. 1,5 t) for enkel lesing i UI, mens attributtet `period_duration_minutes` gir samme verdi i minutter (f.eks. 90) for automasjoner. Denne verdien representerer den **fulle planlagte varigheten** av perioden og er konstant gjennom hele perioden, selv om gjenværende tid (remaining_minutes) reduseres.",
"usage_tips": "Kombiner med remaining_minutes for å beregne når langvarige enheter skal stoppes: Perioden startet for `period_duration_minutes - remaining_minutes` minutter siden. Dette attributtet støtter energioptimeringsstrategier ved å hjelpe til med å planlegge høyforbruksaktiviteter innenfor billige perioder."
},
"best_price_remaining_minutes": {
"description": "Gjenværende tid i gjeldende billigperiode",
"long_description": "Viser hvor mye tid som gjenstår i gjeldende billigperiode. State vises i timer (f.eks. 0,75 t) for enkel lesing i dashboards, mens attributtet `remaining_minutes` gir samme tid i minutter (f.eks. 45) for automasjonsbetingelser. **Nedtellingstimer**: Denne verdien reduseres hvert minutt under en aktiv periode. Returnerer 0 når ingen billigperiode er aktiv. Oppdateres hvert minutt.",
"usage_tips": "For automasjoner: Bruk attributtet `remaining_minutes` som 'Hvis remaining_minutes > 60, start oppvaskmaskinen nå (nok tid til å fullføre)' eller 'Hvis remaining_minutes < 15, fullfør gjeldende syklus snart'. UI viser brukervennlige timer (f.eks. 1,25 t). Verdi 0 indikerer ingen aktiv billigperiode."
},
"best_price_progress": {
"description": "Fremdrift gjennom gjeldende billigperiode (0% når inaktiv)",
"long_description": "Viser fremdrift gjennom gjeldende billigperiode som 0-100%. Returnerer 0% når ingen periode er aktiv. Oppdateres hvert minutt. 0% betyr perioden nettopp startet, 100% betyr den slutter snart.",
"usage_tips": "Flott for visuelle fremgangsindikatorer. Bruk i automatiseringer: 'Hvis progress > 0 OG progress > 75, send varsel om at billigperioden snart slutter'. Verdi 0 indikerer ingen aktiv periode."
},
"best_price_next_start_time": {
"description": "Total lengde på nåværende eller neste dyr-periode (state i timer, attributt i minutter)",
"long_description": "Viser hvor lenge den dyre perioden varer. State bruker timer (desimal) for UI; attributtet `period_duration_minutes` beholder avrundede minutter for automasjoner. Aktiv → varighet for gjeldende periode, ellers neste.",
"usage_tips": "UI kan vise 0,75 t mens `period_duration_minutes` = 45 for automasjoner."
},
"best_price_next_in_minutes": {
"description": "Tid til neste billigperiode",
"long_description": "Viser hvor lenge til neste billigperiode. State vises i timer (f.eks. 2,25 t) for dashboards, mens attributtet `next_in_minutes` gir minutter (f.eks. 135) for automasjonsbetingelser. Under en aktiv periode viser dette tiden til perioden ETTER den gjeldende. Returnerer 0 under korte overgangsmomenter. Oppdateres hvert minutt.",
"usage_tips": "For automasjoner: Bruk attributtet `next_in_minutes` som 'Hvis next_in_minutes > 0 OG next_in_minutes < 15, vent før start av oppvaskmaskin'. Verdi > 0 indikerer alltid at en fremtidig periode er planlagt."
},
"peak_price_end_time": {
"description": "Tid til neste dyr-periode (state i timer, attributt i minutter)",
"long_description": "Viser hvor lenge til neste dyre periode starter. State bruker timer (desimal); attributtet `next_in_minutes` beholder avrundede minutter for automasjoner. Under aktiv periode viser dette tiden til perioden etter den nåværende. 0 i korte overgangsøyeblikk. Oppdateres hvert minutt.",
"usage_tips": "Bruk `next_in_minutes` i automasjoner (f.eks. < 10) mens state er lett å lese i timer."
},
"peak_price_period_duration": {
"description": "Lengde på gjeldende/neste dyr periode",
"long_description": "Total varighet av gjeldende eller neste dyre periode. State vises i timer (f.eks. 1,5 t) for enkel lesing i UI, mens attributtet `period_duration_minutes` gir samme verdi i minutter (f.eks. 90) for automasjoner. Denne verdien representerer den **fulle planlagte varigheten** av perioden og er konstant gjennom hele perioden, selv om gjenværende tid (remaining_minutes) reduseres.",
"usage_tips": "Kombiner med remaining_minutes for å beregne når langvarige enheter skal stoppes: Perioden startet for `period_duration_minutes - remaining_minutes` minutter siden. Dette attributtet støtter energisparingsstrategier ved å hjelpe til med å planlegge høyforbruksaktiviteter utenfor dyre perioder."
},
"peak_price_remaining_minutes": {
"description": "Gjenværende tid i gjeldende dyre periode",
"long_description": "Viser hvor mye tid som gjenstår i gjeldende dyre periode. State vises i timer (f.eks. 0,75 t) for enkel lesing i dashboards, mens attributtet `remaining_minutes` gir samme tid i minutter (f.eks. 45) for automasjonsbetingelser. **Nedtellingstimer**: Denne verdien reduseres hvert minutt under en aktiv periode. Returnerer 0 når ingen dyr periode er aktiv. Oppdateres hvert minutt.",
"usage_tips": "For automasjoner: Bruk attributtet `remaining_minutes` som 'Hvis remaining_minutes > 60, avbryt utsatt ladeøkt' eller 'Hvis remaining_minutes < 15, fortsett normal drift snart'. UI viser brukervennlige timer (f.eks. 1,0 t). Verdi 0 indikerer ingen aktiv dyr periode."
},
"peak_price_progress": {
"description": "Fremdrift gjennom gjeldende dyrperiode (0% når inaktiv)",
"long_description": "Viser fremdrift gjennom gjeldende dyrperiode som 0-100%. Returnerer 0% når ingen periode er aktiv. Oppdateres hvert minutt.",
"usage_tips": "Visuell fremdriftsindikator i dashboards. Automatisering: 'Hvis progress > 0 OG progress > 90, forbered normal varmestyringsplan'. Verdi 0 indikerer ingen aktiv periode."
},
"peak_price_next_start_time": {
"description": "Når neste dyrperiode starter",
"long_description": "Viser når neste kommende dyrperiode starter. Under en aktiv periode viser dette starten av NESTE periode etter den gjeldende. Returnerer 'Ukjent' bare når ingen fremtidige perioder er konfigurert.",
"usage_tips": "Alltid nyttig for planlegging: 'Neste dyrperiode starter om 2 timer'. Automatisering: 'Når neste starttid er om 30 minutter, reduser varmetemperatur forebyggende'."
},
"peak_price_next_in_minutes": {
"description": "Tid til neste dyre periode",
"long_description": "Viser hvor lenge til neste dyre periode starter. State vises i timer (f.eks. 0,5 t) for dashboards, mens attributtet `next_in_minutes` gir minutter (f.eks. 30) for automasjonsbetingelser. Under en aktiv periode viser dette tiden til perioden ETTER den gjeldende. Returnerer 0 under korte overgangsmomenter. Oppdateres hvert minutt.",
"usage_tips": "For automasjoner: Bruk attributtet `next_in_minutes` som 'Hvis next_in_minutes > 0 OG next_in_minutes < 10, fullfør gjeldende ladesyklus nå før prisene øker'. Verdi > 0 indikerer alltid at en fremtidig dyr periode er planlagt."
},
"home_type": {
"description": "Type bolig (leilighet, hus osv.)",
"long_description": "Viser boligtypen som er konfigurert i Tibber-kontoen din. Disse metadataene kan være nyttige for å kategorisere energiforbruksmønstre.",
"usage_tips": "Bruk dette til å organisere smarthussystemet ditt eller for analyseformål."
},
"home_size": {
"description": "Boligareal i kvadratmeter",
"long_description": "Viser størrelsen på boligen din i kvadratmeter som konfigurert i Tibber-kontoen din. Kan brukes til å beregne energiforbruk per kvadratmeter.",
"usage_tips": "Bruk dette i energieffektivitetsberegninger: 'Boligen min bruker X kWh per kvadratmeter per år'."
},
"main_fuse_size": {
"description": "Hovedsikringsstørrelse i ampere",
"long_description": "Viser kapasiteten til hovedsikringen din i ampere. Dette bestemmer maksimal elektrisk belastning boligen din kan håndtere samtidig.",
"usage_tips": "Bruk dette til å forhindre overbelastning: 'Hvis totalt strømforbruk nærmer seg sikringsstørrelsen, utsett oppstart av flere apparater'."
},
"number_of_residents": {
"description": "Antall personer som bor i boligen",
"long_description": "Viser antall beboere som konfigurert i Tibber-kontoen din. Nyttig for beregninger av energiforbruk per person.",
"usage_tips": "Bruk dette til energianalyse for husholdningen: 'Energiforbruk per person per dag'."
},
"primary_heating_source": {
"description": "Primær varmesystemtype",
"long_description": "Viser typen varmesystem som brukes i boligen din som konfigurert i Tibber-kontoen din. Dette kan være en varmepumpe, elektrisk oppvarming, gass, olje eller andre varmekilder.",
"usage_tips": "Bruk dette til å kategorisere varmerelaterte automatiseringer eller for energiforbruksanalyse etter varmetype."
},
"grid_company": {
"description": "Navn på nettoperatøren din",
"long_description": "Viser navnet på selskapet som driver strømnettet i området ditt. Dette er distribusjonssystemoperatøren (DSO) som er ansvarlig for strømleveransen til hjemmet ditt.",
"usage_tips": "Nyttig for administrative formål og feilsøking av nettrelaterte problemer."
},
"grid_area_code": {
"description": "Nettområdets identifikasjonskode",
"long_description": "Viser koden som identifiserer strømnettsområdet ditt. Denne koden brukes av nettoperatøren til routing og faktureringsformål.",
"usage_tips": "Bruk dette som administrativ referanse eller når du kontakter nettoperatøren din."
},
"price_area_code": {
"description": "Strømprisområdekode",
"long_description": "Viser koden for strømprisområdet ditt (f.eks. NO1, NO2, SE3, DK1). Ulike områder har forskjellige engrosstrømpriser basert på regional tilbud og etterspørsel.",
"usage_tips": "Bruk dette til å forstå hvilket prisområde du er i. Nyttig ved sammenligning av priser med andre eller analyse av regionale prismønstre."
},
"consumption_ean": {
"description": "EAN-kode for strømforbruksmåling",
"long_description": "Viser European Article Number (EAN)-koden som unikt identifiserer strømforbruksmåleren din. Denne 18-sifrede koden brukes til fakturerings- og administrasjonsformål.",
"usage_tips": "Bruk dette ved kommunikasjon med strømleverandøren din eller for administrativ dokumentasjon."
},
"production_ean": {
"description": "EAN-kode for strømproduksjonsmåling",
"long_description": "Viser European Article Number (EAN)-koden for strømproduksjonsmåleren din (hvis du har solcellepaneler eller annen produksjon). Denne koden sporer strøm du sender tilbake til nettet.",
"usage_tips": "Relevant hvis du har solcellepaneler eller annen strømproduksjon. Bruk til administrative formål og ved krav om innmatingsutbetaling."
},
"energy_tax_type": {
"description": "Type energiavgift som påløper",
"long_description": "Viser energiavgiftskategorien som gjelder for strømforbruket ditt. Avgiftssatser varierer etter land og noen ganger etter forbrukertype (privat, næring osv.).",
"usage_tips": "Bruk dette til å forstå nedbrytningen av strømregningen din og for totale kostnadsberegninger."
},
"vat_type": {
"description": "MVA-kategori (merverdiavgift)",
"long_description": "Viser MVA-kategorien som gjelder for strømforbruket ditt. MVA-satser varierer etter land og kan være forskjellige for strøm sammenlignet med andre varer og tjenester.",
"usage_tips": "Bruk dette til å forstå strømregningen din og beregne totale kostnader inkludert avgifter."
},
"estimated_annual_consumption": {
"description": "Estimert årlig strømforbruk i kWh",
"long_description": "Viser ditt estimerte årlige strømforbruk i kilowattimer som beregnet eller konfigurert i Tibber-kontoen din. Dette estimatet brukes til å sammenligne faktisk forbruk med forventede verdier.",
"usage_tips": "Bruk dette til å spore om ditt faktiske forbruk er over eller under forventningene. Sammenlign månedlig forbruk med 1/12 av denne verdien for å identifisere uvanlige mønstre."
},
"subscription_status": {
"description": "Status for Tibber-abonnementet ditt",
"long_description": "Viser om Tibber-abonnementet ditt for øyeblikket er aktivt, avsluttet eller venter på aktivering. En status 'Aktiv' betyr at du aktivt mottar strøm gjennom Tibber.",
"usage_tips": "Bruk dette til å overvåke abonnementsstatusen din. Sett opp varsler hvis statusen endres fra 'Aktiv' for å sikre uavbrutt tjeneste."
},
"chart_data_export": {
"description": "Dataeksport for dashboardintegrasjoner",
"long_description": "Denne sensoren kaller get_chartdata-tjenesten med din konfigurerte YAML-konfigurasjon og eksponerer resultatet som entitetsattributter. Status viser 'ready' når data er tilgjengelig, 'error' ved feil, eller 'pending' før første kall. Perfekt for dashboardintegrasjoner som ApexCharts som trenger å lese prisdata fra entitetsattributter.",
"usage_tips": "Konfigurer YAML-parametrene i integrasjonsinnstillingene for å matche get_chartdata-tjenestekallet ditt. Sensoren vil automatisk oppdatere når prisdata oppdateres (typisk etter midnatt og når morgendagens data ankommer). Få tilgang til tjenesteresponsdataene direkte fra entitetens attributter - strukturen matcher nøyaktig det get_chartdata returnerer."
},
"chart_metadata": {
"description": "Lettvekts metadata for diagramkonfigurasjon",
"long_description": "Gir essensielle diagramkonfigurasjonsverdier som sensorattributter. Nyttig for ethvert diagramkort som trenger Y-aksegrenser. Sensoren kaller get_chartdata med kun-metadata-modus (ingen databehandling) og trekker ut: yaxis_min, yaxis_max (foreslått Y-akseområde for optimal skalering). Status reflekterer tjenestekallresultatet: 'ready' ved suksess, 'error' ved feil, 'pending' under initialisering.",
"usage_tips": "Konfigurer via configuration.yaml under tibber_prices.chart_metadata_config (valgfritt: day, subunit_currency, resolution). Sensoren oppdateres automatisk når prisdata endres. Få tilgang til metadata fra attributter: yaxis_min, yaxis_max. Bruk med config-template-card eller ethvert verktøy som leser entitetsattributter - perfekt for dynamisk diagramkonfigurasjon uten manuelle beregninger."
} }
}, },
"binary_sensor": { "binary_sensor": {
@ -477,90 +275,6 @@
"description": "Om tilkoblingen til Tibber API fungerer", "description": "Om tilkoblingen til Tibber API fungerer",
"long_description": "Indikerer om integrasjonen kan koble til Tibber API", "long_description": "Indikerer om integrasjonen kan koble til Tibber API",
"usage_tips": "Bruk dette til å overvåke tilkoblingsstatusen til Tibber API" "usage_tips": "Bruk dette til å overvåke tilkoblingsstatusen til Tibber API"
},
"has_ventilation_system": {
"description": "Om boligen din har ventilasjonsanlegg",
"long_description": "Indikerer om et ventilasjonsanlegg er registrert for boligen din i Tibber-kontoen. Ventilasjonsanlegg kan være betydelige strømforbrukere som kan dra nytte av smart planlegging.",
"usage_tips": "Bruk dette til å aktivere ventilasjonsrelaterte automatiseringer eller energiovervåking. Hvis aktivt, vurder å planlegge ventilasjon i lavprisperioder."
},
"realtime_consumption_enabled": {
"description": "Om sanntidsforbruksovervåking er aktiv",
"long_description": "Indikerer om sanntidsovervåking av strømforbruk er aktivert og aktiv for ditt Tibber-hjem. Dette krever kompatibel målehardware (f.eks. Tibber Pulse) og et aktivt abonnement.",
"usage_tips": "Bruk dette for å bekrefte at sanntidsforbruksdata er tilgjengelig. Aktiver varsler hvis dette endres til 'av' uventet, noe som indikerer potensielle maskinvare- eller tilkoblingsproblemer."
}
},
"number": {
"best_price_flex_override": {
"description": "Maksimal prosent over daglig minimumspris som intervaller kan ha og fortsatt kvalifisere som 'beste pris'. Anbefalt: 15-20 med lemping aktivert (standard), eller 25-35 uten lemping. Maksimum: 50 (tak for pålitelig periodedeteksjon).",
"long_description": "Når denne entiteten er aktivert, overstyrer verdien 'Fleksibilitet'-innstillingen fra alternativer-dialogen for beste pris-periodeberegninger.",
"usage_tips": "Aktiver denne entiteten for å dynamisk justere beste pris-deteksjon via automatiseringer, f.eks. høyere fleksibilitet for kritiske laster eller strengere krav for fleksible apparater."
},
"best_price_min_distance_override": {
"description": "Minimum prosentavstand under daglig gjennomsnitt. Intervaller må være så langt under gjennomsnittet for å kvalifisere som 'beste pris'. Hjelper med å skille ekte lavprisperioder fra gjennomsnittspriser.",
"long_description": "Når denne entiteten er aktivert, overstyrer verdien 'Minimumsavstand'-innstillingen fra alternativer-dialogen for beste pris-periodeberegninger.",
"usage_tips": "Øk verdien for strengere beste pris-kriterier. Reduser hvis for få perioder blir oppdaget."
},
"best_price_min_period_length_override": {
"description": "Minimum periodelengde i 15-minutters intervaller. Perioder kortere enn dette blir ikke rapportert. Eksempel: 2 = minimum 30 minutter.",
"long_description": "Når denne entiteten er aktivert, overstyrer verdien 'Minimum periodelengde'-innstillingen fra alternativer-dialogen for beste pris-periodeberegninger.",
"usage_tips": "Juster til typisk apparatkjøretid: 2 (30 min) for hurtigprogrammer, 4-8 (1-2 timer) for normale sykluser, 8+ for lange ECO-programmer."
},
"best_price_min_periods_override": {
"description": "Minimum antall beste pris-perioder å finne daglig. Når lemping er aktivert, vil systemet automatisk justere kriterier for å oppnå dette antallet.",
"long_description": "Når denne entiteten er aktivert, overstyrer verdien 'Minimum perioder'-innstillingen fra alternativer-dialogen for beste pris-periodeberegninger.",
"usage_tips": "Sett dette til antall tidskritiske oppgaver du har daglig. Eksempel: 2 for to vaskemaskinkjøringer."
},
"best_price_relaxation_attempts_override": {
"description": "Antall forsøk på å gradvis lempe kriteriene for å oppnå minimum periodeantall. Hvert forsøk øker fleksibiliteten med 3 prosent. Ved 0 brukes kun basiskriterier.",
"long_description": "Når denne entiteten er aktivert, overstyrer verdien 'Lemping forsøk'-innstillingen fra alternativer-dialogen for beste pris-periodeberegninger.",
"usage_tips": "Høyere verdier gjør periodedeteksjon mer adaptiv for dager med stabile priser. Sett til 0 for å tvinge strenge kriterier uten lemping."
},
"best_price_gap_count_override": {
"description": "Maksimalt antall dyrere intervaller som kan tillates mellom billige intervaller mens de fortsatt regnes som en sammenhengende periode. Ved 0 må billige intervaller være påfølgende.",
"long_description": "Når denne entiteten er aktivert, overstyrer verdien 'Gaptoleranse'-innstillingen fra alternativer-dialogen for beste pris-periodeberegninger.",
"usage_tips": "Øk dette for apparater med variabel last (f.eks. varmepumper) som kan tåle korte dyrere intervaller. Sett til 0 for kontinuerlige billige perioder."
},
"peak_price_flex_override": {
"description": "Maksimal prosent under daglig maksimumspris som intervaller kan ha og fortsatt kvalifisere som 'topppris'. Samme anbefalinger som for beste pris-fleksibilitet.",
"long_description": "Når denne entiteten er aktivert, overstyrer verdien 'Fleksibilitet'-innstillingen fra alternativer-dialogen for topppris-periodeberegninger.",
"usage_tips": "Bruk dette for å justere topppris-terskelen ved kjøretid for automatiseringer som unngår forbruk under dyre timer."
},
"peak_price_min_distance_override": {
"description": "Minimum prosentavstand over daglig gjennomsnitt. Intervaller må være så langt over gjennomsnittet for å kvalifisere som 'topppris'.",
"long_description": "Når denne entiteten er aktivert, overstyrer verdien 'Minimumsavstand'-innstillingen fra alternativer-dialogen for topppris-periodeberegninger.",
"usage_tips": "Øk verdien for kun å fange ekstreme pristopper. Reduser for å inkludere flere høypristider."
},
"peak_price_min_period_length_override": {
"description": "Minimum periodelengde i 15-minutters intervaller for topppriser. Kortere pristopper rapporteres ikke som perioder.",
"long_description": "Når denne entiteten er aktivert, overstyrer verdien 'Minimum periodelengde'-innstillingen fra alternativer-dialogen for topppris-periodeberegninger.",
"usage_tips": "Kortere verdier fanger korte pristopper. Lengre verdier fokuserer på vedvarende høyprisperioder."
},
"peak_price_min_periods_override": {
"description": "Minimum antall topppris-perioder å finne daglig.",
"long_description": "Når denne entiteten er aktivert, overstyrer verdien 'Minimum perioder'-innstillingen fra alternativer-dialogen for topppris-periodeberegninger.",
"usage_tips": "Sett dette basert på hvor mange høyprisperioder du vil fange per dag for automatiseringer."
},
"peak_price_relaxation_attempts_override": {
"description": "Antall forsøk på å lempe kriteriene for å oppnå minimum antall topppris-perioder.",
"long_description": "Når denne entiteten er aktivert, overstyrer verdien 'Lemping forsøk'-innstillingen fra alternativer-dialogen for topppris-periodeberegninger.",
"usage_tips": "Øk dette hvis ingen perioder blir funnet på dager med stabile priser. Sett til 0 for å tvinge strenge kriterier."
},
"peak_price_gap_count_override": {
"description": "Maksimalt antall billigere intervaller som kan tillates mellom dyre intervaller mens de fortsatt regnes som en topppris-periode.",
"long_description": "Når denne entiteten er aktivert, overstyrer verdien 'Gaptoleranse'-innstillingen fra alternativer-dialogen for topppris-periodeberegninger.",
"usage_tips": "Høyere verdier fanger lengre høyprisperioder selv med korte prisdykk. Sett til 0 for strengt sammenhengende topppriser."
}
},
"switch": {
"best_price_enable_relaxation_override": {
"description": "Når aktivert, lempes kriteriene automatisk for å oppnå minimum periodeantall. Når deaktivert, rapporteres kun perioder som oppfyller strenge kriterier (muligens null perioder på dager med stabile priser).",
"long_description": "Når denne entiteten er aktivert, overstyrer verdien 'Oppnå minimumsantall'-innstillingen fra alternativer-dialogen for beste pris-periodeberegninger.",
"usage_tips": "Aktiver dette for garanterte daglige automatiseringsmuligheter. Deaktiver hvis du kun vil ha virkelig billige perioder, selv om det betyr ingen perioder på noen dager."
},
"peak_price_enable_relaxation_override": {
"description": "Når aktivert, lempes kriteriene automatisk for å oppnå minimum periodeantall. Når deaktivert, rapporteres kun ekte pristopper.",
"long_description": "Når denne entiteten er aktivert, overstyrer verdien 'Oppnå minimumsantall'-innstillingen fra alternativer-dialogen for topppris-periodeberegninger.",
"usage_tips": "Aktiver dette for konsistente topppris-varsler. Deaktiver for kun å fange ekstreme pristopper."
} }
}, },
"home_types": { "home_types": {
@ -568,16 +282,5 @@
"ROWHOUSE": "Rekkehus", "ROWHOUSE": "Rekkehus",
"HOUSE": "Hus", "HOUSE": "Hus",
"COTTAGE": "Hytte" "COTTAGE": "Hytte"
}, }
"time_units": {
"day": "{count} dag",
"days": "{count} dager",
"hour": "{count} time",
"hours": "{count} timer",
"minute": "{count} minutt",
"minutes": "{count} minutter",
"ago": "{parts} siden",
"now": "nå"
},
"attribution": "Data levert av Tibber"
} }

View file

@ -1,143 +1,91 @@
{ {
"apexcharts": {
"title_rating_level": "Prijsfasen dagverloop",
"title_level": "Prijsniveau",
"hourly_suffix": "(Ø per uur)",
"best_price_period_name": "Beste prijsperiode",
"peak_price_period_name": "Piekprijsperiode",
"notification": {
"metadata_sensor_unavailable": {
"title": "Tibber Prices: ApexCharts YAML gegenereerd met beperkte functionaliteit",
"message": "Je hebt zojuist een ApexCharts-kaartconfiguratie gegenereerd via Ontwikkelaarstools. De grafiek-metadata-sensor is momenteel uitgeschakeld, dus de gegenereerde YAML toont alleen **basisfunctionaliteit** (auto-schaal as, vaste verloop op 50%).\n\n**Voor volledige functionaliteit** (geoptimaliseerde schaling, dynamische verloopkleuren):\n1. [Open Tibber Prices-integratie](https://my.home-assistant.io/redirect/integration/?domain=tibber_prices)\n2. Schakel de 'Chart Metadata'-sensor in\n3. **Genereer de YAML opnieuw** via Ontwikkelaarstools\n4. **Vervang de oude YAML** in je dashboard door de nieuwe versie\n\n⚠ Alleen de sensor inschakelen is niet genoeg - je moet de YAML opnieuw genereren en vervangen!"
},
"missing_cards": {
"title": "Tibber Prices: ApexCharts YAML kan niet worden gebruikt",
"message": "Je hebt zojuist een ApexCharts-kaartconfiguratie gegenereerd via Ontwikkelaarstools, maar de gegenereerde YAML **zal niet werken** omdat vereiste aangepaste kaarten ontbreken.\n\n**Ontbrekende kaarten:**\n{cards}\n\n**Om de gegenereerde YAML te gebruiken:**\n1. Klik op de bovenstaande links om de ontbrekende kaarten te installeren vanuit HACS\n2. Herstart Home Assistant (soms nodig)\n3. **Genereer de YAML opnieuw** via Ontwikkelaarstools\n4. Voeg de YAML toe aan je dashboard\n\n⚠ De huidige YAML-code werkt niet totdat alle kaarten zijn geïnstalleerd!"
}
}
},
"sensor": { "sensor": {
"current_interval_price": { "current_price": {
"description": "De huidige elektriciteitsprijs per kWh", "description": "De huidige elektriciteitsprijs per kWh",
"long_description": "Toont de huidige prijs per kWh van je Tibber-abonnement", "long_description": "Toont de huidige prijs per kWh van uw Tibber-abonnement",
"usage_tips": "Gebruik dit om prijzen bij te houden of om automatiseringen te maken die worden uitgevoerd wanneer elektriciteit goedkoop is" "usage_tips": "Gebruik dit om prijzen bij te houden of om automatiseringen te maken die worden uitgevoerd wanneer elektriciteit goedkoop is"
}, },
"current_interval_price_base": {
"description": "Huidige elektriciteitsprijs in hoofdvaluta (EUR/kWh, NOK/kWh, enz.) voor Energie-dashboard",
"long_description": "Toont de huidige prijs per kWh in hoofdvaluta-eenheden (bijv. EUR/kWh in plaats van ct/kWh, NOK/kWh in plaats van øre/kWh). Deze sensor is speciaal ontworpen voor gebruik met het Energie-dashboard van Home Assistant, dat prijzen in standaard valuta-eenheden vereist.",
"usage_tips": "Gebruik deze sensor bij het configureren van het Energie-dashboard onder Instellingen → Dashboards → Energie. Selecteer deze sensor als 'Entiteit met huidige prijs' om automatisch je energiekosten te berekenen. Het Energie-dashboard vermenigvuldigt je energieverbruik (kWh) met deze prijs om totale kosten weer te geven."
},
"next_interval_price": { "next_interval_price": {
"description": "De volgende interval elektriciteitsprijs per kWh", "description": "De volgende interval elektriciteitsprijs per kWh",
"long_description": "Toont de prijs voor het volgende 15-minuten interval van je Tibber-abonnement", "long_description": "Toont de prijs voor het volgende 15-minuten interval van uw Tibber-abonnement",
"usage_tips": "Gebruik dit om je voor te bereiden op aanstaande prijswijzigingen of om apparaten te plannen om tijdens goedkopere intervallen te draaien" "usage_tips": "Gebruik dit om u voor te bereiden op aanstaande prijswijzigingen of om apparaten te plannen om tijdens goedkopere intervallen te draaien"
}, },
"previous_interval_price": { "previous_interval_price": {
"description": "De vorige interval elektriciteitsprijs per kWh", "description": "De vorige interval elektriciteitsprijs per kWh",
"long_description": "Toont de prijs voor het vorige 15-minuten interval van je Tibber-abonnement", "long_description": "Toont de prijs voor het vorige 15-minuten interval van uw Tibber-abonnement",
"usage_tips": "Gebruik dit om eerdere prijswijzigingen te bekijken of prijsgeschiedenis bij te houden" "usage_tips": "Gebruik dit om eerdere prijswijzigingen te bekijken of prijsgeschiedenis bij te houden"
}, },
"current_hour_average_price": { "current_hour_average": {
"description": "Voortschrijdend 5-interval gemiddelde prijs per kWh", "description": "Voortschrijdend 5-interval gemiddelde prijs per kWh",
"long_description": "Toont de gemiddelde prijs per kWh berekend uit 5 intervallen: 2 vorige, huidige en 2 volgende intervallen (ongeveer 75 minuten totaal). Dit biedt een vloeiende 'uurprijs' die zich aanpast naarmate de tijd verstrijkt, in plaats van vast te zitten aan klokuren.", "long_description": "Toont de gemiddelde prijs per kWh berekend uit 5 intervallen: 2 vorige, huidige en 2 volgende intervallen (ongeveer 75 minuten totaal). Dit biedt een vloeiende 'uurprijs' die zich aanpast naarmate de tijd verstrijkt, in plaats van vast te zitten aan klokuren.",
"usage_tips": "Gebruik dit voor een stabielere prijsindicator die korte schommelingen afvlakt terwijl deze nog steeds reageert op prijswijzigingen. Beter dan vaste uurprijzen voor verbruiksbeslissingen." "usage_tips": "Gebruik dit voor een stabielere prijsindicator die korte schommelingen afvlakt terwijl deze nog steeds reageert op prijswijzigingen. Beter dan vaste uurprijzen voor verbruiksbeslissingen."
}, },
"next_hour_average_price": { "next_hour_average": {
"description": "Voortschrijdend 5-interval gemiddelde prijs voor volgend uur per kWh", "description": "Voortschrijdend 5-interval gemiddelde prijs voor volgend uur per kWh",
"long_description": "Toont de gemiddelde prijs per kWh berekend uit 5 intervallen gecentreerd één uur vooruit: ongeveer intervallen +2 tot +6 vanaf nu (dekking van minuten +30 tot +105). Dit biedt een vooruitkijkende vloeiende 'uurprijs' voor verbruiksplanning.", "long_description": "Toont de gemiddelde prijs per kWh berekend uit 5 intervallen gecentreerd één uur vooruit: ongeveer intervallen +2 tot +6 vanaf nu (dekking van minuten +30 tot +105). Dit biedt een vooruitkijkende vloeiende 'uurprijs' voor verbruiksplanning.",
"usage_tips": "Gebruik dit om prijswijzigingen in het volgende uur te anticiperen. Handig voor het plannen van activiteiten met hoog verbruik zoals het opladen van elektrische voertuigen, het draaien van vaatwassers of verwarmingssystemen." "usage_tips": "Gebruik dit om prijswijzigingen in het volgende uur te anticiperen. Handig voor het plannen van activiteiten met hoog verbruik zoals het opladen van elektrische voertuigen, het draaien van vaatwassers of verwarmingssystemen."
}, },
"lowest_price_today": { "lowest_price_today": {
"description": "De laagste elektriciteitsprijs voor vandaag per kWh", "description": "De laagste elektriciteitsprijs voor vandaag per kWh",
"long_description": "Toont de laagste prijs per kWh voor de huidige dag van je Tibber-abonnement", "long_description": "Toont de laagste prijs per kWh voor de huidige dag van uw Tibber-abonnement",
"usage_tips": "Gebruik dit om huidige prijzen te vergelijken met de goedkoopste tijd van de dag" "usage_tips": "Gebruik dit om huidige prijzen te vergelijken met de goedkoopste tijd van de dag"
}, },
"highest_price_today": { "highest_price_today": {
"description": "De hoogste elektriciteitsprijs voor vandaag per kWh", "description": "De hoogste elektriciteitsprijs voor vandaag per kWh",
"long_description": "Toont de hoogste prijs per kWh voor de huidige dag van je Tibber-abonnement", "long_description": "Toont de hoogste prijs per kWh voor de huidige dag van uw Tibber-abonnement",
"usage_tips": "Gebruik dit om te voorkomen dat je apparaten draait tijdens piekprijstijden" "usage_tips": "Gebruik dit om te voorkomen dat u apparaten draait tijdens piekprijstijden"
}, },
"average_price_today": { "average_price_today": {
"description": "Typische elektriciteitsprijs voor vandaag per kWh (configureerbare weergave)", "description": "De gemiddelde elektriciteitsprijs voor vandaag per kWh",
"long_description": "Toont de prijs per kWh voor de huidige dag van je Tibber-abonnement. **Standaard toont de status de mediaan** (resistent tegen extreme prijspieken, toont typisch prijsniveau). Je kunt dit wijzigen in de integratie-instellingen om het rekenkundig gemiddelde te tonen. De alternatieve waarde is beschikbaar als attribuut.", "long_description": "Toont de gemiddelde prijs per kWh voor de huidige dag van uw Tibber-abonnement",
"usage_tips": "Gebruik dit als basislijn voor het vergelijken van huidige prijzen. Voor berekeningen gebruik: {{ state_attr('sensor.average_price_today', 'price_mean') }}" "usage_tips": "Gebruik dit als basislijn voor het vergelijken van huidige prijzen"
}, },
"lowest_price_tomorrow": { "lowest_price_tomorrow": {
"description": "De laagste elektriciteitsprijs voor morgen per kWh", "description": "De laagste elektriciteitsprijs voor morgen per kWh",
"long_description": "Toont de laagste prijs per kWh voor morgen van je Tibber-abonnement. Deze sensor wordt niet beschikbaar totdat de gegevens van morgen door Tibber worden gepubliceerd (meestal rond 13:00-14:00 CET).", "long_description": "Toont de laagste prijs per kWh voor morgen van uw Tibber-abonnement. Deze sensor wordt niet beschikbaar totdat de gegevens van morgen door Tibber worden gepubliceerd (meestal rond 13:00-14:00 CET).",
"usage_tips": "Gebruik dit om energie-intensieve activiteiten te plannen voor de goedkoopste tijd van morgen. Perfect voor vooraf plannen van verwarming, EV-laden of apparaten." "usage_tips": "Gebruik dit om energie-intensieve activiteiten te plannen voor de goedkoopste tijd van morgen. Perfect voor vooraf plannen van verwarming, EV-laden of apparaten."
}, },
"highest_price_tomorrow": { "highest_price_tomorrow": {
"description": "De hoogste elektriciteitsprijs voor morgen per kWh", "description": "De hoogste elektriciteitsprijs voor morgen per kWh",
"long_description": "Toont de hoogste prijs per kWh voor morgen van je Tibber-abonnement. Deze sensor wordt niet beschikbaar totdat de gegevens van morgen door Tibber worden gepubliceerd (meestal rond 13:00-14:00 CET).", "long_description": "Toont de hoogste prijs per kWh voor morgen van uw Tibber-abonnement. Deze sensor wordt niet beschikbaar totdat de gegevens van morgen door Tibber worden gepubliceerd (meestal rond 13:00-14:00 CET).",
"usage_tips": "Gebruik dit om te voorkomen dat je apparaten draait tijdens de piekprijstijden van morgen. Handig voor het plannen rond dure perioden." "usage_tips": "Gebruik dit om te voorkomen dat u apparaten draait tijdens de piekprijstijden van morgen. Handig voor het plannen rond dure perioden."
}, },
"average_price_tomorrow": { "average_price_tomorrow": {
"description": "Typische elektriciteitsprijs voor morgen per kWh (configureerbare weergave)", "description": "De gemiddelde elektriciteitsprijs voor morgen per kWh",
"long_description": "Toont de prijs per kWh voor morgen van je Tibber-abonnement. **Standaard toont de status de mediaan** (resistent tegen extreme prijspieken). Je kunt dit wijzigen in de integratie-instellingen om het rekenkundig gemiddelde te tonen. De alternatieve waarde is beschikbaar als attribuut. Deze sensor wordt niet beschikbaar totdat de gegevens van morgen door Tibber worden gepubliceerd (meestal rond 13:00-14:00 CET).", "long_description": "Toont de gemiddelde prijs per kWh voor morgen van uw Tibber-abonnement. Deze sensor wordt niet beschikbaar totdat de gegevens van morgen door Tibber worden gepubliceerd (meestal rond 13:00-14:00 CET).",
"usage_tips": "Gebruik dit als basislijn voor het vergelijken van prijzen van morgen en het plannen van verbruik. Vergelijk met de mediaan van vandaag om te zien of morgen over het algemeen duurder of goedkoper wordt." "usage_tips": "Gebruik dit als basislijn voor het vergelijken van prijzen van morgen en het plannen van verbruik. Vergelijk met het gemiddelde van vandaag om te zien of morgen over het algemeen duurder of goedkoper wordt."
},
"yesterday_price_level": {
"description": "Geaggregeerd prijsniveau voor gisteren",
"long_description": "Toont het geaggregeerde prijsniveau voor alle intervallen van gisteren. Gebruikt dezelfde logica als de uursensoren om het totale prijsniveau voor de hele dag te bepalen.",
"usage_tips": "Gebruik dit om de algemene prijssituatie van gisteren te begrijpen. Vergelijk met vandaag om dagelijkse trends te zien."
},
"today_price_level": {
"description": "Geaggregeerd prijsniveau voor vandaag",
"long_description": "Toont het geaggregeerde prijsniveau voor alle intervallen van vandaag. Gebruikt dezelfde logica als de uursensoren om het totale prijsniveau voor de hele dag te bepalen.",
"usage_tips": "Gebruik dit om de prijssituatie van vandaag in één oogopslag te begrijpen. Handig voor snelle beoordelingen of vandaag over het algemeen goedkoop of duur is."
},
"tomorrow_price_level": {
"description": "Geaggregeerd prijsniveau voor morgen",
"long_description": "Toont het geaggregeerde prijsniveau voor alle intervallen van morgen. Gebruikt dezelfde logica als de uursensoren om het totale prijsniveau voor de hele dag te bepalen. Deze sensor wordt niet beschikbaar totdat de gegevens van morgen door Tibber worden gepubliceerd (meestal rond 13:00-14:00 CET).",
"usage_tips": "Gebruik dit om de prijssituatie van morgen te begrijpen. Vergelijk met vandaag om te zien of morgen gunstiger of ongunstiger zal zijn voor energieverbruik."
},
"yesterday_price_rating": {
"description": "Geaggregeerde prijsbeoordeling voor gisteren",
"long_description": "Toont de geaggregeerde prijsbeoordeling (laag/normaal/hoog) voor alle intervallen van gisteren, gebaseerd op jouw geconfigureerde drempelwaarden. Gebruikt dezelfde logica als de uursensoren om de totale beoordeling voor de hele dag te bepalen.",
"usage_tips": "Gebruik dit om de prijssituatie van gisteren te begrijpen ten opzichte van jouw persoonlijke drempelwaarden. Vergelijk met vandaag voor trendanalyse."
},
"today_price_rating": {
"description": "Geaggregeerde prijsbeoordeling voor vandaag",
"long_description": "Toont de geaggregeerde prijsbeoordeling (laag/normaal/hoog) voor alle intervallen van vandaag, gebaseerd op jouw geconfigureerde drempelwaarden. Gebruikt dezelfde logica als de uursensoren om de totale beoordeling voor de hele dag te bepalen.",
"usage_tips": "Gebruik dit om snel de prijssituatie van vandaag te beoordelen ten opzichte van jouw persoonlijke drempelwaarden. Helpt bij het nemen van verbruiksbeslissingen voor de huidige dag."
},
"tomorrow_price_rating": {
"description": "Geaggregeerde prijsbeoordeling voor morgen",
"long_description": "Toont de geaggregeerde prijsbeoordeling (laag/normaal/hoog) voor alle intervallen van morgen, gebaseerd op jouw geconfigureerde drempelwaarden. Gebruikt dezelfde logica als de uursensoren om de totale beoordeling voor de hele dag te bepalen. Deze sensor wordt niet beschikbaar totdat de gegevens van morgen door Tibber worden gepubliceerd (meestal rond 13:00-14:00 CET).",
"usage_tips": "Gebruik dit om het energieverbruik van morgen te plannen op basis van jouw persoonlijke prijsdrempelwaarden. Vergelijk met vandaag om te beslissen of je verbruik naar morgen moet verschuiven of vandaag energie moet gebruiken."
}, },
"trailing_price_average": { "trailing_price_average": {
"description": "Typische elektriciteitsprijs voor de afgelopen 24 uur per kWh (configureerbare weergave)", "description": "De gemiddelde elektriciteitsprijs voor de afgelopen 24 uur per kWh",
"long_description": "Toont de prijs per kWh berekend uit de afgelopen 24 uur. **Standaard toont de status de mediaan** (resistent tegen extreme prijspieken, toont typisch prijsniveau). Je kunt dit wijzigen in de integratie-instellingen om het rekenkundig gemiddelde te tonen. De alternatieve waarde is beschikbaar als attribuut. Wordt elke 15 minuten bijgewerkt.", "long_description": "Toont de gemiddelde prijs per kWh berekend uit de afgelopen 24 uur (voortschrijdend gemiddelde) van uw Tibber-abonnement. Dit biedt een voortschrijdend gemiddelde dat elke 15 minuten wordt bijgewerkt op basis van historische gegevens.",
"usage_tips": "Gebruik de statuswaarde om het typische huidige prijsniveau te zien. Voor kostenberekeningen gebruik: {{ state_attr('sensor.trailing_price_average', 'price_mean') }}" "usage_tips": "Gebruik dit om huidige prijzen te vergelijken met recente trends. Een huidige prijs die aanzienlijk boven dit gemiddelde ligt, kan aangeven dat het een goed moment is om het verbruik te verminderen."
}, },
"leading_price_average": { "leading_price_average": {
"description": "Typische elektriciteitsprijs voor de komende 24 uur per kWh (configureerbare weergave)", "description": "De gemiddelde elektriciteitsprijs voor de komende 24 uur per kWh",
"long_description": "Toont de prijs per kWh berekend uit de komende 24 uur. **Standaard toont de status de mediaan** (resistent tegen extreme prijspieken, toont verwacht prijsniveau). Je kunt dit wijzigen in de integratie-instellingen om het rekenkundig gemiddelde te tonen. De alternatieve waarde is beschikbaar als attribuut.", "long_description": "Toont de gemiddelde prijs per kWh berekend uit de komende 24 uur (vooruitlopend gemiddelde) van uw Tibber-abonnement. Dit biedt een vooruitkijkend gemiddelde op basis van beschikbare prognosegegevens.",
"usage_tips": "Gebruik de statuswaarde om het typische toekomstige prijsniveau te zien. Voor kostenberekeningen gebruik: {{ state_attr('sensor.leading_price_average', 'price_mean') }}" "usage_tips": "Gebruik dit om energieverbruik te plannen. Als de huidige prijs onder het vooruitlopende gemiddelde ligt, kan het een goed moment zijn om energie-intensieve apparaten te laten draaien."
}, },
"trailing_price_min": { "trailing_price_min": {
"description": "De minimale elektriciteitsprijs voor de afgelopen 24 uur per kWh", "description": "De minimale elektriciteitsprijs voor de afgelopen 24 uur per kWh",
"long_description": "Toont de minimumprijs per kWh van de afgelopen 24 uur (voortschrijdend minimum) van je Tibber-abonnement. Dit geeft de laagste prijs die in de afgelopen 24 uur is gezien.", "long_description": "Toont de minimumprijs per kWh van de afgelopen 24 uur (voortschrijdend minimum) van uw Tibber-abonnement. Dit geeft de laagste prijs die in de afgelopen 24 uur is gezien.",
"usage_tips": "Gebruik dit om de beste prijsmogelijkheid te zien die je in de afgelopen 24 uur had en vergelijk deze met huidige prijzen." "usage_tips": "Gebruik dit om de beste prijsmogelijkheid te zien die u in de afgelopen 24 uur had en vergelijk deze met huidige prijzen."
}, },
"trailing_price_max": { "trailing_price_max": {
"description": "De maximale elektriciteitsprijs voor de afgelopen 24 uur per kWh", "description": "De maximale elektriciteitsprijs voor de afgelopen 24 uur per kWh",
"long_description": "Toont de maximumprijs per kWh van de afgelopen 24 uur (voortschrijdend maximum) van je Tibber-abonnement. Dit geeft de hoogste prijs die in de afgelopen 24 uur is gezien.", "long_description": "Toont de maximumprijs per kWh van de afgelopen 24 uur (voortschrijdend maximum) van uw Tibber-abonnement. Dit geeft de hoogste prijs die in de afgelopen 24 uur is gezien.",
"usage_tips": "Gebruik dit om de piekprijs in de afgelopen 24 uur te zien en prijsvolatiliteit te beoordelen." "usage_tips": "Gebruik dit om de piekprijs in de afgelopen 24 uur te zien en prijsvolatiliteit te beoordelen."
}, },
"leading_price_min": { "leading_price_min": {
"description": "De minimale elektriciteitsprijs voor de komende 24 uur per kWh", "description": "De minimale elektriciteitsprijs voor de komende 24 uur per kWh",
"long_description": "Toont de minimumprijs per kWh van de komende 24 uur (vooruitlopend minimum) van je Tibber-abonnement. Dit geeft de laagste prijs die wordt verwacht in de komende 24 uur op basis van prognosegegevens.", "long_description": "Toont de minimumprijs per kWh van de komende 24 uur (vooruitlopend minimum) van uw Tibber-abonnement. Dit geeft de laagste prijs die wordt verwacht in de komende 24 uur op basis van prognosegegevens.",
"usage_tips": "Gebruik dit om de beste prijsmogelijkheid te identificeren die eraan komt en plan energie-intensieve taken dienovereenkomstig." "usage_tips": "Gebruik dit om de beste prijsmogelijkheid te identificeren die eraan komt en plan energie-intensieve taken dienovereenkomstig."
}, },
"leading_price_max": { "leading_price_max": {
"description": "De maximale elektriciteitsprijs voor de komende 24 uur per kWh", "description": "De maximale elektriciteitsprijs voor de komende 24 uur per kWh",
"long_description": "Toont de maximumprijs per kWh van de komende 24 uur (vooruitlopend maximum) van je Tibber-abonnement. Dit geeft de hoogste prijs die wordt verwacht in de komende 24 uur op basis van prognosegegevens.", "long_description": "Toont de maximumprijs per kWh van de komende 24 uur (vooruitlopend maximum) van uw Tibber-abonnement. Dit geeft de hoogste prijs die wordt verwacht in de komende 24 uur op basis van prognosegegevens.",
"usage_tips": "Gebruik dit om te voorkomen dat je apparaten draait tijdens aanstaande piekprijsperioden." "usage_tips": "Gebruik dit om te voorkomen dat u apparaten draait tijdens aanstaande piekprijsperioden."
}, },
"current_interval_price_level": { "price_level": {
"description": "De huidige prijsniveauclassificatie", "description": "De huidige prijsniveauclassificatie",
"long_description": "Toont de classificatie van Tibber van de huidige prijs vergeleken met historische prijzen", "long_description": "Toont de classificatie van Tibber van de huidige prijs vergeleken met historische prijzen",
"usage_tips": "Gebruik dit om automatiseringen te maken op basis van relatieve prijsniveaus in plaats van absolute prijzen" "usage_tips": "Gebruik dit om automatiseringen te maken op basis van relatieve prijsniveaus in plaats van absolute prijzen"
@ -155,14 +103,14 @@
"current_hour_price_level": { "current_hour_price_level": {
"description": "Geaggregeerd prijsniveau voor huidig voortschrijdend uur (5 intervallen)", "description": "Geaggregeerd prijsniveau voor huidig voortschrijdend uur (5 intervallen)",
"long_description": "Toont het mediane prijsniveau over 5 intervallen (2 ervoor, huidig, 2 erna) dat ongeveer 75 minuten beslaat. Biedt een stabielere prijsniveauindicator die kortetermijnschommelingen afvlakt.", "long_description": "Toont het mediane prijsniveau over 5 intervallen (2 ervoor, huidig, 2 erna) dat ongeveer 75 minuten beslaat. Biedt een stabielere prijsniveauindicator die kortetermijnschommelingen afvlakt.",
"usage_tips": "Gebruik voor planningsbeslissingen op middellange termijn waarbij je niet wilt reageren op korte prijspieken of -dalingen." "usage_tips": "Gebruik voor planningsbeslissingen op middellange termijn waarbij u niet wilt reageren op korte prijspieken of -dalingen."
}, },
"next_hour_price_level": { "next_hour_price_level": {
"description": "Geaggregeerd prijsniveau voor volgend voortschrijdend uur (5 intervallen vooruit)", "description": "Geaggregeerd prijsniveau voor volgend voortschrijdend uur (5 intervallen vooruit)",
"long_description": "Toont het mediane prijsniveau over 5 intervallen gecentreerd één uur vooruit. Helpt verbruik te plannen op basis van aanstaande prijstrends in plaats van momentane toekomstige prijzen.", "long_description": "Toont het mediane prijsniveau over 5 intervallen gecentreerd één uur vooruit. Helpt verbruik te plannen op basis van aanstaande prijstrends in plaats van momentane toekomstige prijzen.",
"usage_tips": "Gebruik om activiteiten voor het volgende uur te plannen op basis van een vloeiende prijsniveauprognose." "usage_tips": "Gebruik om activiteiten voor het volgende uur te plannen op basis van een vloeiende prijsniveauprognose."
}, },
"current_interval_price_rating": { "price_rating": {
"description": "Hoe de prijs van het huidige interval zich verhoudt tot historische gegevens", "description": "Hoe de prijs van het huidige interval zich verhoudt tot historische gegevens",
"long_description": "Toont hoe de prijs van het huidige interval zich verhoudt tot historische prijsgegevens als een percentage", "long_description": "Toont hoe de prijs van het huidige interval zich verhoudt tot historische prijsgegevens als een percentage",
"usage_tips": "Een positief percentage betekent dat de huidige prijs boven het gemiddelde ligt, negatief betekent onder het gemiddelde" "usage_tips": "Een positief percentage betekent dat de huidige prijs boven het gemiddelde ligt, negatief betekent onder het gemiddelde"
@ -185,22 +133,22 @@
"next_hour_price_rating": { "next_hour_price_rating": {
"description": "Geaggregeerde prijsbeoordeling voor volgend voortschrijdend uur (5 intervallen vooruit)", "description": "Geaggregeerde prijsbeoordeling voor volgend voortschrijdend uur (5 intervallen vooruit)",
"long_description": "Toont de gemiddelde beoordeling voor 5 intervallen gecentreerd één uur vooruit. Helpt te begrijpen of het volgende uur over het algemeen boven of onder gemiddelde prijzen zal liggen.", "long_description": "Toont de gemiddelde beoordeling voor 5 intervallen gecentreerd één uur vooruit. Helpt te begrijpen of het volgende uur over het algemeen boven of onder gemiddelde prijzen zal liggen.",
"usage_tips": "Gebruik om te beslissen of je een uur moet wachten voordat je activiteiten met hoog verbruik start." "usage_tips": "Gebruik om te beslissen of u een uur moet wachten voordat u activiteiten met hoog verbruik start."
}, },
"next_avg_1h": { "next_avg_1h": {
"description": "Gemiddelde prijs voor het volgende 1 uur (alleen vooruit vanaf volgend interval)", "description": "Gemiddelde prijs voor het volgende 1 uur (alleen vooruit vanaf volgend interval)",
"long_description": "Vooruitkijkend gemiddelde: Toont gemiddelde van volgende 4 intervallen (1 uur) vanaf het VOLGENDE 15-minuten interval (niet inclusief huidig). Verschilt van current_hour_average_price die vorige intervallen omvat. Gebruik voor absolute prijsdrempelplanning.", "long_description": "Vooruitkijkend gemiddelde: Toont gemiddelde van volgende 4 intervallen (1 uur) vanaf het VOLGENDE 15-minuten interval (niet inclusief huidig). Verschilt van current_hour_average die vorige intervallen omvat. Gebruik voor absolute prijsdrempelplanning.",
"usage_tips": "Absolute prijsdrempel: Start apparaten alleen wanneer het gemiddelde onder je maximaal acceptabele prijs blijft (bijv. onder 0,25 EUR/kWh). Combineer met trendsensor voor optimale timing. Let op: Dit is GEEN vervanging voor uurprijzen - gebruik current_hour_average_price daarvoor." "usage_tips": "Absolute prijsdrempel: Start apparaten alleen wanneer het gemiddelde onder uw maximaal acceptabele prijs blijft (bijv. onder 0,25 EUR/kWh). Combineer met trendsensor voor optimale timing. Let op: Dit is GEEN vervanging voor uurprijzen - gebruik current_hour_average daarvoor."
}, },
"next_avg_2h": { "next_avg_2h": {
"description": "Gemiddelde prijs voor de volgende 2 uur", "description": "Gemiddelde prijs voor de volgende 2 uur",
"long_description": "Toont de gemiddelde prijs voor de volgende 8 intervallen (2 uur) vanaf het volgende 15-minuten interval.", "long_description": "Toont de gemiddelde prijs voor de volgende 8 intervallen (2 uur) vanaf het volgende 15-minuten interval.",
"usage_tips": "Absolute prijsdrempel: Stel een maximaal acceptabele gemiddelde prijs in voor standaard apparaten zoals wasmachines. Zorgt ervoor dat je nooit meer betaalt dan je limiet." "usage_tips": "Absolute prijsdrempel: Stel een maximaal acceptabele gemiddelde prijs in voor standaard apparaten zoals wasmachines. Zorgt ervoor dat u nooit meer betaalt dan uw limiet."
}, },
"next_avg_3h": { "next_avg_3h": {
"description": "Gemiddelde prijs voor de volgende 3 uur", "description": "Gemiddelde prijs voor de volgende 3 uur",
"long_description": "Toont de gemiddelde prijs voor de volgende 12 intervallen (3 uur) vanaf het volgende 15-minuten interval.", "long_description": "Toont de gemiddelde prijs voor de volgende 12 intervallen (3 uur) vanaf het volgende 15-minuten interval.",
"usage_tips": "Absolute prijsdrempel: Voor EU Eco-programma's (vaatwassers, 3-4u looptijd). Start alleen wanneer 3u gemiddelde onder je prijslimiet is. Gebruik met trendsensor om beste moment binnen acceptabel prijsbereik te vinden." "usage_tips": "Absolute prijsdrempel: Voor EU Eco-programma's (vaatwassers, 3-4u looptijd). Start alleen wanneer 3u gemiddelde onder uw prijslimiet is. Gebruik met trendsensor om beste moment binnen acceptabel prijsbereik te vinden."
}, },
"next_avg_4h": { "next_avg_4h": {
"description": "Gemiddelde prijs voor de volgende 4 uur", "description": "Gemiddelde prijs voor de volgende 4 uur",
@ -215,32 +163,32 @@
"next_avg_6h": { "next_avg_6h": {
"description": "Gemiddelde prijs voor de volgende 6 uur", "description": "Gemiddelde prijs voor de volgende 6 uur",
"long_description": "Toont de gemiddelde prijs voor de volgende 24 intervallen (6 uur) vanaf het volgende 15-minuten interval.", "long_description": "Toont de gemiddelde prijs voor de volgende 24 intervallen (6 uur) vanaf het volgende 15-minuten interval.",
"usage_tips": "Absolute prijsdrempel: Avondplanning met prijslimieten. Plan taken alleen als 6u gemiddelde onder je maximaal acceptabele kosten blijft." "usage_tips": "Absolute prijsdrempel: Avondplanning met prijslimieten. Plan taken alleen als 6u gemiddelde onder uw maximaal acceptabele kosten blijft."
}, },
"next_avg_8h": { "next_avg_8h": {
"description": "Gemiddelde prijs voor de volgende 8 uur", "description": "Gemiddelde prijs voor de volgende 8 uur",
"long_description": "Toont de gemiddelde prijs voor de volgende 32 intervallen (8 uur) vanaf het volgende 15-minuten interval.", "long_description": "Toont de gemiddelde prijs voor de volgende 32 intervallen (8 uur) vanaf het volgende 15-minuten interval.",
"usage_tips": "Absolute prijsdrempel: Nachtelijke bedieningsbeslissingen. Stel harde prijslimieten in voor nachtelijke belastingen (batterij opladen, thermische opslag). Overschrijd nooit je budget." "usage_tips": "Absolute prijsdrempel: Nachtelijke bedieningsbeslissingen. Stel harde prijslimieten in voor nachtelijke belastingen (batterij opladen, thermische opslag). Overschrijd nooit uw budget."
}, },
"next_avg_12h": { "next_avg_12h": {
"description": "Gemiddelde prijs voor de volgende 12 uur", "description": "Gemiddelde prijs voor de volgende 12 uur",
"long_description": "Toont de gemiddelde prijs voor de volgende 48 intervallen (12 uur) vanaf het volgende 15-minuten interval.", "long_description": "Toont de gemiddelde prijs voor de volgende 48 intervallen (12 uur) vanaf het volgende 15-minuten interval.",
"usage_tips": "Absolute prijsdrempel: Strategische beslissingen met prijslimieten. Ga alleen door als 12u gemiddelde onder je maximaal acceptabele prijs is. Goed voor uitgestelde grote belastingen." "usage_tips": "Absolute prijsdrempel: Strategische beslissingen met prijslimieten. Ga alleen door als 12u gemiddelde onder uw maximaal acceptabele prijs is. Goed voor uitgestelde grote belastingen."
}, },
"price_trend_1h": { "price_trend_1h": {
"description": "Prijstrend voor het volgende uur", "description": "Prijstrend voor het volgende uur",
"long_description": "Vergelijkt huidige intervalprijs met gemiddelde van volgend 1 uur (4 intervallen). Stijgend als toekomst >5% hoger is, dalend als >5% lager, anders stabiel.", "long_description": "Vergelijkt huidige intervalprijs met gemiddelde van volgend 1 uur (4 intervallen). Stijgend als toekomst >5% hoger is, dalend als >5% lager, anders stabiel.",
"usage_tips": "Relatieve optimalisatie: 'dalend' = wacht, prijzen dalen. 'stijgend' = handel nu of je betaalt meer. 'stabiel' = prijs maakt nu niet veel uit. Werkt onafhankelijk van absoluut prijsniveau." "usage_tips": "Relatieve optimalisatie: 'dalend' = wacht, prijzen dalen. 'stijgend' = handel nu of u betaalt meer. 'stabiel' = prijs maakt nu niet veel uit. Werkt onafhankelijk van absoluut prijsniveau."
}, },
"price_trend_2h": { "price_trend_2h": {
"description": "Prijstrend voor de volgende 2 uur", "description": "Prijstrend voor de volgende 2 uur",
"long_description": "Vergelijkt huidige intervalprijs met gemiddelde van volgende 2 uur (8 intervallen). Stijgend als toekomst >5% hoger is, dalend als >5% lager, anders stabiel.", "long_description": "Vergelijkt huidige intervalprijs met gemiddelde van volgende 2 uur (8 intervallen). Stijgend als toekomst >5% hoger is, dalend als >5% lager, anders stabiel.",
"usage_tips": "Relatieve optimalisatie: Ideaal voor apparaten. 'dalend' betekent betere prijzen komen over 2u - stel uit indien mogelijk. Vindt beste timing binnen je beschikbare venster, ongeacht seizoen." "usage_tips": "Relatieve optimalisatie: Ideaal voor apparaten. 'dalend' betekent betere prijzen komen over 2u - stel uit indien mogelijk. Vindt beste timing binnen uw beschikbare venster, ongeacht seizoen."
}, },
"price_trend_3h": { "price_trend_3h": {
"description": "Prijstrend voor de volgende 3 uur", "description": "Prijstrend voor de volgende 3 uur",
"long_description": "Vergelijkt huidige intervalprijs met gemiddelde van volgende 3 uur (12 intervallen). Stijgend als toekomst >5% hoger is, dalend als >5% lager, anders stabiel.", "long_description": "Vergelijkt huidige intervalprijs met gemiddelde van volgende 3 uur (12 intervallen). Stijgend als toekomst >5% hoger is, dalend als >5% lager, anders stabiel.",
"usage_tips": "Relatieve optimalisatie: Voor Eco-programma's. 'dalend' betekent prijzen dalen >5% - het wachten waard. Werkt in elk seizoen. Combineer met avg-sensor voor prijslimiet: alleen wanneer avg < je limiet EN trend niet 'dalend'." "usage_tips": "Relatieve optimalisatie: Voor Eco-programma's. 'dalend' betekent prijzen dalen >5% - het wachten waard. Werkt in elk seizoen. Combineer met avg-sensor voor prijslimiet: alleen wanneer avg < uw limiet EN trend niet 'dalend'."
}, },
"price_trend_4h": { "price_trend_4h": {
"description": "Prijstrend voor de volgende 4 uur", "description": "Prijstrend voor de volgende 4 uur",
@ -250,12 +198,12 @@
"price_trend_5h": { "price_trend_5h": {
"description": "Prijstrend voor de volgende 5 uur", "description": "Prijstrend voor de volgende 5 uur",
"long_description": "Vergelijkt huidige intervalprijs met gemiddelde van volgende 5 uur (20 intervallen). Stijgend als toekomst >5% hoger is, dalend als >5% lager, anders stabiel.", "long_description": "Vergelijkt huidige intervalprijs met gemiddelde van volgende 5 uur (20 intervallen). Stijgend als toekomst >5% hoger is, dalend als >5% lager, anders stabiel.",
"usage_tips": "Relatieve optimalisatie: Uitgebreide operaties. Past zich aan de markt aan - vindt beste relatieve timing in elke prijsomgeving. 'stabiel/stijgend' = goed moment om te starten binnen je planningsvenster." "usage_tips": "Relatieve optimalisatie: Uitgebreide operaties. Past zich aan de markt aan - vindt beste relatieve timing in elke prijsomgeving. 'stabiel/stijgend' = goed moment om te starten binnen uw planningsvenster."
}, },
"price_trend_6h": { "price_trend_6h": {
"description": "Prijstrend voor de volgende 6 uur", "description": "Prijstrend voor de volgende 6 uur",
"long_description": "Vergelijkt huidige intervalprijs met gemiddelde van volgende 6 uur (24 intervallen). Stijgend als toekomst >5% hoger is, dalend als >5% lager, anders stabiel.", "long_description": "Vergelijkt huidige intervalprijs met gemiddelde van volgende 6 uur (24 intervallen). Stijgend als toekomst >5% hoger is, dalend als >5% lager, anders stabiel.",
"usage_tips": "Relatieve optimalisatie: Avandbeslissingen. 'dalend' = prijzen verbeteren aanzienlijk als je wacht. Geen vaste drempels nodig - past automatisch aan winter/zomer prijsniveaus." "usage_tips": "Relatieve optimalisatie: Avandbeslissingen. 'dalend' = prijzen verbeteren aanzienlijk als u wacht. Geen vaste drempels nodig - past automatisch aan winter/zomer prijsniveaus."
}, },
"price_trend_8h": { "price_trend_8h": {
"description": "Prijstrend voor de volgende 8 uur", "description": "Prijstrend voor de volgende 8 uur",
@ -263,23 +211,13 @@
"usage_tips": "Relatieve optimalisatie: Nachtplanning. 'dalend' betekent wachten tot de nacht loont (>5% goedkoper). Werkt het hele jaar door zonder handmatige drempelaanpassingen. Start wanneer 'stabiel' of 'stijgend'." "usage_tips": "Relatieve optimalisatie: Nachtplanning. 'dalend' betekent wachten tot de nacht loont (>5% goedkoper). Werkt het hele jaar door zonder handmatige drempelaanpassingen. Start wanneer 'stabiel' of 'stijgend'."
}, },
"price_trend_12h": { "price_trend_12h": {
"description": "Prijstrend voor de komende 12 uur", "description": "Prijstrend voor de volgende 12 uur",
"long_description": "Vergelijkt huidige intervalprijs met gemiddelde van de komende 12 uur (48 intervallen). Stijgend als toekomst >5% hoger is, dalend als >5% lager, anders stabiel.", "long_description": "Vergelijkt huidige intervalprijs met gemiddelde van volgende 12 uur (48 intervallen). Stijgend als toekomst >5% hoger is, dalend als >5% lager, anders stabiel.",
"usage_tips": "Relatieve optimalisatie: Lange termijn strategische beslissingen. 'dalend' = aanzienlijk betere prijzen komen vanavond/morgen. Vindt optimale timing in elke marktsituatie. Het beste gecombineerd met prijslimiet van avg-sensor." "usage_tips": "Relatieve optimalisatie: Lange termijn strategische beslissingen. 'dalend' = aanzienlijk betere prijzen komen vannacht/morgen. Vindt optimale timing in elke marktconditie. Best gecombineerd met avg-sensor prijslimiet."
},
"current_price_trend": {
"description": "Huidige prijstrend-richting en hoe lang deze aanhoudt",
"long_description": "Toont de huidige prijstrend (stijgend/dalend/stabiel) door historisch momentum (gewogen 1u terugblik) te combineren met toekomstperspectief. Herkent lopende trends eerder dan alleen toekomstanalyse. Gebruikt ±3% momentum-drempel en volatiliteit-afhankelijke toekomstvergelijking. Berekent dynamisch tot de volgende trendwijziging (of 3u standaard als geen wijziging in 24u). De status toont de huidige richting, attributen tonen wanneer het verandert en wat er daarna komt.",
"usage_tips": "Statusweergave: Dashboard-zichtbaarheid van 'wat gebeurt er nu tot wanneer'. Perfect gesynchroniseerd met next_price_trend_change. Voorbeeld: Badge met 'Stijgend voor 2,5u' of 'Dalend tot 16:45'. Beter dan tijdvenster-sensoren omdat het begrijpt dat je REEDS in een trend zit, niet alleen toekomstige veranderingen voorspelt. Gebruik voor snelle visuele overview, niet voor automatiserings-triggers."
},
"next_price_trend_change": {
"description": "Wanneer de volgende significante prijstrendwijziging zal plaatsvinden",
"long_description": "Scant de komende 24 uur (96 intervallen) om te vinden wanneer de prijstrend (stijgend/dalend/stabiel) zal veranderen ten opzichte van het huidige momentum. Bepaalt eerst de huidige trend met gewogen 1u terugblik (herkent lopende trends), vindt dan de omkering. Gebruikt volatiliteit-adaptieve drempelwaarden (3% momentum-detectie, marktaangepaste toekomstvergelijking). Retourneert het tijdstempel wanneer de wijziging wordt verwacht.",
"usage_tips": "Gebeurtenisgestuurde automatisering: Trigger acties WANNEER trend wijzigt, niet OVER X uur. Voorbeeld: 'Laad EV wanneer volgende trendwijziging dalende prijzen toont' of 'Start vaatwasser voordat prijzen stijgen'. Vult tijdvenster-sensors aan (price_trend_Xh) die beantwoorden 'ZULLEN prijzen over X uur hoger zijn?'"
}, },
"daily_rating": { "daily_rating": {
"description": "Hoe de prijzen van vandaag zich verhouden tot historische gegevens", "description": "Hoe de prijzen van vandaag zich verhouden tot historische gegevens",
"long_description": "Toont hoe de prijzen van vandaag zich verhouden tot historische prijsgegevens als percentage", "long_description": "Toont hoe de prijzen van vandaag zich verhouden tot historische prijsgegevens als een percentage",
"usage_tips": "Een positief percentage betekent dat de prijzen van vandaag boven het gemiddelde liggen, negatief betekent onder het gemiddelde" "usage_tips": "Een positief percentage betekent dat de prijzen van vandaag boven het gemiddelde liggen, negatief betekent onder het gemiddelde"
}, },
"monthly_rating": { "monthly_rating": {
@ -289,172 +227,32 @@
}, },
"data_timestamp": { "data_timestamp": {
"description": "Tijdstempel van het laatst beschikbare prijsgegevensinterval", "description": "Tijdstempel van het laatst beschikbare prijsgegevensinterval",
"long_description": "Toont het tijdstempel van het laatst beschikbare prijsgegevensinterval van je Tibber-abonnement" "long_description": "Toont het tijdstempel van het laatst beschikbare prijsgegevensinterval van uw Tibber-abonnement"
}, },
"today_volatility": { "today_volatility": {
"description": "Hoeveel de stroomprijzen vandaag schommelen", "description": "Prijsvolatiliteitsclassificatie voor vandaag",
"long_description": "Geeft aan of de prijzen vandaag stabiel blijven of grote schommelingen hebben. Lage volatiliteit betekent vrij constante prijzen timing maakt weinig uit. Hoge volatiliteit betekent duidelijke prijsverschillen gedurende de dag goede kans om verbruik naar goedkopere periodes te verschuiven. `price_coefficient_variation_%` toont het percentage, `price_spread` de absolute prijsspanne.", "long_description": "Toont hoeveel elektriciteitsprijzen variëren gedurende vandaag op basis van de spreiding (verschil tussen hoogste en laagste prijs). Classificatie: LOW = spreiding < 5ct, MODERATE = 5-15ct, HIGH = 15-30ct, VERY HIGH = >30ct.",
"usage_tips": "Gebruik dit om te beslissen of optimaliseren de moeite waard is. Bij lage volatiliteit kun je apparaten op elk moment laten draaien. Bij hoge volatiliteit bespaar je merkbaar door Best Price-periodes te volgen." "usage_tips": "Gebruik dit om te bepalen of prijsgebaseerde optimalisatie de moeite waard is. Bijvoorbeeld, met een balkonbatterij met 15% efficiëntieverlies is optimalisatie alleen zinvol wanneer volatiliteit ten minste MODERATE is. Maak automatiseringen die volatiliteit controleren voordat u laad-/ontlaadcycli plant."
}, },
"tomorrow_volatility": { "tomorrow_volatility": {
"description": "Hoeveel de stroomprijzen morgen zullen schommelen", "description": "Prijsvolatiliteitsclassificatie voor morgen",
"long_description": "Geeft aan of de prijzen morgen stabiel blijven of grote schommelingen hebben. Beschikbaar zodra de gegevens voor morgen zijn gepubliceerd (meestal 13:0014:00 CET). Lage volatiliteit betekent vrij constante prijzen timing is niet kritisch. Hoge volatiliteit betekent duidelijke prijsverschillen gedurende de dag goede kans om energie-intensieve taken te plannen. `price_coefficient_variation_%` toont het percentage, `price_spread` de absolute prijsspanne.", "long_description": "Toont hoeveel elektriciteitsprijzen zullen variëren gedurende morgen op basis van de spreiding (verschil tussen hoogste en laagste prijs). Wordt onbeschikbaar totdat de gegevens van morgen zijn gepubliceerd (meestal 13:00-14:00 CET).",
"usage_tips": "Gebruik dit om het verbruik van morgen te plannen. Hoge volatiliteit? Plan flexibele lasten in Best Price-periodes. Lage volatiliteit? Laat apparaten draaien wanneer het jou uitkomt." "usage_tips": "Gebruik dit voor vooruitplanning van het energieverbruik van morgen. Als morgen HIGH of VERY HIGH volatiliteit heeft, is het de moeite waard om de timing van energieverbruik te optimaliseren. Bij LOW kunt u apparaten op elk moment gebruiken zonder significante kostenverschillen."
}, },
"next_24h_volatility": { "next_24h_volatility": {
"description": "Hoeveel de prijzen de komende 24 uur zullen schommelen", "description": "Prijsvolatiliteitsclassificatie voor de rollende volgende 24 uur",
"long_description": "Geeft de prijsvolatiliteit aan voor een rollend 24-uursvenster vanaf nu (wordt elke 15 minuten bijgewerkt). Lage volatiliteit betekent vrij constante prijzen. Hoge volatiliteit betekent merkbare prijsschommelingen en dus optimalisatiemogelijkheden. In tegenstelling tot vandaag/morgen-sensoren overschrijdt deze daggrenzen en geeft een doorlopende vooruitblik. `price_coefficient_variation_%` toont het percentage, `price_spread` de absolute prijsspanne.", "long_description": "Toont hoeveel elektriciteitsprijzen variëren in de volgende 24 uur vanaf nu (rollend venster). Dit overschrijdt daggrenzen en wordt elke 15 minuten bijgewerkt, wat een vooruitkijkende volatiliteitsbeoordeling biedt onafhankelijk van kalenderdagen.",
"usage_tips": "Het beste voor beslissingen in real-time. Gebruik bij het plannen van batterijladen of andere flexibele lasten die over middernacht kunnen lopen. Biedt een consistent 24-uurs beeld, los van de kalenderdag." "usage_tips": "Beste sensor voor realtime optimalisatiebeslissingen. In tegenstelling tot vandaag/morgen-sensoren die om middernacht wisselen, biedt deze een continue 24-uurs volatiliteitsbeoordeling. Gebruik voor batterijlaadstrategieën die over daggrenzen heen gaan."
}, },
"today_tomorrow_volatility": { "today_tomorrow_volatility": {
"description": "Gecombineerde prijsvolatiliteit voor vandaag en morgen", "description": "Gecombineerde prijsvolatiliteitsclassificatie voor vandaag en morgen",
"long_description": "Geeft de totale volatiliteit weer wanneer vandaag en morgen samen worden bekeken (zodra morgengegevens beschikbaar zijn). Toont of er duidelijke prijsverschillen over de daggrens heen zijn. Valt terug naar alleen vandaag als morgengegevens ontbreken. Handig voor meerdaagse optimalisatie. `price_coefficient_variation_%` toont het percentage, `price_spread` de absolute prijsspanne.", "long_description": "Toont volatiliteit over zowel vandaag als morgen gecombineerd (wanneer de gegevens van morgen beschikbaar zijn). Biedt een uitgebreid beeld van prijsvariatie over maximaal 48 uur. Valt terug op alleen vandaag wanneer de gegevens van morgen nog niet beschikbaar zijn.",
"usage_tips": "Gebruik voor taken die meerdere dagen beslaan. Kijk of de prijsverschillen groot genoeg zijn om plannen op te baseren. De afzonderlijke dag-sensoren tonen per-dag bijdragen als je meer detail wilt." "usage_tips": "Gebruik dit voor meerdaagse planning en om te begrijpen of prijskansen bestaan over de daggrens heen. De 'today_volatility' en 'tomorrow_volatility' breakdown-attributen tonen individuele dagbijdragen. Nuttig voor het plannen van laadsessies die middernacht kunnen overschrijden."
}, },
"data_lifecycle_status": { "price_forecast": {
"description": "Huidige status van prijsgegevenslevenscyclus en caching", "description": "Prognose van aanstaande elektriciteitsprijzen",
"long_description": "Toont of de integratie gebruikmaakt van gecachte gegevens of verse gegevens van de API. Toont huidige levenscyclusstatus: 'cached' (gebruikt opgeslagen gegevens), 'fresh' (net opgehaald van API), 'refreshing' (momenteel aan het ophalen), 'searching_tomorrow' (actief aan het zoeken naar morgengegevens na 13:00), 'turnover_pending' (binnen 15 minuten voor middernacht, 23:45-00:00), of 'error' (ophalen mislukt). Bevat uitgebreide attributen zoals cache-leeftijd, volgende API-poll-tijd, gegevensvolledigheid en API-aanroepstatistieken.", "long_description": "Toont aanstaande elektriciteitsprijzen voor toekomstige intervallen in een formaat dat gemakkelijk te gebruiken is in dashboards",
"usage_tips": "Gebruik deze diagnostische sensor om gegevensfrisheid en API-aanroeppatronen te begrijpen. Controleer het 'cache_age'-attribuut om te zien hoe oud de huidige gegevens zijn. Monitor 'next_api_poll' om te weten wanneer de volgende update is gepland. Gebruik 'data_completeness' om te zien of gisteren/vandaag/morgen gegevens beschikbaar zijn. De 'api_calls_today'-teller helpt API-gebruik bij te houden. Perfect voor probleemoplossing of begrip van integratiegedrag." "usage_tips": "Gebruik de attributen van deze entiteit om aanstaande prijzen weer te geven in grafieken of aangepaste kaarten. Toegang tot 'intervals' voor alle toekomstige intervallen of 'hours' voor uuroverzichten."
},
"best_price_end_time": {
"description": "Totale lengte van huidige of volgende voordelige periode (state in uren, attribuut in minuten)",
"long_description": "Toont hoe lang de voordelige periode duurt. State gebruikt uren (float) voor een leesbare UI; attribuut `period_duration_minutes` behoudt afgeronde minuten voor automatiseringen. Actief → duur van de huidige periode, anders de volgende.",
"usage_tips": "UI kan 1,5 u tonen terwijl `period_duration_minutes` = 90 voor automatiseringen blijft."
},
"best_price_period_duration": {
"description": "Lengte van huidige/volgende goedkope periode",
"long_description": "Totale duur van huidige of volgende goedkope periode. De state wordt weergegeven in uren (bijv. 1,5 u) voor gemakkelijk aflezen in de UI, terwijl het attribuut `period_duration_minutes` dezelfde waarde in minuten levert (bijv. 90) voor automatiseringen. Deze waarde vertegenwoordigt de **volledige geplande duur** van de periode en is constant gedurende de gehele periode, zelfs als de resterende tijd (remaining_minutes) afneemt.",
"usage_tips": "Combineer met remaining_minutes om te berekenen wanneer langlopende apparaten moeten worden gestopt: Periode is `period_duration_minutes - remaining_minutes` minuten geleden gestart. Dit attribuut ondersteunt energie-optimalisatiestrategieën door te helpen bij het plannen van hoog-verbruiksactiviteiten binnen goedkope periodes."
},
"best_price_remaining_minutes": {
"description": "Resterende tijd in huidige goedkope periode",
"long_description": "Toont hoeveel tijd er nog overblijft in de huidige goedkope periode. De state wordt weergegeven in uren (bijv. 0,75 u) voor gemakkelijk aflezen in dashboards, terwijl het attribuut `remaining_minutes` dezelfde tijd in minuten levert (bijv. 45) voor automatiseringsvoorwaarden. **Afteltimer**: Deze waarde neemt elke minuut af tijdens een actieve periode. Geeft 0 terug wanneer geen goedkope periode actief is. Werkt elke minuut bij.",
"usage_tips": "Voor automatiseringen: Gebruik attribuut `remaining_minutes` zoals 'Als remaining_minutes > 60, start vaatwasser nu (genoeg tijd om te voltooien)' of 'Als remaining_minutes < 15, rond huidige cyclus binnenkort af'. UI toont gebruiksvriendelijke uren (bijv. 1,25 u). Waarde 0 geeft aan dat geen goedkope periode actief is."
},
"best_price_progress": {
"description": "Voortgang door huidige goedkope periode (0% wanneer inactief)",
"long_description": "Toont voortgang door de huidige goedkope periode als 0-100%. Geeft 0% terug wanneer geen periode actief is. Werkt elke minuut bij. 0% betekent periode net gestart, 100% betekent dat deze bijna eindigt.",
"usage_tips": "Geweldig voor visuele voortgangsbalken. Gebruik in automatiseringen: 'Als progress > 0 EN progress > 75, stuur melding dat goedkope periode bijna eindigt'. Waarde 0 geeft aan dat geen periode actief is."
},
"best_price_next_start_time": {
"description": "Totale lengte van huidige of volgende dure periode (state in uren, attribuut in minuten)",
"long_description": "Toont hoe lang de dure periode duurt. State gebruikt uren (float) voor de UI; attribuut `period_duration_minutes` behoudt afgeronde minuten voor automatiseringen. Actief → duur van de huidige periode, anders de volgende.",
"usage_tips": "UI kan 0,75 u tonen terwijl `period_duration_minutes` = 45 voor automatiseringen blijft."
},
"best_price_next_in_minutes": {
"description": "Resterende tijd in huidige dure periode (state in uren, attribuut in minuten)",
"long_description": "Toont hoeveel tijd er nog over is. State gebruikt uren (float); attribuut `remaining_minutes` behoudt afgeronde minuten voor automatiseringen. Geeft 0 terug wanneer er geen periode actief is. Werkt elke minuut bij.",
"usage_tips": "Gebruik `remaining_minutes` voor drempels (bijv. > 60) terwijl de state in uren goed leesbaar blijft."
},
"peak_price_end_time": {
"description": "Tijd tot volgende dure periode (state in uren, attribuut in minuten)",
"long_description": "Toont hoe lang het duurt tot de volgende dure periode start. State gebruikt uren (float); attribuut `next_in_minutes` behoudt afgeronde minuten voor automatiseringen. Tijdens een actieve periode is dit de tijd tot de periode na de huidige. 0 tijdens korte overgangen. Werkt elke minuut bij.",
"usage_tips": "Gebruik `next_in_minutes` in automatiseringen (bijv. < 10) terwijl de state in uren leesbaar blijft."
},
"peak_price_period_duration": {
"description": "Totale duur van huidige of volgende dure periode in minuten",
"long_description": "Toont de totale duur van de dure periode in minuten. Tijdens een actieve periode toont dit de volledige lengte van de huidige periode. Wanneer geen periode actief is, toont dit de duur van de volgende komende periode. Voorbeeld: '60 minuten' voor een 1-uur periode.",
"usage_tips": "Gebruik om energiebesparende maatregelen te plannen: 'Als duration > 120, verlaag verwarmingstemperatuur agressiever (lange dure periode)'. Helpt bij het inschatten hoeveel energieverbruik moet worden verminderd."
},
"peak_price_remaining_minutes": {
"description": "Resterende tijd in huidige dure periode",
"long_description": "Toont hoeveel tijd er nog overblijft in de huidige dure periode. De state wordt weergegeven in uren (bijv. 0,75 u) voor gemakkelijk aflezen in dashboards, terwijl het attribuut `remaining_minutes` dezelfde tijd in minuten levert (bijv. 45) voor automatiseringsvoorwaarden. **Afteltimer**: Deze waarde neemt elke minuut af tijdens een actieve periode. Geeft 0 terug wanneer geen dure periode actief is. Werkt elke minuut bij.",
"usage_tips": "Voor automatiseringen: Gebruik attribuut `remaining_minutes` zoals 'Als remaining_minutes > 60, annuleer uitgestelde laadronde' of 'Als remaining_minutes < 15, hervat normaal gebruik binnenkort'. UI toont gebruiksvriendelijke uren (bijv. 1,0 u). Waarde 0 geeft aan dat geen dure periode actief is."
},
"peak_price_progress": {
"description": "Voortgang door huidige dure periode (0% wanneer inactief)",
"long_description": "Toont de voortgang door de huidige dure periode als 0-100%. Geeft 0% terug wanneer geen periode actief is. Werkt elke minuut bij.",
"usage_tips": "Visuele voortgangsindicator in dashboards. Automatisering: 'Als progress > 0 EN progress > 90, bereid normale verwarmingsplanning voor'. Waarde 0 geeft aan dat er geen actieve periode is."
},
"peak_price_next_start_time": {
"description": "Wanneer de volgende dure periode begint",
"long_description": "Toont wanneer de volgende komende dure periode begint. Tijdens een actieve periode toont dit de start van de VOLGENDE periode na de huidige. Geeft alleen 'Onbekend' terug wanneer geen toekomstige periodes zijn geconfigureerd.",
"usage_tips": "Altijd nuttig voor planning: 'Volgende dure periode begint over 2 uur'. Automatisering: 'Wanneer volgende starttijd over 30 minuten is, verlaag verwarmingstemperatuur preventief'."
},
"peak_price_next_in_minutes": {
"description": "Tijd tot volgende dure periode",
"long_description": "Toont hoe lang het duurt tot de volgende dure periode. De state wordt weergegeven in uren (bijv. 0,5 u) voor dashboards, terwijl het attribuut `next_in_minutes` minuten levert (bijv. 30) voor automatiseringsvoorwaarden. Tijdens een actieve periode toont dit de tijd tot de periode NA de huidige. Geeft 0 terug tijdens korte overgangsmomenten. Werkt elke minuut bij.",
"usage_tips": "Voor automatiseringen: Gebruik attribuut `next_in_minutes` zoals 'Als next_in_minutes > 0 EN next_in_minutes < 10, voltooi huidige laadcyclus nu voordat prijzen stijgen'. Waarde > 0 geeft altijd aan dat een toekomstige dure periode is gepland."
},
"home_type": {
"description": "Type woning (appartement, huis enz.)",
"long_description": "Toont het woningtype zoals geconfigureerd in je Tibber-account. Deze metadata kunnen nuttig zijn voor het categoriseren van energieverbruikspatronen.",
"usage_tips": "Gebruik dit voor het organiseren van je smart home-systeem of voor analysedoeleinden."
},
"home_size": {
"description": "Woonoppervlakte in vierkante meters",
"long_description": "Toont de grootte van je woning in vierkante meters zoals geconfigureerd in je Tibber-account. Kan worden gebruikt om energieverbruik per vierkante meter te berekenen.",
"usage_tips": "Gebruik dit in energie-efficiëntieberekeningen: 'Mijn woning verbruikt X kWh per vierkante meter per jaar'."
},
"main_fuse_size": {
"description": "Hoofdzekeringgrootte in ampères",
"long_description": "Toont de capaciteit van je hoofdzekering in ampères. Dit bepaalt de maximale elektrische belasting die je woning tegelijkertijd aankan.",
"usage_tips": "Gebruik dit om overbelasting te voorkomen: 'Als het totale stroomverbruik de zekeringgrootte nadert, stel het starten van extra apparaten uit'."
},
"number_of_residents": {
"description": "Aantal personen dat in de woning woont",
"long_description": "Toont het aantal bewoners zoals geconfigureerd in je Tibber-account. Nuttig voor berekeningen van energieverbruik per persoon.",
"usage_tips": "Gebruik dit voor huishoudelijke energie-analyses: 'Energieverbruik per persoon per dag'."
},
"primary_heating_source": {
"description": "Primair verwarmingssysteemtype",
"long_description": "Toont het type verwarmingssysteem dat in je woning wordt gebruikt zoals geconfigureerd in je Tibber-account. Dit kan een warmtepomp, elektrische verwarming, gas, olie of andere warmtebronnen zijn.",
"usage_tips": "Gebruik dit om verwarmingsgerelateerde automatiseringen te categoriseren of voor energieverbruiksanalyse per verwarmingstype."
},
"grid_company": {
"description": "Naam van je elektriciteitsnetbeheerder",
"long_description": "Toont de naam van het bedrijf dat het elektriciteitsnet in je gebied beheert. Dit is de distributienetwerkbeheerder (DNB) die verantwoordelijk is voor de levering van elektriciteit aan je woning.",
"usage_tips": "Nuttig voor administratieve doeleinden en het oplossen van netgerelateerde problemen."
},
"grid_area_code": {
"description": "Netgebied-identificatiecode",
"long_description": "Toont de code die je elektriciteitsnetgebied identificeert. Deze code wordt door de netbeheerder gebruikt voor routing- en factureringsdoeleinden.",
"usage_tips": "Gebruik dit als administratieve referentie of bij contact met je netbeheerder."
},
"price_area_code": {
"description": "Elektriciteitsprijszonecode",
"long_description": "Toont de code voor je elektriciteitsprijszone (bijv. NO1, NO2, SE3, DK1). Verschillende zones hebben verschillende groothandelselektriciteitsprijzen op basis van regionaal aanbod en vraag.",
"usage_tips": "Gebruik dit om te begrijpen in welke prijsregio je je bevindt. Nuttig bij het vergelijken van prijzen met anderen of het analyseren van regionale prijspatronen."
},
"consumption_ean": {
"description": "EAN-code voor elektriciteitsverbruiksmeting",
"long_description": "Toont de European Article Number (EAN)-code die je elektriciteitsverbruiksmeter uniek identificeert. Deze 18-cijferige code wordt gebruikt voor facturerings- en administratieve doeleinden.",
"usage_tips": "Gebruik dit bij communicatie met je elektriciteitsleverancier of voor administratieve documentatie."
},
"production_ean": {
"description": "EAN-code voor elektriciteitsproductiemeting",
"long_description": "Toont de European Article Number (EAN)-code voor je elektriciteitsproductiemeter (als je zonnepanelen of andere opwekking hebt). Deze code volgt elektriciteit die je terug het net op stuurt.",
"usage_tips": "Relevant als je zonnepanelen of andere elektriciteitsopwekking hebt. Gebruik voor administratieve doeleinden en bij het claimen van teruglevering."
},
"energy_tax_type": {
"description": "Type energiebelasting toegepast",
"long_description": "Toont de energiebelastingcategorie die wordt toegepast op je elektriciteitsverbruik. Belastingtarieven variëren per land en soms per consumententype (particulier, zakelijk enz.).",
"usage_tips": "Gebruik dit voor het begrijpen van de uitsplitsing van je elektriciteitsrekening en voor berekeningen van totale kosten."
},
"vat_type": {
"description": "BTW-categorie (belasting over toegevoegde waarde)",
"long_description": "Toont de BTW-categorie die wordt toegepast op je elektriciteitsverbruik. BTW-tarieven variëren per land en kunnen verschillen voor elektriciteit vergeleken met andere goederen en diensten.",
"usage_tips": "Gebruik dit voor het begrijpen van je elektriciteitsrekening en het berekenen van totale kosten inclusief belastingen."
},
"estimated_annual_consumption": {
"description": "Geschat jaarlijks elektriciteitsverbruik in kWh",
"long_description": "Toont je geschatte jaarlijkse elektriciteitsverbruik in kilowattuur zoals berekend of geconfigureerd in je Tibber-account. Deze schatting wordt gebruikt om daadwerkelijk verbruik te vergelijken met verwachte waarden.",
"usage_tips": "Gebruik dit om te volgen of je daadwerkelijke verbruik boven of onder de verwachtingen ligt. Vergelijk maandelijks verbruik met 1/12 van deze waarde om ongebruikelijke patronen te identificeren."
},
"subscription_status": {
"description": "Status van je Tibber-abonnement",
"long_description": "Geeft aan of je Tibber-abonnement momenteel actief is, beëindigd of wacht op activering. Een 'Actief'-status betekent dat je actief elektriciteit via Tibber afneemt.",
"usage_tips": "Gebruik dit om je abonnementsstatus te monitoren. Stel meldingen in als de status verandert van 'Actief' om ononderbroken service te waarborgen."
},
"chart_data_export": {
"description": "Data-export voor dashboard-integraties",
"long_description": "Deze sensor roept de get_chartdata-service aan met jouw geconfigureerde YAML-configuratie en stelt het resultaat beschikbaar als entiteitsattributen. De status toont 'ready' wanneer data beschikbaar is, 'error' bij fouten, of 'pending' voor de eerste aanroep. Perfekt voor dashboard-integraties zoals ApexCharts die prijsgegevens uit entiteitsattributen moeten lezen.",
"usage_tips": "Configureer de YAML-parameters in de integratie-opties om overeen te komen met jouw get_chartdata-service-aanroep. De sensor wordt automatisch bijgewerkt wanneer prijsgegevens worden bijgewerkt (typisch na middernacht en wanneer gegevens van morgen binnenkomen). Krijg toegang tot de service-responsgegevens direct vanuit de entiteitsattributen - de structuur komt exact overeen met wat get_chartdata retourneert."
},
"chart_metadata": {
"description": "Lichtgewicht metadata voor diagramconfiguratie",
"long_description": "Biedt essentiële diagramconfiguratiewaarden als sensorattributen. Nuttig voor elke grafiekkaart die Y-as-grenzen nodig heeft. De sensor roept get_chartdata aan in alleen-metadata-modus (geen dataverwerking) en extraheert: yaxis_min, yaxis_max (gesuggereerd Y-asbereik voor optimale schaling). De status weerspiegelt het service-aanroepresultaat: 'ready' bij succes, 'error' bij fouten, 'pending' tijdens initialisatie.",
"usage_tips": "Configureer via configuration.yaml onder tibber_prices.chart_metadata_config (optioneel: day, subunit_currency, resolution). De sensor wordt automatisch bijgewerkt bij prijsgegevenswijzigingen. Krijg toegang tot metadata vanuit attributen: yaxis_min, yaxis_max. Gebruik met config-template-card of elk hulpmiddel dat entiteitsattributen leest - perfect voor dynamische diagramconfiguratie zonder handmatige berekeningen."
} }
}, },
"binary_sensor": { "binary_sensor": {
@ -466,7 +264,7 @@
"peak_price_period": { "peak_price_period": {
"description": "Of het huidige interval tot de duurste van de dag behoort", "description": "Of het huidige interval tot de duurste van de dag behoort",
"long_description": "Wordt geactiveerd wanneer de huidige prijs in de top 20% van de prijzen van vandaag ligt", "long_description": "Wordt geactiveerd wanneer de huidige prijs in de top 20% van de prijzen van vandaag ligt",
"usage_tips": "Gebruik dit om te voorkomen dat je apparaten met hoog verbruik draait tijdens dure intervallen" "usage_tips": "Gebruik dit om te voorkomen dat u apparaten met hoog verbruik draait tijdens dure intervallen"
}, },
"best_price_period": { "best_price_period": {
"description": "Of het huidige interval tot de goedkoopste van de dag behoort", "description": "Of het huidige interval tot de goedkoopste van de dag behoort",
@ -477,90 +275,6 @@
"description": "Of de verbinding met de Tibber API werkt", "description": "Of de verbinding met de Tibber API werkt",
"long_description": "Geeft aan of de integratie succesvol verbinding kan maken met de Tibber API", "long_description": "Geeft aan of de integratie succesvol verbinding kan maken met de Tibber API",
"usage_tips": "Gebruik dit om de verbindingsstatus met de Tibber API te monitoren" "usage_tips": "Gebruik dit om de verbindingsstatus met de Tibber API te monitoren"
},
"has_ventilation_system": {
"description": "Of je woning een ventilatiesysteem heeft",
"long_description": "Geeft aan of een ventilatiesysteem is geregistreerd voor je woning in het Tibber-account. Ventilatiesystemen kunnen aanzienlijke elektriciteitsverbruikers zijn die baat kunnen hebben bij slimme planning.",
"usage_tips": "Gebruik dit om ventilatiespecifieke automatiseringen of energiemonitoring in te schakelen. Indien actief, overweeg ventilatie te plannen tijdens perioden met lage prijzen."
},
"realtime_consumption_enabled": {
"description": "Of realtime verbruiksmonitoring actief is",
"long_description": "Geeft aan of realtime elektriciteitsverbruikmonitoring is ingeschakeld en actief voor je Tibber-woning. Dit vereist compatibele meethardware (bijv. Tibber Pulse) en een actief abonnement.",
"usage_tips": "Gebruik dit om te verifiëren dat realtimeverbruiksgegevens beschikbaar zijn. Schakel meldingen in als dit onverwacht verandert naar 'uit', wat wijst op mogelijke hardware- of verbindingsproblemen."
}
},
"number": {
"best_price_flex_override": {
"description": "Maximaal percentage boven de dagelijkse minimumprijs dat intervallen kunnen hebben en nog steeds als 'beste prijs' kwalificeren. Aanbevolen: 15-20 met versoepeling ingeschakeld (standaard), of 25-35 zonder versoepeling. Maximum: 50 (harde limiet voor betrouwbare periodedetectie).",
"long_description": "Wanneer deze entiteit is ingeschakeld, overschrijft de waarde de 'Flexibiliteit'-instelling uit de opties-dialoog voor beste prijs-periodeberekeningen.",
"usage_tips": "Schakel deze entiteit in om beste prijs-detectie dynamisch aan te passen via automatiseringen, bijv. hogere flexibiliteit voor kritieke lasten of strengere eisen voor flexibele apparaten."
},
"best_price_min_distance_override": {
"description": "Minimale procentuele afstand onder het daggemiddelde. Intervallen moeten zo ver onder het gemiddelde liggen om als 'beste prijs' te kwalificeren. Helpt echte lage prijsperioden te onderscheiden van gemiddelde prijzen.",
"long_description": "Wanneer deze entiteit is ingeschakeld, overschrijft de waarde de 'Minimale afstand'-instelling uit de opties-dialoog voor beste prijs-periodeberekeningen.",
"usage_tips": "Verhoog de waarde voor strengere beste prijs-criteria. Verlaag als te weinig perioden worden gedetecteerd."
},
"best_price_min_period_length_override": {
"description": "Minimale periodelengte in 15-minuten intervallen. Perioden korter dan dit worden niet gerapporteerd. Voorbeeld: 2 = minimaal 30 minuten.",
"long_description": "Wanneer deze entiteit is ingeschakeld, overschrijft de waarde de 'Minimale periodelengte'-instelling uit de opties-dialoog voor beste prijs-periodeberekeningen.",
"usage_tips": "Pas aan op typische apparaatlooptijd: 2 (30 min) voor snelle programma's, 4-8 (1-2 uur) voor normale cycli, 8+ voor lange ECO-programma's."
},
"best_price_min_periods_override": {
"description": "Minimum aantal beste prijs-perioden om dagelijks te vinden. Wanneer versoepeling is ingeschakeld, past het systeem automatisch de criteria aan om dit aantal te bereiken.",
"long_description": "Wanneer deze entiteit is ingeschakeld, overschrijft de waarde de 'Minimum periodes'-instelling uit de opties-dialoog voor beste prijs-periodeberekeningen.",
"usage_tips": "Stel dit in op het aantal tijdkritieke taken dat je dagelijks hebt. Voorbeeld: 2 voor twee wasladingen."
},
"best_price_relaxation_attempts_override": {
"description": "Aantal pogingen om de criteria geleidelijk te versoepelen om het minimum aantal perioden te bereiken. Elke poging verhoogt de flexibiliteit met 3 procent. Bij 0 worden alleen basiscriteria gebruikt.",
"long_description": "Wanneer deze entiteit is ingeschakeld, overschrijft de waarde de 'Versoepeling pogingen'-instelling uit de opties-dialoog voor beste prijs-periodeberekeningen.",
"usage_tips": "Hogere waarden maken periodedetectie adaptiever voor dagen met stabiele prijzen. Stel in op 0 om strikte criteria af te dwingen zonder versoepeling."
},
"best_price_gap_count_override": {
"description": "Maximum aantal duurdere intervallen dat mag worden toegestaan tussen goedkope intervallen terwijl ze nog steeds als één aaneengesloten periode tellen. Bij 0 moeten goedkope intervallen opeenvolgend zijn.",
"long_description": "Wanneer deze entiteit is ingeschakeld, overschrijft de waarde de 'Gap tolerantie'-instelling uit de opties-dialoog voor beste prijs-periodeberekeningen.",
"usage_tips": "Verhoog dit voor apparaten met variabele belasting (bijv. warmtepompen) die korte duurdere intervallen kunnen tolereren. Stel in op 0 voor continu goedkope perioden."
},
"peak_price_flex_override": {
"description": "Maximaal percentage onder de dagelijkse maximumprijs dat intervallen kunnen hebben en nog steeds als 'piekprijs' kwalificeren. Dezelfde aanbevelingen als voor beste prijs-flexibiliteit.",
"long_description": "Wanneer deze entiteit is ingeschakeld, overschrijft de waarde de 'Flexibiliteit'-instelling uit de opties-dialoog voor piekprijs-periodeberekeningen.",
"usage_tips": "Gebruik dit om de piekprijs-drempel tijdens runtime aan te passen voor automatiseringen die verbruik tijdens dure uren vermijden."
},
"peak_price_min_distance_override": {
"description": "Minimale procentuele afstand boven het daggemiddelde. Intervallen moeten zo ver boven het gemiddelde liggen om als 'piekprijs' te kwalificeren.",
"long_description": "Wanneer deze entiteit is ingeschakeld, overschrijft de waarde de 'Minimale afstand'-instelling uit de opties-dialoog voor piekprijs-periodeberekeningen.",
"usage_tips": "Verhoog de waarde om alleen extreme prijspieken te vangen. Verlaag om meer dure tijden mee te nemen."
},
"peak_price_min_period_length_override": {
"description": "Minimale periodelengte in 15-minuten intervallen voor piekprijzen. Kortere prijspieken worden niet als perioden gerapporteerd.",
"long_description": "Wanneer deze entiteit is ingeschakeld, overschrijft de waarde de 'Minimale periodelengte'-instelling uit de opties-dialoog voor piekprijs-periodeberekeningen.",
"usage_tips": "Kortere waarden vangen korte prijspieken. Langere waarden focussen op aanhoudende dure perioden."
},
"peak_price_min_periods_override": {
"description": "Minimum aantal piekprijs-perioden om dagelijks te vinden.",
"long_description": "Wanneer deze entiteit is ingeschakeld, overschrijft de waarde de 'Minimum periodes'-instelling uit de opties-dialoog voor piekprijs-periodeberekeningen.",
"usage_tips": "Stel dit in op basis van hoeveel dure perioden je per dag wilt vangen voor automatiseringen."
},
"peak_price_relaxation_attempts_override": {
"description": "Aantal pogingen om de criteria te versoepelen om het minimum aantal piekprijs-perioden te bereiken.",
"long_description": "Wanneer deze entiteit is ingeschakeld, overschrijft de waarde de 'Versoepeling pogingen'-instelling uit de opties-dialoog voor piekprijs-periodeberekeningen.",
"usage_tips": "Verhoog dit als geen perioden worden gevonden op dagen met stabiele prijzen. Stel in op 0 om strikte criteria af te dwingen."
},
"peak_price_gap_count_override": {
"description": "Maximum aantal goedkopere intervallen dat mag worden toegestaan tussen dure intervallen terwijl ze nog steeds als één piekprijs-periode tellen.",
"long_description": "Wanneer deze entiteit is ingeschakeld, overschrijft de waarde de 'Gap tolerantie'-instelling uit de opties-dialoog voor piekprijs-periodeberekeningen.",
"usage_tips": "Hogere waarden vangen langere dure perioden zelfs met korte prijsdips. Stel in op 0 voor strikt aaneengesloten piekprijzen."
}
},
"switch": {
"best_price_enable_relaxation_override": {
"description": "Indien ingeschakeld, worden criteria automatisch versoepeld om het minimum aantal perioden te bereiken. Indien uitgeschakeld, worden alleen perioden gerapporteerd die aan strikte criteria voldoen (mogelijk nul perioden op dagen met stabiele prijzen).",
"long_description": "Wanneer deze entiteit is ingeschakeld, overschrijft de waarde de 'Minimum aantal bereiken'-instelling uit de opties-dialoog voor beste prijs-periodeberekeningen.",
"usage_tips": "Schakel dit in voor gegarandeerde dagelijkse automatiseringsmogelijkheden. Schakel uit als je alleen echt goedkope perioden wilt, ook als dat betekent dat er op sommige dagen geen perioden zijn."
},
"peak_price_enable_relaxation_override": {
"description": "Indien ingeschakeld, worden criteria automatisch versoepeld om het minimum aantal perioden te bereiken. Indien uitgeschakeld, worden alleen echte prijspieken gerapporteerd.",
"long_description": "Wanneer deze entiteit is ingeschakeld, overschrijft de waarde de 'Minimum aantal bereiken'-instelling uit de opties-dialoog voor piekprijs-periodeberekeningen.",
"usage_tips": "Schakel dit in voor consistente piekprijs-waarschuwingen. Schakel uit om alleen extreme prijspieken te vangen."
} }
}, },
"home_types": { "home_types": {
@ -568,16 +282,5 @@
"ROWHOUSE": "Rijhuis", "ROWHOUSE": "Rijhuis",
"HOUSE": "Huis", "HOUSE": "Huis",
"COTTAGE": "Huisje" "COTTAGE": "Huisje"
}, }
"time_units": {
"day": "{count} dag",
"days": "{count} dagen",
"hour": "{count} uur",
"hours": "{count} uur",
"minute": "{count} minuut",
"minutes": "{count} minuten",
"ago": "{parts} geleden",
"now": "nu"
},
"attribution": "Gegevens geleverd door Tibber"
} }

View file

@ -1,32 +1,10 @@
{ {
"apexcharts": {
"title_rating_level": "Prisfaser dagsprogress",
"title_level": "Prisnivå",
"hourly_suffix": "(Ø per timme)",
"best_price_period_name": "Bästa prisperiod",
"peak_price_period_name": "Toppprisperiod",
"notification": {
"metadata_sensor_unavailable": {
"title": "Tibber Prices: ApexCharts YAML genererad med begränsad funktionalitet",
"message": "Du har precis genererat en ApexCharts-kortkonfiguration via Utvecklarverktyg. Diagram-metadata-sensorn är inaktiverad, så den genererade YAML:en visar bara **grundläggande funktionalitet** (auto-skalning, fast gradient vid 50%).\n\n**För full funktionalitet** (optimerad skalning, dynamiska gradientfärger):\n1. [Öppna Tibber Prices-integrationen](https://my.home-assistant.io/redirect/integration/?domain=tibber_prices)\n2. Aktivera 'Chart Metadata'-sensorn\n3. **Generera YAML:en igen** via Utvecklarverktyg\n4. **Ersätt den gamla YAML:en** i din instrumentpanel med den nya versionen\n\n⚠ Det räcker inte att bara aktivera sensorn - du måste regenerera och ersätta YAML-koden!"
},
"missing_cards": {
"title": "Tibber Prices: ApexCharts YAML kan inte användas",
"message": "Du har precis genererat en ApexCharts-kortkonfiguration via Utvecklarverktyg, men den genererade YAML:en **kommer inte att fungera** eftersom nödvändiga anpassade kort saknas.\n\n**Saknade kort:**\n{cards}\n\n**För att använda den genererade YAML:en:**\n1. Klicka på länkarna ovan för att installera de saknade korten från HACS\n2. Starta om Home Assistant (ibland nödvändigt)\n3. **Generera YAML:en igen** via Utvecklarverktyg\n4. Lägg till YAML:en i din instrumentpanel\n\n⚠ Den nuvarande YAML-koden fungerar inte förrän alla kort är installerade!"
}
}
},
"sensor": { "sensor": {
"current_interval_price": { "current_price": {
"description": "Det nuvarande elpriset per kWh", "description": "Det nuvarande elpriset per kWh",
"long_description": "Visar nuvarande pris per kWh från ditt Tibber-abonnemang", "long_description": "Visar nuvarande pris per kWh från ditt Tibber-abonnemang",
"usage_tips": "Använd detta för att spåra priser eller skapa automationer som körs när el är billig" "usage_tips": "Använd detta för att spåra priser eller skapa automationer som körs när el är billig"
}, },
"current_interval_price_base": {
"description": "Nuvarande elpris i huvudvaluta (EUR/kWh, NOK/kWh, osv.) för Energipanelen",
"long_description": "Visar nuvarande pris per kWh i huvudvaluta-enheter (t.ex. EUR/kWh istället för ct/kWh, NOK/kWh istället för øre/kWh). Denna sensor är speciellt utformad för användning med Home Assistants Energipanel, som kräver priser i standardvalutaenheter.",
"usage_tips": "Använd denna sensor när du konfigurerar Energipanelen under Inställningar → Instrumentpaneler → Energi. Välj denna sensor som 'Entitet med nuvarande pris' för att automatiskt beräkna dina energikostnader. Energipanelen multiplicerar din energiförbrukning (kWh) med detta pris för att visa totala kostnader."
},
"next_interval_price": { "next_interval_price": {
"description": "Nästa intervalls elpris per kWh", "description": "Nästa intervalls elpris per kWh",
"long_description": "Visar priset för nästa 15-minuters intervall från ditt Tibber-abonnemang", "long_description": "Visar priset för nästa 15-minuters intervall från ditt Tibber-abonnemang",
@ -37,12 +15,12 @@
"long_description": "Visar priset för föregående 15-minuters intervall från ditt Tibber-abonnemang", "long_description": "Visar priset för föregående 15-minuters intervall från ditt Tibber-abonnemang",
"usage_tips": "Använd detta för att granska tidigare prisändringar eller spåra prishistorik" "usage_tips": "Använd detta för att granska tidigare prisändringar eller spåra prishistorik"
}, },
"current_hour_average_price": { "current_hour_average": {
"description": "Rullande 5-intervalls genomsnittspris per kWh", "description": "Rullande 5-intervalls genomsnittspris per kWh",
"long_description": "Visar genomsnittspriset per kWh beräknat från 5 intervaller: 2 föregående, nuvarande och 2 nästa intervaller (ungefär 75 minuter totalt). Detta ger ett utjämnat 'timpris' som anpassar sig när tiden går, istället för att vara fixerat till klockslag.", "long_description": "Visar genomsnittspriset per kWh beräknat från 5 intervaller: 2 föregående, nuvarande och 2 nästa intervaller (ungefär 75 minuter totalt). Detta ger ett utjämnat 'timpris' som anpassar sig när tiden går, istället för att vara fixerat till klockslag.",
"usage_tips": "Använd detta för en stabilare prisindikator som jämnar ut kortsiktiga fluktuationer medan den fortfarande är responsiv till prisändringar. Bättre än fasta timpriser för konsumtionsbeslut." "usage_tips": "Använd detta för en stabilare prisindikator som jämnar ut kortsiktiga fluktuationer medan den fortfarande är responsiv till prisändringar. Bättre än fasta timpriser för konsumtionsbeslut."
}, },
"next_hour_average_price": { "next_hour_average": {
"description": "Rullande 5-intervalls genomsnittspris för nästa timme per kWh", "description": "Rullande 5-intervalls genomsnittspris för nästa timme per kWh",
"long_description": "Visar genomsnittspriset per kWh beräknat från 5 intervaller centrerade en timme framåt: ungefär intervaller +2 till +6 från nu (täcker minuter +30 till +105). Detta ger ett framåtblickande utjämnat 'timpris' för konsumtionsplanering.", "long_description": "Visar genomsnittspriset per kWh beräknat från 5 intervaller centrerade en timme framåt: ungefär intervaller +2 till +6 från nu (täcker minuter +30 till +105). Detta ger ett framåtblickande utjämnat 'timpris' för konsumtionsplanering.",
"usage_tips": "Använd detta för att förutse prisändringar nästa timme. Användbart för att schemalägga högkonsumtionsaktiviteter som laddning av elfordon, körning av diskmaskiner eller värmesystem." "usage_tips": "Använd detta för att förutse prisändringar nästa timme. Användbart för att schemalägga högkonsumtionsaktiviteter som laddning av elfordon, körning av diskmaskiner eller värmesystem."
@ -58,9 +36,9 @@
"usage_tips": "Använd detta för att undvika att köra apparater under topppristider" "usage_tips": "Använd detta för att undvika att köra apparater under topppristider"
}, },
"average_price_today": { "average_price_today": {
"description": "Typiskt elpris för idag per kWh (konfigurerbart visningsformat)", "description": "Det genomsnittliga elpriset för idag per kWh",
"long_description": "Visar priset per kWh för nuvarande dag från ditt Tibber-abonnemang. **Som standard visar statusen medianen** (motståndskraftig mot extrema prispikar, visar typisk prisnåvå). Du kan ändra detta i integrationsinstllningarna för att visa det aritmetiska medelvärdet istället. Det alternativa värdet är tillgängligt som attribut.", "long_description": "Visar genomsnittspriset per kWh för nuvarande dag från ditt Tibber-abonnemang",
"usage_tips": "Använd detta som baslinje för att jämföra nuvarande priser. För beräkningar använd: {{ state_attr('sensor.average_price_today', 'price_mean') }}" "usage_tips": "Använd detta som baslinje för att jämföra nuvarande priser"
}, },
"lowest_price_tomorrow": { "lowest_price_tomorrow": {
"description": "Det lägsta elpriset för imorgon per kWh", "description": "Det lägsta elpriset för imorgon per kWh",
@ -73,49 +51,19 @@
"usage_tips": "Använd detta för att undvika att köra apparater under morgondagens topppristider. Användbart för att planera runt dyra perioder." "usage_tips": "Använd detta för att undvika att köra apparater under morgondagens topppristider. Användbart för att planera runt dyra perioder."
}, },
"average_price_tomorrow": { "average_price_tomorrow": {
"description": "Typiskt elpris för imorgon per kWh (konfigurerbart visningsformat)", "description": "Det genomsnittliga elpriset för imorgon per kWh",
"long_description": "Visar priset per kWh för morgondagen från ditt Tibber-abonnemang. **Som standard visar statusen medianen** (motståndskraftig mot extrema prispikar). Du kan ändra detta i integrationsinstllningarna för att visa det aritmetiska medelvärdet istället. Det alternativa värdet är tillgängligt som attribut. Denna sensor blir otillgänglig tills morgondagens data publiceras av Tibber (vanligtvis runt 13:00-14:00 CET).", "long_description": "Visar genomsnittspriset per kWh för morgondagen från ditt Tibber-abonnemang. Denna sensor blir otillgänglig tills morgondagens data publiceras av Tibber (vanligtvis runt 13:00-14:00 CET).",
"usage_tips": "Använd detta som baslinje för att jämföra morgondagens priser och planera konsumtion. Jämför med dagens median för att se om morgondagen kommer att bli dyrare eller billigare totalt sett." "usage_tips": "Använd detta som baslinje för att jämföra morgondagens priser och planera konsumtion. Jämför med dagens genomsnitt för att se om morgondagen kommer att bli dyrare eller billigare totalt sett."
},
"yesterday_price_level": {
"description": "Aggregerad prisnivå för igår",
"long_description": "Visar den aggregerade prisnivån för alla intervall igår. Använder samma logik som timsensorerna för att bestämma den totala prisnivån för hela dagen.",
"usage_tips": "Använd detta för att förstå den övergripande prissituationen igår. Jämför med idag för att se dagliga trender."
},
"today_price_level": {
"description": "Aggregerad prisnivå för idag",
"long_description": "Visar den aggregerade prisnivån för alla intervall idag. Använder samma logik som timsensorerna för att bestämma den totala prisnivån för hela dagen.",
"usage_tips": "Använd detta för att förstå dagens prissituation snabbt. Praktiskt för snabba bedömningar om dagen i allmänhet är billig eller dyr."
},
"tomorrow_price_level": {
"description": "Aggregerad prisnivå för imorgon",
"long_description": "Visar den aggregerade prisnivån för alla intervall imorgon. Använder samma logik som timsensorerna för att bestämma den totala prisnivån för hela dagen. Denna sensor blir otillgänglig tills morgondagens data publiceras av Tibber (vanligtvis runt 13:00-14:00 CET).",
"usage_tips": "Använd detta för att förstå imorgonens prissituation. Jämför med idag för att se om imorgon blir mer eller mindre gynnsamt för energiförbrukning."
},
"yesterday_price_rating": {
"description": "Aggregerad prisvärdering för igår",
"long_description": "Visar den aggregerade prisvärderingen (låg/normal/hög) för alla intervall igår, baserat på dina konfigurerade tröskelvärden. Använder samma logik som timsensorerna för att bestämma den totala värderingen för hela dagen.",
"usage_tips": "Använd detta för att förstå igårens prissituation i förhållande till dina personliga tröskelvärden. Jämför med idag för trendanalys."
},
"today_price_rating": {
"description": "Aggregerad prisvärdering för idag",
"long_description": "Visar den aggregerade prisvärderingen (låg/normal/hög) för alla intervall idag, baserat på dina konfigurerade tröskelvärden. Använder samma logik som timsensorerna för att bestämme den totala värderingen för hela dagen.",
"usage_tips": "Använd detta för att snabbt bedöma dagens prissituation i förhållande till dina personliga tröskelvärden. Hjälper till att fatta förbrukningsbeslut för innevarande dag."
},
"tomorrow_price_rating": {
"description": "Aggregerad prisvärdering för imorgon",
"long_description": "Visar den aggregerade prisvärderingen (låg/normal/hög) för alla intervall imorgon, baserat på dina konfigurerade tröskelvärden. Använder samma logik som timsensorerna för att bestämma den totala värderingen för hela dagen. Denna sensor blir otillgänglig tills morgondagens data publiceras av Tibber (vanligtvis runt 13:00-14:00 CET).",
"usage_tips": "Använd detta för att planera imorgonens energiförbrukning baserat på dina personliga priströskelvärden. Jämför med idag för att avgöra om du ska skjuta upp förbrukning till imorgon eller använda energi idag."
}, },
"trailing_price_average": { "trailing_price_average": {
"description": "Typiskt elpris för de senaste 24 timmarna per kWh (konfigurerbart visningsformat)", "description": "Det genomsnittliga elpriset för de senaste 24 timmarna per kWh",
"long_description": "Visar priset per kWh beräknat från de senaste 24 timmarna. **Som standard visar statusen medianen** (motståndskraftig mot extrema prispikar, visar typisk prisnåvå). Du kan ändra detta i integrationsinstllningarna för att visa det aritmetiska medelvärdet istället. Det alternativa värdet är tillgängligt som attribut. Uppdateras var 15:e minut.", "long_description": "Visar genomsnittspriset per kWh beräknat från de senaste 24 timmarna (rullande genomsnitt) från ditt Tibber-abonnemang. Detta ger ett rullande genomsnitt som uppdateras var 15:e minut baserat på historiska data.",
"usage_tips": "Använd statusvärdet för att se den typiska nuvarande prisnåvån. För kostnadsberäkningar använd: {{ state_attr('sensor.trailing_price_average', 'price_mean') }}" "usage_tips": "Använd detta för att jämföra nuvarande priser mot senaste trender. Ett nuvarande pris som ligger väsentligt över detta genomsnitt kan indikera ett bra tillfälle att minska konsumtionen."
}, },
"leading_price_average": { "leading_price_average": {
"description": "Typiskt elpris för nästa 24 timmar per kWh (konfigurerbart visningsformat)", "description": "Det genomsnittliga elpriset för nästa 24 timmar per kWh",
"long_description": "Visar priset per kWh beräknat från nästa 24 timmar. **Som standard visar statusen medianen** (motståndskraftig mot extrema prispikar, visar förväntad prisnåvå). Du kan ändra detta i integrationsinstllningarna för att visa det aritmetiska medelvärdet istället. Det alternativa värdet är tillgängligt som attribut.", "long_description": "Visar genomsnittspriset per kWh beräknat från nästa 24 timmar (framåtblickande genomsnitt) från ditt Tibber-abonnemang. Detta ger ett framåtblickande genomsnitt baserat på tillgängliga prognosdata.",
"usage_tips": "Använd statusvärdet för att se den typiska kommande prisnåvån. För kostnadsberäkningar använd: {{ state_attr('sensor.leading_price_average', 'price_mean') }}" "usage_tips": "Använd detta för att planera energianvändning. Om nuvarande pris är under det framåtblickande genomsnittet kan det vara ett bra tillfälle att köra energikrävande apparater."
}, },
"trailing_price_min": { "trailing_price_min": {
"description": "Det minsta elpriset för de senaste 24 timmarna per kWh", "description": "Det minsta elpriset för de senaste 24 timmarna per kWh",
@ -137,7 +85,7 @@
"long_description": "Visar maximipriset per kWh från nästa 24 timmar (framåtblickande maximum) från ditt Tibber-abonnemang. Detta ger det högsta priset som förväntas nästa 24 timmar baserat på prognosdata.", "long_description": "Visar maximipriset per kWh från nästa 24 timmar (framåtblickande maximum) från ditt Tibber-abonnemang. Detta ger det högsta priset som förväntas nästa 24 timmar baserat på prognosdata.",
"usage_tips": "Använd detta för att undvika att köra apparater under kommande toppprisperioder." "usage_tips": "Använd detta för att undvika att köra apparater under kommande toppprisperioder."
}, },
"current_interval_price_level": { "price_level": {
"description": "Den nuvarande prisnivåklassificeringen", "description": "Den nuvarande prisnivåklassificeringen",
"long_description": "Visar Tibbers klassificering av nuvarande pris jämfört med historiska priser", "long_description": "Visar Tibbers klassificering av nuvarande pris jämfört med historiska priser",
"usage_tips": "Använd detta för att skapa automationer baserade på relativa prisnivåer istället för absoluta priser" "usage_tips": "Använd detta för att skapa automationer baserade på relativa prisnivåer istället för absoluta priser"
@ -162,7 +110,7 @@
"long_description": "Visar median prisnivå över 5 intervaller centrerade en timme framåt. Hjälper att planera konsumtion baserat på kommande pristrender istället för ögonblickliga framtida priser.", "long_description": "Visar median prisnivå över 5 intervaller centrerade en timme framåt. Hjälper att planera konsumtion baserat på kommande pristrender istället för ögonblickliga framtida priser.",
"usage_tips": "Använd för att schemalägga aktiviteter för nästa timme baserat på en utjämnad prisnivåprognos." "usage_tips": "Använd för att schemalägga aktiviteter för nästa timme baserat på en utjämnad prisnivåprognos."
}, },
"current_interval_price_rating": { "price_rating": {
"description": "Hur nuvarande intervalls pris jämförs med historiska data", "description": "Hur nuvarande intervalls pris jämförs med historiska data",
"long_description": "Visar hur nuvarande intervalls pris jämförs med historiska prisdata som en procentsats", "long_description": "Visar hur nuvarande intervalls pris jämförs med historiska prisdata som en procentsats",
"usage_tips": "En positiv procentsats betyder att nuvarande pris är över genomsnittet, negativ betyder under genomsnittet" "usage_tips": "En positiv procentsats betyder att nuvarande pris är över genomsnittet, negativ betyder under genomsnittet"
@ -189,8 +137,8 @@
}, },
"next_avg_1h": { "next_avg_1h": {
"description": "Genomsnittspris för nästa 1 timme (endast framåt från nästa intervall)", "description": "Genomsnittspris för nästa 1 timme (endast framåt från nästa intervall)",
"long_description": "Framåtblickande genomsnitt: Visar genomsnitt av nästa 4 intervaller (1 timme) från och med NÄSTA 15-minuters intervall (inte inklusive nuvarande). Skiljer sig från current_hour_average_price som inkluderar tidigare intervaller. Använd för absolut priströskelpla nering.", "long_description": "Framåtblickande genomsnitt: Visar genomsnitt av nästa 4 intervaller (1 timme) från och med NÄSTA 15-minuters intervall (inte inklusive nuvarande). Skiljer sig från current_hour_average som inkluderar tidigare intervaller. Använd för absolut priströskelpla nering.",
"usage_tips": "Absolut priströskel: Starta endast apparater när genomsnittet stannar under ditt maximalt acceptabla pris (t.ex. under 0,25 EUR/kWh). Kombinera med trendsensor för optimal timing. Obs: Detta är INTE en ersättning för timpriser - använd current_hour_average_price för det." "usage_tips": "Absolut priströskel: Starta endast apparater när genomsnittet stannar under ditt maximalt acceptabla pris (t.ex. under 0,25 EUR/kWh). Kombinera med trendsensor för optimal timing. Obs: Detta är INTE en ersättning för timpriser - använd current_hour_average för det."
}, },
"next_avg_2h": { "next_avg_2h": {
"description": "Genomsnittspris för nästa 2 timmar", "description": "Genomsnittspris för nästa 2 timmar",
@ -267,16 +215,6 @@
"long_description": "Jämför nuvarande intervallpris med genomsnitt av nästa 12 timmar (48 intervaller). Stigande om framtid är >5% högre, fallande om >5% lägre, annars stabil.", "long_description": "Jämför nuvarande intervallpris med genomsnitt av nästa 12 timmar (48 intervaller). Stigande om framtid är >5% högre, fallande om >5% lägre, annars stabil.",
"usage_tips": "Relativ optimering: Långsiktiga strategiska beslut. 'fallande' = avsevärt bättre priser kommer ikväll/imorgon. Hittar optimal timing i vilket marknadsläge som helst. Bäst kombinerad med avg-sensor prisgräns." "usage_tips": "Relativ optimering: Långsiktiga strategiska beslut. 'fallande' = avsevärt bättre priser kommer ikväll/imorgon. Hittar optimal timing i vilket marknadsläge som helst. Bäst kombinerad med avg-sensor prisgräns."
}, },
"current_price_trend": {
"description": "Nuvarande pristrend-riktning och hur länge den varar",
"long_description": "Visar nuvarande pristrend (stigande/fallande/stabil) genom att kombinera historiskt momentum (viktad 1h tillbakablick) med framtidsutsikt. Känner igen pågående trender tidigare än endast framtidsanalys. Använder ±3 % momentum-tröskel och volatilitetsanpassad framtidsjämförelse. Beräknar dynamiskt till nästa trendändring (eller 3t standard om ingen ändring på 24t). Status visar nuvarande riktning, attribut visar när den ändras och vad som kommer härnäst.",
"usage_tips": "Statusvisning: Dashboard-synlighet av 'vad händer nu till när'. Perfekt synkroniserad med next_price_trend_change. Exempel: Badge som visar 'Stigande i 2,5t' eller 'Fallande till 16:45'. Bättre än tidsfönster-sensorer eftersom den förstår att du REDAN är i en trend, inte bara förutsäger framtida ändringar. Använd för snabb visuell överblick, inte automationsutlösare."
},
"next_price_trend_change": {
"description": "När nästa betydande pristrendändring kommer att inträffa",
"long_description": "Skannar de nästa 24 timmarna (96 intervaller) för att hitta när pristrenden (stigande/fallande/stabil) kommer att ändras från nuvarande momentum. Bestämmer först nuvarande trend med viktad 1h tillbakablick (känner igen pågående trender), hittar sedan reverseringen. Använder volatilitetsadaptiva tröskelvärden (3 % momentum-detektering, marknadsanpassad framtidsjämförelse). Returnerar tidsstämpeln när ändringen förväntas.",
"usage_tips": "Händelsestyrd automatisering: Utlös åtgärder NÄR trenden ändras, inte OM X timmar. Exempel: 'Ladda EV när nästa trendändring visar fallande priser' eller 'Starta diskmaskin innan priserna stiger'. Kompletterar tidsfönster-sensorer (price_trend_Xh) som svarar på 'KOMMER priserna att vara högre om X timmar?'"
},
"daily_rating": { "daily_rating": {
"description": "Hur dagens priser jämförs med historiska data", "description": "Hur dagens priser jämförs med historiska data",
"long_description": "Visar hur dagens priser jämförs med historiska prisdata som en procentsats", "long_description": "Visar hur dagens priser jämförs med historiska prisdata som en procentsats",
@ -292,169 +230,29 @@
"long_description": "Visar tidsstämpeln för det senaste tillgängliga prisdataintervallet från ditt Tibber-abonnemang" "long_description": "Visar tidsstämpeln för det senaste tillgängliga prisdataintervallet från ditt Tibber-abonnemang"
}, },
"today_volatility": { "today_volatility": {
"description": "Hur mycket elpriserna varierar idag", "description": "Prisvolatilitetsklassificering för idag",
"long_description": "Visar om dagens priser är stabila eller har stora svängningar. Låg volatilitet innebär ganska jämna priser timing spelar liten roll. Hög volatilitet innebär tydliga prisskillnader under dagen bra tillfälle att flytta förbrukning till billigare perioder. `price_coefficient_variation_%` visar procentvärdet, `price_spread` visar den absoluta prisspannet.", "long_description": "Visar hur mycket elpriserna varierar under dagen baserat på spridningen (skillnaden mellan högsta och lägsta pris). Klassificering: LÅG = spridning < 5 öre, MÅTTLIG = 5-15 öre, HÖG = 15-30 öre, MYCKET HÖG = >30 öre.",
"usage_tips": "Använd detta för att avgöra om optimering är värt besväret. Vid låg volatilitet kan du köra enheter när som helst. Vid hög volatilitet sparar du märkbart genom att följa Best Price-perioder." "usage_tips": "Använd detta för att avgöra om prisbaserad optimering är värt besväret. Till exempel, med ett balkongbatteri som har 15% effektivitetsförlust är optimering endast meningsfull när volatiliteten är åtminstone MÅTTLIG. Skapa automationer som kontrollerar volatiliteten innan laddnings-/urladdningscykler planeras."
}, },
"tomorrow_volatility": { "tomorrow_volatility": {
"description": "Hur mycket elpriserna kommer att variera i morgon", "description": "Prisvolatilitetsklassificering för imorgon",
"long_description": "Visar om priserna i morgon blir stabila eller får stora svängningar. Tillgänglig när morgondagens data är publicerad (vanligen 13:0014:00 CET). Låg volatilitet innebär ganska jämna priser timing är inte kritisk. Hög volatilitet innebär tydliga prisskillnader under dagen bra läge att planera energikrävande uppgifter. `price_coefficient_variation_%` visar procentvärdet, `price_spread` visar den absoluta prisspannet.", "long_description": "Visar hur mycket elpriserna kommer att variera under morgondagen baserat på spridningen (skillnaden mellan högsta och lägsta pris). Blir otillgänglig tills morgondagens data publiceras (vanligtvis 13:00-14:00 CET).",
"usage_tips": "Använd för att planera morgondagens förbrukning. Hög volatilitet? Planera flexibla laster i Best Price-perioder. Låg volatilitet? Kör enheter när det passar dig." "usage_tips": "Använd detta för förhandsplanering av morgondagens energianvändning. Om morgondagen har HÖG eller MYCKET HÖG volatilitet är det värt att optimera energiförbrukningstiming. Vid LÅG volatilitet kan du köra enheter när som helst utan betydande kostnadsskillnader."
}, },
"next_24h_volatility": { "next_24h_volatility": {
"description": "Hur mycket priserna varierar de kommande 24 timmarna", "description": "Prisvolatilitetsklassificering för rullande nästa 24 timmar",
"long_description": "Visar prisvolatilitet för ett rullande 24-timmarsfönster från nu (uppdateras var 15:e minut). Låg volatilitet innebär ganska jämna priser. Hög volatilitet innebär märkbara prissvängningar och därmed optimeringsmöjligheter. Till skillnad från idag/i morgon-sensorer korsar den här dagsgränser och ger en kontinuerlig framåtblickande bedömning. `price_coefficient_variation_%` visar procentvärdet, `price_spread` visar den absoluta prisspannet.", "long_description": "Visar hur mycket elpriserna varierar under de nästa 24 timmarna från nu (rullande fönster). Detta korsar daggränser och uppdateras var 15:e minut, vilket ger en framåtblickande volatilitetsbedömning oberoende av kalenderdagar.",
"usage_tips": "Bäst för beslut i realtid. Använd vid planering av batteriladdning eller andra flexibla laster som kan gå över midnatt. Ger en konsekvent 24h-bild oberoende av kalenderdag." "usage_tips": "Bästa sensorn för realtidsoptimeringsbeslut. Till skillnad från idag/imorgon-sensorer som växlar vid midnatt ger detta en kontinuerlig 24t volatilitetsbedömning. Använd för batteriladningsstrategier som sträcker sig över daggränser."
}, },
"today_tomorrow_volatility": { "today_tomorrow_volatility": {
"description": "Kombinerad prisvolatilitet för idag och imorgon", "description": "Kombinerad prisvolatilitetsklassificering för idag och imorgon",
"long_description": "Visar den samlade volatiliteten när idag och imorgon ses tillsammans (när morgondatan finns). Visar om det finns tydliga prisskillnader över dagsgränsen. Faller tillbaka till endast idag om morgondatan saknas. Nyttig för flerdagarsoptimering. `price_coefficient_variation_%` visar procentvärdet, `price_spread` visar den absoluta prisspannet.", "long_description": "Visar volatilitet över både idag och imorgon kombinerat (när morgondagens data är tillgänglig). Ger en utökad vy av prisvariationen som sträcker sig upp till 48 timmar. Faller tillbaka på endast idag när morgondagens data inte är tillgänglig än.",
"usage_tips": "Använd för uppgifter som sträcker sig över flera dagar. Kontrollera om prisskillnaderna är stora nog för att planera efter. De enskilda dag-sensorerna visar bidrag per dag om du behöver mer detaljer." "usage_tips": "Använd detta för flerdagarsplanering och för att förstå om prismöjligheter finns över daggränsen. 'today_volatility' och 'tomorrow_volatility' uppdelningsattributen visar individuella dagsbidrag. Användbart för planering av laddningssessioner som kan sträcka sig över midnatt."
}, },
"data_lifecycle_status": { "price_forecast": {
"description": "Gjeldende tilstand for prisdatalivssyklus og hurtigbufring", "description": "Prognos för kommande elpriser",
"long_description": "Viser om integrasjonen bruker hurtigbufrede data eller ferske data fra API-et. Viser gjeldende livssyklustilstand: 'cached' (bruker lagrede data), 'fresh' (nettopp hentet fra API), 'refreshing' (henter for øyeblikket), 'searching_tomorrow' (søker aktivt etter morgendagens data etter 13:00), 'turnover_pending' (innen 15 minutter før midnatt, 23:45-00:00), eller 'error' (henting mislyktes). Inkluderer omfattende attributter som cache-alder, neste API-spørring, datafullstendighet og API-anropsstatistikk.", "long_description": "Visar kommande elpriser för framtida intervaller i ett format som är enkelt att använda i instrumentpaneler",
"usage_tips": "Bruk denne diagnosesensoren for å forstå dataferskhet og API-anropsmønstre. Sjekk 'cache_age'-attributtet for å se hvor gamle de nåværende dataene er. Overvåk 'next_api_poll' for å vite når neste oppdatering er planlagt. Bruk 'data_completeness' for å se om data for i går/i dag/i morgen er tilgjengelig. 'api_calls_today'-telleren hjelper med å spore API-bruk. Perfekt for feilsøking eller forståelse av integrasjonens oppførsel." "usage_tips": "Använd denna enhets attribut för att visa kommande priser i diagram eller anpassade kort. Få åtkomst till antingen 'intervals' för alla framtida intervaller eller 'hours' för timvisa sammanfattningar."
},
"best_price_end_time": {
"description": "Total längd för nuvarande eller nästa billigperiod (state i timmar, attribut i minuter)",
"long_description": "Visar hur länge billigperioden varar. State använder timmar (decimal) för en läsbar UI; attributet `period_duration_minutes` behåller avrundade minuter för automationer. Aktiv → varaktighet för aktuell period, annars nästa.",
"usage_tips": "UI kan visa 1,5 h medan `period_duration_minutes` = 90 för automationer."
},
"best_price_period_duration": {
"description": "Längd på nuvarande/nästa billigperiod",
"long_description": "Total längd av nuvarande eller nästa billigperiod. State visas i timmar (t.ex. 1,5 h) för enkel avläsning i UI, medan attributet `period_duration_minutes` ger samma värde i minuter (t.ex. 90) för automationer. Detta värde representerar den **fullständigt planerade längden** av perioden och är konstant under hela perioden, även när återstående tid (remaining_minutes) minskar.",
"usage_tips": "Kombinera med remaining_minutes för att beräkna när långvariga enheter ska stoppas: Perioden startade för `period_duration_minutes - remaining_minutes` minuter sedan. Detta attribut stöder energioptimeringsstrategier genom att hjälpa till med att planera högförbruksaktiviteter inom billiga perioder."
},
"best_price_remaining_minutes": {
"description": "Tid kvar i nuvarande billigperiod",
"long_description": "Visar hur mycket tid som återstår i nuvarande billigperiod. State visas i timmar (t.ex. 0,75 h) för enkel avläsning i instrumentpaneler, medan attributet `remaining_minutes` ger samma tid i minuter (t.ex. 45) för automationsvillkor. **Nedräkningstimer**: Detta värde minskar varje minut under en aktiv period. Returnerar 0 när ingen billigperiod är aktiv. Uppdateras varje minut.",
"usage_tips": "För automationer: Använd attribut `remaining_minutes` som 'Om remaining_minutes > 60, starta diskmaskin nu (tillräckligt med tid för att slutföra)' eller 'Om remaining_minutes < 15, avsluta nuvarande cykel snart'. UI visar användarvänliga timmar (t.ex. 1,25 h). Värde 0 indikerar ingen aktiv billigperiod."
},
"best_price_progress": {
"description": "Framsteg genom nuvarande billigperiod (0% när inaktiv)",
"long_description": "Visar framsteg genom nuvarande billigperiod som 0-100%. Returnerar 0% när ingen period är aktiv. Uppdateras varje minut. 0% betyder att perioden just startade, 100% betyder att den snart slutar.",
"usage_tips": "Perfekt för visuella framstegsindikatorer. Använd i automationer: 'Om progress > 0 OCH progress > 75, skicka avisering om att billigperioden snart slutar'. Värde 0 indikerar ingen aktiv period."
},
"best_price_next_start_time": {
"description": "Total längd för nuvarande eller nästa dyrperiod (state i timmar, attribut i minuter)",
"long_description": "Visar hur länge den dyra perioden varar. State använder timmar (decimal) för UI; attributet `period_duration_minutes` behåller avrundade minuter för automationer. Aktiv → varaktighet för aktuell period, annars nästa.",
"usage_tips": "UI kan visa 0,75 h medan `period_duration_minutes` = 45 för automationer."
},
"best_price_next_in_minutes": {
"description": "Tid kvar i nuvarande dyrperiod (state i timmar, attribut i minuter)",
"long_description": "Visar hur mycket tid som återstår. State använder timmar (decimal); attributet `remaining_minutes` behåller avrundade minuter för automationer. Returnerar 0 när ingen period är aktiv. Uppdateras varje minut.",
"usage_tips": "Använd `remaining_minutes` för trösklar (t.ex. > 60) medan state är lätt att läsa i timmar."
},
"peak_price_end_time": {
"description": "Tid tills nästa dyrperiod startar (state i timmar, attribut i minuter)",
"long_description": "Visar hur länge tills nästa dyrperiod startar. State använder timmar (decimal); attributet `next_in_minutes` behåller avrundade minuter för automationer. Under en aktiv period visar detta tiden till perioden efter den aktuella. 0 under korta övergångar. Uppdateras varje minut.",
"usage_tips": "Använd `next_in_minutes` i automationer (t.ex. < 10) medan state är lätt att läsa i timmar."
},
"peak_price_period_duration": {
"description": "Längd på nuvarande/nästa dyrperiod",
"long_description": "Total längd av nuvarande eller nästa dyrperiod. State visas i timmar (t.ex. 1,5 h) för enkel avläsning i UI, medan attributet `period_duration_minutes` ger samma värde i minuter (t.ex. 90) för automationer. Detta värde representerar den **fullständigt planerade längden** av perioden och är konstant under hela perioden, även när återstående tid (remaining_minutes) minskar.",
"usage_tips": "Kombinera med remaining_minutes för att beräkna när långvariga enheter ska stoppas: Perioden startade för `period_duration_minutes - remaining_minutes` minuter sedan. Detta attribut stöder energibesparingsstrategier genom att hjälpa till med att planera högförbruksaktiviteter utanför dyra perioder."
},
"peak_price_remaining_minutes": {
"description": "Tid kvar i nuvarande dyrperiod",
"long_description": "Visar hur mycket tid som återstår i nuvarande dyrperiod. State visas i timmar (t.ex. 0,75 h) för enkel avläsning i instrumentpaneler, medan attributet `remaining_minutes` ger samma tid i minuter (t.ex. 45) för automationsvillkor. **Nedräkningstimer**: Detta värde minskar varje minut under en aktiv period. Returnerar 0 när ingen dyrperiod är aktiv. Uppdateras varje minut.",
"usage_tips": "För automationer: Använd attribut `remaining_minutes` som 'Om remaining_minutes > 60, avbryt uppskjuten laddningssession' eller 'Om remaining_minutes < 15, återuppta normal drift snart'. UI visar användarvänliga timmar (t.ex. 1,0 h). Värde 0 indikerar ingen aktiv dyrperiod."
},
"peak_price_progress": {
"description": "Framsteg genom nuvarande dyrperiod (0% när inaktiv)",
"long_description": "Visar framsteg genom nuvarande dyrperiod som 0-100%. Returnerar 0% när ingen period är aktiv. Uppdateras varje minut.",
"usage_tips": "Visuell framstegsindikator i instrumentpaneler. Automation: 'Om progress > 0 OCH progress > 90, förbered normal värmeplanering'. Värde 0 indikerar ingen aktiv period."
},
"peak_price_next_start_time": {
"description": "När nästa dyrperiod startar",
"long_description": "Visar när nästa kommande dyrperiod startar. Under en aktiv period visar detta starten av NÄSTA period efter den nuvarande. Returnerar 'Okänt' endast när inga framtida perioder är konfigurerade.",
"usage_tips": "Alltid användbart för planering: 'Nästa dyrperiod startar om 2 timmar'. Automation: 'När nästa starttid är om 30 minuter, minska värmetemperatur förebyggande'."
},
"peak_price_next_in_minutes": {
"description": "Tid till nästa dyrperiod",
"long_description": "Visar hur länge till nästa dyrperiod. State visas i timmar (t.ex. 0,5 h) för instrumentpaneler, medan attributet `next_in_minutes` ger minuter (t.ex. 30) för automationsvillkor. Under en aktiv period visar detta tiden till perioden EFTER den nuvarande. Returnerar 0 under korta övergångsmoment. Uppdateras varje minut.",
"usage_tips": "För automationer: Använd attribut `next_in_minutes` som 'Om next_in_minutes > 0 OCH next_in_minutes < 10, slutför nuvarande laddcykel nu innan priserna ökar'. Värde > 0 indikerar alltid att en framtida dyrperiod är planerad."
},
"home_type": {
"description": "Bostadstyp (lägenhet, hus osv.)",
"long_description": "Visar bostadstypen som konfigurerats i ditt Tibber-konto. Denna metadata kan vara användbar för att kategorisera energiförbrukningsmönster.",
"usage_tips": "Använd detta för att organisera ditt smarthussystem eller för analysändamål."
},
"home_size": {
"description": "Boyta i kvadratmeter",
"long_description": "Visar storleken på din bostad i kvadratmeter som konfigurerats i ditt Tibber-konto. Kan användas för att beräkna energiförbrukning per kvadratmeter.",
"usage_tips": "Använd detta i energieffektivitetsberäkningar: 'Min bostad använder X kWh per kvadratmeter per år'."
},
"main_fuse_size": {
"description": "Huvudsäkringsstorlek i ampere",
"long_description": "Visar kapaciteten på din huvudsäkring i ampere. Detta bestämmer den maximala elektriska belastningen din bostad kan hantera samtidigt.",
"usage_tips": "Använd detta för att förhindra överbelastning: 'Om total strömförbrukning närmar sig säkringsstorleken, skjut upp start av ytterligare apparater'."
},
"number_of_residents": {
"description": "Antal personer som bor i bostaden",
"long_description": "Visar antalet boende som konfigurerats i ditt Tibber-konto. Användbart för beräkningar av energiförbrukning per person.",
"usage_tips": "Använd detta för hushållets energianalyser: 'Energiförbrukning per person per dag'."
},
"primary_heating_source": {
"description": "Primär värmesystemtyp",
"long_description": "Visar typen av värmesystem som används i din bostad som konfigurerats i ditt Tibber-konto. Detta kan vara en värmepump, elektrisk uppvärmning, gas, olja eller andra värmekällor.",
"usage_tips": "Använd detta för att kategorisera värmerelaterade automationer eller för energiförbrukningsanalys efter värmetyp."
},
"grid_company": {
"description": "Namn på din elnätsoperatör",
"long_description": "Visar namnet på företaget som driver elnätet i ditt område. Detta är distributionssystemoperatören (DSO) som ansvarar för att leverera elektricitet till ditt hem.",
"usage_tips": "Användbart för administrativa ändamål och felsökning av nätrelaterade problem."
},
"grid_area_code": {
"description": "Nätområdets identifieringskod",
"long_description": "Visar koden som identifierar ditt elnätsområde. Denna kod används av nätoperatören för routing- och faktureringsändamål.",
"usage_tips": "Använd detta som administrativ referens eller vid kontakt med din nätoperatör."
},
"price_area_code": {
"description": "Elprisområdeskod",
"long_description": "Visar koden för ditt elprisområde (t.ex. NO1, NO2, SE3, DK1). Olika områden har olika grossistelpriser baserat på regionalt utbud och efterfrågan.",
"usage_tips": "Använd detta för att förstå vilket prisområde du befinner dig i. Användbart vid jämförelse av priser med andra eller analys av regionala prismönster."
},
"consumption_ean": {
"description": "EAN-kod för elförbrukningsmätning",
"long_description": "Visar European Article Number (EAN)-koden som unikt identifierar din elförbrukningsmätare. Denna 18-siffriga kod används för fakturerings- och administrativa ändamål.",
"usage_tips": "Använd detta vid kommunikation med din elleverantör eller för administrativ dokumentation."
},
"production_ean": {
"description": "EAN-kod för elproduktionsmätning",
"long_description": "Visar European Article Number (EAN)-koden för din elproduktionsmätare (om du har solpaneler eller annan produktion). Denna kod spårar elektricitet du matar tillbaka till nätet.",
"usage_tips": "Relevant om du har solpaneler eller annan elproduktion. Använd för administrativa ändamål och vid krav på inmatningsersättning."
},
"energy_tax_type": {
"description": "Typ av energiskatt som tillämpas",
"long_description": "Visar energiskattekategorin som tillämpas på din elförbrukning. Skattesatser varierar per land och ibland per konsumenttyp (privat, kommersiell osv.).",
"usage_tips": "Använd detta för att förstå uppdelningen av din elräkning och för totala kostnadsberäkningar."
},
"vat_type": {
"description": "Momskategori (mervärdesskatt)",
"long_description": "Visar momskategorin som tillämpas på din elförbrukning. Momssatser varierar per land och kan skilja sig för elektricitet jämfört med andra varor och tjänster.",
"usage_tips": "Använd detta för att förstå din elräkning och beräkna totala kostnader inklusive skatter."
},
"estimated_annual_consumption": {
"description": "Uppskattat årligt elförbrukning i kWh",
"long_description": "Visar din uppskattade årliga elförbrukning i kilowattimmar som beräknats eller konfigurerats i ditt Tibber-konto. Denna uppskattning används för att jämföra faktisk förbrukning med förväntade värden.",
"usage_tips": "Använd detta för att spåra om din faktiska förbrukning är över eller under förväntningarna. Jämför månatlig förbrukning med 1/12 av detta värde för att identifiera ovanliga mönster."
},
"subscription_status": {
"description": "Status för ditt Tibber-abonnemang",
"long_description": "Visar om ditt Tibber-abonnemang för närvarande är aktivt, har avslutats eller väntar på aktivering. En status 'Aktiv' betyder att du aktivt tar emot elektricitet genom Tibber.",
"usage_tips": "Använd detta för att övervaka din abonnemangsstatus. Ställ in varningar om statusen ändras från 'Aktiv' för att säkerställa oavbruten service."
},
"chart_data_export": {
"description": "Dataexport för dashboard-integrationer",
"long_description": "Denna sensor anropar get_chartdata-tjänsten med din konfigurerade YAML-konfiguration och exponerar resultatet som entitetsattribut. Statusen visar 'ready' när data är tillgänglig, 'error' vid fel, eller 'pending' före första anropet. Perfekt för dashboard-integrationer som ApexCharts som behöver läsa prisdata från entitetsattribut.",
"usage_tips": "Konfigurera YAML-parametrarna i integrationsalternativen för att matcha ditt get_chartdata-tjänstanrop. Sensorn uppdateras automatiskt när prisdata uppdateras (vanligtvis efter midnatt och när morgondagens data anländer). Få tillgång till tjänstesvarsdata direkt från entitetens attribut - strukturen matchar exakt vad get_chartdata returnerar."
},
"chart_metadata": {
"description": "Lättviktig metadata för diagramkonfiguration",
"long_description": "Tillhandahåller väsentliga diagramkonfigurationsvärden som sensorattribut. Användbart för vilket diagramkort som helst som behöver Y-axelgränser. Sensorn anropar get_chartdata med endast-metadata-läge (ingen databehandling) och extraherar: yaxis_min, yaxis_max (föreslagen Y-axelomfång för optimal skalning). Statusen återspeglar tjänstanropsresultatet: 'ready' vid framgång, 'error' vid fel, 'pending' under initialisering.",
"usage_tips": "Konfigurera via configuration.yaml under tibber_prices.chart_metadata_config (valfritt: day, subunit_currency, resolution). Sensorn uppdateras automatiskt vid pris dataändringar. Få tillgång till metadata från attribut: yaxis_min, yaxis_max. Använd med config-template-card eller vilket verktyg som helst som läser entitetsattribut - perfekt för dynamisk diagramkonfiguration utan manuella beräkningar."
} }
}, },
"binary_sensor": { "binary_sensor": {
@ -477,90 +275,6 @@
"description": "Om anslutningen till Tibber API fungerar", "description": "Om anslutningen till Tibber API fungerar",
"long_description": "Indikerar om integrationen framgångsrikt kan ansluta till Tibber API", "long_description": "Indikerar om integrationen framgångsrikt kan ansluta till Tibber API",
"usage_tips": "Använd detta för att övervaka anslutningsstatus till Tibber API" "usage_tips": "Använd detta för att övervaka anslutningsstatus till Tibber API"
},
"has_ventilation_system": {
"description": "Om din bostad har ventilationssystem",
"long_description": "Indikerar om ett ventilationssystem är registrerat för din bostad i Tibber-kontot. Ventilationssystem kan vara betydande elförbrukare som kan dra nytta av smart schemaläggning.",
"usage_tips": "Använd detta för att aktivera ventilationsspecifika automationer eller energiövervakning. Om aktivt, överväg att schemalägga ventilation under lågprisperioder."
},
"realtime_consumption_enabled": {
"description": "Om realtidsförbrukningsövervakning är aktiv",
"long_description": "Indikerar om realtidsövervakning av elförbrukning är aktiverad och aktiv för ditt Tibber-hem. Detta kräver kompatibel mätutrustning (t.ex. Tibber Pulse) och en aktiv prenumeration.",
"usage_tips": "Använd detta för att verifiera att realtidsförbrukningen är tillgänglig. Aktivera meddelanden om detta oväntat ändras till 'av', vilket indikerar potentiella hårdvaru- eller anslutningsproblem."
}
},
"number": {
"best_price_flex_override": {
"description": "Maximal procent över daglig minimumpris som intervaller kan ha och fortfarande kvalificera som 'bästa pris'. Rekommenderas: 15-20 med lättnad aktiverad (standard), eller 25-35 utan lättnad. Maximum: 50 (hårt tak för tillförlitlig perioddetektering).",
"long_description": "När denna entitet är aktiverad överskriver värdet 'Flexibilitet'-inställningen från alternativ-dialogen för bästa pris-periodberäkningar.",
"usage_tips": "Aktivera denna entitet för att dynamiskt justera bästa pris-detektering via automatiseringar, t.ex. högre flexibilitet för kritiska laster eller striktare krav för flexibla apparater."
},
"best_price_min_distance_override": {
"description": "Minsta procentuella avstånd under dagligt genomsnitt. Intervaller måste vara så långt under genomsnittet för att kvalificera som 'bästa pris'. Hjälper att skilja äkta lågprisperioder från genomsnittspriser.",
"long_description": "När denna entitet är aktiverad överskriver värdet 'Minimiavstånd'-inställningen från alternativ-dialogen för bästa pris-periodberäkningar.",
"usage_tips": "Öka värdet för striktare bästa pris-kriterier. Minska om för få perioder detekteras."
},
"best_price_min_period_length_override": {
"description": "Minsta periodlängd i 15-minuters intervaller. Perioder kortare än detta rapporteras inte. Exempel: 2 = minst 30 minuter.",
"long_description": "När denna entitet är aktiverad överskriver värdet 'Minsta periodlängd'-inställningen från alternativ-dialogen för bästa pris-periodberäkningar.",
"usage_tips": "Anpassa till typisk apparatkörtid: 2 (30 min) för snabbprogram, 4-8 (1-2 timmar) för normala cykler, 8+ för långa ECO-program."
},
"best_price_min_periods_override": {
"description": "Minsta antal bästa pris-perioder att hitta dagligen. När lättnad är aktiverad kommer systemet automatiskt att justera kriterierna för att uppnå detta antal.",
"long_description": "När denna entitet är aktiverad överskriver värdet 'Minsta antal perioder'-inställningen från alternativ-dialogen för bästa pris-periodberäkningar.",
"usage_tips": "Ställ in detta på antalet tidskritiska uppgifter du har dagligen. Exempel: 2 för två tvattmaskinskörningar."
},
"best_price_relaxation_attempts_override": {
"description": "Antal försök att gradvis lätta på kriterierna för att uppnå minsta periodantal. Varje försök ökar flexibiliteten med 3 procent. Vid 0 används endast baskriterier.",
"long_description": "När denna entitet är aktiverad överskriver värdet 'Lättnadsförsök'-inställningen från alternativ-dialogen för bästa pris-periodberäkningar.",
"usage_tips": "Högre värden gör perioddetektering mer adaptiv för dagar med stabila priser. Ställ in på 0 för att tvinga strikta kriterier utan lättnad."
},
"best_price_gap_count_override": {
"description": "Maximalt antal dyrare intervaller som kan tillåtas mellan billiga intervaller medan de fortfarande räknas som en sammanhängande period. Vid 0 måste billiga intervaller vara påföljande.",
"long_description": "När denna entitet är aktiverad överskriver värdet 'Glaptolerans'-inställningen från alternativ-dialogen för bästa pris-periodberäkningar.",
"usage_tips": "Öka detta för apparater med variabel last (t.ex. värmepumpar) som kan tolerera korta dyrare intervaller. Ställ in på 0 för kontinuerligt billiga perioder."
},
"peak_price_flex_override": {
"description": "Maximal procent under daglig maximumpris som intervaller kan ha och fortfarande kvalificera som 'topppris'. Samma rekommendationer som för bästa pris-flexibilitet.",
"long_description": "När denna entitet är aktiverad överskriver värdet 'Flexibilitet'-inställningen från alternativ-dialogen för topppris-periodberäkningar.",
"usage_tips": "Använd detta för att justera topppris-tröskeln vid körtid för automatiseringar som undviker förbrukning under dyra timmar."
},
"peak_price_min_distance_override": {
"description": "Minsta procentuella avstånd över dagligt genomsnitt. Intervaller måste vara så långt över genomsnittet för att kvalificera som 'topppris'.",
"long_description": "När denna entitet är aktiverad överskriver värdet 'Minimiavstånd'-inställningen från alternativ-dialogen för topppris-periodberäkningar.",
"usage_tips": "Öka värdet för att endast fånga extrema pristoppar. Minska för att inkludera fler högpristider."
},
"peak_price_min_period_length_override": {
"description": "Minsta periodlängd i 15-minuters intervaller för topppriser. Kortare pristoppar rapporteras inte som perioder.",
"long_description": "När denna entitet är aktiverad överskriver värdet 'Minsta periodlängd'-inställningen från alternativ-dialogen för topppris-periodberäkningar.",
"usage_tips": "Kortare värden fångar korta pristoppar. Längre värden fokuserar på ihållande högprisperioder."
},
"peak_price_min_periods_override": {
"description": "Minsta antal topppris-perioder att hitta dagligen.",
"long_description": "När denna entitet är aktiverad överskriver värdet 'Minsta antal perioder'-inställningen från alternativ-dialogen för topppris-periodberäkningar.",
"usage_tips": "Ställ in detta baserat på hur många högprisperioder du vill fånga per dag för automatiseringar."
},
"peak_price_relaxation_attempts_override": {
"description": "Antal försök att lätta på kriterierna för att uppnå minsta antal topppris-perioder.",
"long_description": "När denna entitet är aktiverad överskriver värdet 'Lättnadsförsök'-inställningen från alternativ-dialogen för topppris-periodberäkningar.",
"usage_tips": "Öka detta om inga perioder hittas på dagar med stabila priser. Ställ in på 0 för att tvinga strikta kriterier."
},
"peak_price_gap_count_override": {
"description": "Maximalt antal billigare intervaller som kan tillåtas mellan dyra intervaller medan de fortfarande räknas som en topppris-period.",
"long_description": "När denna entitet är aktiverad överskriver värdet 'Glaptolerans'-inställningen från alternativ-dialogen för topppris-periodberäkningar.",
"usage_tips": "Högre värden fångar längre högprisperioder även med korta prisdipp. Ställ in på 0 för strikt sammanhängande topppriser."
}
},
"switch": {
"best_price_enable_relaxation_override": {
"description": "När aktiverad lättas kriterierna automatiskt för att uppnå minsta periodantal. När inaktiverad rapporteras endast perioder som uppfyller strikta kriterier (möjligen noll perioder på dagar med stabila priser).",
"long_description": "När denna entitet är aktiverad överskriver värdet 'Uppnå minimiantal'-inställningen från alternativ-dialogen för bästa pris-periodberäkningar.",
"usage_tips": "Aktivera detta för garanterade dagliga automatiseringsmöjligheter. Inaktivera om du endast vill ha riktigt billiga perioder, även om det innebär inga perioder vissa dagar."
},
"peak_price_enable_relaxation_override": {
"description": "När aktiverad lättas kriterierna automatiskt för att uppnå minsta periodantal. När inaktiverad rapporteras endast äkta pristoppar.",
"long_description": "När denna entitet är aktiverad överskriver värdet 'Uppnå minimiantal'-inställningen från alternativ-dialogen för topppris-periodberäkningar.",
"usage_tips": "Aktivera detta för konsekventa topppris-varningar. Inaktivera för att endast fånga extrema pristoppar."
} }
}, },
"home_types": { "home_types": {
@ -568,16 +282,5 @@
"ROWHOUSE": "Radhus", "ROWHOUSE": "Radhus",
"HOUSE": "Hus", "HOUSE": "Hus",
"COTTAGE": "Stuga" "COTTAGE": "Stuga"
}, }
"time_units": {
"day": "{count} dag",
"days": "{count} dagar",
"hour": "{count} timme",
"hours": "{count} timmar",
"minute": "{count} minut",
"minutes": "{count} minuter",
"ago": "{parts} sedan",
"now": "nu"
},
"attribution": "Data tillhandahålls av Tibber"
} }

View file

@ -11,7 +11,6 @@ if TYPE_CHECKING:
from .api import TibberPricesApiClient from .api import TibberPricesApiClient
from .coordinator import TibberPricesDataUpdateCoordinator from .coordinator import TibberPricesDataUpdateCoordinator
from .interval_pool import TibberPricesIntervalPool
@dataclass @dataclass
@ -21,7 +20,6 @@ class TibberPricesData:
client: TibberPricesApiClient client: TibberPricesApiClient
coordinator: TibberPricesDataUpdateCoordinator coordinator: TibberPricesDataUpdateCoordinator
integration: Integration integration: Integration
interval_pool: TibberPricesIntervalPool # Shared interval pool per config entry
if TYPE_CHECKING: if TYPE_CHECKING:

View file

@ -22,9 +22,6 @@ async def async_get_config_entry_diagnostics(
"""Return diagnostics for a config entry.""" """Return diagnostics for a config entry."""
coordinator = entry.runtime_data.coordinator coordinator = entry.runtime_data.coordinator
# Get period metadata from coordinator data
price_periods = coordinator.data.get("pricePeriods", {}) if coordinator.data else {}
return { return {
"entry": { "entry": {
"entry_id": entry.entry_id, "entry_id": entry.entry_id,
@ -33,46 +30,16 @@ async def async_get_config_entry_diagnostics(
"domain": entry.domain, "domain": entry.domain,
"title": entry.title, "title": entry.title,
"state": str(entry.state), "state": str(entry.state),
"home_id": entry.data.get("home_id", ""),
}, },
"coordinator": { "coordinator": {
"last_update_success": coordinator.last_update_success, "last_update_success": coordinator.last_update_success,
"update_interval": str(coordinator.update_interval), "update_interval": str(coordinator.update_interval),
"is_main_entry": coordinator.is_main_entry(),
"data": coordinator.data, "data": coordinator.data,
"update_timestamps": { "update_timestamps": {
"price": coordinator._last_price_update.isoformat() if coordinator._last_price_update else None, # noqa: SLF001 "price": coordinator._last_price_update.isoformat() if coordinator._last_price_update else None, # noqa: SLF001
"user": coordinator._last_user_update.isoformat() if coordinator._last_user_update else None, # noqa: SLF001 "user": coordinator._last_user_update.isoformat() if coordinator._last_user_update else None, # noqa: SLF001
"last_coordinator_update": coordinator._last_coordinator_update.isoformat() # noqa: SLF001
if coordinator._last_coordinator_update # noqa: SLF001
else None,
}, },
"lifecycle": {
"state": coordinator._lifecycle_state, # noqa: SLF001
"is_fetching": coordinator._is_fetching, # noqa: SLF001
"api_calls_today": coordinator._api_calls_today, # noqa: SLF001
"last_api_call_date": coordinator._last_api_call_date.isoformat() # noqa: SLF001
if coordinator._last_api_call_date # noqa: SLF001
else None,
},
},
"periods": {
"best_price": {
"count": len(price_periods.get("best_price", {}).get("periods", [])),
"metadata": price_periods.get("best_price", {}).get("metadata", {}),
},
"peak_price": {
"count": len(price_periods.get("peak_price", {}).get("periods", [])),
"metadata": price_periods.get("peak_price", {}).get("metadata", {}),
},
},
"config": {
"options": dict(entry.options),
},
"cache_status": {
"user_data_cached": coordinator._cached_user_data is not None, # noqa: SLF001
"has_price_data": coordinator.data is not None and "priceInfo" in (coordinator.data or {}),
"transformer_cache_valid": coordinator._data_transformer._cached_transformed_data is not None, # noqa: SLF001
"period_calculator_cache_valid": coordinator._period_calculator._cached_periods is not None, # noqa: SLF001
}, },
"error": { "error": {
"last_exception": str(coordinator.last_exception) if coordinator.last_exception else None, "last_exception": str(coordinator.last_exception) if coordinator.last_exception else None,

View file

@ -5,13 +5,14 @@ from __future__ import annotations
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import ATTRIBUTION, DOMAIN, get_home_type_translation, get_translation from .const import ATTRIBUTION, DOMAIN, get_home_type_translation
from .coordinator import TibberPricesDataUpdateCoordinator from .coordinator import TibberPricesDataUpdateCoordinator
class TibberPricesEntity(CoordinatorEntity[TibberPricesDataUpdateCoordinator]): class TibberPricesEntity(CoordinatorEntity[TibberPricesDataUpdateCoordinator]):
"""TibberPricesEntity class.""" """TibberPricesEntity class."""
_attr_attribution = ATTRIBUTION
_attr_has_entity_name = True _attr_has_entity_name = True
def __init__(self, coordinator: TibberPricesDataUpdateCoordinator) -> None: def __init__(self, coordinator: TibberPricesDataUpdateCoordinator) -> None:
@ -21,13 +22,9 @@ class TibberPricesEntity(CoordinatorEntity[TibberPricesDataUpdateCoordinator]):
# Get device information # Get device information
home_name, home_id, home_type = self._get_device_info() home_name, home_id, home_type = self._get_device_info()
# Get configured language # Get translated home type using the configured language
language = coordinator.hass.config.language or "en" language = coordinator.hass.config.language or "en"
# Get translated home type and attribution
translated_model = get_home_type_translation(home_type, language) if home_type else "Unknown" translated_model = get_home_type_translation(home_type, language) if home_type else "Unknown"
# Get translated attribution, fallback to constant if translation not found
self._attr_attribution = get_translation(["attribution"], language) or ATTRIBUTION
self._attr_device_info = DeviceInfo( self._attr_device_info = DeviceInfo(
entry_type=DeviceEntryType.SERVICE, entry_type=DeviceEntryType.SERVICE,
@ -44,22 +41,6 @@ class TibberPricesEntity(CoordinatorEntity[TibberPricesDataUpdateCoordinator]):
configuration_url="https://developer.tibber.com/explorer", configuration_url="https://developer.tibber.com/explorer",
) )
@property
def available(self) -> bool:
"""
Return if entity is available.
Entity is unavailable when:
- Coordinator has not completed first update (no data yet)
- Coordinator has encountered an error (last_update_success = False)
Note: Auth failures are handled by coordinator's update method,
which raises ConfigEntryAuthFailed and triggers reauth flow.
"""
# Return False if coordinator not ready or has errors
# Return True if coordinator has data (bool conversion handles None/empty)
return self.coordinator.last_update_success and bool(self.coordinator.data)
def _get_device_info(self) -> tuple[str, str | None, str | None]: def _get_device_info(self) -> tuple[str, str | None, str | None]:
"""Get device name, ID and type.""" """Get device name, ID and type."""
user_profile = self.coordinator.get_user_profile() user_profile = self.coordinator.get_user_profile()
@ -69,6 +50,9 @@ class TibberPricesEntity(CoordinatorEntity[TibberPricesDataUpdateCoordinator]):
if is_subentry: if is_subentry:
home_name, home_id, home_type = self._get_subentry_device_info() home_name, home_id, home_type = self._get_subentry_device_info()
# Add user information if available
if user_profile and user_profile.get("name"):
home_name = f"{home_name} ({user_profile['name']})"
elif user_profile: elif user_profile:
home_name = self._get_main_entry_device_info(user_profile) home_name = self._get_main_entry_device_info(user_profile)
else: else:
@ -118,10 +102,8 @@ class TibberPricesEntity(CoordinatorEntity[TibberPricesDataUpdateCoordinator]):
return "Tibber Home", None return "Tibber Home", None
try: try:
# Use 'or {}' to handle None values (API may return None during maintenance) address1 = str(self.coordinator.data.get("address", {}).get("address1", ""))
address = self.coordinator.data.get("address") or {} city = str(self.coordinator.data.get("address", {}).get("city", ""))
address1 = str(address.get("address1", ""))
city = str(address.get("city", ""))
app_nickname = str(self.coordinator.data.get("appNickname", "")) app_nickname = str(self.coordinator.data.get("appNickname", ""))
home_type = str(self.coordinator.data.get("type", "")) home_type = str(self.coordinator.data.get("type", ""))

View file

@ -1,64 +0,0 @@
"""
Home Assistant entity-specific utilities for Tibber Prices integration.
This package contains HA entity integration logic:
- Dynamic icon selection based on state/price levels
- Icon color mapping for visual feedback
- Attribute builders (timestamps, descriptions, periods)
- Translation-aware formatting
These functions depend on Home Assistant concepts:
- Entity states and attributes
- Translation systems (custom_translations/)
- Configuration entries and coordinator data
- User-configurable options (CONF_EXTENDED_DESCRIPTIONS, etc.)
For pure data transformation (no HA dependencies), see utils/ package.
"""
from __future__ import annotations
from .attributes import (
add_description_attributes,
async_add_description_attributes,
build_period_attributes,
build_timestamp_attribute,
)
from .colors import add_icon_color_attribute, get_icon_color
from .helpers import (
find_rolling_hour_center_index,
get_price_value,
translate_level,
translate_rating_level,
)
from .icons import (
get_binary_sensor_icon,
get_dynamic_icon,
get_level_sensor_icon,
get_price_level_for_icon,
get_price_sensor_icon,
get_rating_sensor_icon,
get_trend_icon,
get_volatility_sensor_icon,
)
__all__ = [
"add_description_attributes",
"add_icon_color_attribute",
"async_add_description_attributes",
"build_period_attributes",
"build_timestamp_attribute",
"find_rolling_hour_center_index",
"get_binary_sensor_icon",
"get_dynamic_icon",
"get_icon_color",
"get_level_sensor_icon",
"get_price_level_for_icon",
"get_price_sensor_icon",
"get_price_value",
"get_rating_sensor_icon",
"get_trend_icon",
"get_volatility_sensor_icon",
"translate_level",
"translate_rating_level",
]

View file

@ -1,262 +0,0 @@
"""Common attribute utilities for Tibber Prices entities."""
from __future__ import annotations
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from homeassistant.core import HomeAssistant
from ..data import TibberPricesConfigEntry # noqa: TID252
def build_timestamp_attribute(interval_data: dict | None) -> str | None:
"""
Build timestamp attribute from interval data.
Extracts startsAt field consistently across all sensors.
Args:
interval_data: Interval data dictionary containing startsAt field
Returns:
ISO format timestamp string or None
"""
if not interval_data:
return None
return interval_data.get("startsAt")
def build_period_attributes(period_data: dict) -> dict:
"""
Build common period attributes (start, end, duration, timestamp).
Used by binary sensors for period-based entities.
Args:
period_data: Period data dictionary
Returns:
Dictionary with common period attributes
"""
return {
"start": period_data.get("start"),
"end": period_data.get("end"),
"duration_minutes": period_data.get("duration_minutes"),
"timestamp": period_data.get("start"), # Timestamp = period start
}
def add_description_attributes( # noqa: PLR0913, PLR0912
attributes: dict,
platform: str,
translation_key: str | None,
hass: HomeAssistant,
config_entry: TibberPricesConfigEntry,
*,
position: str = "end",
) -> None:
"""
Add description attributes from custom translations to an existing attributes dict.
The 'description' attribute is always present, but its content changes based on
CONF_EXTENDED_DESCRIPTIONS setting:
- When disabled: Uses short 'description' from translations
- When enabled: Uses 'long_description' from translations (falls back to short if not available)
Additionally, when CONF_EXTENDED_DESCRIPTIONS is enabled, 'usage_tips' is added as
a separate attribute.
This function modifies the attributes dict in-place. By default, descriptions are
added at the END of the dict (after all other attributes). For special cases like
chart_data_export, use position="before_service_data" to add descriptions before
service data attributes.
Args:
attributes: Existing attributes dict to modify (in-place)
platform: Platform name ("sensor" or "binary_sensor")
translation_key: Translation key for entity
hass: Home Assistant instance
config_entry: Config entry with options
position: Where to add descriptions:
- "end" (default): Add at the very end
- "before_service_data": Add before service data (for chart_data_export)
"""
if not translation_key or not hass:
return
# Import here to avoid circular dependency
from ..const import ( # noqa: PLC0415, TID252
CONF_EXTENDED_DESCRIPTIONS,
DEFAULT_EXTENDED_DESCRIPTIONS,
get_entity_description,
)
language = hass.config.language if hass.config.language else "en"
# Build description dict
desc_attrs: dict[str, str] = {}
extended_descriptions = config_entry.options.get(
CONF_EXTENDED_DESCRIPTIONS,
config_entry.data.get(CONF_EXTENDED_DESCRIPTIONS, DEFAULT_EXTENDED_DESCRIPTIONS),
)
# Choose description based on extended_descriptions setting
if extended_descriptions:
# Use long_description as description content (if available)
description = get_entity_description(platform, translation_key, language, "long_description")
if not description:
# Fallback to short description if long_description not available
description = get_entity_description(platform, translation_key, language, "description")
else:
# Use short description
description = get_entity_description(platform, translation_key, language, "description")
if description:
desc_attrs["description"] = description
# Add usage_tips as separate attribute if extended_descriptions enabled
if extended_descriptions:
usage_tips = get_entity_description(platform, translation_key, language, "usage_tips")
if usage_tips:
desc_attrs["usage_tips"] = usage_tips
# Add descriptions at appropriate position
if position == "end":
# Default: Add at the very end
attributes.update(desc_attrs)
elif position == "before_service_data":
# Special case: Insert before service data
# This is used by chart_data_export to keep our attributes before foreign data
# We need to rebuild the dict to maintain order
temp_attrs = dict(attributes)
attributes.clear()
# Add everything except service data
for key, value in temp_attrs.items():
if key not in ("timestamp", "error"):
continue
attributes[key] = value
# Add descriptions here (before service data)
attributes.update(desc_attrs)
# Add service data last
for key, value in temp_attrs.items():
if key in ("timestamp", "error"):
continue
attributes[key] = value
async def async_add_description_attributes( # noqa: PLR0913, PLR0912
attributes: dict,
platform: str,
translation_key: str | None,
hass: HomeAssistant,
config_entry: TibberPricesConfigEntry,
*,
position: str = "end",
) -> None:
"""
Async version of add_description_attributes.
Adds description attributes from custom translations to an existing attributes dict.
Uses async translation loading (calls async_get_entity_description).
Args:
attributes: Existing attributes dict to modify (in-place)
platform: Platform name ("sensor" or "binary_sensor")
translation_key: Translation key for entity
hass: Home Assistant instance
config_entry: Config entry with options
position: Where to add descriptions ("end" or "before_service_data")
"""
if not translation_key or not hass:
return
# Import here to avoid circular dependency
from ..const import ( # noqa: PLC0415, TID252
CONF_EXTENDED_DESCRIPTIONS,
DEFAULT_EXTENDED_DESCRIPTIONS,
async_get_entity_description,
)
language = hass.config.language if hass.config.language else "en"
# Build description dict
desc_attrs: dict[str, str] = {}
extended_descriptions = config_entry.options.get(
CONF_EXTENDED_DESCRIPTIONS,
config_entry.data.get(CONF_EXTENDED_DESCRIPTIONS, DEFAULT_EXTENDED_DESCRIPTIONS),
)
# Choose description based on extended_descriptions setting
if extended_descriptions:
# Use long_description as description content (if available)
description = await async_get_entity_description(
hass,
platform,
translation_key,
language,
"long_description",
)
if not description:
# Fallback to short description if long_description not available
description = await async_get_entity_description(
hass,
platform,
translation_key,
language,
"description",
)
else:
# Use short description
description = await async_get_entity_description(
hass,
platform,
translation_key,
language,
"description",
)
if description:
desc_attrs["description"] = description
# Add usage_tips as separate attribute if extended_descriptions enabled
if extended_descriptions:
usage_tips = await async_get_entity_description(
hass,
platform,
translation_key,
language,
"usage_tips",
)
if usage_tips:
desc_attrs["usage_tips"] = usage_tips
# Add descriptions at appropriate position
if position == "end":
# Default: Add at the very end
attributes.update(desc_attrs)
elif position == "before_service_data":
# Special case: Insert before service data (same logic as sync version)
temp_attrs = dict(attributes)
attributes.clear()
for key, value in temp_attrs.items():
if key not in ("timestamp", "error"):
continue
attributes[key] = value
attributes.update(desc_attrs)
for key, value in temp_attrs.items():
if key in ("timestamp", "error"):
continue
attributes[key] = value

View file

@ -1,124 +0,0 @@
"""Color utilities for Tibber Prices entities."""
from __future__ import annotations
from typing import Any
from custom_components.tibber_prices.const import (
BINARY_SENSOR_COLOR_MAPPING,
PRICE_LEVEL_COLOR_MAPPING,
PRICE_RATING_COLOR_MAPPING,
VOLATILITY_COLOR_MAPPING,
)
# Timing sensor color thresholds
TIMING_HIGH_PROGRESS_THRESHOLD = 75 # >=75%: High intensity color
TIMING_URGENT_THRESHOLD = 15 # <=15 min: Urgent
TIMING_SOON_THRESHOLD = 60 # <=60 min: Soon
def add_icon_color_attribute(
attributes: dict,
key: str,
state_value: Any = None,
*,
is_on: bool | None = None,
) -> None:
"""
Add icon_color attribute if color mapping exists.
Used by both sensor and binary_sensor platforms.
Args:
attributes: Attribute dictionary to update
key: Entity description key
state_value: Sensor value (for sensors) or None (for binary sensors)
is_on: Binary sensor state (for binary sensors) or None (for sensors)
"""
color = get_icon_color(key, state_value, is_on=is_on)
if color:
attributes["icon_color"] = color
def get_icon_color(
key: str,
state_value: Any = None,
*,
is_on: bool | None = None,
) -> str | None:
"""
Get icon color from various mappings.
Args:
key: Entity description key
state_value: Sensor value (for sensors)
is_on: Binary sensor state (for binary sensors)
Returns:
CSS color variable string or None
"""
# Binary sensor colors (based on on/off state)
if key in BINARY_SENSOR_COLOR_MAPPING and is_on is not None:
state_key = "on" if is_on else "off"
return BINARY_SENSOR_COLOR_MAPPING[key].get(state_key)
# Trend sensor colors (based on trend state)
if key.startswith("price_trend_") and isinstance(state_value, str):
trend_colors = {
"rising": "var(--error-color)", # Red/Orange for rising prices
"falling": "var(--success-color)", # Green for falling prices
"stable": "var(--state-icon-color)", # Default gray for stable
}
return trend_colors.get(state_value)
# Timing sensor colors (best_price = green, peak_price = red/orange)
timing_color = get_timing_sensor_color(key, state_value)
if timing_color:
return timing_color
# Price level/rating/volatility colors (based on uppercase value)
if isinstance(state_value, str):
return (
PRICE_LEVEL_COLOR_MAPPING.get(state_value.upper())
or PRICE_RATING_COLOR_MAPPING.get(state_value.upper())
or VOLATILITY_COLOR_MAPPING.get(state_value.upper())
)
return None
def get_timing_sensor_color(key: str, state_value: Any) -> str | None:
"""
Get color for best_price/peak_price timing sensors.
Best price sensors: Green (good for user)
Peak price sensors: Red/Orange (warning/alert)
Args:
key: Entity description key
state_value: Sensor value (percentage or minutes)
Returns:
CSS color variable string or None
"""
is_best_price = key.startswith("best_price_")
if not (is_best_price or key.startswith("peak_price_")):
return None
# No data / zero value
if state_value is None or (isinstance(state_value, (int, float)) and state_value == 0):
return "var(--disabled-color)"
# Progress sensors: Intensity based on completion
if key.endswith("_progress") and isinstance(state_value, (int, float)):
high_intensity = state_value >= TIMING_HIGH_PROGRESS_THRESHOLD
if is_best_price:
return "var(--success-color)" if high_intensity else "var(--info-color)"
return "var(--error-color)" if high_intensity else "var(--warning-color)"
# All other sensors: Simple period-type color
return "var(--success-color)" if is_best_price else "var(--warning-color)"

View file

@ -1,151 +0,0 @@
"""
Common helper functions for entities across platforms.
This module provides utility functions used by both sensor and binary_sensor platforms:
- Price value conversion (major/subunit currency units)
- Translation helpers (price levels, ratings)
- Time-based calculations (rolling hour center index)
These functions operate on entity-level concepts (states, translations) but are
platform-agnostic and can be used by both sensor and binary_sensor platforms.
"""
from __future__ import annotations
from typing import TYPE_CHECKING
from custom_components.tibber_prices.const import get_display_unit_factor, get_price_level_translation
if TYPE_CHECKING:
from datetime import datetime
from custom_components.tibber_prices.coordinator.time_service import TibberPricesTimeService
from custom_components.tibber_prices.data import TibberPricesConfigEntry
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
def get_price_value(
price: float,
*,
in_euro: bool | None = None,
config_entry: ConfigEntry | TibberPricesConfigEntry | None = None,
) -> float:
"""
Convert price based on unit.
NOTE: This function supports two modes for backward compatibility:
1. Legacy mode: in_euro=True/False (hardcoded conversion)
2. New mode: config_entry (config-driven conversion)
New code should use get_display_unit_factor(config_entry) directly.
Args:
price: Price value to convert.
in_euro: (Legacy) If True, return in base currency; if False, in subunit currency.
config_entry: (New) Config entry to get display unit configuration.
Returns:
Price in requested unit (major or subunit currency units).
"""
# Legacy mode: use in_euro parameter
if in_euro is not None:
return price if in_euro else round(price * 100, 2)
# New mode: use config_entry
if config_entry is not None:
factor = get_display_unit_factor(config_entry)
return round(price * factor, 2)
# Fallback: default to subunit currency (backward compatibility)
return round(price * 100, 2)
def translate_level(hass: HomeAssistant, level: str) -> str:
"""
Translate price level to the user's language.
Args:
hass: HomeAssistant instance for language configuration
level: Price level to translate (e.g., VERY_CHEAP, NORMAL, etc.)
Returns:
Translated level string, or original level if translation not found
"""
if not hass:
return level
language = hass.config.language or "en"
translated = get_price_level_translation(level, language)
if translated:
return translated
if language != "en":
fallback = get_price_level_translation(level, "en")
if fallback:
return fallback
return level
def translate_rating_level(rating: str) -> str:
"""
Translate price rating level to the user's language.
Args:
rating: Price rating to translate (e.g., LOW, NORMAL, HIGH)
Returns:
Translated rating string, or original rating if translation not found
Note:
Currently returns the rating as-is. Translation mapping for ratings
can be added here when needed, similar to translate_level().
"""
# For now, ratings are returned as-is
# Add translation mapping here when needed
return rating
def find_rolling_hour_center_index(
all_prices: list[dict],
current_time: datetime,
hour_offset: int,
*,
time: TibberPricesTimeService,
) -> int | None:
"""
Find the center index for the rolling hour window.
Args:
all_prices: List of all price interval dictionaries with 'startsAt' key
current_time: Current datetime to find the current interval
hour_offset: Number of hours to offset from current interval (can be negative)
time: TibberPricesTimeService instance (required)
Returns:
Index of the center interval for the rolling hour window, or None if not found
"""
# Round to nearest interval boundary to handle edge cases where HA schedules
# us slightly before the boundary (e.g., 14:59:59.999 → 15:00:00)
target_time = time.round_to_nearest_quarter(current_time)
current_idx = None
for idx, price_data in enumerate(all_prices):
starts_at = time.get_interval_time(price_data)
if starts_at is None:
continue
# Exact match after rounding
if starts_at == target_time:
current_idx = idx
break
if current_idx is None:
return None
return current_idx + (hour_offset * 4)

View file

@ -1,391 +0,0 @@
"""Icon utilities for Tibber Prices entities."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from datetime import timedelta
from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from custom_components.tibber_prices.coordinator.time_service import TibberPricesTimeService
from custom_components.tibber_prices.const import (
BINARY_SENSOR_ICON_MAPPING,
PRICE_LEVEL_CASH_ICON_MAPPING,
PRICE_LEVEL_ICON_MAPPING,
PRICE_RATING_ICON_MAPPING,
VOLATILITY_ICON_MAPPING,
)
from custom_components.tibber_prices.coordinator.helpers import get_intervals_for_day_offsets
from custom_components.tibber_prices.entity_utils.helpers import find_rolling_hour_center_index
from custom_components.tibber_prices.sensor.helpers import aggregate_level_data
from custom_components.tibber_prices.utils.price import find_price_data_for_interval
# Icon update logic uses timedelta directly (cosmetic, independent - allowed per AGENTS.md)
_INTERVAL_MINUTES = 15 # Tibber's 15-minute intervals
@dataclass
class TibberPricesIconContext:
"""Context data for dynamic icon selection."""
is_on: bool | None = None
coordinator_data: dict | None = None
has_future_periods_callback: Callable[[], bool] | None = None
period_is_active_callback: Callable[[], bool] | None = None
time: TibberPricesTimeService | None = None
if TYPE_CHECKING:
from collections.abc import Callable
# Timing sensor icon thresholds (in minutes)
TIMING_URGENT_THRESHOLD = 15 # ≤15 min: Alert icon
TIMING_SOON_THRESHOLD = 60 # ≤1 hour: Timer icon
TIMING_MEDIUM_THRESHOLD = 180 # ≤3 hours: Sand timer icon
# >3 hours: Outline timer icon
# Progress sensor constants
PROGRESS_MAX = 100 # Maximum progress value (100%)
def get_dynamic_icon(
key: str,
value: Any,
*,
context: TibberPricesIconContext | None = None,
) -> str | None:
"""
Get dynamic icon based on sensor state.
Unified function for both sensor and binary_sensor platforms.
Args:
key: Entity description key
value: Native value of the sensor
context: Optional context with is_on state, coordinator_data, and callbacks
Returns:
Icon string or None if no dynamic icon applies
"""
ctx = context or TibberPricesIconContext()
# Try various icon sources in order
return (
get_trend_icon(key, value)
or get_timing_sensor_icon(key, value, period_is_active_callback=ctx.period_is_active_callback)
or get_price_sensor_icon(key, ctx.coordinator_data, time=ctx.time)
or get_level_sensor_icon(key, value)
or get_rating_sensor_icon(key, value)
or get_volatility_sensor_icon(key, value)
or get_binary_sensor_icon(key, is_on=ctx.is_on, has_future_periods_callback=ctx.has_future_periods_callback)
)
def get_trend_icon(key: str, value: Any) -> str | None:
"""Get icon for trend sensors using 5-level trend scale."""
# Handle next_price_trend_change TIMESTAMP sensor differently
# (icon based on attributes, not value which is a timestamp)
if key == "next_price_trend_change":
return None # Will be handled by sensor's icon property using attributes
if not key.startswith("price_trend_") and key != "current_price_trend":
return None
if not isinstance(value, str):
return None
# 5-level trend icons: strongly uses double arrows, normal uses single
trend_icons = {
"strongly_rising": "mdi:chevron-double-up", # Strong upward movement
"rising": "mdi:trending-up", # Normal upward trend
"stable": "mdi:trending-neutral", # No significant change
"falling": "mdi:trending-down", # Normal downward trend
"strongly_falling": "mdi:chevron-double-down", # Strong downward movement
}
return trend_icons.get(value)
def get_timing_sensor_icon(
key: str,
value: Any,
*,
period_is_active_callback: Callable[[], bool] | None = None,
) -> str | None:
"""
Get dynamic icon for best_price/peak_price timing sensors.
Progress sensors: Different icons based on period state
- No period: mdi:help-circle-outline (Unknown/gray)
- Waiting (0%, period not active): mdi:timer-pause-outline (paused/waiting)
- Active (0%, period running): mdi:circle-outline (just started)
- Progress 1-99%: mdi:circle-slice-1 to mdi:circle-slice-7
- Complete (100%): mdi:circle-slice-8
Remaining/Next-in sensors: Different timer icons based on time remaining
Timestamp sensors: Static icons (handled by entity description)
Args:
key: Entity description key
value: Sensor value (percentage for progress, minutes for countdown)
period_is_active_callback: Callback to check if related period is currently active
Returns:
Icon string or None if not a timing sensor with dynamic icon
"""
# Unknown state: Show help icon for all timing sensors
if value is None and key.startswith(("best_price_", "peak_price_")):
return "mdi:help-circle-outline"
# Progress sensors: Circle-slice icons for visual progress indication
# mdi:circle-slice-N where N represents filled portions (1=12.5%, 8=100%)
if key.endswith("_progress") and isinstance(value, (int, float)):
# Special handling for 0%: Distinguish between waiting and active
if value <= 0:
# Check if period is currently active via callback
is_active = (
period_is_active_callback()
if (period_is_active_callback and callable(period_is_active_callback))
else True
)
# Period just started (0% but running) vs waiting for next
return "mdi:circle-outline" if is_active else "mdi:timer-pause-outline"
# Calculate slice based on progress percentage
slice_num = 8 if value >= PROGRESS_MAX else min(7, max(1, int((value / PROGRESS_MAX) * 8)))
return f"mdi:circle-slice-{slice_num}"
# Remaining/Next-in minutes sensors: Timer icons based on urgency thresholds
if key.endswith(("_remaining_minutes", "_next_in_minutes")) and isinstance(value, (int, float)):
# Map time remaining to appropriate timer icon
urgency_map = [
(0, "mdi:timer-off-outline"), # Exactly 0 minutes
(TIMING_URGENT_THRESHOLD, "mdi:timer-alert"), # < 15 min: urgent
(TIMING_SOON_THRESHOLD, "mdi:timer"), # < 60 min: soon
(TIMING_MEDIUM_THRESHOLD, "mdi:timer-sand"), # < 180 min: medium
]
for threshold, icon in urgency_map:
if value <= threshold:
return icon
return "mdi:timer-outline" # >= 180 min: far away
# Timestamp sensors use static icons from entity description
return None
def get_price_sensor_icon(
key: str,
coordinator_data: dict | None,
*,
time: TibberPricesTimeService | None,
) -> str | None:
"""
Get icon for current price sensors (dynamic based on price level).
Dynamic icons for: current_interval_price, next_interval_price,
current_hour_average_price, next_hour_average_price
Other price sensors (previous interval) use static icons from entity description.
Args:
key: Entity description key
coordinator_data: Coordinator data for price level lookups
time: TibberPricesTimeService instance (required for determining current interval)
Returns:
Icon string or None if not a current price sensor
"""
# Early exit if coordinator_data or time not available
if not coordinator_data or time is None:
return None
# Only current price sensors get dynamic icons
if key in ("current_interval_price", "current_interval_price_base"):
level = get_price_level_for_icon(coordinator_data, interval_offset=0, time=time)
if level:
return PRICE_LEVEL_CASH_ICON_MAPPING.get(level.upper())
elif key == "next_interval_price":
# For next interval, use the next interval price level to determine icon
level = get_price_level_for_icon(coordinator_data, interval_offset=1, time=time)
if level:
return PRICE_LEVEL_CASH_ICON_MAPPING.get(level.upper())
elif key == "current_hour_average_price":
# For current hour average, use the current hour price level to determine icon
level = get_rolling_hour_price_level_for_icon(coordinator_data, hour_offset=0, time=time)
if level:
return PRICE_LEVEL_CASH_ICON_MAPPING.get(level.upper())
elif key == "next_hour_average_price":
# For next hour average, use the next hour price level to determine icon
level = get_rolling_hour_price_level_for_icon(coordinator_data, hour_offset=1, time=time)
if level:
return PRICE_LEVEL_CASH_ICON_MAPPING.get(level.upper())
# For all other price sensors, let entity description handle the icon
return None
def get_level_sensor_icon(key: str, value: Any) -> str | None:
"""Get icon for price level sensors."""
if key not in [
"current_interval_price_level",
"next_interval_price_level",
"previous_interval_price_level",
"current_hour_price_level",
"next_hour_price_level",
"yesterday_price_level",
"today_price_level",
"tomorrow_price_level",
] or not isinstance(value, str):
return None
return PRICE_LEVEL_ICON_MAPPING.get(value.upper())
def get_rating_sensor_icon(key: str, value: Any) -> str | None:
    """
    Return the icon for a price rating sensor, or None when not applicable.

    Only the known *_price_rating entity keys with a string value resolve to
    an icon; everything else returns None (static icon applies).
    """
    rating_sensor_keys = (
        "current_interval_price_rating",
        "next_interval_price_rating",
        "previous_interval_price_rating",
        "current_hour_price_rating",
        "next_hour_price_rating",
        "yesterday_price_rating",
        "today_price_rating",
        "tomorrow_price_rating",
    )
    if key in rating_sensor_keys and isinstance(value, str):
        # Mapping keys are upper-case rating names.
        return PRICE_RATING_ICON_MAPPING.get(value.upper())
    return None
def get_volatility_sensor_icon(key: str, value: Any) -> str | None:
    """
    Return the icon for a volatility sensor, or None when not applicable.

    Applies to any entity key ending in "_volatility" with a string value.
    """
    if key.endswith("_volatility") and isinstance(value, str):
        return VOLATILITY_ICON_MAPPING.get(value.upper())
    return None
def get_binary_sensor_icon(
    key: str,
    *,
    is_on: bool | None,
    has_future_periods_callback: Callable[[], bool] | None = None,
) -> str | None:
    """
    Get icon for binary sensors with dynamic state-based icons.

    Args:
        key: Entity description key
        is_on: Binary sensor state
        has_future_periods_callback: Callback to check if future periods exist

    Returns:
        Icon string or None if not a binary sensor with dynamic icons
    """
    if key not in BINARY_SENSOR_ICON_MAPPING or is_on is None:
        return None
    icons = BINARY_SENSOR_ICON_MAPPING[key]
    if is_on:
        # ON state always uses the dedicated "on" icon.
        return icons.get("on")
    # OFF state: the icon depends on whether any future periods exist.
    if has_future_periods_callback and has_future_periods_callback():
        return icons.get("off")
    return icons.get("off_no_future")
def get_price_level_for_icon(
    coordinator_data: dict,
    *,
    interval_offset: int | None = None,
    time: TibberPricesTimeService,
) -> str | None:
    """
    Get the price level for icon determination.

    Supports interval-based lookups (current/next/previous interval).

    Args:
        coordinator_data: Coordinator data
        interval_offset: Interval offset (0=current, 1=next, -1=previous)
        time: TibberPricesTimeService instance (required)

    Returns:
        Price level string or None if not found
    """
    if interval_offset is None or not coordinator_data:
        return None
    # Shift "now" by whole intervals to find the target interval start.
    lookup_time = time.now() + timedelta(minutes=interval_offset * _INTERVAL_MINUTES)
    entry = find_price_data_for_interval(coordinator_data, lookup_time, time=time)
    if entry and "level" in entry:
        return entry["level"]
    return None
def get_rolling_hour_price_level_for_icon(
    coordinator_data: dict,
    *,
    hour_offset: int = 0,
    time: TibberPricesTimeService,
) -> str | None:
    """
    Get the aggregated price level for rolling hour icon determination.

    Mirrors the sensor platform's logic: a 5-interval rolling window
    (2 before + center + 2 after) is aggregated into one price level so
    that the icon always matches the sensor's own value calculation.

    Args:
        coordinator_data: Coordinator data
        hour_offset: Hour offset (0=current hour, 1=next hour)
        time: TibberPricesTimeService instance (required)

    Returns:
        Aggregated price level string or None if not found
    """
    if not coordinator_data:
        return None
    # Pull every interval across yesterday/today/tomorrow via the shared helper.
    all_prices = get_intervals_for_day_offsets(coordinator_data, [-1, 0, 1])
    if not all_prices:
        return None
    # Locate the window center with the same helper the sensor platform uses.
    center_idx = find_rolling_hour_center_index(all_prices, time.now(), hour_offset, time=time)
    if center_idx is None:
        return None
    # Clip the 5-interval window (-2..+2 around center) to the list bounds.
    window_data = [
        all_prices[idx]
        for idx in range(center_idx - 2, center_idx + 3)
        if 0 <= idx < len(all_prices)
    ]
    if not window_data:
        return None
    # Same aggregation function as the sensor platform.
    return aggregate_level_data(window_data)

View file

@ -1,33 +0,0 @@
{
"services": {
"get_price": {
"service": "mdi:table-search"
},
"get_chartdata": {
"service": "mdi:chart-bar",
"sections": {
"general": "mdi:identifier",
"selection": "mdi:calendar-range",
"filters": "mdi:filter-variant",
"transformation": "mdi:tune",
"format": "mdi:file-table",
"arrays_of_objects": "mdi:code-json",
"arrays_of_arrays": "mdi:code-brackets"
}
},
"get_apexcharts_yaml": {
"service": "mdi:chart-line",
"sections": {
"entry_id": "mdi:identifier",
"day": "mdi:calendar-range",
"level_type": "mdi:format-list-bulleted-type",
"resolution": "mdi:timer-sand",
"highlight_best_price": "mdi:battery-charging-low",
"highlight_peak_price": "mdi:battery-alert"
}
},
"refresh_user_data": {
"service": "mdi:refresh"
}
}
}

View file

@ -1,21 +0,0 @@
"""Interval Pool - Intelligent interval caching and routing."""
from .manager import TibberPricesIntervalPool
from .routing import get_price_intervals_for_range
from .storage import (
INTERVAL_POOL_STORAGE_VERSION,
async_load_pool_state,
async_remove_pool_storage,
async_save_pool_state,
get_storage_key,
)
__all__ = [
"INTERVAL_POOL_STORAGE_VERSION",
"TibberPricesIntervalPool",
"async_load_pool_state",
"async_remove_pool_storage",
"async_save_pool_state",
"get_price_intervals_for_range",
"get_storage_key",
]

View file

@ -1,206 +0,0 @@
"""Fetch group cache for price intervals."""
from __future__ import annotations
import logging
from datetime import datetime, timedelta
from typing import TYPE_CHECKING, Any
from homeassistant.util import dt as dt_utils
if TYPE_CHECKING:
from custom_components.tibber_prices.coordinator.time_service import (
TibberPricesTimeService,
)
_LOGGER = logging.getLogger(__name__)
_LOGGER_DETAILS = logging.getLogger(__name__ + ".details")
# Protected date range: day-before-yesterday to tomorrow (4 days total)
PROTECTED_DAYS_BEFORE = 2 # day-before-yesterday + yesterday
PROTECTED_DAYS_AFTER = 1 # tomorrow
class TibberPricesIntervalPoolFetchGroupCache:
    """
    Storage for fetch groups with protected range management.

    A fetch group is a collection of intervals fetched at the same time,
    stored together with their fetch timestamp for GC purposes.

    Structure:
        {
            "fetched_at": datetime,   # When this group was fetched
            "intervals": [dict, ...]  # List of interval dicts
        }

    Protected Range:
        Intervals within day-before-yesterday to tomorrow are protected
        and never evicted from cache. This range shifts daily automatically.

    Example (today = 2025-11-25):
        Protected: 2025-11-23 00:00 to 2025-11-27 00:00
    """

    def __init__(self, *, time_service: TibberPricesTimeService | None = None) -> None:
        """Initialize empty fetch group cache with optional TimeService."""
        self._fetch_groups: list[dict[str, Any]] = []
        self._time_service = time_service
        # Protected range cache (recomputed once per calendar day)
        self._protected_range_cache: tuple[str, str] | None = None
        self._protected_range_cache_date: str | None = None

    def add_fetch_group(
        self,
        intervals: list[dict[str, Any]],
        fetched_at: datetime,
    ) -> int:
        """
        Add new fetch group to cache.

        Args:
            intervals: List of interval dicts (sorted by startsAt).
            fetched_at: Timestamp when intervals were fetched.

        Returns:
            Index of the newly added fetch group.
        """
        fetch_group = {
            "fetched_at": fetched_at,
            "intervals": intervals,
        }
        fetch_group_index = len(self._fetch_groups)
        self._fetch_groups.append(fetch_group)
        _LOGGER_DETAILS.debug(
            "Added fetch group %d: %d intervals (fetched at %s)",
            fetch_group_index,
            len(intervals),
            fetched_at.isoformat(),
        )
        return fetch_group_index

    def get_fetch_groups(self) -> list[dict[str, Any]]:
        """Get all fetch groups (read-only access)."""
        return self._fetch_groups

    def set_fetch_groups(self, fetch_groups: list[dict[str, Any]]) -> None:
        """Replace all fetch groups (used during GC)."""
        self._fetch_groups = fetch_groups

    def get_protected_range(self) -> tuple[str, str]:
        """
        Get protected date range as ISO strings.

        Protected range: day-before-yesterday 00:00 to day-after-tomorrow 00:00.
        This range shifts daily automatically.

        Time Machine Support:
            If time_service was provided at init, uses time_service.now() for
            "today" calculation. This protects the correct date range when
            simulating a different date.

        Returns:
            Tuple of (start_iso, end_iso) for protected range.
            Start is inclusive, end is exclusive.

        Example (today = 2025-11-25):
            Returns: ("2025-11-23T00:00:00+01:00", "2025-11-27T00:00:00+01:00")
            Protected days: 2025-11-23, 2025-11-24, 2025-11-25, 2025-11-26
        """
        # Use TimeService if available (Time Machine support), else real time
        now = self._time_service.now() if self._time_service else dt_utils.now()
        today_date_str = now.date().isoformat()
        # Check cache validity (invalidate daily)
        if self._protected_range_cache_date == today_date_str and self._protected_range_cache:
            return self._protected_range_cache
        # Calculate new protected range
        today_midnight = now.replace(hour=0, minute=0, second=0, microsecond=0)
        # Start: day-before-yesterday at 00:00
        start_dt = today_midnight - timedelta(days=PROTECTED_DAYS_BEFORE)
        # End: day after tomorrow at 00:00 (exclusive, so tomorrow is included)
        end_dt = today_midnight + timedelta(days=PROTECTED_DAYS_AFTER + 1)
        # Convert to ISO strings and cache
        start_iso = start_dt.isoformat()
        end_iso = end_dt.isoformat()
        self._protected_range_cache = (start_iso, end_iso)
        self._protected_range_cache_date = today_date_str
        return start_iso, end_iso

    def is_interval_protected(self, interval: dict[str, Any]) -> bool:
        """
        Check if interval is within protected date range.

        Protected intervals are never evicted from cache.

        Args:
            interval: Interval dict with "startsAt" ISO timestamp.

        Returns:
            True if interval is protected (within protected range).
        """
        starts_at_iso = interval["startsAt"]
        start_protected_iso, end_protected_iso = self.get_protected_range()
        # Fast string comparison (ISO timestamps are lexicographically sortable)
        return start_protected_iso <= starts_at_iso < end_protected_iso

    def count_total_intervals(self) -> int:
        """Count total intervals across all fetch groups."""
        return sum(len(group["intervals"]) for group in self._fetch_groups)

    def to_dict(self) -> dict[str, Any]:
        """
        Serialize fetch groups for storage.

        Returns:
            Dict with serializable fetch groups.
        """
        return {
            "fetch_groups": [
                {
                    "fetched_at": group["fetched_at"].isoformat(),
                    "intervals": group["intervals"],
                }
                for group in self._fetch_groups
            ],
        }

    @classmethod
    def from_dict(
        cls,
        data: dict[str, Any],
        *,
        time_service: TibberPricesTimeService | None = None,
    ) -> TibberPricesIntervalPoolFetchGroupCache:
        """
        Restore fetch groups from storage.

        Args:
            data: Dict with "fetch_groups" list.
            time_service: Optional TimeService forwarded to the restored cache.
                Previously the restored cache always had time_service=None,
                so Time Machine support was silently lost after a restore;
                passing it here preserves the simulated-date protected range.

        Returns:
            TibberPricesIntervalPoolFetchGroupCache instance with restored data.
        """
        cache = cls(time_service=time_service)
        fetch_groups_data = data.get("fetch_groups", [])
        cache._fetch_groups = [
            {
                "fetched_at": datetime.fromisoformat(group["fetched_at"]),
                "intervals": group["intervals"],
            }
            for group in fetch_groups_data
        ]
        return cache

View file

@ -1,321 +0,0 @@
"""Interval fetcher - coverage check and API coordination for interval pool."""
from __future__ import annotations
import logging
from datetime import UTC, datetime, timedelta
from typing import TYPE_CHECKING, Any
from homeassistant.util import dt as dt_utils
if TYPE_CHECKING:
from collections.abc import Callable
from custom_components.tibber_prices.api import TibberPricesApiClient
from .cache import TibberPricesIntervalPoolFetchGroupCache
from .index import TibberPricesIntervalPoolTimestampIndex
_LOGGER = logging.getLogger(__name__)
_LOGGER_DETAILS = logging.getLogger(__name__ + ".details")
# Resolution change date (hourly before, quarter-hourly after)
# Use UTC for constant - timezone adjusted at runtime when comparing
RESOLUTION_CHANGE_DATETIME = datetime(2025, 10, 1, tzinfo=UTC)
RESOLUTION_CHANGE_ISO = "2025-10-01T00:00:00"
# Interval lengths in minutes
INTERVAL_HOURLY = 60
INTERVAL_QUARTER_HOURLY = 15
# Minimum gap sizes in seconds
MIN_GAP_HOURLY = 3600 # 1 hour
MIN_GAP_QUARTER_HOURLY = 900 # 15 minutes
# Tolerance for time comparisons (±1 second for floating point/timezone issues)
TIME_TOLERANCE_SECONDS = 1
TIME_TOLERANCE_MINUTES = 1
class TibberPricesIntervalPoolFetcher:
    """Fetch missing intervals from API based on coverage check."""

    def __init__(
        self,
        api: TibberPricesApiClient,
        cache: TibberPricesIntervalPoolFetchGroupCache,
        index: TibberPricesIntervalPoolTimestampIndex,
        home_id: str,
    ) -> None:
        """
        Initialize fetcher.

        Args:
            api: API client for Tibber GraphQL queries.
            cache: Fetch group cache for storage operations.
            index: Timestamp index for gap detection.
            home_id: Tibber home ID for API calls.
        """
        self._api = api
        self._cache = cache
        self._index = index
        self._home_id = home_id

    def check_coverage(
        self,
        cached_intervals: list[dict[str, Any]],
        start_time_iso: str,
        end_time_iso: str,
    ) -> list[tuple[str, str]]:
        """
        Check cache coverage and find missing time ranges.

        This method minimizes API calls by:
        1. Finding all gaps in cached intervals
        2. Treating each cached interval as a discrete timestamp
        3. Gaps are time ranges between consecutive cached timestamps

        Handles both resolutions:
        - Pre-2025-10-01: Hourly intervals (:00:00 only)
        - Post-2025-10-01: Quarter-hourly intervals (:00:00, :15:00, :30:00, :45:00)
        - DST transitions (23h/25h days)

        The API requires an interval count (first: X parameter).
        For historical data (pre-2025-10-01), Tibber only stored hourly prices.
        The API returns whatever intervals exist for the requested period.

        Args:
            cached_intervals: List of cached intervals (may be empty).
            start_time_iso: ISO timestamp string (inclusive).
            end_time_iso: ISO timestamp string (exclusive).

        Returns:
            List of (start_iso, end_iso) tuples representing missing ranges.
            Each tuple represents a continuous time span that needs fetching.
            Ranges are automatically split at resolution change boundary.

        Example:
            Requested: 2025-11-13T00:00:00 to 2025-11-13T02:00:00
            Cached: [00:00, 00:15, 01:30, 01:45]
            Gaps: [(00:15, 01:30)] - missing intervals between groups
        """
        if not cached_intervals:
            # No cache → fetch entire range
            return [(start_time_iso, end_time_iso)]
        # Filter and sort cached intervals within requested range
        # NOTE(review): lexicographic ISO comparison assumes all "startsAt"
        # strings use a uniform offset format — confirm upstream normalization.
        in_range_intervals = [
            interval for interval in cached_intervals if start_time_iso <= interval["startsAt"] < end_time_iso
        ]
        sorted_intervals = sorted(in_range_intervals, key=lambda x: x["startsAt"])
        if not sorted_intervals:
            # All cached intervals are outside requested range
            return [(start_time_iso, end_time_iso)]
        missing_ranges = []
        # Parse start/end times once
        start_time_dt = datetime.fromisoformat(start_time_iso)
        end_time_dt = datetime.fromisoformat(end_time_iso)
        # Get first cached interval datetime for resolution logic
        first_cached_dt = datetime.fromisoformat(sorted_intervals[0]["startsAt"])
        # Boundary constant is defined in UTC; re-anchor it to the cached
        # intervals' tzinfo so local-time comparisons below are consistent.
        resolution_change_dt = RESOLUTION_CHANGE_DATETIME.replace(tzinfo=first_cached_dt.tzinfo)
        # Check gap before first cached interval
        time_diff_before_first = (first_cached_dt - start_time_dt).total_seconds()
        if time_diff_before_first > TIME_TOLERANCE_SECONDS:
            missing_ranges.append((start_time_iso, sorted_intervals[0]["startsAt"]))
            _LOGGER_DETAILS.debug(
                "Missing range before first cached interval: %s to %s (%.1f seconds)",
                start_time_iso,
                sorted_intervals[0]["startsAt"],
                time_diff_before_first,
            )
        # Check gaps between consecutive cached intervals
        for i in range(len(sorted_intervals) - 1):
            current_interval = sorted_intervals[i]
            next_interval = sorted_intervals[i + 1]
            current_start = current_interval["startsAt"]
            next_start = next_interval["startsAt"]
            # Parse to datetime for accurate time difference
            current_dt = datetime.fromisoformat(current_start)
            next_dt = datetime.fromisoformat(next_start)
            # Calculate time difference in minutes
            time_diff_minutes = (next_dt - current_dt).total_seconds() / 60
            # Determine expected interval length based on date
            expected_interval_minutes = (
                INTERVAL_HOURLY if current_dt < resolution_change_dt else INTERVAL_QUARTER_HOURLY
            )
            # Only create gap if intervals are NOT consecutive
            # (tolerance absorbs sub-minute jitter in timestamps)
            if time_diff_minutes > expected_interval_minutes + TIME_TOLERANCE_MINUTES:
                # Gap exists - missing intervals between them
                # Missing range starts AFTER current interval ends
                current_interval_end = current_dt + timedelta(minutes=expected_interval_minutes)
                missing_ranges.append((current_interval_end.isoformat(), next_start))
                _LOGGER_DETAILS.debug(
                    "Missing range between cached intervals: %s (ends at %s) to %s (%.1f min, expected %d min)",
                    current_start,
                    current_interval_end.isoformat(),
                    next_start,
                    time_diff_minutes,
                    expected_interval_minutes,
                )
        # Check gap after last cached interval
        # An interval's startsAt time represents the START of that interval.
        # The interval covers [startsAt, startsAt + interval_length).
        # So the last interval ENDS at (startsAt + interval_length), not at startsAt!
        last_cached_dt = datetime.fromisoformat(sorted_intervals[-1]["startsAt"])
        # Calculate when the last interval ENDS
        interval_minutes = INTERVAL_QUARTER_HOURLY if last_cached_dt >= resolution_change_dt else INTERVAL_HOURLY
        last_interval_end_dt = last_cached_dt + timedelta(minutes=interval_minutes)
        # Only create gap if there's uncovered time AFTER the last interval ends
        time_diff_after_last = (end_time_dt - last_interval_end_dt).total_seconds()
        # Need at least one full interval of gap
        min_gap_seconds = MIN_GAP_QUARTER_HOURLY if last_cached_dt >= resolution_change_dt else MIN_GAP_HOURLY
        if time_diff_after_last >= min_gap_seconds:
            # Missing range starts AFTER the last cached interval ends
            missing_ranges.append((last_interval_end_dt.isoformat(), end_time_iso))
            _LOGGER_DETAILS.debug(
                "Missing range after last cached interval: %s (ends at %s) to %s (%.1f seconds, need >= %d)",
                sorted_intervals[-1]["startsAt"],
                last_interval_end_dt.isoformat(),
                end_time_iso,
                time_diff_after_last,
                min_gap_seconds,
            )
        if not missing_ranges:
            _LOGGER.debug(
                "Full coverage - all intervals cached for range %s to %s",
                start_time_iso,
                end_time_iso,
            )
            return missing_ranges
        # Split ranges at resolution change boundary (2025-10-01 00:00:00)
        # This simplifies interval count calculation in API calls:
        # - Pre-2025-10-01: Always hourly (1 interval/hour)
        # - Post-2025-10-01: Always quarter-hourly (4 intervals/hour)
        return self._split_at_resolution_boundary(missing_ranges)

    def _split_at_resolution_boundary(self, ranges: list[tuple[str, str]]) -> list[tuple[str, str]]:
        """
        Split time ranges at resolution change boundary.

        Args:
            ranges: List of (start_iso, end_iso) tuples.

        Returns:
            List of ranges split at 2025-10-01T00:00:00 boundary.
        """
        # NOTE(review): RESOLUTION_CHANGE_ISO has no timezone suffix, so a
        # split produces one tz-naive endpoint that fromisoformat() later
        # parses as a naive datetime — confirm downstream handles the mix
        # of naive and aware datetimes correctly.
        split_ranges = []
        for start_iso, end_iso in ranges:
            # Check if range crosses the boundary
            if start_iso < RESOLUTION_CHANGE_ISO < end_iso:
                # Split into two ranges: before and after boundary
                split_ranges.append((start_iso, RESOLUTION_CHANGE_ISO))
                split_ranges.append((RESOLUTION_CHANGE_ISO, end_iso))
                _LOGGER_DETAILS.debug(
                    "Split range at resolution boundary: (%s, %s) → (%s, %s) + (%s, %s)",
                    start_iso,
                    end_iso,
                    start_iso,
                    RESOLUTION_CHANGE_ISO,
                    RESOLUTION_CHANGE_ISO,
                    end_iso,
                )
            else:
                # Range doesn't cross boundary - keep as is
                split_ranges.append((start_iso, end_iso))
        return split_ranges

    async def fetch_missing_ranges(
        self,
        api_client: TibberPricesApiClient,
        user_data: dict[str, Any],
        missing_ranges: list[tuple[str, str]],
        *,
        on_intervals_fetched: Callable[[list[dict[str, Any]], str], None] | None = None,
    ) -> list[list[dict[str, Any]]]:
        """
        Fetch missing intervals from API.

        Makes one API call per missing range. Uses routing logic to select
        the optimal endpoint (PRICE_INFO vs PRICE_INFO_RANGE).

        Args:
            api_client: TibberPricesApiClient instance for API calls.
            user_data: User data dict containing home metadata.
            missing_ranges: List of (start_iso, end_iso) tuples to fetch.
            on_intervals_fetched: Optional callback for each fetch result.
                Receives (intervals, fetch_time_iso).

        Returns:
            List of interval lists (one per missing range).
            Each sublist contains intervals from one API call.

        Raises:
            TibberPricesApiClientError: If API calls fail.
        """
        # Import here to avoid circular dependency
        from custom_components.tibber_prices.interval_pool.routing import (  # noqa: PLC0415
            get_price_intervals_for_range,
        )
        # Single timestamp shared by ALL ranges fetched in this call, so the
        # resulting fetch groups carry the same fetched_at marker.
        fetch_time_iso = dt_utils.now().isoformat()
        all_fetched_intervals = []
        for idx, (missing_start_iso, missing_end_iso) in enumerate(missing_ranges, start=1):
            _LOGGER_DETAILS.debug(
                "Fetching from Tibber API (%d/%d) for home %s: range %s to %s",
                idx,
                len(missing_ranges),
                self._home_id,
                missing_start_iso,
                missing_end_iso,
            )
            # Parse ISO strings back to datetime for API call
            missing_start_dt = datetime.fromisoformat(missing_start_iso)
            missing_end_dt = datetime.fromisoformat(missing_end_iso)
            # Fetch intervals from API - routing returns ALL intervals (unfiltered)
            fetched_intervals = await get_price_intervals_for_range(
                api_client=api_client,
                home_id=self._home_id,
                user_data=user_data,
                start_time=missing_start_dt,
                end_time=missing_end_dt,
            )
            all_fetched_intervals.append(fetched_intervals)
            _LOGGER_DETAILS.debug(
                "Received %d intervals from Tibber API for home %s",
                len(fetched_intervals),
                self._home_id,
            )
            # Notify callback if provided (for immediate caching)
            if on_intervals_fetched:
                on_intervals_fetched(fetched_intervals, fetch_time_iso)
        return all_fetched_intervals

View file

@ -1,283 +0,0 @@
"""Garbage collector for interval cache eviction."""
from __future__ import annotations
import logging
from datetime import datetime
from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from .cache import TibberPricesIntervalPoolFetchGroupCache
from .index import TibberPricesIntervalPoolTimestampIndex
_LOGGER = logging.getLogger(__name__)
_LOGGER_DETAILS = logging.getLogger(__name__ + ".details")
# Maximum number of intervals to cache
# 10 days @ 15min resolution = 10 * 96 = 960 intervals
MAX_CACHE_SIZE = 960
def _normalize_starts_at(starts_at: datetime | str) -> str:
"""Normalize startsAt to consistent format (YYYY-MM-DDTHH:MM:SS)."""
if isinstance(starts_at, datetime):
return starts_at.strftime("%Y-%m-%dT%H:%M:%S")
return starts_at[:19]
class TibberPricesIntervalPoolGarbageCollector:
    """
    Manages cache eviction and dead interval cleanup.

    Eviction Strategy:
        - Evict oldest fetch groups first (by fetched_at timestamp)
        - Protected intervals (day-before-yesterday to tomorrow) are NEVER evicted
        - Evict complete fetch groups, not individual intervals

    Dead Interval Cleanup:
        When intervals are "touched" (re-fetched), they move to a new fetch group
        but remain in the old group. This creates "dead intervals" that occupy
        memory but are no longer referenced by the index.
    """

    def __init__(
        self,
        cache: TibberPricesIntervalPoolFetchGroupCache,
        index: TibberPricesIntervalPoolTimestampIndex,
        home_id: str,
    ) -> None:
        """
        Initialize garbage collector.

        Args:
            cache: Fetch group cache to manage.
            index: Timestamp index for living interval detection.
            home_id: Home ID for logging purposes.
        """
        self._home_id = home_id
        self._cache = cache
        self._index = index

    def run_gc(self) -> bool:
        """
        Run garbage collection if needed.

        Process:
        1. Clean up dead intervals from all fetch groups
        2. Count total intervals
        3. If > MAX_CACHE_SIZE, evict oldest fetch groups
        4. Rebuild index after eviction

        Returns:
            True if any cleanup or eviction happened, False otherwise.
        """
        fetch_groups = self._cache.get_fetch_groups()
        # Phase 1: Clean up dead intervals
        dead_count = self._cleanup_dead_intervals(fetch_groups)
        if dead_count > 0:
            _LOGGER_DETAILS.debug(
                "GC cleaned %d dead intervals (home %s)",
                dead_count,
                self._home_id,
            )
        # Phase 1.5: Remove empty fetch groups (after dead interval cleanup)
        empty_removed = self._remove_empty_groups(fetch_groups)
        if empty_removed > 0:
            _LOGGER_DETAILS.debug(
                "GC removed %d empty fetch groups (home %s)",
                empty_removed,
                self._home_id,
            )
        # Phase 2: Count total intervals after cleanup
        total_intervals = self._cache.count_total_intervals()
        if total_intervals <= MAX_CACHE_SIZE:
            _LOGGER_DETAILS.debug(
                "GC cleanup only for home %s: %d intervals <= %d limit (no eviction needed)",
                self._home_id,
                total_intervals,
                MAX_CACHE_SIZE,
            )
            return dead_count > 0
        # Phase 3: Evict old fetch groups
        # NOTE(review): fetch_groups may have been replaced inside
        # _remove_empty_groups via set_fetch_groups; this local still refers
        # to the ORIGINAL list when empty_removed == 0 (identical contents),
        # but diverges when groups were removed — verify eviction indices
        # align with the cache's current list in that case.
        evicted_indices = self._evict_old_groups(fetch_groups, total_intervals)
        if not evicted_indices:
            # All intervals are protected, cannot evict
            return dead_count > 0 or empty_removed > 0
        # Phase 4: Rebuild cache and index
        new_fetch_groups = [group for idx, group in enumerate(fetch_groups) if idx not in evicted_indices]
        self._cache.set_fetch_groups(new_fetch_groups)
        self._index.rebuild(new_fetch_groups)
        _LOGGER_DETAILS.debug(
            "GC evicted %d fetch groups (home %s): %d intervals remaining",
            len(evicted_indices),
            self._home_id,
            self._cache.count_total_intervals(),
        )
        return True

    def _remove_empty_groups(self, fetch_groups: list[dict[str, Any]]) -> int:
        """
        Remove fetch groups with no intervals.

        After dead interval cleanup, some groups may be completely empty.
        These should be removed to prevent memory accumulation.

        Note: This modifies the cache's internal list in-place and rebuilds
        the index to maintain consistency.

        Args:
            fetch_groups: List of fetch groups (will be modified).

        Returns:
            Number of empty groups removed.
        """
        # Find non-empty groups
        non_empty_groups = [group for group in fetch_groups if group["intervals"]]
        removed_count = len(fetch_groups) - len(non_empty_groups)
        if removed_count > 0:
            # Update cache with filtered list
            self._cache.set_fetch_groups(non_empty_groups)
            # Rebuild index since group indices changed
            self._index.rebuild(non_empty_groups)
        return removed_count

    def _cleanup_dead_intervals(self, fetch_groups: list[dict[str, Any]]) -> int:
        """
        Remove dead intervals from all fetch groups.

        Dead intervals are no longer referenced by the index (they were touched
        and moved to a newer fetch group).

        Args:
            fetch_groups: List of fetch groups to clean.

        Returns:
            Total number of dead intervals removed.
        """
        total_dead = 0
        for group_idx, group in enumerate(fetch_groups):
            old_intervals = group["intervals"]
            if not old_intervals:
                continue
            # Find living intervals (still in index at correct position)
            living_intervals = []
            for interval_idx, interval in enumerate(old_intervals):
                starts_at_normalized = _normalize_starts_at(interval["startsAt"])
                index_entry = self._index.get(starts_at_normalized)
                if index_entry is not None:
                    # Check if index points to THIS position
                    if index_entry["fetch_group_index"] == group_idx and index_entry["interval_index"] == interval_idx:
                        living_intervals.append(interval)
                    else:
                        # Dead: index points elsewhere
                        total_dead += 1
                else:
                    # Dead: not in index
                    total_dead += 1
            # Replace with cleaned list if any dead intervals found
            if len(living_intervals) < len(old_intervals):
                group["intervals"] = living_intervals
                dead_count = len(old_intervals) - len(living_intervals)
                _LOGGER_DETAILS.debug(
                    "GC cleaned %d dead intervals from fetch group %d (home %s)",
                    dead_count,
                    group_idx,
                    self._home_id,
                )
        return total_dead

    def _evict_old_groups(
        self,
        fetch_groups: list[dict[str, Any]],
        total_intervals: int,
    ) -> set[int]:
        """
        Determine which fetch groups to evict to stay under MAX_CACHE_SIZE.

        Only evicts groups without protected intervals.
        Groups evicted oldest-first (by fetched_at).

        Args:
            fetch_groups: List of fetch groups.
            total_intervals: Total interval count.

        Returns:
            Set of fetch group indices to evict.
        """
        start_protected_iso, end_protected_iso = self._cache.get_protected_range()
        _LOGGER_DETAILS.debug(
            "Protected range: %s to %s",
            start_protected_iso[:10],
            end_protected_iso[:10],
        )
        # Classify: protected vs evictable
        # A group containing even ONE protected interval is kept whole.
        evictable_groups = []
        for idx, group in enumerate(fetch_groups):
            has_protected = any(self._cache.is_interval_protected(interval) for interval in group["intervals"])
            if not has_protected:
                evictable_groups.append((idx, group))
        # Sort by fetched_at (oldest first)
        evictable_groups.sort(key=lambda x: x[1]["fetched_at"])
        _LOGGER_DETAILS.debug(
            "GC: %d protected groups, %d evictable groups",
            len(fetch_groups) - len(evictable_groups),
            len(evictable_groups),
        )
        # Evict until under limit
        evicted_indices = set()
        remaining = total_intervals
        for idx, group in evictable_groups:
            if remaining <= MAX_CACHE_SIZE:
                break
            group_count = len(group["intervals"])
            evicted_indices.add(idx)
            remaining -= group_count
            _LOGGER_DETAILS.debug(
                "GC evicting group %d (fetched %s): %d intervals, %d remaining",
                idx,
                group["fetched_at"].isoformat(),
                group_count,
                remaining,
            )
        if not evicted_indices:
            _LOGGER.warning(
                "GC cannot evict any groups (home %s): all %d intervals are protected",
                self._home_id,
                total_intervals,
            )
        return evicted_indices

View file

@ -1,173 +0,0 @@
"""Timestamp index for O(1) interval lookups."""
from __future__ import annotations
import logging
from typing import Any
_LOGGER = logging.getLogger(__name__)
_LOGGER_DETAILS = logging.getLogger(__name__ + ".details")
class TibberPricesIntervalPoolTimestampIndex:
    """
    Fast O(1) timestamp-based interval lookup.

    Maps normalized ISO timestamp strings to fetch group + interval indices.

    Structure:
        {
            "2025-11-25T00:00:00": {
                "fetch_group_index": 0,  # Index in fetch groups list
                "interval_index": 2      # Index within that group's intervals
            },
            ...
        }

    Normalization:
        Timestamps are truncated to 19 characters (YYYY-MM-DDTHH:MM:SS),
        dropping microseconds and timezone info, so lookups reduce to
        plain string equality.
    """

    def __init__(self) -> None:
        """Create an index with no entries."""
        self._index: dict[str, dict[str, int]] = {}

    def add(
        self,
        interval: dict[str, Any],
        fetch_group_index: int,
        interval_index: int,
    ) -> None:
        """
        Register one interval's location in the index.

        Args:
            interval: Interval dict with "startsAt" ISO timestamp.
            fetch_group_index: Index of the fetch group holding the interval.
            interval_index: Position inside that group's intervals list.
        """
        key = self._normalize_timestamp(interval["startsAt"])
        self._index[key] = {
            "fetch_group_index": fetch_group_index,
            "interval_index": interval_index,
        }

    def get(self, timestamp: str) -> dict[str, int] | None:
        """
        Look up an interval's location by timestamp.

        Args:
            timestamp: ISO timestamp string (normalized internally).

        Returns:
            Dict with fetch_group_index and interval_index, or None if absent.
        """
        return self._index.get(self._normalize_timestamp(timestamp))

    def contains(self, timestamp: str) -> bool:
        """
        Report whether a timestamp is indexed.

        Args:
            timestamp: ISO timestamp string (normalized internally).

        Returns:
            True when the normalized timestamp has an entry.
        """
        return self._normalize_timestamp(timestamp) in self._index

    def remove(self, timestamp: str) -> None:
        """
        Drop a timestamp from the index; missing keys are ignored.

        Args:
            timestamp: ISO timestamp string (normalized internally).
        """
        self._index.pop(self._normalize_timestamp(timestamp), None)

    def update_batch(
        self,
        updates: list[tuple[str, int, int]],
    ) -> None:
        """
        Apply several index updates in one pass.

        Equivalent to calling add() per entry but without repeated
        per-call overhead.

        Args:
            updates: (timestamp, fetch_group_index, interval_index) tuples.
                Timestamps are normalized automatically.
        """
        for raw_ts, group_idx, item_idx in updates:
            self._index[self._normalize_timestamp(raw_ts)] = {
                "fetch_group_index": group_idx,
                "interval_index": item_idx,
            }

    def clear(self) -> None:
        """Remove every entry from the index."""
        self._index.clear()

    def rebuild(self, fetch_groups: list[dict[str, Any]]) -> None:
        """
        Re-create the index from scratch out of the given fetch groups.

        Used after GC operations that modify fetch group structure.

        Args:
            fetch_groups: List of fetch group dicts.
        """
        self._index.clear()
        for group_idx, group in enumerate(fetch_groups):
            for item_idx, interval in enumerate(group["intervals"]):
                self._index[self._normalize_timestamp(interval["startsAt"])] = {
                    "fetch_group_index": group_idx,
                    "interval_index": item_idx,
                }
        _LOGGER_DETAILS.debug(
            "Rebuilt index: %d timestamps indexed",
            len(self._index),
        )

    def get_raw_index(self) -> dict[str, dict[str, int]]:
        """Expose the underlying dict (for serialization)."""
        return self._index

    def count(self) -> int:
        """Return how many timestamps are indexed."""
        return len(self._index)

    @staticmethod
    def _normalize_timestamp(timestamp: str) -> str:
        """
        Normalize an ISO timestamp for indexing.

        Keeps only the first 19 characters (YYYY-MM-DDTHH:MM:SS), discarding
        microseconds and timezone suffix for consistent string comparison.

        Args:
            timestamp: Full ISO timestamp string.

        Returns:
            Normalized timestamp (19 chars).

        Example:
            "2025-11-25T00:00:00.000+01:00" -> "2025-11-25T00:00:00"
        """
        return timestamp[:19]

View file

@ -1,830 +0,0 @@
"""Interval pool manager - main coordinator for interval caching."""
from __future__ import annotations
import asyncio
import contextlib
import logging
from datetime import datetime, timedelta
from typing import TYPE_CHECKING, Any
from zoneinfo import ZoneInfo
from custom_components.tibber_prices.api.exceptions import TibberPricesApiClientError
from homeassistant.util import dt as dt_utils
from .cache import TibberPricesIntervalPoolFetchGroupCache
from .fetcher import TibberPricesIntervalPoolFetcher
from .garbage_collector import MAX_CACHE_SIZE, TibberPricesIntervalPoolGarbageCollector
from .index import TibberPricesIntervalPoolTimestampIndex
from .storage import async_save_pool_state
if TYPE_CHECKING:
from custom_components.tibber_prices.api.client import TibberPricesApiClient
from custom_components.tibber_prices.coordinator.time_service import (
TibberPricesTimeService,
)
_LOGGER = logging.getLogger(__name__)
_LOGGER_DETAILS = logging.getLogger(__name__ + ".details")
# Interval lengths in minutes
INTERVAL_HOURLY = 60
INTERVAL_QUARTER_HOURLY = 15
# Debounce delay for auto-save (seconds)
DEBOUNCE_DELAY_SECONDS = 3.0
def _normalize_starts_at(starts_at: datetime | str) -> str:
"""Normalize startsAt to consistent format (YYYY-MM-DDTHH:MM:SS)."""
if isinstance(starts_at, datetime):
return starts_at.strftime("%Y-%m-%dT%H:%M:%S")
return starts_at[:19]
class TibberPricesIntervalPool:
    """
    High-performance interval cache manager for a single Tibber home.

    Coordinates all interval pool components:
    - TibberPricesIntervalPoolFetchGroupCache: Stores fetch groups and manages protected ranges
    - TibberPricesIntervalPoolTimestampIndex: Provides O(1) timestamp lookups
    - TibberPricesIntervalPoolGarbageCollector: Evicts old fetch groups when cache exceeds limits
    - TibberPricesIntervalPoolFetcher: Detects gaps and fetches missing intervals from API

    Architecture:
    - Each manager handles exactly ONE home (1:1 with config entry)
    - home_id is immutable after initialization
    - All operations are thread-safe via asyncio locks

    Features:
    - Fetch-time based eviction (oldest fetch groups removed first)
    - Protected date range (day-before-yesterday to tomorrow never evicted)
    - Fast O(1) lookups by timestamp
    - Automatic gap detection and API fetching
    - Debounced auto-save to prevent excessive I/O

    Example:
        manager = TibberPricesIntervalPool(home_id="abc123", hass=hass, entry_id=entry.entry_id)
        intervals = await manager.get_intervals(
            api_client=client,
            user_data=data,
            start_time=datetime(...),
            end_time=datetime(...),
        )
    """
def __init__(
    self,
    *,
    home_id: str,
    api: TibberPricesApiClient,
    hass: Any | None = None,
    entry_id: str | None = None,
    time_service: TibberPricesTimeService | None = None,
) -> None:
    """
    Initialize interval pool manager.

    Args:
        home_id: Tibber home ID (required, immutable).
        api: API client for fetching intervals.
        hass: HomeAssistant instance for auto-save (optional).
        entry_id: Config entry ID for auto-save (optional).
        time_service: TimeService for time-travel support (optional).
            If None, uses real time (dt_utils.now()).
    """
    self._home_id = home_id
    self._time_service = time_service
    # Initialize components with dependency injection; cache and index are
    # shared with the GC and fetcher so all four see the same state.
    self._cache = TibberPricesIntervalPoolFetchGroupCache(time_service=time_service)
    self._index = TibberPricesIntervalPoolTimestampIndex()
    self._gc = TibberPricesIntervalPoolGarbageCollector(self._cache, self._index, home_id)
    self._fetcher = TibberPricesIntervalPoolFetcher(api, self._cache, self._index, home_id)
    # Auto-save support: both hass and entry_id must be set for saves to run.
    self._hass = hass
    self._entry_id = entry_id
    # Strong references to in-flight tasks (prevents premature task GC).
    self._background_tasks: set[asyncio.Task] = set()
    self._save_debounce_task: asyncio.Task | None = None
    self._save_lock = asyncio.Lock()
async def get_intervals(
    self,
    api_client: TibberPricesApiClient,
    user_data: dict[str, Any],
    start_time: datetime,
    end_time: datetime,
) -> tuple[list[dict[str, Any]], bool]:
    """
    Get price intervals for time range (cached + fetch missing).

    Main entry point for retrieving intervals. Coordinates:
    1. Check cache for existing intervals
    2. Detect missing time ranges
    3. Fetch missing ranges from API
    4. Add new intervals to cache (may trigger GC)
    5. Return complete interval list

    User receives ALL requested intervals even if cache exceeds limits.
    Cache only keeps the most recent intervals (FIFO eviction).

    Args:
        api_client: TibberPricesApiClient instance for API calls.
        user_data: User data dict containing home metadata.
        start_time: Start of range (inclusive, timezone-aware).
        end_time: End of range (exclusive, timezone-aware).

    Returns:
        Tuple of (intervals, api_called):
        - intervals: List of price interval dicts, sorted by startsAt.
          Contains ALL intervals in requested range (cached + fetched).
        - api_called: True if API was called to fetch missing data, False if all from cache.

    Raises:
        TibberPricesApiClientError: If API calls fail or validation errors.
    """
    # Validate inputs
    if not user_data:
        msg = "User data required for timezone-aware price fetching"
        raise TibberPricesApiClientError(msg)
    if start_time >= end_time:
        msg = f"Invalid time range: start_time ({start_time}) must be before end_time ({end_time})"
        raise TibberPricesApiClientError(msg)

    # Convert to ISO strings for cache operations
    start_time_iso = start_time.isoformat()
    end_time_iso = end_time.isoformat()

    _LOGGER_DETAILS.debug(
        "Interval pool request for home %s: range %s to %s",
        self._home_id,
        start_time_iso,
        end_time_iso,
    )

    # Get cached intervals using index
    cached_intervals = self._get_cached_intervals(start_time_iso, end_time_iso)

    # Check coverage - find ranges not in cache
    missing_ranges = self._fetcher.check_coverage(cached_intervals, start_time_iso, end_time_iso)
    if missing_ranges:
        _LOGGER_DETAILS.debug(
            "Coverage check for home %s: %d range(s) missing - will fetch from API",
            self._home_id,
            len(missing_ranges),
        )
    else:
        _LOGGER_DETAILS.debug(
            "Coverage check for home %s: full coverage in cache - no API calls needed",
            self._home_id,
        )

    # Fetch missing ranges from API
    if missing_ranges:
        # NOTE: fetch time uses real time (dt_utils.now()), not TimeService.
        fetch_time_iso = dt_utils.now().isoformat()
        # Fetch with callback for immediate caching: each API response is
        # added to the cache as soon as it arrives.
        await self._fetcher.fetch_missing_ranges(
            api_client=api_client,
            user_data=user_data,
            missing_ranges=missing_ranges,
            on_intervals_fetched=lambda intervals, _: self._add_intervals(intervals, fetch_time_iso),
        )

    # After caching all API responses, read from cache again to get final result
    # This ensures we return exactly what user requested, filtering out extra intervals
    final_result = self._get_cached_intervals(start_time_iso, end_time_iso)

    # Track if API was called (True if any missing ranges were fetched)
    api_called = len(missing_ranges) > 0

    _LOGGER_DETAILS.debug(
        "Pool returning %d intervals for home %s (from cache: %d, fetched from API: %d ranges, api_called=%s)",
        len(final_result),
        self._home_id,
        len(cached_intervals),
        len(missing_ranges),
        api_called,
    )
    return final_result, api_called
async def get_sensor_data(
    self,
    api_client: TibberPricesApiClient,
    user_data: dict[str, Any],
    home_timezone: str | None = None,
    *,
    include_tomorrow: bool = True,
) -> tuple[list[dict[str, Any]], bool]:
    """
    Get price intervals for sensor data (day-before-yesterday to end-of-tomorrow).

    Convenience method for coordinator/sensors that need the standard 4-day window:
    - Day before yesterday (for trailing 24h averages at midnight)
    - Yesterday (for trailing 24h averages)
    - Today (current prices)
    - Tomorrow (if available in cache)

    IMPORTANT - Two distinct behaviors:
    1. API FETCH: Controlled by include_tomorrow flag
       - include_tomorrow=False: only fetch up to end of today (prevents API spam before 13:00)
       - include_tomorrow=True: fetch including tomorrow data
    2. RETURN DATA: Always returns full protected range (including tomorrow if cached)
       - This ensures cached tomorrow data is used even if include_tomorrow=False

    The separation prevents the following bug:
    - If include_tomorrow affected both fetch AND return, cached tomorrow data
      would be lost when include_tomorrow=False, causing infinite refresh loops.

    Args:
        api_client: TibberPricesApiClient instance for API calls.
        user_data: User data dict containing home metadata.
        home_timezone: Optional timezone string (e.g., "Europe/Berlin").
        include_tomorrow: If True, fetch tomorrow's data from API. If False,
            only fetch up to end of today. Default True.
            DOES NOT affect returned data - always returns full range.

    Returns:
        Tuple of (intervals, api_called):
        - intervals: List of price interval dicts for the 4-day window (including any cached
          tomorrow data), sorted by startsAt.
        - api_called: True if API was called to fetch missing data, False if all from cache.
    """
    # Determine timezone: explicit argument wins, else look it up in user_data.
    tz_str = home_timezone
    if not tz_str:
        tz_str = self._extract_timezone_from_user_data(user_data)

    # Calculate range in home's timezone; fall back to raw `now` if unknown.
    tz = ZoneInfo(tz_str) if tz_str else None
    now = self._time_service.now() if self._time_service else dt_utils.now()
    now_local = now.astimezone(tz) if tz else now

    # Day before yesterday 00:00 (start) - same for both fetch and return
    day_before_yesterday = (now_local - timedelta(days=2)).replace(hour=0, minute=0, second=0, microsecond=0)
    # End of tomorrow (full protected range) - used for RETURN data
    end_of_tomorrow = (now_local + timedelta(days=2)).replace(hour=0, minute=0, second=0, microsecond=0)

    # API fetch range depends on include_tomorrow flag
    if include_tomorrow:
        fetch_end_time = end_of_tomorrow
        fetch_desc = "end-of-tomorrow"
    else:
        # Only fetch up to end of today (prevents API spam before 13:00)
        fetch_end_time = (now_local + timedelta(days=1)).replace(hour=0, minute=0, second=0, microsecond=0)
        fetch_desc = "end-of-today"

    _LOGGER.debug(
        "Sensor data request for home %s: fetch %s to %s (%s), return up to %s",
        self._home_id,
        day_before_yesterday.isoformat(),
        fetch_end_time.isoformat(),
        fetch_desc,
        end_of_tomorrow.isoformat(),
    )

    # Fetch data (may be partial if include_tomorrow=False)
    _intervals, api_called = await self.get_intervals(
        api_client=api_client,
        user_data=user_data,
        start_time=day_before_yesterday,
        end_time=fetch_end_time,
    )

    # Return FULL protected range (including any cached tomorrow data)
    # This ensures cached tomorrow data is available even when include_tomorrow=False
    final_intervals = self._get_cached_intervals(
        day_before_yesterday.isoformat(),
        end_of_tomorrow.isoformat(),
    )
    return final_intervals, api_called
def get_pool_stats(self) -> dict[str, Any]:
    """
    Get statistics about the interval pool.

    Returns comprehensive statistics for diagnostic sensors, separated into:
    - Sensor intervals (protected range: day-before-yesterday to tomorrow)
    - Cache statistics (entire pool including service-requested data)

    Protected Range:
        The protected range covers 4 days at 15-min resolution = 384 intervals.
        These intervals are never evicted by garbage collection.

    Cache Fill Level:
        Shows how full the cache is relative to MAX_CACHE_SIZE (960).
        100% is not bad - just means we're using the available space.
        GC will evict oldest non-protected intervals when limit is reached.

    Returns:
        Dict with sensor intervals, cache stats, and timestamps.
    """
    fetch_groups = self._cache.get_fetch_groups()

    # === Sensor Intervals (Protected Range) ===
    sensor_stats = self._get_sensor_interval_stats()

    # === Cache Statistics (Entire Pool) ===
    cache_total = self._index.count()
    cache_limit = MAX_CACHE_SIZE
    cache_fill_percent = round((cache_total / cache_limit) * 100, 1) if cache_limit > 0 else 0
    cache_extra = max(0, cache_total - sensor_stats["count"])  # Intervals outside protected range

    # === Timestamps ===
    # Last sensor fetch (for protected range data)
    last_sensor_fetch: str | None = None
    oldest_interval: str | None = None
    newest_interval: str | None = None
    if fetch_groups:
        # Find newest fetch group (most recent API call)
        newest_group = max(fetch_groups, key=lambda g: g["fetched_at"])
        last_sensor_fetch = newest_group["fetched_at"].isoformat()
        # Find oldest and newest intervals across all fetch groups
        # (index keys are normalized timestamps, so min/max is lexicographic
        # and equals chronological order)
        all_timestamps = list(self._index.get_raw_index().keys())
        if all_timestamps:
            oldest_interval = min(all_timestamps)
            newest_interval = max(all_timestamps)

    return {
        # Sensor intervals (protected range)
        "sensor_intervals_count": sensor_stats["count"],
        "sensor_intervals_expected": sensor_stats["expected"],
        "sensor_intervals_has_gaps": sensor_stats["has_gaps"],
        # Cache statistics
        "cache_intervals_total": cache_total,
        "cache_intervals_limit": cache_limit,
        "cache_fill_percent": cache_fill_percent,
        "cache_intervals_extra": cache_extra,
        # Timestamps
        "last_sensor_fetch": last_sensor_fetch,
        "cache_oldest_interval": oldest_interval,
        "cache_newest_interval": newest_interval,
        # Fetch groups (API calls)
        "fetch_groups_count": len(fetch_groups),
    }
def _get_sensor_interval_stats(self) -> dict[str, Any]:
"""
Get statistics for sensor intervals (protected range).
Protected range: day-before-yesterday 00:00 to day-after-tomorrow 00:00.
Expected: 4 days * 24 hours * 4 intervals = 384 intervals.
Returns:
Dict with count, expected, and has_gaps.
"""
start_iso, end_iso = self._cache.get_protected_range()
start_dt = datetime.fromisoformat(start_iso)
end_dt = datetime.fromisoformat(end_iso)
# Count expected intervals (15-min resolution)
expected_count = int((end_dt - start_dt).total_seconds() / (15 * 60))
# Count actual intervals in range
actual_count = 0
current_dt = start_dt
while current_dt < end_dt:
current_key = current_dt.isoformat()[:19]
if self._index.contains(current_key):
actual_count += 1
current_dt += timedelta(minutes=15)
return {
"count": actual_count,
"expected": expected_count,
"has_gaps": actual_count < expected_count,
}
def _has_gaps_in_protected_range(self) -> bool:
"""
Check if there are gaps in the protected date range.
Delegates to _get_sensor_interval_stats() for consistency.
Returns:
True if any gaps exist, False if protected range is complete.
"""
return self._get_sensor_interval_stats()["has_gaps"]
def _extract_timezone_from_user_data(self, user_data: dict[str, Any]) -> str | None:
"""Extract timezone for this home from user_data."""
if not user_data:
return None
viewer = user_data.get("viewer", {})
homes = viewer.get("homes", [])
for home in homes:
if home.get("id") == self._home_id:
return home.get("timeZone")
return None
def _get_cached_intervals(
    self,
    start_time_iso: str,
    end_time_iso: str,
) -> list[dict[str, Any]]:
    """
    Get cached intervals for time range using timestamp index.

    Uses timestamp_index for O(1) lookups per timestamp.

    IMPORTANT: Returns shallow copies of interval dicts to prevent external
    mutations (e.g., by parse_all_timestamps()) from affecting cached data.
    The Pool cache must remain immutable to ensure consistent behavior.

    Args:
        start_time_iso: ISO timestamp string (inclusive).
        end_time_iso: ISO timestamp string (exclusive).

    Returns:
        List of cached interval dicts in time range (may be empty or incomplete).
        Sorted by startsAt timestamp. Each dict is a shallow copy.
    """
    # Parse query range once
    start_time_dt = datetime.fromisoformat(start_time_iso)
    end_time_dt = datetime.fromisoformat(end_time_iso)

    # CRITICAL: Use NAIVE local timestamps for iteration.
    #
    # Index keys are naive local timestamps (timezone stripped via [:19]).
    # When start and end span a DST transition, they have different UTC offsets
    # (e.g., start=+01:00 CET, end=+02:00 CEST). Using fixed-offset datetimes
    # from fromisoformat() causes the loop to compare UTC values for the end
    # boundary, ending 1 hour early on spring-forward days (or 1 hour late on
    # fall-back days).
    #
    # By iterating in naive local time, we match the index key format exactly
    # and the end boundary comparison works correctly regardless of DST.
    current_naive = start_time_dt.replace(tzinfo=None)
    end_naive = end_time_dt.replace(tzinfo=None)

    # Use index to find intervals: iterate through expected timestamps
    result = []

    # Determine interval step (15 min post-2025-10-01, 60 min pre)
    resolution_change_naive = datetime(2025, 10, 1)  # noqa: DTZ001
    interval_minutes = INTERVAL_QUARTER_HOURLY if current_naive >= resolution_change_naive else INTERVAL_HOURLY

    while current_naive < end_naive:
        # Check if this timestamp exists in index (O(1) lookup)
        current_dt_key = current_naive.isoformat()[:19]
        location = self._index.get(current_dt_key)
        if location is not None:
            # Get interval from fetch group
            fetch_groups = self._cache.get_fetch_groups()
            fetch_group = fetch_groups[location["fetch_group_index"]]
            interval = fetch_group["intervals"][location["interval_index"]]
            # CRITICAL: Return shallow copy to prevent external mutations
            # (e.g., parse_all_timestamps() converts startsAt to datetime in-place)
            result.append(dict(interval))
        # Move to next expected interval
        current_naive += timedelta(minutes=interval_minutes)
        # Handle resolution change boundary: switch from hourly to
        # quarter-hourly stepping once the cursor crosses 2025-10-01.
        if interval_minutes == INTERVAL_HOURLY and current_naive >= resolution_change_naive:
            interval_minutes = INTERVAL_QUARTER_HOURLY

    _LOGGER_DETAILS.debug(
        "Retrieved %d intervals from cache for home %s (range %s to %s)",
        len(result),
        self._home_id,
        start_time_iso,
        end_time_iso,
    )
    return result
def _add_intervals(
    self,
    intervals: list[dict[str, Any]],
    fetch_time_iso: str,
) -> None:
    """
    Add intervals as new fetch group to cache with GC.

    Strategy:
    1. Filter out duplicates (intervals already in cache)
    2. Handle "touch" (move cached intervals to new fetch group)
    3. Add new fetch group to cache
    4. Update timestamp index
    5. Run GC if needed
    6. Schedule debounced auto-save

    Args:
        intervals: List of interval dicts from API.
        fetch_time_iso: ISO timestamp string when intervals were fetched.
    """
    if not intervals:
        return

    fetch_time_dt = datetime.fromisoformat(fetch_time_iso)

    # Classify intervals: new vs already cached
    new_intervals = []
    intervals_to_touch = []
    for interval in intervals:
        starts_at_normalized = _normalize_starts_at(interval["startsAt"])
        if not self._index.contains(starts_at_normalized):
            new_intervals.append(interval)
        else:
            intervals_to_touch.append((starts_at_normalized, interval))
            _LOGGER_DETAILS.debug(
                "Interval %s already cached for home %s, will touch (update fetch time)",
                interval["startsAt"],
                self._home_id,
            )

    # Handle touched intervals: move to new fetch group
    if intervals_to_touch:
        self._touch_intervals(intervals_to_touch, fetch_time_dt)

    if not new_intervals:
        if intervals_to_touch:
            _LOGGER_DETAILS.debug(
                "All %d intervals already cached for home %s (touched only)",
                len(intervals),
                self._home_id,
            )
        return

    # Sort new intervals by startsAt
    new_intervals.sort(key=lambda x: x["startsAt"])

    # Add new fetch group to cache
    fetch_group_index = self._cache.add_fetch_group(new_intervals, fetch_time_dt)

    # Update timestamp index for all new intervals.
    # FIX: removed a dead store of the normalized timestamp that was computed
    # here but never used — _index.add() receives the interval dict itself.
    for interval_index, interval in enumerate(new_intervals):
        self._index.add(interval, fetch_group_index, interval_index)

    _LOGGER_DETAILS.debug(
        "Added fetch group %d to home %s cache: %d new intervals (fetched at %s)",
        fetch_group_index,
        self._home_id,
        len(new_intervals),
        fetch_time_iso,
    )

    # Run GC to evict old fetch groups if needed
    gc_changed_data = self._gc.run_gc()

    # Schedule debounced auto-save if data changed
    data_changed = len(new_intervals) > 0 or len(intervals_to_touch) > 0 or gc_changed_data
    if data_changed and self._hass is not None and self._entry_id is not None:
        self._schedule_debounced_save()
def _touch_intervals(
    self,
    intervals_to_touch: list[tuple[str, dict[str, Any]]],
    fetch_time_dt: datetime,
) -> None:
    """
    Move cached intervals to new fetch group (update fetch time).

    Creates a new fetch group containing references to existing intervals.
    Updates the index to point to the new fetch group. The old fetch groups
    keep their (now dead) copies; to_dict() filters those out at save time.

    Args:
        intervals_to_touch: List of (normalized_timestamp, interval_dict) tuples.
        fetch_time_dt: Datetime when intervals were fetched.
    """
    fetch_groups = self._cache.get_fetch_groups()

    # Create touch fetch group with existing interval references
    touch_intervals = []
    for starts_at_normalized, _interval in intervals_to_touch:
        # Get existing interval from old fetch group
        location = self._index.get(starts_at_normalized)
        if location is None:
            continue  # Should not happen, but be defensive
        old_group = fetch_groups[location["fetch_group_index"]]
        existing_interval = old_group["intervals"][location["interval_index"]]
        touch_intervals.append(existing_interval)

    # Add touch group to cache
    touch_group_index = self._cache.add_fetch_group(touch_intervals, fetch_time_dt)

    # Update index to point to new fetch group using batch operation
    # This is more efficient than individual remove+add calls
    index_updates = [
        (starts_at_normalized, touch_group_index, interval_index)
        for interval_index, (starts_at_normalized, _) in enumerate(intervals_to_touch)
    ]
    self._index.update_batch(index_updates)

    _LOGGER.debug(
        "Touched %d cached intervals for home %s (moved to fetch group %d, fetched at %s)",
        len(intervals_to_touch),
        self._home_id,
        touch_group_index,
        fetch_time_dt.isoformat(),
    )
def _schedule_debounced_save(self) -> None:
    """
    Schedule debounced save with configurable delay.

    Cancels existing timer and starts new one if already scheduled.
    This prevents multiple saves during rapid successive changes.
    """
    # Cancel existing debounce timer if running
    if self._save_debounce_task is not None and not self._save_debounce_task.done():
        self._save_debounce_task.cancel()
        _LOGGER.debug("Cancelled pending auto-save (new changes detected, resetting timer)")

    # Schedule new debounced save
    task = asyncio.create_task(
        self._debounced_save_worker(),
        name=f"interval_pool_debounce_{self._entry_id}",
    )
    self._save_debounce_task = task
    # Keep a strong reference until the task finishes, then drop it.
    self._background_tasks.add(task)
    task.add_done_callback(self._background_tasks.discard)
async def _debounced_save_worker(self) -> None:
    """Debounce worker: waits configured delay, then saves if not cancelled."""
    try:
        await asyncio.sleep(DEBOUNCE_DELAY_SECONDS)
        await self._auto_save_pool_state()
    except asyncio.CancelledError:
        # Cancellation is the normal path when new changes reset the timer.
        _LOGGER.debug("Auto-save timer cancelled (expected - new changes arrived)")
        raise
async def async_shutdown(self) -> None:
    """
    Clean shutdown - cancel pending background tasks.

    Should be called when the config entry is unloaded to prevent
    orphaned tasks and ensure clean resource cleanup.
    """
    _LOGGER.debug("Shutting down interval pool for home %s", self._home_id)

    # Cancel debounce task if running and wait for it to finish cancelling.
    if self._save_debounce_task is not None and not self._save_debounce_task.done():
        self._save_debounce_task.cancel()
        with contextlib.suppress(asyncio.CancelledError):
            await self._save_debounce_task
        _LOGGER.debug("Cancelled pending auto-save task")

    # Cancel any other background tasks
    if self._background_tasks:
        for task in list(self._background_tasks):
            if not task.done():
                task.cancel()

    # Wait for all tasks to complete cancellation
    if self._background_tasks:
        await asyncio.gather(*self._background_tasks, return_exceptions=True)
        _LOGGER.debug("Cancelled %d background tasks", len(self._background_tasks))
        self._background_tasks.clear()

    _LOGGER.debug("Interval pool shutdown complete for home %s", self._home_id)
async def _auto_save_pool_state(self) -> None:
    """Auto-save pool state to storage with lock protection (no-op without hass/entry_id)."""
    if self._hass is None or self._entry_id is None:
        return
    # Lock serializes concurrent save attempts so snapshots never interleave.
    async with self._save_lock:
        try:
            pool_state = self.to_dict()
            await async_save_pool_state(self._hass, self._entry_id, pool_state)
            _LOGGER.debug("Auto-saved interval pool for entry %s", self._entry_id)
        except Exception:
            # Best-effort persistence: log with traceback but never propagate.
            _LOGGER.exception("Failed to auto-save interval pool for entry %s", self._entry_id)
def to_dict(self) -> dict[str, Any]:
    """
    Serialize interval pool state for storage.

    Filters out dead intervals (no longer referenced by index), i.e. stale
    copies left behind in old fetch groups after a "touch".

    Returns:
        Dictionary containing serialized pool state (only living intervals).
    """
    fetch_groups = self._cache.get_fetch_groups()

    # Serialize fetch groups (only living intervals)
    serialized_fetch_groups = []
    for group_idx, fetch_group in enumerate(fetch_groups):
        living_intervals = []
        for interval_idx, interval in enumerate(fetch_group["intervals"]):
            starts_at_normalized = _normalize_starts_at(interval["startsAt"])
            # Check if interval is still referenced in index
            location = self._index.get(starts_at_normalized)
            # Only keep if index points to THIS position in THIS group
            if (
                location is not None
                and location["fetch_group_index"] == group_idx
                and location["interval_index"] == interval_idx
            ):
                living_intervals.append(interval)
        # Only serialize groups with living intervals
        if living_intervals:
            serialized_fetch_groups.append(
                {
                    "fetched_at": fetch_group["fetched_at"].isoformat(),
                    "intervals": living_intervals,
                }
            )

    return {
        "version": 1,
        "home_id": self._home_id,
        "fetch_groups": serialized_fetch_groups,
    }
@classmethod
def from_dict(
    cls,
    data: dict[str, Any] | None,
    *,
    api: TibberPricesApiClient,
    hass: Any | None = None,
    entry_id: str | None = None,
    time_service: TibberPricesTimeService | None = None,
) -> TibberPricesIntervalPool | None:
    """
    Restore interval pool manager from storage.

    Expects single-home format: {"version": 1, "home_id": "...", "fetch_groups": [...]}
    Old multi-home format is treated as corrupted and returns None.

    Args:
        data: Dictionary containing serialized pool state (may be None/empty).
        api: API client for fetching intervals.
        hass: HomeAssistant instance for auto-save (optional).
        entry_id: Config entry ID for auto-save (optional).
        time_service: TimeService for time-travel support (optional).

    Returns:
        Restored TibberPricesIntervalPool instance, or None if format unknown/corrupted.
    """
    # Validate format. FIX: guard the old-format probe with `data and` —
    # previously `"homes" in data` raised TypeError when data was None
    # (None satisfies `not data`, so this branch is reachable with None).
    if not data or "home_id" not in data or "fetch_groups" not in data:
        if data and "homes" in data:
            _LOGGER.info(
                "Interval pool storage uses old multi-home format (pre-2025-11-25). "
                "Treating as corrupted. Pool will rebuild from API."
            )
        else:
            _LOGGER.warning("Interval pool storage format unknown or corrupted. Pool will rebuild from API.")
        return None

    home_id = data["home_id"]

    # Create manager with home_id from storage
    manager = cls(home_id=home_id, api=api, hass=hass, entry_id=entry_id, time_service=time_service)

    # Restore fetch groups to cache and rebuild the timestamp index
    for serialized_group in data.get("fetch_groups", []):
        fetched_at_dt = datetime.fromisoformat(serialized_group["fetched_at"])
        intervals = serialized_group["intervals"]
        fetch_group_index = manager._cache.add_fetch_group(intervals, fetched_at_dt)
        for interval_index, interval in enumerate(intervals):
            manager._index.add(interval, fetch_group_index, interval_index)

    total_intervals = sum(len(group["intervals"]) for group in manager._cache.get_fetch_groups())
    _LOGGER.debug(
        "Interval pool restored from storage (home %s, %d intervals)",
        home_id,
        total_intervals,
    )
    return manager

View file

@ -1,180 +0,0 @@
"""
Routing Module - API endpoint selection for price intervals.
This module handles intelligent routing between different Tibber API endpoints:
- PRICE_INFO: Recent data (from "day before yesterday midnight" onwards)
- PRICE_INFO_RANGE: Historical data (before "day before yesterday midnight")
- Automatic splitting and merging when range spans the boundary
CRITICAL: Uses REAL TIME (dt_utils.now()) for API boundary calculation,
NOT TimeService.now() which may be shifted for internal simulation.
"""
from __future__ import annotations
import logging
from typing import TYPE_CHECKING, Any
from custom_components.tibber_prices.api.exceptions import TibberPricesApiClientError
from homeassistant.util import dt as dt_utils
if TYPE_CHECKING:
from datetime import datetime
from custom_components.tibber_prices.api.client import TibberPricesApiClient
_LOGGER = logging.getLogger(__name__)
_LOGGER_DETAILS = logging.getLogger(__name__ + ".details")
async def get_price_intervals_for_range(
    api_client: TibberPricesApiClient,
    home_id: str,
    user_data: dict[str, Any],
    start_time: datetime,
    end_time: datetime,
) -> list[dict[str, Any]]:
    """
    Get price intervals for a specific time range with automatic routing.

    Automatically routes to the correct API endpoint based on the time range:
    - PRICE_INFO_RANGE: For intervals exclusively before "day before yesterday midnight" (real time)
    - PRICE_INFO: For intervals from "day before yesterday midnight" onwards
    - Both: If range spans across the boundary, splits the request

    CRITICAL: Uses REAL TIME (dt_utils.now()) for API boundary calculation,
    NOT TimeService.now() which may be shifted for internal simulation.
    This ensures predictable API responses.

    CACHING STRATEGY: Returns ALL intervals from API response, NOT filtered.
    The caller (pool.py) will cache everything and then filter to user request.
    This maximizes cache efficiency - one API call can populate cache for
    multiple subsequent queries.

    Args:
        api_client: TibberPricesApiClient instance for API calls.
        home_id: Home ID to fetch price data for.
        user_data: User data dict containing home metadata (including timezone).
        start_time: Start of the range (inclusive, timezone-aware).
        end_time: End of the range (exclusive, timezone-aware).

    Returns:
        List of ALL price interval dicts from API (unfiltered).
        - PRICE_INFO: Returns ~384 intervals (day-before-yesterday to tomorrow)
        - PRICE_INFO_RANGE: Returns intervals for requested historical range
        - Both: Returns all intervals from both endpoints

    Raises:
        TibberPricesApiClientError: If arguments invalid or requests fail.
    """
    if not user_data:
        msg = "User data required for timezone-aware price fetching - fetch user data first"
        raise TibberPricesApiClientError(msg)
    if not home_id:
        msg = "Home ID is required"
        raise TibberPricesApiClientError(msg)
    if start_time >= end_time:
        msg = f"Invalid time range: start_time ({start_time}) must be before end_time ({end_time})"
        raise TibberPricesApiClientError(msg)

    # Calculate boundary: day before yesterday midnight (REAL TIME, not TimeService)
    boundary = _calculate_boundary(api_client, user_data, home_id)

    _LOGGER_DETAILS.debug(
        "Routing price interval request for home %s: range %s to %s, boundary %s",
        home_id,
        start_time,
        end_time,
        boundary,
    )

    # Route based on time range
    if end_time <= boundary:
        # Entire range is historical (before day before yesterday) -> use PRICE_INFO_RANGE
        _LOGGER_DETAILS.debug("Range is fully historical, using PRICE_INFO_RANGE")
        result = await api_client.async_get_price_info_range(
            home_id=home_id,
            user_data=user_data,
            start_time=start_time,
            end_time=end_time,
        )
        return result["price_info"]

    if start_time >= boundary:
        # Entire range is recent (from day before yesterday onwards) -> use PRICE_INFO
        _LOGGER_DETAILS.debug("Range is fully recent, using PRICE_INFO")
        result = await api_client.async_get_price_info(home_id, user_data)
        # Return ALL intervals (unfiltered) for maximum cache efficiency
        # Pool will cache everything, then filter to user request
        return result["price_info"]

    # Range spans boundary -> split request
    _LOGGER_DETAILS.debug("Range spans boundary, splitting request")

    # Fetch historical part (start_time to boundary)
    historical_result = await api_client.async_get_price_info_range(
        home_id=home_id,
        user_data=user_data,
        start_time=start_time,
        end_time=boundary,
    )

    # Fetch recent part (boundary onwards)
    recent_result = await api_client.async_get_price_info(home_id, user_data)

    # Return ALL intervals (unfiltered) for maximum cache efficiency
    # Pool will cache everything, then filter to user request
    return historical_result["price_info"] + recent_result["price_info"]
def _calculate_boundary(
    api_client: TibberPricesApiClient,
    user_data: dict[str, Any],
    home_id: str,
) -> datetime:
    """
    Calculate the API boundary (day before yesterday midnight).

    Uses the API client's helper method to extract timezone and calculate boundary.

    Args:
        api_client: TibberPricesApiClient instance.
        user_data: User data dict containing home metadata.
        home_id: Home ID to get timezone for.

    Returns:
        Timezone-aware datetime for day before yesterday midnight.
    """
    # Extract timezone for this home (private API of the client; noqa'd deliberately)
    home_timezones = api_client._extract_home_timezones(user_data)  # noqa: SLF001
    home_tz = home_timezones.get(home_id)

    # Calculate boundary using API client's method
    return api_client._calculate_day_before_yesterday_midnight(home_tz)  # noqa: SLF001
def _parse_timestamp(timestamp_str: str) -> datetime:
    """
    Parse ISO timestamp string to timezone-aware datetime.

    Args:
        timestamp_str: ISO format timestamp string.

    Returns:
        Timezone-aware datetime object.

    Raises:
        ValueError: If timestamp string cannot be parsed.
    """
    parsed = dt_utils.parse_datetime(timestamp_str)
    if parsed is not None:
        return parsed
    # parse_datetime() signals failure via None; surface it as ValueError.
    raise ValueError(f"Failed to parse timestamp: {timestamp_str}")

View file

@ -1,165 +0,0 @@
"""Storage management for interval pool."""
from __future__ import annotations
import errno
import logging
from typing import TYPE_CHECKING, Any
from homeassistant.helpers.storage import Store
if TYPE_CHECKING:
from homeassistant.core import HomeAssistant
_LOGGER = logging.getLogger(__name__)
_LOGGER_DETAILS = logging.getLogger(__name__ + ".details")
# Storage version - increment when changing data structure
INTERVAL_POOL_STORAGE_VERSION = 1
def get_storage_key(entry_id: str) -> str:
    """
    Get storage key for interval pool based on config entry ID.

    Args:
        entry_id: Home Assistant config entry ID

    Returns:
        Storage key string
    """
    # One storage file per config entry, under the integration's namespace.
    return "tibber_prices.interval_pool." + entry_id
async def async_load_pool_state(
    hass: HomeAssistant,
    entry_id: str,
) -> dict[str, Any] | None:
    """
    Load interval pool state from storage.

    Args:
        hass: Home Assistant instance
        entry_id: Config entry ID

    Returns:
        Pool state dict or None if no cache exists
    """
    store: Store = Store(hass, INTERVAL_POOL_STORAGE_VERSION, get_storage_key(entry_id))

    try:
        data = await store.async_load()
    except Exception:
        # Corrupted storage file, JSON parse error, or other exception
        _LOGGER.exception(
            "Failed to load interval pool storage for entry %s (corrupted file?), starting with empty pool",
            entry_id,
        )
        return None

    # First run: nothing persisted yet.
    if data is None:
        _LOGGER.debug("No interval pool cache found for entry %s (first run)", entry_id)
        return None

    # Validate storage structure (single-home format)
    if not isinstance(data, dict):
        _LOGGER.warning(
            "Invalid interval pool storage structure for entry %s (not a dict), ignoring",
            entry_id,
        )
        return None

    # Current single-home format carries both "home_id" and "fetch_groups".
    if "home_id" in data and "fetch_groups" in data:
        _LOGGER.debug(
            "Interval pool state loaded for entry %s (single-home format, %d fetch groups)",
            entry_id,
            len(data.get("fetch_groups", [])),
        )
        return data

    # Legacy multi-home format ("homes" dict) is treated as incompatible.
    if "homes" in data:
        _LOGGER.info(
            "Interval pool storage for entry %s uses old multi-home format (pre-2025-11-25). "
            "Treating as incompatible. Pool will rebuild from API.",
            entry_id,
        )
        return None

    # Anything else is an unknown format; ignore it and rebuild.
    _LOGGER.warning(
        "Invalid interval pool storage structure for entry %s (missing required keys), ignoring",
        entry_id,
    )
    return None
async def async_save_pool_state(
    hass: HomeAssistant,
    entry_id: str,
    pool_state: dict[str, Any],
) -> None:
    """
    Save interval pool state to storage.

    Args:
        hass: Home Assistant instance
        entry_id: Config entry ID
        pool_state: Pool state dict to save
    """
    store: Store = Store(hass, INTERVAL_POOL_STORAGE_VERSION, get_storage_key(entry_id))
    try:
        await store.async_save(pool_state)
    except OSError as err:
        # Provide specific error messages based on errno
        if err.errno == errno.ENOSPC:  # Disk full
            _LOGGER.exception(
                "Cannot save interval pool storage for entry %s: Disk full!",
                entry_id,
            )
        elif err.errno == errno.EACCES:  # Permission denied
            _LOGGER.exception(
                "Cannot save interval pool storage for entry %s: Permission denied!",
                entry_id,
            )
        else:
            _LOGGER.exception(
                "Failed to save interval pool storage for entry %s",
                entry_id,
            )
    else:
        _LOGGER_DETAILS.debug(
            "Interval pool state saved for entry %s (%d fetch groups)",
            entry_id,
            len(pool_state.get("fetch_groups", [])),
        )
async def async_remove_pool_storage(
    hass: HomeAssistant,
    entry_id: str,
) -> None:
    """
    Remove interval pool storage file.

    Used when config entry is removed.

    Args:
        hass: Home Assistant instance
        entry_id: Config entry ID
    """
    store: Store = Store(hass, INTERVAL_POOL_STORAGE_VERSION, get_storage_key(entry_id))
    try:
        await store.async_remove()
    except OSError as ex:
        # Removal failure is non-fatal; just warn.
        _LOGGER.warning("Failed to remove interval pool storage for entry %s: %s", entry_id, ex)
    else:
        _LOGGER.debug("Interval pool storage removed for entry %s", entry_id)

View file

@ -6,10 +6,11 @@
], ],
"config_flow": true, "config_flow": true,
"documentation": "https://github.com/jpawlowski/hass.tibber_prices", "documentation": "https://github.com/jpawlowski/hass.tibber_prices",
"integration_type": "hub",
"iot_class": "cloud_polling", "iot_class": "cloud_polling",
"issue_tracker": "https://github.com/jpawlowski/hass.tibber_prices/issues", "issue_tracker": "https://github.com/jpawlowski/hass.tibber_prices/issues",
"requirements": [ "requirements": [
"aiofiles>=23.2.1" "aiofiles>=23.2.1"
], ],
"version": "0.27.0" "version": "0.3.0"
} }

View file

@ -1,39 +0,0 @@
"""
Number platform for Tibber Prices integration.
Provides configurable number entities for runtime overrides of Best Price
and Peak Price period calculation settings. These entities allow automation
of configuration parameters without using the options flow.
When enabled, these entities take precedence over the options flow settings.
When disabled (default), the options flow settings are used.
"""
from __future__ import annotations
from typing import TYPE_CHECKING
from .core import TibberPricesConfigNumber
from .definitions import NUMBER_ENTITY_DESCRIPTIONS
if TYPE_CHECKING:
from custom_components.tibber_prices.data import TibberPricesConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
async def async_setup_entry(
    _hass: HomeAssistant,
    entry: TibberPricesConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up Tibber Prices number entities based on a config entry."""
    coordinator = entry.runtime_data.coordinator
    # One config-override number entity per declared description.
    entities = [
        TibberPricesConfigNumber(coordinator=coordinator, entity_description=description)
        for description in NUMBER_ENTITY_DESCRIPTIONS
    ]
    async_add_entities(entities)

View file

@ -1,242 +0,0 @@
"""
Number entity implementation for Tibber Prices configuration overrides.
These entities allow runtime configuration of period calculation settings.
When a config entity is enabled, its value takes precedence over the
options flow setting for period calculations.
"""
from __future__ import annotations
import logging
from typing import TYPE_CHECKING, Any
from custom_components.tibber_prices.const import (
DOMAIN,
get_home_type_translation,
get_translation,
)
from homeassistant.components.number import NumberEntity, RestoreNumber
from homeassistant.core import callback
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
if TYPE_CHECKING:
from custom_components.tibber_prices.coordinator import (
TibberPricesDataUpdateCoordinator,
)
from .definitions import TibberPricesNumberEntityDescription
_LOGGER = logging.getLogger(__name__)
class TibberPricesConfigNumber(RestoreNumber, NumberEntity):
    """
    A number entity for configuring period calculation settings at runtime.

    When this entity is enabled, its value overrides the corresponding
    options flow setting. When disabled (default), the options flow
    setting is used for period calculations.

    The entity restores its value after Home Assistant restart.
    """

    _attr_has_entity_name = True

    entity_description: TibberPricesNumberEntityDescription

    # Exclude all attributes from recorder history - config entities don't need history
    _unrecorded_attributes = frozenset(
        {
            "description",
            "long_description",
            "usage_tips",
            "friendly_name",
            "icon",
            "unit_of_measurement",
            "mode",
            "min",
            "max",
            "step",
        }
    )

    def __init__(
        self,
        coordinator: TibberPricesDataUpdateCoordinator,
        entity_description: TibberPricesNumberEntityDescription,
    ) -> None:
        """Initialize the config number entity."""
        self.coordinator = coordinator
        self.entity_description = entity_description
        # Set unique ID
        self._attr_unique_id = (
            f"{coordinator.config_entry.unique_id or coordinator.config_entry.entry_id}_{entity_description.key}"
        )
        # Initialize with None - will be set in async_added_to_hass
        self._attr_native_value: float | None = None
        # Setup device info
        self._setup_device_info()

    def _setup_device_info(self) -> None:
        """Set up device information."""
        home_name, home_id, home_type = self._get_device_info()
        language = self.coordinator.hass.config.language or "en"
        # Translate the home type for the device "model" field; fall back to "Unknown".
        translated_model = get_home_type_translation(home_type, language) if home_type else "Unknown"
        self._attr_device_info = DeviceInfo(
            entry_type=DeviceEntryType.SERVICE,
            identifiers={
                (
                    DOMAIN,
                    self.coordinator.config_entry.unique_id or self.coordinator.config_entry.entry_id,
                )
            },
            name=home_name,
            manufacturer="Tibber",
            model=translated_model,
            serial_number=home_id if home_id else None,
            configuration_url="https://developer.tibber.com/explorer",
        )

    def _get_device_info(self) -> tuple[str, str | None, str | None]:
        """Get device name, ID and type."""
        user_profile = self.coordinator.get_user_profile()
        # Presence of "home_id" in the entry data marks a per-home subentry.
        is_subentry = bool(self.coordinator.config_entry.data.get("home_id"))
        home_id = self.coordinator.config_entry.unique_id
        home_type = None
        if is_subentry:
            home_data = self.coordinator.config_entry.data.get("home_data", {})
            home_id = self.coordinator.config_entry.data.get("home_id")
            address = home_data.get("address", {})
            address1 = address.get("address1", "")
            city = address.get("city", "")
            app_nickname = home_data.get("appNickname", "")
            home_type = home_data.get("type", "")
            # Name preference: app nickname → street address (+ city) → generic fallback.
            if app_nickname and app_nickname.strip():
                home_name = app_nickname.strip()
            elif address1:
                home_name = address1
                if city:
                    home_name = f"{home_name}, {city}"
            else:
                home_name = f"Tibber Home {home_id[:8]}" if home_id else "Tibber Home"
        elif user_profile:
            home_name = user_profile.get("name") or "Tibber Home"
        else:
            home_name = "Tibber Home"
        return home_name, home_id, home_type

    async def async_added_to_hass(self) -> None:
        """Handle entity which was added to Home Assistant."""
        await super().async_added_to_hass()
        # Try to restore previous state
        last_number_data = await self.async_get_last_number_data()
        if last_number_data is not None and last_number_data.native_value is not None:
            self._attr_native_value = last_number_data.native_value
            _LOGGER.debug(
                "Restored %s value: %s",
                self.entity_description.key,
                self._attr_native_value,
            )
        else:
            # Initialize with value from options flow (or default)
            self._attr_native_value = self._get_value_from_options()
            _LOGGER.debug(
                "Initialized %s from options: %s",
                self.entity_description.key,
                self._attr_native_value,
            )
        # Register override with coordinator if entity is enabled
        # This happens during add, so check entity registry
        await self._sync_override_state()

    async def async_will_remove_from_hass(self) -> None:
        """Handle entity removal from Home Assistant."""
        # Remove override when entity is removed
        self.coordinator.remove_config_override(
            self.entity_description.config_key,
            self.entity_description.config_section,
        )
        await super().async_will_remove_from_hass()

    def _get_value_from_options(self) -> float:
        """Get the current value from options flow or default."""
        options = self.coordinator.config_entry.options
        # Options are grouped into sections; fall back to the description's default.
        section = options.get(self.entity_description.config_section, {})
        value = section.get(
            self.entity_description.config_key,
            self.entity_description.default_value,
        )
        return float(value)

    async def _sync_override_state(self) -> None:
        """Sync the override state with the coordinator based on entity enabled state."""
        # Check if entity is enabled in registry
        if self.registry_entry is not None and not self.registry_entry.disabled:
            # Entity is enabled - register the override
            if self._attr_native_value is not None:
                self.coordinator.set_config_override(
                    self.entity_description.config_key,
                    self.entity_description.config_section,
                    self._attr_native_value,
                )
        else:
            # Entity is disabled - remove override
            self.coordinator.remove_config_override(
                self.entity_description.config_key,
                self.entity_description.config_section,
            )

    async def async_set_native_value(self, value: float) -> None:
        """Update the current value and trigger recalculation."""
        self._attr_native_value = value
        # Update the coordinator's runtime override
        self.coordinator.set_config_override(
            self.entity_description.config_key,
            self.entity_description.config_section,
            value,
        )
        # Trigger period recalculation (same path as options update)
        await self.coordinator.async_handle_config_override_update()
        _LOGGER.debug(
            "Updated %s to %s, triggered period recalculation",
            self.entity_description.key,
            value,
        )

    @property
    def extra_state_attributes(self) -> dict[str, Any] | None:
        """Return entity state attributes with description."""
        language = self.coordinator.hass.config.language or "en"
        # Try to get description from custom translations
        # Custom translations use direct path: number.{key}.description
        translation_path = [
            "number",
            self.entity_description.translation_key or self.entity_description.key,
            "description",
        ]
        description = get_translation(translation_path, language)
        attrs: dict[str, Any] = {}
        if description:
            attrs["description"] = description
        return attrs if attrs else None

    @callback
    def async_registry_entry_updated(self) -> None:
        """Handle entity registry update (enabled/disabled state change)."""
        # This is called when the entity is enabled/disabled in the UI
        # NOTE(review): the sync runs in a background task, so the override change
        # is applied asynchronously after this callback returns.
        self.hass.async_create_task(self._sync_override_state())

View file

@ -1,250 +0,0 @@
"""
Number entity definitions for Tibber Prices configuration overrides.
These number entities allow runtime configuration of Best Price and Peak Price
period calculation settings. They are disabled by default - users can enable
individual entities to override specific settings at runtime.
When enabled, the entity value takes precedence over the options flow setting.
When disabled (default), the options flow setting is used.
"""
from __future__ import annotations
from dataclasses import dataclass
from homeassistant.components.number import (
NumberEntityDescription,
NumberMode,
)
from homeassistant.const import PERCENTAGE, EntityCategory
@dataclass(frozen=True, kw_only=True)
class TibberPricesNumberEntityDescription(NumberEntityDescription):
    """
    Describes a Tibber Prices number entity for config overrides.

    Extends the standard NumberEntityDescription with the options-flow
    location (section + key) that the entity overrides at runtime.
    """

    # The config key this entity overrides (matches CONF_* constants)
    config_key: str
    # The section in options where this setting is stored (e.g., "flexibility_settings")
    config_section: str
    # Whether this is for best_price (False) or peak_price (True)
    is_peak_price: bool = False
    # Default value from const.py
    default_value: float | int = 0
# ============================================================================
# BEST PRICE PERIOD CONFIGURATION OVERRIDES
# ============================================================================
# All entities are disabled by default (entity_registry_enabled_default=False);
# a trailing comment on each default_value names the matching DEFAULT_* constant.
BEST_PRICE_NUMBER_ENTITIES = (
    TibberPricesNumberEntityDescription(
        key="best_price_flex_override",
        translation_key="best_price_flex_override",
        name="Best Price: Flexibility",
        icon="mdi:arrow-down-bold-circle",
        entity_category=EntityCategory.CONFIG,
        entity_registry_enabled_default=False,
        native_min_value=0,
        native_max_value=50,
        native_step=1,
        native_unit_of_measurement=PERCENTAGE,
        mode=NumberMode.SLIDER,
        config_key="best_price_flex",
        config_section="flexibility_settings",
        is_peak_price=False,
        default_value=15,  # DEFAULT_BEST_PRICE_FLEX
    ),
    TibberPricesNumberEntityDescription(
        key="best_price_min_distance_override",
        translation_key="best_price_min_distance_override",
        name="Best Price: Minimum Distance",
        icon="mdi:arrow-down-bold-circle",
        entity_category=EntityCategory.CONFIG,
        entity_registry_enabled_default=False,
        native_min_value=-50,
        native_max_value=0,
        native_step=1,
        native_unit_of_measurement=PERCENTAGE,
        mode=NumberMode.SLIDER,
        config_key="best_price_min_distance_from_avg",
        config_section="flexibility_settings",
        is_peak_price=False,
        default_value=-5,  # DEFAULT_BEST_PRICE_MIN_DISTANCE_FROM_AVG
    ),
    TibberPricesNumberEntityDescription(
        key="best_price_min_period_length_override",
        translation_key="best_price_min_period_length_override",
        name="Best Price: Minimum Period Length",
        icon="mdi:arrow-down-bold-circle",
        entity_category=EntityCategory.CONFIG,
        entity_registry_enabled_default=False,
        native_min_value=15,
        native_max_value=180,
        native_step=15,
        native_unit_of_measurement="min",
        mode=NumberMode.SLIDER,
        config_key="best_price_min_period_length",
        config_section="period_settings",
        is_peak_price=False,
        default_value=60,  # DEFAULT_BEST_PRICE_MIN_PERIOD_LENGTH
    ),
    TibberPricesNumberEntityDescription(
        key="best_price_min_periods_override",
        translation_key="best_price_min_periods_override",
        name="Best Price: Minimum Periods",
        icon="mdi:arrow-down-bold-circle",
        entity_category=EntityCategory.CONFIG,
        entity_registry_enabled_default=False,
        native_min_value=1,
        native_max_value=10,
        native_step=1,
        mode=NumberMode.SLIDER,
        config_key="min_periods_best",
        config_section="relaxation_and_target_periods",
        is_peak_price=False,
        default_value=2,  # DEFAULT_MIN_PERIODS_BEST
    ),
    TibberPricesNumberEntityDescription(
        key="best_price_relaxation_attempts_override",
        translation_key="best_price_relaxation_attempts_override",
        name="Best Price: Relaxation Attempts",
        icon="mdi:arrow-down-bold-circle",
        entity_category=EntityCategory.CONFIG,
        entity_registry_enabled_default=False,
        native_min_value=1,
        native_max_value=12,
        native_step=1,
        mode=NumberMode.SLIDER,
        config_key="relaxation_attempts_best",
        config_section="relaxation_and_target_periods",
        is_peak_price=False,
        default_value=11,  # DEFAULT_RELAXATION_ATTEMPTS_BEST
    ),
    TibberPricesNumberEntityDescription(
        key="best_price_gap_count_override",
        translation_key="best_price_gap_count_override",
        name="Best Price: Gap Tolerance",
        icon="mdi:arrow-down-bold-circle",
        entity_category=EntityCategory.CONFIG,
        entity_registry_enabled_default=False,
        native_min_value=0,
        native_max_value=8,
        native_step=1,
        mode=NumberMode.SLIDER,
        config_key="best_price_max_level_gap_count",
        config_section="period_settings",
        is_peak_price=False,
        default_value=1,  # DEFAULT_BEST_PRICE_MAX_LEVEL_GAP_COUNT
    ),
)

# ============================================================================
# PEAK PRICE PERIOD CONFIGURATION OVERRIDES
# ============================================================================
# Mirrors the best-price set above with inverted ranges where applicable
# (e.g. flexibility is negative for peaks, minimum distance is positive).
PEAK_PRICE_NUMBER_ENTITIES = (
    TibberPricesNumberEntityDescription(
        key="peak_price_flex_override",
        translation_key="peak_price_flex_override",
        name="Peak Price: Flexibility",
        icon="mdi:arrow-up-bold-circle",
        entity_category=EntityCategory.CONFIG,
        entity_registry_enabled_default=False,
        native_min_value=-50,
        native_max_value=0,
        native_step=1,
        native_unit_of_measurement=PERCENTAGE,
        mode=NumberMode.SLIDER,
        config_key="peak_price_flex",
        config_section="flexibility_settings",
        is_peak_price=True,
        default_value=-20,  # DEFAULT_PEAK_PRICE_FLEX
    ),
    TibberPricesNumberEntityDescription(
        key="peak_price_min_distance_override",
        translation_key="peak_price_min_distance_override",
        name="Peak Price: Minimum Distance",
        icon="mdi:arrow-up-bold-circle",
        entity_category=EntityCategory.CONFIG,
        entity_registry_enabled_default=False,
        native_min_value=0,
        native_max_value=50,
        native_step=1,
        native_unit_of_measurement=PERCENTAGE,
        mode=NumberMode.SLIDER,
        config_key="peak_price_min_distance_from_avg",
        config_section="flexibility_settings",
        is_peak_price=True,
        default_value=5,  # DEFAULT_PEAK_PRICE_MIN_DISTANCE_FROM_AVG
    ),
    TibberPricesNumberEntityDescription(
        key="peak_price_min_period_length_override",
        translation_key="peak_price_min_period_length_override",
        name="Peak Price: Minimum Period Length",
        icon="mdi:arrow-up-bold-circle",
        entity_category=EntityCategory.CONFIG,
        entity_registry_enabled_default=False,
        native_min_value=15,
        native_max_value=180,
        native_step=15,
        native_unit_of_measurement="min",
        mode=NumberMode.SLIDER,
        config_key="peak_price_min_period_length",
        config_section="period_settings",
        is_peak_price=True,
        default_value=30,  # DEFAULT_PEAK_PRICE_MIN_PERIOD_LENGTH
    ),
    TibberPricesNumberEntityDescription(
        key="peak_price_min_periods_override",
        translation_key="peak_price_min_periods_override",
        name="Peak Price: Minimum Periods",
        icon="mdi:arrow-up-bold-circle",
        entity_category=EntityCategory.CONFIG,
        entity_registry_enabled_default=False,
        native_min_value=1,
        native_max_value=10,
        native_step=1,
        mode=NumberMode.SLIDER,
        config_key="min_periods_peak",
        config_section="relaxation_and_target_periods",
        is_peak_price=True,
        default_value=2,  # DEFAULT_MIN_PERIODS_PEAK
    ),
    TibberPricesNumberEntityDescription(
        key="peak_price_relaxation_attempts_override",
        translation_key="peak_price_relaxation_attempts_override",
        name="Peak Price: Relaxation Attempts",
        icon="mdi:arrow-up-bold-circle",
        entity_category=EntityCategory.CONFIG,
        entity_registry_enabled_default=False,
        native_min_value=1,
        native_max_value=12,
        native_step=1,
        mode=NumberMode.SLIDER,
        config_key="relaxation_attempts_peak",
        config_section="relaxation_and_target_periods",
        is_peak_price=True,
        default_value=11,  # DEFAULT_RELAXATION_ATTEMPTS_PEAK
    ),
    TibberPricesNumberEntityDescription(
        key="peak_price_gap_count_override",
        translation_key="peak_price_gap_count_override",
        name="Peak Price: Gap Tolerance",
        icon="mdi:arrow-up-bold-circle",
        entity_category=EntityCategory.CONFIG,
        entity_registry_enabled_default=False,
        native_min_value=0,
        native_max_value=8,
        native_step=1,
        mode=NumberMode.SLIDER,
        config_key="peak_price_max_level_gap_count",
        config_section="period_settings",
        is_peak_price=True,
        default_value=1,  # DEFAULT_PEAK_PRICE_MAX_LEVEL_GAP_COUNT
    ),
)

# All number entity descriptions combined
NUMBER_ENTITY_DESCRIPTIONS = BEST_PRICE_NUMBER_ENTITIES + PEAK_PRICE_NUMBER_ENTITIES

View file

@ -0,0 +1,444 @@
"""Utility functions for calculating price periods (best price and peak price)."""
from __future__ import annotations
import logging
from datetime import date, timedelta
from typing import Any, NamedTuple
from homeassistant.util import dt as dt_util
from .const import DEFAULT_PRICE_RATING_THRESHOLD_HIGH, DEFAULT_PRICE_RATING_THRESHOLD_LOW
from .price_utils import aggregate_period_levels, aggregate_period_ratings
_LOGGER = logging.getLogger(__name__)
MINUTES_PER_INTERVAL = 15
class PeriodConfig(NamedTuple):
    """Configuration for period calculation."""

    # True → peak-price mode (compare against daily max); False → best-price mode (daily min).
    reverse_sort: bool
    # Flexibility around the daily reference price.
    # NOTE(review): _build_periods multiplies this by 100 before comparing against a
    # percentage difference, suggesting a fraction (e.g. 0.15) — confirm at call sites.
    flex: float
    # Minimum distance from the daily average, used as a percentage in _build_periods.
    min_distance_from_avg: float
    # Minimum period length in minutes (15-minute interval granularity).
    min_period_length: int
    # Rating thresholds forwarded to period-summary aggregation.
    threshold_low: float = DEFAULT_PRICE_RATING_THRESHOLD_LOW
    threshold_high: float = DEFAULT_PRICE_RATING_THRESHOLD_HIGH
def calculate_periods(
    all_prices: list[dict],
    *,
    config: PeriodConfig,
) -> dict[str, Any]:
    """
    Calculate price periods (best or peak) from price data.

    This function identifies periods but does NOT store full interval data redundantly.
    It returns lightweight period summaries that reference the original price data.

    Steps:
    1. Split prices by day and calculate daily averages
    2. Calculate reference prices (min/max per day)
    3. Build periods based on criteria
    4. Filter by minimum length
    5. Merge adjacent periods at midnight
    6. Add interval end timestamps to each interval
    7. Filter by end date (keep periods ending today or later)
    8. Extract period summaries (start/end times, not full price data)

    Args:
        all_prices: All price data points from yesterday/today/tomorrow
        config: Period configuration containing reverse_sort, flex, min_distance_from_avg,
            min_period_length, threshold_low, and threshold_high

    Returns:
        Dict with:
        - periods: List of lightweight period summaries (start/end times only)
        - metadata: Config and statistics
        - reference_data: Daily min/max/avg for on-demand annotation
    """
    # Extract config values
    reverse_sort = config.reverse_sort
    flex = config.flex
    min_distance_from_avg = config.min_distance_from_avg
    min_period_length = config.min_period_length
    threshold_low = config.threshold_low
    threshold_high = config.threshold_high

    # Empty input → empty result, with the config echoed back in metadata.
    if not all_prices:
        return {
            "periods": [],
            "metadata": {
                "total_periods": 0,
                "config": {
                    "reverse_sort": reverse_sort,
                    "flex": flex,
                    "min_distance_from_avg": min_distance_from_avg,
                    "min_period_length": min_period_length,
                },
            },
            "reference_data": {
                "ref_prices": {},
                "avg_prices": {},
            },
        }

    # Ensure prices are sorted chronologically
    all_prices_sorted = sorted(all_prices, key=lambda p: p["startsAt"])

    # Step 1: Split by day and calculate averages
    intervals_by_day, avg_price_by_day = _split_intervals_by_day(all_prices_sorted)

    # Step 2: Calculate reference prices (min or max per day)
    ref_prices = _calculate_reference_prices(intervals_by_day, reverse_sort=reverse_sort)

    # Step 3: Build periods
    price_context = {
        "ref_prices": ref_prices,
        "avg_prices": avg_price_by_day,
        "flex": flex,
        "min_distance_from_avg": min_distance_from_avg,
    }
    raw_periods = _build_periods(all_prices_sorted, price_context, reverse_sort=reverse_sort)

    # Step 4: Filter by minimum length
    raw_periods = _filter_periods_by_min_length(raw_periods, min_period_length)

    # Step 5: Merge adjacent periods at midnight
    raw_periods = _merge_adjacent_periods_at_midnight(raw_periods)

    # Step 6: Add interval ends
    _add_interval_ends(raw_periods)

    # Step 7: Filter periods by end date (keep periods ending today or later)
    raw_periods = _filter_periods_by_end_date(raw_periods)

    # Step 8: Extract lightweight period summaries (no full price data)
    # Note: Filtering for current/future is done here based on end date,
    # not start date. This preserves periods that started yesterday but end today.
    period_summaries = _extract_period_summaries(
        raw_periods,
        all_prices_sorted,
        threshold_low=threshold_low,
        threshold_high=threshold_high,
    )

    return {
        "periods": period_summaries,  # Lightweight summaries only
        "metadata": {
            "total_periods": len(period_summaries),
            "config": {
                "reverse_sort": reverse_sort,
                "flex": flex,
                "min_distance_from_avg": min_distance_from_avg,
                "min_period_length": min_period_length,
            },
        },
        "reference_data": {
            # Date keys serialized to ISO strings so the result is JSON-storable.
            "ref_prices": {k.isoformat(): v for k, v in ref_prices.items()},
            "avg_prices": {k.isoformat(): v for k, v in avg_price_by_day.items()},
        },
    }
def _split_intervals_by_day(all_prices: list[dict]) -> tuple[dict[date, list[dict]], dict[date, float]]:
    """Split intervals by day (local time) and calculate average price per day."""
    by_day: dict[date, list[dict]] = {}
    for entry in all_prices:
        parsed = dt_util.parse_datetime(entry["startsAt"])
        if parsed is None:
            # Unparseable timestamps are dropped.
            continue
        local_day = dt_util.as_local(parsed).date()
        by_day.setdefault(local_day, []).append(entry)
    averages = {
        day: sum(float(item["total"]) for item in items) / len(items)
        for day, items in by_day.items()
    }
    return by_day, averages
def _calculate_reference_prices(intervals_by_day: dict[date, list[dict]], *, reverse_sort: bool) -> dict[date, float]:
"""Calculate reference prices for each day (min for best, max for peak)."""
ref_prices: dict[date, float] = {}
for date_key, intervals in intervals_by_day.items():
prices = [float(p["total"]) for p in intervals]
ref_prices[date_key] = max(prices) if reverse_sort else min(prices)
return ref_prices
def _build_periods(
    all_prices: list[dict],
    price_context: dict[str, Any],
    *,
    reverse_sort: bool,
) -> list[list[dict]]:
    """
    Build periods, allowing periods to cross midnight (day boundary).

    Periods are built day-by-day, comparing each interval to its own day's reference.
    When a day boundary is crossed, the current period is ended.
    Adjacent periods at midnight are merged in a later step.

    Args:
        all_prices: Chronologically sorted price intervals.
        price_context: Dict with "ref_prices" and "avg_prices" keyed by date,
            plus "flex" and "min_distance_from_avg".
        reverse_sort: True for peak-price mode, False for best-price mode.

    Returns:
        List of periods; each period is a list of qualifying interval dicts.
    """
    ref_prices = price_context["ref_prices"]
    avg_prices = price_context["avg_prices"]
    flex = price_context["flex"]
    min_distance_from_avg = price_context["min_distance_from_avg"]
    periods: list[list[dict]] = []
    current_period: list[dict] = []
    last_ref_date: date | None = None
    for price_data in all_prices:
        starts_at = dt_util.parse_datetime(price_data["startsAt"])
        if starts_at is None:
            # Unparseable timestamp: skipped without closing the current period.
            continue
        starts_at = dt_util.as_local(starts_at)
        date_key = starts_at.date()
        ref_price = ref_prices[date_key]
        avg_price = avg_prices[date_key]
        price = float(price_data["total"])
        # Calculate percentage difference from reference
        percent_diff = ((price - ref_price) / ref_price) * 100 if ref_price != 0 else 0.0
        percent_diff = round(percent_diff, 2)
        # Check if interval qualifies for the period
        # NOTE(review): flex is scaled by 100 here, while min_distance_from_avg below
        # is used as a percentage directly — confirm both values use the intended units.
        in_flex = percent_diff >= flex * 100 if reverse_sort else percent_diff <= flex * 100
        within_avg_boundary = price >= avg_price if reverse_sort else price <= avg_price
        # Minimum distance from average
        if reverse_sort:
            # Peak price: must be at least min_distance_from_avg% above average
            min_distance_threshold = avg_price * (1 + min_distance_from_avg / 100)
            meets_min_distance = price >= min_distance_threshold
        else:
            # Best price: must be at least min_distance_from_avg% below average
            min_distance_threshold = avg_price * (1 - min_distance_from_avg / 100)
            meets_min_distance = price <= min_distance_threshold
        # Split period if day changes
        if last_ref_date is not None and date_key != last_ref_date and current_period:
            periods.append(current_period)
            current_period = []
        last_ref_date = date_key
        # Add to period if all criteria are met
        if in_flex and within_avg_boundary and meets_min_distance:
            current_period.append(
                {
                    "interval_hour": starts_at.hour,
                    "interval_minute": starts_at.minute,
                    "interval_time": f"{starts_at.hour:02d}:{starts_at.minute:02d}",
                    "price": price,
                    "interval_start": starts_at,
                }
            )
        elif current_period:
            # Criteria no longer met, end current period
            periods.append(current_period)
            current_period = []
    # Add final period if exists
    if current_period:
        periods.append(current_period)
    return periods
def _filter_periods_by_min_length(periods: list[list[dict]], min_period_length: int) -> list[list[dict]]:
    """Filter periods to only include those meeting the minimum length requirement."""
    # Integer division: a partial trailing interval does not raise the requirement.
    required = min_period_length // MINUTES_PER_INTERVAL
    return list(filter(lambda period: len(period) >= required, periods))
def _merge_adjacent_periods_at_midnight(periods: list[list[dict]]) -> list[list[dict]]:
    """
    Merge adjacent periods that meet at midnight.

    When two periods are detected separately for consecutive days but are directly
    adjacent at midnight (15 minutes apart), merge them into a single period.
    """
    if not periods:
        return periods

    result: list[list[dict]] = []
    idx = 0
    total = len(periods)
    while idx < total:
        period = periods[idx]
        nxt = periods[idx + 1] if idx + 1 < total else None
        if nxt is not None:
            tail_start = period[-1].get("interval_start")
            head_start = nxt[0].get("interval_start")
            if (
                tail_start
                and head_start
                and head_start - tail_start == timedelta(minutes=MINUTES_PER_INTERVAL)
                and head_start.date() > tail_start.date()
            ):
                # Exactly one interval apart across a day boundary → one period.
                result.append(period + nxt)
                idx += 2  # Consume both periods; merged pairs are not re-merged.
                continue
        result.append(period)
        idx += 1
    return result
def _add_interval_ends(periods: list[list[dict]]) -> None:
    """Add interval_end to each interval in-place."""
    step = timedelta(minutes=MINUTES_PER_INTERVAL)
    for interval in (iv for period in periods for iv in period):
        begin = interval.get("interval_start")
        if begin:
            interval["interval_end"] = begin + step
def _filter_periods_by_end_date(periods: list[list[dict]]) -> list[list[dict]]:
    """
    Filter periods to keep only relevant ones for today and tomorrow.

    Keep periods that:
    - End in the future (> now)
    - End today but after the start of the day (not exactly at midnight)

    This removes:
    - Periods that ended yesterday
    - Periods that ended exactly at midnight today (they're completely in the past)
    """
    now = dt_util.now()
    today = now.date()
    midnight_today = dt_util.start_of_local_day(now)

    def _keep(period: list[dict]) -> bool:
        """Decide whether a single period survives the cutoff."""
        if not period:
            return False
        # A period's end is the end of its last interval.
        period_end = period[-1].get("interval_end")
        if not period_end:
            return False
        if period_end > now:
            return True
        return period_end.date() == today and period_end > midnight_today

    return [period for period in periods if _keep(period)]
def _extract_period_summaries(
    periods: list[list[dict]],
    all_prices: list[dict],
    *,
    threshold_low: float | None,
    threshold_high: float | None,
) -> list[dict]:
    """
    Build lightweight period summaries without storing full price data.

    Each summary carries only what sensors need to identify a period:
    start/end timestamps, interval count, duration, the interval start
    timestamps, plus the aggregated API "level" and calculated "rating_level".
    Sensors can use these summaries to query the actual price data from
    priceInfo on demand.

    Args:
        periods: List of periods, where each period is a list of interval dictionaries
        all_prices: All price data from the API (enriched with level, difference, rating_level)
        threshold_low: Low threshold for rating level calculation
        threshold_high: High threshold for rating level calculation
    """
    # Index the enriched price data by local ISO timestamp for O(1) lookup.
    lookup: dict[str, dict] = {}
    for entry in all_prices:
        parsed = dt_util.parse_datetime(entry["startsAt"])
        if parsed:
            lookup[dt_util.as_local(parsed).isoformat()] = entry

    summaries: list[dict] = []
    for period in periods:
        if not period:
            continue
        start_time = period[0].get("interval_start")
        end_time = period[-1].get("interval_end")
        if not start_time or not end_time:
            continue

        # Interval start timestamps (minimal reference data).
        interval_starts = [
            begin.isoformat() for interval in period if (begin := interval.get("interval_start")) is not None
        ]
        # Full price data for each interval, where available.
        matched = [lookup[iso] for iso in interval_starts if iso in lookup]

        level = None
        rating = None
        if matched:
            # Aggregate the API "level" field across the period.
            level = aggregate_period_levels(matched)
            # Aggregate the calculated "rating_level"/"difference" fields.
            if threshold_low is not None and threshold_high is not None:
                rating, _ = aggregate_period_ratings(matched, threshold_low, threshold_high)

        summaries.append(
            {
                "start": start_time,
                "end": end_time,
                "interval_count": len(period),
                "duration_minutes": len(period) * MINUTES_PER_INTERVAL,
                "interval_starts": interval_starts,
                "level": level,
                "rating_level": rating,
            }
        )
    return summaries

View file

@ -0,0 +1,450 @@
"""Utility functions for price data calculations."""
from __future__ import annotations
import logging
from datetime import datetime, timedelta
from typing import Any
from homeassistant.util import dt as dt_util
from .const import PRICE_LEVEL_MAPPING, PRICE_LEVEL_NORMAL, PRICE_RATING_NORMAL
_LOGGER = logging.getLogger(__name__)
MINUTES_PER_INTERVAL = 15
def calculate_trailing_average_for_interval(
    interval_start: datetime,
    all_prices: list[dict[str, Any]],
) -> float | None:
    """
    Calculate the trailing 24-hour average price for a specific interval.

    Args:
        interval_start: The start time of the interval we're calculating for
        all_prices: List of all available price intervals (yesterday + today + tomorrow)

    Returns:
        The mean of all "total" prices whose start lies in the 24 hours
        strictly before ``interval_start``, or None if no usable data exists.
    """
    if not all_prices:
        return None

    window_start = interval_start - timedelta(hours=24)
    window_prices: list[float] = []

    for entry in all_prices:
        raw_start = entry.get("startsAt")
        if not raw_start:
            continue
        parsed = dt_util.parse_datetime(raw_start)
        if parsed is None:
            continue
        # Compare in local time; keep prices with window_start <= start < interval_start.
        local_start = dt_util.as_local(parsed)
        if not (window_start <= local_start < interval_start):
            continue
        total = entry.get("total")
        if total is not None:
            window_prices.append(float(total))

    if not window_prices:
        _LOGGER.debug(
            "No prices found in 24-hour lookback window for interval starting at %s (lookback: %s to %s)",
            interval_start,
            window_start,
            interval_start,
        )
        return None

    return sum(window_prices) / len(window_prices)
def calculate_difference_percentage(
    current_price: float,
    trailing_average: float | None,
) -> float | None:
    """
    Calculate the percentage difference between current price and trailing average.

    This mimics the API's "difference" field from the priceRating endpoint.

    Args:
        current_price: The current interval's price
        trailing_average: The 24-hour trailing average price

    Returns:
        ((current - average) / average) * 100, or None when the average is
        missing or zero (the division would be undefined).
    """
    if not trailing_average:
        return None
    return (current_price - trailing_average) / trailing_average * 100
def calculate_rating_level(
    difference: float | None,
    threshold_low: float,
    threshold_high: float,
) -> str | None:
    """
    Map a difference percentage onto a rating level using the thresholds.

    This mimics the API's "level" field from the priceRating endpoint.

    Args:
        difference: The difference percentage (from calculate_difference_percentage)
        threshold_low: The low threshold percentage (typically -100 to 0)
        threshold_high: The high threshold percentage (typically 0 to 100)

    Returns:
        None when difference is None, "LOW" at or below the low threshold,
        "HIGH" at or above the high threshold, otherwise NORMAL. A value
        satisfying both thresholds at once (overlapping thresholds,
        shouldn't normally happen) is treated as NORMAL.
    """
    if difference is None:
        return None
    at_or_below_low = difference <= threshold_low
    at_or_above_high = difference >= threshold_high
    if at_or_below_low and at_or_above_high:
        return PRICE_RATING_NORMAL
    if at_or_below_low:
        return "LOW"
    if at_or_above_high:
        return "HIGH"
    return PRICE_RATING_NORMAL
def _process_price_interval(
    price_interval: dict[str, Any],
    all_prices: list[dict[str, Any]],
    threshold_low: float,
    threshold_high: float,
    day_label: str,
) -> None:
    """
    Attach "difference" and "rating_level" to a single price interval (in place).

    Args:
        price_interval: The price interval to process (modified in place)
        all_prices: All available price intervals for lookback calculation
        threshold_low: Low threshold percentage
        threshold_high: High threshold percentage
        day_label: Label for logging ("today" or "tomorrow")
    """
    raw_start = price_interval.get("startsAt")
    if not raw_start:
        return
    parsed = dt_util.parse_datetime(raw_start)
    if parsed is None:
        return
    starts_at = dt_util.as_local(parsed)

    current_price = price_interval.get("total")
    if current_price is None:
        return

    trailing_avg = calculate_trailing_average_for_interval(starts_at, all_prices)
    if trailing_avg is None:
        # Nothing to compare against: record explicit None values.
        price_interval["difference"] = None
        price_interval["rating_level"] = None
        _LOGGER.debug(
            "Could not calculate trailing average for %s interval %s",
            day_label,
            starts_at,
        )
        return

    difference = calculate_difference_percentage(float(current_price), trailing_avg)
    price_interval["difference"] = difference
    price_interval["rating_level"] = calculate_rating_level(difference, threshold_low, threshold_high)
def enrich_price_info_with_differences(
    price_info: dict[str, Any],
    threshold_low: float | None = None,
    threshold_high: float | None = None,
) -> dict[str, Any]:
    """
    Enrich price info with calculated 'difference' and 'rating_level' values.

    Computes the trailing 24-hour average, difference percentage, and rating
    level for each interval in today and tomorrow (yesterday is historical
    and skipped).

    Args:
        price_info: Dictionary with 'yesterday', 'today', 'tomorrow' keys
        threshold_low: Low threshold percentage for rating_level (defaults to -10)
        threshold_high: High threshold percentage for rating_level (defaults to 10)

    Returns:
        The same price_info dict with 'difference' and 'rating_level' added
    """
    low = -10 if threshold_low is None else threshold_low
    high = 10 if threshold_high is None else threshold_high

    yesterday_prices = price_info.get("yesterday", [])
    today_prices = price_info.get("today", [])
    tomorrow_prices = price_info.get("tomorrow", [])

    # The lookback window can reach into the previous day, so combine everything.
    all_prices = yesterday_prices + today_prices + tomorrow_prices

    _LOGGER.debug(
        "Enriching price info with differences and rating levels: "
        "yesterday=%d, today=%d, tomorrow=%d, thresholds: low=%.2f, high=%.2f",
        len(yesterday_prices),
        len(today_prices),
        len(tomorrow_prices),
        low,
        high,
    )

    # Only today and tomorrow are enriched; yesterday is historical.
    for day_label, day_prices in (("today", today_prices), ("tomorrow", tomorrow_prices)):
        for price_interval in day_prices:
            _process_price_interval(price_interval, all_prices, low, high, day_label)

    return price_info
def find_price_data_for_interval(price_info: Any, target_time: datetime) -> dict | None:
    """
    Find the price data for a specific 15-minute interval timestamp.

    Args:
        price_info: The price info dictionary from Tibber API
        target_time: The target timestamp to find price data for

    Returns:
        Price data dict if an interval covers the target, None otherwise
    """
    # Search the day the target falls on first, then the other day as fallback.
    if target_time.date() > dt_util.now().date():
        search_days = ["tomorrow", "today"]
    else:
        search_days = ["today", "tomorrow"]

    for day in search_days:
        for price_data in price_info.get(day, []):
            starts_at = dt_util.parse_datetime(price_data["startsAt"])
            if starts_at is None:
                continue
            starts_at = dt_util.as_local(starts_at)
            interval_end = starts_at + timedelta(minutes=MINUTES_PER_INTERVAL)
            covers_target = starts_at <= target_time < interval_end
            if covers_target and starts_at.date() == target_time.date():
                return price_data
    return None
def aggregate_price_levels(levels: list[str]) -> str:
    """
    Aggregate multiple price levels into a single representative level using median.

    Level strings (e.g., "VERY_CHEAP", "NORMAL", "EXPENSIVE") are mapped to
    their numeric values, sorted, and the median (lower-middle for an even
    count) is mapped back to a level string. Mixed inputs therefore tend
    toward "NORMAL".

    Args:
        levels: List of price level strings from intervals

    Returns:
        The median price level string, or PRICE_LEVEL_NORMAL if input is empty
    """
    if not levels:
        return PRICE_LEVEL_NORMAL

    ordered = sorted(PRICE_LEVEL_MAPPING.get(level, 0) for level in levels)
    median_value = ordered[len(ordered) // 2]

    # Map the numeric median back to its level name.
    for name, value in PRICE_LEVEL_MAPPING.items():
        if value == median_value:
            return name
    return PRICE_LEVEL_NORMAL
def aggregate_price_rating(
    differences: list[float | None], threshold_low: float, threshold_high: float
) -> tuple[str, float]:
    """
    Aggregate multiple price differences into a single rating level.

    Calculates the average difference percentage across multiple intervals
    and applies thresholds to determine the overall rating level. None
    entries (intervals where no difference could be calculated) are ignored.

    Args:
        differences: List of difference percentages from intervals
            (None entries are allowed and skipped)
        threshold_low: The low threshold percentage for LOW rating
        threshold_high: The high threshold percentage for HIGH rating

    Returns:
        Tuple of (rating_level, average_difference)
        rating_level: "LOW", "NORMAL", or "HIGH"
        average_difference: The averaged difference percentage
        (0.0 when no valid differences exist)
    """
    # The annotation now matches the body: the filter below exists precisely
    # because some intervals carry a None difference.
    valid_differences = [d for d in (differences or []) if d is not None]
    if not valid_differences:
        return PRICE_RATING_NORMAL, 0.0

    # Calculate average difference, then apply thresholds.
    avg_difference = sum(valid_differences) / len(valid_differences)
    rating_level = calculate_rating_level(avg_difference, threshold_low, threshold_high)
    return rating_level or PRICE_RATING_NORMAL, avg_difference
def aggregate_period_levels(interval_data_list: list[dict[str, Any]]) -> str | None:
    """
    Aggregate price levels across multiple intervals in a period.

    Extracts "level" from each interval and uses the same logic as
    aggregate_price_levels() to determine the overall level for the period.

    Args:
        interval_data_list: List of price interval dictionaries with "level" keys

    Returns:
        The aggregated level string in lowercase (e.g., "very_cheap", "normal",
        "expensive"), or None if no valid levels found
    """
    # isinstance() already rejects None, so no separate None check is needed.
    levels = [level for interval in interval_data_list if isinstance(level := interval.get("level"), str)]
    if not levels:
        return None

    aggregated = aggregate_price_levels(levels)
    # Convert to lowercase for consistency with other enum sensors
    return aggregated.lower() if aggregated else None
def aggregate_period_ratings(
    interval_data_list: list[dict[str, Any]],
    threshold_low: float,
    threshold_high: float,
) -> tuple[str | None, float | None]:
    """
    Aggregate price ratings across multiple intervals in a period.

    Extracts "difference" from each interval and delegates to
    aggregate_price_rating() to determine the overall rating for the period.

    Args:
        interval_data_list: List of price interval dictionaries with "difference" keys
        threshold_low: The low threshold percentage for LOW rating
        threshold_high: The high threshold percentage for HIGH rating

    Returns:
        Tuple of (rating_level, average_difference)
        rating_level: "low", "normal", "high" (lowercase), or None if no valid data
        average_difference: The averaged difference percentage, or None if no valid data
    """
    differences = [
        float(value) for interval in interval_data_list if (value := interval.get("difference")) is not None
    ]
    if not differences:
        return None, None

    rating_level, avg_diff = aggregate_price_rating(differences, threshold_low, threshold_high)
    # Convert to lowercase for consistency with other enum sensors
    return (rating_level.lower() if rating_level else None), avg_diff
def calculate_price_trend(
    current_price: float,
    future_average: float,
    threshold_rising: float = 5.0,
    threshold_falling: float = -5.0,
) -> tuple[str, float]:
    """
    Calculate price trend by comparing current price with future average.

    Args:
        current_price: Current interval price
        future_average: Average price of future intervals
        threshold_rising: Percentage threshold for rising trend (positive, default 5%)
        threshold_falling: Percentage threshold for falling trend (negative, default -5%)

    Returns:
        Tuple of (trend_state, difference_percentage)
        trend_state: "rising" | "falling" | "stable"
        difference_percentage: % change from current to future ((future - current) / current * 100)
    """
    if current_price == 0:
        # A zero current price would divide by zero; report a flat trend.
        return "stable", 0.0

    diff_pct = (future_average - current_price) / current_price * 100

    # threshold_falling is negative, so the direct comparison is correct.
    if diff_pct > threshold_rising:
        return "rising", diff_pct
    if diff_pct < threshold_falling:
        return "falling", diff_pct
    return "stable", diff_pct

File diff suppressed because it is too large Load diff

View file

@ -1,60 +0,0 @@
"""
Sensor platform for Tibber Prices integration.
Provides electricity price sensors organized by calculation method:
- Interval-based: Current/next/previous price intervals
- Rolling hour: 5-interval sliding windows (2h 30m periods)
- Daily statistics: Min/max/avg within calendar day boundaries
- 24h windows: Trailing/leading statistics from current interval
- Future forecast: N-hour price predictions
- Volatility: Price variation analysis
- Diagnostic: System information and metadata
See definitions.py for complete sensor catalog.
"""
from __future__ import annotations
from typing import TYPE_CHECKING
from custom_components.tibber_prices.const import (
CONF_CURRENCY_DISPLAY_MODE,
DISPLAY_MODE_BASE,
)
from .core import TibberPricesSensor
from .definitions import ENTITY_DESCRIPTIONS
if TYPE_CHECKING:
from custom_components.tibber_prices.data import TibberPricesConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
async def async_setup_entry(
    _hass: HomeAssistant,
    entry: TibberPricesConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up Tibber Prices sensor based on a config entry."""
    coordinator = entry.runtime_data.coordinator
    # Get display mode from config; defaults to base-unit display
    display_mode = entry.options.get(CONF_CURRENCY_DISPLAY_MODE, DISPLAY_MODE_BASE)
    # Filter entity descriptions based on display mode.
    # NOTE(review): the intent stated here was "skip current_interval_price_base
    # if user configured major display (regular current_interval_price already
    # shows major units)", but the condition below skips it when
    # display_mode == DISPLAY_MODE_BASE — confirm which behavior is intended.
    entities_to_create = [
        entity_description
        for entity_description in ENTITY_DESCRIPTIONS
        if not (entity_description.key == "current_interval_price_base" and display_mode == DISPLAY_MODE_BASE)
    ]
    # One sensor entity per remaining description, all sharing the coordinator.
    async_add_entities(
        TibberPricesSensor(
            coordinator=coordinator,
            entity_description=entity_description,
        )
        for entity_description in entities_to_create
    )

View file

@ -1,325 +0,0 @@
"""
Attribute builders for Tibber Prices sensors.
This package contains attribute building functions organized by sensor calculation type.
The main entry point is build_sensor_attributes() which routes to the appropriate
specialized attribute builder.
"""
from __future__ import annotations
from typing import TYPE_CHECKING, Any
from custom_components.tibber_prices.entity_utils import (
add_description_attributes,
add_icon_color_attribute,
)
from custom_components.tibber_prices.sensor.types import (
DailyStatPriceAttributes,
DailyStatRatingAttributes,
FutureAttributes,
IntervalLevelAttributes,
# Import all types for re-export
IntervalPriceAttributes,
IntervalRatingAttributes,
LifecycleAttributes,
MetadataAttributes,
SensorAttributes,
TimingAttributes,
TrendAttributes,
VolatilityAttributes,
Window24hAttributes,
)
if TYPE_CHECKING:
from custom_components.tibber_prices.coordinator.core import (
TibberPricesDataUpdateCoordinator,
)
from custom_components.tibber_prices.coordinator.time_service import TibberPricesTimeService
from custom_components.tibber_prices.data import TibberPricesConfigEntry
from homeassistant.core import HomeAssistant
# Import from specialized modules
from .daily_stat import add_statistics_attributes
from .future import add_next_avg_attributes, get_future_prices
from .interval import add_current_interval_price_attributes
from .lifecycle import build_lifecycle_attributes
from .timing import _is_timing_or_volatility_sensor
from .trend import _add_cached_trend_attributes, _add_timing_or_volatility_attributes
from .volatility import add_volatility_type_attributes, get_prices_for_volatility
from .window_24h import add_average_price_attributes
__all__ = [
"DailyStatPriceAttributes",
"DailyStatRatingAttributes",
"FutureAttributes",
"IntervalLevelAttributes",
"IntervalPriceAttributes",
"IntervalRatingAttributes",
"LifecycleAttributes",
"MetadataAttributes",
# Type exports
"SensorAttributes",
"TimingAttributes",
"TrendAttributes",
"VolatilityAttributes",
"Window24hAttributes",
"add_volatility_type_attributes",
"build_extra_state_attributes",
"build_sensor_attributes",
"get_future_prices",
"get_prices_for_volatility",
]
def build_sensor_attributes(
    key: str,
    coordinator: TibberPricesDataUpdateCoordinator,
    native_value: Any,
    cached_data: dict,
    *,
    config_entry: TibberPricesConfigEntry,
) -> dict[str, Any] | None:
    """
    Build attributes for a sensor based on its key.

    Routes to specialized attribute builders based on sensor type:
    interval/hour sensors, 24h-window sensors, next-N-hour averages,
    daily statistics, the lifecycle sensor, and timing/volatility sensors.

    Args:
        key: The sensor entity key
        coordinator: The data update coordinator
        native_value: The current native value of the sensor
        cached_data: Dictionary containing cached sensor data
        config_entry: Config entry for user preferences

    Returns:
        Dictionary of attributes, or None when there is no coordinator data,
        no attributes were produced, or a handled error occurred
    """
    time = coordinator.time
    if not coordinator.data:
        return None
    try:
        attributes: dict[str, Any] = {}
        # For trend sensors, use cached attributes (no-op for other keys)
        _add_cached_trend_attributes(attributes, key, cached_data)
        # Group sensors by type and delegate to specific handlers.
        # Interval/hour price, level, and rating sensors:
        if key in [
            "current_interval_price",
            "current_interval_price_level",
            "next_interval_price",
            "previous_interval_price",
            "current_hour_average_price",
            "next_hour_average_price",
            "next_interval_price_level",
            "previous_interval_price_level",
            "current_hour_price_level",
            "next_hour_price_level",
            "next_interval_price_rating",
            "previous_interval_price_rating",
            "current_hour_price_rating",
            "next_hour_price_rating",
        ]:
            add_current_interval_price_attributes(
                attributes=attributes,
                key=key,
                coordinator=coordinator,
                native_value=native_value,
                cached_data=cached_data,
                time=time,
                config_entry=config_entry,
            )
        # Trailing/leading 24h window statistics sensors:
        elif key in [
            "trailing_price_average",
            "leading_price_average",
            "trailing_price_min",
            "trailing_price_max",
            "leading_price_min",
            "leading_price_max",
        ]:
            add_average_price_attributes(
                attributes=attributes,
                key=key,
                coordinator=coordinator,
                time=time,
                cached_data=cached_data,
                config_entry=config_entry,
            )
        # Next-N-hours average sensors (e.g. "next_avg_3h"):
        elif key.startswith("next_avg_"):
            add_next_avg_attributes(
                attributes=attributes,
                key=key,
                coordinator=coordinator,
                time=time,
                cached_data=cached_data,
                config_entry=config_entry,
            )
        # Daily statistics / rating / timestamp sensors (substring match):
        elif any(
            pattern in key
            for pattern in [
                "_price_today",
                "_price_tomorrow",
                "_price_yesterday",
                "yesterday_price_level",
                "today_price_level",
                "tomorrow_price_level",
                "yesterday_price_rating",
                "today_price_rating",
                "tomorrow_price_rating",
                "rating",
                "data_timestamp",
            ]
        ):
            add_statistics_attributes(
                attributes=attributes,
                key=key,
                cached_data=cached_data,
                time=time,
                config_entry=config_entry,
            )
        elif key == "data_lifecycle_status":
            # Lifecycle sensor uses dedicated builder with calculator
            lifecycle_calculator = cached_data.get("lifecycle_calculator")
            if lifecycle_calculator:
                lifecycle_attrs = build_lifecycle_attributes(coordinator, lifecycle_calculator)
                attributes.update(lifecycle_attrs)
        elif _is_timing_or_volatility_sensor(key):
            _add_timing_or_volatility_attributes(attributes, key, cached_data, native_value, time=time)
        # For current_interval_price_level, add the original level as attribute
        if key == "current_interval_price_level" and cached_data.get("last_price_level") is not None:
            attributes["level_id"] = cached_data["last_price_level"]
        # Add icon_color for daily level and rating sensors (uses native_value)
        if key in [
            "yesterday_price_level",
            "today_price_level",
            "tomorrow_price_level",
            "yesterday_price_rating",
            "today_price_rating",
            "tomorrow_price_rating",
        ]:
            add_icon_color_attribute(attributes, key=key, state_value=native_value)
    except (KeyError, ValueError, TypeError) as ex:
        # Builders work on loosely-typed dicts; treat lookup/type errors as
        # "no attributes" rather than breaking the sensor entity.
        coordinator.logger.exception(
            "Error getting sensor attributes",
            extra={
                "error": str(ex),
                "entity": key,
            },
        )
        return None
    else:
        # Only return a dict when at least one attribute was produced.
        return attributes if attributes else None
def build_extra_state_attributes(  # noqa: PLR0913
    entity_key: str,
    translation_key: str | None,
    hass: HomeAssistant,
    *,
    config_entry: TibberPricesConfigEntry,
    coordinator_data: dict,
    sensor_attrs: dict[str, Any] | None = None,
    time: TibberPricesTimeService,
) -> dict[str, Any] | None:
    """
    Build extra state attributes for sensors.

    This function implements the unified attribute building pattern:
    1. Generate default timestamp (current time rounded to nearest quarter hour)
    2. Merge sensor-specific attributes (may override timestamp)
    3. Preserve timestamp ordering (always FIRST in dict)
    4. Add description attributes (always LAST)

    Args:
        entity_key: Entity key (e.g., "current_interval_price")
        translation_key: Translation key for entity
        hass: Home Assistant instance
        config_entry: Config entry with options (keyword-only)
        coordinator_data: Coordinator data dict (keyword-only)
        sensor_attrs: Sensor-specific attributes (keyword-only); never mutated
        time: TibberPricesTimeService instance (required)

    Returns:
        Complete attributes dict or None if no data available
    """
    if not coordinator_data:
        return None
    # Default timestamp: current time rounded to nearest quarter hour, a
    # consistent reference for when calculations were made. Individual
    # sensors may override it via sensor_attrs["timestamp"].
    now = time.now()
    default_timestamp = time.round_to_nearest_quarter(now)
    # Special handling for chart_data_export: metadata → descriptions → service data
    if entity_key == "chart_data_export":
        attributes: dict[str, Any] = {
            "timestamp": default_timestamp,
        }
        # Step 1: Add metadata (timestamp + error if present)
        if sensor_attrs:
            if "timestamp" in sensor_attrs and sensor_attrs["timestamp"] is not None:
                # Chart data has its own timestamp (when service was last called)
                attributes["timestamp"] = sensor_attrs["timestamp"]
            if "error" in sensor_attrs:
                attributes["error"] = sensor_attrs["error"]
        # Step 2: Add descriptions before service data (via central utility)
        add_description_attributes(
            attributes,
            "sensor",
            translation_key,
            hass,
            config_entry,
            position="before_service_data",
        )
        # Step 3: Add service data (everything except metadata)
        if sensor_attrs:
            attributes.update({k: v for k, v in sensor_attrs.items() if k not in ("timestamp", "error")})
        return attributes if attributes else None
    # For all other sensors: standard behavior.
    # Start with default timestamp (datetime object - HA serializes automatically)
    attributes = {
        "timestamp": default_timestamp,
    }
    if sensor_attrs:
        # Bug fix: work on a copy. The previous implementation popped
        # "timestamp" directly from sensor_attrs, mutating the caller's dict.
        remaining = dict(sensor_attrs)
        timestamp_override = remaining.pop("timestamp", None)
        # Add all other sensor attributes
        attributes.update(remaining)
        # If the sensor overrides the timestamp, rebuild so it stays FIRST.
        if timestamp_override is not None:
            reordered: dict[str, Any] = {"timestamp": timestamp_override}
            reordered.update({k: v for k, v in attributes.items() if k != "timestamp"})
            attributes = reordered
    # Add description attributes (always last, via central utility)
    add_description_attributes(
        attributes,
        "sensor",
        translation_key,
        hass,
        config_entry,
        position="end",
    )
    return attributes if attributes else None

View file

@ -1,160 +0,0 @@
"""Daily statistics attribute builders for Tibber Prices sensors."""
from __future__ import annotations
from typing import TYPE_CHECKING
from custom_components.tibber_prices.const import PRICE_RATING_MAPPING
from custom_components.tibber_prices.coordinator.helpers import (
get_intervals_for_day_offsets,
)
from homeassistant.const import PERCENTAGE
if TYPE_CHECKING:
from datetime import datetime
from custom_components.tibber_prices.coordinator.time_service import TibberPricesTimeService
from custom_components.tibber_prices.data import TibberPricesConfigEntry
from .helpers import add_alternate_average_attribute
def _get_day_midnight_timestamp(key: str, *, time: TibberPricesTimeService) -> datetime:
"""Get midnight timestamp for a given day sensor key (returns datetime object)."""
# Determine which day based on sensor key
if key.startswith("yesterday") or key == "average_price_yesterday":
day = "yesterday"
elif key.startswith("tomorrow") or key == "average_price_tomorrow":
day = "tomorrow"
else:
day = "today"
# Use TimeService to get midnight for that day
local_midnight, _ = time.get_day_boundaries(day)
return local_midnight
def _get_day_key_from_sensor_key(key: str) -> str:
"""
Extract day key (yesterday/today/tomorrow) from sensor key.
Args:
key: The sensor entity key
Returns:
Day key: "yesterday", "today", or "tomorrow"
"""
if "yesterday" in key:
return "yesterday"
if "tomorrow" in key:
return "tomorrow"
return "today"
def _add_fallback_timestamp(
    attributes: dict,
    key: str,
    price_info: dict,
) -> None:
    """
    Add a fallback timestamp based on the day named in the sensor key.

    Uses the first interval of that day's prices (if any) as the timestamp.

    Args:
        attributes: Dictionary to add timestamp to (mutated in place)
        key: The sensor entity key
        price_info: Price info dictionary from coordinator data (flat structure)
    """
    day_key = _get_day_key_from_sensor_key(key)
    # get_intervals_for_day_offsets expects the nested coordinator shape.
    coordinator_data = {"priceInfo": price_info}
    # Map day key to offset: yesterday=-1, today=0, tomorrow=1
    offsets = {"yesterday": -1, "today": 0, "tomorrow": 1}
    day_intervals = get_intervals_for_day_offsets(coordinator_data, [offsets[day_key]])
    if day_intervals:
        attributes["timestamp"] = day_intervals[0].get("startsAt")
def add_statistics_attributes(
    attributes: dict,
    key: str,
    cached_data: dict,
    *,
    time: TibberPricesTimeService,
    config_entry: TibberPricesConfigEntry,
) -> None:
    """
    Add attributes for statistics and rating sensors.

    Dispatches on the sensor key: data timestamp, current rating, extreme
    value, daily average, and daily aggregated level/rating sensors each
    get their own attribute set. Mutates ``attributes`` in place; keys not
    matching any group keep the caller's default timestamp.

    Args:
        attributes: Dictionary to add attributes to
        key: The sensor entity key
        cached_data: Dictionary containing cached sensor data
        time: TibberPricesTimeService instance (required)
        config_entry: Config entry for user preferences
    """
    # Data timestamp sensor - shows API fetch time
    if key == "data_timestamp":
        latest_timestamp = cached_data.get("data_timestamp")
        if latest_timestamp:
            attributes["timestamp"] = latest_timestamp
        return
    # Current interval price rating - add rating attributes
    if key == "current_interval_price_rating":
        if cached_data.get("last_rating_difference") is not None:
            # PERCENTAGE is the HA "%" constant, so this key reads "diff_%"
            attributes["diff_" + PERCENTAGE] = cached_data["last_rating_difference"]
        if cached_data.get("last_rating_level") is not None:
            attributes["level_id"] = cached_data["last_rating_level"]
            # Resolve the mapped value for the level id (falls back to the id itself)
            attributes["level_value"] = PRICE_RATING_MAPPING.get(
                cached_data["last_rating_level"], cached_data["last_rating_level"]
            )
        return
    # Extreme value sensors - show when the extreme occurs
    extreme_sensors = {
        "lowest_price_today",
        "highest_price_today",
        "lowest_price_tomorrow",
        "highest_price_tomorrow",
    }
    if key in extreme_sensors:
        if cached_data.get("last_extreme_interval"):
            extreme_starts_at = cached_data["last_extreme_interval"].get("startsAt")
            if extreme_starts_at:
                attributes["timestamp"] = extreme_starts_at
        return
    # Daily average sensors - show midnight to indicate whole day + add alternate value
    daily_avg_sensors = {"average_price_today", "average_price_tomorrow"}
    if key in daily_avg_sensors:
        attributes["timestamp"] = _get_day_midnight_timestamp(key, time=time)
        # Add the alternate average attribute as well
        add_alternate_average_attribute(
            attributes,
            cached_data,
            key,  # base_key = key itself ("average_price_today" or "average_price_tomorrow")
            config_entry=config_entry,
        )
        return
    # Daily aggregated level/rating sensors - show midnight to indicate whole day
    daily_aggregated_sensors = {
        "yesterday_price_level",
        "today_price_level",
        "tomorrow_price_level",
        "yesterday_price_rating",
        "today_price_rating",
        "tomorrow_price_rating",
    }
    if key in daily_aggregated_sensors:
        attributes["timestamp"] = _get_day_midnight_timestamp(key, time=time)
        return
    # All other statistics sensors - keep default timestamp (when calculation was made)
View file

@ -1,164 +0,0 @@
"""Future price/trend attribute builders for Tibber Prices sensors."""
from __future__ import annotations
from typing import TYPE_CHECKING
from custom_components.tibber_prices.const import get_display_unit_factor
from custom_components.tibber_prices.coordinator.helpers import get_intervals_for_day_offsets
if TYPE_CHECKING:
from custom_components.tibber_prices.coordinator.core import (
TibberPricesDataUpdateCoordinator,
)
from custom_components.tibber_prices.coordinator.time_service import TibberPricesTimeService
from custom_components.tibber_prices.data import TibberPricesConfigEntry
from .helpers import add_alternate_average_attribute
# Constants
MAX_FORECAST_INTERVALS = 8 # Show up to 8 future intervals (2 hours with 15-min intervals)
def add_next_avg_attributes(  # noqa: PLR0913
    attributes: dict,
    key: str,
    coordinator: TibberPricesDataUpdateCoordinator,
    *,
    time: TibberPricesTimeService,
    cached_data: dict | None = None,
    config_entry: TibberPricesConfigEntry | None = None,
) -> None:
    """
    Add attributes for next N hours average price sensors.

    Args:
        attributes: Dictionary to add attributes to.
        key: The sensor entity key (e.g. "next_avg_3h").
        coordinator: The data update coordinator.
        time: TibberPricesTimeService instance (required).
        cached_data: Optional cached data dictionary for mean/median values.
        config_entry: Optional config entry for user preferences.

    """
    # Derive the hour count from the key suffix, e.g. "next_avg_3h" -> 3.
    try:
        hours = int(key.split("_")[-1].replace("h", ""))
    except (ValueError, AttributeError):
        return

    # N-hour window beginning at the start of the next interval.
    window_start, window_end = time.get_next_n_hours_window(hours)

    # Candidate intervals across yesterday/today/tomorrow.
    candidates = get_intervals_for_day_offsets(coordinator.data, [-1, 0, 1])
    if not candidates:
        return

    # Keep only intervals whose start falls inside [window_start, window_end).
    in_window = [
        entry
        for entry in candidates
        if (starts := time.get_interval_time(entry)) is not None
        and window_start <= starts < window_end
    ]

    if in_window:
        # Timestamp marks where the calculation window begins.
        attributes["timestamp"] = in_window[0].get("startsAt")
        attributes["interval_count"] = len(in_window)
        attributes["hours"] = hours

    # Expose mean/median variants when cached values are available.
    if cached_data and config_entry:
        add_alternate_average_attribute(
            attributes,
            cached_data,
            f"next_avg_{hours}h",
            config_entry=config_entry,
        )
def get_future_prices(
    coordinator: TibberPricesDataUpdateCoordinator,
    max_intervals: int | None = None,
    *,
    time: TibberPricesTimeService,
    config_entry: TibberPricesConfigEntry,
) -> list[dict] | None:
    """
    Get future price data for multiple upcoming intervals.

    Args:
        coordinator: The data update coordinator.
        max_intervals: Maximum number of future intervals to return
            (defaults to MAX_FORECAST_INTERVALS).
        time: TibberPricesTimeService instance (required).
        config_entry: Config entry to get display unit configuration.

    Returns:
        List of upcoming price intervals (sorted by start time) with
        timestamps and prices, or None when no data is available.

    """
    if not coordinator.data:
        return None

    # Get all intervals (yesterday, today, tomorrow) via helper.
    all_prices = get_intervals_for_day_offsets(coordinator.data, [-1, 0, 1])
    if not all_prices:
        return None

    intervals_to_return = MAX_FORECAST_INTERVALS if max_intervals is None else max_intervals

    # Loop invariants hoisted out of the per-interval loop: day boundaries,
    # interval length, and the display-unit conversion factor.
    today_date = time.now().date()
    tomorrow_date = time.get_local_date(offset_days=1)
    interval_duration = time.get_interval_duration()
    factor = get_display_unit_factor(config_entry)

    future_prices = []
    for price_data in all_prices:
        starts_at = time.get_interval_time(price_data)
        # Skip malformed entries and intervals that are not in the future.
        if starts_at is None or not time.is_in_future(starts_at):
            continue

        # Label the interval with the day it belongs to.
        interval_date = starts_at.date()
        if interval_date == today_date:
            day_key = "today"
        elif interval_date == tomorrow_date:
            day_key = "tomorrow"
        else:
            day_key = "unknown"

        price_major = float(price_data["total"])
        future_prices.append(
            {
                "interval_start": starts_at,
                "interval_end": starts_at + interval_duration,
                "price": price_major,
                # Value converted to the configured display currency unit.
                "price_minor": round(price_major * factor, 2),
                "level": price_data.get("level", "NORMAL"),
                "rating": price_data.get("difference"),
                "rating_level": price_data.get("rating_level"),
                "day": day_key,
            }
        )

    # Sort by start time and limit to the requested number of intervals.
    future_prices.sort(key=lambda x: x["interval_start"])
    return future_prices[:intervals_to_return] if future_prices else None

View file

@ -1,41 +0,0 @@
"""Helper functions for sensor attributes."""
from __future__ import annotations
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from custom_components.tibber_prices.data import TibberPricesConfigEntry
def add_alternate_average_attribute(
    attributes: dict,
    cached_data: dict,
    base_key: str,
    *,
    config_entry: TibberPricesConfigEntry,  # noqa: ARG001
) -> None:
    """
    Add both average values (mean and median) as attributes.

    Publishing both values keeps automations working consistently no
    matter which of the two is displayed in the state. The state-side
    value is excluded from the recorder elsewhere (dynamic
    _unrecorded_attributes in sensor core) to avoid duplicate recording.

    Args:
        attributes: Dictionary to add attributes to.
        cached_data: Cached calculation data containing mean/median values.
        base_key: Base key for cached values (e.g. "average_price_today").
        config_entry: Config entry for user preferences (used to determine
            which value is in state).

    """
    # Copy whichever of the two cached aggregates are present.
    for suffix, attr_name in (("mean", "price_mean"), ("median", "price_median")):
        value = cached_data.get(f"{base_key}_{suffix}")
        if value is not None:
            attributes[attr_name] = value

View file

@ -1,270 +0,0 @@
"""Interval attribute builders for Tibber Prices sensors."""
from __future__ import annotations
from datetime import timedelta
from typing import TYPE_CHECKING, Any
from custom_components.tibber_prices.const import (
PRICE_LEVEL_MAPPING,
PRICE_RATING_MAPPING,
)
from custom_components.tibber_prices.entity_utils import add_icon_color_attribute
from custom_components.tibber_prices.utils.price import find_price_data_for_interval
if TYPE_CHECKING:
from custom_components.tibber_prices.coordinator.core import (
TibberPricesDataUpdateCoordinator,
)
from custom_components.tibber_prices.coordinator.time_service import TibberPricesTimeService
from custom_components.tibber_prices.data import TibberPricesConfigEntry
from .helpers import add_alternate_average_attribute
from .metadata import get_current_interval_data
def _get_interval_data_for_attributes(
    key: str,
    coordinator: TibberPricesDataUpdateCoordinator,
    attributes: dict,
    *,
    time: TibberPricesTimeService,
) -> dict | None:
    """
    Get interval data and set timestamp based on sensor type.

    Consolidates the repeated "resolve target time, look up interval,
    override timestamp" pattern and only calls time.now() when actually
    needed (the next-hour branch).

    Args:
        key: The sensor entity key.
        coordinator: The data update coordinator.
        attributes: Attributes dict updated with "timestamp" when the
            sensor refers to a specific interval.
        time: TibberPricesTimeService instance.

    Returns:
        Interval data if found, None otherwise.

    """
    next_sensors = {"next_interval_price", "next_interval_price_level", "next_interval_price_rating"}
    prev_sensors = {"previous_interval_price", "previous_interval_price_level", "previous_interval_price_rating"}
    next_hour = {"next_hour_average_price", "next_hour_price_level", "next_hour_price_rating"}
    curr_interval = {"current_interval_price", "current_interval_price_base"}
    curr_hour = {"current_hour_average_price", "current_hour_price_level", "current_hour_price_rating"}

    # Sensors that point at a specific interval: resolve the lookup time,
    # then override the timestamp with that interval's startsAt.
    target_time = None
    if key in next_sensors:
        target_time = time.get_next_interval_start()
    elif key in prev_sensors:
        target_time = time.get_interval_offset_time(-1)
    elif key in next_hour:
        target_time = time.now() + timedelta(hours=1)

    if target_time is not None:
        interval_data = find_price_data_for_interval(coordinator.data, target_time, time=time)
        if interval_data:
            attributes["timestamp"] = interval_data["startsAt"]
        return interval_data

    # Current interval sensors (both variants) - timestamp from startsAt.
    if key in curr_interval:
        interval_data = get_current_interval_data(coordinator, time=time)
        if interval_data and "startsAt" in interval_data:
            attributes["timestamp"] = interval_data["startsAt"]
        return interval_data

    # Current hour sensors - keep default timestamp.
    if key in curr_hour:
        return get_current_interval_data(coordinator, time=time)

    return None
def add_current_interval_price_attributes(  # noqa: PLR0913
    attributes: dict,
    key: str,
    coordinator: TibberPricesDataUpdateCoordinator,
    native_value: Any,
    cached_data: dict,
    *,
    time: TibberPricesTimeService,
    config_entry: TibberPricesConfigEntry,
) -> None:
    """
    Add attributes for current interval price sensors.

    Args:
        attributes: Dictionary to add attributes to.
        key: The sensor entity key.
        coordinator: The data update coordinator.
        native_value: The current native value of the sensor.
        cached_data: Dictionary containing cached sensor data.
        time: TibberPricesTimeService instance (required).
        config_entry: Config entry for user preferences.

    """
    # Resolve interval data and apply any timestamp override for this key.
    interval_data = _get_interval_data_for_attributes(key, coordinator, attributes, time=time)

    interval_price_sensors = {
        "current_interval_price",
        "current_interval_price_base",
        "next_interval_price",
        "previous_interval_price",
    }
    hour_price_sensors = {"current_hour_average_price", "next_hour_average_price"}

    if key in interval_price_sensors:
        # Interval-based price sensors: icon color follows the interval's level.
        if interval_data and "level" in interval_data:
            add_icon_color_attribute(attributes, key="price_level", state_value=interval_data["level"])
    elif key in hour_price_sensors:
        # Hour-based price sensors: icon color follows the cached rolling level.
        rolling_level = cached_data.get("rolling_hour_level")
        if rolling_level:
            add_icon_color_attribute(attributes, key="price_level", state_value=rolling_level)
        # Expose mean/median variants for the rolling hour averages.
        base_key = "rolling_hour_0" if key == "current_hour_average_price" else "rolling_hour_1"
        add_alternate_average_attribute(
            attributes,
            cached_data,
            base_key,
            config_entry=config_entry,
        )

    # Price level attributes for all level sensors.
    add_level_attributes_for_sensor(
        attributes=attributes,
        key=key,
        interval_data=interval_data,
        coordinator=coordinator,
        native_value=native_value,
        time=time,
    )
    # Price rating attributes for all rating sensors.
    add_rating_attributes_for_sensor(
        attributes=attributes,
        key=key,
        interval_data=interval_data,
        coordinator=coordinator,
        native_value=native_value,
        time=time,
    )
def add_level_attributes_for_sensor(  # noqa: PLR0913
    attributes: dict,
    key: str,
    interval_data: dict | None,
    coordinator: TibberPricesDataUpdateCoordinator,
    native_value: Any,
    *,
    time: TibberPricesTimeService,
) -> None:
    """
    Add price level attributes based on sensor type.

    Args:
        attributes: Dictionary to add attributes to.
        key: The sensor entity key.
        interval_data: Interval data for next/previous sensors.
        coordinator: The data update coordinator.
        native_value: The current native value of the sensor.
        time: TibberPricesTimeService instance (required).

    """
    # Next/previous level sensors read the level from the interval data.
    if key in ("next_interval_price_level", "previous_interval_price_level"):
        if interval_data and "level" in interval_data:
            add_price_level_attributes(attributes, interval_data["level"])
        return

    # Hour-aggregated level sensors read the level from the native value.
    if key in ("current_hour_price_level", "next_hour_price_level"):
        if native_value and isinstance(native_value, str):
            add_price_level_attributes(attributes, native_value.upper())
        return

    # Current level sensor reads the level from the current interval.
    if key == "current_interval_price_level":
        current = get_current_interval_data(coordinator, time=time)
        if current and "level" in current:
            add_price_level_attributes(attributes, current["level"])
def add_price_level_attributes(attributes: dict, level: str) -> None:
    """
    Add price level specific attributes.

    Args:
        attributes: Dictionary to add attributes to.
        level: The price level value (e.g. VERY_CHEAP, NORMAL, etc.).

    """
    # Unknown levels are ignored entirely.
    if level not in PRICE_LEVEL_MAPPING:
        return
    attributes["level_value"] = PRICE_LEVEL_MAPPING[level]
    attributes["level_id"] = level
    # icon_color enables dynamic frontend styling.
    add_icon_color_attribute(attributes, key="price_level", state_value=level)
def add_rating_attributes_for_sensor(  # noqa: PLR0913
    attributes: dict,
    key: str,
    interval_data: dict | None,
    coordinator: TibberPricesDataUpdateCoordinator,
    native_value: Any,
    *,
    time: TibberPricesTimeService,
) -> None:
    """
    Add price rating attributes based on sensor type.

    Args:
        attributes: Dictionary to add attributes to.
        key: The sensor entity key.
        interval_data: Interval data for next/previous sensors.
        coordinator: The data update coordinator.
        native_value: The current native value of the sensor.
        time: TibberPricesTimeService instance (required).

    """
    # Next/previous rating sensors read the rating from the interval data.
    if key in ("next_interval_price_rating", "previous_interval_price_rating"):
        if interval_data and "rating_level" in interval_data:
            add_price_rating_attributes(attributes, interval_data["rating_level"])
        return

    # Hour-aggregated rating sensors read the rating from the native value.
    if key in ("current_hour_price_rating", "next_hour_price_rating"):
        if native_value and isinstance(native_value, str):
            add_price_rating_attributes(attributes, native_value.upper())
        return

    # Current rating sensor reads the rating from the current interval.
    if key == "current_interval_price_rating":
        current = get_current_interval_data(coordinator, time=time)
        if current and "rating_level" in current:
            add_price_rating_attributes(attributes, current["rating_level"])
def add_price_rating_attributes(attributes: dict, rating: str) -> None:
    """
    Add price rating specific attributes.

    Args:
        attributes: Dictionary to add attributes to.
        rating: The price rating value (e.g. LOW, NORMAL, HIGH).

    """
    # Unknown ratings are ignored entirely.
    if rating not in PRICE_RATING_MAPPING:
        return
    attributes["rating_value"] = PRICE_RATING_MAPPING[rating]
    attributes["rating_id"] = rating
    # icon_color enables dynamic frontend styling.
    add_icon_color_attribute(attributes, key="price_rating", state_value=rating)

View file

@ -1,83 +0,0 @@
"""
Attribute builders for lifecycle diagnostic sensor.
This sensor uses event-based updates with state-change filtering to minimize
recorder entries. Only attributes that are relevant to the lifecycle STATE
are included here - attributes that change independently of state belong
in a separate sensor or diagnostics.
Included attributes (update only on state change):
- tomorrow_available: Whether tomorrow's price data is available
- next_api_poll: When the next API poll will occur (builds user trust)
- updates_today: Number of API calls made today
- last_turnover: When the last midnight turnover occurred
- last_error: Details of the last error (if any)
Pool statistics (sensor_intervals_count, cache_fill_percent, etc.) are
intentionally NOT included here because they change independently of
the lifecycle state. With state-change filtering, these would become
stale. Pool statistics are available via diagnostics or could be
exposed as a separate sensor if needed.
"""
from __future__ import annotations
from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from custom_components.tibber_prices.coordinator.core import (
TibberPricesDataUpdateCoordinator,
)
from custom_components.tibber_prices.sensor.calculators.lifecycle import (
TibberPricesLifecycleCalculator,
)
def build_lifecycle_attributes(
    coordinator: TibberPricesDataUpdateCoordinator,
    lifecycle_calculator: TibberPricesLifecycleCalculator,
) -> dict[str, Any]:
    """
    Build attributes for the data_lifecycle_status sensor.

    Event-based updates with state-change filtering: attributes refresh
    only when the lifecycle STATE changes (e.g. fresh, cached,
    turnover_pending). Only state-relevant attributes are included; pool
    statistics are intentionally excluded to avoid stale data.

    Returns:
        Dict with lifecycle attributes.

    """
    attributes: dict[str, Any] = {}

    # Tomorrow data status - critical for understanding state transitions.
    attributes["tomorrow_available"] = lifecycle_calculator.has_tomorrow_data()

    # Next API poll time - builds user trust by showing when the
    # integration will next check for tomorrow's price data.
    next_poll = lifecycle_calculator.get_next_api_poll_time()
    if next_poll:
        attributes["next_api_poll"] = next_poll.isoformat()

    # API activity counter; resets at midnight with the turnover.
    attributes["updates_today"] = lifecycle_calculator.get_api_calls_today()

    # Timestamp of the last successful data rotation, when one occurred.
    last_turnover = coordinator._midnight_handler.last_turnover_time  # noqa: SLF001
    if last_turnover:
        attributes["last_turnover"] = last_turnover.isoformat()

    # Error details, present only while an error is active.
    if coordinator.last_exception:
        attributes["last_error"] = str(coordinator.last_exception)

    return attributes

View file

@ -1,37 +0,0 @@
"""Metadata attribute builders for Tibber Prices sensors."""
from __future__ import annotations
from typing import TYPE_CHECKING
from custom_components.tibber_prices.utils.price import find_price_data_for_interval
if TYPE_CHECKING:
from custom_components.tibber_prices.coordinator.core import (
TibberPricesDataUpdateCoordinator,
)
from custom_components.tibber_prices.coordinator.time_service import TibberPricesTimeService
def get_current_interval_data(
    coordinator: TibberPricesDataUpdateCoordinator,
    *,
    time: TibberPricesTimeService,
) -> dict | None:
    """
    Get current interval's price data.

    Args:
        coordinator: The data update coordinator.
        time: TibberPricesTimeService instance (required).

    Returns:
        Current interval data, or None when no coordinator data is loaded
        or no interval matches the current moment.

    """
    data = coordinator.data
    if not data:
        return None
    # Look up the interval that contains "now".
    return find_price_data_for_interval(data, time.now(), time=time)

View file

@ -1,95 +0,0 @@
"""Period timing attribute builders for Tibber Prices sensors."""
from __future__ import annotations
from typing import TYPE_CHECKING, Any
from custom_components.tibber_prices.entity_utils import add_icon_color_attribute
if TYPE_CHECKING:
from custom_components.tibber_prices.coordinator.time_service import TibberPricesTimeService
# Timer #3 triggers every 30 seconds
TIMER_30_SEC_BOUNDARY = 30
def _hours_to_minutes(state_value: Any) -> int | None:
"""Convert hour-based state back to rounded minutes for attributes."""
if state_value is None:
return None
try:
return round(float(state_value) * 60)
except (TypeError, ValueError):
return None
def _is_timing_or_volatility_sensor(key: str) -> bool:
"""Check if sensor is a timing or volatility sensor."""
return key.endswith("_volatility") or (
key.startswith(("best_price_", "peak_price_"))
and any(
suffix in key
for suffix in [
"end_time",
"remaining_minutes",
"progress",
"next_start_time",
"next_in_minutes",
]
)
)
def add_period_timing_attributes(
    attributes: dict,
    key: str,
    state_value: Any = None,
    *,
    time: TibberPricesTimeService,
) -> None:
    """
    Add timestamp and icon_color attributes for best_price/peak_price timing sensors.

    The timestamp reflects when the sensor value was calculated:
    - Quarter-hour sensors (end_time, next_start_time): snapped down to the
      15-minute boundary (:00, :15, :30, :45).
    - 30-second update sensors (remaining_minutes, progress, next_in_minutes):
      snapped to the :00/:30 timer boundary.

    Args:
        attributes: Dictionary to add attributes to.
        key: The sensor entity key (e.g. "best_price_end_time").
        state_value: Current sensor value for icon_color calculation.
        time: TibberPricesTimeService instance (required).

    """
    now = time.now()
    if key.endswith(("_end_time", "_next_start_time")):
        # Quarter-hour sensor: round down to the current 15-minute interval.
        timestamp = now.replace(minute=(now.minute // 15) * 15, second=0, microsecond=0)
    else:
        # 30-second sensor: Timer #3 fires at :00 and :30, snap accordingly.
        boundary = 0 if now.second < TIMER_30_SEC_BOUNDARY else TIMER_30_SEC_BOUNDARY
        timestamp = now.replace(second=boundary, microsecond=0)
    attributes["timestamp"] = timestamp

    # Mirror hour-based states as minute-precision attributes so automations
    # keep friendly integer values.
    minutes = _hours_to_minutes(state_value)
    if minutes is not None:
        if key.endswith("period_duration"):
            attributes["period_duration_minutes"] = minutes
        elif key.endswith("remaining_minutes"):
            attributes["remaining_minutes"] = minutes
        elif key.endswith("next_in_minutes"):
            attributes["next_in_minutes"] = minutes

    # icon_color enables dynamic frontend styling.
    add_icon_color_attribute(attributes, key=key, state_value=state_value)

Some files were not shown because too many files have changed in this diff Show more